diff --git a/.gitmodules b/.gitmodules
index e013bbf1b..a700e1709 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -8,3 +8,12 @@
url = https://dawn.googlesource.com/dawn
shallow = true
branch = chromium/6108
+[submodule "deps/emsdk"]
+ path = deps/emsdk
+ url = https://github.com/emscripten-core/emsdk.git
+ shallow = true
+ branch = 3.1.53
+[submodule "deps/manif"]
+ path = deps/manif
+ url = https://github.com/artivis/manif.git
+ shallow = true
diff --git a/CMakeLists.txt b/CMakeLists.txt
index c5c9ab295..e67a88670 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -86,18 +86,15 @@ if (MROVER_BUILD_SIM)
if (NOT Assimp_FOUND AND NOT assimp_FOUND)
message(FATAL_ERROR "Assimp not found")
endif ()
-
find_package(Bullet REQUIRED)
-
find_package(glfw3 REQUIRED)
-
include(cmake/webgpu.cmake)
-
- add_subdirectory(deps/glfw3webgpu)
+ add_subdirectory(deps/glfw3webgpu SYSTEM)
endif ()
find_package(OpenCV REQUIRED)
find_package(ZED QUIET)
find_package(Eigen3 REQUIRED)
+add_subdirectory(deps/manif SYSTEM)
find_package(PkgConfig REQUIRED)
pkg_search_module(NetLink libnl-3.0)
pkg_search_module(NetLinkRoute libnl-route-3.0)
@@ -147,6 +144,7 @@ catkin_package()
## Libraries
mrover_add_library(lie src/util/lie/*.cpp src/util/lie)
+target_link_libraries(lie PUBLIC manif)
## ESW
@@ -188,6 +186,9 @@ endif ()
## Perception
+mrover_add_library(streaming src/esw/streaming/*.cpp src/esw/streaming)
+target_compile_definitions(streaming PUBLIC BOOST_ASIO_NO_DEPRECATED)
+
mrover_add_nodelet(zed_tag_detector src/perception/tag_detector/zed/*.cpp src/perception/tag_detector/zed src/perception/tag_detector/zed/pch.hpp)
mrover_nodelet_link_libraries(zed_tag_detector opencv_core opencv_objdetect opencv_aruco opencv_imgproc tbb lie)
@@ -197,13 +198,18 @@ mrover_nodelet_link_libraries(long_range_tag_detector opencv_core opencv_objdete
mrover_add_nodelet(usb_camera src/perception/long_range_cam/*.cpp src/perception/long_range_cam src/perception/long_range_cam/pch.hpp)
mrover_nodelet_link_libraries(usb_camera opencv_core opencv_objdetect opencv_aruco opencv_imgproc opencv_highgui tbb lie)
if (CUDA_FOUND)
- mrover_add_library(streaming src/esw/streaming/*.c* src/esw/streaming)
- # target_link_libraries(streaming PUBLIC opencv_core opencv_cudacodec)
- target_link_libraries(streaming PUBLIC cuda nvidia-encode opencv_core)
- target_include_directories(streaming SYSTEM PUBLIC deps/nvenc)
- target_compile_definitions(streaming PUBLIC BOOST_ASIO_NO_DEPRECATED)
+ mrover_add_node(hardware_h265_encoder src/esw/hardware_h265_encoder/*.c*)
+ target_link_libraries(hardware_h265_encoder PUBLIC cuda nvidia-encode opencv_core opencv_imgproc opencv_imgcodecs opencv_videoio streaming)
+ target_include_directories(hardware_h265_encoder SYSTEM PUBLIC deps/nvenc)
endif ()
+add_subdirectory(deps/libde265)
+include_directories(SYSTEM deps/libde265/extra deps/libde265)
+
+mrover_add_node(software_h265_encoder src/esw/software_h265_encoder/*.c*)
+target_link_libraries(software_h265_encoder PUBLIC opencv_core opencv_imgproc opencv_imgcodecs de265 streaming)
+target_include_directories(software_h265_encoder SYSTEM PUBLIC deps/nvenc)
+
if (ZED_FOUND)
mrover_add_nodelet(object_detector src/perception/object_detector/*.c* src/perception/object_detector src/perception/object_detector/pch.hpp)
mrover_nodelet_link_libraries(object_detector PRIVATE opencv_core opencv_dnn opencv_imgproc lie nvinfer nvonnxparser tbb)
diff --git a/config/simulator/simulator.yaml b/config/simulator/simulator.yaml
index acd6866b2..94fa962fc 100644
--- a/config/simulator/simulator.yaml
+++ b/config/simulator/simulator.yaml
@@ -9,7 +9,7 @@ objects:
- type: urdf
name: tag_0
uri: package://mrover/urdf/world/tag_0.urdf.xacro
- translation: [ -3, -3, 0.7 ]
+ translation: [ -2, -2, 0.7 ]
- type: urdf
name: tag_1
uri: package://mrover/urdf/world/tag_1.urdf.xacro
diff --git a/deps/emsdk b/deps/emsdk
new file mode 160000
index 000000000..2aa749071
--- /dev/null
+++ b/deps/emsdk
@@ -0,0 +1 @@
+Subproject commit 2aa74907151b2caa9da865fd0d36436fdce792f0
diff --git a/deps/manif b/deps/manif
new file mode 160000
index 000000000..c8a9fcbfe
--- /dev/null
+++ b/deps/manif
@@ -0,0 +1 @@
+Subproject commit c8a9fcbfe157b55a7a860577febdc5350043a722
diff --git a/launch/simulator.launch b/launch/simulator.launch
index fe9736725..b18f3249d 100644
--- a/launch/simulator.launch
+++ b/launch/simulator.launch
@@ -1,4 +1,5 @@
+
@@ -13,4 +14,9 @@
args="load mrover/SimulatorNodelet perception_nodelet_manager" output="screen" />
+
+
+
+
+
\ No newline at end of file
diff --git a/scripts/build_teleop_stream_client.sh b/scripts/build_teleop_stream_client.sh
new file mode 100755
index 000000000..8faa444fc
--- /dev/null
+++ b/scripts/build_teleop_stream_client.sh
@@ -0,0 +1,5 @@
+#!/bin/bash
+
+source deps/emsdk/emsdk_env.sh
+emcmake cmake -B src/teleoperation/streaming/embuild -G Ninja -DCMAKE_BUILD_TYPE=Release src/teleoperation/streaming/
+cmake --build src/teleoperation/streaming/embuild
diff --git a/scripts/toggle_basestation_networking b/scripts/toggle_basestation_networking
index 628d90e9d..e4f1447b0 100755
--- a/scripts/toggle_basestation_networking
+++ b/scripts/toggle_basestation_networking
@@ -1,12 +1,20 @@
-# !/usr/bin/env bash
+#!/usr/bin/env zsh
# Script to add or remove ROS basestation networking script from bashrc
# TODO: make this work for bash and other catkin workspace locations
-if grep -q "source ~/catkin_ws/src/mrover/ansible/roles/basestation_networks/files/networking_setup_basestation.sh" ~/.zshrc; then
+CATKIN_WORKSPACE_PATH=~/catkin_ws
+network_script_path="${CATKIN_WORKSPACE_PATH}/src/mrover/ansible/roles/basestation_networks/files/networking_setup_basestation"
+enable_message="Basestation network setup enabled, disable with scripts/toggle_basestation_networking"
+
+if grep -q "source ${network_script_path}" ~/.zshrc; then
echo "Removing basestation networking script from .zshrc"
-sed -i '/source ~\/catkin_ws\/src\/mrover\/ansible\/roles\/basestation_networks\/files\/networking_setup_basestation.sh/d' ~/.zshrc
+sed -i "\|source ${network_script_path}|d" ~/.zshrc
else
echo "Adding basestation networking script to .zshrc"
- echo "source ~/catkin_ws/src/mrover/ansible/roles/basestation_networks/files/networking_setup_basestation.sh" >> ~/.zshrc
+ if [[ $(tail -c1 ~/.zshrc | wc -l) -eq 0 ]]; then
+ echo "\n" >> ~/.zshrc
+ fi
+ echo "echo '${enable_message}'" >> ~/.zshrc
+ echo "source ${network_script_path}" >> ~/.zshrc
fi
diff --git a/src/esw/cameras/cameras.cpp b/src/esw/cameras/cameras.cpp
deleted file mode 100644
index 0836ba162..000000000
--- a/src/esw/cameras/cameras.cpp
+++ /dev/null
@@ -1,99 +0,0 @@
-#include
-#include
-
-#include
-#include
-#include
-
-#include
-#include
-#include
-
-//#include
-
-int main() {
- StreamServer streamServer{"0.0.0.0", 8080};
-
- cv::Size size{1280, 720};
-
- NvEncoder nvEncoder{size};
-
- cv::VideoCapture cap{std::format("v4l2src ! videoconvert ! video/x-raw,width={},height={},format=I420,framerate=10/1 ! appsink", size.width, size.height), cv::CAP_GSTREAMER};
-
- // cv::VideoCapture cap{std::format("videotestsrc ! video/x-raw,width={},height={},format=I420,framerate=30/1 ! appsink", size.width, size.height), cv::CAP_GSTREAMER};
-
- if (!cap.isOpened()) throw std::runtime_error{"Failed to open capture"};
-
- size_t totalSent = 0;
- size_t totalFrames = 0;
- auto now = std::chrono::high_resolution_clock::now();
-
- cv::Mat frameI420;
- while (streamServer.m_client->is_open() && cap.read(frameI420)) {
- assert(frameI420.type() == CV_8UC1);
-
- cv::imshow("USB", frameI420);
- cv::waitKey(1);
-
- NvEncoder::BitstreamView view = nvEncoder.feed(frameI420);
- std::span span{static_cast(view.lockParams.bitstreamBufferPtr), view.lockParams.bitstreamSizeInBytes};
- streamServer.feed(span);
-
- totalSent += span.size_bytes();
- totalFrames++;
- double MB = totalSent / 1024.0 / 1024.0;
- auto elapsed = std::chrono::duration(std::chrono::high_resolution_clock::now() - now);
- double MBps = MB / elapsed.count();
- double fps = totalFrames / elapsed.count();
-
- std::cout << std::format("MBps: {} FPS: {}\n", MBps, fps);
- }
-
- // static de265_decoder_context* decoder = de265_new_decoder();
- // if (!decoder) return EXIT_FAILURE;
- // if (de265_error error = de265_start_worker_threads(decoder, 0); error != DE265_OK) throw std::runtime_error{"Errored starting worker threads"};
- //
- // int more = 1;
- // while (more) {
- // de265_error status = de265_decode(decoder, &more);
- //
- // if (status == DE265_ERROR_WAITING_FOR_INPUT_DATA) {
- // cv::Mat frame;
- // cap.read(frame);
- // assert(frame.type() == CV_8UC1);
- //
- // cv::imshow("F", frame);
- // cv::waitKey(0);
- //
- // NvEncoder::BitstreamView view = nvEncoder.feed(frame);
- // if (de265_error error = de265_push_data(decoder, view.lockParams.bitstreamBufferPtr, view.lockParams.bitstreamSizeInBytes, clock(), nullptr); error != DE265_OK) {
- // throw std::runtime_error{"Errored pushing encoder data"};
- // }
- // }
- //
- // const de265_image* image;
- // do {
- // image = de265_peek_next_picture(decoder);
- // if (image) {
- // int width = de265_get_image_width(image, 0);
- // int height = de265_get_image_height(image, 0);
- // int format = de265_get_chroma_format(image);
- // int bpp = de265_get_bits_per_pixel(image, 0);
- //
- // if (cv::Size{width, height} != size) throw std::runtime_error{"Unexpected image size"};
- // if (format != de265_chroma_420) throw std::runtime_error{"Unsupported chroma format"};
- // if (bpp != 8) throw std::runtime_error{"Unsupported bits per pixel"};
- //
- // int ystride;
- // auto* y = de265_get_image_plane(image, 0, &ystride);
- // cv::Mat Y{height, width, CV_8UC1, (void*) y, static_cast(ystride)};
- // cv::imshow("Y", Y);
- // cv::waitKey(1);
- //
- // de265_release_next_picture(decoder);
- // }
- // } while (image);
- // }
-
- return EXIT_SUCCESS;
-}
\ No newline at end of file
diff --git a/src/esw/hardware_h265_encoder/hardware_h265_encoder.cpp b/src/esw/hardware_h265_encoder/hardware_h265_encoder.cpp
new file mode 100644
index 000000000..590ab7efe
--- /dev/null
+++ b/src/esw/hardware_h265_encoder/hardware_h265_encoder.cpp
@@ -0,0 +1,84 @@
+#include "hardware_h265_encoder.hpp"
+
+#include
+#include
+#include
+#include
+#include
+
+#include
+
+#include
+#include
+#include
+
+std::optional<StreamServer> streamServer;
+
+std::optional<Encoder> encoder;
+
+auto imageCallback(sensor_msgs::ImageConstPtr const& msg) -> void {
+ try {
+ if (msg->encoding != sensor_msgs::image_encodings::BGR8) throw std::runtime_error{"Unsupported encoding"};
+
+ cv::Size size{static_cast(msg->width), static_cast(msg->height)};
+
+ if (!encoder) encoder.emplace(size);
+
+ cv::Mat bgrFrame{size, CV_8UC3, const_cast(msg->data.data()), msg->step};
+ cv::Mat bgraFrame;
+ cv::cvtColor(bgrFrame, bgraFrame, cv::COLOR_BGR2BGRA);
+
+ bool feedSuccessful = false;
+ {
+ Encoder::BitstreamView view = encoder->feed(bgraFrame);
+ std::span span{static_cast(view.lockParams.bitstreamBufferPtr), view.lockParams.bitstreamSizeInBytes};
+ feedSuccessful = streamServer->feed(span);
+ }
+ if (!feedSuccessful) encoder.reset();
+
+ } catch (std::exception const& e) {
+ ROS_ERROR_STREAM(std::format("Exception encoding frame: {}", e.what()));
+ ros::requestShutdown();
+ }
+}
+
+// auto capture() -> void {
+// cv::VideoCapture cap{std::format("v4l2src ! videoconvert ! video/x-raw,width={},height={},format=I420,framerate=30/1 ! appsink", 640, 480), cv::CAP_GSTREAMER};
+// if (!cap.isOpened()) throw std::runtime_error{"Failed to open capture"};
+//
+// while (cap.isOpened()) {
+// cv::Mat i420Frame;
+// if (!cap.read(i420Frame)) throw std::runtime_error{"Failed to read frame"};
+//
+// cv::Mat bgraFrame;
+// cvtColor(i420Frame, bgraFrame, cv::COLOR_YUV2BGRA_I420);
+//
+// Encoder::BitstreamView view = encoder->feed(bgraFrame);
+// std::span span{static_cast(view.lockParams.bitstreamBufferPtr), view.lockParams.bitstreamSizeInBytes};
+// streamServer->feed(span);
+//
+// ros::spinOnce();
+// }
+// }
+
+auto main(int argc, char** argv) -> int {
+ try {
+ ros::init(argc, argv, "hardware_h265_encoder");
+ ros::NodeHandle nh, pnh{"~"};
+
+ std::string imageTopic = pnh.param("image_topic", std::string{"/camera/left/image"});
+
+ streamServer.emplace("0.0.0.0", 8080);
+
+ ros::Subscriber imageSubscriber = nh.subscribe(imageTopic, 1, imageCallback);
+
+ // capture();
+
+ ros::spin();
+ return EXIT_SUCCESS;
+
+ } catch (std::exception const& e) {
+ ROS_ERROR_STREAM(std::format("Exception initializing: {}", e.what()));
+ return EXIT_FAILURE;
+ }
+}
diff --git a/src/esw/streaming/encoding.cu b/src/esw/hardware_h265_encoder/hardware_h265_encoder.cu
similarity index 61%
rename from src/esw/streaming/encoding.cu
rename to src/esw/hardware_h265_encoder/hardware_h265_encoder.cu
index 09efffd82..660a92b10 100644
--- a/src/esw/streaming/encoding.cu
+++ b/src/esw/hardware_h265_encoder/hardware_h265_encoder.cu
@@ -1,4 +1,4 @@
-#include "encoding.hpp"
+#include "hardware_h265_encoder.hpp"
#include
#include
@@ -7,15 +7,11 @@
#include
#include
+#include
+
#include
#include
-void NvCheck(NVENCSTATUS status) {
- if (status != NV_ENC_SUCCESS) {
- throw std::runtime_error("NvEnc failed");
- }
-}
-
void cuCheck(CUresult status) {
if (status != CUDA_SUCCESS) {
throw std::runtime_error("CUDA failed");
@@ -28,10 +24,16 @@ void cudaCheck(cudaError_t status) {
}
}
+void NvCheck(NVENCSTATUS status) {
+ if (status != NV_ENC_SUCCESS) {
+ throw std::runtime_error("NvEnc failed");
+ }
+}
+
namespace std {
template<>
struct equal_to {
- bool operator()(GUID const& g1, GUID const& g2) const {
+ auto operator()(GUID const& g1, GUID const& g2) const -> bool {
return g1.Data1 == g2.Data1 && g1.Data2 == g2.Data2 && g1.Data3 == g2.Data3 &&
g1.Data4[0] == g2.Data4[0] && g1.Data4[1] == g2.Data4[1] && g1.Data4[2] == g2.Data4[2] &&
g1.Data4[3] == g2.Data4[3] && g1.Data4[4] == g2.Data4[4] && g1.Data4[5] == g2.Data4[5] &&
@@ -41,13 +43,13 @@ namespace std {
template<>
struct hash {
- std::size_t operator()(GUID const& g) const {
+ auto operator()(GUID const& g) const noexcept -> std::size_t {
std::size_t seed = 0;
seed ^= std::hash{}(g.Data1);
seed ^= std::hash{}(g.Data2);
seed ^= std::hash{}(g.Data3);
- for (std::size_t i = 0; i < 8; ++i) {
- seed ^= std::hash{}(g.Data4[i]);
+ for (unsigned char i: g.Data4) {
+ seed ^= std::hash{}(i);
}
return seed;
}
@@ -60,34 +62,39 @@ std::unordered_map GUID_TO_NAME{
{NV_ENC_CODEC_AV1_GUID, "AV1"},
};
-NvEncoder::NvEncoder(cv::Size const& size) : m_size{size} {
- cudaCheck(cudaSetDevice(0));
- CUcontext context;
- cuCheck(cuCtxGetCurrent(&context));
+NV_ENCODE_API_FUNCTION_LIST NVENV_API{.version = NV_ENCODE_API_FUNCTION_LIST_VER};
+CUcontext CUDA_CONTEXT = nullptr;
+
+Encoder::Encoder(cv::Size const& size) : m_size{size} {
+ if (!CUDA_CONTEXT) {
+ cudaCheck(cudaSetDevice(0));
+ cuCheck(cuCtxGetCurrent(&CUDA_CONTEXT));
+
+ NvCheck(NvEncodeAPICreateInstance(&NVENV_API));
+ }
- NvCheck(NvEncodeAPICreateInstance(&m_nvenc));
NV_ENC_OPEN_ENCODE_SESSION_EX_PARAMS params{
.version = NV_ENC_OPEN_ENCODE_SESSION_EX_PARAMS_VER,
.deviceType = NV_ENC_DEVICE_TYPE_CUDA,
- .device = context,
+ .device = CUDA_CONTEXT,
.apiVersion = NVENCAPI_VERSION,
};
- NvCheck(m_nvenc.nvEncOpenEncodeSessionEx(¶ms, &m_encoder));
+ NvCheck(NVENV_API.nvEncOpenEncodeSessionEx(¶ms, &m_encoder));
if (!m_encoder) {
throw std::runtime_error("No encoder");
}
std::uint32_t guidCount;
- NvCheck(m_nvenc.nvEncGetEncodeGUIDCount(m_encoder, &guidCount));
+ NvCheck(NVENV_API.nvEncGetEncodeGUIDCount(m_encoder, &guidCount));
if (guidCount == 0) {
throw std::runtime_error("No GUIDs");
}
std::vector guids(guidCount);
- NvCheck(m_nvenc.nvEncGetEncodeGUIDs(m_encoder, guids.data(), guidCount, &guidCount));
- std::cout << "Supported encoders:" << std::endl;
+ NvCheck(NVENV_API.nvEncGetEncodeGUIDs(m_encoder, guids.data(), guidCount, &guidCount));
+ ROS_INFO("Supported encoders:");
for (GUID const& guid: guids) {
- std::cout << "\t" << GUID_TO_NAME[guid] << std::endl;
+ ROS_INFO_STREAM("\t" << GUID_TO_NAME[guid]);
}
GUID desiredEncodeGuid = NV_ENC_CODEC_HEVC_GUID;
@@ -100,9 +107,9 @@ NvEncoder::NvEncoder(cv::Size const& size) : m_size{size} {
}
std::uint32_t presetCount;
- NvCheck(m_nvenc.nvEncGetEncodePresetCount(m_encoder, desiredEncodeGuid, &presetCount));
+ NvCheck(NVENV_API.nvEncGetEncodePresetCount(m_encoder, desiredEncodeGuid, &presetCount));
std::vector presetGuids(presetCount);
- NvCheck(m_nvenc.nvEncGetEncodePresetGUIDs(m_encoder, desiredEncodeGuid, presetGuids.data(), presetCount, &presetCount));
+ NvCheck(NVENV_API.nvEncGetEncodePresetGUIDs(m_encoder, desiredEncodeGuid, presetGuids.data(), presetCount, &presetCount));
if (std::none_of(presetGuids.begin(), presetGuids.end(), [&](const GUID& guid) {
return std::equal_to{}(guid, desiredPresetGuid);
})) {
@@ -116,7 +123,7 @@ NvEncoder::NvEncoder(cv::Size const& size) : m_size{size} {
.version = NV_ENC_CONFIG_VER,
},
};
- NvCheck(m_nvenc.nvEncGetEncodePresetConfigEx(m_encoder, desiredEncodeGuid, desiredPresetGuid, tuningInfo, &presetConfig));
+ NvCheck(NVENV_API.nvEncGetEncodePresetConfigEx(m_encoder, desiredEncodeGuid, desiredPresetGuid, tuningInfo, &presetConfig));
NV_ENC_INITIALIZE_PARAMS initEncParams{
.version = NV_ENC_INITIALIZE_PARAMS_VER,
@@ -130,46 +137,46 @@ NvEncoder::NvEncoder(cv::Size const& size) : m_size{size} {
.encodeConfig = &presetConfig.presetCfg,
.tuningInfo = tuningInfo,
};
- NvCheck(m_nvenc.nvEncInitializeEncoder(m_encoder, &initEncParams));
+ NvCheck(NVENV_API.nvEncInitializeEncoder(m_encoder, &initEncParams));
NV_ENC_CREATE_INPUT_BUFFER createInputBufferParams{
.version = NV_ENC_CREATE_INPUT_BUFFER_VER,
.width = static_cast(m_size.width),
.height = static_cast(m_size.height),
- .bufferFmt = NV_ENC_BUFFER_FORMAT_IYUV,
+ .bufferFmt = NV_ENC_BUFFER_FORMAT_ARGB,
};
- NvCheck(m_nvenc.nvEncCreateInputBuffer(m_encoder, &createInputBufferParams));
+ NvCheck(NVENV_API.nvEncCreateInputBuffer(m_encoder, &createInputBufferParams));
m_input = createInputBufferParams.inputBuffer;
NV_ENC_CREATE_BITSTREAM_BUFFER createBitstreamBufferParams{
.version = NV_ENC_CREATE_BITSTREAM_BUFFER_VER,
};
- NvCheck(m_nvenc.nvEncCreateBitstreamBuffer(m_encoder, &createBitstreamBufferParams));
+ NvCheck(NVENV_API.nvEncCreateBitstreamBuffer(m_encoder, &createBitstreamBufferParams));
m_output = createBitstreamBufferParams.bitstreamBuffer;
}
-NvEncoder::BitstreamView NvEncoder::feed(cv::InputArray frameI420) {
- if (!frameI420.isContinuous()) throw std::runtime_error("Frame is not continuous");
- if (frameI420.type() != CV_8UC1) throw std::runtime_error("Not single channel, note that YUV420 is expected");
- if (frameI420.size() != cv::Size{m_size.width, m_size.height + m_size.height / 2}) throw std::runtime_error("Wrong size, note that YUV420 is expected");
+auto Encoder::feed(cv::InputArray frameBgra) -> BitstreamView {
+ if (!frameBgra.isContinuous()) throw std::runtime_error("Frame is not continuous");
+ // if (frameI420.type() != CV_8UC1) throw std::runtime_error("Not single channel, note that YUV420 is expected");
+ // if (frameI420.size() != cv::Size{m_size.width, m_size.height + m_size.height / 2}) throw std::runtime_error("Wrong size, note that YUV420 is expected");
NV_ENC_LOCK_INPUT_BUFFER lockInputBufferParams{
.version = NV_ENC_LOCK_INPUT_BUFFER_VER,
.inputBuffer = m_input,
};
- NvCheck(m_nvenc.nvEncLockInputBuffer(m_encoder, &lockInputBufferParams));
- for (int r = 0; r < frameI420.rows(); ++r) {
- std::byte* row = static_cast(lockInputBufferParams.bufferDataPtr) + r * lockInputBufferParams.pitch;
- std::memcpy(row, frameI420.getMat().ptr(r), frameI420.cols());
+ NvCheck(NVENV_API.nvEncLockInputBuffer(m_encoder, &lockInputBufferParams));
+ for (int r = 0; r < frameBgra.rows(); ++r) {
+ auto* row = static_cast(lockInputBufferParams.bufferDataPtr) + r * lockInputBufferParams.pitch;
+ std::memcpy(row, frameBgra.getMat().ptr(r), frameBgra.cols() * 4);
}
- NvCheck(m_nvenc.nvEncUnlockInputBuffer(m_encoder, m_input));
+ NvCheck(NVENV_API.nvEncUnlockInputBuffer(m_encoder, m_input));
NV_ENC_PIC_PARAMS picParams{
.version = NV_ENC_PIC_PARAMS_VER,
.inputWidth = static_cast(m_size.width),
.inputHeight = static_cast(m_size.height),
.frameIdx = m_frame_index++,
- .inputTimeStamp = static_cast(m_clock.now().time_since_epoch().count()),
+ .inputTimeStamp = static_cast(std::chrono::high_resolution_clock::now().time_since_epoch().count()),
.inputBuffer = m_input,
.outputBitstream = m_output,
.completionEvent = nullptr,
@@ -177,27 +184,27 @@ NvEncoder::BitstreamView NvEncoder::feed(cv::InputArray frameI420) {
.pictureStruct = NV_ENC_PIC_STRUCT_FRAME,
};
- NvCheck(m_nvenc.nvEncEncodePicture(m_encoder, &picParams));
+ NvCheck(NVENV_API.nvEncEncodePicture(m_encoder, &picParams));
- return {&m_nvenc, m_encoder, m_output};
+ return {&NVENV_API, m_encoder, m_output};
}
-NvEncoder::BitstreamView::BitstreamView(NV_ENCODE_API_FUNCTION_LIST* nvenc, void* encoder, NV_ENC_OUTPUT_PTR output)
+Encoder::BitstreamView::BitstreamView(NV_ENCODE_API_FUNCTION_LIST* nvenc, void* encoder, NV_ENC_OUTPUT_PTR output)
: nvenc{nvenc}, encoder{encoder}, output{output}, lockParams{.version = NV_ENC_LOCK_BITSTREAM_VER, .outputBitstream = output} {
NvCheck(nvenc->nvEncLockBitstream(encoder, &lockParams));
}
-NvEncoder::BitstreamView::~BitstreamView() {
+Encoder::BitstreamView::~BitstreamView() {
if (nvenc && encoder && output) {
NvCheck(nvenc->nvEncUnlockBitstream(encoder, output));
}
}
-NvEncoder::BitstreamView::BitstreamView(NvEncoder::BitstreamView&& other) noexcept {
+Encoder::BitstreamView::BitstreamView(BitstreamView&& other) noexcept {
*this = std::move(other);
}
-NvEncoder::BitstreamView& NvEncoder::BitstreamView::operator=(NvEncoder::BitstreamView&& other) noexcept {
+auto Encoder::BitstreamView::operator=(BitstreamView&& other) noexcept -> BitstreamView& {
std::swap(nvenc, other.nvenc);
std::swap(encoder, other.encoder);
std::swap(output, other.output);
@@ -205,8 +212,8 @@ NvEncoder::BitstreamView& NvEncoder::BitstreamView::operator=(NvEncoder::Bitstre
return *this;
}
-NvEncoder::~NvEncoder() {
- NvCheck(m_nvenc.nvEncDestroyInputBuffer(m_encoder, m_input));
- NvCheck(m_nvenc.nvEncDestroyBitstreamBuffer(m_encoder, m_output));
- NvCheck(m_nvenc.nvEncDestroyEncoder(m_encoder));
+Encoder::~Encoder() {
+ NvCheck(NVENV_API.nvEncDestroyInputBuffer(m_encoder, m_input));
+ NvCheck(NVENV_API.nvEncDestroyBitstreamBuffer(m_encoder, m_output));
+ NvCheck(NVENV_API.nvEncDestroyEncoder(m_encoder));
}
diff --git a/src/esw/streaming/encoding.hpp b/src/esw/hardware_h265_encoder/hardware_h265_encoder.hpp
similarity index 64%
rename from src/esw/streaming/encoding.hpp
rename to src/esw/hardware_h265_encoder/hardware_h265_encoder.hpp
index 241a013f8..394ec7a14 100644
--- a/src/esw/streaming/encoding.hpp
+++ b/src/esw/hardware_h265_encoder/hardware_h265_encoder.hpp
@@ -1,13 +1,12 @@
#pragma once
#include
-#include
#include
#include
-struct NvEncoder {
-
+class Encoder {
+public:
struct BitstreamView {
NV_ENCODE_API_FUNCTION_LIST* nvenc = nullptr;
void* encoder = nullptr;
@@ -19,24 +18,24 @@ struct NvEncoder {
~BitstreamView();
BitstreamView(BitstreamView const&) = delete;
- BitstreamView& operator=(BitstreamView const&) = delete;
+ auto operator=(BitstreamView const&) -> BitstreamView& = delete;
BitstreamView(BitstreamView&& other) noexcept;
- BitstreamView& operator=(BitstreamView&& other) noexcept;
+ auto operator=(BitstreamView&& other) noexcept -> BitstreamView&;
};
+private:
cv::Size m_size;
- NV_ENCODE_API_FUNCTION_LIST m_nvenc{.version = NV_ENCODE_API_FUNCTION_LIST_VER};
void* m_encoder = nullptr;
NV_ENC_INPUT_PTR m_input = nullptr;
NV_ENC_OUTPUT_PTR m_output = nullptr;
- uint32_t m_frame_index = 0;
+ std::uint32_t m_frame_index = 0;
std::chrono::high_resolution_clock m_clock;
public:
- NvEncoder(cv::Size const& size);
+ explicit Encoder(cv::Size const& size);
- [[nodiscard]] BitstreamView feed(cv::InputArray frameI420);
+ [[nodiscard]] auto feed(cv::InputArray frameBgra) -> BitstreamView;
- ~NvEncoder();
-};
\ No newline at end of file
+ ~Encoder();
+};
diff --git a/src/esw/software_h265_encoder/software_h265_encoder.cpp b/src/esw/software_h265_encoder/software_h265_encoder.cpp
new file mode 100644
index 000000000..53cd716ce
--- /dev/null
+++ b/src/esw/software_h265_encoder/software_h265_encoder.cpp
@@ -0,0 +1,88 @@
+#include "software_h265_encoder.hpp"
+
+#include
+#include
+
+#include
+
+#include
+#include
+
+#include
+#include
+
+#include
+#include
+#include
+
+std::optional<StreamServer> streamServer;
+
+en265_encoder_context* encoder = nullptr;
+
+auto imageCallback(sensor_msgs::ImageConstPtr const& msg) -> void {
+ try {
+ cv::Mat bgraImage{static_cast(msg->height), static_cast(msg->width), CV_8UC4, const_cast(msg->data.data()), msg->step};
+ cv::Mat yuv420Image;
+ cvtColor(bgraImage, yuv420Image, cv::COLOR_BGRA2YUV_I420);
+
+ de265_image* frame = en265_allocate_image(encoder, static_cast(msg->width), static_cast(msg->height), de265_chroma_420, 0, nullptr);
+ if (frame == nullptr) {
+ throw std::runtime_error{"Failed to allocate image"};
+ }
+ std::uint8_t* y = frame->get_image_plane(0);
+ std::uint8_t* u = frame->get_image_plane(1);
+ std::uint8_t* v = frame->get_image_plane(2);
+ // int yStride = frame->get_image_stride(0);
+ // int uStride = frame->get_image_stride(1);
+ // int vStride = frame->get_image_stride(2);
+ std::size_t area = bgraImage.total();
+ std::memcpy(y, yuv420Image.data, area);
+ std::memcpy(u, yuv420Image.data + area, area / 4);
+ std::memcpy(v, yuv420Image.data + area * 5 / 4, area / 4);
+
+ if (en265_push_image(encoder, frame) != DE265_OK) {
+ throw std::runtime_error{"Failed to push image"};
+ }
+ if (en265_encode(encoder) != DE265_OK) {
+ throw std::runtime_error{"Failed to encode"};
+ }
+
+ en265_packet* packet;
+ while ((packet = en265_get_packet(encoder, 0)) != nullptr) {
+ std::span span{std::bit_cast(packet->data), static_cast(packet->length)};
+ ROS_INFO_STREAM(span.size());
+ streamServer->feed(span);
+ en265_free_packet(encoder, packet);
+ }
+
+ } catch (std::exception const& e) {
+ ROS_ERROR_STREAM(std::format("Unhandled exception: {}", e.what()));
+ ros::requestShutdown();
+ }
+}
+
+auto main(int argc, char** argv) -> int {
+ try {
+ ros::init(argc, argv, "software_h265_encoder");
+ ros::NodeHandle nh;
+
+ streamServer.emplace("0.0.0.0", 8080);
+
+ encoder = en265_new_encoder();
+ if (encoder == nullptr) {
+ throw std::runtime_error{"Failed to create encoder context"};
+ }
+ if (en265_start_encoder(encoder, 0) != DE265_OK) {
+ throw std::runtime_error{"Failed to start encoder"};
+ }
+
+ ros::Subscriber imageSubscriber = nh.subscribe("/camera/left/image", 1, imageCallback);
+
+ ros::spin();
+ return EXIT_SUCCESS;
+
+ } catch (std::exception const& e) {
+ ROS_ERROR_STREAM(std::format("Unhandled exception: {}", e.what()));
+ return EXIT_FAILURE;
+ }
+}
\ No newline at end of file
diff --git a/src/esw/software_h265_encoder/software_h265_encoder.hpp b/src/esw/software_h265_encoder/software_h265_encoder.hpp
new file mode 100644
index 000000000..b71113648
--- /dev/null
+++ b/src/esw/software_h265_encoder/software_h265_encoder.hpp
@@ -0,0 +1,6 @@
+#pragma once
+
+#include
+
+#include
+
diff --git a/src/esw/streaming/streaming.cpp b/src/esw/streaming/streaming.cpp
index c3bc815b7..e5af3cd07 100644
--- a/src/esw/streaming/streaming.cpp
+++ b/src/esw/streaming/streaming.cpp
@@ -3,18 +3,19 @@
#include
#include
-#include
-#include
#include
+#include
+
+using namespace std::chrono_literals;
+
StreamServer::StreamServer(std::string_view host, std::uint16_t port)
: m_acceptor{m_context} {
- tcp::socket socket{m_context};
beast::error_code ec;
tcp::endpoint endpoint{net::ip::make_address(host), port};
- std::cout << "Starting H.265 streaming server @" << endpoint << std::endl;
+ ROS_INFO_STREAM("Starting H.265 streaming server @" << endpoint);
m_acceptor.open(endpoint.protocol(), ec);
if (ec) throw std::runtime_error{std::format("Failed to open acceptor: {}", ec.message())};
@@ -27,20 +28,32 @@ StreamServer::StreamServer(std::string_view host, std::uint16_t port)
m_acceptor.listen(net::socket_base::max_listen_connections, ec);
if (ec) throw std::runtime_error{std::format("Failed to listen: {}", ec.message())};
-
- m_client.emplace(m_acceptor.accept());
-
- m_client->binary(true);
- m_client->set_option(websocket::stream_base::timeout::suggested(beast::role_type::server));
- m_client->set_option(websocket::stream_base::decorator([](websocket::response_type& res) {
- res.set(http::field::server, std::format("{} {}", BOOST_BEAST_VERSION_STRING, "mrover-stream-server"));
- }));
- m_client->accept();
-
- std::cout << "Client connected" << std::endl;
}
-void StreamServer::feed(std::span data) {
+auto StreamServer::feed(std::span data) -> bool {
+ if (!m_client) {
+ m_client.emplace(m_acceptor.accept());
+ m_client->binary(true);
+ m_client->set_option(websocket::stream_base::timeout{
+ .handshake_timeout = 3s,
+ .idle_timeout = 3s,
+ .keep_alive_pings = true,
+ });
+ m_client->set_option(websocket::stream_base::decorator([](websocket::response_type& res) {
+ res.set(http::field::server, std::format("{} {}", BOOST_BEAST_VERSION_STRING, "mrover-stream-server"));
+ }));
+ ROS_INFO("Waiting for client..");
+ m_client->accept();
+ ROS_INFO("Client connected");
+ }
+
net::mutable_buffer buffer{data.data(), data.size()};
- m_client->write(buffer);
+ try {
+ m_client->write(buffer);
+ return true;
+ } catch (std::exception const& e) {
+ ROS_WARN_STREAM(std::format("Exception writing to client: {}", e.what()));
+ m_client.reset();
+ return false;
+ }
}
diff --git a/src/esw/streaming/streaming.hpp b/src/esw/streaming/streaming.hpp
index 0510655f6..e2437e6ae 100644
--- a/src/esw/streaming/streaming.hpp
+++ b/src/esw/streaming/streaming.hpp
@@ -16,7 +16,7 @@ namespace websocket = beast::websocket;
using tcp = net::ip::tcp;
-struct StreamServer {
+class StreamServer {
net::io_context m_context;
tcp::acceptor m_acceptor;
@@ -25,5 +25,5 @@ struct StreamServer {
public:
StreamServer(std::string_view host, std::uint16_t port);
- void feed(std::span data);
+ auto feed(std::span data) -> bool;
};
diff --git a/src/perception/long_range_cam/long_range_cam.cpp b/src/perception/long_range_cam/long_range_cam.cpp
index 83b24c97d..f9b5f6db9 100644
--- a/src/perception/long_range_cam/long_range_cam.cpp
+++ b/src/perception/long_range_cam/long_range_cam.cpp
@@ -28,33 +28,42 @@ namespace mrover {
}
auto LongRangeCamNodelet::grabUpdate() -> void {
- // See: http://wiki.ros.org/roscpp/Overview/NodeHandles for public vs. private node handle
- // MT means multithreaded
- mNh = getMTNodeHandle();
- mPnh = getMTPrivateNodeHandle();
- mCamInfoPub = mNh.advertise("long_range_cam/camera_info", 1);
- mImgPub = mNh.advertise("long_range_image", 1);
- auto width = mPnh.param("width", 1920);
- auto height = mPnh.param("height", 1080);
- auto framerate = mPnh.param("framerate", 5);
- auto device = mPnh.param("device", "/dev/arducam");
- cv::VideoCapture capture{std::format("v4l2src device={} ! videoconvert ! video/x-raw,width={},height={},format=I420,framerate={}/1 ! appsink", device, width, height, framerate), cv::CAP_GSTREAMER};
- if (!capture.isOpened()) throw std::runtime_error{"Long range cam failed to open"};
+ try {
+ NODELET_INFO("Starting USB grab thread...");
- cv::Mat frame;
- while (ros::ok()) {
- capture.read(frame);
- if (frame.empty()) break;
+ // See: http://wiki.ros.org/roscpp/Overview/NodeHandles for public vs. private node handle
+ // MT means multithreaded
+ mNh = getMTNodeHandle();
+ mPnh = getMTPrivateNodeHandle();
+ mCamInfoPub = mNh.advertise("long_range_cam/camera_info", 1);
+ mImgPub = mNh.advertise("long_range_image", 1);
+ auto width = mPnh.param("width", 1920);
+ auto height = mPnh.param("height", 1080);
+ auto framerate = mPnh.param("framerate", 5);
+ auto device = mPnh.param("device", "/dev/arducam");
+ cv::VideoCapture capture{std::format("v4l2src device={} ! videoconvert ! video/x-raw,width={},height={},format=I420,framerate={}/1 ! appsink", device, width, height, framerate), cv::CAP_GSTREAMER};
+ if (!capture.isOpened()) throw std::runtime_error{"Long range cam failed to open"};
- if (mImgPub.getNumSubscribers()) {
- auto imageMessage = boost::make_shared();
- cv::Mat bgr;
- cv::cvtColor(frame, bgr, cv::COLOR_YUV2BGR_I420);
- fillImageMessage(bgr, imageMessage);
- imageMessage->header.frame_id = "long_range_cam_frame";
- imageMessage->header.stamp = ros::Time::now();
- mImgPub.publish(imageMessage);
+ NODELET_INFO_STREAM(std::format("USB camera opened: {}x{} @ {} fps", width, height, framerate));
+
+ cv::Mat frame;
+ while (ros::ok()) {
+ capture.read(frame);
+ if (frame.empty()) break;
+
+ if (mImgPub.getNumSubscribers()) {
+ auto imageMessage = boost::make_shared();
+ cv::Mat bgr;
+ cv::cvtColor(frame, bgr, cv::COLOR_YUV2BGR_I420);
+ fillImageMessage(bgr, imageMessage);
+ imageMessage->header.frame_id = "long_range_cam_frame";
+ imageMessage->header.stamp = ros::Time::now();
+ mImgPub.publish(imageMessage);
+ }
}
+ } catch (std::exception const& e) {
+ NODELET_FATAL_STREAM(std::format("USB camera exception: {}", e.what()));
+ ros::requestShutdown();
}
}
diff --git a/src/perception/long_range_cam/pch.hpp b/src/perception/long_range_cam/pch.hpp
index f0c16770d..4b5399d54 100644
--- a/src/perception/long_range_cam/pch.hpp
+++ b/src/perception/long_range_cam/pch.hpp
@@ -29,4 +29,4 @@
#include
#include
-#include
+#include
diff --git a/src/perception/object_detector/object_detector.hpp b/src/perception/object_detector/object_detector.hpp
index 05a8acc51..1940156d8 100644
--- a/src/perception/object_detector/object_detector.hpp
+++ b/src/perception/object_detector/object_detector.hpp
@@ -53,10 +53,10 @@ namespace mrover {
auto onInit() -> void override;
auto getObjectInCamFromPixel(sensor_msgs::PointCloud2ConstPtr const& cloudPtr,
- size_t u, size_t v, size_t width, size_t height) -> std::optional;
+ size_t u, size_t v, size_t width, size_t height) -> std::optional;
auto spiralSearchInImg(sensor_msgs::PointCloud2ConstPtr const& cloudPtr,
- size_t xCenter, size_t yCenter, size_t width, size_t height) -> std::optional;
+ size_t xCenter, size_t yCenter, size_t width, size_t height) -> std::optional;
static auto convertPointCloudToRGBA(sensor_msgs::PointCloud2ConstPtr const& msg, cv::Mat& img) -> void;
diff --git a/src/perception/object_detector/object_detector.processing.cpp b/src/perception/object_detector/object_detector.processing.cpp
index 141fa8d99..815fc6388 100644
--- a/src/perception/object_detector/object_detector.processing.cpp
+++ b/src/perception/object_detector/object_detector.processing.cpp
@@ -1,4 +1,5 @@
#include "object_detector.hpp"
+#include "lie.hpp"
namespace mrover {
@@ -195,7 +196,7 @@ namespace mrover {
}
} // namespace mrover
- auto ObjectDetectorNodelet::getObjectInCamFromPixel(sensor_msgs::PointCloud2ConstPtr const& cloudPtr, size_t u, size_t v, size_t width, size_t height) -> std::optional {
+ auto ObjectDetectorNodelet::getObjectInCamFromPixel(sensor_msgs::PointCloud2ConstPtr const& cloudPtr, size_t u, size_t v, size_t width, size_t height) -> std::optional {
assert(cloudPtr);
if (u >= cloudPtr->width || v >= cloudPtr->height) {
NODELET_WARN("Tag center out of bounds: [%zu %zu]", u, v);
@@ -206,7 +207,7 @@ namespace mrover {
return spiralSearchInImg(cloudPtr, u, v, width, height);
}
- auto ObjectDetectorNodelet::spiralSearchInImg(sensor_msgs::PointCloud2ConstPtr const& cloudPtr, size_t xCenter, size_t yCenter, size_t width, size_t height) -> std::optional {
+ auto ObjectDetectorNodelet::spiralSearchInImg(sensor_msgs::PointCloud2ConstPtr const& cloudPtr, size_t xCenter, size_t yCenter, size_t width, size_t height) -> std::optional {
size_t currX = xCenter;
size_t currY = yCenter;
size_t radius = 0;
@@ -243,7 +244,7 @@ namespace mrover {
}
}
- return std::make_optional(R3{point.x, point.y, point.z}, SO3{});
+ return std::make_optional(R3{point.x, point.y, point.z}, SO3d::Identity());
}
auto ObjectDetectorNodelet::convertPointCloudToRGBA(sensor_msgs::PointCloud2ConstPtr const& msg, cv::Mat& img) -> void {
@@ -270,12 +271,12 @@ namespace mrover {
seenObjects.at(detection.classId) = true;
//Get the object's position in 3D from the point cloud and run this statement if the optional has a value
- if (std::optional objectLocation = getObjectInCamFromPixel(msg, centerWidth, centerHeight, box.width, box.height); objectLocation) {
+ if (std::optional objectLocation = getObjectInCamFromPixel(msg, centerWidth, centerHeight, box.width, box.height)) {
try {
- std::string immediateFrameId = "immediateDetectedObject" + classes.at(detection.classId);
+ std::string immediateFrameId = std::format("immediateDetectedObject{}", classes.at(detection.classId));
//Push the immediate detections to the zed frame
- SE3::pushToTfTree(mTfBroadcaster, immediateFrameId, mCameraFrameId, objectLocation.value());
+ SE3Conversions::pushToTfTree(mTfBroadcaster, immediateFrameId, mCameraFrameId, objectLocation.value());
//Since the object is seen we need to increment the hit counter
@@ -284,12 +285,11 @@ namespace mrover {
//Only publish to permament if we are confident in the object
if (mObjectHitCounts.at(detection.classId) > mObjHitThreshold) {
- std::string permanentFrameId = "detectedObject" + classes.at(detection.classId);
-
+ std::string permanentFrameId = std::format("detectedObject{}", classes.at(detection.classId));
//Grab the object inside of the camera frame and push it into the map frame
- SE3 objectInsideCamera = SE3::fromTfTree(mTfBuffer, mMapFrameId, immediateFrameId);
- SE3::pushToTfTree(mTfBroadcaster, permanentFrameId, mCameraFrameId, objectInsideCamera);
+ SE3d objectInsideCamera = SE3Conversions::fromTfTree(mTfBuffer, mMapFrameId, immediateFrameId);
+ SE3Conversions::pushToTfTree(mTfBroadcaster, permanentFrameId, mCameraFrameId, objectInsideCamera);
}
} catch (tf2::ExtrapolationException const&) {
diff --git a/src/perception/object_detector/pch.hpp b/src/perception/object_detector/pch.hpp
index 18c32f3a5..b61565645 100644
--- a/src/perception/object_detector/pch.hpp
+++ b/src/perception/object_detector/pch.hpp
@@ -41,7 +41,7 @@
#include
#include
-#include
+#include
#include
#include
diff --git a/src/perception/tag_detector/long_range_cam/pch.hpp b/src/perception/tag_detector/long_range_cam/pch.hpp
index afaebe976..8a8401168 100644
--- a/src/perception/tag_detector/long_range_cam/pch.hpp
+++ b/src/perception/tag_detector/long_range_cam/pch.hpp
@@ -29,7 +29,7 @@
#include
#include
-#include
+#include
#include
#include
diff --git a/src/perception/tag_detector/zed/pch.hpp b/src/perception/tag_detector/zed/pch.hpp
index 8b8ed1c34..dfa943021 100644
--- a/src/perception/tag_detector/zed/pch.hpp
+++ b/src/perception/tag_detector/zed/pch.hpp
@@ -30,5 +30,6 @@
#include
#include
+#include
#include
-#include
+#include
diff --git a/src/perception/tag_detector/zed/tag_detector.cpp b/src/perception/tag_detector/zed/tag_detector.cpp
index 52f254d10..f5aaf7a42 100644
--- a/src/perception/tag_detector/zed/tag_detector.cpp
+++ b/src/perception/tag_detector/zed/tag_detector.cpp
@@ -140,5 +140,5 @@ namespace mrover {
res.success = true;
return true;
}
-
+
} // namespace mrover
diff --git a/src/perception/tag_detector/zed/tag_detector.hpp b/src/perception/tag_detector/zed/tag_detector.hpp
index 5d758a7c8..395958424 100644
--- a/src/perception/tag_detector/zed/tag_detector.hpp
+++ b/src/perception/tag_detector/zed/tag_detector.hpp
@@ -8,7 +8,7 @@ namespace mrover {
int id = -1;
int hitCount = 0;
cv::Point2f imageCenter{};
- std::optional tagInCam;
+ std::optional tagInCam;
};
class TagDetectorNodelet : public nodelet::Nodelet {
@@ -54,7 +54,7 @@ namespace mrover {
auto publishThresholdedImage() -> void;
- auto getTagInCamFromPixel(sensor_msgs::PointCloud2ConstPtr const& cloudPtr, size_t u, size_t v) const -> std::optional;
+ auto getTagInCamFromPixel(sensor_msgs::PointCloud2ConstPtr const& cloudPtr, size_t u, size_t v) -> std::optional;
public:
TagDetectorNodelet() = default;
diff --git a/src/perception/tag_detector/zed/tag_detector.processing.cpp b/src/perception/tag_detector/zed/tag_detector.processing.cpp
index 901841cca..556ba3df6 100644
--- a/src/perception/tag_detector/zed/tag_detector.processing.cpp
+++ b/src/perception/tag_detector/zed/tag_detector.processing.cpp
@@ -10,7 +10,7 @@ namespace mrover {
* @param u X Pixel Position
* @param v Y Pixel Position
*/
- auto TagDetectorNodelet::getTagInCamFromPixel(sensor_msgs::PointCloud2ConstPtr const& cloudPtr, size_t u, size_t v) const -> std::optional {
+ auto TagDetectorNodelet::getTagInCamFromPixel(sensor_msgs::PointCloud2ConstPtr const& cloudPtr, size_t u, size_t v) -> std::optional {
assert(cloudPtr);
if (u >= cloudPtr->width || v >= cloudPtr->height) {
@@ -25,7 +25,7 @@ namespace mrover {
return std::nullopt;
}
- return std::make_optional(R3{point.x, point.y, point.z}, SO3{});
+ return std::make_optional(R3{point.x, point.y, point.z}, SO3d::Identity());
}
/**
@@ -83,7 +83,7 @@ namespace mrover {
if (tag.tagInCam) {
// Publish tag to immediate
std::string immediateFrameId = std::format("immediateFiducial{}", tag.id);
- SE3::pushToTfTree(mTfBroadcaster, immediateFrameId, mCameraFrameId, tag.tagInCam.value());
+ SE3Conversions::pushToTfTree(mTfBroadcaster, immediateFrameId, mCameraFrameId, tag.tagInCam.value());
}
}
@@ -109,8 +109,8 @@ namespace mrover {
std::string immediateFrameId = std::format("immediateFiducial{}", tag.id);
// Publish tag to odom
std::string const& parentFrameId = mUseOdom ? mOdomFrameId : mMapFrameId;
- SE3 tagInParent = SE3::fromTfTree(mTfBuffer, parentFrameId, immediateFrameId);
- SE3::pushToTfTree(mTfBroadcaster, std::format("fiducial{}", id), parentFrameId, tagInParent);
+ SE3d tagInParent = SE3Conversions::fromTfTree(mTfBuffer, parentFrameId, immediateFrameId);
+ SE3Conversions::pushToTfTree(mTfBroadcaster, std::format("fiducial{}", tag.id), parentFrameId, tagInParent);
} catch (tf2::ExtrapolationException const&) {
NODELET_WARN("Old data for immediate tag");
} catch (tf2::LookupException const&) {
diff --git a/src/perception/zed_wrapper/pch.hpp b/src/perception/zed_wrapper/pch.hpp
index 8088686af..4df657b64 100644
--- a/src/perception/zed_wrapper/pch.hpp
+++ b/src/perception/zed_wrapper/pch.hpp
@@ -24,6 +24,5 @@
#include
#include
-#include
-#include
#include
+#include
diff --git a/src/perception/zed_wrapper/zed_wrapper.bridge.cu b/src/perception/zed_wrapper/zed_wrapper.bridge.cu
index 5be3946bd..cacb63f77 100644
--- a/src/perception/zed_wrapper/zed_wrapper.bridge.cu
+++ b/src/perception/zed_wrapper/zed_wrapper.bridge.cu
@@ -1,5 +1,4 @@
// Be careful what you include in this file, it is compiled with nvcc (NVIDIA CUDA compiler)
-// For example OpenCV and lie includes cause problems
#include "zed_wrapper.hpp"
diff --git a/src/perception/zed_wrapper/zed_wrapper.cpp b/src/perception/zed_wrapper/zed_wrapper.cpp
index e28019a99..d25080945 100644
--- a/src/perception/zed_wrapper/zed_wrapper.cpp
+++ b/src/perception/zed_wrapper/zed_wrapper.cpp
@@ -1,5 +1,8 @@
#include "zed_wrapper.hpp"
+#include
+#include
+
namespace mrover {
/**
@@ -245,11 +248,11 @@ namespace mrover {
sl::Translation const& translation = pose.getTranslation();
sl::Orientation const& orientation = pose.getOrientation();
try {
- SE3 leftCameraInOdom{{translation.x, translation.y, translation.z},
- Eigen::Quaterniond{orientation.w, orientation.x, orientation.y, orientation.z}.normalized()};
- SE3 leftCameraInBaseLink = SE3::fromTfTree(mTfBuffer, "base_link", "zed_left_camera_frame");
- SE3 baseLinkInOdom = leftCameraInBaseLink * leftCameraInOdom;
- SE3::pushToTfTree(mTfBroadcaster, "base_link", "odom", baseLinkInOdom);
+ SE3d leftCameraInOdom{{translation.x, translation.y, translation.z},
+ Eigen::Quaterniond{orientation.w, orientation.x, orientation.y, orientation.z}.normalized()};
+ SE3d baseLinkToLeftCamera = SE3Conversions::fromTfTree(mTfBuffer, "base_link", "zed_left_camera_frame");
+ SE3d baseLinkInOdom = leftCameraInOdom * baseLinkToLeftCamera;
+ SE3Conversions::pushToTfTree(mTfBroadcaster, "base_link", "odom", baseLinkInOdom);
} catch (tf2::TransformException& e) {
NODELET_WARN_STREAM("Failed to get transform: " << e.what());
}
diff --git a/src/preload/execution b/src/preload/execution
index 8d49b61ff..c1e64bf8e 100644
--- a/src/preload/execution
+++ b/src/preload/execution
@@ -4,6 +4,8 @@
#ifdef __APPLE__
+#include
+
namespace std {
enum class execution {
diff --git a/src/simulator/pch.hpp b/src/simulator/pch.hpp
index 9249a5c6b..5e9b139e7 100644
--- a/src/simulator/pch.hpp
+++ b/src/simulator/pch.hpp
@@ -15,6 +15,7 @@
#include
#include
#include
+#include
#include
#include
@@ -68,9 +69,10 @@
#include
#include
+#include
#include
#include
-#include
+#include
#include
#include
diff --git a/src/simulator/simulator.controls.cpp b/src/simulator/simulator.controls.cpp
index 6d50b1f91..24ac9e169 100644
--- a/src/simulator/simulator.controls.cpp
+++ b/src/simulator/simulator.controls.cpp
@@ -82,40 +82,37 @@ namespace mrover {
}
auto SimulatorNodelet::freeLook(Clock::duration dt) -> void {
- float flySpeed = mFlySpeed * std::chrono::duration_cast>(dt).count();
- if (glfwGetKey(mWindow.get(), mCamRightKey) == GLFW_PRESS) {
- mCameraInWorld = SE3{R3{0, -flySpeed, 0}, SO3{}} * mCameraInWorld;
- }
- if (glfwGetKey(mWindow.get(), mCamLeftKey) == GLFW_PRESS) {
- mCameraInWorld = SE3{R3{0, flySpeed, 0}, SO3{}} * mCameraInWorld;
- }
- if (glfwGetKey(mWindow.get(), mCamForwardKey) == GLFW_PRESS) {
- mCameraInWorld = SE3{R3{flySpeed, 0, 0}, SO3{}} * mCameraInWorld;
- }
- if (glfwGetKey(mWindow.get(), mCamBackwardKey) == GLFW_PRESS) {
- mCameraInWorld = SE3{R3{-flySpeed, 0, 0}, SO3{}} * mCameraInWorld;
- }
- if (glfwGetKey(mWindow.get(), mCamUpKey) == GLFW_PRESS) {
- mCameraInWorld = mCameraInWorld * SE3{R3{0, 0, flySpeed}, SO3{}};
- }
- if (glfwGetKey(mWindow.get(), mCamDownKey) == GLFW_PRESS) {
- mCameraInWorld = mCameraInWorld * SE3{R3{0, 0, -flySpeed}, SO3{}};
- }
+ auto axis = [this](int positive, int negative) -> double {
+ return (glfwGetKey(mWindow.get(), positive) == GLFW_PRESS) - (glfwGetKey(mWindow.get(), negative) == GLFW_PRESS);
+ };
- Eigen::Vector2i size;
- glfwGetWindowSize(mWindow.get(), &size.x(), &size.y());
-
- Eigen::Vector2d center = (size / 2).cast();
+ float flySpeed = mFlySpeed * std::chrono::duration_cast>(dt).count();
+ R3 keyDelta = R3{axis(mCamForwardKey, mCamBackwardKey), axis(mCamLeftKey, mCamRightKey), axis(mCamUpKey, mCamDownKey)} * flySpeed;
+ Eigen::Vector2i windowSize;
+ glfwGetWindowSize(mWindow.get(), &windowSize.x(), &windowSize.y());
+ Eigen::Vector2d windowCenter = (windowSize / 2).cast();
Eigen::Vector2d mouse;
glfwGetCursorPos(mWindow.get(), &mouse.x(), &mouse.y());
+ Eigen::Vector2d mouseDelta = (mouse - windowCenter) * mLookSense;
+
+ SE3d::Tangent cameraRelativeTwist;
+ cameraRelativeTwist << keyDelta.x(), keyDelta.y(), 0.0, 0.0, mouseDelta.y(), 0.0;
+
+ SE3d::Tangent worldRelativeTwist;
+ worldRelativeTwist << 0.0, 0.0, keyDelta.z(), 0.0, 0.0, 0.0;
+
+ // The plus operator is overloaded and corresponds to lplus and rplus in the micro Lie paper
+ // It applies the exponential map to the tangent space element
+ // The result can be acted upon a group element (in this case SE3)
+ // The side of the plus operator determines word vs. camera space application
+ mCameraInWorld = worldRelativeTwist + mCameraInWorld + cameraRelativeTwist;
- Eigen::Vector2d delta = (mouse - center) * mLookSense;
+ SO3d::Tangent worldRelativeAngularVelocity;
+ worldRelativeAngularVelocity << 0.0, 0.0, -mouseDelta.x();
- // TODO(quintin): use lie algebra more here? we have a perturbation in the tangent space
- R3 p = mCameraInWorld.position();
- SO3 q = SO3{delta.y(), R3::UnitY()} * mCameraInWorld.rotation() * SO3{-delta.x(), R3::UnitZ()};
- mCameraInWorld = SE3{p, q};
+ // TODO: Is there a way to combine this with the above?
+ mCameraInWorld.asSO3() = worldRelativeAngularVelocity + mCameraInWorld.asSO3();
centerCursor();
}
@@ -160,4 +157,4 @@ namespace mrover {
}
}
-} // namespace mrover
+} // namespace mrover
\ No newline at end of file
diff --git a/src/simulator/simulator.gui.cpp b/src/simulator/simulator.gui.cpp
index be565cdd5..5888cff53 100644
--- a/src/simulator/simulator.gui.cpp
+++ b/src/simulator/simulator.gui.cpp
@@ -94,15 +94,15 @@ namespace mrover {
URDF const& rover = it->second;
{
- SE3 baseLinkInMap = rover.linkInWorld("base_link");
- R3 p = baseLinkInMap.position();
- S3 q = baseLinkInMap.rotation().quaternion();
+ SE3d baseLinkInMap = rover.linkInWorld("base_link");
+ R3 p = baseLinkInMap.translation();
+ S3 q = baseLinkInMap.quat();
ImGui::Text("Rover Position: (%.2f, %.2f, %.2f)", p.x(), p.y(), p.z());
ImGui::Text("Rover Orientation: (%.2f, %.2f, %.2f, %.2f)", q.w(), q.x(), q.y(), q.z());
}
{
- R3 p = mCameraInWorld.position();
- S3 q = mCameraInWorld.rotation().quaternion();
+ R3 p = mCameraInWorld.translation();
+ S3 q = mCameraInWorld.quat();
ImGui::Text("Camera Position: (%.2f, %.2f, %.2f)", p.x(), p.y(), p.z());
ImGui::Text("Camera Orientation: (%.2f, %.2f, %.2f, %.2f)", q.w(), q.x(), q.y(), q.z());
}
diff --git a/src/simulator/simulator.hpp b/src/simulator/simulator.hpp
index 1076d3aa1..e37a5aae3 100644
--- a/src/simulator/simulator.hpp
+++ b/src/simulator/simulator.hpp
@@ -97,7 +97,7 @@ namespace mrover {
auto makeCameraForLink(SimulatorNodelet& simulator, btMultibodyLink const* link) -> Camera;
- [[nodiscard]] auto linkInWorld(std::string const& linkName) const -> SE3;
+ [[nodiscard]] auto linkInWorld(std::string const& linkName) const -> SE3d;
};
struct PeriodicTask {
@@ -225,6 +225,16 @@ namespace mrover {
R3 mGpsLinerizationReferencePoint{};
double mGpsLinerizationReferenceHeading{};
+ // TODO: make variances configurable
+ std::default_random_engine mRNG;
+ std::normal_distribution mGPSDist{0, 0.2},
+ mAccelDist{0, 0.05},
+ mGyroDist{0, 0.02},
+ mMagDist{0, 0.1},
+ mRollDist{0, 0.05},
+ mPitchDist{0, 0.05},
+ mYawDist{0, 0.1};
+
PeriodicTask mGpsTask;
PeriodicTask mImuTask;
@@ -305,7 +315,7 @@ namespace mrover {
auto getUrdf(std::string const& name) -> std::optional>;
- SE3 mCameraInWorld{R3{-3.0, 0.0, 1.5}, SO3{}};
+ SE3d mCameraInWorld{R3{-3.0, 0.0, 1.5}, SO3d::Identity()};
std::vector mStereoCameras;
std::vector mCameras;
@@ -432,7 +442,7 @@ namespace mrover {
auto urdfPoseToBtTransform(urdf::Pose const& pose) -> btTransform;
- auto btTransformToSe3(btTransform const& transform) -> SE3;
+ auto btTransformToSe3(btTransform const& transform) -> SE3d;
auto computeCameraToClip(float fovY, float aspect, float zNear, float zFar) -> Eigen::Matrix4f;
diff --git a/src/simulator/simulator.physics.cpp b/src/simulator/simulator.physics.cpp
index 5f6d82d2e..b8f0c9c3e 100644
--- a/src/simulator/simulator.physics.cpp
+++ b/src/simulator/simulator.physics.cpp
@@ -2,10 +2,12 @@
namespace mrover {
- auto btTransformToSe3(btTransform const& transform) -> SE3 {
+ auto btTransformToSe3(btTransform const& transform) -> SE3d {
btVector3 const& p = transform.getOrigin();
btQuaternion const& q = transform.getRotation();
- return SE3{R3{p.x(), p.y(), p.z()}, SO3{q.w(), q.x(), q.y(), q.z()}};
+ // Note: Must convert the Bullet quaternion (floats) to a normalized Eigen quaternion (doubles).
+ // Otherwise the normality check will fail in the SO3 constructor.
+ return SE3d{R3{p.x(), p.y(), p.z()}, Eigen::Quaterniond{q.w(), q.x(), q.y(), q.z()}.normalized()};
}
auto SimulatorNodelet::initPhysics() -> void {
@@ -76,9 +78,9 @@ namespace mrover {
int index = urdf.linkNameToMeta.at(link->name).index;
// TODO(quintin): figure out why we need to negate rvector
btTransform parentToChild{urdf.physics->getParentToLocalRot(index), -urdf.physics->getRVector(index)};
- SE3 childInParent = btTransformToSe3(parentToChild.inverse());
+ SE3d childInParent = btTransformToSe3(parentToChild.inverse());
- SE3::pushToTfTree(mTfBroadcaster, link->name, link->getParent()->name, childInParent);
+ SE3Conversions::pushToTfTree(mTfBroadcaster, link->name, link->getParent()->name, childInParent);
}
for (urdf::JointSharedPtr const& child_joint: link->child_joints) {
@@ -95,17 +97,17 @@ namespace mrover {
if (auto modelOpt = getUrdf(modelName)) {
URDF const& model = *modelOpt;
- SE3 modelInMap = btTransformToSe3(model.physics->getBaseWorldTransform());
- SE3 roverInMap = btTransformToSe3(rover.physics->getBaseWorldTransform());
+ SE3d modelInMap = btTransformToSe3(model.physics->getBaseWorldTransform());
+ SE3d roverInMap = btTransformToSe3(rover.physics->getBaseWorldTransform());
- R3 roverToModel = modelInMap.position() - roverInMap.position();
+ R3 roverToModel = modelInMap.translation() - roverInMap.translation();
double roverDistanceToModel = roverToModel.norm();
roverToModel /= roverDistanceToModel;
R3 roverForward = roverInMap.rotation().matrix().col(0);
double roverDotModel = roverToModel.dot(roverForward);
if (roverDotModel > 0 && roverDistanceToModel < threshold) {
- SE3::pushToTfTree(mTfBroadcaster, modelName, "map", modelInMap);
+ SE3Conversions::pushToTfTree(mTfBroadcaster, modelName, "map", modelInMap);
}
}
};
@@ -115,7 +117,7 @@ namespace mrover {
}
}
- auto URDF::linkInWorld(std::string const& linkName) const -> SE3 {
+ auto URDF::linkInWorld(std::string const& linkName) const -> SE3d {
int index = linkNameToMeta.at(linkName).index;
return btTransformToSe3(index == -1 ? physics->getBaseWorldTransform() : physics->getLink(index).m_cachedWorldTransform);
}
diff --git a/src/simulator/simulator.render.cpp b/src/simulator/simulator.render.cpp
index 6c2a3c116..dfc5c6845 100644
--- a/src/simulator/simulator.render.cpp
+++ b/src/simulator/simulator.render.cpp
@@ -409,10 +409,10 @@ namespace mrover {
if (auto urdfMesh = std::dynamic_pointer_cast(visual->geometry)) {
Model& model = mUriToModel.at(urdfMesh->filename);
URDF::LinkMeta& meta = urdf.linkNameToMeta.at(link->name);
- SE3 linkInWorld = urdf.linkInWorld(link->name);
- SE3 modelInLink = btTransformToSe3(urdfPoseToBtTransform(link->visual->origin));
- SE3 modelInWorld = linkInWorld * modelInLink;
- renderModel(pass, model, meta.visualUniforms.at(visualIndex), SIM3{modelInWorld.position(), modelInWorld.rotation(), R3::Ones()});
+ SE3d linkInWorld = urdf.linkInWorld(link->name);
+ SE3d modelInLink = btTransformToSe3(urdfPoseToBtTransform(link->visual->origin));
+ SE3d modelToWorld = linkInWorld * modelInLink;
+ renderModel(pass, model, meta.visualUniforms.at(visualIndex), SIM3{modelToWorld, R3::Ones()});
visualIndex++;
}
}
@@ -439,27 +439,27 @@ namespace mrover {
assert(collider);
btCollisionShape* shape = collider->getCollisionShape();
assert(shape);
- SE3 linkInWorld = urdf.linkInWorld(link->name);
+ SE3d linkToWorld = urdf.linkInWorld(link->name);
if (auto* compound = dynamic_cast(shape)) {
for (int i = 0; i < compound->getNumChildShapes(); ++i) {
- SE3 modelInLink = btTransformToSe3(urdfPoseToBtTransform(link->collision_array.at(i)->origin));
- SE3 shapeToWorld = modelInLink * linkInWorld;
+ SE3d modelInLink = btTransformToSe3(urdfPoseToBtTransform(link->collision_array.at(i)->origin));
+ SE3d modelInWorld = linkToWorld * modelInLink;
auto* shape = compound->getChildShape(i);
if (auto* box = dynamic_cast(shape)) {
btVector3 extents = box->getHalfExtentsWithoutMargin() * 2;
- SIM3 modelToWorld{shapeToWorld.position(), shapeToWorld.rotation(), R3{extents.x(), extents.y(), extents.z()}};
+ SIM3 modelToWorld{modelInWorld, R3{extents.x(), extents.y(), extents.z()}};
renderModel(pass, mUriToModel.at(CUBE_PRIMITIVE_URI), meta.collisionUniforms.at(i), modelToWorld);
} else if (auto* sphere = dynamic_cast(shape)) {
btScalar diameter = sphere->getRadius() * 2;
- SIM3 modelToWorld{shapeToWorld.position(), shapeToWorld.rotation(), R3{diameter, diameter, diameter}};
+ SIM3 modelToWorld{modelInWorld, R3{diameter, diameter, diameter}};
renderModel(pass, mUriToModel.at(SPHERE_PRIMITIVE_URI), meta.collisionUniforms.at(i), modelToWorld);
} else if (auto* cylinder = dynamic_cast(shape)) {
btVector3 extents = cylinder->getHalfExtentsWithoutMargin() * 2;
- SIM3 modelToWorld{shapeToWorld.position(), shapeToWorld.rotation(), R3{extents.x(), extents.y(), extents.z()}};
+ SIM3 modelToWorld{modelInWorld, R3{extents.x(), extents.y(), extents.z()}};
renderModel(pass, mUriToModel.at(CYLINDER_PRIMITIVE_URI), meta.collisionUniforms.at(i), modelToWorld);
} else if (auto* mesh = dynamic_cast(shape)) {
- SIM3 modelToWorld{shapeToWorld.position(), shapeToWorld.rotation(), R3::Ones()};
+ SIM3 modelToWorld{modelInWorld, R3::Ones()};
renderModel(pass, mUriToModel.at(mMeshToUri.at(const_cast(mesh))), meta.collisionUniforms.at(i), modelToWorld);
} else {
NODELET_WARN_STREAM_ONCE(std::format("Tried to render unsupported collision shape: {}", shape->getName()));
@@ -694,8 +694,8 @@ namespace mrover {
glfwGetFramebufferSize(mWindow.get(), &width, &height);
float aspect = static_cast(width) / static_cast(height);
mSceneUniforms.value.cameraToClip = computeCameraToClip(mFovDegrees * DEG_TO_RAD, aspect, NEAR, FAR).cast();
- mSceneUniforms.value.worldToCamera = mCameraInWorld.matrix().inverse().cast();
- mSceneUniforms.value.cameraInWorld = mCameraInWorld.position().cast().homogeneous();
+ mSceneUniforms.value.worldToCamera = mCameraInWorld.inverse().transform().cast();
+ mSceneUniforms.value.cameraInWorld = mCameraInWorld.translation().cast().homogeneous();
mSceneUniforms.enqueueWrite();
wgpu::BindGroupEntry entry;
diff --git a/src/simulator/simulator.sensors.cpp b/src/simulator/simulator.sensors.cpp
index af4bc60db..ec3acfbdd 100644
--- a/src/simulator/simulator.sensors.cpp
+++ b/src/simulator/simulator.sensors.cpp
@@ -11,9 +11,9 @@ namespace mrover {
camera.sceneUniforms.value.lightInWorld = {0, 0, 5, 1};
float aspect = static_cast(camera.resolution.x()) / static_cast(camera.resolution.y());
camera.sceneUniforms.value.cameraToClip = computeCameraToClip(camera.fov * DEG_TO_RAD, aspect, NEAR, FAR).cast();
- SE3 cameraInWorld = btTransformToSe3(camera.link->m_cachedWorldTransform);
- camera.sceneUniforms.value.worldToCamera = cameraInWorld.matrix().inverse().cast();
- camera.sceneUniforms.value.cameraInWorld = cameraInWorld.position().cast().homogeneous();
+ SE3d cameraInWorld = btTransformToSe3(camera.link->m_cachedWorldTransform);
+ camera.sceneUniforms.value.worldToCamera = cameraInWorld.inverse().transform().cast();
+ camera.sceneUniforms.value.cameraInWorld = cameraInWorld.translation().cast().homogeneous();
camera.sceneUniforms.enqueueWrite();
wgpu::BindGroupEntry entry;
@@ -90,7 +90,7 @@ namespace mrover {
computePass.release();
}
- auto cartesianToGeodetic(R3 const& cartesian, Eigen::Vector3d const& referenceGeodetic, double referenceHeadingDegrees) -> Eigen::Vector3d {
+ auto cartesianToGeodetic(R3 const& cartesian, R3 const& referenceGeodetic, double referenceHeadingDegrees) -> R3 {
constexpr double equatorialRadius = 6378137.0;
constexpr double flattening = 1.0 / 298.257223563;
constexpr double eccentricity2 = 2 * flattening - flattening * flattening;
@@ -111,22 +111,22 @@ namespace mrover {
return {lat, lon, alt};
}
- auto computeNavSatFix(SE3 const& gpuInMap, Eigen::Vector3d const& referenceGeodetic, double referenceHeadingDegrees) -> sensor_msgs::NavSatFix {
+ auto computeNavSatFix(R3 const& gpsInMap, R3 const& referenceGeodetic, double referenceHeadingDegrees) -> sensor_msgs::NavSatFix {
sensor_msgs::NavSatFix gpsMessage;
gpsMessage.header.stamp = ros::Time::now();
gpsMessage.header.frame_id = "map";
- auto geodetic = cartesianToGeodetic(gpuInMap.position(), referenceGeodetic, referenceHeadingDegrees);
+ auto geodetic = cartesianToGeodetic(gpsInMap, referenceGeodetic, referenceHeadingDegrees);
gpsMessage.latitude = geodetic(0);
gpsMessage.longitude = geodetic(1);
gpsMessage.altitude = geodetic(2);
return gpsMessage;
}
- auto computeImu(SE3 const& imuInMap, R3 const& imuAngularVelocity, R3 const& linearAcceleration, R3 const& magneticField) -> ImuAndMag {
+ auto computeImu(SO3d const& imuInMap, R3 const& imuAngularVelocity, R3 const& linearAcceleration, R3 const& magneticField) -> ImuAndMag {
ImuAndMag imuMessage;
imuMessage.header.stamp = ros::Time::now();
imuMessage.header.frame_id = "map";
- S3 q = imuInMap.rotation().quaternion();
+ S3 q = imuInMap.quat();
imuMessage.imu.orientation.w = q.w();
imuMessage.imu.orientation.x = q.x();
imuMessage.imu.orientation.y = q.y();
@@ -152,33 +152,57 @@ namespace mrover {
URDF const& rover = *lookup;
{
- SE3 baseLinkInMap = rover.linkInWorld("base_link");
+ SE3d baseLinkInMap = rover.linkInWorld("base_link");
nav_msgs::Odometry odometry;
odometry.header.stamp = ros::Time::now();
odometry.header.frame_id = "map";
- R3 p = baseLinkInMap.position();
+ R3 p = baseLinkInMap.translation();
odometry.pose.pose.position.x = p.x();
odometry.pose.pose.position.y = p.y();
odometry.pose.pose.position.z = p.z();
- S3 q = baseLinkInMap.rotation().quaternion();
+ S3 q = baseLinkInMap.quat();
odometry.pose.pose.orientation.w = q.w();
odometry.pose.pose.orientation.x = q.x();
odometry.pose.pose.orientation.y = q.y();
odometry.pose.pose.orientation.z = q.z();
- // TODO(quintin, riley): fill in twist?
+ R3 v = btVector3ToR3(rover.physics->getBaseVel());
+ odometry.twist.twist.linear.x = v.x();
+ odometry.twist.twist.linear.y = v.y();
+ odometry.twist.twist.linear.z = v.z();
+ R3 w = btVector3ToR3(rover.physics->getBaseOmega());
+ odometry.twist.twist.angular.x = w.x();
+ odometry.twist.twist.angular.y = w.y();
+ odometry.twist.twist.angular.z = w.z();
mGroundTruthPub.publish(odometry);
}
if (mGpsTask.shouldUpdate()) {
- SE3 gpsInMap = rover.linkInWorld("chassis_link");
+ R3 gpsInMap = rover.linkInWorld("chassis_link").translation();
+ R3 gpsNoise;
+ gpsNoise << mGPSDist(mRNG), mGPSDist(mRNG), mGPSDist(mRNG);
+ gpsInMap += gpsNoise;
+
mGpsPub.publish(computeNavSatFix(gpsInMap, mGpsLinerizationReferencePoint, mGpsLinerizationReferenceHeading));
}
if (mImuTask.shouldUpdate()) {
- R3 imuAngularVelocity = btVector3ToR3(rover.physics->getBaseOmega());
+ R3 roverAngularVelocity = btVector3ToR3(rover.physics->getBaseOmega());
R3 roverLinearVelocity = btVector3ToR3(rover.physics->getBaseVel());
- R3 imuLinearAcceleration = (roverLinearVelocity - mRoverLinearVelocity) / std::chrono::duration_cast>(dt).count();
+ R3 roverLinearAcceleration = (roverLinearVelocity - mRoverLinearVelocity) / std::chrono::duration_cast>(dt).count();
mRoverLinearVelocity = roverLinearVelocity;
- SE3 imuInMap = rover.linkInWorld("imu");
- mImuPub.publish(computeImu(imuInMap, imuAngularVelocity, imuLinearAcceleration, imuInMap.rotation().matrix().transpose().col(1)));
+ SO3d imuInMap = rover.linkInWorld("imu").asSO3();
+ R3 roverMagVector = imuInMap.inverse().rotation().col(1);
+
+ R3 accelNoise{mAccelDist(mRNG), mAccelDist(mRNG), mAccelDist(mRNG)},
+ gyroNoise{mGyroDist(mRNG), mGyroDist(mRNG), mGyroDist(mRNG)},
+ magNoise{mMagDist(mRNG), mMagDist(mRNG), mMagDist(mRNG)};
+ roverLinearAcceleration += accelNoise;
+ roverAngularVelocity += gyroNoise;
+ roverMagVector += magNoise;
+
+ SO3d::Tangent orientationNoise;
+ orientationNoise << mRollDist(mRNG), mPitchDist(mRNG), mYawDist(mRNG);
+ imuInMap += orientationNoise;
+
+ mImuPub.publish(computeImu(imuInMap, roverAngularVelocity, roverLinearAcceleration, roverMagVector));
}
}
}
diff --git a/src/teleoperation/streaming/stream_client.cpp b/src/teleoperation/streaming/stream_client.cpp
index 48531ed33..86e6f80b4 100644
--- a/src/teleoperation/streaming/stream_client.cpp
+++ b/src/teleoperation/streaming/stream_client.cpp
@@ -77,7 +77,7 @@ EM_BOOL on_message(int _event_type, const EmscriptenWebSocketMessageEvent* webso
const vStride = $7;
const canvas = document.getElementById('canvas');
- const ctx = canvas.getContext ('2d');
+ const ctx = canvas.getContext('2d');
if (Module.imageBuffer === undefined) {
Module.imageBuffer = ctx.createImageData(width, height);
canvas.width = width;
diff --git a/src/util/lie/lie.cpp b/src/util/lie/lie.cpp
new file mode 100644
index 000000000..b58beee26
--- /dev/null
+++ b/src/util/lie/lie.cpp
@@ -0,0 +1,75 @@
+#include "lie.hpp"
+
+// Compose translation, rotation, and per-axis scale into one affine transform.
+// manif's rotation() yields the 3x3 rotation matrix that Eigen expects here.
+SIM3::SIM3(SE3d const& se3, R3 const& scale) {
+ mTransform.fromPositionOrientationScale(se3.translation(), se3.rotation(), scale);
+}
+
+// Return the similarity transform as a homogeneous 4x4 matrix.
+auto SIM3::matrix() const -> Eigen::Matrix4d {
+ return mTransform.matrix();
+}
+
+// Translation component of the similarity transform.
+auto SIM3::position() const -> R3 {
+ return mTransform.translation();
+}
+
+// Look up the latest transform between two frames and convert it to SE3.
+// ros::Time{} (time zero) asks tf2 for the most recent available transform.
+// NOTE(review): tf2's lookupTransform signature is (target, source, time) —
+// confirm the from/to naming here matches what callers expect.
+auto SE3Conversions::fromTfTree(tf2_ros::Buffer const& buffer, std::string const& fromFrameId, std::string const& toFrameId) -> SE3d {
+    return fromTf(buffer.lookupTransform(fromFrameId, toFrameId, ros::Time{}).transform);
+}
+
+// Publish tf as the transform of childFrameId relative to parentFrameId.
+auto SE3Conversions::pushToTfTree(tf2_ros::TransformBroadcaster& broadcaster, std::string const& childFrameId, std::string const& parentFrameId, SE3d const& tf) -> void {
+    geometry_msgs::TransformStamped const stamped = toTransformStamped(tf, parentFrameId, childFrameId);
+    broadcaster.sendTransform(stamped);
+}
+
+// Build an SE3 element from a ROS Transform message.
+auto SE3Conversions::fromTf(geometry_msgs::Transform const& transform) -> SE3d {
+    R3 const translation{transform.translation.x, transform.translation.y, transform.translation.z};
+    Eigen::Quaterniond const rotation{transform.rotation.w, transform.rotation.x, transform.rotation.y, transform.rotation.z};
+    return SE3d{translation, rotation};
+}
+
+// Build an SE3 element from a ROS Pose message.
+auto SE3Conversions::fromPose(geometry_msgs::Pose const& pose) -> SE3d {
+    R3 const position{pose.position.x, pose.position.y, pose.position.z};
+    Eigen::Quaterniond const orientation{pose.orientation.w, pose.orientation.x, pose.orientation.y, pose.orientation.z};
+    return SE3d{position, orientation};
+}
+
+// Convert an SE3 element to a ROS Pose message.
+auto SE3Conversions::toPose(SE3d const& tf) -> geometry_msgs::Pose {
+    auto const q = tf.quat();
+    geometry_msgs::Pose pose;
+    pose.orientation.w = q.w();
+    pose.orientation.x = q.x();
+    pose.orientation.y = q.y();
+    pose.orientation.z = q.z();
+    pose.position.x = tf.x();
+    pose.position.y = tf.y();
+    pose.position.z = tf.z();
+    return pose;
+}
+
+// Convert an SE3 element to a ROS Transform message.
+auto SE3Conversions::toTransform(SE3d const& tf) -> geometry_msgs::Transform {
+    auto const q = tf.quat();
+    geometry_msgs::Transform transform;
+    transform.rotation.w = q.w();
+    transform.rotation.x = q.x();
+    transform.rotation.y = q.y();
+    transform.rotation.z = q.z();
+    transform.translation.x = tf.x();
+    transform.translation.y = tf.y();
+    transform.translation.z = tf.z();
+    return transform;
+}
+
+// Convert an SE3 element to a PoseStamped in frameId, stamped with the current ROS time.
+auto SE3Conversions::toPoseStamped(SE3d const& tf, std::string const& frameId) -> geometry_msgs::PoseStamped {
+    geometry_msgs::PoseStamped stamped;
+    stamped.header.stamp = ros::Time::now();
+    stamped.header.frame_id = frameId;
+    stamped.pose = toPose(tf);
+    return stamped;
+}
+
+// Convert an SE3 element to a TransformStamped (child expressed in parent),
+// stamped with the current ROS time.
+auto SE3Conversions::toTransformStamped(SE3d const& tf, std::string const& parentFrameId, std::string const& childFrameId) -> geometry_msgs::TransformStamped {
+    geometry_msgs::TransformStamped stamped;
+    stamped.header.stamp = ros::Time::now();
+    stamped.header.frame_id = parentFrameId;
+    stamped.child_frame_id = childFrameId;
+    stamped.transform = toTransform(tf);
+    return stamped;
+}
diff --git a/src/util/lie/lie.hpp b/src/util/lie/lie.hpp
new file mode 100644
index 000000000..1e51e4dab
--- /dev/null
+++ b/src/util/lie/lie.hpp
@@ -0,0 +1,50 @@
+#pragma once
+
+#include <Eigen/Geometry>
+#include <manif/manif.h>
+
+#include <geometry_msgs/Pose.h>
+#include <geometry_msgs/PoseStamped.h>
+#include <geometry_msgs/TransformStamped.h>
+
+#include <tf2_ros/buffer.h>
+#include <tf2_ros/transform_broadcaster.h>
+
+using manif::SE3d, manif::SO3d;
+
+using R3 = Eigen::Vector3d;
+using S3 = Eigen::Quaterniond;
+
+/**
+ * \brief Conversions between manif SE3 elements and ROS geometry_msgs / tf2 types.
+ *
+ * All functions are stateless; the stamped variants timestamp with ros::Time::now().
+ */
+class SE3Conversions {
+public:
+ /// Convert a ROS Transform message into an SE3 element.
+ static auto fromTf(geometry_msgs::Transform const& transform) -> SE3d;
+
+ /// Convert a ROS Pose message into an SE3 element.
+ static auto fromPose(geometry_msgs::Pose const& pose) -> SE3d;
+
+ /// Convert an SE3 element into a ROS Pose message.
+ [[nodiscard]] static auto toPose(SE3d const& tf) -> geometry_msgs::Pose;
+
+ /// Convert an SE3 element into a ROS Transform message.
+ [[nodiscard]] static auto toTransform(SE3d const& tf) -> geometry_msgs::Transform;
+
+ /// Convert an SE3 element into a PoseStamped in the given frame.
+ [[nodiscard]] static auto toPoseStamped(SE3d const& tf, std::string const& frameId) -> geometry_msgs::PoseStamped;
+
+ /// Convert an SE3 element into a TransformStamped (child in parent).
+ [[nodiscard]] static auto toTransformStamped(SE3d const& tf, std::string const& parentFrameId, std::string const& childFrameId) -> geometry_msgs::TransformStamped;
+
+ /// Look up the latest transform between two frames from the tf2 buffer.
+ /// NOTE(review): forwards its arguments to tf2's lookupTransform(target, source, time) — confirm from/to mapping.
+ [[nodiscard]] static auto fromTfTree(tf2_ros::Buffer const& buffer, std::string const& fromFrameId, std::string const& toFrameId) -> SE3d;
+
+ /// Broadcast tf as the transform of childFrameId relative to parentFrameId.
+ static auto pushToTfTree(tf2_ros::TransformBroadcaster& broadcaster, std::string const& childFrameId, std::string const& parentFrameId, SE3d const& tf) -> void;
+};
+
+/**
+ * \brief A similarity transform: rotation, translation, and per-axis scale.
+ *
+ * Stored as a single Eigen affine transform so it can be handed to rendering
+ * code as one 4x4 matrix.
+ */
+class SIM3 {
+    // Restored template arguments (garbled in this hunk): an affine 3D
+    // transform is required by fromPositionOrientationScale() in the ctor
+    // and yields the 4x4 matrix() below.
+    using Transform = Eigen::Transform<double, 3, Eigen::Affine>;
+
+    Transform mTransform = Transform::Identity();
+
+public:
+    SIM3() = default;
+
+    /// Compose an SE3 pose with a per-axis scale.
+    SIM3(SE3d const& se3, R3 const& scale);
+
+    /// The transform as a homogeneous 4x4 matrix.
+    [[nodiscard]] auto matrix() const -> Eigen::Matrix4d;
+
+    /// Translation component.
+    [[nodiscard]] auto position() const -> R3;
+};
diff --git a/src/util/lie/se3.cpp b/src/util/lie/se3.cpp
deleted file mode 100644
index ce4e4593d..000000000
--- a/src/util/lie/se3.cpp
+++ /dev/null
@@ -1,40 +0,0 @@
-#include "se3.hpp"
-
-SE3 SE3::fromTfTree(tf2_ros::Buffer const& buffer, std::string const& fromFrameId, std::string const& toFrameId) {
- geometry_msgs::TransformStamped transform = buffer.lookupTransform(fromFrameId, toFrameId, ros::Time{});
- return fromTf(transform.transform);
-}
-
-void SE3::pushToTfTree(tf2_ros::TransformBroadcaster& broadcaster, std::string const& childFrameId, std::string const& parentFrameId, SE3 const& tf) {
- broadcaster.sendTransform(tf.toTransformStamped(parentFrameId, childFrameId));
-}
-
-SE3::SE3(R3 const& position, SO3 const& rotation) {
- assert(!position.hasNaN());
- mTransform.translate(position);
- mTransform.rotate(rotation.mAngleAxis);
-}
-
-Eigen::Matrix4d SE3::matrix() const {
- return mTransform.matrix();
-}
-
-R3 SE3::position() const {
- return mTransform.translation();
-}
-
-SO3 SE3::rotation() const {
- return mTransform.rotation();
-}
-
-double SE3::distanceTo(SE3 const& other) const {
- return (position() - other.position()).norm();
-}
-
-auto SE3::inverse() const -> SE3 {
- return SE3{mTransform.inverse()};
-}
-
-SE3 SE3::operator*(SE3 const& other) const {
- return other.mTransform * mTransform;
-}
diff --git a/src/util/lie/se3.hpp b/src/util/lie/se3.hpp
deleted file mode 100644
index f821d32eb..000000000
--- a/src/util/lie/se3.hpp
+++ /dev/null
@@ -1,112 +0,0 @@
-#pragma once
-
-#include
-#include
-#include
-
-#include
-#include
-#include
-
-#include
-
-using R3 = Eigen::Vector3d;
-using S3 = Eigen::Quaterniond;
-
-/**
- * @brief A 3D rotation.
- */
-class SO3 {
- using AngleAxis = Eigen::AngleAxis;
-
- AngleAxis mAngleAxis = AngleAxis::Identity();
-
-public:
- friend class SE3;
- friend class SIM3;
-
- SO3() = default;
-
- SO3(double w, double x, double y, double z) : mAngleAxis{Eigen::Quaterniond{w, x, y, z}} {}
-
- template>,
- typename = std::enable_if_t<(sizeof...(Args) > 0)>>
- SO3(Args&&... args) : mAngleAxis{std::forward(args)...} {}
-
- [[nodiscard]] SO3 operator*(SO3 const& other) const;
-
- [[nodiscard]] R3 operator*(R3 const& other) const;
-
- [[nodiscard]] Eigen::Matrix3d matrix() const;
-
- [[nodiscard]] Eigen::Quaterniond quaternion() const;
-
- EIGEN_MAKE_ALIGNED_OPERATOR_NEW
-};
-
-/**
- * @brief A 3D rigid transformation (direct isometry).
- *
- * In simpler terms: a 3D rotation followed by a 3D translation.
- */
-class SE3 {
- using Transform = Eigen::Transform;
-
- Transform mTransform = Transform::Identity();
-
- static SE3 fromTf(geometry_msgs::Transform const& transform);
-
- static SE3 fromPose(geometry_msgs::Pose const& pose);
-
- [[nodiscard]] geometry_msgs::Pose toPose() const;
-
- [[nodiscard]] geometry_msgs::Transform toTransform() const;
-
- [[nodiscard]] geometry_msgs::PoseStamped toPoseStamped(std::string const& frameId) const;
-
- [[nodiscard]] geometry_msgs::TransformStamped toTransformStamped(std::string const& parentFrameId, std::string const& childFrameId) const;
-
-public:
- [[nodiscard]] static SE3 fromTfTree(tf2_ros::Buffer const& buffer, std::string const& fromFrameId, std::string const& toFrameId);
-
- static void pushToTfTree(tf2_ros::TransformBroadcaster& broadcaster, std::string const& childFrameId, std::string const& parentFrameId, SE3 const& tf);
-
- SE3() = default;
-
- SE3(R3 const& position, SO3 const& rotation);
-
- template>,
- typename = std::enable_if_t<(sizeof...(Args) > 0)>>
- SE3(Args&&... args) : mTransform{std::forward(args)...} {}
-
- [[nodiscard]] SE3 operator*(SE3 const& other) const;
-
- [[nodiscard]] Eigen::Matrix4d matrix() const;
-
- [[nodiscard]] R3 position() const;
-
- [[nodiscard]] SO3 rotation() const;
-
- [[nodiscard]] double distanceTo(SE3 const& other) const;
-
- [[nodiscard]] auto inverse() const -> SE3;
-
- EIGEN_MAKE_ALIGNED_OPERATOR_NEW
-};
-
-class SIM3 {
- using Transform = Eigen::Transform;
-
- Transform mTransform = Transform::Identity();
-
-public:
- SIM3() = default;
-
- SIM3(R3 const& position, SO3 const& rotation, R3 const& scale);
-
- [[nodiscard]] auto matrix() const -> Eigen::Matrix4d;
-
- [[nodiscard]] auto position() const -> R3;
-};
diff --git a/src/util/lie/se3_conversion.cpp b/src/util/lie/se3_conversion.cpp
deleted file mode 100644
index 44a28774f..000000000
--- a/src/util/lie/se3_conversion.cpp
+++ /dev/null
@@ -1,52 +0,0 @@
-#include "se3.hpp"
-
-SE3 SE3::fromTf(geometry_msgs::Transform const& transform) {
- return {{transform.translation.x, transform.translation.y, transform.translation.z},
- {Eigen::Quaterniond{transform.rotation.w, transform.rotation.x, transform.rotation.y, transform.rotation.z}}};
-}
-
-SE3 SE3::fromPose(geometry_msgs::Pose const& pose) {
- return {{pose.position.x, pose.position.y, pose.position.z},
- {Eigen::Quaterniond{pose.orientation.w, pose.orientation.x, pose.orientation.y, pose.orientation.z}}};
-}
-
-geometry_msgs::Pose SE3::toPose() const {
- geometry_msgs::Pose pose;
- pose.position.x = position().x();
- pose.position.y = position().y();
- pose.position.z = position().z();
- pose.orientation.x = rotation().quaternion().x();
- pose.orientation.y = rotation().quaternion().y();
- pose.orientation.z = rotation().quaternion().z();
- pose.orientation.w = rotation().quaternion().w();
- return pose;
-}
-
-geometry_msgs::Transform SE3::toTransform() const {
- geometry_msgs::Transform transform;
- transform.translation.x = position().x();
- transform.translation.y = position().y();
- transform.translation.z = position().z();
- transform.rotation.x = rotation().quaternion().x();
- transform.rotation.y = rotation().quaternion().y();
- transform.rotation.z = rotation().quaternion().z();
- transform.rotation.w = rotation().quaternion().w();
- return transform;
-}
-
-geometry_msgs::PoseStamped SE3::toPoseStamped(std::string const& frameId) const {
- geometry_msgs::PoseStamped pose;
- pose.pose = toPose();
- pose.header.frame_id = frameId;
- pose.header.stamp = ros::Time::now();
- return pose;
-}
-
-geometry_msgs::TransformStamped SE3::toTransformStamped(const std::string& parentFrameId, const std::string& childFrameId) const {
- geometry_msgs::TransformStamped transform;
- transform.transform = toTransform();
- transform.header.frame_id = parentFrameId;
- transform.header.stamp = ros::Time::now();
- transform.child_frame_id = childFrameId;
- return transform;
-}
diff --git a/src/util/lie/sim3.cpp b/src/util/lie/sim3.cpp
deleted file mode 100644
index 65f9110f9..000000000
--- a/src/util/lie/sim3.cpp
+++ /dev/null
@@ -1,13 +0,0 @@
-#include "se3.hpp"
-
-SIM3::SIM3(R3 const& position, SO3 const& rotation, R3 const& scale) {
- mTransform.fromPositionOrientationScale(position, rotation.mAngleAxis, scale);
-}
-
-auto SIM3::matrix() const -> Eigen::Matrix4d {
- return mTransform.matrix();
-}
-
-auto SIM3::position() const -> R3 {
- return mTransform.translation();
-}
diff --git a/src/util/lie/so3.cpp b/src/util/lie/so3.cpp
deleted file mode 100644
index 422cd5c42..000000000
--- a/src/util/lie/so3.cpp
+++ /dev/null
@@ -1,17 +0,0 @@
-#include "se3.hpp"
-
-Eigen::Quaterniond SO3::quaternion() const {
- return Eigen::Quaterniond{mAngleAxis};
-}
-
-Eigen::Matrix3d SO3::matrix() const {
- return mAngleAxis.toRotationMatrix();
-}
-
-SO3 SO3::operator*(SO3 const& other) const {
- return other.quaternion() * quaternion();
-}
-
-R3 SO3::operator*(R3 const& other) const {
- return quaternion() * other;
-}