From fff839b495ae3bf6d1ef72014648746bb51b4e91 Mon Sep 17 00:00:00 2001 From: Andrew Symington Date: Sun, 23 Jul 2023 14:39:39 -0700 Subject: [PATCH 01/18] Enable raw data and start work on a poser --- .circleci/config.yml | 8 + .gitignore | 52 ++ CMakeLists.txt | 200 +++++-- Dockerfile | 5 +- README.md | 218 ++++++-- config/config.json | 42 -- config/example_config_driver.json | 96 ++++ config/example_config_merger.yaml | 66 +++ config/example_config_poser.yaml | 38 ++ doc/model-v1.png | Bin 0 -> 51848 bytes doc/model-v2.png | Bin 0 -> 23775 bytes docker-compose.yml | 9 +- include/libsurvive_ros2/component.hpp | 76 --- include/libsurvive_ros2/driver_component.hpp | 108 ++++ include/libsurvive_ros2/poser_component.hpp | 212 ++++++++ launch/libsurvive_ros2.launch.py | 252 ++++++--- libsurvive_ros2_py/__init__.py | 0 libsurvive_ros2_py/config_tools.py | 352 +++++++++++++ msg/Angle.msg | 36 ++ msg/Lighthouse.msg | 49 ++ msg/Tracker.msg | 39 ++ package.xml | 46 +- scripts/config_merger | 38 ++ src/component.cpp | 280 ---------- src/driver_component.cpp | 504 ++++++++++++++++++ src/driver_node.cpp | 39 ++ src/poser_component.cpp | 516 +++++++++++++++++++ src/{node.cpp => poser_node.cpp} | 5 +- test/example_poser.yaml | 38 ++ test/example_registration.yaml | 66 +++ test/input_config_0_1_2.json | 60 +++ test/input_config_1_2_3.json | 60 +++ test/input_config_2_3_4.json | 60 +++ test/input_config_7_8_9.json | 60 +++ test/input_config_malformed.json | 1 + test/output_with_registration.json | 296 +++++++++++ test/output_without_registration.json | 296 +++++++++++ test/test_config_tools.py | 108 ++++ 38 files changed, 3748 insertions(+), 583 deletions(-) delete mode 100644 config/config.json create mode 100644 config/example_config_driver.json create mode 100644 config/example_config_merger.yaml create mode 100644 config/example_config_poser.yaml create mode 100644 doc/model-v1.png create mode 100644 doc/model-v2.png delete mode 100644 include/libsurvive_ros2/component.hpp create mode 100644 include/libsurvive_ros2/driver_component.hpp create mode 100644 include/libsurvive_ros2/poser_component.hpp create mode 100644 libsurvive_ros2_py/__init__.py create mode 100644 libsurvive_ros2_py/config_tools.py create mode 100644 msg/Angle.msg create mode 100644 msg/Lighthouse.msg create mode 100644 msg/Tracker.msg create mode 100755 scripts/config_merger delete mode 100644 src/component.cpp create mode 100644 src/driver_component.cpp create mode 100644 src/driver_node.cpp create mode 100644 src/poser_component.cpp rename src/{node.cpp => poser_node.cpp} (89%) create mode 100644 test/example_poser.yaml create mode 100644 test/example_registration.yaml create mode 100644 test/input_config_0_1_2.json create mode 100644 test/input_config_1_2_3.json create mode 100644 test/input_config_2_3_4.json create mode 100644 test/input_config_7_8_9.json create mode 100644 test/input_config_malformed.json create mode 100644 test/output_with_registration.json create mode 100644 test/output_without_registration.json create mode 100644 test/test_config_tools.py diff --git a/.circleci/config.yml b/.circleci/config.yml index b177a0f..23acbd9 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -49,6 +49,10 @@ workflows: resource_class: arm.medium arch: arm64v8 ros_distro: humble + - checkout_build_test: + resource_class: arm.medium + arch: arm64v8 + ros_distro: irwin - checkout_build_test: resource_class: arm.medium arch: arm64v8 @@ -65,6 +69,10 @@ workflows: resource_class: medium arch: amd64 ros_distro: humble + - 
checkout_build_test: + resource_class: medium + arch: amd64 + ros_distro: irwin - checkout_build_test: resource_class: medium arch: amd64 diff --git a/.gitignore b/.gitignore index 600d2d3..c231e0d 100644 --- a/.gitignore +++ b/.gitignore @@ -1 +1,53 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] + +# C extensions +*.so + +# Distribution / packaging +bin/ +build/ +develop-eggs/ +dist/ +eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +*.egg-info/ +.installed.cfg +*.egg + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +.tox/ +.coverage +.cache +nosetests.xml +coverage.xml + +# Translations +*.mo + +# Mr Developer +.mr.developer.cfg +.project +.pydevproject + +# Rope +.ropeproject + +# Django stuff: +*.log +*.pot + +# Sphinx documentation +docs/_build/ + +# Visual studio code files .vscode \ No newline at end of file diff --git a/CMakeLists.txt b/CMakeLists.txt index a032552..098d48f 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -24,6 +24,8 @@ cmake_minimum_required(VERSION 3.5) # Set the project name project(libsurvive_ros2) +# COMPILATION OPTIONS + # Default to C99 if(NOT CMAKE_C_STANDARD) set(CMAKE_C_STANDARD 99) @@ -37,43 +39,129 @@ endif() # Set compile options add_compile_options(-Wall -Wextra -Wpedantic) -# Find the core interface library +# DEPENDENCIES + +# Obtain a specific version of the libsurvive low-level driver. include(ExternalProject) externalproject_add(libsurvive GIT_REPOSITORY https://github.com/cntools/libsurvive.git - GIT_TAG master + GIT_TAG d89876a1af0efcfcddeaaf2a282eec8753a1bac4 CMAKE_ARGS - -DCMAKE_INSTALL_PREFIX=${CMAKE_CURRENT_BINARY_DIR}/libsurvive-install + -DCMAKE_BINARY_DIR=${CMAKE_CURRENT_BINARY_DIR} + -DCMAKE_INSTALL_PREFIX=${CMAKE_INSTALL_PREFIX} -DCMAKE_BUILD_TYPE=Release ) -# Find dependencies +# Obtain a specific version of the GTSAM estimation software. 
+include(ExternalProject) +externalproject_add(gtsam + GIT_REPOSITORY https://github.com/borglab/gtsam.git + GIT_TAG 621ef2e7c560e6fb06b463769d9cf7bc6f626308 + CMAKE_ARGS + -DCMAKE_BINARY_DIR=${CMAKE_CURRENT_BINARY_DIR} + -DCMAKE_INSTALL_PREFIX=${CMAKE_INSTALL_PREFIX} + -DCMAKE_BUILD_TYPE=Release + -DGTSAM_BUILD_PYTHON=1 +) + +# Third party libraries +find_package(Boost REQUIRED) +find_package(Eigen3 REQUIRED) +find_package(TBB REQUIRED) +find_package(yaml-cpp REQUIRED) + +# ROS packages find_package(ament_cmake REQUIRED) find_package(rclcpp REQUIRED) find_package(rclcpp_components REQUIRED) +find_package(rosidl_default_generators REQUIRED) +find_package(tf2 REQUIRED) +find_package(tf2_ros REQUIRED) + +# Interfaces find_package(diagnostic_msgs REQUIRED) find_package(geometry_msgs REQUIRED) find_package(sensor_msgs REQUIRED) -find_package(tf2 REQUIRED) -find_package(tf2_ros REQUIRED) # Universally add this components includes include_directories(include) -# Add the component -add_library(libsurvive_ros2_component SHARED - src/component.cpp) -add_dependencies(libsurvive_ros2_component libsurvive) -target_include_directories(libsurvive_ros2_component PUBLIC - ${CMAKE_CURRENT_BINARY_DIR}/libsurvive-install/include - ${CMAKE_CURRENT_BINARY_DIR}/libsurvive-install/include/libsurvive - ${CMAKE_CURRENT_BINARY_DIR}/libsurvive-install/include/libsurvive/redist) -target_link_directories(libsurvive_ros2_component PUBLIC - ${CMAKE_CURRENT_BINARY_DIR}/libsurvive-install/lib) -target_link_libraries(libsurvive_ros2_component -lsurvive) -target_compile_definitions(libsurvive_ros2_component +# IDL GENERATION + +# Generate messages used in this project +rosidl_generate_interfaces(libsurvive_ros2 + "msg/Angle.msg" + "msg/Lighthouse.msg" + "msg/Tracker.msg" + DEPENDENCIES + diagnostic_msgs + geometry_msgs + std_msgs) +rosidl_get_typesupport_target(cpp_typesupport_target + libsurvive_ros2 "rosidl_typesupport_cpp") + +# BUILDING DRIVER + +# Component +add_library(libsurvive_ros2_driver_component SHARED + src/driver_component.cpp) +add_dependencies(libsurvive_ros2_driver_component + libsurvive) +target_include_directories(libsurvive_ros2_driver_component PUBLIC + ${CMAKE_INSTALL_PREFIX}/include + ${CMAKE_INSTALL_PREFIX}/include/libsurvive + ${CMAKE_INSTALL_PREFIX}/include/libsurvive/redist) +target_link_directories(libsurvive_ros2_driver_component PUBLIC + ${CMAKE_INSTALL_PREFIX}/lib) +target_link_libraries(libsurvive_ros2_driver_component + "${cpp_typesupport_target}" + -lsurvive) +target_compile_definitions(libsurvive_ros2_driver_component + PRIVATE "COMPOSITION_BUILDING_DLL") +ament_target_dependencies(libsurvive_ros2_driver_component + rclcpp + rclcpp_components + diagnostic_msgs + geometry_msgs + sensor_msgs + tf2 + tf2_ros) +rclcpp_components_register_nodes(libsurvive_ros2_driver_component + "libsurvive_ros2::DriverComponent") + +# Node +add_executable(libsurvive_ros2_driver_node + src/driver_node.cpp) +target_link_libraries(libsurvive_ros2_driver_node + libsurvive_ros2_driver_component) +ament_target_dependencies(libsurvive_ros2_driver_node rclcpp) + +# BUILDING POSER + +# Component +add_library(libsurvive_ros2_poser_component SHARED + src/poser_component.cpp) +add_dependencies(libsurvive_ros2_poser_component + gtsam) +target_include_directories(libsurvive_ros2_poser_component PUBLIC + ${CMAKE_INSTALL_PREFIX}/include + ${YAML_CPP_INCLUDE_DIRS} + ${Boost_INCLUDE_DIRS} + ${TBB_INCLUDE_DIRS}) +target_link_directories(libsurvive_ros2_poser_component PUBLIC + ${CMAKE_INSTALL_PREFIX}/lib) 
+target_link_libraries(libsurvive_ros2_poser_component + ${YAML_CPP_LIBRARIES} + ${Boost_LIBRARIES} + ${TBB_LIBRARIES} + "${cpp_typesupport_target}" + Eigen3::Eigen + -lgtsam) +target_compile_definitions(libsurvive_ros2_poser_component PRIVATE "COMPOSITION_BUILDING_DLL") -ament_target_dependencies(libsurvive_ros2_component +target_compile_options(libsurvive_ros2_poser_component + PUBLIC -Wno-deprecated-copy -Wno-unused-parameter) +ament_target_dependencies(libsurvive_ros2_poser_component rclcpp rclcpp_components diagnostic_msgs @@ -81,57 +169,83 @@ ament_target_dependencies(libsurvive_ros2_component sensor_msgs tf2 tf2_ros) -rclcpp_components_register_nodes(libsurvive_ros2_component "libsurvive_ros2::Component") +rclcpp_components_register_nodes(libsurvive_ros2_poser_component + "libsurvive_ros2::PoserComponent") -# Export the library path because the package installs libraries without exporting them -if(NOT WIN32) - ament_environment_hooks( - "${ament_cmake_package_templates_ENVIRONMENT_HOOK_LIBRARY_PATH}") -endif() +# Node +add_executable(libsurvive_ros2_poser_node + src/poser_node.cpp) +target_link_libraries(libsurvive_ros2_poser_node + libsurvive_ros2_poser_component) +ament_target_dependencies(libsurvive_ros2_poser_node rclcpp) + +# INSTALLATION -# Manually compose the component into a node -add_executable(libsurvive_ros2_node - src/node.cpp) -target_link_libraries(libsurvive_ros2_node - libsurvive_ros2_component) -ament_target_dependencies(libsurvive_ros2_node rclcpp) +# Install python packages -# Install libsurvive +externalproject_get_property(gtsam binary_dir) install( DIRECTORY - ${CMAKE_CURRENT_BINARY_DIR}/libsurvive-install/ + ${binary_dir}/python/gtsam DESTINATION - ${CMAKE_INSTALL_PREFIX} -) + ${PYTHON_INSTALL_DIR}) + +# Install the components -# Install the component install( TARGETS - libsurvive_ros2_component + libsurvive_ros2_driver_component + libsurvive_ros2_poser_component ARCHIVE DESTINATION lib LIBRARY DESTINATION lib RUNTIME DESTINATION bin) -# Install the node +# Install the nodes + install( TARGETS - libsurvive_ros2_node + libsurvive_ros2_driver_node + libsurvive_ros2_poser_node DESTINATION lib/${PROJECT_NAME}) -# Install the shared things +# Install helper programs + +install( + PROGRAMS + scripts/config_merger + DESTINATION + lib/${PROJECT_NAME}) + +# Install the shared files + install( DIRECTORY - config launch + config DESTINATION share/${PROJECT_NAME}) -# For testing only +# Install the python package + +ament_python_install_package(libsurvive_ros2_py) + +# TESTING + if(BUILD_TESTING) find_package(ament_lint_auto REQUIRED) + find_package(ament_cmake_ros REQUIRED) + + # Lint tests ament_lint_auto_find_test_dependencies() + + # Unit tests + ament_add_ros_isolated_pytest_test(test_config_tools + test/test_config_tools.py + TIMEOUT 120) + endif() +# EXPORTS + # Export all include directories and declare the package ament_export_include_directories(include) ament_package() - diff --git a/Dockerfile b/Dockerfile index 8b02451..29bb51f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -37,8 +37,10 @@ RUN apt-get update && apt-get install -y --no-install-recommends liblapacke-dev \ libopenblas-dev \ libpcap-dev \ + libsciplot-dev \ libusb-1.0-0-dev \ libx11-dev \ + libyaml-cpp-dev \ sudo \ valgrind \ zlib1g-dev \ @@ -62,12 +64,13 @@ RUN sudo apt-get update && rosdep update \ && rosdep install --from-paths src --ignore-src -r -y \ && source /opt/ros/$ROS_DISTRO/setup.bash \ - && colcon build \ + && colcon build --symlink-install \ && sudo rm -rf /var/lib/apt/lists/* # 
Initialization RUN echo -e "#!/bin/bash \n\ set -e\n\ +source /opt/ros/${ROS_DISTRO}/setup.bash\n\ source /home/ubuntu/ros2_ws/install/setup.bash \n\ exec \$@" > /home/ubuntu/ros2_ws/entrypoint.sh RUN chmod 755 /home/ubuntu/ros2_ws/entrypoint.sh diff --git a/README.md b/README.md index fc8c1fe..c39f3a4 100644 --- a/README.md +++ b/README.md @@ -1,11 +1,11 @@ # libsurvive_ros2 -**A Steam-free driver for republishing Lighthouse 1.0 and 2.0 pose and sensor data to ROS2** +**A ROS2 driver for Steam Tracking 2.0** [![CI Status](https://circleci.com/gh/asymingt/libsurvive_ros2.svg?style=svg)](https://app.circleci.com/pipelines/github/asymingt/libsurvive_ros2) -This is a lightweight ROS2 wrapper around the [libsurvive](https://github.com/cntools/libsurvive) project, which provides a set of drivers for 6DoF rigid body tracking using SteamVR 1.0 and 2.0 hardware. It also listens for inertial, button, configuration and device connection events, and forwards these to various topics. +This is a lightweight ROS2 wrapper around the [libsurvive](https://github.com/cntools/libsurvive) project, which provides a set of drivers for 6DoF rigid body tracking using SteamVR 2.0 hardware. It also listens for inertial, button, configuration and device connection events, and forwards these to various topics. -This hardware is particularly useful to robotics projects, because it provides a cost effective method of obtaining ground truth with a positional accuracy typically in the sub-centimeter, sub-degree range. The final accuracy of course depends on the tracking volume, base station number and placement and level of occlusion, as well as calibration quality. +This hardware is particularly useful to robotics projects, because it provides a cost effective method of obtaining ground truth with a positional accuracy typically in the sub-centimeter, sub-degree range. The final accuracy of course depends on the tracking volume, lighthouse number and placement and level of occlusion, as well as calibration quality. The driver in this repo is largely based on the ROS1 driver [here](https://github.com/cntools/libsurvive/tree/master/tools/ros_publisher). It has been migrated to ROS2, and refactored slightly -- we use a thread to manage the blocking interaction with libsurvive, so that it doesn't lock the ROS2 callback queue and prevent messages from propagating correctly. 
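The snippet below is a deliberately minimal sketch of that pattern and not the actual driver component: the node name is made up, it assumes the low-level `survive_init` / `survive_poll` / `survive_close` C API, and it omits the publishers, parameters and error handling that the real code needs. The blocking libsurvive poll loop runs on its own `std::thread`, so the ROS2 executor stays free to service callbacks.

```cpp
// Illustrative sketch only (not the real driver component): run libsurvive's
// blocking poll loop on a worker thread so the ROS2 executor stays responsive.
#include <thread>

#include <rclcpp/rclcpp.hpp>
#include <survive.h>  // header location depends on how libsurvive was installed

int main(int argc, char ** argv)
{
  rclcpp::init(argc, argv);
  auto node = std::make_shared<rclcpp::Node>("mini_libsurvive_driver");

  // Callbacks registered with survive_install_pose_fn() and friends would fire
  // from the worker thread below, and publish to ROS topics from there.
  struct SurviveContext * ctx = survive_init(argc, argv);

  std::thread worker(
    [&]() {
      // survive_poll() blocks on USB I/O, which is exactly why it must not be
      // called from a ROS2 timer or subscription callback.
      while (rclcpp::ok() && survive_poll(ctx) == 0) {
      }
    });

  rclcpp::spin(node);  // the callback queue keeps draining while the worker blocks
  worker.join();       // the loop exits after the current poll once rclcpp::ok() is false
  survive_close(ctx);
  rclcpp::shutdown();
  return 0;
}
```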
@@ -16,15 +16,23 @@ Progress: - [x] code ported - [x] documentation added - [x] foxglove example +- [x] fix timestamp errors - [x] test imu callback - [x] test connect callback -- [x] fix timestamp errors -- [x] fix button callback (only works on Tracker 2.0 wirelessly via watchman) -- [x] add linting checks (nice to have) -- [ ] add unit tests -- [ ] fix composable node (not a functional blocker right now) +- [x] fix button callback (only works wirelessly via watchman, not with USB) +- [x] add linting checks +- [x] add unit tests +- [x] fix composable node (not a functional blocker right now) +- [x] add circle ci pipeline for ROS2 rolling: arm64, amd64 +- [x] add circle ci pipeline for ROS2 foxy: arm64, amd64 +- [x] add circle ci pipeline for ROS2 galactic: arm64, amd64 +- [x] add circle ci pipeline for ROS2 humble: arm64, amd64 +- [x] add circle ci pipeline for ROS2 irwin: arm64, amd64 +- [x] add configuration stitcher +- [ ] add rigid body tracker +- [ ] add mergify integration (nice to have) +- [ ] add dependebot integration (nice to have) - [ ] convert to lifecycle node (nice to have) -- [ ] add circlce, mergify and dependebot integration # Installation instructions @@ -49,10 +57,10 @@ $ docker compose build $ docker compose run libsurvive_ros2 colcon test ``` -This will checkout a lightweight ROS2 rolling container, augment it with a few system dependencies, checkout and build the code and drop you into a bash shell as user `ubuntu` at the home directory `~/ros2_ws/src`. If you'd rather build and test for ROS2 Foxy on arm64, as an example, you'd do this: +This will checkout a lightweight ROS2 rolling container, augment it with a few system dependencies, checkout and build the code and drop you into a bash shell as user `ubuntu` at the home directory `~/ros2_ws/src`. If you'd rather build and test for ROS2 Rolling on arm64, as an example, you'd do this: ```sh -$ docker compose build --build-arg ARCH=arm64 --build-arg ROS_DISTRO=foxy +$ docker compose build --build-arg ARCH=arm64 --build-arg ROS_DISTRO=rolling $ docker compose run libsurvive_ros2 colcon test # optional, to test the package ``` @@ -60,40 +68,13 @@ Note that if you are building on a different architecture than the host you must ## Native build and test (not recommended) -You'll need ROS2 installed: https://docs.ros.org/en/humble/Installation.html - -You'll also need to follow the instructions here: - -```sh -sudo apt-get install build-essential \ - cmake \ - freeglut3-dev \ - libatlas-base-dev \ - liblapacke-dev \ - libopenblas-dev \ - libpcap-dev \ - libusb-1.0-0-dev \ - libx11-dev \ - zlib1g-dev -``` - -Finally source ROS2, create a workspace, checkout, compile and test: - -```sh -$ mkdir ~/ros2_ws/src -$ cd ~/ros2_ws/src -$ git clone https://github.com/asymingt/libsurvive_ros2.git -$ cd .. -$ rosdep update -$ rosdep install --from-paths src --ignore-src -r -y -$ colcon build -$ colcon test # optional, to test the package -$ source install/setup.bash -``` +You'll need ROS2 installed on Ubuntu 22.04: https://docs.ros.org/en/humble/Installation.html. After installation please refer to the [Dockerfile](./Dockerfile) to see what commands must be run to get the system working correctly. # Running the code -To run the driver on containerized installations do the following: +Before running this driver, please make sure that you are only using Steam Tracking 2.0 lighthouses and trackers, and that each Gen 2 lighthouse has been assigned a unique ID. 
The best way to do this is to use the [crazyflie python client](https://www.bitcraze.io/documentation/tutorials/getting-started-with-lighthouse). Please refer to the section entitled `Configure the base stations channel (mode)` in their getting-started guide. From this point on it will be assumed that the "mode" or "channel" of each lighthouse has been set correctly. Failing to do this will cause problems.
+
+To run the driver in a containerized context do the following:
```sh
$ docker compose up
@@ -102,7 +83,7 @@ Alternatively, to run the driver on native installations run the following:
```sh
-$ ros2 run libsurvive_ros2 libsurvive_ros2.launch.py rosbridge:=true
+$ ros2 launch libsurvive_ros2 libsurvive_ros2.launch.py web_bridge:=true
```

There are three launch arguments to `libsurvive_ros2.launch.py` to help get up and running:
@@ -112,7 +93,7 @@ There are three launch arguments to `libsurvive_ros2.launch.py` to help get up a
- `rosbridge = boolean (default: false)` : Starts a `rosbridge_server` and `rosapi`, which offers data over a websocket by default at port 9090, which allows you to stream directly into the [foxglove.dev](https://foxglove.dev) robotics visualization tool.
- `composable = boolean (default: false)`: For advanced users only -- it shows how to load the component-based version of the code to get zero-copy IPC between it and other composable nodes.

-# Example visualization with Foxglove
+# Example visualization in Foxglove Studio

After launching the stack (with `rosbridge:=true`), navigate to [this Foxglove link](https://studio.foxglove.dev/?ds=rosbridge-websocket&ds.url=ws%3A%2F%2Flocalhost%3A9090 ) and you should see the data streaming:
@@ -121,13 +102,156 @@ After launch the stack (with `rosbridge:=true`), navigate to [this Foxglove link

Now move the tracker around and you should see its corresponding transform move around in the user interface.

-# Common questions
+# Additional tooling
+
+## Lighthouse calibration file stitching
+
+Libsurvive is capable of calibrating the relative poses of all of the lighthouses that a single tracker can see.
+
+Each tracker can only receive data from up to four lighthouses. For this reason, in the case where you want to cover a wider region, you will need to deploy more lighthouses than any single tracker can see at once.
+
+So, this makes calibration tricky, because some lighthouses just won't be visible to a single tracker. A common way to work around this is to calibrate overlapping subsets of lighthouses separately and then stitch the resulting configuration files together:
+
+```sh
+# move the tracker to a new subset of anchors
+
+docker compose run ros2 launch libsurvive_ros2 libsurvive_ros2.launch.py \
+  recalibrate:=true driver_config_out:=/data/config1.json
+
+# move the tracker to a new subset of anchors
+
+docker compose run ros2 launch libsurvive_ros2 libsurvive_ros2.launch.py \
+  recalibrate:=true driver_config_out:=/data/config2.json
+
+# move the tracker to a new subset of anchors
+
+docker compose run ros2 launch libsurvive_ros2 libsurvive_ros2.launch.py \
+  recalibrate:=true driver_config_out:=/data/config3.json
+
+...
+
+docker compose run ros2 launch libsurvive_ros2 libsurvive_ros2.launch.py \
+  recalibrate:=true driver_config_out:=/data/configN.json
+
+# stitch all of them together
+
+docker compose run ros2 run libsurvive_ros2 config_merger \
+  -i /data/config1.json ...
/data/configN.json \
+  -r src/libsurvive_ros2/test/example_registration.yaml \
+  -o /data/config_merged.json
+
+# then use the final stitched config
+
+docker compose run ros2 launch libsurvive_ros2 libsurvive_ros2.launch.py \
+  recalibrate:=false driver_config_in:=/data/config_merged.json
+```
+
+## Rigid body poser
+
+### Background terminology
+
+This driver includes its own "poser" which, instead of tracking a single device in isolation, treats devices attached to a rigid body as sources from which to draw measurements. This allows a large body to be tracked by many devices, and this redundancy should help with stability.
+
+We'll adopt this frame nomenclature shorthand:
+
+```
+g : the global frame (the parent frame which tracking is relative to)
+l : the lighthouse frame
+t : the tracker "tracking" frame (virtual point within the tracker)
+s : the tracker "sensor" frame (midpoint of a sensor)
+h : the tracker "head" frame (the bolt on the back of the tracker)
+i : the tracker "imu" frame (the IMU origin)
+b : the origin of a rigid body to which a tracker is attached
+```
+
+The variable `aTb` is the transform that takes a point `b_x`, expressed in frame `b`, and re-expresses it as the point `a_x` in frame `a`. So, `a_x = aTb * b_x`. This is great because you can fact-check long transform chains for correctness by looking at adjacent frame letters. As an example, let's assume that a tracking sensor is located at some point `t_p` expressed in the tracking frame. To move it into the lighthouse frame, where its angle can be predicted, we just do this chain of frame transforms: `l_p = lTg * gTt * t_p` (a small numerical sketch of this convention appears just before the Model 1 diagram below).
+
+### Algorithmic approach
+
+Our tracking problem can be modeled with a nonlinear factor graph. In this paradigm nodes represent quantities we want to estimate, and factors are nonlinear constraints that limit the relative or absolute values that nodes can assume.
+
+The nice thing about this representation is that measurement uncertainty is handled in a principled way, with all measurements weighted to produce the best estimate possible. We also have a great library, GTSAM, which takes care of some of the harder parts of probability handling and sensor fusion, which would be very tricky to write correctly ourselves.
+
+GTSAM has many optimizers, but the one we'll be using is iSAM2. A distinct advantage of this solver is that we can perform updates incrementally -- the whole graph need not be re-solved over and over again. In fact, iSAM2 is clever enough to understand local cliques of constraints and perform isolated updates to limit compute.
+
+Although it is possible to add custom factor types in GTSAM, it involves deriving a Jacobian for your factor, which is not straightforward to do correctly. For this reason, we're going to shoehorn our problem into a format that couples nicely with this framework. The two sections below walk through a naive model and the model actually implemented in this code.
+
+### Model 1 (how not to do it)
+
+Here's the naive way to solve the problem, where we treat every IMU measurement as a constraint on the evolution of the IMU frame pose. The major issue with this representation is that the graph size scales as a function of the number of IMUs (trackers) and not the number of rigid bodies. Moreover, it requires a CustomFactor to project the `gTi[t]` pose into the tracking frame in order to add an angle constraint.
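+Before walking through the graph itself, here is a tiny, self-contained illustration of the frame composition convention from the background section and of the bearing-style angle that the light factors constrain. The numbers are made up purely for illustration, and Eigen (already a dependency of the poser) stands in for the full GTSAM machinery:
+
+```cpp
+// Illustrative sketch only: compose frame transforms using the aTb convention
+// and predict a bearing-style angle for a sensor as seen from a lighthouse.
+#include <cmath>
+#include <iostream>
+
+#include <Eigen/Geometry>
+
+int main()
+{
+  using Iso = Eigen::Isometry3d;
+
+  Iso gTl = Iso::Identity();  // lighthouse pose in the global frame (made-up value)
+  gTl.translation() = Eigen::Vector3d(0.0, 0.0, 2.0);
+
+  Iso gTt = Iso::Identity();  // tracker "tracking" frame pose in the global frame (made-up value)
+  gTt.translation() = Eigen::Vector3d(1.0, 0.0, 0.0);
+
+  // A sensor 5cm from the tracking origin, expressed in the tracking frame.
+  Eigen::Vector3d t_p(0.05, 0.0, 0.0);
+
+  // l_p = lTg * gTt * t_p, where lTg is simply the inverse of gTl.
+  Eigen::Vector3d l_p = gTl.inverse() * gTt * t_p;
+
+  // A lighthouse effectively measures the bearing to this point, which is why
+  // the light factors described below constrain arctangent-style quantities.
+  std::cout << "sensor in lighthouse frame: " << l_p.transpose() << std::endl;
+  std::cout << "one sweep angle prediction (rad): " << std::atan2(l_p.x(), l_p.z()) << std::endl;
+  return 0;
+}
+```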
+
+![alt text](doc/model-v1.png)
+
+#### Variables
+
+We'll estimate the following quantities for each lighthouse `m` and tracker `j` with `k` sensors over `t` time steps, a subset `t'` of which are used to model IMU bias:
+
+```
+(V0) gTi[j,t] : the pose of the imu in the global frame for all times t.
+(V1) B[t'] : the imu accel and gyro bias for a subset of times t'.
+(V2) tTs[j,k] : the sensor to tracking frame transform for each sensor.
+(V3) tTh[j] : the bolt-head to tracking frame transform
+(V4) tTi[j] : the IMU frame to tracking frame transform
+(V5) bTh[j] : the bolt-head to body origin frame transform
+(V6) gTl[m] : the lighthouse -> global frame transform for all times t.
+```
+
+#### Factors
+
+The evolution of the pose in the IMU frame is captured by an IMU factor along with a bias drift factor. GTSAM uses a pre-integration approach when solving in order to prevent the 250 measurements arriving every second from growing the graph beyond what can be solved within the time budget.
+
+```
+(F0) ImuFactor(gTi[j,t-1], V[j,t-1], gTi[j,t], V[j,t], B[j,t']) [red]
+(F1) BetweenFactor(B[j,t'], B[j,t'']) [brown]
+```
+
+There are a couple of transforms which have initial values calibrated and written into the tracker firmware. They are the following:
+
+```
+(F2) PriorFactor(tTh[j]) [yellow]
+(F3) PriorFactor(tTi[j]) [orange]
+(F4) PriorFactor(tTs[j]) [dark blue]
+```
+
+Libsurvive comes up with its own pose solution using MPFIT, which provides us with priors on the static poses of the lighthouses and the time-varying pose of a tracker's "tracking frame":
+
+```
+(F5) PriorFactor(gTl[m]) [pink]
+(F6) CustomFactor(gTt[j],tTi[j,t]) [light blue]
+```
+
+We perform our own extrinsic calibration procedure to determine the pose of the tracker's head frame in the rigid body frame.
+
+```
+(F7) PriorFactor(bTh[j]) [purple]
+```
+
+The light measurement essentially applies a constraint on the arctangent of the 2D vector pointing towards the sensor position projected into the lighthouse frame. This requires a custom factor and a target axis (X or Y).
+
+```
+(F8) CustomFactor(gTl[m], gTi[j,t], tTi[j], tTs[j,k], axis) [green]
+```
+
+The rigid body constraint enforces consistency between a pair of tracker poses at any time stamp: when each tracker's pose gTt[j,t] is used to resolve the bTg transform, the results must match. In other words, for two trackers A and B: `gTt[A,t] x tTh[A] x (bTh[A])^-1 == gTt[B,t] x tTh[B] x (bTh[B])^-1`.
+
+This yields the following factor for every such pair:
+
+```
+(F9) CustomFactor(gTt[A,t], tTh[A], bTh[A], gTt[B,t], tTh[B], bTh[B]) [red]
+```
+
+### Model 2 (implemented in this code)
+
+
+
+# Frequently asked questions

- **I don't see any data streaming** Examine the console log. If you see a LIBUSB error, chances are high that you either haven't installed the udev rules, or you haven't mounted the /dev/bus/usb volume correctly into the docker container.

-- **How do I configure this for my specific tracker ID?** There's no need -- the libsurvive driver will enumerate all devices, query their ID and publish this ID as the transform name using the TF2 standard topic `/tf`.
Lighthouse positions change less frequently, and so they are published at a lowe rate on `/tf_static`. -- **The base stations locations are not where I'd expect them to be** -- The calibration phase of libsurvive works out the relative location of the base stations. It has no idea of their orientation with respect to the room. To fix this, you will need to write your own static transform broadcaster to provide the relationship between your world frame and the `libsurvive_world` frame. +- **The ligthhouse locations are not where I'd expect them to be** -- The calibration phase of libsurvive works out the relative location of the lighthouses. It has no idea of their orientation with respect to the room. To fix this, you will need to write your own static transform broadcaster to provide the relationship between your world frame and the `libsurvive_world` frame. - **In need to send extra arguments to the driver** -- Have a look at the `libsurvive_ros2.launch.py` file, and particularly at the `parameters` variable. You should probably be writing your own launch file, and you can include custom modifications for your specific tracking setup by changing the parameters you pass to the driver. diff --git a/config/config.json b/config/config.json deleted file mode 100644 index 2e77055..0000000 --- a/config/config.json +++ /dev/null @@ -1,42 +0,0 @@ -"v":"0", -"poser":"MPFIT", -"disambiguator":"StateBased", -"configed-lighthouse-gen":"2", -"ootx-ignore-sync-error":"0", -"floor-offset":"-13.095158576965" -"lighthouse0":{ -"index":"0", -"id":"384784632", -"mode":"0", -"pose":["-2.731781959534","-20.364048004150","-10.354103088379","-0.640586972523","-0.253050029391","-0.278953194743","-0.669177949728"], -"variance":["0.493555217981","0.908196926117","0.793995380402","0.016432996839","0.017605012283","0.004369542468"], -"unlock_count":"0", -"accel":["-1.000000000000","125.000000000000","127.000000000000"], -"fcalphase":["0.000000000000","-0.005214691162"], -"fcaltilt":["-0.048187255859","0.045806884766"], -"fcalcurve":["-0.022827148438","0.124389648438"], -"fcalgibpha":["0.479248046875","0.283935546875"], -"fcalgibmag":["0.002111434937","-0.002265930176"], -"fcalogeephase":["1.112304687500","2.271484375000"], -"fcalogeemag":["-0.391601562500","-0.382080078125"], -"OOTXSet":"1", -"PositionSet":"1" -} -"lighthouse1":{ -"index":"1", -"id":"1749379901", -"mode":"1", -"pose":["-7.594938278198","-19.912893295288","-9.974508285522","0.726724440392","0.175403191329","-0.168665705852","-0.642383987874"], -"variance":["204.543045043945","1017.870483398438","650.074829101562","0.574005842209","0.190413609147","0.094315797091"], -"unlock_count":"0", -"accel":["6.000000000000","111.000000000000","127.000000000000"], -"fcalphase":["0.000000000000","-0.003511428833"], -"fcaltilt":["-0.049621582031","0.045410156250"], -"fcalcurve":["0.049346923828","0.314208984375"], -"fcalgibpha":["2.273437500000","2.234375000000"], -"fcalgibmag":["-0.004455566406","-0.002807617188"], -"fcalogeephase":["0.187988281250","1.401367187500"], -"fcalogeemag":["-0.229248046875","-0.134887695312"], -"OOTXSet":"1", -"PositionSet":"1" -} diff --git a/config/example_config_driver.json b/config/example_config_driver.json new file mode 100644 index 0000000..b142e47 --- /dev/null +++ b/config/example_config_driver.json @@ -0,0 +1,96 @@ +"v":"0", +"poser":"MPFIT", +"disambiguator":"StateBased", +"configed-lighthouse-gen":"2", +"ootx-ignore-sync-error":"0", +"floor-offset":"0" +"lighthouse0": { +"index":0, +"id":"384784632", +"mode":"4", 
+"pose":["0.000000000","1.358985932","10.020991301","-0.118739017","-0.151070461","0.759929443","0.620955718"], +"variance":["0.000000726","0.000000996","0.000000974","0.000005692","0.000000481","0.000000488"], +"unlock_count":"1", +"accel":["2.000000000000","127.000000000000","-24.000000000000"], +"fcalphase":["0.000000000000","-0.005214691162"], +"fcaltilt":["-0.048187255859","0.045806884766"], +"fcalcurve":["-0.022827148438","0.124389648438"], +"fcalgibpha":["0.479248046875","0.283935546875"], +"fcalgibmag":["0.002111434937","-0.002265930176"], +"fcalogeephase":["1.112304687500","2.271484375000"], +"fcalogeemag":["-0.391601562500","-0.382080078125"], +"OOTXSet":"1", +"PositionSet":"1" +} +"lighthouse1": { +"index":1, +"id":"2179997076", +"mode":"2", +"pose":["1.351791828","2.419027929","10.408610237","0.093801074","0.080356890","0.704925286","0.698444321"], +"variance": ["0.000000227","0.000000625","0.000000683","0.000022800","0.000000627","0.000003618"], +"unlock_count":"1", +"accel":["-1.000000000000","127.000000000000","1.000000000000"], +"fcalphase":["0.000000000000","0.000309944153"], +"fcaltilt":["-0.048522949219","0.045379638672"], +"fcalcurve":["0.109558105469","0.485107421875"], +"fcalgibpha":["1.365234375000","2.183593750000"], +"fcalgibmag":["0.009498596191","0.008605957031"], +"fcalogeephase":["1.474609375000","2.023437500000"], +"fcalogeemag":["-0.724609375000","-0.678222656250"], +"OOTXSet":"1", +"PositionSet":"1" +} +"lighthouse2": { +"index":2, +"id":"1974130315", +"mode":"1", +"pose":["1.780496198","2.485230341","10.484703391","0.133537220","0.103533152","0.696177156","0.697700555"], +"variance":["0.000000237","0.000000651","0.000000425","0.000013222","0.000001169","0.000003076"], +"unlock_count":"1", +"accel":["-1.000000000000","127.000000000000","1.000000000000"], +"fcalphase":["0.000000000000","0.000309944153"], +"fcaltilt":["-0.048522949219","0.045379638672"], +"fcalcurve":["0.109558105469","0.485107421875"], +"fcalgibpha":["1.365234375000","2.183593750000"], +"fcalgibmag":["0.009498596191","0.008605957031"], +"fcalogeephase":["1.474609375000","2.023437500000"], +"fcalogeemag":["-0.724609375000","-0.678222656250"], +"OOTXSet":"1", +"PositionSet":"1" +} +"lighthouse3": { +"index":3, +"id":"1749379901", +"mode":"0", +"pose":["3.024384244","2.537668640","10.302714252","0.241596319","0.227914282","0.666014862","0.667915042"], +"variance":["0.000000703","0.000000998","0.000000616","0.000011824","0.000001060","0.000003858"], +"unlock_count":"1", +"accel":["0.000000000000","127.000000000000","2.000000000000"], +"fcalphase":["0.000000000000","-0.003511428833"], +"fcaltilt":["-0.049621582031","0.045410156250"], +"fcalcurve":["0.049346923828","0.314208984375"], +"fcalgibpha":["2.273437500000","2.234375000000"], +"fcalgibmag":["-0.004455566406","-0.002807617188"], +"fcalogeephase":["0.187988281250","1.401367187500"], +"fcalogeemag":["-0.229248046875","-0.134887695312"], +"OOTXSet":"1", +"PositionSet":"1" +} +"lighthouse4": { +"index":4, +"id":"3590859256", +"mode":"3", +"pose":["0.779046607","2.578838980","10.948755065","0.074242472","0.073850892","0.706703381","0.699717395"], +"variance":["0.000000848","0.000001000","0.000000939","0.004435801","0.670535812","0.000003761"], +"unlock_count":"1", +"accel":["0.000000000000","127.000000000000","-1.000000000000"], +"fcalphase":["0.000000000000","-0.003845214844"], +"fcaltilt":["-0.048614501953","0.046630859375"], +"fcalcurve":["0.086425781250","0.200195312500"], +"fcalgibpha":["2.226562500000","2.824218750000"], 
+"fcalgibmag":["-0.006622314453","-0.004924774170"], +"fcalogeephase":["2.923828125000","0.600585937500"], +"fcalogeemag":["0.236816406250","-0.302490234375"], +"OOTXSet":"1", +"PositionSet":"1" +} diff --git a/config/example_config_merger.yaml b/config/example_config_merger.yaml new file mode 100644 index 0000000..5786391 --- /dev/null +++ b/config/example_config_merger.yaml @@ -0,0 +1,66 @@ +# Copyright 2023 Andrew Symington +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +# +# This is the offset of the marker position from the origin of the lighthouse. +# So, if you use an external system like a theodolite, precise RTK setup or +# motion capture system, this is the offset of its frame w.r.t the lighthouse. +# +# Looking directly at a lighthouse oriented upright, the coordinate frame is +# a conventional right-handed system with the following properties: +# +# > +x points left. +# > +y points up. +# > +z points into the base station. +# +# In the example above the registration marker is 10cm above the lighthouse. +# +# NOTE: We force the id and channel to be redeclared in order to make sure +# that the markers are for the configuration file and avoid mistakes. 
+# +# Note: All poses are expressed in the following order: [x,y,z,qw,qx,qy,qz] +# +lighthouse_from_marker: + position: [0.0, 0.1, 0.0] # 10cm "above" a base station +markers: + lighthouse0: + id: 384784632 # Serial number of lighthouse is LHR-hexidecimal(id) + mode: 4 # This is the same as the channel + position: [-0.000000000000, 1.458985931705, 10.020991301227] + sigma: [0.001, 0.001, 0.001] + lighthouse1: + id: 2179997076 # Serial number of lighthouse is LHR-hexidecimal(id) + mode: 2 # This is the same as the channel + position: [1.351791827768, 2.519027929396, 10.408610236531] + sigma: [0.001, 0.001, 0.001] + lighthouse2: + id: 1974130315 # Serial number of lighthouse is LHR-hexidecimal(id) + mode: 1 # This is the same as the channel + position: [1.780496197943, 2.585230340850, 10.484703390713] + sigma: [0.001, 0.001, 0.001] + lighthouse3: + id: 1749379901 # Serial number of lighthouse is LHR-hexidecimal(id) + mode: 0 # This is the same as the channel + position: [3.024384244020, 2.637668639886, 10.302714251888] + sigma: [0.001, 0.001, 0.001] + lighthouse4: + id: 3590859256 # Serial number of lighthouse is LHR-hexidecimal(id) + mode: 3 # This is the same as the channel + position: [0.779046606866, 2.678838980482, 10.948755064650] + sigma: [0.001, 0.001, 0.001] diff --git a/config/example_config_poser.yaml b/config/example_config_poser.yaml new file mode 100644 index 0000000..3f8166f --- /dev/null +++ b/config/example_config_poser.yaml @@ -0,0 +1,38 @@ +# Copyright 2023 Andrew Symington +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +# +# This file allows you to declare rigid bodies and the trackers attached to +# them. This is useful if you are trying to track a robot in a large space, +# where periodically trackers go out of sight of lighthouses. So, we rely on +# redundancy to increase robustness, but also accuracy. +# +# Note that "poses" describes the bTh transform, which moves a point from +# the head frame (the frame described as the "Coordinate system" of the +# vive tracker, centered on the 1/4" bolt) denoted 'h' to the rigid body. +# frame, which we denote 'b'. In the example below we mount trackers on a +# 30cm ruler "wand", with Y running along the ruler length. 
+#
+rigid_bodies:
+  - body_id: "rigid_body/wand"
+    trackers:
+      - tracker_id: LHR-74EFB987
+        bTh: [0.0, 0.15, 0.0, 1.0, 0.0, 0.0, 0.0]
+      - tracker_id: LHR-933F150A
+        bTh: [0.0, -0.15, 0.0, 1.0, 0.0, 0.0, 0.0]
diff --git a/doc/model-v1.png b/doc/model-v1.png
new file mode 100644
index 0000000000000000000000000000000000000000..7309a3fa1d182395804351c7ff9f0aab0db56e86
Binary files /dev/null and b/doc/model-v1.png differ
zZymqnOB3dSy68*#&{->R>ssTCB2-SeSy<+zUEp&2lFs(dYUb*BPr}9H3!xLPq4)&n zg(ZsiXzGZz`>U@^q|IDuxp46Nyz+>_py7hGG5muFKEAZ}u}cmV0hw02@T_OSpuZ$A z=Q;f=U2_!i`gam!s>_rISKQ%J87l7`EQ=zNgF%6r@}YtKN~lXY8U52jo^NmE$GY7d z$TUiGR=N3@ptHMKePh&dG`K5X9XFd`GMt_B*{GpmBTEcD2lZsf`4H~Pl{hWnR3nmg za{^Nt1P(J&s-axKk?j9Mi?|GQps-WO%!VlOlg&Y zbT^EzxQ-QS45S+eekl^(H)%38;gL_Ly?u%c%nIs*|Fb7mR@-rmWbYut~gCt}|rCvr+W#to{ zLCYB53#M{@FABuh)$jv^+r1RU0ju8}R@!UJy0KuoV~-W196St;MEJo&yaHB;Gu|($ z>i1G63R0!{*1fetVxO!ur7I;E(7xo+L}8+(Nx~6SAMEB7wB~uz8vRH5HwFpU69@vS zeOu#cLCeNKSaF?xFBLf+5~oe3A}X4@g`r9~4w<;nD7vz+VF(=Vqqf*qL(DL>EuIaD z(?2B~!#uI?$f3DepeCpqMp>dsouyIxY)~A=(BHKX|1CBN~>NxdgZuh7aPWYbH~%U(sd@)7MB;xNw&# z3GRTw0}5-WF1fJgaR0B$_>D>f2lbtTptw(|1qE|Z?g1P~0fNy+ekZF(-K*M;&ub_h z=(qJMgOdC!XIKqBTx*L%lntVH-b=R%N*yiJh4wUyDTzid54xRD_;h0*$7AL6TI_)up8)-0;$;q1^;I7Mp&e zD=Q|Pfg~P=p>hhys@Cw@4nrZ(d)4f3rTUE1n@7!U^z2(Bd?7T>AW)H@t~JE89B~vI z8$6k}sxiUKN(9qzcwoY|>nnpr47R+UJL1m^Vjidh_pdeNaJsbeNw?x+!GMSg)pvoZ z#)Q*)c&oGmrR^@t`H?PZe%X#@v|Q$Wh1z#!H)3#KKa6Dr6J5=Uc?*ggoRjAsPTzM0 zL-tpxkTuqWG0`fMm&k^L&&H{9f`+^~l*Rh4eEJYm39Ra84fj{_t*`v`onomKb`4l? zUbb)`I!@c7H>hZx2ex}36<|Ew$7ltVYnZI8o%F?!ua&xL%j0)i(^Zt(23ndvxEWqt z$Tm|FuyOq4T3SF>mKazF<3sh6b}0$gbSE0yeePDL-*%8GdsFlIku`a$Fx|aaEF(-k zR$3H_@k?7o^rnnUO|yQRfd*K4wI%927|s9;`->3_8+RAbLW)sv(PU&XwLm<5sZe++ zh8q7!W#SUqV{rRL_yd9o-F|4r{w0fFrl8)*x z2~s{_@Bh;2?-d2dgwFP8$bMAS8=Ecfgzh?nhC4j#Zg3ku8M#ke2sTk@V{D2XgT>#! zb#NMkEqQ6FxcnK9b1$wUuBYJ_3IxuWDsdX#t@Gitr$?q4pp#2#jq8=HvdPrM-I{ND zjEb)r<&Q|fYTxGl$U!9c;}R+;t6ke+2HUFF_NghjJJ828x7Xy0)^g{^zb>kyg3@aR z-i;vQ9JUi&*~h{#!c--tfx}cU^W~GTQq=lai9JLg<)|+_0-yXE~ zIbYUYnu0bHD4s4VDctK*bD*+_>Xy`?3rqCY1usF7)-VjEFW}7$1GZb+?a2*HcLoK| z(XUAt+@>DL4o_%}n%(J0_Dfv+%_L4QU@9$Y9%_>MDJ$@fv;kE#;HhJrT(}xjvkT<9 z6rZhc9v~UG5Kq?3+^aq^X&a^9#Oa*eW8~u3F@&MDAkrVlcY;GeHs|Z8i!ml6Z(AP9 zqW)G4RwI(BRdDq4MnFIx@t(EM*CO4+N)n@)>^*jLjqEqepIz)Vi;Q63dmo(>D9A$@ z2n7dq-=N8u)yDdlyi;4T_fd9<)BmF;KiwJ8Z%!{b1&)i_3-@=Jtu*>&$F`rYYeSoQ zyHA;6^Vej5-qwq&_hcIHunQoD)nRrXhxH3XWN?W(0$x`mEb0|%8(tH+!RMYAMBczNru6j5m(*f#rkuV`zLe^)B_wy50H?C4h<^E zEuBb|?ai+?@=b=t&FE_%;gk20hLn5S#Hfs48hreYvF?J_GIkl?qqcBgR)GI%ZnwWr z@1`i~+;jiL$D^xIsDyxGjVC|#_1*4VnjfMX&`m6G#mh1J7a$siXQ+71g_CHW}c;3_Ux^~J6TI9Vri|aMJGjpXv zV3gJt_GpLST;p7W`*E=9A_Xp zcItQCRg_*1;89ga5=WiWmRx4kiBAi};)NU?tbDR^dHN)KV&&%5u+yWyWy{YW#vB(` z>t*J90c*P8qc-St^~l!EB|iSbOgl!3%W|U3{-(v;*q-YOcOH@>4VsebW_GQS(_Vk9_(TTZyW?b-Z3jo5dKS6PV%KN&Xg0{sTv zoT)(wkV_oKcrxPF{U z$4X~paqnd*GKIIdB?eATqaE6l;~OZ~&IhG~&Qis1_`D23)1~Qx#FBcM1H*Ol-J3U|f15RsR42cCvO1s2g zU1!s($+Hw(5lBnEm5xadX-75N2ZK{(;Z@LRF#UAi|5e`#%?s$BxRkk4#t{w7(+2*nKAsH)4TWk^Zx$x z`|Eqo_jFF@bb8Hw-g}?-x!n7__dfm=?VM5qKZh<}p6K#;>}%x1MM0fB{{iiyVAIe8 z;+`*a^hdw$b``wwu6M69D1T~3={>SSeDcvTao>ID!cpIVOK9S|X`+nZd9xP@F4cXB z^+^G&X{km3<%(es8?6ujcst1Uk8I*}!&~J+1l6U-{L|4TjTw)D9^E~--m$FaDDf(k zecE1~yvexC1p`FjK#$7><5xM9oXxCApuiXJrfwiE3af6FwexO7N?_eHs9b38&^*}@ z-Nn7;t{-fX{X37M%2y1d(qgr~XjgA)Kvvmmigm+ha(TSUu^_7;2v}kXK(q8&mdITT zrB+9REQI4uVf*a5y`&gQXG@M-l)Gj~z5JoOytfu$N+s%INH{IfS6*s&=^(MeMR<;C!mR$yh{F^sb(3fB8UC%~@!&FgVI#mvwao~YRqs-uzBcXM7;dg%x%I;Gg z**VSA9=98mEVxl5!FT6WN~hW}Do$IXCHN}^%TsNli-<9Edq3`a>+e8Dh-|7H@N)!>sC zsW|Pk7xm5sH<|5z4|s^i{$IWg}eKLM-VUGWO99w$j-+v4!??C|H- ziPC1aKPE5gT?@WHX!RKg=G}~xnB!#m9n5&?k>@^q(@b$ON_bw!eOJ)6VAItIhk5UY zfS+T@*Ay<1tfmsmOiM>$iDzA`lEKGKtk3y@CzQF z42un{hMf=xgi(GZE2amFh?^r27P50R3AY2Y{4&c)H-(25Vm#n)Wl+qOhWl0Px0Z*y zUJ$NKV9mTsD}}{`$vZ8HQKCQj(|}!v4%yaKD+BdnU?|$HfE-O3lIZCV3Ov*Ww%5ueU=vk zN-|(SdxT6cvAs&gnSteSdk^01$amB0?Q?Fw(VSq-GxS9NuOuNTUU@1=l&}URT$0?_iyw-tAogd9B{*h6f zIqfCNbWi4DaQ%B};DvJCrm_0im4Vsa7Jj0nYQ|GrLMbYybNNA@Fy|pKhc6Cct_zL7 
zb`K(K31TY^u}T}7LKa`v84WAcX4?I_v>b)gRW|l6zBbEyO|z}8c31jQxky;b*F28k*7Id|9=z** zk=lC*E{8%k8$v2oN z*O24$bMbe@T#?v(75m&rZF_5;tp|>&>S-ueft#F}l*}qa0;<7#aRAtGzG6$^f7WQ* zh>?ON*-l$qs9C>oEtC1YJDiP4(C zua~+%A24Khf8`LdX8*(D{CY9cIhjUe zO~5ZwRo~??zGcDqNePrvw_c{x%(K2JSdGqGfbq12aNG8w|8%PtqHN${P2FoGH^{N* z0bfL``z&_}uv%iO)iY#O+%Lu-^_sjhU2OKKer0M%C%4>Q%I!l=SAbtWC(Y^uQrYdm z(BcP@XAE*Q*-tiQ!n+Bn3R}!d>!SGlT(y3afWaflKUBjMyZ>AQS7vpHC#C2Qqx5h2V|p>bDR{p4jvyHQ(R(PDd=(Tp6PM*J^fA$82rUgd2VboL*_RI9GL> zxt@c$eW)!25Kdk^9zTwmdDkT-$+;%vgbEQo^5iZX<{jyaOWWm35DK?HEz=l7bt6yF zCy#2K85Uy$#G;O~@kh2eKn5C!BA81*xB+^3Rc?F|^DV*o$i`ZMoBUOHMQNyiEQB_W-WoY^tH*@?gt_j_FsJ!NzBF&`v_*UD% z7C-c-Qd8y1_hqx+3d2>`UrPZI10J^z zqYO$vR0ng%_?1uf5m5{3FFrSB6O)MPOLn9-AMp0lpqeMS~$bA z9$kxY<%;xaLSDI->pu>{CMwus2mDmP2VqVc?av1s5H_yeS!_B2AqL>{go@fjMFxDL zH2RfZ3&2)p{gNsAe+f?!f0zMrwii)zI~>j;aQ-(P^zz&ZK7+RX-{}U%ZzM_+X4JIl zTJ-FC%BviE>H$)IZrLsOMfc@GfK^PG(T(^$3Ckf}RmVNj4TMC?7oP`yowSpqyvdPg z1k)RkPSiIRcI+>2CM0+{mU@&|2(v;CnVtN6y=K+0KeQi?*Z(IP6iL(=J~qH`u)r$q zrPuCcbv@UEtvU)6RSU_Y|56S^0RsZ%Xavu3`wjKT@M;NMgtRTThV30?tCiPfo;@6A zsC?VcJ{Z`<@Cl$Xfk<$3lo^91oivWh@ip!hz@I>s2NqIZ&&lHawr3C^=KZeSPXh`> z2_4>7hhxYa9+6yt!0R`gphP22#w8-t{rMFH9S?35zf|&L{&J~@ia{x8u zYvF$DDJTePzg|+M-Y&#i9xN-WeQP6DoZ#X)^)th4wd3>3fP{18O}!&djY#2bCLNKc zA4j@qXSeYyi*uKd3d&yGUb^W8MQ@k%Uq9>-dG0)y1TU}x{9I7%IZ3cpoG|X<^4;Ag zSSejIG1SXA*^W0YO@TV1@&epyyYV2{4`(aS*>NAPvY@kCz{Y9&sAo#QJ?Al*le%iJokzLQyU!IFMhT-lUr6;# zTk5u)D4?(9D=s8{E84iIMb^D2mr;MqXfxqip{OYMj`k>?@*Q>EeE;nE@6B1&1`ui=<$%tb6naIwEF%&GK&^PQ3klS%{ZbL(gCRJ(MjN>iF-Wz}DMwUzPstJ2b(m24fvu(r_Ft z0HYGyi$OOzl+3tXz^Yt&HrZ;ToHx(qIQS>NOQCfVNoYQF-?_TZ?Vx zX1zW?d2CB=rs`XzZbz)@IpK`eN!bdzyjLXt%!BHkmwTg3CvMR4$@eitJ9u0Uu?d&dk8ZAJ31YfUG@0}AX2P4_=@)o_(Zozwyn0HjKx+FM=dGW4@@Kt2eZI^ry5GR#&{`C^X^K>*|Zc)XcXjFo+C8g zANe1RD>jMNbYsx0 zDYol9H~yq`b-R(?{!gRfBwdo?YytjVl0k#N;KPDbPR%i7ZCo--r?lF>O4H1#8TpJd z@JB2C@e-^nE|mE1o4Cp38o#v?axHRg_UabU76G)oS4Mz1KUXE`l7tRc7V~y;CfNX& zPeq!z3Kq#P{ZR=XsH80iKT&A;KXG;}54+$&aUVY=QW?*PqJ_4gcz8`Z7Qke8v9d_F zX@HPi`UK(J6ZwBMiysGMD)8flLH{G8r3ZJycioQR??)%ZRV^neBToNe=mTIVlc}Ga z#+A<2NZSp0670-CzfTy!+>U0ih3h{*-kfGSZQ~I?%IMVdN}dFV?&+QVlg7FKD$Kz9 z$JXK)!)(V9vU&e(&4$O+&%Bqb=Nc@VyNduQWhX0>q)!7QFtW>P3&jwdjRyqqxn`xt zS6H=aSeR0iUe(GkY~U(pqB9-wH-G?fvNn`(iM&G&dfmNtnTZ3L>FV&e zJN_LiNhiq>-)$JfKRguJ*SemHEnlU@l4UW@ z#`7iX$6omcj|mzY9rG30VcjLL>~)Yr&&l|Oq0JiWTC=ZIg_;Z;dReNtO#yo3Id z#Vs;R)0xTT#itPPUKs{90A$rNjPzvPDRwdZX;xJI<#r8s&+-p?jc5g{vt?vOW?cQ8$=d$hA3jA2+1P#|xyNL0?R0)Ne&e~|`t6lNg; z|F(3=hhs0$i!BCymUCqm2E*`nJ1r&EkN8FD7GUW7_;g>BFnh-j5)!9BFg0iqFku|r z7&~G{ANh0v2-Bbrr;2&}@@ztj9Vme2PD*sggiiDJhp)FMbG=x;$-Q9vKS-DoE8Z%U z>v$P-$KWOzDf)isjJD@B<;wE8mh2M9?w2BT%s;o~wE3ozG-)r$&<7bxm!(?3l-%eO zC`Xu8zAK6o0;fw%{vFi83~0+%76Sf#Ek&43CI_y&XF5*ak>oY{J=RU3U-29{T-yIwyJHc_oS<| zF!KJAmI&!;s_xY{@w2=J$whIft#sb~Y0#EatnUn&zq{)3U*3K^q>UAR(3d7T$orw_ zp4q*v$mlr{)sE>Kp_DPBwp#LL(8Q=zk~15#kk3?04kquyLDkwXwF_h@S<8|D)2FY( zpKWR`p}zZbnelw^{OH0#W4+Sq)cT>oVaLT*3E^H6hB(_OKB!pU-8Fr~$Mg0@UX#vD zy|MrfOA}7JJrom&*~V25#JXJrZuAMJ&sD}>0eiN_+JW5rD%S8uiznq{QHq)EvEst} zgNot#pM!DAbD!!GyJ8k_l{Ah6kQ(wDToF^)wdU;tsarrAddYB!9hyB0u9?8P`y|Ug zxg9XM%WKk9yntKITXRr_G=&Ge1b_By1?2}vp$yU16Ombt-c#>7XN#H#6vK1=3YMHe z`HR-T3ITESA1+Jq-tlni z%+pi>{A+~?z4=?Kcg%U@fCG7m%6kjcr9u|yDPb)K%}R<=)>%ERFjhn234cv7$Fmkq z2}8*XS^k@@12c96x%Z&*0$7yJEbL3yniIJvF=MIibGw=`^cp^~SIv#e zTc-ivt^qU~@*{ae&cr&V=_y;=LWWHW98mm%{0cZuQ<=t`b!mbIxr2|d!ne0@ITq0z z3=(oGK#o7Y*yhX#zV{xwr5qG3?0IuMDf>=(tzo+!&z6OtfD6M^-p7Z4I)0p`XdVui z={;9huNzeP9@-+#y7Jxxi1X!6dNWDrUIFq=zY@gfD*UZ9m=7+%X-Li)1kvvrKmil*_7v11oQZ6fk_GKqn;;|4PPCwj z?o$n!ZJ_kSl({L3{;020mY%cIz7cNJ1t{2l=N#84&?{ 
zY>xn5MnPk?7DJ;H8MT!rRLhWWbK(i;0<5=VDubjKm71}nMG|1vWiq}c`5)n$5nEb4z}vyR1zY2e~?uwbXE+g1vJiwOk8rewYE&T zOqW}3CpQ9reZGcum+Ho&znc-94EP1@A9lPSkoq)ITt)dVG6tg@p^G_AV^CO6>LNs6 z>h~4`dcyOJuxPE5Nfk09mq&ugfq|K3!ID*aRTfqFp&A%3WI_Xe9lrem7X^4cucGCk z(R-Pny-o_nXAK4vi#j7~-wY)WWtyYrP3EC$gm!^)H^@&es%^HIUyI|fdWp?1ad?>o zmA=Fi!Z_~e)12u`%m8LJj~YT$VWEgK$Y6)Hy`JOziS;-)O8!J z2d5K%AjrXzoIw_wd4$Ubq-{H>@Bx?ro77aHfsXjG$e71XUv?!72G%GeeD4C&TArCF zFEC+@0LalD;M22UUsiL;xGCH3klz$!UA+p@>D{NUi{}6abU9VVcrF_dTd;V74gmeX z-z$rLE9z-vJt8yp4$ZwVwpX49M#5%z*%Me5%;n8$M}S=MAnhHZwYCr&!=sZ7L)#=y zz$hZafKf9RyF`am1sT~72!;THM?bm|LGp2#%-h2%y%;&gWbl!y@tbuRClT^;D`0R3 zR~Fg4@|ogJ{BFgQgLWpcZ6>}Lj;L|ux)cWk%ALSR{9HYv2R-|3gtB_?3xIx=gNjF@ zJqtecbkiITv-}udJZchc0-J2XmJLb&H4==DIDh*v)v*TS7CWjZ+uK2#_F(#RL|J~2 z_c36y8QePA_xJv# zSsa7PMYgPq=lyK$cPD_xqOXWRzOEqgq$xJpHL|l{+ z%Tc@WC8m-Vq;|js&rty|zWZ;YzEh&b{0g~zA#d(}o9(k1R2+@s!V9jzb4&I&3_i{46dA6K3 zpm@$#*l`-XhfKCFl$b)c^MK+sP*)c;E9&QH{l2R#zi)M_0!xNE+6AcUVI349dya$i zr;BSdg2c;p+*9KH^lO817`f9@d(o~ORJd+dYXDn#2Y2oa3FG^U6(_U!-bbo*I8L@S zxqB6SnD#a8l&eYNg=nh5x4#B;j*ya$R)vICSv~8crSsLG`K*Vi6o`(ErzAG# z#GvL_2A#~ykOPM~NA-^Yg|qK!Vvn$Q0gYs2Z9pHr#6I=>kbe+9n%C|CK^(506S1w}*O z68f=NrCL7`pI%b=8bg%3Z2nJbS-bIa4}B&}8$pL<=WQmC4siCrC50*B^l(xQ=Z#CO;gKM3?)xK++$(c=pSUvv3Ra3XX<)XXN&V!D{wmxT||6%gj9oY=so0yQ!7q3hl_eu&)fD72) z^R+ctGGt~tS}$^E{k7_fzU!u!X;bQ9Vl&kTc1He(1`FH?QkAoiS>X5CL#v-HW~XC^=Vh?lOyDcO^qfG6hZRY0KOeqn zn#J?GUFoL>wOa{cHqTe6ye^ef&-*YVF=azGzq3lecZ)lhaunze{qzUeniWrwyYS?{ z<81ATEFw(B^^p6$-PofP8U>_Ox^Q3bUmW6GXRIV!aN6W4{t|wp$@`sxZ}NUiN?}l%3C&o<~6xDA+5m~Y0Ak%?~*DhaH?tcngzvy&%`!36AFDB zWnTFgU1O|8J{LQ!Y&qS%&+0t$YUOF9)TKjP1aUV)B@dQcmh)2rsJz>ewp;e0o^+Jo z6x5+ClN{?C&=BH2B_7v17ai-{6m90hDCQ=s zg9&v)Lg#hF;6JIdKMay#3Vq-2LfH=%B-MO{HJb~%RoIAYGCYNmlnOfo-yZjm=UA3} zi3%KB&dOq<-|kS^p^e86Np&Y07q`X+$eMEKF+bvCa=ui$P1+1uWH7E!&!_A^iV&Lyp`qt;vG!wuI4piuUjm#P0FZ=YG0TZ0z; z2PK=fd{vtH7`jWAZ8&(hFz_qvc7{NPx{b=0u@g{^MhA2(1uuk|h)U=ERzAK^{gamI z^#FgXBs_#@R^b`i-roL831C?GYF&OfqShZ@7bcJy8*=}9WGT*PQ1g@RQ7&i5l*O)2 zUF|Q{x!%|N;-G5}y{#BA<1Z=s=H;K9Fd{^A|HZ@>&+t2I169#mwtaZ56c>(3*Ma_p zkqij#n+didZI~K$m7U*_>@X*?6JBMrxI*ED21@0w4N1m19l#w=aT}`iY4kH@&TA3x znHQsP_Q(JD@q-an;I}-7zl6bHrcbpoLv({gkfFq7B20B)J;E=#{E?;8O3T_}oi$3T zH5XUz-`Q75A@?Wj;PbdMpyxZBt(=qXqEu73G6N)ElphQ2L?;Ja`xNMHf_CaY<$l#> zMZ!2dA~{ zR9JULkD-|+>wVjf?!AB8$<2dGWpmz_7z{>;Q{b8d8WV$j&}-$ZT<+y^`Y8Fw<4xFY)*G;ssgQTI1(J6@)R-;>yV6St^%A6G@I zzOXh6m7$$BtbFmAA8wSO5{Yv4|4~|FP1gCqXVO~Y)}5`RpIbD#WEXjkHPYsS$1`h{ z=L1i!&2zS9IoS< zz=3M7%zHU;k#qe*Pth$6A@%3ZIV(6PPg3td2Y!7Uj!wq)YYR&L`E~opdlCW7hXtBn zVDs7QxO<)Z`(+(6Za>9MQmP&Ap1xpstDnv8tVnJ4$QHogpoUD}1Ve%!(PD+6{AB|H zh=osHT7j;M3`YFN*Y|P=>#JmwAGLRjWl{=t>go0Lj`N8#!MJ@&_k8z*xpcSh^|jt; ze^}o!A0oky6)zLKz7ddZ-D{5@UplL*8u!yE>l=Pd{%mX4M_6xm{c!o|MM~pq*!ILz z?wiJ`i~=N{&t%Pq!*UEdum4C{E*iG|^)c{27aZI9m-zTYQ{rSdgpX$WK2F)ds2>>? 
z8rE~04puvSRa|X%-NC`p#8~s4Sb%upo%I(grE31Q0p$tWANDJ|>=DKv+L|G-$$C4m zSUwZ?=>7B6!b{FWB6umIpBpYP13?RH4VE_Q+s6A?hPzCjN|txm>=xbW0^jiWb{qTl zbD_5eyCMt2=GSSkj_N-Jb6;1mRW9*g{&;R?{!3o1^M6dWx&Pn&+_;WxBfWA}=95lY zcyfQi*4HV2h9%=nV)-OGt44=;^h5rzb=h_N`XLb!o%JyPa>KQ?U-OVNSD{;)_JF<> zkgOMNI{;jZQwRWUf!Ln_jW8hk!YTU!$_)i52?FW526|mU-pgMF;4F|*uulJff3n8A Y=UEbRXAXh{zJ_RN>T2X&wS4mb0l2s0NB{r; literal 0 HcmV?d00001 diff --git a/doc/model-v2.png b/doc/model-v2.png new file mode 100644 index 0000000000000000000000000000000000000000..dbcac2d5e5553b53e921ef2c858faebe3f35c2c1 GIT binary patch literal 23775 zcmc$`bzIZ!7dLJJN{4{743Ux+L25TLLR3;1F$XvV0i}@!1%Z)s(o)jR2&D%K%8;(n zDIq1@wclsx{eA8~o`0X$kG}@H;#{Y$bDi@(glKE3QIWHfpF4Ms>b^Qe_uRP)hv&|n zcf3pr{N=)HO1*REVA=N}N{`*)E8`?dS9gxc)-xX7yL4oA{JoKy@-c++AuS!l#S)dr zv<&a`Xtlooqk?97EPtSkiM?0Qyw@OJi z>8@{9z)$a6B=P40-y`gsvroq;rBvbv{@2&GLx>+PApd{=!w=>=c4PcS#-HDBZ@Ww~ z!NI3AAW&@;^$if;we26w5)u+OgoQiy0to@g7LnqMAuU%(eKrH#CVd8n)F0;>-cPaJ zuUI3;uwl3eWOV2!Ffw{H1Lff?1v+AEE8;^-d(5a0)O~!cg$}LvzwdzGbmAR3)FGN; z3sIsDJZcd0X4mUT@%!=2c9CTEzQtlgj)IJ>-8WR6=8Yu@7GkJe25?(;As;X9&)3rx1BJ z2|jG8q6uXC552Q)N4qQNLMGu%=MehXe@wMqWqw{xIG#@|la@2_F(kk8FB3`ZW?Q<- zq+h&yBRy_$W!({)cCWdseh`L`X9}eG?}E_F^9r4JajX+B4UW=PB2okL4Ps7?cJs89 z&*T2XwgWoqK^E<-P~J~+MY}4Y*TCh*6^~OxFGMc?19I|LO7(ujc6XEKl9N;+A1siZ z1-G(Ao8mAn`wp38*3DuP2Or%HvI1E^OcGf~LN|(Ei(X3KifN^`N+F#E@)aPyI>W*0E5mBfS8?Al(3f zUL~Arv2`=E?#bzhD&9LYvTl38OTyteX>6KN(4h6?WTdsSC7O3x7&ZN=3QfV4bkUUR zm+bQBy+C)JBHEr(>cSI?lQGvRW!Kaw=?UAHD?e~zmrii0KEwMQ%Zh~;@ke~RfM#Wp z0(#~j88Mc;r^qio4u3Rp^eOrFi(XwDd@)~^2A2>|j;MGLm*3AHszVJ6W#mp<9C+b9 ztxRA`X4r-S)x2GQeoqEIC;N48DYg9S+o;(e%xrASUzk~?B^%{;f6nwhxOBpxb(&gA z{VgZao#EpI82wZB9lsROyj)()(r)9AOnvd6H&^+7hfikgEr54jE?V!+oNGhK2;0pz z&UKx?cq?rz!gwev?6Ci;w(P*0%ZJIHG+ifNtz2xG4rq{sMQ@$F*LHcVJ^FzVj3cD<_Hm;J~YmuFI0EIafrAtB*csbyzUUgW*2hD7Pf z-f^<&#+l3Ur8wnpv)=u_&*oJ@s+Git^-B;OoN%{Y{Vhx0<)zuVOvuz=Oh`%TBzLDO zGU%Xh9nbw*Iceu%XCOwgOkJoRE2M7fAlY zL>Md;G)=2oNYEB-z=OK>;#{riGaB;GR8W(#*Ig? 
zADEOy_TaX(Wfly{@`G1DuI3*&^uBsEWJG3ZWz`lhj&QQ=OuDHL>3Af62tJ-92atdRGg}I-oO*;y}bU#x=#1|M2 zm;qD1mw(3z)$jFnf75u>4$Out%Ws+?+L85Sih{&P4aNuBTA7FX`4;jbfe-Qr-td$9 zfCx{C4iK+u*ZO{^`8$GD%4f0)RwpYz0l2?9qKD@Em4vvmqDc;jWJYH{(}n}t`2CZt z1$F6|T@G+nYEYe#XgBL}RoZDf$U$*EJMq@naZRdRSOPcr+y5@pDl|_+)ASu9VPo}Q z^t`vYPqp!j3mFkd8xF}1Gygi`Db+~-3s!9jBZUbTDQ+pe{c3|+R;e$;aplXj4*$6` zmEH=Cp5Ar%cxz!uCp#yH4ZI~Kys*blXXL@I%y4PocAt$NUH+#s>|d>FX=xF=LvPnlaI#U-jJUhb;8!rS zAb89LfxN1&riSQu=FOA&<~O-80T<+t?|{9OIL@eA8p?ucS(CapSJP!#KBLVniGZOR|nkKg%G7w{~{F%=NbK~l9o8reC6L?Pc~%S<>f`_bhO3( zP)h=xX`T-%c&>vkeu7WyoN`=ucZbOX$D#dq08KsQXr+8!mBKXrcb^&ukGOkXOPuG1 zJc&@{DR1hVvv>ovdGRj;r@jOh=R>8X z4YD*s!r5{n)0jEAx&nSRZRImhfIyoRe}|b3l`7B9Cw5Tj50`=ZataVt7tcohJlK#$ zB9DWVQu*Hj_bcJfOZq&+o-y@g@3NNTgQaV$ja$h@O;3rBwtjN&iD>I4+V#}=-N-0H0;lI@=)&Tk|H6QYp(_Z~mU2ZFhyv zV}aXQ*?GGM#G{&vxWsU(b=C5KkS?34#0otdGd{Xyeh!FutTmYZ8P-AdpxO!47Mq87 z%aDCBn=G3mnpMmAfJ?k>2fL{Wmer3c!+kNSy%DDA-s&z9n>U%6#Sl7oM>c-~`y zUlE*>?Hu$(60@1J*|NFl&}>9P1w54eCOLt(5a@}9@kJjc{KjRpfPLu7oHdz`?WP5~ zH>-o{$M4Fpm65DSrBWj4<~8Pr=Cbs3i_Jv#{(*d0^_7PSo(fSe1TPLdE6~m=32$2K0 zkAcQ#w+V4+ra+3YPqsPKyvbrd;>qGz@$nF3646SCw7qno_I~isAWV7facYb%2@ch; z+4^+`=eMuK5is_8k^O-4QZ~4OOo{k1XCTr2;2FSF^;6j2Js33LP+Z_p=o|bLM6i3D zn*5&1>TMphYfODAT|aLiJHM8SuhlPT&h66tu|4Sr zqLDYVyB?`%JR4`)FV4>*HZFeBL$-aD+8 zbkl5;oG7Jsi-OXXi7t`IJmL2q;ir>4cq=~XA|oEB>A-o?>B4d+&yyTkQjhb8{-+i(-zgLsJa$e--cJ4*LTvqUF2sfZUpv5u$6E5+n+~z|GYGIJ61r~h4694S-wGJIJ4%9DY5ryU zXM0r37eTTMHO0#APMlUsYC8$)mA)sCNSQ|1#$W+onHxry^nmUV>6?yO5IuYdIdgzS z&3$H1d$5=kz&%bs_Hv+bU`g_w9r*aV?x)=9UcWg;^vMxkyrrvKYn$;zCAG&ML@xc8 zeg~4q(=UmIY2%u%0?$%XQ^)Yd$?DUM^W=nNxBj%NH-KHjeKXb|SK)t;GR4Z?9EEQ@ zWp;7&CgiHARA3%+pOd_)L&=x$`OOus|dLiOlFywCwi#c2Ze85*qnE^Tr_?htkG8rc|3RNwbp+Zog3 zHA1AJl(jg@y{3Y&<6%>awx_NSohTv)?YgI?HbxXYa}nCtKCYqHo|*Yh?MWaT&s+%L zczMbuqyiLubvLIdZ@DhT3%Mn|d3LVhBAv~G-|WK&@uKe8yy@#ik8y+aq+|qZl4&69 z{2g9b)w0mDbnU8YJdGIGtp3aKn}fGVe;{i}yt)v-$GtpABFeTQC*3+m>*uf7MKzJxX^?-xh5A6(cXn`dYjjkibgR}nQUL<#)x z&I{T+?XR$e)K?!`(p8qOLCDNczj*r}kZe)SbAiZj5{2}%nI6xx(&sYV}W;QH=cA9IC+5gTQR`>+)tkGcNhGGOW#l~X@0fSp!})Q=(@gC7~1g`N$WO#gR%^ z!R_F9lo;5&zVTQE^7YG~>sLw2x2{DT7caD0yT@d5xpAV94%@4(_@W^%ldtP`hb-mskTGy5aA*?(N?~PW#jIR5=C}>apFnla~ci$nQ@;a`N%{s6nQK zxwA@gN-;IB6V1GIyIVX#jEf#k_!Bt$(pp%=3oYCWgeV?8pn*)X| zfq7_qWxQWVOX43jn-r~hL05h1>8#@YGXzv_Re+%NpqbZe$Hd!<6P_yG1g-r&DcG<9 zJ{v039<#of6L!leJyL1*NxZ4}AnX3>5B9~PE?TndHnctt1}d{lmL)~|nVqf0vL!Wd58C`2Z|&tDY^i7} z4g-n2LjE%AC}13*vBb4lx1{HtK^h7~K8Fe|*pF4|7=O0mU0hr&?vK~Xzox_m#+%Dh z5H6lFcO@!5$$VktoaMFE0g`)_i19?#sCpSOJYs6*onkHhV`NjL+N~7279D}vMWXmSh2IZLA(J`zgMEY*J?E+6s$|TJuE~(&|pQ2KN9RZ{ZUtR z(w_2q6i<)V->Q3cOtWt)Z#yK0&WMzNR-oiTWmYIz)i=izLZ;jIqq_IViN|eUi(eBb zcs*Y5n&VlCQ5Lv-xDrDs4W6(PS^!VxhP1<~$KwJAR~IdJ7kQk9%;!UQhpkkIR)P$h z^qMOFW#`% z>g?l0=M_V9i^fo-#M|ud#<|^AFu?F@+b^N5o^#wQX?x-ed~EgJ`^Q`JC%Ms*&DzIoiUSM01uK;Rj@1bF=~Vzcq|7@^2Nk6-iGl8W%5d`SimQ ze!BQ82jwDew69?|A*t`*Vg=8|qFNR$OUGnn#tTJOV*E;=)Zz=H#nC&h%0tS4aceQb zuIH*!Ijb0#Wwu}%@)6Ic_uyG|TKB)fzn^cr63cu`+R*m}{2xu`{~OIsksP4P37cX_ zTQpt5oktwXYz=#YSw0#PyMmgA@RBjJgRf3+=LCKX2@%XN>`%KZp9n0+1eXL);6AQZ z2%A8Pu>-I-DK76@#;TXD83SSwSk@yif!;Bz{<;}3ce#xrw&2E7%y9tL38^&sDytC6 z*~sZ>su6J7^Zmg!RKWntT&H-DG$FSr{NO~Z?Ih{3Zeq1%L`w@A^m~+xl8-Ni|9wnW zWL`2G_CuKkHcJg%bG)=1?g-@POv_^JOUDN7ct2~shVLv}rBo^?Jjr~(jMm9`zd5HR z?J4Eb>-_))a&1eNvN$a3P{s5g-ObC~v+2EoX3&(`&t|du#R7&~42rlu{5`gSrwQ== zyv?t>2i}c-2tP*EZLH#A|4ab8L!Qo-7xq36{+E4F&x znfapkH;WW2$GYxkWkPvW+2ucdvV3GJ3Kdp78tBbX$Kp5j4xTmZ-UpJ!bH18_yjYDM zrx-42&ue+Hsg;%6V!HMf``;w$qcl8e^sZ+qfpr6Jwo7q>m%EBQ>_p+|nUtwDEx^}K z*o4bL6A-KZ`BmS5n1`sAt}_ zbDpW^`Z%GcMTR{iJpFk&`Fjq5qu9fATl<5C@=joIi2y~GMIPeTjBH-!wqVL!{$l|& 
zv*}f%Izw#b9lJg9uQQ>x#a+&$DCOoKfV?Wgmowk@3bE_%EargXo>=~-vuVA$!&&T5 z3l+ugq>OnEVm<6^N3j?fP|}EXOn$8E8AyvtW{VNzsIv<+K5tme=~eSae*c@Khn=!} z-9hBipFwlzIYF@9o|rC5Ve7s8`hH*RC__=^w%B*zIrjAaC-g~kXEZB~f{65(cdd5< zgQAV1E??KWp;Ck6R(KN3JN_}YHak4NVrVn|RU6(?Gg(kKt??@f4RrT0G-Y4Ti3Ii- z$k{Xn<)NZWAYORHf_wE*y;^4LOE)&G=XM_R7A1;tyHt&$E9B@AP}~AmX~iq59B%|4 zWDE5cEGVXhGBH-4yh7VIFb>xl0h|fh%?GMwP9{6XsteEivf_Gz7p6=76?d<4y&9T! z>bi(B=IpYoo)B`a){PVR?5D=g-COfnD7E>g12v{IbN--kMk`iJ|0#)& z_l1CD4r1PV)BRQ34*V;X^taUqnd@0g!CXuS(`4xJTD+HS)N_oY-Qc((!JNhV|&(Nd2FqdV^)pq1I?|@UFtJ$m@jVRR6 zPH@sGxD?*DpC6{v158HoTv`uWA=YZ36L!`f^0jj|7!bK}@w&jTD6LRR&KJAVKj?p( zXOge*2Gk>ffWP!cuex`p$JiA<7OwJRnSJT%!$U#UZX_q4LHF9-r)BIO{W{GWZoTQQ zvAEAB#Zu)6T!0rVqDTy(zujnDSv(K{2Vom?__ErfWA+qZT3tE}6DhdPm*k?6;s`Ke zEa=y91@6n+y`|cL9Av{OJy5{fC6_ed@G5Kx=K5geW%Nni;+N(Q9mj)AC>WI@f{VS| zX{ac>!zp5w;*%2Hu-+>_^Nsw_UD?59<7aQ_FYPwOh%nTP5epkG%GXz^u!nMqbBAo_ zw{`JExL&6w?9T_EI&Bg(qilsH-&-XCI(2|EZ42QeXw~eVd_Pkt_}7(T5EN$qTgBJ4 znFTcIVNnH9nc+Fn+tfH+ zu0Ff!0ijzAdt->-Ds`@-FH?+*Cl1zy5Fgx@rHze!dVa`Uy=E@v$u6}pBi8S^V=6^` zw}bso4{`U;aFyfDuofdLBQLb6IR;GKbT7NR4mQV!=2RsQwodh#AlZB}siP`d8gLOC z#u=#Qw;QpVpEQ!vwh6`6(hGIPi02#tE{4=Em;)< z?`J~Y@ZS7g%u;@=vfS5{Ob49;YnB9sef%Mw`asQ!H{<2tScBs=Ei-@LzU+a;)y#G4 z*V$Ta^o#gvX4tLYeymAH(ytyLC<0Z(y2y820!cfq7u)a~d?~lEs+vE5hp?OCWz}vq z>Z&!xWfwPHC_L|Fb*mOgA^fTnY2;7jafEUu%l6KFJ$ocCTJ`Bgx4F;Kv!~-nae0np zC-l4zJR}!p)=`MaQjy1+LQcqgZLv#|!?EfZT6X(_SEj3z`}-q%N`7-1=vTiX)Zj7M z@noR9@C>NLos9yFJguf{?(}4OaPk1WfT$MKX_W0?$is0#Q@^8u6BfKu@JMqkA8VL& z$GF0jxF+K^UUYLQj2z#qmBlg$z)lPUAfB#$zwwzr$X+@Tn#K3(sSjl=<rhxAf>rD8&UTgmDaQHqeY#2*i#=tx^ownW z=U1ptbW$@u_D&QWtyOB;a!9%49zLw#1vE9Uv2^0{JDJt`l?}dT#_glX1oM@Z{W@o@ z8T&$tjqI;KO1~R>F<#l%Y*2R0Jv^}58GZq{EJOS0qpOB-(_YWq_1Lw#am+ItKB2F` z)Gb)5)G;%kJViC7tk2N>8+ysjHG<#U81E_8ulb9S$?42M1T31=MK^qyGvXXCos^avw9ItXzhG~c^*v>6 zn`zcF_FB+6bLAuINtH}qs@*r<(s*0n^pvT;VoHQttRw3=&Sx7JjchheP0k)B+DUev zkEG&IWYi|IVxXP(!A}AxC+cv4XNTqG{ZG&UzD`M|55dcd_TH?{ODtCNEQ$`t>>jzD z<*`TH-#$W>`9#GIgQVSLY*DgsrK}>Y*I~C7WhrmefKVY@*Z8jYz;`&Jo)uD-rZ*)h ziB$hwl5e8ByzyOf$JY>@4mExCn@3B2udH-z+_&qbna$31OgUx%UR@1K_IcHqQ~u}^ zbVv@RW!6o@j`2g_Yd0xz%=wP-i0zjPZIYNe>q54MgZhRI4%Rw!J%CAx#fj#A+93}< zA1!PPFqz!6_tAREJBU}{Ipj;mMy?8zC8M7u@TP;I4Ev^>4~DTIrP;VbaJXnoffaQ~ z&Zk|$!xh;Y3f!9<_v*$(ft4R>-MG!2#{q-Dw%e%FP7?Arw4>55e${iy4D8o4Q~;QX z)tt{zI}zwyD=*P+_i(BT_CBaXtNNvQmpZ0QmwJXM8O7827m`4dWtE&>8Y1;S1>4+%1y>lqlF~FFx)9rH1skyymi5PFuH3yw%&?D&8Nz^r z;{wk_Jf2+(dSq;T;l6yJ=*H7E_0|Bz&htf@rpbX+PcG0eG8%Xe{iAZK+M<2&zC7c> zCDOLs@ut~NyGsdHg?&(~=@G^&>VTdy(Lz9eGmm0x=)!VJGA^b*e>#&*Ed+2b|E0ju7J$-eUUSa~WVl}!dz$vF&VOklI2EXQs#|yCJT~#x1+}_WuxbAD z-Vz~`j-{&JX1`KHgSwFq z9<2fcB?a+Z+J1{8b8MkD2nTRLrQyt4+%DNJ3H~#vN=-zvD~@AHa^$@_W?f*= zgBb>5hB*Z$xvm~G$2t2Y9#sO2AZL?E3ZkCwTNMcYyd&5Uw!0vB3o4^}_dFn~oTBg3 zc7`b>MfWa$o90*ogj;x2l6-IBA;7ioCH;#9wGQW7F5T!Ay5yWh&bX}Py&0}mzXDI@ zNS&C9>e|?Xvf+*kf@zo^k9f`n0O4~8@RS%Q3zzal!hb%0?d};eSJp3{6|sDKdR=v8 zMHdI??oM|TF`(}0QTK)giC)OB#OYm5+~+52m|-~2hg-C@zFAD0EAo%s)%tfc?;_#c zZ)K<=&>9}yIF-`=#r#@QD&15(69=|YJXGekCRQP(k&`Zojv_7`8vI>v#4{#*j@;_u zNR^JyeAVItJ~yE)yy+@_bVh*v>FcKXb-97SR}F;pEa>t;!ke!Vh^>s}E->DzLPZVZ zus~Jcl|I3#H60~&i7}~wW3sm7NXFRnv*+jPzLuhPDil5$yj^*@( zeehJiCCj$>X|--oX=dmtd%@Z)536v;%H(9txoVv!HMw2v(xThsF`FMJ%X3_7mWsO+ z^-}S%kN3#Ayb!2s_pXowOrK%T|Fk1$)P|22Uy{TLzxesAqI=cOP3X z>Noh#yY#z~795ig)~f%L@5PuGz1qX2IF=M^7lczk0M4S)o z_>&)hwiyV-Mbw9vt7VK(%+EXvVW^aiVX9oGhAT#UXbY+Qfc#Kh&fq(~;KDq# z`pC{%wuXaq(zESwNrfZx*ZjmI#opqX#S zlj5=B!dvJcQAHThn)MG)zY;!1dVSO#SN0AWS>(&&=Tq|ATODuq0lBhJV?(Dqli2AM zCMK&Kl7EJY*0)G%ns}QkmlFfpI@)ap;kw=VrEsopo0R!U_;-8`jiR=dj^0f3fa7>! 
zC}UBj3X~mF?qU4Ji}9mP3e!6LyLdzEVoojBHU)W}V_oOG_4zgZ*ZF4yaL0mtsV}gp zs~;pt@_IeJT&AVI`xBHnr1L-I9MlF=9?~a$^g|e*U#p|YxA^VxZ7a8n!z$}=+%=sM zno2&R?61+xdGaf@vi=SmM)E2)G^Ywy4R_qhe|;e3)&-!LjB|=Q8wG;!gO)R{LFrHu z{n?K9qxP-Zuwv?QfSH4tzk6`5rWdNzz&%g9!PohfCha7JNhwQ2)zBhOYGzq- z5-D<&9sz;p_q7PfUF+=cy;%b!EfI}dGP8Sd>qUlJrgq82H58^A;TzNn@D==dJ5eYa z`vF!=zl;YH;>|BviRtE)7Kgs&*cB{=b8DL!*RXK%Y@PRtF~oq{ViK|Ux^Z2XJY%?k z%p6gdH@tXs0N;FiFc7G2m9_3~1m}P0b3qzI9wPgT9SAARG3jQ;`M_o5SSj{I8INhP9L*RX|bxlIy=^gKEV;o4$p8JdF zL$q_fv~9LxfiSR%EtCA{dXapk!sETWGq9tjDp?H^jKk$~*;W?z#hsxm9eYDgffQy^ zHiy2elYOx-C5mmQW>?fUqATDku|m>{!eQ+7F;hU6Xj~yY=B|UQBTPo0gSgwD&>+1Q z&7Z^! z^clP9aUl@E@qICuvb#k87MFp#19p1!nhh)a>o_dD!0D$OOra9U2Zie;=g%zi7hxN{ zP3x*$jhFdpM4&QH*01m-KZV6G_{}^uBnHmA2OBfOFR>$)U1MPdnh1`-xux6;`AXQd z;5S~4SP7B&fCfhu1DFDoLknvC8_x(e%qcJ4oN@{b>*aV-2U(S+fj;~m*w(+{0>pD3 z-qo$BRnByWo%Ov@m)pL3`t$3#M9Uw0exq0&dYaxAJ}I}udUH;+u3sXuvv;IDR=9{l z55zdUjKn>9$A)bTG%dz%7|iSq{I2gh8IR~4;d|b5Z#LFsJJ|>1#6IV>me*yG2#h0% zS*>5An{itq2{~R>wO)%^of9l`YtLw7+?@iS7J3Zt{$i~cCZ?jEu|EN#ahXS4yYz7a zh~#-qp|U}iHe3I(1;$Qx)-PJvA>k>CxB z8SMhSE*U*ff&^Y7vKV;mOkg_{-U8M7VhT>a%#Sf-?=JIVPO=nj&nOlJrV-}EX zdN#Yw8|&y2vkP}!d2VOrva*7F>kTBCsz!e64~{>F(bA3v@b}EV;1D{}XxCqX9%Sa%$mbPPgS^H<$f)epzQq`m04}|UluBrCV3uPSR-^jwO ztnKH6sK^@rS#Tj?y%OopE}uebx064Rp~V_^MGdpA^FbB{wO-TvglSN`Cb+^R^4w{_ zXdYLvJdlT4L7HG|fMnw)F+V`ilxuQVE}|aPFWwgPtadU+D;xlRM5JNKc*g50Gu$)q zS$wBc7WDPGo?P^2vW)@WIOLd1R5i?6_S2Ulc(Tsw3q*CkOW(^_bJ?~S_-DJ90F#91 zHx=q((^bc1xK#cfQ!lkuk13rthvYd8jM>qvt=)r=kR*eYvPAsnY6C%&3Vqi7YcKZ_)K%axJx-$Bvb}u z)l>a!C=R9!Fe5IDyunf-0V-bcX*Zh>vXY$&pzg{+ zKuV1lcs-65N>7{&N!0f_6+vYtPfBqg`X)O&^A<#rhZfbID~qJ67$9Yr;x$^Z=6xN% zhI#pT_J}DY))wmzR5TqnGG`*ocW*QZniyn0V%v)M>mV|1iCcnfAc98z)MfRhO#ZoG z84%CX$lVni8U+xLMqsLB4e>mjlb)@u&u}6X8x&MHs)mYyQMd1f$P&p&*x#qeHK>?E zZZ^a`ccJ~dUzHk!0%ueY3tjrY&_uC*v?D!!6G#BHNE8F3wR&Ey-GL&5m+&lB$hzaQ z767=7{p?r(-M_~dDZnXfUB4l{yEK#WVukSl$fkR2?99g=$p}Ebo>^6gDv}OcwK3M; zMoztvZr^9aHieKo$k24Q28jCR?>Xug?+X5GAdsI%Z>5K|m;2ptI0^Bu!V9q%2b8~= zT2EdWwZ#`(vG?Xiu6=_2W~8R@SSj7_T%}it?20X@M_68y%Ss3LKu`cW-|Fh_VZomj zpN`%EDxRNq`D%(ACXV|p+n9DHik%CB`7I*hJUO$c5w0`u?r&w{x;HjEgH4T^-E{9S znuih}nm;Xh@S&%@B=<69$E>R-9wzymV=LL5E21!W0WhZz)?bhu+_%Iygl{0N!%mpD zOMiZw_|~UYxl0z<3&dc{?GWfm*=3W&n&G*`N!I+De6yiWP7^u5! zlLLu*#1^geXE54n+Fv2xVmJRl?d4osl5|)7l|#A*vGH;etuf`h{52QKU=fLxtuj0; znwqo+x}PI8G%)gIz|+u30m9Ao^BalGPsH&tK6YADP(a~fDbZ-&5345`2yE73{C#iV za=A$M!PLJo?w<_?KR$Ewhn5&^!#9T_3u-1++L=&MG!6JRstKpTz-4dsEDvE5w*vos zsY21?6sUEZn801l(*^1eyEaRYi72&aiva9X8lNm1LdkiHSSGG`^2*l-1m+z=I9>JOw=P+cD|TxxUk`M2@7P4 z!d`qEv+qr7AoEw|iTn+~NHyBJe^6v1y3(2_e9wk*X21UBuh^(ykR%gXwKOgz+>~r^ z?ZcHHD~zpsN?_LAZM8@$HSGj3B@X2R3qT29jb}9npsXnla-(+zf4s*Ftz;d3Ql^=4 zWA8<3r$^3Yz8@&i$)HZ*ECGtiD=%Cr2YCyiRV|3X(3tQ>mU|NQOHWyF_j!w;obL+Z z(~nq0vI8f3D)$ue&aEBMJLFx#&u$HOu_*)OHvXVkXRwNC;!9a_;EO5ayom+~)f`98 zT&LS~h5JZRcD%LNC1=e2NF!sB13+4ML!D_6_8|`dnzk=1UE(sCad&v~D@M};^Pb-b z+o(xhc;pZylr*j~C?FlxE9A{B?PM9tm*TJ!?Hl~Q5U$d?=e4az94Sj1>~q_lXM89j zcNyd_u&l2hpjf{h$-p>uNd&F+oW3Y}M=V61d0VHU^Pr7?XBQ$!BxR2ry8o?u6hE3A z%Xj!zl1cukup0E8xwpm*it7de%fgRa;<~jFRz)S@Gnw)|1vl;+V6-nsE->sG=QSn8 zE@J`qIB0-xNm8+W@XJw`@M08zq4lvHfSCx<07kK{;&uzVpI$55ZVi+-xv38MldAr) zQhaDc6V(!>G#NpzEcq2E<~~mSa9bRj?KYN1-Q!Oza0^n)xrMr{7EXL{`BT*mlv!8? 
z%DQ49*S0S<12bT$zQ;Y3Vj^b5@+q+*{-@&nS-;gY9zcJepV@;4W@&vnt0n`LZ(?CM zg2Oh2`wCbu{H&<_8mMDA1b|Yv*;)DjO7P|sMKDm# zP<~-f+)dUDM~ybIm!e^F-#2&7<1OoE->9 z#L$_(>^$O2z9HdNAsh66nV>?VrIc1-I z{SrgxKbv1JTGGlc@uTP3@Zy_uuQ2a*>X-B{+#4^j>}U^bu69wlGX95t_QnaT7tZn} z%pcBxBK4y|9c$0t;?9>hK*`WJ%9Nw19--sRi@U!WJWf3%Y3WFMQWTbiA<=@Ix7Bw~GkYwbz@*~B~3{fR(MrIOx5YioX$Uj0{d5*~pnAFpfFc5rb#IkYU#vsAxoX|t z(Q$fpE_^&Apwb*@ttfj2z>elxK_(Ts0PKjV-r}f*XI|cO77aQ%0)Rv4L7-`i3OJUZ z9W04~-K%8`Z?trM^BSp7;(QnL-JQqIS>B6TAlpGAXsNmQ3CL~yS!W{e1J9pHW00$4 zG|6hIbzKZfsmB3IlW$^#R9fStFX~r2$&-H2rD3kG0YFMA@~7_TTdNk_AaW3Jxds;s`b|QR{Qm%L2u!3WmwrzOOFRqp6r!V}L%#y}Oc9_+{0Ii5ff*@llv(a< zZ@8Hs(1HMvQ{I;Q4ropVV15h6eSo9vE5PT&P)C5z@}-~u@R=8YSZ*FJ6QTLa$fV_A zz1fl`NGK!2!HF9{*8yEu#+jh4irDSn;xrk+27t1Gd{j^sSR^r?`KV7DC?9(msr6N;dRTn1}=o2~WBi z09#{|r`!Hj)G5)37&V>LJ7Wjm55Q^BC_X2Z2FU?1CVTGm>wqy32M*es^2nf0`@y)s zB0(Z`FXy!W9*N0m&#D4s7Uc(^_&mOW3F|c!5Qq!V2>?jcH*~rrTqmR7MBoL;bi}Uv zAC7wvNHhS3eqr9NC4Q+E!1_tJ3yySuP<%35HPeo8v9D6PA!h>c0@(rp8Xb)qe2UtQ zq&j3s1pQAdXTa~iFVDAhj*sGXemwd;!cjM3ouMl4!t|G_{Zg56#xxcJ}HX*rsafeq_jU=?icr?XZ!*v?)PkP z_{aTeWb@K>jWUkf%KM4p9k;kAG<~EOC5OvVUTjZ-5uZ`u2}-}gCwyhe!8#-Z$}|Z+ z^B(0P$X`8Y(jbh~wO>ZEG_mCc@Q2WD_K|_@x6^$ssUEzp)Z>bD$IEO7!z!MmTNZN? zmEJLHkQWn9XC~x85AatENbXXTmO9iXQ zNz%W{u7i~bvLMqH<`d!X~WZUMDW40(T+qsHv|G; zYtKx2yh2~~y0@FUk1H+j*Vn_~YkdvA*;1Lyxp7|OqJ$TwMT)}JSClqdg;Wl=b%NhP zUyj_+ElsaJ->{|#SOFU7@gTZV(*~aipjM`$t)b}sK0U~JJSG7T_c3mdSd9h~2 zwX7Bcw|zwnCG~tfo!EBg5g5gLvqG5lDj!UO)Pvb?3>~LaB^>Pa$`DBAl>pzCh$BQ5 zZ~Ss`OAZNwiAFay<^6fwBA4S^mYcSNOv6oK2_B6>R3IFu?e1K~XuIf7dOAAmZy&jw zfa|T|l5?fa&@p_ePp>STg-5q669AACyLyxb^c4xP**9eF9G5^KClX*AUIfZF^QOp= zijSv8Hjt2h^_%FQk^CR8Z7YiadDez_YxrB+ug1QP#Q)5|Zv=P&$2+lhdL!zV1C>kH zos4D+&!^LxRO?~`kuCKAN`C2o>OSZv7xB-~{;v^0-Tf_+)Cggv z9rzqxOuU$q#i7fNK`)7wY$M+8K^#+pf(eHP#DeCps|LCNE(b&$$`?|n-$24x1|#zQ zrhY}wA^LN8MGfnQC_nOQAj4U+sIwr`lxiqN8@p)7`HVLg@qj2-{(3FKXzl3b0L-?p zHV5Q>$;o6S~Xw z0YeI&=?;B5~ek(hvPw+kz87e`{`5d0!3R}Nad$XX>SLZCgO3%6~OUzb|vV%0RWsUV7ChVQ25E zQRT}RSwWKFikC~m(*w1YErX^7m4M27TmX(D;l8S++Ktm9Pr%gwG_pu{T`==?tadO* z7v3U9BRZ{s0EH`yBv^UsDgNh*OQ3$QMUy{fO)egF|fu-HQI*aQ{? 
zG&Csfl{7gqjXiV?pK2tW&PI_D_EHpxx;Qq7Nv-B=)=IzNesiIrXU&o4pPvs!XP;0W z-T?4ux9z+gFs}^j!-4+=aRBD>`P8yL4TK%(vr`^`%k8>!A`NtlT$rs`=>S4IpwXpu zr$-GGNFMSSKxYG&)@}DYc2}#XNzwPC`Tc25c)0pSE1CP<8dUclZ59aKGeIic;Tn8@izCr4rI9+?LaAljspv`x z0)Zw0hhYMo)=p+6OO;Aeo>_EtJnW&*X|;#KIiO0`0mSYQNIlS@Q~_LW!d`o!{RLJ* z3r*AvM4!d8lj5Zbv^Jnc>$QVv>F5qv0WJgNM%HZvvE*Ve2QN-Nr~>An2Mz|zX8^+7 z10pB8Jra6fYOonKB>(E!lK|T8LFreQ2DLIZIkxz8&I1-_Lf8zVyB_z_F!zjad_|{+ z4=ncVb`0EJOuT?!vZcb{l^}}ETSO;yjm}TJORF6V4ITkbc9!d}G_5icubp@L0p0>$Hh?rUJL;02on%I}Qw_vGLjinS zWE!qFYo3i@>*oivPalvJ2n}OP#u}2X&|ol@m<(eX!#ro)J@@mwp6hz9>-p=so`2?# z^F811obNg3`#I}hO{n(cz@igY+6V?c=vf96Ka&BglO=i+L-wNB zQJrA`brS~;ql#-C^4W8bGqCba0fG)tD*Uux`vM|!xEXQ#Ef_kq7DHBI#>DQDshG|s zU0w8@E3WDInz#bm5oisCq3C$eT-AZ>H z=Yc;h-2`vT-PYA#=+bWMV;EE3754zP9BxBSl$Q-=0~fkzxX|F^Nhz6w%l&nZCCz~?{#4@p36uI2OTgA{$9 z!}M-`|Jk#T#`3;YJ9M@jkWETeRXz!cu zCfv`(*ET%Cx_I_5*f9dR@WCdEU?y2D?O4p*d8lJ?ce_$_70tFRONywTpV6gm^1x>* zI{z&oTM@bVKbkj9uPO0J)gI=<58^<%?B2@kY>>2TnirtaS0gfo-wR{KH4;DnKJM6JE6d=TELP z&x~4O?0gq zssBMwx%=pAuW81+%^TC(8do)0b6X?6ktQON_v$?*wpvzj?nY;9#OH4$_GY3VTQw27 z&MCZ7Z3)3&^2>HpNHT3nz>VKJZXF?II?@{69Vf1Vx$=GdXVVTCCqDEZAPATk{VTBu zF($o$PM{xm@E~Y>o=`h)by&;pi`SSk?tV#4waJDy>e~%}kmSX1*2v%J(J-wY7$s52 z)|3Yn+Bb$cF8>+HDE4$XcF6|)^3-w1kXW;P8|&YWo|B|dF_o7GUY&D+LoA8rVFXbX z^W7#bdfpQs5C9}*kS?;knSms^_Z1})*Mt0#mE*QJ0NnyDH6eSJRy^?^Rmt_XXSmB` z1{Cdyyl$QBwlxSSQA#dt%*zx`MhAx?HrwZ!JOv<6gO3|SXU3y=)CYv&tme*O`EfhT z@#mR(wUD^cU_1x~k{5%czfBxaawXmGSa?IW{9RdGORYgabo&FMe5&sYTRQ$2>&!wO z`&yS;Roc`xw|RT$_O7xBKWM=VuJmZE-SpePOy3*u=iexcyP0J1UN2g~K^q*FCaN1Z zi+baVrff0efOJ#|&{_ZsWXHh*`S?#)tm;0q`9SEJGs}AN<7A{W7ejg0;T6+wavEvfC^0*ogO)nNoAG=SELlNUWbWDfOveOFIDA0)*oJT?8 z5yQmbx(Q&UM13AUm}FtN-FR*2-IuM2-o~Bnw6~IJxdFZOn*6^h}O=;;Q zMT+VS)O{o31bSl0bUd|V3NNde7?*Qx62_Xo0f+b;j!HVKfYN6qc>288E9kH8ps#Zu zby&l?L)(YW-O=rpLYiMM0+cZiiN|?&BqB4>eV_HIgko?=m&4Lc60Q%N*V5Y#{3gtS zvH3Am+=L-cg@1F`lC<{mUJQG(bEua-Ty&E}R_zi$7)*pi>a~`8ZAZ`3Tukc>0RpfNUaoL&rHFtFm$&N%@(=fFR;?20r(hQ*QCUEdofY3%< z^!IaMOPd$8uB@yiSGbO-GRT)$D$Y+_MrLfFNIYnL6g*6hqBqDCR_?12!fmn2!KH5( z153aqNJ7r)WuYw(7}`2ld)%yKgPmZIG0_16)d5^6^E} zkCwtgd6eeV6!F0V=H%kQ(nf6R()K29kl#Kn&p8P~l>rkFWU+9fx3wv*BwCUqqIl`A zGa=~u>-rMwoMB7j+PZLu1eAllzh?(de(o$jU>0P}bYd?ky10SNIJeg4r{>PyJ(3kE zo4XCO_%2G%l6o>%Vs*SbwMMN&$squxUXmDe{3<*{J0N8xKj`?L)q+Aw*7rrn?uTJL zXp2CGLPSv_#h+~T@f^`Q%4KnG7Ww`CSV80G;1v@>W>I!#jN*VrU5@D@SyhnQnjQV_ z=*p4e2HIz~1;>6EYXnf~g{AR#I!;KV^sdJjX;ob-*3alv-fluuIefgTB&Uk8*kVDV0;)Jk|(08UPe4 zK=neuORHroHH&H-cG@jlQty165oe%sHMStz)B_xFoq&xKDIhh6pVi(=${@DzaQu_>im&#yu4Gd2SYLhDA4VvcP! 
diff --git a/include/libsurvive_ros2/component.hpp b/include/libsurvive_ros2/component.hpp
deleted file mode 100644
- -// C++ system -#include -#include -#include -#include -#include -#include - -// Other -#include "diagnostic_msgs/msg/key_value.hpp" -#include "geometry_msgs/msg/point_stamped.hpp" -#include "geometry_msgs/msg/pose_stamped.hpp" -#include "libsurvive/survive_api.h" -#include "libsurvive/survive.h" -#include "rclcpp/rclcpp.hpp" -#include "sensor_msgs/msg/imu.hpp" -#include "sensor_msgs/msg/joy.hpp" -#include "tf2_ros/static_transform_broadcaster.h" -#include "tf2_ros/transform_broadcaster.h"
- -namespace libsurvive_ros2 -{ - -class Component : public rclcpp::Node -{ -public: - explicit Component(const rclcpp::NodeOptions & options); - virtual ~Component(); - rclcpp::Time get_ros_time(const std::string & str, FLT timecode); - void publish_imu(const sensor_msgs::msg::Imu & msg); - -private: - void work(); - SurviveSimpleContext * actx_; - std::unique_ptr tf_broadcaster_; - std::shared_ptr tf_static_broadcaster_; - rclcpp::Publisher::SharedPtr joy_publisher_; - rclcpp::Publisher::SharedPtr imu_publisher_; - rclcpp::Publisher::SharedPtr cfg_publisher_; - std::thread worker_thread_; - rclcpp::Time last_base_station_update_; - std::string tracking_frame_; - double lighthouse_rate_; -}; - -} // namespace libsurvive_ros2 - -#endif // LIBSURVIVE_ROS2__COMPONENT_HPP_
diff --git a/include/libsurvive_ros2/driver_component.hpp b/include/libsurvive_ros2/driver_component.hpp
new file mode 100644 index 0000000..6dd7d0b --- /dev/null +++ b/include/libsurvive_ros2/driver_component.hpp @@ -0,0 +1,108 @@ +// Copyright 2022 Andrew Symington +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL +// THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. + +#ifndef LIBSURVIVE_ROS2__DRIVER_COMPONENT_HPP_ +#define LIBSURVIVE_ROS2__DRIVER_COMPONENT_HPP_ + +// So we can access the full gamut of callbacks from libsurvive. +#define SURVIVE_ENABLE_FULL_API + +// C system +#include + +// C++ system +#include +#include +#include +#include +#include +#include +#include + +// Other +#include "diagnostic_msgs/msg/key_value.hpp" +#include "geometry_msgs/msg/point_stamped.hpp" +#include "geometry_msgs/msg/pose_with_covariance_stamped.hpp" +#include "geometry_msgs/msg/pose_stamped.hpp" +#include "geometry_msgs/msg/transform_stamped.hpp" +#include "libsurvive/survive_api.h" +#include "libsurvive/survive.h" +#include "libsurvive_ros2/msg/angle.hpp" +#include "libsurvive_ros2/msg/lighthouse.hpp" +#include "libsurvive_ros2/msg/tracker.hpp" +#include "rclcpp/rclcpp.hpp" +#include "sensor_msgs/msg/imu.hpp" +#include "sensor_msgs/msg/joy.hpp" +#include "tf2_ros/static_transform_broadcaster.h" +#include "tf2_ros/transform_broadcaster.h" + +namespace libsurvive_ros2 +{ + +class DriverComponent : public rclcpp::Node +{ +public: + explicit DriverComponent(const rclcpp::NodeOptions & options); + virtual ~DriverComponent(); + rclcpp::Time get_ros_time(FLT timecode_s); + void publish_imu(const sensor_msgs::msg::Imu & msg); + void publish_angle(const libsurvive_ros2::msg::Angle & msg); + void publish_lighthouse(const libsurvive_ros2::msg::Lighthouse & msg); + void publish_tracker(const libsurvive_ros2::msg::Tracker & msg); + void publish_button(const sensor_msgs::msg::Joy & msg); + void publish_tf(const geometry_msgs::msg::TransformStamped & msg, bool is_static); + void publish_lighthouse_pose(const geometry_msgs::msg::PoseWithCovarianceStamped & msg); + void publish_tracker_pose(const geometry_msgs::msg::PoseWithCovarianceStamped & msg); + void add_or_update_lighthouse(const libsurvive_ros2::msg::Lighthouse & msg); + void add_or_update_tracker(const libsurvive_ros2::msg::Tracker & msg); + const std::string & get_tracking_frame() const; + +private: + void timer_callback(); + void work(); + SurviveContext * ctx_; + std::unique_ptr tf_broadcaster_; + std::shared_ptr tf_static_broadcaster_; + rclcpp::Publisher::SharedPtr + tracker_pose_publisher_; + rclcpp::Publisher::SharedPtr + lighthouse_pose_publisher_; + rclcpp::Publisher::SharedPtr imu_publisher_; + rclcpp::Publisher::SharedPtr button_publisher_; + rclcpp::Publisher::SharedPtr angle_publisher_; + rclcpp::Publisher::SharedPtr lighthouse_publisher_; + rclcpp::Publisher::SharedPtr tracker_publisher_; + std::unordered_map lighthouses_; + std::unordered_map trackers_; + rclcpp::TimerBase::SharedPtr timer_; + std::thread worker_thread_; + rclcpp::Time last_base_station_update_; + std::string tracking_frame_; + std::string driver_config_in_; + std::string driver_config_out_; + std::string driver_args_; + std::string meta_config_; + bool recalibrate_; + int lighthouse_count_; +}; + +} // namespace libsurvive_ros2 + +#endif // LIBSURVIVE_ROS2__DRIVER_COMPONENT_HPP_ diff --git a/include/libsurvive_ros2/poser_component.hpp b/include/libsurvive_ros2/poser_component.hpp new file mode 100644 index 0000000..9e2bbc4 --- /dev/null +++ b/include/libsurvive_ros2/poser_component.hpp @@ -0,0 +1,212 @@ +// Copyright 2023 Andrew Symington +// +// Permission is hereby 
granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +// THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +
+// DESCRIPTION +// +// This code was written to fill in several gaps I see in the pose estimation +// capabilities of the libsurvive project. They are: +//
+// 1. The MPFIT poser algorithm doesn't seem to converge cleanly when there +// are many trackers and many base stations in an experiment. It might be +// because the graphical model gets too big to solve in real-time. +//
+// 2. A brief analysis of the posers available in the stock compile (MPFIT, +// KalmanOnly, EPNP) suggests they either use IMU measurements or light +// measurements, but not both. We can improve tracking by using both. +//
+// 3. In large tracking volumes the body being tracked can occlude one or +// more trackers' view of any lighthouses. In this case the estimate +// becomes wildly inaccurate. To circumvent this problem we should +// exploit prior knowledge about the system, and consider solving for the +// pose of a rigid body, using any available raw measurement from all +// trackers attached to this rigid body. +//
+// Our tracking problem can be modeled with a factor graph. In this paradigm +// nodes represent quantities we want to estimate, and factors are constraints +// that limit the relative values that nodes can assume. The nice thing about +// this representation is that probability is handled well, and measurements +// are all weighted to produce the best estimate possible. One of the tools +// we can use to perform this function is GTSAM, but in order to use this tool +// effectively we have to formally model the problem. Please refer to the +// README for a complete description of how this poser models the problem.
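The following is a minimal, illustrative sketch of the factor-graph idea described in the comment above. It uses GTSAM's Python bindings rather than the C++ API this component is written against, and the keys, placeholder transform and noise values are assumptions made here for illustration only, not part of this patch. It shows the three ingredients the description relies on: values to estimate, factors that constrain them, and ISAM2 for incremental updates as measurements arrive.

import numpy as np
import gtsam
from gtsam.symbol_shorthand import L, X

# Graph of factors (constraints) and initial values for the unknowns.
graph = gtsam.NonlinearFactorGraph()
values = gtsam.Values()

# Anchor the first body pose X(0) in the global frame with a prior factor.
prior_noise = gtsam.noiseModel.Diagonal.Sigmas(np.full(6, 0.1))
graph.add(gtsam.PriorFactorPose3(X(0), gtsam.Pose3(), prior_noise))
values.insert(X(0), gtsam.Pose3())

# Constrain a lighthouse pose L(0) relative to the body, e.g. from an estimate
# reported by a low-level driver (the transform here is a placeholder).
bTl = gtsam.Pose3(gtsam.Rot3(), gtsam.Point3(1.0, 0.0, 2.0))
between_noise = gtsam.noiseModel.Diagonal.Sigmas(np.full(6, 0.05))
graph.add(gtsam.BetweenFactorPose3(X(0), L(0), bTl, between_noise))
values.insert(L(0), bTl)

# ISAM2 folds new factors and values into the running solution incrementally,
# which is how measurements arriving over time would be handled.
isam2 = gtsam.ISAM2()
isam2.update(graph, values)
print(isam2.calculateEstimate().atPose3(L(0)))

The same structure scales to many bodies, trackers and lighthouses: each new measurement simply contributes another factor, and the weighting of those factors is what lets light and IMU data be fused in one estimate.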
+ + +#ifndef LIBSURVIVE_ROS2__POSER_COMPONENT_HPP_ +#define LIBSURVIVE_ROS2__POSER_COMPONENT_HPP_ + +// STL includes +#include +#include +#include +#include +#include +#include + +// Project includes +#include "geometry_msgs/msg/pose_with_covariance_stamped.hpp" +#include "gtsam/inference/Symbol.h" +#include "gtsam/navigation/ImuBias.h" +#include "gtsam/navigation/ImuFactor.h" +#include "gtsam/nonlinear/ISAM2.h" +#include "gtsam/nonlinear/NonlinearFactorGraph.h" +#include "gtsam/nonlinear/PriorFactor.h" +#include "gtsam/slam/BetweenFactor.h" +#include "libsurvive_ros2/msg/angle.hpp" +#include "libsurvive_ros2/msg/lighthouse.hpp" +#include "libsurvive_ros2/msg/tracker.hpp" +#include "rclcpp/rclcpp.hpp" +#include "sensor_msgs/msg/imu.hpp" +#include "tf2_ros/transform_broadcaster.h" + +namespace libsurvive_ros2 +{ + +// Nanosecond timestamp w.r.t unix epoch +typedef int64_t Timestamp; + +// Encodes body information +struct BodyInfo +{ + // Sequence of poses of the body in the global frame + std::map gTb; + + // Sequence of velocities in the body frame. + std::map g_V; + + // Tracker head to body transforms for all sensors + std::unordered_map bTh; +}; + +// Encodes tracker information +struct TrackerInfo +{ + // Associated body -- we don't store a pointer here because there are no + // guarantees about how a std::unordered_map organizes its memory. It's + // cheap to look up on-demand, though. + std::string body_id; + + // Things that are provided by the tracker, but can be estimated over time. + gtsam::Pose3 tTi; + gtsam::Pose3 tTh; + + // Sensor in the tracking frame. + std::unordered_map> t_sensors; + + // Constant scale and bias factors for the IMU, in the imu frame. + gtsam::Vector3 accel_scale; + gtsam::Vector3 accel_bias; + gtsam::Vector3 omega_scale; + gtsam::Vector3 omega_bias; + + // IMU pre-integrator + std::shared_ptr preintegrator; + + // Time varying IMU accelerometer and gyroscope bias in the body frame. + std::map b_B; +}; + +// Encodes lighthouse information +struct LighthouseInfo +{ + // Static pose of this lighthouse + gtsam::Key gTl; +}; + +// Helper to find the closest key lower that the supplied value. This is used to map +// pose corrections to their nearest IMU timestamp. +std::optional +find_key_for_closest_stamp(std::map & m, Timestamp t) +{ + std::map::iterator it = m.lower_bound(t); + it = it != m.begin() ? 
--it : m.end(); + if (it != m.end()) { + return it->second; + } + return std::nullopt; +} + +class PoserComponent : public rclcpp::Node +{ +public: + // Create a new metaposer class to track + explicit PoserComponent(const rclcpp::NodeOptions & options); + virtual ~PoserComponent(); + +private: + // Raw measurements streaming in from the low-level driver + void add_angle(const libsurvive_ros2::msg::Angle & msg); + void add_tracker(const libsurvive_ros2::msg::Tracker & msg); + void add_lighthouse(const libsurvive_ros2::msg::Lighthouse & msg); + void add_imu(const sensor_msgs::msg::Imu & msg); + + // Estimates from the low-level driver, which we treat as priors + void add_tracker_pose(const geometry_msgs::msg::PoseWithCovarianceStamped & msg); + void add_lighthouse_pose(const geometry_msgs::msg::PoseWithCovarianceStamped & msg); + + // Calculate and publish a solution + void solution_callback(); + + // Keep track of the next available symbol + gtsam::Key next_available_key_; + + // Body information information + std::unordered_map id_to_body_info_; + + // Tracker information + std::unordered_map id_to_tracker_info_; + + // Lighthouse information + std::unordered_map id_to_lighthouse_info_; + + // This is the graphical model into which we'll store factors. + gtsam::NonlinearFactorGraph graph_; + + // These are the initial set of values for the factors. + gtsam::Values initial_values_; + + // This is an incremental smoothing and mapping algorithm for estimating. + gtsam::ISAM2 isam2_; + + // Parent frame for all pose solutions. + std::string tracking_frame_; + + // Topic subscribers. + rclcpp::Subscription::SharedPtr tracker_subscriber_; + rclcpp::Subscription::SharedPtr lighthouse_subscriber_; + rclcpp::Subscription::SharedPtr imu_subscriber_; + rclcpp::Subscription::SharedPtr angle_subscriber_; + rclcpp::Subscription::SharedPtr + tracker_pose_subscriber_; + rclcpp::Subscription::SharedPtr + lighthouse_pose_subscriber_; + + // Solution publisher + rclcpp::Publisher::SharedPtr body_pose_publisher_; + + // TF2 publisher + std::unique_ptr tf_broadcaster_; + + // Solution timer + rclcpp::TimerBase::SharedPtr timer_; +}; + +} // namespace libsurvive_ros2 + +#endif // LIBSURVIVE_ROS2__POSER_COMPONENT_HPP_ diff --git a/launch/libsurvive_ros2.launch.py b/launch/libsurvive_ros2.launch.py index 88a7e81..020f8b0 100644 --- a/launch/libsurvive_ros2.launch.py +++ b/launch/libsurvive_ros2.launch.py @@ -24,102 +24,194 @@ import launch from launch import LaunchDescription -from launch.actions import ExecuteProcess, DeclareLaunchArgument +from launch.actions import DeclareLaunchArgument, ExecuteProcess, GroupAction +from launch_ros.actions import LoadComposableNodes from launch_ros.descriptions import ComposableNode from launch.conditions import IfCondition, UnlessCondition -from launch.substitutions import LaunchConfiguration +from launch.substitutions import LaunchConfiguration, PythonExpression from launch_ros.actions import Node, ComposableNodeContainer -# Bag to save data -BAG_FILE = os.path.join(launch.logging.launch_config.log_dir, "libsurvive.bag") - -# Default libsurvive configuration file -CFG_FILE = os.path.join( - get_package_share_directory('libsurvive_ros2'), 'config', 'config.json' -) +# Bas directories for resource loading and writing. +LOG_DIR = launch.logging.launch_config.log_dir +CFG_DIR = os.path.join(get_package_share_directory("libsurvive_ros2"), "config") -# Sow we don't have to repeat for composable and non-composable versions. 
-PARAMETERS = [ - {'driver_args': f'--force-recalibrate 1 -c {CFG_FILE}'}, - {'tracking_frame': 'libsurvive_world'}, - {'imu_topic': 'imu'}, - {'joy_topic': 'joy'}, - {'cfg_topic': 'cfg'}, - {'lighthouse_rate': 4.0}] +# Bag to save data +BAG_FILE = os.path.join(LOG_DIR, "libsurvive.bag") +DRIVER_CONFIG_IN = os.path.join(LOG_DIR, "dummy_driver.json") +DRIVER_CONFIG_OUT = os.path.join(LOG_DIR, "config_driver.json") def generate_launch_description(): arguments = [ - DeclareLaunchArgument('namespace', default_value='libsurvive', - description='Namespace for the non-TF topics'), - DeclareLaunchArgument('composable', default_value='false', - description='Launch in a composable container'), - DeclareLaunchArgument('rosbridge', default_value='false', - description='Launch a rosbridge'), - DeclareLaunchArgument('record', default_value='false', - description='Record data with rosbag')] - - # Non-composable launch (regular node) - libsurvive_node = Node( - package='libsurvive_ros2', - executable='libsurvive_ros2_node', - name='libsurvive_ros2_node', - namespace=LaunchConfiguration('namespace'), - condition=UnlessCondition(LaunchConfiguration('composable')), - output='screen', - parameters=PARAMETERS) - - # Composable launch (zero-copy node example) - libsurvive_composable_node = ComposableNodeContainer( - package='rclcpp_components', - executable='component_container', - name='libsurvive_ros2_container', - namespace=LaunchConfiguration('namespace'), - condition=IfCondition(LaunchConfiguration('composable')), - composable_node_descriptions=[ - ComposableNode( - package='libsurvive_ros2', - plugin='libsurvive_ros2::Component', - name='libsurvive_ros2_component', - parameters=PARAMETERS, - extra_arguments=[ - {'use_intra_process_comms': True} - ] - ) + # General options + DeclareLaunchArgument( + "namespace", + default_value="libsurvive", + description="Namespace for the non-TF topics", + ), + DeclareLaunchArgument( + "tracking_frame", + default_value="libsurvive_world", + description="Tracking parent frame name", + ), + DeclareLaunchArgument( + "web_bridge", + default_value="true", + description="Launch a foxglove web bridge", + ), + DeclareLaunchArgument( + "composable", default_value="false", description="Record data with rosbag" + ), + DeclareLaunchArgument( + "record", default_value="false", description="Record data with rosbag" + ), + DeclareLaunchArgument( + "replay", default_value="", description="Record data with rosbag" + ), + # Driver configuration + DeclareLaunchArgument( + "driver_config_in", + default_value=DRIVER_CONFIG_IN, + description="Input configuration file", + ), + DeclareLaunchArgument( + "driver_config_out", + default_value=DRIVER_CONFIG_OUT, + description="Output configuration file", + ), + DeclareLaunchArgument( + "recalibrate", + default_value="false", + description="Recalibrate lighthouse positions", + ), + # Meta configuration + DeclareLaunchArgument( + "meta_config", + default_value="", + description="Meta estimation configuration file", + ), + ] + + # If we are replaying, then we should not run the driver + enable_driver = PythonExpression( + ["not bool('", LaunchConfiguration("replay"), "')"] + ) + + # If we are using a poser, then we should launch it in a composable context. + enable_poser = PythonExpression( + ["bool('", LaunchConfiguration("meta_config"), "')"] + ) + + # Sow we don't have to repeat for composable and non-composable versions. 
+ driver_parameters = [ + {"driver_args": "--v 100 --lighthouse-gen 2"}, + {"driver_config_in": LaunchConfiguration("driver_config_in")}, + {"driver_config_out": LaunchConfiguration("driver_config_out")}, + {"recalibrate": LaunchConfiguration("recalibrate")}, + {"tracking_frame": LaunchConfiguration("tracking_frame")}, + ] + poser_parameters = [ + {"meta_config": LaunchConfiguration("meta_config")}, + {"tracking_frame": LaunchConfiguration("tracking_frame")}, + ] + + # Composed pipeline. + composed_pipeline = GroupAction( + actions=[ + ComposableNodeContainer( + package="rclcpp_components", + executable="component_container", + name="libsurvive_ros2_container", + namespace=LaunchConfiguration("namespace"), + output="screen", + ), + LoadComposableNodes( + target_container="libsurvive_ros2_container", + composable_node_descriptions=[ + ComposableNode( + package="libsurvive_ros2", + plugin="libsurvive_ros2::DriverComponent", + name="libsurvive_ros2_driver_component", + namespace=LaunchConfiguration("namespace"), + parameters=driver_parameters, + extra_arguments=[{"use_intra_process_comms": False}], + ), + ], + condition=IfCondition(enable_driver), + ), + LoadComposableNodes( + target_container="libsurvive_ros2_container", + composable_node_descriptions=[ + ComposableNode( + package="libsurvive_ros2", + plugin="libsurvive_ros2::PoserComponent", + name="libsurvive_ros2_poser_component", + namespace=LaunchConfiguration("namespace"), + parameters=poser_parameters, + extra_arguments=[{"use_intra_process_comms": False}], + ), + ], + condition=IfCondition(enable_poser), + ), ], - output='log') + condition=IfCondition(LaunchConfiguration("composable")), + ) + + # Non-composed pipeline + non_composed_pipeline = GroupAction( + actions=[ + Node( + package="libsurvive_ros2", + executable="libsurvive_ros2_driver_node", + name="libsurvive_ros2_driver_node", + namespace=LaunchConfiguration("namespace"), + condition=IfCondition(enable_driver), + output="screen", + parameters=driver_parameters, + ), + Node( + package="libsurvive_ros2", + executable="libsurvive_ros2_poser_node", + name="libsurvive_ros2_poser_node", + namespace=LaunchConfiguration("namespace"), + condition=IfCondition(enable_poser), + output="screen", + parameters=poser_parameters, + ), + ], + condition=UnlessCondition(LaunchConfiguration("composable")), + ) # For bridging connection to foxglove running on a remote server. 
-    rosbridge_node = Node( - package='rosbridge_server', - executable='rosbridge_websocket', - name='rosbridge_server_node', - condition=IfCondition(LaunchConfiguration('rosbridge')), + web_bridge_node = Node( + name="foxglove_bridge", + package="foxglove_bridge", + executable="foxglove_bridge", parameters=[ - {"port": 9090}, + {"address": "0.0.0.0"}, + {"port": 8765}, + {"send_buffer_limit": 100000000}, + {"min_qos_depth": 100}, ], - output='log') - rosapi_node = Node( - package='rosapi', - executable='rosapi_node', - name='rosapi_node', - condition=IfCondition(LaunchConfiguration('rosbridge')), - output='log') + condition=IfCondition(LaunchConfiguration("web_bridge")), + output="log", + )
# For recording all data from the experiment bag_record_node = ExecuteProcess( - cmd=['ros2', 'bag', 'record', '-o', BAG_FILE] + [ - '/tf', - '/tf_static' - ], - condition=IfCondition(LaunchConfiguration('record')), - output='log') + cmd=["ros2", "bag", "record", "-o", BAG_FILE, "-a"], + condition=IfCondition(LaunchConfiguration("record")), + output="log", + )
+ + # For replaying data from a previously recorded experiment + bag_replay_node = ExecuteProcess( + cmd=["ros2", "bag", "play", LaunchConfiguration("replay")], + condition=UnlessCondition(enable_driver), + output="log", + )
return LaunchDescription( - arguments + [ - libsurvive_node, - libsurvive_composable_node, - rosbridge_node, - rosapi_node, - bag_record_node - ]) + arguments + + [composed_pipeline, non_composed_pipeline, web_bridge_node, bag_record_node, bag_replay_node] + )
diff --git a/libsurvive_ros2_py/__init__.py b/libsurvive_ros2_py/__init__.py
new file mode 100644 index 0000000..e69de29
diff --git a/libsurvive_ros2_py/config_tools.py b/libsurvive_ros2_py/config_tools.py
new file mode 100644 index 0000000..3660ff0 --- /dev/null +++ b/libsurvive_ros2_py/config_tools.py @@ -0,0 +1,352 @@ +# Copyright 2022 Andrew Symington +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. +# +# DESCRIPTION +# +# Registration works by hand-labeling the positions of marker points in some +# global frame. Our goal is to find the transforms that move the lighthouses +# from their internal coordinate frame to some global frame, such that their +# final positions coincide with labeled registration markers.
+#
+# Let's assume some shorthand notation:
+#
+#  - g: global frame       # This is the global frame for libsurvive
+#  - l: lighthouse frame   # This is a frame centered on a lighthouse
+#  - c: calibration frame  # This is the config file global frame
+#  - m: marker frame       # This is the frame of the registration marker
+#
+# What we are trying to solve for is a transform (gTl) for each lighthouse
+# that moves it from some local frame (l) to the global frame (g). The
+# hand-labeled registration points are translation elements of gTm, and so can
+# be considered PoseTranslationPrior3D factors. To use these labels we need
+# to first correct for the marker offset from the lighthouse to resolve
+# gTl = gTm * lTm.inverse(). Then they can be treated as direct observations
+# of the translation component of the quantity we are estimating.
+#
+# Each round of libsurvive calibration results in a config file that is
+# locally consistent within an arbitrary global frame -- this is either from
+# a tracker's initial orientation, or the first lighthouse that was seen.
+# Regardless, what we care about is applying a constraint to our system that
+# penalizes pairs of relative lighthouse poses that deviate from the values
+# in the calibration files. So, for every pair of lighthouses in each file
+# we apply a BetweenFactorPose3 that enforces internal consistency.
+#
+# The initial values we choose for our unknowns (gTl) are copied directly
+# from the last value specified in the configuration file. This ensures that
+# clusters of lighthouses are "roughly correct", which helps steer the
+# optimizer in the correct direction and avoid converging to local minima.
+
+# System includes
+import json
+import numpy as np
+import yaml
+
+from scipy.sparse.csgraph import connected_components
+
+import gtsam
+from gtsam.symbol_shorthand import X
+
+# Maximum number of lighthouses
+MAX_NUM_LIGHTHOUSES = 16
+
+
+# The same lighthouse had a different channel in two config files
+class ChannelInconsistencyError(Exception):
+    pass
+
+
+# The graph formed by the configurations is disconnected, and cannot be registered.
+class ChannelOverlapError(Exception):
+    pass
+
+
+# An input configuration file could not be read.
+class InputConfigReadError(Exception): + pass + + +# The output configuration could not be read +class OutputConfigWriteError(Exception): + pass + + +# The output configuration could not be read +class RegistrationReadError(Exception): + pass + + +def _load_config_file(path): + """Load the content from a configration JSON file.""" + try: + with open(path, "r") as file: + content = file.read() + content = content.replace('"lighthouse', ', "lighthouse') + content = "{" + content + "}" + return json.loads(content) + except Exception: + print("Error: input file is not readable") + raise InputConfigReadError("input file is not readable") + + +def _write_config_file(path, config): + """Write content to a configuration JSON file.""" + try: + with open(path, "w") as file: + file.write('"v":"0",\n') + file.write('"poser":"MPFIT",\n') + file.write('"disambiguator":"StateBased",\n') + file.write('"configed-lighthouse-gen":"2",\n') + file.write('"ootx-ignore-sync-error":"0",\n') + file.write('"floor-offset":"0"\n') + for lighthouse_name, lighthouse_data in config.items(): + serialized_content = json.dumps(lighthouse_data, indent=2) + file.write(f'"{lighthouse_name}": {serialized_content}\n') + except Exception: + print("Error: output file is not writable") + raise OutputConfigWriteError("output file is not writable") + + +def _read_registration_config(path): + """Read registration YAML file.""" + try: + with open(path, "r") as file: + return yaml.safe_load(file) + except Exception: + print("Error: registration file is not readable") + raise RegistrationReadError("registration file is not readable") + + +def config_merge(inputs, output, reg_config=None): + """Merge one or more libsurvive input configuration files.""" + graph = gtsam.NonlinearFactorGraph() + value = gtsam.Values() + + # Helps ensure that we don't + print("Loading input configuration files") + adjacency = np.eye(MAX_NUM_LIGHTHOUSES, dtype=np.float64) + lighthouses = {} + for path in inputs: + input_config = _load_config_file(path=path) + + # Now process all lighthouses within this config file + edges = set() + cTl_value = {} + cTl_error = {} + for lighthouse_name, lighthouse_data in input_config.items(): + if lighthouse_name.startswith("lighthouse"): + lh_serial = lighthouse_data["id"] + lh_mode = lighthouse_data["mode"] + + # Insert the lighthouse. + if lh_serial in lighthouses.keys(): + if lighthouses[lh_serial]["mode"] != lh_mode: + raise ChannelInconsistencyError( + "Inconsistent ID <-> channel mapping" + ) + i = lighthouses[lh_serial]["index"] + else: + i = len(lighthouses) + lighthouses[lh_serial] = lighthouse_data + lighthouses[lh_serial]["index"] = i + + # Add a value representing the ith lighthouse pose in the global frame. + # This value will be updated continually. This is just to ensure that one + # already exists, so that updating it doesn't require an existence check. + gTl_value = gtsam.Pose3( + gtsam.Rot3.Quaternion(1.0, 0.0, 0.0, 0.0), + gtsam.Point3(0.0, 0.0, 0.0), + ) + value.insert(X(i), gTl_value) + + # Add the lighthouse -> config frame transform for each lighthouse i. 
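+                # In the libsurvive config, "pose" is stored as
+                # [x, y, z, qw, qx, qy, qz] and "variance" as
+                # [tx, ty, tz, rx, ry, rz]; the index mapping below follows
+                # that layout.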
+ cTl_value[i] = gtsam.Pose3( + gtsam.Rot3.Quaternion( + float(lighthouse_data["pose"][3]), # qw + float(lighthouse_data["pose"][4]), # qx + float(lighthouse_data["pose"][5]), # qy + float(lighthouse_data["pose"][6]), + ), # qz + gtsam.Point3( + float(lighthouse_data["pose"][0]), # x + float(lighthouse_data["pose"][1]), # y + float(lighthouse_data["pose"][2]), + ), + ) # z + cTl_error[i] = np.array( + [ + float(lighthouse_data["variance"][3]), # rx + float(lighthouse_data["variance"][4]), # ry + float(lighthouse_data["variance"][5]), # rz + float(lighthouse_data["variance"][0]), # tx + float(lighthouse_data["variance"][1]), # ty + float(lighthouse_data["variance"][2]), # tz + ], + dtype=float, + ) + + # Update our best estimate -- yes, this will overwrite any previous value + # but that's ok. It just needs to bea reasonably correct. + value.update(X(i), cTl_value[i]) + + # Add to the solvability determinator + edges.add(i) + + # Complete the solvability determinator + print("-> ", path, edges) + for i in edges: + for j in edges: + adjacency[i, j] = 1.0 + if i < j: + jTi_value = cTl_value[i].between(cTl_value[j]) + c_sigma = cTl_error[i] + cTl_error[j] + lAc = cTl_value[i].inverse().AdjointMap() + l_sigma = gtsam.noiseModel.Gaussian.Covariance( + lAc * c_sigma * lAc.T + ) + graph.add(gtsam.BetweenFactorPose3(X(i), X(j), jTi_value, l_sigma)) + + # Make sure the channel graph is fully connected. If it's not, then we won't be + # able to register the entire system in a common frame, so there is little point + # adding it to GTSAM and event trying to find a solutidon. + num_lighthouses = len(lighthouses) + adjacency = adjacency[0:num_lighthouses, 0:num_lighthouses] + n_components, _ = connected_components(adjacency) + print("Adjacency matrix", adjacency) + print("Connected components", n_components) + if n_components != 1: + print("Error: too many connected components in channel graph") + raise ChannelOverlapError("channel graph is disconnected") + + # If registration information is supplied. + if reg_config is not None: + + print("Reading registration from", reg_config) + registration = _read_registration_config(path=reg_config) + + print("Grabbing lTm estimates") + lTm = gtsam.Pose3( + gtsam.Rot3.Quaternion(1.0, 0.0, 0.0, 0.0), # [qw, qx, qy, qz] + gtsam.Point3( + registration["lighthouse_from_marker"]["position"][0], # x + registration["lighthouse_from_marker"]["position"][1], # y + registration["lighthouse_from_marker"]["position"][2], + ), + ) # z + + print("Grabbing gTm estimates") + assert len(registration["markers"]) > 3, "you need four or more markers" + for lh_name, marker_data in registration["markers"].items(): + lh_serial = str(marker_data["id"]) + if lh_serial not in lighthouses.keys(): + print( + "WARNING: marker for non-existent lighthouse in config files. Ignoring" + ) + continue + i = int(lighthouses[lh_serial]["index"]) + + # The registration information expresses the position of the marker in the global + # frame. But what we want is the position of the lighthouse in the global frame. + gTm_value = gtsam.Pose3( + gtsam.Rot3.Quaternion(1.0, 0.0, 0.0, 0.0), # qw # qx # qy # qz + gtsam.Point3( + marker_data["position"][0], # x + marker_data["position"][1], # y + marker_data["position"][2], + ), # z + ) + gTl_value = gTm_value * lTm.inverse() + + # The noise vector expresses how much uncertainty we expect in our ability to + # measure the marker position in the global frame. 
Since the parent frame is not + # changing the + gTm_noise = gtsam.noiseModel.Diagonal.Sigmas( + np.array( + [ + marker_data["sigma"][0], + marker_data["sigma"][1], + marker_data["sigma"][2], + ], + dtype=float, + ) + ) + gTl_noise = gTm_noise + + # Add a factor constraining the position. + graph.add(gtsam.PoseTranslationPrior3D(X(i), gTl_value, gTl_noise)) + + else: + print("No registration information. Assuming lighthouse 1 is the global frame") + gTl_value = gtsam.Pose3( + gtsam.Rot3.Quaternion(1.0, 0.0, 0.0, 0.0), # qw # qx # qy # qz + gtsam.Point3(0.0, 0.0, 0.0), # x # y + ) + gTl_noise = gtsam.noiseModel.Diagonal.Variances( + np.array([1e-6, 1e-6, 1e-6, 1e-4, 1e-4, 1e-4]) + ) + graph.add(gtsam.PriorFactorPose3(X(0), gTl_value, gTl_noise)) + + print("Running optimizer") + graph.print("full graph") + value.print("initial values") + result = gtsam.LevenbergMarquardtOptimizer(graph, value).optimize() + result.print("final result") + marginals = gtsam.Marginals(graph, result) + + # Helper function to serialize floating points consistently to strings + def str_from_float(f): + return np.format_float_positional(f, precision=9, trim="k", unique=False) + + # Write the updated global configuration. + print("Writing output config to", output) + config = {} + for lh_serial, lh_data in lighthouses.items(): + name = "lighthouse{index}".format(index=lh_data["index"]) + i = int(lh_data["index"]) + cTl_est = result.atPose3(X(i)) + cTl_cov = marginals.marginalCovariance(X(i)) + config[name] = lh_data + config[name]["pose"] = [ + str_from_float(cTl_est.translation()[0]), # x + str_from_float(cTl_est.translation()[1]), # y + str_from_float(cTl_est.translation()[2]), # z + str_from_float(cTl_est.rotation().toQuaternion().w()), # qw + str_from_float(cTl_est.rotation().toQuaternion().x()), # qx + str_from_float(cTl_est.rotation().toQuaternion().y()), # qy + str_from_float(cTl_est.rotation().toQuaternion().z()), + ] # qz + config[name]["variance"] = [ + str_from_float(cTl_cov[3, 3]), # tx + str_from_float(cTl_cov[4, 4]), # ty + str_from_float(cTl_cov[5, 5]), # tz + str_from_float(cTl_cov[0, 0]), # rx + str_from_float(cTl_cov[1, 1]), # ry + str_from_float(cTl_cov[2, 2]), + ] # rz + _write_config_file(path=output, config=config) + + # Return the full configuration. + return config + + +def config_equal(path1, path2): + """Load two configs and verify that they are the same.""" + config1 = _load_config_file(path=path1) + config2 = _load_config_file(path=path2) + return config1 == config2 diff --git a/msg/Angle.msg b/msg/Angle.msg new file mode 100644 index 0000000..060e5c7 --- /dev/null +++ b/msg/Angle.msg @@ -0,0 +1,36 @@ +# Copyright 2023 Andrew Symington +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL +# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. + +# Information about the origin of this data +# > stamp: precise time of capture +# > frame_id: [] +std_msgs/Header header + +# The sensor which was struck by the sweep +int32 sensor_id + +# The ID of the lighthouse from which this sweep originated +uint8 channel + +# The plane in which this sweep originated +uint8 plane + +# Thee angle of the pulse +float64 angle diff --git a/msg/Lighthouse.msg b/msg/Lighthouse.msg new file mode 100644 index 0000000..f1f9138 --- /dev/null +++ b/msg/Lighthouse.msg @@ -0,0 +1,49 @@ +# Copyright 2023 Andrew Symington +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. + +# Information about the origin of this data +# > stamp: precise time of capture +# > frame_id: [] +std_msgs/Header header + +# Index of this lighthouse +uint8 index + +# Operating 'channel' or 'mode' (1-16) +uint8 channel + +# Number of times system has been unlocked. +uint8 unlock_count + +# Acceleration reading +geometry_msgs/Vector3 accel + +# Lighthouse calibration parameters +float64[2] fcalphase +float64[2] fcaltilt +float64[2] fcalcurve +float64[2] fcalgibpha +float64[2] fcalgibmag +float64[2] fcalogeephase +float64[2] fcalogeemag + +# Need to work out what these are for +uint64 ootx_set +uint64 position_set diff --git a/msg/Tracker.msg b/msg/Tracker.msg new file mode 100644 index 0000000..877b182 --- /dev/null +++ b/msg/Tracker.msg @@ -0,0 +1,39 @@ +# Copyright 2023 Andrew Symington +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL +# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. + +# Information about the origin of this data +# > stamp: precise time of capture +# > frame_id: [] +std_msgs/Header header + +# Extrinsic calibration +geometry_msgs/Pose tracker_from_imu +geometry_msgs/Pose tracker_from_head + +# IMU calibration +geometry_msgs/Vector3 accel_bias +geometry_msgs/Vector3 accel_scale +geometry_msgs/Vector3 omega_bias +geometry_msgs/Vector3 omega_scale + +# Lighthouse sensors (tracker_from_sensor transform components) +uint8[] channels +geometry_msgs/Vector3[] points +geometry_msgs/Vector3[] normals \ No newline at end of file diff --git a/package.xml b/package.xml index 098c994..498aef0 100644 --- a/package.xml +++ b/package.xml @@ -1,28 +1,56 @@ - + + + libsurvive_ros2 - 0.0.1 - libsurvive driver for ROS2 - Andrew Symington - MIT - rclcpp + + + boost + eigen + tbb + rclcpp_components + rclcpp + tf2_ros + tf2 + diagnostic_msgs geometry_msgs sensor_msgs - tf2 - tf2_ros + + + + ament_cmake + rosidl_default_generators - rosbridge_suite + + + rosidl_default_runtime + foxglove_bridge + python3-numpy + python3-scipy + python3-yaml + rosbag2_storage_mcap + + + + ament_cmake_ros ament_lint_auto ament_lint_common + launch_pytest + + + + rosidl_interface_packages + + ament_cmake diff --git a/scripts/config_merger b/scripts/config_merger new file mode 100755 index 0000000..c19dd27 --- /dev/null +++ b/scripts/config_merger @@ -0,0 +1,38 @@ +#!/usr/bin/env python3 +# +# Copyright 2022 Andrew Symington +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
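+#
+# Example invocation (all paths are illustrative):
+#
+#   ./scripts/config_merger \
+#       -i first_calibration.json second_calibration.json \
+#       -r registration.yaml \
+#       -o merged_config.json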
+ +import argparse +import sys + +from libsurvive_ros2_py.config_tools import config_merge + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("-o", "--output", type=str, required=True, + help="the output configuration") + parser.add_argument("-r", "--registration", type=str, required=False, + help="the registration configuration") + parser.add_argument("-i", "--inputs", nargs='+', default=[], + help="the input configurations") + args = parser.parse_args() + config_merge(inputs=args.inputs,output=args.output, + reg_config=args.registration) diff --git a/src/component.cpp b/src/component.cpp deleted file mode 100644 index 01ecb42..0000000 --- a/src/component.cpp +++ /dev/null @@ -1,280 +0,0 @@ -// Copyright 2022 Andrew Symington -// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL -// THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -// C++ system -#include -#include -#include - -// Other -#include "libsurvive_ros2/component.hpp" -#include "rclcpp_components/register_node_macro.hpp" - - -// Scale factor to move from G to m/s^2. -constexpr double SI_GRAVITY = 9.80665; - -// We can only ever load one version of the driver, so we store a pointer to the instance of the -// driver here, so the IMU callback can push data to it. 
-libsurvive_ros2::Component * _singleton = nullptr; - -static void imu_func( - SurviveObject * so, int mask, const FLT * accelgyromag, uint32_t rawtime, int id) -{ - if (_singleton) { - survive_default_imu_process(so, mask, accelgyromag, rawtime, id); - FLT timecode = SurviveSensorActivations_runtime( - &so->activations, so->activations.last_imu) / FLT(1e6); - sensor_msgs::msg::Imu imu_msg; - imu_msg.header.frame_id = std::string(so->serial_number) + "_imu"; - imu_msg.header.stamp = _singleton->get_ros_time("inertial", timecode); - imu_msg.angular_velocity.x = accelgyromag[3]; - imu_msg.angular_velocity.y = accelgyromag[4]; - imu_msg.angular_velocity.z = accelgyromag[5]; - imu_msg.linear_acceleration.x = accelgyromag[0] * SI_GRAVITY; - imu_msg.linear_acceleration.y = accelgyromag[1] * SI_GRAVITY; - imu_msg.linear_acceleration.z = accelgyromag[2] * SI_GRAVITY; - _singleton->publish_imu(imu_msg); - } -} - -static void ros_from_pose( - geometry_msgs::msg::Transform * const tx, const SurvivePose & pose) -{ - tx->translation.x = pose.Pos[0]; - tx->translation.y = pose.Pos[1]; - tx->translation.z = pose.Pos[2]; - tx->rotation.w = pose.Rot[0]; - tx->rotation.x = pose.Rot[1]; - tx->rotation.y = pose.Rot[2]; - tx->rotation.z = pose.Rot[3]; -} - -namespace libsurvive_ros2 -{ - -Component::Component(const rclcpp::NodeOptions & options) -: Node("libsurvive_ros2", options), - actx_(nullptr), - tf_broadcaster_(std::make_unique(*this)), - tf_static_broadcaster_(std::make_unique(*this)) -{ - // Store the instance globally to be used by a C callback. - _singleton = this; - - // Global parameters - this->declare_parameter("tracking_frame", "libsurvive_frame"); - this->get_parameter("tracking_frame", tracking_frame_); - this->declare_parameter("lighthouse_rate", 4.0); - this->get_parameter("lighthouse_rate", lighthouse_rate_); - - // Setup topic for IMU. - std::string imu_topic; - this->declare_parameter("imu_topic", "imu"); - this->get_parameter("imu_topic", imu_topic); - imu_publisher_ = this->create_publisher(imu_topic, 10); - - // Setup topic for joystick. - std::string joy_topic; - this->declare_parameter("joy_topic", "joy"); - this->get_parameter("joy_topic", joy_topic); - joy_publisher_ = this->create_publisher(joy_topic, 10); - - // Setup topic for configuration. - std::string cfg_topic; - this->declare_parameter("cfg_topic", "cfg"); - this->get_parameter("cfg_topic", cfg_topic); - cfg_publisher_ = this->create_publisher(cfg_topic, 10); - - // Setup driver parameters. - std::string driver_args; - this->declare_parameter("driver_args", "--force-recalibrate 1"); - this->get_parameter("driver_args", driver_args); - std::vector args; - std::stringstream driver_ss(driver_args); - std::string token; - while (getline(driver_ss, token, ' ')) { - args.emplace_back(token.c_str()); - } - - // Try and initialize survive with the arguments supplied. - actx_ = survive_simple_init(args.size(), const_cast(args.data())); - if (actx_ == nullptr) { - RCLCPP_FATAL(this->get_logger(), "Could not initialize the libsurvive context"); - return; - } - - // Setup callback for reading IMU data. - SurviveContext * ctx = survive_simple_get_ctx(actx_); - survive_install_imu_fn(ctx, imu_func); - - // Initialize the survive thread. 
- survive_simple_start_thread(actx_); - - // Start the work thread - worker_thread_ = std::thread(&Component::work, this); -} - -Component::~Component() -{ - RCLCPP_INFO(this->get_logger(), "Cleaning up."); - worker_thread_.join(); - - RCLCPP_INFO(this->get_logger(), "Shutting down libsurvive driver"); - if (actx_) { - survive_simple_close(actx_); - } - - RCLCPP_INFO(this->get_logger(), "Clearing singleton instance"); - _singleton = nullptr; -} - -rclcpp::Time Component::get_ros_time(const std::string & /*str*/, FLT timecode) -{ - return rclcpp::Time() + rclcpp::Duration(std::chrono::duration(timecode)); -} - -void Component::publish_imu(const sensor_msgs::msg::Imu & msg) -{ - if (imu_publisher_) { - imu_publisher_->publish(msg); - } -} - -void Component::work() -{ - RCLCPP_INFO(this->get_logger(), "Start listening for events.."); - - // Poll for events. - struct SurviveSimpleEvent event = {}; - while (survive_simple_wait_for_event( - actx_, - &event) != SurviveSimpleEventType_Shutdown && rclcpp::ok()) - { - // Business logic depends on the event type - switch (event.event_type) { - // TYPE: Pose update (limit to non-lighthouses only) - case SurviveSimpleEventType_PoseUpdateEvent: { - const struct SurviveSimplePoseUpdatedEvent * pose_event = - survive_simple_get_pose_updated_event(&event); - if (survive_simple_object_get_type(pose_event->object) != - SurviveSimpleObject_LIGHTHOUSE) - { - SurvivePose pose = {}; - auto timecode = survive_simple_object_get_latest_pose(pose_event->object, &pose); - if (timecode > 0) { - geometry_msgs::msg::TransformStamped pose_msg; - pose_msg.header.stamp = this->get_ros_time("tracker", timecode); - pose_msg.header.frame_id = tracking_frame_; - pose_msg.child_frame_id = survive_simple_serial_number(pose_event->object); - ros_from_pose(&pose_msg.transform, pose); - tf_broadcaster_->sendTransform(pose_msg); - } - } - break; - } - - // TYPE: Button update - case SurviveSimpleEventType_ButtonEvent: { - const struct SurviveSimpleButtonEvent * button_event = survive_simple_get_button_event( - &event); - auto obj = button_event->object; - sensor_msgs::msg::Joy joy_msg; - joy_msg.header.frame_id = survive_simple_serial_number(button_event->object); - joy_msg.header.stamp = this->get_ros_time("button", button_event->time); - joy_msg.axes.resize(SURVIVE_MAX_AXIS_COUNT); - joy_msg.buttons.resize(SURVIVE_BUTTON_MAX * 2); - int64_t mask = survive_simple_object_get_button_mask(obj); - mask |= (survive_simple_object_get_touch_mask(obj) << SURVIVE_BUTTON_MAX); - for (int i = 0; i < SURVIVE_MAX_AXIS_COUNT; i++) { - joy_msg.axes[i] = - static_cast(survive_simple_object_get_input_axis(obj, (enum SurviveAxis)i)); - } - for (int i = 0; i < mask && i < static_cast(joy_msg.buttons.size()); i++) { - joy_msg.buttons[i] = (mask >> i) & 1; - } - joy_publisher_->publish(joy_msg); - break; - } - - // TYPE: Configuration update - case SurviveSimpleEventType_ConfigEvent: { - const struct SurviveSimpleConfigEvent * config_event = survive_simple_get_config_event( - &event); - diagnostic_msgs::msg::KeyValue cfg_msg; - cfg_msg.key = survive_simple_serial_number(config_event->object); - cfg_msg.value = config_event->cfg; - cfg_publisher_->publish(cfg_msg); - break; - } - - // TYPE: Device add event - case SurviveSimpleEventType_DeviceAdded: { - const struct SurviveSimpleObjectEvent * object_event = survive_simple_get_object_event( - &event); - RCLCPP_INFO( - this->get_logger(), "A new device %s was added at time %lf", - survive_simple_serial_number(object_event->object), - 
this->get_ros_time("connect", object_event->time).seconds() - ); - break; - } - - // TYPE: no-op - case SurviveSimpleEventType_None: { - break; - } - - // We should never get here. - default: - RCLCPP_WARN(this->get_logger(), "Unknown event"); - break; - } - - // Always update the base stations - auto time_now = this->get_clock()->now(); - if (time_now.seconds() - last_base_station_update_.seconds() > lighthouse_rate_) { - last_base_station_update_ = time_now; - for (const SurviveSimpleObject * it = survive_simple_get_first_object(actx_); it != 0; - it = survive_simple_get_next_object(actx_, it)) - { - if (survive_simple_object_get_type(it) == SurviveSimpleObject_LIGHTHOUSE) { - SurvivePose pose = {}; - auto timecode = survive_simple_object_get_latest_pose(it, &pose); - if (timecode > 0) { - geometry_msgs::msg::TransformStamped pose_msg; - pose_msg.header.stamp = this->get_ros_time("lighthouse", timecode); - pose_msg.header.frame_id = tracking_frame_; - pose_msg.child_frame_id = survive_simple_serial_number(it); - ros_from_pose(&pose_msg.transform, pose); - tf_static_broadcaster_->sendTransform(pose_msg); - } - } - } - } - } -} - -} // namespace libsurvive_ros2 - -// Register the component with class_loader. -// This acts as a sort of entry point, allowing the component to be discoverable when its library -// is being loaded into a running process. -RCLCPP_COMPONENTS_REGISTER_NODE(libsurvive_ros2::Component) diff --git a/src/driver_component.cpp b/src/driver_component.cpp new file mode 100644 index 0000000..ab49187 --- /dev/null +++ b/src/driver_component.cpp @@ -0,0 +1,504 @@ +// Copyright 2022 Andrew Symington +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +// THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. + +// C++ system +#include +#include +#include +#include + +// This project +#include "libsurvive_ros2/driver_component.hpp" +#include "rclcpp_components/register_node_macro.hpp" + +using namespace std::chrono_literals; + +// Hard-coded variances from libsurvive priors +const double kPosSigma_meters = 0.1; +const double kRotSigma_radians = 0.1; + +// We can only ever load one version of the driver, so we store a pointer to the instance of the +// driver here, so the IMU callback can push data to it. 
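+// Note that this also means only one DriverComponent is supported per process;
+// loading a second instance into the same component container would silently
+// overwrite this pointer.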
+libsurvive_ros2::DriverComponent * _singleton = nullptr; + +// Convert a libsurvive point to a geometry::msg::Vector3 +static void ros_from_point( + geometry_msgs::msg::Vector3 * const ros, const FLT * const arr) +{ + ros->x = arr[0]; + ros->y = arr[1]; + ros->z = arr[2]; +} + +// Convert a libsurvive pose to a geometry::msg::Transform +static void ros_from_pose( + geometry_msgs::msg::Transform * const tx, const SurvivePose & pose) +{ + tx->translation.x = pose.Pos[0]; + tx->translation.y = pose.Pos[1]; + tx->translation.z = pose.Pos[2]; + tx->rotation.w = pose.Rot[0]; + tx->rotation.x = pose.Rot[1]; + tx->rotation.y = pose.Rot[2]; + tx->rotation.z = pose.Rot[3]; +} + +// Callback for inertial event +static void imu_func( + SurviveObject * so, int mode, const FLT * accelgyromag, uint32_t rawtime, int id) +{ + survive_default_imu_process(so, mode, accelgyromag, rawtime, id); + if (_singleton) { + // Get precise time + auto long_timecode = SurviveSensorActivations_long_timecode_imu(&so->activations, rawtime); + FLT timecode = SurviveSensorActivations_runtime(&so->activations, long_timecode) / FLT(1e6); + // Package up an inertial message + sensor_msgs::msg::Imu imu_msg; + imu_msg.header.stamp = _singleton->get_ros_time(timecode); + imu_msg.header.frame_id = std::string(so->serial_number); + ros_from_point(&imu_msg.linear_acceleration, &accelgyromag[0]); + ros_from_point(&imu_msg.angular_velocity, &accelgyromag[3]); + _singleton->publish_imu(imu_msg); + } +} + +// Callback for button event +static void button_func( + SurviveObject * so, + enum SurviveInputEvent eventType, + enum SurviveButton buttonId, + const enum SurviveAxis * axisIds, + const SurviveAxisVal_t * axisVals) +{ + survive_default_button_process(so, eventType, buttonId, axisIds, axisVals); + if (_singleton) { + // Get approximate time + FLT timecode = survive_run_time_since_epoch(so->ctx); + // Package up a button message + sensor_msgs::msg::Joy button_msg; + button_msg.header.stamp = _singleton->get_ros_time(timecode); + button_msg.header.frame_id = so->serial_number; + button_msg.buttons.resize(SURVIVE_BUTTON_MAX * 2); + for (int i = 0; i < SURVIVE_BUTTON_MAX; i++) { + button_msg.buttons[i] = (buttonId == i ? eventType : 0); + } + button_msg.axes.resize(SURVIVE_MAX_AXIS_COUNT); + for (int i = 0; i < SURVIVE_MAX_AXIS_COUNT; i++) { + button_msg.axes[axisIds[i]] = axisVals[i]; + } + _singleton->publish_button(button_msg); + } +} + +// Callback for lighthouse event +static void ootx_received_func( + struct SurviveContext * ctx, uint8_t idx) +{ + survive_default_ootx_received_process(ctx, idx); + if (_singleton) { + printf("IDX: index:%d == channel:%d\n", idx, ctx->bsd[idx].mode); + // Get serial number of lighthouse. 
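+    // The name is built from the hexadecimal base station ID, so a hypothetical
+    // ID of 0x2E5F1C3A yields "LHB-2E5F1C3A"; this string becomes the
+    // header.frame_id of the Lighthouse message below.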
+ char serial[16]; + snprintf(serial, sizeof(serial), "LHB-%X", (unsigned)ctx->bsd[idx].BaseStationID); + // Get approximate time + FLT timecode = survive_run_time_since_epoch(ctx); + // Package up a lighthouse message + libsurvive_ros2::msg::Lighthouse lighthouse_msg; + lighthouse_msg.header.stamp = _singleton->get_ros_time(timecode); + lighthouse_msg.header.frame_id = serial; + lighthouse_msg.index = idx; + lighthouse_msg.channel = ctx->bsd[idx].mode; + lighthouse_msg.unlock_count = ctx->bsd[idx].sys_unlock_count; + ros_from_point(&lighthouse_msg.accel, ctx->bsd[idx].accel); + for (size_t i = 0; i < 2; i++) { + lighthouse_msg.fcalphase[i] = ctx->bsd[idx].fcal[i].phase; + lighthouse_msg.fcaltilt[i] = ctx->bsd[idx].fcal[i].tilt; + lighthouse_msg.fcalcurve[i] = ctx->bsd[idx].fcal[i].curve; + lighthouse_msg.fcalgibpha[i] = ctx->bsd[idx].fcal[i].gibpha; + lighthouse_msg.fcalgibmag[i] = ctx->bsd[idx].fcal[i].gibmag; + lighthouse_msg.fcalogeephase[i] = ctx->bsd[idx].fcal[i].ogeephase; + lighthouse_msg.fcalogeemag[i] = ctx->bsd[idx].fcal[i].ogeemag; + } + lighthouse_msg.ootx_set = ctx->bsd[idx].OOTXSet; + lighthouse_msg.position_set = ctx->bsd[idx].PositionSet; + // Add or update the lighthouse in our internal data structure. It will + // be published at a fixed cadence for all to see. + _singleton->add_or_update_lighthouse(lighthouse_msg); + } +} + +// Callback for tracker event +static int config_func(struct SurviveObject * so, char * ct0conf, int len) +{ + const int res = survive_default_config_process(so, ct0conf, len); + if (_singleton) { + // Get the approximate time + FLT timecode = survive_run_time_since_epoch(so->ctx); + // Package up tracker config data packet + libsurvive_ros2::msg::Tracker tracker_msg; + tracker_msg.header.stamp = _singleton->get_ros_time(timecode); + tracker_msg.header.frame_id = std::string(so->serial_number); + tracker_msg.tracker_from_imu.position.x = so->imu2trackref.Pos[0]; + tracker_msg.tracker_from_imu.position.y = so->imu2trackref.Pos[1]; + tracker_msg.tracker_from_imu.position.z = so->imu2trackref.Pos[2]; + tracker_msg.tracker_from_imu.orientation.w = so->imu2trackref.Rot[0]; + tracker_msg.tracker_from_imu.orientation.x = so->imu2trackref.Rot[1]; + tracker_msg.tracker_from_imu.orientation.y = so->imu2trackref.Rot[2]; + tracker_msg.tracker_from_imu.orientation.z = so->imu2trackref.Rot[3]; + tracker_msg.tracker_from_head.position.x = so->head2trackref.Pos[0]; + tracker_msg.tracker_from_head.position.y = so->head2trackref.Pos[1]; + tracker_msg.tracker_from_head.position.z = so->head2trackref.Pos[2]; + tracker_msg.tracker_from_head.orientation.w = so->head2trackref.Rot[0]; + tracker_msg.tracker_from_head.orientation.x = so->head2trackref.Rot[1]; + tracker_msg.tracker_from_head.orientation.y = so->head2trackref.Rot[2]; + tracker_msg.tracker_from_head.orientation.z = so->head2trackref.Rot[3]; + ros_from_point(&tracker_msg.accel_bias, so->acc_bias); + ros_from_point(&tracker_msg.accel_scale, so->acc_scale); + ros_from_point(&tracker_msg.omega_bias, so->gyro_bias); + ros_from_point(&tracker_msg.omega_scale, so->gyro_scale); + tracker_msg.channels.resize(so->sensor_ct); + tracker_msg.normals.resize(so->sensor_ct); + tracker_msg.points.resize(so->sensor_ct); + for (uint8_t k = 0; k < so->sensor_ct; k++) { + tracker_msg.channels[k] = so->channel_map[k]; + ros_from_point(&tracker_msg.points[k], &so->sensor_locations[k * 3]); + ros_from_point(&tracker_msg.normals[k], &so->sensor_normals[k * 3]); + } + // Add or update the lighthouse in our internal data structure. 
It will + // be published at a fixed cadence for all to see. + _singleton->add_or_update_tracker(tracker_msg); + } + return res; +} + +// Callback for angle event +static void sweep_angle_func( + SurviveObject * so, + uint8_t channel, int sensor_id, uint32_t rawtime, int8_t plane, FLT angle) +{ + survive_default_sweep_angle_process(so, channel, sensor_id, rawtime, plane, angle); + if (_singleton) { + // Get the approximate time + auto long_timecode = SurviveSensorActivations_long_timecode_light(&so->activations, rawtime); + FLT timecode = SurviveSensorActivations_runtime(&so->activations, long_timecode) / FLT(1e6); + // Package up an angle measurement + libsurvive_ros2::msg::Angle angle_msg; + angle_msg.header.stamp = _singleton->get_ros_time(timecode); + angle_msg.header.frame_id = std::string(so->serial_number); + angle_msg.sensor_id = sensor_id; + angle_msg.channel = channel; + angle_msg.plane = plane; + angle_msg.angle = angle; + _singleton->publish_angle(angle_msg); + } +} + +// Callback for new lighthouse pose +static void lighthouse_pose_func( + SurviveContext * ctx, uint8_t lighthouse, const SurvivePose * lighthouse_pose) +{ + survive_default_lighthouse_pose_process(ctx, lighthouse, lighthouse_pose); + if (_singleton && lighthouse_pose) { + // Get serial number of lighthouse. + char serial[16]; + snprintf(serial, sizeof(serial), "LHB-%X", (unsigned)ctx->bsd[lighthouse].BaseStationID); + // Get the approximate time + FLT timecode = survive_run_time_since_epoch(ctx); + // Package up a transform + geometry_msgs::msg::TransformStamped transform_msg; + transform_msg.header.stamp = _singleton->get_ros_time(timecode); + transform_msg.header.frame_id = _singleton->get_tracking_frame(); + transform_msg.child_frame_id = serial; + ros_from_pose(&transform_msg.transform, *lighthouse_pose); + _singleton->publish_tf(transform_msg, /* is_static= */ true); + // Publish a separate pose message with covariance + geometry_msgs::msg::PoseWithCovarianceStamped pose_msg; + pose_msg.header.stamp = transform_msg.header.stamp; + pose_msg.header.frame_id = serial; + pose_msg.pose.pose.orientation = transform_msg.transform.rotation; + pose_msg.pose.pose.position.x = transform_msg.transform.translation.x; + pose_msg.pose.pose.position.y = transform_msg.transform.translation.y; + pose_msg.pose.pose.position.z = transform_msg.transform.translation.z; + pose_msg.pose.covariance.fill(0.0); + pose_msg.pose.covariance[0] = kPosSigma_meters * kPosSigma_meters; + pose_msg.pose.covariance[7] = kPosSigma_meters * kPosSigma_meters; + pose_msg.pose.covariance[14] = kPosSigma_meters * kPosSigma_meters; + pose_msg.pose.covariance[21] = kRotSigma_radians * kRotSigma_radians; + pose_msg.pose.covariance[28] = kRotSigma_radians * kRotSigma_radians; + pose_msg.pose.covariance[35] = kRotSigma_radians * kRotSigma_radians; + _singleton->publish_lighthouse_pose(pose_msg); + } +} + +// Callback for tracked object pose +static void tracker_pose_func( + SurviveObject * so, survive_long_timecode rawtime, const SurvivePose * pose) +{ + survive_default_pose_process(so, rawtime, pose); + if (_singleton && pose) { + // Get the precise time + FLT timecode = SurviveSensorActivations_runtime( + &so->activations, so->OutPose_timecode) / FLT(1e6); + // Package up a transform + geometry_msgs::msg::TransformStamped transform_msg; + transform_msg.header.stamp = _singleton->get_ros_time(timecode); + transform_msg.header.frame_id = _singleton->get_tracking_frame(); + transform_msg.child_frame_id = std::string(so->serial_number); + 
ros_from_pose(&transform_msg.transform, *pose); + _singleton->publish_tf(transform_msg, /* is_static= */ false); + // Publish a separate pose message with covariance + geometry_msgs::msg::PoseWithCovarianceStamped pose_msg; + pose_msg.header.stamp = transform_msg.header.stamp; + pose_msg.header.frame_id = std::string(so->serial_number); + pose_msg.pose.pose.orientation = transform_msg.transform.rotation; + pose_msg.pose.pose.position.x = transform_msg.transform.translation.x; + pose_msg.pose.pose.position.y = transform_msg.transform.translation.y; + pose_msg.pose.pose.position.z = transform_msg.transform.translation.z; + pose_msg.pose.covariance.fill(0.0); + pose_msg.pose.covariance[0] = kPosSigma_meters * kPosSigma_meters; + pose_msg.pose.covariance[7] = kPosSigma_meters * kPosSigma_meters; + pose_msg.pose.covariance[14] = kPosSigma_meters * kPosSigma_meters; + pose_msg.pose.covariance[21] = kRotSigma_radians * kRotSigma_radians; + pose_msg.pose.covariance[28] = kRotSigma_radians * kRotSigma_radians; + pose_msg.pose.covariance[35] = kRotSigma_radians * kRotSigma_radians; + _singleton->publish_tracker_pose(pose_msg); + } +} + +namespace libsurvive_ros2 +{ + +DriverComponent::DriverComponent(const rclcpp::NodeOptions & options) +: Node("libsurvive_ros2_driver_node", options), + ctx_(nullptr), + tf_broadcaster_(std::make_unique(*this)), + tf_static_broadcaster_(std::make_unique(*this)) +{ + // Store the instance globally to be used by a C callback. + _singleton = this; + + // Low-level driver + this->declare_parameter("driver_args", ""); + this->get_parameter("driver_args", driver_args_); + this->declare_parameter("driver_config_in", "~/.config/libsurvive/config.json"); + this->get_parameter("driver_config_in", driver_config_in_); + this->declare_parameter("driver_config_out", "~/.config/libsurvive/config.json"); + this->get_parameter("driver_config_out", driver_config_out_); + + // General options + this->declare_parameter("tracking_frame", "libsurvive_frame"); + this->get_parameter("tracking_frame", tracking_frame_); + this->declare_parameter("recalibrate", false); + this->get_parameter("recalibrate", recalibrate_); + + // TODO(asymingt) this might not play very well with composable node inter-process + // communication. So, we might want to avoid it until everything else is ready. + rclcpp::QoS qos_profile(10); + // qos_profile.durability_volatile(); + + // Setup topics for publishing + imu_publisher_ = + this->create_publisher("imu", qos_profile); + button_publisher_ = + this->create_publisher("button", qos_profile); + angle_publisher_ = + this->create_publisher("angle", qos_profile); + lighthouse_publisher_ = + this->create_publisher("lighthouse", qos_profile); + tracker_publisher_ = + this->create_publisher("tracker", qos_profile); + tracker_pose_publisher_ = + this->create_publisher( + "pose/tracker", + qos_profile); + lighthouse_pose_publisher_ = + this->create_publisher( + "pose/lighthouse", + qos_profile); + + // Callback timer for republishing lighthouse and tracker information. We do this + // because composable nodes don't support transient local topics, and so if the + // poser joins after the tracker info is published, it will be lost. + timer_ = this->create_wall_timer(1s, std::bind(&DriverComponent::timer_callback, this)); + + // Setup driver parameters. 
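+  // The option list handed to survive_init() mimics a command line: the node
+  // name first, then --init-configfile / --configfile pointing at the input and
+  // output configuration paths, and finally --force-calibrate or
+  // --disable-calibrate depending on the "recalibrate" parameter.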
+ std::string driver_args; + std::vector args; + std::stringstream driver_ss(driver_args); + std::string token; + args.emplace_back(this->get_name()); + args.emplace_back("--init-configfile"); + args.emplace_back(driver_config_in_.c_str()); + args.emplace_back("--configfile"); + args.emplace_back(driver_config_out_.c_str()); + if (recalibrate_) { + args.emplace_back("--force-calibrate"); + } else { + args.emplace_back("--disable-calibrate"); + } + while (getline(driver_ss, token, ' ')) { + args.emplace_back(token.c_str()); + } + + // Try and initialize survive with the arguments supplied. + ctx_ = survive_init(args.size(), const_cast(args.data())); + if (ctx_ == nullptr) { + RCLCPP_FATAL(this->get_logger(), "Could not initialize the libsurvive context"); + return; + } + + // Initialize the driver library. + survive_startup(ctx_); + + // Setup callback for reading button data. + survive_install_button_fn(ctx_, button_func); + + // Setup callbacks for tracked objects and lighthouse poses + survive_install_pose_fn(ctx_, tracker_pose_func); + survive_install_lighthouse_pose_fn(ctx_, lighthouse_pose_func); + + // Setup callbacks for raw data. + survive_install_imu_fn(ctx_, imu_func); + survive_install_sweep_angle_fn(ctx_, sweep_angle_func); + survive_install_ootx_received_fn(ctx_, ootx_received_func); + survive_install_config_fn(ctx_, config_func); + + // Start the work thread + worker_thread_ = std::thread(&DriverComponent::work, this); +} + +DriverComponent::~DriverComponent() +{ + RCLCPP_INFO(this->get_logger(), "Cleaning up."); + worker_thread_.join(); + + RCLCPP_INFO(this->get_logger(), "Shutting down libsurvive driver"); + if (ctx_) { + survive_close(ctx_); + } + + RCLCPP_INFO(this->get_logger(), "Clearing singleton instance"); + _singleton = nullptr; +} + +void DriverComponent::add_or_update_lighthouse(const libsurvive_ros2::msg::Lighthouse & msg) +{ + this->lighthouses_.emplace(msg.header.frame_id, msg); +} + +void DriverComponent::add_or_update_tracker(const libsurvive_ros2::msg::Tracker & msg) +{ + this->trackers_.emplace(msg.header.frame_id, msg); +} + +void DriverComponent::timer_callback() +{ + for (const auto & [name, msg] : this->lighthouses_) { + this->publish_lighthouse(msg); + } + for (const auto & [name, msg] : this->trackers_) { + this->publish_tracker(msg); + } +} + +rclcpp::Time DriverComponent::get_ros_time(FLT timecode) +{ + return rclcpp::Time(0, 0) + rclcpp::Duration(std::chrono::duration(timecode)); +} + +void DriverComponent::publish_imu(const sensor_msgs::msg::Imu & msg) +{ + if (imu_publisher_) { + imu_publisher_->publish(msg); + } +} + +void DriverComponent::publish_angle(const libsurvive_ros2::msg::Angle & msg) +{ + if (angle_publisher_) { + angle_publisher_->publish(msg); + } +} + +void DriverComponent::publish_lighthouse(const libsurvive_ros2::msg::Lighthouse & msg) +{ + if (lighthouse_publisher_) { + lighthouse_publisher_->publish(msg); + } +} + +void DriverComponent::publish_tracker(const libsurvive_ros2::msg::Tracker & msg) +{ + if (tracker_publisher_) { + tracker_publisher_->publish(msg); + } +} + +void DriverComponent::publish_button(const sensor_msgs::msg::Joy & msg) +{ + if (button_publisher_) { + button_publisher_->publish(msg); + } +} + +void DriverComponent::publish_tracker_pose( + const geometry_msgs::msg::PoseWithCovarianceStamped & msg) +{ + if (tracker_pose_publisher_) { + tracker_pose_publisher_->publish(msg); + } +} + +void DriverComponent::publish_lighthouse_pose( + const geometry_msgs::msg::PoseWithCovarianceStamped & msg) +{ + if 
(lighthouse_pose_publisher_) { + lighthouse_pose_publisher_->publish(msg); + } +} + +void DriverComponent::publish_tf(const geometry_msgs::msg::TransformStamped & msg, bool is_static) +{ + if (is_static) { + if (tf_static_broadcaster_) { + tf_static_broadcaster_->sendTransform(msg); + } + } else { + if (tf_broadcaster_) { + tf_broadcaster_->sendTransform(msg); + } + } +} + +const std::string & DriverComponent::get_tracking_frame() const +{ + return tracking_frame_; +} + +void DriverComponent::work() +{ + RCLCPP_INFO(this->get_logger(), "Start listening for events.."); + while (rclcpp::ok() && (survive_poll(ctx_) == 0)) {} +} + +} // namespace libsurvive_ros2 + +// Register the component with class_loader. +RCLCPP_COMPONENTS_REGISTER_NODE(libsurvive_ros2::DriverComponent) diff --git a/src/driver_node.cpp b/src/driver_node.cpp new file mode 100644 index 0000000..568467c --- /dev/null +++ b/src/driver_node.cpp @@ -0,0 +1,39 @@ +// Copyright 2022 Andrew Symington +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +// THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. + +// C++ system +#include + +// Others +#include "libsurvive_ros2/driver_component.hpp" +#include "rclcpp/rclcpp.hpp" + + +// Main entry point of application. +int main(int argc, char * argv[]) +{ + rclcpp::init(argc, argv); + auto node = std::make_shared(rclcpp::NodeOptions{}); + rclcpp::executors::SingleThreadedExecutor exec; + exec.add_node(node); + exec.spin(); + rclcpp::shutdown(); + return 0; +} diff --git a/src/poser_component.cpp b/src/poser_component.cpp new file mode 100644 index 0000000..f353c02 --- /dev/null +++ b/src/poser_component.cpp @@ -0,0 +1,516 @@ +// Copyright 2023 Andrew Symington +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL +// THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. + +// This is included as a reminder of the frame names. +// +// g : the global frame +// l : the lighthouse frame +// s : the tracker "sensor" frame (the origin about which sensors are located) +// h : the tracker "head" frame (the bolt on the back of the tracker) +// i : the tracker "imu" frame (the IMU origin) +// b : the origin of a rigid body to which a tracker is attached + +#include "libsurvive_ros2/poser_component.hpp" +#include "rclcpp_components/register_node_macro.hpp" + +#include "yaml-cpp/yaml.h" + +using namespace std::chrono_literals; +using std::placeholders::_1; + +namespace libsurvive_ros2 +{ + +// IMU integration uncertainties +const double kGravity = 9.81; +const double kDeltaT = 0.004; // 4ms or 250Hz +const double kAccelSigma = 0.1; +const double kAccelDriftSigma = 0.1; +const double kOmegaSigma = 0.1; +const double kOmegaDriftSigma = 0.1; +const double kIntegrationSigma = 0.1; + +// Time between bias states +const Timestamp kImuBiasTimeNs = 1e9; + +// Helper functions + +Timestamp gtsam_from_ros(const builtin_interfaces::msg::Time & msg) +{ + return rclcpp::Time(msg).nanoseconds(); +} + +builtin_interfaces::msg::Time ros_from_gtsam(Timestamp stamp) +{ + return rclcpp::Time(stamp); +} + +gtsam::Pose3 gtsam_from_ros(const geometry_msgs::msg::Pose & pose) +{ + return gtsam::Pose3( + gtsam::Rot3::Quaternion( + pose.orientation.w, + pose.orientation.x, + pose.orientation.y, + pose.orientation.z), + gtsam::Point3( + pose.position.x, + pose.position.y, + pose.position.z)); +} + +geometry_msgs::msg::Pose ros_from_gtsam(const gtsam::Pose3 & pose) +{ + geometry_msgs::msg::Pose msg; + msg.position.x = pose.translation().x(); + msg.position.y = pose.translation().y(); + msg.position.z = pose.translation().z(); + gtsam::Quaternion quaternion = pose.rotation().toQuaternion(); + msg.orientation.w = quaternion.w(); + msg.orientation.x = quaternion.x(); + msg.orientation.y = quaternion.y(); + msg.orientation.z = quaternion.z(); + return msg; +} + +gtsam::noiseModel::Gaussian::shared_ptr gtsam_from_ros(const std::array & msg) +{ + gtsam::Matrix6 cov; + cov << msg[21], msg[22], msg[23], msg[18], msg[19], msg[20], // Rx + msg[27], msg[28], msg[29], msg[24], msg[25], msg[26], // Ry + msg[33], msg[34], msg[35], msg[30], msg[31], msg[32], // Rz + msg[3], msg[4], msg[5], msg[0], msg[1], msg[2], // Tx + msg[9], msg[10], msg[11], msg[6], msg[7], msg[8], // Ty + msg[15], msg[16], msg[17], msg[12], msg[13], msg[14]; // Tz + return gtsam::noiseModel::Gaussian::Covariance(cov); +} + +std::array ros_from_gtsam(const gtsam::Matrix & cov) +{ + return { + cov(3, 3), cov(3, 4), cov(3, 5), cov(3, 0), cov(3, 1), cov(3, 2), // Tx + cov(4, 3), cov(4, 4), cov(4, 5), cov(4, 0), cov(4, 1), cov(4, 2), // Ty + cov(5, 3), cov(5, 4), cov(5, 5), cov(5, 0), cov(5, 1), cov(5, 2), // Tz + cov(0, 3), cov(0, 4), cov(0, 5), cov(0, 0), cov(0, 1), cov(0, 2), // Rx + cov(1, 3), cov(1, 4), cov(1, 5), cov(1, 0), cov(1, 1), cov(1, 2), // Ry + cov(2, 3), cov(2, 4), cov(2, 5), cov(2, 0), cov(2, 1), cov(2, 2), // Rz + }; +} + +gtsam::Point3 gtsam_from_ros(const geometry_msgs::msg::Vector3 & msg) +{ + return gtsam::Point3(msg.x, msg.y, msg.z); +} + +geometry_msgs::msg::Vector3 ros_from_gtsam(const gtsam::Point3 & point3) +{ + geometry_msgs::msg::Vector3 msg; 
+ msg.x = point3.x(); + msg.y = point3.y(); + msg.z = point3.z(); + return msg; +} + +PoserComponent::PoserComponent(const rclcpp::NodeOptions & options) +: Node("libsurvive_ros2_poser_node", options), + next_available_key_(0), + tf_broadcaster_(std::make_unique(*this)) +{ + std::string meta_config; + this->declare_parameter("meta_config", "~/.config/libsurvive/poser.yaml"); + this->get_parameter("meta_config", meta_config); + this->declare_parameter("tracking_frame", "libsurvive_frame"); + this->get_parameter("tracking_frame", tracking_frame_); + + // Example: poser.yaml + // ------------------- + // rigid_bodies: + // - body_id: "rigid_body/wand" + // trackers: + // - tracker_id: LHR-74EFB987 + // bTh: [0.0, 0.15, 0.0, 1.0, 0.0, 0.0, 0.0] + // - tracker_id: LHR-933F150A + // bTh: [0.0, -0.15, 0.0, 1.0, 0.0, 0.0, 0.0] + YAML::Node config = YAML::LoadFile(meta_config); + for (auto body : config["rigid_bodies"]) { + std::string body_id = body["body_id"].as(); + for (auto tracker : body["trackers"]) { + std::string tracker_id = tracker["tracker_id"].as(); + std::vector bTh = tracker["bTh"].as>(); + id_to_body_info_[body_id].bTh[tracker_id] = gtsam::Pose3( + gtsam::Rot3::Quaternion(bTh[3], bTh[4], bTh[5], bTh[6]), + gtsam::Point3(bTh[0], bTh[1], bTh[2])); + } + } + + // TODO(asymingt) this might not play very well with composable node inter-process + // communication. So, we might want to avoid it until everything else is ready. + rclcpp::QoS qos_profile(10); + // qos_profile.durability_volatile(); + + // Setup low-level driver message subscribers. + tracker_subscriber_ = this->create_subscription( + "tracker", qos_profile, std::bind(&PoserComponent::add_tracker, this, _1)); + lighthouse_subscriber_ = this->create_subscription( + "lighthouse", qos_profile, std::bind(&PoserComponent::add_lighthouse, this, _1)); + imu_subscriber_ = this->create_subscription( + "imu", qos_profile, std::bind(&PoserComponent::add_imu, this, _1)); + angle_subscriber_ = this->create_subscription( + "angle", qos_profile, std::bind(&PoserComponent::add_angle, this, _1)); + tracker_pose_subscriber_ = + this->create_subscription( + "pose/tracker", qos_profile, std::bind(&PoserComponent::add_tracker_pose, this, _1)); + lighthouse_pose_subscriber_ = + this->create_subscription( + "pose/lighthouse", qos_profile, std::bind(&PoserComponent::add_lighthouse_pose, this, _1)); + + // Setup refined body frame pose publisher + body_pose_publisher_ = + this->create_publisher("pose/body", qos_profile); + + // Add a timer to extract solution and publish pose + timer_ = this->create_wall_timer( + 100ms, std::bind(&PoserComponent::solution_callback, this)); +} + +PoserComponent::~PoserComponent() +{ + RCLCPP_INFO(this->get_logger(), "Destructor called"); +} + +void PoserComponent::add_angle(const libsurvive_ros2::msg::Angle & msg) +{ + /* TODO fix this -- might need a CustomFactor? + + // Only accept angle measurements from trackers already inserted into the graph. + const std::string & tracker_id = msg.header.frame_id; + if (id_to_tracker_info_.count(tracker_id) == 0) { + return; + } + const Timestamp stamp = rclcpp::Time(msg.header.stamp).nanoseconds(); + auto & tracker_info = id_to_tracker_info_[tracker_id]; + auto & body_info = id_to_body_info_[tracker_info.body_id]; + + // Find the closest time lower than the given stamp in the trajectory. 
+  auto gTb_iter = body_info.gTb.lower_bound(stamp);
+  if (gTb_iter == body_info.gTb.end()) {
+    RCLCPP_WARN_STREAM(this->get_logger(), "No matching timestamp found");
+    return;
+  }
+
+  // Add a factor constraining the adjacent poses and velocities
+  graph_.add(gtsam::BearingFactor(
+    body_info.gTb[tracker_info.last_imu_stamp],   // last pose
+    body_info.g_V[tracker_info.last_imu_stamp],   // last velocity
+    tracker_info.preintegrator));                 // preintegrator
+
+  */
+}
+
+void PoserComponent::add_imu(const sensor_msgs::msg::Imu & msg)
+{
+  // Only accept IMU measurements from trackers already inserted into the graph.
+  const std::string & tracker_id = msg.header.frame_id;
+  if (!id_to_tracker_info_.count(tracker_id)) {
+    return;
+  }
+  const Timestamp stamp_curr = gtsam_from_ros(msg.header.stamp);
+  auto & tracker_info = id_to_tracker_info_[tracker_id];
+  auto & body_info = id_to_body_info_[tracker_info.body_id];
+
+  // Extract the linear acceleration and correct for constant bias / scale errors.
+  gtsam::Vector i_accel(3);
+  i_accel <<
+    tracker_info.accel_scale[0] * msg.linear_acceleration.x + tracker_info.accel_bias[0],
+    tracker_info.accel_scale[1] * msg.linear_acceleration.y + tracker_info.accel_bias[1],
+    tracker_info.accel_scale[2] * msg.linear_acceleration.z + tracker_info.accel_bias[2];
+
+  // Now convert from units of g to m/s^2.
+  i_accel *= kGravity;
+
+  // Extract the angular velocity and correct for constant bias / scale errors.
+  gtsam::Vector i_omega(3);
+  i_omega <<
+    tracker_info.omega_scale[0] * msg.angular_velocity.x + tracker_info.omega_bias[0],
+    tracker_info.omega_scale[1] * msg.angular_velocity.y + tracker_info.omega_bias[1],
+    tracker_info.omega_scale[2] * msg.angular_velocity.z + tracker_info.omega_bias[2];
+
+  // Add the pre-integrated IMU measurement -- take note that the IMU is aware of
+  // the bTi fixed transform and will correct for it as part of the model.
+  tracker_info.preintegrator->integrateMeasurement(i_accel, i_omega, kDeltaT);
+
+  // If the bias has not been set or needs to be reset, then let's add one.
+  if (!tracker_info.b_B.size() ||
+    (stamp_curr - tracker_info.b_B.rbegin()->first) > kImuBiasTimeNs)
+  {
+    // Add a pose state, provided another IMU has not already set one for the
+    // current timestamp. This is possible, but very, very unlikely.
+    if (!body_info.gTb.count(stamp_curr)) {
+      body_info.gTb[stamp_curr] = next_available_key_++;
+      auto gTb_obs = gtsam::Pose3(
+        gtsam::Rot3::Quaternion(1.0, 0.0, 0.0, 0.0),
+        gtsam::Point3(0.0, 0.0, 0.0));
+      auto gTb_cov = gtsam::noiseModel::Diagonal::Sigmas(
+        gtsam::Vector6(100.0, 100.0, 100.0, 100.0, 100.0, 100.0));
+      graph_.add(
+        gtsam::PriorFactor<gtsam::Pose3>(
+          body_info.gTb[stamp_curr], gTb_obs, gTb_cov));
+      initial_values_.insert(body_info.gTb[stamp_curr], gTb_obs);
+    }
+
+    // Add a velocity state, provided another IMU has not already set one for the
+    // current timestamp. This is possible, but very, very unlikely.
+    if (!body_info.g_V.count(stamp_curr)) {
+      body_info.g_V[stamp_curr] = next_available_key_++;
+      auto g_V_obs = gtsam::Vector3(0.0, 0.0, 0.0);
+      auto g_V_cov = gtsam::noiseModel::Diagonal::Sigmas(
+        gtsam::Vector3(10.0, 10.0, 10.0));
+      graph_.add(
+        gtsam::PriorFactor<gtsam::Vector3>(
+          body_info.g_V[stamp_curr], g_V_obs, g_V_cov));
+      initial_values_.insert(body_info.g_V[stamp_curr], g_V_obs);
+    }
+
+    // Add an IMU bias state. This will always need to be added, because each IMU
+    // tracks its own bias internally.
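+    // Note: kImuBiasTimeNs is 1e9 ns, so a fresh bias variable is allocated at
+    // most once per second. Chaining the previous bias to the new one with a
+    // BetweenFactor (as done below) is a common GTSAM pattern for modelling a
+    // slowly drifting accelerometer / gyroscope bias, conceptually:
+    //
+    //   b_B(t0) --BetweenFactor--> b_B(t1) --BetweenFactor--> b_B(t2) --> ...
+    //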
+    tracker_info.b_B[stamp_curr] = next_available_key_++;
+    auto b_B_obs = gtsam::imuBias::ConstantBias();
+    auto b_B_cov = gtsam::noiseModel::Diagonal::Sigmas(
+      gtsam::Vector6(
+        kAccelDriftSigma, kAccelDriftSigma, kAccelDriftSigma,
+        kOmegaDriftSigma, kOmegaDriftSigma, kOmegaDriftSigma));
+    graph_.add(
+      gtsam::PriorFactor<gtsam::imuBias::ConstantBias>(
+        tracker_info.b_B[stamp_curr], b_B_obs, b_B_cov));
+    initial_values_.insert(tracker_info.b_B[stamp_curr], b_B_obs);
+
+    // Get the last pose timestamp, if one exists for this body.
+    std::optional<Timestamp> stamp_prev = std::nullopt;
+    if (body_info.gTb.size()) {
+      stamp_prev = body_info.gTb.rbegin()->first;
+    }
+
+    // Get the last bias timestamp, if one exists for this IMU.
+    std::optional<Timestamp> stamp_bias = std::nullopt;
+    if (tracker_info.b_B.size()) {
+      stamp_bias = tracker_info.b_B.rbegin()->first;
+    }
+
+    // Only add between factors for IMU measurements
+    if (stamp_prev && stamp_bias) {
+      graph_.add(
+        gtsam::ImuFactor(
+          body_info.gTb[*stamp_prev],      // last pose
+          body_info.g_V[*stamp_prev],      // last velocity
+          body_info.gTb[stamp_curr],       // current pose
+          body_info.g_V[stamp_curr],       // current velocity
+          tracker_info.b_B[*stamp_bias],   // bias state
+          *tracker_info.preintegrator));   // preintegrator
+      graph_.add(
+        gtsam::BetweenFactor<gtsam::imuBias::ConstantBias>(
+          tracker_info.b_B[*stamp_bias],
+          tracker_info.b_B[stamp_curr],
+          b_B_obs,
+          b_B_cov));
+    }
+
+    // Reset the IMU integrator to reflect that we have a new preintegration period.
+    tracker_info.preintegrator->resetIntegration();
+  }
+}
+
+void PoserComponent::add_tracker(const libsurvive_ros2::msg::Tracker & msg)
+{
+  const std::string & tracker_id = msg.header.frame_id;
+
+  // Don't re-add a tracker.
+  if (id_to_tracker_info_.count(tracker_id)) {
+    return;
+  }
+
+  // Find the extrinsics for this tracker
+  std::optional<std::string> body_id;
+  for (const auto & [id, info] : id_to_body_info_) {
+    if (info.bTh.count(tracker_id)) {
+      body_id = id;
+    }
+  }
+  if (!body_id) {
+    RCLCPP_ERROR_STREAM(this->get_logger(), "No extrinsics for " << tracker_id);
+    return;
+  }
+  RCLCPP_INFO_STREAM(this->get_logger(), "Adding tracker " << tracker_id);
+
+  // If we get here, then we can add this tracker because it has a body
+  auto & tracker_info = id_to_tracker_info_[tracker_id];
+  tracker_info.body_id = *body_id;
+
+  // Get a reference to the body info
+  auto & body_info = id_to_body_info_[tracker_info.body_id];
+
+  // Fixed imu bias and scale terms.
+  tracker_info.accel_scale = gtsam_from_ros(msg.accel_scale);
+  tracker_info.accel_bias = gtsam_from_ros(msg.accel_bias);
+  tracker_info.omega_scale = gtsam_from_ros(msg.omega_scale);
+  tracker_info.omega_bias = gtsam_from_ros(msg.omega_bias);
+
+  // Fixed transforms.
+  tracker_info.tTi = gtsam_from_ros(msg.tracker_from_imu);
+  tracker_info.tTh = gtsam_from_ros(msg.tracker_from_head);
+
+  // Sensor locations and normals.
+  for (size_t k = 0; k < msg.channels.size(); k++) {
+    uint8_t channel = msg.channels[k];
+    tracker_info.t_sensors[channel] = std::make_pair(
+      gtsam_from_ros(msg.points[k]), gtsam_from_ros(msg.normals[k]));
+  }
+
+  // The IMU factor needs to know the pose of the IMU sensor in the body frame in
+  // order to correct for centripetal and coriolis terms. Calculate this now.
+  const auto & tTi = tracker_info.tTi;
+  const auto & hTt = tracker_info.tTh.inverse();
+  const auto & bTh = body_info.bTh[msg.header.frame_id];
+
+  // Set up the IMU pre-integrator for a Z-up world frame (e.g. MakeSharedU).
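+  // Note: MakeSharedU(kGravity) assumes a Z-up navigation frame with gravity of
+  // (0, 0, -kGravity), and setBodyPSensor() takes the pose of the IMU expressed
+  // in the body frame -- composed here as bTh * hTt * tTi = bTi -- so that raw
+  // measurements are mapped into the body frame during preintegration.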
+  auto params = gtsam::PreintegrationParams::MakeSharedU(kGravity);
+  params->setBodyPSensor(bTh * hTt * tTi);
+  params->setAccelerometerCovariance(gtsam::I_3x3 * (kAccelSigma * kAccelSigma));
+  params->setGyroscopeCovariance(gtsam::I_3x3 * (kOmegaSigma * kOmegaSigma));
+  params->setIntegrationCovariance(gtsam::I_3x3 * (kIntegrationSigma * kIntegrationSigma));
+  params->setUse2ndOrderCoriolis(false);
+  params->setOmegaCoriolis(gtsam::Vector3(0, 0, 0));
+  tracker_info.preintegrator =
+    std::make_shared<gtsam::PreintegratedImuMeasurements>(params);
+}
+
+void PoserComponent::add_lighthouse(const libsurvive_ros2::msg::Lighthouse & msg)
+{
+  const std::string & lighthouse_id = msg.header.frame_id;
+  if (id_to_lighthouse_info_.count(lighthouse_id)) {
+    return;
+  }
+  RCLCPP_INFO_STREAM(this->get_logger(), "Adding lighthouse " << lighthouse_id);
+
+  auto & lighthouse_info = id_to_lighthouse_info_[lighthouse_id];
+
+  // This is our initial prior on the base station location given no observations.
+  auto obs_gTl = gtsam::Pose3(
+    gtsam::Rot3::Quaternion(1.0, 0.0, 0.0, 0.0),
+    gtsam::Point3(0.0, 0.0, 0.0));
+  auto cov_gTl = gtsam::noiseModel::Diagonal::Sigmas(
+    gtsam::Vector6(100.0, 100.0, 100.0, 100.0, 100.0, 100.0));
+
+  // Allocate a new variable.
+  lighthouse_info.gTl = next_available_key_++;
+
+  // Set its initial value.
+  initial_values_.insert(lighthouse_info.gTl, obs_gTl);
+
+  // Add a prior on the value.
+  graph_.add(gtsam::PriorFactor<gtsam::Pose3>(lighthouse_info.gTl, obs_gTl, cov_gTl));
+}
+
+void PoserComponent::add_tracker_pose(const geometry_msgs::msg::PoseWithCovarianceStamped & msg)
+{
+  const std::string & tracker_id = msg.header.frame_id;
+  if (!id_to_tracker_info_.count(tracker_id)) {
+    return;
+  }
+  const Timestamp stamp = gtsam_from_ros(msg.header.stamp);
+  auto & tracker_info = this->id_to_tracker_info_[tracker_id];
+  auto & body_info = this->id_to_body_info_[tracker_info.body_id];
+
+  // We can't add an observation for a state that does not exist.
+  auto gTb = find_key_for_closest_stamp(body_info.gTb, stamp);
+  if (!gTb) {
+    return;
+  }
+
+  // Add a prior factor on the pose state
+  auto gTt_obs = gtsam_from_ros(msg.pose.pose);
+  auto hTb = body_info.bTh[tracker_id].inverse();
+  auto tTb = tracker_info.tTh * hTb;
+  auto gTb_obs = gTt_obs * tTb;
+  auto gTt_cov = gtsam_from_ros(msg.pose.covariance);
+  auto gTb_cov = gTt_cov;
+  graph_.add(gtsam::PriorFactor<gtsam::Pose3>(*gTb, gTb_obs, gTb_cov));
+}
+
+void PoserComponent::add_lighthouse_pose(const geometry_msgs::msg::PoseWithCovarianceStamped & msg)
+{
+  const std::string & lighthouse_id = msg.header.frame_id;
+  if (!id_to_lighthouse_info_.count(lighthouse_id)) {
+    return;
+  }
+  auto & lighthouse_info = this->id_to_lighthouse_info_[lighthouse_id];
+
+  // Collect observation
+  auto obs_gTl = gtsam_from_ros(msg.pose.pose);
+  auto cov_gTl = gtsam_from_ros(msg.pose.covariance);
+
+  // Add the observation
+  graph_.add(gtsam::PriorFactor<gtsam::Pose3>(lighthouse_info.gTl, obs_gTl, cov_gTl));
+}
+
+void PoserComponent::solution_callback()
+{
+  // Incremental update of the graph using the latest values and factors.
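+  // Note: each iSAM2 update consumes only the *new* factors and initial values
+  // passed to it, which is why graph_ and initial_values_ are emptied at the end
+  // of this callback. If more relinearization were needed, additional
+  // no-argument updates could be issued afterwards, e.g. something like:
+  //
+  //   isam2_.update(graph_, initial_values_);
+  //   isam2_.update();  // optional extra relinearization pass
+  //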
+ isam2_.update(graph_, initial_values_); + + // Print solution + geometry_msgs::msg::PoseWithCovarianceStamped pose_msg; + for (const auto & [body_id, body_info] : id_to_body_info_) { + // RCLCPP_INFO_STREAM(this->get_logger(), "Broadcasting body " << body_id); + auto iter = body_info.gTb.rbegin(); + if (iter != body_info.gTb.rend()) { + // Extract the solution + gtsam::Pose3 pose = isam2_.calculateEstimate(iter->second); + gtsam::Matrix cov = isam2_.marginalCovariance(iter->second); + + // Create a message containing the transform + geometry_msgs::msg::PoseWithCovarianceStamped msg; + msg.header.stamp = ros_from_gtsam(iter->first); + msg.header.frame_id = body_id; + msg.pose.pose = ros_from_gtsam(pose); + msg.pose.covariance = ros_from_gtsam(cov); + body_pose_publisher_->publish(msg); + + // Package up a transform + geometry_msgs::msg::TransformStamped transform_msg; + transform_msg.header.stamp = ros_from_gtsam(iter->first); + transform_msg.header.frame_id = tracking_frame_; + transform_msg.child_frame_id = body_id; + transform_msg.transform.translation.x = msg.pose.pose.position.x; + transform_msg.transform.translation.y = msg.pose.pose.position.y; + transform_msg.transform.translation.z = msg.pose.pose.position.z; + transform_msg.transform.rotation = msg.pose.pose.orientation; + tf_broadcaster_->sendTransform(transform_msg); + } + } + + // Flush both the variables and factors, because they have been added + graph_.resize(0); + initial_values_.clear(); +} + +} // namespace libsurvive_ros2 + +// Register the component with class_loader. +RCLCPP_COMPONENTS_REGISTER_NODE(libsurvive_ros2::PoserComponent) diff --git a/src/node.cpp b/src/poser_node.cpp similarity index 89% rename from src/node.cpp rename to src/poser_node.cpp index 86c4840..70a5890 100644 --- a/src/node.cpp +++ b/src/poser_node.cpp @@ -22,7 +22,7 @@ #include // Others -#include "libsurvive_ros2/component.hpp" +#include "libsurvive_ros2/poser_component.hpp" #include "rclcpp/rclcpp.hpp" @@ -30,8 +30,9 @@ int main(int argc, char * argv[]) { rclcpp::init(argc, argv); + auto node = std::make_shared(rclcpp::NodeOptions{}); rclcpp::executors::SingleThreadedExecutor exec; - exec.add_node(std::make_shared(rclcpp::NodeOptions{})); + exec.add_node(node); exec.spin(); rclcpp::shutdown(); return 0; diff --git a/test/example_poser.yaml b/test/example_poser.yaml new file mode 100644 index 0000000..3f8166f --- /dev/null +++ b/test/example_poser.yaml @@ -0,0 +1,38 @@ +# Copyright 2023 Andrew Symington +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL
+# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+#
+# This file allows you to declare rigid bodies and the trackers attached to
+# them. This is useful if you are trying to track a robot in a large space,
+# where trackers periodically go out of sight of the lighthouses. We therefore
+# rely on redundancy to improve both robustness and accuracy.
+#
+# Note that "poses" describes the bTh transform, which moves a point from
+# the head frame (the frame described as the "Coordinate system" of the
+# vive tracker, centered on the 1/4" bolt), denoted 'h', to the rigid body
+# frame, which we denote 'b'. In the example below we mount trackers on a
+# 30cm ruler "wand", with Y running along the ruler length.
+#
+rigid_bodies:
+  - body_id: "rigid_body/wand"
+    trackers:
+      - tracker_id: LHR-74EFB987
+        bTh: [0.0, 0.15, 0.0, 1.0, 0.0, 0.0, 0.0]
+      - tracker_id: LHR-933F150A
+        bTh: [0.0, -0.15, 0.0, 1.0, 0.0, 0.0, 0.0]
diff --git a/test/example_registration.yaml b/test/example_registration.yaml
new file mode 100644
index 0000000..5786391
--- /dev/null
+++ b/test/example_registration.yaml
@@ -0,0 +1,66 @@
+# Copyright 2023 Andrew Symington
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+#
+# This is the offset of the marker position from the origin of the lighthouse.
+# So, if you use an external system like a theodolite, a precise RTK setup or
+# a motion capture system, this is the offset of its frame w.r.t. the lighthouse.
+#
+# Looking directly at a lighthouse oriented upright, the coordinate frame is
+# a conventional right-handed system with the following properties:
+#
+# > +x points left.
+# > +y points up.
+# > +z points into the base station.
+#
+# In the example below the registration marker is 10cm above the lighthouse.
+#
+# NOTE: We force the id and channel to be redeclared in order to make sure
+# that the markers correspond to this configuration file and to avoid mistakes.
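+#
+# For example (illustrative only, assuming the LHR-hexadecimal(id) convention
+# noted next to the markers below): an id of 384784632 is 0x16EF58F8 in hex, so
+# the corresponding lighthouse would report its serial as LHR-16EF58F8.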
+# +# Note: All poses are expressed in the following order: [x,y,z,qw,qx,qy,qz] +# +lighthouse_from_marker: + position: [0.0, 0.1, 0.0] # 10cm "above" a base station +markers: + lighthouse0: + id: 384784632 # Serial number of lighthouse is LHR-hexidecimal(id) + mode: 4 # This is the same as the channel + position: [-0.000000000000, 1.458985931705, 10.020991301227] + sigma: [0.001, 0.001, 0.001] + lighthouse1: + id: 2179997076 # Serial number of lighthouse is LHR-hexidecimal(id) + mode: 2 # This is the same as the channel + position: [1.351791827768, 2.519027929396, 10.408610236531] + sigma: [0.001, 0.001, 0.001] + lighthouse2: + id: 1974130315 # Serial number of lighthouse is LHR-hexidecimal(id) + mode: 1 # This is the same as the channel + position: [1.780496197943, 2.585230340850, 10.484703390713] + sigma: [0.001, 0.001, 0.001] + lighthouse3: + id: 1749379901 # Serial number of lighthouse is LHR-hexidecimal(id) + mode: 0 # This is the same as the channel + position: [3.024384244020, 2.637668639886, 10.302714251888] + sigma: [0.001, 0.001, 0.001] + lighthouse4: + id: 3590859256 # Serial number of lighthouse is LHR-hexidecimal(id) + mode: 3 # This is the same as the channel + position: [0.779046606866, 2.678838980482, 10.948755064650] + sigma: [0.001, 0.001, 0.001] diff --git a/test/input_config_0_1_2.json b/test/input_config_0_1_2.json new file mode 100644 index 0000000..d1ffd2d --- /dev/null +++ b/test/input_config_0_1_2.json @@ -0,0 +1,60 @@ +"v":"0", +"poser":"MPFIT", +"disambiguator":"StateBased", +"configed-lighthouse-gen":"2", +"ootx-ignore-sync-error":"0", +"floor-offset":"-0.001784126158" +"lighthouse0":{ +"index":"0", +"id":"384784632", +"mode":"4", +"pose":["-0.000000000000","1.458985931705","0.020991301227","0.118739016806","0.151070461064","-0.759929442802","-0.620955717942"], +"variance":["0.000001535856","0.000000443934","0.000009292485","0.000227927692","0.000049545983","0.000213264360"], +"unlock_count":"1", +"accel":["2.000000000000","127.000000000000","-24.000000000000"], +"fcalphase":["0.000000000000","-0.005214691162"], +"fcaltilt":["-0.048187255859","0.045806884766"], +"fcalcurve":["-0.022827148438","0.124389648438"], +"fcalgibpha":["0.479248046875","0.283935546875"], +"fcalgibmag":["0.002111434937","-0.002265930176"], +"fcalogeephase":["1.112304687500","2.271484375000"], +"fcalogeemag":["-0.391601562500","-0.382080078125"], +"OOTXSet":"1", +"PositionSet":"1" +} +"lighthouse1":{ +"index":"1", +"id":"2179997076", +"mode":"2", +"pose":["1.351791827768","2.519027929396","0.408610236531","-0.093801073883","-0.080356889779","-0.704925285958","-0.698444321346"], +"variance":["0.000035365565","0.000020057002","0.000805542552","0.001320795222","0.000161485072","0.001348722142"], +"unlock_count":"1", +"accel":["1.000000000000","127.000000000000","0.000000000000"], +"fcalphase":["0.000000000000","-0.011276245117"], +"fcaltilt":["-0.052947998047","0.043518066406"], +"fcalcurve":["0.736328125000","0.268310546875"], +"fcalgibpha":["3.072265625000","0.745117187500"], +"fcalgibmag":["-0.003021240234","0.000689983368"], +"fcalogeephase":["0.818359375000","1.488281250000"], +"fcalogeemag":["-0.374023437500","-0.307617187500"], +"OOTXSet":"1", +"PositionSet":"1" +} +"lighthouse2":{ +"index":"2", +"id":"1974130315", +"mode":"1", +"pose":["1.780496197943","2.585230340850","0.484703390713","-0.133537220306","-0.103533151525","-0.696177156412","-0.697700554836"], +"variance":["0.000043273352","0.000009902823","0.000566890502","0.000562151938","0.000070668596","0.000606325467"], 
+"unlock_count":"1", +"accel":["-1.000000000000","127.000000000000","1.000000000000"], +"fcalphase":["0.000000000000","0.000309944153"], +"fcaltilt":["-0.048522949219","0.045379638672"], +"fcalcurve":["0.109558105469","0.485107421875"], +"fcalgibpha":["1.365234375000","2.183593750000"], +"fcalgibmag":["0.009498596191","0.008605957031"], +"fcalogeephase":["1.474609375000","2.023437500000"], +"fcalogeemag":["-0.724609375000","-0.678222656250"], +"OOTXSet":"1", +"PositionSet":"1" +} \ No newline at end of file diff --git a/test/input_config_1_2_3.json b/test/input_config_1_2_3.json new file mode 100644 index 0000000..0dd394b --- /dev/null +++ b/test/input_config_1_2_3.json @@ -0,0 +1,60 @@ +"v":"0", +"poser":"MPFIT", +"disambiguator":"StateBased", +"configed-lighthouse-gen":"2", +"ootx-ignore-sync-error":"0", +"floor-offset":"-0.001784126158" +"lighthouse0":{ +"index":"0", +"id":"2179997076", +"mode":"2", +"pose":["1.351791827768","2.519027929396","0.408610236531","-0.093801073883","-0.080356889779","-0.704925285958","-0.698444321346"], +"variance":["0.000035365565","0.000020057002","0.000805542552","0.001320795222","0.000161485072","0.001348722142"], +"unlock_count":"1", +"accel":["1.000000000000","127.000000000000","0.000000000000"], +"fcalphase":["0.000000000000","-0.011276245117"], +"fcaltilt":["-0.052947998047","0.043518066406"], +"fcalcurve":["0.736328125000","0.268310546875"], +"fcalgibpha":["3.072265625000","0.745117187500"], +"fcalgibmag":["-0.003021240234","0.000689983368"], +"fcalogeephase":["0.818359375000","1.488281250000"], +"fcalogeemag":["-0.374023437500","-0.307617187500"], +"OOTXSet":"1", +"PositionSet":"1" +} +"lighthouse1":{ +"index":"1", +"id":"1974130315", +"mode":"1", +"pose":["1.780496197943","2.585230340850","0.484703390713","-0.133537220306","-0.103533151525","-0.696177156412","-0.697700554836"], +"variance":["0.000043273352","0.000009902823","0.000566890502","0.000562151938","0.000070668596","0.000606325467"], +"unlock_count":"1", +"accel":["-1.000000000000","127.000000000000","1.000000000000"], +"fcalphase":["0.000000000000","0.000309944153"], +"fcaltilt":["-0.048522949219","0.045379638672"], +"fcalcurve":["0.109558105469","0.485107421875"], +"fcalgibpha":["1.365234375000","2.183593750000"], +"fcalgibmag":["0.009498596191","0.008605957031"], +"fcalogeephase":["1.474609375000","2.023437500000"], +"fcalogeemag":["-0.724609375000","-0.678222656250"], +"OOTXSet":"1", +"PositionSet":"1" +} +"lighthouse2":{ +"index":"2", +"id":"1749379901", +"mode":"0", +"pose":["3.024384244020","2.637668639886","0.302714251888","-0.241596318852","-0.227914281895","-0.666014861685","-0.667915041633"], +"variance":["0.000028759981","0.000003398398","0.000888214566","0.000438514645","0.000134327878","0.000047829104"], +"unlock_count":"1", +"accel":["0.000000000000","127.000000000000","2.000000000000"], +"fcalphase":["0.000000000000","-0.003511428833"], +"fcaltilt":["-0.049621582031","0.045410156250"], +"fcalcurve":["0.049346923828","0.314208984375"], +"fcalgibpha":["2.273437500000","2.234375000000"], +"fcalgibmag":["-0.004455566406","-0.002807617188"], +"fcalogeephase":["0.187988281250","1.401367187500"], +"fcalogeemag":["-0.229248046875","-0.134887695312"], +"OOTXSet":"1", +"PositionSet":"1" +} \ No newline at end of file diff --git a/test/input_config_2_3_4.json b/test/input_config_2_3_4.json new file mode 100644 index 0000000..37068b5 --- /dev/null +++ b/test/input_config_2_3_4.json @@ -0,0 +1,60 @@ +"v":"0", +"poser":"MPFIT", +"disambiguator":"StateBased", 
+"configed-lighthouse-gen":"2", +"ootx-ignore-sync-error":"0", +"floor-offset":"-0.001784126158" +"lighthouse0":{ +"index":"0", +"id":"1974130315", +"mode":"1", +"pose":["1.780496197943","2.585230340850","0.484703390713","-0.133537220306","-0.103533151525","-0.696177156412","-0.697700554836"], +"variance":["0.000043273352","0.000009902823","0.000566890502","0.000562151938","0.000070668596","0.000606325467"], +"unlock_count":"1", +"accel":["-1.000000000000","127.000000000000","1.000000000000"], +"fcalphase":["0.000000000000","0.000309944153"], +"fcaltilt":["-0.048522949219","0.045379638672"], +"fcalcurve":["0.109558105469","0.485107421875"], +"fcalgibpha":["1.365234375000","2.183593750000"], +"fcalgibmag":["0.009498596191","0.008605957031"], +"fcalogeephase":["1.474609375000","2.023437500000"], +"fcalogeemag":["-0.724609375000","-0.678222656250"], +"OOTXSet":"1", +"PositionSet":"1" +} +"lighthouse1":{ +"index":"1", +"id":"1749379901", +"mode":"0", +"pose":["3.024384244020","2.637668639886","0.302714251888","-0.241596318852","-0.227914281895","-0.666014861685","-0.667915041633"], +"variance":["0.000028759981","0.000003398398","0.000888214566","0.000438514645","0.000134327878","0.000047829104"], +"unlock_count":"1", +"accel":["0.000000000000","127.000000000000","2.000000000000"], +"fcalphase":["0.000000000000","-0.003511428833"], +"fcaltilt":["-0.049621582031","0.045410156250"], +"fcalcurve":["0.049346923828","0.314208984375"], +"fcalgibpha":["2.273437500000","2.234375000000"], +"fcalgibmag":["-0.004455566406","-0.002807617188"], +"fcalogeephase":["0.187988281250","1.401367187500"], +"fcalogeemag":["-0.229248046875","-0.134887695312"], +"OOTXSet":"1", +"PositionSet":"1" +} +"lighthouse2":{ +"index":"2", +"id":"3590859256", +"mode":"3", +"pose":["0.779046606866","2.678838980482","0.948755064650","-0.074242472202","-0.073850891887","-0.706703380718","-0.699717394933"], +"variance":["0.000000538435","0.000011123554","0.000095102801","0.000483119833","0.000073791653","0.000462186818"], +"unlock_count":"1", +"accel":["0.000000000000","127.000000000000","-1.000000000000"], +"fcalphase":["0.000000000000","-0.003845214844"], +"fcaltilt":["-0.048614501953","0.046630859375"], +"fcalcurve":["0.086425781250","0.200195312500"], +"fcalgibpha":["2.226562500000","2.824218750000"], +"fcalgibmag":["-0.006622314453","-0.004924774170"], +"fcalogeephase":["2.923828125000","0.600585937500"], +"fcalogeemag":["0.236816406250","-0.302490234375"], +"OOTXSet":"1", +"PositionSet":"1" +} \ No newline at end of file diff --git a/test/input_config_7_8_9.json b/test/input_config_7_8_9.json new file mode 100644 index 0000000..0256b5b --- /dev/null +++ b/test/input_config_7_8_9.json @@ -0,0 +1,60 @@ +"v":"0", +"poser":"MPFIT", +"disambiguator":"StateBased", +"configed-lighthouse-gen":"2", +"ootx-ignore-sync-error":"0", +"floor-offset":"-0.001784126158" +"lighthouse0":{ +"index":"0", +"id":"0000000000", +"mode":"7", +"pose":["1.780496197943","2.585230340850","0.484703390713","-0.133537220306","-0.103533151525","-0.696177156412","-0.697700554836"], +"variance":["0.000043273352","0.000009902823","0.000566890502","0.000562151938","0.000070668596","0.000606325467"], +"unlock_count":"1", +"accel":["-1.000000000000","127.000000000000","1.000000000000"], +"fcalphase":["0.000000000000","0.000309944153"], +"fcaltilt":["-0.048522949219","0.045379638672"], +"fcalcurve":["0.109558105469","0.485107421875"], +"fcalgibpha":["1.365234375000","2.183593750000"], +"fcalgibmag":["0.009498596191","0.008605957031"], 
+"fcalogeephase":["1.474609375000","2.023437500000"], +"fcalogeemag":["-0.724609375000","-0.678222656250"], +"OOTXSet":"1", +"PositionSet":"1" +} +"lighthouse1":{ +"index":"1", +"id":"1111111111", +"mode":"8", +"pose":["3.024384244020","2.637668639886","0.302714251888","-0.241596318852","-0.227914281895","-0.666014861685","-0.667915041633"], +"variance":["0.000028759981","0.000003398398","0.000888214566","0.000438514645","0.000134327878","0.000047829104"], +"unlock_count":"1", +"accel":["0.000000000000","127.000000000000","2.000000000000"], +"fcalphase":["0.000000000000","-0.003511428833"], +"fcaltilt":["-0.049621582031","0.045410156250"], +"fcalcurve":["0.049346923828","0.314208984375"], +"fcalgibpha":["2.273437500000","2.234375000000"], +"fcalgibmag":["-0.004455566406","-0.002807617188"], +"fcalogeephase":["0.187988281250","1.401367187500"], +"fcalogeemag":["-0.229248046875","-0.134887695312"], +"OOTXSet":"1", +"PositionSet":"1" +} +"lighthouse2":{ +"index":"2", +"id":"22222222222", +"mode":"9", +"pose":["0.779046606866","2.678838980482","0.948755064650","-0.074242472202","-0.073850891887","-0.706703380718","-0.699717394933"], +"variance":["0.000000538435","0.000011123554","0.000095102801","0.000483119833","0.000073791653","0.000462186818"], +"unlock_count":"1", +"accel":["0.000000000000","127.000000000000","-1.000000000000"], +"fcalphase":["0.000000000000","-0.003845214844"], +"fcaltilt":["-0.048614501953","0.046630859375"], +"fcalcurve":["0.086425781250","0.200195312500"], +"fcalgibpha":["2.226562500000","2.824218750000"], +"fcalgibmag":["-0.006622314453","-0.004924774170"], +"fcalogeephase":["2.923828125000","0.600585937500"], +"fcalogeemag":["0.236816406250","-0.302490234375"], +"OOTXSet":"1", +"PositionSet":"1" +} \ No newline at end of file diff --git a/test/input_config_malformed.json b/test/input_config_malformed.json new file mode 100644 index 0000000..e3e4482 --- /dev/null +++ b/test/input_config_malformed.json @@ -0,0 +1 @@ +This file should not be readable as a config. 
\ No newline at end of file diff --git a/test/output_with_registration.json b/test/output_with_registration.json new file mode 100644 index 0000000..1caf072 --- /dev/null +++ b/test/output_with_registration.json @@ -0,0 +1,296 @@ +"v":"0", +"poser":"MPFIT", +"disambiguator":"StateBased", +"configed-lighthouse-gen":"2", +"ootx-ignore-sync-error":"0", +"floor-offset":"0" +"lighthouse0": { + "index": 0, + "id": "384784632", + "mode": "4", + "pose": [ + "0.000000000", + "1.358985932", + "10.020991301", + "-0.118739017", + "-0.151070461", + "0.759929443", + "0.620955718" + ], + "variance": [ + "0.000000726", + "0.000000996", + "0.000000974", + "0.000005692", + "0.000000481", + "0.000000488" + ], + "unlock_count": "1", + "accel": [ + "2.000000000000", + "127.000000000000", + "-24.000000000000" + ], + "fcalphase": [ + "0.000000000000", + "-0.005214691162" + ], + "fcaltilt": [ + "-0.048187255859", + "0.045806884766" + ], + "fcalcurve": [ + "-0.022827148438", + "0.124389648438" + ], + "fcalgibpha": [ + "0.479248046875", + "0.283935546875" + ], + "fcalgibmag": [ + "0.002111434937", + "-0.002265930176" + ], + "fcalogeephase": [ + "1.112304687500", + "2.271484375000" + ], + "fcalogeemag": [ + "-0.391601562500", + "-0.382080078125" + ], + "OOTXSet": "1", + "PositionSet": "1" +} +"lighthouse1": { + "index": 1, + "id": "2179997076", + "mode": "2", + "pose": [ + "1.351791828", + "2.419027929", + "10.408610237", + "0.093801074", + "0.080356890", + "0.704925286", + "0.698444321" + ], + "variance": [ + "0.000000227", + "0.000000625", + "0.000000683", + "0.000022800", + "0.000000627", + "0.000003618" + ], + "unlock_count": "1", + "accel": [ + "1.000000000000", + "127.000000000000", + "0.000000000000" + ], + "fcalphase": [ + "0.000000000000", + "-0.011276245117" + ], + "fcaltilt": [ + "-0.052947998047", + "0.043518066406" + ], + "fcalcurve": [ + "0.736328125000", + "0.268310546875" + ], + "fcalgibpha": [ + "3.072265625000", + "0.745117187500" + ], + "fcalgibmag": [ + "-0.003021240234", + "0.000689983368" + ], + "fcalogeephase": [ + "0.818359375000", + "1.488281250000" + ], + "fcalogeemag": [ + "-0.374023437500", + "-0.307617187500" + ], + "OOTXSet": "1", + "PositionSet": "1" +} +"lighthouse2": { + "index": 2, + "id": "1974130315", + "mode": "1", + "pose": [ + "1.780496198", + "2.485230341", + "10.484703391", + "0.133537220", + "0.103533152", + "0.696177156", + "0.697700555" + ], + "variance": [ + "0.000000237", + "0.000000651", + "0.000000425", + "0.000013222", + "0.000001169", + "0.000003076" + ], + "unlock_count": "1", + "accel": [ + "-1.000000000000", + "127.000000000000", + "1.000000000000" + ], + "fcalphase": [ + "0.000000000000", + "0.000309944153" + ], + "fcaltilt": [ + "-0.048522949219", + "0.045379638672" + ], + "fcalcurve": [ + "0.109558105469", + "0.485107421875" + ], + "fcalgibpha": [ + "1.365234375000", + "2.183593750000" + ], + "fcalgibmag": [ + "0.009498596191", + "0.008605957031" + ], + "fcalogeephase": [ + "1.474609375000", + "2.023437500000" + ], + "fcalogeemag": [ + "-0.724609375000", + "-0.678222656250" + ], + "OOTXSet": "1", + "PositionSet": "1" +} +"lighthouse3": { + "index": 3, + "id": "1749379901", + "mode": "0", + "pose": [ + "3.024384244", + "2.537668640", + "10.302714252", + "0.241596319", + "0.227914282", + "0.666014862", + "0.667915042" + ], + "variance": [ + "0.000000703", + "0.000000998", + "0.000000616", + "0.000011824", + "0.000001060", + "0.000003858" + ], + "unlock_count": "1", + "accel": [ + "0.000000000000", + "127.000000000000", + "2.000000000000" + ], + "fcalphase": [ + 
"0.000000000000", + "-0.003511428833" + ], + "fcaltilt": [ + "-0.049621582031", + "0.045410156250" + ], + "fcalcurve": [ + "0.049346923828", + "0.314208984375" + ], + "fcalgibpha": [ + "2.273437500000", + "2.234375000000" + ], + "fcalgibmag": [ + "-0.004455566406", + "-0.002807617188" + ], + "fcalogeephase": [ + "0.187988281250", + "1.401367187500" + ], + "fcalogeemag": [ + "-0.229248046875", + "-0.134887695312" + ], + "OOTXSet": "1", + "PositionSet": "1" +} +"lighthouse4": { + "index": 4, + "id": "3590859256", + "mode": "3", + "pose": [ + "0.779046607", + "2.578838980", + "10.948755065", + "0.074242472", + "0.073850892", + "0.706703381", + "0.699717395" + ], + "variance": [ + "0.000000848", + "0.000001000", + "0.000000939", + "0.004435801", + "0.670535812", + "0.000003761" + ], + "unlock_count": "1", + "accel": [ + "0.000000000000", + "127.000000000000", + "-1.000000000000" + ], + "fcalphase": [ + "0.000000000000", + "-0.003845214844" + ], + "fcaltilt": [ + "-0.048614501953", + "0.046630859375" + ], + "fcalcurve": [ + "0.086425781250", + "0.200195312500" + ], + "fcalgibpha": [ + "2.226562500000", + "2.824218750000" + ], + "fcalgibmag": [ + "-0.006622314453", + "-0.004924774170" + ], + "fcalogeephase": [ + "2.923828125000", + "0.600585937500" + ], + "fcalogeemag": [ + "0.236816406250", + "-0.302490234375" + ], + "OOTXSet": "1", + "PositionSet": "1" +} diff --git a/test/output_without_registration.json b/test/output_without_registration.json new file mode 100644 index 0000000..0a281af --- /dev/null +++ b/test/output_without_registration.json @@ -0,0 +1,296 @@ +"v":"0", +"poser":"MPFIT", +"disambiguator":"StateBased", +"configed-lighthouse-gen":"2", +"ootx-ignore-sync-error":"0", +"floor-offset":"0" +"lighthouse0": { + "index": 0, + "id": "384784632", + "mode": "4", + "pose": [ + "-0.000000000", + "-0.000000000", + "-0.000000000", + "1.000000000", + "0.000000000", + "-0.000000000", + "0.000000000" + ], + "variance": [ + "0.000100000", + "0.000100000", + "0.000100000", + "0.000001000", + "0.000001000", + "0.000001000" + ], + "unlock_count": "1", + "accel": [ + "2.000000000000", + "127.000000000000", + "-24.000000000000" + ], + "fcalphase": [ + "0.000000000000", + "-0.005214691162" + ], + "fcaltilt": [ + "-0.048187255859", + "0.045806884766" + ], + "fcalcurve": [ + "-0.022827148438", + "0.124389648438" + ], + "fcalgibpha": [ + "0.479248046875", + "0.283935546875" + ], + "fcalgibmag": [ + "0.002111434937", + "-0.002265930176" + ], + "fcalogeephase": [ + "1.112304687500", + "2.271484375000" + ], + "fcalogeemag": [ + "-0.391601562500", + "-0.382080078125" + ], + "OOTXSet": "1", + "PositionSet": "1" +} +"lighthouse1": { + "index": 1, + "id": "2179997076", + "mode": "2", + "pose": [ + "-1.654452594", + "0.462870039", + "0.387061445", + "0.946119075", + "-0.088411944", + "-0.310396709", + "0.026380049" + ], + "variance": [ + "0.000101982", + "0.000153773", + "0.000265136", + "0.000220471", + "0.000032373", + "0.000006100" + ], + "unlock_count": "1", + "accel": [ + "1.000000000000", + "127.000000000000", + "0.000000000000" + ], + "fcalphase": [ + "0.000000000000", + "-0.011276245117" + ], + "fcaltilt": [ + "-0.052947998047", + "0.043518066406" + ], + "fcalcurve": [ + "0.736328125000", + "0.268310546875" + ], + "fcalgibpha": [ + "3.072265625000", + "0.745117187500" + ], + "fcalgibmag": [ + "-0.003021240234", + "0.000689983368" + ], + "fcalogeephase": [ + "0.818359375000", + "1.488281250000" + ], + "fcalogeemag": [ + "-0.374023437500", + "-0.307617187500" + ], + "OOTXSet": "1", + "PositionSet": "1" +} 
+"lighthouse2": { + "index": 2, + "id": "1974130315", + "mode": "1", + "pose": [ + "-2.077007292", + "0.514326372", + "0.274100665", + "0.930789788", + "-0.090027903", + "-0.353833703", + "0.018084716" + ], + "variance": [ + "0.000102900", + "0.000122291", + "0.000214640", + "0.000251137", + "0.000043050", + "0.000005514" + ], + "unlock_count": "1", + "accel": [ + "-1.000000000000", + "127.000000000000", + "1.000000000000" + ], + "fcalphase": [ + "0.000000000000", + "0.000309944153" + ], + "fcaltilt": [ + "-0.048522949219", + "0.045379638672" + ], + "fcalcurve": [ + "0.109558105469", + "0.485107421875" + ], + "fcalgibpha": [ + "1.365234375000", + "2.183593750000" + ], + "fcalgibmag": [ + "0.009498596191", + "0.008605957031" + ], + "fcalogeephase": [ + "1.474609375000", + "2.023437500000" + ], + "fcalogeemag": [ + "-0.724609375000", + "-0.678222656250" + ], + "OOTXSet": "1", + "PositionSet": "1" +} +"lighthouse3": { + "index": 3, + "id": "1749379901", + "mode": "0", + "pose": [ + "-3.247515281", + "0.243472182", + "-0.099632325", + "0.857751942", + "-0.084566819", + "-0.505105016", + "0.044485754" + ], + "variance": [ + "0.000110702", + "0.001442807", + "0.000139939", + "0.000231860", + "0.000037058", + "0.000032631" + ], + "unlock_count": "1", + "accel": [ + "0.000000000000", + "127.000000000000", + "2.000000000000" + ], + "fcalphase": [ + "0.000000000000", + "-0.003511428833" + ], + "fcaltilt": [ + "-0.049621582031", + "0.045410156250" + ], + "fcalcurve": [ + "0.049346923828", + "0.314208984375" + ], + "fcalgibpha": [ + "2.273437500000", + "2.234375000000" + ], + "fcalgibmag": [ + "-0.004455566406", + "-0.002807617188" + ], + "fcalogeephase": [ + "0.187988281250", + "1.401367187500" + ], + "fcalogeemag": [ + "-0.229248046875", + "-0.134887695312" + ], + "OOTXSet": "1", + "PositionSet": "1" +} +"lighthouse4": { + "index": 4, + "id": "3590859256", + "mode": "3", + "pose": [ + "-1.188121923", + "1.068339474", + "0.634600459", + "0.951566057", + "-0.090457483", + "-0.291897068", + "0.033698430" + ], + "variance": [ + "0.000223733", + "0.129193010", + "0.000423632", + "0.005811014", + "0.838143205", + "0.000011014" + ], + "unlock_count": "1", + "accel": [ + "0.000000000000", + "127.000000000000", + "-1.000000000000" + ], + "fcalphase": [ + "0.000000000000", + "-0.003845214844" + ], + "fcaltilt": [ + "-0.048614501953", + "0.046630859375" + ], + "fcalcurve": [ + "0.086425781250", + "0.200195312500" + ], + "fcalgibpha": [ + "2.226562500000", + "2.824218750000" + ], + "fcalgibmag": [ + "-0.006622314453", + "-0.004924774170" + ], + "fcalogeephase": [ + "2.923828125000", + "0.600585937500" + ], + "fcalogeemag": [ + "0.236816406250", + "-0.302490234375" + ], + "OOTXSet": "1", + "PositionSet": "1" +} diff --git a/test/test_config_tools.py b/test/test_config_tools.py new file mode 100644 index 0000000..a08ce67 --- /dev/null +++ b/test/test_config_tools.py @@ -0,0 +1,108 @@ +# Copyright 2023 Andrew Symington +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. + + +# System includes +import os +import pytest + +# Python API for config merger +from libsurvive_ros2_py.config_tools import ( + ChannelOverlapError, + InputConfigReadError, + OutputConfigWriteError, + config_merge, + config_equal, +) + +# Path of all test fixtures +TEST_FOLDER_PATH = os.path.dirname(__file__) + + +def test_config_merge_without_registration(): + """Shows a good set of configs merge correctly without registration.""" + inputs = [ + os.path.join(TEST_FOLDER_PATH, "input_config_0_1_2.json"), + os.path.join(TEST_FOLDER_PATH, "input_config_1_2_3.json"), + os.path.join(TEST_FOLDER_PATH, "input_config_2_3_4.json"), + ] + output = "merged_without_registration.json" + config = config_merge(inputs=inputs, output=output) + for idx in range(0, 5): + assert f"lighthouse{idx}" in config.keys(), f"missing lighthouse {idx}" + output_truth = os.path.join(TEST_FOLDER_PATH, "output_without_registration.json") + assert config_equal(output, output_truth), "configuration mismatch" + + +def test_config_merge_with_registration(): + """Shows a good set of configs merge correctly with registration.""" + inputs = [ + os.path.join(TEST_FOLDER_PATH, "input_config_0_1_2.json"), + os.path.join(TEST_FOLDER_PATH, "input_config_1_2_3.json"), + os.path.join(TEST_FOLDER_PATH, "input_config_2_3_4.json"), + ] + output = "merged_with_registration.json" + reg_config = os.path.join(TEST_FOLDER_PATH, "example_registration.yaml") + config = config_merge(inputs=inputs, output=output, reg_config=reg_config) + for idx in range(0, 5): + assert f"lighthouse{idx}" in config.keys(), f"missing lighthouse {idx}" + output_truth = os.path.join(TEST_FOLDER_PATH, "output_with_registration.json") + assert config_equal(output, output_truth), "configuration mismatch" + + +def test_malformed_input_config(): + """Add a malformed config file and show that it causes the right error.""" + inputs = [ + os.path.join(TEST_FOLDER_PATH, "input_config_0_1_2.json"), + os.path.join(TEST_FOLDER_PATH, "input_config_1_2_3.json"), + os.path.join(TEST_FOLDER_PATH, "input_config_2_3_4.json"), + os.path.join(TEST_FOLDER_PATH, "input_config_malformed.json"), + ] + output = "merged_with_registration.json" + reg_config = os.path.join(TEST_FOLDER_PATH, "example_registration.yaml") + with pytest.raises(InputConfigReadError, match="input file is not readable"): + config_merge(inputs=inputs, output=output, reg_config=reg_config) + + +def test_non_overlapping_channel_error(): + """Add a non-overlapping config and show that it causes the right error.""" + inputs = [ + os.path.join(TEST_FOLDER_PATH, "input_config_0_1_2.json"), + os.path.join(TEST_FOLDER_PATH, "input_config_1_2_3.json"), + os.path.join(TEST_FOLDER_PATH, "input_config_2_3_4.json"), + os.path.join(TEST_FOLDER_PATH, "input_config_7_8_9.json"), + ] + output = "merged_with_registration.json" + reg_config = os.path.join(TEST_FOLDER_PATH, "example_registration.yaml") + with pytest.raises(ChannelOverlapError, match="channel graph is disconnected"): + config_merge(inputs=inputs, output=output, reg_config=reg_config) + + +def 
test_unreadable_output_file(): + """Shows that a non-writable output configuration location causes the right error.""" + inputs = [ + os.path.join(TEST_FOLDER_PATH, "input_config_0_1_2.json"), + os.path.join(TEST_FOLDER_PATH, "input_config_1_2_3.json"), + os.path.join(TEST_FOLDER_PATH, "input_config_2_3_4.json"), + ] + output = "/root/merged_config.json" + reg_config = os.path.join(TEST_FOLDER_PATH, "example_registration.yaml") + with pytest.raises(OutputConfigWriteError, match="output file is not writable"): + config_merge(inputs=inputs, output=output, reg_config=reg_config) From 0fc03cbfeb4017e9077ecbb4c07a0ac9669d99c9 Mon Sep 17 00:00:00 2001 From: Andrew Symington Date: Wed, 2 Aug 2023 12:12:34 -0700 Subject: [PATCH 02/18] Irwin -> Iron --- .circleci/config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 23acbd9..9d68489 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -52,7 +52,7 @@ workflows: - checkout_build_test: resource_class: arm.medium arch: arm64v8 - ros_distro: irwin + ros_distro: iron - checkout_build_test: resource_class: arm.medium arch: arm64v8 @@ -72,7 +72,7 @@ workflows: - checkout_build_test: resource_class: medium arch: amd64 - ros_distro: irwin + ros_distro: iron - checkout_build_test: resource_class: medium arch: amd64 From f4f59abcf94961bd963eeefe6cb7e86a72669d8b Mon Sep 17 00:00:00 2001 From: Andrew Symington Date: Wed, 2 Aug 2023 12:36:14 -0700 Subject: [PATCH 03/18] remove foxy and galactic support and fix environment --- .circleci/config.yml | 16 ---------------- Dockerfile | 3 ++- package.xml | 1 - 3 files changed, 2 insertions(+), 18 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 9d68489..aac0186 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -37,14 +37,6 @@ jobs: workflows: build: jobs: - - checkout_build_test: - resource_class: arm.medium - arch: arm64v8 - ros_distro: foxy - - checkout_build_test: - resource_class: arm.medium - arch: arm64v8 - ros_distro: galactic - checkout_build_test: resource_class: arm.medium arch: arm64v8 @@ -57,14 +49,6 @@ workflows: resource_class: arm.medium arch: arm64v8 ros_distro: rolling - - checkout_build_test: - resource_class: medium - arch: amd64 - ros_distro: foxy - - checkout_build_test: - resource_class: medium - arch: amd64 - ros_distro: galactic - checkout_build_test: resource_class: medium arch: amd64 diff --git a/Dockerfile b/Dockerfile index 29bb51f..97868b8 100644 --- a/Dockerfile +++ b/Dockerfile @@ -33,11 +33,12 @@ RUN apt-get update && apt-get install -y --no-install-recommends cmake \ freeglut3-dev \ gdb \ + libboost-all-dev \ libatlas-base-dev \ liblapacke-dev \ libopenblas-dev \ + libtbb-dev \ libpcap-dev \ - libsciplot-dev \ libusb-1.0-0-dev \ libx11-dev \ libyaml-cpp-dev \ diff --git a/package.xml b/package.xml index 498aef0..a8b0921 100644 --- a/package.xml +++ b/package.xml @@ -35,7 +35,6 @@ rosidl_default_runtime foxglove_bridge python3-numpy - python3-scipy python3-yaml rosbag2_storage_mcap From 6cf00a2be79454c532665cc73ac782b33d2c005a Mon Sep 17 00:00:00 2001 From: Andrew Symington Date: Thu, 3 Aug 2023 10:20:46 -0700 Subject: [PATCH 04/18] Fix bugs from testing --- launch/libsurvive_ros2.launch.py | 15 ++++++++++----- package.xml | 1 + src/driver_component.cpp | 31 +++++++++++++++++++++---------- 3 files changed, 32 insertions(+), 15 deletions(-) diff --git a/launch/libsurvive_ros2.launch.py b/launch/libsurvive_ros2.launch.py index 020f8b0..b0af7ca 100644 --- 
a/launch/libsurvive_ros2.launch.py +++ b/launch/libsurvive_ros2.launch.py @@ -198,20 +198,25 @@ def generate_launch_description(): ) # For recording all data from the experiment - bag_record_node = ExecuteProcess( - cmd=["ros2", "bag", "record", "-o", BAG_FILE, "-a"], + bag_recorder_node = ExecuteProcess( + cmd=["ros2", "bag", "record", "-s", "mcap", "-o", BAG_FILE, "-a"], condition=IfCondition(LaunchConfiguration("record")), output="log", ) # For recording all data from the experiment - bag_record_node = ExecuteProcess( + bag_player_node = ExecuteProcess( cmd=["ros2", "bag", "play", LaunchConfiguration("replay")], condition=UnlessCondition(enable_driver), output="log", ) return LaunchDescription( - arguments - + [composed_pipeline, non_composed_pipeline, web_bridge_node, bag_record_node] + arguments + [ + composed_pipeline, + non_composed_pipeline, + web_bridge_node, + bag_recorder_node, + bag_player_node + ] ) diff --git a/package.xml b/package.xml index a8b0921..498aef0 100644 --- a/package.xml +++ b/package.xml @@ -35,6 +35,7 @@ rosidl_default_runtime foxglove_bridge python3-numpy + python3-scipy python3-yaml rosbag2_storage_mcap diff --git a/src/driver_component.cpp b/src/driver_component.cpp index ab49187..9a7f030 100644 --- a/src/driver_component.cpp +++ b/src/driver_component.cpp @@ -339,26 +339,37 @@ DriverComponent::DriverComponent(const rclcpp::NodeOptions & options) timer_ = this->create_wall_timer(1s, std::bind(&DriverComponent::timer_callback, this)); // Setup driver parameters. - std::string driver_args; - std::vector args; - std::stringstream driver_ss(driver_args); - std::string token; + std::vector args; args.emplace_back(this->get_name()); args.emplace_back("--init-configfile"); - args.emplace_back(driver_config_in_.c_str()); + args.emplace_back(driver_config_in); args.emplace_back("--configfile"); - args.emplace_back(driver_config_out_.c_str()); + args.emplace_back(driver_config_out_); if (recalibrate_) { args.emplace_back("--force-calibrate"); + args.emplace_back("1"); } else { - args.emplace_back("--disable-calibrate"); + args.emplace_back("--force-calibrate"); + args.emplace_back("0"); } + std::stringstream driver_ss(driver_args_); + std::string token; while (getline(driver_ss, token, ' ')) { - args.emplace_back(token.c_str()); + args.emplace_back(token); } - // Try and initialize survive with the arguments supplied. - ctx_ = survive_init(args.size(), const_cast(args.data())); + // Do an argument conversion, initialize, survive and clean up memory immediately. 
+ int argc = args.size(); + char **argv = new (char*)[args.size()]; + for (int i = 0; i < argc; i++) { + argv[i] = new char[args[i].length()]; + std::strcpy(argv[i], args[i].c_str()); + } + ctx_ = survive_init(argc, argv); + for (int i = 0; i < argc; i++) { + delete argv[i]; + } + delete[] argv; if (ctx_ == nullptr) { RCLCPP_FATAL(this->get_logger(), "Could not initialize the libsurvive context"); return; From 7a81d1e7a1a71e9b3faf860dda4f4deddc3c61bf Mon Sep 17 00:00:00 2001 From: Andrew Symington Date: Thu, 3 Aug 2023 10:41:52 -0700 Subject: [PATCH 05/18] Fix ignores --- .dockerignore | 1 + data/.gitignore | 1 + 2 files changed, 2 insertions(+) create mode 100644 .dockerignore create mode 100644 data/.gitignore diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..6320cd2 --- /dev/null +++ b/.dockerignore @@ -0,0 +1 @@ +data \ No newline at end of file diff --git a/data/.gitignore b/data/.gitignore new file mode 100644 index 0000000..fbf828d --- /dev/null +++ b/data/.gitignore @@ -0,0 +1 @@ +log \ No newline at end of file From f37167d6a65b1f4dadf7d821b3a6449e6201bda3 Mon Sep 17 00:00:00 2001 From: Andrew Symington Date: Thu, 3 Aug 2023 13:08:51 -0700 Subject: [PATCH 06/18] More fixes --- CMakeLists.txt | 10 +++- Dockerfile | 2 +- data/.gitignore | 3 +- docker-compose.yml | 4 +- launch/libsurvive_ros2.launch.py | 39 ++++++++++++---- scripts/bag_parser | 78 ++++++++++++++++++++++++++++++++ scripts/config_merger | 1 - src/driver_component.cpp | 12 +++-- src/driver_poser_null.cpp | 39 ++++++++++++++++ 9 files changed, 167 insertions(+), 21 deletions(-) create mode 100755 scripts/bag_parser create mode 100644 src/driver_poser_null.cpp diff --git a/CMakeLists.txt b/CMakeLists.txt index 098d48f..d86db8b 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -39,6 +39,12 @@ endif() # Set compile options add_compile_options(-Wall -Wextra -Wpedantic) +# Cached compilation +find_program(CCACHE_PROGRAM ccache) +if(CCACHE_PROGRAM) + set_property(GLOBAL PROPERTY RULE_LAUNCH_COMPILE "${CCACHE_PROGRAM}") +endif() + # DEPENDENCIES # Obtain a specific version of the libsurvive low-level driver. 
@@ -104,7 +110,8 @@ rosidl_get_typesupport_target(cpp_typesupport_target # Component add_library(libsurvive_ros2_driver_component SHARED - src/driver_component.cpp) + src/driver_component.cpp + src/driver_poser_null.cpp) add_dependencies(libsurvive_ros2_driver_component libsurvive) target_include_directories(libsurvive_ros2_driver_component PUBLIC @@ -212,6 +219,7 @@ install( install( PROGRAMS + scripts/bag_parser scripts/config_merger DESTINATION lib/${PROJECT_NAME}) diff --git a/Dockerfile b/Dockerfile index 97868b8..c262898 100644 --- a/Dockerfile +++ b/Dockerfile @@ -30,6 +30,7 @@ SHELL ["/bin/bash", "-c"] # Install baseline tools RUN apt-get update && apt-get install -y --no-install-recommends \ build-essential \ + ccache \ cmake \ freeglut3-dev \ gdb \ @@ -76,5 +77,4 @@ source /home/ubuntu/ros2_ws/install/setup.bash \n\ exec \$@" > /home/ubuntu/ros2_ws/entrypoint.sh RUN chmod 755 /home/ubuntu/ros2_ws/entrypoint.sh WORKDIR /home/ubuntu/ros2_ws -ENTRYPOINT [ "/home/ubuntu/ros2_ws/entrypoint.sh" ] diff --git a/data/.gitignore b/data/.gitignore index fbf828d..c96a04f 100644 --- a/data/.gitignore +++ b/data/.gitignore @@ -1 +1,2 @@ -log \ No newline at end of file +* +!.gitignore \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index 3a52e3b..4c1f838 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -27,7 +27,6 @@ version: '3.6' services: libsurvive_ros2: privileged: false - network_mode: bridge device_cgroup_rules: - 'c *:* rmw' volumes: @@ -35,8 +34,7 @@ services: - .:/home/ubuntu/ros2_ws/src/libsurvive_ros2 - ./data:/home/ubuntu/.ros build: . - ports: - - "8765:8765" + network_mode: host working_dir: /home/ubuntu/ros2_ws entrypoint: /home/ubuntu/ros2_ws/entrypoint.sh command: ros2 launch libsurvive_ros2 libsurvive_ros2.launch.py diff --git a/launch/libsurvive_ros2.launch.py b/launch/libsurvive_ros2.launch.py index b0af7ca..8da6480 100644 --- a/launch/libsurvive_ros2.launch.py +++ b/launch/libsurvive_ros2.launch.py @@ -63,10 +63,10 @@ def generate_launch_description(): "composable", default_value="false", description="Record data with rosbag" ), DeclareLaunchArgument( - "record", default_value="false", description="Record data with rosbag" + "record", default_value=BAG_FILE, description="Record data to this bag path" ), DeclareLaunchArgument( - "replay", default_value="", description="Record data with rosbag" + "replay", default_value="", description="Replay data from this bag path" ), # Driver configuration DeclareLaunchArgument( @@ -92,19 +92,29 @@ def generate_launch_description(): ), ] - # If we are replaying, then we should not run the driver + # If we have specified a valid record location, then enable recording + enable_record = PythonExpression( + ["bool('", LaunchConfiguration("record"), "')"] + ) + + # If we are replaying, then we should not run the low-level driver enable_driver = PythonExpression( ["not bool('", LaunchConfiguration("replay"), "')"] ) - # If we are using a poser, then we should launch it in a composable context. + # If have a meta poser config file, then we need to enable the poser enable_poser = PythonExpression( ["bool('", LaunchConfiguration("meta_config"), "')"] ) + # If have a meta poser config file specified, disable the low-level poser. + driver_args = PythonExpression( + ["'-p Null' if bool('", LaunchConfiguration("meta_config"), "') else '-p MPFIT'"] + ) + # Sow we don't have to repeat for composable and non-composable versions. 
driver_parameters = [ - {"driver_args": "--v 100 --lighthouse-gen 2"}, + {"driver_args": driver_args}, {"driver_config_in": LaunchConfiguration("driver_config_in")}, {"driver_config_out": LaunchConfiguration("driver_config_out")}, {"recalibrate": LaunchConfiguration("recalibrate")}, @@ -197,16 +207,25 @@ def generate_launch_description(): output="log", ) - # For recording all data from the experiment + # For recording all data from the experiment. We'll use the MCAP format by + # default in order to keep consistency across ROS2 releases. bag_recorder_node = ExecuteProcess( - cmd=["ros2", "bag", "record", "-s", "mcap", "-o", BAG_FILE, "-a"], - condition=IfCondition(LaunchConfiguration("record")), + cmd=["ros2", "bag", "record", "-s", "mcap", "-o", LaunchConfiguration("record"), "-a"], + condition=IfCondition(enable_record), output="log", ) - # For recording all data from the experiment + # In replay we're going to take the raw data and recalculate the poses and TF2 + # transforms. To avoid interference with the old estimates, we're going to map + # the replayed values to the /old prefix. bag_player_node = ExecuteProcess( - cmd=["ros2", "bag", "play", LaunchConfiguration("replay")], + cmd=["ros2", "bag", "play", "-s", "mcap", LaunchConfiguration("replay"), "--remap", + "/roslog:=/old/roslog", + "/tf:=/old/tf", + "/tf_static:=/old/tf_static", + "/libsurvive/pose/tracker:=/old/pose/tracker", + "/libsurvive/pose/lighthouse:=/old/pose/lighthouse", + ], condition=UnlessCondition(enable_driver), output="log", ) diff --git a/scripts/bag_parser b/scripts/bag_parser new file mode 100755 index 0000000..c0ab957 --- /dev/null +++ b/scripts/bag_parser @@ -0,0 +1,78 @@ +#!/usr/bin/env python3 +# +# Copyright 2023 Andrew Symington +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
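+#
+# Illustrative usage (a sketch; the exact invocation may differ depending on how
+# the script is installed):
+#
+#   ros2 run libsurvive_ros2 bag_parser -b /path/to/recording -n /libsurvive
+#
+# This prints the lighthouse channels and the per-tracker angle counts found in
+# an MCAP bag, which is a quick way to sanity-check a recording before replay.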
+ +import argparse + +import rosbag2_py +from rclpy.serialization import deserialize_message +from rclpy.time import Time +from rosidl_runtime_py.utilities import get_message + +def get_rosbag_options(path, storage_id, serialization_format='cdr'): + """Get the storage and converter options for a fiven rosbag""" + storage_options = rosbag2_py.StorageOptions( + uri=path, storage_id=storage_id) + converter_options = rosbag2_py.ConverterOptions( + input_serialization_format=serialization_format, + output_serialization_format=serialization_format) + return storage_options, converter_options + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("-b", "--bag", type=str, required=True, help="the bag file to input") + parser.add_argument("-n", "--namespace", default='/libsurvive', help="the bag file to output") + args = parser.parse_args() + + storage_options, converter_options = get_rosbag_options(args.bag, 'mcap') + reader = rosbag2_py.SequentialReader() + reader.open(storage_options, converter_options) + + type_map = {} + for topic_metadata in reader.get_all_topics_and_types(): + type_map[topic_metadata.name] = topic_metadata.type + + lighthouses = dict() + trackers = dict() + while reader.has_next(): + (topic, data, time_stamp) = reader.read_next() + if topic == f"{args.namespace}/lighthouse": + msg = deserialize_message(data, get_message(type_map[topic])) + lighthouses[msg.channel] = msg.header.frame_id + if topic == f"{args.namespace}/tracker": + msg = deserialize_message(data, get_message(type_map[topic])) + if msg.header.frame_id not in trackers.keys(): + trackers[msg.header.frame_id] = dict() + if topic == f"{args.namespace}/angle": + msg = deserialize_message(data, get_message(type_map[topic])) + if msg.header.frame_id not in trackers.keys(): + trackers[msg.header.frame_id] = dict() + if msg.channel not in trackers[msg.header.frame_id].keys(): + trackers[msg.header.frame_id][msg.channel] = 0 + trackers[msg.header.frame_id][msg.channel] += 1 + print("Lighthouses:") + for channel in sorted(lighthouses.keys()): + print(f"- [CH{channel}] {lighthouses[channel]}") + print("Trackers:") + for tracker, angles in trackers.items(): + print(f"- {tracker}", angles) + del reader \ No newline at end of file diff --git a/scripts/config_merger b/scripts/config_merger index c19dd27..0070791 100755 --- a/scripts/config_merger +++ b/scripts/config_merger @@ -21,7 +21,6 @@ # THE SOFTWARE. import argparse -import sys from libsurvive_ros2_py.config_tools import config_merge diff --git a/src/driver_component.cpp b/src/driver_component.cpp index 9a7f030..b3d0da6 100644 --- a/src/driver_component.cpp +++ b/src/driver_component.cpp @@ -326,11 +326,11 @@ DriverComponent::DriverComponent(const rclcpp::NodeOptions & options) this->create_publisher("tracker", qos_profile); tracker_pose_publisher_ = this->create_publisher( - "pose/tracker", + "/pose/tracker", qos_profile); lighthouse_pose_publisher_ = this->create_publisher( - "pose/lighthouse", + "/pose/lighthouse", qos_profile); // Callback timer for republishing lighthouse and tracker information. We do this @@ -341,8 +341,12 @@ DriverComponent::DriverComponent(const rclcpp::NodeOptions & options) // Setup driver parameters. 
std::vector args; args.emplace_back(this->get_name()); + args.emplace_back("--v"); + args.emplace_back("100"); + args.emplace_back("--lighthouse-gen"); + args.emplace_back("2"); args.emplace_back("--init-configfile"); - args.emplace_back(driver_config_in); + args.emplace_back(driver_config_in_); args.emplace_back("--configfile"); args.emplace_back(driver_config_out_); if (recalibrate_) { @@ -360,7 +364,7 @@ DriverComponent::DriverComponent(const rclcpp::NodeOptions & options) // Do an argument conversion, initialize, survive and clean up memory immediately. int argc = args.size(); - char **argv = new (char*)[args.size()]; + char **argv = new char*[args.size()]; for (int i = 0; i < argc; i++) { argv[i] = new char[args[i].length()]; std::strcpy(argv[i], args[i].c_str()); diff --git a/src/driver_poser_null.cpp b/src/driver_poser_null.cpp new file mode 100644 index 0000000..773a1fc --- /dev/null +++ b/src/driver_poser_null.cpp @@ -0,0 +1,39 @@ +// Copyright 2023 Andrew Symington +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +// THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. +// +// DESCRIPTION +// This is a poser that does nothing right now. The current 'Dummy' poser in +// the libsurvive repo is meant to do this, but it segfaults. + +#include +#include + +#include "libsurvive/survive.h" + +extern "C" { + +int PoserNull(SurviveObject* /* so */, PoserData* /* pd */) { + // no-op: do nothing with the data. + return 0; +} + +REGISTER_POSER(PoserNull) + +} From fabc042004ba0513b69d01497006732a2e6d02f7 Mon Sep 17 00:00:00 2001 From: Andrew Symington Date: Thu, 3 Aug 2023 16:29:52 -0700 Subject: [PATCH 07/18] Switch to distributed GTSAM version and fix a segfault in the poser --- CMakeLists.txt | 47 +++---- Dockerfile | 61 ++++---- docker-compose.yml | 6 +- include/libsurvive_ros2/poser_component.hpp | 2 + launch/libsurvive_ros2.launch.py | 20 ++- package.xml | 13 +- src/poser_component.cpp | 148 +++++++++++++++----- 7 files changed, 198 insertions(+), 99 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index d86db8b..06f934d 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -59,20 +59,21 @@ externalproject_add(libsurvive ) # Obtain a specific version of the GTSAM estimation software. 
-include(ExternalProject) -externalproject_add(gtsam - GIT_REPOSITORY https://github.com/borglab/gtsam.git - GIT_TAG 621ef2e7c560e6fb06b463769d9cf7bc6f626308 - CMAKE_ARGS - -DCMAKE_BINARY_DIR=${CMAKE_CURRENT_BINARY_DIR} - -DCMAKE_INSTALL_PREFIX=${CMAKE_INSTALL_PREFIX} - -DCMAKE_BUILD_TYPE=Release - -DGTSAM_BUILD_PYTHON=1 -) +# include(ExternalProject) +# externalproject_add(gtsam +# GIT_REPOSITORY https://github.com/borglab/gtsam.git +# GIT_TAG 621ef2e7c560e6fb06b463769d9cf7bc6f626308 +# CMAKE_ARGS +# -DCMAKE_BINARY_DIR=${CMAKE_CURRENT_BINARY_DIR} +# -DCMAKE_INSTALL_PREFIX=${CMAKE_INSTALL_PREFIX} +# -DCMAKE_BUILD_TYPE=Release +# -DGTSAM_BUILD_PYTHON=1 +# ) # Third party libraries find_package(Boost REQUIRED) find_package(Eigen3 REQUIRED) +find_package(GTSAM REQUIRED) find_package(TBB REQUIRED) find_package(yaml-cpp REQUIRED) @@ -148,22 +149,16 @@ ament_target_dependencies(libsurvive_ros2_driver_node rclcpp) # Component add_library(libsurvive_ros2_poser_component SHARED src/poser_component.cpp) -add_dependencies(libsurvive_ros2_poser_component - gtsam) target_include_directories(libsurvive_ros2_poser_component PUBLIC - ${CMAKE_INSTALL_PREFIX}/include + ${GTSAM_INCLUDE_DIR} ${YAML_CPP_INCLUDE_DIRS} ${Boost_INCLUDE_DIRS} ${TBB_INCLUDE_DIRS}) -target_link_directories(libsurvive_ros2_poser_component PUBLIC - ${CMAKE_INSTALL_PREFIX}/lib) target_link_libraries(libsurvive_ros2_poser_component - ${YAML_CPP_LIBRARIES} - ${Boost_LIBRARIES} - ${TBB_LIBRARIES} - "${cpp_typesupport_target}" + gtsam Eigen3::Eigen - -lgtsam) + ${YAML_CPP_LIBRARIES} + "${cpp_typesupport_target}") target_compile_definitions(libsurvive_ros2_poser_component PRIVATE "COMPOSITION_BUILDING_DLL") target_compile_options(libsurvive_ros2_poser_component @@ -190,12 +185,12 @@ ament_target_dependencies(libsurvive_ros2_poser_node rclcpp) # Install python packages -externalproject_get_property(gtsam binary_dir) -install( - DIRECTORY - ${binary_dir}/python/gtsam - DESTINATION - ${PYTHON_INSTALL_DIR}) +# externalproject_get_property(gtsam binary_dir) +# install( +# DIRECTORY +# ${binary_dir}/python/gtsam +# DESTINATION +# ${PYTHON_INSTALL_DIR}) # Install the components diff --git a/Dockerfile b/Dockerfile index c262898..ead7ed8 100644 --- a/Dockerfile +++ b/Dockerfile @@ -28,24 +28,30 @@ FROM ${ARCH}/ros:${ROS_DISTRO}-ros-base SHELL ["/bin/bash", "-c"] # Install baseline tools -RUN apt-get update && apt-get install -y --no-install-recommends \ - build-essential \ - ccache \ - cmake \ - freeglut3-dev \ - gdb \ - libboost-all-dev \ - libatlas-base-dev \ - liblapacke-dev \ - libopenblas-dev \ - libtbb-dev \ - libpcap-dev \ - libusb-1.0-0-dev \ - libx11-dev \ - libyaml-cpp-dev \ - sudo \ - valgrind \ - zlib1g-dev \ +RUN apt-get update && apt-get install -y --no-install-recommends \ + build-essential \ + ccache \ + cmake \ + freeglut3-dev \ + gdb \ + libboost-all-dev \ + libatlas-base-dev \ + liblapacke-dev \ + libopenblas-dev \ + libtbb-dev \ + libpcap-dev \ + libusb-1.0-0-dev \ + libx11-dev \ + libyaml-cpp-dev \ + python3-scipy \ + ros-${ROS_DISTRO}-foxglove-bridge \ + ros-${ROS_DISTRO}-gtsam \ + ros-${ROS_DISTRO}-launch-pytest \ + ros-${ROS_DISTRO}-rosbag2-storage-mcap \ + sudo \ + valgrind \ + xterm \ + zlib1g-dev \ && sudo rm -rf /var/lib/apt/lists/* # Add an 'ubuntu' user with dialout/plugdev access and can use sudo passwordless. @@ -60,13 +66,18 @@ USER ubuntu RUN mkdir -p /home/ubuntu/ros2_ws/src/libsurvive_ros2 COPY --chown=ubuntu:ubuntu . 
/home/ubuntu/ros2_ws/src/libsurvive_ros2 -# Install baseline tools -RUN sudo apt-get update \ - && cd /home/ubuntu/ros2_ws \ - && rosdep update \ - && rosdep install --from-paths src --ignore-src -r -y \ - && source /opt/ros/$ROS_DISTRO/setup.bash \ - && colcon build --symlink-install \ +# Compile the code. Normally you'd also add these, but they are not cached by docker, +# which makes compilation long and tedious. +# +# rosdep update \ +# rosdep install --from-paths src --ignore-src -r -y \ +# source /opt/ros/$ROS_DISTRO/setup.bash +# +RUN sudo apt-get update \ + && source /opt/ros/$ROS_DISTRO/setup.bash \ + && cd /home/ubuntu/ros2_ws \ + && colcon build --symlink-install --event-handlers console_direct+ \ + --cmake-args -DCMAKE_BUILD_TYPE=RelWithDebInfo \ && sudo rm -rf /var/lib/apt/lists/* # Initialization diff --git a/docker-compose.yml b/docker-compose.yml index 4c1f838..e50d8f7 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -29,10 +29,14 @@ services: privileged: false device_cgroup_rules: - 'c *:* rmw' + environment: + DISPLAY: $DISPLAY volumes: - - /dev:/dev - .:/home/ubuntu/ros2_ws/src/libsurvive_ros2 - ./data:/home/ubuntu/.ros + - /dev:/dev + - /tmp/.X11-unix:/tmp/.X11-unix + - ~/.Xauthority:/home/ubuntu/.Xauthority build: . network_mode: host working_dir: /home/ubuntu/ros2_ws diff --git a/include/libsurvive_ros2/poser_component.hpp b/include/libsurvive_ros2/poser_component.hpp index 9e2bbc4..533e86e 100644 --- a/include/libsurvive_ros2/poser_component.hpp +++ b/include/libsurvive_ros2/poser_component.hpp @@ -74,6 +74,7 @@ #include "rclcpp/rclcpp.hpp" #include "sensor_msgs/msg/imu.hpp" #include "tf2_ros/transform_broadcaster.h" +#include "tf2_ros/static_transform_broadcaster.h" namespace libsurvive_ros2 { @@ -202,6 +203,7 @@ class PoserComponent : public rclcpp::Node // TF2 publisher std::unique_ptr tf_broadcaster_; + std::unique_ptr tf_static_broadcaster_; // Solution timer rclcpp::TimerBase::SharedPtr timer_; diff --git a/launch/libsurvive_ros2.launch.py b/launch/libsurvive_ros2.launch.py index 8da6480..52b681a 100644 --- a/launch/libsurvive_ros2.launch.py +++ b/launch/libsurvive_ros2.launch.py @@ -25,11 +25,10 @@ import launch from launch import LaunchDescription from launch.actions import DeclareLaunchArgument, ExecuteProcess, GroupAction -from launch_ros.actions import LoadComposableNodes -from launch_ros.descriptions import ComposableNode from launch.conditions import IfCondition, UnlessCondition from launch.substitutions import LaunchConfiguration, PythonExpression -from launch_ros.actions import Node, ComposableNodeContainer +from launch_ros.actions import LoadComposableNodes, Node, ComposableNodeContainer +from launch_ros.descriptions import ComposableNode # Bas directories for resource loading and writing. LOG_DIR = launch.logging.launch_config.log_dir @@ -67,6 +66,11 @@ def generate_launch_description(): ), DeclareLaunchArgument( "replay", default_value="", description="Replay data from this bag path" + ), + DeclareLaunchArgument( + "debug", + default_value="true", + description="Launch in a debugger xterm window", ), # Driver configuration DeclareLaunchArgument( @@ -125,6 +129,12 @@ def generate_launch_description(): {"tracking_frame": LaunchConfiguration("tracking_frame")}, ] + # If have a meta poser config file specified, disable the low-level poser. + launch_prefix = PythonExpression( + ["'xterm -e gdb -ex=r --args' if bool('", LaunchConfiguration("debug"), "') else None"] + ) + + # Composed pipeline. 
composed_pipeline = GroupAction( actions=[ @@ -133,6 +143,7 @@ def generate_launch_description(): executable="component_container", name="libsurvive_ros2_container", namespace=LaunchConfiguration("namespace"), + prefix=launch_prefix, output="screen", ), LoadComposableNodes( @@ -176,6 +187,7 @@ def generate_launch_description(): name="libsurvive_ros2_driver_node", namespace=LaunchConfiguration("namespace"), condition=IfCondition(enable_driver), + prefix=launch_prefix, output="screen", parameters=driver_parameters, ), @@ -185,6 +197,7 @@ def generate_launch_description(): name="libsurvive_ros2_poser_node", namespace=LaunchConfiguration("namespace"), condition=IfCondition(enable_poser), + prefix=launch_prefix, output="screen", parameters=poser_parameters, ), @@ -223,6 +236,7 @@ def generate_launch_description(): "/roslog:=/old/roslog", "/tf:=/old/tf", "/tf_static:=/old/tf_static", + "/libsurvive/pose/body:=/old/pose/body", "/libsurvive/pose/tracker:=/old/pose/tracker", "/libsurvive/pose/lighthouse:=/old/pose/lighthouse", ], diff --git a/package.xml b/package.xml index 498aef0..b679f5a 100644 --- a/package.xml +++ b/package.xml @@ -13,18 +13,17 @@ boost + diagnostic_msgs eigen - tbb - + geometry_msgs + gtsam rclcpp_components rclcpp + sensor_msgs + tbb tf2_ros tf2 - diagnostic_msgs - geometry_msgs - sensor_msgs - ament_cmake @@ -32,12 +31,12 @@ - rosidl_default_runtime foxglove_bridge python3-numpy python3-scipy python3-yaml rosbag2_storage_mcap + rosidl_default_runtime diff --git a/src/poser_component.cpp b/src/poser_component.cpp index f353c02..3b1a140 100644 --- a/src/poser_component.cpp +++ b/src/poser_component.cpp @@ -76,6 +76,20 @@ gtsam::Pose3 gtsam_from_ros(const geometry_msgs::msg::Pose & pose) pose.position.z)); } +gtsam::Pose3 gtsam_from_ros(const geometry_msgs::msg::Transform & pose) +{ + return gtsam::Pose3( + gtsam::Rot3::Quaternion( + pose.rotation.w, + pose.rotation.x, + pose.rotation.y, + pose.rotation.z), + gtsam::Point3( + pose.translation.x, + pose.translation.y, + pose.translation.z)); +} + geometry_msgs::msg::Pose ros_from_gtsam(const gtsam::Pose3 & pose) { geometry_msgs::msg::Pose msg; @@ -90,6 +104,20 @@ geometry_msgs::msg::Pose ros_from_gtsam(const gtsam::Pose3 & pose) return msg; } +geometry_msgs::msg::Transform ros_transform_from_gtsam(const gtsam::Pose3 & pose) +{ + geometry_msgs::msg::Transform msg; + msg.translation.x = pose.translation().x(); + msg.translation.y = pose.translation().y(); + msg.translation.z = pose.translation().z(); + gtsam::Quaternion quaternion = pose.rotation().toQuaternion(); + msg.rotation.w = quaternion.w(); + msg.rotation.x = quaternion.x(); + msg.rotation.y = quaternion.y(); + msg.rotation.z = quaternion.z(); + return msg; +} + gtsam::noiseModel::Gaussian::shared_ptr gtsam_from_ros(const std::array & msg) { gtsam::Matrix6 cov; @@ -131,7 +159,8 @@ geometry_msgs::msg::Vector3 ros_from_gtsam(const gtsam::Point3 & point3) PoserComponent::PoserComponent(const rclcpp::NodeOptions & options) : Node("libsurvive_ros2_poser_node", options), next_available_key_(0), - tf_broadcaster_(std::make_unique(*this)) + tf_broadcaster_(std::make_unique(*this)), + tf_static_broadcaster_(std::make_unique(*this)) { std::string meta_config; this->declare_parameter("meta_config", "~/.config/libsurvive/poser.yaml"); @@ -256,10 +285,24 @@ void PoserComponent::add_imu(const sensor_msgs::msg::Imu & msg) // the bTi fixed transform and will correct for it as part of the model. 
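Editor's note: the Transform conversion helpers added earlier in this hunk mirror the existing Pose helpers. For reference, the same round trip through GTSAM's Python bindings looks roughly like this (a sketch; the function names are ours, not part of the component):

    import gtsam
    from geometry_msgs.msg import Transform

    def transform_from_pose3(pose: gtsam.Pose3) -> Transform:
        # Pack a gtsam.Pose3 into a geometry_msgs/Transform.
        msg = Transform()
        t = pose.translation()  # numpy array [x, y, z]
        msg.translation.x = float(t[0])
        msg.translation.y = float(t[1])
        msg.translation.z = float(t[2])
        q = pose.rotation().toQuaternion()
        msg.rotation.w = q.w()
        msg.rotation.x = q.x()
        msg.rotation.y = q.y()
        msg.rotation.z = q.z()
        return msg

    def pose3_from_transform(msg: Transform) -> gtsam.Pose3:
        # Unpack a geometry_msgs/Transform back into a gtsam.Pose3.
        rotation = gtsam.Rot3.Quaternion(
            msg.rotation.w, msg.rotation.x, msg.rotation.y, msg.rotation.z)
        translation = gtsam.Point3(
            msg.translation.x, msg.translation.y, msg.translation.z)
        return gtsam.Pose3(rotation, translation)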
tracker_info.preintegrator->integrateMeasurement(i_accel, i_omega, kDeltaT); - // If bias hs not been set or needs to be reset, then lets add one + // If bias has not been set or needs to be reset, then lets add one if (!tracker_info.b_B.size() || (stamp_curr - tracker_info.b_B.rbegin()->first) > kImuBiasTimeNs) { + RCLCPP_INFO_STREAM(this->get_logger(), "Bias state event for tracker " << tracker_id); + + // Get the last pose timestamp, if one exists for this body. + std::optional stamp_prev = std::nullopt; + if (body_info.gTb.size()) { + stamp_prev = body_info.gTb.rbegin()->first; + } + + // Get the last bias timestamp, if one exists for this IMU. + std::optional stamp_bias = std::nullopt; + if (tracker_info.b_B.size()) { + stamp_bias = tracker_info.b_B.rbegin()->first; + } + // Add a pose state, provided another IMU has not already set one for the // current timestamp. This is possible, but very, very unlikely. if (!body_info.gTb.count(stamp_curr)) { @@ -273,6 +316,8 @@ void PoserComponent::add_imu(const sensor_msgs::msg::Imu & msg) gtsam::PriorFactor( body_info.gTb[stamp_curr], gTb_obs, gTb_cov)); initial_values_.insert(body_info.gTb[stamp_curr], gTb_obs); + } else { + RCLCPP_WARN_STREAM(this->get_logger(), "Pose state exists for tracker " << tracker_id); } // Add a velocity state, provided another IMU has not already set one for the @@ -286,35 +331,34 @@ void PoserComponent::add_imu(const sensor_msgs::msg::Imu & msg) gtsam::PriorFactor( body_info.g_V[stamp_curr], g_V_obs, g_V_cov)); initial_values_.insert(body_info.g_V[stamp_curr], g_V_obs); + } else { + RCLCPP_WARN_STREAM(this->get_logger(), "Velocity state exists for tracker " << tracker_id); } // Add an IMU bias state. This will always need to be added, because each IMU // tracks its own bias internally. - tracker_info.b_B[stamp_curr] = next_available_key_++; - auto b_B_obs = gtsam::imuBias::ConstantBias(); - auto b_B_cov = gtsam::noiseModel::Diagonal::Sigmas( - gtsam::Vector6( - kAccelDriftSigma, kAccelDriftSigma, kAccelDriftSigma, - kOmegaDriftSigma, kOmegaDriftSigma, kOmegaDriftSigma)); - graph_.add( - gtsam::PriorFactor( - tracker_info.b_B[stamp_curr], b_B_obs, b_B_cov)); - initial_values_.insert(tracker_info.b_B[stamp_curr], b_B_obs); - - // Get the last pose timestamp, if one exists for this body. - std::optional stamp_prev = std::nullopt; - if (body_info.gTb.size()) { - stamp_prev = body_info.gTb.rbegin()->first; - } - - // Get the last bias timestamp, if one exists for this IMU. 
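Editor's note: the pose, velocity and per-IMU bias bookkeeping in this hunk is the standard GTSAM preintegration pattern: integrate raw samples into a PreintegratedImuMeasurements object, then tie consecutive pose/velocity keys together with an ImuFactor that also depends on the current bias key. A minimal sketch using GTSAM's Python bindings with made-up noise numbers and a single 100 Hz window:

    import numpy as np
    import gtsam
    from gtsam.symbol_shorthand import X, V, B

    params = gtsam.PreintegrationParams.MakeSharedU(9.81)  # Z-up gravity
    params.setAccelerometerCovariance(np.eye(3) * 0.1 ** 2)
    params.setGyroscopeCovariance(np.eye(3) * 0.1 ** 2)
    params.setIntegrationCovariance(np.eye(3) * 0.1 ** 2)

    bias = gtsam.imuBias.ConstantBias()
    pim = gtsam.PreintegratedImuMeasurements(params, bias)
    for _ in range(100):  # one second of samples between two states
        pim.integrateMeasurement(np.array([0.0, 0.0, 9.81]),  # accel (m/s^2)
                                 np.array([0.0, 0.0, 0.0]),   # gyro (rad/s)
                                 0.01)

    graph = gtsam.NonlinearFactorGraph()
    # Ties pose/velocity at state 0 to pose/velocity at state 1 through bias B(0).
    graph.add(gtsam.ImuFactor(X(0), V(0), X(1), V(1), B(0), pim))
    pim.resetIntegration()  # start the next preintegration window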
- std::optional stamp_bias = std::nullopt; - if (tracker_info.b_B.size()) { - stamp_bias = tracker_info.b_B.rbegin()->first; + if (!tracker_info.b_B.count(stamp_curr)) { + tracker_info.b_B[stamp_curr] = next_available_key_++; + auto b_B_obs = gtsam::imuBias::ConstantBias(); + auto b_B_cov = gtsam::noiseModel::Diagonal::Sigmas( + gtsam::Vector6( + kAccelDriftSigma, kAccelDriftSigma, kAccelDriftSigma, + kOmegaDriftSigma, kOmegaDriftSigma, kOmegaDriftSigma)); + graph_.add( + gtsam::PriorFactor( + tracker_info.b_B[stamp_curr], b_B_obs, b_B_cov)); + initial_values_.insert(tracker_info.b_B[stamp_curr], b_B_obs); + } else { + RCLCPP_WARN_STREAM(this->get_logger(), "Bias state exists for tracker " << tracker_id); } // Only add between factors for IMU measurements if (stamp_prev && stamp_bias) { + auto b_B_obs = gtsam::imuBias::ConstantBias(); + auto b_B_cov = gtsam::noiseModel::Diagonal::Sigmas( + gtsam::Vector6( + kAccelDriftSigma, kAccelDriftSigma, kAccelDriftSigma, + kOmegaDriftSigma, kOmegaDriftSigma, kOmegaDriftSigma)); graph_.add( gtsam::ImuFactor( body_info.gTb[*stamp_prev], // last pose @@ -331,7 +375,7 @@ void PoserComponent::add_imu(const sensor_msgs::msg::Imu & msg) b_B_cov)); } - // Reset the IMU integrator to reflect that we have a new preintgrattion period. + // Reset the IMU integrator to reflect that we have a new pre-integration period. tracker_info.preintegrator->resetIntegration(); } } @@ -472,39 +516,69 @@ void PoserComponent::add_lighthouse_pose(const geometry_msgs::msg::PoseWithCovar void PoserComponent::solution_callback() { // Incremental update of the graph usng the latest values and factors. + graph_.print("Graph"); + initial_values_.print("Values"); isam2_.update(graph_, initial_values_); - // Print solution - geometry_msgs::msg::PoseWithCovarianceStamped pose_msg; + // Static transforms get the current time + const auto now = this->get_clock()->now(); + + // Print bodies and trackers for (const auto & [body_id, body_info] : id_to_body_info_) { - // RCLCPP_INFO_STREAM(this->get_logger(), "Broadcasting body " << body_id); + // Package up a transform for each tracker on the body + for (const auto& [tracker_id, bTh] : body_info.bTh) { + geometry_msgs::msg::TransformStamped transform_msg; + transform_msg.header.stamp = now; + transform_msg.header.frame_id = body_id; + transform_msg.child_frame_id = tracker_id; + transform_msg.transform = ros_transform_from_gtsam(bTh); + tf_static_broadcaster_->sendTransform(transform_msg); + } + // Pack up all the timesteps auto iter = body_info.gTb.rbegin(); if (iter != body_info.gTb.rend()) { // Extract the solution - gtsam::Pose3 pose = isam2_.calculateEstimate(iter->second); + gtsam::Pose3 gTb = isam2_.calculateEstimate(iter->second); gtsam::Matrix cov = isam2_.marginalCovariance(iter->second); - // Create a message containing the transform geometry_msgs::msg::PoseWithCovarianceStamped msg; msg.header.stamp = ros_from_gtsam(iter->first); msg.header.frame_id = body_id; - msg.pose.pose = ros_from_gtsam(pose); + msg.pose.pose = ros_from_gtsam(gTb); msg.pose.covariance = ros_from_gtsam(cov); body_pose_publisher_->publish(msg); - - // Package up a transform + // Package up a transform for the body geometry_msgs::msg::TransformStamped transform_msg; - transform_msg.header.stamp = ros_from_gtsam(iter->first); + transform_msg.header.stamp = msg.header.stamp; transform_msg.header.frame_id = tracking_frame_; transform_msg.child_frame_id = body_id; - transform_msg.transform.translation.x = msg.pose.pose.position.x; - 
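Editor's note: the solver loop in solution_callback follows the usual incremental iSAM2 recipe: hand the newly accumulated factors and initial values to update(), read back the latest estimate and its marginal covariance, then start an empty graph and value set for the next batch. The same recipe in GTSAM's Python bindings, sketched with a single prior for brevity:

    import numpy as np
    import gtsam

    isam = gtsam.ISAM2()
    graph = gtsam.NonlinearFactorGraph()
    values = gtsam.Values()

    # Pretend this is one batch of new factors accumulated from callbacks.
    key = gtsam.symbol('x', 0)
    prior = gtsam.Pose3()
    noise = gtsam.noiseModel.Diagonal.Sigmas(np.array([0.1] * 6))
    graph.add(gtsam.PriorFactorPose3(key, prior, noise))
    values.insert(key, prior)

    # Incremental update, then query the current best estimate and covariance.
    isam.update(graph, values)
    estimate = isam.calculateEstimate().atPose3(key)
    covariance = isam.marginalCovariance(key)

    # The batch has been absorbed, so start the next one from scratch.
    graph = gtsam.NonlinearFactorGraph()
    values = gtsam.Values()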
transform_msg.transform.translation.y = msg.pose.pose.position.y; - transform_msg.transform.translation.z = msg.pose.pose.position.z; - transform_msg.transform.rotation = msg.pose.pose.orientation; + transform_msg.transform = ros_transform_from_gtsam(gTb); tf_broadcaster_->sendTransform(transform_msg); } } + // Print lighthouses + for (const auto & [lighthouse_id, lighthouse_info] : id_to_lighthouse_info_) { + // Extract the solution + gtsam::Pose3 gTl = isam2_.calculateEstimate(lighthouse_info.gTl); + gtsam::Matrix cov = isam2_.marginalCovariance(lighthouse_info.gTl); + // Create a message containing the transform + geometry_msgs::msg::PoseWithCovarianceStamped msg; + msg.header.stamp = now; + msg.header.frame_id = lighthouse_id; + msg.pose.pose = ros_from_gtsam(gTl); + msg.pose.covariance = ros_from_gtsam(cov); + body_pose_publisher_->publish(msg); + // Package up a transform + geometry_msgs::msg::TransformStamped transform_msg; + transform_msg.header.stamp = msg.header.stamp; + transform_msg.header.frame_id = tracking_frame_; + transform_msg.child_frame_id = lighthouse_id; + transform_msg.transform = ros_transform_from_gtsam(gTl); + tf_static_broadcaster_->sendTransform(transform_msg); + } + + // Flush both the variables and factors, because they have been added graph_.resize(0); initial_values_.clear(); From d9036bdb6061ed81120759e0f95d80f61630d49d Mon Sep 17 00:00:00 2001 From: Andrew Symington Date: Mon, 7 Aug 2023 11:22:47 -0700 Subject: [PATCH 08/18] Add a pinhole model for lighthouse measurementsy --- config/example_config_poser.yaml | 11 +- include/libsurvive_ros2/poser_component.hpp | 44 ++-- launch/libsurvive_ros2.launch.py | 4 +- src/poser_component.cpp | 267 ++++++++++++++------ 4 files changed, 226 insertions(+), 100 deletions(-) diff --git a/config/example_config_poser.yaml b/config/example_config_poser.yaml index 3f8166f..20e656f 100644 --- a/config/example_config_poser.yaml +++ b/config/example_config_poser.yaml @@ -30,9 +30,14 @@ # 30cm ruler "wand", with Y running along the ruler length. 
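Editor's note: the poser configuration continued below stores every transform as [x, y, z, qw, qx, qy, qz]. A rough sketch of reading it with PyYAML and GTSAM's Python bindings (pose_from_list is our own helper name, and the file path assumes the repository layout):

    import yaml
    import gtsam

    def pose_from_list(values):
        # [x, y, z, qw, qx, qy, qz] -> gtsam.Pose3, following the example config.
        x, y, z, qw, qx, qy, qz = values
        return gtsam.Pose3(gtsam.Rot3.Quaternion(qw, qx, qy, qz), gtsam.Point3(x, y, z))

    with open("config/example_config_poser.yaml") as stream:
        config = yaml.safe_load(stream)

    for body in config.get("rigid_bodies", []):
        for tracker in body["trackers"]:
            bTh = pose_from_list(tracker["bTh"])
            print(body["body_id"], tracker["tracker_id"], bTh.translation())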
#
 rigid_bodies:
-  - body_id: "rigid_body/wand"
+  - body_id: "world_frame"
     trackers:
       - tracker_id: LHR-74EFB987
-        bTh: [0.0, 0.15, 0.0, 1.0, 0.0, 0.0, 0.0]
+        bTh: [0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0]
+  - body_id: "body_frame"
+    trackers:
       - tracker_id: LHR-933F150A
-        bTh: [0.0, -0.15, 0.0, 1.0, 0.0, 0.0, 0.0]
+        bTh: [0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0]
+registration:
+  - body_id: "world_frame"
+    gTb: [0.0, 0.0, 10.0, 1.0, 0.0, 0.0, 0.0]
\ No newline at end of file
diff --git a/include/libsurvive_ros2/poser_component.hpp b/include/libsurvive_ros2/poser_component.hpp
index 533e86e..93485e4 100644
--- a/include/libsurvive_ros2/poser_component.hpp
+++ b/include/libsurvive_ros2/poser_component.hpp
@@ -61,13 +61,17 @@
 
 // Project includes
 #include "geometry_msgs/msg/pose_with_covariance_stamped.hpp"
+#include "gtsam/geometry/Rot2.h"
 #include "gtsam/inference/Symbol.h"
 #include "gtsam/navigation/ImuBias.h"
 #include "gtsam/navigation/ImuFactor.h"
 #include "gtsam/nonlinear/ISAM2.h"
+#include "gtsam/nonlinear/ExpressionFactorGraph.h"
 #include "gtsam/nonlinear/NonlinearFactorGraph.h"
 #include "gtsam/nonlinear/PriorFactor.h"
+#include "gtsam/slam/expressions.h"
 #include "gtsam/slam/BetweenFactor.h"
+#include 
 #include "libsurvive_ros2/msg/angle.hpp"
 #include "libsurvive_ros2/msg/lighthouse.hpp"
 #include "libsurvive_ros2/msg/tracker.hpp"
@@ -81,18 +85,26 @@ namespace libsurvive_ros2
 
 // Nanosecond timestamp w.r.t unix epoch
 typedef int64_t Timestamp;
+typedef uint8_t Channel;
+typedef uint8_t Sensor;
 
 // Encodes body information
 struct BodyInfo
 {
-  // Sequence of poses of the body in the global frame
+  // This is a list of all trackers attached to this body and their poses
+  // relative to the mounting bolt frame.
+  std::unordered_map<std::string, gtsam::Pose3> bTh;
+
+  // If this is a static body. This is currently only used for registration
+  // markers. If this is the case then the std::map<...> data structures
+  // below include a single (key = 0) value representing the pose/velocity.
+  bool is_static;
+
+  // Sequence of poses of the body in the global frame
   std::map<Timestamp, gtsam::Key> gTb;
 
   // Sequence of velocities in the body frame.
   std::map<Timestamp, gtsam::Key> g_V;
-
-  // Tracker head to body transforms for all sensors
-  std::unordered_map<std::string, gtsam::Pose3> bTh;
 };
 
 // Encodes tracker information
@@ -107,8 +119,8 @@ struct TrackerInfo
   gtsam::Pose3 tTi;
   gtsam::Pose3 tTh;
 
-  // Sensor in the tracking frame.
-  std::unordered_map<uint8_t, std::pair<gtsam::Point3, gtsam::Point3>> t_sensors;
+  // Sensor points in the tracking frame.
+  std::unordered_map<Sensor, gtsam::Point3> t_sensors;
 
   // Constant scale and bias factors for the IMU, in the imu frame.
   gtsam::Vector3 accel_scale;
@@ -126,8 +138,14 @@ struct TrackerInfo
 // Encodes lighthouse information
 struct LighthouseInfo
 {
+  // Serial number / identifier
+  std::string id;
+
   // Static pose of this lighthouse
   gtsam::Key gTl;
+
+  // Calibration information for this lighthouse.
+  std::shared_ptr<gtsam::Cal3_S2> K;
 };
 
 // Helper to find the closest key lower than the supplied value.
This is used to map @@ -157,10 +175,6 @@ class PoserComponent : public rclcpp::Node void add_lighthouse(const libsurvive_ros2::msg::Lighthouse & msg); void add_imu(const sensor_msgs::msg::Imu & msg); - // Estimates from the low-level driver, which we treat as priors - void add_tracker_pose(const geometry_msgs::msg::PoseWithCovarianceStamped & msg); - void add_lighthouse_pose(const geometry_msgs::msg::PoseWithCovarianceStamped & msg); - // Calculate and publish a solution void solution_callback(); @@ -174,10 +188,10 @@ class PoserComponent : public rclcpp::Node std::unordered_map id_to_tracker_info_; // Lighthouse information - std::unordered_map id_to_lighthouse_info_; + std::unordered_map channel_to_lighthouse_info_; // This is the graphical model into which we'll store factors. - gtsam::NonlinearFactorGraph graph_; + gtsam::ExpressionFactorGraph graph_; // These are the initial set of values for the factors. gtsam::Values initial_values_; @@ -193,13 +207,11 @@ class PoserComponent : public rclcpp::Node rclcpp::Subscription::SharedPtr lighthouse_subscriber_; rclcpp::Subscription::SharedPtr imu_subscriber_; rclcpp::Subscription::SharedPtr angle_subscriber_; - rclcpp::Subscription::SharedPtr - tracker_pose_subscriber_; - rclcpp::Subscription::SharedPtr - lighthouse_pose_subscriber_; // Solution publisher rclcpp::Publisher::SharedPtr body_pose_publisher_; + rclcpp::Publisher::SharedPtr tracker_pose_publisher_; + rclcpp::Publisher::SharedPtr lighthouse_pose_publisher_; // TF2 publisher std::unique_ptr tf_broadcaster_; diff --git a/launch/libsurvive_ros2.launch.py b/launch/libsurvive_ros2.launch.py index 52b681a..b37c930 100644 --- a/launch/libsurvive_ros2.launch.py +++ b/launch/libsurvive_ros2.launch.py @@ -69,7 +69,7 @@ def generate_launch_description(): ), DeclareLaunchArgument( "debug", - default_value="true", + default_value="false", description="Launch in a debugger xterm window", ), # Driver configuration @@ -131,7 +131,7 @@ def generate_launch_description(): # If have a meta poser config file specified, disable the low-level poser. 
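Editor's note: the closest-timestamp lookup described above (map an asynchronous measurement onto the most recent state at or before it) is a plain bisect over the sorted stamps. An illustrative Python version, not tied to the exact C++ signature:

    from bisect import bisect_right

    def find_key_for_closest_stamp(stamp_to_key: dict, stamp: int):
        # Return the key whose timestamp is the closest one not after `stamp`, else None.
        stamps = sorted(stamp_to_key)
        index = bisect_right(stamps, stamp) - 1
        return stamp_to_key[stamps[index]] if index >= 0 else None

    states = {100: "x0", 200: "x1", 300: "x2"}
    assert find_key_for_closest_stamp(states, 250) == "x1"
    assert find_key_for_closest_stamp(states, 50) is None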
launch_prefix = PythonExpression( - ["'xterm -e gdb -ex=r --args' if bool('", LaunchConfiguration("debug"), "') else None"] + ["'xterm -e gdb -ex=r --args' if '", LaunchConfiguration("debug"), "'=='true' else ''"] ) diff --git a/src/poser_component.cpp b/src/poser_component.cpp index 3b1a140..bb33910 100644 --- a/src/poser_component.cpp +++ b/src/poser_component.cpp @@ -27,6 +27,8 @@ // i : the tracker "imu" frame (the IMU origin) // b : the origin of a rigid body to which a tracker is attached +#include + #include "libsurvive_ros2/poser_component.hpp" #include "rclcpp_components/register_node_macro.hpp" @@ -46,6 +48,8 @@ const double kAccelDriftSigma = 0.1; const double kOmegaSigma = 0.1; const double kOmegaDriftSigma = 0.1; const double kIntegrationSigma = 0.1; +const double kSensorPositionSigma = 0.0001; +const double kSigmaAngle = 0.001; // Time between bias states const Timestamp kImuBiasTimeNs = 1e9; @@ -168,18 +172,51 @@ PoserComponent::PoserComponent(const rclcpp::NodeOptions & options) this->declare_parameter("tracking_frame", "libsurvive_frame"); this->get_parameter("tracking_frame", tracking_frame_); - // Example: poser.yaml + // Example: example_config_poser.yaml // ------------------- // rigid_bodies: - // - body_id: "rigid_body/wand" + // - body_id: "world_frame" // trackers: // - tracker_id: LHR-74EFB987 - // bTh: [0.0, 0.15, 0.0, 1.0, 0.0, 0.0, 0.0] + // bTh: [0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0] + // - body_id: "body_frame" + // trackers: // - tracker_id: LHR-933F150A - // bTh: [0.0, -0.15, 0.0, 1.0, 0.0, 0.0, 0.0] + // bTh: [0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0] + // registration: + // - body_id: "world_frame" + // gTb: [0.0, 0.0, 10.0, 1.0, 0.0, 0.0, 0.0] YAML::Node config = YAML::LoadFile(meta_config); for (auto body : config["rigid_bodies"]) { std::string body_id = body["body_id"].as(); + + // If this is a static body, we need to only assign it a single state + // the global frame, and set a prior on that state + id_to_body_info_[body_id].is_static = body["static"].as(); + if (id_to_body_info_[body_id].is_static) { + + // Velocity is always zero. + id_to_body_info_[body_id].g_V[0] = next_available_key_++; + auto g_V_obs = gtsam::Vector3(0.0, 0.0, 0.0); + auto g_V_cov = gtsam::noiseModel::Diagonal::Sigmas( + gtsam::Vector3(1e-9, 1e-9, 1e-9)); + graph_.add(gtsam::PriorFactor( + id_to_body_info_[body_id].g_V[0], g_V_obs, g_V_cov)); + initial_values_.insert(id_to_body_info_[body_id].g_V[0], g_V_obs); + + // Pose is unknown, but it start it off somewhere reasonable. 
+ id_to_body_info_[body_id].gTb[0] = next_available_key_++; + auto gTb_obs = gtsam::Pose3( + gtsam::Rot3::Quaternion(1.0, 0.0, 0.0, 0.0), + gtsam::Point3(0.0, 0.0, 0.0)); + auto gTb_cov = gtsam::noiseModel::Diagonal::Sigmas( + gtsam::Vector6(10.0, 10.0, 10.0, 100.0, 100.0, 100.0)); + graph_.add(gtsam::PriorFactor( + id_to_body_info_[body_id].gTb[0], gTb_obs, gTb_cov)); + initial_values_.insert(id_to_body_info_[body_id].gTb[0], gTb_obs); + } + + // Add tracker information for (auto tracker : body["trackers"]) { std::string tracker_id = tracker["tracker_id"].as(); std::vector bTh = tracker["bTh"].as>(); @@ -189,6 +226,21 @@ PoserComponent::PoserComponent(const rclcpp::NodeOptions & options) } } + // Add registration information + for (auto registration : config["registration"]) { + std::string body_id = registration["body_id"].as(); + if (id_to_body_info_.count(body_id) == 0) { + RCLCPP_WARN_STREAM(this->get_logger(), + "Unknown body frame '" << body_id << "' specified in registration"); + continue; + } + std::vector gTb = registration["gTb"].as>(); + auto gTb_obs = gtsam::Pose3( + gtsam::Rot3::Quaternion(gTb[3], gTb[4], gTb[5], gTb[6]), + gtsam::Point3(gTb[0], gTb[1], gTb[2])); + initial_values_.update(id_to_body_info_[body_id].gTb[0], gTb_obs); + } + // TODO(asymingt) this might not play very well with composable node inter-process // communication. So, we might want to avoid it until everything else is ready. rclcpp::QoS qos_profile(10); @@ -203,16 +255,14 @@ PoserComponent::PoserComponent(const rclcpp::NodeOptions & options) "imu", qos_profile, std::bind(&PoserComponent::add_imu, this, _1)); angle_subscriber_ = this->create_subscription( "angle", qos_profile, std::bind(&PoserComponent::add_angle, this, _1)); - tracker_pose_subscriber_ = - this->create_subscription( - "pose/tracker", qos_profile, std::bind(&PoserComponent::add_tracker_pose, this, _1)); - lighthouse_pose_subscriber_ = - this->create_subscription( - "pose/lighthouse", qos_profile, std::bind(&PoserComponent::add_lighthouse_pose, this, _1)); // Setup refined body frame pose publisher body_pose_publisher_ = this->create_publisher("pose/body", qos_profile); + tracker_pose_publisher_ = + this->create_publisher("pose/tracker", qos_profile); + lighthouse_pose_publisher_ = + this->create_publisher("pose/lighthouse", qos_profile); // Add a timer to extract solution and publish pose timer_ = this->create_wall_timer( @@ -226,31 +276,65 @@ PoserComponent::~PoserComponent() void PoserComponent::add_angle(const libsurvive_ros2::msg::Angle & msg) { - /* TODO fix this -- might need a CustomFactor? - // Only accept angle measurements from trackers already inserted into the graph. const std::string & tracker_id = msg.header.frame_id; if (id_to_tracker_info_.count(tracker_id) == 0) { return; } - const Timestamp stamp = rclcpp::Time(msg.header.stamp).nanoseconds(); auto & tracker_info = id_to_tracker_info_[tracker_id]; auto & body_info = id_to_body_info_[tracker_info.body_id]; - // Find the closest time lower than the given stamp in the trajectory. 
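Editor's note: a static registration body therefore boils down to one pose key with a deliberately loose prior, whose initial value comes from the gTb entry in the configuration when one is given. The same idea in GTSAM's Python bindings, reusing the sigmas from the component (a sketch, with an arbitrary symbol name):

    import numpy as np
    import gtsam

    graph = gtsam.NonlinearFactorGraph()
    values = gtsam.Values()

    key = gtsam.symbol('b', 0)  # single state: the body never moves
    prior = gtsam.Pose3()       # identity mean for the weak prior
    # Loose prior: 10 rad on rotation, 100 m on translation (as in the component).
    sigmas = np.array([10.0, 10.0, 10.0, 100.0, 100.0, 100.0])
    graph.add(gtsam.PriorFactorPose3(key, prior, gtsam.noiseModel.Diagonal.Sigmas(sigmas)))

    # Start the value from the registration gTb when one is given, else from identity.
    initial = gtsam.Pose3(gtsam.Rot3.Quaternion(1.0, 0.0, 0.0, 0.0),
                          gtsam.Point3(0.0, 0.0, 10.0))
    values.insert(key, initial)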
- auto gTb_iter = body_info.gTb.lower_bound(stamp); - if (gTb == body_info.gTb.end()) { - RCLCPP_WARN_STREAM(this->get_logger(), "No matching timestamp found"); + // Only accept angle measurements from channels we have already seen + if (channel_to_lighthouse_info_.count(msg.channel) == 0) { return; } + auto & lighthouse_info = channel_to_lighthouse_info_[msg.channel]; + + // Verify that the tracker has this channel + if (tracker_info.t_sensors.count(msg.sensor_id) == 0) { + RCLCPP_ERROR_STREAM(this->get_logger(), + "Tracker " << tracker_id << " sensor " << msg.sensor_id << " does not exist"); + } + auto & t_sensor = tracker_info.t_sensors[msg.sensor_id]; - // Add a factor constraining the adjacent poses and velocities - graph_.add(gtsam::BearingFactor( - body_info.gTb[tracker_info.last_imu_stamp], // last pose - body_info.g_V[tracker_info.last_imu_stamp], // last velocity - tracker_info.preintegrator)); // preintegrator + // We rely on IMU inserting the actual states. We're just going to find the + // closest one to the angle timestamp and use it. There should be one... + const Timestamp stamp = rclcpp::Time(msg.header.stamp).nanoseconds(); + auto gTb = find_key_for_closest_stamp(body_info.gTb, stamp); + if (!gTb) { + return; + } - */ + // Expression to move the the position of the sensor from the tracking to global frame. + gtsam::Point3_ t_sensor_(t_sensor); + gtsam::Pose3_ gTb_(*gTb); + gtsam::Pose3_ bTh_(body_info.bTh[tracker_id]); + gtsam::Pose3_ hTt_(tracker_info.tTh.inverse()); + gtsam::Point3_ g_sensor_ = + gtsam::transformFrom(gtsam::compose(gTb_, gtsam::compose(bTh_, hTt_)), t_sensor_); + + // Now we can use a simple pinhole model to predict the (u, v, 1) coordinate of the + // sensor on some virtual image plane located at 1m from the sensor. This is because + // the "K" has fx = 1.0, and fy = 1.0, and all other elements zero. + gtsam::Pose3_ gTl_(lighthouse_info.gTl); + gtsam::Cal3_S2_ K_(*lighthouse_info.K); + auto expression_factor = gtsam::project3(gTl_, g_sensor_, K_); + + // The factor that gets added depends if this is an angle about axis 0 or axis 1. + switch (msg.plane) { + case 0: { // Angle about X + auto angle_obs = gtsam::Point2(std::atan(msg.angle), 0); + auto angle_cov = gtsam::noiseModel::Diagonal::Sigmas(gtsam::Vector2(kSigmaAngle, 1e6)); + graph_.addExpressionFactor(expression_factor, angle_obs, angle_cov); + break; + } + case 1: { // Angle about Y + auto angle_obs = gtsam::Point2(0, std::atan(msg.angle)); + auto angle_cov = gtsam::noiseModel::Diagonal::Sigmas(gtsam::Vector2(1e6, kSigmaAngle)); + graph_.addExpressionFactor(expression_factor, angle_obs, angle_cov); + break; + } + } } void PoserComponent::add_imu(const sensor_msgs::msg::Imu & msg) @@ -260,10 +344,18 @@ void PoserComponent::add_imu(const sensor_msgs::msg::Imu & msg) if (!id_to_tracker_info_.count(tracker_id)) { return; } - const Timestamp stamp_curr = gtsam_from_ros(msg.header.stamp); auto & tracker_info = id_to_tracker_info_[tracker_id]; auto & body_info = id_to_body_info_[tracker_info.body_id]; + // There is no point in adding IMU information about a static object, because all we'd + // be doing is spending a lot of compute on tracking the IMU random walks. Perhaps at + // some point in the future we might want to work out the gravitation vector and use + // it to somehow adjust the pose, but that seems a little far-fetched. 
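Editor's note: the expression chain used for angle measurements can be sanity-checked numerically: compose the body, bolt and tracking-frame transforms, move the photodiode into the lighthouse frame, and project it onto a unit-focal image plane, where the sweep angle and the image coordinate are related by a tangent. A standalone sketch with made-up poses (names and values are illustrative only):

    import numpy as np
    import gtsam

    # Made-up geometry for illustration only.
    gTl = gtsam.Pose3(gtsam.Rot3(), gtsam.Point3(0.0, 0.0, 2.0))  # lighthouse in global frame
    gTb = gtsam.Pose3(gtsam.Rot3(), gtsam.Point3(0.5, 0.2, 0.0))  # body in global frame
    bTh = gtsam.Pose3()                                           # tracker bolt on the body
    hTt = gtsam.Pose3()                                           # tracking frame on the bolt
    t_sensor = gtsam.Point3(0.01, 0.02, 0.0)                      # photodiode in tracking frame

    # Sensor position in the global frame, then expressed in the lighthouse frame.
    gTt = gTb.compose(bTh).compose(hTt)
    g_sensor = gTt.transformFrom(t_sensor)
    l_sensor = gTl.transformTo(g_sensor)

    # Unit-focal pinhole projection: fx = fy = 1, no skew, no principal point offset.
    u = l_sensor[0] / l_sensor[2]
    v = l_sensor[1] / l_sensor[2]

    # The sweep angles reported by the driver relate to the projection by a tangent,
    # so a plane-0 measurement constrains u and a plane-1 measurement constrains v.
    predicted_angle_plane_0 = np.arctan(u)
    predicted_angle_plane_1 = np.arctan(v)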
+  if (body_info.is_static) {
+    return;
+  }
+  const Timestamp stamp_curr = gtsam_from_ros(msg.header.stamp);
+
   // Extract the linear acceleration and correct for constant bias / scale errors.
   gtsam::Vector i_accel(3);
   i_accel <<
@@ -421,9 +513,8 @@ void PoserComponent::add_tracker(const libsurvive_ros2::msg::Tracker & msg)
 
   // Sensor locations and normals.
   for (size_t k = 0; k < msg.channels.size(); k++) {
-    uint8_t channel = msg.channels[k];
-    tracker_info.t_sensors[channel] = std::make_pair(
-      gtsam_from_ros(msg.points[k]), gtsam_from_ros(msg.normals[k]));
+    const uint8_t channel = msg.channels[k];
+    tracker_info.t_sensors[channel] = gtsam_from_ros(msg.points[k]);
   }
 
   // The IMU factor needs to know the pose of the IMU sensor in the body frame in
@@ -446,15 +537,30 @@ void PoserComponent::add_tracker(const libsurvive_ros2::msg::Tracker & msg)
 
 void PoserComponent::add_lighthouse(const libsurvive_ros2::msg::Lighthouse & msg)
 {
+  const Channel lighthouse_channel = msg.channel;
   const std::string & lighthouse_id = msg.header.frame_id;
-  if (id_to_lighthouse_info_.count(lighthouse_id)) {
+  if (channel_to_lighthouse_info_.count(lighthouse_channel)) {
+    const std::string & orig_lighthouse_id = channel_to_lighthouse_info_[lighthouse_channel].id;
+    if (orig_lighthouse_id != lighthouse_id) {
+      RCLCPP_ERROR_STREAM(this->get_logger(),
+        "New lighthouse " << lighthouse_id << " on channel " << lighthouse_channel
+          << " previously received from " << orig_lighthouse_id);
+    }
     return;
   }
-  RCLCPP_INFO_STREAM(this->get_logger(), "Adding lighthouse " << lighthouse_id);
+  RCLCPP_INFO_STREAM(this->get_logger(),
+    "Adding lighthouse " << lighthouse_id << " on channel " << lighthouse_channel);
+  auto & lighthouse_info = channel_to_lighthouse_info_[lighthouse_channel];
 
-  auto & lighthouse_info = id_to_lighthouse_info_[lighthouse_id];
+  // Set the ID immediately.
+  lighthouse_info.id = lighthouse_id;
 
-  // This is out initial prior on bae station location given no observations.
+  // The camera projection matrix for this lighthouse is super easy. We want the focal length in
+  // X and Y to be 1. This is so that a point [x,y,z] when projected on the image plane lies along
+  // unit 1 in z. So we can go atan(angle_about_y) == u, or atan(angle_about_x) == v.
+  lighthouse_info.K.reset(new gtsam::Cal3_S2(1.0, 1.0, 0.0, 0.0, 0.0));
+
+  // This is our initial prior on the lighthouse location given no observations.
   auto obs_gTl = gtsam::Pose3(
     gtsam::Rot3::Quaternion(1.0, 0.0, 0.0, 0.0),
     gtsam::Point3(0.0, 0.0, 0.0));
@@ -471,47 +577,47 @@ void PoserComponent::add_lighthouse(const libsurvive_ros2::msg::Lighthouse & msg
   graph_.add(gtsam::PriorFactor<gtsam::Pose3>(lighthouse_info.gTl, obs_gTl, cov_gTl));
 }
 
-void PoserComponent::add_tracker_pose(const geometry_msgs::msg::PoseWithCovarianceStamped & msg)
-{
-  const std::string & tracker_id = msg.header.frame_id;
-  if (!id_to_tracker_info_.count(tracker_id)) {
-    return;
-  }
-  const Timestamp stamp = gtsam_from_ros(msg.header.stamp);
-  auto & tracker_info = this->id_to_tracker_info_[tracker_id];
-  auto & body_info = this->id_to_body_info_[tracker_info.body_id];
-
-  // We can't add an observation for a state that does not exist.
- auto gTb = find_key_for_closest_stamp(body_info.gTb, stamp); - if (!gTb) { - return; - } - - // Add a prior factor on the pose state - auto gTt_obs = gtsam_from_ros(msg.pose.pose); - auto hTb = body_info.bTh[tracker_id].inverse(); - auto tTb = tracker_info.tTh * hTb; - auto gTb_obs = gTt_obs * tTb; - auto gTt_cov = gtsam_from_ros(msg.pose.covariance); - auto gTb_cov = gTt_cov; - graph_.add(gtsam::PriorFactor(*gTb, gTb_obs, gTb_cov)); -} - -void PoserComponent::add_lighthouse_pose(const geometry_msgs::msg::PoseWithCovarianceStamped & msg) -{ - const std::string & lighthouse_id = msg.header.frame_id; - if (!id_to_lighthouse_info_.count(lighthouse_id)) { - return; - } - auto & lighthouse_info = this->id_to_lighthouse_info_[lighthouse_id]; - - // Collect observation - auto obs_gTl = gtsam_from_ros(msg.pose.pose); - auto cov_gTl = gtsam_from_ros(msg.pose.covariance); - - // Add the observation - graph_.add(gtsam::PriorFactor(lighthouse_info.gTl, obs_gTl, cov_gTl)); -} +// void PoserComponent::add_tracker_pose(const geometry_msgs::msg::PoseWithCovarianceStamped & msg) +// { +// const std::string & tracker_id = msg.header.frame_id; +// if (!id_to_tracker_info_.count(tracker_id)) { +// return; +// } +// const Timestamp stamp = gtsam_from_ros(msg.header.stamp); +// auto & tracker_info = this->id_to_tracker_info_[tracker_id]; +// auto & body_info = this->id_to_body_info_[tracker_info.body_id]; + +// // We can't add an observation for a state that does not exist. +// auto gTb = find_key_for_closest_stamp(body_info.gTb, stamp); +// if (!gTb) { +// return; +// } + +// // Add a prior factor on the pose state +// auto gTt_obs = gtsam_from_ros(msg.pose.pose); +// auto hTb = body_info.bTh[tracker_id].inverse(); +// auto tTb = tracker_info.tTh * hTb; +// auto gTb_obs = gTt_obs * tTb; +// auto gTt_cov = gtsam_from_ros(msg.pose.covariance); +// auto gTb_cov = gTt_cov; +// graph_.add(gtsam::PriorFactor(*gTb, gTb_obs, gTb_cov)); +// } + +// void PoserComponent::add_lighthouse_pose(const geometry_msgs::msg::PoseWithCovarianceStamped & msg) +// { +// const std::string & lighthouse_id = msg.header.frame_id; +// if (!id_to_lighthouse_info_.count(lighthouse_id)) { +// return; +// } +// auto & lighthouse_info = this->id_to_lighthouse_info_[lighthouse_id]; + +// // Collect observation +// auto obs_gTl = gtsam_from_ros(msg.pose.pose); +// auto cov_gTl = gtsam_from_ros(msg.pose.covariance); + +// // Add the observation +// graph_.add(gtsam::PriorFactor(lighthouse_info.gTl, obs_gTl, cov_gTl)); +// } void PoserComponent::solution_callback() { @@ -547,38 +653,41 @@ void PoserComponent::solution_callback() msg.pose.pose = ros_from_gtsam(gTb); msg.pose.covariance = ros_from_gtsam(cov); body_pose_publisher_->publish(msg); - // Package up a transform for the body + // Package up a transform for the body, which may be static. 
geometry_msgs::msg::TransformStamped transform_msg; transform_msg.header.stamp = msg.header.stamp; transform_msg.header.frame_id = tracking_frame_; transform_msg.child_frame_id = body_id; transform_msg.transform = ros_transform_from_gtsam(gTb); - tf_broadcaster_->sendTransform(transform_msg); + if (body_info.is_static) { + tf_static_broadcaster_->sendTransform(transform_msg); + } else { + tf_broadcaster_->sendTransform(transform_msg); + } } } // Print lighthouses - for (const auto & [lighthouse_id, lighthouse_info] : id_to_lighthouse_info_) { + for (const auto & [channel, lighthouse_info] : channel_to_lighthouse_info_) { // Extract the solution gtsam::Pose3 gTl = isam2_.calculateEstimate(lighthouse_info.gTl); gtsam::Matrix cov = isam2_.marginalCovariance(lighthouse_info.gTl); // Create a message containing the transform geometry_msgs::msg::PoseWithCovarianceStamped msg; msg.header.stamp = now; - msg.header.frame_id = lighthouse_id; + msg.header.frame_id = lighthouse_info.id; msg.pose.pose = ros_from_gtsam(gTl); msg.pose.covariance = ros_from_gtsam(cov); - body_pose_publisher_->publish(msg); + lighthouse_pose_publisher_->publish(msg); // Package up a transform geometry_msgs::msg::TransformStamped transform_msg; transform_msg.header.stamp = msg.header.stamp; transform_msg.header.frame_id = tracking_frame_; - transform_msg.child_frame_id = lighthouse_id; + transform_msg.child_frame_id = lighthouse_info.id; transform_msg.transform = ros_transform_from_gtsam(gTl); tf_static_broadcaster_->sendTransform(transform_msg); } - // Flush both the variables and factors, because they have been added graph_.resize(0); initial_values_.clear(); From e746c1c3fdfb7b220acc471513671bb1329b05bb Mon Sep 17 00:00:00 2001 From: Andrew Symington Date: Mon, 7 Aug 2023 11:26:51 -0700 Subject: [PATCH 09/18] Fix a bug --- src/poser_component.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/poser_component.cpp b/src/poser_component.cpp index bb33910..8544dc3 100644 --- a/src/poser_component.cpp +++ b/src/poser_component.cpp @@ -323,13 +323,13 @@ void PoserComponent::add_angle(const libsurvive_ros2::msg::Angle & msg) // The factor that gets added depends if this is an angle about axis 0 or axis 1. 
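Editor's note: routing a body's transform through the static broadcaster when is_static is set, and through the regular broadcaster otherwise, mirrors the usual rclpy pattern. A compressed sketch (the parent frame follows the component's default tracking frame, the rest is invented for illustration):

    import rclpy
    from rclpy.node import Node
    from geometry_msgs.msg import TransformStamped
    from tf2_ros import TransformBroadcaster, StaticTransformBroadcaster

    class BroadcasterExample(Node):
        def __init__(self):
            super().__init__("broadcaster_example")
            self.tf_broadcaster = TransformBroadcaster(self)
            self.tf_static_broadcaster = StaticTransformBroadcaster(self)

        def send(self, child_frame: str, is_static: bool):
            msg = TransformStamped()
            msg.header.stamp = self.get_clock().now().to_msg()
            msg.header.frame_id = "libsurvive_frame"
            msg.child_frame_id = child_frame
            msg.transform.rotation.w = 1.0  # identity transform, for illustration
            broadcaster = self.tf_static_broadcaster if is_static else self.tf_broadcaster
            broadcaster.sendTransform(msg)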
switch (msg.plane) { case 0: { // Angle about X - auto angle_obs = gtsam::Point2(std::atan(msg.angle), 0); + auto angle_obs = gtsam::Point2(std::tan(msg.angle), 0); auto angle_cov = gtsam::noiseModel::Diagonal::Sigmas(gtsam::Vector2(kSigmaAngle, 1e6)); graph_.addExpressionFactor(expression_factor, angle_obs, angle_cov); break; } case 1: { // Angle about Y - auto angle_obs = gtsam::Point2(0, std::atan(msg.angle)); + auto angle_obs = gtsam::Point2(0, std::tan(msg.angle)); auto angle_cov = gtsam::noiseModel::Diagonal::Sigmas(gtsam::Vector2(1e6, kSigmaAngle)); graph_.addExpressionFactor(expression_factor, angle_obs, angle_cov); break; From b0f5cb866d4f6162850020add0ff179ab38717f1 Mon Sep 17 00:00:00 2001 From: Andrew Symington Date: Tue, 8 Aug 2023 15:06:12 -0700 Subject: [PATCH 10/18] Add a custom factor to do the heavy lifting for us --- CMakeLists.txt | 21 - Dockerfile | 2 + include/libsurvive_ros2/poser_component.hpp | 10 +- include/libsurvive_ros2/poser_factors.hpp | 1354 +++++++++++++++++++ scripts/bag_parser | 57 +- src/driver_component.cpp | 7 +- src/poser_component.cpp | 181 ++- test/output_without_registration.json | 4 +- 8 files changed, 1494 insertions(+), 142 deletions(-) create mode 100644 include/libsurvive_ros2/poser_factors.hpp diff --git a/CMakeLists.txt b/CMakeLists.txt index 06f934d..86e951f 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -58,18 +58,6 @@ externalproject_add(libsurvive -DCMAKE_BUILD_TYPE=Release ) -# Obtain a specific version of the GTSAM estimation software. -# include(ExternalProject) -# externalproject_add(gtsam -# GIT_REPOSITORY https://github.com/borglab/gtsam.git -# GIT_TAG 621ef2e7c560e6fb06b463769d9cf7bc6f626308 -# CMAKE_ARGS -# -DCMAKE_BINARY_DIR=${CMAKE_CURRENT_BINARY_DIR} -# -DCMAKE_INSTALL_PREFIX=${CMAKE_INSTALL_PREFIX} -# -DCMAKE_BUILD_TYPE=Release -# -DGTSAM_BUILD_PYTHON=1 -# ) - # Third party libraries find_package(Boost REQUIRED) find_package(Eigen3 REQUIRED) @@ -183,15 +171,6 @@ ament_target_dependencies(libsurvive_ros2_poser_node rclcpp) # INSTALLATION -# Install python packages - -# externalproject_get_property(gtsam binary_dir) -# install( -# DIRECTORY -# ${binary_dir}/python/gtsam -# DESTINATION -# ${PYTHON_INSTALL_DIR}) - # Install the components install( diff --git a/Dockerfile b/Dockerfile index ead7ed8..151b8c0 100644 --- a/Dockerfile +++ b/Dockerfile @@ -43,6 +43,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends libusb-1.0-0-dev \ libx11-dev \ libyaml-cpp-dev \ + python3-pip \ python3-scipy \ ros-${ROS_DISTRO}-foxglove-bridge \ ros-${ROS_DISTRO}-gtsam \ @@ -52,6 +53,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends valgrind \ xterm \ zlib1g-dev \ + && pip3 install gtsam \ && sudo rm -rf /var/lib/apt/lists/* # Add an 'ubuntu' user with dialout/plugdev access and can use sudo passwordless. 
diff --git a/include/libsurvive_ros2/poser_component.hpp b/include/libsurvive_ros2/poser_component.hpp index 93485e4..be4bae8 100644 --- a/include/libsurvive_ros2/poser_component.hpp +++ b/include/libsurvive_ros2/poser_component.hpp @@ -61,7 +61,6 @@ // Project includes #include "geometry_msgs/msg/pose_with_covariance_stamped.hpp" -#include "gtsam/geometry/Rot2.h" #include "gtsam/inference/Symbol.h" #include "gtsam/navigation/ImuBias.h" #include "gtsam/navigation/ImuFactor.h" @@ -80,6 +79,8 @@ #include "tf2_ros/transform_broadcaster.h" #include "tf2_ros/static_transform_broadcaster.h" +#include "poser_factors.hpp" + namespace libsurvive_ros2 { @@ -120,7 +121,8 @@ struct TrackerInfo gtsam::Pose3 tTh; // Sensor points in the tracking frame. - std::unordered_map t_sensors; + std::unordered_map t_points; + std::unordered_map t_normals; // Constant scale and bias factors for the IMU, in the imu frame. gtsam::Vector3 accel_scale; @@ -144,8 +146,8 @@ struct LighthouseInfo // Static pose of this lighthouse gtsam::Key gTl; - // Calibration information for this lighthouse. - std::shared_ptr K; + // Calibration parameters for the two axes + gtsam::BaseStationCal bcal[2]; }; // Helper to find the closest key lower that the supplied value. This is used to map diff --git a/include/libsurvive_ros2/poser_factors.hpp b/include/libsurvive_ros2/poser_factors.hpp new file mode 100644 index 0000000..0e1e86c --- /dev/null +++ b/include/libsurvive_ros2/poser_factors.hpp @@ -0,0 +1,1354 @@ +// Copyright 2023 Andrew Symington +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +// THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +// THE SOFTWARE. + +// DESCRIPTION +// +// Custom factor for integrating Gen 2 light measurements into a GTSAM model. +// A lot of the code here is selectively copied and simplified from the +// libsurvive autogenerated code, which is not installed by default. + +#ifndef LIBSURVIVE_ROS2__POSER_FACTORS_HPP_ +#define LIBSURVIVE_ROS2__POSER_FACTORS_HPP_ + +// For trig functions +#include + +// Project includes +#include "gtsam/nonlinear/NonlinearFactor.h" +#include "gtsam/geometry/Point3.h" +#include "gtsam/geometry/Pose3.h" +#include "gtsam/inference/Symbol.h" + +// The stuff below is extracted from libsurvive until we can implement the equations +// in closed form as pure combinations of other factors. 
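Editor's note: for intuition, if every calibration term below (phase, tilt, curve, gib and ogee corrections) is set to zero, the generated expressions collapse to the familiar two-plane model: each rotor sweeps a plane tilted about 30 degrees from vertical, so the ideal angles depend only on the sensor position in the lighthouse frame. A rough Python sketch of that zero-calibration limit (an approximation, not a substitute for the generated code):

    import math

    def ideal_gen2_angles(x, y, z, tilt=math.radians(30.0)):
        # Zero-calibration approximation of the two sweep angles for a point
        # expressed in the lighthouse frame (all BaseStationCal terms assumed zero).
        azimuth = math.atan2(-z, x)
        elevation_term = math.asin(math.tan(tilt) * y / math.hypot(x, z))
        angle_plane_0 = azimuth - elevation_term - math.pi / 2.0
        angle_plane_1 = azimuth + elevation_term - math.pi / 2.0
        return angle_plane_0, angle_plane_1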
+ +namespace gtsam { + +typedef double LinmathPoint3d[3]; +typedef double LinmathAxisAngle[3]; + +struct LinmathAxisAnglePose { + LinmathPoint3d Pos; + LinmathAxisAngle AxisAngleRot; +}; + +struct BaseStationCal { + double phase; + double tilt; + double curve; + double gibpha; + double gibmag; + double ogeephase; + double ogeemag; +}; + +// Predict a Gen2 X axis angle from object, lighthouse, sensor and calibration params. +static inline double gen_reproject_axis_x_gen2_axis_angle( + const LinmathAxisAnglePose* obj_p, + const LinmathPoint3d sensor_pt, + const LinmathAxisAnglePose* lh_p, + const BaseStationCal* bsc0) +{ + const double obj_px = (*obj_p).Pos[0]; + const double obj_py = (*obj_p).Pos[1]; + const double obj_pz = (*obj_p).Pos[2]; + const double obj_qi = (*obj_p).AxisAngleRot[0]; + const double obj_qj = (*obj_p).AxisAngleRot[1]; + const double obj_qk = (*obj_p).AxisAngleRot[2]; + const double sensor_x = sensor_pt[0]; + const double sensor_y = sensor_pt[1]; + const double sensor_z = sensor_pt[2]; + const double lh_px = (*lh_p).Pos[0]; + const double lh_py = (*lh_p).Pos[1]; + const double lh_pz = (*lh_p).Pos[2]; + const double lh_qi = (*lh_p).AxisAngleRot[0]; + const double lh_qj = (*lh_p).AxisAngleRot[1]; + const double lh_qk = (*lh_p).AxisAngleRot[2]; + const double phase_0 = (*bsc0).phase; + const double tilt_0 = (*bsc0).tilt; + const double curve_0 = (*bsc0).curve; + const double gibPhase_0 = (*bsc0).gibpha; + const double gibMag_0 = (*bsc0).gibmag; + const double ogeeMag_0 = (*bsc0).ogeephase; + const double ogeePhase_0 = (*bsc0).ogeemag; + const double x0 = obj_qk * obj_qk; + const double x1 = obj_qi * obj_qi; + const double x2 = obj_qj * obj_qj; + const double x3 = 1e-10 + x2 + x0 + x1; + const double x4 = sqrt(x3); + const double x5 = cos(x4); + const double x6 = (1. / x3) * (1 + (-1 * x5)); + const double x7 = (1. / x4) * sin(x4); + const double x8 = x7 * obj_qj; + const double x9 = x6 * obj_qk; + const double x10 = x9 * obj_qi; + const double x11 = x7 * obj_qi; + const double x12 = x9 * obj_qj; + const double x13 = ((x12 + x11) * sensor_y) + obj_pz + ((x10 + (-1 * x8)) * sensor_x) + ((x5 + (x0 * x6)) * sensor_z); + const double x14 = lh_qk * lh_qk; + const double x15 = lh_qi * lh_qi; + const double x16 = lh_qj * lh_qj; + const double x17 = 1e-10 + x14 + x16 + x15; + const double x18 = sqrt(x17); + const double x19 = cos(x18); + const double x20 = (1. / x17) * (1 + (-1 * x19)); + const double x21 = x7 * obj_qk; + const double x22 = x6 * obj_qj * obj_qi; + const double x23 = ((x22 + (-1 * x21)) * sensor_y) + ((x10 + x8) * sensor_z) + ((x5 + (x1 * x6)) * sensor_x) + obj_px; + const double x24 = (1. 
/ x18) * sin(x18); + const double x25 = x24 * lh_qj; + const double x26 = x20 * lh_qk * lh_qi; + const double x27 = ((x5 + (x2 * x6)) * sensor_y) + ((x22 + x21) * sensor_x) + obj_py + ((x12 + (-1 * x11)) * sensor_z); + const double x28 = x24 * lh_qi; + const double x29 = x20 * lh_qj; + const double x30 = x29 * lh_qk; + const double x31 = ((x30 + x28) * x27) + ((x26 + (-1 * x25)) * x23) + lh_pz + (x13 * (x19 + (x20 * x14))); + const double x32 = x24 * lh_qk; + const double x33 = x29 * lh_qi; + const double x34 = ((x33 + (-1 * x32)) * x27) + (x23 * (x19 + (x20 * x15))) + lh_px + ((x26 + x25) * x13); + const double x35 = atan2(-1 * x31, x34); + const double x36 = (x27 * (x19 + (x20 * x16))) + ((x33 + x32) * x23) + lh_py + ((x30 + (-1 * x28)) * x13); + const double x37 = 0.523598775598299 + tilt_0; + const double x38 = cos(x37); + const double x39 = (x34 * x34) + (x31 * x31); + const double x40 = asin(x36 * (1. / x38) * (1. / sqrt(x39 + (x36 * x36)))); + const double x41 = 0.0028679863 + (x40 * (-8.0108022e-06 + (-8.0108022e-06 * x40))); + const double x42 = 5.3685255e-06 + (x40 * x41); + const double x43 = 0.0076069798 + (x40 * x42); + const double x44 = x36 * (1. / sqrt(x39)) * tan(x37); + const double x45 = (sin((-1 * asin(x44)) + x35 + ogeeMag_0) * ogeePhase_0) + curve_0; + const double x46 = asin(x44 + ((x40 * x40) * x43 * x45 * (1. / (x38 + (-1 * x45 * sin(x37) * ((x40 * (x43 + (x40 * (x42 + (x40 * (x41 + (x40 * (-8.0108022e-06 + (-1.60216044e-05 * x40))))))))) + (x40 * x43))))))); + return -1.5707963267949 + (-1 * x46) + x35 + (-1 * phase_0) + (-1 * sin(x46 + (-1 * gibPhase_0) + (-1 * x35)) * gibMag_0); +} + +// Predict a Gen2 Y axis angle from object, lighthouse, sensor and calibration params. +static inline double gen_reproject_axis_y_gen2_axis_angle( + const LinmathAxisAnglePose* obj_p, + const LinmathPoint3d sensor_pt, + const LinmathAxisAnglePose* lh_p, + const BaseStationCal* bsc1) +{ + const double obj_px = (*obj_p).Pos[0]; + const double obj_py = (*obj_p).Pos[1]; + const double obj_pz = (*obj_p).Pos[2]; + const double obj_qi = (*obj_p).AxisAngleRot[0]; + const double obj_qj = (*obj_p).AxisAngleRot[1]; + const double obj_qk = (*obj_p).AxisAngleRot[2]; + const double sensor_x = sensor_pt[0]; + const double sensor_y = sensor_pt[1]; + const double sensor_z = sensor_pt[2]; + const double lh_px = (*lh_p).Pos[0]; + const double lh_py = (*lh_p).Pos[1]; + const double lh_pz = (*lh_p).Pos[2]; + const double lh_qi = (*lh_p).AxisAngleRot[0]; + const double lh_qj = (*lh_p).AxisAngleRot[1]; + const double lh_qk = (*lh_p).AxisAngleRot[2]; + const double phase_1 = (*bsc1).phase; + const double tilt_1 = (*bsc1).tilt; + const double curve_1 = (*bsc1).curve; + const double gibPhase_1 = (*bsc1).gibpha; + const double gibMag_1 = (*bsc1).gibmag; + const double ogeeMag_1 = (*bsc1).ogeephase; + const double ogeePhase_1 = (*bsc1).ogeemag; + const double x0 = obj_qk * obj_qk; + const double x1 = obj_qi * obj_qi; + const double x2 = obj_qj * obj_qj; + const double x3 = 1e-10 + x2 + x0 + x1; + const double x4 = sqrt(x3); + const double x5 = cos(x4); + const double x6 = (1. / x3) * (1 + (-1 * x5)); + const double x7 = (1. 
/ x4) * sin(x4); + const double x8 = x7 * obj_qj; + const double x9 = x6 * obj_qk; + const double x10 = x9 * obj_qi; + const double x11 = x7 * obj_qi; + const double x12 = x9 * obj_qj; + const double x13 = ((x12 + x11) * sensor_y) + obj_pz + ((x10 + (-1 * x8)) * sensor_x) + ((x5 + (x0 * x6)) * sensor_z); + const double x14 = lh_qk * lh_qk; + const double x15 = lh_qi * lh_qi; + const double x16 = lh_qj * lh_qj; + const double x17 = 1e-10 + x14 + x16 + x15; + const double x18 = sqrt(x17); + const double x19 = cos(x18); + const double x20 = (1. / x17) * (1 + (-1 * x19)); + const double x21 = x7 * obj_qk; + const double x22 = x6 * obj_qj * obj_qi; + const double x23 = ((x22 + (-1 * x21)) * sensor_y) + ((x10 + x8) * sensor_z) + ((x5 + (x1 * x6)) * sensor_x) + obj_px; + const double x24 = (1. / x18) * sin(x18); + const double x25 = x24 * lh_qj; + const double x26 = x20 * lh_qk * lh_qi; + const double x27 = ((x5 + (x2 * x6)) * sensor_y) + ((x22 + x21) * sensor_x) + obj_py + ((x12 + (-1 * x11)) * sensor_z); + const double x28 = x24 * lh_qi; + const double x29 = x20 * lh_qj; + const double x30 = x29 * lh_qk; + const double x31 = ((x30 + x28) * x27) + ((x26 + (-1 * x25)) * x23) + lh_pz + (x13 * (x19 + (x20 * x14))); + const double x32 = x24 * lh_qk; + const double x33 = x29 * lh_qi; + const double x34 = ((x33 + (-1 * x32)) * x27) + (x23 * (x19 + (x20 * x15))) + lh_px + ((x26 + x25) * x13); + const double x35 = atan2(-1 * x31, x34); + const double x36 = (x27 * (x19 + (x20 * x16))) + ((x33 + x32) * x23) + lh_py + ((x30 + (-1 * x28)) * x13); + const double x37 = 0.523598775598299 + (-1 * tilt_1); + const double x38 = (x34 * x34) + (x31 * x31); + const double x39 = -1 * x36 * (1. / sqrt(x38)) * tan(x37); + const double x40 = (sin((-1 * asin(x39)) + ogeeMag_1 + x35) * ogeePhase_1) + curve_1; + const double x41 = cos(x37); + const double x42 = asin((1. / x41) * x36 * (1. / sqrt(x38 + (x36 * x36)))); + const double x43 = 0.0028679863 + (x42 * (-8.0108022e-06 + (-8.0108022e-06 * x42))); + const double x44 = 5.3685255e-06 + (x42 * x43); + const double x45 = 0.0076069798 + (x42 * x44); + const double x46 = asin(x39 + (x40 * (x42 * x42) * x45 * (1. 
/ (x41 + (x40 * sin(x37) * ((x42 * (x45 + (x42 * (x44 + (x42 * (x43 + (x42 * (-8.0108022e-06 + (-1.60216044e-05 * x42))))))))) + (x42 * x45))))))); + return -1.5707963267949 + (-1 * x46) + (-1 * phase_1) + (-1 * sin((-1 * x35) + x46 + (-1 * gibPhase_1)) * gibMag_1) + x35; +} + +// Jacobian of reproject_axis_x_gen2 wrt [lh_px, lh_py, lh_pz, lh_qi, lh_qj, lh_qk] +static inline void gen_reproject_axis_x_gen2_jac_lh_p_axis_angle( + double out[6], + const LinmathAxisAnglePose* obj_p, + const LinmathPoint3d sensor_pt, + const LinmathAxisAnglePose* lh_p, + const BaseStationCal* bsc0) +{ + const double obj_px = (*obj_p).Pos[0]; + const double obj_py = (*obj_p).Pos[1]; + const double obj_pz = (*obj_p).Pos[2]; + const double obj_qi = (*obj_p).AxisAngleRot[0]; + const double obj_qj = (*obj_p).AxisAngleRot[1]; + const double obj_qk = (*obj_p).AxisAngleRot[2]; + const double sensor_x = sensor_pt[0]; + const double sensor_y = sensor_pt[1]; + const double sensor_z = sensor_pt[2]; + const double lh_px = (*lh_p).Pos[0]; + const double lh_py = (*lh_p).Pos[1]; + const double lh_pz = (*lh_p).Pos[2]; + const double lh_qi = (*lh_p).AxisAngleRot[0]; + const double lh_qj = (*lh_p).AxisAngleRot[1]; + const double lh_qk = (*lh_p).AxisAngleRot[2]; + // const double phase_0 = (*bsc0).phase; + const double tilt_0 = (*bsc0).tilt; + const double curve_0 = (*bsc0).curve; + const double gibPhase_0 = (*bsc0).gibpha; + const double gibMag_0 = (*bsc0).gibmag; + const double ogeeMag_0 = (*bsc0).ogeephase; + const double ogeePhase_0 = (*bsc0).ogeemag; + const double x0 = lh_qk * lh_qk; + const double x1 = lh_qi * lh_qi; + const double x2 = lh_qj * lh_qj; + const double x3 = 1e-10 + x2 + x0 + x1; + const double x4 = sqrt(x3); + const double x5 = sin(x4); + const double x6 = (1. / x4) * x5; + const double x7 = x6 * lh_qj; + const double x8 = 1. / x3; + const double x9 = cos(x4); + const double x10 = 1 + (-1 * x9); + const double x11 = x8 * x10; + const double x12 = x11 * lh_qi; + const double x13 = x12 * lh_qk; + const double x14 = obj_qk * obj_qk; + const double x15 = obj_qi * obj_qi; + const double x16 = obj_qj * obj_qj; + const double x17 = 1e-10 + x14 + x16 + x15; + const double x18 = sqrt(x17); + const double x19 = cos(x18); + const double x20 = (1. / x17) * (1 + (-1 * x19)); + const double x21 = (1. 
/ x18) * sin(x18); + const double x22 = x21 * obj_qj; + const double x23 = x20 * obj_qk; + const double x24 = x23 * obj_qi; + const double x25 = x21 * obj_qi; + const double x26 = x23 * obj_qj; + const double x27 = ((x26 + x25) * sensor_y) + ((x24 + (-1 * x22)) * sensor_x) + obj_pz + ((x19 + (x20 * x14)) * sensor_z); + const double x28 = x21 * obj_qk; + const double x29 = x20 * obj_qj * obj_qi; + const double x30 = ((x29 + (-1 * x28)) * sensor_y) + ((x24 + x22) * sensor_z) + ((x19 + (x20 * x15)) * sensor_x) + obj_px; + const double x31 = ((x29 + x28) * sensor_x) + ((x19 + (x20 * x16)) * sensor_y) + obj_py + ((x26 + (-1 * x25)) * sensor_z); + const double x32 = x6 * lh_qk; + const double x33 = -1 * x32; + const double x34 = x11 * lh_qj; + const double x35 = x34 * lh_qi; + const double x36 = ((x35 + x33) * x31) + lh_px + (x30 * (x9 + (x1 * x11))) + (x27 * (x13 + x7)); + const double x37 = x6 * lh_qi; + const double x38 = -1 * x37; + const double x39 = x34 * lh_qk; + const double x40 = (x31 * (x9 + (x2 * x11))) + lh_py + ((x35 + x32) * x30) + ((x39 + x38) * x27); + const double x41 = x40 * x36; + const double x42 = 0.523598775598299 + tilt_0; + const double x43 = tan(x42); + const double x44 = -1 * x7; + const double x45 = ((x39 + x37) * x31) + ((x13 + x44) * x30) + lh_pz + (x27 * (x9 + (x0 * x11))); + const double x46 = x36 * x36; + const double x47 = x46 + (x45 * x45); + const double x48 = x43 * (1. / (x47 * sqrt(x47))); + const double x49 = x41 * x48; + const double x50 = cos(x42); + const double x51 = 1. / x50; + const double x52 = x40 * x40; + const double x53 = x47 + x52; + const double x54 = (1. / sqrt(x53)) * x51; + const double x55 = asin(x54 * x40); + const double x56 = -8.0108022e-06 + (-1.60216044e-05 * x55); + const double x57 = 8.0108022e-06 * x55; + const double x58 = -8.0108022e-06 + (-1 * x57); + const double x59 = 0.0028679863 + (x58 * x55); + const double x60 = x59 + (x56 * x55); + const double x61 = 5.3685255e-06 + (x55 * x59); + const double x62 = x61 + (x60 * x55); + const double x63 = 0.0076069798 + (x61 * x55); + const double x64 = x63 + (x62 * x55); + const double x65 = 1. / sqrt(1 + (-1 * (1. / (x50 * x50)) * (1. / x53) * x52)); + const double x66 = (1. / (x53 * sqrt(x53))) * x51; + const double x67 = x66 * x41; + const double x68 = x67 * x65; + const double x69 = -1 * x68 * x58; + const double x70 = 2.40324066e-05 * x55; + const double x71 = x65 * x59; + const double x72 = (x55 * (x69 + (x68 * x57))) + (-1 * x71 * x67); + const double x73 = (x72 * x55) + (-1 * x61 * x68); + const double x74 = atan2(-1 * x45, x36); + const double x75 = x43 * (1. / sqrt(x47)); + const double x76 = x75 * x40; + const double x77 = (-1 * asin(x76)) + x74 + ogeeMag_0; + const double x78 = (sin(x77) * ogeePhase_0) + curve_0; + const double x79 = sin(x42); + const double x80 = x79 * x78; + const double x81 = 1. / x47; + const double x82 = 1. / sqrt(1 + (-1 * x81 * x52 * (x43 * x43))); + const double x83 = x81 * x45; + const double x84 = x83 + (x82 * x49); + const double x85 = cos(x77) * ogeePhase_0; + const double x86 = x63 * x55; + const double x87 = (x64 * x55) + x86; + const double x88 = x87 * x79; + const double x89 = x88 * x85; + const double x90 = x50 + (-1 * x80 * x87); + const double x91 = x55 * x55; + const double x92 = x78 * x91; + const double x93 = x63 * x92 * (1. / (x90 * x90)); + const double x94 = 2 * x36; + const double x95 = x66 * x40; + const double x96 = 1. 
/ x90; + const double x97 = x86 * x78 * x96; + const double x98 = x65 * x97 * x95; + const double x99 = x92 * x96; + const double x100 = x63 * x91 * x96; + const double x101 = x85 * x100; + const double x102 = x76 + (x63 * x99); + const double x103 = 1. / sqrt(1 + (-1 * (x102 * x102))); + const double x104 = x103 * ((x84 * x101) + (x73 * x99) + (-1 * x98 * x94) + (-1 * x49) + (-1 * x93 * ((-1 * x89 * x84) + (-1 * x80 * ((x73 * x55) + (-1 * x63 * x68) + (-1 * x64 * x68) + (x55 * (x73 + (x55 * (x72 + (-1 * x60 * x68) + (x55 * ((x70 * x68) + x69 + (-1 * x68 * x56))))) + (-1 * x62 * x68)))))))); + const double x105 = cos((-1 * asin(x102)) + gibPhase_0 + x74) * gibMag_0; + const double x106 = x65 * (x54 + (-1 * x66 * x52)); + const double x107 = x58 * x106; + const double x108 = (x55 * (x107 + (-1 * x57 * x106))) + (x59 * x106); + const double x109 = (x55 * x108) + (x61 * x106); + const double x110 = x82 * x75; + const double x111 = 2 * x97; + const double x112 = x103 * ((x106 * x111) + (-1 * x101 * x110) + (x99 * x109) + x75 + (-1 * x93 * ((x89 * x110) + (-1 * x80 * ((x63 * x106) + (x55 * x109) + (x64 * x106) + (x55 * (x109 + (x55 * (x108 + (x60 * x106) + (x55 * (x107 + (-1 * x70 * x106) + (x56 * x106))))) + (x62 * x106)))))))); + const double x113 = x40 * x48; + const double x114 = x45 * x113; + const double x115 = x95 * x45; + const double x116 = x65 * x115; + const double x117 = -1 * x58 * x116; + const double x118 = (x55 * (x117 + (x57 * x116))) + (-1 * x71 * x115); + const double x119 = (x55 * x118) + (-1 * x61 * x116); + const double x120 = x81 * x36; + const double x121 = -1 * x120; + const double x122 = x121 + (x82 * x114); + const double x123 = 2 * x45; + const double x124 = x103 * ((-1 * x98 * x123) + (x101 * x122) + (-1 * x114) + (x99 * x119) + (-1 * x93 * ((-1 * x89 * x122) + (-1 * x80 * ((-1 * x63 * x116) + (x55 * (x119 + (x55 * (x118 + (-1 * x60 * x116) + (x55 * ((x70 * x116) + x117 + (-1 * x56 * x116))))) + (-1 * x62 * x116))) + (-1 * x64 * x116) + (x55 * x119)))))); + const double x125 = 2 * (1. / (x3 * x3)) * x10; + const double x126 = x125 * lh_qi; + const double x127 = (1. 
/ (x3 * sqrt(x3))) * x5; + const double x128 = x2 * x127; + const double x129 = (x128 * lh_qi) + (-1 * x2 * x126); + const double x130 = x8 * x9; + const double x131 = x1 * x130; + const double x132 = x1 * x127; + const double x133 = lh_qj * lh_qi; + const double x134 = x127 * lh_qk; + const double x135 = x125 * lh_qk; + const double x136 = (-1 * x133 * x135) + (x133 * x134); + const double x137 = x136 + (-1 * x6); + const double x138 = x125 * lh_qj; + const double x139 = (x132 * lh_qj) + (-1 * x1 * x138); + const double x140 = x139 + x34; + const double x141 = x130 * lh_qk; + const double x142 = x141 * lh_qi; + const double x143 = x134 * lh_qi; + const double x144 = (-1 * x143) + x142; + const double x145 = ((x144 + x140) * x30) + (x31 * (x129 + x38)) + (x27 * (x137 + (-1 * x131) + x132)); + const double x146 = 2 * x40; + const double x147 = x143 + (-1 * x142); + const double x148 = x11 * lh_qk; + const double x149 = (x132 * lh_qk) + (-1 * x1 * x135); + const double x150 = x149 + x148; + const double x151 = x130 * x133; + const double x152 = x127 * x133; + const double x153 = (-1 * x152) + x151; + const double x154 = lh_qi * lh_qi * lh_qi; + const double x155 = (((2 * x12) + (-1 * x125 * x154) + x38 + (x127 * x154)) * x30) + ((x147 + x140) * x31) + ((x153 + x150) * x27); + const double x156 = x136 + x6; + const double x157 = x0 * x127; + const double x158 = (x157 * lh_qi) + (-1 * x0 * x126); + const double x159 = x152 + (-1 * x151); + const double x160 = ((x159 + x150) * x30) + (x31 * (x156 + x131 + (-1 * x132))) + (x27 * (x158 + x38)); + const double x161 = (x123 * x160) + (x94 * x155); + const double x162 = 1.0/2.0 * x95; + const double x163 = x65 * ((x54 * x145) + (-1 * x162 * (x161 + (x146 * x145)))); + const double x164 = x58 * x163; + const double x165 = (x55 * (x164 + (-1 * x57 * x163))) + (x59 * x163); + const double x166 = (x55 * x165) + (x61 * x163); + const double x167 = 1.0/2.0 * x113; + const double x168 = (x75 * x145) + (-1 * x161 * x167); + const double x169 = 1. / x36; + const double x170 = x45 * (1. 
/ x46); + const double x171 = x81 * x46; + const double x172 = ((x170 * x155) + (-1 * x160 * x169)) * x171; + const double x173 = x172 + (-1 * x82 * x168); + const double x174 = x103 * ((-1 * x93 * ((-1 * x89 * x173) + (-1 * x80 * ((x55 * x166) + (x55 * (x166 + (x55 * (x165 + (x60 * x163) + (x55 * ((-1 * x70 * x163) + x164 + (x56 * x163))))) + (x62 * x163))) + (x63 * x163) + (x64 * x163))))) + (x101 * x173) + x168 + (x99 * x166) + (x111 * x163)); + const double x175 = lh_qj * lh_qj * lh_qj; + const double x176 = (x128 * lh_qk) + (-1 * x2 * x135); + const double x177 = x176 + x148; + const double x178 = x129 + x12; + const double x179 = x141 * lh_qj; + const double x180 = x134 * lh_qj; + const double x181 = (-1 * x180) + x179; + const double x182 = ((x181 + x178) * x30) + (((-1 * x125 * x175) + (x127 * x175) + x44 + (2 * x34)) * x31) + ((x177 + x159) * x27); + const double x183 = x180 + (-1 * x179); + const double x184 = x2 * x130; + const double x185 = (x30 * (x139 + x44)) + ((x183 + x178) * x31) + (x27 * (x156 + x184 + (-1 * x128))); + const double x186 = (x157 * lh_qj) + (-1 * x0 * x138); + const double x187 = ((x177 + x153) * x31) + (x30 * (x137 + (-1 * x184) + x128)) + (x27 * (x186 + x44)); + const double x188 = (x123 * x187) + (x94 * x185); + const double x189 = x65 * ((x54 * x182) + (-1 * x162 * (x188 + (x182 * x146)))); + const double x190 = x58 * x189; + const double x191 = (x55 * (x190 + (-1 * x57 * x189))) + (x59 * x189); + const double x192 = (x55 * x191) + (x61 * x189); + const double x193 = (x75 * x182) + (-1 * x167 * x188); + const double x194 = ((x170 * x185) + (-1 * x169 * x187)) * x171; + const double x195 = x194 + (-1 * x82 * x193); + const double x196 = x103 * ((x111 * x189) + x193 + (x99 * x192) + (-1 * x93 * ((-1 * x89 * x195) + (-1 * x80 * ((x63 * x189) + (x55 * x192) + (x64 * x189) + (x55 * (x192 + (x55 * ((x60 * x189) + x191 + (x55 * ((-1 * x70 * x189) + (x56 * x189) + x190)))) + (x62 * x189))))))) + (x101 * x195)); + const double x197 = x186 + x34; + const double x198 = x0 * x130; + const double x199 = ((x197 + x147) * x27) + (x30 * (x156 + (-1 * x157) + x198)) + (x31 * (x176 + x33)); + const double x200 = x158 + x12; + const double x201 = (x30 * (x149 + x33)) + (x31 * (x137 + x157 + (-1 * x198))) + ((x200 + x181) * x27); + const double x202 = lh_qk * lh_qk * lh_qk; + const double x203 = ((x200 + x183) * x30) + ((x197 + x144) * x31) + (x27 * (x33 + (2 * x148) + (x202 * x127) + (-1 * x202 * x125))); + const double x204 = (x203 * x123) + (x94 * x201); + const double x205 = x65 * ((x54 * x199) + (-1 * x162 * (x204 + (x199 * x146)))); + const double x206 = (x75 * x199) + (-1 * x204 * x167); + const double x207 = ((x201 * x170) + (-1 * x203 * x169)) * x171; + const double x208 = x85 * (x207 + (-1 * x82 * x206)); + const double x209 = x58 * x205; + const double x210 = (x55 * (x209 + (-1 * x57 * x205))) + (x59 * x205); + const double x211 = (x55 * x210) + (x61 * x205); + const double x212 = x103 * (x206 + (-1 * x93 * ((-1 * x88 * x208) + (-1 * x80 * ((x63 * x205) + (x64 * x205) + (x55 * x211) + (x55 * (x211 + (x55 * (x210 + (x60 * x205) + (x55 * ((-1 * x70 * x205) + x209 + (x56 * x205))))) + (x62 * x205))))))) + (x99 * x211) + (x205 * x111) + (x208 * x100)); + out[0] = (-1 * x105 * ((-1 * x83) + x104)) + x83 + (-1 * x104); + out[1] = (-1 * x105 * x112) + (-1 * x112); + out[2] = x121 + (-1 * x124) + (-1 * (x120 + x124) * x105); + out[3] = x172 + (-1 * x174) + (-1 * ((-1 * x172) + x174) * x105); + out[4] = x194 + (-1 * x196) + (-1 * ((-1 * x194) + x196) * x105); + out[5] = 
x207 + (-1 * x212) + (-1 * ((-1 * x207) + x212) * x105); +} + +// Jacobian of reproject_axis_y_gen2 wrt [lh_px, lh_py, lh_pz, lh_qi, lh_qj, lh_qk] +static inline void gen_reproject_axis_y_gen2_jac_lh_p_axis_angle( + double out[6], + const LinmathAxisAnglePose* obj_p, + const LinmathPoint3d sensor_pt, + const LinmathAxisAnglePose* lh_p, + const BaseStationCal* bsc1) +{ + const double obj_px = (*obj_p).Pos[0]; + const double obj_py = (*obj_p).Pos[1]; + const double obj_pz = (*obj_p).Pos[2]; + const double obj_qi = (*obj_p).AxisAngleRot[0]; + const double obj_qj = (*obj_p).AxisAngleRot[1]; + const double obj_qk = (*obj_p).AxisAngleRot[2]; + const double sensor_x = sensor_pt[0]; + const double sensor_y = sensor_pt[1]; + const double sensor_z = sensor_pt[2]; + const double lh_px = (*lh_p).Pos[0]; + const double lh_py = (*lh_p).Pos[1]; + const double lh_pz = (*lh_p).Pos[2]; + const double lh_qi = (*lh_p).AxisAngleRot[0]; + const double lh_qj = (*lh_p).AxisAngleRot[1]; + const double lh_qk = (*lh_p).AxisAngleRot[2]; + // const double phase_1 = (*bsc1).phase; + const double tilt_1 = (*bsc1).tilt; + const double curve_1 = (*bsc1).curve; + const double gibPhase_1 = (*bsc1).gibpha; + const double gibMag_1 = (*bsc1).gibmag; + const double ogeeMag_1 = (*bsc1).ogeephase; + const double ogeePhase_1 = (*bsc1).ogeemag; + const double x0 = obj_qk * obj_qk; + const double x1 = obj_qi * obj_qi; + const double x2 = obj_qj * obj_qj; + const double x3 = 1e-10 + x2 + x0 + x1; + const double x4 = sqrt(x3); + const double x5 = cos(x4); + const double x6 = (1. / x3) * (1 + (-1 * x5)); + const double x7 = (1. / x4) * sin(x4); + const double x8 = x7 * obj_qj; + const double x9 = x6 * obj_qk; + const double x10 = x9 * obj_qi; + const double x11 = x7 * obj_qi; + const double x12 = x9 * obj_qj; + const double x13 = ((x12 + x11) * sensor_y) + obj_pz + ((x10 + (-1 * x8)) * sensor_x) + ((x5 + (x0 * x6)) * sensor_z); + const double x14 = lh_qk * lh_qk; + const double x15 = lh_qi * lh_qi; + const double x16 = lh_qj * lh_qj; + const double x17 = 1e-10 + x14 + x16 + x15; + const double x18 = 1. / x17; + const double x19 = sqrt(x17); + const double x20 = cos(x19); + const double x21 = 1 + (-1 * x20); + const double x22 = x21 * x18; + const double x23 = x7 * obj_qk; + const double x24 = x6 * obj_qj * obj_qi; + const double x25 = ((x10 + x8) * sensor_z) + ((x24 + (-1 * x23)) * sensor_y) + ((x5 + (x1 * x6)) * sensor_x) + obj_px; + const double x26 = sin(x19); + const double x27 = x26 * (1. / x19); + const double x28 = x27 * lh_qj; + const double x29 = -1 * x28; + const double x30 = x22 * lh_qk; + const double x31 = x30 * lh_qi; + const double x32 = ((x5 + (x2 * x6)) * sensor_y) + ((x24 + x23) * sensor_x) + obj_py + ((x12 + (-1 * x11)) * sensor_z); + const double x33 = x27 * lh_qi; + const double x34 = x30 * lh_qj; + const double x35 = ((x34 + x33) * x32) + ((x31 + x29) * x25) + lh_pz + (x13 * (x20 + (x22 * x14))); + const double x36 = x27 * lh_qk; + const double x37 = -1 * x36; + const double x38 = x22 * lh_qi; + const double x39 = x38 * lh_qj; + const double x40 = ((x39 + x37) * x32) + (x25 * (x20 + (x22 * x15))) + lh_px + ((x31 + x28) * x13); + const double x41 = x40 * x40; + const double x42 = x41 + (x35 * x35); + const double x43 = 1. / x42; + const double x44 = x43 * x35; + const double x45 = -1 * x33; + const double x46 = (x32 * (x20 + (x22 * x16))) + ((x39 + x36) * x25) + lh_py + ((x34 + x45) * x13); + const double x47 = 0.523598775598299 + (-1 * tilt_1); + const double x48 = cos(x47); + const double x49 = 1. 
/ x48; + const double x50 = x46 * x46; + const double x51 = x42 + x50; + const double x52 = (1. / sqrt(x51)) * x49; + const double x53 = asin(x52 * x46); + const double x54 = 8.0108022e-06 * x53; + const double x55 = -8.0108022e-06 + (-1 * x54); + const double x56 = 0.0028679863 + (x53 * x55); + const double x57 = 5.3685255e-06 + (x53 * x56); + const double x58 = 0.0076069798 + (x53 * x57); + const double x59 = x53 * x53; + const double x60 = atan2(-1 * x35, x40); + const double x61 = tan(x47); + const double x62 = x61 * (1. / sqrt(x42)); + const double x63 = -1 * x62 * x46; + const double x64 = ogeeMag_1 + (-1 * asin(x63)) + x60; + const double x65 = (sin(x64) * ogeePhase_1) + curve_1; + const double x66 = x53 * x58; + const double x67 = -8.0108022e-06 + (-1.60216044e-05 * x53); + const double x68 = x56 + (x67 * x53); + const double x69 = x57 + (x68 * x53); + const double x70 = x58 + (x69 * x53); + const double x71 = (x70 * x53) + x66; + const double x72 = sin(x47); + const double x73 = x72 * x65; + const double x74 = x48 + (x71 * x73); + const double x75 = 1. / x74; + const double x76 = x75 * x65; + const double x77 = x76 * x59; + const double x78 = x63 + (x77 * x58); + const double x79 = 1. / sqrt(1 + (-1 * (x78 * x78))); + const double x80 = x40 * x46; + const double x81 = x61 * (1. / (x42 * sqrt(x42))); + const double x82 = x80 * x81; + const double x83 = 1. / sqrt(1 + (-1 * x50 * (1. / x51) * (1. / (x48 * x48)))); + const double x84 = 2 * x46; + const double x85 = (1. / (x51 * sqrt(x51))) * x49; + const double x86 = x76 * x66; + const double x87 = x83 * x84 * x85 * x86; + const double x88 = 1. / sqrt(1 + (-1 * (x61 * x61) * x50 * x43)); + const double x89 = x44 + (-1 * x82 * x88); + const double x90 = cos(x64) * ogeePhase_1; + const double x91 = x71 * x72; + const double x92 = x91 * x90; + const double x93 = x83 * x70; + const double x94 = x80 * x85; + const double x95 = x83 * x94; + const double x96 = x83 * x55; + const double x97 = -1 * x96 * x94; + const double x98 = 2.40324066e-05 * x53; + const double x99 = (-1 * x56 * x95) + (x53 * (x97 + (x54 * x95))); + const double x100 = (-1 * x57 * x95) + (x53 * x99); + const double x101 = x83 * x58; + const double x102 = x58 * x59; + const double x103 = (1. 
/ (x74 * x74)) * x65 * x102; + const double x104 = x75 * x102; + const double x105 = x90 * x104; + const double x106 = x79 * ((x77 * x100) + (x89 * x105) + x82 + (-1 * x103 * ((x73 * ((x53 * x100) + (-1 * x94 * x101) + (-1 * x93 * x94) + (x53 * (x100 + (x53 * (x99 + (-1 * x68 * x95) + (x53 * (x97 + (x98 * x95) + (-1 * x67 * x95))))) + (-1 * x69 * x95))))) + (x89 * x92))) + (-1 * x87 * x40)); + const double x107 = cos((-1 * asin(x78)) + x60 + gibPhase_1) * gibMag_1; + const double x108 = x52 + (-1 * x85 * x50); + const double x109 = x83 * x108; + const double x110 = 2 * x86; + const double x111 = x88 * x62; + const double x112 = x96 * x108; + const double x113 = (x56 * x109) + (x53 * (x112 + (-1 * x54 * x109))); + const double x114 = (x57 * x109) + (x53 * x113); + const double x115 = x79 * ((x105 * x111) + (x77 * x114) + (-1 * x62) + (x109 * x110) + (-1 * x103 * ((x73 * ((x53 * x114) + (x58 * x109) + (x70 * x109) + (x53 * (x114 + (x53 * (x113 + (x68 * x109) + (x53 * ((-1 * x98 * x109) + x112 + (x67 * x109))))) + (x69 * x109))))) + (x92 * x111)))); + const double x116 = x40 * x43; + const double x117 = -1 * x116; + const double x118 = x81 * x46; + const double x119 = x35 * x118; + const double x120 = x117 + (-1 * x88 * x119); + const double x121 = x85 * x46; + const double x122 = x35 * x121; + const double x123 = x83 * x122; + const double x124 = -1 * x96 * x122; + const double x125 = (-1 * x56 * x123) + (x53 * (x124 + (x54 * x123))); + const double x126 = (-1 * x57 * x123) + (x53 * x125); + const double x127 = x79 * ((x77 * x126) + (-1 * x103 * ((x73 * ((-1 * x101 * x122) + (x53 * x126) + (-1 * x93 * x122) + (x53 * (x126 + (x53 * (x125 + (-1 * x68 * x123) + (x53 * ((x98 * x123) + (-1 * x67 * x123) + x124)))) + (-1 * x69 * x123))))) + (x92 * x120))) + (x105 * x120) + x119 + (-1 * x87 * x35)); + const double x128 = 2 * x21 * (1. / (x17 * x17)); + const double x129 = x26 * (1. 
/ (x17 * sqrt(x17))); + const double x130 = x16 * x129; + const double x131 = (x130 * lh_qi) + (-1 * x16 * x128 * lh_qi); + const double x132 = x20 * x18; + const double x133 = x15 * x132; + const double x134 = x15 * x129; + const double x135 = x129 * lh_qk; + const double x136 = x135 * lh_qi; + const double x137 = lh_qk * lh_qi; + const double x138 = x128 * lh_qj; + const double x139 = (-1 * x137 * x138) + (x136 * lh_qj); + const double x140 = x139 + (-1 * x27); + const double x141 = x22 * lh_qj; + const double x142 = (x134 * lh_qj) + (-1 * x15 * x138); + const double x143 = x142 + x141; + const double x144 = x132 * x137; + const double x145 = (-1 * x136) + x144; + const double x146 = ((x145 + x143) * x25) + (x32 * (x131 + x45)) + (x13 * (x140 + (-1 * x133) + x134)); + const double x147 = x136 + (-1 * x144); + const double x148 = x128 * lh_qk; + const double x149 = (x134 * lh_qk) + (-1 * x15 * x148); + const double x150 = x149 + x30; + const double x151 = x132 * lh_qj; + const double x152 = x151 * lh_qi; + const double x153 = x129 * lh_qj * lh_qi; + const double x154 = (-1 * x153) + x152; + const double x155 = lh_qi * lh_qi * lh_qi; + const double x156 = (((2 * x38) + (-1 * x128 * x155) + x45 + (x129 * x155)) * x25) + ((x147 + x143) * x32) + ((x154 + x150) * x13); + const double x157 = 2 * x40; + const double x158 = x139 + x27; + const double x159 = x14 * x128; + const double x160 = x14 * x129; + const double x161 = (x160 * lh_qi) + (-1 * x159 * lh_qi); + const double x162 = x153 + (-1 * x152); + const double x163 = ((x162 + x150) * x25) + (x32 * (x133 + x158 + (-1 * x134))) + (x13 * (x161 + x45)); + const double x164 = 2 * x35; + const double x165 = (x164 * x163) + (x156 * x157); + const double x166 = 1.0/2.0 * x121; + const double x167 = (x52 * x146) + (-1 * x166 * (x165 + (x84 * x146))); + const double x168 = x83 * x167; + const double x169 = x96 * x167; + const double x170 = (x56 * x168) + (x53 * (x169 + (-1 * x54 * x168))); + const double x171 = (x57 * x168) + (x53 * x170); + const double x172 = 1.0/2.0 * x118; + const double x173 = (-1 * x62 * x146) + (x165 * x172); + const double x174 = 1. / x40; + const double x175 = (1. 
/ x41) * x35; + const double x176 = x41 * x43; + const double x177 = ((x175 * x156) + (-1 * x163 * x174)) * x176; + const double x178 = x177 + (-1 * x88 * x173); + const double x179 = x79 * ((-1 * x103 * ((x73 * ((x58 * x168) + (x53 * x171) + (x70 * x168) + (x53 * (x171 + (x53 * (x170 + (x68 * x168) + (x53 * ((-1 * x98 * x168) + x169 + (x67 * x168))))) + (x69 * x168))))) + (x92 * x178))) + (x77 * x171) + x173 + (x110 * x168) + (x105 * x178)); + const double x180 = lh_qj * lh_qj * lh_qj; + const double x181 = (x130 * lh_qk) + (-1 * x16 * x148); + const double x182 = x181 + x30; + const double x183 = x131 + x38; + const double x184 = x151 * lh_qk; + const double x185 = x135 * lh_qj; + const double x186 = (-1 * x185) + x184; + const double x187 = ((x186 + x183) * x25) + (x32 * ((-1 * x128 * x180) + (x129 * x180) + x29 + (2 * x141))) + ((x162 + x182) * x13); + const double x188 = x185 + (-1 * x184); + const double x189 = x16 * x132; + const double x190 = (x25 * (x142 + x29)) + ((x188 + x183) * x32) + (x13 * (x158 + x189 + (-1 * x130))); + const double x191 = (x160 * lh_qj) + (-1 * x159 * lh_qj); + const double x192 = (x25 * ((-1 * x189) + x140 + x130)) + ((x182 + x154) * x32) + (x13 * (x191 + x29)); + const double x193 = (x164 * x192) + (x190 * x157); + const double x194 = x83 * ((x52 * x187) + (-1 * x166 * (x193 + (x84 * x187)))); + const double x195 = x55 * x194; + const double x196 = (x56 * x194) + (x53 * (x195 + (-1 * x54 * x194))); + const double x197 = (x57 * x194) + (x53 * x196); + const double x198 = (-1 * x62 * x187) + (x172 * x193); + const double x199 = ((x175 * x190) + (-1 * x174 * x192)) * x176; + const double x200 = x199 + (-1 * x88 * x198); + const double x201 = x79 * ((-1 * x103 * ((x73 * ((x70 * x194) + (x53 * (x197 + (x53 * (x196 + (x68 * x194) + (x53 * ((-1 * x98 * x194) + x195 + (x67 * x194))))) + (x69 * x194))) + (x58 * x194) + (x53 * x197))) + (x92 * x200))) + (x77 * x197) + x198 + (x110 * x194) + (x200 * x105)); + const double x202 = x14 * x132; + const double x203 = x161 + x38; + const double x204 = (x25 * (x149 + x37)) + (x32 * (x140 + x160 + (-1 * x202))) + ((x203 + x186) * x13); + const double x205 = x191 + x141; + const double x206 = lh_qk * lh_qk * lh_qk; + const double x207 = ((x203 + x188) * x25) + ((x205 + x145) * x32) + (((2 * x30) + x37 + (x206 * x129) + (-1 * x206 * x128)) * x13); + const double x208 = (x207 * x164) + (x204 * x157); + const double x209 = (x25 * ((-1 * x160) + x158 + x202)) + ((x205 + x147) * x13) + (x32 * (x181 + x37)); + const double x210 = (-1 * x62 * x209) + (x208 * x172); + const double x211 = ((x204 * x175) + (-1 * x207 * x174)) * x176; + const double x212 = x90 * (x211 + (-1 * x88 * x210)); + const double x213 = (x52 * x209) + (-1 * x166 * (x208 + (x84 * x209))); + const double x214 = x83 * x213; + const double x215 = x96 * x213; + const double x216 = (x56 * x214) + (x53 * (x215 + (-1 * x54 * x214))); + const double x217 = (x57 * x214) + (x53 * x216); + const double x218 = x79 * (x210 + (-1 * x103 * ((x73 * ((x53 * x217) + (x70 * x214) + (x58 * x214) + (x53 * (x217 + (x53 * (x216 + (x68 * x214) + (x53 * (x215 + (-1 * x98 * x214) + (x67 * x214))))) + (x69 * x214))))) + (x91 * x212))) + (x214 * x110) + (x212 * x104) + (x77 * x217)); + out[0] = (-1 * x107 * ((-1 * x44) + x106)) + x44 + (-1 * x106); + out[1] = (-1 * x107 * x115) + (-1 * x115); + out[2] = (-1 * (x116 + x127) * x107) + x117 + (-1 * x127); + out[3] = (-1 * ((-1 * x177) + x179) * x107) + (-1 * x179) + x177; + out[4] = (-1 * ((-1 * x199) + x201) * x107) + (-1 * x201) + x199; + 
out[5] = (-1 * ((-1 * x211) + x218) * x107) + (-1 * x218) + x211; +} + + +// Jacobian of reproject_axis_x_gen2 wrt [obj_px, obj_py, obj_pz, obj_qi, obj_qj, obj_qk] +static inline void gen_reproject_axis_x_gen2_jac_obj_p_axis_angle( + double out[6], + const LinmathAxisAnglePose* obj_p, + const LinmathPoint3d sensor_pt, + const LinmathAxisAnglePose* lh_p, + const BaseStationCal* bsc0) +{ + const double obj_px = (*obj_p).Pos[0]; + const double obj_py = (*obj_p).Pos[1]; + const double obj_pz = (*obj_p).Pos[2]; + const double obj_qi = (*obj_p).AxisAngleRot[0]; + const double obj_qj = (*obj_p).AxisAngleRot[1]; + const double obj_qk = (*obj_p).AxisAngleRot[2]; + const double sensor_x = sensor_pt[0]; + const double sensor_y = sensor_pt[1]; + const double sensor_z = sensor_pt[2]; + const double lh_px = (*lh_p).Pos[0]; + const double lh_py = (*lh_p).Pos[1]; + const double lh_pz = (*lh_p).Pos[2]; + const double lh_qi = (*lh_p).AxisAngleRot[0]; + const double lh_qj = (*lh_p).AxisAngleRot[1]; + const double lh_qk = (*lh_p).AxisAngleRot[2]; + // const double phase_0 = (*bsc0).phase; + const double tilt_0 = (*bsc0).tilt; + const double curve_0 = (*bsc0).curve; + const double gibPhase_0 = (*bsc0).gibpha; + const double gibMag_0 = (*bsc0).gibmag; + const double ogeeMag_0 = (*bsc0).ogeephase; + const double ogeePhase_0 = (*bsc0).ogeemag; + const double x0 = lh_qk * lh_qk; + const double x1 = lh_qi * lh_qi; + const double x2 = lh_qj * lh_qj; + const double x3 = 1e-10 + x2 + x0 + x1; + const double x4 = sqrt(x3); + const double x5 = (1. / x4) * sin(x4); + const double x6 = x5 * lh_qi; + const double x7 = cos(x4); + const double x8 = (1. / x3) * (1 + (-1 * x7)); + const double x9 = x8 * lh_qj; + const double x10 = x9 * lh_qk; + const double x11 = x10 + (-1 * x6); + const double x12 = obj_qk * obj_qk; + const double x13 = obj_qi * obj_qi; + const double x14 = obj_qj * obj_qj; + const double x15 = 1e-10 + x14 + x12 + x13; + const double x16 = 1. / x15; + const double x17 = sqrt(x15); + const double x18 = cos(x17); + const double x19 = 1 + (-1 * x18); + const double x20 = x19 * x16; + const double x21 = sin(x17); + const double x22 = x21 * (1. / x17); + const double x23 = x22 * obj_qj; + const double x24 = -1 * x23; + const double x25 = x20 * obj_qk; + const double x26 = x25 * obj_qi; + const double x27 = x22 * obj_qi; + const double x28 = x25 * obj_qj; + const double x29 = ((x26 + x24) * sensor_x) + obj_pz + ((x28 + x27) * sensor_y) + ((x18 + (x20 * x12)) * sensor_z); + const double x30 = x22 * obj_qk; + const double x31 = -1 * x30; + const double x32 = x20 * obj_qi; + const double x33 = x32 * obj_qj; + const double x34 = ((x33 + x31) * sensor_y) + ((x26 + x23) * sensor_z) + ((x18 + (x20 * x13)) * sensor_x) + obj_px; + const double x35 = x5 * lh_qk; + const double x36 = x9 * lh_qi; + const double x37 = x36 + x35; + const double x38 = -1 * x27; + const double x39 = ((x18 + (x20 * x14)) * sensor_y) + ((x33 + x30) * sensor_x) + obj_py + ((x28 + x38) * sensor_z); + const double x40 = x7 + (x2 * x8); + const double x41 = (x40 * x39) + (x34 * x37) + lh_py + (x29 * x11); + const double x42 = 0.523598775598299 + tilt_0; + const double x43 = cos(x42); + const double x44 = 1. 
/ x43; + const double x45 = x41 * x41; + const double x46 = x7 + (x0 * x8); + const double x47 = x5 * lh_qj; + const double x48 = x8 * lh_qk * lh_qi; + const double x49 = x48 + (-1 * x47); + const double x50 = x10 + x6; + const double x51 = (x50 * x39) + (x49 * x34) + lh_pz + (x46 * x29); + const double x52 = x48 + x47; + const double x53 = x7 + (x1 * x8); + const double x54 = x36 + (-1 * x35); + const double x55 = (x54 * x39) + (x53 * x34) + lh_px + (x52 * x29); + const double x56 = x55 * x55; + const double x57 = x56 + (x51 * x51); + const double x58 = x57 + x45; + const double x59 = (1. / sqrt(x58)) * x44; + const double x60 = asin(x59 * x41); + const double x61 = 8.0108022e-06 * x60; + const double x62 = -8.0108022e-06 + (-1 * x61); + const double x63 = 0.0028679863 + (x60 * x62); + const double x64 = 5.3685255e-06 + (x60 * x63); + const double x65 = 0.0076069798 + (x60 * x64); + const double x66 = x60 * x65; + const double x67 = -8.0108022e-06 + (-1.60216044e-05 * x60); + const double x68 = x63 + (x60 * x67); + const double x69 = x64 + (x60 * x68); + const double x70 = x65 + (x60 * x69); + const double x71 = (x70 * x60) + x66; + const double x72 = atan2(-1 * x51, x55); + const double x73 = tan(x42); + const double x74 = x73 * (1. / sqrt(x57)); + const double x75 = x74 * x41; + const double x76 = (-1 * asin(x75)) + x72 + ogeeMag_0; + const double x77 = (sin(x76) * ogeePhase_0) + curve_0; + const double x78 = sin(x42); + const double x79 = x78 * x77; + const double x80 = x43 + (-1 * x71 * x79); + const double x81 = 1. / x80; + const double x82 = x60 * x60; + const double x83 = x82 * x77; + const double x84 = x81 * x83; + const double x85 = x75 + (x84 * x65); + const double x86 = 1. / sqrt(1 + (-1 * (x85 * x85))); + const double x87 = 1. / sqrt(1 + (-1 * (1. / x58) * (1. / (x43 * x43)) * x45)); + const double x88 = 2 * x41; + const double x89 = 2 * x55; + const double x90 = 2 * x51; + const double x91 = (x90 * x49) + (x89 * x53); + const double x92 = 1.0/2.0 * x41; + const double x93 = (1. / (x58 * sqrt(x58))) * x92 * x44; + const double x94 = x87 * ((x59 * x37) + (-1 * x93 * (x91 + (x88 * x37)))); + const double x95 = 2 * x81 * x77 * x66; + const double x96 = x62 * x94; + const double x97 = (x60 * (x96 + (-1 * x61 * x94))) + (x63 * x94); + const double x98 = (x60 * x97) + (x64 * x94); + const double x99 = 1. / x57; + const double x100 = 1. / sqrt(1 + (-1 * (x73 * x73) * x99 * x45)); + const double x101 = x73 * (1. / (x57 * sqrt(x57))) * x92; + const double x102 = (x74 * x37) + (-1 * x91 * x101); + const double x103 = 1. / x55; + const double x104 = x51 * (1. / x56); + const double x105 = x56 * x99; + const double x106 = ((x53 * x104) + (-1 * x49 * x103)) * x105; + const double x107 = cos(x76) * ogeePhase_0; + const double x108 = x107 * (x106 + (-1 * x100 * x102)); + const double x109 = x81 * x82 * x65; + const double x110 = 2.40324066e-05 * x60; + const double x111 = x71 * x78; + const double x112 = (1. 
/ (x80 * x80)) * x83 * x65; + const double x113 = x86 * (x102 + (x109 * x108) + (x95 * x94) + (-1 * x112 * ((-1 * x108 * x111) + (-1 * x79 * ((x65 * x94) + (x70 * x94) + (x60 * (x98 + (x60 * (x97 + (x68 * x94) + (x60 * ((-1 * x94 * x110) + x96 + (x67 * x94))))) + (x69 * x94))) + (x60 * x98))))) + (x84 * x98)); + const double x114 = cos((-1 * asin(x85)) + gibPhase_0 + x72) * gibMag_0; + const double x115 = (x50 * x90) + (x89 * x54); + const double x116 = (x74 * x40) + (-1 * x101 * x115); + const double x117 = ((x54 * x104) + (-1 * x50 * x103)) * x105; + const double x118 = x117 + (-1 * x100 * x116); + const double x119 = x109 * x107; + const double x120 = x87 * ((x59 * x40) + (-1 * x93 * (x115 + (x88 * x40)))); + const double x121 = x62 * x120; + const double x122 = (x60 * (x121 + (-1 * x61 * x120))) + (x63 * x120); + const double x123 = (x60 * x122) + (x64 * x120); + const double x124 = x107 * x111; + const double x125 = x86 * (x116 + (x84 * x123) + (-1 * x112 * ((-1 * x118 * x124) + (-1 * x79 * ((x65 * x120) + (x70 * x120) + (x60 * (x123 + (x60 * (x122 + (x68 * x120) + (x60 * ((-1 * x110 * x120) + x121 + (x67 * x120))))) + (x69 * x120))) + (x60 * x123))))) + (x118 * x119) + (x95 * x120)); + const double x126 = (x90 * x46) + (x89 * x52); + const double x127 = x87 * ((x59 * x11) + (-1 * x93 * (x126 + (x88 * x11)))); + const double x128 = (x74 * x11) + (-1 * x101 * x126); + const double x129 = ((x52 * x104) + (-1 * x46 * x103)) * x105; + const double x130 = x129 + (-1 * x100 * x128); + const double x131 = x62 * x127; + const double x132 = (x60 * (x131 + (-1 * x61 * x127))) + (x63 * x127); + const double x133 = (x60 * x132) + (x64 * x127); + const double x134 = x86 * (x128 + (-1 * x112 * ((-1 * x124 * x130) + (-1 * x79 * ((x65 * x127) + (x60 * x133) + (x70 * x127) + (x60 * (x133 + (x60 * (x132 + (x68 * x127) + (x60 * ((-1 * x110 * x127) + x131 + (x67 * x127))))) + (x69 * x127))))))) + (x95 * x127) + (x84 * x133) + (x119 * x130)); + const double x135 = 2 * (1. / (x15 * x15)) * x19; + const double x136 = x135 * obj_qi; + const double x137 = x21 * (1. 
/ (x15 * sqrt(x15))); + const double x138 = x14 * x137; + const double x139 = (x138 * obj_qi) + (-1 * x14 * x136); + const double x140 = x18 * x16; + const double x141 = x13 * x140; + const double x142 = x13 * x137; + const double x143 = obj_qj * obj_qi; + const double x144 = x135 * obj_qk; + const double x145 = x137 * x143; + const double x146 = (x145 * obj_qk) + (-1 * x144 * x143); + const double x147 = x146 + (-1 * x22); + const double x148 = x137 * obj_qk; + const double x149 = x148 * obj_qi; + const double x150 = x140 * obj_qk; + const double x151 = x150 * obj_qi; + const double x152 = x151 + (-1 * x149); + const double x153 = x20 * obj_qj; + const double x154 = x13 * x135; + const double x155 = (x142 * obj_qj) + (-1 * x154 * obj_qj); + const double x156 = x155 + x153; + const double x157 = ((x156 + x152) * sensor_x) + ((x139 + x38) * sensor_y) + ((x147 + (-1 * x141) + x142) * sensor_z); + const double x158 = x12 * x137; + const double x159 = (x158 * obj_qi) + (-1 * x12 * x136); + const double x160 = x146 + x22; + const double x161 = x140 * x143; + const double x162 = (-1 * x161) + x145; + const double x163 = (x142 * obj_qk) + (-1 * x154 * obj_qk); + const double x164 = x163 + x25; + const double x165 = ((x164 + x162) * sensor_x) + ((x159 + x38) * sensor_z) + ((x160 + x141 + (-1 * x142)) * sensor_y); + const double x166 = obj_qi * obj_qi * obj_qi; + const double x167 = (-1 * x151) + x149; + const double x168 = x161 + (-1 * x145); + const double x169 = ((x168 + x164) * sensor_z) + (((2 * x32) + (x166 * x137) + x38 + (-1 * x166 * x135)) * sensor_x) + ((x156 + x167) * sensor_y); + const double x170 = (x53 * x169) + (x54 * x157) + (x52 * x165); + const double x171 = (x49 * x169) + (x50 * x157) + (x46 * x165); + const double x172 = (x90 * x171) + (x89 * x170); + const double x173 = (x37 * x169) + (x11 * x165) + (x40 * x157); + const double x174 = (x74 * x173) + (-1 * x101 * x172); + const double x175 = ((x104 * x170) + (-1 * x103 * x171)) * x105; + const double x176 = x175 + (-1 * x100 * x174); + const double x177 = x87 * ((x59 * x173) + (-1 * x93 * (x172 + (x88 * x173)))); + const double x178 = x62 * x177; + const double x179 = (x60 * (x178 + (-1 * x61 * x177))) + (x63 * x177); + const double x180 = (x60 * x179) + (x64 * x177); + const double x181 = x86 * (x174 + (x84 * x180) + (-1 * x112 * ((-1 * x124 * x176) + (-1 * x79 * ((x60 * x180) + (x60 * (x180 + (x60 * (x179 + (x68 * x177) + (x60 * ((-1 * x110 * x177) + x178 + (x67 * x177))))) + (x69 * x177))) + (x65 * x177) + (x70 * x177))))) + (x119 * x176) + (x95 * x177)); + const double x182 = obj_qj * obj_qj * obj_qj; + const double x183 = (x138 * obj_qk) + (-1 * x14 * x144); + const double x184 = x183 + x25; + const double x185 = x139 + x32; + const double x186 = x148 * obj_qj; + const double x187 = x150 * obj_qj; + const double x188 = x187 + (-1 * x186); + const double x189 = ((x188 + x185) * sensor_x) + (((x182 * x137) + (2 * x153) + x24 + (-1 * x182 * x135)) * sensor_y) + ((x162 + x184) * sensor_z); + const double x190 = (x158 * obj_qj) + (-1 * x12 * x135 * obj_qj); + const double x191 = x14 * x140; + const double x192 = ((x147 + (-1 * x191) + x138) * sensor_x) + ((x168 + x184) * sensor_y) + ((x190 + x24) * sensor_z); + const double x193 = (-1 * x187) + x186; + const double x194 = ((x160 + x191 + (-1 * x138)) * sensor_z) + ((x193 + x185) * sensor_y) + ((x155 + x24) * sensor_x); + const double x195 = (x40 * x189) + (x37 * x194) + (x11 * x192); + const double x196 = (x54 * x189) + (x53 * x194) + (x52 * x192); + const double x197 = (x49 * 
x194) + (x50 * x189) + (x46 * x192); + const double x198 = (x90 * x197) + (x89 * x196); + const double x199 = x87 * ((x59 * x195) + (-1 * x93 * (x198 + (x88 * x195)))); + const double x200 = (x74 * x195) + (-1 * x101 * x198); + const double x201 = ((x104 * x196) + (-1 * x103 * x197)) * x105; + const double x202 = x201 + (-1 * x200 * x100); + const double x203 = x62 * x199; + const double x204 = (x60 * (x203 + (-1 * x61 * x199))) + (x63 * x199); + const double x205 = (x60 * x204) + (x64 * x199); + const double x206 = x86 * (x200 + (x84 * x205) + (-1 * x112 * ((-1 * x202 * x124) + (-1 * x79 * ((x65 * x199) + (x70 * x199) + (x60 * x205) + (x60 * (x205 + (x60 * (x204 + (x68 * x199) + (x60 * ((-1 * x110 * x199) + x203 + (x67 * x199))))) + (x69 * x199))))))) + (x95 * x199) + (x202 * x119)); + const double x207 = x190 + x153; + const double x208 = x12 * x140; + const double x209 = ((x183 + x31) * sensor_y) + ((x160 + (-1 * x158) + x208) * sensor_x) + ((x167 + x207) * sensor_z); + const double x210 = obj_qk * obj_qk * obj_qk; + const double x211 = x159 + x32; + const double x212 = ((x211 + x193) * sensor_x) + ((x152 + x207) * sensor_y) + (((2 * x25) + (-1 * x210 * x135) + x31 + (x210 * x137)) * sensor_z); + const double x213 = ((x211 + x188) * sensor_z) + ((x147 + x158 + (-1 * x208)) * sensor_y) + ((x163 + x31) * sensor_x); + const double x214 = (x53 * x213) + (x54 * x209) + (x52 * x212); + const double x215 = (x49 * x213) + (x50 * x209) + (x46 * x212); + const double x216 = (x90 * x215) + (x89 * x214); + const double x217 = (x37 * x213) + (x40 * x209) + (x11 * x212); + const double x218 = (x74 * x217) + (-1 * x216 * x101); + const double x219 = ((x214 * x104) + (-1 * x215 * x103)) * x105; + const double x220 = x219 + (-1 * x218 * x100); + const double x221 = x87 * ((x59 * x217) + (-1 * x93 * (x216 + (x88 * x217)))); + const double x222 = x62 * x221; + const double x223 = (x60 * (x222 + (-1 * x61 * x221))) + (x63 * x221); + const double x224 = (x60 * x223) + (x64 * x221); + const double x225 = x86 * ((-1 * x112 * ((-1 * x220 * x124) + (-1 * x79 * ((x65 * x221) + (x60 * x224) + (x70 * x221) + (x60 * ((x60 * (x223 + (x68 * x221) + (x60 * ((-1 * x221 * x110) + (x67 * x221) + x222)))) + x224 + (x69 * x221))))))) + (x95 * x221) + (x220 * x119) + x218 + (x84 * x224)); + out[0] = x106 + (-1 * x113) + (-1 * ((-1 * x106) + x113) * x114); + out[1] = x117 + (-1 * x125) + (-1 * ((-1 * x117) + x125) * x114); + out[2] = x129 + (-1 * x134) + (-1 * ((-1 * x129) + x134) * x114); + out[3] = (-1 * x181) + x175 + (-1 * ((-1 * x175) + x181) * x114); + out[4] = x201 + (-1 * x206) + (-1 * ((-1 * x201) + x206) * x114); + out[5] = (-1 * x225) + x219 + (-1 * ((-1 * x219) + x225) * x114); +} + +// Jacobian of reproject_axis_y_gen2 wrt [obj_px, obj_py, obj_pz, obj_qi, obj_qj, obj_qk] +static inline void gen_reproject_axis_y_gen2_jac_obj_p_axis_angle( + double out[6], + const LinmathAxisAnglePose* obj_p, + const LinmathPoint3d sensor_pt, + const LinmathAxisAnglePose* lh_p, + const BaseStationCal* bsc1) +{ + const double obj_px = (*obj_p).Pos[0]; + const double obj_py = (*obj_p).Pos[1]; + const double obj_pz = (*obj_p).Pos[2]; + const double obj_qi = (*obj_p).AxisAngleRot[0]; + const double obj_qj = (*obj_p).AxisAngleRot[1]; + const double obj_qk = (*obj_p).AxisAngleRot[2]; + const double sensor_x = sensor_pt[0]; + const double sensor_y = sensor_pt[1]; + const double sensor_z = sensor_pt[2]; + const double lh_px = (*lh_p).Pos[0]; + const double lh_py = (*lh_p).Pos[1]; + const double lh_pz = (*lh_p).Pos[2]; + const double lh_qi 
= (*lh_p).AxisAngleRot[0]; + const double lh_qj = (*lh_p).AxisAngleRot[1]; + const double lh_qk = (*lh_p).AxisAngleRot[2]; + // const double phase_1 = (*bsc1).phase; + const double tilt_1 = (*bsc1).tilt; + const double curve_1 = (*bsc1).curve; + const double gibPhase_1 = (*bsc1).gibpha; + const double gibMag_1 = (*bsc1).gibmag; + const double ogeeMag_1 = (*bsc1).ogeephase; + const double ogeePhase_1 = (*bsc1).ogeemag; + const double x0 = lh_qk * lh_qk; + const double x1 = lh_qi * lh_qi; + const double x2 = lh_qj * lh_qj; + const double x3 = 1e-10 + x2 + x0 + x1; + const double x4 = sqrt(x3); + const double x5 = (1. / x4) * sin(x4); + const double x6 = x5 * lh_qi; + const double x7 = cos(x4); + const double x8 = (1. / x3) * (1 + (-1 * x7)); + const double x9 = x8 * lh_qk * lh_qj; + const double x10 = x9 + (-1 * x6); + const double x11 = obj_qk * obj_qk; + const double x12 = obj_qi * obj_qi; + const double x13 = obj_qj * obj_qj; + const double x14 = 1e-10 + x13 + x11 + x12; + const double x15 = 1. / x14; + const double x16 = sqrt(x14); + const double x17 = cos(x16); + const double x18 = 1 + (-1 * x17); + const double x19 = x15 * x18; + const double x20 = sin(x16); + const double x21 = x20 * (1. / x16); + const double x22 = x21 * obj_qj; + const double x23 = -1 * x22; + const double x24 = x19 * obj_qi; + const double x25 = x24 * obj_qk; + const double x26 = x21 * obj_qi; + const double x27 = x19 * obj_qj; + const double x28 = x27 * obj_qk; + const double x29 = ((x28 + x26) * sensor_y) + ((x25 + x23) * sensor_x) + obj_pz + ((x17 + (x11 * x19)) * sensor_z); + const double x30 = x21 * obj_qk; + const double x31 = -1 * x30; + const double x32 = x27 * obj_qi; + const double x33 = ((x32 + x31) * sensor_y) + ((x25 + x22) * sensor_z) + ((x17 + (x12 * x19)) * sensor_x) + obj_px; + const double x34 = x5 * lh_qk; + const double x35 = x8 * lh_qi; + const double x36 = x35 * lh_qj; + const double x37 = x36 + x34; + const double x38 = -1 * x26; + const double x39 = ((x17 + (x13 * x19)) * sensor_y) + ((x32 + x30) * sensor_x) + obj_py + ((x28 + x38) * sensor_z); + const double x40 = x7 + (x2 * x8); + const double x41 = (x40 * x39) + (x33 * x37) + lh_py + (x29 * x10); + const double x42 = 0.523598775598299 + (-1 * tilt_1); + const double x43 = cos(x42); + const double x44 = 1. / x43; + const double x45 = x41 * x41; + const double x46 = x7 + (x0 * x8); + const double x47 = x5 * lh_qj; + const double x48 = x35 * lh_qk; + const double x49 = x48 + (-1 * x47); + const double x50 = x9 + x6; + const double x51 = (x50 * x39) + (x49 * x33) + lh_pz + (x46 * x29); + const double x52 = x48 + x47; + const double x53 = x7 + (x1 * x8); + const double x54 = x36 + (-1 * x34); + const double x55 = (x54 * x39) + (x53 * x33) + lh_px + (x52 * x29); + const double x56 = x55 * x55; + const double x57 = x56 + (x51 * x51); + const double x58 = x57 + x45; + const double x59 = (1. / sqrt(x58)) * x44; + const double x60 = asin(x59 * x41); + const double x61 = 8.0108022e-06 * x60; + const double x62 = -8.0108022e-06 + (-1 * x61); + const double x63 = 0.0028679863 + (x60 * x62); + const double x64 = 5.3685255e-06 + (x60 * x63); + const double x65 = 0.0076069798 + (x60 * x64); + const double x66 = x60 * x60; + const double x67 = atan2(-1 * x51, x55); + const double x68 = tan(x42); + const double x69 = x68 * (1. 
/ sqrt(x57)); + const double x70 = -1 * x69 * x41; + const double x71 = (-1 * asin(x70)) + ogeeMag_1 + x67; + const double x72 = (sin(x71) * ogeePhase_1) + curve_1; + const double x73 = x60 * x65; + const double x74 = -8.0108022e-06 + (-1.60216044e-05 * x60); + const double x75 = x63 + (x74 * x60); + const double x76 = x64 + (x75 * x60); + const double x77 = x65 + (x76 * x60); + const double x78 = (x77 * x60) + x73; + const double x79 = sin(x42); + const double x80 = x72 * x79; + const double x81 = x43 + (x80 * x78); + const double x82 = 1. / x81; + const double x83 = x82 * x72; + const double x84 = x83 * x66; + const double x85 = x70 + (x84 * x65); + const double x86 = 1. / sqrt(1 + (-1 * (x85 * x85))); + const double x87 = 1. / sqrt(1 + (-1 * (1. / x58) * (1. / (x43 * x43)) * x45)); + const double x88 = 2 * x41; + const double x89 = 2 * x55; + const double x90 = 2 * x51; + const double x91 = (x90 * x49) + (x89 * x53); + const double x92 = 1.0/2.0 * x41; + const double x93 = (1. / (x58 * sqrt(x58))) * x92 * x44; + const double x94 = (x59 * x37) + (-1 * x93 * (x91 + (x88 * x37))); + const double x95 = x87 * x94; + const double x96 = x62 * x95; + const double x97 = (x63 * x95) + (x60 * (x96 + (-1 * x61 * x95))); + const double x98 = (x64 * x95) + (x60 * x97); + const double x99 = 1. / x57; + const double x100 = 1. / sqrt(1 + (-1 * (x68 * x68) * x99 * x45)); + const double x101 = x68 * (1. / (x57 * sqrt(x57))) * x92; + const double x102 = (-1 * x69 * x37) + (x91 * x101); + const double x103 = 1. / x55; + const double x104 = x51 * (1. / x56); + const double x105 = x56 * x99; + const double x106 = ((x53 * x104) + (-1 * x49 * x103)) * x105; + const double x107 = x106 + (-1 * x100 * x102); + const double x108 = cos(x71) * ogeePhase_1; + const double x109 = x79 * x78; + const double x110 = x109 * x108; + const double x111 = x87 * x77; + const double x112 = 2.40324066e-05 * x60; + const double x113 = x65 * x66; + const double x114 = (1. 
/ (x81 * x81)) * x72 * x113; + const double x115 = x82 * x113; + const double x116 = x108 * x115; + const double x117 = 2 * x83 * x73; + const double x118 = x86 * ((x95 * x117) + x102 + (x107 * x116) + (x84 * x98) + (-1 * x114 * ((x80 * ((x65 * x95) + (x94 * x111) + (x60 * x98) + (x60 * (x98 + (x60 * (x97 + (x75 * x95) + (x60 * (x96 + (-1 * x95 * x112) + (x74 * x95))))) + (x76 * x95))))) + (x107 * x110)))); + const double x119 = cos((-1 * asin(x85)) + x67 + gibPhase_1) * gibMag_1; + const double x120 = (x50 * x90) + (x89 * x54); + const double x121 = (-1 * x69 * x40) + (x101 * x120); + const double x122 = ((x54 * x104) + (-1 * x50 * x103)) * x105; + const double x123 = x108 * (x122 + (-1 * x100 * x121)); + const double x124 = (x59 * x40) + (-1 * x93 * (x120 + (x88 * x40))); + const double x125 = x87 * x124; + const double x126 = x62 * x125; + const double x127 = (x63 * x125) + (x60 * (x126 + (-1 * x61 * x125))); + const double x128 = (x64 * x125) + (x60 * x127); + const double x129 = x86 * (x121 + (x117 * x125) + (-1 * x114 * ((x80 * ((x65 * x125) + (x60 * x128) + (x111 * x124) + (x60 * (x128 + (x60 * (x127 + (x75 * x125) + (x60 * ((-1 * x112 * x125) + x126 + (x74 * x125))))) + (x76 * x125))))) + (x109 * x123))) + (x115 * x123) + (x84 * x128)); + const double x130 = (x90 * x46) + (x89 * x52); + const double x131 = (x59 * x10) + (-1 * x93 * (x130 + (x88 * x10))); + const double x132 = x87 * x131; + const double x133 = x62 * x132; + const double x134 = (x63 * x132) + (x60 * (x133 + (-1 * x61 * x132))); + const double x135 = (x64 * x132) + (x60 * x134); + const double x136 = (-1 * x69 * x10) + (x101 * x130); + const double x137 = ((x52 * x104) + (-1 * x46 * x103)) * x105; + const double x138 = x137 + (-1 * x100 * x136); + const double x139 = x86 * ((x117 * x132) + x136 + (x116 * x138) + (x84 * x135) + (-1 * x114 * ((x80 * ((x65 * x132) + (x60 * x135) + (x111 * x131) + (x60 * (x135 + (x60 * (x134 + (x75 * x132) + (x60 * ((-1 * x112 * x132) + x133 + (x74 * x132))))) + (x76 * x132))))) + (x110 * x138)))); + const double x140 = 2 * (1. / (x14 * x14)) * x18; + const double x141 = x140 * obj_qi; + const double x142 = x20 * (1. 
/ (x14 * sqrt(x14))); + const double x143 = x13 * x142; + const double x144 = (x143 * obj_qi) + (-1 * x13 * x141); + const double x145 = x15 * x17; + const double x146 = x12 * x145; + const double x147 = x12 * x142; + const double x148 = obj_qj * obj_qi; + const double x149 = x140 * obj_qk; + const double x150 = x142 * x148; + const double x151 = (x150 * obj_qk) + (-1 * x148 * x149); + const double x152 = x151 + (-1 * x21); + const double x153 = x142 * obj_qk; + const double x154 = x153 * obj_qi; + const double x155 = x145 * obj_qk; + const double x156 = x155 * obj_qi; + const double x157 = x156 + (-1 * x154); + const double x158 = x12 * x140; + const double x159 = (x147 * obj_qj) + (-1 * x158 * obj_qj); + const double x160 = x159 + x27; + const double x161 = ((x160 + x157) * sensor_x) + ((x144 + x38) * sensor_y) + ((x152 + (-1 * x146) + x147) * sensor_z); + const double x162 = x11 * x142; + const double x163 = (x162 * obj_qi) + (-1 * x11 * x141); + const double x164 = x151 + x21; + const double x165 = x19 * obj_qk; + const double x166 = (x147 * obj_qk) + (-1 * x158 * obj_qk); + const double x167 = x166 + x165; + const double x168 = x145 * x148; + const double x169 = (-1 * x168) + x150; + const double x170 = ((x169 + x167) * sensor_x) + ((x163 + x38) * sensor_z) + ((x164 + x146 + (-1 * x147)) * sensor_y); + const double x171 = obj_qi * obj_qi * obj_qi; + const double x172 = (-1 * x156) + x154; + const double x173 = x168 + (-1 * x150); + const double x174 = ((x167 + x173) * sensor_z) + (((2 * x24) + x38 + (x171 * x142) + (-1 * x171 * x140)) * sensor_x) + ((x160 + x172) * sensor_y); + const double x175 = (x49 * x174) + (x50 * x161) + (x46 * x170); + const double x176 = (x53 * x174) + (x54 * x161) + (x52 * x170); + const double x177 = ((x104 * x176) + (-1 * x103 * x175)) * x105; + const double x178 = (x37 * x174) + (x10 * x170) + (x40 * x161); + const double x179 = (x90 * x175) + (x89 * x176); + const double x180 = (x59 * x178) + (-1 * x93 * (x179 + (x88 * x178))); + const double x181 = x87 * x180; + const double x182 = x62 * x181; + const double x183 = (x63 * x181) + (x60 * (x182 + (-1 * x61 * x181))); + const double x184 = (x64 * x181) + (x60 * x183); + const double x185 = (-1 * x69 * x178) + (x101 * x179); + const double x186 = x177 + (-1 * x100 * x185); + const double x187 = x86 * ((x117 * x181) + x185 + (x116 * x186) + (x84 * x184) + (-1 * x114 * ((x80 * ((x65 * x181) + (x60 * x184) + (x111 * x180) + (x60 * (x184 + (x60 * ((x75 * x181) + x183 + (x60 * (x182 + (-1 * x112 * x181) + (x74 * x181))))) + (x76 * x181))))) + (x110 * x186)))); + const double x188 = obj_qj * obj_qj * obj_qj; + const double x189 = (x143 * obj_qk) + (-1 * x13 * x149); + const double x190 = x189 + x165; + const double x191 = x144 + x24; + const double x192 = x153 * obj_qj; + const double x193 = x155 * obj_qj; + const double x194 = x193 + (-1 * x192); + const double x195 = ((x194 + x191) * sensor_x) + (((2 * x27) + (x188 * x142) + x23 + (-1 * x188 * x140)) * sensor_y) + ((x169 + x190) * sensor_z); + const double x196 = (x162 * obj_qj) + (-1 * x11 * x140 * obj_qj); + const double x197 = x13 * x145; + const double x198 = ((x173 + x190) * sensor_y) + ((x152 + (-1 * x197) + x143) * sensor_x) + ((x196 + x23) * sensor_z); + const double x199 = (-1 * x193) + x192; + const double x200 = ((x164 + x197 + (-1 * x143)) * sensor_z) + ((x199 + x191) * sensor_y) + ((x159 + x23) * sensor_x); + const double x201 = (x53 * x200) + (x54 * x195) + (x52 * x198); + const double x202 = (x49 * x200) + (x50 * x195) + (x46 * x198); + const 
double x203 = (x90 * x202) + (x89 * x201); + const double x204 = (x40 * x195) + (x37 * x200) + (x10 * x198); + const double x205 = (-1 * x69 * x204) + (x203 * x101); + const double x206 = ((x201 * x104) + (-1 * x202 * x103)) * x105; + const double x207 = x206 + (-1 * x205 * x100); + const double x208 = (x59 * x204) + (-1 * x93 * (x203 + (x88 * x204))); + const double x209 = x87 * x208; + const double x210 = x62 * x209; + const double x211 = (x63 * x209) + (x60 * (x210 + (-1 * x61 * x209))); + const double x212 = (x64 * x209) + (x60 * x211); + const double x213 = x86 * ((x209 * x117) + (x84 * x212) + x205 + (-1 * x114 * ((x80 * ((x60 * (x212 + (x60 * (x211 + (x75 * x209) + (x60 * (x210 + (-1 * x209 * x112) + (x74 * x209))))) + (x76 * x209))) + (x208 * x111) + (x65 * x209) + (x60 * x212))) + (x207 * x110))) + (x207 * x116)); + const double x214 = x196 + x27; + const double x215 = x11 * x145; + const double x216 = ((x164 + (-1 * x162) + x215) * sensor_x) + ((x189 + x31) * sensor_y) + ((x172 + x214) * sensor_z); + const double x217 = obj_qk * obj_qk * obj_qk; + const double x218 = x163 + x24; + const double x219 = ((x218 + x199) * sensor_x) + ((x157 + x214) * sensor_y) + ((x31 + (2 * x165) + (-1 * x217 * x140) + (x217 * x142)) * sensor_z); + const double x220 = ((x218 + x194) * sensor_z) + ((x152 + x162 + (-1 * x215)) * sensor_y) + ((x166 + x31) * sensor_x); + const double x221 = (x37 * x220) + (x40 * x216) + (x10 * x219); + const double x222 = (x53 * x220) + (x54 * x216) + (x52 * x219); + const double x223 = (x49 * x220) + (x50 * x216) + (x46 * x219); + const double x224 = (x90 * x223) + (x89 * x222); + const double x225 = (x59 * x221) + (-1 * x93 * (x224 + (x88 * x221))); + const double x226 = x87 * x225; + const double x227 = x62 * x226; + const double x228 = (x63 * x226) + (x60 * (x227 + (-1 * x61 * x226))); + const double x229 = (x64 * x226) + (x60 * x228); + const double x230 = (-1 * x69 * x221) + (x224 * x101); + const double x231 = ((x222 * x104) + (-1 * x223 * x103)) * x105; + const double x232 = x231 + (-1 * x230 * x100); + const double x233 = x86 * (x230 + (-1 * x114 * ((x80 * ((x65 * x226) + (x60 * x229) + (x225 * x111) + (x60 * ((x60 * (x228 + (x75 * x226) + (x60 * ((-1 * x226 * x112) + (x74 * x226) + x227)))) + x229 + (x76 * x226))))) + (x232 * x110))) + (x226 * x117) + (x84 * x229) + (x232 * x116)); + out[0] = (-1 * x118) + (-1 * ((-1 * x106) + x118) * x119) + x106; + out[1] = (-1 * ((-1 * x122) + x129) * x119) + (-1 * x129) + x122; + out[2] = (-1 * ((-1 * x137) + x139) * x119) + (-1 * x139) + x137; + out[3] = (-1 * ((-1 * x177) + x187) * x119) + x177 + (-1 * x187); + out[4] = (-1 * ((-1 * x206) + x213) * x119) + (-1 * x213) + x206; + out[5] = (-1 * ((-1 * x231) + x233) * x119) + (-1 * x233) + x231; +} + +// This is a custom gtsam factor for predicting a Gen2 lighthouse angle about axis 0 (x) or 1 (y). 
+class Gen2LightAngleFactor : public NoiseModelFactor2 { + typedef NoiseModelFactor1 Base; + typedef Gen2LightAngleFactor This; + + public: + Gen2LightAngleFactor() {} + + Gen2LightAngleFactor( + Key key_gTl, // gtsam::Key for global -> lighthouse frame transform + Key key_gTb, // gtsam::Key for body -> global frame transform + const double & angle, // Measured agle in radians + const SharedNoiseModel& model, // Noise model + bool is_y_axis, // if true, axis = 1 else axis = 0 + const Point3 & sensor, // Pose of the sensor in the body frame + const BaseStationCal & bcal) // Base station calibration + : Base(model, key_gTl, key_gTb) + , angle_(angle) + , is_y_axis_(is_y_axis) + , bcal_(bcal) + { + sensor_pt_[0] = sensor[0]; + sensor_pt_[1] = sensor[1]; + sensor_pt_[2] = sensor[2]; + } + + Vector evaluateError( + const Pose3& gTl, + const Pose3& gTb, + boost::optional H1 = boost::none, + boost::optional H2 = boost::none) const override + { + LinmathAxisAnglePose lh_p, obj_p; + + auto [gTl_axis, gTl_angle] = gTl.rotation().axisAngle(); + lh_p.Pos[0] = gTl.translation().x(); + lh_p.Pos[1] = gTl.translation().y(); + lh_p.Pos[2] = gTl.translation().z(); + lh_p.AxisAngleRot[0] = gTl_axis.unitVector().x() * gTl_angle; + lh_p.AxisAngleRot[1] = gTl_axis.unitVector().y() * gTl_angle; + lh_p.AxisAngleRot[2] = gTl_axis.unitVector().z() * gTl_angle; + + auto [gTb_axis, gTb_angle] = gTl.rotation().axisAngle(); + obj_p.Pos[0] = gTl.translation().x(); + obj_p.Pos[1] = gTl.translation().y(); + obj_p.Pos[2] = gTl.translation().z(); + obj_p.AxisAngleRot[0] = gTb_axis.unitVector().x() * gTb_angle; + obj_p.AxisAngleRot[1] = gTb_axis.unitVector().y() * gTb_angle; + obj_p.AxisAngleRot[2] = gTb_axis.unitVector().z() * gTb_angle; + + double predicted_angle = is_y_axis_ + ? gen_reproject_axis_y_gen2_axis_angle(&obj_p, sensor_pt_, &lh_p, &bcal_) + : gen_reproject_axis_x_gen2_axis_angle(&obj_p, sensor_pt_, &lh_p, &bcal_); + + if (H1) { + double J[6]; + if (is_y_axis_) { + gen_reproject_axis_y_gen2_jac_lh_p_axis_angle(J, &obj_p, sensor_pt_, &lh_p, &bcal_); + } else { + gen_reproject_axis_x_gen2_jac_lh_p_axis_angle(J, &obj_p, sensor_pt_, &lh_p, &bcal_); + } + *H1 = (gtsam::Matrix(1, 6) << J[0], J[1], J[2], J[3], J[4], J[5]).finished(); + } + + if (H2) { + double J[6]; + if (is_y_axis_) { + gen_reproject_axis_y_gen2_jac_obj_p_axis_angle(J, &obj_p, sensor_pt_, &lh_p, &bcal_); + } else { + gen_reproject_axis_x_gen2_jac_obj_p_axis_angle(J, &obj_p, sensor_pt_, &lh_p, &bcal_); + } + *H2 = (gtsam::Matrix(1, 6) << J[0], J[1], J[2], J[3], J[4], J[5]).finished(); + } + + return Vector1(predicted_angle - angle_); + } + + private: + double angle_; + bool is_y_axis_; + BaseStationCal bcal_; + LinmathPoint3d sensor_pt_; + + public: + GTSAM_MAKE_ALIGNED_OPERATOR_NEW +}; + +} // namespace gtsam + +#endif // LIBSURVIVE_ROS2__POSER_FACTORS_HPP_ diff --git a/scripts/bag_parser b/scripts/bag_parser index c0ab957..7fc7eef 100755 --- a/scripts/bag_parser +++ b/scripts/bag_parser @@ -21,6 +21,7 @@ # THE SOFTWARE. 
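An aside on the Gen2LightAngleFactor defined above: its evaluateError() flattens each gtsam::Pose3 into the position plus axis-angle layout (LinmathAxisAnglePose) that the generated libsurvive functions expect. The same conversion can be sanity-checked offline with a small Python sketch; this uses scipy rather than the package code, and the quaternion order here is (x, y, z, w):

    # Minimal sketch (not part of the package): pose -> [Pos, AxisAngleRot] layout.
    import numpy as np
    from scipy.spatial.transform import Rotation

    def to_axis_angle_pose(position_xyz, quaternion_xyzw):
        """Return [x, y, z, rx, ry, rz], where the rotation part is the
        rotation vector (unit axis times angle) of the input quaternion."""
        rotvec = Rotation.from_quat(quaternion_xyzw).as_rotvec()
        return np.concatenate([np.asarray(position_xyz, dtype=float), rotvec])

    # Example: a body 1 m in front of the origin with a 90 degree yaw.
    q = Rotation.from_euler("z", 90, degrees=True).as_quat()
    print(to_axis_angle_pose([0.0, 0.0, -1.0], q))
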
import argparse +import yaml import rosbag2_py from rclpy.serialization import deserialize_message @@ -51,28 +52,54 @@ if __name__ == "__main__": for topic_metadata in reader.get_all_topics_and_types(): type_map[topic_metadata.name] = topic_metadata.type - lighthouses = dict() - trackers = dict() + lighthouses = {} + bodies = {} + angles = dict() + num = 0 while reader.has_next(): + num += 1 (topic, data, time_stamp) = reader.read_next() if topic == f"{args.namespace}/lighthouse": msg = deserialize_message(data, get_message(type_map[topic])) - lighthouses[msg.channel] = msg.header.frame_id + lighthouse_id = str(msg.header.frame_id) + # Bug in low-level driver... + if lighthouse_id != "LHB-0": + lighthouses[lighthouse_id] = { + "lighthouse_id" : lighthouse_id, + "channel" : int(msg.channel), + "fcalphase" : [float(msg.fcalphase[0]), float(msg.fcalphase[1])], + "fcaltilt" : [float(msg.fcaltilt[0]), float(msg.fcaltilt[1])], + "fcalcurve" : [float(msg.fcalcurve[0]), float(msg.fcalcurve[1])], + "fcalgibpha" : [float(msg.fcalgibpha[0]), float(msg.fcalgibpha[1])], + "fcalgibmag" : [float(msg.fcalgibmag[0]), float(msg.fcalgibmag[1])], + "fcalogeephase" : [float(msg.fcalogeephase[0]), float(msg.fcalogeephase[1])], + "fcalogeemag" : [float(msg.fcalogeemag[0]), float(msg.fcalogeemag[1])], + } if topic == f"{args.namespace}/tracker": msg = deserialize_message(data, get_message(type_map[topic])) - if msg.header.frame_id not in trackers.keys(): - trackers[msg.header.frame_id] = dict() + tracker_id = str(msg.header.frame_id) + bodies[tracker_id] = { + "body_id" : f"rigid_body/{tracker_id}", + "trackers" : [ + { + "tracker_id" : tracker_id, + "bTh" : [0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0], + } + ] + } + if topic == f"{args.namespace}/angle": msg = deserialize_message(data, get_message(type_map[topic])) - if msg.header.frame_id not in trackers.keys(): - trackers[msg.header.frame_id] = dict() - if msg.channel not in trackers[msg.header.frame_id].keys(): - trackers[msg.header.frame_id][msg.channel] = 0 - trackers[msg.header.frame_id][msg.channel] += 1 + if msg.header.frame_id not in angles.keys(): + angles[msg.header.frame_id] = dict() + if msg.channel not in angles[msg.header.frame_id].keys(): + angles[msg.header.frame_id][msg.channel] = 0 + angles[msg.header.frame_id][msg.channel] += 1 + yaml_bodies = yaml.dump(list(bodies.values()), default_flow_style=None) + print("Bodies:") + print(yaml_bodies) + yaml_lighthouses = yaml.dump(list(lighthouses.values()), default_flow_style=None) print("Lighthouses:") - for channel in sorted(lighthouses.keys()): - print(f"- [CH{channel}] {lighthouses[channel]}") - print("Trackers:") - for tracker, angles in trackers.items(): - print(f"- {tracker}", angles) + print(yaml_lighthouses) + print("Angles:", angles) del reader \ No newline at end of file diff --git a/src/driver_component.cpp b/src/driver_component.cpp index b3d0da6..49c46f8 100644 --- a/src/driver_component.cpp +++ b/src/driver_component.cpp @@ -113,7 +113,12 @@ static void ootx_received_func( { survive_default_ootx_received_process(ctx, idx); if (_singleton) { - printf("IDX: index:%d == channel:%d\n", idx, ctx->bsd[idx].mode); + // Sometimes we get callbacks without a valid base station ID, which points to + // a potential bug in the driver. Catch and filter these out. + if (ctx->bsd[idx].BaseStationID == 0) { + printf("Invalid base station ID %d on channel %d", idx, ctx->bsd[idx].mode); + return; + } // Get serial number of lighthouse. 
char serial[16]; snprintf(serial, sizeof(serial), "LHB-%X", (unsigned)ctx->bsd[idx].BaseStationID); diff --git a/src/poser_component.cpp b/src/poser_component.cpp index 8544dc3..be5b59e 100644 --- a/src/poser_component.cpp +++ b/src/poser_component.cpp @@ -34,6 +34,7 @@ #include "yaml-cpp/yaml.h" + using namespace std::chrono_literals; using std::placeholders::_1; @@ -124,25 +125,26 @@ geometry_msgs::msg::Transform ros_transform_from_gtsam(const gtsam::Pose3 & pose gtsam::noiseModel::Gaussian::shared_ptr gtsam_from_ros(const std::array & msg) { + //# gtsam::Matrix6 cov; - cov << msg[21], msg[22], msg[23], msg[18], msg[19], msg[20], // Rx - msg[27], msg[28], msg[29], msg[24], msg[25], msg[26], // Ry - msg[33], msg[34], msg[35], msg[30], msg[31], msg[32], // Rz - msg[3], msg[4], msg[5], msg[0], msg[1], msg[2], // Tx - msg[9], msg[10], msg[11], msg[6], msg[7], msg[8], // Ty - msg[15], msg[16], msg[17], msg[12], msg[13], msg[14]; // Tz + cov << msg[21], msg[22], msg[23], msg[18], msg[19], msg[20], // Rx // NOLINT + msg[27], msg[28], msg[29], msg[24], msg[25], msg[26], // Ry // NOLINT + msg[33], msg[34], msg[35], msg[30], msg[31], msg[32], // Rz // NOLINT + msg[3], msg[4], msg[5], msg[0], msg[1], msg[2], // Tx // NOLINT + msg[9], msg[10], msg[11], msg[6], msg[7], msg[8], // Ty // NOLINT + msg[15], msg[16], msg[17], msg[12], msg[13], msg[14]; // Tz // NOLINT return gtsam::noiseModel::Gaussian::Covariance(cov); } std::array ros_from_gtsam(const gtsam::Matrix & cov) { return { - cov(3, 3), cov(3, 4), cov(3, 5), cov(3, 0), cov(3, 1), cov(3, 2), // Tx - cov(4, 3), cov(4, 4), cov(4, 5), cov(4, 0), cov(4, 1), cov(4, 2), // Ty - cov(5, 3), cov(5, 4), cov(5, 5), cov(5, 0), cov(5, 1), cov(5, 2), // Tz - cov(0, 3), cov(0, 4), cov(0, 5), cov(0, 0), cov(0, 1), cov(0, 2), // Rx - cov(1, 3), cov(1, 4), cov(1, 5), cov(1, 0), cov(1, 1), cov(1, 2), // Ry - cov(2, 3), cov(2, 4), cov(2, 5), cov(2, 0), cov(2, 1), cov(2, 2), // Rz + cov(3, 3), cov(3, 4), cov(3, 5), cov(3, 0), cov(3, 1), cov(3, 2), // Tx // NOLINT + cov(4, 3), cov(4, 4), cov(4, 5), cov(4, 0), cov(4, 1), cov(4, 2), // Ty // NOLINT + cov(5, 3), cov(5, 4), cov(5, 5), cov(5, 0), cov(5, 1), cov(5, 2), // Tz // NOLINT + cov(0, 3), cov(0, 4), cov(0, 5), cov(0, 0), cov(0, 1), cov(0, 2), // Rx // NOLINT + cov(1, 3), cov(1, 4), cov(1, 5), cov(1, 0), cov(1, 1), cov(1, 2), // Ry // NOLINT + cov(2, 3), cov(2, 4), cov(2, 5), cov(2, 0), cov(2, 1), cov(2, 2), // Rz // NOLINT }; } @@ -226,6 +228,25 @@ PoserComponent::PoserComponent(const rclcpp::NodeOptions & options) } } + // Add lighthouses to avoid needing to wait for OOTX packets. 
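The loop that follows reads one entry per lighthouse from the poser configuration. The shape it expects can be reconstructed from the keys it accesses and from the YAML emitted by the bag_parser changes above; the sketch below is an illustration only and every value is a placeholder:

    # Hypothetical "lighthouses" entry matching the keys read by the loop below.
    import yaml

    lighthouse_entry = {
        "lighthouse_id": "LHB-XXXXXXXX",   # serial as formatted by the driver
        "channel": 1,
        "fcalphase": [0.0, 0.0],           # one value per sweep plane (axis 0 and 1)
        "fcaltilt": [0.0, 0.0],
        "fcalcurve": [0.0, 0.0],
        "fcalgibpha": [0.0, 0.0],
        "fcalgibmag": [0.0, 0.0],
        "fcalogeephase": [0.0, 0.0],
        "fcalogeemag": [0.0, 0.0],
    }
    print(yaml.dump({"lighthouses": [lighthouse_entry]}, default_flow_style=None))
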
+ for (auto lighthouse : config["lighthouses"]) { + libsurvive_ros2::msg::Lighthouse msg; + msg.header.stamp = this->get_clock()->now(); + msg.header.frame_id = lighthouse["lighthouse_id"].as(); + msg.channel = static_cast(lighthouse["channel"].as()); + for (size_t i = 0; i < 2; i++) { + msg.fcalcurve[i] = lighthouse["fcalcurve"][i].as(); + msg.fcalgibmag[i] = lighthouse["fcalgibmag"][i].as(); + msg.fcalgibpha[i] = lighthouse["fcalgibpha"][i].as(); + msg.fcalogeemag[i] = lighthouse["fcalogeemag"][i].as(); + msg.fcalogeephase[i] = lighthouse["fcalogeephase"][i].as(); + msg.fcalphase[i] = lighthouse["fcalphase"][i].as(); + msg.fcaltilt[i] = lighthouse["fcaltilt"][i].as(); + } + // TODO(asymingt) you can't insert a lighthouse before you have a measurement + // this->add_lighthouse(msg); + } + // Add registration information for (auto registration : config["registration"]) { std::string body_id = registration["body_id"].as(); @@ -290,13 +311,21 @@ void PoserComponent::add_angle(const libsurvive_ros2::msg::Angle & msg) } auto & lighthouse_info = channel_to_lighthouse_info_[msg.channel]; - // Verify that the tracker has this channel - if (tracker_info.t_sensors.count(msg.sensor_id) == 0) { + // Verify that the tracker has this sensor id + if (tracker_info.t_points.count(msg.sensor_id) == 0) { + RCLCPP_ERROR_STREAM(this->get_logger(), + "Tracker " << tracker_id << " sensor " << int(msg.sensor_id) << " does not exist"); + } + + // Verify that we have a plane of 0 or 1 + if (msg.plane > 1) { RCLCPP_ERROR_STREAM(this->get_logger(), - "Tracker " << tracker_id << " sensor " << msg.sensor_id << " does not exist"); + "Tracker " << tracker_id << " plane " << int(msg.plane) << " is not in {0, 1}"); } - auto & t_sensor = tracker_info.t_sensors[msg.sensor_id]; + // Get the point and normal of the sensor. + auto & t_sensor = tracker_info.t_points[msg.sensor_id]; + // We rely on IMU inserting the actual states. We're just going to find the // closest one to the angle timestamp and use it. There should be one... const Timestamp stamp = rclcpp::Time(msg.header.stamp).nanoseconds(); @@ -305,36 +334,25 @@ void PoserComponent::add_angle(const libsurvive_ros2::msg::Angle & msg) return; } - // Expression to move the the position of the sensor from the tracking to global frame. - gtsam::Point3_ t_sensor_(t_sensor); - gtsam::Pose3_ gTb_(*gTb); - gtsam::Pose3_ bTh_(body_info.bTh[tracker_id]); - gtsam::Pose3_ hTt_(tracker_info.tTh.inverse()); - gtsam::Point3_ g_sensor_ = - gtsam::transformFrom(gtsam::compose(gTb_, gtsam::compose(bTh_, hTt_)), t_sensor_); - - // Now we can use a simple pinhole model to predict the (u, v, 1) coordinate of the - // sensor on some virtual image plane located at 1m from the sensor. This is because - // the "K" has fx = 1.0, and fy = 1.0, and all other elements zero. - gtsam::Pose3_ gTl_(lighthouse_info.gTl); - gtsam::Cal3_S2_ K_(*lighthouse_info.K); - auto expression_factor = gtsam::project3(gTl_, g_sensor_, K_); - - // The factor that gets added depends if this is an angle about axis 0 or axis 1. 
- switch (msg.plane) { - case 0: { // Angle about X - auto angle_obs = gtsam::Point2(std::tan(msg.angle), 0); - auto angle_cov = gtsam::noiseModel::Diagonal::Sigmas(gtsam::Vector2(kSigmaAngle, 1e6)); - graph_.addExpressionFactor(expression_factor, angle_obs, angle_cov); - break; - } - case 1: { // Angle about Y - auto angle_obs = gtsam::Point2(0, std::tan(msg.angle)); - auto angle_cov = gtsam::noiseModel::Diagonal::Sigmas(gtsam::Vector2(1e6, kSigmaAngle)); - graph_.addExpressionFactor(expression_factor, angle_obs, angle_cov); - break; - } - } + // Transform sensor to body-frame. We can maybe optimize this at some point in the + // future to prevent having to this for every angle measurement. Not sure what sort + // of performance increase it would give us though. + auto & bTh = body_info.bTh[tracker_id]; + auto & tTh = tracker_info.tTh; + auto b_sensor = (bTh * tTh.inverse()).transformFrom(t_sensor); + + // Observed angle and error in that angle. + auto angle_cov = gtsam::noiseModel::Diagonal::Sigmas(gtsam::Vector1(kSigmaAngle)); + + // Add a between factor + graph_.emplace_shared( + lighthouse_info.gTl, // lighthouse -> global frame + *gTb, // body -> global frame + msg.angle, // observed angle + angle_cov, // uncertainty in our observation + msg.plane > 0, // is this the y-axis? + b_sensor, // sensor location in the body frame + lighthouse_info.bcal[msg.plane]); // lighthouse calibration parameters } void PoserComponent::add_imu(const sensor_msgs::msg::Imu & msg) @@ -514,7 +532,8 @@ void PoserComponent::add_tracker(const libsurvive_ros2::msg::Tracker & msg) // Sensor locations and normals. for (size_t k = 0; k < msg.channels.size(); k++) { const uint8_t channel = msg.channels[k]; - tracker_info.t_sensors[channel] = gtsam_from_ros(msg.points[k]); + tracker_info.t_points[channel] = gtsam_from_ros(msg.points[k]); + tracker_info.t_normals[channel] = gtsam_from_ros(msg.normals[k]); } // The IMU factor needs to know the pose of the IMU sensor in the body frame in @@ -543,29 +562,35 @@ void PoserComponent::add_lighthouse(const libsurvive_ros2::msg::Lighthouse & msg const std::string & orig_lighthouse_id = channel_to_lighthouse_info_[lighthouse_channel].id; if (orig_lighthouse_id != lighthouse_id) { RCLCPP_ERROR_STREAM(this->get_logger(), - "New lighthouse " << lighthouse_id << " on channel " << lighthouse_channel + "New lighthouse " << lighthouse_id << " on channel " << int(lighthouse_channel) << " previously received from " << orig_lighthouse_id); } return; } RCLCPP_INFO_STREAM(this->get_logger(), - "Adding lighthouse " << lighthouse_id << " on channel " << lighthouse_channel); + "Adding lighthouse " << lighthouse_id << " on channel " << int(lighthouse_channel)); auto & lighthouse_info = channel_to_lighthouse_info_[lighthouse_channel]; - // Se the ID immediately. + // Set the ID immediately. lighthouse_info.id = lighthouse_id; - // The camera projection matrix for this lighthouse is super easy. We want the focal length in - // X and Y to be 1. This is so that a point [x,y,z] when projected on the image plane lies along - // unit 1 in z. So we can go atan(angle_about_y) == u, or atan(angle_about_x) == v. - lighthouse_info.K.reset(new gtsam::Cal3_S2(1.0, 1.0, 0.0, 0.0, 0.0)); + // Copy over all calibration params for this lighthouse. 
+ for (size_t i = 0; i < 2; i++) { + lighthouse_info.bcal[i].phase = msg.fcalphase[i]; + lighthouse_info.bcal[i].tilt = msg.fcaltilt[i]; + lighthouse_info.bcal[i].curve = msg.fcalcurve[i]; + lighthouse_info.bcal[i].gibpha = msg.fcalgibpha[i]; + lighthouse_info.bcal[i].gibmag = msg.fcalgibmag[i]; + lighthouse_info.bcal[i].ogeephase = msg.fcalogeephase[i]; + lighthouse_info.bcal[i].ogeemag = msg.fcalogeemag[i]; + } - // This is out initial prior on the lighthouse location given no observations. + // Weak initial estimate of lighthouse pose auto obs_gTl = gtsam::Pose3( gtsam::Rot3::Quaternion(1.0, 0.0, 0.0, 0.0), gtsam::Point3(0.0, 0.0, 0.0)); auto cov_gTl = gtsam::noiseModel::Diagonal::Sigmas( - gtsam::Vector6(100.0, 100.0, 100.0, 100.0, 100.0, 100.0)); + gtsam::Vector6(100.0, 100.0, 100.0, 10.0, 10.0, 10.0)); // Allocate a new variable. lighthouse_info.gTl = next_available_key_++; @@ -573,52 +598,10 @@ void PoserComponent::add_lighthouse(const libsurvive_ros2::msg::Lighthouse & msg // Set its initial value. initial_values_.insert(lighthouse_info.gTl, obs_gTl); - // Add a prior on the value. + // Add a prior on the value``. graph_.add(gtsam::PriorFactor(lighthouse_info.gTl, obs_gTl, cov_gTl)); } -// void PoserComponent::add_tracker_pose(const geometry_msgs::msg::PoseWithCovarianceStamped & msg) -// { -// const std::string & tracker_id = msg.header.frame_id; -// if (!id_to_tracker_info_.count(tracker_id)) { -// return; -// } -// const Timestamp stamp = gtsam_from_ros(msg.header.stamp); -// auto & tracker_info = this->id_to_tracker_info_[tracker_id]; -// auto & body_info = this->id_to_body_info_[tracker_info.body_id]; - -// // We can't add an observation for a state that does not exist. -// auto gTb = find_key_for_closest_stamp(body_info.gTb, stamp); -// if (!gTb) { -// return; -// } - -// // Add a prior factor on the pose state -// auto gTt_obs = gtsam_from_ros(msg.pose.pose); -// auto hTb = body_info.bTh[tracker_id].inverse(); -// auto tTb = tracker_info.tTh * hTb; -// auto gTb_obs = gTt_obs * tTb; -// auto gTt_cov = gtsam_from_ros(msg.pose.covariance); -// auto gTb_cov = gTt_cov; -// graph_.add(gtsam::PriorFactor(*gTb, gTb_obs, gTb_cov)); -// } - -// void PoserComponent::add_lighthouse_pose(const geometry_msgs::msg::PoseWithCovarianceStamped & msg) -// { -// const std::string & lighthouse_id = msg.header.frame_id; -// if (!id_to_lighthouse_info_.count(lighthouse_id)) { -// return; -// } -// auto & lighthouse_info = this->id_to_lighthouse_info_[lighthouse_id]; - -// // Collect observation -// auto obs_gTl = gtsam_from_ros(msg.pose.pose); -// auto cov_gTl = gtsam_from_ros(msg.pose.covariance); - -// // Add the observation -// graph_.add(gtsam::PriorFactor(lighthouse_info.gTl, obs_gTl, cov_gTl)); -// } - void PoserComponent::solution_callback() { // Incremental update of the graph usng the latest values and factors. 
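The hand-written index lists in gtsam_from_ros() and ros_from_gtsam() above are easier to audit when written as an index permutation: ROS orders the 6x6 pose covariance as [tx, ty, tz, rx, ry, rz], while the GTSAM Pose3 tangent space is ordered [rx, ry, rz, tx, ty, tz]. A small numpy sketch of the same mapping (illustration only, not package code):

    import numpy as np

    PERM = [3, 4, 5, 0, 1, 2]  # swaps the translation and rotation blocks

    def ros_to_gtsam_cov(ros_cov_flat):
        """ros_cov_flat: the 36-element row-major covariance from the ROS message."""
        ros_cov = np.asarray(ros_cov_flat, dtype=float).reshape(6, 6)
        return ros_cov[np.ix_(PERM, PERM)]

    def gtsam_to_ros_cov(gtsam_cov):
        """Inverse mapping; PERM is its own inverse, so the same indexing works."""
        return gtsam_cov[np.ix_(PERM, PERM)].reshape(-1)

    # Round trip is lossless because the permutation is an involution.
    demo = np.arange(36, dtype=float)
    assert np.allclose(gtsam_to_ros_cov(ros_to_gtsam_cov(demo)), demo)
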
diff --git a/test/output_without_registration.json b/test/output_without_registration.json index 0a281af..c1f15b3 100644 --- a/test/output_without_registration.json +++ b/test/output_without_registration.json @@ -11,10 +11,10 @@ "pose": [ "-0.000000000", "-0.000000000", - "-0.000000000", + "0.000000000", "1.000000000", "0.000000000", - "-0.000000000", + "0.000000000", "0.000000000" ], "variance": [ From e1c733ee63327d1a6f48a887931e85a93624045d Mon Sep 17 00:00:00 2001 From: Andrew Symington Date: Wed, 9 Aug 2023 15:52:39 -0700 Subject: [PATCH 11/18] Check in work in progress --- include/libsurvive_ros2/poser_factors.hpp | 61 ++++- launch/libsurvive_ros2.launch.py | 12 +- src/driver_component.cpp | 1 + src/poser_component.cpp | 283 ++++++++++++++++++---- 4 files changed, 290 insertions(+), 67 deletions(-) diff --git a/include/libsurvive_ros2/poser_factors.hpp b/include/libsurvive_ros2/poser_factors.hpp index 0e1e86c..0449af1 100644 --- a/include/libsurvive_ros2/poser_factors.hpp +++ b/include/libsurvive_ros2/poser_factors.hpp @@ -23,6 +23,10 @@ // Custom factor for integrating Gen 2 light measurements into a GTSAM model. // A lot of the code here is selectively copied and simplified from the // libsurvive autogenerated code, which is not installed by default. +// +// Lighthouse: looking towards, +x left, +y up, +z into the lighthouse. +// Tracker: looking underside connecors down, +x right, +y info, +x up. + #ifndef LIBSURVIVE_ROS2__POSER_FACTORS_HPP_ #define LIBSURVIVE_ROS2__POSER_FACTORS_HPP_ @@ -41,14 +45,17 @@ namespace gtsam { +// Typedefs for mathematical vector types typedef double LinmathPoint3d[3]; typedef double LinmathAxisAngle[3]; +// An axis angle in struct representation struct LinmathAxisAnglePose { LinmathPoint3d Pos; LinmathAxisAngle AxisAngleRot; }; +// Base station calibration struct struct BaseStationCal { double phase; double tilt; @@ -1298,24 +1305,60 @@ class Gen2LightAngleFactor : public NoiseModelFactor2 { auto [gTl_axis, gTl_angle] = gTl.rotation().axisAngle(); lh_p.Pos[0] = gTl.translation().x(); - lh_p.Pos[1] = gTl.translation().y(); - lh_p.Pos[2] = gTl.translation().z(); + lh_p.Pos[1] = gTl.translation().y(); + lh_p.Pos[2] = gTl.translation().z(); lh_p.AxisAngleRot[0] = gTl_axis.unitVector().x() * gTl_angle; - lh_p.AxisAngleRot[1] = gTl_axis.unitVector().y() * gTl_angle; - lh_p.AxisAngleRot[2] = gTl_axis.unitVector().z() * gTl_angle; + lh_p.AxisAngleRot[1] = gTl_axis.unitVector().y() * gTl_angle; + lh_p.AxisAngleRot[2] = gTl_axis.unitVector().z() * gTl_angle; auto [gTb_axis, gTb_angle] = gTl.rotation().axisAngle(); - obj_p.Pos[0] = gTl.translation().x(); - obj_p.Pos[1] = gTl.translation().y(); - obj_p.Pos[2] = gTl.translation().z(); + obj_p.Pos[0] = gTb.translation().x(); + obj_p.Pos[1] = gTb.translation().y(); + obj_p.Pos[2] = gTb.translation().z(); obj_p.AxisAngleRot[0] = gTb_axis.unitVector().x() * gTb_angle; - obj_p.AxisAngleRot[1] = gTb_axis.unitVector().y() * gTb_angle; - obj_p.AxisAngleRot[2] = gTb_axis.unitVector().z() * gTb_angle; + obj_p.AxisAngleRot[1] = gTb_axis.unitVector().y() * gTb_angle; + obj_p.AxisAngleRot[2] = gTb_axis.unitVector().z() * gTb_angle; double predicted_angle = is_y_axis_ ? 
gen_reproject_axis_y_gen2_axis_angle(&obj_p, sensor_pt_, &lh_p, &bcal_) : gen_reproject_axis_x_gen2_axis_angle(&obj_p, sensor_pt_, &lh_p, &bcal_); + ///////////////////////////////////////////////////// + // Helps find stray nans + ///////////////////////////////////////////////////// + static std::mutex mutex; + if (std::isnan(predicted_angle)) { + std::scoped_lock lock(mutex); + std::cout << "NaN detected ********** " << std::endl + << "I: " << angle_ << std::endl + << "O: " << predicted_angle << std::endl + << "b_p:t[0]: " << obj_p.Pos[0] << std::endl + << "b_p:t[1]: " << obj_p.Pos[1] << std::endl + << "b_p:t[2]: " << obj_p.Pos[2] << std::endl + << "b_p:r[0]: " << obj_p.AxisAngleRot[0] << std::endl + << "b_p:r[1]: " << obj_p.AxisAngleRot[1] << std::endl + << "b_p:r[2]: " << obj_p.AxisAngleRot[2] << std::endl + << "l_p:t[0]: " << lh_p.Pos[0] << std::endl + << "l_p:t[1]: " << lh_p.Pos[1] << std::endl + << "l_p:t[2]: " << lh_p.Pos[2] << std::endl + << "l_p:r[0]: " << lh_p.AxisAngleRot[0] << std::endl + << "l_p:r[1]: " << lh_p.AxisAngleRot[1] << std::endl + << "l_p:r[2]: " << lh_p.AxisAngleRot[2] << std::endl + << "b_sen[0]: " << sensor_pt_[0] << std::endl + << "b_sen[1]: " << sensor_pt_[1] << std::endl + << "b_sen[2]: " << sensor_pt_[2] << std::endl + << "c:phase: " << bcal_.phase << std::endl + << "c:tilt: " << bcal_.tilt << std::endl + << "c:curve: " << bcal_.curve << std::endl + << "c:gibpha: " << bcal_.gibpha << std::endl + << "c:gibmag: " << bcal_.gibmag << std::endl + << "c:ogeephase: " << bcal_.ogeephase << std::endl + << "c:ogeemag: " << bcal_.ogeemag<< std::endl; + exit(0); + } + + ///////////////////////////////////////////////////// + if (H1) { double J[6]; if (is_y_axis_) { diff --git a/launch/libsurvive_ros2.launch.py b/launch/libsurvive_ros2.launch.py index b37c930..713fba2 100644 --- a/launch/libsurvive_ros2.launch.py +++ b/launch/libsurvive_ros2.launch.py @@ -144,7 +144,7 @@ def generate_launch_description(): name="libsurvive_ros2_container", namespace=LaunchConfiguration("namespace"), prefix=launch_prefix, - output="screen", + output="own_log", ), LoadComposableNodes( target_container="libsurvive_ros2_container", @@ -188,7 +188,7 @@ def generate_launch_description(): namespace=LaunchConfiguration("namespace"), condition=IfCondition(enable_driver), prefix=launch_prefix, - output="screen", + output="own_log", parameters=driver_parameters, ), Node( @@ -198,7 +198,7 @@ def generate_launch_description(): namespace=LaunchConfiguration("namespace"), condition=IfCondition(enable_poser), prefix=launch_prefix, - output="screen", + output="own_log", parameters=poser_parameters, ), ], @@ -217,7 +217,7 @@ def generate_launch_description(): {"min_qos_depth": 100}, ], condition=IfCondition(LaunchConfiguration("web_bridge")), - output="log", + output="own_log", ) # For recording all data from the experiment. 
We'll use the MCAP format by @@ -225,7 +225,7 @@ def generate_launch_description(): bag_recorder_node = ExecuteProcess( cmd=["ros2", "bag", "record", "-s", "mcap", "-o", LaunchConfiguration("record"), "-a"], condition=IfCondition(enable_record), - output="log", + output="own_log", ) # In replay we're going to take the raw data and recalculate the poses and TF2 @@ -241,7 +241,7 @@ def generate_launch_description(): "/libsurvive/pose/lighthouse:=/old/pose/lighthouse", ], condition=UnlessCondition(enable_driver), - output="log", + output="own_log", ) return LaunchDescription( diff --git a/src/driver_component.cpp b/src/driver_component.cpp index 49c46f8..d06bcd3 100644 --- a/src/driver_component.cpp +++ b/src/driver_component.cpp @@ -152,6 +152,7 @@ static void ootx_received_func( // Callback for tracker event static int config_func(struct SurviveObject * so, char * ct0conf, int len) { + std::cout << ct0conf << std::endl; const int res = survive_default_config_process(so, ct0conf, len); if (_singleton) { // Get the approximate time diff --git a/src/poser_component.cpp b/src/poser_component.cpp index be5b59e..429a7d4 100644 --- a/src/poser_component.cpp +++ b/src/poser_component.cpp @@ -51,6 +51,7 @@ const double kOmegaDriftSigma = 0.1; const double kIntegrationSigma = 0.1; const double kSensorPositionSigma = 0.0001; const double kSigmaAngle = 0.001; +const double kUseImuData = true; // Time between bias states const Timestamp kImuBiasTimeNs = 1e9; @@ -97,11 +98,11 @@ gtsam::Pose3 gtsam_from_ros(const geometry_msgs::msg::Transform & pose) geometry_msgs::msg::Pose ros_from_gtsam(const gtsam::Pose3 & pose) { + gtsam::Quaternion quaternion = pose.rotation().toQuaternion(); geometry_msgs::msg::Pose msg; msg.position.x = pose.translation().x(); msg.position.y = pose.translation().y(); msg.position.z = pose.translation().z(); - gtsam::Quaternion quaternion = pose.rotation().toQuaternion(); msg.orientation.w = quaternion.w(); msg.orientation.x = quaternion.x(); msg.orientation.y = quaternion.y(); @@ -111,11 +112,11 @@ geometry_msgs::msg::Pose ros_from_gtsam(const gtsam::Pose3 & pose) geometry_msgs::msg::Transform ros_transform_from_gtsam(const gtsam::Pose3 & pose) { + gtsam::Quaternion quaternion = pose.rotation().toQuaternion(); geometry_msgs::msg::Transform msg; msg.translation.x = pose.translation().x(); msg.translation.y = pose.translation().y(); msg.translation.z = pose.translation().z(); - gtsam::Quaternion quaternion = pose.rotation().toQuaternion(); msg.rotation.w = quaternion.w(); msg.rotation.x = quaternion.x(); msg.rotation.y = quaternion.y(); @@ -243,8 +244,7 @@ PoserComponent::PoserComponent(const rclcpp::NodeOptions & options) msg.fcalphase[i] = lighthouse["fcalphase"][i].as(); msg.fcaltilt[i] = lighthouse["fcaltilt"][i].as(); } - // TODO(asymingt) you can't insert a lighthouse before you have a measurement - // this->add_lighthouse(msg); + this->add_lighthouse(msg); } // Add registration information @@ -255,11 +255,21 @@ PoserComponent::PoserComponent(const rclcpp::NodeOptions & options) "Unknown body frame '" << body_id << "' specified in registration"); continue; } + auto & body_info = id_to_body_info_[body_id]; + if (!body_info.is_static) { + RCLCPP_WARN_STREAM(this->get_logger(), + "Body frame '" << body_id << "' is not marked static"); + continue; + } std::vector gTb = registration["gTb"].as>(); auto gTb_obs = gtsam::Pose3( gtsam::Rot3::Quaternion(gTb[3], gTb[4], gTb[5], gTb[6]), gtsam::Point3(gTb[0], gTb[1], gTb[2])); - initial_values_.update(id_to_body_info_[body_id].gTb[0], 
gTb_obs); + auto gTb_cov = gtsam::noiseModel::Diagonal::Sigmas( + gtsam::Vector6(1e-9, 1e-9, 1e-9, 1e-9, 1e-9, 1e-9)); + graph_.add(gtsam::PriorFactor( + body_info.gTb[0], gTb_obs, gTb_cov)); + initial_values_.update(body_info.gTb[0], gTb_obs); } // TODO(asymingt) this might not play very well with composable node inter-process @@ -288,6 +298,43 @@ PoserComponent::PoserComponent(const rclcpp::NodeOptions & options) // Add a timer to extract solution and publish pose timer_ = this->create_wall_timer( 100ms, std::bind(&PoserComponent::solution_callback, this)); + + // TESTING!!! + // auto angle_cov = gtsam::noiseModel::Diagonal::Sigmas(gtsam::Vector1(kSigmaAngle)); + // gtsam::BaseStationCal bcal = { + // .phase = 0, + // .tilt = 0, + // .curve = 0, + // .gibpha = 0, + // .gibmag = 0, + // .ogeephase = 0, + // .ogeemag = 0, + // }; + // auto factorX = std::make_shared( + // 0, // lighthouse -> global frame + // 1, // body -> global frame + // 0, // observed angle + // angle_cov, // uncertainty in our observation + // 0, // is this the y-axis? + // gtsam::Point3(0, 0, 0), // sensor location in the body frame + // bcal); // perfect sensor + // auto factorY = std::make_shared( + // 0, // lighthouse -> global frame + // 1, // body -> global frame + // 0, // observed angle + // angle_cov, // uncertainty in our observation + // 1, // is this the y-axis? + // gtsam::Point3(0, 0, 0), // sensor location in the body frame + // bcal); // perfect sensor + // auto test_gTb = gtsam::Pose3( + // gtsam::Rot3::Quaternion(1.0, 0.0, 0.0, 0.0), + // gtsam::Point3(0.1, 0.1, -1.0)); + // auto test_gTl = gtsam::Pose3( + // gtsam::Rot3::Quaternion(1.0, 0.0, 0.0, 0.0), + // gtsam::Point3(0.0, 0.0, 0.0)); + // auto errX = factorX->evaluateError(test_gTl, test_gTb); + // auto errY = factorY->evaluateError(test_gTl, test_gTb); + // RCLCPP_WARN_STREAM(this->get_logger(), "Error state X = " << errX(0) << " and Y = " << errY(0)); } PoserComponent::~PoserComponent() @@ -322,29 +369,74 @@ void PoserComponent::add_angle(const libsurvive_ros2::msg::Angle & msg) RCLCPP_ERROR_STREAM(this->get_logger(), "Tracker " << tracker_id << " plane " << int(msg.plane) << " is not in {0, 1}"); } - - // Get the point and normal of the sensor. - auto & t_sensor = tracker_info.t_points[msg.sensor_id]; + + // Get the current timestamp + const Timestamp stamp_curr = rclcpp::Time(msg.header.stamp).nanoseconds(); + + // Get the last pose timestamp, if one exists for this body. + std::optional stamp_prev = std::nullopt; + if (body_info.gTb.size()) { + stamp_prev = body_info.gTb.rbegin()->first; + } // We rely on IMU inserting the actual states. We're just going to find the // closest one to the angle timestamp and use it. There should be one... - const Timestamp stamp = rclcpp::Time(msg.header.stamp).nanoseconds(); - auto gTb = find_key_for_closest_stamp(body_info.gTb, stamp); - if (!gTb) { - return; + std::optional gTb; + if (kUseImuData) { + // If the IMU is being used and there is no pose state, we have to wait for one to + // be inserted, so it makes more sense to return at this point. + gTb = find_key_for_closest_stamp(body_info.gTb, stamp_curr); + if (!gTb) { + return; + } + } else { + // If the IMU is ignored, then we need to insert a state to represent the pose of + // body at the current time. But first, let's cover the case where we've received + // an angle measurement from this body before. 
+ if (body_info.gTb.count(stamp_curr) == 0) { + body_info.gTb[stamp_curr] = next_available_key_++; + if (!stamp_prev) { + RCLCPP_INFO_STREAM(this->get_logger(), + "Adding prior pose for body " << tracker_info.body_id); + auto gTb_obs = gtsam::Pose3( + gtsam::Rot3::Quaternion(1.0, 0.0, 0.0, 0.0), + gtsam::Point3(0.0, 0.0, 0.0)); + auto gTb_cov = gtsam::noiseModel::Diagonal::Sigmas( + gtsam::Vector6(100.0, 100.0, 100.0, 100.0, 100.0, 100.0)); + graph_.add( + gtsam::PriorFactor( + body_info.gTb[stamp_curr], gTb_obs, gTb_cov)); + initial_values_.insert(body_info.gTb[stamp_curr], gTb_obs); + } else { + auto gTb_obs = gtsam::Pose3( + gtsam::Rot3::Quaternion(1.0, 0.0, 0.0, 0.0), + gtsam::Point3(0.0, 0.0, 0.0)); + auto gTb_cov = gtsam::noiseModel::Isotropic::Variance(6, 1e-3); + graph_.add( + gtsam::BetweenFactor( + body_info.gTb[*stamp_prev], + body_info.gTb[stamp_curr], + gTb_obs, + gTb_cov)); + initial_values_.insert(body_info.gTb[stamp_curr], gTb_obs); + } + } else { + RCLCPP_WARN_STREAM(this->get_logger(), "Pose state exists for tracker " << tracker_id); + } + // Use the keu for the current time stamp. + gTb = body_info.gTb[stamp_curr]; } // Transform sensor to body-frame. We can maybe optimize this at some point in the // future to prevent having to this for every angle measurement. Not sure what sort // of performance increase it would give us though. + auto & t_sensor = tracker_info.t_points[msg.sensor_id]; auto & bTh = body_info.bTh[tracker_id]; auto & tTh = tracker_info.tTh; auto b_sensor = (bTh * tTh.inverse()).transformFrom(t_sensor); // Observed angle and error in that angle. auto angle_cov = gtsam::noiseModel::Diagonal::Sigmas(gtsam::Vector1(kSigmaAngle)); - - // Add a between factor graph_.emplace_shared( lighthouse_info.gTl, // lighthouse -> global frame *gTb, // body -> global frame @@ -357,6 +449,11 @@ void PoserComponent::add_angle(const libsurvive_ros2::msg::Angle & msg) void PoserComponent::add_imu(const sensor_msgs::msg::Imu & msg) { + // If ignore IMU data is requested, respect this. + if (!kUseImuData) { + return; + } + // Only accept IMU measurements from trackers already inserted into the graph. const std::string & tracker_id = msg.header.frame_id; if (!id_to_tracker_info_.count(tracker_id)) { @@ -417,15 +514,22 @@ void PoserComponent::add_imu(const sensor_msgs::msg::Imu & msg) // current timestamp. This is possible, but very, very unlikely. 
if (!body_info.gTb.count(stamp_curr)) { body_info.gTb[stamp_curr] = next_available_key_++; - auto gTb_obs = gtsam::Pose3( - gtsam::Rot3::Quaternion(1.0, 0.0, 0.0, 0.0), - gtsam::Point3(0.0, 0.0, 0.0)); - auto gTb_cov = gtsam::noiseModel::Diagonal::Sigmas( - gtsam::Vector6(100.0, 100.0, 100.0, 100.0, 100.0, 100.0)); - graph_.add( - gtsam::PriorFactor( - body_info.gTb[stamp_curr], gTb_obs, gTb_cov)); - initial_values_.insert(body_info.gTb[stamp_curr], gTb_obs); + if (!stamp_prev) { + RCLCPP_INFO_STREAM(this->get_logger(), + "Adding prior pose for body " << tracker_info.body_id); + auto gTb_obs = gtsam::Pose3( + gtsam::Rot3::Quaternion(1.0, 0.0, 0.0, 0.0), + gtsam::Point3(0.0, 0.0, 0.0)); + auto gTb_cov = gtsam::noiseModel::Diagonal::Sigmas( + gtsam::Vector6(100.0, 100.0, 100.0, 100.0, 100.0, 100.0)); + graph_.add( + gtsam::PriorFactor( + body_info.gTb[stamp_curr], gTb_obs, gTb_cov)); + initial_values_.insert(body_info.gTb[stamp_curr], gTb_obs); + } else { + auto gTb_prev = isam2_.calculateEstimate(body_info.gTb[*stamp_prev]); + initial_values_.insert(body_info.gTb[stamp_curr], gTb_prev); + } } else { RCLCPP_WARN_STREAM(this->get_logger(), "Pose state exists for tracker " << tracker_id); } @@ -434,13 +538,20 @@ void PoserComponent::add_imu(const sensor_msgs::msg::Imu & msg) // current timestamp. This is possible, but very, very unlikely. if (!body_info.g_V.count(stamp_curr)) { body_info.g_V[stamp_curr] = next_available_key_++; - auto g_V_obs = gtsam::Vector3(0.0, 0.0, 0.0); - auto g_V_cov = gtsam::noiseModel::Diagonal::Sigmas( - gtsam::Vector3(10.0, 10.0, 10.0)); - graph_.add( - gtsam::PriorFactor( - body_info.g_V[stamp_curr], g_V_obs, g_V_cov)); - initial_values_.insert(body_info.g_V[stamp_curr], g_V_obs); + if (!stamp_prev) { + RCLCPP_INFO_STREAM(this->get_logger(), + "Adding prior velocity for body " << tracker_info.body_id); + auto g_V_obs = gtsam::Vector3(0.0, 0.0, 0.0); + auto g_V_cov = gtsam::noiseModel::Diagonal::Sigmas( + gtsam::Vector3(10.0, 10.0, 10.0)); + graph_.add( + gtsam::PriorFactor( + body_info.g_V[stamp_curr], g_V_obs, g_V_cov)); + initial_values_.insert(body_info.g_V[stamp_curr], g_V_obs); + } else { + auto g_V_prev = isam2_.calculateEstimate(body_info.g_V[*stamp_prev]); + initial_values_.insert(body_info.g_V[stamp_curr], g_V_prev); + } } else { RCLCPP_WARN_STREAM(this->get_logger(), "Velocity state exists for tracker " << tracker_id); } @@ -449,26 +560,29 @@ void PoserComponent::add_imu(const sensor_msgs::msg::Imu & msg) // tracks its own bias internally. 
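The bookkeeping in add_imu() builds the standard GTSAM preintegrated-IMU chain: consecutive pose and velocity states joined by an ImuFactor, and bias states joined by a between factor so the bias can drift slowly. The overall pattern is easier to see in isolation; a minimal sketch using the GTSAM Python bindings, with placeholder noise values and fabricated measurements:

    import numpy as np
    import gtsam
    from gtsam.symbol_shorthand import B, V, X

    # Placeholder preintegration parameters (Z-up gravity convention).
    params = gtsam.PreintegrationParams.MakeSharedU(9.81)
    params.setAccelerometerCovariance(np.eye(3) * 1e-3)
    params.setGyroscopeCovariance(np.eye(3) * 1e-4)
    params.setIntegrationCovariance(np.eye(3) * 1e-4)

    # Accumulate a handful of stationary IMU samples at 250 Hz.
    pim = gtsam.PreintegratedImuMeasurements(params, gtsam.imuBias.ConstantBias())
    for _ in range(10):
        pim.integrateMeasurement(np.array([0.0, 0.0, 9.81]), np.zeros(3), 1.0 / 250.0)

    graph = gtsam.NonlinearFactorGraph()
    # Pose/velocity at step 0 and 1, plus the bias at step 0, tied by the IMU factor.
    graph.add(gtsam.ImuFactor(X(0), V(0), X(1), V(1), B(0), pim))
    # Bias random walk between consecutive bias states.
    bias_noise = gtsam.noiseModel.Isotropic.Sigma(6, 0.1)
    graph.add(gtsam.BetweenFactorConstantBias(
        B(0), B(1), gtsam.imuBias.ConstantBias(), bias_noise))
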
if (!tracker_info.b_B.count(stamp_curr)) { tracker_info.b_B[stamp_curr] = next_available_key_++; - auto b_B_obs = gtsam::imuBias::ConstantBias(); - auto b_B_cov = gtsam::noiseModel::Diagonal::Sigmas( - gtsam::Vector6( - kAccelDriftSigma, kAccelDriftSigma, kAccelDriftSigma, - kOmegaDriftSigma, kOmegaDriftSigma, kOmegaDriftSigma)); - graph_.add( - gtsam::PriorFactor( - tracker_info.b_B[stamp_curr], b_B_obs, b_B_cov)); - initial_values_.insert(tracker_info.b_B[stamp_curr], b_B_obs); + if (!stamp_bias) { + RCLCPP_INFO_STREAM(this->get_logger(), + "Adding prior bias for tracker " << tracker_id); + auto b_B_obs = gtsam::imuBias::ConstantBias(); + auto b_B_cov = gtsam::noiseModel::Diagonal::Sigmas( + gtsam::Vector6( + kAccelDriftSigma, kAccelDriftSigma, kAccelDriftSigma, + kOmegaDriftSigma, kOmegaDriftSigma, kOmegaDriftSigma)); + graph_.add( + gtsam::PriorFactor( + tracker_info.b_B[stamp_curr], b_B_obs, b_B_cov)); + initial_values_.insert(tracker_info.b_B[stamp_curr], b_B_obs); + } else { + auto b_B_prev = isam2_.calculateEstimate( + tracker_info.b_B[*stamp_bias]); + initial_values_.insert(tracker_info.b_B[stamp_curr], b_B_prev); + } } else { RCLCPP_WARN_STREAM(this->get_logger(), "Bias state exists for tracker " << tracker_id); } // Only add between factors for IMU measurements if (stamp_prev && stamp_bias) { - auto b_B_obs = gtsam::imuBias::ConstantBias(); - auto b_B_cov = gtsam::noiseModel::Diagonal::Sigmas( - gtsam::Vector6( - kAccelDriftSigma, kAccelDriftSigma, kAccelDriftSigma, - kOmegaDriftSigma, kOmegaDriftSigma, kOmegaDriftSigma)); graph_.add( gtsam::ImuFactor( body_info.gTb[*stamp_prev], // last pose @@ -477,16 +591,23 @@ void PoserComponent::add_imu(const sensor_msgs::msg::Imu & msg) body_info.g_V[stamp_curr], // last velocity tracker_info.b_B[*stamp_bias], // bias state *tracker_info.preintegrator)); // preintegrator + auto b_B_obs = gtsam::imuBias::ConstantBias(); + auto b_B_cov = gtsam::noiseModel::Diagonal::Sigmas( + gtsam::Vector6( + kAccelDriftSigma, kAccelDriftSigma, kAccelDriftSigma, + kOmegaDriftSigma, kOmegaDriftSigma, kOmegaDriftSigma)); graph_.add( gtsam::BetweenFactor( tracker_info.b_B[*stamp_bias], tracker_info.b_B[stamp_curr], b_B_obs, b_B_cov)); + auto b_B_prev = isam2_.calculateEstimate( + tracker_info.b_B[*stamp_bias]); + tracker_info.preintegrator->resetIntegrationAndSetBias(b_B_prev); + } else { + tracker_info.preintegrator->resetIntegration(); } - - // Reset the IMU integrator to reflect that we have a new pre-integration period. - tracker_info.preintegrator->resetIntegration(); } } @@ -525,16 +646,45 @@ void PoserComponent::add_tracker(const libsurvive_ros2::msg::Tracker & msg) tracker_info.omega_scale = gtsam_from_ros(msg.omega_scale); tracker_info.omega_bias = gtsam_from_ros(msg.omega_bias); - // Fixed transforms. - tracker_info.tTi = gtsam_from_ros(msg.tracker_from_imu); - tracker_info.tTh = gtsam_from_ros(msg.tracker_from_head); - // Sensor locations and normals. + double x = 0.0; + double y = 0.0; + double n = 0.0; for (size_t k = 0; k < msg.channels.size(); k++) { const uint8_t channel = msg.channels[k]; tracker_info.t_points[channel] = gtsam_from_ros(msg.points[k]); tracker_info.t_normals[channel] = gtsam_from_ros(msg.normals[k]); + switch(channel) { + case 0: case 8: case 18: + x += msg.points[k].x; + y += msg.points[k].y; + n += 1.0; + break; + default: + break; + } } + x /= n; + y /= n; + + // Transform that moves from the IMU frame to the tracking reference frame, in + // which the sensor coordinates are expressed. 
+ tracker_info.tTi = gtsam_from_ros(msg.tracker_from_imu); + + // I've looked at some plots in foxglove and the "head" location reported by the + // tracker is not nearly where it should be relative to the tracking reference + // frame ("light"). I am going to assume the reported "head" to "light" z offset + // is right, but not trust the X, Y or orientation. It seems like the light frame + // is related to the head frame by two 90 degree rotations. I'm going to construct + // the x and y offset by averaging three equidistant sensors spaced at 120deg to + // find the center coordinate and use that as the bolt location. This transform + // must be hard-coded somewhere in SteamVR to make things work. + auto identity_rotation = gtsam::Rot3::AxisAngle(gtsam::Point3(1.0, 0.0, 0.0), M_PI_2); + auto ccw_90deg_about_x = gtsam::Rot3::AxisAngle(gtsam::Point3(1.0, 0.0, 0.0), M_PI_2); + auto ccw_90deg_about_y = gtsam::Rot3::AxisAngle(gtsam::Point3(0.0, 1.0, 0.0), M_PI_2); + auto ccw_90deg_about_z = gtsam::Rot3::AxisAngle(gtsam::Point3(0.0, 0.0, 1.0), M_PI_2); + tracker_info.tTh = gtsam::Pose3(ccw_90deg_about_x * ccw_90deg_about_y, + gtsam::Point3(x, y, -0.007)); // The IMU factor needs to know the pose of the IMU sensor in the body frame in // order to correct for centripetal and coriolis terms. Calculate this now. @@ -588,7 +738,7 @@ void PoserComponent::add_lighthouse(const libsurvive_ros2::msg::Lighthouse & msg // Weak initial estimate of lighthouse pose auto obs_gTl = gtsam::Pose3( gtsam::Rot3::Quaternion(1.0, 0.0, 0.0, 0.0), - gtsam::Point3(0.0, 0.0, 0.0)); + gtsam::Point3(0.0, 0.0, -1.0)); auto cov_gTl = gtsam::noiseModel::Diagonal::Sigmas( gtsam::Vector6(100.0, 100.0, 100.0, 10.0, 10.0, 10.0)); @@ -622,6 +772,35 @@ void PoserComponent::solution_callback() transform_msg.child_frame_id = tracker_id; transform_msg.transform = ros_transform_from_gtsam(bTh); tf_static_broadcaster_->sendTransform(transform_msg); + // If we have info about this tracker, lets send some transforms + if (id_to_tracker_info_.count(tracker_id)) { + auto & tracker_info = id_to_tracker_info_[tracker_id]; + transform_msg.header.stamp = now; + transform_msg.header.frame_id = tracker_id; + transform_msg.child_frame_id = tracker_id + "/light"; + transform_msg.transform = ros_transform_from_gtsam(tracker_info.tTh.inverse()); + tf_static_broadcaster_->sendTransform(transform_msg); + // Send imu transform -- looks like there + transform_msg.header.stamp = now; + transform_msg.header.frame_id = tracker_id + "/light"; + transform_msg.child_frame_id = tracker_id + "/imu"; + transform_msg.transform = ros_transform_from_gtsam(tracker_info.tTi); + tf_static_broadcaster_->sendTransform(transform_msg); + // Send sensor transforms + for (const auto& [channel_id, t_point] : tracker_info.t_points) { + transform_msg.header.stamp = now; + transform_msg.header.frame_id = tracker_id + "/light"; + transform_msg.child_frame_id = tracker_id + "/" + std::to_string(channel_id); + transform_msg.transform.translation.x = t_point.x(); + transform_msg.transform.translation.y = t_point.y(); + transform_msg.transform.translation.z = t_point.z(); + transform_msg.transform.rotation.w = 1.0; + transform_msg.transform.rotation.x = 0.0; + transform_msg.transform.rotation.y = 0.0; + transform_msg.transform.rotation.z = 0.0; + tf_static_broadcaster_->sendTransform(transform_msg); + } + } } // Pack up all the timesteps auto iter = body_info.gTb.rbegin(); From c7336f8db9cfaf842026d48e687a4f515f9c7eda Mon Sep 17 00:00:00 2001 From: Andrew Symington Date: Wed, 9 Aug 
2023 16:47:17 -0700 Subject: [PATCH 12/18] Check in work in progress --- include/libsurvive_ros2/poser_component.hpp | 5 +- include/libsurvive_ros2/poser_factors.hpp | 22 +++--- src/poser_component.cpp | 85 +++++---------------- 3 files changed, 34 insertions(+), 78 deletions(-) diff --git a/include/libsurvive_ros2/poser_component.hpp b/include/libsurvive_ros2/poser_component.hpp index be4bae8..33793f2 100644 --- a/include/libsurvive_ros2/poser_component.hpp +++ b/include/libsurvive_ros2/poser_component.hpp @@ -143,8 +143,9 @@ struct LighthouseInfo // Serial number / identifier std::string id; - // Static pose of this lighthouse - gtsam::Key gTl; + // Static pose of this lighthouse. We express this in the opposite way to what you + // would expect because the jacobians used in the factor expect it in this way. + gtsam::Key lTg; // Calibration parameters for the two axes gtsam::BaseStationCal bcal[2]; diff --git a/include/libsurvive_ros2/poser_factors.hpp b/include/libsurvive_ros2/poser_factors.hpp index 0449af1..17b69af 100644 --- a/include/libsurvive_ros2/poser_factors.hpp +++ b/include/libsurvive_ros2/poser_factors.hpp @@ -1278,14 +1278,14 @@ class Gen2LightAngleFactor : public NoiseModelFactor2 { Gen2LightAngleFactor() {} Gen2LightAngleFactor( - Key key_gTl, // gtsam::Key for global -> lighthouse frame transform + Key key_lTg, // gtsam::Key for global -> lighthouse frame transform Key key_gTb, // gtsam::Key for body -> global frame transform const double & angle, // Measured agle in radians const SharedNoiseModel& model, // Noise model bool is_y_axis, // if true, axis = 1 else axis = 0 const Point3 & sensor, // Pose of the sensor in the body frame const BaseStationCal & bcal) // Base station calibration - : Base(model, key_gTl, key_gTb) + : Base(model, key_lTg, key_gTb) , angle_(angle) , is_y_axis_(is_y_axis) , bcal_(bcal) @@ -1296,22 +1296,22 @@ class Gen2LightAngleFactor : public NoiseModelFactor2 { } Vector evaluateError( - const Pose3& gTl, + const Pose3& lTg, const Pose3& gTb, boost::optional H1 = boost::none, boost::optional H2 = boost::none) const override { LinmathAxisAnglePose lh_p, obj_p; - auto [gTl_axis, gTl_angle] = gTl.rotation().axisAngle(); - lh_p.Pos[0] = gTl.translation().x(); - lh_p.Pos[1] = gTl.translation().y(); - lh_p.Pos[2] = gTl.translation().z(); - lh_p.AxisAngleRot[0] = gTl_axis.unitVector().x() * gTl_angle; - lh_p.AxisAngleRot[1] = gTl_axis.unitVector().y() * gTl_angle; - lh_p.AxisAngleRot[2] = gTl_axis.unitVector().z() * gTl_angle; + auto [lTg_axis, lTg_angle] = lTg.rotation().axisAngle(); + lh_p.Pos[0] = lTg.translation().x(); + lh_p.Pos[1] = lTg.translation().y(); + lh_p.Pos[2] = lTg.translation().z(); + lh_p.AxisAngleRot[0] = lTg_axis.unitVector().x() * lTg_angle; + lh_p.AxisAngleRot[1] = lTg_axis.unitVector().y() * lTg_angle; + lh_p.AxisAngleRot[2] = lTg_axis.unitVector().z() * lTg_angle; - auto [gTb_axis, gTb_angle] = gTl.rotation().axisAngle(); + auto [gTb_axis, gTb_angle] = gTb.rotation().axisAngle(); obj_p.Pos[0] = gTb.translation().x(); obj_p.Pos[1] = gTb.translation().y(); obj_p.Pos[2] = gTb.translation().z(); diff --git a/src/poser_component.cpp b/src/poser_component.cpp index 429a7d4..5abe3b5 100644 --- a/src/poser_component.cpp +++ b/src/poser_component.cpp @@ -197,15 +197,6 @@ PoserComponent::PoserComponent(const rclcpp::NodeOptions & options) // the global frame, and set a prior on that state id_to_body_info_[body_id].is_static = body["static"].as(); if (id_to_body_info_[body_id].is_static) { - - // Velocity is always zero. 
- id_to_body_info_[body_id].g_V[0] = next_available_key_++; - auto g_V_obs = gtsam::Vector3(0.0, 0.0, 0.0); - auto g_V_cov = gtsam::noiseModel::Diagonal::Sigmas( - gtsam::Vector3(1e-9, 1e-9, 1e-9)); - graph_.add(gtsam::PriorFactor( - id_to_body_info_[body_id].g_V[0], g_V_obs, g_V_cov)); - initial_values_.insert(id_to_body_info_[body_id].g_V[0], g_V_obs); // Pose is unknown, but it start it off somewhere reasonable. id_to_body_info_[body_id].gTb[0] = next_available_key_++; @@ -213,7 +204,7 @@ PoserComponent::PoserComponent(const rclcpp::NodeOptions & options) gtsam::Rot3::Quaternion(1.0, 0.0, 0.0, 0.0), gtsam::Point3(0.0, 0.0, 0.0)); auto gTb_cov = gtsam::noiseModel::Diagonal::Sigmas( - gtsam::Vector6(10.0, 10.0, 10.0, 100.0, 100.0, 100.0)); + gtsam::Vector6(100.0, 100.0, 100.0, 100.0, 100.0, 100.0)); graph_.add(gtsam::PriorFactor( id_to_body_info_[body_id].gTb[0], gTb_obs, gTb_cov)); initial_values_.insert(id_to_body_info_[body_id].gTb[0], gTb_obs); @@ -298,43 +289,6 @@ PoserComponent::PoserComponent(const rclcpp::NodeOptions & options) // Add a timer to extract solution and publish pose timer_ = this->create_wall_timer( 100ms, std::bind(&PoserComponent::solution_callback, this)); - - // TESTING!!! - // auto angle_cov = gtsam::noiseModel::Diagonal::Sigmas(gtsam::Vector1(kSigmaAngle)); - // gtsam::BaseStationCal bcal = { - // .phase = 0, - // .tilt = 0, - // .curve = 0, - // .gibpha = 0, - // .gibmag = 0, - // .ogeephase = 0, - // .ogeemag = 0, - // }; - // auto factorX = std::make_shared( - // 0, // lighthouse -> global frame - // 1, // body -> global frame - // 0, // observed angle - // angle_cov, // uncertainty in our observation - // 0, // is this the y-axis? - // gtsam::Point3(0, 0, 0), // sensor location in the body frame - // bcal); // perfect sensor - // auto factorY = std::make_shared( - // 0, // lighthouse -> global frame - // 1, // body -> global frame - // 0, // observed angle - // angle_cov, // uncertainty in our observation - // 1, // is this the y-axis? - // gtsam::Point3(0, 0, 0), // sensor location in the body frame - // bcal); // perfect sensor - // auto test_gTb = gtsam::Pose3( - // gtsam::Rot3::Quaternion(1.0, 0.0, 0.0, 0.0), - // gtsam::Point3(0.1, 0.1, -1.0)); - // auto test_gTl = gtsam::Pose3( - // gtsam::Rot3::Quaternion(1.0, 0.0, 0.0, 0.0), - // gtsam::Point3(0.0, 0.0, 0.0)); - // auto errX = factorX->evaluateError(test_gTl, test_gTb); - // auto errY = factorY->evaluateError(test_gTl, test_gTb); - // RCLCPP_WARN_STREAM(this->get_logger(), "Error state X = " << errX(0) << " and Y = " << errY(0)); } PoserComponent::~PoserComponent() @@ -379,10 +333,12 @@ void PoserComponent::add_angle(const libsurvive_ros2::msg::Angle & msg) stamp_prev = body_info.gTb.rbegin()->first; } - // We rely on IMU inserting the actual states. We're just going to find the - // closest one to the angle timestamp and use it. There should be one... std::optional gTb; - if (kUseImuData) { + if (body_info.is_static) { + // If this is a static body, then we shouldn't keep adding states. Just use the + // static single state and treat all observations. + gTb = body_info.gTb[0]; + } else if (kUseImuData) { // If the IMU is being used and there is no pose state, we have to wait for one to // be inserted, so it makes more sense to return at this point. gTb = find_key_for_closest_stamp(body_info.gTb, stamp_curr); @@ -438,7 +394,7 @@ void PoserComponent::add_angle(const libsurvive_ros2::msg::Angle & msg) // Observed angle and error in that angle. 
auto angle_cov = gtsam::noiseModel::Diagonal::Sigmas(gtsam::Vector1(kSigmaAngle)); graph_.emplace_shared( - lighthouse_info.gTl, // lighthouse -> global frame + lighthouse_info.lTg, // lighthouse -> global frame *gTb, // body -> global frame msg.angle, // observed angle angle_cov, // uncertainty in our observation @@ -679,10 +635,8 @@ void PoserComponent::add_tracker(const libsurvive_ros2::msg::Tracker & msg) // the x and y offset by averaging three equidistant sensors spaced at 120deg to // find the center coordinate and use that as the bolt location. This transform // must be hard-coded somewhere in SteamVR to make things work. - auto identity_rotation = gtsam::Rot3::AxisAngle(gtsam::Point3(1.0, 0.0, 0.0), M_PI_2); auto ccw_90deg_about_x = gtsam::Rot3::AxisAngle(gtsam::Point3(1.0, 0.0, 0.0), M_PI_2); auto ccw_90deg_about_y = gtsam::Rot3::AxisAngle(gtsam::Point3(0.0, 1.0, 0.0), M_PI_2); - auto ccw_90deg_about_z = gtsam::Rot3::AxisAngle(gtsam::Point3(0.0, 0.0, 1.0), M_PI_2); tracker_info.tTh = gtsam::Pose3(ccw_90deg_about_x * ccw_90deg_about_y, gtsam::Point3(x, y, -0.007)); @@ -735,26 +689,27 @@ void PoserComponent::add_lighthouse(const libsurvive_ros2::msg::Lighthouse & msg lighthouse_info.bcal[i].ogeemag = msg.fcalogeemag[i]; } - // Weak initial estimate of lighthouse pose - auto obs_gTl = gtsam::Pose3( + // Weak initial estimate of the global -> lighthouse pose. Take note that this is + // the inverse representation of what you'd expect. + auto obs_lTg = gtsam::Pose3( gtsam::Rot3::Quaternion(1.0, 0.0, 0.0, 0.0), gtsam::Point3(0.0, 0.0, -1.0)); - auto cov_gTl = gtsam::noiseModel::Diagonal::Sigmas( - gtsam::Vector6(100.0, 100.0, 100.0, 10.0, 10.0, 10.0)); + auto cov_lTg = gtsam::noiseModel::Diagonal::Sigmas( + gtsam::Vector6(10.0, 10.0, 10.0, 10.0, 10.0, 10.0)); // Allocate a new variable. - lighthouse_info.gTl = next_available_key_++; + lighthouse_info.lTg = next_available_key_++; // Set its initial value. - initial_values_.insert(lighthouse_info.gTl, obs_gTl); + initial_values_.insert(lighthouse_info.lTg, obs_lTg); // Add a prior on the value``. - graph_.add(gtsam::PriorFactor(lighthouse_info.gTl, obs_gTl, cov_gTl)); + graph_.add(gtsam::PriorFactor(lighthouse_info.lTg, obs_lTg, cov_lTg)); } void PoserComponent::solution_callback() { - // Incremental update of the graph usng the latest values and factors. + // Incremental update of the graph using the latest values and factors. 
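To make the lTg convention above concrete: storing the lighthouse state as lTg (global expressed in the lighthouse frame) means the body pose as seen from the lighthouse is a single composition with gTb, while anything published over TF needs the inverse, which is why the publisher below uses lTg.inverse(). A small sketch with placeholder poses:

    import numpy as np
    import gtsam

    gTb = gtsam.Pose3(gtsam.Rot3(), gtsam.Point3(0.1, 0.2, 0.3))          # body in global frame
    lTg = gtsam.Pose3(gtsam.Rot3.Yaw(np.pi / 2), gtsam.Point3(0, 0, -1))  # global in lighthouse frame

    lTb = lTg.compose(gTb)   # body pose in the lighthouse frame: (l<-g) then (g<-b)
    gTl = lTg.inverse()      # pose that would be published / broadcast over TF

    # Consistency check: a body-frame point expressed in the lighthouse frame two ways.
    p_b = gtsam.Point3(0.0, 0.0, 0.01)
    assert np.allclose(lTb.transformFrom(p_b), lTg.transformFrom(gTb.transformFrom(p_b)))
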
graph_.print("Graph"); initial_values_.print("Values"); isam2_.update(graph_, initial_values_); @@ -832,13 +787,13 @@ void PoserComponent::solution_callback() // Print lighthouses for (const auto & [channel, lighthouse_info] : channel_to_lighthouse_info_) { // Extract the solution - gtsam::Pose3 gTl = isam2_.calculateEstimate(lighthouse_info.gTl); - gtsam::Matrix cov = isam2_.marginalCovariance(lighthouse_info.gTl); + gtsam::Pose3 lTg = isam2_.calculateEstimate(lighthouse_info.lTg); + gtsam::Matrix cov = isam2_.marginalCovariance(lighthouse_info.lTg); // Create a message containing the transform geometry_msgs::msg::PoseWithCovarianceStamped msg; msg.header.stamp = now; msg.header.frame_id = lighthouse_info.id; - msg.pose.pose = ros_from_gtsam(gTl); + msg.pose.pose = ros_from_gtsam(lTg.inverse()); msg.pose.covariance = ros_from_gtsam(cov); lighthouse_pose_publisher_->publish(msg); // Package up a transform @@ -846,7 +801,7 @@ void PoserComponent::solution_callback() transform_msg.header.stamp = msg.header.stamp; transform_msg.header.frame_id = tracking_frame_; transform_msg.child_frame_id = lighthouse_info.id; - transform_msg.transform = ros_transform_from_gtsam(gTl); + transform_msg.transform = ros_transform_from_gtsam(lTg.inverse()); tf_static_broadcaster_->sendTransform(transform_msg); } From 75c610e68ec12374fe3b868e80dc258794f3a707 Mon Sep 17 00:00:00 2001 From: Andrew Symington Date: Fri, 25 Aug 2023 21:16:43 -0700 Subject: [PATCH 13/18] Check in work in progress --- CMakeLists.txt | 1 + include/libsurvive_ros2/poser_factors.hpp | 12 + scripts/bag_parser | 9 +- scripts/jacobianfun | 92 ++++++++ scripts/offline_optimizer | 275 ++++++++++++++++++++++ src/poser_component.cpp | 81 ++++--- 6 files changed, 439 insertions(+), 31 deletions(-) create mode 100755 scripts/jacobianfun create mode 100755 scripts/offline_optimizer diff --git a/CMakeLists.txt b/CMakeLists.txt index 86e951f..f0e72b8 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -195,6 +195,7 @@ install( PROGRAMS scripts/bag_parser scripts/config_merger + scripts/offline_optimizer DESTINATION lib/${PROJECT_NAME}) diff --git a/include/libsurvive_ros2/poser_factors.hpp b/include/libsurvive_ros2/poser_factors.hpp index 17b69af..cf72d7b 100644 --- a/include/libsurvive_ros2/poser_factors.hpp +++ b/include/libsurvive_ros2/poser_factors.hpp @@ -1319,10 +1319,22 @@ class Gen2LightAngleFactor : public NoiseModelFactor2 { obj_p.AxisAngleRot[1] = gTb_axis.unitVector().y() * gTb_angle; obj_p.AxisAngleRot[2] = gTb_axis.unitVector().z() * gTb_angle; + // Predict the angle, given the pose of the body in the sensor frame, the position + // of the sensor in the body frame, the pose of the global frame in the lighthouse + // frame and the calibration information for this specific lighthouse. double predicted_angle = is_y_axis_ ? 
gen_reproject_axis_y_gen2_axis_angle(&obj_p, sensor_pt_, &lh_p, &bcal_) : gen_reproject_axis_x_gen2_axis_angle(&obj_p, sensor_pt_, &lh_p, &bcal_); + // The predicted angle can occasionally come out as NaN, so this is a hack to + // move it to some value that us + if (std::isnan(predicted_angle)) { + predicted_angle = 0; + } + if (!std::isfinite(predicted_angle)) { + predicted_angle = std::clamp(predicted_angle, -1e3, 1e3); + } + ///////////////////////////////////////////////////// // Helps find stray nans ///////////////////////////////////////////////////// diff --git a/scripts/bag_parser b/scripts/bag_parser index 7fc7eef..5ad1d4e 100755 --- a/scripts/bag_parser +++ b/scripts/bag_parser @@ -41,6 +41,7 @@ def get_rosbag_options(path, storage_id, serialization_format='cdr'): if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument("-b", "--bag", type=str, required=True, help="the bag file to input") + parser.add_argument("-d", "--duration", type=float, required=False, help="number of seconds to read") parser.add_argument("-n", "--namespace", default='/libsurvive', help="the bag file to output") args = parser.parse_args() @@ -55,10 +56,14 @@ if __name__ == "__main__": lighthouses = {} bodies = {} angles = dict() - num = 0 + start_time = None while reader.has_next(): - num += 1 (topic, data, time_stamp) = reader.read_next() + if args.duration is not None: + if start_time is None: + start_time = time_stamp + elif float(time_stamp - start_time) > args.duration * 1e9: + break if topic == f"{args.namespace}/lighthouse": msg = deserialize_message(data, get_message(type_map[topic])) lighthouse_id = str(msg.header.frame_id) diff --git a/scripts/jacobianfun b/scripts/jacobianfun new file mode 100755 index 0000000..315ed08 --- /dev/null +++ b/scripts/jacobianfun @@ -0,0 +1,92 @@ +#!/usr/bin/env python3 +# +# Copyright 2023 Andrew Symington +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+ +import math +from sympy import atan2, asin, sin, cos, tan, sqrt, Matrix, Symbol +from sympy.abc import x, y, z + +# Calibration params +tilt = Symbol("tilt") +ogeephase = Symbol("ogeephase") +ogeemag = Symbol("ogeemag") +curve = Symbol("curve") +gibpha = Symbol("gibpha") +gibmag = Symbol("gibmag") +phase = Symbol("phase") + +def calc_cal_series(s): + f = [-8.0108022e-06, 0.0028679863, 5.3685255000000001e-06, 0.0076069798000000001, 0, 0] + m = f[0] + a = 0 + for i in range(0, 6): + a = a * s + m + m = m * s + f[i] + return m, a + +def reproject_axis_gen2(X, Y, Z, axis): + B = atan2(Z, X) + Ydeg = tilt + (-1 if axis else 1) * math.pi / 6. + asinArg = tan(Ydeg) * Y / sqrt(X * X + Z * Z) + sinYdeg = sin(Ydeg) + cosYdeg = cos(Ydeg) + sinPart = sin(B - asin(asinArg) + ogeephase) * ogeemag + modAsinArg = Y / sqrt(X * X + Y * Y + Z * Z) / cosYdeg + asinOut = asin(modAsinArg) + mod, acc = calc_cal_series(asinOut) + BcalCurved = sinPart + curve + asinArg2 = asinArg + mod * BcalCurved / (cosYdeg - acc * BcalCurved * sinYdeg) + asinOut2 = asin(asinArg2) + sinOut2 = sin(B - asinOut2 + gibpha) + return B - asinOut2 + sinOut2 * gibmag - phase - math.pi / 2. + +observation = Matrix([ + reproject_axis_gen2(x, y, -z, 0), + reproject_axis_gen2(x, y, -z, 1) +]) + +subs = { + x: 0.0, + y: 0.1, + z: -1.0, + phase: 0.000169277191, + tilt: 0.047973632812, + curve: 0.574707031250, + gibpha: 2.039062500000, + gibmag: 0.000566482544, + ogeephase: 1.857421875000, + ogeemag: -0.277832031250 +} + +v = observation.evalf(subs=subs) +print("pred", v) + +state = Matrix([x, y, z]) +J = observation.jacobian(state) +v = J.evalf(subs=subs) +print("J", v) + + +# state = Matrix([x, y, z]) + +# J = observation.jacobian(state) + +# print(J) \ No newline at end of file diff --git a/scripts/offline_optimizer b/scripts/offline_optimizer new file mode 100755 index 0000000..2dc624a --- /dev/null +++ b/scripts/offline_optimizer @@ -0,0 +1,275 @@ +#!/usr/bin/env python3 +# +# Copyright 2023 Andrew Symington +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
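+
+# Editorial sketch, not part of the original patch: the error_light stub further
+# down appears intended to follow the gtsam.CustomFactor callback convention, in
+# which a callable of the form (this, values, jacobians) -> np.ndarray returns the
+# residual and, when requested, fills in the Jacobians. The unused helper below
+# shows that convention with a trivial Point3 prior, assuming only the standard
+# gtsam Python API; the key X(0), the 0.1 sigma and the zero measurement are
+# arbitrary illustrative choices.
+def _sketch_custom_factor_demo():
+    from functools import partial
+    from typing import List, Optional
+    import numpy as np
+    import gtsam
+    from gtsam.symbol_shorthand import X
+
+    def point_prior_error(measured: np.ndarray,
+                          this: gtsam.CustomFactor,
+                          values: gtsam.Values,
+                          jacobians: Optional[List[np.ndarray]]) -> np.ndarray:
+        point = values.atPoint3(this.keys()[0])
+        if jacobians is not None:
+            jacobians[0] = np.eye(3)  # d(error)/d(point) for error = point - measured
+        return point - measured
+
+    noise = gtsam.noiseModel.Isotropic.Sigma(3, 0.1)
+    return gtsam.CustomFactor(noise, [X(0)], partial(point_prior_error, np.zeros(3)))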
+ +import argparse +import numpy as np +import yaml + + +import gtsam +from gtsam.symbol_shorthand import L, X +import gtsam.utils.plot as gtsam_plot +import matplotlib.pyplot as plt + +import rosbag2_py +from rclpy.serialization import deserialize_message +from rclpy.time import Time +from rosidl_runtime_py.utilities import get_message + +def get_rosbag_options(path, storage_id, serialization_format='cdr'): + """Get the storage and converter options for a fiven rosbag""" + storage_options = rosbag2_py.StorageOptions( + uri=path, storage_id=storage_id) + converter_options = rosbag2_py.ConverterOptions( + input_serialization_format=serialization_format, + output_serialization_format=serialization_format) + return storage_options, converter_options + + +def get_all_lighthouses(reader, namespace, duration): + """Extract all lighthouses from a bag, up to some duration""" + topic = f"{namespace}/lighthouse" + reader.reset_filter() + reader.set_filter(rosbag2_py.StorageFilter(topics=[topic])) + reader.seek(0) + lighthouses = {} + start_time = None + while reader.has_next(): + (topic, data, time_stamp) = reader.read_next() + if duration is not None: + if start_time is None: + start_time = time_stamp + elif float(time_stamp - start_time) > duration * 1e9: + break + msg = deserialize_message(data, get_message('libsurvive_ros2/msg/Lighthouse')) + lighthouse_id = str(msg.header.frame_id) + if lighthouse_id != "LHB-0": + lighthouses[msg.channel] = { + "id" : lighthouse_id, + "channel" : int(msg.channel), + "fcalphase" : [float(msg.fcalphase[0]), float(msg.fcalphase[1])], + "fcaltilt" : [float(msg.fcaltilt[0]), float(msg.fcaltilt[1])], + "fcalcurve" : [float(msg.fcalcurve[0]), float(msg.fcalcurve[1])], + "fcalgibpha" : [float(msg.fcalgibpha[0]), float(msg.fcalgibpha[1])], + "fcalgibmag" : [float(msg.fcalgibmag[0]), float(msg.fcalgibmag[1])], + "fcalogeephase" : [float(msg.fcalogeephase[0]), float(msg.fcalogeephase[1])], + "fcalogeemag" : [float(msg.fcalogeemag[0]), float(msg.fcalogeemag[1])], + } + return lighthouses + +def get_all_trackers(reader, namespace, duration): + """Extract all trackers from a bag, up to some duration""" + topic = f"{namespace}/tracker" + reader.reset_filter() + reader.set_filter(rosbag2_py.StorageFilter(topics=[topic])) + reader.seek(0) + trackers = {} + start_time = None + while reader.has_next(): + (topic, data, time_stamp) = reader.read_next() + if duration is not None: + if start_time is None: + start_time = time_stamp + elif float(time_stamp - start_time) > duration * 1e9: + break + msg = deserialize_message(data, get_message('libsurvive_ros2/msg/Tracker')) + tracker_id = str(msg.header.frame_id) + if tracker_id != "LHR-0": + trackers[tracker_id] = { + "id" : tracker_id, + } + return trackers + +def get_all_angles(reader, namespace, duration): + """Extract all trackers from a bag, up to some duration""" + topic = f"{namespace}/angle" + reader.reset_filter() + reader.set_filter(rosbag2_py.StorageFilter(topics=[topic])) + reader.seek(0) + angles = {} + start_time = None + step = 0 + while reader.has_next(): + (topic, data, time_stamp) = reader.read_next() + if duration is not None: + if start_time is None: + start_time = time_stamp + elif float(time_stamp - start_time) > duration * 1e9: + break + msg = deserialize_message(data, get_message('libsurvive_ros2/msg/Angle')) + #stamp_ns = int(Time.from_msg(msg.header.stamp).nanoseconds) + angles[int(step)] = { + "tracker": msg.header.frame_id, + "sensor": msg.sensor_id, + "channel": msg.channel, + "plane": msg.plane, + "angle": 
msg.angle + } + step += 1 + return angles + + +def calc_cal_series(s): + f = [-8.0108022e-06, 0.0028679863, 5.3685255000000001e-06, 0.0076069798000000001, 0, 0] + m = f[0] + a = 0 + for i in range(0, 6): + a = a * s + m + m = m * s + f[i] + return m, a + +def reproject_axis_gen2(X, Y, Z, axis, cal): + B = np.atan2(Z, X) + Ydeg = cal["tilt"] + (-1 if axis else 1) * np.pi / 6. + tanA = np.tan(Ydeg) + normXZ = np.sqrt(X * X + Z * Z) + asinArg = tanA * Y / normXZ + sinYdeg = np.sin(Ydeg) + cosYdeg = np.cos(Ydeg) + sinPart = np.sin(B - np.asin(asinArg) + cal["ogeephase"]) * cal["ogeemag"] + normXYZ = np.sqrt(X * X + Y * Y + Z * Z) + modAsinArg = Y / normXYZ / cosYdeg + asinOut = np.asin(modAsinArg) + mod, acc = calc_cal_series(asinOut) + BcalCurved = sinPart + cal.curve + asinArg2 = asinArg + mod * BcalCurved / (cosYdeg - acc * BcalCurved * sinYdeg) + asinOut2 = np.asin(asinArg2) + sinOut2 = np.sin(B - asinOut2 + cal["gibpha"]) + return B - asinOut2 + sinOut2 * cal["gibmag"] - cal["phase"] - np.pi / 2. + + + +def error_light( + measurement: np.ndarray, + this: gtsam.CustomFactor, + values: gtsam.Values, + jacobians: Optional[List[np.ndarray]]) -> float: + + key1 = this.keys()[0] + key2 = this.keys()[1] + lTg, gTt = values.atVector(key1), values.atVector(key2) + l_sensor = s + + error = (pos2 - pos1) - measurement + if jacobians is not None: + jacobians[0] = -I + jacobians[1] = I + + return error + + +# lTg x gTt x sensor_t +# return apply_pose_to_pt(lh_p, apply_pose_to_pt(obj_p, sensor_pt)) + +def reproject_axis_x_gen2(obj_p, sensor_pt, lh_p, bsc0): + XYZ = sensor_to_world(obj_p, sensor_pt, lh_p) + return reproject_axis_gen2(XYZ[0], XYZ[1], -XYZ[2], 0, bsc0) + +def reproject_axis_y_gen2(obj_p, sensor_pt, lh_p, bsc1): + XYZ = sensor_to_world(obj_p, sensor_pt, lh_p) + return reproject_axis_gen2(XYZ[0], XYZ[1], -XYZ[2], 1, bsc1) + +def plot_result(lighthouses, trackers, result): + fignum = 0 + fig = plt.figure(fignum) + if not fig.axes: + axes = fig.add_subplot(projection='3d') + else: + axes = fig.axes[0] + plt.cla() + + gtsam_plot.plot_trajectory(fignum, result, title="Vive solution") + + # Plot points + #gtsam_plot.plot_3d_points(fignum, result, 'rx') + + # Plot lighthouses + #for channel, lighthouse_info in lighthouses.items(): + # gtsam_plot.plot_pose3(fignum, result.atPose3(L(channel)), 10) + + + # i = 0 + # while result.exists(L(i)): + # pose_i = result.atPose3(X(i)) + # gtsam_plot.plot_pose3(fignum, pose_i, 10) + # i += 1 + + # draw + #axes.set_xlim3d(-40, 40) + #axes.set_ylim3d(-40, 40) + #axes.set_zlim3d(-40, 40) + plt.pause(10) + +if __name__ == "__main__": + + # Handle arguments + parser = argparse.ArgumentParser() + parser.add_argument("-b", "--bag", type=str, required=True, help="the bag file to input") + parser.add_argument("-d", "--duration", type=float, required=False, help="number of seconds to read") + parser.add_argument("-n", "--namespace", default='/libsurvive', help="the bag file to output") + args = parser.parse_args() + + storage_options, converter_options = get_rosbag_options(args.bag, 'mcap') + reader = rosbag2_py.SequentialReader() + reader.open(storage_options, converter_options) + + # Read all measurements + lighthouses = get_all_lighthouses(reader, args.namespace, args.duration) + trackers = get_all_trackers(reader, args.namespace, args.duration) + angles = get_all_angles(reader, args.namespace, args.duration) + + # Construct graph + graph = gtsam.NonlinearFactorGraph() + initial_estimate = gtsam.Values() + + # Add lighthouse states (static) + for channel, 
lighthouse_info in lighthouses.items():
+        initial_est = gtsam.Pose3(gtsam.Rot3.Rodrigues(0.0, 0.0, 0.0), gtsam.Point3(1, 1, 1))
+        initial_cov = gtsam.noiseModel.Isotropic.Sigma(6, 10)
+        initial_estimate.insert(L(channel), initial_est)
+        graph.push_back(gtsam.PriorFactorPose3(L(channel), initial_est, initial_cov))
+
+    # Add tracker states
+    stamps = {}
+    for stamp_curr, angle_info in angles.items():
+        tracker_id = angle_info.get("tracker", None)
+        if tracker_id == 'LHR-160BA755':
+            initial_est = gtsam.Pose3(gtsam.Rot3.Rodrigues(0.0, 0.0, 0.0), gtsam.Point3(0, 0, 1))
+            if tracker_id not in stamps.keys():
+                initial_cov = gtsam.noiseModel.Isotropic.Sigma(6, 10)
+                graph.push_back(gtsam.PriorFactorPose3(X(stamp_curr), initial_est, initial_cov))
+            else:
+                stamp_prev = stamps[tracker_id]
+                delta_est = gtsam.Pose3(gtsam.Rot3.Rodrigues(0.0, 0.0, 0.0), gtsam.Point3(0, 0, 1))
+                delta_cov = gtsam.noiseModel.Isotropic.Sigma(6, 0.01)
+                graph.add(gtsam.BetweenFactorPose3(X(stamp_prev), X(stamp_curr), delta_est, delta_cov))
+            initial_estimate.insert(X(stamp_curr), initial_est)
+            stamps[tracker_id] = stamp_curr
+
+    # Solve optimization problem
+    params = gtsam.LevenbergMarquardtParams()
+    optimizer = gtsam.LevenbergMarquardtOptimizer(graph, initial_estimate, params)
+    result = optimizer.optimize()
+    plot_result(lighthouses, trackers, result)
+
+    # Print final result
+    #print("\nFinal Result:\n{}".format(optimizer_result))
diff --git a/src/poser_component.cpp b/src/poser_component.cpp
index 5abe3b5..7e1e6bb 100644
--- a/src/poser_component.cpp
+++ b/src/poser_component.cpp
@@ -41,9 +41,19 @@ using std::placeholders::_1;
 namespace libsurvive_ros2 {
+// Initial sigmas on tracker and lighthouse poses.
+const double kTrackerPosSigma = 10.0;
+const double kTrackerRotSigma = 10.0;
+const double kLighthousePosSigma = 10.0;
+const double kLighthouseRotSigma = 10.0;
+const double kDeltaPosSigma = 1e-3;
+const double kDeltaRotSigma = 1e-3;
+
 // IMU integration uncertainties
+const double kUseImuData = false;
 const double kGravity = 9.81;
-const double kDeltaT = 0.004;  // 4ms or 250Hz
+const double kDeltaT = 0.004;
+const double kVelSigma = 1.0;
 const double kAccelSigma = 0.1;
 const double kAccelDriftSigma = 0.1;
 const double kOmegaSigma = 0.1;
@@ -51,9 +61,6 @@ const double kOmegaDriftSigma = 0.1;
 const double kIntegrationSigma = 0.1;
 const double kSensorPositionSigma = 0.0001;
 const double kSigmaAngle = 0.001;
-const double kUseImuData = true;
-
-// Time between bias states
 const Timestamp kImuBiasTimeNs = 1e9;
 
 // Helper functions
@@ -340,34 +347,42 @@ void PoserComponent::add_angle(const libsurvive_ros2::msg::Angle & msg)
     gTb = body_info.gTb[0];
   } else if (kUseImuData) {
     // If the IMU is being used and there is no pose state, we have to wait for one to
-    // be inserted, so it makes more sense to return at this point.
+    // be inserted, so it makes more sense to return at this point than attempt to
+    // include an angle measurement for a state that does not exist.
     gTb = find_key_for_closest_stamp(body_info.gTb, stamp_curr);
-    if (!gTb) {
-      return;
-    }
   } else {
-    // If the IMU is ignored, then we need to insert a state to represent the pose of
-    // body at the current time. But first, let's cover the case where we've received
-    // an angle measurement from this body before.
+    // Multiple angle measurements may be contributing to this body pose estimate
+    // and so we need to make sure that we don't add an extra state for a
+    // second angle measurement sharing a timestamp.
if (body_info.gTb.count(stamp_curr) == 0) { + // If we get here then we need to add a timestamp. body_info.gTb[stamp_curr] = next_available_key_++; if (!stamp_prev) { + // If there was no previous timestamp for this body, it means that this + // is the first angle being received. So, we need to add a prior, which + // keeps the tracker pose roughly in the right location. RCLCPP_INFO_STREAM(this->get_logger(), "Adding prior pose for body " << tracker_info.body_id); auto gTb_obs = gtsam::Pose3( gtsam::Rot3::Quaternion(1.0, 0.0, 0.0, 0.0), gtsam::Point3(0.0, 0.0, 0.0)); auto gTb_cov = gtsam::noiseModel::Diagonal::Sigmas( - gtsam::Vector6(100.0, 100.0, 100.0, 100.0, 100.0, 100.0)); - graph_.add( - gtsam::PriorFactor( + gtsam::Vector6( + kTrackerPosSigma, kTrackerPosSigma, kTrackerPosSigma, + kTrackerRotSigma, kTrackerRotSigma, kTrackerRotSigma)); + graph_.add(gtsam::PriorFactor( body_info.gTb[stamp_curr], gTb_obs, gTb_cov)); initial_values_.insert(body_info.gTb[stamp_curr], gTb_obs); } else { + // If there was a previous timestamp, then we need to add a new state and + // connect this state to the previous state auto gTb_obs = gtsam::Pose3( gtsam::Rot3::Quaternion(1.0, 0.0, 0.0, 0.0), gtsam::Point3(0.0, 0.0, 0.0)); - auto gTb_cov = gtsam::noiseModel::Isotropic::Variance(6, 1e-3); + auto gTb_cov = gtsam::noiseModel::Diagonal::Sigmas( + gtsam::Vector6( + kDeltaPosSigma, kDeltaPosSigma, kDeltaPosSigma, + kDeltaRotSigma, kDeltaRotSigma, kDeltaRotSigma)); graph_.add( gtsam::BetweenFactor( body_info.gTb[*stamp_prev], @@ -379,10 +394,16 @@ void PoserComponent::add_angle(const libsurvive_ros2::msg::Angle & msg) } else { RCLCPP_WARN_STREAM(this->get_logger(), "Pose state exists for tracker " << tracker_id); } - // Use the keu for the current time stamp. + // Use the key for the current time stamp. gTb = body_info.gTb[stamp_curr]; } + // In any case where we don't get a gTb key to update, it means that we can't add + // this angle measurement. So we should return here to avoid an issue. + if (!gTb) { + return; + } + // Transform sensor to body-frame. We can maybe optimize this at some point in the // future to prevent having to this for every angle measurement. Not sure what sort // of performance increase it would give us though. @@ -467,17 +488,21 @@ void PoserComponent::add_imu(const sensor_msgs::msg::Imu & msg) } // Add a pose state, provided another IMU has not already set one for the - // current timestamp. This is possible, but very, very unlikely. + // current timestamp. This is possible, but unlikely. if (!body_info.gTb.count(stamp_curr)) { body_info.gTb[stamp_curr] = next_available_key_++; if (!stamp_prev) { + // If we get here then this is the first IMU measurement for this body + // and so we need to add a position prior to ground the trajectory in + // something reasonable. 
RCLCPP_INFO_STREAM(this->get_logger(), "Adding prior pose for body " << tracker_info.body_id); auto gTb_obs = gtsam::Pose3( gtsam::Rot3::Quaternion(1.0, 0.0, 0.0, 0.0), gtsam::Point3(0.0, 0.0, 0.0)); - auto gTb_cov = gtsam::noiseModel::Diagonal::Sigmas( - gtsam::Vector6(100.0, 100.0, 100.0, 100.0, 100.0, 100.0)); + auto gTb_cov = gtsam::noiseModel::Diagonal::Sigmas(gtsam::Vector6( + kTrackerPosSigma, kTrackerPosSigma, kTrackerPosSigma, + kTrackerRotSigma, kTrackerRotSigma, kTrackerRotSigma)); graph_.add( gtsam::PriorFactor( body_info.gTb[stamp_curr], gTb_obs, gTb_cov)); @@ -498,10 +523,9 @@ void PoserComponent::add_imu(const sensor_msgs::msg::Imu & msg) RCLCPP_INFO_STREAM(this->get_logger(), "Adding prior velocity for body " << tracker_info.body_id); auto g_V_obs = gtsam::Vector3(0.0, 0.0, 0.0); - auto g_V_cov = gtsam::noiseModel::Diagonal::Sigmas( - gtsam::Vector3(10.0, 10.0, 10.0)); - graph_.add( - gtsam::PriorFactor( + auto g_V_cov = gtsam::noiseModel::Diagonal::Sigmas(gtsam::Vector3( + kVelSigma, kVelSigma, kVelSigma)); + graph_.add(gtsam::PriorFactor( body_info.g_V[stamp_curr], g_V_obs, g_V_cov)); initial_values_.insert(body_info.g_V[stamp_curr], g_V_obs); } else { @@ -520,12 +544,10 @@ void PoserComponent::add_imu(const sensor_msgs::msg::Imu & msg) RCLCPP_INFO_STREAM(this->get_logger(), "Adding prior bias for tracker " << tracker_id); auto b_B_obs = gtsam::imuBias::ConstantBias(); - auto b_B_cov = gtsam::noiseModel::Diagonal::Sigmas( - gtsam::Vector6( + auto b_B_cov = gtsam::noiseModel::Diagonal::Sigmas(gtsam::Vector6( kAccelDriftSigma, kAccelDriftSigma, kAccelDriftSigma, kOmegaDriftSigma, kOmegaDriftSigma, kOmegaDriftSigma)); - graph_.add( - gtsam::PriorFactor( + graph_.add(gtsam::PriorFactor( tracker_info.b_B[stamp_curr], b_B_obs, b_B_cov)); initial_values_.insert(tracker_info.b_B[stamp_curr], b_B_obs); } else { @@ -694,8 +716,9 @@ void PoserComponent::add_lighthouse(const libsurvive_ros2::msg::Lighthouse & msg auto obs_lTg = gtsam::Pose3( gtsam::Rot3::Quaternion(1.0, 0.0, 0.0, 0.0), gtsam::Point3(0.0, 0.0, -1.0)); - auto cov_lTg = gtsam::noiseModel::Diagonal::Sigmas( - gtsam::Vector6(10.0, 10.0, 10.0, 10.0, 10.0, 10.0)); + auto cov_lTg = gtsam::noiseModel::Diagonal::Sigmas(gtsam::Vector6( + kLighthousePosSigma, kLighthousePosSigma, kLighthousePosSigma, + kLighthouseRotSigma, kLighthouseRotSigma, kLighthouseRotSigma)); // Allocate a new variable. 
lighthouse_info.lTg = next_available_key_++; From 536df0d61eb136878a45eca7701df29066746716 Mon Sep 17 00:00:00 2001 From: Andrew Symington Date: Sun, 27 Aug 2023 10:41:16 -0700 Subject: [PATCH 14/18] Add localizer --- docker-compose.yml | 4 +- launch/libsurvive_ros2.launch.py | 16 +- scripts/jacobianfun | 300 ++++++++++++++++----- scripts/simple_localizer | 430 +++++++++++++++++++++++++++++++ test/test_simple_localizer.py | 0 5 files changed, 685 insertions(+), 65 deletions(-) create mode 100755 scripts/simple_localizer create mode 100644 test/test_simple_localizer.py diff --git a/docker-compose.yml b/docker-compose.yml index e50d8f7..44b4124 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -41,4 +41,6 @@ services: network_mode: host working_dir: /home/ubuntu/ros2_ws entrypoint: /home/ubuntu/ros2_ws/entrypoint.sh - command: ros2 launch libsurvive_ros2 libsurvive_ros2.launch.py + tty: true + stdin_open: true + command: bash # ros2 launch libsurvive_ros2 libsurvive_ros2.launch.py diff --git a/launch/libsurvive_ros2.launch.py b/launch/libsurvive_ros2.launch.py index 713fba2..acf1d48 100644 --- a/launch/libsurvive_ros2.launch.py +++ b/launch/libsurvive_ros2.launch.py @@ -134,7 +134,6 @@ def generate_launch_description(): ["'xterm -e gdb -ex=r --args' if '", LaunchConfiguration("debug"), "'=='true' else ''"] ) - # Composed pipeline. composed_pipeline = GroupAction( actions=[ @@ -191,13 +190,22 @@ def generate_launch_description(): output="own_log", parameters=driver_parameters, ), + # Node( + # package="libsurvive_ros2", + # executable="libsurvive_ros2_poser_node", + # name="libsurvive_ros2_poser_node", + # namespace=LaunchConfiguration("namespace"), + # condition=IfCondition(enable_poser), + # prefix=launch_prefix, + # output="own_log", + # parameters=poser_parameters, + # ), Node( package="libsurvive_ros2", - executable="libsurvive_ros2_poser_node", - name="libsurvive_ros2_poser_node", + executable="libsurvive_ros2_dev_node", + name="libsurvive_ros2_dev_node", namespace=LaunchConfiguration("namespace"), condition=IfCondition(enable_poser), - prefix=launch_prefix, output="own_log", parameters=poser_parameters, ), diff --git a/scripts/jacobianfun b/scripts/jacobianfun index 315ed08..4025ed0 100755 --- a/scripts/jacobianfun +++ b/scripts/jacobianfun @@ -21,68 +21,248 @@ # THE SOFTWARE. import math -from sympy import atan2, asin, sin, cos, tan, sqrt, Matrix, Symbol -from sympy.abc import x, y, z +from sympy import atan2, asin, sin, cos, tan, sqrt, Matrix, Symbol, simplify, compose, init_printing +from sympy.abc import X, Y, Z # Calibration params -tilt = Symbol("tilt") -ogeephase = Symbol("ogeephase") -ogeemag = Symbol("ogeemag") -curve = Symbol("curve") -gibpha = Symbol("gibpha") -gibmag = Symbol("gibmag") -phase = Symbol("phase") - -def calc_cal_series(s): - f = [-8.0108022e-06, 0.0028679863, 5.3685255000000001e-06, 0.0076069798000000001, 0, 0] - m = f[0] - a = 0 - for i in range(0, 6): - a = a * s + m - m = m * s + f[i] - return m, a - -def reproject_axis_gen2(X, Y, Z, axis): - B = atan2(Z, X) - Ydeg = tilt + (-1 if axis else 1) * math.pi / 6. 
- asinArg = tan(Ydeg) * Y / sqrt(X * X + Z * Z) - sinYdeg = sin(Ydeg) - cosYdeg = cos(Ydeg) - sinPart = sin(B - asin(asinArg) + ogeephase) * ogeemag - modAsinArg = Y / sqrt(X * X + Y * Y + Z * Z) / cosYdeg - asinOut = asin(modAsinArg) - mod, acc = calc_cal_series(asinOut) - BcalCurved = sinPart + curve - asinArg2 = asinArg + mod * BcalCurved / (cosYdeg - acc * BcalCurved * sinYdeg) - asinOut2 = asin(asinArg2) - sinOut2 = sin(B - asinOut2 + gibpha) - return B - asinOut2 + sinOut2 * gibmag - phase - math.pi / 2. - -observation = Matrix([ - reproject_axis_gen2(x, y, -z, 0), - reproject_axis_gen2(x, y, -z, 1) -]) - -subs = { - x: 0.0, - y: 0.1, - z: -1.0, - phase: 0.000169277191, - tilt: 0.047973632812, - curve: 0.574707031250, - gibpha: 2.039062500000, - gibmag: 0.000566482544, - ogeephase: 1.857421875000, - ogeemag: -0.277832031250 -} - -v = observation.evalf(subs=subs) -print("pred", v) - -state = Matrix([x, y, z]) -J = observation.jacobian(state) -v = J.evalf(subs=subs) -print("J", v) +tilt = Symbol("tilt", real = True, constant = True) +ogeephase = Symbol("ogeephase", real = True, constant = True) +ogeemag = Symbol("ogeemag", real = True, constant = True) +curve = Symbol("curve", real = True, constant = True) +gibpha = Symbol("gibpha", real = True, constant = True) +gibmag = Symbol("gibmag", real = True, constant = True) +phase = Symbol("phase", real = True, constant = True) + +# Intermediary parameters +ccwAngleAboutY = Symbol("ccwAngleAboutY") +A_over_B = Symbol("A_over_B") +Y_over_R = Symbol("Y_over_R") +A_over_R = Symbol("A_over_R") +R_over_C = Symbol("R_over_C") +ccwMotorAndLensAndTiltCorrectedAngleAboutY = Symbol("ccwMotorAndLensAndTiltCorrectedAngleAboutY") + +def proj_model(X, Y, Z, plane): + """Reprojection function for predicting an angle from sensor pose in lighthouse frame""" + + # Angle in the X-Z plane (about the +Y axis) sweeping counter-clockwise from +X + ccwAngleAboutY = atan2(Z, X) + + # Distances between sensor and origin + B = sqrt(X * X + Z * Z) # L2 norm of (sensor - origin) in X-Z plane + C = sqrt(X * X + Y * Y + Z * Z) # L2 norm of (sensor - origin) + + # Tilt angle of the laser plane with respect to motor shaft, nominally 30 degrees + planeTiltAngle = tilt + (-1 if plane else 1) * math.pi / 6. + + # CORRECTIONS FOR THE TILTED LASER PLANE + + # Viewed from any direction around the lighthouse: + #------------------------------------------------ + # The moment the laser plane strikes the sensing element (@). THe optical axis runs + # from the sensor to the midpoint of the lens. + # + # +Y + # (@)__A__| + # \ | Angle * = "planeTiltAngle" + # \ | + # R \ | Y/R = sin(*) "how much do I move along A as I move along R" + # \ | A/R = cos(*) "how much do I move along Y as I move along R" + # \*| A/Y = tan(*) "how much do I move along A as I move along Y" + # \| + # +W <----+ <------------ this is the optical axis of the laser + # | \ + # | \ <-------- laser plane + # | \ + # + A_over_R = sin(planeTiltAngle) + Y_over_R = cos(planeTiltAngle) + A_over_Y = tan(planeTiltAngle) + + # For a given Y coordinate, work out how much we'd need to be along A. Intuitively, A is the + # distance from the Y axis introduced by the fact that the plane is tilted with respect to Y. + A = A_over_Y * Y + + # Viewed from above the lighthouse: + # --------------------------------- + # Now that we know how far along A we've moved we can calculate the small angle error (*) as + # A / B = sin(#). Intuitively, this is the delta to the sweep angle that corrects for the + # plane tilt. 
In an ideal setting without fabrication errors, we could return the measurement + # here as ccwAngleAboutY + np.sin(A / B) and we'd be done! + # + # +X + # | + # | + # (@) | B = np.sqrt(X * X + Z * Z) + # A, ' \ | + # , ` 90 \ B | A/B = sin(#) "sweep angle error added by tilt" + # +W ', \ | + # '., \*| + # '-,# \| + # +Z <---------------------+ optical center + A_over_B = A / B + + # R is the distance from the sensor to the optical axis in the X-W plane, which is a + # frame that continually rotates about the lighthouse + R = Y / Y_over_R + R_over_C = R / C + + # For convenience, let's create a new value here to capture the tilt-compensated angle + return Matrix([ + ccwAngleAboutY, + A_over_B, + Y_over_R, + A_over_R, + R_over_C, + ]) + +def lens_model(ccwAngleAboutY, A_over_B, Y_over_R, A_over_R, R_over_C): + + # A laser plane is interesting because unlike typical lens correction, which happens in + # a 2D image plane, this happens in 1D. So all that matters is deviation from the optical + # axis in 1D within the laser plane -- the further you move from the optical axis in the + # laser's plane, the more lens distortion you'd suffer. One neat thing about the 1D nature + # of the problem is that you need not model distortion as euclidean distance from the + # optical axis; you can just model it directly on the angle from the optical axis, which + # is proportional to the horizontal distance moved away from the optical axis :) + # + # (@) sin(*) = R / C + # /| "how much we've deviated from the optical axis" + # C / | + # / | R + # / * | + # optical center +--------------> +W laser optical axis + # + angleFromOpticalCenterOfLens = asin(R_over_C) + + # I don't exactly understand what this error is about, but it looks to be very similar to + # the mechanical error model. This suggests that there is some periodic component to the + # lens curvature that is a function of the motor spinning. My only guess is that the lens + # warps slightly as it rotates because of centripetal forces, and this correct for that. + # + # x' = x + a sin (x + b) + # | +-----+-----+ + # + | + # curve at zero | + # | + # + + # Fourier correction + centripetalCompensatedBaseCurvature = curve + ogeemag * sin( + ccwAngleAboutY - asin(A_over_B) + ogeephase) + + # This is a 5th order polynomial expression that corrects for lens distortion, but instead + # of describing an error in distance from optical axis, it describes an error as a multiple + # of the base curvature of the lens. Something like final_curvature = f(x) * base_curvature. + # The polynomial function looks something like the graph below. Domain is [-pi/2, +pi/2]. + # + # | 4| | f(x) = a[0]x^5 + a[1]x^4 + a[2]x^3 + a[3]x^2 + a[4]x^1 + a[5] + # \ | / + # \ 2| / + # `-.__|__.-' + #---------------+-------------> +x + # -pi -2 0 +2 +pi + # + a = [-8.0108022e-06, 0.0028679863, 5.3685255000000001e-06, 0.0076069798000000001, 0, 0] + d = 0 # this ends up being f'(x) + b = a[0] # this ends up being f(x) + for i in range(1, 6): + d = d * angleFromOpticalCenterOfLens + b + b = b * angleFromOpticalCenterOfLens + a[i] + + # To make this easier, lets just multiply to get the curvature and its first derivative. + lensCurvature = b * centripetalCompensatedBaseCurvature + lensCurvatureFirstDerivative = d * centripetalCompensatedBaseCurvature + + # Here is where we put all the lens correction terms together. 
+ # + # A lensCurvature + # asin ( - + --------------------------------------------- ) + # B Y / R - lensCurvatureFirstDerivative * A / R + # + # The dividend represents the deviation along A, which if course depends on how far you + # have shifted from the laser optical axis. The divisor itself must also shift by a + # proportional amount. + # + + ccwMotorAndLensAndTiltCorrectedAngleAboutY = ccwAngleAboutY - asin( + A_over_B + lensCurvature / (Y_over_R - lensCurvatureFirstDerivative * A_over_R)) + return Matrix([ccwMotorAndLensAndTiltCorrectedAngleAboutY]) + + +def mech_model(ccwMotorAndLensAndTiltCorrectedAngleAboutY): + return Matrix([ccwMotorAndLensAndTiltCorrectedAngleAboutY + sin(ccwMotorAndLensAndTiltCorrectedAngleAboutY + gibpha) * gibmag - phase - math.pi / 2.0]) + + +# Proj model +print("--------------") +Oproj = proj_model(X, Y, Z, 0) +Sproj = Matrix([X, Y, Z]) +Jproj = Oproj.jacobian(Sproj) +print("Jproj", simplify(Jproj)) + +# Lens model +print("--------------") +Olens = lens_model(ccwAngleAboutY, A_over_B, Y_over_R, A_over_R, R_over_C) +Slens = Matrix([ccwAngleAboutY, A_over_B, Y_over_R, A_over_R, R_over_C]) +Jlens = Olens.jacobian(Slens) +print("Jlens", Jlens) + +# Dist model +print("--------------") +Odist = lens_model(ccwAngleAboutY, A_over_B, Y_over_R, A_over_R, R_over_C) +Sdist = Matrix([ccwAngleAboutY, A_over_B, Y_over_R, A_over_R, R_over_C]) +Jdist = Odist.jacobian(Slens) +print("Jdist", Jdist) + +# Mech model +print("--------------") +Omech = mech_model(ccwMotorAndLensAndTiltCorrectedAngleAboutY) +Smech = Matrix([ccwMotorAndLensAndTiltCorrectedAngleAboutY]) +Jmech = Omech.jacobian(Smech) +print("Jmech", simplify(Jmech)) + +# subs = { +# X: 0.1, +# Y: 0.1, +# Z: -2.0, +# phase: 0.0, +# tilt: 0.0, +# curve: 0.0, +# gibpha: 0.0, +# gibmag: 0.0, +# ogeephase: 0.0, +# ogeemag: 0.0 +# } +# v = (Omech * Olens * Oproj).evalf(subs=subs) +# print(v) +print(Omech * Olens * Oproj) + +# J = Jmech * Jlens * Jproj +# print(J) + +# subs = { +# x: 0.1, +# y: 0.1, +# z: -2.0, +# phase: 0.0, +# tilt: 0.0, +# curve: 0.0, +# gibpha: 0.0, +# gibmag: 0.0, +# ogeephase: 0.0, +# ogeemag: 0.0 +# } +# v = J.evalf(subs=subs) +# print("v", v) + +# v = observation.evalf(subs=subs) +# print("pred", v) + +# state = Matrix([x, y, z]) +# J = observation.jacobian(state) +# print("J", J) +# v = J.evalf(subs=subs) +# print("v", v) + # state = Matrix([x, y, z]) diff --git a/scripts/simple_localizer b/scripts/simple_localizer new file mode 100755 index 0000000..bd36256 --- /dev/null +++ b/scripts/simple_localizer @@ -0,0 +1,430 @@ +#!/usr/bin/env python3 +# +# Copyright 2023 Andrew Symington +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL +# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. + +import argparse +import numpy as np +import pytransform3d.transformations as pt + +import rosbag2_py +from rclpy.serialization import deserialize_message +from rclpy.time import Time +from rosidl_runtime_py.utilities import get_message + +## DATA HANDLERS + +def get_rosbag_options(path, storage_id, serialization_format='cdr'): + """Get the storage and converter options for a fiven rosbag""" + storage_options = rosbag2_py.StorageOptions( + uri=path, storage_id=storage_id) + converter_options = rosbag2_py.ConverterOptions( + input_serialization_format=serialization_format, + output_serialization_format=serialization_format) + return storage_options, converter_options + + +def get_all_lighthouses(reader, namespace, duration): + """Extract all lighthouses from a bag, up to some duration""" + topic = f"{namespace}/lighthouse" + reader.reset_filter() + reader.set_filter(rosbag2_py.StorageFilter(topics=[topic])) + reader.seek(0) + lighthouses = {} + start_time = None + while reader.has_next(): + (topic, data, time_stamp) = reader.read_next() + if duration is not None: + if start_time is None: + start_time = time_stamp + elif float(time_stamp - start_time) > duration * 1e9: + break + msg = deserialize_message(data, get_message('libsurvive_ros2/msg/Lighthouse')) + lighthouse_id = str(msg.header.frame_id) + if lighthouse_id != "LHB-0": + lighthouses[msg.channel] = { + "id" : lighthouse_id, + "channel" : int(msg.channel), + "calibration": {} + } + for axis in [0, 1]: + lighthouses[msg.channel][axis] = { + "phase" : float(msg.fcalphase[axis]), + "tilt" : float(msg.fcaltilt[axis]), + "curve" : float(msg.fcalcurve[axis]), + "gibpha" : float(msg.fcalgibpha[axis]), + "gibmag" : float(msg.fcalgibmag[axis]), + "ogeephase" : float(msg.fcalogeephase[axis]), + "ogeemag" : float(msg.fcalogeemag[axis]), + } + return lighthouses + +def get_all_trackers(reader, namespace, duration): + """Extract all trackers from a bag, up to some duration""" + topic = f"{namespace}/tracker" + reader.reset_filter() + reader.set_filter(rosbag2_py.StorageFilter(topics=[topic])) + reader.seek(0) + trackers = {} + start_time = None + while reader.has_next(): + (topic, data, time_stamp) = reader.read_next() + if duration is not None: + if start_time is None: + start_time = time_stamp + elif float(time_stamp - start_time) > duration * 1e9: + break + msg = deserialize_message(data, get_message('libsurvive_ros2/msg/Tracker')) + tracker_id = str(msg.header.frame_id) + if tracker_id != "LHR-0": + trackers[tracker_id] = {} + for channel, point, normal in zip(msg.channels, msg.points, msg.normals): + trackers[tracker_id][channel] = { + "point" : np.array([point.x, point.y, point.z, 1.0]), + "normal" : np.array([normal.x, normal.y, normal.z]), + } + return trackers + +def get_all_light_bundles(reader, namespace, duration): + """Extract all trackers from a bag, up to some duration""" + topic = f"{namespace}/angle" + reader.reset_filter() + reader.set_filter(rosbag2_py.StorageFilter(topics=[topic])) + reader.seek(0) + start_time = None + bundles = {} + while reader.has_next(): + (topic, data, time_stamp) = reader.read_next() + if duration is not None: + if start_time is None: + start_time = time_stamp + elif float(time_stamp - start_time) > duration * 1e9: + break + msg = 
deserialize_message(data, get_message('libsurvive_ros2/msg/Angle')) + tracker_id = str(msg.header.frame_id) + if tracker_id == "LHR-0": + continue + this_stamp = int(Time.from_msg(msg.header.stamp).nanoseconds) + if tracker_id in bundles.keys() and msg.channel in bundles[tracker_id].keys(): + bund_stamp = max(bundles[tracker_id][msg.channel].keys()) + if bund_stamp is not None and this_stamp - bund_stamp > 20e6: + bund_stamp = this_stamp + bundles[tracker_id][msg.channel][this_stamp] = [] + elif tracker_id in bundles.keys(): + bundles[tracker_id][msg.channel] = { + this_stamp: [] + } + bund_stamp = this_stamp + else: + bundles[tracker_id] = { + msg.channel : { + this_stamp: [] + } + } + bund_stamp = this_stamp + bundles[tracker_id][msg.channel][bund_stamp].append({ + "stamp" : this_stamp, + "sensor": msg.sensor_id, + "plane": msg.plane, + "angle": msg.angle + }) + return bundles + +## REFERENCE IMPLEMENTATION + +def calc_cal_series(s): + f = [-8.0108022e-06, 0.0028679863, 5.3685255000000001e-06, 0.0076069798000000001, 0, 0] + m = f[0] + a = 0 + for i in range(0, 6): + a = a * s + m + m = m * s + f[i] + return m, a + +def calc_gen2_error_orig(X, Y, Z, axis, cal, obs): + B = np.arctan2(Z, X) + Ydeg = cal["tilt"] + (-1 if axis else 1) * np.pi / 6. + asinArg = np.tan(Ydeg) * Y / np.sqrt(X * X + Z * Z) + sinYdeg = np.sin(Ydeg) + cosYdeg = np.cos(Ydeg) + sinPart = np.sin(B - np.arcsin(asinArg) + cal["ogeephase"]) * cal["ogeemag"] + modAsinArg = Y / np.sqrt(X * X + Y * Y + Z * Z) / cosYdeg + asinOut = np.arcsin(modAsinArg) + mod, acc = calc_cal_series(asinOut) + BcalCurved = sinPart + cal["curve"] + asinArg2 = asinArg + mod * BcalCurved / (cosYdeg - acc * BcalCurved * sinYdeg) + asinOut2 = np.arcsin(asinArg2) + sinOut2 = np.sin(B - asinOut2 + cal["gibpha"]) + return B - asinOut2 + sinOut2 * cal["gibmag"] - cal["phase"] - np.pi / 2. - obs + +## MORE HUMAN READABLE IMPLEMENTATION + +def calc_gen2_error_corr(X, Y, Z, plane, cal, obs): + """Reprojection function for predicting an angle from sensor pose in lighthouse frame""" + + # Angle in the X-Z plane (about the +Y axis) sweeping counter-clockwise from +X + ccwAngleAboutY = np.arctan2(Z, X) + ccwAngleAboutX = np.arctan2(Z, Y) + + # We know from the Vive documentation that the HFOV is 150 degrees and the VFOV is + # 110 degrees. It turns out that beyond this region results in numerical instability, + # so the best thing to do is try and steer the numerical solver closer to a solution. + # This basically nudges the solver towards the base station frustrum + + if ccwAngleAboutY < np.radians(15) or ccwAngleAboutY > np.radians(165): + return np.abs(ccwAngleAboutY - np.radians(90)) + if ccwAngleAboutX < np.radians(35) or ccwAngleAboutX > np.radians(145): + return np.abs(ccwAngleAboutX - np.radians(90)) + + # Distances between sensor and origin + B = np.sqrt(X * X + Z * Z) # L2 norm of (sensor - origin) in X-Z plane + C = np.sqrt(X * X + Y * Y + Z * Z) # L2 norm of (sensor - origin) + + # Tilt angle of the laser plane with respect to motor shaft, nominally 30 degrees + planeTiltAngle = cal["tilt"] + (-1 if plane else 1) * np.pi / 6. + + # CORRECTIONS FOR THE TILTED LASET PLANE + + # Viewed from any direction around the lighthouse: + #------------------------------------------------ + # The moment the laser plane strikes the sensing element (@). THe optical axis runs + # from the sensor to the midpoint of the lens. 
+ # + # +Y + # (@)__A__| + # \ | Angle * = "planeTiltAngle" + # \ | + # R \ | Y/R = sin(*) "how much do I move along A as I move along R" + # \ | A/R = cos(*) "how much do I move along Y as I move along R" + # \*| A/Y = tan(*) "how much do I move along A as I move along Y" + # \| + # +W <----+ <------------ this is the optical axis of the laser + # | \ + # | \ <-------- laser plane + # | \ + # + A_over_R = np.sin(planeTiltAngle) + Y_over_R = np.cos(planeTiltAngle) + A_over_Y = np.tan(planeTiltAngle) + + # For a given Y coordinate, work out how much we'd need to be along A. Intuitively, A is the + # distance from the Y axis introduced by the fact that the plane is tilted with respect to Y. + A = A_over_Y * Y + + # Viewed from above the lighthouse: + # --------------------------------- + # Now that we know how far along A we've moved we can calculate the small angle error (*) as + # A / B = sin(#). Intuitively, this is the delta to the sweep angle that corrects for the + # plane tilt. In an ideal setting without fabrication errors, we could return the measurement + # here as ccwAngleAboutY + np.sin(A / B) and we'd be done! + # + # +X + # | + # | + # (@) | B = np.sqrt(X * X + Z * Z) + # A, ' \ | + # , ` 90 \ B | A/B = sin(#) "sweep angle error added by tilt" + # +W ', \ | + # '., \*| + # '-,# \| + # +Z <---------------------+ optical center + A_over_B = A / B + + # For convenience, let's create a new value here to capture the tilt-compensated angle + tiltCorrectedCcwAngleAboutY = ccwAngleAboutY - np.arcsin(A_over_B) + + # CORRECTIONS FOR LENS ERRORS + + # R is the distance from the sensor to the optical axis in the X-W plane, which is a + # frame that continually rotates about the lighthouse + R = Y / Y_over_R + + # A laser plane is interesting because unlike typical lens correction, which happens in + # a 2D image plane, this happens in 1D. So all that matters is deviation from the optical + # axis in 1D within the laser plane -- the further you move from the optical axis in the + # laser's plane, the more lens distortion you'd suffer. One neat thing about the 1D nature + # of the problem is that you need not model distortion as euclidean distance from the + # optical axis; you can just model it directly on the angle from the optical axis, which + # is proportional to the horizontal distance moved away from the optical axis :) + # + # (@) sin(*) = R / C + # /| "how much we've deviated from the optical axis" + # C / | + # / | R + # / * | + # optical center +--------------> +W laser optical axis + # + angleFromOpticalCenterOfLens = np.arcsin(R / C) + + # I don't exactly understand what this error is about, but it looks to be very similar to + # the mechanical error model. This suggests that there is some periodic component to the + # lens curvature that is a function of the motor spinning. My only guess is that the lens + # warps slightly as it rotates because of centripetal forces, and this correct for that. + # + # x' = x + a sin (x + b) + # | +-----+-----+ + # + | + # curve at zero | + # | + # + + # Fourier correction + centripetalCompensatedBaseCurvature = cal["curve"] + cal["ogeemag"] * np.sin( + tiltCorrectedCcwAngleAboutY + cal["ogeephase"]) + + # This is a 5th order polynomial expression that corrects for lens distortion, but in stead + # of describing an error in distance from optical axis, it describes an error as a multiple + # of the base curvature of the lens. Something like final_curvature = f(x) * base_curvature. + # The polynomial function looks something like the graph below. 
Domain is [-pi/2, +pi/2]. + # + # | 4| | f(x) = a[0]x^5 + a[1]x^4 + a[2]x^3 + a[3]x^2 + a[4]x^1 + a[5] + # \ | / + # \ 2| / + # `-.__|__.-' + #---------------+-------------> +x + # -pi -2 0 +2 +pi + # + a = [-8.0108022e-06, 0.0028679863, 5.3685255000000001e-06, 0.0076069798000000001, 0, 0] + d = 0 # this ends up being f'(x) + b = a[0] # this ends up being f(x) + for i in range(1, 6): + d = d * angleFromOpticalCenterOfLens + b + b = b * angleFromOpticalCenterOfLens + a[i] + + # To make this easier, lets just multiply to get the curvature and its first derivative. + lensCurvature = b * centripetalCompensatedBaseCurvature + lensCurvatureFirstDerivative = d * centripetalCompensatedBaseCurvature + + # Here is where we put all the lens correction terms together. + # + # A lensCurvature + # asin ( - + --------------------------------------------- ) + # B Y / R - lensCurvatureFirstDerivative * A / R + # + # The dividend represents the deviation along A, which if course depends on how far you + # have shifted from the laser optical axis. The divisor itself must also shift by a + # proportional amount. + # + ccwLensCorrectedAngleAboutY = ccwAngleAboutY - np.arcsin( + A_over_B + lensCurvature / (Y_over_R - lensCurvatureFirstDerivative * A_over_R)) + + # CORRECTIONS FOR MECHANICAL ERRORS + + # Now that we have a tilt and lens-corrected angle we have to shift it by a small amount to + # compensate for mechanical vibration and lens mounting errors. We use a fourier term here + # that captures periodic error. In other words this error term repeats every 2pi, or one + # rotation about Y. Ultimately, it models the fact that the trajectory taken by the lens on + # an imperfect, imbalanced motorized system deviates from a perfect circle. + # + # x' = x + a sin (x + b) - c + # | +-----+-----+ | + # + | +--- Phase offset from zero. This is always 0 for axis = 0 + # raw angle | because that axis defines the start of the sweep. Axis + # | 1 has a small fabrication error around the motor spindle + # + that phase advances or delays with respect to X, and this + # Fourier correction term corrects for that. + # + ccwMotorAndLensAndTiltCorrectedAngleAboutY = ccwLensCorrectedAngleAboutY + np.sin( + ccwLensCorrectedAngleAboutY + cal["gibpha"]) * cal["gibmag"] - cal["phase"] + + # Shift the angle clockwise 90 degree to be described with respect to +Z, and not +X. 
+ return np.abs(ccwMotorAndLensAndTiltCorrectedAngleAboutY - np.pi / 2.0 - obs) + + +def calculate_poses(lighthouses, trackers, bundles): + """Iterates through tracker/lighthouses and calculates poses""" + for tracker_id, tracker_info in bundles.items(): + print("Tracker", id) + for channel_id, channel_info in tracker_info.items(): + print("+ Channel", channel_id) + for stamp, bundle in channel_info.items(): + print("++ Bundle at", stamp, "contains", len(bundle), "observations") + # Run particle filter + Nparticles = 1000 + Nrefresh = 0 + Niteration = 10 + init_mean = pt.transform_from(R=np.eye(3), p=np.array([0.0, 0.0, 0.0])) + init_cov = np.diag([36.0, 36.0, 36.0, 0.1, 0.1, 0.1]) + inno_mean = pt.transform_from(R=np.eye(3), p=np.array([0.0, 0.0, 0.0])) + inno_cov = 1.0e-3 * np.diag(np.ones(6)) + rng = np.random.default_rng(0) + particles = np.array([ + pt.random_transform(rng=rng, mean=init_mean, cov=init_cov) for _ in range(Nparticles)]) + weights = np.zeros(Nparticles) + for iter in range(Niteration): + for i, pose in enumerate(particles): + weights[i] = 0 + for data in bundle: + sensor_id = data["sensor"] + plane_id = data["plane"] + meas_angle = data["angle"] + sensor_pt = pt.transform(pose, trackers[tracker_id][sensor_id]["point"]) + weights[i] += calc_gen2_error_corr( + sensor_pt[0], + sensor_pt[1], + -sensor_pt[2], + plane_id, + lighthouses[channel_id][plane_id], + meas_angle) + print(f"++ Best cost at {iter} is", np.min(weights)) + weights = np.reciprocal(weights) + weights /= np.sum(weights) + weights = np.cumsum(weights) + probs = np.random.random(size=Nparticles) + particles = np.array([ + pt.concat( + pt.random_transform(rng=rng, mean=inno_mean, cov=inno_cov), + particles[np.argmax(weights > prob)]) for prob in probs]) + if Nrefresh > 0: + particles[0:Nrefresh] = np.array([ + pt.random_transform(rng=rng, mean=init_mean, cov=init_cov) + for _ in range(Nrefresh)]) + pos = np.zeros((Nparticles, 3)) + rot = np.zeros((Nparticles, 4)) + for i in range(Nparticles): + pos[i, :] = pt.pq_from_transform(particles[i])[0:3] + rot[i, :] = pt.pq_from_transform(particles[i])[3:7] + pos_mean = np.average(pos, weights=weights, axis=0) + pos_var = np.average((pos - pos_mean)**2, weights=weights, axis=0) + rot_mean = np.average(rot, weights=weights, axis=0) + rot_var = np.average((rot - rot_mean)**2, weights=weights, axis=0) + print("pos", pos_mean, np.sqrt(pos_var)) + print("rot", rot_mean, np.sqrt(rot_var)) + + +if __name__ == "__main__": + + # Handle arguments + parser = argparse.ArgumentParser() + parser.add_argument("-b", "--bag", type=str, required=True, help="the bag file to input") + parser.add_argument("-d", "--duration", type=float, required=False, help="number of seconds to read") + parser.add_argument("-n", "--namespace", default='/libsurvive', help="the bag file to output") + args = parser.parse_args() + + storage_options, converter_options = get_rosbag_options(args.bag, 'mcap') + reader = rosbag2_py.SequentialReader() + reader.open(storage_options, converter_options) + + # Read all measurements + lighthouses = get_all_lighthouses(reader, args.namespace, args.duration) + trackers = get_all_trackers(reader, args.namespace, args.duration) + bundles = get_all_light_bundles(reader, args.namespace, args.duration) + + # A tracker spins at 50Hz, so there should be a gap of around 10ms between light bundles. 
+ calculate_poses(lighthouses, trackers, bundles) diff --git a/test/test_simple_localizer.py b/test/test_simple_localizer.py new file mode 100644 index 0000000..e69de29 From 1a31c9bbe8622b984e69849c4e9aebe91f0c708b Mon Sep 17 00:00:00 2001 From: Andrew Symington Date: Sun, 27 Aug 2023 14:48:35 -0700 Subject: [PATCH 15/18] More additions --- scripts/jacobianfun | 228 ++++++++++++++++++++++++++++++++++----- scripts/simple_localizer | 170 +++++++++++++++++++---------- 2 files changed, 316 insertions(+), 82 deletions(-) diff --git a/scripts/jacobianfun b/scripts/jacobianfun index 4025ed0..540d01e 100755 --- a/scripts/jacobianfun +++ b/scripts/jacobianfun @@ -192,33 +192,207 @@ def mech_model(ccwMotorAndLensAndTiltCorrectedAngleAboutY): return Matrix([ccwMotorAndLensAndTiltCorrectedAngleAboutY + sin(ccwMotorAndLensAndTiltCorrectedAngleAboutY + gibpha) * gibmag - phase - math.pi / 2.0]) -# Proj model -print("--------------") -Oproj = proj_model(X, Y, Z, 0) -Sproj = Matrix([X, Y, Z]) -Jproj = Oproj.jacobian(Sproj) -print("Jproj", simplify(Jproj)) - -# Lens model -print("--------------") -Olens = lens_model(ccwAngleAboutY, A_over_B, Y_over_R, A_over_R, R_over_C) -Slens = Matrix([ccwAngleAboutY, A_over_B, Y_over_R, A_over_R, R_over_C]) -Jlens = Olens.jacobian(Slens) -print("Jlens", Jlens) - -# Dist model -print("--------------") -Odist = lens_model(ccwAngleAboutY, A_over_B, Y_over_R, A_over_R, R_over_C) -Sdist = Matrix([ccwAngleAboutY, A_over_B, Y_over_R, A_over_R, R_over_C]) -Jdist = Odist.jacobian(Slens) -print("Jdist", Jdist) - -# Mech model -print("--------------") -Omech = mech_model(ccwMotorAndLensAndTiltCorrectedAngleAboutY) -Smech = Matrix([ccwMotorAndLensAndTiltCorrectedAngleAboutY]) -Jmech = Omech.jacobian(Smech) -print("Jmech", simplify(Jmech)) +# # Proj model +# print("--------------") +# Oproj = proj_model(X, Y, Z, 0) +# Sproj = Matrix([X, Y, Z]) +# Jproj = Oproj.jacobian(Sproj) +# print("Jproj", simplify(Jproj)) + +# # Lens model +# print("--------------") +# Olens = lens_model(ccwAngleAboutY, A_over_B, Y_over_R, A_over_R, R_over_C) +# Slens = Matrix([ccwAngleAboutY, A_over_B, Y_over_R, A_over_R, R_over_C]) +# Jlens = Olens.jacobian(Slens) +# print("Jlens", Jlens) + +# # Dist model +# print("--------------") +# Odist = lens_model(ccwAngleAboutY, A_over_B, Y_over_R, A_over_R, R_over_C) +# Sdist = Matrix([ccwAngleAboutY, A_over_B, Y_over_R, A_over_R, R_over_C]) +# Jdist = Odist.jacobian(Slens) +# print("Jdist", Jdist) + +# # Mech model +# print("--------------") +# Omech = mech_model(ccwMotorAndLensAndTiltCorrectedAngleAboutY) +# Smech = Matrix([ccwMotorAndLensAndTiltCorrectedAngleAboutY]) +# Jmech = Omech.jacobian(Smech) +# print("Jmech", simplify(Jmech)) + +import numpy as np + +def calc_gen2_err(X, Y, Z, plane, cal, obs): + """Reprojection function for predicting an angle from sensor pose in lighthouse frame""" + + # Angle in the X-Z plane (about the +Y axis) sweeping counter-clockwise from +X. + # Note that we do a sign inversion on Z here to convert to a left-hand system, + # which is the convention adopted by the projection model. + ccwAngleAboutY = np.arctan2(-Z, X) + ccwAngleAboutX = np.arctan2(-Z, Y) + + # We know from the Vive documentation that the HFOV is 150 degrees and the VFOV is + # 110 degrees. It turns out that beyond this region results in numerical instability, + # so the best thing to do is try and steer the numerical solver closer to a solution. 
+ # This basically nudges the solver towards the base station frustrum + + if ccwAngleAboutY < np.radians(15) or ccwAngleAboutY > np.radians(165): + return np.abs(ccwAngleAboutY - np.radians(90)) + if ccwAngleAboutX < np.radians(35) or ccwAngleAboutX > np.radians(145): + return np.abs(ccwAngleAboutX - np.radians(90)) + + # Distances between sensor and origin + B = np.sqrt(X * X + Z * Z) # L2 norm of (sensor - origin) in X-Z plane + C = np.sqrt(X * X + Y * Y + Z * Z) # L2 norm of (sensor - origin) + + # Tilt angle of the laser plane with respect to motor shaft, nominally 30 degrees + planeTiltAngle = cal["tilt"] + (-1 if plane else 1) * np.pi / 6. + + # CORRECTIONS FOR THE TILTED LASET PLANE + + # Viewed from any direction around the lighthouse: + #------------------------------------------------ + # The moment the laser plane strikes the sensing element (@). THe optical axis runs + # from the sensor to the midpoint of the lens. + # + # +Y + # (@)__A__| + # \ | Angle * = "planeTiltAngle" + # \ | + # R \ | Y/R = sin(*) "how much do I move along A as I move along R" + # \ | A/R = cos(*) "how much do I move along Y as I move along R" + # \*| A/Y = tan(*) "how much do I move along A as I move along Y" + # \| + # +W <----+ <------------ this is the optical axis of the laser + # | \ + # | \ <-------- laser plane + # | \ + # + A_over_R = np.sin(planeTiltAngle) + Y_over_R = np.cos(planeTiltAngle) + A_over_Y = np.tan(planeTiltAngle) + + # For a given Y coordinate, work out how much we'd need to be along A. Intuitively, A is the + # distance from the Y axis introduced by the fact that the plane is tilted with respect to Y. + A = A_over_Y * Y + + # Viewed from above the lighthouse: + # --------------------------------- + # Now that we know how far along A we've moved we can calculate the small angle error (*) as + # A / B = sin(#). Intuitively, this is the delta to the sweep angle that corrects for the + # plane tilt. In an ideal setting without fabrication errors, we could return the measurement + # here as ccwAngleAboutY + np.sin(A / B) and we'd be done! + # + # +X + # | + # | + # (@) | B = np.sqrt(X * X + Z * Z) + # A, ' \ | + # , ` 90 \ B | A/B = sin(#) "sweep angle error added by tilt" + # +W ', \ | + # '., \*| + # '-,# \| + # +Z <---------------------+ optical center + A_over_B = A / B + + # For convenience, let's create a new value here to capture the tilt-compensated angle + tiltCorrectedCcwAngleAboutY = ccwAngleAboutY - np.arcsin(A_over_B) + + # CORRECTIONS FOR LENS ERRORS + + # R is the distance from the sensor to the optical axis in the X-W plane, which is a + # frame that continually rotates about the lighthouse + R = Y / Y_over_R + + # A laser plane is interesting because unlike typical lens correction, which happens in + # a 2D image plane, this happens in 1D. So all that matters is deviation from the optical + # axis in 1D within the laser plane -- the further you move from the optical axis in the + # laser's plane, the more lens distortion you'd suffer. 
One neat thing about the 1D nature + # of the problem is that you need not model distortion as euclidean distance from the + # optical axis; you can just model it directly on the angle from the optical axis, which + # is proportional to the horizontal distance moved away from the optical axis :) + # + # (@) sin(*) = R / C + # /| "how much we've deviated from the optical axis" + # C / | + # / | R + # / * | + # optical center +--------------> +W laser optical axis + # + angleFromOpticalCenterOfLens = np.arcsin(R / C) + + # I don't exactly understand what this error is about, but it looks to be very similar to + # the mechanical error model. This suggests that there is some periodic component to the + # lens curvature that is a function of the motor spinning. My only guess is that the lens + # warps slightly as it rotates because of centripetal forces, and this correct for that. + # + # x' = x + a sin (x + b) + # | +-----+-----+ + # + | + # curve at zero | + # | + # + + # Fourier correction + centripetalCompensatedBaseCurvature = cal["curve"] + cal["ogeemag"] * np.sin( + tiltCorrectedCcwAngleAboutY + cal["ogeephase"]) + + # This is a 5th order polynomial expression that corrects for lens distortion, but in stead + # of describing an error in distance from optical axis, it describes an error as a multiple + # of the base curvature of the lens. Something like final_curvature = f(x) * base_curvature. + # The polynomial function looks something like the graph below. Domain is [-pi/2, +pi/2]. + # + # | 4| | f(x) = a[0]x^5 + a[1]x^4 + a[2]x^3 + a[3]x^2 + a[4]x^1 + a[5] + # \ | / + # \ 2| / + # `-.__|__.-' + #---------------+-------------> +x + # -pi -2 0 +2 +pi + # + a = [-8.0108022e-06, 0.0028679863, 5.3685255000000001e-06, 0.0076069798000000001, 0, 0] + d = 0 # this ends up being f'(x) + b = a[0] # this ends up being f(x) + for i in range(1, 6): + d = d * angleFromOpticalCenterOfLens + b + b = b * angleFromOpticalCenterOfLens + a[i] + + # To make this easier, lets just multiply to get the curvature and its first derivative. + lensCurvature = b * centripetalCompensatedBaseCurvature + lensCurvatureFirstDerivative = d * centripetalCompensatedBaseCurvature + + # Here is where we put all the lens correction terms together. + # + # A lensCurvature + # asin ( - + --------------------------------------------- ) + # B Y / R - lensCurvatureFirstDerivative * A / R + # + # The dividend represents the deviation along A, which if course depends on how far you + # have shifted from the laser optical axis. The divisor itself must also shift by a + # proportional amount. + # + ccwLensCorrectedAngleAboutY = ccwAngleAboutY - np.arcsin( + A_over_B + lensCurvature / (Y_over_R - lensCurvatureFirstDerivative * A_over_R)) + + # CORRECTIONS FOR MECHANICAL ERRORS + + # Now that we have a tilt and lens-corrected angle we have to shift it by a small amount to + # compensate for mechanical vibration and lens mounting errors. We use a fourier term here + # that captures periodic error. In other words this error term repeats every 2pi, or one + # rotation about Y. Ultimately, it models the fact that the trajectory taken by the lens on + # an imperfect, imbalanced motorized system deviates from a perfect circle. + # + # x' = x + a sin (x + b) - c + # | +-----+-----+ | + # + | +--- Phase offset from zero. This is always 0 for axis = 0 + # raw angle | because that axis defines the start of the sweep. 
Axis + # | 1 has a small fabrication error around the motor spindle + # + that phase advances or delays with respect to X, and this + # Fourier correction term corrects for that. + # + ccwMotorAndLensAndTiltCorrectedAngleAboutY = ccwLensCorrectedAngleAboutY + np.sin( + ccwLensCorrectedAngleAboutY + cal["gibpha"]) * cal["gibmag"] - cal["phase"] + + # Shift the angle clockwise 90 degree to be described with respect to +Z, and not +X. + return np.abs(ccwMotorAndLensAndTiltCorrectedAngleAboutY - np.pi / 2.0 - obs) # subs = { # X: 0.1, diff --git a/scripts/simple_localizer b/scripts/simple_localizer index bd36256..b15a35c 100755 --- a/scripts/simple_localizer +++ b/scripts/simple_localizer @@ -29,6 +29,10 @@ from rclpy.serialization import deserialize_message from rclpy.time import Time from rosidl_runtime_py.utilities import get_message +from functools import partial +from typing import List, Optional +import gtsam + ## DATA HANDLERS def get_rosbag_options(path, storage_id, serialization_format='cdr'): @@ -176,12 +180,14 @@ def calc_gen2_error_orig(X, Y, Z, axis, cal, obs): ## MORE HUMAN READABLE IMPLEMENTATION -def calc_gen2_error_corr(X, Y, Z, plane, cal, obs): +def calc_gen2_err(X, Y, Z, plane, cal, obs): """Reprojection function for predicting an angle from sensor pose in lighthouse frame""" - # Angle in the X-Z plane (about the +Y axis) sweeping counter-clockwise from +X - ccwAngleAboutY = np.arctan2(Z, X) - ccwAngleAboutX = np.arctan2(Z, Y) + # Angle in the X-Z plane (about the +Y axis) sweeping counter-clockwise from +X. + # Note that we do a sign inversion on Z here to convert to a left-hand system, + # which is the convention adopted by the projection model. + ccwAngleAboutY = np.arctan2(-Z, X) + ccwAngleAboutX = np.arctan2(-Z, Y) # We know from the Vive documentation that the HFOV is 150 degrees and the VFOV is # 110 degrees. It turns out that beyond this region results in numerical instability, @@ -346,6 +352,34 @@ def calc_gen2_error_corr(X, Y, Z, plane, cal, obs): # Shift the angle clockwise 90 degree to be described with respect to +Z, and not +X. 
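    # (Note: the value returned below is |predicted - observed|, an absolute scalar cost;
    # taking the absolute value discards the sign of the residual.)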
return np.abs(ccwMotorAndLensAndTiltCorrectedAngleAboutY - np.pi / 2.0 - obs) +# So th + +def calc_gen2_jac(): + + +def light_error(calibration: dict, + t_sensor: np.ndarray, + axis: float, + angle: float, + this: gtsam.CustomFactor, + values: gtsam.Values, + jacobians: Optional[List[np.ndarray]]) -> float: + # Extract the pose from the attached keys + pose_key = this.keys()[0] + lTt = values.atPose3(pose_key) + + # Transform the sensor into the lighthouse frame by the proposed pose + l_sensor = lTt.transformTo(gtsam.Point3(t_sensor[0], t_sensor[1], t_sensor[2])) + + # + error = calc_gen2_err(l_sensor[0], l_sensor[1], l_sensor[2], axis, calibration, angle) + + # Calculate dZ/dX, a 1x6 matrix expressing the error state as a function of the pose + if jacobians is not None: + jacobians[0] = calc_gen2_jac() + + + return np.array([error]) def calculate_poses(lighthouses, trackers, bundles): """Iterates through tracker/lighthouses and calculates poses""" @@ -355,57 +389,83 @@ def calculate_poses(lighthouses, trackers, bundles): print("+ Channel", channel_id) for stamp, bundle in channel_info.items(): print("++ Bundle at", stamp, "contains", len(bundle), "observations") - # Run particle filter - Nparticles = 1000 - Nrefresh = 0 - Niteration = 10 - init_mean = pt.transform_from(R=np.eye(3), p=np.array([0.0, 0.0, 0.0])) - init_cov = np.diag([36.0, 36.0, 36.0, 0.1, 0.1, 0.1]) - inno_mean = pt.transform_from(R=np.eye(3), p=np.array([0.0, 0.0, 0.0])) - inno_cov = 1.0e-3 * np.diag(np.ones(6)) - rng = np.random.default_rng(0) - particles = np.array([ - pt.random_transform(rng=rng, mean=init_mean, cov=init_cov) for _ in range(Nparticles)]) - weights = np.zeros(Nparticles) - for iter in range(Niteration): - for i, pose in enumerate(particles): - weights[i] = 0 - for data in bundle: - sensor_id = data["sensor"] - plane_id = data["plane"] - meas_angle = data["angle"] - sensor_pt = pt.transform(pose, trackers[tracker_id][sensor_id]["point"]) - weights[i] += calc_gen2_error_corr( - sensor_pt[0], - sensor_pt[1], - -sensor_pt[2], - plane_id, - lighthouses[channel_id][plane_id], - meas_angle) - print(f"++ Best cost at {iter} is", np.min(weights)) - weights = np.reciprocal(weights) - weights /= np.sum(weights) - weights = np.cumsum(weights) - probs = np.random.random(size=Nparticles) - particles = np.array([ - pt.concat( - pt.random_transform(rng=rng, mean=inno_mean, cov=inno_cov), - particles[np.argmax(weights > prob)]) for prob in probs]) - if Nrefresh > 0: - particles[0:Nrefresh] = np.array([ - pt.random_transform(rng=rng, mean=init_mean, cov=init_cov) - for _ in range(Nrefresh)]) - pos = np.zeros((Nparticles, 3)) - rot = np.zeros((Nparticles, 4)) - for i in range(Nparticles): - pos[i, :] = pt.pq_from_transform(particles[i])[0:3] - rot[i, :] = pt.pq_from_transform(particles[i])[3:7] - pos_mean = np.average(pos, weights=weights, axis=0) - pos_var = np.average((pos - pos_mean)**2, weights=weights, axis=0) - rot_mean = np.average(rot, weights=weights, axis=0) - rot_var = np.average((rot - rot_mean)**2, weights=weights, axis=0) - print("pos", pos_mean, np.sqrt(pos_var)) - print("rot", rot_mean, np.sqrt(rot_var)) + factor_graph = gtsam.NonlinearFactorGraph() + initial_estimate = gtsam.Values() + initial_estimate.insert(0, gtsam.Pose3( + gtsam.Rot3.Rodrigues(0.0, 0.0, 0.0), gtsam.Point3(0.0, 0.0, -3.0))) + light_model = gtsam.noiseModel.Isotropic.Sigma(1, 0.000001) + for data in bundle: + factor_graph.add(gtsam.CustomFactor( + light_model, [0], partial(light_error, + lighthouses[channel_id][data["plane"]], + 
trackers[tracker_id][data["sensor"]]["point"], + data["plane"], + data["angle"]))) + params = gtsam.LevenbergMarquardtParams() + optimizer = gtsam.LevenbergMarquardtOptimizer(factor_graph, initial_estimate, params) + result = optimizer.optimize() + result.print("result") + return + + # sensor_pt = pt.transform(pose, trackers[tracker_id][sensor_id]["point"]) + # weights[i] += calc_gen2_error_corr( + # sensor_pt[0], + # sensor_pt[1], + # -sensor_pt[2], + # plane_id, + # lighthouses[channel_id][plane_id], + # meas_angle) + ## Run particle filter + # Nparticles = 1000 + # Nrefresh = 0 + # Niteration = 10 + # init_mean = pt.transform_from(R=np.eye(3), p=np.array([0.0, 0.0, 0.0])) + # init_cov = np.diag([36.0, 36.0, 36.0, 0.1, 0.1, 0.1]) + # inno_mean = pt.transform_from(R=np.eye(3), p=np.array([0.0, 0.0, 0.0])) + # inno_cov = 1.0e-3 * np.diag(np.ones(6)) + # rng = np.random.default_rng(0) + # particles = np.array([ + # pt.random_transform(rng=rng, mean=init_mean, cov=init_cov) for _ in range(Nparticles)]) + # weights = np.zeros(Nparticles) + # for iter in range(Niteration): + # for i, pose in enumerate(particles): + # weights[i] = 0 + # for data in bundle: + # sensor_id = data["sensor"] + # plane_id = data["plane"] + # meas_angle = data["angle"] + # sensor_pt = pt.transform(pose, trackers[tracker_id][sensor_id]["point"]) + # weights[i] += calc_gen2_error_corr( + # sensor_pt[0], + # sensor_pt[1], + # -sensor_pt[2], + # plane_id, + # lighthouses[channel_id][plane_id], + # meas_angle) + # print(f"++ Best cost at {iter} is", np.min(weights)) + # weights = np.reciprocal(weights) + # weights /= np.sum(weights) + # weights = np.cumsum(weights) + # probs = np.random.random(size=Nparticles) + # particles = np.array([ + # pt.concat( + # pt.random_transform(rng=rng, mean=inno_mean, cov=inno_cov), + # particles[np.argmax(weights > prob)]) for prob in probs]) + # if Nrefresh > 0: + # particles[0:Nrefresh] = np.array([ + # pt.random_transform(rng=rng, mean=init_mean, cov=init_cov) + # for _ in range(Nrefresh)]) + # pos = np.zeros((Nparticles, 3)) + # rot = np.zeros((Nparticles, 4)) + # for i in range(Nparticles): + # pos[i, :] = pt.pq_from_transform(particles[i])[0:3] + # rot[i, :] = pt.pq_from_transform(particles[i])[3:7] + # pos_mean = np.average(pos, weights=weights, axis=0) + # pos_var = np.average((pos - pos_mean)**2, weights=weights, axis=0) + # rot_mean = np.average(rot, weights=weights, axis=0) + # rot_var = np.average((rot - rot_mean)**2, weights=weights, axis=0) + # print("pos", pos_mean, np.sqrt(pos_var)) + # print("rot", rot_mean, np.sqrt(rot_var)) if __name__ == "__main__": From 72dceb50278d51fdc28c9674c6618f012af22343 Mon Sep 17 00:00:00 2001 From: Andrew Symington Date: Sun, 27 Aug 2023 20:45:54 -0700 Subject: [PATCH 16/18] More work in progress --- scripts/jacobianfun | 780 ++++++++++++++++++++------------------- scripts/simple_localizer | 98 ++--- 2 files changed, 455 insertions(+), 423 deletions(-) diff --git a/scripts/jacobianfun b/scripts/jacobianfun index 540d01e..1da93a3 100755 --- a/scripts/jacobianfun +++ b/scripts/jacobianfun @@ -22,393 +22,411 @@ import math from sympy import atan2, asin, sin, cos, tan, sqrt, Matrix, Symbol, simplify, compose, init_printing -from sympy.abc import X, Y, Z - -# Calibration params -tilt = Symbol("tilt", real = True, constant = True) -ogeephase = Symbol("ogeephase", real = True, constant = True) -ogeemag = Symbol("ogeemag", real = True, constant = True) -curve = Symbol("curve", real = True, constant = True) -gibpha = Symbol("gibpha", real = True, 
constant = True) -gibmag = Symbol("gibmag", real = True, constant = True) -phase = Symbol("phase", real = True, constant = True) - -# Intermediary parameters -ccwAngleAboutY = Symbol("ccwAngleAboutY") -A_over_B = Symbol("A_over_B") -Y_over_R = Symbol("Y_over_R") -A_over_R = Symbol("A_over_R") -R_over_C = Symbol("R_over_C") -ccwMotorAndLensAndTiltCorrectedAngleAboutY = Symbol("ccwMotorAndLensAndTiltCorrectedAngleAboutY") - -def proj_model(X, Y, Z, plane): - """Reprojection function for predicting an angle from sensor pose in lighthouse frame""" - - # Angle in the X-Z plane (about the +Y axis) sweeping counter-clockwise from +X - ccwAngleAboutY = atan2(Z, X) - - # Distances between sensor and origin - B = sqrt(X * X + Z * Z) # L2 norm of (sensor - origin) in X-Z plane - C = sqrt(X * X + Y * Y + Z * Z) # L2 norm of (sensor - origin) - - # Tilt angle of the laser plane with respect to motor shaft, nominally 30 degrees - planeTiltAngle = tilt + (-1 if plane else 1) * math.pi / 6. - - # CORRECTIONS FOR THE TILTED LASER PLANE - - # Viewed from any direction around the lighthouse: - #------------------------------------------------ - # The moment the laser plane strikes the sensing element (@). THe optical axis runs - # from the sensor to the midpoint of the lens. - # - # +Y - # (@)__A__| - # \ | Angle * = "planeTiltAngle" - # \ | - # R \ | Y/R = sin(*) "how much do I move along A as I move along R" - # \ | A/R = cos(*) "how much do I move along Y as I move along R" - # \*| A/Y = tan(*) "how much do I move along A as I move along Y" - # \| - # +W <----+ <------------ this is the optical axis of the laser - # | \ - # | \ <-------- laser plane - # | \ - # - A_over_R = sin(planeTiltAngle) - Y_over_R = cos(planeTiltAngle) - A_over_Y = tan(planeTiltAngle) - - # For a given Y coordinate, work out how much we'd need to be along A. Intuitively, A is the - # distance from the Y axis introduced by the fact that the plane is tilted with respect to Y. - A = A_over_Y * Y - - # Viewed from above the lighthouse: - # --------------------------------- - # Now that we know how far along A we've moved we can calculate the small angle error (*) as - # A / B = sin(#). Intuitively, this is the delta to the sweep angle that corrects for the - # plane tilt. In an ideal setting without fabrication errors, we could return the measurement - # here as ccwAngleAboutY + np.sin(A / B) and we'd be done! - # - # +X - # | - # | - # (@) | B = np.sqrt(X * X + Z * Z) - # A, ' \ | - # , ` 90 \ B | A/B = sin(#) "sweep angle error added by tilt" - # +W ', \ | - # '., \*| - # '-,# \| - # +Z <---------------------+ optical center - A_over_B = A / B - - # R is the distance from the sensor to the optical axis in the X-W plane, which is a - # frame that continually rotates about the lighthouse - R = Y / Y_over_R - R_over_C = R / C - - # For convenience, let's create a new value here to capture the tilt-compensated angle +from sympy.abc import x, y, z, pi + +def foo(X, Y, Z): return Matrix([ - ccwAngleAboutY, - A_over_B, - Y_over_R, - A_over_R, - R_over_C, + atan2(-Z, X) - math.pi / 2, + atan2(-Z, Y) - math.pi / 2 ]) - -def lens_model(ccwAngleAboutY, A_over_B, Y_over_R, A_over_R, R_over_C): - - # A laser plane is interesting because unlike typical lens correction, which happens in - # a 2D image plane, this happens in 1D. So all that matters is deviation from the optical - # axis in 1D within the laser plane -- the further you move from the optical axis in the - # laser's plane, the more lens distortion you'd suffer. 
One neat thing about the 1D nature - # of the problem is that you need not model distortion as euclidean distance from the - # optical axis; you can just model it directly on the angle from the optical axis, which - # is proportional to the horizontal distance moved away from the optical axis :) - # - # (@) sin(*) = R / C - # /| "how much we've deviated from the optical axis" - # C / | - # / | R - # / * | - # optical center +--------------> +W laser optical axis - # - angleFromOpticalCenterOfLens = asin(R_over_C) - - # I don't exactly understand what this error is about, but it looks to be very similar to - # the mechanical error model. This suggests that there is some periodic component to the - # lens curvature that is a function of the motor spinning. My only guess is that the lens - # warps slightly as it rotates because of centripetal forces, and this correct for that. - # - # x' = x + a sin (x + b) - # | +-----+-----+ - # + | - # curve at zero | - # | - # + - # Fourier correction - centripetalCompensatedBaseCurvature = curve + ogeemag * sin( - ccwAngleAboutY - asin(A_over_B) + ogeephase) +observation = foo(x, y, z) +state = Matrix([x, y, z]) +subs = { + x: 0.1, + y: 0.1, + z: -2.0, +} +v = observation.evalf(subs=subs) +J = observation.jacobian(state) +print(J) + +# # Calibration params +# tilt = Symbol("tilt", real = True, constant = True) +# ogeephase = Symbol("ogeephase", real = True, constant = True) +# ogeemag = Symbol("ogeemag", real = True, constant = True) +# curve = Symbol("curve", real = True, constant = True) +# gibpha = Symbol("gibpha", real = True, constant = True) +# gibmag = Symbol("gibmag", real = True, constant = True) +# phase = Symbol("phase", real = True, constant = True) + +# # Intermediary parameters +# ccwAngleAboutY = Symbol("ccwAngleAboutY") +# A_over_B = Symbol("A_over_B") +# Y_over_R = Symbol("Y_over_R") +# A_over_R = Symbol("A_over_R") +# R_over_C = Symbol("R_over_C") +# ccwMotorAndLensAndTiltCorrectedAngleAboutY = Symbol("ccwMotorAndLensAndTiltCorrectedAngleAboutY") + +# def proj_model(X, Y, Z, plane): +# """Reprojection function for predicting an angle from sensor pose in lighthouse frame""" + +# # Angle in the X-Z plane (about the +Y axis) sweeping counter-clockwise from +X +# ccwAngleAboutY = atan2(Z, X) + +# # Distances between sensor and origin +# B = sqrt(X * X + Z * Z) # L2 norm of (sensor - origin) in X-Z plane +# C = sqrt(X * X + Y * Y + Z * Z) # L2 norm of (sensor - origin) + +# # Tilt angle of the laser plane with respect to motor shaft, nominally 30 degrees +# planeTiltAngle = tilt + (-1 if plane else 1) * math.pi / 6. + +# # CORRECTIONS FOR THE TILTED LASER PLANE + +# # Viewed from any direction around the lighthouse: +# #------------------------------------------------ +# # The moment the laser plane strikes the sensing element (@). THe optical axis runs +# # from the sensor to the midpoint of the lens. +# # +# # +Y +# # (@)__A__| +# # \ | Angle * = "planeTiltAngle" +# # \ | +# # R \ | Y/R = sin(*) "how much do I move along A as I move along R" +# # \ | A/R = cos(*) "how much do I move along Y as I move along R" +# # \*| A/Y = tan(*) "how much do I move along A as I move along Y" +# # \| +# # +W <----+ <------------ this is the optical axis of the laser +# # | \ +# # | \ <-------- laser plane +# # | \ +# # +# A_over_R = sin(planeTiltAngle) +# Y_over_R = cos(planeTiltAngle) +# A_over_Y = tan(planeTiltAngle) + +# # For a given Y coordinate, work out how much we'd need to be along A. 
Intuitively, A is the +# # distance from the Y axis introduced by the fact that the plane is tilted with respect to Y. +# A = A_over_Y * Y + +# # Viewed from above the lighthouse: +# # --------------------------------- +# # Now that we know how far along A we've moved we can calculate the small angle error (*) as +# # A / B = sin(#). Intuitively, this is the delta to the sweep angle that corrects for the +# # plane tilt. In an ideal setting without fabrication errors, we could return the measurement +# # here as ccwAngleAboutY + np.sin(A / B) and we'd be done! +# # +# # +X +# # | +# # | +# # (@) | B = np.sqrt(X * X + Z * Z) +# # A, ' \ | +# # , ` 90 \ B | A/B = sin(#) "sweep angle error added by tilt" +# # +W ', \ | +# # '., \*| +# # '-,# \| +# # +Z <---------------------+ optical center +# A_over_B = A / B + +# # R is the distance from the sensor to the optical axis in the X-W plane, which is a +# # frame that continually rotates about the lighthouse +# R = Y / Y_over_R +# R_over_C = R / C + +# # For convenience, let's create a new value here to capture the tilt-compensated angle +# return Matrix([ +# ccwAngleAboutY, +# A_over_B, +# Y_over_R, +# A_over_R, +# R_over_C, +# ]) + +# def lens_model(ccwAngleAboutY, A_over_B, Y_over_R, A_over_R, R_over_C): + +# # A laser plane is interesting because unlike typical lens correction, which happens in +# # a 2D image plane, this happens in 1D. So all that matters is deviation from the optical +# # axis in 1D within the laser plane -- the further you move from the optical axis in the +# # laser's plane, the more lens distortion you'd suffer. One neat thing about the 1D nature +# # of the problem is that you need not model distortion as euclidean distance from the +# # optical axis; you can just model it directly on the angle from the optical axis, which +# # is proportional to the horizontal distance moved away from the optical axis :) +# # +# # (@) sin(*) = R / C +# # /| "how much we've deviated from the optical axis" +# # C / | +# # / | R +# # / * | +# # optical center +--------------> +W laser optical axis +# # +# angleFromOpticalCenterOfLens = asin(R_over_C) + +# # I don't exactly understand what this error is about, but it looks to be very similar to +# # the mechanical error model. This suggests that there is some periodic component to the +# # lens curvature that is a function of the motor spinning. My only guess is that the lens +# # warps slightly as it rotates because of centripetal forces, and this correct for that. +# # +# # x' = x + a sin (x + b) +# # | +-----+-----+ +# # + | +# # curve at zero | +# # | +# # + +# # Fourier correction +# centripetalCompensatedBaseCurvature = curve + ogeemag * sin( +# ccwAngleAboutY - asin(A_over_B) + ogeephase) - # This is a 5th order polynomial expression that corrects for lens distortion, but instead - # of describing an error in distance from optical axis, it describes an error as a multiple - # of the base curvature of the lens. Something like final_curvature = f(x) * base_curvature. - # The polynomial function looks something like the graph below. Domain is [-pi/2, +pi/2]. 
- # - # | 4| | f(x) = a[0]x^5 + a[1]x^4 + a[2]x^3 + a[3]x^2 + a[4]x^1 + a[5] - # \ | / - # \ 2| / - # `-.__|__.-' - #---------------+-------------> +x - # -pi -2 0 +2 +pi - # - a = [-8.0108022e-06, 0.0028679863, 5.3685255000000001e-06, 0.0076069798000000001, 0, 0] - d = 0 # this ends up being f'(x) - b = a[0] # this ends up being f(x) - for i in range(1, 6): - d = d * angleFromOpticalCenterOfLens + b - b = b * angleFromOpticalCenterOfLens + a[i] - - # To make this easier, lets just multiply to get the curvature and its first derivative. - lensCurvature = b * centripetalCompensatedBaseCurvature - lensCurvatureFirstDerivative = d * centripetalCompensatedBaseCurvature - - # Here is where we put all the lens correction terms together. - # - # A lensCurvature - # asin ( - + --------------------------------------------- ) - # B Y / R - lensCurvatureFirstDerivative * A / R - # - # The dividend represents the deviation along A, which if course depends on how far you - # have shifted from the laser optical axis. The divisor itself must also shift by a - # proportional amount. - # +# # This is a 5th order polynomial expression that corrects for lens distortion, but instead +# # of describing an error in distance from optical axis, it describes an error as a multiple +# # of the base curvature of the lens. Something like final_curvature = f(x) * base_curvature. +# # The polynomial function looks something like the graph below. Domain is [-pi/2, +pi/2]. +# # +# # | 4| | f(x) = a[0]x^5 + a[1]x^4 + a[2]x^3 + a[3]x^2 + a[4]x^1 + a[5] +# # \ | / +# # \ 2| / +# # `-.__|__.-' +# #---------------+-------------> +x +# # -pi -2 0 +2 +pi +# # +# a = [-8.0108022e-06, 0.0028679863, 5.3685255000000001e-06, 0.0076069798000000001, 0, 0] +# d = 0 # this ends up being f'(x) +# b = a[0] # this ends up being f(x) +# for i in range(1, 6): +# d = d * angleFromOpticalCenterOfLens + b +# b = b * angleFromOpticalCenterOfLens + a[i] + +# # To make this easier, lets just multiply to get the curvature and its first derivative. +# lensCurvature = b * centripetalCompensatedBaseCurvature +# lensCurvatureFirstDerivative = d * centripetalCompensatedBaseCurvature + +# # Here is where we put all the lens correction terms together. +# # +# # A lensCurvature +# # asin ( - + --------------------------------------------- ) +# # B Y / R - lensCurvatureFirstDerivative * A / R +# # +# # The dividend represents the deviation along A, which if course depends on how far you +# # have shifted from the laser optical axis. The divisor itself must also shift by a +# # proportional amount. 
+# # - ccwMotorAndLensAndTiltCorrectedAngleAboutY = ccwAngleAboutY - asin( - A_over_B + lensCurvature / (Y_over_R - lensCurvatureFirstDerivative * A_over_R)) - return Matrix([ccwMotorAndLensAndTiltCorrectedAngleAboutY]) - - -def mech_model(ccwMotorAndLensAndTiltCorrectedAngleAboutY): - return Matrix([ccwMotorAndLensAndTiltCorrectedAngleAboutY + sin(ccwMotorAndLensAndTiltCorrectedAngleAboutY + gibpha) * gibmag - phase - math.pi / 2.0]) - - -# # Proj model -# print("--------------") -# Oproj = proj_model(X, Y, Z, 0) -# Sproj = Matrix([X, Y, Z]) -# Jproj = Oproj.jacobian(Sproj) -# print("Jproj", simplify(Jproj)) - -# # Lens model -# print("--------------") -# Olens = lens_model(ccwAngleAboutY, A_over_B, Y_over_R, A_over_R, R_over_C) -# Slens = Matrix([ccwAngleAboutY, A_over_B, Y_over_R, A_over_R, R_over_C]) -# Jlens = Olens.jacobian(Slens) -# print("Jlens", Jlens) - -# # Dist model -# print("--------------") -# Odist = lens_model(ccwAngleAboutY, A_over_B, Y_over_R, A_over_R, R_over_C) -# Sdist = Matrix([ccwAngleAboutY, A_over_B, Y_over_R, A_over_R, R_over_C]) -# Jdist = Odist.jacobian(Slens) -# print("Jdist", Jdist) - -# # Mech model -# print("--------------") -# Omech = mech_model(ccwMotorAndLensAndTiltCorrectedAngleAboutY) -# Smech = Matrix([ccwMotorAndLensAndTiltCorrectedAngleAboutY]) -# Jmech = Omech.jacobian(Smech) -# print("Jmech", simplify(Jmech)) - -import numpy as np - -def calc_gen2_err(X, Y, Z, plane, cal, obs): - """Reprojection function for predicting an angle from sensor pose in lighthouse frame""" - - # Angle in the X-Z plane (about the +Y axis) sweeping counter-clockwise from +X. - # Note that we do a sign inversion on Z here to convert to a left-hand system, - # which is the convention adopted by the projection model. - ccwAngleAboutY = np.arctan2(-Z, X) - ccwAngleAboutX = np.arctan2(-Z, Y) - - # We know from the Vive documentation that the HFOV is 150 degrees and the VFOV is - # 110 degrees. It turns out that beyond this region results in numerical instability, - # so the best thing to do is try and steer the numerical solver closer to a solution. - # This basically nudges the solver towards the base station frustrum - - if ccwAngleAboutY < np.radians(15) or ccwAngleAboutY > np.radians(165): - return np.abs(ccwAngleAboutY - np.radians(90)) - if ccwAngleAboutX < np.radians(35) or ccwAngleAboutX > np.radians(145): - return np.abs(ccwAngleAboutX - np.radians(90)) - - # Distances between sensor and origin - B = np.sqrt(X * X + Z * Z) # L2 norm of (sensor - origin) in X-Z plane - C = np.sqrt(X * X + Y * Y + Z * Z) # L2 norm of (sensor - origin) - - # Tilt angle of the laser plane with respect to motor shaft, nominally 30 degrees - planeTiltAngle = cal["tilt"] + (-1 if plane else 1) * np.pi / 6. - - # CORRECTIONS FOR THE TILTED LASET PLANE - - # Viewed from any direction around the lighthouse: - #------------------------------------------------ - # The moment the laser plane strikes the sensing element (@). THe optical axis runs - # from the sensor to the midpoint of the lens. 
- # - # +Y - # (@)__A__| - # \ | Angle * = "planeTiltAngle" - # \ | - # R \ | Y/R = sin(*) "how much do I move along A as I move along R" - # \ | A/R = cos(*) "how much do I move along Y as I move along R" - # \*| A/Y = tan(*) "how much do I move along A as I move along Y" - # \| - # +W <----+ <------------ this is the optical axis of the laser - # | \ - # | \ <-------- laser plane - # | \ - # - A_over_R = np.sin(planeTiltAngle) - Y_over_R = np.cos(planeTiltAngle) - A_over_Y = np.tan(planeTiltAngle) - - # For a given Y coordinate, work out how much we'd need to be along A. Intuitively, A is the - # distance from the Y axis introduced by the fact that the plane is tilted with respect to Y. - A = A_over_Y * Y - - # Viewed from above the lighthouse: - # --------------------------------- - # Now that we know how far along A we've moved we can calculate the small angle error (*) as - # A / B = sin(#). Intuitively, this is the delta to the sweep angle that corrects for the - # plane tilt. In an ideal setting without fabrication errors, we could return the measurement - # here as ccwAngleAboutY + np.sin(A / B) and we'd be done! - # - # +X - # | - # | - # (@) | B = np.sqrt(X * X + Z * Z) - # A, ' \ | - # , ` 90 \ B | A/B = sin(#) "sweep angle error added by tilt" - # +W ', \ | - # '., \*| - # '-,# \| - # +Z <---------------------+ optical center - A_over_B = A / B - - # For convenience, let's create a new value here to capture the tilt-compensated angle - tiltCorrectedCcwAngleAboutY = ccwAngleAboutY - np.arcsin(A_over_B) - - # CORRECTIONS FOR LENS ERRORS - - # R is the distance from the sensor to the optical axis in the X-W plane, which is a - # frame that continually rotates about the lighthouse - R = Y / Y_over_R - - # A laser plane is interesting because unlike typical lens correction, which happens in - # a 2D image plane, this happens in 1D. So all that matters is deviation from the optical - # axis in 1D within the laser plane -- the further you move from the optical axis in the - # laser's plane, the more lens distortion you'd suffer. One neat thing about the 1D nature - # of the problem is that you need not model distortion as euclidean distance from the - # optical axis; you can just model it directly on the angle from the optical axis, which - # is proportional to the horizontal distance moved away from the optical axis :) - # - # (@) sin(*) = R / C - # /| "how much we've deviated from the optical axis" - # C / | - # / | R - # / * | - # optical center +--------------> +W laser optical axis - # - angleFromOpticalCenterOfLens = np.arcsin(R / C) - - # I don't exactly understand what this error is about, but it looks to be very similar to - # the mechanical error model. This suggests that there is some periodic component to the - # lens curvature that is a function of the motor spinning. My only guess is that the lens - # warps slightly as it rotates because of centripetal forces, and this correct for that. 
- # - # x' = x + a sin (x + b) - # | +-----+-----+ - # + | - # curve at zero | - # | - # + - # Fourier correction - centripetalCompensatedBaseCurvature = cal["curve"] + cal["ogeemag"] * np.sin( - tiltCorrectedCcwAngleAboutY + cal["ogeephase"]) +# ccwMotorAndLensAndTiltCorrectedAngleAboutY = ccwAngleAboutY - asin( +# A_over_B + lensCurvature / (Y_over_R - lensCurvatureFirstDerivative * A_over_R)) +# return Matrix([ccwMotorAndLensAndTiltCorrectedAngleAboutY]) + + +# def mech_model(ccwMotorAndLensAndTiltCorrectedAngleAboutY): +# return Matrix([ccwMotorAndLensAndTiltCorrectedAngleAboutY + sin(ccwMotorAndLensAndTiltCorrectedAngleAboutY + gibpha) * gibmag - phase - math.pi / 2.0]) + + +# # # Proj model +# # print("--------------") +# # Oproj = proj_model(X, Y, Z, 0) +# # Sproj = Matrix([X, Y, Z]) +# # Jproj = Oproj.jacobian(Sproj) +# # print("Jproj", simplify(Jproj)) + +# # # Lens model +# # print("--------------") +# # Olens = lens_model(ccwAngleAboutY, A_over_B, Y_over_R, A_over_R, R_over_C) +# # Slens = Matrix([ccwAngleAboutY, A_over_B, Y_over_R, A_over_R, R_over_C]) +# # Jlens = Olens.jacobian(Slens) +# # print("Jlens", Jlens) + +# # # Dist model +# # print("--------------") +# # Odist = lens_model(ccwAngleAboutY, A_over_B, Y_over_R, A_over_R, R_over_C) +# # Sdist = Matrix([ccwAngleAboutY, A_over_B, Y_over_R, A_over_R, R_over_C]) +# # Jdist = Odist.jacobian(Slens) +# # print("Jdist", Jdist) + +# # # Mech model +# # print("--------------") +# # Omech = mech_model(ccwMotorAndLensAndTiltCorrectedAngleAboutY) +# # Smech = Matrix([ccwMotorAndLensAndTiltCorrectedAngleAboutY]) +# # Jmech = Omech.jacobian(Smech) +# # print("Jmech", simplify(Jmech)) + +# import numpy as np + +# def calc_gen2_err(X, Y, Z, plane, cal, obs): +# """Reprojection function for predicting an angle from sensor pose in lighthouse frame""" + +# # Angle in the X-Z plane (about the +Y axis) sweeping counter-clockwise from +X. +# # Note that we do a sign inversion on Z here to convert to a left-hand system, +# # which is the convention adopted by the projection model. +# ccwAngleAboutY = np.arctan2(-Z, X) +# ccwAngleAboutX = np.arctan2(-Z, Y) + +# # We know from the Vive documentation that the HFOV is 150 degrees and the VFOV is +# # 110 degrees. It turns out that beyond this region results in numerical instability, +# # so the best thing to do is try and steer the numerical solver closer to a solution. +# # This basically nudges the solver towards the base station frustrum + +# if ccwAngleAboutY < np.radians(15) or ccwAngleAboutY > np.radians(165): +# return np.abs(ccwAngleAboutY - np.radians(90)) +# if ccwAngleAboutX < np.radians(35) or ccwAngleAboutX > np.radians(145): +# return np.abs(ccwAngleAboutX - np.radians(90)) + +# # Distances between sensor and origin +# B = np.sqrt(X * X + Z * Z) # L2 norm of (sensor - origin) in X-Z plane +# C = np.sqrt(X * X + Y * Y + Z * Z) # L2 norm of (sensor - origin) + +# # Tilt angle of the laser plane with respect to motor shaft, nominally 30 degrees +# planeTiltAngle = cal["tilt"] + (-1 if plane else 1) * np.pi / 6. + +# # CORRECTIONS FOR THE TILTED LASER PLANE + +# # Viewed from any direction around the lighthouse: +# #------------------------------------------------ +# # The moment the laser plane strikes the sensing element (@). THe optical axis runs +# # from the sensor to the midpoint of the lens. 
+# # +# # +Y +# # (@)__A__| +# # \ | Angle * = "planeTiltAngle" +# # \ | +# # R \ | Y/R = sin(*) "how much do I move along A as I move along R" +# # \ | A/R = cos(*) "how much do I move along Y as I move along R" +# # \*| A/Y = tan(*) "how much do I move along A as I move along Y" +# # \| +# # +W <----+ <------------ this is the optical axis of the laser +# # | \ +# # | \ <-------- laser plane +# # | \ +# # +# A_over_R = np.sin(planeTiltAngle) +# Y_over_R = np.cos(planeTiltAngle) +# A_over_Y = np.tan(planeTiltAngle) + +# # For a given Y coordinate, work out how much we'd need to be along A. Intuitively, A is the +# # distance from the Y axis introduced by the fact that the plane is tilted with respect to Y. +# A = A_over_Y * Y + +# # Viewed from above the lighthouse: +# # --------------------------------- +# # Now that we know how far along A we've moved we can calculate the small angle error (*) as +# # A / B = sin(#). Intuitively, this is the delta to the sweep angle that corrects for the +# # plane tilt. In an ideal setting without fabrication errors, we could return the measurement +# # here as ccwAngleAboutY + np.sin(A / B) and we'd be done! +# # +# # +X +# # | +# # | +# # (@) | B = np.sqrt(X * X + Z * Z) +# # A, ' \ | +# # , ` 90 \ B | A/B = sin(#) "sweep angle error added by tilt" +# # +W ', \ | +# # '., \*| +# # '-,# \| +# # +Z <---------------------+ optical center +# A_over_B = A / B + +# # For convenience, let's create a new value here to capture the tilt-compensated angle +# tiltCorrectedCcwAngleAboutY = ccwAngleAboutY - np.arcsin(A_over_B) + +# # CORRECTIONS FOR LENS ERRORS + +# # R is the distance from the sensor to the optical axis in the X-W plane, which is a +# # frame that continually rotates about the lighthouse +# R = Y / Y_over_R + +# # A laser plane is interesting because unlike typical lens correction, which happens in +# # a 2D image plane, this happens in 1D. So all that matters is deviation from the optical +# # axis in 1D within the laser plane -- the further you move from the optical axis in the +# # laser's plane, the more lens distortion you'd suffer. One neat thing about the 1D nature +# # of the problem is that you need not model distortion as euclidean distance from the +# # optical axis; you can just model it directly on the angle from the optical axis, which +# # is proportional to the horizontal distance moved away from the optical axis :) +# # +# # (@) sin(*) = R / C +# # /| "how much we've deviated from the optical axis" +# # C / | +# # / | R +# # / * | +# # optical center +--------------> +W laser optical axis +# # +# angleFromOpticalCenterOfLens = np.arcsin(R / C) + +# # I don't exactly understand what this error is about, but it looks to be very similar to +# # the mechanical error model. This suggests that there is some periodic component to the +# # lens curvature that is a function of the motor spinning. My only guess is that the lens +# # warps slightly as it rotates because of centripetal forces, and this correct for that. +# # +# # x' = x + a sin (x + b) +# # | +-----+-----+ +# # + | +# # curve at zero | +# # | +# # + +# # Fourier correction +# centripetalCompensatedBaseCurvature = cal["curve"] + cal["ogeemag"] * np.sin( +# tiltCorrectedCcwAngleAboutY + cal["ogeephase"]) - # This is a 5th order polynomial expression that corrects for lens distortion, but in stead - # of describing an error in distance from optical axis, it describes an error as a multiple - # of the base curvature of the lens. 
Something like final_curvature = f(x) * base_curvature. - # The polynomial function looks something like the graph below. Domain is [-pi/2, +pi/2]. - # - # | 4| | f(x) = a[0]x^5 + a[1]x^4 + a[2]x^3 + a[3]x^2 + a[4]x^1 + a[5] - # \ | / - # \ 2| / - # `-.__|__.-' - #---------------+-------------> +x - # -pi -2 0 +2 +pi - # - a = [-8.0108022e-06, 0.0028679863, 5.3685255000000001e-06, 0.0076069798000000001, 0, 0] - d = 0 # this ends up being f'(x) - b = a[0] # this ends up being f(x) - for i in range(1, 6): - d = d * angleFromOpticalCenterOfLens + b - b = b * angleFromOpticalCenterOfLens + a[i] - - # To make this easier, lets just multiply to get the curvature and its first derivative. - lensCurvature = b * centripetalCompensatedBaseCurvature - lensCurvatureFirstDerivative = d * centripetalCompensatedBaseCurvature - - # Here is where we put all the lens correction terms together. - # - # A lensCurvature - # asin ( - + --------------------------------------------- ) - # B Y / R - lensCurvatureFirstDerivative * A / R - # - # The dividend represents the deviation along A, which if course depends on how far you - # have shifted from the laser optical axis. The divisor itself must also shift by a - # proportional amount. - # - ccwLensCorrectedAngleAboutY = ccwAngleAboutY - np.arcsin( - A_over_B + lensCurvature / (Y_over_R - lensCurvatureFirstDerivative * A_over_R)) - - # CORRECTIONS FOR MECHANICAL ERRORS - - # Now that we have a tilt and lens-corrected angle we have to shift it by a small amount to - # compensate for mechanical vibration and lens mounting errors. We use a fourier term here - # that captures periodic error. In other words this error term repeats every 2pi, or one - # rotation about Y. Ultimately, it models the fact that the trajectory taken by the lens on - # an imperfect, imbalanced motorized system deviates from a perfect circle. - # - # x' = x + a sin (x + b) - c - # | +-----+-----+ | - # + | +--- Phase offset from zero. This is always 0 for axis = 0 - # raw angle | because that axis defines the start of the sweep. Axis - # | 1 has a small fabrication error around the motor spindle - # + that phase advances or delays with respect to X, and this - # Fourier correction term corrects for that. - # - ccwMotorAndLensAndTiltCorrectedAngleAboutY = ccwLensCorrectedAngleAboutY + np.sin( - ccwLensCorrectedAngleAboutY + cal["gibpha"]) * cal["gibmag"] - cal["phase"] - - # Shift the angle clockwise 90 degree to be described with respect to +Z, and not +X. - return np.abs(ccwMotorAndLensAndTiltCorrectedAngleAboutY - np.pi / 2.0 - obs) +# # This is a 5th order polynomial expression that corrects for lens distortion, but in stead +# # of describing an error in distance from optical axis, it describes an error as a multiple +# # of the base curvature of the lens. Something like final_curvature = f(x) * base_curvature. +# # The polynomial function looks something like the graph below. Domain is [-pi/2, +pi/2]. +# # +# # | 4| | f(x) = a[0]x^5 + a[1]x^4 + a[2]x^3 + a[3]x^2 + a[4]x^1 + a[5] +# # \ | / +# # \ 2| / +# # `-.__|__.-' +# #---------------+-------------> +x +# # -pi -2 0 +2 +pi +# # +# a = [-8.0108022e-06, 0.0028679863, 5.3685255000000001e-06, 0.0076069798000000001, 0, 0] +# d = 0 # this ends up being f'(x) +# b = a[0] # this ends up being f(x) +# for i in range(1, 6): +# d = d * angleFromOpticalCenterOfLens + b +# b = b * angleFromOpticalCenterOfLens + a[i] + +# # To make this easier, lets just multiply to get the curvature and its first derivative. 
+# lensCurvature = b * centripetalCompensatedBaseCurvature +# lensCurvatureFirstDerivative = d * centripetalCompensatedBaseCurvature + +# # Here is where we put all the lens correction terms together. +# # +# # A lensCurvature +# # asin ( - + --------------------------------------------- ) +# # B Y / R - lensCurvatureFirstDerivative * A / R +# # +# # The dividend represents the deviation along A, which if course depends on how far you +# # have shifted from the laser optical axis. The divisor itself must also shift by a +# # proportional amount. +# # +# ccwLensCorrectedAngleAboutY = ccwAngleAboutY - np.arcsin( +# A_over_B + lensCurvature / (Y_over_R - lensCurvatureFirstDerivative * A_over_R)) + +# # CORRECTIONS FOR MECHANICAL ERRORS + +# # Now that we have a tilt and lens-corrected angle we have to shift it by a small amount to +# # compensate for mechanical vibration and lens mounting errors. We use a fourier term here +# # that captures periodic error. In other words this error term repeats every 2pi, or one +# # rotation about Y. Ultimately, it models the fact that the trajectory taken by the lens on +# # an imperfect, imbalanced motorized system deviates from a perfect circle. +# # +# # x' = x + a sin (x + b) - c +# # | +-----+-----+ | +# # + | +--- Phase offset from zero. This is always 0 for axis = 0 +# # raw angle | because that axis defines the start of the sweep. Axis +# # | 1 has a small fabrication error around the motor spindle +# # + that phase advances or delays with respect to X, and this +# # Fourier correction term corrects for that. +# # +# ccwMotorAndLensAndTiltCorrectedAngleAboutY = ccwLensCorrectedAngleAboutY + np.sin( +# ccwLensCorrectedAngleAboutY + cal["gibpha"]) * cal["gibmag"] - cal["phase"] + +# # Shift the angle clockwise 90 degree to be described with respect to +Z, and not +X. +# return np.abs(ccwMotorAndLensAndTiltCorrectedAngleAboutY - np.pi / 2.0 - obs) + +# # subs = { +# # X: 0.1, +# # Y: 0.1, +# # Z: -2.0, +# # phase: 0.0, +# # tilt: 0.0, +# # curve: 0.0, +# # gibpha: 0.0, +# # gibmag: 0.0, +# # ogeephase: 0.0, +# # ogeemag: 0.0 +# # } +# # v = (Omech * Olens * Oproj).evalf(subs=subs) +# # print(v) +# #print(Omech * Olens * Oproj) + -# subs = { -# X: 0.1, -# Y: 0.1, -# Z: -2.0, -# phase: 0.0, -# tilt: 0.0, -# curve: 0.0, -# gibpha: 0.0, -# gibmag: 0.0, -# ogeephase: 0.0, -# ogeemag: 0.0 -# } -# v = (Omech * Olens * Oproj).evalf(subs=subs) -# print(v) -print(Omech * Olens * Oproj) # J = Jmech * Jlens * Jproj # print(J) diff --git a/scripts/simple_localizer b/scripts/simple_localizer index b15a35c..99f94a9 100755 --- a/scripts/simple_localizer +++ b/scripts/simple_localizer @@ -33,6 +33,9 @@ from functools import partial from typing import List, Optional import gtsam +from sympy import atan2, asin, sin, cos, tan, sqrt, Matrix, Symbol, simplify, compose, init_printing +from sympy.abc import x, y, z + ## DATA HANDLERS def get_rosbag_options(path, storage_id, serialization_format='cdr'): @@ -151,34 +154,7 @@ def get_all_light_bundles(reader, namespace, duration): }) return bundles -## REFERENCE IMPLEMENTATION - -def calc_cal_series(s): - f = [-8.0108022e-06, 0.0028679863, 5.3685255000000001e-06, 0.0076069798000000001, 0, 0] - m = f[0] - a = 0 - for i in range(0, 6): - a = a * s + m - m = m * s + f[i] - return m, a - -def calc_gen2_error_orig(X, Y, Z, axis, cal, obs): - B = np.arctan2(Z, X) - Ydeg = cal["tilt"] + (-1 if axis else 1) * np.pi / 6. 
- asinArg = np.tan(Ydeg) * Y / np.sqrt(X * X + Z * Z) - sinYdeg = np.sin(Ydeg) - cosYdeg = np.cos(Ydeg) - sinPart = np.sin(B - np.arcsin(asinArg) + cal["ogeephase"]) * cal["ogeemag"] - modAsinArg = Y / np.sqrt(X * X + Y * Y + Z * Z) / cosYdeg - asinOut = np.arcsin(modAsinArg) - mod, acc = calc_cal_series(asinOut) - BcalCurved = sinPart + cal["curve"] - asinArg2 = asinArg + mod * BcalCurved / (cosYdeg - acc * BcalCurved * sinYdeg) - asinOut2 = np.arcsin(asinArg2) - sinOut2 = np.sin(B - asinOut2 + cal["gibpha"]) - return B - asinOut2 + sinOut2 * cal["gibmag"] - cal["phase"] - np.pi / 2. - obs - -## MORE HUMAN READABLE IMPLEMENTATION +# PROJECTION MODEL def calc_gen2_err(X, Y, Z, plane, cal, obs): """Reprojection function for predicting an angle from sensor pose in lighthouse frame""" @@ -354,11 +330,8 @@ def calc_gen2_err(X, Y, Z, plane, cal, obs): # So th -def calc_gen2_jac(): - - def light_error(calibration: dict, - t_sensor: np.ndarray, + t_sensor: gtsam.Point3, axis: float, angle: float, this: gtsam.CustomFactor, @@ -368,16 +341,51 @@ def light_error(calibration: dict, pose_key = this.keys()[0] lTt = values.atPose3(pose_key) + # Calculate the self-jacobian, given the point to be transformed + q = lTt.rotation().inverse().rotate(t_sensor - lTt.translation()) + wx = q[0] + wy = q[1] + wz = q[2] + J = np.array([ + [0.0, -wz, +wy,-1.0, 0.0, 0.0], + [+wz, 0.0, -wx, 0.0,-1.0, 0.0], + [-wy, +wx, 0.0, 0.0, 0.0,-1.0]]) + # Transform the sensor into the lighthouse frame by the proposed pose - l_sensor = lTt.transformTo(gtsam.Point3(t_sensor[0], t_sensor[1], t_sensor[2])) - - # - error = calc_gen2_err(l_sensor[0], l_sensor[1], l_sensor[2], axis, calibration, angle) + l_sensor = lTt.transformTo(t_sensor) + + # Extract x, y, z coord in the RHS lighthouse frame + x = l_sensor[0] + y = l_sensor[1] + z = l_sensor[2] + + # HFOV is 150 degrees - if we are outside of this then we need to construct a gradient + # that steers the solver towards the frustrum. + ccwAngleAboutY = np.arctan2(-z, x) - np.pi / 2.0 + if np.abs(ccwAngleAboutY) > np.radians(150.0 / 2): + print("Y", np.degrees(ccwAngleAboutY)) + if jacobians is not None: + jacobians[0] = np.matmul(np.array([[z/(x**2 + z**2), 0, -x/(x**2 + z**2)]]), J) + return np.array([ccwAngleAboutY]) + + # VFOV is 110 degrees - if we are outside of this then we need to construct a gradient + # that steers the solver towards the frustrum. + ccwAngleAboutX = np.arctan2(-z, y) - np.pi / 2.0 + if np.abs(ccwAngleAboutX) > np.radians(110.0 / 2): + print("X", np.degrees(ccwAngleAboutX)) + if jacobians is not None: + jacobians[0] = np.matmul(np.array([[0, z/(y**2 + z**2), -y/(y**2 + z**2)]]), J) + return np.array([ccwAngleAboutX]) + + # If we get here then the sensor is in the camera frustrum, now we can actually do the + # hard work of calculating the projection error. + err = calc_gen2_err(l_sensor[0], l_sensor[1], l_sensor[2], axis, calibration, angle) + jac = calc_gen2_jac(axis, calibration, angle) - # Calculate dZ/dX, a 1x6 matrix expressing the error state as a function of the pose + # Calculate the sweep angle S with repect to the pose as dS/dP = dS/dX * dX/dP using the + # chain rule, where X is the point of the sensor in the lighthouse frame. 
if jacobians is not None: - jacobians[0] = calc_gen2_jac() - + jacobians[0] = np.matmul(np.array([[1, 1, 1]]), J) return np.array([error]) @@ -392,19 +400,25 @@ def calculate_poses(lighthouses, trackers, bundles): factor_graph = gtsam.NonlinearFactorGraph() initial_estimate = gtsam.Values() initial_estimate.insert(0, gtsam.Pose3( - gtsam.Rot3.Rodrigues(0.0, 0.0, 0.0), gtsam.Point3(0.0, 0.0, -3.0))) + gtsam.Rot3.Rodrigues(0.0, 0.0, 0.0), gtsam.Point3(0.0, 0.0, 1.0))) light_model = gtsam.noiseModel.Isotropic.Sigma(1, 0.000001) for data in bundle: factor_graph.add(gtsam.CustomFactor( light_model, [0], partial(light_error, lighthouses[channel_id][data["plane"]], - trackers[tracker_id][data["sensor"]]["point"], + gtsam.Point3( + trackers[tracker_id][data["sensor"]]["point"][0], + trackers[tracker_id][data["sensor"]]["point"][1], + trackers[tracker_id][data["sensor"]]["point"][2] + ), data["plane"], data["angle"]))) + factor_graph.print("Graph:") + initial_estimate.print("Initial estimate:") params = gtsam.LevenbergMarquardtParams() optimizer = gtsam.LevenbergMarquardtOptimizer(factor_graph, initial_estimate, params) result = optimizer.optimize() - result.print("result") + result.print("Result:") return # sensor_pt = pt.transform(pose, trackers[tracker_id][sensor_id]["point"]) From 77c1d994f57ec9f4e0e31864278a56b3b88e7e30 Mon Sep 17 00:00:00 2001 From: Andrew Symington Date: Sun, 27 Aug 2023 21:29:46 -0700 Subject: [PATCH 17/18] WIP --- scripts/simple_localizer | 95 ++++++++++++++++++++++++---------------- 1 file changed, 57 insertions(+), 38 deletions(-) diff --git a/scripts/simple_localizer b/scripts/simple_localizer index 99f94a9..1f9ac06 100755 --- a/scripts/simple_localizer +++ b/scripts/simple_localizer @@ -33,8 +33,19 @@ from functools import partial from typing import List, Optional import gtsam -from sympy import atan2, asin, sin, cos, tan, sqrt, Matrix, Symbol, simplify, compose, init_printing -from sympy.abc import x, y, z +from sympy import atan2, asin, sin, cos, tan, sqrt, Matrix, Symbol +from sympy.abc import X, Y, Z + +# Additional symbols +axis = Symbol("axis") +tilt = Symbol("tilt") +ogeephase = Symbol("ogeephase") +ogeemag = Symbol("ogeemag") +curve = Symbol("curve") +gibpha = Symbol("gibpha") +gibmag = Symbol("gibmag") +phase = Symbol("phase") +obs = Symbol("obs") ## DATA HANDLERS @@ -156,31 +167,20 @@ def get_all_light_bundles(reader, namespace, duration): # PROJECTION MODEL -def calc_gen2_err(X, Y, Z, plane, cal, obs): +def gen2_pred(X, Y, Z, axis, tilt, curve, ogeemag, ogeephase, gibpha, gibmag, phase, obs): """Reprojection function for predicting an angle from sensor pose in lighthouse frame""" # Angle in the X-Z plane (about the +Y axis) sweeping counter-clockwise from +X. # Note that we do a sign inversion on Z here to convert to a left-hand system, # which is the convention adopted by the projection model. - ccwAngleAboutY = np.arctan2(-Z, X) - ccwAngleAboutX = np.arctan2(-Z, Y) - - # We know from the Vive documentation that the HFOV is 150 degrees and the VFOV is - # 110 degrees. It turns out that beyond this region results in numerical instability, - # so the best thing to do is try and steer the numerical solver closer to a solution. 
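The factor-graph and Levenberg-Marquardt pattern used in calculate_poses above can be exercised on its own, independently of the light model; a minimal sketch in which a plain prior factor stands in for the custom light factor (the key, target pose and noise sigma are illustrative values only):

import gtsam

graph = gtsam.NonlinearFactorGraph()
noise = gtsam.noiseModel.Isotropic.Sigma(6, 0.01)
target = gtsam.Pose3(gtsam.Rot3(), gtsam.Point3(0.0, 0.0, -3.0))
graph.add(gtsam.PriorFactorPose3(0, target, noise))      # one factor on key 0

initial = gtsam.Values()
initial.insert(0, gtsam.Pose3())                         # start at the identity pose

params = gtsam.LevenbergMarquardtParams()
result = gtsam.LevenbergMarquardtOptimizer(graph, initial, params).optimize()
result.print("Result:")                                  # should recover 'target'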
- # This basically nudges the solver towards the base station frustrum - - if ccwAngleAboutY < np.radians(15) or ccwAngleAboutY > np.radians(165): - return np.abs(ccwAngleAboutY - np.radians(90)) - if ccwAngleAboutX < np.radians(35) or ccwAngleAboutX > np.radians(145): - return np.abs(ccwAngleAboutX - np.radians(90)) + ccwAngleAboutY = atan2(-Z, X) # Distances between sensor and origin - B = np.sqrt(X * X + Z * Z) # L2 norm of (sensor - origin) in X-Z plane - C = np.sqrt(X * X + Y * Y + Z * Z) # L2 norm of (sensor - origin) + B = sqrt(X * X + Z * Z) # L2 norm of (sensor - origin) in X-Z plane + C = sqrt(X * X + Y * Y + Z * Z) # L2 norm of (sensor - origin) - # Tilt angle of the laser plane with respect to motor shaft, nominally 30 degrees - planeTiltAngle = cal["tilt"] + (-1 if plane else 1) * np.pi / 6. + # Tilt angle of the laser axis with respect to motor shaft, nominally 30 degrees + planeTiltAngle = tilt + (-1 if axis else 1) * np.pi / 6. # CORRECTIONS FOR THE TILTED LASET PLANE @@ -202,9 +202,9 @@ def calc_gen2_err(X, Y, Z, plane, cal, obs): # | \ <-------- laser plane # | \ # - A_over_R = np.sin(planeTiltAngle) - Y_over_R = np.cos(planeTiltAngle) - A_over_Y = np.tan(planeTiltAngle) + A_over_R = sin(planeTiltAngle) + Y_over_R = cos(planeTiltAngle) + A_over_Y = tan(planeTiltAngle) # For a given Y coordinate, work out how much we'd need to be along A. Intuitively, A is the # distance from the Y axis introduced by the fact that the plane is tilted with respect to Y. @@ -230,7 +230,7 @@ def calc_gen2_err(X, Y, Z, plane, cal, obs): A_over_B = A / B # For convenience, let's create a new value here to capture the tilt-compensated angle - tiltCorrectedCcwAngleAboutY = ccwAngleAboutY - np.arcsin(A_over_B) + tiltCorrectedCcwAngleAboutY = ccwAngleAboutY - asin(A_over_B) # CORRECTIONS FOR LENS ERRORS @@ -253,7 +253,7 @@ def calc_gen2_err(X, Y, Z, plane, cal, obs): # / * | # optical center +--------------> +W laser optical axis # - angleFromOpticalCenterOfLens = np.arcsin(R / C) + angleFromOpticalCenterOfLens = asin(R / C) # I don't exactly understand what this error is about, but it looks to be very similar to # the mechanical error model. This suggests that there is some periodic component to the @@ -267,8 +267,8 @@ def calc_gen2_err(X, Y, Z, plane, cal, obs): # | # + # Fourier correction - centripetalCompensatedBaseCurvature = cal["curve"] + cal["ogeemag"] * np.sin( - tiltCorrectedCcwAngleAboutY + cal["ogeephase"]) + centripetalCompensatedBaseCurvature = curve + ogeemag * sin( + tiltCorrectedCcwAngleAboutY + ogeephase) # This is a 5th order polynomial expression that corrects for lens distortion, but in stead # of describing an error in distance from optical axis, it describes an error as a multiple @@ -303,7 +303,7 @@ def calc_gen2_err(X, Y, Z, plane, cal, obs): # have shifted from the laser optical axis. The divisor itself must also shift by a # proportional amount. # - ccwLensCorrectedAngleAboutY = ccwAngleAboutY - np.arcsin( + ccwLensCorrectedAngleAboutY = ccwAngleAboutY - asin( A_over_B + lensCurvature / (Y_over_R - lensCurvatureFirstDerivative * A_over_R)) # CORRECTIONS FOR MECHANICAL ERRORS @@ -322,13 +322,15 @@ def calc_gen2_err(X, Y, Z, plane, cal, obs): # + that phase advances or delays with respect to X, and this # Fourier correction term corrects for that. 
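Rewriting the model with sympy primitives is what enables the light_fun / light_jac / lambdify machinery below: the sweep-angle expression can be differentiated symbolically and then compiled into fast numpy callables. A toy version of that workflow, reduced to the uncorrected sweep angle and using assumed inputs:

import numpy as np
from sympy import Matrix, atan2, lambdify
from sympy.abc import X, Y, Z

expr = Matrix([atan2(-Z, X) - np.pi / 2])        # uncorrected sweep angle about +Y
jac = expr.jacobian(Matrix([X, Y, Z]))           # 1x3 symbolic derivative

f = lambdify([X, Y, Z], expr, "numpy")           # numeric prediction
J = lambdify([X, Y, Z], jac, "numpy")            # numeric jacobian

print(f(0.1, 0.1, -2.0))                         # predicted angle in radians
print(J(0.1, 0.1, -2.0))                         # [[dS/dX, dS/dY, dS/dZ]]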
# - ccwMotorAndLensAndTiltCorrectedAngleAboutY = ccwLensCorrectedAngleAboutY + np.sin( - ccwLensCorrectedAngleAboutY + cal["gibpha"]) * cal["gibmag"] - cal["phase"] + ccwMotorAndLensAndTiltCorrectedAngleAboutY = ccwLensCorrectedAngleAboutY + sin( + ccwLensCorrectedAngleAboutY + gibpha) * gibmag - phase # Shift the angle clockwise 90 degree to be described with respect to +Z, and not +X. - return np.abs(ccwMotorAndLensAndTiltCorrectedAngleAboutY - np.pi / 2.0 - obs) + return Matrix([ccwMotorAndLensAndTiltCorrectedAngleAboutY - np.pi / 2.0 - obs]) -# So th +# Predict the sweep angle, given the sensor position, axis and lighthouse calibration +light_fun = gen2_pred(X, Y, Z, axis, tilt, curve, ogeemag, ogeephase, gibpha, gibmag, phase, obs) +light_jac = light_fun.jacobian(Matrix([X, Y, Z])) def light_error(calibration: dict, t_sensor: gtsam.Point3, @@ -378,16 +380,33 @@ def light_error(calibration: dict, return np.array([ccwAngleAboutX]) # If we get here then the sensor is in the camera frustrum, now we can actually do the - # hard work of calculating the projection error. - err = calc_gen2_err(l_sensor[0], l_sensor[1], l_sensor[2], axis, calibration, angle) - jac = calc_gen2_jac(axis, calibration, angle) - + # hard work of calculating the projection error. This is a list of subsistutions for + # named variables in a symbolic expression. + subs = { + X: x, + Y: y, + Z: z, + axis: axis, + tilt: calibration["tilt"], + curve: calibration["curve"], + ogeemag: calibration["ogeemag"], + ogeephase: calibration["ogeephase"], + gibpha: calibration["gibpha"], + gibmag: calibration["gibmag"], + phase: calibration["phase"], + obs: angle + } + + # The residual error is the predicted angle less the observed angle + error = np.array(light_fun.evalf(subs=subs), dtype=float) + print(error) + # Calculate the sweep angle S with repect to the pose as dS/dP = dS/dX * dX/dP using the # chain rule, where X is the point of the sensor in the lighthouse frame. 
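    # (Shape bookkeeping for the product below: light_jac evaluates to a 1x3 row, dS/dX,
    # the sweep angle with respect to the sensor position in the lighthouse frame, and J
    # is the 3x6 matrix dX/dP built from the pose above, so the product is the 1x6
    # jacobian GTSAM expects for a scalar error attached to a single Pose3 key.)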
if jacobians is not None: - jacobians[0] = np.matmul(np.array([[1, 1, 1]]), J) + jacobians[0] = np.matmul(np.array(light_jac.evalf(subs=subs), dtype=float), J) - return np.array([error]) + return error def calculate_poses(lighthouses, trackers, bundles): """Iterates through tracker/lighthouses and calculates poses""" @@ -400,7 +419,7 @@ def calculate_poses(lighthouses, trackers, bundles): factor_graph = gtsam.NonlinearFactorGraph() initial_estimate = gtsam.Values() initial_estimate.insert(0, gtsam.Pose3( - gtsam.Rot3.Rodrigues(0.0, 0.0, 0.0), gtsam.Point3(0.0, 0.0, 1.0))) + gtsam.Rot3.Rodrigues(0.0, 0.0, 0.0), gtsam.Point3(0.0, 0.0, -3.0))) light_model = gtsam.noiseModel.Isotropic.Sigma(1, 0.000001) for data in bundle: factor_graph.add(gtsam.CustomFactor( From 3b1786bcce0ce78aafdd876ad9dc9d53d9cb8493 Mon Sep 17 00:00:00 2001 From: Andrew Symington Date: Sun, 27 Aug 2023 23:15:57 -0700 Subject: [PATCH 18/18] Finally convergence but maybe not perfect --- scripts/jacobianfun | 63 +++++++++++- scripts/simple_localizer | 213 ++++++++++++++++----------------------- 2 files changed, 151 insertions(+), 125 deletions(-) diff --git a/scripts/jacobianfun b/scripts/jacobianfun index 1da93a3..27f3638 100755 --- a/scripts/jacobianfun +++ b/scripts/jacobianfun @@ -461,4 +461,65 @@ print(J) # J = observation.jacobian(state) -# print(J) \ No newline at end of file +# print(J) + + +# sensor_pt = pt.transform(pose, trackers[tracker_id][sensor_id]["point"]) +# weights[i] += calc_gen2_error_corr( +# sensor_pt[0], +# sensor_pt[1], +# -sensor_pt[2], +# plane_id, +# lighthouses[channel_id][plane_id], +# meas_angle) +## Run particle filter +# Nparticles = 1000 +# Nrefresh = 0 +# Niteration = 10 +# init_mean = pt.transform_from(R=np.eye(3), p=np.array([0.0, 0.0, 0.0])) +# init_cov = np.diag([36.0, 36.0, 36.0, 0.1, 0.1, 0.1]) +# inno_mean = pt.transform_from(R=np.eye(3), p=np.array([0.0, 0.0, 0.0])) +# inno_cov = 1.0e-3 * np.diag(np.ones(6)) +# rng = np.random.default_rng(0) +# particles = np.array([ +# pt.random_transform(rng=rng, mean=init_mean, cov=init_cov) for _ in range(Nparticles)]) +# weights = np.zeros(Nparticles) +# for iter in range(Niteration): +# for i, pose in enumerate(particles): +# weights[i] = 0 +# for data in bundle: +# sensor_id = data["sensor"] +# plane_id = data["plane"] +# meas_angle = data["angle"] +# sensor_pt = pt.transform(pose, trackers[tracker_id][sensor_id]["point"]) +# weights[i] += calc_gen2_error_corr( +# sensor_pt[0], +# sensor_pt[1], +# -sensor_pt[2], +# plane_id, +# lighthouses[channel_id][plane_id], +# meas_angle) +# print(f"++ Best cost at {iter} is", np.min(weights)) +# weights = np.reciprocal(weights) +# weights /= np.sum(weights) +# weights = np.cumsum(weights) +# probs = np.random.random(size=Nparticles) +# particles = np.array([ +# pt.concat( +# pt.random_transform(rng=rng, mean=inno_mean, cov=inno_cov), +# particles[np.argmax(weights > prob)]) for prob in probs]) +# if Nrefresh > 0: +# particles[0:Nrefresh] = np.array([ +# pt.random_transform(rng=rng, mean=init_mean, cov=init_cov) +# for _ in range(Nrefresh)]) +# pos = np.zeros((Nparticles, 3)) +# rot = np.zeros((Nparticles, 4)) +# for i in range(Nparticles): +# pos[i, :] = pt.pq_from_transform(particles[i])[0:3] +# rot[i, :] = pt.pq_from_transform(particles[i])[3:7] +# pos_mean = np.average(pos, weights=weights, axis=0) +# pos_var = np.average((pos - pos_mean)**2, weights=weights, axis=0) +# rot_mean = np.average(rot, weights=weights, axis=0) +# rot_var = np.average((rot - rot_mean)**2, weights=weights, axis=0) +# 
print("pos", pos_mean, np.sqrt(pos_var)) +# print("rot", rot_mean, np.sqrt(rot_var)) diff --git a/scripts/simple_localizer b/scripts/simple_localizer index 1f9ac06..eeec125 100755 --- a/scripts/simple_localizer +++ b/scripts/simple_localizer @@ -22,6 +22,7 @@ import argparse import numpy as np +import time import pytransform3d.transformations as pt import rosbag2_py @@ -33,19 +34,9 @@ from functools import partial from typing import List, Optional import gtsam -from sympy import atan2, asin, sin, cos, tan, sqrt, Matrix, Symbol +from sympy import atan2, asin, sin, cos, tan, pi, sqrt, Matrix, Symbol from sympy.abc import X, Y, Z - -# Additional symbols -axis = Symbol("axis") -tilt = Symbol("tilt") -ogeephase = Symbol("ogeephase") -ogeemag = Symbol("ogeemag") -curve = Symbol("curve") -gibpha = Symbol("gibpha") -gibmag = Symbol("gibmag") -phase = Symbol("phase") -obs = Symbol("obs") +from sympy.utilities.lambdify import lambdify ## DATA HANDLERS @@ -115,7 +106,7 @@ def get_all_trackers(reader, namespace, duration): trackers[tracker_id] = {} for channel, point, normal in zip(msg.channels, msg.points, msg.normals): trackers[tracker_id][channel] = { - "point" : np.array([point.x, point.y, point.z, 1.0]), + "point" : np.array([point.x, point.y, point.z]), "normal" : np.array([normal.x, normal.y, normal.z]), } return trackers @@ -165,9 +156,21 @@ def get_all_light_bundles(reader, namespace, duration): }) return bundles -# PROJECTION MODEL +# ADDITIONAL SYMBOLS + +axis = Symbol("axis") +tilt = Symbol("tilt") +ogeephase = Symbol("ogeephase") +ogeemag = Symbol("ogeemag") +curve = Symbol("curve") +gibpha = Symbol("gibpha") +gibmag = Symbol("gibmag") +phase = Symbol("phase") +obs = Symbol("obs") + +# LIGHTHOUS GEN2 PREDICTION MODEL -def gen2_pred(X, Y, Z, axis, tilt, curve, ogeemag, ogeephase, gibpha, gibmag, phase, obs): +def gen2_pred(X, Y, Z, axis, tilt, curve, ogeemag, ogeephase, gibpha, gibmag, phase): """Reprojection function for predicting an angle from sensor pose in lighthouse frame""" # Angle in the X-Z plane (about the +Y axis) sweeping counter-clockwise from +X. @@ -180,7 +183,7 @@ def gen2_pred(X, Y, Z, axis, tilt, curve, ogeemag, ogeephase, gibpha, gibmag, ph C = sqrt(X * X + Y * Y + Z * Z) # L2 norm of (sensor - origin) # Tilt angle of the laser axis with respect to motor shaft, nominally 30 degrees - planeTiltAngle = tilt + (-1 if axis else 1) * np.pi / 6. + planeTiltAngle = tilt + (-1 if axis else 1) * pi / 6 # CORRECTIONS FOR THE TILTED LASET PLANE @@ -326,37 +329,48 @@ def gen2_pred(X, Y, Z, axis, tilt, curve, ogeemag, ogeephase, gibpha, gibmag, ph ccwLensCorrectedAngleAboutY + gibpha) * gibmag - phase # Shift the angle clockwise 90 degree to be described with respect to +Z, and not +X. 
- return Matrix([ccwMotorAndLensAndTiltCorrectedAngleAboutY - np.pi / 2.0 - obs]) + return Matrix([ccwMotorAndLensAndTiltCorrectedAngleAboutY - pi / 2]) + +# SYMBOLIC EVALUATION, JACOBIAN AND LAMBDIFICATION # Predict the sweep angle, given the sensor position, axis and lighthouse calibration -light_fun = gen2_pred(X, Y, Z, axis, tilt, curve, ogeemag, ogeephase, gibpha, gibmag, phase, obs) +light_fun = gen2_pred(X, Y, Z, axis, tilt, curve, ogeemag, ogeephase, gibpha, gibmag, phase) +light_fun_lambda = lambdify( + [X, Y, Z, axis, tilt, curve, ogeemag, ogeephase, gibpha, gibmag, phase], light_fun, "numpy") + +# Get the derivative of the sweep angle with respect to the sensor position light_jac = light_fun.jacobian(Matrix([X, Y, Z])) +light_jac_lambda = lambdify( + [X, Y, Z, axis, tilt, curve, ogeemag, ogeephase, gibpha, gibmag, phase], light_jac, "numpy") -def light_error(calibration: dict, +# LIGHT FACTOR + +def gen2_factor(calibration: dict, t_sensor: gtsam.Point3, axis: float, angle: float, this: gtsam.CustomFactor, values: gtsam.Values, jacobians: Optional[List[np.ndarray]]) -> float: + # Extract the pose from the attached keys pose_key = this.keys()[0] lTt = values.atPose3(pose_key) # Calculate the self-jacobian, given the point to be transformed - q = lTt.rotation().inverse().rotate(t_sensor - lTt.translation()) - wx = q[0] - wy = q[1] - wz = q[2] - J = np.array([ - [0.0, -wz, +wy,-1.0, 0.0, 0.0], - [+wz, 0.0, -wx, 0.0,-1.0, 0.0], - [-wy, +wx, 0.0, 0.0, 0.0,-1.0]]) - - # Transform the sensor into the lighthouse frame by the proposed pose - l_sensor = lTt.transformTo(t_sensor) - - # Extract x, y, z coord in the RHS lighthouse frame + wx = -t_sensor[0] + wy = -t_sensor[1] + wz = -t_sensor[2] + skew_symmetric_matrix = np.array([ + [0.0, -wz, +wy], + [+wz, 0.0, -wx], + [-wy, +wx, 0.0]]) + J = np.concatenate(( + np.matmul(lTt.rotation().matrix(), skew_symmetric_matrix), + lTt.rotation().matrix()), axis=1) + + # Transform the sensor into the lighthouse frame by the proposed pose and extract coord + l_sensor = lTt.transformFrom(t_sensor) x = l_sensor[0] y = l_sensor[1] z = l_sensor[2] @@ -365,7 +379,6 @@ def light_error(calibration: dict, # that steers the solver towards the frustrum. ccwAngleAboutY = np.arctan2(-z, x) - np.pi / 2.0 if np.abs(ccwAngleAboutY) > np.radians(150.0 / 2): - print("Y", np.degrees(ccwAngleAboutY)) if jacobians is not None: jacobians[0] = np.matmul(np.array([[z/(x**2 + z**2), 0, -x/(x**2 + z**2)]]), J) return np.array([ccwAngleAboutY]) @@ -374,39 +387,42 @@ def light_error(calibration: dict, # that steers the solver towards the frustrum. ccwAngleAboutX = np.arctan2(-z, y) - np.pi / 2.0 if np.abs(ccwAngleAboutX) > np.radians(110.0 / 2): - print("X", np.degrees(ccwAngleAboutX)) if jacobians is not None: jacobians[0] = np.matmul(np.array([[0, z/(y**2 + z**2), -y/(y**2 + z**2)]]), J) return np.array([ccwAngleAboutX]) - # If we get here then the sensor is in the camera frustrum, now we can actually do the - # hard work of calculating the projection error. This is a list of subsistutions for - # named variables in a symbolic expression. 
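To see why this patch replaces evalf(subs=...) with lambdify, here is a minimal, self-contained comparison on a toy expression; both paths return the same number, but the lambdified version is a plain numpy callable that is cheap to call inside the factor, which is the role of light_fun_lambda and light_jac_lambda above. The toy names are illustrative.

from sympy import Matrix, atan2, pi, lambdify
from sympy.abc import X, Y, Z

toy_pred = Matrix([atan2(-Z, X) - pi / 2])

# Old style: substitute values into the symbolic expression on every call.
slow = float(toy_pred.evalf(subs={X: 1.0, Y: 0.0, Z: -1.0})[0])

# New style: compile the expression once into a numpy function, then just call it.
toy_fun = lambdify([X, Y, Z], toy_pred, "numpy")
fast = float(toy_fun(1.0, 0.0, -1.0)[0, 0])

assert abs(slow - fast) < 1e-12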
-    subs = {
-        X: x,
-        Y: y,
-        Z: z,
-        axis: axis,
-        tilt: calibration["tilt"],
-        curve: calibration["curve"],
-        ogeemag: calibration["ogeemag"],
-        ogeephase: calibration["ogeephase"],
-        gibpha: calibration["gibpha"],
-        gibmag: calibration["gibmag"],
-        phase: calibration["phase"],
-        obs: angle
-    }
-
-    # The residual error is the predicted angle less the observed angle
-    error = np.array(light_fun.evalf(subs=subs), dtype=float)
-    print(error)
+    err = light_fun_lambda(
+        X=x,
+        Y=y,
+        Z=z,
+        axis=axis,
+        tilt=calibration["tilt"],
+        curve=calibration["curve"],
+        ogeemag=calibration["ogeemag"],
+        ogeephase=calibration["ogeephase"],
+        gibpha=calibration["gibpha"],
+        gibmag=calibration["gibmag"],
+        phase=calibration["phase"])

     # Calculate the sweep angle S with respect to the pose as dS/dP = dS/dX * dX/dP using the
     # chain rule, where X is the point of the sensor in the lighthouse frame.
     if jacobians is not None:
-        jacobians[0] = np.matmul(np.array(light_jac.evalf(subs=subs), dtype=float), J)
-
-    return error
+        jac = light_jac_lambda(
+            X=x,
+            Y=y,
+            Z=z,
+            axis=axis,
+            tilt=calibration["tilt"],
+            curve=calibration["curve"],
+            ogeemag=calibration["ogeemag"],
+            ogeephase=calibration["ogeephase"],
+            gibpha=calibration["gibpha"],
+            gibmag=calibration["gibmag"],
+            phase=calibration["phase"])
+        jacobians[0] = np.matmul(jac, J)
+
+    return err - angle

 def calculate_poses(lighthouses, trackers, bundles):
     """Iterates through tracker/lighthouses and calculates poses"""
@@ -419,11 +435,23 @@ def calculate_poses(lighthouses, trackers, bundles):
         factor_graph = gtsam.NonlinearFactorGraph()
         initial_estimate = gtsam.Values()
         initial_estimate.insert(0, gtsam.Pose3(
-            gtsam.Rot3.Rodrigues(0.0, 0.0, 0.0), gtsam.Point3(0.0, 0.0, -3.0)))
-        light_model = gtsam.noiseModel.Isotropic.Sigma(1, 0.000001)
+            gtsam.Rot3.Rodrigues(0.0, 0.0, 0.0), gtsam.Point3(0.0, 0.0, -1.0)))
+        # factor_graph.add(
+        #     gtsam.PriorFactorPose3(
+        #         0,
+        #         gtsam.Pose3(
+        #             gtsam.Rot3.Rodrigues(0.0, 0.0, 0.0),
+        #             gtsam.Point3(0.0, 0.0, -0.6)
+        #         ),
+        #         gtsam.noiseModel.Diagonal.Variances(np.array([9, 9, 9, 36, 36, 36]))))
+
+        # This value is based on Alan Yates' tech presentation, where he stated that the
+        # resolution of one sweep is 800 000 ticks, which means that we can't resolve
+        # angles finer than one tick, which at 60Hz is about 7.85398e-7 radians.
+ light_model = gtsam.noiseModel.Isotropic.Sigma(1, 7.85398e-7) for data in bundle: factor_graph.add(gtsam.CustomFactor( - light_model, [0], partial(light_error, + light_model, [0], partial(gen2_factor, lighthouses[channel_id][data["plane"]], gtsam.Point3( trackers[tracker_id][data["sensor"]]["point"][0], @@ -432,74 +460,11 @@ def calculate_poses(lighthouses, trackers, bundles): ), data["plane"], data["angle"]))) - factor_graph.print("Graph:") - initial_estimate.print("Initial estimate:") params = gtsam.LevenbergMarquardtParams() + params.setVerbosityLM("SUMMARY") optimizer = gtsam.LevenbergMarquardtOptimizer(factor_graph, initial_estimate, params) result = optimizer.optimize() result.print("Result:") - return - - # sensor_pt = pt.transform(pose, trackers[tracker_id][sensor_id]["point"]) - # weights[i] += calc_gen2_error_corr( - # sensor_pt[0], - # sensor_pt[1], - # -sensor_pt[2], - # plane_id, - # lighthouses[channel_id][plane_id], - # meas_angle) - ## Run particle filter - # Nparticles = 1000 - # Nrefresh = 0 - # Niteration = 10 - # init_mean = pt.transform_from(R=np.eye(3), p=np.array([0.0, 0.0, 0.0])) - # init_cov = np.diag([36.0, 36.0, 36.0, 0.1, 0.1, 0.1]) - # inno_mean = pt.transform_from(R=np.eye(3), p=np.array([0.0, 0.0, 0.0])) - # inno_cov = 1.0e-3 * np.diag(np.ones(6)) - # rng = np.random.default_rng(0) - # particles = np.array([ - # pt.random_transform(rng=rng, mean=init_mean, cov=init_cov) for _ in range(Nparticles)]) - # weights = np.zeros(Nparticles) - # for iter in range(Niteration): - # for i, pose in enumerate(particles): - # weights[i] = 0 - # for data in bundle: - # sensor_id = data["sensor"] - # plane_id = data["plane"] - # meas_angle = data["angle"] - # sensor_pt = pt.transform(pose, trackers[tracker_id][sensor_id]["point"]) - # weights[i] += calc_gen2_error_corr( - # sensor_pt[0], - # sensor_pt[1], - # -sensor_pt[2], - # plane_id, - # lighthouses[channel_id][plane_id], - # meas_angle) - # print(f"++ Best cost at {iter} is", np.min(weights)) - # weights = np.reciprocal(weights) - # weights /= np.sum(weights) - # weights = np.cumsum(weights) - # probs = np.random.random(size=Nparticles) - # particles = np.array([ - # pt.concat( - # pt.random_transform(rng=rng, mean=inno_mean, cov=inno_cov), - # particles[np.argmax(weights > prob)]) for prob in probs]) - # if Nrefresh > 0: - # particles[0:Nrefresh] = np.array([ - # pt.random_transform(rng=rng, mean=init_mean, cov=init_cov) - # for _ in range(Nrefresh)]) - # pos = np.zeros((Nparticles, 3)) - # rot = np.zeros((Nparticles, 4)) - # for i in range(Nparticles): - # pos[i, :] = pt.pq_from_transform(particles[i])[0:3] - # rot[i, :] = pt.pq_from_transform(particles[i])[3:7] - # pos_mean = np.average(pos, weights=weights, axis=0) - # pos_var = np.average((pos - pos_mean)**2, weights=weights, axis=0) - # rot_mean = np.average(rot, weights=weights, axis=0) - # rot_var = np.average((rot - rot_mean)**2, weights=weights, axis=0) - # print("pos", pos_mean, np.sqrt(pos_var)) - # print("rot", rot_mean, np.sqrt(rot_var)) - if __name__ == "__main__":
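Because the section ends with the CustomFactor construction and Levenberg-Marquardt solve, a self-contained sketch of that pattern may help. It swaps the lighthouse sweep model for a much simpler measurement, the world-frame position of a known body-frame point, so that it runs on its own; point_factor, truth, the sample points and the noise value are illustrative and not part of the patch, but the partial() binding, the jacobians[0] convention and the [R * skew(-p) | R] pose Jacobian mirror the code above.

import numpy as np
from functools import partial
from typing import List, Optional

import gtsam


def point_factor(p_body: np.ndarray,
                 p_world_meas: np.ndarray,
                 this: gtsam.CustomFactor,
                 values: gtsam.Values,
                 jacobians: Optional[List[np.ndarray]]) -> np.ndarray:
    """Residual: predicted world position of a known body-frame point minus measurement."""
    wTb = values.atPose3(this.keys()[0])
    p_world_pred = wTb.transformFrom(p_body)
    if jacobians is not None:
        R = wTb.rotation().matrix()
        wx, wy, wz = -p_body
        skew = np.array([[0.0, -wz, wy], [wz, 0.0, -wx], [-wy, wx, 0.0]])
        # Same structure as J in gen2_factor: d(wTb * p)/d(pose) = [R * skew(-p) | R]
        jacobians[0] = np.hstack((R @ skew, R))
    return p_world_pred - p_world_meas


# Illustrative ground truth and body-frame sensor points.
truth = gtsam.Pose3(gtsam.Rot3.Rodrigues(0.1, -0.2, 0.3), gtsam.Point3(0.5, -0.4, 1.2))
points = [np.array([0.1, 0.0, 0.0]), np.array([0.0, 0.2, 0.0]),
          np.array([0.0, 0.0, 0.3]), np.array([0.1, 0.2, 0.3])]

graph = gtsam.NonlinearFactorGraph()
noise = gtsam.noiseModel.Isotropic.Sigma(3, 0.001)
for p in points:
    # partial() binds the per-measurement constants, exactly as the script does
    # with the lighthouse calibration, sensor point, plane and angle.
    graph.add(gtsam.CustomFactor(
        noise, [0], partial(point_factor, p, truth.transformFrom(p))))

initial = gtsam.Values()
initial.insert(0, gtsam.Pose3(gtsam.Rot3.Rodrigues(0.0, 0.0, 0.0), gtsam.Point3(0.0, 0.0, 0.0)))
params = gtsam.LevenbergMarquardtParams()
result = gtsam.LevenbergMarquardtOptimizer(graph, initial, params).optimize()
print(result.atPose3(0))  # should be close to `truth`

In the actual script the residual is the scalar sweep angle, so the noise model is one-dimensional and jacobians[0] is 1x6, but the wiring is identical.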