diff --git a/.github/scripts/unix.sh b/.github/scripts/unix.sh
index 09fcd788bf..b2e960fc10 100644
--- a/.github/scripts/unix.sh
+++ b/.github/scripts/unix.sh
@@ -52,6 +52,8 @@ function configure()
         -DGTSAM_POSE3_EXPMAP=${GTSAM_POSE3_EXPMAP:-ON} \
         -DGTSAM_USE_SYSTEM_EIGEN=${GTSAM_USE_SYSTEM_EIGEN:-OFF} \
         -DGTSAM_USE_SYSTEM_METIS=${GTSAM_USE_SYSTEM_METIS:-OFF} \
+        -DGTSAM_USE_BOOST_FEATURES=${GTSAM_USE_BOOST_FEATURES:-ON} \
+        -DGTSAM_ENABLE_BOOST_SERIALIZATION=${GTSAM_ENABLE_BOOST_SERIALIZATION:-ON} \
         -DGTSAM_BUILD_WITH_MARCH_NATIVE=OFF \
         -DGTSAM_SINGLE_TEST_EXE=OFF
 }
diff --git a/.github/workflows/build-linux.yml b/.github/workflows/build-linux.yml
index 3c1275a55a..b4b391df3e 100644
--- a/.github/workflows/build-linux.yml
+++ b/.github/workflows/build-linux.yml
@@ -2,8 +2,8 @@ name: Linux CI
 
 on: [pull_request]
 
-# Every time you make a push to your PR, it cancel immediately the previous checks,
-# and start a new one. The other runner will be available more quickly to your PR.
+# Every time you make a push to your PR, it cancel immediately the previous checks,
+# and start a new one. The other runner will be available more quickly to your PR.
 concurrency:
   group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
   cancel-in-progress: true
@@ -25,11 +25,12 @@ jobs:
         # Github Actions requires a single row to be added to the build matrix.
         # See https://help.github.com/en/articles/workflow-syntax-for-github-actions.
         name: [
-          ubuntu-20.04-gcc-9,
-          ubuntu-20.04-clang-9,
-          ubuntu-22.04-gcc-12,
-          ubuntu-22.04-clang-14,
-        ]
+          # "Bracket" the versions from GCC [9-14] and Clang [9-16]
+          ubuntu-20.04-gcc-9,
+          ubuntu-20.04-clang-9,
+          ubuntu-24.04-gcc-14,
+          ubuntu-24.04-clang-16,
+        ]
 
         build_type: [Debug, Release]
         build_unstable: [ON]
@@ -44,15 +45,15 @@ jobs:
             compiler: clang
             version: "9"
 
-          - name: ubuntu-22.04-gcc-12
-            os: ubuntu-22.04
+          - name: ubuntu-24.04-gcc-14
+            os: ubuntu-24.04
             compiler: gcc
-            version: "11"
+            version: "14"
 
-          - name: ubuntu-22.04-clang-14
-            os: ubuntu-22.04
+          - name: ubuntu-24.04-clang-16
+            os: ubuntu-24.04
             compiler: clang
-            version: "14"
+            version: "16"
 
     steps:
       - name: Checkout
diff --git a/.github/workflows/build-special.yml b/.github/workflows/build-special.yml
index 3a7dd974dc..d0a182975b 100644
--- a/.github/workflows/build-special.yml
+++ b/.github/workflows/build-special.yml
@@ -2,8 +2,8 @@ name: Special Cases CI
 
 on: [pull_request]
 
-# Every time you make a push to your PR, it cancel immediately the previous checks,
-# and start a new one. The other runner will be available more quickly to your PR.
+# Every time you make a push to your PR, it cancel immediately the previous checks,
+# and start a new one. The other runner will be available more quickly to your PR.
 concurrency:
   group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
   cancel-in-progress: true
@@ -115,19 +115,24 @@ jobs:
             echo "CXX=clang++-${{ matrix.version }}" >> $GITHUB_ENV
           fi
 
-      - name: Install Boost
-        if: runner.os == 'Linux'
-        run: |
-          sudo apt-get -y install libboost-all-dev
-
       - name: Install (macOS)
         if: runner.os == 'macOS'
         run: |
-          brew install cmake ninja boost
+          brew install cmake ninja
           sudo xcode-select -switch /Applications/Xcode_${{ matrix.version }}.app
           echo "CC=clang" >> $GITHUB_ENV
           echo "CXX=clang++" >> $GITHUB_ENV
 
+      - name: Install Boost
+        run: |
+          if [ ${{matrix.flag}} != 'no_boost' ]; then
+            if [ ${{runner.os}} == 'Linux' ]; then
+              sudo apt-get -y install libboost-all-dev
+            elif [ ${{runner.os}} == 'macOS' ]; then
+              brew install boost
+            fi
+          fi
+
       - name: Set Allow Deprecated Flag
         if: matrix.flag == 'deprecated'
         run: |
@@ -181,7 +186,6 @@ jobs:
         with:
           swap-size-gb: 12
 
-
       - name: Build & Test
         run: |
           bash .github/scripts/unix.sh -t
diff --git a/gtsam/base/std_optional_serialization.h b/gtsam/base/std_optional_serialization.h
index 93a5c8dbaf..db5e994e9a 100644
--- a/gtsam/base/std_optional_serialization.h
+++ b/gtsam/base/std_optional_serialization.h
@@ -48,7 +48,8 @@
  */
 #ifdef __GNUC__
 #if __GNUC__ >= 7 && __cplusplus >= 201703L
-namespace boost { namespace serialization { struct U; } }
+// Based on https://github.com/borglab/gtsam/issues/1738, we define U as a complete type.
+namespace boost { namespace serialization { struct U{}; } }
 namespace std { template<> struct is_trivially_default_constructible<boost::serialization::U> : std::false_type {}; }
 namespace std { template<> struct is_trivially_copy_constructible<boost::serialization::U> : std::false_type {}; }
 namespace std { template<> struct is_trivially_move_constructible<boost::serialization::U> : std::false_type {}; }
diff --git a/gtsam/base/tests/testStdOptionalSerialization.cpp b/gtsam/base/tests/testStdOptionalSerialization.cpp
index d9bd1da4af..3c1310aa62 100644
--- a/gtsam/base/tests/testStdOptionalSerialization.cpp
+++ b/gtsam/base/tests/testStdOptionalSerialization.cpp
@@ -60,8 +60,6 @@ class TestOptionalStruct {
   TestOptionalStruct() = default;
   TestOptionalStruct(const int& opt)
       : opt(opt) {}
-  // A copy constructor is needed for serialization
-  TestOptionalStruct(const TestOptionalStruct& other) = default;
   bool operator==(const TestOptionalStruct& other) const {
     // check the values are equal
     return *opt == *other.opt;
diff --git a/gtsam/constrained/InequalityPenaltyFunction.cpp b/gtsam/constrained/InequalityPenaltyFunction.cpp
index 2e331292ce..238733c975 100644
--- a/gtsam/constrained/InequalityPenaltyFunction.cpp
+++ b/gtsam/constrained/InequalityPenaltyFunction.cpp
@@ -20,12 +20,15 @@
 
 namespace gtsam {
 
-/* ********************************************************************************************* */
-InequalityPenaltyFunction::UnaryScalarFunc InequalityPenaltyFunction::function() const {
-  return [=](const double& x, OptionalJacobian<1, 1> H = {}) -> double { return (*this)(x, H); };
+/* ************************************************************************* */
+InequalityPenaltyFunction::UnaryScalarFunc InequalityPenaltyFunction::function()
+    const {
+  return [this](const double& x, OptionalJacobian<1, 1> H = {}) {
+    return this->operator()(x, H);
+  };
 }
 
-/* ********************************************************************************************* */
+/* ************************************************************************* */
 double RampFunction::Ramp(const double x, OptionalJacobian<1, 1> H) {
   if (x < 0) {
     if (H) {
@@ -40,8 +43,9 @@ double RampFunction::Ramp(const double x, OptionalJacobian<1, 1> H) {
   }
 }
 
-/* ********************************************************************************************* */
-double SmoothRampPoly2::operator()(const double& x, OptionalJacobian<1, 1> H) const {
+/* ************************************************************************* */
+double SmoothRampPoly2::operator()(const double& x,
+                                   OptionalJacobian<1, 1> H) const {
   if (x <= 0) {
     if (H) {
       H->setZero();
@@ -60,8 +64,9 @@ double SmoothRampPoly2::operator()(const double& x, OptionalJacobian<1, 1> H) co
   }
 }
 
-/* ********************************************************************************************* */
-double SmoothRampPoly3::operator()(const double& x, OptionalJacobian<1, 1> H) const {
+/* ************************************************************************* */
+double SmoothRampPoly3::operator()(const double& x,
+                                   OptionalJacobian<1, 1> H) const {
   if (x <= 0) {
     if (H) {
       H->setZero();
@@ -80,8 +85,9 @@ double SmoothRampPoly3::operator()(const double& x, OptionalJacobian<1, 1> H) co
   }
 }
 
-/* ********************************************************************************************* */
-double SoftPlusFunction::operator()(const double& x, OptionalJacobian<1, 1> H) const {
+/* ************************************************************************* */
+double SoftPlusFunction::operator()(const double& x,
+                                    OptionalJacobian<1, 1> H) const {
   if (H) {
     H->setConstant(1 / (1 + std::exp(-k_ * x)));
   }
diff --git a/gtsam/geometry/tests/testSO3.cpp b/gtsam/geometry/tests/testSO3.cpp
index 41777ae3aa..17b27daeae 100644
--- a/gtsam/geometry/tests/testSO3.cpp
+++ b/gtsam/geometry/tests/testSO3.cpp
@@ -333,7 +333,7 @@ TEST(SO3, CrossB) {
   Matrix aH1;
   for (bool nearZero : {true, false}) {
     std::function<Vector3(const Vector3&, const Vector3&)> f =
-        [=](const Vector3& omega, const Vector3& v) {
+        [nearZero](const Vector3& omega, const Vector3& v) {
           return so3::DexpFunctor(omega, nearZero).crossB(v);
         };
     for (const Vector3& omega : test_cases::omegas(nearZero)) {
@@ -351,7 +351,7 @@ TEST(SO3, DoubleCrossC) {
   Matrix aH1;
   for (bool nearZero : {true, false}) {
     std::function<Vector3(const Vector3&, const Vector3&)> f =
-        [=](const Vector3& omega, const Vector3& v) {
+        [nearZero](const Vector3& omega, const Vector3& v) {
           return so3::DexpFunctor(omega, nearZero).doubleCrossC(v);
         };
     for (const Vector3& omega : test_cases::omegas(nearZero)) {
@@ -369,7 +369,7 @@ TEST(SO3, ApplyDexp) {
   Matrix aH1, aH2;
   for (bool nearZero : {true, false}) {
     std::function<Vector3(const Vector3&, const Vector3&)> f =
-        [=](const Vector3& omega, const Vector3& v) {
+        [nearZero](const Vector3& omega, const Vector3& v) {
          return so3::DexpFunctor(omega, nearZero).applyDexp(v);
        };
     for (const Vector3& omega : test_cases::omegas(nearZero)) {
@@ -390,7 +390,7 @@ TEST(SO3, ApplyInvDexp) {
   Matrix aH1, aH2;
   for (bool nearZero : {true, false}) {
     std::function<Vector3(const Vector3&, const Vector3&)> f =
-        [=](const Vector3& omega, const Vector3& v) {
+        [nearZero](const Vector3& omega, const Vector3& v) {
          return so3::DexpFunctor(omega, nearZero).applyInvDexp(v);
        };
     for (const Vector3& omega : test_cases::omegas(nearZero)) {
@@ -412,7 +412,7 @@ TEST(SO3, ApplyLeftJacobian) {
   Matrix aH1, aH2;
   for (bool nearZero : {true, false}) {
     std::function<Vector3(const Vector3&, const Vector3&)> f =
-        [=](const Vector3& omega, const Vector3& v) {
+        [nearZero](const Vector3& omega, const Vector3& v) {
          return so3::DexpFunctor(omega, nearZero).applyLeftJacobian(v);
        };
     for (const Vector3& omega : test_cases::omegas(nearZero)) {
@@ -433,7 +433,7 @@ TEST(SO3, ApplyLeftJacobianInverse) {
   Matrix aH1, aH2;
   for (bool nearZero : {true, false}) {
     std::function<Vector3(const Vector3&, const Vector3&)> f =
-        [=](const Vector3& omega, const Vector3& v) {
+        [nearZero](const Vector3& omega, const Vector3& v) {
          return so3::DexpFunctor(omega, nearZero).applyLeftJacobianInverse(v);
        };
     for (const Vector3& omega : test_cases::omegas(nearZero)) {
diff --git a/gtsam/navigation/tests/testCombinedImuFactor.cpp b/gtsam/navigation/tests/testCombinedImuFactor.cpp
index c4fefb8ffa..80a7b02982 100644
--- a/gtsam/navigation/tests/testCombinedImuFactor.cpp
+++ b/gtsam/navigation/tests/testCombinedImuFactor.cpp
@@ -144,7 +144,7 @@ TEST(CombinedImuFactor, FirstOrderPreIntegratedMeasurements) {
   auto p = testing::Params();
   testing::SomeMeasurements measurements;
 
-  auto preintegrated = [=](const Vector3& a, const Vector3& w) {
+  auto preintegrated = [&](const Vector3& a, const Vector3& w) {
     PreintegratedImuMeasurements pim(p, Bias(a, w));
     testing::integrateMeasurements(measurements, &pim);
     return pim.preintegrated();
diff --git a/gtsam/navigation/tests/testImuFactor.cpp b/gtsam/navigation/tests/testImuFactor.cpp
index d4bc763ee5..2e1528ae87 100644
--- a/gtsam/navigation/tests/testImuFactor.cpp
+++ b/gtsam/navigation/tests/testImuFactor.cpp
@@ -399,7 +399,7 @@ TEST(ImuFactor, PartialDerivative_wrt_Bias) {
   Vector3 measuredOmega(0.1, 0, 0);
   double deltaT = 0.5;
 
-  auto evaluateRotation = [=](const Vector3 biasOmega) {
+  auto evaluateRotation = [&measuredOmega, &deltaT](const Vector3 biasOmega) {
     return Rot3::Expmap((measuredOmega - biasOmega) * deltaT);
   };
 
@@ -424,7 +424,7 @@ TEST(ImuFactor, PartialDerivativeLogmap) {
   // Measurements
   Vector3 deltaTheta(0, 0, 0);
 
-  auto evaluateLogRotation = [=](const Vector3 delta) {
+  auto evaluateLogRotation = [&thetaHat](const Vector3 delta) {
     return Rot3::Logmap(
         Rot3::Expmap(thetaHat).compose(Rot3::Expmap(delta)));
   };
diff --git a/gtsam/navigation/tests/testManifoldPreintegration.cpp b/gtsam/navigation/tests/testManifoldPreintegration.cpp
index 82f9876fb0..f9c6ebacb7 100644
--- a/gtsam/navigation/tests/testManifoldPreintegration.cpp
+++ b/gtsam/navigation/tests/testManifoldPreintegration.cpp
@@ -43,21 +43,21 @@ TEST(ManifoldPreintegration, BiasCorrectionJacobians) {
   testing::SomeMeasurements measurements;
 
   std::function<Rot3(const Vector3&, const Vector3&)> deltaRij =
-      [=](const Vector3& a, const Vector3& w) {
+      [&](const Vector3& a, const Vector3& w) {
        ManifoldPreintegration pim(testing::Params(), Bias(a, w));
        testing::integrateMeasurements(measurements, &pim);
        return pim.deltaRij();
      };
 
   std::function<Vector3(const Vector3&, const Vector3&)> deltaPij =
-      [=](const Vector3& a, const Vector3& w) {
+      [&](const Vector3& a, const Vector3& w) {
        ManifoldPreintegration pim(testing::Params(), Bias(a, w));
        testing::integrateMeasurements(measurements, &pim);
        return pim.deltaPij();
      };
 
   std::function<Vector3(const Vector3&, const Vector3&)> deltaVij =
-      [=](const Vector3& a, const Vector3& w) {
+      [&](const Vector3& a, const Vector3& w) {
        ManifoldPreintegration pim(testing::Params(), Bias(a, w));
        testing::integrateMeasurements(measurements, &pim);
        return pim.deltaVij();
diff --git a/gtsam/navigation/tests/testTangentPreintegration.cpp b/gtsam/navigation/tests/testTangentPreintegration.cpp
index af91f4f2c4..73cea9f717 100644
--- a/gtsam/navigation/tests/testTangentPreintegration.cpp
+++ b/gtsam/navigation/tests/testTangentPreintegration.cpp
@@ -78,7 +78,7 @@ TEST(ImuFactor, BiasCorrectionJacobians) {
   testing::SomeMeasurements measurements;
 
   std::function<Vector9(const Vector3&, const Vector3&)> preintegrated =
-      [=](const Vector3& a, const Vector3& w) {
+      [&](const Vector3& a, const Vector3& w) {
        TangentPreintegration pim(testing::Params(), Bias(a, w));
        testing::integrateMeasurements(measurements, &pim);
        return pim.preintegrated();
@@ -149,7 +149,7 @@ TEST(TangentPreintegration, Compose) {
 TEST(TangentPreintegration, MergedBiasDerivatives) {
   testing::SomeMeasurements measurements;
 
-  auto f = [=](const Vector3& a, const Vector3& w) {
+  auto f = [&](const Vector3& a, const Vector3& w) {
     TangentPreintegration pim02(testing::Params(), Bias(a, w));
     testing::integrateMeasurements(measurements, &pim02);
     testing::integrateMeasurements(measurements, &pim02);