Skip to content

Commit

Permalink
intermediate review
Browse files — browse the repository at this point in the history
  • Loading branch information
jbrhm committed Dec 2, 2024
1 parent 8bff333 commit 33030af
Show file tree
Hide file tree
Showing 3 changed files with 3 additions and 3 deletions.
1 change: 0 additions & 1 deletion CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -319,7 +319,6 @@ install(PROGRAMS
localization/basestation_gps_driver.py
superstructure/superstructure.py
scripts/debug_course_publisher.py
scripts/visualizer.py

# starter project sources
starter_project/autonomy/src/localization.py
Expand Down
2 changes: 1 addition & 1 deletion perception/object_detector/object_detector.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ namespace mrover {

using namespace std::placeholders;

mModel = Model(modelName, {0, 0}, {"bottle", "hammer"}, mTensorRT.getInputTensorSize(), mTensorRT.getOutputTensorSize(), [this](Model const& model, cv::Mat& rgbImage, cv::Mat& blobSizedImage, cv::Mat& blob){preprocessYOLOv8Input(model, rgbImage, blobSizedImage, blob);}, [this](Model const& model, cv::Mat& output, std::vector<Detection>& detections){parseYOLOv8Output(model, output, detections);});
mModel = Model(modelName, {0, 0}, {"bottle", "hammer"}, mTensorRT.getInputTensorSize(), mTensorRT.getOutputTensorSize(), [](Model const& model, cv::Mat& rgbImage, cv::Mat& blobSizedImage, cv::Mat& blob){preprocessYOLOv8Input(model, rgbImage, blobSizedImage, blob);}, [this](Model const& model, cv::Mat& output, std::vector<Detection>& detections){parseYOLOv8Output(model, output, detections);});

RCLCPP_INFO_STREAM(get_logger(), std::format("Object detector initialized with model: {} and thresholds: {} and {}", mModel.modelName, mModelScoreThreshold, mModelNMSThreshold));
}
Expand Down
3 changes: 2 additions & 1 deletion tensorrt/inference.cu
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
#include "inference.cuh"
#include <ios>

using namespace nvinfer1;

Expand Down Expand Up @@ -106,7 +107,7 @@ auto Inference::createCudaEngine() -> ICudaEngine* {
// Save Engine to File
auto trtModelStream = tempEng->serialize();
std::ofstream outputFileStream{mEngineModelPath, std::ios::binary};
outputFileStream.write(static_cast<char const*>(trtModelStream->data()), static_cast<int32_t>(trtModelStream->size()));
outputFileStream.write(static_cast<char const*>(trtModelStream->data()), static_cast<std::streamsize>(trtModelStream->size()));
outputFileStream.close();

return tempEng;
Expand Down

0 comments on commit 33030af

Please sign in to comment.