I'm using the OAK-D Pro camera and running the depthai_examples stereo_node.
The right camera and depth streams are synchronized properly, but the left camera is not synchronized with either the right camera or the depth.
Any ideas?
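A minimal sketch of how the offset can be observed (it assumes the left/image_rect and right/image_rect topic names used by the publisher below; it is only a rough check node, not part of the example):

#include <rclcpp/rclcpp.hpp>
#include <sensor_msgs/msg/image.hpp>

// Subscribes to both rectified image topics and prints the stamp difference
// whenever a right frame arrives after at least one left frame has been seen.
int main(int argc, char** argv) {
    rclcpp::init(argc, argv);
    auto node = rclcpp::Node::make_shared("stamp_check");
    rclcpp::Time leftStamp;
    auto leftSub = node->create_subscription<sensor_msgs::msg::Image>(
        "left/image_rect", 10,
        [&](sensor_msgs::msg::Image::ConstSharedPtr msg) { leftStamp = msg->header.stamp; });
    auto rightSub = node->create_subscription<sensor_msgs::msg::Image>(
        "right/image_rect", 10,
        [&](sensor_msgs::msg::Image::ConstSharedPtr msg) {
            if(leftStamp.nanoseconds() > 0) {
                double dt = (rclcpp::Time(msg->header.stamp) - leftStamp).seconds();
                RCLCPP_INFO(node->get_logger(), "right - left stamp: %.6f s", dt);
            }
        });
    rclcpp::spin(node);
    return 0;
}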
Here is the publisher code:
//Modified by Ran to run the stereo pair at 640x400 @ 10 fps and the IMU at 100 Hz
//To use it, replace /depthai_examples/src/stereo_publisher.cpp with this file
//Build using "colcon build --packages-select depthai_examples --symlink-install"
//Run using "ros2 run depthai_examples stereo_node"
#include <cstdio>
#include <functional>
#include <iostream>
#include <tuple>
#include "camera_info_manager/camera_info_manager.hpp"
#include "rclcpp/executors.hpp"
#include "rclcpp/node.hpp"
#include "sensor_msgs/msg/image.hpp"
#include "sensor_msgs/msg/imu.hpp"
#include "stereo_msgs/msg/disparity_image.hpp"
// Includes the common headers needed for development with the depthai library
#include "depthai/device/DataQueue.hpp"
#include "depthai/device/Device.hpp"
#include "depthai/pipeline/Pipeline.hpp"
#include "depthai/pipeline/node/IMU.hpp"
#include "depthai/pipeline/node/MonoCamera.hpp"
#include "depthai/pipeline/node/StereoDepth.hpp"
#include "depthai/pipeline/node/XLinkOut.hpp"
#include "depthai_bridge/ImageConverter.hpp"
#include "depthai_bridge/ImuConverter.hpp"
#include "depthai_bridge/BridgePublisher.hpp"
#include "depthai_bridge/DisparityConverter.hpp"
#include "depthai_bridge/ImageConverter.hpp"
std::tuple<dai::Pipeline, int, int> createPipeline(
bool withDepth, bool lrcheck, bool extended, bool subpixel, int confidence, int LRchecktresh, std::string resolution = "400p", int fps = 10) {
dai::Pipeline pipeline;
dai::node::MonoCamera::Properties::SensorResolution monoResolution;
auto monoLeft = pipeline.create<dai::node::MonoCamera>();
auto monoRight = pipeline.create<dai::node::MonoCamera>();
auto xoutLeft = pipeline.create<dai::node::XLinkOut>();
auto xoutRight = pipeline.create<dai::node::XLinkOut>();
auto stereo = pipeline.create<dai::node::StereoDepth>();
auto xoutDepth = pipeline.create<dai::node::XLinkOut>();
auto imu = pipeline.create<dai::node::IMU>();
auto xoutImu = pipeline.create<dai::node::XLinkOut>();
// XLinkOut
xoutLeft->setStreamName("left");
xoutRight->setStreamName("right");
if(withDepth) {
xoutDepth->setStreamName("depth");
} else {
xoutDepth->setStreamName("disparity");
}
xoutImu->setStreamName("imu");
int width, height;
if(resolution == "720p") {
monoResolution = dai::node::MonoCamera::Properties::SensorResolution::THE_720_P;
width = 1280;
height = 720;
} else if(resolution == "400p") {
monoResolution = dai::node::MonoCamera::Properties::SensorResolution::THE_400_P;
width = 640;
height = 400;
} else if(resolution == "800p") {
monoResolution = dai::node::MonoCamera::Properties::SensorResolution::THE_800_P;
width = 1280;
height = 800;
} else if(resolution == "480p") {
monoResolution = dai::node::MonoCamera::Properties::SensorResolution::THE_480_P;
width = 640;
height = 480;
} else {
RCLCPP_ERROR(rclcpp::get_logger("rclcpp"), "Invalid parameter. -> monoResolution: %s", resolution.c_str());
throw std::runtime_error("Invalid mono camera resolution.");
}
// MonoCamera
monoLeft->setResolution(monoResolution); // use the resolution selected above from the "resolution" argument
monoLeft->setBoardSocket(dai::CameraBoardSocket::CAM_B);
monoLeft->setFps(fps); // Set FPS for left camera
monoRight->setResolution(monoResolution);
monoRight->setBoardSocket(dai::CameraBoardSocket::CAM_C);
monoRight->setFps(fps); // Set FPS for right camera
// StereoDepth
stereo->initialConfig.setConfidenceThreshold(confidence);
stereo->setRectifyEdgeFillColor(0); // black, to better see the cutout
stereo->initialConfig.setLeftRightCheckThreshold(LRchecktresh);
stereo->setLeftRightCheck(lrcheck);
stereo->setExtendedDisparity(extended);
stereo->setSubpixel(subpixel);
// Link plugins CAM -> STEREO -> XLINK
monoLeft->out.link(stereo->left);
monoRight->out.link(stereo->right);
stereo->rectifiedLeft.link(xoutLeft->input);
stereo->rectifiedRight.link(xoutRight->input);
// Imu
imu->enableIMUSensor(dai::IMUSensor::ACCELEROMETER_RAW, 100);
imu->enableIMUSensor(dai::IMUSensor::GYROSCOPE_RAW, 100);
imu->setBatchReportThreshold(1);
imu->setMaxBatchReports(1); // Get one message only for now.
if(withDepth) {
stereo->depth.link(xoutDepth->input);
} else {
stereo->disparity.link(xoutDepth->input);
}
imu->out.link(xoutImu->input);
return std::make_tuple(pipeline, width, height);
}
int main(int argc, char** argv) {
rclcpp::init(argc, argv);
auto node = rclcpp::Node::make_shared("stereo_node");
std::string tfPrefix, mode, monoResolution;
bool lrcheck, extended, subpixel, enableDepth;
int confidence, LRchecktresh, imuModeParam;
int monoWidth, monoHeight;
double angularVelCovariance, linearAccelCovariance;
bool enableRosBaseTimeUpdate;
dai::Pipeline pipeline;
node->declare_parameter("tf_prefix", "oak");
node->declare_parameter("mode", "depth");
node->declare_parameter("lrcheck", true);
node->declare_parameter("extended", false);
node->declare_parameter("subpixel", true);
node->declare_parameter("confidence", 200);
node->declare_parameter("LRchecktresh", 5);
node->declare_parameter("monoResolution", "480p");
node->declare_parameter("imuMode", 1);
node->declare_parameter("angularVelCovariance", 0.02);
node->declare_parameter("linearAccelCovariance", 0.0);
node->declare_parameter("enableRosBaseTimeUpdate", false);
node->get_parameter("tf_prefix", tfPrefix);
node->get_parameter("mode", mode);
node->get_parameter("lrcheck", lrcheck);
node->get_parameter("extended", extended);
node->get_parameter("subpixel", subpixel);
node->get_parameter("confidence", confidence);
node->get_parameter("LRchecktresh", LRchecktresh);
node->get_parameter("monoResolution", monoResolution);
node->get_parameter("imuMode", imuModeParam);
node->get_parameter("angularVelCovariance", angularVelCovariance);
node->get_parameter("linearAccelCovariance", linearAccelCovariance);
node->get_parameter("enableRosBaseTimeUpdate", enableRosBaseTimeUpdate);
if(mode == "depth") {
enableDepth = true;
} else {
enableDepth = false;
}
dai::ros::ImuSyncMethod imuMode = static_cast<dai::ros::ImuSyncMethod>(imuModeParam);
std::tie(pipeline, monoWidth, monoHeight) = createPipeline(enableDepth, lrcheck, extended, subpixel, confidence, LRchecktresh, monoResolution);
// dai::Device device(pipeline);
auto device = std::make_shared<dai::Device>(pipeline);
auto leftQueue = device->getOutputQueue("left", 5, false);
auto rightQueue = device->getOutputQueue("right", 5, false);
std::shared_ptr<dai::DataOutputQueue> stereoQueue;
if(enableDepth) {
stereoQueue = device->getOutputQueue("depth", 5, false);
} else {
stereoQueue = device->getOutputQueue("disparity", 30, false);
}
auto imuQueue = device->getOutputQueue("imu", 5, false);
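// (Debug sketch, not part of the original example: one way to check whether the
//  left/right offset is already present on the device side is to pull a frame pair
//  straight from the queues and compare sequence numbers and device timestamps.
//  Kept commented out so it does not consume frames meant for the publishers below.)
// auto lf = leftQueue->get<dai::ImgFrame>();
// auto rf = rightQueue->get<dai::ImgFrame>();
// std::cout << "seq L/R: " << lf->getSequenceNum() << "/" << rf->getSequenceNum()
//           << "  dt_us: "
//           << std::chrono::duration_cast<std::chrono::microseconds>(lf->getTimestamp() - rf->getTimestamp()).count()
//           << std::endl;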
auto calibrationHandler = device->readCalibration();
auto boardName = calibrationHandler.getEepromData().boardName;
if(monoHeight > 480 && boardName == "OAK-D-LITE") {
monoWidth = 640;
monoHeight = 480;
}
dai::rosBridge::ImageConverter converter(tfPrefix + "_left_camera_optical_frame", true);
auto leftCameraInfo = converter.calibrationToCameraInfo(calibrationHandler, dai::CameraBoardSocket::CAM_B, monoWidth, monoHeight);
dai::rosBridge::BridgePublisher<sensor_msgs::msg::Image, dai::ImgFrame> leftPublish(
leftQueue,
node,
std::string("left/image_rect"),
std::bind(&dai::rosBridge::ImageConverter::toRosMsg, &converter, std::placeholders::_1, std::placeholders::_2),
30,
leftCameraInfo,
"left");
leftPublish.addPublisherCallback();
dai::rosBridge::ImageConverter rightconverter(tfPrefix + "_right_camera_optical_frame", true);
auto rightCameraInfo = converter.calibrationToCameraInfo(calibrationHandler, dai::CameraBoardSocket::CAM_C, monoWidth, monoHeight);
dai::rosBridge::BridgePublisher<sensor_msgs::msg::Image, dai::ImgFrame> rightPublish(
rightQueue,
node,
std::string("right/image_rect"),
std::bind(&dai::rosBridge::ImageConverter::toRosMsg, &rightconverter, std::placeholders::_1, std::placeholders::_2),
30,
rightCameraInfo,
"right");
rightPublish.addPublisherCallback();
dai::rosBridge::ImuConverter imuConverter(tfPrefix + "_imu_frame", imuMode, linearAccelCovariance, angularVelCovariance);
if(enableRosBaseTimeUpdate) {
imuConverter.setUpdateRosBaseTimeOnToRosMsg();
}
dai::rosBridge::BridgePublisher<sensor_msgs::msg::Imu, dai::IMUData> imuPublish(
imuQueue,
node,
std::string("imu"),
std::bind(&dai::rosBridge::ImuConverter::toRosMsg, &imuConverter, std::placeholders::_1, std::placeholders::_2),
30,
"",
"imu");
imuPublish.addPublisherCallback();
if(mode == "depth") {
dai::rosBridge::BridgePublisher<sensor_msgs::msg::Image, dai::ImgFrame> depthPublish(
stereoQueue,
node,
std::string("stereo/depth"),
std::bind(&dai::rosBridge::ImageConverter::toRosMsg,
&rightconverter, // the depth frame shares the right camera's optical frame
                 // and image type, so the same converter can be reused
std::placeholders::_1,
std::placeholders::_2),
30,
rightCameraInfo,
"stereo");
depthPublish.addPublisherCallback();
rclcpp::spin(node);
} else {
dai::rosBridge::DisparityConverter dispConverter(tfPrefix + "_right_camera_optical_frame", 880, 7.5, 20, 2000);
dai::rosBridge::BridgePublisher<stereo_msgs::msg::DisparityImage, dai::ImgFrame> dispPublish(
stereoQueue,
node,
std::string("stereo/disparity"),
std::bind(&dai::rosBridge::DisparityConverter::toRosMsg, &dispConverter, std::placeholders::_1, std::placeholders::_2),
30,
rightCameraInfo,
"stereo");
dispPublish.addPublisherCallback();
rclcpp::spin(node);
}
return 0;
}