Hello,
I am currently testing with OAK FFC 4P(R7M1E7) with four OV 9782w cameras.
I implemented the ROS node (code below) based on this link, but CAM A and CAM D are not synchronized.
(I am working on the develop branch of depthai-core with commit 57de7c0cafd5724e10adce3ca8f69c8230ac6e9c.)
#include <functional>
#include <iostream>
#include <memory>
#include <string>
#include <thread>
#include <utility>
#include <vector>

#include <opencv2/opencv.hpp>

#include <depthai/depthai.hpp>

#include <ros/ros.h>
#include <sensor_msgs/Imu.h>

#include "depthai_bridge/BridgePublisher.hpp"
#include "depthai_bridge/ImageConverter.hpp"
#include "depthai_bridge/ImuConverter.hpp"
int main(int argc, char** argv){
ros::init(argc, argv, "dai_wrapper_node");
ros::NodeHandle nh("~");
dai::Pipeline pipeline;
// pipeline.setXLinkChunkSize(0);
auto control_in = pipeline.create<dai::node::XLinkIn>();
auto color_a = pipeline.create<dai::node::ColorCamera>();
auto color_b = pipeline.create<dai::node::ColorCamera>();
auto color_c = pipeline.create<dai::node::ColorCamera>();
auto color_d = pipeline.create<dai::node::ColorCamera>();
auto imu = pipeline.create<dai::node::IMU>();
auto xout_imu = pipeline.create<dai::node::XLinkOut>();
control_in->setStreamName("control");
control_in->out.link(color_a->inputControl);
control_in->out.link(color_b->inputControl);
control_in->out.link(color_c->inputControl);
control_in->out.link(color_d->inputControl);
xout_imu->setStreamName("imu");
dai::node::ColorCamera::Properties::SensorResolution color_resolution;
color_resolution = dai::node::ColorCamera::Properties::SensorResolution::THE_720_P;
float color_fps = 1.0;
//cameras
color_a->setResolution(color_resolution);
color_a->setBoardSocket(dai::CameraBoardSocket::CAM_A);
// color_a->setIspScale(1,2);
color_a->initialControl.setFrameSyncMode(dai::CameraControl::FrameSyncMode::OUTPUT);
color_a->initialControl.setSharpness(0);
color_a->initialControl.setLumaDenoise(0);
color_a->initialControl.setChromaDenoise(4);
color_a->initialControl.setManualExposure(3000, 100);
color_a->setFps(color_fps);
color_b->setResolution(color_resolution);
color_b->setBoardSocket(dai::CameraBoardSocket::CAM_B);
color_b->initialControl.setFrameSyncMode(dai::CameraControl::FrameSyncMode::INPUT);
color_b->initialControl.setSharpness(0);
color_b->initialControl.setLumaDenoise(0);
color_b->initialControl.setChromaDenoise(4);
color_b->initialControl.setManualExposure(3000, 100);
// color_b->setIspScale(1,2);
color_b->setFps(color_fps);
color_c->setResolution(color_resolution);
color_c->setBoardSocket(dai::CameraBoardSocket::CAM_C);
color_c->initialControl.setFrameSyncMode(dai::CameraControl::FrameSyncMode::INPUT);
color_c->initialControl.setSharpness(0);
color_c->initialControl.setLumaDenoise(0);
color_c->initialControl.setChromaDenoise(4);
color_c->initialControl.setManualExposure(3000, 100);
color_c->setFps(color_fps);
color_d->setResolution(color_resolution);
color_d->setBoardSocket(dai::CameraBoardSocket::CAM_D);
// color_d->setIspScale(1,2);
color_d->initialControl.setFrameSyncMode(dai::CameraControl::FrameSyncMode::INPUT);
color_d->initialControl.setSharpness(0);
color_d->initialControl.setLumaDenoise(0);
color_d->initialControl.setChromaDenoise(4);
color_d->initialControl.setManualExposure(3000, 100);
color_d->setFps(color_fps);
//imu
imu->enableIMUSensor(dai::IMUSensor::ACCELEROMETER_RAW, 500);
imu->enableIMUSensor(dai::IMUSensor::GYROSCOPE_RAW, 400);
imu->setBatchReportThreshold(5);
imu->setMaxBatchReports(20);
auto xout_color_a = pipeline.create<dai::node::XLinkOut>();
auto xout_color_b = pipeline.create<dai::node::XLinkOut>();
auto xout_color_c = pipeline.create<dai::node::XLinkOut>();
auto xout_color_d = pipeline.create<dai::node::XLinkOut>();
xout_color_a->setStreamName("color_a");
xout_color_b->setStreamName("color_b");
xout_color_c->setStreamName("color_c");
xout_color_d->setStreamName("color_d");
color_a->isp.link(xout_color_a->input);
color_b->isp.link(xout_color_b->input);
color_c->isp.link(xout_color_c->input);
color_d->isp.link(xout_color_d->input);
imu->out.link(xout_imu->input);
//-------------------------------------- connect device
std::shared_ptr<dai::Device> device;
std::vector<dai::DeviceInfo> available_devices = dai::Device::getAllAvailableDevices();
dai::Device::Config cfg = pipeline.getDeviceConfig();
cfg.board.gpio[6] = dai::BoardConfig::GPIO(dai::BoardConfig::GPIO::Direction::OUTPUT, dai::BoardConfig::GPIO::Level::HIGH);
device = std::make_shared<dai::Device>(cfg, dai::UsbSpeed::SUPER_PLUS);
// device = std::make_shared<dai::Device>(pipeline, dai::Device::getAllAvailableDevices().front(), dai::UsbSpeed::SUPER_PLUS);
device->startPipeline(pipeline);
// auto controlQueue = device->getInputQueue("control");
//-------------------------------------- set outputs queue
auto imu_queue = device->getOutputQueue("imu", 30, false);
dai::ros::ImuSyncMethod imu_mode = static_cast<dai::ros::ImuSyncMethod>(1);
//1-> LINEAR_INTERPOLATE_GYRO
dai::rosBridge::ImuConverter imu_converter("oak_imu_frame", imu_mode, 0.0, 0.0);
dai::rosBridge::ImageConverter converter("oak_a_camera_optical_frame", false);
dai::ros::ImageMsgs::CameraInfo cam_info;
{
cam_info.width = static_cast<uint32_t>(1280);
cam_info.height = static_cast<uint32_t>(720);
std::vector<double> flat_intrinsics;
flat_intrinsics.assign(9, 0);
std::copy(flat_intrinsics.begin(), flat_intrinsics.end() ,cam_info.K.begin());
for(size_t i = 0 ; i < 8; i++){
cam_info.D.push_back(static_cast<double>(0.0));
}
std::vector<double> stereo_flat_intrinsics, flat_rectified_rotation;
stereo_flat_intrinsics.assign(12,0);
flat_rectified_rotation.assign(9,0);
std::copy(stereo_flat_intrinsics.begin(), stereo_flat_intrinsics.end(), cam_info.P.begin());
std::copy(flat_rectified_rotation.begin(), flat_rectified_rotation.end(), cam_info.R.begin());
cam_info.distortion_model = "rational_polynomial";
}
auto color_a_queue = device->getOutputQueue("color_a", 10, false);
dai::rosBridge::BridgePublisher<sensor_msgs::Image, dai::ImgFrame> color_a_pub(
color_a_queue,
nh,
std::string("color_a"),
std::bind(&dai::rosBridge::ImageConverter::toRosMsg, &converter, std::placeholders::_1, std::placeholders::_2),
30,
cam_info,
"color_a");
color_a_pub.addPublisherCallback();
auto color_b_queue = device->getOutputQueue("color_b", 10, false);
dai::rosBridge::BridgePublisher<sensor_msgs::Image, dai::ImgFrame> color_b_pub(
color_b_queue,
nh,
std::string("color_b"),
std::bind(&dai::rosBridge::ImageConverter::toRosMsg, &converter, std::placeholders::_1, std::placeholders::_2),
30,
cam_info,
"color_b");
color_b_pub.addPublisherCallback();
auto color_c_queue = device->getOutputQueue("color_c", 10, false);
dai::rosBridge::BridgePublisher<sensor_msgs::Image, dai::ImgFrame> color_c_pub(
color_c_queue,
nh,
std::string("color_c"),
std::bind(&dai::rosBridge::ImageConverter::toRosMsg, &converter, std::placeholders::_1, std::placeholders::_2),
30,
cam_info,
"color_c");
color_c_pub.addPublisherCallback();
auto color_d_queue = device->getOutputQueue("color_d", 10, false);
dai::rosBridge::BridgePublisher<sensor_msgs::Image, dai::ImgFrame> color_d_pub(
color_d_queue,
nh,
std::string("color_d"),
std::bind(&dai::rosBridge::ImageConverter::toRosMsg, &converter, std::placeholders::_1, std::placeholders::_2),
30,
cam_info,
"color_d");
color_d_pub.addPublisherCallback();
dai::rosBridge::BridgePublisher<sensor_msgs::Imu, dai::IMUData> imu_publish(
imu_queue,
nh,
std::string("imu"),
std::bind(&dai::rosBridge::ImuConverter::toRosMsg, &imu_converter, std::placeholders::_1, std::placeholders::_2),
30,
"",
"imu");
imu_publish.addPublisherCallback();
ros::spin();
return EXIT_SUCCESS;
}
Additionally, when I set CAM B to the sync OUTPUT configuration as described in this link, the following error occurs.
[14442C107183D5D600] [1.3.4.4] [2.120] [system] [critical] Fatal error. Please report to developers. Log: 'Fatal error on MSS CPU: trap: 00, address: 00000000' '0'