• DepthAI-v2
  • A function that returns a rosBridge::BridgePublisher

Hi, I'm making a ROS node that turns on only the camera features the user specifies; for example, if the user just wants to run the IMU and the color camera, they set those options to true and only those streams start. To simplify the code I'm writing functions that return the BridgePublisher for each feature; for example, pub_rgb_camera should return a dai::rosBridge::BridgePublisher<sensor_msgs::Image, dai::ImgFrame>. However, whenever I compile I get this error:

/opt/ros/noetic/include/depthai_bridge/BridgePublisher.hpp:289:78: error: no matching function for call to ‘image_transport::ImageTransport::ImageTransport()’
289 | BridgePublisher<RosMsg, SimMsg>::BridgePublisher(const BridgePublisher& other) {
| ^
In file included from /opt/ros/noetic/include/depthai_bridge/BridgePublisher.hpp:23,
from /home/sidney/rapid-docker/src/perception/oak_custom_launch/include/oak_custom_launch/oak_custom_launch.h:23,
from /home/sidney/rapid-docker/src/perception/oak_custom_launch/src/oak_custom_launch.cpp:9:
/opt/ros/noetic/include/image_transport/image_transport.h:55:12: note: candidate: ‘image_transport::ImageTransport::ImageTransport(const ros::NodeHandle&)’
55 | explicit ImageTransport(const ros::NodeHandle& nh);
| ^~~~~~~~~~~~~~
/opt/ros/noetic/include/image_transport/image_transport.h:55:12: note: candidate expects 1 argument, 0 provided
/opt/ros/noetic/include/image_transport/image_transport.h:52:28: note: candidate: ‘image_transport::ImageTransport::ImageTransport(const image_transport::ImageTransport&)’
52 | class IMAGE_TRANSPORT_DECL ImageTransport
| ^~~~~~~~~~~~~~
/opt/ros/noetic/include/image_transport/image_transport.h:52:28: note: candidate expects 1 argument, 0 provided
In file included from /home/rapid-docker/src/perception/oak_custom_launch/include/oak_custom_launch/oak_custom_launch.h:23,
from /home/rapid-docker/src/perception/oak_custom_launch/src/oak_custom_launch.cpp:9:
/opt/ros/noetic/include/depthai_bridge/BridgePublisher.hpp:300:43: error: no matching function for call to ‘ros::Publisher::Publisher(const std::shared_ptr<ros::Publisher>&)’
300 | _cameraInfoPublisher = rosOrigin::Publisher(other._cameraInfoPublisher);
| ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In file included from /opt/ros/noetic/include/ros/node_handle.h:32,
from /opt/ros/noetic/include/ros/ros.h:45,
from /home/sidney/rapid-docker/src/perception/oak_custom_launch/src/oak_custom_launch.cpp:1:
/opt/ros/noetic/include/ros/publisher.h:179:5: note: candidate: ‘ros::Publisher::Publisher(const string&, const string&, const string&, bool, const ros::NodeHandle&, const SubscriberCallbacksPtr&)’
179 | Publisher(const std::string& topic, const std::string& md5sum,
| ^~~~~~~~~
/opt/ros/noetic/include/ros/publisher.h:179:5: note: candidate expects 6 arguments, 1 provided
/opt/ros/noetic/include/ros/publisher.h:52:5: note: candidate: ‘ros::Publisher::Publisher(const ros::Publisher&)’
52 | Publisher(const Publisher& rhs);
| ^~~~~~~~~
/opt/ros/noetic/include/ros/publisher.h:52:32: note: no known conversion for argument 1 from ‘const std::shared_ptr<ros::Publisher>’ to ‘const ros::Publisher&’
52 | Publisher(const Publisher& rhs);
| ~~~~~~~~~~~~~~~~~^~~
/opt/ros/noetic/include/ros/publisher.h:51:5: note: candidate: ‘ros::Publisher::Publisher()’
51 | Publisher() {}
| ^~~~~~~~~

Is there anything I can do to construct these publishers inside a function and return them?
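Since the error points at BridgePublisher's copy constructor, the only workaround I can think of is to heap-allocate the publisher so it never has to be copied, and hand it back through a smart pointer. Here's a minimal sketch of what I mean for the RGB case; note that rgbConverter_ is a hypothetical class member (a dai::rosBridge::ImageConverter kept alive for the node's lifetime, so the pointer bound into the callback doesn't dangle), not something in my code below:

```
// Sketch only: construct the BridgePublisher on the heap with std::make_unique
// so its copy constructor is never invoked; returning the unique_ptr moves
// just the pointer. rgbConverter_ is an assumed class member, not a local.
std::unique_ptr<dai::rosBridge::BridgePublisher<sensor_msgs::Image, dai::ImgFrame>>
OakCustomLaunch::pub_rgb(std::shared_ptr<dai::Device> device, int colorWidth, int colorHeight)
{
    auto calibrationHandler = device->readCalibration();
    auto rgbCameraInfo = rgbConverter_.calibrationToCameraInfo(
        calibrationHandler, dai::CameraBoardSocket::RGB, colorWidth, colorHeight);
    auto imgQueue = device->getOutputQueue("rgb", 30, false);

    return std::make_unique<dai::rosBridge::BridgePublisher<sensor_msgs::Image, dai::ImgFrame>>(
        imgQueue,
        pnh,
        std::string("color/image"),
        std::bind(&dai::rosBridge::ImageConverter::toRosMsg, &rgbConverter_,
                  std::placeholders::_1, std::placeholders::_2),
        30,
        rgbCameraInfo,
        "color");
}
```

The caller would then keep the returned pointer alive and call rgbPublish->addPublisherCallback() through it, but I'm not sure whether that's the intended way to use the bridge.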

Here's my code:

namespace oak_custom_launch
{

std::tuple<dai::Pipeline, int, int> OakCustomLaunch::createPipeline(bool enableDepth,
                                                                    bool enableSpatialDetection,
                                                                    bool enableRGB,
                                                                    bool enableImu,
                                                                    bool single,
                                                                    bool lrcheck,
                                                                    bool extended,
                                                                    bool subpixel,
                                                                    bool rectify,
                                                                    int stereo_fps,
                                                                    int confidence,
                                                                    int LRchecktresh,
                                                                    int detectionClassesCount,
                                                                    std::string resolution,
                                                                    bool syncNN,
                                                                    std::string nnPath) 
{
    dai::Pipeline pipeline;

    dai::node::MonoCamera::Properties::SensorResolution monoResolution;
    int width, height;
    if(resolution == "720p") {
        monoResolution = dai::node::MonoCamera::Properties::SensorResolution::THE_720_P;
        width = 1280;
        height = 720;
    } else if(resolution == "400p") {
        monoResolution = dai::node::MonoCamera::Properties::SensorResolution::THE_400_P;
        width = 640;
        height = 400;
    } else if(resolution == "800p") {
        monoResolution = dai::node::MonoCamera::Properties::SensorResolution::THE_800_P;
        width = 1280;
        height = 800;
    } else if(resolution == "480p") {
        monoResolution = dai::node::MonoCamera::Properties::SensorResolution::THE_480_P;
        width = 640;
        height = 480;
    } else {
        ROS_ERROR("Invalid parameter. -> monoResolution: %s", resolution.c_str());
        throw std::runtime_error("Invalid mono camera resolution.");
    }

    // Imu
    if(enableImu)
    {
        auto imu = pipeline.create<dai::node::IMU>();
        auto xoutImu = pipeline.create<dai::node::XLinkOut>();
        xoutImu->setStreamName("imu");

        imu->enableIMUSensor(dai::IMUSensor::ACCELEROMETER_RAW, 500);
        imu->enableIMUSensor(dai::IMUSensor::GYROSCOPE_RAW, 400);
        imu->setBatchReportThreshold(5);
        imu->setMaxBatchReports(20);  // allow up to 20 IMU reports per batched message

        imu->out.link(xoutImu->input);
    }


    auto camRgb = pipeline.create<dai::node::ColorCamera>();
    auto xoutRgb = pipeline.create<dai::node::XLinkOut>();
    if(enableRGB || enableSpatialDetection)
    {
        // RGB image
        
        xoutRgb->setStreamName("rgb");

        camRgb->setPreviewSize(416, 416);
        camRgb->setResolution(dai::ColorCameraProperties::SensorResolution::THE_1080_P);
        camRgb->setBoardSocket(dai::CameraBoardSocket::RGB);
        camRgb->setInterleaved(false);

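        // Scale the 1080p ISP output down to roughly match the mono stream:
        // setIspScale(1, 3) yields 360p, setIspScale(2, 3) yields 720p.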
        if(height < 720) {
            camRgb->setIspScale(1, 3);
        } else {
            camRgb->setIspScale(2, 3);
        }
        camRgb->isp.link(xoutRgb->input);
    }

    auto stereo = pipeline.create<dai::node::StereoDepth>();
    auto xoutDepth = pipeline.create<dai::node::XLinkOut>();
    if (enableDepth || enableSpatialDetection)
    {
        //Depth Camera
        auto monoLeft = pipeline.create<dai::node::MonoCamera>();
        auto monoRight = pipeline.create<dai::node::MonoCamera>();

        xoutDepth->setStreamName("depth");

        // MonoCamera
        monoLeft->setResolution(monoResolution);
        monoLeft->setBoardSocket(dai::CameraBoardSocket::LEFT);
        monoLeft->setFps(stereo_fps);
        monoRight->setResolution(monoResolution);
        monoRight->setBoardSocket(dai::CameraBoardSocket::RIGHT);
        monoRight->setFps(stereo_fps);

        // StereoDepth
        stereo->initialConfig.setConfidenceThreshold(confidence);        // Known to be best
        stereo->setRectifyEdgeFillColor(0);                              // black, to better see the cutout
        stereo->initialConfig.setLeftRightCheckThreshold(LRchecktresh);  // Known to be best
        stereo->setLeftRightCheck(lrcheck);
        stereo->setExtendedDisparity(extended);
        stereo->setSubpixel(subpixel);
        if(enableRGB) stereo->setDepthAlign(dai::CameraBoardSocket::RGB);

        // Link plugins CAM -> STEREO -> XLINK
        monoLeft->out.link(stereo->left);
        monoRight->out.link(stereo->right);
        stereo->depth.link(xoutDepth->input);
    }
    
    if(enableSpatialDetection)
    {
        
        //Yolo
        camRgb->setColorOrder(dai::ColorCameraProperties::ColorOrder::BGR);
        camRgb->setInterleaved(false);
        camRgb->setPreviewSize(416, 416);

        auto spatialDetectionNetwork = pipeline.create<dai::node::YoloSpatialDetectionNetwork>();
        auto xoutNN = pipeline.create<dai::node::XLinkOut>();
        auto xoutPreview = pipeline.create<dai::node::XLinkOut>();
        xoutPreview->setStreamName("preview");
        xoutNN->setStreamName("detections");

        spatialDetectionNetwork->setBlobPath(nnPath);
        spatialDetectionNetwork->setConfidenceThreshold(0.5f);
        spatialDetectionNetwork->input.setBlocking(false);
        spatialDetectionNetwork->setBoundingBoxScaleFactor(0.5);
        spatialDetectionNetwork->setDepthLowerThreshold(100);
        spatialDetectionNetwork->setDepthUpperThreshold(10000);

        // yolo specific parameters
        spatialDetectionNetwork->setNumClasses(detectionClassesCount);
        spatialDetectionNetwork->setCoordinateSize(4);
        spatialDetectionNetwork->setAnchors({10, 14, 23, 27, 37, 58, 81, 82, 135, 169, 344, 319});
        spatialDetectionNetwork->setAnchorMasks({{"side13", {3, 4, 5}}, {"side26", {1, 2, 3}}});
        spatialDetectionNetwork->setIouThreshold(0.5f);

        // Link plugins CAM -> NN -> XLINK
        camRgb->preview.link(spatialDetectionNetwork->input);
        if(syncNN)
            spatialDetectionNetwork->passthrough.link(xoutPreview->input);
        else
            camRgb->preview.link(xoutPreview->input);
        spatialDetectionNetwork->out.link(xoutNN->input);

        stereo->depth.link(spatialDetectionNetwork->inputDepth);
    }

    return std::make_tuple(pipeline, width, height);
}


OakCustomLaunch::OakCustomLaunch()
{
    std::string tfPrefix, mode = "depth", mxId, resourceBaseFolder = "package://depthai_examples/resources", nnPath;
    std::string monoResolution = "720p";
    int badParams = 0, stereo_fps = 15, confidence = 200, LRchecktresh = 5, imuModeParam = 1, detectionClassesCount = 80;
    bool lrcheck = true, extended = false, subpixel = true, enableDepth = false, rectify = true, depth_aligned = true, enableRGB = false, enableImu = false;
    bool enableSpatialDetection = false, enableDotProjector = false, enableFloodLight = false;
    bool usb2Mode = false, poeMode = false, syncNN = true;
    double angularVelCovariance = 0, linearAccelCovariance = 0;
    double dotProjectormA = 200.0, floodLightmA = 200.0;
    std::string nnName("tiny-yolo-v4_openvino_2021.2_6shave.blob");  // Set your blob name for the model here

    badParams += !pnh.getParam("mxId", mxId);
    badParams += !pnh.getParam("usb2Mode", usb2Mode);
    badParams += !pnh.getParam("poeMode", poeMode);

    badParams += !pnh.getParam("tf_prefix", tfPrefix);
    badParams += !pnh.getParam("mode", mode);
    badParams += !pnh.getParam("imuMode", imuModeParam);

    badParams += !pnh.getParam("lrcheck", lrcheck);
    badParams += !pnh.getParam("extended", extended);
    badParams += !pnh.getParam("subpixel", subpixel);
    badParams += !pnh.getParam("rectify", rectify);
    badParams += !pnh.getParam("enableRGB", enableRGB);
    badParams += !pnh.getParam("enableDepth", enableDepth);
    badParams += !pnh.getParam("enableImu", enableImu);

    badParams += !pnh.getParam("depth_aligned", depth_aligned);
    badParams += !pnh.getParam("stereo_fps", stereo_fps);
    badParams += !pnh.getParam("confidence", confidence);
    badParams += !pnh.getParam("LRchecktresh", LRchecktresh);
    badParams += !pnh.getParam("monoResolution", monoResolution);
    badParams += !pnh.getParam("angularVelCovariance", angularVelCovariance);
    badParams += !pnh.getParam("linearAccelCovariance", linearAccelCovariance);
    badParams += !pnh.getParam("enableSpatialDetection", enableSpatialDetection);
    badParams += !pnh.getParam("syncNN", syncNN);
    badParams += !pnh.getParam("resourceBaseFolder", resourceBaseFolder);

    // Applies only to PRO model
    badParams += !pnh.getParam("enableDotProjector", enableDotProjector);
    badParams += !pnh.getParam("enableFloodLight", enableFloodLight);
    badParams += !pnh.getParam("dotProjectormA", dotProjectormA);
    badParams += !pnh.getParam("floodLightmA", floodLightmA);
    badParams += !pnh.getParam("detectionClassesCount", detectionClassesCount);

    std::string nnParam;
    pnh.getParam("nnName", nnParam);
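    // "x" appears to be used by the launch file as a sentinel meaning "keep the default blob name".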
    if(nnParam != "x") {
        pnh.getParam("nnName", nnName);
    }

    if(resourceBaseFolder.empty()) {
        throw std::runtime_error("Send the path to the resource folder containing the NN blob in 'resourceBaseFolder'");
    }
    nnPath = resourceBaseFolder + "/" + nnName;
    std::cout << " nnPath ,, " << nnPath << std::endl;


    std::vector<std::string> usbStrings = {"UNKNOWN", "LOW", "FULL", "HIGH", "SUPER", "SUPER_PLUS"};

    dai::ros::ImuSyncMethod imuMode = static_cast<dai::ros::ImuSyncMethod>(imuModeParam);

    dai::Pipeline pipeline;
    int monoWidth, monoHeight;
    bool isDeviceFound = false;

    //Check if only one feature is called
    int count = 0;
    bool single = false;
    bool check[4] = {enableDepth, enableSpatialDetection, enableRGB, enableImu};
    for(int i = 0; i < 4; i++)
    {
        if(check[i])
        {
            count++;
        }
    }

    if(count == 1)
    {
        single = true;
    }

    std::cout << "single: " << single << std::endl;

    std::tie(pipeline, monoWidth, monoHeight) =  createPipeline(enableDepth,
                                                                enableSpatialDetection,
                                                                enableRGB,
                                                                enableImu,
                                                                single,
                                                                lrcheck,
                                                                extended,
                                                                subpixel,
                                                                rectify,
                                                                stereo_fps,
                                                                confidence,
                                                                LRchecktresh,
                                                                detectionClassesCount,
                                                                monoResolution,
                                                                syncNN,
                                                                nnPath);

    std::shared_ptr<dai::Device> device;
    std::vector<dai::DeviceInfo> availableDevices = dai::Device::getAllAvailableDevices();

    std::cout << "Listing available devices..." << std::endl;
    for(auto deviceInfo : availableDevices) {
        std::cout << "Device Mx ID: " << deviceInfo.getMxId() << std::endl;
        if(deviceInfo.getMxId() == mxId) {
            if(deviceInfo.state == X_LINK_UNBOOTED || deviceInfo.state == X_LINK_BOOTLOADER) {
                isDeviceFound = true;
                if(poeMode) {
                    device = std::make_shared<dai::Device>(pipeline, deviceInfo);
                } else {
                    device = std::make_shared<dai::Device>(pipeline, deviceInfo, usb2Mode);
                }
                break;
            } else if(deviceInfo.state == X_LINK_BOOTED) {
                throw std::runtime_error("ros::NodeHandle() from Node \"" + pnh.getNamespace() + "\": DepthAI device with MxId \"" + mxId
                                         + "\" is already booted on a different process.");
            }
        } else if(mxId.empty()) {
            isDeviceFound = true;
            device = std::make_shared<dai::Device>(pipeline);
        }
    }

    std::cout << "blah " << std::endl;

    if(!isDeviceFound) {
        throw std::runtime_error("ros::NodeHandle() from Node \"" + pnh.getNamespace() + "\": DepthAI device with MxId \"" + mxId + "\" not found.");
    }

    if(!poeMode) {
        std::cout << "Device USB status: " << usbStrings[static_cast<int32_t>(device->getUsbSpeed())] << std::endl;
    }

    auto calibrationHandler = device->readCalibration();

    auto boardName = calibrationHandler.getEepromData().boardName;
    if(monoHeight > 480 && boardName == "OAK-D-LITE") {
        monoWidth = 640;
        monoHeight = 480;
    }
    
    if(boardName.find("PRO") != std::string::npos) 
    {
        if(enableDotProjector) 
        {
            device->setIrLaserDotProjectorBrightness(dotProjectormA);
        }

        if(enableFloodLight) 
        {
            device->setIrFloodLightBrightness(floodLightmA);
        }
    }

    int colorWidth = 1280, colorHeight = 720;
    if(monoHeight < 720) {
        colorWidth = 640;
        colorHeight = 360;
    }

    /////////Publishing Camera Data

    if (enableImu)
    {
        dai::rosBridge::BridgePublisher<sensor_msgs::Imu, dai::IMUData> ImuPublish = pub_imu(device, tfPrefix, imuMode, linearAccelCovariance, angularVelCovariance);
        ImuPublish.addPublisherCallback();
        // everything is running
        if (enableRGB && enableDepth && enableSpatialDetection)
        {
            dai::rosBridge::BridgePublisher<sensor_msgs::Image, dai::ImgFrame> rgbPublish = pub_rgb(device, monoWidth, monoHeight, colorWidth, colorHeight, tfPrefix);
            dai::rosBridge::BridgePublisher<sensor_msgs::Image, dai::ImgFrame> depthPublish = pub_depth(device, monoWidth, monoHeight, colorWidth, colorHeight, tfPrefix, depth_aligned);
            dai::rosBridge::BridgePublisher<sensor_msgs::Image, dai::ImgFrame> previewPublish = pub_preview(device, monoWidth, monoHeight, colorWidth, colorHeight, tfPrefix);
            dai::rosBridge::BridgePublisher<depthai_ros_msgs::SpatialDetectionArray, dai::SpatialImgDetections> detectionPublish = pub_detect(device, tfPrefix);

            rgbPublish.addPublisherCallback();
            depthPublish.addPublisherCallback();
            previewPublish.addPublisherCallback();
            detectionPublish.addPublisherCallback();
            ros::spin();
        }
        //imu and depth and yolo
        if (enableDepth && enableSpatialDetection)
        {
            dai::rosBridge::BridgePublisher<sensor_msgs::Image, dai::ImgFrame> depthPublish = pub_depth(device, monoWidth, monoHeight, colorWidth, colorHeight, tfPrefix, depth_aligned);
            dai::rosBridge::BridgePublisher<sensor_msgs::Image, dai::ImgFrame> previewPublish = pub_preview(device, monoWidth, monoHeight, colorWidth, colorHeight, tfPrefix);
            dai::rosBridge::BridgePublisher<depthai_ros_msgs::SpatialDetectionArray, dai::SpatialImgDetections> detectionPublish = pub_detect(device, tfPrefix);

            depthPublish.addPublisherCallback();
            previewPublish.addPublisherCallback();
            detectionPublish.addPublisherCallback();
            ros::spin();
        }
        //imu and rgb and yolo
        if (enableRGB && enableSpatialDetection)
        {
            dai::rosBridge::BridgePublisher<sensor_msgs::Image, dai::ImgFrame> rgbPublish = pub_rgb(device, monoWidth, monoHeight, colorWidth, colorHeight, tfPrefix);
            dai::rosBridge::BridgePublisher<sensor_msgs::Image, dai::ImgFrame> previewPublish = pub_preview(device, monoWidth, monoHeight, colorWidth, colorHeight, tfPrefix);
            dai::rosBridge::BridgePublisher<depthai_ros_msgs::SpatialDetectionArray, dai::SpatialImgDetections> detectionPublish = pub_detect(device, tfPrefix);

            rgbPublish.addPublisherCallback();
            previewPublish.addPublisherCallback();
            detectionPublish.addPublisherCallback();
            ros::spin();
        }
        //imu and rgb and depth
        if (enableRGB && enableDepth)
        {
            dai::rosBridge::BridgePublisher<sensor_msgs::Image, dai::ImgFrame> rgbPublish = pub_rgb(device, monoWidth, monoHeight, colorWidth, colorHeight, tfPrefix);
            dai::rosBridge::BridgePublisher<sensor_msgs::Image, dai::ImgFrame> depthPublish = pub_depth(device, monoWidth, monoHeight, colorWidth, colorHeight, tfPrefix, depth_aligned);

            rgbPublish.addPublisherCallback();
            depthPublish.addPublisherCallback();
            ros::spin();
        }
        //imu and rgb
        if (enableRGB)
        {
            dai::rosBridge::BridgePublisher<sensor_msgs::Image, dai::ImgFrame> rgbPublish = pub_rgb(device, monoWidth, monoHeight, colorWidth, colorHeight, tfPrefix);
            rgbPublish.addPublisherCallback();
            ros::spin();
        }
        //imu and depth
        if (enableDepth)
        {
            dai::rosBridge::BridgePublisher<sensor_msgs::Image, dai::ImgFrame> depthPublish = pub_depth(device, monoWidth, monoHeight, colorWidth, colorHeight, tfPrefix, depth_aligned);
            depthPublish.addPublisherCallback();
            ros::spin();
        }
        //imu and yolo
        if (enableSpatialDetection)
        {
            dai::rosBridge::BridgePublisher<sensor_msgs::Image, dai::ImgFrame> previewPublish = pub_preview(device, monoWidth, monoHeight, colorWidth, colorHeight, tfPrefix);
            dai::rosBridge::BridgePublisher<depthai_ros_msgs::SpatialDetectionArray, dai::SpatialImgDetections> detectionPublish = pub_detect(device, tfPrefix);

            previewPublish.addPublisherCallback();
            detectionPublish.addPublisherCallback();
            ros::spin();
        }
        //just imu
        ros::spin();
    }

    if (enableRGB)
    {
        dai::rosBridge::BridgePublisher<sensor_msgs::Image, dai::ImgFrame> rgbPublish = pub_rgb(device, monoWidth, monoHeight, colorWidth, colorHeight, tfPrefix);
        rgbPublish.addPublisherCallback();
        //rgb depth yolo 
        if (enableDepth && enableSpatialDetection)
        {
            dai::rosBridge::BridgePublisher<sensor_msgs::Image, dai::ImgFrame> depthPublish = pub_depth(device, monoWidth, monoHeight, colorWidth, colorHeight, tfPrefix, depth_aligned);
            dai::rosBridge::BridgePublisher<sensor_msgs::Image, dai::ImgFrame> previewPublish = pub_preview(device, monoWidth, monoHeight, colorWidth, colorHeight, tfPrefix);
            dai::rosBridge::BridgePublisher<depthai_ros_msgs::SpatialDetectionArray, dai::SpatialImgDetections> detectionPublish = pub_detect(device, tfPrefix);

            rgbPublish.addPublisherCallback();
            depthPublish.addPublisherCallback();
            previewPublish.addPublisherCallback();
            detectionPublish.addPublisherCallback();
            ros::spin();
        }
        //rgb and depth
        if (enableDepth)
        {
            dai::rosBridge::BridgePublisher<sensor_msgs::Image, dai::ImgFrame> depthPublish = pub_depth(device, monoWidth, monoHeight, colorWidth, colorHeight, tfPrefix, depth_aligned);

            depthPublish.addPublisherCallback();
            ros::spin();
        }
        //rgb and yolo
        if (enableSpatialDetection)
        {
            dai::rosBridge::BridgePublisher<sensor_msgs::Image, dai::ImgFrame> previewPublish = pub_preview(device, monoWidth, monoHeight, colorWidth, colorHeight, tfPrefix);
            dai::rosBridge::BridgePublisher<depthai_ros_msgs::SpatialDetectionArray, dai::SpatialImgDetections> detectionPublish = pub_detect(device, tfPrefix);

            previewPublish.addPublisherCallback();
            detectionPublish.addPublisherCallback();
            ros::spin();
        }
        //just rgb
        ros::spin();
    }
    if (enableDepth)
    {
        dai::rosBridge::BridgePublisher<sensor_msgs::Image, dai::ImgFrame> depthPublish = pub_depth(device, monoWidth, monoHeight, colorWidth, colorHeight, tfPrefix, depth_aligned);
        depthPublish.addPublisherCallback();
        //depth and yolo
        if (enableSpatialDetection)
        {
            dai::rosBridge::BridgePublisher<sensor_msgs::Image, dai::ImgFrame> previewPublish = pub_preview(device, monoWidth, monoHeight, colorWidth, colorHeight, tfPrefix);
            dai::rosBridge::BridgePublisher<depthai_ros_msgs::SpatialDetectionArray, dai::SpatialImgDetections> detectionPublish = pub_detect(device, tfPrefix);

            previewPublish.addPublisherCallback();
            detectionPublish.addPublisherCallback();
            ros::spin();
        }
        //just depth
        ros::spin();
    }
    if (enableSpatialDetection)
    {
        dai::rosBridge::BridgePublisher<sensor_msgs::Image, dai::ImgFrame> previewPublish = pub_preview(device, monoWidth, monoHeight, colorWidth, colorHeight, tfPrefix);
        dai::rosBridge::BridgePublisher<depthai_ros_msgs::SpatialDetectionArray, dai::SpatialImgDetections> detectionPublish = pub_detect(device, tfPrefix);

        previewPublish.addPublisherCallback();
        detectionPublish.addPublisherCallback();
        //just yolo
        ros::spin();
    }
    
}

dai::rosBridge::BridgePublisher<sensor_msgs::Imu, dai::IMUData> OakCustomLaunch::pub_imu(std::shared_ptr<dai::Device> device, 
                              std::string tfPrefix, 
                              dai::ros::ImuSyncMethod imuMode, 
                              double linearAccelCovariance, 
                              double angularVelCovariance)
{
    auto imuQueue = device->getOutputQueue("imu", 30, false);
    dai::rosBridge::ImuConverter imuConverter(tfPrefix + "_imu_frame", imuMode, linearAccelCovariance, angularVelCovariance);
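    // NOTE: imuConverter is a stack local whose address is bound into the
    // callback below, so the returned publisher is left holding a dangling
    // pointer once pub_imu() returns; keeping the converter alive as a class
    // member (or on the heap) would avoid that.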
    dai::rosBridge::BridgePublisher<sensor_msgs::Imu, dai::IMUData> ImuPublish(
        imuQueue,
        pnh,
        std::string("imu"),
        std::bind(&dai::rosBridge::ImuConverter::toRosMsg, &imuConverter, std::placeholders::_1, std::placeholders::_2),
        30,
        "",
        "imu");
    
    return ImuPublish;
}

dai::rosBridge::BridgePublisher<sensor_msgs::Image, dai::ImgFrame> OakCustomLaunch::pub_rgb(std::shared_ptr<dai::Device> device, 
                              int monoWidth, 
                              int monoHeight, 
                              int colorWidth, 
                              int colorHeight, 
                              std::string tfPrefix)
{
    auto calibrationHandler = device->readCalibration();
    dai::rosBridge::ImageConverter rgbConverter(tfPrefix + "_rgb_camera_optical_frame", false);
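    // NOTE: same lifetime caveat as in pub_imu(); &rgbConverter below dangles
    // once pub_rgb() returns.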

    auto rgbCameraInfo = rgbConverter.calibrationToCameraInfo(calibrationHandler, dai::CameraBoardSocket::RGB, colorWidth, colorHeight);
    auto imgQueue = device->getOutputQueue("rgb", 30, false);
    dai::rosBridge::BridgePublisher<sensor_msgs::Image, dai::ImgFrame> rgbPublish(
        imgQueue,
        pnh,
        std::string("color/image"),
        std::bind(&dai::rosBridge::ImageConverter::toRosMsg, &rgbConverter, std::placeholders::_1, std::placeholders::_2),
        30,
        rgbCameraInfo,
        "color");
    return rgbPublish;
}

dai::rosBridge::BridgePublisher<sensor_msgs::Image, dai::ImgFrame> OakCustomLaunch::pub_depth(std::shared_ptr<dai::Device> device, 
                                int monoWidth, 
                                int monoHeight, 
                                int colorWidth, 
                                int colorHeight, 
                                std::string tfPrefix, 
                                bool depth_aligned)
{
    auto calibrationHandler = device->readCalibration();

    dai::rosBridge::ImageConverter rgbConverter(tfPrefix + "_rgb_camera_optical_frame", false);

    dai::rosBridge::ImageConverter converter(tfPrefix + "_left_camera_optical_frame", true);
    dai::rosBridge::ImageConverter rightconverter(tfPrefix + "_right_camera_optical_frame", true);
    auto leftCameraInfo = converter.calibrationToCameraInfo(calibrationHandler, dai::CameraBoardSocket::LEFT, monoWidth, monoHeight);
    auto rightCameraInfo = converter.calibrationToCameraInfo(calibrationHandler, dai::CameraBoardSocket::RIGHT, monoWidth, monoHeight);

    std::shared_ptr<dai::DataOutputQueue> stereoQueue;
    stereoQueue = device->getOutputQueue("depth", 30, false);
    auto depthCameraInfo = depth_aligned ? rgbConverter.calibrationToCameraInfo(calibrationHandler, dai::CameraBoardSocket::RGB, colorWidth, colorHeight) : rightCameraInfo;
    auto depthconverter = depth_aligned ? rgbConverter : rightconverter;
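    // NOTE: depthconverter is a local copy of whichever converter was chosen,
    // so the &depthconverter bound below also dangles once pub_depth() returns.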
    dai::rosBridge::BridgePublisher<sensor_msgs::Image, dai::ImgFrame> depthPublish(
        stereoQueue,
        pnh,
        std::string("stereo/depth"),
        std::bind(&dai::rosBridge::ImageConverter::toRosMsg,
                &depthconverter,  // since the converter has the same frame name
                                    // and image type is also same we can reuse it
                std::placeholders::_1,
                std::placeholders::_2),
        30,
        depthCameraInfo,
        "stereo");
    
    return depthPublish;
}

dai::rosBridge::BridgePublisher<sensor_msgs::Image, dai::ImgFrame> OakCustomLaunch::pub_preview(std::shared_ptr<dai::Device> device, 
                                                                                                int monoWidth, 
                                                                                                int monoHeight, 
                                                                                                int colorWidth, 
                                                                                                int colorHeight, 
                                                                                                std::string tfPrefix)
{
    auto calibrationHandler = device->readCalibration();
    dai::rosBridge::ImageConverter rgbConverter(tfPrefix + "_rgb_camera_optical_frame", false);

    auto boardName = calibrationHandler.getEepromData().boardName;

    auto previewQueue = device->getOutputQueue("preview", 30, false);
    auto previewCameraInfo = rgbConverter.calibrationToCameraInfo(calibrationHandler, dai::CameraBoardSocket::RGB, 416, 416);

    dai::rosBridge::BridgePublisher<sensor_msgs::Image, dai::ImgFrame> previewPublish(
        previewQueue,
        pnh,
        std::string("color/preview/image"),
        std::bind(&dai::rosBridge::ImageConverter::toRosMsg, &rgbConverter, std::placeholders::_1, std::placeholders::_2),
        30,
        previewCameraInfo,
        "color/preview");
    return previewPublish;
}

dai::rosBridge::BridgePublisher<depthai_ros_msgs::SpatialDetectionArray, dai::SpatialImgDetections> OakCustomLaunch::pub_detect(std::shared_ptr<dai::Device> device, std::string tfPrefix)
{
    auto detectionQueue = device->getOutputQueue("detections", 30, false);
    
    dai::rosBridge::SpatialDetectionConverter detConverter(tfPrefix + "_rgb_camera_optical_frame", 416, 416, false);
    dai::rosBridge::BridgePublisher<depthai_ros_msgs::SpatialDetectionArray, dai::SpatialImgDetections> detectionPublish(
        detectionQueue,
        pnh,
        std::string("color/yolov4_Spatial_detections"),
        std::bind(&dai::rosBridge::SpatialDetectionConverter::toRosMsg, &detConverter, std::placeholders::_1, std::placeholders::_2),
        30);

    return detectionPublish;
}



} // namespace oak_custom_launch
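For reference, the restructuring I'm leaning toward is keeping the converters and publishers as class members, so the pub_* helpers only fill them in and nothing has to be copied out of a function. A rough sketch of the header layout I have in mind (all member names here are hypothetical, none of this is in my current header):

```
// Hypothetical header sketch: converters and publishers live as members so
// the pointers bound into publisher callbacks stay valid for the node's
// lifetime, and each BridgePublisher is heap-allocated so it is never copied.
class OakCustomLaunch
{
public:
    OakCustomLaunch();

private:
    ros::NodeHandle pnh{"~"};

    // Converters must outlive the publishers whose callbacks reference them.
    std::unique_ptr<dai::rosBridge::ImuConverter> imuConverter_;
    std::unique_ptr<dai::rosBridge::ImageConverter> rgbConverter_;
    std::unique_ptr<dai::rosBridge::ImageConverter> rightConverter_;

    // Heap-allocated publishers; each pub_* helper assigns the matching
    // member with std::make_unique instead of returning by value.
    std::unique_ptr<dai::rosBridge::BridgePublisher<sensor_msgs::Imu, dai::IMUData>> imuPublish_;
    std::unique_ptr<dai::rosBridge::BridgePublisher<sensor_msgs::Image, dai::ImgFrame>> rgbPublish_;
    std::unique_ptr<dai::rosBridge::BridgePublisher<sensor_msgs::Image, dai::ImgFrame>> depthPublish_;
    std::unique_ptr<dai::rosBridge::BridgePublisher<sensor_msgs::Image, dai::ImgFrame>> previewPublish_;
    std::unique_ptr<dai::rosBridge::BridgePublisher<depthai_ros_msgs::SpatialDetectionArray, dai::SpatialImgDetections>> detectionPublish_;
};
```

Each helper would then end with something like imuPublish_->addPublisherCallback(), and the constructor would call ros::spin() once after all the enabled helpers have run.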

luxonis-Sachin

Sorry, I just realized I made a separate comment instead of replying to you. My code is the same as the listing I posted above.
