I am getting the error "possible device error/misconfiguration. Original message 'Couldn't read data from stream: 'nn' (X_LINK_ERROR)'" while running the camera module with two OAK-D devices. The cameras are connected to a laptop via USB 3 ports.
I also tried to save the crash dump after the crash, but it always reports that no crash dump was found. I am using DepthAI version 2.24.
A full minimal reproducible example (MRE) is included below for analysis.
#include <algorithm>
#include <chrono>
#include <iomanip>
#include <iostream>
#include <sstream>
#include <string>
#include <thread>
#include <utility>
#include <vector>
// Includes common necessary includes for development using depthai library
#include "depthai/depthai.hpp"
using namespace std;
// Class labels of the MobileNet-SSD (PASCAL VOC) model; a detection's integer
// label indexes this table.
static const std::vector<std::string> labelMap = {
    "background", "aeroplane",   "bicycle", "bird",  "boat",
    "bottle",     "bus",         "car",     "cat",   "chair",
    "cow",        "diningtable", "dog",     "horse", "motorbike",
    "person",     "pottedplant", "sheep",   "sofa",  "train",
    "tvmonitor"};
// function to get camera pipeline
// Builds the per-device pipeline: a color camera feeding a MobileNet spatial
// detection network, with stereo depth computed from the two mono cameras.
// Exposes four XLink output streams: "frames" (camera video), "rgb" (NN
// passthrough), "nn" (spatial detections) and "depth" (depth passthrough).
// @param nnPath path of the compiled .blob of the MobileNet-SSD model
// @return shared pointer to the assembled pipeline (never null)
std::shared_ptr<dai::Pipeline> getPipeline(std::string nnPath)
{
    auto pipeline = std::make_shared<dai::Pipeline>();

    // Color camera: 4K sensor with a 1/3 ISP downscale for the video output;
    // the 300x300 non-interleaved preview matches the MobileNet input size.
    auto camRgb = pipeline->create<dai::node::ColorCamera>();
    camRgb->setPreviewSize(300, 300);
    camRgb->setResolution(dai::ColorCameraProperties::SensorResolution::THE_4_K);
    camRgb->setInterleaved(false);
    camRgb->setIspScale(1, 3);
    camRgb->setFps(20);
    camRgb->setPreviewKeepAspectRatio(false);
    camRgb->initialControl.setManualFocus(100); // fixed focus (stable depth/RGB alignment)

    auto xoutFrames = pipeline->create<dai::node::XLinkOut>();
    xoutFrames->setStreamName("frames");
    camRgb->video.link(xoutFrames->input);

    auto spatialDetectionNetwork = pipeline->create<dai::node::MobileNetSpatialDetectionNetwork>();
    spatialDetectionNetwork->setBlobPath(nnPath);
    spatialDetectionNetwork->setConfidenceThreshold(0.5f);
    spatialDetectionNetwork->input.setBlocking(false); // drop frames instead of stalling the camera
    spatialDetectionNetwork->setBoundingBoxScaleFactor(0.5);
    spatialDetectionNetwork->setDepthLowerThreshold(100);  // mm
    spatialDetectionNetwork->setDepthUpperThreshold(5000); // mm
    camRgb->preview.link(spatialDetectionNetwork->input);

    // Stereo depth from the two mono cameras, aligned to the RGB sensor.
    auto monoLeft = pipeline->create<dai::node::MonoCamera>();
    auto monoRight = pipeline->create<dai::node::MonoCamera>();
    auto stereo = pipeline->create<dai::node::StereoDepth>();

    // BUG FIX: the mono resolution must be configured BEFORE it is queried for
    // setOutputSize(). The original code read the default (720p) size first and
    // only then switched the sensors to 400p, leaving the stereo output size
    // inconsistent with the actual mono stream.
    monoLeft->setResolution(dai::MonoCameraProperties::SensorResolution::THE_400_P);
    monoLeft->setBoardSocket(dai::CameraBoardSocket::LEFT);
    monoRight->setResolution(dai::MonoCameraProperties::SensorResolution::THE_400_P);
    monoRight->setBoardSocket(dai::CameraBoardSocket::RIGHT);

    stereo->setDefaultProfilePreset(dai::node::StereoDepth::PresetMode::HIGH_DENSITY);
    stereo->setDepthAlign(dai::CameraBoardSocket::RGB);
    stereo->setOutputSize(monoLeft->getResolutionWidth(), monoLeft->getResolutionHeight());

    monoLeft->out.link(stereo->left);
    monoRight->out.link(stereo->right);

    auto xoutRgb = pipeline->create<dai::node::XLinkOut>();
    xoutRgb->setStreamName("rgb");
    spatialDetectionNetwork->passthrough.link(xoutRgb->input);

    auto xoutNN = pipeline->create<dai::node::XLinkOut>();
    xoutNN->setStreamName("nn");
    spatialDetectionNetwork->out.link(xoutNN->input);

    auto xoutDepth = pipeline->create<dai::node::XLinkOut>();
    xoutDepth->setStreamName("depth");
    stereo->depth.link(spatialDetectionNetwork->inputDepth);
    spatialDetectionNetwork->passthroughDepth.link(xoutDepth->input);

    return pipeline;
}
// Connects to all available OAK devices (the application expects exactly two),
// starts the detection pipeline on each one and collects its output queues.
// @param nnPath              path to the NN blob, forwarded to getPipeline()
// @param Camera_Status_API_x out: 0 when both cameras are connected, 1 when
//                            one is missing, 2 when none were found
// @param devices             out: keeps the opened devices alive for the caller
// @return one entry per device: (mxId, (depth, (nn, frames))), sorted by mxId
//         so the window order is deterministic; empty unless both cameras are up
std::vector<std::pair<std::string, std::pair<std::shared_ptr<dai::DataOutputQueue>, std::pair<std::shared_ptr<dai::DataOutputQueue>, std::shared_ptr<dai::DataOutputQueue>>>>> getDevicesPipeline(string nnPath, int &Camera_Status_API_x, std::vector<std::shared_ptr<dai::Device>> &devices)
{
    auto device_infos = dai::Device::getAllAvailableDevices();
    // NOTE: despite the historical name in this codebase, this requests
    // USB3 SUPER speed, not USB2.
    auto usbSpeedRequest = dai::UsbSpeed::SUPER;
    auto openvino_version = dai::OpenVINO::Version::VERSION_2021_4;
    std::vector<std::pair<std::string, std::pair<std::shared_ptr<dai::DataOutputQueue>, std::pair<std::shared_ptr<dai::DataOutputQueue>, std::shared_ptr<dai::DataOutputQueue>>>>> qRgbMap;

    // Camera status = number of cameras detected, capped at the expected two.
    // BUG FIX: the original single-camera branch constructed a dai::Device in a
    // local scope; that booted the camera and immediately closed it again,
    // which can leave the device in a bad X_LINK state for the next connection
    // attempt. It also created a CameraControl that was never sent anywhere;
    // both pieces of dead code are removed.
    int cameraStatus = static_cast<int>(std::min<std::size_t>(device_infos.size(), 2));
    Camera_Status_API_x = 2 - cameraStatus;
    if (Camera_Status_API_x == 0)
    {
        // Both cameras present: boot each device and start the pipeline.
        for (auto &device_info : device_infos)
        {
            auto device = std::make_shared<dai::Device>(openvino_version, device_info, usbSpeedRequest);
            devices.push_back(device);
            auto mxId = device->getMxId();

            auto pipeline = getPipeline(nnPath);
            device->startPipeline(*pipeline);

            // Non-blocking queues of depth 4: stale messages are dropped.
            auto qnn = device->getOutputQueue("nn", 4, false);
            auto qframes = device->getOutputQueue("frames", 4, false);
            auto qdepth = device->getOutputQueue("depth", 4, false);
            // The "rgb" passthrough stream is opened (non-blocking) so its
            // XLink buffer cannot fill up even though this app displays
            // "frames" instead.
            device->getOutputQueue("rgb", 4, false);

            qRgbMap.emplace_back(mxId, std::make_pair(qdepth, std::make_pair(qnn, qframes)));
        }
        // Deterministic camera order: sort the two entries by mxId.
        if (qRgbMap.size() == 2 && qRgbMap[0].first > qRgbMap[1].first)
        {
            std::swap(qRgbMap[0], qRgbMap[1]);
        }
    }
    return qRgbMap;
}
// function to get the frames from the pipeline
// Main acquisition/display loop: pulls video, detections and depth from every
// connected camera, overlays the detections and shows one window per device
// (window title = the device mxId). Press 'q' to quit.
// On any streaming error (e.g. X_LINK_ERROR) the devices are torn down and the
// loop keeps retrying the connection until both cameras are back.
void getCameraFrames(string nnPath)
{
    int Camera_Status_API_x;
    std::vector<std::shared_ptr<dai::Device>> devices;
    auto qRgbMap = getDevicesPipeline(nnPath, Camera_Status_API_x, devices);
    const auto color = cv::Scalar(255, 255, 255);
    // Placeholder shown while a stream has no data yet; created once instead
    // of being reallocated on every loop iteration as before.
    const cv::Mat zeroframe = cv::Mat::zeros(cv::Size(1280, 720), CV_8UC1);
    while (true)
    {
        try
        {
            for (std::size_t i = 0; i < qRgbMap.size(); i++)
            {
                auto qRgb = qRgbMap[i].second.second.second;  // "frames"
                auto inDets = qRgbMap[i].second.second.first; // "nn"
                auto streamName = qRgbMap[i].first;           // mxId
                auto qDepth = qRgbMap[i].second.first;        // "depth"
                auto inRgb = qRgb->tryGet<dai::ImgFrame>();
                // Blocking get() paces the loop to the NN output rate; it is
                // also where an X_LINK_ERROR surfaces when the link drops.
                auto inDet = inDets->get<dai::SpatialImgDetections>();
                auto inDepth = qDepth->tryGet<dai::ImgFrame>();
                if (inRgb != nullptr && inDet != nullptr && inDepth != nullptr)
                {
                    cv::Mat oframe = inRgb->getCvFrame();
                    for (const auto &detection : inDet->detections)
                    {
                        uint32_t labelIndex = detection.label;
                        std::string labelStr = to_string(labelIndex);
                        if (labelIndex < labelMap.size())
                            labelStr = labelMap[labelIndex];
                        // Detection coordinates are normalized; clamp to frame.
                        int x1 = std::max((int)(detection.xmin * oframe.cols), 0);
                        int y1 = std::max((int)(detection.ymin * oframe.rows), 0);
                        int x2 = std::min((int)(detection.xmax * oframe.cols), oframe.cols);
                        int y2 = std::min((int)(detection.ymax * oframe.rows), oframe.rows);
                        cv::putText(oframe, labelStr, cv::Point(x1 + 10, y1 + 20), cv::FONT_HERSHEY_TRIPLEX, 0.5, 255);
                        std::stringstream confStr;
                        confStr << std::fixed << std::setprecision(2) << detection.confidence * 100;
                        cv::putText(oframe, confStr.str(), cv::Point(x1 + 10, y1 + 35), cv::FONT_HERSHEY_TRIPLEX, 0.5, 255);
                        std::stringstream depthX;
                        depthX << "X: " << (int)detection.spatialCoordinates.x << " mm";
                        cv::putText(oframe, depthX.str(), cv::Point(x1 + 10, y1 + 50), cv::FONT_HERSHEY_TRIPLEX, 0.5, 255);
                        std::stringstream depthY;
                        depthY << "Y: " << (int)detection.spatialCoordinates.y << " mm";
                        cv::putText(oframe, depthY.str(), cv::Point(x1 + 10, y1 + 65), cv::FONT_HERSHEY_TRIPLEX, 0.5, 255);
                        std::stringstream depthZ;
                        depthZ << "Z: " << (int)detection.spatialCoordinates.z << " mm";
                        cv::putText(oframe, depthZ.str(), cv::Point(x1 + 10, y1 + 80), cv::FONT_HERSHEY_TRIPLEX, 0.5, 255);
                        // BUG FIX: the original passed cv::FONT_HERSHEY_SIMPLEX
                        // (value 0) as the rectangle *thickness*; use 1 px.
                        cv::rectangle(oframe, cv::Rect(cv::Point(x1, y1), cv::Point(x2, y2)), color, 1);
                    }
                    cv::imshow(streamName, oframe);
                }
                else
                {
                    cv::imshow(streamName, zeroframe);
                }
            }
        }
        catch (const std::exception &e)
        {
            std::cerr << "Camera stream error: " << e.what() << " - reconnecting" << std::endl;
            Camera_Status_API_x = -1;
            while (Camera_Status_API_x != 0)
            {
                devices.clear(); // closes the devices so they can re-enumerate
                qRgbMap.clear();
                // BUG FIX: the original retried in a tight loop and did not
                // guard getDevicesPipeline(); a throw during reconnection
                // escaped the handler and terminated the program. Back off a
                // little and swallow reconnection errors here.
                std::this_thread::sleep_for(std::chrono::seconds(1));
                try
                {
                    qRgbMap = getDevicesPipeline(nnPath, Camera_Status_API_x, devices);
                }
                catch (const std::exception &)
                {
                    Camera_Status_API_x = -1;
                }
            }
        }
        int key = cv::waitKey(1);
        if (key == 'q' || key == 'Q')
        {
            break;
        }
    }
}
// Entry point. An optional first command-line argument overrides the default
// model blob path, so the binary can be run from any working directory.
int main(int argc, char** argv)
{
    std::string nnPath("../mobilenet-ssd_openvino_2021.4_6shave.blob");
    if (argc > 1)
    {
        nnPath = argv[1]; // user-supplied blob location
    }
    getCameraFrames(nnPath);
    return 0;
}