I'm trying to run the rgb camera at 720p but I keep getting this error:
[1844301051F53E0E00] [1.2.4.4] [163.050] [system] [critical] Fatal error. Please report to developers. Log: 'ResourceLocker' '358'
I've played around with the scaling but nothing has worked yet. I'm using an oak-d camera.
std::tuple<dai::Pipeline, int, int> OakCustomLaunch::createPipeline(bool enableDepth,
bool enableSpatialDetection,
bool enableRGB,
bool enableImu,
bool single,
bool useMobile,
bool lrcheck,
bool extended,
bool subpixel,
int stereo_fps,
int confidence,
int LRchecktresh,
int previewWidth,
int previewHeight,
int mobile_previewWidth,
int mobile_previewHeight,
int rgbScaleNumerator,
int rgbScaleDinominator,
int detectionClassesCount,
std::string resolution,
std::string colorResolution,
bool syncNN,
int imuRate,
std::string mobliePath,
std::string nnPath)
{
dai::Pipeline pipeline;
dai::node::MonoCamera::Properties::SensorResolution monoResolution;
int width, height, rgbWidth, rgbHeight;
if(resolution == "720p") {
monoResolution = dai::node::MonoCamera::Properties::SensorResolution::THE_720_P;
width = 1280;
height = 720;
} else if(resolution == "400p") {
monoResolution = dai::node::MonoCamera::Properties::SensorResolution::THE_400_P;
width = 640;
height = 400;
} else if(resolution == "800p") {
monoResolution = dai::node::MonoCamera::Properties::SensorResolution::THE_800_P;
width = 1280;
height = 800;
} else if(resolution == "480p") {
monoResolution = dai::node::MonoCamera::Properties::SensorResolution::THE_480_P;
width = 640;
height = 480;
} else {
ROS_ERROR("Invalid parameter. -> monoResolution: %s", resolution.c_str());
throw std::runtime_error("Invalid mono camera resolution.");
}
//imu
auto imu = pipeline.create<dai::node::IMU>();
auto xoutImu = pipeline.create<dai::node::XLinkOut>();
xoutImu->setStreamName("imu");
imu->enableIMUSensor({dai::IMUSensor::ACCELEROMETER_RAW, dai::IMUSensor::GYROSCOPE_RAW}, imuRate);
imu->setBatchReportThreshold(5);
imu->setMaxBatchReports(20); // Get one message only for now.
imu->out.link(xoutImu->input);
// RGB image
dai::ColorCameraProperties::SensorResolution rgbResolution;
if(colorResolution == "1080p") {
rgbResolution = dai::node::ColorCamera::Properties::SensorResolution::THE_1080_P;
rgbWidth = 1920;
rgbHeight = 1080;
} else if(colorResolution == "720p") {
rgbResolution = dai::node::ColorCamera::Properties::SensorResolution::THE_720_P;
rgbWidth = 1280;
rgbHeight = 720;
ROS_WARN_STREAM("uhhhhhh");
} else if(colorResolution == "800p") {
rgbResolution = dai::node::ColorCamera::Properties::SensorResolution::THE_800_P;
rgbWidth = 1280;
rgbHeight = 800;
} else {
ROS_ERROR("Invalid parameter. -> rgbResolution: %s", colorResolution.c_str());
throw std::runtime_error("Invalid color camera resolution.");
}
auto camRgb = pipeline.create<dai::node::ColorCamera>();
auto xoutRgb = pipeline.create<dai::node::XLinkOut>();
xoutRgb->setStreamName("rgb");
camRgb->setResolution(rgbResolution);
camRgb->setBoardSocket(dai::CameraBoardSocket::RGB);
camRgb->setInterleaved(false);
rgbWidth = rgbWidth * rgbScaleNumerator / rgbScaleDinominator;
ROS_WARN_STREAM("rgb width is " << rgbWidth << " the %16 is " << rgbWidth % 16);
rgbHeight = rgbHeight * rgbScaleNumerator / rgbScaleDinominator;
camRgb->setIspScale(rgbScaleNumerator, rgbScaleDinominator);
camRgb->setPreviewSize(416, 416);
camRgb->isp.link(xoutRgb->input);
if(rgbWidth % 16 != 0) {
ROS_ERROR_STREAM("RGB Camera width is "<< (rgbWidth%16) <<" but it should be multiple of 16. Please choose a different scaling factor.");
throw std::runtime_error("Adjust RGB Camaera scaling.");
}
if(rgbWidth > width || rgbHeight > height) {
ROS_WARN_STREAM(
"RGB Camera resolution is heigher than the configured stereo resolution. Upscaling the stereo depth/disparity to match RGB camera resolution.");
} else if(rgbWidth > width || rgbHeight > height) {
ROS_WARN_STREAM(
"RGB Camera resolution is heigher than the configured stereo resolution. Downscaling the stereo depth/disparity to match RGB camera resolution.");
}
auto stereo = pipeline.create<dai::node::StereoDepth>();
auto xoutDepth = pipeline.create<dai::node::XLinkOut>();
auto monoLeft = pipeline.create<dai::node::MonoCamera>();
auto monoRight = pipeline.create<dai::node::MonoCamera>();
xoutDepth->setStreamName("depth");
// MonoCamera
monoLeft->setResolution(monoResolution);
monoLeft->setBoardSocket(dai::CameraBoardSocket::LEFT);
monoLeft->setFps(stereo_fps);
monoRight->setResolution(monoResolution);
monoRight->setBoardSocket(dai::CameraBoardSocket::RIGHT);
monoRight->setFps(stereo_fps);
// StereoDepth
stereo->initialConfig.setConfidenceThreshold(confidence); // Known to be best
stereo->setRectifyEdgeFillColor(0); // black, to better see the cutout
stereo->initialConfig.setLeftRightCheckThreshold(LRchecktresh); // Known to be best
stereo->setLeftRightCheck(lrcheck);
stereo->setExtendedDisparity(extended);
stereo->setSubpixel(subpixel);
stereo->setDepthAlign(dai::CameraBoardSocket::RGB);
// Link plugins CAM -> STEREO -> XLINK
stereo->setRectifyEdgeFillColor(0);
monoLeft->out.link(stereo->left);
monoRight->out.link(stereo->right);
stereo->depth.link(xoutDepth->input);
//Yolo
if (enableSpatialDetection && !useMobile)
{
camRgb->setColorOrder(dai::ColorCameraProperties::ColorOrder::BGR);
camRgb->setInterleaved(false);
camRgb->setPreviewSize(previewWidth, previewHeight);
auto spatialDetectionNetwork = pipeline.create<dai::node::YoloSpatialDetectionNetwork>();
auto xoutNN = pipeline.create<dai::node::XLinkOut>();
auto xoutPreview = pipeline.create<dai::node::XLinkOut>();
xoutPreview->setStreamName("preview");
xoutNN->setStreamName("detections");
spatialDetectionNetwork->setBlobPath(nnPath);
spatialDetectionNetwork->setConfidenceThreshold(0.5f);
spatialDetectionNetwork->input.setBlocking(false);
spatialDetectionNetwork->setBoundingBoxScaleFactor(0.5);
spatialDetectionNetwork->setDepthLowerThreshold(100);
spatialDetectionNetwork->setDepthUpperThreshold(10000);
// yolo specific parameters
spatialDetectionNetwork->setNumClasses(detectionClassesCount);
spatialDetectionNetwork->setCoordinateSize(4);
spatialDetectionNetwork->setAnchors({10, 14, 23, 27, 37, 58, 81, 82, 135, 169, 344, 319});
spatialDetectionNetwork->setAnchorMasks({{"side13", {3, 4, 5}}, {"side26", {1, 2, 3}}});
spatialDetectionNetwork->setIouThreshold(0.5f);
// Link plugins CAM -> NN -> XLINK
camRgb->preview.link(spatialDetectionNetwork->input);
if(syncNN){
spatialDetectionNetwork->passthrough.link(xoutPreview->input);
}else{
camRgb->preview.link(xoutPreview->input);
}
spatialDetectionNetwork->out.link(xoutNN->input);
stereo->depth.link(spatialDetectionNetwork->inputDepth);
}
//Mobilenet
if(enableSpatialDetection && useMobile)
{
auto detectionNetwork = pipeline.create<dai::node::MobileNetDetectionNetwork>();
auto dnOut = pipeline.create<dai::node::XLinkOut>();
auto xoutPreview = pipeline.create<dai::node::XLinkOut>();
xoutPreview->setStreamName("preview");
dnOut->setStreamName("detections");
camRgb->setPreviewSize(mobile_previewWidth, mobile_previewHeight);
camRgb->setInterleaved(false);
camRgb->setColorOrder(dai::ColorCameraProperties::ColorOrder::BGR);
// testing MobileNet DetectionNetwork
detectionNetwork->setBlobPath(mobliePath);
detectionNetwork->setConfidenceThreshold(0.5f);
// Link plugins CAM -> NN -> XLINK
camRgb->preview.link(detectionNetwork->input);
if(syncNN){
detectionNetwork->passthrough.link(xoutPreview->input);
}else{
camRgb->preview.link(xoutPreview->input);
}
detectionNetwork->out.link(dnOut->input);
}
return std::make_tuple(pipeline, width, height);
}