The current pipeline is set up and used as follows.
// Pipeline nodes referenced in the question (repeated in the full code below):
auto camRgb = pipeline.create<dai::node::ColorCamera>();
auto left = pipeline.create<dai::node::MonoCamera>();
auto right = pipeline.create<dai::node::MonoCamera>();
auto stereo = pipeline.create<dai::node::StereoDepth>();
auto rgbOut = pipeline.create<dai::node::XLinkOut>();
auto depthOut = pipeline.create<dai::node::XLinkOut>();
auto nn = pipeline.create<dai::node::MobileNetDetectionNetwork>();
auto xinNN = pipeline.create<dai::node::XLinkIn>();
auto xoutNN = pipeline.create<dai::node::XLinkOut>();
The application uses three main outputs:
the RGB output, the depth output, and the NN output.
The RGB output is created from camRgb.
The stereo pipeline is driven by left and right, which produce the depth output.
The RGB frames are fed back to the device through xinNN so that nn produces the NN output.
camRgb is set to 1080p; left and right are set to 400p.
Everything runs at 30 fps.
nn uses the mobilenet-ssd model with the inference thread count set to 1.
I found that the camera stops due to overheating when the application runs for a long time with the current setup.
Because the device shut down from overheating, resetting the pipeline and the device did not bring it back.
I am confident the cause is overheating because the camera runs for more than 48 hours without stopping when external cooling is applied with a fan.
When the NN node was removed, the device was confirmed to run for more than 24 hours.
However, I need to use the NN node without external cooling.
Is there a setting option I can change here?
Any help is appreciated. Thank you.
P.S. The full code is as follows.
static std::atomic<bool> downscaleColor{true};
// Target frame rate shared by the color camera and both mono cameras.
static constexpr int fps = 30;
// 400p resolution for the stereo mono pair.
static constexpr auto monoRes = dai::MonoCameraProperties::SensorResolution::THE_400_P;
// Create pipeline
dai::Pipeline pipeline;
dai::Device device;
// Names of the output queues to open after the pipeline starts.
std::vector<std::string> queueNames;
// Define sources and outputs
auto camRgb = pipeline.create<dai::node::ColorCamera>();
auto left = pipeline.create<dai::node::MonoCamera>();
auto right = pipeline.create<dai::node::MonoCamera>();
auto stereo = pipeline.create<dai::node::StereoDepth>();
auto rgbOut = pipeline.create<dai::node::XLinkOut>();
auto depthOut = pipeline.create<dai::node::XLinkOut>();
// Host-visible stream names; also queued for opening after startup.
rgbOut->setStreamName("rgb");
queueNames.push_back("rgb");
depthOut->setStreamName("depth");
queueNames.push_back("depth");
// Properties
camRgb->setBoardSocket(dai::CameraBoardSocket::CAM_A);
camRgb->setResolution(dai::ColorCameraProperties::SensorResolution::THE_1080_P);
camRgb->setFps(fps);
// Downscale the 1080p ISP output by 2/3 when enabled (downscaleColor is true by default).
if(downscaleColor) camRgb->setIspScale(2, 3);
// For now, RGB needs fixed focus to properly align with depth.
// This value was used during calibration
// Read the device calibration to fix the RGB focus at the calibrated lens
// position (needed for RGB-depth alignment) and fetch the camera FOV.
// NOTE(review): on a persistent read failure this loop retries forever with
// no delay between attempts (busy-wait, spamming stdout) — consider adding
// a short sleep between retries and/or a maximum retry count.
while (true) {
try {
auto calibData = device.readCalibration2();
auto lensPosition = calibData.getLensPosition(dai::CameraBoardSocket::CAM_A);
// A lens position of 0 is treated as "not calibrated" and skipped.
if(lensPosition) {
camRgb->initialControl.setManualFocus(lensPosition);
}
// `hfov` is declared outside this snippet — presumably the horizontal FOV in degrees; verify at the declaration site.
hfov = calibData.getFov(dai::CameraBoardSocket::CAM_A);
break;
} catch(const std::exception& ex) {
std::cout << ex.what() << std::endl;
continue;
}
}
// Mono cameras: 400p at the shared fps, one per stereo socket.
left->setResolution(monoRes);
left->setCamera("left");
left->setFps(fps);
right->setResolution(monoRes);
right->setCamera("right");
right->setFps(fps);
// Stereo depth: high-density preset with 7x7 median filtering.
stereo->setDefaultProfilePreset(dai::node::StereoDepth::PresetMode::HIGH_DENSITY);
stereo->initialConfig.setMedianFilter(dai::MedianFilter::KERNEL_7x7);
// LR-check is required for depth alignment
stereo->setLeftRightCheck(true);
stereo->setSubpixel(false);
stereo->setExtendedDisparity(false);
// Post-processing: temporal + spatial filters on, speckle filter off,
// valid range limited to 400-15000 (units per thresholdFilter config).
auto config = stereo->initialConfig.get();
config.postProcessing.speckleFilter.enable = false;
config.postProcessing.speckleFilter.speckleRange = 50;
config.postProcessing.temporalFilter.enable = true;
config.postProcessing.spatialFilter.enable = true;
config.postProcessing.spatialFilter.holeFillingRadius = 2;
config.postProcessing.spatialFilter.numIterations = 1;
config.postProcessing.thresholdFilter.minRange = 400;
config.postProcessing.thresholdFilter.maxRange = 15000;
// config.postProcessing.decimationFilter.decimationFactor = 1;
stereo->initialConfig.set(config);
// Align the depth output to the RGB camera (CAM_A).
stereo->setDepthAlign(dai::CameraBoardSocket::CAM_A);
// You can later send configs from the host (XLinkIn) / scripting node to the InputConfig
auto nn = pipeline.create<dai::node::MobileNetDetectionNetwork>();
// Drop detections below 50% confidence.
nn->setConfidenceThreshold((float)0.5);
// `nnBlobPath` is defined outside this snippet (the mobilenet-ssd blob per the question text).
nn->setBlobPath(nnBlobPath);
// nn->setNumInferenceThreads(1);
// Non-blocking input: frames are dropped if inference falls behind.
nn->input.setBlocking(false);
// NN input frames are pushed from the host via XLinkIn rather than linked
// on-device. NOTE(review): this RGB -> host -> xinNN round trip doubles the
// XLink traffic versus linking a camera output directly to nn->input on the
// device; worth verifying whether the extra USB/SoC load is needed, given
// the thermal concerns described above.
auto xinNN = pipeline.create<dai::node::XLinkIn>();
xinNN->setStreamName("nn_input");
xinNN->out.link(nn->input);
auto xoutNN = pipeline.create<dai::node::XLinkOut>();
xoutNN->setStreamName("nn");
nn->out.link(xoutNN->input);
// Linking
// Full ISP frames go straight to the host "rgb" stream.
camRgb->isp.link(rgbOut->input);
left->out.link(stereo->left);
right->out.link(stereo->right);
// NOTE(review): the "depth" stream actually carries the disparity map
// (stereo->disparity), not stereo->depth — confirm the host side expects
// disparity values here.
stereo->disparity.link(depthOut->input);
// Connect to device and start pipeline
device.startPipeline(pipeline);
// Sets queues size and behavior
// (max size 4, non-blocking: oldest messages are dropped when full).
for(const auto& name : queueNames) {
device.getOutputQueue(name, 4, false);
}
// Host-side cache of the latest frame per stream name.
std::unordered_map<std::string, cv::Mat> frame;
// Host-side queues: feed NN input frames in, read detections out.
auto nninputQueue = device.getInputQueue("nn_input");
auto nnQueue = device.getOutputQueue("nn", 4, false);
// auto sysinfoQueue = device.getOutputQueue("sysinfo", 4, false);
auto rgbWindowName = "rgb";
auto depthWindowName = "depth";