When I run my pipeline, I immediately get this error:
RuntimeError: Communication exception - possible device error/misconfiguration. Original message 'Couldn't read data from stream: 'video_rgb' (X_LINK_ERROR)'
If I remove the following two lines, the pipeline works and all 3 videos stream well.
device.setIrLaserDotProjectorIntensity(0.8)
device.setIrFloodLightIntensity(0.8)
But the video is very slow: the latency is more than 2 s, sometimes 5 s or even more.
How can I make the latency much better?
Your latency page states that 30 ms latency can be achieved. Is that true, and what changes to the above pipeline are needed to achieve it?
Here is my pipeline:
# --- Imports and global setup ---
import depthai as dai  # fix: was "iimport"
import cv2
from flask import Flask, Response
import numpy as np
import time

# Initialize the Flask app.
# fix: was Flask(name) -- Flask needs the module's __name__ string.
app = Flask(__name__)

# Set up the DepthAI pipeline for the Luxonis camera.
pipeline = dai.Pipeline()
fps = 10  # target frame rate shared by all three camera streams
# --- Camera nodes ---

# RGB (color) camera on socket CAM_A, 1080p, planar (non-interleaved) output.
cam_rgb = pipeline.create(dai.node.ColorCamera)
cam_rgb.setBoardSocket(dai.CameraBoardSocket.CAM_A)
cam_rgb.setResolution(dai.ColorCameraProperties.SensorResolution.THE_1080_P)
cam_rgb.setInterleaved(False)
cam_rgb.setFps(fps)

# Mono (grayscale) cameras: left on CAM_B, right on CAM_C, both 720p.
cam_mono_left = pipeline.create(dai.node.MonoCamera)
cam_mono_left.setBoardSocket(dai.CameraBoardSocket.CAM_B)
cam_mono_left.setResolution(dai.MonoCameraProperties.SensorResolution.THE_720_P)
cam_mono_left.setFps(fps)

cam_mono_right = pipeline.create(dai.node.MonoCamera)
cam_mono_right.setBoardSocket(dai.CameraBoardSocket.CAM_C)
cam_mono_right.setResolution(dai.MonoCameraProperties.SensorResolution.THE_720_P)
cam_mono_right.setFps(fps)

# --- XLink output streams that send frames to the host ---
xout_rgb = pipeline.create(dai.node.XLinkOut)
xout_rgb.setStreamName("video_rgb")
cam_rgb.video.link(xout_rgb.input)

xout_mono_left = pipeline.create(dai.node.XLinkOut)
xout_mono_left.setStreamName("video_mono_left")
cam_mono_left.out.link(xout_mono_left.input)

xout_mono_right = pipeline.create(dai.node.XLinkOut)
xout_mono_right.setStreamName("video_mono_right")
cam_mono_right.out.link(xout_mono_right.input)
# --- Connect to the device and start the pipeline ---
device = dai.Device(pipeline)

# NOTE(review): enabling the IR emitters reportedly crashes the "video_rgb"
# stream with X_LINK_ERROR on this setup. That symptom often points to a
# power problem (the dot projector and flood LED draw extra current over
# USB) -- try a powered hub / external supply or lower intensities. TODO confirm.
device.setIrLaserDotProjectorIntensity(0.8)
device.setIrFloodLightIntensity(0.8)

# Output queues for the video feeds.
# maxSize=1 with blocking=False keeps only the NEWEST frame per stream:
# older frames are dropped on the host instead of queuing up, which is the
# key change for low latency (a depth-5 queue at 10 fps alone adds ~0.5 s).
q_rgb = device.getOutputQueue(name="video_rgb", maxSize=1, blocking=False)
q_mono_left = device.getOutputQueue(name="video_mono_left", maxSize=1, blocking=False)
q_mono_right = device.getOutputQueue(name="video_mono_right", maxSize=1, blocking=False)
def generate_mjpeg_stream(queue):
    """Yield multipart MJPEG chunks from a DepthAI output queue forever.

    Args:
        queue: a DepthAI output queue whose messages provide ``getCvFrame()``.

    Yields:
        bytes: one ``--frame`` multipart section per JPEG-encoded frame.
    """
    while True:
        # Blocking get() instead of tryGet() in a bare loop: tryGet() with no
        # sleep busy-spins a CPU core at 100% while waiting, adding latency
        # to every stream. get() sleeps until a frame arrives.
        frame = queue.get()
        img = frame.getCvFrame()  # convert to an OpenCV (numpy) image
        ok, jpeg = cv2.imencode('.jpg', img)
        if not ok:
            continue  # skip frames that fail to encode
        # Yield one MJPEG part to the HTTP stream.
        yield (b'--frame\r\n'
               b'Content-Type: image/jpeg\r\n\r\n' + jpeg.tobytes() + b'\r\n\r\n')
@app.route('/stream/1')
def video_rgb_feed():
    """Serve the RGB camera as a multipart MJPEG HTTP stream."""
    return Response(generate_mjpeg_stream(q_rgb),
                    mimetype='multipart/x-mixed-replace; boundary=frame')
@app.route('/stream/2')
def video_mono_left_feed():
    """Serve the left mono camera as a multipart MJPEG HTTP stream."""
    return Response(generate_mjpeg_stream(q_mono_left),
                    mimetype='multipart/x-mixed-replace; boundary=frame')
@app.route('/stream/3')
def video_mono_right_feed():
    """Serve the right mono camera as a multipart MJPEG HTTP stream."""
    return Response(generate_mjpeg_stream(q_mono_right),
                    mimetype='multipart/x-mixed-replace; boundary=frame')
# Run the Flask app and stream MJPEG over HTTP.
# fix: was `if name == 'main':` -- the dunder underscores were lost in the paste.
if __name__ == '__main__':
    # threaded=True lets the three MJPEG endpoints stream concurrently.
    app.run(host='0.0.0.0', port=5000, threaded=True)