- Edited
I have created this pipeline:
def create_pipeline(SETTINGS):
    """Build the DepthAI pipeline.

    Nodes: an RGB camera streamed out as "video", a left/right mono pair
    feeding a StereoDepth node (disparity streamed out as "disparity"),
    and the rectified-right image resized to 300x300 BGR for a MobileNet
    detection network (detections streamed out as "nn", the resized frame
    as "rectifiedRight").

    SETTINGS is expected to provide SETTINGS['NN']['fps'],
    SETTINGS['NN']['confidence'] and SETTINGS['NN']['blob_path'].
    Returns the assembled dai.Pipeline.
    """
    fps = int(SETTINGS['NN']['fps'])
    pipeline = dai.Pipeline()

    # --- RGB input camera ---
    rgb_cam = pipeline.create(dai.node.ColorCamera)
    rgb_cam.setBoardSocket(dai.CameraBoardSocket.CAM_A)
    rgb_cam.setPreviewSize(300, 300)
    rgb_cam.setInterleaved(False)
    rgb_cam.setPreviewKeepAspectRatio(False)
    rgb_cam.setFps(fps)

    # --- Mono input cameras (shared setup) ---
    def _make_mono(side):
        cam = pipeline.create(dai.node.MonoCamera)
        cam.setCamera(side)
        cam.setFps(fps)
        cam.setResolution(dai.MonoCameraProperties.SensorResolution.THE_400_P)
        return cam

    right_cam = _make_mono("right")
    left_cam = _make_mono("left")

    # --- XLink output streams (shared setup) ---
    def _make_xlink_out(stream_name):
        out = pipeline.create(dai.node.XLinkOut)
        out.setStreamName(stream_name)
        return out

    nn_out = _make_xlink_out("nn")
    disparity_out = _make_xlink_out("disparity")
    rect_right_out = _make_xlink_out("rectifiedRight")
    video_out = _make_xlink_out("video")

    # --- Compute nodes ---
    stereo = pipeline.create(dai.node.StereoDepth)
    resize = pipeline.create(dai.node.ImageManip)
    detector = pipeline.create(dai.node.MobileNetDetectionNetwork)

    stereo.setDefaultProfilePreset(dai.node.StereoDepth.PresetMode.DEFAULT)
    stereo.setRectifyEdgeFillColor(0)

    # Resize/convert the rectified-right frame to the NN's expected input.
    resize.initialConfig.setResize(300, 300)
    resize.initialConfig.setFrameType(dai.ImgFrame.Type.BGR888p)

    detector.setConfidenceThreshold(float(SETTINGS['NN']['confidence']))
    blob_path = str((Path(__file__).parent / Path(str(SETTINGS['NN']['blob_path']))).resolve().absolute())
    detector.setBlobPath(blob_path)
    detector.setNumInferenceThreads(2)
    detector.input.setBlocking(False)

    # --- Linking ---
    rgb_cam.video.link(video_out.input)
    right_cam.out.link(stereo.right)
    left_cam.out.link(stereo.left)
    stereo.rectifiedRight.link(resize.inputImage)
    stereo.disparity.link(disparity_out.input)
    resize.out.link(detector.input)
    resize.out.link(rect_right_out.input)
    detector.out.link(nn_out.input)
    return pipeline
and I'm using it in the following way, in order to process the mono frame with the NN while the IR illumination is enabled (for night vision):
def Tracking(SETTINGS, selected_devices):
DETECTED_OBJ=json.loads(SETTINGS['NN']['object'])
pipeline = create_pipeline(SETTINGS)
print("PIPELINE CREATED")
devices = {}
# Connect to device and start pipeline
with contextlib.ExitStack() as stack:
for device_info in selected_devices:
openvino_version = dai.OpenVINO.Version.VERSION_2021_4
usb2_mode = False
device = stack.enter_context(dai.Device(openvino_version, device_info, usb2_mode))
# Note: currently on POE, DeviceInfo.getMxId() and Device.getMxId() are different!
print("\n==> Connected to " + device_info.getMxId())
mxid = device.getMxId()
device.startPipeline(pipeline)
device.setIrLaserDotProjectorIntensity(0) # in %, from 0 to 1
device.setIrFloodLightIntensity(0.9) # in %, from 0 to 1
devices[mxid] = {
'nn': device.getOutputQueue("nn", maxSize=4, blocking=False),
'mono': device.getOutputQueue("rectifiedRight", maxSize=4, blocking=False),
'rgb':device.getOutputQueue("video", maxSize=4, blocking=False),
'disparity':device.getOutputQueue("disparity", maxSize=4, blocking=False),
}
...
I notice that the IR illumination is only active for the first few frames.
Are the pipeline and the IR intensity parameters correct?
Thank you