That doesn't seem to work for me. Maybe your calibration file is different from mine?
Here is line 356 from the calibration file:
A colleague suggested hard-coding it in there like this instead, which allowed me to run calibration (edited from line 553):
cam_node.setBoardSocket(stringToCam[cam_id])
# sketchy hard code to check please remove
#sensorName = cam_info['sensorName']
cam_info['sensorName'] = "OV9782"
Calibration ran.
The results still look pretty bad to me, though, and I'm not sure what to do at the moment.
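One thing I want to double-check is whether the hard-coded "OV9782" actually matches what the device reports for each socket. A rough sketch of how I'd query that (assuming this depthai version has getConnectedCameraFeatures(); the exact field names are from memory, so treat it as a sketch rather than tested code):

#!/usr/bin/env python3
# Rough sketch (not verified on this exact depthai version): print which sensor
# depthai detects on each camera socket, to confirm the "OV9782" hard-code.
import depthai as dai

with dai.Device() as device:
    for cam in device.getConnectedCameraFeatures():
        print(f"{cam.socket}: {cam.sensorName} ({cam.width}x{cam.height})")

If that prints something other than OV9782 for the left/right sockets, the hard-code is probably just masking a different mismatch.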
Running this example code:
#!/usr/bin/env python3
import cv2
import depthai as dai
import numpy as np
# Closer-in minimum depth, disparity range is doubled (from 95 to 190):
extended_disparity = False
# Better accuracy for longer distance, fractional disparity 32-levels:
subpixel = False
# Better handling for occlusions:
lr_check = True
# Create pipeline
pipeline = dai.Pipeline()
# Define sources and outputs
monoLeft = pipeline.create(dai.node.MonoCamera)
monoRight = pipeline.create(dai.node.MonoCamera)
depth = pipeline.create(dai.node.StereoDepth)
# Output nodes for disparity, left, and right images
xoutDisparity = pipeline.create(dai.node.XLinkOut)
xoutLeft = pipeline.create(dai.node.XLinkOut)
xoutRight = pipeline.create(dai.node.XLinkOut)
xoutDisparity.setStreamName("disparity")
xoutLeft.setStreamName("left")
xoutRight.setStreamName("right")
# Properties
monoLeft.setResolution(dai.MonoCameraProperties.SensorResolution.THE_720_P)
monoLeft.setBoardSocket(dai.CameraBoardSocket.LEFT) # Use setBoardSocket with enum
monoRight.setResolution(dai.MonoCameraProperties.SensorResolution.THE_720_P)
monoRight.setBoardSocket(dai.CameraBoardSocket.RIGHT) # Use setBoardSocket with enum
# Configure StereoDepth node
depth.setDefaultProfilePreset(dai.node.StereoDepth.PresetMode.HIGH_DENSITY)
depth.initialConfig.setMedianFilter(dai.MedianFilter.KERNEL_7x7)
depth.setLeftRightCheck(lr_check)
depth.setExtendedDisparity(extended_disparity)
depth.setSubpixel(subpixel)
# Linking
monoLeft.out.link(depth.left)
monoRight.out.link(depth.right)
depth.disparity.link(xoutDisparity.input)
monoLeft.out.link(xoutLeft.input)
monoRight.out.link(xoutRight.input)
# Connect to device and start pipeline
with dai.Device(pipeline) as device:
    # Output queues for disparity, left, and right frames
    qDisparity = device.getOutputQueue(name="disparity", maxSize=4, blocking=False)
    qLeft = device.getOutputQueue(name="left", maxSize=4, blocking=False)
    qRight = device.getOutputQueue(name="right", maxSize=4, blocking=False)

    while True:
        inDisparity = qDisparity.get()  # blocking call, will wait until new data has arrived
        disparityFrame = inDisparity.getFrame()
        # Normalization for better visualization of disparity
        disparityFrame = (disparityFrame * (255 / depth.initialConfig.getMaxDisparity())).astype(np.uint8)
        # Applying color map to disparity
        disparityColor = cv2.applyColorMap(disparityFrame, cv2.COLORMAP_JET)

        # Retrieve left and right camera frames
        inLeft = qLeft.tryGet()
        inRight = qRight.tryGet()
        if inLeft:
            leftFrame = inLeft.getCvFrame()
            cv2.imshow("left", leftFrame)
        if inRight:
            rightFrame = inRight.getCvFrame()
            cv2.imshow("right", rightFrame)

        cv2.imshow("disparity", disparityFrame)
        cv2.imshow("disparity_color", disparityColor)

        if cv2.waitKey(1) == ord('q'):
            break
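To rule out the calibration that actually got flashed to the device, I'm also thinking of dumping the stored intrinsics and baseline, roughly like this (a sketch only; I'm assuming readCalibration(), getCameraIntrinsics() and getBaselineDistance() behave the way I remember from the depthai docs):

#!/usr/bin/env python3
# Rough sketch: read back the calibration stored on the device and print the
# intrinsics and stereo baseline as a sanity check on the new calibration.
import depthai as dai

with dai.Device() as device:
    calib = device.readCalibration()
    for socket in (dai.CameraBoardSocket.LEFT, dai.CameraBoardSocket.RIGHT):
        print(f"{socket} intrinsics at 1280x720:")
        print(calib.getCameraIntrinsics(socket, 1280, 720))
    print(f"Baseline (cm): {calib.getBaselineDistance()}")

If the baseline or intrinsics look obviously off, that would at least narrow down whether the problem is the stored calibration or the way I'm running stereo.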
Updated the dataset images for review: https://www.dropbox.com/scl/fo/h9nav3q3y448q6mqkjrjf/h?rlkey=nikezpxqkgh68u1q3gd6x9b36&dl=0