I noticed the following bug:
When the decimation filter is enabled on a StereoDepth node, the frame's data buffer size is not reduced along with the image. The output image is decimated as expected, but the size of the underlying data buffer is unchanged.
I expect the print statement to print (640*360*2,) 640 360, but it prints (1843200,) 640 360. 1843200 = 1280x720x2, which is the undecimated size.
Here's my code:
import depthai as dai
# Create pipeline
pipeline = dai.Pipeline()
# Define sources and outputs
monoLeft = pipeline.create(dai.node.MonoCamera)
monoRight = pipeline.create(dai.node.MonoCamera)
depth = pipeline.create(dai.node.StereoDepth)
xout = pipeline.create(dai.node.XLinkOut)
xout.setStreamName("depth")
# Properties
monoLeft.setResolution(dai.MonoCameraProperties.SensorResolution.THE_720_P)
monoLeft.setBoardSocket(dai.CameraBoardSocket.LEFT)
monoRight.setResolution(dai.MonoCameraProperties.SensorResolution.THE_720_P)
monoRight.setBoardSocket(dai.CameraBoardSocket.RIGHT)
# These three lines enable the decimation filter; comment them out to disable it.
config = depth.initialConfig.get()
config.postProcessing.decimationFilter.decimationFactor = 2
depth.initialConfig.set(config)
# Linking
monoLeft.out.link(depth.left)
monoRight.out.link(depth.right)
depth.depth.link(xout.input)
# Connect to the device and start the pipeline
with dai.Device(pipeline) as device:
    q = device.getOutputQueue(name="depth", maxSize=4, blocking=False)
    while True:
        in_depth = q.get()
        frame = in_depth.getFrame()  # decimated as expected (640x360)
        print(in_depth.getData().shape, in_depth.getWidth(), in_depth.getHeight())  # buffer is still 1843200 bytes
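In case it's useful, here is a minimal host-side sketch of how I check the mismatch and recover the decimated depth map from the raw buffer. It assumes the valid uint16 depth values occupy the start of the oversized buffer, which I have not verified; getFrame() already returns the correctly decimated image, so that remains the safer option. The function name check_and_recover is just for illustration.

import numpy as np
import depthai as dai

def check_and_recover(in_depth: dai.ImgFrame) -> np.ndarray:
    # Expected buffer size from the decimated dimensions (2 bytes per uint16 pixel)
    w, h = in_depth.getWidth(), in_depth.getHeight()   # 640, 360 with decimationFactor = 2
    expected_bytes = w * h * 2                          # 460800
    data = in_depth.getData()                           # currently still 1843200 bytes (1280x720x2)
    print("buffer bytes:", data.size, "expected:", expected_bytes)
    # Assumption: the decimated depth values sit at the start of the buffer.
    return np.frombuffer(data.tobytes()[:expected_bytes], dtype=np.uint16).reshape(h, w)

This would be called as check_and_recover(in_depth) inside the while loop above, in place of (or alongside) the existing print.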