Hey ZhifeiShen – I tried your script and fixed it a bit. The main issue is that it wasn't actually "locked": AWB and anti-banding were still on auto, and you only flushed 1 s of frames, so the ISP was still settling. On top of that, the Laplacian/Sobel metrics are very sensitive to tiny exposure, white-balance, and noise changes, so they will drift even if the image doesn't look any worse. Thermal focus shift can also change edge sharpness over time. I've updated the script to lock AWB/anti-banding and warm up longer, which stabilizes the results (I actually saw the metrics improve on every run of the example on my camera):
import cv2
import depthai as dai
import numpy as np
import argparse
import os
import time
def compute_image_quality(img):
    """Return (sharpness, resolution) integer quality metrics for an image.

    Sharpness is the variance of the Laplacian response (a standard focus
    measure); "resolution" is the mean Sobel gradient magnitude. Both are
    truncated to ints before returning.
    """
    # Both metrics are defined on a single channel; collapse BGR input first.
    if img.ndim == 3:
        img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)

    # Sharpness: variance of the 64-bit Laplacian response.
    sharpness = cv2.Laplacian(img, cv2.CV_64F).var()

    # "Resolution": mean gradient magnitude from 5x5 Sobel filters.
    grad_x = cv2.Sobel(img, cv2.CV_64F, 1, 0, ksize=5)
    grad_y = cv2.Sobel(img, cv2.CV_64F, 0, 1, ksize=5)
    resolution = np.mean(np.sqrt(grad_x ** 2 + grad_y ** 2))

    return int(sharpness), int(resolution)
def create_camera_pipeline(device):
    """Build a DepthAI pipeline: color camera -> undistort (warp mesh) -> resize.

    Reads calibration from the connected device to build an undistortion
    remap, subsamples it into a sparse warp mesh applied on-device by an
    ImageManip node, then resizes the result to 640x360 planar BGR.

    device: an opened dai.Device (used only to read calibration here).
    Returns the configured dai.Pipeline (not yet started).
    """
    pipeline = dai.Pipeline()

    # Intrinsics and distortion coefficients come from the device EEPROM.
    calibData = device.readCalibration()

    # Color camera: 1080p sensor, full-size ISP output, 20 FPS on CAM_A.
    camRgb = pipeline.create(dai.node.ColorCamera)
    camRes = dai.ColorCameraProperties.SensorResolution.THE_1080_P
    camRgb.setResolution(camRes)
    camSocket = dai.CameraBoardSocket.CAM_A
    camRgb.setIspScale((1, 1))  # keep full sensor resolution
    camRgb.setBoardSocket(camSocket)
    camRgb.setFps(20)

    # Host -> device control stream (manual exposure/focus/WB commands).
    controlIn = pipeline.create(dai.node.XLinkIn)
    controlIn.setStreamName('control')
    controlIn.out.link(camRgb.inputControl)

    # Dense per-pixel undistortion remap from the calibrated intrinsics.
    M1 = np.array(calibData.getCameraIntrinsics(camSocket, camRgb.getIspWidth(), camRgb.getIspHeight()))
    d1 = np.array(calibData.getDistortionCoefficients(camSocket))
    R1 = np.identity(3)
    mapX, mapY = cv2.initUndistortRectifyMap(
        M1, d1, R1, M1, (camRgb.getIspWidth(), camRgb.getIspHeight()), cv2.CV_32FC1)

    # Subsample the dense remap into a sparse mesh (one sample every
    # meshCellSize pixels); the device interpolates between mesh points.
    meshCellSize = 16
    mesh0 = []
    # BUG FIX: the inner loop must run to mapX.shape[1] + 1 (matching the
    # outer loop) so the right-most mesh column is emitted. With the old
    # range(mapX.shape[1]) bound, the `x == mapX.shape[1]` branches below
    # were unreachable and the mesh was one column short, leaving the right
    # edge of the frame without a valid warp sample.
    for y in range(mapX.shape[0] + 1):
        if y % meshCellSize == 0:
            rowLeft = []
            for x in range(mapX.shape[1] + 1):
                if x % meshCellSize == 0:
                    # At the image border, clamp to the last valid map entry.
                    if y == mapX.shape[0] and x == mapX.shape[1]:
                        rowLeft.append(mapX[y - 1, x - 1])
                        rowLeft.append(mapY[y - 1, x - 1])
                    elif y == mapX.shape[0]:
                        rowLeft.append(mapX[y - 1, x])
                        rowLeft.append(mapY[y - 1, x])
                    elif x == mapX.shape[1]:
                        rowLeft.append(mapX[y, x - 1])
                        rowLeft.append(mapY[y, x - 1])
                    else:
                        rowLeft.append(mapX[y, x])
                        rowLeft.append(mapY[y, x])
            # Pad odd rows so every row holds an even number of floats
            # (complete x,y pairs).
            if (mapX.shape[1] % meshCellSize) % 2 != 0:
                rowLeft.append(0)
                rowLeft.append(0)
            mesh0.append(rowLeft)

    mesh0 = np.array(mesh0)
    meshWidth = mesh0.shape[1] // 2
    meshHeight = mesh0.shape[0]
    mesh0.resize(meshWidth * meshHeight, 2)
    mesh = list(map(tuple, mesh0))

    # First ImageManip: apply the undistortion warp mesh (NV12 in/out;
    # NV12 frames are width*height*3/2 bytes).
    manip_undistort = pipeline.create(dai.node.ImageManip)
    manip_undistort.initialConfig.setFrameType(dai.ImgFrame.Type.NV12)
    manip_undistort.setWarpMesh(mesh, meshWidth, meshHeight)
    manip_undistort.setMaxOutputFrameSize(camRgb.getIspWidth() * camRgb.getIspHeight() * 3 // 2)

    # Second ImageManip: resize to 640x360 and convert to planar BGR.
    manip_resize = pipeline.create(dai.node.ImageManip)
    target_width = 640
    target_height = 360
    manip_resize.initialConfig.setResize(target_width, target_height)
    manip_resize.initialConfig.setFrameType(dai.ImgFrame.Type.BGR888p)
    manip_resize.setMaxOutputFrameSize(target_width * target_height * 3)

    # Output stream back to the host.
    rgbVideoOut = pipeline.create(dai.node.XLinkOut)
    rgbVideoOut.setStreamName('rgbVideoOut')

    # Wire the nodes: camera ISP -> undistort -> resize -> host.
    camRgb.isp.link(manip_undistort.inputImage)
    manip_undistort.out.link(manip_resize.inputImage)
    manip_resize.out.link(rgbVideoOut.input)
    return pipeline
def run_camera_test(config, img_name):
    """Capture one frame with locked manual camera controls and report quality.

    Sends manual exposure/focus/white-balance/anti-banding settings to the
    camera, discards warm-up frames so the ISP settles under those settings,
    then saves a single frame and prints its sharpness/resolution metrics.

    config: dict with an "oak_camera_configs" sub-dict (see __main__).
    img_name: output path; ".png" is appended when no extension is given.
    Raises IOError if the image cannot be written.
    """
    # Default to PNG when the caller gave a bare name without an extension.
    ext = os.path.splitext(img_name)[1]
    if not ext:
        img_name = f"{img_name}.png"

    device = dai.Device()
    pipeline = create_camera_pipeline(device)
    with device as dev:
        dev.startPipeline(pipeline)
        # maxSize=1, non-blocking: always read the newest frame, drop the rest.
        outQ = dev.getOutputQueue(name='rgbVideoOut', maxSize=1, blocking=False)
        controlQueue = dev.getInputQueue('control')

        cam_cfg = config["oak_camera_configs"]
        ctrl = dai.CameraControl()
        # Disable all 3A algorithms so repeated runs use identical settings.
        ctrl.setControlMode(dai.CameraControl.ControlMode.OFF)
        ctrl.setBrightness(cam_cfg["brightness"])
        ctrl.setContrast(cam_cfg["contrast"])
        ctrl.setManualFocus(cam_cfg["manualFocus"])
        ctrl.setManualExposure(cam_cfg["exposureTimeUs"], cam_cfg["sensitivityIso"])
        # Fixed anti-banding mode; fall back to 60 Hz on an unknown name.
        anti_banding_name = cam_cfg.get("antiBandingMode", "MAINS_60_HZ")
        anti_banding = getattr(dai.CameraControl.AntiBandingMode, anti_banding_name, None)
        if anti_banding is None:
            anti_banding = dai.CameraControl.AntiBandingMode.MAINS_60_HZ
        ctrl.setAntiBandingMode(anti_banding)
        # Lock white balance only when a color temperature is configured.
        wb_k = cam_cfg.get("manualWhiteBalanceK", None)
        if wb_k is not None:
            ctrl.setAutoWhiteBalanceMode(dai.CameraControl.AutoWhiteBalanceMode.OFF)
            ctrl.setManualWhiteBalance(wb_k)
        controlQueue.send(ctrl)

        # Discard frames while the ISP settles under the new manual settings.
        # (The old code also converted each discarded frame with getCvFrame(),
        # which was wasted work — the queue only needs to be drained.)
        warmup_frames = cam_cfg.get("warmupFrames", 100)
        for _ in range(warmup_frames):
            outQ.get()
            time.sleep(0.2)

        # Capture and score the measurement frame.
        frameRGB = outQ.get().getCvFrame()
        sharpness, resolution = compute_image_quality(frameRGB)
        print(f"Sharpness: {sharpness}")
        print(f"Resolution: {resolution}")
        # Write first and check the result, so the success message is only
        # printed when the file actually exists (the old code printed it
        # before — and regardless of — the unchecked cv2.imwrite call).
        if not cv2.imwrite(img_name, frameRGB):
            raise IOError(f"Failed to write image to {img_name}")
        print(f"Image saved as {img_name}")
if __name__ == "__main__":
    # CLI entry point: capture one image and report its quality metrics.
    arg_parser = argparse.ArgumentParser(description="Run camera quality checkout")
    arg_parser.add_argument(
        "--img_name", type=str, required=True,
        help="String to save the output image (e.g., 'test_image.png')")
    cli_args = arg_parser.parse_args()

    # Fixed manual camera settings applied for every checkout run.
    camera_settings = {
        "brightness": 0,
        "contrast": 1,
        "manualFocus": 150,
        "exposureTimeUs": 10000,
        "sensitivityIso": 400,
        "manualWhiteBalanceK": 4500,
        "antiBandingMode": "MAINS_60_HZ",
        "warmupFrames": 100,
    }
    run_camera_test({"oak_camera_configs": camera_settings}, cli_args.img_name)