I want to use the intrinsics and distortion parameters for a given camera on my OAK-D PoE sensor. I need these in order to perform extrinsic calibration between this camera and my LiDAR. However, while working on this, I noticed that the intrinsic matrix parameters didn't seem to line up with the actual image size that I got from the following code:
import cv2
import numpy as np
import time
from datetime import datetime, timezone
import depthai as dai
# OAK-D Thread
def oak_d_thread(pipeline, data_dir):
    """Capture RGB preview frames from the OAK-D and save them stamped with UTC time.

    Args:
        pipeline: A configured ``dai.Pipeline`` exposing an XLinkOut stream named "rgb".
        data_dir: Base directory; images are written to ``{data_dir}/oakd_cam/``.

    Runs until interrupted; each saved filename embeds the frame's UTC epoch timestamp
    so images can later be associated with LiDAR data.
    """
    # 1. Capture "monotonic now" (time since host PC boot, unaffected by system
    #    clock changes). Returns a datetime.timedelta.
    monotonic_ref = dai.Clock.now()
    # 2. Capture "UTC now" at (approximately) the same instant.
    #    NOTE: datetime.now(timezone.utc) returns a timezone-AWARE datetime
    #    (the original comment calling it "naive" was wrong).
    utc_ref = datetime.now(timezone.utc)
    # 3. Offset maps a device timedelta onto the UTC timeline:
    #    utc_time = frame_timedelta + offset  (datetime = timedelta + datetime)
    offset = utc_ref - monotonic_ref

    frame_cnt = 0
    with dai.Device(pipeline) as device:
        q_rgb = device.getOutputQueue("rgb")
        while True:
            in_rgb = q_rgb.tryGet()
            if in_rgb is None:
                # tryGet() is non-blocking; sleep briefly so this loop does not
                # busy-wait at 100% CPU while no frame is pending.
                time.sleep(0.001)
                continue
            if frame_cnt == 0:
                # Discard the very first frame (presumably stale / exposure
                # still settling — confirm against sensor behavior).
                frame_cnt += 1
                continue
            frame = in_rgb.getCvFrame()
            # Device timestamp (timedelta) shifted onto the UTC timeline.
            cam_time = in_rgb.getTimestamp() + offset
            print(f"Camera: Timestamp {cam_time}")
            # Save Camera Image
            cv2.imwrite(f"{data_dir}/oakd_cam/img_{cam_time.timestamp()}.jpg", frame)
            frame_cnt += 1
# Main Program
if __name__ == "__main__":
    data_dir = '/lidar_camera_calibration/data/'

    # Build the DepthAI pipeline: a single color camera whose preview stream is
    # sent to the host over XLink.
    pipeline = dai.Pipeline()
    cam_rgb = pipeline.createColorCamera()
    cam_rgb.setFps(2)
    cam_rgb.setBoardSocket(dai.CameraBoardSocket.CAM_A)
    cam_rgb.setResolution(dai.ColorCameraProperties.SensorResolution.THE_1200_P)
    # NOTE(review): the preview is a cropped/scaled view of the 1200P sensor
    # output. Calibration intrinsics correspond to the sensor resolution, so
    # they will not match this 1024x512 preview directly — presumably they must
    # be rescaled (or requested at this size via the calibration API); verify.
    cam_rgb.setPreviewSize(1024, 512)
    cam_rgb.setInterleaved(False)
    cam_rgb.setColorOrder(dai.ColorCameraProperties.ColorOrder.RGB)

    # Host-side output stream carrying the preview frames.
    xout_rgb = pipeline.createXLinkOut()
    xout_rgb.setStreamName("rgb")
    cam_rgb.preview.link(xout_rgb.input)

    # Run the capture loop (blocks until interrupted).
    oak_d_thread(pipeline, data_dir)
    print("Finished processing Camera!")