R
RBJin

  • Jul 1, 2024
  • Joined Oct 9, 2022
  • 0 best answers
  • Do you have any plans to design the thermal sensor as a module similar to OAK-FFC ToF33D?

  • @Luxonis-Alex The camera module I use is the Arducam OV9282 with UC-796 FFC cable.

    Do I just need to connect the FSIN testpoint on each cable to the FSIN pin on the corresponding camera board with a jump wire? As shown in the red circle. Is there anything else I need to do?

  • The delay is still greater than 1 ms. I used the 2.17.3.1 version of depthai. Do I need to switch to a specific version?

    import depthai as dai
    import time
    import cv2
    import collections
    
    set_fps = 30
    
    class FPS:
        """Sliding-window frames-per-second estimator.

        Keeps the most recent ``window_size`` timestamps in a bounded deque
        and reports the rate as (number of stored samples) / (time spanned
        by the window).
        """

        def __init__(self, window_size=30):
            # maxlen-bounded deque: oldest timestamps drop off automatically.
            self.dq = collections.deque(maxlen=window_size)
            self.fps = 0

        def update(self, timestamp=None):
            """Record one frame arrival.

            timestamp: seconds (any monotonic clock); defaults to
            ``time.monotonic()`` when omitted.
            """
            if timestamp is None:  # 'is None', not '== None' (PEP 8)
                timestamp = time.monotonic()
            count = len(self.dq)
            if count > 0:
                span = timestamp - self.dq[0]
                # Guard: identical/backward timestamps would otherwise raise
                # ZeroDivisionError (or produce a negative rate).
                if span > 0:
                    self.fps = count / span
            self.dq.append(timestamp)

        def get(self):
            """Return the most recently computed FPS (0 until enough samples)."""
            return self.fps
    
    
    # cam_list = ['left', 'right']
    # cam_socket_opts = {
    #     'left' : dai.CameraBoardSocket.LEFT,  # Or CAM_B
    #     'right': dai.CameraBoardSocket.RIGHT, # Or CAM_C
    # }
    
    # Cameras to open and the board socket each name maps to.
    cam_list = ['rgb', 'left', 'right', 'camd']
    cam_socket_opts = {
        'rgb'  : dai.CameraBoardSocket.RGB,   # Or CAM_A
        'left' : dai.CameraBoardSocket.LEFT,  # Or CAM_B
        'right': dai.CameraBoardSocket.RIGHT, # Or CAM_C
        'camd' : dai.CameraBoardSocket.CAM_D,
    }

    # Build the pipeline: one MonoCamera node plus one XLinkOut stream per camera.
    pipeline = dai.Pipeline()
    cam = {}
    xout = {}
    for c in cam_list:
        cam[c] = pipeline.create(dai.node.MonoCamera)
        cam[c].setResolution(dai.MonoCameraProperties.SensorResolution.THE_800_P)
        # Frame sync: 'rgb' is configured to OUTPUT the sync signal and all
        # other cameras to consume it (INPUT), per the FrameSyncMode enum.
        if c == 'rgb':
            cam[c].initialControl.setFrameSyncMode(dai.CameraControl.FrameSyncMode.OUTPUT)
        else:
            cam[c].initialControl.setFrameSyncMode(dai.CameraControl.FrameSyncMode.INPUT)
        cam[c].setBoardSocket(cam_socket_opts[c])
        xout[c] = pipeline.create(dai.node.XLinkOut)
        xout[c].setStreamName(c)  # queue name matches the cam_list entry
        cam[c].out.link(xout[c].input)
    
    
    # Drive GPIO 6 as an output held HIGH before starting the pipeline.
    # NOTE(review): presumably this routes/enables the FSIN sync line on the
    # carrier board -- confirm against the board schematic.
    config = dai.Device.Config()
    config.board.gpio[6] = dai.BoardConfig.GPIO(dai.BoardConfig.GPIO.OUTPUT,
                                                dai.BoardConfig.GPIO.Level.HIGH)

    with dai.Device(config) as device:
        device.startPipeline(pipeline)
        q = {}
        fps_host = {}  # FPS computed based on the time we receive frames in app
        fps_capt = {}  # FPS computed based on capture timestamps from device
        for c in cam_list:
            # maxSize=1, blocking=False: only the newest frame per stream is kept.
            q[c] = device.getOutputQueue(name=c, maxSize=1, blocking=False)
            cv2.namedWindow(c, cv2.WINDOW_NORMAL)
            cv2.resizeWindow(c, (640, 480))
            fps_host[c] = FPS()
            fps_capt[c] = FPS()

        while True:
            frame_list = []  # NOTE(review): populated nowhere -- unused in this snippet
            for c in cam_list:
                pkt = q[c].tryGet()  # non-blocking; None when no new frame arrived
                if pkt is not None:
                    fps_host[c].update()
                    fps_capt[c].update(pkt.getTimestamp().total_seconds())
                    # Device-clock timestamp, printed to judge cross-camera skew.
                    print(c+":",pkt.getTimestampDevice())
                    frame = pkt.getCvFrame()
                    cv2.imshow(c, frame)
            print("-------------------------------")
            # print("\rFPS:",
            #       *["{:6.2f}|{:6.2f}".format(fps_host[c].get(), fps_capt[c].get()) for c in cam_list],
            #       end='', flush=True)

            key = cv2.waitKey(1)
            if key == ord('q'):
                break

  • The following is my test code and the output device timestamp. If I only use left and right cameras, the output is always in pairs and the delay is less than 1ms. But when I turn on four cameras, the delay is obviously greater than 1ms. I guess it may be a problem with my code, so I want to find some test codes that can be referenced.

    import depthai as dai
    import time
    import cv2
    import collections
    
    set_fps = 30
    
    class FPS:
        """Sliding-window frames-per-second estimator.

        Keeps the most recent ``window_size`` timestamps in a bounded deque
        and reports the rate as (number of stored samples) / (time spanned
        by the window).
        """

        def __init__(self, window_size=30):
            # maxlen-bounded deque: oldest timestamps drop off automatically.
            self.dq = collections.deque(maxlen=window_size)
            self.fps = 0

        def update(self, timestamp=None):
            """Record one frame arrival.

            timestamp: seconds (any monotonic clock); defaults to
            ``time.monotonic()`` when omitted.
            """
            if timestamp is None:  # 'is None', not '== None' (PEP 8)
                timestamp = time.monotonic()
            count = len(self.dq)
            if count > 0:
                span = timestamp - self.dq[0]
                # Guard: identical/backward timestamps would otherwise raise
                # ZeroDivisionError (or produce a negative rate).
                if span > 0:
                    self.fps = count / span
            self.dq.append(timestamp)

        def get(self):
            """Return the most recently computed FPS (0 until enough samples)."""
            return self.fps
    
    
    # cam_list = ['left', 'right']
    # cam_socket_opts = {
    #     'left' : dai.CameraBoardSocket.LEFT,  # Or CAM_B
    #     'right': dai.CameraBoardSocket.RIGHT, # Or CAM_C
    # }
    
    # Cameras to open and the board socket each name maps to.
    cam_list = ['rgb', 'left', 'right', 'camd']
    cam_socket_opts = {
        'rgb'  : dai.CameraBoardSocket.RGB,   # Or CAM_A
        'left' : dai.CameraBoardSocket.LEFT,  # Or CAM_B
        'right': dai.CameraBoardSocket.RIGHT, # Or CAM_C
        'camd' : dai.CameraBoardSocket.CAM_D,
    }

    # Build the pipeline: one MonoCamera node plus one XLinkOut stream per
    # camera.  NOTE(review): unlike the other snippet in this thread, no
    # FrameSyncMode is configured here, so the cameras free-run.
    pipeline = dai.Pipeline()
    cam = {}
    xout = {}
    for c in cam_list:
        cam[c] = pipeline.create(dai.node.MonoCamera)
        cam[c].setResolution(dai.MonoCameraProperties.SensorResolution.THE_800_P)
        cam[c].setBoardSocket(cam_socket_opts[c])
        xout[c] = pipeline.create(dai.node.XLinkOut)
        xout[c].setStreamName(c)  # queue name matches the cam_list entry
        cam[c].out.link(xout[c].input)
    
    with dai.Device(pipeline) as device:
        q = {}
        fps_host = {}  # FPS computed based on the time we receive frames in app
        fps_capt = {}  # FPS computed based on capture timestamps from device
        for c in cam_list:
            # maxSize=1, blocking=False: only the newest frame per stream is kept.
            q[c] = device.getOutputQueue(name=c, maxSize=1, blocking=False)
            cv2.namedWindow(c, cv2.WINDOW_NORMAL)
            cv2.resizeWindow(c, (640, 480))
            fps_host[c] = FPS()
            fps_capt[c] = FPS()

        while True:
            frame_list = []  # NOTE(review): populated nowhere -- unused in this snippet
            for c in cam_list:
                pkt = q[c].tryGet()  # non-blocking; None when no new frame arrived
                if pkt is not None:
                    fps_host[c].update()
                    fps_capt[c].update(pkt.getTimestamp().total_seconds())
                    # Device-clock timestamp, printed to judge cross-camera skew.
                    print(c+":",pkt.getTimestampDevice())
                    frame = pkt.getCvFrame()
                    cv2.imshow(c, frame)
            print("-------------------------------")
            # print("\rFPS:",
            #       *["{:6.2f}|{:6.2f}".format(fps_host[c].get(), fps_capt[c].get()) for c in cam_list],
            #       end='', flush=True)

            key = cv2.waitKey(1)
            if key == ord('q'):
                break

  • FFC-4p should support four ov9282 cameras for hardware synchronization, but is there any code to test this? Can they synchronize in microseconds?

    • erik replied to this.