• DepthAI-v2
  • Synchronization of NN-Frame with an external IMU

Hi jakaskerl

Thank you for your answers in this post.

Since the topic shifted towards synchronization rather than latencies, I am posting a new discussion.

Trying to sync an external IMU (threaded) with Frame and NN is harder, so I am first trying to understand Frame-NN-host sync with this code.

import cv2
import numpy as np
import depthai as dai
import blobconverter
import threading
import time
import serial
from datetime import timedelta

# Threshold for synchronization
MS_THRESHOLD = 30

msgs = dict()

def add_msg(msg, name, ts=None):

    print(f"[ADD MESSAGE] New message received: {msg}, {name}, {ts}")  
    if ts is None:
        ts = msg.getTimestamp()

    if name not in msgs:
        msgs[name] = []

    msgs[name].append((ts, msg))
    for name, arr in msgs.items():
        if len(arr) > 0:
            print(f"[ADD MESSAGE DICTIONARY] key: {name}: value length {len(arr)} | last value: {arr[-1]}")  
    synced = {}
    for name, arr in msgs.items():
        diffs = []
        for i, (msg_ts, msg) in enumerate(arr):
            diffs.append(abs(msg_ts - ts))
        if len(diffs) == 0: break
        diffsSorted = diffs.copy()
        diffsSorted.sort()
        dif = diffsSorted[0]
        print(f"[ADD MESSAGE] Difference: {dif}")
        print(f"[ADD MESSAGE] Difference in milliseconds: {dif / timedelta(milliseconds=1)}")
        print(f"convertedtime: {timedelta(milliseconds=MS_THRESHOLD)}")
        if dif < timedelta(milliseconds=MS_THRESHOLD):
            synced[name] = diffs.index(dif)
 
    if len(synced) == 2:
        print(f"[ADD MESSAGE] +++WILL BE SYNCED!+++")
        for name, i in synced.items():
            msgs[name] = msgs[name][i:]
        ret = {}
        for name, arr in msgs.items():
            print(f"[ADD MESSAGE] +++will be popped+++: {arr[0]}")
            ret[name] = arr.pop(0)
        print(f"[ADD MESSAGE] +++SYNCED data+++: {ret}")
        return ret
    else:
        print(f"[ADD MESSAGE] ---Not synced yet---")
        return False

def create_pipeline():
    pipeline = dai.Pipeline()

    camRgb = pipeline.create(dai.node.ColorCamera)
    camRgb.setResolution(dai.ColorCameraProperties.SensorResolution.THE_1080_P)
    camRgb.setInterleaved(False)
    camRgb.setFps(30)

    detection_nn = pipeline.create(dai.node.MobileNetDetectionNetwork)
    detection_nn.setBlobPath(blobconverter.from_zoo(name='mobilenet-ssd', shaves=6))
    detection_nn.setConfidenceThreshold(0.5)

    rgbOut = pipeline.create(dai.node.XLinkOut)
    rgbOut.setStreamName("rgb")

    xout_nn = pipeline.create(dai.node.XLinkOut)
    xout_nn.setStreamName("nn")

    camRgb.preview.link(detection_nn.input)
    camRgb.preview.link(rgbOut.input)
    detection_nn.out.link(xout_nn.input)
    return pipeline

def td2ms(td) -> int:
    return int(td / timedelta(milliseconds=1))

def new_msg(msg, name, ts=None):
    print(f"[NEWMSG] msg: {type(msg).__name__}, name: {name}, ts: {ts}")  
    synced = add_msg(msg, name, ts)
    if not synced: return
    rgb_ts, rgb = synced['rgb']
    nn_ts, nn = synced['nn']

    print(f"[NEWMSG: SYNCED] RGB: {rgb_ts}, NN: {nn_ts}")
    print(f"[Seq {rgb.getSequenceNum()}] Mid of RGB exposure ts: {td2ms(rgb_ts)}ms, RGB ts: {td2ms(rgb.getTimestampDevice())}ms, RGB exposure time: {td2ms(rgb.getExposureTime())}ms")
    print(f"[Seq {nn.getSequenceNum()}] NN ts: {td2ms(nn_ts)}ms")
    print('-----------')
    frameRgb = rgb.getCvFrame()
    detections = nn.detections

    for detection in detections:
        bbox = frameNorm(frameRgb, (detection.xmin, detection.ymin, detection.xmax, detection.ymax))
        cv2.putText(frameRgb, labelMap[detection.label], (bbox[0] + 10, bbox[1] + 20), cv2.FONT_HERSHEY_TRIPLEX, 0.5, 255)
        cv2.rectangle(frameRgb, (bbox[0], bbox[1]), (bbox[2], bbox[3]), (255, 0, 0), 2)

    cv2.imshow("Frame", frameRgb)

def frameNorm(frame, bbox):
    normVals = np.full(len(bbox), frame.shape[0])
    normVals[::2] = frame.shape[1]
    return (np.clip(np.array(bbox), 0, 1) * normVals).astype(int)

# Label map for MobileNet-SSD
labelMap = ["background", "aeroplane", "bicycle", "bird", "boat", "bottle", "bus", "car", "cat", "chair", "cow",
            "diningtable", "dog", "horse", "motorbike", "person", "pottedplant", "sheep", "sofa", "train", "tvmonitor"]

# Connect to device and start pipeline
with dai.Device() as device:
    device.startPipeline(create_pipeline())

    windowName = "Frame"
    cv2.namedWindow(windowName)

    # Create the output queues once, outside the loop
    queues = {name: device.getOutputQueue(name) for name in ['rgb', 'nn']}

    while True:
        for name in ['rgb', 'nn']:
            msg = queues[name].tryGet()
            if msg is not None:
                if name == 'rgb':
                    # ts = msg.getTimestampDevice(dai.CameraExposureOffset.MIDDLE)
                    ts = msg.getTimestamp()
                    print(f"[WHILE LOOP] [Seq {msg.getSequenceNum()}] '{name}' ts: {td2ms(ts)}ms, '{name}' exposure time: {td2ms(msg.getExposureTime())} ms")
                else:  # 'nn' messages (ImgDetections) have no exposure time
                    try:
                        ts = msg.getTimestamp()
                        print(f"[WHILE LOOP] [Seq {msg.getSequenceNum()}] '{name}' ts: {td2ms(ts)}ms")
                    except Exception:
                        print(f"[WHILE LOOP] Exception occurred while getting timestamp for '{name}'")
                        continue  # skip this message rather than syncing with an undefined ts
                # Pair the new message with the other stream; the frame is displayed in new_msg
                new_msg(msg, name, ts)

        if cv2.waitKey(1) == ord('q'):
            break

    cv2.destroyAllWindows()

I think this code is an MRE if you ignore all the print statements.

When I run this code, the synchronization works, but I have a few questions:

  • Is this code effectively doing the same thing as nn.passthrough.link() does?

  • If you run this code, can you confirm that it synchronizes the NN results with the frames as it should?

  • Can it be extended to also synchronize external IMU data on top of this code with a multithreading approach?

Thanks
Cem

    Uce Is this code effectively doing the same thing as nn.passthrough.link() does?

    Yep, instead of this:

    camRgb.preview.link(detection_nn.input)
    camRgb.preview.link(rgbOut.input) # outputs whenever it wants
    detection_nn.out.link(xout_nn.input)

    you could do

    camRgb.preview.link(detection_nn.input)
    detection_nn.passthrough.link(rgbOut.input) # synced
    detection_nn.out.link(xout_nn.input) # synced
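
    With that wiring, the passthrough frame and the detections come from the same NN input, so they carry the same sequence number. A rough, untested sketch of pairing them on the host by sequence number (assuming depthai is imported as dai and create_pipeline() uses the passthrough links above):

    pipeline = create_pipeline()  # with detection_nn.passthrough linked to rgbOut
    with dai.Device(pipeline) as device:
        qRgb = device.getOutputQueue("rgb", maxSize=4, blocking=True)
        qNn = device.getOutputQueue("nn", maxSize=4, blocking=True)
        while True:
            frame = qRgb.get()  # passthrough frame (the exact input the NN saw)
            dets = qNn.get()    # detections for that same input
            # one passthrough frame and one detections message per inference,
            # so blocking gets stay in lockstep and the sequence numbers match
            assert frame.getSequenceNum() == dets.getSequenceNum()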

    Uce If you run this code, can you confirm that it synchronizes the NN results with the frames as it should?

    It does appear to sync the frames.

    Uce Can it be extended to also synchronize external IMU data on top of this code with a multithreading approach?

    Yep. If you can get an accurate IMU timestamp into dai.Clock time, you should be able to sync it with the frames.
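
    A rough, untested sketch of what that could look like on top of your code (read_external_imu() is just a placeholder for however you read the IMU, e.g. over pyserial):

    import threading
    import depthai as dai

    def imu_reader(stop_event):
        while not stop_event.is_set():
            sample = read_external_imu()  # placeholder: your serial/I2C read
            ts = dai.Clock.now()          # same clock domain as msg.getTimestamp()
            # Note: this stamps when the host reads the sample, not when it was measured,
            # and msgs is shared with the main loop, so guard it (lock/queue) in a real version.
            new_msg(sample, "imu", ts)

    stop_event = threading.Event()
    threading.Thread(target=imu_reader, args=(stop_event,), daemon=True).start()

    You would also need to change the len(synced) == 2 check in add_msg to 3 and handle the 'imu' entry in new_msg.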

    Thanks,
    Jaka

      Hi jakaskerl

      Thank you for your comments. Here is the code output for syncing IMU-Frame-NN.

      2024-06-13 16:41:42 DEBUG [LOOP] =============== STARTING OF THE STREAM | Seq: 195 ===============
      2024-06-13 16:41:42 DEBUG  [LOOP] CHECK rgb getTimestamp: 92393.795 | RGB getTimestampDevice: 4.737
      2024-06-13 16:41:42 DEBUG [LOOP] [rgb] [ELAPSED] Read RGB msg: 0.143 ms | timestamp: 92393.795
      2024-06-13 16:41:42 DEBUG [ADDMSG] Message received: msg: ImgFrame | name: rgb | ts: 92393.795 at time: 92393.832
      2024-06-13 16:41:42 WARNING [ADDMSG] [NOT SYNCED]---NN data does not arrive yet---
      2024-06-13 16:41:42 DEBUG [LOOP] [nn] [ELAPSED] Read nn msg: 0.033 ms | timestamp: 92393.762
      2024-06-13 16:41:42 DEBUG [ADDMSG] Message received: msg: ImgDetections | name: nn | ts: 92393.762 at time: 92393.832
      2024-06-13 16:41:42 DEBUG [ADDMSG] [ELAPSED] SYNCING messages: 0.291 ms
      2024-06-13 16:41:42 DEBUG [NEWMSG] [+++SYNCED+++ TIMESTAMPS] RGB: 92393.762 | NN: 92393.762 | IMU: 92393.754
      2024-06-13 16:41:42 DEBUG [FRAME] Time before getting Frame and BB: 92393.833
      2024-06-13 16:41:42 DEBUG [FRAME] [ELAPSED] Sync: 0.478ms | NN: 1.699ms | IMU: 0.279ms | Render: 0.158ms | Display: 92393.835 | FPS: 38.462
      2024-06-13 16:41:42 DEBUG [IMU] [ELAPSED] read IMU data: 0.007 ms
      2024-06-13 16:41:42 DEBUG [ADDMSG] Message received: IMU DATAMSG: -2.716 | name: imu | ts: 92393.817 at time: 92393.836
      2024-06-13 16:41:42 WARNING [ADDMSG] [NOT SYNCED]---NN data does not arrive yet---
      2024-06-13 16:41:42 DEBUG [LOOP] =============== END OF THE STREAM ===============
      2024-06-13 16:41:42 DEBUG [LOOP] =============== STARTING OF THE STREAM | Seq: 196 ===============
      2024-06-13 16:41:42 DEBUG [IMU] [ELAPSED] read IMU data: 0.001 ms
      2024-06-13 16:41:42 DEBUG [ADDMSG] Message received: IMU DATAMSG: -2.851 | name: imu | ts: 92393.836 at time: 92393.847
      2024-06-13 16:41:42 WARNING [ADDMSG] [NOT SYNCED]---NN data does not arrive yet---
      2024-06-13 16:41:42 DEBUG [LOOP] =============== END OF THE STREAM ===============
      2024-06-13 16:41:42 DEBUG [LOOP] =============== STARTING OF THE STREAM | Seq: 197 ===============
      2024-06-13 16:41:42 DEBUG  [LOOP] CHECK rgb getTimestamp: 92393.829 | RGB getTimestampDevice: 4.770
      2024-06-13 16:41:42 DEBUG [LOOP] [rgb] [ELAPSED] Read RGB msg: 0.153 ms | timestamp: 92393.829
      2024-06-13 16:41:42 DEBUG [ADDMSG] Message received: msg: ImgFrame | name: rgb | ts: 92393.829 at time: 92393.863
      2024-06-13 16:41:42 WARNING [ADDMSG] [NOT SYNCED]---NN data does not arrive yet---
      2024-06-13 16:41:42 DEBUG [LOOP] [nn] [ELAPSED] Read nn msg: 0.016 ms | timestamp: 92393.795
      2024-06-13 16:41:42 DEBUG [ADDMSG] Message received: msg: ImgDetections | name: nn | ts: 92393.795 at time: 92393.863
      2024-06-13 16:41:42 DEBUG [ADDMSG] [ELAPSED] SYNCING messages: 0.110 ms
      2024-06-13 16:41:42 DEBUG [NEWMSG] [+++SYNCED+++ TIMESTAMPS] RGB: 92393.795 | NN: 92393.795 | IMU: 92393.801
      2024-06-13 16:41:42 DEBUG [FRAME] Time before getting Frame and BB: 92393.863
      2024-06-13 16:41:42 DEBUG [FRAME] [ELAPSED] Sync: 0.158ms | NN: 0.585ms | IMU: 0.061ms | Render: 0.096ms | Display: 92393.864 | FPS: 37.234
      2024-06-13 16:41:42 DEBUG [IMU] [ELAPSED] read IMU data: 0.003 ms
      2024-06-13 16:41:42 DEBUG [ADDMSG] Message received: IMU DATAMSG: -2.880 | name: imu | ts: 92393.848 at time: 92393.864
      2024-06-13 16:41:42 WARNING [ADDMSG] [NOT SYNCED]---NN data does not arrive yet---
      2024-06-13 16:41:42 DEBUG [LOOP] =============== END OF THE STREAM ===============
      2024-06-13 16:41:42 DEBUG [LOOP] =============== STARTING OF THE STREAM | Seq: 198 ===============
      2024-06-13 16:41:42 DEBUG [IMU] [ELAPSED] read IMU data: 0.001 ms
      2024-06-13 16:41:42 DEBUG [ADDMSG] Message received: IMU DATAMSG: -2.967 | name: imu | ts: 92393.865 at time: 92393.878
      2024-06-13 16:41:42 WARNING [ADDMSG] [NOT SYNCED]---NN data does not arrive yet---
      2024-06-13 16:41:42 DEBUG [LOOP] =============== END OF THE STREAM ===============
      2024-06-13 16:41:42 DEBUG [LOOP] =============== STARTING OF THE STREAM | Seq: 199 ===============
      2024-06-13 16:41:42 DEBUG [IMU] [ELAPSED] read IMU data: 0.001 ms
      2024-06-13 16:41:42 DEBUG [ADDMSG] Message received: IMU DATAMSG: -2.960 | name: imu | ts: 92393.879 at time: 92393.894
      2024-06-13 16:41:42 WARNING [ADDMSG] [NOT SYNCED]---NN data does not arrive yet---
      2024-06-13 16:41:42 DEBUG [LOOP] =============== END OF THE STREAM ===============
      2024-06-13 16:41:42 DEBUG [LOOP] =============== STARTING OF THE STREAM | Seq: 200 ===============
      2024-06-13 16:41:42 DEBUG  [LOOP] CHECK rgb getTimestamp: 92393.862 | RGB getTimestampDevice: 4.803
      2024-06-13 16:41:42 DEBUG [LOOP] [rgb] [ELAPSED] Read RGB msg: 0.247 ms | timestamp: 92393.862
      2024-06-13 16:41:42 DEBUG [ADDMSG] Message received: msg: ImgFrame | name: rgb | ts: 92393.862 at time: 92393.910
      2024-06-13 16:41:42 WARNING [ADDMSG] [NOT SYNCED]---NN data does not arrive yet---
      2024-06-13 16:41:42 DEBUG [LOOP] [nn] [ELAPSED] Read nn msg: 0.011 ms | timestamp: 92393.829
      2024-06-13 16:41:42 DEBUG [ADDMSG] Message received: msg: ImgDetections | name: nn | ts: 92393.829 at time: 92393.910
      2024-06-13 16:41:42 DEBUG [ADDMSG] [ELAPSED] SYNCING messages: 0.064 ms
      2024-06-13 16:41:42 DEBUG [NEWMSG] [+++SYNCED+++ TIMESTAMPS] RGB: 92393.829 | NN: 92393.829 | IMU: 92393.836
      2024-06-13 16:41:42 DEBUG [FRAME] Time before getting Frame and BB: 92393.910
      2024-06-13 16:41:42 DEBUG [FRAME] [ELAPSED] Sync: 0.100ms | NN: 0.560ms | IMU: 0.084ms | Render: 0.070ms | Display: 92393.911 | FPS: 34.043
      2024-06-13 16:41:42 DEBUG [IMU] [ELAPSED] read IMU data: 0.003 ms
      2024-06-13 16:41:42 DEBUG [ADDMSG] Message received: IMU DATAMSG: -2.857 | name: imu | ts: 92393.895 at time: 92393.911
      2024-06-13 16:41:42 WARNING [ADDMSG] [NOT SYNCED]---NN data does not arrive yet---
      2024-06-13 16:41:42 DEBUG [LOOP] =============== END OF THE STREAM ===============
      • This log covers 6 iterations of the while loop.

      • If you check

      Message received: msg: ImgDetections | name: nn | ts:

      this part shows the timestamp at which the NN result was obtained and the time at which it was received by the host, afaik. So most of the time there is about 80 ms of delay between when the NN data is obtained and when the host receives it. I have used nn.passthrough but it is still 80 ms. How can I decrease this? By using more cores for the NN?

      • The lines below show the synchronized data being displayed:

      2024-06-13 16:41:42 DEBUG [NEWMSG] [+++SYNCED+++ TIMESTAMPS] RGB: 92393.762 | NN: 92393.762 | IMU: 92393.754
      2024-06-13 16:41:42 DEBUG [FRAME] Time before getting Frame and BB: 92393.833

      These lines show that there is about 79 ms of delay between the timestamp of the earliest synced data and the moment it is displayed on the frame at the host side. How can I decrease this delay?

      • Would using USB 3.0 help a bit?

      • Would putting the camera pipeline into a separate thread, so that the camera-pipeline thread and the IMU thread run in parallel with the main loop, help? (A rough sketch of what I mean follows below.)

        I can share the code if looking at the logs is not enough.
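
      To make the second point concrete, this is roughly the structure I have in mind (just a sketch, not my actual code; read_external_imu() stands in for the serial read):

      import queue
      import threading
      import depthai as dai

      work_q = queue.Queue()  # producers push (name, ts, msg); the main loop only syncs and draws

      def camera_thread(device):
          # read both device output queues in their own thread
          qRgb = device.getOutputQueue("rgb")
          qNn = device.getOutputQueue("nn")
          while True:
              for name, q in (("rgb", qRgb), ("nn", qNn)):
                  msg = q.tryGet()
                  if msg is not None:
                      work_q.put((name, msg.getTimestamp(), msg))

      def imu_thread():
          while True:
              sample = read_external_imu()  # placeholder for the serial read
              work_q.put(("imu", dai.Clock.now(), sample))

      # main loop: name, ts, msg = work_q.get(); add_msg(msg, name, ts); display when synced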

      Thanks in advance!

        Uce the timestamp at which the NN result was obtained and the time at which it was received by the host, afaik.

        If you are using getTimestamp, then the time is recorded at the start of image exposure, so those ~80 ms are the time it takes to process the frame (ISP, NN, XLink). There is no getting around it. Check with the environment variable DEPTHAI_LEVEL=TRACE to see how much time each operation takes; it is HW-bound, and you cannot speed it up other than by reducing the image size, setting an optimal SHAVE count, etc.
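
        For reference, one way to see that end-to-end latency per message on the host, for a msg just taken from an output queue:

        # host "now" minus the (exposure-start) timestamp carried by the message
        latency_ms = (dai.Clock.now() - msg.getTimestamp()).total_seconds() * 1000
        print(f"Latency: {latency_ms:.1f} ms")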

        Thanks,
        Jaka
