Hi jakaskerl
Thank you for your answers in this post.
Since the topic shifted towards synchronization rather than latencies, I am posting a new discussion.
Trying to sync an external IMU (threaded), frames, and the NN output is harder, so I am first trying to understand frame–NN–host sync with this code.
import cv2
import numpy as np
import depthai as dai
import blobconverter
import threading
import time
import serial
from datetime import timedelta
# Threshold for synchronization: two messages whose timestamps differ by
# less than this many milliseconds are considered to belong together.
MS_THRESHOLD = 30

# Buffers of not-yet-synced messages keyed by stream name ('rgb' / 'nn').
# Each value is a chronologically ordered list of (timestamp, message) tuples.
msgs = dict()


def add_msg(msg, name, ts=None, num_streams=2):
    """Buffer *msg* under stream *name* and try to sync across streams.

    Args:
        msg: Device message (frame / NN result). Must provide
            ``getTimestamp()`` when *ts* is not given.
        name: Name of the stream the message arrived on (e.g. 'rgb', 'nn').
        ts: Timestamp (``datetime.timedelta``) to match against; defaults
            to ``msg.getTimestamp()``.
        num_streams: Number of streams that must each have a message within
            MS_THRESHOLD of *ts* for a sync to be declared. Defaults to 2
            (rgb + nn); generalized so further streams (e.g. an external
            IMU) can be added without changing existing callers.

    Returns:
        dict mapping stream name -> (timestamp, message) for the synced
        set (messages are removed from the buffers), or ``False`` when no
        sync is possible yet.
    """
    print(f"[ADD MESSAGE] New message received: {msg}, {name}, {ts}")
    if ts is None:
        ts = msg.getTimestamp()
    # setdefault replaces the original `if not name in msgs` membership test.
    msgs.setdefault(name, []).append((ts, msg))
    for key, arr in msgs.items():
        if len(arr) > 0:
            print(f"[ADD MESSAGE DICTIONARY] key: {key}: value length {len(arr)} | last value: {arr[-1]}")
    synced = {}
    for key, arr in msgs.items():
        diffs = [abs(msg_ts - ts) for msg_ts, _ in arr]
        if not diffs:
            # Some stream has no buffered messages yet -> cannot sync.
            break
        # min() replaces the original copy-then-sort to find the smallest diff.
        dif = min(diffs)
        print(f"[ADD MESSAGE] Difference: {dif}")
        print(f"[ADD MESSAGE] Difference in milliseconds: {dif / timedelta(milliseconds=1)}")
        print(f"convertedtime: {timedelta(milliseconds=MS_THRESHOLD)}")
        if dif < timedelta(milliseconds=MS_THRESHOLD):
            # Remember the index of this stream's best-matching message.
            synced[key] = diffs.index(dif)
    if len(synced) == num_streams:
        print(f"[ADD MESSAGE] +++WILL BE SYNCED!+++")
        # Drop everything older than the matched message in each stream,
        # then pop the matched (now oldest) message from every synced stream.
        for key, i in synced.items():
            msgs[key] = msgs[key][i:]
        ret = {}
        for key in synced:
            arr = msgs[key]
            print(f"[ADD MESSAGE] +++will be popped+++: {arr[0]}")
            ret[key] = arr.pop(0)
        print(f"[ADD MESSAGE] +++SYNCED data+++: {ret}")
        return ret
    print(f"[ADD MESSAGE] ---Not synced yet---")
    return False
def create_pipeline():
    """Build the device pipeline: a color camera whose preview feeds both a
    MobileNet-SSD detector and the host, with detections streamed out too."""
    pipeline = dai.Pipeline()

    # Color camera: 1080p sensor, planar (non-interleaved) preview, 30 FPS.
    cam = pipeline.create(dai.node.ColorCamera)
    cam.setResolution(dai.ColorCameraProperties.SensorResolution.THE_1080_P)
    cam.setInterleaved(False)
    cam.setFps(30)

    # MobileNet-SSD detector, blob fetched from the model zoo.
    nn = pipeline.create(dai.node.MobileNetDetectionNetwork)
    nn.setBlobPath(blobconverter.from_zoo(name='mobilenet-ssd', shaves=6))
    nn.setConfidenceThreshold(0.5)

    # Host-bound output streams.
    xout_rgb = pipeline.create(dai.node.XLinkOut)
    xout_rgb.setStreamName("rgb")
    xout_detections = pipeline.create(dai.node.XLinkOut)
    xout_detections.setStreamName("nn")

    # The same preview frames go to the NN and to the host.
    cam.preview.link(nn.input)
    cam.preview.link(xout_rgb.input)
    nn.out.link(xout_detections.input)
    return pipeline
def td2ms(td) -> int:
    """Convert a ``datetime.timedelta`` to whole milliseconds (truncated)."""
    one_ms = timedelta(milliseconds=1)
    return int(td / one_ms)
def new_msg(msg, name, ts=None):
    """Feed *msg* into the sync buffer; once an rgb/nn pair is synced,
    print timing info and display the frame with detection boxes drawn."""
    print(f"[NEWMSG] msg: {type(msg).__name__}, name: {name}, ts: {ts}")
    synced = add_msg(msg, name, ts)
    if not synced:
        return
    rgb_ts, rgb = synced['rgb']
    nn_ts, nn = synced['nn']
    print(f"[NEWMSG: SYNCED] RGB: {rgb_ts}, NN: {nn_ts}")
    print(f"[Seq {rgb.getSequenceNum()}] Mid of RGB exposure ts: {td2ms(rgb_ts)}ms, RGB ts: {td2ms(rgb.getTimestampDevice())}ms, RGB exposure time: {td2ms(rgb.getExposureTime())}ms")
    print(f"[Seq {nn.getSequenceNum()}] NN ts: {td2ms(nn_ts)}ms")
    print('-----------')
    frame = rgb.getCvFrame()
    for det in nn.detections:
        # Normalized [0,1] coords -> pixel coords, then label + box overlay.
        x1, y1, x2, y2 = frameNorm(frame, (det.xmin, det.ymin, det.xmax, det.ymax))
        cv2.putText(frame, labelMap[det.label], (x1 + 10, y1 + 20), cv2.FONT_HERSHEY_TRIPLEX, 0.5, 255)
        cv2.rectangle(frame, (x1, y1), (x2, y2), (255, 0, 0), 2)
    cv2.imshow("Frame", frame)
def frameNorm(frame, bbox):
    """Map normalized [0, 1] bbox coordinates to pixel coordinates.

    Even-indexed entries (x) are scaled by the frame width, odd-indexed
    entries (y) by the frame height; inputs are clipped to [0, 1] first.
    """
    height, width = frame.shape[0], frame.shape[1]
    scale = np.where(np.arange(len(bbox)) % 2 == 0, width, height)
    return (np.clip(np.array(bbox), 0, 1) * scale).astype(int)
# Label map for MobileNet-SSD: class index (detection.label) -> class name.
labelMap = ["background", "aeroplane", "bicycle", "bird", "boat", "bottle", "bus", "car", "cat", "chair", "cow",
            "diningtable", "dog", "horse", "motorbike", "person", "pottedplant", "sheep", "sofa", "train", "tvmonitor"]
# Connect to device and start pipeline
with dai.Device() as device:
    device.startPipeline(create_pipeline())
    windowName = "Frame"
    cv2.namedWindow(windowName)
    # Create the host-side output queues once, outside the polling loop,
    # instead of looking them up on every iteration.
    queues = {name: device.getOutputQueue(name) for name in ('rgb', 'nn')}
    while True:
        for name, queue in queues.items():
            msg = queue.tryGet()
            if msg is None:
                continue
            if name == 'rgb':
                # Host-synced timestamp of the frame.  (For mid-exposure timing
                # use msg.getTimestampDevice(dai.CameraExposureOffset.MIDDLE).)
                ts = msg.getTimestamp()
                print(f"[WHILE LOOP] [Seq {msg.getSequenceNum()}] Mid of '{name}' exposure ts: {td2ms(ts)}ms, '{name}' exposure time: {td2ms(msg.getExposureTime())} ms")
            else:  # 'nn' messages (or other types without exposure info)
                try:
                    ts = msg.getTimestamp()
                    print(f"[WHILE LOOP] [Seq {msg.getSequenceNum()}] '{name}' ts: {td2ms(ts)}ms")
                except Exception:
                    # BUG FIX: previously `ts` could be undefined (first
                    # iteration) or stale (left over from an earlier rgb
                    # message) here, yet was still passed to new_msg().
                    # Skip this message instead.
                    print(f"[WHILE LOOP] Exception occurred while getting timestamp for '{name}'")
                    continue
            # Buffer the message for syncing; the frame is displayed in new_msg.
            new_msg(msg, name, ts)
        if cv2.waitKey(1) == ord('q'):
            break
cv2.destroyAllWindows()
I think this code is a minimal reproducible example (MRE) if you ignore the printed output.
When I run this code, synchronization is working but:
Actually, does this code perform the same operation as linking nn.passthrough does?
If you run this code, can you confirm that it synchronizes the NN-frame as it should be?
Can it be used further for synchronizing external IMU data on top of this code with a multithreading approach?
Thanks
Cem