We are still struggling to access the depth data. We have gone through the documentation and explored many examples on GitHub, but we keep hitting walls. The goal is to print the depth data of detected objects to the terminal.
import cv2
import numpy as np
import depthai as dai
from depthai import NNData
from depthai_sdk import OakCamera, ArgsParser
from depthai_sdk.managers import PipelineManager
from depthai_sdk.classes import DetectionPacket, Detections
from depthai_sdk.visualize.configs import TextPosition
import argparse

# Parse the path to the trained YOLO .json config
parser = argparse.ArgumentParser()
parser.add_argument("-conf", "--config", help="Trained YOLO json config path", default='json/best.json', type=str)
args = ArgsParser.parseArgs(parser)


def decode(nn_data: NNData) -> Detections:
    # Custom decode function: unpack the raw NN output into SDK Detections
    print(nn_data)
    layer = nn_data.getFirstLayerFp16()
    print(layer)
    results = np.array(layer).reshape((1, 1, -1, 7))
    dets = Detections(nn_data)
    print(dets)
    for result in results[0][0]:
        if result[2] > 0.5:  # confidence threshold
            dets.add(result[1], result[2], result[3:])
    return dets


def callback(packet: DetectionPacket):
    # Called for every inference result; counts detections and draws them on the frame
    visualizer = packet.visualizer
    num = len(packet.img_detections.detections)
    # print(packet.img_detections.detections)
    # print('New msgs! Number of Balls detected:', num)
    # print(packet.imgFrame)
    visualizer.add_text(f"Number of Objects: {num}", position=TextPosition.TOP_MID)
    visualizer.draw(packet.frame)
    cv2.imshow(f'frame {packet.name}', packet.frame)


with OakCamera(args=args) as oak:
    color = oak.create_camera('color')
    nn = oak.create_nn(args['config'], color, spatial=True, decode_fn=decode)  # nn_type='yolo', decode_fn=decode
    oak.visualize(nn, callback=callback)
    oak.visualize(nn.out.passthrough, fps=True)
    oak.start(blocking=True)
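For reference, this is the sort of thing we have been trying inside the callback to get the depth printed. It is only a sketch of what we assume should work: we are guessing that with spatial=True each entry in packet.img_detections.detections carries spatialCoordinates (x, y, z in millimetres), but we have not confirmed that this still holds when a custom decode_fn is supplied, and that may be exactly where we are going wrong.

def print_depth(packet: DetectionPacket):
    # Assumption (unverified): with spatial=True the detections expose
    # spatialCoordinates in millimetres, as SpatialImgDetection does in the
    # plain depthai API. Not sure this survives a custom decode_fn.
    for det in packet.img_detections.detections:
        coords = det.spatialCoordinates
        print(f"label={det.label} conf={det.confidence:.2f} "
              f"x={coords.x:.0f} mm y={coords.y:.0f} mm z={coords.z:.0f} mm")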