Hi! I posted about an issue I'm seeing when encoding 4K@30fps on an OAK-1 PoE: the encoder can normally sustain about 28 fps, but it drops to about 20 fps once I start running a neural net to do object detection. As I understand it, the NN should not be draining resources from the encoder, so perhaps I'm doing something wrong.
I put together some simplified code to show the problem:
#!/usr/bin/env python3
"""Minimal repro: measure H.265 encoder throughput on an OAK camera,
with an optional MobileNet-SSD detection network running in parallel.

Set ``use_nn = False`` below to measure the encoder alone.
"""
from pathlib import Path
import sys
import cv2
import depthai as dai
import numpy as np
from datetime import datetime
import time
import json
import sacn
import random

# Locate the MobileNet-SSD blob shipped with the depthai-python examples.
nnPath = str((Path(__file__).parent / Path('depthai-python/examples/models/mobilenet-ssd_openvino_2021.4_5shave.blob')).resolve().absolute())
if not Path(nnPath).exists():
    raise FileNotFoundError(f'Required file/s not found, please run "{sys.executable} install_requirements.py"')

# --- Configuration ---
use_nn = True                 # flip to False to benchmark the encoder without the NN
preview_size = (300, 300)     # MobileNet-SSD expects 300x300 input
sensor_resolution = dai.ColorCameraProperties.SensorResolution.THE_4_K
codec = dai.VideoEncoderProperties.Profile.H265_MAIN
file_extension = 'h265'
fps = 30

# --- Pipeline definition ---
pipeline = dai.Pipeline()

camRgb = pipeline.create(dai.node.ColorCamera)
videoEncoder = pipeline.create(dai.node.VideoEncoder)
nn = None
if use_nn:
    nn = pipeline.create(dai.node.MobileNetDetectionNetwork)
videoOut = pipeline.create(dai.node.XLinkOut)
nnOut = None
if use_nn:
    nnOut = pipeline.create(dai.node.XLinkOut)

videoOut.setStreamName("h265")
if use_nn:
    nnOut.setStreamName("nn")

# Properties
camRgb.setBoardSocket(dai.CameraBoardSocket.RGB)
camRgb.setResolution(sensor_resolution)
camRgb.setPreviewSize(preview_size)
camRgb.setInterleaved(False)
videoEncoder.setDefaultProfilePreset(fps, codec)
if use_nn:
    nn.setConfidenceThreshold(0.5)
    nn.setBlobPath(nnPath)
    nn.setNumInferenceThreads(2)
    nn.input.setBlocking(False)  # drop frames rather than stall the camera

# Linking: full-res video -> encoder -> host; small preview -> NN -> host
camRgb.video.link(videoEncoder.input)
videoEncoder.bitstream.link(videoOut.input)
if use_nn:
    nn.out.link(nnOut.input)
    camRgb.preview.link(nn.input)

# --- Connect to device and start pipeline ---
print(datetime.now().strftime('%H:%M.%S.%f: Starting device'))
with dai.Device(pipeline) as device:
    print(datetime.now().strftime('%H:%M.%S.%f: Device started'))
    # Set debugging level
    #device.setLogLevel(dai.LogLevel.DEBUG)
    #device.setLogOutputLevel(dai.LogLevel.DEBUG)

    # Queues
    queue_size = 8
    qDet = None
    if use_nn:
        qDet = device.getOutputQueue("nn", queue_size)
    qRgbEnc = device.getOutputQueue('h265', maxSize=30, blocking=True)

    # Count encoded frames and report the rate over ~10-second windows.
    frameCount = 0
    frameStart = None
    while True:
        inDet = None
        if qDet:
            inDet = qDet.tryGet()  # drain detections so the NN queue can't back up
        while qRgbEnc.has():
            encFrame = qRgbEnc.get()
            # Ordinarily we might write this to a file
            frameCount += 1
        if not frameStart:
            # Start the measurement window on the first pass; discard
            # anything counted before the clock started.
            frameStart = datetime.now()
            frameCount = 0
        if (datetime.now() - frameStart).total_seconds() > 10:
            print("Saw %s frames in 10 seconds" % frameCount)
            frameStart = datetime.now()
            frameCount = 0
        time.sleep(.01)
You can set `use_nn = False` to see the higher frame rate when the neural net is disabled.