I threw together a quick example (below). It uses a timer that starts the ffplay
process after a short delay; ffplay then plays the video file while it is still being written.
Note: be careful when running this — pressing Q to close the ffplay window does not terminate
the main program, which keeps writing video data to the file. Stop the main program with Ctrl+C.
#!/usr/bin/env python3
import subprocess as sp
import sys
import threading
from os import name as osName

import depthai as dai
# Create pipeline: RGB camera -> H.264 encoder -> XLinkOut stream named "h264".
pipeline = dai.Pipeline()
# Define sources and output
camRgb = pipeline.createColorCamera()
videoEnc = pipeline.createVideoEncoder()
xout = pipeline.createXLinkOut()
xout.setStreamName("h264")  # must match the queue name requested from the device below
# Properties: 4K sensor resolution; encoder preset mirrors the camera's size/FPS.
camRgb.setBoardSocket(dai.CameraBoardSocket.RGB)
camRgb.setResolution(dai.ColorCameraProperties.SensorResolution.THE_4_K)
videoEnc.setDefaultProfilePreset(camRgb.getVideoSize(), camRgb.getFps(), dai.VideoEncoderProperties.Profile.H264_MAIN)
# Linking: camera frames feed the encoder; the encoded bitstream goes out over XLink.
camRgb.video.link(videoEnc.input)
videoEnc.bitstream.link(xout.input)
# ffplay window size and the file the pipeline writes to (and ffplay reads from).
width, height = 720, 500
filename = "video.h264"

# Build the ffplay invocation piecewise: small window plus low-latency flags.
command = ["ffplay", filename]
command += ["-x", str(width)]
command += ["-y", str(height)]
command += ["-framerate", "60"]
command += ["-fflags", "nobuffer"]
command += ["-flags", "low_delay"]
command.append("-framedrop")
command += ["-strict", "experimental"]

if osName == "nt":  # Running on Windows: wrap in cmd so ffplay resolves from PATH
    command = ["cmd", "/c", *command]
def playVideo():
    """Launch ffplay (non-blocking) on the module-level `command`.

    Runs on a threading.Timer worker thread. Raising SystemExit here (the
    original `exit(...)`) is silently swallowed by the threading module, so
    the user would never see the message — report to stderr instead.
    """
    try:
        sp.Popen(command)  # Start playing the video with delay
    except OSError:
        # ffplay binary missing or not executable; catch narrowly so real
        # bugs (NameError, etc.) are not hidden by a bare except.
        print("Error: cannot run ffplay!\nTry running: sudo apt install ffmpeg",
              file=sys.stderr)
# Give the encoder a head start so the file already has data when ffplay opens it.
delay = 5
# One-shot timer; started inside the capture loop below, fires playVideo once.
timer = threading.Timer(delay, playVideo)
# Connect to device and start pipeline
with dai.Device(pipeline) as device:
    # Output queue will be used to get the encoded data from the output defined above
    q = device.getOutputQueue(name="h264", maxSize=30, blocking=True)
    with open(filename, "wb") as videoFile:
        timer.start()  # schedule ffplay to launch after `delay` seconds
        try:
            while True:
                data = q.get().getData()  # Blocking call, will wait until new data has arrived
                data.tofile(videoFile)
        except KeyboardInterrupt:
            # Ctrl+C is the documented way to stop recording; exit cleanly.
            # (The original bare `except: pass` also hid genuine errors.)
            pass
        finally:
            # If we stop before the timer fires, don't launch ffplay afterwards.
            timer.cancel()