• DepthAI-v2
  • Flicker in video stream when modifying settings

Hi DanGoodrick
Set it either as an initial control before running the pipeline, or keep sending it in the while True loop after the first image is received with .get().
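
For example, a minimal sketch of both options, assuming a ColorCamera node camRgb, an XLinkIn control stream named 'control' (controlQueue), and the encoded output queue videoQueue as in your pipeline:

# Option 1: set the control on the node before starting the pipeline
camRgb.initialControl.setBrightness(-5)
camRgb.initialControl.setSharpness(2)

# Option 2: re-send the control inside the loop, after the first blocking .get()
ctrl = dai.CameraControl()
ctrl.setBrightness(-5)
ctrl.setSharpness(2)
while True:
    data = videoQueue.get().getData()
    controlQueue.send(ctrl)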

Thanks,
Jaka

7 days later

I'm not sure how to set the brightness before running the pipeline. I used the device object to access getInputQueue("control"). This didn't fix the problem. The video still flickers.

import time
from fractions import Fraction
import depthai as dai
import av

from rgb_camera_control import ids, get_log_level_obj, camera_models


def write_stream(cam_id, settings):
    pipeline = dai.Pipeline()
    camRgb = pipeline.create(dai.node.ColorCamera)
    camRgb.setResolution(dai.ColorCameraProperties.SensorResolution.THE_4_K)
    # camRgb.setResolution(dai.ColorCameraProperties.SensorResolution.THE_4000X3000)
    # camRgb.setResolution(dai.ColorCameraProperties.SensorResolution.THE_5312X6000)
    width, height = camRgb.getVideoSize()
    print(width, height)
    camRgb.setFps(settings["fps"])
    imageManip = pipeline.create(dai.node.ImageManip)
    frame_size = width * height * 3
    imageManip.setMaxOutputFrameSize(int(frame_size))
    imageManip.initialConfig.setFrameType(dai.ImgFrame.Type.NV12)
    camRgb.isp.link(imageManip.inputImage)
    videoEnc = pipeline.create(dai.node.VideoEncoder)
    videoEnc.setDefaultProfilePreset(settings["fps"], dai.VideoEncoderProperties.Profile.H264_MAIN)
    imageManip.out.link(videoEnc.input)
    xoutEnc = pipeline.create(dai.node.XLinkOut)
    xoutEnc.setStreamName('video')
    videoEnc.bitstream.link(xoutEnc.input)
    controlIn = pipeline.create(dai.node.XLinkIn)
    controlIn.setStreamName('control')
    controlIn.out.link(camRgb.inputControl)

    with dai.Device(pipeline, dai.DeviceInfo(ids[cam_id]), maxUsbSpeed=dai.UsbSpeed.HIGH) as device:
        device.setLogLevel(get_log_level_obj(settings["log_level"]))
        device.setLogOutputLevel(get_log_level_obj(settings["log_level"]))        
        controlQueue = device.getInputQueue('control')  # For sending control commands
        # apply_settings(controlQueue, settings)
        ctrl = dai.CameraControl()
        ctrl.setBrightness(-5)
        ctrl.setSharpness(2)
        controlQueue.send(ctrl)
        videoQueue = device.getOutputQueue('video', maxSize=30, blocking=True)
        output_container = av.open("video.mp4", 'w')
        stream = output_container.add_stream("h264", rate=Fraction(camRgb.getFps()))
        stream.time_base = Fraction(1, 1000 * 1000)
        start = time.time()
        print("Recording...ctrl+c to stop\n")
        try:
            while True:
                data = videoQueue.get().getData()
                # Re-send the same control on every frame (attempted flicker workaround)
                ctrl.setBrightness(-5)
                controlQueue.send(ctrl)
                packet = av.Packet(data)
                packet.pts = int((time.time() - start) * 1000 * 1000)
                output_container.mux_one(packet)
        except KeyboardInterrupt:
            pass
        output_container.close()


if __name__ == "__main__":
    import json
    devices = dai.Device.getAllAvailableDevices()
    print(f"Found {len(devices)} devices")
    for i, device_info in enumerate(devices):
        ids[i+1] = device_info.getMxId()
    for id in ids:
        print(camera_models[ids[id]], ": ", ids[id])
    with open('settings.json', 'r') as f:
        settings = json.load(f)
    write_stream(2, settings)

    Hi DanGoodrick

    You can set it on the node itself, before running the pipeline, with initial control:

    camRgb.initialControl.(some control)

    Thanks,
    Jaka

    Thanks for that tip, but setting the control before the pipeline didn't fix the problem.

    camRgb.initialControl.setBrightness(-5)

    camRgb.initialControl.setSharpness(2)

    Yes, I did. Is this not happening on your end? This is my script:

    import time
    from fractions import Fraction
    import depthai as dai
    import av
    
    fps = 30
    pipeline = dai.Pipeline()
    camRgb = pipeline.create(dai.node.ColorCamera)
    camRgb.setResolution(dai.ColorCameraProperties.SensorResolution.THE_4_K)
    width, height = camRgb.getVideoSize()
    print(width, height)
    camRgb.setFps(fps)
    imageManip = pipeline.create(dai.node.ImageManip)
    frame_size = width * height * 3
    imageManip.setMaxOutputFrameSize(int(frame_size))
    imageManip.initialConfig.setFrameType(dai.ImgFrame.Type.NV12)
    camRgb.isp.link(imageManip.inputImage)
    
    camRgb.initialControl.setBrightness(-5)
    camRgb.initialControl.setSharpness(2)
    
    videoEnc = pipeline.create(dai.node.VideoEncoder)
    videoEnc.setDefaultProfilePreset(fps, dai.VideoEncoderProperties.Profile.H264_MAIN)
    imageManip.out.link(videoEnc.input)
    xoutEnc = pipeline.create(dai.node.XLinkOut)
    xoutEnc.setStreamName('video')
    videoEnc.bitstream.link(xoutEnc.input)
    controlIn = pipeline.create(dai.node.XLinkIn)
    controlIn.setStreamName('control')
    controlIn.out.link(camRgb.inputControl)
    
    with dai.Device(pipeline, dai.DeviceInfo("184430109118E1F400"), maxUsbSpeed=dai.UsbSpeed.HIGH) as device:
        # apply_settings(controlQueue, settings)
        videoQueue = device.getOutputQueue('video', maxSize=30, blocking=True)
        output_container = av.open("video.mp4", 'w')
        stream = output_container.add_stream("h264", rate=Fraction(camRgb.getFps()))
        stream.time_base = Fraction(1, 1000 * 1000)
        start = time.time()
        print("Recording...ctrl+c to stop\n")
        try:
            while True:
                data = videoQueue.get().getData()
                packet = av.Packet(data)
                packet.pts = int((time.time() - start) * 1000 * 1000)
                output_container.mux_one(packet)
        except KeyboardInterrupt:
            pass
        output_container.close()

      Hi DanGoodrick
      It looks to be a FW problem. The only way I found it works is if I send the controls each iteration:

      with dai.Device(pipeline, dai.DeviceInfo(), maxUsbSpeed=dai.UsbSpeed.SUPER_PLUS) as device:
          # apply_settings(controlQueue, settings)
          inputQueue = device.getInputQueue('control')
      
          control = dai.CameraControl()
          control.setSharpness(-10)
          control.setBrightness(-2)
      
          videoQueue = device.getOutputQueue('video', maxSize=30, blocking=True)
          output_container = av.open("video.mp4", 'w')
          stream = output_container.add_stream("h264", rate=Fraction(camRgb.getFps()))
          stream.time_base = Fraction(1, 1000 * 1000)
          start = time.time()
          print("Recording...ctrl+c to stop\n")
          try:
              while True:
                  inputQueue.send(control)
                  data = videoQueue.get().getData()
                  packet = av.Packet(data)
                  packet.pts = int((time.time() - start) * 1000 * 1000)
                  output_container.mux_one(packet)
          except KeyboardInterrupt:
              pass
          output_container.close()

      Thanks,
      Jaka

      I installed the dev branch but get the same flickery result.

      (venv) bwadmin@proactive-sandy:~/luxonis$ pip show depthai
      Name: depthai
      Version: 2.28.0.0.dev0+d3b92a92782894b33015d97d435451ff35532d06

        Hi DanGoodrick
        Sorry, I sent you a different script; I was testing different configs and sent you the one I thought was working. The issue disappears if you put the send below getData():

        data = videoQueue.get().getData()
        inputQueue.send(control)

        I don't know why that happens; it doesn't really make sense to me, but it seems to stem from the blocking get() call. We will try to fix this in V3.
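
        For reference, a minimal sketch of the reordered loop, using the same names as in the script above:

        try:
            while True:
                # blocking get() first, then re-send the control
                data = videoQueue.get().getData()
                inputQueue.send(control)
                packet = av.Packet(data)
                packet.pts = int((time.time() - start) * 1000 * 1000)
                output_container.mux_one(packet)
        except KeyboardInterrupt:
            pass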

        Thanks,
        Jaka