Hi,
I’m running into an issue with my new OAK-4S camera when capturing at full 48 MP resolution with ISP cropping.
Setup:
Camera: OAK-4S
Resolution: 48 MP, cropping a 7500 × 1500 region out of the full ISP output (8000 × 6000)
Exposure: Manual exposure set to 50 µs
Gain: 200
Lighting: Red strobe light, triggered to remain ON for the entire exposure duration (not just a pulse)
Frame type: NV12 → ImageManip → BGR, saving as PNG
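For reference, the relevant configuration boils down to roughly this (a condensed sketch of the full script further below; crop offsets and control values match what I actually use):
```
import depthai as dai

with dai.Pipeline() as pipeline:
    cam = pipeline.create(dai.node.Camera).build(dai.CameraBoardSocket.CAM_A)

    # ImageManip crops a 7500 x 1500 window out of the 8000 x 6000 ISP frame
    manip = pipeline.create(dai.node.ImageManip)
    manip.initialConfig.addCrop(0, 1700, 7500, 1500)
    manip.setMaxOutputFrameSize(7500 * 1500 * 3 + 10000)

    cam.requestFullResolutionOutput(type=dai.ImgFrame.Type.NV12, fps=12,
                                    useHighestResolution=True).link(manip.inputImage)

    ctrlQ = cam.inputControl.createInputQueue()
    pipeline.start()

    # 50 us manual exposure, ISO 200, fixed focus
    ctrl = dai.CameraControl()
    ctrl.setManualExposure(50, 200)
    ctrl.setManualFocus(179)
    ctrlQ.send(ctrl)
```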
Problem:
Even though the strobe light is ON during the whole exposure time, the captured images consistently show only red light bands across the frame (see attached examples). The rest of the image remains dark.
Why would the image capture only red light bands instead of full illumination when the strobe is ON for the entire exposure duration? Could this be related to ISP crop timing rather than the rolling shutter? I don't think it is a rolling shutter issue, given that the light stays on throughout the exposure time.
Note: The light is currently not being strobed by the OAK-4S itself, but even with extremely long strobe durations I still see the same issue.



This does not look like a rolling shutter artifact (since the strobe covers the entire exposure). I also ran additional tests, but not a single frame shows even illumination – every capture still only has these bands.
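To help rule out a timing mismatch between the strobe and the sensor readout, I can also log the per-frame exposure metadata and compare it against the strobe ON window. A minimal diagnostic sketch that would replace the capture loop in the script below (assuming the standard ImgFrame getters getExposureTime(), getTimestampDevice() and getSensitivity() report valid values for the cropped output on the OAK-4S):
```
# Diagnostic only: log when each cropped frame was exposed and for how long,
# to correlate against the strobe ON window.
while pipeline.isRunning():
    frame = manipQ.get()
    exp_us = frame.getExposureTime().total_seconds() * 1e6   # exposure time in microseconds
    ts_s = frame.getTimestampDevice().total_seconds()        # device-side capture timestamp
    print(f"seq {frame.getSequenceNum()}: exposure = {exp_us:.0f} us, "
          f"device ts = {ts_s:.6f} s, ISO = {frame.getSensitivity()}")
```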
Code:
```
from util.arguments import initialize_argparser
from util.manual_camera_control import ManualCameraControl

import time
from datetime import datetime

import cv2
import depthai as dai
import numpy as np


def contains_red_light(frame, red_pixel_threshold=10000):
    """Return True if the frame has enough red pixels."""
    hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
    # Red in HSV wraps around hue=0, so two ranges are needed
    lower_red1 = np.array([0, 120, 70])
    upper_red1 = np.array([10, 255, 255])
    lower_red2 = np.array([170, 120, 70])
    upper_red2 = np.array([180, 255, 255])
    # Create masks for both red ranges and combine them
    mask1 = cv2.inRange(hsv, lower_red1, upper_red1)
    mask2 = cv2.inRange(hsv, lower_red2, upper_red2)
    mask = mask1 | mask2
    red_pixels = cv2.countNonZero(mask)
    return red_pixels > red_pixel_threshold


_, args = initialize_argparser()
# visualizer = dai.RemoteConnection(httpPort=8082)
device = dai.Device(dai.DeviceInfo(args.device)) if args.device else dai.Device()

width = 7500
height = 1500
CROP_X_OFFSET = 0
CROP_Y_OFFSET = 1700
ISP_WIDTH = 8000
ISP_HEIGHT = 6000

# Clamp crop to valid range
crop_x2 = min(CROP_X_OFFSET + width, ISP_WIDTH)
crop_y2 = min(CROP_Y_OFFSET + height, ISP_HEIGHT)

# Normalized coordinates (0.0 - 1.0); only needed for the commented-out
# autofocus region below - ImageManip.addCrop takes pixel coordinates
crop_norm_x1 = CROP_X_OFFSET / ISP_WIDTH
crop_norm_y1 = CROP_Y_OFFSET / ISP_HEIGHT
crop_norm_x2 = crop_x2 / ISP_WIDTH
crop_norm_y2 = crop_y2 / ISP_HEIGHT

with dai.Pipeline(device) as pipeline:
    print("Creating pipeline...")
    cam = pipeline.create(dai.node.Camera).build(dai.CameraBoardSocket.CAM_A)

    # Crop the 7500 x 1500 region of interest out of the full ISP frame
    manip = pipeline.create(dai.node.ImageManip)
    manip.initialConfig.addCrop(CROP_X_OFFSET, CROP_Y_OFFSET, width, height)

    cam_out = cam.requestFullResolutionOutput(type=dai.ImgFrame.Type.NV12, fps=12, useHighestResolution=True)
    cam_out.link(manip.inputImage)

    # camQ = cam_out.createOutputQueue()  # args.fps_limit
    manipQ = manip.out.createOutputQueue()
    manip.setMaxOutputFrameSize(width * height * 3 + 10000)
    ctrlQ = cam.inputControl.createInputQueue()
    print("Pipeline created.")

    pipeline.start()

    ctrl = dai.CameraControl()
    ctrl.setManualExposure(50, 200)  # exposure time (us), ISO - adjust as needed
    """ctrl.setAutoFocusMode(dai.CameraControl.AutoFocusMode.CONTINUOUS_VIDEO)
    ctrl.setAutoFocusRegion(
        int(crop_norm_x1 * 10000),                   # X min
        int(crop_norm_y1 * 10000),                   # Y min
        int((crop_norm_x2 - crop_norm_x1) * 10000),  # width
        int((crop_norm_y2 - crop_norm_y1) * 10000)   # height
    )"""
    ctrl.setManualFocus(179)
    ctrlQ.send(ctrl)

    # Give the exposure/focus controls time to take effect
    time.sleep(2.5)  # adjust if needed

    # Flush a few buffered frames, then read the lens position from a fresh frame
    for _ in range(5):
        camIn = manipQ.get()
    camIn = manipQ.get()  # now get the current frame
    lens_pos = camIn.getLensPosition()
    if lens_pos and lens_pos > 0:
        print(f"Autofocus complete. Lens position = {lens_pos}")
        # Lock focus at this position
        # lock_ctrl = dai.CameraControl()
        # lock_ctrl.setManualFocus(lens_pos)
        # ctrlQ.send(lock_ctrl)
        print(f"Focus locked at lens position {lens_pos}")
    else:
        print("Warning: Could not get valid lens position from AF")

    """lock_ctrl = dai.CameraControl()
    lock_ctrl.setManualFocus(179)
    ctrlQ.send(lock_ctrl)"""

    last_save_end = None
    while pipeline.isRunning():
        camIn = manipQ.get()
        assert isinstance(camIn, dai.ImgFrame)

        lens_pos = camIn.getLensPosition()
        print(f"Lens position = {lens_pos}")

        cam_cvframe = camIn.getCvFrame()
        # cv2.namedWindow("image", cv2.WINDOW_NORMAL)
        # cv2.imshow("image", cam_cvframe)

        t0 = time.perf_counter()
        if contains_red_light(cam_cvframe):
            print("------------------------------------------Saving Image------------------------------------")
            img_dir = "C:/oak-examples/camera-controls/manual-camera-control/image_capture/Copper_Data/"
            datetime_str = datetime.now().strftime("%Y-%m-%d %H_%M_%S")
            capture_file_name = img_dir + f"capture_{camIn.getSequenceNum()}_" + datetime_str + ".png"
            cv2.imwrite(capture_file_name, cam_cvframe)

        t1 = time.perf_counter()
        if last_save_end is not None:
            dt = t1 - last_save_end
            if dt > 0:
                fps = 1.0 / dt
                print(f"[seq {camIn.getSequenceNum()}] End-to-End FPS: {fps:.2f} ({dt*1000:.1f} ms)")
        last_save_end = t1
```
Thanks & Regards
Yishu