I am using a DepthAI OAK-D4 module. I am using a VideoEncoder node to encode the ColorCamera node's output into an H.264 bitstream so that I can publish it to a ROS topic with `bridge.cv2_to_imgmsg`. But I cannot figure out how to use the bitstream that the VideoEncoder node gives me.
```python
import depthai as dai
import numpy as np
import rospy,cv2
from sensor_msgs.msg import Image
from cv_bridge import CvBridge, CvBridgeError
pub = rospy.Publisher('/webcamEnc', Image, queue_size=10)
pub1 = rospy.Publisher('/webcamRGB', Image, queue_size=10)
rospy.init_node('image', anonymous=True)
pipeline = dai.Pipeline()
camRgb = pipeline.create(dai.node.ColorCamera)
videnc = pipeline.create(dai.node.VideoEncoder)
xoutRgb = pipeline.create(dai.node.XLinkOut)
encout = pipeline.create(dai.node.XLinkOut)
xoutRgb.setStreamName("rgb")
encout.setStreamName("h264")
camRgb.setPreviewKeepAspectRatio(False)
camRgb.setPreviewSize(320,320)
camRgb.setInterleaved(True)
camRgb.setColorOrder(dai.ColorCameraProperties.ColorOrder.BGR)
# NOTE: the stream is named "h264" above, but this preset selects the MJPEG profile
videnc.setDefaultProfilePreset(30, dai.VideoEncoderProperties.Profile.MJPEG)
camRgb.preview.link(xoutRgb.input)
camRgb.video.link(videnc.input)
videnc.bitstream.link(encout.input)
bridge = CvBridge()
print(videnc.getFrameRate())
with dai.Device(pipeline) as device:
    qRgb = device.getOutputQueue(name="rgb", maxSize=2, blocking=False)
    qenc = device.getOutputQueue(name="h264", maxSize=1, blocking=False)

    # Debugging: each .get() below consumes one frame from its queue, and
    # setType() on an already-received frame does not affect later packets
    print("Type(RGB):", qRgb.get().getType(), " Type(ENC):", qenc.get().getType())
    qenc.get().setType(qRgb.get().getType())
    print(qenc.get().getType())

    while True:
        rgbFrame = qRgb.get().getCvFrame()
        encFrame = qenc.get().getFrame()
        if not rospy.is_shutdown():
            # Publishing the preview frames as plain BGR images works
            msg1 = bridge.cv2_to_imgmsg(rgbFrame, encoding='bgr8')
            pub1.publish(msg1)
            # This is the part I cannot get right: encFrame holds the encoded
            # bitstream, not a decoded BGR image, so 'bgr8' does not describe it
            msg = bridge.cv2_to_imgmsg(encFrame, encoding='bgr8')
            pub.publish(msg)
```
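For reference, the direction I have been considering is to decode the MJPEG packets back into BGR images on the host with `cv2.imdecode` before handing them to `cv_bridge`. I am not sure this is the intended way to consume the VideoEncoder bitstream (and it clearly would not work like this for H.264), so the snippet below is only a sketch of that idea; it reuses `qenc`, `bridge`, and `pub` from the script above:

```python
import cv2
import numpy as np

# Sketch only: decode one encoded packet from the "h264" queue on the host
packet = qenc.get()                                    # dai.ImgFrame carrying the encoded bitstream
buf = np.frombuffer(packet.getData(), dtype=np.uint8)  # raw byte buffer of the packet
decoded = cv2.imdecode(buf, cv2.IMREAD_COLOR)          # only valid for MJPEG/JPEG packets
if decoded is not None:
    pub.publish(bridge.cv2_to_imgmsg(decoded, encoding='bgr8'))
```

Would this be the right way to handle the MJPEG case, or should I instead publish the packets as `sensor_msgs/CompressedImage` and leave the decoding to subscribers? And how should an actual H.264 bitstream be handled here?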