- Edited
I ran the tiny YOLO example code available at the link (https://docs.luxonis.com/software/depthai/examples/tiny_yolo/) on my computer running Ubuntu 22.04.4 LTS with the OAK-1 LITE, and it worked correctly.
Then I tried to run exactly the same code on a raspberry pi 4 with Ubuntu server 22.04.4 LTS and it returned the following error:
File "/home/qsm/depthai-python/examples/Yolo/tiny_yolo.py", line 123, in <module>
inRgb = qRgb.get()
RuntimeError: Communication exception - possible device error/misconfiguration. Original message 'Couldn't read data from stream: 'rgb' (X_LINK_ERROR)'
I'm sure it's not the fault of the cable or the camera because it works fine on the computer and I made sure to connect it to USB 3 (the blue one).
I don't know what to do; thanks in advance to everyone who answers.
Note:
The following code runs correctly inside the raspberry without any problem (the program sends a video signal through the socket library and I receive the video in a flutter app that works correctly):
import sys
sys.path.append('/home/qsm/.local/lib/python3.10/site-packages')
import socket
import cv2
import depthai as dai
import time
import struct
import netifaces
import threading
# Find the IPv4 address of a wireless ('w*') network interface.
# NOTE(review): if several wireless interfaces exist, the LAST one found
# wins (matches the original behaviour); wifi_ip stays '' when none has
# an address — the later sock.bind would then bind to all interfaces.
wifi_ip = ''
for interface in netifaces.interfaces():
    if interface.startswith('w'):
        # Get the IP address of the WiFi interface; skip interfaces with
        # no IPv4 address assigned (avoids a KeyError on AF_INET).
        inet_addrs = netifaces.ifaddresses(interface).get(netifaces.AF_INET)
        if inet_addrs:
            wifi_ip = inet_addrs[0]['addr']
print(wifi_ip)
# --- Create pipeline: 1080p colour camera -> XLinkOut stream "video" ---
pipeline = dai.Pipeline()

# Define source and output
camRgb = pipeline.create(dai.node.ColorCamera)
xoutVideo = pipeline.create(dai.node.XLinkOut)
xoutVideo.setStreamName("video")

# Properties
camRgb.setBoardSocket(dai.CameraBoardSocket.CAM_A)
camRgb.setResolution(dai.ColorCameraProperties.SensorResolution.THE_1080_P)
camRgb.setVideoSize(1280, 720)
camRgb.setFps(30)
# Keep only the newest frame; drop the rest instead of blocking the camera.
xoutVideo.input.setBlocking(False)
xoutVideo.input.setQueueSize(1)

# Linking
camRgb.video.link(xoutVideo.input)

# --- Create the TCP server socket the video client connects to ---
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# Allow quick restarts without "Address already in use" errors.
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
# Set the server address and port, then bind (this is a server, so we
# bind and listen rather than connect) and accept a single client.
server_address = (wifi_ip, 5000)
sock.bind(server_address)
sock.listen(1)
print('Servidor de video iniciado')
def Rx(connection):
    """Drain and print incoming text from *connection* until it closes.

    Intended to run in a daemon thread; returns when the peer
    disconnects cleanly, resets the connection, or any receive/decode
    error occurs.
    """
    while True:
        try:
            chunk = connection.recv(1024)
            if not chunk:
                # Peer performed an orderly shutdown.
                return
            print(chunk.decode())
        except ConnectionResetError:
            print('Conexión cerrada')
            return
        except Exception as error:
            print(f'Error: {error}')
            return
# Connect to device and start pipeline, then serve JPEG-compressed frames
# over TCP to one client at a time.
with dai.Device(pipeline) as device:
    video = device.getOutputQueue(name="video", maxSize=1, blocking=False)
    while True:
        connection = None
        try:
            # Accept a connection from the next client.
            connection, client_address = sock.accept()
            print(f'Conexión establecida con {client_address}')
            # Daemon thread drains any text the client sends back.
            thread = threading.Thread(target=Rx, args=[connection])
            thread.daemon = True  # thread dies when the main program exits
            thread.start()
            # Send the video signal until the client drops (sendall raises,
            # which lands in the except below and lets us accept a new client).
            while True:
                videoIn = video.get()
                frame = videoIn.getCvFrame()
                # Re-encode to JPEG at quality 50 to keep bandwidth low.
                _, buffer = cv2.imencode('.jpg', frame, [int(cv2.IMWRITE_JPEG_QUALITY), 50])
                data = buffer.tobytes()
                # Send the frame size first, then the frame bytes.
                # NOTE(review): "L" is the *native* unsigned-long layout
                # (4 or 8 bytes, platform endianness); the receiving app
                # must match. "!I" would be portable, but changing it here
                # would break the existing, working client.
                connection.sendall(struct.pack("L", len(data)) + data)
                time.sleep(0.03)  # throttle streaming rate
        except Exception as error:
            print(f'Error: {error}')
        finally:
            # Bug fix: the original close() sat after an infinite loop and
            # was unreachable, leaking one socket per disconnected client.
            # Always release the connection before accepting the next one.
            if connection is not None:
                connection.close()