hi everyone, I have an issue with my script if someone could help please
I'm using an oak-1 camera, to track people in a room, I should devide the room into 4 zones , zoneA, zoneB, zoneC and zoneD, I need to track people in that room so I can count how many people are there in each zone, if someone tracked leave a zone to another one, once he leaves a zone its counter decreases by 1 and the counter of the zone to where he's entering should encrease by 1, in a way that I can count the number of people in the four zones in real time by tracking them.
I have an issue with the if close that should update the counters of zones, it doesn't work even if it's sounds logical to me, here's the code if you may help me please.
I have done the whole except the part which updates the counter of each zone, here is the code :
from pathlib import Path
import cv2
import depthai as dai
import numpy as np
import time
import argparse
labelMap = ["background", "aeroplane", "bicycle", "bird", "boat", "bottle", "bus", "car", "cat", "chair", "cow",
"diningtable", "dog", "horse", "motorbike", "person", "pottedplant", "sheep", "sofa", "train", "tvmonitor"]
nnPathDefault = str((Path(__file__).parent / Path('models/mobilenet-ssd_openvino_2021.2_6shave.blob')).resolve().absolute())
parser = argparse.ArgumentParser()
parser.add_argument('nnPath', nargs='?', help="Path to mobilenet detection network blob", default=nnPathDefault)
parser.add_argument('-ff', '--full_frame', action="store_true", help="Perform tracking on full RGB frame", default=False)
args = parser.parse_args()
fullFrameTracking = args.full_frame
class TrackableObject:
def __init__(self, objectID, centroid,status):
# store the object ID, then initialize a list of centroids
# using the current centroid
self.objectID = objectID
self.centroids = [centroid]
# initialize a boolean used to indicate if the object has
# already been counted or not
self.counted = False
self.status = None
# Create pipeline
pipeline = dai.Pipeline()
# Define sources and outputs
camRgb = pipeline.create(dai.node.ColorCamera)
detectionNetwork = pipeline.create(dai.node.MobileNetDetectionNetwork)
objectTracker = pipeline.create(dai.node.ObjectTracker)
xlinkOut = pipeline.create(dai.node.XLinkOut)
trackerOut = pipeline.create(dai.node.XLinkOut)
xlinkOut.setStreamName("preview")
trackerOut.setStreamName("tracklets")
# Properties
camRgb.setPreviewSize(300, 300)
camRgb.setResolution(dai.ColorCameraProperties.SensorResolution.THE_1080_P)
camRgb.setInterleaved(False)
camRgb.setColorOrder(dai.ColorCameraProperties.ColorOrder.BGR)
camRgb.setFps(40)
# testing MobileNet DetectionNetwork
detectionNetwork.setBlobPath(args.nnPath)
detectionNetwork.setConfidenceThreshold(0.5)
detectionNetwork.input.setBlocking(False)
objectTracker.setDetectionLabelsToTrack([15]) # track only person
# possible tracking types: ZERO_TERM_COLOR_HISTOGRAM, ZERO_TERM_IMAGELESS, SHORT_TERM_IMAGELESS, SHORT_TERM_KCF
objectTracker.setTrackerType(dai.TrackerType.ZERO_TERM_COLOR_HISTOGRAM)
# take the smallest ID when new object is tracked, possible options: SMALLEST_ID, UNIQUE_ID
objectTracker.setTrackerIdAssignmentPolicy(dai.TrackerIdAssignmentPolicy.SMALLEST_ID)
# Linking
camRgb.preview.link(detectionNetwork.input)
objectTracker.passthroughTrackerFrame.link(xlinkOut.input)
if fullFrameTracking:
camRgb.video.link(objectTracker.inputTrackerFrame)
else:
detectionNetwork.passthrough.link(objectTracker.inputTrackerFrame)
detectionNetwork.passthrough.link(objectTracker.inputDetectionFrame)
detectionNetwork.out.link(objectTracker.inputDetections)
objectTracker.out.link(trackerOut.input)
# Connect to device and start pipeline
with dai.Device(pipeline) as device:
preview = device.getOutputQueue("preview", 4, False)
tracklets = device.getOutputQueue("tracklets", 4, False)
startTime = time.monotonic()
counter = 0
fps = 0
frame = None
counter_zone = [0,0,0,0]
trackableObjects = {}
while(True):
imgFrame = preview.get()
track = tracklets.get()
counter+=1
current_time = time.monotonic()
if (current_time - startTime) > 1 :
fps = counter / (current_time - startTime)
counter = 0
startTime = current_time
color = (255, 0, 0)
frame = imgFrame.getCvFrame()
frame = cv2.resize(frame, (640, 480), interpolation=cv2.INTER_LINEAR)
width = frame.shape[1]
height = frame.shape[0]
# Définition du centroide de la personne détectée
x3 = width//2
y3 = height//2
trackletsData = track.tracklets
for t in trackletsData:
to = trackableObjects.get(t.id, None)
roi1 = t.roi.denormalize(frame.shape[1], frame.shape[0])
x1 = int(roi1.topLeft().x)
y1 = int(roi1.topLeft().y)
x2 = int(roi1.bottomRight().x)
y2 = int(roi1.bottomRight().y)
centroid = (int((x2-x1)/2+x1), int((y2-y1)/2+y1))
try:
label = labelMap[t.label]
except:
label = t.label
#####################Here where I am struggling #############################
if t.status == dai.Tracklet.TrackingStatus.NEW:
to = TrackableObject(t.id, centroid,t.status)
if centroid[1] < 0.5*width and centroid[0] < height*0.5 :
counter_zone[1] += 1
to.counted = True
if centroid[1] > 0.5*width and centroid[0] < height*0.5 :
counter_zone[0] += 1
to.counted = True
if centroid[1] < 0.5*width and centroid[0] > height*0.5 :
counter_zone[3] += 1
to.counted = True
if centroid[1] > 0.5*width and centroid[0] > height*0.5 :
counter_zone[2] += 1
to.counted = True
to.centroids.append(centroid)
trackableObjects[t.id] = to
while to.status == dai.Tracklet.TrackingStatus.TRACKED and to.counted == True:
directionx = centroid[0]-to.centroids[-1][0]
directiony = centroid[0]-to.centroids[-1][1]
if centroid[0] > 0.5*width and directiony > 0 and np.mean(to.centroids[0]) < 0.5*width:
if centroid[1] > 0.5*height and direction > 0 and np.mean(to.centroids[1]) < 0.5*height:
counter[2] += 1
counter[0]-=1
elif centroid[1] < 0.5*height and direction < 0 and np.mean(to.centroids[1]) > 0.5*height:
counter[0] += 1
counter[2]-=1
elif centroid[0] < 0.5*width and directiony< 0 and np.mean(to.centroids[0]) > 0.5*width:
if centroid[1] > 0.5*height and direction > 0 and np.mean(to.centroids[1]) < 0.5*height:
counter[3] += 1
counter[1]-=1
elif centroid[1] < 0.5*height and direction < 0 and np.mean(to.centroids[1]) > 0.5*height:
counter[1] += 1
counter[3]-=1
####################################################################
cv2.putText(frame, str(label), (x1 + 10, y1 + 20), cv2.FONT_HERSHEY_TRIPLEX, 0.5, 255)
cv2.putText(frame, f"ID: {[t.id]}", (x1 + 10, y1 + 35), cv2.FONT_HERSHEY_TRIPLEX, 0.5, 255)
cv2.putText(frame, t.status.name, (x1 + 10, y1 + 50), cv2.FONT_HERSHEY_TRIPLEX, 0.5, 255)
cv2.rectangle(frame, (x1, y1), (x2, y2), color, cv2.FONT_HERSHEY_SIMPLEX)
cv2.putText(frame, "person", (x1 + 10, y1 + 20), cv2.FONT_HERSHEY_TRIPLEX, 0.5, 255)
cv2.putText(frame, f"ID: {[t.id]}", (x1 + 10, y1 + 35), cv2.FONT_HERSHEY_TRIPLEX, 0.5, 255)
cv2.putText(frame, t.status.name, (x1 + 10, y1 + 50), cv2.FONT_HERSHEY_TRIPLEX, 0.5, 255)
cv2.rectangle(frame, (x1, y1), (x2, y2), color, cv2.FONT_HERSHEY_SIMPLEX)
if t.status != dai.Tracklet.TrackingStatus.LOST and t.status != dai.Tracklet.TrackingStatus.REMOVED:
text = "ID {}".format(t.id)
cv2.putText(frame, text, (centroid[0] - 10, centroid[1] - 10),cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 255), 2)
cv2.circle(frame, (centroid[0], centroid[1]), 4, (255, 255, 255), -1)
cv2.putText(frame, "NN fps: {:.2f}".format(fps), (2, frame.shape[0] - 4), cv2.FONT_HERSHEY_TRIPLEX, 0.4, color)
cv2.line(frame, (int(0.5*width), 0),(int(0.5*width), height), (0xFF, 0, 0), 5)
cv2.line(frame, (0, int(0.5*height)),(width, int(0.5*height)), (0xFF, 0, 0), 5)
font = cv2.FONT_HERSHEY_SIMPLEX
cv2.putText(frame, f'Zone A: {counter_zone[0]}', (10, 35), font, 0.6, (0, 0xFF, 0xFF), 2, cv2.FONT_HERSHEY_SIMPLEX)
cv2.putText(frame, f'Zone B: {counter_zone[1]}', (330, 35), font, 0.6, (0, 0xFF, 0xFF), 2, cv2.FONT_HERSHEY_SIMPLEX)
cv2.putText(frame, f'Zone C: {counter_zone[2]}', (10, 270), font, 0.6, (0, 0xFF, 0xFF), 2, cv2.FONT_HERSHEY_SIMPLEX)
cv2.putText(frame, f'Zone D: {counter_zone[3]}', (330, 270), font, 0.6, (0, 0xFF, 0xFF), 2, cv2.FONT_HERSHEY_SIMPLEX)
cv2.imshow("tracker", frame)
if cv2.waitKey(1) == ord('q'):
break`