Various fixes (#20785)

* Catch case where detector overflows

* Add more debug logs

* Cleanup

* Adjust no class wording

* Adjustments
Nicolas Mowen 2025-11-03 17:42:59 -07:00 committed by GitHub
parent 9e83888133
commit 84409eab7e
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
5 changed files with 39 additions and 4 deletions

View File

@@ -397,7 +397,14 @@ class EmbeddingMaintainer(threading.Thread):
         source_type, _, camera, frame_name, data = update
+        logger.debug(
+            f"Received update - source_type: {source_type}, camera: {camera}, data label: {data.get('label') if data else 'None'}"
+        )
 
         if not camera or source_type != EventTypeEnum.tracked_object:
+            logger.debug(
+                f"Skipping update - camera: {camera}, source_type: {source_type}"
+            )
             return
 
         if self.config.semantic_search.enabled:
@@ -407,6 +414,9 @@ class EmbeddingMaintainer(threading.Thread):
         # no need to process updated objects if no processors are active
         if len(self.realtime_processors) == 0 and len(self.post_processors) == 0:
+            logger.debug(
+                f"No processors active - realtime: {len(self.realtime_processors)}, post: {len(self.post_processors)}"
+            )
             return
 
         # Create our own thumbnail based on the bounding box and the frame time
@@ -415,6 +425,7 @@
                 frame_name, camera_config.frame_shape_yuv
             )
         except FileNotFoundError:
+            logger.debug(f"Frame {frame_name} not found for camera {camera}")
             pass
 
         if yuv_frame is None:
@@ -423,7 +434,11 @@
             )
             return
 
+        logger.debug(
+            f"Processing {len(self.realtime_processors)} realtime processors for object {data.get('id')} (label: {data.get('label')})"
+        )
         for processor in self.realtime_processors:
+            logger.debug(f"Calling process_frame on {processor.__class__.__name__}")
             processor.process_frame(data, yuv_frame)
 
         for processor in self.post_processors:
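
The logger.debug calls added above only show up when debug-level logging is enabled for this module. A minimal standalone sketch using the standard library, assuming the logger name follows the module path (frigate.embeddings.maintainer is an assumption here; Frigate itself manages log levels through its own configuration):

import logging

# Keep everything else at INFO...
logging.basicConfig(level=logging.INFO)

# ...but enable the new debug messages for the embeddings maintainer.
# The logger name is assumed to mirror the module path.
logging.getLogger("frigate.embeddings.maintainer").setLevel(logging.DEBUG)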

View File

@@ -9,6 +9,7 @@ from multiprocessing import Queue, Value
 from multiprocessing.synchronize import Event as MpEvent
 
 import numpy as np
+import zmq
 
 from frigate.comms.object_detector_signaler import (
     ObjectDetectorPublisher,
@@ -377,6 +378,15 @@ class RemoteObjectDetector:
         if self.stop_event.is_set():
             return detections
 
+        # Drain any stale detection results from the ZMQ buffer before making a new request
+        # This prevents reading detection results from a previous request
+        # NOTE: This should never happen, but can in some rare cases
+        while True:
+            try:
+                self.detector_subscriber.socket.recv_string(flags=zmq.NOBLOCK)
+            except zmq.Again:
+                break
+
         # copy input to shared memory
         self.np_shm[:] = tensor_input[:]
         self.detection_queue.put(self.name)
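
The drain loop above is the standard non-blocking receive pattern in pyzmq: keep calling recv with zmq.NOBLOCK until the socket raises zmq.Again, which signals that nothing else is queued. A self-contained sketch of the same idea, assuming a plain SUB socket; the endpoint and helper name are illustrative, not Frigate's API:

import zmq

def drain_stale_messages(sock: zmq.Socket) -> int:
    """Discard any messages already queued on the socket, without blocking."""
    drained = 0
    while True:
        try:
            # NOBLOCK returns immediately and raises zmq.Again when the buffer is empty
            sock.recv_string(flags=zmq.NOBLOCK)
            drained += 1
        except zmq.Again:
            return drained

# Example usage with a hypothetical subscriber socket:
context = zmq.Context()
socket = context.socket(zmq.SUB)
socket.connect("ipc:///tmp/detections")  # illustrative endpoint
socket.setsockopt_string(zmq.SUBSCRIBE, "")
print(f"drained {drain_stale_messages(socket)} stale messages")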

View File

@@ -181,6 +181,7 @@ type GroupedClassificationCardProps = {
   selectedItems: string[];
   i18nLibrary: string;
   objectType: string;
+  noClassificationLabel?: string;
   onClick: (data: ClassificationItemData | undefined) => void;
   children?: (data: ClassificationItemData) => React.ReactNode;
 };
@@ -190,6 +191,7 @@ export function GroupedClassificationCard({
   threshold,
   selectedItems,
   i18nLibrary,
+  noClassificationLabel = "details.none",
   onClick,
   children,
 }: GroupedClassificationCardProps) {
@@ -222,10 +224,14 @@
     const bestTyped: ClassificationItemData = best;
     return {
       ...bestTyped,
-      name: event ? (event.sub_label ?? t("details.unknown")) : bestTyped.name,
+      name: event
+        ? event.sub_label && event.sub_label !== "none"
+          ? event.sub_label
+          : t(noClassificationLabel)
+        : bestTyped.name,
       score: event?.data?.sub_label_score || bestTyped.score,
     };
-  }, [group, event, t]);
+  }, [group, event, noClassificationLabel, t]);
 
   const bestScoreStatus = useMemo(() => {
     if (!bestItem?.score || !threshold) {
@@ -311,8 +317,10 @@
           isMobile && "px-2",
         )}
       >
-        {event?.sub_label ? event.sub_label : t("details.unknown")}
-        {event?.sub_label && (
+        {event?.sub_label && event.sub_label !== "none"
+          ? event.sub_label
+          : t(noClassificationLabel)}
+        {event?.sub_label && event.sub_label !== "none" && (
           <div
             className={cn(
               "",

View File

@@ -845,6 +845,7 @@ function FaceAttemptGroup({
         selectedItems={selectedFaces}
         i18nLibrary="views/faceLibrary"
         objectType="person"
+        noClassificationLabel="details.unknown"
         onClick={(data) => {
           if (data) {
             onClickFaces([data.filename], true);

View File

@@ -961,6 +961,7 @@ function ObjectTrainGrid({
         selectedItems={selectedImages}
         i18nLibrary="views/classificationModel"
         objectType={model.object_config?.objects?.at(0) ?? "Object"}
+        noClassificationLabel="details.none"
         onClick={(data) => {
           if (data) {
             onClickImages([data.filename], true);