Compare commits

...

5 Commits

Author SHA1 Message Date
Josh Hawkins
256817d5c2
Make events summary endpoint DST-aware (#20786)
Some checks are pending
CI / AMD64 Build (push) Waiting to run
CI / ARM Build (push) Waiting to run
CI / Jetson Jetpack 6 (push) Waiting to run
CI / AMD64 Extra Build (push) Blocked by required conditions
CI / ARM Extra Build (push) Blocked by required conditions
CI / Synaptics Build (push) Blocked by required conditions
CI / Assemble and push default build (push) Blocked by required conditions
2025-11-03 17:54:33 -07:00
Nicolas Mowen
84409eab7e
Various fixes (#20785)
* Catch case where detector overflows

* Add more debug logs

* Cleanup

* Adjust no class wording

* Adjustments
2025-11-03 18:42:59 -06:00
Josh Hawkins
9e83888133
Fix recordings summary for DST (#20784)
* make recordings summary endpoints DST aware

* remove unused

* clean up
2025-11-03 17:30:56 -07:00
Abinila Siva
85f7138361
update installation code to hold SDK 2.1 version (#20781) 2025-11-03 13:23:51 -07:00
Nicolas Mowen
fc1cad2872
Adjust LPR packages for licensing (#20780) 2025-11-03 14:11:02 -06:00
12 changed files with 285 additions and 123 deletions

View File

@@ -2,9 +2,9 @@
set -e
# Download the MxAccl for Frigate github release
wget https://github.com/memryx/mx_accl_frigate/archive/refs/heads/main.zip -O /tmp/mxaccl.zip
wget https://github.com/memryx/mx_accl_frigate/archive/refs/tags/v2.1.0.zip -O /tmp/mxaccl.zip
unzip /tmp/mxaccl.zip -d /tmp
mv /tmp/mx_accl_frigate-main /opt/mx_accl_frigate
mv /tmp/mx_accl_frigate-2.1.0 /opt/mx_accl_frigate
rm /tmp/mxaccl.zip
# Install Python dependencies

View File

@@ -56,7 +56,7 @@ pywebpush == 2.0.*
# alpr
pyclipper == 1.3.*
shapely == 2.0.*
Levenshtein==0.26.*
rapidfuzz==3.12.*
# HailoRT Wheels
appdirs==1.4.*
argcomplete==2.0.*

View File

@@ -24,10 +24,13 @@ echo "Adding MemryX GPG key and repository..."
wget -qO- https://developer.memryx.com/deb/memryx.asc | sudo tee /etc/apt/trusted.gpg.d/memryx.asc >/dev/null
echo 'deb https://developer.memryx.com/deb stable main' | sudo tee /etc/apt/sources.list.d/memryx.list >/dev/null
# Update and install memx-drivers
echo "Installing memx-drivers..."
# Update and install specific SDK 2.1 packages
echo "Installing MemryX SDK 2.1 packages..."
sudo apt update
sudo apt install -y memx-drivers
sudo apt install -y memx-drivers=2.1.* memx-accl=2.1.* mxa-manager=2.1.*
# Hold packages to prevent automatic upgrades
sudo apt-mark hold memx-drivers memx-accl mxa-manager
# ARM-specific board setup
if [[ "$arch" == "aarch64" || "$arch" == "arm64" ]]; then
@@ -37,11 +40,5 @@ fi
echo -e "\n\n\033[1;31mYOU MUST RESTART YOUR COMPUTER NOW\033[0m\n\n"
# Install other runtime packages
packages=("memx-accl" "mxa-manager")
for pkg in "${packages[@]}"; do
echo "Installing $pkg..."
sudo apt install -y "$pkg"
done
echo "MemryX SDK 2.1 installation complete!"
echo "MemryX installation complete!"

View File

@@ -2,6 +2,7 @@
import base64
import datetime
import json
import logging
import os
import random
@@ -58,7 +59,7 @@ from frigate.embeddings import EmbeddingsContext
from frigate.models import Event, ReviewSegment, Timeline, Trigger
from frigate.track.object_processing import TrackedObject
from frigate.util.path import get_event_thumbnail_bytes
from frigate.util.time import get_tz_modifiers
from frigate.util.time import get_dst_transitions, get_tz_modifiers
logger = logging.getLogger(__name__)
@@ -813,7 +814,6 @@ def events_summary(
allowed_cameras: List[str] = Depends(get_allowed_cameras_for_filter),
):
tz_name = params.timezone
hour_modifier, minute_modifier, seconds_offset = get_tz_modifiers(tz_name)
has_clip = params.has_clip
has_snapshot = params.has_snapshot
@@ -828,33 +828,91 @@ def events_summary(
if len(clauses) == 0:
clauses.append((True))
groups = (
time_range_query = (
Event.select(
Event.camera,
Event.label,
Event.sub_label,
Event.data,
fn.strftime(
"%Y-%m-%d",
fn.datetime(
Event.start_time, "unixepoch", hour_modifier, minute_modifier
),
).alias("day"),
Event.zones,
fn.COUNT(Event.id).alias("count"),
fn.MIN(Event.start_time).alias("min_time"),
fn.MAX(Event.start_time).alias("max_time"),
)
.where(reduce(operator.and_, clauses) & (Event.camera << allowed_cameras))
.group_by(
Event.camera,
Event.label,
Event.sub_label,
Event.data,
(Event.start_time + seconds_offset).cast("int") / (3600 * 24),
Event.zones,
)
.dicts()
.get()
)
return JSONResponse(content=[e for e in groups.dicts()])
min_time = time_range_query.get("min_time")
max_time = time_range_query.get("max_time")
if min_time is None or max_time is None:
return JSONResponse(content=[])
dst_periods = get_dst_transitions(tz_name, min_time, max_time)
grouped: dict[tuple, dict] = {}
for period_start, period_end, period_offset in dst_periods:
hours_offset = int(period_offset / 60 / 60)
minutes_offset = int(period_offset / 60 - hours_offset * 60)
period_hour_modifier = f"{hours_offset} hour"
period_minute_modifier = f"{minutes_offset} minute"
period_groups = (
Event.select(
Event.camera,
Event.label,
Event.sub_label,
Event.data,
fn.strftime(
"%Y-%m-%d",
fn.datetime(
Event.start_time,
"unixepoch",
period_hour_modifier,
period_minute_modifier,
),
).alias("day"),
Event.zones,
fn.COUNT(Event.id).alias("count"),
)
.where(
reduce(operator.and_, clauses)
& (Event.camera << allowed_cameras)
& (Event.start_time >= period_start)
& (Event.start_time <= period_end)
)
.group_by(
Event.camera,
Event.label,
Event.sub_label,
Event.data,
(Event.start_time + period_offset).cast("int") / (3600 * 24),
Event.zones,
)
.namedtuples()
)
for g in period_groups:
key = (
g.camera,
g.label,
g.sub_label,
json.dumps(g.data, sort_keys=True) if g.data is not None else None,
g.day,
json.dumps(g.zones, sort_keys=True) if g.zones is not None else None,
)
if key in grouped:
grouped[key]["count"] += int(g.count or 0)
else:
grouped[key] = {
"camera": g.camera,
"label": g.label,
"sub_label": g.sub_label,
"data": g.data,
"day": g.day,
"zones": g.zones,
"count": int(g.count or 0),
}
return JSONResponse(content=list(grouped.values()))
@router.get(

View File

@@ -46,7 +46,7 @@ from frigate.models import Event, Previews, Recordings, Regions, ReviewSegment
from frigate.track.object_processing import TrackedObjectProcessor
from frigate.util.image import get_image_from_recording
from frigate.util.path import get_event_thumbnail_bytes
from frigate.util.time import get_tz_modifiers
from frigate.util.time import get_dst_transitions
logger = logging.getLogger(__name__)
@@ -424,7 +424,6 @@ def all_recordings_summary(
allowed_cameras: List[str] = Depends(get_allowed_cameras_for_filter),
):
"""Returns true/false by day indicating if recordings exist"""
hour_modifier, minute_modifier, seconds_offset = get_tz_modifiers(params.timezone)
cameras = params.cameras
if cameras != "all":
@@ -432,41 +431,70 @@ def all_recordings_summary(
filtered = requested.intersection(allowed_cameras)
if not filtered:
return JSONResponse(content={})
cameras = ",".join(filtered)
camera_list = list(filtered)
else:
cameras = allowed_cameras
camera_list = allowed_cameras
query = (
time_range_query = (
Recordings.select(
fn.strftime(
"%Y-%m-%d",
fn.datetime(
Recordings.start_time + seconds_offset,
"unixepoch",
hour_modifier,
minute_modifier,
),
).alias("day")
fn.MIN(Recordings.start_time).alias("min_time"),
fn.MAX(Recordings.start_time).alias("max_time"),
)
.group_by(
fn.strftime(
"%Y-%m-%d",
fn.datetime(
Recordings.start_time + seconds_offset,
"unixepoch",
hour_modifier,
minute_modifier,
),
)
)
.order_by(Recordings.start_time.desc())
.where(Recordings.camera << camera_list)
.dicts()
.get()
)
if params.cameras != "all":
query = query.where(Recordings.camera << cameras.split(","))
min_time = time_range_query.get("min_time")
max_time = time_range_query.get("max_time")
recording_days = query.namedtuples()
days = {day.day: True for day in recording_days}
if min_time is None or max_time is None:
return JSONResponse(content={})
dst_periods = get_dst_transitions(params.timezone, min_time, max_time)
days: dict[str, bool] = {}
for period_start, period_end, period_offset in dst_periods:
hours_offset = int(period_offset / 60 / 60)
minutes_offset = int(period_offset / 60 - hours_offset * 60)
period_hour_modifier = f"{hours_offset} hour"
period_minute_modifier = f"{minutes_offset} minute"
period_query = (
Recordings.select(
fn.strftime(
"%Y-%m-%d",
fn.datetime(
Recordings.start_time,
"unixepoch",
period_hour_modifier,
period_minute_modifier,
),
).alias("day")
)
.where(
(Recordings.camera << camera_list)
& (Recordings.end_time >= period_start)
& (Recordings.start_time <= period_end)
)
.group_by(
fn.strftime(
"%Y-%m-%d",
fn.datetime(
Recordings.start_time,
"unixepoch",
period_hour_modifier,
period_minute_modifier,
),
)
)
.order_by(Recordings.start_time.desc())
.namedtuples()
)
for g in period_query:
days[g.day] = True
return JSONResponse(content=days)
@@ -476,61 +504,103 @@ def all_recordings_summary(
)
async def recordings_summary(camera_name: str, timezone: str = "utc"):
"""Returns hourly summary for recordings of given camera"""
hour_modifier, minute_modifier, seconds_offset = get_tz_modifiers(timezone)
recording_groups = (
time_range_query = (
Recordings.select(
fn.strftime(
"%Y-%m-%d %H",
fn.datetime(
Recordings.start_time, "unixepoch", hour_modifier, minute_modifier
),
).alias("hour"),
fn.SUM(Recordings.duration).alias("duration"),
fn.SUM(Recordings.motion).alias("motion"),
fn.SUM(Recordings.objects).alias("objects"),
fn.MIN(Recordings.start_time).alias("min_time"),
fn.MAX(Recordings.start_time).alias("max_time"),
)
.where(Recordings.camera == camera_name)
.group_by((Recordings.start_time + seconds_offset).cast("int") / 3600)
.order_by(Recordings.start_time.desc())
.namedtuples()
.dicts()
.get()
)
event_groups = (
Event.select(
fn.strftime(
"%Y-%m-%d %H",
fn.datetime(
Event.start_time, "unixepoch", hour_modifier, minute_modifier
),
).alias("hour"),
fn.COUNT(Event.id).alias("count"),
min_time = time_range_query.get("min_time")
max_time = time_range_query.get("max_time")
days: dict[str, dict] = {}
if min_time is None or max_time is None:
return JSONResponse(content=list(days.values()))
dst_periods = get_dst_transitions(timezone, min_time, max_time)
for period_start, period_end, period_offset in dst_periods:
hours_offset = int(period_offset / 60 / 60)
minutes_offset = int(period_offset / 60 - hours_offset * 60)
period_hour_modifier = f"{hours_offset} hour"
period_minute_modifier = f"{minutes_offset} minute"
recording_groups = (
Recordings.select(
fn.strftime(
"%Y-%m-%d %H",
fn.datetime(
Recordings.start_time,
"unixepoch",
period_hour_modifier,
period_minute_modifier,
),
).alias("hour"),
fn.SUM(Recordings.duration).alias("duration"),
fn.SUM(Recordings.motion).alias("motion"),
fn.SUM(Recordings.objects).alias("objects"),
)
.where(
(Recordings.camera == camera_name)
& (Recordings.end_time >= period_start)
& (Recordings.start_time <= period_end)
)
.group_by((Recordings.start_time + period_offset).cast("int") / 3600)
.order_by(Recordings.start_time.desc())
.namedtuples()
)
.where(Event.camera == camera_name, Event.has_clip)
.group_by((Event.start_time + seconds_offset).cast("int") / 3600)
.namedtuples()
)
event_map = {g.hour: g.count for g in event_groups}
event_groups = (
Event.select(
fn.strftime(
"%Y-%m-%d %H",
fn.datetime(
Event.start_time,
"unixepoch",
period_hour_modifier,
period_minute_modifier,
),
).alias("hour"),
fn.COUNT(Event.id).alias("count"),
)
.where(Event.camera == camera_name, Event.has_clip)
.where(
(Event.start_time >= period_start) & (Event.start_time <= period_end)
)
.group_by((Event.start_time + period_offset).cast("int") / 3600)
.namedtuples()
)
days = {}
event_map = {g.hour: g.count for g in event_groups}
for recording_group in recording_groups:
parts = recording_group.hour.split()
hour = parts[1]
day = parts[0]
events_count = event_map.get(recording_group.hour, 0)
hour_data = {
"hour": hour,
"events": events_count,
"motion": recording_group.motion,
"objects": recording_group.objects,
"duration": round(recording_group.duration),
}
if day not in days:
days[day] = {"events": events_count, "hours": [hour_data], "day": day}
else:
days[day]["events"] += events_count
days[day]["hours"].append(hour_data)
for recording_group in recording_groups:
parts = recording_group.hour.split()
hour = parts[1]
day = parts[0]
events_count = event_map.get(recording_group.hour, 0)
hour_data = {
"hour": hour,
"events": events_count,
"motion": recording_group.motion,
"objects": recording_group.objects,
"duration": round(recording_group.duration),
}
if day in days:
# merge counts if already present (edge-case at DST boundary)
days[day]["events"] += events_count or 0
days[day]["hours"].append(hour_data)
else:
days[day] = {
"events": events_count or 0,
"hours": [hour_data],
"day": day,
}
return JSONResponse(content=list(days.values()))

View File

@@ -36,7 +36,7 @@ from frigate.config import FrigateConfig
from frigate.embeddings import EmbeddingsContext
from frigate.models import Recordings, ReviewSegment, UserReviewStatus
from frigate.review.types import SeverityEnum
from frigate.util.time import get_dst_transitions, get_tz_modifiers
from frigate.util.time import get_dst_transitions
logger = logging.getLogger(__name__)
@@ -197,7 +197,6 @@ async def review_summary(
user_id = current_user["username"]
hour_modifier, minute_modifier, seconds_offset = get_tz_modifiers(params.timezone)
day_ago = (datetime.datetime.now() - datetime.timedelta(hours=24)).timestamp()
cameras = params.cameras

View File

@@ -14,8 +14,8 @@ from typing import Any, List, Optional, Tuple
import cv2
import numpy as np
from Levenshtein import distance, jaro_winkler
from pyclipper import ET_CLOSEDPOLYGON, JT_ROUND, PyclipperOffset
from rapidfuzz.distance import JaroWinkler, Levenshtein
from shapely.geometry import Polygon
from frigate.comms.event_metadata_updater import (
@@ -1123,7 +1123,9 @@ class LicensePlateProcessingMixin:
for i, plate in enumerate(plates):
merged = False
for j, cluster in enumerate(clusters):
sims = [jaro_winkler(plate["plate"], v["plate"]) for v in cluster]
sims = [
JaroWinkler.similarity(plate["plate"], v["plate"]) for v in cluster
]
if len(sims) > 0:
avg_sim = sum(sims) / len(sims)
if avg_sim >= self.cluster_threshold:
@@ -1500,7 +1502,7 @@ class LicensePlateProcessingMixin:
and current_time - data["last_seen"]
<= self.config.cameras[camera].lpr.expire_time
):
similarity = jaro_winkler(data["plate"], top_plate)
similarity = JaroWinkler.similarity(data["plate"], top_plate)
if similarity >= self.similarity_threshold:
plate_id = existing_id
logger.debug(
@@ -1580,7 +1582,8 @@ class LicensePlateProcessingMixin:
for label, plates_list in self.lpr_config.known_plates.items()
if any(
re.match(f"^{plate}$", rep_plate)
or distance(plate, rep_plate) <= self.lpr_config.match_distance
or Levenshtein.distance(plate, rep_plate)
<= self.lpr_config.match_distance
for plate in plates_list
)
),

View File

@@ -397,7 +397,14 @@ class EmbeddingMaintainer(threading.Thread):
source_type, _, camera, frame_name, data = update
logger.debug(
f"Received update - source_type: {source_type}, camera: {camera}, data label: {data.get('label') if data else 'None'}"
)
if not camera or source_type != EventTypeEnum.tracked_object:
logger.debug(
f"Skipping update - camera: {camera}, source_type: {source_type}"
)
return
if self.config.semantic_search.enabled:
@@ -407,6 +414,9 @@
# no need to process updated objects if no processors are active
if len(self.realtime_processors) == 0 and len(self.post_processors) == 0:
logger.debug(
f"No processors active - realtime: {len(self.realtime_processors)}, post: {len(self.post_processors)}"
)
return
# Create our own thumbnail based on the bounding box and the frame time
@@ -415,6 +425,7 @@
frame_name, camera_config.frame_shape_yuv
)
except FileNotFoundError:
logger.debug(f"Frame {frame_name} not found for camera {camera}")
pass
if yuv_frame is None:
@@ -423,7 +434,11 @@
)
return
logger.debug(
f"Processing {len(self.realtime_processors)} realtime processors for object {data.get('id')} (label: {data.get('label')})"
)
for processor in self.realtime_processors:
logger.debug(f"Calling process_frame on {processor.__class__.__name__}")
processor.process_frame(data, yuv_frame)
for processor in self.post_processors:

View File

@@ -9,6 +9,7 @@ from multiprocessing import Queue, Value
from multiprocessing.synchronize import Event as MpEvent
import numpy as np
import zmq
from frigate.comms.object_detector_signaler import (
ObjectDetectorPublisher,
@@ -377,6 +378,15 @@ class RemoteObjectDetector:
if self.stop_event.is_set():
return detections
# Drain any stale detection results from the ZMQ buffer before making a new request
# This prevents reading detection results from a previous request
# NOTE: This should never happen, but can in some rare cases
while True:
try:
self.detector_subscriber.socket.recv_string(flags=zmq.NOBLOCK)
except zmq.Again:
break
# copy input to shared memory
self.np_shm[:] = tensor_input[:]
self.detection_queue.put(self.name)

View File

@@ -181,6 +181,7 @@ type GroupedClassificationCardProps = {
selectedItems: string[];
i18nLibrary: string;
objectType: string;
noClassificationLabel?: string;
onClick: (data: ClassificationItemData | undefined) => void;
children?: (data: ClassificationItemData) => React.ReactNode;
};
@@ -190,6 +191,7 @@ export function GroupedClassificationCard({
threshold,
selectedItems,
i18nLibrary,
noClassificationLabel = "details.none",
onClick,
children,
}: GroupedClassificationCardProps) {
@@ -222,10 +224,14 @@
const bestTyped: ClassificationItemData = best;
return {
...bestTyped,
name: event ? (event.sub_label ?? t("details.unknown")) : bestTyped.name,
name: event
? event.sub_label && event.sub_label !== "none"
? event.sub_label
: t(noClassificationLabel)
: bestTyped.name,
score: event?.data?.sub_label_score || bestTyped.score,
};
}, [group, event, t]);
}, [group, event, noClassificationLabel, t]);
const bestScoreStatus = useMemo(() => {
if (!bestItem?.score || !threshold) {
@@ -311,8 +317,10 @@
isMobile && "px-2",
)}
>
{event?.sub_label ? event.sub_label : t("details.unknown")}
{event?.sub_label && (
{event?.sub_label && event.sub_label !== "none"
? event.sub_label
: t(noClassificationLabel)}
{event?.sub_label && event.sub_label !== "none" && (
<div
className={cn(
"",

View File

@@ -845,6 +845,7 @@ function FaceAttemptGroup({
selectedItems={selectedFaces}
i18nLibrary="views/faceLibrary"
objectType="person"
noClassificationLabel="details.unknown"
onClick={(data) => {
if (data) {
onClickFaces([data.filename], true);

View File

@@ -961,6 +961,7 @@ function ObjectTrainGrid({
selectedItems={selectedImages}
i18nLibrary="views/classificationModel"
objectType={model.object_config?.objects?.at(0) ?? "Object"}
noClassificationLabel="details.none"
onClick={(data) => {
if (data) {
onClickImages([data.filename], true);