Compare commits

..

No commits in common. "256817d5c2640b4cd1167272ec606d6bbf9eb772" and "55294328564e6eda2b667dc4751a211e6feb2b74" have entirely different histories.

12 changed files with 123 additions and 285 deletions

View File

@@ -2,9 +2,9 @@
set -e
# Download the MxAccl for Frigate github release
wget https://github.com/memryx/mx_accl_frigate/archive/refs/tags/v2.1.0.zip -O /tmp/mxaccl.zip
wget https://github.com/memryx/mx_accl_frigate/archive/refs/heads/main.zip -O /tmp/mxaccl.zip
unzip /tmp/mxaccl.zip -d /tmp
mv /tmp/mx_accl_frigate-2.1.0 /opt/mx_accl_frigate
mv /tmp/mx_accl_frigate-main /opt/mx_accl_frigate
rm /tmp/mxaccl.zip
# Install Python dependencies

View File

@@ -56,7 +56,7 @@ pywebpush == 2.0.*
# alpr
pyclipper == 1.3.*
shapely == 2.0.*
rapidfuzz==3.12.*
Levenshtein==0.26.*
# HailoRT Wheels
appdirs==1.4.*
argcomplete==2.0.*

View File

@@ -24,13 +24,10 @@ echo "Adding MemryX GPG key and repository..."
wget -qO- https://developer.memryx.com/deb/memryx.asc | sudo tee /etc/apt/trusted.gpg.d/memryx.asc >/dev/null
echo 'deb https://developer.memryx.com/deb stable main' | sudo tee /etc/apt/sources.list.d/memryx.list >/dev/null
# Update and install specific SDK 2.1 packages
echo "Installing MemryX SDK 2.1 packages..."
# Update and install memx-drivers
echo "Installing memx-drivers..."
sudo apt update
sudo apt install -y memx-drivers=2.1.* memx-accl=2.1.* mxa-manager=2.1.*
# Hold packages to prevent automatic upgrades
sudo apt-mark hold memx-drivers memx-accl mxa-manager
sudo apt install -y memx-drivers
# ARM-specific board setup
if [[ "$arch" == "aarch64" || "$arch" == "arm64" ]]; then
@@ -40,5 +37,11 @@ fi
echo -e "\n\n\033[1;31mYOU MUST RESTART YOUR COMPUTER NOW\033[0m\n\n"
echo "MemryX SDK 2.1 installation complete!"
# Install other runtime packages
packages=("memx-accl" "mxa-manager")
for pkg in "${packages[@]}"; do
echo "Installing $pkg..."
sudo apt install -y "$pkg"
done
echo "MemryX installation complete!"

View File

@@ -2,7 +2,6 @@
import base64
import datetime
import json
import logging
import os
import random
@@ -59,7 +58,7 @@ from frigate.embeddings import EmbeddingsContext
from frigate.models import Event, ReviewSegment, Timeline, Trigger
from frigate.track.object_processing import TrackedObject
from frigate.util.path import get_event_thumbnail_bytes
from frigate.util.time import get_dst_transitions, get_tz_modifiers
from frigate.util.time import get_tz_modifiers
logger = logging.getLogger(__name__)
@@ -814,6 +813,7 @@ def events_summary(
allowed_cameras: List[str] = Depends(get_allowed_cameras_for_filter),
):
tz_name = params.timezone
hour_modifier, minute_modifier, seconds_offset = get_tz_modifiers(tz_name)
has_clip = params.has_clip
has_snapshot = params.has_snapshot
@@ -828,91 +828,33 @@ def events_summary(
if len(clauses) == 0:
clauses.append((True))
time_range_query = (
groups = (
Event.select(
fn.MIN(Event.start_time).alias("min_time"),
fn.MAX(Event.start_time).alias("max_time"),
Event.camera,
Event.label,
Event.sub_label,
Event.data,
fn.strftime(
"%Y-%m-%d",
fn.datetime(
Event.start_time, "unixepoch", hour_modifier, minute_modifier
),
).alias("day"),
Event.zones,
fn.COUNT(Event.id).alias("count"),
)
.where(reduce(operator.and_, clauses) & (Event.camera << allowed_cameras))
.dicts()
.get()
.group_by(
Event.camera,
Event.label,
Event.sub_label,
Event.data,
(Event.start_time + seconds_offset).cast("int") / (3600 * 24),
Event.zones,
)
)
min_time = time_range_query.get("min_time")
max_time = time_range_query.get("max_time")
if min_time is None or max_time is None:
return JSONResponse(content=[])
dst_periods = get_dst_transitions(tz_name, min_time, max_time)
grouped: dict[tuple, dict] = {}
for period_start, period_end, period_offset in dst_periods:
hours_offset = int(period_offset / 60 / 60)
minutes_offset = int(period_offset / 60 - hours_offset * 60)
period_hour_modifier = f"{hours_offset} hour"
period_minute_modifier = f"{minutes_offset} minute"
period_groups = (
Event.select(
Event.camera,
Event.label,
Event.sub_label,
Event.data,
fn.strftime(
"%Y-%m-%d",
fn.datetime(
Event.start_time,
"unixepoch",
period_hour_modifier,
period_minute_modifier,
),
).alias("day"),
Event.zones,
fn.COUNT(Event.id).alias("count"),
)
.where(
reduce(operator.and_, clauses)
& (Event.camera << allowed_cameras)
& (Event.start_time >= period_start)
& (Event.start_time <= period_end)
)
.group_by(
Event.camera,
Event.label,
Event.sub_label,
Event.data,
(Event.start_time + period_offset).cast("int") / (3600 * 24),
Event.zones,
)
.namedtuples()
)
for g in period_groups:
key = (
g.camera,
g.label,
g.sub_label,
json.dumps(g.data, sort_keys=True) if g.data is not None else None,
g.day,
json.dumps(g.zones, sort_keys=True) if g.zones is not None else None,
)
if key in grouped:
grouped[key]["count"] += int(g.count or 0)
else:
grouped[key] = {
"camera": g.camera,
"label": g.label,
"sub_label": g.sub_label,
"data": g.data,
"day": g.day,
"zones": g.zones,
"count": int(g.count or 0),
}
return JSONResponse(content=list(grouped.values()))
return JSONResponse(content=[e for e in groups.dicts()])
@router.get(

View File

@@ -46,7 +46,7 @@ from frigate.models import Event, Previews, Recordings, Regions, ReviewSegment
from frigate.track.object_processing import TrackedObjectProcessor
from frigate.util.image import get_image_from_recording
from frigate.util.path import get_event_thumbnail_bytes
from frigate.util.time import get_dst_transitions
from frigate.util.time import get_tz_modifiers
logger = logging.getLogger(__name__)
@@ -424,6 +424,7 @@ def all_recordings_summary(
allowed_cameras: List[str] = Depends(get_allowed_cameras_for_filter),
):
"""Returns true/false by day indicating if recordings exist"""
hour_modifier, minute_modifier, seconds_offset = get_tz_modifiers(params.timezone)
cameras = params.cameras
if cameras != "all":
@@ -431,70 +432,41 @@ def all_recordings_summary(
filtered = requested.intersection(allowed_cameras)
if not filtered:
return JSONResponse(content={})
camera_list = list(filtered)
cameras = ",".join(filtered)
else:
camera_list = allowed_cameras
cameras = allowed_cameras
time_range_query = (
query = (
Recordings.select(
fn.MIN(Recordings.start_time).alias("min_time"),
fn.MAX(Recordings.start_time).alias("max_time"),
fn.strftime(
"%Y-%m-%d",
fn.datetime(
Recordings.start_time + seconds_offset,
"unixepoch",
hour_modifier,
minute_modifier,
),
).alias("day")
)
.where(Recordings.camera << camera_list)
.dicts()
.get()
.group_by(
fn.strftime(
"%Y-%m-%d",
fn.datetime(
Recordings.start_time + seconds_offset,
"unixepoch",
hour_modifier,
minute_modifier,
),
)
)
.order_by(Recordings.start_time.desc())
)
min_time = time_range_query.get("min_time")
max_time = time_range_query.get("max_time")
if params.cameras != "all":
query = query.where(Recordings.camera << cameras.split(","))
if min_time is None or max_time is None:
return JSONResponse(content={})
dst_periods = get_dst_transitions(params.timezone, min_time, max_time)
days: dict[str, bool] = {}
for period_start, period_end, period_offset in dst_periods:
hours_offset = int(period_offset / 60 / 60)
minutes_offset = int(period_offset / 60 - hours_offset * 60)
period_hour_modifier = f"{hours_offset} hour"
period_minute_modifier = f"{minutes_offset} minute"
period_query = (
Recordings.select(
fn.strftime(
"%Y-%m-%d",
fn.datetime(
Recordings.start_time,
"unixepoch",
period_hour_modifier,
period_minute_modifier,
),
).alias("day")
)
.where(
(Recordings.camera << camera_list)
& (Recordings.end_time >= period_start)
& (Recordings.start_time <= period_end)
)
.group_by(
fn.strftime(
"%Y-%m-%d",
fn.datetime(
Recordings.start_time,
"unixepoch",
period_hour_modifier,
period_minute_modifier,
),
)
)
.order_by(Recordings.start_time.desc())
.namedtuples()
)
for g in period_query:
days[g.day] = True
recording_days = query.namedtuples()
days = {day.day: True for day in recording_days}
return JSONResponse(content=days)
@@ -504,103 +476,61 @@ def all_recordings_summary(
)
async def recordings_summary(camera_name: str, timezone: str = "utc"):
"""Returns hourly summary for recordings of given camera"""
time_range_query = (
hour_modifier, minute_modifier, seconds_offset = get_tz_modifiers(timezone)
recording_groups = (
Recordings.select(
fn.MIN(Recordings.start_time).alias("min_time"),
fn.MAX(Recordings.start_time).alias("max_time"),
fn.strftime(
"%Y-%m-%d %H",
fn.datetime(
Recordings.start_time, "unixepoch", hour_modifier, minute_modifier
),
).alias("hour"),
fn.SUM(Recordings.duration).alias("duration"),
fn.SUM(Recordings.motion).alias("motion"),
fn.SUM(Recordings.objects).alias("objects"),
)
.where(Recordings.camera == camera_name)
.dicts()
.get()
.group_by((Recordings.start_time + seconds_offset).cast("int") / 3600)
.order_by(Recordings.start_time.desc())
.namedtuples()
)
min_time = time_range_query.get("min_time")
max_time = time_range_query.get("max_time")
days: dict[str, dict] = {}
if min_time is None or max_time is None:
return JSONResponse(content=list(days.values()))
dst_periods = get_dst_transitions(timezone, min_time, max_time)
for period_start, period_end, period_offset in dst_periods:
hours_offset = int(period_offset / 60 / 60)
minutes_offset = int(period_offset / 60 - hours_offset * 60)
period_hour_modifier = f"{hours_offset} hour"
period_minute_modifier = f"{minutes_offset} minute"
recording_groups = (
Recordings.select(
fn.strftime(
"%Y-%m-%d %H",
fn.datetime(
Recordings.start_time,
"unixepoch",
period_hour_modifier,
period_minute_modifier,
),
).alias("hour"),
fn.SUM(Recordings.duration).alias("duration"),
fn.SUM(Recordings.motion).alias("motion"),
fn.SUM(Recordings.objects).alias("objects"),
)
.where(
(Recordings.camera == camera_name)
& (Recordings.end_time >= period_start)
& (Recordings.start_time <= period_end)
)
.group_by((Recordings.start_time + period_offset).cast("int") / 3600)
.order_by(Recordings.start_time.desc())
.namedtuples()
event_groups = (
Event.select(
fn.strftime(
"%Y-%m-%d %H",
fn.datetime(
Event.start_time, "unixepoch", hour_modifier, minute_modifier
),
).alias("hour"),
fn.COUNT(Event.id).alias("count"),
)
.where(Event.camera == camera_name, Event.has_clip)
.group_by((Event.start_time + seconds_offset).cast("int") / 3600)
.namedtuples()
)
event_groups = (
Event.select(
fn.strftime(
"%Y-%m-%d %H",
fn.datetime(
Event.start_time,
"unixepoch",
period_hour_modifier,
period_minute_modifier,
),
).alias("hour"),
fn.COUNT(Event.id).alias("count"),
)
.where(Event.camera == camera_name, Event.has_clip)
.where(
(Event.start_time >= period_start) & (Event.start_time <= period_end)
)
.group_by((Event.start_time + period_offset).cast("int") / 3600)
.namedtuples()
)
event_map = {g.hour: g.count for g in event_groups}
event_map = {g.hour: g.count for g in event_groups}
days = {}
for recording_group in recording_groups:
parts = recording_group.hour.split()
hour = parts[1]
day = parts[0]
events_count = event_map.get(recording_group.hour, 0)
hour_data = {
"hour": hour,
"events": events_count,
"motion": recording_group.motion,
"objects": recording_group.objects,
"duration": round(recording_group.duration),
}
if day in days:
# merge counts if already present (edge-case at DST boundary)
days[day]["events"] += events_count or 0
days[day]["hours"].append(hour_data)
else:
days[day] = {
"events": events_count or 0,
"hours": [hour_data],
"day": day,
}
for recording_group in recording_groups:
parts = recording_group.hour.split()
hour = parts[1]
day = parts[0]
events_count = event_map.get(recording_group.hour, 0)
hour_data = {
"hour": hour,
"events": events_count,
"motion": recording_group.motion,
"objects": recording_group.objects,
"duration": round(recording_group.duration),
}
if day not in days:
days[day] = {"events": events_count, "hours": [hour_data], "day": day}
else:
days[day]["events"] += events_count
days[day]["hours"].append(hour_data)
return JSONResponse(content=list(days.values()))

View File

@@ -36,7 +36,7 @@ from frigate.config import FrigateConfig
from frigate.embeddings import EmbeddingsContext
from frigate.models import Recordings, ReviewSegment, UserReviewStatus
from frigate.review.types import SeverityEnum
from frigate.util.time import get_dst_transitions
from frigate.util.time import get_dst_transitions, get_tz_modifiers
logger = logging.getLogger(__name__)
@@ -197,6 +197,7 @@ async def review_summary(
user_id = current_user["username"]
hour_modifier, minute_modifier, seconds_offset = get_tz_modifiers(params.timezone)
day_ago = (datetime.datetime.now() - datetime.timedelta(hours=24)).timestamp()
cameras = params.cameras

View File

@@ -14,8 +14,8 @@ from typing import Any, List, Optional, Tuple
import cv2
import numpy as np
from Levenshtein import distance, jaro_winkler
from pyclipper import ET_CLOSEDPOLYGON, JT_ROUND, PyclipperOffset
from rapidfuzz.distance import JaroWinkler, Levenshtein
from shapely.geometry import Polygon
from frigate.comms.event_metadata_updater import (
@@ -1123,9 +1123,7 @@ class LicensePlateProcessingMixin:
for i, plate in enumerate(plates):
merged = False
for j, cluster in enumerate(clusters):
sims = [
JaroWinkler.similarity(plate["plate"], v["plate"]) for v in cluster
]
sims = [jaro_winkler(plate["plate"], v["plate"]) for v in cluster]
if len(sims) > 0:
avg_sim = sum(sims) / len(sims)
if avg_sim >= self.cluster_threshold:
@@ -1502,7 +1500,7 @@ class LicensePlateProcessingMixin:
and current_time - data["last_seen"]
<= self.config.cameras[camera].lpr.expire_time
):
similarity = JaroWinkler.similarity(data["plate"], top_plate)
similarity = jaro_winkler(data["plate"], top_plate)
if similarity >= self.similarity_threshold:
plate_id = existing_id
logger.debug(
@@ -1582,8 +1580,7 @@ class LicensePlateProcessingMixin:
for label, plates_list in self.lpr_config.known_plates.items()
if any(
re.match(f"^{plate}$", rep_plate)
or Levenshtein.distance(plate, rep_plate)
<= self.lpr_config.match_distance
or distance(plate, rep_plate) <= self.lpr_config.match_distance
for plate in plates_list
)
),

View File

@@ -397,14 +397,7 @@ class EmbeddingMaintainer(threading.Thread):
source_type, _, camera, frame_name, data = update
logger.debug(
f"Received update - source_type: {source_type}, camera: {camera}, data label: {data.get('label') if data else 'None'}"
)
if not camera or source_type != EventTypeEnum.tracked_object:
logger.debug(
f"Skipping update - camera: {camera}, source_type: {source_type}"
)
return
if self.config.semantic_search.enabled:
@@ -414,9 +407,6 @@ class EmbeddingMaintainer(threading.Thread):
# no need to process updated objects if no processors are active
if len(self.realtime_processors) == 0 and len(self.post_processors) == 0:
logger.debug(
f"No processors active - realtime: {len(self.realtime_processors)}, post: {len(self.post_processors)}"
)
return
# Create our own thumbnail based on the bounding box and the frame time
@@ -425,7 +415,6 @@ class EmbeddingMaintainer(threading.Thread):
frame_name, camera_config.frame_shape_yuv
)
except FileNotFoundError:
logger.debug(f"Frame {frame_name} not found for camera {camera}")
pass
if yuv_frame is None:
@@ -434,11 +423,7 @@ class EmbeddingMaintainer(threading.Thread):
)
return
logger.debug(
f"Processing {len(self.realtime_processors)} realtime processors for object {data.get('id')} (label: {data.get('label')})"
)
for processor in self.realtime_processors:
logger.debug(f"Calling process_frame on {processor.__class__.__name__}")
processor.process_frame(data, yuv_frame)
for processor in self.post_processors:

View File

@@ -9,7 +9,6 @@ from multiprocessing import Queue, Value
from multiprocessing.synchronize import Event as MpEvent
import numpy as np
import zmq
from frigate.comms.object_detector_signaler import (
ObjectDetectorPublisher,
@@ -378,15 +377,6 @@ class RemoteObjectDetector:
if self.stop_event.is_set():
return detections
# Drain any stale detection results from the ZMQ buffer before making a new request
# This prevents reading detection results from a previous request
# NOTE: This should never happen, but can in some rare cases
while True:
try:
self.detector_subscriber.socket.recv_string(flags=zmq.NOBLOCK)
except zmq.Again:
break
# copy input to shared memory
self.np_shm[:] = tensor_input[:]
self.detection_queue.put(self.name)

View File

@@ -181,7 +181,6 @@ type GroupedClassificationCardProps = {
selectedItems: string[];
i18nLibrary: string;
objectType: string;
noClassificationLabel?: string;
onClick: (data: ClassificationItemData | undefined) => void;
children?: (data: ClassificationItemData) => React.ReactNode;
};
@@ -191,7 +190,6 @@ export function GroupedClassificationCard({
threshold,
selectedItems,
i18nLibrary,
noClassificationLabel = "details.none",
onClick,
children,
}: GroupedClassificationCardProps) {
@@ -224,14 +222,10 @@ export function GroupedClassificationCard({
const bestTyped: ClassificationItemData = best;
return {
...bestTyped,
name: event
? event.sub_label && event.sub_label !== "none"
? event.sub_label
: t(noClassificationLabel)
: bestTyped.name,
name: event ? (event.sub_label ?? t("details.unknown")) : bestTyped.name,
score: event?.data?.sub_label_score || bestTyped.score,
};
}, [group, event, noClassificationLabel, t]);
}, [group, event, t]);
const bestScoreStatus = useMemo(() => {
if (!bestItem?.score || !threshold) {
@@ -317,10 +311,8 @@
isMobile && "px-2",
)}
>
{event?.sub_label && event.sub_label !== "none"
? event.sub_label
: t(noClassificationLabel)}
{event?.sub_label && event.sub_label !== "none" && (
{event?.sub_label ? event.sub_label : t("details.unknown")}
{event?.sub_label && (
<div
className={cn(
"",

View File

@@ -845,7 +845,6 @@ function FaceAttemptGroup({
selectedItems={selectedFaces}
i18nLibrary="views/faceLibrary"
objectType="person"
noClassificationLabel="details.unknown"
onClick={(data) => {
if (data) {
onClickFaces([data.filename], true);

View File

@@ -961,7 +961,6 @@ function ObjectTrainGrid({
selectedItems={selectedImages}
i18nLibrary="views/classificationModel"
objectType={model.object_config?.objects?.at(0) ?? "Object"}
noClassificationLabel="details.none"
onClick={(data) => {
if (data) {
onClickImages([data.filename], true);