Mirror of https://github.com/blakeblackshear/frigate.git
Synced 2025-12-18 19:16:42 +03:00

Compare commits d44340eca6 ... 5529432856 (5 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 5529432856 | |
| | 59963fc47e | |
| | 31fa87ce73 | |
| | 740c618240 | |
| | 4f76b34f44 | |
@@ -1 +1,2 @@
cuda-python == 12.6.*; platform_machine == 'aarch64'
numpy == 1.26.*; platform_machine == 'aarch64'
@@ -37,7 +37,6 @@ from frigate.stats.prometheus import get_metrics, update_metrics
from frigate.util.builtin import (
clean_camera_user_pass,
flatten_config_data,
get_tz_modifiers,
process_config_query_string,
update_yaml_file_bulk,
)
@@ -48,6 +47,7 @@ from frigate.util.services import (
restart_frigate,
vainfo_hwaccel,
)
from frigate.util.time import get_tz_modifiers
from frigate.version import VERSION

logger = logging.getLogger(__name__)
@@ -403,12 +403,13 @@ def config_set(request: Request, body: AppConfigSetBody):
settings,
)
else:
# Handle nested config updates (e.g., config/classification/custom/{name})
# Generic handling for global config updates
settings = config.get_nested_object(body.update_topic)
if settings:
request.app.config_publisher.publisher.publish(
body.update_topic, settings
)

# Publish None for removal, actual config for add/update
request.app.config_publisher.publisher.publish(
body.update_topic, settings
)

return JSONResponse(
content=(
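In the config_set hunk above, the nested-config branch now publishes whatever `get_nested_object` resolves for the update topic, so a removal (which resolves to `None`) and an add or update (which resolves to the new settings) go through the same publish call. A minimal sketch of that pattern, using a hypothetical in-memory publisher rather than Frigate's real config publisher:

```python
# Minimal sketch of the publish-on-update pattern; the publisher and config
# dictionary are hypothetical stand-ins, not Frigate's actual classes.
class FakePublisher:
    def __init__(self) -> None:
        self.messages: list[tuple[str, object]] = []

    def publish(self, topic: str, payload: object) -> None:
        # Subscribers treat a None payload as "this config entry was removed".
        self.messages.append((topic, payload))


def publish_nested_update(config: dict, publisher: FakePublisher, topic: str) -> None:
    # e.g. topic = "config/classification/custom/my_model"
    settings: object = config
    for part in topic.split("/")[1:]:
        settings = settings.get(part) if isinstance(settings, dict) else None
    # Publish None for removal, the resolved settings object for add/update.
    publisher.publish(topic, settings)


publisher = FakePublisher()
publish_nested_update(
    {"classification": {"custom": {"my_model": {"threshold": 0.8}}}},
    publisher,
    "config/classification/custom/my_model",
)
```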
@@ -31,7 +31,7 @@ from frigate.api.defs.response.generic_response import GenericResponse
from frigate.api.defs.tags import Tags
from frigate.config import FrigateConfig
from frigate.config.camera import DetectConfig
from frigate.const import CLIPS_DIR, FACE_DIR
from frigate.const import CLIPS_DIR, FACE_DIR, MODEL_CACHE_DIR
from frigate.embeddings import EmbeddingsContext
from frigate.models import Event
from frigate.util.classification import (
@@ -828,9 +828,13 @@ def delete_classification_model(request: Request, name: str):
status_code=404,
)

# Delete the classification model's data directory
model_dir = os.path.join(CLIPS_DIR, sanitize_filename(name))
# Delete the classification model's data directory in clips
data_dir = os.path.join(CLIPS_DIR, sanitize_filename(name))
if os.path.exists(data_dir):
shutil.rmtree(data_dir)

# Delete the classification model's files in model_cache
model_dir = os.path.join(MODEL_CACHE_DIR, sanitize_filename(name))
if os.path.exists(model_dir):
shutil.rmtree(model_dir)
@@ -57,8 +57,8 @@ from frigate.const import CLIPS_DIR, TRIGGER_DIR
from frigate.embeddings import EmbeddingsContext
from frigate.models import Event, ReviewSegment, Timeline, Trigger
from frigate.track.object_processing import TrackedObject
from frigate.util.builtin import get_tz_modifiers
from frigate.util.path import get_event_thumbnail_bytes
from frigate.util.time import get_tz_modifiers

logger = logging.getLogger(__name__)
@@ -34,7 +34,7 @@ from frigate.record.export import (
PlaybackSourceEnum,
RecordingExporter,
)
from frigate.util.builtin import is_current_hour
from frigate.util.time import is_current_hour

logger = logging.getLogger(__name__)
@@ -44,9 +44,9 @@ from frigate.const import (
)
from frigate.models import Event, Previews, Recordings, Regions, ReviewSegment
from frigate.track.object_processing import TrackedObjectProcessor
from frigate.util.builtin import get_tz_modifiers
from frigate.util.image import get_image_from_recording
from frigate.util.path import get_event_thumbnail_bytes
from frigate.util.time import get_tz_modifiers

logger = logging.getLogger(__name__)
@@ -36,7 +36,7 @@ from frigate.config import FrigateConfig
from frigate.embeddings import EmbeddingsContext
from frigate.models import Recordings, ReviewSegment, UserReviewStatus
from frigate.review.types import SeverityEnum
from frigate.util.builtin import get_tz_modifiers
from frigate.util.time import get_dst_transitions, get_tz_modifiers

logger = logging.getLogger(__name__)
@@ -329,89 +329,135 @@ async def review_summary(
)
clauses.append(reduce(operator.or_, label_clauses))

day_in_seconds = 60 * 60 * 24
last_month_query = (
# Find the time range of available data
time_range_query = (
ReviewSegment.select(
fn.strftime(
"%Y-%m-%d",
fn.datetime(
ReviewSegment.start_time,
"unixepoch",
hour_modifier,
minute_modifier,
),
).alias("day"),
fn.SUM(
Case(
None,
[
(
(ReviewSegment.severity == SeverityEnum.alert)
& (UserReviewStatus.has_been_reviewed == True),
1,
)
],
0,
)
).alias("reviewed_alert"),
fn.SUM(
Case(
None,
[
(
(ReviewSegment.severity == SeverityEnum.detection)
& (UserReviewStatus.has_been_reviewed == True),
1,
)
],
0,
)
).alias("reviewed_detection"),
fn.SUM(
Case(
None,
[
(
(ReviewSegment.severity == SeverityEnum.alert),
1,
)
],
0,
)
).alias("total_alert"),
fn.SUM(
Case(
None,
[
(
(ReviewSegment.severity == SeverityEnum.detection),
1,
)
],
0,
)
).alias("total_detection"),
)
.left_outer_join(
UserReviewStatus,
on=(
(ReviewSegment.id == UserReviewStatus.review_segment)
& (UserReviewStatus.user_id == user_id)
),
fn.MIN(ReviewSegment.start_time).alias("min_time"),
fn.MAX(ReviewSegment.start_time).alias("max_time"),
)
.where(reduce(operator.and_, clauses) if clauses else True)
.group_by(
(ReviewSegment.start_time + seconds_offset).cast("int") / day_in_seconds
)
.order_by(ReviewSegment.start_time.desc())
.dicts()
.get()
)

min_time = time_range_query.get("min_time")
max_time = time_range_query.get("max_time")

data = {
"last24Hours": last_24_query,
}

for e in last_month_query.dicts().iterator():
data[e["day"]] = e
# If no data, return early
if min_time is None or max_time is None:
return JSONResponse(content=data)

# Get DST transition periods
dst_periods = get_dst_transitions(params.timezone, min_time, max_time)

day_in_seconds = 60 * 60 * 24

# Query each DST period separately with the correct offset
for period_start, period_end, period_offset in dst_periods:
# Calculate hour/minute modifiers for this period
hours_offset = int(period_offset / 60 / 60)
minutes_offset = int(period_offset / 60 - hours_offset * 60)
period_hour_modifier = f"{hours_offset} hour"
period_minute_modifier = f"{minutes_offset} minute"

# Build clauses including time range for this period
period_clauses = clauses.copy()
period_clauses.append(
(ReviewSegment.start_time >= period_start)
& (ReviewSegment.start_time <= period_end)
)

period_query = (
ReviewSegment.select(
fn.strftime(
"%Y-%m-%d",
fn.datetime(
ReviewSegment.start_time,
"unixepoch",
period_hour_modifier,
period_minute_modifier,
),
).alias("day"),
fn.SUM(
Case(
None,
[
(
(ReviewSegment.severity == SeverityEnum.alert)
& (UserReviewStatus.has_been_reviewed == True),
1,
)
],
0,
)
).alias("reviewed_alert"),
fn.SUM(
Case(
None,
[
(
(ReviewSegment.severity == SeverityEnum.detection)
& (UserReviewStatus.has_been_reviewed == True),
1,
)
],
0,
)
).alias("reviewed_detection"),
fn.SUM(
Case(
None,
[
(
(ReviewSegment.severity == SeverityEnum.alert),
1,
)
],
0,
)
).alias("total_alert"),
fn.SUM(
Case(
None,
[
(
(ReviewSegment.severity == SeverityEnum.detection),
1,
)
],
0,
)
).alias("total_detection"),
)
.left_outer_join(
UserReviewStatus,
on=(
(ReviewSegment.id == UserReviewStatus.review_segment)
& (UserReviewStatus.user_id == user_id)
),
)
.where(reduce(operator.and_, period_clauses))
.group_by(
(ReviewSegment.start_time + period_offset).cast("int") / day_in_seconds
)
.order_by(ReviewSegment.start_time.desc())
)

# Merge results from this period
for e in period_query.dicts().iterator():
day_key = e["day"]
if day_key in data:
# Merge counts if day already exists (edge case at DST boundary)
data[day_key]["reviewed_alert"] += e["reviewed_alert"] or 0
data[day_key]["reviewed_detection"] += e["reviewed_detection"] or 0
data[day_key]["total_alert"] += e["total_alert"] or 0
data[day_key]["total_detection"] += e["total_detection"] or 0
else:
data[day_key] = e

return JSONResponse(content=data)
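The hunk above replaces a single month-long query using one fixed timezone offset with one query per constant-offset period returned by `get_dst_transitions`, then merges the day that straddles a DST change. A rough standalone illustration of the offset-to-SQLite-modifier conversion and the merge step, with made-up periods and rows standing in for query results:

```python
# Rough illustration of the per-period handling used above; the periods mimic
# the (start, end, seconds_offset) tuples from get_dst_transitions and the
# "rows" stand in for the per-period query results. All values are invented.
periods = [
    (1730000000.0, 1730610000.0, -4 * 3600),  # EDT portion of the range
    (1730610000.0, 1731200000.0, -5 * 3600),  # EST portion after fall-back
]

data: dict[str, dict[str, int]] = {}

for period_start, period_end, period_offset in periods:
    hours_offset = int(period_offset / 60 / 60)
    minutes_offset = int(period_offset / 60 - hours_offset * 60)
    # These become SQLite modifiers: datetime(t, 'unixepoch', '-4 hour', '0 minute')
    hour_modifier = f"{hours_offset} hour"
    minute_modifier = f"{minutes_offset} minute"

    rows = [{"day": "2024-11-03", "reviewed_alert": 0, "reviewed_detection": 1,
             "total_alert": 1, "total_detection": 2}]

    for row in rows:
        day = row["day"]
        if day in data:
            # The fall-back day appears in both periods, so its counts are summed.
            for key in ("reviewed_alert", "reviewed_detection",
                        "total_alert", "total_detection"):
                data[day][key] += row[key] or 0
        else:
            data[day] = dict(row)

print(data["2024-11-03"]["total_alert"])  # 2: one from each period
```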
@@ -166,6 +166,7 @@ class FaceRealTimeProcessor(RealTimeProcessorApi):
camera = obj_data["camera"]

if not self.config.cameras[camera].face_recognition.enabled:
logger.debug(f"Face recognition disabled for camera {camera}, skipping")
return

start = datetime.datetime.now().timestamp()
@@ -208,6 +209,7 @@ class FaceRealTimeProcessor(RealTimeProcessorApi):
person_box = obj_data.get("box")

if not person_box:
logger.debug(f"No person box available for {id}")
return

rgb = cv2.cvtColor(frame, cv2.COLOR_YUV2RGB_I420)
@@ -233,7 +235,8 @@ class FaceRealTimeProcessor(RealTimeProcessorApi):

try:
face_frame = cv2.cvtColor(face_frame, cv2.COLOR_RGB2BGR)
except Exception:
except Exception as e:
logger.debug(f"Failed to convert face frame color for {id}: {e}")
return
else:
# don't run for object without attributes
@@ -251,6 +254,7 @@ class FaceRealTimeProcessor(RealTimeProcessorApi):

# no faces detected in this frame
if not face:
logger.debug(f"No face attributes found for {id}")
return

face_box = face.get("box")
@@ -274,6 +278,7 @@ class FaceRealTimeProcessor(RealTimeProcessorApi):
res = self.recognizer.classify(face_frame)

if not res:
logger.debug(f"Face recognizer returned no result for {id}")
self.__update_metrics(datetime.datetime.now().timestamp() - start)
return

@@ -330,6 +335,7 @@ class FaceRealTimeProcessor(RealTimeProcessorApi):
def handle_request(self, topic, request_data) -> dict[str, Any] | None:
if topic == EmbeddingsRequestEnum.clear_face_classifier.value:
self.recognizer.clear()
return {"success": True, "message": "Face classifier cleared."}
elif topic == EmbeddingsRequestEnum.recognize_face.value:
img = cv2.imdecode(
np.frombuffer(base64.b64decode(request_data["image"]), dtype=np.uint8),
@@ -158,11 +158,13 @@ class EmbeddingMaintainer(threading.Thread):
self.realtime_processors: list[RealTimeProcessorApi] = []

if self.config.face_recognition.enabled:
logger.debug("Face recognition enabled, initializing FaceRealTimeProcessor")
self.realtime_processors.append(
FaceRealTimeProcessor(
self.config, self.requestor, self.event_metadata_publisher, metrics
)
)
logger.debug("FaceRealTimeProcessor initialized successfully")

if self.config.classification.bird.enabled:
self.realtime_processors.append(
@@ -283,44 +285,65 @@
logger.info("Exiting embeddings maintenance...")

def _check_classification_config_updates(self) -> None:
"""Check for classification config updates and add new processors."""
"""Check for classification config updates and add/remove processors."""
topic, model_config = self.classification_config_subscriber.check_for_update()

if topic and model_config:
if topic:
model_name = topic.split("/")[-1]
self.config.classification.custom[model_name] = model_config

# Check if processor already exists
for processor in self.realtime_processors:
if isinstance(
processor,
(
CustomStateClassificationProcessor,
CustomObjectClassificationProcessor,
),
):
if processor.model_config.name == model_name:
logger.debug(
f"Classification processor for model {model_name} already exists, skipping"
if model_config is None:
self.realtime_processors = [
processor
for processor in self.realtime_processors
if not (
isinstance(
processor,
(
CustomStateClassificationProcessor,
CustomObjectClassificationProcessor,
),
)
return
and processor.model_config.name == model_name
)
]

if model_config.state_config is not None:
processor = CustomStateClassificationProcessor(
self.config, model_config, self.requestor, self.metrics
logger.info(
f"Successfully removed classification processor for model: {model_name}"
)
else:
processor = CustomObjectClassificationProcessor(
self.config,
model_config,
self.event_metadata_publisher,
self.metrics,
)
self.config.classification.custom[model_name] = model_config

self.realtime_processors.append(processor)
logger.info(
f"Added classification processor for model: {model_name} (type: {type(processor).__name__})"
)
# Check if processor already exists
for processor in self.realtime_processors:
if isinstance(
processor,
(
CustomStateClassificationProcessor,
CustomObjectClassificationProcessor,
),
):
if processor.model_config.name == model_name:
logger.debug(
f"Classification processor for model {model_name} already exists, skipping"
)
return

if model_config.state_config is not None:
processor = CustomStateClassificationProcessor(
self.config, model_config, self.requestor, self.metrics
)
else:
processor = CustomObjectClassificationProcessor(
self.config,
model_config,
self.event_metadata_publisher,
self.metrics,
)

self.realtime_processors.append(processor)
logger.info(
f"Added classification processor for model: {model_name} (type: {type(processor).__name__})"
)

def _process_requests(self) -> None:
"""Process embeddings requests"""
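In the diff above, a `None` model config published on the topic now means "remove the processor for that model", while a non-None config adds one if it is not already running. A simplified, self-contained sketch of that dispatch, with a stand-in `Processor` class replacing the real classification processor types:

```python
# Simplified sketch of the add/remove dispatch; Processor is a stand-in for
# CustomStateClassificationProcessor / CustomObjectClassificationProcessor.
from dataclasses import dataclass
from typing import Optional


@dataclass
class Processor:
    model_name: str


def apply_update(
    processors: list[Processor], model_name: str, model_config: Optional[dict]
) -> list[Processor]:
    if model_config is None:
        # Removal: drop any processor bound to this model name.
        return [p for p in processors if p.model_name != model_name]

    if any(p.model_name == model_name for p in processors):
        # Already running: leave the list unchanged.
        return processors

    # Add: the real code picks the processor type from the model config.
    return [*processors, Processor(model_name)]


procs = apply_update([], "door_state", {"state_config": {}})
procs = apply_update(procs, "door_state", None)  # removes it again
```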
@@ -14,7 +14,8 @@ from frigate.config import CameraConfig, FrigateConfig, RetainModeEnum
from frigate.const import CACHE_DIR, CLIPS_DIR, MAX_WAL_SIZE, RECORD_DIR
from frigate.models import Previews, Recordings, ReviewSegment, UserReviewStatus
from frigate.record.util import remove_empty_directories, sync_recordings
from frigate.util.builtin import clear_and_unlink, get_tomorrow_at_time
from frigate.util.builtin import clear_and_unlink
from frigate.util.time import get_tomorrow_at_time

logger = logging.getLogger(__name__)
@@ -28,7 +28,7 @@ from frigate.ffmpeg_presets import (
parse_preset_hardware_acceleration_encode,
)
from frigate.models import Export, Previews, Recordings
from frigate.util.builtin import is_current_hour
from frigate.util.time import is_current_hour

logger = logging.getLogger(__name__)
@@ -15,12 +15,9 @@ from collections.abc import Mapping
from multiprocessing.sharedctypes import Synchronized
from pathlib import Path
from typing import Any, Dict, Optional, Tuple, Union
from zoneinfo import ZoneInfoNotFoundError

import numpy as np
import pytz
from ruamel.yaml import YAML
from tzlocal import get_localzone

from frigate.const import REGEX_HTTP_CAMERA_USER_PASS, REGEX_RTSP_CAMERA_USER_PASS

@@ -157,17 +154,6 @@ def load_labels(path: Optional[str], encoding="utf-8", prefill=91):
return labels


def get_tz_modifiers(tz_name: str) -> Tuple[str, str, float]:
seconds_offset = (
datetime.datetime.now(pytz.timezone(tz_name)).utcoffset().total_seconds()
)
hours_offset = int(seconds_offset / 60 / 60)
minutes_offset = int(seconds_offset / 60 - hours_offset * 60)
hour_modifier = f"{hours_offset} hour"
minute_modifier = f"{minutes_offset} minute"
return hour_modifier, minute_modifier, seconds_offset


def to_relative_box(
width: int, height: int, box: Tuple[int, int, int, int]
) -> Tuple[int | float, int | float, int | float, int | float]:
@@ -298,34 +284,6 @@ def find_by_key(dictionary, target_key):
return None


def get_tomorrow_at_time(hour: int) -> datetime.datetime:
"""Returns the datetime of the following day at 2am."""
try:
tomorrow = datetime.datetime.now(get_localzone()) + datetime.timedelta(days=1)
except ZoneInfoNotFoundError:
tomorrow = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(
days=1
)
logger.warning(
"Using utc for maintenance due to missing or incorrect timezone set"
)

return tomorrow.replace(hour=hour, minute=0, second=0).astimezone(
datetime.timezone.utc
)


def is_current_hour(timestamp: int) -> bool:
"""Returns if timestamp is in the current UTC hour."""
start_of_next_hour = (
datetime.datetime.now(datetime.timezone.utc).replace(
minute=0, second=0, microsecond=0
)
+ datetime.timedelta(hours=1)
).timestamp()
return timestamp < start_of_next_hour


def clear_and_unlink(file: Path, missing_ok: bool = True) -> None:
"""clear file then unlink to avoid space retained by file descriptors."""
if not missing_ok and not file.exists():
frigate/util/time.py (new file, 100 lines)
@@ -0,0 +1,100 @@
"""Time utilities."""

import datetime
import logging
from typing import Tuple
from zoneinfo import ZoneInfoNotFoundError

import pytz
from tzlocal import get_localzone

logger = logging.getLogger(__name__)


def get_tz_modifiers(tz_name: str) -> Tuple[str, str, float]:
    seconds_offset = (
        datetime.datetime.now(pytz.timezone(tz_name)).utcoffset().total_seconds()
    )
    hours_offset = int(seconds_offset / 60 / 60)
    minutes_offset = int(seconds_offset / 60 - hours_offset * 60)
    hour_modifier = f"{hours_offset} hour"
    minute_modifier = f"{minutes_offset} minute"
    return hour_modifier, minute_modifier, seconds_offset


def get_tomorrow_at_time(hour: int) -> datetime.datetime:
    """Returns the datetime of the following day at 2am."""
    try:
        tomorrow = datetime.datetime.now(get_localzone()) + datetime.timedelta(days=1)
    except ZoneInfoNotFoundError:
        tomorrow = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(
            days=1
        )
        logger.warning(
            "Using utc for maintenance due to missing or incorrect timezone set"
        )

    return tomorrow.replace(hour=hour, minute=0, second=0).astimezone(
        datetime.timezone.utc
    )


def is_current_hour(timestamp: int) -> bool:
    """Returns if timestamp is in the current UTC hour."""
    start_of_next_hour = (
        datetime.datetime.now(datetime.timezone.utc).replace(
            minute=0, second=0, microsecond=0
        )
        + datetime.timedelta(hours=1)
    ).timestamp()
    return timestamp < start_of_next_hour


def get_dst_transitions(
    tz_name: str, start_time: float, end_time: float
) -> list[tuple[float, float]]:
    """
    Find DST transition points and return time periods with consistent offsets.

    Args:
        tz_name: Timezone name (e.g., 'America/New_York')
        start_time: Start timestamp (UTC)
        end_time: End timestamp (UTC)

    Returns:
        List of (period_start, period_end, seconds_offset) tuples representing
        continuous periods with the same UTC offset
    """
    try:
        tz = pytz.timezone(tz_name)
    except pytz.UnknownTimeZoneError:
        # If timezone is invalid, return single period with no offset
        return [(start_time, end_time, 0)]

    periods = []
    current = start_time

    # Get initial offset
    dt = datetime.datetime.utcfromtimestamp(current).replace(tzinfo=pytz.UTC)
    local_dt = dt.astimezone(tz)
    prev_offset = local_dt.utcoffset().total_seconds()
    period_start = start_time

    # Check each day for offset changes
    while current <= end_time:
        dt = datetime.datetime.utcfromtimestamp(current).replace(tzinfo=pytz.UTC)
        local_dt = dt.astimezone(tz)
        current_offset = local_dt.utcoffset().total_seconds()

        if current_offset != prev_offset:
            # Found a transition - close previous period
            periods.append((period_start, current, prev_offset))
            period_start = current
            prev_offset = current_offset

        current += 86400  # Check daily

    # Add final period
    periods.append((period_start, end_time, prev_offset))

    return periods
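A brief usage sketch for the new module (assuming a Frigate checkout on `PYTHONPATH`); the timezone and timestamps are arbitrary example values:

```python
# Usage sketch for frigate/util/time.py; values are arbitrary examples.
from frigate.util.time import get_dst_transitions, get_tz_modifiers, is_current_hour

# SQLite datetime() modifiers for the current offset of a zone,
# e.g. ("-5 hour", "0 minute", -18000.0) when New York is on standard time.
hour_modifier, minute_modifier, seconds_offset = get_tz_modifiers("America/New_York")

# Split a range that crosses the November fall-back into constant-offset periods.
for period_start, period_end, offset in get_dst_transitions(
    "America/New_York", 1730000000, 1731500000
):
    print(period_start, period_end, offset / 3600)

print(is_current_hour(1730000000))  # True only while that hour is the current UTC hour
```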
@@ -34,7 +34,7 @@ from frigate.ptz.autotrack import ptz_moving_at_frame_time
from frigate.track import ObjectTracker
from frigate.track.norfair_tracker import NorfairTracker
from frigate.track.tracked_object import TrackedObjectAttribute
from frigate.util.builtin import EventsPerSecond, get_tomorrow_at_time
from frigate.util.builtin import EventsPerSecond
from frigate.util.image import (
FrameManager,
SharedMemoryFrameManager,
@@ -53,6 +53,7 @@ from frigate.util.object import (
reduce_detections,
)
from frigate.util.process import FrigateProcess
from frigate.util.time import get_tomorrow_at_time

logger = logging.getLogger(__name__)
@@ -271,6 +271,8 @@
"disconnectStream": "Disconnect",
"estimatedBandwidth": "Estimated Bandwidth",
"roles": "Roles",
"ffmpegModule": "Use stream compatibility mode",
"ffmpegModuleDescription": "If the stream does not load after several attempts, try enabling this. When enabled, Frigate will use the ffmpeg module with go2rtc. This may provide better compatibility with some camera streams.",
"none": "None",
"error": "Error",
"streamValidated": "Stream {{number}} validated successfully",
@@ -317,6 +317,21 @@ export default function Step3ChooseExamples({
return unclassifiedImages.length === 0;
}, [unclassifiedImages]);

const handleBack = useCallback(() => {
if (currentClassIndex > 0) {
const previousClass = allClasses[currentClassIndex - 1];
setCurrentClassIndex((prev) => prev - 1);

// Restore selections for the previous class
const previousSelections = Object.entries(imageClassifications)
.filter(([_, className]) => className === previousClass)
.map(([imageName, _]) => imageName);
setSelectedImages(new Set(previousSelections));
} else {
onBack();
}
}, [currentClassIndex, allClasses, imageClassifications, onBack]);

return (
<div className="flex flex-col gap-6">
{isTraining ? (
@@ -420,7 +435,7 @@ export default function Step3ChooseExamples({

{!isTraining && (
<div className="flex flex-col gap-3 pt-3 sm:flex-row sm:justify-end sm:gap-4">
<Button type="button" onClick={onBack} className="sm:flex-1">
<Button type="button" onClick={handleBack} className="sm:flex-1">
{t("button.back", { ns: "common" })}
</Button>
<Button
@@ -174,9 +174,7 @@ export default function CameraWizardDialog({
...(friendlyName && { friendly_name: friendlyName }),
ffmpeg: {
inputs: wizardData.streams.map((stream, index) => {
const isRestreamed =
wizardData.restreamIds?.includes(stream.id) ?? false;
if (isRestreamed) {
if (stream.restream) {
const go2rtcStreamName =
wizardData.streams!.length === 1
? finalCameraName
@@ -234,7 +232,11 @@
wizardData.streams!.length === 1
? finalCameraName
: `${finalCameraName}_${index + 1}`;
go2rtcStreams[streamName] = [stream.url];

const streamUrl = stream.useFfmpeg
? `ffmpeg:${stream.url}`
: stream.url;
go2rtcStreams[streamName] = [streamUrl];
});

if (Object.keys(go2rtcStreams).length > 0) {
@@ -608,6 +608,12 @@ export default function Step1NameCamera({
</div>
)}

{isTesting && (
<div className="flex items-center gap-2 text-sm text-muted-foreground">
<ActivityIndicator className="size-4" />
{testStatus}
</div>
)}
<div className="flex flex-col gap-3 pt-3 sm:flex-row sm:justify-end sm:gap-4">
<Button
type="button"
@@ -635,10 +641,7 @@ export default function Step1NameCamera({
variant="select"
className="flex items-center justify-center gap-2 sm:flex-1"
>
{isTesting && <ActivityIndicator className="size-4" />}
{isTesting && testStatus
? testStatus
: t("cameraWizard.step1.testConnection")}
{t("cameraWizard.step1.testConnection")}
</Button>
)}
</div>
@@ -201,16 +201,12 @@ export default function Step2StreamConfig({

const setRestream = useCallback(
(streamId: string) => {
const currentIds = wizardData.restreamIds || [];
const isSelected = currentIds.includes(streamId);
const newIds = isSelected
? currentIds.filter((id) => id !== streamId)
: [...currentIds, streamId];
onUpdate({
restreamIds: newIds,
});
const stream = streams.find((s) => s.id === streamId);
if (!stream) return;

updateStream(streamId, { restream: !stream.restream });
},
[wizardData.restreamIds, onUpdate],
[streams, updateStream],
);

const hasDetectRole = streams.some((s) => s.roles.includes("detect"));
@@ -435,9 +431,7 @@ export default function Step2StreamConfig({
{t("cameraWizard.step2.go2rtc")}
</span>
<Switch
checked={(wizardData.restreamIds || []).includes(
stream.id,
)}
checked={stream.restream || false}
onCheckedChange={() => setRestream(stream.id)}
/>
</div>
@@ -1,7 +1,13 @@
import { Button } from "@/components/ui/button";
import { Badge } from "@/components/ui/badge";
import { Switch } from "@/components/ui/switch";
import {
Popover,
PopoverContent,
PopoverTrigger,
} from "@/components/ui/popover";
import { useTranslation } from "react-i18next";
import { LuRotateCcw } from "react-icons/lu";
import { LuRotateCcw, LuInfo } from "react-icons/lu";
import { useState, useCallback, useMemo, useEffect } from "react";
import ActivityIndicator from "@/components/indicators/activity-indicator";
import axios from "axios";
@@ -216,7 +222,6 @@ export default function Step3Validation({
brandTemplate: wizardData.brandTemplate,
customUrl: wizardData.customUrl,
streams: wizardData.streams,
restreamIds: wizardData.restreamIds,
};

onSave(configData);
@@ -322,6 +327,51 @@ export default function Step3Validation({
</div>
)}

{result?.success && (
<div className="mb-3 flex items-center justify-between">
<div className="flex items-center gap-2">
<span className="text-sm">
{t("cameraWizard.step3.ffmpegModule")}
</span>
<Popover>
<PopoverTrigger asChild>
<Button
variant="ghost"
size="sm"
className="h-4 w-4 p-0"
>
<LuInfo className="size-3" />
</Button>
</PopoverTrigger>
<PopoverContent className="pointer-events-auto w-80 text-xs">
<div className="space-y-2">
<div className="font-medium">
{t("cameraWizard.step3.ffmpegModule")}
</div>
<div className="text-muted-foreground">
{t(
"cameraWizard.step3.ffmpegModuleDescription",
)}
</div>
</div>
</PopoverContent>
</Popover>
</div>
<Switch
checked={stream.useFfmpeg || false}
onCheckedChange={(checked) => {
onUpdate({
streams: streams.map((s) =>
s.id === stream.id
? { ...s, useFfmpeg: checked }
: s,
),
});
}}
/>
</div>
)}

<div className="mb-2 flex flex-col justify-between gap-1 md:flex-row md:items-center">
<span className="break-all text-sm text-muted-foreground">
{stream.url}
@@ -491,8 +541,7 @@ function StreamIssues({

// Restreaming check
if (stream.roles.includes("record")) {
const restreamIds = wizardData.restreamIds || [];
if (restreamIds.includes(stream.id)) {
if (stream.restream) {
result.push({
type: "warning",
message: t("cameraWizard.step3.issues.restreamingWarning"),
@@ -660,9 +709,10 @@ function StreamPreview({ stream, onBandwidthUpdate }: StreamPreviewProps) {

useEffect(() => {
// Register stream with go2rtc
const streamUrl = stream.useFfmpeg ? `ffmpeg:${stream.url}` : stream.url;
axios
.put(`go2rtc/streams/${streamId}`, null, {
params: { src: stream.url },
params: { src: streamUrl },
})
.then(() => {
// Add small delay to allow go2rtc api to run and initialize the stream
@@ -680,7 +730,7 @@ function StreamPreview({ stream, onBandwidthUpdate }: StreamPreviewProps) {
// do nothing on cleanup errors - go2rtc won't consume the streams
});
};
}, [stream.url, streamId]);
}, [stream.url, stream.useFfmpeg, streamId]);

const resolution = stream.testResult?.resolution;
let aspectRatio = "16/9";
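In the StreamPreview hunk above, the preview registers its source with go2rtc through a relative `go2rtc/streams/{id}` call and prefixes the URL with `ffmpeg:` when compatibility mode is on. A hedged sketch of the same call made directly with Python; the absolute API path, host, and stream name below are assumptions based on that relative URL, not confirmed endpoints:

```python
# Hedged sketch of the stream registration the preview performs; the base URL,
# API path, and stream name below are assumptions for illustration only.
import requests

FRIGATE = "http://frigate.local:5000"      # assumed Frigate host/port
stream_id = "wizard_preview_cam1"          # hypothetical preview stream name
camera_url = "rtsp://user:pass@192.168.1.10:554/stream1"
use_ffmpeg = True                          # the new "stream compatibility mode" toggle

src = f"ffmpeg:{camera_url}" if use_ffmpeg else camera_url
requests.put(f"{FRIGATE}/api/go2rtc/streams/{stream_id}", params={"src": src})
```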
@@ -85,6 +85,8 @@ export type StreamConfig = {
quality?: string;
testResult?: TestResult;
userTested?: boolean;
useFfmpeg?: boolean;
restream?: boolean;
};

export type TestResult = {
@@ -105,7 +107,6 @@ export type WizardFormData = {
brandTemplate?: CameraBrand;
customUrl?: string;
streams?: StreamConfig[];
restreamIds?: string[];
};

// API Response Types
@@ -146,6 +147,7 @@ export type CameraConfigData = {
inputs: {
path: string;
roles: string[];
input_args?: string;
}[];
};
live?: {
@@ -10,7 +10,7 @@ import {
CustomClassificationModelConfig,
FrigateConfig,
} from "@/types/frigateConfig";
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
import { useCallback, useEffect, useMemo, useState } from "react";
import { useTranslation } from "react-i18next";
import { FaFolderPlus } from "react-icons/fa";
import { MdModelTraining } from "react-icons/md";
@@ -21,7 +21,6 @@ import Heading from "@/components/ui/heading";
import { useOverlayState } from "@/hooks/use-overlay-state";
import axios from "axios";
import { toast } from "sonner";
import useKeyboardListener from "@/hooks/use-keyboard-listener";
import {
DropdownMenu,
DropdownMenuContent,
@@ -212,42 +211,44 @@ function ModelCard({ config, onClick, onDelete }: ModelCardProps) {
}>(`classification/${config.name}/dataset`, { revalidateOnFocus: false });

const [deleteDialogOpen, setDeleteDialogOpen] = useState(false);
const bypassDialogRef = useRef(false);

useKeyboardListener(["Shift"], (_, modifiers) => {
bypassDialogRef.current = modifiers.shift;
return false;
});

const handleDelete = useCallback(async () => {
await axios
.delete(`classification/${config.name}`)
.then((resp) => {
if (resp.status == 200) {
toast.success(t("toast.success.deletedModel", { count: 1 }), {
position: "top-center",
});
onDelete();
}
})
.catch((error) => {
const errorMessage =
error.response?.data?.message ||
error.response?.data?.detail ||
"Unknown error";
toast.error(t("toast.error.deleteModelFailed", { errorMessage }), {
position: "top-center",
});
try {
await axios.delete(`classification/${config.name}`);
await axios.put("/config/set", {
requires_restart: 0,
update_topic: `config/classification/custom/${config.name}`,
config_data: {
classification: {
custom: {
[config.name]: "",
},
},
},
});

toast.success(t("toast.success.deletedModel", { count: 1 }), {
position: "top-center",
});
onDelete();
} catch (err) {
const error = err as {
response?: { data?: { message?: string; detail?: string } };
};
const errorMessage =
error.response?.data?.message ||
error.response?.data?.detail ||
"Unknown error";
toast.error(t("toast.error.deleteModelFailed", { errorMessage }), {
position: "top-center",
});
}
}, [config, onDelete, t]);

const handleDeleteClick = useCallback(() => {
if (bypassDialogRef.current) {
handleDelete();
} else {
setDeleteDialogOpen(true);
}
}, [handleDelete]);
const handleDeleteClick = useCallback((e: React.MouseEvent) => {
e.stopPropagation();
setDeleteDialogOpen(true);
}, []);

const coverImage = useMemo(() => {
if (!dataset) {
@@ -304,7 +305,7 @@ function ModelCard({ config, onClick, onDelete }: ModelCardProps) {
className="size-full"
src={`${baseUrl}clips/${config.name}/dataset/${coverImage?.name}/${coverImage?.img}`}
/>
<ImageShadowOverlay />
<ImageShadowOverlay lowerClassName="h-[30%] z-0" />
<div className="absolute bottom-2 left-3 text-lg text-white smart-capitalize">
{config.name}
</div>
@@ -315,14 +316,13 @@ function ModelCard({ config, onClick, onDelete }: ModelCardProps) {
<FiMoreVertical className="size-5 text-white" />
</BlurredIconButton>
</DropdownMenuTrigger>
<DropdownMenuContent align="end">
<DropdownMenuContent
align="end"
onClick={(e) => e.stopPropagation()}
>
<DropdownMenuItem onClick={handleDeleteClick}>
<LuTrash2 className="mr-2 size-4" />
<span>
{bypassDialogRef.current
? t("button.deleteNow", { ns: "common" })
: t("button.delete", { ns: "common" })}
</span>
<span>{t("button.delete", { ns: "common" })}</span>
</DropdownMenuItem>
</DropdownMenuContent>
</DropdownMenu>
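Deleting a model now also clears its entry from the running config: after the DELETE, the handler above PUTs an empty string for the nested key to `config/set`, which is what triggers the removal publish on `config/classification/custom/{name}` in the backend hunks earlier. Roughly the same pair of requests expressed with Python `requests`; the host is an assumed example and authentication is omitted:

```python
# Roughly the delete + config/set calls the ModelCard handler makes; the host
# is an assumed example value and any required auth headers are omitted.
import requests

name = "my_custom_model"  # hypothetical classification model name

requests.delete(f"http://frigate.local:5000/api/classification/{name}")
requests.put(
    "http://frigate.local:5000/api/config/set",
    json={
        "requires_restart": 0,
        "update_topic": f"config/classification/custom/{name}",
        "config_data": {"classification": {"custom": {name: ""}}},
    },
)
```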