mirror of
https://github.com/blakeblackshear/frigate.git
synced 2025-12-06 05:24:11 +03:00
parent
4f76b34f44
commit
740c618240
@ -37,7 +37,6 @@ from frigate.stats.prometheus import get_metrics, update_metrics
|
||||
from frigate.util.builtin import (
|
||||
clean_camera_user_pass,
|
||||
flatten_config_data,
|
||||
get_tz_modifiers,
|
||||
process_config_query_string,
|
||||
update_yaml_file_bulk,
|
||||
)
|
||||
@ -48,6 +47,7 @@ from frigate.util.services import (
|
||||
restart_frigate,
|
||||
vainfo_hwaccel,
|
||||
)
|
||||
from frigate.util.time import get_tz_modifiers
|
||||
from frigate.version import VERSION
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@ -57,8 +57,8 @@ from frigate.const import CLIPS_DIR, TRIGGER_DIR
|
||||
from frigate.embeddings import EmbeddingsContext
|
||||
from frigate.models import Event, ReviewSegment, Timeline, Trigger
|
||||
from frigate.track.object_processing import TrackedObject
|
||||
from frigate.util.builtin import get_tz_modifiers
|
||||
from frigate.util.path import get_event_thumbnail_bytes
|
||||
from frigate.util.time import get_tz_modifiers
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@ -34,7 +34,7 @@ from frigate.record.export import (
|
||||
PlaybackSourceEnum,
|
||||
RecordingExporter,
|
||||
)
|
||||
from frigate.util.builtin import is_current_hour
|
||||
from frigate.util.time import is_current_hour
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@ -44,9 +44,9 @@ from frigate.const import (
|
||||
)
|
||||
from frigate.models import Event, Previews, Recordings, Regions, ReviewSegment
|
||||
from frigate.track.object_processing import TrackedObjectProcessor
|
||||
from frigate.util.builtin import get_tz_modifiers
|
||||
from frigate.util.image import get_image_from_recording
|
||||
from frigate.util.path import get_event_thumbnail_bytes
|
||||
from frigate.util.time import get_tz_modifiers
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@ -36,7 +36,7 @@ from frigate.config import FrigateConfig
|
||||
from frigate.embeddings import EmbeddingsContext
|
||||
from frigate.models import Recordings, ReviewSegment, UserReviewStatus
|
||||
from frigate.review.types import SeverityEnum
|
||||
from frigate.util.builtin import get_tz_modifiers
|
||||
from frigate.util.time import get_dst_transitions, get_tz_modifiers
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@ -329,89 +329,135 @@ async def review_summary(
|
||||
)
|
||||
clauses.append(reduce(operator.or_, label_clauses))
|
||||
|
||||
day_in_seconds = 60 * 60 * 24
|
||||
last_month_query = (
|
||||
# Find the time range of available data
|
||||
time_range_query = (
|
||||
ReviewSegment.select(
|
||||
fn.strftime(
|
||||
"%Y-%m-%d",
|
||||
fn.datetime(
|
||||
ReviewSegment.start_time,
|
||||
"unixepoch",
|
||||
hour_modifier,
|
||||
minute_modifier,
|
||||
),
|
||||
).alias("day"),
|
||||
fn.SUM(
|
||||
Case(
|
||||
None,
|
||||
[
|
||||
(
|
||||
(ReviewSegment.severity == SeverityEnum.alert)
|
||||
& (UserReviewStatus.has_been_reviewed == True),
|
||||
1,
|
||||
)
|
||||
],
|
||||
0,
|
||||
)
|
||||
).alias("reviewed_alert"),
|
||||
fn.SUM(
|
||||
Case(
|
||||
None,
|
||||
[
|
||||
(
|
||||
(ReviewSegment.severity == SeverityEnum.detection)
|
||||
& (UserReviewStatus.has_been_reviewed == True),
|
||||
1,
|
||||
)
|
||||
],
|
||||
0,
|
||||
)
|
||||
).alias("reviewed_detection"),
|
||||
fn.SUM(
|
||||
Case(
|
||||
None,
|
||||
[
|
||||
(
|
||||
(ReviewSegment.severity == SeverityEnum.alert),
|
||||
1,
|
||||
)
|
||||
],
|
||||
0,
|
||||
)
|
||||
).alias("total_alert"),
|
||||
fn.SUM(
|
||||
Case(
|
||||
None,
|
||||
[
|
||||
(
|
||||
(ReviewSegment.severity == SeverityEnum.detection),
|
||||
1,
|
||||
)
|
||||
],
|
||||
0,
|
||||
)
|
||||
).alias("total_detection"),
|
||||
)
|
||||
.left_outer_join(
|
||||
UserReviewStatus,
|
||||
on=(
|
||||
(ReviewSegment.id == UserReviewStatus.review_segment)
|
||||
& (UserReviewStatus.user_id == user_id)
|
||||
),
|
||||
fn.MIN(ReviewSegment.start_time).alias("min_time"),
|
||||
fn.MAX(ReviewSegment.start_time).alias("max_time"),
|
||||
)
|
||||
.where(reduce(operator.and_, clauses) if clauses else True)
|
||||
.group_by(
|
||||
(ReviewSegment.start_time + seconds_offset).cast("int") / day_in_seconds
|
||||
)
|
||||
.order_by(ReviewSegment.start_time.desc())
|
||||
.dicts()
|
||||
.get()
|
||||
)
|
||||
|
||||
min_time = time_range_query.get("min_time")
|
||||
max_time = time_range_query.get("max_time")
|
||||
|
||||
data = {
|
||||
"last24Hours": last_24_query,
|
||||
}
|
||||
|
||||
for e in last_month_query.dicts().iterator():
|
||||
data[e["day"]] = e
|
||||
# If no data, return early
|
||||
if min_time is None or max_time is None:
|
||||
return JSONResponse(content=data)
|
||||
|
||||
# Get DST transition periods
|
||||
dst_periods = get_dst_transitions(params.timezone, min_time, max_time)
|
||||
|
||||
day_in_seconds = 60 * 60 * 24
|
||||
|
||||
# Query each DST period separately with the correct offset
|
||||
for period_start, period_end, period_offset in dst_periods:
|
||||
# Calculate hour/minute modifiers for this period
|
||||
hours_offset = int(period_offset / 60 / 60)
|
||||
minutes_offset = int(period_offset / 60 - hours_offset * 60)
|
||||
period_hour_modifier = f"{hours_offset} hour"
|
||||
period_minute_modifier = f"{minutes_offset} minute"
|
||||
|
||||
# Build clauses including time range for this period
|
||||
period_clauses = clauses.copy()
|
||||
period_clauses.append(
|
||||
(ReviewSegment.start_time >= period_start)
|
||||
& (ReviewSegment.start_time <= period_end)
|
||||
)
|
||||
|
||||
period_query = (
|
||||
ReviewSegment.select(
|
||||
fn.strftime(
|
||||
"%Y-%m-%d",
|
||||
fn.datetime(
|
||||
ReviewSegment.start_time,
|
||||
"unixepoch",
|
||||
period_hour_modifier,
|
||||
period_minute_modifier,
|
||||
),
|
||||
).alias("day"),
|
||||
fn.SUM(
|
||||
Case(
|
||||
None,
|
||||
[
|
||||
(
|
||||
(ReviewSegment.severity == SeverityEnum.alert)
|
||||
& (UserReviewStatus.has_been_reviewed == True),
|
||||
1,
|
||||
)
|
||||
],
|
||||
0,
|
||||
)
|
||||
).alias("reviewed_alert"),
|
||||
fn.SUM(
|
||||
Case(
|
||||
None,
|
||||
[
|
||||
(
|
||||
(ReviewSegment.severity == SeverityEnum.detection)
|
||||
& (UserReviewStatus.has_been_reviewed == True),
|
||||
1,
|
||||
)
|
||||
],
|
||||
0,
|
||||
)
|
||||
).alias("reviewed_detection"),
|
||||
fn.SUM(
|
||||
Case(
|
||||
None,
|
||||
[
|
||||
(
|
||||
(ReviewSegment.severity == SeverityEnum.alert),
|
||||
1,
|
||||
)
|
||||
],
|
||||
0,
|
||||
)
|
||||
).alias("total_alert"),
|
||||
fn.SUM(
|
||||
Case(
|
||||
None,
|
||||
[
|
||||
(
|
||||
(ReviewSegment.severity == SeverityEnum.detection),
|
||||
1,
|
||||
)
|
||||
],
|
||||
0,
|
||||
)
|
||||
).alias("total_detection"),
|
||||
)
|
||||
.left_outer_join(
|
||||
UserReviewStatus,
|
||||
on=(
|
||||
(ReviewSegment.id == UserReviewStatus.review_segment)
|
||||
& (UserReviewStatus.user_id == user_id)
|
||||
),
|
||||
)
|
||||
.where(reduce(operator.and_, period_clauses))
|
||||
.group_by(
|
||||
(ReviewSegment.start_time + period_offset).cast("int") / day_in_seconds
|
||||
)
|
||||
.order_by(ReviewSegment.start_time.desc())
|
||||
)
|
||||
|
||||
# Merge results from this period
|
||||
for e in period_query.dicts().iterator():
|
||||
day_key = e["day"]
|
||||
if day_key in data:
|
||||
# Merge counts if day already exists (edge case at DST boundary)
|
||||
data[day_key]["reviewed_alert"] += e["reviewed_alert"] or 0
|
||||
data[day_key]["reviewed_detection"] += e["reviewed_detection"] or 0
|
||||
data[day_key]["total_alert"] += e["total_alert"] or 0
|
||||
data[day_key]["total_detection"] += e["total_detection"] or 0
|
||||
else:
|
||||
data[day_key] = e
|
||||
|
||||
return JSONResponse(content=data)
|
||||
|
||||
|
||||
@ -14,7 +14,8 @@ from frigate.config import CameraConfig, FrigateConfig, RetainModeEnum
|
||||
from frigate.const import CACHE_DIR, CLIPS_DIR, MAX_WAL_SIZE, RECORD_DIR
|
||||
from frigate.models import Previews, Recordings, ReviewSegment, UserReviewStatus
|
||||
from frigate.record.util import remove_empty_directories, sync_recordings
|
||||
from frigate.util.builtin import clear_and_unlink, get_tomorrow_at_time
|
||||
from frigate.util.builtin import clear_and_unlink
|
||||
from frigate.util.time import get_tomorrow_at_time
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@ -28,7 +28,7 @@ from frigate.ffmpeg_presets import (
|
||||
parse_preset_hardware_acceleration_encode,
|
||||
)
|
||||
from frigate.models import Export, Previews, Recordings
|
||||
from frigate.util.builtin import is_current_hour
|
||||
from frigate.util.time import is_current_hour
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@ -15,12 +15,9 @@ from collections.abc import Mapping
|
||||
from multiprocessing.sharedctypes import Synchronized
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, Optional, Tuple, Union
|
||||
from zoneinfo import ZoneInfoNotFoundError
|
||||
|
||||
import numpy as np
|
||||
import pytz
|
||||
from ruamel.yaml import YAML
|
||||
from tzlocal import get_localzone
|
||||
|
||||
from frigate.const import REGEX_HTTP_CAMERA_USER_PASS, REGEX_RTSP_CAMERA_USER_PASS
|
||||
|
||||
@ -157,17 +154,6 @@ def load_labels(path: Optional[str], encoding="utf-8", prefill=91):
|
||||
return labels
|
||||
|
||||
|
||||
def get_tz_modifiers(tz_name: str) -> Tuple[str, str, float]:
    """Return SQLite datetime modifiers and the raw UTC offset for a timezone.

    Computes the timezone's *current* UTC offset and expresses it as the
    ("N hour", "M minute") modifier strings SQLite's datetime() accepts,
    plus the offset in seconds.

    Args:
        tz_name: IANA timezone name, e.g. "America/New_York".

    Returns:
        (hour_modifier, minute_modifier, seconds_offset) tuple.
    """
    offset_seconds = (
        datetime.datetime.now(pytz.timezone(tz_name)).utcoffset().total_seconds()
    )
    whole_hours = int(offset_seconds / 60 / 60)
    leftover_minutes = int(offset_seconds / 60 - whole_hours * 60)
    return f"{whole_hours} hour", f"{leftover_minutes} minute", offset_seconds
|
||||
|
||||
|
||||
def to_relative_box(
|
||||
width: int, height: int, box: Tuple[int, int, int, int]
|
||||
) -> Tuple[int | float, int | float, int | float, int | float]:
|
||||
@ -298,34 +284,6 @@ def find_by_key(dictionary, target_key):
|
||||
return None
|
||||
|
||||
|
||||
def get_tomorrow_at_time(hour: int) -> datetime.datetime:
    """Return tomorrow's datetime at the given local hour, converted to UTC.

    Falls back to UTC as the local zone (with a warning) when the system
    timezone cannot be resolved.

    Args:
        hour: Hour of the day (0-23) in the local timezone.

    Returns:
        Timezone-aware UTC datetime for tomorrow at ``hour``:00:00.
    """
    try:
        tomorrow = datetime.datetime.now(get_localzone()) + datetime.timedelta(days=1)
    except ZoneInfoNotFoundError:
        tomorrow = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(
            days=1
        )
        logger.warning(
            "Using utc for maintenance due to missing or incorrect timezone set"
        )

    # Zero out microseconds too so the result lands exactly on the hour
    # (the original replace() left the current microsecond in place).
    return tomorrow.replace(hour=hour, minute=0, second=0, microsecond=0).astimezone(
        datetime.timezone.utc
    )
|
||||
|
||||
|
||||
def is_current_hour(timestamp: int) -> bool:
    """Return True if the given UTC timestamp is before the end of the current UTC hour."""
    now_utc = datetime.datetime.now(datetime.timezone.utc)
    hour_start = now_utc.replace(minute=0, second=0, microsecond=0)
    next_hour = hour_start + datetime.timedelta(hours=1)
    return timestamp < next_hour.timestamp()
|
||||
|
||||
|
||||
def clear_and_unlink(file: Path, missing_ok: bool = True) -> None:
|
||||
"""clear file then unlink to avoid space retained by file descriptors."""
|
||||
if not missing_ok and not file.exists():
|
||||
|
||||
100
frigate/util/time.py
Normal file
100
frigate/util/time.py
Normal file
@ -0,0 +1,100 @@
|
||||
"""Time utilities."""
|
||||
|
||||
import datetime
|
||||
import logging
|
||||
from typing import Tuple
|
||||
from zoneinfo import ZoneInfoNotFoundError
|
||||
|
||||
import pytz
|
||||
from tzlocal import get_localzone
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_tz_modifiers(tz_name: str) -> Tuple[str, str, float]:
    """Return SQLite datetime modifiers and the raw UTC offset for a timezone.

    The timezone's current UTC offset is expressed both as the
    ("N hour", "M minute") modifier strings usable with SQLite's datetime()
    and as a plain offset in seconds.

    Args:
        tz_name: IANA timezone name, e.g. "America/New_York".

    Returns:
        (hour_modifier, minute_modifier, seconds_offset) tuple.
    """
    seconds = (
        datetime.datetime.now(pytz.timezone(tz_name)).utcoffset().total_seconds()
    )
    hours = int(seconds / 60 / 60)
    minutes = int(seconds / 60 - hours * 60)
    return f"{hours} hour", f"{minutes} minute", seconds
|
||||
|
||||
|
||||
def get_tomorrow_at_time(hour: int) -> datetime.datetime:
    """Return tomorrow's datetime at the given local hour, converted to UTC.

    Falls back to UTC as the local zone (with a warning) when the system
    timezone cannot be resolved.

    Args:
        hour: Hour of the day (0-23) in the local timezone.

    Returns:
        Timezone-aware UTC datetime for tomorrow at ``hour``:00:00.
    """
    try:
        tomorrow = datetime.datetime.now(get_localzone()) + datetime.timedelta(days=1)
    except ZoneInfoNotFoundError:
        tomorrow = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(
            days=1
        )
        logger.warning(
            "Using utc for maintenance due to missing or incorrect timezone set"
        )

    # Zero out microseconds too so the result lands exactly on the hour
    # (the original replace() left the current microsecond in place).
    return tomorrow.replace(hour=hour, minute=0, second=0, microsecond=0).astimezone(
        datetime.timezone.utc
    )
|
||||
|
||||
|
||||
def is_current_hour(timestamp: int) -> bool:
    """Return True if the given UTC timestamp is before the end of the current UTC hour."""
    current_hour_start = datetime.datetime.now(datetime.timezone.utc).replace(
        minute=0, second=0, microsecond=0
    )
    cutoff = (current_hour_start + datetime.timedelta(hours=1)).timestamp()
    return timestamp < cutoff
|
||||
|
||||
|
||||
def get_dst_transitions(
    tz_name: str, start_time: float, end_time: float
) -> list[tuple[float, float, float]]:
    """
    Find DST transition points and return time periods with consistent offsets.

    Args:
        tz_name: Timezone name (e.g., 'America/New_York')
        start_time: Start timestamp (UTC)
        end_time: End timestamp (UTC)

    Returns:
        List of (period_start, period_end, seconds_offset) tuples representing
        continuous periods with the same UTC offset. Offsets are sampled once
        per day, so transition boundaries are accurate only to day granularity.
    """
    try:
        tz = pytz.timezone(tz_name)
    except pytz.UnknownTimeZoneError:
        # If timezone is invalid, return single period with no offset
        return [(start_time, end_time, 0)]

    periods: list[tuple[float, float, float]] = []
    current = start_time

    # Get initial offset. fromtimestamp(ts, tz=...) replaces the deprecated
    # utcfromtimestamp() and yields an aware local datetime directly.
    local_dt = datetime.datetime.fromtimestamp(current, tz=tz)
    prev_offset = local_dt.utcoffset().total_seconds()
    period_start = start_time

    # Check each day for offset changes
    while current <= end_time:
        local_dt = datetime.datetime.fromtimestamp(current, tz=tz)
        current_offset = local_dt.utcoffset().total_seconds()

        if current_offset != prev_offset:
            # Found a transition - close previous period
            periods.append((period_start, current, prev_offset))
            period_start = current
            prev_offset = current_offset

        current += 86400  # Check daily

    # Add final period
    periods.append((period_start, end_time, prev_offset))

    return periods
|
||||
@ -34,7 +34,7 @@ from frigate.ptz.autotrack import ptz_moving_at_frame_time
|
||||
from frigate.track import ObjectTracker
|
||||
from frigate.track.norfair_tracker import NorfairTracker
|
||||
from frigate.track.tracked_object import TrackedObjectAttribute
|
||||
from frigate.util.builtin import EventsPerSecond, get_tomorrow_at_time
|
||||
from frigate.util.builtin import EventsPerSecond
|
||||
from frigate.util.image import (
|
||||
FrameManager,
|
||||
SharedMemoryFrameManager,
|
||||
@ -53,6 +53,7 @@ from frigate.util.object import (
|
||||
reduce_detections,
|
||||
)
|
||||
from frigate.util.process import FrigateProcess
|
||||
from frigate.util.time import get_tomorrow_at_time
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
Loading…
Reference in New Issue
Block a user