Mirror of https://github.com/blakeblackshear/frigate.git
Synced 2026-01-22 20:18:30 +03:00
Compare commits (6 commits): f4e93efc22 ... c8d288e8d1
| Author | SHA1 | Date |
|---|---|---|
| | c8d288e8d1 | |
| | bfe46d9f4a | |
| | 4dd2567848 | |
| | 208a83cf79 | |
| | af942fb64e | |
| | cfeb86646f | |
```diff
@@ -23,7 +23,12 @@ from markupsafe import escape
 from peewee import SQL, fn, operator
 from pydantic import ValidationError
 
-from frigate.api.auth import allow_any_authenticated, allow_public, require_role
+from frigate.api.auth import (
+    allow_any_authenticated,
+    allow_public,
+    get_allowed_cameras_for_filter,
+    require_role,
+)
 from frigate.api.defs.query.app_query_parameters import AppTimelineHourlyQueryParameters
 from frigate.api.defs.request.app_body import AppConfigSetBody
 from frigate.api.defs.tags import Tags
@@ -687,13 +692,19 @@ def plusModels(request: Request, filterByCurrentModelDetector: bool = False):
 @router.get(
     "/recognized_license_plates", dependencies=[Depends(allow_any_authenticated())]
 )
-def get_recognized_license_plates(split_joined: Optional[int] = None):
+def get_recognized_license_plates(
+    split_joined: Optional[int] = None,
+    allowed_cameras: List[str] = Depends(get_allowed_cameras_for_filter),
+):
     try:
         query = (
             Event.select(
                 SQL("json_extract(data, '$.recognized_license_plate') AS plate")
             )
-            .where(SQL("json_extract(data, '$.recognized_license_plate') IS NOT NULL"))
+            .where(
+                (SQL("json_extract(data, '$.recognized_license_plate') IS NOT NULL"))
+                & (Event.camera << allowed_cameras)
+            )
             .distinct()
         )
         recognized_license_plates = [row[0] for row in query.tuples()]
```
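The substantive change above is that `/recognized_license_plates` now AND-s a camera allow-list onto its query with peewee's `<<` operator (SQL `IN`), using the camera set injected via `get_allowed_cameras_for_filter`. Below is a minimal, self-contained sketch of that filtering pattern; the in-memory `Event` model and data are hypothetical stand-ins, not Frigate's real schema.

```python
# Sketch: restrict a peewee query to an allow-list of cameras via "<<" (SQL IN).
from peewee import CharField, Model, SqliteDatabase

db = SqliteDatabase(":memory:")


class Event(Model):
    camera = CharField()
    plate = CharField(null=True)

    class Meta:
        database = db


db.create_tables([Event])
Event.create(camera="front", plate="ABC123")
Event.create(camera="back", plate="XYZ789")
Event.create(camera="garage", plate=None)

allowed_cameras = ["front"]  # e.g. derived from the requesting user's role

# "<<" compiles to SQL IN, so rows from cameras outside the allow-list are excluded.
query = (
    Event.select(Event.plate)
    .where(Event.plate.is_null(False) & (Event.camera << allowed_cameras))
    .distinct()
)
print([row.plate for row in query])  # ['ABC123']
```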
```diff
@@ -662,6 +662,13 @@ class FrigateConfig(FrigateBaseModel):
             # generate zone contours
             if len(camera_config.zones) > 0:
                 for zone in camera_config.zones.values():
+                    if zone.filters:
+                        for object_name, filter_config in zone.filters.items():
+                            zone.filters[object_name] = RuntimeFilterConfig(
+                                frame_shape=camera_config.frame_shape,
+                                **filter_config.model_dump(exclude_unset=True),
+                            )
+
                     zone.generate_contour(camera_config.frame_shape)
 
             # Set live view stream if none is set
```
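This hunk rebuilds each zone's per-object filter as a `RuntimeFilterConfig` constructed with the camera's `frame_shape`, so fractional filter values (such as `min_area: 0.1`) can be resolved against the actual frame size. A simplified sketch of that kind of conversion follows; the helper name and the `<= 1.0` heuristic are assumptions of this sketch, not a description of Frigate's implementation.

```python
# Simplified illustration of resolving a fractional zone filter area to pixels.
from typing import Tuple, Union


def resolve_min_area(min_area: Union[int, float], frame_shape: Tuple[int, int]) -> int:
    """Treat float values <= 1.0 as a fraction of the frame area, otherwise as pixels.

    The <= 1.0 heuristic is an assumption for this sketch only.
    """
    height, width = frame_shape
    if isinstance(min_area, float) and min_area <= 1.0:
        return int(height * width * min_area)
    return int(min_area)


# Matches the expectation in the new config test: 10% of a 1920x1080 frame.
assert resolve_min_area(0.1, (1080, 1920)) == int(1080 * 1920 * 0.1)  # 207360
```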
```diff
@@ -97,6 +97,7 @@ class RecordingMaintainer(threading.Thread):
         self.object_recordings_info: dict[str, list] = defaultdict(list)
         self.audio_recordings_info: dict[str, list] = defaultdict(list)
         self.end_time_cache: dict[str, Tuple[datetime.datetime, float]] = {}
+        self.unexpected_cache_files_logged: bool = False
 
     async def move_files(self) -> None:
         cache_files = [
@@ -112,7 +113,14 @@ class RecordingMaintainer(threading.Thread):
         for cache in cache_files:
            cache_path = os.path.join(CACHE_DIR, cache)
            basename = os.path.splitext(cache)[0]
-           camera, date = basename.rsplit("@", maxsplit=1)
+           try:
+               camera, date = basename.rsplit("@", maxsplit=1)
+           except ValueError:
+               if not self.unexpected_cache_files_logged:
+                   logger.warning("Skipping unexpected files in cache")
+                   self.unexpected_cache_files_logged = True
+               continue
 
            start_time = datetime.datetime.strptime(
                date, CACHE_SEGMENT_FORMAT
            ).astimezone(datetime.timezone.utc)
@@ -164,7 +172,13 @@ class RecordingMaintainer(threading.Thread):
 
            cache_path = os.path.join(CACHE_DIR, cache)
            basename = os.path.splitext(cache)[0]
-           camera, date = basename.rsplit("@", maxsplit=1)
+           try:
+               camera, date = basename.rsplit("@", maxsplit=1)
+           except ValueError:
+               if not self.unexpected_cache_files_logged:
+                   logger.warning("Skipping unexpected files in cache")
+                   self.unexpected_cache_files_logged = True
+               continue
 
            # important that start_time is utc because recordings are stored and compared in utc
            start_time = datetime.datetime.strptime(
```
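Both hunks guard the same operation: cache segment filenames are expected to look like `<camera>@<timestamp>.mp4`, and anything that does not split on `@` is now skipped, with the warning emitted only once per process thanks to the new `unexpected_cache_files_logged` flag. A standalone sketch of that parse-and-warn-once pattern (function and names here are illustrative, not Frigate's):

```python
# Sketch: skip cache files that don't match "<camera>@<timestamp>" and warn only once.
import logging
import os

logger = logging.getLogger(__name__)


def parse_cache_segments(cache_files: list[str]) -> list[tuple[str, str]]:
    parsed = []
    warned = False
    for cache in cache_files:
        basename = os.path.splitext(cache)[0]
        try:
            camera, date = basename.rsplit("@", maxsplit=1)
        except ValueError:
            # rsplit returned a single element, so the two-value unpack failed.
            if not warned:
                logger.warning("Skipping unexpected files in cache")
                warned = True
            continue
        parsed.append((camera, date))
    return parsed


print(parse_cache_segments(["bad_filename.mp4", "camera@20210101000000+0000.mp4"]))
# [('camera', '20210101000000+0000')]
```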
```diff
@@ -632,6 +632,49 @@ class TestConfig(unittest.TestCase):
         )
         assert frigate_config.cameras["back"].zones["test"].color != (0, 0, 0)
 
+    def test_zone_filter_area_percent_converts_to_pixels(self):
+        config = {
+            "mqtt": {"host": "mqtt"},
+            "record": {
+                "alerts": {
+                    "retain": {
+                        "days": 20,
+                    }
+                }
+            },
+            "cameras": {
+                "back": {
+                    "ffmpeg": {
+                        "inputs": [
+                            {"path": "rtsp://10.0.0.1:554/video", "roles": ["detect"]}
+                        ]
+                    },
+                    "detect": {
+                        "height": 1080,
+                        "width": 1920,
+                        "fps": 5,
+                    },
+                    "zones": {
+                        "notification": {
+                            "coordinates": "0.03,1,0.025,0,0.626,0,0.643,1",
+                            "objects": ["person"],
+                            "filters": {"person": {"min_area": 0.1}},
+                        }
+                    },
+                }
+            },
+        }
+
+        frigate_config = FrigateConfig(**config)
+        expected_min_area = int(1080 * 1920 * 0.1)
+        assert (
+            frigate_config.cameras["back"]
+            .zones["notification"]
+            .filters["person"]
+            .min_area
+            == expected_min_area
+        )
+
     def test_zone_relative_matches_explicit(self):
         config = {
             "mqtt": {"host": "mqtt"},
```
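For reference, a 1920x1080 detect resolution has 2,073,600 pixels, so the fractional `min_area` of 0.1 used in this test resolves to `int(1920 * 1080 * 0.1)` = 207,360 pixels, which is exactly what the assertion checks.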
frigate/test/test_maintainer.py (new file, 66 lines)

```diff
@@ -0,0 +1,66 @@
+import sys
+import unittest
+from unittest.mock import MagicMock, patch
+
+# Mock complex imports before importing maintainer
+sys.modules["frigate.comms.inter_process"] = MagicMock()
+sys.modules["frigate.comms.detections_updater"] = MagicMock()
+sys.modules["frigate.comms.recordings_updater"] = MagicMock()
+sys.modules["frigate.config.camera.updater"] = MagicMock()
+
+# Now import the class under test
+from frigate.config import FrigateConfig  # noqa: E402
+from frigate.record.maintainer import RecordingMaintainer  # noqa: E402
+
+
+class TestMaintainer(unittest.IsolatedAsyncioTestCase):
+    async def test_move_files_survives_bad_filename(self):
+        config = MagicMock(spec=FrigateConfig)
+        config.cameras = {}
+        stop_event = MagicMock()
+
+        maintainer = RecordingMaintainer(config, stop_event)
+
+        # We need to mock end_time_cache to avoid key errors if logic proceeds
+        maintainer.end_time_cache = {}
+
+        # Mock filesystem
+        # One bad file, one good file
+        files = ["bad_filename.mp4", "camera@20210101000000+0000.mp4"]
+
+        with patch("os.listdir", return_value=files):
+            with patch("os.path.isfile", return_value=True):
+                with patch(
+                    "frigate.record.maintainer.psutil.process_iter", return_value=[]
+                ):
+                    with patch("frigate.record.maintainer.logger.warning") as warn:
+                        # Mock validate_and_move_segment to avoid further logic
+                        maintainer.validate_and_move_segment = MagicMock()
+
+                        try:
+                            await maintainer.move_files()
+                        except ValueError as e:
+                            if "not enough values to unpack" in str(e):
+                                self.fail("move_files() crashed on bad filename!")
+                            raise e
+                        except Exception:
+                            # Ignore other errors (like DB connection) as we only care about the unpack crash
+                            pass
+
+                        # The bad filename is encountered in multiple loops, but should only warn once.
+                        matching = [
+                            c
+                            for c in warn.call_args_list
+                            if c.args
+                            and isinstance(c.args[0], str)
+                            and "Skipping unexpected files in cache" in c.args[0]
+                        ]
+                        self.assertEqual(
+                            1,
+                            len(matching),
+                            f"Expected a single warning for unexpected files, got {len(matching)}",
+                        )
+
+
+if __name__ == "__main__":
+    unittest.main()
```
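The new test relies on registering stubs in `sys.modules` before importing `RecordingMaintainer`, so the maintainer module can be loaded without its heavier runtime dependencies. A tiny, self-contained illustration of that stubbing pattern, with a made-up module name:

```python
import sys
import unittest
from unittest.mock import MagicMock


class TestImportWithStubbedDependency(unittest.TestCase):
    def test_stub_resolves_instead_of_real_module(self):
        # Register a stub under a name that does not exist on disk. Any later
        # "import heavy_dependency_stub" (direct or transitive) resolves to it.
        sys.modules["heavy_dependency_stub"] = MagicMock()

        import heavy_dependency_stub  # served from sys.modules, not the filesystem

        # Attribute access and calls on the stub just return more mocks, which
        # is enough for an importing module to load.
        self.assertIsInstance(heavy_dependency_stub.SomeClass(), MagicMock)


if __name__ == "__main__":
    unittest.main()
```

With the Frigate stubs in place, the new file should be runnable with the standard runner, e.g. `python -m unittest frigate.test.test_maintainer`.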
```diff
@@ -206,7 +206,7 @@ function Exports() {
         >
           {Object.values(exports).map((item) => (
             <ExportCard
-              key={item.name}
+              key={item.id}
               className={
                 search == "" || filteredExports.includes(item) ? "" : "hidden"
               }
```