Mirror of https://github.com/blakeblackshear/frigate.git

Commit 1ec8bc0c46: Merge remote-tracking branch 'upstream/dev' into 230523-optimize-sync-records
@@ -52,7 +52,9 @@
 "mikestead.dotenv",
 "csstools.postcss",
 "blanu.vscode-styled-jsx",
-"bradlc.vscode-tailwindcss"
+"bradlc.vscode-tailwindcss",
+"ms-python.isort",
+"charliermarsh.ruff"
 ],
 "settings": {
 "remote.autoForwardPorts": false,
@@ -68,6 +70,7 @@
 "python.testing.unittestArgs": ["-v", "-s", "./frigate/test"],
 "files.trimTrailingWhitespace": true,
 "eslint.workingDirectories": ["./web"],
+"isort.args": ["--settings-path=./pyproject.toml"],
 "[python]": {
 "editor.defaultFormatter": "ms-python.black-formatter",
 "editor.formatOnSave": true
@@ -2,6 +2,12 @@
 set -euxo pipefail
 
+# Cleanup the old github host key
+sed -i -e '/AAAAB3NzaC1yc2EAAAABIwAAAQEAq2A7hRGmdnm9tUDbO9IDSwBK6TbQa+PXYPCPy6rbTrTtw7PHkccKrpp0yVhp5HdEIcKr6pLlVDBfOLX9QUsyCOV0wzfjIJNlGEYsdlLJizHhbn2mUjvSAHQqZETYP81eFzLQNnPHt4EVVUh7VfDESU84KezmD5QlWpXLmvU31\/yMf+Se8xhHTvKSCZIFImWwoG6mbUoWf9nzpIoaSjB+weqqUUmpaaasXVal72J+UX2B+2RPW3RcT0eOzQgqlJL3RKrTJvdsjE3JEAvGq3lGHSZXy28G3skua2SmVi\/w4yCE6gbODqnTWlg7+wC604ydGXA8VJiS5ap43JXiUFFAaQ==/d' ~/.ssh/known_hosts
+# Add new github host key
+curl -L https://api.github.com/meta | jq -r '.ssh_keys | .[]' | \
+sed -e 's/^/github.com /' >> ~/.ssh/known_hosts
+
 # Frigate normal container runs as root, so it have permission to create
 # the folders. But the devcontainer runs as the host user, so we need to
 # create the folders and give the host user permission to write to them.
.github/workflows/pull_request.yml (vendored, 35 changed lines)

@@ -65,24 +65,26 @@ jobs:
 - name: Check out the repository
 uses: actions/checkout@v3
 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-uses: actions/setup-python@v4.6.0
+uses: actions/setup-python@v4.6.1
 with:
 python-version: ${{ env.DEFAULT_PYTHON }}
 - name: Install requirements
 run: |
-pip install pip
+python3 -m pip install -U pip
-pip install -r requirements-dev.txt
+python3 -m pip install -r requirements-dev.txt
-- name: Lint
+- name: Check black
 run: |
-python3 -m black frigate --check
+black --check --diff frigate migrations docker *.py
+- name: Check isort
+run: |
+isort --check --diff frigate migrations docker *.py
+- name: Check ruff
+run: |
+ruff check frigate migrations docker *.py
 
 python_tests:
 runs-on: ubuntu-latest
 name: Python Tests
-strategy:
-fail-fast: false
-matrix:
-platform: [amd64,arm64]
 steps:
 - name: Check out code
 uses: actions/checkout@v3
@@ -94,22 +96,13 @@ jobs:
 - name: Build web
 run: npm run build
 working-directory: ./web
-- run: make version
 - name: Set up QEMU
 uses: docker/setup-qemu-action@v2
 - name: Set up Docker Buildx
 uses: docker/setup-buildx-action@v2
 - name: Build
-uses: docker/build-push-action@v4
+run: make
-with:
-context: .
-push: false
-load: true
-platforms: linux/${{ matrix.platform }}
-tags: |
-frigate:${{ matrix.platform }}
-target: frigate
 - name: Run mypy
-run: docker run --platform linux/${{ matrix.platform }} --rm --entrypoint=python3 frigate:${{ matrix.platform }} -u -m mypy --config-file frigate/mypy.ini frigate
+run: docker run --rm --entrypoint=python3 frigate:latest -u -m mypy --config-file frigate/mypy.ini frigate
 - name: Run tests
-run: docker run --platform linux/${{ matrix.platform }} --rm --entrypoint=python3 frigate:${{ matrix.platform }} -u -m unittest
+run: docker run --rm --entrypoint=python3 frigate:latest -u -m unittest
@@ -227,8 +227,8 @@ CMD ["sleep", "infinity"]
 
 
 # Frigate web build
-# force this to run on amd64 because QEMU is painfully slow
+# This should be architecture agnostic, so speed up the build on multiarch by not using QEMU.
-FROM --platform=linux/amd64 node:16 AS web-build
+FROM --platform=$BUILDPLATFORM node:16 AS web-build
 
 WORKDIR /work
 COPY web/package.json web/package-lock.json ./
benchmark.py (12 changed lines)

@@ -1,11 +1,11 @@
-import os
-from statistics import mean
-import multiprocessing as mp
-import numpy as np
 import datetime
+import multiprocessing as mp
+from statistics import mean
+
+import numpy as np
+
 from frigate.config import DetectorTypeEnum
 from frigate.object_detection import (
-LocalObjectDetector,
 ObjectDetectProcess,
 RemoteObjectDetector,
 load_labels,
@@ -53,7 +53,7 @@ def start(id, num_detections, detection_queue, event):
 frame_times = []
 for x in range(0, num_detections):
 start_frame = datetime.datetime.now().timestamp()
-detections = object_detector.detect(my_frame)
+object_detector.detect(my_frame)
 frame_times.append(datetime.datetime.now().timestamp() - start_frame)
 
 duration = datetime.datetime.now().timestamp() - start
@@ -3,11 +3,14 @@
 import json
 import os
 import sys
 
 import yaml
 
 sys.path.insert(0, "/opt/frigate")
-from frigate.const import BIRDSEYE_PIPE, BTBN_PATH
+from frigate.const import BIRDSEYE_PIPE, BTBN_PATH  # noqa: E402
-from frigate.ffmpeg_presets import parse_preset_hardware_acceleration_encode
+from frigate.ffmpeg_presets import (  # noqa: E402
+parse_preset_hardware_acceleration_encode,
+)
 
 sys.path.remove("/opt/frigate")
 
@@ -172,6 +172,27 @@ http {
 root /media/frigate;
 }
 
+location /exports/ {
+add_header 'Access-Control-Allow-Origin' "$http_origin" always;
+add_header 'Access-Control-Allow-Credentials' 'true';
+add_header 'Access-Control-Expose-Headers' 'Content-Length';
+if ($request_method = 'OPTIONS') {
+add_header 'Access-Control-Allow-Origin' "$http_origin";
+add_header 'Access-Control-Max-Age' 1728000;
+add_header 'Content-Type' 'text/plain charset=UTF-8';
+add_header 'Content-Length' 0;
+return 204;
+}
+
+types {
+video/mp4 mp4;
+}
+
+autoindex on;
+autoindex_format json;
+root /media/frigate;
+}
+
 location /ws {
 proxy_pass http://mqtt_ws/;
 proxy_http_version 1.1;
@@ -2,4 +2,4 @@
 
 This website is built using [Docusaurus 2](https://v2.docusaurus.io/), a modern static website generator.
 
-For installation and contributing instructions, please follow the [Contributing Docs](https://blakeblackshear.github.io/frigate/contributing).
+For installation and contributing instructions, please follow the [Contributing Docs](https://docs.frigate.video/development/contributing).
@@ -80,3 +80,7 @@ record:
 dog: 2
 car: 7
 ```
+
+## How do I export recordings?
+
+The export page in the Frigate WebUI allows for exporting real time clips with a designated start and stop time as well as exporting a timelapse for a designated start and stop time. These exports can take a while so it is important to leave the file until it is no longer in progress.
@@ -24,6 +24,7 @@ Frigate uses the following locations for read/write operations in the container.
 - `/config`: Used to store the Frigate config file and sqlite database. You will also see a few files alongside the database file while Frigate is running.
 - `/media/frigate/clips`: Used for snapshot storage. In the future, it will likely be renamed from `clips` to `snapshots`. The file structure here cannot be modified and isn't intended to be browsed or managed manually.
 - `/media/frigate/recordings`: Internal system storage for recording segments. The file structure here cannot be modified and isn't intended to be browsed or managed manually.
+- `/media/frigate/exports`: Storage for clips and timelapses that have been exported via the WebUI or API.
 - `/tmp/cache`: Cache location for recording segments. Initial recordings are written here before being checked and converted to mp4 and moved to the recordings folder.
 - `/dev/shm`: It is not recommended to modify this directory or map it with docker. This is the location for raw decoded frames in shared memory and it's size is impacted by the `shm-size` calculations below.
 
@@ -221,7 +222,7 @@ These settings were tested on DSM 7.1.1-42962 Update 4
 
 The `Execute container using high privilege` option needs to be enabled in order to give the frigate container the elevated privileges it may need.
 
 The `Enable auto-restart` option can be enabled if you want the container to automatically restart whenever it improperly shuts down due to an error.
 
-
+
 
@@ -271,6 +271,20 @@ HTTP Live Streaming Video on Demand URL for the specified event. Can be viewed in an application like VLC.
 
 HTTP Live Streaming Video on Demand URL for the camera with the specified time range. Can be viewed in an application like VLC.
 
+### `POST /api/export/<camera>/start/<start-timestamp>/end/<end-timestamp>`
+
+Export recordings from `start-timestamp` to `end-timestamp` for `camera` as a single mp4 file. These recordings will be exported to the `/media/frigate/exports` folder.
+
+It is also possible to export this recording as a timelapse.
+
+**Optional Body:**
+
+```json
+{
+"playback": "realtime", // playback factor: realtime or timelapse_25x
+}
+```
+
 ### `GET /api/<camera_name>/recordings/summary`
 
 Hourly summary of recordings data for a camera.
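As a quick illustration of the export endpoint added in the hunk above, the sketch below posts an export request and then reads the JSON folder listing exposed by the new nginx `/exports/` location. It is only a sketch: the host/port, camera name, and timestamps are assumed placeholder values, not part of this commit.

```python
import time

import requests

FRIGATE = "http://frigate.local:5000"  # assumed host and port; adjust for your install

# Export the last hour of a hypothetical "front_door" camera as a 25x timelapse.
end = int(time.time())
start = end - 3600
resp = requests.post(
    f"{FRIGATE}/api/export/front_door/start/{start}/end/{end}",
    json={"playback": "timelapse_25x"},  # omit the body for a realtime export
)
resp.raise_for_status()

# The /exports/ location enables autoindex_format json, so the export folder
# can be listed as JSON once the export has finished writing.
print(requests.get(f"{FRIGATE}/exports/").json())
```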
docs/package-lock.json (generated, 1111 changed lines)
File diff suppressed because it is too large.

@@ -14,8 +14,8 @@
 "write-heading-ids": "docusaurus write-heading-ids"
 },
 "dependencies": {
-"@docusaurus/core": "^2.4.0",
+"@docusaurus/core": "^2.4.1",
-"@docusaurus/preset-classic": "^2.4.0",
+"@docusaurus/preset-classic": "^2.4.1",
 "@mdx-js/react": "^1.6.22",
 "clsx": "^1.2.1",
 "prism-react-renderer": "^1.3.5",
@@ -1,13 +1,14 @@
 import faulthandler
-from flask import cli
-
-faulthandler.enable()
 import threading
 
-threading.current_thread().name = "frigate"
+from flask import cli
 
 from frigate.app import FrigateApp
 
+faulthandler.enable()
+
+threading.current_thread().name = "frigate"
 
 cli.show_server_banner = lambda *x: None
 
 if __name__ == "__main__":
@@ -1,16 +1,16 @@
 import logging
 import multiprocessing as mp
-from multiprocessing.queues import Queue
-from multiprocessing.synchronize import Event as MpEvent
 import os
 import shutil
 import signal
 import sys
-from typing import Optional
-from types import FrameType
-import psutil
 
 import traceback
+from multiprocessing.queues import Queue
+from multiprocessing.synchronize import Event as MpEvent
+from types import FrameType
+from typing import Optional
+
+import psutil
 from peewee_migrate import Router
 from playhouse.sqlite_ext import SqliteExtDatabase
 from playhouse.sqliteq import SqliteQueueDatabase
@@ -24,16 +24,17 @@ from frigate.const import (
 CLIPS_DIR,
 CONFIG_DIR,
 DEFAULT_DB_PATH,
+EXPORT_DIR,
 MODEL_CACHE_DIR,
 RECORD_DIR,
 )
-from frigate.object_detection import ObjectDetectProcess
 from frigate.events.cleanup import EventCleanup
 from frigate.events.external import ExternalEventProcessor
 from frigate.events.maintainer import EventProcessor
 from frigate.http import create_app
 from frigate.log import log_process, root_configurer
 from frigate.models import Event, Recordings, Timeline
+from frigate.object_detection import ObjectDetectProcess
 from frigate.object_processing import TrackedObjectProcessor
 from frigate.output import output_frames
 from frigate.plus import PlusApi
@@ -42,10 +43,10 @@ from frigate.record.record import manage_recordings
 from frigate.stats import StatsEmitter, stats_init
 from frigate.storage import StorageMaintainer
 from frigate.timeline import TimelineProcessor
+from frigate.types import CameraMetricsTypes, RecordMetricsTypes
 from frigate.version import VERSION
 from frigate.video import capture_camera, track_camera
 from frigate.watchdog import FrigateWatchdog
-from frigate.types import CameraMetricsTypes, RecordMetricsTypes
 
 logger = logging.getLogger(__name__)
 
@@ -68,7 +69,14 @@ class FrigateApp:
 os.environ[key] = value
 
 def ensure_dirs(self) -> None:
-for d in [CONFIG_DIR, RECORD_DIR, CLIPS_DIR, CACHE_DIR, MODEL_CACHE_DIR]:
+for d in [
+CONFIG_DIR,
+RECORD_DIR,
+CLIPS_DIR,
+CACHE_DIR,
+MODEL_CACHE_DIR,
+EXPORT_DIR,
+]:
 if not os.path.exists(d) and not os.path.islink(d):
 logger.info(f"Creating directory: {d}")
 os.makedirs(d)
@@ -133,10 +141,10 @@ class FrigateApp:
 for log, level in self.config.logger.logs.items():
 logging.getLogger(log).setLevel(level.value.upper())
 
-if not "werkzeug" in self.config.logger.logs:
+if "werkzeug" not in self.config.logger.logs:
 logging.getLogger("werkzeug").setLevel("ERROR")
 
-if not "ws4py" in self.config.logger.logs:
+if "ws4py" not in self.config.logger.logs:
 logging.getLogger("ws4py").setLevel("ERROR")
 
 def init_queues(self) -> None:
@@ -294,7 +302,7 @@ class FrigateApp:
 def start_video_output_processor(self) -> None:
 output_processor = mp.Process(
 target=output_frames,
-name=f"output_processor",
+name="output_processor",
 args=(
 self.config,
 self.video_output_queue,
@@ -467,7 +475,7 @@ class FrigateApp:
 self.stop()
 
 def stop(self) -> None:
-logger.info(f"Stopping...")
+logger.info("Stopping...")
 self.stop_event.set()
 
 for detector in self.detectors.values():
@@ -1,17 +1,14 @@
 """Handle communication between Frigate and other applications."""
 
 import logging
+from abc import ABC, abstractmethod
 from typing import Any, Callable
 
-from abc import ABC, abstractmethod
-
-
 from frigate.config import FrigateConfig
-from frigate.ptz import OnvifController, OnvifCommandEnum
+from frigate.ptz import OnvifCommandEnum, OnvifController
 from frigate.types import CameraMetricsTypes, RecordMetricsTypes
 from frigate.util import restart_frigate
 
 
 logger = logging.getLogger(__name__)
 
@@ -72,7 +69,7 @@ class Dispatcher:
 camera_name = topic.split("/")[-3]
 command = topic.split("/")[-2]
 self._camera_settings_handlers[command](camera_name, payload)
-except IndexError as e:
+except IndexError:
 logger.error(f"Received invalid set command: {topic}")
 return
 elif topic.endswith("ptz"):
@@ -80,7 +77,7 @@ class Dispatcher:
 # example /cam_name/ptz payload=MOVE_UP|MOVE_DOWN|STOP...
 camera_name = topic.split("/")[-2]
 self._on_ptz_command(camera_name, payload)
-except IndexError as e:
+except IndexError:
 logger.error(f"Received invalid ptz command: {topic}")
 return
 elif topic == "restart":
@@ -128,7 +125,7 @@ class Dispatcher:
 elif payload == "OFF":
 if self.camera_metrics[camera_name]["detection_enabled"].value:
 logger.error(
-f"Turning off motion is not allowed when detection is enabled."
+"Turning off motion is not allowed when detection is enabled."
 )
 return
 
@@ -196,7 +193,7 @@ class Dispatcher:
 if payload == "ON":
 if not self.config.cameras[camera_name].record.enabled_in_config:
 logger.error(
-f"Recordings must be enabled in the config to be turned on via MQTT."
+"Recordings must be enabled in the config to be turned on via MQTT."
 )
 return
 
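The ptz branch touched above dispatches payloads received on `<topic_prefix>/<camera_name>/ptz`, as the inline comment in the diff notes. The sketch below publishes one such command with paho-mqtt (already a Frigate dependency). The broker hostname and camera name are placeholders, and "frigate" is assumed to be the configured topic prefix; adjust all three for a real setup.

```python
import paho.mqtt.publish as publish

# Nudge a hypothetical "front_door" camera upward through the ptz topic that
# Dispatcher handles above (payloads like MOVE_UP, MOVE_DOWN, STOP).
publish.single(
    topic="frigate/front_door/ptz",
    payload="MOVE_UP",
    hostname="mqtt.local",  # assumed broker address
)
```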
@@ -1,6 +1,5 @@
 import logging
 import threading
-
 from typing import Any, Callable
 
 import paho.mqtt.client as mqtt
@@ -8,7 +7,6 @@ import paho.mqtt.client as mqtt
 from frigate.comms.dispatcher import Communicator
 from frigate.config import FrigateConfig
 
-
 logger = logging.getLogger(__name__)
 
 
@@ -177,10 +175,10 @@ class MqttClient(Communicator):  # type: ignore[misc]
 f"{self.mqtt_config.topic_prefix}/restart", self.on_mqtt_command
 )
 
-if not self.mqtt_config.tls_ca_certs is None:
+if self.mqtt_config.tls_ca_certs is not None:
 if (
-not self.mqtt_config.tls_client_cert is None
+self.mqtt_config.tls_client_cert is not None
-and not self.mqtt_config.tls_client_key is None
+and self.mqtt_config.tls_client_key is not None
 ):
 self.client.tls_set(
 self.mqtt_config.tls_ca_certs,
@@ -189,9 +187,9 @@ class MqttClient(Communicator):  # type: ignore[misc]
 )
 else:
 self.client.tls_set(self.mqtt_config.tls_ca_certs)
-if not self.mqtt_config.tls_insecure is None:
+if self.mqtt_config.tls_insecure is not None:
 self.client.tls_insecure_set(self.mqtt_config.tls_insecure)
-if not self.mqtt_config.user is None:
+if self.mqtt_config.user is not None:
 self.client.username_pw_set(
 self.mqtt_config.user, password=self.mqtt_config.password
 )
@@ -3,10 +3,9 @@
 import json
 import logging
 import threading
-
 from typing import Callable
 
 from wsgiref.simple_server import make_server
 
 from ws4py.server.wsgirefserver import (
 WebSocketWSGIHandler,
 WebSocketWSGIRequestHandler,
@@ -18,7 +17,6 @@ from ws4py.websocket import WebSocket
 from frigate.comms.dispatcher import Communicator
 from frigate.config import FrigateConfig
 
-
 logger = logging.getLogger(__name__)
 
 
@@ -45,7 +43,7 @@ class WebSocketClient(Communicator):  # type: ignore[misc]
 "topic": json_message.get("topic"),
 "payload": json_message.get("payload"),
 }
-except Exception as e:
+except Exception:
 logger.warning(
 f"Unable to parse websocket message as valid json: {message.data.decode('utf-8')}"
 )
@@ -82,7 +80,7 @@ class WebSocketClient(Communicator):  # type: ignore[misc]
 "payload": payload,
 }
 )
-except Exception as e:
+except Exception:
 # if the payload can't be decoded don't relay to clients
 logger.debug(f"payload for {topic} wasn't text. Skipping...")
 return
@@ -8,26 +8,14 @@ from typing import Dict, List, Optional, Tuple, Union
 
 import matplotlib.pyplot as plt
 import numpy as np
-import yaml
-from pydantic import BaseModel, Extra, Field, validator, parse_obj_as
+from pydantic import BaseModel, Extra, Field, parse_obj_as, validator
 from pydantic.fields import PrivateAttr
 
-from frigate.const import (
-CACHE_DIR,
-DEFAULT_DB_PATH,
-REGEX_CAMERA_NAME,
-YAML_EXT,
-)
+from frigate.const import CACHE_DIR, DEFAULT_DB_PATH, REGEX_CAMERA_NAME, YAML_EXT
+from frigate.detectors import DetectorConfig, ModelConfig
+from frigate.detectors.detector_config import InputTensorEnum  # noqa: F401
+from frigate.detectors.detector_config import PixelFormatEnum  # noqa: F401
 from frigate.detectors.detector_config import BaseDetectorConfig
-from frigate.plus import PlusApi
-from frigate.util import (
-create_mask,
-deep_merge,
-get_ffmpeg_arg_list,
-escape_special_characters,
-load_config_with_no_duplicates,
-load_labels,
-)
 from frigate.ffmpeg_presets import (
 parse_preset_hardware_acceleration_decode,
 parse_preset_hardware_acceleration_scale,
@@ -35,14 +23,14 @@ from frigate.ffmpeg_presets import (
 parse_preset_output_record,
 parse_preset_output_rtmp,
 )
-from frigate.detectors import (
-PixelFormatEnum,
-InputTensorEnum,
-ModelConfig,
-DetectorConfig,
+from frigate.plus import PlusApi
+from frigate.util import (
+create_mask,
+deep_merge,
+escape_special_characters,
+get_ffmpeg_arg_list,
+load_config_with_no_duplicates,
 )
-from frigate.version import VERSION
 
 
 logger = logging.getLogger(__name__)
 
@@ -487,7 +475,7 @@ class CameraFfmpegConfig(FfmpegConfig):
 if len(roles) > len(roles_set):
 raise ValueError("Each input role may only be used once.")
 
-if not "detect" in roles:
+if "detect" not in roles:
 raise ValueError("The detect role is required.")
 
 return v
@@ -776,12 +764,12 @@ def verify_config_roles(camera_config: CameraConfig) -> None:
 set([r for i in camera_config.ffmpeg.inputs for r in i.roles])
 )
 
-if camera_config.record.enabled and not "record" in assigned_roles:
+if camera_config.record.enabled and "record" not in assigned_roles:
 raise ValueError(
 f"Camera {camera_config.name} has record enabled, but record is not assigned to an input."
 )
 
-if camera_config.rtmp.enabled and not "rtmp" in assigned_roles:
+if camera_config.rtmp.enabled and "rtmp" not in assigned_roles:
 raise ValueError(
 f"Camera {camera_config.name} has rtmp enabled, but rtmp is not assigned to an input."
 )
@@ -1062,7 +1050,7 @@ class FrigateConfig(FrigateBaseModel):
 config.model.dict(exclude_unset=True),
 )
 
-if not "path" in merged_model:
+if "path" not in merged_model:
 if detector_config.type == "cpu":
 merged_model["path"] = "/cpu_model.tflite"
 elif detector_config.type == "edgetpu":
@@ -4,6 +4,7 @@ MODEL_CACHE_DIR = f"{CONFIG_DIR}/model_cache"
 BASE_DIR = "/media/frigate"
 CLIPS_DIR = f"{BASE_DIR}/clips"
 RECORD_DIR = f"{BASE_DIR}/recordings"
+EXPORT_DIR = f"{BASE_DIR}/exports"
 BIRDSEYE_PIPE = "/tmp/cache/birdseye"
 CACHE_DIR = "/tmp/cache"
 YAML_EXT = (".yaml", ".yml")
@@ -13,9 +14,9 @@ BTBN_PATH = "/usr/lib/btbn-ffmpeg"
 
 # Regex Consts
 
-REGEX_CAMERA_NAME = "^[a-zA-Z0-9_-]+$"
+REGEX_CAMERA_NAME = r"^[a-zA-Z0-9_-]+$"
-REGEX_RTSP_CAMERA_USER_PASS = ":\/\/[a-zA-Z0-9_-]+:[\S]+@"
+REGEX_RTSP_CAMERA_USER_PASS = r":\/\/[a-zA-Z0-9_-]+:[\S]+@"
-REGEX_HTTP_CAMERA_USER_PASS = "user=[a-zA-Z0-9_-]+&password=[\S]+"
+REGEX_HTTP_CAMERA_USER_PASS = r"user=[a-zA-Z0-9_-]+&password=[\S]+"
 
 # Known Driver Names
 
@@ -28,3 +29,4 @@ DRIVER_INTEL_iHD = "iHD"
 
 MAX_SEGMENT_DURATION = 600
 SECONDS_IN_DAY = 60 * 60 * 24
+MAX_PLAYLIST_SECONDS = 7200  # support 2 hour segments for a single playlist to account for cameras with inconsistent segment times
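The regex constants in the hunk above only change to raw strings, so the backslash escapes now reach the regex engine unmodified. As a small, self-contained check of one of them, the snippet below masks the credentials in an RTSP URL; the pattern is copied from the diff, while the sample URL and the `re.sub` usage are illustrative and not taken from this commit.

```python
import re

# Pattern exactly as defined in the diff above (now a raw string).
REGEX_RTSP_CAMERA_USER_PASS = r":\/\/[a-zA-Z0-9_-]+:[\S]+@"

url = "rtsp://admin:s3cret@192.168.1.10:554/stream"  # made-up example URL
print(re.sub(REGEX_RTSP_CAMERA_USER_PASS, "://*:*@", url))
# -> rtsp://*:*@192.168.1.10:554/stream
```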
@@ -1,13 +1,7 @@
 import logging
 
-from .detection_api import DetectionApi
-from .detector_config import (
-PixelFormatEnum,
-InputTensorEnum,
-ModelConfig,
-)
-from .detector_types import DetectorTypeEnum, api_types, DetectorConfig
+from .detector_config import InputTensorEnum, ModelConfig, PixelFormatEnum  # noqa: F401
+from .detector_types import DetectorConfig, DetectorTypeEnum, api_types  # noqa: F401
 
 
 logger = logging.getLogger(__name__)

@@ -1,7 +1,6 @@
 import logging
 from abc import ABC, abstractmethod
 
-
 logger = logging.getLogger(__name__)
 
 
@@ -1,20 +1,18 @@
 import hashlib
 import json
 import logging
-from enum import Enum
 import os
-from typing import Dict, List, Optional, Tuple, Union, Literal
+from enum import Enum
+from typing import Dict, Optional, Tuple
 
 
-import requests
 import matplotlib.pyplot as plt
-from pydantic import BaseModel, Extra, Field, validator
+import requests
+from pydantic import BaseModel, Extra, Field
 from pydantic.fields import PrivateAttr
 
 from frigate.plus import PlusApi
 
 from frigate.util import load_labels
 
 
 logger = logging.getLogger(__name__)
@@ -1,16 +1,16 @@
-import logging
 import importlib
+import logging
 import pkgutil
-from typing import Union
-from typing_extensions import Annotated
 from enum import Enum
+from typing import Union
 
 from pydantic import Field
+from typing_extensions import Annotated
 
 from . import plugins
 from .detection_api import DetectionApi
 from .detector_config import BaseDetectorConfig
 
 
 logger = logging.getLogger(__name__)
@@ -1,10 +1,11 @@
 import logging
 
 import numpy as np
+from pydantic import Field
+from typing_extensions import Literal
 
 from frigate.detectors.detection_api import DetectionApi
 from frigate.detectors.detector_config import BaseDetectorConfig
-from typing_extensions import Literal
-from pydantic import Extra, Field
 
 try:
 from tflite_runtime.interpreter import Interpreter
@@ -1,14 +1,14 @@
+import io
 import logging
 
 import numpy as np
 import requests
-import io
+from PIL import Image
+from pydantic import Field
+from typing_extensions import Literal
 
 from frigate.detectors.detection_api import DetectionApi
 from frigate.detectors.detector_config import BaseDetectorConfig
-from typing_extensions import Literal
-from pydantic import Extra, Field
-from PIL import Image
 
 
 logger = logging.getLogger(__name__)
@@ -64,11 +64,11 @@ class DeepStack(DetectionApi):
 for i, detection in enumerate(response_json.get("predictions")):
 logger.debug(f"Response: {detection}")
 if detection["confidence"] < 0.4:
-logger.debug(f"Break due to confidence < 0.4")
+logger.debug("Break due to confidence < 0.4")
 break
 label = self.get_label_index(detection["label"])
 if label < 0:
-logger.debug(f"Break due to unknown label")
+logger.debug("Break due to unknown label")
 break
 detections[i] = [
 label,
@@ -1,10 +1,11 @@
 import logging
 
 import numpy as np
+from pydantic import Field
+from typing_extensions import Literal
 
 from frigate.detectors.detection_api import DetectionApi
 from frigate.detectors.detector_config import BaseDetectorConfig
-from typing_extensions import Literal
-from pydantic import Extra, Field
 
 try:
 from tflite_runtime.interpreter import Interpreter, load_delegate
@@ -1,12 +1,12 @@
 import logging
 
 import numpy as np
 import openvino.runtime as ov
+from pydantic import Field
+from typing_extensions import Literal
 
 from frigate.detectors.detection_api import DetectionApi
 from frigate.detectors.detector_config import BaseDetectorConfig, ModelTypeEnum
-from typing_extensions import Literal
-from pydantic import Extra, Field
 
 
 logger = logging.getLogger(__name__)
@@ -41,7 +41,7 @@ class OvDetector(DetectionApi):
 tensor_shape = self.interpreter.output(self.output_indexes).shape
 logger.info(f"Model Output-{self.output_indexes} Shape: {tensor_shape}")
 self.output_indexes += 1
-except:
+except Exception:
 logger.info(f"Model has {self.output_indexes} Output Tensors")
 break
 if self.ov_model_type == ModelTypeEnum.yolox:
@@ -1,6 +1,6 @@
+import ctypes
 import logging
 
-import ctypes
 import numpy as np
 
 try:
@@ -8,13 +8,14 @@ try:
 from cuda import cuda
 
 TRT_SUPPORT = True
-except ModuleNotFoundError as e:
+except ModuleNotFoundError:
 TRT_SUPPORT = False
 
+from pydantic import Field
+from typing_extensions import Literal
+
 from frigate.detectors.detection_api import DetectionApi
 from frigate.detectors.detector_config import BaseDetectorConfig
-from typing_extensions import Literal
-from pydantic import Field
 
 logger = logging.getLogger(__name__)
@@ -172,7 +173,7 @@ class TensorRtDetector(DetectionApi):
 if not self.context.execute_async_v2(
 bindings=self.bindings, stream_handle=self.stream
 ):
-logger.warn(f"Execute returned false")
+logger.warn("Execute returned false")
 
 # Transfer predictions back from the GPU.
 [
@@ -4,17 +4,13 @@ import datetime
 import logging
 import os
 import threading
+from multiprocessing.synchronize import Event as MpEvent
 from pathlib import Path
 
-from peewee import fn
-
 from frigate.config import FrigateConfig
 from frigate.const import CLIPS_DIR
 from frigate.models import Event
 
-from multiprocessing.synchronize import Event as MpEvent
-
 
 logger = logging.getLogger(__name__)
@@ -45,9 +41,9 @@ class EventCleanup(threading.Thread):
 )
 
 # loop over object types in db
-for l in distinct_labels:
+for event in distinct_labels:
 # get expiration time for this label
-expire_days = retain_config.objects.get(l.label, retain_config.default)
+expire_days = retain_config.objects.get(event.label, retain_config.default)
 expire_after = (
 datetime.datetime.now() - datetime.timedelta(days=expire_days)
 ).timestamp()
@@ -55,8 +51,8 @@ class EventCleanup(threading.Thread):
 expired_events = Event.select().where(
 Event.camera.not_in(self.camera_keys),
 Event.start_time < expire_after,
-Event.label == l.label,
+Event.label == event.label,
-Event.retain_indefinitely == False,
+Event.retain_indefinitely is False,
 )
 # delete the media from disk
 for event in expired_events:
@@ -75,8 +71,8 @@ class EventCleanup(threading.Thread):
 update_query = Event.update(update_params).where(
 Event.camera.not_in(self.camera_keys),
 Event.start_time < expire_after,
-Event.label == l.label,
+Event.label == event.label,
-Event.retain_indefinitely == False,
+Event.retain_indefinitely is False,
 )
 update_query.execute()
 
@@ -92,9 +88,11 @@ class EventCleanup(threading.Thread):
 )
 
 # loop over object types in db
-for l in distinct_labels:
+for event in distinct_labels:
 # get expiration time for this label
-expire_days = retain_config.objects.get(l.label, retain_config.default)
+expire_days = retain_config.objects.get(
+event.label, retain_config.default
+)
 expire_after = (
 datetime.datetime.now() - datetime.timedelta(days=expire_days)
 ).timestamp()
@@ -102,8 +100,8 @@ class EventCleanup(threading.Thread):
 expired_events = Event.select().where(
 Event.camera == name,
 Event.start_time < expire_after,
-Event.label == l.label,
+Event.label == event.label,
-Event.retain_indefinitely == False,
+Event.retain_indefinitely is False,
 )
 # delete the grabbed clips from disk
 for event in expired_events:
@@ -121,8 +119,8 @@ class EventCleanup(threading.Thread):
 update_query = Event.update(update_params).where(
 Event.camera == name,
 Event.start_time < expire_after,
-Event.label == l.label,
+Event.label == event.label,
-Event.retain_indefinitely == False,
+Event.retain_indefinitely is False,
 )
 update_query.execute()
 
@@ -131,9 +129,9 @@ class EventCleanup(threading.Thread):
 select id,
 label,
 camera,
 has_snapshot,
 has_clip,
 row_number() over (
 partition by label, camera, round(start_time/5,0)*5
 order by end_time-start_time desc
 ) as copy_number
@@ -169,8 +167,8 @@ class EventCleanup(threading.Thread):
 
 # drop events from db where has_clip and has_snapshot are false
 delete_query = Event.delete().where(
-Event.has_clip == False, Event.has_snapshot == False
+Event.has_clip is False, Event.has_snapshot is False
 )
 delete_query.execute()
 
-logger.info(f"Exiting event cleanup...")
+logger.info("Exiting event cleanup...")
@@ -1,17 +1,15 @@
 """Handle external events created by the user."""
 
 import base64
-import cv2
 import datetime
-import glob
 import logging
 import os
 import random
 import string
+from multiprocessing.queues import Queue
 from typing import Optional
 
-from multiprocessing.queues import Queue
+import cv2
 
 from frigate.config import CameraConfig, FrigateConfig
 from frigate.const import CLIPS_DIR
@@ -2,20 +2,16 @@ import datetime
 import logging
 import queue
 import threading
 
 from enum import Enum
+from multiprocessing.queues import Queue
-from peewee import fn
+from multiprocessing.synchronize import Event as MpEvent
+from typing import Dict
 
 from frigate.config import EventsConfig, FrigateConfig
 from frigate.models import Event
 from frigate.types import CameraMetricsTypes
 from frigate.util import to_relative_box
 
-from multiprocessing.queues import Queue
-from multiprocessing.synchronize import Event as MpEvent
-from typing import Dict
 
 logger = logging.getLogger(__name__)
 
@@ -65,7 +61,7 @@ class EventProcessor(threading.Thread):
 def run(self) -> None:
 # set an end_time on events without an end_time on startup
 Event.update(end_time=Event.start_time + 30).where(
-Event.end_time == None
+Event.end_time is None
 ).execute()
 
 while not self.stop_event.is_set():
@@ -99,9 +95,9 @@ class EventProcessor(threading.Thread):
 
 # set an end_time on events without an end_time before exiting
 Event.update(end_time=datetime.datetime.now().timestamp()).where(
-Event.end_time == None
+Event.end_time is None
 ).execute()
-logger.info(f"Exiting event processor...")
+logger.info("Exiting event processor...")
 
 def handle_object_detection(
 self,
@ -2,13 +2,12 @@
|
|||||||
|
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
|
from enum import Enum
|
||||||
from typing import Any
|
from typing import Any
|
||||||
|
|
||||||
from frigate.version import VERSION
|
|
||||||
from frigate.const import BTBN_PATH
|
from frigate.const import BTBN_PATH
|
||||||
from frigate.util import vainfo_hwaccel
|
from frigate.util import vainfo_hwaccel
|
||||||
|
from frigate.version import VERSION
|
||||||
|
|
 logger = logging.getLogger(__name__)
 
@@ -118,7 +117,7 @@ PRESETS_HW_ACCEL_SCALE = {
     "default": "-r {0} -s {1}x{2}",
 }
 
-PRESETS_HW_ACCEL_ENCODE = {
+PRESETS_HW_ACCEL_ENCODE_BIRDSEYE = {
     "preset-rpi-32-h264": "ffmpeg -hide_banner {0} -c:v h264_v4l2m2m {1}",
     "preset-rpi-64-h264": "ffmpeg -hide_banner {0} -c:v h264_v4l2m2m {1}",
     "preset-vaapi": "ffmpeg -hide_banner -hwaccel vaapi -hwaccel_output_format vaapi -hwaccel_device {2} {0} -c:v h264_vaapi -g 50 -bf 0 -profile:v high -level:v 4.1 -sei:v 0 -an -vf format=vaapi|nv12,hwupload {1}",
@@ -129,6 +128,17 @@ PRESETS_HW_ACCEL_ENCODE = {
     "default": "ffmpeg -hide_banner {0} -c:v libx264 -g 50 -profile:v high -level:v 4.1 -preset:v superfast -tune:v zerolatency {1}",
 }
 
+PRESETS_HW_ACCEL_ENCODE_TIMELAPSE = {
+    "preset-rpi-32-h264": "ffmpeg -hide_banner {0} -c:v h264_v4l2m2m {1}",
+    "preset-rpi-64-h264": "ffmpeg -hide_banner {0} -c:v h264_v4l2m2m {1}",
+    "preset-vaapi": "ffmpeg -hide_banner -hwaccel vaapi -hwaccel_output_format vaapi -hwaccel_device {2} {0} -c:v h264_vaapi {1}",
+    "preset-intel-qsv-h264": "ffmpeg -hide_banner {0} -c:v h264_qsv -g 50 -bf 0 -profile:v high -level:v 4.1 -async_depth:v 1 {1}",
+    "preset-intel-qsv-h265": "ffmpeg -hide_banner {0} -c:v hevc_qsv -g 50 -bf 0 -profile:v high -level:v 4.1 -async_depth:v 1 {1}",
+    "preset-nvidia-h264": "ffmpeg -hide_banner -hwaccel cuda -hwaccel_output_format cuda -extra_hw_frames 8 {0} -c:v h264_nvenc {1}",
+    "preset-nvidia-h265": "ffmpeg -hide_banner -hwaccel cuda -hwaccel_output_format cuda -extra_hw_frames 8 {0} -c:v hevc_nvenc {1}",
+    "default": "ffmpeg -hide_banner {0} -c:v libx264 -preset:v ultrafast -tune:v zerolatency {1}",
+}
+
 
 def parse_preset_hardware_acceleration_decode(arg: Any) -> list[str]:
     """Return the correct preset if in preset format otherwise return None."""
@@ -163,12 +173,24 @@ def parse_preset_hardware_acceleration_scale(
     return scale
 
 
-def parse_preset_hardware_acceleration_encode(arg: Any, input: str, output: str) -> str:
-    """Return the correct scaling preset or default preset if none is set."""
-    if not isinstance(arg, str):
-        return PRESETS_HW_ACCEL_ENCODE["default"].format(input, output)
-
-    return PRESETS_HW_ACCEL_ENCODE.get(arg, PRESETS_HW_ACCEL_ENCODE["default"]).format(
+class EncodeTypeEnum(str, Enum):
+    birdseye = "birdseye"
+    timelapse = "timelapse"
+
+
+def parse_preset_hardware_acceleration_encode(
+    arg: Any, input: str, output: str, type: EncodeTypeEnum = EncodeTypeEnum.birdseye
+) -> str:
+    """Return the correct scaling preset or default preset if none is set."""
+    if type == EncodeTypeEnum.birdseye:
+        arg_map = PRESETS_HW_ACCEL_ENCODE_BIRDSEYE
+    elif type == EncodeTypeEnum.timelapse:
+        arg_map = PRESETS_HW_ACCEL_ENCODE_TIMELAPSE
+
+    if not isinstance(arg, str):
+        return arg_map["default"].format(input, output)
+
+    return arg_map.get(arg, arg_map["default"]).format(
         input,
         output,
         _gpu_selector.get_selected_gpu(),
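Note: a minimal sketch of how the reworked encode helper could be called for a timelapse encode; the input/output argument strings below are illustrative placeholders, not values taken from this change.

from frigate.ffmpeg_presets import (
    EncodeTypeEnum,
    parse_preset_hardware_acceleration_encode,
)

# hypothetical ffmpeg input/output fragments for illustration only
ffmpeg_input = "-y -i /dev/stdin"
ffmpeg_output = "-vf setpts=0.04*PTS -r 30 -an /tmp/example_timelapse.mp4"

# an unknown preset (or non-string hwaccel args) falls back to the libx264 "default" entry
cmd = parse_preset_hardware_acceleration_encode(
    "preset-vaapi", ffmpeg_input, ffmpeg_output, EncodeTypeEnum.timelapse
)
print(cmd)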
@@ -1,23 +1,20 @@
 import base64
-from datetime import datetime, timedelta, timezone
 import copy
-import logging
 import glob
 import json
+import logging
 import os
 import subprocess as sp
-import pytz
 import time
 import traceback
+from datetime import datetime, timedelta, timezone
 from functools import reduce
 from pathlib import Path
-from tzlocal import get_localzone_name
 from urllib.parse import unquote
 
 import cv2
-
 import numpy as np
+import pytz
 from flask import (
     Blueprint,
     Flask,
@@ -27,26 +24,27 @@ from flask import (
     make_response,
     request,
 )
-from peewee import SqliteDatabase, operator, fn, DoesNotExist
+from peewee import DoesNotExist, SqliteDatabase, fn, operator
 from playhouse.shortcuts import model_to_dict
+from tzlocal import get_localzone_name
 
 from frigate.config import FrigateConfig
 from frigate.const import CLIPS_DIR, MAX_SEGMENT_DURATION, RECORD_DIR
-from frigate.models import Event, Recordings, Timeline
 from frigate.events.external import ExternalEventProcessor
+from frigate.models import Event, Recordings, Timeline
 from frigate.object_processing import TrackedObject
 from frigate.plus import PlusApi
 from frigate.ptz import OnvifController
+from frigate.record.export import PlaybackFactorEnum, RecordingExporter
 from frigate.stats import stats_snapshot
+from frigate.storage import StorageMaintainer
 from frigate.util import (
     clean_camera_user_pass,
     ffprobe_stream,
+    get_tz_modifiers,
     restart_frigate,
     vainfo_hwaccel,
-    get_tz_modifiers,
 )
-from frigate.storage import StorageMaintainer
 from frigate.version import VERSION
 
 logger = logging.getLogger(__name__)
@@ -105,10 +103,10 @@ def events_summary():
 
     clauses = []
 
-    if not has_clip is None:
+    if has_clip is not None:
         clauses.append((Event.has_clip == has_clip))
 
-    if not has_snapshot is None:
+    if has_snapshot is not None:
         clauses.append((Event.has_snapshot == has_snapshot))
 
     if len(clauses) == 0:
@@ -253,7 +251,7 @@ def send_to_plus(id):
         event.plus_id = plus_id
         event.save()
 
-    if not include_annotation is None:
+    if include_annotation is not None:
         box = event.data["box"]
 
         try:
@@ -296,12 +294,12 @@ def false_positive(id):
 
     # events from before the conversion to relative dimensions cant include annotations
     if event.data.get("box") is None:
-        message = f"Events prior to 0.13 cannot be submitted as false positives"
+        message = "Events prior to 0.13 cannot be submitted as false positives"
         logger.error(message)
         return make_response(jsonify({"success": False, "message": message}), 400)
 
     if event.false_positive:
-        message = f"False positive already submitted to Frigate+"
+        message = "False positive already submitted to Frigate+"
         logger.error(message)
         return make_response(jsonify({"success": False, "message": message}), 400)
 
@@ -437,7 +435,7 @@ def get_sub_labels():
             parts = label.split(",")
 
             for part in parts:
-                if not (part.strip()) in sub_labels:
+                if part.strip() not in sub_labels:
                     sub_labels.append(part.strip())
 
     sub_labels.sort()
@@ -476,7 +474,7 @@ def event_thumbnail(id, max_cache_age=2592000):
     event_complete = False
     try:
         event = Event.get(Event.id == id)
-        if not event.end_time is None:
+        if event.end_time is not None:
             event_complete = True
         thumbnail_bytes = base64.b64decode(event.thumbnail)
     except DoesNotExist:
@@ -486,9 +484,9 @@ def event_thumbnail(id, max_cache_age=2592000):
             for camera_state in camera_states:
                 if id in camera_state.tracked_objects:
                     tracked_obj = camera_state.tracked_objects.get(id)
-                    if not tracked_obj is None:
+                    if tracked_obj is not None:
                         thumbnail_bytes = tracked_obj.get_thumbnail()
-        except:
+        except Exception:
             return "Event not found", 404
 
     if thumbnail_bytes is None:
@@ -593,7 +591,7 @@ def event_snapshot(id):
     event_complete = False
     jpg_bytes = None
     try:
-        event = Event.get(Event.id == id, Event.end_time != None)
+        event = Event.get(Event.id == id, Event.end_time is not None)
         event_complete = True
         if not event.has_snapshot:
             return "Snapshot not available", 404
@@ -609,7 +607,7 @@ def event_snapshot(id):
             for camera_state in camera_states:
                 if id in camera_state.tracked_objects:
                     tracked_obj = camera_state.tracked_objects.get(id)
-                    if not tracked_obj is None:
+                    if tracked_obj is not None:
                         jpg_bytes = tracked_obj.get_jpg_bytes(
                             timestamp=request.args.get("timestamp", type=int),
                             bounding_box=request.args.get("bbox", type=int),
@@ -617,9 +615,9 @@ def event_snapshot(id):
                             height=request.args.get("h", type=int),
                             quality=request.args.get("quality", default=70, type=int),
                         )
-        except:
+        except Exception:
             return "Event not found", 404
-    except:
+    except Exception:
         return "Event not found", 404
 
     if jpg_bytes is None:
@@ -645,7 +643,7 @@ def label_snapshot(camera_name, label):
         event_query = (
             Event.select()
             .where(Event.camera == camera_name)
-            .where(Event.has_snapshot == True)
+            .where(Event.has_snapshot is True)
             .order_by(Event.start_time.desc())
         )
     else:
@@ -653,7 +651,7 @@ def label_snapshot(camera_name, label):
             Event.select()
             .where(Event.camera == camera_name)
            .where(Event.label == label)
-            .where(Event.has_snapshot == True)
+            .where(Event.has_snapshot is True)
             .order_by(Event.start_time.desc())
         )
 
@@ -820,13 +818,13 @@ def events():
     if before:
         clauses.append((Event.start_time < before))
 
-    if not has_clip is None:
+    if has_clip is not None:
         clauses.append((Event.has_clip == has_clip))
 
-    if not has_snapshot is None:
+    if has_snapshot is not None:
         clauses.append((Event.has_snapshot == has_snapshot))
 
-    if not in_progress is None:
+    if in_progress is not None:
         clauses.append((Event.end_time.is_null(in_progress)))
 
     if not include_thumbnails:
@@ -894,12 +892,12 @@ def create_event(camera_name, label):
 def end_event(event_id):
     try:
         current_app.external_processor.finish_manual_event(event_id)
-    except:
+    except Exception:
         return jsonify(
             {"success": False, "message": f"{event_id} must be set and valid."}, 404
         )
 
-    return jsonify({"success": True, "message": f"Event successfully ended."}, 200)
+    return jsonify({"success": True, "message": "Event successfully ended."}, 200)
 
 
 @bp.route("/config")
@@ -959,9 +957,8 @@ def config_save():
 
     # Validate the config schema
     try:
-        new_yaml = FrigateConfig.parse_raw(new_config)
-        check_runtime = new_yaml.runtime_config
-    except Exception as e:
+        FrigateConfig.parse_raw(new_config)
+    except Exception:
         return make_response(
             jsonify(
                 {
@@ -985,12 +982,12 @@ def config_save():
         with open(config_file, "w") as f:
             f.write(new_config)
             f.close()
-    except Exception as e:
+    except Exception:
         return make_response(
             jsonify(
                 {
                     "success": False,
-                    "message": f"Could not write config file, be sure that Frigate has write permission on the config file.",
+                    "message": "Could not write config file, be sure that Frigate has write permission on the config file.",
                 }
             ),
             400,
@@ -1351,6 +1348,7 @@ def recording_clip(camera_name, start_ts, end_ts):
     if not os.path.exists(path):
         ffmpeg_cmd = [
             "ffmpeg",
+            "-hide_banner",
             "-y",
             "-protocol_whitelist",
             "pipe,file",
@@ -1507,6 +1505,22 @@ def vod_event(id):
     )
 
 
+@bp.route("/export/<camera_name>/start/<start_time>/end/<end_time>", methods=["POST"])
+def export_recording(camera_name: str, start_time: int, end_time: int):
+    playback_factor = request.get_json(silent=True).get("playback", "realtime")
+    exporter = RecordingExporter(
+        current_app.frigate_config,
+        camera_name,
+        int(start_time),
+        int(end_time),
+        PlaybackFactorEnum[playback_factor]
+        if playback_factor in PlaybackFactorEnum.__members__.values()
+        else PlaybackFactorEnum.realtime,
+    )
+    exporter.start()
+    return "Starting export of recording", 200
+
+
 def imagestream(detected_frames_processor, camera_name, fps, height, draw_options):
     while True:
         # max out at specified FPS
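Note: a rough sketch of exercising the new export route from Python. The camera name, timestamps and host are placeholders; the handler calls request.get_json(...).get(...), so a JSON body (even just the playback key) is expected, and installs that sit behind the bundled reverse proxy may need an /api prefix on the path.

import requests

# hypothetical camera name and epoch timestamps for illustration only
resp = requests.post(
    "http://127.0.0.1:5000/export/front_door/start/1685000000/end/1685000300",
    json={"playback": "timelapse_25x"},  # or "realtime" (the fallback)
    timeout=10,
)
print(resp.status_code, resp.text)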
@@ -1531,7 +1545,7 @@ def ffprobe():
 
     if not path_param:
         return jsonify(
-            {"success": False, "message": f"Path needs to be provided."}, "404"
+            {"success": False, "message": "Path needs to be provided."}, "404"
         )
 
     if path_param.startswith("camera"):
@@ -1,18 +1,17 @@
 # adapted from https://medium.com/@jonathonbao/python3-logging-with-multiprocessing-f51f460b8778
 import logging
-import threading
-import os
-import signal
-import queue
 import multiprocessing as mp
-from multiprocessing.queues import Queue
-from logging import handlers
-from typing import Optional
-from types import FrameType
-from setproctitle import setproctitle
-from typing import Deque, Optional
-from types import FrameType
+import os
+import queue
+import signal
+import threading
 from collections import deque
+from logging import handlers
+from multiprocessing.queues import Queue
+from types import FrameType
+from typing import Deque, Optional
+
+from setproctitle import setproctitle
 
 from frigate.util import clean_camera_user_pass
 
@@ -44,7 +43,7 @@ def root_configurer(queue: Queue) -> None:
 
 
 def log_process(log_queue: Queue) -> None:
-    threading.current_thread().name = f"logger"
+    threading.current_thread().name = "logger"
     setproctitle("frigate.logger")
     listener_configurer()
 
@@ -63,6 +62,8 @@ def log_process(log_queue: Queue) -> None:
             if stop_event.is_set():
                 break
             continue
+        if record.msg.startswith("You are using a scalar distance function"):
+            continue
         logger = logging.getLogger(record.name)
         logger.handle(record)
 
@@ -1,12 +1,11 @@
-from numpy import unique
 from peewee import (
-    Model,
+    BooleanField,
     CharField,
     DateTimeField,
     FloatField,
-    BooleanField,
-    TextField,
     IntegerField,
+    Model,
+    TextField,
 )
 from playhouse.sqlite_ext import JSONField
 
@@ -1,6 +1,7 @@
 import cv2
 import imutils
 import numpy as np
+
 from frigate.config import MotionConfig
 
 
@@ -12,7 +12,6 @@ from setproctitle import setproctitle
 
 from frigate.config import InputTensorEnum
 from frigate.detectors import create_detector
-
 from frigate.util import EventsPerSecond, SharedMemoryFrameManager, listen, load_labels
 
 logger = logging.getLogger(__name__)
@@ -161,7 +160,7 @@ class ObjectDetectProcess:
 
     def start_or_restart(self):
         self.detection_start.value = 0.0
-        if (not self.detect_process is None) and self.detect_process.is_alive():
+        if (self.detect_process is not None) and self.detect_process.is_alive():
             self.stop()
         self.detect_process = mp.Process(
             target=run_detector,
@@ -15,10 +15,10 @@ import numpy as np
 from frigate.comms.dispatcher import Dispatcher
 from frigate.config import (
     CameraConfig,
-    MqttConfig,
-    SnapshotsConfig,
-    RecordConfig,
     FrigateConfig,
+    MqttConfig,
+    RecordConfig,
+    SnapshotsConfig,
 )
 from frigate.const import CLIPS_DIR
 from frigate.events.maintainer import EventTypeEnum
@@ -141,7 +141,7 @@ class TrackedObject:
         # check each zone
         for name, zone in self.camera_config.zones.items():
             # if the zone is not for this object type, skip
-            if len(zone.objects) > 0 and not obj_data["label"] in zone.objects:
+            if len(zone.objects) > 0 and obj_data["label"] not in zone.objects:
                 continue
             contour = zone.contour
             # check if the object is in the zone
@@ -177,11 +177,7 @@ class TrackedObject:
         return (thumb_update, significant_change)
 
     def to_dict(self, include_thumbnail: bool = False):
-        snapshot_time = (
-            self.thumbnail_data["frame_time"]
-            if not self.thumbnail_data is None
-            else 0.0
-        )
+        snapshot_time = (
+            self.thumbnail_data["frame_time"] if self.thumbnail_data is not None else 0.0
+        )
         event = {
             "id": self.obj_data["id"],
             "camera": self.camera,
@@ -526,7 +522,7 @@ class CameraState:
             for id in removed_ids:
                 # publish events to mqtt
                 removed_obj = tracked_objects[id]
-                if not "end_time" in removed_obj.obj_data:
+                if "end_time" not in removed_obj.obj_data:
                     removed_obj.obj_data["end_time"] = frame_time
                     for c in self.callbacks["end"]:
                         c(self.name, removed_obj, frame_time)
@@ -1028,4 +1024,4 @@ class TrackedObjectProcessor(threading.Thread):
             event_id, camera = self.event_processed_queue.get()
             self.camera_states[camera].finished(event_id)
 
-        logger.info(f"Exiting object processor...")
+        logger.info("Exiting object processor...")
@@ -4,7 +4,6 @@ import logging
 import math
 import multiprocessing as mp
 import os
-import operator
 import queue
 import signal
 import subprocess as sp
@@ -149,7 +148,7 @@ class BroadcastThread(threading.Thread):
             ):
                 try:
                     ws.send(buf, binary=True)
-                except:
+                except ValueError:
                     pass
             elif self.converter.process.poll() is not None:
                 break
@@ -185,7 +184,7 @@ class BirdsEyeFrameManager:
         if len(logo_files) > 0:
             birdseye_logo = cv2.imread(logo_files[0], cv2.IMREAD_UNCHANGED)
 
-        if not birdseye_logo is None:
+        if birdseye_logo is not None:
             transparent_layer = birdseye_logo[:, :, 3]
             y_offset = height // 2 - transparent_layer.shape[0] // 2
             x_offset = width // 2 - transparent_layer.shape[1] // 2
@@ -229,7 +228,7 @@ class BirdsEyeFrameManager:
         self.last_output_time = 0.0
 
     def clear_frame(self):
-        logger.debug(f"Clearing the birdseye frame")
+        logger.debug("Clearing the birdseye frame")
         self.frame[:] = self.blank_frame
 
     def copy_to_position(self, position, camera=None, frame_time=None):
@@ -301,7 +300,7 @@ class BirdsEyeFrameManager:
         # reset the layout if it needs to be different
         if layout_dim != self.layout_dim or reset_layout:
             if reset_layout:
-                logger.debug(f"Added new cameras, resetting layout...")
+                logger.debug("Added new cameras, resetting layout...")
 
             logger.debug(f"Changing layout size from {self.layout_dim} to {layout_dim}")
             self.layout_dim = layout_dim
@@ -385,7 +384,7 @@ class BirdsEyeFrameManager:
             ]
             # if not an empty spot and the camera has a newer frame, copy it
             elif (
-                not camera is None
+                camera is not None
                 and self.cameras[camera]["current_frame"]
                 != self.cameras[camera]["layout_frame"]
             ):
@@ -423,8 +422,8 @@ class BirdsEyeFrameManager:
 
 
 def output_frames(config: FrigateConfig, video_output_queue):
-    threading.current_thread().name = f"output"
-    setproctitle(f"frigate.output")
+    threading.current_thread().name = "output"
+    setproctitle("frigate.output")
 
     stop_event = mp.Event()
 
@@ -3,12 +3,14 @@ import json
 import logging
 import os
 import re
-from typing import Any, Dict, List
-import requests
-from frigate.const import PLUS_ENV_VAR, PLUS_API_HOST
-from requests.models import Response
+from typing import Any, List
+
 import cv2
+import requests
 from numpy import ndarray
+from requests.models import Response
+
+from frigate.const import PLUS_API_HOST, PLUS_ENV_VAR
 
 logger = logging.getLogger(__name__)
 
@@ -2,13 +2,12 @@
 
 import logging
 import site
-
 from enum import Enum
 
 from onvif import ONVIFCamera, ONVIFError
 
 from frigate.config import FrigateConfig
 
 
 logger = logging.getLogger(__name__)
 
@@ -145,7 +144,7 @@ class OnvifController:
         onvif.get_service("ptz").ContinuousMove(move_request)
 
     def _move_to_preset(self, camera_name: str, preset: str) -> None:
-        if not preset in self.cams[camera_name]["presets"]:
+        if preset not in self.cams[camera_name]["presets"]:
             logger.error(f"{preset} is not a valid preset for {camera_name}")
             return
 
@@ -275,7 +275,7 @@ class RecordingCleanup(threading.Thread):
         # Expire tmp clips every minute, recordings and clean directories every hour.
         for counter in itertools.cycle(range(self.config.record.expire_interval)):
             if self.stop_event.wait(60):
-                logger.info(f"Exiting recording cleanup...")
+                logger.info("Exiting recording cleanup...")
                 break
             self.clean_tmp_clips()
 
frigate/record/export.py (new file, 101 lines)
@@ -0,0 +1,101 @@
+"""Export recordings to storage."""
+
+import datetime
+import logging
+import os
+import subprocess as sp
+import threading
+from enum import Enum
+
+from frigate.config import FrigateConfig
+from frigate.const import EXPORT_DIR, MAX_PLAYLIST_SECONDS
+from frigate.ffmpeg_presets import (
+    EncodeTypeEnum,
+    parse_preset_hardware_acceleration_encode,
+)
+
+logger = logging.getLogger(__name__)
+
+
+class PlaybackFactorEnum(str, Enum):
+    realtime = "realtime"
+    timelapse_25x = "timelapse_25x"
+
+
+class RecordingExporter(threading.Thread):
+    """Exports a specific set of recordings for a camera to storage as a single file."""
+
+    def __init__(
+        self,
+        config: FrigateConfig,
+        camera: str,
+        start_time: int,
+        end_time: int,
+        playback_factor: PlaybackFactorEnum,
+    ) -> None:
+        threading.Thread.__init__(self)
+        self.config = config
+        self.camera = camera
+        self.start_time = start_time
+        self.end_time = end_time
+        self.playback_factor = playback_factor
+
+    def get_datetime_from_timestamp(self, timestamp: int) -> str:
+        """Convenience fun to get a simple date time from timestamp."""
+        return datetime.datetime.fromtimestamp(timestamp).strftime("%Y_%m_%d_%I:%M")
+
+    def run(self) -> None:
+        logger.debug(
+            f"Beginning export for {self.camera} from {self.start_time} to {self.end_time}"
+        )
+        file_name = f"{EXPORT_DIR}/in_progress.{self.camera}@{self.get_datetime_from_timestamp(self.start_time)}__{self.get_datetime_from_timestamp(self.end_time)}.mp4"
+        final_file_name = f"{EXPORT_DIR}/{self.camera}_{self.get_datetime_from_timestamp(self.start_time)}__{self.get_datetime_from_timestamp(self.end_time)}.mp4"
+
+        if (self.end_time - self.start_time) <= MAX_PLAYLIST_SECONDS:
+            playlist_lines = f"http://127.0.0.1:5000/vod/{self.camera}/start/{self.start_time}/end/{self.end_time}/index.m3u8"
+            ffmpeg_input = (
+                f"-y -protocol_whitelist pipe,file,http,tcp -i {playlist_lines}"
+            )
+        else:
+            playlist_lines = []
+            playlist_start = self.start_time
+
+            while playlist_start < self.end_time:
+                playlist_lines.append(
+                    f"file 'http://127.0.0.1:5000/vod/{self.camera}/start/{playlist_start}/end/{min(playlist_start + MAX_PLAYLIST_SECONDS, self.end_time)}/index.m3u8'"
+                )
+                playlist_start += MAX_PLAYLIST_SECONDS
+
+            ffmpeg_input = "-y -protocol_whitelist pipe,file,http,tcp -f concat -safe 0 -i /dev/stdin"
+
+        if self.playback_factor == PlaybackFactorEnum.realtime:
+            ffmpeg_cmd = (
+                f"ffmpeg -hide_banner {ffmpeg_input} -c copy {file_name}"
+            ).split(" ")
+        elif self.playback_factor == PlaybackFactorEnum.timelapse_25x:
+            ffmpeg_cmd = (
+                parse_preset_hardware_acceleration_encode(
+                    self.config.ffmpeg.hwaccel_args,
+                    ffmpeg_input,
+                    f"-vf setpts=0.04*PTS -r 30 -an {file_name}",
+                    EncodeTypeEnum.timelapse,
+                )
+            ).split(" ")
+
+        p = sp.run(
+            ffmpeg_cmd,
+            input="\n".join(playlist_lines),
+            encoding="ascii",
+            capture_output=True,
+        )
+
+        if p.returncode != 0:
+            logger.error(
+                f"Failed to export recording for command {' '.join(ffmpeg_cmd)}"
+            )
+            logger.error(p.stderr)
+            return
+
+        logger.debug(f"Updating finalized export {file_name}")
+        os.rename(file_name, final_file_name)
+        logger.debug(f"Finished exporting {file_name}")
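Note: the timelapse_25x export above works by rescaling presentation timestamps. A small sketch of the arithmetic behind the hard-coded filter string (0.04 = 1/25), using assumed example numbers rather than anything from this change:

# PTS scaling for an N-times speedup: setpts=(1/N)*PTS
speedup = 25
setpts_factor = 1 / speedup  # 0.04, matching "-vf setpts=0.04*PTS -r 30" above

# e.g. a hypothetical 50 minute export would play back in 2 minutes at 30 fps output
source_seconds = 50 * 60
print(source_seconds * setpts_factor / 60)  # -> 2.0 minutes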
@@ -9,14 +9,14 @@ import random
 import string
 import subprocess as sp
 import threading
-import psutil
-
 from collections import defaultdict
 from multiprocessing.synchronize import Event as MpEvent
 from pathlib import Path
 from typing import Any, Tuple
 
-from frigate.config import RetainModeEnum, FrigateConfig
+import psutil
+
+from frigate.config import FrigateConfig, RetainModeEnum
 from frigate.const import CACHE_DIR, MAX_SEGMENT_DURATION, RECORD_DIR
 from frigate.models import Event, Recordings
 from frigate.types import RecordMetricsTypes
@@ -63,7 +63,7 @@ class RecordingMaintainer(threading.Thread):
                 for nt in flist:
                     if nt.path.startswith(CACHE_DIR):
                         files_in_use.append(nt.path.split("/")[-1])
-            except:
+            except psutil.Error:
                 continue
 
         # group recordings by camera
@@ -115,7 +115,7 @@ class RecordingMaintainer(threading.Thread):
                 Event.select()
                 .where(
                     Event.camera == camera,
-                    (Event.end_time == None)
+                    (Event.end_time is None)
                     | (Event.end_time >= recordings[0]["start_time"].timestamp()),
                     Event.has_clip,
                 )
@@ -127,7 +127,7 @@ class RecordingMaintainer(threading.Thread):
 
             # Just delete files if recordings are turned off
             if (
-                not camera in self.config.cameras
+                camera not in self.config.cameras
                 or not self.process_info[camera]["record_enabled"].value
             ):
                 Path(cache_path).unlink(missing_ok=True)
@@ -296,6 +296,7 @@ class RecordingMaintainer(threading.Thread):
             # add faststart to kept segments to improve metadata reading
             ffmpeg_cmd = [
                 "ffmpeg",
+                "-hide_banner",
                 "-y",
                 "-i",
                 cache_path,
@@ -394,4 +395,4 @@ class RecordingMaintainer(threading.Thread):
             duration = datetime.datetime.now().timestamp() - run_start
             wait_time = max(0, 5 - duration)
 
-        logger.info(f"Exiting recording maintenance...")
+        logger.info("Exiting recording maintenance...")
@@ -4,12 +4,11 @@ import logging
 import multiprocessing as mp
 import signal
 import threading
-
-from setproctitle import setproctitle
 from types import FrameType
 from typing import Optional
 
 from playhouse.sqliteq import SqliteQueueDatabase
+from setproctitle import setproctitle
 
 from frigate.config import FrigateConfig
 from frigate.models import Event, Recordings, RecordingsToDelete, Timeline
@@ -1,23 +1,30 @@
 import asyncio
 import json
 import logging
+import os
+import shutil
 import threading
 import time
-import psutil
-import shutil
-import os
-import requests
-from typing import Optional, Any
 from multiprocessing.synchronize import Event as MpEvent
+from typing import Any, Optional
+
+import psutil
+import requests
+from requests.exceptions import RequestException
 
 from frigate.comms.dispatcher import Dispatcher
 from frigate.config import FrigateConfig
-from frigate.const import DRIVER_AMD, DRIVER_ENV_VAR, RECORD_DIR, CLIPS_DIR, CACHE_DIR
-from frigate.types import StatsTrackingTypes, CameraMetricsTypes
-from frigate.util import get_amd_gpu_stats, get_intel_gpu_stats, get_nvidia_gpu_stats
-from frigate.version import VERSION
-from frigate.util import get_cpu_stats, get_bandwidth_stats
+from frigate.const import CACHE_DIR, CLIPS_DIR, DRIVER_AMD, DRIVER_ENV_VAR, RECORD_DIR
 from frigate.object_detection import ObjectDetectProcess
+from frigate.types import CameraMetricsTypes, StatsTrackingTypes
+from frigate.util import (
+    get_amd_gpu_stats,
+    get_bandwidth_stats,
+    get_cpu_stats,
+    get_intel_gpu_stats,
+    get_nvidia_gpu_stats,
+)
+from frigate.version import VERSION
 
 logger = logging.getLogger(__name__)
 
@@ -31,7 +38,7 @@ def get_latest_version(config: FrigateConfig) -> str:
             "https://api.github.com/repos/blakeblackshear/frigate/releases/latest",
             timeout=10,
         )
-    except:
+    except RequestException:
         return "unknown"
 
     response = request.json()
@@ -308,4 +315,4 @@ class StatsEmitter(threading.Thread):
             )
             self.dispatcher.publish("stats", json.dumps(stats), retain=False)
             logger.debug("Finished stats collection")
-        logger.info(f"Exiting stats emitter...")
+        logger.info("Exiting stats emitter...")
@@ -1,9 +1,9 @@
 """Handle storage retention and usage."""
 
 import logging
-from pathlib import Path
 import shutil
 import threading
+from pathlib import Path
 
 from peewee import fn
 
@@ -107,7 +107,7 @@ class StorageMaintainer(threading.Thread):
         retained_events: Event = (
             Event.select()
             .where(
-                Event.retain_indefinitely == True,
+                Event.retain_indefinitely is True,
                 Event.has_clip,
             )
             .order_by(Event.start_time.asc())
@@ -188,4 +188,4 @@ class StorageMaintainer(threading.Thread):
             if self.check_storage_needs_cleanup():
                 self.reduce_storage_consumption()
 
-        logger.info(f"Exiting storage maintainer...")
+        logger.info("Exiting storage maintainer...")
@@ -1,13 +1,11 @@
 import json
 import os
 import unittest
 
 import numpy as np
 from pydantic import ValidationError
 
-from frigate.config import (
-    BirdseyeModeEnum,
-    FrigateConfig,
-)
+from frigate.config import BirdseyeModeEnum, FrigateConfig
 from frigate.const import MODEL_CACHE_DIR
 from frigate.detectors import DetectorTypeEnum
 from frigate.plus import PlusApi
@@ -675,7 +673,7 @@ class TestConfig(unittest.TestCase):
         runtime_config = frigate_config.runtime_config()
         ffmpeg_cmds = runtime_config.cameras["back"].ffmpeg_cmds
         assert len(ffmpeg_cmds) == 1
-        assert not "clips" in ffmpeg_cmds[0]["roles"]
+        assert "clips" not in ffmpeg_cmds[0]["roles"]
 
     def test_max_disappeared_default(self):
         config = {
@@ -986,7 +984,7 @@ class TestConfig(unittest.TestCase):
         }
 
         frigate_config = FrigateConfig(**config)
-        runtime_config = frigate_config.runtime_config()
+        frigate_config.runtime_config()
 
     def test_global_detect(self):
         config = {
@@ -1145,7 +1143,7 @@ class TestConfig(unittest.TestCase):
         assert config == frigate_config.dict(exclude_unset=True)
 
         runtime_config = frigate_config.runtime_config()
-        assert runtime_config.cameras["back"].snapshots.bounding_box == False
+        assert runtime_config.cameras["back"].snapshots.bounding_box is False
         assert runtime_config.cameras["back"].snapshots.height == 150
         assert runtime_config.cameras["back"].snapshots.enabled
 
@@ -1,7 +1,9 @@
+from unittest import TestCase, main
+
 import cv2
 import numpy as np
-from unittest import TestCase, main
-from frigate.util import get_yuv_crop, copy_yuv_to_position
+
+from frigate.util import copy_yuv_to_position, get_yuv_crop
 
 
 class TestCopyYuvToPosition(TestCase):
@@ -1,4 +1,5 @@
 import unittest
+
 from frigate.config import FFMPEG_INPUT_ARGS_DEFAULT, FrigateConfig
 from frigate.ffmpeg_presets import parse_preset_input
 
@@ -1,7 +1,7 @@
 import unittest
 from unittest.mock import MagicMock, patch
 
-from frigate.util import get_amd_gpu_stats, get_intel_gpu_stats, get_nvidia_gpu_stats
+from frigate.util import get_amd_gpu_stats, get_intel_gpu_stats
 
 
 class TestGpuStats(unittest.TestCase):
@@ -6,15 +6,14 @@ import unittest
 from unittest.mock import patch
 
 from peewee_migrate import Router
+from playhouse.shortcuts import model_to_dict
 from playhouse.sqlite_ext import SqliteExtDatabase
 from playhouse.sqliteq import SqliteQueueDatabase
-from playhouse.shortcuts import model_to_dict
 
 from frigate.config import FrigateConfig
 from frigate.http import create_app
 from frigate.models import Event, Recordings
 from frigate.plus import PlusApi
-
 from frigate.test.const import TEST_DB, TEST_DB_CLEANUPS
 
 
@@ -128,22 +127,22 @@ class TestHttp(unittest.TestCase):
 
         with app.test_client() as client:
             _insert_mock_event(id)
-            events = client.get(f"/events").json
+            events = client.get("/events").json
             assert events
             assert len(events) == 1
             assert events[0]["id"] == id
             _insert_mock_event(id2)
-            events = client.get(f"/events").json
+            events = client.get("/events").json
             assert events
             assert len(events) == 2
             events = client.get(
-                f"/events",
+                "/events",
                 query_string={"limit": 1},
             ).json
             assert events
             assert len(events) == 1
             events = client.get(
-                f"/events",
+                "/events",
                 query_string={"has_clip": 0},
             ).json
             assert not events
@@ -230,12 +229,12 @@ class TestHttp(unittest.TestCase):
             event = client.get(f"/events/{id}").json
             assert event
             assert event["id"] == id
-            assert event["retain_indefinitely"] == True
+            assert event["retain_indefinitely"] is True
             client.delete(f"/events/{id}/retain")
             event = client.get(f"/events/{id}").json
             assert event
             assert event["id"] == id
-            assert event["retain_indefinitely"] == False
+            assert event["retain_indefinitely"] is False
 
     def test_set_delete_sub_label(self):
         app = create_app(
@@ -4,10 +4,10 @@ from unittest.mock import Mock, patch
 import numpy as np
 from pydantic import parse_obj_as
 
-from frigate.config import DetectorConfig, InputTensorEnum, ModelConfig
-from frigate.detectors import DetectorTypeEnum
 import frigate.detectors as detectors
 import frigate.object_detection
+from frigate.config import DetectorConfig, InputTensorEnum, ModelConfig
+from frigate.detectors import DetectorTypeEnum
 
 
 class TestLocalObjectDetector(unittest.TestCase):
@@ -1,5 +1,5 @@
-import numpy as np
 from unittest import TestCase, main
+
 from frigate.video import box_overlaps, reduce_boxes
 
 
@@ -1,21 +1,17 @@
 import datetime
-import json
 import logging
 import os
 import unittest
-from unittest.mock import MagicMock, patch
+from unittest.mock import MagicMock
 
 from peewee import DoesNotExist
 from peewee_migrate import Router
 from playhouse.sqlite_ext import SqliteExtDatabase
 from playhouse.sqliteq import SqliteQueueDatabase
-from playhouse.shortcuts import model_to_dict
 
 from frigate.config import FrigateConfig
-from frigate.http import create_app
 from frigate.models import Event, Recordings
 from frigate.storage import StorageMaintainer
-
 from frigate.test.const import TEST_DB, TEST_DB_CLEANUPS
 
 
@@ -1,6 +1,8 @@
+from unittest import TestCase, main
+
 import cv2
 import numpy as np
-from unittest import TestCase, main
+
 from frigate.util import yuv_region_2_rgb
 
 
@@ -33,7 +35,7 @@ class TestYuvRegion2RGB(TestCase):
         # cv2.imwrite(f"bgr_frame.jpg", self.bgr_frame)
         yuv_frame = cv2.cvtColor(bgr_frame, cv2.COLOR_BGR2YUV_I420)
 
-        cropped = yuv_region_2_rgb(yuv_frame, (0, 852, 648, 1500))
+        yuv_region_2_rgb(yuv_frame, (0, 852, 648, 1500))
         # cv2.imwrite(f"cropped.jpg", cv2.cvtColor(cropped, cv2.COLOR_RGB2BGR))
 
 
@@ -1,16 +1,14 @@
 """Record events for object, audio, etc. detections."""
 
 import logging
-import threading
 import queue
+import threading
+from multiprocessing.queues import Queue
+from multiprocessing.synchronize import Event as MpEvent
 
 from frigate.config import FrigateConfig
 from frigate.events.maintainer import EventTypeEnum
 from frigate.models import Timeline
-
-from multiprocessing.queues import Queue
-from multiprocessing.synchronize import Event as MpEvent
-
 from frigate.util import to_relative_box
 
 logger = logging.getLogger(__name__)
frigate/track/__init__.py (new file, 13 lines)
@@ -0,0 +1,13 @@
+from abc import ABC, abstractmethod
+
+from frigate.config import DetectConfig
+
+
+class ObjectTracker(ABC):
+    @abstractmethod
+    def __init__(self, config: DetectConfig):
+        pass
+
+    @abstractmethod
+    def match_and_update(self, detections):
+        pass
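Note: a minimal sketch (not part of this change) of what a concrete implementation of the new ObjectTracker interface has to provide; the no-op logic and class name are purely illustrative.

from frigate.config import DetectConfig
from frigate.track import ObjectTracker


class NoopTracker(ObjectTracker):
    """Hypothetical tracker that never registers anything and drops every detection."""

    def __init__(self, config: DetectConfig):
        self.tracked_objects = {}

    def match_and_update(self, detections):
        # a real tracker would match detections against existing ids here
        self.tracked_objects = {}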
@@ -1,22 +1,16 @@
-import copy
-import datetime
-import itertools
-import multiprocessing as mp
 import random
 import string
-import threading
-import time
 from collections import defaultdict
 
-import cv2
 import numpy as np
 from scipy.spatial import distance as dist
 
 from frigate.config import DetectConfig
+from frigate.track import ObjectTracker
 from frigate.util import intersection_over_union
 
 
-class ObjectTracker:
+class CentroidTracker(ObjectTracker):
     def __init__(self, config: DetectConfig):
         self.tracked_objects = {}
         self.disappeared = {}
@@ -141,11 +135,11 @@ class ObjectTracker:
             if self.is_expired(id):
                 self.deregister(id)
 
-    def match_and_update(self, frame_time, new_objects):
+    def match_and_update(self, frame_time, detections):
         # group by name
-        new_object_groups = defaultdict(lambda: [])
-        for obj in new_objects:
-            new_object_groups[obj[0]].append(
+        detection_groups = defaultdict(lambda: [])
+        for obj in detections:
+            detection_groups[obj[0]].append(
                 {
                     "label": obj[0],
                     "score": obj[1],
@@ -160,17 +154,17 @@ class ObjectTracker:
         # update any tracked objects with labels that are not
         # seen in the current objects and deregister if needed
         for obj in list(self.tracked_objects.values()):
-            if not obj["label"] in new_object_groups:
+            if obj["label"] not in detection_groups:
                 if self.disappeared[obj["id"]] >= self.max_disappeared:
                     self.deregister(obj["id"])
                 else:
                     self.disappeared[obj["id"]] += 1
 
-        if len(new_objects) == 0:
+        if len(detections) == 0:
             return
 
         # track objects for each label type
-        for label, group in new_object_groups.items():
+        for label, group in detection_groups.items():
             current_objects = [
                 o for o in self.tracked_objects.values() if o["label"] == label
             ]
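Note: a small worked example of the size-normalized distance used by the Norfair-based tracker added below. The box corners here are made up; the code just mirrors the math in distance() (relative x/y shift plus width/height ratios) and is not part of the change itself.

import numpy as np

# hypothetical detection and estimate, each as [[xmin, ymin], [xmax, ymax]]
detection = np.array([[100, 100], [200, 300]])
estimate = np.array([[110, 105], [205, 310]])

det_dim = np.diff(detection, axis=0).flatten()  # width, height of the detection
est_dim = np.diff(estimate, axis=0).flatten()   # width, height of the estimate

# bottom-center positions, as in the tracker's distance function
det_pos = np.array([np.average(detection[:, 0]), np.max(detection[:, 1])])
est_pos = np.array([np.average(estimate[:, 0]), np.max(estimate[:, 1])])

shift = (det_pos - est_pos).astype(float)
shift[0] /= est_dim[0]  # x shift relative to width
shift[1] /= est_dim[1]  # y shift relative to height

widths = np.sort([est_dim[0], det_dim[0]])
heights = np.sort([est_dim[1], det_dim[1]])
change = np.append(shift, [widths[1] / widths[0] - 1.0, heights[1] / heights[0] - 1.0])

print(np.linalg.norm(change))  # compared against the 2.5 distance_threshold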
frigate/track/norfair_tracker.py (new file, 285 lines)

import random
import string

import numpy as np
from norfair import Detection, Drawable, Tracker, draw_boxes
from norfair.drawing.drawer import Drawer

from frigate.config import DetectConfig
from frigate.track import ObjectTracker
from frigate.util import intersection_over_union


# Normalizes distance from estimate relative to object size
# Other ideas:
# - if estimates are inaccurate for first N detections, compare with last_detection (may be fine)
# - could be variable based on time since last_detection
# - include estimated velocity in the distance (car driving by of a parked car)
# - include some visual similarity factor in the distance for occlusions
def distance(detection: np.array, estimate: np.array) -> float:
    # ultimately, this should try and estimate distance in 3-dimensional space
    # consider change in location, width, and height

    estimate_dim = np.diff(estimate, axis=0).flatten()
    detection_dim = np.diff(detection, axis=0).flatten()

    # get bottom center positions
    detection_position = np.array(
        [np.average(detection[:, 0]), np.max(detection[:, 1])]
    )
    estimate_position = np.array([np.average(estimate[:, 0]), np.max(estimate[:, 1])])

    distance = (detection_position - estimate_position).astype(float)
    # change in x relative to w
    distance[0] /= estimate_dim[0]
    # change in y relative to h
    distance[1] /= estimate_dim[1]

    # get ratio of widths and heights
    # normalize to 1
    widths = np.sort([estimate_dim[0], detection_dim[0]])
    heights = np.sort([estimate_dim[1], detection_dim[1]])
    width_ratio = widths[1] / widths[0] - 1.0
    height_ratio = heights[1] / heights[0] - 1.0

    # change vector is relative x,y change and w,h ratio
    change = np.append(distance, np.array([width_ratio, height_ratio]))

    # calculate euclidean distance of the change vector
    return np.linalg.norm(change)


def frigate_distance(detection: Detection, tracked_object) -> float:
    return distance(detection.points, tracked_object.estimate)


class NorfairTracker(ObjectTracker):
    def __init__(self, config: DetectConfig):
        self.tracked_objects = {}
        self.disappeared = {}
        self.positions = {}
        self.max_disappeared = config.max_disappeared
        self.detect_config = config
        self.track_id_map = {}
        # TODO: could also initialize a tracker per object class if there
        # was a good reason to have different distance calculations
        self.tracker = Tracker(
            distance_function=frigate_distance,
            distance_threshold=2.5,
            initialization_delay=0,
            hit_counter_max=self.max_disappeared,
        )

    def register(self, track_id, obj):
        rand_id = "".join(random.choices(string.ascii_lowercase + string.digits, k=6))
        id = f"{obj['frame_time']}-{rand_id}"
        self.track_id_map[track_id] = id
        obj["id"] = id
        obj["start_time"] = obj["frame_time"]
        obj["motionless_count"] = 0
        obj["position_changes"] = 0
        self.tracked_objects[id] = obj
        self.disappeared[id] = 0
        self.positions[id] = {
            "xmins": [],
            "ymins": [],
            "xmaxs": [],
            "ymaxs": [],
            "xmin": 0,
            "ymin": 0,
            "xmax": self.detect_config.width,
            "ymax": self.detect_config.height,
        }

    def deregister(self, id):
        del self.tracked_objects[id]
        del self.disappeared[id]

    # tracks the current position of the object based on the last N bounding boxes
    # returns False if the object has moved outside its previous position
    def update_position(self, id, box):
        position = self.positions[id]
        position_box = (
            position["xmin"],
            position["ymin"],
            position["xmax"],
            position["ymax"],
        )

        xmin, ymin, xmax, ymax = box

        iou = intersection_over_union(position_box, box)

        # if the iou drops below the threshold
        # assume the object has moved to a new position and reset the computed box
        if iou < 0.6:
            self.positions[id] = {
                "xmins": [xmin],
                "ymins": [ymin],
                "xmaxs": [xmax],
                "ymaxs": [ymax],
                "xmin": xmin,
                "ymin": ymin,
                "xmax": xmax,
                "ymax": ymax,
            }
            return False

        # if there are less than 10 entries for the position, add the bounding box
        # and recompute the position box
        if len(position["xmins"]) < 10:
            position["xmins"].append(xmin)
            position["ymins"].append(ymin)
            position["xmaxs"].append(xmax)
            position["ymaxs"].append(ymax)
            # by using percentiles here, we hopefully remove outliers
            position["xmin"] = np.percentile(position["xmins"], 15)
            position["ymin"] = np.percentile(position["ymins"], 15)
            position["xmax"] = np.percentile(position["xmaxs"], 85)
            position["ymax"] = np.percentile(position["ymaxs"], 85)

        return True

    def is_expired(self, id):
        obj = self.tracked_objects[id]
        # get the max frames for this label type or the default
        max_frames = self.detect_config.stationary.max_frames.objects.get(
            obj["label"], self.detect_config.stationary.max_frames.default
        )

        # if there is no max_frames for this label type, continue
        if max_frames is None:
            return False

        # if the object has exceeded the max_frames setting, deregister
        if (
            obj["motionless_count"] - self.detect_config.stationary.threshold
            > max_frames
        ):
            return True

        return False

    def update(self, track_id, obj):
        id = self.track_id_map[track_id]
        self.disappeared[id] = 0
        # update the motionless count if the object has not moved to a new position
        if self.update_position(id, obj["box"]):
            self.tracked_objects[id]["motionless_count"] += 1
            if self.is_expired(id):
                self.deregister(id)
                return
        else:
            # register the first position change and then only increment if
            # the object was previously stationary
            if (
                self.tracked_objects[id]["position_changes"] == 0
                or self.tracked_objects[id]["motionless_count"]
                >= self.detect_config.stationary.threshold
            ):
                self.tracked_objects[id]["position_changes"] += 1
            self.tracked_objects[id]["motionless_count"] = 0

        self.tracked_objects[id].update(obj)

    def update_frame_times(self, frame_time):
        # if the object was there in the last frame, assume it's still there
        detections = [
            (
                obj["label"],
                obj["score"],
                obj["box"],
                obj["area"],
                obj["ratio"],
                obj["region"],
            )
            for id, obj in self.tracked_objects.items()
            if self.disappeared[id] == 0
        ]
        self.match_and_update(frame_time, detections=detections)

    def match_and_update(self, frame_time, detections):
        norfair_detections = []

        for obj in detections:
            # centroid is used for other things downstream
            centroid_x = int((obj[2][0] + obj[2][2]) / 2.0)
            centroid_y = int((obj[2][1] + obj[2][3]) / 2.0)

            # track based on top,left and bottom,right corners instead of centroid
            points = np.array([[obj[2][0], obj[2][1]], [obj[2][2], obj[2][3]]])

            norfair_detections.append(
                Detection(
                    points=points,
                    label=obj[0],
                    data={
                        "label": obj[0],
                        "score": obj[1],
                        "box": obj[2],
                        "area": obj[3],
                        "ratio": obj[4],
                        "region": obj[5],
                        "frame_time": frame_time,
                        "centroid": (centroid_x, centroid_y),
                    },
                )
            )

        tracked_objects = self.tracker.update(detections=norfair_detections)

        # update or create new tracks
        active_ids = []
        for t in tracked_objects:
            active_ids.append(t.global_id)
            if t.global_id not in self.track_id_map:
                self.register(t.global_id, t.last_detection.data)
            # if there wasn't a detection in this frame, increment disappeared
            elif t.last_detection.data["frame_time"] != frame_time:
                id = self.track_id_map[t.global_id]
                self.disappeared[id] += 1
            # else update it
            else:
                self.update(t.global_id, t.last_detection.data)

        # clear expired tracks
        expired_ids = [k for k in self.track_id_map.keys() if k not in active_ids]
        for e_id in expired_ids:
            self.deregister(self.track_id_map[e_id])
            del self.track_id_map[e_id]

    def debug_draw(self, frame, frame_time):
        active_detections = [
            Drawable(id=obj.id, points=obj.last_detection.points, label=obj.label)
            for obj in self.tracker.tracked_objects
            if obj.last_detection.data["frame_time"] == frame_time
        ]
        missing_detections = [
            Drawable(id=obj.id, points=obj.last_detection.points, label=obj.label)
            for obj in self.tracker.tracked_objects
            if obj.last_detection.data["frame_time"] != frame_time
        ]
        # draw the estimated bounding box
        draw_boxes(frame, self.tracker.tracked_objects, color="green", draw_ids=True)
        # draw the detections that were detected in the current frame
        draw_boxes(frame, active_detections, color="blue", draw_ids=True)
        # draw the detections that are missing in the current frame
        draw_boxes(frame, missing_detections, color="red", draw_ids=True)

        # draw the distance calculation for the last detection
        # estimate vs detection
        for obj in self.tracker.tracked_objects:
            ld = obj.last_detection
            # bottom right
            text_anchor = (
                ld.points[1, 0],
                ld.points[1, 1],
            )
            frame = Drawer.text(
                frame,
                f"{obj.id}: {str(obj.last_distance)}",
                position=text_anchor,
                size=None,
                color=(255, 0, 0),
                thickness=None,
            )
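A standalone sketch of what the distance normalization above buys: the same 2x4-pixel shift scores roughly 0.07 for a 40x80 box but roughly 0.71 for a 4x8 box, so small objects have to match much more tightly. Only numpy is assumed; normalized_distance mirrors the math of distance() above rather than importing it.

# Standalone illustration of the scale-relative distance used by the new tracker.
import numpy as np


def normalized_distance(detection: np.ndarray, estimate: np.ndarray) -> float:
    # same math as distance() in frigate/track/norfair_tracker.py above
    estimate_dim = np.diff(estimate, axis=0).flatten()
    detection_dim = np.diff(detection, axis=0).flatten()
    detection_position = np.array(
        [np.average(detection[:, 0]), np.max(detection[:, 1])]
    )
    estimate_position = np.array([np.average(estimate[:, 0]), np.max(estimate[:, 1])])
    delta = (detection_position - estimate_position).astype(float)
    delta[0] /= estimate_dim[0]
    delta[1] /= estimate_dim[1]
    widths = np.sort([estimate_dim[0], detection_dim[0]])
    heights = np.sort([estimate_dim[1], detection_dim[1]])
    change = np.append(
        delta, [widths[1] / widths[0] - 1.0, heights[1] / heights[0] - 1.0]
    )
    return float(np.linalg.norm(change))


# a 40x80 box and the same box shifted by (2, 4) pixels
big_det = np.array([[100, 100], [140, 180]])
big_est = np.array([[102, 104], [142, 184]])
# a 4x8 box with the identical pixel shift
small_det = np.array([[10, 10], [14, 18]])
small_est = np.array([[12, 14], [16, 22]])

print(round(normalized_distance(big_det, big_est), 3))      # ~0.071
print(round(normalized_distance(small_det, small_est), 3))  # ~0.707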
@@ -1,7 +1,7 @@
-from typing import Optional, TypedDict
+from multiprocessing.context import Process
 from multiprocessing.queues import Queue
 from multiprocessing.sharedctypes import Synchronized
-from multiprocessing.context import Process
+from typing import Optional, TypedDict

 from frigate.object_detection import ObjectDetectProcess
@@ -1,28 +1,26 @@
 import copy
 import datetime
-import logging
-import shlex
-import subprocess as sp
 import json
+import logging
+import os
 import re
+import shlex
 import signal
+import subprocess as sp
 import traceback
 import urllib.parse
-import yaml
-import os

 from abc import ABC, abstractmethod
 from collections import Counter
 from collections.abc import Mapping
 from multiprocessing import shared_memory
 from typing import Any, AnyStr, Optional, Tuple
-import py3nvml.py3nvml as nvml

 import cv2
 import numpy as np
-import os
 import psutil
+import py3nvml.py3nvml as nvml
 import pytz
+import yaml

 from frigate.const import REGEX_HTTP_CAMERA_USER_PASS, REGEX_RTSP_CAMERA_USER_PASS
@@ -457,7 +455,7 @@ def copy_yuv_to_position(
     # clear v2
     destination_frame[v2[1] : v2[3], v2[0] : v2[2]] = 128

-    if not source_frame is None:
+    if source_frame is not None:
         # calculate the resized frame, maintaining the aspect ratio
         source_aspect_ratio = source_frame.shape[1] / (source_frame.shape[0] // 3 * 2)
         dest_aspect_ratio = destination_shape[1] / destination_shape[0]
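A quick worked example of the shape[0] // 3 * 2 term above (assumed 1280x720 frame): an I420 (YUV 4:2:0) buffer stores the chroma planes below the luma plane, so the array has 1.5x the visible rows and the visible height has to be recovered before computing the aspect ratio.

import numpy as np

# a 1280x720 frame stored as I420 has shape (720 * 3 // 2, 1280) == (1080, 1280)
yuv_frame = np.zeros((720 * 3 // 2, 1280), dtype=np.uint8)

visible_height = yuv_frame.shape[0] // 3 * 2  # 1080 // 3 * 2 == 720
aspect_ratio = yuv_frame.shape[1] / visible_height

print(visible_height)          # 720
print(round(aspect_ratio, 3))  # 1.778 (16:9)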
@@ -840,7 +838,7 @@ def get_cpu_stats() -> dict[str, dict]:
                 "mem": f"{mem_pct}",
                 "cmdline": " ".join(cmdline),
             }
-        except:
+        except Exception:
             continue

     return usages
@@ -865,13 +863,13 @@ def get_bandwidth_stats() -> dict[str, dict]:
         stats = list(filter(lambda a: a != "", line.strip().split("\t")))
         try:
             if re.search(
-                "(^ffmpeg|\/go2rtc|frigate\.detector\.[a-z]+)/([0-9]+)/", stats[0]
+                r"(^ffmpeg|\/go2rtc|frigate\.detector\.[a-z]+)/([0-9]+)/", stats[0]
             ):
                 process = stats[0].split("/")
                 usages[process[len(process) - 2]] = {
                     "bandwidth": round(float(stats[1]) + float(stats[2]), 1),
                 }
-        except:
+        except (IndexError, ValueError):
             continue

     return usages
@@ -932,7 +930,7 @@ def get_intel_gpu_stats() -> dict[str, str]:

     # render is used for qsv
     render = []
-    for result in re.findall('"Render/3D/0":{[a-z":\d.,%]+}', reading):
+    for result in re.findall(r'"Render/3D/0":{[a-z":\d.,%]+}', reading):
         packet = json.loads(result[14:])
         single = packet.get("busy", 0.0)
         render.append(float(single))
@@ -991,11 +989,11 @@ def get_nvidia_gpu_stats() -> dict[int, dict]:
                 "gpu": gpu_util,
                 "mem": gpu_mem_util,
             }
-    except:
+    except Exception:
+        pass
+    finally:
         return results
-
-    return results


 def ffprobe_stream(path: str) -> sp.CompletedProcess:
     """Run ffprobe on stream."""
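The except changes above all follow the same reasoning; here is a generic illustration (not Frigate code) of why the bare clauses were narrowed. A bare except: also swallows KeyboardInterrupt and SystemExit, while except (IndexError, ValueError) documents exactly which parse failures are expected and lets everything else propagate. (The r"..." prefixes added to the regexes are the same kind of hygiene: they stop sequences like \/ and \d from being treated as string escapes.)

# Illustrative only: parse one bandwidth-style row, skipping malformed rows
# without hiding interrupts or unrelated bugs.
def parse_row(stats):
    try:
        return float(stats[1]) + float(stats[2])
    except (IndexError, ValueError):
        # malformed row: skip it, but let Ctrl-C / interpreter shutdown through
        return None


print(parse_row(["ffmpeg/123/0", "10.5", "2.0"]))  # 12.5
print(parse_row(["garbage"]))                      # None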
@@ -10,16 +10,17 @@ import threading
 import time
 from collections import defaultdict

-import numpy as np
 import cv2
+import numpy as np
 from setproctitle import setproctitle

 from frigate.config import CameraConfig, DetectConfig, PixelFormatEnum
 from frigate.const import CACHE_DIR
-from frigate.object_detection import RemoteObjectDetector
 from frigate.log import LogPipe
 from frigate.motion import MotionDetector
-from frigate.objects import ObjectTracker
+from frigate.object_detection import RemoteObjectDetector
+from frigate.track import ObjectTracker
+from frigate.track.norfair_tracker import NorfairTracker
 from frigate.util import (
     EventsPerSecond,
     FrameManager,
@@ -30,8 +31,8 @@ from frigate.util import (
     intersection,
     intersection_over_union,
     listen,
-    yuv_region_2_rgb,
     yuv_region_2_bgr,
+    yuv_region_2_rgb,
     yuv_region_2_yuv,
 )

@@ -45,7 +46,7 @@ def filtered(obj, objects_to_track, object_filters):
     object_area = obj[3]
     object_ratio = obj[4]

-    if not object_name in objects_to_track:
+    if object_name not in objects_to_track:
         return True

     if object_name in object_filters:
@@ -73,7 +74,7 @@ def filtered(obj, objects_to_track, object_filters):
         if obj_settings.max_ratio < object_ratio:
             return True

-        if not obj_settings.mask is None:
+        if obj_settings.mask is not None:
             # compute the coordinates of the object and make sure
             # the location isn't outside the bounds of the image (can happen from rounding)
             object_xmin = object_box[0]
@@ -169,20 +170,20 @@ def capture_frames(
     skipped_eps.start()
     while True:
         fps.value = frame_rate.eps()
-        skipped_fps = skipped_eps.eps()
+        skipped_eps.eps()

         current_frame.value = datetime.datetime.now().timestamp()
         frame_name = f"{camera_name}{current_frame.value}"
         frame_buffer = frame_manager.create(frame_name, frame_size)
         try:
             frame_buffer[:] = ffmpeg_process.stdout.read(frame_size)
-        except Exception as e:
+        except Exception:
             # shutdown has been initiated
             if stop_event.is_set():
                 break
             logger.error(f"{camera_name}: Unable to read frames from ffmpeg process.")

-            if ffmpeg_process.poll() != None:
+            if ffmpeg_process.poll() is not None:
                 logger.error(
                     f"{camera_name}: ffmpeg process is not running. exiting capture thread..."
                 )
@@ -472,7 +473,7 @@ def track_camera(
         name, labelmap, detection_queue, result_connection, model_config, stop_event
     )

-    object_tracker = ObjectTracker(config.detect)
+    object_tracker = NorfairTracker(config.detect)

     frame_manager = SharedMemoryFrameManager()

@@ -604,7 +605,7 @@ def process_frames(

     while not stop_event.is_set():
         if exit_on_empty and frame_queue.empty():
-            logger.info(f"Exiting track_objects...")
+            logger.info("Exiting track_objects...")
             break

         try:
@@ -655,7 +656,7 @@ def process_frames(
             tracked_object_boxes = [
                 obj["box"]
                 for obj in object_tracker.tracked_objects.values()
-                if not obj["id"] in stationary_object_ids
+                if obj["id"] not in stationary_object_ids
             ]

             # combine motion boxes with known locations of existing objects
@@ -847,6 +848,17 @@ def process_frames(
         else:
             object_tracker.update_frame_times(frame_time)

+        # debug tracking by writing frames
+        if False:
+            bgr_frame = cv2.cvtColor(
+                frame,
+                cv2.COLOR_YUV2BGR_I420,
+            )
+            object_tracker.debug_draw(bgr_frame, frame_time)
+            cv2.imwrite(
+                f"debug/frames/track-{'{:.6f}'.format(frame_time)}.jpg", bgr_frame
+            )
+
         # add to the queue if not full
         if detected_objects_queue.full():
             frame_manager.delete(f"{camera_name}{frame_time}")
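The debug block above is committed behind if False: and has to be edited by hand to enable. A hypothetical variation (not part of this change) that wraps the same dump in a helper gated on an environment variable; the FRIGATE_DEBUG_TRACKING name is invented for illustration.

import os

import cv2
import numpy as np


def maybe_dump_debug_frame(object_tracker, yuv_frame: np.ndarray, frame_time: float) -> None:
    # no-op unless the (hypothetical) env var is set, so it is safe to call every frame
    if os.environ.get("FRIGATE_DEBUG_TRACKING") != "1":
        return
    # convert the I420 frame to BGR, draw the tracker state, and write it out
    bgr_frame = cv2.cvtColor(yuv_frame, cv2.COLOR_YUV2BGR_I420)
    object_tracker.debug_draw(bgr_frame, frame_time)
    cv2.imwrite(f"debug/frames/track-{frame_time:.6f}.jpg", bgr_frame)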
@ -2,12 +2,10 @@ import datetime
|
|||||||
import logging
|
import logging
|
||||||
import threading
|
import threading
|
||||||
import time
|
import time
|
||||||
import os
|
from multiprocessing.synchronize import Event as MpEvent
|
||||||
import signal
|
|
||||||
|
|
||||||
from frigate.object_detection import ObjectDetectProcess
|
from frigate.object_detection import ObjectDetectProcess
|
||||||
from frigate.util import restart_frigate
|
from frigate.util import restart_frigate
|
||||||
from multiprocessing.synchronize import Event as MpEvent
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@ -39,4 +37,4 @@ class FrigateWatchdog(threading.Thread):
|
|||||||
logger.info("Detection appears to have stopped. Exiting Frigate...")
|
logger.info("Detection appears to have stopped. Exiting Frigate...")
|
||||||
restart_frigate()
|
restart_frigate()
|
||||||
|
|
||||||
logger.info(f"Exiting watchdog...")
|
logger.info("Exiting watchdog...")
|
||||||
@@ -21,14 +21,7 @@ Some examples (model - class or model name)::

 """

-import datetime as dt
 import peewee as pw
-from decimal import ROUND_HALF_EVEN
-
-try:
-    import playhouse.postgres_ext as pw_pext
-except ImportError:
-    pass

 SQL = pw.SQL
@@ -21,15 +21,9 @@ Some examples (model - class or model name)::

 """

-import datetime as dt
 import peewee as pw
-from decimal import ROUND_HALF_EVEN
-from frigate.models import Event

-try:
-    import playhouse.postgres_ext as pw_pext
-except ImportError:
-    pass
+from frigate.models import Event

 SQL = pw.SQL
@@ -22,8 +22,6 @@ Some examples (model - class or model name)::
 """
 import peewee as pw

-from frigate.models import Recordings
-
 SQL = pw.SQL
@@ -21,16 +21,10 @@ Some examples (model - class or model name)::

 """

-import datetime as dt
 import peewee as pw
-from playhouse.sqlite_ext import *
+from playhouse.sqlite_ext import JSONField
-from decimal import ROUND_HALF_EVEN
-from frigate.models import Event

-try:
-    import playhouse.postgres_ext as pw_pext
-except ImportError:
-    pass
+from frigate.models import Event

 SQL = pw.SQL
@@ -21,16 +21,9 @@ Some examples (model - class or model name)::

 """

-import datetime as dt
 import peewee as pw
-from playhouse.sqlite_ext import *
-from decimal import ROUND_HALF_EVEN
-from frigate.models import Event

-try:
-    import playhouse.postgres_ext as pw_pext
-except ImportError:
-    pass
+from frigate.models import Event

 SQL = pw.SQL
@@ -21,16 +21,9 @@ Some examples (model - class or model name)::

 """

-import datetime as dt
 import peewee as pw
-from playhouse.sqlite_ext import *
-from decimal import ROUND_HALF_EVEN
-from frigate.models import Recordings

-try:
-    import playhouse.postgres_ext as pw_pext
-except ImportError:
-    pass
+from frigate.models import Recordings

 SQL = pw.SQL
(The same "@@ -21,16 +21,9 @@" import cleanup shown above is repeated verbatim in two further Event-based migration files.)
@@ -22,6 +22,7 @@ Some examples (model - class or model name)::
 """
+
 import peewee as pw

 from frigate.models import Event

 SQL = pw.SQL
(The identical import cleanup is repeated verbatim in two more migration files: one importing Event, one importing Recordings.)
@@ -21,16 +21,7 @@ Some examples (model - class or model name)::

 """

-import datetime as dt
 import peewee as pw
-from playhouse.sqlite_ext import *
-from decimal import ROUND_HALF_EVEN
-from frigate.models import Recordings
-
-try:
-    import playhouse.postgres_ext as pw_pext
-except ImportError:
-    pass

 SQL = pw.SQL

@@ -39,9 +30,15 @@ def migrate(migrator, database, fake=False, **kwargs):
     migrator.sql(
         'CREATE TABLE IF NOT EXISTS "timeline" ("timestamp" DATETIME NOT NULL, "camera" VARCHAR(20) NOT NULL, "source" VARCHAR(20) NOT NULL, "source_id" VARCHAR(30), "class_type" VARCHAR(50) NOT NULL, "data" JSON)'
     )
-    migrator.sql('CREATE INDEX IF NOT EXISTS "timeline_camera" ON "timeline" ("camera")')
-    migrator.sql('CREATE INDEX IF NOT EXISTS "timeline_source" ON "timeline" ("source")')
-    migrator.sql('CREATE INDEX IF NOT EXISTS "timeline_source_id" ON "timeline" ("source_id")')
+    migrator.sql(
+        'CREATE INDEX IF NOT EXISTS "timeline_camera" ON "timeline" ("camera")'
+    )
+    migrator.sql(
+        'CREATE INDEX IF NOT EXISTS "timeline_source" ON "timeline" ("source")'
+    )
+    migrator.sql(
+        'CREATE INDEX IF NOT EXISTS "timeline_source_id" ON "timeline" ("source_id")'
+    )


 def rollback(migrator, database, fake=False, **kwargs):
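For orientation, the columns created above map naturally onto a peewee model. This is only a sketch read off the CREATE TABLE statement; the real model ships in frigate.models and may differ in details.

import peewee as pw
from playhouse.sqlite_ext import JSONField


class Timeline(pw.Model):
    # field names/types follow the "timeline" CREATE TABLE above
    timestamp = pw.DateTimeField()
    camera = pw.CharField(max_length=20, index=True)
    source = pw.CharField(max_length=20, index=True)
    source_id = pw.CharField(max_length=30, null=True, index=True)
    class_type = pw.CharField(max_length=50)
    data = JSONField(null=True)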
(Two more migration files receive the same verbatim import cleanup: one the Event variant shown earlier, one the JSONField variant.)
@@ -1,6 +1,5 @@
 import peewee as pw
-from playhouse.migrate import *
-from playhouse.sqlite_ext import *
+
 from frigate.models import Event
@@ -1,8 +1,4 @@
-import sys
-from typing_extensions import runtime
-
-sys.path.append("/lab/frigate")
-
+import csv
 import json
 import logging
 import multiprocessing as mp
@@ -11,21 +7,26 @@ import subprocess as sp
 import sys

 import click
-import csv
 import cv2
 import numpy as np

-from frigate.config import FrigateConfig
-from frigate.object_detection import LocalObjectDetector
-from frigate.motion import MotionDetector
-from frigate.object_processing import CameraState
-from frigate.objects import ObjectTracker
-from frigate.util import (
+sys.path.append("/workspace/frigate")
+
+from frigate.config import FrigateConfig  # noqa: E402
+from frigate.motion import MotionDetector  # noqa: E402
+from frigate.object_detection import LocalObjectDetector  # noqa: E402
+from frigate.object_processing import CameraState  # noqa: E402
+from frigate.track.centroid_tracker import CentroidTracker  # noqa: E402
+from frigate.util import (  # noqa: E402
     EventsPerSecond,
     SharedMemoryFrameManager,
     draw_box_with_label,
 )
-from frigate.video import capture_frames, process_frames, start_or_restart_ffmpeg
+from frigate.video import (  # noqa: E402
+    capture_frames,
+    process_frames,
+    start_or_restart_ffmpeg,
+)

 logging.basicConfig()
 logging.root.setLevel(logging.DEBUG)
@@ -107,7 +108,7 @@ class ProcessClip:
         motion_detector = MotionDetector(self.frame_shape, self.camera_config.motion)
         motion_detector.save_images = False

-        object_tracker = ObjectTracker(self.camera_config.detect)
+        object_tracker = CentroidTracker(self.camera_config.detect)
         process_info = {
             "process_fps": mp.Value("d", 0.0),
             "detection_fps": mp.Value("d", 0.0),
@@ -247,7 +248,7 @@ def process(path, label, output, debug_path):
             clips.append(path)

     json_config = {
-        "mqtt": {"host": "mqtt"},
+        "mqtt": {"enabled": False},
         "detectors": {"coral": {"type": "edgetpu", "device": "usb"}},
         "cameras": {
             "camera": {
@@ -281,7 +282,7 @@ def process(path, label, output, debug_path):
         json_config["cameras"]["camera"]["ffmpeg"]["inputs"][0]["path"] = c

         frigate_config = FrigateConfig(**json_config)
-        runtime_config = frigate_config.runtime_config
+        runtime_config = frigate_config.runtime_config()
         runtime_config.cameras["camera"].create_ffmpeg_cmds()

         process_clip = ProcessClip(c, frame_shape, runtime_config)
@@ -310,7 +311,6 @@ def process(path, label, output, debug_path):

     for result in results:
         if count == 0:
-
             # Writing headers of CSV file
             header = ["file"] + list(result[1].keys())
             csv_writer.writerow(header)
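The # noqa: E402 comments above are there because sys.path.append() now runs before the frigate imports, so those imports are no longer at the top of the file and ruff flags each one as E402. A minimal, stdlib-only illustration of the pattern:

import sys

sys.path.append("/workspace/frigate")  # any statement before an import counts as "code before imports"

import json  # noqa: E402  (E402: module level import not at top of file)

print(json.dumps({"ok": True}))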
pyproject.toml (new file, 5 lines)

[tool.isort]
profile = "black"

[tool.ruff]
ignore = ["E501"]
@@ -1,2 +1,3 @@
-pylint == 2.17.*
 black == 23.3.*
+isort
+ruff
@@ -8,17 +8,18 @@ onvif_zeep == 0.2.12
 opencv-python-headless == 4.5.5.*
 paho-mqtt == 1.6.*
 peewee == 3.16.*
-peewee_migrate == 1.7.*
+peewee_migrate == 1.10.*
 psutil == 5.9.*
 pydantic == 1.10.*
 git+https://github.com/fbcotter/py3nvml#egg=py3nvml
 PyYAML == 6.0
 pytz == 2023.3
-tzlocal == 4.3
+tzlocal == 5.0.*
 types-PyYAML == 6.0.*
-requests == 2.30.*
+requests == 2.31.*
-types-requests == 2.28.*
+types-requests == 2.31.*
 scipy == 1.10.*
+norfair == 2.2.*
 setproctitle == 1.3.*
 ws4py == 0.5.*
 # Openvino Library - Custom built with MYRIAD support
web/package-lock.json (generated, 940 lines changed; diff suppressed because it is too large)
@@ -44,6 +44,7 @@ export default function Sidebar() {
       </Match>
       {birdseye?.enabled ? <Destination href="/birdseye" text="Birdseye" /> : null}
       <Destination href="/events" text="Events" />
+      <Destination href="/exports" text="Exports" />
       <Separator />
       <Destination href="/storage" text="Storage" />
       <Destination href="/system" text="System" />
@@ -31,6 +31,7 @@ export default function App() {
             <AsyncRoute path="/cameras/:camera" getComponent={cameraComponent} />
             <AsyncRoute path="/birdseye" getComponent={Routes.getBirdseye} />
             <AsyncRoute path="/events" getComponent={Routes.getEvents} />
+            <AsyncRoute path="/exports" getComponent={Routes.getExports} />
             <AsyncRoute
               path="/recording/:camera/:date?/:hour?/:minute?/:second?"
               getComponent={Routes.getRecording}
web/src/routes/Export.jsx (new file, 83 lines)

import { h } from 'preact';
import Heading from '../components/Heading';
import { useState } from 'preact/hooks';
import useSWR from 'swr';
import Button from '../components/Button';
import axios from 'axios';

export default function Export() {
  const { data: config } = useSWR('config');

  const [camera, setCamera] = useState('select');
  const [playback, setPlayback] = useState('select');
  const [message, setMessage] = useState({ text: '', error: false });

  const onHandleExport = () => {
    if (camera == 'select') {
      setMessage({ text: 'A camera needs to be selected.', error: true });
      return;
    }

    if (playback == 'select') {
      setMessage({ text: 'A playback factor needs to be selected.', error: true });
      return;
    }

    const start = new Date(document.getElementById('start').value).getTime() / 1000;
    const end = new Date(document.getElementById('end').value).getTime() / 1000;

    if (!start || !end) {
      setMessage({ text: 'A start and end time needs to be selected', error: true });
      return;
    }

    setMessage({ text: 'Successfully started export. View the file in the /exports folder.', error: false });
    axios.post(`export/${camera}/start/${start}/end/${end}`, { playback });
  };

  return (
    <div className="space-y-4 p-2 px-4 w-full">
      <Heading>Export</Heading>

      {message.text && (
        <div className={`max-h-20 ${message.error ? 'text-red-500' : 'text-green-500'}`}>{message.text}</div>
      )}

      <div>
        <select
          className="me-2 cursor-pointer rounded dark:bg-slate-800"
          value={camera}
          onChange={(e) => setCamera(e.target.value)}
        >
          <option value="select">Select A Camera</option>
          {Object.keys(config?.cameras || {}).map((item) => (
            <option key={item} value={item}>
              {item.replaceAll('_', ' ')}
            </option>
          ))}
        </select>
        <select
          className="ms-2 cursor-pointer rounded dark:bg-slate-800"
          value={playback}
          onChange={(e) => setPlayback(e.target.value)}
        >
          <option value="select">Select A Playback Factor</option>
          <option value="realtime">Realtime</option>
          <option value="timelapse_25x">Timelapse</option>
        </select>
      </div>

      <div>
        <Heading className="py-2" size="sm">
          From:
        </Heading>
        <input className="dark:bg-slate-800" id="start" type="datetime-local" />
        <Heading className="py-2" size="sm">
          To:
        </Heading>
        <input className="dark:bg-slate-800" id="end" type="datetime-local" />
      </div>
      <Button onClick={() => onHandleExport()}>Submit</Button>
    </div>
  );
}
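The component above ultimately just POSTs to the export endpoint, so the same export can be triggered from a script. A hedged sketch using requests: the host, port, and /api prefix are assumptions about a typical install, while the path segments and playback values mirror the axios.post() call and the <select> options above.

from datetime import datetime, timedelta

import requests

base_url = "http://frigate.local:5000/api"  # assumed; adjust for your install
camera = "back_yard"                        # must match a configured camera name

# export the last hour as a realtime clip
end = datetime.now()
start = end - timedelta(hours=1)

resp = requests.post(
    f"{base_url}/export/{camera}/start/{start.timestamp()}/end/{end.timestamp()}",
    json={"playback": "realtime"},  # or "timelapse_25x", per the options above
    timeout=10,
)
print(resp.status_code, resp.text)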
@@ -23,6 +23,11 @@ export async function getEvents(_url, _cb, _props) {
   return module.default;
 }

+export async function getExports(_url, _cb, _props) {
+  const module = await import('./Export.jsx');
+  return module.default;
+}
+
 export async function getRecording(_url, _cb, _props) {
   const module = await import('./Recording.jsx');
   return module.default;