Mirror of https://github.com/blakeblackshear/frigate.git (synced 2026-02-05 10:45:21 +03:00)

Merge remote-tracking branch 'origin/dev' into prometheus-metrics

Commit 03222bff1e
@@ -52,7 +52,9 @@
       "mikestead.dotenv",
       "csstools.postcss",
       "blanu.vscode-styled-jsx",
-      "bradlc.vscode-tailwindcss"
+      "bradlc.vscode-tailwindcss",
+      "ms-python.isort",
+      "charliermarsh.ruff"
     ],
     "settings": {
       "remote.autoForwardPorts": false,
@@ -68,6 +70,7 @@
       "python.testing.unittestArgs": ["-v", "-s", "./frigate/test"],
       "files.trimTrailingWhitespace": true,
       "eslint.workingDirectories": ["./web"],
+      "isort.args": ["--settings-path=./pyproject.toml"],
       "[python]": {
         "editor.defaultFormatter": "ms-python.black-formatter",
         "editor.formatOnSave": true
@@ -2,6 +2,12 @@

 set -euxo pipefail

+# Cleanup the old github host key
+sed -i -e '/AAAAB3NzaC1yc2EAAAABIwAAAQEAq2A7hRGmdnm9tUDbO9IDSwBK6TbQa+PXYPCPy6rbTrTtw7PHkccKrpp0yVhp5HdEIcKr6pLlVDBfOLX9QUsyCOV0wzfjIJNlGEYsdlLJizHhbn2mUjvSAHQqZETYP81eFzLQNnPHt4EVVUh7VfDESU84KezmD5QlWpXLmvU31\/yMf+Se8xhHTvKSCZIFImWwoG6mbUoWf9nzpIoaSjB+weqqUUmpaaasXVal72J+UX2B+2RPW3RcT0eOzQgqlJL3RKrTJvdsjE3JEAvGq3lGHSZXy28G3skua2SmVi\/w4yCE6gbODqnTWlg7+wC604ydGXA8VJiS5ap43JXiUFFAaQ==/d' ~/.ssh/known_hosts
+# Add new github host key
+curl -L https://api.github.com/meta | jq -r '.ssh_keys | .[]' | \
+  sed -e 's/^/github.com /' >> ~/.ssh/known_hosts
+
 # Frigate normal container runs as root, so it have permission to create
 # the folders. But the devcontainer runs as the host user, so we need to
 # create the folders and give the host user permission to write to them.
.github/workflows/pull_request.yml (vendored, 16 lines changed)

@@ -65,16 +65,22 @@ jobs:
       - name: Check out the repository
         uses: actions/checkout@v3
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v4.6.0
+        uses: actions/setup-python@v4.6.1
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
       - name: Install requirements
         run: |
-          pip install pip
-          pip install -r requirements-dev.txt
-      - name: Lint
+          python3 -m pip install -U pip
+          python3 -m pip install -r requirements-dev.txt
+      - name: Check black
         run: |
-          python3 -m black frigate --check
+          black --check --diff frigate migrations docker *.py
+      - name: Check isort
+        run: |
+          isort --check --diff frigate migrations docker *.py
+      - name: Check ruff
+        run: |
+          ruff check frigate migrations docker *.py

   python_tests:
     runs-on: ubuntu-latest
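As an illustration only (not part of the commit): the lint steps in the workflow above could be reproduced locally with commands like the following, assuming a checkout of the repo with Python 3 and `requirements-dev.txt` available.

```bash
# Install the dev tooling the workflow uses (black, isort, ruff, etc.)
python3 -m pip install -U pip
python3 -m pip install -r requirements-dev.txt

# Mirror the CI lint steps shown in the workflow diff
black --check --diff frigate migrations docker *.py
isort --check --diff frigate migrations docker *.py
ruff check frigate migrations docker *.py
```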
@@ -27,7 +27,7 @@ RUN --mount=type=tmpfs,target=/tmp --mount=type=tmpfs,target=/var/cache/apt \
 FROM wget AS go2rtc
 ARG TARGETARCH
 WORKDIR /rootfs/usr/local/go2rtc/bin
-RUN wget -qO go2rtc "https://github.com/AlexxIT/go2rtc/releases/download/v1.2.0/go2rtc_linux_${TARGETARCH}" \
+RUN wget -qO go2rtc "https://github.com/AlexxIT/go2rtc/releases/download/v1.5.0/go2rtc_linux_${TARGETARCH}" \
     && chmod +x go2rtc


@@ -227,8 +227,8 @@ CMD ["sleep", "infinity"]


 # Frigate web build
-# force this to run on amd64 because QEMU is painfully slow
-FROM --platform=linux/amd64 node:16 AS web-build
+# This should be architecture agnostic, so speed up the build on multiarch by not using QEMU.
+FROM --platform=$BUILDPLATFORM node:16 AS web-build

 WORKDIR /work
 COPY web/package.json web/package-lock.json ./
benchmark.py (12 lines changed)

@@ -1,11 +1,11 @@
-import os
-from statistics import mean
-import multiprocessing as mp
-import numpy as np
 import datetime
+import multiprocessing as mp
+from statistics import mean
+
+import numpy as np
+
 from frigate.config import DetectorTypeEnum
 from frigate.object_detection import (
-    LocalObjectDetector,
     ObjectDetectProcess,
     RemoteObjectDetector,
     load_labels,
@@ -53,7 +53,7 @@ def start(id, num_detections, detection_queue, event):
     frame_times = []
     for x in range(0, num_detections):
        start_frame = datetime.datetime.now().timestamp()
-        detections = object_detector.detect(my_frame)
+        object_detector.detect(my_frame)
        frame_times.append(datetime.datetime.now().timestamp() - start_frame)

    duration = datetime.datetime.now().timestamp() - start
@@ -12,13 +12,15 @@ apt-get -qq install --no-install-recommends -y \
     unzip locales tzdata libxml2 xz-utils \
     python3-pip \
     curl \
-    jq
+    jq \
+    nethogs

 mkdir -p -m 600 /root/.gnupg

 # add coral repo
-wget --quiet -O /usr/share/keyrings/google-edgetpu.gpg https://packages.cloud.google.com/apt/doc/apt-key.gpg
-echo "deb [signed-by=/usr/share/keyrings/google-edgetpu.gpg] https://packages.cloud.google.com/apt coral-edgetpu-stable main" | tee /etc/apt/sources.list.d/coral-edgetpu.list
+curl -fsSLo - https://packages.cloud.google.com/apt/doc/apt-key.gpg | \
+    gpg --dearmor -o /etc/apt/trusted.gpg.d/google-cloud-packages-archive-keyring.gpg
+echo "deb https://packages.cloud.google.com/apt coral-edgetpu-stable main" | tee /etc/apt/sources.list.d/coral-edgetpu.list
 echo "libedgetpu1-max libedgetpu/accepted-eula select true" | debconf-set-selections

 # enable non-free repo
@@ -3,11 +3,14 @@
 import json
 import os
 import sys

 import yaml

 sys.path.insert(0, "/opt/frigate")
-from frigate.const import BIRDSEYE_PIPE, BTBN_PATH
-from frigate.ffmpeg_presets import parse_preset_hardware_acceleration_encode
+from frigate.const import BIRDSEYE_PIPE, BTBN_PATH  # noqa: E402
+from frigate.ffmpeg_presets import (  # noqa: E402
+    parse_preset_hardware_acceleration_encode,
+)
+
 sys.path.remove("/opt/frigate")

@@ -2,4 +2,4 @@

 This website is built using [Docusaurus 2](https://v2.docusaurus.io/), a modern static website generator.

-For installation and contributing instructions, please follow the [Contributing Docs](https://blakeblackshear.github.io/frigate/contributing).
+For installation and contributing instructions, please follow the [Contributing Docs](https://docs.frigate.video/development/contributing).
@@ -107,3 +107,14 @@ To do this:
 3. Restart Frigate and the custom version will be used if the mapping was done correctly.

 NOTE: The folder that is mapped from the host needs to be the folder that contains `/bin`. So if the full structure is `/home/appdata/frigate/custom-ffmpeg/bin/ffmpeg` then `/home/appdata/frigate/custom-ffmpeg` needs to be mapped to `/usr/lib/btbn-ffmpeg`.
+
+## Custom go2rtc version
+
+Frigate currently includes go2rtc v1.5.0, there may be certain cases where you want to run a different version of go2rtc.
+
+To do this:
+
+1. Download the go2rtc build to the /config folder.
+2. Rename the build to `go2rtc`.
+3. Give `go2rtc` execute permission.
+4. Restart Frigate and the custom version will be used, you can verify by checking go2rtc logs.
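As an illustrative sketch (not part of the commit): the four steps added above could look roughly like this from a shell on the host. The release version, host path, and amd64 architecture below are assumptions, not values taken from the diff.

```bash
# Illustrative only; version and paths are placeholders.
GO2RTC_VERSION="v1.5.0"                 # pick the release you want to run
CONFIG_DIR="/path/to/frigate/config"    # host folder mapped to /config in the container

cd "$CONFIG_DIR"

# Steps 1 and 2: download the build for your architecture and name it `go2rtc`
wget -O go2rtc "https://github.com/AlexxIT/go2rtc/releases/download/${GO2RTC_VERSION}/go2rtc_linux_amd64"

# Step 3: give it execute permission
chmod +x go2rtc

# Step 4: restart Frigate, then verify the version in the go2rtc logs
```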
@@ -141,7 +141,7 @@ go2rtc:
       - rtspx://192.168.1.1:7441/abcdefghijk
 ```

-[See the go2rtc docs for more information](https://github.com/AlexxIT/go2rtc/tree/v1.2.0#source-rtsp)
+[See the go2rtc docs for more information](https://github.com/AlexxIT/go2rtc/tree/v1.5.0#source-rtsp)

 In the Unifi 2.0 update Unifi Protect Cameras had a change in audio sample rate which causes issues for ffmpeg. The input rate needs to be set for record and rtmp if used directly with unifi protect.
@@ -198,7 +198,7 @@ To generate model files, create a new folder to save the models, download the sc

 ```bash
 mkdir trt-models
-wget https://raw.githubusercontent.com/blakeblackshear/frigate/docker/tensorrt_models.sh
+wget https://github.com/blakeblackshear/frigate/raw/master/docker/tensorrt_models.sh
 chmod +x tensorrt_models.sh
 docker run --gpus=all --rm -it -v `pwd`/trt-models:/tensorrt_models -v `pwd`/tensorrt_models.sh:/tensorrt_models.sh nvcr.io/nvidia/tensorrt:22.07-py3 /tensorrt_models.sh
 ```
@@ -15,7 +15,23 @@ ffmpeg:
   hwaccel_args: preset-rpi-64-h264
 ```

-### Intel-based CPUs (<10th Generation) via VAAPI
+:::note

+If running Frigate in docker, you either need to run in privileged mode or be sure to map the /dev/video1x devices to Frigate
+
+```yaml
+docker run -d \
+  --name frigate \
+  ...
+  --device /dev/video10 \
+  ghcr.io/blakeblackshear/frigate:stable
+```
+
+:::
+
+### Intel-based CPUs
+
+#### Via VAAPI
+
 VAAPI supports automatic profile selection so it will work automatically with both H.264 and H.265 streams. VAAPI is recommended for all generations of Intel-based CPUs if QSV does not work.

@@ -26,24 +42,89 @@ ffmpeg:

 **NOTICE**: With some of the processors, like the J4125, the default driver `iHD` doesn't seem to work correctly for hardware acceleration. You may need to change the driver to `i965` by adding the following environment variable `LIBVA_DRIVER_NAME=i965` to your docker-compose file or [in the `frigate.yaml` for HA OS users](advanced.md#environment_vars).

-### Intel-based CPUs (>=10th Generation) via Quicksync
+#### Via Quicksync (>=10th Generation only)

 QSV must be set specifically based on the video encoding of the stream.

-#### H.264 streams
+##### H.264 streams

 ```yaml
 ffmpeg:
   hwaccel_args: preset-intel-qsv-h264
 ```

-#### H.265 streams
+##### H.265 streams

 ```yaml
 ffmpeg:
   hwaccel_args: preset-intel-qsv-h265
 ```

+#### Configuring Intel GPU Stats in Docker
+
+Additional configuration is needed for the Docker container to be able to access the `intel_gpu_top` command for GPU stats. Three possible changes can be made:
+
+1. Run the container as privileged.
+2. Adding the `CAP_PERFMON` capability.
+3. Setting the `perf_event_paranoid` low enough to allow access to the performance event system.
+
+##### Run as privileged
+
+This method works, but it gives more permissions to the container than are actually needed.
+
+###### Docker Compose - Privileged
+
+```yaml
+services:
+  frigate:
+    ...
+    image: ghcr.io/blakeblackshear/frigate:stable
+    privileged: true
+```
+
+###### Docker Run CLI - Privileged
+
+```bash
+docker run -d \
+  --name frigate \
+  ...
+  --privileged \
+  ghcr.io/blakeblackshear/frigate:stable
+```
+
+##### CAP_PERFMON
+
+Only recent versions of Docker support the `CAP_PERFMON` capability. You can test to see if yours supports it by running: `docker run --cap-add=CAP_PERFMON hello-world`
+
+###### Docker Compose - CAP_PERFMON
+
+```yaml
+services:
+  frigate:
+    ...
+    image: ghcr.io/blakeblackshear/frigate:stable
+    cap_add:
+      - CAP_PERFMON
+```
+
+###### Docker Run CLI - CAP_PERFMON
+
+```bash
+docker run -d \
+  --name frigate \
+  ...
+  --cap-add=CAP_PERFMON \
+  ghcr.io/blakeblackshear/frigate:stable
+```
+
+##### perf_event_paranoid
+
+_Note: This setting must be changed for the entire system._
+
+For more information on the various values across different distributions, see https://askubuntu.com/questions/1400874/what-does-perf-paranoia-level-four-do.
+
+Depending on your OS and kernel configuration, you may need to change the `/proc/sys/kernel/perf_event_paranoid` kernel tunable. You can test the change by running `sudo sh -c 'echo 2 >/proc/sys/kernel/perf_event_paranoid'` which will persist until a reboot. Make it permanent by running `sudo sh -c 'echo kernel.perf_event_paranoid=1 >> /etc/sysctl.d/local.conf'`
+
 ### AMD/ATI GPUs (Radeon HD 2000 and newer GPUs) via libva-mesa-driver

 VAAPI supports automatic profile selection so it will work automatically with both H.264 and H.265 streams.
@@ -59,15 +140,15 @@ ffmpeg:

 While older GPUs may work, it is recommended to use modern, supported GPUs. NVIDIA provides a [matrix of supported GPUs and features](https://developer.nvidia.com/video-encode-and-decode-gpu-support-matrix-new). If your card is on the list and supports CUVID/NVDEC, it will most likely work with Frigate for decoding. However, you must also use [a driver version that will work with FFmpeg](https://github.com/FFmpeg/nv-codec-headers/blob/master/README). Older driver versions may be missing symbols and fail to work, and older cards are not supported by newer driver versions. The only way around this is to [provide your own FFmpeg](/configuration/advanced#custom-ffmpeg-build) that will work with your driver version, but this is unsupported and may not work well if at all.

-A more complete list of cards and ther compatible drivers is available in the [driver release readme](https://download.nvidia.com/XFree86/Linux-x86_64/525.85.05/README/supportedchips.html).
+A more complete list of cards and their compatible drivers is available in the [driver release readme](https://download.nvidia.com/XFree86/Linux-x86_64/525.85.05/README/supportedchips.html).

 If your distribution does not offer NVIDIA driver packages, you can [download them here](https://www.nvidia.com/en-us/drivers/unix/).

-#### Docker Configuration
+#### Configuring Nvidia GPUs in Docker

 Additional configuration is needed for the Docker container to be able to access the NVIDIA GPU. The supported method for this is to install the [NVIDIA Container Toolkit](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/install-guide.html#docker) and specify the GPU to Docker. How you do this depends on how Docker is being run:

-##### Docker Compose
+##### Docker Compose - Nvidia GPU

 ```yaml
 services:
@@ -84,7 +165,7 @@ services:
           capabilities: [gpu]
 ```

-##### Docker Run CLI
+##### Docker Run CLI - Nvidia GPU

 ```bash
 docker run -d \
@@ -377,7 +377,7 @@ rtmp:
   enabled: False

 # Optional: Restream configuration
-# Uses https://github.com/AlexxIT/go2rtc (v1.2.0)
+# Uses https://github.com/AlexxIT/go2rtc (v1.5.0)
 go2rtc:

 # Optional: jsmpeg stream configuration for WebUI
@@ -78,6 +78,8 @@ WebRTC works by creating a TCP or UDP connection on port `8555`. However, it req
       - 192.168.1.10:8555
       - stun:8555
 ```

+- For access through Tailscale, the Frigate system's Tailscale IP must be added as a WebRTC candidate. Tailscale IPs all start with `100.`, and are reserved within the `100.0.0.0/8` CIDR block.
+
 :::tip

@@ -97,8 +99,20 @@ However, it is recommended if issues occur to define the candidates manually. Yo
 If you are having difficulties getting WebRTC to work and you are running Frigate with docker, you may want to try changing the container network mode:

 - `network: host`, in this mode you don't need to forward any ports. The services inside of the Frigate container will have full access to the network interfaces of your host machine as if they were running natively and not in a container. Any port conflicts will need to be resolved. This network mode is recommended by go2rtc, but we recommend you only use it if necessary.
-- `network: bridge` creates a virtual network interface for the container, and the container will have full access to it. You also don't need to forward any ports, however, the IP for accessing Frigate locally will differ from the IP of the host machine. Your router will see Frigate as if it was a new device connected in the network.
+- `network: bridge` is the default network driver, a bridge network is a Link Layer device which forwards traffic between network segments. You need to forward any ports that you want to be accessible from the host IP.

+If not running in host mode, port 8555 will need to be mapped for the container:
+
+docker-compose.yml
+
+```yaml
+services:
+  frigate:
+    ...
+    ports:
+      - "8555:8555/tcp" # WebRTC over tcp
+      - "8555:8555/udp" # WebRTC over udp
+```
+
 :::

-See [go2rtc WebRTC docs](https://github.com/AlexxIT/go2rtc/tree/v1.2.0#module-webrtc) for more information about this.
+See [go2rtc WebRTC docs](https://github.com/AlexxIT/go2rtc/tree/v1.5.0#module-webrtc) for more information about this.
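For illustration only (not part of the commit): the `docker run` equivalent of the compose port mapping added above might look like the sketch below; the image tag is taken from the diff, everything else (volumes, shm-size, config) is omitted and assumed to be supplied separately.

```bash
docker run -d \
  --name frigate \
  -p 8555:8555/tcp \
  -p 8555:8555/udp \
  ghcr.io/blakeblackshear/frigate:stable   # other required flags omitted for brevity
```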
@@ -7,7 +7,7 @@ title: Restream

 Frigate can restream your video feed as an RTSP feed for other applications such as Home Assistant to utilize it at `rtsp://<frigate_host>:8554/<camera_name>`. Port 8554 must be open. [This allows you to use a video feed for detection in Frigate and Home Assistant live view at the same time without having to make two separate connections to the camera](#reduce-connections-to-camera). The video feed is copied from the original video feed directly to avoid re-encoding. This feed does not include any annotation by Frigate.

-Frigate uses [go2rtc](https://github.com/AlexxIT/go2rtc/tree/v1.2.0) to provide its restream and MSE/WebRTC capabilities. The go2rtc config is hosted at the `go2rtc` in the config, see [go2rtc docs](https://github.com/AlexxIT/go2rtc/tree/v1.2.0#configuration) for more advanced configurations and features.
+Frigate uses [go2rtc](https://github.com/AlexxIT/go2rtc/tree/v1.5.0) to provide its restream and MSE/WebRTC capabilities. The go2rtc config is hosted at the `go2rtc` in the config, see [go2rtc docs](https://github.com/AlexxIT/go2rtc/tree/v1.5.0#configuration) for more advanced configurations and features.

 :::note

@@ -86,7 +86,7 @@ Two connections are made to the camera. One for the sub stream, one for the res
 ```yaml
 go2rtc:
   streams:
     rtsp_cam:
       - rtsp://192.168.1.5:554/live0 # <- stream which supports video & aac audio. This is only supported for rtsp streams, http must use ffmpeg
       - "ffmpeg:rtsp_cam#audio=opus" # <- copy of the stream which transcodes audio to opus
     rtsp_cam_sub:
@@ -130,7 +130,7 @@ cameras:

 ## Advanced Restream Configurations

-The [exec](https://github.com/AlexxIT/go2rtc/tree/v1.2.0#source-exec) source in go2rtc can be used for custom ffmpeg commands. An example is below:
+The [exec](https://github.com/AlexxIT/go2rtc/tree/v1.5.0#source-exec) source in go2rtc can be used for custom ffmpeg commands. An example is below:

 NOTE: The output will need to be passed with two curly braces `{{output}}`

@@ -4,3 +4,5 @@ title: Snapshots
 ---

 Frigate can save a snapshot image to `/media/frigate/clips` for each event named as `<camera>-<id>.jpg`.
+
+Snapshots sent via MQTT are configured in the [config file](https://docs.frigate.video/configuration/) under `cameras -> your_camera -> mqtt`
@@ -36,7 +36,13 @@ Fork [blakeblackshear/frigate-hass-integration](https://github.com/blakeblackshe
 - [Frigate source code](#frigate-core-web-and-docs)
 - GNU make
 - Docker
-- Extra Coral device (optional, but very helpful to simulate real world performance)
+- An extra detector (Coral, OpenVINO, etc.) is optional but recommended to simulate real world performance.
+
+:::note
+
+A Coral device can only be used by a single process at a time, so an extra Coral device is recommended if using a coral for development purposes.
+
+:::

 ### Setup

@@ -79,7 +85,7 @@ Create and place these files in a `debug` folder in the root of the repo. This i
 VSCode will start the docker compose file for you and open a terminal window connected to `frigate-dev`.

 - Run `python3 -m frigate` to start the backend.
-- In a separate terminal window inside VS Code, change into the `web` directory and run `npm install && npm start` to start the frontend.
+- In a separate terminal window inside VS Code, change into the `web` directory and run `npm install && npm run dev` to start the frontend.

 #### 5. Teardown

@@ -211,3 +211,109 @@ It is recommended to run Frigate in LXC for maximum performance. See [this discu
 ## ESX

 For details on running Frigate under ESX, see details [here](https://github.com/blakeblackshear/frigate/issues/305).
+
+## Synology NAS on DSM 7
+
+These settings were tested on DSM 7.1.1-42962 Update 4
+
+**General:**
+
+The `Execute container using high privilege` option needs to be enabled in order to give the frigate container the elevated privileges it may need.
+
+The `Enable auto-restart` option can be enabled if you want the container to automatically restart whenever it improperly shuts down due to an error.
+
+(screenshot)
+
+**Advanced Settings:**
+
+If you want to use the password template feature, you should add the "FRIGATE_RTSP_PASSWORD" environment variable and set it to your preferred password under advanced settings. The rest of the environment variables should be left as default for now.
+
+(screenshot)
+
+**Port Settings:**
+
+The network mode should be set to `bridge`. You need to map the default frigate container ports to your local Synology NAS ports that you want to use to access Frigate.
+
+There may be other services running on your NAS that are using the same ports that frigate uses. In that instance you can set the ports to auto or a specific port.
+
+(screenshot)
+
+**Volume Settings:**
+
+You need to configure 2 paths:
+
+- The location of your config file in yaml format, this needs to be a file and you need to go to the location of where your config.yml is located, this will be different depending on your NAS folder structure e.g. `/docker/frigate/config/config.yml` will mount to `/config/config.yml` within the container.
+- The location on your NAS where the recordings will be saved, this needs to be a folder e.g. `/docker/volumes/frigate-0-media`
+
+(screenshot)
+
+## QNAP NAS
+
+These instructions were tested on a QNAP with an Intel J3455 CPU and 16G RAM, running QTS 4.5.4.2117.
+
+QNAP has a graphic tool named Container Station to install and manage docker containers. However, there are two limitations with Container Station that make it unsuitable to install Frigate:
+
+1. Container Station does not incorporate GitHub Container Registry (ghcr), which hosts Frigate docker image version 0.12.0 and above.
+2. Container Station uses default 64 Mb shared memory size (shm-size), and does not have a mechanism to adjust it. Frigate requires a larger shm-size to be able to work properly with more than two high resolution cameras.
+
+Because of above limitations, the installation has to be done from command line. Here are the steps:
+
+**Preparation**
+
+1. Install Container Station from QNAP App Center if it is not installed.
+2. Enable ssh on your QNAP (please do an Internet search on how to do this).
+3. Prepare Frigate config file, name it `config.yml`.
+4. Calculate shared memory size according to [documentation](https://docs.frigate.video/frigate/installation).
+5. Find your time zone value from https://en.wikipedia.org/wiki/List_of_tz_database_time_zones
+6. ssh to QNAP.
+
+**Installation**
+
+Run the following commands to install Frigate (using `stable` version as example):
+
+```bash
+# Download Frigate image
+docker pull ghcr.io/blakeblackshear/frigate:stable
+# Create directory to host Frigate config file on QNAP file system.
+# E.g., you can choose to create it under /share/Container.
+mkdir -p /share/Container/frigate/config
+# Copy the config file prepared in step 2 into the newly created config directory.
+cp path/to/your/config/file /share/Container/frigate/config
+# Create directory to host Frigate media files on QNAP file system.
+# (if you have a surveillance disk, create media directory on the surveillance disk.
+# Example command assumes share_vol2 is the surveillance drive
+mkdir -p /share/share_vol2/frigate/media
+# Create Frigate docker container. Replace shm-size value with the value from preparation step 3.
+# Also replace the time zone value for 'TZ' in the sample command.
+# Example command will create a docker container that uses at most 2 CPUs and 4G RAM.
+# You may need to add "--env=LIBVA_DRIVER_NAME=i965 \" to the following docker run command if you
+# have certain CPU (e.g., J4125). See https://docs.frigate.video/configuration/hardware_acceleration.
+docker run \
+  --name=frigate \
+  --shm-size=256m \
+  --restart=unless-stopped \
+  --env=TZ=America/New_York \
+  --volume=/share/Container/frigate/config:/config:rw \
+  --volume=/share/share_vol2/frigate/media:/media/frigate:rw \
+  --network=bridge \
+  --privileged \
+  --workdir=/opt/frigate \
+  -p 1935:1935 \
+  -p 5000:5000 \
+  -p 8554:8554 \
+  -p 8555:8555 \
+  -p 8555:8555/udp \
+  --label='com.qnap.qcs.network.mode=nat' \
+  --label='com.qnap.qcs.gpu=False' \
+  --memory="4g" \
+  --cpus="2" \
+  --detach=true \
+  -t \
+  ghcr.io/blakeblackshear/frigate:stable
+```
+
+Log into QNAP, open Container Station. Frigate docker container should be listed under 'Overview' and running. Visit Frigate Web UI by clicking Frigate docker, and then clicking the URL shown at the top of the detail page.
@@ -10,7 +10,7 @@ Use of the bundled go2rtc is optional. You can still configure FFmpeg to connect

 # Setup a go2rtc stream

-First, you will want to configure go2rtc to connect to your camera stream by adding the stream you want to use for live view in your Frigate config file. If you set the stream name under go2rtc to match the name of your camera, it will automatically be mapped and you will get additional live view options for the camera. Avoid changing any other parts of your config at this step. Note that go2rtc supports [many different stream types](https://github.com/AlexxIT/go2rtc/tree/v1.2.0#module-streams), not just rtsp.
+First, you will want to configure go2rtc to connect to your camera stream by adding the stream you want to use for live view in your Frigate config file. If you set the stream name under go2rtc to match the name of your camera, it will automatically be mapped and you will get additional live view options for the camera. Avoid changing any other parts of your config at this step. Note that go2rtc supports [many different stream types](https://github.com/AlexxIT/go2rtc/tree/v1.5.0#module-streams), not just rtsp.

 ```yaml
 go2rtc:
@@ -23,7 +23,7 @@ The easiest live view to get working is MSE. After adding this to the config, re

 ### What if my video doesn't play?

-If you are unable to see your video feed, first check the go2rtc logs in the Frigate UI under Logs in the sidebar. If go2rtc is having difficulty connecting to your camera, you should see some error messages in the log. If you do not see any errors, then the video codec of the stream may not be supported in your browser. If your camera stream is set to H265, try switching to H264. You can see more information about [video codec compatibility](https://github.com/AlexxIT/go2rtc/tree/v1.2.0#codecs-madness) in the go2rtc documentation. If you are not able to switch your camera settings from H265 to H264 or your stream is a different format such as MJPEG, you can use go2rtc to re-encode the video using the [FFmpeg parameters](https://github.com/AlexxIT/go2rtc/tree/v1.2.0#source-ffmpeg). It supports rotating and resizing video feeds and hardware acceleration. Keep in mind that transcoding video from one format to another is a resource intensive task and you may be better off using the built-in jsmpeg view. Here is an example of a config that will re-encode the stream to H264 without hardware acceleration:
+If you are unable to see your video feed, first check the go2rtc logs in the Frigate UI under Logs in the sidebar. If go2rtc is having difficulty connecting to your camera, you should see some error messages in the log. If you do not see any errors, then the video codec of the stream may not be supported in your browser. If your camera stream is set to H265, try switching to H264. You can see more information about [video codec compatibility](https://github.com/AlexxIT/go2rtc/tree/v1.5.0#codecs-madness) in the go2rtc documentation. If you are not able to switch your camera settings from H265 to H264 or your stream is a different format such as MJPEG, you can use go2rtc to re-encode the video using the [FFmpeg parameters](https://github.com/AlexxIT/go2rtc/tree/v1.5.0#source-ffmpeg). It supports rotating and resizing video feeds and hardware acceleration. Keep in mind that transcoding video from one format to another is a resource intensive task and you may be better off using the built-in jsmpeg view. Here is an example of a config that will re-encode the stream to H264 without hardware acceleration:

 ```yaml
 go2rtc:
@@ -71,6 +71,12 @@ go2rtc:
       - "ffmpeg:rtsp://user:password@10.0.10.10:554/cam/realmonitor?channel=1&subtype=2#video=copy#audio=copy#audio=aac"
 ```

+:::caution
+
+To access the go2rtc stream externally when utilizing the Frigate Add-On (for instance through VLC), you must first enable the RTSP Restream port. You can do this by visiting the Frigate Add-On configuration page within Home Assistant and revealing the hidden options under the "Show disabled ports" section.
+
+:::
+
 ## Next steps

 1. If the stream you added to go2rtc is also used by Frigate for the `record` or `detect` role, you can migrate your config to pull from the RTSP restream to reduce the number of connections to your camera as shown [here](/configuration/restream#reduce-connections-to-camera).
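For illustration only (not part of the commit): once the restream port is exposed, the stream mentioned in the caution above can be opened from another machine with a player such as VLC or ffplay. The hostname and camera name below are placeholders; port 8554 is the documented restream port.

```bash
# Open the restream in VLC (or ffplay) from another machine on the network
vlc rtsp://homeassistant.local:8554/name_of_your_camera
# or
ffplay rtsp://homeassistant.local:8554/name_of_your_camera
```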
@@ -14,7 +14,7 @@ mqtt:
   enabled: False

 cameras:
-  camera_1: # <------ Name the camera
+  name_of_your_camera: # <------ Name the camera
     ffmpeg:
       inputs:
         - path: rtsp://10.0.10.10:554/rtsp # <----- The stream you want to use for detection
@@ -44,7 +44,7 @@ Here is an example configuration with hardware acceleration configured for Intel
 mqtt: ...

 cameras:
-  camera_1:
+  name_of_your_camera:
     ffmpeg:
       inputs: ...
       hwaccel_args: preset-vaapi
@@ -64,7 +64,7 @@ detectors: # <---- add detectors
     device: usb

 cameras:
-  camera_1:
+  name_of_your_camera:
     ffmpeg: ...
     detect:
       enabled: True # <---- turn on detection
@@ -99,7 +99,7 @@ detectors:
     device: usb

 cameras:
-  camera_1:
+  name_of_your_camera:
     ffmpeg:
       inputs:
         - path: rtsp://10.0.10.10:554/rtsp
@@ -127,7 +127,7 @@ mqtt: ...
 detectors: ...

 cameras:
-  camera_1:
+  name_of_your_camera:
     ffmpeg:
       inputs:
         - path: rtsp://10.0.10.10:554/rtsp
@@ -156,7 +156,7 @@ mqtt: ...
 detectors: ...

 cameras:
-  camera_1: ...
+  name_of_your_camera: ...
     detect: ...
     record: ...
     snapshots: # <----- Enable snapshots
@@ -3,7 +3,7 @@ id: ha_notifications
 title: Home Assistant notifications
 ---

-The best way to get started with notifications for Frigate is to use the [Blueprint](https://community.home-assistant.io/t/frigate-mobile-app-notifications/311091). You can use the yaml generated from the Blueprint as a starting point and customize from there.
+The best way to get started with notifications for Frigate is to use the [Blueprint](https://community.home-assistant.io/t/frigate-mobile-app-notifications-2-0/559732). You can use the yaml generated from the Blueprint as a starting point and customize from there.

 It is generally recommended to trigger notifications based on the `frigate/events` mqtt topic. This provides the event_id needed to fetch [thumbnails/snapshots/clips](../integrations/home-assistant.md#notification-api) and other useful information to customize when and where you want to receive alerts. The data is published in the form of a change feed, which means you can reference the "previous state" of the object in the `before` section and the "current state" of the object in the `after` section. You can see an example [here](../integrations/mqtt.md#frigateevents).

@@ -45,7 +45,7 @@ automation:
             https://your.public.hass.address.com/api/frigate/notifications/{{trigger.payload_json["after"]["id"]}}/thumbnail.jpg
           tag: '{{trigger.payload_json["after"]["id"]}}'
           when: '{{trigger.payload_json["after"]["start_time"]|int}}'
-          entity_id: camera.{{trigger.payload_json["after"]["camera"]}}
+          entity_id: camera.{{trigger.payload_json["after"]["camera"] | replace("-","_") | lower}}
     mode: single
 ```

@@ -84,3 +84,61 @@ There are many ways to authenticate a website but a straightforward approach is
   </Location>
 </VirtualHost>
 ```
+
+## Nginx Reverse Proxy
+
+This method shows a working example for subdomain type reverse proxy with SSL enabled.
+
+### Setup server and port to reverse proxy
+
+This is set in `$server` and `$port` this should match your ports you have exposed to your docker container. Optionally you listen on port `443` and enable `SSL`
+
+```
+# ------------------------------------------------------------
+# frigate.domain.com
+# ------------------------------------------------------------
+
+server {
+  set $forward_scheme http;
+  set $server "192.168.100.2"; # FRIGATE SERVER LOCATION
+  set $port 5000;
+
+  listen 80;
+  listen 443 ssl http2;
+
+  server_name frigate.domain.com;
+}
+```
+
+### Setup SSL (optional)
+
+This section points to your SSL files, the example below shows locations to a default Lets Encrypt SSL certificate.
+
+```
+# Let's Encrypt SSL
+include conf.d/include/letsencrypt-acme-challenge.conf;
+include conf.d/include/ssl-ciphers.conf;
+ssl_certificate /etc/letsencrypt/live/npm-1/fullchain.pem;
+ssl_certificate_key /etc/letsencrypt/live/npm-1/privkey.pem;
+```
+
+### Setup reverse proxy settings
+
+The settings below enable connection upgrade, set up logging (optional) and proxy everything from the `/` context to the docker host and port specified earlier in the configuration
+
+```
+proxy_set_header Upgrade $http_upgrade;
+proxy_set_header Connection $http_connection;
+proxy_http_version 1.1;
+
+access_log /data/logs/proxy-host-40_access.log proxy;
+error_log /data/logs/proxy-host-40_error.log warn;
+
+location / {
+  proxy_set_header Upgrade $http_upgrade;
+  proxy_set_header Connection $http_connection;
+  proxy_http_version 1.1;
+}
+```
@@ -3,7 +3,7 @@ id: stationary_objects
 title: Avoiding stationary objects
 ---

-Many people use Frigate to detect cars entering their driveway, and they often run into an issue with repeated events of a parked car being repeatedly detected over the course of multiple days (for example if the car is lost at night and detected again the following morning.
+Many people use Frigate to detect cars entering their driveway, and they often run into an issue with repeated notifications or events of a parked car being repeatedly detected over the course of multiple days (for example if the car is lost at night and detected again the following morning).

 You can use zones to restrict events and notifications to objects that have entered specific areas.

@@ -15,6 +15,12 @@ Frigate is designed to track objects as they move and over-masking can prevent i

 :::

+:::info
+
+Once a vehicle crosses the entrance into the parking area, that event will stay `In Progress` until it is no longer seen in the frame. Frigate is designed to have an event last as long as an object is visible in the frame, an event being `In Progress` does not mean the event is being constantly recorded. You can define the recording behavior by adjusting the [recording retention settings](../configuration/record.md).
+
+:::
+
 To only be notified of cars that enter your driveway from the street, you could create multiple zones that cover your driveway. For cars, you would only notify if `entered_zones` from the events MQTT topic has more than 1 zone.

 See [this example](../configuration/zones.md#restricting-zones-to-specific-objects) from the Zones documentation to see how to restrict zones to certain object types.
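As an illustrative sketch (not part of the commit): one way to inspect `entered_zones` on the `frigate/events` MQTT topic is with `mosquitto_sub` and `jq`. The broker address and the `car` label filter below are assumptions.

```bash
# Print only car events that have entered more than one zone (e.g. street plus driveway).
# Broker address and credentials are placeholders.
mosquitto_sub -h 192.168.1.2 -t frigate/events | \
  jq --unbuffered 'select(.after.label == "car" and (.after.entered_zones | length) > 1)'
```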
@@ -295,3 +295,41 @@ Get ffprobe output for camera feed paths.
 ### `GET /api/<camera_name>/ptz/info`

 Get PTZ info for the camera.
+
+### `POST /api/events/<camera_name>/<label>/create`
+
+Create a manual event with a given `label` (ex: doorbell press) to capture a specific event besides an object being detected.
+
+**Optional Body:**
+
+```json
+{
+  "subLabel": "some_string", // add sub label to event
+  "duration": 30, // predetermined length of event (default: 30 seconds) or can be set to null for indeterminate length event
+  "include_recording": true, // whether the event should save recordings along with the snapshot that is taken
+  "draw": {
+    // optional annotations that will be drawn on the snapshot
+    "boxes": [
+      {
+        "box": [0.5, 0.5, 0.25, 0.25], // box consists of x, y, width, height which are on a scale between 0 - 1
+        "color": [255, 0, 0], // color of the box, default is red
+        "score": 100 // optional score associated with the box
+      }
+    ]
+  }
+}
+```
+
+**Success Response:**
+
+```json
+{
+  "event_id": "1682970645.13116-1ug7ns",
+  "message": "Successfully created event.",
+  "success": true
+}
+```
+
+### `PUT /api/events/<event_id>/end`
+
+End a specific manual event without a predetermined length.
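For illustration only (not part of the commit): exercising the two endpoints above with curl might look like the sketch below. The host name `frigate.local`, the camera name, and the label are placeholders; port 5000 is Frigate's default web/API port.

```bash
# Create a manual event named "doorbell_press" on the camera "front_door" (placeholder names),
# attaching a sub label and keeping the recordings.
curl -X POST http://frigate.local:5000/api/events/front_door/doorbell_press/create \
  -H 'Content-Type: application/json' \
  -d '{"subLabel": "visitor", "duration": null, "include_recording": true}'

# Later, end the indeterminate-length event using the event_id returned above.
curl -X PUT http://frigate.local:5000/api/events/1682970645.13116-1ug7ns/end
```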
docs/docs/integrations/third_party_extensions.md (new file, 19 lines)

@@ -0,0 +1,19 @@
+---
+id: third_party_extensions
+title: Third Party Extensions
+---
+
+Being open source, others have the possibility to modify and extend the rich functionality Frigate already offers.
+This page is meant to be an overview over additions one can make to the home NVR setup. The list is not exhaustive and can be extended via PR to the Frigate docs.
+
+:::caution
+
+This page does not recommend or rate the presented projects.
+Please use your own knowledge to assess and vet them before you install anything on your system.
+
+:::
+
+## [Double Take](https://github.com/jakowenko/double-take)
+
+[Double Take](https://github.com/jakowenko/double-take) provides a unified UI and API for processing and training images for facial recognition.
+It supports automatically setting the sub labels in Frigate for person objects that are detected and recognized.

@@ -13,7 +13,7 @@ module.exports = {
   themeConfig: {
     algolia: {
       appId: 'WIURGBNBPY',
-      apiKey: '81ec882db78f7fed05c51daf973f0362',
+      apiKey: 'd02cc0a6a61178b25da550212925226b',
       indexName: 'frigate',
     },
     docs: {
docs/package-lock.json (generated, 1111 lines changed; file diff suppressed because it is too large)

@@ -14,8 +14,8 @@
     "write-heading-ids": "docusaurus write-heading-ids"
   },
   "dependencies": {
-    "@docusaurus/core": "^2.4.0",
-    "@docusaurus/preset-classic": "^2.4.0",
+    "@docusaurus/core": "^2.4.1",
+    "@docusaurus/preset-classic": "^2.4.1",
     "@mdx-js/react": "^1.6.22",
     "clsx": "^1.2.1",
     "prism-react-renderer": "^1.3.5",
@@ -37,6 +37,7 @@ module.exports = {
       "integrations/home-assistant",
       "integrations/api",
       "integrations/mqtt",
+      "integrations/third_party_extensions",
     ],
     Troubleshooting: [
       "troubleshooting/faqs",
@@ -1,13 +1,14 @@
 import faulthandler
-from flask import cli
-
-faulthandler.enable()
 import threading

-threading.current_thread().name = "frigate"
+from flask import cli

 from frigate.app import FrigateApp

+faulthandler.enable()
+
+threading.current_thread().name = "frigate"
+
 cli.show_server_banner = lambda *x: None

 if __name__ == "__main__":
|
|||||||
frigate/app.py
@@ -1,16 +1,16 @@
 import logging
 import multiprocessing as mp
-from multiprocessing.queues import Queue
-from multiprocessing.synchronize import Event as MpEvent
 import os
 import shutil
 import signal
 import sys
-from typing import Optional
-from types import FrameType
-import psutil
-
 import traceback
+from multiprocessing.queues import Queue
+from multiprocessing.synchronize import Event as MpEvent
+from types import FrameType
+from typing import Optional
+
+import psutil
 from peewee_migrate import Router
 from playhouse.sqlite_ext import SqliteExtDatabase
 from playhouse.sqliteq import SqliteQueueDatabase
@@ -27,11 +27,13 @@ from frigate.const import (
     MODEL_CACHE_DIR,
     RECORD_DIR,
 )
-from frigate.object_detection import ObjectDetectProcess
-from frigate.events import EventCleanup, EventProcessor
+from frigate.events.cleanup import EventCleanup
+from frigate.events.external import ExternalEventProcessor
+from frigate.events.maintainer import EventProcessor
 from frigate.http import create_app
 from frigate.log import log_process, root_configurer
 from frigate.models import Event, Recordings, Timeline
+from frigate.object_detection import ObjectDetectProcess
 from frigate.object_processing import TrackedObjectProcessor
 from frigate.output import output_frames
 from frigate.plus import PlusApi
@@ -40,10 +42,10 @@ from frigate.record.record import manage_recordings
 from frigate.monitoring.stats import StatsEmitter, stats_init
 from frigate.storage import StorageMaintainer
 from frigate.timeline import TimelineProcessor
+from frigate.types import CameraMetricsTypes, RecordMetricsTypes
 from frigate.version import VERSION
 from frigate.video import capture_camera, track_camera
 from frigate.watchdog import FrigateWatchdog
-from frigate.types import CameraMetricsTypes, RecordMetricsTypes
 
 logger = logging.getLogger(__name__)
 
@@ -131,10 +133,10 @@ class FrigateApp:
         for log, level in self.config.logger.logs.items():
             logging.getLogger(log).setLevel(level.value.upper())
 
-        if not "werkzeug" in self.config.logger.logs:
+        if "werkzeug" not in self.config.logger.logs:
             logging.getLogger("werkzeug").setLevel("ERROR")
 
-        if not "ws4py" in self.config.logger.logs:
+        if "ws4py" not in self.config.logger.logs:
             logging.getLogger("ws4py").setLevel("ERROR")
 
     def init_queues(self) -> None:
@@ -204,6 +206,11 @@ class FrigateApp:
             self.config, self.camera_metrics, self.detectors, self.processes
         )
 
+    def init_external_event_processor(self) -> None:
+        self.external_event_processor = ExternalEventProcessor(
+            self.config, self.event_queue
+        )
+
     def init_web_server(self) -> None:
         self.flask_app = create_app(
             self.config,
@@ -212,6 +219,7 @@ class FrigateApp:
             self.detected_frames_processor,
             self.storage_maintainer,
             self.onvif_controller,
+            self.external_event_processor,
             self.plus_api,
         )
 
@@ -286,7 +294,7 @@ class FrigateApp:
     def start_video_output_processor(self) -> None:
         output_processor = mp.Process(
             target=output_frames,
-            name=f"output_processor",
+            name="output_processor",
             args=(
                 self.config,
                 self.video_output_queue,
@@ -436,6 +444,7 @@ class FrigateApp:
         self.start_camera_capture_processes()
         self.start_storage_maintainer()
         self.init_stats()
+        self.init_external_event_processor()
         self.init_web_server()
         self.start_timeline_processor()
         self.start_event_processor()
@@ -458,7 +467,7 @@ class FrigateApp:
             self.stop()
 
     def stop(self) -> None:
-        logger.info(f"Stopping...")
+        logger.info("Stopping...")
         self.stop_event.set()
 
         for detector in self.detectors.values():
frigate/comms/dispatcher.py
@@ -1,17 +1,14 @@
 """Handle communication between Frigate and other applications."""
 
 import logging
+from abc import ABC, abstractmethod
 from typing import Any, Callable
 
-from abc import ABC, abstractmethod
-
 from frigate.config import FrigateConfig
-from frigate.ptz import OnvifController, OnvifCommandEnum
+from frigate.ptz import OnvifCommandEnum, OnvifController
 from frigate.types import CameraMetricsTypes, RecordMetricsTypes
 from frigate.util import restart_frigate
 
-
 logger = logging.getLogger(__name__)
 
 
@@ -72,7 +69,7 @@ class Dispatcher:
                 camera_name = topic.split("/")[-3]
                 command = topic.split("/")[-2]
                 self._camera_settings_handlers[command](camera_name, payload)
-            except IndexError as e:
+            except IndexError:
                 logger.error(f"Received invalid set command: {topic}")
                 return
         elif topic.endswith("ptz"):
@@ -80,7 +77,7 @@ class Dispatcher:
                 # example /cam_name/ptz payload=MOVE_UP|MOVE_DOWN|STOP...
                 camera_name = topic.split("/")[-2]
                 self._on_ptz_command(camera_name, payload)
-            except IndexError as e:
+            except IndexError:
                 logger.error(f"Received invalid ptz command: {topic}")
                 return
         elif topic == "restart":
@@ -128,7 +125,7 @@ class Dispatcher:
         elif payload == "OFF":
             if self.camera_metrics[camera_name]["detection_enabled"].value:
                 logger.error(
-                    f"Turning off motion is not allowed when detection is enabled."
+                    "Turning off motion is not allowed when detection is enabled."
                 )
                 return
 
@@ -194,7 +191,13 @@ class Dispatcher:
         record_settings = self.config.cameras[camera_name].record
 
         if payload == "ON":
-            if not self.record_metrics[camera_name]["record_enabled"].value:
+            if not self.config.cameras[camera_name].record.enabled_in_config:
+                logger.error(
+                    "Recordings must be enabled in the config to be turned on via MQTT."
+                )
+                return
+
+            if not record_settings.enabled:
                 logger.info(f"Turning on recordings for {camera_name}")
                 record_settings.enabled = True
                 self.record_metrics[camera_name]["record_enabled"].value = True
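For context, the record toggle guarded above is driven over MQTT. A minimal sketch of flipping it from a client (this assumes the default "frigate" topic prefix, a broker on localhost, and a camera named back_yard; none of these come from this diff):

import paho.mqtt.client as mqtt

client = mqtt.Client()
client.connect("localhost", 1883)
# Per the check added above, this is only honored when record is enabled in the config.
client.publish("frigate/back_yard/recordings/set", "ON")
client.disconnect()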
frigate/comms/mqtt.py
@@ -1,6 +1,5 @@
 import logging
 import threading
-
 from typing import Any, Callable
 
 import paho.mqtt.client as mqtt
@@ -8,7 +7,6 @@ import paho.mqtt.client as mqtt
 from frigate.comms.dispatcher import Communicator
 from frigate.config import FrigateConfig
 
-
 logger = logging.getLogger(__name__)
 
 
@@ -177,10 +175,10 @@ class MqttClient(Communicator):  # type: ignore[misc]
             f"{self.mqtt_config.topic_prefix}/restart", self.on_mqtt_command
         )
 
-        if not self.mqtt_config.tls_ca_certs is None:
+        if self.mqtt_config.tls_ca_certs is not None:
             if (
-                not self.mqtt_config.tls_client_cert is None
-                and not self.mqtt_config.tls_client_key is None
+                self.mqtt_config.tls_client_cert is not None
+                and self.mqtt_config.tls_client_key is not None
             ):
                 self.client.tls_set(
                     self.mqtt_config.tls_ca_certs,
@@ -189,9 +187,9 @@ class MqttClient(Communicator):  # type: ignore[misc]
                 )
             else:
                 self.client.tls_set(self.mqtt_config.tls_ca_certs)
-        if not self.mqtt_config.tls_insecure is None:
+        if self.mqtt_config.tls_insecure is not None:
             self.client.tls_insecure_set(self.mqtt_config.tls_insecure)
-        if not self.mqtt_config.user is None:
+        if self.mqtt_config.user is not None:
             self.client.username_pw_set(
                 self.mqtt_config.user, password=self.mqtt_config.password
             )
frigate/comms/ws.py
@@ -3,10 +3,9 @@
 import json
 import logging
 import threading
-
 from typing import Callable
-
 from wsgiref.simple_server import make_server
 
 from ws4py.server.wsgirefserver import (
     WebSocketWSGIHandler,
     WebSocketWSGIRequestHandler,
@@ -18,7 +17,6 @@ from ws4py.websocket import WebSocket
 from frigate.comms.dispatcher import Communicator
 from frigate.config import FrigateConfig
 
-
 logger = logging.getLogger(__name__)
 
 
@@ -45,7 +43,7 @@ class WebSocketClient(Communicator):  # type: ignore[misc]
                     "topic": json_message.get("topic"),
                     "payload": json_message.get("payload"),
                 }
-            except Exception as e:
+            except Exception:
                 logger.warning(
                     f"Unable to parse websocket message as valid json: {message.data.decode('utf-8')}"
                 )
@@ -82,7 +80,7 @@ class WebSocketClient(Communicator):  # type: ignore[misc]
                     "payload": payload,
                 }
             )
-        except Exception as e:
+        except Exception:
             # if the payload can't be decoded don't relay to clients
             logger.debug(f"payload for {topic} wasn't text. Skipping...")
             return
frigate/config.py
@@ -8,26 +8,14 @@ from typing import Dict, List, Optional, Tuple, Union
 
 import matplotlib.pyplot as plt
 import numpy as np
-import yaml
-from pydantic import BaseModel, Extra, Field, validator, parse_obj_as
+from pydantic import BaseModel, Extra, Field, parse_obj_as, validator
 from pydantic.fields import PrivateAttr
 
-from frigate.const import (
-    CACHE_DIR,
-    DEFAULT_DB_PATH,
-    REGEX_CAMERA_NAME,
-    YAML_EXT,
-)
+from frigate.const import CACHE_DIR, DEFAULT_DB_PATH, REGEX_CAMERA_NAME, YAML_EXT
+from frigate.detectors import DetectorConfig, ModelConfig
+from frigate.detectors.detector_config import InputTensorEnum  # noqa: F401
+from frigate.detectors.detector_config import PixelFormatEnum  # noqa: F401
 from frigate.detectors.detector_config import BaseDetectorConfig
-from frigate.plus import PlusApi
-from frigate.util import (
-    create_mask,
-    deep_merge,
-    get_ffmpeg_arg_list,
-    escape_special_characters,
-    load_config_with_no_duplicates,
-    load_labels,
-)
 from frigate.ffmpeg_presets import (
     parse_preset_hardware_acceleration_decode,
     parse_preset_hardware_acceleration_scale,
@@ -35,14 +23,14 @@ from frigate.ffmpeg_presets import (
     parse_preset_output_record,
     parse_preset_output_rtmp,
 )
-from frigate.detectors import (
-    PixelFormatEnum,
-    InputTensorEnum,
-    ModelConfig,
-    DetectorConfig,
+from frigate.plus import PlusApi
+from frigate.util import (
+    create_mask,
+    deep_merge,
+    escape_special_characters,
+    get_ffmpeg_arg_list,
+    load_config_with_no_duplicates,
 )
-from frigate.version import VERSION
 
 logger = logging.getLogger(__name__)
 
@@ -179,6 +167,9 @@ class RecordConfig(FrigateBaseModel):
     events: EventsConfig = Field(
         default_factory=EventsConfig, title="Event specific settings."
     )
+    enabled_in_config: Optional[bool] = Field(
+        title="Keep track of original state of recording."
+    )
 
 
 class MotionConfig(FrigateBaseModel):
@@ -484,7 +475,7 @@ class CameraFfmpegConfig(FfmpegConfig):
         if len(roles) > len(roles_set):
             raise ValueError("Each input role may only be used once.")
 
-        if not "detect" in roles:
+        if "detect" not in roles:
            raise ValueError("The detect role is required.")
 
         return v
@@ -773,12 +764,12 @@ def verify_config_roles(camera_config: CameraConfig) -> None:
         set([r for i in camera_config.ffmpeg.inputs for r in i.roles])
     )
 
-    if camera_config.record.enabled and not "record" in assigned_roles:
+    if camera_config.record.enabled and "record" not in assigned_roles:
         raise ValueError(
             f"Camera {camera_config.name} has record enabled, but record is not assigned to an input."
         )
 
-    if camera_config.rtmp.enabled and not "rtmp" in assigned_roles:
+    if camera_config.rtmp.enabled and "rtmp" not in assigned_roles:
         raise ValueError(
             f"Camera {camera_config.name} has rtmp enabled, but rtmp is not assigned to an input."
         )
@@ -961,6 +952,8 @@ class FrigateConfig(FrigateBaseModel):
                 camera_config.onvif.password = camera_config.onvif.password.format(
                     **FRIGATE_ENV_VARS
                 )
+            # set config recording value
+            camera_config.record.enabled_in_config = camera_config.record.enabled
 
             # Add default filters
             object_keys = camera_config.objects.track
@@ -1057,7 +1050,7 @@ class FrigateConfig(FrigateBaseModel):
                 config.model.dict(exclude_unset=True),
             )
 
-            if not "path" in merged_model:
+            if "path" not in merged_model:
                 if detector_config.type == "cpu":
                     merged_model["path"] = "/cpu_model.tflite"
                 elif detector_config.type == "edgetpu":
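The new enabled_in_config field simply freezes the value that came from the config file so later runtime toggles (MQTT, API) can be validated against it. A small self-contained sketch of the pattern, not Frigate's actual model:

from typing import Optional

from pydantic import BaseModel, Field


class RecordSketch(BaseModel):
    enabled: bool = Field(default=False)
    enabled_in_config: Optional[bool] = Field(
        default=None, title="Keep track of original state of recording."
    )


cfg = RecordSketch(enabled=True)
cfg.enabled_in_config = cfg.enabled  # captured once while the config is loaded
cfg.enabled = False                  # runtime toggle; the configured intent is preserved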
frigate/const.py
@@ -13,9 +13,9 @@ BTBN_PATH = "/usr/lib/btbn-ffmpeg"
 
 # Regex Consts
 
-REGEX_CAMERA_NAME = "^[a-zA-Z0-9_-]+$"
-REGEX_RTSP_CAMERA_USER_PASS = ":\/\/[a-zA-Z0-9_-]+:[\S]+@"
-REGEX_HTTP_CAMERA_USER_PASS = "user=[a-zA-Z0-9_-]+&password=[\S]+"
+REGEX_CAMERA_NAME = r"^[a-zA-Z0-9_-]+$"
+REGEX_RTSP_CAMERA_USER_PASS = r":\/\/[a-zA-Z0-9_-]+:[\S]+@"
+REGEX_HTTP_CAMERA_USER_PASS = r"user=[a-zA-Z0-9_-]+&password=[\S]+"
 
 # Known Driver Names
 
frigate/detectors/__init__.py
@@ -1,13 +1,7 @@
 import logging
 
-from .detection_api import DetectionApi
-from .detector_config import (
-    PixelFormatEnum,
-    InputTensorEnum,
-    ModelConfig,
-)
-from .detector_types import DetectorTypeEnum, api_types, DetectorConfig
-
+from .detector_config import InputTensorEnum, ModelConfig, PixelFormatEnum  # noqa: F401
+from .detector_types import DetectorConfig, DetectorTypeEnum, api_types  # noqa: F401
 
 logger = logging.getLogger(__name__)
 
frigate/detectors/detection_api.py
@@ -1,7 +1,6 @@
 import logging
 from abc import ABC, abstractmethod
 
-
 logger = logging.getLogger(__name__)
 
 
frigate/detectors/detector_config.py
@@ -1,20 +1,18 @@
 import hashlib
 import json
 import logging
-from enum import Enum
 import os
-from typing import Dict, List, Optional, Tuple, Union, Literal
+from enum import Enum
+from typing import Dict, Optional, Tuple
 
-import requests
 import matplotlib.pyplot as plt
-from pydantic import BaseModel, Extra, Field, validator
+import requests
+from pydantic import BaseModel, Extra, Field
 from pydantic.fields import PrivateAttr
 
 from frigate.plus import PlusApi
-
 from frigate.util import load_labels
 
 
 logger = logging.getLogger(__name__)
 
 
@@ -118,11 +116,14 @@ class ModelConfig(BaseModel):
         }
 
     def compute_model_hash(self) -> None:
-        with open(self.path, "rb") as f:
-            file_hash = hashlib.md5()
-            while chunk := f.read(8192):
-                file_hash.update(chunk)
-        self._model_hash = file_hash.hexdigest()
+        if not self.path or not os.path.exists(self.path):
+            self._model_hash = hashlib.md5(b"unknown").hexdigest()
+        else:
+            with open(self.path, "rb") as f:
+                file_hash = hashlib.md5()
+                while chunk := f.read(8192):
+                    file_hash.update(chunk)
+            self._model_hash = file_hash.hexdigest()
 
     def create_colormap(self, enabled_labels: set[str]) -> None:
         """Get a list of colors for enabled labels."""
frigate/detectors/detector_types.py
@@ -1,16 +1,16 @@
-import logging
 import importlib
+import logging
 import pkgutil
-from typing import Union
-from typing_extensions import Annotated
 from enum import Enum
+from typing import Union
 
 from pydantic import Field
+from typing_extensions import Annotated
 
 from . import plugins
 from .detection_api import DetectionApi
 from .detector_config import BaseDetectorConfig
 
 
 logger = logging.getLogger(__name__)
 
 
frigate/detectors/plugins/cpu_tfl.py
@@ -1,10 +1,11 @@
 import logging
 
 import numpy as np
+from pydantic import Field
+from typing_extensions import Literal
 
 from frigate.detectors.detection_api import DetectionApi
 from frigate.detectors.detector_config import BaseDetectorConfig
-from typing import Literal
-from pydantic import Extra, Field
 
 try:
     from tflite_runtime.interpreter import Interpreter
frigate/detectors/plugins/deepstack.py
@@ -1,14 +1,14 @@
+import io
 import logging
 
 import numpy as np
 import requests
-import io
+from PIL import Image
+from pydantic import Field
+from typing_extensions import Literal
 
 from frigate.detectors.detection_api import DetectionApi
 from frigate.detectors.detector_config import BaseDetectorConfig
-from typing import Literal
-from pydantic import Extra, Field
-from PIL import Image
 
 
 logger = logging.getLogger(__name__)
 
@@ -33,9 +33,6 @@ class DeepStack(DetectionApi):
         self.api_key = detector_config.api_key
         self.labels = detector_config.model.merged_labelmap
 
-        self.h = detector_config.model.height
-        self.w = detector_config.model.width
-
     def get_label_index(self, label_value):
         if label_value.lower() == "truck":
             label_value = "car"
@@ -47,24 +44,31 @@ class DeepStack(DetectionApi):
     def detect_raw(self, tensor_input):
         image_data = np.squeeze(tensor_input).astype(np.uint8)
         image = Image.fromarray(image_data)
+        self.w, self.h = image.size
         with io.BytesIO() as output:
             image.save(output, format="JPEG")
             image_bytes = output.getvalue()
         data = {"api_key": self.api_key}
         response = requests.post(
-            self.api_url, files={"image": image_bytes}, timeout=self.api_timeout
+            self.api_url,
+            data=data,
+            files={"image": image_bytes},
+            timeout=self.api_timeout,
         )
         response_json = response.json()
         detections = np.zeros((20, 6), np.float32)
-        for i, detection in enumerate(response_json["predictions"]):
+        if response_json.get("predictions") is None:
+            logger.debug(f"Error in parsing response json: {response_json}")
+            return detections
+
+        for i, detection in enumerate(response_json.get("predictions")):
             logger.debug(f"Response: {detection}")
             if detection["confidence"] < 0.4:
-                logger.debug(f"Break due to confidence < 0.4")
+                logger.debug("Break due to confidence < 0.4")
                 break
             label = self.get_label_index(detection["label"])
             if label < 0:
-                logger.debug(f"Break due to unknown label")
+                logger.debug("Break due to unknown label")
                 break
             detections[i] = [
                 label,
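Roughly, the plugin above posts a JPEG to a DeepStack-compatible detection endpoint and reads predictions from the JSON reply. An illustrative request (the host, port, and /v1/vision/detection path are assumptions about a typical DeepStack deployment, not something defined in this diff):

import requests

with open("snapshot.jpg", "rb") as f:
    resp = requests.post(
        "http://localhost:5000/v1/vision/detection",
        data={"api_key": ""},        # optional key, mirroring the data payload added above
        files={"image": f.read()},
        timeout=10,
    )

# Mirrors the .get("predictions") guard introduced above.
for pred in resp.json().get("predictions", []):
    print(pred["label"], pred["confidence"])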
frigate/detectors/plugins/edgetpu_tfl.py
@@ -1,10 +1,11 @@
 import logging
 
 import numpy as np
+from pydantic import Field
+from typing_extensions import Literal
 
 from frigate.detectors.detection_api import DetectionApi
 from frigate.detectors.detector_config import BaseDetectorConfig
-from typing import Literal
-from pydantic import Extra, Field
 
 try:
     from tflite_runtime.interpreter import Interpreter, load_delegate
frigate/detectors/plugins/openvino.py
@@ -1,12 +1,12 @@
 import logging
 
 import numpy as np
 import openvino.runtime as ov
+from pydantic import Field
+from typing_extensions import Literal
 
 from frigate.detectors.detection_api import DetectionApi
 from frigate.detectors.detector_config import BaseDetectorConfig, ModelTypeEnum
-from typing import Literal
-from pydantic import Extra, Field
 
 
 logger = logging.getLogger(__name__)
 
@@ -41,7 +41,7 @@ class OvDetector(DetectionApi):
                 tensor_shape = self.interpreter.output(self.output_indexes).shape
                 logger.info(f"Model Output-{self.output_indexes} Shape: {tensor_shape}")
                 self.output_indexes += 1
-            except:
+            except Exception:
                 logger.info(f"Model has {self.output_indexes} Output Tensors")
                 break
         if self.ov_model_type == ModelTypeEnum.yolox:
frigate/detectors/plugins/tensorrt.py
@@ -1,6 +1,6 @@
+import ctypes
 import logging
 
-import ctypes
 import numpy as np
 
 try:
@@ -8,13 +8,14 @@ try:
     from cuda import cuda
 
     TRT_SUPPORT = True
-except ModuleNotFoundError as e:
+except ModuleNotFoundError:
     TRT_SUPPORT = False
 
+from pydantic import Field
+from typing_extensions import Literal
+
 from frigate.detectors.detection_api import DetectionApi
 from frigate.detectors.detector_config import BaseDetectorConfig
-from typing import Literal
-from pydantic import Field
 
 logger = logging.getLogger(__name__)
 
@@ -172,7 +173,7 @@ class TensorRtDetector(DetectionApi):
         if not self.context.execute_async_v2(
             bindings=self.bindings, stream_handle=self.stream
         ):
-            logger.warn(f"Execute returned false")
+            logger.warn("Execute returned false")
 
         # Transfer predictions back from the GPU.
         [
frigate/events/__init__.py (new, empty file)
frigate/events/cleanup.py (new file, +174 lines)

"""Cleanup events based on configured retention."""

import datetime
import logging
import os
import threading
from multiprocessing.synchronize import Event as MpEvent
from pathlib import Path

from frigate.config import FrigateConfig
from frigate.const import CLIPS_DIR
from frigate.models import Event

logger = logging.getLogger(__name__)


class EventCleanup(threading.Thread):
    def __init__(self, config: FrigateConfig, stop_event: MpEvent):
        threading.Thread.__init__(self)
        self.name = "event_cleanup"
        self.config = config
        self.stop_event = stop_event
        self.camera_keys = list(self.config.cameras.keys())

    def expire(self, media_type: str) -> None:
        # TODO: Refactor media_type to enum
        ## Expire events from unlisted cameras based on the global config
        if media_type == "clips":
            retain_config = self.config.record.events.retain
            file_extension = "mp4"
            update_params = {"has_clip": False}
        else:
            retain_config = self.config.snapshots.retain
            file_extension = "jpg"
            update_params = {"has_snapshot": False}

        distinct_labels = (
            Event.select(Event.label)
            .where(Event.camera.not_in(self.camera_keys))
            .distinct()
        )

        # loop over object types in db
        for event in distinct_labels:
            # get expiration time for this label
            expire_days = retain_config.objects.get(event.label, retain_config.default)
            expire_after = (
                datetime.datetime.now() - datetime.timedelta(days=expire_days)
            ).timestamp()
            # grab all events after specific time
            expired_events = Event.select().where(
                Event.camera.not_in(self.camera_keys),
                Event.start_time < expire_after,
                Event.label == event.label,
                Event.retain_indefinitely is False,
            )
            # delete the media from disk
            for event in expired_events:
                media_name = f"{event.camera}-{event.id}"
                media_path = Path(
                    f"{os.path.join(CLIPS_DIR, media_name)}.{file_extension}"
                )
                media_path.unlink(missing_ok=True)
                if file_extension == "jpg":
                    media_path = Path(
                        f"{os.path.join(CLIPS_DIR, media_name)}-clean.png"
                    )
                    media_path.unlink(missing_ok=True)

            # update the clips attribute for the db entry
            update_query = Event.update(update_params).where(
                Event.camera.not_in(self.camera_keys),
                Event.start_time < expire_after,
                Event.label == event.label,
                Event.retain_indefinitely is False,
            )
            update_query.execute()

        ## Expire events from cameras based on the camera config
        for name, camera in self.config.cameras.items():
            if media_type == "clips":
                retain_config = camera.record.events.retain
            else:
                retain_config = camera.snapshots.retain
            # get distinct objects in database for this camera
            distinct_labels = (
                Event.select(Event.label).where(Event.camera == name).distinct()
            )

            # loop over object types in db
            for event in distinct_labels:
                # get expiration time for this label
                expire_days = retain_config.objects.get(
                    event.label, retain_config.default
                )
                expire_after = (
                    datetime.datetime.now() - datetime.timedelta(days=expire_days)
                ).timestamp()
                # grab all events after specific time
                expired_events = Event.select().where(
                    Event.camera == name,
                    Event.start_time < expire_after,
                    Event.label == event.label,
                    Event.retain_indefinitely is False,
                )
                # delete the grabbed clips from disk
                for event in expired_events:
                    media_name = f"{event.camera}-{event.id}"
                    media_path = Path(
                        f"{os.path.join(CLIPS_DIR, media_name)}.{file_extension}"
                    )
                    media_path.unlink(missing_ok=True)
                    if file_extension == "jpg":
                        media_path = Path(
                            f"{os.path.join(CLIPS_DIR, media_name)}-clean.png"
                        )
                        media_path.unlink(missing_ok=True)
                # update the clips attribute for the db entry
                update_query = Event.update(update_params).where(
                    Event.camera == name,
                    Event.start_time < expire_after,
                    Event.label == event.label,
                    Event.retain_indefinitely is False,
                )
                update_query.execute()

    def purge_duplicates(self) -> None:
        duplicate_query = """with grouped_events as (
          select id,
            label,
            camera,
            has_snapshot,
            has_clip,
            row_number() over (
              partition by label, camera, round(start_time/5,0)*5
              order by end_time-start_time desc
            ) as copy_number
          from event
        )

        select distinct id, camera, has_snapshot, has_clip from grouped_events
        where copy_number > 1;"""

        duplicate_events = Event.raw(duplicate_query)
        for event in duplicate_events:
            logger.debug(f"Removing duplicate: {event.id}")
            media_name = f"{event.camera}-{event.id}"
            media_path = Path(f"{os.path.join(CLIPS_DIR, media_name)}.jpg")
            media_path.unlink(missing_ok=True)
            media_path = Path(f"{os.path.join(CLIPS_DIR, media_name)}-clean.png")
            media_path.unlink(missing_ok=True)
            media_path = Path(f"{os.path.join(CLIPS_DIR, media_name)}.mp4")
            media_path.unlink(missing_ok=True)

        (
            Event.delete()
            .where(Event.id << [event.id for event in duplicate_events])
            .execute()
        )

    def run(self) -> None:
        # only expire events every 5 minutes
        while not self.stop_event.wait(300):
            self.expire("clips")
            self.expire("snapshots")
            self.purge_duplicates()

        # drop events from db where has_clip and has_snapshot are false
        delete_query = Event.delete().where(
            Event.has_clip is False, Event.has_snapshot is False
        )
        delete_query.execute()

        logger.info("Exiting event cleanup...")
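A brief usage sketch for the new thread (assumed wiring; the real call site is FrigateApp in this same change set). It is a plain threading.Thread that wakes every five minutes and exits once the shared stop event is set:

import multiprocessing as mp

from frigate.config import FrigateConfig
from frigate.events.cleanup import EventCleanup

config = FrigateConfig.parse_file("/config/config.yml")  # assumes Frigate's YAML-aware parse_file
stop_event = mp.Event()

cleanup = EventCleanup(config, stop_event)
cleanup.start()
# ... on shutdown ...
stop_event.set()
cleanup.join()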
frigate/events/external.py (new file, +130 lines)

"""Handle external events created by the user."""

import base64
import datetime
import logging
import os
import random
import string
from multiprocessing.queues import Queue
from typing import Optional

import cv2

from frigate.config import CameraConfig, FrigateConfig
from frigate.const import CLIPS_DIR
from frigate.events.maintainer import EventTypeEnum
from frigate.util import draw_box_with_label

logger = logging.getLogger(__name__)


class ExternalEventProcessor:
    def __init__(self, config: FrigateConfig, queue: Queue) -> None:
        self.config = config
        self.queue = queue
        self.default_thumbnail = None

    def create_manual_event(
        self,
        camera: str,
        label: str,
        sub_label: Optional[str],
        duration: Optional[int],
        include_recording: bool,
        draw: dict[str, any],
        snapshot_frame: any,
    ) -> str:
        now = datetime.datetime.now().timestamp()
        camera_config = self.config.cameras.get(camera)

        # create event id and start frame time
        rand_id = "".join(random.choices(string.ascii_lowercase + string.digits, k=6))
        event_id = f"{now}-{rand_id}"

        thumbnail = self._write_images(
            camera_config, label, event_id, draw, snapshot_frame
        )

        self.queue.put(
            (
                EventTypeEnum.api,
                "new",
                camera_config,
                {
                    "id": event_id,
                    "label": label,
                    "sub_label": sub_label,
                    "camera": camera,
                    "start_time": now,
                    "end_time": now + duration if duration is not None else None,
                    "thumbnail": thumbnail,
                    "has_clip": camera_config.record.enabled and include_recording,
                    "has_snapshot": True,
                },
            )
        )

        return event_id

    def finish_manual_event(self, event_id: str) -> None:
        """Finish external event with indeterminate duration."""
        now = datetime.datetime.now().timestamp()
        self.queue.put(
            (EventTypeEnum.api, "end", None, {"id": event_id, "end_time": now})
        )

    def _write_images(
        self,
        camera_config: CameraConfig,
        label: str,
        event_id: str,
        draw: dict[str, any],
        img_frame: any,
    ) -> str:
        # write clean snapshot if enabled
        if camera_config.snapshots.clean_copy:
            ret, png = cv2.imencode(".png", img_frame)

            if ret:
                with open(
                    os.path.join(
                        CLIPS_DIR,
                        f"{camera_config.name}-{event_id}-clean.png",
                    ),
                    "wb",
                ) as p:
                    p.write(png.tobytes())

        # write jpg snapshot with optional annotations
        if draw.get("boxes") and isinstance(draw.get("boxes"), list):
            for box in draw.get("boxes"):
                x = box["box"][0] * camera_config.detect.width
                y = box["box"][1] * camera_config.detect.height
                width = box["box"][2] * camera_config.detect.width
                height = box["box"][3] * camera_config.detect.height

                draw_box_with_label(
                    img_frame,
                    x,
                    y,
                    x + width,
                    y + height,
                    label,
                    f"{box.get('score', '-')}% {int(width * height)}",
                    thickness=2,
                    color=box.get("color", (255, 0, 0)),
                )

        ret, jpg = cv2.imencode(".jpg", img_frame)
        with open(
            os.path.join(CLIPS_DIR, f"{camera_config.name}-{event_id}.jpg"),
            "wb",
        ) as j:
            j.write(jpg.tobytes())

        # create thumbnail with max height of 175 and save
        width = int(175 * img_frame.shape[1] / img_frame.shape[0])
        thumb = cv2.resize(img_frame, dsize=(width, 175), interpolation=cv2.INTER_AREA)
        ret, jpg = cv2.imencode(".jpg", thumb)
        return base64.b64encode(jpg.tobytes()).decode("utf-8")
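And a sketch of how the new processor is driven (assumed call pattern; in this change set the real callers are FrigateApp and the HTTP layer). The camera name and frame below are placeholders:

import multiprocessing as mp

import numpy as np

from frigate.config import FrigateConfig
from frigate.events.external import ExternalEventProcessor

config = FrigateConfig.parse_file("/config/config.yml")  # assumes Frigate's YAML-aware parse_file
event_queue = mp.Queue()
processor = ExternalEventProcessor(config, event_queue)

frame = np.zeros((720, 1280, 3), np.uint8)  # stand-in for a real detect-resolution frame
event_id = processor.create_manual_event(
    camera="front_door",      # hypothetical camera name from the config
    label="package",
    sub_label=None,
    duration=None,            # None means open-ended; finish it explicitly below
    include_recording=True,
    draw={},                  # optionally {"boxes": [...]} to annotate the snapshot
    snapshot_frame=frame,
)
processor.finish_manual_event(event_id)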
frigate/events/maintainer.py (previously frigate/events.py)
@@ -1,29 +1,22 @@
 import datetime
 import logging
-import os
 import queue
 import threading
 
 from enum import Enum
-from pathlib import Path
-
-from peewee import fn
-
-from frigate.config import EventsConfig, FrigateConfig
-from frigate.const import CLIPS_DIR
-from frigate.models import Event
-from frigate.types import CameraMetricsTypes
-from frigate.util import to_relative_box
-
 from multiprocessing.queues import Queue
 from multiprocessing.synchronize import Event as MpEvent
 from typing import Dict
 
+from frigate.config import EventsConfig, FrigateConfig
+from frigate.models import Event
+from frigate.types import CameraMetricsTypes
+from frigate.util import to_relative_box
+
 logger = logging.getLogger(__name__)


 class EventTypeEnum(str, Enum):
-    # api = "api"
+    api = "api"
     # audio = "audio"
     tracked_object = "tracked_object"
 
@@ -68,7 +61,7 @@ class EventProcessor(threading.Thread):
     def run(self) -> None:
         # set an end_time on events without an end_time on startup
         Event.update(end_time=Event.start_time + 30).where(
-            Event.end_time == None
+            Event.end_time is None
         ).execute()
 
         while not self.stop_event.is_set():
@@ -97,12 +90,14 @@ class EventProcessor(threading.Thread):
                     continue
 
                 self.handle_object_detection(event_type, camera, event_data)
+            elif source_type == EventTypeEnum.api:
+                self.handle_external_detection(event_type, event_data)
 
         # set an end_time on events without an end_time before exiting
         Event.update(end_time=datetime.datetime.now().timestamp()).where(
-            Event.end_time == None
+            Event.end_time is None
        ).execute()
-        logger.info(f"Exiting event processor...")
+        logger.info("Exiting event processor...")
 
     def handle_object_detection(
         self,
@@ -197,160 +192,35 @@ class EventProcessor(threading.Thread):
         del self.events_in_process[event_data["id"]]
         self.event_processed_queue.put((event_data["id"], camera))
 
+    def handle_external_detection(self, type: str, event_data: Event):
+        if type == "new":
+            event = {
+                Event.id: event_data["id"],
+                Event.label: event_data["label"],
+                Event.sub_label: event_data["sub_label"],
+                Event.camera: event_data["camera"],
+                Event.start_time: event_data["start_time"],
+                Event.end_time: event_data["end_time"],
+                Event.thumbnail: event_data["thumbnail"],
+                Event.has_clip: event_data["has_clip"],
+                Event.has_snapshot: event_data["has_snapshot"],
+                Event.zones: [],
+                Event.data: {},
+            }
+        elif type == "end":
+            event = {
+                Event.id: event_data["id"],
+                Event.end_time: event_data["end_time"],
+            }
+
+        try:
+            (
+                Event.insert(event)
+                .on_conflict(
+                    conflict_target=[Event.id],
+                    update=event,
+                )
+                .execute()
+            )
+        except Exception:
+            logger.warning(f"Failed to update manual event: {event_data['id']}")
-
-class EventCleanup(threading.Thread):
-    def __init__(self, config: FrigateConfig, stop_event: MpEvent):
-        threading.Thread.__init__(self)
-        self.name = "event_cleanup"
-        self.config = config
-        self.stop_event = stop_event
-        self.camera_keys = list(self.config.cameras.keys())
-
-    def expire(self, media_type: str) -> None:
-        # TODO: Refactor media_type to enum
-        ## Expire events from unlisted cameras based on the global config
-        if media_type == "clips":
-            retain_config = self.config.record.events.retain
-            file_extension = "mp4"
-            update_params = {"has_clip": False}
-        else:
-            retain_config = self.config.snapshots.retain
-            file_extension = "jpg"
-            update_params = {"has_snapshot": False}
-
-        distinct_labels = (
-            Event.select(Event.label)
-            .where(Event.camera.not_in(self.camera_keys))
-            .distinct()
-        )
-
-        # loop over object types in db
-        for l in distinct_labels:
-            # get expiration time for this label
-            expire_days = retain_config.objects.get(l.label, retain_config.default)
-            expire_after = (
-                datetime.datetime.now() - datetime.timedelta(days=expire_days)
-            ).timestamp()
-            # grab all events after specific time
-            expired_events = Event.select().where(
-                Event.camera.not_in(self.camera_keys),
-                Event.start_time < expire_after,
-                Event.label == l.label,
-                Event.retain_indefinitely == False,
-            )
-            # delete the media from disk
-            for event in expired_events:
-                media_name = f"{event.camera}-{event.id}"
-                media_path = Path(
-                    f"{os.path.join(CLIPS_DIR, media_name)}.{file_extension}"
-                )
-                media_path.unlink(missing_ok=True)
-                if file_extension == "jpg":
-                    media_path = Path(
-                        f"{os.path.join(CLIPS_DIR, media_name)}-clean.png"
-                    )
-                    media_path.unlink(missing_ok=True)
-
-            # update the clips attribute for the db entry
-            update_query = Event.update(update_params).where(
-                Event.camera.not_in(self.camera_keys),
-                Event.start_time < expire_after,
-                Event.label == l.label,
-                Event.retain_indefinitely == False,
-            )
-            update_query.execute()
-
-        ## Expire events from cameras based on the camera config
-        for name, camera in self.config.cameras.items():
-            if media_type == "clips":
-                retain_config = camera.record.events.retain
-            else:
-                retain_config = camera.snapshots.retain
-            # get distinct objects in database for this camera
-            distinct_labels = (
-                Event.select(Event.label).where(Event.camera == name).distinct()
-            )
-
-            # loop over object types in db
-            for l in distinct_labels:
-                # get expiration time for this label
-                expire_days = retain_config.objects.get(l.label, retain_config.default)
-                expire_after = (
-                    datetime.datetime.now() - datetime.timedelta(days=expire_days)
-                ).timestamp()
-                # grab all events after specific time
-                expired_events = Event.select().where(
-                    Event.camera == name,
-                    Event.start_time < expire_after,
-                    Event.label == l.label,
-                    Event.retain_indefinitely == False,
-                )
-                # delete the grabbed clips from disk
-                for event in expired_events:
-                    media_name = f"{event.camera}-{event.id}"
-                    media_path = Path(
-                        f"{os.path.join(CLIPS_DIR, media_name)}.{file_extension}"
-                    )
-                    media_path.unlink(missing_ok=True)
-                    if file_extension == "jpg":
-                        media_path = Path(
-                            f"{os.path.join(CLIPS_DIR, media_name)}-clean.png"
-                        )
-                        media_path.unlink(missing_ok=True)
-                # update the clips attribute for the db entry
-                update_query = Event.update(update_params).where(
-                    Event.camera == name,
-                    Event.start_time < expire_after,
-                    Event.label == l.label,
-                    Event.retain_indefinitely == False,
-                )
-                update_query.execute()
-
-    def purge_duplicates(self) -> None:
-        duplicate_query = """with grouped_events as (
-          select id,
-            label,
-            camera,
-            has_snapshot,
-            has_clip,
-            row_number() over (
-              partition by label, camera, round(start_time/5,0)*5
-              order by end_time-start_time desc
-            ) as copy_number
-          from event
-        )
-
-        select distinct id, camera, has_snapshot, has_clip from grouped_events
-        where copy_number > 1;"""
-
-        duplicate_events = Event.raw(duplicate_query)
-        for event in duplicate_events:
-            logger.debug(f"Removing duplicate: {event.id}")
-            media_name = f"{event.camera}-{event.id}"
-            media_path = Path(f"{os.path.join(CLIPS_DIR, media_name)}.jpg")
-            media_path.unlink(missing_ok=True)
-            media_path = Path(f"{os.path.join(CLIPS_DIR, media_name)}-clean.png")
-            media_path.unlink(missing_ok=True)
-            media_path = Path(f"{os.path.join(CLIPS_DIR, media_name)}.mp4")
-            media_path.unlink(missing_ok=True)
-
-        (
-            Event.delete()
-            .where(Event.id << [event.id for event in duplicate_events])
-            .execute()
-        )
-
-    def run(self) -> None:
-        # only expire events every 5 minutes
-        while not self.stop_event.wait(300):
-            self.expire("clips")
-            self.expire("snapshots")
-            self.purge_duplicates()
-
-        # drop events from db where has_clip and has_snapshot are false
-        delete_query = Event.delete().where(
-            Event.has_clip == False, Event.has_snapshot == False
-        )
-        delete_query.execute()
-
-        logger.info(f"Exiting event cleanup...")
frigate/ffmpeg_presets.py
@@ -2,13 +2,11 @@
 
 import logging
 import os
-
 from typing import Any
 
-from frigate.version import VERSION
 from frigate.const import BTBN_PATH
 from frigate.util import vainfo_hwaccel
+from frigate.version import VERSION
 
 logger = logging.getLogger(__name__)
 
@@ -53,8 +51,8 @@ _user_agent_args = [
 ]
 
 PRESETS_HW_ACCEL_DECODE = {
-    "preset-rpi-32-h264": ["-c:v", "h264_v4l2m2m"],
-    "preset-rpi-64-h264": ["-c:v", "h264_v4l2m2m"],
+    "preset-rpi-32-h264": ["-c:v:1", "h264_v4l2m2m"],
+    "preset-rpi-64-h264": ["-c:v:1", "h264_v4l2m2m"],
     "preset-vaapi": [
         "-hwaccel_flags",
         "allow_profile_mismatch",
@@ -320,7 +318,7 @@ def parse_preset_input(arg: Any, detect_fps: int) -> list[str]:
 
     if arg == "preset-http-jpeg-generic":
         input = PRESETS_INPUT[arg].copy()
-        input[1] = str(detect_fps)
+        input[len(_user_agent_args) + 1] = str(detect_fps)
         return input
 
     return PRESETS_INPUT.get(arg, None)
frigate/http.py (125 changed lines)
@@ -1,25 +1,22 @@
 import base64
-from datetime import datetime, timedelta, timezone
 import copy
 import glob
-import logging
 import json
+import logging
 import os
 import subprocess as sp
-import pytz
 import time
 import traceback
+from datetime import datetime, timedelta, timezone
 from functools import reduce
 from pathlib import Path
-from tzlocal import get_localzone_name
 from urllib.parse import unquote
 from prometheus_client import REGISTRY, generate_latest, make_wsgi_app
 from werkzeug.middleware.dispatcher import DispatcherMiddleware

 import cv2

 import numpy as np
+import pytz
 from flask import (
     Blueprint,
     Flask,
@@ -29,12 +26,13 @@ from flask import (
     make_response,
     request,
 )
-from peewee import SqliteDatabase, operator, fn, DoesNotExist
+from peewee import DoesNotExist, SqliteDatabase, fn, operator
 from playhouse.shortcuts import model_to_dict
+from tzlocal import get_localzone_name

 from frigate.config import FrigateConfig
 from frigate.const import CLIPS_DIR, MAX_SEGMENT_DURATION, RECORD_DIR
+from frigate.events.external import ExternalEventProcessor
 from frigate.models import Event, Recordings, Timeline
 from frigate.object_processing import TrackedObject
 from frigate.monitoring.prometheus import setupRegistry
@@ -42,14 +40,15 @@ from frigate.monitoring.prometheus import setupRegistry
 from frigate.monitoring.stats import stats_snapshot
 from frigate.plus import PlusApi
 from frigate.ptz import OnvifController
+from frigate.stats import stats_snapshot
+from frigate.storage import StorageMaintainer
 from frigate.util import (
     clean_camera_user_pass,
     ffprobe_stream,
+    get_tz_modifiers,
     restart_frigate,
     vainfo_hwaccel,
-    get_tz_modifiers,
 )
-from frigate.storage import StorageMaintainer
 from frigate.version import VERSION

 logger = logging.getLogger(__name__)
@@ -64,6 +63,7 @@ def create_app(
     detected_frames_processor,
     storage_maintainer: StorageMaintainer,
     onvif: OnvifController,
+    external_processor: ExternalEventProcessor,
     plus_api: PlusApi,
 ):
     app = Flask(__name__)
@@ -83,6 +83,7 @@ def create_app(
     app.detected_frames_processor = detected_frames_processor
     app.storage_maintainer = storage_maintainer
     app.onvif = onvif
+    app.external_processor = external_processor
     app.plus_api = plus_api
     app.camera_error_image = None
     app.hwaccel_errors = []
@@ -110,10 +111,10 @@ def events_summary():

     clauses = []

-    if not has_clip is None:
+    if has_clip is not None:
         clauses.append((Event.has_clip == has_clip))

-    if not has_snapshot is None:
+    if has_snapshot is not None:
         clauses.append((Event.has_snapshot == has_snapshot))

     if len(clauses) == 0:
@@ -203,7 +204,7 @@ def send_to_plus(id):
         return make_response(jsonify({"success": False, "message": message}), 404)

     # events from before the conversion to relative dimensions cant include annotations
-    if any(d > 1 for d in event.data["box"]):
+    if event.data.get("box") is None:
         include_annotation = None

     if event.end_time is None:
@@ -258,8 +259,7 @@ def send_to_plus(id):
     event.plus_id = plus_id
     event.save()

-    if not include_annotation is None:
+    if include_annotation is not None:
-        region = event.data["region"]
         box = event.data["box"]

         try:
@@ -301,13 +301,13 @@ def false_positive(id):
         return make_response(jsonify({"success": False, "message": message}), 404)

     # events from before the conversion to relative dimensions cant include annotations
-    if any(d > 1 for d in event.data["box"]):
+    if event.data.get("box") is None:
-        message = f"Events prior to 0.13 cannot be submitted as false positives"
+        message = "Events prior to 0.13 cannot be submitted as false positives"
         logger.error(message)
         return make_response(jsonify({"success": False, "message": message}), 400)

     if event.false_positive:
-        message = f"False positive already submitted to Frigate+"
+        message = "False positive already submitted to Frigate+"
         logger.error(message)
         return make_response(jsonify({"success": False, "message": message}), 400)

@@ -443,7 +443,7 @@ def get_sub_labels():
         parts = label.split(",")

         for part in parts:
-            if not (part.strip()) in sub_labels:
+            if part.strip() not in sub_labels:
                 sub_labels.append(part.strip())

     sub_labels.sort()
@@ -482,7 +482,7 @@ def event_thumbnail(id, max_cache_age=2592000):
     event_complete = False
     try:
         event = Event.get(Event.id == id)
-        if not event.end_time is None:
+        if event.end_time is not None:
             event_complete = True
         thumbnail_bytes = base64.b64decode(event.thumbnail)
     except DoesNotExist:
@@ -492,9 +492,9 @@ def event_thumbnail(id, max_cache_age=2592000):
             for camera_state in camera_states:
                 if id in camera_state.tracked_objects:
                     tracked_obj = camera_state.tracked_objects.get(id)
-                    if not tracked_obj is None:
+                    if tracked_obj is not None:
                         thumbnail_bytes = tracked_obj.get_thumbnail()
-        except:
+        except Exception:
            return "Event not found", 404

     if thumbnail_bytes is None:
@@ -599,7 +599,7 @@ def event_snapshot(id):
     event_complete = False
     jpg_bytes = None
     try:
-        event = Event.get(Event.id == id, Event.end_time != None)
+        event = Event.get(Event.id == id, Event.end_time is not None)
         event_complete = True
         if not event.has_snapshot:
             return "Snapshot not available", 404
@@ -615,7 +615,7 @@ def event_snapshot(id):
             for camera_state in camera_states:
                 if id in camera_state.tracked_objects:
                     tracked_obj = camera_state.tracked_objects.get(id)
-                    if not tracked_obj is None:
+                    if tracked_obj is not None:
                         jpg_bytes = tracked_obj.get_jpg_bytes(
                             timestamp=request.args.get("timestamp", type=int),
                             bounding_box=request.args.get("bbox", type=int),
@@ -623,9 +623,9 @@ def event_snapshot(id):
                             height=request.args.get("h", type=int),
                             quality=request.args.get("quality", default=70, type=int),
                         )
-        except:
+        except Exception:
            return "Event not found", 404
-    except:
+    except Exception:
         return "Event not found", 404

     if jpg_bytes is None:
@@ -651,7 +651,7 @@ def label_snapshot(camera_name, label):
         event_query = (
             Event.select()
             .where(Event.camera == camera_name)
-            .where(Event.has_snapshot == True)
+            .where(Event.has_snapshot is True)
             .order_by(Event.start_time.desc())
         )
     else:
@@ -659,7 +659,7 @@ def label_snapshot(camera_name, label):
             Event.select()
             .where(Event.camera == camera_name)
             .where(Event.label == label)
-            .where(Event.has_snapshot == True)
+            .where(Event.has_snapshot is True)
             .order_by(Event.start_time.desc())
         )

@@ -826,13 +826,13 @@ def events():
     if before:
         clauses.append((Event.start_time < before))

-    if not has_clip is None:
+    if has_clip is not None:
         clauses.append((Event.has_clip == has_clip))

-    if not has_snapshot is None:
+    if has_snapshot is not None:
         clauses.append((Event.has_snapshot == has_snapshot))

-    if not in_progress is None:
+    if in_progress is not None:
         clauses.append((Event.end_time.is_null(in_progress)))

     if not include_thumbnails:
@@ -856,6 +856,58 @@ def events():
     return jsonify([model_to_dict(e, exclude=excluded_fields) for e in events])


+@bp.route("/events/<camera_name>/<label>/create", methods=["POST"])
+def create_event(camera_name, label):
+    if not camera_name or not current_app.frigate_config.cameras.get(camera_name):
+        return jsonify(
+            {"success": False, "message": f"{camera_name} is not a valid camera."}, 404
+        )
+
+    if not label:
+        return jsonify({"success": False, "message": f"{label} must be set."}, 404)
+
+    json: dict[str, any] = request.get_json(silent=True) or {}
+
+    try:
+        frame = current_app.detected_frames_processor.get_current_frame(camera_name)
+
+        event_id = current_app.external_processor.create_manual_event(
+            camera_name,
+            label,
+            json.get("sub_label", None),
+            json.get("duration", 30),
+            json.get("include_recording", True),
+            json.get("draw", {}),
+            frame,
+        )
+    except Exception as e:
+        logger.error(f"The error is {e}")
+        return jsonify(
+            {"success": False, "message": f"An unknown error occurred: {e}"}, 404
+        )
+
+    return jsonify(
+        {
+            "success": True,
+            "message": "Successfully created event.",
+            "event_id": event_id,
+        },
+        200,
+    )
+
+
+@bp.route("/events/<event_id>/end", methods=["PUT"])
+def end_event(event_id):
+    try:
+        current_app.external_processor.finish_manual_event(event_id)
+    except Exception:
+        return jsonify(
+            {"success": False, "message": f"{event_id} must be set and valid."}, 404
+        )
+
+    return jsonify({"success": True, "message": "Event successfully ended."}, 200)


 @bp.route("/config")
 def config():
     config = current_app.frigate_config.dict()
@@ -913,8 +965,8 @@ def config_save():

     # Validate the config schema
     try:
-        new_yaml = FrigateConfig.parse_raw(new_config)
+        FrigateConfig.parse_raw(new_config)
-    except Exception as e:
+    except Exception:
         return make_response(
             jsonify(
                 {
@@ -938,12 +990,12 @@ def config_save():
         with open(config_file, "w") as f:
             f.write(new_config)
             f.close()
-    except Exception as e:
+    except Exception:
         return make_response(
             jsonify(
                 {
                     "success": False,
-                    "message": f"Could not write config file, be sure that Frigate has write permission on the config file.",
+                    "message": "Could not write config file, be sure that Frigate has write permission on the config file.",
                 }
             ),
             400,
@@ -1304,6 +1356,7 @@ def recording_clip(camera_name, start_ts, end_ts):
     if not os.path.exists(path):
         ffmpeg_cmd = [
             "ffmpeg",
+            "-hide_banner",
             "-y",
             "-protocol_whitelist",
             "pipe,file",
@@ -1484,7 +1537,7 @@ def ffprobe():

     if not path_param:
         return jsonify(
-            {"success": False, "message": f"Path needs to be provided."}, "404"
+            {"success": False, "message": "Path needs to be provided."}, "404"
         )

     if path_param.startswith("camera"):

@@ -1,18 +1,17 @@
 # adapted from https://medium.com/@jonathonbao/python3-logging-with-multiprocessing-f51f460b8778
 import logging
-import threading
-import os
-import signal
-import queue
 import multiprocessing as mp
-from multiprocessing.queues import Queue
+import os
-from logging import handlers
+import queue
-from typing import Optional
+import signal
-from types import FrameType
+import threading
-from setproctitle import setproctitle
-from typing import Deque, Optional
-from types import FrameType
 from collections import deque
+from logging import handlers
+from multiprocessing.queues import Queue
+from types import FrameType
+from typing import Deque, Optional
+
+from setproctitle import setproctitle

 from frigate.util import clean_camera_user_pass

@@ -44,7 +43,7 @@ def root_configurer(queue: Queue) -> None:


 def log_process(log_queue: Queue) -> None:
-    threading.current_thread().name = f"logger"
+    threading.current_thread().name = "logger"
     setproctitle("frigate.logger")
     listener_configurer()

@@ -63,6 +62,8 @@ def log_process(log_queue: Queue) -> None:
             if stop_event.is_set():
                 break
             continue
+        if record.msg.startswith("You are using a scalar distance function"):
+            continue
         logger = logging.getLogger(record.name)
         logger.handle(record)

@@ -1,12 +1,11 @@
-from numpy import unique
 from peewee import (
-    Model,
+    BooleanField,
     CharField,
     DateTimeField,
     FloatField,
-    BooleanField,
-    TextField,
     IntegerField,
+    Model,
+    TextField,
 )
 from playhouse.sqlite_ext import JSONField

@@ -1,23 +1,30 @@
 import asyncio
 import json
 import logging
+import os
+import shutil
 import threading
 import time
-import psutil
-import shutil
-import os
-import requests
-from typing import Optional, Any
 from multiprocessing.synchronize import Event as MpEvent
+from typing import Any, Optional
+
+import psutil
+import requests
+from requests.exceptions import RequestException

 from frigate.comms.dispatcher import Dispatcher
 from frigate.config import FrigateConfig
-from frigate.const import DRIVER_AMD, DRIVER_ENV_VAR, RECORD_DIR, CLIPS_DIR, CACHE_DIR
+from frigate.const import CACHE_DIR, CLIPS_DIR, DRIVER_AMD, DRIVER_ENV_VAR, RECORD_DIR
-from frigate.types import StatsTrackingTypes, CameraMetricsTypes
-from frigate.util import get_amd_gpu_stats, get_intel_gpu_stats, get_nvidia_gpu_stats
-from frigate.version import VERSION
-from frigate.util import get_cpu_stats
 from frigate.object_detection import ObjectDetectProcess
+from frigate.types import CameraMetricsTypes, StatsTrackingTypes
+from frigate.util import (
+    get_amd_gpu_stats,
+    get_bandwidth_stats,
+    get_cpu_stats,
+    get_intel_gpu_stats,
+    get_nvidia_gpu_stats,
+)
+from frigate.version import VERSION

 logger = logging.getLogger(__name__)

@@ -31,7 +38,7 @@ def get_latest_version(config: FrigateConfig) -> str:
             "https://api.github.com/repos/blakeblackshear/frigate/releases/latest",
             timeout=10,
         )
-    except:
+    except RequestException:
         return "unknown"

     response = request.json()
@@ -101,6 +108,7 @@ def get_processing_stats(
         [
             asyncio.create_task(set_gpu_stats(config, stats, hwaccel_errors)),
             asyncio.create_task(set_cpu_stats(stats)),
+            asyncio.create_task(set_bandwidth_stats(stats)),
         ]
     )

@@ -118,6 +126,14 @@ async def set_cpu_stats(all_stats: dict[str, Any]) -> None:
     all_stats["cpu_usages"] = cpu_stats


+async def set_bandwidth_stats(all_stats: dict[str, Any]) -> None:
+    """Set bandwidth from nethogs."""
+    bandwidth_stats = get_bandwidth_stats()
+
+    if bandwidth_stats:
+        all_stats["bandwidth_usages"] = bandwidth_stats
+
+
 async def set_gpu_stats(
     config: FrigateConfig, all_stats: dict[str, Any], hwaccel_errors: list[str]
 ) -> None:
@@ -299,4 +315,4 @@ class StatsEmitter(threading.Thread):
             )
             self.dispatcher.publish("stats", json.dumps(stats), retain=False)
             logger.debug("Finished stats collection")
-        logger.info(f"Exiting stats emitter...")
+        logger.info("Exiting stats emitter...")

@@ -1,6 +1,7 @@
 import cv2
 import imutils
 import numpy as np
+
 from frigate.config import MotionConfig


@@ -12,7 +12,6 @@ from setproctitle import setproctitle

 from frigate.config import InputTensorEnum
 from frigate.detectors import create_detector
-
 from frigate.util import EventsPerSecond, SharedMemoryFrameManager, listen, load_labels

 logger = logging.getLogger(__name__)
@@ -161,7 +160,7 @@ class ObjectDetectProcess:

     def start_or_restart(self):
         self.detection_start.value = 0.0
-        if (not self.detect_process is None) and self.detect_process.is_alive():
+        if (self.detect_process is not None) and self.detect_process.is_alive():
             self.stop()
         self.detect_process = mp.Process(
             target=run_detector,

@@ -15,13 +15,13 @@ import numpy as np
 from frigate.comms.dispatcher import Dispatcher
 from frigate.config import (
     CameraConfig,
-    MqttConfig,
-    SnapshotsConfig,
-    RecordConfig,
     FrigateConfig,
+    MqttConfig,
+    RecordConfig,
+    SnapshotsConfig,
 )
 from frigate.const import CLIPS_DIR
-from frigate.events import EventTypeEnum
+from frigate.events.maintainer import EventTypeEnum
 from frigate.util import (
     SharedMemoryFrameManager,
     calculate_region,
@@ -141,7 +141,7 @@ class TrackedObject:
         # check each zone
         for name, zone in self.camera_config.zones.items():
             # if the zone is not for this object type, skip
-            if len(zone.objects) > 0 and not obj_data["label"] in zone.objects:
+            if len(zone.objects) > 0 and obj_data["label"] not in zone.objects:
                 continue
             contour = zone.contour
             # check if the object is in the zone
@@ -177,11 +177,7 @@ class TrackedObject:
         return (thumb_update, significant_change)

     def to_dict(self, include_thumbnail: bool = False):
-        snapshot_time = (
-            self.thumbnail_data["frame_time"]
-            if not self.thumbnail_data is None
-            else 0.0
-        )
+        (self.thumbnail_data["frame_time"] if self.thumbnail_data is not None else 0.0)
         event = {
             "id": self.obj_data["id"],
             "camera": self.camera,
@@ -526,7 +522,7 @@ class CameraState:
         for id in removed_ids:
             # publish events to mqtt
             removed_obj = tracked_objects[id]
-            if not "end_time" in removed_obj.obj_data:
+            if "end_time" not in removed_obj.obj_data:
                 removed_obj.obj_data["end_time"] = frame_time
                 for c in self.callbacks["end"]:
                     c(self.name, removed_obj, frame_time)
@@ -1028,4 +1024,4 @@ class TrackedObjectProcessor(threading.Thread):
             event_id, camera = self.event_processed_queue.get()
             self.camera_states[camera].finished(event_id)

-        logger.info(f"Exiting object processor...")
+        logger.info("Exiting object processor...")

@@ -4,7 +4,6 @@ import logging
 import math
 import multiprocessing as mp
 import os
-import operator
 import queue
 import signal
 import subprocess as sp
@@ -149,7 +148,7 @@ class BroadcastThread(threading.Thread):
             ):
                 try:
                     ws.send(buf, binary=True)
-                except:
+                except ValueError:
                     pass
             elif self.converter.process.poll() is not None:
                 break
@@ -185,7 +184,7 @@ class BirdsEyeFrameManager:
         if len(logo_files) > 0:
             birdseye_logo = cv2.imread(logo_files[0], cv2.IMREAD_UNCHANGED)

-        if not birdseye_logo is None:
+        if birdseye_logo is not None:
             transparent_layer = birdseye_logo[:, :, 3]
             y_offset = height // 2 - transparent_layer.shape[0] // 2
             x_offset = width // 2 - transparent_layer.shape[1] // 2
@@ -229,7 +228,7 @@ class BirdsEyeFrameManager:
         self.last_output_time = 0.0

     def clear_frame(self):
-        logger.debug(f"Clearing the birdseye frame")
+        logger.debug("Clearing the birdseye frame")
         self.frame[:] = self.blank_frame

     def copy_to_position(self, position, camera=None, frame_time=None):
@@ -301,7 +300,7 @@ class BirdsEyeFrameManager:
         # reset the layout if it needs to be different
         if layout_dim != self.layout_dim or reset_layout:
             if reset_layout:
-                logger.debug(f"Added new cameras, resetting layout...")
+                logger.debug("Added new cameras, resetting layout...")

             logger.debug(f"Changing layout size from {self.layout_dim} to {layout_dim}")
             self.layout_dim = layout_dim
@@ -385,7 +384,7 @@ class BirdsEyeFrameManager:
             ]
         # if not an empty spot and the camera has a newer frame, copy it
         elif (
-            not camera is None
+            camera is not None
             and self.cameras[camera]["current_frame"]
             != self.cameras[camera]["layout_frame"]
         ):
@@ -423,8 +422,8 @@ class BirdsEyeFrameManager:


 def output_frames(config: FrigateConfig, video_output_queue):
-    threading.current_thread().name = f"output"
+    threading.current_thread().name = "output"
-    setproctitle(f"frigate.output")
+    setproctitle("frigate.output")

     stop_event = mp.Event()

@@ -3,12 +3,14 @@ import json
 import logging
 import os
 import re
-from typing import Any, Dict, List
+from typing import Any, List
-import requests
-from frigate.const import PLUS_ENV_VAR, PLUS_API_HOST
-from requests.models import Response
 import cv2
+import requests
 from numpy import ndarray
+from requests.models import Response
+
+from frigate.const import PLUS_API_HOST, PLUS_ENV_VAR

 logger = logging.getLogger(__name__)

@@ -2,13 +2,12 @@

 import logging
 import site

 from enum import Enum

 from onvif import ONVIFCamera, ONVIFError

 from frigate.config import FrigateConfig


 logger = logging.getLogger(__name__)


@@ -79,8 +78,8 @@ class OnvifController:
         try:
             presets: list[dict] = ptz.GetPresets({"ProfileToken": profile.token})
         except ONVIFError as e:
-            logger.error(f"Unable to get presets from camera: {camera_name}: {e}")
+            logger.warning(f"Unable to get presets from camera: {camera_name}: {e}")
-            return False
+            presets = []

         for preset in presets:
             self.cams[camera_name]["presets"][preset["Name"].lower()] = preset["token"]
@@ -145,7 +144,7 @@ class OnvifController:
         onvif.get_service("ptz").ContinuousMove(move_request)

     def _move_to_preset(self, camera_name: str, preset: str) -> None:
-        if not preset in self.cams[camera_name]["presets"]:
+        if preset not in self.cams[camera_name]["presets"]:
             logger.error(f"{preset} is not a valid preset for {camera_name}")
             return

frigate/record/__init__.py (new empty file)

@@ -5,12 +5,12 @@ import itertools
 import logging
 import os
 import threading
+from multiprocessing.synchronize import Event as MpEvent
 from pathlib import Path

 from peewee import DoesNotExist
-from multiprocessing.synchronize import Event as MpEvent

-from frigate.config import RetainModeEnum, FrigateConfig
+from frigate.config import FrigateConfig, RetainModeEnum
 from frigate.const import RECORD_DIR, SECONDS_IN_DAY
 from frigate.models import Event, Recordings, Timeline
 from frigate.record.util import remove_empty_directories
@@ -225,7 +225,7 @@ class RecordingCleanup(threading.Thread):

         recordings_to_delete = []
         for recording in recordings.objects().iterator():
-            if not recording.path in files_on_disk:
+            if recording.path not in files_on_disk:
                 recordings_to_delete.append(recording.id)

         logger.debug(
@@ -247,7 +247,7 @@ class RecordingCleanup(threading.Thread):
         # Expire tmp clips every minute, recordings and clean directories every hour.
         for counter in itertools.cycle(range(self.config.record.expire_interval)):
             if self.stop_event.wait(60):
-                logger.info(f"Exiting recording cleanup...")
+                logger.info("Exiting recording cleanup...")
                 break
             self.clean_tmp_clips()

@@ -9,14 +9,14 @@ import random
 import string
 import subprocess as sp
 import threading
-import psutil

 from collections import defaultdict
 from multiprocessing.synchronize import Event as MpEvent
 from pathlib import Path
 from typing import Any, Tuple

-from frigate.config import RetainModeEnum, FrigateConfig
+import psutil
+
+from frigate.config import FrigateConfig, RetainModeEnum
 from frigate.const import CACHE_DIR, MAX_SEGMENT_DURATION, RECORD_DIR
 from frigate.models import Event, Recordings
 from frigate.types import RecordMetricsTypes
@@ -63,7 +63,7 @@ class RecordingMaintainer(threading.Thread):
                 for nt in flist:
                     if nt.path.startswith(CACHE_DIR):
                         files_in_use.append(nt.path.split("/")[-1])
-            except:
+            except psutil.Error:
                 continue

         # group recordings by camera
@@ -115,7 +115,7 @@ class RecordingMaintainer(threading.Thread):
             Event.select()
             .where(
                 Event.camera == camera,
-                (Event.end_time == None)
+                (Event.end_time is None)
                 | (Event.end_time >= recordings[0]["start_time"].timestamp()),
                 Event.has_clip,
             )
@@ -127,7 +127,7 @@ class RecordingMaintainer(threading.Thread):

         # Just delete files if recordings are turned off
         if (
-            not camera in self.config.cameras
+            camera not in self.config.cameras
             or not self.process_info[camera]["record_enabled"].value
         ):
             Path(cache_path).unlink(missing_ok=True)
@@ -296,6 +296,7 @@ class RecordingMaintainer(threading.Thread):
         # add faststart to kept segments to improve metadata reading
         ffmpeg_cmd = [
             "ffmpeg",
+            "-hide_banner",
             "-y",
             "-i",
             cache_path,
@@ -394,4 +395,4 @@ class RecordingMaintainer(threading.Thread):
             duration = datetime.datetime.now().timestamp() - run_start
             wait_time = max(0, 5 - duration)

-        logger.info(f"Exiting recording maintenance...")
+        logger.info("Exiting recording maintenance...")

@@ -4,12 +4,11 @@ import logging
 import multiprocessing as mp
 import signal
 import threading

-from setproctitle import setproctitle
 from types import FrameType
 from typing import Optional

 from playhouse.sqliteq import SqliteQueueDatabase
+from setproctitle import setproctitle

 from frigate.config import FrigateConfig
 from frigate.models import Event, Recordings, Timeline

@@ -1,9 +1,9 @@
 """Handle storage retention and usage."""

 import logging
-from pathlib import Path
 import shutil
 import threading
+from pathlib import Path

 from peewee import fn

@@ -107,7 +107,7 @@ class StorageMaintainer(threading.Thread):
         retained_events: Event = (
             Event.select()
             .where(
-                Event.retain_indefinitely == True,
+                Event.retain_indefinitely is True,
                 Event.has_clip,
             )
             .order_by(Event.start_time.asc())
@@ -188,4 +188,4 @@ class StorageMaintainer(threading.Thread):
             if self.check_storage_needs_cleanup():
                 self.reduce_storage_consumption()

-        logger.info(f"Exiting storage maintainer...")
+        logger.info("Exiting storage maintainer...")

@@ -1,13 +1,11 @@
 import json
 import os
 import unittest

 import numpy as np
 from pydantic import ValidationError

-from frigate.config import (
-    BirdseyeModeEnum,
-    FrigateConfig,
-)
+from frigate.config import BirdseyeModeEnum, FrigateConfig
 from frigate.const import MODEL_CACHE_DIR
 from frigate.detectors import DetectorTypeEnum
 from frigate.plus import PlusApi
@@ -675,7 +673,7 @@ class TestConfig(unittest.TestCase):
         runtime_config = frigate_config.runtime_config()
         ffmpeg_cmds = runtime_config.cameras["back"].ffmpeg_cmds
         assert len(ffmpeg_cmds) == 1
-        assert not "clips" in ffmpeg_cmds[0]["roles"]
+        assert "clips" not in ffmpeg_cmds[0]["roles"]

     def test_max_disappeared_default(self):
         config = {
@@ -986,7 +984,7 @@ class TestConfig(unittest.TestCase):
         }

         frigate_config = FrigateConfig(**config)
-        runtime_config = frigate_config.runtime_config()
+        frigate_config.runtime_config()

     def test_global_detect(self):
         config = {
@@ -1145,7 +1143,7 @@ class TestConfig(unittest.TestCase):
         assert config == frigate_config.dict(exclude_unset=True)

         runtime_config = frigate_config.runtime_config()
-        assert runtime_config.cameras["back"].snapshots.bounding_box == False
+        assert runtime_config.cameras["back"].snapshots.bounding_box is False
         assert runtime_config.cameras["back"].snapshots.height == 150
         assert runtime_config.cameras["back"].snapshots.enabled

@@ -1,7 +1,9 @@
+from unittest import TestCase, main
+
 import cv2
 import numpy as np
-from unittest import TestCase, main
-from frigate.util import get_yuv_crop, copy_yuv_to_position
+from frigate.util import copy_yuv_to_position, get_yuv_crop


 class TestCopyYuvToPosition(TestCase):

@@ -1,4 +1,5 @@
 import unittest
+
 from frigate.config import FFMPEG_INPUT_ARGS_DEFAULT, FrigateConfig
 from frigate.ffmpeg_presets import parse_preset_input

@@ -52,7 +53,7 @@ class TestFfmpegPresets(unittest.TestCase):
         assert "preset-rpi-64-h264" not in (
             " ".join(frigate_config.cameras["back"].ffmpeg_cmds[0]["cmd"])
         )
-        assert "-c:v h264_v4l2m2m" in (
+        assert "-c:v:1 h264_v4l2m2m" in (
             " ".join(frigate_config.cameras["back"].ffmpeg_cmds[0]["cmd"])
         )

@@ -1,7 +1,7 @@
 import unittest
 from unittest.mock import MagicMock, patch

-from frigate.util import get_amd_gpu_stats, get_intel_gpu_stats, get_nvidia_gpu_stats
+from frigate.util import get_amd_gpu_stats, get_intel_gpu_stats


 class TestGpuStats(unittest.TestCase):

@@ -6,15 +6,14 @@ import unittest
 from unittest.mock import patch

 from peewee_migrate import Router
+from playhouse.shortcuts import model_to_dict
 from playhouse.sqlite_ext import SqliteExtDatabase
 from playhouse.sqliteq import SqliteQueueDatabase
-from playhouse.shortcuts import model_to_dict

 from frigate.config import FrigateConfig
 from frigate.http import create_app
 from frigate.models import Event, Recordings
 from frigate.plus import PlusApi

 from frigate.test.const import TEST_DB, TEST_DB_CLEANUPS


@@ -120,6 +119,7 @@ class TestHttp(unittest.TestCase):
             None,
             None,
             None,
+            None,
             PlusApi(),
         )
         id = "123456.random"
@@ -127,22 +127,22 @@ class TestHttp(unittest.TestCase):

         with app.test_client() as client:
             _insert_mock_event(id)
-            events = client.get(f"/events").json
+            events = client.get("/events").json
             assert events
             assert len(events) == 1
             assert events[0]["id"] == id
             _insert_mock_event(id2)
-            events = client.get(f"/events").json
+            events = client.get("/events").json
             assert events
             assert len(events) == 2
             events = client.get(
-                f"/events",
+                "/events",
                 query_string={"limit": 1},
             ).json
             assert events
             assert len(events) == 1
             events = client.get(
-                f"/events",
+                "/events",
                 query_string={"has_clip": 0},
             ).json
             assert not events
@@ -155,6 +155,7 @@ class TestHttp(unittest.TestCase):
             None,
             None,
             None,
+            None,
             PlusApi(),
         )
         id = "123456.random"
@@ -175,6 +176,7 @@ class TestHttp(unittest.TestCase):
             None,
             None,
             None,
+            None,
             PlusApi(),
         )
         id = "123456.random"
@@ -194,6 +196,7 @@ class TestHttp(unittest.TestCase):
             None,
             None,
             None,
+            None,
             PlusApi(),
         )
         id = "123456.random"
@@ -215,6 +218,7 @@ class TestHttp(unittest.TestCase):
             None,
             None,
             None,
+            None,
             PlusApi(),
         )
         id = "123456.random"
@@ -225,12 +229,12 @@ class TestHttp(unittest.TestCase):
             event = client.get(f"/events/{id}").json
             assert event
             assert event["id"] == id
-            assert event["retain_indefinitely"] == True
+            assert event["retain_indefinitely"] is True
             client.delete(f"/events/{id}/retain")
             event = client.get(f"/events/{id}").json
             assert event
             assert event["id"] == id
-            assert event["retain_indefinitely"] == False
+            assert event["retain_indefinitely"] is False

     def test_set_delete_sub_label(self):
         app = create_app(
@@ -240,6 +244,7 @@ class TestHttp(unittest.TestCase):
             None,
             None,
             None,
+            None,
             PlusApi(),
         )
         id = "123456.random"
@@ -274,6 +279,7 @@ class TestHttp(unittest.TestCase):
             None,
             None,
             None,
+            None,
             PlusApi(),
         )
         id = "123456.random"
@@ -298,6 +304,7 @@ class TestHttp(unittest.TestCase):
             None,
             None,
             None,
+            None,
             PlusApi(),
         )

@@ -314,6 +321,7 @@ class TestHttp(unittest.TestCase):
             None,
             None,
             None,
+            None,
             PlusApi(),
         )
         id = "123456.random"
@@ -333,6 +341,7 @@ class TestHttp(unittest.TestCase):
             None,
             None,
             None,
+            None,
             PlusApi(),
         )
         mock_stats.return_value = self.test_stats

@@ -4,10 +4,10 @@ from unittest.mock import Mock, patch
 import numpy as np
 from pydantic import parse_obj_as

-from frigate.config import DetectorConfig, InputTensorEnum, ModelConfig
-from frigate.detectors import DetectorTypeEnum
 import frigate.detectors as detectors
 import frigate.object_detection
+from frigate.config import DetectorConfig, InputTensorEnum, ModelConfig
+from frigate.detectors import DetectorTypeEnum


 class TestLocalObjectDetector(unittest.TestCase):

@@ -1,5 +1,5 @@
-import numpy as np
 from unittest import TestCase, main

 from frigate.video import box_overlaps, reduce_boxes


@@ -1,21 +1,17 @@
 import datetime
-import json
 import logging
 import os
 import unittest
-from unittest.mock import MagicMock, patch
+from unittest.mock import MagicMock

 from peewee import DoesNotExist
 from peewee_migrate import Router
 from playhouse.sqlite_ext import SqliteExtDatabase
 from playhouse.sqliteq import SqliteQueueDatabase
-from playhouse.shortcuts import model_to_dict

 from frigate.config import FrigateConfig
-from frigate.http import create_app
 from frigate.models import Event, Recordings
 from frigate.storage import StorageMaintainer

 from frigate.test.const import TEST_DB, TEST_DB_CLEANUPS


@@ -1,6 +1,8 @@
+from unittest import TestCase, main
+
 import cv2
 import numpy as np
-from unittest import TestCase, main
 from frigate.util import yuv_region_2_rgb


@@ -33,7 +35,7 @@ class TestYuvRegion2RGB(TestCase):
         # cv2.imwrite(f"bgr_frame.jpg", self.bgr_frame)
         yuv_frame = cv2.cvtColor(bgr_frame, cv2.COLOR_BGR2YUV_I420)

-        cropped = yuv_region_2_rgb(yuv_frame, (0, 852, 648, 1500))
+        yuv_region_2_rgb(yuv_frame, (0, 852, 648, 1500))
         # cv2.imwrite(f"cropped.jpg", cv2.cvtColor(cropped, cv2.COLOR_RGB2BGR))


@@ -1,16 +1,14 @@
 """Record events for object, audio, etc. detections."""

 import logging
-import threading
 import queue
+import threading
-from frigate.config import FrigateConfig
-from frigate.events import EventTypeEnum
-from frigate.models import Timeline

 from multiprocessing.queues import Queue
 from multiprocessing.synchronize import Event as MpEvent

+from frigate.config import FrigateConfig
+from frigate.events.maintainer import EventTypeEnum
+from frigate.models import Timeline
 from frigate.util import to_relative_box

 logger = logging.getLogger(__name__)

frigate/track/__init__.py (new file, 13 lines)
@@ -0,0 +1,13 @@
+from abc import ABC, abstractmethod
+
+from frigate.config import DetectConfig
+
+
+class ObjectTracker(ABC):
+    @abstractmethod
+    def __init__(self, config: DetectConfig):
+        pass
+
+    @abstractmethod
+    def match_and_update(self, detections):
+        pass

@@ -1,22 +1,16 @@
-import copy
-import datetime
-import itertools
-import multiprocessing as mp
 import random
 import string
-import threading
-import time
 from collections import defaultdict

-import cv2
 import numpy as np
 from scipy.spatial import distance as dist

 from frigate.config import DetectConfig
+from frigate.track import ObjectTracker
 from frigate.util import intersection_over_union


-class ObjectTracker:
+class CentroidTracker(ObjectTracker):
     def __init__(self, config: DetectConfig):
         self.tracked_objects = {}
         self.disappeared = {}
@@ -141,11 +135,11 @@ class ObjectTracker:
             if self.is_expired(id):
                 self.deregister(id)

-    def match_and_update(self, frame_time, new_objects):
+    def match_and_update(self, frame_time, detections):
         # group by name
-        new_object_groups = defaultdict(lambda: [])
+        detection_groups = defaultdict(lambda: [])
-        for obj in new_objects:
+        for obj in detections:
-            new_object_groups[obj[0]].append(
+            detection_groups[obj[0]].append(
                 {
                     "label": obj[0],
                     "score": obj[1],
@@ -160,17 +154,17 @@ class ObjectTracker:
         # update any tracked objects with labels that are not
         # seen in the current objects and deregister if needed
         for obj in list(self.tracked_objects.values()):
-            if not obj["label"] in new_object_groups:
+            if obj["label"] not in detection_groups:
                 if self.disappeared[obj["id"]] >= self.max_disappeared:
                     self.deregister(obj["id"])
                 else:
                     self.disappeared[obj["id"]] += 1

-        if len(new_objects) == 0:
+        if len(detections) == 0:
             return

         # track objects for each label type
-        for label, group in new_object_groups.items():
+        for label, group in detection_groups.items():
             current_objects = [
                 o for o in self.tracked_objects.values() if o["label"] == label
             ]

frigate/track/norfair_tracker.py (new file, 285 lines)
@@ -0,0 +1,285 @@
import random
import string

import numpy as np
from norfair import Detection, Drawable, Tracker, draw_boxes
from norfair.drawing.drawer import Drawer

from frigate.config import DetectConfig
from frigate.track import ObjectTracker
from frigate.util import intersection_over_union


# Normalizes distance from estimate relative to object size
# Other ideas:
# - if estimates are inaccurate for first N detections, compare with last_detection (may be fine)
# - could be variable based on time since last_detection
# - include estimated velocity in the distance (car driving by of a parked car)
# - include some visual similarity factor in the distance for occlusions
def distance(detection: np.array, estimate: np.array) -> float:
    # ultimately, this should try and estimate distance in 3-dimensional space
    # consider change in location, width, and height

    estimate_dim = np.diff(estimate, axis=0).flatten()
    detection_dim = np.diff(detection, axis=0).flatten()

    # get bottom center positions
    detection_position = np.array(
        [np.average(detection[:, 0]), np.max(detection[:, 1])]
    )
    estimate_position = np.array([np.average(estimate[:, 0]), np.max(estimate[:, 1])])

    distance = (detection_position - estimate_position).astype(float)
    # change in x relative to w
    distance[0] /= estimate_dim[0]
    # change in y relative to h
    distance[1] /= estimate_dim[1]

    # get ratio of widths and heights
    # normalize to 1
    widths = np.sort([estimate_dim[0], detection_dim[0]])
    heights = np.sort([estimate_dim[1], detection_dim[1]])
    width_ratio = widths[1] / widths[0] - 1.0
    height_ratio = heights[1] / heights[0] - 1.0

    # change vector is relative x,y change and w,h ratio
    change = np.append(distance, np.array([width_ratio, height_ratio]))

    # calculate euclidean distance of the change vector
    return np.linalg.norm(change)


def frigate_distance(detection: Detection, tracked_object) -> float:
    return distance(detection.points, tracked_object.estimate)


class NorfairTracker(ObjectTracker):
    def __init__(self, config: DetectConfig):
        self.tracked_objects = {}
        self.disappeared = {}
        self.positions = {}
        self.max_disappeared = config.max_disappeared
        self.detect_config = config
        self.track_id_map = {}
        # TODO: could also initialize a tracker per object class if there
        # was a good reason to have different distance calculations
        self.tracker = Tracker(
            distance_function=frigate_distance,
            distance_threshold=2.5,
            initialization_delay=0,
            hit_counter_max=self.max_disappeared,
        )

    def register(self, track_id, obj):
        rand_id = "".join(random.choices(string.ascii_lowercase + string.digits, k=6))
        id = f"{obj['frame_time']}-{rand_id}"
        self.track_id_map[track_id] = id
        obj["id"] = id
        obj["start_time"] = obj["frame_time"]
        obj["motionless_count"] = 0
        obj["position_changes"] = 0
        self.tracked_objects[id] = obj
        self.disappeared[id] = 0
        self.positions[id] = {
            "xmins": [],
            "ymins": [],
            "xmaxs": [],
            "ymaxs": [],
            "xmin": 0,
            "ymin": 0,
            "xmax": self.detect_config.width,
            "ymax": self.detect_config.height,
        }

    def deregister(self, id):
        del self.tracked_objects[id]
        del self.disappeared[id]

    # tracks the current position of the object based on the last N bounding boxes
    # returns False if the object has moved outside its previous position
    def update_position(self, id, box):
        position = self.positions[id]
        position_box = (
            position["xmin"],
            position["ymin"],
            position["xmax"],
            position["ymax"],
        )

        xmin, ymin, xmax, ymax = box

        iou = intersection_over_union(position_box, box)

        # if the iou drops below the threshold
        # assume the object has moved to a new position and reset the computed box
        if iou < 0.6:
            self.positions[id] = {
                "xmins": [xmin],
                "ymins": [ymin],
                "xmaxs": [xmax],
                "ymaxs": [ymax],
                "xmin": xmin,
                "ymin": ymin,
                "xmax": xmax,
                "ymax": ymax,
            }
            return False

        # if there are less than 10 entries for the position, add the bounding box
        # and recompute the position box
        if len(position["xmins"]) < 10:
            position["xmins"].append(xmin)
            position["ymins"].append(ymin)
            position["xmaxs"].append(xmax)
            position["ymaxs"].append(ymax)
            # by using percentiles here, we hopefully remove outliers
            position["xmin"] = np.percentile(position["xmins"], 15)
            position["ymin"] = np.percentile(position["ymins"], 15)
            position["xmax"] = np.percentile(position["xmaxs"], 85)
            position["ymax"] = np.percentile(position["ymaxs"], 85)

        return True

    def is_expired(self, id):
        obj = self.tracked_objects[id]
        # get the max frames for this label type or the default
        max_frames = self.detect_config.stationary.max_frames.objects.get(
            obj["label"], self.detect_config.stationary.max_frames.default
        )

        # if there is no max_frames for this label type, continue
        if max_frames is None:
            return False

        # if the object has exceeded the max_frames setting, deregister
        if (
            obj["motionless_count"] - self.detect_config.stationary.threshold
            > max_frames
        ):
            return True

        return False

    def update(self, track_id, obj):
        id = self.track_id_map[track_id]
        self.disappeared[id] = 0
        # update the motionless count if the object has not moved to a new position
        if self.update_position(id, obj["box"]):
            self.tracked_objects[id]["motionless_count"] += 1
            if self.is_expired(id):
                self.deregister(id)
                return
        else:
            # register the first position change and then only increment if
            # the object was previously stationary
            if (
                self.tracked_objects[id]["position_changes"] == 0
                or self.tracked_objects[id]["motionless_count"]
                >= self.detect_config.stationary.threshold
            ):
                self.tracked_objects[id]["position_changes"] += 1
            self.tracked_objects[id]["motionless_count"] = 0

        self.tracked_objects[id].update(obj)

    def update_frame_times(self, frame_time):
        # if the object was there in the last frame, assume it's still there
        detections = [
            (
                obj["label"],
                obj["score"],
                obj["box"],
                obj["area"],
                obj["ratio"],
                obj["region"],
            )
            for id, obj in self.tracked_objects.items()
            if self.disappeared[id] == 0
        ]
        self.match_and_update(frame_time, detections=detections)

    def match_and_update(self, frame_time, detections):
        norfair_detections = []

        for obj in detections:
            # centroid is used for other things downstream
            centroid_x = int((obj[2][0] + obj[2][2]) / 2.0)
            centroid_y = int((obj[2][1] + obj[2][3]) / 2.0)

            # track based on top,left and bottom,right corners instead of centroid
            points = np.array([[obj[2][0], obj[2][1]], [obj[2][2], obj[2][3]]])

            norfair_detections.append(
                Detection(
                    points=points,
                    label=obj[0],
                    data={
                        "label": obj[0],
                        "score": obj[1],
                        "box": obj[2],
                        "area": obj[3],
                        "ratio": obj[4],
                        "region": obj[5],
                        "frame_time": frame_time,
                        "centroid": (centroid_x, centroid_y),
                    },
                )
            )

        tracked_objects = self.tracker.update(detections=norfair_detections)

        # update or create new tracks
        active_ids = []
        for t in tracked_objects:
            active_ids.append(t.global_id)
            if t.global_id not in self.track_id_map:
                self.register(t.global_id, t.last_detection.data)
            # if there wasn't a detection in this frame, increment disappeared
            elif t.last_detection.data["frame_time"] != frame_time:
                id = self.track_id_map[t.global_id]
                self.disappeared[id] += 1
            # else update it
            else:
                self.update(t.global_id, t.last_detection.data)

        # clear expired tracks
        expired_ids = [k for k in self.track_id_map.keys() if k not in active_ids]
        for e_id in expired_ids:
            self.deregister(self.track_id_map[e_id])
            del self.track_id_map[e_id]

    def debug_draw(self, frame, frame_time):
        active_detections = [
            Drawable(id=obj.id, points=obj.last_detection.points, label=obj.label)
            for obj in self.tracker.tracked_objects
            if obj.last_detection.data["frame_time"] == frame_time
        ]
        missing_detections = [
            Drawable(id=obj.id, points=obj.last_detection.points, label=obj.label)
            for obj in self.tracker.tracked_objects
            if obj.last_detection.data["frame_time"] != frame_time
        ]
        # draw the estimated bounding box
        draw_boxes(frame, self.tracker.tracked_objects, color="green", draw_ids=True)
        # draw the detections that were detected in the current frame
        draw_boxes(frame, active_detections, color="blue", draw_ids=True)
        # draw the detections that are missing in the current frame
        draw_boxes(frame, missing_detections, color="red", draw_ids=True)

        # draw the distance calculation for the last detection
        # estimate vs detection
        for obj in self.tracker.tracked_objects:
            ld = obj.last_detection
            # bottom right
            text_anchor = (
                ld.points[1, 0],
                ld.points[1, 1],
            )
            frame = Drawer.text(
                frame,
                f"{obj.id}: {str(obj.last_distance)}",
                position=text_anchor,
                size=None,
                color=(255, 0, 0),
                thickness=None,
            )
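A small, self-contained sketch (not from the diff) of how the normalized distance above behaves; the helper mirrors the distance() logic and the box corners are hypothetical. A same-size box shifted by a few pixels scores far below the 2.5 distance_threshold passed to the Tracker, so it would still match its track.

import numpy as np


def normalized_distance(detection: np.ndarray, estimate: np.ndarray) -> float:
    estimate_dim = np.diff(estimate, axis=0).flatten()    # (w, h) of the estimate
    detection_dim = np.diff(detection, axis=0).flatten()  # (w, h) of the detection
    detection_pos = np.array([np.average(detection[:, 0]), np.max(detection[:, 1])])
    estimate_pos = np.array([np.average(estimate[:, 0]), np.max(estimate[:, 1])])
    delta = (detection_pos - estimate_pos).astype(float)
    delta[0] /= estimate_dim[0]  # x shift relative to width
    delta[1] /= estimate_dim[1]  # y shift relative to height
    widths = np.sort([estimate_dim[0], detection_dim[0]])
    heights = np.sort([estimate_dim[1], detection_dim[1]])
    change = np.append(delta, [widths[1] / widths[0] - 1.0, heights[1] / heights[0] - 1.0])
    return float(np.linalg.norm(change))


estimate = np.array([[100, 100], [200, 300]])   # 100x200 box
detection = np.array([[110, 105], [210, 305]])  # same size, shifted by (10, 5)
print(round(normalized_distance(detection, estimate), 2))  # ~0.1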
@@ -1,7 +1,7 @@
-from typing import Optional, TypedDict
+from multiprocessing.context import Process
 from multiprocessing.queues import Queue
 from multiprocessing.sharedctypes import Synchronized
-from multiprocessing.context import Process
+from typing import Optional, TypedDict
 
 from frigate.object_detection import ObjectDetectProcess
 
@@ -1,28 +1,26 @@
 import copy
 import datetime
-import logging
-import shlex
-import subprocess as sp
 import json
+import logging
+import os
 import re
+import shlex
 import signal
+import subprocess as sp
 import traceback
 import urllib.parse
-import yaml
-import os
-
 from abc import ABC, abstractmethod
 from collections import Counter
 from collections.abc import Mapping
 from multiprocessing import shared_memory
 from typing import Any, AnyStr, Optional, Tuple
-import py3nvml.py3nvml as nvml
 
 import cv2
 import numpy as np
-import os
 import psutil
+import py3nvml.py3nvml as nvml
 import pytz
+import yaml
 
 from frigate.const import REGEX_HTTP_CAMERA_USER_PASS, REGEX_RTSP_CAMERA_USER_PASS
 
@@ -457,7 +455,7 @@ def copy_yuv_to_position(
     # clear v2
     destination_frame[v2[1] : v2[3], v2[0] : v2[2]] = 128
 
-    if not source_frame is None:
+    if source_frame is not None:
         # calculate the resized frame, maintaining the aspect ratio
         source_aspect_ratio = source_frame.shape[1] / (source_frame.shape[0] // 3 * 2)
         dest_aspect_ratio = destination_shape[1] / destination_shape[0]
@@ -800,10 +798,11 @@ def get_cpu_stats() -> dict[str, dict]:
     docker_memlimit = get_docker_memlimit_bytes() / 1024
     total_mem = os.sysconf("SC_PAGE_SIZE") * os.sysconf("SC_PHYS_PAGES") / 1024
 
-    for process in psutil.process_iter(["pid", "name", "cpu_percent"]):
+    for process in psutil.process_iter(["pid", "name", "cpu_percent", "cmdline"]):
        pid = process.info["pid"]
        try:
            cpu_percent = process.info["cpu_percent"]
+           cmdline = process.info["cmdline"]
 
            with open(f"/proc/{pid}/stat", "r") as f:
                stats = f.readline().split()
@@ -837,13 +836,45 @@ def get_cpu_stats() -> dict[str, dict]:
                 "cpu": str(cpu_percent),
                 "cpu_average": str(round(cpu_average_usage, 2)),
                 "mem": f"{mem_pct}",
+                "cmdline": " ".join(cmdline),
             }
-        except:
+        except Exception:
             continue
 
     return usages
 
 
+def get_bandwidth_stats() -> dict[str, dict]:
+    """Get bandwidth usages for each ffmpeg process id"""
+    usages = {}
+    top_command = ["nethogs", "-t", "-v0", "-c5", "-d1"]
+
+    p = sp.run(
+        top_command,
+        encoding="ascii",
+        capture_output=True,
+    )
+
+    if p.returncode != 0:
+        return usages
+    else:
+        lines = p.stdout.split("\n")
+        for line in lines:
+            stats = list(filter(lambda a: a != "", line.strip().split("\t")))
+            try:
+                if re.search(
+                    r"(^ffmpeg|\/go2rtc|frigate\.detector\.[a-z]+)/([0-9]+)/", stats[0]
+                ):
+                    process = stats[0].split("/")
+                    usages[process[len(process) - 2]] = {
+                        "bandwidth": round(float(stats[1]) + float(stats[2]), 1),
+                    }
+            except (IndexError, ValueError):
+                continue
+
+        return usages
+
+
 def get_amd_gpu_stats() -> dict[str, str]:
     """Get stats using radeontop."""
     radeontop_command = ["radeontop", "-d", "-", "-l", "1"]
@@ -899,7 +930,7 @@ def get_intel_gpu_stats() -> dict[str, str]:
 
     # render is used for qsv
     render = []
-    for result in re.findall('"Render/3D/0":{[a-z":\d.,%]+}', reading):
+    for result in re.findall(r'"Render/3D/0":{[a-z":\d.,%]+}', reading):
         packet = json.loads(result[14:])
         single = packet.get("busy", 0.0)
         render.append(float(single))
@@ -958,11 +989,11 @@ def get_nvidia_gpu_stats() -> dict[int, dict]:
                 "gpu": gpu_util,
                 "mem": gpu_mem_util,
             }
-    except:
+    except Exception:
+        pass
+    finally:
         return results
 
-    return results
-
 
 def ffprobe_stream(path: str) -> sp.CompletedProcess:
     """Run ffprobe on stream."""
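Illustrative only: the new get_bandwidth_stats() shells out to nethogs in trace mode and keeps the sent and received columns for ffmpeg, go2rtc, and detector processes. The sample line below is hypothetical (the exact nethogs output format is assumed), but the parsing mirrors the code above.

import re

line = "ffmpeg/123/1000\t12.3\t4.5"  # assumed "program/pid/uid<TAB>sent<TAB>received"
stats = [s for s in line.strip().split("\t") if s != ""]
if re.search(r"(^ffmpeg|\/go2rtc|frigate\.detector\.[a-z]+)/([0-9]+)/", stats[0]):
    parts = stats[0].split("/")
    pid = parts[len(parts) - 2]  # second-to-last path component is the pid
    print({pid: {"bandwidth": round(float(stats[1]) + float(stats[2]), 1)}})
    # {'123': {'bandwidth': 16.8}}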
@@ -10,16 +10,17 @@ import threading
 import time
 from collections import defaultdict
 
-import numpy as np
 import cv2
+import numpy as np
 from setproctitle import setproctitle
 
 from frigate.config import CameraConfig, DetectConfig, PixelFormatEnum
 from frigate.const import CACHE_DIR
-from frigate.object_detection import RemoteObjectDetector
 from frigate.log import LogPipe
 from frigate.motion import MotionDetector
-from frigate.objects import ObjectTracker
+from frigate.object_detection import RemoteObjectDetector
+from frigate.track import ObjectTracker
+from frigate.track.norfair_tracker import NorfairTracker
 from frigate.util import (
     EventsPerSecond,
     FrameManager,
@@ -30,8 +31,8 @@ from frigate.util import (
     intersection,
     intersection_over_union,
     listen,
-    yuv_region_2_rgb,
     yuv_region_2_bgr,
+    yuv_region_2_rgb,
     yuv_region_2_yuv,
 )
 
@@ -45,7 +46,7 @@ def filtered(obj, objects_to_track, object_filters):
     object_area = obj[3]
     object_ratio = obj[4]
 
-    if not object_name in objects_to_track:
+    if object_name not in objects_to_track:
         return True
 
     if object_name in object_filters:
@@ -73,7 +74,7 @@ def filtered(obj, objects_to_track, object_filters):
         if obj_settings.max_ratio < object_ratio:
             return True
 
-        if not obj_settings.mask is None:
+        if obj_settings.mask is not None:
             # compute the coordinates of the object and make sure
             # the location isn't outside the bounds of the image (can happen from rounding)
             object_xmin = object_box[0]
@@ -169,20 +170,20 @@ def capture_frames(
     skipped_eps.start()
     while True:
         fps.value = frame_rate.eps()
-        skipped_fps = skipped_eps.eps()
+        skipped_eps.eps()
 
         current_frame.value = datetime.datetime.now().timestamp()
         frame_name = f"{camera_name}{current_frame.value}"
         frame_buffer = frame_manager.create(frame_name, frame_size)
         try:
             frame_buffer[:] = ffmpeg_process.stdout.read(frame_size)
-        except Exception as e:
+        except Exception:
             # shutdown has been initiated
             if stop_event.is_set():
                 break
             logger.error(f"{camera_name}: Unable to read frames from ffmpeg process.")
 
-            if ffmpeg_process.poll() != None:
+            if ffmpeg_process.poll() is not None:
                 logger.error(
                     f"{camera_name}: ffmpeg process is not running. exiting capture thread..."
                 )
@@ -472,7 +473,7 @@ def track_camera(
         name, labelmap, detection_queue, result_connection, model_config, stop_event
     )
 
-    object_tracker = ObjectTracker(config.detect)
+    object_tracker = NorfairTracker(config.detect)
 
     frame_manager = SharedMemoryFrameManager()
 
@@ -604,7 +605,7 @@ def process_frames(
 
     while not stop_event.is_set():
         if exit_on_empty and frame_queue.empty():
-            logger.info(f"Exiting track_objects...")
+            logger.info("Exiting track_objects...")
             break
 
         try:
@@ -655,7 +656,7 @@ def process_frames(
         tracked_object_boxes = [
             obj["box"]
             for obj in object_tracker.tracked_objects.values()
-            if not obj["id"] in stationary_object_ids
+            if obj["id"] not in stationary_object_ids
         ]
 
         # combine motion boxes with known locations of existing objects
@@ -847,6 +848,17 @@ def process_frames(
         else:
             object_tracker.update_frame_times(frame_time)
 
+        # debug tracking by writing frames
+        if False:
+            bgr_frame = cv2.cvtColor(
+                frame,
+                cv2.COLOR_YUV2BGR_I420,
+            )
+            object_tracker.debug_draw(bgr_frame, frame_time)
+            cv2.imwrite(
+                f"debug/frames/track-{'{:.6f}'.format(frame_time)}.jpg", bgr_frame
+            )
+
         # add to the queue if not full
         if detected_objects_queue.full():
             frame_manager.delete(f"{camera_name}{frame_time}")
@@ -2,12 +2,10 @@ import datetime
 import logging
 import threading
 import time
-import os
-import signal
+from multiprocessing.synchronize import Event as MpEvent
 
 from frigate.object_detection import ObjectDetectProcess
 from frigate.util import restart_frigate
-from multiprocessing.synchronize import Event as MpEvent
 
 logger = logging.getLogger(__name__)
 
@@ -39,4 +37,4 @@ class FrigateWatchdog(threading.Thread):
                     logger.info("Detection appears to have stopped. Exiting Frigate...")
                     restart_frigate()
 
-        logger.info(f"Exiting watchdog...")
+        logger.info("Exiting watchdog...")
@@ -21,14 +21,7 @@ Some examples (model - class or model name)::
 
 """
 
-import datetime as dt
 import peewee as pw
-from decimal import ROUND_HALF_EVEN
 
-try:
-    import playhouse.postgres_ext as pw_pext
-except ImportError:
-    pass
-
 SQL = pw.SQL
 
@@ -21,15 +21,9 @@ Some examples (model - class or model name)::
 
 """
 
-import datetime as dt
 import peewee as pw
-from decimal import ROUND_HALF_EVEN
-from frigate.models import Event
 
-try:
-    import playhouse.postgres_ext as pw_pext
-except ImportError:
-    pass
+from frigate.models import Event
 
 SQL = pw.SQL
 
@@ -22,8 +22,6 @@ Some examples (model - class or model name)::
 """
 import peewee as pw
 
-from frigate.models import Recordings
-
 SQL = pw.SQL
 
 
@@ -21,16 +21,10 @@ Some examples (model - class or model name)::
 
 """
 
-import datetime as dt
 import peewee as pw
-from playhouse.sqlite_ext import *
-from decimal import ROUND_HALF_EVEN
-from frigate.models import Event
+from playhouse.sqlite_ext import JSONField
 
-try:
-    import playhouse.postgres_ext as pw_pext
-except ImportError:
-    pass
+from frigate.models import Event
 
 SQL = pw.SQL
 
@@ -21,16 +21,9 @@ Some examples (model - class or model name)::
 
 """
 
-import datetime as dt
 import peewee as pw
-from playhouse.sqlite_ext import *
-from decimal import ROUND_HALF_EVEN
-from frigate.models import Event
 
-try:
-    import playhouse.postgres_ext as pw_pext
-except ImportError:
-    pass
+from frigate.models import Event
 
 SQL = pw.SQL
 
@@ -21,16 +21,9 @@ Some examples (model - class or model name)::
 
 """
 
-import datetime as dt
 import peewee as pw
-from playhouse.sqlite_ext import *
-from decimal import ROUND_HALF_EVEN
-from frigate.models import Recordings
 
-try:
-    import playhouse.postgres_ext as pw_pext
-except ImportError:
-    pass
+from frigate.models import Recordings
 
 SQL = pw.SQL
 
@@ -21,16 +21,9 @@ Some examples (model - class or model name)::
 
 """
 
-import datetime as dt
 import peewee as pw
-from playhouse.sqlite_ext import *
-from decimal import ROUND_HALF_EVEN
-from frigate.models import Event
 
-try:
-    import playhouse.postgres_ext as pw_pext
-except ImportError:
-    pass
+from frigate.models import Event
 
 SQL = pw.SQL
 
@@ -21,16 +21,9 @@ Some examples (model - class or model name)::
 
 """
 
-import datetime as dt
 import peewee as pw
-from playhouse.sqlite_ext import *
-from decimal import ROUND_HALF_EVEN
-from frigate.models import Event
 
-try:
-    import playhouse.postgres_ext as pw_pext
-except ImportError:
-    pass
+from frigate.models import Event
 
 SQL = pw.SQL
 
@@ -22,6 +22,7 @@ Some examples (model - class or model name)::
 """
+
 import peewee as pw
 
 from frigate.models import Event
 
 SQL = pw.SQL
@@ -21,16 +21,9 @@ Some examples (model - class or model name)::
 
 """
 
-import datetime as dt
 import peewee as pw
-from playhouse.sqlite_ext import *
-from decimal import ROUND_HALF_EVEN
-from frigate.models import Event
 
-try:
-    import playhouse.postgres_ext as pw_pext
-except ImportError:
-    pass
+from frigate.models import Event
 
 SQL = pw.SQL
 
@@ -21,16 +21,9 @@ Some examples (model - class or model name)::
 
 """
 
-import datetime as dt
 import peewee as pw
-from playhouse.sqlite_ext import *
-from decimal import ROUND_HALF_EVEN
-from frigate.models import Recordings
 
-try:
-    import playhouse.postgres_ext as pw_pext
-except ImportError:
-    pass
+from frigate.models import Recordings
 
 SQL = pw.SQL
 
@@ -21,16 +21,7 @@ Some examples (model - class or model name)::
 
 """
 
-import datetime as dt
 import peewee as pw
-from playhouse.sqlite_ext import *
-from decimal import ROUND_HALF_EVEN
-from frigate.models import Recordings
 
-try:
-    import playhouse.postgres_ext as pw_pext
-except ImportError:
-    pass
-
 SQL = pw.SQL
 
@@ -39,9 +30,15 @@ def migrate(migrator, database, fake=False, **kwargs):
     migrator.sql(
         'CREATE TABLE IF NOT EXISTS "timeline" ("timestamp" DATETIME NOT NULL, "camera" VARCHAR(20) NOT NULL, "source" VARCHAR(20) NOT NULL, "source_id" VARCHAR(30), "class_type" VARCHAR(50) NOT NULL, "data" JSON)'
     )
-    migrator.sql('CREATE INDEX IF NOT EXISTS "timeline_camera" ON "timeline" ("camera")')
-    migrator.sql('CREATE INDEX IF NOT EXISTS "timeline_source" ON "timeline" ("source")')
-    migrator.sql('CREATE INDEX IF NOT EXISTS "timeline_source_id" ON "timeline" ("source_id")')
+    migrator.sql(
+        'CREATE INDEX IF NOT EXISTS "timeline_camera" ON "timeline" ("camera")'
+    )
+    migrator.sql(
+        'CREATE INDEX IF NOT EXISTS "timeline_source" ON "timeline" ("source")'
+    )
+    migrator.sql(
+        'CREATE INDEX IF NOT EXISTS "timeline_source_id" ON "timeline" ("source_id")'
+    )
 
 
 def rollback(migrator, database, fake=False, **kwargs):
@@ -21,16 +21,9 @@ Some examples (model - class or model name)::
 
 """
 
-import datetime as dt
 import peewee as pw
-from playhouse.sqlite_ext import *
-from decimal import ROUND_HALF_EVEN
-from frigate.models import Event
 
-try:
-    import playhouse.postgres_ext as pw_pext
-except ImportError:
-    pass
+from frigate.models import Event
 
 SQL = pw.SQL
 
@@ -21,16 +21,10 @@ Some examples (model - class or model name)::
 
 """
 
-import datetime as dt
 import peewee as pw
-from playhouse.sqlite_ext import *
-from decimal import ROUND_HALF_EVEN
-from frigate.models import Event
+from playhouse.sqlite_ext import JSONField
 
-try:
-    import playhouse.postgres_ext as pw_pext
-except ImportError:
-    pass
+from frigate.models import Event
 
 SQL = pw.SQL
 
@@ -1,6 +1,5 @@
 import peewee as pw
-from playhouse.migrate import *
-from playhouse.sqlite_ext import *
+
 from frigate.models import Event
 
 
@@ -1,8 +1,4 @@
-import sys
-from typing_extensions import runtime
-
-sys.path.append("/lab/frigate")
-
+import csv
 import json
 import logging
 import multiprocessing as mp
@@ -11,21 +7,26 @@ import subprocess as sp
 import sys
 
 import click
-import csv
 import cv2
 import numpy as np
 
-from frigate.config import FrigateConfig
-from frigate.object_detection import LocalObjectDetector
-from frigate.motion import MotionDetector
-from frigate.object_processing import CameraState
-from frigate.objects import ObjectTracker
-from frigate.util import (
+sys.path.append("/workspace/frigate")
+
+from frigate.config import FrigateConfig  # noqa: E402
+from frigate.motion import MotionDetector  # noqa: E402
+from frigate.object_detection import LocalObjectDetector  # noqa: E402
+from frigate.object_processing import CameraState  # noqa: E402
+from frigate.track.centroid_tracker import CentroidTracker  # noqa: E402
+from frigate.util import (  # noqa: E402
     EventsPerSecond,
     SharedMemoryFrameManager,
     draw_box_with_label,
 )
-from frigate.video import capture_frames, process_frames, start_or_restart_ffmpeg
+from frigate.video import (  # noqa: E402
+    capture_frames,
+    process_frames,
+    start_or_restart_ffmpeg,
+)
 
 logging.basicConfig()
 logging.root.setLevel(logging.DEBUG)
@@ -107,7 +108,7 @@ class ProcessClip:
         motion_detector = MotionDetector(self.frame_shape, self.camera_config.motion)
         motion_detector.save_images = False
 
-        object_tracker = ObjectTracker(self.camera_config.detect)
+        object_tracker = CentroidTracker(self.camera_config.detect)
         process_info = {
             "process_fps": mp.Value("d", 0.0),
             "detection_fps": mp.Value("d", 0.0),
@@ -247,7 +248,7 @@ def process(path, label, output, debug_path):
        clips.append(path)
 
    json_config = {
-        "mqtt": {"host": "mqtt"},
+        "mqtt": {"enabled": False},
         "detectors": {"coral": {"type": "edgetpu", "device": "usb"}},
         "cameras": {
             "camera": {
@@ -281,7 +282,7 @@ def process(path, label, output, debug_path):
         json_config["cameras"]["camera"]["ffmpeg"]["inputs"][0]["path"] = c
 
         frigate_config = FrigateConfig(**json_config)
-        runtime_config = frigate_config.runtime_config
+        runtime_config = frigate_config.runtime_config()
         runtime_config.cameras["camera"].create_ffmpeg_cmds()
 
         process_clip = ProcessClip(c, frame_shape, runtime_config)
@@ -310,7 +311,6 @@ def process(path, label, output, debug_path):
 
     for result in results:
         if count == 0:
-
             # Writing headers of CSV file
             header = ["file"] + list(result[1].keys())
             csv_writer.writerow(header)
pyproject.toml (new file, 5 lines)
@@ -0,0 +1,5 @@
[tool.isort]
profile = "black"

[tool.ruff]
ignore = ["E501"]
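A hedged aside (not part of the diff): with the [tool.isort] profile above, the import reordering seen throughout this commit can be reproduced programmatically. This sketch assumes isort 5.x is installed and that isort.code() accepts the profile keyword.

import isort

messy = "from frigate.util import listen\nimport os\nimport cv2\n"
print(isort.code(messy, profile="black"))
# imports come back grouped by section and alphabetized, matching the diffs above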
@@ -1,2 +1,3 @@
-pylint == 2.17.*
 black == 23.3.*
+isort
+ruff
Some files were not shown because too many files have changed in this diff.