Merge branch 'dev' of https://github.com/blakeblackshear/frigate into auto-candidate

This commit is contained in: commit 21cd553fda
.github/workflows/dependabot-auto-merge.yaml (vendored, new file, 22 lines)
@@ -0,0 +1,22 @@
+name: dependabot-auto-merge
+on: pull_request
+
+permissions:
+  contents: write
+
+jobs:
+  dependabot-auto-merge:
+    runs-on: ubuntu-latest
+    if: github.actor == 'dependabot[bot]'
+    steps:
+      - name: Get Dependabot metadata
+        id: metadata
+        uses: dependabot/fetch-metadata@v1
+        with:
+          github-token: ${{ secrets.GITHUB_TOKEN }}
+      - name: Enable auto-merge for Dependabot PRs
+        if: steps.metadata.outputs.dependency-type == 'direct:development' && (steps.metadata.outputs.update-type == 'version-update:semver-minor' || steps.metadata.outputs.update-type == 'version-update:semver-patch')
+        run: gh pr merge --auto --squash "$PR_URL"
+        env:
+          PR_URL: ${{ github.event.pull_request.html_url }}
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -27,7 +27,7 @@ RUN --mount=type=tmpfs,target=/tmp --mount=type=tmpfs,target=/var/cache/apt \
 FROM wget AS go2rtc
 ARG TARGETARCH
 WORKDIR /rootfs/usr/local/go2rtc/bin
-RUN wget -qO go2rtc "https://github.com/AlexxIT/go2rtc/releases/download/v0.1-rc.8/go2rtc_linux_${TARGETARCH}" \
+RUN wget -qO go2rtc "https://github.com/AlexxIT/go2rtc/releases/download/v0.1-rc.9/go2rtc_linux_${TARGETARCH}" \
     && chmod +x go2rtc
 
 
@@ -269,7 +269,9 @@ COPY --from=rootfs / /
 # Frigate w/ TensorRT Support as separate image
 FROM frigate AS frigate-tensorrt
 RUN --mount=type=bind,from=trt-wheels,source=/trt-wheels,target=/deps/trt-wheels \
-    pip3 install -U /deps/trt-wheels/*.whl
+    pip3 install -U /deps/trt-wheels/*.whl && \
+    ln -s libnvrtc.so.11.2 /usr/local/lib/python3.9/dist-packages/nvidia/cuda_nvrtc/lib/libnvrtc.so && \
+    ldconfig
 
 # Dev Container w/ TRT
 FROM devcontainer AS devcontainer-trt
@@ -25,6 +25,22 @@ tar -zxf ${VOD_MODULE_VERSION}.tar.gz -C /tmp/nginx-vod-module --strip-component
 rm ${VOD_MODULE_VERSION}.tar.gz
 # Patch MAX_CLIPS to allow more clips to be added than the default 128
 sed -i 's/MAX_CLIPS (128)/MAX_CLIPS (1080)/g' /tmp/nginx-vod-module/vod/media_set.h
+patch -d /tmp/nginx-vod-module/ -p1 << 'EOF'
+--- a/vod/avc_hevc_parser.c	2022-06-27 11:38:10.000000000 +0000
++++ b/vod/avc_hevc_parser.c	2023-01-16 11:25:10.900521298 +0000
+@@ -3,6 +3,9 @@
+ bool_t
+ avc_hevc_parser_rbsp_trailing_bits(bit_reader_state_t* reader)
+ {
++	// https://github.com/blakeblackshear/frigate/issues/4572
++	return TRUE;
++
+ 	uint32_t one_bit;
+
+ 	if (reader->stream.eof_reached)
+EOF
+
+
 mkdir /tmp/nginx-secure-token-module
 wget https://github.com/kaltura/nginx-secure-token-module/archive/refs/tags/${SECURE_TOKEN_MODULE_VERSION}.tar.gz
 tar -zxf ${SECURE_TOKEN_MODULE_VERSION}.tar.gz -C /tmp/nginx-secure-token-module --strip-components=1
@@ -47,4 +63,4 @@ cd /tmp/nginx
     --with-cc-opt="-O3 -Wno-error=implicit-fallthrough"
 
 make -j$(nproc) && make install
 rm -rf /usr/local/nginx/html /usr/local/nginx/conf/*.default
docker/rootfs/etc/ld.so.conf.d/cuda_tensorrt.conf (new file, 5 lines)
@@ -0,0 +1,5 @@
+/usr/local/lib/python3.9/dist-packages/nvidia/cudnn/lib
+/usr/local/lib/python3.9/dist-packages/nvidia/cuda_runtime/lib
+/usr/local/lib/python3.9/dist-packages/nvidia/cublas/lib
+/usr/local/lib/python3.9/dist-packages/nvidia/cuda_nvrtc/lib
+/usr/local/lib/python3.9/dist-packages/tensorrt
@@ -4,12 +4,8 @@
 
 set -o errexit -o nounset -o pipefail
 
-if [[ -f "/config/frigate-go2rtc.yaml" ]]; then
-    config_path="/config/frigate-go2rtc.yaml"
-else
-    config_path="/usr/local/go2rtc/go2rtc.yaml"
-fi
+raw_config=$(python3 /usr/local/go2rtc/create_config.py)
 
 # Replace the bash process with the go2rtc process, redirecting stderr to stdout
 exec 2>&1
-exec go2rtc -config="${config_path}"
+exec go2rtc -config="${raw_config}"
docker/rootfs/usr/local/go2rtc/create_config.py (new file, 31 lines)
@@ -0,0 +1,31 @@
+"""Creates a go2rtc config file."""
+
+import json
+import os
+import yaml
+
+
+config_file = os.environ.get("CONFIG_FILE", "/config/config.yml")
+
+# Check if we can use .yaml instead of .yml
+config_file_yaml = config_file.replace(".yml", ".yaml")
+if os.path.isfile(config_file_yaml):
+    config_file = config_file_yaml
+
+with open(config_file) as f:
+    raw_config = f.read()
+
+if config_file.endswith((".yaml", ".yml")):
+    config = yaml.safe_load(raw_config)
+elif config_file.endswith(".json"):
+    config = json.loads(raw_config)
+
+go2rtc_config: dict[str, any] = config["go2rtc"]
+
+if not go2rtc_config.get("log", {}).get("format"):
+    go2rtc_config["log"] = {"format": "text"}
+
+if not go2rtc_config.get("webrtc", {}).get("candidates", []):
+    go2rtc_config["webrtc"] = {"candidates": ["stun:8555"]}
+
+print(json.dumps(go2rtc_config))
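For context, create_config.py reads the `go2rtc` section of the user's Frigate config and prints it as JSON for go2rtc to consume, filling in a default log format ("text") and default WebRTC candidate ("stun:8555") when they are not set. A minimal sketch of the section it expects (the stream name and URL are placeholders, not part of this commit):

```yaml
# /config/config.yml (or .yaml), selected via the CONFIG_FILE environment variable
go2rtc:
  streams:
    test_cam: rtsp://192.168.1.5:554/live0
```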
@@ -1,8 +0,0 @@
-log:
-  format: text
-
-webrtc:
-  listen: ":8555"
-  candidates:
-    # - %%CANDIDATE%%
-    - stun:8555
@@ -14,6 +14,12 @@ This page makes use of presets of FFmpeg args. For more information on presets,
 Note that mjpeg cameras require encoding the video into h264 for recording, and restream roles. This will use significantly more CPU than if the cameras supported h264 feeds directly. It is recommended to use the restream role to create an h264 restream and then use that as the source for ffmpeg.
 
 ```yaml
+go2rtc:
+  streams:
+    mjpeg_cam: ffmpeg:{your_mjpeg_stream_url}#video=h264#hardware # <- use hardware acceleration to create an h264 stream usable for other components.
+
+cameras:
+  ...
   mjpeg_cam:
     ffmpeg:
       inputs:
@@ -21,12 +27,6 @@ Note that mjpeg cameras require encoding the video into h264 for recording, and
           roles:
             - detect
             - record
-        - path: {your_mjpeg_stream_url}
-          roles:
-            - restream
-    restream:
-      enabled: true
-      video_encoding: h264
 ```
 
 ## JPEG Stream Cameras
@@ -15,7 +15,6 @@ Each role can only be assigned to one input per camera. The options for roles ar
 | ---------- | ---------------------------------------------------------------------------------------- |
 | `detect`   | Main feed for object detection                                                            |
 | `record`   | Saves segments of the video feed based on configuration settings. [docs](record.md)       |
-| `restream` | Broadcast as RTSP feed and use the full res stream for live view. [docs](restream.md)     |
 | `rtmp`     | Deprecated: Broadcast as an RTMP feed for other services to consume. [docs](restream.md)  |
 
 ```yaml
@@ -29,7 +28,7 @@ cameras:
       - path: rtsp://viewer:{FRIGATE_RTSP_PASSWORD}@10.0.10.10:554/cam/realmonitor?channel=1&subtype=2
         roles:
           - detect
-          - rtmp
+          - rtmp # <- deprecated, recommend using restream instead
       - path: rtsp://viewer:{FRIGATE_RTSP_PASSWORD}@10.0.10.10:554/live
         roles:
          - record
@@ -28,15 +28,16 @@ Input args presets help make the config more readable and handle use cases for d
 
 See [the camera specific docs](/configuration/camera_specific.md) for more info on non-standard cameras and recommendations for using them in Frigate.
 
 | Preset                    | Usage                     | Other Notes                                          |
-| ------------------------- | ----------------------- | --------------------------------------------------- |
+| ------------------------- | ------------------------- | --------------------------------------------------- |
 | preset-http-jpeg-generic  | HTTP Live Jpeg            | Recommend restreaming live jpeg instead              |
 | preset-http-mjpeg-generic | HTTP Mjpeg Stream         | Recommend restreaming mjpeg stream instead           |
 | preset-http-reolink       | Reolink HTTP-FLV Stream   | Only for reolink http, not when restreaming as rtsp  |
 | preset-rtmp-generic       | RTMP Stream               |                                                      |
 | preset-rtsp-generic       | RTSP Stream               | This is the default when nothing is specified        |
-| preset-rtsp-udp           | RTSP Stream via UDP       | Use when camera is UDP only                          |
-| preset-rtsp-blue-iris     | Blue Iris RTSP Stream     | Use when consuming a stream from Blue Iris           |
+| preset-rtsp-restream      | RTSP Stream from restream | Use when using rtsp restream as source               |
+| preset-rtsp-udp           | RTSP Stream via UDP       | Use when camera is UDP only                          |
+| preset-rtsp-blue-iris     | Blue Iris RTSP Stream     | Use when consuming a stream from Blue Iris           |
 
 :::caution
 
@@ -66,10 +67,11 @@ cameras:
 
 Output args presets help make the config more readable and handle use cases for different types of streams to ensure consistent recordings.
 
 | Preset                           | Usage                             | Other Notes                                   |
-| --------------------------- | --------------------------------- | --------------------------------------------- |
+| -------------------------------- | --------------------------------- | --------------------------------------------- |
 | preset-record-generic            | Record WITHOUT audio              | This is the default when nothing is specified |
-| preset-record-generic-audio | Record WITH audio                 | Use this to enable audio in recordings        |
-| preset-record-mjpeg         | Record an mjpeg stream            | Recommend restreaming mjpeg stream instead    |
-| preset-record-jpeg          | Record live jpeg                  | Recommend restreaming live jpeg instead       |
-| preset-record-ubiquiti      | Record ubiquiti stream with audio | Recordings with ubiquiti non-standard audio   |
+| preset-record-generic-audio-aac  | Record WITH aac audio             | Use this to enable audio in recordings        |
+| preset-record-generic-audio-copy | Record WITH original audio        | Use this to enable audio in recordings        |
+| preset-record-mjpeg              | Record an mjpeg stream            | Recommend restreaming mjpeg stream instead    |
+| preset-record-jpeg               | Record live jpeg                  | Recommend restreaming live jpeg instead       |
+| preset-record-ubiquiti           | Record ubiquiti stream with audio | Recordings with ubiquiti non-standard audio   |
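As a usage sketch for the renamed audio presets above (the camera name is illustrative), a preset is referenced from a camera's record output args:

```yaml
cameras:
  back_yard:
    ffmpeg:
      output_args:
        # use preset-record-generic-audio-copy instead to keep the camera's original audio track
        record: preset-record-generic-audio-aac
```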
@@ -126,6 +126,9 @@ environment_vars:
 birdseye:
   # Optional: Enable birdseye view (default: shown below)
   enabled: True
+  # Optional: Restream birdseye via RTSP (default: shown below)
+  # NOTE: Enabling this will set birdseye to run 24/7 which may increase CPU usage somewhat.
+  restream: False
   # Optional: Width of the output resolution (default: shown below)
   width: 1280
   # Optional: Height of the output resolution (default: shown below)
@@ -352,28 +355,21 @@ rtmp:
   enabled: False
 
 # Optional: Restream configuration
-# NOTE: Can be overridden at the camera level
-restream:
-  # Optional: Enable the restream (default: True)
-  enabled: True
-  # Optional: Force audio compatibility with browsers (default: shown below)
-  force_audio: True
-  # Optional: Video encoding to be used. By default the codec will be copied but
-  # it can be switched to another or an MJPEG stream can be encoded and restreamed
-  # as h264 (default: shown below)
-  video_encoding: "copy"
-  # Optional: Restream birdseye via RTSP (default: shown below)
-  # NOTE: Enabling this will set birdseye to run 24/7 which may increase CPU usage somewhat.
-  birdseye: False
-  # Optional: jsmpeg stream configuration for WebUI
-  jsmpeg:
-    # Optional: Set the height of the jsmpeg stream. (default: 720)
-    # This must be less than or equal to the height of the detect stream. Lower resolutions
-    # reduce bandwidth required for viewing the jsmpeg stream. Width is computed to match known aspect ratio.
-    height: 720
-    # Optional: Set the encode quality of the jsmpeg stream (default: shown below)
-    # 1 is the highest quality, and 31 is the lowest. Lower quality feeds utilize less CPU resources.
-    quality: 8
+# Uses https://github.com/AlexxIT/go2rtc (v0.1-rc9)
+go2rtc:
+
+# Optional: jsmpeg stream configuration for WebUI
+live:
+  # Optional: Set the name of the stream that should be used for live view
+  # in frigate WebUI. (default: name of camera)
+  stream_name: camera_name
+  # Optional: Set the height of the jsmpeg stream. (default: 720)
+  # This must be less than or equal to the height of the detect stream. Lower resolutions
+  # reduce bandwidth required for viewing the jsmpeg stream. Width is computed to match known aspect ratio.
+  height: 720
+  # Optional: Set the encode quality of the jsmpeg stream (default: shown below)
+  # 1 is the highest quality, and 31 is the lowest. Lower quality feeds utilize less CPU resources.
+  quality: 8
 
 # Optional: in-feed timestamp style configuration
 # NOTE: Can be overridden at the camera level
@@ -9,35 +9,94 @@ Frigate has different live view options, some of which require [restream](restre
 
 Live view options can be selected while viewing the live stream. The options are:
 
 | Source | Latency | Frame Rate                             | Resolution     | Audio                        | Requires Restream | Other Limitations                            |
-| ------ | ------- | -------------------------------------- | -------------- | ---------------------------- | ----------------- | -------------------------------------------- |
+| ------ | ------- | ------------------------------------- | -------------- | ---------------------------- | ----------------- | -------------------------------------------- |
 | jsmpeg | low     | same as `detect -> fps`, capped at 10  | same as detect | no                           | no                | none                                         |
 | mse    | low     | native                                 | native         | yes (depends on audio codec) | yes               | not supported on iOS, Firefox is h.264 only  |
 | webrtc | lowest  | native                                 | native         | yes (depends on audio codec) | yes               | requires extra config, doesn't support h.265 |
 
+### Audio Support
+
+MSE Requires AAC audio, WebRTC requires PCMU/PCMA, or opus audio. If you want to support both MSE and WebRTC then your restream config needs to use ffmpeg to set both.
+
+```yaml
+go2rtc:
+  streams:
+    test_cam: ffmpeg:rtsp://192.168.1.5:554/live0#video=copy#audio=aac#audio=opus
+```
+
+However, chances are that your camera already provides at least one usable audio type, so you just need restream to add the missing one. For example, if your camera outputs audio in AAC format:
+
+```yaml
+go2rtc:
+  streams:
+    test_cam: ffmpeg:rtsp://192.168.1.5:554/live0#video=copy#audio=copy#audio=opus
+```
+
+Which will reuse your camera AAC audio, while also adding one track in OPUS format.
+
+If your camera uses RTSP and supports the audio type for the live view you want to use, then you can pass the camera stream to go2rtc without ffmpeg.
+
+```yaml
+go2rtc:
+  streams:
+    test_cam: rtsp://192.168.1.5:554/live0
+```
+
+### Setting Stream For Live UI
+
+There may be some cameras that you would prefer to use the sub stream for live view, but the main stream for recording. This can be done via `live -> stream_name`.
+
+```yaml
+go2rtc:
+  streams:
+    test_cam: ffmpeg:rtsp://192.168.1.5:554/live0#video=copy#audio=aac#audio=opus
+    test_cam_sub: ffmpeg:rtsp://192.168.1.5:554/substream#video=copy#audio=aac#audio=opus
+
+cameras:
+  test_cam:
+    ffmpeg:
+      output_args:
+        record: preset-record-generic-audio-copy
+      inputs:
+        - path: rtsp://127.0.0.1:8554/test_cam?video=copy&audio=aac # <--- the name here must match the name of the camera in restream
+          input_args: preset-rtsp-restream
+          roles:
+            - record
+        - path: rtsp://127.0.0.1:8554/test_cam_sub?video=copy # <--- the name here must match the name of the camera_sub in restream
+          input_args: preset-rtsp-restream
+          roles:
+            - detect
+    live:
+      stream_name: test_cam_sub
+```
+
 ### WebRTC extra configuration:
 
-webRTC works by creating a websocket connection on extra ports. One of the following is required for webRTC to work:
-* Frigate is run with `network_mode: host` to support automatic UDP port pass through locally and remotely. See https://github.com/AlexxIT/go2rtc#module-webrtc for more details
-* Frigate is run with `network_mode: bridge` and has:
-  * Router setup to forward port `8555` to port `8555` on the Frigate device.
-  * For local webRTC, you will need to create your own go2rtc config:
-
-```yaml
-log:
-  format: text
-
-webrtc:
-  candidates:
-    - <Frigate host ip address>:8555 # <--- enter Frigate host IP here
-    - stun:8555
-```
-
-and pass that config to Frigate via docker or `frigate-go2rtc.yaml` for addon users:
-
-```yaml
-volumes:
-  - /path/to/your/go2rtc.yaml:/config/frigate-go2rtc.yaml:ro
-```
-
-See https://github.com/AlexxIT/go2rtc#module-webrtc for more details
+WebRTC works by creating a TCP or UDP connection on port `8555`. However, it requires additional configuration:
+
+- For external access, over the internet, setup your router to forward port `8555` to port `8555` on the Frigate device, for both TCP and UDP.
+- For internal/local access, you will need to use a custom go2rtc config:
+
+1. Add your internal IP to the list of `candidates`. Here is an example, assuming that `192.168.1.10` is the local IP of the device running Frigate:
+
+```yaml
+go2rtc:
+  streams:
+    test_cam: ...
+  webrtc:
+    candidates:
+      - 192.168.1.10:8555
+      - stun:8555
+```
+
+:::note
+
+If you are having difficulties getting WebRTC to work and you are running Frigate with docker, you may want to try changing the container network mode:
+
+- `network: host`, in this mode you don't need to forward any ports. The services inside of the Frigate container will have full access to the network interfaces of your host machine as if they were running natively and not in a container. Any port conflicts will need to be resolved. This network mode is recommended by go2rtc, but we recommend you only use it if necessary.
+- `network: bridge` creates a virtual network interface for the container, and the container will have full access to it. You also don't need to forward any ports, however, the IP for accessing Frigate locally will differ from the IP of the host machine. Your router will see Frigate as if it was a new device connected in the network.
+
+:::
+
+See https://github.com/AlexxIT/go2rtc#module-webrtc for more information about this.
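For readers running Frigate in Docker, a hedged compose sketch of the two network modes described in the note above (the image tag and service name are illustrative):

```yaml
services:
  frigate:
    image: ghcr.io/blakeblackshear/frigate:stable
    # Bridge networking: publish the WebRTC port for both TCP and UDP,
    # then add the host IP to go2rtc's webrtc candidates as shown above.
    ports:
      - "8555:8555/tcp"
      - "8555:8555/udp"
    # Alternatively, use host networking and skip port publishing entirely:
    # network_mode: host
```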
@@ -7,29 +7,11 @@ title: Restream
 
 Frigate can restream your video feed as an RTSP feed for other applications such as Home Assistant to utilize it at `rtsp://<frigate_host>:8554/<camera_name>`. Port 8554 must be open. [This allows you to use a video feed for detection in Frigate and Home Assistant live view at the same time without having to make two separate connections to the camera](#reduce-connections-to-camera). The video feed is copied from the original video feed directly to avoid re-encoding. This feed does not include any annotation by Frigate.
 
-#### Force Audio
-
-Different live view technologies (ex: MSE, WebRTC) support different audio codecs. The `restream -> force_audio` flag tells the restream to make multiple streams available so that all live view technologies are supported. Some camera streams don't work well with this, in which case `restream -> force_audio` should be disabled.
+Frigate uses [go2rtc](https://github.com/AlexxIT/go2rtc) to provide its restream and MSE/WebRTC capabilities. The go2rtc config is hosted at the `go2rtc` in the config, see [go2rtc docs](https://github.com/AlexxIT/go2rtc#configuration) for more advanced configurations and features.
 
 #### Birdseye Restream
 
-Birdseye RTSP restream can be enabled at `restream -> birdseye` and accessed at `rtsp://<frigate_host>:8554/birdseye`. Enabling the restream will cause birdseye to run 24/7 which may increase CPU usage somewhat.
-
-#### Changing Restream Codec
-
-Generally it is recommended to let the codec from the camera be copied. But there may be some cases where h265 needs to be transcoded as h264 or an MJPEG stream can be encoded and restreamed as h264. In this case the encoding will need to be set, if any hardware acceleration presets are set then that will be used to encode the stream.
-
-```yaml
-ffmpeg:
-  hwaccel_args: your-hwaccel-preset # <- highly recommended so the GPU is used
-
-cameras:
-  mjpeg_cam:
-    ffmpeg:
-      ...
-    restream:
-      video_encoding: h264
-```
+Birdseye RTSP restream can be enabled at `birdseye -> restream` and accessed at `rtsp://<frigate_host>:8554/birdseye`. Enabling the restream will cause birdseye to run 24/7 which may increase CPU usage somewhat.
 
 ### RTMP (Deprecated)
 
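A minimal config sketch for the relocated option described above, with the new `restream` flag under `birdseye`:

```yaml
birdseye:
  enabled: True
  # exposes rtsp://<frigate_host>:8554/birdseye, and keeps birdseye running 24/7
  restream: True
```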
@@ -44,17 +26,21 @@ Some cameras only support one active connection or you may just want to have a s
 One connection is made to the camera. One for the restream, `detect` and `record` connect to the restream.
 
 ```yaml
+go2rtc:
+  streams:
+    test_cam: ffmpeg:rtsp://192.168.1.5:554/live0#video=copy#audio=aac#audio=opus
+
 cameras:
   test_cam:
     ffmpeg:
+      output_args:
+        record: preset-record-generic-audio-copy
       inputs:
-        - path: rtsp://127.0.0.1:8554/test_cam # <--- the name here must match the name of the camera
+        - path: rtsp://127.0.0.1:8554/test_cam?video=copy&audio=aac # <--- the name here must match the name of the camera in restream
+          input_args: preset-rtsp-restream
           roles:
             - record
             - detect
-        - path: rtsp://192.168.1.5:554/live0 # <--- 1 connection to camera stream
-          roles:
-            - restream
 ```
 
 ### With Sub Stream
@@ -62,17 +48,23 @@ cameras:
 Two connections are made to the camera. One for the sub stream, one for the restream, `record` connects to the restream.
 
 ```yaml
+go2rtc:
+  streams:
+    test_cam: ffmpeg:rtsp://192.168.1.5:554/live0#video=copy#audio=aac#audio=opus
+    test_cam_sub: ffmpeg:rtsp://192.168.1.5:554/substream#video=copy#audio=aac#audio=opus
+
 cameras:
   test_cam:
     ffmpeg:
+      output_args:
+        record: preset-record-generic-audio-copy
       inputs:
-        - path: rtsp://127.0.0.1:8554/test_cam # <--- the name here must match the name of the camera
+        - path: rtsp://127.0.0.1:8554/test_cam?video=copy&audio=aac # <--- the name here must match the name of the camera in restream
+          input_args: preset-rtsp-restream
           roles:
             - record
-        - path: rtsp://192.168.1.5:554/stream # <--- camera high res stream
-          roles:
-            - restream
-        - path: rtsp://192.168.1.5:554/substream # <--- camera sub stream
+        - path: rtsp://127.0.0.1:8554/test_cam_sub?video=copy&audio=aac # <--- the name here must match the name of the camera_sub in restream
+          input_args: preset-rtsp-restream
           roles:
             - detect
 ```
@@ -7,38 +7,25 @@ title: Frequently Asked Questions
 
 This error message is due to a shm-size that is too small. Try updating your shm-size according to [this guide](../frigate/installation.md#calculating-required-shm-size).
 
-### I am seeing a solid green image for my camera.
-
-A solid green image means that Frigate has not received any frames from ffmpeg. Check the logs to see why ffmpeg is exiting and adjust your ffmpeg args accordingly.
-
 ### How can I get sound or audio in my recordings? {#audio-in-recordings}
 
-By default, Frigate removes audio from recordings to reduce the likelihood of failing for invalid data. If you would like to include audio, you need to override the output args to remove `-an` for where you want to include audio. The recommended audio codec is `aac`. The default ffmpeg args are shown [here](../configuration/index.md/#full-configuration-reference).
+By default, Frigate removes audio from recordings to reduce the likelihood of failing for invalid data. If you would like to include audio, you need to set a [FFmpeg preset](/configuration/ffmpeg_presets) that supports audio:
 
-:::tip
-
-When using `-c:a aac`, do not forget to replace `-c copy` with `-c:v copy`. Example:
-
-```diff title="frigate.yml"
+```yaml title="frigate.yml"
 ffmpeg:
   output_args:
-    - record: -f segment -segment_time 10 -segment_format mp4 -reset_timestamps 1 -strftime 1 -c copy -an
-    + record: -f segment -segment_time 10 -segment_format mp4 -reset_timestamps 1 -strftime 1 -c:v copy -c:a aac
+    record: preset-record-generic-audio-aac
 ```
 
-This is needed because the `-c` flag (without `:a` or `:v`) applies for both audio and video, thus making it conflicting with `-c:a aac`.
-
-:::
-
 ### My mjpeg stream or snapshots look green and crazy
 
-This almost always means that the width/height defined for your camera are not correct. Double check the resolution with vlc or another player. Also make sure you don't have the width and height values backwards.
+This almost always means that the width/height defined for your camera are not correct. Double check the resolution with VLC or another player. Also make sure you don't have the width and height values backwards.
 
 
 
 ### I can't view events or recordings in the Web UI.
 
-Ensure your cameras send h264 encoded video
+Ensure your cameras send h264 encoded video, or [transcode them](/configuration/restream.md).
 
 ### "[mov,mp4,m4a,3gp,3g2,mj2 @ 0x5639eeb6e140] moov atom not found"
 
@@ -46,8 +33,8 @@ These messages in the logs are expected in certain situations. Frigate checks th
 
 ### "On connect called"
 
-If you see repeated "On connect called" messages in your config, check for another instance of Frigate. This happens when multiple Frigate containers are trying to connect to mqtt with the same client_id.
+If you see repeated "On connect called" messages in your logs, check for another instance of Frigate. This happens when multiple Frigate containers are trying to connect to MQTT with the same `client_id`.
 
 ### Error: Database Is Locked
 
-sqlite does not work well on a network share, if the `/media` folder is mapped to a network share then [this guide](../configuration/advanced.md#database) should be used to move the database to a location on the internal drive.
+SQLite does not work well on a network share, if the `/media` folder is mapped to a network share then [this guide](../configuration/advanced.md#database) should be used to move the database to a location on the internal drive.
@@ -139,35 +139,30 @@ class MqttClient(Communicator):  # type: ignore[misc]
         )
 
         # register callbacks
+        callback_types = [
+            "recordings",
+            "snapshots",
+            "detect",
+            "motion",
+            "improve_contrast",
+            "motion_threshold",
+            "motion_contour_area",
+        ]
+
         for name in self.config.cameras.keys():
-            self.client.message_callback_add(
-                f"{self.mqtt_config.topic_prefix}/{name}/recordings/set",
-                self.on_mqtt_command,
-            )
-            self.client.message_callback_add(
-                f"{self.mqtt_config.topic_prefix}/{name}/snapshots/set",
-                self.on_mqtt_command,
-            )
-            self.client.message_callback_add(
-                f"{self.mqtt_config.topic_prefix}/{name}/detect/set",
-                self.on_mqtt_command,
-            )
-            self.client.message_callback_add(
-                f"{self.mqtt_config.topic_prefix}/{name}/motion/set",
-                self.on_mqtt_command,
-            )
-            self.client.message_callback_add(
-                f"{self.mqtt_config.topic_prefix}/{name}/improve_contrast/set",
-                self.on_mqtt_command,
-            )
-            self.client.message_callback_add(
-                f"{self.mqtt_config.topic_prefix}/{name}/motion_threshold/set",
-                self.on_mqtt_command,
-            )
-            self.client.message_callback_add(
-                f"{self.mqtt_config.topic_prefix}/{name}/motion_contour_area/set",
-                self.on_mqtt_command,
-            )
+            for callback in callback_types:
+                # We need to pre-clear existing set topics because in previous
+                # versions the webUI retained on the /set topic but this is
+                # no longer the case.
+                self.client.publish(
+                    f"{self.mqtt_config.topic_prefix}/{name}/{callback}/set",
+                    None,
+                    retain=True,
+                )
+                self.client.message_callback_add(
+                    f"{self.mqtt_config.topic_prefix}/{name}/{callback}/set",
+                    self.on_mqtt_command,
+                )
 
         self.client.message_callback_add(
             f"{self.mqtt_config.topic_prefix}/restart", self.on_mqtt_command
@@ -344,6 +344,7 @@ class BirdseyeModeEnum(str, Enum):
 
 class BirdseyeConfig(FrigateBaseModel):
     enabled: bool = Field(default=True, title="Enable birdseye view.")
+    restream: bool = Field(default=False, title="Restream birdseye via RTSP.")
     width: int = Field(default=1280, title="Birdseye width.")
     height: int = Field(default=720, title="Birdseye height.")
     quality: int = Field(
@@ -405,7 +406,6 @@ class FfmpegConfig(FrigateBaseModel):
 
 class CameraRoleEnum(str, Enum):
     record = "record"
-    restream = "restream"
     rtmp = "rtmp"
     detect = "detect"
 
@@ -519,29 +519,15 @@ class RtmpConfig(FrigateBaseModel):
     enabled: bool = Field(default=False, title="RTMP restreaming enabled.")
 
 
-class JsmpegStreamConfig(FrigateBaseModel):
-    height: int = Field(default=720, title="Live camera view height.")
-    quality: int = Field(default=8, ge=1, le=31, title="Live camera view quality.")
+class CameraLiveConfig(FrigateBaseModel):
+    stream_name: str = Field(default="", title="Name of restream to use as live view.")
+    height: int = Field(default=720, title="Live camera view height")
+    quality: int = Field(default=8, ge=1, le=31, title="Live camera view quality")
 
 
-class RestreamCodecEnum(str, Enum):
-    copy = "copy"
-    h264 = "h264"
-    h265 = "h265"
-
-
-class RestreamConfig(FrigateBaseModel):
-    enabled: bool = Field(default=True, title="Restreaming enabled.")
-    video_encoding: RestreamCodecEnum = Field(
-        default=RestreamCodecEnum.copy, title="Method for encoding the restream."
-    )
-    force_audio: bool = Field(
-        default=True, title="Force audio compatibility with the browser."
-    )
-    birdseye: bool = Field(default=False, title="Restream the birdseye feed via RTSP.")
-    jsmpeg: JsmpegStreamConfig = Field(
-        default_factory=JsmpegStreamConfig, title="Jsmpeg Stream Configuration."
-    )
+class RestreamConfig(BaseModel):
+    class Config:
+        extra = Extra.allow
 
 
 class CameraUiConfig(FrigateBaseModel):
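Because the new `RestreamConfig` allows extra keys, the `go2rtc` section of the Frigate config is passed through to go2rtc largely as-is. A hedged sketch of such a section (stream name, URL, and IP are placeholders taken from the docs examples):

```yaml
go2rtc:
  streams:
    test_cam: rtsp://192.168.1.5:554/live0
  webrtc:
    candidates:
      - 192.168.1.10:8555
      - stun:8555
```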
@@ -568,8 +554,8 @@ class CameraConfig(FrigateBaseModel):
     rtmp: RtmpConfig = Field(
         default_factory=RtmpConfig, title="RTMP restreaming configuration."
     )
-    restream: RestreamConfig = Field(
-        default_factory=RestreamConfig, title="Restreaming configuration."
+    live: CameraLiveConfig = Field(
+        default_factory=CameraLiveConfig, title="Live playback settings."
     )
     snapshots: SnapshotsConfig = Field(
         default_factory=SnapshotsConfig, title="Snapshot configuration."
@@ -611,7 +597,6 @@ class CameraConfig(FrigateBaseModel):
             config["ffmpeg"]["inputs"][0]["roles"] = [
                 "record",
                 "detect",
-                "restream",
             ]
 
         if has_rtmp:
@@ -748,9 +733,17 @@ def verify_config_roles(camera_config: CameraConfig) -> None:
             f"Camera {camera_config.name} has rtmp enabled, but rtmp is not assigned to an input."
         )
 
-    if camera_config.restream.enabled and not "restream" in assigned_roles:
-        raise ValueError(
-            f"Camera {camera_config.name} has restream enabled, but restream is not assigned to an input."
+
+def verify_valid_live_stream_name(
+    frigate_config: FrigateConfig, camera_config: CameraConfig
+) -> None:
+    """Verify that a restream exists to use for live view."""
+    if (
+        camera_config.live.stream_name
+        not in frigate_config.go2rtc.dict().get("streams", {}).keys()
+    ):
+        return ValueError(
+            f"No restream with name {camera_config.live.stream_name} exists for camera {camera_config.name}."
         )
 
 
@@ -844,7 +837,10 @@ class FrigateConfig(FrigateBaseModel):
     rtmp: RtmpConfig = Field(
         default_factory=RtmpConfig, title="Global RTMP restreaming configuration."
     )
-    restream: RestreamConfig = Field(
+    live: CameraLiveConfig = Field(
+        default_factory=CameraLiveConfig, title="Live playback settings."
+    )
+    go2rtc: RestreamConfig = Field(
         default_factory=RestreamConfig, title="Global restream configuration."
     )
     birdseye: BirdseyeConfig = Field(
@@ -885,7 +881,7 @@ class FrigateConfig(FrigateBaseModel):
             "record": ...,
             "snapshots": ...,
             "rtmp": ...,
-            "restream": ...,
+            "live": ...,
             "objects": ...,
             "motion": ...,
             "detect": ...,
@@ -958,7 +954,12 @@ class FrigateConfig(FrigateBaseModel):
                 **camera_config.motion.dict(exclude_unset=True),
             )
 
+            # Set live view stream if none is set
+            if not camera_config.live.stream_name:
+                camera_config.live.stream_name = name
+
             verify_config_roles(camera_config)
+            verify_valid_live_stream_name(config, camera_config)
             verify_old_retain_config(camera_config)
             verify_recording_retention(camera_config)
             verify_recording_segments_setup_with_reasonable_time(camera_config)
@@ -75,12 +75,6 @@ class TensorRtDetector(DetectionApi):
 
     def _load_engine(self, model_path):
         try:
-            ctypes.cdll.LoadLibrary(
-                "/usr/local/lib/python3.9/dist-packages/nvidia/cuda_runtime/lib/libcudart.so.11.0"
-            )
-            ctypes.cdll.LoadLibrary(
-                "/usr/local/lib/python3.9/dist-packages/tensorrt/libnvinfer.so.8"
-            )
             trt.init_libnvinfer_plugins(self.trt_logger, "")
 
             ctypes.cdll.LoadLibrary("/trt-models/libyolo_layer.so")
@@ -38,6 +38,8 @@ PRESETS_HW_ACCEL_DECODE = {
         "h264_qsv",
     ],
    "preset-intel-qsv-h265": [
+        "-load_plugin",
+        "hevc_hw",
         "-hwaccel",
         "qsv",
         "-qsv_device",
@@ -100,17 +102,6 @@ PRESETS_HW_ACCEL_ENCODE = {
     "default": "ffmpeg -hide_banner {0} -c:v libx264 -g 50 -profile:v high -level:v 4.1 -preset:v superfast -tune:v zerolatency {1}",
 }
 
-PRESETS_HW_ACCEL_GO2RTC_ENGINE = {
-    "preset-rpi-32-h264": "v4l2m2m",
-    "preset-rpi-64-h264": "v4l2m2m",
-    "preset-intel-vaapi": "vaapi",
-    "preset-intel-qsv-h264": "vaapi",  # go2rtc doesn't support qsv
-    "preset-intel-qsv-h265": "vaapi",
-    "preset-amd-vaapi": "vaapi",
-    "preset-nvidia-h264": "cuda",
-    "preset-nvidia-h265": "cuda",
-}
-
 
 def parse_preset_hardware_acceleration_decode(arg: Any) -> list[str]:
     """Return the correct preset if in preset format otherwise return None."""
@@ -154,14 +145,6 @@ def parse_preset_hardware_acceleration_encode(arg: Any, input: str, output: str)
     )
 
 
-def parse_preset_hardware_acceleration_go2rtc_engine(arg: Any) -> list[str]:
-    """Return the correct engine for the preset otherwise returns None."""
-    if not isinstance(arg, str):
-        return None
-
-    return PRESETS_HW_ACCEL_GO2RTC_ENGINE.get(arg)
-
-
 PRESETS_INPUT = {
     "preset-http-jpeg-generic": _user_agent_args
     + [
@@ -247,6 +230,13 @@ PRESETS_INPUT = {
         "-use_wallclock_as_timestamps",
         "1",
     ],
+    "preset-rtsp-restream": _user_agent_args
+    + [
+        "-rtsp_transport",
+        "tcp",
+        TIMEOUT_PARAM,
+        "5000000",
+    ],
     "preset-rtsp-udp": _user_agent_args
     + [
         "-avoid_negative_ts",
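A usage sketch for the new `preset-rtsp-restream` input preset, pointing a camera role at the local go2rtc restream (the camera name is illustrative):

```yaml
cameras:
  test_cam:
    ffmpeg:
      inputs:
        - path: rtsp://127.0.0.1:8554/test_cam
          input_args: preset-rtsp-restream
          roles:
            - detect
```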
@@ -311,7 +301,7 @@ PRESETS_RECORD_OUTPUT = {
         "copy",
         "-an",
     ],
-    "preset-record-generic-audio": [
+    "preset-record-generic-audio-aac": [
         "-f",
         "segment",
         "-segment_time",
@@ -327,6 +317,20 @@ PRESETS_RECORD_OUTPUT = {
         "-c:a",
         "aac",
     ],
+    "preset-record-generic-audio-copy": [
+        "-f",
+        "segment",
+        "-segment_time",
+        "10",
+        "-segment_format",
+        "mp4",
+        "-reset_timestamps",
+        "1",
+        "-strftime",
+        "1",
+        "-c",
+        "copy",
+    ],
     "preset-record-mjpeg": [
         "-f",
         "segment",
@@ -41,6 +41,7 @@ from frigate.util import (
     ffprobe_stream,
     restart_frigate,
     vainfo_hwaccel,
+    get_tz_modifiers,
 )
 from frigate.storage import StorageMaintainer
 from frigate.version import VERSION
@@ -91,7 +92,7 @@ def is_healthy():
 @bp.route("/events/summary")
 def events_summary():
     tz_name = request.args.get("timezone", default="utc", type=str)
-    tz_offset = f"{int(datetime.now(pytz.timezone(tz_name)).utcoffset().total_seconds()/60/60)} hour"
+    hour_modifier, minute_modifier = get_tz_modifiers(tz_name)
     has_clip = request.args.get("has_clip", type=int)
     has_snapshot = request.args.get("has_snapshot", type=int)
 
@@ -111,7 +112,10 @@ def events_summary():
             Event.camera,
             Event.label,
             fn.strftime(
-                "%Y-%m-%d", fn.datetime(Event.start_time, "unixepoch", tz_offset)
+                "%Y-%m-%d",
+                fn.datetime(
+                    Event.start_time, "unixepoch", hour_modifier, minute_modifier
+                ),
             ).alias("day"),
             Event.zones,
             fn.COUNT(Event.id).alias("count"),
@@ -121,7 +125,10 @@ def events_summary():
             Event.camera,
             Event.label,
             fn.strftime(
-                "%Y-%m-%d", fn.datetime(Event.start_time, "unixepoch", tz_offset)
+                "%Y-%m-%d",
+                fn.datetime(
+                    Event.start_time, "unixepoch", hour_modifier, minute_modifier
+                ),
             ),
             Event.zones,
         )
@@ -710,6 +717,8 @@ def config_raw():
 
 @bp.route("/config/save", methods=["POST"])
 def config_save():
+    save_option = request.args.get("save_option")
+
     new_config = request.get_data().decode()
 
     if not new_config:
@@ -753,13 +762,16 @@ def config_save():
             400,
         )
 
-    try:
-        restart_frigate()
-    except Exception as e:
-        logging.error(f"Error restarting Frigate: {e}")
-        return "Config successfully saved, unable to restart Frigate", 200
+    if save_option == "restart":
+        try:
+            restart_frigate()
+        except Exception as e:
+            logging.error(f"Error restarting Frigate: {e}")
+            return "Config successfully saved, unable to restart Frigate", 200
 
-    return "Config successfully saved, restarting...", 200
+        return "Config successfully saved, restarting...", 200
+    else:
+        return "Config successfully saved.", 200
 
 
 @bp.route("/config/schema.json")
@@ -907,12 +919,14 @@ def get_recordings_storage_usage():
 @bp.route("/<camera_name>/recordings/summary")
 def recordings_summary(camera_name):
     tz_name = request.args.get("timezone", default="utc", type=str)
-    tz_offset = f"{int(datetime.now(pytz.timezone(tz_name)).utcoffset().total_seconds()/60/60)} hour"
+    hour_modifier, minute_modifier = get_tz_modifiers(tz_name)
     recording_groups = (
         Recordings.select(
             fn.strftime(
                 "%Y-%m-%d %H",
-                fn.datetime(Recordings.start_time, "unixepoch", tz_offset),
+                fn.datetime(
+                    Recordings.start_time, "unixepoch", hour_modifier, minute_modifier
+                ),
             ).alias("hour"),
             fn.SUM(Recordings.duration).alias("duration"),
             fn.SUM(Recordings.motion).alias("motion"),
@@ -922,13 +936,17 @@ def recordings_summary(camera_name):
         .group_by(
             fn.strftime(
                 "%Y-%m-%d %H",
-                fn.datetime(Recordings.start_time, "unixepoch", tz_offset),
+                fn.datetime(
+                    Recordings.start_time, "unixepoch", hour_modifier, minute_modifier
+                ),
             )
         )
         .order_by(
             fn.strftime(
                 "%Y-%m-%d H",
-                fn.datetime(Recordings.start_time, "unixepoch", tz_offset),
+                fn.datetime(
+                    Recordings.start_time, "unixepoch", hour_modifier, minute_modifier
+                ),
             ).desc()
         )
     )
@@ -937,7 +955,9 @@ def recordings_summary(camera_name):
         Event.select(
             fn.strftime(
                 "%Y-%m-%d %H",
-                fn.datetime(Event.start_time, "unixepoch", tz_offset),
+                fn.datetime(
+                    Event.start_time, "unixepoch", hour_modifier, minute_modifier
+                ),
             ).alias("hour"),
             fn.COUNT(Event.id).alias("count"),
         )
@@ -945,7 +965,9 @@ def recordings_summary(camera_name):
         .group_by(
             fn.strftime(
                 "%Y-%m-%d %H",
-                fn.datetime(Event.start_time, "unixepoch", tz_offset),
+                fn.datetime(
+                    Event.start_time, "unixepoch", hour_modifier, minute_modifier
+                ),
             ),
         )
         .objects()
@@ -1142,17 +1164,11 @@ def vod_hour_no_timezone(year_month, day, hour, camera_name):
 # TODO make this nicer when vod module is removed
 @bp.route("/vod/<year_month>/<day>/<hour>/<camera_name>/<tz_name>")
 def vod_hour(year_month, day, hour, camera_name, tz_name):
-    tz_offset = int(
-        datetime.now(pytz.timezone(tz_name.replace(",", "/")))
-        .utcoffset()
-        .total_seconds()
-        / 60
-        / 60
-    )
     parts = year_month.split("-")
-    start_date = datetime(
-        int(parts[0]), int(parts[1]), int(day), int(hour), tzinfo=timezone.utc
-    ) - timedelta(hours=tz_offset)
+    start_date = (
+        datetime(int(parts[0]), int(parts[1]), int(day), int(hour), tzinfo=timezone.utc)
+        - datetime.now(pytz.timezone(tz_name.replace(",", "/"))).utcoffset()
+    )
     end_date = start_date + timedelta(hours=1) - timedelta(milliseconds=1)
     start_ts = start_date.timestamp()
     end_ts = end_date.timestamp()
@@ -415,15 +415,15 @@ def output_frames(config: FrigateConfig, video_output_queue):
 
     for camera, cam_config in config.cameras.items():
         width = int(
-            cam_config.restream.jsmpeg.height
+            cam_config.live.height
             * (cam_config.frame_shape[1] / cam_config.frame_shape[0])
         )
         converters[camera] = FFMpegConverter(
             cam_config.frame_shape[1],
             cam_config.frame_shape[0],
             width,
-            cam_config.restream.jsmpeg.height,
-            cam_config.restream.jsmpeg.quality,
+            cam_config.live.height,
+            cam_config.live.quality,
         )
         broadcasters[camera] = BroadcastThread(
             camera, converters[camera], websocket_server
@@ -436,7 +436,7 @@ def output_frames(config: FrigateConfig, video_output_queue):
         config.birdseye.width,
         config.birdseye.height,
         config.birdseye.quality,
-        config.restream.birdseye,
+        config.birdseye.restream,
     )
     broadcasters["birdseye"] = BroadcastThread(
         "birdseye", converters["birdseye"], websocket_server
@ -449,7 +449,7 @@ def output_frames(config: FrigateConfig, video_output_queue):
|
|||||||
|
|
||||||
birdseye_manager = BirdsEyeFrameManager(config, frame_manager)
|
birdseye_manager = BirdsEyeFrameManager(config, frame_manager)
|
||||||
|
|
||||||
if config.restream.birdseye:
|
if config.birdseye.restream:
|
||||||
birdseye_buffer = frame_manager.create(
|
birdseye_buffer = frame_manager.create(
|
||||||
"birdseye",
|
"birdseye",
|
||||||
birdseye_manager.yuv_shape[0] * birdseye_manager.yuv_shape[1],
|
birdseye_manager.yuv_shape[0] * birdseye_manager.yuv_shape[1],
|
||||||
@ -479,7 +479,7 @@ def output_frames(config: FrigateConfig, video_output_queue):
|
|||||||
converters[camera].write(frame.tobytes())
|
converters[camera].write(frame.tobytes())
|
||||||
|
|
||||||
if config.birdseye.enabled and (
|
if config.birdseye.enabled and (
|
||||||
config.restream.birdseye
|
config.birdseye.restream
|
||||||
or any(
|
or any(
|
||||||
ws.environ["PATH_INFO"].endswith("birdseye")
|
ws.environ["PATH_INFO"].endswith("birdseye")
|
||||||
for ws in websocket_server.manager
|
for ws in websocket_server.manager
|
||||||
@ -494,7 +494,7 @@ def output_frames(config: FrigateConfig, video_output_queue):
|
|||||||
):
|
):
|
||||||
frame_bytes = birdseye_manager.frame.tobytes()
|
frame_bytes = birdseye_manager.frame.tobytes()
|
||||||
|
|
||||||
if config.restream.birdseye:
|
if config.birdseye.restream:
|
||||||
birdseye_buffer[:] = frame_bytes
|
birdseye_buffer[:] = frame_bytes
|
||||||
|
|
||||||
converters["birdseye"].write(frame_bytes)
|
converters["birdseye"].write(frame_bytes)
|
||||||
|
|||||||
@@ -4,34 +4,15 @@
 import logging
 import requests
 
-from typing import Optional
-
-from frigate.config import FrigateConfig, RestreamCodecEnum
+from frigate.config import FrigateConfig
 from frigate.const import BIRDSEYE_PIPE
 from frigate.ffmpeg_presets import (
     parse_preset_hardware_acceleration_encode,
-    parse_preset_hardware_acceleration_go2rtc_engine,
 )
-from frigate.util import escape_special_characters
 
 logger = logging.getLogger(__name__)
 
-
-def get_manual_go2rtc_stream(
-    camera_url: str, codec: RestreamCodecEnum, engine: Optional[str]
-) -> str:
-    """Get a manual stream for go2rtc."""
-    if codec == RestreamCodecEnum.copy:
-        return f"ffmpeg:{camera_url}#video=copy#audio=aac#audio=opus"
-
-    if engine:
-        return (
-            f"ffmpeg:{camera_url}#video={codec}#hardware={engine}#audio=aac#audio=opus"
-        )
-
-    return f"ffmpeg:{camera_url}#video={codec}#audio=aac#audio=opus"
-
-
 class RestreamApi:
     """Control go2rtc relay API."""
 
@@ -42,30 +23,7 @@ class RestreamApi:
         """Add cameras to go2rtc."""
         self.relays: dict[str, str] = {}
 
-        for cam_name, camera in self.config.cameras.items():
-            if not camera.restream.enabled:
-                continue
-
-            for input in camera.ffmpeg.inputs:
-                if "restream" in input.roles:
-                    if (
-                        input.path.startswith("rtsp")
-                        and not camera.restream.force_audio
-                    ):
-                        self.relays[
-                            cam_name
-                        ] = f"{escape_special_characters(input.path)}#backchannel=0"
-                    else:
-                        # go2rtc only supports rtsp for direct relay, otherwise ffmpeg is used
-                        self.relays[cam_name] = get_manual_go2rtc_stream(
-                            escape_special_characters(input.path),
-                            camera.restream.video_encoding,
-                            parse_preset_hardware_acceleration_go2rtc_engine(
-                                self.config.ffmpeg.hwaccel_args
-                            ),
-                        )
-
-        if self.config.restream.birdseye:
+        if self.config.birdseye.restream:
             self.relays[
                 "birdseye"
             ] = f"exec:{parse_preset_hardware_acceleration_encode(self.config.ffmpeg.hwaccel_args, f'-f rawvideo -pix_fmt yuv420p -video_size {self.config.birdseye.width}x{self.config.birdseye.height} -r 10 -i {BIRDSEYE_PIPE}', '-rtsp_transport tcp -f rtsp {output}')}"
@@ -621,7 +621,7 @@ class TestConfig(unittest.TestCase):
                         "inputs": [
                             {
                                 "path": "rtsp://10.0.0.1:554/video",
-                                "roles": ["detect", "rtmp", "restream"],
+                                "roles": ["detect", "rtmp"],
                             },
                             {"path": "rtsp://10.0.0.1:554/record", "roles": ["record"]},
                         ]
@@ -883,7 +883,6 @@ class TestConfig(unittest.TestCase):
 
         config = {
             "mqtt": {"host": "mqtt"},
-            "restream": {"enabled": False},
             "cameras": {
                 "back": {
                     "ffmpeg": {
@@ -1096,30 +1095,6 @@ class TestConfig(unittest.TestCase):
         assert runtime_config.cameras["back"].snapshots.height == 150
         assert runtime_config.cameras["back"].snapshots.enabled
 
-    def test_global_restream(self):
-
-        config = {
-            "mqtt": {"host": "mqtt"},
-            "restream": {"enabled": True},
-            "cameras": {
-                "back": {
-                    "ffmpeg": {
-                        "inputs": [
-                            {
-                                "path": "rtsp://10.0.0.1:554/video",
-                                "roles": ["detect"],
-                            },
-                        ]
-                    },
-                }
-            },
-        }
-        frigate_config = FrigateConfig(**config)
-        assert config == frigate_config.dict(exclude_unset=True)
-
-        runtime_config = frigate_config.runtime_config
-        assert runtime_config.cameras["back"].restream.enabled
-
     def test_global_rtmp_disabled(self):
 
         config = {
@@ -1166,56 +1141,6 @@ class TestConfig(unittest.TestCase):
         runtime_config = frigate_config.runtime_config
         assert not runtime_config.cameras["back"].rtmp.enabled
 
-    def test_default_restream(self):
-
-        config = {
-            "mqtt": {"host": "mqtt"},
-            "cameras": {
-                "back": {
-                    "ffmpeg": {
-                        "inputs": [
-                            {
-                                "path": "rtsp://10.0.0.1:554/video",
-                                "roles": ["detect"],
-                            },
-                        ]
-                    }
-                }
-            },
-        }
-        frigate_config = FrigateConfig(**config)
-        assert config == frigate_config.dict(exclude_unset=True)
-
-        runtime_config = frigate_config.runtime_config
-        assert runtime_config.cameras["back"].restream.enabled
-
-    def test_global_restream_merge(self):
-
-        config = {
-            "mqtt": {"host": "mqtt"},
-            "restream": {"enabled": False},
-            "cameras": {
-                "back": {
-                    "ffmpeg": {
-                        "inputs": [
-                            {
-                                "path": "rtsp://10.0.0.1:554/video",
-                                "roles": ["detect"],
-                            },
-                        ]
-                    },
-                    "restream": {
-                        "enabled": True,
-                    },
-                }
-            },
-        }
-        frigate_config = FrigateConfig(**config)
-        assert config == frigate_config.dict(exclude_unset=True)
-
-        runtime_config = frigate_config.runtime_config
-        assert runtime_config.cameras["back"].restream.enabled
-
     def test_global_rtmp_merge(self):
 
         config = {
@@ -1247,7 +1172,6 @@ class TestConfig(unittest.TestCase):
 
         config = {
             "mqtt": {"host": "mqtt"},
-            "restream": {"enabled": False},
             "cameras": {
                 "back": {
                     "ffmpeg": {
@@ -1275,7 +1199,7 @@ class TestConfig(unittest.TestCase):
 
         config = {
             "mqtt": {"host": "mqtt"},
-            "restream": {"jsmpeg": {"quality": 4}},
+            "live": {"quality": 4},
             "cameras": {
                 "back": {
                     "ffmpeg": {
@@ -1293,7 +1217,7 @@ class TestConfig(unittest.TestCase):
         assert config == frigate_config.dict(exclude_unset=True)
 
         runtime_config = frigate_config.runtime_config
-        assert runtime_config.cameras["back"].restream.jsmpeg.quality == 4
+        assert runtime_config.cameras["back"].live.quality == 4
 
     def test_default_live(self):
 
@@ -1316,13 +1240,13 @@ class TestConfig(unittest.TestCase):
         assert config == frigate_config.dict(exclude_unset=True)
 
         runtime_config = frigate_config.runtime_config
-        assert runtime_config.cameras["back"].restream.jsmpeg.quality == 8
+        assert runtime_config.cameras["back"].live.quality == 8
 
     def test_global_live_merge(self):
 
         config = {
             "mqtt": {"host": "mqtt"},
-            "restream": {"jsmpeg": {"quality": 4, "height": 480}},
+            "live": {"quality": 4, "height": 480},
             "cameras": {
                 "back": {
                     "ffmpeg": {
@@ -1333,10 +1257,8 @@ class TestConfig(unittest.TestCase):
                             },
                         ]
                     },
-                    "restream": {
-                        "jsmpeg": {
-                            "quality": 7,
-                        }
+                    "live": {
+                        "quality": 7,
                     },
                 }
             },
@@ -1345,8 +1267,8 @@ class TestConfig(unittest.TestCase):
         assert config == frigate_config.dict(exclude_unset=True)
 
         runtime_config = frigate_config.runtime_config
-        assert runtime_config.cameras["back"].restream.jsmpeg.quality == 7
-        assert runtime_config.cameras["back"].restream.jsmpeg.height == 480
+        assert runtime_config.cameras["back"].live.quality == 7
+        assert runtime_config.cameras["back"].live.height == 480
 
     def test_global_timestamp_style(self):
 
@@ -136,10 +136,10 @@ class TestFfmpegPresets(unittest.TestCase):
     def test_ffmpeg_output_record_preset(self):
         self.default_ffmpeg["cameras"]["back"]["ffmpeg"]["output_args"][
             "record"
-        ] = "preset-record-generic-audio"
+        ] = "preset-record-generic-audio-aac"
         frigate_config = FrigateConfig(**self.default_ffmpeg)
         frigate_config.cameras["back"].create_ffmpeg_cmds()
-        assert "preset-record-generic-audio" not in (
+        assert "preset-record-generic-audio-aac" not in (
             " ".join(frigate_config.cameras["back"].ffmpeg_cmds[0]["cmd"])
         )
         assert "-c:v copy -c:a aac" in (
@@ -1,82 +0,0 @@
-"""Test restream.py."""
-
-from unittest import TestCase, main
-from unittest.mock import patch
-
-from frigate.config import FrigateConfig
-from frigate.restream import RestreamApi
-
-
-class TestRestream(TestCase):
-    def setUp(self) -> None:
-        """Setup the tests."""
-        self.config = {
-            "mqtt": {"host": "mqtt"},
-            "restream": {"enabled": False},
-            "cameras": {
-                "back": {
-                    "ffmpeg": {
-                        "inputs": [
-                            {
-                                "path": "rtsp://10.0.0.1:554/video",
-                                "roles": ["detect", "restream"],
-                            },
-                        ]
-                    },
-                    "restream": {
-                        "enabled": True,
-                        "force_audio": False,
-                    },
-                },
-                "front": {
-                    "ffmpeg": {
-                        "inputs": [
-                            {
-                                "path": "http://10.0.0.1:554/video/stream",
-                                "roles": ["detect", "restream"],
-                            },
-                        ]
-                    },
-                    "restream": {
-                        "enabled": True,
-                    },
-                },
-            },
-        }
-
-    @patch("frigate.restream.requests")
-    def test_rtsp_stream(
-        self, mock_request
-    ) -> None:  # need to ensure restream doesn't try to call API
-        """Test that the normal rtsp stream is sent plainly."""
-        frigate_config = FrigateConfig(**self.config)
-        restream = RestreamApi(frigate_config)
-        restream.add_cameras()
-        assert restream.relays["back"].startswith("rtsp")
-
-    @patch("frigate.restream.requests")
-    def test_http_stream(
-        self, mock_request
-    ) -> None:  # need to ensure restream doesn't try to call API
-        """Test that the http stream is sent via ffmpeg."""
-        frigate_config = FrigateConfig(**self.config)
-        restream = RestreamApi(frigate_config)
-        restream.add_cameras()
-        assert not restream.relays["front"].startswith("rtsp")
-
-    @patch("frigate.restream.requests")
-    def test_restream_codec_change(
-        self, mock_request
-    ) -> None:  # need to ensure restream doesn't try to call API
-        """Test that the http stream is sent via ffmpeg."""
-        self.config["cameras"]["front"]["restream"]["video_encoding"] = "h265"
-        self.config["ffmpeg"] = {"hwaccel_args": "preset-nvidia-h264"}
-        frigate_config = FrigateConfig(**self.config)
-        restream = RestreamApi(frigate_config)
-        restream.add_cameras()
-        assert "#hardware=cuda" in restream.relays["front"]
-        assert "#video=h265" in restream.relays["front"]
-
-
-if __name__ == "__main__":
-    main(verbosity=2)
@@ -14,12 +14,13 @@ from abc import ABC, abstractmethod
 from collections import Counter
 from collections.abc import Mapping
 from multiprocessing import shared_memory
-from typing import Any, AnyStr
+from typing import Any, AnyStr, Tuple
 
 import cv2
 import numpy as np
 import os
 import psutil
+import pytz
 
 from frigate.const import REGEX_HTTP_CAMERA_USER_PASS, REGEX_RTSP_CAMERA_USER_PASS
 
@@ -1040,3 +1041,14 @@ class SharedMemoryFrameManager(FrameManager):
             self.shm_store[name].close()
             self.shm_store[name].unlink()
             del self.shm_store[name]
+
+
+def get_tz_modifiers(tz_name: str) -> Tuple[str, str]:
+    seconds_offset = (
+        datetime.datetime.now(pytz.timezone(tz_name)).utcoffset().total_seconds()
+    )
+    hours_offset = int(seconds_offset / 60 / 60)
+    minutes_offset = int(seconds_offset / 60 - hours_offset * 60)
+    hour_modifier = f"{hours_offset} hour"
+    minute_modifier = f"{minutes_offset} minute"
+    return hour_modifier, minute_modifier
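A quick sketch (not part of this diff; assumes the frigate package and pytz are importable) of the modifier strings the new helper returns, which feed the SQLite datetime() calls in the recordings and events queries above:

    from frigate.util import get_tz_modifiers

    print(get_tz_modifiers("Asia/Kolkata"))    # ("5 hour", "30 minute")
    print(get_tz_modifiers("America/Denver"))  # ("-7 hour", "0 minute") outside DST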
@@ -5,4 +5,5 @@ cuda-python == 11.7; platform_machine == 'x86_64'
 cython == 0.29.*; platform_machine == 'x86_64'
 nvidia-cuda-runtime-cu11 == 11.7.*; platform_machine == 'x86_64'
 nvidia-cublas-cu11 == 11.11.*; platform_machine == 'x86_64'
 nvidia-cudnn-cu11 == 8.7.*; platform_machine == 'x86_64'
+nvidia-cuda-nvrtc-cu11 == 11.7.*; platform_machine == 'x86_64'
61
web/src/components/TimeAgo.jsx
Normal file
@@ -0,0 +1,61 @@
+import { h } from 'preact';
+
+const timeAgo = ({ time, dense = false }) => {
+  if (!time) return 'Invalid Time';
+  try {
+    const currentTime = new Date();
+    const pastTime = new Date(time);
+    const elapsedTime = currentTime - pastTime;
+    if (elapsedTime < 0) return 'Invalid Time';
+
+    const timeUnits = [
+      { unit: 'ye', full: 'year', value: 31536000 },
+      { unit: 'mo', full: 'month', value: 0 },
+      { unit: 'day', full: 'day', value: 86400 },
+      { unit: 'h', full: 'hour', value: 3600 },
+      { unit: 'm', full: 'minute', value: 60 },
+      { unit: 's', full: 'second', value: 1 },
+    ];
+
+    let elapsed = elapsedTime / 1000;
+    if (elapsed < 60) {
+      return 'just now';
+    }
+
+    for (let i = 0; i < timeUnits.length; i++) {
+      // if months
+      if (i === 1) {
+        // Get the month and year for the time provided
+        const pastMonth = pastTime.getUTCMonth();
+        const pastYear = pastTime.getUTCFullYear();
+
+        // get current month and year
+        const currentMonth = currentTime.getUTCMonth();
+        const currentYear = currentTime.getUTCFullYear();
+
+        let monthDiff = (currentYear - pastYear) * 12 + (currentMonth - pastMonth);
+
+        // check if the time provided is the previous month but not exceeded 1 month ago.
+        if (currentTime.getUTCDate() < pastTime.getUTCDate()) {
+          monthDiff--;
+        }
+
+        if (monthDiff > 0) {
+          const unitAmount = monthDiff;
+          return `${unitAmount}${dense ? timeUnits[i].unit[0] : ` ${timeUnits[i].full}`}${dense ? '' : 's'} ago`;
+        }
+      } else if (elapsed >= timeUnits[i].value) {
+        const unitAmount = Math.floor(elapsed / timeUnits[i].value);
+        return `${unitAmount}${dense ? timeUnits[i].unit[0] : ` ${timeUnits[i].full}`}${dense ? '' : 's'} ago`;
+      }
+    }
+  } catch {
+    return 'Invalid Time';
+  }
+};
+
+const TimeAgo = (props) => {
+  return <span>{timeAgo({ ...props })}</span>;
+};
+
+export default TimeAgo;
24
web/src/icons/Clock.jsx
Normal file
@@ -0,0 +1,24 @@
+import { h } from 'preact';
+import { memo } from 'preact/compat';
+
+export function Clock({ className = 'h-6 w-6', stroke = 'currentColor', fill = 'none', onClick = () => {} }) {
+  return (
+    <svg
+      xmlns="http://www.w3.org/2000/svg"
+      className={className}
+      fill={fill}
+      viewBox="0 0 24 24"
+      stroke={stroke}
+      onClick={onClick}
+    >
+      <path
+        stroke-linecap="round"
+        stroke-linejoin="round"
+        stroke-width="2"
+        d="M12 8v4l3 3m6-3a9 9 0 11-18 0 9 9 0 0118 0z"
+      />
+    </svg>
+  );
+}
+
+export default memo(Clock);
@@ -18,7 +18,7 @@ export default function Birdseye() {
   }
 
   let player;
-  if (viewSource == 'mse' && config.restream.birdseye) {
+  if (viewSource == 'mse' && config.birdseye.restream) {
     if ('MediaSource' in window) {
       player = (
         <Fragment>
@@ -36,7 +36,7 @@ export default function Birdseye() {
         </Fragment>
       );
     }
-  } else if (viewSource == 'webrtc' && config.restream.birdseye) {
+  } else if (viewSource == 'webrtc' && config.birdseye.restream) {
     player = (
       <Fragment>
         <div className="max-w-5xl">
@@ -61,7 +61,7 @@ export default function Birdseye() {
           Birdseye
         </Heading>
 
-        {config.restream.birdseye && (
+        {config.birdseye.restream && (
           <select
             className="basis-1/8 cursor-pointer rounded dark:bg-slate-800"
             value={viewSource}
@@ -25,14 +25,15 @@ export default function Camera({ camera }) {
   const [viewMode, setViewMode] = useState('live');
 
   const cameraConfig = config?.cameras[camera];
+  const restreamEnabled = cameraConfig && Object.keys(config.go2rtc.streams).includes(cameraConfig.live.stream_name);
   const jsmpegWidth = cameraConfig
-    ? Math.round(cameraConfig.restream.jsmpeg.height * (cameraConfig.detect.width / cameraConfig.detect.height))
+    ? Math.round(cameraConfig.live.height * (cameraConfig.detect.width / cameraConfig.detect.height))
     : 0;
   const [viewSource, setViewSource, sourceIsLoaded] = usePersistence(
     `${camera}-source`,
     getDefaultLiveMode(config, cameraConfig)
   );
-  const sourceValues = cameraConfig && cameraConfig.restream.enabled ? ['mse', 'webrtc', 'jsmpeg'] : ['jsmpeg'];
+  const sourceValues = restreamEnabled ? ['mse', 'webrtc', 'jsmpeg'] : ['jsmpeg'];
   const [options, setOptions] = usePersistence(`${camera}-feed`, emptyObject);
 
   const handleSetOption = useCallback(
@@ -106,7 +107,7 @@ export default function Camera({ camera }) {
 
   let player;
   if (viewMode === 'live') {
-    if (viewSource == 'mse' && cameraConfig.restream.enabled) {
+    if (viewSource == 'mse' && restreamEnabled) {
      if ('MediaSource' in window) {
        player = (
          <Fragment>
@@ -124,7 +125,7 @@ export default function Camera({ camera }) {
          </Fragment>
        );
      }
-    } else if (viewSource == 'webrtc' && cameraConfig.restream.enabled) {
+    } else if (viewSource == 'webrtc' && restreamEnabled) {
      player = (
        <Fragment>
          <div className="max-w-5xl">
@@ -136,7 +137,7 @@ export default function Camera({ camera }) {
      player = (
        <Fragment>
          <div>
-            <JSMpegPlayer camera={camera} width={jsmpegWidth} height={cameraConfig.restream.jsmpeg.height} />
+            <JSMpegPlayer camera={camera} width={jsmpegWidth} height={cameraConfig.live.height} />
          </div>
        </Fragment>
      );
@@ -200,9 +201,9 @@ export default function Camera({ camera }) {
   );
 }
 
-function getDefaultLiveMode(config, cameraConfig) {
+function getDefaultLiveMode(config, cameraConfig, restreamEnabled) {
   if (cameraConfig) {
-    if (cameraConfig.restream.enabled) {
+    if (restreamEnabled) {
       return config.ui.live_mode;
     }
 
|||||||
@ -17,13 +17,13 @@ export default function Config() {
|
|||||||
const [success, setSuccess] = useState();
|
const [success, setSuccess] = useState();
|
||||||
const [error, setError] = useState();
|
const [error, setError] = useState();
|
||||||
|
|
||||||
const onHandleSaveConfig = async (e) => {
|
const onHandleSaveConfig = async (e, save_option) => {
|
||||||
if (e) {
|
if (e) {
|
||||||
e.stopPropagation();
|
e.stopPropagation();
|
||||||
}
|
}
|
||||||
|
|
||||||
axios
|
axios
|
||||||
.post('config/save', window.editor.getValue(), {
|
.post(`config/save?save_option=${save_option}`, window.editor.getValue(), {
|
||||||
headers: { 'Content-Type': 'text/plain' },
|
headers: { 'Content-Type': 'text/plain' },
|
||||||
})
|
})
|
||||||
.then((response) => {
|
.then((response) => {
|
||||||
@ -97,9 +97,12 @@ export default function Config() {
|
|||||||
<Button className="mx-2" onClick={(e) => handleCopyConfig(e)}>
|
<Button className="mx-2" onClick={(e) => handleCopyConfig(e)}>
|
||||||
Copy Config
|
Copy Config
|
||||||
</Button>
|
</Button>
|
||||||
<Button className="mx-2" onClick={(e) => onHandleSaveConfig(e)}>
|
<Button className="mx-2" onClick={(e) => onHandleSaveConfig(e, "restart")}>
|
||||||
Save & Restart
|
Save & Restart
|
||||||
</Button>
|
</Button>
|
||||||
|
<Button className="mx-2" onClick={(e) => onHandleSaveConfig(e, "saveonly")}>
|
||||||
|
Save Only
|
||||||
|
</Button>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
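The new Save Only button posts the same config body with save_option=saveonly instead of restart. A hypothetical sketch of how a Flask-style handler could read that query parameter; the real /config/save endpoint is not shown in this diff:

    from flask import Blueprint, request

    bp = Blueprint("example", __name__)

    @bp.route("/config/save", methods=["POST"])
    def config_save():
        new_config = request.get_data(as_text=True)
        save_option = request.args.get("save_option", "restart")
        # validate and persist new_config here
        if save_option == "restart":
            pass  # trigger a restart only when explicitly requested
        return "Config saved", 200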
|
|||||||
@ -15,6 +15,7 @@ import { UploadPlus } from '../icons/UploadPlus';
|
|||||||
import { Clip } from '../icons/Clip';
|
import { Clip } from '../icons/Clip';
|
||||||
import { Zone } from '../icons/Zone';
|
import { Zone } from '../icons/Zone';
|
||||||
import { Camera } from '../icons/Camera';
|
import { Camera } from '../icons/Camera';
|
||||||
|
import { Clock } from '../icons/Clock';
|
||||||
import { Delete } from '../icons/Delete';
|
import { Delete } from '../icons/Delete';
|
||||||
import { Download } from '../icons/Download';
|
import { Download } from '../icons/Download';
|
||||||
import Menu, { MenuItem } from '../components/Menu';
|
import Menu, { MenuItem } from '../components/Menu';
|
||||||
@ -22,8 +23,9 @@ import CalendarIcon from '../icons/Calendar';
|
|||||||
import Calendar from '../components/Calendar';
|
import Calendar from '../components/Calendar';
|
||||||
import Button from '../components/Button';
|
import Button from '../components/Button';
|
||||||
import Dialog from '../components/Dialog';
|
import Dialog from '../components/Dialog';
|
||||||
import { fromUnixTime, intervalToDuration, formatDuration } from 'date-fns';
|
|
||||||
import MultiSelect from '../components/MultiSelect';
|
import MultiSelect from '../components/MultiSelect';
|
||||||
|
import { formatUnixTimestampToDateTime, getDurationFromTimestamps } from '../utils/dateUtil';
|
||||||
|
import TimeAgo from '../components/TimeAgo';
|
||||||
|
|
||||||
const API_LIMIT = 25;
|
const API_LIMIT = 25;
|
||||||
|
|
||||||
@ -39,16 +41,6 @@ const monthsAgo = (num) => {
|
|||||||
return new Date(date.getFullYear(), date.getMonth(), date.getDate()).getTime() / 1000;
|
return new Date(date.getFullYear(), date.getMonth(), date.getDate()).getTime() / 1000;
|
||||||
};
|
};
|
||||||
|
|
||||||
const clipDuration = (start_time, end_time) => {
|
|
||||||
const start = fromUnixTime(start_time);
|
|
||||||
const end = fromUnixTime(end_time);
|
|
||||||
let duration = 'In Progress';
|
|
||||||
if (end_time) {
|
|
||||||
duration = formatDuration(intervalToDuration({ start, end }));
|
|
||||||
}
|
|
||||||
return duration;
|
|
||||||
};
|
|
||||||
|
|
||||||
export default function Events({ path, ...props }) {
|
export default function Events({ path, ...props }) {
|
||||||
const apiHost = useApiHost();
|
const apiHost = useApiHost();
|
||||||
const [searchParams, setSearchParams] = useState({
|
const [searchParams, setSearchParams] = useState({
|
||||||
@ -296,6 +288,7 @@ export default function Events({ path, ...props }) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
const timezone = config.ui?.timezone || Intl.DateTimeFormat().resolvedOptions().timeZone;
|
const timezone = config.ui?.timezone || Intl.DateTimeFormat().resolvedOptions().timeZone;
|
||||||
|
const locale = window.navigator?.language || 'en-US';
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="space-y-4 p-2 px-4 w-full">
|
<div className="space-y-4 p-2 px-4 w-full">
|
||||||
@ -510,13 +503,19 @@ export default function Events({ path, ...props }) {
|
|||||||
<div className="capitalize text-lg font-bold">
|
<div className="capitalize text-lg font-bold">
|
||||||
{event.sub_label
|
{event.sub_label
|
||||||
? `${event.label.replaceAll('_', ' ')}: ${event.sub_label.replaceAll('_', ' ')}`
|
? `${event.label.replaceAll('_', ' ')}: ${event.sub_label.replaceAll('_', ' ')}`
|
||||||
: event.label.replaceAll('_', ' ')}{' '}
|
: event.label.replaceAll('_', ' ')}
|
||||||
({(event.top_score * 100).toFixed(0)}%)
|
({(event.top_score * 100).toFixed(0)}%)
|
||||||
</div>
|
</div>
|
||||||
<div className="text-sm">
|
<div className="text-sm flex">
|
||||||
{new Date(event.start_time * 1000).toLocaleDateString({ timeZone: timezone })}{' '}
|
<Clock className="h-5 w-5 mr-2 inline" />
|
||||||
{new Date(event.start_time * 1000).toLocaleTimeString({ timeZone: timezone })} (
|
{formatUnixTimestampToDateTime(event.start_time, locale, timezone)}
|
||||||
{clipDuration(event.start_time, event.end_time)})
|
<div className="hidden md:inline">
|
||||||
|
<span className="m-1">-</span>
|
||||||
|
<TimeAgo time={event.start_time * 1000} dense />
|
||||||
|
</div>
|
||||||
|
<div className="hidden md:inline">
|
||||||
|
<span className="m-1" />( {getDurationFromTimestamps(event.start_time, event.end_time)} )
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
<div className="capitalize text-sm flex align-center mt-1">
|
<div className="capitalize text-sm flex align-center mt-1">
|
||||||
<Camera className="h-5 w-5 mr-2 inline" />
|
<Camera className="h-5 w-5 mr-2 inline" />
|
||||||
|
|||||||
@@ -10,11 +10,11 @@ import useSWR from 'swr';
 
 export default function Recording({ camera, date, hour = '00', minute = '00', second = '00' }) {
   const { data: config } = useSWR('config');
-  let timezone = Intl.DateTimeFormat().resolvedOptions().timeZone;
   const currentDate = useMemo(
     () => (date ? parseISO(`${date}T${hour || '00'}:${minute || '00'}:${second || '00'}`) : new Date()),
     [date, hour, minute, second]
   );
+  const timezone = useMemo(() => config.ui?.timezone || Intl.DateTimeFormat().resolvedOptions().timeZone, [config]);
 
   const apiHost = useApiHost();
   const { data: recordingsSummary } = useSWR([`${camera}/recordings/summary`, { timezone }], {
@@ -118,10 +118,6 @@ export default function Recording({ camera, date, hour = '00', minute = '00', second = '00' }) {
     return <ActivityIndicator />;
   }
 
-  if (config.ui.timezone) {
-    timezone = config.ui.timezone;
-  }
-
   if (recordingsSummary.length === 0) {
     return (
       <div className="space-y-4">
@@ -22,6 +22,13 @@ export default function Storage() {
     return <ActivityIndicator />;
   }
 
+  const getUnitSize = (MB) => {
+    if (isNaN(MB) || MB < 0) return 'Invalid number';
+    if (MB < 1024) return `${MB} MB`;
+
+    return `${(MB / 1024).toFixed(2)} GB`;
+  };
+
   let storage_usage;
   if (
     service &&
@@ -31,13 +38,13 @@ export default function Storage() {
       <Fragment>
        <Tr>
          <Td>Recordings</Td>
-          <Td>{service['storage']['/media/frigate/recordings']['used']}</Td>
-          <Td>{service['storage']['/media/frigate/recordings']['total']}</Td>
+          <Td>{getUnitSize(service['storage']['/media/frigate/recordings']['used'])}</Td>
+          <Td>{getUnitSize(service['storage']['/media/frigate/recordings']['total'])}</Td>
        </Tr>
        <Tr>
          <Td>Snapshots</Td>
-          <Td>{service['storage']['/media/frigate/clips']['used']}</Td>
-          <Td>{service['storage']['/media/frigate/clips']['total']}</Td>
+          <Td>{getUnitSize(service['storage']['/media/frigate/clips']['used'])}</Td>
+          <Td>{getUnitSize(service['storage']['/media/frigate/clips']['total'])}</Td>
        </Tr>
      </Fragment>
    );
@@ -46,8 +53,8 @@ export default function Storage() {
      <Fragment>
        <Tr>
          <Td>Recordings & Snapshots</Td>
-          <Td>{service['storage']['/media/frigate/recordings']['used']}</Td>
-          <Td>{service['storage']['/media/frigate/recordings']['total']}</Td>
+          <Td>{getUnitSize(service['storage']['/media/frigate/recordings']['used'])}</Td>
+          <Td>{getUnitSize(service['storage']['/media/frigate/recordings']['total'])}</Td>
        </Tr>
      </Fragment>
    );
@@ -67,8 +74,8 @@ export default function Storage() {
        <Thead>
          <Tr>
            <Th>Location</Th>
-            <Th>Used MB</Th>
-            <Th>Total MB</Th>
+            <Th>Used</Th>
+            <Th>Total</Th>
          </Tr>
        </Thead>
        <Tbody>{storage_usage}</Tbody>
@@ -82,20 +89,20 @@ export default function Storage() {
        <Thead>
          <Tr>
            <Th>Location</Th>
-            <Th>Used MB</Th>
-            <Th>Total MB</Th>
+            <Th>Used</Th>
+            <Th>Total</Th>
          </Tr>
        </Thead>
        <Tbody>
          <Tr>
            <Td>/dev/shm</Td>
-            <Td>{service['storage']['/dev/shm']['used']}</Td>
-            <Td>{service['storage']['/dev/shm']['total']}</Td>
+            <Td>{getUnitSize(service['storage']['/dev/shm']['used'])}</Td>
+            <Td>{getUnitSize(service['storage']['/dev/shm']['total'])}</Td>
          </Tr>
          <Tr>
            <Td>/tmp/cache</Td>
-            <Td>{service['storage']['/tmp/cache']['used']}</Td>
-            <Td>{service['storage']['/tmp/cache']['total']}</Td>
+            <Td>{getUnitSize(service['storage']['/tmp/cache']['used'])}</Td>
+            <Td>{getUnitSize(service['storage']['/tmp/cache']['total'])}</Td>
          </Tr>
        </Tbody>
      </Table>
@@ -121,7 +128,7 @@ export default function Storage() {
        <Tbody>
          <Tr>
            <Td>{Math.round(camera['usage_percent'] ?? 0)}%</Td>
-            <Td>{camera['bandwidth'] ? camera['bandwidth'] : 'Calculating...'} MB/hr</Td>
+            <Td>{camera['bandwidth'] ? getUnitSize(camera['bandwidth']) : 'Calculating...'}/hr</Td>
          </Tr>
        </Tbody>
      </Table>
@@ -1,6 +1,7 @@
 export const longToDate = (long: number): Date => new Date(long * 1000);
 export const epochToLong = (date: number): number => date / 1000;
 export const dateToLong = (date: Date): number => epochToLong(date.getTime());
+import { fromUnixTime, intervalToDuration, formatDuration } from 'date-fns';
 
 const getDateTimeYesterday = (dateTime: Date): Date => {
   const twentyFourHoursInMilliseconds = 24 * 60 * 60 * 1000;
@@ -14,3 +15,75 @@ const getNowYesterday = (): Date => {
 export const getNowYesterdayInLong = (): number => {
   return dateToLong(getNowYesterday());
 };
+
+/**
+ * This function takes in a unix timestamp, locale, timezone,
+ * and returns a dateTime string.
+ * If unixTimestamp is not provided, it returns 'Invalid time'
+ * @param unixTimestamp: number
+ * @param locale: string
+ * @param timezone: string
+ * @returns string - dateTime or 'Invalid time' if unixTimestamp is not provided
+ */
+export const formatUnixTimestampToDateTime = (unixTimestamp: number, locale: string, timezone: string): string => {
+  if (isNaN(unixTimestamp)) {
+    return 'Invalid time';
+  }
+  try {
+    const date = new Date(unixTimestamp * 1000);
+    const formatter = new Intl.DateTimeFormat(locale, {
+      day: '2-digit',
+      month: '2-digit',
+      year: 'numeric',
+      hour: '2-digit',
+      minute: '2-digit',
+      second: '2-digit',
+      timeZone: timezone,
+    });
+    return formatter.format(date);
+  } catch (error) {
+    return 'Invalid time';
+  }
+};
+
+interface DurationToken {
+  xSeconds: string;
+  xMinutes: string;
+  xHours: string;
+}
+
+/**
+ * This function takes in start and end time in unix timestamp,
+ * and returns the duration between start and end time in hours, minutes and seconds.
+ * If end time is not provided, it returns 'In Progress'
+ * @param start_time: number - Unix timestamp for start time
+ * @param end_time: number|null - Unix timestamp for end time
+ * @returns string - duration or 'In Progress' if end time is not provided
+ */
+export const getDurationFromTimestamps = (start_time: number, end_time: number | null): string => {
+  if (isNaN(start_time)) {
+    return 'Invalid start time';
+  }
+  let duration = 'In Progress';
+  if (end_time !== null) {
+    if (isNaN(end_time)) {
+      return 'Invalid end time';
+    }
+    const start = fromUnixTime(start_time);
+    const end = fromUnixTime(end_time);
+    const formatDistanceLocale: DurationToken = {
+      xSeconds: '{{count}}s',
+      xMinutes: '{{count}}m',
+      xHours: '{{count}}h',
+    };
+    const shortEnLocale = {
+      formatDistance: (token: keyof DurationToken, count: number) =>
+        formatDistanceLocale[token].replace('{{count}}', count.toString()),
+    };
+    duration = formatDuration(intervalToDuration({ start, end }), {
+      format: ['hours', 'minutes', 'seconds'],
+      locale: shortEnLocale,
+    });
+  }
+  return duration;
+};