Mirror of https://github.com/blakeblackshear/frigate.git (synced 2025-12-06 13:34:13 +03:00)

Compare commits: 99aaf4d3ba...aa7f714992 (14 commits)
Commits in this compare:

- aa7f714992
- 1a75251ffb
- 048475e750
- 7933a83a42
- 2eef58aa1d
- 6659b7cb0f
- f134796913
- b4abbd7d3b
- 438df7d484
- e27a94ae0b
- 1dee548dbc
- 91e17e12b7
- bb45483e9e
- 7b4eaf2d10
.github/workflows/ci.yml (vendored, 26 lines changed)
@@ -225,3 +225,29 @@ jobs:
          sources: |
            ghcr.io/${{ steps.lowercaseRepo.outputs.lowercase }}:${{ env.SHORT_SHA }}-amd64
            ghcr.io/${{ steps.lowercaseRepo.outputs.lowercase }}:${{ env.SHORT_SHA }}-rpi
  axera_build:
    runs-on: ubuntu-22.04
    name: AXERA Build
    needs:
      - amd64_build
      - arm64_build
    steps:
      - name: Check out code
        uses: actions/checkout@v5
        with:
          persist-credentials: false
      - name: Set up QEMU and Buildx
        id: setup
        uses: ./.github/actions/setup
        with:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - name: Build and push Axera build
        uses: docker/bake-action@v6
        with:
          source: .
          push: true
          targets: axcl
          files: docker/axcl/axcl.hcl
          set: |
            axcl.tags=${{ steps.setup.outputs.image-name }}-axcl
            *.cache-from=type=gha
docker/axcl/Dockerfile (new file, 55 lines)
@@ -0,0 +1,55 @@
# syntax=docker/dockerfile:1.6

# https://askubuntu.com/questions/972516/debian-frontend-environment-variable
ARG DEBIAN_FRONTEND=noninteractive

# Globally set pip break-system-packages option to avoid having to specify it every time
ARG PIP_BREAK_SYSTEM_PACKAGES=1


FROM frigate AS frigate-axcl
ARG TARGETARCH
ARG PIP_BREAK_SYSTEM_PACKAGES

# Install axpyengine
RUN wget https://github.com/AXERA-TECH/pyaxengine/releases/download/0.1.3.rc1/axengine-0.1.3-py3-none-any.whl -O /axengine-0.1.3-py3-none-any.whl
RUN pip3 install -i https://mirrors.aliyun.com/pypi/simple/ /axengine-0.1.3-py3-none-any.whl \
    && rm /axengine-0.1.3-py3-none-any.whl

# Install axcl
RUN if [ "$TARGETARCH" = "amd64" ]; then \
        echo "Installing x86_64 version of axcl"; \
        wget https://github.com/ivanshi1108/assets/releases/download/v0.16.2/axcl_host_x86_64_V3.6.5_20250908154509_NO4973.deb -O /axcl.deb; \
    else \
        echo "Installing aarch64 version of axcl"; \
        wget https://github.com/ivanshi1108/assets/releases/download/v0.16.2/axcl_host_aarch64_V3.6.5_20250908154509_NO4973.deb -O /axcl.deb; \
    fi

RUN mkdir /unpack_axcl && \
    dpkg-deb -x /axcl.deb /unpack_axcl && \
    cp -R /unpack_axcl/usr/bin/axcl /usr/bin/ && \
    cp -R /unpack_axcl/usr/lib/axcl /usr/lib/ && \
    rm -rf /unpack_axcl /axcl.deb


# Install axcl ffmpeg
RUN mkdir -p /usr/lib/ffmpeg/axcl

RUN if [ "$TARGETARCH" = "amd64" ]; then \
        wget https://github.com/ivanshi1108/assets/releases/download/v0.16.2/ffmpeg-x64 -O /usr/lib/ffmpeg/axcl/ffmpeg && \
        wget https://github.com/ivanshi1108/assets/releases/download/v0.16.2/ffprobe-x64 -O /usr/lib/ffmpeg/axcl/ffprobe; \
    else \
        wget https://github.com/ivanshi1108/assets/releases/download/v0.16.2/ffmpeg-aarch64 -O /usr/lib/ffmpeg/axcl/ffmpeg && \
        wget https://github.com/ivanshi1108/assets/releases/download/v0.16.2/ffprobe-aarch64 -O /usr/lib/ffmpeg/axcl/ffprobe; \
    fi

RUN chmod +x /usr/lib/ffmpeg/axcl/ffmpeg /usr/lib/ffmpeg/axcl/ffprobe

# Set ldconfig path
RUN echo "/usr/lib/axcl" > /etc/ld.so.conf.d/ax.conf

# Set env
ENV PATH="$PATH:/usr/bin/axcl"
ENV LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/usr/lib/axcl"

ENTRYPOINT ["sh", "-c", "ldconfig && exec /init"]
docker/axcl/axcl.hcl (new file, 13 lines)
@@ -0,0 +1,13 @@
target frigate {
  dockerfile = "docker/main/Dockerfile"
  platforms = ["linux/amd64", "linux/arm64"]
  target = "frigate"
}

target axcl {
  dockerfile = "docker/axcl/Dockerfile"
  contexts = {
    frigate = "target:frigate",
  }
  platforms = ["linux/amd64", "linux/arm64"]
}
docker/axcl/axcl.mk (new file, 15 lines)
@@ -0,0 +1,15 @@
BOARDS += axcl

local-axcl: version
	docker buildx bake --file=docker/axcl/axcl.hcl axcl \
		--set axcl.tags=frigate:latest-axcl \
		--load

build-axcl: version
	docker buildx bake --file=docker/axcl/axcl.hcl axcl \
		--set axcl.tags=$(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH)-axcl

push-axcl: build-axcl
	docker buildx bake --file=docker/axcl/axcl.hcl axcl \
		--set axcl.tags=$(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH)-axcl \
		--push
docker/axcl/user_installation.sh (new executable file, 83 lines)
@@ -0,0 +1,83 @@
#!/bin/bash

# Update package list and install dependencies
sudo apt-get update
sudo apt-get install -y build-essential cmake git wget pciutils kmod udev

# Check if gcc-12 is needed
current_gcc_version=$(gcc --version | head -n1 | awk '{print $NF}')
gcc_major_version=$(echo $current_gcc_version | cut -d'.' -f1)

if [[ $gcc_major_version -lt 12 ]]; then
    echo "Current GCC version ($current_gcc_version) is lower than 12, installing gcc-12..."
    sudo apt-get install -y gcc-12
    sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-12 12
    echo "GCC-12 installed and set as default"
else
    echo "Current GCC version ($current_gcc_version) is sufficient, skipping GCC installation"
fi

# Determine architecture
arch=$(uname -m)
download_url=""

if [[ $arch == "x86_64" ]]; then
    download_url="https://github.com/ivanshi1108/assets/releases/download/v0.16.2/axcl_host_x86_64_V3.6.5_20250908154509_NO4973.deb"
    deb_file="axcl_host_x86_64_V3.6.5_20250908154509_NO4973.deb"
elif [[ $arch == "aarch64" ]]; then
    download_url="https://github.com/ivanshi1108/assets/releases/download/v0.16.2/axcl_host_aarch64_V3.6.5_20250908154509_NO4973.deb"
    deb_file="axcl_host_aarch64_V3.6.5_20250908154509_NO4973.deb"
else
    echo "Unsupported architecture: $arch"
    exit 1
fi

# Download AXCL driver
echo "Downloading AXCL driver for $arch..."
wget "$download_url" -O "$deb_file"

if [ $? -ne 0 ]; then
    echo "Failed to download AXCL driver"
    exit 1
fi

# Install AXCL driver
echo "Installing AXCL driver..."
sudo dpkg -i "$deb_file"

if [ $? -ne 0 ]; then
    echo "Failed to install AXCL driver, attempting to fix dependencies..."
    sudo apt-get install -f -y
    sudo dpkg -i "$deb_file"

    if [ $? -ne 0 ]; then
        echo "AXCL driver installation failed"
        exit 1
    fi
fi

# Update environment
echo "Updating environment..."
source /etc/profile

# Verify installation
echo "Verifying AXCL installation..."
if command -v axcl-smi &> /dev/null; then
    echo "AXCL driver detected, checking AI accelerator status..."

    axcl_output=$(axcl-smi 2>&1)
    axcl_exit_code=$?

    echo "$axcl_output"

    if [ $axcl_exit_code -eq 0 ]; then
        echo "AXCL driver installation completed successfully!"
    else
        echo "AXCL driver installed but no AI accelerator detected or communication failed."
        echo "Please check if the AI accelerator is properly connected and powered on."
        exit 1
    fi
else
    echo "axcl-smi command not found. AXCL driver installation may have failed."
    exit 1
fi
@@ -49,6 +49,11 @@ Frigate supports multiple different detectors that work on different types of ha

- [Synaptics](#synaptics): synap models can run on Synaptics devices (e.g. astra machina) with included NPUs.

**AXERA** <CommunityBadge />

- [AXEngine](#axera): axmodels can run on AXERA AI accelerators.


**For Testing**

- [CPU Detector (not recommended for actual use)](#cpu-detector-not-recommended): Use a CPU to run a tflite model; this is not recommended, and in most cases OpenVINO can be used in CPU mode with better results.

@@ -1438,6 +1443,41 @@ model:
  input_pixel_format: rgb/bgr # look at the model.json to figure out which to put here
```

## AXERA

Hardware accelerated object detection is supported on the following SoCs:

- AX650N
- AX8850N

This implementation uses the [AXera Pulsar2 Toolchain](https://huggingface.co/AXERA-TECH/Pulsar2).

See the [installation docs](../frigate/installation.md#axera) for information on configuring the AXEngine hardware.

### Configuration

When configuring the AXEngine detector, you have to specify the model name.

#### yolov9

A yolov9 model is provided in the container at /axmodels and is used by this detector type by default.

Use the model configuration shown below when using the axengine detector with the default axmodel:

```yaml
detectors:
  axengine:
    type: axengine

model:
  path: frigate-yolov9-tiny
  model_type: yolo-generic
  width: 320
  height: 320
  tensor_format: bgr
  labelmap_path: /labelmap/coco-80.txt
```

# Models

Some model types are not included in Frigate by default.
@@ -104,6 +104,10 @@ Frigate supports multiple different detectors that work on different types of ha

- [Synaptics](#synaptics): synap models can run on Synaptics devices (e.g. astra machina) with included NPUs to provide efficient object detection.

**AXERA** <CommunityBadge />

- [AXEngine](#axera): axera models can run on AXERA NPUs via AXEngine, delivering highly efficient object detection.

:::

### Hailo-8

@@ -159,7 +163,7 @@ Inference speeds vary greatly depending on the CPU or GPU used, some known examp
| Intel HD 530  | 15 - 35 ms | | | | Can only run one detector instance |
| Intel HD 620  | 15 - 25 ms | | 320: ~ 35 ms | | |
| Intel HD 630  | ~ 15 ms | | 320: ~ 30 ms | | |
| Intel UHD 730 | ~ 10 ms | | 320: ~ 19 ms 640: ~ 54 ms | | |
| Intel UHD 730 | ~ 10 ms | t-320: 14 ms s-320: 24 ms t-640: 34 ms s-640: 65 ms | 320: ~ 19 ms 640: ~ 54 ms | | |
| Intel UHD 770 | ~ 15 ms | t-320: ~ 16 ms s-320: ~ 20 ms s-640: ~ 40 ms | 320: ~ 20 ms 640: ~ 46 ms | | |
| Intel N100    | ~ 15 ms | s-320: 30 ms | 320: ~ 25 ms | | Can only run one detector instance |
| Intel N150    | ~ 15 ms | t-320: 16 ms s-320: 24 ms | | | |

@@ -287,6 +291,14 @@ The inference time of a rk3588 with all 3 cores enabled is typically 25-30 ms fo
| ssd mobilenet | ~ 25 ms  |
| yolov5m       | ~ 118 ms |

### AXERA

- **AXEngine**: default model is **yolov9**

| Name        | AXERA AX650N/AX8850N Inference Time |
| ----------- | ----------------------------------- |
| yolov9-tiny | ~ 4 ms                              |

## What does Frigate use the CPU for and what does it use a detector for? (ELI5 Version)

This is taken from a [user question on reddit](https://www.reddit.com/r/homeassistant/comments/q8mgau/comment/hgqbxh5/?utm_source=share&utm_medium=web2x&context=3). Modified slightly for clarity.

@@ -307,4 +319,4 @@ Basically - When you increase the resolution and/or the frame rate of the stream

YES! The Coral does not help with decoding video streams.

Decompressing video streams takes a significant amount of CPU power. Video compression uses key frames (also known as I-frames) to send a full frame in the video stream. The following frames only include the difference from the key frame, and the CPU has to compile each frame by merging the differences with the key frame. [More detailed explanation](https://support.video.ibm.com/hc/en-us/articles/18106203580316-Keyframes-InterFrame-Video-Compression). Higher resolutions and frame rates mean more processing power is needed to decode the video stream, so try and set them on the camera to avoid unnecessary decoding work.
@@ -287,6 +287,42 @@ or add these options to your `docker run` command:

Next, you should configure [hardware object detection](/configuration/object_detectors#synaptics) and [hardware video processing](/configuration/hardware_acceleration_video#synaptics).

### AXERA

<details>
<summary>AXERA accelerators</summary>
AXERA accelerators are available in an M.2 form factor, compatible with both Raspberry Pi and Orange Pi. This form factor has also been successfully tested on x86 platforms, making it a versatile choice for various computing environments.

#### Installation

Using AXERA accelerators requires the installation of the AXCL driver. We provide a convenient Linux script to complete this installation.

Follow these steps for installation:

1. Copy or download [this script](https://github.com/ivanshi1108/assets/releases/download/v0.16.2/user_installation.sh).
2. Ensure it has execution permissions with `sudo chmod +x user_installation.sh`
3. Run the script with `./user_installation.sh`

#### Setup

To set up Frigate, follow the default installation instructions using the standard image, for example `ghcr.io/blakeblackshear/frigate:stable`.

Next, grant Docker permissions to access your hardware by adding the following lines to your `docker-compose.yml` file:

```yaml
devices:
  - /dev/axcl_host
  - /dev/ax_mmb_dev
  - /dev/msg_userdev
```

If you are using `docker run`, add these options to your command: `--device /dev/axcl_host --device /dev/ax_mmb_dev --device /dev/msg_userdev`
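
For reference, a minimal `docker-compose.yml` service combining the device mappings above might look like the sketch below. The `devices` entries and the image tag come from this change; the volumes, ports, and other settings are illustrative assumptions and should be adapted to your setup.

```yaml
services:
  frigate:
    container_name: frigate
    image: ghcr.io/blakeblackshear/frigate:stable
    restart: unless-stopped
    devices:
      # AXERA AXCL device nodes exposed by the host driver
      - /dev/axcl_host
      - /dev/ax_mmb_dev
      - /dev/msg_userdev
    volumes:
      # assumed host paths for config and recordings
      - ./config:/config
      - ./storage:/media/frigate
    ports:
      - "8971:8971"
```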

#### Configuration

Finally, configure [hardware object detection](/configuration/object_detectors#axera) to complete the setup.
</details>

## Docker

Running through Docker with Docker Compose is the recommended install method.
@@ -62,8 +62,8 @@ def require_admin_by_default():
        "/",
        "/version",
        "/config/schema.json",
        "/metrics",
        # Authenticated user endpoints (allow_any_authenticated)
        "/metrics",
        "/stats",
        "/stats/history",
        "/config",
@@ -76,22 +76,28 @@ def require_admin_by_default():
        "/recognized_license_plates",
        "/timeline",
        "/timeline/hourly",
        "/events/summary",
        "/recordings/storage",
        "/recordings/summary",
        "/recordings/unavailable",
        "/go2rtc/streams",
        "/event_ids",
        "/events",
        "/exports",
    }

    # Path prefixes that should be exempt (for paths with parameters)
    EXEMPT_PREFIXES = (
        "/logs/",  # /logs/{service}
        "/review",  # /review, /review/{id}, /review_ids, /review/summary, etc.
        "/review",  # /review, /review/{id}, /review/summary, /review_ids, etc.
        "/reviews/",  # /reviews/viewed, /reviews/delete
        "/events/",  # /events/{id}/thumbnail, etc. (camera-scoped)
        "/events/",  # /events/{id}/thumbnail, /events/summary, etc. (camera-scoped)
        "/export/",  # /export/{camera}/start/..., /export/{id}/rename, /export/{id}
        "/go2rtc/streams/",  # /go2rtc/streams/{camera}
        "/users/",  # /users/{username}/password (has own auth)
        "/preview/",  # /preview/{file}/thumbnail.jpg
        "/exports/",  # /exports/{export_id}
        "/vod/",  # /vod/{camera_name}/...
        "/notifications/",  # /notifications/pubkey, /notifications/register
    )

    async def admin_checker(request: Request):
@@ -105,6 +111,24 @@ def require_admin_by_default():
        if path.startswith(EXEMPT_PREFIXES):
            return

        # Dynamic camera path exemption:
        # Any path whose first segment matches a configured camera name should
        # bypass the global admin requirement. These endpoints enforce access
        # via route-level dependencies (e.g. require_camera_access) to ensure
        # per-camera authorization. This allows non-admin authenticated users
        # (e.g. viewer role) to access camera-specific resources without
        # needing admin privileges.
        try:
            if path.startswith("/"):
                first_segment = path.split("/", 2)[1]
                if (
                    first_segment
                    and first_segment in request.app.frigate_config.cameras
                ):
                    return
        except Exception:
            pass

        # For all other paths, require admin role
        # Port 5000 (internal) requests have admin role set automatically
        role = request.headers.get("remote-role")
@@ -113,7 +137,7 @@ def require_admin_by_default():

        raise HTTPException(
            status_code=403,
            detail="Admin role required for this endpoint",
            detail="Access denied. A user with the admin role is required.",
        )

    return admin_checker
@@ -70,6 +70,7 @@ router = APIRouter(tags=[Tags.events])
@router.get(
    "/events",
    response_model=list[EventResponse],
    dependencies=[Depends(allow_any_authenticated())],
    summary="Get events",
    description="Returns a list of events.",
)
@@ -344,6 +345,7 @@ def events(
@router.get(
    "/events/explore",
    response_model=list[EventResponse],
    dependencies=[Depends(allow_any_authenticated())],
    summary="Get summary of objects.",
    description="""Gets a summary of objects from the database.
    Returns a list of objects with a max of `limit` objects for each label.
@@ -436,6 +438,7 @@ def events_explore(
@router.get(
    "/event_ids",
    response_model=list[EventResponse],
    dependencies=[Depends(allow_any_authenticated())],
    summary="Get events by ids.",
    description="""Gets events by a list of ids.
    Returns a list of events.
@@ -469,6 +472,7 @@ async def event_ids(ids: str, request: Request):

@router.get(
    "/events/search",
    dependencies=[Depends(allow_any_authenticated())],
    summary="Search events.",
    description="""Searches for events in the database.
    Returns a list of events.
@@ -919,6 +923,7 @@ def events_summary(
@router.get(
    "/events/{event_id}",
    response_model=EventResponse,
    dependencies=[Depends(allow_any_authenticated())],
    summary="Get event by id.",
    description="Gets an event by its id.",
)
@@ -962,6 +967,7 @@ def set_retain(event_id: str):
@router.post(
    "/events/{event_id}/plus",
    response_model=EventUploadPlusResponse,
    dependencies=[Depends(require_role(["admin"]))],
    summary="Send event to Frigate+.",
    description="""Sends an event to Frigate+.
    Returns a success message or an error if the event is not found.
@@ -1102,6 +1108,7 @@ async def send_to_plus(request: Request, event_id: str, body: SubmitPlusBody = N
@router.put(
    "/events/{event_id}/false_positive",
    response_model=EventUploadPlusResponse,
    dependencies=[Depends(require_role(["admin"]))],
    summary="Submit false positive to Frigate+",
    description="""Submit an event as a false positive to Frigate+.
    This endpoint is the same as the standard Frigate+ submission endpoint,
@@ -14,6 +14,7 @@ from peewee import DoesNotExist
from playhouse.shortcuts import model_to_dict

from frigate.api.auth import (
    allow_any_authenticated,
    get_allowed_cameras_for_filter,
    require_camera_access,
    require_role,
@@ -44,6 +45,7 @@ router = APIRouter(tags=[Tags.export])
@router.get(
    "/exports",
    response_model=ExportsResponse,
    dependencies=[Depends(allow_any_authenticated())],
    summary="Get exports",
    description="""Gets all exports from the database for cameras the user has access to.
    Returns a list of exports ordered by date (most recent first).""",
@@ -272,6 +274,7 @@ async def export_delete(event_id: str, request: Request):
@router.get(
    "/exports/{export_id}",
    response_model=ExportModel,
    dependencies=[Depends(allow_any_authenticated())],
    summary="Get a single export",
    description="""Gets a specific export by ID. The user must have access to the camera
    associated with the export.""",
@@ -945,6 +945,7 @@ async def vod_hour(

@router.get(
    "/vod/event/{event_id}",
    dependencies=[Depends(allow_any_authenticated())],
    description="Returns an HLS playlist for the specified object. Append /master.m3u8 or /index.m3u8 for HLS playback.",
)
async def vod_event(
@@ -5,11 +5,12 @@ import os
from typing import Any

from cryptography.hazmat.primitives import serialization
from fastapi import APIRouter, Request
from fastapi import APIRouter, Depends, Request
from fastapi.responses import JSONResponse
from peewee import DoesNotExist
from py_vapid import Vapid01, utils

from frigate.api.auth import allow_any_authenticated
from frigate.api.defs.tags import Tags
from frigate.const import CONFIG_DIR
from frigate.models import User
@@ -21,6 +22,7 @@ router = APIRouter(tags=[Tags.notifications])

@router.get(
    "/notifications/pubkey",
    dependencies=[Depends(allow_any_authenticated())],
    summary="Get VAPID public key",
    description="""Gets the VAPID public key for the notifications.
    Returns the public key or an error if notifications are not enabled.
@@ -47,6 +49,7 @@ def get_vapid_pub_key(request: Request):

@router.post(
    "/notifications/register",
    dependencies=[Depends(allow_any_authenticated())],
    summary="Register notifications",
    description="""Registers a notifications subscription.
    Returns a success message or an error if the subscription is not provided.
@@ -577,7 +577,9 @@ def delete_reviews(body: ReviewModifyMultipleBody):


@router.get(
    "/review/activity/motion", response_model=list[ReviewActivityMotionResponse]
    "/review/activity/motion",
    response_model=list[ReviewActivityMotionResponse],
    dependencies=[Depends(allow_any_authenticated())],
)
def motion_activity(
    params: ReviewActivityMotionQueryParams = Depends(),
@@ -739,6 +741,7 @@ async def set_not_reviewed(

@router.post(
    "/review/summarize/start/{start_ts}/end/{end_ts}",
    dependencies=[Depends(allow_any_authenticated())],
    description="Use GenAI to summarize review items over a period of time.",
)
def generate_review_summary(request: Request, start_ts: float, end_ts: float):
frigate/detectors/plugins/axengine.py (new file, 87 lines)
@@ -0,0 +1,87 @@
import logging
import os.path
import re
import urllib.request
from typing import Literal

import axengine as axe

from frigate.const import MODEL_CACHE_DIR
from frigate.detectors.detection_api import DetectionApi
from frigate.detectors.detector_config import BaseDetectorConfig, ModelTypeEnum
from frigate.util.model import post_process_yolo

logger = logging.getLogger(__name__)

DETECTOR_KEY = "axengine"

supported_models = {
    ModelTypeEnum.yologeneric: "frigate-yolov9-.*$",
}

model_cache_dir = os.path.join(MODEL_CACHE_DIR, "axengine_cache/")


class AxengineDetectorConfig(BaseDetectorConfig):
    type: Literal[DETECTOR_KEY]

class Axengine(DetectionApi):
    type_key = DETECTOR_KEY

    def __init__(self, config: AxengineDetectorConfig):
        logger.info("__init__ axengine")
        super().__init__(config)
        self.height = config.model.height
        self.width = config.model.width
        model_path = config.model.path or "frigate-yolov9-tiny"
        model_props = self.parse_model_input(model_path)
        self.session = axe.InferenceSession(model_props["path"])

    def __del__(self):
        pass

    def parse_model_input(self, model_path):
        model_props = {}
        model_props["preset"] = True

        model_matched = False

        for model_type, pattern in supported_models.items():
            if re.match(pattern, model_path):
                model_matched = True
                model_props["model_type"] = model_type

        if model_matched:
            model_props["filename"] = model_path + ".axmodel"
            model_props["path"] = model_cache_dir + model_props["filename"]

            if not os.path.isfile(model_props["path"]):
                self.download_model(model_props["filename"])
        else:
            supported_models_str = ", ".join(
                model[1:-1] for model in supported_models
            )
            raise Exception(
                f"Model {model_path} is unsupported. Provide your own model or choose one of the following: {supported_models_str}"
            )
        return model_props

    def download_model(self, filename):
        if not os.path.isdir(model_cache_dir):
            os.mkdir(model_cache_dir)

        GITHUB_ENDPOINT = os.environ.get("GITHUB_ENDPOINT", "https://github.com")
        urllib.request.urlretrieve(
            f"{GITHUB_ENDPOINT}/ivanshi1108/assets/releases/download/v0.16.2/{filename}",
            model_cache_dir + filename,
        )

    def detect_raw(self, tensor_input):
        results = None
        results = self.session.run(None, {"images": tensor_input})
        if self.detector_config.model.model_type == ModelTypeEnum.yologeneric:
            return post_process_yolo(results, self.width, self.height)
        else:
            raise ValueError(
                f'Model type "{self.detector_config.model.model_type}" is currently not supported.'
            )
@@ -1299,7 +1299,8 @@ function ObjectDetailsTab({
          </div>
        </div>

        {search.data.type === "object" &&
        {isAdmin &&
          search.data.type === "object" &&
          config?.plus?.enabled &&
          search.end_time != undefined &&
          search.has_snapshot && (
@@ -38,6 +38,7 @@ import { isDesktop, isIOS, isMobileOnly, isSafari } from "react-device-detect";
import { useApiHost } from "@/api";
import ImageLoadingIndicator from "@/components/indicators/ImageLoadingIndicator";
import ObjectTrackOverlay from "../ObjectTrackOverlay";
import { useIsAdmin } from "@/hooks/use-is-admin";

type TrackingDetailsProps = {
  className?: string;
@@ -777,6 +778,7 @@ function LifecycleIconRow({
  const { data: config } = useSWR<FrigateConfig>("config");
  const [isOpen, setIsOpen] = useState(false);
  const navigate = useNavigate();
  const isAdmin = useIsAdmin();

  const aspectRatio = useMemo(() => {
    if (!config) {
@@ -993,7 +995,7 @@ function LifecycleIconRow({
      <div className="ml-3 flex-shrink-0 px-1 text-right text-xs text-primary-variant">
        <div className="flex flex-row items-center gap-3">
          <div className="whitespace-nowrap">{formattedEventTimestamp}</div>
          {(config?.plus?.enabled || item.data.box) && (
          {((isAdmin && config?.plus?.enabled) || item.data.box) && (
            <DropdownMenu open={isOpen} onOpenChange={setIsOpen}>
              <DropdownMenuTrigger>
                <div className="rounded p-1 pr-2" role="button">
@@ -1002,7 +1004,7 @@ function LifecycleIconRow({
              </DropdownMenuTrigger>
              <DropdownMenuPortal>
                <DropdownMenuContent>
                  {config?.plus?.enabled && (
                  {isAdmin && config?.plus?.enabled && (
                    <DropdownMenuItem
                      className="cursor-pointer"
                      onSelect={async () => {
@@ -20,6 +20,7 @@ import ImageLoadingIndicator from "@/components/indicators/ImageLoadingIndicator
import { baseUrl } from "@/api/baseUrl";
import { getTranslatedLabel } from "@/utils/i18n";
import useImageLoaded from "@/hooks/use-image-loaded";
import { useIsAdmin } from "@/hooks/use-is-admin";

export type FrigatePlusDialogProps = {
  upload?: Event;
@@ -57,7 +58,9 @@ export function FrigatePlusDialog({
  );

  const [imgRef, imgLoaded, onImgLoad] = useImageLoaded();
  const isAdmin = useIsAdmin();
  const showCard =
    isAdmin &&
    !!upload &&
    upload.data.type === "object" &&
    upload.plus_id !== "not_enabled" &&
@ -20,6 +20,7 @@ import { cn } from "@/lib/utils";
|
||||
import { ASPECT_VERTICAL_LAYOUT, RecordingPlayerError } from "@/types/record";
|
||||
import { useTranslation } from "react-i18next";
|
||||
import ObjectTrackOverlay from "@/components/overlay/ObjectTrackOverlay";
|
||||
import { useIsAdmin } from "@/hooks/use-is-admin";
|
||||
|
||||
// Android native hls does not seek correctly
|
||||
const USE_NATIVE_HLS = false;
|
||||
@ -83,6 +84,7 @@ export default function HlsVideoPlayer({
|
||||
}: HlsVideoPlayerProps) {
|
||||
const { t } = useTranslation("components/player");
|
||||
const { data: config } = useSWR<FrigateConfig>("config");
|
||||
const isAdmin = useIsAdmin();
|
||||
|
||||
// for detail stream context in History
|
||||
const currentTime = currentTimeOverride;
|
||||
@ -285,7 +287,7 @@ export default function HlsVideoPlayer({
|
||||
volume: true,
|
||||
seek: true,
|
||||
playbackRate: true,
|
||||
plusUpload: config?.plus?.enabled == true,
|
||||
plusUpload: isAdmin && config?.plus?.enabled == true,
|
||||
fullscreen: supportsFullscreen,
|
||||
}}
|
||||
setControlsOpen={setControlsOpen}
|
||||
|
||||
@ -13,6 +13,7 @@ import { useTranslation } from "react-i18next";
|
||||
import { Event } from "@/types/event";
|
||||
import { FrigateConfig } from "@/types/frigateConfig";
|
||||
import { useState } from "react";
|
||||
import { useIsAdmin } from "@/hooks/use-is-admin";
|
||||
|
||||
type EventMenuProps = {
|
||||
event: Event;
|
||||
@ -35,6 +36,7 @@ export default function EventMenu({
|
||||
const navigate = useNavigate();
|
||||
const { t } = useTranslation("views/explore");
|
||||
const [isOpen, setIsOpen] = useState(false);
|
||||
const isAdmin = useIsAdmin();
|
||||
|
||||
const handleObjectSelect = () => {
|
||||
if (isSelected) {
|
||||
@ -85,7 +87,8 @@ export default function EventMenu({
|
||||
</a>
|
||||
</DropdownMenuItem>
|
||||
|
||||
{event.has_snapshot &&
|
||||
{isAdmin &&
|
||||
event.has_snapshot &&
|
||||
event.plus_id == undefined &&
|
||||
event.data.type == "object" &&
|
||||
config?.plus?.enabled && (
|
||||
|
||||