Mirror of https://github.com/blakeblackshear/frigate.git (synced 2025-12-21 04:26:43 +03:00)
Compare commits
10 Commits
ddaab05bb1 ... 963385d8bf
963385d8bf
8e8346099e
b0527df3c7
301e0a1a3a
438df7d484
e27a94ae0b
1dee548dbc
91e17e12b7
bb45483e9e
7b4eaf2d10
.github/workflows/ci.yml (vendored, 26 lines changed)
@@ -225,3 +225,29 @@ jobs:
           sources: |
             ghcr.io/${{ steps.lowercaseRepo.outputs.lowercase }}:${{ env.SHORT_SHA }}-amd64
             ghcr.io/${{ steps.lowercaseRepo.outputs.lowercase }}:${{ env.SHORT_SHA }}-rpi
+  axera_build:
+    runs-on: ubuntu-22.04
+    name: AXERA Build
+    needs:
+      - amd64_build
+      - arm64_build
+    steps:
+      - name: Check out code
+        uses: actions/checkout@v5
+        with:
+          persist-credentials: false
+      - name: Set up QEMU and Buildx
+        id: setup
+        uses: ./.github/actions/setup
+        with:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+      - name: Build and push Axera build
+        uses: docker/bake-action@v6
+        with:
+          source: .
+          push: true
+          targets: axcl
+          files: docker/axcl/axcl.hcl
+          set: |
+            axcl.tags=${{ steps.setup.outputs.image-name }}-axcl
+            *.cache-from=type=gha
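For reference, the bake invocation below approximates what the new axera_build job runs inside Actions. The tag is only an example; in CI it comes from the setup action's image-name output, and the GitHub Actions cache settings have no effect locally.

```bash
# Rough local equivalent of the "Build and push Axera build" step (illustrative tag).
docker buildx bake \
  --file docker/axcl/axcl.hcl axcl \
  --set axcl.tags=frigate:dev-axcl \
  --load
```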
docker/axcl/Dockerfile (new file, 55 lines)
@@ -0,0 +1,55 @@
# syntax=docker/dockerfile:1.6

# https://askubuntu.com/questions/972516/debian-frontend-environment-variable
ARG DEBIAN_FRONTEND=noninteractive

# Globally set pip break-system-packages option to avoid having to specify it every time
ARG PIP_BREAK_SYSTEM_PACKAGES=1


FROM frigate AS frigate-axcl
ARG TARGETARCH
ARG PIP_BREAK_SYSTEM_PACKAGES

# Install axpyengine
RUN wget https://github.com/AXERA-TECH/pyaxengine/releases/download/0.1.3.rc1/axengine-0.1.3-py3-none-any.whl -O /axengine-0.1.3-py3-none-any.whl
RUN pip3 install -i https://mirrors.aliyun.com/pypi/simple/ /axengine-0.1.3-py3-none-any.whl \
    && rm /axengine-0.1.3-py3-none-any.whl

# Install axcl
RUN if [ "$TARGETARCH" = "amd64" ]; then \
        echo "Installing x86_64 version of axcl"; \
        wget https://github.com/ivanshi1108/assets/releases/download/v0.16.2/axcl_host_x86_64_V3.6.5_20250908154509_NO4973.deb -O /axcl.deb; \
    else \
        echo "Installing aarch64 version of axcl"; \
        wget https://github.com/ivanshi1108/assets/releases/download/v0.16.2/axcl_host_aarch64_V3.6.5_20250908154509_NO4973.deb -O /axcl.deb; \
    fi

RUN mkdir /unpack_axcl && \
    dpkg-deb -x /axcl.deb /unpack_axcl && \
    cp -R /unpack_axcl/usr/bin/axcl /usr/bin/ && \
    cp -R /unpack_axcl/usr/lib/axcl /usr/lib/ && \
    rm -rf /unpack_axcl /axcl.deb


# Install axcl ffmpeg
RUN mkdir -p /usr/lib/ffmpeg/axcl

RUN if [ "$TARGETARCH" = "amd64" ]; then \
        wget https://github.com/ivanshi1108/assets/releases/download/v0.16.2/ffmpeg-x64 -O /usr/lib/ffmpeg/axcl/ffmpeg && \
        wget https://github.com/ivanshi1108/assets/releases/download/v0.16.2/ffprobe-x64 -O /usr/lib/ffmpeg/axcl/ffprobe; \
    else \
        wget https://github.com/ivanshi1108/assets/releases/download/v0.16.2/ffmpeg-aarch64 -O /usr/lib/ffmpeg/axcl/ffmpeg && \
        wget https://github.com/ivanshi1108/assets/releases/download/v0.16.2/ffprobe-aarch64 -O /usr/lib/ffmpeg/axcl/ffprobe; \
    fi

RUN chmod +x /usr/lib/ffmpeg/axcl/ffmpeg /usr/lib/ffmpeg/axcl/ffprobe

# Set ldconfig path
RUN echo "/usr/lib/axcl" > /etc/ld.so.conf.d/ax.conf

# Set env
ENV PATH="$PATH:/usr/bin/axcl"
ENV LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/usr/lib/axcl"

ENTRYPOINT ["sh", "-c", "ldconfig && exec /init"]
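A quick smoke test of the resulting image is to start it with the AXCL device nodes mapped in (the same nodes the installation docs later in this comparison list) and call axcl-smi inside the container. The tag frigate:latest-axcl matches the local-axcl make target; a real deployment would also need the usual Frigate config volume, so this sketch only checks device access.

```bash
# Assumes a local build tagged frigate:latest-axcl (see docker/axcl/axcl.mk)
docker run -d --name frigate-axcl-test \
  --device /dev/axcl_host \
  --device /dev/ax_mmb_dev \
  --device /dev/msg_userdev \
  frigate:latest-axcl

# axcl-smi is installed to /usr/bin/axcl, which the Dockerfile adds to PATH
docker exec frigate-axcl-test axcl-smi
```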
docker/axcl/axcl.hcl (new file, 13 lines)
@@ -0,0 +1,13 @@
target frigate {
  dockerfile = "docker/main/Dockerfile"
  platforms  = ["linux/amd64", "linux/arm64"]
  target     = "frigate"
}

target axcl {
  dockerfile = "docker/axcl/Dockerfile"
  contexts = {
    frigate = "target:frigate",
  }
  platforms = ["linux/amd64", "linux/arm64"]
}
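Because the axcl target pulls the frigate target in through a named build context, it can help to print the resolved definition before building; bake supports this without producing any images.

```bash
# Show the fully resolved targets defined in axcl.hcl
docker buildx bake --file docker/axcl/axcl.hcl --print axcl
```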
docker/axcl/axcl.mk (new file, 15 lines)
@@ -0,0 +1,15 @@
BOARDS += axcl

local-axcl: version
	docker buildx bake --file=docker/axcl/axcl.hcl axcl \
		--set axcl.tags=frigate:latest-axcl \
		--load

build-axcl: version
	docker buildx bake --file=docker/axcl/axcl.hcl axcl \
		--set axcl.tags=$(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH)-axcl

push-axcl: build-axcl
	docker buildx bake --file=docker/axcl/axcl.hcl axcl \
		--set axcl.tags=$(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH)-axcl \
		--push
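Assuming the main Makefile includes the per-board docker/*/*.mk files, as it does for the existing boards, the targets above are reachable through make:

```bash
# Build and load a local image tagged frigate:latest-axcl
make local-axcl
```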
docker/axcl/user_installation.sh (new executable file, 83 lines)
@@ -0,0 +1,83 @@
#!/bin/bash

# Update package list and install dependencies
sudo apt-get update
sudo apt-get install -y build-essential cmake git wget pciutils kmod udev

# Check if gcc-12 is needed
current_gcc_version=$(gcc --version | head -n1 | awk '{print $NF}')
gcc_major_version=$(echo $current_gcc_version | cut -d'.' -f1)

if [[ $gcc_major_version -lt 12 ]]; then
    echo "Current GCC version ($current_gcc_version) is lower than 12, installing gcc-12..."
    sudo apt-get install -y gcc-12
    sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-12 12
    echo "GCC-12 installed and set as default"
else
    echo "Current GCC version ($current_gcc_version) is sufficient, skipping GCC installation"
fi

# Determine architecture
arch=$(uname -m)
download_url=""

if [[ $arch == "x86_64" ]]; then
    download_url="https://github.com/ivanshi1108/assets/releases/download/v0.16.2/axcl_host_x86_64_V3.6.5_20250908154509_NO4973.deb"
    deb_file="axcl_host_x86_64_V3.6.5_20250908154509_NO4973.deb"
elif [[ $arch == "aarch64" ]]; then
    download_url="https://github.com/ivanshi1108/assets/releases/download/v0.16.2/axcl_host_aarch64_V3.6.5_20250908154509_NO4973.deb"
    deb_file="axcl_host_aarch64_V3.6.5_20250908154509_NO4973.deb"
else
    echo "Unsupported architecture: $arch"
    exit 1
fi

# Download AXCL driver
echo "Downloading AXCL driver for $arch..."
wget "$download_url" -O "$deb_file"

if [ $? -ne 0 ]; then
    echo "Failed to download AXCL driver"
    exit 1
fi

# Install AXCL driver
echo "Installing AXCL driver..."
sudo dpkg -i "$deb_file"

if [ $? -ne 0 ]; then
    echo "Failed to install AXCL driver, attempting to fix dependencies..."
    sudo apt-get install -f -y
    sudo dpkg -i "$deb_file"

    if [ $? -ne 0 ]; then
        echo "AXCL driver installation failed"
        exit 1
    fi
fi

# Update environment
echo "Updating environment..."
source /etc/profile

# Verify installation
echo "Verifying AXCL installation..."
if command -v axcl-smi &> /dev/null; then
    echo "AXCL driver detected, checking AI accelerator status..."

    axcl_output=$(axcl-smi 2>&1)
    axcl_exit_code=$?

    echo "$axcl_output"

    if [ $axcl_exit_code -eq 0 ]; then
        echo "AXCL driver installation completed successfully!"
    else
        echo "AXCL driver installed but no AI accelerator detected or communication failed."
        echo "Please check if the AI accelerator is properly connected and powered on."
        exit 1
    fi
else
    echo "axcl-smi command not found. AXCL driver installation may have failed."
    exit 1
fi
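The installation docs later in this comparison run this script in three steps; in shell form that is roughly the following, with the URL taken from those docs and sudo needed because the script drives apt-get and dpkg.

```bash
wget https://github.com/ivanshi1108/assets/releases/download/v0.16.2/user_installation.sh
sudo chmod +x user_installation.sh
./user_installation.sh
```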
@@ -320,6 +320,12 @@ http {
             add_header Cache-Control "public";
         }

+        location /fonts/ {
+            access_log off;
+            expires 1y;
+            add_header Cache-Control "public";
+        }
+
         location /locales/ {
             access_log off;
             add_header Cache-Control "public";
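One hedged way to confirm the new /fonts/ block is serving cached responses is to request a bundled font through the proxy and inspect the headers; the host, port, and font filename below are placeholders for your deployment.

```bash
# Expect "Cache-Control: public" and a far-future Expires header
curl -I http://frigate.local:8971/fonts/example.woff2
```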
@@ -70,7 +70,7 @@ You should have at least 8 GB of RAM available (or VRAM if running on GPU) to ru
 genai:
   provider: ollama
   base_url: http://localhost:11434
-  model: llava:7b
+  model: qwen3-vl:4b
 ```

 ## Google Gemini
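If an existing Ollama setup is being moved to the new default, the model has to be pulled before Frigate can use it; the tag matches the config example above.

```bash
ollama pull qwen3-vl:4b
```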
@@ -35,19 +35,18 @@ Each model is available in multiple parameter sizes (3b, 4b, 8b, etc.). Larger s

 :::tip

-If you are trying to use a single model for Frigate and HomeAssistant, it will need to support vision and tools calling. https://github.com/skye-harris/ollama-modelfiles contains optimized model configs for this task.
+If you are trying to use a single model for Frigate and HomeAssistant, it will need to support vision and tools calling. qwen3-VL supports vision and tools simultaneously in Ollama.

 :::

 The following models are recommended:

-| Model         | Notes                                                        |
-| ------------- | ------------------------------------------------------------ |
-| `qwen3-vl`    | Strong visual and situational understanding                   |
-| `Intern3.5VL` | Relatively fast with good vision comprehension                |
-| `gemma3`      | Strong frame-to-frame understanding, slower inference times   |
-| `qwen2.5-vl`  | Fast but capable model with good vision comprehension         |
-| `llava-phi3`  | Lightweight and fast model with vision comprehension          |
+| Model         | Notes                                                                 |
+| ------------- | --------------------------------------------------------------------- |
+| `qwen3-vl`    | Strong visual and situational understanding, higher vram requirement  |
+| `Intern3.5VL` | Relatively fast with good vision comprehension                        |
+| `gemma3`      | Strong frame-to-frame understanding, slower inference times           |
+| `qwen2.5-vl`  | Fast but capable model with good vision comprehension                 |

 :::note

@@ -47,6 +47,11 @@ Frigate supports multiple different detectors that work on different types of ha

 - [Synaptics](#synaptics): synap models can run on Synaptics devices (e.g. astra machina) with included NPUs.

+**AXERA**
+
+- [AXEngine](#axera): axmodels can run on AXERA AI acceleration.
+
+
 **For Testing**

 - [CPU Detector (not recommended for actual use)](#cpu-detector-not-recommended): Use a CPU to run a tflite model; this is not recommended, and in most cases OpenVINO can be used in CPU mode with better results.
@@ -1169,6 +1174,41 @@ model: # required
   labelmap_path: /labelmap/coco-80.txt # required
 ```

+## AXERA
+
+Hardware accelerated object detection is supported on the following SoCs:
+
+- AX650N
+- AX8850N
+
+This implementation uses the [AXera Pulsar2 Toolchain](https://huggingface.co/AXERA-TECH/Pulsar2).
+
+See the [installation docs](../frigate/installation.md#axera) for information on configuring the AXEngine hardware.
+
+### Configuration
+
+When configuring the AXEngine detector, you have to specify the model name.
+
+#### yolov9
+
+A yolov9 model is provided in the container at /axmodels and is used by this detector type by default.
+
+Use the model configuration shown below when using the axengine detector with the default axmodel:
+
+```yaml
+detectors: # required
+  axengine: # required
+    type: axengine # required
+
+model: # required
+  path: frigate-yolov9-tiny # required
+  model_type: yolo-generic # required
+  width: 320 # required
+  height: 320 # required
+  tensor_format: bgr # required
+  labelmap_path: /labelmap/coco-80.txt # required
+```
+
 ## Rockchip platform

 Hardware accelerated object detection is supported on the following SoCs:
@@ -110,6 +110,14 @@ Frigate supports multiple different detectors that work on different types of ha
 | ssd mobilenet | ~ 25 ms  |
 | yolov5m       | ~ 118 ms |

+### AXERA
+
+- **AXEngine** Default model is **yolov9**
+
+| Name        | AXERA AX650N/AX8850N Inference Time |
+| ----------- | ----------------------------------- |
+| yolov9-tiny | ~ 4 ms                              |
+
 ### Hailo-8

 Frigate supports both the Hailo-8 and Hailo-8L AI Acceleration Modules on compatible hardware platforms—including the Raspberry Pi 5 with the PCIe hat from the AI kit. The Hailo detector integration in Frigate automatically identifies your hardware type and selects the appropriate default model when a custom model isn’t provided.
@@ -287,6 +287,40 @@ or add these options to your `docker run` command:

 Next, you should configure [hardware object detection](/configuration/object_detectors#synaptics) and [hardware video processing](/configuration/hardware_acceleration_video#synaptics).

+### AXERA
+
+AXERA accelerators are available in an M.2 form factor, compatible with both Raspberry Pi and Orange Pi. This form factor has also been successfully tested on x86 platforms, making it a versatile choice for various computing environments.
+
+#### Installation
+
+Using AXERA accelerators requires the installation of the AXCL driver. We provide a convenient Linux script to complete this installation.
+
+Follow these steps for installation:
+
+1. Copy or download [this script](https://github.com/ivanshi1108/assets/releases/download/v0.16.2/user_installation.sh).
+2. Ensure it has execution permissions with `sudo chmod +x user_installation.sh`
+3. Run the script with `./user_installation.sh`
+
+#### Setup
+
+To set up Frigate, follow the default installation instructions, for example: `ghcr.io/blakeblackshear/frigate:stable`
+
+Next, grant Docker permissions to access your hardware by adding the following lines to your `docker-compose.yml` file:
+
+```yaml
+devices:
+  - /dev/axcl_host
+  - /dev/ax_mmb_dev
+  - /dev/msg_userdev
+```
+
+If you are using `docker run`, add this option to your command `--device /dev/axcl_host --device /dev/ax_mmb_dev --device /dev/msg_userdev`
+
+#### Configuration
+
+Finally, configure [hardware object detection](/configuration/object_detectors#axera) to complete the setup.
+

 ## Docker

 Running through Docker with Docker Compose is the recommended install method.
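For docker run users, a complete command combining the documented device flags with a typical Frigate invocation could look like the sketch below. Only the image name and the --device flags come from the docs above; the volume mounts, shm size, and port mapping are illustrative defaults to adapt to your setup.

```bash
docker run -d \
  --name frigate \
  --restart=unless-stopped \
  --shm-size=512m \
  --device /dev/axcl_host \
  --device /dev/ax_mmb_dev \
  --device /dev/msg_userdev \
  -v /path/to/your/config:/config \
  -v /etc/localtime:/etc/localtime:ro \
  -p 8971:8971 \
  ghcr.io/blakeblackshear/frigate:stable
```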
frigate/detectors/plugins/axengine.py (new file, 92 lines)
@@ -0,0 +1,92 @@
import logging
import os.path
import re
import urllib.request
from typing import Literal

import cv2
import numpy as np
from pydantic import Field

from frigate.const import MODEL_CACHE_DIR
from frigate.detectors.detection_api import DetectionApi
from frigate.detectors.detector_config import BaseDetectorConfig, ModelTypeEnum
from frigate.util.model import post_process_yolo

import axengine as axe
from axengine import axclrt_provider_name, axengine_provider_name

logger = logging.getLogger(__name__)

DETECTOR_KEY = "axengine"

supported_models = {
    ModelTypeEnum.yologeneric: "frigate-yolov9-.*$",
}

model_cache_dir = os.path.join(MODEL_CACHE_DIR, "axengine_cache/")


class AxengineDetectorConfig(BaseDetectorConfig):
    type: Literal[DETECTOR_KEY]


class Axengine(DetectionApi):
    type_key = DETECTOR_KEY

    def __init__(self, config: AxengineDetectorConfig):
        logger.info("__init__ axengine")
        super().__init__(config)
        self.height = config.model.height
        self.width = config.model.width
        model_path = config.model.path or "frigate-yolov9-tiny"
        model_props = self.parse_model_input(model_path)
        self.session = axe.InferenceSession(model_props["path"])

    def __del__(self):
        pass

    def parse_model_input(self, model_path):
        model_props = {}
        model_props["preset"] = True

        model_matched = False

        for model_type, pattern in supported_models.items():
            if re.match(pattern, model_path):
                model_matched = True
                model_props["model_type"] = model_type

        if model_matched:
            model_props["filename"] = model_path + f".axmodel"
            model_props["path"] = model_cache_dir + model_props["filename"]

            if not os.path.isfile(model_props["path"]):
                self.download_model(model_props["filename"])
        else:
            supported_models_str = ", ".join(
                model[1:-1] for model in supported_models
            )
            raise Exception(
                f"Model {model_path} is unsupported. Provide your own model or choose one of the following: {supported_models_str}"
            )
        return model_props

    def download_model(self, filename):
        if not os.path.isdir(model_cache_dir):
            os.mkdir(model_cache_dir)

        GITHUB_ENDPOINT = os.environ.get("GITHUB_ENDPOINT", "https://github.com")
        urllib.request.urlretrieve(
            f"{GITHUB_ENDPOINT}/ivanshi1108/assets/releases/download/v0.16.2/{filename}",
            model_cache_dir + filename,
        )

    def detect_raw(self, tensor_input):
        results = None
        results = self.session.run(None, {"images": tensor_input})
        if self.detector_config.model.model_type == ModelTypeEnum.yologeneric:
            return post_process_yolo(results, self.width, self.height)
        else:
            raise ValueError(
                f'Model type "{self.detector_config.model.model_type}" is currently not supported.'
            )
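For hosts without outbound GitHub access, the model this plugin would fetch can be staged ahead of time. The cache path is derived from MODEL_CACHE_DIR, which normally resolves to /config/model_cache, and the URL pattern comes from download_model() above; treat both the path and the GITHUB_ENDPOINT override as assumptions to verify against your install.

```bash
# Pre-seed the axengine model cache so download_model() finds the file on disk
mkdir -p /config/model_cache/axengine_cache
wget -O /config/model_cache/axengine_cache/frigate-yolov9-tiny.axmodel \
  https://github.com/ivanshi1108/assets/releases/download/v0.16.2/frigate-yolov9-tiny.axmodel

# Alternatively, point the download at a mirror before starting Frigate
# export GITHUB_ENDPOINT=https://github-mirror.example.com
```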
@@ -362,7 +362,7 @@ def stats_snapshot(
         stats["embeddings"]["review_description_speed"] = round(
             embeddings_metrics.review_desc_speed.value * 1000, 2
         )
-        stats["embeddings"]["review_descriptions"] = round(
+        stats["embeddings"]["review_description_events_per_second"] = round(
             embeddings_metrics.review_desc_dps.value, 2
         )

@@ -370,7 +370,7 @@ def stats_snapshot(
         stats["embeddings"]["object_description_speed"] = round(
             embeddings_metrics.object_desc_speed.value * 1000, 2
         )
-        stats["embeddings"]["object_descriptions"] = round(
+        stats["embeddings"]["object_description_events_per_second"] = round(
            embeddings_metrics.object_desc_dps.value, 2
         )

@@ -378,7 +378,7 @@ def stats_snapshot(
         stats["embeddings"][f"{key}_classification_speed"] = round(
             embeddings_metrics.classification_speeds[key].value * 1000, 2
         )
-        stats["embeddings"][f"{key}_classification"] = round(
+        stats["embeddings"][f"{key}_classification_events_per_second"] = round(
             embeddings_metrics.classification_cps[key].value, 2
         )
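Since the renamed keys surface through the stats endpoint, any dashboard or automation keyed on the old names needs updating. A quick check against a running instance might look like this; the host and port are placeholders and jq is optional.

```bash
# Expect *_events_per_second keys alongside the existing *_speed keys
curl -s http://frigate.local:5000/api/stats | jq '.embeddings'
```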
@@ -177,6 +177,10 @@
   "noCameras": {
     "title": "No Cameras Configured",
     "description": "Get started by connecting a camera to Frigate.",
-    "buttonText": "Add Camera"
+    "buttonText": "Add Camera",
+    "restricted": {
+      "title": "No Cameras Available",
+      "description": "You don't have permission to view any cameras in this group."
+    }
   }
 }
@@ -169,6 +169,7 @@
   "enrichments": {
     "title": "Enrichments",
+    "infPerSecond": "Inferences Per Second",
     "averageInf": "Average Inference Time",
     "embeddings": {
       "image_embedding": "Image Embedding",
       "text_embedding": "Text Embedding",

@@ -180,7 +181,13 @@
       "plate_recognition_speed": "Plate Recognition Speed",
       "text_embedding_speed": "Text Embedding Speed",
       "yolov9_plate_detection_speed": "YOLOv9 Plate Detection Speed",
-      "yolov9_plate_detection": "YOLOv9 Plate Detection"
+      "yolov9_plate_detection": "YOLOv9 Plate Detection",
+      "review_description": "Review Description",
+      "review_description_speed": "Review Description Speed",
+      "review_description_events_per_second": "Review Description",
+      "object_description": "Object Description",
+      "object_description_speed": "Object Description Speed",
+      "object_description_events_per_second": "Object Description"
     }
   }
 }
@@ -9,7 +9,7 @@ import useSWR from "swr";
 import { MdHome } from "react-icons/md";
 import { usePersistedOverlayState } from "@/hooks/use-overlay-state";
 import { Button, buttonVariants } from "../ui/button";
-import { useCallback, useMemo, useState } from "react";
+import { useCallback, useEffect, useMemo, useState } from "react";
 import { Tooltip, TooltipContent, TooltipTrigger } from "../ui/tooltip";
 import { LuPencil, LuPlus } from "react-icons/lu";
 import {

@@ -87,6 +87,8 @@ type CameraGroupSelectorProps = {
 export function CameraGroupSelector({ className }: CameraGroupSelectorProps) {
   const { t } = useTranslation(["components/camera"]);
   const { data: config } = useSWR<FrigateConfig>("config");
+  const allowedCameras = useAllowedCameras();
+  const isCustomRole = useIsCustomRole();

   // tooltip

@@ -119,10 +121,22 @@ export function CameraGroupSelector({ className }: CameraGroupSelectorProps) {
       return [];
     }

-    return Object.entries(config.camera_groups).sort(
-      (a, b) => a[1].order - b[1].order,
-    );
-  }, [config]);
+    const allGroups = Object.entries(config.camera_groups);
+
+    // If custom role, filter out groups where user has no accessible cameras
+    if (isCustomRole) {
+      return allGroups
+        .filter(([, groupConfig]) => {
+          // Check if user has access to at least one camera in this group
+          return groupConfig.cameras.some((cameraName) =>
+            allowedCameras.includes(cameraName),
+          );
+        })
+        .sort((a, b) => a[1].order - b[1].order);
+    }
+
+    return allGroups.sort((a, b) => a[1].order - b[1].order);
+  }, [config, allowedCameras, isCustomRole]);

   // add group

@@ -139,6 +153,7 @@ export function CameraGroupSelector({ className }: CameraGroupSelectorProps) {
         activeGroup={group}
         setGroup={setGroup}
         deleteGroup={deleteGroup}
+        isCustomRole={isCustomRole}
       />
       <Scroller className={`${isMobile ? "whitespace-nowrap" : ""}`}>
         <div

@@ -206,14 +221,16 @@ export function CameraGroupSelector({ className }: CameraGroupSelectorProps) {
             );
           })}

-          <Button
-            className="bg-secondary text-muted-foreground"
-            aria-label={t("group.add")}
-            size="xs"
-            onClick={() => setAddGroup(true)}
-          >
-            <LuPlus className="size-4 text-primary" />
-          </Button>
+          {!isCustomRole && (
+            <Button
+              className="bg-secondary text-muted-foreground"
+              aria-label={t("group.add")}
+              size="xs"
+              onClick={() => setAddGroup(true)}
+            >
+              <LuPlus className="size-4 text-primary" />
+            </Button>
+          )}
           {isMobile && <ScrollBar orientation="horizontal" className="h-0" />}
         </div>
       </Scroller>

@@ -228,6 +245,7 @@ type NewGroupDialogProps = {
   activeGroup?: string;
   setGroup: (value: string | undefined, replace?: boolean | undefined) => void;
   deleteGroup: () => void;
+  isCustomRole?: boolean;
 };
 function NewGroupDialog({
   open,

@@ -236,6 +254,7 @@ function NewGroupDialog({
   activeGroup,
   setGroup,
   deleteGroup,
+  isCustomRole,
 }: NewGroupDialogProps) {
   const { t } = useTranslation(["components/camera"]);
   const { mutate: updateConfig } = useSWR<FrigateConfig>("config");

@@ -261,6 +280,12 @@ function NewGroupDialog({
     `${activeGroup}-draggable-layout`,
   );

+  useEffect(() => {
+    if (!open) {
+      setEditState("none");
+    }
+  }, [open]);
+
   // callbacks

   const onDeleteGroup = useCallback(

@@ -349,13 +374,7 @@ function NewGroupDialog({
         position="top-center"
         closeButton={true}
       />
-      <Overlay
-        open={open}
-        onOpenChange={(open) => {
-          setEditState("none");
-          setOpen(open);
-        }}
-      >
+      <Overlay open={open} onOpenChange={setOpen}>
         <Content
           className={cn(
             "scrollbar-container overflow-y-auto",

@@ -371,28 +390,30 @@ function NewGroupDialog({
         >
           <Title>{t("group.label")}</Title>
           <Description className="sr-only">{t("group.edit")}</Description>
-          <div
-            className={cn(
-              "absolute",
-              isDesktop && "right-6 top-10",
-              isMobile && "absolute right-0 top-4",
-            )}
-          >
-            <Button
-              size="sm"
-              className={cn(
-                isDesktop &&
-                  "size-6 rounded-md bg-secondary-foreground p-1 text-background",
-                isMobile && "text-secondary-foreground",
-              )}
-              aria-label={t("group.add")}
-              onClick={() => {
-                setEditState("add");
-              }}
-            >
-              <LuPlus />
-            </Button>
-          </div>
+          {!isCustomRole && (
+            <div
+              className={cn(
+                "absolute",
+                isDesktop && "right-6 top-10",
+                isMobile && "absolute right-0 top-4",
+              )}
+            >
+              <Button
+                size="sm"
+                className={cn(
+                  isDesktop &&
+                    "size-6 rounded-md bg-secondary-foreground p-1 text-background",
+                  isMobile && "text-secondary-foreground",
+                )}
+                aria-label={t("group.add")}
+                onClick={() => {
+                  setEditState("add");
+                }}
+              >
+                <LuPlus />
+              </Button>
+            </div>
+          )}
         </Header>
         <div className="flex flex-col gap-4 md:gap-3">
           {currentGroups.map((group) => (

@@ -401,6 +422,7 @@ function NewGroupDialog({
             group={group}
             onDeleteGroup={() => onDeleteGroup(group[0])}
             onEditGroup={() => onEditGroup(group)}
+            isReadOnly={isCustomRole}
           />
         ))}
       </div>

@@ -512,12 +534,14 @@ type CameraGroupRowProps = {
   group: [string, CameraGroupConfig];
   onDeleteGroup: () => void;
   onEditGroup: () => void;
+  isReadOnly?: boolean;
 };

 export function CameraGroupRow({
   group,
   onDeleteGroup,
   onEditGroup,
+  isReadOnly,
 }: CameraGroupRowProps) {
   const { t } = useTranslation(["components/camera"]);
   const [deleteDialogOpen, setDeleteDialogOpen] = useState(false);

@@ -564,7 +588,7 @@ export function CameraGroupRow({
         </AlertDialogContent>
       </AlertDialog>

-      {isMobile && (
+      {isMobile && !isReadOnly && (
         <>
           <DropdownMenu modal={!isDesktop}>
             <DropdownMenuTrigger>

@@ -589,7 +613,7 @@ export function CameraGroupRow({
           </DropdownMenu>
         </>
       )}
-      {!isMobile && (
+      {!isMobile && !isReadOnly && (
         <div className="flex flex-row items-center gap-2">
           <Tooltip>
             <TooltipTrigger asChild>
@@ -807,6 +807,15 @@ function ObjectDetailsTab({
     }
   }, [search]);

+  const isEventsKey = useCallback((key: unknown): boolean => {
+    const candidate = Array.isArray(key) ? key[0] : key;
+    const EVENTS_KEY_PATTERNS = ["events", "events/search", "events/explore"];
+    return (
+      typeof candidate === "string" &&
+      EVENTS_KEY_PATTERNS.some((p) => candidate.includes(p))
+    );
+  }, []);
+
   const updateDescription = useCallback(() => {
     if (!search) {
       return;

@@ -821,11 +830,7 @@ function ObjectDetailsTab({
       });
     }
     mutate(
-      (key) =>
-        typeof key === "string" &&
-        (key.includes("events") ||
-          key.includes("events/search") ||
-          key.includes("events/explore")),
+      (key) => isEventsKey(key),
       (currentData: SearchResult[][] | SearchResult[] | undefined) =>
         mapSearchResults(currentData, (event) =>
           event.id === search.id

@@ -838,6 +843,7 @@ function ObjectDetailsTab({
             revalidate: false,
           },
         );
+        setSearch({ ...search, data: { ...search.data, description: desc } });
       })
       .catch((error) => {
         const errorMessage =

@@ -854,7 +860,7 @@ function ObjectDetailsTab({
         );
         setDesc(search.data.description);
       });
-  }, [desc, search, mutate, t, mapSearchResults]);
+  }, [desc, search, mutate, t, mapSearchResults, isEventsKey, setSearch]);

   const regenerateDescription = useCallback(
     (source: "snapshot" | "thumbnails") => {

@@ -921,11 +927,7 @@ function ObjectDetailsTab({
       });

       mutate(
-        (key) =>
-          typeof key === "string" &&
-          (key.includes("events") ||
-            key.includes("events/search") ||
-            key.includes("events/explore")),
+        (key) => isEventsKey(key),
         (currentData: SearchResult[][] | SearchResult[] | undefined) =>
           mapSearchResults(currentData, (event) =>
             event.id === search.id

@@ -972,7 +974,7 @@ function ObjectDetailsTab({
           );
         });
     },
-    [search, apiHost, mutate, setSearch, t, mapSearchResults],
+    [search, apiHost, mutate, setSearch, t, mapSearchResults, isEventsKey],
   );

   // recognized plate

@@ -996,11 +998,7 @@ function ObjectDetailsTab({
       });

       mutate(
-        (key) =>
-          typeof key === "string" &&
-          (key.includes("events") ||
-            key.includes("events/search") ||
-            key.includes("events/explore")),
+        (key) => isEventsKey(key),
         (currentData: SearchResult[][] | SearchResult[] | undefined) =>
           mapSearchResults(currentData, (event) =>
             event.id === search.id

@@ -1047,7 +1045,7 @@ function ObjectDetailsTab({
           );
         });
     },
-    [search, apiHost, mutate, setSearch, t, mapSearchResults],
+    [search, apiHost, mutate, setSearch, t, mapSearchResults, isEventsKey],
   );

   // speech transcription

@@ -1103,12 +1101,9 @@ function ObjectDetailsTab({
      });

      setState("submitted");
      setSearch({ ...search, plus_id: "new_upload" });
      mutate(
-       (key) =>
-         typeof key === "string" &&
-         (key.includes("events") ||
-           key.includes("events/search") ||
-           key.includes("events/explore")),
+       (key) => isEventsKey(key),
        (currentData: SearchResult[][] | SearchResult[] | undefined) =>
          mapSearchResults(currentData, (event) =>
            event.id === search.id

@@ -1122,7 +1117,7 @@ function ObjectDetailsTab({
         },
       );
     },
-    [search, mutate, mapSearchResults],
+    [search, mutate, mapSearchResults, setSearch, isEventsKey],
   );

   const popoverContainerRef = useRef<HTMLDivElement | null>(null);
@@ -6,51 +6,199 @@ import {
   DialogTitle,
 } from "@/components/ui/dialog";
 import { Event } from "@/types/event";
-import { isDesktop, isMobile } from "react-device-detect";
-import { ObjectSnapshotTab } from "../detail/SearchDetailDialog";
+import { isDesktop, isMobile, isSafari } from "react-device-detect";
 import { cn } from "@/lib/utils";
+import { useCallback, useEffect, useState } from "react";
+import axios from "axios";
+import { useTranslation, Trans } from "react-i18next";
+import { Button } from "@/components/ui/button";
+import ActivityIndicator from "@/components/indicators/activity-indicator";
+import { FaCheckCircle } from "react-icons/fa";
+import { Card, CardContent } from "@/components/ui/card";
+import { TransformComponent, TransformWrapper } from "react-zoom-pan-pinch";
+import ImageLoadingIndicator from "@/components/indicators/ImageLoadingIndicator";
+import { baseUrl } from "@/api/baseUrl";
+import { getTranslatedLabel } from "@/utils/i18n";
+import useImageLoaded from "@/hooks/use-image-loaded";

-type FrigatePlusDialogProps = {
+export type FrigatePlusDialogProps = {
   upload?: Event;
   dialog?: boolean;
   onClose: () => void;
   onEventUploaded: () => void;
 };

 export function FrigatePlusDialog({
   upload,
   dialog = true,
   onClose,
   onEventUploaded,
 }: FrigatePlusDialogProps) {
-  if (!upload) {
-    return;
-  }
-  if (dialog) {
-    return (
-      <Dialog
-        open={upload != undefined}
-        onOpenChange={(open) => (!open ? onClose() : null)}
-      >
-        <DialogContent
-          className={cn(
-            "scrollbar-container overflow-y-auto",
-            isDesktop &&
-              "max-h-[95dvh] sm:max-w-xl md:max-w-4xl lg:max-w-4xl xl:max-w-7xl",
-            isMobile && "px-4",
-          )}
-        >
-          <DialogHeader>
-            <DialogTitle className="sr-only">Submit to Frigate+</DialogTitle>
-            <DialogDescription className="sr-only">
-              Submit this snapshot to Frigate+
-            </DialogDescription>
-          </DialogHeader>
-          <ObjectSnapshotTab
-            search={upload}
-            onEventUploaded={onEventUploaded}
-          />
-        </DialogContent>
-      </Dialog>
-    );
-  }
+  const { t, i18n } = useTranslation(["components/dialog"]);
+
+  type SubmissionState = "reviewing" | "uploading" | "submitted";
+  const [state, setState] = useState<SubmissionState>(
+    upload?.plus_id ? "submitted" : "reviewing",
+  );
+  useEffect(() => {
+    setState(upload?.plus_id ? "submitted" : "reviewing");
+  }, [upload?.plus_id]);
+
+  const onSubmitToPlus = useCallback(
+    async (falsePositive: boolean) => {
+      if (!upload) return;
+      falsePositive
+        ? axios.put(`events/${upload.id}/false_positive`)
+        : axios.post(`events/${upload.id}/plus`, { include_annotation: 1 });
+      setState("submitted");
+      onEventUploaded();
+    },
+    [upload, onEventUploaded],
+  );
+
+  const [imgRef, imgLoaded, onImgLoad] = useImageLoaded();
+  const showCard =
+    !!upload &&
+    upload.data.type === "object" &&
+    upload.plus_id !== "not_enabled" &&
+    upload.end_time &&
+    upload.label !== "on_demand";
+
+  if (!dialog || !upload) return null;
+
+  return (
+    <Dialog open={true} onOpenChange={(open) => (!open ? onClose() : null)}>
+      <DialogContent
+        className={cn(
+          "scrollbar-container overflow-y-auto",
+          isDesktop &&
+            "max-h-[95dvh] sm:max-w-xl md:max-w-4xl lg:max-w-4xl xl:max-w-7xl",
+          isMobile && "px-4",
+        )}
+      >
+        <DialogHeader>
+          <DialogTitle className="sr-only">Submit to Frigate+</DialogTitle>
+          <DialogDescription className="sr-only">
+            Submit this snapshot to Frigate+
+          </DialogDescription>
+        </DialogHeader>
+
+        <div className="relative size-full">
+          <ImageLoadingIndicator
+            className="absolute inset-0 aspect-video min-h-[60dvh] w-full"
+            imgLoaded={imgLoaded}
+          />
+          <div className={imgLoaded ? "visible" : "invisible"}>
+            <TransformWrapper minScale={1.0} wheel={{ smoothStep: 0.005 }}>
+              <div className="flex flex-col space-y-3">
+                <TransformComponent
+                  wrapperStyle={{ width: "100%", height: "100%" }}
+                  contentStyle={{
+                    position: "relative",
+                    width: "100%",
+                    height: "100%",
+                  }}
+                >
+                  {upload.id && (
+                    <div className="relative mx-auto">
+                      <img
+                        ref={imgRef}
+                        className="mx-auto max-h-[60dvh] rounded-lg bg-black object-contain"
+                        src={`${baseUrl}api/events/${upload.id}/snapshot.jpg`}
+                        alt={`${upload.label}`}
+                        loading={isSafari ? "eager" : "lazy"}
+                        onLoad={onImgLoad}
+                      />
+                    </div>
+                  )}
+                </TransformComponent>
+
+                {showCard && (
+                  <Card className="p-1 text-sm md:p-2">
+                    <CardContent className="flex flex-col items-center justify-between gap-3 p-2 md:flex-row">
+                      <div className="flex flex-col space-y-3">
+                        <div className="text-lg leading-none">
+                          {t("explore.plus.submitToPlus.label")}
+                        </div>
+                        <div className="text-sm text-muted-foreground">
+                          {t("explore.plus.submitToPlus.desc")}
+                        </div>
+                      </div>
+                      <div className="flex w-full flex-1 flex-col justify-center gap-2 md:ml-8 md:w-auto md:justify-end">
+                        {state === "reviewing" && (
+                          <>
+                            <div>
+                              {i18n.language === "en" ? (
+                                /^[aeiou]/i.test(upload.label || "") ? (
+                                  <Trans
+                                    ns="components/dialog"
+                                    values={{ label: upload.label }}
+                                  >
+                                    explore.plus.review.question.ask_an
+                                  </Trans>
+                                ) : (
+                                  <Trans
+                                    ns="components/dialog"
+                                    values={{ label: upload.label }}
+                                  >
+                                    explore.plus.review.question.ask_a
+                                  </Trans>
+                                )
+                              ) : (
+                                <Trans
+                                  ns="components/dialog"
+                                  values={{
+                                    untranslatedLabel: upload.label,
+                                    translatedLabel: getTranslatedLabel(
+                                      upload.label,
+                                    ),
+                                  }}
+                                >
+                                  explore.plus.review.question.ask_full
+                                </Trans>
+                              )}
+                            </div>
+                            <div className="flex w-full flex-row gap-2">
+                              <Button
+                                className="flex-1 bg-success"
+                                aria-label={t("button.yes", { ns: "common" })}
+                                onClick={() => {
+                                  setState("uploading");
+                                  onSubmitToPlus(false);
+                                }}
+                              >
+                                {t("button.yes", { ns: "common" })}
+                              </Button>
+                              <Button
+                                className="flex-1 text-white"
+                                aria-label={t("button.no", { ns: "common" })}
+                                variant="destructive"
+                                onClick={() => {
+                                  setState("uploading");
+                                  onSubmitToPlus(true);
+                                }}
+                              >
+                                {t("button.no", { ns: "common" })}
+                              </Button>
+                            </div>
+                          </>
+                        )}
+                        {state === "uploading" && <ActivityIndicator />}
+                        {state === "submitted" && (
+                          <div className="flex flex-row items-center justify-center gap-2">
+                            <FaCheckCircle className="size-4 text-success" />
+                            {t("explore.plus.review.state.submitted")}
+                          </div>
+                        )}
+                      </div>
+                    </CardContent>
+                  </Card>
+                )}
+              </div>
+            </TransformWrapper>
+          </div>
+        </div>
+      </DialogContent>
+    </Dialog>
+  );
 }
@@ -6,7 +6,7 @@ import {
   useState,
 } from "react";
 import Hls from "hls.js";
-import { isAndroid, isDesktop, isMobile } from "react-device-detect";
+import { isDesktop, isMobile } from "react-device-detect";
 import { TransformComponent, TransformWrapper } from "react-zoom-pan-pinch";
 import VideoControls from "./VideoControls";
 import { VideoResolutionType } from "@/types/live";

@@ -22,7 +22,7 @@ import { useTranslation } from "react-i18next";
 import ObjectTrackOverlay from "@/components/overlay/ObjectTrackOverlay";

 // Android native hls does not seek correctly
-const USE_NATIVE_HLS = !isAndroid;
+const USE_NATIVE_HLS = false;
 const HLS_MIME_TYPE = "application/vnd.apple.mpegurl" as const;
 const unsupportedErrorCodes = [
   MediaError.MEDIA_ERR_SRC_NOT_SUPPORTED,

@@ -111,7 +111,7 @@ export default function DynamicVideoPlayer({
   const [loadingTimeout, setLoadingTimeout] = useState<NodeJS.Timeout>();
   const [source, setSource] = useState<HlsSource>({
     playlist: `${apiHost}vod/${camera}/start/${timeRange.after}/end/${timeRange.before}/master.m3u8`,
-    startPosition: startTimestamp ? timeRange.after - startTimestamp : 0,
+    startPosition: startTimestamp ? startTimestamp - timeRange.after : 0,
   });

   // start at correct time
@@ -377,7 +377,7 @@ export default function Step1NameCamera({
   );
   return selectedBrand &&
     selectedBrand.value != "other" ? (
-    <Popover>
+    <Popover modal={true}>
       <PopoverTrigger asChild>
         <Button
           variant="ghost"

@@ -600,7 +600,7 @@ export default function Step3StreamConfig({
               <Label className="text-sm font-medium text-primary-variant">
                 {t("cameraWizard.step3.roles")}
               </Label>
-              <Popover>
+              <Popover modal={true}>
                 <PopoverTrigger asChild>
                   <Button variant="ghost" size="sm" className="h-4 w-4 p-0">
                     <LuInfo className="size-3" />

@@ -670,7 +670,7 @@ export default function Step3StreamConfig({
               <Label className="text-sm font-medium text-primary-variant">
                 {t("cameraWizard.step3.featuresTitle")}
               </Label>
-              <Popover>
+              <Popover modal={true}>
                 <PopoverTrigger asChild>
                   <Button variant="ghost" size="sm" className="h-4 w-4 p-0">
                     <LuInfo className="size-3" />
@@ -93,19 +93,23 @@ function Live() {
   const allowedCameras = useAllowedCameras();

   const includesBirdseye = useMemo(() => {
+    // Restricted users should never have access to birdseye
+    if (isCustomRole) {
+      return false;
+    }
+
     if (
       config &&
       Object.keys(config.camera_groups).length &&
       cameraGroup &&
       config.camera_groups[cameraGroup] &&
-      cameraGroup != "default" &&
-      (!isCustomRole || "birdseye" in allowedCameras)
+      cameraGroup != "default"
     ) {
       return config.camera_groups[cameraGroup].cameras.includes("birdseye");
     } else {
       return false;
     }
-  }, [config, cameraGroup, allowedCameras, isCustomRole]);
+  }, [config, cameraGroup, isCustomRole]);

   const cameras = useMemo(() => {
     if (!config) {
@@ -39,6 +39,7 @@ import {
   AlertDialogTitle,
 } from "@/components/ui/alert-dialog";
 import BlurredIconButton from "@/components/button/BlurredIconButton";
+import { Skeleton } from "@/components/ui/skeleton";

 const allModelTypes = ["objects", "states"] as const;
 type ModelType = (typeof allModelTypes)[number];

@@ -332,9 +333,7 @@ function ModelCard({ config, onClick, onUpdate, onDelete }: ModelCardProps) {
           <ImageShadowOverlay lowerClassName="h-[30%] z-0" />
         </>
       ) : (
-        <div className="flex size-full items-center justify-center bg-background_alt">
-          <MdModelTraining className="size-16 text-muted-foreground" />
-        </div>
+        <Skeleton className="flex size-full items-center justify-center" />
       )}
       <div className="absolute bottom-2 left-3 text-lg text-white smart-capitalize">
         {config.name}
@@ -20,7 +20,14 @@
   FrigateConfig,
 } from "@/types/frigateConfig";
 import { ReviewSegment } from "@/types/review";
-import { useCallback, useEffect, useMemo, useRef, useState } from "react";
+import {
+  useCallback,
+  useContext,
+  useEffect,
+  useMemo,
+  useRef,
+  useState,
+} from "react";
 import {
   isDesktop,
   isMobile,

@@ -46,6 +53,8 @@ import { useStreamingSettings } from "@/context/streaming-settings-provider";
 import { useTranslation } from "react-i18next";
 import { EmptyCard } from "@/components/card/EmptyCard";
 import { BsFillCameraVideoOffFill } from "react-icons/bs";
+import { AuthContext } from "@/context/auth-context";
+import { useIsCustomRole } from "@/hooks/use-is-custom-role";

 type LiveDashboardViewProps = {
   cameras: CameraConfig[];

@@ -374,10 +383,6 @@ export default function LiveDashboardView({
     onSaveMuting(true);
   };

-  if (cameras.length == 0 && !includeBirdseye) {
-    return <NoCameraView />;
-  }
-
   return (
     <div
       className="scrollbar-container size-full select-none overflow-y-auto px-1 pt-2 md:p-2"
@@ -439,198 +444,215 @@ export default function LiveDashboardView({
         </div>
       )}

+      {cameras.length == 0 && !includeBirdseye ? (
+        <NoCameraView />
+      ) : (
+        <>
           {!fullscreen && events && events.length > 0 && (
             <ScrollArea>
               <TooltipProvider>
                 <div className="flex items-center gap-2 px-1">
                   {events.map((event) => {
                     return (
                       <AnimatedEventCard
                         key={event.id}
                         event={event}
                         selectedGroup={cameraGroup}
                         updateEvents={updateEvents}
                       />
                     );
                   })}
                 </div>
               </TooltipProvider>
               <ScrollBar orientation="horizontal" />
             </ScrollArea>
           )}

           {!cameraGroup || cameraGroup == "default" || isMobileOnly ? (
             <>
               <div
                 className={cn(
                   "mt-2 grid grid-cols-1 gap-2 px-2 md:gap-4",
                   mobileLayout == "grid" &&
                     "grid-cols-2 xl:grid-cols-3 3xl:grid-cols-4",
                   isMobile && "px-0",
                 )}
               >
                 {includeBirdseye && birdseyeConfig?.enabled && (
                   <div
                     className={(() => {
                       const aspectRatio =
                         birdseyeConfig.width / birdseyeConfig.height;
                       if (aspectRatio > 2) {
                         return `${mobileLayout == "grid" && "col-span-2"} aspect-wide`;
                       } else if (aspectRatio < 1) {
                         return `${mobileLayout == "grid" && "row-span-2 h-full"} aspect-tall`;
                       } else {
                         return "aspect-video";
                       }
                     })()}
                     ref={birdseyeContainerRef}
                   >
                     <BirdseyeLivePlayer
                       birdseyeConfig={birdseyeConfig}
                       liveMode={birdseyeConfig.restream ? "mse" : "jsmpeg"}
                       onClick={() => onSelectCamera("birdseye")}
                       containerRef={birdseyeContainerRef}
                     />
                   </div>
                 )}
                 {cameras.map((camera) => {
                   let grow;
                   const aspectRatio =
                     camera.detect.width / camera.detect.height;
                   if (aspectRatio > 2) {
                     grow = `${mobileLayout == "grid" && "col-span-2"} aspect-wide`;
                   } else if (aspectRatio < 1) {
                     grow = `${mobileLayout == "grid" && "row-span-2 h-full"} aspect-tall`;
                   } else {
                     grow = "aspect-video";
                   }
                   const availableStreams = camera.live.streams || {};
                   const firstStreamEntry =
                     Object.values(availableStreams)[0] || "";

                   const streamNameFromSettings =
                     currentGroupStreamingSettings?.[camera.name]?.streamName ||
                     "";
                   const streamExists =
                     streamNameFromSettings &&
                     Object.values(availableStreams).includes(
                       streamNameFromSettings,
                     );

                   const streamName = streamExists
                     ? streamNameFromSettings
                     : firstStreamEntry;
                   const streamType =
                     currentGroupStreamingSettings?.[camera.name]?.streamType;
                   const autoLive =
                     streamType !== undefined
                       ? streamType !== "no-streaming"
                       : undefined;
                   const showStillWithoutActivity =
                     currentGroupStreamingSettings?.[camera.name]?.streamType !==
                     "continuous";
                   const useWebGL =
                     currentGroupStreamingSettings?.[camera.name]
                       ?.compatibilityMode || false;
                   return (
                     <LiveContextMenu
                       className={grow}
                       key={camera.name}
                       camera={camera.name}
                       cameraGroup={cameraGroup}
                       streamName={streamName}
                       preferredLiveMode={
                         preferredLiveModes[camera.name] ?? "mse"
                       }
                       isRestreamed={isRestreamedStates[camera.name]}
                       supportsAudio={
                         supportsAudioOutputStates[streamName]?.supportsAudio ??
                         false
                       }
                       audioState={audioStates[camera.name]}
                       toggleAudio={() => toggleAudio(camera.name)}
                       statsState={statsStates[camera.name]}
                       toggleStats={() => toggleStats(camera.name)}
                       volumeState={volumeStates[camera.name] ?? 1}
                       setVolumeState={(value) =>
                         setVolumeStates({
                           [camera.name]: value,
                         })
                       }
                       muteAll={muteAll}
                       unmuteAll={unmuteAll}
                       resetPreferredLiveMode={() =>
                         resetPreferredLiveMode(camera.name)
                       }
                       config={config}
                     >
                       <LivePlayer
                         cameraRef={cameraRef}
                         key={camera.name}
                         className={`${grow} rounded-lg bg-black md:rounded-2xl`}
                         windowVisible={
                           windowVisible && visibleCameras.includes(camera.name)
                         }
                         cameraConfig={camera}
                         preferredLiveMode={
                           preferredLiveModes[camera.name] ?? "mse"
                         }
                         autoLive={autoLive ?? globalAutoLive}
                         showStillWithoutActivity={
                           showStillWithoutActivity ?? true
                         }
                         alwaysShowCameraName={displayCameraNames}
                         useWebGL={useWebGL}
                         playInBackground={false}
                         showStats={statsStates[camera.name]}
                         streamName={streamName}
                         onClick={() => onSelectCamera(camera.name)}
                         onError={(e) => handleError(camera.name, e)}
                         onResetLiveMode={() =>
                           resetPreferredLiveMode(camera.name)
                         }
                         playAudio={audioStates[camera.name] ?? false}
                         volume={volumeStates[camera.name]}
                       />
                     </LiveContextMenu>
                   );
                 })}
               </div>
               {isDesktop && (
                 <div
                   className={cn(
                     "fixed",
                     isDesktop && "bottom-12 lg:bottom-9",
                     isMobile && "bottom-12 lg:bottom-16",
                     hasScrollbar && isDesktop ? "right-6" : "right-3",
                     "z-50 flex flex-row gap-2",
                   )}
                 >
                   <Tooltip>
                     <TooltipTrigger asChild>
                       <div
                         className="cursor-pointer rounded-lg bg-secondary text-secondary-foreground opacity-60 transition-all duration-300 hover:bg-muted hover:opacity-100"
                         onClick={toggleFullscreen}
                       >
                         {fullscreen ? (
                           <FaCompress className="size-5 md:m-[6px]" />
                         ) : (
                           <FaExpand className="size-5 md:m-[6px]" />
                         )}
                       </div>
                     </TooltipTrigger>
                     <TooltipContent>
                       {fullscreen
                         ? t("button.exitFullscreen", { ns: "common" })
                         : t("button.fullscreen", { ns: "common" })}
                     </TooltipContent>
                   </Tooltip>
                 </div>
               )}
             </>
           ) : (
             <DraggableGridLayout
               cameras={cameras}
               cameraGroup={cameraGroup}
               containerRef={containerRef}
               cameraRef={cameraRef}
               includeBirdseye={includeBirdseye}
               onSelectCamera={onSelectCamera}
               windowVisible={windowVisible}
               visibleCameras={visibleCameras}
               isEditMode={isEditMode}
               setIsEditMode={setIsEditMode}
               fullscreen={fullscreen}
               toggleFullscreen={toggleFullscreen}
             />
           )}
+        </>
+      )}
     </div>
   );
@ -638,15 +660,26 @@ export default function LiveDashboardView({

function NoCameraView() {
  const { t } = useTranslation(["views/live"]);
  const { auth } = useContext(AuthContext);
  const isCustomRole = useIsCustomRole();

  // Check if this is a restricted user with no cameras in this group
  const isRestricted = isCustomRole && auth.isAuthenticated;

  return (
    <div className="flex size-full items-center justify-center">
      <EmptyCard
        icon={<BsFillCameraVideoOffFill className="size-8" />}
        title={t("noCameras.title")}
        description={t("noCameras.description")}
        buttonText={t("noCameras.buttonText")}
        link="/settings?page=cameraManagement"
        title={
          isRestricted ? t("noCameras.restricted.title") : t("noCameras.title")
        }
        description={
          isRestricted
            ? t("noCameras.restricted.description")
            : t("noCameras.description")
        }
        buttonText={!isRestricted ? t("noCameras.buttonText") : undefined}
        link={!isRestricted ? "/settings?page=cameraManagement" : undefined}
      />
    </div>
  );

@ -198,9 +198,9 @@ export default function TriggerView({

    return axios
      .put("config/set", configBody)
      .then((configResponse) => {
      .then(async (configResponse) => {
        if (configResponse.status === 200) {
          updateConfig();
          await updateConfig();
          const displayName =
            friendly_name && friendly_name !== ""
              ? `${friendly_name} (${name})`

@ -353,9 +353,9 @@ export default function TriggerView({

    return axios
      .put("config/set", configBody)
      .then((configResponse) => {
      .then(async (configResponse) => {
        if (configResponse.status === 200) {
          updateConfig();
          await updateConfig();
          const friendly =
            config?.cameras?.[selectedCamera]?.semantic_search
              ?.triggers?.[name]?.friendly_name;

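The two TriggerView hunks above switch the save handler to an async `.then` callback and `await updateConfig()`, so the code that runs after a successful save sees the refreshed config rather than a stale one. A minimal standalone sketch of that pattern, assuming an axios client and hypothetical `updateConfig`/`onSaved` callbacks (not the actual Frigate helpers):

import axios from "axios";

// Illustrative sketch only (not the Frigate implementation): the async-then
// pattern the hunks above adopt, so work after a successful save waits for
// the config refresh before continuing.
function saveAndRefresh(
  configBody: Record<string, unknown>,
  updateConfig: () => Promise<void>, // hypothetical config revalidation
  onSaved: () => void, // hypothetical follow-up, e.g. a success toast
): Promise<void> {
  return axios.put("config/set", configBody).then(async (configResponse) => {
    if (configResponse.status === 200) {
      // Without this await, onSaved() could observe a stale config.
      await updateConfig();
      onSaved();
    }
  });
}
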
@ -67,13 +67,14 @@ export default function EnrichmentMetrics({

  // features stats

  const embeddingInferenceTimeSeries = useMemo(() => {
  const groupedEnrichmentMetrics = useMemo(() => {
    if (!statsHistory) {
      return [];
    }

    const series: {
      [key: string]: {
        rawKey: string;
        name: string;
        metrics: Threshold;
        data: { x: number; y: number }[];

@ -90,6 +91,7 @@ export default function EnrichmentMetrics({

        if (!(key in series)) {
          series[key] = {
            rawKey,
            name: t("enrichments.embeddings." + rawKey),
            metrics: getThreshold(rawKey),
            data: [],

@ -99,7 +101,57 @@ export default function EnrichmentMetrics({
        series[key].data.push({ x: statsIdx + 1, y: stat });
      });
    });
    return Object.values(series);

    // Group series by category (extract base name from raw key)
    const grouped: {
      [category: string]: {
        categoryName: string;
        speedSeries?: {
          name: string;
          metrics: Threshold;
          data: { x: number; y: number }[];
        };
        eventsSeries?: {
          name: string;
          metrics: Threshold;
          data: { x: number; y: number }[];
        };
      };
    } = {};

    Object.values(series).forEach((s) => {
      // Extract base category name from raw key
      // All metrics follow the pattern: {base}_speed and {base}_events_per_second
      let categoryKey = s.rawKey;
      let isSpeed = false;

      if (s.rawKey.endsWith("_speed")) {
        categoryKey = s.rawKey.replace("_speed", "");
        isSpeed = true;
      } else if (s.rawKey.endsWith("_events_per_second")) {
        categoryKey = s.rawKey.replace("_events_per_second", "");
        isSpeed = false;
      }

      // Get translated category name
      const categoryName = t("enrichments.embeddings." + categoryKey);

      if (!(categoryKey in grouped)) {
        grouped[categoryKey] = {
          categoryName,
          speedSeries: undefined,
          eventsSeries: undefined,
        };
      }

      if (isSpeed) {
        grouped[categoryKey].speedSeries = s;
      } else {
        grouped[categoryKey].eventsSeries = s;
      }
    });

    return Object.values(grouped);
  }, [statsHistory, t, getThreshold]);

  return (

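The memo above pairs each `{base}_speed` series with its `{base}_events_per_second` counterpart so both charts can share one card in the render section below. A reduced standalone sketch of that suffix-based grouping, with illustrative types and a hypothetical `groupMetricSeries` name (not from the codebase):

type MetricSeries = {
  rawKey: string;
  data: { x: number; y: number }[];
};

type MetricGroup = {
  categoryKey: string;
  speedSeries?: MetricSeries;
  eventsSeries?: MetricSeries;
};

// Pair "<base>_speed" with "<base>_events_per_second" under one category key.
function groupMetricSeries(series: MetricSeries[]): MetricGroup[] {
  const grouped: Record<string, MetricGroup> = {};

  for (const s of series) {
    const isSpeed = s.rawKey.endsWith("_speed");
    const categoryKey = isSpeed
      ? s.rawKey.replace(/_speed$/, "")
      : s.rawKey.replace(/_events_per_second$/, "");

    grouped[categoryKey] ??= { categoryKey };
    if (isSpeed) {
      grouped[categoryKey].speedSeries = s;
    } else {
      grouped[categoryKey].eventsSeries = s;
    }
  }

  return Object.values(grouped);
}

// e.g. groupMetricSeries([{ rawKey: "face_recognition_speed", data: [] }])
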
@ -110,35 +162,42 @@ export default function EnrichmentMetrics({
      </div>
      <div
        className={cn(
          "mt-4 grid w-full grid-cols-1 gap-2 sm:grid-cols-3",
          embeddingInferenceTimeSeries && "sm:grid-cols-4",
          "mt-4 grid w-full grid-cols-1 gap-2 sm:grid-cols-2 md:grid-cols-4",
        )}
      >
        {statsHistory.length != 0 ? (
          <>
            {embeddingInferenceTimeSeries.map((series) => (
              <div className="rounded-lg bg-background_alt p-2.5 md:rounded-2xl">
                <div className="mb-5 smart-capitalize">{series.name}</div>
                {series.name.endsWith("Speed") ? (
                  <ThresholdBarGraph
                    key={series.name}
                    graphId={`${series.name}-inference`}
                    name={series.name}
                    unit="ms"
                    threshold={series.metrics}
                    updateTimes={updateTimes}
                    data={[series]}
                  />
                ) : (
                  <EventsPerSecondsLineGraph
                    key={series.name}
                    graphId={`${series.name}-fps`}
                    unit=""
                    name={t("enrichments.infPerSecond")}
                    updateTimes={updateTimes}
                    data={[series]}
                  />
                )}
            {groupedEnrichmentMetrics.map((group) => (
              <div
                key={group.categoryName}
                className="rounded-lg bg-background_alt p-2.5 md:rounded-2xl"
              >
                <div className="mb-5 smart-capitalize">
                  {group.categoryName}
                </div>
                <div className="space-y-4">
                  {group.speedSeries && (
                    <ThresholdBarGraph
                      key={`${group.categoryName}-speed`}
                      graphId={`${group.categoryName}-inference`}
                      name={t("enrichments.averageInf")}
                      unit="ms"
                      threshold={group.speedSeries.metrics}
                      updateTimes={updateTimes}
                      data={[group.speedSeries]}
                    />
                  )}
                  {group.eventsSeries && (
                    <EventsPerSecondsLineGraph
                      key={`${group.categoryName}-events`}
                      graphId={`${group.categoryName}-fps`}
                      unit=""
                      name={t("enrichments.infPerSecond")}
                      updateTimes={updateTimes}
                      data={[group.eventsSeries]}
                    />
                  )}
                </div>
              </div>
            ))}
          </>

@ -729,33 +729,32 @@ export default function GeneralMetrics({
              ) : (
                <Skeleton className="aspect-video w-full" />
              )}
            </>
          )}
          {statsHistory[0]?.npu_usages && (
            <div
              className={cn("mt-4 grid grid-cols-1 gap-2 sm:grid-cols-2")}
            >
              {statsHistory.length != 0 ? (
                <div className="rounded-lg bg-background_alt p-2.5 md:rounded-2xl">
                  <div className="mb-5">
                    {t("general.hardwareInfo.npuUsage")}
                  </div>
                  {npuSeries.map((series) => (
                    <ThresholdBarGraph
                      key={series.name}
                      graphId={`${series.name}-npu`}
                      name={series.name}
                      unit="%"
                      threshold={GPUUsageThreshold}
                      updateTimes={updateTimes}
                      data={[series]}
                    />
                  ))}
                </div>
              ) : (
                <Skeleton className="aspect-video w-full" />

          {statsHistory[0]?.npu_usages && (
            <>
              {statsHistory.length != 0 ? (
                <div className="rounded-lg bg-background_alt p-2.5 md:rounded-2xl">
                  <div className="mb-5">
                    {t("general.hardwareInfo.npuUsage")}
                  </div>
                  {npuSeries.map((series) => (
                    <ThresholdBarGraph
                      key={series.name}
                      graphId={`${series.name}-npu`}
                      name={series.name}
                      unit="%"
                      threshold={GPUUsageThreshold}
                      updateTimes={updateTimes}
                      data={[series]}
                    />
                  ))}
                </div>
              ) : (
                <Skeleton className="aspect-video w-full" />
              )}
            </>
          )}
        </div>
      </>
    )}
  </div>
</>
