Compare commits

...

3 Commits

Author SHA1 Message Date
Martin Weinelt
95523f6f7c
Merge 67dca94651 into 5f2536dcd8 2026-02-19 10:42:44 -08:00
Shay Collings
5f2536dcd8
Added section for macOS installation including port conflict warning, example compose file and reference to Apple Silicon Detector (#22025)
Co-authored-by: Shay Collings <shay.collings@gmail.com>
2026-02-19 08:04:28 -06:00
Martin Weinelt
67dca94651
Fallback from tflite-runtime to ai-edge-litert
The fallback to tensorflow was established back in 2023, because we could
not provide tflite-runtime downstream in nixpkgs.

By now we have ai-edge-litert available, which is the successor to the
tflite-runtime. It still provides the same entrypoints as tflite-runtime
and functionality has been verified in multiple deployments for the last
two weeks.
2026-02-04 02:53:01 +01:00
8 changed files with 47 additions and 8 deletions

View File

@ -689,3 +689,42 @@ docker run \
```
Log into QNAP, open Container Station. Frigate docker container should be listed under 'Overview' and running. Visit Frigate Web UI by clicking Frigate docker, and then clicking the URL shown at the top of the detail page.
## macOS - Apple Silicon
:::warning
macOS uses port 5000 for its AirPlay Receiver service. If you want to expose port 5000 in Frigate for local app and API access, the port will need to be mapped to another port on the host, e.g. 5001.
Failure to remap port 5000 on the host will result in the WebUI and all API endpoints on port 5000 being unreachable, even if port 5000 is exposed correctly in Docker.
:::
Docker containers on macOS can be orchestrated by either [Docker Desktop](https://docs.docker.com/desktop/setup/install/mac-install/) or [OrbStack](https://orbstack.dev) (native Swift app). The difference in inference speeds is negligible; however, CPU usage, power consumption and container start times will be lower on OrbStack because it is a native Swift application.
To allow Frigate to use the Apple Silicon Neural Engine / Neural Processing Unit (NPU), [Apple Silicon Detector](../configuration/object_detectors.md#apple-silicon-detector) must be running on the host (outside Docker).
#### Docker Compose example
```yaml
services:
frigate:
container_name: frigate
image: ghcr.io/blakeblackshear/frigate:stable-arm64
restart: unless-stopped
shm_size: "512mb" # update for your cameras based on calculation above
volumes:
- /etc/localtime:/etc/localtime:ro
- /path/to/your/config:/config
- /path/to/your/recordings:/recordings
ports:
- "8971:8971"
      # If exposing on macOS, map to a different host port like 5001 or any other port with no conflicts
# - "5001:5000" # Internal unauthenticated access. Expose carefully.
- "8554:8554" # RTSP feeds
extra_hosts:
# This is very important
# It allows frigate access to the NPU on Apple Silicon via Apple Silicon Detector
- "host.docker.internal:host-gateway" # Required to talk to the NPU detector
environment:
      - FRIGATE_RTSP_PASSWORD=password
```

View File

@ -22,7 +22,7 @@ from .api import RealTimeProcessorApi
try:
from tflite_runtime.interpreter import Interpreter
except ModuleNotFoundError:
from tensorflow.lite.python.interpreter import Interpreter
from ai_edge_litert.interpreter import Interpreter
logger = logging.getLogger(__name__)

View File

@ -32,7 +32,7 @@ from .api import RealTimeProcessorApi
try:
from tflite_runtime.interpreter import Interpreter
except ModuleNotFoundError:
from tensorflow.lite.python.interpreter import Interpreter
from ai_edge_litert.interpreter import Interpreter
logger = logging.getLogger(__name__)
@ -76,7 +76,7 @@ class CustomStateClassificationProcessor(RealTimeProcessorApi):
try:
from tflite_runtime.interpreter import Interpreter
except ModuleNotFoundError:
from tensorflow.lite.python.interpreter import Interpreter
from ai_edge_litert.interpreter import Interpreter
model_path = os.path.join(self.model_dir, "model.tflite")
labelmap_path = os.path.join(self.model_dir, "labelmap.txt")

View File

@ -6,7 +6,7 @@ import numpy as np
try:
from tflite_runtime.interpreter import Interpreter, load_delegate
except ModuleNotFoundError:
from tensorflow.lite.python.interpreter import Interpreter, load_delegate
from ai_edge_litert.interpreter import Interpreter, load_delegate
logger = logging.getLogger(__name__)

View File

@ -12,7 +12,7 @@ from ..detector_utils import tflite_detect_raw, tflite_init
try:
from tflite_runtime.interpreter import Interpreter
except ModuleNotFoundError:
from tensorflow.lite.python.interpreter import Interpreter
from ai_edge_litert.interpreter import Interpreter
logger = logging.getLogger(__name__)

View File

@ -13,7 +13,7 @@ from frigate.detectors.detector_config import BaseDetectorConfig, ModelTypeEnum
try:
from tflite_runtime.interpreter import Interpreter, load_delegate
except ModuleNotFoundError:
from tensorflow.lite.python.interpreter import Interpreter, load_delegate
from ai_edge_litert.interpreter import Interpreter, load_delegate
logger = logging.getLogger(__name__)

View File

@ -17,7 +17,7 @@ from .base_embedding import BaseEmbedding
try:
from tflite_runtime.interpreter import Interpreter
except ModuleNotFoundError:
from tensorflow.lite.python.interpreter import Interpreter
from ai_edge_litert.interpreter import Interpreter
logger = logging.getLogger(__name__)

View File

@ -43,7 +43,7 @@ from frigate.video import start_or_restart_ffmpeg, stop_ffmpeg
try:
from tflite_runtime.interpreter import Interpreter
except ModuleNotFoundError:
from tensorflow.lite.python.interpreter import Interpreter
from ai_edge_litert.interpreter import Interpreter
logger = logging.getLogger(__name__)