diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index dcf3070b5..60bcdf6b1 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -225,3 +225,29 @@ jobs:
           sources: |
             ghcr.io/${{ steps.lowercaseRepo.outputs.lowercase }}:${{ env.SHORT_SHA }}-amd64
             ghcr.io/${{ steps.lowercaseRepo.outputs.lowercase }}:${{ env.SHORT_SHA }}-rpi
+  axera_build:
+    runs-on: ubuntu-22.04
+    name: AXERA Build
+    needs:
+      - amd64_build
+      - arm64_build
+    steps:
+      - name: Check out code
+        uses: actions/checkout@v5
+        with:
+          persist-credentials: false
+      - name: Set up QEMU and Buildx
+        id: setup
+        uses: ./.github/actions/setup
+        with:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+      - name: Build and push AXERA build
+        uses: docker/bake-action@v6
+        with:
+          source: .
+          push: true
+          targets: axcl
+          files: docker/axcl/axcl.hcl
+          set: |
+            axcl.tags=${{ steps.setup.outputs.image-name }}-axcl
+            *.cache-from=type=gha
\ No newline at end of file
diff --git a/docker/axcl/Dockerfile b/docker/axcl/Dockerfile
new file mode 100644
index 000000000..83271bce8
--- /dev/null
+++ b/docker/axcl/Dockerfile
@@ -0,0 +1,55 @@
+# syntax=docker/dockerfile:1.6
+
+# https://askubuntu.com/questions/972516/debian-frontend-environment-variable
+ARG DEBIAN_FRONTEND=noninteractive
+
+# Globally set pip break-system-packages option to avoid having to specify it every time
+ARG PIP_BREAK_SYSTEM_PACKAGES=1
+
+
+FROM frigate AS frigate-axcl
+ARG TARGETARCH
+ARG PIP_BREAK_SYSTEM_PACKAGES
+
+# Install pyaxengine
+RUN wget https://github.com/AXERA-TECH/pyaxengine/releases/download/0.1.3.rc1/axengine-0.1.3-py3-none-any.whl -O /axengine-0.1.3-py3-none-any.whl
+RUN pip3 install -i https://mirrors.aliyun.com/pypi/simple/ /axengine-0.1.3-py3-none-any.whl \
+    && rm /axengine-0.1.3-py3-none-any.whl
+
+# Install axcl
+RUN if [ "$TARGETARCH" = "amd64" ]; then \
+        echo "Installing x86_64 version of axcl"; \
+        wget https://github.com/ivanshi1108/assets/releases/download/v0.16.2/axcl_host_x86_64_V3.6.5_20250908154509_NO4973.deb -O /axcl.deb; \
+    else \
+        echo "Installing aarch64 version of axcl"; \
+        wget https://github.com/ivanshi1108/assets/releases/download/v0.16.2/axcl_host_aarch64_V3.6.5_20250908154509_NO4973.deb -O /axcl.deb; \
+    fi
+
+RUN mkdir /unpack_axcl && \
+    dpkg-deb -x /axcl.deb /unpack_axcl && \
+    cp -R /unpack_axcl/usr/bin/axcl /usr/bin/ && \
+    cp -R /unpack_axcl/usr/lib/axcl /usr/lib/ && \
+    rm -rf /unpack_axcl /axcl.deb
+
+
+# Install axcl ffmpeg
+RUN mkdir -p /usr/lib/ffmpeg/axcl
+
+RUN if [ "$TARGETARCH" = "amd64" ]; then \
+        wget https://github.com/ivanshi1108/assets/releases/download/v0.16.2/ffmpeg-x64 -O /usr/lib/ffmpeg/axcl/ffmpeg && \
+        wget https://github.com/ivanshi1108/assets/releases/download/v0.16.2/ffprobe-x64 -O /usr/lib/ffmpeg/axcl/ffprobe; \
+    else \
+        wget https://github.com/ivanshi1108/assets/releases/download/v0.16.2/ffmpeg-aarch64 -O /usr/lib/ffmpeg/axcl/ffmpeg && \
+        wget https://github.com/ivanshi1108/assets/releases/download/v0.16.2/ffprobe-aarch64 -O /usr/lib/ffmpeg/axcl/ffprobe; \
+    fi
+
+RUN chmod +x /usr/lib/ffmpeg/axcl/ffmpeg /usr/lib/ffmpeg/axcl/ffprobe
+
+# Set ldconfig path
+RUN echo "/usr/lib/axcl" > /etc/ld.so.conf.d/ax.conf
+
+# Set env
+ENV PATH="$PATH:/usr/bin/axcl"
+ENV LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/usr/lib/axcl"
+
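+# Rebuild the dynamic linker cache at container start so the AXCL libraries in
+# /usr/lib/axcl (registered above via /etc/ld.so.conf.d/ax.conf) are resolvable,
+# then hand control back to the image's normal /init.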
"docker/main/Dockerfile" + platforms = ["linux/amd64", "linux/arm64"] + target = "frigate" +} + +target axcl { + dockerfile = "docker/axcl/Dockerfile" + contexts = { + frigate = "target:frigate", + } + platforms = ["linux/amd64", "linux/arm64"] +} \ No newline at end of file diff --git a/docker/axcl/axcl.mk b/docker/axcl/axcl.mk new file mode 100644 index 000000000..e4b6d4cef --- /dev/null +++ b/docker/axcl/axcl.mk @@ -0,0 +1,15 @@ +BOARDS += axcl + +local-axcl: version + docker buildx bake --file=docker/axcl/axcl.hcl axcl \ + --set axcl.tags=frigate:latest-axcl \ + --load + +build-axcl: version + docker buildx bake --file=docker/axcl/axcl.hcl axcl \ + --set axcl.tags=$(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH)-axcl + +push-axcl: build-axcl + docker buildx bake --file=docker/axcl/axcl.hcl axcl \ + --set axcl.tags=$(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH)-axcl \ + --push \ No newline at end of file diff --git a/docker/axcl/user_installation.sh b/docker/axcl/user_installation.sh new file mode 100755 index 000000000..e053a5faf --- /dev/null +++ b/docker/axcl/user_installation.sh @@ -0,0 +1,83 @@ +#!/bin/bash + +# Update package list and install dependencies +sudo apt-get update +sudo apt-get install -y build-essential cmake git wget pciutils kmod udev + +# Check if gcc-12 is needed +current_gcc_version=$(gcc --version | head -n1 | awk '{print $NF}') +gcc_major_version=$(echo $current_gcc_version | cut -d'.' -f1) + +if [[ $gcc_major_version -lt 12 ]]; then + echo "Current GCC version ($current_gcc_version) is lower than 12, installing gcc-12..." + sudo apt-get install -y gcc-12 + sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-12 12 + echo "GCC-12 installed and set as default" +else + echo "Current GCC version ($current_gcc_version) is sufficient, skipping GCC installation" +fi + +# Determine architecture +arch=$(uname -m) +download_url="" + +if [[ $arch == "x86_64" ]]; then + download_url="https://github.com/ivanshi1108/assets/releases/download/v0.16.2/axcl_host_x86_64_V3.6.5_20250908154509_NO4973.deb" + deb_file="axcl_host_x86_64_V3.6.5_20250908154509_NO4973.deb" +elif [[ $arch == "aarch64" ]]; then + download_url="https://github.com/ivanshi1108/assets/releases/download/v0.16.2/axcl_host_aarch64_V3.6.5_20250908154509_NO4973.deb" + deb_file="axcl_host_aarch64_V3.6.5_20250908154509_NO4973.deb" +else + echo "Unsupported architecture: $arch" + exit 1 +fi + +# Download AXCL driver +echo "Downloading AXCL driver for $arch..." +wget "$download_url" -O "$deb_file" + +if [ $? -ne 0 ]; then + echo "Failed to download AXCL driver" + exit 1 +fi + +# Install AXCL driver +echo "Installing AXCL driver..." +sudo dpkg -i "$deb_file" + +if [ $? -ne 0 ]; then + echo "Failed to install AXCL driver, attempting to fix dependencies..." + sudo apt-get install -f -y + sudo dpkg -i "$deb_file" + + if [ $? -ne 0 ]; then + echo "AXCL driver installation failed" + exit 1 + fi +fi + +# Update environment +echo "Updating environment..." +source /etc/profile + +# Verify installation +echo "Verifying AXCL installation..." +if command -v axcl-smi &> /dev/null; then + echo "AXCL driver detected, checking AI accelerator status..." + + axcl_output=$(axcl-smi 2>&1) + axcl_exit_code=$? + + echo "$axcl_output" + + if [ $axcl_exit_code -eq 0 ]; then + echo "AXCL driver installation completed successfully!" + else + echo "AXCL driver installed but no AI accelerator detected or communication failed." + echo "Please check if the AI accelerator is properly connected and powered on." 
+# Verify installation
+echo "Verifying AXCL installation..."
+if command -v axcl-smi &> /dev/null; then
+    echo "AXCL driver detected, checking AI accelerator status..."
+
+    axcl_output=$(axcl-smi 2>&1)
+    axcl_exit_code=$?
+
+    echo "$axcl_output"
+
+    if [ $axcl_exit_code -eq 0 ]; then
+        echo "AXCL driver installation completed successfully!"
+    else
+        echo "AXCL driver installed but no AI accelerator detected or communication failed."
+        echo "Please check if the AI accelerator is properly connected and powered on."
+        exit 1
+    fi
+else
+    echo "axcl-smi command not found. AXCL driver installation may have failed."
+    exit 1
+fi
\ No newline at end of file
diff --git a/docs/docs/configuration/object_detectors.md b/docs/docs/configuration/object_detectors.md
index 2dd3330c2..64300833e 100644
--- a/docs/docs/configuration/object_detectors.md
+++ b/docs/docs/configuration/object_detectors.md
@@ -47,6 +47,11 @@ Frigate supports multiple different detectors that work on different types of ha
 - [Synaptics](#synaptics): synap models can run on Synaptics devices(e.g astra machina) with included NPUs.
 
+**AXERA**
+
+- [AXEngine](#axera): axmodels can run on AXERA AI accelerators.
+
+
 **For Testing**
 
 - [CPU Detector (not recommended for actual use](#cpu-detector-not-recommended): Use a CPU to run tflite model, this is not recommended and in most cases OpenVINO can be used in CPU mode with better results.
 
@@ -1169,6 +1174,41 @@ model: # required
   labelmap_path: /labelmap/coco-80.txt # required
 ```
 
+## AXERA
+
+Hardware accelerated object detection is supported on the following SoCs:
+
+- AX650N
+- AX8850N
+
+This implementation uses the [AXera Pulsar2 Toolchain](https://huggingface.co/AXERA-TECH/Pulsar2).
+
+See the [installation docs](../frigate/installation.md#axera) for information on configuring the AXEngine hardware.
+
+### Configuration
+
+When configuring the AXEngine detector, you must specify the model name.
+
+#### yolov9
+
+A default yolov9 model (`frigate-yolov9-tiny`) is used by this detector type and is downloaded to the model cache automatically on first use.
+
+Use the model configuration shown below when using the axengine detector with the default axmodel:
+
+```yaml
+detectors: # required
+  axengine: # required
+    type: axengine # required
+
+model: # required
+  path: frigate-yolov9-tiny # required
+  model_type: yolo-generic # required
+  width: 320 # required
+  height: 320 # required
+  tensor_format: bgr # required
+  labelmap_path: /labelmap/coco-80.txt # required
+```
+
 ## Rockchip platform
 
 Hardware accelerated object detection is supported on the following SoCs:
diff --git a/docs/docs/frigate/hardware.md b/docs/docs/frigate/hardware.md
index f6f647ec8..d4ac1d7e6 100644
--- a/docs/docs/frigate/hardware.md
+++ b/docs/docs/frigate/hardware.md
@@ -110,6 +110,14 @@ Frigate supports multiple different detectors that work on different types of ha
 | ssd mobilenet | ~ 25 ms  |
 | yolov5m       | ~ 118 ms |
 
+### AXERA
+
+- **AXEngine**: Default model is **yolov9**
+
+| Name        | AXERA AX650N/AX8850N Inference Time |
+| ----------- | ----------------------------------- |
+| yolov9-tiny | ~ 1.012 ms                          |
+
 ### Hailo-8
 
 Frigate supports both the Hailo-8 and Hailo-8L AI Acceleration Modules on compatible hardware platforms—including the Raspberry Pi 5 with the PCIe hat from the AI kit. The Hailo detector integration in Frigate automatically identifies your hardware type and selects the appropriate default model when a custom model isn’t provided.
diff --git a/docs/docs/frigate/installation.md b/docs/docs/frigate/installation.md
index 6a6fb8106..b022b4416 100644
--- a/docs/docs/frigate/installation.md
+++ b/docs/docs/frigate/installation.md
@@ -287,6 +287,40 @@ or add these options to your `docker run` command:
 
 Next, you should configure [hardware object detection](/configuration/object_detectors#synaptics) and [hardware video processing](/configuration/hardware_acceleration_video#synaptics).
 
+### AXERA
+
+AXERA accelerators are available in an M.2 form factor compatible with both the Raspberry Pi and Orange Pi. This form factor has also been tested successfully on x86 platforms, making it a versatile choice for a variety of computing environments.
+
+#### Installation
+
+Using an AXERA accelerator requires the AXCL driver. A convenience Linux script is provided to complete this installation.
+
+Follow these steps for installation:
+
+1. Copy or download [this script](https://github.com/ivanshi1108/assets/releases/download/v0.16.2/user_installation.sh).
+2. Ensure it has execution permissions with `sudo chmod +x user_installation.sh`.
+3. Run the script with `./user_installation.sh`.
+
+#### Setup
+
+To set up Frigate, follow the default installation instructions, but use a docker image with the `-axcl` suffix, for example `ghcr.io/blakeblackshear/frigate:stable-axcl`.
+
+Next, grant Docker permissions to access your hardware by adding the following lines to your `docker-compose.yml` file:
+
+```yaml
+devices:
+  - /dev/axcl_host
+  - /dev/ax_mmb_dev
+  - /dev/msg_userdev
+```
+
+If you are using `docker run`, add these options to your command: `--device /dev/axcl_host --device /dev/ax_mmb_dev --device /dev/msg_userdev`
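+
+For reference, a minimal `docker-compose.yml` service wiring these devices together might look like the following sketch (add your usual Frigate volumes, ports, and config on top of it):
+
+```yaml
+services:
+  frigate:
+    image: ghcr.io/blakeblackshear/frigate:stable-axcl
+    devices:
+      - /dev/axcl_host
+      - /dev/ax_mmb_dev
+      - /dev/msg_userdev
+```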
+
+#### Configuration
+
+Finally, configure [hardware object detection](/configuration/object_detectors#axera) to complete the setup.
+
+
 ## Docker
 
 Running through Docker with Docker Compose is the recommended install method.
diff --git a/frigate/detectors/plugins/axengine.py b/frigate/detectors/plugins/axengine.py
new file mode 100644
index 000000000..9cde9841b
--- /dev/null
+++ b/frigate/detectors/plugins/axengine.py
@@ -0,0 +1,92 @@
+import logging
+import os.path
+import re
+import urllib.request
+from typing import Literal
+
+import cv2
+import numpy as np
+from pydantic import Field
+
+from frigate.const import MODEL_CACHE_DIR
+from frigate.detectors.detection_api import DetectionApi
+from frigate.detectors.detector_config import BaseDetectorConfig, ModelTypeEnum
+from frigate.util.model import post_process_yolo
+
+import axengine as axe
+from axengine import axclrt_provider_name, axengine_provider_name
+
+logger = logging.getLogger(__name__)
+
+DETECTOR_KEY = "axengine"
+
+supported_models = {
+    ModelTypeEnum.yologeneric: "frigate-yolov9-.*$",
+}
+
+model_cache_dir = os.path.join(MODEL_CACHE_DIR, "axengine_cache/")
+
+
+class AxengineDetectorConfig(BaseDetectorConfig):
+    type: Literal[DETECTOR_KEY]
+
+
+class Axengine(DetectionApi):
+    type_key = DETECTOR_KEY
+
+    def __init__(self, config: AxengineDetectorConfig):
+        logger.info("__init__ axengine")
+        super().__init__(config)
+        self.height = config.model.height
+        self.width = config.model.width
+        model_path = config.model.path or "frigate-yolov9-tiny"
+        model_props = self.parse_model_input(model_path)
+        self.session = axe.InferenceSession(model_props["path"])
+
+    def __del__(self):
+        pass
+
+    def parse_model_input(self, model_path):
+        model_props = {}
+        model_props["preset"] = True
+
+        model_matched = False
+
+        for model_type, pattern in supported_models.items():
+            if re.match(pattern, model_path):
+                model_matched = True
+                model_props["model_type"] = model_type
+
+        if model_matched:
+            model_props["filename"] = model_path + ".axmodel"
+            model_props["path"] = model_cache_dir + model_props["filename"]
+
+            if not os.path.isfile(model_props["path"]):
+                self.download_model(model_props["filename"])
+        else:
+            supported_models_str = ", ".join(
+                pattern.rstrip("$") for pattern in supported_models.values()
+            )
+            raise Exception(
+                f"Model {model_path} is unsupported. Provide your own model or choose one of the following: {supported_models_str}"
+            )
+        return model_props
+
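+    # Prebuilt .axmodel files are fetched from the release assets below on first
+    # use and cached under the model cache directory; GITHUB_ENDPOINT can be set
+    # to a mirror if github.com is not directly reachable.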
+    def download_model(self, filename):
+        if not os.path.isdir(model_cache_dir):
+            os.mkdir(model_cache_dir)
+
+        GITHUB_ENDPOINT = os.environ.get("GITHUB_ENDPOINT", "https://github.com")
+        urllib.request.urlretrieve(
+            f"{GITHUB_ENDPOINT}/ivanshi1108/assets/releases/download/v0.16.2/{filename}",
+            model_cache_dir + filename,
+        )
+
+    def detect_raw(self, tensor_input):
+        results = self.session.run(None, {"images": tensor_input})
+
+        if self.detector_config.model.model_type == ModelTypeEnum.yologeneric:
+            return post_process_yolo(results, self.width, self.height)
+        else:
+            raise ValueError(
+                f'Model type "{self.detector_config.model.model_type}" is currently not supported.'
+            )