Andreas Franzén 2022-04-13 21:20:28 +02:00
commit b7429d3524
186 changed files with 12551 additions and 18919 deletions

@@ -9,19 +9,32 @@
"mhutchie.git-graph",
"ms-azuretools.vscode-docker",
"streetsidesoftware.code-spell-checker",
"eamodio.gitlens",
"esbenp.prettier-vscode",
"ms-python.vscode-pylance"
"ms-python.vscode-pylance",
"dbaeumer.vscode-eslint",
"mikestead.dotenv",
"csstools.postcss",
"blanu.vscode-styled-jsx",
"bradlc.vscode-tailwindcss"
],
"settings": {
"python.pythonPath": "/usr/bin/python3",
"python.linting.pylintEnabled": true,
"python.linting.enabled": true,
"python.formatting.provider": "black",
"python.languageServer": "Pylance",
"editor.formatOnPaste": false,
"editor.formatOnSave": true,
"editor.formatOnType": true,
"files.trimTrailingWhitespace": true,
"terminal.integrated.shell.linux": "/bin/bash"
"eslint.workingDirectories": ["./web"],
"[json][jsonc]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[jsx][js][tsx][ts]": {
"editor.codeActionsOnSave": ["source.addMissingImports", "source.fixAll"],
"editor.tabSize": 2
},
"cSpell.ignoreWords": ["rtmp"],
"cSpell.words": ["preact"]
}
}

@@ -7,5 +7,6 @@ config/
.git
core
*.mp4
*.jpg
*.db
*.ts

@@ -10,11 +10,11 @@ jobs:
- uses: actions/checkout@master
- uses: actions/setup-node@master
with:
node-version: 14.x
node-version: 16.x
- run: npm install
working-directory: ./web
- name: Lint
run: npm run lint:cmd
run: npm run lint
working-directory: ./web
web_build:
@@ -24,7 +24,7 @@ jobs:
- uses: actions/checkout@master
- uses: actions/setup-node@master
with:
node-version: 14.x
node-version: 16.x
- run: npm install
working-directory: ./web
- name: Build
@@ -38,14 +38,14 @@ jobs:
- uses: actions/checkout@master
- uses: actions/setup-node@master
with:
node-version: 14.x
node-version: 16.x
- run: npm install
working-directory: ./web
- name: Test
run: npm run test
working-directory: ./web
docker_tests_on_aarch64:
python_tests:
runs-on: ubuntu-latest
steps:
- name: Check out code
@@ -54,17 +54,7 @@ jobs:
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Build and run tests
run: make run_tests PLATFORM="linux/arm64/v8" ARCH="aarch64"
docker_tests_on_amd64:
runs-on: ubuntu-latest
steps:
- name: Check out code
uses: actions/checkout@v2
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Build and run tests
run: make run_tests PLATFORM="linux/amd64" ARCH="amd64"
- name: Build
run: make
- name: Run tests
run: docker run --rm --entrypoint=python3 frigate:latest -u -m unittest
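Note: with the per-architecture test jobs folded into a single python_tests job, the same check can be reproduced locally; a minimal sketch, assuming the default make target tags the image as frigate:latest:

make
docker run --rm --entrypoint=python3 frigate:latest -u -m unittest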

.gitignore

@@ -14,3 +14,4 @@ web/build
web/node_modules
web/coverage
core
!/web/**/*.ts

@@ -1,63 +1,18 @@
default_target: amd64_frigate
default_target: frigate
COMMIT_HASH := $(shell git log -1 --pretty=format:"%h"|tail -1)
version:
echo "VERSION='0.10.1-$(COMMIT_HASH)'" > frigate/version.py
web:
docker build --tag frigate-web --file docker/Dockerfile.web web/
amd64_wheels:
docker build --tag blakeblackshear/frigate-wheels:1.0.3-amd64 --file docker/Dockerfile.wheels .
amd64_ffmpeg:
docker build --no-cache --pull --tag blakeblackshear/frigate-ffmpeg:1.2.0-amd64 --file docker/Dockerfile.ffmpeg.amd64 .
echo "VERSION='0.11.0-$(COMMIT_HASH)'" > frigate/version.py
nginx_frigate:
docker buildx build --push --platform linux/arm/v7,linux/arm64/v8,linux/amd64 --tag blakeblackshear/frigate-nginx:1.0.2 --file docker/Dockerfile.nginx .
amd64_frigate: version web
docker build --no-cache --tag frigate-base --build-arg ARCH=amd64 --build-arg FFMPEG_VERSION=1.1.0 --build-arg WHEELS_VERSION=1.0.3 --build-arg NGINX_VERSION=1.0.2 --file docker/Dockerfile.base .
docker build --no-cache --tag frigate --file docker/Dockerfile.amd64 .
frigate: version
DOCKER_BUILDKIT=1 docker build -t frigate -f docker/Dockerfile .
amd64_all: amd64_wheels amd64_ffmpeg amd64_frigate
amd64nvidia_wheels:
docker build --tag blakeblackshear/frigate-wheels:1.0.3-amd64nvidia --file docker/Dockerfile.wheels .
amd64nvidia_ffmpeg:
docker build --no-cache --pull --tag blakeblackshear/frigate-ffmpeg:1.2.0-amd64nvidia --file docker/Dockerfile.ffmpeg.amd64nvidia .
amd64nvidia_frigate: version web
docker build --no-cache --tag frigate-base --build-arg ARCH=amd64nvidia --build-arg FFMPEG_VERSION=1.0.0 --build-arg WHEELS_VERSION=1.0.3 --build-arg NGINX_VERSION=1.0.2 --file docker/Dockerfile.base .
docker build --no-cache --tag frigate --file docker/Dockerfile.amd64nvidia .
amd64nvidia_all: amd64nvidia_wheels amd64nvidia_ffmpeg amd64nvidia_frigate
aarch64_wheels:
docker build --tag blakeblackshear/frigate-wheels:1.0.3-aarch64 --file docker/Dockerfile.wheels .
aarch64_ffmpeg:
docker build --no-cache --pull --tag blakeblackshear/frigate-ffmpeg:1.3.0-aarch64 --file docker/Dockerfile.ffmpeg.aarch64 .
aarch64_frigate: version web
docker build --no-cache --tag frigate-base --build-arg ARCH=aarch64 --build-arg FFMPEG_VERSION=1.0.0 --build-arg WHEELS_VERSION=1.0.3 --build-arg NGINX_VERSION=1.0.2 --file docker/Dockerfile.base .
docker build --no-cache --tag frigate --file docker/Dockerfile.aarch64 .
aarch64_all: aarch64_wheels aarch64_ffmpeg aarch64_frigate
armv7_wheels:
docker build --tag blakeblackshear/frigate-wheels:1.0.3-armv7 --file docker/Dockerfile.wheels .
armv7_ffmpeg:
docker build --no-cache --pull --tag blakeblackshear/frigate-ffmpeg:1.2.0-armv7 --file docker/Dockerfile.ffmpeg.armv7 .
armv7_frigate: version web
docker build --no-cache --tag frigate-base --build-arg ARCH=armv7 --build-arg FFMPEG_VERSION=1.0.0 --build-arg WHEELS_VERSION=1.0.3 --build-arg NGINX_VERSION=1.0.2 --file docker/Dockerfile.base .
docker build --no-cache --tag frigate --file docker/Dockerfile.armv7 .
armv7_all: armv7_wheels armv7_ffmpeg armv7_frigate
frigate_push: version
docker buildx build --push --platform linux/arm64/v8,linux/amd64 --tag blakeblackshear/frigate:0.11.0-$(COMMIT_HASH) --file docker/Dockerfile .
run_tests:
# PLATFORM: linux/arm64/v8 linux/amd64 or linux/arm/v7
@@ -71,4 +26,4 @@ run_tests:
@docker buildx build --platform=$(PLATFORM) --tag frigate-base --build-arg NGINX_VERSION=1.0.2 --build-arg FFMPEG_VERSION=1.0.0 --build-arg ARCH=$(ARCH) --build-arg WHEELS_VERSION=1.0.3 --file docker/Dockerfile.test .
@rm docker/Dockerfile.test
.PHONY: web run_tests
.PHONY: run_tests
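Note: the Makefile now has a single consolidated build path; a minimal sketch of a local build, assuming Docker with BuildKit support:

make frigate
# roughly equivalent to running the recipe directly:
make version
DOCKER_BUILDKIT=1 docker build -t frigate -f docker/Dockerfile .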

@@ -3,18 +3,24 @@ services:
dev:
container_name: frigate-dev
user: vscode
privileged: true
# add groups from host for render, plugdev, video
group_add:
- 109 # render
- 44 # video
- 46 # plugdev
shm_size: "256mb"
build:
context: .
dockerfile: docker/Dockerfile.dev
devices:
- /dev/bus/usb:/dev/bus/usb
- /dev/dri:/dev/dri # for intel hwaccel, needs to be updated for your hardware
volumes:
- /etc/localtime:/etc/localtime:ro
- .:/lab/frigate:cached
- ./config/config.yml:/config/config.yml:ro
- ./debug:/media/frigate
- /dev/bus/usb:/dev/bus/usb
- /dev/dri:/dev/dri # for intel hwaccel, needs to be updated for your hardware
ports:
- "1935:1935"
- "5000:5000"

docker/Dockerfile

@@ -0,0 +1,136 @@
FROM blakeblackshear/frigate-nginx:1.0.2 as nginx
FROM node:16 as web
WORKDIR /opt/frigate
COPY web/ .
RUN npm install && npm run build
FROM debian:11 as wheels
ENV DEBIAN_FRONTEND=noninteractive
# Use a separate container to build wheels to prevent build dependencies in final image
RUN apt-get -qq update \
&& apt-get -qq install -y \
apt-transport-https \
gnupg \
wget \
&& wget -O - http://archive.raspberrypi.org/debian/raspberrypi.gpg.key | apt-key add - \
&& echo "deb http://archive.raspberrypi.org/debian/ bullseye main" | tee /etc/apt/sources.list.d/raspi.list \
&& apt-get -qq update \
&& apt-get -qq install -y \
python3 \
python3-dev \
wget \
# opencv dependencies
build-essential cmake git pkg-config libgtk-3-dev \
libavcodec-dev libavformat-dev libswscale-dev libv4l-dev \
libxvidcore-dev libx264-dev libjpeg-dev libpng-dev libtiff-dev \
gfortran openexr libatlas-base-dev libssl-dev\
libtbb2 libtbb-dev libdc1394-22-dev libopenexr-dev \
libgstreamer-plugins-base1.0-dev libgstreamer1.0-dev \
# scipy dependencies
gcc gfortran libopenblas-dev liblapack-dev
RUN wget -q https://bootstrap.pypa.io/get-pip.py -O get-pip.py \
&& python3 get-pip.py "pip"
RUN pip3 install scikit-build
# TODO: lock with requirements.txt
RUN pip3 wheel --wheel-dir=/wheels \
opencv-python-headless \
numpy \
imutils \
scipy \
psutil \
Flask \
paho-mqtt \
PyYAML \
matplotlib \
click \
setproctitle \
peewee \
peewee_migrate \
pydantic \
zeroconf \
ws4py \
requests
# Frigate Container
FROM debian:11-slim
ARG TARGETARCH
ENV DEBIAN_FRONTEND=noninteractive
ENV FLASK_ENV=development
COPY --from=wheels /wheels /wheels
# Install ffmpeg
RUN apt-get -qq update \
&& apt-get -qq install --no-install-recommends -y \
apt-transport-https \
gnupg \
wget \
unzip tzdata libxml2 xz-utils \
python3-pip \
# add raspberry pi repo
&& wget -O - http://archive.raspberrypi.org/debian/raspberrypi.gpg.key | apt-key add - \
&& echo "deb http://archive.raspberrypi.org/debian/ bullseye main" | tee /etc/apt/sources.list.d/raspi.list \
# add coral repo
&& APT_KEY_DONT_WARN_ON_DANGEROUS_USAGE=DontWarn apt-key adv --fetch-keys https://packages.cloud.google.com/apt/doc/apt-key.gpg \
&& echo "deb https://packages.cloud.google.com/apt coral-edgetpu-stable main" > /etc/apt/sources.list.d/coral-edgetpu.list \
&& echo "libedgetpu1-max libedgetpu/accepted-eula select true" | debconf-set-selections \
&& apt-get -qq update \
&& apt-get -qq install --no-install-recommends -y \
ffmpeg \
# coral drivers
libedgetpu1-max python3-tflite-runtime python3-pycoral \
&& pip3 install -U /wheels/*.whl \
&& rm -rf /var/lib/apt/lists/* /wheels \
&& (apt-get autoremove -y; apt-get autoclean -y)
# AMD64 specific packages
RUN if [ "${TARGETARCH}" = "amd64" ]; \
then \
wget -qO - https://repositories.intel.com/graphics/intel-graphics.key | apt-key add - \
&& echo 'deb [arch=amd64] https://repositories.intel.com/graphics/ubuntu focal main' > /etc/apt/sources.list.d/intel-graphics.list \
&& apt-get -qq update \
&& apt-get -qq install --no-install-recommends -y \
# VAAPI drivers for Intel hardware accel
libva-drm2 libva2 libmfx1 i965-va-driver vainfo intel-media-va-driver-non-free mesa-vdpau-drivers mesa-va-drivers mesa-vdpau-drivers libdrm-radeon1 \
&& rm -rf /var/lib/apt/lists/* \
&& (apt-get autoremove -y; apt-get autoclean -y) \
fi
COPY --from=nginx /usr/local/nginx/ /usr/local/nginx/
# get model and labels
COPY labelmap.txt /labelmap.txt
RUN wget -q https://github.com/google-coral/test_data/raw/release-frogfish/ssdlite_mobiledet_coco_qat_postprocess_edgetpu.tflite -O /edgetpu_model.tflite
RUN wget -q https://github.com/google-coral/test_data/raw/release-frogfish/ssdlite_mobiledet_coco_qat_postprocess.tflite -O /cpu_model.tflite
WORKDIR /opt/frigate/
ADD frigate frigate/
ADD migrations migrations/
COPY --from=web /opt/frigate/dist web/
COPY docker/rootfs/ /
# s6-overlay
RUN S6_ARCH="${TARGETARCH}" \
&& if [ "${TARGETARCH}" = "amd64" ]; then S6_ARCH="amd64"; fi \
&& if [ "${TARGETARCH}" = "arm64" ]; then S6_ARCH="aarch64"; fi \
&& wget -O /tmp/s6-overlay-installer "https://github.com/just-containers/s6-overlay/releases/download/v2.2.0.3/s6-overlay-${S6_ARCH}-installer" \
&& chmod +x /tmp/s6-overlay-installer && /tmp/s6-overlay-installer /
EXPOSE 5000
EXPOSE 1935
ENTRYPOINT ["/init"]
CMD ["python3", "-u", "-m", "frigate"]

@@ -1,28 +0,0 @@
FROM frigate-base
LABEL maintainer "blakeb@blakeshome.com"
ENV DEBIAN_FRONTEND=noninteractive
# Install packages for apt repo
RUN apt-get -qq update \
&& apt-get -qq install --no-install-recommends -y \
# ffmpeg runtime dependencies
libgomp1 \
# runtime dependencies
libopenexr24 \
libgstreamer1.0-0 \
libgstreamer-plugins-base1.0-0 \
libopenblas-base \
libjpeg-turbo8 \
libpng16-16 \
libtiff5 \
libdc1394-22 \
&& rm -rf /var/lib/apt/lists/* \
&& (apt-get autoremove -y; apt-get autoclean -y)
# s6-overlay
ADD https://github.com/just-containers/s6-overlay/releases/download/v2.2.0.3/s6-overlay-aarch64-installer /tmp/
RUN chmod +x /tmp/s6-overlay-aarch64-installer && /tmp/s6-overlay-aarch64-installer /
ENTRYPOINT ["/init"]
CMD ["python3", "-u", "-m", "frigate"]

@@ -1,28 +0,0 @@
FROM frigate-base
LABEL maintainer "blakeb@blakeshome.com"
# By default, use the i965 driver
ENV LIBVA_DRIVER_NAME=i965
# Install packages for apt repo
RUN wget -qO - https://repositories.intel.com/graphics/intel-graphics.key | apt-key add - \
&& echo 'deb [arch=amd64] https://repositories.intel.com/graphics/ubuntu focal main' > /etc/apt/sources.list.d/intel-graphics.list \
&& apt-key adv --keyserver keyserver.ubuntu.com --recv-keys F63F0F2B90935439 \
&& echo 'deb http://ppa.launchpad.net/kisak/kisak-mesa/ubuntu focal main' > /etc/apt/sources.list.d/kisak-mesa-focal.list
RUN apt-get -qq update \
&& apt-get -qq install --no-install-recommends -y \
# ffmpeg dependencies
libgomp1 \
# VAAPI drivers for Intel hardware accel
libva-drm2 libva2 libmfx1 i965-va-driver vainfo intel-media-va-driver-non-free mesa-vdpau-drivers mesa-va-drivers mesa-vdpau-drivers libdrm-radeon1 \
&& rm -rf /var/lib/apt/lists/* \
&& (apt-get autoremove -y; apt-get autoclean -y)
# s6-overlay
ADD https://github.com/just-containers/s6-overlay/releases/download/v2.2.0.3/s6-overlay-amd64-installer /tmp/
RUN chmod +x /tmp/s6-overlay-amd64-installer && /tmp/s6-overlay-amd64-installer /
ENTRYPOINT ["/init"]
CMD ["python3", "-u", "-m", "frigate"]

@@ -1,51 +0,0 @@
FROM frigate-base
LABEL maintainer "blakeb@blakeshome.com"
# Install packages for apt repo
RUN apt-get -qq update \
&& apt-get -qq install --no-install-recommends -y \
# ffmpeg dependencies
libgomp1 \
&& rm -rf /var/lib/apt/lists/* \
&& (apt-get autoremove -y; apt-get autoclean -y)
# nvidia layer (see https://gitlab.com/nvidia/container-images/cuda/blob/master/dist/11.1/ubuntu20.04-x86_64/base/Dockerfile)
ENV NVIDIA_DRIVER_CAPABILITIES compute,utility,video
RUN apt-get update && apt-get install -y --no-install-recommends \
gnupg2 curl ca-certificates && \
curl -fsSL https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2004/x86_64/7fa2af80.pub | apt-key add - && \
echo "deb https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2004/x86_64 /" > /etc/apt/sources.list.d/cuda.list && \
echo "deb https://developer.download.nvidia.com/compute/machine-learning/repos/ubuntu2004/x86_64 /" > /etc/apt/sources.list.d/nvidia-ml.list && \
apt-get purge --autoremove -y curl \
&& rm -rf /var/lib/apt/lists/*
ENV CUDA_VERSION 11.1.1
# For libraries in the cuda-compat-* package: https://docs.nvidia.com/cuda/eula/index.html#attachment-a
RUN apt-get update && apt-get install -y --no-install-recommends \
cuda-cudart-11-1=11.1.74-1 \
cuda-compat-11-1 \
&& ln -s cuda-11.1 /usr/local/cuda && \
rm -rf /var/lib/apt/lists/*
# Required for nvidia-docker v1
RUN echo "/usr/local/nvidia/lib" >> /etc/ld.so.conf.d/nvidia.conf && \
echo "/usr/local/nvidia/lib64" >> /etc/ld.so.conf.d/nvidia.conf
ENV PATH /usr/local/nvidia/bin:/usr/local/cuda/bin:${PATH}
ENV LD_LIBRARY_PATH /usr/local/nvidia/lib:/usr/local/nvidia/lib64
# nvidia-container-runtime
ENV NVIDIA_VISIBLE_DEVICES all
ENV NVIDIA_DRIVER_CAPABILITIES compute,utility,video
ENV NVIDIA_REQUIRE_CUDA "cuda>=11.1 brand=tesla,driver>=418,driver<419 brand=tesla,driver>=440,driver<441 brand=tesla,driver>=450,driver<451"
# s6-overlay
ADD https://github.com/just-containers/s6-overlay/releases/download/v2.2.0.3/s6-overlay-amd64-installer /tmp/
RUN chmod +x /tmp/s6-overlay-amd64-installer && /tmp/s6-overlay-amd64-installer /
ENTRYPOINT ["/init"]
CMD ["python3", "-u", "-m", "frigate"]

@@ -1,30 +0,0 @@
FROM frigate-base
LABEL maintainer "blakeb@blakeshome.com"
ENV DEBIAN_FRONTEND=noninteractive
# Install packages for apt repo
RUN apt-get -qq update \
&& apt-get -qq install --no-install-recommends -y \
# ffmpeg runtime dependencies
libgomp1 \
# runtime dependencies
libopenexr24 \
libgstreamer1.0-0 \
libgstreamer-plugins-base1.0-0 \
libopenblas-base \
libjpeg-turbo8 \
libpng16-16 \
libtiff5 \
libdc1394-22 \
libaom0 \
libx265-179 \
&& rm -rf /var/lib/apt/lists/* \
&& (apt-get autoremove -y; apt-get autoclean -y)
# s6-overlay
ADD https://github.com/just-containers/s6-overlay/releases/download/v2.2.0.3/s6-overlay-armhf-installer /tmp/
RUN chmod +x /tmp/s6-overlay-armhf-installer && /tmp/s6-overlay-armhf-installer /
ENTRYPOINT ["/init"]
CMD ["python3", "-u", "-m", "frigate"]

@@ -1,55 +0,0 @@
ARG ARCH=amd64
ARG WHEELS_VERSION
ARG FFMPEG_VERSION
ARG NGINX_VERSION
FROM blakeblackshear/frigate-wheels:${WHEELS_VERSION}-${ARCH} as wheels
FROM blakeblackshear/frigate-ffmpeg:${FFMPEG_VERSION}-${ARCH} as ffmpeg
FROM blakeblackshear/frigate-nginx:${NGINX_VERSION} as nginx
FROM frigate-web as web
FROM ubuntu:20.04
LABEL maintainer "blakeb@blakeshome.com"
COPY --from=ffmpeg /usr/local /usr/local/
COPY --from=wheels /wheels/. /wheels/
ENV FLASK_ENV=development
# ENV FONTCONFIG_PATH=/etc/fonts
ENV DEBIAN_FRONTEND=noninteractive
# Install packages for apt repo
RUN apt-get -qq update \
&& apt-get upgrade -y \
&& apt-get -qq install --no-install-recommends -y gnupg wget unzip tzdata libxml2 \
&& apt-get -qq install --no-install-recommends -y python3-pip \
&& pip3 install -U /wheels/*.whl \
&& APT_KEY_DONT_WARN_ON_DANGEROUS_USAGE=DontWarn apt-key adv --fetch-keys https://packages.cloud.google.com/apt/doc/apt-key.gpg \
&& echo "deb https://packages.cloud.google.com/apt coral-edgetpu-stable main" > /etc/apt/sources.list.d/coral-edgetpu.list \
&& echo "libedgetpu1-max libedgetpu/accepted-eula select true" | debconf-set-selections \
&& apt-get -qq update && apt-get -qq install --no-install-recommends -y libedgetpu1-max python3-tflite-runtime python3-pycoral \
&& rm -rf /var/lib/apt/lists/* /wheels \
&& (apt-get autoremove -y; apt-get autoclean -y)
RUN pip3 install \
peewee_migrate \
pydantic \
zeroconf \
ws4py
COPY --from=nginx /usr/local/nginx/ /usr/local/nginx/
# get model and labels
COPY labelmap.txt /labelmap.txt
RUN wget -q https://github.com/google-coral/test_data/raw/release-frogfish/ssdlite_mobiledet_coco_qat_postprocess_edgetpu.tflite -O /edgetpu_model.tflite
RUN wget -q https://github.com/google-coral/test_data/raw/release-frogfish/ssdlite_mobiledet_coco_qat_postprocess.tflite -O /cpu_model.tflite
WORKDIR /opt/frigate/
ADD frigate frigate/
ADD migrations migrations/
COPY --from=web /opt/frigate/build web/
COPY docker/rootfs/ /
EXPOSE 5000
EXPOSE 1935

@@ -6,7 +6,7 @@ ARG USER_GID=$USER_UID
# Create the user
RUN groupadd --gid $USER_GID $USERNAME \
&& useradd --uid $USER_UID --gid $USER_GID -m $USERNAME \
&& useradd --uid $USER_UID --gid $USER_GID -m $USERNAME -s /bin/bash \
#
# [Optional] Add sudo support. Omit if you don't need to install software after connecting.
&& apt-get update \
@@ -19,8 +19,8 @@ RUN apt-get update \
RUN pip3 install pylint black
# Install Node 14
RUN curl -sL https://deb.nodesource.com/setup_14.x | bash - \
# Install Node 16
RUN curl -sL https://deb.nodesource.com/setup_16.x | bash - \
&& apt-get install -y nodejs
RUN npm install -g npm@latest

@@ -1,486 +0,0 @@
# inspired by:
# https://github.com/collelog/ffmpeg/blob/master/4.3.1-alpine-rpi4-arm64v8.Dockerfile
# https://github.com/mmastrac/ffmpeg-omx-rpi-docker/blob/master/Dockerfile
# https://github.com/jrottenberg/ffmpeg/pull/158/files
# https://github.com/jrottenberg/ffmpeg/pull/239
FROM ubuntu:20.04 AS base
WORKDIR /tmp/workdir
ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get -yqq update && \
apt-get install -yq --no-install-recommends ca-certificates expat libgomp1 xutils-dev && \
apt-get autoremove -y && \
apt-get clean -y
FROM base as build
ENV FFMPEG_VERSION=4.3.2 \
AOM_VERSION=v1.0.0 \
FDKAAC_VERSION=0.1.5 \
FREETYPE_VERSION=2.11.0 \
FRIBIDI_VERSION=0.19.7 \
KVAZAAR_VERSION=1.2.0 \
LAME_VERSION=3.100 \
LIBPTHREAD_STUBS_VERSION=0.4 \
LIBVIDSTAB_VERSION=1.1.0 \
LIBXCB_VERSION=1.13.1 \
XCBPROTO_VERSION=1.13 \
OGG_VERSION=1.3.2 \
OPENCOREAMR_VERSION=0.1.5 \
OPUS_VERSION=1.2 \
OPENJPEG_VERSION=2.1.2 \
THEORA_VERSION=1.1.1 \
VORBIS_VERSION=1.3.5 \
VPX_VERSION=1.8.0 \
WEBP_VERSION=1.0.2 \
X264_VERSION=20170226-2245-stable \
X265_VERSION=3.1.1 \
XAU_VERSION=1.0.9 \
XORG_MACROS_VERSION=1.19.2 \
XPROTO_VERSION=7.0.31 \
XVID_VERSION=1.3.4 \
LIBZMQ_VERSION=4.3.2 \
SRC=/usr/local
ARG FREETYPE_SHA256SUM="a45c6b403413abd5706f3582f04c8339d26397c4304b78fa552f2215df64101f freetype-2.11.0.tar.gz"
ARG FRIBIDI_SHA256SUM="3fc96fa9473bd31dcb5500bdf1aa78b337ba13eb8c301e7c28923fea982453a8 0.19.7.tar.gz"
ARG LIBVIDSTAB_SHA256SUM="14d2a053e56edad4f397be0cb3ef8eb1ec3150404ce99a426c4eb641861dc0bb v1.1.0.tar.gz"
ARG OGG_SHA256SUM="e19ee34711d7af328cb26287f4137e70630e7261b17cbe3cd41011d73a654692 libogg-1.3.2.tar.gz"
ARG OPUS_SHA256SUM="77db45a87b51578fbc49555ef1b10926179861d854eb2613207dc79d9ec0a9a9 opus-1.2.tar.gz"
ARG THEORA_SHA256SUM="40952956c47811928d1e7922cda3bc1f427eb75680c3c37249c91e949054916b libtheora-1.1.1.tar.gz"
ARG VORBIS_SHA256SUM="6efbcecdd3e5dfbf090341b485da9d176eb250d893e3eb378c428a2db38301ce libvorbis-1.3.5.tar.gz"
ARG XVID_SHA256SUM="4e9fd62728885855bc5007fe1be58df42e5e274497591fec37249e1052ae316f xvidcore-1.3.4.tar.gz"
ARG LIBZMQ_SHA256SUM="02ecc88466ae38cf2c8d79f09cfd2675ba299a439680b64ade733e26a349edeb v4.3.2.tar.gz"
ARG LD_LIBRARY_PATH=/opt/ffmpeg/lib
ARG MAKEFLAGS="-j2"
ARG PKG_CONFIG_PATH="/opt/ffmpeg/share/pkgconfig:/opt/ffmpeg/lib/pkgconfig:/opt/ffmpeg/lib64/pkgconfig"
ARG PREFIX=/opt/ffmpeg
ARG LD_LIBRARY_PATH="/opt/ffmpeg/lib:/opt/ffmpeg/lib64:/usr/lib64:/usr/lib:/lib64:/lib"
RUN buildDeps="autoconf \
automake \
cmake \
curl \
bzip2 \
libexpat1-dev \
g++ \
gcc \
git \
gperf \
libtool \
make \
nasm \
perl \
pkg-config \
python \
libssl-dev \
yasm \
linux-headers-raspi2 \
libomxil-bellagio-dev \
zlib1g-dev" && \
apt-get -yqq update && \
apt-get install -yq --no-install-recommends ${buildDeps}
## opencore-amr https://sourceforge.net/projects/opencore-amr/
RUN \
DIR=/tmp/opencore-amr && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sL https://versaweb.dl.sourceforge.net/project/opencore-amr/opencore-amr/opencore-amr-${OPENCOREAMR_VERSION}.tar.gz | \
tar -zx --strip-components=1 && \
./configure --prefix="${PREFIX}" --enable-shared && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
## x264 http://www.videolan.org/developers/x264.html
RUN \
DIR=/tmp/x264 && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sL https://download.videolan.org/pub/videolan/x264/snapshots/x264-snapshot-${X264_VERSION}.tar.bz2 | \
tar -jx --strip-components=1 && \
./configure --prefix="${PREFIX}" --enable-shared --enable-pic --disable-cli && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
### x265 http://x265.org/
RUN \
DIR=/tmp/x265 && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sL https://download.videolan.org/pub/videolan/x265/x265_${X265_VERSION}.tar.gz | \
tar -zx && \
cd x265_${X265_VERSION}/build/linux && \
sed -i "/-DEXTRA_LIB/ s/$/ -DCMAKE_INSTALL_PREFIX=\${PREFIX}/" multilib.sh && \
sed -i "/^cmake/ s/$/ -DENABLE_CLI=OFF/" multilib.sh && \
export CXXFLAGS="${CXXFLAGS} -fPIC" && \
./multilib.sh && \
make -C 8bit install && \
rm -rf ${DIR}
### libogg https://www.xiph.org/ogg/
RUN \
DIR=/tmp/ogg && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO http://downloads.xiph.org/releases/ogg/libogg-${OGG_VERSION}.tar.gz && \
echo ${OGG_SHA256SUM} | sha256sum --check && \
tar -zx --strip-components=1 -f libogg-${OGG_VERSION}.tar.gz && \
./configure --prefix="${PREFIX}" --enable-shared && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
### libopus https://www.opus-codec.org/
RUN \
DIR=/tmp/opus && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://archive.mozilla.org/pub/opus/opus-${OPUS_VERSION}.tar.gz && \
echo ${OPUS_SHA256SUM} | sha256sum --check && \
tar -zx --strip-components=1 -f opus-${OPUS_VERSION}.tar.gz && \
autoreconf -fiv && \
./configure --prefix="${PREFIX}" --enable-shared && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
### libvorbis https://xiph.org/vorbis/
RUN \
DIR=/tmp/vorbis && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO http://downloads.xiph.org/releases/vorbis/libvorbis-${VORBIS_VERSION}.tar.gz && \
echo ${VORBIS_SHA256SUM} | sha256sum --check && \
tar -zx --strip-components=1 -f libvorbis-${VORBIS_VERSION}.tar.gz && \
./configure --prefix="${PREFIX}" --with-ogg="${PREFIX}" --enable-shared && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
### libtheora http://www.theora.org/
RUN \
DIR=/tmp/theora && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO http://downloads.xiph.org/releases/theora/libtheora-${THEORA_VERSION}.tar.gz && \
echo ${THEORA_SHA256SUM} | sha256sum --check && \
tar -zx --strip-components=1 -f libtheora-${THEORA_VERSION}.tar.gz && \
curl -sL 'http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess;hb=HEAD' -o config.guess && \
curl -sL 'http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub;hb=HEAD' -o config.sub && \
./configure --prefix="${PREFIX}" --with-ogg="${PREFIX}" --enable-shared && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
### libvpx https://www.webmproject.org/code/
RUN \
DIR=/tmp/vpx && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sL https://codeload.github.com/webmproject/libvpx/tar.gz/v${VPX_VERSION} | \
tar -zx --strip-components=1 && \
./configure --prefix="${PREFIX}" --enable-vp8 --enable-vp9 --enable-vp9-highbitdepth --enable-pic --enable-shared \
--disable-debug --disable-examples --disable-docs --disable-install-bins && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
### libwebp https://developers.google.com/speed/webp/
RUN \
DIR=/tmp/vebp && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sL https://storage.googleapis.com/downloads.webmproject.org/releases/webp/libwebp-${WEBP_VERSION}.tar.gz | \
tar -zx --strip-components=1 && \
./configure --prefix="${PREFIX}" --enable-shared && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
### libmp3lame http://lame.sourceforge.net/
RUN \
DIR=/tmp/lame && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sL https://versaweb.dl.sourceforge.net/project/lame/lame/$(echo ${LAME_VERSION} | sed -e 's/[^0-9]*\([0-9]*\)[.]\([0-9]*\)[.]\([0-9]*\)\([0-9A-Za-z-]*\)/\1.\2/')/lame-${LAME_VERSION}.tar.gz | \
tar -zx --strip-components=1 && \
./configure --prefix="${PREFIX}" --bindir="${PREFIX}/bin" --enable-shared --enable-nasm --disable-frontend && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
### xvid https://www.xvid.com/
RUN \
DIR=/tmp/xvid && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO http://downloads.xvid.org/downloads/xvidcore-${XVID_VERSION}.tar.gz && \
echo ${XVID_SHA256SUM} | sha256sum --check && \
tar -zx -f xvidcore-${XVID_VERSION}.tar.gz && \
cd xvidcore/build/generic && \
./configure --prefix="${PREFIX}" --bindir="${PREFIX}/bin" && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
### fdk-aac https://github.com/mstorsjo/fdk-aac
RUN \
DIR=/tmp/fdk-aac && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sL https://github.com/mstorsjo/fdk-aac/archive/v${FDKAAC_VERSION}.tar.gz | \
tar -zx --strip-components=1 && \
autoreconf -fiv && \
./configure --prefix="${PREFIX}" --enable-shared --datadir="${DIR}" && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
## openjpeg https://github.com/uclouvain/openjpeg
RUN \
DIR=/tmp/openjpeg && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sL https://github.com/uclouvain/openjpeg/archive/v${OPENJPEG_VERSION}.tar.gz | \
tar -zx --strip-components=1 && \
export CFLAGS="${CFLAGS} -DPNG_ARM_NEON_OPT=0" && \
cmake -DBUILD_THIRDPARTY:BOOL=ON -DCMAKE_INSTALL_PREFIX="${PREFIX}" . && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
## freetype https://www.freetype.org/
RUN \
DIR=/tmp/freetype && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://download.savannah.gnu.org/releases/freetype/freetype-${FREETYPE_VERSION}.tar.gz && \
echo ${FREETYPE_SHA256SUM} | sha256sum --check && \
tar -zx --strip-components=1 -f freetype-${FREETYPE_VERSION}.tar.gz && \
./configure --prefix="${PREFIX}" --disable-static --enable-shared && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
## libvstab https://github.com/georgmartius/vid.stab
RUN \
DIR=/tmp/vid.stab && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://github.com/georgmartius/vid.stab/archive/v${LIBVIDSTAB_VERSION}.tar.gz && \
echo ${LIBVIDSTAB_SHA256SUM} | sha256sum --check && \
tar -zx --strip-components=1 -f v${LIBVIDSTAB_VERSION}.tar.gz && \
cmake -DCMAKE_INSTALL_PREFIX="${PREFIX}" . && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
## fridibi https://www.fribidi.org/
RUN \
DIR=/tmp/fribidi && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://github.com/fribidi/fribidi/archive/${FRIBIDI_VERSION}.tar.gz && \
echo ${FRIBIDI_SHA256SUM} | sha256sum --check && \
tar -zx --strip-components=1 -f ${FRIBIDI_VERSION}.tar.gz && \
sed -i 's/^SUBDIRS =.*/SUBDIRS=gen.tab charset lib bin/' Makefile.am && \
./bootstrap --no-config --auto && \
./configure --prefix="${PREFIX}" --disable-static --enable-shared && \
make -j1 && \
make -j $(nproc) install && \
rm -rf ${DIR}
## kvazaar https://github.com/ultravideo/kvazaar
RUN \
DIR=/tmp/kvazaar && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://github.com/ultravideo/kvazaar/archive/v${KVAZAAR_VERSION}.tar.gz && \
tar -zx --strip-components=1 -f v${KVAZAAR_VERSION}.tar.gz && \
./autogen.sh && \
./configure --prefix="${PREFIX}" --disable-static --enable-shared && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
RUN \
DIR=/tmp/aom && \
git clone --branch ${AOM_VERSION} --depth 1 https://aomedia.googlesource.com/aom ${DIR} ; \
cd ${DIR} ; \
rm -rf CMakeCache.txt CMakeFiles ; \
mkdir -p ./aom_build ; \
cd ./aom_build ; \
cmake -DCMAKE_INSTALL_PREFIX="${PREFIX}" -DBUILD_SHARED_LIBS=1 ..; \
make ; \
make install ; \
rm -rf ${DIR}
## libxcb (and supporting libraries) for screen capture https://xcb.freedesktop.org/
RUN \
DIR=/tmp/xorg-macros && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://www.x.org/archive//individual/util/util-macros-${XORG_MACROS_VERSION}.tar.gz && \
tar -zx --strip-components=1 -f util-macros-${XORG_MACROS_VERSION}.tar.gz && \
./configure --srcdir=${DIR} --prefix="${PREFIX}" && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
RUN \
DIR=/tmp/xproto && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://www.x.org/archive/individual/proto/xproto-${XPROTO_VERSION}.tar.gz && \
tar -zx --strip-components=1 -f xproto-${XPROTO_VERSION}.tar.gz && \
curl -sL 'http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess;hb=HEAD' -o config.guess && \
curl -sL 'http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub;hb=HEAD' -o config.sub && \
./configure --srcdir=${DIR} --prefix="${PREFIX}" && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
RUN \
DIR=/tmp/libXau && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://www.x.org/archive/individual/lib/libXau-${XAU_VERSION}.tar.gz && \
tar -zx --strip-components=1 -f libXau-${XAU_VERSION}.tar.gz && \
./configure --srcdir=${DIR} --prefix="${PREFIX}" && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
RUN \
DIR=/tmp/libpthread-stubs && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://xcb.freedesktop.org/dist/libpthread-stubs-${LIBPTHREAD_STUBS_VERSION}.tar.gz && \
tar -zx --strip-components=1 -f libpthread-stubs-${LIBPTHREAD_STUBS_VERSION}.tar.gz && \
./configure --prefix="${PREFIX}" && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
RUN \
DIR=/tmp/libxcb-proto && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://xcb.freedesktop.org/dist/xcb-proto-${XCBPROTO_VERSION}.tar.gz && \
tar -zx --strip-components=1 -f xcb-proto-${XCBPROTO_VERSION}.tar.gz && \
ACLOCAL_PATH="${PREFIX}/share/aclocal" ./autogen.sh && \
./configure --prefix="${PREFIX}" && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
RUN \
DIR=/tmp/libxcb && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://xcb.freedesktop.org/dist/libxcb-${LIBXCB_VERSION}.tar.gz && \
tar -zx --strip-components=1 -f libxcb-${LIBXCB_VERSION}.tar.gz && \
ACLOCAL_PATH="${PREFIX}/share/aclocal" ./autogen.sh && \
./configure --prefix="${PREFIX}" --disable-static --enable-shared && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
## libzmq https://github.com/zeromq/libzmq/
RUN \
DIR=/tmp/libzmq && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://github.com/zeromq/libzmq/archive/v${LIBZMQ_VERSION}.tar.gz && \
echo ${LIBZMQ_SHA256SUM} | sha256sum --check && \
tar -xz --strip-components=1 -f v${LIBZMQ_VERSION}.tar.gz && \
./autogen.sh && \
./configure --prefix="${PREFIX}" && \
make -j $(nproc) && \
make check && \
make -j $(nproc) install && \
rm -rf ${DIR}
RUN \
DIR=/tmp/rkmpp && \
mkdir -p ${DIR} && \
cd ${DIR} && \
git clone https://github.com/rockchip-linux/libdrm-rockchip && git clone https://github.com/rockchip-linux/mpp && \
cd libdrm-rockchip && bash autogen.sh && ./configure && make && make install && \
cd ../mpp && cmake -DRKPLATFORM=ON -DHAVE_DRM=ON && make -j6 && make install && \
rm -rf ${DIR}
## ffmpeg https://ffmpeg.org/
RUN \
DIR=/tmp/ffmpeg && mkdir -p ${DIR} && cd ${DIR} && \
curl -sLO https://ffmpeg.org/releases/ffmpeg-${FFMPEG_VERSION}.tar.bz2 && \
tar -jx --strip-components=1 -f ffmpeg-${FFMPEG_VERSION}.tar.bz2
RUN \
DIR=/tmp/ffmpeg && mkdir -p ${DIR} && cd ${DIR} && \
./configure \
--disable-debug \
--disable-doc \
--disable-ffplay \
--enable-shared \
--enable-avresample \
--enable-libopencore-amrnb \
--enable-libopencore-amrwb \
--enable-gpl \
--enable-libfreetype \
--enable-libvidstab \
--enable-libmp3lame \
--enable-libopus \
--enable-libtheora \
--enable-libvorbis \
--enable-libvpx \
--enable-libwebp \
--enable-libxcb \
--enable-libx265 \
--enable-libxvid \
--enable-libx264 \
--enable-nonfree \
--enable-openssl \
--enable-libfdk_aac \
--enable-postproc \
--enable-small \
--enable-version3 \
--enable-libzmq \
--extra-libs=-ldl \
--prefix="${PREFIX}" \
--enable-libopenjpeg \
--enable-libkvazaar \
--enable-libaom \
--extra-libs=-lpthread \
--enable-rkmpp \
--enable-libdrm \
# --enable-omx \
# --enable-omx-rpi \
# --enable-mmal \
--enable-v4l2_m2m \
--enable-neon \
--extra-cflags="-I${PREFIX}/include" \
--extra-ldflags="-L${PREFIX}/lib" && \
make -j $(nproc) && \
make -j $(nproc) install && \
make tools/zmqsend && cp tools/zmqsend ${PREFIX}/bin/ && \
make distclean && \
hash -r && \
cd tools && \
make qt-faststart && cp qt-faststart ${PREFIX}/bin/
## cleanup
RUN \
ldd ${PREFIX}/bin/ffmpeg | grep opt/ffmpeg | cut -d ' ' -f 3 | xargs -i cp {} /usr/local/lib/ && \
for lib in /usr/local/lib/*.so.*; do ln -s "${lib##*/}" "${lib%%.so.*}".so; done && \
cp ${PREFIX}/bin/* /usr/local/bin/ && \
cp -r ${PREFIX}/share/ffmpeg /usr/local/share/ && \
LD_LIBRARY_PATH=/usr/local/lib ffmpeg -buildconf && \
cp -r ${PREFIX}/include/libav* ${PREFIX}/include/libpostproc ${PREFIX}/include/libsw* /usr/local/include && \
mkdir -p /usr/local/lib/pkgconfig && \
for pc in ${PREFIX}/lib/pkgconfig/libav*.pc ${PREFIX}/lib/pkgconfig/libpostproc.pc ${PREFIX}/lib/pkgconfig/libsw*.pc; do \
sed "s:${PREFIX}:/usr/local:g" <"$pc" >/usr/local/lib/pkgconfig/"${pc##*/}"; \
done
FROM base AS release
ENV LD_LIBRARY_PATH=/usr/local/lib:/usr/local/lib64:/usr/lib:/usr/lib64:/lib:/lib64
CMD ["--help"]
ENTRYPOINT ["ffmpeg"]
COPY --from=build /usr/local /usr/local/
# Run ffmpeg with -c:v h264_v4l2m2m to enable HW accell for decoding on raspberry pi4 64-bit

@@ -1,468 +0,0 @@
# inspired by:
# https://github.com/collelog/ffmpeg/blob/master/4.3.1-alpine-rpi4-arm64v8.Dockerfile
# https://github.com/jrottenberg/ffmpeg/pull/158/files
# https://github.com/jrottenberg/ffmpeg/pull/239
FROM ubuntu:20.04 AS base
WORKDIR /tmp/workdir
ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get -yqq update && \
apt-get install -yq --no-install-recommends ca-certificates expat libgomp1 && \
apt-get autoremove -y && \
apt-get clean -y
FROM base as build
ENV FFMPEG_VERSION=4.3.2 \
AOM_VERSION=v1.0.0 \
FDKAAC_VERSION=0.1.5 \
FREETYPE_VERSION=2.5.5 \
FRIBIDI_VERSION=0.19.7 \
KVAZAAR_VERSION=1.2.0 \
LAME_VERSION=3.100 \
LIBPTHREAD_STUBS_VERSION=0.4 \
LIBVIDSTAB_VERSION=1.1.0 \
LIBXCB_VERSION=1.13.1 \
XCBPROTO_VERSION=1.13 \
OGG_VERSION=1.3.2 \
OPENCOREAMR_VERSION=0.1.5 \
OPUS_VERSION=1.2 \
OPENJPEG_VERSION=2.1.2 \
THEORA_VERSION=1.1.1 \
VORBIS_VERSION=1.3.5 \
VPX_VERSION=1.8.0 \
WEBP_VERSION=1.0.2 \
X264_VERSION=20170226-2245-stable \
X265_VERSION=3.1.1 \
XAU_VERSION=1.0.9 \
XORG_MACROS_VERSION=1.19.2 \
XPROTO_VERSION=7.0.31 \
XVID_VERSION=1.3.4 \
LIBZMQ_VERSION=4.3.2 \
SRC=/usr/local
ARG FREETYPE_SHA256SUM="5d03dd76c2171a7601e9ce10551d52d4471cf92cd205948e60289251daddffa8 freetype-2.5.5.tar.gz"
ARG FRIBIDI_SHA256SUM="3fc96fa9473bd31dcb5500bdf1aa78b337ba13eb8c301e7c28923fea982453a8 0.19.7.tar.gz"
ARG LIBVIDSTAB_SHA256SUM="14d2a053e56edad4f397be0cb3ef8eb1ec3150404ce99a426c4eb641861dc0bb v1.1.0.tar.gz"
ARG OGG_SHA256SUM="e19ee34711d7af328cb26287f4137e70630e7261b17cbe3cd41011d73a654692 libogg-1.3.2.tar.gz"
ARG OPUS_SHA256SUM="77db45a87b51578fbc49555ef1b10926179861d854eb2613207dc79d9ec0a9a9 opus-1.2.tar.gz"
ARG THEORA_SHA256SUM="40952956c47811928d1e7922cda3bc1f427eb75680c3c37249c91e949054916b libtheora-1.1.1.tar.gz"
ARG VORBIS_SHA256SUM="6efbcecdd3e5dfbf090341b485da9d176eb250d893e3eb378c428a2db38301ce libvorbis-1.3.5.tar.gz"
ARG XVID_SHA256SUM="4e9fd62728885855bc5007fe1be58df42e5e274497591fec37249e1052ae316f xvidcore-1.3.4.tar.gz"
ARG LIBZMQ_SHA256SUM="02ecc88466ae38cf2c8d79f09cfd2675ba299a439680b64ade733e26a349edeb v4.3.2.tar.gz"
ARG LD_LIBRARY_PATH=/opt/ffmpeg/lib
ARG MAKEFLAGS="-j2"
ARG PKG_CONFIG_PATH="/opt/ffmpeg/share/pkgconfig:/opt/ffmpeg/lib/pkgconfig:/opt/ffmpeg/lib64/pkgconfig"
ARG PREFIX=/opt/ffmpeg
ARG LD_LIBRARY_PATH="/opt/ffmpeg/lib:/opt/ffmpeg/lib64:/usr/lib64:/usr/lib:/lib64:/lib"
RUN buildDeps="autoconf \
automake \
cmake \
curl \
bzip2 \
libexpat1-dev \
g++ \
gcc \
git \
gperf \
libtool \
make \
nasm \
perl \
pkg-config \
python \
libssl-dev \
yasm \
libva-dev \
libmfx-dev \
zlib1g-dev" && \
apt-get -yqq update && \
apt-get install -yq --no-install-recommends ${buildDeps}
## opencore-amr https://sourceforge.net/projects/opencore-amr/
RUN \
DIR=/tmp/opencore-amr && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sL https://versaweb.dl.sourceforge.net/project/opencore-amr/opencore-amr/opencore-amr-${OPENCOREAMR_VERSION}.tar.gz | \
tar -zx --strip-components=1 && \
./configure --prefix="${PREFIX}" --enable-shared && \
make && \
make install && \
rm -rf ${DIR}
## x264 http://www.videolan.org/developers/x264.html
RUN \
DIR=/tmp/x264 && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sL https://download.videolan.org/pub/videolan/x264/snapshots/x264-snapshot-${X264_VERSION}.tar.bz2 | \
tar -jx --strip-components=1 && \
./configure --prefix="${PREFIX}" --enable-shared --enable-pic --disable-cli && \
make && \
make install && \
rm -rf ${DIR}
### x265 http://x265.org/
RUN \
DIR=/tmp/x265 && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sL https://download.videolan.org/pub/videolan/x265/x265_${X265_VERSION}.tar.gz | \
tar -zx && \
cd x265_${X265_VERSION}/build/linux && \
sed -i "/-DEXTRA_LIB/ s/$/ -DCMAKE_INSTALL_PREFIX=\${PREFIX}/" multilib.sh && \
sed -i "/^cmake/ s/$/ -DENABLE_CLI=OFF/" multilib.sh && \
./multilib.sh && \
make -C 8bit install && \
rm -rf ${DIR}
### libogg https://www.xiph.org/ogg/
RUN \
DIR=/tmp/ogg && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO http://downloads.xiph.org/releases/ogg/libogg-${OGG_VERSION}.tar.gz && \
echo ${OGG_SHA256SUM} | sha256sum --check && \
tar -zx --strip-components=1 -f libogg-${OGG_VERSION}.tar.gz && \
./configure --prefix="${PREFIX}" --enable-shared && \
make && \
make install && \
rm -rf ${DIR}
### libopus https://www.opus-codec.org/
RUN \
DIR=/tmp/opus && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://archive.mozilla.org/pub/opus/opus-${OPUS_VERSION}.tar.gz && \
echo ${OPUS_SHA256SUM} | sha256sum --check && \
tar -zx --strip-components=1 -f opus-${OPUS_VERSION}.tar.gz && \
autoreconf -fiv && \
./configure --prefix="${PREFIX}" --enable-shared && \
make && \
make install && \
rm -rf ${DIR}
### libvorbis https://xiph.org/vorbis/
RUN \
DIR=/tmp/vorbis && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO http://downloads.xiph.org/releases/vorbis/libvorbis-${VORBIS_VERSION}.tar.gz && \
echo ${VORBIS_SHA256SUM} | sha256sum --check && \
tar -zx --strip-components=1 -f libvorbis-${VORBIS_VERSION}.tar.gz && \
./configure --prefix="${PREFIX}" --with-ogg="${PREFIX}" --enable-shared && \
make && \
make install && \
rm -rf ${DIR}
### libtheora http://www.theora.org/
RUN \
DIR=/tmp/theora && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO http://downloads.xiph.org/releases/theora/libtheora-${THEORA_VERSION}.tar.gz && \
echo ${THEORA_SHA256SUM} | sha256sum --check && \
tar -zx --strip-components=1 -f libtheora-${THEORA_VERSION}.tar.gz && \
./configure --prefix="${PREFIX}" --with-ogg="${PREFIX}" --enable-shared && \
make && \
make install && \
rm -rf ${DIR}
### libvpx https://www.webmproject.org/code/
RUN \
DIR=/tmp/vpx && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sL https://codeload.github.com/webmproject/libvpx/tar.gz/v${VPX_VERSION} | \
tar -zx --strip-components=1 && \
./configure --prefix="${PREFIX}" --enable-vp8 --enable-vp9 --enable-vp9-highbitdepth --enable-pic --enable-shared \
--disable-debug --disable-examples --disable-docs --disable-install-bins && \
make && \
make install && \
rm -rf ${DIR}
### libwebp https://developers.google.com/speed/webp/
RUN \
DIR=/tmp/vebp && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sL https://storage.googleapis.com/downloads.webmproject.org/releases/webp/libwebp-${WEBP_VERSION}.tar.gz | \
tar -zx --strip-components=1 && \
./configure --prefix="${PREFIX}" --enable-shared && \
make && \
make install && \
rm -rf ${DIR}
### libmp3lame http://lame.sourceforge.net/
RUN \
DIR=/tmp/lame && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sL https://versaweb.dl.sourceforge.net/project/lame/lame/$(echo ${LAME_VERSION} | sed -e 's/[^0-9]*\([0-9]*\)[.]\([0-9]*\)[.]\([0-9]*\)\([0-9A-Za-z-]*\)/\1.\2/')/lame-${LAME_VERSION}.tar.gz | \
tar -zx --strip-components=1 && \
./configure --prefix="${PREFIX}" --bindir="${PREFIX}/bin" --enable-shared --enable-nasm --disable-frontend && \
make && \
make install && \
rm -rf ${DIR}
### xvid https://www.xvid.com/
RUN \
DIR=/tmp/xvid && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO http://downloads.xvid.org/downloads/xvidcore-${XVID_VERSION}.tar.gz && \
echo ${XVID_SHA256SUM} | sha256sum --check && \
tar -zx -f xvidcore-${XVID_VERSION}.tar.gz && \
cd xvidcore/build/generic && \
./configure --prefix="${PREFIX}" --bindir="${PREFIX}/bin" && \
make && \
make install && \
rm -rf ${DIR}
### fdk-aac https://github.com/mstorsjo/fdk-aac
RUN \
DIR=/tmp/fdk-aac && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sL https://github.com/mstorsjo/fdk-aac/archive/v${FDKAAC_VERSION}.tar.gz | \
tar -zx --strip-components=1 && \
autoreconf -fiv && \
./configure --prefix="${PREFIX}" --enable-shared --datadir="${DIR}" && \
make && \
make install && \
rm -rf ${DIR}
## openjpeg https://github.com/uclouvain/openjpeg
RUN \
DIR=/tmp/openjpeg && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sL https://github.com/uclouvain/openjpeg/archive/v${OPENJPEG_VERSION}.tar.gz | \
tar -zx --strip-components=1 && \
cmake -DBUILD_THIRDPARTY:BOOL=ON -DCMAKE_INSTALL_PREFIX="${PREFIX}" . && \
make && \
make install && \
rm -rf ${DIR}
## freetype https://www.freetype.org/
RUN \
DIR=/tmp/freetype && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://download.savannah.gnu.org/releases/freetype/freetype-${FREETYPE_VERSION}.tar.gz && \
echo ${FREETYPE_SHA256SUM} | sha256sum --check && \
tar -zx --strip-components=1 -f freetype-${FREETYPE_VERSION}.tar.gz && \
./configure --prefix="${PREFIX}" --disable-static --enable-shared && \
make && \
make install && \
rm -rf ${DIR}
## libvstab https://github.com/georgmartius/vid.stab
RUN \
DIR=/tmp/vid.stab && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://github.com/georgmartius/vid.stab/archive/v${LIBVIDSTAB_VERSION}.tar.gz && \
echo ${LIBVIDSTAB_SHA256SUM} | sha256sum --check && \
tar -zx --strip-components=1 -f v${LIBVIDSTAB_VERSION}.tar.gz && \
cmake -DCMAKE_INSTALL_PREFIX="${PREFIX}" . && \
make && \
make install && \
rm -rf ${DIR}
## fridibi https://www.fribidi.org/
RUN \
DIR=/tmp/fribidi && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://github.com/fribidi/fribidi/archive/${FRIBIDI_VERSION}.tar.gz && \
echo ${FRIBIDI_SHA256SUM} | sha256sum --check && \
tar -zx --strip-components=1 -f ${FRIBIDI_VERSION}.tar.gz && \
sed -i 's/^SUBDIRS =.*/SUBDIRS=gen.tab charset lib bin/' Makefile.am && \
./bootstrap --no-config --auto && \
./configure --prefix="${PREFIX}" --disable-static --enable-shared && \
make -j1 && \
make install && \
rm -rf ${DIR}
## kvazaar https://github.com/ultravideo/kvazaar
RUN \
DIR=/tmp/kvazaar && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://github.com/ultravideo/kvazaar/archive/v${KVAZAAR_VERSION}.tar.gz && \
tar -zx --strip-components=1 -f v${KVAZAAR_VERSION}.tar.gz && \
./autogen.sh && \
./configure --prefix="${PREFIX}" --disable-static --enable-shared && \
make && \
make install && \
rm -rf ${DIR}
RUN \
DIR=/tmp/aom && \
git clone --branch ${AOM_VERSION} --depth 1 https://aomedia.googlesource.com/aom ${DIR} ; \
cd ${DIR} ; \
rm -rf CMakeCache.txt CMakeFiles ; \
mkdir -p ./aom_build ; \
cd ./aom_build ; \
cmake -DCMAKE_INSTALL_PREFIX="${PREFIX}" -DBUILD_SHARED_LIBS=1 ..; \
make ; \
make install ; \
rm -rf ${DIR}
## libxcb (and supporting libraries) for screen capture https://xcb.freedesktop.org/
RUN \
DIR=/tmp/xorg-macros && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://www.x.org/archive//individual/util/util-macros-${XORG_MACROS_VERSION}.tar.gz && \
tar -zx --strip-components=1 -f util-macros-${XORG_MACROS_VERSION}.tar.gz && \
./configure --srcdir=${DIR} --prefix="${PREFIX}" && \
make && \
make install && \
rm -rf ${DIR}
RUN \
DIR=/tmp/xproto && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://www.x.org/archive/individual/proto/xproto-${XPROTO_VERSION}.tar.gz && \
tar -zx --strip-components=1 -f xproto-${XPROTO_VERSION}.tar.gz && \
./configure --srcdir=${DIR} --prefix="${PREFIX}" && \
make && \
make install && \
rm -rf ${DIR}
RUN \
DIR=/tmp/libXau && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://www.x.org/archive/individual/lib/libXau-${XAU_VERSION}.tar.gz && \
tar -zx --strip-components=1 -f libXau-${XAU_VERSION}.tar.gz && \
./configure --srcdir=${DIR} --prefix="${PREFIX}" && \
make && \
make install && \
rm -rf ${DIR}
RUN \
DIR=/tmp/libpthread-stubs && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://xcb.freedesktop.org/dist/libpthread-stubs-${LIBPTHREAD_STUBS_VERSION}.tar.gz && \
tar -zx --strip-components=1 -f libpthread-stubs-${LIBPTHREAD_STUBS_VERSION}.tar.gz && \
./configure --prefix="${PREFIX}" && \
make && \
make install && \
rm -rf ${DIR}
RUN \
DIR=/tmp/libxcb-proto && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://xcb.freedesktop.org/dist/xcb-proto-${XCBPROTO_VERSION}.tar.gz && \
tar -zx --strip-components=1 -f xcb-proto-${XCBPROTO_VERSION}.tar.gz && \
ACLOCAL_PATH="${PREFIX}/share/aclocal" ./autogen.sh && \
./configure --prefix="${PREFIX}" && \
make && \
make install && \
rm -rf ${DIR}
RUN \
DIR=/tmp/libxcb && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://xcb.freedesktop.org/dist/libxcb-${LIBXCB_VERSION}.tar.gz && \
tar -zx --strip-components=1 -f libxcb-${LIBXCB_VERSION}.tar.gz && \
ACLOCAL_PATH="${PREFIX}/share/aclocal" ./autogen.sh && \
./configure --prefix="${PREFIX}" --disable-static --enable-shared && \
make && \
make install && \
rm -rf ${DIR}
## libzmq https://github.com/zeromq/libzmq/
RUN \
DIR=/tmp/libzmq && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://github.com/zeromq/libzmq/archive/v${LIBZMQ_VERSION}.tar.gz && \
echo ${LIBZMQ_SHA256SUM} | sha256sum --check && \
tar -xz --strip-components=1 -f v${LIBZMQ_VERSION}.tar.gz && \
./autogen.sh && \
./configure --prefix="${PREFIX}" && \
make && \
make check && \
make install && \
rm -rf ${DIR}
## ffmpeg https://ffmpeg.org/
RUN \
DIR=/tmp/ffmpeg && mkdir -p ${DIR} && cd ${DIR} && \
curl -sLO https://ffmpeg.org/releases/ffmpeg-${FFMPEG_VERSION}.tar.bz2 && \
tar -jx --strip-components=1 -f ffmpeg-${FFMPEG_VERSION}.tar.bz2
RUN \
DIR=/tmp/ffmpeg && mkdir -p ${DIR} && cd ${DIR} && \
./configure \
--disable-debug \
--disable-doc \
--disable-ffplay \
--enable-shared \
--enable-avresample \
--enable-libopencore-amrnb \
--enable-libopencore-amrwb \
--enable-gpl \
--enable-libfreetype \
--enable-libvidstab \
--enable-libmfx \
--enable-libmp3lame \
--enable-libopus \
--enable-libtheora \
--enable-libvorbis \
--enable-libvpx \
--enable-libwebp \
--enable-libxcb \
--enable-libx265 \
--enable-libxvid \
--enable-libx264 \
--enable-nonfree \
--enable-openssl \
--enable-libfdk_aac \
--enable-postproc \
--enable-small \
--enable-version3 \
--enable-libzmq \
--extra-libs=-ldl \
--prefix="${PREFIX}" \
--enable-libopenjpeg \
--enable-libkvazaar \
--enable-libaom \
--extra-libs=-lpthread \
--enable-vaapi \
--extra-cflags="-I${PREFIX}/include" \
--extra-ldflags="-L${PREFIX}/lib" && \
make && \
make install && \
make tools/zmqsend && cp tools/zmqsend ${PREFIX}/bin/ && \
make distclean && \
hash -r && \
cd tools && \
make qt-faststart && cp qt-faststart ${PREFIX}/bin/
## cleanup
RUN \
ldd ${PREFIX}/bin/ffmpeg | grep opt/ffmpeg | cut -d ' ' -f 3 | xargs -i cp {} /usr/local/lib/ && \
for lib in /usr/local/lib/*.so.*; do ln -s "${lib##*/}" "${lib%%.so.*}".so; done && \
cp ${PREFIX}/bin/* /usr/local/bin/ && \
cp -r ${PREFIX}/share/ffmpeg /usr/local/share/ && \
LD_LIBRARY_PATH=/usr/local/lib ffmpeg -buildconf && \
cp -r ${PREFIX}/include/libav* ${PREFIX}/include/libpostproc ${PREFIX}/include/libsw* /usr/local/include && \
mkdir -p /usr/local/lib/pkgconfig && \
for pc in ${PREFIX}/lib/pkgconfig/libav*.pc ${PREFIX}/lib/pkgconfig/libpostproc.pc ${PREFIX}/lib/pkgconfig/libsw*.pc; do \
sed "s:${PREFIX}:/usr/local:g" <"$pc" >/usr/local/lib/pkgconfig/"${pc##*/}"; \
done
FROM base AS release
ENV LD_LIBRARY_PATH=/usr/local/lib:/usr/local/lib64:/usr/lib:/usr/lib64:/lib:/lib64
CMD ["--help"]
ENTRYPOINT ["ffmpeg"]
COPY --from=build /usr/local /usr/local/
RUN \
apt-get update -y && \
apt-get install -y --no-install-recommends libva-drm2 libva2 i965-va-driver mesa-va-drivers && \
rm -rf /var/lib/apt/lists/*

@@ -1,549 +0,0 @@
# inspired by https://github.com/jrottenberg/ffmpeg/blob/master/docker-images/4.3/ubuntu1804/Dockerfile
# ffmpeg - http://ffmpeg.org/download.html
#
# From https://trac.ffmpeg.org/wiki/CompilationGuide/Ubuntu
#
# https://hub.docker.com/r/jrottenberg/ffmpeg/
#
#
FROM nvidia/cuda:11.1-devel-ubuntu20.04 AS devel-base
ENV NVIDIA_DRIVER_CAPABILITIES compute,utility,video
ENV DEBIAN_FRONTEND=noninteractive
WORKDIR /tmp/workdir
RUN apt-get -yqq update && \
apt-get install -yq --no-install-recommends ca-certificates expat libgomp1 && \
apt-get autoremove -y && \
apt-get clean -y
FROM nvidia/cuda:11.1-runtime-ubuntu20.04 AS runtime-base
ENV NVIDIA_DRIVER_CAPABILITIES compute,utility,video
ENV DEBIAN_FRONTEND=noninteractive
WORKDIR /tmp/workdir
RUN apt-get -yqq update && \
apt-get install -yq --no-install-recommends ca-certificates expat libgomp1 libxcb-shape0-dev && \
apt-get autoremove -y && \
apt-get clean -y
FROM devel-base as build
ENV NVIDIA_HEADERS_VERSION=9.1.23.1
ENV FFMPEG_VERSION=4.3.2 \
AOM_VERSION=v1.0.0 \
FDKAAC_VERSION=0.1.5 \
FREETYPE_VERSION=2.5.5 \
FRIBIDI_VERSION=0.19.7 \
KVAZAAR_VERSION=1.2.0 \
LAME_VERSION=3.100 \
LIBPTHREAD_STUBS_VERSION=0.4 \
LIBVIDSTAB_VERSION=1.1.0 \
LIBXCB_VERSION=1.13.1 \
XCBPROTO_VERSION=1.13 \
OGG_VERSION=1.3.2 \
OPENCOREAMR_VERSION=0.1.5 \
OPUS_VERSION=1.2 \
OPENJPEG_VERSION=2.1.2 \
THEORA_VERSION=1.1.1 \
VORBIS_VERSION=1.3.5 \
VPX_VERSION=1.8.0 \
WEBP_VERSION=1.0.2 \
X264_VERSION=20170226-2245-stable \
X265_VERSION=3.1.1 \
XAU_VERSION=1.0.9 \
XORG_MACROS_VERSION=1.19.2 \
XPROTO_VERSION=7.0.31 \
XVID_VERSION=1.3.4 \
LIBZMQ_VERSION=4.3.2 \
LIBSRT_VERSION=1.4.1 \
LIBARIBB24_VERSION=1.0.3 \
LIBPNG_VERSION=1.6.9 \
SRC=/usr/local
ARG FREETYPE_SHA256SUM="5d03dd76c2171a7601e9ce10551d52d4471cf92cd205948e60289251daddffa8 freetype-2.5.5.tar.gz"
ARG FRIBIDI_SHA256SUM="3fc96fa9473bd31dcb5500bdf1aa78b337ba13eb8c301e7c28923fea982453a8 0.19.7.tar.gz"
ARG LIBVIDSTAB_SHA256SUM="14d2a053e56edad4f397be0cb3ef8eb1ec3150404ce99a426c4eb641861dc0bb v1.1.0.tar.gz"
ARG OGG_SHA256SUM="e19ee34711d7af328cb26287f4137e70630e7261b17cbe3cd41011d73a654692 libogg-1.3.2.tar.gz"
ARG OPUS_SHA256SUM="77db45a87b51578fbc49555ef1b10926179861d854eb2613207dc79d9ec0a9a9 opus-1.2.tar.gz"
ARG THEORA_SHA256SUM="40952956c47811928d1e7922cda3bc1f427eb75680c3c37249c91e949054916b libtheora-1.1.1.tar.gz"
ARG VORBIS_SHA256SUM="6efbcecdd3e5dfbf090341b485da9d176eb250d893e3eb378c428a2db38301ce libvorbis-1.3.5.tar.gz"
ARG XVID_SHA256SUM="4e9fd62728885855bc5007fe1be58df42e5e274497591fec37249e1052ae316f xvidcore-1.3.4.tar.gz"
ARG LIBZMQ_SHA256SUM="02ecc88466ae38cf2c8d79f09cfd2675ba299a439680b64ade733e26a349edeb v4.3.2.tar.gz"
ARG LIBARIBB24_SHA256SUM="f61560738926e57f9173510389634d8c06cabedfa857db4b28fb7704707ff128 v1.0.3.tar.gz"
ARG LD_LIBRARY_PATH=/opt/ffmpeg/lib
ARG MAKEFLAGS="-j2"
ARG PKG_CONFIG_PATH="/opt/ffmpeg/share/pkgconfig:/opt/ffmpeg/lib/pkgconfig:/opt/ffmpeg/lib64/pkgconfig"
ARG PREFIX=/opt/ffmpeg
ARG LD_LIBRARY_PATH="/opt/ffmpeg/lib:/opt/ffmpeg/lib64"
RUN buildDeps="autoconf \
automake \
cmake \
curl \
bzip2 \
libexpat1-dev \
g++ \
gcc \
git \
gperf \
libtool \
make \
nasm \
perl \
pkg-config \
python \
libssl-dev \
yasm \
zlib1g-dev" && \
apt-get -yqq update && \
apt-get install -yq --no-install-recommends ${buildDeps}
RUN \
DIR=/tmp/nv-codec-headers && \
git clone https://github.com/FFmpeg/nv-codec-headers ${DIR} && \
cd ${DIR} && \
git checkout n${NVIDIA_HEADERS_VERSION} && \
make PREFIX="${PREFIX}" && \
make install PREFIX="${PREFIX}" && \
rm -rf ${DIR}
## opencore-amr https://sourceforge.net/projects/opencore-amr/
RUN \
DIR=/tmp/opencore-amr && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sL https://versaweb.dl.sourceforge.net/project/opencore-amr/opencore-amr/opencore-amr-${OPENCOREAMR_VERSION}.tar.gz | \
tar -zx --strip-components=1 && \
./configure --prefix="${PREFIX}" --enable-shared && \
make && \
make install && \
rm -rf ${DIR}
## x264 http://www.videolan.org/developers/x264.html
RUN \
DIR=/tmp/x264 && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sL https://download.videolan.org/pub/videolan/x264/snapshots/x264-snapshot-${X264_VERSION}.tar.bz2 | \
tar -jx --strip-components=1 && \
./configure --prefix="${PREFIX}" --enable-shared --enable-pic --disable-cli && \
make && \
make install && \
rm -rf ${DIR}
### x265 http://x265.org/
RUN \
DIR=/tmp/x265 && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sL https://download.videolan.org/pub/videolan/x265/x265_${X265_VERSION}.tar.gz | \
tar -zx && \
cd x265_${X265_VERSION}/build/linux && \
sed -i "/-DEXTRA_LIB/ s/$/ -DCMAKE_INSTALL_PREFIX=\${PREFIX}/" multilib.sh && \
sed -i "/^cmake/ s/$/ -DENABLE_CLI=OFF/" multilib.sh && \
./multilib.sh && \
make -C 8bit install && \
rm -rf ${DIR}
### libogg https://www.xiph.org/ogg/
RUN \
DIR=/tmp/ogg && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO http://downloads.xiph.org/releases/ogg/libogg-${OGG_VERSION}.tar.gz && \
echo ${OGG_SHA256SUM} | sha256sum --check && \
tar -zx --strip-components=1 -f libogg-${OGG_VERSION}.tar.gz && \
./configure --prefix="${PREFIX}" --enable-shared && \
make && \
make install && \
rm -rf ${DIR}
### libopus https://www.opus-codec.org/
RUN \
DIR=/tmp/opus && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://archive.mozilla.org/pub/opus/opus-${OPUS_VERSION}.tar.gz && \
echo ${OPUS_SHA256SUM} | sha256sum --check && \
tar -zx --strip-components=1 -f opus-${OPUS_VERSION}.tar.gz && \
autoreconf -fiv && \
./configure --prefix="${PREFIX}" --enable-shared && \
make && \
make install && \
rm -rf ${DIR}
### libvorbis https://xiph.org/vorbis/
RUN \
DIR=/tmp/vorbis && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO http://downloads.xiph.org/releases/vorbis/libvorbis-${VORBIS_VERSION}.tar.gz && \
echo ${VORBIS_SHA256SUM} | sha256sum --check && \
tar -zx --strip-components=1 -f libvorbis-${VORBIS_VERSION}.tar.gz && \
./configure --prefix="${PREFIX}" --with-ogg="${PREFIX}" --enable-shared && \
make && \
make install && \
rm -rf ${DIR}
### libtheora http://www.theora.org/
RUN \
DIR=/tmp/theora && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO http://downloads.xiph.org/releases/theora/libtheora-${THEORA_VERSION}.tar.gz && \
echo ${THEORA_SHA256SUM} | sha256sum --check && \
tar -zx --strip-components=1 -f libtheora-${THEORA_VERSION}.tar.gz && \
./configure --prefix="${PREFIX}" --with-ogg="${PREFIX}" --enable-shared && \
make && \
make install && \
rm -rf ${DIR}
### libvpx https://www.webmproject.org/code/
RUN \
DIR=/tmp/vpx && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sL https://codeload.github.com/webmproject/libvpx/tar.gz/v${VPX_VERSION} | \
tar -zx --strip-components=1 && \
./configure --prefix="${PREFIX}" --enable-vp8 --enable-vp9 --enable-vp9-highbitdepth --enable-pic --enable-shared \
--disable-debug --disable-examples --disable-docs --disable-install-bins && \
make && \
make install && \
rm -rf ${DIR}
### libwebp https://developers.google.com/speed/webp/
RUN \
DIR=/tmp/vebp && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sL https://storage.googleapis.com/downloads.webmproject.org/releases/webp/libwebp-${WEBP_VERSION}.tar.gz | \
tar -zx --strip-components=1 && \
./configure --prefix="${PREFIX}" --enable-shared && \
make && \
make install && \
rm -rf ${DIR}
### libmp3lame http://lame.sourceforge.net/
RUN \
DIR=/tmp/lame && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sL https://versaweb.dl.sourceforge.net/project/lame/lame/$(echo ${LAME_VERSION} | sed -e 's/[^0-9]*\([0-9]*\)[.]\([0-9]*\)[.]\([0-9]*\)\([0-9A-Za-z-]*\)/\1.\2/')/lame-${LAME_VERSION}.tar.gz | \
tar -zx --strip-components=1 && \
./configure --prefix="${PREFIX}" --bindir="${PREFIX}/bin" --enable-shared --enable-nasm --disable-frontend && \
make && \
make install && \
rm -rf ${DIR}
### xvid https://www.xvid.com/
RUN \
DIR=/tmp/xvid && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO http://downloads.xvid.org/downloads/xvidcore-${XVID_VERSION}.tar.gz && \
echo ${XVID_SHA256SUM} | sha256sum --check && \
tar -zx -f xvidcore-${XVID_VERSION}.tar.gz && \
cd xvidcore/build/generic && \
./configure --prefix="${PREFIX}" --bindir="${PREFIX}/bin" && \
make && \
make install && \
rm -rf ${DIR}
### fdk-aac https://github.com/mstorsjo/fdk-aac
RUN \
DIR=/tmp/fdk-aac && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sL https://github.com/mstorsjo/fdk-aac/archive/v${FDKAAC_VERSION}.tar.gz | \
tar -zx --strip-components=1 && \
autoreconf -fiv && \
./configure --prefix="${PREFIX}" --enable-shared --datadir="${DIR}" && \
make && \
make install && \
rm -rf ${DIR}
## openjpeg https://github.com/uclouvain/openjpeg
RUN \
DIR=/tmp/openjpeg && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sL https://github.com/uclouvain/openjpeg/archive/v${OPENJPEG_VERSION}.tar.gz | \
tar -zx --strip-components=1 && \
cmake -DBUILD_THIRDPARTY:BOOL=ON -DCMAKE_INSTALL_PREFIX="${PREFIX}" . && \
make && \
make install && \
rm -rf ${DIR}
## freetype https://www.freetype.org/
RUN \
DIR=/tmp/freetype && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://download.savannah.gnu.org/releases/freetype/freetype-${FREETYPE_VERSION}.tar.gz && \
echo ${FREETYPE_SHA256SUM} | sha256sum --check && \
tar -zx --strip-components=1 -f freetype-${FREETYPE_VERSION}.tar.gz && \
./configure --prefix="${PREFIX}" --disable-static --enable-shared && \
make && \
make install && \
rm -rf ${DIR}
## libvidstab https://github.com/georgmartius/vid.stab
RUN \
DIR=/tmp/vid.stab && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://github.com/georgmartius/vid.stab/archive/v${LIBVIDSTAB_VERSION}.tar.gz && \
echo ${LIBVIDSTAB_SHA256SUM} | sha256sum --check && \
tar -zx --strip-components=1 -f v${LIBVIDSTAB_VERSION}.tar.gz && \
cmake -DCMAKE_INSTALL_PREFIX="${PREFIX}" . && \
make && \
make install && \
rm -rf ${DIR}
## fribidi https://www.fribidi.org/
RUN \
DIR=/tmp/fribidi && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://github.com/fribidi/fribidi/archive/${FRIBIDI_VERSION}.tar.gz && \
echo ${FRIBIDI_SHA256SUM} | sha256sum --check && \
tar -zx --strip-components=1 -f ${FRIBIDI_VERSION}.tar.gz && \
sed -i 's/^SUBDIRS =.*/SUBDIRS=gen.tab charset lib bin/' Makefile.am && \
./bootstrap --no-config --auto && \
./configure --prefix="${PREFIX}" --disable-static --enable-shared && \
make -j1 && \
make install && \
rm -rf ${DIR}
## kvazaar https://github.com/ultravideo/kvazaar
RUN \
DIR=/tmp/kvazaar && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://github.com/ultravideo/kvazaar/archive/v${KVAZAAR_VERSION}.tar.gz && \
tar -zx --strip-components=1 -f v${KVAZAAR_VERSION}.tar.gz && \
./autogen.sh && \
./configure --prefix="${PREFIX}" --disable-static --enable-shared && \
make && \
make install && \
rm -rf ${DIR}
RUN \
DIR=/tmp/aom && \
git clone --branch ${AOM_VERSION} --depth 1 https://aomedia.googlesource.com/aom ${DIR} ; \
cd ${DIR} ; \
rm -rf CMakeCache.txt CMakeFiles ; \
mkdir -p ./aom_build ; \
cd ./aom_build ; \
cmake -DCMAKE_INSTALL_PREFIX="${PREFIX}" -DBUILD_SHARED_LIBS=1 ..; \
make ; \
make install ; \
rm -rf ${DIR}
## libxcb (and supporting libraries) for screen capture https://xcb.freedesktop.org/
RUN \
DIR=/tmp/xorg-macros && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://www.x.org/archive//individual/util/util-macros-${XORG_MACROS_VERSION}.tar.gz && \
tar -zx --strip-components=1 -f util-macros-${XORG_MACROS_VERSION}.tar.gz && \
./configure --srcdir=${DIR} --prefix="${PREFIX}" && \
make && \
make install && \
rm -rf ${DIR}
RUN \
DIR=/tmp/xproto && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://www.x.org/archive/individual/proto/xproto-${XPROTO_VERSION}.tar.gz && \
tar -zx --strip-components=1 -f xproto-${XPROTO_VERSION}.tar.gz && \
./configure --srcdir=${DIR} --prefix="${PREFIX}" && \
make && \
make install && \
rm -rf ${DIR}
RUN \
DIR=/tmp/libXau && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://www.x.org/archive/individual/lib/libXau-${XAU_VERSION}.tar.gz && \
tar -zx --strip-components=1 -f libXau-${XAU_VERSION}.tar.gz && \
./configure --srcdir=${DIR} --prefix="${PREFIX}" && \
make && \
make install && \
rm -rf ${DIR}
RUN \
DIR=/tmp/libpthread-stubs && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://xcb.freedesktop.org/dist/libpthread-stubs-${LIBPTHREAD_STUBS_VERSION}.tar.gz && \
tar -zx --strip-components=1 -f libpthread-stubs-${LIBPTHREAD_STUBS_VERSION}.tar.gz && \
./configure --prefix="${PREFIX}" && \
make && \
make install && \
rm -rf ${DIR}
RUN \
DIR=/tmp/libxcb-proto && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://xcb.freedesktop.org/dist/xcb-proto-${XCBPROTO_VERSION}.tar.gz && \
tar -zx --strip-components=1 -f xcb-proto-${XCBPROTO_VERSION}.tar.gz && \
ACLOCAL_PATH="${PREFIX}/share/aclocal" ./autogen.sh && \
./configure --prefix="${PREFIX}" && \
make && \
make install && \
rm -rf ${DIR}
RUN \
DIR=/tmp/libxcb && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://xcb.freedesktop.org/dist/libxcb-${LIBXCB_VERSION}.tar.gz && \
tar -zx --strip-components=1 -f libxcb-${LIBXCB_VERSION}.tar.gz && \
ACLOCAL_PATH="${PREFIX}/share/aclocal" ./autogen.sh && \
./configure --prefix="${PREFIX}" --disable-static --enable-shared && \
make && \
make install && \
rm -rf ${DIR}
## libzmq https://github.com/zeromq/libzmq/
RUN \
DIR=/tmp/libzmq && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://github.com/zeromq/libzmq/archive/v${LIBZMQ_VERSION}.tar.gz && \
echo ${LIBZMQ_SHA256SUM} | sha256sum --check && \
tar -xz --strip-components=1 -f v${LIBZMQ_VERSION}.tar.gz && \
./autogen.sh && \
./configure --prefix="${PREFIX}" && \
make && \
make check && \
make install && \
rm -rf ${DIR}
## libsrt https://github.com/Haivision/srt
RUN \
DIR=/tmp/srt && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://github.com/Haivision/srt/archive/v${LIBSRT_VERSION}.tar.gz && \
tar -xz --strip-components=1 -f v${LIBSRT_VERSION}.tar.gz && \
cmake -DCMAKE_INSTALL_PREFIX="${PREFIX}" . && \
make && \
make install && \
rm -rf ${DIR}
## libpng
RUN \
DIR=/tmp/png && \
mkdir -p ${DIR} && \
cd ${DIR} && \
git clone https://git.code.sf.net/p/libpng/code ${DIR} -b v${LIBPNG_VERSION} --depth 1 && \
./autogen.sh && \
./configure --prefix="${PREFIX}" && \
make check && \
make install && \
rm -rf ${DIR}
## libaribb24
RUN \
DIR=/tmp/b24 && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://github.com/nkoriyama/aribb24/archive/v${LIBARIBB24_VERSION}.tar.gz && \
echo ${LIBARIBB24_SHA256SUM} | sha256sum --check && \
tar -xz --strip-components=1 -f v${LIBARIBB24_VERSION}.tar.gz && \
autoreconf -fiv && \
./configure CFLAGS="-I${PREFIX}/include -fPIC" --prefix="${PREFIX}" && \
make && \
make install && \
rm -rf ${DIR}
## ffmpeg https://ffmpeg.org/
RUN \
DIR=/tmp/ffmpeg && mkdir -p ${DIR} && cd ${DIR} && \
curl -sLO https://ffmpeg.org/releases/ffmpeg-${FFMPEG_VERSION}.tar.bz2 && \
tar -jx --strip-components=1 -f ffmpeg-${FFMPEG_VERSION}.tar.bz2
RUN \
DIR=/tmp/ffmpeg && mkdir -p ${DIR} && cd ${DIR} && \
./configure \
--disable-debug \
--disable-doc \
--disable-ffplay \
--enable-shared \
--enable-avresample \
--enable-libopencore-amrnb \
--enable-libopencore-amrwb \
--enable-gpl \
--enable-libfreetype \
--enable-libvidstab \
--enable-libmp3lame \
--enable-libopus \
--enable-libtheora \
--enable-libvorbis \
--enable-libvpx \
--enable-libwebp \
--enable-libxcb \
--enable-libx265 \
--enable-libxvid \
--enable-libx264 \
--enable-nonfree \
--enable-openssl \
--enable-libfdk_aac \
--enable-postproc \
--enable-small \
--enable-version3 \
--enable-libzmq \
--extra-libs=-ldl \
--prefix="${PREFIX}" \
--enable-libopenjpeg \
--enable-libkvazaar \
--enable-libaom \
--extra-libs=-lpthread \
--enable-libsrt \
--enable-libaribb24 \
--enable-nvenc \
--enable-cuda \
--enable-cuvid \
--enable-libnpp \
--extra-cflags="-I${PREFIX}/include -I${PREFIX}/include/ffnvcodec -I/usr/local/cuda/include/" \
--extra-ldflags="-L${PREFIX}/lib -L/usr/local/cuda/lib64 -L/usr/local/cuda/lib32/" && \
make && \
make install && \
make tools/zmqsend && cp tools/zmqsend ${PREFIX}/bin/ && \
make distclean && \
hash -r && \
cd tools && \
make qt-faststart && cp qt-faststart ${PREFIX}/bin/
## cleanup
RUN \
LD_LIBRARY_PATH="${PREFIX}/lib:${PREFIX}/lib64:${LD_LIBRARY_PATH}" ldd ${PREFIX}/bin/ffmpeg | grep opt/ffmpeg | cut -d ' ' -f 3 | xargs -i cp {} /usr/local/lib/ && \
for lib in /usr/local/lib/*.so.*; do ln -s "${lib##*/}" "${lib%%.so.*}".so; done && \
cp ${PREFIX}/bin/* /usr/local/bin/ && \
cp -r ${PREFIX}/share/* /usr/local/share/ && \
LD_LIBRARY_PATH=/usr/local/lib ffmpeg -buildconf && \
cp -r ${PREFIX}/include/libav* ${PREFIX}/include/libpostproc ${PREFIX}/include/libsw* /usr/local/include && \
mkdir -p /usr/local/lib/pkgconfig && \
for pc in ${PREFIX}/lib/pkgconfig/libav*.pc ${PREFIX}/lib/pkgconfig/libpostproc.pc ${PREFIX}/lib/pkgconfig/libsw*.pc; do \
sed "s:${PREFIX}:/usr/local:g; s:/lib64:/lib:g" <"$pc" >/usr/local/lib/pkgconfig/"${pc##*/}"; \
done
FROM runtime-base AS release
ENV LD_LIBRARY_PATH=/usr/local/lib:/usr/local/lib64
CMD ["--help"]
ENTRYPOINT ["ffmpeg"]
# copy only needed files, without copying nvidia dev files
COPY --from=build /usr/local/bin /usr/local/bin/
COPY --from=build /usr/local/share /usr/local/share/
COPY --from=build /usr/local/lib /usr/local/lib/
COPY --from=build /usr/local/include /usr/local/include/
# Let's make sure the app built correctly
# Convenient to verify on https://hub.docker.com/r/jrottenberg/ffmpeg/builds/ console output

View File

@ -1,490 +0,0 @@
# inspired by:
# https://github.com/collelog/ffmpeg/blob/master/4.3.1-alpine-rpi4-arm64v8.Dockerfile
# https://github.com/mmastrac/ffmpeg-omx-rpi-docker/blob/master/Dockerfile
# https://github.com/jrottenberg/ffmpeg/pull/158/files
# https://github.com/jrottenberg/ffmpeg/pull/239
FROM ubuntu:20.04 AS base
WORKDIR /tmp/workdir
ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get -yqq update && \
apt-get install -yq --no-install-recommends ca-certificates expat libgomp1 && \
apt-get autoremove -y && \
apt-get clean -y
FROM base as build
ENV FFMPEG_VERSION=4.3.2 \
AOM_VERSION=v1.0.0 \
FDKAAC_VERSION=0.1.5 \
FREETYPE_VERSION=2.5.5 \
FRIBIDI_VERSION=0.19.7 \
KVAZAAR_VERSION=1.2.0 \
LAME_VERSION=3.100 \
LIBPTHREAD_STUBS_VERSION=0.4 \
LIBVIDSTAB_VERSION=1.1.0 \
LIBXCB_VERSION=1.13.1 \
XCBPROTO_VERSION=1.13 \
OGG_VERSION=1.3.2 \
OPENCOREAMR_VERSION=0.1.5 \
OPUS_VERSION=1.2 \
OPENJPEG_VERSION=2.1.2 \
THEORA_VERSION=1.1.1 \
VORBIS_VERSION=1.3.5 \
VPX_VERSION=1.8.0 \
WEBP_VERSION=1.0.2 \
X264_VERSION=20170226-2245-stable \
X265_VERSION=3.1.1 \
XAU_VERSION=1.0.9 \
XORG_MACROS_VERSION=1.19.2 \
XPROTO_VERSION=7.0.31 \
XVID_VERSION=1.3.4 \
LIBZMQ_VERSION=4.3.3 \
SRC=/usr/local
ARG FREETYPE_SHA256SUM="5d03dd76c2171a7601e9ce10551d52d4471cf92cd205948e60289251daddffa8 freetype-2.5.5.tar.gz"
ARG FRIBIDI_SHA256SUM="3fc96fa9473bd31dcb5500bdf1aa78b337ba13eb8c301e7c28923fea982453a8 0.19.7.tar.gz"
ARG LIBVIDSTAB_SHA256SUM="14d2a053e56edad4f397be0cb3ef8eb1ec3150404ce99a426c4eb641861dc0bb v1.1.0.tar.gz"
ARG OGG_SHA256SUM="e19ee34711d7af328cb26287f4137e70630e7261b17cbe3cd41011d73a654692 libogg-1.3.2.tar.gz"
ARG OPUS_SHA256SUM="77db45a87b51578fbc49555ef1b10926179861d854eb2613207dc79d9ec0a9a9 opus-1.2.tar.gz"
ARG THEORA_SHA256SUM="40952956c47811928d1e7922cda3bc1f427eb75680c3c37249c91e949054916b libtheora-1.1.1.tar.gz"
ARG VORBIS_SHA256SUM="6efbcecdd3e5dfbf090341b485da9d176eb250d893e3eb378c428a2db38301ce libvorbis-1.3.5.tar.gz"
ARG XVID_SHA256SUM="4e9fd62728885855bc5007fe1be58df42e5e274497591fec37249e1052ae316f xvidcore-1.3.4.tar.gz"
ARG LD_LIBRARY_PATH=/opt/ffmpeg/lib
ARG MAKEFLAGS="-j2"
ARG PKG_CONFIG_PATH="/opt/ffmpeg/share/pkgconfig:/opt/ffmpeg/lib/pkgconfig:/opt/ffmpeg/lib64/pkgconfig:/opt/vc/lib/pkgconfig"
ARG PREFIX=/opt/ffmpeg
ARG LD_LIBRARY_PATH="/opt/ffmpeg/lib:/opt/ffmpeg/lib64:/usr/lib64:/usr/lib:/lib64:/lib:/opt/vc/lib"
RUN buildDeps="autoconf \
automake \
cmake \
curl \
bzip2 \
libexpat1-dev \
g++ \
gcc \
git \
gperf \
libtool \
make \
nasm \
perl \
pkg-config \
python \
sudo \
libssl-dev \
yasm \
linux-headers-raspi2 \
libomxil-bellagio-dev \
libx265-dev \
libaom-dev \
zlib1g-dev" && \
apt-get -yqq update && \
apt-get install -yq --no-install-recommends ${buildDeps}
## opencore-amr https://sourceforge.net/projects/opencore-amr/
RUN \
DIR=/tmp/opencore-amr && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sL https://versaweb.dl.sourceforge.net/project/opencore-amr/opencore-amr/opencore-amr-${OPENCOREAMR_VERSION}.tar.gz | \
tar -zx --strip-components=1 && \
./configure --prefix="${PREFIX}" --enable-shared && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
## x264 http://www.videolan.org/developers/x264.html
RUN \
DIR=/tmp/x264 && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sL https://download.videolan.org/pub/videolan/x264/snapshots/x264-snapshot-${X264_VERSION}.tar.bz2 | \
tar -jx --strip-components=1 && \
./configure --prefix="${PREFIX}" --enable-shared --enable-pic --disable-cli && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
# ### x265 http://x265.org/
# RUN \
# DIR=/tmp/x265 && \
# mkdir -p ${DIR} && \
# cd ${DIR} && \
# curl -sL https://download.videolan.org/pub/videolan/x265/x265_${X265_VERSION}.tar.gz | \
# tar -zx && \
# cd x265_${X265_VERSION}/build/linux && \
# sed -i "/-DEXTRA_LIB/ s/$/ -DCMAKE_INSTALL_PREFIX=\${PREFIX}/" multilib.sh && \
# sed -i "/^cmake/ s/$/ -DENABLE_CLI=OFF/" multilib.sh && \
# # export CXXFLAGS="${CXXFLAGS} -fPIC" && \
# ./multilib.sh && \
# make -C 8bit install && \
# rm -rf ${DIR}
### libogg https://www.xiph.org/ogg/
RUN \
DIR=/tmp/ogg && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO http://downloads.xiph.org/releases/ogg/libogg-${OGG_VERSION}.tar.gz && \
echo ${OGG_SHA256SUM} | sha256sum --check && \
tar -zx --strip-components=1 -f libogg-${OGG_VERSION}.tar.gz && \
./configure --prefix="${PREFIX}" --enable-shared && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
### libopus https://www.opus-codec.org/
RUN \
DIR=/tmp/opus && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://archive.mozilla.org/pub/opus/opus-${OPUS_VERSION}.tar.gz && \
echo ${OPUS_SHA256SUM} | sha256sum --check && \
tar -zx --strip-components=1 -f opus-${OPUS_VERSION}.tar.gz && \
autoreconf -fiv && \
./configure --prefix="${PREFIX}" --enable-shared && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
### libvorbis https://xiph.org/vorbis/
RUN \
DIR=/tmp/vorbis && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO http://downloads.xiph.org/releases/vorbis/libvorbis-${VORBIS_VERSION}.tar.gz && \
echo ${VORBIS_SHA256SUM} | sha256sum --check && \
tar -zx --strip-components=1 -f libvorbis-${VORBIS_VERSION}.tar.gz && \
./configure --prefix="${PREFIX}" --with-ogg="${PREFIX}" --enable-shared && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
### libtheora http://www.theora.org/
RUN \
DIR=/tmp/theora && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO http://downloads.xiph.org/releases/theora/libtheora-${THEORA_VERSION}.tar.gz && \
echo ${THEORA_SHA256SUM} | sha256sum --check && \
tar -zx --strip-components=1 -f libtheora-${THEORA_VERSION}.tar.gz && \
curl -sL 'http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess;hb=HEAD' -o config.guess && \
curl -sL 'http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub;hb=HEAD' -o config.sub && \
./configure --prefix="${PREFIX}" --with-ogg="${PREFIX}" --enable-shared && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
### libvpx https://www.webmproject.org/code/
RUN \
DIR=/tmp/vpx && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sL https://codeload.github.com/webmproject/libvpx/tar.gz/v${VPX_VERSION} | \
tar -zx --strip-components=1 && \
./configure --prefix="${PREFIX}" --enable-vp8 --enable-vp9 --enable-vp9-highbitdepth --enable-pic --enable-shared \
--disable-debug --disable-examples --disable-docs --disable-install-bins && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
### libwebp https://developers.google.com/speed/webp/
RUN \
DIR=/tmp/vebp && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sL https://storage.googleapis.com/downloads.webmproject.org/releases/webp/libwebp-${WEBP_VERSION}.tar.gz | \
tar -zx --strip-components=1 && \
./configure --prefix="${PREFIX}" --enable-shared && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
### libmp3lame http://lame.sourceforge.net/
RUN \
DIR=/tmp/lame && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sL https://versaweb.dl.sourceforge.net/project/lame/lame/$(echo ${LAME_VERSION} | sed -e 's/[^0-9]*\([0-9]*\)[.]\([0-9]*\)[.]\([0-9]*\)\([0-9A-Za-z-]*\)/\1.\2/')/lame-${LAME_VERSION}.tar.gz | \
tar -zx --strip-components=1 && \
./configure --prefix="${PREFIX}" --bindir="${PREFIX}/bin" --enable-shared --enable-nasm --disable-frontend && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
### xvid https://www.xvid.com/
RUN \
DIR=/tmp/xvid && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO http://downloads.xvid.org/downloads/xvidcore-${XVID_VERSION}.tar.gz && \
echo ${XVID_SHA256SUM} | sha256sum --check && \
tar -zx -f xvidcore-${XVID_VERSION}.tar.gz && \
cd xvidcore/build/generic && \
./configure --prefix="${PREFIX}" --bindir="${PREFIX}/bin" && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
### fdk-aac https://github.com/mstorsjo/fdk-aac
RUN \
DIR=/tmp/fdk-aac && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sL https://github.com/mstorsjo/fdk-aac/archive/v${FDKAAC_VERSION}.tar.gz | \
tar -zx --strip-components=1 && \
autoreconf -fiv && \
./configure --prefix="${PREFIX}" --enable-shared --datadir="${DIR}" && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
## openjpeg https://github.com/uclouvain/openjpeg
RUN \
DIR=/tmp/openjpeg && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sL https://github.com/uclouvain/openjpeg/archive/v${OPENJPEG_VERSION}.tar.gz | \
tar -zx --strip-components=1 && \
export CFLAGS="${CFLAGS} -DPNG_ARM_NEON_OPT=0" && \
cmake -DBUILD_THIRDPARTY:BOOL=ON -DCMAKE_INSTALL_PREFIX="${PREFIX}" . && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
## freetype https://www.freetype.org/
RUN \
DIR=/tmp/freetype && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://download.savannah.gnu.org/releases/freetype/freetype-${FREETYPE_VERSION}.tar.gz && \
echo ${FREETYPE_SHA256SUM} | sha256sum --check && \
tar -zx --strip-components=1 -f freetype-${FREETYPE_VERSION}.tar.gz && \
./configure --prefix="${PREFIX}" --disable-static --enable-shared && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
## libvidstab https://github.com/georgmartius/vid.stab
RUN \
DIR=/tmp/vid.stab && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://github.com/georgmartius/vid.stab/archive/v${LIBVIDSTAB_VERSION}.tar.gz && \
echo ${LIBVIDSTAB_SHA256SUM} | sha256sum --check && \
tar -zx --strip-components=1 -f v${LIBVIDSTAB_VERSION}.tar.gz && \
cmake -DCMAKE_INSTALL_PREFIX="${PREFIX}" . && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
## fribidi https://www.fribidi.org/
RUN \
DIR=/tmp/fribidi && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://github.com/fribidi/fribidi/archive/${FRIBIDI_VERSION}.tar.gz && \
echo ${FRIBIDI_SHA256SUM} | sha256sum --check && \
tar -zx --strip-components=1 -f ${FRIBIDI_VERSION}.tar.gz && \
sed -i 's/^SUBDIRS =.*/SUBDIRS=gen.tab charset lib bin/' Makefile.am && \
./bootstrap --no-config --auto && \
./configure --prefix="${PREFIX}" --disable-static --enable-shared && \
make -j1 && \
make -j $(nproc) install && \
rm -rf ${DIR}
## kvazaar https://github.com/ultravideo/kvazaar
RUN \
DIR=/tmp/kvazaar && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://github.com/ultravideo/kvazaar/archive/v${KVAZAAR_VERSION}.tar.gz && \
tar -zx --strip-components=1 -f v${KVAZAAR_VERSION}.tar.gz && \
./autogen.sh && \
./configure --prefix="${PREFIX}" --disable-static --enable-shared && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
# RUN \
# DIR=/tmp/aom && \
# git clone --branch ${AOM_VERSION} --depth 1 https://aomedia.googlesource.com/aom ${DIR} ; \
# cd ${DIR} ; \
# rm -rf CMakeCache.txt CMakeFiles ; \
# mkdir -p ./aom_build ; \
# cd ./aom_build ; \
# cmake -DCMAKE_INSTALL_PREFIX="${PREFIX}" -DBUILD_SHARED_LIBS=1 ..; \
# make ; \
# make install ; \
# rm -rf ${DIR}
## libxcb (and supporting libraries) for screen capture https://xcb.freedesktop.org/
RUN \
DIR=/tmp/xorg-macros && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://www.x.org/archive//individual/util/util-macros-${XORG_MACROS_VERSION}.tar.gz && \
tar -zx --strip-components=1 -f util-macros-${XORG_MACROS_VERSION}.tar.gz && \
./configure --srcdir=${DIR} --prefix="${PREFIX}" && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
RUN \
DIR=/tmp/xproto && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://www.x.org/archive/individual/proto/xproto-${XPROTO_VERSION}.tar.gz && \
tar -zx --strip-components=1 -f xproto-${XPROTO_VERSION}.tar.gz && \
curl -sL 'http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess;hb=HEAD' -o config.guess && \
curl -sL 'http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub;hb=HEAD' -o config.sub && \
./configure --srcdir=${DIR} --prefix="${PREFIX}" && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
RUN \
DIR=/tmp/libXau && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://www.x.org/archive/individual/lib/libXau-${XAU_VERSION}.tar.gz && \
tar -zx --strip-components=1 -f libXau-${XAU_VERSION}.tar.gz && \
./configure --srcdir=${DIR} --prefix="${PREFIX}" && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
RUN \
DIR=/tmp/libpthread-stubs && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://xcb.freedesktop.org/dist/libpthread-stubs-${LIBPTHREAD_STUBS_VERSION}.tar.gz && \
tar -zx --strip-components=1 -f libpthread-stubs-${LIBPTHREAD_STUBS_VERSION}.tar.gz && \
./configure --prefix="${PREFIX}" && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
RUN \
DIR=/tmp/libxcb-proto && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://xcb.freedesktop.org/dist/xcb-proto-${XCBPROTO_VERSION}.tar.gz && \
tar -zx --strip-components=1 -f xcb-proto-${XCBPROTO_VERSION}.tar.gz && \
ACLOCAL_PATH="${PREFIX}/share/aclocal" ./autogen.sh && \
./configure --prefix="${PREFIX}" && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
RUN \
DIR=/tmp/libxcb && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://xcb.freedesktop.org/dist/libxcb-${LIBXCB_VERSION}.tar.gz && \
tar -zx --strip-components=1 -f libxcb-${LIBXCB_VERSION}.tar.gz && \
ACLOCAL_PATH="${PREFIX}/share/aclocal" ./autogen.sh && \
./configure --prefix="${PREFIX}" --disable-static --enable-shared && \
make -j $(nproc) && \
make -j $(nproc) install && \
rm -rf ${DIR}
## libzmq https://github.com/zeromq/libzmq/
RUN \
DIR=/tmp/libzmq && \
mkdir -p ${DIR} && \
cd ${DIR} && \
curl -sLO https://github.com/zeromq/libzmq/archive/v${LIBZMQ_VERSION}.tar.gz && \
tar -xz --strip-components=1 -f v${LIBZMQ_VERSION}.tar.gz && \
./autogen.sh && \
./configure --prefix="${PREFIX}" && \
make -j $(nproc) && \
# make check && \
make -j $(nproc) install && \
rm -rf ${DIR}
## userland https://github.com/raspberrypi/userland
RUN \
DIR=/tmp/userland && \
mkdir -p ${DIR} && \
cd ${DIR} && \
git clone --depth 1 https://github.com/raspberrypi/userland.git . && \
./buildme && \
rm -rf ${DIR}
## ffmpeg https://ffmpeg.org/
RUN \
DIR=/tmp/ffmpeg && mkdir -p ${DIR} && cd ${DIR} && \
curl -sLO https://ffmpeg.org/releases/ffmpeg-${FFMPEG_VERSION}.tar.bz2 && \
tar -jx --strip-components=1 -f ffmpeg-${FFMPEG_VERSION}.tar.bz2
RUN \
DIR=/tmp/ffmpeg && mkdir -p ${DIR} && cd ${DIR} && \
./configure \
--disable-debug \
--disable-doc \
--disable-ffplay \
--enable-shared \
--enable-avresample \
--enable-libopencore-amrnb \
--enable-libopencore-amrwb \
--enable-gpl \
--enable-libfreetype \
--enable-libvidstab \
--enable-libmp3lame \
--enable-libopus \
--enable-libtheora \
--enable-libvorbis \
--enable-libvpx \
--enable-libwebp \
--enable-libxcb \
--enable-libx265 \
--enable-libxvid \
--enable-libx264 \
--enable-nonfree \
--enable-openssl \
--enable-libfdk_aac \
--enable-postproc \
--enable-small \
--enable-version3 \
--enable-libzmq \
--extra-libs=-ldl \
--prefix="${PREFIX}" \
--enable-libopenjpeg \
--enable-libkvazaar \
--enable-libaom \
--extra-libs=-lpthread \
--enable-omx \
--enable-omx-rpi \
--enable-mmal \
--enable-v4l2_m2m \
--enable-neon \
--extra-cflags="-I${PREFIX}/include" \
--extra-ldflags="-L${PREFIX}/lib" && \
make -j $(nproc) && \
make -j $(nproc) install && \
make tools/zmqsend && cp tools/zmqsend ${PREFIX}/bin/ && \
make distclean && \
hash -r && \
cd tools && \
make qt-faststart && cp qt-faststart ${PREFIX}/bin/
## cleanup
RUN \
ldd ${PREFIX}/bin/ffmpeg | grep opt/ffmpeg | cut -d ' ' -f 3 | xargs -i cp {} /usr/local/lib/ && \
# copy userland lib too
ldd ${PREFIX}/bin/ffmpeg | grep opt/vc | cut -d ' ' -f 3 | xargs -i cp {} /usr/local/lib/ && \
for lib in /usr/local/lib/*.so.*; do ln -s "${lib##*/}" "${lib%%.so.*}".so; done && \
cp ${PREFIX}/bin/* /usr/local/bin/ && \
cp -r ${PREFIX}/share/ffmpeg /usr/local/share/ && \
LD_LIBRARY_PATH=/usr/local/lib ffmpeg -buildconf && \
cp -r ${PREFIX}/include/libav* ${PREFIX}/include/libpostproc ${PREFIX}/include/libsw* /usr/local/include && \
mkdir -p /usr/local/lib/pkgconfig && \
for pc in ${PREFIX}/lib/pkgconfig/libav*.pc ${PREFIX}/lib/pkgconfig/libpostproc.pc ${PREFIX}/lib/pkgconfig/libsw*.pc; do \
sed "s:${PREFIX}:/usr/local:g" <"$pc" >/usr/local/lib/pkgconfig/"${pc##*/}"; \
done
FROM base AS release
ENV LD_LIBRARY_PATH=/usr/local/lib:/usr/local/lib64:/usr/lib:/usr/lib64:/lib:/lib64
RUN \
apt-get -yqq update && \
apt-get install -yq --no-install-recommends libx265-dev libaom-dev && \
apt-get autoremove -y && \
apt-get clean -y
CMD ["--help"]
ENTRYPOINT ["ffmpeg"]
COPY --from=build /usr/local /usr/local/

View File

@ -1,52 +0,0 @@
FROM ubuntu:20.04 AS base
ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get -yqq update && \
apt-get install -yq --no-install-recommends ca-certificates expat libgomp1 && \
apt-get autoremove -y && \
apt-get clean -y
FROM base as build
ARG NGINX_VERSION=1.18.0
ARG VOD_MODULE_VERSION=1.28
ARG SECURE_TOKEN_MODULE_VERSION=1.4
ARG RTMP_MODULE_VERSION=1.2.1
RUN cp /etc/apt/sources.list /etc/apt/sources.list~ \
&& sed -Ei 's/^# deb-src /deb-src /' /etc/apt/sources.list \
&& apt-get update
RUN apt-get -yqq build-dep nginx
RUN apt-get -yqq install --no-install-recommends curl \
&& mkdir /tmp/nginx \
&& curl -sL https://nginx.org/download/nginx-${NGINX_VERSION}.tar.gz | tar -C /tmp/nginx -zx --strip-components=1 \
&& mkdir /tmp/nginx-vod-module \
&& curl -sL https://github.com/kaltura/nginx-vod-module/archive/refs/tags/${VOD_MODULE_VERSION}.tar.gz | tar -C /tmp/nginx-vod-module -zx --strip-components=1 \
# Patch MAX_CLIPS to allow more clips to be added than the default 128
&& sed -i 's/MAX_CLIPS (128)/MAX_CLIPS (1080)/g' /tmp/nginx-vod-module/vod/media_set.h \
&& mkdir /tmp/nginx-secure-token-module \
&& curl -sL https://github.com/kaltura/nginx-secure-token-module/archive/refs/tags/${SECURE_TOKEN_MODULE_VERSION}.tar.gz | tar -C /tmp/nginx-secure-token-module -zx --strip-components=1 \
&& mkdir /tmp/nginx-rtmp-module \
&& curl -sL https://github.com/arut/nginx-rtmp-module/archive/refs/tags/v${RTMP_MODULE_VERSION}.tar.gz | tar -C /tmp/nginx-rtmp-module -zx --strip-components=1
WORKDIR /tmp/nginx
RUN ./configure --prefix=/usr/local/nginx \
--with-file-aio \
--with-http_sub_module \
--with-http_ssl_module \
--with-threads \
--add-module=../nginx-vod-module \
--add-module=../nginx-secure-token-module \
--add-module=../nginx-rtmp-module \
--with-cc-opt="-O3 -Wno-error=implicit-fallthrough"
RUN make && make install
RUN rm -rf /usr/local/nginx/html /usr/local/nginx/conf/*.default
FROM base
COPY --from=build /usr/local/nginx /usr/local/nginx
ENTRYPOINT ["/usr/local/nginx/sbin/nginx"]
CMD ["-g", "daemon off;"]

View File

@ -1,9 +0,0 @@
ARG NODE_VERSION=14.0
FROM node:${NODE_VERSION}
WORKDIR /opt/frigate
COPY . .
RUN npm install && npm run build

View File

@ -1,41 +0,0 @@
FROM ubuntu:20.04 as build
ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get -qq update \
&& apt-get -qq install -y \
python3 \
python3-dev \
wget \
# opencv dependencies
build-essential cmake git pkg-config libgtk-3-dev \
libavcodec-dev libavformat-dev libswscale-dev libv4l-dev \
libxvidcore-dev libx264-dev libjpeg-dev libpng-dev libtiff-dev \
gfortran openexr libatlas-base-dev libssl-dev\
libtbb2 libtbb-dev libdc1394-22-dev libopenexr-dev \
libgstreamer-plugins-base1.0-dev libgstreamer1.0-dev \
# scipy dependencies
gcc gfortran libopenblas-dev liblapack-dev cython
RUN wget -q https://bootstrap.pypa.io/get-pip.py -O get-pip.py \
&& python3 get-pip.py "pip==20.2.4"
RUN pip3 install scikit-build
RUN pip3 wheel --wheel-dir=/wheels \
opencv-python-headless \
numpy \
imutils \
scipy \
psutil \
Flask \
paho-mqtt \
PyYAML \
matplotlib \
click \
setproctitle \
peewee
FROM scratch
COPY --from=build /wheels /wheels

View File

@ -173,7 +173,6 @@ http {
location /api/ {
add_header 'Access-Control-Allow-Origin' '*';
add_header 'Access-Control-Allow-Methods' 'GET, POST, PUT, DELETE, OPTIONS';
add_header Cache-Control "no-store";
proxy_pass http://frigate_api/;
proxy_pass_request_headers on;
proxy_set_header Host $host;

View File

@ -0,0 +1,14 @@
# Birdseye
Birdseye provides a heads-up view of your cameras so you can see what is going on around your property/space without having to watch every camera, many of which may have nothing happening. Birdseye offers modes that intelligently show and hide cameras based on what you care about.
### Birdseye Modes
Birdseye offers different modes to customize which cameras show under which circumstances.
- **continuous:** All cameras are always included
- **motion:** Cameras that have detected motion within the last 30 seconds are included
- **objects:** Cameras that have tracked an active object within the last 30 seconds are included
### Custom Birdseye Icon
A custom icon can be added to the Birdseye background by providing a file `custom.png` inside the Frigate `media` folder. The file must be a PNG with a transparent background; any non-transparent pixels will be rendered white in the Birdseye view.
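If you just need a quick placeholder, the hedged Python sketch below (using Pillow, which is not part of Frigate) generates a PNG with a transparent background; only the `custom.png` file name and the media folder location come from the documentation above.
```python
# Minimal sketch, not part of Frigate: generate a transparent placeholder icon
# with Pillow. Any non-transparent pixels will be rendered white by Birdseye,
# so only the alpha channel really matters here.
from PIL import Image, ImageDraw

size = 512
icon = Image.new("RGBA", (size, size), (0, 0, 0, 0))  # fully transparent canvas
draw = ImageDraw.Draw(icon)
draw.ellipse((64, 64, size - 64, size - 64), fill=(255, 255, 255, 255))  # opaque circle
icon.save("custom.png")  # copy this file into the Frigate media folder
```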

View File

@ -198,6 +198,10 @@ objects:
min_area: 5000
# Optional: maximum width*height of the bounding box for the detected object (default: 24000000)
max_area: 100000
# Optional: minimum width/height ratio of the bounding box for the detected object (default: 0)
min_ratio: 0.5
# Optional: maximum width/height ratio of the bounding box for the detected object (default: 24000000)
max_ratio: 2.0
# Optional: minimum score for the object to initiate tracking (default: shown below)
min_score: 0.5
# Optional: minimum decimal percentage for tracked object's computed score to be considered a true positive (default: shown below)

View File

@ -0,0 +1,15 @@
---
id: user_interface
title: User Interface Configurations
---
### Experimental UI
While new components are being developed and tested, users may opt in to try potential new features on the front-end.
```yaml
ui:
use_experimental: true
```
Note that experimental changes may contain bugs or may be removed at any time in future releases of the software. These features are provided as-is, with no functional guarantee.

View File

@ -40,9 +40,7 @@ Fork [blakeblackshear/frigate-hass-integration](https://github.com/blakeblackshe
### Setup
#### 1. Build the docker container locally with the appropriate make command
For x86 machines, use `make amd64_frigate`
#### 1. Build the version information and docker container locally by running `make`
#### 2. Create a local config file for testing
@ -90,6 +88,20 @@ VSCode will start the docker compose file for you and open a terminal window con
After closing VSCode, you may still have containers running. To close everything down, just run `docker-compose down -v` to clean up all containers.
### Testing
#### FFMPEG Hardware Acceleration
The following commands are used inside the container to ensure hardware acceleration is working properly.
**Raspberry Pi (64bit)**
This should show <50% CPU in top, and ~80% CPU without `-c:v h264_v4l2m2m`.
```shell
ffmpeg -c:v h264_v4l2m2m -re -stream_loop -1 -i https://streams.videolan.org/ffmpeg/incoming/720p60.mp4 -f rawvideo -pix_fmt yuv420p pipe: > /dev/null
```
## Web Interface
### Prerequisites
@ -117,20 +129,16 @@ cd web && npm install
#### 3. Run the development server
```console
cd web && npm run start
cd web && npm run dev
```
#### 3a. Run the development server against a non-local instance
To run the development server against a non-local instance, you will need to provide an environment variable, `SNOWPACK_PUBLIC_API_HOST` that tells the web application how to connect to the Frigate API:
```console
cd web && SNOWPACK_PUBLIC_API_HOST=http://<ip-address-to-your-frigate-instance>:5000 npm run start
```
To run the development server against a non-local instance, you will need to modify the API_HOST default return in `web/src/env.js`.
#### 4. Making changes
The Web UI is built using [Snowpack](https://www.snowpack.dev/), [Preact](https://preactjs.com), and [Tailwind CSS](https://tailwindcss.com).
The Web UI is built using [Vite](https://vitejs.dev/), [Preact](https://preactjs.com), and [Tailwind CSS](https://tailwindcss.com).
Light guidelines and advice:

View File

@ -3,7 +3,11 @@ id: false_positives
title: Reducing false positives
---
Tune your object filters to adjust false positives: `min_area`, `max_area`, `min_score`, `threshold`.
Tune your object filters to adjust false positives: `min_area`, `max_area`, `min_ratio`, `max_ratio`, `min_score`, `threshold`.
The `min_area` and `max_area` values are compared against the area (number of pixels) of a given detected object. If the area is outside this range, the object is ignored as a false positive. This allows objects that are too small or too large to be ignored.
Similarly, the `min_ratio` and `max_ratio` values are compared against a given detected object's bounding-box width/height ratio. If the ratio is outside this range, the object is ignored as a false positive. This allows objects that are proportionally too short and wide (higher ratio) or too tall and narrow (lower ratio) to be ignored.
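As a rough illustration of how these filters interact, here is a minimal Python sketch that applies the documented area and ratio checks to a single bounding box; the `(x_min, y_min, x_max, y_max)` box format and the helper name are assumptions for the example, not Frigate internals.
```python
# Illustrative sketch only: mirrors the documented area and ratio filters for
# one detection. The (x_min, y_min, x_max, y_max) box format is an assumption.
def passes_size_filters(box, min_area, max_area, min_ratio, max_ratio):
    width = box[2] - box[0]
    height = box[3] - box[1]
    area = width * height
    ratio = width / height  # >1 is wider than tall, <1 is taller than wide
    return min_area <= area <= max_area and min_ratio <= ratio <= max_ratio

# A 160x300 box (area 48000, ratio ~0.53) passes, while a 600x100 box
# (ratio 6.0) is rejected by max_ratio even though its area is acceptable.
print(passes_size_filters((100, 100, 260, 400), 5000, 100000, 0.5, 2.0))  # True
print(passes_size_filters((100, 100, 700, 200), 5000, 100000, 0.5, 2.0))  # False
```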
For object filters in your configuration, any single detection below `min_score` will be ignored as a false positive. `threshold` is based on the median of the history of scores (padded to 3 values) for a tracked object. Consider the following frames when `min_score` is set to 0.6 and threshold is set to 0.85:

View File

@ -24,16 +24,6 @@ Accepts the following query string parameters:
You can access a higher resolution mjpeg stream by appending `h=height-in-pixels` to the endpoint. For example `http://localhost:5000/api/back?h=1080`. You can also increase the FPS by appending `fps=frame-rate` to the URL such as `http://localhost:5000/api/back?fps=10` or both with `?fps=10&h=1000`.
### `GET /api/<camera_name>/<object_name>/best.jpg[?h=300&crop=1&quality=70]`
The best snapshot for any object type. It is a full resolution image by default.
Example parameters:
- `h=300`: resizes the image to 300 pixels tall
- `crop=1`: crops the image to the region of the detection rather than returning the entire image
- `quality=70`: sets the jpeg encoding quality (0-100)
### `GET /api/<camera_name>/latest.jpg[?h=300]`
The most recent frame that frigate has finished processing. It is a full resolution image by default.
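For example, a minimal Python sketch that saves a reduced-height copy of the latest frame; the host, port, and camera name below are placeholders.
```python
# Hypothetical host, port, and camera name; adjust for your own setup.
import requests

FRIGATE = "http://localhost:5000"
CAMERA = "back"

resp = requests.get(f"{FRIGATE}/api/{CAMERA}/latest.jpg", params={"h": 300}, timeout=10)
resp.raise_for_status()

with open("latest.jpg", "wb") as f:
    f.write(resp.content)  # full resolution by default; h=300 requests a 300px tall image
```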
@ -120,7 +110,8 @@ Sample response:
"service": {
/* Uptime in seconds */
"uptime": 10,
"version": "0.8.0-8883709",
"version": "0.10.1-8883709",
"latest_version": "0.10.1",
/* Storage data in MB for important locations */
"storage": {
"/media/frigate/clips": {
@ -188,10 +179,33 @@ Returns data for a single event.
Permanently deletes the event along with any clips/snapshots.
### `POST /api/events/<id>/retain`
Sets retain to true for the event id.
### `DELETE /api/events/<id>/retain`
Sets retain to false for the event id (event may be deleted quickly after removing).
### `POST /api/events/<id>/sub_label`
Set a sub label for an event. For example, update `person` -> `person's name` if the person was recognized with facial recognition.
Sub labels must be 20 characters or shorter.
```json
{
"subLabel": "some_string"
}
```
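For reference, a minimal Python sketch that exercises the retain and sub label endpoints described above; the host and event id are placeholders.
```python
# Hypothetical host and event id; the endpoints and the 20 character limit
# come from the API documentation above.
import requests

FRIGATE = "http://localhost:5000"
EVENT_ID = "1640995200.123456-abc123"

# Keep the event indefinitely.
requests.post(f"{FRIGATE}/api/events/{EVENT_ID}/retain", timeout=10).raise_for_status()

# Attach a sub label (20 characters or shorter).
requests.post(
    f"{FRIGATE}/api/events/{EVENT_ID}/sub_label",
    json={"subLabel": "mail carrier"},
    timeout=10,
).raise_for_status()

# Allow the event to be cleaned up normally again.
requests.delete(f"{FRIGATE}/api/events/{EVENT_ID}/retain", timeout=10).raise_for_status()
```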
### `GET /api/events/<id>/thumbnail.jpg`
Returns a thumbnail for the event id optimized for notifications. Works while the event is in progress and after completion. Passing `?format=android` will convert the thumbnail to 2:1 aspect ratio.
### `GET /api/<camera_name>/<label>/thumbnail.jpg`
Returns the thumbnail from the latest event for the given camera and label combo. Using `any` as the label will return the latest thumbnail regardless of type.
### `GET /api/events/<id>/clip.mp4`
Returns the clip for the event id. Works after the event has ended.
@ -210,6 +224,10 @@ Accepts the following query string parameters, but they are only applied when an
| `crop` | int | Crop the snapshot to the (0 or 1) |
| `quality` | int | Jpeg encoding quality (0-100). Defaults to 70. |
### `GET /api/<camera_name>/<label>/snapshot.jpg`
Returns the snapshot image from the latest event for the given camera and label combo. Using `any` as the label will return the latest thumbnail regardless of type.
### `GET /clips/<camera>-<id>.jpg`
JPG snapshot for the given camera and event id.

View File

@ -18,10 +18,12 @@ Causes frigate to exit. Docker should be configured to automatically restart the
### `frigate/<camera_name>/<object_name>`
Publishes the count of objects for the camera for use as a sensor in Home Assistant.
`all` can be used as the object_name for the count of all objects for the camera.
### `frigate/<zone_name>/<object_name>`
Publishes the count of objects for the zone for use as a sensor in Home Assistant.
`all` can be used as the object_name for the count of all objects for the zone.
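To make these count topics concrete, here is a hedged paho-mqtt sketch that watches a per-camera count and a per-zone `all` count; the broker address, camera, and zone names are placeholders.
```python
# Hypothetical broker, camera, and zone names; the topic layout follows the docs above.
import paho.mqtt.client as mqtt

def on_connect(client, userdata, flags, rc):
    client.subscribe("frigate/front_door/person")  # person count for one camera
    client.subscribe("frigate/driveway/all")       # count of all objects in a zone

def on_message(client, userdata, msg):
    print(f"{msg.topic}: {msg.payload.decode()} objects")

client = mqtt.Client()
client.on_connect = on_connect
client.on_message = on_message
client.connect("mqtt.local", 1883)
client.loop_forever()
```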
### `frigate/<camera_name>/<object_name>/snapshot`
@ -50,6 +52,7 @@ Message published for each changed event. The first message is published when th
"score": 0.7890625,
"box": [424, 500, 536, 712],
"area": 23744,
"ratio": 2.113207,
"region": [264, 450, 667, 853],
"current_zones": ["driveway"],
"entered_zones": ["yard", "driveway"],
@ -73,6 +76,7 @@ Message published for each changed event. The first message is published when th
"score": 0.87890625,
"box": [432, 496, 544, 854],
"area": 40096,
"ratio": 1.251397,
"region": [218, 440, 693, 915],
"current_zones": ["yard", "driveway"],
"entered_zones": ["yard", "driveway"],

View File

@ -22,7 +22,7 @@ module.exports = {
'configuration/objects',
'configuration/rtmp',
'configuration/zones',
'configuration/stationary_objects',
'configuration/birdseye',
'configuration/advanced',
'configuration/hardware_acceleration',
'configuration/nvdec',

View File

@ -16,7 +16,7 @@ from playhouse.sqliteq import SqliteQueueDatabase
from pydantic import ValidationError
from frigate.config import DetectorTypeEnum, FrigateConfig
from frigate.const import CACHE_DIR, CLIPS_DIR, RECORD_DIR
from frigate.const import CACHE_DIR, CLIPS_DIR, RECORD_DIR, PLUS_ENV_VAR, PLUS_API_HOST
from frigate.edgetpu import EdgeTPUProcess
from frigate.events import EventCleanup, EventProcessor
from frigate.http import create_app
@ -25,6 +25,7 @@ from frigate.models import Event, Recordings
from frigate.mqtt import MqttSocketRelay, create_mqtt_client
from frigate.object_processing import TrackedObjectProcessor
from frigate.output import output_frames
from frigate.plus import PlusApi
from frigate.record import RecordingCleanup, RecordingMaintainer
from frigate.stats import StatsEmitter, stats_init
from frigate.version import VERSION
@ -44,6 +45,11 @@ class FrigateApp:
self.detection_out_events: Dict[str, mp.Event] = {}
self.detection_shms: List[mp.shared_memory.SharedMemory] = []
self.log_queue = mp.Queue()
self.plus_api = (
PlusApi(PLUS_API_HOST, os.environ.get(PLUS_ENV_VAR))
if PLUS_ENV_VAR in os.environ
else None
)
self.camera_metrics = {}
def set_environment_vars(self):
@ -146,6 +152,7 @@ class FrigateApp:
self.db,
self.stats_tracking,
self.detected_frames_processor,
self.plus_api,
)
def init_mqtt(self):

View File

@ -43,6 +43,8 @@ class DetectorConfig(FrigateBaseModel):
device: str = Field(default="usb", title="Device Type")
num_threads: int = Field(default=3, title="Number of detection threads")
class UIConfig(FrigateBaseModel):
use_experimental: bool = Field(default=False, title="Experimental UI")
class MqttConfig(FrigateBaseModel):
host: str = Field(title="MQTT Host")
@ -209,6 +211,14 @@ class FilterConfig(FrigateBaseModel):
max_area: int = Field(
default=24000000, title="Maximum area of bounding box for object to be counted."
)
min_ratio: float = Field(
default=0,
title="Minimum ratio of bounding box's width/height for object to be counted.",
)
max_ratio: float = Field(
default=24000000,
title="Maximum ratio of bounding box's width/height for object to be counted.",
)
threshold: float = Field(
default=0.7,
title="Average detection confidence threshold for object to be counted.",
@ -712,6 +722,7 @@ class FrigateConfig(FrigateBaseModel):
environment_vars: Dict[str, str] = Field(
default_factory=dict, title="Frigate environment variables."
)
ui: UIConfig = Field(default_factory=UIConfig, title="UI configuration.")
model: ModelConfig = Field(
default_factory=ModelConfig, title="Detection model configuration."
)

View File

@ -3,3 +3,5 @@ CLIPS_DIR = f"{BASE_DIR}/clips"
RECORD_DIR = f"{BASE_DIR}/recordings"
CACHE_DIR = "/tmp/cache"
YAML_EXT = (".yaml", ".yml")
PLUS_ENV_VAR = "PLUS_API_KEY"
PLUS_API_HOST = "https://api.frigate.video"

View File

@ -14,14 +14,22 @@ from frigate.models import Event
logger = logging.getLogger(__name__)
def should_insert_db(prev_event, current_event):
"""If current event has new clip or snapshot."""
return (
(not prev_event["has_clip"] and not prev_event["has_snapshot"])
and (current_event["has_clip"] or current_event["has_snapshot"])
)
def should_update_db(prev_event, current_event):
"""If current_event has updated fields and (clip or snapshot)."""
return (
prev_event["top_score"] != current_event["top_score"]
or prev_event["entered_zones"] != current_event["entered_zones"]
or prev_event["thumbnail"] != current_event["thumbnail"]
or prev_event["has_clip"] != current_event["has_clip"]
or prev_event["has_snapshot"] != current_event["has_snapshot"]
(current_event["has_clip"] or current_event["has_snapshot"])
and (prev_event["top_score"] != current_event["top_score"]
or prev_event["entered_zones"] != current_event["entered_zones"]
or prev_event["thumbnail"] != current_event["thumbnail"]
or prev_event["has_clip"] != current_event["has_clip"]
or prev_event["has_snapshot"] != current_event["has_snapshot"])
)
@ -58,33 +66,54 @@ class EventProcessor(threading.Thread):
if event_type == "start":
self.events_in_process[event_data["id"]] = event_data
elif event_type == "update" and should_insert_db(
self.events_in_process[event_data["id"]], event_data
):
self.events_in_process[event_data["id"]] = event_data
# TODO: this will generate a lot of db activity possibly
Event.insert(
id=event_data["id"],
label=event_data["label"],
camera=camera,
start_time=event_data["start_time"] - event_config.pre_capture,
end_time=None,
top_score=event_data["top_score"],
false_positive=event_data["false_positive"],
zones=list(event_data["entered_zones"]),
thumbnail=event_data["thumbnail"],
region=event_data["region"],
box=event_data["box"],
area=event_data["area"],
has_clip=event_data["has_clip"],
has_snapshot=event_data["has_snapshot"],
).execute()
elif event_type == "update" and should_update_db(
self.events_in_process[event_data["id"]], event_data
):
self.events_in_process[event_data["id"]] = event_data
# TODO: this will generate a lot of db activity possibly
if event_data["has_clip"] or event_data["has_snapshot"]:
Event.replace(
id=event_data["id"],
label=event_data["label"],
camera=camera,
start_time=event_data["start_time"] - event_config.pre_capture,
end_time=None,
top_score=event_data["top_score"],
false_positive=event_data["false_positive"],
zones=list(event_data["entered_zones"]),
thumbnail=event_data["thumbnail"],
region=event_data["region"],
box=event_data["box"],
area=event_data["area"],
has_clip=event_data["has_clip"],
has_snapshot=event_data["has_snapshot"],
).execute()
Event.update(
label=event_data["label"],
camera=camera,
start_time=event_data["start_time"] - event_config.pre_capture,
end_time=None,
top_score=event_data["top_score"],
false_positive=event_data["false_positive"],
zones=list(event_data["entered_zones"]),
thumbnail=event_data["thumbnail"],
region=event_data["region"],
box=event_data["box"],
area=event_data["area"],
ratio=event_data["ratio"],
has_clip=event_data["has_clip"],
has_snapshot=event_data["has_snapshot"],
).where(Event.id == event_data["id"]).execute()
elif event_type == "end":
if event_data["has_clip"] or event_data["has_snapshot"]:
Event.replace(
id=event_data["id"],
# Full update for valid end of event
Event.update(
label=event_data["label"],
camera=camera,
start_time=event_data["start_time"] - event_config.pre_capture,
@ -96,9 +125,16 @@ class EventProcessor(threading.Thread):
region=event_data["region"],
box=event_data["box"],
area=event_data["area"],
ratio=event_data["ratio"],
has_clip=event_data["has_clip"],
has_snapshot=event_data["has_snapshot"],
).execute()
).where(Event.id == event_data["id"]).execute()
else:
# Event ended after clip & snapshot disabled,
# only end time should be updated.
Event.update(
end_time=event_data["end_time"] + event_config.post_capture
).where(Event.id == event_data["id"]).execute()
del self.events_in_process[event_data["id"]]
self.event_processed_queue.put((event_data["id"], camera))
@ -147,6 +183,7 @@ class EventCleanup(threading.Thread):
Event.camera.not_in(self.camera_keys),
Event.start_time < expire_after,
Event.label == l.label,
Event.retain_indefinitely == False,
)
# delete the media from disk
for event in expired_events:
@ -166,6 +203,7 @@ class EventCleanup(threading.Thread):
Event.camera.not_in(self.camera_keys),
Event.start_time < expire_after,
Event.label == l.label,
Event.retain_indefinitely == False,
)
update_query.execute()
@ -192,6 +230,7 @@ class EventCleanup(threading.Thread):
Event.camera == name,
Event.start_time < expire_after,
Event.label == l.label,
Event.retain_indefinitely == False,
)
# delete the grabbed clips from disk
for event in expired_events:
@ -210,6 +249,7 @@ class EventCleanup(threading.Thread):
Event.camera == name,
Event.start_time < expire_after,
Event.label == l.label,
Event.retain_indefinitely == False,
)
update_query.execute()

View File

@ -29,7 +29,7 @@ from flask import (
from peewee import SqliteDatabase, operator, fn, DoesNotExist, Value
from playhouse.shortcuts import model_to_dict
from frigate.const import CLIPS_DIR, RECORD_DIR
from frigate.const import CLIPS_DIR, PLUS_ENV_VAR
from frigate.models import Event, Recordings
from frigate.stats import stats_snapshot
from frigate.util import calculate_region
@ -45,6 +45,7 @@ def create_app(
database: SqliteDatabase,
stats_tracking,
detected_frames_processor,
plus_api,
):
app = Flask(__name__)
@ -61,6 +62,7 @@ def create_app(
app.frigate_config = frigate_config
app.stats_tracking = stats_tracking
app.detected_frames_processor = detected_frames_processor
app.plus_api = plus_api
app.register_blueprint(bp)
@ -120,13 +122,138 @@ def event(id):
return "Event not found", 404
@bp.route("/events/<id>/retain", methods=("POST",))
def set_retain(id):
try:
event = Event.get(Event.id == id)
except DoesNotExist:
return make_response(
jsonify({"success": False, "message": "Event " + id + " not found"}), 404
)
event.retain_indefinitely = True
event.save()
return make_response(
jsonify({"success": True, "message": "Event " + id + " retained"}), 200
)
@bp.route("/events/<id>/plus", methods=("POST",))
def send_to_plus(id):
if current_app.plus_api is None:
return make_response(
jsonify(
{
"success": False,
"message": "PLUS_API_KEY environment variable is not set",
}
),
400,
)
try:
event = Event.get(Event.id == id)
except DoesNotExist:
return make_response(
jsonify({"success": False, "message": "Event" + id + " not found"}), 404
)
if event.plus_id:
return make_response(
jsonify({"success": False, "message": "Already submitted to plus"}), 400
)
# load clean.png
try:
filename = f"{event.camera}-{event.id}-clean.png"
image = cv2.imread(os.path.join(CLIPS_DIR, filename))
except Exception:
return make_response(
jsonify(
{"success": False, "message": "Unable to load clean png for event"}
),
400,
)
try:
plus_id = current_app.plus_api.upload_image(image, event.camera)
except Exception as ex:
return make_response(
jsonify({"success": False, "message": str(ex)}),
400,
)
# store image id in the database
event.plus_id = plus_id
event.save()
return make_response(jsonify({"success": True, "plus_id": plus_id}), 200)
@bp.route("/events/<id>/retain", methods=("DELETE",))
def delete_retain(id):
try:
event = Event.get(Event.id == id)
except DoesNotExist:
return make_response(
jsonify({"success": False, "message": "Event " + id + " not found"}), 404
)
event.retain_indefinitely = False
event.save()
return make_response(
jsonify({"success": True, "message": "Event " + id + " un-retained"}), 200
)
@bp.route("/events/<id>/sub_label", methods=("POST",))
def set_sub_label(id):
try:
event = Event.get(Event.id == id)
except DoesNotExist:
return make_response(
jsonify({"success": False, "message": "Event " + id + " not found"}), 404
)
if request.json:
new_sub_label = request.json.get("subLabel")
else:
new_sub_label = None
if new_sub_label and len(new_sub_label) > 20:
return make_response(
jsonify(
{
"success": False,
"message": new_sub_label
+ " exceeds the 20 character limit for sub_label",
}
),
400,
)
event.sub_label = new_sub_label
event.save()
return make_response(
jsonify(
{
"success": True,
"message": "Event " + id + " sub label set to " + new_sub_label,
}
),
200,
)
@bp.route("/events/<id>", methods=("DELETE",))
def delete_event(id):
try:
event = Event.get(Event.id == id)
except DoesNotExist:
return make_response(
jsonify({"success": False, "message": "Event" + id + " not found"}), 404
jsonify({"success": False, "message": "Event " + id + " not found"}), 404
)
media_name = f"{event.camera}-{event.id}"
@ -141,7 +268,7 @@ def delete_event(id):
event.delete_instance()
return make_response(
jsonify({"success": True, "message": "Event" + id + " deleted"}), 200
jsonify({"success": True, "message": "Event " + id + " deleted"}), 200
)
@ -149,8 +276,11 @@ def delete_event(id):
def event_thumbnail(id):
format = request.args.get("format", "ios")
thumbnail_bytes = None
event_complete = False
try:
event = Event.get(Event.id == id)
if not event.end_time is None:
event_complete = True
thumbnail_bytes = base64.b64decode(event.thumbnail)
except DoesNotExist:
# see if the object is currently being tracked
@ -185,9 +315,43 @@ def event_thumbnail(id):
response = make_response(thumbnail_bytes)
response.headers["Content-Type"] = "image/jpeg"
if event_complete:
response.headers["Cache-Control"] = "private, max-age=31536000"
return response
@bp.route("/<camera_name>/<label>/best.jpg")
@bp.route("/<camera_name>/<label>/thumbnail.jpg")
def label_thumbnail(camera_name, label):
if label == "any":
event_query = (
Event.select()
.where(Event.camera == camera_name)
.where(Event.has_snapshot == True)
.order_by(Event.start_time.desc())
)
else:
event_query = (
Event.select()
.where(Event.camera == camera_name)
.where(Event.label == label)
.where(Event.has_snapshot == True)
.order_by(Event.start_time.desc())
)
try:
event = event_query.get()
return event_thumbnail(event.id)
except DoesNotExist:
frame = np.zeros((175, 175, 3), np.uint8)
ret, jpg = cv2.imencode(".jpg", frame, [int(cv2.IMWRITE_JPEG_QUALITY), 70])
response = make_response(jpg.tobytes())
response.headers["Content-Type"] = "image/jpeg"
return response
@bp.route("/events/<id>/snapshot.jpg")
def event_snapshot(id):
download = request.args.get("download", type=bool)
@ -233,6 +397,36 @@ def event_snapshot(id):
return response
@bp.route("/<camera_name>/<label>/snapshot.jpg")
def label_snapshot(camera_name, label):
if label == "any":
event_query = (
Event.select()
.where(Event.camera == camera_name)
.where(Event.has_snapshot == True)
.order_by(Event.start_time.desc())
)
else:
event_query = (
Event.select()
.where(Event.camera == camera_name)
.where(Event.label == label)
.where(Event.has_snapshot == True)
.order_by(Event.start_time.desc())
)
try:
event = event_query.get()
return event_snapshot(event.id)
except DoesNotExist:
frame = np.zeros((720, 1280, 3), np.uint8)
ret, jpg = cv2.imencode(".jpg", frame, [int(cv2.IMWRITE_JPEG_QUALITY), 70])
response = make_response(jpg.tobytes())
response.headers["Content-Type"] = "image/jpeg"
return response
@bp.route("/events/<id>/clip.mp4")
def event_clip(id):
download = request.args.get("download", type=bool)
@ -271,9 +465,9 @@ def event_clip(id):
@bp.route("/events")
def events():
limit = request.args.get("limit", 100)
camera = request.args.get("camera")
label = request.args.get("label")
zone = request.args.get("zone")
camera = request.args.get("camera", "all")
label = request.args.get("label", "all")
zone = request.args.get("zone", "all")
after = request.args.get("after", type=float)
before = request.args.get("before", type=float)
has_clip = request.args.get("has_clip", type=int)
@ -283,20 +477,20 @@ def events():
clauses = []
excluded_fields = []
if camera:
if camera != "all":
clauses.append((Event.camera == camera))
if label:
if label != "all":
clauses.append((Event.label == label))
if zone:
if zone != "all":
clauses.append((Event.zones.cast("text") % f'*"{zone}"*'))
if after:
clauses.append((Event.start_time >= after))
clauses.append((Event.start_time > after))
if before:
clauses.append((Event.start_time <= before))
clauses.append((Event.start_time < before))
if not has_clip is None:
clauses.append((Event.has_clip == has_clip))
@ -331,6 +525,8 @@ def config():
for cmd in camera_dict["ffmpeg_cmds"]:
cmd["cmd"] = " ".join(cmd["cmd"])
config["plus"] = {"enabled": PLUS_ENV_VAR in os.environ}
return jsonify(config)
@ -352,48 +548,6 @@ def stats():
return jsonify(stats)
@bp.route("/<camera_name>/<label>/best.jpg")
def best(camera_name, label):
if camera_name in current_app.frigate_config.cameras:
best_object = current_app.detected_frames_processor.get_best(camera_name, label)
best_frame = best_object.get("frame")
if best_frame is None:
best_frame = np.zeros((720, 1280, 3), np.uint8)
else:
best_frame = cv2.cvtColor(best_frame, cv2.COLOR_YUV2BGR_I420)
crop = bool(request.args.get("crop", 0, type=int))
if crop:
box_size = 300
box = best_object.get("box", (0, 0, box_size, box_size))
region = calculate_region(
best_frame.shape,
box[0],
box[1],
box[2],
box[3],
box_size,
multiplier=1.1,
)
best_frame = best_frame[region[1] : region[3], region[0] : region[2]]
height = int(request.args.get("h", str(best_frame.shape[0])))
width = int(height * best_frame.shape[1] / best_frame.shape[0])
resize_quality = request.args.get("quality", default=70, type=int)
best_frame = cv2.resize(
best_frame, dsize=(width, height), interpolation=cv2.INTER_AREA
)
ret, jpg = cv2.imencode(
".jpg", best_frame, [int(cv2.IMWRITE_JPEG_QUALITY), resize_quality]
)
response = make_response(jpg.tobytes())
response.headers["Content-Type"] = "image/jpeg"
return response
else:
return "Camera named {} not found".format(camera_name), 404
@bp.route("/<camera_name>")
def mjpeg_feed(camera_name):
fps = int(request.args.get("fps", "3"))
@ -614,7 +768,7 @@ def recording_clip(camera, start_ts, end_ts):
"-safe",
"0",
"-i",
"-",
"/dev/stdin",
"-c",
"copy",
"-movflags",

View File

@ -6,6 +6,7 @@ from playhouse.sqlite_ext import *
class Event(Model):
id = CharField(null=False, primary_key=True, max_length=30)
label = CharField(index=True, max_length=20)
sub_label = CharField(max_length=20, null=True)
camera = CharField(index=True, max_length=20)
start_time = DateTimeField()
end_time = DateTimeField()
@ -18,6 +19,9 @@ class Event(Model):
region = JSONField()
box = JSONField()
area = IntegerField()
retain_indefinitely = BooleanField(default=False)
ratio = FloatField(default=1.0)
plus_id = CharField(max_length=30)
class Recordings(Model):
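A short peewee sketch of querying the new Event columns (illustrative only, not part of this change):

from frigate.models import Event

# events flagged to be kept regardless of retention settings, newest first
kept = (
    Event.select()
    .where(Event.retain_indefinitely == True)
    .order_by(Event.start_time.desc())
)

# events already submitted to Frigate+ carry a non-null plus_id
submitted = Event.select().where(Event.plus_id.is_null(False))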

View File

@ -192,6 +192,7 @@ class TrackedObject:
"score": self.obj_data["score"],
"box": self.obj_data["box"],
"area": self.obj_data["area"],
"ratio": self.obj_data["ratio"],
"region": self.obj_data["region"],
"stationary": self.obj_data["motionless_count"]
> self.camera_config.detect.stationary.threshold,
@ -341,6 +342,14 @@ def zone_filtered(obj: TrackedObject, object_config):
if obj_settings.threshold > obj.computed_score:
return True
# if the object is not proportionally wide enough
if obj_settings.min_ratio > obj.obj_data["ratio"]:
return True
# if the object is proportionally too wide
if obj_settings.max_ratio < obj.obj_data["ratio"]:
return True
return False
@ -554,13 +563,24 @@ class CameraState:
if not obj.false_positive
)
# keep track of all labels detected for this camera
total_label_count = 0
# report on detected objects
for obj_name, count in obj_counter.items():
total_label_count += count
if count != self.object_counts[obj_name]:
self.object_counts[obj_name] = count
for c in self.callbacks["object_status"]:
c(self.name, obj_name, count)
# publish for all labels detected for this camera
if total_label_count != self.object_counts.get("all"):
self.object_counts["all"] = total_label_count
for c in self.callbacks["object_status"]:
c(self.name, "all", total_label_count)
# expire any objects that are >0 and no longer detected
expired_objects = [
obj_name
@ -568,6 +588,10 @@ class CameraState:
if count > 0 and obj_name not in obj_counter
]
for obj_name in expired_objects:
# Ignore the artificial all label
if obj_name == "all":
continue
self.object_counts[obj_name] = 0
for c in self.callbacks["object_status"]:
c(self.name, obj_name, 0)
@ -889,9 +913,14 @@ class TrackedObjectProcessor(threading.Thread):
for obj in camera_state.tracked_objects.values()
if zone in obj.current_zones and not obj.false_positive
)
total_label_count = 0
# update counts and publish status
for label in set(self.zone_data[zone].keys()) | set(obj_counter.keys()):
# Ignore the artificial all label
if label == "all":
continue
# if we have previously published a count for this zone/label
zone_label = self.zone_data[zone][label]
if camera in zone_label:
@ -906,6 +935,10 @@ class TrackedObjectProcessor(threading.Thread):
new_count,
retain=False,
)
# Set the count for the /zone/all topic.
total_label_count += new_count
# if this is a new zone/label combo for this camera
else:
if label in obj_counter:
@ -916,6 +949,31 @@ class TrackedObjectProcessor(threading.Thread):
retain=False,
)
# Set the count for the /zone/all topic.
total_label_count += obj_counter[label]
# if we have previously published an "all" count for this zone

zone_label = self.zone_data[zone]["all"]
if camera in zone_label:
current_count = sum(zone_label.values())
zone_label[camera] = total_label_count
new_count = sum(zone_label.values())
if new_count != current_count:
self.client.publish(
f"{self.topic_prefix}/{zone}/all",
new_count,
retain=False,
)
# if this is the first "all" count for this camera in this zone
else:
zone_label[camera] = total_label_count
self.client.publish(
f"{self.topic_prefix}/{zone}/all",
total_label_count,
retain=False,
)
# cleanup event finished queue
while not self.event_processed_queue.empty():
event_id, camera = self.event_processed_queue.get()
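The aggregate counts added above are published per camera and per zone on an "all" topic. A small subscriber sketch using paho-mqtt (the broker hostname and the default "frigate" topic prefix are assumptions):

import paho.mqtt.client as mqtt

def on_message(client, userdata, msg):
    # e.g. "frigate/back_yard/all 2" for a camera, "frigate/driveway/all 1" for a zone
    print(msg.topic, int(msg.payload))

client = mqtt.Client()
client.on_message = on_message
client.connect("mqtt.local")  # assumed broker hostname
client.subscribe("frigate/+/all")
client.loop_forever()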

View File

@ -149,7 +149,8 @@ class ObjectTracker:
"score": obj[1],
"box": obj[2],
"area": obj[3],
"region": obj[4],
"ratio": obj[4],
"region": obj[5],
"frame_time": frame_time,
}
)

View File

@ -22,6 +22,7 @@ from ws4py.server.wsgiutils import WebSocketWSGIApplication
from ws4py.websocket import WebSocket
from frigate.config import BirdseyeModeEnum, FrigateConfig
from frigate.const import BASE_DIR
from frigate.util import SharedMemoryFrameManager, copy_yuv_to_position, get_yuv_crop
logger = logging.getLogger(__name__)
@ -104,12 +105,21 @@ class BirdsEyeFrameManager:
self.blank_frame[0 : self.frame_shape[0], 0 : self.frame_shape[1]] = 16
# find and copy the logo on the blank frame
logo_files = glob.glob("/opt/frigate/frigate/birdseye.png")
frigate_logo = None
if len(logo_files) > 0:
frigate_logo = cv2.imread(logo_files[0], cv2.IMREAD_UNCHANGED)
if not frigate_logo is None:
transparent_layer = frigate_logo[:, :, 3]
birdseye_logo = None
custom_logo_files = glob.glob(f"{BASE_DIR}/custom.png")
if len(custom_logo_files) > 0:
birdseye_logo = cv2.imread(custom_logo_files[0], cv2.IMREAD_UNCHANGED)
if birdseye_logo is None:
logo_files = glob.glob("/opt/frigate/frigate/birdseye.png")
if len(logo_files) > 0:
birdseye_logo = cv2.imread(logo_files[0], cv2.IMREAD_UNCHANGED)
if not birdseye_logo is None:
transparent_layer = birdseye_logo[:, :, 3]
y_offset = height // 2 - transparent_layer.shape[0] // 2
x_offset = width // 2 - transparent_layer.shape[1] // 2
self.blank_frame[

96
frigate/plus.py Normal file
View File

@ -0,0 +1,96 @@
import datetime
import logging
import requests
import cv2
logger = logging.getLogger(__name__)
def get_jpg_bytes(image, max_dim, quality):
if image.shape[1] >= image.shape[0]:
width = min(max_dim, image.shape[1])
height = int(width * image.shape[0] / image.shape[1])
else:
height = min(max_dim, image.shape[0])
width = int(height * image.shape[1] / image.shape[0])
original = cv2.resize(image, dsize=(width, height), interpolation=cv2.INTER_AREA)
ret, jpg = cv2.imencode(".jpg", original, [int(cv2.IMWRITE_JPEG_QUALITY), quality])
return jpg.tobytes()
class PlusApi:
def __init__(self, host, key: str):
self.host = host
self.key = key
self._token_data = None
def _refresh_token_if_needed(self):
if (
self._token_data is None
or self._token_data["expires"] - datetime.datetime.now().timestamp() < 60
):
parts = self.key.split(":")
r = requests.get(f"{self.host}/v1/auth/token", auth=(parts[0], parts[1]))
self._token_data = r.json()
def _get_authorization_header(self):
self._refresh_token_if_needed()
return {"authorization": f"Bearer {self._token_data['accessToken']}"}
def _get(self, path):
return requests.get(
f"{self.host}/v1/{path}", headers=self._get_authorization_header()
)
def _post(self, path, data):
return requests.post(
f"{self.host}/v1/{path}",
headers=self._get_authorization_header(),
json=data,
)
def upload_image(self, image, camera: str):
r = self._get("image/signed_urls")
presigned_urls = r.json()
if not r.ok:
logger.error(f"Failed to get signed urls: {r.status_code} {r.text}")
raise Exception("Unable to get signed urls")
# resize and submit original
files = {"file": get_jpg_bytes(image, 1920, 85)}
data = presigned_urls["original"]["fields"]
data["content-type"] = "image/jpeg"
r = requests.post(presigned_urls["original"]["url"], files=files, data=data)
if not r.ok:
logger.error(f"Failed to upload original: {r.status_code} {r.text}")
raise Exception(r.text)
# resize and submit annotate
files = {"file": get_jpg_bytes(image, 640, 70)}
data = presigned_urls["annotate"]["fields"]
data["content-type"] = "image/jpeg"
r = requests.post(presigned_urls["annotate"]["url"], files=files, data=data)
if not r.ok:
logger.error(f"Failed to upload annotate: {r.status_code} {r.text}")
raise Exception(r.text)
# resize and submit thumbnail
files = {"file": get_jpg_bytes(image, 200, 70)}
data = presigned_urls["thumbnail"]["fields"]
data["content-type"] = "image/jpeg"
r = requests.post(presigned_urls["thumbnail"]["url"], files=files, data=data)
if not r.ok:
logger.error(f"Failed to upload thumbnail: {r.status_code} {r.text}")
raise Exception(r.text)
# create image
r = self._post(
"image/create", {"id": presigned_urls["imageId"], "camera": camera}
)
if not r.ok:
raise Exception(r.text)
# return image id
return presigned_urls["imageId"]
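A hypothetical end-to-end use of PlusApi (the host URL, key format, and file path are assumptions for illustration):

import cv2
from frigate.plus import PlusApi

api = PlusApi("https://api.frigate.video", key="tenant_id:api_secret")
frame = cv2.imread("/tmp/snapshot.jpg")  # any BGR image array works here
image_id = api.upload_image(frame, camera="back_yard")
print(f"uploaded as {image_id}")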

View File

@ -5,6 +5,7 @@ import time
import psutil
import shutil
import os
import requests
from frigate.config import FrigateConfig
from frigate.const import RECORD_DIR, CLIPS_DIR, CACHE_DIR
@ -13,11 +14,22 @@ from frigate.version import VERSION
logger = logging.getLogger(__name__)
def get_latest_version() -> str:
request = requests.get('https://api.github.com/repos/blakeblackshear/frigate/releases/latest')
response = request.json()
if request.ok and response:
return response.get("tag_name", "unknown").replace("v", "")
else:
return "unknown"
def stats_init(camera_metrics, detectors):
stats_tracking = {
"camera_metrics": camera_metrics,
"detectors": detectors,
"started": int(time.time()),
"latest_frigate_version": get_latest_version(),
}
return stats_tracking
@ -83,6 +95,7 @@ def stats_snapshot(stats_tracking):
stats["service"] = {
"uptime": (int(time.time()) - stats_tracking["started"]),
"version": VERSION,
"latest_version": stats_tracking["latest_frigate_version"],
"storage": {},
"temperatures": get_temperatures(),
}

View File

@ -1268,6 +1268,36 @@ class TestConfig(unittest.TestCase):
ValidationError, lambda: frigate_config.runtime_config.cameras
)
def test_object_filter_ratios_work(self):
config = {
"mqtt": {"host": "mqtt"},
"objects": {
"track": ["person", "dog"],
"filters": {"dog": {"min_ratio": 0.2, "max_ratio": 10.1}},
},
"cameras": {
"back": {
"ffmpeg": {
"inputs": [
{"path": "rtsp://10.0.0.1:554/video", "roles": ["detect"]}
]
},
"detect": {
"height": 1080,
"width": 1920,
"fps": 5,
},
}
},
}
frigate_config = FrigateConfig(**config)
assert config == frigate_config.dict(exclude_unset=True)
runtime_config = frigate_config.runtime_config
assert "dog" in runtime_config.cameras["back"].objects.filters
assert runtime_config.cameras["back"].objects.filters["dog"].min_ratio == 0.2
assert runtime_config.cameras["back"].objects.filters["dog"].max_ratio == 10.1
if __name__ == "__main__":
unittest.main(verbosity=2)

View File

@ -522,7 +522,7 @@ def clipped(obj, frame_shape):
# if the object is within 5 pixels of the region border, and the region is not on the edge
# consider the object to be clipped
box = obj[2]
region = obj[4]
region = obj[5]
if (
(region[0] > 5 and box[0] - region[0] <= 5)
or (region[1] > 5 and box[1] - region[1] <= 5)

View File

@ -38,6 +38,10 @@ logger = logging.getLogger(__name__)
def filtered(obj, objects_to_track, object_filters):
object_name = obj[0]
object_score = obj[1]
object_box = obj[2]
object_area = obj[3]
object_ratio = obj[4]
if not object_name in objects_to_track:
return True
@ -47,24 +51,35 @@ def filtered(obj, objects_to_track, object_filters):
# if the min area is larger than the
# detected object, don't add it to detected objects
if obj_settings.min_area > obj[3]:
if obj_settings.min_area > object_area:
return True
# if the detected object is larger than the
# max area, don't add it to detected objects
if obj_settings.max_area < obj[3]:
if obj_settings.max_area < object_area:
return True
# if the score is lower than the min_score, skip
if obj_settings.min_score > obj[1]:
if obj_settings.min_score > object_score:
return True
# if the object is not proportionally wide enough
if obj_settings.min_ratio > object_ratio:
return True
# if the object is proportionally too wide
if obj_settings.max_ratio < object_ratio:
return True
if not obj_settings.mask is None:
# compute the coordinates of the object and make sure
# the location isnt outside the bounds of the image (can happen from rounding)
y_location = min(int(obj[2][3]), len(obj_settings.mask) - 1)
# the location isn't outside the bounds of the image (can happen from rounding)
object_xmin = object_box[0]
object_xmax = object_box[2]
object_ymax = object_box[3]
y_location = min(int(object_ymax), len(obj_settings.mask) - 1)
x_location = min(
int((obj[2][2] - obj[2][0]) / 2.0) + obj[2][0],
int((object_xmax + object_xmin) / 2.0),
len(obj_settings.mask[0]) - 1,
)
@ -429,11 +444,16 @@ def detect(
y_min = int((box[0] * size) + region[1])
x_max = int((box[3] * size) + region[0])
y_max = int((box[2] * size) + region[1])
width = x_max - x_min
height = y_max - y_min
area = width * height
ratio = width / height
det = (
d[0],
d[1],
(x_min, y_min, x_max, y_max),
(x_max - x_min) * (y_max - y_min),
area,
ratio,
region,
)
# apply object filters
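A worked example of the new ratio value (numbers invented): ratio is the bounding-box width divided by its height, and the min_ratio/max_ratio filters drop detections outside the configured range.

# illustrative only
x_min, y_min, x_max, y_max = 100, 50, 300, 450
ratio = (x_max - x_min) / (y_max - y_min)  # 200 / 400 = 0.5
passes = 0.2 <= ratio <= 10.1  # True for the thresholds used in the config test earlier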
@ -580,6 +600,7 @@ def process_frames(
obj["score"],
obj["box"],
obj["area"],
obj["ratio"],
obj["region"],
)
for obj in object_tracker.tracked_objects.values()
@ -615,15 +636,22 @@ def process_frames(
for group in detected_object_groups.values():
# apply non-maxima suppression to suppress weak, overlapping bounding boxes
# o[2] is the box of the object: xmin, ymin, xmax, ymax
boxes = [
(o[2][0], o[2][1], o[2][2] - o[2][0], o[2][3] - o[2][1])
(
o[2][0],
o[2][1],
o[2][2] - o[2][0],
o[2][3] - o[2][1],
)
for o in group
]
confidences = [o[1] for o in group]
idxs = cv2.dnn.NMSBoxes(boxes, confidences, 0.5, 0.4)
for index in idxs:
obj = group[index[0]]
index = index if isinstance(index, np.int32) else index[0]
obj = group[index]
if clipped(obj, frame_shape):
box = obj[2]
# calculate a new region that will hopefully get the entire object

View File

@ -0,0 +1,46 @@
"""Peewee migrations -- 007_add_retain_indefinitely.py.
Some examples (model - class or model name)::
> Model = migrator.orm['model_name'] # Return model in current state by name
> migrator.sql(sql) # Run custom SQL
> migrator.python(func, *args, **kwargs) # Run python code
> migrator.create_model(Model) # Create a model (could be used as decorator)
> migrator.remove_model(model, cascade=True) # Remove a model
> migrator.add_fields(model, **fields) # Add fields to a model
> migrator.change_fields(model, **fields) # Change fields
> migrator.remove_fields(model, *field_names, cascade=True)
> migrator.rename_field(model, old_field_name, new_field_name)
> migrator.rename_table(model, new_table_name)
> migrator.add_index(model, *col_names, unique=False)
> migrator.drop_index(model, *col_names)
> migrator.add_not_null(model, *field_names)
> migrator.drop_not_null(model, *field_names)
> migrator.add_default(model, field_name, default)
"""
import datetime as dt
import peewee as pw
from playhouse.sqlite_ext import *
from decimal import ROUND_HALF_EVEN
from frigate.models import Event
try:
import playhouse.postgres_ext as pw_pext
except ImportError:
pass
SQL = pw.SQL
def migrate(migrator, database, fake=False, **kwargs):
migrator.add_fields(
Event,
retain_indefinitely=pw.BooleanField(default=False),
)
def rollback(migrator, database, fake=False, **kwargs):
migrator.remove_fields(Event, ["retain_indefinitely"])

View File

@ -0,0 +1,46 @@
"""Peewee migrations -- 008_add_sub_label.py.
Some examples (model - class or model name)::
> Model = migrator.orm['model_name'] # Return model in current state by name
> migrator.sql(sql) # Run custom SQL
> migrator.python(func, *args, **kwargs) # Run python code
> migrator.create_model(Model) # Create a model (could be used as decorator)
> migrator.remove_model(model, cascade=True) # Remove a model
> migrator.add_fields(model, **fields) # Add fields to a model
> migrator.change_fields(model, **fields) # Change fields
> migrator.remove_fields(model, *field_names, cascade=True)
> migrator.rename_field(model, old_field_name, new_field_name)
> migrator.rename_table(model, new_table_name)
> migrator.add_index(model, *col_names, unique=False)
> migrator.drop_index(model, *col_names)
> migrator.add_not_null(model, *field_names)
> migrator.drop_not_null(model, *field_names)
> migrator.add_default(model, field_name, default)
"""
import datetime as dt
import peewee as pw
from playhouse.sqlite_ext import *
from decimal import ROUND_HALF_EVEN
from frigate.models import Event
try:
import playhouse.postgres_ext as pw_pext
except ImportError:
pass
SQL = pw.SQL
def migrate(migrator, database, fake=False, **kwargs):
migrator.add_fields(
Event,
sub_label=pw.CharField(max_length=20, null=True),
)
def rollback(migrator, database, fake=False, **kwargs):
migrator.remove_fields(Event, ["sub_label"])

View File

@ -0,0 +1,38 @@
"""Peewee migrations -- 009_add_object_filter_ratio.py.
Some examples (model - class or model name)::
> Model = migrator.orm['model_name'] # Return model in current state by name
> migrator.sql(sql) # Run custom SQL
> migrator.python(func, *args, **kwargs) # Run python code
> migrator.create_model(Model) # Create a model (could be used as decorator)
> migrator.remove_model(model, cascade=True) # Remove a model
> migrator.add_fields(model, **fields) # Add fields to a model
> migrator.change_fields(model, **fields) # Change fields
> migrator.remove_fields(model, *field_names, cascade=True)
> migrator.rename_field(model, old_field_name, new_field_name)
> migrator.rename_table(model, new_table_name)
> migrator.add_index(model, *col_names, unique=False)
> migrator.drop_index(model, *col_names)
> migrator.add_not_null(model, *field_names)
> migrator.drop_not_null(model, *field_names)
> migrator.add_default(model, field_name, default)
"""
import peewee as pw
from frigate.models import Event
SQL = pw.SQL
def migrate(migrator, database, fake=False, **kwargs):
migrator.add_fields(
Event,
ratio=pw.FloatField(default=1.0), # Assume that existing detections are square
)
def rollback(migrator, database, fake=False, **kwargs):
migrator.remove_fields(Event, ["ratio"])

View File

@ -0,0 +1,46 @@
"""Peewee migrations -- 010_add_plus_image_id.py.
Some examples (model - class or model name)::
> Model = migrator.orm['model_name'] # Return model in current state by name
> migrator.sql(sql) # Run custom SQL
> migrator.python(func, *args, **kwargs) # Run python code
> migrator.create_model(Model) # Create a model (could be used as decorator)
> migrator.remove_model(model, cascade=True) # Remove a model
> migrator.add_fields(model, **fields) # Add fields to a model
> migrator.change_fields(model, **fields) # Change fields
> migrator.remove_fields(model, *field_names, cascade=True)
> migrator.rename_field(model, old_field_name, new_field_name)
> migrator.rename_table(model, new_table_name)
> migrator.add_index(model, *col_names, unique=False)
> migrator.drop_index(model, *col_names)
> migrator.add_not_null(model, *field_names)
> migrator.drop_not_null(model, *field_names)
> migrator.add_default(model, field_name, default)
"""
import datetime as dt
import peewee as pw
from playhouse.sqlite_ext import *
from decimal import ROUND_HALF_EVEN
from frigate.models import Event
try:
import playhouse.postgres_ext as pw_pext
except ImportError:
pass
SQL = pw.SQL
def migrate(migrator, database, fake=False, **kwargs):
migrator.add_fields(
Event,
plus_id=pw.CharField(max_length=30, null=True),
)
def rollback(migrator, database, fake=False, **kwargs):
migrator.remove_fields(Event, ["plus_id"])
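Frigate applies pending migrations at startup; for reference, a standalone sketch of running these four with peewee_migrate (the database path and migrations directory are assumptions):

from peewee_migrate import Router
from playhouse.sqlite_ext import SqliteExtDatabase

db = SqliteExtDatabase("/media/frigate/frigate.db")  # assumed default path
router = Router(db, migrate_dir="migrations")
router.run()  # applies any unapplied migrations, including 007-010 above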

View File

@ -1 +0,0 @@
node_modules

View File

@ -1,2 +1,2 @@
build/*
dist/*
node_modules/*

29
web/.eslintrc Normal file
View File

@ -0,0 +1,29 @@
{
"env": {
"browser": true,
"es2021": true
},
"extends": ["eslint:recommended", "preact", "prettier"],
"parser": "@typescript-eslint/parser",
"parserOptions": {
"ecmaFeatures": {
"jsx": true
},
"ecmaVersion": 12,
"sourceType": "module"
},
"rules": {
"indent": ["error", 2, { "SwitchCase": 1 }],
"comma-dangle": ["error", { "objects": "always-multiline", "arrays": "always-multiline" }],
"no-unused-vars": ["error", { "argsIgnorePattern": "^_", "varsIgnorePattern": "^_" }],
"no-console": "error"
},
"overrides": [
{
"files": ["**/*.{ts,tsx}"],
"parser": "@typescript-eslint/parser",
"plugins": ["@typescript-eslint"],
"extends": ["eslint:recommended", "plugin:@typescript-eslint/recommended", "prettier"]
}
]
}

View File

@ -1,140 +0,0 @@
module.exports = {
parser: '@babel/eslint-parser',
parserOptions: {
sourceType: 'module',
ecmaFeatures: {
experimentalObjectRestSpread: true,
jsx: true,
},
},
extends: [
'prettier',
'preact',
'plugin:import/react',
'plugin:testing-library/recommended',
'plugin:jest/recommended',
],
plugins: ['import', 'testing-library', 'jest'],
env: {
es6: true,
node: true,
browser: true,
},
rules: {
'constructor-super': 'error',
'default-case': ['error', { commentPattern: '^no default$' }],
'handle-callback-err': ['error', '^(err|error)$'],
'new-cap': ['error', { newIsCap: true, capIsNew: false }],
'no-alert': 'error',
'no-array-constructor': 'error',
'no-caller': 'error',
'no-case-declarations': 'error',
'no-class-assign': 'error',
'no-cond-assign': 'error',
'no-console': 'error',
'no-const-assign': 'error',
'no-control-regex': 'error',
'no-debugger': 'error',
'no-delete-var': 'error',
'no-dupe-args': 'error',
'no-dupe-class-members': 'error',
'no-dupe-keys': 'error',
'no-duplicate-case': 'error',
'no-duplicate-imports': 'error',
'no-empty-character-class': 'error',
'no-empty-pattern': 'error',
'no-eval': 'error',
'no-ex-assign': 'error',
'no-extend-native': 'error',
'no-extra-bind': 'error',
'no-extra-boolean-cast': 'error',
'no-fallthrough': 'error',
'no-floating-decimal': 'error',
'no-func-assign': 'error',
'no-implied-eval': 'error',
'no-inner-declarations': ['error', 'functions'],
'no-invalid-regexp': 'error',
'no-irregular-whitespace': 'error',
'no-iterator': 'error',
'no-label-var': 'error',
'no-labels': ['error', { allowLoop: false, allowSwitch: false }],
'no-lone-blocks': 'error',
'no-loop-func': 'error',
'no-multi-str': 'error',
'no-native-reassign': 'error',
'no-negated-in-lhs': 'error',
'no-new': 'error',
'no-new-func': 'error',
'no-new-object': 'error',
'no-new-require': 'error',
'no-new-symbol': 'error',
'no-new-wrappers': 'error',
'no-obj-calls': 'error',
'no-octal': 'error',
'no-octal-escape': 'error',
'no-path-concat': 'error',
'no-proto': 'error',
'no-redeclare': 'error',
'no-regex-spaces': 'error',
'no-return-assign': ['error', 'except-parens'],
'no-script-url': 'error',
'no-self-assign': 'error',
'no-self-compare': 'error',
'no-sequences': 'error',
'no-shadow-restricted-names': 'error',
'no-sparse-arrays': 'error',
'no-this-before-super': 'error',
'no-throw-literal': 'error',
'no-trailing-spaces': 'error',
'no-undef': 'error',
'no-undef-init': 'error',
'no-unexpected-multiline': 'error',
'no-unmodified-loop-condition': 'error',
'no-unneeded-ternary': ['error', { defaultAssignment: false }],
'no-unreachable': 'error',
'no-unsafe-finally': 'error',
'no-unused-vars': ['error', { vars: 'all', args: 'none', ignoreRestSiblings: true }],
'no-useless-call': 'error',
'no-useless-computed-key': 'error',
'no-useless-concat': 'error',
'no-useless-constructor': 'error',
'no-useless-escape': 'error',
'no-var': 'error',
'no-with': 'error',
'prefer-const': 'error',
'prefer-rest-params': 'error',
'use-isnan': 'error',
'valid-typeof': 'error',
camelcase: 'off',
eqeqeq: ['error', 'allow-null'],
indent: ['error', 2, { SwitchCase: 1 }],
quotes: ['error', 'single', 'avoid-escape'],
radix: 'error',
yoda: ['error', 'never'],
'import/no-unresolved': 'error',
// 'react-hooks/exhaustive-deps': 'error',
'jest/consistent-test-it': ['error', { fn: 'test' }],
'jest/no-test-prefixes': 'error',
'jest/no-restricted-matchers': [
'error',
{ toMatchSnapshot: 'Use `toMatchInlineSnapshot()` and ensure you only snapshot very small elements' },
],
'jest/valid-describe': 'error',
'jest/valid-expect-in-promise': 'error',
},
settings: {
'import/resolver': {
node: {
extensions: ['.js', '.jsx'],
},
},
},
};

24
web/.gitignore vendored Normal file
View File

@ -0,0 +1,24 @@
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
pnpm-debug.log*
lerna-debug.log*
node_modules
dist
dist-ssr
*.local
# Editor directories and files
.vscode/*
!.vscode/extensions.json
.idea
.DS_Store
*.suo
*.ntvs*
*.njsproj
*.sln
*.sw?

4
web/.prettierrc Normal file
View File

@ -0,0 +1,4 @@
{
"printWidth": 120,
"singleQuote": true
}

View File

@ -1,3 +0,0 @@
# Frigate Web UI
For installation and contributing instructions, please follow the [Contributing Docs](https://blakeblackshear.github.io/frigate/contributing).

View File

@ -1,4 +1,4 @@
module.exports = {
presets: ['@babel/preset-env'],
presets: ['@babel/preset-env', ['@babel/typescript', { jsxPragma: 'h' }]],
plugins: [['@babel/plugin-transform-react-jsx', { pragma: 'h' }]],
};

73
web/config/handlers.js Normal file
View File

@ -0,0 +1,73 @@
import { rest } from 'msw';
import { API_HOST } from '../src/env';
export const handlers = [
rest.get(`${API_HOST}/api/config`, (req, res, ctx) => {
return res(
ctx.status(200),
ctx.json({
mqtt: {
stats_interval: 60,
},
service: {
version: '0.8.3',
},
cameras: {
front: {
name: 'front',
objects: { track: ['taco', 'cat', 'dog'] },
record: { enabled: true },
detect: { width: 1280, height: 720 },
snapshots: {},
live: { height: 720 },
},
side: {
name: 'side',
objects: { track: ['taco', 'cat', 'dog'] },
record: { enabled: false },
detect: { width: 1280, height: 720 },
snapshots: {},
live: { height: 720 },
},
},
})
);
}),
rest.get(`${API_HOST}/api/stats`, (req, res, ctx) => {
return res(
ctx.status(200),
ctx.json({
detection_fps: 0.0,
detectors: { coral: { detection_start: 0.0, inference_speed: 8.94, pid: 52 } },
front: { camera_fps: 5.0, capture_pid: 64, detection_fps: 0.0, pid: 54, process_fps: 0.0, skipped_fps: 0.0 },
side: {
camera_fps: 6.9,
capture_pid: 71,
detection_fps: 0.0,
pid: 60,
process_fps: 0.0,
skipped_fps: 0.0,
},
service: { uptime: 34812, version: '0.8.1-d376f6b' },
})
);
}),
rest.get(`${API_HOST}/api/events`, (req, res, ctx) => {
return res(
ctx.status(200),
ctx.json(
new Array(12).fill(null).map((v, i) => ({
end_time: 1613257337 + i,
has_clip: true,
has_snapshot: true,
id: i,
label: 'person',
start_time: 1613257326 + i,
top_score: Math.random(),
zones: ['front_patio'],
thumbnail: '/9j/4aa...',
}))
)
);
}),
];

6
web/config/server.js Normal file
View File

@ -0,0 +1,6 @@
// src/mocks/server.js
import { setupServer } from 'msw/node';
import { handlers } from './handlers';
// This configures a request mocking server with the given request handlers.
export const server = setupServer(...handlers);

View File

@ -1,5 +1,6 @@
import 'regenerator-runtime/runtime';
import '@testing-library/jest-dom/extend-expect';
import { server } from './server.js';
Object.defineProperty(window, 'matchMedia', {
writable: true,
@ -13,6 +14,16 @@ Object.defineProperty(window, 'matchMedia', {
}),
});
window.fetch = () => Promise.resolve();
jest.mock('../src/env');
// Establish API mocking before all tests.
beforeAll(() => server.listen());
// Reset any request handlers that we may add during the tests,
// so they don't affect other tests.
afterEach(() => {
server.resetHandlers();
});
// Clean up after the tests are finished.
afterAll(() => server.close());

View File

@ -0,0 +1,24 @@
import { h } from 'preact';
import { render } from '@testing-library/preact';
import { ApiProvider } from '../src/api';
const Wrapper = ({ children }) => {
return (
<ApiProvider
options={{
dedupingInterval: 0,
provider: () => new Map(),
}}
>
{children}
</ApiProvider>
);
};
const customRender = (ui, options) => render(ui, { wrapper: Wrapper, ...options });
// re-export everything
export * from '@testing-library/preact';
// override render method
export { customRender as render };

Binary image diffs (file contents unchanged): nine icon/logo assets of 3.1 KiB, 6.9 KiB, 3.9 KiB, 558 B, 800 B, 15 KiB, 12 KiB, 2.6 KiB and 2.9 KiB are moved/renamed, and web/images/marker.png (534 B) is added as a new file; the index.html and site.webmanifest changes below now reference these assets under /images/.
View File

@ -1,25 +1,25 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<link rel="icon" href="/favicon.ico" />
<meta charset="UTF-8" />
<link rel="icon" href="/images/favicon.ico" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Frigate</title>
<link rel="apple-touch-icon" sizes="180x180" href="/apple-touch-icon.png" />
<link rel="icon" type="image/png" sizes="32x32" href="/favicon-32x32.png" />
<link rel="icon" type="image/png" sizes="16x16" href="/favicon-16x16.png" />
<link rel="apple-touch-icon" sizes="180x180" href="/images/apple-touch-icon.png" />
<link rel="icon" type="image/png" sizes="32x32" href="/images/favicon-32x32.png" />
<link rel="icon" type="image/png" sizes="16x16" href="/images/favicon-16x16.png" />
<link rel="manifest" href="/site.webmanifest" />
<link rel="mask-icon" href="/safari-pinned-tab.svg" color="#3b82f7" />
<link rel="mask-icon" href="/images/safari-pinned-tab.svg" color="#3b82f7" />
<meta name="msapplication-TileColor" content="#3b82f7" />
<meta name="theme-color" content="#ffffff" media="(prefers-color-scheme: light)" />
<meta name="theme-color" content="#111827" media="(prefers-color-scheme: dark)" />
</head>
<body>
<div id="root" class="z-0"></div>
<div id="app" class="z-0"></div>
<div id="dialogs" class="z-0"></div>
<div id="menus" class="z-0"></div>
<div id="tooltips" class="z-0"></div>
<noscript>You need to enable JavaScript to run this app.</noscript>
<script type="module" src="/dist/index.js"></script>
<script type="module" src="/src/main.tsx"></script>
</body>
</html>

View File

@ -1,12 +1,198 @@
/*
* For a detailed explanation regarding each configuration property, visit:
* https://jestjs.io/docs/configuration
*/
module.exports = {
moduleFileExtensions: ['js', 'jsx'],
name: 'react-component-benchmark',
resetMocks: true,
roots: ['<rootDir>'],
setupFilesAfterEnv: ['<rootDir>/config/setupTests.js'],
testEnvironment: 'jsdom',
timers: 'fake',
// All imported modules in your tests should be mocked automatically
// automock: false,
// Stop running tests after `n` failures
// bail: 0,
// The directory where Jest should store its cached dependency information
// cacheDirectory: "/tmp/jest_rs",
// Automatically clear mock calls, instances and results before every test
// clearMocks: true,
// Indicates whether the coverage information should be collected while executing the test
// collectCoverage: true,
// An array of glob patterns indicating a set of files for which coverage information should be collected
// collectCoverageFrom: undefined,
// The directory where Jest should output its coverage files
// coverageDirectory: 'coverage',
// An array of regexp pattern strings used to skip coverage collection
// coveragePathIgnorePatterns: [
// "/node_modules/"
// ],
// Indicates which provider should be used to instrument code for coverage
// coverageProvider: "babel",
// A list of reporter names that Jest uses when writing coverage reports
// coverageReporters: [
// "json",
// "text",
// "lcov",
// "clover"
// ],
// An object that configures minimum threshold enforcement for coverage results
// coverageThreshold: undefined,
// A path to a custom dependency extractor
// dependencyExtractor: undefined,
// Make calling deprecated APIs throw helpful error messages
// errorOnDeprecated: false,
// Force coverage collection from ignored files using an array of glob patterns
// forceCoverageMatch: [],
// A path to a module which exports an async function that is triggered once before all test suites
// globalSetup: undefined,
// A path to a module which exports an async function that is triggered once after all test suites
// globalTeardown: undefined,
// A set of global variables that need to be available in all test environments
// globals: {},
// The maximum amount of workers used to run your tests. Can be specified as % or a number. E.g. maxWorkers: 10% will use 10% of your CPU amount + 1 as the maximum worker number. maxWorkers: 2 will use a maximum of 2 workers.
// maxWorkers: "50%",
// An array of directory names to be searched recursively up from the requiring module's location
// moduleDirectories: [
// "node_modules"
// ],
// An array of file extensions your modules use
// moduleFileExtensions: [
// "js",
// "jsx",
// "ts",
// "tsx",
// "json",
// "node"
// ],
// A map from regular expressions to module names or to arrays of module names that allow to stub out resources with a single module
moduleNameMapper: {
'\\.(scss|sass|css)$': '<rootDir>/src/__mocks__/styleMock.js'
}
'\\.(scss|sass|css)$': '<rootDir>/src/__mocks__/styleMock.js',
'^testing-library$': '<rootDir>/config/testing-library',
'^react$': 'preact/compat',
'^react-dom$': 'preact/compat',
},
// An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
// modulePathIgnorePatterns: [],
// Activates notifications for test results
// notify: false,
// An enum that specifies notification mode. Requires { notify: true }
// notifyMode: "failure-change",
// A preset that is used as a base for Jest's configuration
// preset: undefined,
// Run tests from one or more projects
// projects: undefined,
// Use this configuration option to add custom reporters to Jest
// reporters: undefined,
// Automatically reset mock state before every test
resetMocks: true,
// Reset the module registry before running each individual test
// resetModules: false,
// A path to a custom resolver
// resolver: undefined,
// Automatically restore mock state and implementation before every test
// restoreMocks: false,
// The root directory that Jest should scan for tests and modules within
// rootDir: undefined,
// A list of paths to directories that Jest should use to search for files in
roots: ['<rootDir>'],
// Allows you to use a custom runner instead of Jest's default test runner
// runner: "jest-runner",
// The paths to modules that run some code to configure or set up the testing environment before each test
// setupFiles: [],
// A list of paths to modules that run some code to configure or set up the testing framework before each test
// setupFilesAfterEnv: [],
setupFilesAfterEnv: ['<rootDir>/config/setupTests.js'],
// The number of seconds after which a test is considered as slow and reported as such in the results.
// slowTestThreshold: 5,
// A list of paths to snapshot serializer modules Jest should use for snapshot testing
// snapshotSerializers: [],
// The test environment that will be used for testing
testEnvironment: 'jsdom',
// Options that will be passed to the testEnvironment
// testEnvironmentOptions: {},
// Adds a location field to test results
// testLocationInResults: false,
// The glob patterns Jest uses to detect test files
// testMatch: [
// "**/__tests__/**/*.[jt]s?(x)",
// "**/?(*.)+(spec|test).[tj]s?(x)"
// ],
// An array of regexp pattern strings that are matched against all test paths, matched tests are skipped
// testPathIgnorePatterns: [
// "/node_modules/"
// ],
// The regexp pattern or array of patterns that Jest uses to detect test files
// testRegex: [],
// This option allows the use of a custom results processor
// testResultsProcessor: undefined,
// This option allows use of a custom test runner
// testRunner: "jest-circus/runner",
// This option sets the URL for the jsdom environment. It is reflected in properties such as location.href
// testURL: "http://localhost",
// Setting this value to "fake" allows the use of fake timers for functions such as "setTimeout"
// timers: 'fake',
// A map from regular expressions to paths to transformers
// transform: undefined,
// An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
// transformIgnorePatterns: [
// "/node_modules/",
// "\\.pnp\\.[^\\/]+$"
// ],
// An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
// unmockedModulePathPatterns: undefined,
// Indicates whether each individual test should be reported during the run
// verbose: undefined,
// An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode
// watchPathIgnorePatterns: [],
// Whether to use watchman for file crawling
// watchman: true,
};

web/package-lock.json (generated): 23,732 lines changed; diff suppressed because it is too large.

View File

@ -1,51 +1,50 @@
{
"name": "frigate",
"private": true,
"version": "0.0.0",
"scripts": {
"start": "cross-env SNOWPACK_PUBLIC_API_HOST=http://localhost:5000 snowpack dev",
"start:custom": "snowpack dev",
"prebuild": "rimraf build",
"build": "cross-env NODE_ENV=production SNOWPACK_MODE=production SNOWPACK_PUBLIC_API_HOST='' snowpack build",
"lint": "npm run lint:cmd -- --fix",
"lint:cmd": "eslint ./ --ext .jsx,.js",
"dev": "vite",
"lint": "eslint ./ --ext .jsx,.js,.tsx,.ts",
"build": "tsc && vite build",
"preview": "vite preview",
"test": "jest"
},
"dependencies": {
"@cycjimmy/jsmpeg-player": "^5.0.1",
"date-fns": "^2.21.3",
"idb-keyval": "^5.0.2",
"immer": "^9.0.6",
"preact": "^10.5.9",
"axios": "^0.26.0",
"date-fns": "^2.28.0",
"idb-keyval": "^6.1.0",
"immer": "^9.0.12",
"preact": "^10.6.6",
"preact-async-route": "^2.2.1",
"preact-router": "^3.2.1",
"video.js": "^7.15.4",
"videojs-playlist": "^4.3.1",
"videojs-seek-buttons": "^2.0.1"
"preact-router": "^4.0.1",
"swr": "^1.2.2",
"video.js": "^7.17.0",
"videojs-playlist": "^5.0.0",
"videojs-seek-buttons": "^2.2.0"
},
"devDependencies": {
"@babel/eslint-parser": "^7.12.13",
"@babel/plugin-transform-react-jsx": "^7.12.13",
"@babel/preset-env": "^7.12.13",
"@prefresh/snowpack": "^3.0.1",
"@snowpack/plugin-postcss": "^1.1.0",
"@testing-library/jest-dom": "^5.11.9",
"@babel/preset-env": "^7.16.11",
"@babel/preset-typescript": "^7.16.7",
"@preact/preset-vite": "^2.1.5",
"@tailwindcss/forms": "^0.5.0",
"@testing-library/jest-dom": "^5.16.2",
"@testing-library/preact": "^2.0.1",
"@testing-library/user-event": "^12.7.1",
"autoprefixer": "^10.2.1",
"cross-env": "^7.0.3",
"eslint": "^7.19.0",
"eslint-config-preact": "^1.1.3",
"eslint-config-prettier": "^7.2.0",
"eslint-plugin-import": "^2.22.1",
"eslint-plugin-jest": "^24.1.3",
"eslint-plugin-testing-library": "^3.10.1",
"jest": "^26.6.3",
"postcss": "^8.2.10",
"postcss-cli": "^8.3.1",
"prettier": "^2.2.1",
"rimraf": "^3.0.2",
"snowpack": "^3.0.11",
"snowpack-plugin-hash": "^0.14.2",
"tailwindcss": "^2.0.2"
"@testing-library/preact-hooks": "^1.1.0",
"@testing-library/user-event": "^13.5.0",
"@typescript-eslint/eslint-plugin": "^5.18.0",
"@typescript-eslint/parser": "^5.18.0",
"autoprefixer": "^10.4.2",
"eslint": "^8.13.0",
"eslint-config-preact": "^1.3.0",
"eslint-config-prettier": "^8.5.0",
"eslint-plugin-jest": "^26.1.4",
"jest": "^27.5.1",
"msw": "^0.38.2",
"postcss": "^8.4.7",
"prettier": "^2.5.1",
"tailwindcss": "^3.0.23",
"typescript": "^4.5.4",
"vite": "^2.8.0"
}
}

View File

@ -1,3 +1,6 @@
module.exports = {
plugins: [require('tailwindcss'), require('autoprefixer')],
};
plugins: {
tailwindcss: {},
autoprefixer: {},
},
}

View File

@ -1,5 +0,0 @@
module.exports = {
printWidth: 120,
singleQuote: true,
useTabs: false,
};

View File

@ -1,19 +0,0 @@
{
"name": "",
"short_name": "",
"icons": [
{
"src": "/android-chrome-192x192.png",
"sizes": "192x192",
"type": "image/png"
},
{
"src": "/android-chrome-512x512.png",
"sizes": "512x512",
"type": "image/png"
}
],
"theme_color": "#ffffff",
"background_color": "#ffffff",
"display": "standalone"
}

19
web/site.webmanifest Normal file
View File

@ -0,0 +1,19 @@
{
"name": "",
"short_name": "",
"icons": [
{
"src": "/images/android-chrome-192x192.png",
"sizes": "192x192",
"type": "image/png"
},
{
"src": "/images/android-chrome-512x512.png",
"sizes": "512x512",
"type": "image/png"
}
],
"theme_color": "#ffffff",
"background_color": "#ffffff",
"display": "standalone"
}

View File

@ -1,19 +0,0 @@
module.exports = {
mount: {
public: { url: '/', static: true },
src: { url: '/dist' },
},
plugins: ['@snowpack/plugin-postcss', '@prefresh/snowpack', 'snowpack-plugin-hash'],
routes: [{ match: 'all', src: '(?!.*(.svg|.gif|.json|.jpg|.jpeg|.png|.js)).*', dest: '/index.html' }],
optimize: {
bundle: false,
minify: true,
treeshake: true,
},
packageOptions: {
sourcemap: false,
},
buildOptions: {
sourcemap: false,
},
};

View File

@ -7,26 +7,28 @@ import Cameras from './routes/Cameras';
import { Router } from 'preact-router';
import Sidebar from './Sidebar';
import { DarkModeProvider, DrawerProvider } from './context';
import { FetchStatus, useConfig } from './api';
import useSWR from 'swr';
export default function App() {
const { status } = useConfig();
const { data: config } = useSWR('config');
const cameraComponent = config && config.ui.use_experimental ? Routes.getCameraV2 : Routes.getCamera;
return (
<DarkModeProvider>
<DrawerProvider>
<div data-testid="app" className="w-full">
<AppBar />
{status !== FetchStatus.LOADED ? (
{!config ? (
<div className="flex flex-grow-1 min-h-screen justify-center items-center">
<ActivityIndicator />
</div>
) : (
<div className="flex flex-row min-h-screen w-full bg-white dark:bg-gray-900 text-gray-900 dark:text-white">
<Sidebar />
<div className="w-full flex-auto p-2 mt-16 px-4 min-w-0">
<div className="w-full flex-auto mt-16 min-w-0">
<Router>
<AsyncRoute path="/cameras/:camera/editor" getComponent={Routes.getCameraMap} />
<AsyncRoute path="/cameras/:camera" getComponent={Routes.getCamera} />
<AsyncRoute path="/cameras/:camera" getComponent={cameraComponent} />
<AsyncRoute path="/birdseye" getComponent={Routes.getBirdseye} />
<AsyncRoute path="/events" getComponent={Routes.getEvents} />
<AsyncRoute path="/recording/:camera/:date?/:hour?/:seconds?" getComponent={Routes.getRecording} />

View File

@ -6,7 +6,7 @@ import AutoAwesomeIcon from './icons/AutoAwesome';
import LightModeIcon from './icons/LightMode';
import DarkModeIcon from './icons/DarkMode';
import FrigateRestartIcon from './icons/FrigateRestart';
import Dialog from './components/Dialog';
import Prompt from './components/Prompt';
import { useDarkMode } from './context';
import { useCallback, useRef, useState } from 'preact/hooks';
import { useRestart } from './api/mqtt';
@ -19,7 +19,7 @@ export default function AppBar() {
const { send: sendRestart } = useRestart();
const handleSelectDarkMode = useCallback(
(value, label) => {
(value) => {
setDarkMode(value);
setShowMoreMenu(false);
},
@ -65,7 +65,7 @@ export default function AppBar() {
</Menu>
) : null}
{showDialog ? (
<Dialog
<Prompt
onDismiss={handleDismissRestartDialog}
title="Restart Frigate"
text="Are you sure?"
@ -76,7 +76,7 @@ export default function AppBar() {
/>
) : null}
{showDialogWait ? (
<Dialog
<Prompt
title="Restart in progress"
text="Please wait a few seconds for the restart to complete before reloading the page."
/>

View File

@ -3,14 +3,15 @@ import LinkedLogo from './components/LinkedLogo';
import { Match } from 'preact-router/match';
import { memo } from 'preact/compat';
import { ENV } from './env';
import { useConfig } from './api';
import { useMemo } from 'preact/hooks';
import useSWR from 'swr';
import NavigationDrawer, { Destination, Separator } from './components/NavigationDrawer';
export default function Sidebar() {
const { data: config } = useConfig();
const cameras = useMemo(() => Object.entries(config.cameras), [config]);
const { birdseye } = config;
const { data: config } = useSWR('config');
if (!config) {
return null;
}
const { cameras, birdseye } = config;
return (
<NavigationDrawer header={<Header />}>
@ -20,8 +21,8 @@ export default function Sidebar() {
matches ? (
<Fragment>
<Separator />
{cameras.map(([camera]) => (
<Destination href={`/cameras/${camera}`} text={camera} />
{Object.keys(cameras).map((camera) => (
<Destination key={camera} href={`/cameras/${camera}`} text={camera} />
))}
<Separator />
</Fragment>
@ -33,18 +34,14 @@ export default function Sidebar() {
matches ? (
<Fragment>
<Separator />
{cameras.map(([camera, conf]) => {
if (conf.record.enabled) {
return (
<Destination
path={`/recording/${camera}/:date?/:hour?/:seconds?`}
href={`/recording/${camera}`}
text={camera}
/>
);
}
return null;
})}
{Object.keys(cameras).map((camera) => (
<Destination
key={camera}
path={`/recording/${camera}/:date?/:hour?/:seconds?`}
href={`/recording/${camera}`}
text={camera}
/>
))}
<Separator />
</Fragment>
) : null

View File

@ -1,25 +1,17 @@
import { h } from 'preact';
import * as Api from '../api';
import * as IDB from 'idb-keyval';
import * as PreactRouter from 'preact-router';
import App from '../App';
import { render, screen } from '@testing-library/preact';
import { render, screen } from 'testing-library';
describe('App', () => {
let mockUseConfig;
beforeEach(() => {
jest.spyOn(IDB, 'get').mockImplementation(() => Promise.resolve(undefined));
jest.spyOn(IDB, 'set').mockImplementation(() => Promise.resolve(true));
mockUseConfig = jest.spyOn(Api, 'useConfig').mockImplementation(() => ({
data: { cameras: { front: { name: 'front', objects: { track: ['taco', 'cat', 'dog'] } } } },
}));
jest.spyOn(Api, 'useApiHost').mockImplementation(() => 'http://base-url.local:5000');
jest.spyOn(PreactRouter, 'Router').mockImplementation(() => <div data-testid="router" />);
});
test('shows a loading indicator while loading', async () => {
mockUseConfig.mockReturnValue({ status: 'loading' });
render(<App />);
await screen.findByTestId('app');
expect(screen.queryByLabelText('Loading…')).toBeInTheDocument();

View File

@ -1,7 +1,7 @@
import { h } from 'preact';
import * as Context from '../context';
import AppBar from '../AppBar';
import { fireEvent, render, screen } from '@testing-library/preact';
import { fireEvent, render, screen } from 'testing-library';
describe('AppBar', () => {
beforeEach(() => {

View File

@ -1,40 +1,17 @@
import { h } from 'preact';
import * as Api from '../api';
import * as Context from '../context';
import Sidebar from '../Sidebar';
import { render, screen } from '@testing-library/preact';
import { render, screen } from 'testing-library';
describe('Sidebar', () => {
beforeEach(() => {
jest.spyOn(Api, 'useConfig').mockImplementation(() => ({
data: {
cameras: {
front: { name: 'front', objects: { track: ['taco', 'cat', 'dog'] }, record: { enabled: true } },
side: { name: 'side', objects: { track: ['taco', 'cat', 'dog'] }, record: { enabled: false } },
},
},
}));
jest.spyOn(Context, 'useDrawer').mockImplementation(() => ({ showDrawer: true, setShowDrawer: () => {} }));
});
test('does not render cameras by default', async () => {
render(<Sidebar />);
const { findByText } = render(<Sidebar />);
await findByText('Cameras');
expect(screen.queryByRole('link', { name: 'front' })).not.toBeInTheDocument();
expect(screen.queryByRole('link', { name: 'side' })).not.toBeInTheDocument();
});
test('render cameras if in camera route', async () => {
window.history.replaceState({}, 'Cameras', '/cameras/front');
render(<Sidebar />);
expect(screen.queryByRole('link', { name: 'front' })).toBeInTheDocument();
expect(screen.queryByRole('link', { name: 'side' })).toBeInTheDocument();
});
test('render cameras if in record route', async () => {
window.history.replaceState({}, 'Front Recordings', '/recording/front');
render(<Sidebar />);
expect(screen.queryByRole('link', { name: 'front' })).toBeInTheDocument();
expect(screen.queryByRole('link', { name: 'side' })).not.toBeInTheDocument();
});
});

View File

@ -1,7 +1,7 @@
import { h } from 'preact';
import * as Mqtt from '../mqtt';
import { ApiProvider, useFetch, useApiHost } from '..';
import { render, screen } from '@testing-library/preact';
import { ApiProvider, useApiHost } from '..';
import { render, screen } from 'testing-library';
describe('useApiHost', () => {
beforeEach(() => {
@ -21,101 +21,3 @@ describe('useApiHost', () => {
expect(screen.queryByText('http://base-url.local:5000')).toBeInTheDocument();
});
});
function Test() {
const { data, status } = useFetch('/api/tacos');
return (
<div>
<span>{data ? data.returnData : ''}</span>
<span>{status}</span>
</div>
);
}
describe('useFetch', () => {
let fetchSpy;
beforeEach(() => {
jest.spyOn(Mqtt, 'MqttProvider').mockImplementation(({ children }) => children);
fetchSpy = jest.spyOn(window, 'fetch').mockImplementation(async (url, options) => {
if (url.endsWith('/api/config')) {
return Promise.resolve({ ok: true, json: () => Promise.resolve({}) });
}
return new Promise((resolve) => {
setTimeout(() => {
resolve({ ok: true, json: () => Promise.resolve({ returnData: 'yep' }) });
}, 1);
});
});
});
test('loads data', async () => {
render(
<ApiProvider>
<Test />
</ApiProvider>
);
expect(screen.queryByText('loading')).toBeInTheDocument();
expect(screen.queryByText('yep')).not.toBeInTheDocument();
jest.runAllTimers();
await screen.findByText('loaded');
expect(fetchSpy).toHaveBeenCalledWith('http://base-url.local:5000/api/tacos');
expect(screen.queryByText('loaded')).toBeInTheDocument();
expect(screen.queryByText('yep')).toBeInTheDocument();
});
test('sets error if response is not okay', async () => {
jest.spyOn(window, 'fetch').mockImplementation((url) => {
if (url.includes('/config')) {
return Promise.resolve({ ok: true, json: () => Promise.resolve({}) });
}
return new Promise((resolve) => {
setTimeout(() => {
resolve({ ok: false });
}, 1);
});
});
render(
<ApiProvider>
<Test />
</ApiProvider>
);
expect(screen.queryByText('loading')).toBeInTheDocument();
jest.runAllTimers();
await screen.findByText('error');
});
test('does not re-fetch if the query has already been made', async () => {
const { rerender } = render(
<ApiProvider>
<Test key={0} />
</ApiProvider>
);
expect(screen.queryByText('loading')).toBeInTheDocument();
expect(screen.queryByText('yep')).not.toBeInTheDocument();
jest.runAllTimers();
await screen.findByText('loaded');
expect(fetchSpy).toHaveBeenCalledWith('http://base-url.local:5000/api/tacos');
rerender(
<ApiProvider>
<Test key={1} />
</ApiProvider>
);
expect(screen.queryByText('loaded')).toBeInTheDocument();
expect(screen.queryByText('yep')).toBeInTheDocument();
jest.runAllTimers();
// once for /api/config, once for /api/tacos
expect(fetchSpy).toHaveBeenCalledTimes(2);
});
});

View File

@ -1,14 +1,16 @@
import { h } from 'preact';
import { Mqtt, MqttProvider, useMqtt } from '../mqtt';
import { useCallback, useContext } from 'preact/hooks';
import { fireEvent, render, screen } from '@testing-library/preact';
import { fireEvent, render, screen } from 'testing-library';
function Test() {
const { state } = useContext(Mqtt);
return state.__connected ? (
<div data-testid="data">
{Object.keys(state).map((key) => (
<div data-testid={key}>{JSON.stringify(state[key])}</div>
<div key={key} data-testid={key}>
{JSON.stringify(state[key])}
</div>
))}
</div>
) : null;
@ -28,10 +30,10 @@ describe('MqttProvider', () => {
return new Proxy(
{},
{
get(target, prop, receiver) {
get(_target, prop, _receiver) {
return wsClient[prop];
},
set(target, prop, value) {
set(_target, prop, value) {
wsClient[prop] = typeof value === 'function' ? jest.fn(value) : value;
if (prop === 'onopen') {
wsClient[prop]();
@ -121,12 +123,24 @@ describe('MqttProvider', () => {
</MqttProvider>
);
await screen.findByTestId('data');
expect(screen.getByTestId('front/detect/state')).toHaveTextContent('{"lastUpdate":123456,"payload":"ON","retain":true}');
expect(screen.getByTestId('front/recordings/state')).toHaveTextContent('{"lastUpdate":123456,"payload":"OFF","retain":true}');
expect(screen.getByTestId('front/snapshots/state')).toHaveTextContent('{"lastUpdate":123456,"payload":"ON","retain":true}');
expect(screen.getByTestId('side/detect/state')).toHaveTextContent('{"lastUpdate":123456,"payload":"OFF","retain":true}');
expect(screen.getByTestId('side/recordings/state')).toHaveTextContent('{"lastUpdate":123456,"payload":"OFF","retain":true}');
expect(screen.getByTestId('side/snapshots/state')).toHaveTextContent('{"lastUpdate":123456,"payload":"OFF","retain":true}');
expect(screen.getByTestId('front/detect/state')).toHaveTextContent(
'{"lastUpdate":123456,"payload":"ON","retain":true}'
);
expect(screen.getByTestId('front/recordings/state')).toHaveTextContent(
'{"lastUpdate":123456,"payload":"OFF","retain":true}'
);
expect(screen.getByTestId('front/snapshots/state')).toHaveTextContent(
'{"lastUpdate":123456,"payload":"ON","retain":true}'
);
expect(screen.getByTestId('side/detect/state')).toHaveTextContent(
'{"lastUpdate":123456,"payload":"OFF","retain":true}'
);
expect(screen.getByTestId('side/recordings/state')).toHaveTextContent(
'{"lastUpdate":123456,"payload":"OFF","retain":true}'
);
expect(screen.getByTestId('side/snapshots/state')).toHaveTextContent(
'{"lastUpdate":123456,"payload":"OFF","retain":true}'
);
});
});

View File

@ -1,148 +1,29 @@
import { h } from 'preact';
import { baseUrl } from './baseUrl';
import { h, createContext } from 'preact';
import useSWR, { SWRConfig } from 'swr';
import { MqttProvider } from './mqtt';
import produce from 'immer';
import { useContext, useEffect, useReducer } from 'preact/hooks';
import axios from 'axios';
export const FetchStatus = {
NONE: 'none',
LOADING: 'loading',
LOADED: 'loaded',
ERROR: 'error',
};
axios.defaults.baseURL = `${baseUrl}/api/`;
const initialState = Object.freeze({
host: baseUrl,
queries: {},
});
const Api = createContext(initialState);
function reducer(state, { type, payload }) {
switch (type) {
case 'REQUEST': {
const { url, fetchId } = payload;
const data = state.queries[url]?.data || null;
return produce(state, (draftState) => {
draftState.queries[url] = { status: FetchStatus.LOADING, data, fetchId };
});
}
case 'RESPONSE': {
const { url, ok, data, fetchId } = payload;
return produce(state, (draftState) => {
draftState.queries[url] = { status: ok ? FetchStatus.LOADED : FetchStatus.ERROR, data, fetchId };
});
}
case 'DELETE': {
const { eventId } = payload;
return produce(state, (draftState) => {
Object.keys(draftState.queries).map((url) => {
draftState.queries[url].deletedId = eventId;
});
});
}
default:
return state;
}
}
export function ApiProvider({ children }) {
const [state, dispatch] = useReducer(reducer, initialState);
export function ApiProvider({ children, options }) {
return (
<Api.Provider value={{ state, dispatch }}>
<SWRConfig
value={{
fetcher: (path) => axios.get(path).then((res) => res.data),
...options,
}}
>
<MqttWithConfig>{children}</MqttWithConfig>
</Api.Provider>
</SWRConfig>
);
}
function MqttWithConfig({ children }) {
const { data, status } = useConfig();
return status === FetchStatus.LOADED ? <MqttProvider config={data}>{children}</MqttProvider> : children;
}
function shouldFetch(state, url, fetchId = null) {
if ((fetchId && url in state.queries && state.queries[url].fetchId !== fetchId) || !(url in state.queries)) {
return true;
}
const { status } = state.queries[url];
return status !== FetchStatus.LOADING && status !== FetchStatus.LOADED;
}
export function useFetch(url, fetchId) {
const { state, dispatch } = useContext(Api);
useEffect(() => {
if (!shouldFetch(state, url, fetchId)) {
return;
}
async function fetchData() {
await dispatch({ type: 'REQUEST', payload: { url, fetchId } });
const response = await fetch(`${state.host}${url}`);
try {
const data = await response.json();
await dispatch({ type: 'RESPONSE', payload: { url, ok: response.ok, data, fetchId } });
} catch (e) {
await dispatch({ type: 'RESPONSE', payload: { url, ok: false, data: null, fetchId } });
}
}
fetchData();
}, [url, fetchId, state, dispatch]);
if (!(url in state.queries)) {
return { data: null, status: FetchStatus.NONE };
}
const data = state.queries[url].data || null;
const status = state.queries[url].status;
const deletedId = state.queries[url].deletedId || 0;
return { data, status, deletedId };
}
export function useDelete() {
const { dispatch, state } = useContext(Api);
async function deleteEvent(eventId) {
if (!eventId) return null;
const response = await fetch(`${state.host}/api/events/${eventId}`, { method: 'DELETE' });
await dispatch({ type: 'DELETE', payload: { eventId } });
return await (response.status < 300 ? response.json() : { success: true });
}
return deleteEvent;
const { data } = useSWR('config');
return data ? <MqttProvider config={data}>{children}</MqttProvider> : children;
}
export function useApiHost() {
const { state } = useContext(Api);
return state.host;
}
export function useEvents(searchParams, fetchId) {
const url = `/api/events${searchParams ? `?${searchParams.toString()}` : ''}`;
return useFetch(url, fetchId);
}
export function useEvent(eventId, fetchId) {
const url = `/api/events/${eventId}`;
return useFetch(url, fetchId);
}
export function useRecording(camera, fetchId) {
const url = `/api/${camera}/recordings`;
return useFetch(url, fetchId);
}
export function useConfig(searchParams, fetchId) {
const url = `/api/config${searchParams ? `?${searchParams.toString()}` : ''}`;
return useFetch(url, fetchId);
}
export function useStats(searchParams, fetchId) {
const url = `/api/stats${searchParams ? `?${searchParams.toString()}` : ''}`;
return useFetch(url, fetchId);
return baseUrl;
}
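For orientation (not part of this commit): the hunk above replaces the hand-rolled fetch context (FetchStatus, useFetch, useDelete and the per-endpoint hooks) with axios plus SWR keyed by API path. A minimal consumer sketch, assuming ApiProvider is mounted, that 'stats' is still a valid endpoint key, and an illustrative import path:

import { h } from 'preact';
import useSWR from 'swr';
import { useApiHost } from '../api'; // import path assumed

export function StatsSummary() {
  // With ApiProvider mounted, the default fetcher above resolves 'stats' to `${baseUrl}/api/stats`.
  const { data, error } = useSWR('stats');
  const apiHost = useApiHost();

  if (error) {
    return <div>Failed to load stats from {apiHost}</div>;
  }
  if (!data) {
    return <div>Loading…</div>;
  }
  return <pre>{JSON.stringify(data, null, 2)}</pre>;
}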

View File

@ -4,7 +4,7 @@ import { useCallback, useState } from 'preact/hooks';
const MIN_LOAD_TIMEOUT_MS = 200;
export default function AutoUpdatingCameraImage({ camera, searchParams = '', showFps = true }) {
export default function AutoUpdatingCameraImage({ camera, searchParams = '', showFps = true, className }) {
const [key, setKey] = useState(Date.now());
const [fps, setFps] = useState(0);
@ -20,7 +20,7 @@ export default function AutoUpdatingCameraImage({ camera, searchParams = '', sho
}, [key, setFps]);
return (
<div>
<div className={className}>
<CameraImage camera={camera} onload={handleLoad} searchParams={`cache=${key}&${searchParams}`} />
{showFps ? <span className="text-xs">Displaying at {fps}fps</span> : null}
</div>
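A hedged usage sketch for the new className prop on AutoUpdatingCameraImage (import path, camera name and searchParams value are illustrative); the class is forwarded to the wrapping div shown above:

import { h } from 'preact';
import AutoUpdatingCameraImage from './AutoUpdatingCameraImage'; // import path assumed

export function WideCameraFeed() {
  // className is passed through to the wrapping div; camera name and params are placeholders.
  return <AutoUpdatingCameraImage camera="front_door" searchParams="bbox=1" className="w-full max-w-3xl" />;
}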

View File

@ -0,0 +1,45 @@
import { h, JSX } from 'preact';
interface BubbleButtonProps {
variant?: 'primary' | 'secondary';
children?: JSX.Element;
disabled?: boolean;
className?: string;
onClick?: () => void;
}
export const BubbleButton = ({
variant = 'primary',
children,
onClick,
disabled = false,
className = '',
}: BubbleButtonProps) => {
const BASE_CLASS = 'rounded-full px-4 py-2';
const PRIMARY_CLASS = 'text-white bg-blue-500 dark:text-black dark:bg-white';
const SECONDARY_CLASS = 'text-black dark:text-white bg-transparent';
let computedClass = BASE_CLASS;
if (disabled) {
computedClass += ' text-gray-200 dark:text-gray-200';
} else if (variant === 'primary') {
computedClass += ` ${PRIMARY_CLASS}`;
} else if (variant === 'secondary') {
computedClass += ` ${SECONDARY_CLASS}`;
}
const onClickHandler = () => {
if (disabled) {
return;
}
if (onClick) {
onClick();
}
};
return (
<button onClick={onClickHandler} className={`${computedClass} ${className}`}>
{children}
</button>
);
};
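A usage sketch for the new BubbleButton (not part of this diff; import path assumed). Children are wrapped in elements because the prop is typed as JSX.Element:

import { h } from 'preact';
import { BubbleButton } from './BubbleButton'; // import path assumed

export function SaveControls({ onSave, onCancel }: { onSave: () => void; onCancel: () => void }) {
  return (
    <div className="flex space-x-2">
      {/* Primary pill button; the secondary one ignores clicks while disabled. */}
      <BubbleButton variant="primary" onClick={onSave}>
        <span>Save</span>
      </BubbleButton>
      <BubbleButton variant="secondary" disabled onClick={onCancel}>
        <span>Cancel</span>
      </BubbleButton>
    </div>
  );
}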

View File

@ -17,6 +17,13 @@ const ButtonColors = {
text:
'text-red-500 hover:bg-red-500 hover:bg-opacity-20 focus:bg-red-500 focus:bg-opacity-40 active:bg-red-500 active:bg-opacity-40',
},
yellow: {
contained: 'bg-yellow-500 focus:bg-yellow-400 active:bg-yellow-600 ring-yellow-300',
outlined:
'text-yellow-500 border-2 border-yellow-500 hover:bg-yellow-500 hover:bg-opacity-20 focus:bg-yellow-500 focus:bg-opacity-40 active:bg-yellow-500 active:bg-opacity-40',
text:
'text-yellow-500 hover:bg-yellow-500 hover:bg-opacity-20 focus:bg-yellow-500 focus:bg-opacity-40 active:bg-yellow-500 active:bg-opacity-40',
},
green: {
contained: 'bg-green-500 focus:bg-green-400 active:bg-green-600 ring-green-300',
outlined:
@ -57,7 +64,6 @@ export default function Button({
color = 'blue',
disabled = false,
href,
size,
type = 'contained',
...attrs
}) {
@ -74,11 +80,11 @@ export default function Button({
classes = classes.replace(/(?:focus|active|hover):[^ ]+/g, '');
}
const handleMousenter = useCallback((event) => {
const handleMousenter = useCallback(() => {
setHovered(true);
}, []);
const handleMouseleave = useCallback((event) => {
const handleMouseleave = useCallback(() => {
setHovered(false);
}, []);
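A sketch of the new yellow palette in use (illustrative; assumes Button forwards onClick via the ...attrs rest props shown above):

import { h } from 'preact';
import Button from './Button'; // import path assumed

export function RestartButton({ onRestart }: { onRestart: () => void }) {
  // 'yellow' selects the new ButtonColors entry; 'outlined' uses the bordered variant.
  return (
    <Button color="yellow" type="outlined" onClick={onRestart}>
      Restart
    </Button>
  );
}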

View File

@ -7,27 +7,35 @@ export default function ButtonsTabbed({
setHeader = null,
headers = [''],
className = 'text-gray-600 py-0 px-4 block hover:text-gray-500',
selectedClassName = `${className} focus:outline-none border-b-2 font-medium border-gray-500`
selectedClassName = `${className} focus:outline-none border-b-2 font-medium border-gray-500`,
}) {
const [selected, setSelected] = useState(0);
const captitalize = (str) => { return (`${str.charAt(0).toUpperCase()}${str.slice(1)}`); };
const captitalize = (str) => {
return `${str.charAt(0).toUpperCase()}${str.slice(1)}`;
};
const getHeader = useCallback((i) => {
return (headers.length === viewModes.length ? headers[i] : captitalize(viewModes[i]));
}, [headers, viewModes]);
const getHeader = useCallback(
(i) => {
return headers.length === viewModes.length ? headers[i] : captitalize(viewModes[i]);
},
[headers, viewModes]
);
const handleClick = useCallback((i) => {
setSelected(i);
setViewMode && setViewMode(viewModes[i]);
setHeader && setHeader(getHeader(i));
}, [setViewMode, setHeader, setSelected, viewModes, getHeader]);
const handleClick = useCallback(
(i) => {
setSelected(i);
setViewMode && setViewMode(viewModes[i]);
setHeader && setHeader(getHeader(i));
},
[setViewMode, setHeader, setSelected, viewModes, getHeader]
);
setHeader && setHeader(getHeader(selected));
return (
<nav className="flex justify-end">
{viewModes.map((item, i) => {
return (
<button onClick={() => handleClick(i)} className={i === selected ? selectedClassName : className}>
<button key={i} onClick={() => handleClick(i)} className={i === selected ? selectedClassName : className}>
{captitalize(item)}
</button>
);
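A hedged usage sketch for ButtonsTabbed (illustrative; viewModes and setViewMode are assumed to be props destructured above the lines shown in this hunk):

import { h } from 'preact';
import { useState } from 'preact/hooks';
import ButtonsTabbed from './ButtonsTabbed'; // import path assumed

export function EventsToolbar() {
  const [viewMode, setViewMode] = useState('summary');

  // Headers default to the capitalized view mode names when no headers prop is given.
  return (
    <div>
      <ButtonsTabbed viewModes={['summary', 'grid']} setViewMode={setViewMode} />
      <p>Current view: {viewMode}</p>
    </div>
  );
}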

View File

@ -5,7 +5,7 @@ import ArrowRightDouble from '../icons/ArrowRightDouble';
const todayTimestamp = new Date().setHours(0, 0, 0, 0).valueOf();
const Calender = ({ onChange, calenderRef, close }) => {
const Calendar = ({ onChange, calendarRef, close, dateRange }) => {
const keyRef = useRef([]);
const date = new Date();
@ -36,7 +36,7 @@ const Calender = ({ onChange, calenderRef, close }) => {
year,
month,
selectedDay: null,
timeRange: { before: null, after: null },
timeRange: dateRange,
monthDetails: null,
});
@ -98,7 +98,11 @@ const Calender = ({ onChange, calenderRef, close }) => {
);
useEffect(() => {
setState((prev) => ({ ...prev, selectedDay: todayTimestamp, monthDetails: getMonthDetails(year, month) }));
setState((prev) => ({
...prev,
selectedDay: todayTimestamp,
monthDetails: getMonthDetails(year, month),
}));
}, [year, month, getMonthDetails]);
useEffect(() => {
@ -150,7 +154,10 @@ const Calender = ({ onChange, calenderRef, close }) => {
// user has selected a date < after, reset values
if (after === null || day.timestamp < after) {
timeRange = { before: new Date(day.timestamp).setHours(24, 0, 0, 0), after: day.timestamp };
timeRange = {
before: new Date(day.timestamp).setHours(24, 0, 0, 0),
after: day.timestamp,
};
}
// user has selected a date > after
@ -280,7 +287,7 @@ const Calender = ({ onChange, calenderRef, close }) => {
};
return (
<div className="select-none w-96 flex flex-shrink" ref={calenderRef}>
<div className="select-none w-96 flex flex-shrink" ref={calendarRef}>
<div className="py-4 px-6">
<div className="flex items-center">
<div className="w-1/6 relative flex justify-around">
@ -314,7 +321,7 @@ const Calender = ({ onChange, calenderRef, close }) => {
<ArrowRight className="h-2/6" />
</div>
</div>
<div className="w-1/6 relative flex justify-around " tabIndex={104} onClick={() => setYear(1)}>
<div className="w-1/6 relative flex justify-around" tabIndex={104} onClick={() => setYear(1)}>
<div className="flex justify-center items-center cursor-pointer absolute -mt-4 text-center rounded-full w-10 h-10 bg-gray-500 hover:bg-gray-200 dark:hover:bg-gray-800">
<ArrowRightDouble className="h-2/6" />
</div>
@ -326,4 +333,4 @@ const Calender = ({ onChange, calenderRef, close }) => {
);
};
export default Calender;
export default Calendar;
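A usage sketch for the renamed Calendar component (illustrative; the shape of the onChange payload is not shown in this diff, so it is only logged here):

import { h } from 'preact';
import { useRef, useState } from 'preact/hooks';
import Calendar from './Calendar'; // import path assumed

export function DateFilter() {
  const calendarRef = useRef(null);
  const [show, setShow] = useState(true);
  // dateRange seeds the component's timeRange state; nulls mean nothing selected yet.
  const dateRange = { before: null, after: null };

  if (!show) {
    return null;
  }
  return (
    <Calendar
      calendarRef={calendarRef}
      dateRange={dateRange}
      close={() => setShow(false)}
      onChange={(range: unknown) => console.log('selected range', range)}
    />
  );
}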

View File

@ -1,19 +1,20 @@
import { h } from 'preact';
import ActivityIndicator from './ActivityIndicator';
import { useApiHost, useConfig } from '../api';
import { useApiHost } from '../api';
import useSWR from 'swr';
import { useCallback, useEffect, useMemo, useRef, useState } from 'preact/hooks';
import { useResizeObserver } from '../hooks';
export default function CameraImage({ camera, onload, searchParams = '', stretch = false }) {
const { data: config } = useConfig();
const { data: config } = useSWR('config');
const apiHost = useApiHost();
const [hasLoaded, setHasLoaded] = useState(false);
const containerRef = useRef(null);
const canvasRef = useRef(null);
const [{ width: availableWidth }] = useResizeObserver(containerRef);
const { name } = config.cameras[camera];
const { width, height } = config.cameras[camera].detect;
const { name } = config ? config.cameras[camera] : '';
const { width, height } = config ? config.cameras[camera].detect : { width: 1, height: 1 };
const aspectRatio = width / height;
const scaledHeight = useMemo(() => {
@ -36,11 +37,11 @@ export default function CameraImage({ camera, onload, searchParams = '', stretch
);
useEffect(() => {
if (scaledHeight === 0 || !canvasRef.current) {
if (!config || scaledHeight === 0 || !canvasRef.current) {
return;
}
img.src = `${apiHost}/api/${name}/latest.jpg?h=${scaledHeight}${searchParams ? `&${searchParams}` : ''}`;
}, [apiHost, canvasRef, name, img, searchParams, scaledHeight]);
}, [apiHost, canvasRef, name, img, searchParams, scaledHeight, config]);
return (
<div className="relative w-full" ref={containerRef}>

View File

@ -66,7 +66,6 @@ export default function DatePicker({
onBlur,
onChangeText,
onFocus,
readonly,
trailingIcon: TrailingIcon,
value: propValue = '',
...props

View File

@ -0,0 +1,74 @@
import { h } from 'preact';
import Link from './Link';
import Switch from './Switch';
import { useCallback, useMemo } from 'preact/hooks';
import { usePersistence } from '../context';
import AutoUpdatingCameraImage from './AutoUpdatingCameraImage';
const emptyObject = Object.freeze({});
export function DebugCamera({ camera }) {
const [options, setOptions] = usePersistence(`${camera}-feed`, emptyObject);
const handleSetOption = useCallback(
(id, value) => {
const newOptions = { ...options, [id]: value };
setOptions(newOptions);
},
[options, setOptions]
);
const searchParams = useMemo(
() =>
new URLSearchParams(
Object.keys(options).reduce((memo, key) => {
memo.push([key, options[key] === true ? '1' : '0']);
return memo;
}, [])
),
[options]
);
const optionContent = (
<div className='grid grid-cols-2 md:grid-cols-3 lg:grid-cols-4 gap-4'>
<Switch
checked={options['bbox']}
id='bbox'
onChange={handleSetOption}
label='Bounding box'
labelPosition='after'
/>
<Switch
checked={options['timestamp']}
id='timestamp'
onChange={handleSetOption}
label='Timestamp'
labelPosition='after'
/>
<Switch checked={options['zones']} id='zones' onChange={handleSetOption} label='Zones' labelPosition='after' />
<Switch checked={options['mask']} id='mask' onChange={handleSetOption} label='Masks' labelPosition='after' />
<Switch
checked={options['motion']}
id='motion'
onChange={handleSetOption}
label='Motion boxes'
labelPosition='after'
/>
<Switch
checked={options['regions']}
id='regions'
onChange={handleSetOption}
label='Regions'
labelPosition='after'
/>
<Link href={`/cameras/${camera}/editor`}>Mask & Zone creator</Link>
</div>
);
return (
<div>
<AutoUpdatingCameraImage camera={camera} searchParams={searchParams} />
{optionContent}
</div>
);
}
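A usage sketch for the new DebugCamera component (illustrative; the camera name is a placeholder):

import { h } from 'preact';
import { DebugCamera } from './DebugCamera'; // import path assumed

export function CameraDebugPage() {
  // Renders the live feed plus the bbox/timestamp/zones/mask/motion/regions toggles,
  // persisted under the "<camera>-feed" key; the camera name is a placeholder.
  return <DebugCamera camera="front_door" />;
}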

View File

@ -1,10 +1,8 @@
import { h, Fragment } from 'preact';
import Button from './Button';
import Heading from './Heading';
import { createPortal } from 'preact/compat';
import { useState, useEffect } from 'preact/hooks';
export default function Dialog({ actions = [], portalRootID = 'dialogs', title, text }) {
export default function Dialog({ children, portalRootID = 'dialogs' }) {
const portalRoot = portalRootID && document.getElementById(portalRootID);
const [show, setShow] = useState(false);
@ -27,17 +25,7 @@ export default function Dialog({ actions = [], portalRootID = 'dialogs', title,
show ? 'scale-100 opacity-100' : ''
}`}
>
<div className="p-4">
<Heading size="lg">{title}</Heading>
<p>{text}</p>
</div>
<div className="p-2 flex justify-start flex-row-reverse space-x-2">
{actions.map(({ color, text, onClick, ...props }, i) => (
<Button className="ml-2" color={color} key={i} onClick={onClick} type="text" {...props}>
{text}
</Button>
))}
</div>
{children}
</div>
</div>
</Fragment>
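A usage sketch for the children-based Dialog API, mirroring the markup removed above (import paths assumed; a #dialogs portal root must exist in the page):

import { h } from 'preact';
import Dialog from './Dialog'; // import paths assumed
import Button from './Button';
import Heading from './Heading';

export function DeleteConfirmation({ onConfirm, onCancel }: { onConfirm: () => void; onCancel: () => void }) {
  // Callers now compose the body themselves instead of passing title/text/actions props.
  return (
    <Dialog>
      <div className="p-4">
        <Heading size="lg">Delete this clip?</Heading>
        <p>This cannot be undone.</p>
      </div>
      <div className="p-2 flex justify-start flex-row-reverse space-x-2">
        <Button color="red" type="text" onClick={onConfirm}>
          Delete
        </Button>
        <Button type="text" onClick={onCancel}>
          Cancel
        </Button>
      </div>
    </Dialog>
  );
}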

View File

@ -0,0 +1,30 @@
import { h } from 'preact';
import Heading from '../Heading';
import type { TimelineEvent } from '../Timeline/TimelineEvent';
interface HistoryHeaderProps {
event?: TimelineEvent;
className?: string;
}
export const HistoryHeader = ({ event, className = '' }: HistoryHeaderProps) => {
let title = 'No Event Found';
let subtitle = <span>Event was not found at marker position.</span>;
if (event) {
const { startTime, endTime, label } = event;
const thisMorning = new Date();
thisMorning.setHours(0, 0, 0);
const isToday = endTime.getTime() > thisMorning.getTime();
title = label;
subtitle = (
<span>
{isToday ? 'Today' : 'Yesterday'}, {startTime.toLocaleTimeString()} - {endTime.toLocaleTimeString()} &middot;
</span>
);
}
return (
<div className={`text-center ${className}`}>
<Heading size='lg'>{title}</Heading>
<div>{subtitle}</div>
</div>
);
};
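A usage sketch for HistoryHeader (illustrative; only the TimelineEvent fields read above are filled in, and the full type is not shown in this diff):

import { h } from 'preact';
import { HistoryHeader } from './HistoryHeader'; // import paths assumed
import type { TimelineEvent } from '../Timeline/TimelineEvent';

export function HeaderPreview() {
  // Only the fields HistoryHeader reads are filled in; the double cast stands in for
  // the full TimelineEvent shape, which is not visible in this diff.
  const event = {
    label: 'person',
    startTime: new Date(Date.now() - 60 * 1000),
    endTime: new Date(),
  } as unknown as TimelineEvent;

  return <HistoryHeader event={event} className="my-4" />;
}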

View File

@ -0,0 +1,142 @@
import { h } from 'preact';
import { useCallback, useEffect, useRef, useState } from 'preact/hooks';
import { useApiHost } from '../../api';
import { isNullOrUndefined } from '../../utils/objectUtils';
interface OnTimeUpdateEvent {
timestamp: number;
isPlaying: boolean;
}
interface VideoProperties {
posterUrl: string;
videoUrl: string;
height: number;
}
interface HistoryVideoProps {
id?: string;
isPlaying: boolean;
currentTime: number;
onTimeUpdate?: (event: OnTimeUpdateEvent) => void;
onPause: () => void;
onPlay: () => void;
}
export const HistoryVideo = ({
id,
isPlaying: videoIsPlaying,
currentTime,
onTimeUpdate,
onPause,
onPlay,
}: HistoryVideoProps) => {
const apiHost = useApiHost();
const videoRef = useRef<HTMLVideoElement|null>(null);
const [videoHeight, setVideoHeight] = useState<number>(0);
const [videoProperties, setVideoProperties] = useState<VideoProperties>({
posterUrl: '',
videoUrl: '',
height: 0,
});
const currentVideo = videoRef.current;
if (currentVideo && !videoHeight) {
const currentVideoHeight = currentVideo.offsetHeight;
if (currentVideoHeight > 0) {
setVideoHeight(currentVideoHeight);
}
}
useEffect(() => {
const idExists = !isNullOrUndefined(id);
if (idExists) {
if (videoRef.current && !videoRef.current.paused) {
videoRef.current = null;
}
setVideoProperties({
posterUrl: `${apiHost}/api/events/${id}/snapshot.jpg`,
videoUrl: `${apiHost}/vod/event/${id}/index.m3u8`,
height: videoHeight,
});
} else {
setVideoProperties({
posterUrl: '',
videoUrl: '',
height: 0,
});
}
}, [id, videoHeight, videoRef, apiHost]);
useEffect(() => {
const playVideo = (video: HTMLMediaElement) => video.play();
const attemptPlayVideo = (video: HTMLMediaElement) => {
const videoHasNotLoaded = video.readyState <= 1;
if (videoHasNotLoaded) {
video.oncanplay = () => {
playVideo(video);
};
video.load();
} else {
playVideo(video);
}
};
const video = videoRef.current;
const videoExists = !isNullOrUndefined(video);
if (video && videoExists) {
if (videoIsPlaying) {
attemptPlayVideo(video);
} else {
video.pause();
}
}
}, [videoIsPlaying, videoRef]);
useEffect(() => {
const video = videoRef.current;
const videoExists = !isNullOrUndefined(video);
const hasSeeked = currentTime >= 0;
if (video && videoExists && hasSeeked) {
video.currentTime = currentTime;
}
}, [currentTime, videoRef]);
const onTimeUpdateHandler = useCallback(
(event: Event) => {
const target = event.target as HTMLMediaElement;
const timeUpdateEvent = {
isPlaying: videoIsPlaying,
timestamp: target.currentTime,
};
onTimeUpdate && onTimeUpdate(timeUpdateEvent);
},
[videoIsPlaying, onTimeUpdate]
);
const videoPropertiesIsUndefined = isNullOrUndefined(videoProperties);
if (videoPropertiesIsUndefined) {
return <div style={{ height: `${videoHeight}px`, width: '100%' }} />;
}
const { posterUrl, videoUrl, height } = videoProperties;
return (
<video
ref={videoRef}
key={posterUrl}
onTimeUpdate={onTimeUpdateHandler}
onPause={onPause}
onPlay={onPlay}
poster={posterUrl}
preload='metadata'
controls
style={height ? { minHeight: `${height}px` } : {}}
playsInline
>
<source type='application/vnd.apple.mpegurl' src={videoUrl} />
</video>
);
};
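A usage sketch for HistoryVideo (illustrative; the reported timestamp is displayed rather than fed back as a seek, to avoid re-seeking on every timeupdate):

import { h } from 'preact';
import { useState } from 'preact/hooks';
import { HistoryVideo } from './HistoryVideo'; // import path assumed

export function EventPlayback({ eventId }: { eventId: string }) {
  const [isPlaying, setIsPlaying] = useState(false);
  // Last position reported by the player, in seconds.
  const [lastTimestamp, setLastTimestamp] = useState(0);

  return (
    <div>
      <HistoryVideo
        id={eventId}
        isPlaying={isPlaying}
        currentTime={0}
        onPlay={() => setIsPlaying(true)}
        onPause={() => setIsPlaying(false)}
        onTimeUpdate={({ timestamp }) => setLastTimestamp(timestamp)}
      />
      <span>{Math.round(lastTimestamp)}s</span>
    </div>
  );
}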

View File

@ -0,0 +1,91 @@
import { Fragment, h } from 'preact';
import { useCallback, useEffect, useState } from 'preact/hooks';
import useSWR from 'swr';
import axios from 'axios';
import Timeline from '../Timeline/Timeline';
import type { TimelineChangeEvent } from '../Timeline/TimelineChangeEvent';
import type { TimelineEvent } from '../Timeline/TimelineEvent';
import { HistoryHeader } from './HistoryHeader';
import { HistoryVideo } from './HistoryVideo';
export default function HistoryViewer({ camera }: {camera: string}) {
const searchParams = {
before: null,
after: null,
camera,
label: 'all',
zone: 'all',
};
// TODO: refactor
const eventsFetcher = (path: string, params: {[name:string]: string|number}) => {
params = { ...params, include_thumbnails: 0, limit: 500 };
return axios.get<TimelineEvent[]>(path, { params }).then((res) => res.data);
};
const { data: events } = useSWR(['events', searchParams], eventsFetcher);
const [timelineEvents, setTimelineEvents] = useState<TimelineEvent[]>([]);
const [currentEvent, setCurrentEvent] = useState<TimelineEvent>();
const [isPlaying, setIsPlaying] = useState<boolean>(false);
const [currentTime, setCurrentTime] = useState<number>(new Date().getTime());
useEffect(() => {
if (events) {
const filteredEvents = [...events].reverse().filter((e) => e.end_time !== undefined);
setTimelineEvents(filteredEvents);
}
}, [events]);
const handleTimelineChange = useCallback(
(event: TimelineChangeEvent) => {
if (event.seekComplete) {
setCurrentEvent(event.timelineEvent);
if (isPlaying && event.timelineEvent) {
const eventTime = (event.markerTime.getTime() - event.timelineEvent.startTime.getTime()) / 1000;
setCurrentTime(eventTime);
}
}
},
[isPlaying]
);
const onPlayHandler = () => {
setIsPlaying(true);
};
const onPausedHandler = () => {
setIsPlaying(false);
};
const onPlayPauseHandler = (isPlaying: boolean) => {
setIsPlaying(isPlaying);
};
return (
<Fragment>
<Fragment>
<div className='relative flex flex-col'>
<Fragment>
<HistoryHeader event={currentEvent} className='mb-2' />
<HistoryVideo
id={currentEvent ? currentEvent.id : undefined}
isPlaying={isPlaying}
currentTime={currentTime}
onPlay={onPlayHandler}
onPause={onPausedHandler}
/>
</Fragment>
</div>
</Fragment>
<Timeline
events={timelineEvents}
isPlaying={isPlaying}
onChange={handleTimelineChange}
onPlayPause={onPlayPauseHandler}
/>
</Fragment>
);
}
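A usage sketch for HistoryViewer (illustrative; the camera name is a placeholder):

import { h } from 'preact';
import HistoryViewer from './HistoryViewer'; // import path assumed

export function FrontDoorHistory() {
  // Events are fetched and shown for this camera only.
  return <HistoryViewer camera="front_door" />;
}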

View File

@ -5,7 +5,7 @@ import JSMpeg from '@cycjimmy/jsmpeg-player';
export default function JSMpegPlayer({ camera, width, height }) {
const playerRef = useRef();
const url = `${baseUrl.replace(/^http/, 'ws')}/live/${camera}`
const url = `${baseUrl.replace(/^http/, 'ws')}/live/${camera}`;
useEffect(() => {
const video = new JSMpeg.VideoElement(
@ -16,15 +16,15 @@ export default function JSMpegPlayer({ camera, width, height }) {
);
const fullscreen = () => {
if(video.els.canvas.webkitRequestFullScreen) {
if (video.els.canvas.webkitRequestFullScreen) {
video.els.canvas.webkitRequestFullScreen();
}
else {
video.els.canvas.mozRequestFullScreen();
}
}
};
video.els.canvas.addEventListener('click',fullscreen)
video.els.canvas.addEventListener('click',fullscreen);
return () => {
video.destroy();

Some files were not shown because too many files have changed in this diff.