Add S6 scripts to test and convert specified TensorRT models at startup.
Rearrange tensorrt files into a docker support folder.
This commit is contained in:
parent
7155cc4b87
commit
da5c470a50
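As context for the diff below: the frigate-tensorrt image now converts any missing TensorRT engines when the container starts, driven by the YOLO_MODELS environment variable, and caches them under /media/frigate/model_cache/tensorrt. A minimal usage sketch follows; the image tag, container name, and host paths are illustrative assumptions, not part of this commit:

# Illustrative only: run the TensorRT variant with a custom model list.
# YOLO_MODELS and the model_cache output path come from this commit; the image
# tag, container name, and host mount below are assumptions.
docker run -d \
  --name frigate-trt \
  --gpus all \
  -e YOLO_MODELS="yolov4-tiny-416,yolov7-tiny-416" \
  -v /path/to/storage:/media/frigate \
  ghcr.io/blakeblackshear/frigate:stable-tensorrt
docker logs -f frigate-trt   # the trt-model-prepare oneshot logs each conversion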
@@ -256,16 +256,21 @@ COPY --from=rootfs / /

# Build TensorRT-specific library
FROM nvcr.io/nvidia/tensorrt:23.05-py3 AS trt-deps

-RUN --mount=type=bind,source=docker/tensorrt_libyolo.sh,target=/tensorrt_libyolo.sh \
+RUN --mount=type=bind,source=docker/support/tensorrt_detector/tensorrt_libyolo.sh,target=/tensorrt_libyolo.sh \
    /tensorrt_libyolo.sh

# Frigate w/ TensorRT Support as separate image
FROM frigate AS frigate-tensorrt

+ENV YOLO_MODELS="yolov7-tiny-416"

COPY --from=trt-deps /usr/local/lib/libyolo_layer.so /usr/local/lib/libyolo_layer.so
+COPY --from=trt-deps /usr/local/src/tensorrt_demos /usr/local/src/tensorrt_demos
+COPY docker/support/tensorrt_detector/rootfs/ /

RUN --mount=type=bind,from=trt-wheels,source=/trt-wheels,target=/deps/trt-wheels \
    pip3 install -U /deps/trt-wheels/*.whl && \
-    ln -s libnvrtc.so.11.2 /usr/local/lib/python3.9/dist-packages/nvidia/cuda_nvrtc/lib/libnvrtc.so && \
+    ln -s libnvrtc.so.12.1 /usr/local/lib/python3.9/dist-packages/nvidia/cuda_nvrtc/lib/libnvrtc.so && \
    ldconfig

# Dev Container w/ TRT
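To build this stage locally, something like the following should work, assuming BuildKit is enabled (the RUN --mount syntax above requires it); the local image tag is an arbitrary assumption:

# Illustrative only: build the frigate-tensorrt stage defined above.
DOCKER_BUILDKIT=1 docker build --target frigate-tensorrt -t frigate:local-tensorrt .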
docker/support/tensorrt_detector/rootfs/etc/s6-rc.d/trt-model-prepare/run (Executable file, 45 lines)
@@ -0,0 +1,45 @@
#!/command/with-contenv bash
# shellcheck shell=bash
# Convert the TensorRT models requested via YOLO_MODELS at container startup

set -o errexit -o nounset -o pipefail

OUTPUT_FOLDER=/media/frigate/model_cache/tensorrt

# Create output folder
mkdir -p ${OUTPUT_FOLDER}

FIRST_MODEL=true
MODEL_CONVERT=""

# Collect the requested models that do not have a cached engine yet
for model in ${YOLO_MODELS//,/ }
do
    if [[ ! -f ${OUTPUT_FOLDER}/${model}.trt ]]; then
        if [[ ${FIRST_MODEL} == true ]]; then
            MODEL_CONVERT="${model}"
            FIRST_MODEL=false
        else
            MODEL_CONVERT+=",${model}"
        fi
    fi
done

if [[ ${MODEL_CONVERT} == "" ]]; then
    echo "No models to convert."
    exit 0
fi

echo "Generating the following TRT Models: ${MODEL_CONVERT}"

# Build trt engine
cd /usr/local/src/tensorrt_demos/yolo

# Download yolo weights
./download_yolo.sh ${MODEL_CONVERT}

for model in ${MODEL_CONVERT//,/ }
do
    python3 yolo_to_onnx.py -m ${model}
    python3 onnx_to_tensorrt.py -m ${model}
    cp ${model}.trt ${OUTPUT_FOLDER}/${model}.trt
done
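After the container's first start, the oneshot's output can be spot-checked like this (the container name is an assumption; the paths come from the script above):

# Illustrative check: list the engines the trt-model-prepare oneshot produced.
docker exec frigate ls -lh /media/frigate/model_cache/tensorrt
# One <model>.trt per entry in YOLO_MODELS is expected; on later restarts the
# script skips any model whose .trt engine is already cached.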
@@ -0,0 +1 @@
oneshot

@@ -0,0 +1 @@
/etc/s6-overlay/s6-rc.d/trt-model-prepare/run
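For orientation, the two one-line files above follow the standard s6-rc source layout for a oneshot service; a sketch of how the pieces fit together (only the word "oneshot" and the run path are taken from this diff, the rest is the usual s6-rc convention):

# s6-rc reads the service type from the "type" file and, for a oneshot,
# runs the command line in the "up" file once at bring-up.
cat /etc/s6-overlay/s6-rc.d/trt-model-prepare/type   # -> oneshot
cat /etc/s6-overlay/s6-rc.d/trt-model-prepare/up     # -> /etc/s6-overlay/s6-rc.d/trt-model-prepare/run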
docker/support/tensorrt_detector/tensorrt_libyolo.sh (Executable file, 16 lines)
@@ -0,0 +1,16 @@
#!/bin/bash

set -euxo pipefail

# Clone tensorrt_demos repo
git clone --depth 1 https://github.com/NateMeyer/tensorrt_demos.git -b conditional_download

# Build libyolo
cd ./tensorrt_demos/plugins && make all
cp libyolo_layer.so /usr/local/lib/libyolo_layer.so
cp libyolo_layer.so ../yolo/libyolo_layer.so

# Store yolo scripts for later conversion
cd ../
mkdir -p /usr/local/src/tensorrt_demos
cp -a yolo /usr/local/src/tensorrt_demos/
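This script is normally bind-mounted and run by the trt-deps build stage; to exercise it outside the image build, one could do roughly the following (purely illustrative, not part of the commit; the bind-mount mirrors the RUN --mount line in the Dockerfile hunk earlier in this diff):

# Illustrative only: run the library build script in the same base image the
# Dockerfile uses.
docker run --rm \
  -v "$PWD/docker/support/tensorrt_detector/tensorrt_libyolo.sh:/tensorrt_libyolo.sh:ro" \
  nvcr.io/nvidia/tensorrt:23.05-py3 \
  bash /tensorrt_libyolo.sh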
@@ -1,10 +0,0 @@
#!/bin/bash

set -euxo pipefail

# Clone tensorrt_demos repo
git clone --depth 1 https://github.com/yeahme49/tensorrt_demos.git

# Build libyolo
cd ./tensorrt_demos/plugins && make all
cp libyolo_layer.so /usr/local/lib/libyolo_layer.so
@@ -1,32 +0,0 @@
#!/bin/bash

set -euxo pipefail

OUTPUT_FOLDER=/trt-models
echo "Generating the following TRT Models: ${YOLO_MODELS:="yolov4-tiny-288,yolov4-tiny-416,yolov7-tiny-416"}"

# Create output folder
mkdir -p ${OUTPUT_FOLDER}

# Clone tensorrt_demos repo
# git clone --depth 1 https://github.com/NateMeyer/tensorrt_demos.git -b conditional_download /tmp/tensorrt_demos
cd /tmp/ && wget -qO tensorrt_demos.zip https://github.com/NateMeyer/tensorrt_demos/archive/refs/heads/conditional_download.zip
unzip tensorrt_demos.zip

cp /usr/local/lib/libyolo_layer.so /tmp/tensorrt_demos-conditional_download/plugins/libyolo_layer.so

# Download yolo weights
cd /tmp/tensorrt_demos-conditional_download/yolo && ./download_yolo.sh $YOLO_MODELS

# Build trt engine
cd /tmp/tensorrt_demos-conditional_download/yolo

for model in ${YOLO_MODELS//,/ }
do
    python3 yolo_to_onnx.py -m ${model}
    python3 onnx_to_tensorrt.py -m ${model}
    cp /tmp/tensorrt_demos-conditional_download/yolo/${model}.trt ${OUTPUT_FOLDER}/${model}.trt;
done

# Cleanup repo
rm -r /tmp/tensorrt_demos-conditional_download