Add S6 scripts to test and convert specified TensorRT models at startup.

Rearrange tensorrt files into a docker support folder.
Nate Meyer 2023-06-24 11:25:01 -04:00
parent afec23ee60
commit bdabf17616
10 changed files with 70 additions and 44 deletions
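
With this change the frigate-tensorrt image converts the models listed in YOLO_MODELS on startup and caches the resulting engines under /media/frigate/model_cache/tensorrt. A hedged usage sketch (image tag and host paths are illustrative placeholders, not taken from this commit):

# Illustrative only: the s6 oneshot added below converts any missing models on
# first start and reuses the cached .trt engines on later starts.
docker run --rm --gpus=all \
    -e YOLO_MODELS="yolov4-tiny-416,yolov7-tiny-416" \
    -v /path/to/media:/media/frigate \
    ghcr.io/blakeblackshear/frigate:dev-tensorrt    # tag is a placeholder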


@ -265,16 +265,21 @@ COPY --from=rootfs / /
 # Build TensorRT-specific library
 FROM nvcr.io/nvidia/tensorrt:23.05-py3 AS trt-deps

-RUN --mount=type=bind,source=docker/tensorrt_libyolo.sh,target=/tensorrt_libyolo.sh \
+RUN --mount=type=bind,source=docker/support/tensorrt_detector/tensorrt_libyolo.sh,target=/tensorrt_libyolo.sh \
     /tensorrt_libyolo.sh

 # Frigate w/ TensorRT Support as separate image
 FROM frigate AS frigate-tensorrt
+ENV YOLO_MODELS="yolov7-tiny-416"
 COPY --from=trt-deps /usr/local/lib/libyolo_layer.so /usr/local/lib/libyolo_layer.so
+COPY --from=trt-deps /usr/local/src/tensorrt_demos /usr/local/src/tensorrt_demos
+COPY docker/support/tensorrt_detector/rootfs/ /
 RUN --mount=type=bind,from=trt-wheels,source=/trt-wheels,target=/deps/trt-wheels \
     pip3 install -U /deps/trt-wheels/*.whl && \
-    ln -s libnvrtc.so.11.2 /usr/local/lib/python3.9/dist-packages/nvidia/cuda_nvrtc/lib/libnvrtc.so && \
+    ln -s libnvrtc.so.12.1 /usr/local/lib/python3.9/dist-packages/nvidia/cuda_nvrtc/lib/libnvrtc.so && \
     ldconfig

 # Dev Container w/ TRT
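
The frigate-tensorrt stage can be built directly by targeting it. A minimal sketch, assuming a plain buildx invocation; the Dockerfile path and image tag are placeholders (the file name is not shown in this extract, and the repo's Makefile may wrap this differently):

# Assumed invocation, not taken from the commit; only the stage name comes from
# the Dockerfile hunk above.
docker buildx build \
    --target frigate-tensorrt \
    --file Dockerfile \
    --tag frigate:local-tensorrt \
    --load .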


@ -0,0 +1,45 @@
#!/command/with-contenv bash
# shellcheck shell=bash
# Generate TensorRT engines for the configured YOLO models at startup

set -o errexit -o nounset -o pipefail

OUTPUT_FOLDER=/media/frigate/model_cache/tensorrt

# Create output folder
mkdir -p ${OUTPUT_FOLDER}

FIRST_MODEL=true
MODEL_CONVERT=""

# Build a comma-separated list of models that do not have a cached engine yet
for model in ${YOLO_MODELS//,/ }
do
    if [[ ! -f ${OUTPUT_FOLDER}/${model}.trt ]]; then
        if [[ ${FIRST_MODEL} == true ]]; then
            MODEL_CONVERT="${model}"
            FIRST_MODEL=false
        else
            MODEL_CONVERT+=",${model}"
        fi
    fi
done

if [[ ${MODEL_CONVERT} == "" ]]; then
    echo "No models to convert."
    exit 0
fi

echo "Generating the following TRT Models: ${MODEL_CONVERT}"

# Build trt engine
cd /usr/local/src/tensorrt_demos/yolo

# Download yolo weights
./download_yolo.sh ${MODEL_CONVERT}

for model in ${MODEL_CONVERT//,/ }
do
    python3 yolo_to_onnx.py -m ${model}
    python3 onnx_to_tensorrt.py -m ${model}
    cp ${model}.trt ${OUTPUT_FOLDER}/${model}.trt
done
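
The core of the script is the list handling: split the comma-separated YOLO_MODELS value and accumulate only the models whose engine is missing. A minimal, self-contained sketch of that pattern (standalone demo, not part of the commit):

#!/bin/bash
# Demo of the comma-split and accumulate pattern used in the run script above.
set -o errexit -o nounset -o pipefail

YOLO_MODELS="yolov4-tiny-416,yolov7-tiny-416"
OUTPUT_FOLDER=$(mktemp -d)
touch "${OUTPUT_FOLDER}/yolov4-tiny-416.trt"   # pretend this engine is already cached

MODEL_CONVERT=""
for model in ${YOLO_MODELS//,/ }; do
    if [[ ! -f "${OUTPUT_FOLDER}/${model}.trt" ]]; then
        # ${VAR:+,} emits a comma only when the list is already non-empty
        MODEL_CONVERT+="${MODEL_CONVERT:+,}${model}"
    fi
done

echo "Would convert: ${MODEL_CONVERT:-nothing}"   # -> Would convert: yolov7-tiny-416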


@ -0,0 +1 @@
/etc/s6-overlay/s6-rc.d/trt-model-prepare/run
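
This one-line addition looks like either a symlink shown by its target or the service's s6-overlay `up` file pointing at the run script above. For context, a hedged sketch of how an s6-overlay v3 oneshot is typically wired (directory layout assumed; the commit's remaining service files are not shown in this extract):

# Illustrative only: declare a oneshot with a type file, an up file that points
# at the script to run, and a registration entry under user/contents.d.
svc=/etc/s6-overlay/s6-rc.d/trt-model-prepare
mkdir -p "${svc}" /etc/s6-overlay/s6-rc.d/user/contents.d
echo oneshot > "${svc}/type"
echo "${svc}/run" > "${svc}/up"     # matches the one-line file added above
touch /etc/s6-overlay/s6-rc.d/user/contents.d/trt-model-prepare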


@ -0,0 +1,16 @@
#!/bin/bash
set -euxo pipefail
# Clone tensorrt_demos repo
git clone --depth 1 https://github.com/NateMeyer/tensorrt_demos.git -b conditional_download
# Build libyolo
cd ./tensorrt_demos/plugins && make all
cp libyolo_layer.so /usr/local/lib/libyolo_layer.so
cp libyolo_layer.so ../yolo/libyolo_layer.so
# Store yolo scripts for later conversion
cd ../
mkdir -p /usr/local/src/tensorrt_demos
cp -a yolo /usr/local/src/tensorrt_demos/
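
After this script runs in the trt-deps stage, the Dockerfile hunk above copies both the plugin library and /usr/local/src/tensorrt_demos into the runtime image. A hypothetical sanity check (not part of the commit) that the expected artifacts are in place:

# Hypothetical check: verify the artifacts that the frigate-tensorrt stage
# copies out of this build step.
test -f /usr/local/lib/libyolo_layer.so && echo "plugin library built"
test -x /usr/local/src/tensorrt_demos/yolo/download_yolo.sh && echo "yolo conversion scripts staged"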


@ -1,10 +0,0 @@
#!/bin/bash
set -euxo pipefail
# Clone tensorrt_demos repo
git clone --depth 1 https://github.com/yeahme49/tensorrt_demos.git
# Build libyolo
cd ./tensorrt_demos/plugins && make all
cp libyolo_layer.so /usr/local/lib/libyolo_layer.so


@ -1,32 +0,0 @@
#!/bin/bash
set -euxo pipefail
OUTPUT_FOLDER=/trt-models
echo "Generating the following TRT Models: ${YOLO_MODELS:="yolov4-tiny-288,yolov4-tiny-416,yolov7-tiny-416"}"
# Create output folder
mkdir -p ${OUTPUT_FOLDER}
# Clone tensorrt_demos repo
# git clone --depth 1 https://github.com/NateMeyer/tensorrt_demos.git -b conditional_download /tmp/tensorrt_demos
cd /tmp/ && wget -qO tensorrt_demos.zip https://github.com/NateMeyer/tensorrt_demos/archive/refs/heads/conditional_download.zip
unzip tensorrt_demos.zip
cp /usr/local/lib/libyolo_layer.so /tmp/tensorrt_demos-conditional_download/plugins/libyolo_layer.so
# Download yolo weights
cd /tmp/tensorrt_demos-conditional_download/yolo && ./download_yolo.sh $YOLO_MODELS
# Build trt engine
cd /tmp/tensorrt_demos-conditional_download/yolo
for model in ${YOLO_MODELS//,/ }
do
python3 yolo_to_onnx.py -m ${model}
python3 onnx_to_tensorrt.py -m ${model}
cp /tmp/tensorrt_demos-conditional_download/yolo/${model}.trt ${OUTPUT_FOLDER}/${model}.trt;
done
# Cleanup repo
rm -r /tmp/tensorrt_demos-conditional_download