exclude yolonas from image
commit e53644d81a (parent cb9c097761)
@@ -20,12 +20,6 @@ COPY --from=rootfs / /
 
 ADD https://github.com/MarcA711/rknn-toolkit2/releases/download/v2.0.0/librknnrt.so /usr/lib/
 
-ADD https://github.com/MarcA711/rknn-models/releases/download/v2.0.0/deci-fp16-yolonas_s-rk3562-v2.0.0-1.rknn /models/
-ADD https://github.com/MarcA711/rknn-models/releases/download/v2.0.0/deci-fp16-yolonas_s-rk3566-v2.0.0-1.rknn /models/
-ADD https://github.com/MarcA711/rknn-models/releases/download/v2.0.0/deci-fp16-yolonas_s-rk3568-v2.0.0-1.rknn /models/
-ADD https://github.com/MarcA711/rknn-models/releases/download/v2.0.0/deci-fp16-yolonas_s-rk3576-v2.0.0-1.rknn /models/
-ADD https://github.com/MarcA711/rknn-models/releases/download/v2.0.0/deci-fp16-yolonas_s-rk3588-v2.0.0-1.rknn /models/
-
 RUN rm -rf /usr/lib/btbn-ffmpeg/bin/ffmpeg
 RUN rm -rf /usr/lib/btbn-ffmpeg/bin/ffprobe
 ADD --chmod=111 https://github.com/MarcA711/Rockchip-FFmpeg-Builds/releases/download/6.1-3/ffmpeg /usr/lib/btbn-ffmpeg/bin/
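With the prebuilt yolo-nas .rknn files no longer baked into the image at /models/, a matching model has to land in /config/model_cache/rknn_cache/ at runtime instead. The detector does this through self.download_model() (see the last hunk below); that helper's body is not part of this diff, so the following is only a minimal sketch of such a download step, reusing the MarcA711/rknn-models release URL from the removed ADD lines. The function name and signature here are illustrative assumptions, not frigate's actual helper.

```python
import os
import urllib.request

# Sketch only: frigate's real self.download_model() is not shown in this diff.
# The release layout is taken from the ADD lines removed above.
MODEL_CACHE_DIR = "/config/model_cache/rknn_cache/"
RELEASE_URL = "https://github.com/MarcA711/rknn-models/releases/download/v2.0.0/"


def download_rknn_model(filename: str) -> str:
    """Fetch a prebuilt .rknn model into the cache directory and return its path."""
    os.makedirs(MODEL_CACHE_DIR, exist_ok=True)
    dest = os.path.join(MODEL_CACHE_DIR, filename)
    if not os.path.isfile(dest):
        urllib.request.urlretrieve(RELEASE_URL + filename, dest)
    return dest


# Example: the rk3588 small preset that used to be shipped in the image.
# download_rknn_model("deci-fp16-yolonas_s-rk3588-v2.0.0-1.rknn")
```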
@@ -376,11 +376,16 @@ model: # required
  input_pixel_format: bgr # required
  # shape of detection frame
  input_tensor: nhwc
  model_type: yolonas # required
```

### Choosing a model

:::warning

yolo-nas models use weights from DeciAI. These weights are subject to their license and can't be used commercially. For more information, see: https://docs.deci.ai/super-gradients/latest/LICENSE.YOLONAS.html

:::

The inference time was determined on a rk3588 with 3 NPU cores.

| Model | Size in mb | Inference time in ms |
@@ -7,7 +7,7 @@ from typing import Literal
 from pydantic import Field
 
 from frigate.detectors.detection_api import DetectionApi
-from frigate.detectors.detector_config import BaseDetectorConfig
+from frigate.detectors.detector_config import BaseDetectorConfig, ModelTypeEnum
 
 logger = logging.getLogger(__name__)
@@ -15,9 +15,9 @@ DETECTOR_KEY = "rknn"
 
 supported_socs = ["rk3562", "rk3566", "rk3568", "rk3576", "rk3588"]
 
-supported_models = ["^deci-fp16-yolonas_[sml]$"]
+supported_models = {ModelTypeEnum.yolonas: "^deci-fp16-yolonas_[sml]$"}
 
-default_model = "deci-fp16-yolonas_s"
+# default_model = "deci-fp16-yolonas_s"
 
 model_chache_dir = "/config/model_cache/rknn_cache/"
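The supported_models change turns a bare list of regex patterns into a mapping keyed by ModelTypeEnum, so a successful match also tells the detector which model type to configure. The pattern ^deci-fp16-yolonas_[sml]$ accepts exactly the small, medium and large yolo-nas presets. A standalone illustration (a plain string stands in for ModelTypeEnum so the snippet runs on its own):

```python
import re

# "yolonas" stands in for ModelTypeEnum.yolonas in this self-contained sketch.
supported_models = {"yolonas": r"^deci-fp16-yolonas_[sml]$"}

for name in ("deci-fp16-yolonas_s", "deci-fp16-yolonas_m", "deci-fp16-yolonas_x", "yolov8n"):
    matches = [t for t, pattern in supported_models.items() if re.match(pattern, name)]
    print(name, "->", matches or "unsupported")
# deci-fp16-yolonas_s -> ['yolonas']
# deci-fp16-yolonas_m -> ['yolonas']
# deci-fp16-yolonas_x -> unsupported
# yolov8n -> unsupported
```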
@@ -40,6 +40,16 @@ class Rknn(DetectionApi):
 
         model_props = self.parse_model_input(config.model.path, soc)
 
+        if model_props["preset"]:
+            config.model.model_type = model_props["model_type"]
+
+            if model_props["model_type"] == ModelTypeEnum.yolonas:
+                logger.info("""
+                    You are using yolo-nas with weights from DeciAI.
+                    These weights are subject to their license and can't be used commercially.
+                    For more information, see: https://docs.deci.ai/super-gradients/latest/LICENSE.YOLONAS.html
+                """)
+
         from rknnlite.api import RKNNLite
 
         self.rknn = RKNNLite(verbose=False)
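Only the RKNNLite constructor appears in this hunk; loading the model and running inference happen elsewhere in the plugin. For orientation, this is the typical rknn-toolkit-lite2 flow the plugin builds on; the model path, core mask, and input shape below are illustrative assumptions, not frigate's exact code:

```python
import numpy as np
from rknnlite.api import RKNNLite

# Illustrative path: a cached rk3588 yolo-nas preset (see the surrounding hunks).
model_path = "/config/model_cache/rknn_cache/deci-fp16-yolonas_s-rk3588-v2.0.0-1.rknn"

rknn = RKNNLite(verbose=False)
if rknn.load_rknn(model_path) != 0:
    raise RuntimeError("failed to load rknn model")
# NPU_CORE_AUTO lets the runtime pick a core; rk3588 also exposes NPU_CORE_0_1_2.
if rknn.init_runtime(core_mask=RKNNLite.NPU_CORE_AUTO) != 0:
    raise RuntimeError("failed to init rknn runtime")

# One nhwc bgr frame, assuming a 320x320 detection size.
frame = np.zeros((1, 320, 320, 3), dtype=np.uint8)
outputs = rknn.inference(inputs=[frame])
```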
@@ -91,16 +101,23 @@
         Full name could be: default-fp16-yolonas_s-rk3588-v2.0.0-1.rknn
         """
 
-        if any(re.match(pattern, model_path) for pattern in supported_models):
+        model_matched = False
+
+        for model_type, pattern in supported_models.items():
+            if re.match(pattern, model_path):
+                model_matched = True
+                model_props["model_type"] = model_type
+
+        if model_matched:
             model_props["filename"] = model_path + f"-{soc}-v2.0.0-1.rknn"
 
-            if model_path == default_model:
-                model_props["path"] = "/models/" + model_props["filename"]
-            else:
-                model_props["path"] = model_chache_dir + model_props["filename"]
+            # if model_path == default_model:
+            #     model_props["path"] = "/models/" + model_props["filename"]
+            # else:
+            model_props["path"] = model_chache_dir + model_props["filename"]
 
             if not os.path.isfile(model_props["path"]):
                 self.download_model(model_props["filename"])
         else:
             supported_models_str = ", ".join(
                 model[1:-1] for model in supported_models
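Net effect of the parse_model_input change: a preset name now always resolves to a file under the model cache and is fetched on first use, rather than sometimes pointing at the copy that used to be baked into the image at /models/. A self-contained rehearsal of that resolution, under the assumption that the truncated else branch and the preset/download handling outside these lines are unchanged:

```python
import re

model_cache_dir = "/config/model_cache/rknn_cache/"  # same value as model_chache_dir in the plugin
supported_models = {"yolonas": r"^deci-fp16-yolonas_[sml]$"}  # plain string in place of ModelTypeEnum


def resolve(model_path: str, soc: str) -> dict:
    """Sketch of how a preset model name is mapped to a cached .rknn path."""
    props: dict = {}
    for model_type, pattern in supported_models.items():
        if re.match(pattern, model_path):
            props["model_type"] = model_type
            props["filename"] = f"{model_path}-{soc}-v2.0.0-1.rknn"
            # Always the cache path now; the /models/ special case is commented out upstream.
            props["path"] = model_cache_dir + props["filename"]
            # The real plugin calls self.download_model(...) here if the file is missing.
    return props


path = resolve("deci-fp16-yolonas_s", "rk3588")["path"]
# path == "/config/model_cache/rknn_cache/deci-fp16-yolonas_s-rk3588-v2.0.0-1.rknn"
```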