Merge branch 'blakeblackshear:dev' into cleanupconfig

This commit is contained in:
Jake 2024-12-13 12:26:13 -04:00 committed by GitHub
commit 4e532d194c
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
15 changed files with 85 additions and 92 deletions

View File

@@ -7,7 +7,7 @@ on:
       - dev
       - master
     paths-ignore:
-      - 'docs/**'
+      - "docs/**"

 # only run the latest commit to avoid cache overwrites
 concurrency:
@@ -24,6 +24,8 @@ jobs:
     steps:
       - name: Check out code
         uses: actions/checkout@v4
+        with:
+          persist-credentials: false
       - name: Set up QEMU and Buildx
         id: setup
         uses: ./.github/actions/setup
@@ -45,6 +47,8 @@ jobs:
     steps:
       - name: Check out code
         uses: actions/checkout@v4
+        with:
+          persist-credentials: false
       - name: Set up QEMU and Buildx
         id: setup
         uses: ./.github/actions/setup
@@ -86,6 +90,8 @@ jobs:
     steps:
       - name: Check out code
         uses: actions/checkout@v4
+        with:
+          persist-credentials: false
       - name: Set up QEMU and Buildx
         id: setup
         uses: ./.github/actions/setup
@@ -112,6 +118,8 @@ jobs:
     steps:
       - name: Check out code
         uses: actions/checkout@v4
+        with:
+          persist-credentials: false
       - name: Set up QEMU and Buildx
         id: setup
         uses: ./.github/actions/setup
@@ -140,6 +148,8 @@ jobs:
     steps:
       - name: Check out code
         uses: actions/checkout@v4
+        with:
+          persist-credentials: false
       - name: Set up QEMU and Buildx
         id: setup
         uses: ./.github/actions/setup
@@ -165,6 +175,8 @@ jobs:
     steps:
       - name: Check out code
         uses: actions/checkout@v4
+        with:
+          persist-credentials: false
       - name: Set up QEMU and Buildx
         id: setup
         uses: ./.github/actions/setup
@@ -188,6 +200,8 @@ jobs:
     steps:
      - name: Check out code
        uses: actions/checkout@v4
+        with:
+          persist-credentials: false
      - name: Set up QEMU and Buildx
        id: setup
        uses: ./.github/actions/setup

View File

@@ -1,24 +0,0 @@
-name: dependabot-auto-merge
-on: pull_request
-
-permissions:
-  contents: write
-
-jobs:
-  dependabot-auto-merge:
-    runs-on: ubuntu-latest
-    if: github.actor == 'dependabot[bot]'
-    steps:
-      - name: Get Dependabot metadata
-        id: metadata
-        uses: dependabot/fetch-metadata@v2
-        with:
-          github-token: ${{ secrets.GITHUB_TOKEN }}
-      - name: Enable auto-merge for Dependabot PRs
-        if: steps.metadata.outputs.dependency-type == 'direct:development' && (steps.metadata.outputs.update-type == 'version-update:semver-minor' || steps.metadata.outputs.update-type == 'version-update:semver-patch')
-        run: |
-          gh pr review --approve "$PR_URL"
-          gh pr merge --auto --squash "$PR_URL"
-        env:
-          PR_URL: ${{ github.event.pull_request.html_url }}
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -3,7 +3,7 @@ name: On pull request
 on:
   pull_request:
     paths-ignore:
-      - 'docs/**'
+      - "docs/**"

 env:
   DEFAULT_PYTHON: 3.9
@@ -19,6 +19,8 @@ jobs:
       DOCKER_BUILDKIT: "1"
     steps:
       - uses: actions/checkout@v4
+        with:
+          persist-credentials: false
       - uses: actions/setup-node@master
         with:
           node-version: 16.x
@@ -38,6 +40,8 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
+        with:
+          persist-credentials: false
       - uses: actions/setup-node@master
         with:
           node-version: 16.x
@@ -52,6 +56,8 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
+        with:
+          persist-credentials: false
       - uses: actions/setup-node@master
         with:
           node-version: 20.x
@@ -67,6 +73,8 @@ jobs:
     steps:
       - name: Check out the repository
         uses: actions/checkout@v4
+        with:
+          persist-credentials: false
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         uses: actions/setup-python@v5.1.0
         with:
@@ -88,6 +96,8 @@ jobs:
     steps:
       - name: Check out code
         uses: actions/checkout@v4
+        with:
+          persist-credentials: false
       - uses: actions/setup-node@master
         with:
           node-version: 16.x

View File

@@ -11,6 +11,8 @@ jobs:
     steps:
       - uses: actions/checkout@v4
+        with:
+          persist-credentials: false
       - id: lowercaseRepo
         uses: ASzc/change-string-case-action@v6
         with:
@@ -22,10 +24,13 @@ jobs:
           username: ${{ github.actor }}
           password: ${{ secrets.GITHUB_TOKEN }}
       - name: Create tag variables
+        env:
+          TAG: ${{ github.ref_name }}
+          LOWERCASE_REPO: ${{ steps.lowercaseRepo.outputs.lowercase }}
         run: |
-          BUILD_TYPE=$([[ "${{ github.ref_name }}" =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]] && echo "stable" || echo "beta")
+          BUILD_TYPE=$([[ "${TAG}" =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]] && echo "stable" || echo "beta")
           echo "BUILD_TYPE=${BUILD_TYPE}" >> $GITHUB_ENV
-          echo "BASE=ghcr.io/${{ steps.lowercaseRepo.outputs.lowercase }}" >> $GITHUB_ENV
+          echo "BASE=ghcr.io/${LOWERCASE_REPO}" >> $GITHUB_ENV
           echo "BUILD_TAG=${GITHUB_SHA::7}" >> $GITHUB_ENV
           echo "CLEAN_VERSION=$(echo ${GITHUB_REF##*/} | tr '[:upper:]' '[:lower:]' | sed 's/^[v]//')" >> $GITHUB_ENV
       - name: Tag and push the main image

View File

@@ -23,7 +23,9 @@ jobs:
           exempt-pr-labels: "pinned,security,dependencies"
           operations-per-run: 120
       - name: Print outputs
-        run: echo ${{ join(steps.stale.outputs.*, ',') }}
+        env:
+          STALE_OUTPUT: ${{ join(steps.stale.outputs.*, ',') }}
+        run: echo "$STALE_OUTPUT"

   # clean_ghcr:
   #   name: Delete outdated dev container images
@@ -38,4 +40,3 @@ jobs:
   #         account-type: personal
   #         token: ${{ secrets.GITHUB_TOKEN }}
   #         token-type: github-token
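A recurring hardening pattern in this commit is to move `${{ }}` expressions out of `run:` scripts and into step-level `env:` entries, so the shell only ever expands environment variables rather than templated text. A generic sketch of the pattern (illustrative step and variable names, not part of the diff):

```yaml
- name: Example step
  env:
    # The expression is interpolated into an environment variable by the runner...
    REF_NAME: ${{ github.ref_name }}
  run: |
    # ...and the script references the variable instead of the raw expression.
    echo "Building ${REF_NAME}"
```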

View File

@@ -5,7 +5,7 @@ title: Using Semantic Search

 Semantic Search in Frigate allows you to find tracked objects within your review items using either the image itself, a user-defined text description, or an automatically generated one. This feature works by creating _embeddings_ — numerical vector representations — for both the images and text descriptions of your tracked objects. By comparing these embeddings, Frigate assesses their similarities to deliver relevant search results.

-Frigate has support for [Jina AI's CLIP model](https://huggingface.co/jinaai/jina-clip-v1) to create embeddings, which runs locally. Embeddings are then saved to Frigate's database.
+Frigate uses [Jina AI's CLIP model](https://huggingface.co/jinaai/jina-clip-v1) to create and save embeddings to Frigate's database. All of this runs locally.

 Semantic Search is accessed via the _Explore_ view in the Frigate UI.
@@ -19,7 +19,7 @@ For best performance, 16GB or more of RAM and a dedicated GPU are recommended.

 ## Configuration

-Semantic Search is disabled by default, and must be enabled in your config file before it can be used. Semantic Search is a global configuration setting.
+Semantic Search is disabled by default, and must be enabled in your config file or in the UI's Settings page before it can be used. Semantic Search is a global configuration setting.

 ```yaml
 semantic_search:
@@ -29,9 +29,9 @@ semantic_search:

 :::tip

-The embeddings database can be re-indexed from the existing tracked objects in your database by adding `reindex: True` to your `semantic_search` configuration. Depending on the number of tracked objects you have, it can take a long while to complete and may max out your CPU while indexing. Make sure to set the config back to `False` before restarting Frigate again.
+The embeddings database can be re-indexed from the existing tracked objects in your database by adding `reindex: True` to your `semantic_search` configuration or by toggling the switch on the Search Settings page in the UI and restarting Frigate. Depending on the number of tracked objects you have, it can take a long while to complete and may max out your CPU while indexing. Make sure to turn the UI's switch off or set the config back to `False` before restarting Frigate again.

-If you are enabling the Search feature for the first time, be advised that Frigate does not automatically index older tracked objects. You will need to enable the `reindex` feature in order to do that.
+If you are enabling Semantic Search for the first time, be advised that Frigate does not automatically index older tracked objects. You will need to enable the `reindex` feature in order to do that.

 :::
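A minimal sketch of the reindex toggle described in the tip above (assuming the `semantic_search` keys documented on this page; set `reindex` back to `False` after the run completes):

```yaml
semantic_search:
  enabled: True
  reindex: True
```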
@@ -39,9 +39,9 @@ If you are enabling the Search feature for the first time, be advised that Frigate

 The vision model is able to embed both images and text into the same vector space, which allows `image -> image` and `text -> image` similarity searches. Frigate uses this model on tracked objects to encode the thumbnail image and store it in the database. When searching for tracked objects via text in the search box, Frigate will perform a `text -> image` similarity search against this embedding. When clicking "Find Similar" in the tracked object detail pane, Frigate will perform an `image -> image` similarity search to retrieve the closest matching thumbnails.

-The text model is used to embed tracked object descriptions and perform searches against them. Descriptions can be created, viewed, and modified on the Search page when clicking on the gray tracked object chip at the top left of each review item. See [the Generative AI docs](/configuration/genai.md) for more information on how to automatically generate tracked object descriptions.
+The text model is used to embed tracked object descriptions and perform searches against them. Descriptions can be created, viewed, and modified on the Explore page when clicking on the thumbnail of a tracked object. See [the Generative AI docs](/configuration/genai.md) for more information on how to automatically generate tracked object descriptions.

-Differently weighted CLIP models are available and can be selected by setting the `model_size` config option as `small` or `large`:
+Differently weighted versions of the Jina model are available and can be selected by setting the `model_size` config option as `small` or `large`:

 ```yaml
 semantic_search:
@@ -50,7 +50,7 @@ semantic_search:
 ```

 - Configuring the `large` model employs the full Jina model and will automatically run on the GPU if applicable.
-- Configuring the `small` model employs a quantized version of the model that uses less RAM and runs on CPU with a very negligible difference in embedding quality.
+- Configuring the `small` model employs a quantized version of the Jina model that uses less RAM and runs on CPU with a very negligible difference in embedding quality.
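For reference, a minimal sketch of selecting the quantized model via `model_size` (keys as described above; `enabled: True` assumed since the feature must be enabled):

```yaml
semantic_search:
  enabled: True
  model_size: small
```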
 ### GPU Acceleration
@@ -84,7 +84,7 @@ If the correct build is used for your GPU and the `large` model is configured, t

 ## Usage and Best Practices

-1. Semantic Search is used in conjunction with the other filters available on the Search page. Use a combination of traditional filtering and Semantic Search for the best results.
+1. Semantic Search is used in conjunction with the other filters available on the Explore page. Use a combination of traditional filtering and Semantic Search for the best results.
 2. Use the thumbnail search type when searching for particular objects in the scene. Use the description search type when attempting to discern the intent of your object.
 3. Because of how the AI models Frigate uses have been trained, the comparison between text and image embedding distances generally means that with multi-modal (`thumbnail` and `description`) searches, results matching `description` will appear first, even if a `thumbnail` embedding may be a better match. Play with the "Search Type" setting to help find what you are looking for. Note that if you are generating descriptions for specific objects or zones only, this may cause search results to prioritize the objects with descriptions even if the ones without them are more relevant.
 4. Make your search language and tone closely match exactly what you're looking for. If you are using thumbnail search, **phrase your query as an image caption**. Searching for "red car" may not work as well as "red sedan driving down a residential street on a sunny day".

View File

@@ -21,13 +21,13 @@ from frigate.api.defs.query.app_query_parameters import AppTimelineHourlyQueryPa
 from frigate.api.defs.request.app_body import AppConfigSetBody
 from frigate.api.defs.tags import Tags
 from frigate.config import FrigateConfig
-from frigate.const import CONFIG_DIR
 from frigate.models import Event, Timeline
 from frigate.util.builtin import (
     clean_camera_user_pass,
     get_tz_modifiers,
     update_yaml_from_url,
 )
+from frigate.util.config import find_config_file
 from frigate.util.services import (
     ffprobe_stream,
     get_nvidia_driver_info,
@@ -147,13 +147,7 @@ def config(request: Request):
 @router.get("/config/raw")
 def config_raw():
-    config_file = os.environ.get("CONFIG_FILE", "/config/config.yml")
-
-    # Check if we can use .yaml instead of .yml
-    config_file_yaml = config_file.replace(".yml", ".yaml")
-    if os.path.isfile(config_file_yaml):
-        config_file = config_file_yaml
+    config_file = find_config_file()

     if not os.path.isfile(config_file):
         return JSONResponse(
@@ -198,13 +192,7 @@ def config_save(save_option: str, body: Any = Body(media_type="text/plain")):
     # Save the config to file
     try:
-        config_file = os.environ.get("CONFIG_FILE", "/config/config.yml")
-
-        # Check if we can use .yaml instead of .yml
-        config_file_yaml = config_file.replace(".yml", ".yaml")
-        if os.path.isfile(config_file_yaml):
-            config_file = config_file_yaml
+        config_file = find_config_file()

         with open(config_file, "w") as f:
             f.write(new_config)
@@ -253,13 +241,7 @@ def config_save(save_option: str, body: Any = Body(media_type="text/plain")):
 @router.put("/config/set")
 def config_set(request: Request, body: AppConfigSetBody):
-    config_file = os.environ.get("CONFIG_FILE", f"{CONFIG_DIR}/config.yml")
-
-    # Check if we can use .yaml instead of .yml
-    config_file_yaml = config_file.replace(".yml", ".yaml")
-    if os.path.isfile(config_file_yaml):
-        config_file = config_file_yaml
+    config_file = find_config_file()

     with open(config_file, "r") as f:
         old_raw_config = f.read()

View File

@@ -29,6 +29,7 @@ from frigate.util.builtin import (
 )
 from frigate.util.config import (
     StreamInfoRetriever,
+    find_config_file,
     get_relative_coordinates,
     migrate_frigate_config,
 )
@@ -67,7 +68,6 @@ logger = logging.getLogger(__name__)
 yaml = YAML()

-DEFAULT_CONFIG_FILE = "/config/config.yml"
 DEFAULT_CONFIG = """
 mqtt:
   enabled: False
@@ -638,16 +638,13 @@ class FrigateConfig(FrigateBaseModel):
     @classmethod
     def load(cls, **kwargs):
-        config_path = os.environ.get("CONFIG_FILE", DEFAULT_CONFIG_FILE)
-
-        if not os.path.isfile(config_path):
-            config_path = config_path.replace("yml", "yaml")
+        config_path = find_config_file()

         # No configuration file found, create one.
         new_config = False
         if not os.path.isfile(config_path):
             logger.info("No config file found, saving default config")
-            config_path = DEFAULT_CONFIG_FILE
+            config_path = config_path
             new_config = True
         else:
             # Check if the config file needs to be migrated.

View File

@@ -136,17 +136,17 @@ class Rknn(DetectionApi):
     def check_config(self, config):
         if (config.model.width != 320) or (config.model.height != 320):
             raise Exception(
-                "Make sure to set the model width and height to 320 in your config.yml."
+                "Make sure to set the model width and height to 320 in your config."
             )

         if config.model.input_pixel_format != "bgr":
             raise Exception(
-                'Make sure to set the model input_pixel_format to "bgr" in your config.yml.'
+                'Make sure to set the model input_pixel_format to "bgr" in your config.'
             )

         if config.model.input_tensor != "nhwc":
             raise Exception(
-                'Make sure to set the model input_tensor to "nhwc" in your config.yml.'
+                'Make sure to set the model input_tensor to "nhwc" in your config.'
             )

     def detect_raw(self, tensor_input):

View File

@@ -256,8 +256,9 @@ class EventCleanup(threading.Thread):
             events_to_update = []

-            for batch in query.iterator():
-                events_to_update.extend([event.id for event in batch])
+            for event in query.iterator():
+                events_to_update.append(event)
                 if len(events_to_update) >= CHUNK_SIZE:
                     logger.debug(
                         f"Updating {update_params} for {len(events_to_update)} events"
@@ -330,9 +331,8 @@
     def run(self) -> None:
         # only expire events every 5 minutes
-        while not self.stop_event.wait(1):
+        while not self.stop_event.wait(300):
             events_with_expired_clips = self.expire_clips()
-            return

             # delete timeline entries for events that have expired recordings
             # delete up to 100,000 at a time

View File

@@ -82,18 +82,23 @@ class EventProcessor(threading.Thread):
                 )

             if source_type == EventTypeEnum.tracked_object:
+                id = event_data["id"]
                 self.timeline_queue.put(
                     (
                         camera,
                         source_type,
                         event_type,
-                        self.events_in_process.get(event_data["id"]),
+                        self.events_in_process.get(id),
                         event_data,
                     )
                 )

-                if event_type == EventStateEnum.start:
-                    self.events_in_process[event_data["id"]] = event_data
+                # if this is the first message, just store it and continue, its not time to insert it in the db
+                if (
+                    event_type == EventStateEnum.start
+                    or id not in self.events_in_process
+                ):
+                    self.events_in_process[id] = event_data
                     continue

                 self.handle_object_detection(event_type, camera, event_data)
@@ -123,10 +128,6 @@
         """handle tracked object event updates."""
         updated_db = False

-        # if this is the first message, just store it and continue, its not time to insert it in the db
-        if event_type == EventStateEnum.start:
-            self.events_in_process[event_data["id"]] = event_data
-
         if should_update_db(self.events_in_process[event_data["id"]], event_data):
             updated_db = True
             camera_config = self.config.cameras[camera]

View File

@@ -2,7 +2,6 @@
 import copy
 import logging
-import os
 import queue
 import threading
 import time
@@ -29,11 +28,11 @@ from frigate.const import (
     AUTOTRACKING_ZOOM_EDGE_THRESHOLD,
     AUTOTRACKING_ZOOM_IN_HYSTERESIS,
     AUTOTRACKING_ZOOM_OUT_HYSTERESIS,
-    CONFIG_DIR,
 )
 from frigate.ptz.onvif import OnvifController
 from frigate.track.tracked_object import TrackedObject
 from frigate.util.builtin import update_yaml_file
+from frigate.util.config import find_config_file
 from frigate.util.image import SharedMemoryFrameManager, intersection_over_union

 logger = logging.getLogger(__name__)
@@ -328,13 +327,7 @@ class PtzAutoTracker:
         self.autotracker_init[camera] = True

     def _write_config(self, camera):
-        config_file = os.environ.get("CONFIG_FILE", f"{CONFIG_DIR}/config.yml")
-
-        # Check if we can use .yaml instead of .yml
-        config_file_yaml = config_file.replace(".yml", ".yaml")
-        if os.path.isfile(config_file_yaml):
-            config_file = config_file_yaml
+        config_file = find_config_file()

         logger.debug(
             f"{camera}: Writing new config with autotracker motion coefficients: {self.config.cameras[camera].onvif.autotracking.movement_weights}"

View File

@@ -14,6 +14,16 @@ from frigate.util.services import get_video_properties
 logger = logging.getLogger(__name__)

 CURRENT_CONFIG_VERSION = "0.15-0"
+DEFAULT_CONFIG_FILE = "/config/config.yml"
+
+
+def find_config_file() -> str:
+    config_path = os.environ.get("CONFIG_FILE", DEFAULT_CONFIG_FILE)
+
+    if not os.path.isfile(config_path):
+        config_path = config_path.replace("yml", "yaml")
+
+    return config_path


 def migrate_frigate_config(config_file: str):

View File

@@ -5,6 +5,7 @@ import { usePersistence } from "./use-persistence";
 export function useOverlayState<S>(
   key: string,
   defaultValue: S | undefined = undefined,
+  preserveSearch: boolean = true,
 ): [S | undefined, (value: S, replace?: boolean) => void] {
   const location = useLocation();
   const navigate = useNavigate();
@@ -15,7 +16,7 @@ export function useOverlayState<S>(
     (value: S, replace: boolean = false) => {
       const newLocationState = { ...currentLocationState };
       newLocationState[key] = value;
-      navigate(location.pathname + location.search, {
+      navigate(location.pathname + (preserveSearch ? location.search : ""), {
         state: newLocationState,
         replace,
       });

View File

@@ -39,8 +39,11 @@ export default function Events() {
   const [showReviewed, setShowReviewed] = usePersistence("showReviewed", false);

-  const [recording, setRecording] =
-    useOverlayState<RecordingStartingPoint>("recording");
+  const [recording, setRecording] = useOverlayState<RecordingStartingPoint>(
+    "recording",
+    undefined,
+    false,
+  );

   useSearchEffect("id", (reviewId: string) => {
     axios