Implement shared file locking utility

parent 178cc97f79
commit f808ffcc95
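For orientation: the diff below folds frigate.util.path into a new frigate.util.file module and replaces three ad-hoc locking schemes (an os.O_EXCL lock file in the MemryX detector, an open(..., "x")-based FileLock in the model downloader, and fcntl helper functions in the RKNN converter) with a single shared FileLock class. A minimal usage sketch of the pattern the call sites converge on; the lock path here is a placeholder:

```python
# Sketch only; FileLock and its parameters are defined in frigate/util/file.py below.
from frigate.util.file import FileLock

with FileLock("/tmp/example-resource.lock", timeout=60):
    # Only one process at a time runs this block; the flock is released on
    # exit, even if the body raises.
    ...
```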
@@ -38,7 +38,7 @@ from frigate.util.classification import (
     collect_object_classification_examples,
     collect_state_classification_examples,
 )
-from frigate.util.path import get_event_snapshot
+from frigate.util.file import get_event_snapshot

 logger = logging.getLogger(__name__)

@@ -58,7 +58,7 @@ from frigate.const import CLIPS_DIR, TRIGGER_DIR
 from frigate.embeddings import EmbeddingsContext
 from frigate.models import Event, ReviewSegment, Timeline, Trigger
 from frigate.track.object_processing import TrackedObject
-from frigate.util.path import get_event_thumbnail_bytes
+from frigate.util.file import get_event_thumbnail_bytes
 from frigate.util.time import get_dst_transitions, get_tz_modifiers

 logger = logging.getLogger(__name__)
@@ -44,8 +44,8 @@ from frigate.const import (
 )
 from frigate.models import Event, Previews, Recordings, Regions, ReviewSegment
 from frigate.track.object_processing import TrackedObjectProcessor
+from frigate.util.file import get_event_thumbnail_bytes
 from frigate.util.image import get_image_from_recording
-from frigate.util.path import get_event_thumbnail_bytes
 from frigate.util.time import get_dst_transitions

 logger = logging.getLogger(__name__)
@@ -20,8 +20,8 @@ from frigate.genai import GenAIClient
 from frigate.models import Event
 from frigate.types import TrackedObjectUpdateTypesEnum
 from frigate.util.builtin import EventsPerSecond, InferenceSpeed
+from frigate.util.file import get_event_thumbnail_bytes
 from frigate.util.image import create_thumbnail, ensure_jpeg_bytes
-from frigate.util.path import get_event_thumbnail_bytes

 if TYPE_CHECKING:
     from frigate.embeddings import Embeddings
@@ -22,7 +22,7 @@ from frigate.db.sqlitevecq import SqliteVecQueueDatabase
 from frigate.embeddings.util import ZScoreNormalization
 from frigate.models import Event, Trigger
 from frigate.util.builtin import cosine_distance
-from frigate.util.path import get_event_thumbnail_bytes
+from frigate.util.file import get_event_thumbnail_bytes

 from ..post.api import PostProcessorApi
 from ..types import DataProcessorMetrics
@@ -17,6 +17,7 @@ from frigate.detectors.detector_config import (
     BaseDetectorConfig,
     ModelTypeEnum,
 )
+from frigate.util.file_lock import FileLock
 from frigate.util.model import post_process_yolo

 logger = logging.getLogger(__name__)
@@ -177,29 +178,6 @@ class MemryXDetector(DetectionApi):
             logger.error(f"Failed to initialize MemryX model: {e}")
             raise

-    def _acquire_file_lock(self, lock_path: str, timeout: int = 60, poll: float = 0.2):
-        """
-        Create an exclusive lock file. Blocks (with polling) until it can acquire,
-        or raises TimeoutError. Uses only stdlib (os.O_EXCL).
-        """
-        start = time.time()
-        while True:
-            try:
-                fd = os.open(lock_path, os.O_CREAT | os.O_EXCL | os.O_RDWR)
-                os.close(fd)
-                return
-            except FileExistsError:
-                if time.time() - start > timeout:
-                    raise TimeoutError(f"Timeout waiting for lock: {lock_path}")
-                time.sleep(poll)
-
-    def _release_file_lock(self, lock_path: str):
-        """Best-effort removal of the lock file."""
-        try:
-            os.remove(lock_path)
-        except FileNotFoundError:
-            pass
-
     def load_yolo_constants(self):
         base = f"{self.cache_dir}/{self.model_folder}"
         # constants for yolov9 post-processing
@@ -212,9 +190,9 @@ class MemryXDetector(DetectionApi):
         os.makedirs(self.cache_dir, exist_ok=True)

         lock_path = os.path.join(self.cache_dir, f".{self.model_folder}.lock")
-        self._acquire_file_lock(lock_path)
+        lock = FileLock(lock_path, timeout=60)

-        try:
+        with lock:
             # ---------- CASE 1: user provided a custom model path ----------
             if self.memx_model_path:
                 if not self.memx_model_path.endswith(".zip"):
@@ -338,9 +316,6 @@ class MemryXDetector(DetectionApi):
                         f"Failed to remove downloaded zip {zip_path}: {e}"
                     )

-        finally:
-            self._release_file_lock(lock_path)
-
     def send_input(self, connection_id, tensor_input: np.ndarray):
         """Pre-process (if needed) and send frame to MemryX input queue"""
         if tensor_input is None:
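A condensed sketch of the detector-side pattern after this change: the manual acquire/try/finally is gone and cache population runs inside a context manager. The helper function below is illustrative, not part of the diff (note that the class itself is defined in frigate/util/file.py in this commit):

```python
import os

from frigate.util.file import FileLock


def ensure_cached_model(cache_dir: str, model_folder: str) -> None:
    """Illustrative only: guard a shared model cache the way the detector now does."""
    os.makedirs(cache_dir, exist_ok=True)
    lock_path = os.path.join(cache_dir, f".{model_folder}.lock")

    with FileLock(lock_path, timeout=60):
        # download / unpack the model into the cache here; __exit__ releases
        # the lock, so no manual finally block is needed
        ...
```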
@@ -29,7 +29,7 @@ from frigate.db.sqlitevecq import SqliteVecQueueDatabase
 from frigate.models import Event, Trigger
 from frigate.types import ModelStatusTypesEnum
 from frigate.util.builtin import EventsPerSecond, InferenceSpeed, serialize
-from frigate.util.path import get_event_thumbnail_bytes
+from frigate.util.file import get_event_thumbnail_bytes

 from .onnx.jina_v1_embedding import JinaV1ImageEmbedding, JinaV1TextEmbedding
 from .onnx.jina_v2_embedding import JinaV2Embedding
@@ -62,8 +62,8 @@ from frigate.events.types import EventTypeEnum, RegenerateDescriptionEnum
 from frigate.genai import get_genai_client
 from frigate.models import Event, Recordings, ReviewSegment, Trigger
 from frigate.util.builtin import serialize
+from frigate.util.file import get_event_thumbnail_bytes
 from frigate.util.image import SharedMemoryFrameManager
-from frigate.util.path import get_event_thumbnail_bytes

 from .embeddings import Embeddings

@@ -12,7 +12,7 @@ from frigate.config import FrigateConfig
 from frigate.const import CLIPS_DIR
 from frigate.db.sqlitevecq import SqliteVecQueueDatabase
 from frigate.models import Event, Timeline
-from frigate.util.path import delete_event_snapshot, delete_event_thumbnail
+from frigate.util.file import delete_event_snapshot, delete_event_thumbnail

 logger = logging.getLogger(__name__)

@@ -20,8 +20,8 @@ from frigate.const import (
 from frigate.log import redirect_output_to_logger
 from frigate.models import Event, Recordings, ReviewSegment
 from frigate.types import ModelStatusTypesEnum
+from frigate.util.file import get_event_thumbnail_bytes
 from frigate.util.image import get_image_from_recording
-from frigate.util.path import get_event_thumbnail_bytes
 from frigate.util.process import FrigateProcess

 BATCH_SIZE = 16
@@ -1,7 +1,6 @@
 import logging
 import os
 import threading
-import time
 from pathlib import Path
 from typing import Callable, List

@@ -10,40 +9,11 @@ import requests
 from frigate.comms.inter_process import InterProcessRequestor
 from frigate.const import UPDATE_MODEL_STATE
 from frigate.types import ModelStatusTypesEnum
+from frigate.util.file import FileLock

 logger = logging.getLogger(__name__)


-class FileLock:
-    def __init__(self, path):
-        self.path = path
-        self.lock_file = f"{path}.lock"
-
-        # we have not acquired the lock yet so it should not exist
-        if os.path.exists(self.lock_file):
-            try:
-                os.remove(self.lock_file)
-            except Exception:
-                pass
-
-    def acquire(self):
-        parent_dir = os.path.dirname(self.lock_file)
-        os.makedirs(parent_dir, exist_ok=True)
-
-        while True:
-            try:
-                with open(self.lock_file, "x"):
-                    return
-            except FileExistsError:
-                time.sleep(0.1)
-
-    def release(self):
-        try:
-            os.remove(self.lock_file)
-        except FileNotFoundError:
-            pass
-
-
 class ModelDownloader:
     def __init__(
         self,
@@ -81,15 +51,13 @@ class ModelDownloader:
     def _download_models(self):
         for file_name in self.file_names:
             path = os.path.join(self.download_path, file_name)
-            lock = FileLock(path)
+            lock_path = f"{path}.lock"
+            lock = FileLock(lock_path, cleanup_stale_on_init=True)

             if not os.path.exists(path):
-                lock.acquire()
-                try:
+                with lock:
                     if not os.path.exists(path):
                         self.download_func(path)
-                finally:
-                    lock.release()

             self.requestor.send_data(
                 UPDATE_MODEL_STATE,
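The downloader keeps its double-checked pattern: test for the file without the lock, then re-test after acquiring it so only the first process performs the download. A self-contained sketch of that pattern; download_once and its arguments are illustrative stand-ins for the ModelDownloader internals:

```python
import os
from typing import Callable

from frigate.util.file import FileLock


def download_once(path: str, download_func: Callable[[str], None]) -> None:
    """Sketch of the check-lock-recheck pattern used by _download_models above."""
    if not os.path.exists(path):  # cheap check before taking the lock
        with FileLock(f"{path}.lock", cleanup_stale_on_init=True):
            if not os.path.exists(path):  # another process may have finished first
                download_func(path)
```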
frigate/util/file.py (new file, 276 lines)
@@ -0,0 +1,276 @@
+"""Path and file utilities."""
+
+import base64
+import fcntl
+import logging
+import os
+import time
+from pathlib import Path
+from typing import Optional
+
+import cv2
+from numpy import ndarray
+
+from frigate.const import CLIPS_DIR, THUMB_DIR
+from frigate.models import Event
+
+logger = logging.getLogger(__name__)
+
+
+def get_event_thumbnail_bytes(event: Event) -> bytes | None:
+    if event.thumbnail:
+        return base64.b64decode(event.thumbnail)
+    else:
+        try:
+            with open(
+                os.path.join(THUMB_DIR, event.camera, f"{event.id}.webp"), "rb"
+            ) as f:
+                return f.read()
+        except Exception:
+            return None
+
+
+def get_event_snapshot(event: Event) -> ndarray:
+    media_name = f"{event.camera}-{event.id}"
+    return cv2.imread(f"{os.path.join(CLIPS_DIR, media_name)}.jpg")
+
+
+### Deletion
+
+
+def delete_event_images(event: Event) -> bool:
+    return delete_event_snapshot(event) and delete_event_thumbnail(event)
+
+
+def delete_event_snapshot(event: Event) -> bool:
+    media_name = f"{event.camera}-{event.id}"
+    media_path = Path(f"{os.path.join(CLIPS_DIR, media_name)}.jpg")
+
+    try:
+        media_path.unlink(missing_ok=True)
+        media_path = Path(f"{os.path.join(CLIPS_DIR, media_name)}-clean.webp")
+        media_path.unlink(missing_ok=True)
+        # also delete clean.png (legacy) for backward compatibility
+        media_path = Path(f"{os.path.join(CLIPS_DIR, media_name)}-clean.png")
+        media_path.unlink(missing_ok=True)
+        return True
+    except OSError:
+        return False
+
+
+def delete_event_thumbnail(event: Event) -> bool:
+    if event.thumbnail:
+        return True
+    else:
+        Path(os.path.join(THUMB_DIR, event.camera, f"{event.id}.webp")).unlink(
+            missing_ok=True
+        )
+        return True
+
+
+### File Locking
+
+
+class FileLock:
+    """
+    A file-based lock for coordinating access to resources across processes.
+
+    Uses fcntl.flock() for proper POSIX file locking on Linux. Supports timeouts,
+    stale lock detection, and can be used as a context manager.
+
+    Example:
+        ```python
+        # Using as a context manager (recommended)
+        with FileLock("/path/to/resource.lock", timeout=60):
+            # Critical section
+            do_something()
+
+        # Manual acquisition and release
+        lock = FileLock("/path/to/resource.lock")
+        if lock.acquire(timeout=60):
+            try:
+                do_something()
+            finally:
+                lock.release()
+        ```
+
+    Attributes:
+        lock_path: Path to the lock file
+        timeout: Maximum time to wait for lock acquisition (seconds)
+        poll_interval: Time to wait between lock acquisition attempts (seconds)
+        stale_timeout: Time after which a lock is considered stale (seconds)
+    """
+
+    def __init__(
+        self,
+        lock_path: str | Path,
+        timeout: int = 300,
+        poll_interval: float = 1.0,
+        stale_timeout: int = 600,
+        cleanup_stale_on_init: bool = False,
+    ):
+        """
+        Initialize a FileLock.
+
+        Args:
+            lock_path: Path to the lock file
+            timeout: Maximum time to wait for lock acquisition in seconds (default: 300)
+            poll_interval: Time to wait between lock attempts in seconds (default: 1.0)
+            stale_timeout: Time after which a lock is considered stale in seconds (default: 600)
+            cleanup_stale_on_init: Whether to clean up stale locks on initialization (default: False)
+        """
+        self.lock_path = Path(lock_path)
+        self.timeout = timeout
+        self.poll_interval = poll_interval
+        self.stale_timeout = stale_timeout
+        self._fd: Optional[int] = None
+        self._acquired = False
+
+        if cleanup_stale_on_init:
+            self._cleanup_stale_lock()
+
+    def _cleanup_stale_lock(self) -> bool:
+        """
+        Clean up a stale lock file if it exists and is old.
+
+        Returns:
+            True if lock was cleaned up, False otherwise
+        """
+        try:
+            if self.lock_path.exists():
+                # Check if lock file is older than stale_timeout
+                lock_age = time.time() - self.lock_path.stat().st_mtime
+                if lock_age > self.stale_timeout:
+                    logger.warning(
+                        f"Removing stale lock file: {self.lock_path} (age: {lock_age:.1f}s)"
+                    )
+                    self.lock_path.unlink()
+                    return True
+        except Exception as e:
+            logger.error(f"Error cleaning up stale lock: {e}")
+
+        return False
+
+    def is_stale(self) -> bool:
+        """
+        Check if the lock file is stale (older than stale_timeout).
+
+        Returns:
+            True if lock is stale, False otherwise
+        """
+        try:
+            if self.lock_path.exists():
+                lock_age = time.time() - self.lock_path.stat().st_mtime
+                return lock_age > self.stale_timeout
+        except Exception:
+            pass
+
+        return False
+
+    def acquire(self, timeout: Optional[int] = None) -> bool:
+        """
+        Acquire the file lock using fcntl.flock().
+
+        Args:
+            timeout: Maximum time to wait for lock in seconds (uses instance timeout if None)
+
+        Returns:
+            True if lock acquired, False if timeout or error
+        """
+        if self._acquired:
+            logger.warning(f"Lock already acquired: {self.lock_path}")
+            return True
+
+        if timeout is None:
+            timeout = self.timeout
+
+        # Ensure parent directory exists
+        self.lock_path.parent.mkdir(parents=True, exist_ok=True)
+
+        # Clean up stale lock before attempting to acquire
+        self._cleanup_stale_lock()
+
+        try:
+            self._fd = os.open(self.lock_path, os.O_CREAT | os.O_RDWR)
+
+            start_time = time.time()
+            while time.time() - start_time < timeout:
+                try:
+                    fcntl.flock(self._fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
+                    self._acquired = True
+                    logger.debug(f"Acquired lock: {self.lock_path}")
+                    return True
+                except (OSError, IOError):
+                    # Lock is held by another process
+                    if time.time() - start_time >= timeout:
+                        logger.warning(f"Timeout waiting for lock: {self.lock_path}")
+                        os.close(self._fd)
+                        self._fd = None
+                        return False
+
+                    time.sleep(self.poll_interval)
+
+            # Timeout reached
+            if self._fd is not None:
+                os.close(self._fd)
+                self._fd = None
+            return False
+
+        except Exception as e:
+            logger.error(f"Error acquiring lock: {e}")
+            if self._fd is not None:
+                try:
+                    os.close(self._fd)
+                except Exception:
+                    pass
+                self._fd = None
+            return False
+
+    def release(self) -> None:
+        """
+        Release the file lock.
+
+        This closes the file descriptor and removes the lock file.
+        """
+        if not self._acquired:
+            return
+
+        try:
+            # Close file descriptor and release fcntl lock
+            if self._fd is not None:
+                try:
+                    fcntl.flock(self._fd, fcntl.LOCK_UN)
+                    os.close(self._fd)
+                except Exception as e:
+                    logger.warning(f"Error closing lock file descriptor: {e}")
+                finally:
+                    self._fd = None
+
+            # Remove lock file
+            if self.lock_path.exists():
+                self.lock_path.unlink()
+                logger.debug(f"Released lock: {self.lock_path}")
+
+        except FileNotFoundError:
+            # Lock file already removed, that's fine
+            pass
+        except Exception as e:
+            logger.error(f"Error releasing lock: {e}")
+        finally:
+            self._acquired = False
+
+    def __enter__(self):
+        """Context manager entry - acquire the lock."""
+        if not self.acquire():
+            raise TimeoutError(f"Failed to acquire lock: {self.lock_path}")
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        """Context manager exit - release the lock."""
+        self.release()
+        return False
+
+    def __del__(self):
+        """Destructor - ensure lock is released."""
+        if self._acquired:
+            self.release()
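Two properties of this implementation are worth noting: fcntl.flock() is advisory and is tied to the open file descriptor, so the kernel drops the lock automatically when the holder closes the descriptor or exits, including on a crash; only the lock file itself can go stale, which is what stale_timeout and cleanup_stale_on_init deal with. A quick single-process illustration (two descriptors on the same path contend; the path and timing values are placeholders):

```python
from frigate.util.file import FileLock

a = FileLock("/tmp/demo.lock", timeout=5)
b = FileLock("/tmp/demo.lock", timeout=1, poll_interval=0.2)

assert a.acquire()      # first holder takes the flock
assert not b.acquire()  # times out after ~1s while "a" still holds it
a.release()             # releases the flock and unlinks /tmp/demo.lock
assert b.acquire()      # now succeeds
b.release()
```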
Deleted file (62 lines)
@@ -1,62 +0,0 @@
-"""Path utilities."""
-
-import base64
-import os
-from pathlib import Path
-
-import cv2
-from numpy import ndarray
-
-from frigate.const import CLIPS_DIR, THUMB_DIR
-from frigate.models import Event
-
-
-def get_event_thumbnail_bytes(event: Event) -> bytes | None:
-    if event.thumbnail:
-        return base64.b64decode(event.thumbnail)
-    else:
-        try:
-            with open(
-                os.path.join(THUMB_DIR, event.camera, f"{event.id}.webp"), "rb"
-            ) as f:
-                return f.read()
-        except Exception:
-            return None
-
-
-def get_event_snapshot(event: Event) -> ndarray:
-    media_name = f"{event.camera}-{event.id}"
-    return cv2.imread(f"{os.path.join(CLIPS_DIR, media_name)}.jpg")
-
-
-### Deletion
-
-
-def delete_event_images(event: Event) -> bool:
-    return delete_event_snapshot(event) and delete_event_thumbnail(event)
-
-
-def delete_event_snapshot(event: Event) -> bool:
-    media_name = f"{event.camera}-{event.id}"
-    media_path = Path(f"{os.path.join(CLIPS_DIR, media_name)}.jpg")
-
-    try:
-        media_path.unlink(missing_ok=True)
-        media_path = Path(f"{os.path.join(CLIPS_DIR, media_name)}-clean.webp")
-        media_path.unlink(missing_ok=True)
-        # also delete clean.png (legacy) for backward compatibility
-        media_path = Path(f"{os.path.join(CLIPS_DIR, media_name)}-clean.png")
-        media_path.unlink(missing_ok=True)
-        return True
-    except OSError:
-        return False
-
-
-def delete_event_thumbnail(event: Event) -> bool:
-    if event.thumbnail:
-        return True
-    else:
-        Path(os.path.join(THUMB_DIR, event.camera, f"{event.id}.webp")).unlink(
-            missing_ok=True
-        )
-        return True
@@ -1,6 +1,5 @@
 """RKNN model conversion utility for Frigate."""

-import fcntl
 import logging
 import os
 import subprocess
@@ -9,6 +8,8 @@ import time
 from pathlib import Path
 from typing import Optional

+from frigate.util.file import FileLock
+
 logger = logging.getLogger(__name__)

 MODEL_TYPE_CONFIGS = {
@@ -245,112 +246,6 @@ def convert_onnx_to_rknn(
         logger.warning(f"Failed to remove temporary ONNX file: {e}")


-def cleanup_stale_lock(lock_file_path: Path) -> bool:
-    """
-    Clean up a stale lock file if it exists and is old.
-
-    Args:
-        lock_file_path: Path to the lock file
-
-    Returns:
-        True if lock was cleaned up, False otherwise
-    """
-    try:
-        if lock_file_path.exists():
-            # Check if lock file is older than 10 minutes (stale)
-            lock_age = time.time() - lock_file_path.stat().st_mtime
-            if lock_age > 600:  # 10 minutes
-                logger.warning(
-                    f"Removing stale lock file: {lock_file_path} (age: {lock_age:.1f}s)"
-                )
-                lock_file_path.unlink()
-                return True
-    except Exception as e:
-        logger.error(f"Error cleaning up stale lock: {e}")
-
-    return False
-
-
-def acquire_conversion_lock(lock_file_path: Path, timeout: int = 300) -> bool:
-    """
-    Acquire a file-based lock for model conversion.
-
-    Args:
-        lock_file_path: Path to the lock file
-        timeout: Maximum time to wait for lock in seconds
-
-    Returns:
-        True if lock acquired, False if timeout or error
-    """
-    try:
-        lock_file_path.parent.mkdir(parents=True, exist_ok=True)
-        cleanup_stale_lock(lock_file_path)
-        lock_fd = os.open(lock_file_path, os.O_CREAT | os.O_RDWR)
-
-        # Try to acquire exclusive lock
-        start_time = time.time()
-        while time.time() - start_time < timeout:
-            try:
-                fcntl.flock(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
-                # Lock acquired successfully
-                logger.debug(f"Acquired conversion lock: {lock_file_path}")
-                return True
-            except (OSError, IOError):
-                # Lock is held by another process, wait and retry
-                if time.time() - start_time >= timeout:
-                    logger.warning(
-                        f"Timeout waiting for conversion lock: {lock_file_path}"
-                    )
-                    os.close(lock_fd)
-                    return False
-
-                logger.debug("Waiting for conversion lock to be released...")
-                time.sleep(1)
-
-        os.close(lock_fd)
-        return False
-
-    except Exception as e:
-        logger.error(f"Error acquiring conversion lock: {e}")
-        return False
-
-
-def release_conversion_lock(lock_file_path: Path) -> None:
-    """
-    Release the conversion lock.
-
-    Args:
-        lock_file_path: Path to the lock file
-    """
-    try:
-        if lock_file_path.exists():
-            lock_file_path.unlink()
-            logger.debug(f"Released conversion lock: {lock_file_path}")
-    except Exception as e:
-        logger.error(f"Error releasing conversion lock: {e}")
-
-
-def is_lock_stale(lock_file_path: Path, max_age: int = 600) -> bool:
-    """
-    Check if a lock file is stale (older than max_age seconds).
-
-    Args:
-        lock_file_path: Path to the lock file
-        max_age: Maximum age in seconds before considering lock stale
-
-    Returns:
-        True if lock is stale, False otherwise
-    """
-    try:
-        if lock_file_path.exists():
-            lock_age = time.time() - lock_file_path.stat().st_mtime
-            return lock_age > max_age
-    except Exception:
-        pass
-
-    return False
-
-
 def wait_for_conversion_completion(
     model_type: str, rknn_path: Path, lock_file_path: Path, timeout: int = 300
 ) -> bool:
@@ -358,6 +253,7 @@ def wait_for_conversion_completion(
     Wait for another process to complete the conversion.

     Args:
+        model_type: Type of model being converted
         rknn_path: Path to the expected RKNN model
         lock_file_path: Path to the lock file to monitor
         timeout: Maximum time to wait in seconds
@@ -366,6 +262,8 @@ def wait_for_conversion_completion(
         True if RKNN model appears, False if timeout
     """
     start_time = time.time()
+    lock = FileLock(lock_file_path, stale_timeout=600)
+
     while time.time() - start_time < timeout:
         # Check if RKNN model appeared
         if rknn_path.exists():
@@ -385,11 +283,14 @@ def wait_for_conversion_completion(
             return False

         # Check if lock is stale
-        if is_lock_stale(lock_file_path):
+        if lock.is_stale():
             logger.warning("Lock file is stale, attempting to clean up and retry...")
-            cleanup_stale_lock(lock_file_path)
+            lock._cleanup_stale_lock()
             # Try to acquire lock again
-            if acquire_conversion_lock(lock_file_path, timeout=60):
+            retry_lock = FileLock(
+                lock_file_path, timeout=60, cleanup_stale_on_init=True
+            )
+            if retry_lock.acquire():
                 try:
                     # Check if RKNN file appeared while waiting
                     if rknn_path.exists():
@@ -415,7 +316,7 @@ def wait_for_conversion_completion(
                         return False

                 finally:
-                    release_conversion_lock(lock_file_path)
+                    retry_lock.release()

         logger.debug("Waiting for RKNN model to appear...")
         time.sleep(1)
@@ -452,8 +353,9 @@ def auto_convert_model(
         return str(rknn_path)

     lock_file_path = base_path.parent / f"{base_name}.conversion.lock"
+    lock = FileLock(lock_file_path, timeout=300, cleanup_stale_on_init=True)

-    if acquire_conversion_lock(lock_file_path):
+    if lock.acquire():
         try:
             if rknn_path.exists():
                 logger.info(
@@ -476,7 +378,7 @@ def auto_convert_model(
            return None

        finally:
-           release_conversion_lock(lock_file_path)
+           lock.release()
    else:
        logger.info(
            f"Another process is converting {model_path}, waiting for completion..."