Merge pull request #45 from ibs0d/claude/analyze-video-storage-OeT89

Claude/analyze video storage oe t89
This commit is contained in:
ibs0d 2026-03-13 20:41:56 +11:00 committed by GitHub
commit 682d816f19
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
6 changed files with 72 additions and 11 deletions

View File

@ -89,6 +89,11 @@ python3 /usr/local/nginx/get_nginx_settings.py | \
tempio -template /usr/local/nginx/templates/listen.gotmpl \
-out /usr/local/nginx/conf/listen.conf
# build location blocks for recording roots outside /media/frigate
python3 /usr/local/nginx/get_nginx_settings.py | \
tempio -template /usr/local/nginx/templates/extra_recordings.gotmpl \
-out /usr/local/nginx/conf/extra_recordings.conf
# Replace the bash process with the NGINX process, redirecting stderr to stdout
exec 2>&1
exec \

View File

@ -101,6 +101,7 @@ http {
include auth_location.conf;
include base_path.conf;
include extra_recordings.conf;
location /vod/ {
include auth_request.conf;

View File

@ -52,11 +52,22 @@ listen_config["external_port"] = external_port
# Base path under which the Frigate UI is served ("" = server root).
base_path = os.environ.get("FRIGATE_BASE_PATH", "")
# Collect recording roots that are outside the default /media/frigate tree.
# Nginx needs an explicit location block for each such root to serve preview files.
_default_recordings = "/media/frigate"
_extra_roots: set[str] = set()
for _cam_cfg in config.get("cameras", {}).values():
    # Guard against malformed config entries that are not mappings.
    if isinstance(_cam_cfg, dict):
        # NOTE(review): assumes each camera's "path" key holds that camera's
        # recording root — confirm against the camera config schema.
        _path = _cam_cfg.get("path", "")
        # Prefix test: anything not under /media/frigate needs its own nginx
        # location block. Trailing slashes are stripped so equivalent paths
        # (e.g. "/mnt/a" and "/mnt/a/") dedupe to a single entry in the set.
        if _path and not _path.startswith(_default_recordings):
            _extra_roots.add(_path.rstrip("/"))
# Settings consumed by the tempio templates; emitted as one JSON object on
# stdout so the caller can pipe it into `tempio` (see the entrypoint script).
# Roots are sorted so the generated nginx config is deterministic.
result: dict[str, Any] = {
    "tls": tls_config,
    "ipv6": ipv6_config,
    "listen": listen_config,
    "base_path": base_path,
    "extra_recording_roots": sorted(_extra_roots),
}
print(json.dumps(result))

View File

@ -0,0 +1,12 @@
# Generated from extra_recordings.gotmpl: one location block per recording
# root outside the default /media/frigate tree. The list comes from the
# "extra_recording_roots" key produced by get_nginx_settings.py.
{{ range .extra_recording_roots }}
# Serve preview files for this root straight from disk, gated by the same
# auth check as other media locations, with a 7-day public cache policy.
location {{ . }}/preview/ {
include auth_request.conf;
types {
video/mp4 mp4;
}
expires 7d;
add_header Cache-Control "public";
# The URI prefix and the on-disk path are intentionally identical here;
# alias maps requests under the location to files under the same root.
alias {{ . }}/preview/;
}
{{ end }}

View File

@ -201,6 +201,7 @@ class FFMpegConverter(threading.Thread):
)
else:
logger.error(f"Error saving preview for {self.config.name} :: {p.stderr}")
Path(self.path).unlink(missing_ok=True)
# unlink files from cache
# don't delete last frame as it will be used as first frame in next segment
@ -345,7 +346,7 @@ class PreviewRecorder:
return False
def write_frame_to_cache(self, frame_time: float, frame: np.ndarray) -> None:
def write_frame_to_cache(self, frame_time: float, frame: np.ndarray) -> bool:
# resize yuv frame
small_frame = np.zeros((self.out_height * 3 // 2, self.out_width), np.uint8)
copy_yuv_to_position(
@ -360,7 +361,7 @@ class PreviewRecorder:
small_frame,
cv2.COLOR_YUV2BGR_I420,
)
cv2.imwrite(
result = cv2.imwrite(
get_cache_image_name(self.config.name, frame_time),
small_frame,
[
@ -368,6 +369,11 @@ class PreviewRecorder:
PREVIEW_QUALITY_WEBP[self.config.record.preview.quality],
],
)
if not result:
logger.warning(
f"Failed to write preview frame for {self.config.name} at {frame_time}, likely no space in cache"
)
return result
def write_data(
self,
@ -381,8 +387,8 @@ class PreviewRecorder:
# always write the first frame
if self.start_time == 0:
self.start_time = frame_time
self.output_frames.append(frame_time)
self.write_frame_to_cache(frame_time, frame)
if self.write_frame_to_cache(frame_time, frame):
self.output_frames.append(frame_time)
return
# check if PREVIEW clip should be generated and cached frames reset
@ -390,8 +396,8 @@ class PreviewRecorder:
if len(self.output_frames) > 0:
# save last frame to ensure consistent duration
if self.config.record:
self.output_frames.append(frame_time)
self.write_frame_to_cache(frame_time, frame)
if self.write_frame_to_cache(frame_time, frame):
self.output_frames.append(frame_time)
# write the preview if any frames exist for this hour
FFMpegConverter(
@ -409,13 +415,13 @@ class PreviewRecorder:
# include first frame to ensure consistent duration
if self.config.record.enabled:
self.output_frames.append(frame_time)
self.write_frame_to_cache(frame_time, frame)
if self.write_frame_to_cache(frame_time, frame):
self.output_frames.append(frame_time)
return
elif self.should_write_frame(current_tracked_objects, motion_boxes, frame_time):
self.output_frames.append(frame_time)
self.write_frame_to_cache(frame_time, frame)
if self.write_frame_to_cache(frame_time, frame):
self.output_frames.append(frame_time)
return
def flag_offline(self, frame_time: float) -> None:

View File

@ -10,7 +10,7 @@ from peewee import SQL, fn
from frigate.config import FrigateConfig
from frigate.const import RECORD_DIR, REPLAY_CAMERA_PREFIX
from frigate.models import Event, Recordings
from frigate.models import Event, Previews, Recordings
from frigate.util.builtin import clear_and_unlink
logger = logging.getLogger(__name__)
@ -390,6 +390,32 @@ class StorageMaintainer(threading.Thread):
f"Updated has_clip to False for {len(events_to_update)} events"
)
# Also delete preview files that overlap with deleted recordings so they
# don't continue to consume space on the same disk after the recordings
# are gone (especially important for multi-path setups where preview and
# recordings share the same disk).
if deleted_recordings:
deleted_previews = []
for camera, time_range in camera_recordings.items():
overlapping_previews = (
Previews.select(Previews.id, Previews.path)
.where(
Previews.camera == camera,
Previews.start_time < time_range["max_end"],
Previews.end_time > time_range["min_start"],
)
.namedtuples()
)
for preview in overlapping_previews:
clear_and_unlink(Path(preview.path), missing_ok=True)
deleted_previews.append(preview.id)
logger.debug(f"Expiring {len(deleted_previews)} previews")
for i in range(0, len(deleted_previews), max_deletes):
Previews.delete().where(
Previews.id << deleted_previews[i : i + max_deletes]
).execute()
deleted_recordings_list = [r.id for r in deleted_recordings]
for i in range(0, len(deleted_recordings_list), max_deletes):
Recordings.delete().where(