Add option to archive expired records to S3 instead of deleting them

commit 07cdec0718 (parent f60af0f212)
Author: Sergey Krashevich
Date: 2023-05-23 22:34:42 +03:00
GPG Key ID: 625171324E7D3856
3 changed files with 19 additions and 2 deletions


@@ -523,6 +523,7 @@ class SnapshotsConfig(FrigateBaseModel):
 
 class StorageS3Config(FrigateBaseModel):
     enabled: bool = Field(default=False, title="S3 enabled.")
+    archive: bool = Field(default=False, title="Archive expired records to S3 instead of delete")
     access_key_id: str = Field(default="", title="AWS_ACCESS_KEY_ID")
     secret_access_key: str = Field(default="", title="AWS_SECRET_ACCESS_KEY")
     bucket_name: str = Field(default="", title="Bucket name")
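The new archive flag sits alongside enabled on the S3 storage config, so archiving expired recordings can be switched on independently of uploading new segments. Below is a minimal sketch that exercises the fields shown in this hunk; the import path frigate.config is an assumption based on the class context, and the credential values are placeholders.

# Minimal sketch, not project code: populate the model from the hunk above.
# Assumption: StorageS3Config lives in frigate.config (inferred from the class context).
from frigate.config import StorageS3Config

s3 = StorageS3Config(
    enabled=False,                  # keep live uploads of new segments off
    archive=True,                   # archive expired recordings instead of deleting them
    access_key_id="AKIA...",        # placeholder credential
    secret_access_key="********",   # placeholder credential
    bucket_name="frigate-archive",  # hypothetical bucket
)
print(s3.archive)  # True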


@@ -29,7 +29,7 @@ class RecordingCleanup(threading.Thread):
         self.config = config
         self.stop_event = stop_event
 
-        if self.config.storage.s3.enabled:
+        if self.config.storage.s3.enabled or self.config.storage.s3.archive:
             self.s3 = StorageS3(config)
 
     def clean_tmp_clips(self) -> None:
@@ -144,7 +144,7 @@
                     and recording.objects == 0
                 )
             ):
-                if self.config.storage.s3.enabled:
+                if self.config.storage.s3.archive:
                    s3path = self.s3.upload_file_to_s3(recording.path)
                    if s3path != "":
                        moved_recordings.add({"id": recording.id, "path": s3path})
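Taken together, the cleanup change means an expired recording is either shipped to S3 and tracked via moved_recordings, or deleted as before. The following is a hedged, self-contained sketch of that decision, not the project's code: the empty-string-on-failure convention of upload_file_to_s3 is inferred from the check above, and deleting the local copy after a successful upload is an assumption.

# Hedged sketch of the archive-or-delete choice made for one expired recording.
from pathlib import Path
from typing import Optional


def expire_recording(path: str, s3_client, archive: bool) -> Optional[str]:
    """Return the new S3 path if the file was archived, None if it was deleted."""
    if archive and s3_client is not None:
        s3path = s3_client.upload_file_to_s3(path)  # "" signals a failed upload (assumed)
        if s3path != "":
            Path(path).unlink(missing_ok=True)  # assumed: drop the local copy once archived
            return s3path
    # Archiving disabled or the upload failed: fall back to plain deletion.
    Path(path).unlink(missing_ok=True)
    return None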


@@ -21,6 +21,7 @@ from frigate.const import CACHE_DIR, MAX_SEGMENT_DURATION, RECORD_DIR
 from frigate.models import Event, Recordings
 from frigate.types import RecordMetricsTypes
 from frigate.util import area
+from frigate.storage import StorageS3
 
 
 logger = logging.getLogger(__name__)
@@ -42,6 +43,9 @@ class RecordingMaintainer(threading.Thread):
         self.recordings_info: dict[str, Any] = defaultdict(list)
         self.end_time_cache: dict[str, Tuple[datetime.datetime, float]] = {}
 
+        if self.config.storage.s3.enabled:
+            self.s3 = StorageS3(config)
+
     def move_files(self) -> None:
         cache_files = sorted(
             [
@@ -335,6 +339,17 @@ class RecordingMaintainer(threading.Thread):
             rand_id = "".join(
                 random.choices(string.ascii_lowercase + string.digits, k=6)
             )
+            storage = "local"
+            if self.config.storage.s3.enabled:
+                s3path = self.s3.upload_file_to_s3(file_path)
+                if s3path != "":
+                    Path(file_path).unlink(missing_ok=True)
+                    file_path = s3path
+                    storage = "s3"
+                else:
+                    logger.error(
+                        f"Unable to upload recording segment {file_path} to S3, falling back to local storage"
+                    )
             Recordings.create(
                 id=f"{start_time.timestamp()}-{rand_id}",
                 camera=camera,
@@ -346,6 +361,7 @@
                 # TODO: update this to store list of active objects at some point
                 objects=active_count,
                 segment_size=segment_size,
+                storage=storage,
             )
         except Exception as e:
             logger.error(f"Unable to store recording segment {cache_path}")
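The commit relies on frigate.storage.StorageS3, whose implementation is not part of this diff. Below is a boto3-based sketch of what upload_file_to_s3 could look like given the config fields above; the object key layout, the s3:// return format, and the empty-string-on-failure convention are assumptions inferred from how the callers treat the return value.

# Hedged sketch only; the real frigate.storage.StorageS3 is not shown in this commit.
import logging

import boto3  # assumed dependency

logger = logging.getLogger(__name__)


class StorageS3Sketch:
    """Hypothetical stand-in for frigate.storage.StorageS3."""

    def __init__(self, config) -> None:
        s3 = config.storage.s3
        self.bucket = s3.bucket_name
        self.client = boto3.client(
            "s3",
            aws_access_key_id=s3.access_key_id or None,
            aws_secret_access_key=s3.secret_access_key or None,
        )

    def upload_file_to_s3(self, file_path: str) -> str:
        """Upload a recording segment and return its s3:// path, or "" on failure."""
        key = file_path.lstrip("/")  # assumed key layout: mirror the local path
        try:
            self.client.upload_file(file_path, self.bucket, key)
        except Exception:
            logger.exception(f"S3 upload failed for {file_path}")
            return ""
        return f"s3://{self.bucket}/{key}"

The callers shown in this commit only require a non-empty string on success, so the exact shape of the returned path is an implementation detail of the real class.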