mirror of https://github.com/blakeblackshear/frigate.git (synced 2026-02-05 10:45:21 +03:00)

commit 1dc78c87fc (parent d257fd29ef)

    black
@@ -523,7 +523,9 @@ class SnapshotsConfig(FrigateBaseModel):
 
 class StorageS3Config(FrigateBaseModel):
     enabled: bool = Field(default=False, title="S3 enabled.")
-    archive: bool = Field(default=False, title="Archive expired records to S3 instead of delete")
+    archive: bool = Field(
+        default=False, title="Archive expired records to S3 instead of delete"
+    )
     access_key_id: str = Field(default="", title="AWS_ACCESS_KEY_ID")
     secret_access_key: str = Field(default="", title="AWS_SECRET_ACCESS_KEY")
     bucket_name: str = Field(default="", title="Bucket name")
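
The changes in this commit are formatting-only (line wrapping, quote style, trailing commas, blank lines), matching the commit message "black". For instance, the Field call above exceeds black's default 88-character line limit, so it gets wrapped. A minimal sketch of reproducing that with black's Python API (assuming black is installed; the snippet is illustrative, not part of the commit):

    # Illustrative only: format the long Field call from the hunk above.
    import black

    src = (
        "archive: bool = Field(default=False, "
        'title="Archive expired records to S3 instead of delete")\n'
    )

    # black wraps any line longer than its default 88-character limit,
    # producing the same wrapping shown in the diff.
    print(black.format_str(src, mode=black.FileMode()))
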
@@ -1323,7 +1323,10 @@ def recordings(camera_name):
 def recording_clip(camera_name, start_ts, end_ts):
     download = request.args.get("download", type=bool)
 
-    if current_app.frigate_config.storage.s3.enabled or current_app.frigate_config.storage.s3.archive:
+    if (
+        current_app.frigate_config.storage.s3.enabled
+        or current_app.frigate_config.storage.s3.archive
+    ):
         s3 = StorageS3(current_app.frigate_config)
 
     recordings = (
@@ -1342,7 +1345,7 @@ def recording_clip(camera_name, start_ts, end_ts):
     for clip in recordings:
         if recordings.storage == "s3":
             clip.path = s3.download_file_from_s3(clip.path)
 
         playlist_lines.append(f"file '{clip.path}'")
         # if this is the starting clip, add an inpoint
         if clip.start_time < start_ts:
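
For context, this loop assembles an ffmpeg concat-demuxer playlist from the matching recording segments, pulling any segment stored in S3 down to a local path first via download_file_from_s3. A hypothetical example of the playlist text such a loop produces (paths and timestamps are made up; the full function presumably also adds an outpoint for the final segment):

    # Hypothetical concat playlist; "inpoint"/"outpoint" trim the first and
    # last segments to the requested start_ts/end_ts window.
    playlist_lines = [
        "file '/tmp/cache/example-segment-00.mp4'",
        "inpoint 42.5",
        "file '/media/frigate/recordings/example/01.00.mp4'",
        "file '/media/frigate/recordings/example/01.10.mp4'",
        "outpoint 7.2",
    ]
    print("\n".join(playlist_lines))
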
@@ -153,7 +153,6 @@ class RecordingCleanup(threading.Thread):
             else:
                 Path(recording.path).unlink(missing_ok=True)
                 deleted_recordings.add(recording.id)
-
 
             # delete timeline entries relevant to this recording segment
             Timeline.delete().where(
@@ -174,9 +173,9 @@ class RecordingCleanup(threading.Thread):
             ).execute()
 
         for recording in moved_recordings:
-            Recordings.update({Recordings.storage: "s3", Recordings.path: recording["path"]}).where(
-                Recordings.id == recording["id"]
-            ).execute()
+            Recordings.update(
+                {Recordings.storage: "s3", Recordings.path: recording["path"]}
+            ).where(Recordings.id == recording["id"]).execute()
 
         logger.debug(f"End camera: {camera}.")
 
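
black leaves the peewee query chain intact and only re-wraps the update() arguments. A self-contained sketch of the same update-where-execute pattern (the model below is a hypothetical stand-in for Frigate's Recordings table, reduced to the two fields touched here):

    from peewee import CharField, Model, SqliteDatabase

    db = SqliteDatabase(":memory:")


    class Recordings(Model):
        path = CharField()
        storage = CharField(default="local")

        class Meta:
            database = db


    db.create_tables([Recordings])
    rec = Recordings.create(path="/media/frigate/recordings/example.mp4")

    # Same chain as in the diff: set the new columns, filter by id, run the query.
    Recordings.update(
        {Recordings.storage: "s3", Recordings.path: "frigate/example.mp4"}
    ).where(Recordings.id == rec.id).execute()

    print(Recordings.get_by_id(rec.id).storage)  # "s3"
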
@@ -347,7 +347,9 @@ class RecordingMaintainer(threading.Thread):
                     file_path = s3path
                     storage = "s3"
                 else:
-                    logger.error(f"Unable to upload recording segment {file_path} to s3, fallback to local")
+                    logger.error(
+                        f"Unable to upload recording segment {file_path} to s3, fallback to local"
+                    )
                     logger.error(e)
 
             Recordings.create(
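
Only the failure branch is visible in this hunk, but the surrounding logic appears to try an S3 upload for each finished segment and fall back to keeping it local when the upload fails. A rough sketch of that pattern under those assumptions (choose_storage, upload_file_to_s3, and the return convention are hypothetical names, not Frigate's API):

    import logging

    logger = logging.getLogger(__name__)


    def choose_storage(s3, file_path: str) -> tuple[str, str]:
        """Hypothetical sketch: prefer the S3 copy of a segment, else keep it local."""
        storage = "local"
        try:
            # Assumed to return the uploaded object's key on success.
            s3path = s3.upload_file_to_s3(file_path)
            if s3path:
                file_path = s3path
                storage = "s3"
        except Exception as e:
            logger.error(
                f"Unable to upload recording segment {file_path} to s3, fallback to local"
            )
            logger.error(e)
        return file_path, storage
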
@@ -8,6 +8,7 @@ from frigate.config import FrigateConfig
 
 logger = logging.getLogger(__name__)
 
+
 def remove_empty_directories(directory: str) -> None:
     # list all directories recursively and sort them by path,
     # longest first
@@ -22,4 +23,3 @@ def remove_empty_directories(directory: str) -> None:
             continue
         if len(os.listdir(path)) == 0:
             os.rmdir(path)
-
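
Only the head and tail of remove_empty_directories appear in these hunks. A plausible, self-contained sketch of the whole helper, with the walk-and-sort step filled in as an assumption based on the comment above:

    import os


    def remove_empty_directories(directory: str) -> None:
        # list all directories recursively and sort them by path,
        # longest first, so children are checked before their parents
        paths = sorted(
            (dirpath for dirpath, _, _ in os.walk(directory)),
            key=len,
            reverse=True,
        )
        for path in paths:
            # never remove the root directory itself
            if path == directory:
                continue
            if len(os.listdir(path)) == 0:
                os.rmdir(path)
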
@@ -28,21 +28,22 @@ class StorageS3:
     def __init__(self, config: FrigateConfig) -> None:
         self.config = config
         if self.config.storage.s3.enabled or self.config.storage.s3.archive:
-            if self.config.storage.s3.endpoint_url.startswith('http://'):
+            if self.config.storage.s3.endpoint_url.startswith("http://"):
                 try:
                     session = boto_session()
-                    session.set_config_variable('s3',
-                        {
-                            'use_ssl': False,
-                            'verify': False,
-                        }
-                    )
+                    session.set_config_variable(
+                        "s3",
+                        {
+                            "use_ssl": False,
+                            "verify": False,
+                        },
+                    )
                     self.s3_client = session.create_client(
                         "s3",
                         aws_access_key_id=self.config.storage.s3.access_key_id,
                         aws_secret_access_key=self.config.storage.s3.secret_access_key,
                         endpoint_url=self.config.storage.s3.endpoint_url,
-                        config=Config(signature_version=UNSIGNED)
+                        config=Config(signature_version=UNSIGNED),
                     )
                 except (BotoCoreError, ClientError) as error:
                     logger.error(f"Failed to create S3 client: {error}")
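
The same unsigned-client setup can be exercised on its own. A minimal sketch against a hypothetical plain-http endpoint (credentials and URL are placeholders; boto_session() in the diff presumably wraps botocore's session factory, so get_session is used directly here):

    from botocore import UNSIGNED
    from botocore.client import Config
    from botocore.exceptions import BotoCoreError, ClientError
    from botocore.session import get_session

    session = get_session()
    # Mirrors the diff: for http:// endpoints, disable SSL and certificate checks.
    session.set_config_variable("s3", {"use_ssl": False, "verify": False})

    try:
        s3_client = session.create_client(
            "s3",
            aws_access_key_id="placeholder-key-id",
            aws_secret_access_key="placeholder-secret",
            endpoint_url="http://minio.local:9000",  # hypothetical endpoint
            config=Config(signature_version=UNSIGNED),
        )
        print(s3_client.list_buckets())
    except (BotoCoreError, ClientError) as error:
        print(f"Failed to create S3 client: {error}")
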
@@ -58,7 +59,7 @@ class StorageS3:
             except (BotoCoreError, ClientError) as error:
                 logger.error(f"Failed to create S3 client: {error}")
                 return None
 
             self.s3_bucket = self.config.storage.s3.bucket_name
             self.s3_path = self.config.storage.s3.path
 
@@ -66,9 +67,7 @@ class StorageS3:
         try:
             s3_filename = self.s3_path + "/" + os.path.relpath(file_path, RECORD_DIR)
             self.s3_client.upload_file(file_path, self.s3_bucket, s3_filename)
-            logger.debug(
-                f"Uploading {file_path} to S3 {s3_filename}"
-            )
+            logger.debug(f"Uploading {file_path} to S3 {s3_filename}")
         except Exception as e:
             logger.error(
                 f"Error occurred while uploading {file_path} to S3 {s3_filename}: {e}"
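
As a usage note, the S3 object key is simply the segment's path relative to the local recordings root, prefixed with the configured storage.s3.path. A small sketch of that key construction (the prefix and segment path are illustrative; RECORD_DIR is Frigate's recordings directory):

    import os

    RECORD_DIR = "/media/frigate/recordings"  # Frigate's local recordings root
    s3_path = "frigate"  # illustrative value of storage.s3.path

    file_path = os.path.join(RECORD_DIR, "2023-06-01/12/front_door/00.00.mp4")

    # Mirror the local directory layout under the configured prefix in the bucket.
    s3_filename = s3_path + "/" + os.path.relpath(file_path, RECORD_DIR)
    print(s3_filename)  # frigate/2023-06-01/12/front_door/00.00.mp4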