Merge branch 'fastapi-poc' into fastapi-poc-media-endpoints
# Conflicts:
#	frigate/api/media.py

Commit: 1f11d825ef
@ -17,7 +17,7 @@ sudo chown -R "$(id -u):$(id -g)" /media/frigate
# When started as a service, LIBAVFORMAT_VERSION_MAJOR is defined in the
# s6 service file. For dev, where frigate is started from an interactive
# shell, we define it in .bashrc instead.
echo 'export LIBAVFORMAT_VERSION_MAJOR=$(ffmpeg -version | grep -Po "libavformat\W+\K\d+")' >> $HOME/.bashrc
echo 'export LIBAVFORMAT_VERSION_MAJOR=$(/usr/lib/ffmpeg/7.0/bin/ffmpeg -version | grep -Po "libavformat\W+\K\d+")' >> $HOME/.bashrc

make version

@ -201,7 +201,8 @@ ENV ALLOW_RESET=True
# Disable tokenizer parallelism warning
ENV TOKENIZERS_PARALLELISM=true

ENV PATH="/usr/lib/btbn-ffmpeg/bin:/usr/local/go2rtc/bin:/usr/local/tempio/bin:/usr/local/nginx/sbin:${PATH}"
ENV PATH="/usr/local/go2rtc/bin:/usr/local/tempio/bin:/usr/local/nginx/sbin:${PATH}"
ENV LIBAVFORMAT_VERSION_MAJOR=60

# Install dependencies
RUN --mount=type=bind,source=docker/main/install_deps.sh,target=/deps/install_deps.sh \

@ -39,18 +39,26 @@ apt-get -qq install --no-install-recommends --no-install-suggests -y \

# btbn-ffmpeg -> amd64
if [[ "${TARGETARCH}" == "amd64" ]]; then
mkdir -p /usr/lib/btbn-ffmpeg
wget -qO btbn-ffmpeg.tar.xz "https://github.com/BtbN/FFmpeg-Builds/releases/download/autobuild-2024-09-04-18-56/ffmpeg-n7.0.2-15-g0458a86656-linux64-gpl-7.0.tar.xz"
tar -xf btbn-ffmpeg.tar.xz -C /usr/lib/btbn-ffmpeg --strip-components 1
rm -rf btbn-ffmpeg.tar.xz /usr/lib/btbn-ffmpeg/doc /usr/lib/btbn-ffmpeg/bin/ffplay
mkdir -p /usr/lib/ffmpeg/5.0
mkdir -p /usr/lib/ffmpeg/7.0
wget -qO btbn-ffmpeg.tar.xz "https://github.com/NickM-27/FFmpeg-Builds/releases/download/autobuild-2022-07-31-12-37/ffmpeg-n5.1-2-g915ef932a3-linux64-gpl-5.1.tar.xz"
tar -xf btbn-ffmpeg.tar.xz -C /usr/lib/ffmpeg/5.0 --strip-components 1
rm -rf btbn-ffmpeg.tar.xz /usr/lib/ffmpeg/5.0/doc /usr/lib/ffmpeg/5.0/bin/ffplay
wget -qO btbn-ffmpeg.tar.xz "https://github.com/BtbN/FFmpeg-Builds/releases/download/autobuild-2024-09-13-12-57/ffmpeg-n7.0.2-17-gf705bc5b73-linux64-gpl-7.0.tar.xz"
tar -xf btbn-ffmpeg.tar.xz -C /usr/lib/ffmpeg/7.0 --strip-components 1
rm -rf btbn-ffmpeg.tar.xz /usr/lib/ffmpeg/7.0/doc /usr/lib/ffmpeg/7.0/bin/ffplay
fi

# ffmpeg -> arm64
if [[ "${TARGETARCH}" == "arm64" ]]; then
mkdir -p /usr/lib/btbn-ffmpeg
wget -qO btbn-ffmpeg.tar.xz "https://github.com/BtbN/FFmpeg-Builds/releases/download/autobuild-2024-09-04-18-56/ffmpeg-n7.0.2-15-g0458a86656-linuxarm64-gpl-7.0.tar.xz"
tar -xf btbn-ffmpeg.tar.xz -C /usr/lib/btbn-ffmpeg --strip-components 1
rm -rf btbn-ffmpeg.tar.xz /usr/lib/btbn-ffmpeg/doc /usr/lib/btbn-ffmpeg/bin/ffplay
mkdir -p /usr/lib/ffmpeg/5.0
mkdir -p /usr/lib/ffmpeg/7.0
wget -qO btbn-ffmpeg.tar.xz "https://github.com/NickM-27/FFmpeg-Builds/releases/download/autobuild-2022-07-31-12-37/ffmpeg-n5.1-2-g915ef932a3-linuxarm64-gpl-5.1.tar.xz"
tar -xf btbn-ffmpeg.tar.xz -C /usr/lib/ffmpeg/5.0 --strip-components 1
rm -rf btbn-ffmpeg.tar.xz /usr/lib/ffmpeg/5.0/doc /usr/lib/ffmpeg/5.0/bin/ffplay
wget -qO btbn-ffmpeg.tar.xz "https://github.com/BtbN/FFmpeg-Builds/releases/download/autobuild-2024-09-13-12-57/ffmpeg-n7.0.2-17-gf705bc5b73-linuxarm64-gpl-7.0.tar.xz"
tar -xf btbn-ffmpeg.tar.xz -C /usr/lib/ffmpeg/7.0 --strip-components 1
rm -rf btbn-ffmpeg.tar.xz /usr/lib/ffmpeg/7.0/doc /usr/lib/ffmpeg/7.0/bin/ffplay
fi

# arch specific packages
@ -59,11 +67,15 @@ if [[ "${TARGETARCH}" == "amd64" ]]; then
echo 'deb https://deb.debian.org/debian bookworm main contrib non-free' >/etc/apt/sources.list.d/debian-bookworm.list
apt-get -qq update
apt-get -qq install --no-install-recommends --no-install-suggests -y \
intel-opencl-icd \
mesa-va-drivers radeontop libva-drm2 intel-media-va-driver-non-free i965-va-driver libmfx1 intel-gpu-tools
intel-opencl-icd intel-media-va-driver-non-free i965-va-driver \
libmfx-gen1.2 libmfx1 onevpl-tools intel-gpu-tools \
libva-drm2 \
mesa-va-drivers radeontop

# something about this dependency requires it to be installed in a separate call rather than in the line above
apt-get -qq install --no-install-recommends --no-install-suggests -y \
i965-va-driver-shaders

rm -f /etc/apt/sources.list.d/debian-bookworm.list
fi

@ -44,8 +44,6 @@ function migrate_db_path() {

echo "[INFO] Preparing Frigate..."
migrate_db_path
export LIBAVFORMAT_VERSION_MAJOR=$(ffmpeg -version | grep -Po 'libavformat\W+\K\d+')

echo "[INFO] Starting Frigate..."

cd /opt/frigate || echo "[ERROR] Failed to change working directory to /opt/frigate"

@ -43,8 +43,6 @@ function get_ip_and_port_from_supervisor() {
export FRIGATE_GO2RTC_WEBRTC_CANDIDATE_INTERNAL="${ip_address}:${webrtc_port}"
}

export LIBAVFORMAT_VERSION_MAJOR=$(ffmpeg -version | grep -Po 'libavformat\W+\K\d+')

if [[ -f "/dev/shm/go2rtc.yaml" ]]; then
echo "[INFO] Removing stale config from last run..."
rm /dev/shm/go2rtc.yaml

@ -105,16 +105,34 @@ else:
**FRIGATE_ENV_VARS
)

# need to replace ffmpeg command when using ffmpeg4
if int(os.environ["LIBAVFORMAT_VERSION_MAJOR"]) < 59:
# ensure ffmpeg path is set correctly
path = config.get("ffmpeg", {}).get("path", "default")
if path == "default":
if int(os.getenv("LIBAVFORMAT_VERSION_MAJOR", "59") or "59") >= 59:
ffmpeg_path = "/usr/lib/ffmpeg/7.0/bin/ffmpeg"
else:
ffmpeg_path = "ffmpeg"
elif path == "7.0":
ffmpeg_path = "/usr/lib/ffmpeg/7.0/bin/ffmpeg"
elif path == "5.0":
ffmpeg_path = "/usr/lib/ffmpeg/5.0/bin/ffmpeg"
else:
ffmpeg_path = f"{path}/bin/ffmpeg"

if go2rtc_config.get("ffmpeg") is None:
go2rtc_config["ffmpeg"] = {
"rtsp": "-fflags nobuffer -flags low_delay -stimeout 5000000 -user_agent go2rtc/ffmpeg -rtsp_transport tcp -i {input}"
}
elif go2rtc_config["ffmpeg"].get("rtsp") is None:
go2rtc_config["ffmpeg"] = {"bin": ffmpeg_path}
elif go2rtc_config["ffmpeg"].get("bin") is None:
go2rtc_config["ffmpeg"]["bin"] = ffmpeg_path

# need to replace ffmpeg command when using ffmpeg4
if int(os.environ.get("LIBAVFORMAT_VERSION_MAJOR", "59") or "59") < 59:
if go2rtc_config["ffmpeg"].get("rtsp") is None:
go2rtc_config["ffmpeg"]["rtsp"] = (
"-fflags nobuffer -flags low_delay -stimeout 5000000 -user_agent go2rtc/ffmpeg -rtsp_transport tcp -i {input}"
)
else:
if go2rtc_config.get("ffmpeg") is None:
go2rtc_config["ffmpeg"] = {"path": ""}

for name in go2rtc_config.get("streams", {}):
stream = go2rtc_config["streams"][name]
@ -145,7 +163,7 @@ if config.get("birdseye", {}).get("restream", False):
birdseye: dict[str, any] = config.get("birdseye")

input = f"-f rawvideo -pix_fmt yuv420p -video_size {birdseye.get('width', 1280)}x{birdseye.get('height', 720)} -r 10 -i {BIRDSEYE_PIPE}"
ffmpeg_cmd = f"exec:{parse_preset_hardware_acceleration_encode(config.get('ffmpeg', {}).get('hwaccel_args'), input, '-rtsp_transport tcp -f rtsp {output}')}"
ffmpeg_cmd = f"exec:{parse_preset_hardware_acceleration_encode(ffmpeg_path, config.get('ffmpeg', {}).get('hwaccel_args'), input, '-rtsp_transport tcp -f rtsp {output}')}"

if go2rtc_config.get("streams"):
go2rtc_config["streams"]["birdseye"] = ffmpeg_cmd

@ -12,5 +12,7 @@ RUN rm -rf /usr/lib/btbn-ffmpeg/
RUN --mount=type=bind,source=docker/rpi/install_deps.sh,target=/deps/install_deps.sh \
/deps/install_deps.sh

ENV LIBAVFORMAT_VERSION_MAJOR=58

WORKDIR /opt/frigate/
COPY --from=rootfs / /

@ -162,15 +162,15 @@ listen [::]:5000 ipv6only=off;

### Custom ffmpeg build

Included with Frigate is a build of ffmpeg that works for the vast majority of users. However, there exists some hardware setups which have incompatibilities with the included build. In this case, a docker volume mapping can be used to overwrite the included ffmpeg build with an ffmpeg build that works for your specific hardware setup.
Included with Frigate is a build of ffmpeg that works for the vast majority of users. However, some hardware setups are incompatible with the included build. In this case, a statically built ffmpeg binary can be downloaded to /config and used.

To do this:

1. Download your ffmpeg build and uncompress to a folder on the host (let's use `/home/appdata/frigate/custom-ffmpeg` for this example).
1. Download your ffmpeg build and uncompress it to the Frigate config folder.
2. Update your docker-compose or docker CLI to include `'/home/appdata/frigate/custom-ffmpeg':'/usr/lib/btbn-ffmpeg':'ro'` in the volume mappings.
3. Restart Frigate and the custom version will be used if the mapping was done correctly.

NOTE: The folder that is mapped from the host needs to be the folder that contains `/bin`. So if the full structure is `/home/appdata/frigate/custom-ffmpeg/bin/ffmpeg` then `/home/appdata/frigate/custom-ffmpeg` needs to be mapped to `/usr/lib/btbn-ffmpeg`.
NOTE: The folder that is set in the config needs to be the folder that contains `bin`. So if the full structure is `/config/custom-ffmpeg/bin/ffmpeg` then the `ffmpeg -> path` field should be `/config/custom-ffmpeg`.
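
For example, if the custom build is extracted so that the binary sits at `/config/custom-ffmpeg/bin/ffmpeg`, the override in the Frigate config would look roughly like this (a minimal sketch assuming that layout):

```yaml
ffmpeg:
  # hypothetical location of a custom build extracted under /config
  path: /config/custom-ffmpeg
```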

### Custom go2rtc version

@ -210,6 +210,10 @@ birdseye:
# Optional: ffmpeg configuration
# More information about presets at https://docs.frigate.video/configuration/ffmpeg_presets
ffmpeg:
  # Optional: ffmpeg binary path (default: shown below)
  # can also be set to `7.0` or `5.0` to specify one of the included versions
  # or can be set to any path that holds `bin/ffmpeg` & `bin/ffprobe`
  path: "default"
  # Optional: global ffmpeg args (default: shown below)
  global_args: -hide_banner -loglevel warning -threads 2
  # Optional: global hwaccel args (default: auto detect)
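
As an aside, overriding the bundled ffmpeg version only requires changing `path`; a sketch of selecting the included 5.0 build (for hardware that has trouble with the 7.0 build), using the values listed above, would be:

```yaml
ffmpeg:
  # use the bundled ffmpeg 5.0 build instead of the default
  path: "5.0"
```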
@ -416,7 +416,7 @@ def ffprobe():
|
||||
output = []
|
||||
|
||||
for path in paths:
|
||||
ffprobe = ffprobe_stream(path.strip())
|
||||
ffprobe = ffprobe_stream(current_app.frigate_config.ffmpeg, path.strip())
|
||||
output.append(
|
||||
{
|
||||
"return_code": ffprobe.returncode,
|
||||
|
||||
@ -302,8 +302,21 @@ def events_explore():
|
||||
.dicts()
|
||||
)
|
||||
|
||||
events = query.iterator()
|
||||
return jsonify(list(events))
|
||||
events = list(query.iterator())
|
||||
|
||||
processed_events = [
|
||||
{k: v for k, v in event.items() if k != "data"}
|
||||
| {
|
||||
"data": {
|
||||
k: v
|
||||
for k, v in event["data"].items()
|
||||
if k in ["type", "score", "top_score", "description"]
|
||||
}
|
||||
}
|
||||
for event in events
|
||||
]
|
||||
|
||||
return jsonify(processed_events)
|
||||
|
||||
|
||||
@EventBp.route("/event_ids")
|
||||
@ -507,10 +520,12 @@ def events_search():
|
||||
events = [
|
||||
{k: v for k, v in event.items() if k != "data"}
|
||||
| {
|
||||
"data": {
|
||||
k: v
|
||||
for k, v in event["data"].items()
|
||||
if k in ["type", "score", "top_score", "description"]
|
||||
}
|
||||
}
|
||||
| {
|
||||
"search_distance": results[event["id"]]["distance"],
|
||||
"search_source": results[event["id"]]["source"],
|
||||
|
||||
@ -21,6 +21,7 @@ from tzlocal import get_localzone_name
|
||||
from werkzeug.utils import secure_filename
|
||||
|
||||
from frigate.api.defs.tags import Tags
|
||||
from frigate.config import FrigateConfig
|
||||
from frigate.const import (
|
||||
CACHE_DIR,
|
||||
CLIPS_DIR,
|
||||
@ -244,8 +245,10 @@ def get_snapshot_from_recording(
|
||||
recording: Recordings = recording_query.get()
|
||||
time_in_segment = frame_time - recording.start_time
|
||||
codec = "png" if format == "png" else "mjpeg"
|
||||
config: FrigateConfig = request.app.frigate_config
|
||||
|
||||
image_data = get_image_from_recording(
|
||||
recording.path, time_in_segment, codec, height
|
||||
config.ffmpeg, recording.path, time_in_segment, codec, height
|
||||
)
|
||||
|
||||
if not image_data:
|
||||
@ -297,9 +300,12 @@ def submit_recording_snapshot_to_plus(
|
||||
)
|
||||
|
||||
try:
|
||||
config: FrigateConfig = request.app.frigate_config
|
||||
recording: Recordings = recording_query.get()
|
||||
time_in_segment = frame_time - recording.start_time
|
||||
image_data = get_image_from_recording(recording.path, time_in_segment)
|
||||
image_data = get_image_from_recording(
|
||||
config.ffmpeg, recording.path, time_in_segment, "png"
|
||||
)
|
||||
|
||||
if not image_data:
|
||||
return JSONResponse(
|
||||
@ -448,7 +454,7 @@ def recordings(
|
||||
|
||||
@router.get("/media/camera/{camera_name}/start/{start_ts}/end/{end_ts}/clip.mp4")
|
||||
def recording_clip(
|
||||
camera_name: str, start_ts: float, end_ts: float, download: bool = False
|
||||
request: Request, camera_name: str, start_ts: float, end_ts: float, download: bool = False
|
||||
):
|
||||
recordings = (
|
||||
Recordings.select(
|
||||
@ -490,9 +496,11 @@ def recording_clip(
|
||||
file_name = secure_filename(file_name)
|
||||
path = os.path.join(CLIPS_DIR, f"cache/{file_name}")
|
||||
|
||||
config: FrigateConfig = request.app.frigate_config
|
||||
|
||||
if not os.path.exists(path):
|
||||
ffmpeg_cmd = [
|
||||
"ffmpeg",
|
||||
config.ffmpeg.ffmpeg_path,
|
||||
"-hide_banner",
|
||||
"-y",
|
||||
"-protocol_whitelist",
|
||||
@ -1148,7 +1156,7 @@ def event_thumbnail(
|
||||
|
||||
|
||||
@router.get("/media/events/{event_id}/preview.gif")
|
||||
def event_preview(event_id: str):
|
||||
def event_preview(request: Request, event_id: str):
|
||||
try:
|
||||
event: Event = Event.get(Event.id == event_id)
|
||||
except DoesNotExist:
|
||||
@ -1160,11 +1168,12 @@ def event_preview(event_id: str):
|
||||
end_ts = start_ts + (
|
||||
min(event.end_time - event.start_time, 20) if event.end_time else 20
|
||||
)
|
||||
return preview_gif(event.camera, start_ts, end_ts)
|
||||
return preview_gif(request, event.camera, start_ts, end_ts)
|
||||
|
||||
|
||||
@router.get("/media/camera/{camera_name}/start/{start_ts}/end/{end_ts}/preview.gif")
|
||||
def preview_gif(
|
||||
request: Request,
|
||||
camera_name: str,
|
||||
start_ts: float,
|
||||
end_ts: float,
|
||||
@ -1201,8 +1210,9 @@ def preview_gif(
|
||||
diff = start_ts - preview.start_time
|
||||
minutes = int(diff / 60)
|
||||
seconds = int(diff % 60)
|
||||
config: FrigateConfig = request.app.frigate_config
|
||||
ffmpeg_cmd = [
|
||||
"ffmpeg",
|
||||
config.ffmpeg.ffmpeg_path,
|
||||
"-hide_banner",
|
||||
"-loglevel",
|
||||
"warning",
|
||||
@ -1267,9 +1277,10 @@ def preview_gif(
|
||||
|
||||
last_file = selected_previews[-2]
|
||||
selected_previews.append(last_file)
|
||||
config: FrigateConfig = request.app.frigate_config
|
||||
|
||||
ffmpeg_cmd = [
|
||||
"ffmpeg",
|
||||
config.ffmpeg.ffmpeg_path,
|
||||
"-hide_banner",
|
||||
"-loglevel",
|
||||
"warning",
|
||||
@ -1318,6 +1329,7 @@ def preview_gif(
|
||||
|
||||
@router.get("/media/camera/{camera_name}/start/{start_ts}/end/{end_ts}/preview.mp4")
|
||||
def preview_mp4(
|
||||
request: Request,
|
||||
camera_name: str,
|
||||
start_ts: float,
|
||||
end_ts: float,
|
||||
@ -1373,8 +1385,9 @@ def preview_mp4(
|
||||
diff = start_ts - preview.start_time
|
||||
minutes = int(diff / 60)
|
||||
seconds = int(diff % 60)
|
||||
config: FrigateConfig = request.app.frigate_config
|
||||
ffmpeg_cmd = [
|
||||
"ffmpeg",
|
||||
config.ffmpeg.ffmpeg_path,
|
||||
"-hide_banner",
|
||||
"-loglevel",
|
||||
"warning",
|
||||
@ -1437,9 +1450,10 @@ def preview_mp4(
|
||||
|
||||
last_file = selected_previews[-2]
|
||||
selected_previews.append(last_file)
|
||||
config: FrigateConfig = request.app.frigate_config
|
||||
|
||||
ffmpeg_cmd = [
|
||||
"ffmpeg",
|
||||
config.ffmpeg.ffmpeg_path,
|
||||
"-hide_banner",
|
||||
"-loglevel",
|
||||
"warning",
|
||||
@ -1491,6 +1505,7 @@ def preview_mp4(
|
||||
|
||||
@router.get("/media/review/{event_id}/preview")
|
||||
def review_preview(
|
||||
request: Request,
|
||||
event_id: str,
|
||||
format: str = Query(default="gif", enum=["gif", "mp4"]),
|
||||
):
|
||||
@ -1509,9 +1524,9 @@ def review_preview(
|
||||
)
|
||||
|
||||
if format == "gif":
|
||||
return preview_gif(review.camera, start_ts, end_ts)
|
||||
return preview_gif(request, review.camera, start_ts, end_ts)
|
||||
else:
|
||||
return preview_mp4(review.camera, start_ts, end_ts)
|
||||
return preview_mp4(request, review.camera, start_ts, end_ts)
|
||||
|
||||
|
||||
@router.get("/media/preview/{file_name}/thumbnail.jpg")
|
||||
|
||||
@ -94,6 +94,18 @@ def review():
|
||||
return jsonify([r for r in review])
|
||||
|
||||
|
||||
@ReviewBp.route("/review/event/<id>")
|
||||
def get_review_from_event(id: str):
|
||||
try:
|
||||
return model_to_dict(
|
||||
ReviewSegment.get(
|
||||
ReviewSegment.data["detections"].cast("text") % f'*"{id}"*'
|
||||
)
|
||||
)
|
||||
except DoesNotExist:
|
||||
return "Review item not found", 404
|
||||
|
||||
|
||||
@ReviewBp.route("/review/<id>")
|
||||
def get_review(id: str):
|
||||
try:
|
||||
|
||||
@ -377,7 +377,7 @@ class FrigateApp:
|
||||
except PermissionError:
|
||||
logger.error("Unable to write to /config to save export state")
|
||||
|
||||
migrate_exports(self.config.cameras.keys())
|
||||
migrate_exports(self.config.ffmpeg, self.config.cameras.keys())
|
||||
|
||||
def init_external_event_processor(self) -> None:
|
||||
self.external_event_processor = ExternalEventProcessor(self.config)
|
||||
|
||||
@ -866,6 +866,7 @@ class FfmpegOutputArgsConfig(FrigateBaseModel):

class FfmpegConfig(FrigateBaseModel):
path: str = Field(default="default", title="FFmpeg path")
global_args: Union[str, List[str]] = Field(
default=FFMPEG_GLOBAL_ARGS_DEFAULT, title="Global FFmpeg arguments."
)
@ -884,6 +885,34 @@ class FfmpegConfig(FrigateBaseModel):
title="Time in seconds to wait before FFmpeg retries connecting to the camera.",
)

@property
def ffmpeg_path(self) -> str:
if self.path == "default":
if int(os.getenv("LIBAVFORMAT_VERSION_MAJOR", "59")) >= 59:
return "/usr/lib/ffmpeg/7.0/bin/ffmpeg"
else:
return "ffmpeg"
elif self.path == "7.0":
return "/usr/lib/ffmpeg/7.0/bin/ffmpeg"
elif self.path == "5.0":
return "/usr/lib/ffmpeg/5.0/bin/ffmpeg"
else:
return f"{self.path}/bin/ffmpeg"

@property
def ffprobe_path(self) -> str:
if self.path == "default":
if int(os.getenv("LIBAVFORMAT_VERSION_MAJOR", "59")) >= 59:
return "/usr/lib/ffmpeg/7.0/bin/ffprobe"
else:
return "ffprobe"
elif self.path == "7.0":
return "/usr/lib/ffmpeg/7.0/bin/ffprobe"
elif self.path == "5.0":
return "/usr/lib/ffmpeg/5.0/bin/ffprobe"
else:
return f"{self.path}/bin/ffprobe"


class CameraRoleEnum(str, Enum):
audio = "audio"
@ -1194,9 +1223,9 @@ class CameraConfig(FrigateBaseModel):
|
||||
)
|
||||
|
||||
cmd = (
|
||||
["ffmpeg"]
|
||||
[self.ffmpeg.ffmpeg_path]
|
||||
+ global_args
|
||||
+ hwaccel_args
|
||||
+ (hwaccel_args if "detect" in ffmpeg_input.roles else [])
|
||||
+ input_args
|
||||
+ ["-i", escape_special_characters(ffmpeg_input.path)]
|
||||
+ ffmpeg_output_args
|
||||
@ -1520,7 +1549,9 @@ class FrigateConfig(FrigateBaseModel):
|
||||
if need_detect_dimensions or need_record_fourcc:
|
||||
stream_info = {"width": 0, "height": 0, "fourcc": None}
|
||||
try:
|
||||
stream_info = stream_info_retriever.get_stream_info(input.path)
|
||||
stream_info = stream_info_retriever.get_stream_info(
|
||||
config.ffmpeg, input.path
|
||||
)
|
||||
except Exception:
|
||||
logger.warn(
|
||||
f"Error detecting stream parameters automatically for {input.path} Applying default values."
|
||||
|
||||
@ -50,7 +50,7 @@ def get_ffmpeg_command(ffmpeg: FfmpegConfig) -> list[str]:
|
||||
or get_ffmpeg_arg_list(ffmpeg.input_args)
|
||||
)
|
||||
return (
|
||||
["ffmpeg", "-vn", "-threads", "1"]
|
||||
[ffmpeg.ffmpeg_path, "-vn", "-threads", "1"]
|
||||
+ input_args
|
||||
+ ["-i"]
|
||||
+ [ffmpeg_input.path]
|
||||
|
||||
@ -49,12 +49,12 @@ class LibvaGpuSelector:
|
||||
|
||||
FPS_VFR_PARAM = (
|
||||
"-fps_mode vfr"
|
||||
if int(os.getenv("LIBAVFORMAT_VERSION_MAJOR", "59")) >= 59
|
||||
if int(os.getenv("LIBAVFORMAT_VERSION_MAJOR", "59") or "59") >= 59
|
||||
else "-vsync 2"
|
||||
)
|
||||
TIMEOUT_PARAM = (
|
||||
"-timeout"
|
||||
if int(os.getenv("LIBAVFORMAT_VERSION_MAJOR", "59")) >= 59
|
||||
if int(os.getenv("LIBAVFORMAT_VERSION_MAJOR", "59") or "59") >= 59
|
||||
else "-stimeout"
|
||||
)
|
||||
|
||||
@ -111,17 +111,17 @@ PRESETS_HW_ACCEL_SCALE["preset-nvidia-h265"] = PRESETS_HW_ACCEL_SCALE[
|
||||
]
|
||||
|
||||
PRESETS_HW_ACCEL_ENCODE_BIRDSEYE = {
|
||||
"preset-rpi-64-h264": "ffmpeg -hide_banner {0} -c:v h264_v4l2m2m {1}",
|
||||
"preset-rpi-64-h265": "ffmpeg -hide_banner {0} -c:v hevc_v4l2m2m {1}",
|
||||
FFMPEG_HWACCEL_VAAPI: "ffmpeg -hide_banner -hwaccel vaapi -hwaccel_output_format vaapi -hwaccel_device {2} {0} -c:v h264_vaapi -g 50 -bf 0 -profile:v high -level:v 4.1 -sei:v 0 -an -vf format=vaapi|nv12,hwupload {1}",
|
||||
"preset-intel-qsv-h264": "ffmpeg -hide_banner {0} -c:v h264_qsv -g 50 -bf 0 -profile:v high -level:v 4.1 -async_depth:v 1 {1}",
|
||||
"preset-intel-qsv-h265": "ffmpeg -hide_banner {0} -c:v h264_qsv -g 50 -bf 0 -profile:v high -level:v 4.1 -async_depth:v 1 {1}",
|
||||
FFMPEG_HWACCEL_NVIDIA: "ffmpeg -hide_banner {0} -c:v h264_nvenc -g 50 -profile:v high -level:v auto -preset:v p2 -tune:v ll {1}",
|
||||
"preset-jetson-h264": "ffmpeg -hide_banner {0} -c:v h264_nvmpi -profile high {1}",
|
||||
"preset-jetson-h265": "ffmpeg -hide_banner {0} -c:v h264_nvmpi -profile high {1}",
|
||||
"preset-rk-h264": "ffmpeg -hide_banner {0} -c:v h264_rkmpp -profile:v high {1}",
|
||||
"preset-rk-h265": "ffmpeg -hide_banner {0} -c:v hevc_rkmpp -profile:v high {1}",
|
||||
"default": "ffmpeg -hide_banner {0} -c:v libx264 -g 50 -profile:v high -level:v 4.1 -preset:v superfast -tune:v zerolatency {1}",
|
||||
"preset-rpi-64-h264": "{0} -hide_banner {1} -c:v h264_v4l2m2m {2}",
|
||||
"preset-rpi-64-h265": "{0} -hide_banner {1} -c:v hevc_v4l2m2m {2}",
|
||||
FFMPEG_HWACCEL_VAAPI: "{0} -hide_banner -hwaccel vaapi -hwaccel_output_format vaapi -hwaccel_device {3} {1} -c:v h264_vaapi -g 50 -bf 0 -profile:v high -level:v 4.1 -sei:v 0 -an -vf format=vaapi|nv12,hwupload {2}",
|
||||
"preset-intel-qsv-h264": "{0} -hide_banner {1} -c:v h264_qsv -g 50 -bf 0 -profile:v high -level:v 4.1 -async_depth:v 1 {2}",
|
||||
"preset-intel-qsv-h265": "{0} -hide_banner {1} -c:v h264_qsv -g 50 -bf 0 -profile:v high -level:v 4.1 -async_depth:v 1 {2}",
|
||||
FFMPEG_HWACCEL_NVIDIA: "{0} -hide_banner {1} -c:v h264_nvenc -g 50 -profile:v high -level:v auto -preset:v p2 -tune:v ll {2}",
|
||||
"preset-jetson-h264": "{0} -hide_banner {1} -c:v h264_nvmpi -profile high {2}",
|
||||
"preset-jetson-h265": "{0} -hide_banner {1} -c:v h264_nvmpi -profile high {2}",
|
||||
"preset-rk-h264": "{0} -hide_banner {1} -c:v h264_rkmpp -profile:v high {2}",
|
||||
"preset-rk-h265": "{0} -hide_banner {1} -c:v hevc_rkmpp -profile:v high {2}",
|
||||
"default": "{0} -hide_banner {1} -c:v libx264 -g 50 -profile:v high -level:v 4.1 -preset:v superfast -tune:v zerolatency {2}",
|
||||
}
|
||||
PRESETS_HW_ACCEL_ENCODE_BIRDSEYE["preset-nvidia-h264"] = (
|
||||
PRESETS_HW_ACCEL_ENCODE_BIRDSEYE[FFMPEG_HWACCEL_NVIDIA]
|
||||
@ -131,18 +131,18 @@ PRESETS_HW_ACCEL_ENCODE_BIRDSEYE["preset-nvidia-h265"] = (
|
||||
)
|
||||
|
||||
PRESETS_HW_ACCEL_ENCODE_TIMELAPSE = {
|
||||
"preset-rpi-64-h264": "ffmpeg -hide_banner {0} -c:v h264_v4l2m2m -pix_fmt yuv420p {1}",
|
||||
"preset-rpi-64-h265": "ffmpeg -hide_banner {0} -c:v hevc_v4l2m2m -pix_fmt yuv420p {1}",
|
||||
FFMPEG_HWACCEL_VAAPI: "ffmpeg -hide_banner -hwaccel vaapi -hwaccel_output_format vaapi -hwaccel_device {2} {0} -c:v h264_vaapi {1}",
|
||||
"preset-intel-qsv-h264": "ffmpeg -hide_banner {0} -c:v h264_qsv -profile:v high -level:v 4.1 -async_depth:v 1 {1}",
|
||||
"preset-intel-qsv-h265": "ffmpeg -hide_banner {0} -c:v hevc_qsv -profile:v high -level:v 4.1 -async_depth:v 1 {1}",
|
||||
FFMPEG_HWACCEL_NVIDIA: "ffmpeg -hide_banner -hwaccel cuda -hwaccel_output_format cuda -extra_hw_frames 8 {0} -c:v h264_nvenc {1}",
|
||||
"preset-nvidia-h265": "ffmpeg -hide_banner -hwaccel cuda -hwaccel_output_format cuda -extra_hw_frames 8 {0} -c:v hevc_nvenc {1}",
|
||||
"preset-jetson-h264": "ffmpeg -hide_banner {0} -c:v h264_nvmpi -profile high {1}",
|
||||
"preset-jetson-h265": "ffmpeg -hide_banner {0} -c:v hevc_nvmpi -profile high {1}",
|
||||
"preset-rk-h264": "ffmpeg -hide_banner {0} -c:v h264_rkmpp -profile:v high {1}",
|
||||
"preset-rk-h265": "ffmpeg -hide_banner {0} -c:v hevc_rkmpp -profile:v high {1}",
|
||||
"default": "ffmpeg -hide_banner {0} -c:v libx264 -preset:v ultrafast -tune:v zerolatency {1}",
|
||||
"preset-rpi-64-h264": "{0} -hide_banner {1} -c:v h264_v4l2m2m -pix_fmt yuv420p {2}",
|
||||
"preset-rpi-64-h265": "{0} -hide_banner {1} -c:v hevc_v4l2m2m -pix_fmt yuv420p {2}",
|
||||
FFMPEG_HWACCEL_VAAPI: "{0} -hide_banner -hwaccel vaapi -hwaccel_output_format vaapi -hwaccel_device {3} {1} -c:v h264_vaapi {2}",
|
||||
"preset-intel-qsv-h264": "{0} -hide_banner {1} -c:v h264_qsv -profile:v high -level:v 4.1 -async_depth:v 1 {2}",
|
||||
"preset-intel-qsv-h265": "{0} -hide_banner {1} -c:v hevc_qsv -profile:v high -level:v 4.1 -async_depth:v 1 {2}",
|
||||
FFMPEG_HWACCEL_NVIDIA: "{0} -hide_banner -hwaccel cuda -hwaccel_output_format cuda -extra_hw_frames 8 {1} -c:v h264_nvenc {2}",
|
||||
"preset-nvidia-h265": "{0} -hide_banner -hwaccel cuda -hwaccel_output_format cuda -extra_hw_frames 8 {1} -c:v hevc_nvenc {2}",
|
||||
"preset-jetson-h264": "{0} -hide_banner {1} -c:v h264_nvmpi -profile high {2}",
|
||||
"preset-jetson-h265": "{0} -hide_banner {1} -c:v hevc_nvmpi -profile high {2}",
|
||||
"preset-rk-h264": "{0} -hide_banner {1} -c:v h264_rkmpp -profile:v high {2}",
|
||||
"preset-rk-h265": "{0} -hide_banner {1} -c:v hevc_rkmpp -profile:v high {2}",
|
||||
"default": "{0} -hide_banner {1} -c:v libx264 -preset:v ultrafast -tune:v zerolatency {2}",
|
||||
}
|
||||
PRESETS_HW_ACCEL_ENCODE_TIMELAPSE["preset-nvidia-h264"] = (
|
||||
PRESETS_HW_ACCEL_ENCODE_TIMELAPSE[FFMPEG_HWACCEL_NVIDIA]
|
||||
@ -150,7 +150,7 @@ PRESETS_HW_ACCEL_ENCODE_TIMELAPSE["preset-nvidia-h264"] = (
|
||||
|
||||
# encoding of previews is only done on CPU due to comparable encode times and better quality from libx264
|
||||
PRESETS_HW_ACCEL_ENCODE_PREVIEW = {
|
||||
"default": "ffmpeg -hide_banner {0} -c:v libx264 -profile:v baseline -preset:v ultrafast {1}",
|
||||
"default": "{0} -hide_banner {1} -c:v libx264 -profile:v baseline -preset:v ultrafast {2}",
|
||||
}
|
||||
|
||||
|
||||
@ -197,7 +197,11 @@ class EncodeTypeEnum(str, Enum):
|
||||
|
||||
|
||||
def parse_preset_hardware_acceleration_encode(
|
||||
arg: Any, input: str, output: str, type: EncodeTypeEnum = EncodeTypeEnum.birdseye
|
||||
ffmpeg_path: str,
|
||||
arg: Any,
|
||||
input: str,
|
||||
output: str,
|
||||
type: EncodeTypeEnum = EncodeTypeEnum.birdseye,
|
||||
) -> str:
|
||||
"""Return the correct scaling preset or default preset if none is set."""
|
||||
if type == EncodeTypeEnum.birdseye:
|
||||
@ -215,6 +219,7 @@ def parse_preset_hardware_acceleration_encode(
|
||||
arg = "default"
|
||||
|
||||
return arg_map.get(arg, arg_map["default"]).format(
|
||||
ffmpeg_path,
|
||||
input,
|
||||
output,
|
||||
_gpu_selector.get_selected_gpu(),
|
||||
|
||||
@ -15,7 +15,7 @@ import cv2
|
||||
import numpy as np
|
||||
|
||||
from frigate.comms.config_updater import ConfigSubscriber
|
||||
from frigate.config import BirdseyeModeEnum, FrigateConfig
|
||||
from frigate.config import BirdseyeModeEnum, FfmpegConfig, FrigateConfig
|
||||
from frigate.const import BASE_DIR, BIRDSEYE_PIPE
|
||||
from frigate.util.image import (
|
||||
SharedMemoryFrameManager,
|
||||
@ -112,7 +112,7 @@ class Canvas:
|
||||
class FFMpegConverter(threading.Thread):
|
||||
def __init__(
|
||||
self,
|
||||
camera: str,
|
||||
ffmpeg: FfmpegConfig,
|
||||
input_queue: queue.Queue,
|
||||
stop_event: mp.Event,
|
||||
in_width: int,
|
||||
@ -123,8 +123,8 @@ class FFMpegConverter(threading.Thread):
|
||||
birdseye_rtsp: bool = False,
|
||||
):
|
||||
threading.Thread.__init__(self)
|
||||
self.name = f"{camera}_output_converter"
|
||||
self.camera = camera
|
||||
self.name = "birdseye_output_converter"
|
||||
self.camera = "birdseye"
|
||||
self.input_queue = input_queue
|
||||
self.stop_event = stop_event
|
||||
self.bd_pipe = None
|
||||
@ -133,7 +133,7 @@ class FFMpegConverter(threading.Thread):
|
||||
self.recreate_birdseye_pipe()
|
||||
|
||||
ffmpeg_cmd = [
|
||||
"ffmpeg",
|
||||
ffmpeg.ffmpeg_path,
|
||||
"-threads",
|
||||
"1",
|
||||
"-f",
|
||||
@ -725,7 +725,7 @@ class Birdseye:
|
||||
self.config = config
|
||||
self.input = queue.Queue(maxsize=10)
|
||||
self.converter = FFMpegConverter(
|
||||
"birdseye",
|
||||
config.ffmpeg,
|
||||
self.input,
|
||||
stop_event,
|
||||
config.birdseye.width,
|
||||
|
||||
@ -6,7 +6,7 @@ import queue
|
||||
import subprocess as sp
|
||||
import threading
|
||||
|
||||
from frigate.config import CameraConfig
|
||||
from frigate.config import CameraConfig, FfmpegConfig
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@ -15,6 +15,7 @@ class FFMpegConverter(threading.Thread):
|
||||
def __init__(
|
||||
self,
|
||||
camera: str,
|
||||
ffmpeg: FfmpegConfig,
|
||||
input_queue: queue.Queue,
|
||||
stop_event: mp.Event,
|
||||
in_width: int,
|
||||
@ -30,7 +31,7 @@ class FFMpegConverter(threading.Thread):
|
||||
self.stop_event = stop_event
|
||||
|
||||
ffmpeg_cmd = [
|
||||
"ffmpeg",
|
||||
ffmpeg.ffmpeg_path,
|
||||
"-threads",
|
||||
"1",
|
||||
"-f",
|
||||
@ -142,6 +143,7 @@ class JsmpegCamera:
|
||||
)
|
||||
self.converter = FFMpegConverter(
|
||||
config.name,
|
||||
config.ffmpeg,
|
||||
self.input,
|
||||
stop_event,
|
||||
config.frame_shape[1],
|
||||
|
||||
@ -78,6 +78,7 @@ class FFMpegConverter(threading.Thread):
|
||||
|
||||
# write a PREVIEW at fps and 1 key frame per clip
|
||||
self.ffmpeg_cmd = parse_preset_hardware_acceleration_encode(
|
||||
config.ffmpeg.ffmpeg_path,
|
||||
config.ffmpeg.hwaccel_args,
|
||||
input="-f concat -y -protocol_whitelist pipe,file -safe 0 -threads 1 -i /dev/stdin",
|
||||
output=f"-threads 1 -g {PREVIEW_KEYFRAME_INTERVAL} -bf 0 -b:v {PREVIEW_QUALITY_BIT_RATES[self.config.record.preview.quality]} {FPS_VFR_PARAM} -movflags +faststart -pix_fmt yuv420p {self.path}",
|
||||
|
||||
@ -14,7 +14,7 @@ from typing import Optional
|
||||
|
||||
from peewee import DoesNotExist
|
||||
|
||||
from frigate.config import FrigateConfig
|
||||
from frigate.config import FfmpegConfig, FrigateConfig
|
||||
from frigate.const import (
|
||||
CACHE_DIR,
|
||||
CLIPS_DIR,
|
||||
@ -116,7 +116,7 @@ class RecordingExporter(threading.Thread):
|
||||
minutes = int(diff / 60)
|
||||
seconds = int(diff % 60)
|
||||
ffmpeg_cmd = [
|
||||
"ffmpeg",
|
||||
self.config.ffmpeg.ffmpeg_path,
|
||||
"-hide_banner",
|
||||
"-loglevel",
|
||||
"warning",
|
||||
@ -230,11 +230,12 @@ class RecordingExporter(threading.Thread):
|
||||
|
||||
if self.playback_factor == PlaybackFactorEnum.realtime:
|
||||
ffmpeg_cmd = (
|
||||
f"ffmpeg -hide_banner {ffmpeg_input} -c copy -movflags +faststart {video_path}"
|
||||
f"{self.config.ffmpeg.ffmpeg_path} -hide_banner {ffmpeg_input} -c copy -movflags +faststart {video_path}"
|
||||
).split(" ")
|
||||
elif self.playback_factor == PlaybackFactorEnum.timelapse_25x:
|
||||
ffmpeg_cmd = (
|
||||
parse_preset_hardware_acceleration_encode(
|
||||
self.config.ffmpeg.ffmpeg_path,
|
||||
self.config.ffmpeg.hwaccel_args,
|
||||
f"{TIMELAPSE_DATA_INPUT_ARGS} {ffmpeg_input}",
|
||||
f"{self.config.cameras[self.camera].record.export.timelapse_args} -movflags +faststart {video_path}",
|
||||
@ -267,7 +268,7 @@ class RecordingExporter(threading.Thread):
|
||||
logger.debug(f"Finished exporting {video_path}")
|
||||
|
||||
|
||||
def migrate_exports(camera_names: list[str]):
|
||||
def migrate_exports(ffmpeg: FfmpegConfig, camera_names: list[str]):
|
||||
Path(os.path.join(CLIPS_DIR, "export")).mkdir(exist_ok=True)
|
||||
|
||||
exports = []
|
||||
@ -286,7 +287,7 @@ def migrate_exports(camera_names: list[str]):
|
||||
) # use jpg because webp encoder can't get quality low enough
|
||||
|
||||
ffmpeg_cmd = [
|
||||
"ffmpeg",
|
||||
ffmpeg.ffmpeg_path,
|
||||
"-hide_banner",
|
||||
"-loglevel",
|
||||
"warning",
|
||||
|
||||
@ -209,7 +209,9 @@ class RecordingMaintainer(threading.Thread):
|
||||
if cache_path in self.end_time_cache:
|
||||
end_time, duration = self.end_time_cache[cache_path]
|
||||
else:
|
||||
segment_info = await get_video_properties(cache_path, get_duration=True)
|
||||
segment_info = await get_video_properties(
|
||||
self.config.ffmpeg, cache_path, get_duration=True
|
||||
)
|
||||
|
||||
if segment_info["duration"]:
|
||||
duration = float(segment_info["duration"])
|
||||
@ -387,7 +389,7 @@ class RecordingMaintainer(threading.Thread):
|
||||
|
||||
# add faststart to kept segments to improve metadata reading
|
||||
p = await asyncio.create_subprocess_exec(
|
||||
"ffmpeg",
|
||||
self.config.ffmpeg.ffmpeg_path,
|
||||
"-hide_banner",
|
||||
"-y",
|
||||
"-i",
|
||||
|
||||
@ -314,10 +314,10 @@ class StreamInfoRetriever:
|
||||
def __init__(self) -> None:
|
||||
self.stream_cache: dict[str, tuple[int, int]] = {}
|
||||
|
||||
def get_stream_info(self, path: str) -> str:
|
||||
def get_stream_info(self, ffmpeg, path: str) -> str:
|
||||
if path in self.stream_cache:
|
||||
return self.stream_cache[path]
|
||||
|
||||
info = asyncio.run(get_video_properties(path))
|
||||
info = asyncio.run(get_video_properties(ffmpeg, path))
|
||||
self.stream_cache[path] = info
|
||||
return info
|
||||
|
||||
@ -765,12 +765,16 @@ def add_mask(mask: str, mask_img: np.ndarray):
|
||||
|
||||
|
||||
def get_image_from_recording(
|
||||
file_path: str, relative_frame_time: float, codec: str, height: Optional[int] = None
|
||||
ffmpeg, # Ffmpeg Config
|
||||
file_path: str,
|
||||
relative_frame_time: float,
|
||||
codec: str,
|
||||
height: Optional[int] = None,
|
||||
) -> Optional[any]:
|
||||
"""retrieve a frame from given time in recording file."""
|
||||
|
||||
ffmpeg_cmd = [
|
||||
"ffmpeg",
|
||||
ffmpeg.ffmpeg_path,
|
||||
"-hide_banner",
|
||||
"-loglevel",
|
||||
"warning",
|
||||
|
||||
@ -378,11 +378,11 @@ def get_jetson_stats() -> dict[int, dict]:
|
||||
return results
|
||||
|
||||
|
||||
def ffprobe_stream(path: str) -> sp.CompletedProcess:
|
||||
def ffprobe_stream(ffmpeg, path: str) -> sp.CompletedProcess:
|
||||
"""Run ffprobe on stream."""
|
||||
clean_path = escape_special_characters(path)
|
||||
ffprobe_cmd = [
|
||||
"ffprobe",
|
||||
ffmpeg.ffprobe_path,
|
||||
"-timeout",
|
||||
"1000000",
|
||||
"-print_format",
|
||||
@ -438,7 +438,9 @@ def auto_detect_hwaccel() -> str:
|
||||
return ""
|
||||
|
||||
|
||||
async def get_video_properties(url, get_duration=False) -> dict[str, any]:
|
||||
async def get_video_properties(
|
||||
ffmpeg, url: str, get_duration: bool = False
|
||||
) -> dict[str, any]:
|
||||
async def calculate_duration(video: Optional[any]) -> float:
|
||||
duration = None
|
||||
|
||||
@ -453,7 +455,7 @@ async def get_video_properties(url, get_duration=False) -> dict[str, any]:
|
||||
# if cv2 failed need to use ffprobe
|
||||
if duration is None:
|
||||
p = await asyncio.create_subprocess_exec(
|
||||
"ffprobe",
|
||||
ffmpeg.ffprobe_path,
|
||||
"-v",
|
||||
"error",
|
||||
"-show_entries",
|
||||
|
||||
@ -135,11 +135,13 @@ export function AnimatedEventCard({
|
||||
<div
|
||||
className="size-full cursor-pointer overflow-hidden rounded md:rounded-lg"
|
||||
onClick={onOpenReview}
|
||||
onAuxClick={() =>
|
||||
onAuxClick={(e) => {
|
||||
if (e.button === 1) {
|
||||
window
|
||||
.open(`${baseUrl}review?id=${event.id}`, "_blank")
|
||||
?.focus()
|
||||
?.focus();
|
||||
}
|
||||
}}
|
||||
>
|
||||
{!alertVideos ? (
|
||||
<img
|
||||
|
||||
@ -159,7 +159,7 @@ export default function ExportCard({
|
||||
className="cursor-pointer rounded-md bg-gray-500 bg-gradient-to-br from-gray-400 to-gray-500"
|
||||
onClick={() =>
|
||||
shareOrCopy(
|
||||
`${baseUrl}exports?id=${exportedRecording.id}`,
|
||||
`${baseUrl}export?id=${exportedRecording.id}`,
|
||||
exportedRecording.name.replaceAll("_", " "),
|
||||
)
|
||||
}
|
||||
|
||||
@ -1,6 +1,6 @@
|
||||
import { Button } from "../ui/button";
|
||||
import { CameraGroupConfig } from "@/types/frigateConfig";
|
||||
import { useMemo, useState } from "react";
|
||||
import { useEffect, useMemo, useState } from "react";
|
||||
import {
|
||||
DropdownMenu,
|
||||
DropdownMenuContent,
|
||||
@ -29,7 +29,10 @@ export function CamerasFilterButton({
|
||||
}: CameraFilterButtonProps) {
|
||||
const [open, setOpen] = useState(false);
|
||||
const [currentCameras, setCurrentCameras] = useState<string[] | undefined>(
|
||||
selectedCameras,
|
||||
selectedCameras === undefined ? [...allCameras] : selectedCameras,
|
||||
);
|
||||
const [allCamerasSelected, setAllCamerasSelected] = useState(
|
||||
selectedCameras === undefined,
|
||||
);
|
||||
|
||||
const buttonText = useMemo(() => {
|
||||
@ -37,11 +40,28 @@ export function CamerasFilterButton({
|
||||
return "Cameras";
|
||||
}
|
||||
|
||||
if (!selectedCameras || selectedCameras.length == 0) {
|
||||
if (allCamerasSelected) {
|
||||
return "All Cameras";
|
||||
}
|
||||
|
||||
return `${selectedCameras.includes("birdseye") ? selectedCameras.length - 1 : selectedCameras.length} Camera${selectedCameras.length !== 1 ? "s" : ""}`;
|
||||
if (!currentCameras || currentCameras.length === 0) {
|
||||
return "No cameras";
|
||||
}
|
||||
|
||||
return `${currentCameras.includes("birdseye") ? currentCameras.length - 1 : currentCameras.length} Camera${
|
||||
currentCameras.length !== 1 ? "s" : ""
|
||||
}`;
|
||||
}, [allCamerasSelected, currentCameras]);
|
||||
|
||||
// ui
|
||||
|
||||
useEffect(() => {
|
||||
setCurrentCameras(
|
||||
selectedCameras === undefined ? [...allCameras] : selectedCameras,
|
||||
);
|
||||
setAllCamerasSelected(selectedCameras === undefined);
|
||||
// only refresh when state changes
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [selectedCameras]);
|
||||
|
||||
const trigger = (
|
||||
@ -70,24 +90,28 @@ export function CamerasFilterButton({
|
||||
<DropdownMenuSeparator />
|
||||
</>
|
||||
)}
|
||||
<div className="scrollbar-container h-auto max-h-[80dvh] overflow-y-auto overflow-x-hidden p-4">
|
||||
<div className="scrollbar-container flex h-auto max-h-[80dvh] flex-col gap-2 overflow-y-auto overflow-x-hidden p-4">
|
||||
<FilterSwitch
|
||||
isChecked={currentCameras == undefined}
|
||||
isChecked={allCamerasSelected}
|
||||
label="All Cameras"
|
||||
onCheckedChange={(isChecked) => {
|
||||
setAllCamerasSelected(isChecked);
|
||||
|
||||
if (isChecked) {
|
||||
setCurrentCameras(undefined);
|
||||
setCurrentCameras([...allCameras]);
|
||||
} else {
|
||||
setCurrentCameras([]);
|
||||
}
|
||||
}}
|
||||
/>
|
||||
{groups.length > 0 && (
|
||||
<>
|
||||
<DropdownMenuSeparator className="mt-2" />
|
||||
<DropdownMenuSeparator />
|
||||
{groups.map(([name, conf]) => {
|
||||
return (
|
||||
<div
|
||||
key={name}
|
||||
className="w-full cursor-pointer rounded-lg px-2 py-1.5 text-sm capitalize text-primary hover:bg-muted"
|
||||
className="w-full cursor-pointer rounded-lg px-2 py-0.5 text-sm capitalize text-primary hover:bg-muted"
|
||||
onClick={() => setCurrentCameras([...conf.cameras])}
|
||||
>
|
||||
{name}
|
||||
@ -96,7 +120,7 @@ export function CamerasFilterButton({
|
||||
})}
|
||||
</>
|
||||
)}
|
||||
<DropdownMenuSeparator className="my-2" />
|
||||
<DropdownMenuSeparator />
|
||||
<div className="flex flex-col gap-2.5">
|
||||
{allCameras.map((item) => (
|
||||
<FilterSwitch
|
||||
@ -108,31 +132,39 @@ export function CamerasFilterButton({
|
||||
const updatedCameras = currentCameras
|
||||
? [...currentCameras]
|
||||
: [];
|
||||
|
||||
updatedCameras.push(item);
|
||||
setCurrentCameras(updatedCameras);
|
||||
|
||||
// Check if all cameras are now selected
|
||||
setAllCamerasSelected(
|
||||
updatedCameras.length === allCameras.length,
|
||||
);
|
||||
} else {
|
||||
const updatedCameras = currentCameras
|
||||
? [...currentCameras]
|
||||
: [];
|
||||
const index = updatedCameras.indexOf(item);
|
||||
|
||||
// can not deselect the last item
|
||||
if (updatedCameras.length > 1) {
|
||||
updatedCameras.splice(updatedCameras.indexOf(item), 1);
|
||||
if (index > -1) {
|
||||
updatedCameras.splice(index, 1);
|
||||
setCurrentCameras(updatedCameras);
|
||||
}
|
||||
|
||||
// Deselecting one camera should disable the "All Cameras" switch
|
||||
setAllCamerasSelected(false);
|
||||
}
|
||||
}}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
<DropdownMenuSeparator className="my-2" />
|
||||
<DropdownMenuSeparator />
|
||||
<div className="flex items-center justify-evenly p-2">
|
||||
<Button
|
||||
variant="select"
|
||||
disabled={currentCameras?.length === 0}
|
||||
onClick={() => {
|
||||
updateCameraFilter(currentCameras);
|
||||
updateCameraFilter(allCamerasSelected ? undefined : currentCameras);
|
||||
setOpen(false);
|
||||
}}
|
||||
>
|
||||
@ -140,7 +172,8 @@ export function CamerasFilterButton({
|
||||
</Button>
|
||||
<Button
|
||||
onClick={() => {
|
||||
setCurrentCameras(undefined);
|
||||
setCurrentCameras([...allCameras]);
|
||||
setAllCamerasSelected(true);
|
||||
updateCameraFilter(undefined);
|
||||
}}
|
||||
>
|
||||
@ -156,7 +189,8 @@ export function CamerasFilterButton({
|
||||
open={open}
|
||||
onOpenChange={(open) => {
|
||||
if (!open) {
|
||||
setCurrentCameras(selectedCameras);
|
||||
setCurrentCameras(selectedCameras ?? allCameras);
|
||||
setAllCamerasSelected(selectedCameras === undefined);
|
||||
}
|
||||
|
||||
setOpen(open);
|
||||
@ -176,9 +210,9 @@ export function CamerasFilterButton({
|
||||
open={open}
|
||||
onOpenChange={(open) => {
|
||||
if (!open) {
|
||||
setCurrentCameras(selectedCameras);
|
||||
setCurrentCameras(selectedCameras ?? allCameras);
|
||||
setAllCamerasSelected(selectedCameras === undefined);
|
||||
}
|
||||
|
||||
setOpen(open);
|
||||
}}
|
||||
>
|
||||
|
||||
@ -2,7 +2,7 @@ import { Button } from "../ui/button";
|
||||
import { Popover, PopoverContent, PopoverTrigger } from "../ui/popover";
|
||||
import useSWR from "swr";
|
||||
import { FrigateConfig } from "@/types/frigateConfig";
|
||||
import { useCallback, useMemo, useState } from "react";
|
||||
import { useCallback, useEffect, useMemo, useState } from "react";
|
||||
import { DropdownMenuSeparator } from "../ui/dropdown-menu";
|
||||
import { ReviewFilter, ReviewSeverity, ReviewSummary } from "@/types/review";
|
||||
import { getEndOfDayTimestamp } from "@/utils/dateUtil";
|
||||
@ -321,6 +321,15 @@ function GeneralFilterButton({
|
||||
selectedZones,
|
||||
);
|
||||
|
||||
// ui
|
||||
|
||||
useEffect(() => {
|
||||
setCurrentLabels(selectedLabels);
|
||||
setCurrentZones(selectedZones);
|
||||
// only refresh when state changes
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [selectedLabels, selectedZones]);
|
||||
|
||||
const trigger = (
|
||||
<Button
|
||||
size="sm"
|
||||
|
||||
@ -2,10 +2,10 @@ import { Button } from "../ui/button";
|
||||
import { Popover, PopoverContent, PopoverTrigger } from "../ui/popover";
|
||||
import useSWR from "swr";
|
||||
import { FrigateConfig } from "@/types/frigateConfig";
|
||||
import { useCallback, useMemo, useState } from "react";
|
||||
import { useCallback, useEffect, useMemo, useState } from "react";
|
||||
import { DropdownMenuSeparator } from "../ui/dropdown-menu";
|
||||
import { getEndOfDayTimestamp } from "@/utils/dateUtil";
|
||||
import { isMobile } from "react-device-detect";
|
||||
import { isDesktop, isMobile } from "react-device-detect";
|
||||
import { Drawer, DrawerContent, DrawerTrigger } from "../ui/drawer";
|
||||
import { Switch } from "../ui/switch";
|
||||
import { Label } from "../ui/label";
|
||||
@ -261,6 +261,14 @@ function GeneralFilterButton({
|
||||
return `${selectedLabels.length} Labels`;
|
||||
}, [selectedLabels]);
|
||||
|
||||
// ui
|
||||
|
||||
useEffect(() => {
|
||||
setCurrentLabels(selectedLabels);
|
||||
// only refresh when state changes
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [selectedLabels]);
|
||||
|
||||
const trigger = (
|
||||
<Button
|
||||
size="sm"
|
||||
@ -301,7 +309,7 @@ function GeneralFilterButton({
|
||||
}}
|
||||
>
|
||||
<DrawerTrigger asChild>{trigger}</DrawerTrigger>
|
||||
<DrawerContent className="max-h-[75dvh] overflow-hidden">
|
||||
<DrawerContent className="max-h-[75dvh] overflow-hidden p-4">
|
||||
{content}
|
||||
</DrawerContent>
|
||||
</Drawer>
|
||||
@ -447,6 +455,14 @@ function ZoneFilterButton({
|
||||
return `${selectedZones.length} Zones`;
|
||||
}, [selectedZones]);
|
||||
|
||||
// ui
|
||||
|
||||
useEffect(() => {
|
||||
setCurrentZones(selectedZones);
|
||||
// only refresh when state changes
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [selectedZones]);
|
||||
|
||||
const trigger = (
|
||||
<Button
|
||||
size="sm"
|
||||
@ -487,7 +503,7 @@ function ZoneFilterButton({
|
||||
}}
|
||||
>
|
||||
<DrawerTrigger asChild>{trigger}</DrawerTrigger>
|
||||
<DrawerContent className="max-h-[75dvh] overflow-hidden">
|
||||
<DrawerContent className="max-h-[75dvh] overflow-hidden p-4">
|
||||
{content}
|
||||
</DrawerContent>
|
||||
</Drawer>
|
||||
@ -532,7 +548,7 @@ export function ZoneFilterContent({
|
||||
<div className="scrollbar-container h-auto max-h-[80dvh] overflow-y-auto overflow-x-hidden">
|
||||
{allZones && setCurrentZones && (
|
||||
<>
|
||||
<DropdownMenuSeparator />
|
||||
{isDesktop && <DropdownMenuSeparator />}
|
||||
<div className="mb-5 mt-2.5 flex items-center justify-between">
|
||||
<Label
|
||||
className="mx-2 cursor-pointer text-primary"
|
||||
@ -583,7 +599,7 @@ export function ZoneFilterContent({
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
<DropdownMenuSeparator />
|
||||
{isDesktop && <DropdownMenuSeparator />}
|
||||
<div className="flex items-center justify-evenly p-2">
|
||||
<Button
|
||||
variant="select"
|
||||
@ -681,7 +697,7 @@ function SubFilterButton({
|
||||
}}
|
||||
>
|
||||
<DrawerTrigger asChild>{trigger}</DrawerTrigger>
|
||||
<DrawerContent className="max-h-[75dvh] overflow-hidden">
|
||||
<DrawerContent className="max-h-[75dvh] overflow-hidden p-4">
|
||||
{content}
|
||||
</DrawerContent>
|
||||
</Drawer>
|
||||
@ -772,7 +788,7 @@ export function SubFilterContent({
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
<DropdownMenuSeparator />
|
||||
{isDesktop && <DropdownMenuSeparator />}
|
||||
<div className="flex items-center justify-evenly p-2">
|
||||
<Button
|
||||
variant="select"
|
||||
@ -861,7 +877,7 @@ function SearchTypeButton({
|
||||
}}
|
||||
>
|
||||
<DrawerTrigger asChild>{trigger}</DrawerTrigger>
|
||||
<DrawerContent className="max-h-[75dvh] overflow-hidden">
|
||||
<DrawerContent className="max-h-[75dvh] overflow-hidden p-4">
|
||||
{content}
|
||||
</DrawerContent>
|
||||
</Drawer>
|
||||
@ -940,7 +956,7 @@ export function SearchTypeContent({
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
<DropdownMenuSeparator />
|
||||
{isDesktop && <DropdownMenuSeparator />}
|
||||
<div className="flex items-center justify-evenly p-2">
|
||||
<Button
|
||||
variant="select"
|
||||
|
||||
@ -193,7 +193,7 @@ export default function ObjectLifecycle({
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
if (eventSequence) {
|
||||
if (eventSequence && eventSequence.length > 0) {
|
||||
setTimeIndex(eventSequence?.[current].timestamp);
|
||||
handleSetBox(eventSequence?.[current].data.box ?? []);
|
||||
setLifecycleZones(eventSequence?.[current].data.zones);
|
||||
|
||||
@ -27,7 +27,7 @@ import { baseUrl } from "@/api/baseUrl";
|
||||
import { cn } from "@/lib/utils";
|
||||
import ActivityIndicator from "@/components/indicators/activity-indicator";
|
||||
import { ASPECT_VERTICAL_LAYOUT, ASPECT_WIDE_LAYOUT } from "@/types/record";
|
||||
import { FaImage, FaRegListAlt, FaVideo } from "react-icons/fa";
|
||||
import { FaHistory, FaImage, FaRegListAlt, FaVideo } from "react-icons/fa";
|
||||
import { FaRotate } from "react-icons/fa6";
|
||||
import ObjectLifecycle from "./ObjectLifecycle";
|
||||
import {
|
||||
@ -37,6 +37,14 @@ import {
|
||||
MobilePageHeader,
|
||||
MobilePageTitle,
|
||||
} from "@/components/mobile/MobilePage";
|
||||
import {
|
||||
Tooltip,
|
||||
TooltipContent,
|
||||
TooltipTrigger,
|
||||
} from "@/components/ui/tooltip";
|
||||
import { ReviewSegment } from "@/types/review";
|
||||
import { useNavigate } from "react-router-dom";
|
||||
import Chip from "@/components/indicators/Chip";
|
||||
|
||||
const SEARCH_TABS = [
|
||||
"details",
|
||||
@ -199,7 +207,7 @@ export default function SearchDetailDialog({
|
||||
{page == "video" && <VideoTab search={search} config={config} />}
|
||||
{page == "object lifecycle" && (
|
||||
<ObjectLifecycle
|
||||
className="w-full"
|
||||
className="w-full overflow-x-hidden"
|
||||
event={search as unknown as Event}
|
||||
fullscreen={true}
|
||||
setPane={() => {}}
|
||||
@ -226,10 +234,10 @@ function ObjectDetailsTab({
|
||||
|
||||
// data
|
||||
|
||||
const [desc, setDesc] = useState(search?.description);
|
||||
const [desc, setDesc] = useState(search?.data.description);
|
||||
|
||||
// we have to make sure the current selected search item stays in sync
|
||||
useEffect(() => setDesc(search?.description), [search]);
|
||||
useEffect(() => setDesc(search?.data.description ?? ""), [search]);
|
||||
|
||||
const formattedDate = useFormattedTimestamp(
|
||||
search?.start_time ?? 0,
|
||||
@ -279,7 +287,7 @@ function ObjectDetailsTab({
|
||||
toast.error("Failed to update the description", {
|
||||
position: "top-center",
|
||||
});
|
||||
setDesc(search.description);
|
||||
setDesc(search.data.description);
|
||||
});
|
||||
}, [desc, search]);
|
||||
|
||||
@ -342,8 +350,8 @@ function ObjectDetailsTab({
|
||||
<div className="flex flex-col gap-1.5">
|
||||
<div className="text-sm text-primary/40">Description</div>
|
||||
<Textarea
|
||||
className="md:h-64"
|
||||
placeholder="Description of the event"
|
||||
className="h-64"
|
||||
placeholder="Description of the tracked object"
|
||||
value={desc}
|
||||
onChange={(e) => setDesc(e.target.value)}
|
||||
/>
|
||||
@ -367,6 +375,11 @@ function VideoTab({ search, config }: VideoTabProps) {
|
||||
|
||||
const endTime = useMemo(() => search.end_time ?? Date.now() / 1000, [search]);
|
||||
|
||||
const navigate = useNavigate();
|
||||
const { data: reviewItem } = useSWR<ReviewSegment>([
|
||||
`review/event/${search.id}`,
|
||||
]);
|
||||
|
||||
const mainCameraAspect = useMemo(() => {
|
||||
const camera = config?.cameras?.[search.camera];
|
||||
|
||||
@ -416,8 +429,9 @@ function VideoTab({ search, config }: VideoTabProps) {
|
||||
}, [mainCameraAspect]);
|
||||
|
||||
return (
|
||||
<div className="relative flex flex-col">
|
||||
<div className={`aspect-video ${containerClassName}`}>
|
||||
{isLoading && (
|
||||
{(isLoading || !reviewItem) && (
|
||||
<ActivityIndicator className="absolute left-1/2 top-1/2 -translate-x-1/2 -translate-y-1/2" />
|
||||
)}
|
||||
<div className={videoClassName}>
|
||||
@ -433,5 +447,33 @@ function VideoTab({ search, config }: VideoTabProps) {
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
{!isLoading && reviewItem && (
|
||||
<div
|
||||
className={cn(
|
||||
"absolute top-2 flex items-center",
|
||||
isIOS ? "right-8" : "right-2",
|
||||
)}
|
||||
>
|
||||
<Tooltip>
|
||||
<TooltipTrigger>
|
||||
<Chip
|
||||
className="cursor-pointer rounded-md bg-gray-500 bg-gradient-to-br from-gray-400 to-gray-500"
|
||||
onClick={() => {
|
||||
if (reviewItem?.id) {
|
||||
const params = new URLSearchParams({
|
||||
id: reviewItem.id,
|
||||
}).toString();
|
||||
navigate(`/review?${params}`);
|
||||
}
|
||||
}}
|
||||
>
|
||||
<FaHistory className="size-4 text-white" />
|
||||
</Chip>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent side="left">View in History</TooltipContent>
|
||||
</Tooltip>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@ -59,6 +59,7 @@ export default function LivePlayer({
|
||||
onResetLiveMode,
|
||||
}: LivePlayerProps) {
|
||||
const internalContainerRef = useRef<HTMLDivElement | null>(null);
|
||||
|
||||
// camera activity
|
||||
|
||||
const { activeMotion, activeTracking, objects, offline } =
|
||||
@ -225,9 +226,11 @@ export default function LivePlayer({
|
||||
className,
|
||||
)}
|
||||
onClick={onClick}
|
||||
onAuxClick={() =>
|
||||
window.open(`${baseUrl}#${cameraConfig.name}`, "_blank")?.focus()
|
||||
onAuxClick={(e) => {
|
||||
if (e.button === 1) {
|
||||
window.open(`${baseUrl}#${cameraConfig.name}`, "_blank")?.focus();
|
||||
}
|
||||
}}
|
||||
>
|
||||
{((showStillWithoutActivity && !liveReady) || liveReady) && (
|
||||
<>
|
||||
@ -299,12 +302,16 @@ export default function LivePlayer({
|
||||
</div>
|
||||
|
||||
{offline && !showStillWithoutActivity && (
|
||||
<div className="flex size-full flex-col items-center">
|
||||
<p className="mb-5">
|
||||
{capitalizeFirstLetter(cameraConfig.name)} is offline
|
||||
</p>
|
||||
<div className="absolute inset-0 left-1/2 top-1/2 flex h-96 w-96 -translate-x-1/2 -translate-y-1/2">
|
||||
<div className="flex flex-col items-center justify-center rounded-lg bg-background/50 p-5">
|
||||
<p className="my-5 text-lg">Stream offline</p>
|
||||
<TbExclamationCircle className="mb-3 size-10" />
|
||||
<p>No frames have been received, check error logs</p>
|
||||
<p className="max-w-96 text-center">
|
||||
No frames have been received on the{" "}
|
||||
{capitalizeFirstLetter(cameraConfig.name)} <code>detect</code>{" "}
|
||||
stream, check error logs
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
|
||||
@ -177,9 +177,11 @@ export default function PreviewThumbnailPlayer({
|
||||
onMouseOver={isMobile ? undefined : () => setIsHovered(true)}
|
||||
onMouseLeave={isMobile ? undefined : () => setIsHovered(false)}
|
||||
onClick={handleOnClick}
|
||||
onAuxClick={() =>
|
||||
window.open(`${baseUrl}review?id=${review.id}`, "_blank")?.focus()
|
||||
onAuxClick={(e) => {
|
||||
if (e.button === 1) {
|
||||
window.open(`${baseUrl}review?id=${review.id}`, "_blank")?.focus();
|
||||
}
|
||||
}}
|
||||
{...swipeHandlers}
|
||||
>
|
||||
{playingBack && (
|
||||
|
||||
@ -315,7 +315,7 @@ export function DateRangePicker({
|
||||
|
||||
return (
|
||||
<div className="w-full">
|
||||
<div className="flex py-2">
|
||||
<div className="flex flex-row items-start justify-center py-2">
|
||||
<div className="flex">
|
||||
<div className="flex flex-col">
|
||||
<div className="flex flex-col items-center justify-end gap-2 px-3 pb-4 lg:flex-row lg:items-start lg:pb-0">
|
||||
|
||||
@ -69,7 +69,11 @@ export function useApiFilterArgs<
|
||||
filter[key] = value.includes(",") ? value.split(",") : [value];
|
||||
} else {
|
||||
if (value != undefined) {
|
||||
try {
|
||||
filter[key] = JSON.parse(value);
|
||||
} catch {
|
||||
filter[key] = `${value}`;
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
import ActivityIndicator from "@/components/indicators/activity-indicator";
|
||||
import { useApiFilterArgs } from "@/hooks/use-api-filter";
|
||||
import useApiFilter from "@/hooks/use-api-filter";
|
||||
import { useCameraPreviews } from "@/hooks/use-camera-previews";
|
||||
import { useTimezone } from "@/hooks/use-date-utils";
|
||||
import { useOverlayState, useSearchEffect } from "@/hooks/use-overlay-state";
|
||||
@ -71,7 +71,31 @@ export default function Events() {
// review filter

const [reviewFilter, setReviewFilter, reviewSearchParams] =
useApiFilterArgs<ReviewFilter>();
useApiFilter<ReviewFilter>();

useSearchEffect("cameras", (cameras: string) => {
setReviewFilter({
...reviewFilter,
cameras: cameras.includes(",") ? cameras.split(",") : [cameras],
});
return true;
});

useSearchEffect("labels", (labels: string) => {
setReviewFilter({
...reviewFilter,
labels: labels.includes(",") ? labels.split(",") : [labels],
});
return true;
});

useSearchEffect("zones", (zones: string) => {
setReviewFilter({
...reviewFilter,
zones: zones.includes(",") ? zones.split(",") : [zones],
});
return true;
});

useSearchEffect("group", (reviewGroup) => {
if (config && reviewGroup && reviewGroup != "default") {

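The cameras, labels, and zones effects above repeat the same comma-split pattern; a small sketch of that pattern in isolation (helper name is hypothetical, not in the diff):

// Hypothetical helper showing the shared rule: "front_door,back_yard" becomes
// ["front_door", "back_yard"], while a single value still becomes a one-element array.
const toArrayParam = (value: string): string[] =>
  value.includes(",") ? value.split(",") : [value];
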
@ -3,12 +3,15 @@ import { useCameraPreviews } from "@/hooks/use-camera-previews";
import { useOverlayState, useSearchEffect } from "@/hooks/use-overlay-state";
import { FrigateConfig } from "@/types/frigateConfig";
import { RecordingStartingPoint } from "@/types/record";
import { SearchFilter, SearchResult } from "@/types/search";
import { SearchFilter, SearchQuery, SearchResult } from "@/types/search";
import { TimeRange } from "@/types/timeline";
import { RecordingView } from "@/views/recording/RecordingView";
import SearchView from "@/views/search/SearchView";
import { useCallback, useEffect, useMemo, useState } from "react";
import useSWR from "swr";
import useSWRInfinite from "swr/infinite";

const API_LIMIT = 25;

export default function Explore() {
const { data: config } = useSWR<FrigateConfig>("config", {

@ -61,7 +64,7 @@ export default function Explore() {
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [search]);

const searchQuery = useMemo(() => {
const searchQuery: SearchQuery = useMemo(() => {
if (similaritySearch) {
return [
"events/search",

@ -107,7 +110,8 @@ export default function Explore() {
before: searchSearchParams["before"],
after: searchSearchParams["after"],
search_type: searchSearchParams["search_type"],
limit: Object.keys(searchSearchParams).length == 0 ? 20 : null,
limit:
Object.keys(searchSearchParams).length == 0 ? API_LIMIT : undefined,
in_progress: 0,
include_thumbnails: 0,
},

@ -117,8 +121,66 @@ export default function Explore() {
return null;
}, [searchTerm, searchSearchParams, similaritySearch]);

const { data: searchResults, isLoading } =
useSWR<SearchResult[]>(searchQuery);
// paging

const getKey = (
pageIndex: number,
previousPageData: SearchResult[] | null,
): SearchQuery => {
if (previousPageData && !previousPageData.length) return null; // reached the end
if (!searchQuery) return null;

const [url, params] = searchQuery;

// If it's not the first page, use the last item's start_time as the 'before' parameter
if (pageIndex > 0 && previousPageData) {
const lastDate = previousPageData[previousPageData.length - 1].start_time;
return [
url,
{ ...params, before: lastDate.toString(), limit: API_LIMIT },
];
}

// For the first page, use the original params
return [url, { ...params, limit: API_LIMIT }];
};

const { data, size, setSize, isValidating } = useSWRInfinite<SearchResult[]>(
getKey,
{
revalidateFirstPage: false,
revalidateAll: false,
},
);

const searchResults = useMemo(
() => (data ? ([] as SearchResult[]).concat(...data) : []),
[data],
);
const isLoadingInitialData = !data && !isValidating;
const isLoadingMore =
isLoadingInitialData ||
(size > 0 && data && typeof data[size - 1] === "undefined");
const isEmpty = data?.[0]?.length === 0;
const isReachingEnd =
isEmpty || (data && data[data.length - 1]?.length < API_LIMIT);

const loadMore = useCallback(() => {
if (!isReachingEnd && !isLoadingMore) {
if (searchQuery) {
const [url] = searchQuery;

// for chroma, only load 100 results for description and similarity
if (url === "events/search" && searchResults.length >= 100) {
return;
}
}

setSize(size + 1);
}
}, [isReachingEnd, isLoadingMore, setSize, size, searchResults, searchQuery]);

// previews

const previewTimeRange = useMemo<TimeRange>(() => {
if (!searchResults) {

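For context, a standalone sketch (names and timestamps are illustrative, not from the diff) of the cursor rule getKey applies above: the first page keeps the original params, and every later page passes the previous page's last start_time as the before parameter, so paging is cursor-based rather than offset-based. A page shorter than API_LIMIT marks the end of the results.

// Illustrative only: the "next cursor" is the last item's start_time, stringified.
type Page = { start_time: number }[];
const nextCursor = (previousPage: Page | null): string | undefined =>
  previousPage && previousPage.length > 0
    ? previousPage[previousPage.length - 1].start_time.toString()
    : undefined;
// nextCursor([{ start_time: 1725455600.7 }]) === "1725455600.7"
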
@ -212,11 +274,13 @@ export default function Explore() {
searchTerm={searchTerm}
searchFilter={searchFilter}
searchResults={searchResults}
isLoading={isLoading}
isLoading={(isLoadingInitialData || isLoadingMore) ?? true}
setSearch={setSearch}
setSimilaritySearch={(search) => setSearch(`similarity:${search.id}`)}
onUpdateFilter={setSearchFilter}
onOpenSearch={onOpenSearch}
loadMore={loadMore}
hasMore={!isReachingEnd}
/>
);
}

@ -3,7 +3,6 @@ export type SearchSource = "similarity" | "thumbnail" | "description";
export type SearchResult = {
id: string;
camera: string;
description?: string;
start_time: number;
end_time?: number;
score: number;
@ -25,6 +24,7 @@ export type SearchResult = {
area: number;
ratio: number;
type: "object" | "audio" | "manual";
description?: string;
};
};

@ -38,3 +38,20 @@ export type SearchFilter = {
search_type?: SearchSource[];
event_id?: string;
};

export type SearchQueryParams = {
cameras?: string[];
labels?: string[];
sub_labels?: string[];
zones?: string[];
before?: string;
after?: string;
search_type?: string;
limit?: number;
in_progress?: number;
include_thumbnails?: number;
query?: string;
page?: number;
};

export type SearchQuery = [string, SearchQueryParams] | null;

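A short usage sketch of the new tuple type (endpoint path and values are illustrative, not taken from the diff): a SearchQuery pairs an API path with its query params, and null means there is nothing to fetch yet.

// Illustrative only: a query filtered to one camera, and the "no query" case.
const exampleQuery: SearchQuery = [
  "events",
  { cameras: ["front_door"], limit: 25, in_progress: 0 },
];
const idleQuery: SearchQuery = null; // no search term or filter selected yet
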
@ -94,7 +94,7 @@ function ThumbnailRow({
};

return (
<div className="rounded-lg bg-background_alt p-2 md:p-4">
<div className="rounded-lg bg-background_alt p-2 md:px-4">
<div className="text-lg capitalize">
{objectType.replaceAll("_", " ")}
{searchResults && (

@ -33,6 +33,8 @@ type SearchViewProps = {
setSimilaritySearch: (search: SearchResult) => void;
onUpdateFilter: (filter: SearchFilter) => void;
onOpenSearch: (item: SearchResult) => void;
loadMore: () => void;
hasMore: boolean;
};
export default function SearchView({
search,
@ -43,6 +45,8 @@ export default function SearchView({
setSearch,
setSimilaritySearch,
onUpdateFilter,
loadMore,
hasMore,
}: SearchViewProps) {
const { data: config } = useSWR<FrigateConfig>("config", {
revalidateOnFocus: false,

@ -143,7 +147,37 @@ export default function SearchView({
scrollMode: "if-needed",
});
}
}, [selectedIndex, uniqueResults]);
// we only want to scroll when the index changes
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [selectedIndex]);

// observer for loading more

const observerTarget = useRef<HTMLDivElement>(null);
const observerRef = useRef<IntersectionObserver | null>(null);

useEffect(() => {
const observer = new IntersectionObserver(
(entries) => {
if (entries[0].isIntersecting && hasMore && !isLoading) {
loadMore();
}
},
{ threshold: 1.0 },
);

if (observerTarget.current) {
observer.observe(observerTarget.current);
}

observerRef.current = observer;

return () => {
if (observerRef.current) {
observerRef.current.disconnect();
}
};
}, [hasMore, isLoading, loadMore]);

return (
<div className="flex size-full flex-col pt-2 md:py-2">

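For context, a minimal standalone sketch (hook name is illustrative, not part of the diff) of the infinite-scroll pattern used above: a sentinel element near the end of the results is observed, and when it becomes fully visible the load-more callback fires; the observerTarget div itself is rendered at the end of the grid in the next hunk.

import { useEffect, useRef } from "react";

// Illustrative standalone hook, assuming the same sentinel approach as above.
function useLoadMoreSentinel(onVisible: () => void) {
  const sentinelRef = useRef<HTMLDivElement>(null);
  useEffect(() => {
    const el = sentinelRef.current;
    if (!el) return;
    const observer = new IntersectionObserver(
      (entries) => {
        if (entries[0].isIntersecting) onVisible();
      },
      { threshold: 1.0 },
    );
    observer.observe(el);
    return () => observer.disconnect();
  }, [onVisible]);
  return sentinelRef; // attach to a <div ref={sentinelRef} /> after the list
}
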
@ -199,20 +233,23 @@ export default function SearchView({
)}
</div>

<div className="no-scrollbar flex flex-1 flex-wrap content-start gap-2 overflow-y-auto md:gap-4">
{searchTerm.length > 0 && searchResults?.length == 0 && (
<div className="no-scrollbar flex flex-1 flex-wrap content-start gap-2 overflow-y-auto">
{uniqueResults?.length == 0 && !isLoading && (
<div className="absolute left-1/2 top-1/2 flex -translate-x-1/2 -translate-y-1/2 flex-col items-center justify-center text-center">
<LuSearchX className="size-16" />
No Tracked Objects Found
</div>
)}

{isLoading && (
{uniqueResults?.length == 0 &&
isLoading &&
(searchTerm ||
(searchFilter && Object.keys(searchFilter).length !== 0)) && (
<ActivityIndicator className="absolute left-1/2 top-1/2 -translate-x-1/2 -translate-y-1/2" />
)}

{uniqueResults && (
<div className="mt-2 grid w-full gap-2 px-1 sm:grid-cols-2 md:mx-2 md:grid-cols-4 md:gap-4 3xl:grid-cols-6">
<div className="grid w-full gap-2 px-1 sm:grid-cols-2 md:mx-2 md:grid-cols-4 md:gap-4 3xl:grid-cols-6">
{uniqueResults &&
uniqueResults.map((value, index) => {
const selected = selectedIndex === index;
@ -273,12 +310,22 @@ export default function SearchView({
})}
</div>
)}
{!uniqueResults && !isLoading && (
{uniqueResults && uniqueResults.length > 0 && (
<>
<div ref={observerTarget} className="h-10 w-full" />
<div className="flex h-12 w-full justify-center">
{hasMore && isLoading && <ActivityIndicator />}
</div>
</>
)}
</div>
{searchFilter &&
Object.keys(searchFilter).length === 0 &&
!searchTerm && (
<div className="scrollbar-container flex size-full flex-col overflow-y-auto">
<ExploreView onSelectSearch={onSelectSearch} />
</div>
)}
</div>
</div>
);
}
