Compare commits

4 Commits

Author SHA1 Message Date
dependabot[bot]
c6bb279e40
Merge f01dd335c2 into 28b0ad782a 2025-12-08 00:44:37 -05:00
Josh Hawkins
28b0ad782a
Fix intermittent hangs in Tracking Details videos (#21185)
* remove extra gap controller overrides

* new vod endpoint for clips to set discontinuity

ensure tracking-detail playlists emit #EXT-X-DISCONTINUITY (avoids fMP4 timestamp rewrites and playback stalls) while leaving standard recordings behavior unchanged

* use new endpoint
2025-12-07 12:58:33 -06:00
GuoQing Liu
644c7fa6b4
fix: fix classification missing i18n (#21179) 2025-12-07 11:35:48 -07:00
Josh Hawkins
88a8de0b1c
Miscellaneous Fixes (#21166)
* Improve model titles

* remove deprecated strftime_fmt

* remove

* remove restart wording

* add copilot instructions

* fix docs

* Move files into try for classification rollover

* Use friendly names for zones in notifications

---------

Co-authored-by: Nicolas Mowen <nickmowen213@gmail.com>
2025-12-07 07:57:46 -07:00
19 changed files with 111 additions and 56 deletions

.github/copilot-instructions.md (new file)

@@ -0,0 +1,2 @@
+Never write strings in the frontend directly, always write to and reference the relevant translations file.
+Always conform new and refactored code to the existing coding style in the project.
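A minimal sketch of the first rule in practice, not code from this changeset: the component name and file layout are hypothetical, while the "selected" key and "views/event" namespace are taken from the FaceLibrary and ModelTrainingView changes further down in this diff, which use react-i18next the same way.

// Hypothetical component illustrating the rule above: no hardcoded UI strings.
// The label (including plural forms) lives in the views/event translations file.
import { useTranslation } from "react-i18next";

type SelectionBadgeProps = { count: number };

export function SelectionBadge({ count }: SelectionBadgeProps) {
  // t() resolves "selected" from the namespace file, e.g. "3 selected"
  const { t } = useTranslation("views/event");
  return <div className="p-1">{t("selected", { count })}</div>;
}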


@@ -116,4 +116,4 @@ Along with individual review item summaries, Generative AI provides the ability
 Review reports can be requested via the [API](/integrations/api#review-summarization) by sending a POST request to `/api/review/summarize/start/{start_ts}/end/{end_ts}` with Unix timestamps.
-For Home Assistant users, there is a built-in service (`frigate.generate_review_summary`) that makes it easy to request review reports as part of automations or scripts. This allows you to automatically generate daily summaries, vacation reports, or custom time period reports based on your specific needs.
+For Home Assistant users, there is a built-in service (`frigate.review_summarize`) that makes it easy to request review reports as part of automations or scripts. This allows you to automatically generate daily summaries, vacation reports, or custom time period reports based on your specific needs.


@@ -28,7 +28,6 @@ To create a poly mask:
 5. Click the plus icon under the type of mask or zone you would like to create
 6. Click on the camera's latest image to create the points for a masked area. Click the first point again to close the polygon.
 7. When you've finished creating your mask, press Save.
-8. Restart Frigate to apply your changes.
 Your config file will be updated with the relative coordinates of the mask/zone:


@@ -1002,10 +1002,6 @@ ui:
   # full: 8:15:22 PM Mountain Standard Time
   # (default: shown below).
   time_style: medium
-  # Optional: Ability to manually override the date / time styling to use strftime format
-  # https://www.gnu.org/software/libc/manual/html_node/Formatting-Calendar-Time.html
-  # possible values are shown above (default: not set)
-  strftime_fmt: "%Y/%m/%d %H:%M"
   # Optional: Set the unit system to either "imperial" or "metric" (default: metric)
   # Used in the UI and in MQTT topics
   unit_system: metric


@@ -837,7 +837,19 @@ async def recording_clip(
     dependencies=[Depends(require_camera_access)],
     description="Returns an HLS playlist for the specified timestamp-range on the specified camera. Append /master.m3u8 or /index.m3u8 for HLS playback.",
 )
-async def vod_ts(camera_name: str, start_ts: float, end_ts: float):
+async def vod_ts(
+    camera_name: str,
+    start_ts: float,
+    end_ts: float,
+    force_discontinuity: bool = False,
+):
+    logger.debug(
+        "VOD: Generating VOD for %s from %s to %s with force_discontinuity=%s",
+        camera_name,
+        start_ts,
+        end_ts,
+        force_discontinuity,
+    )
     recordings = (
         Recordings.select(
             Recordings.path,
@@ -862,6 +874,14 @@ async def vod_ts(camera_name: str, start_ts: float, end_ts: float):
     recording: Recordings
     for recording in recordings:
+        logger.debug(
+            "VOD: processing recording: %s start=%s end=%s duration=%s",
+            recording.path,
+            recording.start_time,
+            recording.end_time,
+            recording.duration,
+        )
         clip = {"type": "source", "path": recording.path}
         duration = int(recording.duration * 1000)
@@ -870,6 +890,11 @@ async def vod_ts(camera_name: str, start_ts: float, end_ts: float):
             inpoint = int((start_ts - recording.start_time) * 1000)
             clip["clipFrom"] = inpoint
             duration -= inpoint
+            logger.debug(
+                "VOD: applied clipFrom %sms to %s",
+                inpoint,
+                recording.path,
+            )
         # adjust end if recording.end_time is after end_ts
         if recording.end_time > end_ts:
@@ -877,12 +902,23 @@ async def vod_ts(camera_name: str, start_ts: float, end_ts: float):
         if duration < min_duration_ms:
             # skip if the clip has no valid duration (too short to contain frames)
+            logger.debug(
+                "VOD: skipping recording %s - resulting duration %sms too short",
+                recording.path,
+                duration,
+            )
             continue

         if min_duration_ms <= duration < max_duration_ms:
             clip["keyFrameDurations"] = [duration]
             clips.append(clip)
             durations.append(duration)
+            logger.debug(
+                "VOD: added clip %s duration_ms=%s clipFrom=%s",
+                recording.path,
+                duration,
+                clip.get("clipFrom"),
+            )
         else:
             logger.warning(f"Recording clip is missing or empty: {recording.path}")
@@ -902,7 +938,7 @@ async def vod_ts(camera_name: str, start_ts: float, end_ts: float):
     return JSONResponse(
         content={
             "cache": hour_ago.timestamp() > start_ts,
-            "discontinuity": False,
+            "discontinuity": force_discontinuity,
             "consistentSequenceMediaInfo": True,
             "durations": durations,
             "segment_duration": max(durations),
@@ -986,6 +1022,19 @@ async def vod_event(
     return vod_response

+@router.get(
+    "/vod/clip/{camera_name}/start/{start_ts}/end/{end_ts}",
+    dependencies=[Depends(require_camera_access)],
+    description="Returns an HLS playlist for a timestamp range with HLS discontinuity enabled. Append /master.m3u8 or /index.m3u8 for HLS playback.",
+)
+async def vod_clip(
+    camera_name: str,
+    start_ts: float,
+    end_ts: float,
+):
+    return await vod_ts(camera_name, start_ts, end_ts, force_discontinuity=True)

 @router.get(
     "/events/{event_id}/snapshot.jpg",
     description="Returns a snapshot image for the specified object id. NOTE: The query params only take affect while the event is in-progress. Once the event has ended the snapshot configuration is used.",


@@ -390,7 +390,20 @@ class WebPushClient(Communicator):
             message = payload["after"]["data"]["metadata"]["scene"]
         else:
-            title = f"{titlecase(', '.join(sorted_objects).replace('_', ' '))}{' was' if state == 'end' else ''} detected in {titlecase(', '.join(payload['after']['data']['zones']).replace('_', ' '))}"
+            zone_names = payload["after"]["data"]["zones"]
+            formatted_zone_names = []
+            for zone_name in zone_names:
+                if zone_name in self.config.cameras[camera].zones:
+                    formatted_zone_names.append(
+                        self.config.cameras[camera]
+                        .zones[zone_name]
+                        .get_formatted_name(zone_name)
+                    )
+                else:
+                    formatted_zone_names.append(titlecase(zone_name.replace("_", " ")))
+            title = f"{titlecase(', '.join(sorted_objects).replace('_', ' '))}{' was' if state == 'end' else ''} detected in {', '.join(formatted_zone_names)}"
             message = f"Detected on {camera_name}"

         if ended:


@@ -37,9 +37,6 @@ class UIConfig(FrigateBaseModel):
     time_style: DateTimeStyleEnum = Field(
         default=DateTimeStyleEnum.medium, title="Override UI timeStyle."
     )
-    strftime_fmt: Optional[str] = Field(
-        default=None, title="Override date and time format using strftime syntax."
-    )
     unit_system: UnitSystemEnum = Field(
         default=UnitSystemEnum.metric, title="The unit system to use for measurements."
     )


@@ -639,14 +639,14 @@ def write_classification_attempt(
     os.makedirs(folder, exist_ok=True)
     cv2.imwrite(file, frame)

-    files = sorted(
-        filter(lambda f: (f.endswith(".webp")), os.listdir(folder)),
-        key=lambda f: os.path.getctime(os.path.join(folder, f)),
-        reverse=True,
-    )
-
     # delete oldest face image if maximum is reached
     try:
+        files = sorted(
+            filter(lambda f: (f.endswith(".webp")), os.listdir(folder)),
+            key=lambda f: os.path.getctime(os.path.join(folder, f)),
+            reverse=True,
+        )
         if len(files) > max_files:
             os.unlink(os.path.join(folder, files[-1]))
     except FileNotFoundError:


@@ -13,11 +13,8 @@
     "time_style": {
       "label": "Override UI timeStyle."
     },
-    "strftime_fmt": {
-      "label": "Override date and time format using strftime syntax."
-    },
     "unit_system": {
       "label": "The unit system to use for measurements."
     }
   }
 }


@@ -1,5 +1,5 @@
 {
-  "documentTitle": "Classification Models",
+  "documentTitle": "Classification Models - Frigate",
   "details": {
     "scoreInfo": "Score represents the average classification confidence across all detections of this object."
   },
@@ -83,6 +83,7 @@
     "aria": "Select Recent Classifications"
   },
   "categories": "Classes",
+  "none": "None",
   "createCategory": {
     "new": "Create New Class"
   },


@@ -77,7 +77,7 @@
     "millisecondsToOffset": "Milliseconds to offset detect annotations by. <em>Default: 0</em>",
     "tips": "Lower the value if the video playback is ahead of the boxes and path points, and increase the value if the video playback is behind them. This value can be negative.",
     "toast": {
-      "success": "Annotation offset for {{camera}} has been saved to the config file. Restart Frigate to apply your changes."
+      "success": "Annotation offset for {{camera}} has been saved to the config file."
     }
   }
 },


@@ -534,7 +534,7 @@
       }
     },
     "toast": {
-      "success": "Zone ({{zoneName}}) has been saved. Restart Frigate to apply changes."
+      "success": "Zone ({{zoneName}}) has been saved."
     }
   },
   "motionMasks": {
@@ -558,8 +558,8 @@
     },
     "toast": {
       "success": {
-        "title": "{{polygonName}} has been saved. Restart Frigate to apply changes.",
-        "noName": "Motion Mask has been saved. Restart Frigate to apply changes."
+        "title": "{{polygonName}} has been saved.",
+        "noName": "Motion Mask has been saved."
       }
     }
   },
@@ -583,8 +583,8 @@
     },
     "toast": {
       "success": {
-        "title": "{{polygonName}} has been saved. Restart Frigate to apply changes.",
-        "noName": "Object Mask has been saved. Restart Frigate to apply changes."
+        "title": "{{polygonName}} has been saved.",
+        "noName": "Object Mask has been saved."
       }
     }
   }


@@ -131,7 +131,9 @@ export default function ClassificationSelectionDialog({
               className="flex cursor-pointer gap-2 smart-capitalize"
               onClick={() => onCategorizeImage(category)}
             >
-              {category.replaceAll("_", " ")}
+              {category === "none"
+                ? t("none")
+                : category.replaceAll("_", " ")}
             </SelectorItem>
           ))}
           <Separator />


@@ -446,7 +446,7 @@ export function TrackingDetails({
       (event.end_time ?? Date.now() / 1000) + annotationOffset / 1000;
     const startTime = eventStartRecord - REVIEW_PADDING;
     const endTime = eventEndRecord + REVIEW_PADDING;
-    const playlist = `${baseUrl}vod/${event.camera}/start/${startTime}/end/${endTime}/index.m3u8`;
+    const playlist = `${baseUrl}vod/clip/${event.camera}/start/${startTime}/end/${endTime}/index.m3u8`;

     return {
       playlist,
@@ -559,7 +559,6 @@ export function TrackingDetails({
           isDetailMode={true}
           camera={event.camera}
           currentTimeOverride={currentTime}
-          enableGapControllerRecovery={true}
         />
         {isVideoLoading && (
           <ActivityIndicator className="absolute left-1/2 top-1/2 -translate-x-1/2 -translate-y-1/2" />


@@ -180,7 +180,9 @@ export function ClassFilterContent({
         {allClasses.map((item) => (
           <FilterSwitch
             key={item}
-            label={item.replaceAll("_", " ")}
+            label={
+              item === "none" ? t("none") : item.replaceAll("_", " ")
+            }
             isChecked={classes?.includes(item) ?? false}
             onCheckedChange={(isChecked) => {
               if (isChecked) {


@@ -57,7 +57,6 @@ type HlsVideoPlayerProps = {
   isDetailMode?: boolean;
   camera?: string;
   currentTimeOverride?: number;
-  enableGapControllerRecovery?: boolean;
 };

 export default function HlsVideoPlayer({
@@ -82,7 +81,6 @@ export default function HlsVideoPlayer({
   isDetailMode = false,
   camera,
   currentTimeOverride,
-  enableGapControllerRecovery = false,
 }: HlsVideoPlayerProps) {
   const { t } = useTranslation("components/player");
   const { data: config } = useSWR<FrigateConfig>("config");
@@ -173,21 +171,12 @@ export default function HlsVideoPlayer({
     }

     // Base HLS configuration
-    const baseConfig: Partial<HlsConfig> = {
+    const hlsConfig: Partial<HlsConfig> = {
       maxBufferLength: 10,
       maxBufferSize: 20 * 1000 * 1000,
       startPosition: currentSource.startPosition,
     };

-    const hlsConfig = { ...baseConfig };
-
-    if (enableGapControllerRecovery) {
-      hlsConfig.highBufferWatchdogPeriod = 1; // Check for stalls every 1 second (default: 3)
-      hlsConfig.nudgeOffset = 0.2; // Nudge playhead forward 0.2s when stalled (default: 0.1)
-      hlsConfig.nudgeMaxRetry = 5; // Try up to 5 nudges before giving up (default: 3)
-      hlsConfig.maxBufferHole = 0.5; // Tolerate up to 0.5s gaps between fragments (default: 0.1)
-    }
-
     hlsRef.current = new Hls(hlsConfig);
     hlsRef.current.attachMedia(videoRef.current);
     hlsRef.current.loadSource(currentSource.playlist);
@@ -201,13 +190,7 @@ export default function HlsVideoPlayer({
         hlsRef.current.destroy();
       }
     };
-  }, [
-    videoRef,
-    hlsRef,
-    useHlsCompat,
-    currentSource,
-    enableGapControllerRecovery,
-  ]);
+  }, [videoRef, hlsRef, useHlsCompat, currentSource]);

   // state handling


@@ -371,7 +371,12 @@ export default function FaceLibrary() {
         {selectedFaces?.length > 0 ? (
           <div className="flex items-center justify-center gap-2">
             <div className="mx-1 flex w-48 items-center justify-center text-sm text-muted-foreground">
-              <div className="p-1">{`${selectedFaces.length} selected`}</div>
+              <div className="p-1">
+                {t("selected", {
+                  ns: "views/event",
+                  count: selectedFaces.length,
+                })}
+              </div>
               <div className="p-1">{"|"}</div>
               <div
                 className="cursor-pointer p-2 text-primary hover:rounded-lg hover:bg-secondary"


@@ -6,7 +6,6 @@ export interface UiConfig {
   time_format?: "browser" | "12hour" | "24hour";
   date_style?: "full" | "long" | "medium" | "short";
   time_style?: "full" | "long" | "medium" | "short";
-  strftime_fmt?: string;
   dashboard: boolean;
   order: number;
   unit_system?: "metric" | "imperial";


@@ -84,6 +84,12 @@ export default function ModelTrainingView({ model }: ModelTrainingViewProps) {
   const [page, setPage] = useState<string>("train");
   const [pageToggle, setPageToggle] = useOptimisticState(page, setPage, 100);

+  // title
+  useEffect(() => {
+    document.title = `${model.name} - ${t("documentTitle")}`;
+  }, [model.name, t]);
+
   // model state
   const [wasTraining, setWasTraining] = useState(false);
@@ -416,7 +422,12 @@ export default function ModelTrainingView({ model }: ModelTrainingViewProps) {
         )}
       >
         <div className="flex w-48 items-center justify-center text-sm text-muted-foreground">
-          <div className="p-1">{`${selectedImages.length} selected`}</div>
+          <div className="p-1">
+            {t("selected", {
+              ns: "views/event",
+              count: selectedImages.length,
+            })}
+          </div>
           <div className="p-1">{"|"}</div>
           <div
             className="cursor-pointer p-2 text-primary hover:rounded-lg hover:bg-secondary"
@@ -676,7 +687,7 @@ function LibrarySelector({
             className="flex-grow cursor-pointer capitalize"
             onClick={() => setPageToggle(id)}
           >
-            {id.replaceAll("_", " ")}
+            {id === "none" ? t("none") : id.replaceAll("_", " ")}
             <span className="ml-2 text-muted-foreground">
               ({dataset?.[id].length})
             </span>