Compare commits

..

7 Commits

Author SHA1 Message Date
Nicolas Mowen
0140adf8e1 Fix check for audio activity to keep a segment 2025-12-15 07:35:55 -07:00
Josh Hawkins
0a91888faa ensure python defs match openapi spec for auth endpoints 2025-12-15 08:32:38 -06:00
Nicolas Mowen
fb88d37c56 Consider audio activity when deciding if recording segments should be kept due to motion 2025-12-15 07:12:14 -07:00
Josh Hawkins
60116214bb fix object mask creation 2025-12-15 07:32:53 -06:00
Nicolas Mowen
202cf5ce89 Clarify ROCm enrichments 2025-12-15 06:06:15 -07:00
Josh Hawkins
0231d4474c clarify auth endpoint return in openapi schema 2025-12-14 21:42:53 -06:00
Nicolas Mowen
9e99cec52e Exclude yolov9 license plate from migraphx runner 2025-12-14 20:38:49 -07:00
4 changed files with 37 additions and 97 deletions

View File

@ -22,7 +22,6 @@ from frigate.const import (
from frigate.log import redirect_output_to_logger from frigate.log import redirect_output_to_logger
from frigate.models import Event, Recordings, ReviewSegment from frigate.models import Event, Recordings, ReviewSegment
from frigate.types import ModelStatusTypesEnum from frigate.types import ModelStatusTypesEnum
from frigate.util.downloader import ModelDownloader
from frigate.util.file import get_event_thumbnail_bytes from frigate.util.file import get_event_thumbnail_bytes
from frigate.util.image import get_image_from_recording from frigate.util.image import get_image_from_recording
from frigate.util.process import FrigateProcess from frigate.util.process import FrigateProcess
@ -122,10 +121,6 @@ def get_dataset_image_count(model_name: str) -> int:
class ClassificationTrainingProcess(FrigateProcess): class ClassificationTrainingProcess(FrigateProcess):
def __init__(self, model_name: str) -> None: def __init__(self, model_name: str) -> None:
self.BASE_WEIGHT_URL = os.environ.get(
"TF_KERAS_MOBILENET_V2_WEIGHTS_URL",
"",
)
super().__init__( super().__init__(
stop_event=None, stop_event=None,
priority=PROCESS_PRIORITY_LOW, priority=PROCESS_PRIORITY_LOW,
@ -184,23 +179,11 @@ class ClassificationTrainingProcess(FrigateProcess):
) )
return False return False
weights_path = "imagenet"
# Download MobileNetV2 weights if not present
if self.BASE_WEIGHT_URL:
weights_path = os.path.join(
MODEL_CACHE_DIR, "MobileNet", "mobilenet_v2_weights.h5"
)
if not os.path.exists(weights_path):
logger.info("Downloading MobileNet V2 weights file")
ModelDownloader.download_from_url(
self.BASE_WEIGHT_URL, weights_path
)
# Start with imagenet base model with 35% of channels in each layer # Start with imagenet base model with 35% of channels in each layer
base_model = MobileNetV2( base_model = MobileNetV2(
input_shape=(224, 224, 3), input_shape=(224, 224, 3),
include_top=False, include_top=False,
weights=weights_path, weights="imagenet",
alpha=0.35, alpha=0.35,
) )
base_model.trainable = False # Freeze pre-trained layers base_model.trainable = False # Freeze pre-trained layers
@ -499,10 +482,6 @@ def _extract_keyframes(
""" """
Extract keyframes from recordings at specified timestamps and crop to specified regions. Extract keyframes from recordings at specified timestamps and crop to specified regions.
This implementation batches work by running multiple ffmpeg snapshot commands
concurrently, which significantly reduces total runtime compared to
processing each timestamp serially.
Args: Args:
ffmpeg_path: Path to ffmpeg binary ffmpeg_path: Path to ffmpeg binary
timestamps: List of timestamp dicts from _select_balanced_timestamps timestamps: List of timestamp dicts from _select_balanced_timestamps
@ -512,21 +491,15 @@ def _extract_keyframes(
Returns: Returns:
List of paths to successfully extracted and cropped keyframe images List of paths to successfully extracted and cropped keyframe images
""" """
from concurrent.futures import ThreadPoolExecutor, as_completed keyframe_paths = []
if not timestamps: for idx, ts_info in enumerate(timestamps):
return []
# Limit the number of concurrent ffmpeg processes so we don't overload the host.
max_workers = min(5, len(timestamps))
def _process_timestamp(idx: int, ts_info: dict) -> tuple[int, str | None]:
camera = ts_info["camera"] camera = ts_info["camera"]
timestamp = ts_info["timestamp"] timestamp = ts_info["timestamp"]
if camera not in camera_crops: if camera not in camera_crops:
logger.warning(f"No crop coordinates for camera {camera}") logger.warning(f"No crop coordinates for camera {camera}")
return idx, None continue
norm_x1, norm_y1, norm_x2, norm_y2 = camera_crops[camera] norm_x1, norm_y1, norm_x2, norm_y2 = camera_crops[camera]
@ -543,7 +516,7 @@ def _extract_keyframes(
.get() .get()
) )
except Exception: except Exception:
return idx, None continue
relative_time = timestamp - recording.start_time relative_time = timestamp - recording.start_time
@ -557,57 +530,38 @@ def _extract_keyframes(
height=None, height=None,
) )
if not image_data: if image_data:
return idx, None nparr = np.frombuffer(image_data, np.uint8)
img = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
nparr = np.frombuffer(image_data, np.uint8) if img is not None:
img = cv2.imdecode(nparr, cv2.IMREAD_COLOR) height, width = img.shape[:2]
if img is None: x1 = int(norm_x1 * width)
return idx, None y1 = int(norm_y1 * height)
x2 = int(norm_x2 * width)
y2 = int(norm_y2 * height)
height, width = img.shape[:2] x1_clipped = max(0, min(x1, width))
y1_clipped = max(0, min(y1, height))
x2_clipped = max(0, min(x2, width))
y2_clipped = max(0, min(y2, height))
x1 = int(norm_x1 * width) if x2_clipped > x1_clipped and y2_clipped > y1_clipped:
y1 = int(norm_y1 * height) cropped = img[y1_clipped:y2_clipped, x1_clipped:x2_clipped]
x2 = int(norm_x2 * width) resized = cv2.resize(cropped, (224, 224))
y2 = int(norm_y2 * height)
x1_clipped = max(0, min(x1, width)) output_path = os.path.join(output_dir, f"frame_{idx:04d}.jpg")
y1_clipped = max(0, min(y1, height)) cv2.imwrite(output_path, resized)
x2_clipped = max(0, min(x2, width)) keyframe_paths.append(output_path)
y2_clipped = max(0, min(y2, height))
if x2_clipped <= x1_clipped or y2_clipped <= y1_clipped:
return idx, None
cropped = img[y1_clipped:y2_clipped, x1_clipped:x2_clipped]
resized = cv2.resize(cropped, (224, 224))
output_path = os.path.join(output_dir, f"frame_{idx:04d}.jpg")
cv2.imwrite(output_path, resized)
return idx, output_path
except Exception as e: except Exception as e:
logger.debug( logger.debug(
f"Failed to extract frame from {recording.path} at {relative_time}s: {e}" f"Failed to extract frame from {recording.path} at {relative_time}s: {e}"
) )
return idx, None continue
keyframes_with_index: list[tuple[int, str]] = [] return keyframe_paths
with ThreadPoolExecutor(max_workers=max_workers) as executor:
future_to_idx = {
executor.submit(_process_timestamp, idx, ts_info): idx
for idx, ts_info in enumerate(timestamps)
}
for future in as_completed(future_to_idx):
_, path = future.result()
if path:
keyframes_with_index.append((future_to_idx[future], path))
keyframes_with_index.sort(key=lambda item: item[0])
return [path for _, path in keyframes_with_index]
def _select_distinct_images( def _select_distinct_images(

View File

@ -14,7 +14,6 @@ import ProtectedRoute from "@/components/auth/ProtectedRoute";
import { AuthProvider } from "@/context/auth-context"; import { AuthProvider } from "@/context/auth-context";
import useSWR from "swr"; import useSWR from "swr";
import { FrigateConfig } from "./types/frigateConfig"; import { FrigateConfig } from "./types/frigateConfig";
import ActivityIndicator from "@/components/indicators/activity-indicator";
const Live = lazy(() => import("@/pages/Live")); const Live = lazy(() => import("@/pages/Live"));
const Events = lazy(() => import("@/pages/Events")); const Events = lazy(() => import("@/pages/Events"));
@ -51,13 +50,6 @@ function DefaultAppView() {
const { data: config } = useSWR<FrigateConfig>("config", { const { data: config } = useSWR<FrigateConfig>("config", {
revalidateOnFocus: false, revalidateOnFocus: false,
}); });
// Compute required roles for main routes, ensuring we have config first
// to prevent race condition where custom roles are temporarily unavailable
const mainRouteRoles = config?.auth?.roles
? Object.keys(config.auth.roles)
: undefined;
return ( return (
<div className="size-full overflow-hidden"> <div className="size-full overflow-hidden">
{isDesktop && <Sidebar />} {isDesktop && <Sidebar />}
@ -76,11 +68,13 @@ function DefaultAppView() {
<Routes> <Routes>
<Route <Route
element={ element={
mainRouteRoles ? ( <ProtectedRoute
<ProtectedRoute requiredRoles={mainRouteRoles} /> requiredRoles={
) : ( config?.auth.roles
<ActivityIndicator className="absolute left-1/2 top-1/2 -translate-x-1/2 -translate-y-1/2" /> ? Object.keys(config.auth.roles)
) : ["admin", "viewer"]
}
/>
} }
> >
<Route index element={<Live />} /> <Route index element={<Live />} />

View File

@ -440,7 +440,6 @@ function CustomTimeSelector({
<FaCalendarAlt /> <FaCalendarAlt />
<div className="flex flex-wrap items-center"> <div className="flex flex-wrap items-center">
<Popover <Popover
modal={false}
open={startOpen} open={startOpen}
onOpenChange={(open) => { onOpenChange={(open) => {
if (!open) { if (!open) {
@ -462,10 +461,7 @@ function CustomTimeSelector({
{formattedStart} {formattedStart}
</Button> </Button>
</PopoverTrigger> </PopoverTrigger>
<PopoverContent <PopoverContent className="flex flex-col items-center">
disablePortal={isDesktop}
className="flex flex-col items-center"
>
<TimezoneAwareCalendar <TimezoneAwareCalendar
timezone={config?.ui.timezone} timezone={config?.ui.timezone}
selectedDay={new Date(startTime * 1000)} selectedDay={new Date(startTime * 1000)}
@ -510,7 +506,6 @@ function CustomTimeSelector({
</Popover> </Popover>
<FaArrowRight className="size-4 text-primary" /> <FaArrowRight className="size-4 text-primary" />
<Popover <Popover
modal={false}
open={endOpen} open={endOpen}
onOpenChange={(open) => { onOpenChange={(open) => {
if (!open) { if (!open) {
@ -532,10 +527,7 @@ function CustomTimeSelector({
{formattedEnd} {formattedEnd}
</Button> </Button>
</PopoverTrigger> </PopoverTrigger>
<PopoverContent <PopoverContent className="flex flex-col items-center">
disablePortal={isDesktop}
className="flex flex-col items-center"
>
<TimezoneAwareCalendar <TimezoneAwareCalendar
timezone={config?.ui.timezone} timezone={config?.ui.timezone}
selectedDay={new Date(endTime * 1000)} selectedDay={new Date(endTime * 1000)}
@ -553,7 +545,7 @@ function CustomTimeSelector({
<SelectSeparator className="bg-secondary" /> <SelectSeparator className="bg-secondary" />
<input <input
className="text-md mx-4 w-full border border-input bg-background p-1 text-secondary-foreground hover:bg-accent hover:text-accent-foreground dark:[color-scheme:dark]" className="text-md mx-4 w-full border border-input bg-background p-1 text-secondary-foreground hover:bg-accent hover:text-accent-foreground dark:[color-scheme:dark]"
id="endTime" id="startTime"
type="time" type="time"
value={endClock} value={endClock}
step={isIOS ? "60" : "1"} step={isIOS ? "60" : "1"}

View File

@ -438,7 +438,7 @@ export default function Settings() {
return ( return (
<div className="flex h-full flex-col"> <div className="flex h-full flex-col">
<div className="flex items-center justify-between border-b border-secondary p-3"> <div className="flex items-center justify-between border-b border-secondary p-3">
<Heading as="h3" className="mb-0 min-h-9"> <Heading as="h3" className="mb-0">
{t("menu.settings", { ns: "common" })} {t("menu.settings", { ns: "common" })}
</Heading> </Heading>
{CAMERA_SELECT_BUTTON_PAGES.includes(page) && ( {CAMERA_SELECT_BUTTON_PAGES.includes(page) && (