Compare commits

..

7 Commits

Author SHA1 Message Date
Nicolas Mowen
0140adf8e1 Fix check for audio activity to keep a segment 2025-12-15 07:35:55 -07:00
Josh Hawkins
0a91888faa ensure python defs match openapi spec for auth endpoints 2025-12-15 08:32:38 -06:00
Nicolas Mowen
fb88d37c56 Consider audio activity when deciding if recording segments should be kept due to motion 2025-12-15 07:12:14 -07:00
Josh Hawkins
60116214bb fix object mask creation 2025-12-15 07:32:53 -06:00
Nicolas Mowen
202cf5ce89 Clarify ROCm enrichments 2025-12-15 06:06:15 -07:00
Josh Hawkins
0231d4474c clarify auth endpoint return in openapi schema 2025-12-14 21:42:53 -06:00
Nicolas Mowen
9e99cec52e Exclude yolov9 license plate from migraphx runner 2025-12-14 20:38:49 -07:00
4 changed files with 37 additions and 97 deletions

View File

@ -22,7 +22,6 @@ from frigate.const import (
from frigate.log import redirect_output_to_logger
from frigate.models import Event, Recordings, ReviewSegment
from frigate.types import ModelStatusTypesEnum
from frigate.util.downloader import ModelDownloader
from frigate.util.file import get_event_thumbnail_bytes
from frigate.util.image import get_image_from_recording
from frigate.util.process import FrigateProcess
@ -122,10 +121,6 @@ def get_dataset_image_count(model_name: str) -> int:
class ClassificationTrainingProcess(FrigateProcess):
def __init__(self, model_name: str) -> None:
self.BASE_WEIGHT_URL = os.environ.get(
"TF_KERAS_MOBILENET_V2_WEIGHTS_URL",
"",
)
super().__init__(
stop_event=None,
priority=PROCESS_PRIORITY_LOW,
@ -184,23 +179,11 @@ class ClassificationTrainingProcess(FrigateProcess):
)
return False
weights_path = "imagenet"
# Download MobileNetV2 weights if not present
if self.BASE_WEIGHT_URL:
weights_path = os.path.join(
MODEL_CACHE_DIR, "MobileNet", "mobilenet_v2_weights.h5"
)
if not os.path.exists(weights_path):
logger.info("Downloading MobileNet V2 weights file")
ModelDownloader.download_from_url(
self.BASE_WEIGHT_URL, weights_path
)
# Start with imagenet base model with 35% of channels in each layer
base_model = MobileNetV2(
input_shape=(224, 224, 3),
include_top=False,
weights=weights_path,
weights="imagenet",
alpha=0.35,
)
base_model.trainable = False # Freeze pre-trained layers
@ -499,10 +482,6 @@ def _extract_keyframes(
"""
Extract keyframes from recordings at specified timestamps and crop to specified regions.
This implementation batches work by running multiple ffmpeg snapshot commands
concurrently, which significantly reduces total runtime compared to
processing each timestamp serially.
Args:
ffmpeg_path: Path to ffmpeg binary
timestamps: List of timestamp dicts from _select_balanced_timestamps
@ -512,21 +491,15 @@ def _extract_keyframes(
Returns:
List of paths to successfully extracted and cropped keyframe images
"""
from concurrent.futures import ThreadPoolExecutor, as_completed
keyframe_paths = []
if not timestamps:
return []
# Limit the number of concurrent ffmpeg processes so we don't overload the host.
max_workers = min(5, len(timestamps))
def _process_timestamp(idx: int, ts_info: dict) -> tuple[int, str | None]:
for idx, ts_info in enumerate(timestamps):
camera = ts_info["camera"]
timestamp = ts_info["timestamp"]
if camera not in camera_crops:
logger.warning(f"No crop coordinates for camera {camera}")
return idx, None
continue
norm_x1, norm_y1, norm_x2, norm_y2 = camera_crops[camera]
@ -543,7 +516,7 @@ def _extract_keyframes(
.get()
)
except Exception:
return idx, None
continue
relative_time = timestamp - recording.start_time
@ -557,57 +530,38 @@ def _extract_keyframes(
height=None,
)
if not image_data:
return idx, None
if image_data:
nparr = np.frombuffer(image_data, np.uint8)
img = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
nparr = np.frombuffer(image_data, np.uint8)
img = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
if img is not None:
height, width = img.shape[:2]
if img is None:
return idx, None
x1 = int(norm_x1 * width)
y1 = int(norm_y1 * height)
x2 = int(norm_x2 * width)
y2 = int(norm_y2 * height)
height, width = img.shape[:2]
x1_clipped = max(0, min(x1, width))
y1_clipped = max(0, min(y1, height))
x2_clipped = max(0, min(x2, width))
y2_clipped = max(0, min(y2, height))
x1 = int(norm_x1 * width)
y1 = int(norm_y1 * height)
x2 = int(norm_x2 * width)
y2 = int(norm_y2 * height)
if x2_clipped > x1_clipped and y2_clipped > y1_clipped:
cropped = img[y1_clipped:y2_clipped, x1_clipped:x2_clipped]
resized = cv2.resize(cropped, (224, 224))
x1_clipped = max(0, min(x1, width))
y1_clipped = max(0, min(y1, height))
x2_clipped = max(0, min(x2, width))
y2_clipped = max(0, min(y2, height))
output_path = os.path.join(output_dir, f"frame_{idx:04d}.jpg")
cv2.imwrite(output_path, resized)
keyframe_paths.append(output_path)
if x2_clipped <= x1_clipped or y2_clipped <= y1_clipped:
return idx, None
cropped = img[y1_clipped:y2_clipped, x1_clipped:x2_clipped]
resized = cv2.resize(cropped, (224, 224))
output_path = os.path.join(output_dir, f"frame_{idx:04d}.jpg")
cv2.imwrite(output_path, resized)
return idx, output_path
except Exception as e:
logger.debug(
f"Failed to extract frame from {recording.path} at {relative_time}s: {e}"
)
return idx, None
continue
keyframes_with_index: list[tuple[int, str]] = []
with ThreadPoolExecutor(max_workers=max_workers) as executor:
future_to_idx = {
executor.submit(_process_timestamp, idx, ts_info): idx
for idx, ts_info in enumerate(timestamps)
}
for future in as_completed(future_to_idx):
_, path = future.result()
if path:
keyframes_with_index.append((future_to_idx[future], path))
keyframes_with_index.sort(key=lambda item: item[0])
return [path for _, path in keyframes_with_index]
return keyframe_paths
def _select_distinct_images(

View File

@ -14,7 +14,6 @@ import ProtectedRoute from "@/components/auth/ProtectedRoute";
import { AuthProvider } from "@/context/auth-context";
import useSWR from "swr";
import { FrigateConfig } from "./types/frigateConfig";
import ActivityIndicator from "@/components/indicators/activity-indicator";
const Live = lazy(() => import("@/pages/Live"));
const Events = lazy(() => import("@/pages/Events"));
@ -51,13 +50,6 @@ function DefaultAppView() {
const { data: config } = useSWR<FrigateConfig>("config", {
revalidateOnFocus: false,
});
// Compute required roles for main routes, ensuring we have config first
// to prevent race condition where custom roles are temporarily unavailable
const mainRouteRoles = config?.auth?.roles
? Object.keys(config.auth.roles)
: undefined;
return (
<div className="size-full overflow-hidden">
{isDesktop && <Sidebar />}
@ -76,11 +68,13 @@ function DefaultAppView() {
<Routes>
<Route
element={
mainRouteRoles ? (
<ProtectedRoute requiredRoles={mainRouteRoles} />
) : (
<ActivityIndicator className="absolute left-1/2 top-1/2 -translate-x-1/2 -translate-y-1/2" />
)
<ProtectedRoute
requiredRoles={
config?.auth.roles
? Object.keys(config.auth.roles)
: ["admin", "viewer"]
}
/>
}
>
<Route index element={<Live />} />

View File

@ -440,7 +440,6 @@ function CustomTimeSelector({
<FaCalendarAlt />
<div className="flex flex-wrap items-center">
<Popover
modal={false}
open={startOpen}
onOpenChange={(open) => {
if (!open) {
@ -462,10 +461,7 @@ function CustomTimeSelector({
{formattedStart}
</Button>
</PopoverTrigger>
<PopoverContent
disablePortal={isDesktop}
className="flex flex-col items-center"
>
<PopoverContent className="flex flex-col items-center">
<TimezoneAwareCalendar
timezone={config?.ui.timezone}
selectedDay={new Date(startTime * 1000)}
@ -510,7 +506,6 @@ function CustomTimeSelector({
</Popover>
<FaArrowRight className="size-4 text-primary" />
<Popover
modal={false}
open={endOpen}
onOpenChange={(open) => {
if (!open) {
@ -532,10 +527,7 @@ function CustomTimeSelector({
{formattedEnd}
</Button>
</PopoverTrigger>
<PopoverContent
disablePortal={isDesktop}
className="flex flex-col items-center"
>
<PopoverContent className="flex flex-col items-center">
<TimezoneAwareCalendar
timezone={config?.ui.timezone}
selectedDay={new Date(endTime * 1000)}
@ -553,7 +545,7 @@ function CustomTimeSelector({
<SelectSeparator className="bg-secondary" />
<input
className="text-md mx-4 w-full border border-input bg-background p-1 text-secondary-foreground hover:bg-accent hover:text-accent-foreground dark:[color-scheme:dark]"
id="endTime"
id="startTime"
type="time"
value={endClock}
step={isIOS ? "60" : "1"}

View File

@ -438,7 +438,7 @@ export default function Settings() {
return (
<div className="flex h-full flex-col">
<div className="flex items-center justify-between border-b border-secondary p-3">
<Heading as="h3" className="mb-0 min-h-9">
<Heading as="h3" className="mb-0">
{t("menu.settings", { ns: "common" })}
</Heading>
{CAMERA_SELECT_BUTTON_PAGES.includes(page) && (