From 304d39790a31bbbb590abeb7b4406f197e9ea4ab Mon Sep 17 00:00:01 2001
From: Nicolas Mowen
Date: Tue, 7 Oct 2025 06:38:19 -0600
Subject: [PATCH] Refactor face card into generic classification card

---
 .../components/card/ClassificationCard.tsx | 118 ++++++
 web/src/pages/FaceLibrary.tsx              | 384 ++++++------------
 web/src/types/classification.ts            |  14 +
 web/src/types/face.ts                      |   8 -
 web/vite.config.ts                         |   2 +-
 5 files changed, 265 insertions(+), 261 deletions(-)
 create mode 100644 web/src/components/card/ClassificationCard.tsx

diff --git a/web/src/components/card/ClassificationCard.tsx b/web/src/components/card/ClassificationCard.tsx
new file mode 100644
index 000000000..7e694c595
--- /dev/null
+++ b/web/src/components/card/ClassificationCard.tsx
@@ -0,0 +1,118 @@
+import { baseUrl } from "@/api/baseUrl";
+import useContextMenu from "@/hooks/use-contextmenu";
+import { cn } from "@/lib/utils";
+import {
+  ClassificationItemData,
+  ClassificationThreshold,
+} from "@/types/classification";
+import { useMemo, useRef, useState } from "react";
+import { isMobile } from "react-device-detect";
+import { useTranslation } from "react-i18next";
+
+type ClassificationCardProps = {
+  className?: string;
+  data: ClassificationItemData;
+  threshold?: ClassificationThreshold;
+  selected: boolean;
+  i18nLibrary: string;
+  onClick: (data: ClassificationItemData, meta: boolean)
=> void; + children?: React.ReactNode; +}; +export function ClassificationCard({ + className, + data, + threshold, + selected, + i18nLibrary, + onClick, + children, +}: ClassificationCardProps) { + const { t } = useTranslation([i18nLibrary]); + const [imageLoaded, setImageLoaded] = useState(false); + + const scoreStatus = useMemo(() => { + if (!data.score || !threshold) { + return "unknown"; + } + + if (data.score >= threshold.recognition) { + return "match"; + } else if (data.score >= threshold.unknown) { + return "potential"; + } else { + return "unknown"; + } + }, [data, threshold]); + + // interaction + + const imgRef = useRef(null); + + useContextMenu(imgRef, () => { + onClick(data, true); + }); + + const imageArea = useMemo(() => { + if (imgRef.current == null || !imageLoaded) { + return undefined; + } + + return imgRef.current.naturalWidth * imgRef.current.naturalHeight; + }, [imageLoaded]); + + return ( + <> +
+
+ setImageLoaded(true)} + className={cn("size-44", isMobile && "w-full")} + src={`${baseUrl}${data.filepath}`} + onClick={(e) => { + e.stopPropagation(); + onClick(data, e.metaKey || e.ctrlKey); + }} + /> + {imageArea != undefined && ( +
+ {t("pixels", { area: imageArea })} +
+ )} +
+
+
+
+
+ {data.name == "unknown" ? t("details.unknown") : data.name} +
+ {data.score && ( +
+ {Math.round(data.score * 100)}% +
+ )} +
+
+ {children} +
+
+
+
+ + ); +} diff --git a/web/src/pages/FaceLibrary.tsx b/web/src/pages/FaceLibrary.tsx index d48ef19c4..497a32f5a 100644 --- a/web/src/pages/FaceLibrary.tsx +++ b/web/src/pages/FaceLibrary.tsx @@ -1,4 +1,3 @@ -import { baseUrl } from "@/api/baseUrl"; import TimeAgo from "@/components/dynamic/TimeAgo"; import AddFaceIcon from "@/components/icons/AddFaceIcon"; import ActivityIndicator from "@/components/indicators/activity-indicator"; @@ -37,13 +36,12 @@ import { TooltipContent, TooltipTrigger, } from "@/components/ui/tooltip"; -import useContextMenu from "@/hooks/use-contextmenu"; import useKeyboardListener from "@/hooks/use-keyboard-listener"; import useOptimisticState from "@/hooks/use-optimistic-state"; import { cn } from "@/lib/utils"; import { Event } from "@/types/event"; -import { FaceLibraryData, RecognizedFaceData } from "@/types/face"; -import { FaceRecognitionConfig, FrigateConfig } from "@/types/frigateConfig"; +import { FaceLibraryData } from "@/types/face"; +import { FrigateConfig } from "@/types/frigateConfig"; import { TooltipPortal } from "@radix-ui/react-tooltip"; import axios from "axios"; import { @@ -72,6 +70,8 @@ import SearchDetailDialog, { SearchTab, } from "@/components/overlay/detail/SearchDetailDialog"; import { SearchResult } from "@/types/search"; +import { ClassificationCard } from "@/components/card/ClassificationCard"; +import { ClassificationItemData } from "@/types/classification"; export default function FaceLibrary() { const { t } = useTranslation(["views/faceLibrary"]); @@ -641,7 +641,7 @@ function TrainingGrid({ // face data const faceGroups = useMemo(() => { - const groups: { [eventId: string]: RecognizedFaceData[] } = {}; + const groups: { [eventId: string]: ClassificationItemData[] } = {}; const faces = attemptImages .map((image) => { @@ -650,6 +650,7 @@ function TrainingGrid({ try { return { filename: image, + filepath: `clips/faces/train/${image}`, timestamp: Number.parseFloat(parts[2]), eventId: `${parts[0]}-${parts[1]}`, 
name: parts[3], @@ -739,7 +740,7 @@ function TrainingGrid({ type FaceAttemptGroupProps = { config: FrigateConfig; - group: RecognizedFaceData[]; + group: ClassificationItemData[]; event?: Event; faceNames: string[]; selectedFaces: string[]; @@ -767,6 +768,23 @@ function FaceAttemptGroup({ [group, selectedFaces], ); + const threshold = useMemo(() => { + return { + recognition: config.face_recognition.recognition_threshold, + unknown: config.face_recognition.unknown_score, + }; + }, [config]); + + const time = useMemo(() => { + const item = group[0]; + + if (!item?.timestamp) { + return undefined; + } + + return item.timestamp * 1000; + }, [group]); + // interaction const handleClickEvent = useCallback( @@ -799,6 +817,63 @@ function FaceAttemptGroup({ [event, group, selectedFaces, onClickFaces, onSelectEvent], ); + // api calls + + const onTrainAttempt = useCallback( + (data: ClassificationItemData, trainName: string) => { + axios + .post(`/faces/train/${trainName}/classify`, { + training_file: data.filename, + }) + .then((resp) => { + if (resp.status == 200) { + toast.success(t("toast.success.trainedFace"), { + position: "top-center", + }); + onRefresh(); + } + }) + .catch((error) => { + const errorMessage = + error.response?.data?.message || + error.response?.data?.detail || + "Unknown error"; + toast.error(t("toast.error.trainFailed", { errorMessage }), { + position: "top-center", + }); + }); + }, + [onRefresh, t], + ); + + const onReprocess = useCallback( + (data: ClassificationItemData) => { + axios + .post(`/faces/reprocess`, { training_file: data.filename }) + .then((resp) => { + if (resp.status == 200) { + toast.success(t("toast.success.updatedFaceScore"), { + position: "top-center", + }); + onRefresh(); + } + }) + .catch((error) => { + const errorMessage = + error.response?.data?.message || + error.response?.data?.detail || + "Unknown error"; + toast.error( + t("toast.error.updateFaceScoreFailed", { errorMessage }), + { + position: "top-center", + }, + ); + 
}); + }, + [onRefresh, t], + ); + return (
- + {time && ( + + )}
{event && ( @@ -864,15 +941,15 @@ function FaceAttemptGroup({ : "grid grid-cols-2 sm:grid-cols-5 lg:grid-cols-6", )} > - {group.map((data: RecognizedFaceData) => ( - ( + { if (meta || selectedFaces.length > 0) { onClickFaces([data.filename], true); @@ -880,178 +957,29 @@ function FaceAttemptGroup({ onSelectEvent(event); } }} - onRefresh={onRefresh} - /> + > + onTrainAttempt(data, name)} + > + + + + + onReprocess(data)} + /> + + {t("button.reprocessFace")} + + ))} ); } -type FaceAttemptProps = { - data: RecognizedFaceData; - faceNames: string[]; - recognitionConfig: FaceRecognitionConfig; - selected: boolean; - onClick: (data: RecognizedFaceData, meta: boolean) => void; - onRefresh: () => void; -}; -function FaceAttempt({ - data, - faceNames, - recognitionConfig, - selected, - onClick, - onRefresh, -}: FaceAttemptProps) { - const { t } = useTranslation(["views/faceLibrary"]); - const [imageLoaded, setImageLoaded] = useState(false); - - const scoreStatus = useMemo(() => { - if (data.score >= recognitionConfig.recognition_threshold) { - return "match"; - } else if (data.score >= recognitionConfig.unknown_score) { - return "potential"; - } else { - return "unknown"; - } - }, [data, recognitionConfig]); - - // interaction - - const imgRef = useRef(null); - - useContextMenu(imgRef, () => { - onClick(data, true); - }); - - const imageArea = useMemo(() => { - if (imgRef.current == null || !imageLoaded) { - return undefined; - } - - return imgRef.current.naturalWidth * imgRef.current.naturalHeight; - }, [imageLoaded]); - - // api calls - - const onTrainAttempt = useCallback( - (trainName: string) => { - axios - .post(`/faces/train/${trainName}/classify`, { - training_file: data.filename, - }) - .then((resp) => { - if (resp.status == 200) { - toast.success(t("toast.success.trainedFace"), { - position: "top-center", - }); - onRefresh(); - } - }) - .catch((error) => { - const errorMessage = - error.response?.data?.message || - error.response?.data?.detail || - "Unknown error"; 
- toast.error(t("toast.error.trainFailed", { errorMessage }), { - position: "top-center", - }); - }); - }, - [data, onRefresh, t], - ); - - const onReprocess = useCallback(() => { - axios - .post(`/faces/reprocess`, { training_file: data.filename }) - .then((resp) => { - if (resp.status == 200) { - toast.success(t("toast.success.updatedFaceScore"), { - position: "top-center", - }); - onRefresh(); - } - }) - .catch((error) => { - const errorMessage = - error.response?.data?.message || - error.response?.data?.detail || - "Unknown error"; - toast.error(t("toast.error.updateFaceScoreFailed", { errorMessage }), { - position: "top-center", - }); - }); - }, [data, onRefresh, t]); - - return ( - <> -
-
- setImageLoaded(true)} - className={cn("size-44", isMobile && "w-full")} - src={`${baseUrl}clips/faces/train/${data.filename}`} - onClick={(e) => { - e.stopPropagation(); - onClick(data, e.metaKey || e.ctrlKey); - }} - /> - {imageArea != undefined && ( -
- {t("pixels", { area: imageArea })} -
- )} -
-
-
-
-
- {data.name == "unknown" ? t("details.unknown") : data.name} -
-
- {Math.round(data.score * 100)}% -
-
-
- - - - - - onReprocess()} - /> - - {t("button.reprocessFace")} - -
-
-
-
- - ); -} - type FaceGridProps = { contentRef: MutableRefObject; faceImages: string[]; @@ -1093,80 +1021,32 @@ function FaceGrid({ )} > {sortedFaces.map((image: string) => ( - + i18nLibrary="views/faceLibrary" + onClick={(data, meta) => onClickFaces([data.filename], meta)} + > + + + { + e.stopPropagation(); + onDelete(pageToggle, [image]); + }} + /> + + {t("button.deleteFaceAttempts")} + + ))} ); } - -type FaceImageProps = { - name: string; - image: string; - selected: boolean; - onClickFaces: (images: string[], ctrl: boolean) => void; - onDelete: (name: string, ids: string[]) => void; -}; -function FaceImage({ - name, - image, - selected, - onClickFaces, - onDelete, -}: FaceImageProps) { - const { t } = useTranslation(["views/faceLibrary"]); - - return ( -
{ - e.stopPropagation(); - onClickFaces([image], e.ctrlKey || e.metaKey); - }} - > -
- -
-
-
-
-
{name}
-
-
- - - { - e.stopPropagation(); - onDelete(name, [image]); - }} - /> - - {t("button.deleteFaceAttempts")} - -
-
-
-
- );
}
diff --git a/web/src/types/classification.ts b/web/src/types/classification.ts
index 54320175a..092021342 100644
--- a/web/src/types/classification.ts
+++ b/web/src/types/classification.ts
@@ -6,3 +6,17 @@ export type TrainFilter = {
   min_score?: number;
   max_score?: number;
 };
+
+export type ClassificationItemData = {
+  filepath: string;
+  filename: string;
+  name: string;
+  timestamp?: number;
+  eventId?: string;
+  score?: number;
+};
+
+export type ClassificationThreshold = {
+  recognition: number;
+  unknown: number;
+};
diff --git a/web/src/types/face.ts b/web/src/types/face.ts
index dc481b64f..3ba812c86 100644
--- a/web/src/types/face.ts
+++ b/web/src/types/face.ts
@@ -1,11 +1,3 @@
 export type FaceLibraryData = {
   [faceName: string]: string[];
 };
-
-export type RecognizedFaceData = {
-  filename: string;
-  timestamp: number;
-  eventId: string;
-  name: string;
-  score: number;
-};