diff --git a/frigate/api/event.py b/frigate/api/event.py
index 5f426c83f..0b721b82f 100644
--- a/frigate/api/event.py
+++ b/frigate/api/event.py
@@ -434,10 +434,8 @@ async def event_ids(ids: str, request: Request):
             event = Event.get(Event.id == event_id)
             await require_camera_access(event.camera, request=request)
         except DoesNotExist:
-            return JSONResponse(
-                content=({"success": False, "message": f"Event {event_id} not found"}),
-                status_code=404,
-            )
+            # we should not fail the entire request if an event is not found
+            continue
 
     try:
         events = Event.select().where(Event.id << ids).dicts().iterator()
diff --git a/frigate/data_processing/real_time/custom_classification.py b/frigate/data_processing/real_time/custom_classification.py
index 45a4b2223..e5e4fc90e 100644
--- a/frigate/data_processing/real_time/custom_classification.py
+++ b/frigate/data_processing/real_time/custom_classification.py
@@ -142,7 +142,7 @@ class CustomStateClassificationProcessor(RealTimeProcessorApi):
 
         if frame.shape != (224, 224):
             try:
-                frame = cv2.resize(frame, (224, 224))
+                resized_frame = cv2.resize(frame, (224, 224))
             except Exception:
                 logger.warning("Failed to resize image for state classification")
                 return
@@ -151,13 +151,14 @@ class CustomStateClassificationProcessor(RealTimeProcessorApi):
             write_classification_attempt(
                 self.train_dir,
                 cv2.cvtColor(frame, cv2.COLOR_RGB2BGR),
+                "none-none",
                 now,
                 "unknown",
                 0.0,
             )
             return
 
-        input = np.expand_dims(frame, axis=0)
+        input = np.expand_dims(resized_frame, axis=0)
         self.interpreter.set_tensor(self.tensor_input_details[0]["index"], input)
         self.interpreter.invoke()
         res: np.ndarray = self.interpreter.get_tensor(
@@ -171,6 +172,7 @@ class CustomStateClassificationProcessor(RealTimeProcessorApi):
         write_classification_attempt(
             self.train_dir,
             cv2.cvtColor(frame, cv2.COLOR_RGB2BGR),
+            "none-none",
             now,
             self.labelmap[best_id],
             score,
@@ -284,7 +286,7 @@ class CustomObjectClassificationProcessor(RealTimeProcessorApi):
 
         if crop.shape != (224, 224):
             try:
-                crop = cv2.resize(crop, (224, 224))
+                resized_crop = cv2.resize(crop, (224, 224))
             except Exception:
                 logger.warning("Failed to resize image for state classification")
                 return
@@ -293,13 +295,14 @@ class CustomObjectClassificationProcessor(RealTimeProcessorApi):
             write_classification_attempt(
                 self.train_dir,
                 cv2.cvtColor(crop, cv2.COLOR_RGB2BGR),
+                obj_data["id"],
                 now,
                 "unknown",
                 0.0,
             )
             return
 
-        input = np.expand_dims(crop, axis=0)
+        input = np.expand_dims(resized_crop, axis=0)
         self.interpreter.set_tensor(self.tensor_input_details[0]["index"], input)
         self.interpreter.invoke()
         res: np.ndarray = self.interpreter.get_tensor(
@@ -314,6 +317,7 @@ class CustomObjectClassificationProcessor(RealTimeProcessorApi):
         write_classification_attempt(
             self.train_dir,
             cv2.cvtColor(crop, cv2.COLOR_RGB2BGR),
+            obj_data["id"],
             now,
             self.labelmap[best_id],
             score,
@@ -372,6 +376,7 @@ class CustomObjectClassificationProcessor(RealTimeProcessorApi):
 def write_classification_attempt(
     folder: str,
     frame: np.ndarray,
+    event_id: str,
     timestamp: float,
     label: str,
     score: float,
@@ -379,7 +384,7 @@
     if "-" in label:
         label = label.replace("-", "_")
 
-    file = os.path.join(folder, f"{timestamp}-{label}-{score}.webp")
+    file = os.path.join(folder, f"{event_id}-{timestamp}-{label}-{score}.webp")
     os.makedirs(folder, exist_ok=True)
     cv2.imwrite(file, frame)
diff --git a/web/public/locales/en/common.json b/web/public/locales/en/common.json
index c22a9227d..501f2d4bc 100644
--- a/web/public/locales/en/common.json
+++ b/web/public/locales/en/common.json
@@ -263,5 +263,8 @@
     "desc": "Page not found"
   },
   "selectItem": "Select {{item}}",
-  "readTheDocumentation": "Read the documentation"
+  "readTheDocumentation": "Read the documentation",
+  "information": {
+    "pixels": "{{area}}px"
+  }
 }
diff --git a/web/public/locales/en/views/faceLibrary.json b/web/public/locales/en/views/faceLibrary.json
index 7f4c3a5b9..3a0804511 100644
--- a/web/public/locales/en/views/faceLibrary.json
+++ b/web/public/locales/en/views/faceLibrary.json
@@ -5,7 +5,6 @@
     "invalidName": "Invalid name. Names can only include letters, numbers, spaces, apostrophes, underscores, and hyphens."
   },
   "details": {
-    "person": "Person",
     "subLabelScore": "Sub Label Score",
     "scoreInfo": "The sub label score is the weighted score for all of the recognized face confidences, so this may differ from the score shown on the snapshot.",
     "face": "Face Details",
diff --git a/web/src/components/card/ClassificationCard.tsx b/web/src/components/card/ClassificationCard.tsx
new file mode 100644
index 000000000..5153b6d71
--- /dev/null
+++ b/web/src/components/card/ClassificationCard.tsx
@@ -0,0 +1,263 @@
+import { baseUrl } from "@/api/baseUrl";
+import useContextMenu from "@/hooks/use-contextmenu";
+import { cn } from "@/lib/utils";
+import {
+  ClassificationItemData,
+  ClassificationThreshold,
+} from "@/types/classification";
+import { Event } from "@/types/event";
+import { useMemo, useRef, useState } from "react";
+import { isDesktop, isMobile } from "react-device-detect";
+import { useTranslation } from "react-i18next";
+import TimeAgo from "../dynamic/TimeAgo";
+import { Tooltip, TooltipContent, TooltipTrigger } from "../ui/tooltip";
+import { LuSearch } from "react-icons/lu";
+import { TooltipPortal } from "@radix-ui/react-tooltip";
+import { useNavigate } from "react-router-dom";
+import { getTranslatedLabel } from "@/utils/i18n";
+
+type ClassificationCardProps = {
+  className?: string;
+  imgClassName?: string;
+  data: ClassificationItemData;
+  threshold?: ClassificationThreshold;
+  selected: boolean;
+  i18nLibrary: string;
+  showArea?: boolean;
+  onClick: (data: ClassificationItemData, meta: boolean) => void;
+  children?: React.ReactNode;
+};
+export function ClassificationCard({
+  className,
+  imgClassName,
+  data,
+  threshold,
+  selected,
+  i18nLibrary,
+  showArea = true,
+  onClick,
+  children,
+}: ClassificationCardProps) {
+  const { t } = useTranslation([i18nLibrary]);
+  const [imageLoaded, setImageLoaded] = useState(false);
+
+  const scoreStatus = useMemo(() => {
+    if (!data.score || !threshold) {
+      return "unknown";
+    }
+
+    if (data.score >= threshold.recognition) {
+      return "match";
+    } else if (data.score >= threshold.unknown) {
+      return "potential";
+    } else {
+      return "unknown";
+    }
+  }, [data, threshold]);
+
+  // interaction
+
+  const imgRef = useRef<HTMLImageElement | null>(null);
+
+  useContextMenu(imgRef, () => {
+    onClick(data, true);
+  });
+
+  const imageArea = useMemo(() => {
+    if (!showArea || imgRef.current == null || !imageLoaded) {
+      return undefined;
+    }
+
+    return imgRef.current.naturalWidth * imgRef.current.naturalHeight;
+  }, [showArea, imageLoaded]);
+
+  return (
+    <>
+
+
+ setImageLoaded(true)} + className={cn("size-44", imgClassName, isMobile && "w-full")} + src={`${baseUrl}${data.filepath}`} + onClick={(e) => { + e.stopPropagation(); + onClick(data, e.metaKey || e.ctrlKey); + }} + /> + {imageArea != undefined && ( +
+ {t("information.pixels", { ns: "common", area: imageArea })} +
+ )} +
+
+
+
+
+ {data.name == "unknown" ? t("details.unknown") : data.name} +
+ {data.score && ( +
+ {Math.round(data.score * 100)}% +
+ )} +
+
+ {children} +
+
+
+
+
+  );
+}
+
+type GroupedClassificationCardProps = {
+  group: ClassificationItemData[];
+  event?: Event;
+  threshold?: ClassificationThreshold;
+  selectedItems: string[];
+  i18nLibrary: string;
+  objectType: string;
+  onClick: (data: ClassificationItemData | undefined) => void;
+  onSelectEvent: (event: Event) => void;
+  children?: (data: ClassificationItemData) => React.ReactNode;
+};
+export function GroupedClassificationCard({
+  group,
+  event,
+  threshold,
+  selectedItems,
+  i18nLibrary,
+  objectType,
+  onClick,
+  onSelectEvent,
+  children,
+}: GroupedClassificationCardProps) {
+  const navigate = useNavigate();
+  const { t } = useTranslation(["views/explore", i18nLibrary]);
+
+  // data
+
+  const allItemsSelected = useMemo(
+    () => group.every((data) => selectedItems.includes(data.filename)),
+    [group, selectedItems],
+  );
+
+  const time = useMemo(() => {
+    const item = group[0];
+
+    if (!item?.timestamp) {
+      return undefined;
+    }
+
+    return item.timestamp * 1000;
+  }, [group]);
+
+  return (
+
{ + if (selectedItems.length) { + onClick(undefined); + } + }} + onContextMenu={(e) => { + e.stopPropagation(); + e.preventDefault(); + onClick(undefined); + }} + > +
+
+
+ {getTranslatedLabel(objectType)} + {event?.sub_label + ? `: ${event.sub_label} (${Math.round((event.data.sub_label_score || 0) * 100)}%)` + : ": " + t("details.unknown")} +
+ {time && ( + + )} +
+ {event && ( + + +
{ + navigate(`/explore?event_id=${event.id}`); + }} + > + +
+
+ + + {t("details.item.button.viewInExplore", { + ns: "views/explore", + })} + + +
+ )} +
+ +
+ {group.map((data: ClassificationItemData) => ( + { + if (meta || selectedItems.length > 0) { + onClick(data); + } else if (event) { + onSelectEvent(event); + } + }} + > + {children?.(data)} + + ))} +
+
+  );
+}
diff --git a/web/src/pages/FaceLibrary.tsx b/web/src/pages/FaceLibrary.tsx
index d48ef19c4..fc8e73e3f 100644
--- a/web/src/pages/FaceLibrary.tsx
+++ b/web/src/pages/FaceLibrary.tsx
@@ -1,5 +1,3 @@
-import { baseUrl } from "@/api/baseUrl";
-import TimeAgo from "@/components/dynamic/TimeAgo";
 import AddFaceIcon from "@/components/icons/AddFaceIcon";
 import ActivityIndicator from "@/components/indicators/activity-indicator";
 import CreateFaceWizardDialog from "@/components/overlay/detail/FaceCreateWizardDialog";
@@ -37,13 +35,12 @@ import {
   TooltipContent,
   TooltipTrigger,
 } from "@/components/ui/tooltip";
-import useContextMenu from "@/hooks/use-contextmenu";
 import useKeyboardListener from "@/hooks/use-keyboard-listener";
 import useOptimisticState from "@/hooks/use-optimistic-state";
 import { cn } from "@/lib/utils";
 import { Event } from "@/types/event";
-import { FaceLibraryData, RecognizedFaceData } from "@/types/face";
-import { FaceRecognitionConfig, FrigateConfig } from "@/types/frigateConfig";
+import { FaceLibraryData } from "@/types/face";
+import { FrigateConfig } from "@/types/frigateConfig";
 import { TooltipPortal } from "@radix-ui/react-tooltip";
 import axios from "axios";
 import {
@@ -54,7 +51,7 @@ import {
   useRef,
   useState,
 } from "react";
-import { isDesktop, isMobile } from "react-device-detect";
+import { isDesktop } from "react-device-detect";
 import { Trans, useTranslation } from "react-i18next";
 import {
   LuFolderCheck,
@@ -62,16 +59,19 @@ import {
   LuPencil,
   LuRefreshCw,
   LuScanFace,
-  LuSearch,
   LuTrash2,
 } from "react-icons/lu";
-import { useNavigate } from "react-router-dom";
 import { toast } from "sonner";
 import useSWR from "swr";
 import SearchDetailDialog, {
   SearchTab,
 } from "@/components/overlay/detail/SearchDetailDialog";
 import { SearchResult } from "@/types/search";
+import {
+  ClassificationCard,
+  GroupedClassificationCard,
+} from "@/components/card/ClassificationCard";
+import { ClassificationItemData } from "@/types/classification";
 
 export default function FaceLibrary() {
   const { t } = useTranslation(["views/faceLibrary"]);
@@ -641,7 +641,7 @@ function TrainingGrid({
   // face data
 
   const faceGroups = useMemo(() => {
-    const groups: { [eventId: string]: RecognizedFaceData[] } = {};
+    const groups: { [eventId: string]: ClassificationItemData[] } = {};
 
     const faces = attemptImages
       .map((image) => {
@@ -650,6 +650,7 @@ function TrainingGrid({
         try {
           return {
             filename: image,
+            filepath: `clips/faces/train/${image}`,
             timestamp: Number.parseFloat(parts[2]),
             eventId: `${parts[0]}-${parts[1]}`,
             name: parts[3],
@@ -739,7 +740,7 @@ function TrainingGrid({
 
 type FaceAttemptGroupProps = {
   config: FrigateConfig;
-  group: RecognizedFaceData[];
+  group: ClassificationItemData[];
   event?: Event;
   faceNames: string[];
   selectedFaces: string[];
@@ -757,15 +758,16 @@ function FaceAttemptGroup({
   onSelectEvent,
   onRefresh,
 }: FaceAttemptGroupProps) {
-  const navigate = useNavigate();
   const { t } = useTranslation(["views/faceLibrary", "views/explore"]);
 
   // data
 
-  const allFacesSelected = useMemo(
-    () => group.every((face) => selectedFaces.includes(face.filename)),
-    [group, selectedFaces],
-  );
+  const threshold = useMemo(() => {
+    return {
+      recognition: config.face_recognition.recognition_threshold,
+      unknown: config.face_recognition.unknown_score,
+    };
+  }, [config]);
 
   // interaction
 
@@ -799,144 +801,10 @@ function FaceAttemptGroup({
     [event, group, selectedFaces, onClickFaces, onSelectEvent],
   );
 
-  return (
{ - if (selectedFaces.length) { - handleClickEvent(true); - } - }} - onContextMenu={(e) => { - e.stopPropagation(); - e.preventDefault(); - handleClickEvent(true); - }} - > -
-
-
- {t("details.person")} - {event?.sub_label - ? `: ${event.sub_label} (${Math.round((event.data.sub_label_score || 0) * 100)}%)` - : ": " + t("details.unknown")} -
- -
- {event && ( - - -
{ - navigate(`/explore?event_id=${event.id}`); - }} - > - -
-
- - - {t("details.item.button.viewInExplore", { - ns: "views/explore", - })} - - -
- )} -
- -
- {group.map((data: RecognizedFaceData) => ( - { - if (meta || selectedFaces.length > 0) { - onClickFaces([data.filename], true); - } else if (event) { - onSelectEvent(event); - } - }} - onRefresh={onRefresh} - /> - ))} -
-
-  );
-}
-
-type FaceAttemptProps = {
-  data: RecognizedFaceData;
-  faceNames: string[];
-  recognitionConfig: FaceRecognitionConfig;
-  selected: boolean;
-  onClick: (data: RecognizedFaceData, meta: boolean) => void;
-  onRefresh: () => void;
-};
-function FaceAttempt({
-  data,
-  faceNames,
-  recognitionConfig,
-  selected,
-  onClick,
-  onRefresh,
-}: FaceAttemptProps) {
-  const { t } = useTranslation(["views/faceLibrary"]);
-  const [imageLoaded, setImageLoaded] = useState(false);
-
-  const scoreStatus = useMemo(() => {
-    if (data.score >= recognitionConfig.recognition_threshold) {
-      return "match";
-    } else if (data.score >= recognitionConfig.unknown_score) {
-      return "potential";
-    } else {
-      return "unknown";
-    }
-  }, [data, recognitionConfig]);
-
-  // interaction
-
-  const imgRef = useRef<HTMLImageElement | null>(null);
-
-  useContextMenu(imgRef, () => {
-    onClick(data, true);
-  });
-
-  const imageArea = useMemo(() => {
-    if (imgRef.current == null || !imageLoaded) {
-      return undefined;
-    }
-
-    return imgRef.current.naturalWidth * imgRef.current.naturalHeight;
-  }, [imageLoaded]);
-
   // api calls
 
   const onTrainAttempt = useCallback(
-    (trainName: string) => {
+    (data: ClassificationItemData, trainName: string) => {
       axios
         .post(`/faces/train/${trainName}/classify`, {
           training_file: data.filename,
@@ -959,96 +827,74 @@
           });
         });
     },
-    [data, onRefresh, t],
+    [onRefresh, t],
   );
 
-  const onReprocess = useCallback(() => {
-    axios
-      .post(`/faces/reprocess`, { training_file: data.filename })
-      .then((resp) => {
-        if (resp.status == 200) {
-          toast.success(t("toast.success.updatedFaceScore"), {
-            position: "top-center",
-          });
-          onRefresh();
-        }
-      })
-      .catch((error) => {
-        const errorMessage =
-          error.response?.data?.message ||
-          error.response?.data?.detail ||
-          "Unknown error";
-        toast.error(t("toast.error.updateFaceScoreFailed", { errorMessage }), {
-          position: "top-center",
-        });
-      });
-  }, [data, onRefresh, t]);
+  const onReprocess = useCallback(
+    (data: ClassificationItemData) => {
+      axios
+        .post(`/faces/reprocess`, { training_file: data.filename })
+        .then((resp) => {
+          if (resp.status == 200) {
+            toast.success(t("toast.success.updatedFaceScore"), {
+              position: "top-center",
+            });
+            onRefresh();
+          }
+        })
+        .catch((error) => {
+          const errorMessage =
+            error.response?.data?.message ||
+            error.response?.data?.detail ||
+            "Unknown error";
+          toast.error(
+            t("toast.error.updateFaceScoreFailed", { errorMessage }),
+            {
+              position: "top-center",
+            },
+          );
+        });
+    },
+    [onRefresh, t],
+  );
 
   return (
-    <>
-
-
- setImageLoaded(true)} - className={cn("size-44", isMobile && "w-full")} - src={`${baseUrl}clips/faces/train/${data.filename}`} - onClick={(e) => { - e.stopPropagation(); - onClick(data, e.metaKey || e.ctrlKey); - }} - /> - {imageArea != undefined && ( -
- {t("pixels", { area: imageArea })} -
- )} -
-
-
-
-
- {data.name == "unknown" ? t("details.unknown") : data.name} -
-
- {Math.round(data.score * 100)}% -
-
-
- - - - - - onReprocess()} - /> - - {t("button.reprocessFace")} - -
-
-
-
- + { + if (data) { + onClickFaces([data.filename], true); + } else { + handleClickEvent(true); + } + }} + onSelectEvent={onSelectEvent} + > + {(data) => ( + <> + onTrainAttempt(data, name)} + > + + + + + onReprocess(data)} + /> + + {t("button.reprocessFace")} + + + )} + ); } @@ -1093,80 +939,32 @@ function FaceGrid({ )} > {sortedFaces.map((image: string) => ( - + i18nLibrary="views/faceLibrary" + onClick={(data, meta) => onClickFaces([data.filename], meta)} + > + + + { + e.stopPropagation(); + onDelete(pageToggle, [image]); + }} + /> + + {t("button.deleteFaceAttempts")} + + ))} ); } - -type FaceImageProps = { - name: string; - image: string; - selected: boolean; - onClickFaces: (images: string[], ctrl: boolean) => void; - onDelete: (name: string, ids: string[]) => void; -}; -function FaceImage({ - name, - image, - selected, - onClickFaces, - onDelete, -}: FaceImageProps) { - const { t } = useTranslation(["views/faceLibrary"]); - - return ( -
{ - e.stopPropagation(); - onClickFaces([image], e.ctrlKey || e.metaKey); - }} - > -
- -
-
-
-
-
{name}
-
-
- - - { - e.stopPropagation(); - onDelete(name, [image]); - }} - /> - - {t("button.deleteFaceAttempts")} - -
-
-
-
-  );
-}
diff --git a/web/src/types/classification.ts b/web/src/types/classification.ts
index 54320175a..092021342 100644
--- a/web/src/types/classification.ts
+++ b/web/src/types/classification.ts
@@ -6,3 +6,17 @@ export type TrainFilter = {
   min_score?: number;
   max_score?: number;
 };
+
+export type ClassificationItemData = {
+  filepath: string;
+  filename: string;
+  name: string;
+  timestamp?: number;
+  eventId?: string;
+  score?: number;
+};
+
+export type ClassificationThreshold = {
+  recognition: number;
+  unknown: number;
+};
diff --git a/web/src/types/face.ts b/web/src/types/face.ts
index dc481b64f..3ba812c86 100644
--- a/web/src/types/face.ts
+++ b/web/src/types/face.ts
@@ -1,11 +1,3 @@
 export type FaceLibraryData = {
   [faceName: string]: string[];
 };
-
-export type RecognizedFaceData = {
-  filename: string;
-  timestamp: number;
-  eventId: string;
-  name: string;
-  score: number;
-};
diff --git a/web/src/views/classification/ModelSelectionView.tsx b/web/src/views/classification/ModelSelectionView.tsx
index 2dd7e0375..b91ea5648 100644
--- a/web/src/views/classification/ModelSelectionView.tsx
+++ b/web/src/views/classification/ModelSelectionView.tsx
@@ -38,7 +38,11 @@ export default function ModelSelectionView({
   return (
{classificationConfigs.map((config) => ( - onClick(config)} /> + onClick(config)} + /> ))}
   );
 }
diff --git a/web/src/views/classification/ModelTrainingView.tsx b/web/src/views/classification/ModelTrainingView.tsx
index 7fe241496..d3fd5b40e 100644
--- a/web/src/views/classification/ModelTrainingView.tsx
+++ b/web/src/views/classification/ModelTrainingView.tsx
@@ -1,4 +1,3 @@
-import { baseUrl } from "@/api/baseUrl";
 import TextEntryDialog from "@/components/overlay/dialog/TextEntryDialog";
 import { Button, buttonVariants } from "@/components/ui/button";
 import {
@@ -60,7 +59,16 @@ import { IoMdArrowRoundBack } from "react-icons/io";
 import { MdAutoFixHigh } from "react-icons/md";
 import TrainFilterDialog from "@/components/overlay/dialog/TrainFilterDialog";
 import useApiFilter from "@/hooks/use-api-filter";
-import { TrainFilter } from "@/types/classification";
+import { ClassificationItemData, TrainFilter } from "@/types/classification";
+import {
+  ClassificationCard,
+  GroupedClassificationCard,
+} from "@/components/card/ClassificationCard";
+import { Event } from "@/types/event";
+import SearchDetailDialog, {
+  SearchTab,
+} from "@/components/overlay/detail/SearchDetailDialog";
+import { SearchResult } from "@/types/search";
 
 type ModelTrainingViewProps = {
   model: CustomClassificationModelConfig;
@@ -626,53 +634,34 @@ function DatasetGrid({
         className="scrollbar-container flex flex-wrap gap-2 overflow-y-auto p-2"
       >
         {classData.map((image) => (
-
{ - e.stopPropagation(); - - if (e.ctrlKey || e.metaKey) { - onClickImages([image], true); - } + onClickImages([data.filename], true)} > -
- -
-
-
-
- - - { - e.stopPropagation(); - onDelete([image]); - }} - /> - - - {t("button.deleteClassificationAttempts")} - - -
-
-
-
+
+
+              {
+                e.stopPropagation();
+                onDelete([image]);
+              }}
+            />
+
+
+            {t("button.deleteClassificationAttempts")}
+
+
+
+        ))}
   );
 }
@@ -700,20 +689,19 @@ function TrainGrid({
   onRefresh,
   onDelete,
 }: TrainGridProps) {
-  const { t } = useTranslation(["views/classificationModel"]);
-
-  const trainData = useMemo(
+  const trainData = useMemo<ClassificationItemData[]>(
     () =>
       trainImages
        .map((raw) => {
          const parts = raw.replaceAll(".webp", "").split("-");
-          const rawScore = Number.parseFloat(parts[2]);
+          const rawScore = Number.parseFloat(parts[4]);
 
          return {
-            raw,
-            timestamp: parts[0],
-            label: parts[1],
-            score: rawScore * 100,
-            truePositive: rawScore >= model.threshold,
+            filename: raw,
+            filepath: `clips/${model.name}/train/${raw}`,
+            timestamp: Number.parseFloat(parts[2]),
+            eventId: `${parts[0]}-${parts[1]}`,
+            name: parts[3],
+            score: rawScore,
          };
        })
        .filter((data) => {
          if (!trainFilter) {
            return true;
          }
 
-          if (
-            trainFilter.classes &&
-            !trainFilter.classes.includes(data.label)
-          ) {
+          if (trainFilter.classes && !trainFilter.classes.includes(data.name)) {
            return false;
          }
 
@@ -744,10 +729,68 @@ function TrainGrid({
          return true;
        })
-        .sort((a, b) => b.timestamp.localeCompare(a.timestamp)),
+        .sort((a, b) => b.timestamp - a.timestamp),
     [model, trainImages, trainFilter],
   );
 
+  if (model.state_config) {
+    return (
+
+    );
+  }
+
+  return (
+
+  );
+}
+
+type StateTrainGridProps = {
+  model: CustomClassificationModelConfig;
+  contentRef: MutableRefObject<HTMLDivElement | null>;
+  classes: string[];
+  trainData?: ClassificationItemData[];
+  selectedImages: string[];
+  onClickImages: (images: string[], ctrl: boolean) => void;
+  onRefresh: () => void;
+  onDelete: (ids: string[]) => void;
+};
+function StateTrainGrid({
+  model,
+  contentRef,
+  classes,
+  trainData,
+  selectedImages,
+  onClickImages,
+  onRefresh,
+  onDelete,
+}: StateTrainGridProps) {
+  const { t } = useTranslation(["views/classificationModel"]);
+
+  const threshold = useMemo(() => {
+    return {
+      recognition: model.threshold,
+      unknown: model.threshold,
+    };
+  }, [model]);
+
+  return (
{trainData?.map((data) => ( -
{ - e.stopPropagation(); - onClickImages([data.raw], e.ctrlKey || e.metaKey); - }} + onClickImages([data.filename], meta)} > -
- -
-
-
-
-
- {data.label.replaceAll("_", " ")} -
-
- {data.score}% -
-
-
- - - - - - { - e.stopPropagation(); - onDelete([data.raw]); - }} - /> - - - {t("button.deleteClassificationAttempts")} - - -
-
-
-
+
+        ))}
+
+  );
+}
+
+type ObjectTrainGridProps = {
+  model: CustomClassificationModelConfig;
+  contentRef: MutableRefObject<HTMLDivElement | null>;
+  classes: string[];
+  trainData?: ClassificationItemData[];
+  selectedImages: string[];
+  onClickImages: (images: string[], ctrl: boolean) => void;
+  onRefresh: () => void;
+  onDelete: (ids: string[]) => void;
+};
+function ObjectTrainGrid({
+  model,
+  contentRef,
+  classes,
+  trainData,
+  selectedImages,
+  onClickImages,
+  onRefresh,
+  onDelete,
+}: ObjectTrainGridProps) {
+  const { t } = useTranslation(["views/classificationModel"]);
+
+  // item data
+
+  const groups = useMemo(() => {
+    const groups: { [eventId: string]: ClassificationItemData[] } = {};
+
+    trainData
+      ?.sort((a, b) => a.eventId!.localeCompare(b.eventId!))
+      .reverse()
+      .forEach((data) => {
+        if (groups[data.eventId!]) {
+          groups[data.eventId!].push(data);
+        } else {
+          groups[data.eventId!] = [data];
+        }
+      });
+
+    return groups;
+  }, [trainData]);
+
+  const eventIdsQuery = useMemo(() => Object.keys(groups).join(","), [groups]);
+
+  const { data: events } = useSWR<Event[]>([
+    "event_ids",
+    { ids: eventIdsQuery },
+  ]);
+
+  const threshold = useMemo(() => {
+    return {
+      recognition: model.threshold,
+      unknown: model.threshold,
+    };
+  }, [model]);
+
+  // selection
+
+  const [selectedEvent, setSelectedEvent] = useState<Event>();
+  const [dialogTab, setDialogTab] = useState<SearchTab>("details");
+
+  // handlers
+
+  const handleClickEvent = useCallback(
+    (
+      group: ClassificationItemData[],
+      event: Event | undefined,
+      meta: boolean,
+    ) => {
+      if (event && selectedImages.length == 0 && !meta) {
+        setSelectedEvent(event);
+      } else {
+        const anySelected =
+          group.find((item) => selectedImages.includes(item.filename)) !=
+          undefined;
+
+        if (anySelected) {
+          // deselect all
+          const toDeselect: string[] = [];
+          group.forEach((item) => {
+            if (selectedImages.includes(item.filename)) {
+              toDeselect.push(item.filename);
+            }
+          });
+          onClickImages(toDeselect, false);
+        } else {
+          // select all
+          onClickImages(
+            group.map((item) => item.filename),
+            true,
+          );
+        }
+      }
+    },
+    [selectedImages, onClickImages],
+  );
+
+  return (
+    <>
+ setSelectedEvent(search as unknown as Event)}
+        setInputFocused={() => {}}
+      />
+
+ {Object.entries(groups).map(([key, group]) => { + const event = events?.find((ev) => ev.id == key); + return ( + { + if (data) { + onClickImages([data.filename], true); + } else { + handleClickEvent(group, event, true); + } + }} + onSelectEvent={() => {}} + > + {(data) => ( + <> + + + + + + { + e.stopPropagation(); + onDelete([data.filename]); + }} + /> + + + {t("button.deleteClassificationAttempts")} + + + + )} + + ); + })} +
+ + ); +}