Fix classification/face batch actions and snapshot handling

This commit is contained in:
Teagan glenn 2026-02-21 22:41:47 -07:00
parent d175c9758e
commit ae6d6ce2e5
3 changed files with 104 additions and 142 deletions

View File

@@ -970,6 +970,17 @@ def categorize_classification_image(request: Request, name: str, body: dict = No
snapshot = get_event_snapshot(event) snapshot = get_event_snapshot(event)
if snapshot is None:
return JSONResponse(
content=(
{
"success": False,
"message": f"Failed to read snapshot for event {event_id}.",
}
),
status_code=500,
)
# Get object bounding box for the first detection # Get object bounding box for the first detection
if not event.data.get("attributes") or len(event.data["attributes"]) == 0: if not event.data.get("attributes") or len(event.data["attributes"]) == 0:
return JSONResponse( return JSONResponse(
@@ -987,19 +998,7 @@ def categorize_classification_image(request: Request, name: str, body: dict = No
try: try:
# Extract the crop from the snapshot # Extract the crop from the snapshot
detect_config: DetectConfig = config.cameras[event.camera].detect frame = snapshot
frame = cv2.imread(snapshot)
if frame is None:
return JSONResponse(
content=(
{
"success": False,
"message": f"Failed to read snapshot for event {event_id}.",
}
),
status_code=500,
)
height, width = frame.shape[:2] height, width = frame.shape[:2]

View File

@@ -410,25 +410,21 @@ export default function FaceLibrary() {
<FaceSelectionDialog <FaceSelectionDialog
faceNames={faces} faceNames={faces}
onTrainAttempt={(name) => { onTrainAttempt={(name) => {
// Batch train all selected faces const requests = selectedFaces.map((filename) =>
let successCount = 0;
let failCount = 0;
const totalCount = selectedFaces.length;
selectedFaces.forEach((filename, index) => {
axios axios
.post(`/faces/train/${name}/classify`, { .post(`/faces/train/${name}/classify`, {
training_file: filename, training_file: filename,
}) })
.then((resp) => { .then(() => true)
if (resp.status == 200) { .catch(() => false),
successCount++; );
} else {
failCount++; Promise.allSettled(requests).then((results) => {
} const successCount = results.filter(
(result) => result.status === "fulfilled" && result.value,
).length;
const totalCount = results.length;
// Show final toast after all requests complete
if (index === totalCount - 1) {
if (successCount === totalCount) { if (successCount === totalCount) {
toast.success( toast.success(
t("toast.success.batchTrainedFaces", { t("toast.success.batchTrainedFaces", {
@@ -458,25 +454,9 @@ export default function FaceLibrary() {
}, },
); );
} }
setSelectedFaces([]); setSelectedFaces([]);
refreshFaces(); refreshFaces();
}
})
.catch(() => {
failCount++;
if (index === totalCount - 1) {
toast.error(
t("toast.error.batchTrainFailed", {
count: totalCount,
}),
{
position: "top-center",
},
);
setSelectedFaces([]);
refreshFaces();
}
});
}); });
}} }}
> >

View File

@@ -460,31 +460,30 @@ export default function ModelTrainingView({ model }: ModelTrainingViewProps) {
</div> </div>
{pageToggle === "train" && ( {pageToggle === "train" && (
<ClassificationSelectionDialog <ClassificationSelectionDialog
classes={classes} classes={Object.keys(dataset || {})}
modelName={model.name} modelName={model.name}
image={selectedImages[0]} image={selectedImages[0]}
onRefresh={refreshAll} onRefresh={refreshAll}
onCategorize={(category) => { onCategorize={(category) => {
// Batch categorize all selected images const requests = selectedImages.map((filename) =>
let successCount = 0;
let failCount = 0;
const totalCount = selectedImages.length;
selectedImages.forEach((filename, index) => {
axios axios
.post(`/classification/${model.name}/dataset/categorize`, { .post(
`/classification/${model.name}/dataset/categorize`,
{
category, category,
training_file: filename, training_file: filename,
}) },
.then((resp) => { )
if (resp.status == 200) { .then(() => true)
successCount++; .catch(() => false),
} else { );
failCount++;
} Promise.allSettled(requests).then((results) => {
const successCount = results.filter(
(result) => result.status === "fulfilled" && result.value,
).length;
const totalCount = results.length;
// Show final toast after all requests complete
if (index === totalCount - 1) {
if (successCount === totalCount) { if (successCount === totalCount) {
toast.success( toast.success(
t("toast.success.batchCategorized", { t("toast.success.batchCategorized", {
@@ -514,25 +513,9 @@ export default function ModelTrainingView({ model }: ModelTrainingViewProps) {
}, },
); );
} }
setSelectedImages([]); setSelectedImages([]);
refreshAll(); refreshAll();
}
})
.catch(() => {
failCount++;
if (index === totalCount - 1) {
toast.error(
t("toast.error.batchCategorizeFailed", {
count: totalCount,
}),
{
position: "top-center",
},
);
setSelectedImages([]);
refreshAll();
}
});
}); });
}} }}
> >