use objects instead of labels

This commit is contained in:
Josh Hawkins 2024-09-25 10:30:11 -05:00
parent 9088359ccd
commit 543f09606c
4 changed files with 8 additions and 8 deletions

View File

@ -128,7 +128,7 @@ genai:
car: "Label the primary vehicle in these images with just the name of the company if it is a delivery vehicle, or the color make and model." car: "Label the primary vehicle in these images with just the name of the company if it is a delivery vehicle, or the color make and model."
``` ```
Prompts can also be overridden at the camera level to provide a more detailed prompt to the model about your specific camera, if you desire. By default, descriptions will be generated for all tracked labels and all zones. But you can also optionally specify `labels` and `required_zones` to only generate descriptions for certain labels or zones. Prompts can also be overridden at the camera level to provide a more detailed prompt to the model about your specific camera, if you desire. By default, descriptions will be generated for all tracked objects and all zones. But you can also optionally specify `objects` and `required_zones` to only generate descriptions for certain tracked objects or zones.
```yaml ```yaml
cameras: cameras:
@ -138,7 +138,7 @@ cameras:
object_prompts: object_prompts:
person: "Describe the main person in these images (gender, age, clothing, activity, etc). Do not include where the activity is occurring (sidewalk, concrete, driveway, etc). If delivering a package, include the company the package is from." person: "Describe the main person in these images (gender, age, clothing, activity, etc). Do not include where the activity is occurring (sidewalk, concrete, driveway, etc). If delivering a package, include the company the package is from."
cat: "Describe the cat in these images (color, size, tail). Indicate whether or not the cat is by the flower pots. If the cat is chasing a mouse, make up a name for the mouse." cat: "Describe the cat in these images (color, size, tail). Indicate whether or not the cat is by the flower pots. If the cat is chasing a mouse, make up a name for the mouse."
labels: objects:
- person - person
- cat - cat
required_zones: required_zones:

View File

@ -810,8 +810,8 @@ class GenAICameraConfig(BaseModel):
title="Default caption prompt.", title="Default caption prompt.",
) )
object_prompts: Dict[str, str] = Field(default={}, title="Object specific prompts.") object_prompts: Dict[str, str] = Field(default={}, title="Object specific prompts.")
labels: Optional[List[str]] = Field( objects: Dict[str, int] = Field(
default=None, title="Labels to run generative AI for." default_factory=dict, title="Objects to run generative AI for."
) )
required_zones: Union[str, List[str]] = Field( required_zones: Union[str, List[str]] = Field(
default_factory=list, default_factory=list,

View File

@ -128,12 +128,12 @@ class EmbeddingMaintainer(threading.Thread):
and self.genai_client is not None and self.genai_client is not None
and event.data.get("description") is None and event.data.get("description") is None
and ( and (
camera_config.genai.labels is None camera_config.genai.objects is None
or event.label in camera_config.genai.labels or event.label in camera_config.genai.objects
) )
and ( and (
camera_config.genai.required_zones is None camera_config.genai.required_zones is None
or set(event.zones) & set(camera_config.genai.zones) or set(event.zones) & set(camera_config.genai.required_zones)
) )
): ):
# Generate the description. Call happens in a thread since it is network bound. # Generate the description. Call happens in a thread since it is network bound.

View File

@ -307,7 +307,7 @@ export interface FrigateConfig {
prompt: string; prompt: string;
object_prompts: { [key: string]: string }; object_prompts: { [key: string]: string };
required_zones: string[]; required_zones: string[];
labels: string[]; objects: string[];
}; };
go2rtc: { go2rtc: {