From 50b5d40c108e46b6d81de6dafe2f03d0f456b1aa Mon Sep 17 00:00:00 2001 From: Blake Blackshear Date: Sun, 6 Feb 2022 08:40:24 -0600 Subject: [PATCH 01/15] add stacktrace to config validation errors --- frigate/app.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/frigate/app.py b/frigate/app.py index a5f577b53..bf593d6ac 100644 --- a/frigate/app.py +++ b/frigate/app.py @@ -8,6 +8,7 @@ import threading from logging.handlers import QueueHandler from typing import Dict, List +import traceback import yaml from peewee_migrate import Router from playhouse.sqlite_ext import SqliteExtDatabase @@ -320,6 +321,7 @@ class FrigateApp: print("*** Config Validation Errors ***") print("*************************************************************") print(e) + print(traceback.format_exc()) print("*************************************************************") print("*** End Config Validation Errors ***") print("*************************************************************") From 3600ebca3982ffdeebd73bd17f3662a7f71e217b Mon Sep 17 00:00:00 2001 From: Blake Blackshear Date: Sun, 6 Feb 2022 08:46:41 -0600 Subject: [PATCH 02/15] adjust error messages on ffmpeg crash --- frigate/video.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/frigate/video.py b/frigate/video.py index a6a447aba..c7e4ef835 100755 --- a/frigate/video.py +++ b/frigate/video.py @@ -153,10 +153,10 @@ def capture_frames( try: frame_buffer[:] = ffmpeg_process.stdout.read(frame_size) except Exception as e: - logger.info(f"{camera_name}: ffmpeg sent a broken frame. {e}") + logger.error(f"{camera_name}: Unable to read frames from ffmpeg process.") if ffmpeg_process.poll() != None: - logger.info( + logger.error( f"{camera_name}: ffmpeg process is not running. exiting capture thread..." ) frame_manager.delete(frame_name) @@ -221,12 +221,11 @@ class CameraWatchdog(threading.Thread): if not self.capture_thread.is_alive(): self.logger.error( - f"FFMPEG process crashed unexpectedly for {self.camera_name}." + f"Ffmpeg process crashed unexpectedly for {self.camera_name}." ) self.logger.error( "The following ffmpeg logs include the last 100 lines prior to exit." 
) - self.logger.error("You may have invalid args defined for this camera.") self.logpipe.dump() self.start_ffmpeg_detect() elif now - self.capture_thread.current_frame.value > 20: From 499f75e165f073b6c83590d4bc1568ed8575a7fd Mon Sep 17 00:00:00 2001 From: Blake Blackshear Date: Sun, 6 Feb 2022 09:49:01 -0600 Subject: [PATCH 03/15] set has_clip to false when recordings fail --- frigate/http.py | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/frigate/http.py b/frigate/http.py index ea9a4166b..bbbf3c7a6 100644 --- a/frigate/http.py +++ b/frigate/http.py @@ -364,7 +364,13 @@ def best(camera_name, label): box_size = 300 box = best_object.get("box", (0, 0, box_size, box_size)) region = calculate_region( - best_frame.shape, box[0], box[1], box[2], box[3], box_size, multiplier=1.1 + best_frame.shape, + box[0], + box[1], + box[2], + box[3], + box_size, + multiplier=1.1, ) best_frame = best_frame[region[1] : region[3], region[0] : region[2]] @@ -711,7 +717,15 @@ def vod_event(id): end_ts = ( datetime.now().timestamp() if event.end_time is None else event.end_time ) - return vod_ts(event.camera, event.start_time, end_ts) + vod_response = vod_ts(event.camera, event.start_time, end_ts) + # If the recordings are not found, set has_clip to false + if ( + type(vod_response) == tuple + and len(vod_response) == 2 + and vod_response[1] == 404 + ): + Event.update(has_clip=False).where(Event.id == id).execute() + return vod_response duration = int((event.end_time - event.start_time) * 1000) return jsonify( From 2d5ec25dcac5ec72e9671fbb1c76636fa87256d5 Mon Sep 17 00:00:00 2001 From: Blake Blackshear Date: Sun, 6 Feb 2022 09:56:06 -0600 Subject: [PATCH 04/15] invert active_count logic --- frigate/record.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frigate/record.py b/frigate/record.py index 5a3b7ab1d..0a9fd5caa 100644 --- a/frigate/record.py +++ b/frigate/record.py @@ -230,7 +230,7 @@ class RecordingMaintainer(threading.Thread): [ o for o in frame[1] - if not o["false_positive"] and o["motionless_count"] > 0 + if not o["false_positive"] and o["motionless_count"] == 0 ] ) From 5a2076fcab408a0758d74abba807210cd8dc82a0 Mon Sep 17 00:00:00 2001 From: Blake Blackshear Date: Sun, 6 Feb 2022 10:12:49 -0600 Subject: [PATCH 05/15] improve warning for retain modes --- frigate/config.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/frigate/config.py b/frigate/config.py index 048d48ce7..50da2c561 100644 --- a/frigate/config.py +++ b/frigate/config.py @@ -836,14 +836,18 @@ class FrigateConfig(FrigateBaseModel): camera_config.record.retain.days = camera_config.record.retain_days # warning if the higher level record mode is potentially more restrictive than the events + rank_map = { + RetainModeEnum.all: 0, + RetainModeEnum.motion: 1, + RetainModeEnum.active_objects: 2, + } if ( camera_config.record.retain.days != 0 - and camera_config.record.retain.mode != RetainModeEnum.all - and camera_config.record.events.retain.mode - != camera_config.record.retain.mode + and rank_map[camera_config.record.retain.mode] + > rank_map[camera_config.record.events.retain.mode] ): logger.warning( - f"Recording retention is configured for {camera_config.record.retain.mode} and event retention is configured for {camera_config.record.events.retain.mode}. The more restrictive retention policy will be applied." 
+ f"{name}: Recording retention is configured for {camera_config.record.retain.mode} and event retention is configured for {camera_config.record.events.retain.mode}. The more restrictive retention policy will be applied." ) # generage the ffmpeg commands camera_config.create_ffmpeg_cmds() From 1a3f21e5c141257aa72a93ecc67bc4bb57d0e58c Mon Sep 17 00:00:00 2001 From: Blake Blackshear Date: Sun, 6 Feb 2022 13:28:45 -0600 Subject: [PATCH 06/15] note for future --- frigate/record.py | 1 + 1 file changed, 1 insertion(+) diff --git a/frigate/record.py b/frigate/record.py index 0a9fd5caa..5409d12c1 100644 --- a/frigate/record.py +++ b/frigate/record.py @@ -285,6 +285,7 @@ class RecordingMaintainer(threading.Thread): end_time=end_time.timestamp(), duration=duration, motion=motion_count, + # TODO: update this to store list of active objects at some point objects=active_count, ) except Exception as e: From f57501d033915113b8631e013f22b63a2ca99777 Mon Sep 17 00:00:00 2001 From: Blake Blackshear Date: Sun, 6 Feb 2022 13:28:53 -0600 Subject: [PATCH 07/15] avoid rare divide by zero --- frigate/util.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/frigate/util.py b/frigate/util.py index 85835e933..f11c0b0f9 100755 --- a/frigate/util.py +++ b/frigate/util.py @@ -567,6 +567,9 @@ class EventsPerSecond: # compute the (approximate) events in the last n seconds now = datetime.datetime.now().timestamp() seconds = min(now - self._start, last_n_seconds) + # avoid divide by zero + if seconds == 0: + seconds = 1 return ( len([t for t in self._timestamps if t > (now - last_n_seconds)]) / seconds ) @@ -601,6 +604,7 @@ def add_mask(mask, mask_img): ) cv2.fillPoly(mask_img, pts=[contour], color=(0)) + def load_labels(path, encoding="utf-8"): """Loads labels from file (with or without index numbers). Args: @@ -620,6 +624,7 @@ def load_labels(path, encoding="utf-8"): else: return {index: line.strip() for index, line in enumerate(lines)} + class FrameManager(ABC): @abstractmethod def create(self, name, size) -> AnyStr: From 47e0e1d221f8933c23b2e3276c440185e755b714 Mon Sep 17 00:00:00 2001 From: Blake Blackshear Date: Sun, 6 Feb 2022 13:46:57 -0600 Subject: [PATCH 08/15] add example for ios camera live feed notification --- docs/docs/guides/ha_notifications.md | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/docs/docs/guides/ha_notifications.md b/docs/docs/guides/ha_notifications.md index b8070a90c..179a9bbea 100644 --- a/docs/docs/guides/ha_notifications.md +++ b/docs/docs/guides/ha_notifications.md @@ -25,6 +25,30 @@ automation: when: '{{trigger.payload_json["after"]["start_time"]|int}}' ``` +Note that iOS devices support live previews of cameras by adding a camera entity id to the message data. + +```yaml +automation: + - alias: Security_Frigate_Notifications + description: "" + trigger: + - platform: mqtt + topic: frigate/events + payload: new + value_template: "{{ value_json.type }}" + action: + - service: notify.mobile_app_iphone + data: + message: 'A {{trigger.payload_json["after"]["label"]}} was detected.' + data: + image: >- + https://your.public.hass.address.com/api/frigate/notifications/{{trigger.payload_json["after"]["id"]}}/thumbnail.jpg + tag: '{{trigger.payload_json["after"]["id"]}}' + when: '{{trigger.payload_json["after"]["start_time"]|int}}' + entity_id: camera.{{trigger.payload_json["after"]["camera"]}} + mode: single +``` + ## Conditions Conditions with the `before` and `after` values allow a high degree of customization for automations. 
From 5e156f815132bf4ad566e982c9c2b5a5ca0b2760 Mon Sep 17 00:00:00 2001 From: Blake Blackshear Date: Sun, 6 Feb 2022 13:56:09 -0600 Subject: [PATCH 09/15] update addon urls --- docs/docs/integrations/home-assistant.md | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/docs/docs/integrations/home-assistant.md b/docs/docs/integrations/home-assistant.md index 165eef1c4..8a5a7d0d7 100644 --- a/docs/docs/integrations/home-assistant.md +++ b/docs/docs/integrations/home-assistant.md @@ -45,11 +45,14 @@ that card. ## Configuration -When configuring the integration, you will be asked for the following parameters: +When configuring the integration, you will be asked for the `URL` of your frigate instance which is the URL you use to access Frigate in the browser. This may look like `http://:5000/`. If you are using HassOS with the addon, the URL should be one of the following depending on which addon version you are using: -| Variable | Description | -| -------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| URL | The `URL` of your frigate instance, the URL you use to access Frigate in the browser. This may look like `http://:5000/`. If you are using HassOS with the addon, the URL should be `http://ccab4aaf-frigate:5000` (or `http://ccab4aaf-frigate-beta:5000` if your are using the beta version of the addon). Live streams required port 1935, see [RTMP streams](#streams) | +| Addon Version | URL | +| ------------------------------ | -------------------------------------- | +| Frigate NVR | `http://ccab4aaf-frigate:5000` | +| Frigate NVR (Full Access) | `http://ccab4aaf-frigate-fa:5000` | +| Frigate NVR Beta | `http://ccab4aaf-frigate-beta:5000` | +| Frigate NVR Beta (Full Access) | `http://ccab4aaf-frigate-fa-beta:5000` | From 02c91d4c5162c3d330ae7718249222159b2fc236 Mon Sep 17 00:00:00 2001 From: Blake Blackshear Date: Sun, 6 Feb 2022 14:00:27 -0600 Subject: [PATCH 10/15] clarify that zones are based on the bottom center --- docs/docs/configuration/index.md | 2 +- docs/docs/configuration/zones.md | 4 +++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/docs/docs/configuration/index.md b/docs/docs/configuration/index.md index 81f5001e3..7dd0194d7 100644 --- a/docs/docs/configuration/index.md +++ b/docs/docs/configuration/index.md @@ -381,7 +381,7 @@ cameras: # camera. front_steps: # Required: List of x,y coordinates to define the polygon of the zone. - # NOTE: Coordinates can be generated at https://www.image-map.net/ + # NOTE: Presence in a zone is evaluated only based on the bottom center of the objects bounding box. coordinates: 545,1077,747,939,788,805 # Optional: List of objects that can trigger this zone (default: all tracked objects) objects: diff --git a/docs/docs/configuration/zones.md b/docs/docs/configuration/zones.md index a4cfa3bb4..e4453bad1 100644 --- a/docs/docs/configuration/zones.md +++ b/docs/docs/configuration/zones.md @@ -3,7 +3,9 @@ id: zones title: Zones --- -Zones allow you to define a specific area of the frame and apply additional filters for object types so you can determine whether or not an object is within a particular area. Zones cannot have the same name as a camera. 
If desired, a single zone can include multiple cameras if you have multiple cameras covering the same area by configuring zones with the same name for each camera. +Zones allow you to define a specific area of the frame and apply additional filters for object types so you can determine whether or not an object is within a particular area. Presence in a zone is evaluated based on the bottom center of the bounding box for the object. It does not matter how much of the bounding box overlaps with the zone. + +Zones cannot have the same name as a camera. If desired, a single zone can include multiple cameras if you have multiple cameras covering the same area by configuring zones with the same name for each camera. During testing, enable the Zones option for the debug feed so you can adjust as needed. The zone line will increase in thickness when any object enters the zone. From acc1022998938d69f62c4205bea0e740133c25a6 Mon Sep 17 00:00:00 2001 From: Blake Blackshear Date: Sun, 6 Feb 2022 14:02:18 -0600 Subject: [PATCH 11/15] remove outdated output args tip --- docs/docs/configuration/nvdec.md | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/docs/docs/configuration/nvdec.md b/docs/docs/configuration/nvdec.md index 52e56cd1f..69889241b 100644 --- a/docs/docs/configuration/nvdec.md +++ b/docs/docs/configuration/nvdec.md @@ -97,15 +97,3 @@ processes: | 0 N/A N/A 12827 C ffmpeg 417MiB | +-----------------------------------------------------------------------------+ ``` - -To further improve performance, you can set ffmpeg to skip frames in the output, -using the fps filter: - -```yaml -output_args: - - -filter:v - - fps=fps=5 -``` - -This setting, for example, allows Frigate to consume my 10-15fps camera streams on -my relatively low powered Haswell machine with relatively low cpu usage. From 4e239674423b51315b70e72aefb64b2897e237e2 Mon Sep 17 00:00:00 2001 From: Blake Blackshear Date: Sun, 6 Feb 2022 14:15:06 -0600 Subject: [PATCH 12/15] clarify addon versions --- docs/docs/installation.md | 9 +++++++++ docs/docs/integrations/home-assistant.md | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/docs/docs/installation.md b/docs/docs/installation.md index e18b7c18d..fc236f3f7 100644 --- a/docs/docs/installation.md +++ b/docs/docs/installation.md @@ -177,6 +177,15 @@ HassOS users can install via the addon repository. 6. Start the addon container 7. (not for proxy addon) If you are using hardware acceleration for ffmpeg, you may need to disable "Protection mode" +There are several versions of the addon available: + +| Addon Version | Description | +| ------------------------------ | ---------------------------------------------------------- | +| Frigate NVR | Current release with protection mode on | +| Frigate NVR (Full Access) | Current release with the option to disable protection mode | +| Frigate NVR Beta | Beta release with protection mode on | +| Frigate NVR Beta (Full Access) | Beta release with the option to disable protection mode | + ## Home Assistant Supervised :::tip diff --git a/docs/docs/integrations/home-assistant.md b/docs/docs/integrations/home-assistant.md index 8a5a7d0d7..ae6d40344 100644 --- a/docs/docs/integrations/home-assistant.md +++ b/docs/docs/integrations/home-assistant.md @@ -45,7 +45,7 @@ that card. ## Configuration -When configuring the integration, you will be asked for the `URL` of your frigate instance which is the URL you use to access Frigate in the browser. This may look like `http://:5000/`. 
If you are using HassOS with the addon, the URL should be one of the following depending on which addon version you are using: +When configuring the integration, you will be asked for the `URL` of your frigate instance which is the URL you use to access Frigate in the browser. This may look like `http://:5000/`. If you are using HassOS with the addon, the URL should be one of the following depending on which addon version you are using. Note that if you are using the Proxy Addon, you do NOT point the integration at the proxy URL. Just enter the URL used to access frigate directly from your network. | Addon Version | URL | | ------------------------------ | -------------------------------------- | From 24f9937009ebb6795f7736a5168210c2ec01eefd Mon Sep 17 00:00:00 2001 From: Blake Blackshear Date: Sun, 6 Feb 2022 14:15:19 -0600 Subject: [PATCH 13/15] fix resolution on reolink example --- docs/docs/configuration/camera_specific.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/docs/configuration/camera_specific.md b/docs/docs/configuration/camera_specific.md index b737a5605..a82a58f84 100644 --- a/docs/docs/configuration/camera_specific.md +++ b/docs/docs/configuration/camera_specific.md @@ -61,8 +61,8 @@ cameras: roles: - detect detect: - width: 640 - height: 480 + width: 896 + height: 672 fps: 7 ``` From 9a0d27676123d5668006b58446b2b59644207572 Mon Sep 17 00:00:00 2001 From: Blake Blackshear Date: Sun, 6 Feb 2022 14:49:54 -0600 Subject: [PATCH 14/15] allow motion based retention when detect is disabled --- frigate/video.py | 409 ++++++++++++++++++++++++----------------------- 1 file changed, 208 insertions(+), 201 deletions(-) diff --git a/frigate/video.py b/frigate/video.py index c7e4ef835..cb201fd22 100755 --- a/frigate/video.py +++ b/frigate/video.py @@ -491,212 +491,219 @@ def process_frames( logger.info(f"{camera_name}: frame {frame_time} is not in memory store.") continue - if not detection_enabled.value: - fps.value = fps_tracker.eps() - object_tracker.match_and_update(frame_time, []) - detected_objects_queue.put( - (camera_name, frame_time, object_tracker.tracked_objects, [], []) - ) - detection_fps.value = object_detector.fps.eps() - frame_manager.close(f"{camera_name}{frame_time}") - continue - # look for motion motion_boxes = motion_detector.detect(frame) - # get stationary object ids - # check every Nth frame for stationary objects - # disappeared objects are not stationary - # also check for overlapping motion boxes - stationary_object_ids = [ - obj["id"] - for obj in object_tracker.tracked_objects.values() - # if there hasn't been motion for 10 frames - if obj["motionless_count"] >= 10 - # and it isn't due for a periodic check - and ( - detect_config.stationary_interval == 0 - or obj["motionless_count"] % detect_config.stationary_interval != 0 - ) - # and it hasn't disappeared - and object_tracker.disappeared[obj["id"]] == 0 - # and it doesn't overlap with any current motion boxes - and not intersects_any(obj["box"], motion_boxes) - ] + regions = [] - # get tracked object boxes that aren't stationary - tracked_object_boxes = [ - obj["box"] - for obj in object_tracker.tracked_objects.values() - if not obj["id"] in stationary_object_ids - ] - - # combine motion boxes with known locations of existing objects - combined_boxes = reduce_boxes(motion_boxes + tracked_object_boxes) - - region_min_size = max(model_shape[0], model_shape[1]) - # compute regions - regions = [ - calculate_region( - frame_shape, - a[0], - a[1], - a[2], - a[3], - region_min_size, - 
multiplier=random.uniform(1.2, 1.5), - ) - for a in combined_boxes - ] - - # consolidate regions with heavy overlap - regions = [ - calculate_region( - frame_shape, a[0], a[1], a[2], a[3], region_min_size, multiplier=1.0 - ) - for a in reduce_boxes(regions, 0.4) - ] - - # if starting up, get the next startup scan region - if startup_scan_counter < 9: - ymin = int(frame_shape[0] / 3 * startup_scan_counter / 3) - ymax = int(frame_shape[0] / 3 + ymin) - xmin = int(frame_shape[1] / 3 * startup_scan_counter / 3) - xmax = int(frame_shape[1] / 3 + xmin) - regions.append( - calculate_region( - frame_shape, xmin, ymin, xmax, ymax, region_min_size, multiplier=1.2 - ) - ) - startup_scan_counter += 1 - - # resize regions and detect - # seed with stationary objects - detections = [ - ( - obj["label"], - obj["score"], - obj["box"], - obj["area"], - obj["region"], - ) - for obj in object_tracker.tracked_objects.values() - if obj["id"] in stationary_object_ids - ] - - for region in regions: - detections.extend( - detect( - object_detector, - frame, - model_shape, - region, - objects_to_track, - object_filters, - ) - ) - - ######### - # merge objects, check for clipped objects and look again up to 4 times - ######### - refining = len(regions) > 0 - refine_count = 0 - while refining and refine_count < 4: - refining = False - - # group by name - detected_object_groups = defaultdict(lambda: []) - for detection in detections: - detected_object_groups[detection[0]].append(detection) - - selected_objects = [] - for group in detected_object_groups.values(): - - # apply non-maxima suppression to suppress weak, overlapping bounding boxes - boxes = [ - (o[2][0], o[2][1], o[2][2] - o[2][0], o[2][3] - o[2][1]) - for o in group - ] - confidences = [o[1] for o in group] - idxs = cv2.dnn.NMSBoxes(boxes, confidences, 0.5, 0.4) - - for index in idxs: - obj = group[index[0]] - if clipped(obj, frame_shape): - box = obj[2] - # calculate a new region that will hopefully get the entire object - region = calculate_region( - frame_shape, box[0], box[1], box[2], box[3], region_min_size - ) - - regions.append(region) - - selected_objects.extend( - detect( - object_detector, - frame, - model_shape, - region, - objects_to_track, - object_filters, - ) - ) - - refining = True - else: - selected_objects.append(obj) - # set the detections list to only include top, complete objects - # and new detections - detections = selected_objects - - if refining: - refine_count += 1 - - ## drop detections that overlap too much - consolidated_detections = [] - - # if detection was run on this frame, consolidate - if len(regions) > 0: - # group by name - detected_object_groups = defaultdict(lambda: []) - for detection in detections: - detected_object_groups[detection[0]].append(detection) - - # loop over detections grouped by label - for group in detected_object_groups.values(): - # if the group only has 1 item, skip - if len(group) == 1: - consolidated_detections.append(group[0]) - continue - - # sort smallest to largest by area - sorted_by_area = sorted(group, key=lambda g: g[3]) - - for current_detection_idx in range(0, len(sorted_by_area)): - current_detection = sorted_by_area[current_detection_idx][2] - overlap = 0 - for to_check_idx in range( - min(current_detection_idx + 1, len(sorted_by_area)), - len(sorted_by_area), - ): - to_check = sorted_by_area[to_check_idx][2] - # if 90% of smaller detection is inside of another detection, consolidate - if ( - area(intersection(current_detection, to_check)) - / area(current_detection) - > 0.9 - ): - 
overlap = 1 - break - if overlap == 0: - consolidated_detections.append( - sorted_by_area[current_detection_idx] - ) - # now that we have refined our detections, we need to track objects - object_tracker.match_and_update(frame_time, consolidated_detections) - # else, just update the frame times for the stationary objects + # if detection is disabled + if not detection_enabled.value: + object_tracker.match_and_update(frame_time, []) else: - object_tracker.update_frame_times(frame_time) + # get stationary object ids + # check every Nth frame for stationary objects + # disappeared objects are not stationary + # also check for overlapping motion boxes + stationary_object_ids = [ + obj["id"] + for obj in object_tracker.tracked_objects.values() + # if there hasn't been motion for 10 frames + if obj["motionless_count"] >= 10 + # and it isn't due for a periodic check + and ( + detect_config.stationary_interval == 0 + or obj["motionless_count"] % detect_config.stationary_interval != 0 + ) + # and it hasn't disappeared + and object_tracker.disappeared[obj["id"]] == 0 + # and it doesn't overlap with any current motion boxes + and not intersects_any(obj["box"], motion_boxes) + ] + + # get tracked object boxes that aren't stationary + tracked_object_boxes = [ + obj["box"] + for obj in object_tracker.tracked_objects.values() + if not obj["id"] in stationary_object_ids + ] + + # combine motion boxes with known locations of existing objects + combined_boxes = reduce_boxes(motion_boxes + tracked_object_boxes) + + region_min_size = max(model_shape[0], model_shape[1]) + # compute regions + regions = [ + calculate_region( + frame_shape, + a[0], + a[1], + a[2], + a[3], + region_min_size, + multiplier=random.uniform(1.2, 1.5), + ) + for a in combined_boxes + ] + + # consolidate regions with heavy overlap + regions = [ + calculate_region( + frame_shape, a[0], a[1], a[2], a[3], region_min_size, multiplier=1.0 + ) + for a in reduce_boxes(regions, 0.4) + ] + + # if starting up, get the next startup scan region + if startup_scan_counter < 9: + ymin = int(frame_shape[0] / 3 * startup_scan_counter / 3) + ymax = int(frame_shape[0] / 3 + ymin) + xmin = int(frame_shape[1] / 3 * startup_scan_counter / 3) + xmax = int(frame_shape[1] / 3 + xmin) + regions.append( + calculate_region( + frame_shape, + xmin, + ymin, + xmax, + ymax, + region_min_size, + multiplier=1.2, + ) + ) + startup_scan_counter += 1 + + # resize regions and detect + # seed with stationary objects + detections = [ + ( + obj["label"], + obj["score"], + obj["box"], + obj["area"], + obj["region"], + ) + for obj in object_tracker.tracked_objects.values() + if obj["id"] in stationary_object_ids + ] + + for region in regions: + detections.extend( + detect( + object_detector, + frame, + model_shape, + region, + objects_to_track, + object_filters, + ) + ) + + ######### + # merge objects, check for clipped objects and look again up to 4 times + ######### + refining = len(regions) > 0 + refine_count = 0 + while refining and refine_count < 4: + refining = False + + # group by name + detected_object_groups = defaultdict(lambda: []) + for detection in detections: + detected_object_groups[detection[0]].append(detection) + + selected_objects = [] + for group in detected_object_groups.values(): + + # apply non-maxima suppression to suppress weak, overlapping bounding boxes + boxes = [ + (o[2][0], o[2][1], o[2][2] - o[2][0], o[2][3] - o[2][1]) + for o in group + ] + confidences = [o[1] for o in group] + idxs = cv2.dnn.NMSBoxes(boxes, confidences, 0.5, 0.4) + + for index in 
idxs: + obj = group[index[0]] + if clipped(obj, frame_shape): + box = obj[2] + # calculate a new region that will hopefully get the entire object + region = calculate_region( + frame_shape, + box[0], + box[1], + box[2], + box[3], + region_min_size, + ) + + regions.append(region) + + selected_objects.extend( + detect( + object_detector, + frame, + model_shape, + region, + objects_to_track, + object_filters, + ) + ) + + refining = True + else: + selected_objects.append(obj) + # set the detections list to only include top, complete objects + # and new detections + detections = selected_objects + + if refining: + refine_count += 1 + + ## drop detections that overlap too much + consolidated_detections = [] + + # if detection was run on this frame, consolidate + if len(regions) > 0: + # group by name + detected_object_groups = defaultdict(lambda: []) + for detection in detections: + detected_object_groups[detection[0]].append(detection) + + # loop over detections grouped by label + for group in detected_object_groups.values(): + # if the group only has 1 item, skip + if len(group) == 1: + consolidated_detections.append(group[0]) + continue + + # sort smallest to largest by area + sorted_by_area = sorted(group, key=lambda g: g[3]) + + for current_detection_idx in range(0, len(sorted_by_area)): + current_detection = sorted_by_area[current_detection_idx][2] + overlap = 0 + for to_check_idx in range( + min(current_detection_idx + 1, len(sorted_by_area)), + len(sorted_by_area), + ): + to_check = sorted_by_area[to_check_idx][2] + # if 90% of smaller detection is inside of another detection, consolidate + if ( + area(intersection(current_detection, to_check)) + / area(current_detection) + > 0.9 + ): + overlap = 1 + break + if overlap == 0: + consolidated_detections.append( + sorted_by_area[current_detection_idx] + ) + # now that we have refined our detections, we need to track objects + object_tracker.match_and_update(frame_time, consolidated_detections) + # else, just update the frame times for the stationary objects + else: + object_tracker.update_frame_times(frame_time) # add to the queue if not full if detected_objects_queue.full(): From ad4929c621ffa6bc9b9749a3fbdcceb4eeba6739 Mon Sep 17 00:00:00 2001 From: Blake Blackshear Date: Sun, 6 Feb 2022 14:50:15 -0600 Subject: [PATCH 15/15] increment motionless_count --- frigate/objects.py | 1 + 1 file changed, 1 insertion(+) diff --git a/frigate/objects.py b/frigate/objects.py index c412f4539..3a5ae1171 100644 --- a/frigate/objects.py +++ b/frigate/objects.py @@ -106,6 +106,7 @@ class ObjectTracker: def update_frame_times(self, frame_time): for id in self.tracked_objects.keys(): self.tracked_objects[id]["frame_time"] = frame_time + self.tracked_objects[id]["motionless_count"] += 1 def match_and_update(self, frame_time, new_objects): # group by name
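Taken together, patches 14 and 15 are aimed at configurations where detection is turned off for a camera but recordings should still be kept based on motion. A minimal sketch of such a camera config is below; the camera name and stream URL are placeholders, and only the keys referenced in these patches (`detect.enabled`, `record.retain.days`, `record.retain.mode`) are shown:

```yaml
cameras:
  driveway:
    ffmpeg:
      inputs:
        - path: rtsp://user:pass@192.168.1.10:554/stream  # placeholder URL
          roles:
            - detect
            - record
    detect:
      # object detection disabled for this camera
      enabled: False
    record:
      enabled: True
      retain:
        # keep 7 days of footage, but only segments that contain motion
        days: 7
        mode: motion
```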