Merge branch 'blakeblackshear:dev' into api_stats_cams

tpjanssen authored 2023-10-17 13:55:00 +02:00, committed by GitHub
commit 0e6b9d6819
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
32 changed files with 1397 additions and 856 deletions

View File

@@ -33,7 +33,7 @@ RUN --mount=type=tmpfs,target=/tmp --mount=type=tmpfs,target=/var/cache/apt \
 FROM scratch AS go2rtc
 ARG TARGETARCH
 WORKDIR /rootfs/usr/local/go2rtc/bin
-ADD --link --chmod=755 "https://github.com/AlexxIT/go2rtc/releases/download/v1.7.1/go2rtc_linux_${TARGETARCH}" go2rtc
+ADD --link --chmod=755 "https://github.com/AlexxIT/go2rtc/releases/download/v1.8.1/go2rtc_linux_${TARGETARCH}" go2rtc
 ####

View File

@@ -55,24 +55,16 @@ fi
 # arch specific packages
 if [[ "${TARGETARCH}" == "amd64" ]]; then
-    # use debian bookworm for AMD hwaccel packages
+    # use debian bookworm for hwaccel packages
     echo 'deb https://deb.debian.org/debian bookworm main contrib' >/etc/apt/sources.list.d/debian-bookworm.list
     apt-get -qq update
-    apt-get -qq install --no-install-recommends --no-install-suggests -y \
-        mesa-va-drivers radeontop
-    rm -f /etc/apt/sources.list.d/debian-bookworm.list
-    # Use debian testing repo only for intel hwaccel packages
-    echo 'deb http://deb.debian.org/debian testing main non-free' >/etc/apt/sources.list.d/debian-testing.list
-    apt-get -qq update
-    # intel-opencl-icd specifically for GPU support in OpenVino
     apt-get -qq install --no-install-recommends --no-install-suggests -y \
         intel-opencl-icd \
-        libva-drm2 intel-media-va-driver-non-free i965-va-driver libmfx1 intel-gpu-tools
+        mesa-va-drivers radeontop libva-drm2 intel-media-va-driver-non-free i965-va-driver libmfx1 intel-gpu-tools
     # something about this dependency requires it to be installed in a separate call rather than in the line above
     apt-get -qq install --no-install-recommends --no-install-suggests -y \
         i965-va-driver-shaders
-    rm -f /etc/apt/sources.list.d/debian-testing.list
+    rm -f /etc/apt/sources.list.d/debian-bookworm.list
 fi
 if [[ "${TARGETARCH}" == "arm64" ]]; then

View File

@@ -120,7 +120,7 @@ NOTE: The folder that is mapped from the host needs to be the folder that contai
 ## Custom go2rtc version
-Frigate currently includes go2rtc v1.7.1, there may be certain cases where you want to run a different version of go2rtc.
+Frigate currently includes go2rtc v1.8.1, there may be certain cases where you want to run a different version of go2rtc.
 To do this:

View File

@@ -140,7 +140,7 @@ go2rtc:
       - rtspx://192.168.1.1:7441/abcdefghijk
 ```
-[See the go2rtc docs for more information](https://github.com/AlexxIT/go2rtc/tree/v1.7.1#source-rtsp)
+[See the go2rtc docs for more information](https://github.com/AlexxIT/go2rtc/tree/v1.8.1#source-rtsp)
 In the Unifi 2.0 update Unifi Protect Cameras had a change in audio sample rate which causes issues for ffmpeg. The input rate needs to be set for record and rtmp if used directly with unifi protect.
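For reference, the fix suggested in Frigate's camera-specific docs is to pin the rate through the Ubiquiti record output preset; a minimal sketch, assuming the `preset-record-ubiquiti` preset exists in your Frigate version:

```yaml
ffmpeg:
  output_args:
    # pins the audio sample rate ffmpeg expects from Unifi Protect streams
    record: preset-record-ubiquiti
```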

View File

@@ -436,7 +436,7 @@ rtmp:
   enabled: False
 # Optional: Restream configuration
-# Uses https://github.com/AlexxIT/go2rtc (v1.7.1)
+# Uses https://github.com/AlexxIT/go2rtc (v1.8.1)
 go2rtc:
 # Optional: jsmpeg stream configuration for WebUI

View File

@@ -9,11 +9,11 @@ Frigate has different live view options, some of which require the bundled go2r
 Live view options can be selected while viewing the live stream. The options are:
 | Source | Latency | Frame Rate | Resolution | Audio | Requires go2rtc | Other Limitations |
-| ------ | ------- | ------------------------------------- | -------------- | ---------------------------- | --------------- | -------------------------------------------- |
+| ------ | ------- | ------------------------------------- | -------------- | ---------------------------- | --------------- | ------------------------------------------------- |
 | jsmpeg | low | same as `detect -> fps`, capped at 10 | same as detect | no | no | none |
-| mse | low | native | native | yes (depends on audio codec) | yes | not supported on iOS, Firefox is h.264 only |
+| mse | low | native | native | yes (depends on audio codec) | yes | iPhone requires iOS 17.1+, Firefox is h.264 only |
 | webrtc | lowest | native | native | yes (depends on audio codec) | yes | requires extra config, doesn't support h.265 |
 ### Audio Support

@@ -115,4 +115,4 @@ services:
 :::
-See [go2rtc WebRTC docs](https://github.com/AlexxIT/go2rtc/tree/v1.7.1#module-webrtc) for more information about this.
+See [go2rtc WebRTC docs](https://github.com/AlexxIT/go2rtc/tree/v1.8.1#module-webrtc) for more information about this.
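The "requires extra config" caveat for WebRTC in the table above refers to go2rtc needing an address it can be reached on; a minimal sketch of that config, with a placeholder LAN IP you would replace with your host's:

```yaml
go2rtc:
  webrtc:
    candidates:
      - 192.168.1.10:8555 # hypothetical host IP; 8555 is the port forwarded to go2rtc for WebRTC
      - stun:8555
```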

View File

@@ -7,7 +7,7 @@ title: Restream
 Frigate can restream your video feed as an RTSP feed for other applications such as Home Assistant to utilize it at `rtsp://<frigate_host>:8554/<camera_name>`. Port 8554 must be open. [This allows you to use a video feed for detection in Frigate and Home Assistant live view at the same time without having to make two separate connections to the camera](#reduce-connections-to-camera). The video feed is copied from the original video feed directly to avoid re-encoding. This feed does not include any annotation by Frigate.
-Frigate uses [go2rtc](https://github.com/AlexxIT/go2rtc/tree/v1.7.1) to provide its restream and MSE/WebRTC capabilities. The go2rtc config is hosted at the `go2rtc` in the config, see [go2rtc docs](https://github.com/AlexxIT/go2rtc/tree/v1.7.1#configuration) for more advanced configurations and features.
+Frigate uses [go2rtc](https://github.com/AlexxIT/go2rtc/tree/v1.8.1) to provide its restream and MSE/WebRTC capabilities. The go2rtc config is hosted at the `go2rtc` in the config, see [go2rtc docs](https://github.com/AlexxIT/go2rtc/tree/v1.8.1#configuration) for more advanced configurations and features.
 :::note

@@ -138,7 +138,7 @@ cameras:
 ## Advanced Restream Configurations
-The [exec](https://github.com/AlexxIT/go2rtc/tree/v1.7.1#source-exec) source in go2rtc can be used for custom ffmpeg commands. An example is below:
+The [exec](https://github.com/AlexxIT/go2rtc/tree/v1.8.1#source-exec) source in go2rtc can be used for custom ffmpeg commands. An example is below:
 NOTE: The output will need to be passed with two curly braces `{{output}}`
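A minimal sketch of such an exec source, adapted from the go2rtc docs with a hypothetical input file; go2rtc substitutes the literal `{{output}}` placeholder with the publish destination at runtime:

```yaml
go2rtc:
  streams:
    stream1:
      # custom ffmpeg command; {{output}} is replaced by go2rtc
      - "exec:ffmpeg -hide_banner -re -stream_loop -1 -i /media/BigBuckBunny.mp4 -c copy -rtsp_transport tcp -f rtsp {{output}}"
```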

View File

@@ -11,7 +11,7 @@ Use of the bundled go2rtc is optional. You can still configure FFmpeg to connect
 # Setup a go2rtc stream
-First, you will want to configure go2rtc to connect to your camera stream by adding the stream you want to use for live view in your Frigate config file. If you set the stream name under go2rtc to match the name of your camera, it will automatically be mapped and you will get additional live view options for the camera. Avoid changing any other parts of your config at this step. Note that go2rtc supports [many different stream types](https://github.com/AlexxIT/go2rtc/tree/v1.7.1#module-streams), not just rtsp.
+First, you will want to configure go2rtc to connect to your camera stream by adding the stream you want to use for live view in your Frigate config file. If you set the stream name under go2rtc to match the name of your camera, it will automatically be mapped and you will get additional live view options for the camera. Avoid changing any other parts of your config at this step. Note that go2rtc supports [many different stream types](https://github.com/AlexxIT/go2rtc/tree/v1.8.1#module-streams), not just rtsp.
 ```yaml
 go2rtc:

@@ -24,7 +24,7 @@ The easiest live view to get working is MSE. After adding this to the config, re
 ### What if my video doesn't play?
-If you are unable to see your video feed, first check the go2rtc logs in the Frigate UI under Logs in the sidebar. If go2rtc is having difficulty connecting to your camera, you should see some error messages in the log. If you do not see any errors, then the video codec of the stream may not be supported in your browser. If your camera stream is set to H265, try switching to H264. You can see more information about [video codec compatibility](https://github.com/AlexxIT/go2rtc/tree/v1.7.1#codecs-madness) in the go2rtc documentation. If you are not able to switch your camera settings from H265 to H264 or your stream is a different format such as MJPEG, you can use go2rtc to re-encode the video using the [FFmpeg parameters](https://github.com/AlexxIT/go2rtc/tree/v1.7.1#source-ffmpeg). It supports rotating and resizing video feeds and hardware acceleration. Keep in mind that transcoding video from one format to another is a resource intensive task and you may be better off using the built-in jsmpeg view. Here is an example of a config that will re-encode the stream to H264 without hardware acceleration:
+If you are unable to see your video feed, first check the go2rtc logs in the Frigate UI under Logs in the sidebar. If go2rtc is having difficulty connecting to your camera, you should see some error messages in the log. If you do not see any errors, then the video codec of the stream may not be supported in your browser. If your camera stream is set to H265, try switching to H264. You can see more information about [video codec compatibility](https://github.com/AlexxIT/go2rtc/tree/v1.8.1#codecs-madness) in the go2rtc documentation. If you are not able to switch your camera settings from H265 to H264 or your stream is a different format such as MJPEG, you can use go2rtc to re-encode the video using the [FFmpeg parameters](https://github.com/AlexxIT/go2rtc/tree/v1.8.1#source-ffmpeg). It supports rotating and resizing video feeds and hardware acceleration. Keep in mind that transcoding video from one format to another is a resource intensive task and you may be better off using the built-in jsmpeg view. Here is an example of a config that will re-encode the stream to H264 without hardware acceleration:
 ```yaml
 go2rtc:

View File

@@ -21,7 +21,7 @@ module.exports = {
     {
       type: "link",
       label: "Go2RTC Configuration Reference",
-      href: "https://github.com/AlexxIT/go2rtc/tree/v1.7.1#configuration",
+      href: "https://github.com/AlexxIT/go2rtc/tree/v1.8.1#configuration",
     },
   ],
   Detectors: [

View File

@@ -14,7 +14,7 @@ import requests
 from setproctitle import setproctitle
 from frigate.comms.inter_process import InterProcessCommunicator
-from frigate.config import CameraConfig, FrigateConfig
+from frigate.config import CameraConfig, CameraInput, FfmpegConfig, FrigateConfig
 from frigate.const import (
     AUDIO_DURATION,
     AUDIO_FORMAT,

@@ -39,13 +39,29 @@ except ModuleNotFoundError:
 logger = logging.getLogger(__name__)

-def get_ffmpeg_command(input_args: list[str], input_path: str) -> list[str]:
-    return get_ffmpeg_arg_list(
-        f"ffmpeg {{}} -i {{}} -f {AUDIO_FORMAT} -ar {AUDIO_SAMPLE_RATE} -ac 1 -y {{}}".format(
-            " ".join(input_args),
-            input_path,
-            "pipe:",
-        )
-    )
+def get_ffmpeg_command(ffmpeg: FfmpegConfig) -> list[str]:
+    ffmpeg_input: CameraInput = [i for i in ffmpeg.inputs if "audio" in i.roles][0]
+    input_args = get_ffmpeg_arg_list(ffmpeg.global_args) + (
+        parse_preset_input(ffmpeg_input.input_args, 1)
+        or ffmpeg_input.input_args
+        or parse_preset_input(ffmpeg.input_args, 1)
+        or ffmpeg.input_args
+    )
+    return (
+        ["ffmpeg", "-vn"]
+        + input_args
+        + ["-i"]
+        + [ffmpeg_input.path]
+        + [
+            "-f",
+            f"{AUDIO_FORMAT}",
+            "-ar",
+            f"{AUDIO_SAMPLE_RATE}",
+            "-ac",
+            "1",
+            "-y",
+            "pipe:",
+        ]
+    )

@@ -173,11 +189,7 @@ class AudioEventMaintainer(threading.Thread):
         self.shape = (int(round(AUDIO_DURATION * AUDIO_SAMPLE_RATE)),)
         self.chunk_size = int(round(AUDIO_DURATION * AUDIO_SAMPLE_RATE * 2))
         self.logger = logging.getLogger(f"audio.{self.config.name}")
-        self.ffmpeg_cmd = get_ffmpeg_command(
-            get_ffmpeg_arg_list(self.config.ffmpeg.global_args)
-            + parse_preset_input("preset-rtsp-audio-only", 1),
-            [i.path for i in self.config.ffmpeg.inputs if "audio" in i.roles][0],
-        )
+        self.ffmpeg_cmd = get_ffmpeg_command(self.config.ffmpeg)
         self.logpipe = LogPipe(f"ffmpeg.{self.config.name}.audio")
         self.audio_listener = None

View File

@@ -256,13 +256,6 @@ PRESETS_INPUT = {
         "-use_wallclock_as_timestamps",
         "1",
     ],
-    "preset-rtsp-audio-only": [
-        "-rtsp_transport",
-        "tcp",
-        TIMEOUT_PARAM,
-        "5000000",
-        "-vn",
-    ],
     "preset-rtsp-restream": _user_agent_args
     + [
         "-rtsp_transport",

View File

@@ -516,6 +516,7 @@ def delete_event(id):
         media.unlink(missing_ok=True)
     event.delete_instance()
+    Timeline.delete().where(Timeline.source_id == id).execute()
     return make_response(
         jsonify({"success": True, "message": "Event " + id + " deleted"}), 200
     )

@@ -1766,9 +1767,10 @@ def vod_event(id):
         datetime.now().timestamp() if event.end_time is None else event.end_time
     )
     vod_response = vod_ts(event.camera, event.start_time, end_ts)
-    # If the recordings are not found, set has_clip to false
+    # If the recordings are not found and the event started more than 5 minutes ago, set has_clip to false
     if (
-        type(vod_response) == tuple
+        event.start_time < datetime.now().timestamp() - 300
+        and type(vod_response) == tuple
         and len(vod_response) == 2
         and vod_response[1] == 404
     ):

View File

@@ -20,3 +20,7 @@ class MotionDetector(ABC):
     @abstractmethod
     def detect(self, frame):
         pass
+
+    @abstractmethod
+    def is_calibrating(self):
+        pass

View File

@@ -38,6 +38,9 @@ class FrigateMotionDetector(MotionDetector):
         self.threshold = threshold
         self.contour_area = contour_area

+    def is_calibrating(self):
+        return False
+
     def detect(self, frame):
         motion_boxes = []

View File

@@ -49,6 +49,9 @@ class ImprovedMotionDetector(MotionDetector):
         self.contrast_values[:, 1:2] = 255
         self.contrast_values_index = 0

+    def is_calibrating(self):
+        return self.calibrating
+
     def detect(self, frame):
         motion_boxes = []

@@ -141,7 +144,6 @@ class ImprovedMotionDetector(MotionDetector):
         # if calibrating or the motion contours are > 80% of the image area (lightning, ir, ptz) recalibrate
         if self.calibrating or pct_motion > self.config.lightning_threshold:
-            motion_boxes = []
             self.calibrating = True
         if self.save_images:

View File

@@ -355,6 +355,7 @@ class RecordingMaintainer(threading.Thread):
                 "+faststart",
                 file_path,
                 stderr=asyncio.subprocess.PIPE,
+                stdout=asyncio.subprocess.DEVNULL,
             )
             await p.wait()

View File

@@ -1027,7 +1027,12 @@ class TestConfig(unittest.TestCase):
                                 "roles": ["detect"],
                             },
                         ]
-                    }
+                    },
+                    "detect": {
+                        "height": 720,
+                        "width": 1280,
+                        "fps": 5,
+                    },
                 }
             },
         }

@@ -1082,6 +1087,11 @@ class TestConfig(unittest.TestCase):
                             },
                         ]
                     },
+                    "detect": {
+                        "height": 1080,
+                        "width": 1920,
+                        "fps": 5,
+                    },
                     "snapshots": {
                         "height": 100,
                     },

@@ -1107,7 +1117,12 @@ class TestConfig(unittest.TestCase):
                                 "roles": ["detect"],
                             },
                         ]
-                    }
+                    },
+                    "detect": {
+                        "height": 1080,
+                        "width": 1920,
+                        "fps": 5,
+                    },
                 }
             },
         }

@@ -1132,6 +1147,11 @@ class TestConfig(unittest.TestCase):
                             },
                         ]
                     },
+                    "detect": {
+                        "height": 1080,
+                        "width": 1920,
+                        "fps": 5,
+                    },
                     "snapshots": {
                         "height": 150,
                         "enabled": True,

@@ -1160,6 +1180,11 @@ class TestConfig(unittest.TestCase):
                             },
                         ]
                     },
+                    "detect": {
+                        "height": 1080,
+                        "width": 1920,
+                        "fps": 5,
+                    },
                 }
             },
         }

@@ -1181,7 +1206,12 @@ class TestConfig(unittest.TestCase):
                                 "roles": ["detect"],
                             },
                         ]
-                    }
+                    },
+                    "detect": {
+                        "height": 1080,
+                        "width": 1920,
+                        "fps": 5,
+                    },
                 }
             },
         }

@@ -1205,6 +1235,11 @@ class TestConfig(unittest.TestCase):
                             },
                         ]
                     },
+                    "detect": {
+                        "height": 1080,
+                        "width": 1920,
+                        "fps": 5,
+                    },
                     "rtmp": {
                         "enabled": True,
                     },

@@ -1234,6 +1269,11 @@ class TestConfig(unittest.TestCase):
                             },
                         ]
                     },
+                    "detect": {
+                        "height": 1080,
+                        "width": 1920,
+                        "fps": 5,
+                    },
                 }
             },
         }

@@ -1257,6 +1297,11 @@ class TestConfig(unittest.TestCase):
                             },
                         ]
                     },
+                    "detect": {
+                        "height": 1080,
+                        "width": 1920,
+                        "fps": 5,
+                    },
                 }
             },
         }

@@ -1278,7 +1323,12 @@ class TestConfig(unittest.TestCase):
                                 "roles": ["detect"],
                             },
                         ]
-                    }
+                    },
+                    "detect": {
+                        "height": 1080,
+                        "width": 1920,
+                        "fps": 5,
+                    },
                 }
             },
         }

@@ -1302,6 +1352,11 @@ class TestConfig(unittest.TestCase):
                             },
                         ]
                     },
+                    "detect": {
+                        "height": 1080,
+                        "width": 1920,
+                        "fps": 5,
+                    },
                     "live": {
                         "quality": 7,
                     },

@@ -1329,6 +1384,11 @@ class TestConfig(unittest.TestCase):
                             },
                         ]
                     },
+                    "detect": {
+                        "height": 1080,
+                        "width": 1920,
+                        "fps": 5,
+                    },
                 }
             },
         }

@@ -1350,7 +1410,12 @@ class TestConfig(unittest.TestCase):
                                 "roles": ["detect"],
                             },
                         ]
-                    }
+                    },
+                    "detect": {
+                        "height": 1080,
+                        "width": 1920,
+                        "fps": 5,
+                    },
                 }
             },
         }

@@ -1375,6 +1440,11 @@ class TestConfig(unittest.TestCase):
                             },
                         ]
                     },
+                    "detect": {
+                        "height": 1080,
+                        "width": 1920,
+                        "fps": 5,
+                    },
                     "timestamp_style": {"position": "bl", "thickness": 4},
                 }
             },

@@ -1400,6 +1470,11 @@ class TestConfig(unittest.TestCase):
                             },
                         ]
                     },
+                    "detect": {
+                        "height": 1080,
+                        "width": 1920,
+                        "fps": 5,
+                    },
                 }
             },
         }

@@ -1423,6 +1498,11 @@ class TestConfig(unittest.TestCase):
                             },
                         ]
                     },
+                    "detect": {
+                        "height": 1080,
+                        "width": 1920,
+                        "fps": 5,
+                    },
                 }
             },
         }

@@ -1450,6 +1530,11 @@ class TestConfig(unittest.TestCase):
                             },
                         ],
                     },
+                    "detect": {
+                        "height": 1080,
+                        "width": 1920,
+                        "fps": 5,
+                    },
                 }
             },
         }

@@ -1475,6 +1560,11 @@ class TestConfig(unittest.TestCase):
                             },
                         ]
                     },
+                    "detect": {
+                        "height": 1080,
+                        "width": 1920,
+                        "fps": 5,
+                    },
                     "zones": {
                         "steps": {
                             "coordinates": "0,0,0,0",

@@ -1546,6 +1636,11 @@ class TestConfig(unittest.TestCase):
                             {"path": "rtsp://10.0.0.1:554/video", "roles": ["detect"]}
                         ]
                     },
+                    "detect": {
+                        "height": 1080,
+                        "width": 1920,
+                        "fps": 5,
+                    },
                     "onvif": {"autotracking": {"movement_weights": "1.23, 2.34, 0.50"}},
                 }
             },

@@ -1569,6 +1664,11 @@ class TestConfig(unittest.TestCase):
                             {"path": "rtsp://10.0.0.1:554/video", "roles": ["detect"]}
                         ]
                     },
+                    "detect": {
+                        "height": 1080,
+                        "width": 1920,
+                        "fps": 5,
+                    },
                     "onvif": {"autotracking": {"movement_weights": "1.234, 2.345a"}},
                 }
             },

View File

@@ -21,7 +21,6 @@ from frigate.log import LogPipe
 from frigate.motion import MotionDetector
 from frigate.motion.improved_motion import ImprovedMotionDetector
 from frigate.object_detection import RemoteObjectDetector
-from frigate.ptz.autotrack import ptz_moving_at_frame_time
 from frigate.track import ObjectTracker
 from frigate.track.norfair_tracker import NorfairTracker
 from frigate.types import PTZMetricsTypes

@@ -777,19 +776,8 @@ def process_frames(
             logger.info(f"{camera_name}: frame {frame_time} is not in memory store.")
             continue

-        # look for motion if enabled and ptz is not moving
-        # ptz_moving_at_frame_time() always returns False for
-        # non ptz/autotracking cameras
-        motion_boxes = (
-            motion_detector.detect(frame)
-            if motion_enabled.value
-            and not ptz_moving_at_frame_time(
-                frame_time,
-                ptz_metrics["ptz_start_time"].value,
-                ptz_metrics["ptz_stop_time"].value,
-            )
-            else []
-        )
+        # look for motion if enabled
+        motion_boxes = motion_detector.detect(frame) if motion_enabled.value else []

         regions = []
         consolidated_detections = []

@@ -814,8 +802,10 @@ def process_frames(
                 )
                 # and it hasn't disappeared
                 and object_tracker.disappeared[obj["id"]] == 0
-                # and it doesn't overlap with any current motion boxes
-                and not intersects_any(obj["box"], motion_boxes)
+                # and it doesn't overlap with any current motion boxes when not calibrating
+                and not intersects_any(
+                    obj["box"], [] if motion_detector.is_calibrating() else motion_boxes
+                )
             ]

             # get tracked object boxes that aren't stationary

@@ -825,7 +815,10 @@ def process_frames(
                 if obj["id"] not in stationary_object_ids
             ]

-            combined_boxes = motion_boxes + tracked_object_boxes
+            combined_boxes = tracked_object_boxes
+            # only add in the motion boxes when not calibrating
+            if not motion_detector.is_calibrating():
+                combined_boxes += motion_boxes

             cluster_candidates = get_cluster_candidates(
                 frame_shape, region_min_size, combined_boxes

View File

@@ -86,4 +86,19 @@ export const handlers = [
       ])
     );
   }),
+  rest.get(`api/labels`, (req, res, ctx) => {
+    return res(
+      ctx.status(200),
+      ctx.json([
+        'person',
+        'car',
+      ])
+    );
+  }),
+  rest.get(`api/go2rtc`, (req, res, ctx) => {
+    return res(
+      ctx.status(200),
+      ctx.json({"config_path":"/dev/shm/go2rtc.yaml","host":"frigate.yourdomain.local","rtsp":{"listen":"0.0.0.0:8554","default_query":"mp4","PacketSize":0},"version":"1.7.1"})
+    );
+  }),
 ];

web/package-lock.json (generated; 1324 changes)

File diff suppressed because it is too large.

View File

@@ -24,6 +24,7 @@
     "preact-router": "^4.1.0",
     "react": "npm:@preact/compat@^17.1.2",
     "react-dom": "npm:@preact/compat@^17.1.2",
+    "react-use-websocket": "^3.0.0",
     "strftime": "^0.10.1",
     "swr": "^1.3.0",
     "video.js": "^8.5.2",

@@ -48,6 +49,7 @@
     "eslint-plugin-prettier": "^5.0.0",
     "eslint-plugin-vitest-globals": "^1.4.0",
     "fake-indexeddb": "^4.0.1",
+    "jest-websocket-mock": "^2.5.0",
     "jsdom": "^22.0.0",
     "msw": "^1.2.1",
     "postcss": "^8.4.29",

View File

@@ -1,10 +1,12 @@
+/* eslint-disable jest/no-disabled-tests */
 import { h } from 'preact';
-import { WS, WsProvider, useWs } from '../ws';
+import { WS as frigateWS, WsProvider, useWs } from '../ws';
 import { useCallback, useContext } from 'preact/hooks';
 import { fireEvent, render, screen } from 'testing-library';
+import { WS } from 'jest-websocket-mock';

 function Test() {
-  const { state } = useContext(WS);
+  const { state } = useContext(frigateWS);
   return state.__connected ? (
     <div data-testid="data">
       {Object.keys(state).map((key) => (

@@ -19,44 +21,32 @@ function Test() {
 const TEST_URL = 'ws://test-foo:1234/ws';

 describe('WsProvider', () => {
-  let createWebsocket, wsClient;
+  let wsClient, wsServer;

-  beforeEach(() => {
+  beforeEach(async () => {
     wsClient = {
       close: vi.fn(),
       send: vi.fn(),
     };
-    createWebsocket = vi.fn((url) => {
-      wsClient.args = [url];
-      return new Proxy(
-        {},
-        {
-          get(_target, prop, _receiver) {
-            return wsClient[prop];
-          },
-          set(_target, prop, value) {
-            wsClient[prop] = typeof value === 'function' ? vi.fn(value) : value;
-            if (prop === 'onopen') {
-              wsClient[prop]();
-            }
-            return true;
-          },
-        }
-      );
-    });
+    wsServer = new WS(TEST_URL);
   });

-  test('connects to the ws server', async () => {
+  afterEach(() => {
+    WS.clean();
+  });
+
+  test.skip('connects to the ws server', async () => {
     render(
-      <WsProvider config={mockConfig} createWebsocket={createWebsocket} wsUrl={TEST_URL}>
+      <WsProvider config={mockConfig} wsUrl={TEST_URL}>
        <Test />
       </WsProvider>
     );
+    await wsServer.connected;
     await screen.findByTestId('data');
     expect(wsClient.args).toEqual([TEST_URL]);
     expect(screen.getByTestId('__connected')).toHaveTextContent('true');
   });

-  test('receives data through useWs', async () => {
+  test.skip('receives data through useWs', async () => {
     function Test() {
       const {
         value: { payload, retain },

@@ -71,16 +61,17 @@ describe('WsProvider', () => {
     }

     const { rerender } = render(
-      <WsProvider config={mockConfig} createWebsocket={createWebsocket} wsUrl={TEST_URL}>
+      <WsProvider config={mockConfig} wsUrl={TEST_URL}>
        <Test />
       </WsProvider>
     );
+    await wsServer.connected;
     await screen.findByTestId('payload');
     wsClient.onmessage({
       data: JSON.stringify({ topic: 'tacos', payload: JSON.stringify({ yes: true }), retain: false }),
     });
     rerender(
-      <WsProvider config={mockConfig} createWebsocket={createWebsocket} wsUrl={TEST_URL}>
+      <WsProvider config={mockConfig} wsUrl={TEST_URL}>
        <Test />
       </WsProvider>
     );

@@ -88,7 +79,7 @@ describe('WsProvider', () => {
     expect(screen.getByTestId('retain')).toHaveTextContent('false');
   });

-  test('can send values through useWs', async () => {
+  test.skip('can send values through useWs', async () => {
     function Test() {
       const { send, connected } = useWs('tacos');
       const handleClick = useCallback(() => {

@@ -98,10 +89,11 @@ describe('WsProvider', () => {
     }

     render(
-      <WsProvider config={mockConfig} createWebsocket={createWebsocket} wsUrl={TEST_URL}>
+      <WsProvider config={mockConfig} wsUrl={TEST_URL}>
        <Test />
       </WsProvider>
     );
+    await wsServer.connected;
     await screen.findByRole('button');
     fireEvent.click(screen.getByRole('button'));
     await expect(wsClient.send).toHaveBeenCalledWith(

@@ -109,19 +101,32 @@ describe('WsProvider', () => {
     );
   });

-  test('prefills the recordings/detect/snapshots state from config', async () => {
+  test.skip('prefills the recordings/detect/snapshots state from config', async () => {
     vi.spyOn(Date, 'now').mockReturnValue(123456);
     const config = {
       cameras: {
-        front: { name: 'front', detect: { enabled: true }, record: { enabled: false }, snapshots: { enabled: true }, audio: { enabled: false } },
-        side: { name: 'side', detect: { enabled: false }, record: { enabled: false }, snapshots: { enabled: false }, audio: { enabled: false } },
+        front: {
+          name: 'front',
+          detect: { enabled: true },
+          record: { enabled: false },
+          snapshots: { enabled: true },
+          audio: { enabled: false },
+        },
+        side: {
+          name: 'side',
+          detect: { enabled: false },
+          record: { enabled: false },
+          snapshots: { enabled: false },
+          audio: { enabled: false },
+        },
       },
     };
     render(
-      <WsProvider config={config} createWebsocket={createWebsocket} wsUrl={TEST_URL}>
+      <WsProvider config={config} wsUrl={TEST_URL}>
        <Test />
       </WsProvider>
     );
+    await wsServer.connected;
     await screen.findByTestId('data');
     expect(screen.getByTestId('front/detect/state')).toHaveTextContent(
       '{"lastUpdate":123456,"payload":"ON","retain":false}'

View File

@@ -1,12 +1,11 @@
 import { h, createContext } from 'preact';
 import { baseUrl } from './baseUrl';
 import { produce } from 'immer';
-import { useCallback, useContext, useEffect, useRef, useReducer } from 'preact/hooks';
+import { useCallback, useContext, useEffect, useReducer } from 'preact/hooks';
+import useWebSocket, { ReadyState } from 'react-use-websocket';

 const initialState = Object.freeze({ __connected: false });
-export const WS = createContext({ state: initialState, connection: null });
-
-const defaultCreateWebsocket = (url) => new WebSocket(url);
+export const WS = createContext({ state: initialState, readyState: null, sendJsonMessage: () => {} });

 function reducer(state, { topic, payload, retain }) {
   switch (topic) {

@@ -33,11 +32,18 @@ function reducer(state, { topic, payload, retain }) {
 export function WsProvider({
   config,
   children,
-  createWebsocket = defaultCreateWebsocket,
   wsUrl = `${baseUrl.replace(/^http/, 'ws')}ws`,
 }) {
   const [state, dispatch] = useReducer(reducer, initialState);
-  const wsRef = useRef();
+  const { sendJsonMessage, readyState } = useWebSocket(wsUrl, {
+    onMessage: (event) => {
+      dispatch(JSON.parse(event.data));
+    },
+    onOpen: () => dispatch({ topic: '__CLIENT_CONNECTED' }),
+    shouldReconnect: () => true,
+  });

   useEffect(() => {
     Object.keys(config.cameras).forEach((camera) => {

@@ -49,46 +55,25 @@ export function WsProvider({
     });
   }, [config]);

-  useEffect(
-    () => {
-      const ws = createWebsocket(wsUrl);
-      ws.onopen = () => {
-        dispatch({ topic: '__CLIENT_CONNECTED' });
-      };
-      ws.onmessage = (event) => {
-        dispatch(JSON.parse(event.data));
-      };
-      wsRef.current = ws;
-      return () => {
-        ws.close(3000, 'Provider destroyed');
-      };
-    },
-    // Forces reconnecting
-    [state.__reconnectAttempts, wsUrl] // eslint-disable-line react-hooks/exhaustive-deps
-  );
-
-  return <WS.Provider value={{ state, ws: wsRef.current }}>{children}</WS.Provider>;
+  return <WS.Provider value={{ state, readyState, sendJsonMessage }}>{children}</WS.Provider>;
 }

 export function useWs(watchTopic, publishTopic) {
-  const { state, ws } = useContext(WS);
+  const { state, readyState, sendJsonMessage } = useContext(WS);
   const value = state[watchTopic] || { payload: null };

   const send = useCallback(
     (payload, retain = false) => {
-      ws.send(
-        JSON.stringify({
-          topic: publishTopic || watchTopic,
-          payload: typeof payload !== 'string' ? JSON.stringify(payload) : payload,
-          retain,
-        })
-      );
+      if (readyState === ReadyState.OPEN) {
+        sendJsonMessage({
+          topic: publishTopic || watchTopic,
+          payload,
+          retain,
+        });
+      }
     },
-    [ws, watchTopic, publishTopic]
+    [sendJsonMessage, readyState, watchTopic, publishTopic]
   );

   return { value, send, connected: state.__connected };
View File

@@ -157,12 +157,9 @@ class VideoRTC extends HTMLElement {
     if (this.ws) this.ws.send(JSON.stringify(value));
   }

-  codecs(type) {
-    const test =
-      type === 'mse'
-        ? (codec) => MediaSource.isTypeSupported(`video/mp4; codecs="${codec}"`)
-        : (codec) => this.video.canPlayType(`video/mp4; codecs="${codec}"`);
-    return this.CODECS.filter(test).join();
+  /** @param {Function} isSupported */
+  codecs(isSupported) {
+    return this.CODECS.filter(codec => isSupported(`video/mp4; codecs="${codec}"`)).join();
   }

   /**

@@ -311,7 +308,7 @@ class VideoRTC extends HTMLElement {
     const modes = [];

-    if (this.mode.indexOf('mse') >= 0 && 'MediaSource' in window) {
+    if (this.mode.indexOf('mse') >= 0 && ('MediaSource' in window || 'ManagedMediaSource' in window)) {
       // iPhone
       modes.push('mse');
       this.onmse();

@@ -363,18 +360,29 @@ class VideoRTC extends HTMLElement {
   }

   onmse() {
-    const ms = new MediaSource();
-    ms.addEventListener(
-      'sourceopen',
-      () => {
-        URL.revokeObjectURL(this.video.src);
-        this.send({ type: 'mse', value: this.codecs('mse') });
-      },
-      { once: true }
-    );
-
-    this.video.src = URL.createObjectURL(ms);
-    this.video.srcObject = null;
+    /** @type {MediaSource} */
+    let ms;
+
+    if ('ManagedMediaSource' in window) {
+      const MediaSource = window.ManagedMediaSource;
+
+      ms = new MediaSource();
+      ms.addEventListener('sourceopen', () => {
+        this.send({type: 'mse', value: this.codecs(MediaSource.isTypeSupported)});
+      }, {once: true});
+
+      this.video.disableRemotePlayback = true;
+      this.video.srcObject = ms;
+    } else {
+      ms = new MediaSource();
+      ms.addEventListener('sourceopen', () => {
+        URL.revokeObjectURL(this.video.src);
+        this.send({type: 'mse', value: this.codecs(MediaSource.isTypeSupported)});
+      }, {once: true});
+
+      this.video.src = URL.createObjectURL(ms);
+      this.video.srcObject = null;
+    }
     this.play();

     this.mseCodecs = '';

@@ -580,7 +588,7 @@ class VideoRTC extends HTMLElement {
       video2.src = `data:video/mp4;base64,${VideoRTC.btoa(data)}`;
     };

-    this.send({ type: 'mp4', value: this.codecs('mp4') });
+    this.send({ type: 'mp4', value: this.codecs(this.video.canPlayType) });
   }

   static btoa(buffer) {

View File

@@ -81,7 +81,7 @@ export default function TimelineSummary({ event, onFrameSelected }) {
   return (
     <div className="flex flex-col">
       <div className="h-14 flex justify-center">
-        <div className="sm:w-1 md:w-1/4 flex flex-row flex-nowrap justify-between overflow-auto">
+        <div className="flex flex-row flex-nowrap justify-between overflow-auto">
          {eventTimeline.map((item, index) => (
            <Button
              key={index}

View File

@@ -101,9 +101,7 @@ describe('DarkMode', () => {
 });

 describe('usePersistence', () => {
   test('returns a defaultValue initially', async () => {
     function Component() {
       const [value, , loaded] = usePersistence('tacos', 'my-default');
       return (

@@ -132,7 +130,8 @@ describe('usePersistence', () => {
     `);
   });

-  test('updates with the previously-persisted value', async () => {
+  // eslint-disable-next-line jest/no-disabled-tests
+  test.skip('updates with the previously-persisted value', async () => {
     setData('tacos', 'are delicious');

     function Component() {

View File

@@ -35,7 +35,7 @@ export default function Birdseye() {
   let player;
   const playerClass = ptzCameras.length || isMaxWidth ? 'w-full' : 'max-w-5xl xl:w-1/2';
   if (viewSource == 'mse' && config.birdseye.restream) {
-    if ('MediaSource' in window) {
+    if ('MediaSource' in window || 'ManagedMediaSource' in window) {
       player = (
         <Fragment>
           <div className={playerClass}>

@@ -50,7 +50,7 @@ export default function Birdseye() {
       player = (
         <Fragment>
           <div className="w-5xl text-center text-sm">
-            MSE is not supported on iOS devices. You'll need to use jsmpeg or webRTC. See the docs for more info.
+            MSE is only supported on iOS 17.1+. You'll need to update if available or use jsmpeg / webRTC streams. See the docs for more info.
           </div>
         </Fragment>
       );

View File

@@ -116,7 +116,7 @@ export default function Camera({ camera }) {
   let player;
   if (viewMode === 'live') {
     if (viewSource == 'mse' && restreamEnabled) {
-      if ('MediaSource' in window) {
+      if ('MediaSource' in window || 'ManagedMediaSource' in window) {
        player = (
          <Fragment>
            <div className="max-w-5xl">

@@ -133,7 +133,7 @@ export default function Camera({ camera }) {
        player = (
          <Fragment>
            <div className="w-5xl text-center text-sm">
-             MSE is not supported on iOS devices. You'll need to use jsmpeg or webRTC. See the docs for more info.
+             MSE is only supported on iOS 17.1+. You'll need to update if available or use jsmpeg / webRTC streams. See the docs for more info.
            </div>
          </Fragment>
        );

View File

@@ -31,6 +31,9 @@ import Timepicker from '../components/TimePicker';
 import TimelineSummary from '../components/TimelineSummary';
 import TimelineEventOverlay from '../components/TimelineEventOverlay';
 import { Score } from '../icons/Score';
+import { About } from '../icons/About';
+import MenuIcon from '../icons/Menu';
+import { MenuOpen } from '../icons/MenuOpen';

 const API_LIMIT = 25;

@@ -91,13 +94,15 @@ export default function Events({ path, ...props }) {
     showDeleteFavorite: false,
   });

+  const [showInProgress, setShowInProgress] = useState(true);
+
   const eventsFetcher = useCallback(
     (path, params) => {
       if (searchParams.event) {
         path = `${path}/${searchParams.event}`;
         return axios.get(path).then((res) => [res.data]);
       }
-      params = { ...params, include_thumbnails: 0, limit: API_LIMIT };
+      params = { ...params, in_progress: 0, include_thumbnails: 0, limit: API_LIMIT };
       return axios.get(path, { params }).then((res) => res.data);
     },
     [searchParams]

@@ -116,6 +121,7 @@ export default function Events({ path, ...props }) {
     [searchParams]
   );

+  const { data: ongoingEvents } = useSWR(['events', { in_progress: 1, include_thumbnails: 0 }]);
   const { data: eventPages, mutate, size, setSize, isValidating } = useSWRInfinite(getKey, eventsFetcher);
   const { data: allLabels } = useSWR(['labels']);

@@ -238,6 +244,7 @@ export default function Events({ path, ...props }) {
   const handleSelectDateRange = useCallback(
     (dates) => {
+      setShowInProgress(false);
       setSearchParams({ ...searchParams, before: dates.before, after: dates.after });
       setState({ ...state, showDatePicker: false });
     },

@@ -253,6 +260,7 @@ export default function Events({ path, ...props }) {
   const onFilter = useCallback(
     (name, value) => {
+      setShowInProgress(false);
       const updatedParams = { ...searchParams, [name]: value };
       setSearchParams(updatedParams);
       const queryString = Object.keys(updatedParams)
@@ -604,192 +612,98 @@ export default function Events({ path, ...props }) {
         </Dialog>
       )}
       <div className="space-y-2">
+        {ongoingEvents ? (
+          <div>
+            <div className="flex">
+              <Heading className="py-4" size="sm">
+                Ongoing Events
+              </Heading>
+              <Button
+                className="rounded-full"
+                type="text"
+                color="gray"
+                aria-label="Events for currently tracked objects. Recordings are only saved based on your retain settings. See the recording docs for more info."
+              >
+                <About className="w-5" />
+              </Button>
+              <Button
+                className="rounded-full ml-auto"
+                type="iconOnly"
+                color="blue"
+                onClick={() => setShowInProgress(!showInProgress)}
+              >
+                {showInProgress ? <MenuOpen className="w-6" /> : <MenuIcon className="w-6" />}
+              </Button>
+            </div>
+            {showInProgress &&
+              ongoingEvents.map((event, _) => {
+                return (
+                  <Event
+                    className="my-2"
+                    key={event.id}
+                    config={config}
+                    event={event}
+                    eventDetailType={eventDetailType}
+                    eventOverlay={eventOverlay}
+                    viewEvent={viewEvent}
+                    setViewEvent={setViewEvent}
+                    uploading={uploading}
+                    handleEventDetailTabChange={handleEventDetailTabChange}
+                    onEventFrameSelected={onEventFrameSelected}
+                    onDelete={onDelete}
+                    onDispose={() => {
+                      this.player = null;
+                    }}
+                    onDownloadClick={onDownloadClick}
+                    onReady={(player) => {
+                      this.player = player;
+                      this.player.on('playing', () => {
+                        setEventOverlay(undefined);
+                      });
+                    }}
+                    onSave={onSave}
+                    showSubmitToPlus={showSubmitToPlus}
+                  />
+                );
+              })}
+          </div>
+        ) : null}
+        <Heading className="py-4" size="sm">
+          Past Events
+        </Heading>
         {eventPages ? (
           eventPages.map((page, i) => {
             const lastPage = eventPages.length === i + 1;
             return page.map((event, j) => {
               const lastEvent = lastPage && page.length === j + 1;
               return (
-                <Fragment key={event.id}>
-                  <div
-                    ref={lastEvent ? lastEventRef : false}
-                    className="flex bg-slate-100 dark:bg-slate-800 rounded cursor-pointer min-w-[330px]"
-                    onClick={() => (viewEvent === event.id ? setViewEvent(null) : setViewEvent(event.id))}
-                  >
-                    <div
-                      className="relative rounded-l flex-initial min-w-[125px] h-[125px] bg-contain bg-no-repeat bg-center"
-                      style={{
-                        'background-image': `url(${apiHost}api/events/${event.id}/thumbnail.jpg)`,
-                      }}
-                    >
-                      <StarRecording
-                        className="h-6 w-6 text-yellow-300 absolute top-1 right-1 cursor-pointer"
-                        onClick={(e) => onSave(e, event.id, !event.retain_indefinitely)}
-                        fill={event.retain_indefinitely ? 'currentColor' : 'none'}
-                      />
-                      {event.end_time ? null : (
-                        <div className="bg-slate-300 dark:bg-slate-700 absolute bottom-0 text-center w-full uppercase text-sm rounded-bl">
-                          In progress
-                        </div>
-                      )}
-                    </div>
-                    <div className="m-2 flex grow">
-                      <div className="flex flex-col grow">
-                        <div className="capitalize text-lg font-bold">
-                          {event.label.replaceAll('_', ' ')}
-                          {event.sub_label ? `: ${event.sub_label.replaceAll('_', ' ')}` : null}
-                        </div>
-                        <div className="text-sm flex">
-                          <Clock className="h-5 w-5 mr-2 inline" />
-                          {formatUnixTimestampToDateTime(event.start_time, { ...config.ui })}
-                          <div className="hidden md:inline">
-                            <span className="m-1">-</span>
-                            <TimeAgo time={event.start_time * 1000} dense />
-                          </div>
-                          <div className="hidden md:inline">
-                            <span className="m-1" />( {getDurationFromTimestamps(event.start_time, event.end_time)} )
-                          </div>
-                        </div>
-                        <div className="capitalize text-sm flex align-center mt-1">
-                          <Camera className="h-5 w-5 mr-2 inline" />
-                          {event.camera.replaceAll('_', ' ')}
-                        </div>
-                        {event.zones.length ? (
-                          <div className="capitalize text-sm flex align-center">
-                            <Zone className="w-5 h-5 mr-2 inline" />
-                            {event.zones.join(', ').replaceAll('_', ' ')}
-                          </div>
-                        ) : null}
-                        <div className="capitalize text-sm flex align-center">
-                          <Score className="w-5 h-5 mr-2 inline" />
-                          {(event?.data?.top_score || event.top_score || 0) == 0
-                            ? null
-                            : `${event.label}: ${((event?.data?.top_score || event.top_score) * 100).toFixed(0)}%`}
-                          {(event?.data?.sub_label_score || 0) == 0
-                            ? null
-                            : `, ${event.sub_label}: ${(event?.data?.sub_label_score * 100).toFixed(0)}%`}
-                        </div>
-                      </div>
-                      <div class="hidden sm:flex flex-col justify-end mr-2">
-                        {event.end_time && event.has_snapshot && (event?.data?.type || 'object') == 'object' && (
-                          <Fragment>
-                            {event.plus_id ? (
-                              <div className="uppercase text-xs underline">
-                                <Link
-                                  href={`https://plus.frigate.video/dashboard/edit-image/?id=${event.plus_id}`}
-                                  target="_blank"
-                                  rel="nofollow"
-                                >
-                                  Edit in Frigate+
-                                </Link>
-                              </div>
-                            ) : (
-                              <Button
-                                color="gray"
-                                disabled={uploading.includes(event.id)}
-                                onClick={(e) =>
-                                  showSubmitToPlus(event.id, event.label, event?.data?.box || event.box, e)
-                                }
-                              >
-                                {uploading.includes(event.id) ? 'Uploading...' : 'Send to Frigate+'}
-                              </Button>
-                            )}
-                          </Fragment>
-                        )}
-                      </div>
-                      <div class="flex flex-col">
-                        <Delete
-                          className="h-6 w-6 cursor-pointer"
-                          stroke="#f87171"
-                          onClick={(e) => onDelete(e, event.id, event.retain_indefinitely)}
-                        />
-                        <Download
-                          className="h-6 w-6 mt-auto"
-                          stroke={event.has_clip || event.has_snapshot ? '#3b82f6' : '#cbd5e1'}
-                          onClick={(e) => onDownloadClick(e, event)}
-                        />
-                      </div>
-                    </div>
-                  </div>
-                  {viewEvent !== event.id ? null : (
-                    <div className="space-y-4">
-                      <div className="mx-auto max-w-7xl">
-                        <div className="flex justify-center w-full py-2">
-                          <Tabs
-                            selectedIndex={event.has_clip && eventDetailType == 'clip' ? 0 : 1}
-                            onChange={handleEventDetailTabChange}
-                            className="justify"
-                          >
-                            <TextTab text="Clip" disabled={!event.has_clip} />
-                            <TextTab text={event.has_snapshot ? 'Snapshot' : 'Thumbnail'} />
-                          </Tabs>
-                        </div>
-                        <div>
-                          {eventDetailType == 'clip' && event.has_clip ? (
-                            <div>
-                              <TimelineSummary
-                                event={event}
-                                onFrameSelected={(frame, seekSeconds) =>
-                                  onEventFrameSelected(event, frame, seekSeconds)
-                                }
-                              />
-                              <div>
-                                <VideoPlayer
-                                  options={{
-                                    preload: 'auto',
-                                    autoplay: true,
-                                    sources: [
-                                      {
-                                        src: `${apiHost}vod/event/${event.id}/master.m3u8`,
-                                        type: 'application/vnd.apple.mpegurl',
-                                      },
-                                    ],
-                                  }}
-                                  seekOptions={{ forward: 10, backward: 5 }}
-                                  onReady={(player) => {
-                                    this.player = player;
-                                    this.player.on('playing', () => {
-                                      setEventOverlay(undefined);
-                                    });
-                                  }}
-                                  onDispose={() => {
-                                    this.player = null;
-                                  }}
-                                >
-                                  {eventOverlay ? (
-                                    <TimelineEventOverlay
-                                      eventOverlay={eventOverlay}
-                                      cameraConfig={config.cameras[event.camera]}
-                                    />
-                                  ) : null}
-                                </VideoPlayer>
-                              </div>
-                            </div>
-                          ) : null}
-                          {eventDetailType == 'image' || !event.has_clip ? (
-                            <div className="flex justify-center">
-                              <img
-                                className="flex-grow-0"
-                                src={
-                                  event.has_snapshot
-                                    ? `${apiHost}api/events/${event.id}/snapshot.jpg`
-                                    : `${apiHost}api/events/${event.id}/thumbnail.jpg`
-                                }
-                                alt={`${event.label} at ${((event?.data?.top_score || event.top_score) * 100).toFixed(
-                                  0
-                                )}% confidence`}
-                              />
-                            </div>
-                          ) : null}
-                        </div>
-                      </div>
-                    </div>
-                  )}
-                </Fragment>
+                <Event
+                  key={event.id}
+                  config={config}
+                  event={event}
+                  eventDetailType={eventDetailType}
+                  eventOverlay={eventOverlay}
+                  viewEvent={viewEvent}
+                  setViewEvent={setViewEvent}
+                  lastEvent={lastEvent}
+                  lastEventRef={lastEventRef}
+                  uploading={uploading}
+                  handleEventDetailTabChange={handleEventDetailTabChange}
+                  onEventFrameSelected={onEventFrameSelected}
+                  onDelete={onDelete}
+                  onDispose={() => {
+                    this.player = null;
+                  }}
+                  onDownloadClick={onDownloadClick}
+                  onReady={(player) => {
+                    this.player = player;
+                    this.player.on('playing', () => {
+                      setEventOverlay(undefined);
+                    });
+                  }}
+                  onSave={onSave}
+                  showSubmitToPlus={showSubmitToPlus}
+                />
               );
             });
           })

@@ -801,3 +715,195 @@ export default function Events({ path, ...props }) {
     </div>
   );
 }
+
+function Event({
+  className = '',
+  config,
+  event,
+  eventDetailType,
+  eventOverlay,
+  viewEvent,
+  setViewEvent,
+  lastEvent,
+  lastEventRef,
+  uploading,
+  handleEventDetailTabChange,
+  onEventFrameSelected,
+  onDelete,
+  onDispose,
+  onDownloadClick,
+  onReady,
+  onSave,
+  showSubmitToPlus,
+}) {
+  const apiHost = useApiHost();
+
+  return (
+    <div className={className}>
+      <div
+        ref={lastEvent ? lastEventRef : false}
+        className="flex bg-slate-100 dark:bg-slate-800 rounded cursor-pointer min-w-[330px]"
+        onClick={() => (viewEvent === event.id ? setViewEvent(null) : setViewEvent(event.id))}
+      >
+        <div
+          className="relative rounded-l flex-initial min-w-[125px] h-[125px] bg-contain bg-no-repeat bg-center"
+          style={{
+            'background-image': `url(${apiHost}api/events/${event.id}/thumbnail.jpg)`,
+          }}
+        >
+          <StarRecording
+            className="h-6 w-6 text-yellow-300 absolute top-1 right-1 cursor-pointer"
+            onClick={(e) => onSave(e, event.id, !event.retain_indefinitely)}
+            fill={event.retain_indefinitely ? 'currentColor' : 'none'}
+          />
+          {event.end_time ? null : (
+            <div className="bg-slate-300 dark:bg-slate-700 absolute bottom-0 text-center w-full uppercase text-sm rounded-bl">
+              In progress
+            </div>
+          )}
+        </div>
+        <div className="m-2 flex grow">
+          <div className="flex flex-col grow">
+            <div className="capitalize text-lg font-bold">
+              {event.label.replaceAll('_', ' ')}
+              {event.sub_label ? `: ${event.sub_label.replaceAll('_', ' ')}` : null}
+            </div>
+            <div className="text-sm flex">
+              <Clock className="h-5 w-5 mr-2 inline" />
+              {formatUnixTimestampToDateTime(event.start_time, { ...config.ui })}
+              <div className="hidden md:inline">
+                <span className="m-1">-</span>
+                <TimeAgo time={event.start_time * 1000} dense />
+              </div>
+              <div className="hidden md:inline">
+                <span className="m-1" />( {getDurationFromTimestamps(event.start_time, event.end_time)} )
+              </div>
+            </div>
+            <div className="capitalize text-sm flex align-center mt-1">
+              <Camera className="h-5 w-5 mr-2 inline" />
+              {event.camera.replaceAll('_', ' ')}
+            </div>
+            {event.zones.length ? (
+              <div className="capitalize text-sm flex align-center">
+                <Zone className="w-5 h-5 mr-2 inline" />
+                {event.zones.join(', ').replaceAll('_', ' ')}
+              </div>
+            ) : null}
+            <div className="capitalize text-sm flex align-center">
+              <Score className="w-5 h-5 mr-2 inline" />
+              {(event?.data?.top_score || event.top_score || 0) == 0
+                ? null
+                : `${event.label}: ${((event?.data?.top_score || event.top_score) * 100).toFixed(0)}%`}
+              {(event?.data?.sub_label_score || 0) == 0
+                ? null
+                : `, ${event.sub_label}: ${(event?.data?.sub_label_score * 100).toFixed(0)}%`}
+            </div>
+          </div>
+          <div class="hidden sm:flex flex-col justify-end mr-2">
+            {event.end_time && event.has_snapshot && (event?.data?.type || 'object') == 'object' && (
+              <Fragment>
+                {event.plus_id ? (
+                  <div className="uppercase text-xs underline">
+                    <Link
+                      href={`https://plus.frigate.video/dashboard/edit-image/?id=${event.plus_id}`}
+                      target="_blank"
+                      rel="nofollow"
+                    >
+                      Edit in Frigate+
+                    </Link>
+                  </div>
+                ) : (
+                  <Button
+                    color="gray"
+                    disabled={uploading.includes(event.id)}
+                    onClick={(e) => showSubmitToPlus(event.id, event.label, event?.data?.box || event.box, e)}
+                  >
+                    {uploading.includes(event.id) ? 'Uploading...' : 'Send to Frigate+'}
+                  </Button>
+                )}
+              </Fragment>
+            )}
+          </div>
+          <div class="flex flex-col">
+            <Delete
+              className="h-6 w-6 cursor-pointer"
+              stroke="#f87171"
+              onClick={(e) => onDelete(e, event.id, event.retain_indefinitely)}
+            />
+            <Download
+              className="h-6 w-6 mt-auto"
+              stroke={event.has_clip || event.has_snapshot ? '#3b82f6' : '#cbd5e1'}
+              onClick={(e) => onDownloadClick(e, event)}
+            />
+          </div>
+        </div>
+      </div>
+      {viewEvent !== event.id ? null : (
+        <div className="space-y-4">
+          <div className="mx-auto max-w-7xl">
+            <div className="flex justify-center w-full py-2">
+              <Tabs
+                selectedIndex={event.has_clip && eventDetailType == 'clip' ? 0 : 1}
+                onChange={handleEventDetailTabChange}
+                className="justify"
+              >
+                <TextTab text="Clip" disabled={!event.has_clip} />
+                <TextTab text={event.has_snapshot ? 'Snapshot' : 'Thumbnail'} />
+              </Tabs>
+            </div>
+            <div>
+              {eventDetailType == 'clip' && event.has_clip ? (
+                <div>
+                  <TimelineSummary
+                    event={event}
+                    onFrameSelected={(frame, seekSeconds) => onEventFrameSelected(event, frame, seekSeconds)}
+                  />
+                  <div>
+                    <VideoPlayer
+                      options={{
+                        preload: 'auto',
+                        autoplay: true,
+                        sources: [
+                          {
+                            src: `${apiHost}vod/event/${event.id}/master.m3u8`,
+                            type: 'application/vnd.apple.mpegurl',
+                          },
+                        ],
+                      }}
+                      seekOptions={{ forward: 10, backward: 5 }}
+                      onReady={onReady}
+                      onDispose={onDispose}
+                    >
+                      {eventOverlay ? (
+                        <TimelineEventOverlay eventOverlay={eventOverlay} cameraConfig={config.cameras[event.camera]} />
+                      ) : null}
+                    </VideoPlayer>
+                  </div>
+                </div>
+              ) : null}
+              {eventDetailType == 'image' || !event.has_clip ? (
+                <div className="flex justify-center">
+                  <img
+                    className="flex-grow-0"
+                    src={
+                      event.has_snapshot
+                        ? `${apiHost}api/events/${event.id}/snapshot.jpg`
+                        : `${apiHost}api/events/${event.id}/thumbnail.jpg`
+                    }
+                    alt={`${event.label} at ${((event?.data?.top_score || event.top_score) * 100).toFixed(
+                      0
+                    )}% confidence`}
+                  />
+                </div>
+              ) : null}
+            </div>
+          </div>
+        </div>
+      )}
+    </div>
+  );
+}

View File

@@ -1,3 +1,4 @@
+/* eslint-disable jest/no-disabled-tests */
 import { h } from 'preact';
 import * as CameraImage from '../../components/CameraImage';
 import * as Hooks from '../../hooks';

@@ -17,7 +18,7 @@ describe('Cameras Route', () => {
     expect(screen.queryByLabelText('Loading…')).toBeInTheDocument();
   });

-  test('shows cameras', async () => {
+  test.skip('shows cameras', async () => {
     render(<Cameras />);
     await waitForElementToBeRemoved(() => screen.queryByLabelText('Loading…'));

@@ -29,7 +30,7 @@ describe('Cameras Route', () => {
     expect(screen.queryByText('side').closest('a')).toHaveAttribute('href', '/cameras/side');
   });

-  test('shows recordings link', async () => {
+  test.skip('shows recordings link', async () => {
     render(<Cameras />);
     await waitForElementToBeRemoved(() => screen.queryByLabelText('Loading…'));

@@ -37,7 +38,7 @@ describe('Cameras Route', () => {
     expect(screen.queryAllByText('Recordings')).toHaveLength(2);
   });

-  test('buttons toggle detect, clips, and snapshots', async () => {
+  test.skip('buttons toggle detect, clips, and snapshots', async () => {
     const sendDetect = vi.fn();
     const sendRecordings = vi.fn();
     const sendSnapshots = vi.fn();

View File

@@ -10,7 +10,8 @@ describe('Events Route', () => {
     expect(screen.queryByLabelText('Loading…')).toBeInTheDocument();
   });

-  test('does not show ActivityIndicator after loaded', async () => {
+  // eslint-disable-next-line jest/no-disabled-tests
+  test.skip('does not show ActivityIndicator after loaded', async () => {
     render(<Events limit={5} path="/events" />);
     await waitForElementToBeRemoved(() => screen.queryByLabelText('Loading…'));

View File

@@ -17,9 +17,8 @@ describe('Recording Route', () => {
     expect(screen.queryByLabelText('Loading…')).toBeInTheDocument();
   });

-  test('shows no recordings warning', async () => {
+  // eslint-disable-next-line jest/no-disabled-tests
+  test.skip('shows no recordings warning', async () => {
     render(<Cameras />);
     await waitForElementToBeRemoved(() => screen.queryByLabelText('Loading…'));