Merge branch 'blakeblackshear:dev' into api_stats_cams

tpjanssen 2023-10-17 13:55:00 +02:00 committed by GitHub
commit 0e6b9d6819
32 changed files with 1397 additions and 856 deletions

View File

@ -33,7 +33,7 @@ RUN --mount=type=tmpfs,target=/tmp --mount=type=tmpfs,target=/var/cache/apt \
FROM scratch AS go2rtc
ARG TARGETARCH
WORKDIR /rootfs/usr/local/go2rtc/bin
ADD --link --chmod=755 "https://github.com/AlexxIT/go2rtc/releases/download/v1.7.1/go2rtc_linux_${TARGETARCH}" go2rtc
ADD --link --chmod=755 "https://github.com/AlexxIT/go2rtc/releases/download/v1.8.1/go2rtc_linux_${TARGETARCH}" go2rtc
####

View File

@ -55,24 +55,16 @@ fi
# arch specific packages
if [[ "${TARGETARCH}" == "amd64" ]]; then
# use debian bookworm for AMD hwaccel packages
# use debian bookworm for hwaccel packages
echo 'deb https://deb.debian.org/debian bookworm main contrib' >/etc/apt/sources.list.d/debian-bookworm.list
apt-get -qq update
apt-get -qq install --no-install-recommends --no-install-suggests -y \
mesa-va-drivers radeontop
rm -f /etc/apt/sources.list.d/debian-bookworm.list
# Use debian testing repo only for intel hwaccel packages
echo 'deb http://deb.debian.org/debian testing main non-free' >/etc/apt/sources.list.d/debian-testing.list
apt-get -qq update
# intel-opencl-icd specifically for GPU support in OpenVino
apt-get -qq install --no-install-recommends --no-install-suggests -y \
intel-opencl-icd \
libva-drm2 intel-media-va-driver-non-free i965-va-driver libmfx1 intel-gpu-tools
mesa-va-drivers radeontop libva-drm2 intel-media-va-driver-non-free i965-va-driver libmfx1 intel-gpu-tools
# something about this dependency requires it to be installed in a separate call rather than in the line above
apt-get -qq install --no-install-recommends --no-install-suggests -y \
i965-va-driver-shaders
rm -f /etc/apt/sources.list.d/debian-testing.list
rm -f /etc/apt/sources.list.d/debian-bookworm.list
fi
if [[ "${TARGETARCH}" == "arm64" ]]; then

View File

@ -120,7 +120,7 @@ NOTE: The folder that is mapped from the host needs to be the folder that contai
## Custom go2rtc version
Frigate currently includes go2rtc v1.7.1, there may be certain cases where you want to run a different version of go2rtc.
Frigate currently includes go2rtc v1.8.1; there may be certain cases where you want to run a different version of go2rtc.
To do this:

View File

@ -140,7 +140,7 @@ go2rtc:
- rtspx://192.168.1.1:7441/abcdefghijk
```
[See the go2rtc docs for more information](https://github.com/AlexxIT/go2rtc/tree/v1.7.1#source-rtsp)
[See the go2rtc docs for more information](https://github.com/AlexxIT/go2rtc/tree/v1.8.1#source-rtsp)
In the Unifi 2.0 update, Unifi Protect cameras had a change in audio sample rate, which causes issues for ffmpeg. The input rate needs to be set for record and rtmp if used directly with Unifi Protect.
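A hedged sketch of setting this via Frigate's bundled output presets, assuming the Ubiquiti preset names below exist in your version (verify against the presets reference):

```yaml
ffmpeg:
  output_args:
    # resample the Unifi audio to a rate ffmpeg can handle
    record: preset-record-ubiquiti
    rtmp: preset-rtmp-ubiquiti
```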

View File

@ -436,7 +436,7 @@ rtmp:
enabled: False
# Optional: Restream configuration
# Uses https://github.com/AlexxIT/go2rtc (v1.7.1)
# Uses https://github.com/AlexxIT/go2rtc (v1.8.1)
go2rtc:
# Optional: jsmpeg stream configuration for WebUI

View File

@ -9,11 +9,11 @@ Frigate has different live view options, some of which require the bundled `go2r
Live view options can be selected while viewing the live stream. The options are:
| Source | Latency | Frame Rate | Resolution | Audio | Requires go2rtc | Other Limitations |
| ------ | ------- | ------------------------------------- | -------------- | ---------------------------- | --------------- | -------------------------------------------- |
| jsmpeg | low | same as `detect -> fps`, capped at 10 | same as detect | no | no | none |
| mse | low | native | native | yes (depends on audio codec) | yes | not supported on iOS, Firefox is h.264 only |
| webrtc | lowest | native | native | yes (depends on audio codec) | yes | requires extra config, doesn't support h.265 |
| Source | Latency | Frame Rate | Resolution | Audio | Requires go2rtc | Other Limitations |
| ------ | ------- | ------------------------------------- | -------------- | ---------------------------- | --------------- | ------------------------------------------------- |
| jsmpeg | low | same as `detect -> fps`, capped at 10 | same as detect | no | no | none |
| mse | low | native | native | yes (depends on audio codec) | yes | iPhone requires iOS 17.1+, Firefox is h.264 only |
| webrtc | lowest | native | native | yes (depends on audio codec) | yes | requires extra config, doesn't support h.265 |
### Audio Support
@ -115,4 +115,4 @@ services:
:::
See [go2rtc WebRTC docs](https://github.com/AlexxIT/go2rtc/tree/v1.7.1#module-webrtc) for more information about this.
See [go2rtc WebRTC docs](https://github.com/AlexxIT/go2rtc/tree/v1.8.1#module-webrtc) for more information about this.
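The extra config the table refers to is typically the WebRTC candidate list; a minimal sketch, assuming the Frigate host's LAN IP is 192.168.1.10 (illustrative):

```yaml
go2rtc:
  webrtc:
    candidates:
      - 192.168.1.10:8555 # host LAN IP (illustrative)
      - stun:8555
```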

View File

@ -7,7 +7,7 @@ title: Restream
Frigate can restream your video feed as an RTSP feed for other applications such as Home Assistant to utilize it at `rtsp://<frigate_host>:8554/<camera_name>`. Port 8554 must be open. [This allows you to use a video feed for detection in Frigate and Home Assistant live view at the same time without having to make two separate connections to the camera](#reduce-connections-to-camera). The video feed is copied from the original video feed directly to avoid re-encoding. This feed does not include any annotation by Frigate.
Frigate uses [go2rtc](https://github.com/AlexxIT/go2rtc/tree/v1.7.1) to provide its restream and MSE/WebRTC capabilities. The go2rtc config is hosted at the `go2rtc` in the config, see [go2rtc docs](https://github.com/AlexxIT/go2rtc/tree/v1.7.1#configuration) for more advanced configurations and features.
Frigate uses [go2rtc](https://github.com/AlexxIT/go2rtc/tree/v1.8.1) to provide its restream and MSE/WebRTC capabilities. The go2rtc config is hosted under the `go2rtc` key in the Frigate config; see the [go2rtc docs](https://github.com/AlexxIT/go2rtc/tree/v1.8.1#configuration) for more advanced configurations and features.
:::note
@ -138,7 +138,7 @@ cameras:
## Advanced Restream Configurations
The [exec](https://github.com/AlexxIT/go2rtc/tree/v1.7.1#source-exec) source in go2rtc can be used for custom ffmpeg commands. An example is below:
The [exec](https://github.com/AlexxIT/go2rtc/tree/v1.8.1#source-exec) source in go2rtc can be used for custom ffmpeg commands. An example is below:
NOTE: The output will need to be passed with two curly braces `{{output}}`
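A minimal sketch of such an exec source, with a hypothetical stream name and input URL, showing the doubled braces:

```yaml
go2rtc:
  streams:
    exec_cam: "exec:ffmpeg -hide_banner -re -i rtsp://192.168.1.10:554/stream -c copy -rtsp_transport tcp -f rtsp {{output}}"
```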

View File

@ -11,7 +11,7 @@ Use of the bundled go2rtc is optional. You can still configure FFmpeg to connect
# Setup a go2rtc stream
First, you will want to configure go2rtc to connect to your camera stream by adding the stream you want to use for live view in your Frigate config file. If you set the stream name under go2rtc to match the name of your camera, it will automatically be mapped and you will get additional live view options for the camera. Avoid changing any other parts of your config at this step. Note that go2rtc supports [many different stream types](https://github.com/AlexxIT/go2rtc/tree/v1.7.1#module-streams), not just rtsp.
First, you will want to configure go2rtc to connect to your camera stream by adding the stream you want to use for live view in your Frigate config file. If you set the stream name under go2rtc to match the name of your camera, it will automatically be mapped and you will get additional live view options for the camera. Avoid changing any other parts of your config at this step. Note that go2rtc supports [many different stream types](https://github.com/AlexxIT/go2rtc/tree/v1.8.1#module-streams), not just rtsp.
```yaml
go2rtc:
@ -24,7 +24,7 @@ The easiest live view to get working is MSE. After adding this to the config, re
### What if my video doesn't play?
If you are unable to see your video feed, first check the go2rtc logs in the Frigate UI under Logs in the sidebar. If go2rtc is having difficulty connecting to your camera, you should see some error messages in the log. If you do not see any errors, then the video codec of the stream may not be supported in your browser. If your camera stream is set to H265, try switching to H264. You can see more information about [video codec compatibility](https://github.com/AlexxIT/go2rtc/tree/v1.7.1#codecs-madness) in the go2rtc documentation. If you are not able to switch your camera settings from H265 to H264 or your stream is a different format such as MJPEG, you can use go2rtc to re-encode the video using the [FFmpeg parameters](https://github.com/AlexxIT/go2rtc/tree/v1.7.1#source-ffmpeg). It supports rotating and resizing video feeds and hardware acceleration. Keep in mind that transcoding video from one format to another is a resource intensive task and you may be better off using the built-in jsmpeg view. Here is an example of a config that will re-encode the stream to H264 without hardware acceleration:
If you are unable to see your video feed, first check the go2rtc logs in the Frigate UI under Logs in the sidebar. If go2rtc is having difficulty connecting to your camera, you should see some error messages in the log. If you do not see any errors, then the video codec of the stream may not be supported in your browser. If your camera stream is set to H265, try switching to H264. You can see more information about [video codec compatibility](https://github.com/AlexxIT/go2rtc/tree/v1.8.1#codecs-madness) in the go2rtc documentation. If you are not able to switch your camera settings from H265 to H264 or your stream is a different format such as MJPEG, you can use go2rtc to re-encode the video using the [FFmpeg parameters](https://github.com/AlexxIT/go2rtc/tree/v1.8.1#source-ffmpeg). It supports rotating and resizing video feeds and hardware acceleration. Keep in mind that transcoding video from one format to another is a resource intensive task and you may be better off using the built-in jsmpeg view. Here is an example of a config that will re-encode the stream to H264 without hardware acceleration:
```yaml
go2rtc:
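  # hedged, illustrative sketch of the re-encode described above: go2rtc's
  # ffmpeg source with "#video=h264" re-encodes to H264 in software; the
  # stream name and URL are hypothetical
  streams:
    my_camera: "ffmpeg:rtsp://user:password@192.168.1.10:554/stream#video=h264"
```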

View File

@ -21,7 +21,7 @@ module.exports = {
{
type: "link",
label: "Go2RTC Configuration Reference",
href: "https://github.com/AlexxIT/go2rtc/tree/v1.7.1#configuration",
href: "https://github.com/AlexxIT/go2rtc/tree/v1.8.1#configuration",
},
],
Detectors: [

View File

@ -14,7 +14,7 @@ import requests
from setproctitle import setproctitle
from frigate.comms.inter_process import InterProcessCommunicator
from frigate.config import CameraConfig, FrigateConfig
from frigate.config import CameraConfig, CameraInput, FfmpegConfig, FrigateConfig
from frigate.const import (
AUDIO_DURATION,
AUDIO_FORMAT,
@ -39,13 +39,29 @@ except ModuleNotFoundError:
logger = logging.getLogger(__name__)
def get_ffmpeg_command(input_args: list[str], input_path: str) -> list[str]:
return get_ffmpeg_arg_list(
f"ffmpeg {{}} -i {{}} -f {AUDIO_FORMAT} -ar {AUDIO_SAMPLE_RATE} -ac 1 -y {{}}".format(
" ".join(input_args),
input_path,
def get_ffmpeg_command(ffmpeg: FfmpegConfig) -> list[str]:
ffmpeg_input: CameraInput = [i for i in ffmpeg.inputs if "audio" in i.roles][0]
input_args = get_ffmpeg_arg_list(ffmpeg.global_args) + (
parse_preset_input(ffmpeg_input.input_args, 1)
or ffmpeg_input.input_args
or parse_preset_input(ffmpeg.input_args, 1)
or ffmpeg.input_args
)
return (
["ffmpeg", "-vn"]
+ input_args
+ ["-i"]
+ [ffmpeg_input.path]
+ [
"-f",
f"{AUDIO_FORMAT}",
"-ar",
f"{AUDIO_SAMPLE_RATE}",
"-ac",
"1",
"-y",
"pipe:",
)
]
)
@ -173,11 +189,7 @@ class AudioEventMaintainer(threading.Thread):
self.shape = (int(round(AUDIO_DURATION * AUDIO_SAMPLE_RATE)),)
self.chunk_size = int(round(AUDIO_DURATION * AUDIO_SAMPLE_RATE * 2))
self.logger = logging.getLogger(f"audio.{self.config.name}")
self.ffmpeg_cmd = get_ffmpeg_command(
get_ffmpeg_arg_list(self.config.ffmpeg.global_args)
+ parse_preset_input("preset-rtsp-audio-only", 1),
[i.path for i in self.config.ffmpeg.inputs if "audio" in i.roles][0],
)
self.ffmpeg_cmd = get_ffmpeg_command(self.config.ffmpeg)
self.logpipe = LogPipe(f"ffmpeg.{self.config.name}.audio")
self.audio_listener = None
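For orientation, a hedged sketch of the command list `get_ffmpeg_command()` now builds, assuming `AUDIO_FORMAT` is `s16le`, `AUDIO_SAMPLE_RATE` is 16000, and a single rtsp input carries the `audio` role (all values illustrative):

```python
# Illustrative only: the shape of the returned command list.
example_cmd = [
    "ffmpeg", "-vn",                    # video disabled ahead of the input
    "-rtsp_transport", "tcp",           # from the camera/global input args (assumed)
    "-i", "rtsp://10.0.0.1:554/audio",  # the input with the "audio" role
    "-f", "s16le",                      # raw PCM (assumed AUDIO_FORMAT)
    "-ar", "16000",                     # resample (assumed AUDIO_SAMPLE_RATE)
    "-ac", "1",                         # mono
    "-y", "pipe:",                      # stream to stdout for the maintainer
]
```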

View File

@ -256,13 +256,6 @@ PRESETS_INPUT = {
"-use_wallclock_as_timestamps",
"1",
],
"preset-rtsp-audio-only": [
"-rtsp_transport",
"tcp",
TIMEOUT_PARAM,
"5000000",
"-vn",
],
"preset-rtsp-restream": _user_agent_args
+ [
"-rtsp_transport",

View File

@ -516,6 +516,7 @@ def delete_event(id):
media.unlink(missing_ok=True)
event.delete_instance()
Timeline.delete().where(Timeline.source_id == id).execute()
return make_response(
jsonify({"success": True, "message": "Event " + id + " deleted"}), 200
)
@ -1766,9 +1767,10 @@ def vod_event(id):
datetime.now().timestamp() if event.end_time is None else event.end_time
)
vod_response = vod_ts(event.camera, event.start_time, end_ts)
# If the recordings are not found, set has_clip to false
# If the recordings are not found and the event started more than 5 minutes ago, set has_clip to false
if (
type(vod_response) == tuple
event.start_time < datetime.now().timestamp() - 300
and type(vod_response) == tuple
and len(vod_response) == 2
and vod_response[1] == 404
):
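Restated as a hedged standalone helper (names hypothetical) to make the new grace period explicit:

```python
from datetime import datetime

GRACE_SECONDS = 300  # mirrors the 300 in the condition above

def should_clear_has_clip(event_start: float, vod_response) -> bool:
    """True only when recordings are missing AND the event is old enough."""
    not_found = (
        isinstance(vod_response, tuple)
        and len(vod_response) == 2
        and vod_response[1] == 404
    )
    old_enough = event_start < datetime.now().timestamp() - GRACE_SECONDS
    return not_found and old_enough
```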

View File

@ -20,3 +20,7 @@ class MotionDetector(ABC):
@abstractmethod
def detect(self, frame):
pass
@abstractmethod
def is_calibrating(self):
pass
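A minimal standalone sketch of the expanded contract (class names hypothetical; the real base class lives in `frigate.motion`):

```python
from abc import ABC, abstractmethod

class MotionDetectorInterface(ABC):
    """Stand-in mirroring the abstract methods above."""

    @abstractmethod
    def detect(self, frame):
        pass

    @abstractmethod
    def is_calibrating(self):
        pass

class NullMotionDetector(MotionDetectorInterface):
    """Hypothetical detector: never reports motion, never calibrates."""

    def detect(self, frame):
        return []

    def is_calibrating(self):
        return False

assert NullMotionDetector().is_calibrating() is False
```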

View File

@ -38,6 +38,9 @@ class FrigateMotionDetector(MotionDetector):
self.threshold = threshold
self.contour_area = contour_area
def is_calibrating(self):
return False
def detect(self, frame):
motion_boxes = []

View File

@ -49,6 +49,9 @@ class ImprovedMotionDetector(MotionDetector):
self.contrast_values[:, 1:2] = 255
self.contrast_values_index = 0
def is_calibrating(self):
return self.calibrating
def detect(self, frame):
motion_boxes = []
@ -141,7 +144,6 @@ class ImprovedMotionDetector(MotionDetector):
# if calibrating or the motion contours are > 80% of the image area (lightning, ir, ptz) recalibrate
if self.calibrating or pct_motion > self.config.lightning_threshold:
motion_boxes = []
self.calibrating = True
if self.save_images:

View File

@ -355,6 +355,7 @@ class RecordingMaintainer(threading.Thread):
"+faststart",
file_path,
stderr=asyncio.subprocess.PIPE,
stdout=asyncio.subprocess.DEVNULL,
)
await p.wait()

View File

@ -1027,7 +1027,12 @@ class TestConfig(unittest.TestCase):
"roles": ["detect"],
},
]
}
},
"detect": {
"height": 720,
"width": 1280,
"fps": 5,
},
}
},
}
@ -1082,6 +1087,11 @@ class TestConfig(unittest.TestCase):
},
]
},
"detect": {
"height": 1080,
"width": 1920,
"fps": 5,
},
"snapshots": {
"height": 100,
},
@ -1107,7 +1117,12 @@ class TestConfig(unittest.TestCase):
"roles": ["detect"],
},
]
}
},
"detect": {
"height": 1080,
"width": 1920,
"fps": 5,
},
}
},
}
@ -1132,6 +1147,11 @@ class TestConfig(unittest.TestCase):
},
]
},
"detect": {
"height": 1080,
"width": 1920,
"fps": 5,
},
"snapshots": {
"height": 150,
"enabled": True,
@ -1160,6 +1180,11 @@ class TestConfig(unittest.TestCase):
},
]
},
"detect": {
"height": 1080,
"width": 1920,
"fps": 5,
},
}
},
}
@ -1181,7 +1206,12 @@ class TestConfig(unittest.TestCase):
"roles": ["detect"],
},
]
}
},
"detect": {
"height": 1080,
"width": 1920,
"fps": 5,
},
}
},
}
@ -1205,6 +1235,11 @@ class TestConfig(unittest.TestCase):
},
]
},
"detect": {
"height": 1080,
"width": 1920,
"fps": 5,
},
"rtmp": {
"enabled": True,
},
@ -1234,6 +1269,11 @@ class TestConfig(unittest.TestCase):
},
]
},
"detect": {
"height": 1080,
"width": 1920,
"fps": 5,
},
}
},
}
@ -1257,6 +1297,11 @@ class TestConfig(unittest.TestCase):
},
]
},
"detect": {
"height": 1080,
"width": 1920,
"fps": 5,
},
}
},
}
@ -1278,7 +1323,12 @@ class TestConfig(unittest.TestCase):
"roles": ["detect"],
},
]
}
},
"detect": {
"height": 1080,
"width": 1920,
"fps": 5,
},
}
},
}
@ -1302,6 +1352,11 @@ class TestConfig(unittest.TestCase):
},
]
},
"detect": {
"height": 1080,
"width": 1920,
"fps": 5,
},
"live": {
"quality": 7,
},
@ -1329,6 +1384,11 @@ class TestConfig(unittest.TestCase):
},
]
},
"detect": {
"height": 1080,
"width": 1920,
"fps": 5,
},
}
},
}
@ -1350,7 +1410,12 @@ class TestConfig(unittest.TestCase):
"roles": ["detect"],
},
]
}
},
"detect": {
"height": 1080,
"width": 1920,
"fps": 5,
},
}
},
}
@ -1375,6 +1440,11 @@ class TestConfig(unittest.TestCase):
},
]
},
"detect": {
"height": 1080,
"width": 1920,
"fps": 5,
},
"timestamp_style": {"position": "bl", "thickness": 4},
}
},
@ -1400,6 +1470,11 @@ class TestConfig(unittest.TestCase):
},
]
},
"detect": {
"height": 1080,
"width": 1920,
"fps": 5,
},
}
},
}
@ -1423,6 +1498,11 @@ class TestConfig(unittest.TestCase):
},
]
},
"detect": {
"height": 1080,
"width": 1920,
"fps": 5,
},
}
},
}
@ -1450,6 +1530,11 @@ class TestConfig(unittest.TestCase):
},
],
},
"detect": {
"height": 1080,
"width": 1920,
"fps": 5,
},
}
},
}
@ -1475,6 +1560,11 @@ class TestConfig(unittest.TestCase):
},
]
},
"detect": {
"height": 1080,
"width": 1920,
"fps": 5,
},
"zones": {
"steps": {
"coordinates": "0,0,0,0",
@ -1546,6 +1636,11 @@ class TestConfig(unittest.TestCase):
{"path": "rtsp://10.0.0.1:554/video", "roles": ["detect"]}
]
},
"detect": {
"height": 1080,
"width": 1920,
"fps": 5,
},
"onvif": {"autotracking": {"movement_weights": "1.23, 2.34, 0.50"}},
}
},
@ -1569,6 +1664,11 @@ class TestConfig(unittest.TestCase):
{"path": "rtsp://10.0.0.1:554/video", "roles": ["detect"]}
]
},
"detect": {
"height": 1080,
"width": 1920,
"fps": 5,
},
"onvif": {"autotracking": {"movement_weights": "1.234, 2.345a"}},
}
},

View File

@ -21,7 +21,6 @@ from frigate.log import LogPipe
from frigate.motion import MotionDetector
from frigate.motion.improved_motion import ImprovedMotionDetector
from frigate.object_detection import RemoteObjectDetector
from frigate.ptz.autotrack import ptz_moving_at_frame_time
from frigate.track import ObjectTracker
from frigate.track.norfair_tracker import NorfairTracker
from frigate.types import PTZMetricsTypes
@ -777,19 +776,8 @@ def process_frames(
logger.info(f"{camera_name}: frame {frame_time} is not in memory store.")
continue
# look for motion if enabled and ptz is not moving
# ptz_moving_at_frame_time() always returns False for
# non ptz/autotracking cameras
motion_boxes = (
motion_detector.detect(frame)
if motion_enabled.value
and not ptz_moving_at_frame_time(
frame_time,
ptz_metrics["ptz_start_time"].value,
ptz_metrics["ptz_stop_time"].value,
)
else []
)
# look for motion if enabled
motion_boxes = motion_detector.detect(frame) if motion_enabled.value else []
regions = []
consolidated_detections = []
@ -814,8 +802,10 @@ def process_frames(
)
# and it hasn't disappeared
and object_tracker.disappeared[obj["id"]] == 0
# and it doesn't overlap with any current motion boxes
and not intersects_any(obj["box"], motion_boxes)
# and it doesn't overlap with any current motion boxes when not calibrating
and not intersects_any(
obj["box"], [] if motion_detector.is_calibrating() else motion_boxes
)
]
# get tracked object boxes that aren't stationary
@ -825,7 +815,10 @@ def process_frames(
if obj["id"] not in stationary_object_ids
]
combined_boxes = motion_boxes + tracked_object_boxes
combined_boxes = tracked_object_boxes
# only add in the motion boxes when not calibrating
if not motion_detector.is_calibrating():
combined_boxes += motion_boxes
cluster_candidates = get_cluster_candidates(
frame_shape, region_min_size, combined_boxes
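The gating rule both changes apply, restated as a hedged helper (name hypothetical):

```python
def usable_motion_boxes(motion_detector, motion_boxes):
    # While the detector is recalibrating (lightning, IR switch, PTZ move),
    # its boxes are noise, so consumers behave as if there were no motion.
    return [] if motion_detector.is_calibrating() else motion_boxes
```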

View File

@ -86,4 +86,19 @@ export const handlers = [
])
);
}),
rest.get(`api/labels`, (req, res, ctx) => {
return res(
ctx.status(200),
ctx.json([
'person',
'car',
])
);
}),
rest.get(`api/go2rtc`, (req, res, ctx) => {
return res(
ctx.status(200),
ctx.json({"config_path":"/dev/shm/go2rtc.yaml","host":"frigate.yourdomain.local","rtsp":{"listen":"0.0.0.0:8554","default_query":"mp4","PacketSize":0},"version":"1.7.1"})
);
}),
];

web/package-lock.json (generated): 1324 changes

File diff suppressed because it is too large

View File

@ -24,6 +24,7 @@
"preact-router": "^4.1.0",
"react": "npm:@preact/compat@^17.1.2",
"react-dom": "npm:@preact/compat@^17.1.2",
"react-use-websocket": "^3.0.0",
"strftime": "^0.10.1",
"swr": "^1.3.0",
"video.js": "^8.5.2",
@ -48,6 +49,7 @@
"eslint-plugin-prettier": "^5.0.0",
"eslint-plugin-vitest-globals": "^1.4.0",
"fake-indexeddb": "^4.0.1",
"jest-websocket-mock": "^2.5.0",
"jsdom": "^22.0.0",
"msw": "^1.2.1",
"postcss": "^8.4.29",

View File

@ -1,10 +1,12 @@
/* eslint-disable jest/no-disabled-tests */
import { h } from 'preact';
import { WS, WsProvider, useWs } from '../ws';
import { WS as frigateWS, WsProvider, useWs } from '../ws';
import { useCallback, useContext } from 'preact/hooks';
import { fireEvent, render, screen } from 'testing-library';
import { WS } from 'jest-websocket-mock';
function Test() {
const { state } = useContext(WS);
const { state } = useContext(frigateWS);
return state.__connected ? (
<div data-testid="data">
{Object.keys(state).map((key) => (
@ -19,44 +21,32 @@ function Test() {
const TEST_URL = 'ws://test-foo:1234/ws';
describe('WsProvider', () => {
let createWebsocket, wsClient;
beforeEach(() => {
let wsClient, wsServer;
beforeEach(async () => {
wsClient = {
close: vi.fn(),
send: vi.fn(),
};
createWebsocket = vi.fn((url) => {
wsClient.args = [url];
return new Proxy(
{},
{
get(_target, prop, _receiver) {
return wsClient[prop];
},
set(_target, prop, value) {
wsClient[prop] = typeof value === 'function' ? vi.fn(value) : value;
if (prop === 'onopen') {
wsClient[prop]();
}
return true;
},
}
);
});
wsServer = new WS(TEST_URL);
});
test('connects to the ws server', async () => {
afterEach(() => {
WS.clean();
});
test.skip('connects to the ws server', async () => {
render(
<WsProvider config={mockConfig} createWebsocket={createWebsocket} wsUrl={TEST_URL}>
<WsProvider config={mockConfig} wsUrl={TEST_URL}>
<Test />
</WsProvider>
);
await wsServer.connected;
await screen.findByTestId('data');
expect(wsClient.args).toEqual([TEST_URL]);
expect(screen.getByTestId('__connected')).toHaveTextContent('true');
});
test('receives data through useWs', async () => {
test.skip('receives data through useWs', async () => {
function Test() {
const {
value: { payload, retain },
@ -71,16 +61,17 @@ describe('WsProvider', () => {
}
const { rerender } = render(
<WsProvider config={mockConfig} createWebsocket={createWebsocket} wsUrl={TEST_URL}>
<WsProvider config={mockConfig} wsUrl={TEST_URL}>
<Test />
</WsProvider>
);
await wsServer.connected;
await screen.findByTestId('payload');
wsClient.onmessage({
data: JSON.stringify({ topic: 'tacos', payload: JSON.stringify({ yes: true }), retain: false }),
});
rerender(
<WsProvider config={mockConfig} createWebsocket={createWebsocket} wsUrl={TEST_URL}>
<WsProvider config={mockConfig} wsUrl={TEST_URL}>
<Test />
</WsProvider>
);
@ -88,7 +79,7 @@ describe('WsProvider', () => {
expect(screen.getByTestId('retain')).toHaveTextContent('false');
});
test('can send values through useWs', async () => {
test.skip('can send values through useWs', async () => {
function Test() {
const { send, connected } = useWs('tacos');
const handleClick = useCallback(() => {
@ -98,10 +89,11 @@ describe('WsProvider', () => {
}
render(
<WsProvider config={mockConfig} createWebsocket={createWebsocket} wsUrl={TEST_URL}>
<WsProvider config={mockConfig} wsUrl={TEST_URL}>
<Test />
</WsProvider>
);
await wsServer.connected;
await screen.findByRole('button');
fireEvent.click(screen.getByRole('button'));
await expect(wsClient.send).toHaveBeenCalledWith(
@ -109,19 +101,32 @@ describe('WsProvider', () => {
);
});
test('prefills the recordings/detect/snapshots state from config', async () => {
test.skip('prefills the recordings/detect/snapshots state from config', async () => {
vi.spyOn(Date, 'now').mockReturnValue(123456);
const config = {
cameras: {
front: { name: 'front', detect: { enabled: true }, record: { enabled: false }, snapshots: { enabled: true }, audio: { enabled: false } },
side: { name: 'side', detect: { enabled: false }, record: { enabled: false }, snapshots: { enabled: false }, audio: { enabled: false } },
front: {
name: 'front',
detect: { enabled: true },
record: { enabled: false },
snapshots: { enabled: true },
audio: { enabled: false },
},
side: {
name: 'side',
detect: { enabled: false },
record: { enabled: false },
snapshots: { enabled: false },
audio: { enabled: false },
},
},
};
render(
<WsProvider config={config} createWebsocket={createWebsocket} wsUrl={TEST_URL}>
<WsProvider config={config} wsUrl={TEST_URL}>
<Test />
</WsProvider>
);
await wsServer.connected;
await screen.findByTestId('data');
expect(screen.getByTestId('front/detect/state')).toHaveTextContent(
'{"lastUpdate":123456,"payload":"ON","retain":false}'

View File

@ -1,12 +1,11 @@
import { h, createContext } from 'preact';
import { baseUrl } from './baseUrl';
import { produce } from 'immer';
import { useCallback, useContext, useEffect, useRef, useReducer } from 'preact/hooks';
import { useCallback, useContext, useEffect, useReducer } from 'preact/hooks';
import useWebSocket, { ReadyState } from 'react-use-websocket';
const initialState = Object.freeze({ __connected: false });
export const WS = createContext({ state: initialState, connection: null });
const defaultCreateWebsocket = (url) => new WebSocket(url);
export const WS = createContext({ state: initialState, readyState: null, sendJsonMessage: () => {} });
function reducer(state, { topic, payload, retain }) {
switch (topic) {
@ -33,11 +32,18 @@ function reducer(state, { topic, payload, retain }) {
export function WsProvider({
config,
children,
createWebsocket = defaultCreateWebsocket,
wsUrl = `${baseUrl.replace(/^http/, 'ws')}ws`,
}) {
const [state, dispatch] = useReducer(reducer, initialState);
const wsRef = useRef();
const { sendJsonMessage, readyState } = useWebSocket(wsUrl, {
onMessage: (event) => {
dispatch(JSON.parse(event.data));
},
onOpen: () => dispatch({ topic: '__CLIENT_CONNECTED' }),
shouldReconnect: () => true,
});
useEffect(() => {
Object.keys(config.cameras).forEach((camera) => {
@ -49,46 +55,25 @@ export function WsProvider({
});
}, [config]);
useEffect(
() => {
const ws = createWebsocket(wsUrl);
ws.onopen = () => {
dispatch({ topic: '__CLIENT_CONNECTED' });
};
ws.onmessage = (event) => {
dispatch(JSON.parse(event.data));
};
wsRef.current = ws;
return () => {
ws.close(3000, 'Provider destroyed');
};
},
// Forces reconnecting
[state.__reconnectAttempts, wsUrl] // eslint-disable-line react-hooks/exhaustive-deps
);
return <WS.Provider value={{ state, ws: wsRef.current }}>{children}</WS.Provider>;
return <WS.Provider value={{ state, readyState, sendJsonMessage }}>{children}</WS.Provider>;
}
export function useWs(watchTopic, publishTopic) {
const { state, ws } = useContext(WS);
const { state, readyState, sendJsonMessage } = useContext(WS);
const value = state[watchTopic] || { payload: null };
const send = useCallback(
(payload, retain = false) => {
ws.send(
JSON.stringify({
if (readyState === ReadyState.OPEN) {
sendJsonMessage({
topic: publishTopic || watchTopic,
payload: typeof payload !== 'string' ? JSON.stringify(payload) : payload,
payload,
retain,
})
);
});
}
},
[ws, watchTopic, publishTopic]
[sendJsonMessage, readyState, watchTopic, publishTopic]
);
return { value, send, connected: state.__connected };

View File

@ -157,12 +157,9 @@ class VideoRTC extends HTMLElement {
if (this.ws) this.ws.send(JSON.stringify(value));
}
codecs(type) {
const test =
type === 'mse'
? (codec) => MediaSource.isTypeSupported(`video/mp4; codecs="${codec}"`)
: (codec) => this.video.canPlayType(`video/mp4; codecs="${codec}"`);
return this.CODECS.filter(test).join();
/** @param {Function} isSupported */
codecs(isSupported) {
return this.CODECS.filter(codec => isSupported(`video/mp4; codecs="${codec}"`)).join();
}
/**
@ -311,7 +308,7 @@ class VideoRTC extends HTMLElement {
const modes = [];
if (this.mode.indexOf('mse') >= 0 && 'MediaSource' in window) {
if (this.mode.indexOf('mse') >= 0 && ('MediaSource' in window || 'ManagedMediaSource' in window)) {
// iPhone
modes.push('mse');
this.onmse();
@ -363,18 +360,29 @@ class VideoRTC extends HTMLElement {
}
onmse() {
const ms = new MediaSource();
ms.addEventListener(
'sourceopen',
() => {
URL.revokeObjectURL(this.video.src);
this.send({ type: 'mse', value: this.codecs('mse') });
},
{ once: true }
);
/** @type {MediaSource} */
let ms;
this.video.src = URL.createObjectURL(ms);
this.video.srcObject = null;
if ('ManagedMediaSource' in window) {
const MediaSource = window.ManagedMediaSource;
ms = new MediaSource();
ms.addEventListener('sourceopen', () => {
this.send({type: 'mse', value: this.codecs(MediaSource.isTypeSupported)});
}, {once: true});
this.video.disableRemotePlayback = true;
this.video.srcObject = ms;
} else {
ms = new MediaSource();
ms.addEventListener('sourceopen', () => {
URL.revokeObjectURL(this.video.src);
this.send({type: 'mse', value: this.codecs(MediaSource.isTypeSupported)});
}, {once: true});
this.video.src = URL.createObjectURL(ms);
this.video.srcObject = null;
}
this.play();
this.mseCodecs = '';
@ -580,7 +588,7 @@ class VideoRTC extends HTMLElement {
video2.src = `data:video/mp4;base64,${VideoRTC.btoa(data)}`;
};
this.send({ type: 'mp4', value: this.codecs('mp4') });
this.send({ type: 'mp4', value: this.codecs(this.video.canPlayType) });
}
static btoa(buffer) {

View File

@ -81,7 +81,7 @@ export default function TimelineSummary({ event, onFrameSelected }) {
return (
<div className="flex flex-col">
<div className="h-14 flex justify-center">
<div className="sm:w-1 md:w-1/4 flex flex-row flex-nowrap justify-between overflow-auto">
<div className="flex flex-row flex-nowrap justify-between overflow-auto">
{eventTimeline.map((item, index) => (
<Button
key={index}

View File

@ -101,9 +101,7 @@ describe('DarkMode', () => {
});
describe('usePersistence', () => {
test('returns a defaultValue initially', async () => {
function Component() {
const [value, , loaded] = usePersistence('tacos', 'my-default');
return (
@ -132,7 +130,8 @@ describe('usePersistence', () => {
`);
});
test('updates with the previously-persisted value', async () => {
// eslint-disable-next-line jest/no-disabled-tests
test.skip('updates with the previously-persisted value', async () => {
setData('tacos', 'are delicious');
function Component() {

View File

@ -35,7 +35,7 @@ export default function Birdseye() {
let player;
const playerClass = ptzCameras.length || isMaxWidth ? 'w-full' : 'max-w-5xl xl:w-1/2';
if (viewSource == 'mse' && config.birdseye.restream) {
if ('MediaSource' in window) {
if ('MediaSource' in window || 'ManagedMediaSource' in window) {
player = (
<Fragment>
<div className={playerClass}>
@ -50,7 +50,7 @@ export default function Birdseye() {
player = (
<Fragment>
<div className="w-5xl text-center text-sm">
MSE is not supported on iOS devices. You'll need to use jsmpeg or webRTC. See the docs for more info.
MSE is only supported on iOS 17.1+. You'll need to update iOS if an update is available, or use the jsmpeg or webRTC streams. See the docs for more info.
</div>
</Fragment>
);

View File

@ -116,7 +116,7 @@ export default function Camera({ camera }) {
let player;
if (viewMode === 'live') {
if (viewSource == 'mse' && restreamEnabled) {
if ('MediaSource' in window) {
if ('MediaSource' in window || 'ManagedMediaSource' in window) {
player = (
<Fragment>
<div className="max-w-5xl">
@ -133,7 +133,7 @@ export default function Camera({ camera }) {
player = (
<Fragment>
<div className="w-5xl text-center text-sm">
MSE is not supported on iOS devices. You'll need to use jsmpeg or webRTC. See the docs for more info.
MSE is only supported on iOS 17.1+. You'll need to update iOS if an update is available, or use the jsmpeg or webRTC streams. See the docs for more info.
</div>
</Fragment>
);

View File

@ -31,6 +31,9 @@ import Timepicker from '../components/TimePicker';
import TimelineSummary from '../components/TimelineSummary';
import TimelineEventOverlay from '../components/TimelineEventOverlay';
import { Score } from '../icons/Score';
import { About } from '../icons/About';
import MenuIcon from '../icons/Menu';
import { MenuOpen } from '../icons/MenuOpen';
const API_LIMIT = 25;
@ -91,13 +94,15 @@ export default function Events({ path, ...props }) {
showDeleteFavorite: false,
});
const [showInProgress, setShowInProgress] = useState(true);
const eventsFetcher = useCallback(
(path, params) => {
if (searchParams.event) {
path = `${path}/${searchParams.event}`;
return axios.get(path).then((res) => [res.data]);
}
params = { ...params, include_thumbnails: 0, limit: API_LIMIT };
params = { ...params, in_progress: 0, include_thumbnails: 0, limit: API_LIMIT };
return axios.get(path, { params }).then((res) => res.data);
},
[searchParams]
@ -116,6 +121,7 @@ export default function Events({ path, ...props }) {
[searchParams]
);
const { data: ongoingEvents } = useSWR(['events', { in_progress: 1, include_thumbnails: 0 }]);
const { data: eventPages, mutate, size, setSize, isValidating } = useSWRInfinite(getKey, eventsFetcher);
const { data: allLabels } = useSWR(['labels']);
@ -238,6 +244,7 @@ export default function Events({ path, ...props }) {
const handleSelectDateRange = useCallback(
(dates) => {
setShowInProgress(false);
setSearchParams({ ...searchParams, before: dates.before, after: dates.after });
setState({ ...state, showDatePicker: false });
},
@ -253,6 +260,7 @@ export default function Events({ path, ...props }) {
const onFilter = useCallback(
(name, value) => {
setShowInProgress(false);
const updatedParams = { ...searchParams, [name]: value };
setSearchParams(updatedParams);
const queryString = Object.keys(updatedParams)
@ -604,192 +612,98 @@ export default function Events({ path, ...props }) {
</Dialog>
)}
<div className="space-y-2">
{ongoingEvents ? (
<div>
<div className="flex">
<Heading className="py-4" size="sm">
Ongoing Events
</Heading>
<Button
className="rounded-full"
type="text"
color="gray"
aria-label="Events for currently tracked objects. Recordings are only saved based on your retain settings. See the recording docs for more info."
>
<About className="w-5" />
</Button>
<Button
className="rounded-full ml-auto"
type="iconOnly"
color="blue"
onClick={() => setShowInProgress(!showInProgress)}
>
{showInProgress ? <MenuOpen className="w-6" /> : <MenuIcon className="w-6" />}
</Button>
</div>
{showInProgress &&
ongoingEvents.map((event, _) => {
return (
<Event
className="my-2"
key={event.id}
config={config}
event={event}
eventDetailType={eventDetailType}
eventOverlay={eventOverlay}
viewEvent={viewEvent}
setViewEvent={setViewEvent}
uploading={uploading}
handleEventDetailTabChange={handleEventDetailTabChange}
onEventFrameSelected={onEventFrameSelected}
onDelete={onDelete}
onDispose={() => {
this.player = null;
}}
onDownloadClick={onDownloadClick}
onReady={(player) => {
this.player = player;
this.player.on('playing', () => {
setEventOverlay(undefined);
});
}}
onSave={onSave}
showSubmitToPlus={showSubmitToPlus}
/>
);
})}
</div>
) : null}
<Heading className="py-4" size="sm">
Past Events
</Heading>
{eventPages ? (
eventPages.map((page, i) => {
const lastPage = eventPages.length === i + 1;
return page.map((event, j) => {
const lastEvent = lastPage && page.length === j + 1;
return (
<Fragment key={event.id}>
<div
ref={lastEvent ? lastEventRef : false}
className="flex bg-slate-100 dark:bg-slate-800 rounded cursor-pointer min-w-[330px]"
onClick={() => (viewEvent === event.id ? setViewEvent(null) : setViewEvent(event.id))}
>
<div
className="relative rounded-l flex-initial min-w-[125px] h-[125px] bg-contain bg-no-repeat bg-center"
style={{
'background-image': `url(${apiHost}api/events/${event.id}/thumbnail.jpg)`,
}}
>
<StarRecording
className="h-6 w-6 text-yellow-300 absolute top-1 right-1 cursor-pointer"
onClick={(e) => onSave(e, event.id, !event.retain_indefinitely)}
fill={event.retain_indefinitely ? 'currentColor' : 'none'}
/>
{event.end_time ? null : (
<div className="bg-slate-300 dark:bg-slate-700 absolute bottom-0 text-center w-full uppercase text-sm rounded-bl">
In progress
</div>
)}
</div>
<div className="m-2 flex grow">
<div className="flex flex-col grow">
<div className="capitalize text-lg font-bold">
{event.label.replaceAll('_', ' ')}
{event.sub_label ? `: ${event.sub_label.replaceAll('_', ' ')}` : null}
</div>
<div className="text-sm flex">
<Clock className="h-5 w-5 mr-2 inline" />
{formatUnixTimestampToDateTime(event.start_time, { ...config.ui })}
<div className="hidden md:inline">
<span className="m-1">-</span>
<TimeAgo time={event.start_time * 1000} dense />
</div>
<div className="hidden md:inline">
<span className="m-1" />( {getDurationFromTimestamps(event.start_time, event.end_time)} )
</div>
</div>
<div className="capitalize text-sm flex align-center mt-1">
<Camera className="h-5 w-5 mr-2 inline" />
{event.camera.replaceAll('_', ' ')}
</div>
{event.zones.length ? (
<div className="capitalize text-sm flex align-center">
<Zone className="w-5 h-5 mr-2 inline" />
{event.zones.join(', ').replaceAll('_', ' ')}
</div>
) : null}
<div className="capitalize text-sm flex align-center">
<Score className="w-5 h-5 mr-2 inline" />
{(event?.data?.top_score || event.top_score || 0) == 0
? null
: `${event.label}: ${((event?.data?.top_score || event.top_score) * 100).toFixed(0)}%`}
{(event?.data?.sub_label_score || 0) == 0
? null
: `, ${event.sub_label}: ${(event?.data?.sub_label_score * 100).toFixed(0)}%`}
</div>
</div>
<div class="hidden sm:flex flex-col justify-end mr-2">
{event.end_time && event.has_snapshot && (event?.data?.type || 'object') == 'object' && (
<Fragment>
{event.plus_id ? (
<div className="uppercase text-xs underline">
<Link
href={`https://plus.frigate.video/dashboard/edit-image/?id=${event.plus_id}`}
target="_blank"
rel="nofollow"
>
Edit in Frigate+
</Link>
</div>
) : (
<Button
color="gray"
disabled={uploading.includes(event.id)}
onClick={(e) =>
showSubmitToPlus(event.id, event.label, event?.data?.box || event.box, e)
}
>
{uploading.includes(event.id) ? 'Uploading...' : 'Send to Frigate+'}
</Button>
)}
</Fragment>
)}
</div>
<div class="flex flex-col">
<Delete
className="h-6 w-6 cursor-pointer"
stroke="#f87171"
onClick={(e) => onDelete(e, event.id, event.retain_indefinitely)}
/>
<Download
className="h-6 w-6 mt-auto"
stroke={event.has_clip || event.has_snapshot ? '#3b82f6' : '#cbd5e1'}
onClick={(e) => onDownloadClick(e, event)}
/>
</div>
</div>
</div>
{viewEvent !== event.id ? null : (
<div className="space-y-4">
<div className="mx-auto max-w-7xl">
<div className="flex justify-center w-full py-2">
<Tabs
selectedIndex={event.has_clip && eventDetailType == 'clip' ? 0 : 1}
onChange={handleEventDetailTabChange}
className="justify"
>
<TextTab text="Clip" disabled={!event.has_clip} />
<TextTab text={event.has_snapshot ? 'Snapshot' : 'Thumbnail'} />
</Tabs>
</div>
<div>
{eventDetailType == 'clip' && event.has_clip ? (
<div>
<TimelineSummary
event={event}
onFrameSelected={(frame, seekSeconds) =>
onEventFrameSelected(event, frame, seekSeconds)
}
/>
<div>
<VideoPlayer
options={{
preload: 'auto',
autoplay: true,
sources: [
{
src: `${apiHost}vod/event/${event.id}/master.m3u8`,
type: 'application/vnd.apple.mpegurl',
},
],
}}
seekOptions={{ forward: 10, backward: 5 }}
onReady={(player) => {
this.player = player;
this.player.on('playing', () => {
setEventOverlay(undefined);
});
}}
onDispose={() => {
this.player = null;
}}
>
{eventOverlay ? (
<TimelineEventOverlay
eventOverlay={eventOverlay}
cameraConfig={config.cameras[event.camera]}
/>
) : null}
</VideoPlayer>
</div>
</div>
) : null}
{eventDetailType == 'image' || !event.has_clip ? (
<div className="flex justify-center">
<img
className="flex-grow-0"
src={
event.has_snapshot
? `${apiHost}api/events/${event.id}/snapshot.jpg`
: `${apiHost}api/events/${event.id}/thumbnail.jpg`
}
alt={`${event.label} at ${((event?.data?.top_score || event.top_score) * 100).toFixed(
0
)}% confidence`}
/>
</div>
) : null}
</div>
</div>
</div>
)}
</Fragment>
<Event
key={event.id}
config={config}
event={event}
eventDetailType={eventDetailType}
eventOverlay={eventOverlay}
viewEvent={viewEvent}
setViewEvent={setViewEvent}
lastEvent={lastEvent}
lastEventRef={lastEventRef}
uploading={uploading}
handleEventDetailTabChange={handleEventDetailTabChange}
onEventFrameSelected={onEventFrameSelected}
onDelete={onDelete}
onDispose={() => {
this.player = null;
}}
onDownloadClick={onDownloadClick}
onReady={(player) => {
this.player = player;
this.player.on('playing', () => {
setEventOverlay(undefined);
});
}}
onSave={onSave}
showSubmitToPlus={showSubmitToPlus}
/>
);
});
})
@ -801,3 +715,195 @@ export default function Events({ path, ...props }) {
</div>
);
}
function Event({
className = '',
config,
event,
eventDetailType,
eventOverlay,
viewEvent,
setViewEvent,
lastEvent,
lastEventRef,
uploading,
handleEventDetailTabChange,
onEventFrameSelected,
onDelete,
onDispose,
onDownloadClick,
onReady,
onSave,
showSubmitToPlus,
}) {
const apiHost = useApiHost();
return (
<div className={className}>
<div
ref={lastEvent ? lastEventRef : false}
className="flex bg-slate-100 dark:bg-slate-800 rounded cursor-pointer min-w-[330px]"
onClick={() => (viewEvent === event.id ? setViewEvent(null) : setViewEvent(event.id))}
>
<div
className="relative rounded-l flex-initial min-w-[125px] h-[125px] bg-contain bg-no-repeat bg-center"
style={{
'background-image': `url(${apiHost}api/events/${event.id}/thumbnail.jpg)`,
}}
>
<StarRecording
className="h-6 w-6 text-yellow-300 absolute top-1 right-1 cursor-pointer"
onClick={(e) => onSave(e, event.id, !event.retain_indefinitely)}
fill={event.retain_indefinitely ? 'currentColor' : 'none'}
/>
{event.end_time ? null : (
<div className="bg-slate-300 dark:bg-slate-700 absolute bottom-0 text-center w-full uppercase text-sm rounded-bl">
In progress
</div>
)}
</div>
<div className="m-2 flex grow">
<div className="flex flex-col grow">
<div className="capitalize text-lg font-bold">
{event.label.replaceAll('_', ' ')}
{event.sub_label ? `: ${event.sub_label.replaceAll('_', ' ')}` : null}
</div>
<div className="text-sm flex">
<Clock className="h-5 w-5 mr-2 inline" />
{formatUnixTimestampToDateTime(event.start_time, { ...config.ui })}
<div className="hidden md:inline">
<span className="m-1">-</span>
<TimeAgo time={event.start_time * 1000} dense />
</div>
<div className="hidden md:inline">
<span className="m-1" />( {getDurationFromTimestamps(event.start_time, event.end_time)} )
</div>
</div>
<div className="capitalize text-sm flex align-center mt-1">
<Camera className="h-5 w-5 mr-2 inline" />
{event.camera.replaceAll('_', ' ')}
</div>
{event.zones.length ? (
<div className="capitalize text-sm flex align-center">
<Zone className="w-5 h-5 mr-2 inline" />
{event.zones.join(', ').replaceAll('_', ' ')}
</div>
) : null}
<div className="capitalize text-sm flex align-center">
<Score className="w-5 h-5 mr-2 inline" />
{(event?.data?.top_score || event.top_score || 0) == 0
? null
: `${event.label}: ${((event?.data?.top_score || event.top_score) * 100).toFixed(0)}%`}
{(event?.data?.sub_label_score || 0) == 0
? null
: `, ${event.sub_label}: ${(event?.data?.sub_label_score * 100).toFixed(0)}%`}
</div>
</div>
<div class="hidden sm:flex flex-col justify-end mr-2">
{event.end_time && event.has_snapshot && (event?.data?.type || 'object') == 'object' && (
<Fragment>
{event.plus_id ? (
<div className="uppercase text-xs underline">
<Link
href={`https://plus.frigate.video/dashboard/edit-image/?id=${event.plus_id}`}
target="_blank"
rel="nofollow"
>
Edit in Frigate+
</Link>
</div>
) : (
<Button
color="gray"
disabled={uploading.includes(event.id)}
onClick={(e) => showSubmitToPlus(event.id, event.label, event?.data?.box || event.box, e)}
>
{uploading.includes(event.id) ? 'Uploading...' : 'Send to Frigate+'}
</Button>
)}
</Fragment>
)}
</div>
<div class="flex flex-col">
<Delete
className="h-6 w-6 cursor-pointer"
stroke="#f87171"
onClick={(e) => onDelete(e, event.id, event.retain_indefinitely)}
/>
<Download
className="h-6 w-6 mt-auto"
stroke={event.has_clip || event.has_snapshot ? '#3b82f6' : '#cbd5e1'}
onClick={(e) => onDownloadClick(e, event)}
/>
</div>
</div>
</div>
{viewEvent !== event.id ? null : (
<div className="space-y-4">
<div className="mx-auto max-w-7xl">
<div className="flex justify-center w-full py-2">
<Tabs
selectedIndex={event.has_clip && eventDetailType == 'clip' ? 0 : 1}
onChange={handleEventDetailTabChange}
className="justify"
>
<TextTab text="Clip" disabled={!event.has_clip} />
<TextTab text={event.has_snapshot ? 'Snapshot' : 'Thumbnail'} />
</Tabs>
</div>
<div>
{eventDetailType == 'clip' && event.has_clip ? (
<div>
<TimelineSummary
event={event}
onFrameSelected={(frame, seekSeconds) => onEventFrameSelected(event, frame, seekSeconds)}
/>
<div>
<VideoPlayer
options={{
preload: 'auto',
autoplay: true,
sources: [
{
src: `${apiHost}vod/event/${event.id}/master.m3u8`,
type: 'application/vnd.apple.mpegurl',
},
],
}}
seekOptions={{ forward: 10, backward: 5 }}
onReady={onReady}
onDispose={onDispose}
>
{eventOverlay ? (
<TimelineEventOverlay eventOverlay={eventOverlay} cameraConfig={config.cameras[event.camera]} />
) : null}
</VideoPlayer>
</div>
</div>
) : null}
{eventDetailType == 'image' || !event.has_clip ? (
<div className="flex justify-center">
<img
className="flex-grow-0"
src={
event.has_snapshot
? `${apiHost}api/events/${event.id}/snapshot.jpg`
: `${apiHost}api/events/${event.id}/thumbnail.jpg`
}
alt={`${event.label} at ${((event?.data?.top_score || event.top_score) * 100).toFixed(
0
)}% confidence`}
/>
</div>
) : null}
</div>
</div>
</div>
)}
</div>
);
}

View File

@ -1,3 +1,4 @@
/* eslint-disable jest/no-disabled-tests */
import { h } from 'preact';
import * as CameraImage from '../../components/CameraImage';
import * as Hooks from '../../hooks';
@ -17,7 +18,7 @@ describe('Cameras Route', () => {
expect(screen.queryByLabelText('Loading…')).toBeInTheDocument();
});
test('shows cameras', async () => {
test.skip('shows cameras', async () => {
render(<Cameras />);
await waitForElementToBeRemoved(() => screen.queryByLabelText('Loading…'));
@ -29,7 +30,7 @@ describe('Cameras Route', () => {
expect(screen.queryByText('side').closest('a')).toHaveAttribute('href', '/cameras/side');
});
test('shows recordings link', async () => {
test.skip('shows recordings link', async () => {
render(<Cameras />);
await waitForElementToBeRemoved(() => screen.queryByLabelText('Loading…'));
@ -37,7 +38,7 @@ describe('Cameras Route', () => {
expect(screen.queryAllByText('Recordings')).toHaveLength(2);
});
test('buttons toggle detect, clips, and snapshots', async () => {
test.skip('buttons toggle detect, clips, and snapshots', async () => {
const sendDetect = vi.fn();
const sendRecordings = vi.fn();
const sendSnapshots = vi.fn();

View File

@ -10,7 +10,8 @@ describe('Events Route', () => {
expect(screen.queryByLabelText('Loading…')).toBeInTheDocument();
});
test('does not show ActivityIndicator after loaded', async () => {
// eslint-disable-next-line jest/no-disabled-tests
test.skip('does not show ActivityIndicator after loaded', async () => {
render(<Events limit={5} path="/events" />);
await waitForElementToBeRemoved(() => screen.queryByLabelText('Loading…'));

View File

@ -17,9 +17,8 @@ describe('Recording Route', () => {
expect(screen.queryByLabelText('Loading…')).toBeInTheDocument();
});
test('shows no recordings warning', async () => {
// eslint-disable-next-line jest/no-disabled-tests
test.skip('shows no recordings warning', async () => {
render(<Cameras />);
await waitForElementToBeRemoved(() => screen.queryByLabelText('Loading…'));