Merge branch 'streamline-live' of https://github.com/hawkeye217/frigate into streamline-live

Josh Hawkins 2024-02-07 14:10:46 -06:00
commit cd16e280a0
7 changed files with 139 additions and 59 deletions

View File

@ -4,7 +4,6 @@ import { useState } from "react";
import Wrapper from "@/components/Wrapper";
import Sidebar from "@/components/Sidebar";
import Header from "@/components/Header";
-import Dashboard from "@/pages/Dashboard";
import Live from "@/pages/Live";
import History from "@/pages/History";
import Export from "@/pages/Export";
@ -30,13 +29,9 @@ function App() {
<Header onToggleNavbar={toggleNavbar} />
<div className="grid grid-cols-[auto,1fr] flex-grow-1 overflow-auto">
<Sidebar sheetOpen={sheetOpen} setSheetOpen={setSheetOpen} />
-<div
-id="pageRoot"
-className="overflow-x-hidden px-4 py-2 w-screen md:w-full"
->
+<div id="pageRoot" className="overflow-x-hidden px-4 py-2 w-screen">
<Routes>
<Route path="/" element={<Dashboard />} />
<Route path="/live" element={<Live />} />
<Route path="/" element={<Live />} />
<Route path="/history" element={<History />} />
<Route path="/export" element={<Export />} />
<Route path="/storage" element={<Storage />} />

View File

@ -228,7 +228,7 @@ export function useMotionActivity(camera: string): { payload: string } {
return { payload };
}
-export function useAudioActivity(camera: string): { payload: string } {
+export function useAudioActivity(camera: string): { payload: number } {
const {
value: { payload },
} = useWs(`${camera}/audio/rms`, "");

View File

@ -66,26 +66,21 @@ function HeaderNavigation() {
const navbarLinks = [
{
id: 1,
title: "Dashboard",
title: "Live",
url: "/",
},
{
id: 2,
title: "Live",
url: "/live",
},
{
id: 3,
title: "History",
url: "/history",
},
{
-id: 4,
+id: 3,
title: "Export",
url: "/export",
},
{
-id: 5,
+id: 4,
title: "UI Playground",
url: "/playground",
dev: true,

View File

@ -108,7 +108,7 @@ export default function DynamicCameraImage({
{camera.audio.enabled_in_config && (
<LuEar
className={`${
-parseInt(audioRms) >= camera.audio.min_volume
+audioRms >= camera.audio.min_volume
? "text-audio"
: "text-gray-600"
}`}

View File

@ -11,8 +11,10 @@ import { Label } from "../ui/label";
import { usePersistence } from "@/hooks/use-persistence";
import MSEPlayer from "./MsePlayer";
import JSMpegPlayer from "./JSMpegPlayer";
-import { MdCircle } from "react-icons/md";
+import { MdCircle, MdLeakAdd, MdSelectAll } from "react-icons/md";
+import { BsSoundwave } from "react-icons/bs";
import Chip from "../Chip";
+import useCameraActivity from "@/hooks/use-camera-activity";
const emptyObject = Object.freeze({});
@ -20,6 +22,7 @@ type LivePlayerProps = {
className?: string;
cameraConfig: CameraConfig;
liveMode?: "webrtc" | "mse" | "jsmpeg" | "debug";
+liveChips?: boolean;
};
type Options = { [key: string]: boolean };
@ -28,14 +31,19 @@ export default function LivePlayer({
className,
cameraConfig,
liveMode = "mse",
+liveChips = false,
}: LivePlayerProps) {
-const [showSettings, setShowSettings] = useState(false);
+// camera activity
+const { activeMotion, activeAudio, activeTracking } =
+useCameraActivity(cameraConfig);
+// debug view settings
+const [showSettings, setShowSettings] = useState(false);
const [options, setOptions] = usePersistence(
`${cameraConfig?.name}-feed`,
emptyObject
);
const handleSetOption = useCallback(
(id: string, value: boolean) => {
const newOptions = { ...options, [id]: value };
@ -43,7 +51,6 @@ export default function LivePlayer({
},
[options, setOptions]
);
const searchParams = useMemo(
() =>
new URLSearchParams(
@ -55,7 +62,6 @@ export default function LivePlayer({
),
[options]
);
const handleToggleSettings = useCallback(() => {
setShowSettings(!showSettings);
}, [showSettings, setShowSettings]);
@ -126,6 +132,30 @@ export default function LivePlayer({
return (
<div className={`relative flex justify-center ${className}`}>
{player}
<div className="absolute flex left-2 top-2 gap-2">
<Chip className="bg-gray-500 bg-gradient-to-br">
<MdLeakAdd
className={`w-4 h-4 ${activeMotion ? "text-motion" : "text-white"}`}
/>
<div className="ml-1 capitalize text-white text-xs">Motion</div>
</Chip>
{cameraConfig.audio.enabled_in_config && (
<Chip className="bg-gray-500 bg-gradient-to-br">
<BsSoundwave
className={`w-4 h-4 ${activeAudio ? "text-audio" : "text-white"}`}
/>
<div className="ml-1 capitalize text-white text-xs">Sound</div>
</Chip>
)}
<Chip className="bg-gray-500 bg-gradient-to-br">
<MdSelectAll
className={`w-4 h-4 ${
activeTracking ? "text-object" : "text-white"
}`}
/>
<div className="ml-1 capitalize text-white text-xs">Tracking</div>
</Chip>
</div>
<Chip className="absolute right-2 top-2 bg-gray-500 bg-gradient-to-br">
<MdCircle className="w-2 h-2 text-danger" />
<div className="ml-1 capitalize text-white text-xs">

View File

@ -0,0 +1,64 @@
import {
useAudioActivity,
useFrigateEvents,
useMotionActivity,
} from "@/api/ws";
import { CameraConfig } from "@/types/frigateConfig";
import { useEffect, useMemo, useState } from "react";
type useCameraActivityReturn = {
activeTracking: boolean;
activeMotion: boolean;
activeAudio: boolean;
};
export default function useCameraActivity(
camera: CameraConfig
): useCameraActivityReturn {
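// event ids of objects that are currently being tracked (not stationary)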
const [activeObjects, setActiveObjects] = useState<string[]>([]);
const hasActiveObjects = useMemo(
() => activeObjects.length > 0,
[activeObjects]
);
const { payload: detectingMotion } = useMotionActivity(camera.name);
const { payload: event } = useFrigateEvents();
const { payload: audioRms } = useAudioActivity(camera.name);
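// keep the active object list in sync with tracked object start/end messages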
useEffect(() => {
if (!event) {
return;
}
if (event.after.camera != camera.name) {
return;
}
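// an "end" event means the object is no longer tracked, so remove its id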
if (event.type == "end") {
const eventIndex = activeObjects.indexOf(event.after.id);
if (eventIndex != -1) {
const newActiveObjects = [...activeObjects];
newActiveObjects.splice(eventIndex, 1);
setActiveObjects(newActiveObjects);
}
} else {
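// otherwise any non-stationary object counts as actively tracked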
if (!event.after.stationary) {
const eventIndex = activeObjects.indexOf(event.after.id);
if (eventIndex == -1) {
const newActiveObjects = [...activeObjects, event.after.id];
setActiveObjects(newActiveObjects);
}
}
}
}, [event, activeObjects]);
return {
activeTracking: hasActiveObjects,
activeMotion: detectingMotion == "ON",
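// audio only counts as active when audio detection is enabled for this camera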
activeAudio: camera.audio.enabled_in_config
? audioRms >= camera.audio.min_volume
: false,
};
}

View File

@ -4,7 +4,7 @@ import { ScrollArea, ScrollBar } from "@/components/ui/scroll-area";
import { Event as FrigateEvent } from "@/types/event";
import { FrigateConfig } from "@/types/frigateConfig";
import axios from "axios";
-import { useCallback, useEffect, useMemo, useState } from "react";
+import { useCallback, useMemo } from "react";
import useSWR from "swr";
function Live() {
@ -12,40 +12,35 @@ function Live() {
// recent events
-const [recentCutoff, setRecentCutoff] = useState<number>(0);
-useEffect(() => {
-const date = new Date();
-date.setHours(date.getHours() - 4);
-setRecentCutoff(date.getTime() / 1000);
-const intervalId: NodeJS.Timeout = setInterval(() => {
-const date = new Date();
-date.setHours(date.getHours() - 4);
-setRecentCutoff(date.getTime() / 1000);
-}, 30000);
-return () => clearInterval(intervalId);
-}, [30000]);
-const { data: events, mutate: updateEvents } = useSWR<FrigateEvent[]>([
-"events",
-{ limit: 10, after: recentCutoff },
-]);
-const onFavorite = useCallback(
-async (e: Event, event: FrigateEvent) => {
-e.stopPropagation();
-let response;
-if (!event.retain_indefinitely) {
-response = await axios.post(`events/${event.id}/retain`);
-} else {
-response = await axios.delete(`events/${event.id}/retain`);
-}
-if (response.status === 200) {
-updateEvents();
-}
-},
-[event]
+const { data: allEvents, mutate: updateEvents } = useSWR<FrigateEvent[]>(
+["events", { limit: 10 }],
+{ revalidateOnFocus: false, refreshInterval: 60000 }
+);
+const events = useMemo(() => {
+if (!allEvents) {
+return [];
+}
+const date = new Date();
+date.setHours(date.getHours() - 1);
+const cutoff = date.getTime() / 1000;
+return allEvents.filter((event) => event.start_time > cutoff);
+}, [allEvents]);
+const onFavorite = useCallback(async (e: Event, event: FrigateEvent) => {
+e.stopPropagation();
+let response;
+if (!event.retain_indefinitely) {
+response = await axios.post(`events/${event.id}/retain`);
+} else {
+response = await axios.delete(`events/${event.id}/retain`);
+}
+if (response.status === 200) {
+updateEvents();
+}
+}, []);
// camera live views
const cameras = useMemo(() => {
@ -77,21 +72,22 @@ function Live() {
</ScrollArea>
)}
<div className="mt-4 grid grid-cols-3 gap-4">
<div className="mt-4 grid grid-cols-1 md:grid-cols-2 xl:grid-cols-3 3xl:grid-cols-4 gap-4">
{cameras.map((camera) => {
let grow;
if (camera.detect.width / camera.detect.height > 2) {
grow = "h-[424px] col-span-2";
grow = "md:col-span-2";
} else if (camera.detect.width / camera.detect.height < 1) {
grow = "h-[840px] row-span-2";
grow = "md:row-span-2";
} else {
grow = "h-[425px]";
grow = "aspect-video";
}
return (
<LivePlayer
key={camera.name}
className={`rounded-2xl bg-black ${grow}`}
cameraConfig={camera}
+liveChips
/>
);
})}