Merge ec5f191187 into 0a8f499640 (commit 6d50ef2063)
@@ -11,6 +11,12 @@ Cameras configured to output H.264 video and AAC audio will offer the most compa
 - **Stream Viewing**: This stream will be rebroadcast as is to Home Assistant for viewing with the stream component. Setting this resolution too high will use significant bandwidth when viewing streams in Home Assistant, and they may not load reliably over slower connections.
 
+:::tip
+
+For the best experience in Frigate's UI, configure your camera so that the detection and recording streams use the same aspect ratio. For example, if your main stream is 3840x2160 (16:9), set your substream to 640x360 (also 16:9) instead of 640x480 (4:3). While not strictly required, matching aspect ratios helps ensure seamless live stream display and preview/recordings playback.
+
+:::
+
 ### Choosing a detect resolution
 
 The ideal resolution for detection is one where the objects you want to detect fit inside the dimensions of the model used by Frigate (320x320). Frigate does not pass the entire camera frame to object detection. It will crop an area of motion from the full frame and look in that portion of the frame. If the area being inspected is larger than 320x320, Frigate must resize it before running object detection. Higher resolutions do not improve the detection accuracy because the additional detail is lost in the resize. Below you can see a reference for how large a 320x320 area is against common resolutions.
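
To make the resize behaviour described in the detect-resolution paragraph above concrete, here is a small illustrative sketch. It is not part of this diff and not Frigate's actual implementation; the function name and the example region sizes are hypothetical.

```typescript
// Factor by which a motion region would be scaled to fit a 320x320 model input.
// A factor below 1 means detail is discarded during the resize, which is why
// higher detect resolutions do not improve accuracy on their own.
function scaleFactorForModel(
  regionWidth: number,
  regionHeight: number,
  modelSize = 320,
): number {
  const longestSide = Math.max(regionWidth, regionHeight);
  return longestSide <= modelSize ? 1 : modelSize / longestSide;
}

console.log(scaleFactorForModel(1280, 720)); // 0.25 -> most of the extra detail is lost
console.log(scaleFactorForModel(300, 300)); // 1 -> region already fits the model input
```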
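Similarly, the tip added in the documentation hunk above recommends matching aspect ratios between the detection and recording streams. The following sketch is illustrative only (the helper is hypothetical, not Frigate code); it shows why 640x360 pairs with a 3840x2160 main stream while 640x480 does not.

```typescript
// Reduce a resolution to its simplest width:height ratio, e.g. 3840x2160 -> 16:9.
function aspectRatio(width: number, height: number): [number, number] {
  const gcd = (a: number, b: number): number => (b === 0 ? a : gcd(b, a % b));
  const d = gcd(width, height);
  return [width / d, height / d];
}

console.log(aspectRatio(3840, 2160)); // [16, 9] main stream
console.log(aspectRatio(640, 360)); // [16, 9] -> matches, displays without letterboxing
console.log(aspectRatio(640, 480)); // [4, 3]  -> mismatched aspect ratio
```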
@@ -64,10 +64,12 @@ def stop_ffmpeg(ffmpeg_process: sp.Popen[Any], logger: logging.Logger):
     try:
         logger.info("Waiting for ffmpeg to exit gracefully...")
         ffmpeg_process.communicate(timeout=30)
+        logger.info("FFmpeg has exited")
     except sp.TimeoutExpired:
         logger.info("FFmpeg didn't exit. Force killing...")
         ffmpeg_process.kill()
         ffmpeg_process.communicate()
+        logger.info("FFmpeg has been killed")
     ffmpeg_process = None
@@ -13,7 +13,7 @@ import HlsVideoPlayer from "@/components/player/HlsVideoPlayer";
 import { baseUrl } from "@/api/baseUrl";
 import { REVIEW_PADDING } from "@/types/review";
 import {
-  ASPECT_VERTICAL_LAYOUT,
+  ASPECT_PORTRAIT_LAYOUT,
   ASPECT_WIDE_LAYOUT,
   Recording,
 } from "@/types/record";
@@ -39,6 +39,7 @@ import { useApiHost } from "@/api";
 import ImageLoadingIndicator from "@/components/indicators/ImageLoadingIndicator";
 import ObjectTrackOverlay from "../ObjectTrackOverlay";
 import { useIsAdmin } from "@/hooks/use-is-admin";
+import { VideoResolutionType } from "@/types/live";
 
 type TrackingDetailsProps = {
   className?: string;
@@ -253,16 +254,25 @@ export function TrackingDetails({
 
   const [timelineSize] = useResizeObserver(timelineContainerRef);
 
+  const [fullResolution, setFullResolution] = useState<VideoResolutionType>({
+    width: 0,
+    height: 0,
+  });
+
   const aspectRatio = useMemo(() => {
     if (!config) {
       return 16 / 9;
     }
 
+    if (fullResolution.width && fullResolution.height) {
+      return fullResolution.width / fullResolution.height;
+    }
+
     return (
       config.cameras[event.camera].detect.width /
       config.cameras[event.camera].detect.height
     );
-  }, [config, event]);
+  }, [config, event, fullResolution]);
 
   const label = event.sub_label
     ? event.sub_label
@@ -460,7 +470,7 @@ export function TrackingDetails({
       return "normal";
     } else if (aspectRatio > ASPECT_WIDE_LAYOUT) {
       return "wide";
-    } else if (aspectRatio < ASPECT_VERTICAL_LAYOUT) {
+    } else if (aspectRatio < ASPECT_PORTRAIT_LAYOUT) {
       return "tall";
     } else {
       return "normal";
@@ -556,6 +566,7 @@ export function TrackingDetails({
           onSeekToTime={handleSeekToTime}
           onUploadFrame={onUploadFrameToPlus}
           onPlaying={() => setIsVideoLoading(false)}
+          setFullResolution={setFullResolution}
           isDetailMode={true}
           camera={event.camera}
           currentTimeOverride={currentTime}
@@ -623,7 +634,7 @@ export function TrackingDetails({
         <div
           className={cn(
             isDesktop && "justify-start overflow-hidden",
-            aspectRatio > 1 && aspectRatio < 1.5
+            aspectRatio > 1 && aspectRatio < ASPECT_PORTRAIT_LAYOUT
               ? "lg:basis-3/5"
               : "lg:basis-2/5",
           )}
@@ -44,4 +44,5 @@ export type RecordingStartingPoint = {
 export type RecordingPlayerError = "stalled" | "startup";
 
 export const ASPECT_VERTICAL_LAYOUT = 1.5;
+export const ASPECT_PORTRAIT_LAYOUT = 1.333;
 export const ASPECT_WIDE_LAYOUT = 2;
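For context on the new constant above: with the previous ASPECT_VERTICAL_LAYOUT threshold of 1.5, a 4:3 stream (ratio ≈ 1.333...) was classified as "tall"; comparing against ASPECT_PORTRAIT_LAYOUT (1.333) instead appears to move such streams into the normal layout, reserving "tall" for genuinely portrait cameras. Below is a minimal sketch of the selection logic, assuming only the constants and the visible branches from the hunks above (the first branch in the TrackingDetails hunk, whose condition is not shown, is omitted).

```typescript
const ASPECT_PORTRAIT_LAYOUT = 1.333;
const ASPECT_WIDE_LAYOUT = 2;

// Roughly mirrors the visible branch order: wide first, then tall, else normal.
function layoutFor(aspectRatio: number): "wide" | "tall" | "normal" {
  if (aspectRatio > ASPECT_WIDE_LAYOUT) return "wide";
  if (aspectRatio < ASPECT_PORTRAIT_LAYOUT) return "tall";
  return "normal";
}

console.log(layoutFor(16 / 9)); // "normal" (≈ 1.78)
console.log(layoutFor(4 / 3)); // "normal" (≈ 1.333..., just above the portrait threshold)
console.log(layoutFor(9 / 16)); // "tall"   (0.5625, a portrait camera)
console.log(layoutFor(32 / 9)); // "wide"   (≈ 3.56, a panoramic camera)
```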
@@ -664,9 +664,7 @@ export default function TriggerView({
             <TableHeader className="sticky top-0 bg-muted/50">
               <TableRow>
                 <TableHead className="w-4"></TableHead>
-                <TableHead>
-                  {t("name", { ns: "triggers.table.name" })}
-                </TableHead>
+                <TableHead>{t("triggers.table.name")}</TableHead>
                 <TableHead>{t("triggers.table.type")}</TableHead>
                 <TableHead>
                   {t("triggers.table.lastTriggered")}