Compare commits

..

No commits in common. "722ef6a1fed8e0c1079597d60fcdd7017b6e8735" and "5a214eb0d1634f87388174f96aa262edd282f23e" have entirely different histories.

7 changed files with 15 additions and 29 deletions

View File

@@ -95,8 +95,7 @@ class EventCleanup(threading.Thread):
.namedtuples()
.iterator()
)
expired_events = list(expired_events)
logger.debug(f"{len(expired_events)} events can be expired")
logger.debug(f"{len(list(expired_events))} events can be expired")
# delete the media from disk
for expired in expired_events:
@@ -221,8 +220,7 @@ class EventCleanup(threading.Thread):
.namedtuples()
.iterator()
)
expired_events = list(expired_events)
logger.debug(f"{len(expired_events)} events can be expired")
logger.debug(f"{len(list(expired_events))} events can be expired")
# delete the media from disk
for expired in expired_events:
media_name = f"{expired.camera}-{expired.id}"

View File

@@ -63,7 +63,7 @@ class LibvaGpuSelector:
if not self._valid_gpus:
return ""
if gpu < len(self._valid_gpus):
if gpu <= len(self._valid_gpus):
return self._valid_gpus[gpu]
else:
logger.warning(f"Invalid GPU index {gpu}, using first valid GPU")
@@ -278,7 +278,7 @@ def parse_preset_hardware_acceleration_encode(
arg_map = PRESETS_HW_ACCEL_ENCODE_TIMELAPSE
if not isinstance(arg, str):
return arg_map["default"].format(ffmpeg_path, input, output)
return arg_map["default"].format(input, output)
# Not all jetsons have HW encoders, so fall back to default SW encoder if not
if arg.startswith("preset-jetson-") and not os.path.exists("/dev/nvhost-msenc"):
@@ -436,7 +436,7 @@ def parse_preset_input(arg: Any, detect_fps: int) -> list[str]:
if arg == "preset-http-jpeg-generic":
input = PRESETS_INPUT[arg].copy()
input[1] = str(detect_fps)
input[len(_user_agent_args) + 1] = str(detect_fps)
return input
return PRESETS_INPUT.get(arg, None)

View File

@@ -303,20 +303,12 @@ class BirdsEyeFrameManager:
birdseye_logo = cv2.imread(logo_files[0], cv2.IMREAD_UNCHANGED)
if birdseye_logo is not None:
if birdseye_logo.ndim == 2:
# Grayscale image (no channels) — use directly as luminance
transparent_layer = birdseye_logo
elif birdseye_logo.shape[2] >= 4:
# RGBA — use alpha channel as luminance
transparent_layer = birdseye_logo[:, :, 3]
else:
# RGB or other format without alpha — convert to grayscale
transparent_layer = cv2.cvtColor(birdseye_logo, cv2.COLOR_BGR2GRAY)
transparent_layer = birdseye_logo[:, :, 3]
y_offset = height // 2 - transparent_layer.shape[0] // 2
x_offset = width // 2 - transparent_layer.shape[1] // 2
self.blank_frame[
y_offset : y_offset + transparent_layer.shape[0],
x_offset : x_offset + transparent_layer.shape[1],
y_offset : y_offset + transparent_layer.shape[1],
x_offset : x_offset + transparent_layer.shape[0],
] = transparent_layer
else:
logger.warning("Unable to read Frigate logo")
@@ -761,7 +753,7 @@ class BirdsEyeFrameManager:
frame_changed, layout_changed = self.update_frame(frame)
except Exception:
frame_changed, layout_changed = False, False
self.active_cameras = set()
self.active_cameras = []
self.camera_layout = []
print(traceback.format_exc())

View File

@@ -901,7 +901,7 @@ class PtzAutoTracker:
# Check direction difference
velocities = np.round(velocities)
invalid_dirs = False
if np.all(np.linalg.norm(velocities, axis=1)):
if not np.any(np.linalg.norm(velocities, axis=1)):
cosine_sim = np.dot(velocities[0], velocities[1]) / (
np.linalg.norm(velocities[0]) * np.linalg.norm(velocities[1])
)
@@ -1067,7 +1067,7 @@ class PtzAutoTracker:
f"{camera}: Zoom test: below dimension threshold: {below_dimension_threshold} width: {bb_right - bb_left}, max width: {camera_width * (self.zoom_factor[camera] + 0.1)}, height: {bb_bottom - bb_top}, max height: {camera_height * (self.zoom_factor[camera] + 0.1)}"
)
logger.debug(
f"{camera}: Zoom test: below velocity threshold: {below_velocity_threshold} velocity x: {abs(average_velocity[0])}, x threshold: {velocity_threshold_x}, velocity y: {abs(average_velocity[1])}, y threshold: {velocity_threshold_y}"
f"{camera}: Zoom test: below velocity threshold: {below_velocity_threshold} velocity x: {abs(average_velocity[0])}, x threshold: {velocity_threshold_x}, velocity y: {abs(average_velocity[0])}, y threshold: {velocity_threshold_y}"
)
logger.debug(f"{camera}: Zoom test: at max zoom: {at_max_zoom}")
logger.debug(f"{camera}: Zoom test: at min zoom: {at_min_zoom}")

View File

@@ -116,7 +116,7 @@ def clean_camera_user_pass(line: str) -> str:
def escape_special_characters(path: str) -> str:
"""Cleans reserved characters to encodings for ffmpeg."""
if len(path) > 1000:
raise ValueError("Input too long to check")
return ValueError("Input too long to check")
try:
found = re.search(REGEX_RTSP_CAMERA_USER_PASS, path).group(0)[3:-1]

View File

@@ -52,7 +52,6 @@ export default function WebRtcPlayer({
// camera states
const pcRef = useRef<RTCPeerConnection | undefined>(undefined);
const wsRef = useRef<WebSocket | null>(null);
const videoRef = useRef<HTMLVideoElement | null>(null);
const [bufferTimeout, setBufferTimeout] = useState<NodeJS.Timeout>();
const videoLoadTimeoutRef = useRef<NodeJS.Timeout>(undefined);
@@ -130,8 +129,7 @@
}
pcRef.current = await aPc;
wsRef.current = new WebSocket(wsURL);
const ws = wsRef.current;
const ws = new WebSocket(wsURL);
ws.addEventListener("open", () => {
pcRef.current?.addEventListener("icecandidate", (ev) => {
@@ -185,10 +183,6 @@
connect(aPc);
return () => {
if (wsRef.current) {
wsRef.current.close();
wsRef.current = null;
}
if (pcRef.current) {
pcRef.current.close();
pcRef.current = undefined;

View File

@@ -125,6 +125,8 @@ export function useCameraActivity(
newObjects = [...(objects ?? []), newActiveObject];
}
} else {
const newObjects = [...(objects ?? [])];
let label = updatedEvent.after.label;
if (updatedEvent.after.sub_label) {