move process logs function to services util

Josh Hawkins 2025-02-09 17:13:51 -06:00
parent c1dadb2ea4
commit 1c32a1cf63
2 changed files with 55 additions and 53 deletions

View File

@@ -9,7 +9,7 @@ import traceback
from datetime import datetime, timedelta
from functools import reduce
from io import StringIO
from typing import Any, List, Optional, Tuple
from typing import Any, Optional
import aiofiles
import requests
@@ -37,6 +37,7 @@ from frigate.util.config import find_config_file
from frigate.util.services import (
    ffprobe_stream,
    get_nvidia_driver_info,
    process_logs,
    restart_frigate,
    vainfo_hwaccel,
)
@@ -456,57 +457,6 @@ def nvinfo():
    return JSONResponse(content=get_nvidia_driver_info())
def process_logs(
    contents: str,
    service: Optional[str] = None,
    start: Optional[int] = None,
    end: Optional[int] = None,
) -> Tuple[int, List[str]]:
    log_lines = []
    last_message = None
    last_timestamp = None
    repeat_count = 0

    for raw_line in contents.splitlines():
        clean_line = raw_line.strip()

        if len(clean_line) < 10:
            continue

        # Handle cases where S6 does not include date in log line
        if "  " not in clean_line:
            clean_line = f"{datetime.now()}  {clean_line}"

        # Find the position of the first double space to extract timestamp and message
        date_end = clean_line.index("  ")
        timestamp = clean_line[:date_end]
        message_part = clean_line[date_end:].strip()

        if message_part == last_message:
            repeat_count += 1
            continue
        else:
            if repeat_count > 0:
                # Insert a deduplication message formatted the same way as logs
                dedup_message = f"{last_timestamp} [LOGGING] Last message repeated {repeat_count} times"
                log_lines.append(dedup_message)
                repeat_count = 0

            log_lines.append(clean_line)
            last_timestamp = timestamp
            last_message = message_part

    # If there were repeated messages at the end, log the count
    if repeat_count > 0:
        dedup_message = (
            f"{last_timestamp} [LOGGING] Last message repeated {repeat_count} times"
        )
        log_lines.append(dedup_message)

    return len(log_lines), log_lines[start:end]


@router.get("/logs/{service}", tags=[Tags.logs])
async def logs(
    service: str = Path(enum=["frigate", "nginx", "go2rtc"]),

View File

@@ -8,7 +8,8 @@ import re
import signal
import subprocess as sp
import traceback
from typing import Optional
from datetime import datetime
from typing import List, Optional, Tuple
import cv2
import psutil
@@ -635,3 +636,54 @@ async def get_video_properties(
    result["fourcc"] = fourcc
    return result
def process_logs(
    contents: str,
    service: Optional[str] = None,
    start: Optional[int] = None,
    end: Optional[int] = None,
) -> Tuple[int, List[str]]:
    log_lines = []
    last_message = None
    last_timestamp = None
    repeat_count = 0

    for raw_line in contents.splitlines():
        clean_line = raw_line.strip()

        if len(clean_line) < 10:
            continue

        # Handle cases where S6 does not include date in log line
        if "  " not in clean_line:
            clean_line = f"{datetime.now()}  {clean_line}"

        # Find the position of the first double space to extract timestamp and message
        date_end = clean_line.index("  ")
        timestamp = clean_line[:date_end]
        message_part = clean_line[date_end:].strip()

        if message_part == last_message:
            repeat_count += 1
            continue
        else:
            if repeat_count > 0:
                # Insert a deduplication message formatted the same way as logs
                dedup_message = f"{last_timestamp} [LOGGING] Last message repeated {repeat_count} times"
                log_lines.append(dedup_message)
                repeat_count = 0

            log_lines.append(clean_line)
            last_timestamp = timestamp
            last_message = message_part

    # If there were repeated messages at the end, log the count
    if repeat_count > 0:
        dedup_message = (
            f"{last_timestamp} [LOGGING] Last message repeated {repeat_count} times"
        )
        log_lines.append(dedup_message)

    return len(log_lines), log_lines[start:end]
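
For reference, a minimal usage sketch of the relocated helper follows. The sample log text and the service="frigate" argument are invented for illustration (the body above never reads the service parameter); in Frigate the real contents come from reading the corresponding service's log file.

# Minimal sketch: feed a few synthetic S6-style log lines (timestamp, two
# spaces, message) through process_logs and observe the deduplication.
from frigate.util.services import process_logs

sample = (
    "2025-02-09 17:00:00.000000  [INFO] detector started\n"
    "2025-02-09 17:00:01.000000  [ERROR] no frames received\n"
    "2025-02-09 17:00:02.000000  [ERROR] no frames received\n"
    "2025-02-09 17:00:03.000000  [ERROR] no frames received\n"
)

total, lines = process_logs(sample, service="frigate")

# total == 3: the two unique lines are kept, and the two repeats collapse into
# a "[LOGGING] Last message repeated 2 times" entry stamped with the timestamp
# of the last line that was actually appended. Passing start/end slices the
# returned list while total still reports the full count.
for line in lines:
    print(line)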