Mirror of https://github.com/blakeblackshear/frigate.git, synced 2026-02-18 17:14:26 +03:00
move process logs function to services util

parent c1dadb2ea4
commit 1c32a1cf63
@@ -9,7 +9,7 @@ import traceback
 from datetime import datetime, timedelta
 from functools import reduce
 from io import StringIO
-from typing import Any, List, Optional, Tuple
+from typing import Any, Optional
 
 import aiofiles
 import requests
@@ -37,6 +37,7 @@ from frigate.util.config import find_config_file
 from frigate.util.services import (
     ffprobe_stream,
     get_nvidia_driver_info,
+    process_logs,
     restart_frigate,
     vainfo_hwaccel,
 )
@@ -456,57 +457,6 @@ def nvinfo():
     return JSONResponse(content=get_nvidia_driver_info())
 
 
-def process_logs(
-    contents: str,
-    service: Optional[str] = None,
-    start: Optional[int] = None,
-    end: Optional[int] = None,
-) -> Tuple[int, List[str]]:
-    log_lines = []
-    last_message = None
-    last_timestamp = None
-    repeat_count = 0
-
-    for raw_line in contents.splitlines():
-        clean_line = raw_line.strip()
-
-        if len(clean_line) < 10:
-            continue
-
-        # Handle cases where S6 does not include date in log line
-        if "  " not in clean_line:
-            clean_line = f"{datetime.now()}  {clean_line}"
-
-        # Find the position of the first double space to extract timestamp and message
-        date_end = clean_line.index("  ")
-        timestamp = clean_line[:date_end]
-        message_part = clean_line[date_end:].strip()
-
-        if message_part == last_message:
-            repeat_count += 1
-            continue
-        else:
-            if repeat_count > 0:
-                # Insert a deduplication message formatted the same way as logs
-                dedup_message = f"{last_timestamp} [LOGGING] Last message repeated {repeat_count} times"
-                log_lines.append(dedup_message)
-                repeat_count = 0
-
-            log_lines.append(clean_line)
-            last_timestamp = timestamp
-
-        last_message = message_part
-
-    # If there were repeated messages at the end, log the count
-    if repeat_count > 0:
-        dedup_message = (
-            f"{last_timestamp} [LOGGING] Last message repeated {repeat_count} times"
-        )
-        log_lines.append(dedup_message)
-
-    return len(log_lines), log_lines[start:end]
-
-
 @router.get("/logs/{service}", tags=[Tags.logs])
 async def logs(
     service: str = Path(enum=["frigate", "nginx", "go2rtc"]),
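With the function removed from the API module, the /logs/{service} endpoint now gets process_logs through the frigate.util.services import added above; the remaining hunks below are the services-util side of the move. For orientation, a minimal sketch of what a caller could look like after this change. The endpoint body itself is not part of this diff, so the function name, log path, read logic, and response shape here are illustrative assumptions only:

# Hypothetical call-site sketch; the real /logs/{service} handler body is not
# shown in this commit. The log path and response keys are assumptions.
from typing import Optional

import aiofiles

from frigate.util.services import process_logs


async def read_service_logs(
    service: str, start: Optional[int] = None, end: Optional[int] = None
) -> dict:
    # Assumed S6 log location; the path Frigate actually reads is not shown here.
    log_path = f"/dev/shm/logs/{service}/current"

    async with aiofiles.open(log_path, mode="r") as f:
        contents = await f.read()

    # process_logs returns (total_line_count, sliced_lines), per the moved function.
    total, lines = process_logs(contents, service, start, end)
    return {"totalLines": total, "lines": "\n".join(lines)}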
@@ -8,7 +8,8 @@ import re
 import signal
 import subprocess as sp
 import traceback
-from typing import Optional
+from datetime import datetime
+from typing import List, Optional, Tuple
 
 import cv2
 import psutil
@@ -635,3 +636,54 @@ async def get_video_properties(
     result["fourcc"] = fourcc
 
     return result
+
+
+def process_logs(
+    contents: str,
+    service: Optional[str] = None,
+    start: Optional[int] = None,
+    end: Optional[int] = None,
+) -> Tuple[int, List[str]]:
+    log_lines = []
+    last_message = None
+    last_timestamp = None
+    repeat_count = 0
+
+    for raw_line in contents.splitlines():
+        clean_line = raw_line.strip()
+
+        if len(clean_line) < 10:
+            continue
+
+        # Handle cases where S6 does not include date in log line
+        if "  " not in clean_line:
+            clean_line = f"{datetime.now()}  {clean_line}"
+
+        # Find the position of the first double space to extract timestamp and message
+        date_end = clean_line.index("  ")
+        timestamp = clean_line[:date_end]
+        message_part = clean_line[date_end:].strip()
+
+        if message_part == last_message:
+            repeat_count += 1
+            continue
+        else:
+            if repeat_count > 0:
+                # Insert a deduplication message formatted the same way as logs
+                dedup_message = f"{last_timestamp} [LOGGING] Last message repeated {repeat_count} times"
+                log_lines.append(dedup_message)
+                repeat_count = 0
+
+            log_lines.append(clean_line)
+            last_timestamp = timestamp
+
+        last_message = message_part
+
+    # If there were repeated messages at the end, log the count
+    if repeat_count > 0:
+        dedup_message = (
+            f"{last_timestamp} [LOGGING] Last message repeated {repeat_count} times"
+        )
+        log_lines.append(dedup_message)
+
+    return len(log_lines), log_lines[start:end]
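For a quick sanity check of the moved helper, a standalone usage sketch. The import path matches the frigate.util.services module this commit targets; the S6-style log content (two-space separated timestamp and message) is made up for illustration, assuming the function behaves exactly as added above:

from frigate.util.services import process_logs

# Made-up sample log: repeated messages should be collapsed into a single
# "[LOGGING] Last message repeated N times" entry by process_logs.
sample = "\n".join(
    [
        "2024-01-01 12:00:00.000000  [INFO] Starting go2rtc",
        "2024-01-01 12:00:01.000000  [WARN] Stream unavailable",
        "2024-01-01 12:00:02.000000  [WARN] Stream unavailable",
        "2024-01-01 12:00:03.000000  [WARN] Stream unavailable",
        "2024-01-01 12:00:04.000000  [INFO] Stream recovered",
    ]
)

total, lines = process_logs(sample)

# The two repeated warnings collapse into one
# "[LOGGING] Last message repeated 2 times" line, so 5 input lines become 4.
print(total)
print("\n".join(lines))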