Mirror of https://github.com/blakeblackshear/frigate.git
Add support for nvidia driver info

parent 6ebad84160
commit 8c077ef5fd

@@ -28,7 +28,12 @@ from frigate.util.builtin import (
     get_tz_modifiers,
     update_yaml_from_url,
 )
-from frigate.util.services import ffprobe_stream, restart_frigate, vainfo_hwaccel
+from frigate.util.services import (
+    ffprobe_stream,
+    get_nvidia_driver_info,
+    restart_frigate,
+    vainfo_hwaccel,
+)
 from frigate.version import VERSION
 
 logger = logging.getLogger(__name__)

@@ -382,6 +387,11 @@ def vainfo():
     )
 
 
+@router.get("/nvinfo")
+def nvinfo():
+    return JSONResponse(content=get_nvidia_driver_info())
+
+
 @router.get("/logs/{service}", tags=[Tags.logs])
 def logs(
     service: str = Path(enum=["frigate", "nginx", "go2rtc"]),
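
The new route is a plain GET that serializes whatever get_nvidia_driver_info() returns (defined in the services hunk further below). A minimal sketch of calling it; the base URL http://localhost:5000/api is an assumption about a typical Frigate install, not something this diff specifies:

# Hypothetical call to the new endpoint; adjust host, port and prefix to your setup.
import json
import urllib.request

with urllib.request.urlopen("http://localhost:5000/api/nvinfo") as resp:
    info = json.load(resp)

# One entry per GPU index, each with "name", "driver" and "cuda" keys,
# or an empty object if NVML could not be queried.
for idx, gpu in info.items():
    print(idx, gpu)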

@@ -356,6 +356,8 @@ def get_nvidia_gpu_stats() -> dict[int, dict]:
             util = try_get_info(nvml.nvmlDeviceGetUtilizationRates, handle)
             enc = try_get_info(nvml.nvmlDeviceGetEncoderUtilization, handle)
             dec = try_get_info(nvml.nvmlDeviceGetDecoderUtilization, handle)
+            pstate = try_get_info(nvml.nvmlDeviceGetPowerState, handle, default=None)
+
             if util != "N/A":
                 gpu_util = util.gpu
             else:
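
The added line records the GPU's performance state alongside the existing utilization queries. For reference, a standalone sketch of the same NVML call, using the pynvml package directly instead of frigate's try_get_info wrapper (the package name and device index 0 are assumptions for illustration):

# Standalone power-state query mirroring the pstate line added above.
import pynvml as nvml

nvml.nvmlInit()
handle = nvml.nvmlDeviceGetHandleByIndex(0)    # first GPU
pstate = nvml.nvmlDeviceGetPowerState(handle)  # integer performance state, P0 = fastest
print(f"P{pstate}")
nvml.nvmlShutdown()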

@@ -382,6 +384,7 @@ def get_nvidia_gpu_stats() -> dict[int, dict]:
                 "mem": gpu_mem_util,
                 "enc": enc_util,
                 "dec": dec_util,
+                "pstate": pstate or "unknown",
             }
     except Exception:
         pass
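
Together with the previous hunk, each per-GPU entry in the stats dict now also carries the power state. An illustrative entry (values invented; only the keys visible in this hunk are shown, the real dict holds further fields populated earlier in the function):

# Illustrative shape of one value returned by get_nvidia_gpu_stats().
entry = {
    "mem": 35.4,     # memory utilization
    "enc": 0,        # encoder utilization
    "dec": 4,        # decoder utilization
    "pstate": 2,     # new: nvmlDeviceGetPowerState result, or "unknown"
}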

@@ -432,6 +435,29 @@ def vainfo_hwaccel(device_name: Optional[str] = None) -> sp.CompletedProcess:
     return sp.run(ffprobe_cmd, capture_output=True)
 
 
+def get_nvidia_driver_info() -> dict[str, any]:
+    """Get general hardware info for nvidia GPU."""
+    results = {}
+    try:
+        nvml.nvmlInit()
+        deviceCount = nvml.nvmlDeviceGetCount()
+        for i in range(deviceCount):
+            handle = nvml.nvmlDeviceGetHandleByIndex(i)
+            driver = try_get_info(nvml.nvmlSystemGetDriverVersion, handle, default=None)
+            cuda = try_get_info(
+                nvml.nvmlDeviceGetCudaComputeCapability, handle, default=None
+            )
+            results[i] = {
+                "name": nvml.nvmlDeviceGetName(handle),
+                "driver": driver or "unknown",
+                "cuda": cuda or "unknown",
+            }
+    except Exception:
+        pass
+    finally:
+        return results
+
+
 def auto_detect_hwaccel() -> str:
     """Detect hwaccel args by default."""
     try:
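
The new helper boils down to three NVML queries per device. A standalone sketch of those calls, using pynvml directly rather than the nvml binding imported by frigate.util.services, and without the try_get_info error handling (both simplifications are assumptions for illustration):

# Standalone sketch of the queries behind get_nvidia_driver_info().
import pynvml as nvml

nvml.nvmlInit()
for i in range(nvml.nvmlDeviceGetCount()):
    handle = nvml.nvmlDeviceGetHandleByIndex(i)
    name = nvml.nvmlDeviceGetName(handle)
    driver = nvml.nvmlSystemGetDriverVersion()  # driver version string; system-wide, takes no device handle
    major, minor = nvml.nvmlDeviceGetCudaComputeCapability(handle)
    print(i, name, driver, f"compute capability {major}.{minor}")
nvml.nvmlShutdown()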