Mirror of https://github.com/blakeblackshear/frigate.git
Rewrite the yaml loader to match PyYAML
The old implementation would fail in weird ways with configs that were incorrect in just the right way. The new implementation just does what PyYAML would do, only diverging in case of duplicate keys.
commit 6e05fbdb87
parent 67b72cdce9
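To illustrate the behavioral difference the commit message describes, here is a minimal sketch (not part of the diff itself): stock PyYAML silently keeps the last value of a duplicated key, while the NoDuplicateKeysLoader introduced below rejects the config. The YAML snippet is arbitrary example content, and the sketch assumes Frigate's frigate.util.builtin module is importable.

import yaml

from frigate.util.builtin import NoDuplicateKeysLoader  # added by this commit

raw = """\
mqtt:
  host: mqtt
mqtt:
  host: other
"""

# Plain PyYAML keeps whichever value comes last; the duplicate is silently dropped.
print(yaml.safe_load(raw))  # {'mqtt': {'host': 'other'}}

# The new loader parses exactly like SafeLoader, but raises on duplicate keys.
try:
    yaml.load(raw, NoDuplicateKeysLoader)
except ValueError as err:
    print(err)  # e.g. "Key redefinitions are not allowed: mqtt"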
@@ -9,6 +9,7 @@ from pathlib import Path
 from typing import Any, Dict, List, Optional, Tuple, Union
 
 import numpy as np
+import yaml
 from pydantic import (
     BaseModel,
     ConfigDict,
@@ -41,11 +42,11 @@ from frigate.ffmpeg_presets import (
 )
 from frigate.plus import PlusApi
 from frigate.util.builtin import (
+    NoDuplicateKeysLoader,
     deep_merge,
     escape_special_characters,
     generate_color_palette,
     get_ffmpeg_arg_list,
-    load_config_with_no_duplicates,
 )
 from frigate.util.config import StreamInfoRetriever, get_relative_coordinates
 from frigate.util.image import create_mask
@@ -1764,7 +1765,7 @@ class FrigateConfig(FrigateBaseModel):
             raw_config = f.read()
 
         if config_file.endswith(YAML_EXT):
-            config = load_config_with_no_duplicates(raw_config)
+            config = yaml.load(raw_config, NoDuplicateKeysLoader)
         elif config_file.endswith(".json"):
             config = json.loads(raw_config)
 
@@ -1772,5 +1773,5 @@ class FrigateConfig(FrigateBaseModel):
 
     @classmethod
     def parse_raw(cls, raw_config):
-        config = load_config_with_no_duplicates(raw_config)
+        config = yaml.load(raw_config, NoDuplicateKeysLoader)
         return cls.model_validate(config)
@@ -4,13 +4,14 @@ import unittest
 from unittest.mock import patch
 
 import numpy as np
+import yaml
 from pydantic import ValidationError
 
 from frigate.config import BirdseyeModeEnum, FrigateConfig
 from frigate.const import MODEL_CACHE_DIR
 from frigate.detectors import DetectorTypeEnum
 from frigate.plus import PlusApi
-from frigate.util.builtin import deep_merge, load_config_with_no_duplicates
+from frigate.util.builtin import NoDuplicateKeysLoader, deep_merge
 
 
 class TestConfig(unittest.TestCase):
@@ -1537,7 +1538,7 @@ class TestConfig(unittest.TestCase):
         """
 
         self.assertRaises(
-            ValueError, lambda: load_config_with_no_duplicates(raw_config)
+            ValueError, lambda: yaml.load(raw_config, NoDuplicateKeysLoader)
         )
 
     def test_object_filter_ratios_work(self):
@@ -89,32 +89,31 @@ def deep_merge(dct1: dict, dct2: dict, override=False, merge_lists=False) -> dict:
     return merged
 
 
-def load_config_with_no_duplicates(raw_config) -> dict:
-    """Get config ensuring duplicate keys are not allowed."""
-
-    # https://stackoverflow.com/a/71751051
-    # important to use SafeLoader here to avoid RCE
-    class PreserveDuplicatesLoader(yaml.loader.SafeLoader):
-        pass
-
-    def map_constructor(loader, node, deep=False):
-        keys = [loader.construct_object(node, deep=deep) for node, _ in node.value]
-        vals = [loader.construct_object(node, deep=deep) for _, node in node.value]
-        key_count = Counter(keys)
-        data = {}
-        for key, val in zip(keys, vals):
-            if key_count[key] > 1:
-                raise ValueError(
-                    f"Config input {key} is defined multiple times for the same field, this is not allowed."
-                )
-            else:
-                data[key] = val
-        return data
-
-    PreserveDuplicatesLoader.add_constructor(
-        yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, map_constructor
-    )
-
-    return yaml.load(raw_config, PreserveDuplicatesLoader)
+class NoDuplicateKeysLoader(yaml.loader.SafeLoader):
+    """A yaml SafeLoader that disallows duplicate keys"""
+
+    def construct_mapping(self, node, deep=False):
+        mapping = super().construct_mapping(node, deep=deep)
+
+        if len(node.value) != len(mapping):
+            # There's a duplicate key somewhere. Find it.
+            duplicate_keys = [
+                key
+                for key, count in Counter(
+                    self.construct_object(key, deep=deep) for key, _ in node.value
+                ).items()
+                if count > 1
+            ]
+
+            # This might be possible if PyYAML's construct_mapping() changes the node
+            # afterwards for some reason? I don't see why, but better safe than sorry.
+            assert len(duplicate_keys) > 0
+
+            raise ValueError(
+                "Key redefinitions are not allowed: " + ", ".join(duplicate_keys)
+            )
+
+        return mapping
 
 
 def clean_camera_user_pass(line: str) -> str:
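A quick sanity check of the "matches PyYAML" claim, again a sketch rather than part of the commit (the YAML content is arbitrary): for input without duplicate keys, construct_mapping() defers entirely to SafeLoader, so the result is identical to yaml.safe_load().

import yaml

from frigate.util.builtin import NoDuplicateKeysLoader

raw = """\
cameras:
  front:
    detect:
      enabled: true
"""

# No duplicate keys, so the custom loader behaves exactly like SafeLoader.
assert yaml.load(raw, NoDuplicateKeysLoader) == yaml.safe_load(raw)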