Merge branch 'blakeblackshear:dev' into dev

ibs0d 2026-03-18 13:24:52 +11:00 committed by GitHub
commit ea9d96d64c
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
18 changed files with 111 additions and 83 deletions

View File

@@ -205,14 +205,14 @@ class EmbeddingsContext:
         )
 
     def get_face_ids(self, name: str) -> list[str]:
-        sql_query = f"""
+        sql_query = """
            SELECT
                id
            FROM vec_descriptions
-            WHERE id LIKE '%{name}%'
+            WHERE id LIKE ?
        """
 
-        return self.db.execute_sql(sql_query).fetchall()
+        return self.db.execute_sql(sql_query, (f"%{name}%",)).fetchall()
 
     def reprocess_face(self, face_file: str) -> dict[str, Any]:
         return self.requestor.send_data(
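The change above swaps f-string interpolation for a bound parameter, so the search term is passed as data rather than spliced into the SQL text. A minimal sketch of the same pattern against a plain sqlite3 database (table and column names are illustrative, not Frigate's actual schema):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE vec_descriptions (id TEXT)")
conn.execute("INSERT INTO vec_descriptions VALUES ('face-alice-1'), ('face-bob-1')")

name = "alice"  # user-supplied value; may contain quotes or wildcards
# The ? placeholder is bound by the driver, so the pattern stays out of the SQL text.
rows = conn.execute(
    "SELECT id FROM vec_descriptions WHERE id LIKE ?", (f"%{name}%",)
).fetchall()
print(rows)  # [('face-alice-1',)]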

View File

@@ -266,7 +266,7 @@ class Embeddings:
             )
 
         duration = datetime.datetime.now().timestamp() - start
-        self.text_inference_speed.update(duration / len(valid_ids))
+        self.image_inference_speed.update(duration / len(valid_ids))
 
         return embeddings

View File

@@ -95,7 +95,8 @@ class EventCleanup(threading.Thread):
                 .namedtuples()
                 .iterator()
             )
-            logger.debug(f"{len(list(expired_events))} events can be expired")
+            expired_events = list(expired_events)
+            logger.debug(f"{len(expired_events)} events can be expired")
 
             # delete the media from disk
             for expired in expired_events:
@@ -220,7 +221,8 @@ class EventCleanup(threading.Thread):
                 .namedtuples()
                 .iterator()
             )
-            logger.debug(f"{len(list(expired_events))} events can be expired")
+            expired_events = list(expired_events)
+            logger.debug(f"{len(expired_events)} events can be expired")
 
             # delete the media from disk
             for expired in expired_events:
                 media_name = f"{expired.camera}-{expired.id}"
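Both hunks fix the same trap: a Peewee .iterator() result is a one-shot generator, so materializing it with list() inside the debug call drained it before the deletion loop ran. A standalone illustration with a plain generator (not Frigate code):

def fetch_rows():
    yield from ("event-1", "event-2", "event-3")

# Buggy pattern: len(list(...)) exhausts the iterator before the loop runs.
expired = fetch_rows()
print(len(list(expired)))    # 3
print([e for e in expired])  # [] -- nothing left to delete

# Fixed pattern: materialize once, then log and iterate the same list.
expired = list(fetch_rows())
print(len(expired))          # 3
print([e for e in expired])  # ['event-1', 'event-2', 'event-3']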

View File

@@ -63,7 +63,7 @@ class LibvaGpuSelector:
         if not self._valid_gpus:
             return ""
 
-        if gpu <= len(self._valid_gpus):
+        if gpu < len(self._valid_gpus):
             return self._valid_gpus[gpu]
         else:
             logger.warning(f"Invalid GPU index {gpu}, using first valid GPU")
@@ -278,7 +278,7 @@ def parse_preset_hardware_acceleration_encode(
         arg_map = PRESETS_HW_ACCEL_ENCODE_TIMELAPSE
 
     if not isinstance(arg, str):
-        return arg_map["default"].format(input, output)
+        return arg_map["default"].format(ffmpeg_path, input, output)
 
     # Not all jetsons have HW encoders, so fall back to default SW encoder if not
     if arg.startswith("preset-jetson-") and not os.path.exists("/dev/nvhost-msenc"):
@@ -436,7 +436,7 @@ def parse_preset_input(arg: Any, detect_fps: int) -> list[str]:
     if arg == "preset-http-jpeg-generic":
         input = PRESETS_INPUT[arg].copy()
-        input[len(_user_agent_args) + 1] = str(detect_fps)
+        input[1] = str(detect_fps)
         return input
 
     return PRESETS_INPUT.get(arg, None)

View File

@@ -153,6 +153,9 @@ Each line represents a detection state, not necessarily unique individuals. Pare
         if "other_concerns" in schema.get("required", []):
             schema["required"].remove("other_concerns")
 
+        # OpenAI strict mode requires additionalProperties: false on all objects
+        schema["additionalProperties"] = False
+
         response_format = {
             "type": "json_schema",
             "json_schema": {

View File

@ -36,19 +36,12 @@ def _to_jpeg(img_bytes: bytes) -> bytes | None:
class LlamaCppClient(GenAIClient): class LlamaCppClient(GenAIClient):
"""Generative AI client for Frigate using llama.cpp server.""" """Generative AI client for Frigate using llama.cpp server."""
LOCAL_OPTIMIZED_OPTIONS = {
"temperature": 0.7,
"repeat_penalty": 1.05,
"top_p": 0.8,
}
provider: str # base_url provider: str # base_url
provider_options: dict[str, Any] provider_options: dict[str, Any]
def _init_provider(self): def _init_provider(self):
"""Initialize the client.""" """Initialize the client."""
self.provider_options = { self.provider_options = {
**self.LOCAL_OPTIMIZED_OPTIONS,
**self.genai_config.provider_options, **self.genai_config.provider_options,
} }
return ( return (

View File

@@ -303,12 +303,20 @@ class BirdsEyeFrameManager:
             birdseye_logo = cv2.imread(logo_files[0], cv2.IMREAD_UNCHANGED)
             if birdseye_logo is not None:
-                transparent_layer = birdseye_logo[:, :, 3]
+                if birdseye_logo.ndim == 2:
+                    # Grayscale image (no channels) — use directly as luminance
+                    transparent_layer = birdseye_logo
+                elif birdseye_logo.shape[2] >= 4:
+                    # RGBA — use alpha channel as luminance
+                    transparent_layer = birdseye_logo[:, :, 3]
+                else:
+                    # RGB or other format without alpha — convert to grayscale
+                    transparent_layer = cv2.cvtColor(birdseye_logo, cv2.COLOR_BGR2GRAY)
                 y_offset = height // 2 - transparent_layer.shape[0] // 2
                 x_offset = width // 2 - transparent_layer.shape[1] // 2
 
                 self.blank_frame[
-                    y_offset : y_offset + transparent_layer.shape[1],
-                    x_offset : x_offset + transparent_layer.shape[0],
+                    y_offset : y_offset + transparent_layer.shape[0],
+                    x_offset : x_offset + transparent_layer.shape[1],
                 ] = transparent_layer
             else:
                 logger.warning("Unable to read Frigate logo")
@@ -753,7 +761,7 @@ class BirdsEyeFrameManager:
             frame_changed, layout_changed = self.update_frame(frame)
         except Exception:
             frame_changed, layout_changed = False, False
-            self.active_cameras = []
+            self.active_cameras = set()
             self.camera_layout = []
             print(traceback.format_exc())

View File

@@ -105,9 +105,9 @@ class PlusApi:
     def upload_image(self, image: ndarray, camera: str) -> str:
         r = self._get("image/signed_urls")
-        presigned_urls = r.json()
 
         if not r.ok:
             raise Exception("Unable to get signed urls")
 
+        presigned_urls = r.json()
         # resize and submit original
         files = {"file": get_jpg_bytes(image, 1920, 85)}

View File

@@ -901,7 +901,7 @@ class PtzAutoTracker:
         # Check direction difference
         velocities = np.round(velocities)
         invalid_dirs = False
-        if not np.any(np.linalg.norm(velocities, axis=1)):
+        if np.all(np.linalg.norm(velocities, axis=1)):
             cosine_sim = np.dot(velocities[0], velocities[1]) / (
                 np.linalg.norm(velocities[0]) * np.linalg.norm(velocities[1])
             )
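The old guard, not np.any(...), only entered the cosine-similarity branch when every velocity norm was zero, exactly the case that divides by zero; np.all(...) inverts that and compares directions only when both tracked velocities are non-zero. A small numpy check of the two conditions (values are made up):

import numpy as np

moving = np.array([[2.0, 1.0], [3.0, 0.5]])   # both rows non-zero
stalled = np.array([[0.0, 0.0], [3.0, 0.5]])  # one row is all zeros

for velocities in (moving, stalled):
    norms = np.linalg.norm(velocities, axis=1)
    print("old guard:", not np.any(norms), "new guard:", np.all(norms))
# moving:  old False, new True  -> cosine similarity is safe to compute
# stalled: old False, new False -> skipped; it would otherwise divide by zero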
@@ -1067,7 +1067,7 @@ class PtzAutoTracker:
                 f"{camera}: Zoom test: below dimension threshold: {below_dimension_threshold} width: {bb_right - bb_left}, max width: {camera_width * (self.zoom_factor[camera] + 0.1)}, height: {bb_bottom - bb_top}, max height: {camera_height * (self.zoom_factor[camera] + 0.1)}"
             )
             logger.debug(
-                f"{camera}: Zoom test: below velocity threshold: {below_velocity_threshold} velocity x: {abs(average_velocity[0])}, x threshold: {velocity_threshold_x}, velocity y: {abs(average_velocity[0])}, y threshold: {velocity_threshold_y}"
+                f"{camera}: Zoom test: below velocity threshold: {below_velocity_threshold} velocity x: {abs(average_velocity[0])}, x threshold: {velocity_threshold_x}, velocity y: {abs(average_velocity[1])}, y threshold: {velocity_threshold_y}"
             )
             logger.debug(f"{camera}: Zoom test: at max zoom: {at_max_zoom}")
             logger.debug(f"{camera}: Zoom test: at min zoom: {at_min_zoom}")

View File

@@ -116,7 +116,7 @@ def clean_camera_user_pass(line: str) -> str:
 def escape_special_characters(path: str) -> str:
     """Cleans reserved characters to encodings for ffmpeg."""
     if len(path) > 1000:
-        return ValueError("Input too long to check")
+        raise ValueError("Input too long to check")
 
     try:
         found = re.search(REGEX_RTSP_CAMERA_USER_PASS, path).group(0)[3:-1]
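Returning an exception instance instead of raising it hands the caller a ValueError object where a string was expected, so the length guard never actually aborted anything. A two-line contrast (function names are illustrative):

def broken(path: str) -> str:
    if len(path) > 1000:
        return ValueError("Input too long to check")  # caller just receives an object
    return path

def fixed(path: str) -> str:
    if len(path) > 1000:
        raise ValueError("Input too long to check")   # actually aborts the call
    return path

print(type(broken("x" * 2000)))  # <class 'ValueError'>, no error raised
try:
    fixed("x" * 2000)
except ValueError as err:
    print("raised:", err)        # raised: Input too long to check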

View File

@@ -52,6 +52,7 @@ export default function WebRtcPlayer({
   // camera states
   const pcRef = useRef<RTCPeerConnection | undefined>(undefined);
+  const wsRef = useRef<WebSocket | null>(null);
   const videoRef = useRef<HTMLVideoElement | null>(null);
   const [bufferTimeout, setBufferTimeout] = useState<NodeJS.Timeout>();
   const videoLoadTimeoutRef = useRef<NodeJS.Timeout>(undefined);
@@ -129,7 +130,8 @@ export default function WebRtcPlayer({
     }
 
     pcRef.current = await aPc;
-    const ws = new WebSocket(wsURL);
+    wsRef.current = new WebSocket(wsURL);
+    const ws = wsRef.current;
 
     ws.addEventListener("open", () => {
       pcRef.current?.addEventListener("icecandidate", (ev) => {
@@ -183,6 +185,10 @@ export default function WebRtcPlayer({
       connect(aPc);
 
       return () => {
+        if (wsRef.current) {
+          wsRef.current.close();
+          wsRef.current = null;
+        }
         if (pcRef.current) {
           pcRef.current.close();
           pcRef.current = undefined;

View File

@@ -125,8 +125,6 @@ export function useCameraActivity(
           newObjects = [...(objects ?? []), newActiveObject];
         }
       } else {
-        const newObjects = [...(objects ?? [])];
-
         let label = updatedEvent.after.label;
 
         if (updatedEvent.after.sub_label) {

View File

@@ -38,6 +38,7 @@ const localeMap: Record<string, () => Promise<Locale>> = {
   th: () => import("date-fns/locale/th").then((module) => module.th),
   ca: () => import("date-fns/locale/ca").then((module) => module.ca),
   hr: () => import("date-fns/locale/hr").then((module) => module.hr),
+  sl: () => import("date-fns/locale/sl").then((module) => module.sl),
 };
 
 export function useDateLocale(): Locale {

View File

@@ -106,13 +106,11 @@ export default function useStats(stats: FrigateStats | undefined) {
       const cameraName = config?.cameras?.[name]?.friendly_name ?? name;
 
-      // Skip ffmpeg warnings for replay cameras when debug replay is active
+      // Skip ffmpeg warnings for replay cameras
       if (
         !isNaN(ffmpegAvg) &&
         ffmpegAvg >= CameraFfmpegThreshold.error &&
-        !(
-          debugReplayStatus?.active && debugReplayStatus?.replay_camera === name
-        )
+        !isReplayCamera(name)
       ) {
         problems.push({
           text: t("stats.ffmpegHighCpuUsage", {

View File

@ -26,6 +26,7 @@ export const supportedLanguageKeys = [
"pl", "pl",
"hr", "hr",
"sk", "sk",
"sl",
"lt", "lt",
"uk", "uk",
"cs", "cs",

View File

@@ -381,7 +381,7 @@ export default function Replay() {
       </div>
 
       {/* Side panel */}
-      <div className="scrollbar-container order-last mb-2 mt-2 flex h-full w-full flex-col overflow-y-auto rounded-lg border-[1px] border-secondary-foreground bg-background_alt p-2 md:order-none md:mb-0 md:mr-2 md:mt-0 md:w-4/12">
+      <div className="order-last mb-2 mt-2 flex h-full w-full flex-col overflow-hidden rounded-lg border-[1px] border-secondary-foreground bg-background_alt p-2 md:order-none md:mb-0 md:mr-2 md:mt-0 md:w-4/12">
         <div className="mb-5 flex flex-col space-y-2">
           <Heading as="h3" className="mb-0">
             {t("title")}
@@ -399,7 +399,10 @@ export default function Replay() {
           <p>{t("description")}</p>
         </div>
       </div>
-      <Tabs defaultValue="debug" className="flex h-full w-full flex-col">
+      <Tabs
+        defaultValue="debug"
+        className="flex min-h-0 w-full flex-1 flex-col"
+      >
         <TabsList className="grid w-full grid-cols-3">
           <TabsTrigger value="debug">
             {t("debug.debugging", { ns: "views/settings" })}
@@ -409,7 +412,10 @@ export default function Replay() {
             {t("websocket_messages")}
           </TabsTrigger>
         </TabsList>
-        <TabsContent value="debug" className="mt-2">
+        <TabsContent
+          value="debug"
+          className="scrollbar-container mt-2 overflow-y-auto"
+        >
           <div className="mt-2 space-y-6">
             <div className="my-2.5 flex flex-col gap-2.5">
               {DEBUG_OPTION_KEYS.map((key) => {
@@ -554,7 +560,10 @@ export default function Replay() {
             </div>
           </div>
         </TabsContent>
-        <TabsContent value="objects" className="mt-2">
+        <TabsContent
+          value="objects"
+          className="scrollbar-container mt-2 overflow-y-auto"
+        >
           <ObjectList
             cameraConfig={replayCameraConfig}
             objects={objects}

View File

@@ -9,6 +9,7 @@ import {
   useState,
 } from "react";
 import { isCurrentHour } from "@/utils/dateUtil";
+import { isFirefox, isMobile, isSafari } from "react-device-detect";
 import { useTranslation } from "react-i18next";
 import { CameraConfig } from "@/types/frigateConfig";
 import useSWR from "swr";
@@ -305,31 +306,46 @@ function MotionPreviewClip({
     );
   }, [clipStart, preview, range.end_time]);
 
+  const compatIntervalRef = useRef<ReturnType<typeof setInterval> | null>(null);
+
+  useEffect(() => {
+    return () => {
+      if (compatIntervalRef.current) {
+        clearInterval(compatIntervalRef.current);
+      }
+    };
+  }, []);
+
   const resetPlayback = useCallback(() => {
     if (!videoRef.current || !preview) {
       return;
     }
 
+    if (compatIntervalRef.current) {
+      clearInterval(compatIntervalRef.current);
+      compatIntervalRef.current = null;
+    }
+
     videoRef.current.currentTime = clipStart;
-    videoRef.current.playbackRate = playbackRate;
-  }, [clipStart, playbackRate, preview]);
 
-  useEffect(() => {
-    if (!videoRef.current || !preview) {
-      return;
-    }
-
-    if (!isVisible) {
+    if (isSafari || (isFirefox && isMobile)) {
+      // Safari / iOS can't play at speeds > 2x, so manually step through frames
       videoRef.current.pause();
-      videoRef.current.currentTime = clipStart;
-      return;
-    }
-
-    if (videoRef.current.readyState >= 2) {
-      resetPlayback();
-      void videoRef.current.play().catch(() => undefined);
+      compatIntervalRef.current = setInterval(() => {
+        if (!videoRef.current) {
+          return;
+        }
+
+        videoRef.current.currentTime += 1;
+
+        if (videoRef.current.currentTime >= clipEnd) {
+          videoRef.current.currentTime = clipStart;
+        }
+      }, 1000 / playbackRate);
+    } else {
+      videoRef.current.playbackRate = playbackRate;
     }
-  }, [clipStart, isVisible, preview, resetPlayback]);
+  }, [clipStart, clipEnd, playbackRate, preview]);
 
   const drawDimOverlay = useCallback(() => {
     if (!dimOverlayCanvasRef.current) {
@@ -463,15 +479,17 @@ function MotionPreviewClip({
       {showLoadingIndicator && (
         <Skeleton className="absolute inset-0 z-10 rounded-lg md:rounded-2xl" />
       )}
-      {preview ? (
+      {preview && isVisible ? (
         <>
           <video
             ref={videoRef}
             className="size-full bg-black object-contain"
+            preload="auto"
+            autoPlay
             playsInline
-            preload={isVisible ? "metadata" : "none"}
             muted
-            autoPlay={isVisible}
+            disableRemotePlayback
+            loop
             onLoadedMetadata={() => {
               setVideoLoaded(true);
@@ -481,36 +499,21 @@ function MotionPreviewClip({
                   height: videoRef.current.videoHeight,
                 });
               }
-
-              if (!isVisible) {
-                return;
-              }
-
-              resetPlayback();
-
-              if (videoRef.current) {
-                void videoRef.current.play().catch(() => undefined);
-              }
             }}
             onCanPlay={() => {
               setVideoLoaded(true);
-
-              if (!isVisible) {
-                return;
-              }
-
-              if (videoRef.current) {
-                void videoRef.current.play().catch(() => undefined);
-              }
             }}
-            onPlay={() => setVideoPlaying(true)}
+            onPlay={() => {
+              setVideoPlaying(true);
+              resetPlayback();
+            }}
             onLoadedData={() => setVideoLoaded(true)}
             onError={() => {
               setVideoLoaded(true);
               setVideoPlaying(true);
             }}
             onTimeUpdate={() => {
-              if (!videoRef.current || !preview || !isVisible) {
+              if (!videoRef.current || !preview) {
                 return;
               }
@@ -519,12 +522,10 @@ function MotionPreviewClip({
               }
             }}
           >
-            {isVisible && (
-              <source
-                src={`${baseUrl}${preview.src.substring(1)}`}
-                type={preview.type}
-              />
-            )}
+            <source
+              src={`${baseUrl}${preview.src.substring(1)}`}
+              type={preview.type}
+            />
           </video>
           {motionHeatmap && (
             <canvas

View File

@@ -4,7 +4,7 @@ import useSWR from "swr";
 import axios from "axios";
 import ActivityIndicator from "@/components/indicators/activity-indicator";
 import AutoUpdatingCameraImage from "@/components/camera/AutoUpdatingCameraImage";
-import { useCallback, useEffect, useMemo, useState } from "react";
+import { useCallback, useEffect, useMemo, useRef, useState } from "react";
 import { Slider } from "@/components/ui/slider";
 import { Label } from "@/components/ui/label";
 import {
@@ -63,6 +63,8 @@ export default function MotionTunerView({
     improve_contrast: undefined,
   });
 
+  const userInteractedRef = useRef(false);
+
   const cameraConfig = useMemo(() => {
     if (config && selectedCamera) {
       return config.cameras[selectedCamera];
@@ -70,6 +72,7 @@ export default function MotionTunerView({
   }, [config, selectedCamera]);
 
   useEffect(() => {
+    userInteractedRef.current = false;
     if (cameraConfig) {
       setMotionSettings({
         threshold: cameraConfig.motion.threshold,
@@ -87,24 +90,29 @@ export default function MotionTunerView({
   }, [selectedCamera]);
 
   useEffect(() => {
-    if (!motionSettings.threshold) return;
+    if (!motionSettings.threshold || !userInteractedRef.current) return;
     sendMotionThreshold(motionSettings.threshold);
   }, [motionSettings.threshold, sendMotionThreshold]);
 
   useEffect(() => {
-    if (!motionSettings.contour_area) return;
+    if (!motionSettings.contour_area || !userInteractedRef.current) return;
     sendMotionContourArea(motionSettings.contour_area);
   }, [motionSettings.contour_area, sendMotionContourArea]);
 
   useEffect(() => {
-    if (motionSettings.improve_contrast === undefined) return;
+    if (
+      motionSettings.improve_contrast === undefined ||
+      !userInteractedRef.current
+    )
+      return;
     sendImproveContrast(motionSettings.improve_contrast ? "ON" : "OFF");
   }, [motionSettings.improve_contrast, sendImproveContrast]);
 
   const handleMotionConfigChange = (newConfig: Partial<MotionSettings>) => {
+    userInteractedRef.current = true;
     setMotionSettings((prevConfig) => ({ ...prevConfig, ...newConfig }));
     setUnsavedChanges(true);
     setChangedValue(true);