mirror of
https://github.com/blakeblackshear/frigate.git
synced 2026-04-03 06:40:22 +00:00
Compare commits
10 Commits
442bd5da60
...
87f4006f9a
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
87f4006f9a | ||
|
|
b751228476 | ||
|
|
3b2d136665 | ||
|
|
e7394d0dc1 | ||
|
|
2e288109f4 | ||
|
|
256817d5c2 | ||
|
|
84409eab7e | ||
|
|
9e83888133 | ||
|
|
85f7138361 | ||
|
|
27bf314f4d |
@ -2,9 +2,9 @@
|
||||
set -e
|
||||
|
||||
# Download the MxAccl for Frigate github release
|
||||
wget https://github.com/memryx/mx_accl_frigate/archive/refs/heads/main.zip -O /tmp/mxaccl.zip
|
||||
wget https://github.com/memryx/mx_accl_frigate/archive/refs/tags/v2.1.0.zip -O /tmp/mxaccl.zip
|
||||
unzip /tmp/mxaccl.zip -d /tmp
|
||||
mv /tmp/mx_accl_frigate-main /opt/mx_accl_frigate
|
||||
mv /tmp/mx_accl_frigate-2.1.0 /opt/mx_accl_frigate
|
||||
rm /tmp/mxaccl.zip
|
||||
|
||||
# Install Python dependencies
|
||||
|
||||
@ -60,7 +60,7 @@ rapidfuzz==3.12.*
|
||||
# HailoRT Wheels
|
||||
appdirs==1.4.*
|
||||
argcomplete==2.0.*
|
||||
contextlib2==0.6.*
|
||||
contextlib2==21.6.*
|
||||
distlib==0.3.*
|
||||
filelock==3.8.*
|
||||
future==0.18.*
|
||||
|
||||
@ -24,10 +24,13 @@ echo "Adding MemryX GPG key and repository..."
|
||||
wget -qO- https://developer.memryx.com/deb/memryx.asc | sudo tee /etc/apt/trusted.gpg.d/memryx.asc >/dev/null
|
||||
echo 'deb https://developer.memryx.com/deb stable main' | sudo tee /etc/apt/sources.list.d/memryx.list >/dev/null
|
||||
|
||||
# Update and install memx-drivers
|
||||
echo "Installing memx-drivers..."
|
||||
# Update and install specific SDK 2.1 packages
|
||||
echo "Installing MemryX SDK 2.1 packages..."
|
||||
sudo apt update
|
||||
sudo apt install -y memx-drivers
|
||||
sudo apt install -y memx-drivers=2.1.* memx-accl=2.1.* mxa-manager=2.1.*
|
||||
|
||||
# Hold packages to prevent automatic upgrades
|
||||
sudo apt-mark hold memx-drivers memx-accl mxa-manager
|
||||
|
||||
# ARM-specific board setup
|
||||
if [[ "$arch" == "aarch64" || "$arch" == "arm64" ]]; then
|
||||
@ -37,11 +40,5 @@ fi
|
||||
|
||||
echo -e "\n\n\033[1;31mYOU MUST RESTART YOUR COMPUTER NOW\033[0m\n\n"
|
||||
|
||||
# Install other runtime packages
|
||||
packages=("memx-accl" "mxa-manager")
|
||||
for pkg in "${packages[@]}"; do
|
||||
echo "Installing $pkg..."
|
||||
sudo apt install -y "$pkg"
|
||||
done
|
||||
echo "MemryX SDK 2.1 installation complete!"
|
||||
|
||||
echo "MemryX installation complete!"
|
||||
|
||||
@ -38,7 +38,7 @@ from frigate.util.classification import (
|
||||
collect_object_classification_examples,
|
||||
collect_state_classification_examples,
|
||||
)
|
||||
from frigate.util.path import get_event_snapshot
|
||||
from frigate.util.file import get_event_snapshot
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@ -2,6 +2,7 @@
|
||||
|
||||
import base64
|
||||
import datetime
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import random
|
||||
@ -57,8 +58,8 @@ from frigate.const import CLIPS_DIR, TRIGGER_DIR
|
||||
from frigate.embeddings import EmbeddingsContext
|
||||
from frigate.models import Event, ReviewSegment, Timeline, Trigger
|
||||
from frigate.track.object_processing import TrackedObject
|
||||
from frigate.util.path import get_event_thumbnail_bytes
|
||||
from frigate.util.time import get_tz_modifiers
|
||||
from frigate.util.file import get_event_thumbnail_bytes
|
||||
from frigate.util.time import get_dst_transitions, get_tz_modifiers
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@ -813,7 +814,6 @@ def events_summary(
|
||||
allowed_cameras: List[str] = Depends(get_allowed_cameras_for_filter),
|
||||
):
|
||||
tz_name = params.timezone
|
||||
hour_modifier, minute_modifier, seconds_offset = get_tz_modifiers(tz_name)
|
||||
has_clip = params.has_clip
|
||||
has_snapshot = params.has_snapshot
|
||||
|
||||
@ -828,33 +828,91 @@ def events_summary(
|
||||
if len(clauses) == 0:
|
||||
clauses.append((True))
|
||||
|
||||
groups = (
|
||||
time_range_query = (
|
||||
Event.select(
|
||||
Event.camera,
|
||||
Event.label,
|
||||
Event.sub_label,
|
||||
Event.data,
|
||||
fn.strftime(
|
||||
"%Y-%m-%d",
|
||||
fn.datetime(
|
||||
Event.start_time, "unixepoch", hour_modifier, minute_modifier
|
||||
),
|
||||
).alias("day"),
|
||||
Event.zones,
|
||||
fn.COUNT(Event.id).alias("count"),
|
||||
fn.MIN(Event.start_time).alias("min_time"),
|
||||
fn.MAX(Event.start_time).alias("max_time"),
|
||||
)
|
||||
.where(reduce(operator.and_, clauses) & (Event.camera << allowed_cameras))
|
||||
.group_by(
|
||||
Event.camera,
|
||||
Event.label,
|
||||
Event.sub_label,
|
||||
Event.data,
|
||||
(Event.start_time + seconds_offset).cast("int") / (3600 * 24),
|
||||
Event.zones,
|
||||
)
|
||||
.dicts()
|
||||
.get()
|
||||
)
|
||||
|
||||
return JSONResponse(content=[e for e in groups.dicts()])
|
||||
min_time = time_range_query.get("min_time")
|
||||
max_time = time_range_query.get("max_time")
|
||||
|
||||
if min_time is None or max_time is None:
|
||||
return JSONResponse(content=[])
|
||||
|
||||
dst_periods = get_dst_transitions(tz_name, min_time, max_time)
|
||||
|
||||
grouped: dict[tuple, dict] = {}
|
||||
|
||||
for period_start, period_end, period_offset in dst_periods:
|
||||
hours_offset = int(period_offset / 60 / 60)
|
||||
minutes_offset = int(period_offset / 60 - hours_offset * 60)
|
||||
period_hour_modifier = f"{hours_offset} hour"
|
||||
period_minute_modifier = f"{minutes_offset} minute"
|
||||
|
||||
period_groups = (
|
||||
Event.select(
|
||||
Event.camera,
|
||||
Event.label,
|
||||
Event.sub_label,
|
||||
Event.data,
|
||||
fn.strftime(
|
||||
"%Y-%m-%d",
|
||||
fn.datetime(
|
||||
Event.start_time,
|
||||
"unixepoch",
|
||||
period_hour_modifier,
|
||||
period_minute_modifier,
|
||||
),
|
||||
).alias("day"),
|
||||
Event.zones,
|
||||
fn.COUNT(Event.id).alias("count"),
|
||||
)
|
||||
.where(
|
||||
reduce(operator.and_, clauses)
|
||||
& (Event.camera << allowed_cameras)
|
||||
& (Event.start_time >= period_start)
|
||||
& (Event.start_time <= period_end)
|
||||
)
|
||||
.group_by(
|
||||
Event.camera,
|
||||
Event.label,
|
||||
Event.sub_label,
|
||||
Event.data,
|
||||
(Event.start_time + period_offset).cast("int") / (3600 * 24),
|
||||
Event.zones,
|
||||
)
|
||||
.namedtuples()
|
||||
)
|
||||
|
||||
for g in period_groups:
|
||||
key = (
|
||||
g.camera,
|
||||
g.label,
|
||||
g.sub_label,
|
||||
json.dumps(g.data, sort_keys=True) if g.data is not None else None,
|
||||
g.day,
|
||||
json.dumps(g.zones, sort_keys=True) if g.zones is not None else None,
|
||||
)
|
||||
|
||||
if key in grouped:
|
||||
grouped[key]["count"] += int(g.count or 0)
|
||||
else:
|
||||
grouped[key] = {
|
||||
"camera": g.camera,
|
||||
"label": g.label,
|
||||
"sub_label": g.sub_label,
|
||||
"data": g.data,
|
||||
"day": g.day,
|
||||
"zones": g.zones,
|
||||
"count": int(g.count or 0),
|
||||
}
|
||||
|
||||
return JSONResponse(content=list(grouped.values()))
|
||||
|
||||
|
||||
@router.get(
|
||||
|
||||
@ -44,9 +44,9 @@ from frigate.const import (
|
||||
)
|
||||
from frigate.models import Event, Previews, Recordings, Regions, ReviewSegment
|
||||
from frigate.track.object_processing import TrackedObjectProcessor
|
||||
from frigate.util.file import get_event_thumbnail_bytes
|
||||
from frigate.util.image import get_image_from_recording
|
||||
from frigate.util.path import get_event_thumbnail_bytes
|
||||
from frigate.util.time import get_tz_modifiers
|
||||
from frigate.util.time import get_dst_transitions
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@ -424,7 +424,6 @@ def all_recordings_summary(
|
||||
allowed_cameras: List[str] = Depends(get_allowed_cameras_for_filter),
|
||||
):
|
||||
"""Returns true/false by day indicating if recordings exist"""
|
||||
hour_modifier, minute_modifier, seconds_offset = get_tz_modifiers(params.timezone)
|
||||
|
||||
cameras = params.cameras
|
||||
if cameras != "all":
|
||||
@ -432,41 +431,70 @@ def all_recordings_summary(
|
||||
filtered = requested.intersection(allowed_cameras)
|
||||
if not filtered:
|
||||
return JSONResponse(content={})
|
||||
cameras = ",".join(filtered)
|
||||
camera_list = list(filtered)
|
||||
else:
|
||||
cameras = allowed_cameras
|
||||
camera_list = allowed_cameras
|
||||
|
||||
query = (
|
||||
time_range_query = (
|
||||
Recordings.select(
|
||||
fn.strftime(
|
||||
"%Y-%m-%d",
|
||||
fn.datetime(
|
||||
Recordings.start_time + seconds_offset,
|
||||
"unixepoch",
|
||||
hour_modifier,
|
||||
minute_modifier,
|
||||
),
|
||||
).alias("day")
|
||||
fn.MIN(Recordings.start_time).alias("min_time"),
|
||||
fn.MAX(Recordings.start_time).alias("max_time"),
|
||||
)
|
||||
.group_by(
|
||||
fn.strftime(
|
||||
"%Y-%m-%d",
|
||||
fn.datetime(
|
||||
Recordings.start_time + seconds_offset,
|
||||
"unixepoch",
|
||||
hour_modifier,
|
||||
minute_modifier,
|
||||
),
|
||||
)
|
||||
)
|
||||
.order_by(Recordings.start_time.desc())
|
||||
.where(Recordings.camera << camera_list)
|
||||
.dicts()
|
||||
.get()
|
||||
)
|
||||
|
||||
if params.cameras != "all":
|
||||
query = query.where(Recordings.camera << cameras.split(","))
|
||||
min_time = time_range_query.get("min_time")
|
||||
max_time = time_range_query.get("max_time")
|
||||
|
||||
recording_days = query.namedtuples()
|
||||
days = {day.day: True for day in recording_days}
|
||||
if min_time is None or max_time is None:
|
||||
return JSONResponse(content={})
|
||||
|
||||
dst_periods = get_dst_transitions(params.timezone, min_time, max_time)
|
||||
|
||||
days: dict[str, bool] = {}
|
||||
|
||||
for period_start, period_end, period_offset in dst_periods:
|
||||
hours_offset = int(period_offset / 60 / 60)
|
||||
minutes_offset = int(period_offset / 60 - hours_offset * 60)
|
||||
period_hour_modifier = f"{hours_offset} hour"
|
||||
period_minute_modifier = f"{minutes_offset} minute"
|
||||
|
||||
period_query = (
|
||||
Recordings.select(
|
||||
fn.strftime(
|
||||
"%Y-%m-%d",
|
||||
fn.datetime(
|
||||
Recordings.start_time,
|
||||
"unixepoch",
|
||||
period_hour_modifier,
|
||||
period_minute_modifier,
|
||||
),
|
||||
).alias("day")
|
||||
)
|
||||
.where(
|
||||
(Recordings.camera << camera_list)
|
||||
& (Recordings.end_time >= period_start)
|
||||
& (Recordings.start_time <= period_end)
|
||||
)
|
||||
.group_by(
|
||||
fn.strftime(
|
||||
"%Y-%m-%d",
|
||||
fn.datetime(
|
||||
Recordings.start_time,
|
||||
"unixepoch",
|
||||
period_hour_modifier,
|
||||
period_minute_modifier,
|
||||
),
|
||||
)
|
||||
)
|
||||
.order_by(Recordings.start_time.desc())
|
||||
.namedtuples()
|
||||
)
|
||||
|
||||
for g in period_query:
|
||||
days[g.day] = True
|
||||
|
||||
return JSONResponse(content=days)
|
||||
|
||||
@ -476,61 +504,103 @@ def all_recordings_summary(
|
||||
)
|
||||
async def recordings_summary(camera_name: str, timezone: str = "utc"):
|
||||
"""Returns hourly summary for recordings of given camera"""
|
||||
hour_modifier, minute_modifier, seconds_offset = get_tz_modifiers(timezone)
|
||||
recording_groups = (
|
||||
|
||||
time_range_query = (
|
||||
Recordings.select(
|
||||
fn.strftime(
|
||||
"%Y-%m-%d %H",
|
||||
fn.datetime(
|
||||
Recordings.start_time, "unixepoch", hour_modifier, minute_modifier
|
||||
),
|
||||
).alias("hour"),
|
||||
fn.SUM(Recordings.duration).alias("duration"),
|
||||
fn.SUM(Recordings.motion).alias("motion"),
|
||||
fn.SUM(Recordings.objects).alias("objects"),
|
||||
fn.MIN(Recordings.start_time).alias("min_time"),
|
||||
fn.MAX(Recordings.start_time).alias("max_time"),
|
||||
)
|
||||
.where(Recordings.camera == camera_name)
|
||||
.group_by((Recordings.start_time + seconds_offset).cast("int") / 3600)
|
||||
.order_by(Recordings.start_time.desc())
|
||||
.namedtuples()
|
||||
.dicts()
|
||||
.get()
|
||||
)
|
||||
|
||||
event_groups = (
|
||||
Event.select(
|
||||
fn.strftime(
|
||||
"%Y-%m-%d %H",
|
||||
fn.datetime(
|
||||
Event.start_time, "unixepoch", hour_modifier, minute_modifier
|
||||
),
|
||||
).alias("hour"),
|
||||
fn.COUNT(Event.id).alias("count"),
|
||||
min_time = time_range_query.get("min_time")
|
||||
max_time = time_range_query.get("max_time")
|
||||
|
||||
days: dict[str, dict] = {}
|
||||
|
||||
if min_time is None or max_time is None:
|
||||
return JSONResponse(content=list(days.values()))
|
||||
|
||||
dst_periods = get_dst_transitions(timezone, min_time, max_time)
|
||||
|
||||
for period_start, period_end, period_offset in dst_periods:
|
||||
hours_offset = int(period_offset / 60 / 60)
|
||||
minutes_offset = int(period_offset / 60 - hours_offset * 60)
|
||||
period_hour_modifier = f"{hours_offset} hour"
|
||||
period_minute_modifier = f"{minutes_offset} minute"
|
||||
|
||||
recording_groups = (
|
||||
Recordings.select(
|
||||
fn.strftime(
|
||||
"%Y-%m-%d %H",
|
||||
fn.datetime(
|
||||
Recordings.start_time,
|
||||
"unixepoch",
|
||||
period_hour_modifier,
|
||||
period_minute_modifier,
|
||||
),
|
||||
).alias("hour"),
|
||||
fn.SUM(Recordings.duration).alias("duration"),
|
||||
fn.SUM(Recordings.motion).alias("motion"),
|
||||
fn.SUM(Recordings.objects).alias("objects"),
|
||||
)
|
||||
.where(
|
||||
(Recordings.camera == camera_name)
|
||||
& (Recordings.end_time >= period_start)
|
||||
& (Recordings.start_time <= period_end)
|
||||
)
|
||||
.group_by((Recordings.start_time + period_offset).cast("int") / 3600)
|
||||
.order_by(Recordings.start_time.desc())
|
||||
.namedtuples()
|
||||
)
|
||||
.where(Event.camera == camera_name, Event.has_clip)
|
||||
.group_by((Event.start_time + seconds_offset).cast("int") / 3600)
|
||||
.namedtuples()
|
||||
)
|
||||
|
||||
event_map = {g.hour: g.count for g in event_groups}
|
||||
event_groups = (
|
||||
Event.select(
|
||||
fn.strftime(
|
||||
"%Y-%m-%d %H",
|
||||
fn.datetime(
|
||||
Event.start_time,
|
||||
"unixepoch",
|
||||
period_hour_modifier,
|
||||
period_minute_modifier,
|
||||
),
|
||||
).alias("hour"),
|
||||
fn.COUNT(Event.id).alias("count"),
|
||||
)
|
||||
.where(Event.camera == camera_name, Event.has_clip)
|
||||
.where(
|
||||
(Event.start_time >= period_start) & (Event.start_time <= period_end)
|
||||
)
|
||||
.group_by((Event.start_time + period_offset).cast("int") / 3600)
|
||||
.namedtuples()
|
||||
)
|
||||
|
||||
days = {}
|
||||
event_map = {g.hour: g.count for g in event_groups}
|
||||
|
||||
for recording_group in recording_groups:
|
||||
parts = recording_group.hour.split()
|
||||
hour = parts[1]
|
||||
day = parts[0]
|
||||
events_count = event_map.get(recording_group.hour, 0)
|
||||
hour_data = {
|
||||
"hour": hour,
|
||||
"events": events_count,
|
||||
"motion": recording_group.motion,
|
||||
"objects": recording_group.objects,
|
||||
"duration": round(recording_group.duration),
|
||||
}
|
||||
if day not in days:
|
||||
days[day] = {"events": events_count, "hours": [hour_data], "day": day}
|
||||
else:
|
||||
days[day]["events"] += events_count
|
||||
days[day]["hours"].append(hour_data)
|
||||
for recording_group in recording_groups:
|
||||
parts = recording_group.hour.split()
|
||||
hour = parts[1]
|
||||
day = parts[0]
|
||||
events_count = event_map.get(recording_group.hour, 0)
|
||||
hour_data = {
|
||||
"hour": hour,
|
||||
"events": events_count,
|
||||
"motion": recording_group.motion,
|
||||
"objects": recording_group.objects,
|
||||
"duration": round(recording_group.duration),
|
||||
}
|
||||
if day in days:
|
||||
# merge counts if already present (edge-case at DST boundary)
|
||||
days[day]["events"] += events_count or 0
|
||||
days[day]["hours"].append(hour_data)
|
||||
else:
|
||||
days[day] = {
|
||||
"events": events_count or 0,
|
||||
"hours": [hour_data],
|
||||
"day": day,
|
||||
}
|
||||
|
||||
return JSONResponse(content=list(days.values()))
|
||||
|
||||
|
||||
@ -36,7 +36,7 @@ from frigate.config import FrigateConfig
|
||||
from frigate.embeddings import EmbeddingsContext
|
||||
from frigate.models import Recordings, ReviewSegment, UserReviewStatus
|
||||
from frigate.review.types import SeverityEnum
|
||||
from frigate.util.time import get_dst_transitions, get_tz_modifiers
|
||||
from frigate.util.time import get_dst_transitions
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@ -197,7 +197,6 @@ async def review_summary(
|
||||
|
||||
user_id = current_user["username"]
|
||||
|
||||
hour_modifier, minute_modifier, seconds_offset = get_tz_modifiers(params.timezone)
|
||||
day_ago = (datetime.datetime.now() - datetime.timedelta(hours=24)).timestamp()
|
||||
|
||||
cameras = params.cameras
|
||||
|
||||
@ -20,8 +20,8 @@ from frigate.genai import GenAIClient
|
||||
from frigate.models import Event
|
||||
from frigate.types import TrackedObjectUpdateTypesEnum
|
||||
from frigate.util.builtin import EventsPerSecond, InferenceSpeed
|
||||
from frigate.util.file import get_event_thumbnail_bytes
|
||||
from frigate.util.image import create_thumbnail, ensure_jpeg_bytes
|
||||
from frigate.util.path import get_event_thumbnail_bytes
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from frigate.embeddings import Embeddings
|
||||
|
||||
@ -22,7 +22,7 @@ from frigate.db.sqlitevecq import SqliteVecQueueDatabase
|
||||
from frigate.embeddings.util import ZScoreNormalization
|
||||
from frigate.models import Event, Trigger
|
||||
from frigate.util.builtin import cosine_distance
|
||||
from frigate.util.path import get_event_thumbnail_bytes
|
||||
from frigate.util.file import get_event_thumbnail_bytes
|
||||
|
||||
from ..post.api import PostProcessorApi
|
||||
from ..types import DataProcessorMetrics
|
||||
|
||||
@ -17,6 +17,7 @@ from frigate.detectors.detector_config import (
|
||||
BaseDetectorConfig,
|
||||
ModelTypeEnum,
|
||||
)
|
||||
from frigate.util.file import FileLock
|
||||
from frigate.util.model import post_process_yolo
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@ -177,29 +178,6 @@ class MemryXDetector(DetectionApi):
|
||||
logger.error(f"Failed to initialize MemryX model: {e}")
|
||||
raise
|
||||
|
||||
def _acquire_file_lock(self, lock_path: str, timeout: int = 60, poll: float = 0.2):
|
||||
"""
|
||||
Create an exclusive lock file. Blocks (with polling) until it can acquire,
|
||||
or raises TimeoutError. Uses only stdlib (os.O_EXCL).
|
||||
"""
|
||||
start = time.time()
|
||||
while True:
|
||||
try:
|
||||
fd = os.open(lock_path, os.O_CREAT | os.O_EXCL | os.O_RDWR)
|
||||
os.close(fd)
|
||||
return
|
||||
except FileExistsError:
|
||||
if time.time() - start > timeout:
|
||||
raise TimeoutError(f"Timeout waiting for lock: {lock_path}")
|
||||
time.sleep(poll)
|
||||
|
||||
def _release_file_lock(self, lock_path: str):
|
||||
"""Best-effort removal of the lock file."""
|
||||
try:
|
||||
os.remove(lock_path)
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
|
||||
def load_yolo_constants(self):
|
||||
base = f"{self.cache_dir}/{self.model_folder}"
|
||||
# constants for yolov9 post-processing
|
||||
@ -212,9 +190,9 @@ class MemryXDetector(DetectionApi):
|
||||
os.makedirs(self.cache_dir, exist_ok=True)
|
||||
|
||||
lock_path = os.path.join(self.cache_dir, f".{self.model_folder}.lock")
|
||||
self._acquire_file_lock(lock_path)
|
||||
lock = FileLock(lock_path, timeout=60)
|
||||
|
||||
try:
|
||||
with lock:
|
||||
# ---------- CASE 1: user provided a custom model path ----------
|
||||
if self.memx_model_path:
|
||||
if not self.memx_model_path.endswith(".zip"):
|
||||
@ -338,9 +316,6 @@ class MemryXDetector(DetectionApi):
|
||||
f"Failed to remove downloaded zip {zip_path}: {e}"
|
||||
)
|
||||
|
||||
finally:
|
||||
self._release_file_lock(lock_path)
|
||||
|
||||
def send_input(self, connection_id, tensor_input: np.ndarray):
|
||||
"""Pre-process (if needed) and send frame to MemryX input queue"""
|
||||
if tensor_input is None:
|
||||
|
||||
@ -29,7 +29,7 @@ from frigate.db.sqlitevecq import SqliteVecQueueDatabase
|
||||
from frigate.models import Event, Trigger
|
||||
from frigate.types import ModelStatusTypesEnum
|
||||
from frigate.util.builtin import EventsPerSecond, InferenceSpeed, serialize
|
||||
from frigate.util.path import get_event_thumbnail_bytes
|
||||
from frigate.util.file import get_event_thumbnail_bytes
|
||||
|
||||
from .onnx.jina_v1_embedding import JinaV1ImageEmbedding, JinaV1TextEmbedding
|
||||
from .onnx.jina_v2_embedding import JinaV2Embedding
|
||||
|
||||
@ -62,8 +62,8 @@ from frigate.events.types import EventTypeEnum, RegenerateDescriptionEnum
|
||||
from frigate.genai import get_genai_client
|
||||
from frigate.models import Event, Recordings, ReviewSegment, Trigger
|
||||
from frigate.util.builtin import serialize
|
||||
from frigate.util.file import get_event_thumbnail_bytes
|
||||
from frigate.util.image import SharedMemoryFrameManager
|
||||
from frigate.util.path import get_event_thumbnail_bytes
|
||||
|
||||
from .embeddings import Embeddings
|
||||
|
||||
@ -397,7 +397,14 @@ class EmbeddingMaintainer(threading.Thread):
|
||||
|
||||
source_type, _, camera, frame_name, data = update
|
||||
|
||||
logger.debug(
|
||||
f"Received update - source_type: {source_type}, camera: {camera}, data label: {data.get('label') if data else 'None'}"
|
||||
)
|
||||
|
||||
if not camera or source_type != EventTypeEnum.tracked_object:
|
||||
logger.debug(
|
||||
f"Skipping update - camera: {camera}, source_type: {source_type}"
|
||||
)
|
||||
return
|
||||
|
||||
if self.config.semantic_search.enabled:
|
||||
@ -407,6 +414,9 @@ class EmbeddingMaintainer(threading.Thread):
|
||||
|
||||
# no need to process updated objects if no processors are active
|
||||
if len(self.realtime_processors) == 0 and len(self.post_processors) == 0:
|
||||
logger.debug(
|
||||
f"No processors active - realtime: {len(self.realtime_processors)}, post: {len(self.post_processors)}"
|
||||
)
|
||||
return
|
||||
|
||||
# Create our own thumbnail based on the bounding box and the frame time
|
||||
@ -415,6 +425,7 @@ class EmbeddingMaintainer(threading.Thread):
|
||||
frame_name, camera_config.frame_shape_yuv
|
||||
)
|
||||
except FileNotFoundError:
|
||||
logger.debug(f"Frame {frame_name} not found for camera {camera}")
|
||||
pass
|
||||
|
||||
if yuv_frame is None:
|
||||
@ -423,7 +434,11 @@ class EmbeddingMaintainer(threading.Thread):
|
||||
)
|
||||
return
|
||||
|
||||
logger.debug(
|
||||
f"Processing {len(self.realtime_processors)} realtime processors for object {data.get('id')} (label: {data.get('label')})"
|
||||
)
|
||||
for processor in self.realtime_processors:
|
||||
logger.debug(f"Calling process_frame on {processor.__class__.__name__}")
|
||||
processor.process_frame(data, yuv_frame)
|
||||
|
||||
for processor in self.post_processors:
|
||||
|
||||
@ -12,7 +12,7 @@ from frigate.config import FrigateConfig
|
||||
from frigate.const import CLIPS_DIR
|
||||
from frigate.db.sqlitevecq import SqliteVecQueueDatabase
|
||||
from frigate.models import Event, Timeline
|
||||
from frigate.util.path import delete_event_snapshot, delete_event_thumbnail
|
||||
from frigate.util.file import delete_event_snapshot, delete_event_thumbnail
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@ -9,6 +9,7 @@ from multiprocessing import Queue, Value
|
||||
from multiprocessing.synchronize import Event as MpEvent
|
||||
|
||||
import numpy as np
|
||||
import zmq
|
||||
|
||||
from frigate.comms.object_detector_signaler import (
|
||||
ObjectDetectorPublisher,
|
||||
@ -377,6 +378,15 @@ class RemoteObjectDetector:
|
||||
if self.stop_event.is_set():
|
||||
return detections
|
||||
|
||||
# Drain any stale detection results from the ZMQ buffer before making a new request
|
||||
# This prevents reading detection results from a previous request
|
||||
# NOTE: This should never happen, but can in some rare cases
|
||||
while True:
|
||||
try:
|
||||
self.detector_subscriber.socket.recv_string(flags=zmq.NOBLOCK)
|
||||
except zmq.Again:
|
||||
break
|
||||
|
||||
# copy input to shared memory
|
||||
self.np_shm[:] = tensor_input[:]
|
||||
self.detection_queue.put(self.name)
|
||||
|
||||
@ -20,8 +20,8 @@ from frigate.const import (
|
||||
from frigate.log import redirect_output_to_logger
|
||||
from frigate.models import Event, Recordings, ReviewSegment
|
||||
from frigate.types import ModelStatusTypesEnum
|
||||
from frigate.util.file import get_event_thumbnail_bytes
|
||||
from frigate.util.image import get_image_from_recording
|
||||
from frigate.util.path import get_event_thumbnail_bytes
|
||||
from frigate.util.process import FrigateProcess
|
||||
|
||||
BATCH_SIZE = 16
|
||||
|
||||
@ -1,7 +1,6 @@
|
||||
import logging
|
||||
import os
|
||||
import threading
|
||||
import time
|
||||
from pathlib import Path
|
||||
from typing import Callable, List
|
||||
|
||||
@ -10,40 +9,11 @@ import requests
|
||||
from frigate.comms.inter_process import InterProcessRequestor
|
||||
from frigate.const import UPDATE_MODEL_STATE
|
||||
from frigate.types import ModelStatusTypesEnum
|
||||
from frigate.util.file import FileLock
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class FileLock:
|
||||
def __init__(self, path):
|
||||
self.path = path
|
||||
self.lock_file = f"{path}.lock"
|
||||
|
||||
# we have not acquired the lock yet so it should not exist
|
||||
if os.path.exists(self.lock_file):
|
||||
try:
|
||||
os.remove(self.lock_file)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
def acquire(self):
|
||||
parent_dir = os.path.dirname(self.lock_file)
|
||||
os.makedirs(parent_dir, exist_ok=True)
|
||||
|
||||
while True:
|
||||
try:
|
||||
with open(self.lock_file, "x"):
|
||||
return
|
||||
except FileExistsError:
|
||||
time.sleep(0.1)
|
||||
|
||||
def release(self):
|
||||
try:
|
||||
os.remove(self.lock_file)
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
|
||||
|
||||
class ModelDownloader:
|
||||
def __init__(
|
||||
self,
|
||||
@ -81,15 +51,13 @@ class ModelDownloader:
|
||||
def _download_models(self):
|
||||
for file_name in self.file_names:
|
||||
path = os.path.join(self.download_path, file_name)
|
||||
lock = FileLock(path)
|
||||
lock_path = f"{path}.lock"
|
||||
lock = FileLock(lock_path, cleanup_stale_on_init=True)
|
||||
|
||||
if not os.path.exists(path):
|
||||
lock.acquire()
|
||||
try:
|
||||
with lock:
|
||||
if not os.path.exists(path):
|
||||
self.download_func(path)
|
||||
finally:
|
||||
lock.release()
|
||||
|
||||
self.requestor.send_data(
|
||||
UPDATE_MODEL_STATE,
|
||||
|
||||
276
frigate/util/file.py
Normal file
276
frigate/util/file.py
Normal file
@ -0,0 +1,276 @@
|
||||
"""Path and file utilities."""
|
||||
|
||||
import base64
|
||||
import fcntl
|
||||
import logging
|
||||
import os
|
||||
import time
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
import cv2
|
||||
from numpy import ndarray
|
||||
|
||||
from frigate.const import CLIPS_DIR, THUMB_DIR
|
||||
from frigate.models import Event
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_event_thumbnail_bytes(event: Event) -> bytes | None:
|
||||
if event.thumbnail:
|
||||
return base64.b64decode(event.thumbnail)
|
||||
else:
|
||||
try:
|
||||
with open(
|
||||
os.path.join(THUMB_DIR, event.camera, f"{event.id}.webp"), "rb"
|
||||
) as f:
|
||||
return f.read()
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
|
||||
def get_event_snapshot(event: Event) -> ndarray:
|
||||
media_name = f"{event.camera}-{event.id}"
|
||||
return cv2.imread(f"{os.path.join(CLIPS_DIR, media_name)}.jpg")
|
||||
|
||||
|
||||
### Deletion
|
||||
|
||||
|
||||
def delete_event_images(event: Event) -> bool:
|
||||
return delete_event_snapshot(event) and delete_event_thumbnail(event)
|
||||
|
||||
|
||||
def delete_event_snapshot(event: Event) -> bool:
|
||||
media_name = f"{event.camera}-{event.id}"
|
||||
media_path = Path(f"{os.path.join(CLIPS_DIR, media_name)}.jpg")
|
||||
|
||||
try:
|
||||
media_path.unlink(missing_ok=True)
|
||||
media_path = Path(f"{os.path.join(CLIPS_DIR, media_name)}-clean.webp")
|
||||
media_path.unlink(missing_ok=True)
|
||||
# also delete clean.png (legacy) for backward compatibility
|
||||
media_path = Path(f"{os.path.join(CLIPS_DIR, media_name)}-clean.png")
|
||||
media_path.unlink(missing_ok=True)
|
||||
return True
|
||||
except OSError:
|
||||
return False
|
||||
|
||||
|
||||
def delete_event_thumbnail(event: Event) -> bool:
|
||||
if event.thumbnail:
|
||||
return True
|
||||
else:
|
||||
Path(os.path.join(THUMB_DIR, event.camera, f"{event.id}.webp")).unlink(
|
||||
missing_ok=True
|
||||
)
|
||||
return True
|
||||
|
||||
|
||||
### File Locking
|
||||
|
||||
|
||||
class FileLock:
|
||||
"""
|
||||
A file-based lock for coordinating access to resources across processes.
|
||||
|
||||
Uses fcntl.flock() for proper POSIX file locking on Linux. Supports timeouts,
|
||||
stale lock detection, and can be used as a context manager.
|
||||
|
||||
Example:
|
||||
```python
|
||||
# Using as a context manager (recommended)
|
||||
with FileLock("/path/to/resource.lock", timeout=60):
|
||||
# Critical section
|
||||
do_something()
|
||||
|
||||
# Manual acquisition and release
|
||||
lock = FileLock("/path/to/resource.lock")
|
||||
if lock.acquire(timeout=60):
|
||||
try:
|
||||
do_something()
|
||||
finally:
|
||||
lock.release()
|
||||
```
|
||||
|
||||
Attributes:
|
||||
lock_path: Path to the lock file
|
||||
timeout: Maximum time to wait for lock acquisition (seconds)
|
||||
poll_interval: Time to wait between lock acquisition attempts (seconds)
|
||||
stale_timeout: Time after which a lock is considered stale (seconds)
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
lock_path: str | Path,
|
||||
timeout: int = 300,
|
||||
poll_interval: float = 1.0,
|
||||
stale_timeout: int = 600,
|
||||
cleanup_stale_on_init: bool = False,
|
||||
):
|
||||
"""
|
||||
Initialize a FileLock.
|
||||
|
||||
Args:
|
||||
lock_path: Path to the lock file
|
||||
timeout: Maximum time to wait for lock acquisition in seconds (default: 300)
|
||||
poll_interval: Time to wait between lock attempts in seconds (default: 1.0)
|
||||
stale_timeout: Time after which a lock is considered stale in seconds (default: 600)
|
||||
cleanup_stale_on_init: Whether to clean up stale locks on initialization (default: False)
|
||||
"""
|
||||
self.lock_path = Path(lock_path)
|
||||
self.timeout = timeout
|
||||
self.poll_interval = poll_interval
|
||||
self.stale_timeout = stale_timeout
|
||||
self._fd: Optional[int] = None
|
||||
self._acquired = False
|
||||
|
||||
if cleanup_stale_on_init:
|
||||
self._cleanup_stale_lock()
|
||||
|
||||
def _cleanup_stale_lock(self) -> bool:
|
||||
"""
|
||||
Clean up a stale lock file if it exists and is old.
|
||||
|
||||
Returns:
|
||||
True if lock was cleaned up, False otherwise
|
||||
"""
|
||||
try:
|
||||
if self.lock_path.exists():
|
||||
# Check if lock file is older than stale_timeout
|
||||
lock_age = time.time() - self.lock_path.stat().st_mtime
|
||||
if lock_age > self.stale_timeout:
|
||||
logger.warning(
|
||||
f"Removing stale lock file: {self.lock_path} (age: {lock_age:.1f}s)"
|
||||
)
|
||||
self.lock_path.unlink()
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.error(f"Error cleaning up stale lock: {e}")
|
||||
|
||||
return False
|
||||
|
||||
def is_stale(self) -> bool:
|
||||
"""
|
||||
Check if the lock file is stale (older than stale_timeout).
|
||||
|
||||
Returns:
|
||||
True if lock is stale, False otherwise
|
||||
"""
|
||||
try:
|
||||
if self.lock_path.exists():
|
||||
lock_age = time.time() - self.lock_path.stat().st_mtime
|
||||
return lock_age > self.stale_timeout
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return False
|
||||
|
||||
def acquire(self, timeout: Optional[int] = None) -> bool:
    """
    Acquire the file lock using fcntl.flock().

    Uses non-blocking LOCK_NB attempts in a polling loop (sleeping
    ``poll_interval`` between tries) so the timeout can be enforced,
    rather than blocking indefinitely inside flock().

    Args:
        timeout: Maximum time to wait for lock in seconds (uses instance timeout if None)

    Returns:
        True if lock acquired, False if timeout or error
    """
    # Re-entrant calls are treated as success without re-locking.
    if self._acquired:
        logger.warning(f"Lock already acquired: {self.lock_path}")
        return True

    if timeout is None:
        timeout = self.timeout

    # Ensure parent directory exists
    self.lock_path.parent.mkdir(parents=True, exist_ok=True)

    # Clean up stale lock before attempting to acquire
    self._cleanup_stale_lock()

    try:
        # O_CREAT: creating the file is fine; flock() below provides the
        # actual mutual exclusion, not file existence.
        self._fd = os.open(self.lock_path, os.O_CREAT | os.O_RDWR)

        start_time = time.time()
        while time.time() - start_time < timeout:
            try:
                # Non-blocking attempt; raises if another process holds it.
                fcntl.flock(self._fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
                self._acquired = True
                logger.debug(f"Acquired lock: {self.lock_path}")
                return True
            except (OSError, IOError):
                # Lock is held by another process
                # Second timeout check: the sleep below could otherwise
                # overshoot the deadline before the while-condition re-runs.
                if time.time() - start_time >= timeout:
                    logger.warning(f"Timeout waiting for lock: {self.lock_path}")
                    os.close(self._fd)
                    self._fd = None
                    return False

                time.sleep(self.poll_interval)

        # Timeout reached
        if self._fd is not None:
            os.close(self._fd)
            self._fd = None
        return False

    except Exception as e:
        logger.error(f"Error acquiring lock: {e}")
        # Never leak the descriptor on an unexpected failure.
        if self._fd is not None:
            try:
                os.close(self._fd)
            except Exception:
                pass
            self._fd = None
        return False
|
||||
|
||||
def release(self) -> None:
    """
    Release the file lock.

    This closes the file descriptor (dropping the flock) and removes the
    lock file. Safe to call when the lock was never acquired (no-op), and
    ``_acquired`` is always cleared, even if cleanup partially fails.
    """
    if not self._acquired:
        return

    try:
        # Close file descriptor and release fcntl lock
        if self._fd is not None:
            try:
                fcntl.flock(self._fd, fcntl.LOCK_UN)
                os.close(self._fd)
            except Exception as e:
                logger.warning(f"Error closing lock file descriptor: {e}")
            finally:
                # Descriptor is unusable either way; forget it.
                self._fd = None

        # Remove lock file
        if self.lock_path.exists():
            self.lock_path.unlink()
            logger.debug(f"Released lock: {self.lock_path}")

    except FileNotFoundError:
        # Lock file already removed, that's fine
        pass
    except Exception as e:
        logger.error(f"Error releasing lock: {e}")
    finally:
        # Mark released unconditionally so a failed cleanup cannot wedge
        # this instance into a permanently-"acquired" state.
        self._acquired = False
|
||||
|
||||
def __enter__(self):
    """Context manager entry: block until the lock is held, or raise.

    Raises:
        TimeoutError: when the lock cannot be acquired within the timeout.
    """
    got_lock = self.acquire()
    if got_lock:
        return self
    raise TimeoutError(f"Failed to acquire lock: {self.lock_path}")
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
    """Context manager exit: always release the lock."""
    self.release()
    # Returning False lets any in-flight exception propagate to the caller.
    return False
|
||||
|
||||
def __del__(self):
    """Finalizer: best-effort release if the lock is still held."""
    if not self._acquired:
        return
    self.release()
|
||||
@ -1,62 +0,0 @@
|
||||
"""Path utilities."""
|
||||
|
||||
import base64
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
import cv2
|
||||
from numpy import ndarray
|
||||
|
||||
from frigate.const import CLIPS_DIR, THUMB_DIR
|
||||
from frigate.models import Event
|
||||
|
||||
|
||||
def get_event_thumbnail_bytes(event: Event) -> bytes | None:
    """Return the event's thumbnail as raw bytes.

    Prefers the inline base64 thumbnail stored on the event row; otherwise
    falls back to reading the on-disk webp thumbnail. Returns None when
    neither source is available.
    """
    if event.thumbnail:
        return base64.b64decode(event.thumbnail)

    thumb_file = os.path.join(THUMB_DIR, event.camera, f"{event.id}.webp")
    try:
        with open(thumb_file, "rb") as f:
            return f.read()
    except Exception:
        # Missing or unreadable file — caller handles the None.
        return None
|
||||
|
||||
|
||||
def get_event_snapshot(event: Event) -> ndarray:
    """Load the event's snapshot jpg from the clips directory."""
    stem = f"{event.camera}-{event.id}"
    snapshot_path = f"{os.path.join(CLIPS_DIR, stem)}.jpg"
    return cv2.imread(snapshot_path)
|
||||
|
||||
|
||||
### Deletion
|
||||
|
||||
|
||||
def delete_event_images(event: Event) -> bool:
    """Delete both the snapshot and the thumbnail for an event.

    Both deletions are always attempted: a plain ``and`` would short-circuit
    and skip the thumbnail delete whenever the snapshot delete fails,
    leaving an orphaned thumbnail on disk.

    Returns:
        True only if both deletions succeeded.
    """
    snapshot_deleted = delete_event_snapshot(event)
    thumbnail_deleted = delete_event_thumbnail(event)
    return snapshot_deleted and thumbnail_deleted
|
||||
|
||||
|
||||
def delete_event_snapshot(event: Event) -> bool:
    """Remove the snapshot jpg and its "clean" companion images.

    Returns:
        True on success, False when an OSError occurred.
    """
    stem = os.path.join(CLIPS_DIR, f"{event.camera}-{event.id}")
    try:
        # "-clean.png" is the legacy format, kept for backward compatibility.
        for suffix in (".jpg", "-clean.webp", "-clean.png"):
            Path(f"{stem}{suffix}").unlink(missing_ok=True)
        return True
    except OSError:
        return False
|
||||
|
||||
|
||||
def delete_event_thumbnail(event: Event) -> bool:
    """Delete the on-disk thumbnail unless it is stored inline on the event.

    Always returns True: an inline thumbnail needs no file cleanup, and the
    on-disk unlink tolerates a missing file.
    """
    if not event.thumbnail:
        thumb = Path(os.path.join(THUMB_DIR, event.camera, f"{event.id}.webp"))
        thumb.unlink(missing_ok=True)
    return True
|
||||
@ -1,6 +1,5 @@
|
||||
"""RKNN model conversion utility for Frigate."""
|
||||
|
||||
import fcntl
|
||||
import logging
|
||||
import os
|
||||
import subprocess
|
||||
@ -9,6 +8,8 @@ import time
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
from frigate.util.file import FileLock
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
MODEL_TYPE_CONFIGS = {
|
||||
@ -245,112 +246,6 @@ def convert_onnx_to_rknn(
|
||||
logger.warning(f"Failed to remove temporary ONNX file: {e}")
|
||||
|
||||
|
||||
def cleanup_stale_lock(lock_file_path: Path) -> bool:
    """
    Remove a conversion lock file that is older than 10 minutes.

    Args:
        lock_file_path: Path to the lock file

    Returns:
        True if a stale lock was removed, False otherwise (missing file,
        fresh file, or any error).
    """
    try:
        if not lock_file_path.exists():
            return False

        age = time.time() - lock_file_path.stat().st_mtime
        if age <= 600:  # anything younger than 10 minutes is considered live
            return False

        logger.warning(
            f"Removing stale lock file: {lock_file_path} (age: {age:.1f}s)"
        )
        lock_file_path.unlink()
        return True
    except Exception as e:
        logger.error(f"Error cleaning up stale lock: {e}")
        return False
|
||||
|
||||
|
||||
def acquire_conversion_lock(lock_file_path: Path, timeout: int = 300) -> bool:
    """
    Acquire a file-based lock for model conversion.

    Polls flock() with LOCK_NB once per second so the timeout can be
    enforced instead of blocking indefinitely.

    Args:
        lock_file_path: Path to the lock file
        timeout: Maximum time to wait for lock in seconds

    Returns:
        True if lock acquired, False if timeout or error

    NOTE(review): on success the descriptor is never stored or closed, so
    the flock is held until process exit — presumably intentional for a
    one-shot conversion, but verify against release_conversion_lock(),
    which only unlinks the file and cannot drop the flock itself.
    """
    try:
        lock_file_path.parent.mkdir(parents=True, exist_ok=True)
        # Clear any leftover lock from a crashed converter first.
        cleanup_stale_lock(lock_file_path)
        lock_fd = os.open(lock_file_path, os.O_CREAT | os.O_RDWR)

        # Try to acquire exclusive lock
        start_time = time.time()
        while time.time() - start_time < timeout:
            try:
                fcntl.flock(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
                # Lock acquired successfully
                logger.debug(f"Acquired conversion lock: {lock_file_path}")
                return True
            except (OSError, IOError):
                # Lock is held by another process, wait and retry
                # Re-check the deadline here: the sleep below could
                # otherwise overshoot before the while-condition re-runs.
                if time.time() - start_time >= timeout:
                    logger.warning(
                        f"Timeout waiting for conversion lock: {lock_file_path}"
                    )
                    os.close(lock_fd)
                    return False

                logger.debug("Waiting for conversion lock to be released...")
                time.sleep(1)

        os.close(lock_fd)
        return False

    except Exception as e:
        logger.error(f"Error acquiring conversion lock: {e}")
        return False
|
||||
|
||||
|
||||
def release_conversion_lock(lock_file_path: Path) -> None:
    """
    Release the conversion lock by removing its lock file (best effort).

    Args:
        lock_file_path: Path to the lock file
    """
    try:
        if not lock_file_path.exists():
            return
        lock_file_path.unlink()
        logger.debug(f"Released conversion lock: {lock_file_path}")
    except Exception as e:
        logger.error(f"Error releasing conversion lock: {e}")
|
||||
|
||||
|
||||
def is_lock_stale(lock_file_path: Path, max_age: int = 600) -> bool:
    """
    Check whether a lock file is stale.

    Args:
        lock_file_path: Path to the lock file
        max_age: Maximum age in seconds before considering lock stale

    Returns:
        True if the lock file exists and is older than max_age seconds;
        False otherwise (including on any error).
    """
    try:
        if not lock_file_path.exists():
            return False
        return (time.time() - lock_file_path.stat().st_mtime) > max_age
    except Exception:
        return False
|
||||
|
||||
|
||||
def wait_for_conversion_completion(
|
||||
model_type: str, rknn_path: Path, lock_file_path: Path, timeout: int = 300
|
||||
) -> bool:
|
||||
@ -358,6 +253,7 @@ def wait_for_conversion_completion(
|
||||
Wait for another process to complete the conversion.
|
||||
|
||||
Args:
|
||||
model_type: Type of model being converted
|
||||
rknn_path: Path to the expected RKNN model
|
||||
lock_file_path: Path to the lock file to monitor
|
||||
timeout: Maximum time to wait in seconds
|
||||
@ -366,6 +262,8 @@ def wait_for_conversion_completion(
|
||||
True if RKNN model appears, False if timeout
|
||||
"""
|
||||
start_time = time.time()
|
||||
lock = FileLock(lock_file_path, stale_timeout=600)
|
||||
|
||||
while time.time() - start_time < timeout:
|
||||
# Check if RKNN model appeared
|
||||
if rknn_path.exists():
|
||||
@ -385,11 +283,14 @@ def wait_for_conversion_completion(
|
||||
return False
|
||||
|
||||
# Check if lock is stale
|
||||
if is_lock_stale(lock_file_path):
|
||||
if lock.is_stale():
|
||||
logger.warning("Lock file is stale, attempting to clean up and retry...")
|
||||
cleanup_stale_lock(lock_file_path)
|
||||
lock._cleanup_stale_lock()
|
||||
# Try to acquire lock again
|
||||
if acquire_conversion_lock(lock_file_path, timeout=60):
|
||||
retry_lock = FileLock(
|
||||
lock_file_path, timeout=60, cleanup_stale_on_init=True
|
||||
)
|
||||
if retry_lock.acquire():
|
||||
try:
|
||||
# Check if RKNN file appeared while waiting
|
||||
if rknn_path.exists():
|
||||
@ -415,7 +316,7 @@ def wait_for_conversion_completion(
|
||||
return False
|
||||
|
||||
finally:
|
||||
release_conversion_lock(lock_file_path)
|
||||
retry_lock.release()
|
||||
|
||||
logger.debug("Waiting for RKNN model to appear...")
|
||||
time.sleep(1)
|
||||
@ -452,8 +353,9 @@ def auto_convert_model(
|
||||
return str(rknn_path)
|
||||
|
||||
lock_file_path = base_path.parent / f"{base_name}.conversion.lock"
|
||||
lock = FileLock(lock_file_path, timeout=300, cleanup_stale_on_init=True)
|
||||
|
||||
if acquire_conversion_lock(lock_file_path):
|
||||
if lock.acquire():
|
||||
try:
|
||||
if rknn_path.exists():
|
||||
logger.info(
|
||||
@ -476,7 +378,7 @@ def auto_convert_model(
|
||||
return None
|
||||
|
||||
finally:
|
||||
release_conversion_lock(lock_file_path)
|
||||
lock.release()
|
||||
else:
|
||||
logger.info(
|
||||
f"Another process is converting {model_path}, waiting for completion..."
|
||||
|
||||
@ -1,5 +1,8 @@
|
||||
{
|
||||
"documentTitle": "Classification Models",
|
||||
"details": {
|
||||
"scoreInfo": "Score represents the average classification confidence across all detections of this object."
|
||||
},
|
||||
"button": {
|
||||
"deleteClassificationAttempts": "Delete Classification Images",
|
||||
"renameCategory": "Rename Class",
|
||||
|
||||
@ -6,7 +6,8 @@
|
||||
},
|
||||
"details": {
|
||||
"timestamp": "Timestamp",
|
||||
"unknown": "Unknown"
|
||||
"unknown": "Unknown",
|
||||
"scoreInfo": "Score is a weighted average of all face scores, weighted by the size of the face in each image."
|
||||
},
|
||||
"documentTitle": "Face Library - Frigate",
|
||||
"uploadFaceImage": {
|
||||
|
||||
@ -11,7 +11,8 @@ import { isDesktop, isMobile } from "react-device-detect";
|
||||
import { useTranslation } from "react-i18next";
|
||||
import TimeAgo from "../dynamic/TimeAgo";
|
||||
import { Tooltip, TooltipContent, TooltipTrigger } from "../ui/tooltip";
|
||||
import { LuSearch } from "react-icons/lu";
|
||||
import { Popover, PopoverContent, PopoverTrigger } from "../ui/popover";
|
||||
import { LuSearch, LuInfo } from "react-icons/lu";
|
||||
import { TooltipPortal } from "@radix-ui/react-tooltip";
|
||||
import { useNavigate } from "react-router-dom";
|
||||
import { HiSquare2Stack } from "react-icons/hi2";
|
||||
@ -181,6 +182,7 @@ type GroupedClassificationCardProps = {
|
||||
selectedItems: string[];
|
||||
i18nLibrary: string;
|
||||
objectType: string;
|
||||
noClassificationLabel?: string;
|
||||
onClick: (data: ClassificationItemData | undefined) => void;
|
||||
children?: (data: ClassificationItemData) => React.ReactNode;
|
||||
};
|
||||
@ -190,6 +192,7 @@ export function GroupedClassificationCard({
|
||||
threshold,
|
||||
selectedItems,
|
||||
i18nLibrary,
|
||||
noClassificationLabel = "details.none",
|
||||
onClick,
|
||||
children,
|
||||
}: GroupedClassificationCardProps) {
|
||||
@ -222,10 +225,14 @@ export function GroupedClassificationCard({
|
||||
const bestTyped: ClassificationItemData = best;
|
||||
return {
|
||||
...bestTyped,
|
||||
name: event ? (event.sub_label ?? t("details.unknown")) : bestTyped.name,
|
||||
name: event
|
||||
? event.sub_label && event.sub_label !== "none"
|
||||
? event.sub_label
|
||||
: t(noClassificationLabel)
|
||||
: bestTyped.name,
|
||||
score: event?.data?.sub_label_score || bestTyped.score,
|
||||
};
|
||||
}, [group, event, t]);
|
||||
}, [group, event, noClassificationLabel, t]);
|
||||
|
||||
const bestScoreStatus = useMemo(() => {
|
||||
if (!bestItem?.score || !threshold) {
|
||||
@ -311,16 +318,35 @@ export function GroupedClassificationCard({
|
||||
isMobile && "px-2",
|
||||
)}
|
||||
>
|
||||
{event?.sub_label ? event.sub_label : t("details.unknown")}
|
||||
{event?.sub_label && (
|
||||
<div
|
||||
className={cn(
|
||||
"",
|
||||
bestScoreStatus == "match" && "text-success",
|
||||
bestScoreStatus == "potential" && "text-orange-400",
|
||||
bestScoreStatus == "unknown" && "text-danger",
|
||||
)}
|
||||
>{`${Math.round((event.data.sub_label_score || 0) * 100)}%`}</div>
|
||||
{event?.sub_label && event.sub_label !== "none"
|
||||
? event.sub_label
|
||||
: t(noClassificationLabel)}
|
||||
{event?.sub_label && event.sub_label !== "none" && (
|
||||
<div className="flex items-center gap-1">
|
||||
<div
|
||||
className={cn(
|
||||
"",
|
||||
bestScoreStatus == "match" && "text-success",
|
||||
bestScoreStatus == "potential" && "text-orange-400",
|
||||
bestScoreStatus == "unknown" && "text-danger",
|
||||
)}
|
||||
>{`${Math.round((event.data.sub_label_score || 0) * 100)}%`}</div>
|
||||
<Popover>
|
||||
<PopoverTrigger asChild>
|
||||
<button
|
||||
className="focus:outline-none"
|
||||
aria-label={t("details.scoreInfo", {
|
||||
ns: i18nLibrary,
|
||||
})}
|
||||
>
|
||||
<LuInfo className="size-3" />
|
||||
</button>
|
||||
</PopoverTrigger>
|
||||
<PopoverContent className="w-80 text-sm">
|
||||
{t("details.scoreInfo", { ns: i18nLibrary })}
|
||||
</PopoverContent>
|
||||
</Popover>
|
||||
</div>
|
||||
)}
|
||||
</ContentTitle>
|
||||
<ContentDescription className={cn("", isMobile && "px-2")}>
|
||||
|
||||
@ -37,6 +37,7 @@ import { capitalizeFirstLetter } from "@/utils/stringUtil";
|
||||
import { Button, buttonVariants } from "../ui/button";
|
||||
import { Trans, useTranslation } from "react-i18next";
|
||||
import { cn } from "@/lib/utils";
|
||||
import { LuCircle } from "react-icons/lu";
|
||||
|
||||
type ReviewCardProps = {
|
||||
event: ReviewSegment;
|
||||
@ -142,7 +143,7 @@ export default function ReviewCard({
|
||||
className={cn(
|
||||
"size-full rounded-lg",
|
||||
activeReviewItem?.id == event.id &&
|
||||
"outline outline-[3px] outline-offset-1 outline-selected",
|
||||
"outline outline-[3px] -outline-offset-[2.8px] outline-selected duration-200",
|
||||
imgLoaded ? "visible" : "invisible",
|
||||
)}
|
||||
src={`${baseUrl}${event.thumb_path.replace("/media/frigate/", "")}`}
|
||||
@ -165,6 +166,14 @@ export default function ReviewCard({
|
||||
<TooltipTrigger asChild>
|
||||
<div className="flex items-center justify-evenly gap-1">
|
||||
<>
|
||||
<LuCircle
|
||||
className={cn(
|
||||
"size-2",
|
||||
event.severity == "alert"
|
||||
? "fill-severity_alert text-severity_alert"
|
||||
: "fill-severity_detection text-severity_detection",
|
||||
)}
|
||||
/>
|
||||
{event.data.objects.map((object) => {
|
||||
return getIconForLabel(
|
||||
object,
|
||||
|
||||
@ -8,7 +8,7 @@ import {
|
||||
FormMessage,
|
||||
} from "@/components/ui/form";
|
||||
import { Input } from "@/components/ui/input";
|
||||
import { useState, useEffect } from "react";
|
||||
import { useState, useEffect, useRef } from "react";
|
||||
import { useFormContext } from "react-hook-form";
|
||||
import { generateFixedHash, isValidId } from "@/utils/stringUtil";
|
||||
import { useTranslation } from "react-i18next";
|
||||
@ -41,8 +41,9 @@ export default function NameAndIdFields<T extends FieldValues = FieldValues>({
|
||||
placeholderId,
|
||||
}: NameAndIdFieldsProps<T>) {
|
||||
const { t } = useTranslation(["common"]);
|
||||
const { watch, setValue, trigger } = useFormContext<T>();
|
||||
const { watch, setValue, trigger, formState } = useFormContext<T>();
|
||||
const [isIdVisible, setIsIdVisible] = useState(false);
|
||||
const hasUserTypedRef = useRef(false);
|
||||
|
||||
const defaultProcessId = (name: string) => {
|
||||
const normalized = name.replace(/\s+/g, "_").toLowerCase();
|
||||
@ -58,6 +59,7 @@ export default function NameAndIdFields<T extends FieldValues = FieldValues>({
|
||||
useEffect(() => {
|
||||
const subscription = watch((value, { name }) => {
|
||||
if (name === nameField) {
|
||||
hasUserTypedRef.current = true;
|
||||
const processedId = effectiveProcessId(value[nameField] || "");
|
||||
setValue(idField, processedId as PathValue<T, Path<T>>);
|
||||
trigger(idField);
|
||||
@ -66,6 +68,14 @@ export default function NameAndIdFields<T extends FieldValues = FieldValues>({
|
||||
return () => subscription.unsubscribe();
|
||||
}, [watch, setValue, trigger, nameField, idField, effectiveProcessId]);
|
||||
|
||||
// Auto-expand if there's an error on the ID field after user has typed
|
||||
useEffect(() => {
|
||||
const idError = formState.errors[idField];
|
||||
if (idError && hasUserTypedRef.current && !isIdVisible) {
|
||||
setIsIdVisible(true);
|
||||
}
|
||||
}, [formState.errors, idField, isIdVisible]);
|
||||
|
||||
return (
|
||||
<>
|
||||
<FormField
|
||||
|
||||
@ -289,6 +289,7 @@ export default function VideoControls({
|
||||
}}
|
||||
onUploadFrame={onUploadFrame}
|
||||
containerRef={containerRef}
|
||||
fullscreen={fullscreen}
|
||||
/>
|
||||
)}
|
||||
{features.fullscreen && toggleFullscreen && (
|
||||
@ -306,6 +307,7 @@ type FrigatePlusUploadButtonProps = {
|
||||
onClose: () => void;
|
||||
onUploadFrame: () => void;
|
||||
containerRef?: React.MutableRefObject<HTMLDivElement | null>;
|
||||
fullscreen?: boolean;
|
||||
};
|
||||
function FrigatePlusUploadButton({
|
||||
video,
|
||||
@ -313,6 +315,7 @@ function FrigatePlusUploadButton({
|
||||
onClose,
|
||||
onUploadFrame,
|
||||
containerRef,
|
||||
fullscreen,
|
||||
}: FrigatePlusUploadButtonProps) {
|
||||
const { t } = useTranslation(["components/player"]);
|
||||
|
||||
@ -349,7 +352,11 @@ function FrigatePlusUploadButton({
|
||||
/>
|
||||
</AlertDialogTrigger>
|
||||
<AlertDialogContent
|
||||
portalProps={{ container: containerRef?.current }}
|
||||
portalProps={
|
||||
fullscreen && containerRef?.current
|
||||
? { container: containerRef.current }
|
||||
: undefined
|
||||
}
|
||||
className="md:max-w-2xl lg:max-w-3xl xl:max-w-4xl"
|
||||
>
|
||||
<AlertDialogHeader>
|
||||
|
||||
@ -367,7 +367,11 @@ function ReviewGroup({
|
||||
return (
|
||||
<div
|
||||
data-review-id={id}
|
||||
className="cursor-pointer rounded-lg bg-secondary py-3"
|
||||
className={`mx-1 cursor-pointer rounded-lg bg-secondary px-0 py-3 outline outline-[2px] -outline-offset-[1.8px] ${
|
||||
isActive
|
||||
? "shadow-selected outline-selected"
|
||||
: "outline-transparent duration-500"
|
||||
}`}
|
||||
>
|
||||
<div
|
||||
className={cn(
|
||||
@ -382,10 +386,10 @@ function ReviewGroup({
|
||||
<div className="ml-4 mr-2 mt-1.5 flex flex-row items-start">
|
||||
<LuCircle
|
||||
className={cn(
|
||||
"size-3",
|
||||
isActive
|
||||
? "fill-selected text-selected"
|
||||
: "fill-muted duration-500 dark:fill-secondary-highlight dark:text-secondary-highlight",
|
||||
"size-3 duration-500",
|
||||
review.severity == "alert"
|
||||
? "fill-severity_alert text-severity_alert"
|
||||
: "fill-severity_detection text-severity_detection",
|
||||
)}
|
||||
/>
|
||||
</div>
|
||||
|
||||
@ -845,6 +845,7 @@ function FaceAttemptGroup({
|
||||
selectedItems={selectedFaces}
|
||||
i18nLibrary="views/faceLibrary"
|
||||
objectType="person"
|
||||
noClassificationLabel="details.unknown"
|
||||
onClick={(data) => {
|
||||
if (data) {
|
||||
onClickFaces([data.filename], true);
|
||||
|
||||
@ -43,5 +43,5 @@ export function generateFixedHash(name: string, prefix: string = "id"): string {
|
||||
* @returns True if the name is valid, false otherwise
|
||||
*/
|
||||
export function isValidId(name: string): boolean {
|
||||
return /^[a-zA-Z0-9_-]+$/.test(name);
|
||||
return /^[a-zA-Z0-9_-]+$/.test(name) && !/^\d+$/.test(name);
|
||||
}
|
||||
|
||||
@ -961,6 +961,7 @@ function ObjectTrainGrid({
|
||||
selectedItems={selectedImages}
|
||||
i18nLibrary="views/classificationModel"
|
||||
objectType={model.object_config?.objects?.at(0) ?? "Object"}
|
||||
noClassificationLabel="details.none"
|
||||
onClick={(data) => {
|
||||
if (data) {
|
||||
onClickImages([data.filename], true);
|
||||
|
||||
@ -136,7 +136,7 @@ export default function EventView({
|
||||
|
||||
const [selectedReviews, setSelectedReviews] = useState<ReviewSegment[]>([]);
|
||||
const onSelectReview = useCallback(
|
||||
(review: ReviewSegment, ctrl: boolean) => {
|
||||
(review: ReviewSegment, ctrl: boolean, detail: boolean) => {
|
||||
if (selectedReviews.length > 0 || ctrl) {
|
||||
const index = selectedReviews.findIndex((r) => r.id === review.id);
|
||||
|
||||
@ -156,17 +156,31 @@ export default function EventView({
|
||||
setSelectedReviews(copy);
|
||||
}
|
||||
} else {
|
||||
// If a specific date is selected in the calendar and it's after the event start,
|
||||
// use the selected date instead of the event start time
|
||||
const effectiveStartTime =
|
||||
timeRange.after > review.start_time
|
||||
? timeRange.after
|
||||
: review.start_time;
|
||||
|
||||
onOpenRecording({
|
||||
camera: review.camera,
|
||||
startTime: review.start_time - REVIEW_PADDING,
|
||||
startTime: effectiveStartTime - REVIEW_PADDING,
|
||||
severity: review.severity,
|
||||
timelineType: detail ? "detail" : undefined,
|
||||
});
|
||||
|
||||
review.has_been_reviewed = true;
|
||||
markItemAsReviewed(review);
|
||||
}
|
||||
},
|
||||
[selectedReviews, setSelectedReviews, onOpenRecording, markItemAsReviewed],
|
||||
[
|
||||
selectedReviews,
|
||||
setSelectedReviews,
|
||||
onOpenRecording,
|
||||
markItemAsReviewed,
|
||||
timeRange.after,
|
||||
],
|
||||
);
|
||||
const onSelectAllReviews = useCallback(() => {
|
||||
if (!currentReviewItems || currentReviewItems.length == 0) {
|
||||
@ -402,7 +416,6 @@ export default function EventView({
|
||||
onSelectAllReviews={onSelectAllReviews}
|
||||
setSelectedReviews={setSelectedReviews}
|
||||
pullLatestData={pullLatestData}
|
||||
onOpenRecording={onOpenRecording}
|
||||
/>
|
||||
)}
|
||||
{severity == "significant_motion" && (
|
||||
@ -442,11 +455,14 @@ type DetectionReviewProps = {
|
||||
loading: boolean;
|
||||
markItemAsReviewed: (review: ReviewSegment) => void;
|
||||
markAllItemsAsReviewed: (currentItems: ReviewSegment[]) => void;
|
||||
onSelectReview: (review: ReviewSegment, ctrl: boolean) => void;
|
||||
onSelectReview: (
|
||||
review: ReviewSegment,
|
||||
ctrl: boolean,
|
||||
detail: boolean,
|
||||
) => void;
|
||||
onSelectAllReviews: () => void;
|
||||
setSelectedReviews: (reviews: ReviewSegment[]) => void;
|
||||
pullLatestData: () => void;
|
||||
onOpenRecording: (recordingInfo: RecordingStartingPoint) => void;
|
||||
};
|
||||
function DetectionReview({
|
||||
contentRef,
|
||||
@ -466,7 +482,6 @@ function DetectionReview({
|
||||
onSelectAllReviews,
|
||||
setSelectedReviews,
|
||||
pullLatestData,
|
||||
onOpenRecording,
|
||||
}: DetectionReviewProps) {
|
||||
const { t } = useTranslation(["views/events"]);
|
||||
|
||||
@ -758,16 +773,7 @@ function DetectionReview({
|
||||
ctrl: boolean,
|
||||
detail: boolean,
|
||||
) => {
|
||||
if (detail) {
|
||||
onOpenRecording({
|
||||
camera: review.camera,
|
||||
startTime: review.start_time - REVIEW_PADDING,
|
||||
severity: review.severity,
|
||||
timelineType: "detail",
|
||||
});
|
||||
} else {
|
||||
onSelectReview(review, ctrl);
|
||||
}
|
||||
onSelectReview(review, ctrl, detail);
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user