feat(aurora): add detection overlays with face/plate boxes in compare UI
This commit is contained in:
@@ -10,6 +10,7 @@ import os
|
|||||||
import re
|
import re
|
||||||
import sys
|
import sys
|
||||||
import subprocess
|
import subprocess
|
||||||
|
import mimetypes
|
||||||
import time
|
import time
|
||||||
import uuid
|
import uuid
|
||||||
import logging
|
import logging
|
||||||
@@ -27,6 +28,11 @@ from fastapi.responses import HTMLResponse, StreamingResponse, JSONResponse
|
|||||||
from fastapi.middleware.cors import CORSMiddleware
|
from fastapi.middleware.cors import CORSMiddleware
|
||||||
from pydantic import BaseModel
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
try:
|
||||||
|
import cv2 # type: ignore[import-untyped]
|
||||||
|
except Exception: # pragma: no cover - optional dependency in console env
|
||||||
|
cv2 = None
|
||||||
|
|
||||||
from .auth import (
|
from .auth import (
|
||||||
require_api_key, require_api_key_strict, require_auth, require_auth_strict,
|
require_api_key, require_api_key_strict, require_auth, require_auth_strict,
|
||||||
get_console_api_key, _key_valid, _cookie_token, _expected_cookie_token,
|
get_console_api_key, _key_valid, _cookie_token, _expected_cookie_token,
|
||||||
@@ -1371,6 +1377,12 @@ async def api_aurora_compare(job_id: str) -> Dict[str, Any]:
|
|||||||
before["file_size_mb"] = round(inp.stat().st_size / (1024 * 1024), 2)
|
before["file_size_mb"] = round(inp.stat().st_size / (1024 * 1024), 2)
|
||||||
_probe = _ffprobe_quick(inp) if inp.exists() else {}
|
_probe = _ffprobe_quick(inp) if inp.exists() else {}
|
||||||
if _probe:
|
if _probe:
|
||||||
|
before["resolution"] = _probe.get("resolution", before["resolution"])
|
||||||
|
before["width"] = _probe.get("width", before["width"])
|
||||||
|
before["height"] = _probe.get("height", before["height"])
|
||||||
|
before["duration_s"] = _probe.get("duration_s", before["duration_s"])
|
||||||
|
before["fps"] = _probe.get("fps", before["fps"])
|
||||||
|
before["frame_count"] = _probe.get("frame_count", before["frame_count"])
|
||||||
before["codec"] = _probe.get("codec", "—")
|
before["codec"] = _probe.get("codec", "—")
|
||||||
|
|
||||||
result_file = None
|
result_file = None
|
||||||
@@ -1428,6 +1440,12 @@ async def api_aurora_compare(job_id: str) -> Dict[str, Any]:
|
|||||||
output_path=output_media_path,
|
output_path=output_media_path,
|
||||||
output_dir=Path(output_dir) if output_dir else None,
|
output_dir=Path(output_dir) if output_dir else None,
|
||||||
)
|
)
|
||||||
|
detections = await _aurora_build_compare_detections(
|
||||||
|
media_type=str(status.get("media_type") or ""),
|
||||||
|
output_dir=Path(output_dir) if output_dir else None,
|
||||||
|
frame_preview=frame_preview,
|
||||||
|
fps=before.get("fps") or after.get("fps"),
|
||||||
|
)
|
||||||
|
|
||||||
return {
|
return {
|
||||||
"job_id": job_id,
|
"job_id": job_id,
|
||||||
@@ -1440,6 +1458,7 @@ async def api_aurora_compare(job_id: str) -> Dict[str, Any]:
|
|||||||
"faces_detected": faces_total,
|
"faces_detected": faces_total,
|
||||||
"enhance_steps": enhance_steps,
|
"enhance_steps": enhance_steps,
|
||||||
"frame_preview": frame_preview,
|
"frame_preview": frame_preview,
|
||||||
|
"detections": detections,
|
||||||
"folder_path": output_dir,
|
"folder_path": output_dir,
|
||||||
"input_path": input_path,
|
"input_path": input_path,
|
||||||
}
|
}
|
||||||
@@ -1530,6 +1549,236 @@ def _aurora_ensure_compare_frame_preview(
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def _aurora_bbox_xyxy(raw_bbox: Any) -> Optional[List[int]]:
|
||||||
|
if not isinstance(raw_bbox, (list, tuple)) or len(raw_bbox) < 4:
|
||||||
|
return None
|
||||||
|
try:
|
||||||
|
x1 = int(float(raw_bbox[0]))
|
||||||
|
y1 = int(float(raw_bbox[1]))
|
||||||
|
x2 = int(float(raw_bbox[2]))
|
||||||
|
y2 = int(float(raw_bbox[3]))
|
||||||
|
except Exception:
|
||||||
|
return None
|
||||||
|
if x2 < x1:
|
||||||
|
x1, x2 = x2, x1
|
||||||
|
if y2 < y1:
|
||||||
|
y1, y2 = y2, y1
|
||||||
|
if x2 <= x1 or y2 <= y1:
|
||||||
|
return None
|
||||||
|
return [x1, y1, x2, y2]
|
||||||
|
|
||||||
|
|
||||||
|
def _aurora_image_dims(path: Path) -> Optional[Dict[str, int]]:
    """Read an image from disk and report its pixel dimensions.

    Returns ``{"width": ..., "height": ...}``, or ``None`` when OpenCV is
    unavailable, the file is missing, or the image cannot be decoded.
    Best-effort helper: every failure mode collapses to ``None``.
    """
    if cv2 is None or not path.exists():
        return None
    try:
        decoded = cv2.imread(str(path), cv2.IMREAD_COLOR)
        if decoded is None:
            return None
        height, width = decoded.shape[:2]
        return {"width": int(width), "height": int(height)} if width > 0 and height > 0 else None
    except Exception:
        return None
|
||||||
|
|
||||||
|
|
||||||
|
def _aurora_detect_faces_from_preview(path: Path) -> List[Dict[str, Any]]:
    """Detect faces in a preview frame using OpenCV's Haar cascade.

    Returns a list of ``{"bbox": [x1, y1, x2, y2], "confidence": float}``
    dicts, or an empty list when OpenCV is unavailable, the file is
    missing, the cascade cannot be loaded, or detection fails for any
    reason (best-effort helper — never raises).
    """
    if cv2 is None or not path.exists():
        return []
    try:
        frame = cv2.imread(str(path), cv2.IMREAD_COLOR)
        if frame is None:
            return []
        gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
        # Ship-with-OpenCV frontal-face model; no extra download needed.
        cascade_path = Path(cv2.data.haarcascades) / "haarcascade_frontalface_default.xml"
        cascade = cv2.CascadeClassifier(str(cascade_path))
        if cascade.empty():
            return []
        faces = cascade.detectMultiScale(gray, scaleFactor=1.1, minNeighbors=4, minSize=(20, 20))
        out: List[Dict[str, Any]] = []
        # Cap at 40 faces to bound the payload size on crowded frames.
        for (x, y, w, h) in faces[:40]:
            roi = gray[y : y + h, x : x + w]
            # Haar cascades report no confidence, so use variance of the
            # Laplacian (a cheap sharpness proxy) to rank detections:
            # sharper face crops get a higher synthetic confidence.
            lap = float(cv2.Laplacian(roi, cv2.CV_64F).var()) if roi.size > 0 else 0.0
            # Heuristic confidence clamped to [0.5, 0.99].
            conf = max(0.5, min(0.99, 0.55 + (lap / 400.0)))
            out.append(
                {
                    # detectMultiScale yields (x, y, w, h); convert to xyxy.
                    "bbox": [int(x), int(y), int(x + w), int(y + h)],
                    "confidence": round(conf, 3),
                }
            )
        return out
    except Exception:
        return []
|
||||||
|
|
||||||
|
|
||||||
|
async def _aurora_detect_faces_via_service(path: Path) -> List[Dict[str, Any]]:
    """Ask the remote Aurora service for face boxes in *path*.

    Fallback used when local OpenCV detection is unavailable or finds
    nothing. Uploads the file to ``{AURORA_SERVICE_URL}/api/aurora/analyze``
    and converts the service's ``[x, y, w, h]`` face boxes into
    ``[x1, y1, x2, y2]`` form. Returns ``[]`` on any transport, HTTP, or
    parsing failure (best-effort — never raises).
    """
    if not path.exists():
        return []
    mime = mimetypes.guess_type(path.name)[0] or "application/octet-stream"
    timeout = httpx.Timeout(20.0, connect=6.0)
    try:
        async with httpx.AsyncClient(timeout=timeout) as client:
            with path.open("rb") as fh:
                files = {"file": (path.name, fh, mime)}
                resp = await client.post(f"{AURORA_SERVICE_URL}/api/aurora/analyze", files=files)
        if resp.status_code >= 400:
            return []
        payload = resp.json() if resp.content else {}
    except Exception:
        return []

    faces_raw = payload.get("faces")
    if not isinstance(faces_raw, list):
        return []
    out: List[Dict[str, Any]] = []
    # Cap at 60 items to bound the payload size.
    for item in faces_raw[:60]:
        if not isinstance(item, dict):
            continue
        bbox = item.get("bbox")
        if not isinstance(bbox, (list, tuple)) or len(bbox) < 4:
            continue
        try:
            # Service bbox format is presumably (x, y, width, height) —
            # matches the x+w / y+h conversion below; confirm against the
            # analyze endpoint's contract.
            x = int(float(bbox[0]))
            y = int(float(bbox[1]))
            w = int(float(bbox[2]))
            h = int(float(bbox[3]))
        except Exception:
            continue
        if w <= 1 or h <= 1:
            continue
        conf: Optional[float]
        try:
            # float(None) raises, so a missing confidence becomes None.
            conf = round(float(item.get("confidence")), 3)
        except Exception:
            conf = None
        out.append(
            {
                "bbox": [x, y, x + w, y + h],
                "confidence": conf,
            }
        )
    return out
|
||||||
|
|
||||||
|
|
||||||
|
def _aurora_select_plate_detections(
    output_dir: Path,
    *,
    target_frame: Optional[int],
    max_items: int = 12,
) -> List[Dict[str, Any]]:
    """Load plate detections saved by the pipeline and pick the best boxes.

    Reads ``plate_detections.json`` from *output_dir*, normalizes each
    entry to ``{"bbox", "text", "confidence", "frame"}``, then:

    * when *target_frame* is known and entries carry frame numbers, keeps
      detections from frames near the target (for overlay on the preview
      frame), ordered by frame distance then confidence;
    * otherwise returns the highest-confidence detections overall,
      tie-broken by plate text.

    Returns at most *max_items* entries; ``[]`` when the report is
    missing, unreadable, or empty.
    """
    report_path = output_dir / "plate_detections.json"
    if not report_path.exists():
        return []
    try:
        payload = json.loads(report_path.read_text(encoding="utf-8"))
    except Exception:
        return []
    # Fix: a non-dict top-level JSON value (e.g. a bare list) previously
    # raised AttributeError on .get() below; treat it as "no report".
    if not isinstance(payload, dict):
        return []

    # Prefer the full per-frame detection list; fall back to the
    # de-duplicated summary when that is all the pipeline wrote.
    detections = payload.get("detections")
    unique = payload.get("unique")
    if isinstance(detections, list) and detections:
        source_items: List[Any] = detections
    elif isinstance(unique, list) and unique:
        source_items = unique
    else:
        return []

    parsed: List[Dict[str, Any]] = []
    for item in source_items:
        if not isinstance(item, dict):
            continue
        bbox = _aurora_bbox_xyxy(item.get("bbox"))
        if not bbox:
            continue
        text_value = str(item.get("text") or "").strip()
        conf_value: Optional[float]
        try:
            conf_value = round(float(item.get("confidence")), 3)
        except Exception:
            conf_value = None
        frame_value: Optional[int]
        try:
            frame_value = int(item.get("frame")) if item.get("frame") is not None else None
        except Exception:
            frame_value = None
        parsed.append(
            {
                "bbox": bbox,
                "text": text_value or None,
                "confidence": conf_value,
                "frame": frame_value,
            }
        )

    if not parsed:
        return []

    with_frame = [x for x in parsed if x.get("frame") is not None]
    if target_frame is not None and with_frame:
        # Keep only detections close to the preview frame. The window is
        # the nearest observed distance + 2 (floor of 4 frames), so at
        # least one detection always survives the filter.
        min_distance = min(abs(int(x["frame"]) - int(target_frame)) for x in with_frame)
        keep = max(4, min_distance + 2)
        filtered = [x for x in with_frame if abs(int(x["frame"]) - int(target_frame)) <= keep]
        filtered.sort(key=lambda x: (abs(int(x["frame"]) - int(target_frame)), -(x.get("confidence") or 0.0)))
        return filtered[:max_items]

    parsed.sort(key=lambda x: (-(x.get("confidence") or 0.0), x.get("text") or ""))
    return parsed[:max_items]
|
||||||
|
|
||||||
|
|
||||||
|
async def _aurora_build_compare_detections(
    *,
    media_type: str,
    output_dir: Optional[Path],
    frame_preview: Optional[Dict[str, Any]],
    fps: Any,
) -> Optional[Dict[str, Any]]:
    """Build the face/plate detections payload for the compare UI.

    Runs face detection on the saved before/after preview frames (local
    Haar cascade first, remote Aurora service as fallback), computes the
    preview's frame index for videos, and attaches plate detections near
    that frame. Returns ``None`` when there is nothing to overlay (no
    output dir or no frame preview).
    """
    if not output_dir or not output_dir.exists():
        return None
    if not isinstance(frame_preview, dict):
        return None

    # Previews are written by the compare-frame extraction step under
    # these fixed names inside the job's output dir.
    before_path = output_dir / "_compare_before.jpg"
    after_path = output_dir / "_compare_after.jpg"
    before_faces = _aurora_detect_faces_from_preview(before_path)
    after_faces = _aurora_detect_faces_from_preview(after_path)
    # Fall back to the remote analyzer only when local detection found
    # nothing (cv2 missing, cascade failure, or genuinely no faces).
    if not before_faces and before_path.exists():
        before_faces = await _aurora_detect_faces_via_service(before_path)
    if not after_faces and after_path.exists():
        after_faces = await _aurora_detect_faces_via_service(after_path)
    before_size = _aurora_image_dims(before_path)
    after_size = _aurora_image_dims(after_path)

    target_ts = float(frame_preview.get("timestamp_sec") or 0.0)
    target_frame: Optional[int] = None
    if str(media_type).lower() == "video":
        # Map the preview timestamp to a frame index; default to 15 fps
        # when fps is missing, non-numeric, or non-positive.
        try:
            fps_val = float(fps)
        except Exception:
            fps_val = 15.0
        if fps_val <= 0:
            fps_val = 15.0
        target_frame = int(round(target_ts * fps_val))

    plate_items = _aurora_select_plate_detections(output_dir, target_frame=target_frame)

    return {
        "target_timestamp_sec": target_ts if str(media_type).lower() == "video" else None,
        "target_frame": target_frame,
        "before": {
            "frame_size": before_size,
            "faces": before_faces,
            # NOTE(review): the same plate boxes are reused for both
            # panels — assumes the enhanced frame keeps the input
            # resolution so the coordinates still line up; confirm.
            "plates": plate_items,
        },
        "after": {
            "frame_size": after_size,
            "faces": after_faces,
            "plates": plate_items,
        },
    }
|
||||||
|
|
||||||
|
|
||||||
def _ffprobe_quick(filepath: Path) -> Dict[str, Any]:
|
def _ffprobe_quick(filepath: Path) -> Dict[str, Any]:
|
||||||
"""Quick ffprobe for resolution, codec, duration, fps, frame count."""
|
"""Quick ffprobe for resolution, codec, duration, fps, frame count."""
|
||||||
if not filepath.exists():
|
if not filepath.exists():
|
||||||
|
|||||||
@@ -401,6 +401,69 @@
|
|||||||
padding: 2px 0;
|
padding: 2px 0;
|
||||||
}
|
}
|
||||||
.aurora-quality-line span:first-child { color: var(--muted); }
|
.aurora-quality-line span:first-child { color: var(--muted); }
|
||||||
|
/* Detections section of the Aurora compare UI: a two-panel grid of
   before/after preview frames with absolutely-positioned face/plate
   boxes layered on top (markup built by auroraRenderDetectionsPanel). */
.aurora-detect-wrap {
  margin: 10px 0;
  padding: 10px;
  border-radius: 8px;
  border: 1px solid rgba(255,255,255,0.08);
  background: var(--bg2);
}
/* Responsive two-column layout; collapses to one column under ~240px. */
.aurora-detect-grid {
  display: grid;
  grid-template-columns: repeat(auto-fit, minmax(240px, 1fr));
  gap: 10px;
  margin-top: 8px;
}
.aurora-detect-card {
  border: 1px solid rgba(255,255,255,0.08);
  border-radius: 8px;
  padding: 8px;
  background: rgba(255,255,255,0.02);
}
/* Positioning context for the overlay; clips boxes at the frame edge. */
.aurora-detect-stage {
  position: relative;
  overflow: hidden;
  border-radius: 6px;
  border: 1px solid rgba(255,255,255,0.08);
  background: #000;
  margin-top: 6px;
}
.aurora-detect-stage img {
  display: block;
  width: 100%;
  height: auto;
  max-height: 240px;
  object-fit: contain;
  background: #000;
}
/* Full-bleed layer holding the bbox divs; clicks pass through to img. */
.aurora-detect-overlay {
  position: absolute;
  inset: 0;
  pointer-events: none;
}
/* One detection box; left/top/width/height are set inline as %. */
.aurora-bbox {
  position: absolute;
  border: 2px solid #00c67a;
  border-radius: 4px;
  box-sizing: border-box;
  box-shadow: 0 0 0 1px rgba(0,0,0,0.35) inset;
}
/* Green for faces, amber for plates. */
.aurora-bbox.face { border-color: #00c67a; }
.aurora-bbox.plate { border-color: #f5a623; }
/* Label rendered just above its box. */
.aurora-bbox-label {
  position: absolute;
  left: 0;
  top: 0;
  transform: translateY(-100%);
  font-size: 0.63rem;
  font-family: ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", monospace;
  background: rgba(0,0,0,0.8);
  color: #fff;
  border-radius: 4px;
  padding: 1px 5px;
  white-space: nowrap;
  border: 1px solid rgba(255,255,255,0.2);
}
|
||||||
.aurora-chat-log {
|
.aurora-chat-log {
|
||||||
max-height: 220px;
|
max-height: 220px;
|
||||||
overflow-y: auto;
|
overflow-y: auto;
|
||||||
@@ -871,6 +934,20 @@
|
|||||||
<div id="auroraQualityContent"></div>
|
<div id="auroraQualityContent"></div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
<!-- Detection overlays panel: before/after preview frames with face and
     plate boxes. Populated by auroraRenderDetections(); stays hidden
     until at least one panel has boxes to show. -->
<div id="auroraDetectionsWrap" class="aurora-detect-wrap" style="display:none;">
  <div class="aurora-note" style="margin-top:0;">Detections (faces + plates)</div>
  <div class="aurora-detect-grid">
    <div class="aurora-detect-card">
      <div class="aurora-note" style="margin-top:0;">Original frame</div>
      <!-- Filled by auroraRenderDetectionsPanel('auroraDetectionsBefore', ...) -->
      <div id="auroraDetectionsBefore"></div>
    </div>
    <div class="aurora-detect-card">
      <div class="aurora-note" style="margin-top:0;">Aurora enhanced frame</div>
      <!-- Filled by auroraRenderDetectionsPanel('auroraDetectionsAfter', ...) -->
      <div id="auroraDetectionsAfter"></div>
    </div>
  </div>
</div>
|
||||||
|
|
||||||
<div id="auroraFacesRow" class="aurora-kv" style="display:none;">
|
<div id="auroraFacesRow" class="aurora-kv" style="display:none;">
|
||||||
<span class="k">Виявлено облич</span><span class="v" id="auroraFacesCount">0</span>
|
<span class="k">Виявлено облич</span><span class="v" id="auroraFacesCount">0</span>
|
||||||
</div>
|
</div>
|
||||||
@@ -2233,6 +2310,8 @@ function auroraSetSelectedFile(file) {
|
|||||||
if (audioCard) audioCard.style.display = 'none';
|
if (audioCard) audioCard.style.display = 'none';
|
||||||
const qualityWrap = document.getElementById('auroraQualityWrap');
|
const qualityWrap = document.getElementById('auroraQualityWrap');
|
||||||
if (qualityWrap) qualityWrap.style.display = 'none';
|
if (qualityWrap) qualityWrap.style.display = 'none';
|
||||||
|
const detWrap = document.getElementById('auroraDetectionsWrap');
|
||||||
|
if (detWrap) detWrap.style.display = 'none';
|
||||||
const quickStartBtn = document.getElementById('auroraStartFromAnalysisBtn');
|
const quickStartBtn = document.getElementById('auroraStartFromAnalysisBtn');
|
||||||
if (quickStartBtn) quickStartBtn.disabled = !file;
|
if (quickStartBtn) quickStartBtn.disabled = !file;
|
||||||
const reBtn = document.getElementById('auroraReprocessBtn');
|
const reBtn = document.getElementById('auroraReprocessBtn');
|
||||||
@@ -2698,6 +2777,93 @@ function auroraRenderQualityReport(report) {
|
|||||||
wrap.style.display = 'block';
|
wrap.style.display = 'block';
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
 * Convert an absolute [x1, y1, x2, y2] box into percentage offsets
 * relative to a frame of frameW x frameH pixels (for CSS positioning).
 * Coordinate pairs are re-ordered, clamped to the frame, and any invalid
 * input or a box under 2px in either dimension yields null.
 */
function auroraNormalizeBoxPct(bbox, frameW, frameH) {
  if (!Array.isArray(bbox) || bbox.length < 4) return null;
  const width = Number(frameW || 0);
  const height = Number(frameH || 0);
  if (!Number.isFinite(width) || width <= 0 || !Number.isFinite(height) || height <= 0) return null;
  const nums = bbox.slice(0, 4).map(Number);
  if (!nums.every(Number.isFinite)) return null;
  const clampX = (v) => Math.max(0, Math.min(width, v));
  const clampY = (v) => Math.max(0, Math.min(height, v));
  const left = clampX(Math.min(nums[0], nums[2]));
  const right = clampX(Math.max(nums[0], nums[2]));
  const top = clampY(Math.min(nums[1], nums[3]));
  const bottom = clampY(Math.max(nums[1], nums[3]));
  if (right - left < 2 || bottom - top < 2) return null;
  return {
    left: (left / width) * 100,
    top: (top / height) * 100,
    width: ((right - left) / width) * 100,
    height: ((bottom - top) / height) * 100,
  };
}
|
||||||
|
|
||||||
|
/**
 * Render one detections panel: the preview image with absolutely
 * positioned face/plate boxes layered on top.
 *
 * @param {string} containerId - id of the host div to fill.
 * @param {string} imageUrl - preview frame URL (already absolutized).
 * @param {object} payload - one side of compare.detections:
 *   { frame_size: {width, height}, faces: [...], plates: [...] }.
 * @returns {boolean} true when at least one box was drawn.
 */
function auroraRenderDetectionsPanel(containerId, imageUrl, payload) {
  const host = document.getElementById(containerId);
  if (!host) return false;
  const url = String(imageUrl || '').trim();
  const data = (payload && typeof payload === 'object') ? payload : {};
  const frame = (data.frame_size && typeof data.frame_size === 'object') ? data.frame_size : {};
  const frameW = Number(frame.width || 0);
  const frameH = Number(frame.height || 0);
  const faces = Array.isArray(data.faces) ? data.faces : [];
  const plates = Array.isArray(data.plates) ? data.plates : [];

  if (!url) {
    host.innerHTML = '<div class="aurora-note">preview unavailable</div>';
    return false;
  }

  const boxHtml = [];
  // Turn one detection into a positioned .aurora-bbox div; silently
  // skipped when the bbox can't be normalized (bad frame size / coords).
  const pushBox = (kind, item) => {
    const norm = auroraNormalizeBoxPct(item?.bbox, frameW, frameH);
    if (!norm) return;
    const conf = Number(item?.confidence);
    const confText = Number.isFinite(conf) ? conf.toFixed(2) : '?';
    let label = `${kind} (${confText})`;
    if (kind === 'plate' && item?.text) {
      label = `plate ${String(item.text)} (${confText})`;
    }
    boxHtml.push(
      `<div class="aurora-bbox ${kind}" style="left:${norm.left}%;top:${norm.top}%;width:${norm.width}%;height:${norm.height}%;">` +
      // Label text is escaped via auroraEsc before hitting innerHTML.
      `<span class="aurora-bbox-label">${auroraEsc(label)}</span>` +
      `</div>`
    );
  };
  faces.forEach((item) => pushBox('face', item));
  plates.forEach((item) => pushBox('plate', item));

  host.innerHTML = `
    <div class="aurora-detect-stage">
      <img src="${auroraEsc(url)}" alt="detections preview">
      <div class="aurora-detect-overlay">${boxHtml.join('')}</div>
    </div>
    <div class="aurora-note">${boxHtml.length} boxes</div>
  `;
  return boxHtml.length > 0;
}
|
||||||
|
|
||||||
|
/**
 * Show or hide the detections section from a /compare API payload.
 *
 * Clears and hides the wrap when the payload lacks frame_preview or
 * detections; otherwise renders both panels and keeps the wrap visible
 * only if at least one panel drew a box.
 *
 * @param {object|null} compare - compare endpoint response (or falsy).
 */
function auroraRenderDetections(compare) {
  const wrap = document.getElementById('auroraDetectionsWrap');
  const beforeHost = document.getElementById('auroraDetectionsBefore');
  const afterHost = document.getElementById('auroraDetectionsAfter');
  if (!wrap || !beforeHost || !afterHost) return;
  if (!compare || typeof compare !== 'object' || !compare.frame_preview || !compare.detections) {
    wrap.style.display = 'none';
    beforeHost.innerHTML = '';
    afterHost.innerHTML = '';
    return;
  }
  const fp = compare.frame_preview || {};
  const beforeUrl = auroraAbsoluteUrl(fp.before_url || '');
  const afterUrl = auroraAbsoluteUrl(fp.after_url || '');
  const d = compare.detections || {};
  const beforeOk = auroraRenderDetectionsPanel('auroraDetectionsBefore', beforeUrl, d.before || {});
  const afterOk = auroraRenderDetectionsPanel('auroraDetectionsAfter', afterUrl, d.after || {});
  wrap.style.display = (beforeOk || afterOk) ? 'block' : 'none';
}
|
||||||
|
|
||||||
function auroraShowCompare(beforeUrl, afterUrl) {
|
function auroraShowCompare(beforeUrl, afterUrl) {
|
||||||
const wrap = document.getElementById('auroraCompareWrap');
|
const wrap = document.getElementById('auroraCompareWrap');
|
||||||
if (!wrap || !beforeUrl || !afterUrl) return;
|
if (!wrap || !beforeUrl || !afterUrl) return;
|
||||||
@@ -3449,6 +3615,7 @@ async function auroraRenderResult(data, compare) {
|
|||||||
const afterUrl = auroraAbsoluteUrl(outputImage.url);
|
const afterUrl = auroraAbsoluteUrl(outputImage.url);
|
||||||
auroraShowCompare(beforeUrl, afterUrl);
|
auroraShowCompare(beforeUrl, afterUrl);
|
||||||
}
|
}
|
||||||
|
auroraRenderDetections(compare);
|
||||||
|
|
||||||
const forensicWrap = document.getElementById('auroraForensicLogWrap');
|
const forensicWrap = document.getElementById('auroraForensicLogWrap');
|
||||||
const forensicPre = document.getElementById('auroraForensicLog');
|
const forensicPre = document.getElementById('auroraForensicLog');
|
||||||
|
|||||||
Reference in New Issue
Block a user