feat(sofiia-console): add runbooks index status endpoint
GET /api/runbooks/status returns docs_root, indexed_files, indexed_chunks, last_indexed_at, and fts_available. Adds a docs_index_meta key/value table and writes it on every index rebuild.

Made-with: Cursor
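For reference, the endpoint responds with a flat JSON object containing the five fields named above. A hypothetical exchange (field values are illustrative only, not output from this commit; the endpoint is auth-protected via require_auth):

GET /api/runbooks/status

{
  "docs_root": "/srv/docs",
  "indexed_files": 42,
  "indexed_chunks": 318,
  "last_indexed_at": "2024-05-01T12:00:00+00:00",
  "fts_available": true
}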
@@ -374,6 +374,10 @@ CREATE VIRTUAL TABLE IF NOT EXISTS docs_chunks_fts USING fts5(
     content,
     content=''
 );
+CREATE TABLE IF NOT EXISTS docs_index_meta (
+    key TEXT PRIMARY KEY,
+    value TEXT NOT NULL DEFAULT ''
+);
 
 -- ── Graph Intelligence (Hygiene + Reflection) ──────────────────────────────
 -- These ADD COLUMN statements are idempotent (IF NOT EXISTS requires SQLite 3.37+).
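The new docs_index_meta table is a plain key/value store. A minimal standalone sketch of the access pattern the store functions below rely on (plain sqlite3 purely for illustration; the real code goes through the app's async DB layer, and the path/timestamp values are placeholders):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute(
    "CREATE TABLE IF NOT EXISTS docs_index_meta ("
    "key TEXT PRIMARY KEY, value TEXT NOT NULL DEFAULT '')"
)

# One row per attribute, upserted on every rebuild (mirrors set_docs_index_meta).
for key, value in [
    ("docs_root", "/srv/docs"),                        # placeholder path
    ("last_indexed_at", "2024-05-01T12:00:00+00:00"),  # placeholder timestamp
    ("sha", ""),
]:
    conn.execute(
        "INSERT OR REPLACE INTO docs_index_meta(key, value) VALUES (?, ?)",
        (key, value),
    )
conn.commit()

# Read everything back into a dict (mirrors get_docs_index_status).
meta = dict(conn.execute("SELECT key, value FROM docs_index_meta"))
assert meta["docs_root"] == "/srv/docs"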
@@ -9,11 +9,12 @@ import asyncio
 import logging
 import os
 import sys
+from datetime import datetime, timezone
 from pathlib import Path
 from typing import Optional
 
 from . import db as _db
-from .docs_store import clear_docs_index, insert_docs_file
+from .docs_store import clear_docs_index, insert_docs_file, set_docs_index_meta
 
 logger = logging.getLogger(__name__)
 
@@ -52,6 +53,8 @@ async def rebuild_index(docs_root: Optional[Path] = None) -> int:
         mtime = path.stat().st_mtime
         await insert_docs_file(path_key, mtime, content)
         count += 1
+    last_indexed_at = datetime.now(timezone.utc).isoformat()
+    await set_docs_index_meta(str(root), last_indexed_at, sha="")
     logger.info("Docs index rebuilt: %s files from %s", count, root)
     return count
 
@@ -37,15 +37,63 @@ def _extract_title(content: str, path: str) -> str:
 
 
 async def clear_docs_index() -> None:
-    """Remove all docs_files, docs_chunks, and FTS rows."""
+    """Remove all docs_files, docs_chunks, FTS rows, and index meta."""
     conn = await _db.get_db()
     await conn.execute("DELETE FROM docs_chunks_fts")
     await conn.execute("DELETE FROM docs_chunks")
     await conn.execute("DELETE FROM docs_files")
+    await conn.execute("DELETE FROM docs_index_meta")
     await conn.commit()
     logger.info("Docs index cleared.")
 
 
+async def set_docs_index_meta(docs_root: str, last_indexed_at: str, sha: str = "") -> None:
+    """Write meta after rebuild. last_indexed_at: ISO or epoch string."""
+    conn = await _db.get_db()
+    for key, value in [("docs_root", docs_root), ("last_indexed_at", last_indexed_at), ("sha", sha)]:
+        await conn.execute(
+            "INSERT OR REPLACE INTO docs_index_meta(key, value) VALUES (?,?)",
+            (key, value),
+        )
+    await conn.commit()
+
+
+async def get_docs_index_status() -> Dict[str, Any]:
+    """Return indexed_files, indexed_chunks, last_indexed_at, docs_root, fts_available."""
+    conn = await _db.get_db()
+    files_row = None
+    chunks_row = None
+    async with conn.execute("SELECT COUNT(*) FROM docs_files") as cur:
+        files_row = await cur.fetchone()
+    async with conn.execute("SELECT COUNT(*) FROM docs_chunks") as cur:
+        chunks_row = await cur.fetchone()
+    indexed_files = int(files_row[0]) if files_row else 0
+    indexed_chunks = int(chunks_row[0]) if chunks_row else 0
+
+    meta = {}
+    async with conn.execute("SELECT key, value FROM docs_index_meta") as cur:
+        async for row in cur:
+            meta[row[0]] = row[1]
+    last_indexed_at = meta.get("last_indexed_at") or None
+    docs_root = meta.get("docs_root") or ""
+
+    fts_available = False
+    if indexed_chunks > 0:
+        try:
+            async with conn.execute("SELECT 1 FROM docs_chunks_fts LIMIT 1") as cur:
+                fts_available = (await cur.fetchone()) is not None
+        except Exception:
+            pass
+
+    return {
+        "docs_root": docs_root,
+        "indexed_files": indexed_files,
+        "indexed_chunks": indexed_chunks,
+        "last_indexed_at": last_indexed_at,
+        "fts_available": fts_available,
+    }
+
+
 async def insert_docs_file(path: str, mtime: float, content: str) -> None:
     """Register one file and its chunks. Caller ensures path is normalized."""
     conn = await _db.get_db()
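A minimal usage sketch for the two new store functions, assuming the application's database has already been initialized through app.db (module paths follow the test imports in this commit; the path and timestamp arguments are placeholders, not part of the diff):

import asyncio

from app import docs_store


async def main() -> None:
    # Before any rebuild the meta table is empty, so the status degrades to
    # zero counts, docs_root == "", last_indexed_at None, fts_available False.
    print(await docs_store.get_docs_index_status())

    # After a rebuild (or an explicit set_docs_index_meta call) the same keys
    # come back populated.
    await docs_store.set_docs_index_meta("/srv/docs", "2024-05-01T12:00:00+00:00")
    print(await docs_store.get_docs_index_status())


asyncio.run(main())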
@@ -15,6 +15,14 @@ from . import docs_store as store
 runbooks_router = APIRouter(prefix="/api/runbooks", tags=["runbooks-docs"])
 
 
+@runbooks_router.get("/status")
+async def runbooks_status(
+    _auth: str = Depends(require_auth),
+):
+    """Return docs index status: indexed_files, indexed_chunks, last_indexed_at, docs_root, fts_available."""
+    return await store.get_docs_index_status()
+
+
 def _safe_path(path: str) -> bool:
     """Reject path traversal and non-docs paths."""
     if not path or ".." in path or path.startswith("/"):
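An HTTP-level smoke test could reuse the existing sofiia_client fixture; this is a sketch, assuming that fixture already satisfies require_auth (it is not part of this commit):

def test_runbooks_status_returns_expected_keys(sofiia_client):
    """Status endpoint responds 200 with the documented fields."""
    r = sofiia_client.get("/api/runbooks/status")
    assert r.status_code == 200
    body = r.json()
    assert {"docs_root", "indexed_files", "indexed_chunks",
            "last_indexed_at", "fts_available"} <= set(body)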
@@ -93,3 +93,23 @@ def test_runbooks_raw_400_for_invalid_path(sofiia_client):
     """Raw returns 400 for path traversal attempt."""
     r = sofiia_client.get("/api/runbooks/raw?path=../../../etc/passwd")
     assert r.status_code == 400
+
+
+def test_runbooks_status_after_rebuild(sofiia_module, tmp_path, tmp_docs_with_rehearsal, monkeypatch):
+    """After rebuild, status shows indexed_files > 0, indexed_chunks > 0, last_indexed_at set."""
+    import app.docs_index as docs_index_mod
+    import app.docs_store as docs_store_mod
+
+    monkeypatch.setenv("SOFIIA_DATA_DIR", str(tmp_path / "sofiia-data"))
+    loop = asyncio.get_event_loop()
+
+    async def run():
+        await docs_index_mod.rebuild_index(tmp_docs_with_rehearsal)
+        return await docs_store_mod.get_docs_index_status()
+
+    status = loop.run_until_complete(run())
+    assert status["indexed_files"] >= 1, status
+    assert status["indexed_chunks"] >= 1, status
+    assert status.get("last_indexed_at") is not None, status
+    assert "docs_root" in status
+    assert "fts_available" in status