feat(sofiia-console): harden cursor pagination with tie-breaker

Version the cursor payloads while keeping backward compatibility with unversioned cursors, and add dedicated tie-breaker regression coverage for equal timestamps to prevent pagination duplicates and gaps.

Made-with: Cursor
This commit is contained in:
Apple
2026-03-02 08:12:19 -08:00
parent 0c626943d6
commit e504df7dfa
2 changed files with 109 additions and 2 deletions

View File

@@ -3078,7 +3078,8 @@ def _clean_chat_reply(text: str) -> str:
def _cursor_encode(payload: Dict[str, Any]) -> str:
raw = json.dumps(payload, separators=(",", ":"), ensure_ascii=True).encode("utf-8")
wrapped = {"v": 1, **payload}
raw = json.dumps(wrapped, separators=(",", ":"), ensure_ascii=True).encode("utf-8")
return base64.urlsafe_b64encode(raw).decode("ascii")
@@ -3088,7 +3089,17 @@ def _cursor_decode(cursor: Optional[str]) -> Dict[str, Any]:
try:
decoded = base64.urlsafe_b64decode(cursor.encode("ascii")).decode("utf-8")
data = json.loads(decoded)
return data if isinstance(data, dict) else {}
if not isinstance(data, dict):
return {}
# Backward compatibility: accept old cursors without "v".
if "v" not in data:
return data
# Current cursor format version.
if int(data.get("v") or 0) == 1:
out = dict(data)
out.pop("v", None)
return out
return {}
except Exception:
return {}

View File

@@ -0,0 +1,96 @@
from __future__ import annotations
import asyncio
def _create_chat(client, agent_id: str, node_id: str, ref: str) -> str:
    """Create a chat through the public API and return its chat_id."""
    payload = {
        "agent_id": agent_id,
        "node_id": node_id,
        "source": "web",
        "external_chat_ref": ref,
    }
    resp = client.post("/api/chats", json=payload)
    assert resp.status_code == 200, resp.text
    return resp.json()["chat"]["chat_id"]
def test_chats_pagination_same_updated_at_no_duplicates(sofiia_client, sofiia_module, monkeypatch):
    """Chats sharing one last_active timestamp must paginate without dupes or gaps."""

    async def _fake_infer(base_url, agent_id, text, **kwargs):
        return {"response": f"ok:{agent_id}:{text}", "backend": "fake", "model": "fake-model"}

    monkeypatch.setattr(sofiia_module, "infer", _fake_infer)

    # Three chats, then force them all onto the same last_active so the
    # cursor's tie-breaker is the only thing keeping page order stable.
    chat_ids = [
        _create_chat(sofiia_client, "sofiia", "NODA2", f"same-updated-{i}")
        for i in range(3)
    ]

    async def _force_same_updated():
        db = await sofiia_module._app_db.get_db()
        for chat_id in chat_ids:
            await db.execute(
                "UPDATE sessions SET last_active=? WHERE session_id=?",
                ("2026-03-02T10:00:00Z", chat_id),
            )
        await db.commit()

    asyncio.run(_force_same_updated())

    first = sofiia_client.get("/api/chats?nodes=NODA2&limit=2")
    assert first.status_code == 200, first.text
    page1 = first.json()
    assert page1["count"] == 2
    assert page1["has_more"] is True
    assert page1["next_cursor"]

    second = sofiia_client.get(f"/api/chats?nodes=NODA2&limit=2&cursor={page1['next_cursor']}")
    assert second.status_code == 200, second.text
    page2 = second.json()
    assert page2["count"] >= 1

    seen = [item["chat_id"] for item in page1["items"]] + [item["chat_id"] for item in page2["items"]]
    assert len(set(seen)) == 3
def test_messages_pagination_same_ts_no_duplicates(sofiia_client, sofiia_module, monkeypatch):
    """Messages sharing one ts must paginate without duplicates across pages."""

    async def _fake_infer(base_url, agent_id, text, **kwargs):
        return {"response": f"ok:{agent_id}:{text}", "backend": "fake", "model": "fake-model"}

    monkeypatch.setattr(sofiia_module, "infer", _fake_infer)

    chat_id = _create_chat(sofiia_client, "sofiia", "NODA2", "same-ts")
    for i in range(3):
        resp = sofiia_client.post(
            f"/api/chats/{chat_id}/send",
            json={"text": f"msg-{i}", "idempotency_key": f"same-ts-{i}"},
        )
        assert resp.status_code == 200, resp.text

    # Collapse every message onto one timestamp so only the tie-breaker
    # distinguishes rows at the page boundary.
    async def _force_same_ts():
        db = await sofiia_module._app_db.get_db()
        await db.execute(
            "UPDATE messages SET ts=? WHERE session_id=?",
            ("2026-03-02T10:00:00Z", chat_id),
        )
        await db.commit()

    asyncio.run(_force_same_ts())

    first = sofiia_client.get(f"/api/chats/{chat_id}/messages?limit=2")
    assert first.status_code == 200, first.text
    page1 = first.json()
    assert page1["count"] == 2
    assert page1["has_more"] is True
    assert page1["next_cursor"]

    second = sofiia_client.get(f"/api/chats/{chat_id}/messages?limit=2&cursor={page1['next_cursor']}")
    assert second.status_code == 200, second.text
    page2 = second.json()
    assert page2["count"] >= 1

    seen = [item["message_id"] for item in page1["items"]] + [item["message_id"] for item in page2["items"]]
    # Each send persists a user and an assistant message (6 rows total for
    # 3 sends); two full limit-2 pages must yield 4 distinct ids.
    assert len(set(seen)) == 4