"""Regression tests for cursor-based pagination tie-breaking.

Cursor payloads are versioned with backward compatibility; these tests add
dedicated tie-breaker coverage for equal timestamps so pagination produces
neither duplicates nor gaps.
"""
from __future__ import annotations
import asyncio
def _create_chat(client, agent_id: str, node_id: str, ref: str) -> str:
|
|
r = client.post(
|
|
"/api/chats",
|
|
json={
|
|
"agent_id": agent_id,
|
|
"node_id": node_id,
|
|
"source": "web",
|
|
"external_chat_ref": ref,
|
|
},
|
|
)
|
|
assert r.status_code == 200, r.text
|
|
return r.json()["chat"]["chat_id"]
def test_chats_pagination_same_updated_at_no_duplicates(sofiia_client, sofiia_module, monkeypatch):
    """Chats sharing one last_active timestamp paginate without duplicates or gaps.

    Creates three chats, pins them all to the same last_active value, then
    walks two limit-2 pages and checks the union covers all three exactly once.
    """

    async def _stub_infer(base_url, agent_id, text, **kwargs):
        return {"response": f"ok:{agent_id}:{text}", "backend": "fake", "model": "fake-model"}

    monkeypatch.setattr(sofiia_module, "infer", _stub_infer)

    chat_ids = [
        _create_chat(sofiia_client, "sofiia", "NODA2", f"same-updated-{i}")
        for i in range(3)
    ]

    async def _pin_last_active():
        # Force an exact timestamp collision so ordering must fall back to
        # the cursor's tie-breaker column.
        db = await sofiia_module._app_db.get_db()
        for chat_id in chat_ids:
            await db.execute(
                "UPDATE sessions SET last_active=? WHERE session_id=?",
                ("2026-03-02T10:00:00Z", chat_id),
            )
        await db.commit()

    asyncio.run(_pin_last_active())

    first = sofiia_client.get("/api/chats?nodes=NODA2&limit=2")
    assert first.status_code == 200, first.text
    page1 = first.json()
    assert page1["count"] == 2
    assert page1["has_more"] is True
    assert page1["next_cursor"]

    second = sofiia_client.get(f"/api/chats?nodes=NODA2&limit=2&cursor={page1['next_cursor']}")
    assert second.status_code == 200, second.text
    page2 = second.json()
    assert page2["count"] >= 1

    seen = [c["chat_id"] for c in page1["items"]] + [c["chat_id"] for c in page2["items"]]
    # All three chats appear, none twice — the tie-breaker kept pages disjoint.
    assert len(set(seen)) == 3
def test_messages_pagination_same_ts_no_duplicates(sofiia_client, sofiia_module, monkeypatch):
    """Messages sharing one ts value paginate without duplicates.

    Sends three turns (each persisting a user and an assistant message, six
    rows total), collapses every message onto a single timestamp, then reads
    two limit-2 pages and checks the four fetched message_ids are distinct.
    """

    async def _stub_infer(base_url, agent_id, text, **kwargs):
        return {"response": f"ok:{agent_id}:{text}", "backend": "fake", "model": "fake-model"}

    monkeypatch.setattr(sofiia_module, "infer", _stub_infer)

    chat_id = _create_chat(sofiia_client, "sofiia", "NODA2", "same-ts")
    for i in range(3):
        resp = sofiia_client.post(
            f"/api/chats/{chat_id}/send",
            json={"text": f"msg-{i}", "idempotency_key": f"same-ts-{i}"},
        )
        assert resp.status_code == 200, resp.text

    async def _pin_message_ts():
        # Collapse all messages of this session onto one timestamp so ordering
        # must rely on the cursor's tie-breaker column.
        db = await sofiia_module._app_db.get_db()
        await db.execute(
            "UPDATE messages SET ts=? WHERE session_id=?",
            ("2026-03-02T10:00:00Z", chat_id),
        )
        await db.commit()

    asyncio.run(_pin_message_ts())

    first = sofiia_client.get(f"/api/chats/{chat_id}/messages?limit=2")
    assert first.status_code == 200, first.text
    page1 = first.json()
    assert page1["count"] == 2
    assert page1["has_more"] is True
    assert page1["next_cursor"]

    second = sofiia_client.get(f"/api/chats/{chat_id}/messages?limit=2&cursor={page1['next_cursor']}")
    assert second.status_code == 200, second.text
    page2 = second.json()
    assert page2["count"] >= 1

    seen = [m["message_id"] for m in page1["items"]] + [m["message_id"] for m in page2["items"]]
    # Two limit-2 pages out of the six persisted messages => 4 ids, all unique.
    assert len(set(seen)) == 4