Optimize latest memory queries (#35)
Some checks are pending
Build / build (push) Waiting to run

This commit is contained in:
foxtacles 2026-05-02 14:15:28 -07:00 committed by GitHub
parent 96e5fea9e6
commit 37a36e287b
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
3 changed files with 62 additions and 13 deletions

View File

@ -0,0 +1,46 @@
-- Denormalized latest-per-event view of memory_completions.
-- Powers /api/memories/latest, /api/memory/:eventId, /api/sitemap.
-- Update memory_events alongside any schema change to memory_completions.
-- One row per event_id holding its most recent completion; kept current by
-- the AFTER INSERT trigger on memory_completions plus the one-time backfill
-- in this migration.
CREATE TABLE IF NOT EXISTS memory_events (
event_id TEXT PRIMARY KEY, -- natural key: at most one "latest" row per event
anim_index INTEGER NOT NULL, -- copied verbatim from memory_completions.anim_index
completed_at INTEGER NOT NULL, -- completion instant; epoch units not shown here -- assumed identical to memory_completions.completed_at
participants TEXT NOT NULL, -- serialized participants payload; format defined by the writer, not visible in this file
language TEXT NOT NULL -- language tag of the completion, copied as-is
);
-- Serves the ORDER BY completed_at DESC scans behind the latest-memories
-- feed and the sitemap listing, avoiding a sort over the whole table.
CREATE INDEX IF NOT EXISTS idx_memory_events_completed_at
ON memory_events(completed_at DESC);
-- AFTER INSERT does not fire when INSERT OR IGNORE hits the UNIQUE conflict,
-- so duplicate completions correctly do not update memory_events.
-- Keeps memory_events pointing at the newest completion per event_id.
CREATE TRIGGER IF NOT EXISTS memory_events_after_insert
AFTER INSERT ON memory_completions
BEGIN
-- Upsert the denormalized row. The strict ">" below means an insert with an
-- equal completed_at does NOT replace the existing row (first writer wins on
-- ties) -- intentionally stricter than the backfill's ">=", which is allowed
-- to overwrite ties with its deterministic (completed_at DESC, id DESC) pick.
INSERT INTO memory_events (event_id, anim_index, completed_at, participants, language)
VALUES (NEW.event_id, NEW.anim_index, NEW.completed_at, NEW.participants, NEW.language)
ON CONFLICT(event_id) DO UPDATE SET
anim_index = excluded.anim_index,
completed_at = excluded.completed_at,
participants = excluded.participants,
language = excluded.language
-- Only move forward in time; stale or tied writes leave the row untouched.
WHERE excluded.completed_at > memory_events.completed_at;
END;
-- Backfill existing history: for each event_id, take the single newest
-- completion, with ties on completed_at broken by the highest id so the
-- choice is deterministic.
-- Uses UPSERT (not plain INSERT) because the trigger above is already live:
-- any row it wrote between CREATE TRIGGER and this statement must not be
-- regressed -- ">=" lets the backfill win ties but never move time backwards.
INSERT INTO memory_events (event_id, anim_index, completed_at, participants, language)
SELECT event_id, anim_index, completed_at, participants, language
FROM (
SELECT
event_id,
anim_index,
completed_at,
participants,
language,
ROW_NUMBER() OVER (
PARTITION BY event_id
ORDER BY completed_at DESC, id DESC
) AS rn
FROM memory_completions
) ranked
WHERE rn = 1
ON CONFLICT(event_id) DO UPDATE SET
anim_index = excluded.anim_index,
completed_at = excluded.completed_at,
participants = excluded.participants,
language = excluded.language
WHERE excluded.completed_at >= memory_events.completed_at;

View File

@ -13,6 +13,19 @@ account.delete("/", async (c) => {
c.env.DB.prepare("DELETE FROM user_saves WHERE user_id = ?").bind(userId),
c.env.DB.prepare("DELETE FROM user_config WHERE user_id = ?").bind(userId),
c.env.DB.prepare("DELETE FROM memory_completions WHERE user_id = ?").bind(userId),
c.env.DB.prepare(
"DELETE FROM memory_events WHERE event_id NOT IN (SELECT event_id FROM memory_completions)"
),
c.env.DB.prepare(
"INSERT INTO memory_events (event_id, anim_index, completed_at, participants, language) " +
"SELECT event_id, anim_index, completed_at, participants, language FROM memory_completions mc " +
"WHERE id = (SELECT id FROM memory_completions mc2 WHERE mc2.event_id = mc.event_id " +
"ORDER BY completed_at DESC, id DESC LIMIT 1) " +
"ON CONFLICT(event_id) DO UPDATE SET " +
"anim_index = excluded.anim_index, completed_at = excluded.completed_at, " +
"participants = excluded.participants, language = excluded.language " +
"WHERE excluded.completed_at >= memory_events.completed_at"
),
c.env.DB.prepare("UPDATE crash_reports SET user_id = NULL WHERE user_id = ?").bind(userId),
c.env.DB.prepare('DELETE FROM "session" WHERE "userId" = ?').bind(userId),
c.env.DB.prepare('DELETE FROM "account" WHERE "userId" = ?').bind(userId),

View File

@ -29,7 +29,7 @@ app.all("/api/auth/*", async (c) => {
// Public endpoint: all memory event IDs for sitemap generation
app.get("/api/sitemap", async (c) => {
const results = await c.env.DB.prepare(
"SELECT event_id, MAX(completed_at) AS completed_at FROM memory_completions GROUP BY event_id ORDER BY completed_at DESC"
"SELECT event_id, completed_at FROM memory_events ORDER BY completed_at DESC"
).all<{ event_id: string; completed_at: number }>();
return c.json({ entries: results.results });
@ -43,7 +43,7 @@ app.get("/api/memory/:eventId", async (c) => {
}
const result = await c.env.DB.prepare(
"SELECT anim_index, event_id, completed_at, participants, language FROM memory_completions WHERE event_id = ? LIMIT 1"
"SELECT anim_index, event_id, completed_at, participants, language FROM memory_events WHERE event_id = ? LIMIT 1"
)
.bind(eventId)
.first<{
@ -77,17 +77,7 @@ app.get("/api/memory/:eventId", async (c) => {
// Public endpoint: latest 10 unique memories for the global feed
app.get("/api/memories/latest", async (c) => {
const results = await c.env.DB.prepare(
`SELECT mc.anim_index, mc.event_id, mc.completed_at, mc.participants, mc.language
FROM memory_completions mc
INNER JOIN (
SELECT event_id, MAX(completed_at) AS max_completed_at
FROM memory_completions
GROUP BY event_id
ORDER BY max_completed_at DESC
LIMIT 10
) latest ON mc.event_id = latest.event_id AND mc.completed_at = latest.max_completed_at
GROUP BY mc.event_id
ORDER BY mc.completed_at DESC`
"SELECT anim_index, event_id, completed_at, participants, language FROM memory_events ORDER BY completed_at DESC, event_id DESC LIMIT 10"
).all<{ anim_index: number; event_id: string; completed_at: number; participants: string; language: string }>();
const entries = results.results.map((r) => {