From 8900eb32b9ac2a5b682b9d47feae5bb88ea13526 Mon Sep 17 00:00:00 2001 From: Jeff Emmett Date: Mon, 2 Mar 2026 15:48:01 -0800 Subject: [PATCH] =?UTF-8?q?feat:=20Phase=204=20=E2=80=94=20remove=20Postgr?= =?UTF-8?q?eSQL=20from=2011=20modules,=20switch=20to=20Automerge?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Replace all sql.unsafe() calls with Automerge document operations across rfunds, rbooks, rsplat, rnotes, rwork, rvote, rcal, rfiles, rcart, rtrips, and rinbox. Only rforum retains PG (Discourse provisioning). Each module now uses _syncServer.getDoc/changeDoc/setDoc for all CRUD, with ensureDoc() helpers for lazy document creation. Schema SQL files archived to .sql.archived. Adds Automerge round-trip test suite (35 tests). Co-Authored-By: Claude Opus 4.6 --- .../db/{schema.sql => schema.sql.archived} | 0 modules/rbooks/mod.ts | 283 +++--- .../db/{schema.sql => schema.sql.archived} | 0 modules/rcal/mod.ts | 610 ++++++++---- .../db/{schema.sql => schema.sql.archived} | 0 modules/rcart/mod.ts | 513 ++++++---- .../db/{schema.sql => schema.sql.archived} | 0 modules/rfiles/mod.ts | 534 ++++++++--- .../db/{schema.sql => schema.sql.archived} | 0 modules/rfunds/mod.ts | 90 +- .../db/{schema.sql => schema.sql.archived} | 0 modules/rinbox/mod.ts | 880 +++++++++++++----- .../db/{schema.sql => schema.sql.archived} | 0 modules/rnotes/mod.ts | 748 ++++++--------- .../db/{schema.sql => schema.sql.archived} | 0 modules/rsplat/mod.ts | 386 +++++--- .../db/{schema.sql => schema.sql.archived} | 0 modules/rtrips/mod.ts | 401 +++++--- .../db/{schema.sql => schema.sql.archived} | 0 modules/rvote/mod.ts | 586 +++++++----- .../db/{schema.sql => schema.sql.archived} | 0 modules/rwork/mod.ts | 429 ++++++--- scripts/test-automerge-roundtrip.ts | 258 +++++ 23 files changed, 3771 insertions(+), 1947 deletions(-) rename modules/rbooks/db/{schema.sql => schema.sql.archived} (100%) rename modules/rcal/db/{schema.sql => schema.sql.archived} (100%) 
rename modules/rcart/db/{schema.sql => schema.sql.archived} (100%) rename modules/rfiles/db/{schema.sql => schema.sql.archived} (100%) rename modules/rfunds/db/{schema.sql => schema.sql.archived} (100%) rename modules/rinbox/db/{schema.sql => schema.sql.archived} (100%) rename modules/rnotes/db/{schema.sql => schema.sql.archived} (100%) rename modules/rsplat/db/{schema.sql => schema.sql.archived} (100%) rename modules/rtrips/db/{schema.sql => schema.sql.archived} (100%) rename modules/rvote/db/{schema.sql => schema.sql.archived} (100%) rename modules/rwork/db/{schema.sql => schema.sql.archived} (100%) create mode 100644 scripts/test-automerge-roundtrip.ts diff --git a/modules/rbooks/db/schema.sql b/modules/rbooks/db/schema.sql.archived similarity index 100% rename from modules/rbooks/db/schema.sql rename to modules/rbooks/db/schema.sql.archived diff --git a/modules/rbooks/mod.ts b/modules/rbooks/mod.ts index 0b3548c8..8a5a3cc5 100644 --- a/modules/rbooks/mod.ts +++ b/modules/rbooks/mod.ts @@ -3,13 +3,16 @@ * * Ported from rbooks-online (Next.js) to Hono routes. * Routes are relative to mount point (/:space/books in unified, / in standalone). + * + * Storage: Automerge documents via SyncServer (one doc per space). + * PDF files stay on the filesystem — only metadata lives in Automerge. 
*/ import { Hono } from "hono"; import { resolve } from "node:path"; -import { mkdir, readFile } from "node:fs/promises"; +import { mkdir } from "node:fs/promises"; import { randomUUID } from "node:crypto"; -import { sql } from "../../shared/db/pool"; +import * as Automerge from "@automerge/automerge"; import { renderShell } from "../../server/shell"; import { getModuleInfoList } from "../../shared/module"; import type { RSpaceModule, SpaceLifecycleContext } from "../../shared/module"; @@ -19,37 +22,68 @@ import { extractToken, } from "@encryptid/sdk/server"; import type { SyncServer } from '../../server/local-first/sync-server'; -import { booksCatalogSchema } from './schemas'; +import { + booksCatalogSchema, + booksCatalogDocId, + type BooksCatalogDoc, + type BookItem, +} from './schemas'; let _syncServer: SyncServer | null = null; const BOOKS_DIR = process.env.BOOKS_DIR || "/data/books"; -// ── Types ── +// ── Helpers ── -export interface BookRow { - id: string; - slug: string; - title: string; - author: string | null; - description: string | null; - pdf_path: string; - pdf_size_bytes: number; - page_count: number; - tags: string[]; - license: string; - cover_color: string; - contributor_id: string | null; - contributor_name: string | null; - status: string; - featured: boolean; - view_count: number; - download_count: number; - created_at: string; - updated_at: string; +function ensureDoc(space: string): BooksCatalogDoc { + const docId = booksCatalogDocId(space); + let doc = _syncServer!.getDoc(docId); + if (!doc) { + doc = Automerge.change(Automerge.init(), 'init', (d) => { + const init = booksCatalogSchema.init(); + d.meta = init.meta; + d.meta.spaceSlug = space; + d.items = {}; + }); + _syncServer!.setDoc(docId, doc); + } + return doc; } -// ── Helpers ── +/** Find a book by slug or id across the items map. 
*/ +function findBook(doc: BooksCatalogDoc, idOrSlug: string): BookItem | undefined { + // Direct key lookup first (by id) + if (doc.items[idOrSlug]) return doc.items[idOrSlug]; + // Then scan by slug + return Object.values(doc.items).find( + (b) => b.slug === idOrSlug || b.id === idOrSlug + ); +} + +/** Convert a BookItem to the JSON shape the API has always returned. */ +function bookToRow(b: BookItem) { + return { + id: b.id, + slug: b.slug, + title: b.title, + author: b.author, + description: b.description, + pdf_path: b.pdfPath, + pdf_size_bytes: b.pdfSizeBytes, + page_count: b.pageCount, + tags: b.tags, + license: b.license, + cover_color: b.coverColor, + contributor_id: b.contributorId, + contributor_name: b.contributorName, + status: b.status, + featured: b.featured, + view_count: b.viewCount, + download_count: b.downloadCount, + created_at: new Date(b.createdAt).toISOString(), + updated_at: new Date(b.updatedAt).toISOString(), + }; +} function slugify(text: string): string { return text @@ -59,44 +93,69 @@ .slice(0, 80); } +function escapeAttr(s: string): string { + return s.replace(/&/g, "&amp;").replace(/"/g, "&quot;").replace(/</g, "&lt;").replace(/>/g, "&gt;"); +} + // ── Routes ── const routes = new Hono(); // ── API: List books ── routes.get("/api/books", async (c) => { - const search = c.req.query("search"); + const space = c.req.param("space") || "global"; + const search = c.req.query("search")?.toLowerCase(); const tag = c.req.query("tag"); const limit = Math.min(parseInt(c.req.query("limit") || "50"), 100); const offset = parseInt(c.req.query("offset") || "0"); - let query = `SELECT id, slug, title, author, description, pdf_size_bytes, - page_count, tags, cover_color, contributor_name, featured, - view_count, created_at - FROM rbooks.books WHERE status = 'published'`; - const params: (string | number)[] = []; + const doc = ensureDoc(space); + let books = Object.values(doc.items).filter((b) => b.status === "published"); if (search) { -
params.push(`%${search}%`); - query += ` AND (title ILIKE $${params.length} OR author ILIKE $${params.length} OR description ILIKE $${params.length})`; + books = books.filter( + (b) => + b.title.toLowerCase().includes(search) || + b.author.toLowerCase().includes(search) || + b.description.toLowerCase().includes(search) + ); } if (tag) { - params.push(tag); - query += ` AND $${params.length} = ANY(tags)`; + books = books.filter((b) => b.tags.includes(tag)); } - query += ` ORDER BY featured DESC, created_at DESC`; - params.push(limit); - query += ` LIMIT $${params.length}`; - params.push(offset); - query += ` OFFSET $${params.length}`; + // Sort: featured first, then newest + books.sort((a, b) => { + if (a.featured !== b.featured) return a.featured ? -1 : 1; + return b.createdAt - a.createdAt; + }); + + // Paginate + const paged = books.slice(offset, offset + limit); + + // Return the subset of fields the old query returned + const rows = paged.map((b) => ({ + id: b.id, + slug: b.slug, + title: b.title, + author: b.author, + description: b.description, + pdf_size_bytes: b.pdfSizeBytes, + page_count: b.pageCount, + tags: [...b.tags], + cover_color: b.coverColor, + contributor_name: b.contributorName, + featured: b.featured, + view_count: b.viewCount, + created_at: new Date(b.createdAt).toISOString(), + })); - const rows = await sql.unsafe(query, params); return c.json({ books: rows }); }); // ── API: Upload book ── routes.post("/api/books", async (c) => { + const space = c.req.param("space") || "global"; const token = extractToken(c.req.raw.headers); if (!token) return c.json({ error: "Authentication required" }, 401); @@ -124,13 +183,13 @@ routes.post("/api/books", async (c) => { const tags = tagsRaw ? 
tagsRaw.split(",").map((t) => t.trim()).filter(Boolean) : []; const shortId = randomUUID().slice(0, 8); + const id = randomUUID(); let slug = slugify(title); // Check slug collision - const existing = await sql.unsafe( - `SELECT 1 FROM rbooks.books WHERE slug = $1`, [slug] - ); - if (existing.length > 0) { + const doc = ensureDoc(space); + const slugExists = Object.values(doc.items).some((b) => b.slug === slug); + if (slugExists) { slug = `${slug}-${shortId}`; } @@ -141,50 +200,82 @@ routes.post("/api/books", async (c) => { const buffer = Buffer.from(await file.arrayBuffer()); await Bun.write(filepath, buffer); - // Insert into DB - const rows = await sql.unsafe( - `INSERT INTO rbooks.books (slug, title, author, description, pdf_path, pdf_size_bytes, tags, license, contributor_id, contributor_name) - VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) - RETURNING id, slug, title, author, description, tags, created_at`, - [slug, title, author, description, filename, buffer.length, tags, license, claims.sub, claims.username || null] - ); + const now = Date.now(); - return c.json(rows[0], 201); + // Insert into Automerge doc + const docId = booksCatalogDocId(space); + _syncServer!.changeDoc(docId, `add book: ${slug}`, (d) => { + d.items[id] = { + id, + slug, + title, + author: author || "", + description: description || "", + pdfPath: filename, + pdfSizeBytes: buffer.length, + pageCount: 0, + tags, + license, + coverColor: null, + contributorId: claims.sub, + contributorName: claims.username || null, + status: "published", + featured: false, + viewCount: 0, + downloadCount: 0, + createdAt: now, + updatedAt: now, + }; + }); + + return c.json({ + id, + slug, + title, + author, + description, + tags, + created_at: new Date(now).toISOString(), + }, 201); }); // ── API: Get book details ── routes.get("/api/books/:id", async (c) => { + const space = c.req.param("space") || "global"; const id = c.req.param("id"); - const rows = await sql.unsafe( - `SELECT * FROM rbooks.books 
WHERE (slug = $1 OR id::text = $1) AND status = 'published'`, - [id] - ); + const doc = ensureDoc(space); + const book = findBook(doc, id); - if (rows.length === 0) return c.json({ error: "Book not found" }, 404); + if (!book || book.status !== "published") { + return c.json({ error: "Book not found" }, 404); + } // Increment view count - await sql.unsafe( - `UPDATE rbooks.books SET view_count = view_count + 1 WHERE id = $1`, - [rows[0].id] - ); + const docId = booksCatalogDocId(space); + _syncServer!.changeDoc(docId, `view: ${book.slug}`, (d) => { + if (d.items[book.id]) { + d.items[book.id].viewCount += 1; + d.items[book.id].updatedAt = Date.now(); + } + }); - return c.json(rows[0]); + return c.json(bookToRow(book)); }); // ── API: Serve PDF ── routes.get("/api/books/:id/pdf", async (c) => { + const space = c.req.param("space") || "global"; const id = c.req.param("id"); - const rows = await sql.unsafe( - `SELECT id, slug, title, pdf_path FROM rbooks.books WHERE (slug = $1 OR id::text = $1) AND status = 'published'`, - [id] - ); + const doc = ensureDoc(space); + const book = findBook(doc, id); - if (rows.length === 0) return c.json({ error: "Book not found" }, 404); + if (!book || book.status !== "published") { + return c.json({ error: "Book not found" }, 404); + } - const book = rows[0]; - const filepath = resolve(BOOKS_DIR, book.pdf_path); + const filepath = resolve(BOOKS_DIR, book.pdfPath); const file = Bun.file(filepath); if (!(await file.exists())) { @@ -192,10 +283,13 @@ routes.get("/api/books/:id/pdf", async (c) => { } // Increment download count - await sql.unsafe( - `UPDATE rbooks.books SET download_count = download_count + 1 WHERE id = $1`, - [book.id] - ); + const docId = booksCatalogDocId(space); + _syncServer!.changeDoc(docId, `download: ${book.slug}`, (d) => { + if (d.items[book.id]) { + d.items[book.id].downloadCount += 1; + d.items[book.id].updatedAt = Date.now(); + } + }); return new Response(file, { headers: { @@ -226,12 +320,10 @@ 
routes.get("/read/:id", async (c) => { const spaceSlug = c.req.param("space") || "personal"; const id = c.req.param("id"); - const rows = await sql.unsafe( - `SELECT * FROM rbooks.books WHERE (slug = $1 OR id::text = $1) AND status = 'published'`, - [id] - ); + const doc = ensureDoc(spaceSlug); + const book = findBook(doc, id); - if (rows.length === 0) { + if (!book || book.status !== "published") { const html = renderShell({ title: "Book not found | rSpace", moduleId: "rbooks", @@ -242,13 +334,14 @@ routes.get("/read/:id", async (c) => { return c.html(html, 404); } - const book = rows[0]; - // Increment view count - await sql.unsafe( - `UPDATE rbooks.books SET view_count = view_count + 1 WHERE id = $1`, - [book.id] - ); + const docId = booksCatalogDocId(spaceSlug); + _syncServer!.changeDoc(docId, `view: ${book.slug}`, (d) => { + if (d.items[book.id]) { + d.items[book.id].viewCount += 1; + d.items[book.id].updatedAt = Date.now(); + } + }); // Build the PDF URL relative to this module's mount point const pdfUrl = `/${spaceSlug}/rbooks/api/books/${book.slug}/pdf`; @@ -279,24 +372,6 @@ routes.get("/read/:id", async (c) => { return c.html(html); }); -// ── Initialize DB schema ── -async function initDB(): Promise<void> { - try { - const schemaPath = resolve(import.meta.dir, "db/schema.sql"); - const schemaSql = await readFile(schemaPath, "utf-8"); - await sql.unsafe(`SET search_path TO rbooks, public`); - await sql.unsafe(schemaSql); - await sql.unsafe(`SET search_path TO public`); - console.log("[Books] Database schema initialized"); - } catch (e) { - console.error("[Books] Schema init failed:", e); - } -} - -function escapeAttr(s: string): string { - return s.replace(/&/g, "&amp;").replace(/"/g, "&quot;").replace(/</g, "&lt;").replace(/>/g, "&gt;"); -} - // ── Module export ── export const booksModule: RSpaceModule = { @@ -311,7 +386,7 @@ export const booksModule: RSpaceModule = { landingPage: renderLanding, async onInit(ctx) { _syncServer = ctx.syncServer; - await initDB(); + console.log("[Books] Module
initialized (Automerge storage)"); }, feeds: [ { diff --git a/modules/rcal/db/schema.sql b/modules/rcal/db/schema.sql.archived similarity index 100% rename from modules/rcal/db/schema.sql rename to modules/rcal/db/schema.sql.archived diff --git a/modules/rcal/mod.ts b/modules/rcal/mod.ts index e889548a..411fe815 100644 --- a/modules/rcal/mod.ts +++ b/modules/rcal/mod.ts @@ -3,69 +3,167 @@ * * Group calendars with lunar/solar/seasonal time systems, * location-aware events, and temporal-spatial zoom coupling. + * + * All persistence uses Automerge documents via SyncServer — + * no PostgreSQL dependency. */ import { Hono } from "hono"; -import { readFileSync } from "node:fs"; -import { resolve } from "node:path"; -import { sql } from "../../shared/db/pool"; +import * as Automerge from "@automerge/automerge"; import { renderShell } from "../../server/shell"; import { getModuleInfoList } from "../../shared/module"; import type { RSpaceModule } from "../../shared/module"; import { verifyEncryptIDToken, extractToken } from "@encryptid/sdk/server"; import { renderLanding } from "./landing"; import type { SyncServer } from '../../server/local-first/sync-server'; -import { calendarSchema } from './schemas'; +import { calendarSchema, calendarDocId } from './schemas'; +import type { CalendarDoc, CalendarEvent, CalendarSource } from './schemas'; let _syncServer: SyncServer | null = null; const routes = new Hono(); -// ── DB initialization ── -const SCHEMA_SQL = readFileSync(resolve(import.meta.dir, "db/schema.sql"), "utf-8"); +// ── Local-first helpers ── -async function initDB() { - try { - await sql.unsafe(SCHEMA_SQL); - console.log("[Cal] DB schema initialized"); - } catch (e) { - console.error("[Cal] DB init error:", e); +/** + * Lazily create the calendar Automerge doc if it doesn't exist yet. + * Returns the current (immutable) doc snapshot. 
+ */ +function ensureDoc(space: string): CalendarDoc { + const docId = calendarDocId(space); + let doc = _syncServer!.getDoc(docId); + if (!doc) { + doc = Automerge.change(Automerge.init(), 'init calendar', (d) => { + const init = calendarSchema.init(); + d.meta = init.meta; + d.meta.spaceSlug = space; + d.sources = {}; + d.events = {}; + }); + _syncServer!.setDoc(docId, doc); } + return doc; } -async function seedDemoIfEmpty() { - try { - const count = await sql.unsafe("SELECT count(*)::int as cnt FROM rcal.events"); - if (parseInt(count[0].cnt) > 0) return; +function daysFromNow(days: number, hours: number, minutes: number): Date { + const d = new Date(); + d.setDate(d.getDate() + days); + d.setHours(hours, minutes, 0, 0); + return d; +} + +/** + * Build an event row object suitable for JSON responses. + * Maps camelCase schema fields to the snake_case format the API previously returned. + */ +function eventToRow(ev: CalendarEvent, sources: Record<string, CalendarSource>) { + const src = ev.sourceId ? sources[ev.sourceId] : undefined; + return { + id: ev.id, + title: ev.title, + description: ev.description, + start_time: ev.startTime ? new Date(ev.startTime).toISOString() : null, + end_time: ev.endTime ? new Date(ev.endTime).toISOString() : null, + all_day: ev.allDay, + timezone: ev.timezone, + rrule: ev.rrule, + status: ev.status, + visibility: ev.visibility, + source_id: ev.sourceId, + source_name: src?.name ?? ev.sourceName ?? null, + source_color: src?.color ?? ev.sourceColor ?? null, + source_type: src?.sourceType ?? ev.sourceType ??
null, + location_id: ev.locationId, + location_name: ev.locationName, + location_label: ev.locationName, + location_lat: ev.locationLat, + location_lng: ev.locationLng, + location_granularity: ev.locationGranularity, + is_virtual: ev.isVirtual, + virtual_url: ev.virtualUrl, + virtual_platform: ev.virtualPlatform, + r_tool_source: ev.rToolSource, + r_tool_entity_id: ev.rToolEntityId, + attendees: ev.attendees, + attendee_count: ev.attendeeCount, + metadata: ev.metadata, + created_at: ev.createdAt ? new Date(ev.createdAt).toISOString() : null, + updated_at: ev.updatedAt ? new Date(ev.updatedAt).toISOString() : null, + }; +} + +/** + * Build a source row object for JSON responses. + */ +function sourceToRow(src: CalendarSource) { + return { + id: src.id, + name: src.name, + source_type: src.sourceType, + url: src.url, + color: src.color, + is_active: src.isActive, + is_visible: src.isVisible, + sync_interval_minutes: src.syncIntervalMinutes, + last_synced_at: src.lastSyncedAt ? new Date(src.lastSyncedAt).toISOString() : null, + owner_id: src.ownerId, + created_at: src.createdAt ? new Date(src.createdAt).toISOString() : null, + }; +} + +/** + * Seed demo data if the doc has no events yet. 
+ */ +function seedDemoIfEmpty(space: string) { + const docId = calendarDocId(space); + const doc = ensureDoc(space); + if (Object.keys(doc.events).length > 0) return; + + _syncServer!.changeDoc(docId, 'seed demo data', (d) => { + const now = Date.now(); // Create calendar sources - const community = await sql.unsafe( - `INSERT INTO rcal.calendar_sources (name, source_type, color, is_active, is_visible) - VALUES ('Community Events', 'MANUAL', '#6366f1', true, true) RETURNING id` - ); - const sprints = await sql.unsafe( - `INSERT INTO rcal.calendar_sources (name, source_type, color, is_active, is_visible) - VALUES ('Development Sprints', 'MANUAL', '#f59e0b', true, true) RETURNING id` - ); - const communityId = community[0].id; - const sprintsId = sprints[0].id; + const communityId = crypto.randomUUID(); + const sprintsId = crypto.randomUUID(); - // Create location hierarchy - const world = await sql.unsafe( - `INSERT INTO rcal.locations (name, granularity) VALUES ('Earth', 1) RETURNING id` - ); - const europe = await sql.unsafe( - `INSERT INTO rcal.locations (name, granularity, parent_id, lat, lng) VALUES ('Europe', 2, $1, 48.8566, 2.3522) RETURNING id`, - [world[0].id] - ); - const berlin = await sql.unsafe( - `INSERT INTO rcal.locations (name, granularity, parent_id, lat, lng) VALUES ('Berlin', 4, $1, 52.52, 13.405) RETURNING id`, - [europe[0].id] - ); + d.sources[communityId] = { + id: communityId, + name: 'Community Events', + sourceType: 'MANUAL', + url: null, + color: '#6366f1', + isActive: true, + isVisible: true, + syncIntervalMinutes: null, + lastSyncedAt: 0, + ownerId: null, + createdAt: now, + }; + d.sources[sprintsId] = { + id: sprintsId, + name: 'Development Sprints', + sourceType: 'MANUAL', + url: null, + color: '#f59e0b', + isActive: true, + isVisible: true, + syncIntervalMinutes: null, + lastSyncedAt: 0, + ownerId: null, + createdAt: now, + }; - // Seed events — past, current week, and future - const now = new Date(); - const events = [ + // Location 
IDs (embedded on events, no separate locations table) + const berlinLocId = crypto.randomUUID(); + + // Seed events + const seedEvents: Array<{ + title: string; desc: string; start: Date; end: Date; + sourceId: string; allDay?: boolean; + locationId?: string; locationName?: string; + locationLat?: number; locationLng?: number; locationGranularity?: string; + isVirtual?: boolean; virtualUrl?: string; virtualPlatform?: string; + }> = [ { title: "rSpace Launch Party", desc: "Celebrating the launch of the unified rSpace platform with all 22 modules live.", @@ -76,13 +174,15 @@ async function seedDemoIfEmpty() { title: "Provider Onboarding Workshop", desc: "Hands-on session for print providers joining the cosmolocal network.", start: daysFromNow(-12, 14, 0), end: daysFromNow(-12, 17, 0), - sourceId: communityId, virtual: true, virtualUrl: "https://meet.jit.si/rspace-providers", virtualPlatform: "Jitsi", + sourceId: communityId, isVirtual: true, + virtualUrl: "https://meet.jit.si/rspace-providers", virtualPlatform: "Jitsi", }, { title: "Weekly Community Standup", desc: "Open standup — share what you're working on, ask for help, coordinate.", start: daysFromNow(0, 16, 0), end: daysFromNow(0, 16, 45), - sourceId: communityId, virtual: true, virtualUrl: "https://meet.jit.si/rspace-standup", virtualPlatform: "Jitsi", + sourceId: communityId, isVirtual: true, + virtualUrl: "https://meet.jit.si/rspace-standup", virtualPlatform: "Jitsi", }, { title: "Sprint: Module Seeding & Polish", @@ -94,77 +194,112 @@ async function seedDemoIfEmpty() { title: "rFunds Budget Review", desc: "Quarterly review of treasury flows, enoughness thresholds, and overflow routing.", start: daysFromNow(6, 15, 0), end: daysFromNow(6, 17, 0), - sourceId: communityId, virtual: true, virtualUrl: "https://meet.jit.si/rfunds-review", virtualPlatform: "Jitsi", + sourceId: communityId, isVirtual: true, + virtualUrl: "https://meet.jit.si/rfunds-review", virtualPlatform: "Jitsi", }, { title: "Cosmolocal Design 
Sprint", desc: "Two-day design sprint on the next generation of cosmolocal tooling.", start: daysFromNow(11, 9, 0), end: daysFromNow(12, 18, 0), - sourceId: sprintsId, locationId: berlin[0].id, locationName: "Druckwerkstatt Berlin", + sourceId: sprintsId, + locationId: berlinLocId, locationName: "Druckwerkstatt Berlin", + locationLat: 52.52, locationLng: 13.405, locationGranularity: "city", }, { title: "Q1 Retrospective", desc: "Looking back at what we built, what worked, and what to improve.", start: daysFromNow(21, 16, 0), end: daysFromNow(21, 18, 0), - sourceId: communityId, virtual: true, virtualUrl: "https://meet.jit.si/rspace-retro", virtualPlatform: "Jitsi", + sourceId: communityId, isVirtual: true, + virtualUrl: "https://meet.jit.si/rspace-retro", virtualPlatform: "Jitsi", }, ]; - for (const e of events) { - await sql.unsafe( - `INSERT INTO rcal.events (title, description, start_time, end_time, all_day, source_id, - location_id, location_name, is_virtual, virtual_url, virtual_platform) - VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)`, - [e.title, e.desc, e.start.toISOString(), e.end.toISOString(), e.allDay || false, - e.sourceId, e.locationId || null, e.locationName || null, - e.virtual || false, e.virtualUrl || null, e.virtualPlatform || null] - ); + for (const e of seedEvents) { + const eventId = crypto.randomUUID(); + d.events[eventId] = { + id: eventId, + title: e.title, + description: e.desc, + startTime: e.start.getTime(), + endTime: e.end.getTime(), + allDay: e.allDay || false, + timezone: 'UTC', + rrule: null, + status: null, + visibility: null, + sourceId: e.sourceId, + sourceName: null, + sourceType: null, + sourceColor: null, + locationId: e.locationId || null, + locationName: e.locationName || null, + coordinates: null, + locationGranularity: e.locationGranularity || null, + locationLat: e.locationLat ?? null, + locationLng: e.locationLng ?? 
null, + isVirtual: e.isVirtual || false, + virtualUrl: e.virtualUrl || null, + virtualPlatform: e.virtualPlatform || null, + rToolSource: null, + rToolEntityId: null, + attendees: [], + attendeeCount: 0, + metadata: null, + createdAt: now, + updatedAt: now, + }; } + }); - console.log("[Cal] Demo data seeded: 2 sources, 3 locations, 7 events"); - } catch (e) { - console.error("[Cal] Seed error:", e); - } -} - -function daysFromNow(days: number, hours: number, minutes: number): Date { - const d = new Date(); - d.setDate(d.getDate() + days); - d.setHours(hours, minutes, 0, 0); - return d; + console.log("[Cal] Demo data seeded: 2 sources, 7 events"); } // ── API: Events ── // GET /api/events — query events with filters routes.get("/api/events", async (c) => { + const space = c.req.param("space") || "demo"; const { start, end, source, search, rTool, rEntityId, upcoming } = c.req.query(); - let where = "WHERE 1=1"; - const params: any[] = []; - let idx = 1; + const doc = ensureDoc(space); + let events = Object.values(doc.events); - if (start) { where += ` AND e.start_time >= $${idx}`; params.push(start); idx++; } - if (end) { where += ` AND e.start_time <= ($${idx}::date + interval '1 day')`; params.push(end); idx++; } - if (source) { where += ` AND e.source_id = $${idx}`; params.push(source); idx++; } - if (search) { where += ` AND (e.title ILIKE $${idx} OR e.description ILIKE $${idx})`; params.push(`%${search}%`); idx++; } - if (rTool) { where += ` AND e.r_tool_source = $${idx}`; params.push(rTool); idx++; } - if (rEntityId) { where += ` AND e.r_tool_entity_id = $${idx}`; params.push(rEntityId); idx++; } + // Apply filters + if (start) { + const startMs = new Date(start).getTime(); + events = events.filter((e) => e.startTime >= startMs); + } + if (end) { + const endMs = new Date(end).getTime() + 86400000; // +1 day + events = events.filter((e) => e.startTime <= endMs); + } + if (source) { + events = events.filter((e) => e.sourceId === source); + } + if (search) { + 
const term = search.toLowerCase(); + events = events.filter((e) => + e.title.toLowerCase().includes(term) || + (e.description && e.description.toLowerCase().includes(term)) + ); + } + if (rTool) { + events = events.filter((e) => e.rToolSource === rTool); + } + if (rEntityId) { + events = events.filter((e) => e.rToolEntityId === rEntityId); + } if (upcoming) { - where += ` AND e.start_time >= NOW() AND e.start_time <= NOW() + ($${idx} || ' days')::interval`; - params.push(upcoming); - idx++; + const nowMs = Date.now(); + const futureMs = nowMs + parseInt(upcoming) * 86400000; + events = events.filter((e) => e.startTime >= nowMs && e.startTime <= futureMs); } - const rows = await sql.unsafe( - `SELECT e.*, cs.name as source_name, cs.color as source_color, l.name as location_label - FROM rcal.events e - LEFT JOIN rcal.calendar_sources cs ON cs.id = e.source_id - LEFT JOIN rcal.locations l ON l.id = e.location_id - ${where} - ORDER BY e.start_time ASC LIMIT 500`, - params - ); + // Sort by start time, limit to 500 + events.sort((a, b) => a.startTime - b.startTime); + events = events.slice(0, 500); + + const rows = events.map((e) => eventToRow(e, doc.sources)); return c.json({ count: rows.length, results: rows }); }); @@ -175,32 +310,65 @@ routes.post("/api/events", async (c) => { let claims; try { claims = await verifyEncryptIDToken(token); } catch { return c.json({ error: "Invalid token" }, 401); } + const space = c.req.param("space") || "demo"; const body = await c.req.json(); const { title, description, start_time, end_time, all_day, timezone, source_id, location_id, location_name, is_virtual, virtual_url, virtual_platform, r_tool_source, r_tool_entity_id } = body; if (!title?.trim() || !start_time) return c.json({ error: "Title and start_time required" }, 400); - const rows = await sql.unsafe( - `INSERT INTO rcal.events (title, description, start_time, end_time, all_day, timezone, source_id, - location_id, location_name, is_virtual, virtual_url, virtual_platform, 
r_tool_source, r_tool_entity_id, created_by) - VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15) RETURNING *`, - [title.trim(), description || null, start_time, end_time || null, all_day || false, timezone || "UTC", - source_id || null, location_id || null, location_name || null, is_virtual || false, - virtual_url || null, virtual_platform || null, r_tool_source || null, r_tool_entity_id || null, claims.sub] - ); - return c.json(rows[0], 201); + const docId = calendarDocId(space); + ensureDoc(space); + const eventId = crypto.randomUUID(); + const now = Date.now(); + + _syncServer!.changeDoc(docId, `create event ${eventId}`, (d) => { + d.events[eventId] = { + id: eventId, + title: title.trim(), + description: description || '', + startTime: new Date(start_time).getTime(), + endTime: end_time ? new Date(end_time).getTime() : 0, + allDay: all_day || false, + timezone: timezone || 'UTC', + rrule: null, + status: null, + visibility: null, + sourceId: source_id || null, + sourceName: null, + sourceType: null, + sourceColor: null, + locationId: location_id || null, + locationName: location_name || null, + coordinates: null, + locationGranularity: null, + locationLat: null, + locationLng: null, + isVirtual: is_virtual || false, + virtualUrl: virtual_url || null, + virtualPlatform: virtual_platform || null, + rToolSource: r_tool_source || null, + rToolEntityId: r_tool_entity_id || null, + attendees: [], + attendeeCount: 0, + metadata: null, + createdAt: now, + updatedAt: now, + }; + }); + + const updated = _syncServer!.getDoc(docId)!; + return c.json(eventToRow(updated.events[eventId], updated.sources), 201); }); // GET /api/events/:id routes.get("/api/events/:id", async (c) => { - const rows = await sql.unsafe( - `SELECT e.*, cs.name as source_name, cs.color as source_color - FROM rcal.events e LEFT JOIN rcal.calendar_sources cs ON cs.id = e.source_id - WHERE e.id = $1`, - [c.req.param("id")] - ); - if (rows.length === 0) return c.json({ error: "Event not found" 
}, 404); - return c.json(rows[0]); + const space = c.req.param("space") || "demo"; + const id = c.req.param("id"); + const doc = ensureDoc(space); + + const ev = doc.events[id]; + if (!ev) return c.json({ error: "Event not found" }, 404); + return c.json(eventToRow(ev, doc.sources)); }); // PATCH /api/events/:id @@ -210,55 +378,93 @@ routes.patch("/api/events/:id", async (c) => { let claims; try { claims = await verifyEncryptIDToken(token); } catch { return c.json({ error: "Invalid token" }, 401); } + const space = c.req.param("space") || "demo"; const id = c.req.param("id"); const body = await c.req.json(); - const fields: string[] = []; - const params: any[] = []; - let idx = 1; - const allowed = ["title", "description", "start_time", "end_time", "all_day", "timezone", - "status", "visibility", "location_name", "is_virtual", "virtual_url"]; + const docId = calendarDocId(space); + const doc = ensureDoc(space); + if (!doc.events[id]) return c.json({ error: "Not found" }, 404); - for (const key of allowed) { - if (body[key] !== undefined) { - fields.push(`${key} = $${idx}`); - params.push(body[key]); - idx++; + // Map of allowed body keys to CalendarEvent fields + const fieldMap: Record = { + title: 'title', + description: 'description', + start_time: 'startTime', + end_time: 'endTime', + all_day: 'allDay', + timezone: 'timezone', + status: 'status', + visibility: 'visibility', + location_name: 'locationName', + is_virtual: 'isVirtual', + virtual_url: 'virtualUrl', + }; + + const updates: Array<{ field: keyof CalendarEvent; value: any }> = []; + for (const [bodyKey, docField] of Object.entries(fieldMap)) { + if (body[bodyKey] !== undefined) { + let value = body[bodyKey]; + // Convert time strings to epoch ms + if (bodyKey === 'start_time' || bodyKey === 'end_time') { + value = new Date(value).getTime(); + } + updates.push({ field: docField, value }); } } - if (fields.length === 0) return c.json({ error: "No fields" }, 400); - fields.push("updated_at = NOW()"); - 
params.push(id); + if (updates.length === 0) return c.json({ error: "No fields" }, 400); - const rows = await sql.unsafe( - `UPDATE rcal.events SET ${fields.join(", ")} WHERE id = $${idx} RETURNING *`, - params - ); - if (rows.length === 0) return c.json({ error: "Not found" }, 404); - return c.json(rows[0]); + _syncServer!.changeDoc(docId, `update event ${id}`, (d) => { + const ev = d.events[id]; + for (const { field, value } of updates) { + (ev as any)[field] = value; + } + ev.updatedAt = Date.now(); + }); + + const updated = _syncServer!.getDoc(docId)!; + return c.json(eventToRow(updated.events[id], updated.sources)); }); // DELETE /api/events/:id routes.delete("/api/events/:id", async (c) => { - const result = await sql.unsafe("DELETE FROM rcal.events WHERE id = $1 RETURNING id", [c.req.param("id")]); - if (result.length === 0) return c.json({ error: "Not found" }, 404); + const space = c.req.param("space") || "demo"; + const id = c.req.param("id"); + + const docId = calendarDocId(space); + const doc = ensureDoc(space); + if (!doc.events[id]) return c.json({ error: "Not found" }, 404); + + _syncServer!.changeDoc(docId, `delete event ${id}`, (d) => { + delete d.events[id]; + }); return c.json({ ok: true }); }); // ── API: Sources ── routes.get("/api/sources", async (c) => { + const space = c.req.param("space") || "demo"; const { is_active, is_visible, source_type } = c.req.query(); - let where = "WHERE 1=1"; - const params: any[] = []; - let idx = 1; + const doc = ensureDoc(space); - if (is_active !== undefined) { where += ` AND is_active = $${idx}`; params.push(is_active === "true"); idx++; } - if (is_visible !== undefined) { where += ` AND is_visible = $${idx}`; params.push(is_visible === "true"); idx++; } - if (source_type) { where += ` AND source_type = $${idx}`; params.push(source_type); idx++; } + let sources = Object.values(doc.sources); - const rows = await sql.unsafe(`SELECT * FROM rcal.calendar_sources ${where} ORDER BY name`, params); + if (is_active 
!== undefined) { + const active = is_active === "true"; + sources = sources.filter((s) => s.isActive === active); + } + if (is_visible !== undefined) { + const visible = is_visible === "true"; + sources = sources.filter((s) => s.isVisible === visible); + } + if (source_type) { + sources = sources.filter((s) => s.sourceType === source_type); + } + + sources.sort((a, b) => a.name.localeCompare(b.name)); + const rows = sources.map(sourceToRow); return c.json({ count: rows.length, results: rows }); }); @@ -267,44 +473,99 @@ routes.post("/api/sources", async (c) => { if (!token) return c.json({ error: "Authentication required" }, 401); try { await verifyEncryptIDToken(token); } catch { return c.json({ error: "Invalid token" }, 401); } + const space = c.req.param("space") || "demo"; const body = await c.req.json(); - const rows = await sql.unsafe( - `INSERT INTO rcal.calendar_sources (name, source_type, url, color, is_active, is_visible) - VALUES ($1, $2, $3, $4, $5, $6) RETURNING *`, - [body.name, body.source_type || "MANUAL", body.url || null, body.color || "#6366f1", - body.is_active ?? true, body.is_visible ?? true] - ); - return c.json(rows[0], 201); + const docId = calendarDocId(space); + ensureDoc(space); + + const sourceId = crypto.randomUUID(); + const now = Date.now(); + + _syncServer!.changeDoc(docId, `create source ${sourceId}`, (d) => { + d.sources[sourceId] = { + id: sourceId, + name: body.name, + sourceType: body.source_type || 'MANUAL', + url: body.url || null, + color: body.color || '#6366f1', + isActive: body.is_active ?? true, + isVisible: body.is_visible ?? true, + syncIntervalMinutes: null, + lastSyncedAt: 0, + ownerId: null, + createdAt: now, + }; + }); + + const updated = _syncServer!.getDoc(docId)!; + return c.json(sourceToRow(updated.sources[sourceId]), 201); }); // ── API: Locations ── +// Locations are now derived from event data (no separate table). +// Each unique locationId/locationName combination is extracted from events. 
+ +interface DerivedLocation { + id: string; + name: string; + granularity: number | null; + parent_id: string | null; + lat: number | null; + lng: number | null; +} + +function deriveLocations(doc: CalendarDoc): DerivedLocation[] { + const seen = new Map(); + for (const ev of Object.values(doc.events)) { + const key = ev.locationId || ev.locationName; + if (!key) continue; + if (seen.has(key)) continue; + seen.set(key, { + id: ev.locationId || key, + name: ev.locationName || key, + granularity: ev.locationGranularity ? parseInt(ev.locationGranularity) || null : null, + parent_id: null, + lat: ev.locationLat, + lng: ev.locationLng, + }); + } + return Array.from(seen.values()); +} routes.get("/api/locations", async (c) => { + const space = c.req.param("space") || "demo"; const { granularity, parent, search, root } = c.req.query(); - let where = "WHERE 1=1"; - const params: any[] = []; - let idx = 1; + const doc = ensureDoc(space); - if (root === "true") { where += " AND parent_id IS NULL"; } - if (granularity) { where += ` AND granularity = $${idx}`; params.push(parseInt(granularity)); idx++; } - if (parent) { where += ` AND parent_id = $${idx}`; params.push(parent); idx++; } - if (search) { where += ` AND name ILIKE $${idx}`; params.push(`%${search}%`); idx++; } + let locations = deriveLocations(doc); - const rows = await sql.unsafe(`SELECT * FROM rcal.locations ${where} ORDER BY name`, params); - return c.json(rows); + if (root === "true") { + locations = locations.filter((l) => l.parent_id === null); + } + if (granularity) { + const g = parseInt(granularity); + locations = locations.filter((l) => l.granularity === g); + } + if (parent) { + locations = locations.filter((l) => l.parent_id === parent); + } + if (search) { + const term = search.toLowerCase(); + locations = locations.filter((l) => l.name.toLowerCase().includes(term)); + } + + locations.sort((a, b) => a.name.localeCompare(b.name)); + return c.json(locations); }); routes.get("/api/locations/tree", async 
(c) => { - const rows = await sql.unsafe( - `WITH RECURSIVE tree AS ( - SELECT id, name, granularity, parent_id, 0 as depth FROM rcal.locations WHERE parent_id IS NULL - UNION ALL - SELECT l.id, l.name, l.granularity, l.parent_id, t.depth + 1 - FROM rcal.locations l JOIN tree t ON l.parent_id = t.id - ) - SELECT * FROM tree ORDER BY depth, name` - ); - return c.json(rows); + const space = c.req.param("space") || "demo"; + const doc = ensureDoc(space); + + // Flat list with depth=0 since hierarchical parent_id data is not stored in Automerge + const locations = deriveLocations(doc).map((l) => ({ ...l, depth: 0 })); + locations.sort((a, b) => a.name.localeCompare(b.name)); + return c.json(locations); }); // ── API: Lunar data (computed, not stored) ── @@ -349,29 +610,30 @@ routes.get("/api/lunar", async (c) => { // ── API: Stats ── routes.get("/api/stats", async (c) => { - const [eventCount, sourceCount, locationCount] = await Promise.all([ - sql.unsafe("SELECT count(*)::int as cnt FROM rcal.events"), - sql.unsafe("SELECT count(*)::int as cnt FROM rcal.calendar_sources WHERE is_active = true"), - sql.unsafe("SELECT count(*)::int as cnt FROM rcal.locations"), - ]); - return c.json({ - events: eventCount[0]?.cnt || 0, - sources: sourceCount[0]?.cnt || 0, - locations: locationCount[0]?.cnt || 0, - }); + const space = c.req.param("space") || "demo"; + const doc = ensureDoc(space); + + const events = Object.values(doc.events).length; + const sources = Object.values(doc.sources).filter((s) => s.isActive).length; + const locations = deriveLocations(doc).length; + + return c.json({ events, sources, locations }); }); // ── API: Context (r* tool bridge) ── routes.get("/api/context/:tool", async (c) => { + const space = c.req.param("space") || "demo"; const tool = c.req.param("tool"); const entityId = c.req.query("entityId"); if (!entityId) return c.json({ error: "entityId required" }, 400); - const rows = await sql.unsafe( - "SELECT * FROM rcal.events WHERE r_tool_source = $1 
AND r_tool_entity_id = $2 ORDER BY start_time", - [tool, entityId] - ); + const doc = ensureDoc(space); + const matching = Object.values(doc.events) + .filter((e) => e.rToolSource === tool && e.rToolEntityId === entityId) + .sort((a, b) => a.startTime - b.startTime); + + const rows = matching.map((e) => eventToRow(e, doc.sources)); return c.json({ count: rows.length, results: rows }); }); @@ -403,8 +665,8 @@ export const calModule: RSpaceModule = { landingPage: renderLanding, async onInit(ctx) { _syncServer = ctx.syncServer; - await initDB(); - await seedDemoIfEmpty(); + // Seed demo data for the default space + seedDemoIfEmpty("demo"); }, feeds: [ { diff --git a/modules/rcart/db/schema.sql b/modules/rcart/db/schema.sql.archived similarity index 100% rename from modules/rcart/db/schema.sql rename to modules/rcart/db/schema.sql.archived diff --git a/modules/rcart/mod.ts b/modules/rcart/mod.ts index 9e4ec151..4fe0f6f3 100644 --- a/modules/rcart/mod.ts +++ b/modules/rcart/mod.ts @@ -4,12 +4,12 @@ * Ported from /opt/apps/rcart/ (Express → Hono). * Handles catalog (artifact listings), orders, fulfillment resolution. * Integrates with provider-registry for provider matching and flow-service for revenue splits. + * + * Storage: Automerge documents via SyncServer (no PostgreSQL). 
*/ +import * as Automerge from "@automerge/automerge"; import { Hono } from "hono"; -import { readFileSync } from "node:fs"; -import { resolve } from "node:path"; -import { sql } from "../../shared/db/pool"; import { renderShell } from "../../server/shell"; import { getModuleInfoList } from "../../shared/module"; import { depositOrderRevenue } from "./flow"; @@ -17,24 +17,17 @@ import type { RSpaceModule } from "../../shared/module"; import { verifyEncryptIDToken, extractToken } from "@encryptid/sdk/server"; import { renderLanding } from "./landing"; import type { SyncServer } from '../../server/local-first/sync-server'; -import { catalogSchema, orderSchema } from './schemas'; +import { + catalogSchema, orderSchema, + catalogDocId, orderDocId, + type CatalogDoc, type CatalogEntry, + type OrderDoc, type OrderMeta, +} from './schemas'; let _syncServer: SyncServer | null = null; const routes = new Hono(); -// ── DB initialization ── -const SCHEMA_SQL = readFileSync(resolve(import.meta.dir, "db/schema.sql"), "utf-8"); - -async function initDB() { - try { - await sql.unsafe(SCHEMA_SQL); - console.log("[Cart] DB schema initialized"); - } catch (e) { - console.error("[Cart] DB init error:", e); - } -} - // Provider registry URL (for fulfillment resolution) const PROVIDER_REGISTRY_URL = process.env.PROVIDER_REGISTRY_URL || ""; @@ -44,10 +37,41 @@ function getProviderUrl(): string { return PROVIDER_REGISTRY_URL || "http://localhost:3000/demo/providers"; } +// ── Automerge helpers ── + +/** Lazily create (or retrieve) the catalog doc for a space. 
*/ +function ensureCatalogDoc(space: string): Automerge.Doc { + const docId = catalogDocId(space); + let doc = _syncServer!.getDoc(docId); + if (!doc) { + doc = Automerge.change(Automerge.init(), 'init catalog', (d) => { + const init = catalogSchema.init(); + Object.assign(d, init); + d.meta.spaceSlug = space; + }); + _syncServer!.setDoc(docId, doc); + } + return doc; +} + +/** Get all order docs for a space by scanning known doc IDs. */ +function getSpaceOrderDocs(space: string): Array<{ docId: string; doc: Automerge.Doc }> { + const prefix = `${space}:cart:orders:`; + const results: Array<{ docId: string; doc: Automerge.Doc }> = []; + for (const id of _syncServer!.listDocs()) { + if (id.startsWith(prefix)) { + const doc = _syncServer!.getDoc(id); + if (doc) results.push({ docId: id, doc }); + } + } + return results; +} + // ── CATALOG ROUTES ── // POST /api/catalog/ingest — Add artifact to catalog routes.post("/api/catalog/ingest", async (c) => { + const space = c.req.param("space") || "demo"; const artifact = await c.req.json(); if (!artifact.id || !artifact.schema_version || !artifact.type) { @@ -60,121 +84,151 @@ routes.post("/api/catalog/ingest", async (c) => { return c.json({ error: "print-ready artifacts must have at least one render_target" }, 400); } - const existing = await sql.unsafe("SELECT id FROM rcart.catalog_entries WHERE artifact_id = $1", [artifact.id]); - if (existing.length > 0) { - return c.json({ error: "Artifact already listed", catalog_entry_id: existing[0].id }, 409); + const doc = ensureCatalogDoc(space); + + // Check for duplicate artifact_id + for (const [, entry] of Object.entries(doc.items)) { + if (entry.artifactId === artifact.id) { + return c.json({ error: "Artifact already listed", catalog_entry_id: entry.id }, 409); + } } - const result = await sql.unsafe( - `INSERT INTO rcart.catalog_entries ( - artifact_id, artifact, title, product_type, - required_capabilities, substrates, creator_id, - source_space, tags - ) VALUES ($1, $2, 
$3, $4, $5, $6, $7, $8, $9) - RETURNING id, artifact_id, title, product_type, status, created_at`, - [ - artifact.id, JSON.stringify(artifact), - artifact.payload?.title || "Untitled", - artifact.spec?.product_type || null, - artifact.spec?.required_capabilities || [], - artifact.spec?.substrates || [], - artifact.creator?.id || null, - artifact.source_space || null, - artifact.payload?.tags || [], - ] - ); + const entryId = crypto.randomUUID(); + const now = Date.now(); - return c.json(result[0], 201); + const docId = catalogDocId(space); + _syncServer!.changeDoc(docId, 'ingest catalog entry', (d) => { + d.items[entryId] = { + id: entryId, + artifactId: artifact.id, + artifact: artifact, + title: artifact.payload?.title || "Untitled", + productType: artifact.spec?.product_type || null, + requiredCapabilities: artifact.spec?.required_capabilities || [], + substrates: artifact.spec?.substrates || [], + creatorId: artifact.creator?.id || null, + sourceSpace: artifact.source_space || space, + tags: artifact.payload?.tags || [], + status: "active", + createdAt: now, + updatedAt: now, + }; + }); + + return c.json({ + id: entryId, + artifact_id: artifact.id, + title: artifact.payload?.title || "Untitled", + product_type: artifact.spec?.product_type || null, + status: "active", + created_at: new Date(now).toISOString(), + }, 201); }); // GET /api/catalog — Browse catalog routes.get("/api/catalog", async (c) => { + const space = c.req.param("space") || "demo"; const { product_type, capability, tag, source_space, q, limit = "50", offset = "0" } = c.req.query(); - const conditions: string[] = ["status = 'active'"]; - const params: any[] = []; - let paramIdx = 1; + const doc = ensureCatalogDoc(space); + let entries = Object.values(doc.items); - if (product_type) { - conditions.push(`product_type = $${paramIdx}`); - params.push(product_type); - paramIdx++; - } + // Apply filters + entries = entries.filter((e) => e.status === "active"); + if (product_type) entries = 
entries.filter((e) => e.productType === product_type); if (capability) { - conditions.push(`required_capabilities && $${paramIdx}`); - params.push(capability.split(",")); - paramIdx++; - } - if (tag) { - conditions.push(`$${paramIdx} = ANY(tags)`); - params.push(tag); - paramIdx++; - } - if (source_space) { - conditions.push(`source_space = $${paramIdx}`); - params.push(source_space); - paramIdx++; + const caps = capability.split(","); + entries = entries.filter((e) => caps.some((cap) => e.requiredCapabilities.includes(cap))); } + if (tag) entries = entries.filter((e) => e.tags.includes(tag)); + if (source_space) entries = entries.filter((e) => e.sourceSpace === source_space); if (q) { - conditions.push(`title ILIKE $${paramIdx}`); - params.push(`%${q}%`); - paramIdx++; + const lower = q.toLowerCase(); + entries = entries.filter((e) => e.title.toLowerCase().includes(lower)); } - const where = conditions.join(" AND "); + // Sort by createdAt descending + entries.sort((a, b) => b.createdAt - a.createdAt); + const limitNum = Math.min(parseInt(limit) || 50, 100); const offsetNum = parseInt(offset) || 0; + const total = entries.length; + const paged = entries.slice(offsetNum, offsetNum + limitNum); - const [result, countResult] = await Promise.all([ - sql.unsafe( - `SELECT id, artifact_id, title, product_type, - required_capabilities, tags, source_space, - artifact->'payload'->>'description' as description, - artifact->'pricing' as pricing, - artifact->'spec'->'dimensions' as dimensions, - status, created_at - FROM rcart.catalog_entries - WHERE ${where} - ORDER BY created_at DESC - LIMIT ${limitNum} OFFSET ${offsetNum}`, - params - ), - sql.unsafe(`SELECT count(*) FROM rcart.catalog_entries WHERE ${where}`, params), - ]); + // Map to response shape matching the original SQL response + const result = paged.map((e) => { + const art = e.artifact as Record | undefined; + return { + id: e.id, + artifact_id: e.artifactId, + title: e.title, + product_type: e.productType, + 
required_capabilities: e.requiredCapabilities, + tags: e.tags, + source_space: e.sourceSpace, + description: art?.payload?.description || null, + pricing: art?.pricing || null, + dimensions: art?.spec?.dimensions || null, + status: e.status, + created_at: new Date(e.createdAt).toISOString(), + }; + }); - return c.json({ entries: result, total: parseInt(countResult[0].count as string), limit: limitNum, offset: offsetNum }); + return c.json({ entries: result, total, limit: limitNum, offset: offsetNum }); }); // GET /api/catalog/:id — Single catalog entry routes.get("/api/catalog/:id", async (c) => { + const space = c.req.param("space") || "demo"; const id = c.req.param("id"); - const result = await sql.unsafe( - "SELECT * FROM rcart.catalog_entries WHERE id = $1 OR artifact_id = $1", - [id] - ); - if (result.length === 0) return c.json({ error: "Catalog entry not found" }, 404); - const row = result[0]; - return c.json({ id: row.id, artifact: row.artifact, status: row.status, created_at: row.created_at, updated_at: row.updated_at }); + const doc = ensureCatalogDoc(space); + + // Look up by entry id or artifact id + let entry: CatalogEntry | undefined; + if (doc.items[id]) { + entry = doc.items[id]; + } else { + entry = Object.values(doc.items).find((e) => e.artifactId === id); + } + + if (!entry) return c.json({ error: "Catalog entry not found" }, 404); + + return c.json({ + id: entry.id, + artifact: entry.artifact, + status: entry.status, + created_at: new Date(entry.createdAt).toISOString(), + updated_at: new Date(entry.updatedAt).toISOString(), + }); }); // PATCH /api/catalog/:id — Update listing status routes.patch("/api/catalog/:id", async (c) => { + const space = c.req.param("space") || "demo"; const { status } = await c.req.json(); const valid = ["active", "paused", "sold_out", "removed"]; if (!valid.includes(status)) return c.json({ error: `status must be one of: ${valid.join(", ")}` }, 400); - const result = await sql.unsafe( - "UPDATE rcart.catalog_entries 
SET status = $1, updated_at = NOW() WHERE id = $2 RETURNING id, status", - [status, c.req.param("id")] - ); - if (result.length === 0) return c.json({ error: "Catalog entry not found" }, 404); - return c.json(result[0]); + const doc = ensureCatalogDoc(space); + const entryId = c.req.param("id"); + + if (!doc.items[entryId]) return c.json({ error: "Catalog entry not found" }, 404); + + const docId = catalogDocId(space); + _syncServer!.changeDoc(docId, `update catalog status → ${status}`, (d) => { + d.items[entryId].status = status; + d.items[entryId].updatedAt = Date.now(); + }); + + return c.json({ id: entryId, status }); }); // ── ORDER ROUTES ── // POST /api/orders — Create an order routes.post("/api/orders", async (c) => { + const space = c.req.param("space") || "demo"; + // Optional auth — set buyer_did from claims if authenticated const token = extractToken(c.req.raw.headers); let buyerDid: string | null = null; @@ -194,51 +248,70 @@ routes.post("/api/orders", async (c) => { if (!catalog_entry_id && !artifact_id) return c.json({ error: "Required: catalog_entry_id or artifact_id" }, 400); if (!provider_id || !total_price) return c.json({ error: "Required: provider_id, total_price" }, 400); - const entryResult = await sql.unsafe( - "SELECT id, artifact_id FROM rcart.catalog_entries WHERE id = $1 OR artifact_id = $1", - [catalog_entry_id || artifact_id] - ); - if (entryResult.length === 0) return c.json({ error: "Catalog entry not found" }, 404); - - const entry = entryResult[0]; + // Look up catalog entry + const catalogDoc = ensureCatalogDoc(space); + const lookupId = catalog_entry_id || artifact_id; + let entry: CatalogEntry | undefined; + if (catalogDoc.items[lookupId]) { + entry = catalogDoc.items[lookupId]; + } else { + entry = Object.values(catalogDoc.items).find((e) => e.artifactId === lookupId || e.id === lookupId); + } + if (!entry) return c.json({ error: "Catalog entry not found" }, 404); // x402 detection const x402Header = c.req.header("x-payment"); 
const effectiveMethod = x402Header ? "x402" : payment_method; const initialStatus = x402Header ? "paid" : "pending"; - const result = await sql.unsafe( - `INSERT INTO rcart.orders ( - catalog_entry_id, artifact_id, buyer_id, buyer_location, buyer_contact, - provider_id, provider_name, provider_distance_km, - quantity, production_cost, creator_payout, community_payout, - total_price, currency, status, payment_method, payment_tx, payment_network - ${initialStatus === "paid" ? ", paid_at" : ""} - ) VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18 - ${initialStatus === "paid" ? ", NOW()" : ""}) - RETURNING *`, - [ - entry.id, entry.artifact_id, - buyerDid || buyer_id || null, - buyer_location ? JSON.stringify(buyer_location) : null, - buyer_contact ? JSON.stringify(buyer_contact) : null, - provider_id, provider_name || null, provider_distance_km || null, - quantity, production_cost || null, creator_payout || null, community_payout || null, - total_price, currency, initialStatus, effectiveMethod, - payment_tx || null, payment_network || null, - ] - ); + const orderId = crypto.randomUUID(); + const now = Date.now(); + + // Create order doc + const oDocId = orderDocId(space, orderId); + let orderDoc = Automerge.change(Automerge.init(), 'create order', (d) => { + const init = orderSchema.init(); + Object.assign(d, init); + d.meta.spaceSlug = space; + d.order.id = orderId; + d.order.catalogEntryId = entry!.id; + d.order.artifactId = entry!.artifactId; + d.order.buyerId = buyerDid || buyer_id || null; + d.order.buyerLocation = buyer_location ? JSON.stringify(buyer_location) : null; + d.order.buyerContact = buyer_contact ? 
JSON.stringify(buyer_contact) : null; + d.order.providerId = provider_id; + d.order.providerName = provider_name || null; + d.order.providerDistanceKm = provider_distance_km || null; + d.order.quantity = quantity; + d.order.productionCost = production_cost || null; + d.order.creatorPayout = creator_payout || null; + d.order.communityPayout = community_payout || null; + d.order.totalPrice = total_price; + d.order.currency = currency; + d.order.status = initialStatus; + d.order.paymentMethod = effectiveMethod; + d.order.paymentTx = payment_tx || null; + d.order.paymentNetwork = payment_network || null; + d.order.createdAt = now; + d.order.updatedAt = now; + if (initialStatus === "paid") d.order.paidAt = now; + }); + _syncServer!.setDoc(oDocId, orderDoc); + + const order = orderDoc.order; - const order = result[0]; if (initialStatus === "paid") { - depositOrderRevenue(total_price, order.id); + depositOrderRevenue(total_price, orderId); } - return c.json(order, 201); + // Return response matching original shape + return c.json(orderToResponse(order, entry), 201); }); // GET /api/orders — List orders routes.get("/api/orders", async (c) => { + const space = c.req.param("space") || "demo"; + // Optional auth — filter by buyer if authenticated const token = extractToken(c.req.raw.headers); let authedBuyer: string | null = null; @@ -248,73 +321,156 @@ routes.get("/api/orders", async (c) => { const { status, provider_id, buyer_id, limit = "50", offset = "0" } = c.req.query(); - const conditions: string[] = []; - const params: any[] = []; - let paramIdx = 1; + const orderDocs = getSpaceOrderDocs(space); - if (status) { conditions.push(`o.status = $${paramIdx}`); params.push(status); paramIdx++; } - if (provider_id) { conditions.push(`o.provider_id = $${paramIdx}`); params.push(provider_id); paramIdx++; } + // Build enriched order list with catalog info + const catalogDoc = ensureCatalogDoc(space); + + let orders = orderDocs.map(({ doc }) => { + const o = doc.order; + const 
catEntry = catalogDoc.items[o.catalogEntryId]; + const resp = orderToResponse(o); + resp.artifact_title = catEntry?.title || null; + resp.product_type = catEntry?.productType || null; + return resp; + }); + + // Apply filters + if (status) orders = orders.filter((o) => o.status === status); + if (provider_id) orders = orders.filter((o) => o.provider_id === provider_id); const effectiveBuyerId = buyer_id || (authedBuyer && !status && !provider_id ? authedBuyer : null); - if (effectiveBuyerId) { conditions.push(`o.buyer_id = $${paramIdx}`); params.push(effectiveBuyerId); paramIdx++; } + if (effectiveBuyerId) orders = orders.filter((o) => o.buyer_id === effectiveBuyerId); + + // Sort by created_at descending + orders.sort((a, b) => new Date(b.created_at).getTime() - new Date(a.created_at).getTime()); - const where = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : ""; const limitNum = Math.min(parseInt(limit) || 50, 100); const offsetNum = parseInt(offset) || 0; + const paged = orders.slice(offsetNum, offsetNum + limitNum); - const result = await sql.unsafe( - `SELECT o.*, c.title as artifact_title, c.product_type - FROM rcart.orders o JOIN rcart.catalog_entries c ON c.id = o.catalog_entry_id - ${where} ORDER BY o.created_at DESC LIMIT ${limitNum} OFFSET ${offsetNum}`, - params - ); - - return c.json({ orders: result }); + return c.json({ orders: paged }); }); // GET /api/orders/:id — Single order routes.get("/api/orders/:id", async (c) => { - const result = await sql.unsafe( - `SELECT o.*, c.artifact as artifact_envelope, c.title as artifact_title - FROM rcart.orders o JOIN rcart.catalog_entries c ON c.id = o.catalog_entry_id - WHERE o.id = $1`, - [c.req.param("id")] - ); - if (result.length === 0) return c.json({ error: "Order not found" }, 404); - return c.json(result[0]); + const space = c.req.param("space") || "demo"; + const orderId = c.req.param("id"); + const oDocId = orderDocId(space, orderId); + const doc = _syncServer!.getDoc(oDocId); + if 
(!doc) return c.json({ error: "Order not found" }, 404); + + const catalogDoc = ensureCatalogDoc(space); + const catEntry = catalogDoc.items[doc.order.catalogEntryId]; + + const resp = orderToResponse(doc.order); + resp.artifact_envelope = catEntry?.artifact || null; + resp.artifact_title = catEntry?.title || null; + return c.json(resp); }); // PATCH /api/orders/:id/status — Update order status routes.patch("/api/orders/:id/status", async (c) => { + const space = c.req.param("space") || "demo"; const body = await c.req.json(); const { status, payment_tx, payment_network } = body; const valid = ["pending", "paid", "accepted", "in_production", "ready", "shipped", "completed", "cancelled"]; if (!valid.includes(status)) return c.json({ error: `status must be one of: ${valid.join(", ")}` }, 400); - const timestampField: Record = { paid: "paid_at", accepted: "accepted_at", completed: "completed_at" }; - const extraSet = timestampField[status] ? `, ${timestampField[status]} = NOW()` : ""; + const orderId = c.req.param("id"); + const oDocId = orderDocId(space, orderId); + const doc = _syncServer!.getDoc(oDocId); + if (!doc) return c.json({ error: "Order not found" }, 404); - // Use parameterized query for payment info - let paymentSet = ""; - const params: any[] = [status, c.req.param("id")]; - if (status === "paid" && payment_tx) { - paymentSet = `, payment_tx = $3, payment_network = $4`; - params.push(payment_tx, payment_network || null); + const now = Date.now(); + + const updated = _syncServer!.changeDoc(oDocId, `order status → ${status}`, (d) => { + d.order.status = status; + d.order.updatedAt = now; + if (status === "paid") d.order.paidAt = now; + if (status === "accepted") d.order.acceptedAt = now; + if (status === "completed") d.order.completedAt = now; + if (status === "paid" && payment_tx) { + d.order.paymentTx = payment_tx; + d.order.paymentNetwork = payment_network || null; + } + }); + + if (!updated) return c.json({ error: "Order not found" }, 404); + + if 
(status === "paid" && updated.order.totalPrice) { + depositOrderRevenue(updated.order.totalPrice, orderId); } - const result = await sql.unsafe( - `UPDATE rcart.orders SET status = $1, updated_at = NOW()${extraSet}${paymentSet} WHERE id = $2 RETURNING *`, - params - ); - if (result.length === 0) return c.json({ error: "Order not found" }, 404); - - const updated = result[0]; - if (status === "paid" && updated.total_price) { - depositOrderRevenue(updated.total_price, c.req.param("id")); - } - - return c.json(updated); + return c.json(orderToResponse(updated.order)); }); +// ── Response helpers ── + +interface OrderResponse { + id: string; + catalog_entry_id: string; + artifact_id: string; + buyer_id: string | null; + buyer_location: unknown; + buyer_contact: unknown; + provider_id: string | null; + provider_name: string | null; + provider_distance_km: number | null; + quantity: number; + production_cost: number | null; + creator_payout: number | null; + community_payout: number | null; + total_price: number | null; + currency: string; + status: string; + payment_method: string | null; + payment_tx: string | null; + payment_network: string | null; + created_at: string; + paid_at: string | null; + accepted_at: string | null; + completed_at: string | null; + updated_at: string; + artifact_title?: string | null; + product_type?: string | null; + artifact_envelope?: unknown; +} + +/** Convert an OrderMeta to the flat response shape matching the original SQL rows. */ +function orderToResponse(o: OrderMeta, catEntry?: CatalogEntry): OrderResponse { + return { + id: o.id, + catalog_entry_id: o.catalogEntryId, + artifact_id: o.artifactId, + buyer_id: o.buyerId, + buyer_location: o.buyerLocation ? tryParse(o.buyerLocation) : null, + buyer_contact: o.buyerContact ? 
tryParse(o.buyerContact) : null, + provider_id: o.providerId, + provider_name: o.providerName, + provider_distance_km: o.providerDistanceKm, + quantity: o.quantity, + production_cost: o.productionCost, + creator_payout: o.creatorPayout, + community_payout: o.communityPayout, + total_price: o.totalPrice, + currency: o.currency, + status: o.status, + payment_method: o.paymentMethod, + payment_tx: o.paymentTx, + payment_network: o.paymentNetwork, + created_at: new Date(o.createdAt).toISOString(), + paid_at: o.paidAt ? new Date(o.paidAt).toISOString() : null, + accepted_at: o.acceptedAt ? new Date(o.acceptedAt).toISOString() : null, + completed_at: o.completedAt ? new Date(o.completedAt).toISOString() : null, + updated_at: new Date(o.updatedAt).toISOString(), + ...(catEntry ? { artifact_title: catEntry.title, product_type: catEntry.productType } : {}), + }; +} + +function tryParse(s: string): unknown { + try { return JSON.parse(s); } catch { return s; } +} + // ── FULFILLMENT ROUTES ── function round2(n: number): number { @@ -365,6 +521,7 @@ function composeCost(artifact: Record, provider: ProviderMatch, // POST /api/fulfill/resolve — Find fulfillment options routes.post("/api/fulfill/resolve", async (c) => { + const space = c.req.param("space") || "demo"; const body = await c.req.json(); const { artifact_id, catalog_entry_id, buyer_location, quantity = 1 } = body; @@ -375,14 +532,21 @@ routes.post("/api/fulfill/resolve", async (c) => { return c.json({ error: "Required: artifact_id or catalog_entry_id" }, 400); } - const entryResult = await sql.unsafe( - "SELECT * FROM rcart.catalog_entries WHERE (artifact_id = $1 OR id = $1) AND status = 'active'", - [artifact_id || catalog_entry_id] - ); - if (entryResult.length === 0) return c.json({ error: "Artifact not found in catalog" }, 404); + const catalogDoc = ensureCatalogDoc(space); + const lookupId = artifact_id || catalog_entry_id; - const entry = entryResult[0]; - const artifact = entry.artifact; + // Find entry by id 
or artifact_id, must be active + let entry: CatalogEntry | undefined; + if (catalogDoc.items[lookupId] && catalogDoc.items[lookupId].status === "active") { + entry = catalogDoc.items[lookupId]; + } else { + entry = Object.values(catalogDoc.items).find( + (e) => (e.artifactId === lookupId || e.id === lookupId) && e.status === "active" + ); + } + if (!entry) return c.json({ error: "Artifact not found in catalog" }, 404); + + const artifact = entry.artifact as Record; const capabilities = artifact.spec?.required_capabilities || []; const substrates = artifact.spec?.substrates || []; @@ -471,7 +635,6 @@ export const cartModule: RSpaceModule = { landingPage: renderLanding, async onInit(ctx) { _syncServer = ctx.syncServer; - await initDB(); }, feeds: [ { diff --git a/modules/rfiles/db/schema.sql b/modules/rfiles/db/schema.sql.archived similarity index 100% rename from modules/rfiles/db/schema.sql rename to modules/rfiles/db/schema.sql.archived diff --git a/modules/rfiles/mod.ts b/modules/rfiles/mod.ts index a678b66b..bbd6086a 100644 --- a/modules/rfiles/mod.ts +++ b/modules/rfiles/mod.ts @@ -1,54 +1,141 @@ /** * Files module — file sharing, public share links, memory cards. * Ported from rfiles-online (Django → Bun/Hono). + * + * All metadata is stored in Automerge documents via SyncServer. + * Binary files remain on the filesystem. 
*/ import { Hono } from "hono"; -import { readFileSync } from "node:fs"; import { resolve } from "node:path"; import { mkdir, writeFile, unlink } from "node:fs/promises"; import { createHash, randomBytes } from "node:crypto"; -import { sql } from "../../shared/db/pool"; +import * as Automerge from "@automerge/automerge"; import { renderShell, renderExternalAppShell } from "../../server/shell"; import { getModuleInfoList } from "../../shared/module"; import type { RSpaceModule } from "../../shared/module"; import { verifyEncryptIDToken, extractToken } from "@encryptid/sdk/server"; import { renderLanding } from "./landing"; import type { SyncServer } from '../../server/local-first/sync-server'; -import { filesSchema } from './schemas'; +import { filesSchema, filesDocId } from './schemas'; +import type { FilesDoc, MediaFile, MemoryCard } from './schemas'; + +// ── Extended doc types (shares + access logs live alongside files/cards) ── + +interface PublicShare { + id: string; + token: string; + mediaFileId: string; + createdBy: string | null; + expiresAt: number | null; // epoch ms, null = never + maxDownloads: number | null; + downloadCount: number; + isActive: boolean; + isPasswordProtected: boolean; + passwordHash: string | null; + note: string | null; + createdAt: number; +} + +interface AccessLog { + id: string; + mediaFileId: string; + shareId: string | null; + ipAddress: string | null; + userAgent: string | null; + accessType: string; + accessedAt: number; +} + +/** + * Extended doc shape — supplements FilesDoc with shares and access logs. + * The base FilesDoc from schemas.ts defines files + memoryCards; + * we add shares and accessLogs as additional top-level maps. 
+ */ +interface FilesDocExt extends FilesDoc { + shares: Record; + accessLogs: Record; +} let _syncServer: SyncServer | null = null; const routes = new Hono(); const FILES_DIR = process.env.FILES_DIR || "/data/files"; -const SCHEMA_SQL = readFileSync(resolve(import.meta.dir, "db/schema.sql"), "utf-8"); -// ── DB initialization ── -async function initDB() { - try { - await sql.unsafe(SCHEMA_SQL); - console.log("[Files] DB schema initialized"); - } catch (e: any) { - console.error("[Files] DB init error:", e.message); +// ── Automerge document helpers ── + +function ensureDoc(space: string, sharedSpace: string = "default"): FilesDocExt { + const docId = filesDocId(space, sharedSpace); + let doc = _syncServer!.getDoc(docId); + if (!doc) { + doc = Automerge.change(Automerge.init(), 'init files doc', (d) => { + const init = filesSchema.init(); + d.meta = init.meta; + d.meta.spaceSlug = space; + d.meta.sharedSpace = sharedSpace; + d.files = {}; + d.memoryCards = {}; + d.shares = {}; + d.accessLogs = {}; + }); + _syncServer!.setDoc(docId, doc); } + // Ensure shares/accessLogs exist on legacy docs that predate these fields + if (!doc.shares || !doc.accessLogs) { + doc = _syncServer!.changeDoc(docId, 'add shares+logs maps', (d) => { + if (!(d as any).shares) (d as any).shares = {}; + if (!(d as any).accessLogs) (d as any).accessLogs = {}; + })!; + } + return doc; } // ── Cleanup timers (replace Celery) ── // Deactivate expired shares every hour -setInterval(async () => { +setInterval(() => { + if (!_syncServer) return; try { - const result = await sql.unsafe( - "UPDATE rfiles.public_shares SET is_active = FALSE WHERE is_active = TRUE AND expires_at IS NOT NULL AND expires_at < NOW()" - ); - if ((result as any).count > 0) console.log(`[Files] Deactivated ${(result as any).count} expired shares`); + const now = Date.now(); + for (const docId of _syncServer.getDocIds()) { + if (!docId.includes(':files:cards:')) continue; + const doc = _syncServer.getDoc(docId); + if 
(!doc?.shares) continue; + const toDeactivate = Object.values(doc.shares).filter( + (s) => s.isActive && s.expiresAt !== null && s.expiresAt < now + ); + if (toDeactivate.length > 0) { + _syncServer.changeDoc(docId, 'deactivate expired shares', (d) => { + for (const s of toDeactivate) { + if (d.shares[s.id]) d.shares[s.id].isActive = false; + } + }); + console.log(`[Files] Deactivated ${toDeactivate.length} expired shares in ${docId}`); + } + } } catch (e: any) { console.error("[Files] Cleanup error:", e.message); } }, 3600_000); // Delete access logs older than 90 days, daily -setInterval(async () => { +setInterval(() => { + if (!_syncServer) return; try { - await sql.unsafe("DELETE FROM rfiles.access_logs WHERE accessed_at < NOW() - INTERVAL '90 days'"); + const cutoff = Date.now() - 90 * 86400_000; + for (const docId of _syncServer.getDocIds()) { + if (!docId.includes(':files:cards:')) continue; + const doc = _syncServer.getDoc(docId); + if (!doc?.accessLogs) continue; + const toDelete = Object.values(doc.accessLogs).filter( + (l) => l.accessedAt < cutoff + ); + if (toDelete.length > 0) { + _syncServer.changeDoc(docId, 'prune old access logs', (d) => { + for (const l of toDelete) { + delete d.accessLogs[l.id]; + } + }); + } + } } catch (e: any) { console.error("[Files] Log cleanup error:", e.message); } }, 86400_000); @@ -69,6 +156,11 @@ async function computeFileHash(buffer: ArrayBuffer): Promise { return hash.digest("hex"); } +/** Serialize a doc-sourced object for JSON responses (strip Automerge proxies). 
*/ +function toPlain(obj: T): T { + return JSON.parse(JSON.stringify(obj)); +} + // ── File upload ── routes.post("/api/files", async (c) => { const token = extractToken(c.req.raw.headers); @@ -83,7 +175,9 @@ routes.post("/api/files", async (c) => { const space = c.req.param("space") || formData.get("space")?.toString() || "default"; const title = formData.get("title")?.toString() || file.name.replace(/\.[^.]+$/, ""); const description = formData.get("description")?.toString() || ""; - const tags = formData.get("tags")?.toString() || "[]"; + const tagsRaw = formData.get("tags")?.toString() || "[]"; + let tags: string[] = []; + try { tags = JSON.parse(tagsRaw); } catch { tags = []; } const uploadedBy = claims.sub; const buffer = await file.arrayBuffer(); @@ -97,13 +191,32 @@ routes.post("/api/files", async (c) => { await mkdir(resolve(fullPath, ".."), { recursive: true }); await writeFile(fullPath, Buffer.from(buffer)); - const [row] = await sql.unsafe( - `INSERT INTO rfiles.media_files (original_filename, title, description, mime_type, file_size, file_hash, storage_path, tags, uploaded_by, shared_space) - VALUES ($1, $2, $3, $4, $5, $6, $7, $8::jsonb, $9, $10) RETURNING *`, - [file.name, title, description, file.type || "application/octet-stream", file.size, fileHash, storagePath, tags, uploadedBy, space] - ); + const docId = filesDocId(space, space); + ensureDoc(space, space); - return c.json({ file: row }, 201); + const mediaFile: MediaFile = { + id: fileId, + originalFilename: file.name, + title, + description, + mimeType: file.type || "application/octet-stream", + fileSize: file.size, + fileHash, + storagePath, + tags, + isProcessed: false, + processingError: null, + uploadedBy, + sharedSpace: space, + createdAt: Date.now(), + updatedAt: Date.now(), + }; + + _syncServer!.changeDoc(docId, `upload file ${fileId}`, (d) => { + d.files[fileId] = mediaFile; + }); + + return c.json({ file: toPlain(mediaFile) }, 201); }); // ── File listing ── @@ -113,60 +226,72 @@ 
routes.get("/api/files", async (c) => { const limit = Math.min(Number(c.req.query("limit")) || 50, 200); const offset = Number(c.req.query("offset")) || 0; - let query = "SELECT * FROM rfiles.media_files WHERE shared_space = $1"; - const params: any[] = [space]; - let paramIdx = 2; + const doc = ensureDoc(space, space); + + let files = Object.values(doc.files) + .filter((f) => f.sharedSpace === space); if (mimeType) { - query += ` AND mime_type LIKE $${paramIdx}`; - params.push(`${mimeType}%`); - paramIdx++; + files = files.filter((f) => f.mimeType && f.mimeType.startsWith(mimeType)); } - query += ` ORDER BY created_at DESC LIMIT $${paramIdx} OFFSET $${paramIdx + 1}`; - params.push(limit, offset); + // Sort by createdAt descending + files.sort((a, b) => b.createdAt - a.createdAt); - const rows = await sql.unsafe(query, params); - const [{ count }] = await sql.unsafe( - "SELECT COUNT(*) as count FROM rfiles.media_files WHERE shared_space = $1", - [space] - ); + const total = files.length; + const paged = files.slice(offset, offset + limit); - return c.json({ files: rows, total: Number(count), limit, offset }); + return c.json({ files: toPlain(paged), total, limit, offset }); }); // ── File download ── routes.get("/api/files/:id/download", async (c) => { - const [file] = await sql.unsafe("SELECT * FROM rfiles.media_files WHERE id = $1", [c.req.param("id")]); + const fileId = c.req.param("id"); + const space = c.req.param("space") || c.req.query("space") || "default"; + const doc = ensureDoc(space, space); + const file = doc.files[fileId]; if (!file) return c.json({ error: "File not found" }, 404); - const fullPath = resolve(FILES_DIR, file.storage_path); + const fullPath = resolve(FILES_DIR, file.storagePath); const bunFile = Bun.file(fullPath); if (!await bunFile.exists()) return c.json({ error: "File missing from storage" }, 404); return new Response(bunFile, { headers: { - "Content-Type": file.mime_type || "application/octet-stream", - "Content-Disposition": 
`attachment; filename="${file.original_filename}"`, - "Content-Length": String(file.file_size), + "Content-Type": file.mimeType || "application/octet-stream", + "Content-Disposition": `attachment; filename="${file.originalFilename}"`, + "Content-Length": String(file.fileSize), }, }); }); // ── File detail ── routes.get("/api/files/:id", async (c) => { - const [file] = await sql.unsafe("SELECT * FROM rfiles.media_files WHERE id = $1", [c.req.param("id")]); + const fileId = c.req.param("id"); + const space = c.req.param("space") || c.req.query("space") || "default"; + const doc = ensureDoc(space, space); + const file = doc.files[fileId]; if (!file) return c.json({ error: "File not found" }, 404); - return c.json({ file }); + return c.json({ file: toPlain(file) }); }); // ── File delete ── routes.delete("/api/files/:id", async (c) => { - const [file] = await sql.unsafe("SELECT * FROM rfiles.media_files WHERE id = $1", [c.req.param("id")]); + const fileId = c.req.param("id"); + const space = c.req.param("space") || c.req.query("space") || "default"; + const docId = filesDocId(space, space); + const doc = ensureDoc(space, space); + const file = doc.files[fileId]; if (!file) return c.json({ error: "File not found" }, 404); - try { await unlink(resolve(FILES_DIR, file.storage_path)); } catch {} - await sql.unsafe("DELETE FROM rfiles.media_files WHERE id = $1", [c.req.param("id")]); + try { await unlink(resolve(FILES_DIR, file.storagePath)); } catch {} + _syncServer!.changeDoc(docId, `delete file ${fileId}`, (d) => { + delete d.files[fileId]; + // Also remove any shares referencing this file + for (const [sid, share] of Object.entries(d.shares)) { + if (share.mediaFileId === fileId) delete d.shares[sid]; + } + }); return c.json({ message: "Deleted" }); }); @@ -177,13 +302,18 @@ routes.post("/api/files/:id/share", async (c) => { let claims; try { claims = await verifyEncryptIDToken(authToken); } catch { return c.json({ error: "Invalid token" }, 401); } - const [file] = 
await sql.unsafe("SELECT * FROM rfiles.media_files WHERE id = $1", [c.req.param("id")]); + const fileId = c.req.param("id"); + const space = c.req.param("space") || c.req.query("space") || "default"; + const docId = filesDocId(space, space); + const doc = ensureDoc(space, space); + + const file = doc.files[fileId]; if (!file) return c.json({ error: "File not found" }, 404); - if (file.uploaded_by && file.uploaded_by !== claims.sub) return c.json({ error: "Not authorized" }, 403); + if (file.uploadedBy && file.uploadedBy !== claims.sub) return c.json({ error: "Not authorized" }, 403); const body = await c.req.json<{ expires_in_hours?: number; max_downloads?: number; password?: string; note?: string }>(); - const token = generateToken(); - const expiresAt = body.expires_in_hours ? new Date(Date.now() + body.expires_in_hours * 3600_000).toISOString() : null; + const shareToken = generateToken(); + const expiresAt = body.expires_in_hours ? Date.now() + body.expires_in_hours * 3600_000 : null; const createdBy = claims.sub; let passwordHash: string | null = null; @@ -193,27 +323,52 @@ routes.post("/api/files/:id/share", async (c) => { isPasswordProtected = true; } - const [share] = await sql.unsafe( - `INSERT INTO rfiles.public_shares (token, media_file_id, created_by, expires_at, max_downloads, is_password_protected, password_hash, note) - VALUES ($1, $2, $3, $4, $5, $6, $7, $8) RETURNING *`, - [token, file.id, createdBy, expiresAt, body.max_downloads || null, isPasswordProtected, passwordHash, body.note || null] - ); + const shareId = crypto.randomUUID(); + const logId = crypto.randomUUID(); + const now = Date.now(); - await sql.unsafe( - "INSERT INTO rfiles.access_logs (media_file_id, share_id, access_type) VALUES ($1, $2, 'share_created')", - [file.id, share.id] - ); + const share: PublicShare = { + id: shareId, + token: shareToken, + mediaFileId: fileId, + createdBy, + expiresAt, + maxDownloads: body.max_downloads || null, + downloadCount: 0, + isActive: true, + 
isPasswordProtected, + passwordHash, + note: body.note || null, + createdAt: now, + }; - return c.json({ share: { ...share, url: `/s/${token}` } }, 201); + _syncServer!.changeDoc(docId, `create share for file ${fileId}`, (d) => { + d.shares[shareId] = share; + d.accessLogs[logId] = { + id: logId, + mediaFileId: fileId, + shareId, + ipAddress: null, + userAgent: null, + accessType: 'share_created', + accessedAt: now, + }; + }); + + return c.json({ share: { ...toPlain(share), url: `/s/${shareToken}` } }, 201); }); // ── List shares for a file ── routes.get("/api/files/:id/shares", async (c) => { - const rows = await sql.unsafe( - "SELECT * FROM rfiles.public_shares WHERE media_file_id = $1 ORDER BY created_at DESC", - [c.req.param("id")] - ); - return c.json({ shares: rows }); + const fileId = c.req.param("id"); + const space = c.req.param("space") || c.req.query("space") || "default"; + const doc = ensureDoc(space, space); + + const shares = Object.values(doc.shares) + .filter((s) => s.mediaFileId === fileId) + .sort((a, b) => b.createdAt - a.createdAt); + + return c.json({ shares: toPlain(shares) }); }); // ── Revoke share ── @@ -223,83 +378,126 @@ routes.post("/api/shares/:shareId/revoke", async (c) => { let claims; try { claims = await verifyEncryptIDToken(authToken); } catch { return c.json({ error: "Invalid token" }, 401); } - const [share] = await sql.unsafe( - "SELECT s.*, f.uploaded_by FROM rfiles.public_shares s JOIN rfiles.media_files f ON s.media_file_id = f.id WHERE s.id = $1", - [c.req.param("shareId")] - ); - if (!share) return c.json({ error: "Share not found" }, 404); - if (share.uploaded_by && share.uploaded_by !== claims.sub) return c.json({ error: "Not authorized" }, 403); + const shareId = c.req.param("shareId"); + const space = c.req.param("space") || c.req.query("space") || "default"; + const docId = filesDocId(space, space); + const doc = ensureDoc(space, space); - const [revoked] = await sql.unsafe( - "UPDATE rfiles.public_shares SET 
is_active = FALSE WHERE id = $1 RETURNING *", - [c.req.param("shareId")] - ); - return c.json({ message: "Revoked", share: revoked }); + const share = doc.shares[shareId]; + if (!share) return c.json({ error: "Share not found" }, 404); + + // Check authorization via the linked file + const file = doc.files[share.mediaFileId]; + if (file?.uploadedBy && file.uploadedBy !== claims.sub) return c.json({ error: "Not authorized" }, 403); + + _syncServer!.changeDoc(docId, `revoke share ${shareId}`, (d) => { + d.shares[shareId].isActive = false; + }); + + const updated = _syncServer!.getDoc(docId)!; + return c.json({ message: "Revoked", share: toPlain(updated.shares[shareId]) }); }); // ── Public share download ── routes.get("/s/:token", async (c) => { - const [share] = await sql.unsafe( - `SELECT s.*, f.storage_path, f.mime_type, f.original_filename, f.file_size - FROM rfiles.public_shares s JOIN rfiles.media_files f ON s.media_file_id = f.id - WHERE s.token = $1`, - [c.req.param("token")] - ); - if (!share) return c.json({ error: "Share not found" }, 404); - if (!share.is_active) return c.json({ error: "Share has been revoked" }, 410); - if (share.expires_at && new Date(share.expires_at) < new Date()) return c.json({ error: "Share has expired" }, 410); - if (share.max_downloads && share.download_count >= share.max_downloads) return c.json({ error: "Download limit reached" }, 410); + const shareToken = c.req.param("token"); - if (share.is_password_protected) { + // Find the share across all files docs + let foundDocId: string | null = null; + let foundShare: PublicShare | null = null; + let foundFile: MediaFile | null = null; + + for (const docId of _syncServer!.getDocIds()) { + if (!docId.includes(':files:cards:')) continue; + const doc = _syncServer!.getDoc(docId); + if (!doc?.shares) continue; + for (const s of Object.values(doc.shares)) { + if (s.token === shareToken) { + foundDocId = docId; + foundShare = s; + foundFile = doc.files[s.mediaFileId] || null; + break; + } 
+ } + if (foundShare) break; + } + + if (!foundShare || !foundFile) return c.json({ error: "Share not found" }, 404); + if (!foundShare.isActive) return c.json({ error: "Share has been revoked" }, 410); + if (foundShare.expiresAt && foundShare.expiresAt < Date.now()) return c.json({ error: "Share has expired" }, 410); + if (foundShare.maxDownloads && foundShare.downloadCount >= foundShare.maxDownloads) return c.json({ error: "Download limit reached" }, 410); + + if (foundShare.isPasswordProtected) { const pw = c.req.query("password"); if (!pw) return c.json({ error: "Password required", is_password_protected: true }, 401); const hash = await hashPassword(pw); - if (hash !== share.password_hash) return c.json({ error: "Invalid password" }, 401); + if (hash !== foundShare.passwordHash) return c.json({ error: "Invalid password" }, 401); } - await sql.unsafe("UPDATE rfiles.public_shares SET download_count = download_count + 1 WHERE id = $1", [share.id]); + const logId = crypto.randomUUID(); const ip = c.req.header("X-Forwarded-For")?.split(",")[0]?.trim() || c.req.header("X-Real-IP") || null; const ua = c.req.header("User-Agent") || ""; - await sql.unsafe( - "INSERT INTO rfiles.access_logs (media_file_id, share_id, ip_address, user_agent, access_type) VALUES ($1, $2, $3, $4, 'download')", - [share.media_file_id, share.id, ip, ua.slice(0, 500)] - ); - const fullPath = resolve(FILES_DIR, share.storage_path); + _syncServer!.changeDoc(foundDocId!, `download via share ${foundShare.id}`, (d) => { + d.shares[foundShare!.id].downloadCount += 1; + d.accessLogs[logId] = { + id: logId, + mediaFileId: foundShare!.mediaFileId, + shareId: foundShare!.id, + ipAddress: ip, + userAgent: ua.slice(0, 500), + accessType: 'download', + accessedAt: Date.now(), + }; + }); + + const fullPath = resolve(FILES_DIR, foundFile.storagePath); const bunFile = Bun.file(fullPath); if (!await bunFile.exists()) return c.json({ error: "File missing" }, 404); return new Response(bunFile, { headers: { - 
"Content-Type": share.mime_type || "application/octet-stream", - "Content-Disposition": `attachment; filename="${share.original_filename}"`, - "Content-Length": String(share.file_size), + "Content-Type": foundFile.mimeType || "application/octet-stream", + "Content-Disposition": `attachment; filename="${foundFile.originalFilename}"`, + "Content-Length": String(foundFile.fileSize), }, }); }); // ── Share info (public) ── routes.get("/s/:token/info", async (c) => { - const [share] = await sql.unsafe( - `SELECT s.is_password_protected, s.is_active, s.expires_at, s.max_downloads, s.download_count, s.note, - f.original_filename, f.mime_type, f.file_size - FROM rfiles.public_shares s JOIN rfiles.media_files f ON s.media_file_id = f.id - WHERE s.token = $1`, - [c.req.param("token")] - ); - if (!share) return c.json({ error: "Share not found" }, 404); + const shareToken = c.req.param("token"); - const isValid = share.is_active && - (!share.expires_at || new Date(share.expires_at) > new Date()) && - (!share.max_downloads || share.download_count < share.max_downloads); + let foundShare: PublicShare | null = null; + let foundFile: MediaFile | null = null; + + for (const docId of _syncServer!.getDocIds()) { + if (!docId.includes(':files:cards:')) continue; + const doc = _syncServer!.getDoc(docId); + if (!doc?.shares) continue; + for (const s of Object.values(doc.shares)) { + if (s.token === shareToken) { + foundShare = s; + foundFile = doc.files[s.mediaFileId] || null; + break; + } + } + if (foundShare) break; + } + + if (!foundShare || !foundFile) return c.json({ error: "Share not found" }, 404); + + const isValid = foundShare.isActive && + (!foundShare.expiresAt || foundShare.expiresAt > Date.now()) && + (!foundShare.maxDownloads || foundShare.downloadCount < foundShare.maxDownloads); return c.json({ - is_password_protected: share.is_password_protected, + is_password_protected: foundShare.isPasswordProtected, is_valid: isValid, - expires_at: share.expires_at, - 
downloads_remaining: share.max_downloads ? share.max_downloads - share.download_count : null, - file_info: { filename: share.original_filename, mime_type: share.mime_type, size: share.file_size }, - note: share.note, + expires_at: foundShare.expiresAt ? new Date(foundShare.expiresAt).toISOString() : null, + downloads_remaining: foundShare.maxDownloads ? foundShare.maxDownloads - foundShare.downloadCount : null, + file_info: { filename: foundFile.originalFilename, mime_type: foundFile.mimeType, size: foundFile.fileSize }, + note: foundShare.note, }); }); @@ -313,13 +511,30 @@ routes.post("/api/cards", async (c) => { const body = await c.req.json<{ title: string; body?: string; card_type?: string; tags?: string[]; shared_space?: string }>(); const space = c.req.param("space") || body.shared_space || "default"; const createdBy = claims.sub; + const docId = filesDocId(space, space); + ensureDoc(space, space); - const [card] = await sql.unsafe( - `INSERT INTO rfiles.memory_cards (shared_space, title, body, card_type, tags, created_by) - VALUES ($1, $2, $3, $4, $5::jsonb, $6) RETURNING *`, - [space, body.title, body.body || "", body.card_type || "note", JSON.stringify(body.tags || []), createdBy] - ); - return c.json({ card }, 201); + const cardId = crypto.randomUUID(); + const now = Date.now(); + + const card: MemoryCard = { + id: cardId, + sharedSpace: space, + title: body.title, + body: body.body || "", + cardType: body.card_type || "note", + tags: body.tags || [], + position: 0, + createdBy, + createdAt: now, + updatedAt: now, + }; + + _syncServer!.changeDoc(docId, `create card ${cardId}`, (d) => { + d.memoryCards[cardId] = card; + }); + + return c.json({ card: toPlain(card) }, 201); }); routes.get("/api/cards", async (c) => { @@ -327,43 +542,61 @@ routes.get("/api/cards", async (c) => { const cardType = c.req.query("type"); const limit = Math.min(Number(c.req.query("limit")) || 50, 200); - let query = "SELECT * FROM rfiles.memory_cards WHERE shared_space = $1"; - 
const params: any[] = [space]; - if (cardType) { query += " AND card_type = $2"; params.push(cardType); } - query += " ORDER BY position, created_at DESC LIMIT $" + (params.length + 1); - params.push(limit); + const doc = ensureDoc(space, space); - const rows = await sql.unsafe(query, params); - return c.json({ cards: rows, total: rows.length }); + let cards = Object.values(doc.memoryCards) + .filter((card) => card.sharedSpace === space); + + if (cardType) { + cards = cards.filter((card) => card.cardType === cardType); + } + + // Sort by position ascending, then createdAt descending + cards.sort((a, b) => a.position - b.position || b.createdAt - a.createdAt); + cards = cards.slice(0, limit); + + return c.json({ cards: toPlain(cards), total: cards.length }); }); routes.patch("/api/cards/:id", async (c) => { - const body = await c.req.json<{ title?: string; body?: string; card_type?: string; tags?: string[]; position?: number }>(); - const sets: string[] = []; - const params: any[] = []; - let idx = 1; + const cardId = c.req.param("id"); + const space = c.req.param("space") || c.req.query("space") || "default"; + const docId = filesDocId(space, space); + const doc = ensureDoc(space, space); - if (body.title !== undefined) { sets.push(`title = $${idx}`); params.push(body.title); idx++; } - if (body.body !== undefined) { sets.push(`body = $${idx}`); params.push(body.body); idx++; } - if (body.card_type !== undefined) { sets.push(`card_type = $${idx}`); params.push(body.card_type); idx++; } - if (body.tags !== undefined) { sets.push(`tags = $${idx}::jsonb`); params.push(JSON.stringify(body.tags)); idx++; } - if (body.position !== undefined) { sets.push(`position = $${idx}`); params.push(body.position); idx++; } - - if (sets.length === 0) return c.json({ error: "No fields to update" }, 400); - sets.push(`updated_at = NOW()`); - params.push(c.req.param("id")); - - const [card] = await sql.unsafe( - `UPDATE rfiles.memory_cards SET ${sets.join(", ")} WHERE id = $${idx} 
RETURNING *`, - params - ); + const card = doc.memoryCards[cardId]; if (!card) return c.json({ error: "Card not found" }, 404); - return c.json({ card }); + + const body = await c.req.json<{ title?: string; body?: string; card_type?: string; tags?: string[]; position?: number }>(); + if (body.title === undefined && body.body === undefined && body.card_type === undefined && body.tags === undefined && body.position === undefined) { + return c.json({ error: "No fields to update" }, 400); + } + + _syncServer!.changeDoc(docId, `update card ${cardId}`, (d) => { + const c = d.memoryCards[cardId]; + if (body.title !== undefined) c.title = body.title; + if (body.body !== undefined) c.body = body.body; + if (body.card_type !== undefined) c.cardType = body.card_type; + if (body.tags !== undefined) c.tags = body.tags; + if (body.position !== undefined) c.position = body.position; + c.updatedAt = Date.now(); + }); + + const updated = _syncServer!.getDoc(docId)!; + return c.json({ card: toPlain(updated.memoryCards[cardId]) }); }); routes.delete("/api/cards/:id", async (c) => { - const [card] = await sql.unsafe("DELETE FROM rfiles.memory_cards WHERE id = $1 RETURNING id", [c.req.param("id")]); - if (!card) return c.json({ error: "Card not found" }, 404); + const cardId = c.req.param("id"); + const space = c.req.param("space") || c.req.query("space") || "default"; + const docId = filesDocId(space, space); + const doc = ensureDoc(space, space); + + if (!doc.memoryCards[cardId]) return c.json({ error: "Card not found" }, 404); + + _syncServer!.changeDoc(docId, `delete card ${cardId}`, (d) => { + delete d.memoryCards[cardId]; + }); return c.json({ message: "Deleted" }); }); @@ -408,7 +641,6 @@ export const filesModule: RSpaceModule = { landingPage: renderLanding, async onInit(ctx) { _syncServer = ctx.syncServer; - await initDB(); }, standaloneDomain: "rfiles.online", externalApp: { url: "https://files.rfiles.online", name: "Seafile" }, diff --git a/modules/rfunds/db/schema.sql 
b/modules/rfunds/db/schema.sql.archived similarity index 100% rename from modules/rfunds/db/schema.sql rename to modules/rfunds/db/schema.sql.archived diff --git a/modules/rfunds/mod.ts b/modules/rfunds/mod.ts index 4c87975c..ffb626a3 100644 --- a/modules/rfunds/mod.ts +++ b/modules/rfunds/mod.ts @@ -5,31 +5,32 @@ */ import { Hono } from "hono"; -import { readFileSync } from "node:fs"; -import { resolve } from "node:path"; -import { sql } from "../../shared/db/pool"; +import * as Automerge from "@automerge/automerge"; import { renderShell } from "../../server/shell"; import type { RSpaceModule } from "../../shared/module"; import { getModuleInfoList } from "../../shared/module"; import { verifyEncryptIDToken, extractToken } from "@encryptid/sdk/server"; import { renderLanding } from "./landing"; import type { SyncServer } from '../../server/local-first/sync-server'; -import { fundsSchema } from './schemas'; +import { fundsSchema, fundsDocId, type FundsDoc, type SpaceFlow } from './schemas'; let _syncServer: SyncServer | null = null; const FLOW_SERVICE_URL = process.env.FLOW_SERVICE_URL || "http://payment-flow:3010"; -// ── DB initialization ── -const SCHEMA_SQL = readFileSync(resolve(import.meta.dir, "db/schema.sql"), "utf-8"); - -async function initDB() { - try { - await sql.unsafe(SCHEMA_SQL); - console.log("[Funds] DB schema initialized"); - } catch (e) { - console.error("[Funds] DB init error:", e); +function ensureDoc(space: string): FundsDoc { + const docId = fundsDocId(space); + let doc = _syncServer!.getDoc(docId); + if (!doc) { + doc = Automerge.change(Automerge.init(), 'init', (d) => { + const init = fundsSchema.init(); + d.meta = init.meta; + d.meta.spaceSlug = space; + d.spaceFlows = {}; + }); + _syncServer!.setDoc(docId, doc); } + return doc; } const routes = new Hono(); @@ -42,29 +43,22 @@ routes.get("/api/flows", async (c) => { const owner = c.req.header("X-Owner-Address") || ""; const space = c.req.query("space") || ""; - // If space filter 
provided, get flow IDs from space_flows table + // If space filter provided, get flow IDs from Automerge doc if (space) { - try { - const rows = await sql.unsafe( - "SELECT flow_id FROM rfunds.space_flows WHERE space_slug = $1", - [space], - ); - if (rows.length === 0) return c.json([]); + const doc = ensureDoc(space); + const flowIds = Object.values(doc.spaceFlows).map((sf) => sf.flowId); + if (flowIds.length === 0) return c.json([]); - // Fetch each flow from flow-service - const flows = await Promise.all( - rows.map(async (r: any) => { - try { - const res = await fetch(`${FLOW_SERVICE_URL}/api/flows/${r.flow_id}`); - if (res.ok) return await res.json(); - } catch {} - return null; - }), - ); - return c.json(flows.filter(Boolean)); - } catch { - // Fall through to unfiltered fetch - } + const flows = await Promise.all( + flowIds.map(async (fid) => { + try { + const res = await fetch(`${FLOW_SERVICE_URL}/api/flows/${fid}`); + if (res.ok) return await res.json(); + } catch {} + return null; + }), + ); + return c.json(flows.filter(Boolean)); } const res = await fetch(`${FLOW_SERVICE_URL}/api/flows?owner=${encodeURIComponent(owner)}`); @@ -163,11 +157,14 @@ routes.post("/api/space-flows", async (c) => { const { space, flowId } = await c.req.json(); if (!space || !flowId) return c.json({ error: "space and flowId required" }, 400); - await sql.unsafe( - `INSERT INTO rfunds.space_flows (space_slug, flow_id, added_by) - VALUES ($1, $2, $3) ON CONFLICT DO NOTHING`, - [space, flowId, claims.sub], - ); + const docId = fundsDocId(space); + ensureDoc(space); + _syncServer!.changeDoc(docId, 'add space flow', (d) => { + const key = `${space}:${flowId}`; + if (!d.spaceFlows[key]) { + d.spaceFlows[key] = { id: key, spaceSlug: space, flowId, addedBy: claims.sub, createdAt: Date.now() }; + } + }); return c.json({ ok: true }); }); @@ -181,10 +178,16 @@ routes.delete("/api/space-flows/:flowId", async (c) => { const space = c.req.query("space") || ""; if (!space) return c.json({ 
error: "space query param required" }, 400); - await sql.unsafe( - "DELETE FROM rfunds.space_flows WHERE space_slug = $1 AND flow_id = $2", - [space, flowId], - ); + const docId = fundsDocId(space); + const doc = _syncServer!.getDoc(docId); + if (doc) { + const key = `${space}:${flowId}`; + if (doc.spaceFlows[key]) { + _syncServer!.changeDoc(docId, 'remove space flow', (d) => { + delete d.spaceFlows[key]; + }); + } + } return c.json({ ok: true }); }); @@ -254,7 +257,6 @@ export const fundsModule: RSpaceModule = { landingPage: renderLanding, async onInit(ctx) { _syncServer = ctx.syncServer; - await initDB(); }, standaloneDomain: "rfunds.online", feeds: [ diff --git a/modules/rinbox/db/schema.sql b/modules/rinbox/db/schema.sql.archived similarity index 100% rename from modules/rinbox/db/schema.sql rename to modules/rinbox/db/schema.sql.archived diff --git a/modules/rinbox/mod.ts b/modules/rinbox/mod.ts index ec325c99..88218afe 100644 --- a/modules/rinbox/mod.ts +++ b/modules/rinbox/mod.ts @@ -3,45 +3,246 @@ * * Shared mailboxes with role-based access, threaded comments, * and Gnosis Safe multisig approval for outgoing emails. + * + * Storage: Automerge documents via SyncServer (one doc per mailbox). + * IMAP credentials and sync state kept in module-scoped Maps (not in CRDT). 
*/ import { Hono } from "hono"; -import { readFileSync } from "node:fs"; -import { resolve } from "node:path"; -import { sql } from "../../shared/db/pool"; +import * as Automerge from "@automerge/automerge"; import { renderShell } from "../../server/shell"; import { getModuleInfoList } from "../../shared/module"; import type { RSpaceModule } from "../../shared/module"; import { verifyEncryptIDToken, extractToken } from "@encryptid/sdk/server"; import { renderLanding } from "./landing"; import type { SyncServer } from '../../server/local-first/sync-server'; -import { mailboxSchema } from './schemas'; +import { + mailboxSchema, + mailboxDocId, + type MailboxDoc, + type MailboxMeta, + type ThreadItem, + type ThreadComment, + type ApprovalItem, + type ApprovalSignature, +} from './schemas'; let _syncServer: SyncServer | null = null; const routes = new Hono(); -// ── DB initialization ── -const SCHEMA_SQL = readFileSync(resolve(import.meta.dir, "db/schema.sql"), "utf-8"); +// ── In-memory stores for data not in Automerge schemas ── -async function initDB() { - try { - await sql.unsafe(SCHEMA_SQL); - console.log("[Inbox] DB schema initialized"); - } catch (e) { - console.error("[Inbox] DB init error:", e); - } +/** Workspace metadata (no Automerge schema — lightweight index) */ +interface WorkspaceInfo { + id: string; + slug: string; + name: string; + description: string | null; + ownerDid: string; + createdAt: number; +} +const _workspaces = new Map<string, WorkspaceInfo>(); // slug → info + +/** IMAP credentials per mailbox (not stored in CRDT for security) */ +interface ImapConfig { + imapUser: string | null; + imapHost: string | null; + imapPort: number | null; +} +const _imapConfigs = new Map<string, ImapConfig>(); // mailboxId → config + +/** IMAP sync state per mailbox (transient server state) */ +interface ImapSyncState { + mailboxId: string; + lastUid: number; + uidValidity: number | null; + lastSyncAt: number | null; + error: string | null; +} +const _syncStates = new Map<string, ImapSyncState>(); // mailboxId → state + +// 
── Helpers ── + +/** Default space used when no space param is provided */ +const DEFAULT_SPACE = "global"; + +function generateId(): string { + return crypto.randomUUID(); } -// ── Helper: get or create user by DID ── -async function getOrCreateUser(did: string, username?: string) { - const rows = await sql.unsafe( - `INSERT INTO rinbox.users (did, username) VALUES ($1, $2) - ON CONFLICT (did) DO UPDATE SET username = COALESCE($2, rinbox.users.username) - RETURNING *`, - [did, username || null] - ); - return rows[0]; +/** + * Ensure a mailbox Automerge doc exists for the given space + mailboxId. + * Returns the current doc state. + */ +function ensureMailboxDoc(space: string, mailboxId: string): MailboxDoc { + const docId = mailboxDocId(space, mailboxId); + let doc = _syncServer!.getDoc(docId); + if (!doc) { + doc = Automerge.change(Automerge.init(), 'init mailbox', (d) => { + const init = mailboxSchema.init(); + d.meta = init.meta; + d.meta.spaceSlug = space; + d.mailbox = init.mailbox; + d.mailbox.id = mailboxId; + d.members = []; + d.threads = {}; + d.approvals = {}; + }); + _syncServer!.setDoc(docId, doc); + } + return doc; +} + +/** + * Find a mailbox doc by slug across all docs on the SyncServer. + * Returns [space, mailboxId, doc] or null. + */ +function findMailboxBySlug(slug: string): [string, string, MailboxDoc] | null { + for (const id of _syncServer!.getDocIds()) { + // Match pattern: {space}:inbox:mailboxes:{mailboxId} + const parts = id.split(':'); + if (parts.length === 4 && parts[1] === 'inbox' && parts[2] === 'mailboxes') { + const doc = _syncServer!.getDoc(id); + if (doc && doc.mailbox && doc.mailbox.slug === slug) { + return [parts[0], parts[3], doc]; + } + } + } + return null; +} + +/** + * Find a mailbox doc by mailbox ID across all docs on the SyncServer. + * Returns [space, docId, doc] or null. 
+ */ +function findMailboxById(mailboxId: string): [string, string, MailboxDoc] | null { + for (const id of _syncServer!.getDocIds()) { + const parts = id.split(':'); + if (parts.length === 4 && parts[1] === 'inbox' && parts[2] === 'mailboxes' && parts[3] === mailboxId) { + const doc = _syncServer!.getDoc(id); + if (doc) return [parts[0], id, doc]; + } + } + return null; +} + +/** + * Get all mailbox docs from the SyncServer. + */ +function getAllMailboxDocs(): Array<{ space: string; docId: string; doc: MailboxDoc }> { + const results: Array<{ space: string; docId: string; doc: MailboxDoc }> = []; + for (const id of _syncServer!.getDocIds()) { + const parts = id.split(':'); + if (parts.length === 4 && parts[1] === 'inbox' && parts[2] === 'mailboxes') { + const doc = _syncServer!.getDoc(id); + if (doc) results.push({ space: parts[0], docId: id, doc }); + } + } + return results; +} + +/** + * Find a thread by ID across all mailbox docs. + * Returns [docId, threadId, thread, doc] or null. + */ +function findThreadById(threadId: string): [string, string, ThreadItem, MailboxDoc] | null { + for (const { docId, doc } of getAllMailboxDocs()) { + const thread = doc.threads[threadId]; + if (thread) return [docId, threadId, thread, doc]; + } + return null; +} + +/** + * Find an approval by ID across all mailbox docs. + * Returns [docId, approvalId, approval, doc] or null. 
+ */ +function findApprovalById(approvalId: string): [string, string, ApprovalItem, MailboxDoc] | null { + for (const { docId, doc } of getAllMailboxDocs()) { + const approval = doc.approvals[approvalId]; + if (approval) return [docId, approvalId, approval, doc]; + } + return null; +} + +/** Convert MailboxMeta to REST response format (snake_case) */ +function mailboxToRest(mb: MailboxMeta) { + return { + id: mb.id, + workspace_id: mb.workspaceId, + slug: mb.slug, + name: mb.name, + email: mb.email, + description: mb.description, + visibility: mb.visibility, + owner_did: mb.ownerDid, + safe_address: mb.safeAddress, + safe_chain_id: mb.safeChainId, + approval_threshold: mb.approvalThreshold, + created_at: new Date(mb.createdAt).toISOString(), + imap_user: _imapConfigs.get(mb.id)?.imapUser || null, + }; +} + +/** Convert ThreadItem to REST response format */ +function threadToRest(t: ThreadItem) { + return { + id: t.id, + mailbox_id: t.mailboxId, + message_id: t.messageId, + subject: t.subject, + from_address: t.fromAddress, + from_name: t.fromName, + to_addresses: t.toAddresses, + cc_addresses: t.ccAddresses, + body_text: t.bodyText, + body_html: t.bodyHtml, + tags: t.tags, + status: t.status, + is_read: t.isRead, + is_starred: t.isStarred, + assigned_to: t.assignedTo, + has_attachments: t.hasAttachments, + received_at: new Date(t.receivedAt).toISOString(), + created_at: new Date(t.createdAt).toISOString(), + comment_count: t.comments.length, + }; +} + +/** Convert ThreadComment to REST response format */ +function commentToRest(c: ThreadComment) { + return { + id: c.id, + thread_id: c.threadId, + author_id: c.authorId, + author_did: c.authorId, // In Automerge, authorId IS the DID + username: null as string | null, + body: c.body, + mentions: c.mentions, + created_at: new Date(c.createdAt).toISOString(), + }; +} + +/** Convert ApprovalItem to REST response format */ +function approvalToRest(a: ApprovalItem) { + return { + id: a.id, + mailbox_id: a.mailboxId, + 
thread_id: a.threadId, + author_id: a.authorId, + subject: a.subject, + body_text: a.bodyText, + body_html: a.bodyHtml, + to_addresses: a.toAddresses, + cc_addresses: a.ccAddresses, + status: a.status, + required_signatures: a.requiredSignatures, + safe_tx_hash: a.safeTxHash, + created_at: new Date(a.createdAt).toISOString(), + resolved_at: a.resolvedAt ? new Date(a.resolvedAt).toISOString() : null, + signature_count: a.signatures.length, + }; } // ── Mailboxes API ── @@ -49,20 +250,26 @@ async function getOrCreateUser(did: string, username?: string) { // GET /api/mailboxes — list mailboxes routes.get("/api/mailboxes", async (c) => { const { workspace } = c.req.query(); - let rows; + const allDocs = getAllMailboxDocs(); + + let mailboxes: ReturnType<typeof mailboxToRest>[]; + if (workspace) { - rows = await sql.unsafe( - `SELECT m.* FROM rinbox.mailboxes m - JOIN rinbox.workspaces w ON w.id = m.workspace_id - WHERE w.slug = $1 ORDER BY m.created_at DESC`, - [workspace] - ); + const ws = _workspaces.get(workspace); + if (!ws) return c.json({ mailboxes: [] }); + + mailboxes = allDocs + .filter(({ doc }) => doc.mailbox.workspaceId === ws.id) + .map(({ doc }) => mailboxToRest(doc.mailbox)) + .sort((a, b) => new Date(b.created_at).getTime() - new Date(a.created_at).getTime()); } else { - rows = await sql.unsafe( - "SELECT * FROM rinbox.mailboxes ORDER BY created_at DESC LIMIT 50" - ); + mailboxes = allDocs + .map(({ doc }) => mailboxToRest(doc.mailbox)) + .sort((a, b) => new Date(b.created_at).getTime() - new Date(a.created_at).getTime()) + .slice(0, 50); } - return c.json({ mailboxes: rows }); + + return c.json({ mailboxes }); }); // POST /api/mailboxes — create mailbox @@ -77,34 +284,66 @@ routes.post("/api/mailboxes", async (c) => { if (!slug || !name || !email) return c.json({ error: "slug, name, email required" }, 400); if (!/^[a-z0-9-]+$/.test(slug)) return c.json({ error: "Invalid slug" }, 400); - try { - const rows = await sql.unsafe( - `INSERT INTO rinbox.mailboxes (slug, name, 
email, description, visibility, owner_did, imap_user) - VALUES ($1, $2, $3, $4, $5, $6, $7) RETURNING *`, - [slug, name, email, description || null, visibility, claims.sub, imap_user || null] - ); - return c.json(rows[0], 201); - } catch (e: any) { - if (e.code === "23505") return c.json({ error: "Mailbox already exists" }, 409); - throw e; + // Check uniqueness + const existing = findMailboxBySlug(slug); + if (existing) return c.json({ error: "Mailbox already exists" }, 409); + + const space = (c.req.query("space") || c.req.param("space") || DEFAULT_SPACE); + const mailboxId = generateId(); + const now = Date.now(); + + // Create Automerge doc + const doc = Automerge.change(Automerge.init(), 'Create mailbox', (d) => { + d.meta = { + module: 'inbox', + collection: 'mailboxes', + version: 1, + spaceSlug: space, + createdAt: now, + }; + d.mailbox = { + id: mailboxId, + workspaceId: null, + slug, + name, + email, + description: description || '', + visibility, + ownerDid: claims.sub, + safeAddress: null, + safeChainId: null, + approvalThreshold: 1, + createdAt: now, + }; + d.members = []; + d.threads = {}; + d.approvals = {}; + }); + _syncServer!.setDoc(mailboxDocId(space, mailboxId), doc); + + // Store IMAP config separately + if (imap_user) { + _imapConfigs.set(mailboxId, { imapUser: imap_user, imapHost: null, imapPort: null }); } + + return c.json(mailboxToRest(doc.mailbox), 201); }); // GET /api/mailboxes/:slug — mailbox detail routes.get("/api/mailboxes/:slug", async (c) => { const slug = c.req.param("slug"); - const rows = await sql.unsafe("SELECT * FROM rinbox.mailboxes WHERE slug = $1", [slug]); - if (rows.length === 0) return c.json({ error: "Mailbox not found" }, 404); + const found = findMailboxBySlug(slug); + if (!found) return c.json({ error: "Mailbox not found" }, 404); - // Get thread count - const counts = await sql.unsafe( - `SELECT status, count(*) as cnt FROM rinbox.threads WHERE mailbox_id = $1 GROUP BY status`, - [rows[0].id] - ); + const [, , 
doc] = found; + + // Compute thread counts by status const threadCounts: Record<string, number> = {}; - for (const row of counts) threadCounts[row.status] = parseInt(row.cnt); + for (const thread of Object.values(doc.threads)) { + threadCounts[thread.status] = (threadCounts[thread.status] || 0) + 1; + } - return c.json({ ...rows[0], threadCounts }); + return c.json({ ...mailboxToRest(doc.mailbox), threadCounts }); }); // ── Threads API ── @@ -114,80 +353,83 @@ routes.get("/api/mailboxes/:slug/threads", async (c) => { const slug = c.req.param("slug"); const { status, search, limit = "50", offset = "0" } = c.req.query(); - const mailbox = await sql.unsafe("SELECT id FROM rinbox.mailboxes WHERE slug = $1", [slug]); - if (mailbox.length === 0) return c.json({ error: "Mailbox not found" }, 404); + const found = findMailboxBySlug(slug); + if (!found) return c.json({ error: "Mailbox not found" }, 404); - const conditions = ["mailbox_id = $1"]; - const params: any[] = [mailbox[0].id]; - let idx = 2; + const [, , doc] = found; + let threads = Object.values(doc.threads); + // Filter by status if (status) { - conditions.push(`status = $${idx}`); - params.push(status); - idx++; + threads = threads.filter((t) => t.status === status); } + + // Filter by search (case-insensitive on subject and from_address) if (search) { - conditions.push(`(subject ILIKE $${idx} OR from_address ILIKE $${idx})`); - params.push(`%${search}%`); - idx++; + const q = search.toLowerCase(); + threads = threads.filter((t) => + (t.subject && t.subject.toLowerCase().includes(q)) || + (t.fromAddress && t.fromAddress.toLowerCase().includes(q)) + ); } - const where = conditions.join(" AND "); - const rows = await sql.unsafe( - `SELECT t.*, (SELECT count(*) FROM rinbox.comments WHERE thread_id = t.id) as comment_count - FROM rinbox.threads t WHERE ${where} - ORDER BY t.received_at DESC - LIMIT ${Math.min(parseInt(limit), 100)} OFFSET ${parseInt(offset) || 0}`, - params - ); + // Sort by receivedAt descending + 
threads.sort((a, b) => b.receivedAt - a.receivedAt); - return c.json({ threads: rows }); + // Paginate + const lim = Math.min(parseInt(limit), 100); + const off = parseInt(offset) || 0; + const page = threads.slice(off, off + lim); + + return c.json({ threads: page.map(threadToRest) }); }); // GET /api/threads/:id — thread detail with comments routes.get("/api/threads/:id", async (c) => { const id = c.req.param("id"); - const rows = await sql.unsafe("SELECT * FROM rinbox.threads WHERE id = $1", [id]); - if (rows.length === 0) return c.json({ error: "Thread not found" }, 404); + const found = findThreadById(id); + if (!found) return c.json({ error: "Thread not found" }, 404); - const comments = await sql.unsafe( - `SELECT c.*, u.username, u.did as author_did - FROM rinbox.comments c - LEFT JOIN rinbox.users u ON u.id = c.author_id - WHERE c.thread_id = $1 ORDER BY c.created_at ASC`, - [id] - ); + const [, , thread] = found; + const comments = [...thread.comments] + .sort((a, b) => a.createdAt - b.createdAt) + .map(commentToRest); - return c.json({ ...rows[0], comments }); + return c.json({ ...threadToRest(thread), comments }); }); // PATCH /api/threads/:id — update thread metadata routes.patch("/api/threads/:id", async (c) => { const id = c.req.param("id"); const body = await c.req.json(); - const allowed = ["status", "is_read", "is_starred", "tags", "assigned_to"]; - const updates: string[] = []; - const params: any[] = []; - let idx = 1; + const allowed = ["status", "is_read", "is_starred", "tags", "assigned_to"] as const; - for (const key of allowed) { - if (key in body) { - const col = key === "tags" ? "tags" : key; - updates.push(`${col} = $${idx}`); - params.push(key === "tags" ? 
body[key] : body[key]); - idx++; + // Check at least one valid field + const hasUpdate = allowed.some((key) => key in body); + if (!hasUpdate) return c.json({ error: "No valid fields" }, 400); + + const found = findThreadById(id); + if (!found) return c.json({ error: "Thread not found" }, 404); + + const [docId] = found; + + const updated = _syncServer!.changeDoc(docId, `Update thread ${id}`, (d) => { + const t = d.threads[id]; + if (!t) return; + if ("status" in body) t.status = body.status; + if ("is_read" in body) t.isRead = body.is_read; + if ("is_starred" in body) t.isStarred = body.is_starred; + if ("tags" in body) { + // Replace tags array + t.tags.length = 0; + for (const tag of body.tags) t.tags.push(tag); } - } + if ("assigned_to" in body) t.assignedTo = body.assigned_to; + }); - if (updates.length === 0) return c.json({ error: "No valid fields" }, 400); - - params.push(id); - const rows = await sql.unsafe( - `UPDATE rinbox.threads SET ${updates.join(", ")} WHERE id = $${idx} RETURNING *`, - params - ); - if (rows.length === 0) return c.json({ error: "Thread not found" }, 404); - return c.json(rows[0]); + if (!updated) return c.json({ error: "Thread not found" }, 404); + const thread = updated.threads[id]; + return c.json(threadToRest(thread)); }); // POST /api/threads/:id/comments — add comment @@ -202,18 +444,36 @@ routes.post("/api/threads/:id/comments", async (c) => { const { text, mentions } = body; if (!text) return c.json({ error: "text required" }, 400); - // Ensure thread exists - const thread = await sql.unsafe("SELECT id FROM rinbox.threads WHERE id = $1", [threadId]); - if (thread.length === 0) return c.json({ error: "Thread not found" }, 404); + const found = findThreadById(threadId); + if (!found) return c.json({ error: "Thread not found" }, 404); - const user = await getOrCreateUser(claims.sub, claims.username); + const [docId] = found; + const commentId = generateId(); + const now = Date.now(); - const rows = await sql.unsafe( - `INSERT 
INTO rinbox.comments (thread_id, author_id, body, mentions) - VALUES ($1, $2, $3, $4) RETURNING *`, - [threadId, user.id, text, mentions || []] - ); - return c.json(rows[0], 201); + _syncServer!.changeDoc(docId, `Add comment to thread ${threadId}`, (d) => { + const t = d.threads[threadId]; + if (!t) return; + t.comments.push({ + id: commentId, + threadId, + authorId: claims.sub, + body: text, + mentions: mentions || [], + createdAt: now, + }); + }); + + const comment: ThreadComment = { + id: commentId, + threadId, + authorId: claims.sub, + body: text, + mentions: mentions || [], + createdAt: now, + }; + + return c.json(commentToRest(comment), 201); }); // ── Approvals API ── @@ -221,24 +481,28 @@ routes.post("/api/threads/:id/comments", async (c) => { // GET /api/approvals — list pending approvals routes.get("/api/approvals", async (c) => { const { mailbox, status = "PENDING" } = c.req.query(); - let rows; + if (mailbox) { - const mb = await sql.unsafe("SELECT id FROM rinbox.mailboxes WHERE slug = $1", [mailbox]); - if (mb.length === 0) return c.json({ error: "Mailbox not found" }, 404); - rows = await sql.unsafe( - `SELECT a.*, (SELECT count(*) FROM rinbox.approval_signatures WHERE approval_id = a.id) as signature_count - FROM rinbox.approvals a WHERE a.mailbox_id = $1 AND a.status = $2 - ORDER BY a.created_at DESC`, - [mb[0].id, status] - ); + const found = findMailboxBySlug(mailbox); + if (!found) return c.json({ error: "Mailbox not found" }, 404); + const [, , doc] = found; + + const approvals = Object.values(doc.approvals) + .filter((a) => a.status === status) + .sort((a, b) => b.createdAt - a.createdAt) + .map(approvalToRest); + + return c.json({ approvals }); } else { - rows = await sql.unsafe( - `SELECT a.*, (SELECT count(*) FROM rinbox.approval_signatures WHERE approval_id = a.id) as signature_count - FROM rinbox.approvals a WHERE a.status = $1 ORDER BY a.created_at DESC LIMIT 50`, - [status] - ); + const allApprovals: ReturnType<typeof approvalToRest>[] = []; + for (const { doc 
} of getAllMailboxDocs()) { + for (const a of Object.values(doc.approvals)) { + if (a.status === status) allApprovals.push(approvalToRest(a)); + } + } + allApprovals.sort((a, b) => new Date(b.created_at).getTime() - new Date(a.created_at).getTime()); + return c.json({ approvals: allApprovals.slice(0, 50) }); } - return c.json({ approvals: rows }); }); // POST /api/approvals — create approval draft @@ -252,17 +516,53 @@ routes.post("/api/approvals", async (c) => { const { mailbox_slug, thread_id, subject, body_text, to_addresses } = body; if (!mailbox_slug || !subject) return c.json({ error: "mailbox_slug and subject required" }, 400); - const mb = await sql.unsafe("SELECT * FROM rinbox.mailboxes WHERE slug = $1", [mailbox_slug]); - if (mb.length === 0) return c.json({ error: "Mailbox not found" }, 404); + const found = findMailboxBySlug(mailbox_slug); + if (!found) return c.json({ error: "Mailbox not found" }, 404); - const user = await getOrCreateUser(claims.sub, claims.username); + const [space, mailboxId, doc] = found; + const docId = mailboxDocId(space, mailboxId); + const approvalId = generateId(); + const now = Date.now(); - const rows = await sql.unsafe( - `INSERT INTO rinbox.approvals (mailbox_id, thread_id, author_id, subject, body_text, to_addresses, required_signatures) - VALUES ($1, $2, $3, $4, $5, $6, $7) RETURNING *`, - [mb[0].id, thread_id || null, user.id, subject, body_text || null, JSON.stringify(to_addresses || []), mb[0].approval_threshold || 1] - ); - return c.json(rows[0], 201); + _syncServer!.changeDoc(docId, `Create approval draft`, (d) => { + d.approvals[approvalId] = { + id: approvalId, + mailboxId: doc.mailbox.id, + threadId: thread_id || null, + authorId: claims.sub, + subject, + bodyText: body_text || '', + bodyHtml: '', + toAddresses: to_addresses || [], + ccAddresses: [], + status: 'PENDING', + requiredSignatures: doc.mailbox.approvalThreshold || 1, + safeTxHash: null, + createdAt: now, + resolvedAt: 0, + signatures: [], + }; + }); + 
+ const approval: ApprovalItem = { + id: approvalId, + mailboxId: doc.mailbox.id, + threadId: thread_id || null, + authorId: claims.sub, + subject, + bodyText: body_text || '', + bodyHtml: '', + toAddresses: to_addresses || [], + ccAddresses: [], + status: 'PENDING', + requiredSignatures: doc.mailbox.approvalThreshold || 1, + safeTxHash: null, + createdAt: now, + resolvedAt: 0, + signatures: [], + }; + + return c.json(approvalToRest(approval), 201); }); // POST /api/approvals/:id/sign — sign an approval @@ -277,57 +577,77 @@ routes.post("/api/approvals/:id/sign", async (c) => { const { vote = "APPROVE" } = body; if (!["APPROVE", "REJECT"].includes(vote)) return c.json({ error: "Invalid vote" }, 400); - const approval = await sql.unsafe("SELECT * FROM rinbox.approvals WHERE id = $1", [id]); - if (approval.length === 0) return c.json({ error: "Approval not found" }, 404); - if (approval[0].status !== "PENDING") return c.json({ error: "Approval not pending" }, 400); + const found = findApprovalById(id); + if (!found) return c.json({ error: "Approval not found" }, 404); - const user = await getOrCreateUser(claims.sub, claims.username); + const [docId, , approval] = found; + if (approval.status !== "PENDING") return c.json({ error: "Approval not pending" }, 400); - await sql.unsafe( - `INSERT INTO rinbox.approval_signatures (approval_id, signer_id, vote) - VALUES ($1, $2, $3) - ON CONFLICT (approval_id, signer_id) DO UPDATE SET vote = $3, signed_at = NOW()`, - [id, user.id, vote] - ); + const now = Date.now(); - // Check if threshold reached - const sigs = await sql.unsafe( - "SELECT count(*) as cnt FROM rinbox.approval_signatures WHERE approval_id = $1 AND vote = 'APPROVE'", - [id] - ); - const approveCount = parseInt(sigs[0].cnt); + const updated = _syncServer!.changeDoc(docId, `Sign approval ${id}`, (d) => { + const a = d.approvals[id]; + if (!a) return; - if (approveCount >= approval[0].required_signatures) { - await sql.unsafe( - "UPDATE rinbox.approvals SET status 
= 'APPROVED', resolved_at = NOW() WHERE id = $1", - [id] - ); - return c.json({ ok: true, status: "APPROVED", signatures: approveCount }); - } + // Upsert signature: replace if signer already voted + const existingIdx = a.signatures.findIndex((s) => s.signerId === claims.sub); + const sig: ApprovalSignature = { + id: existingIdx >= 0 ? a.signatures[existingIdx].id : generateId(), + approvalId: id, + signerId: claims.sub, + vote, + signedAt: now, + }; - // Check for rejection (more rejects than possible remaining approvals) - const rejects = await sql.unsafe( - "SELECT count(*) as cnt FROM rinbox.approval_signatures WHERE approval_id = $1 AND vote = 'REJECT'", - [id] - ); - const rejectCount = parseInt(rejects[0].cnt); - if (rejectCount > 0) { - await sql.unsafe( - "UPDATE rinbox.approvals SET status = 'REJECTED', resolved_at = NOW() WHERE id = $1", - [id] - ); - return c.json({ ok: true, status: "REJECTED", signatures: approveCount }); - } + if (existingIdx >= 0) { + a.signatures[existingIdx].vote = vote; + a.signatures[existingIdx].signedAt = now; + } else { + a.signatures.push(sig); + } - return c.json({ ok: true, status: "PENDING", signatures: approveCount, required: approval[0].required_signatures }); + // Count approvals and rejections + const approveCount = a.signatures.filter((s) => s.vote === 'APPROVE').length; + const rejectCount = a.signatures.filter((s) => s.vote === 'REJECT').length; + + if (approveCount >= a.requiredSignatures) { + a.status = 'APPROVED'; + a.resolvedAt = now; + } else if (rejectCount > 0) { + a.status = 'REJECTED'; + a.resolvedAt = now; + } + }); + + if (!updated) return c.json({ error: "Approval not found" }, 404); + + const finalApproval = updated.approvals[id]; + const approveCount = finalApproval.signatures.filter((s) => s.vote === 'APPROVE').length; + + return c.json({ + ok: true, + status: finalApproval.status, + signatures: approveCount, + ...(finalApproval.status === 'PENDING' ? 
{ required: finalApproval.requiredSignatures } : {}), + }); }); // ── Workspaces API ── // GET /api/workspaces routes.get("/api/workspaces", async (c) => { - const rows = await sql.unsafe("SELECT * FROM rinbox.workspaces ORDER BY created_at DESC LIMIT 50"); - return c.json({ workspaces: rows }); + const workspaces = Array.from(_workspaces.values()) + .sort((a, b) => b.createdAt - a.createdAt) + .slice(0, 50) + .map((ws) => ({ + id: ws.id, + slug: ws.slug, + name: ws.name, + description: ws.description, + owner_did: ws.ownerDid, + created_at: new Date(ws.createdAt).toISOString(), + })); + return c.json({ workspaces }); }); // POST /api/workspaces @@ -341,17 +661,26 @@ routes.post("/api/workspaces", async (c) => { const { slug, name, description } = body; if (!slug || !name) return c.json({ error: "slug and name required" }, 400); - try { - const rows = await sql.unsafe( - `INSERT INTO rinbox.workspaces (slug, name, description, owner_did) - VALUES ($1, $2, $3, $4) RETURNING *`, - [slug, name, description || null, claims.sub] - ); - return c.json(rows[0], 201); - } catch (e: any) { - if (e.code === "23505") return c.json({ error: "Workspace already exists" }, 409); - throw e; - } + if (_workspaces.has(slug)) return c.json({ error: "Workspace already exists" }, 409); + + const ws: WorkspaceInfo = { + id: generateId(), + slug, + name, + description: description || null, + ownerDid: claims.sub, + createdAt: Date.now(), + }; + _workspaces.set(slug, ws); + + return c.json({ + id: ws.id, + slug: ws.slug, + name: ws.name, + description: ws.description, + owner_did: ws.ownerDid, + created_at: new Date(ws.createdAt).toISOString(), + }, 201); }); // GET /api/health @@ -359,13 +688,33 @@ routes.get("/api/health", (c) => c.json({ ok: true, imapSync: IMAP_HOST !== "" } // GET /api/sync-status — show IMAP sync state per mailbox routes.get("/api/sync-status", async (c) => { - const rows = await sql.unsafe( - `SELECT s.*, m.slug, m.name, m.email - FROM rinbox.sync_state s - JOIN 
rinbox.mailboxes m ON m.id = s.mailbox_id - ORDER BY s.last_sync_at DESC NULLS LAST` - ); - return c.json({ syncStates: rows }); + const syncStates: any[] = []; + + for (const [mailboxId, state] of _syncStates) { + const found = findMailboxById(mailboxId); + if (!found) continue; + const [, , doc] = found; + + syncStates.push({ + mailbox_id: mailboxId, + last_uid: state.lastUid, + uid_validity: state.uidValidity, + last_sync_at: state.lastSyncAt ? new Date(state.lastSyncAt).toISOString() : null, + error: state.error, + slug: doc.mailbox.slug, + name: doc.mailbox.name, + email: doc.mailbox.email, + }); + } + + syncStates.sort((a, b) => { + if (!a.last_sync_at && !b.last_sync_at) return 0; + if (!a.last_sync_at) return 1; + if (!b.last_sync_at) return -1; + return new Date(b.last_sync_at).getTime() - new Date(a.last_sync_at).getTime(); + }); + + return c.json({ syncStates }); }); // ── IMAP Sync Worker ── @@ -374,7 +723,17 @@ const IMAP_PORT = parseInt(process.env.IMAP_PORT || "993"); const IMAP_TLS_REJECT = process.env.IMAP_TLS_REJECT_UNAUTHORIZED !== "false"; const SYNC_INTERVAL = 30_000; // 30 seconds -async function syncMailbox(mailbox: any) { +interface SyncableMailbox { + id: string; + slug: string; + email: string; + space: string; + imap_user: string | null; + imap_host: string | null; + imap_port: number | null; +} + +async function syncMailbox(mailbox: SyncableMailbox) { let ImapFlow: any; let simpleParser: any; try { @@ -391,17 +750,16 @@ async function syncMailbox(mailbox: any) { if (!host || !user) return; // Get or create sync state - let syncState = (await sql.unsafe( - "SELECT * FROM rinbox.sync_state WHERE mailbox_id = $1", - [mailbox.id] - ))[0]; - + let syncState = _syncStates.get(mailbox.id); if (!syncState) { - const rows = await sql.unsafe( - "INSERT INTO rinbox.sync_state (mailbox_id) VALUES ($1) RETURNING *", - [mailbox.id] - ); - syncState = rows[0]; + syncState = { + mailboxId: mailbox.id, + lastUid: 0, + uidValidity: null, + lastSyncAt: 
null, + error: null, + }; + _syncStates.set(mailbox.id, syncState); } const client = new ImapFlow({ @@ -422,20 +780,19 @@ async function syncMailbox(mailbox: any) { const uidValidity = status?.uidValidity; // UID validity changed — need full resync - if (syncState.uid_validity && uidValidity && syncState.uid_validity !== uidValidity) { - await sql.unsafe( - "UPDATE rinbox.sync_state SET last_uid = 0, uid_validity = $1 WHERE mailbox_id = $2", - [uidValidity, mailbox.id] - ); - syncState.last_uid = 0; + if (syncState.uidValidity && uidValidity && syncState.uidValidity !== uidValidity) { + syncState.lastUid = 0; + syncState.uidValidity = uidValidity; } // Fetch messages newer than last synced UID - const lastUid = syncState.last_uid || 0; + const lastUid = syncState.lastUid || 0; const range = lastUid > 0 ? `${lastUid + 1}:*` : "1:*"; let maxUid = lastUid; let count = 0; + const docId = mailboxDocId(mailbox.space, mailbox.id); + for await (const msg of client.fetch(range, { envelope: true, source: true, @@ -449,30 +806,45 @@ async function syncMailbox(mailbox: any) { const fromAddr = parsed.from?.value?.[0]?.address || msg.envelope?.from?.[0]?.address || ""; const fromName = parsed.from?.value?.[0]?.name || msg.envelope?.from?.[0]?.name || ""; const subject = parsed.subject || msg.envelope?.subject || "(no subject)"; - const toAddrs = (parsed.to?.value || []).map((a: any) => ({ address: a.address, name: a.name })); - const ccAddrs = (parsed.cc?.value || []).map((a: any) => ({ address: a.address, name: a.name })); + const toAddrs = (parsed.to?.value || []).map((a: any) => a.address || ""); + const ccAddrs = (parsed.cc?.value || []).map((a: any) => a.address || ""); const messageId = parsed.messageId || msg.envelope?.messageId || null; const hasAttachments = (parsed.attachments?.length || 0) > 0; + const receivedAt = parsed.date ? 
parsed.date.getTime() : Date.now(); - await sql.unsafe( - `INSERT INTO rinbox.threads (mailbox_id, message_id, subject, from_address, from_name, to_addresses, cc_addresses, body_text, body_html, has_attachments, received_at) - VALUES ($1, $2, $3, $4, $5, $6::jsonb, $7::jsonb, $8, $9, $10, $11) - ON CONFLICT DO NOTHING`, - [ - mailbox.id, - messageId, - subject, - fromAddr, - fromName, - JSON.stringify(toAddrs), - JSON.stringify(ccAddrs), - parsed.text || null, - parsed.html || null, - hasAttachments, - parsed.date || new Date(), - ] - ); - count++; + const threadId = generateId(); + + // Check for duplicate messageId before inserting + const currentDoc = _syncServer!.getDoc(docId); + const isDuplicate = messageId && currentDoc && + Object.values(currentDoc.threads).some((t) => t.messageId === messageId); + + if (!isDuplicate) { + _syncServer!.changeDoc(docId, `IMAP sync: ${subject}`, (d) => { + d.threads[threadId] = { + id: threadId, + mailboxId: mailbox.id, + messageId, + subject, + fromAddress: fromAddr, + fromName: fromName, + toAddresses: toAddrs, + ccAddresses: ccAddrs, + bodyText: parsed.text || '', + bodyHtml: parsed.html || '', + tags: [], + status: 'open', + isRead: false, + isStarred: false, + assignedTo: null, + hasAttachments, + receivedAt, + createdAt: Date.now(), + comments: [], + }; + }); + count++; + } } catch (parseErr) { console.error(`[Inbox] Parse error UID ${msg.uid}:`, parseErr); } @@ -481,10 +853,10 @@ async function syncMailbox(mailbox: any) { } // Update sync state - await sql.unsafe( - "UPDATE rinbox.sync_state SET last_uid = $1, uid_validity = $2, last_sync_at = NOW(), error = NULL WHERE mailbox_id = $3", - [maxUid, uidValidity || null, mailbox.id] - ); + syncState.lastUid = maxUid; + syncState.uidValidity = uidValidity || null; + syncState.lastSyncAt = Date.now(); + syncState.error = null; if (count > 0) console.log(`[Inbox] Synced ${count} messages for ${mailbox.email}`); } finally { @@ -494,14 +866,14 @@ async function 
syncMailbox(mailbox: any) { await client.logout(); } catch (e: any) { console.error(`[Inbox] IMAP sync error for ${mailbox.email}:`, e.message); - await sql.unsafe( - "UPDATE rinbox.sync_state SET error = $1, last_sync_at = NOW() WHERE mailbox_id = $2", - [e.message, mailbox.id] - ).catch(() => {}); + if (syncState) { + syncState.error = e.message; + syncState.lastSyncAt = Date.now(); + } } } -async function runSyncLoop() { +function runSyncLoop() { if (!IMAP_HOST) { console.log("[Inbox] IMAP_HOST not set — IMAP sync disabled"); return; @@ -509,10 +881,26 @@ async function runSyncLoop() { console.log(`[Inbox] IMAP sync enabled — polling every ${SYNC_INTERVAL / 1000}s`); const doSync = async () => { + if (!_syncServer) return; + try { - const mailboxes = await sql.unsafe( - "SELECT * FROM rinbox.mailboxes WHERE imap_user IS NOT NULL" - ); + // Find all mailboxes with IMAP config + const mailboxes: SyncableMailbox[] = []; + for (const { space, doc } of getAllMailboxDocs()) { + const imapCfg = _imapConfigs.get(doc.mailbox.id); + if (imapCfg?.imapUser) { + mailboxes.push({ + id: doc.mailbox.id, + slug: doc.mailbox.slug, + email: doc.mailbox.email, + space, + imap_user: imapCfg.imapUser, + imap_host: imapCfg.imapHost, + imap_port: imapCfg.imapPort, + }); + } + } + for (const mb of mailboxes) { await syncMailbox(mb); } @@ -599,7 +987,7 @@ routes.get("/", (c) => { export const inboxModule: RSpaceModule = { id: "rinbox", name: "rInbox", - icon: "📨", + icon: "\u{1F4E8}", description: "Collaborative email with multisig approval", scoping: { defaultScope: 'space', userConfigurable: false }, docSchemas: [{ pattern: '{space}:inbox:mailboxes:{mailboxId}', description: 'Mailbox with threads and approvals', init: mailboxSchema.init }], @@ -607,7 +995,7 @@ export const inboxModule: RSpaceModule = { landingPage: renderLanding, async onInit(ctx) { _syncServer = ctx.syncServer; - await initDB(); + console.log("[Inbox] Module initialized (Automerge-only, no PG)"); }, standaloneDomain: 
"rinbox.online", feeds: [ diff --git a/modules/rnotes/db/schema.sql b/modules/rnotes/db/schema.sql.archived similarity index 100% rename from modules/rnotes/db/schema.sql rename to modules/rnotes/db/schema.sql.archived diff --git a/modules/rnotes/mod.ts b/modules/rnotes/mod.ts index 75f8a92c..00063b85 100644 --- a/modules/rnotes/mod.ts +++ b/modules/rnotes/mod.ts @@ -4,153 +4,68 @@ * Port of rnotes-online (Next.js + Prisma → Hono + postgres.js). * Supports multiple note types: text, code, bookmark, audio, image, file. * - * Local-first migration: dual-write (Automerge + PG) during transition. + * Local-first: All data stored exclusively in Automerge documents via SyncServer. */ import { Hono } from "hono"; -import { readFileSync } from "node:fs"; -import { resolve } from "node:path"; import * as Automerge from "@automerge/automerge"; -import { sql } from "../../shared/db/pool"; import { renderShell } from "../../server/shell"; import { getModuleInfoList } from "../../shared/module"; import type { RSpaceModule, SpaceLifecycleContext } from "../../shared/module"; import { verifyEncryptIDToken, extractToken } from "@encryptid/sdk/server"; import { renderLanding } from "./landing"; -import { notebookSchema, notebookDocId } from "./schemas"; +import { notebookSchema, notebookDocId, createNoteItem } from "./schemas"; import type { NotebookDoc, NoteItem } from "./schemas"; import type { SyncServer } from "../../server/local-first/sync-server"; const routes = new Hono(); -// ── DB initialization ── -const SCHEMA_SQL = readFileSync(resolve(import.meta.dir, "db/schema.sql"), "utf-8"); - -async function initDB() { - try { - await sql.unsafe(SCHEMA_SQL); - console.log("[Notes] DB schema initialized"); - } catch (e) { - console.error("[Notes] DB init error:", e); - } -} - -async function seedDemoIfEmpty() { - try { - const count = await sql.unsafe("SELECT count(*)::int as cnt FROM rnotes.notebooks"); - if (parseInt(count[0].cnt) > 0) return; - - // Notebook 1: Project Ideas - 
const nb1 = await sql.unsafe( - `INSERT INTO rnotes.notebooks (title, description, cover_color, is_public) - VALUES ('Project Ideas', 'Brainstorms and design notes for the r* ecosystem', '#6366f1', true) RETURNING id` - ); - // Notebook 2: Meeting Notes - const nb2 = await sql.unsafe( - `INSERT INTO rnotes.notebooks (title, description, cover_color, is_public) - VALUES ('Meeting Notes', 'Weekly standups, design reviews, and retrospectives', '#f59e0b', true) RETURNING id` - ); - // Notebook 3: How-To Guides - const nb3 = await sql.unsafe( - `INSERT INTO rnotes.notebooks (title, description, cover_color, is_public) - VALUES ('How-To Guides', 'Tutorials and onboarding guides for contributors', '#10b981', true) RETURNING id` - ); - - // Create tags - const tagIds: Record = {}; - for (const name of ["design", "architecture", "cosmolocal", "governance", "onboarding", "review", "standup"]) { - const row = await sql.unsafe( - `INSERT INTO rnotes.tags (name) VALUES ($1) ON CONFLICT (name) DO UPDATE SET name = $1 RETURNING id`, - [name] - ); - tagIds[name] = row[0].id; - } - - // Seed notes - const notes = [ - { - nbId: nb1[0].id, title: "Cosmolocal Manufacturing Network", - content: "## Vision\n\nDesign global, manufacture local. 
Every creative work should be producible by the nearest capable provider.\n\n## Key Components\n\n- **Artifact Spec**: Standardized envelope describing what to produce\n- **Provider Registry**: Directory of local makers with capabilities + pricing\n- **rCart**: Marketplace connecting creators to providers\n- **Revenue Splits**: 50% provider, 35% creator, 15% community\n\n## Open Questions\n\n- How do we handle quality assurance across distributed providers?\n- Should providers be able to set custom margins?\n- What's the minimum viable set of capabilities for launch?", - tags: ["cosmolocal", "architecture"], pinned: true, - }, - { - nbId: nb1[0].id, title: "Revenue Sharing Model", - content: "## Current Split\n\n| Recipient | Share | Rationale |\n|-----------|-------|-----------|\n| Provider | 50% | Covers materials, labor, shipping |\n| Creator | 35% | Design and creative work |\n| Community | 15% | Platform maintenance, commons fund |\n\n## Enoughness Thresholds\n\nOnce a funnel reaches its sufficient threshold, surplus flows to the next highest-need funnel. This prevents accumulation and keeps resources flowing.\n\n## Implementation\n\nrFunds Flow Service handles deposits from rCart. Each order total is routed through the configured flow → funnel → overflow splits.", - tags: ["cosmolocal", "governance"], - }, - { - nbId: nb1[0].id, title: "FUN Model: Forget, Update, New", - content: "## Replacing CRUD\n\nNothing is permanently destroyed in rSpace.\n\n- **Forget** replaces Delete — soft-delete with `forgotten: true`. Shapes stay in document, hidden from canvas. Memory panel lets you browse + Remember.\n- **Update** stays the same — public `sync.updateShape()` for programmatic updates\n- **New** replaces Create — language shift: toolbar says \"New X\", events are `new-shape`\n\n## Why?\n\nData sovereignty means users should always be able to recover their work. 
The Memory panel makes forgotten shapes discoverable, like a digital archive.", - tags: ["design", "architecture"], - }, - { - nbId: nb2[0].id, title: "Weekly Standup — Feb 15, 2026", - content: "## Attendees\n\nAlice, Bob, Carol\n\n## Updates\n\n**Alice**: Finished EncryptID guardian recovery flow. 2-of-3 guardian approval working. Next: device linking via QR code.\n\n**Bob**: Provider registry now has 6 printers globally. Working on proximity search with earthdistance extension.\n\n**Carol**: rFunds river visualization deployed. Enoughness layer showing golden glow on sufficient funnels.\n\n## Action Items\n\n- [ ] Alice: Document guardian recovery API endpoints\n- [ ] Bob: Add turnaround time estimates to provider matching\n- [ ] Carol: Add demo mode to river view with mock data", - tags: ["standup"], - }, - { - nbId: nb2[0].id, title: "Design Review — rBooks Flipbook Reader", - content: "## What We Reviewed\n\nThe react-pageflip integration for PDF reading in rBooks.\n\n## Feedback\n\n1. **Page turn animation** — smooth, feels good on desktop. On mobile, swipe gesture needs larger hit area.\n2. **PDF rendering** — react-pdf handles most PDFs well. Large files (>50MB) cause browser memory issues.\n3. **Read Locally mode** — IndexedDB storage works. 
Need to show storage usage somewhere.\n\n## Decisions\n\n- Ship current version, iterate on mobile\n- Add a 50MB soft warning on upload\n- Explore PDF.js worker for background rendering", - tags: ["review", "design"], - }, - { - nbId: nb3[0].id, title: "Getting Started with rSpace Development", - content: "## Prerequisites\n\n- Bun runtime (v1.3+)\n- Docker + Docker Compose\n- Git access to Gitea\n\n## Local Setup\n\n```bash\ngit clone ssh://git@gitea.jeffemmett.com:223/jeffemmett/rspace-online.git\ncd rspace-online\nbun install\nbun run dev\n```\n\n## Module Structure\n\nEach module lives in `modules/{name}/` and exports an `RSpaceModule` interface:\n\n```typescript\nexport interface RSpaceModule {\n id: string;\n name: string;\n icon: string;\n description: string;\n routes: Hono;\n}\n```\n\n## Adding a New Module\n\n1. Create `modules/{name}/mod.ts`\n2. Create `modules/{name}/components/` for web components\n3. Add build step in `vite.config.ts`\n4. Register in `server/index.ts`", - tags: ["onboarding"], - }, - { - nbId: nb3[0].id, title: "How to Add a Cosmolocal Provider", - content: "## Overview\n\nProviders are local print shops, makerspaces, or studios that can fulfill rCart orders.\n\n## Steps\n\n1. Visit `providers.mycofi.earth`\n2. Sign in with your rStack passkey\n3. Click \"Register Provider\"\n4. Fill in:\n - Name, location (address + coordinates)\n - Capabilities (laser-print, risograph, screen-print, etc.)\n - Substrates (paper types, fabric, vinyl)\n - Turnaround time and pricing\n5. Submit for review\n\n## Matching Algorithm\n\nWhen an order comes in, rCart matches based on:\n- Required capabilities vs. 
provider capabilities\n- Geographic distance (earthdistance extension)\n- Turnaround time\n- Price", - tags: ["cosmolocal", "onboarding"], - }, - ]; - - for (const n of notes) { - const row = await sql.unsafe( - `INSERT INTO rnotes.notes (notebook_id, title, content, content_plain, type, is_pinned) - VALUES ($1, $2, $3, $4, 'NOTE', $5) RETURNING id`, - [n.nbId, n.title, n.content, n.content.replace(/<[^>]*>/g, " ").replace(/[#*|`\-\[\]]/g, " ").replace(/\s+/g, " ").trim(), n.pinned || false] - ); - for (const tagName of n.tags) { - if (tagIds[tagName]) { - await sql.unsafe( - "INSERT INTO rnotes.note_tags (note_id, tag_id) VALUES ($1, $2) ON CONFLICT DO NOTHING", - [row[0].id, tagIds[tagName]] - ); - } - } - } - - console.log("[Notes] Demo data seeded: 3 notebooks, 7 notes, 7 tags"); - } catch (e) { - console.error("[Notes] Seed error:", e); - } -} - -// initDB + seedDemo are called from onInit lifecycle hook (see module export below) - // ── SyncServer ref (set during onInit) ── let _syncServer: SyncServer | null = null; -/** Check if a space has been migrated to local-first for notes. */ -function isLocalFirst(space: string): boolean { - if (!_syncServer) return false; - // A space is local-first if any notebook doc exists for it in the SyncServer - // We check by looking for docs with the pattern {space}:notes:notebooks:* - return _syncServer.getDoc(`${space}:notes:notebooks:default`) !== undefined; +// ── Automerge helpers ── + +/** Lazily ensure a notebook doc exists for a given space + notebookId. */ +function ensureDoc(space: string, notebookId: string): NotebookDoc { + const docId = notebookDocId(space, notebookId); + let doc = _syncServer!.getDoc(docId); + if (!doc) { + doc = Automerge.change(Automerge.init(), 'init', (d) => { + const init = notebookSchema.init(); + Object.assign(d, init); + d.meta.spaceSlug = space; + d.notebook.id = notebookId; + }); + _syncServer!.setDoc(docId, doc); + } + return doc; +} + +/** Generate a URL-safe slug from a title. 
*/ +function slugify(title: string): string { + return title + .toLowerCase() + .replace(/[^a-z0-9]+/g, "-") + .replace(/^-|-$/g, "") + .slice(0, 80) || "untitled"; +} + +/** Generate a compact unique ID (timestamp + random suffix). */ +function newId(): string { + return `${Date.now().toString(36)}-${Math.random().toString(36).slice(2, 8)}`; } // ── Automerge ↔ REST conversion helpers ── /** List all notebook docs for a space from the SyncServer. */ -function listAutomergeNotebooks(space: string): { docId: string; doc: NotebookDoc }[] { +function listNotebooks(space: string): { docId: string; doc: NotebookDoc }[] { if (!_syncServer) return []; const results: { docId: string; doc: NotebookDoc }[] = []; const prefix = `${space}:notes:notebooks:`; for (const docId of _syncServer.listDocs()) { if (docId.startsWith(prefix)) { const doc = _syncServer.getDoc(docId); - if (doc) results.push({ docId, doc }); + if (doc && doc.notebook && doc.notebook.title) results.push({ docId, doc }); } } return results; @@ -196,99 +111,123 @@ function noteToRest(item: NoteItem) { } /** Find the notebook doc that contains a given note ID. */ -function findNoteInAutomerge(space: string, noteId: string): { docId: string; doc: NotebookDoc; item: NoteItem } | null { - for (const { docId, doc } of listAutomergeNotebooks(space)) { +function findNote(space: string, noteId: string): { docId: string; doc: NotebookDoc; item: NoteItem } | null { + for (const { docId, doc } of listNotebooks(space)) { const item = doc.items[noteId]; if (item) return { docId, doc, item }; } return null; } -/** Write a note to the Automerge doc (creates/updates). 
*/ -function writeNoteToAutomerge(space: string, notebookPgId: string, noteId: string, data: Partial): void { +// ── Seed demo data into Automerge (runs once if no notebooks exist) ── + +function seedDemoIfEmpty(space: string) { if (!_syncServer) return; - // Find the Automerge notebook doc for this PG notebook - // Convention: PG notebook UUID maps to docId suffix - const docId = notebookDocId(space, notebookPgId); - const doc = _syncServer.getDoc(docId); - if (!doc) return; // not migrated yet - _syncServer.changeDoc(docId, `Update note ${noteId}`, (d) => { - if (!d.items[noteId]) { - // New note - d.items[noteId] = { - id: noteId, - notebookId: notebookPgId, - authorId: data.authorId ?? null, - title: data.title ?? '', - content: data.content ?? '', - contentPlain: data.contentPlain ?? '', - type: data.type ?? 'NOTE', - url: data.url ?? null, - language: data.language ?? null, - fileUrl: data.fileUrl ?? null, - mimeType: data.mimeType ?? null, - fileSize: data.fileSize ?? null, - duration: data.duration ?? null, - isPinned: data.isPinned ?? false, - sortOrder: data.sortOrder ?? 0, - tags: data.tags ?? [], - createdAt: data.createdAt ?? 
Date.now(), - updatedAt: Date.now(), - }; - } else { - // Update existing fields - const item = d.items[noteId]; - if (data.title !== undefined) item.title = data.title; - if (data.content !== undefined) item.content = data.content; - if (data.contentPlain !== undefined) item.contentPlain = data.contentPlain; - if (data.type !== undefined) item.type = data.type; - if (data.url !== undefined) item.url = data.url; - if (data.language !== undefined) item.language = data.language; - if (data.isPinned !== undefined) item.isPinned = data.isPinned; - if (data.sortOrder !== undefined) item.sortOrder = data.sortOrder; - if (data.tags !== undefined) item.tags = data.tags; - item.updatedAt = Date.now(); - } + // If the space already has notebooks, skip + if (listNotebooks(space).length > 0) return; + + const now = Date.now(); + + // Notebook 1: Project Ideas + const nb1Id = newId(); + const nb1DocId = notebookDocId(space, nb1Id); + const nb1Doc = Automerge.change(Automerge.init(), "Seed: Project Ideas", (d) => { + d.meta = { module: "notes", collection: "notebooks", version: 1, spaceSlug: space, createdAt: now }; + d.notebook = { id: nb1Id, title: "Project Ideas", slug: "project-ideas", description: "Brainstorms and design notes for the r* ecosystem", coverColor: "#6366f1", isPublic: true, createdAt: now, updatedAt: now }; + d.items = {}; }); -} + _syncServer.setDoc(nb1DocId, nb1Doc); -// ── Helper: get or create user ── -async function getOrCreateUser(did: string, username?: string) { - const rows = await sql.unsafe( - `INSERT INTO rnotes.users (did, username) VALUES ($1, $2) - ON CONFLICT (did) DO UPDATE SET username = COALESCE($2, rnotes.users.username) - RETURNING *`, - [did, username || null] - ); - return rows[0]; + // Notebook 2: Meeting Notes + const nb2Id = newId(); + const nb2DocId = notebookDocId(space, nb2Id); + const nb2Doc = Automerge.change(Automerge.init(), "Seed: Meeting Notes", (d) => { + d.meta = { module: "notes", collection: "notebooks", version: 1, 
spaceSlug: space, createdAt: now }; + d.notebook = { id: nb2Id, title: "Meeting Notes", slug: "meeting-notes", description: "Weekly standups, design reviews, and retrospectives", coverColor: "#f59e0b", isPublic: true, createdAt: now, updatedAt: now }; + d.items = {}; + }); + _syncServer.setDoc(nb2DocId, nb2Doc); + + // Notebook 3: How-To Guides + const nb3Id = newId(); + const nb3DocId = notebookDocId(space, nb3Id); + const nb3Doc = Automerge.change(Automerge.init(), "Seed: How-To Guides", (d) => { + d.meta = { module: "notes", collection: "notebooks", version: 1, spaceSlug: space, createdAt: now }; + d.notebook = { id: nb3Id, title: "How-To Guides", slug: "how-to-guides", description: "Tutorials and onboarding guides for contributors", coverColor: "#10b981", isPublic: true, createdAt: now, updatedAt: now }; + d.items = {}; + }); + _syncServer.setDoc(nb3DocId, nb3Doc); + + // Seed notes into notebooks + const notes = [ + { + nbId: nb1Id, nbDocId: nb1DocId, title: "Cosmolocal Manufacturing Network", + content: "## Vision\n\nDesign global, manufacture local. 
Every creative work should be producible by the nearest capable provider.\n\n## Key Components\n\n- **Artifact Spec**: Standardized envelope describing what to produce\n- **Provider Registry**: Directory of local makers with capabilities + pricing\n- **rCart**: Marketplace connecting creators to providers\n- **Revenue Splits**: 50% provider, 35% creator, 15% community\n\n## Open Questions\n\n- How do we handle quality assurance across distributed providers?\n- Should providers be able to set custom margins?\n- What's the minimum viable set of capabilities for launch?", + tags: ["cosmolocal", "architecture"], pinned: true, + }, + { + nbId: nb1Id, nbDocId: nb1DocId, title: "Revenue Sharing Model", + content: "## Current Split\n\n| Recipient | Share | Rationale |\n|-----------|-------|-----------|\n| Provider | 50% | Covers materials, labor, shipping |\n| Creator | 35% | Design and creative work |\n| Community | 15% | Platform maintenance, commons fund |\n\n## Enoughness Thresholds\n\nOnce a funnel reaches its sufficient threshold, surplus flows to the next highest-need funnel. This prevents accumulation and keeps resources flowing.\n\n## Implementation\n\nrFunds Flow Service handles deposits from rCart. Each order total is routed through the configured flow → funnel → overflow splits.", + tags: ["cosmolocal", "governance"], + }, + { + nbId: nb1Id, nbDocId: nb1DocId, title: "FUN Model: Forget, Update, New", + content: "## Replacing CRUD\n\nNothing is permanently destroyed in rSpace.\n\n- **Forget** replaces Delete — soft-delete with `forgotten: true`. Shapes stay in document, hidden from canvas. Memory panel lets you browse + Remember.\n- **Update** stays the same — public `sync.updateShape()` for programmatic updates\n- **New** replaces Create — language shift: toolbar says \"New X\", events are `new-shape`\n\n## Why?\n\nData sovereignty means users should always be able to recover their work. 
The Memory panel makes forgotten shapes discoverable, like a digital archive.", + tags: ["design", "architecture"], + }, + { + nbId: nb2Id, nbDocId: nb2DocId, title: "Weekly Standup — Feb 15, 2026", + content: "## Attendees\n\nAlice, Bob, Carol\n\n## Updates\n\n**Alice**: Finished EncryptID guardian recovery flow. 2-of-3 guardian approval working. Next: device linking via QR code.\n\n**Bob**: Provider registry now has 6 printers globally. Working on proximity search with earthdistance extension.\n\n**Carol**: rFunds river visualization deployed. Enoughness layer showing golden glow on sufficient funnels.\n\n## Action Items\n\n- [ ] Alice: Document guardian recovery API endpoints\n- [ ] Bob: Add turnaround time estimates to provider matching\n- [ ] Carol: Add demo mode to river view with mock data", + tags: ["standup"], + }, + { + nbId: nb2Id, nbDocId: nb2DocId, title: "Design Review — rBooks Flipbook Reader", + content: "## What We Reviewed\n\nThe react-pageflip integration for PDF reading in rBooks.\n\n## Feedback\n\n1. **Page turn animation** — smooth, feels good on desktop. On mobile, swipe gesture needs larger hit area.\n2. **PDF rendering** — react-pdf handles most PDFs well. Large files (>50MB) cause browser memory issues.\n3. **Read Locally mode** — IndexedDB storage works. 
Need to show storage usage somewhere.\n\n## Decisions\n\n- Ship current version, iterate on mobile\n- Add a 50MB soft warning on upload\n- Explore PDF.js worker for background rendering", + tags: ["review", "design"], + }, + { + nbId: nb3Id, nbDocId: nb3DocId, title: "Getting Started with rSpace Development", + content: "## Prerequisites\n\n- Bun runtime (v1.3+)\n- Docker + Docker Compose\n- Git access to Gitea\n\n## Local Setup\n\n```bash\ngit clone ssh://git@gitea.jeffemmett.com:223/jeffemmett/rspace-online.git\ncd rspace-online\nbun install\nbun run dev\n```\n\n## Module Structure\n\nEach module lives in `modules/{name}/` and exports an `RSpaceModule` interface:\n\n```typescript\nexport interface RSpaceModule {\n id: string;\n name: string;\n icon: string;\n description: string;\n routes: Hono;\n}\n```\n\n## Adding a New Module\n\n1. Create `modules/{name}/mod.ts`\n2. Create `modules/{name}/components/` for web components\n3. Add build step in `vite.config.ts`\n4. Register in `server/index.ts`", + tags: ["onboarding"], + }, + { + nbId: nb3Id, nbDocId: nb3DocId, title: "How to Add a Cosmolocal Provider", + content: "## Overview\n\nProviders are local print shops, makerspaces, or studios that can fulfill rCart orders.\n\n## Steps\n\n1. Visit `providers.mycofi.earth`\n2. Sign in with your rStack passkey\n3. Click \"Register Provider\"\n4. Fill in:\n - Name, location (address + coordinates)\n - Capabilities (laser-print, risograph, screen-print, etc.)\n - Substrates (paper types, fabric, vinyl)\n - Turnaround time and pricing\n5. Submit for review\n\n## Matching Algorithm\n\nWhen an order comes in, rCart matches based on:\n- Required capabilities vs. 
provider capabilities\n- Geographic distance (earthdistance extension)\n- Turnaround time\n- Price", + tags: ["cosmolocal", "onboarding"], + }, + ]; + + for (const n of notes) { + const noteId = newId(); + const contentPlain = n.content.replace(/<[^>]*>/g, " ").replace(/[#*|`\-\[\]]/g, " ").replace(/\s+/g, " ").trim(); + const item = createNoteItem(noteId, n.nbId, n.title, { + content: n.content, + contentPlain, + tags: n.tags, + isPinned: n.pinned || false, + }); + + _syncServer!.changeDoc(n.nbDocId, `Seed note: ${n.title}`, (d) => { + d.items[noteId] = item; + }); + } + + console.log("[Notes] Demo data seeded: 3 notebooks, 7 notes"); } // ── Notebooks API ── -// GET /api/notebooks — list notebooks (Automerge-first, PG fallback) +// GET /api/notebooks — list notebooks routes.get("/api/notebooks", async (c) => { const space = c.req.param("space") || "demo"; - // Try Automerge first - if (isLocalFirst(space)) { - const notebooks = listAutomergeNotebooks(space).map(({ doc }) => notebookToRest(doc)); - notebooks.sort((a, b) => new Date(b.updated_at).getTime() - new Date(a.updated_at).getTime()); - return c.json({ notebooks, source: "automerge" }); - } - - // PG fallback - const rows = await sql.unsafe( - `SELECT n.*, count(note.id) as note_count - FROM rnotes.notebooks n - LEFT JOIN rnotes.notes note ON note.notebook_id = n.id - GROUP BY n.id - ORDER BY n.updated_at DESC LIMIT 50` - ); - return c.json({ notebooks: rows }); + const notebooks = listNotebooks(space).map(({ doc }) => notebookToRest(doc)); + notebooks.sort((a, b) => new Date(b.updated_at).getTime() - new Date(a.updated_at).getTime()); + return c.json({ notebooks, source: "automerge" }); }); -// POST /api/notebooks — create notebook (dual-write) +// POST /api/notebooks — create notebook routes.post("/api/notebooks", async (c) => { const space = c.req.param("space") || "demo"; const token = extractToken(c.req.raw.headers); @@ -299,75 +238,48 @@ routes.post("/api/notebooks", async (c) => { const body = await 
c.req.json(); const { title, description, cover_color } = body; - // PG write - const user = await getOrCreateUser(claims.sub, claims.username); - const rows = await sql.unsafe( - `INSERT INTO rnotes.notebooks (title, description, cover_color, owner_id) - VALUES ($1, $2, $3, $4) RETURNING *`, - [title || "Untitled Notebook", description || null, cover_color || "#3b82f6", user.id] - ); - const pgRow = rows[0]; + const nbTitle = title || "Untitled Notebook"; + const notebookId = newId(); + const now = Date.now(); - // Automerge dual-write: create a new notebook doc - if (_syncServer && isLocalFirst(space)) { - const docId = notebookDocId(space, pgRow.id); - if (!_syncServer.getDoc(docId)) { - const doc = Automerge.init(); - const initialized = Automerge.change(doc, "Create notebook", (d) => { - d.meta = { module: "notes", collection: "notebooks", version: 1, spaceSlug: space, createdAt: Date.now() }; - d.notebook = { - id: pgRow.id, title: pgRow.title, slug: pgRow.slug || "", - description: pgRow.description || "", coverColor: pgRow.cover_color || "#3b82f6", - isPublic: pgRow.is_public || false, createdAt: Date.now(), updatedAt: Date.now(), - }; - d.items = {}; - }); - _syncServer.setDoc(docId, initialized); - } - } + const doc = ensureDoc(space, notebookId); + _syncServer!.changeDoc(notebookDocId(space, notebookId), "Create notebook", (d) => { + d.notebook.id = notebookId; + d.notebook.title = nbTitle; + d.notebook.slug = slugify(nbTitle); + d.notebook.description = description || ""; + d.notebook.coverColor = cover_color || "#3b82f6"; + d.notebook.isPublic = false; + d.notebook.createdAt = now; + d.notebook.updatedAt = now; + }); - return c.json(pgRow, 201); + const updatedDoc = _syncServer!.getDoc(notebookDocId(space, notebookId))!; + return c.json(notebookToRest(updatedDoc), 201); }); -// GET /api/notebooks/:id — notebook detail with notes (Automerge-first) +// GET /api/notebooks/:id — notebook detail with notes routes.get("/api/notebooks/:id", async (c) => { 
const space = c.req.param("space") || "demo"; const id = c.req.param("id"); - // Automerge first - if (isLocalFirst(space)) { - const docId = notebookDocId(space, id); - const doc = _syncServer?.getDoc(docId); - if (doc) { - const nb = notebookToRest(doc); - const notes = Object.values(doc.items) - .map(noteToRest) - .sort((a, b) => { - if (a.is_pinned !== b.is_pinned) return a.is_pinned ? -1 : 1; - return new Date(b.updated_at).getTime() - new Date(a.updated_at).getTime(); - }); - return c.json({ ...nb, notes, source: "automerge" }); - } + const docId = notebookDocId(space, id); + const doc = _syncServer?.getDoc(docId); + if (!doc || !doc.notebook || !doc.notebook.title) { + return c.json({ error: "Notebook not found" }, 404); } - // PG fallback - const nb = await sql.unsafe("SELECT * FROM rnotes.notebooks WHERE id = $1", [id]); - if (nb.length === 0) return c.json({ error: "Notebook not found" }, 404); - - const notes = await sql.unsafe( - `SELECT n.*, array_agg(t.name) FILTER (WHERE t.name IS NOT NULL) as tags - FROM rnotes.notes n - LEFT JOIN rnotes.note_tags nt ON nt.note_id = n.id - LEFT JOIN rnotes.tags t ON t.id = nt.tag_id - WHERE n.notebook_id = $1 - GROUP BY n.id - ORDER BY n.is_pinned DESC, n.sort_order ASC, n.updated_at DESC`, - [id] - ); - return c.json({ ...nb[0], notes }); + const nb = notebookToRest(doc); + const notes = Object.values(doc.items) + .map(noteToRest) + .sort((a, b) => { + if (a.is_pinned !== b.is_pinned) return a.is_pinned ? 
-1 : 1; + return new Date(b.updated_at).getTime() - new Date(a.updated_at).getTime(); + }); + return c.json({ ...nb, notes, source: "automerge" }); }); -// PUT /api/notebooks/:id — update notebook (dual-write) +// PUT /api/notebooks/:id — update notebook routes.put("/api/notebooks/:id", async (c) => { const space = c.req.param("space") || "demo"; const token = extractToken(c.req.raw.headers); @@ -379,124 +291,90 @@ routes.put("/api/notebooks/:id", async (c) => { const body = await c.req.json(); const { title, description, cover_color, is_public } = body; - // PG write - const fields: string[] = []; - const params: any[] = []; - let idx = 1; - - if (title !== undefined) { fields.push(`title = $${idx}`); params.push(title); idx++; } - if (description !== undefined) { fields.push(`description = $${idx}`); params.push(description); idx++; } - if (cover_color !== undefined) { fields.push(`cover_color = $${idx}`); params.push(cover_color); idx++; } - if (is_public !== undefined) { fields.push(`is_public = $${idx}`); params.push(is_public); idx++; } - - if (fields.length === 0) return c.json({ error: "No fields to update" }, 400); - fields.push("updated_at = NOW()"); - params.push(id); - - const rows = await sql.unsafe( - `UPDATE rnotes.notebooks SET ${fields.join(", ")} WHERE id = $${idx} RETURNING *`, - params - ); - if (rows.length === 0) return c.json({ error: "Notebook not found" }, 404); - - // Automerge dual-write: update notebook metadata - if (_syncServer && isLocalFirst(space)) { - const docId = notebookDocId(space, id); - _syncServer.changeDoc(docId, "Update notebook", (d) => { - if (title !== undefined) d.notebook.title = title; - if (description !== undefined) d.notebook.description = description; - if (cover_color !== undefined) d.notebook.coverColor = cover_color; - if (is_public !== undefined) d.notebook.isPublic = is_public; - d.notebook.updatedAt = Date.now(); - }); + if (title === undefined && description === undefined && cover_color === undefined && 
is_public === undefined) { + return c.json({ error: "No fields to update" }, 400); } - return c.json(rows[0]); + const docId = notebookDocId(space, id); + const doc = _syncServer?.getDoc(docId); + if (!doc || !doc.notebook || !doc.notebook.title) { + return c.json({ error: "Notebook not found" }, 404); + } + + _syncServer!.changeDoc(docId, "Update notebook", (d) => { + if (title !== undefined) d.notebook.title = title; + if (description !== undefined) d.notebook.description = description; + if (cover_color !== undefined) d.notebook.coverColor = cover_color; + if (is_public !== undefined) d.notebook.isPublic = is_public; + d.notebook.updatedAt = Date.now(); + }); + + const updatedDoc = _syncServer!.getDoc(docId)!; + return c.json(notebookToRest(updatedDoc)); }); -// DELETE /api/notebooks/:id (dual-write) +// DELETE /api/notebooks/:id routes.delete("/api/notebooks/:id", async (c) => { const space = c.req.param("space") || "demo"; const id = c.req.param("id"); - const result = await sql.unsafe( - "DELETE FROM rnotes.notebooks WHERE id = $1 RETURNING id", [id] - ); - if (result.length === 0) return c.json({ error: "Notebook not found" }, 404); + const docId = notebookDocId(space, id); + const doc = _syncServer?.getDoc(docId); + if (!doc || !doc.notebook || !doc.notebook.title) { + return c.json({ error: "Notebook not found" }, 404); + } - // Automerge: remove the entire doc from SyncServer - // (SyncServer doesn't have a removeDoc — setting it to empty is the equivalent) - // For now, the doc persists in Automerge but is effectively orphaned once PG row is gone. + // Clear all items and blank the notebook title to mark as deleted. + // SyncServer has no removeDoc API, so we empty the doc instead. 
+ _syncServer!.changeDoc(docId, "Delete notebook", (d) => { + for (const key of Object.keys(d.items)) { + delete d.items[key]; + } + d.notebook.title = ""; + d.notebook.updatedAt = Date.now(); + }); return c.json({ ok: true }); }); // ── Notes API ── -// GET /api/notes — list all notes (Automerge-first, PG fallback) +// GET /api/notes — list all notes routes.get("/api/notes", async (c) => { const space = c.req.param("space") || "demo"; const { notebook_id, type, q, limit = "50", offset = "0" } = c.req.query(); - // Automerge first - if (isLocalFirst(space)) { - let allNotes: ReturnType[] = []; - const notebooks = notebook_id - ? [{ doc: _syncServer!.getDoc(notebookDocId(space, notebook_id))! }].filter(x => x.doc) - : listAutomergeNotebooks(space); + let allNotes: ReturnType[] = []; + const notebooks = notebook_id + ? (() => { + const doc = _syncServer?.getDoc(notebookDocId(space, notebook_id)); + return doc ? [{ doc }] : []; + })() + : listNotebooks(space); - for (const { doc } of notebooks) { - for (const item of Object.values(doc.items)) { - if (type && item.type !== type) continue; - if (q) { - const lower = q.toLowerCase(); - if (!item.title.toLowerCase().includes(lower) && !item.contentPlain.toLowerCase().includes(lower)) continue; - } - allNotes.push(noteToRest(item)); + for (const { doc } of notebooks) { + for (const item of Object.values(doc.items)) { + if (type && item.type !== type) continue; + if (q) { + const lower = q.toLowerCase(); + if (!item.title.toLowerCase().includes(lower) && !item.contentPlain.toLowerCase().includes(lower)) continue; } + allNotes.push(noteToRest(item)); } - - // Sort: pinned first, then by updated_at desc - allNotes.sort((a, b) => { - if (a.is_pinned !== b.is_pinned) return a.is_pinned ? 
-1 : 1; - return new Date(b.updated_at).getTime() - new Date(a.updated_at).getTime(); - }); - - const lim = Math.min(parseInt(limit), 100); - const off = parseInt(offset) || 0; - return c.json({ notes: allNotes.slice(off, off + lim), source: "automerge" }); } - // PG fallback - const conditions: string[] = []; - const params: any[] = []; - let idx = 1; + // Sort: pinned first, then by updated_at desc + allNotes.sort((a, b) => { + if (a.is_pinned !== b.is_pinned) return a.is_pinned ? -1 : 1; + return new Date(b.updated_at).getTime() - new Date(a.updated_at).getTime(); + }); - if (notebook_id) { conditions.push(`n.notebook_id = $${idx}`); params.push(notebook_id); idx++; } - if (type) { conditions.push(`n.type = $${idx}`); params.push(type); idx++; } - if (q) { - conditions.push(`(n.title ILIKE $${idx} OR n.content_plain ILIKE $${idx})`); - params.push(`%${q}%`); - idx++; - } - - const where = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : ""; - - const rows = await sql.unsafe( - `SELECT n.*, array_agg(t.name) FILTER (WHERE t.name IS NOT NULL) as tags - FROM rnotes.notes n - LEFT JOIN rnotes.note_tags nt ON nt.note_id = n.id - LEFT JOIN rnotes.tags t ON t.id = nt.tag_id - ${where} - GROUP BY n.id - ORDER BY n.is_pinned DESC, n.updated_at DESC - LIMIT ${Math.min(parseInt(limit), 100)} OFFSET ${parseInt(offset) || 0}`, - params - ); - return c.json({ notes: rows }); + const lim = Math.min(parseInt(limit), 100); + const off = parseInt(offset) || 0; + return c.json({ notes: allNotes.slice(off, off + lim), source: "automerge" }); }); -// POST /api/notes — create note (dual-write) +// POST /api/notes — create note routes.post("/api/notes", async (c) => { const space = c.req.param("space") || "demo"; const token = extractToken(c.req.raw.headers); @@ -508,151 +386,108 @@ routes.post("/api/notes", async (c) => { const { notebook_id, title, content, type, url, language, file_url, mime_type, file_size, duration, tags } = body; if (!title?.trim()) return c.json({ 
error: "Title is required" }, 400); + if (!notebook_id) return c.json({ error: "notebook_id is required" }, 400); - // Strip HTML for plain text search - const contentPlain = content ? content.replace(/<[^>]*>/g, " ").replace(/\s+/g, " ").trim() : null; + // Strip HTML/markdown for plain text search + const contentPlain = content ? content.replace(/<[^>]*>/g, " ").replace(/\s+/g, " ").trim() : ""; - // PG write - const rows = await sql.unsafe( - `INSERT INTO rnotes.notes (notebook_id, title, content, content_plain, type, url, language, file_url, mime_type, file_size, duration) - VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11) RETURNING *`, - [notebook_id || null, title.trim(), content || "", contentPlain, type || "NOTE", - url || null, language || null, file_url || null, mime_type || null, file_size || null, duration || null] - ); - - // Handle tags in PG + // Normalize tags const tagNames: string[] = []; if (tags && Array.isArray(tags)) { for (const tagName of tags) { - const name = tagName.trim().toLowerCase(); - if (!name) continue; - tagNames.push(name); - const tag = await sql.unsafe( - "INSERT INTO rnotes.tags (name) VALUES ($1) ON CONFLICT (name) DO UPDATE SET name = $1 RETURNING id", - [name] - ); - await sql.unsafe( - "INSERT INTO rnotes.note_tags (note_id, tag_id) VALUES ($1, $2) ON CONFLICT DO NOTHING", - [rows[0].id, tag[0].id] - ); + const name = (tagName as string).trim().toLowerCase(); + if (name) tagNames.push(name); } } - // Automerge dual-write - if (notebook_id && isLocalFirst(space)) { - writeNoteToAutomerge(space, notebook_id, rows[0].id, { - title: title.trim(), - content: content || '', - contentPlain: contentPlain || '', - type: type || 'NOTE', - url: url || null, - language: language || null, - fileUrl: file_url || null, - mimeType: mime_type || null, - fileSize: file_size || null, - duration: duration || null, - tags: tagNames, - }); - } + const noteId = newId(); + const item = createNoteItem(noteId, notebook_id, title.trim(), { + 
authorId: claims.sub ?? null, + content: content || "", + contentPlain, + type: type || "NOTE", + url: url || null, + language: language || null, + fileUrl: file_url || null, + mimeType: mime_type || null, + fileSize: file_size || null, + duration: duration || null, + tags: tagNames, + }); - return c.json(rows[0], 201); + // Ensure the notebook doc exists, then add the note + ensureDoc(space, notebook_id); + const docId = notebookDocId(space, notebook_id); + _syncServer!.changeDoc(docId, `Create note: ${title.trim()}`, (d) => { + d.items[noteId] = item; + d.notebook.updatedAt = Date.now(); + }); + + return c.json(noteToRest(item), 201); }); -// GET /api/notes/:id — note detail (Automerge-first) +// GET /api/notes/:id — note detail routes.get("/api/notes/:id", async (c) => { const space = c.req.param("space") || "demo"; const id = c.req.param("id"); - // Automerge first - if (isLocalFirst(space)) { - const found = findNoteInAutomerge(space, id); - if (found) return c.json({ ...noteToRest(found.item), source: "automerge" }); - } + const found = findNote(space, id); + if (!found) return c.json({ error: "Note not found" }, 404); - // PG fallback - const rows = await sql.unsafe( - `SELECT n.*, array_agg(t.name) FILTER (WHERE t.name IS NOT NULL) as tags - FROM rnotes.notes n - LEFT JOIN rnotes.note_tags nt ON nt.note_id = n.id - LEFT JOIN rnotes.tags t ON t.id = nt.tag_id - WHERE n.id = $1 - GROUP BY n.id`, - [id] - ); - if (rows.length === 0) return c.json({ error: "Note not found" }, 404); - return c.json(rows[0]); + return c.json({ ...noteToRest(found.item), source: "automerge" }); }); -// PUT /api/notes/:id — update note (dual-write) +// PUT /api/notes/:id — update note routes.put("/api/notes/:id", async (c) => { const space = c.req.param("space") || "demo"; const id = c.req.param("id"); const body = await c.req.json(); const { title, content, type, url, language, is_pinned, sort_order } = body; - // PG write - const fields: string[] = []; - const params: any[] = []; 
- let idx = 1; - - if (title !== undefined) { fields.push(`title = $${idx}`); params.push(title); idx++; } - if (content !== undefined) { - fields.push(`content = $${idx}`); params.push(content); idx++; - const plain = content.replace(/<[^>]*>/g, " ").replace(/\s+/g, " ").trim(); - fields.push(`content_plain = $${idx}`); params.push(plain); idx++; - } - if (type !== undefined) { fields.push(`type = $${idx}`); params.push(type); idx++; } - if (url !== undefined) { fields.push(`url = $${idx}`); params.push(url); idx++; } - if (language !== undefined) { fields.push(`language = $${idx}`); params.push(language); idx++; } - if (is_pinned !== undefined) { fields.push(`is_pinned = $${idx}`); params.push(is_pinned); idx++; } - if (sort_order !== undefined) { fields.push(`sort_order = $${idx}`); params.push(sort_order); idx++; } - - if (fields.length === 0) return c.json({ error: "No fields to update" }, 400); - fields.push("updated_at = NOW()"); - params.push(id); - - const rows = await sql.unsafe( - `UPDATE rnotes.notes SET ${fields.join(", ")} WHERE id = $${idx} RETURNING *`, - params - ); - if (rows.length === 0) return c.json({ error: "Note not found" }, 404); - - // Automerge dual-write - if (isLocalFirst(space)) { - const found = findNoteInAutomerge(space, id); - if (found) { - const contentPlain = content ? content.replace(/<[^>]*>/g, " ").replace(/\s+/g, " ").trim() : undefined; - writeNoteToAutomerge(space, found.item.notebookId, id, { - ...(title !== undefined ? { title } : {}), - ...(content !== undefined ? { content, contentPlain } : {}), - ...(type !== undefined ? { type } : {}), - ...(url !== undefined ? { url } : {}), - ...(language !== undefined ? { language } : {}), - ...(is_pinned !== undefined ? { isPinned: is_pinned } : {}), - ...(sort_order !== undefined ? 
{ sortOrder: sort_order } : {}), - }); - } + if (title === undefined && content === undefined && type === undefined && + url === undefined && language === undefined && is_pinned === undefined && sort_order === undefined) { + return c.json({ error: "No fields to update" }, 400); } - return c.json(rows[0]); + const found = findNote(space, id); + if (!found) return c.json({ error: "Note not found" }, 404); + + const contentPlain = content !== undefined + ? content.replace(/<[^>]*>/g, " ").replace(/\s+/g, " ").trim() + : undefined; + + _syncServer!.changeDoc(found.docId, `Update note ${id}`, (d) => { + const item = d.items[id]; + if (!item) return; + if (title !== undefined) item.title = title; + if (content !== undefined) item.content = content; + if (contentPlain !== undefined) item.contentPlain = contentPlain; + if (type !== undefined) item.type = type; + if (url !== undefined) item.url = url; + if (language !== undefined) item.language = language; + if (is_pinned !== undefined) item.isPinned = is_pinned; + if (sort_order !== undefined) item.sortOrder = sort_order; + item.updatedAt = Date.now(); + }); + + // Return the updated note + const updatedDoc = _syncServer!.getDoc(found.docId)!; + const updatedItem = updatedDoc.items[id]; + return c.json(noteToRest(updatedItem)); }); -// DELETE /api/notes/:id (dual-write) +// DELETE /api/notes/:id routes.delete("/api/notes/:id", async (c) => { const space = c.req.param("space") || "demo"; const id = c.req.param("id"); - // PG delete - const result = await sql.unsafe("DELETE FROM rnotes.notes WHERE id = $1 RETURNING id, notebook_id", [id]); - if (result.length === 0) return c.json({ error: "Note not found" }, 404); + const found = findNote(space, id); + if (!found) return c.json({ error: "Note not found" }, 404); - // Automerge dual-write: remove note from notebook doc - if (isLocalFirst(space) && result[0].notebook_id && _syncServer) { - const docId = notebookDocId(space, result[0].notebook_id); - 
_syncServer.changeDoc(docId, `Delete note ${id}`, (d) => { - delete d.items[id]; - }); - } + _syncServer!.changeDoc(found.docId, `Delete note ${id}`, (d) => { + delete d.items[id]; + d.notebook.updatedAt = Date.now(); + }); return c.json({ ok: true }); }); @@ -691,11 +526,10 @@ export const notesModule: RSpaceModule = { async onInit({ syncServer }) { _syncServer = syncServer; - // Init PG (still needed during dual-write period) - await initDB(); - await seedDemoIfEmpty(); + // Seed demo notebooks if the "demo" space is empty + seedDemoIfEmpty("demo"); - console.log("[Notes] onInit complete (PG + schema registered)"); + console.log("[Notes] onInit complete (Automerge-only)"); }, async onSpaceCreate(ctx: SpaceLifecycleContext) { diff --git a/modules/rsplat/db/schema.sql b/modules/rsplat/db/schema.sql.archived similarity index 100% rename from modules/rsplat/db/schema.sql rename to modules/rsplat/db/schema.sql.archived diff --git a/modules/rsplat/mod.ts b/modules/rsplat/mod.ts index a9bee27e..fdc8d9f4 100644 --- a/modules/rsplat/mod.ts +++ b/modules/rsplat/mod.ts @@ -3,13 +3,16 @@ * * Routes are relative to mount point (/:space/splat in unified). * Three.js + GaussianSplats3D loaded via CDN importmap. + * + * All metadata is stored in Automerge documents via SyncServer. + * 3D files (.ply, .splat, .spz) remain on the filesystem. 
*/ import { Hono } from "hono"; import { resolve } from "node:path"; -import { mkdir, readFile } from "node:fs/promises"; +import { mkdir } from "node:fs/promises"; import { randomUUID } from "node:crypto"; -import { sql } from "../../shared/db/pool"; +import * as Automerge from "@automerge/automerge"; import { renderShell } from "../../server/shell"; import { getModuleInfoList } from "../../shared/module"; import type { RSpaceModule, SpaceLifecycleContext } from "../../shared/module"; @@ -20,7 +23,13 @@ import { } from "@encryptid/sdk/server"; import { setupX402FromEnv } from "../../shared/x402/hono-middleware"; import type { SyncServer } from '../../server/local-first/sync-server'; -import { splatScenesSchema } from './schemas'; +import { + splatScenesSchema, + splatScenesDocId, + type SplatScenesDoc, + type SplatItem, + type SourceFile, +} from './schemas'; let _syncServer: SyncServer | null = null; @@ -88,6 +97,88 @@ function getMimeType(format: string): string { } } +// ── Automerge helpers ── + +/** + * Lazily create the Automerge doc for a space if it doesn't exist yet. + */ +function ensureDoc(space: string): SplatScenesDoc { + const docId = splatScenesDocId(space); + let doc = _syncServer!.getDoc(docId); + if (!doc) { + doc = Automerge.change(Automerge.init(), 'init', (d) => { + const init = splatScenesSchema.init(); + d.meta = init.meta; + d.meta.spaceSlug = space; + d.items = {}; + }); + _syncServer!.setDoc(docId, doc); + } + return doc; +} + +/** + * Find a splat item by slug or id within a doc's items map. + * Returns [itemKey, item] or undefined. + */ +function findItem(doc: SplatScenesDoc, idOrSlug: string): [string, SplatItem] | undefined { + for (const [key, item] of Object.entries(doc.items)) { + if (item.slug === idOrSlug || item.id === idOrSlug) { + return [key, item]; + } + } + return undefined; +} + +/** + * Convert a SplatItem (camelCase) to a snake_case row for API responses, + * preserving the shape the frontend expects. 
+ */ +function itemToRow(item: SplatItem): SplatRow { + return { + id: item.id, + slug: item.slug, + title: item.title, + description: item.description || null, + file_path: item.filePath, + file_format: item.fileFormat, + file_size_bytes: item.fileSizeBytes, + tags: item.tags ?? [], + space_slug: item.spaceSlug, + contributor_id: item.contributorId, + contributor_name: item.contributorName, + source: item.source ?? 'upload', + status: item.status, + view_count: item.viewCount, + payment_tx: item.paymentTx, + payment_network: item.paymentNetwork, + processing_status: item.processingStatus ?? 'ready', + processing_error: item.processingError, + source_file_count: item.sourceFileCount, + created_at: new Date(item.createdAt).toISOString(), + }; +} + +/** + * Return the subset of SplatRow fields used in list/gallery responses. + */ +function itemToListRow(item: SplatItem) { + return { + id: item.id, + slug: item.slug, + title: item.title, + description: item.description || null, + file_format: item.fileFormat, + file_size_bytes: item.fileSizeBytes, + tags: item.tags ?? [], + contributor_name: item.contributorName, + view_count: item.viewCount, + processing_status: item.processingStatus ?? 'ready', + source_file_count: item.sourceFileCount, + created_at: new Date(item.createdAt).toISOString(), + }; +} + // ── CDN importmap for Three.js + GaussianSplats3D ── const IMPORTMAP = `