feat: Phase 4 — remove PostgreSQL from 11 modules, switch to Automerge

Replace all sql.unsafe() calls with Automerge document operations
across rfunds, rbooks, rsplat, rnotes, rwork, rvote, rcal, rfiles,
rcart, rtrips, and rinbox. Only rforum retains PG (Discourse provisioning).

Each module now uses _syncServer.getDoc/changeDoc/setDoc for all CRUD,
with ensureDoc() helpers for lazy document creation. Schema SQL files
archived to .sql.archived. Adds Automerge round-trip test suite (35 tests).

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Jeff Emmett 2026-03-02 15:48:01 -08:00
parent 88d618c1af
commit 8900eb32b9
23 changed files with 3771 additions and 1947 deletions

View File

@ -3,13 +3,16 @@
* *
* Ported from rbooks-online (Next.js) to Hono routes. * Ported from rbooks-online (Next.js) to Hono routes.
* Routes are relative to mount point (/:space/books in unified, / in standalone). * Routes are relative to mount point (/:space/books in unified, / in standalone).
*
* Storage: Automerge documents via SyncServer (one doc per space).
 * PDF files stay on the filesystem — only metadata lives in Automerge.
*/ */
import { Hono } from "hono"; import { Hono } from "hono";
import { resolve } from "node:path"; import { resolve } from "node:path";
import { mkdir, readFile } from "node:fs/promises"; import { mkdir } from "node:fs/promises";
import { randomUUID } from "node:crypto"; import { randomUUID } from "node:crypto";
import { sql } from "../../shared/db/pool"; import * as Automerge from "@automerge/automerge";
import { renderShell } from "../../server/shell"; import { renderShell } from "../../server/shell";
import { getModuleInfoList } from "../../shared/module"; import { getModuleInfoList } from "../../shared/module";
import type { RSpaceModule, SpaceLifecycleContext } from "../../shared/module"; import type { RSpaceModule, SpaceLifecycleContext } from "../../shared/module";
@ -19,37 +22,68 @@ import {
extractToken, extractToken,
} from "@encryptid/sdk/server"; } from "@encryptid/sdk/server";
import type { SyncServer } from '../../server/local-first/sync-server'; import type { SyncServer } from '../../server/local-first/sync-server';
import { booksCatalogSchema } from './schemas'; import {
booksCatalogSchema,
booksCatalogDocId,
type BooksCatalogDoc,
type BookItem,
} from './schemas';
let _syncServer: SyncServer | null = null; let _syncServer: SyncServer | null = null;
const BOOKS_DIR = process.env.BOOKS_DIR || "/data/books"; const BOOKS_DIR = process.env.BOOKS_DIR || "/data/books";
// ── Types ── // ── Helpers ──
export interface BookRow { function ensureDoc(space: string): BooksCatalogDoc {
id: string; const docId = booksCatalogDocId(space);
slug: string; let doc = _syncServer!.getDoc<BooksCatalogDoc>(docId);
title: string; if (!doc) {
author: string | null; doc = Automerge.change(Automerge.init<BooksCatalogDoc>(), 'init', (d) => {
description: string | null; const init = booksCatalogSchema.init();
pdf_path: string; d.meta = init.meta;
pdf_size_bytes: number; d.meta.spaceSlug = space;
page_count: number; d.items = {};
tags: string[]; });
license: string; _syncServer!.setDoc(docId, doc);
cover_color: string; }
contributor_id: string | null; return doc;
contributor_name: string | null;
status: string;
featured: boolean;
view_count: number;
download_count: number;
created_at: string;
updated_at: string;
} }
// ── Helpers ── /** Find a book by slug or id across the items map. */
function findBook(doc: BooksCatalogDoc, idOrSlug: string): BookItem | undefined {
// Direct key lookup first (by id)
if (doc.items[idOrSlug]) return doc.items[idOrSlug];
// Then scan by slug
return Object.values(doc.items).find(
(b) => b.slug === idOrSlug || b.id === idOrSlug
);
}
/** Convert a BookItem to the JSON shape the API has always returned. */
function bookToRow(b: BookItem) {
return {
id: b.id,
slug: b.slug,
title: b.title,
author: b.author,
description: b.description,
pdf_path: b.pdfPath,
pdf_size_bytes: b.pdfSizeBytes,
page_count: b.pageCount,
tags: b.tags,
license: b.license,
cover_color: b.coverColor,
contributor_id: b.contributorId,
contributor_name: b.contributorName,
status: b.status,
featured: b.featured,
view_count: b.viewCount,
download_count: b.downloadCount,
created_at: new Date(b.createdAt).toISOString(),
updated_at: new Date(b.updatedAt).toISOString(),
};
}
function slugify(text: string): string { function slugify(text: string): string {
return text return text
@ -59,44 +93,69 @@ function slugify(text: string): string {
.slice(0, 80); .slice(0, 80);
} }
function escapeAttr(s: string): string {
return s.replace(/&/g, "&amp;").replace(/"/g, "&quot;").replace(/</g, "&lt;").replace(/>/g, "&gt;");
}
// ── Routes ── // ── Routes ──
const routes = new Hono(); const routes = new Hono();
// ── API: List books ── // ── API: List books ──
routes.get("/api/books", async (c) => { routes.get("/api/books", async (c) => {
const search = c.req.query("search"); const space = c.req.param("space") || "global";
const search = c.req.query("search")?.toLowerCase();
const tag = c.req.query("tag"); const tag = c.req.query("tag");
const limit = Math.min(parseInt(c.req.query("limit") || "50"), 100); const limit = Math.min(parseInt(c.req.query("limit") || "50"), 100);
const offset = parseInt(c.req.query("offset") || "0"); const offset = parseInt(c.req.query("offset") || "0");
let query = `SELECT id, slug, title, author, description, pdf_size_bytes, const doc = ensureDoc(space);
page_count, tags, cover_color, contributor_name, featured, let books = Object.values(doc.items).filter((b) => b.status === "published");
view_count, created_at
FROM rbooks.books WHERE status = 'published'`;
const params: (string | number)[] = [];
if (search) { if (search) {
params.push(`%${search}%`); books = books.filter(
query += ` AND (title ILIKE $${params.length} OR author ILIKE $${params.length} OR description ILIKE $${params.length})`; (b) =>
b.title.toLowerCase().includes(search) ||
b.author.toLowerCase().includes(search) ||
b.description.toLowerCase().includes(search)
);
} }
if (tag) { if (tag) {
params.push(tag); books = books.filter((b) => b.tags.includes(tag));
query += ` AND $${params.length} = ANY(tags)`;
} }
query += ` ORDER BY featured DESC, created_at DESC`; // Sort: featured first, then newest
params.push(limit); books.sort((a, b) => {
query += ` LIMIT $${params.length}`; if (a.featured !== b.featured) return a.featured ? -1 : 1;
params.push(offset); return b.createdAt - a.createdAt;
query += ` OFFSET $${params.length}`; });
// Paginate
const paged = books.slice(offset, offset + limit);
// Return the subset of fields the old query returned
const rows = paged.map((b) => ({
id: b.id,
slug: b.slug,
title: b.title,
author: b.author,
description: b.description,
pdf_size_bytes: b.pdfSizeBytes,
page_count: b.pageCount,
tags: [...b.tags],
cover_color: b.coverColor,
contributor_name: b.contributorName,
featured: b.featured,
view_count: b.viewCount,
created_at: new Date(b.createdAt).toISOString(),
}));
const rows = await sql.unsafe(query, params);
return c.json({ books: rows }); return c.json({ books: rows });
}); });
// ── API: Upload book ── // ── API: Upload book ──
routes.post("/api/books", async (c) => { routes.post("/api/books", async (c) => {
const space = c.req.param("space") || "global";
const token = extractToken(c.req.raw.headers); const token = extractToken(c.req.raw.headers);
if (!token) return c.json({ error: "Authentication required" }, 401); if (!token) return c.json({ error: "Authentication required" }, 401);
@ -124,13 +183,13 @@ routes.post("/api/books", async (c) => {
const tags = tagsRaw ? tagsRaw.split(",").map((t) => t.trim()).filter(Boolean) : []; const tags = tagsRaw ? tagsRaw.split(",").map((t) => t.trim()).filter(Boolean) : [];
const shortId = randomUUID().slice(0, 8); const shortId = randomUUID().slice(0, 8);
const id = randomUUID();
let slug = slugify(title); let slug = slugify(title);
// Check slug collision // Check slug collision
const existing = await sql.unsafe( const doc = ensureDoc(space);
`SELECT 1 FROM rbooks.books WHERE slug = $1`, [slug] const slugExists = Object.values(doc.items).some((b) => b.slug === slug);
); if (slugExists) {
if (existing.length > 0) {
slug = `${slug}-${shortId}`; slug = `${slug}-${shortId}`;
} }
@ -141,50 +200,82 @@ routes.post("/api/books", async (c) => {
const buffer = Buffer.from(await file.arrayBuffer()); const buffer = Buffer.from(await file.arrayBuffer());
await Bun.write(filepath, buffer); await Bun.write(filepath, buffer);
// Insert into DB const now = Date.now();
const rows = await sql.unsafe(
`INSERT INTO rbooks.books (slug, title, author, description, pdf_path, pdf_size_bytes, tags, license, contributor_id, contributor_name)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
RETURNING id, slug, title, author, description, tags, created_at`,
[slug, title, author, description, filename, buffer.length, tags, license, claims.sub, claims.username || null]
);
return c.json(rows[0], 201); // Insert into Automerge doc
const docId = booksCatalogDocId(space);
_syncServer!.changeDoc<BooksCatalogDoc>(docId, `add book: ${slug}`, (d) => {
d.items[id] = {
id,
slug,
title,
author: author || "",
description: description || "",
pdfPath: filename,
pdfSizeBytes: buffer.length,
pageCount: 0,
tags,
license,
coverColor: null,
contributorId: claims.sub,
contributorName: claims.username || null,
status: "published",
featured: false,
viewCount: 0,
downloadCount: 0,
createdAt: now,
updatedAt: now,
};
});
return c.json({
id,
slug,
title,
author,
description,
tags,
created_at: new Date(now).toISOString(),
}, 201);
}); });
// ── API: Get book details ── // ── API: Get book details ──
routes.get("/api/books/:id", async (c) => { routes.get("/api/books/:id", async (c) => {
const space = c.req.param("space") || "global";
const id = c.req.param("id"); const id = c.req.param("id");
const rows = await sql.unsafe( const doc = ensureDoc(space);
`SELECT * FROM rbooks.books WHERE (slug = $1 OR id::text = $1) AND status = 'published'`, const book = findBook(doc, id);
[id]
);
if (rows.length === 0) return c.json({ error: "Book not found" }, 404); if (!book || book.status !== "published") {
return c.json({ error: "Book not found" }, 404);
}
// Increment view count // Increment view count
await sql.unsafe( const docId = booksCatalogDocId(space);
`UPDATE rbooks.books SET view_count = view_count + 1 WHERE id = $1`, _syncServer!.changeDoc<BooksCatalogDoc>(docId, `view: ${book.slug}`, (d) => {
[rows[0].id] if (d.items[book.id]) {
); d.items[book.id].viewCount += 1;
d.items[book.id].updatedAt = Date.now();
}
});
return c.json(rows[0]); return c.json(bookToRow(book));
}); });
// ── API: Serve PDF ── // ── API: Serve PDF ──
routes.get("/api/books/:id/pdf", async (c) => { routes.get("/api/books/:id/pdf", async (c) => {
const space = c.req.param("space") || "global";
const id = c.req.param("id"); const id = c.req.param("id");
const rows = await sql.unsafe( const doc = ensureDoc(space);
`SELECT id, slug, title, pdf_path FROM rbooks.books WHERE (slug = $1 OR id::text = $1) AND status = 'published'`, const book = findBook(doc, id);
[id]
);
if (rows.length === 0) return c.json({ error: "Book not found" }, 404); if (!book || book.status !== "published") {
return c.json({ error: "Book not found" }, 404);
}
const book = rows[0]; const filepath = resolve(BOOKS_DIR, book.pdfPath);
const filepath = resolve(BOOKS_DIR, book.pdf_path);
const file = Bun.file(filepath); const file = Bun.file(filepath);
if (!(await file.exists())) { if (!(await file.exists())) {
@ -192,10 +283,13 @@ routes.get("/api/books/:id/pdf", async (c) => {
} }
// Increment download count // Increment download count
await sql.unsafe( const docId = booksCatalogDocId(space);
`UPDATE rbooks.books SET download_count = download_count + 1 WHERE id = $1`, _syncServer!.changeDoc<BooksCatalogDoc>(docId, `download: ${book.slug}`, (d) => {
[book.id] if (d.items[book.id]) {
); d.items[book.id].downloadCount += 1;
d.items[book.id].updatedAt = Date.now();
}
});
return new Response(file, { return new Response(file, {
headers: { headers: {
@ -226,12 +320,10 @@ routes.get("/read/:id", async (c) => {
const spaceSlug = c.req.param("space") || "personal"; const spaceSlug = c.req.param("space") || "personal";
const id = c.req.param("id"); const id = c.req.param("id");
const rows = await sql.unsafe( const doc = ensureDoc(spaceSlug);
`SELECT * FROM rbooks.books WHERE (slug = $1 OR id::text = $1) AND status = 'published'`, const book = findBook(doc, id);
[id]
);
if (rows.length === 0) { if (!book || book.status !== "published") {
const html = renderShell({ const html = renderShell({
title: "Book not found | rSpace", title: "Book not found | rSpace",
moduleId: "rbooks", moduleId: "rbooks",
@ -242,13 +334,14 @@ routes.get("/read/:id", async (c) => {
return c.html(html, 404); return c.html(html, 404);
} }
const book = rows[0];
// Increment view count // Increment view count
await sql.unsafe( const docId = booksCatalogDocId(spaceSlug);
`UPDATE rbooks.books SET view_count = view_count + 1 WHERE id = $1`, _syncServer!.changeDoc<BooksCatalogDoc>(docId, `view: ${book.slug}`, (d) => {
[book.id] if (d.items[book.id]) {
); d.items[book.id].viewCount += 1;
d.items[book.id].updatedAt = Date.now();
}
});
// Build the PDF URL relative to this module's mount point // Build the PDF URL relative to this module's mount point
const pdfUrl = `/${spaceSlug}/rbooks/api/books/${book.slug}/pdf`; const pdfUrl = `/${spaceSlug}/rbooks/api/books/${book.slug}/pdf`;
@ -279,24 +372,6 @@ routes.get("/read/:id", async (c) => {
return c.html(html); return c.html(html);
}); });
// ── Initialize DB schema ──
async function initDB(): Promise<void> {
try {
const schemaPath = resolve(import.meta.dir, "db/schema.sql");
const schemaSql = await readFile(schemaPath, "utf-8");
await sql.unsafe(`SET search_path TO rbooks, public`);
await sql.unsafe(schemaSql);
await sql.unsafe(`SET search_path TO public`);
console.log("[Books] Database schema initialized");
} catch (e) {
console.error("[Books] Schema init failed:", e);
}
}
function escapeAttr(s: string): string {
return s.replace(/&/g, "&amp;").replace(/"/g, "&quot;").replace(/</g, "&lt;").replace(/>/g, "&gt;");
}
// ── Module export ── // ── Module export ──
export const booksModule: RSpaceModule = { export const booksModule: RSpaceModule = {
@ -311,7 +386,7 @@ export const booksModule: RSpaceModule = {
landingPage: renderLanding, landingPage: renderLanding,
async onInit(ctx) { async onInit(ctx) {
_syncServer = ctx.syncServer; _syncServer = ctx.syncServer;
await initDB(); console.log("[Books] Module initialized (Automerge storage)");
}, },
feeds: [ feeds: [
{ {

View File

@ -3,69 +3,167 @@
* *
* Group calendars with lunar/solar/seasonal time systems, * Group calendars with lunar/solar/seasonal time systems,
* location-aware events, and temporal-spatial zoom coupling. * location-aware events, and temporal-spatial zoom coupling.
*
 * All persistence uses Automerge documents via SyncServer —
 * no PostgreSQL dependency.
*/ */
import { Hono } from "hono"; import { Hono } from "hono";
import { readFileSync } from "node:fs"; import * as Automerge from "@automerge/automerge";
import { resolve } from "node:path";
import { sql } from "../../shared/db/pool";
import { renderShell } from "../../server/shell"; import { renderShell } from "../../server/shell";
import { getModuleInfoList } from "../../shared/module"; import { getModuleInfoList } from "../../shared/module";
import type { RSpaceModule } from "../../shared/module"; import type { RSpaceModule } from "../../shared/module";
import { verifyEncryptIDToken, extractToken } from "@encryptid/sdk/server"; import { verifyEncryptIDToken, extractToken } from "@encryptid/sdk/server";
import { renderLanding } from "./landing"; import { renderLanding } from "./landing";
import type { SyncServer } from '../../server/local-first/sync-server'; import type { SyncServer } from '../../server/local-first/sync-server';
import { calendarSchema } from './schemas'; import { calendarSchema, calendarDocId } from './schemas';
import type { CalendarDoc, CalendarEvent, CalendarSource } from './schemas';
let _syncServer: SyncServer | null = null; let _syncServer: SyncServer | null = null;
const routes = new Hono(); const routes = new Hono();
// ── DB initialization ── // ── Local-first helpers ──
const SCHEMA_SQL = readFileSync(resolve(import.meta.dir, "db/schema.sql"), "utf-8");
async function initDB() { /**
try { * Lazily create the calendar Automerge doc if it doesn't exist yet.
await sql.unsafe(SCHEMA_SQL); * Returns the current (immutable) doc snapshot.
console.log("[Cal] DB schema initialized"); */
} catch (e) { function ensureDoc(space: string): CalendarDoc {
console.error("[Cal] DB init error:", e); const docId = calendarDocId(space);
let doc = _syncServer!.getDoc<CalendarDoc>(docId);
if (!doc) {
doc = Automerge.change(Automerge.init<CalendarDoc>(), 'init calendar', (d) => {
const init = calendarSchema.init();
d.meta = init.meta;
d.meta.spaceSlug = space;
d.sources = {};
d.events = {};
});
_syncServer!.setDoc(docId, doc);
} }
return doc;
} }
async function seedDemoIfEmpty() { function daysFromNow(days: number, hours: number, minutes: number): Date {
try { const d = new Date();
const count = await sql.unsafe("SELECT count(*)::int as cnt FROM rcal.events"); d.setDate(d.getDate() + days);
if (parseInt(count[0].cnt) > 0) return; d.setHours(hours, minutes, 0, 0);
return d;
}
/**
* Build an event row object suitable for JSON responses.
* Maps camelCase schema fields to the snake_case format the API previously returned.
*/
function eventToRow(ev: CalendarEvent, sources: Record<string, CalendarSource>) {
const src = ev.sourceId ? sources[ev.sourceId] : undefined;
return {
id: ev.id,
title: ev.title,
description: ev.description,
start_time: ev.startTime ? new Date(ev.startTime).toISOString() : null,
end_time: ev.endTime ? new Date(ev.endTime).toISOString() : null,
all_day: ev.allDay,
timezone: ev.timezone,
rrule: ev.rrule,
status: ev.status,
visibility: ev.visibility,
source_id: ev.sourceId,
source_name: src?.name ?? ev.sourceName ?? null,
source_color: src?.color ?? ev.sourceColor ?? null,
source_type: src?.sourceType ?? ev.sourceType ?? null,
location_id: ev.locationId,
location_name: ev.locationName,
location_label: ev.locationName,
location_lat: ev.locationLat,
location_lng: ev.locationLng,
location_granularity: ev.locationGranularity,
is_virtual: ev.isVirtual,
virtual_url: ev.virtualUrl,
virtual_platform: ev.virtualPlatform,
r_tool_source: ev.rToolSource,
r_tool_entity_id: ev.rToolEntityId,
attendees: ev.attendees,
attendee_count: ev.attendeeCount,
metadata: ev.metadata,
created_at: ev.createdAt ? new Date(ev.createdAt).toISOString() : null,
updated_at: ev.updatedAt ? new Date(ev.updatedAt).toISOString() : null,
};
}
/**
* Build a source row object for JSON responses.
*/
function sourceToRow(src: CalendarSource) {
return {
id: src.id,
name: src.name,
source_type: src.sourceType,
url: src.url,
color: src.color,
is_active: src.isActive,
is_visible: src.isVisible,
sync_interval_minutes: src.syncIntervalMinutes,
last_synced_at: src.lastSyncedAt ? new Date(src.lastSyncedAt).toISOString() : null,
owner_id: src.ownerId,
created_at: src.createdAt ? new Date(src.createdAt).toISOString() : null,
};
}
/**
* Seed demo data if the doc has no events yet.
*/
function seedDemoIfEmpty(space: string) {
const docId = calendarDocId(space);
const doc = ensureDoc(space);
if (Object.keys(doc.events).length > 0) return;
_syncServer!.changeDoc<CalendarDoc>(docId, 'seed demo data', (d) => {
const now = Date.now();
// Create calendar sources // Create calendar sources
const community = await sql.unsafe( const communityId = crypto.randomUUID();
`INSERT INTO rcal.calendar_sources (name, source_type, color, is_active, is_visible) const sprintsId = crypto.randomUUID();
VALUES ('Community Events', 'MANUAL', '#6366f1', true, true) RETURNING id`
);
const sprints = await sql.unsafe(
`INSERT INTO rcal.calendar_sources (name, source_type, color, is_active, is_visible)
VALUES ('Development Sprints', 'MANUAL', '#f59e0b', true, true) RETURNING id`
);
const communityId = community[0].id;
const sprintsId = sprints[0].id;
// Create location hierarchy d.sources[communityId] = {
const world = await sql.unsafe( id: communityId,
`INSERT INTO rcal.locations (name, granularity) VALUES ('Earth', 1) RETURNING id` name: 'Community Events',
); sourceType: 'MANUAL',
const europe = await sql.unsafe( url: null,
`INSERT INTO rcal.locations (name, granularity, parent_id, lat, lng) VALUES ('Europe', 2, $1, 48.8566, 2.3522) RETURNING id`, color: '#6366f1',
[world[0].id] isActive: true,
); isVisible: true,
const berlin = await sql.unsafe( syncIntervalMinutes: null,
`INSERT INTO rcal.locations (name, granularity, parent_id, lat, lng) VALUES ('Berlin', 4, $1, 52.52, 13.405) RETURNING id`, lastSyncedAt: 0,
[europe[0].id] ownerId: null,
); createdAt: now,
};
d.sources[sprintsId] = {
id: sprintsId,
name: 'Development Sprints',
sourceType: 'MANUAL',
url: null,
color: '#f59e0b',
isActive: true,
isVisible: true,
syncIntervalMinutes: null,
lastSyncedAt: 0,
ownerId: null,
createdAt: now,
};
// Seed events — past, current week, and future // Location IDs (embedded on events, no separate locations table)
const now = new Date(); const berlinLocId = crypto.randomUUID();
const events = [
// Seed events
const seedEvents: Array<{
title: string; desc: string; start: Date; end: Date;
sourceId: string; allDay?: boolean;
locationId?: string; locationName?: string;
locationLat?: number; locationLng?: number; locationGranularity?: string;
isVirtual?: boolean; virtualUrl?: string; virtualPlatform?: string;
}> = [
{ {
title: "rSpace Launch Party", title: "rSpace Launch Party",
desc: "Celebrating the launch of the unified rSpace platform with all 22 modules live.", desc: "Celebrating the launch of the unified rSpace platform with all 22 modules live.",
@ -76,13 +174,15 @@ async function seedDemoIfEmpty() {
title: "Provider Onboarding Workshop", title: "Provider Onboarding Workshop",
desc: "Hands-on session for print providers joining the cosmolocal network.", desc: "Hands-on session for print providers joining the cosmolocal network.",
start: daysFromNow(-12, 14, 0), end: daysFromNow(-12, 17, 0), start: daysFromNow(-12, 14, 0), end: daysFromNow(-12, 17, 0),
sourceId: communityId, virtual: true, virtualUrl: "https://meet.jit.si/rspace-providers", virtualPlatform: "Jitsi", sourceId: communityId, isVirtual: true,
virtualUrl: "https://meet.jit.si/rspace-providers", virtualPlatform: "Jitsi",
}, },
{ {
title: "Weekly Community Standup", title: "Weekly Community Standup",
desc: "Open standup — share what you're working on, ask for help, coordinate.", desc: "Open standup — share what you're working on, ask for help, coordinate.",
start: daysFromNow(0, 16, 0), end: daysFromNow(0, 16, 45), start: daysFromNow(0, 16, 0), end: daysFromNow(0, 16, 45),
sourceId: communityId, virtual: true, virtualUrl: "https://meet.jit.si/rspace-standup", virtualPlatform: "Jitsi", sourceId: communityId, isVirtual: true,
virtualUrl: "https://meet.jit.si/rspace-standup", virtualPlatform: "Jitsi",
}, },
{ {
title: "Sprint: Module Seeding & Polish", title: "Sprint: Module Seeding & Polish",
@ -94,77 +194,112 @@ async function seedDemoIfEmpty() {
title: "rFunds Budget Review", title: "rFunds Budget Review",
desc: "Quarterly review of treasury flows, enoughness thresholds, and overflow routing.", desc: "Quarterly review of treasury flows, enoughness thresholds, and overflow routing.",
start: daysFromNow(6, 15, 0), end: daysFromNow(6, 17, 0), start: daysFromNow(6, 15, 0), end: daysFromNow(6, 17, 0),
sourceId: communityId, virtual: true, virtualUrl: "https://meet.jit.si/rfunds-review", virtualPlatform: "Jitsi", sourceId: communityId, isVirtual: true,
virtualUrl: "https://meet.jit.si/rfunds-review", virtualPlatform: "Jitsi",
}, },
{ {
title: "Cosmolocal Design Sprint", title: "Cosmolocal Design Sprint",
desc: "Two-day design sprint on the next generation of cosmolocal tooling.", desc: "Two-day design sprint on the next generation of cosmolocal tooling.",
start: daysFromNow(11, 9, 0), end: daysFromNow(12, 18, 0), start: daysFromNow(11, 9, 0), end: daysFromNow(12, 18, 0),
sourceId: sprintsId, locationId: berlin[0].id, locationName: "Druckwerkstatt Berlin", sourceId: sprintsId,
locationId: berlinLocId, locationName: "Druckwerkstatt Berlin",
locationLat: 52.52, locationLng: 13.405, locationGranularity: "city",
}, },
{ {
title: "Q1 Retrospective", title: "Q1 Retrospective",
desc: "Looking back at what we built, what worked, and what to improve.", desc: "Looking back at what we built, what worked, and what to improve.",
start: daysFromNow(21, 16, 0), end: daysFromNow(21, 18, 0), start: daysFromNow(21, 16, 0), end: daysFromNow(21, 18, 0),
sourceId: communityId, virtual: true, virtualUrl: "https://meet.jit.si/rspace-retro", virtualPlatform: "Jitsi", sourceId: communityId, isVirtual: true,
virtualUrl: "https://meet.jit.si/rspace-retro", virtualPlatform: "Jitsi",
}, },
]; ];
for (const e of events) { for (const e of seedEvents) {
await sql.unsafe( const eventId = crypto.randomUUID();
`INSERT INTO rcal.events (title, description, start_time, end_time, all_day, source_id, d.events[eventId] = {
location_id, location_name, is_virtual, virtual_url, virtual_platform) id: eventId,
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)`, title: e.title,
[e.title, e.desc, e.start.toISOString(), e.end.toISOString(), e.allDay || false, description: e.desc,
e.sourceId, e.locationId || null, e.locationName || null, startTime: e.start.getTime(),
e.virtual || false, e.virtualUrl || null, e.virtualPlatform || null] endTime: e.end.getTime(),
); allDay: e.allDay || false,
timezone: 'UTC',
rrule: null,
status: null,
visibility: null,
sourceId: e.sourceId,
sourceName: null,
sourceType: null,
sourceColor: null,
locationId: e.locationId || null,
locationName: e.locationName || null,
coordinates: null,
locationGranularity: e.locationGranularity || null,
locationLat: e.locationLat ?? null,
locationLng: e.locationLng ?? null,
isVirtual: e.isVirtual || false,
virtualUrl: e.virtualUrl || null,
virtualPlatform: e.virtualPlatform || null,
rToolSource: null,
rToolEntityId: null,
attendees: [],
attendeeCount: 0,
metadata: null,
createdAt: now,
updatedAt: now,
};
} }
});
console.log("[Cal] Demo data seeded: 2 sources, 3 locations, 7 events"); console.log("[Cal] Demo data seeded: 2 sources, 7 events");
} catch (e) {
console.error("[Cal] Seed error:", e);
}
}
function daysFromNow(days: number, hours: number, minutes: number): Date {
const d = new Date();
d.setDate(d.getDate() + days);
d.setHours(hours, minutes, 0, 0);
return d;
} }
// ── API: Events ── // ── API: Events ──
// GET /api/events — query events with filters // GET /api/events — query events with filters
routes.get("/api/events", async (c) => { routes.get("/api/events", async (c) => {
const space = c.req.param("space") || "demo";
const { start, end, source, search, rTool, rEntityId, upcoming } = c.req.query(); const { start, end, source, search, rTool, rEntityId, upcoming } = c.req.query();
let where = "WHERE 1=1"; const doc = ensureDoc(space);
const params: any[] = []; let events = Object.values(doc.events);
let idx = 1;
if (start) { where += ` AND e.start_time >= $${idx}`; params.push(start); idx++; } // Apply filters
if (end) { where += ` AND e.start_time <= ($${idx}::date + interval '1 day')`; params.push(end); idx++; } if (start) {
if (source) { where += ` AND e.source_id = $${idx}`; params.push(source); idx++; } const startMs = new Date(start).getTime();
if (search) { where += ` AND (e.title ILIKE $${idx} OR e.description ILIKE $${idx})`; params.push(`%${search}%`); idx++; } events = events.filter((e) => e.startTime >= startMs);
if (rTool) { where += ` AND e.r_tool_source = $${idx}`; params.push(rTool); idx++; } }
if (rEntityId) { where += ` AND e.r_tool_entity_id = $${idx}`; params.push(rEntityId); idx++; } if (end) {
const endMs = new Date(end).getTime() + 86400000; // +1 day
events = events.filter((e) => e.startTime <= endMs);
}
if (source) {
events = events.filter((e) => e.sourceId === source);
}
if (search) {
const term = search.toLowerCase();
events = events.filter((e) =>
e.title.toLowerCase().includes(term) ||
(e.description && e.description.toLowerCase().includes(term))
);
}
if (rTool) {
events = events.filter((e) => e.rToolSource === rTool);
}
if (rEntityId) {
events = events.filter((e) => e.rToolEntityId === rEntityId);
}
if (upcoming) { if (upcoming) {
where += ` AND e.start_time >= NOW() AND e.start_time <= NOW() + ($${idx} || ' days')::interval`; const nowMs = Date.now();
params.push(upcoming); const futureMs = nowMs + parseInt(upcoming) * 86400000;
idx++; events = events.filter((e) => e.startTime >= nowMs && e.startTime <= futureMs);
} }
const rows = await sql.unsafe( // Sort by start time, limit to 500
`SELECT e.*, cs.name as source_name, cs.color as source_color, l.name as location_label events.sort((a, b) => a.startTime - b.startTime);
FROM rcal.events e events = events.slice(0, 500);
LEFT JOIN rcal.calendar_sources cs ON cs.id = e.source_id
LEFT JOIN rcal.locations l ON l.id = e.location_id const rows = events.map((e) => eventToRow(e, doc.sources));
${where}
ORDER BY e.start_time ASC LIMIT 500`,
params
);
return c.json({ count: rows.length, results: rows }); return c.json({ count: rows.length, results: rows });
}); });
@ -175,32 +310,65 @@ routes.post("/api/events", async (c) => {
let claims; let claims;
try { claims = await verifyEncryptIDToken(token); } catch { return c.json({ error: "Invalid token" }, 401); } try { claims = await verifyEncryptIDToken(token); } catch { return c.json({ error: "Invalid token" }, 401); }
const space = c.req.param("space") || "demo";
const body = await c.req.json(); const body = await c.req.json();
const { title, description, start_time, end_time, all_day, timezone, source_id, location_id, location_name, const { title, description, start_time, end_time, all_day, timezone, source_id, location_id, location_name,
is_virtual, virtual_url, virtual_platform, r_tool_source, r_tool_entity_id } = body; is_virtual, virtual_url, virtual_platform, r_tool_source, r_tool_entity_id } = body;
if (!title?.trim() || !start_time) return c.json({ error: "Title and start_time required" }, 400); if (!title?.trim() || !start_time) return c.json({ error: "Title and start_time required" }, 400);
const rows = await sql.unsafe( const docId = calendarDocId(space);
`INSERT INTO rcal.events (title, description, start_time, end_time, all_day, timezone, source_id, ensureDoc(space);
location_id, location_name, is_virtual, virtual_url, virtual_platform, r_tool_source, r_tool_entity_id, created_by) const eventId = crypto.randomUUID();
VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15) RETURNING *`, const now = Date.now();
[title.trim(), description || null, start_time, end_time || null, all_day || false, timezone || "UTC",
source_id || null, location_id || null, location_name || null, is_virtual || false, _syncServer!.changeDoc<CalendarDoc>(docId, `create event ${eventId}`, (d) => {
virtual_url || null, virtual_platform || null, r_tool_source || null, r_tool_entity_id || null, claims.sub] d.events[eventId] = {
); id: eventId,
return c.json(rows[0], 201); title: title.trim(),
description: description || '',
startTime: new Date(start_time).getTime(),
endTime: end_time ? new Date(end_time).getTime() : 0,
allDay: all_day || false,
timezone: timezone || 'UTC',
rrule: null,
status: null,
visibility: null,
sourceId: source_id || null,
sourceName: null,
sourceType: null,
sourceColor: null,
locationId: location_id || null,
locationName: location_name || null,
coordinates: null,
locationGranularity: null,
locationLat: null,
locationLng: null,
isVirtual: is_virtual || false,
virtualUrl: virtual_url || null,
virtualPlatform: virtual_platform || null,
rToolSource: r_tool_source || null,
rToolEntityId: r_tool_entity_id || null,
attendees: [],
attendeeCount: 0,
metadata: null,
createdAt: now,
updatedAt: now,
};
});
const updated = _syncServer!.getDoc<CalendarDoc>(docId)!;
return c.json(eventToRow(updated.events[eventId], updated.sources), 201);
}); });
// GET /api/events/:id // GET /api/events/:id
routes.get("/api/events/:id", async (c) => { routes.get("/api/events/:id", async (c) => {
const rows = await sql.unsafe( const space = c.req.param("space") || "demo";
`SELECT e.*, cs.name as source_name, cs.color as source_color const id = c.req.param("id");
FROM rcal.events e LEFT JOIN rcal.calendar_sources cs ON cs.id = e.source_id const doc = ensureDoc(space);
WHERE e.id = $1`,
[c.req.param("id")] const ev = doc.events[id];
); if (!ev) return c.json({ error: "Event not found" }, 404);
if (rows.length === 0) return c.json({ error: "Event not found" }, 404); return c.json(eventToRow(ev, doc.sources));
return c.json(rows[0]);
}); });
// PATCH /api/events/:id // PATCH /api/events/:id
@ -210,55 +378,93 @@ routes.patch("/api/events/:id", async (c) => {
let claims; let claims;
try { claims = await verifyEncryptIDToken(token); } catch { return c.json({ error: "Invalid token" }, 401); } try { claims = await verifyEncryptIDToken(token); } catch { return c.json({ error: "Invalid token" }, 401); }
const space = c.req.param("space") || "demo";
const id = c.req.param("id"); const id = c.req.param("id");
const body = await c.req.json(); const body = await c.req.json();
const fields: string[] = []; const docId = calendarDocId(space);
const params: any[] = []; const doc = ensureDoc(space);
let idx = 1; if (!doc.events[id]) return c.json({ error: "Not found" }, 404);
const allowed = ["title", "description", "start_time", "end_time", "all_day", "timezone",
"status", "visibility", "location_name", "is_virtual", "virtual_url"];
for (const key of allowed) { // Map of allowed body keys to CalendarEvent fields
if (body[key] !== undefined) { const fieldMap: Record<string, keyof CalendarEvent> = {
fields.push(`${key} = $${idx}`); title: 'title',
params.push(body[key]); description: 'description',
idx++; start_time: 'startTime',
end_time: 'endTime',
all_day: 'allDay',
timezone: 'timezone',
status: 'status',
visibility: 'visibility',
location_name: 'locationName',
is_virtual: 'isVirtual',
virtual_url: 'virtualUrl',
};
const updates: Array<{ field: keyof CalendarEvent; value: any }> = [];
for (const [bodyKey, docField] of Object.entries(fieldMap)) {
if (body[bodyKey] !== undefined) {
let value = body[bodyKey];
// Convert time strings to epoch ms
if (bodyKey === 'start_time' || bodyKey === 'end_time') {
value = new Date(value).getTime();
}
updates.push({ field: docField, value });
} }
} }
if (fields.length === 0) return c.json({ error: "No fields" }, 400); if (updates.length === 0) return c.json({ error: "No fields" }, 400);
fields.push("updated_at = NOW()");
params.push(id);
const rows = await sql.unsafe( _syncServer!.changeDoc<CalendarDoc>(docId, `update event ${id}`, (d) => {
`UPDATE rcal.events SET ${fields.join(", ")} WHERE id = $${idx} RETURNING *`, const ev = d.events[id];
params for (const { field, value } of updates) {
); (ev as any)[field] = value;
if (rows.length === 0) return c.json({ error: "Not found" }, 404); }
return c.json(rows[0]); ev.updatedAt = Date.now();
});
const updated = _syncServer!.getDoc<CalendarDoc>(docId)!;
return c.json(eventToRow(updated.events[id], updated.sources));
}); });
// DELETE /api/events/:id // DELETE /api/events/:id
routes.delete("/api/events/:id", async (c) => { routes.delete("/api/events/:id", async (c) => {
const result = await sql.unsafe("DELETE FROM rcal.events WHERE id = $1 RETURNING id", [c.req.param("id")]); const space = c.req.param("space") || "demo";
if (result.length === 0) return c.json({ error: "Not found" }, 404); const id = c.req.param("id");
const docId = calendarDocId(space);
const doc = ensureDoc(space);
if (!doc.events[id]) return c.json({ error: "Not found" }, 404);
_syncServer!.changeDoc<CalendarDoc>(docId, `delete event ${id}`, (d) => {
delete d.events[id];
});
return c.json({ ok: true }); return c.json({ ok: true });
}); });
// ── API: Sources ── // ── API: Sources ──
routes.get("/api/sources", async (c) => { routes.get("/api/sources", async (c) => {
const space = c.req.param("space") || "demo";
const { is_active, is_visible, source_type } = c.req.query(); const { is_active, is_visible, source_type } = c.req.query();
let where = "WHERE 1=1"; const doc = ensureDoc(space);
const params: any[] = [];
let idx = 1;
if (is_active !== undefined) { where += ` AND is_active = $${idx}`; params.push(is_active === "true"); idx++; } let sources = Object.values(doc.sources);
if (is_visible !== undefined) { where += ` AND is_visible = $${idx}`; params.push(is_visible === "true"); idx++; }
if (source_type) { where += ` AND source_type = $${idx}`; params.push(source_type); idx++; }
const rows = await sql.unsafe(`SELECT * FROM rcal.calendar_sources ${where} ORDER BY name`, params); if (is_active !== undefined) {
const active = is_active === "true";
sources = sources.filter((s) => s.isActive === active);
}
if (is_visible !== undefined) {
const visible = is_visible === "true";
sources = sources.filter((s) => s.isVisible === visible);
}
if (source_type) {
sources = sources.filter((s) => s.sourceType === source_type);
}
sources.sort((a, b) => a.name.localeCompare(b.name));
const rows = sources.map(sourceToRow);
return c.json({ count: rows.length, results: rows }); return c.json({ count: rows.length, results: rows });
}); });
@ -267,44 +473,99 @@ routes.post("/api/sources", async (c) => {
if (!token) return c.json({ error: "Authentication required" }, 401); if (!token) return c.json({ error: "Authentication required" }, 401);
try { await verifyEncryptIDToken(token); } catch { return c.json({ error: "Invalid token" }, 401); } try { await verifyEncryptIDToken(token); } catch { return c.json({ error: "Invalid token" }, 401); }
const space = c.req.param("space") || "demo";
const body = await c.req.json(); const body = await c.req.json();
const rows = await sql.unsafe( const docId = calendarDocId(space);
`INSERT INTO rcal.calendar_sources (name, source_type, url, color, is_active, is_visible) ensureDoc(space);
VALUES ($1, $2, $3, $4, $5, $6) RETURNING *`,
[body.name, body.source_type || "MANUAL", body.url || null, body.color || "#6366f1", const sourceId = crypto.randomUUID();
body.is_active ?? true, body.is_visible ?? true] const now = Date.now();
);
return c.json(rows[0], 201); _syncServer!.changeDoc<CalendarDoc>(docId, `create source ${sourceId}`, (d) => {
d.sources[sourceId] = {
id: sourceId,
name: body.name,
sourceType: body.source_type || 'MANUAL',
url: body.url || null,
color: body.color || '#6366f1',
isActive: body.is_active ?? true,
isVisible: body.is_visible ?? true,
syncIntervalMinutes: null,
lastSyncedAt: 0,
ownerId: null,
createdAt: now,
};
});
const updated = _syncServer!.getDoc<CalendarDoc>(docId)!;
return c.json(sourceToRow(updated.sources[sourceId]), 201);
}); });
// ── API: Locations ── // ── API: Locations ──
// Locations are now derived from event data (no separate table).
// Each unique locationId/locationName combination is extracted from events.
interface DerivedLocation {
id: string;
name: string;
granularity: number | null;
parent_id: string | null;
lat: number | null;
lng: number | null;
}
function deriveLocations(doc: CalendarDoc): DerivedLocation[] {
const seen = new Map<string, DerivedLocation>();
for (const ev of Object.values(doc.events)) {
const key = ev.locationId || ev.locationName;
if (!key) continue;
if (seen.has(key)) continue;
seen.set(key, {
id: ev.locationId || key,
name: ev.locationName || key,
granularity: ev.locationGranularity ? parseInt(ev.locationGranularity) || null : null,
parent_id: null,
lat: ev.locationLat,
lng: ev.locationLng,
});
}
return Array.from(seen.values());
}
routes.get("/api/locations", async (c) => { routes.get("/api/locations", async (c) => {
const space = c.req.param("space") || "demo";
const { granularity, parent, search, root } = c.req.query(); const { granularity, parent, search, root } = c.req.query();
let where = "WHERE 1=1"; const doc = ensureDoc(space);
const params: any[] = [];
let idx = 1;
if (root === "true") { where += " AND parent_id IS NULL"; } let locations = deriveLocations(doc);
if (granularity) { where += ` AND granularity = $${idx}`; params.push(parseInt(granularity)); idx++; }
if (parent) { where += ` AND parent_id = $${idx}`; params.push(parent); idx++; }
if (search) { where += ` AND name ILIKE $${idx}`; params.push(`%${search}%`); idx++; }
const rows = await sql.unsafe(`SELECT * FROM rcal.locations ${where} ORDER BY name`, params); if (root === "true") {
return c.json(rows); locations = locations.filter((l) => l.parent_id === null);
}
if (granularity) {
const g = parseInt(granularity);
locations = locations.filter((l) => l.granularity === g);
}
if (parent) {
locations = locations.filter((l) => l.parent_id === parent);
}
if (search) {
const term = search.toLowerCase();
locations = locations.filter((l) => l.name.toLowerCase().includes(term));
}
locations.sort((a, b) => a.name.localeCompare(b.name));
return c.json(locations);
}); });
routes.get("/api/locations/tree", async (c) => { routes.get("/api/locations/tree", async (c) => {
const rows = await sql.unsafe( const space = c.req.param("space") || "demo";
`WITH RECURSIVE tree AS ( const doc = ensureDoc(space);
SELECT id, name, granularity, parent_id, 0 as depth FROM rcal.locations WHERE parent_id IS NULL
UNION ALL // Flat list with depth=0 since hierarchical parent_id data is not stored in Automerge
SELECT l.id, l.name, l.granularity, l.parent_id, t.depth + 1 const locations = deriveLocations(doc).map((l) => ({ ...l, depth: 0 }));
FROM rcal.locations l JOIN tree t ON l.parent_id = t.id locations.sort((a, b) => a.name.localeCompare(b.name));
) return c.json(locations);
SELECT * FROM tree ORDER BY depth, name`
);
return c.json(rows);
}); });
// ── API: Lunar data (computed, not stored) ── // ── API: Lunar data (computed, not stored) ──
@ -349,29 +610,30 @@ routes.get("/api/lunar", async (c) => {
// ── API: Stats ── // ── API: Stats ──
routes.get("/api/stats", async (c) => { routes.get("/api/stats", async (c) => {
const [eventCount, sourceCount, locationCount] = await Promise.all([ const space = c.req.param("space") || "demo";
sql.unsafe("SELECT count(*)::int as cnt FROM rcal.events"), const doc = ensureDoc(space);
sql.unsafe("SELECT count(*)::int as cnt FROM rcal.calendar_sources WHERE is_active = true"),
sql.unsafe("SELECT count(*)::int as cnt FROM rcal.locations"), const events = Object.values(doc.events).length;
]); const sources = Object.values(doc.sources).filter((s) => s.isActive).length;
return c.json({ const locations = deriveLocations(doc).length;
events: eventCount[0]?.cnt || 0,
sources: sourceCount[0]?.cnt || 0, return c.json({ events, sources, locations });
locations: locationCount[0]?.cnt || 0,
});
}); });
// ── API: Context (r* tool bridge) ── // ── API: Context (r* tool bridge) ──
routes.get("/api/context/:tool", async (c) => { routes.get("/api/context/:tool", async (c) => {
const space = c.req.param("space") || "demo";
const tool = c.req.param("tool"); const tool = c.req.param("tool");
const entityId = c.req.query("entityId"); const entityId = c.req.query("entityId");
if (!entityId) return c.json({ error: "entityId required" }, 400); if (!entityId) return c.json({ error: "entityId required" }, 400);
const rows = await sql.unsafe( const doc = ensureDoc(space);
"SELECT * FROM rcal.events WHERE r_tool_source = $1 AND r_tool_entity_id = $2 ORDER BY start_time", const matching = Object.values(doc.events)
[tool, entityId] .filter((e) => e.rToolSource === tool && e.rToolEntityId === entityId)
); .sort((a, b) => a.startTime - b.startTime);
const rows = matching.map((e) => eventToRow(e, doc.sources));
return c.json({ count: rows.length, results: rows }); return c.json({ count: rows.length, results: rows });
}); });
@ -403,8 +665,8 @@ export const calModule: RSpaceModule = {
landingPage: renderLanding, landingPage: renderLanding,
async onInit(ctx) { async onInit(ctx) {
_syncServer = ctx.syncServer; _syncServer = ctx.syncServer;
await initDB(); // Seed demo data for the default space
await seedDemoIfEmpty(); seedDemoIfEmpty("demo");
}, },
feeds: [ feeds: [
{ {

View File

@ -4,12 +4,12 @@
* Ported from /opt/apps/rcart/ (Express Hono). * Ported from /opt/apps/rcart/ (Express Hono).
* Handles catalog (artifact listings), orders, fulfillment resolution. * Handles catalog (artifact listings), orders, fulfillment resolution.
* Integrates with provider-registry for provider matching and flow-service for revenue splits. * Integrates with provider-registry for provider matching and flow-service for revenue splits.
*
* Storage: Automerge documents via SyncServer (no PostgreSQL).
*/ */
import * as Automerge from "@automerge/automerge";
import { Hono } from "hono"; import { Hono } from "hono";
import { readFileSync } from "node:fs";
import { resolve } from "node:path";
import { sql } from "../../shared/db/pool";
import { renderShell } from "../../server/shell"; import { renderShell } from "../../server/shell";
import { getModuleInfoList } from "../../shared/module"; import { getModuleInfoList } from "../../shared/module";
import { depositOrderRevenue } from "./flow"; import { depositOrderRevenue } from "./flow";
@ -17,24 +17,17 @@ import type { RSpaceModule } from "../../shared/module";
import { verifyEncryptIDToken, extractToken } from "@encryptid/sdk/server"; import { verifyEncryptIDToken, extractToken } from "@encryptid/sdk/server";
import { renderLanding } from "./landing"; import { renderLanding } from "./landing";
import type { SyncServer } from '../../server/local-first/sync-server'; import type { SyncServer } from '../../server/local-first/sync-server';
import { catalogSchema, orderSchema } from './schemas'; import {
catalogSchema, orderSchema,
catalogDocId, orderDocId,
type CatalogDoc, type CatalogEntry,
type OrderDoc, type OrderMeta,
} from './schemas';
let _syncServer: SyncServer | null = null; let _syncServer: SyncServer | null = null;
const routes = new Hono(); const routes = new Hono();
// ── DB initialization ──
const SCHEMA_SQL = readFileSync(resolve(import.meta.dir, "db/schema.sql"), "utf-8");
async function initDB() {
try {
await sql.unsafe(SCHEMA_SQL);
console.log("[Cart] DB schema initialized");
} catch (e) {
console.error("[Cart] DB init error:", e);
}
}
// Provider registry URL (for fulfillment resolution) // Provider registry URL (for fulfillment resolution)
const PROVIDER_REGISTRY_URL = process.env.PROVIDER_REGISTRY_URL || ""; const PROVIDER_REGISTRY_URL = process.env.PROVIDER_REGISTRY_URL || "";
@ -44,10 +37,41 @@ function getProviderUrl(): string {
return PROVIDER_REGISTRY_URL || "http://localhost:3000/demo/providers"; return PROVIDER_REGISTRY_URL || "http://localhost:3000/demo/providers";
} }
// ── Automerge helpers ──
/** Lazily create (or retrieve) the catalog doc for a space. */
function ensureCatalogDoc(space: string): Automerge.Doc<CatalogDoc> {
const docId = catalogDocId(space);
let doc = _syncServer!.getDoc<CatalogDoc>(docId);
if (!doc) {
doc = Automerge.change(Automerge.init<CatalogDoc>(), 'init catalog', (d) => {
const init = catalogSchema.init();
Object.assign(d, init);
d.meta.spaceSlug = space;
});
_syncServer!.setDoc(docId, doc);
}
return doc;
}
/** Get all order docs for a space by scanning known doc IDs. */
function getSpaceOrderDocs(space: string): Array<{ docId: string; doc: Automerge.Doc<OrderDoc> }> {
const prefix = `${space}:cart:orders:`;
const results: Array<{ docId: string; doc: Automerge.Doc<OrderDoc> }> = [];
for (const id of _syncServer!.listDocs()) {
if (id.startsWith(prefix)) {
const doc = _syncServer!.getDoc<OrderDoc>(id);
if (doc) results.push({ docId: id, doc });
}
}
return results;
}
// ── CATALOG ROUTES ── // ── CATALOG ROUTES ──
// POST /api/catalog/ingest — Add artifact to catalog // POST /api/catalog/ingest — Add artifact to catalog
routes.post("/api/catalog/ingest", async (c) => { routes.post("/api/catalog/ingest", async (c) => {
const space = c.req.param("space") || "demo";
const artifact = await c.req.json(); const artifact = await c.req.json();
if (!artifact.id || !artifact.schema_version || !artifact.type) { if (!artifact.id || !artifact.schema_version || !artifact.type) {
@ -60,121 +84,151 @@ routes.post("/api/catalog/ingest", async (c) => {
return c.json({ error: "print-ready artifacts must have at least one render_target" }, 400); return c.json({ error: "print-ready artifacts must have at least one render_target" }, 400);
} }
const existing = await sql.unsafe("SELECT id FROM rcart.catalog_entries WHERE artifact_id = $1", [artifact.id]); const doc = ensureCatalogDoc(space);
if (existing.length > 0) {
return c.json({ error: "Artifact already listed", catalog_entry_id: existing[0].id }, 409); // Check for duplicate artifact_id
for (const [, entry] of Object.entries(doc.items)) {
if (entry.artifactId === artifact.id) {
return c.json({ error: "Artifact already listed", catalog_entry_id: entry.id }, 409);
}
} }
const result = await sql.unsafe( const entryId = crypto.randomUUID();
`INSERT INTO rcart.catalog_entries ( const now = Date.now();
artifact_id, artifact, title, product_type,
required_capabilities, substrates, creator_id,
source_space, tags
) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
RETURNING id, artifact_id, title, product_type, status, created_at`,
[
artifact.id, JSON.stringify(artifact),
artifact.payload?.title || "Untitled",
artifact.spec?.product_type || null,
artifact.spec?.required_capabilities || [],
artifact.spec?.substrates || [],
artifact.creator?.id || null,
artifact.source_space || null,
artifact.payload?.tags || [],
]
);
return c.json(result[0], 201); const docId = catalogDocId(space);
_syncServer!.changeDoc<CatalogDoc>(docId, 'ingest catalog entry', (d) => {
d.items[entryId] = {
id: entryId,
artifactId: artifact.id,
artifact: artifact,
title: artifact.payload?.title || "Untitled",
productType: artifact.spec?.product_type || null,
requiredCapabilities: artifact.spec?.required_capabilities || [],
substrates: artifact.spec?.substrates || [],
creatorId: artifact.creator?.id || null,
sourceSpace: artifact.source_space || space,
tags: artifact.payload?.tags || [],
status: "active",
createdAt: now,
updatedAt: now,
};
});
return c.json({
id: entryId,
artifact_id: artifact.id,
title: artifact.payload?.title || "Untitled",
product_type: artifact.spec?.product_type || null,
status: "active",
created_at: new Date(now).toISOString(),
}, 201);
}); });
// GET /api/catalog — Browse catalog // GET /api/catalog — Browse catalog
routes.get("/api/catalog", async (c) => { routes.get("/api/catalog", async (c) => {
const space = c.req.param("space") || "demo";
const { product_type, capability, tag, source_space, q, limit = "50", offset = "0" } = c.req.query(); const { product_type, capability, tag, source_space, q, limit = "50", offset = "0" } = c.req.query();
const conditions: string[] = ["status = 'active'"]; const doc = ensureCatalogDoc(space);
const params: any[] = []; let entries = Object.values(doc.items);
let paramIdx = 1;
if (product_type) { // Apply filters
conditions.push(`product_type = $${paramIdx}`); entries = entries.filter((e) => e.status === "active");
params.push(product_type); if (product_type) entries = entries.filter((e) => e.productType === product_type);
paramIdx++;
}
if (capability) { if (capability) {
conditions.push(`required_capabilities && $${paramIdx}`); const caps = capability.split(",");
params.push(capability.split(",")); entries = entries.filter((e) => caps.some((cap) => e.requiredCapabilities.includes(cap)));
paramIdx++;
}
if (tag) {
conditions.push(`$${paramIdx} = ANY(tags)`);
params.push(tag);
paramIdx++;
}
if (source_space) {
conditions.push(`source_space = $${paramIdx}`);
params.push(source_space);
paramIdx++;
} }
if (tag) entries = entries.filter((e) => e.tags.includes(tag));
if (source_space) entries = entries.filter((e) => e.sourceSpace === source_space);
if (q) { if (q) {
conditions.push(`title ILIKE $${paramIdx}`); const lower = q.toLowerCase();
params.push(`%${q}%`); entries = entries.filter((e) => e.title.toLowerCase().includes(lower));
paramIdx++;
} }
const where = conditions.join(" AND "); // Sort by createdAt descending
entries.sort((a, b) => b.createdAt - a.createdAt);
const limitNum = Math.min(parseInt(limit) || 50, 100); const limitNum = Math.min(parseInt(limit) || 50, 100);
const offsetNum = parseInt(offset) || 0; const offsetNum = parseInt(offset) || 0;
const total = entries.length;
const paged = entries.slice(offsetNum, offsetNum + limitNum);
const [result, countResult] = await Promise.all([ // Map to response shape matching the original SQL response
sql.unsafe( const result = paged.map((e) => {
`SELECT id, artifact_id, title, product_type, const art = e.artifact as Record<string, any> | undefined;
required_capabilities, tags, source_space, return {
artifact->'payload'->>'description' as description, id: e.id,
artifact->'pricing' as pricing, artifact_id: e.artifactId,
artifact->'spec'->'dimensions' as dimensions, title: e.title,
status, created_at product_type: e.productType,
FROM rcart.catalog_entries required_capabilities: e.requiredCapabilities,
WHERE ${where} tags: e.tags,
ORDER BY created_at DESC source_space: e.sourceSpace,
LIMIT ${limitNum} OFFSET ${offsetNum}`, description: art?.payload?.description || null,
params pricing: art?.pricing || null,
), dimensions: art?.spec?.dimensions || null,
sql.unsafe(`SELECT count(*) FROM rcart.catalog_entries WHERE ${where}`, params), status: e.status,
]); created_at: new Date(e.createdAt).toISOString(),
};
});
return c.json({ entries: result, total: parseInt(countResult[0].count as string), limit: limitNum, offset: offsetNum }); return c.json({ entries: result, total, limit: limitNum, offset: offsetNum });
}); });
// GET /api/catalog/:id — Single catalog entry // GET /api/catalog/:id — Single catalog entry
routes.get("/api/catalog/:id", async (c) => { routes.get("/api/catalog/:id", async (c) => {
const space = c.req.param("space") || "demo";
const id = c.req.param("id"); const id = c.req.param("id");
const result = await sql.unsafe( const doc = ensureCatalogDoc(space);
"SELECT * FROM rcart.catalog_entries WHERE id = $1 OR artifact_id = $1",
[id] // Look up by entry id or artifact id
); let entry: CatalogEntry | undefined;
if (result.length === 0) return c.json({ error: "Catalog entry not found" }, 404); if (doc.items[id]) {
const row = result[0]; entry = doc.items[id];
return c.json({ id: row.id, artifact: row.artifact, status: row.status, created_at: row.created_at, updated_at: row.updated_at }); } else {
entry = Object.values(doc.items).find((e) => e.artifactId === id);
}
if (!entry) return c.json({ error: "Catalog entry not found" }, 404);
return c.json({
id: entry.id,
artifact: entry.artifact,
status: entry.status,
created_at: new Date(entry.createdAt).toISOString(),
updated_at: new Date(entry.updatedAt).toISOString(),
});
}); });
// PATCH /api/catalog/:id — Update listing status // PATCH /api/catalog/:id — Update listing status
routes.patch("/api/catalog/:id", async (c) => { routes.patch("/api/catalog/:id", async (c) => {
const space = c.req.param("space") || "demo";
const { status } = await c.req.json(); const { status } = await c.req.json();
const valid = ["active", "paused", "sold_out", "removed"]; const valid = ["active", "paused", "sold_out", "removed"];
if (!valid.includes(status)) return c.json({ error: `status must be one of: ${valid.join(", ")}` }, 400); if (!valid.includes(status)) return c.json({ error: `status must be one of: ${valid.join(", ")}` }, 400);
const result = await sql.unsafe( const doc = ensureCatalogDoc(space);
"UPDATE rcart.catalog_entries SET status = $1, updated_at = NOW() WHERE id = $2 RETURNING id, status", const entryId = c.req.param("id");
[status, c.req.param("id")]
); if (!doc.items[entryId]) return c.json({ error: "Catalog entry not found" }, 404);
if (result.length === 0) return c.json({ error: "Catalog entry not found" }, 404);
return c.json(result[0]); const docId = catalogDocId(space);
_syncServer!.changeDoc<CatalogDoc>(docId, `update catalog status → ${status}`, (d) => {
d.items[entryId].status = status;
d.items[entryId].updatedAt = Date.now();
});
return c.json({ id: entryId, status });
}); });
// ── ORDER ROUTES ── // ── ORDER ROUTES ──
// POST /api/orders — Create an order // POST /api/orders — Create an order
routes.post("/api/orders", async (c) => { routes.post("/api/orders", async (c) => {
const space = c.req.param("space") || "demo";
// Optional auth — set buyer_did from claims if authenticated // Optional auth — set buyer_did from claims if authenticated
const token = extractToken(c.req.raw.headers); const token = extractToken(c.req.raw.headers);
let buyerDid: string | null = null; let buyerDid: string | null = null;
@ -194,51 +248,70 @@ routes.post("/api/orders", async (c) => {
if (!catalog_entry_id && !artifact_id) return c.json({ error: "Required: catalog_entry_id or artifact_id" }, 400); if (!catalog_entry_id && !artifact_id) return c.json({ error: "Required: catalog_entry_id or artifact_id" }, 400);
if (!provider_id || !total_price) return c.json({ error: "Required: provider_id, total_price" }, 400); if (!provider_id || !total_price) return c.json({ error: "Required: provider_id, total_price" }, 400);
const entryResult = await sql.unsafe( // Look up catalog entry
"SELECT id, artifact_id FROM rcart.catalog_entries WHERE id = $1 OR artifact_id = $1", const catalogDoc = ensureCatalogDoc(space);
[catalog_entry_id || artifact_id] const lookupId = catalog_entry_id || artifact_id;
); let entry: CatalogEntry | undefined;
if (entryResult.length === 0) return c.json({ error: "Catalog entry not found" }, 404); if (catalogDoc.items[lookupId]) {
entry = catalogDoc.items[lookupId];
const entry = entryResult[0]; } else {
entry = Object.values(catalogDoc.items).find((e) => e.artifactId === lookupId || e.id === lookupId);
}
if (!entry) return c.json({ error: "Catalog entry not found" }, 404);
// x402 detection // x402 detection
const x402Header = c.req.header("x-payment"); const x402Header = c.req.header("x-payment");
const effectiveMethod = x402Header ? "x402" : payment_method; const effectiveMethod = x402Header ? "x402" : payment_method;
const initialStatus = x402Header ? "paid" : "pending"; const initialStatus = x402Header ? "paid" : "pending";
const result = await sql.unsafe( const orderId = crypto.randomUUID();
`INSERT INTO rcart.orders ( const now = Date.now();
catalog_entry_id, artifact_id, buyer_id, buyer_location, buyer_contact,
provider_id, provider_name, provider_distance_km, // Create order doc
quantity, production_cost, creator_payout, community_payout, const oDocId = orderDocId(space, orderId);
total_price, currency, status, payment_method, payment_tx, payment_network let orderDoc = Automerge.change(Automerge.init<OrderDoc>(), 'create order', (d) => {
${initialStatus === "paid" ? ", paid_at" : ""} const init = orderSchema.init();
) VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18 Object.assign(d, init);
${initialStatus === "paid" ? ", NOW()" : ""}) d.meta.spaceSlug = space;
RETURNING *`, d.order.id = orderId;
[ d.order.catalogEntryId = entry!.id;
entry.id, entry.artifact_id, d.order.artifactId = entry!.artifactId;
buyerDid || buyer_id || null, d.order.buyerId = buyerDid || buyer_id || null;
buyer_location ? JSON.stringify(buyer_location) : null, d.order.buyerLocation = buyer_location ? JSON.stringify(buyer_location) : null;
buyer_contact ? JSON.stringify(buyer_contact) : null, d.order.buyerContact = buyer_contact ? JSON.stringify(buyer_contact) : null;
provider_id, provider_name || null, provider_distance_km || null, d.order.providerId = provider_id;
quantity, production_cost || null, creator_payout || null, community_payout || null, d.order.providerName = provider_name || null;
total_price, currency, initialStatus, effectiveMethod, d.order.providerDistanceKm = provider_distance_km || null;
payment_tx || null, payment_network || null, d.order.quantity = quantity;
] d.order.productionCost = production_cost || null;
); d.order.creatorPayout = creator_payout || null;
d.order.communityPayout = community_payout || null;
d.order.totalPrice = total_price;
d.order.currency = currency;
d.order.status = initialStatus;
d.order.paymentMethod = effectiveMethod;
d.order.paymentTx = payment_tx || null;
d.order.paymentNetwork = payment_network || null;
d.order.createdAt = now;
d.order.updatedAt = now;
if (initialStatus === "paid") d.order.paidAt = now;
});
_syncServer!.setDoc(oDocId, orderDoc);
const order = orderDoc.order;
const order = result[0];
if (initialStatus === "paid") { if (initialStatus === "paid") {
depositOrderRevenue(total_price, order.id); depositOrderRevenue(total_price, orderId);
} }
return c.json(order, 201); // Return response matching original shape
return c.json(orderToResponse(order, entry), 201);
}); });
// GET /api/orders — List orders // GET /api/orders — List orders
routes.get("/api/orders", async (c) => { routes.get("/api/orders", async (c) => {
const space = c.req.param("space") || "demo";
// Optional auth — filter by buyer if authenticated // Optional auth — filter by buyer if authenticated
const token = extractToken(c.req.raw.headers); const token = extractToken(c.req.raw.headers);
let authedBuyer: string | null = null; let authedBuyer: string | null = null;
@ -248,73 +321,156 @@ routes.get("/api/orders", async (c) => {
const { status, provider_id, buyer_id, limit = "50", offset = "0" } = c.req.query(); const { status, provider_id, buyer_id, limit = "50", offset = "0" } = c.req.query();
const conditions: string[] = []; const orderDocs = getSpaceOrderDocs(space);
const params: any[] = [];
let paramIdx = 1;
if (status) { conditions.push(`o.status = $${paramIdx}`); params.push(status); paramIdx++; } // Build enriched order list with catalog info
if (provider_id) { conditions.push(`o.provider_id = $${paramIdx}`); params.push(provider_id); paramIdx++; } const catalogDoc = ensureCatalogDoc(space);
let orders = orderDocs.map(({ doc }) => {
const o = doc.order;
const catEntry = catalogDoc.items[o.catalogEntryId];
const resp = orderToResponse(o);
resp.artifact_title = catEntry?.title || null;
resp.product_type = catEntry?.productType || null;
return resp;
});
// Apply filters
if (status) orders = orders.filter((o) => o.status === status);
if (provider_id) orders = orders.filter((o) => o.provider_id === provider_id);
const effectiveBuyerId = buyer_id || (authedBuyer && !status && !provider_id ? authedBuyer : null); const effectiveBuyerId = buyer_id || (authedBuyer && !status && !provider_id ? authedBuyer : null);
if (effectiveBuyerId) { conditions.push(`o.buyer_id = $${paramIdx}`); params.push(effectiveBuyerId); paramIdx++; } if (effectiveBuyerId) orders = orders.filter((o) => o.buyer_id === effectiveBuyerId);
// Sort by created_at descending
orders.sort((a, b) => new Date(b.created_at).getTime() - new Date(a.created_at).getTime());
const where = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
const limitNum = Math.min(parseInt(limit) || 50, 100); const limitNum = Math.min(parseInt(limit) || 50, 100);
const offsetNum = parseInt(offset) || 0; const offsetNum = parseInt(offset) || 0;
const paged = orders.slice(offsetNum, offsetNum + limitNum);
const result = await sql.unsafe( return c.json({ orders: paged });
`SELECT o.*, c.title as artifact_title, c.product_type
FROM rcart.orders o JOIN rcart.catalog_entries c ON c.id = o.catalog_entry_id
${where} ORDER BY o.created_at DESC LIMIT ${limitNum} OFFSET ${offsetNum}`,
params
);
return c.json({ orders: result });
}); });
// GET /api/orders/:id — Single order // GET /api/orders/:id — Single order
routes.get("/api/orders/:id", async (c) => { routes.get("/api/orders/:id", async (c) => {
const result = await sql.unsafe( const space = c.req.param("space") || "demo";
`SELECT o.*, c.artifact as artifact_envelope, c.title as artifact_title const orderId = c.req.param("id");
FROM rcart.orders o JOIN rcart.catalog_entries c ON c.id = o.catalog_entry_id const oDocId = orderDocId(space, orderId);
WHERE o.id = $1`, const doc = _syncServer!.getDoc<OrderDoc>(oDocId);
[c.req.param("id")] if (!doc) return c.json({ error: "Order not found" }, 404);
);
if (result.length === 0) return c.json({ error: "Order not found" }, 404); const catalogDoc = ensureCatalogDoc(space);
return c.json(result[0]); const catEntry = catalogDoc.items[doc.order.catalogEntryId];
const resp = orderToResponse(doc.order);
resp.artifact_envelope = catEntry?.artifact || null;
resp.artifact_title = catEntry?.title || null;
return c.json(resp);
}); });
// PATCH /api/orders/:id/status — Update order status // PATCH /api/orders/:id/status — Update order status
routes.patch("/api/orders/:id/status", async (c) => { routes.patch("/api/orders/:id/status", async (c) => {
const space = c.req.param("space") || "demo";
const body = await c.req.json(); const body = await c.req.json();
const { status, payment_tx, payment_network } = body; const { status, payment_tx, payment_network } = body;
const valid = ["pending", "paid", "accepted", "in_production", "ready", "shipped", "completed", "cancelled"]; const valid = ["pending", "paid", "accepted", "in_production", "ready", "shipped", "completed", "cancelled"];
if (!valid.includes(status)) return c.json({ error: `status must be one of: ${valid.join(", ")}` }, 400); if (!valid.includes(status)) return c.json({ error: `status must be one of: ${valid.join(", ")}` }, 400);
const timestampField: Record<string, string> = { paid: "paid_at", accepted: "accepted_at", completed: "completed_at" }; const orderId = c.req.param("id");
const extraSet = timestampField[status] ? `, ${timestampField[status]} = NOW()` : ""; const oDocId = orderDocId(space, orderId);
const doc = _syncServer!.getDoc<OrderDoc>(oDocId);
if (!doc) return c.json({ error: "Order not found" }, 404);
// Use parameterized query for payment info const now = Date.now();
let paymentSet = "";
const params: any[] = [status, c.req.param("id")]; const updated = _syncServer!.changeDoc<OrderDoc>(oDocId, `order status → ${status}`, (d) => {
if (status === "paid" && payment_tx) { d.order.status = status;
paymentSet = `, payment_tx = $3, payment_network = $4`; d.order.updatedAt = now;
params.push(payment_tx, payment_network || null); if (status === "paid") d.order.paidAt = now;
if (status === "accepted") d.order.acceptedAt = now;
if (status === "completed") d.order.completedAt = now;
if (status === "paid" && payment_tx) {
d.order.paymentTx = payment_tx;
d.order.paymentNetwork = payment_network || null;
}
});
if (!updated) return c.json({ error: "Order not found" }, 404);
if (status === "paid" && updated.order.totalPrice) {
depositOrderRevenue(updated.order.totalPrice, orderId);
} }
const result = await sql.unsafe( return c.json(orderToResponse(updated.order));
`UPDATE rcart.orders SET status = $1, updated_at = NOW()${extraSet}${paymentSet} WHERE id = $2 RETURNING *`,
params
);
if (result.length === 0) return c.json({ error: "Order not found" }, 404);
const updated = result[0];
if (status === "paid" && updated.total_price) {
depositOrderRevenue(updated.total_price, c.req.param("id"));
}
return c.json(updated);
}); });
// ── Response helpers ──
/**
 * Flat wire shape for a single order, keeping the snake_case field names of
 * the rows the pre-migration SQL queries returned so API clients are
 * unaffected by the PostgreSQL → Automerge switch.
 */
interface OrderResponse {
  id: string;
  catalog_entry_id: string;
  artifact_id: string;
  buyer_id: string | null;
  buyer_location: unknown; // JSON-decoded when parseable, else the raw string; null when unset
  buyer_contact: unknown; // JSON-decoded when parseable, else the raw string; null when unset
  provider_id: string | null;
  provider_name: string | null;
  provider_distance_km: number | null;
  quantity: number;
  production_cost: number | null;
  creator_payout: number | null;
  community_payout: number | null;
  total_price: number | null;
  currency: string;
  status: string; // order lifecycle status (pending/paid/accepted/.../cancelled)
  payment_method: string | null;
  payment_tx: string | null;
  payment_network: string | null;
  created_at: string; // ISO-8601
  paid_at: string | null; // ISO-8601; null until the order is marked paid
  accepted_at: string | null; // ISO-8601; null until accepted
  completed_at: string | null; // ISO-8601; null until completed
  updated_at: string; // ISO-8601
  artifact_title?: string | null; // enrichment from the linked catalog entry, when available
  product_type?: string | null; // enrichment from the linked catalog entry, when available
  artifact_envelope?: unknown; // full artifact payload; only set on the single-order endpoint
}
/**
 * Convert an internal OrderMeta record into the flat, snake_case
 * OrderResponse shape that mirrors the original SQL rows.
 *
 * Epoch-ms timestamps become ISO-8601 strings (unset/falsy optional
 * timestamps become null), and buyer location/contact strings are
 * JSON-decoded on a best-effort basis. When a catalog entry is supplied,
 * its title and product type are included in the response.
 */
function orderToResponse(o: OrderMeta, catEntry?: CatalogEntry): OrderResponse {
  // Falsy (unset) timestamps map to null; otherwise format as ISO-8601.
  const whenOrNull = (ms: number | null): string | null =>
    ms ? new Date(ms).toISOString() : null;
  // Falsy strings map to null; otherwise best-effort JSON decode.
  const decoded = (raw: string | null): unknown => (raw ? tryParse(raw) : null);

  const response: OrderResponse = {
    id: o.id,
    catalog_entry_id: o.catalogEntryId,
    artifact_id: o.artifactId,
    buyer_id: o.buyerId,
    buyer_location: decoded(o.buyerLocation),
    buyer_contact: decoded(o.buyerContact),
    provider_id: o.providerId,
    provider_name: o.providerName,
    provider_distance_km: o.providerDistanceKm,
    quantity: o.quantity,
    production_cost: o.productionCost,
    creator_payout: o.creatorPayout,
    community_payout: o.communityPayout,
    total_price: o.totalPrice,
    currency: o.currency,
    status: o.status,
    payment_method: o.paymentMethod,
    payment_tx: o.paymentTx,
    payment_network: o.paymentNetwork,
    created_at: new Date(o.createdAt).toISOString(),
    paid_at: whenOrNull(o.paidAt),
    accepted_at: whenOrNull(o.acceptedAt),
    completed_at: whenOrNull(o.completedAt),
    updated_at: new Date(o.updatedAt).toISOString(),
  };
  if (catEntry) {
    response.artifact_title = catEntry.title;
    response.product_type = catEntry.productType;
  }
  return response;
}

/** Best-effort JSON decode: the parsed value, or the raw string if parsing fails. */
function tryParse(s: string): unknown {
  try {
    return JSON.parse(s);
  } catch {
    return s;
  }
}
// ── FULFILLMENT ROUTES ── // ── FULFILLMENT ROUTES ──
function round2(n: number): number { function round2(n: number): number {
@ -365,6 +521,7 @@ function composeCost(artifact: Record<string, unknown>, provider: ProviderMatch,
// POST /api/fulfill/resolve — Find fulfillment options // POST /api/fulfill/resolve — Find fulfillment options
routes.post("/api/fulfill/resolve", async (c) => { routes.post("/api/fulfill/resolve", async (c) => {
const space = c.req.param("space") || "demo";
const body = await c.req.json(); const body = await c.req.json();
const { artifact_id, catalog_entry_id, buyer_location, quantity = 1 } = body; const { artifact_id, catalog_entry_id, buyer_location, quantity = 1 } = body;
@ -375,14 +532,21 @@ routes.post("/api/fulfill/resolve", async (c) => {
return c.json({ error: "Required: artifact_id or catalog_entry_id" }, 400); return c.json({ error: "Required: artifact_id or catalog_entry_id" }, 400);
} }
const entryResult = await sql.unsafe( const catalogDoc = ensureCatalogDoc(space);
"SELECT * FROM rcart.catalog_entries WHERE (artifact_id = $1 OR id = $1) AND status = 'active'", const lookupId = artifact_id || catalog_entry_id;
[artifact_id || catalog_entry_id]
);
if (entryResult.length === 0) return c.json({ error: "Artifact not found in catalog" }, 404);
const entry = entryResult[0]; // Find entry by id or artifact_id, must be active
const artifact = entry.artifact; let entry: CatalogEntry | undefined;
if (catalogDoc.items[lookupId] && catalogDoc.items[lookupId].status === "active") {
entry = catalogDoc.items[lookupId];
} else {
entry = Object.values(catalogDoc.items).find(
(e) => (e.artifactId === lookupId || e.id === lookupId) && e.status === "active"
);
}
if (!entry) return c.json({ error: "Artifact not found in catalog" }, 404);
const artifact = entry.artifact as Record<string, any>;
const capabilities = artifact.spec?.required_capabilities || []; const capabilities = artifact.spec?.required_capabilities || [];
const substrates = artifact.spec?.substrates || []; const substrates = artifact.spec?.substrates || [];
@ -471,7 +635,6 @@ export const cartModule: RSpaceModule = {
landingPage: renderLanding, landingPage: renderLanding,
async onInit(ctx) { async onInit(ctx) {
_syncServer = ctx.syncServer; _syncServer = ctx.syncServer;
await initDB();
}, },
feeds: [ feeds: [
{ {

View File

@ -1,54 +1,141 @@
/** /**
* Files module file sharing, public share links, memory cards. * Files module file sharing, public share links, memory cards.
* Ported from rfiles-online (Django Bun/Hono). * Ported from rfiles-online (Django Bun/Hono).
*
* All metadata is stored in Automerge documents via SyncServer.
* Binary files remain on the filesystem.
*/ */
import { Hono } from "hono"; import { Hono } from "hono";
import { readFileSync } from "node:fs";
import { resolve } from "node:path"; import { resolve } from "node:path";
import { mkdir, writeFile, unlink } from "node:fs/promises"; import { mkdir, writeFile, unlink } from "node:fs/promises";
import { createHash, randomBytes } from "node:crypto"; import { createHash, randomBytes } from "node:crypto";
import { sql } from "../../shared/db/pool"; import * as Automerge from "@automerge/automerge";
import { renderShell, renderExternalAppShell } from "../../server/shell"; import { renderShell, renderExternalAppShell } from "../../server/shell";
import { getModuleInfoList } from "../../shared/module"; import { getModuleInfoList } from "../../shared/module";
import type { RSpaceModule } from "../../shared/module"; import type { RSpaceModule } from "../../shared/module";
import { verifyEncryptIDToken, extractToken } from "@encryptid/sdk/server"; import { verifyEncryptIDToken, extractToken } from "@encryptid/sdk/server";
import { renderLanding } from "./landing"; import { renderLanding } from "./landing";
import type { SyncServer } from '../../server/local-first/sync-server'; import type { SyncServer } from '../../server/local-first/sync-server';
import { filesSchema } from './schemas'; import { filesSchema, filesDocId } from './schemas';
import type { FilesDoc, MediaFile, MemoryCard } from './schemas';
// ── Extended doc types (shares + access logs live alongside files/cards) ──
/** A public share link for one media file, stored in the files doc's `shares` map. */
interface PublicShare {
  id: string; // primary key; also the key in the doc's shares map
  token: string; // URL token used by the public /s/:token endpoints
  mediaFileId: string; // id of the MediaFile this share exposes
  createdBy: string | null; // EncryptID subject of the share creator, when known
  expiresAt: number | null; // epoch ms, null = never
  maxDownloads: number | null; // download cap; null = unlimited
  downloadCount: number; // incremented on each successful /s/:token download
  isActive: boolean; // false once revoked, or deactivated by the hourly expiry sweep
  isPasswordProtected: boolean; // true when a password hash is stored
  passwordHash: string | null; // compared against the hashed ?password query on download
  note: string | null; // optional free-text note attached at creation
  createdAt: number; // epoch ms
}
/** Audit record of a file access event; pruned after 90 days by the daily cleanup timer. */
interface AccessLog {
  id: string; // primary key; also the key in the doc's accessLogs map
  mediaFileId: string; // file that was accessed
  shareId: string | null; // share used for the access, when applicable
  ipAddress: string | null; // first X-Forwarded-For hop or X-Real-IP, when available
  userAgent: string | null; // User-Agent header, truncated to 500 chars on write
  accessType: string; // e.g. 'share_created', 'download'
  accessedAt: number; // epoch ms
}
/**
 * Extended doc shape — supplements FilesDoc with shares and access logs.
 * The base FilesDoc from schemas.ts defines files + memoryCards;
 * we add shares and accessLogs as additional top-level maps.
 * (ensureDoc backfills these maps on legacy docs that predate them.)
 */
interface FilesDocExt extends FilesDoc {
  shares: Record<string, PublicShare>; // keyed by share id
  accessLogs: Record<string, AccessLog>; // keyed by log id
}
let _syncServer: SyncServer | null = null; let _syncServer: SyncServer | null = null;
const routes = new Hono(); const routes = new Hono();
const FILES_DIR = process.env.FILES_DIR || "/data/files"; const FILES_DIR = process.env.FILES_DIR || "/data/files";
const SCHEMA_SQL = readFileSync(resolve(import.meta.dir, "db/schema.sql"), "utf-8");
// ── DB initialization ── // ── Automerge document helpers ──
async function initDB() {
try { function ensureDoc(space: string, sharedSpace: string = "default"): FilesDocExt {
await sql.unsafe(SCHEMA_SQL); const docId = filesDocId(space, sharedSpace);
console.log("[Files] DB schema initialized"); let doc = _syncServer!.getDoc<FilesDocExt>(docId);
} catch (e: any) { if (!doc) {
console.error("[Files] DB init error:", e.message); doc = Automerge.change(Automerge.init<FilesDocExt>(), 'init files doc', (d) => {
const init = filesSchema.init();
d.meta = init.meta;
d.meta.spaceSlug = space;
d.meta.sharedSpace = sharedSpace;
d.files = {};
d.memoryCards = {};
d.shares = {};
d.accessLogs = {};
});
_syncServer!.setDoc(docId, doc);
} }
// Ensure shares/accessLogs exist on legacy docs that predate these fields
if (!doc.shares || !doc.accessLogs) {
doc = _syncServer!.changeDoc<FilesDocExt>(docId, 'add shares+logs maps', (d) => {
if (!(d as any).shares) (d as any).shares = {};
if (!(d as any).accessLogs) (d as any).accessLogs = {};
})!;
}
return doc;
} }
// ── Cleanup timers (replace Celery) ── // ── Cleanup timers (replace Celery) ──
// Deactivate expired shares every hour // Deactivate expired shares every hour
setInterval(async () => { setInterval(() => {
if (!_syncServer) return;
try { try {
const result = await sql.unsafe( const now = Date.now();
"UPDATE rfiles.public_shares SET is_active = FALSE WHERE is_active = TRUE AND expires_at IS NOT NULL AND expires_at < NOW()" for (const docId of _syncServer.getDocIds()) {
); if (!docId.includes(':files:cards:')) continue;
if ((result as any).count > 0) console.log(`[Files] Deactivated ${(result as any).count} expired shares`); const doc = _syncServer.getDoc<FilesDocExt>(docId);
if (!doc?.shares) continue;
const toDeactivate = Object.values(doc.shares).filter(
(s) => s.isActive && s.expiresAt !== null && s.expiresAt < now
);
if (toDeactivate.length > 0) {
_syncServer.changeDoc<FilesDocExt>(docId, 'deactivate expired shares', (d) => {
for (const s of toDeactivate) {
if (d.shares[s.id]) d.shares[s.id].isActive = false;
}
});
console.log(`[Files] Deactivated ${toDeactivate.length} expired shares in ${docId}`);
}
}
} catch (e: any) { console.error("[Files] Cleanup error:", e.message); } } catch (e: any) { console.error("[Files] Cleanup error:", e.message); }
}, 3600_000); }, 3600_000);
// Delete access logs older than 90 days, daily // Delete access logs older than 90 days, daily
setInterval(async () => { setInterval(() => {
if (!_syncServer) return;
try { try {
await sql.unsafe("DELETE FROM rfiles.access_logs WHERE accessed_at < NOW() - INTERVAL '90 days'"); const cutoff = Date.now() - 90 * 86400_000;
for (const docId of _syncServer.getDocIds()) {
if (!docId.includes(':files:cards:')) continue;
const doc = _syncServer.getDoc<FilesDocExt>(docId);
if (!doc?.accessLogs) continue;
const toDelete = Object.values(doc.accessLogs).filter(
(l) => l.accessedAt < cutoff
);
if (toDelete.length > 0) {
_syncServer.changeDoc<FilesDocExt>(docId, 'prune old access logs', (d) => {
for (const l of toDelete) {
delete d.accessLogs[l.id];
}
});
}
}
} catch (e: any) { console.error("[Files] Log cleanup error:", e.message); } } catch (e: any) { console.error("[Files] Log cleanup error:", e.message); }
}, 86400_000); }, 86400_000);
@ -69,6 +156,11 @@ async function computeFileHash(buffer: ArrayBuffer): Promise<string> {
return hash.digest("hex"); return hash.digest("hex");
} }
/**
 * Serialize a doc-sourced object for JSON responses (strip Automerge proxies).
 * A JSON round-trip yields plain objects/arrays fully detached from the doc.
 */
function toPlain<T>(obj: T): T {
  const serialized = JSON.stringify(obj);
  return JSON.parse(serialized) as T;
}
// ── File upload ── // ── File upload ──
routes.post("/api/files", async (c) => { routes.post("/api/files", async (c) => {
const token = extractToken(c.req.raw.headers); const token = extractToken(c.req.raw.headers);
@ -83,7 +175,9 @@ routes.post("/api/files", async (c) => {
const space = c.req.param("space") || formData.get("space")?.toString() || "default"; const space = c.req.param("space") || formData.get("space")?.toString() || "default";
const title = formData.get("title")?.toString() || file.name.replace(/\.[^.]+$/, ""); const title = formData.get("title")?.toString() || file.name.replace(/\.[^.]+$/, "");
const description = formData.get("description")?.toString() || ""; const description = formData.get("description")?.toString() || "";
const tags = formData.get("tags")?.toString() || "[]"; const tagsRaw = formData.get("tags")?.toString() || "[]";
let tags: string[] = [];
try { tags = JSON.parse(tagsRaw); } catch { tags = []; }
const uploadedBy = claims.sub; const uploadedBy = claims.sub;
const buffer = await file.arrayBuffer(); const buffer = await file.arrayBuffer();
@ -97,13 +191,32 @@ routes.post("/api/files", async (c) => {
await mkdir(resolve(fullPath, ".."), { recursive: true }); await mkdir(resolve(fullPath, ".."), { recursive: true });
await writeFile(fullPath, Buffer.from(buffer)); await writeFile(fullPath, Buffer.from(buffer));
const [row] = await sql.unsafe( const docId = filesDocId(space, space);
`INSERT INTO rfiles.media_files (original_filename, title, description, mime_type, file_size, file_hash, storage_path, tags, uploaded_by, shared_space) ensureDoc(space, space);
VALUES ($1, $2, $3, $4, $5, $6, $7, $8::jsonb, $9, $10) RETURNING *`,
[file.name, title, description, file.type || "application/octet-stream", file.size, fileHash, storagePath, tags, uploadedBy, space]
);
return c.json({ file: row }, 201); const mediaFile: MediaFile = {
id: fileId,
originalFilename: file.name,
title,
description,
mimeType: file.type || "application/octet-stream",
fileSize: file.size,
fileHash,
storagePath,
tags,
isProcessed: false,
processingError: null,
uploadedBy,
sharedSpace: space,
createdAt: Date.now(),
updatedAt: Date.now(),
};
_syncServer!.changeDoc<FilesDocExt>(docId, `upload file ${fileId}`, (d) => {
d.files[fileId] = mediaFile;
});
return c.json({ file: toPlain(mediaFile) }, 201);
}); });
// ── File listing ── // ── File listing ──
@ -113,60 +226,72 @@ routes.get("/api/files", async (c) => {
const limit = Math.min(Number(c.req.query("limit")) || 50, 200); const limit = Math.min(Number(c.req.query("limit")) || 50, 200);
const offset = Number(c.req.query("offset")) || 0; const offset = Number(c.req.query("offset")) || 0;
let query = "SELECT * FROM rfiles.media_files WHERE shared_space = $1"; const doc = ensureDoc(space, space);
const params: any[] = [space];
let paramIdx = 2; let files = Object.values(doc.files)
.filter((f) => f.sharedSpace === space);
if (mimeType) { if (mimeType) {
query += ` AND mime_type LIKE $${paramIdx}`; files = files.filter((f) => f.mimeType && f.mimeType.startsWith(mimeType));
params.push(`${mimeType}%`);
paramIdx++;
} }
query += ` ORDER BY created_at DESC LIMIT $${paramIdx} OFFSET $${paramIdx + 1}`; // Sort by createdAt descending
params.push(limit, offset); files.sort((a, b) => b.createdAt - a.createdAt);
const rows = await sql.unsafe(query, params); const total = files.length;
const [{ count }] = await sql.unsafe( const paged = files.slice(offset, offset + limit);
"SELECT COUNT(*) as count FROM rfiles.media_files WHERE shared_space = $1",
[space]
);
return c.json({ files: rows, total: Number(count), limit, offset }); return c.json({ files: toPlain(paged), total, limit, offset });
}); });
// ── File download ── // ── File download ──
routes.get("/api/files/:id/download", async (c) => { routes.get("/api/files/:id/download", async (c) => {
const [file] = await sql.unsafe("SELECT * FROM rfiles.media_files WHERE id = $1", [c.req.param("id")]); const fileId = c.req.param("id");
const space = c.req.param("space") || c.req.query("space") || "default";
const doc = ensureDoc(space, space);
const file = doc.files[fileId];
if (!file) return c.json({ error: "File not found" }, 404); if (!file) return c.json({ error: "File not found" }, 404);
const fullPath = resolve(FILES_DIR, file.storage_path); const fullPath = resolve(FILES_DIR, file.storagePath);
const bunFile = Bun.file(fullPath); const bunFile = Bun.file(fullPath);
if (!await bunFile.exists()) return c.json({ error: "File missing from storage" }, 404); if (!await bunFile.exists()) return c.json({ error: "File missing from storage" }, 404);
return new Response(bunFile, { return new Response(bunFile, {
headers: { headers: {
"Content-Type": file.mime_type || "application/octet-stream", "Content-Type": file.mimeType || "application/octet-stream",
"Content-Disposition": `attachment; filename="${file.original_filename}"`, "Content-Disposition": `attachment; filename="${file.originalFilename}"`,
"Content-Length": String(file.file_size), "Content-Length": String(file.fileSize),
}, },
}); });
}); });
// ── File detail ── // ── File detail ──
routes.get("/api/files/:id", async (c) => { routes.get("/api/files/:id", async (c) => {
const [file] = await sql.unsafe("SELECT * FROM rfiles.media_files WHERE id = $1", [c.req.param("id")]); const fileId = c.req.param("id");
const space = c.req.param("space") || c.req.query("space") || "default";
const doc = ensureDoc(space, space);
const file = doc.files[fileId];
if (!file) return c.json({ error: "File not found" }, 404); if (!file) return c.json({ error: "File not found" }, 404);
return c.json({ file }); return c.json({ file: toPlain(file) });
}); });
// ── File delete ── // ── File delete ──
routes.delete("/api/files/:id", async (c) => { routes.delete("/api/files/:id", async (c) => {
const [file] = await sql.unsafe("SELECT * FROM rfiles.media_files WHERE id = $1", [c.req.param("id")]); const fileId = c.req.param("id");
const space = c.req.param("space") || c.req.query("space") || "default";
const docId = filesDocId(space, space);
const doc = ensureDoc(space, space);
const file = doc.files[fileId];
if (!file) return c.json({ error: "File not found" }, 404); if (!file) return c.json({ error: "File not found" }, 404);
try { await unlink(resolve(FILES_DIR, file.storage_path)); } catch {} try { await unlink(resolve(FILES_DIR, file.storagePath)); } catch {}
await sql.unsafe("DELETE FROM rfiles.media_files WHERE id = $1", [c.req.param("id")]); _syncServer!.changeDoc<FilesDocExt>(docId, `delete file ${fileId}`, (d) => {
delete d.files[fileId];
// Also remove any shares referencing this file
for (const [sid, share] of Object.entries(d.shares)) {
if (share.mediaFileId === fileId) delete d.shares[sid];
}
});
return c.json({ message: "Deleted" }); return c.json({ message: "Deleted" });
}); });
@ -177,13 +302,18 @@ routes.post("/api/files/:id/share", async (c) => {
let claims; let claims;
try { claims = await verifyEncryptIDToken(authToken); } catch { return c.json({ error: "Invalid token" }, 401); } try { claims = await verifyEncryptIDToken(authToken); } catch { return c.json({ error: "Invalid token" }, 401); }
const [file] = await sql.unsafe("SELECT * FROM rfiles.media_files WHERE id = $1", [c.req.param("id")]); const fileId = c.req.param("id");
const space = c.req.param("space") || c.req.query("space") || "default";
const docId = filesDocId(space, space);
const doc = ensureDoc(space, space);
const file = doc.files[fileId];
if (!file) return c.json({ error: "File not found" }, 404); if (!file) return c.json({ error: "File not found" }, 404);
if (file.uploaded_by && file.uploaded_by !== claims.sub) return c.json({ error: "Not authorized" }, 403); if (file.uploadedBy && file.uploadedBy !== claims.sub) return c.json({ error: "Not authorized" }, 403);
const body = await c.req.json<{ expires_in_hours?: number; max_downloads?: number; password?: string; note?: string }>(); const body = await c.req.json<{ expires_in_hours?: number; max_downloads?: number; password?: string; note?: string }>();
const token = generateToken(); const shareToken = generateToken();
const expiresAt = body.expires_in_hours ? new Date(Date.now() + body.expires_in_hours * 3600_000).toISOString() : null; const expiresAt = body.expires_in_hours ? Date.now() + body.expires_in_hours * 3600_000 : null;
const createdBy = claims.sub; const createdBy = claims.sub;
let passwordHash: string | null = null; let passwordHash: string | null = null;
@ -193,27 +323,52 @@ routes.post("/api/files/:id/share", async (c) => {
isPasswordProtected = true; isPasswordProtected = true;
} }
const [share] = await sql.unsafe( const shareId = crypto.randomUUID();
`INSERT INTO rfiles.public_shares (token, media_file_id, created_by, expires_at, max_downloads, is_password_protected, password_hash, note) const logId = crypto.randomUUID();
VALUES ($1, $2, $3, $4, $5, $6, $7, $8) RETURNING *`, const now = Date.now();
[token, file.id, createdBy, expiresAt, body.max_downloads || null, isPasswordProtected, passwordHash, body.note || null]
);
await sql.unsafe( const share: PublicShare = {
"INSERT INTO rfiles.access_logs (media_file_id, share_id, access_type) VALUES ($1, $2, 'share_created')", id: shareId,
[file.id, share.id] token: shareToken,
); mediaFileId: fileId,
createdBy,
expiresAt,
maxDownloads: body.max_downloads || null,
downloadCount: 0,
isActive: true,
isPasswordProtected,
passwordHash,
note: body.note || null,
createdAt: now,
};
return c.json({ share: { ...share, url: `/s/${token}` } }, 201); _syncServer!.changeDoc<FilesDocExt>(docId, `create share for file ${fileId}`, (d) => {
d.shares[shareId] = share;
d.accessLogs[logId] = {
id: logId,
mediaFileId: fileId,
shareId,
ipAddress: null,
userAgent: null,
accessType: 'share_created',
accessedAt: now,
};
});
return c.json({ share: { ...toPlain(share), url: `/s/${shareToken}` } }, 201);
}); });
// ── List shares for a file ── // ── List shares for a file ──
routes.get("/api/files/:id/shares", async (c) => { routes.get("/api/files/:id/shares", async (c) => {
const rows = await sql.unsafe( const fileId = c.req.param("id");
"SELECT * FROM rfiles.public_shares WHERE media_file_id = $1 ORDER BY created_at DESC", const space = c.req.param("space") || c.req.query("space") || "default";
[c.req.param("id")] const doc = ensureDoc(space, space);
);
return c.json({ shares: rows }); const shares = Object.values(doc.shares)
.filter((s) => s.mediaFileId === fileId)
.sort((a, b) => b.createdAt - a.createdAt);
return c.json({ shares: toPlain(shares) });
}); });
// ── Revoke share ── // ── Revoke share ──
@ -223,83 +378,126 @@ routes.post("/api/shares/:shareId/revoke", async (c) => {
let claims; let claims;
try { claims = await verifyEncryptIDToken(authToken); } catch { return c.json({ error: "Invalid token" }, 401); } try { claims = await verifyEncryptIDToken(authToken); } catch { return c.json({ error: "Invalid token" }, 401); }
const [share] = await sql.unsafe( const shareId = c.req.param("shareId");
"SELECT s.*, f.uploaded_by FROM rfiles.public_shares s JOIN rfiles.media_files f ON s.media_file_id = f.id WHERE s.id = $1", const space = c.req.param("space") || c.req.query("space") || "default";
[c.req.param("shareId")] const docId = filesDocId(space, space);
); const doc = ensureDoc(space, space);
if (!share) return c.json({ error: "Share not found" }, 404);
if (share.uploaded_by && share.uploaded_by !== claims.sub) return c.json({ error: "Not authorized" }, 403);
const [revoked] = await sql.unsafe( const share = doc.shares[shareId];
"UPDATE rfiles.public_shares SET is_active = FALSE WHERE id = $1 RETURNING *", if (!share) return c.json({ error: "Share not found" }, 404);
[c.req.param("shareId")]
); // Check authorization via the linked file
return c.json({ message: "Revoked", share: revoked }); const file = doc.files[share.mediaFileId];
if (file?.uploadedBy && file.uploadedBy !== claims.sub) return c.json({ error: "Not authorized" }, 403);
_syncServer!.changeDoc<FilesDocExt>(docId, `revoke share ${shareId}`, (d) => {
d.shares[shareId].isActive = false;
});
const updated = _syncServer!.getDoc<FilesDocExt>(docId)!;
return c.json({ message: "Revoked", share: toPlain(updated.shares[shareId]) });
}); });
// ── Public share download ── // ── Public share download ──
routes.get("/s/:token", async (c) => { routes.get("/s/:token", async (c) => {
const [share] = await sql.unsafe( const shareToken = c.req.param("token");
`SELECT s.*, f.storage_path, f.mime_type, f.original_filename, f.file_size
FROM rfiles.public_shares s JOIN rfiles.media_files f ON s.media_file_id = f.id
WHERE s.token = $1`,
[c.req.param("token")]
);
if (!share) return c.json({ error: "Share not found" }, 404);
if (!share.is_active) return c.json({ error: "Share has been revoked" }, 410);
if (share.expires_at && new Date(share.expires_at) < new Date()) return c.json({ error: "Share has expired" }, 410);
if (share.max_downloads && share.download_count >= share.max_downloads) return c.json({ error: "Download limit reached" }, 410);
if (share.is_password_protected) { // Find the share across all files docs
let foundDocId: string | null = null;
let foundShare: PublicShare | null = null;
let foundFile: MediaFile | null = null;
for (const docId of _syncServer!.getDocIds()) {
if (!docId.includes(':files:cards:')) continue;
const doc = _syncServer!.getDoc<FilesDocExt>(docId);
if (!doc?.shares) continue;
for (const s of Object.values(doc.shares)) {
if (s.token === shareToken) {
foundDocId = docId;
foundShare = s;
foundFile = doc.files[s.mediaFileId] || null;
break;
}
}
if (foundShare) break;
}
if (!foundShare || !foundFile) return c.json({ error: "Share not found" }, 404);
if (!foundShare.isActive) return c.json({ error: "Share has been revoked" }, 410);
if (foundShare.expiresAt && foundShare.expiresAt < Date.now()) return c.json({ error: "Share has expired" }, 410);
if (foundShare.maxDownloads && foundShare.downloadCount >= foundShare.maxDownloads) return c.json({ error: "Download limit reached" }, 410);
if (foundShare.isPasswordProtected) {
const pw = c.req.query("password"); const pw = c.req.query("password");
if (!pw) return c.json({ error: "Password required", is_password_protected: true }, 401); if (!pw) return c.json({ error: "Password required", is_password_protected: true }, 401);
const hash = await hashPassword(pw); const hash = await hashPassword(pw);
if (hash !== share.password_hash) return c.json({ error: "Invalid password" }, 401); if (hash !== foundShare.passwordHash) return c.json({ error: "Invalid password" }, 401);
} }
await sql.unsafe("UPDATE rfiles.public_shares SET download_count = download_count + 1 WHERE id = $1", [share.id]); const logId = crypto.randomUUID();
const ip = c.req.header("X-Forwarded-For")?.split(",")[0]?.trim() || c.req.header("X-Real-IP") || null; const ip = c.req.header("X-Forwarded-For")?.split(",")[0]?.trim() || c.req.header("X-Real-IP") || null;
const ua = c.req.header("User-Agent") || ""; const ua = c.req.header("User-Agent") || "";
await sql.unsafe(
"INSERT INTO rfiles.access_logs (media_file_id, share_id, ip_address, user_agent, access_type) VALUES ($1, $2, $3, $4, 'download')",
[share.media_file_id, share.id, ip, ua.slice(0, 500)]
);
const fullPath = resolve(FILES_DIR, share.storage_path); _syncServer!.changeDoc<FilesDocExt>(foundDocId!, `download via share ${foundShare.id}`, (d) => {
d.shares[foundShare!.id].downloadCount += 1;
d.accessLogs[logId] = {
id: logId,
mediaFileId: foundShare!.mediaFileId,
shareId: foundShare!.id,
ipAddress: ip,
userAgent: ua.slice(0, 500),
accessType: 'download',
accessedAt: Date.now(),
};
});
const fullPath = resolve(FILES_DIR, foundFile.storagePath);
const bunFile = Bun.file(fullPath); const bunFile = Bun.file(fullPath);
if (!await bunFile.exists()) return c.json({ error: "File missing" }, 404); if (!await bunFile.exists()) return c.json({ error: "File missing" }, 404);
return new Response(bunFile, { return new Response(bunFile, {
headers: { headers: {
"Content-Type": share.mime_type || "application/octet-stream", "Content-Type": foundFile.mimeType || "application/octet-stream",
"Content-Disposition": `attachment; filename="${share.original_filename}"`, "Content-Disposition": `attachment; filename="${foundFile.originalFilename}"`,
"Content-Length": String(share.file_size), "Content-Length": String(foundFile.fileSize),
}, },
}); });
}); });
// ── Share info (public) ── // ── Share info (public) ──
routes.get("/s/:token/info", async (c) => { routes.get("/s/:token/info", async (c) => {
const [share] = await sql.unsafe( const shareToken = c.req.param("token");
`SELECT s.is_password_protected, s.is_active, s.expires_at, s.max_downloads, s.download_count, s.note,
f.original_filename, f.mime_type, f.file_size
FROM rfiles.public_shares s JOIN rfiles.media_files f ON s.media_file_id = f.id
WHERE s.token = $1`,
[c.req.param("token")]
);
if (!share) return c.json({ error: "Share not found" }, 404);
const isValid = share.is_active && let foundShare: PublicShare | null = null;
(!share.expires_at || new Date(share.expires_at) > new Date()) && let foundFile: MediaFile | null = null;
(!share.max_downloads || share.download_count < share.max_downloads);
for (const docId of _syncServer!.getDocIds()) {
if (!docId.includes(':files:cards:')) continue;
const doc = _syncServer!.getDoc<FilesDocExt>(docId);
if (!doc?.shares) continue;
for (const s of Object.values(doc.shares)) {
if (s.token === shareToken) {
foundShare = s;
foundFile = doc.files[s.mediaFileId] || null;
break;
}
}
if (foundShare) break;
}
if (!foundShare || !foundFile) return c.json({ error: "Share not found" }, 404);
const isValid = foundShare.isActive &&
(!foundShare.expiresAt || foundShare.expiresAt > Date.now()) &&
(!foundShare.maxDownloads || foundShare.downloadCount < foundShare.maxDownloads);
return c.json({ return c.json({
is_password_protected: share.is_password_protected, is_password_protected: foundShare.isPasswordProtected,
is_valid: isValid, is_valid: isValid,
expires_at: share.expires_at, expires_at: foundShare.expiresAt ? new Date(foundShare.expiresAt).toISOString() : null,
downloads_remaining: share.max_downloads ? share.max_downloads - share.download_count : null, downloads_remaining: foundShare.maxDownloads ? foundShare.maxDownloads - foundShare.downloadCount : null,
file_info: { filename: share.original_filename, mime_type: share.mime_type, size: share.file_size }, file_info: { filename: foundFile.originalFilename, mime_type: foundFile.mimeType, size: foundFile.fileSize },
note: share.note, note: foundShare.note,
}); });
}); });
@ -313,13 +511,30 @@ routes.post("/api/cards", async (c) => {
const body = await c.req.json<{ title: string; body?: string; card_type?: string; tags?: string[]; shared_space?: string }>(); const body = await c.req.json<{ title: string; body?: string; card_type?: string; tags?: string[]; shared_space?: string }>();
const space = c.req.param("space") || body.shared_space || "default"; const space = c.req.param("space") || body.shared_space || "default";
const createdBy = claims.sub; const createdBy = claims.sub;
const docId = filesDocId(space, space);
ensureDoc(space, space);
const [card] = await sql.unsafe( const cardId = crypto.randomUUID();
`INSERT INTO rfiles.memory_cards (shared_space, title, body, card_type, tags, created_by) const now = Date.now();
VALUES ($1, $2, $3, $4, $5::jsonb, $6) RETURNING *`,
[space, body.title, body.body || "", body.card_type || "note", JSON.stringify(body.tags || []), createdBy] const card: MemoryCard = {
); id: cardId,
return c.json({ card }, 201); sharedSpace: space,
title: body.title,
body: body.body || "",
cardType: body.card_type || "note",
tags: body.tags || [],
position: 0,
createdBy,
createdAt: now,
updatedAt: now,
};
_syncServer!.changeDoc<FilesDocExt>(docId, `create card ${cardId}`, (d) => {
d.memoryCards[cardId] = card;
});
return c.json({ card: toPlain(card) }, 201);
}); });
routes.get("/api/cards", async (c) => { routes.get("/api/cards", async (c) => {
@ -327,43 +542,61 @@ routes.get("/api/cards", async (c) => {
const cardType = c.req.query("type"); const cardType = c.req.query("type");
const limit = Math.min(Number(c.req.query("limit")) || 50, 200); const limit = Math.min(Number(c.req.query("limit")) || 50, 200);
let query = "SELECT * FROM rfiles.memory_cards WHERE shared_space = $1"; const doc = ensureDoc(space, space);
const params: any[] = [space];
if (cardType) { query += " AND card_type = $2"; params.push(cardType); }
query += " ORDER BY position, created_at DESC LIMIT $" + (params.length + 1);
params.push(limit);
const rows = await sql.unsafe(query, params); let cards = Object.values(doc.memoryCards)
return c.json({ cards: rows, total: rows.length }); .filter((card) => card.sharedSpace === space);
if (cardType) {
cards = cards.filter((card) => card.cardType === cardType);
}
// Sort by position ascending, then createdAt descending
cards.sort((a, b) => a.position - b.position || b.createdAt - a.createdAt);
cards = cards.slice(0, limit);
return c.json({ cards: toPlain(cards), total: cards.length });
}); });
routes.patch("/api/cards/:id", async (c) => { routes.patch("/api/cards/:id", async (c) => {
const body = await c.req.json<{ title?: string; body?: string; card_type?: string; tags?: string[]; position?: number }>(); const cardId = c.req.param("id");
const sets: string[] = []; const space = c.req.param("space") || c.req.query("space") || "default";
const params: any[] = []; const docId = filesDocId(space, space);
let idx = 1; const doc = ensureDoc(space, space);
if (body.title !== undefined) { sets.push(`title = $${idx}`); params.push(body.title); idx++; } const card = doc.memoryCards[cardId];
if (body.body !== undefined) { sets.push(`body = $${idx}`); params.push(body.body); idx++; }
if (body.card_type !== undefined) { sets.push(`card_type = $${idx}`); params.push(body.card_type); idx++; }
if (body.tags !== undefined) { sets.push(`tags = $${idx}::jsonb`); params.push(JSON.stringify(body.tags)); idx++; }
if (body.position !== undefined) { sets.push(`position = $${idx}`); params.push(body.position); idx++; }
if (sets.length === 0) return c.json({ error: "No fields to update" }, 400);
sets.push(`updated_at = NOW()`);
params.push(c.req.param("id"));
const [card] = await sql.unsafe(
`UPDATE rfiles.memory_cards SET ${sets.join(", ")} WHERE id = $${idx} RETURNING *`,
params
);
if (!card) return c.json({ error: "Card not found" }, 404); if (!card) return c.json({ error: "Card not found" }, 404);
return c.json({ card });
const body = await c.req.json<{ title?: string; body?: string; card_type?: string; tags?: string[]; position?: number }>();
if (body.title === undefined && body.body === undefined && body.card_type === undefined && body.tags === undefined && body.position === undefined) {
return c.json({ error: "No fields to update" }, 400);
}
_syncServer!.changeDoc<FilesDocExt>(docId, `update card ${cardId}`, (d) => {
const c = d.memoryCards[cardId];
if (body.title !== undefined) c.title = body.title;
if (body.body !== undefined) c.body = body.body;
if (body.card_type !== undefined) c.cardType = body.card_type;
if (body.tags !== undefined) c.tags = body.tags;
if (body.position !== undefined) c.position = body.position;
c.updatedAt = Date.now();
});
const updated = _syncServer!.getDoc<FilesDocExt>(docId)!;
return c.json({ card: toPlain(updated.memoryCards[cardId]) });
}); });
routes.delete("/api/cards/:id", async (c) => { routes.delete("/api/cards/:id", async (c) => {
const [card] = await sql.unsafe("DELETE FROM rfiles.memory_cards WHERE id = $1 RETURNING id", [c.req.param("id")]); const cardId = c.req.param("id");
if (!card) return c.json({ error: "Card not found" }, 404); const space = c.req.param("space") || c.req.query("space") || "default";
const docId = filesDocId(space, space);
const doc = ensureDoc(space, space);
if (!doc.memoryCards[cardId]) return c.json({ error: "Card not found" }, 404);
_syncServer!.changeDoc<FilesDocExt>(docId, `delete card ${cardId}`, (d) => {
delete d.memoryCards[cardId];
});
return c.json({ message: "Deleted" }); return c.json({ message: "Deleted" });
}); });
@ -408,7 +641,6 @@ export const filesModule: RSpaceModule = {
landingPage: renderLanding, landingPage: renderLanding,
async onInit(ctx) { async onInit(ctx) {
_syncServer = ctx.syncServer; _syncServer = ctx.syncServer;
await initDB();
}, },
standaloneDomain: "rfiles.online", standaloneDomain: "rfiles.online",
externalApp: { url: "https://files.rfiles.online", name: "Seafile" }, externalApp: { url: "https://files.rfiles.online", name: "Seafile" },

View File

@ -5,31 +5,32 @@
*/ */
import { Hono } from "hono"; import { Hono } from "hono";
import { readFileSync } from "node:fs"; import * as Automerge from "@automerge/automerge";
import { resolve } from "node:path";
import { sql } from "../../shared/db/pool";
import { renderShell } from "../../server/shell"; import { renderShell } from "../../server/shell";
import type { RSpaceModule } from "../../shared/module"; import type { RSpaceModule } from "../../shared/module";
import { getModuleInfoList } from "../../shared/module"; import { getModuleInfoList } from "../../shared/module";
import { verifyEncryptIDToken, extractToken } from "@encryptid/sdk/server"; import { verifyEncryptIDToken, extractToken } from "@encryptid/sdk/server";
import { renderLanding } from "./landing"; import { renderLanding } from "./landing";
import type { SyncServer } from '../../server/local-first/sync-server'; import type { SyncServer } from '../../server/local-first/sync-server';
import { fundsSchema } from './schemas'; import { fundsSchema, fundsDocId, type FundsDoc, type SpaceFlow } from './schemas';
let _syncServer: SyncServer | null = null; let _syncServer: SyncServer | null = null;
const FLOW_SERVICE_URL = process.env.FLOW_SERVICE_URL || "http://payment-flow:3010"; const FLOW_SERVICE_URL = process.env.FLOW_SERVICE_URL || "http://payment-flow:3010";
// ── DB initialization ── function ensureDoc(space: string): FundsDoc {
const SCHEMA_SQL = readFileSync(resolve(import.meta.dir, "db/schema.sql"), "utf-8"); const docId = fundsDocId(space);
let doc = _syncServer!.getDoc<FundsDoc>(docId);
async function initDB() { if (!doc) {
try { doc = Automerge.change(Automerge.init<FundsDoc>(), 'init', (d) => {
await sql.unsafe(SCHEMA_SQL); const init = fundsSchema.init();
console.log("[Funds] DB schema initialized"); d.meta = init.meta;
} catch (e) { d.meta.spaceSlug = space;
console.error("[Funds] DB init error:", e); d.spaceFlows = {};
});
_syncServer!.setDoc(docId, doc);
} }
return doc;
} }
const routes = new Hono(); const routes = new Hono();
@ -42,29 +43,22 @@ routes.get("/api/flows", async (c) => {
const owner = c.req.header("X-Owner-Address") || ""; const owner = c.req.header("X-Owner-Address") || "";
const space = c.req.query("space") || ""; const space = c.req.query("space") || "";
// If space filter provided, get flow IDs from space_flows table // If space filter provided, get flow IDs from Automerge doc
if (space) { if (space) {
try { const doc = ensureDoc(space);
const rows = await sql.unsafe( const flowIds = Object.values(doc.spaceFlows).map((sf) => sf.flowId);
"SELECT flow_id FROM rfunds.space_flows WHERE space_slug = $1", if (flowIds.length === 0) return c.json([]);
[space],
);
if (rows.length === 0) return c.json([]);
// Fetch each flow from flow-service const flows = await Promise.all(
const flows = await Promise.all( flowIds.map(async (fid) => {
rows.map(async (r: any) => { try {
try { const res = await fetch(`${FLOW_SERVICE_URL}/api/flows/${fid}`);
const res = await fetch(`${FLOW_SERVICE_URL}/api/flows/${r.flow_id}`); if (res.ok) return await res.json();
if (res.ok) return await res.json(); } catch {}
} catch {} return null;
return null; }),
}), );
); return c.json(flows.filter(Boolean));
return c.json(flows.filter(Boolean));
} catch {
// Fall through to unfiltered fetch
}
} }
const res = await fetch(`${FLOW_SERVICE_URL}/api/flows?owner=${encodeURIComponent(owner)}`); const res = await fetch(`${FLOW_SERVICE_URL}/api/flows?owner=${encodeURIComponent(owner)}`);
@ -163,11 +157,14 @@ routes.post("/api/space-flows", async (c) => {
const { space, flowId } = await c.req.json(); const { space, flowId } = await c.req.json();
if (!space || !flowId) return c.json({ error: "space and flowId required" }, 400); if (!space || !flowId) return c.json({ error: "space and flowId required" }, 400);
await sql.unsafe( const docId = fundsDocId(space);
`INSERT INTO rfunds.space_flows (space_slug, flow_id, added_by) ensureDoc(space);
VALUES ($1, $2, $3) ON CONFLICT DO NOTHING`, _syncServer!.changeDoc<FundsDoc>(docId, 'add space flow', (d) => {
[space, flowId, claims.sub], const key = `${space}:${flowId}`;
); if (!d.spaceFlows[key]) {
d.spaceFlows[key] = { id: key, spaceSlug: space, flowId, addedBy: claims.sub, createdAt: Date.now() };
}
});
return c.json({ ok: true }); return c.json({ ok: true });
}); });
@ -181,10 +178,16 @@ routes.delete("/api/space-flows/:flowId", async (c) => {
const space = c.req.query("space") || ""; const space = c.req.query("space") || "";
if (!space) return c.json({ error: "space query param required" }, 400); if (!space) return c.json({ error: "space query param required" }, 400);
await sql.unsafe( const docId = fundsDocId(space);
"DELETE FROM rfunds.space_flows WHERE space_slug = $1 AND flow_id = $2", const doc = _syncServer!.getDoc<FundsDoc>(docId);
[space, flowId], if (doc) {
); const key = `${space}:${flowId}`;
if (doc.spaceFlows[key]) {
_syncServer!.changeDoc<FundsDoc>(docId, 'remove space flow', (d) => {
delete d.spaceFlows[key];
});
}
}
return c.json({ ok: true }); return c.json({ ok: true });
}); });
@ -254,7 +257,6 @@ export const fundsModule: RSpaceModule = {
landingPage: renderLanding, landingPage: renderLanding,
async onInit(ctx) { async onInit(ctx) {
_syncServer = ctx.syncServer; _syncServer = ctx.syncServer;
await initDB();
}, },
standaloneDomain: "rfunds.online", standaloneDomain: "rfunds.online",
feeds: [ feeds: [

File diff suppressed because it is too large Load Diff

View File

@ -4,153 +4,68 @@
* Port of rnotes-online (Next.js + Prisma Hono + postgres.js). * Port of rnotes-online (Next.js + Prisma Hono + postgres.js).
* Supports multiple note types: text, code, bookmark, audio, image, file. * Supports multiple note types: text, code, bookmark, audio, image, file.
* *
* Local-first migration: dual-write (Automerge + PG) during transition. * Local-first: All data stored exclusively in Automerge documents via SyncServer.
*/ */
import { Hono } from "hono"; import { Hono } from "hono";
import { readFileSync } from "node:fs";
import { resolve } from "node:path";
import * as Automerge from "@automerge/automerge"; import * as Automerge from "@automerge/automerge";
import { sql } from "../../shared/db/pool";
import { renderShell } from "../../server/shell"; import { renderShell } from "../../server/shell";
import { getModuleInfoList } from "../../shared/module"; import { getModuleInfoList } from "../../shared/module";
import type { RSpaceModule, SpaceLifecycleContext } from "../../shared/module"; import type { RSpaceModule, SpaceLifecycleContext } from "../../shared/module";
import { verifyEncryptIDToken, extractToken } from "@encryptid/sdk/server"; import { verifyEncryptIDToken, extractToken } from "@encryptid/sdk/server";
import { renderLanding } from "./landing"; import { renderLanding } from "./landing";
import { notebookSchema, notebookDocId } from "./schemas"; import { notebookSchema, notebookDocId, createNoteItem } from "./schemas";
import type { NotebookDoc, NoteItem } from "./schemas"; import type { NotebookDoc, NoteItem } from "./schemas";
import type { SyncServer } from "../../server/local-first/sync-server"; import type { SyncServer } from "../../server/local-first/sync-server";
const routes = new Hono(); const routes = new Hono();
// ── DB initialization ──
const SCHEMA_SQL = readFileSync(resolve(import.meta.dir, "db/schema.sql"), "utf-8");
async function initDB() {
try {
await sql.unsafe(SCHEMA_SQL);
console.log("[Notes] DB schema initialized");
} catch (e) {
console.error("[Notes] DB init error:", e);
}
}
async function seedDemoIfEmpty() {
try {
const count = await sql.unsafe("SELECT count(*)::int as cnt FROM rnotes.notebooks");
if (parseInt(count[0].cnt) > 0) return;
// Notebook 1: Project Ideas
const nb1 = await sql.unsafe(
`INSERT INTO rnotes.notebooks (title, description, cover_color, is_public)
VALUES ('Project Ideas', 'Brainstorms and design notes for the r* ecosystem', '#6366f1', true) RETURNING id`
);
// Notebook 2: Meeting Notes
const nb2 = await sql.unsafe(
`INSERT INTO rnotes.notebooks (title, description, cover_color, is_public)
VALUES ('Meeting Notes', 'Weekly standups, design reviews, and retrospectives', '#f59e0b', true) RETURNING id`
);
// Notebook 3: How-To Guides
const nb3 = await sql.unsafe(
`INSERT INTO rnotes.notebooks (title, description, cover_color, is_public)
VALUES ('How-To Guides', 'Tutorials and onboarding guides for contributors', '#10b981', true) RETURNING id`
);
// Create tags
const tagIds: Record<string, string> = {};
for (const name of ["design", "architecture", "cosmolocal", "governance", "onboarding", "review", "standup"]) {
const row = await sql.unsafe(
`INSERT INTO rnotes.tags (name) VALUES ($1) ON CONFLICT (name) DO UPDATE SET name = $1 RETURNING id`,
[name]
);
tagIds[name] = row[0].id;
}
// Seed notes
const notes = [
{
nbId: nb1[0].id, title: "Cosmolocal Manufacturing Network",
content: "## Vision\n\nDesign global, manufacture local. Every creative work should be producible by the nearest capable provider.\n\n## Key Components\n\n- **Artifact Spec**: Standardized envelope describing what to produce\n- **Provider Registry**: Directory of local makers with capabilities + pricing\n- **rCart**: Marketplace connecting creators to providers\n- **Revenue Splits**: 50% provider, 35% creator, 15% community\n\n## Open Questions\n\n- How do we handle quality assurance across distributed providers?\n- Should providers be able to set custom margins?\n- What's the minimum viable set of capabilities for launch?",
tags: ["cosmolocal", "architecture"], pinned: true,
},
{
nbId: nb1[0].id, title: "Revenue Sharing Model",
content: "## Current Split\n\n| Recipient | Share | Rationale |\n|-----------|-------|-----------|\n| Provider | 50% | Covers materials, labor, shipping |\n| Creator | 35% | Design and creative work |\n| Community | 15% | Platform maintenance, commons fund |\n\n## Enoughness Thresholds\n\nOnce a funnel reaches its sufficient threshold, surplus flows to the next highest-need funnel. This prevents accumulation and keeps resources flowing.\n\n## Implementation\n\nrFunds Flow Service handles deposits from rCart. Each order total is routed through the configured flow → funnel → overflow splits.",
tags: ["cosmolocal", "governance"],
},
{
nbId: nb1[0].id, title: "FUN Model: Forget, Update, New",
content: "## Replacing CRUD\n\nNothing is permanently destroyed in rSpace.\n\n- **Forget** replaces Delete — soft-delete with `forgotten: true`. Shapes stay in document, hidden from canvas. Memory panel lets you browse + Remember.\n- **Update** stays the same — public `sync.updateShape()` for programmatic updates\n- **New** replaces Create — language shift: toolbar says \"New X\", events are `new-shape`\n\n## Why?\n\nData sovereignty means users should always be able to recover their work. The Memory panel makes forgotten shapes discoverable, like a digital archive.",
tags: ["design", "architecture"],
},
{
nbId: nb2[0].id, title: "Weekly Standup — Feb 15, 2026",
content: "## Attendees\n\nAlice, Bob, Carol\n\n## Updates\n\n**Alice**: Finished EncryptID guardian recovery flow. 2-of-3 guardian approval working. Next: device linking via QR code.\n\n**Bob**: Provider registry now has 6 printers globally. Working on proximity search with earthdistance extension.\n\n**Carol**: rFunds river visualization deployed. Enoughness layer showing golden glow on sufficient funnels.\n\n## Action Items\n\n- [ ] Alice: Document guardian recovery API endpoints\n- [ ] Bob: Add turnaround time estimates to provider matching\n- [ ] Carol: Add demo mode to river view with mock data",
tags: ["standup"],
},
{
nbId: nb2[0].id, title: "Design Review — rBooks Flipbook Reader",
content: "## What We Reviewed\n\nThe react-pageflip integration for PDF reading in rBooks.\n\n## Feedback\n\n1. **Page turn animation** — smooth, feels good on desktop. On mobile, swipe gesture needs larger hit area.\n2. **PDF rendering** — react-pdf handles most PDFs well. Large files (>50MB) cause browser memory issues.\n3. **Read Locally mode** — IndexedDB storage works. Need to show storage usage somewhere.\n\n## Decisions\n\n- Ship current version, iterate on mobile\n- Add a 50MB soft warning on upload\n- Explore PDF.js worker for background rendering",
tags: ["review", "design"],
},
{
nbId: nb3[0].id, title: "Getting Started with rSpace Development",
content: "## Prerequisites\n\n- Bun runtime (v1.3+)\n- Docker + Docker Compose\n- Git access to Gitea\n\n## Local Setup\n\n```bash\ngit clone ssh://git@gitea.jeffemmett.com:223/jeffemmett/rspace-online.git\ncd rspace-online\nbun install\nbun run dev\n```\n\n## Module Structure\n\nEach module lives in `modules/{name}/` and exports an `RSpaceModule` interface:\n\n```typescript\nexport interface RSpaceModule {\n id: string;\n name: string;\n icon: string;\n description: string;\n routes: Hono;\n}\n```\n\n## Adding a New Module\n\n1. Create `modules/{name}/mod.ts`\n2. Create `modules/{name}/components/` for web components\n3. Add build step in `vite.config.ts`\n4. Register in `server/index.ts`",
tags: ["onboarding"],
},
{
nbId: nb3[0].id, title: "How to Add a Cosmolocal Provider",
content: "## Overview\n\nProviders are local print shops, makerspaces, or studios that can fulfill rCart orders.\n\n## Steps\n\n1. Visit `providers.mycofi.earth`\n2. Sign in with your rStack passkey\n3. Click \"Register Provider\"\n4. Fill in:\n - Name, location (address + coordinates)\n - Capabilities (laser-print, risograph, screen-print, etc.)\n - Substrates (paper types, fabric, vinyl)\n - Turnaround time and pricing\n5. Submit for review\n\n## Matching Algorithm\n\nWhen an order comes in, rCart matches based on:\n- Required capabilities vs. provider capabilities\n- Geographic distance (earthdistance extension)\n- Turnaround time\n- Price",
tags: ["cosmolocal", "onboarding"],
},
];
for (const n of notes) {
const row = await sql.unsafe(
`INSERT INTO rnotes.notes (notebook_id, title, content, content_plain, type, is_pinned)
VALUES ($1, $2, $3, $4, 'NOTE', $5) RETURNING id`,
[n.nbId, n.title, n.content, n.content.replace(/<[^>]*>/g, " ").replace(/[#*|`\-\[\]]/g, " ").replace(/\s+/g, " ").trim(), n.pinned || false]
);
for (const tagName of n.tags) {
if (tagIds[tagName]) {
await sql.unsafe(
"INSERT INTO rnotes.note_tags (note_id, tag_id) VALUES ($1, $2) ON CONFLICT DO NOTHING",
[row[0].id, tagIds[tagName]]
);
}
}
}
console.log("[Notes] Demo data seeded: 3 notebooks, 7 notes, 7 tags");
} catch (e) {
console.error("[Notes] Seed error:", e);
}
}
// initDB + seedDemo are called from onInit lifecycle hook (see module export below)
// ── SyncServer ref (set during onInit) ── // ── SyncServer ref (set during onInit) ──
let _syncServer: SyncServer | null = null; let _syncServer: SyncServer | null = null;
/** Check if a space has been migrated to local-first for notes. */ // ── Automerge helpers ──
function isLocalFirst(space: string): boolean {
if (!_syncServer) return false; /** Lazily ensure a notebook doc exists for a given space + notebookId. */
// A space is local-first if any notebook doc exists for it in the SyncServer function ensureDoc(space: string, notebookId: string): NotebookDoc {
// We check by looking for docs with the pattern {space}:notes:notebooks:* const docId = notebookDocId(space, notebookId);
return _syncServer.getDoc(`${space}:notes:notebooks:default`) !== undefined; let doc = _syncServer!.getDoc<NotebookDoc>(docId);
if (!doc) {
doc = Automerge.change(Automerge.init<NotebookDoc>(), 'init', (d) => {
const init = notebookSchema.init();
Object.assign(d, init);
d.meta.spaceSlug = space;
d.notebook.id = notebookId;
});
_syncServer!.setDoc(docId, doc);
}
return doc;
}
/** Generate a URL-safe slug from a title. */
function slugify(title: string): string {
return title
.toLowerCase()
.replace(/[^a-z0-9]+/g, "-")
.replace(/^-|-$/g, "")
.slice(0, 80) || "untitled";
}
/** Generate a compact unique ID (timestamp + random suffix). */
function newId(): string {
return `${Date.now().toString(36)}-${Math.random().toString(36).slice(2, 8)}`;
} }
// ── Automerge ↔ REST conversion helpers ── // ── Automerge ↔ REST conversion helpers ──
/** List all notebook docs for a space from the SyncServer. */ /** List all notebook docs for a space from the SyncServer. */
function listAutomergeNotebooks(space: string): { docId: string; doc: NotebookDoc }[] { function listNotebooks(space: string): { docId: string; doc: NotebookDoc }[] {
if (!_syncServer) return []; if (!_syncServer) return [];
const results: { docId: string; doc: NotebookDoc }[] = []; const results: { docId: string; doc: NotebookDoc }[] = [];
const prefix = `${space}:notes:notebooks:`; const prefix = `${space}:notes:notebooks:`;
for (const docId of _syncServer.listDocs()) { for (const docId of _syncServer.listDocs()) {
if (docId.startsWith(prefix)) { if (docId.startsWith(prefix)) {
const doc = _syncServer.getDoc<NotebookDoc>(docId); const doc = _syncServer.getDoc<NotebookDoc>(docId);
if (doc) results.push({ docId, doc }); if (doc && doc.notebook && doc.notebook.title) results.push({ docId, doc });
} }
} }
return results; return results;
@ -196,99 +111,123 @@ function noteToRest(item: NoteItem) {
} }
/** Find the notebook doc that contains a given note ID. */ /** Find the notebook doc that contains a given note ID. */
function findNoteInAutomerge(space: string, noteId: string): { docId: string; doc: NotebookDoc; item: NoteItem } | null { function findNote(space: string, noteId: string): { docId: string; doc: NotebookDoc; item: NoteItem } | null {
for (const { docId, doc } of listAutomergeNotebooks(space)) { for (const { docId, doc } of listNotebooks(space)) {
const item = doc.items[noteId]; const item = doc.items[noteId];
if (item) return { docId, doc, item }; if (item) return { docId, doc, item };
} }
return null; return null;
} }
/** Write a note to the Automerge doc (creates/updates). */ // ── Seed demo data into Automerge (runs once if no notebooks exist) ──
function writeNoteToAutomerge(space: string, notebookPgId: string, noteId: string, data: Partial<NoteItem>): void {
function seedDemoIfEmpty(space: string) {
if (!_syncServer) return; if (!_syncServer) return;
// Find the Automerge notebook doc for this PG notebook
// Convention: PG notebook UUID maps to docId suffix
const docId = notebookDocId(space, notebookPgId);
const doc = _syncServer.getDoc<NotebookDoc>(docId);
if (!doc) return; // not migrated yet
_syncServer.changeDoc<NotebookDoc>(docId, `Update note ${noteId}`, (d) => { // If the space already has notebooks, skip
if (!d.items[noteId]) { if (listNotebooks(space).length > 0) return;
// New note
d.items[noteId] = { const now = Date.now();
id: noteId,
notebookId: notebookPgId, // Notebook 1: Project Ideas
authorId: data.authorId ?? null, const nb1Id = newId();
title: data.title ?? '', const nb1DocId = notebookDocId(space, nb1Id);
content: data.content ?? '', const nb1Doc = Automerge.change(Automerge.init<NotebookDoc>(), "Seed: Project Ideas", (d) => {
contentPlain: data.contentPlain ?? '', d.meta = { module: "notes", collection: "notebooks", version: 1, spaceSlug: space, createdAt: now };
type: data.type ?? 'NOTE', d.notebook = { id: nb1Id, title: "Project Ideas", slug: "project-ideas", description: "Brainstorms and design notes for the r* ecosystem", coverColor: "#6366f1", isPublic: true, createdAt: now, updatedAt: now };
url: data.url ?? null, d.items = {};
language: data.language ?? null,
fileUrl: data.fileUrl ?? null,
mimeType: data.mimeType ?? null,
fileSize: data.fileSize ?? null,
duration: data.duration ?? null,
isPinned: data.isPinned ?? false,
sortOrder: data.sortOrder ?? 0,
tags: data.tags ?? [],
createdAt: data.createdAt ?? Date.now(),
updatedAt: Date.now(),
};
} else {
// Update existing fields
const item = d.items[noteId];
if (data.title !== undefined) item.title = data.title;
if (data.content !== undefined) item.content = data.content;
if (data.contentPlain !== undefined) item.contentPlain = data.contentPlain;
if (data.type !== undefined) item.type = data.type;
if (data.url !== undefined) item.url = data.url;
if (data.language !== undefined) item.language = data.language;
if (data.isPinned !== undefined) item.isPinned = data.isPinned;
if (data.sortOrder !== undefined) item.sortOrder = data.sortOrder;
if (data.tags !== undefined) item.tags = data.tags;
item.updatedAt = Date.now();
}
}); });
} _syncServer.setDoc(nb1DocId, nb1Doc);
// ── Helper: get or create user ── // Notebook 2: Meeting Notes
async function getOrCreateUser(did: string, username?: string) { const nb2Id = newId();
const rows = await sql.unsafe( const nb2DocId = notebookDocId(space, nb2Id);
`INSERT INTO rnotes.users (did, username) VALUES ($1, $2) const nb2Doc = Automerge.change(Automerge.init<NotebookDoc>(), "Seed: Meeting Notes", (d) => {
ON CONFLICT (did) DO UPDATE SET username = COALESCE($2, rnotes.users.username) d.meta = { module: "notes", collection: "notebooks", version: 1, spaceSlug: space, createdAt: now };
RETURNING *`, d.notebook = { id: nb2Id, title: "Meeting Notes", slug: "meeting-notes", description: "Weekly standups, design reviews, and retrospectives", coverColor: "#f59e0b", isPublic: true, createdAt: now, updatedAt: now };
[did, username || null] d.items = {};
); });
return rows[0]; _syncServer.setDoc(nb2DocId, nb2Doc);
// Notebook 3: How-To Guides
const nb3Id = newId();
const nb3DocId = notebookDocId(space, nb3Id);
const nb3Doc = Automerge.change(Automerge.init<NotebookDoc>(), "Seed: How-To Guides", (d) => {
d.meta = { module: "notes", collection: "notebooks", version: 1, spaceSlug: space, createdAt: now };
d.notebook = { id: nb3Id, title: "How-To Guides", slug: "how-to-guides", description: "Tutorials and onboarding guides for contributors", coverColor: "#10b981", isPublic: true, createdAt: now, updatedAt: now };
d.items = {};
});
_syncServer.setDoc(nb3DocId, nb3Doc);
// Seed notes into notebooks
const notes = [
{
nbId: nb1Id, nbDocId: nb1DocId, title: "Cosmolocal Manufacturing Network",
content: "## Vision\n\nDesign global, manufacture local. Every creative work should be producible by the nearest capable provider.\n\n## Key Components\n\n- **Artifact Spec**: Standardized envelope describing what to produce\n- **Provider Registry**: Directory of local makers with capabilities + pricing\n- **rCart**: Marketplace connecting creators to providers\n- **Revenue Splits**: 50% provider, 35% creator, 15% community\n\n## Open Questions\n\n- How do we handle quality assurance across distributed providers?\n- Should providers be able to set custom margins?\n- What's the minimum viable set of capabilities for launch?",
tags: ["cosmolocal", "architecture"], pinned: true,
},
{
nbId: nb1Id, nbDocId: nb1DocId, title: "Revenue Sharing Model",
content: "## Current Split\n\n| Recipient | Share | Rationale |\n|-----------|-------|-----------|\n| Provider | 50% | Covers materials, labor, shipping |\n| Creator | 35% | Design and creative work |\n| Community | 15% | Platform maintenance, commons fund |\n\n## Enoughness Thresholds\n\nOnce a funnel reaches its sufficient threshold, surplus flows to the next highest-need funnel. This prevents accumulation and keeps resources flowing.\n\n## Implementation\n\nrFunds Flow Service handles deposits from rCart. Each order total is routed through the configured flow → funnel → overflow splits.",
tags: ["cosmolocal", "governance"],
},
{
nbId: nb1Id, nbDocId: nb1DocId, title: "FUN Model: Forget, Update, New",
content: "## Replacing CRUD\n\nNothing is permanently destroyed in rSpace.\n\n- **Forget** replaces Delete — soft-delete with `forgotten: true`. Shapes stay in document, hidden from canvas. Memory panel lets you browse + Remember.\n- **Update** stays the same — public `sync.updateShape()` for programmatic updates\n- **New** replaces Create — language shift: toolbar says \"New X\", events are `new-shape`\n\n## Why?\n\nData sovereignty means users should always be able to recover their work. The Memory panel makes forgotten shapes discoverable, like a digital archive.",
tags: ["design", "architecture"],
},
{
nbId: nb2Id, nbDocId: nb2DocId, title: "Weekly Standup — Feb 15, 2026",
content: "## Attendees\n\nAlice, Bob, Carol\n\n## Updates\n\n**Alice**: Finished EncryptID guardian recovery flow. 2-of-3 guardian approval working. Next: device linking via QR code.\n\n**Bob**: Provider registry now has 6 printers globally. Working on proximity search with earthdistance extension.\n\n**Carol**: rFunds river visualization deployed. Enoughness layer showing golden glow on sufficient funnels.\n\n## Action Items\n\n- [ ] Alice: Document guardian recovery API endpoints\n- [ ] Bob: Add turnaround time estimates to provider matching\n- [ ] Carol: Add demo mode to river view with mock data",
tags: ["standup"],
},
{
nbId: nb2Id, nbDocId: nb2DocId, title: "Design Review — rBooks Flipbook Reader",
content: "## What We Reviewed\n\nThe react-pageflip integration for PDF reading in rBooks.\n\n## Feedback\n\n1. **Page turn animation** — smooth, feels good on desktop. On mobile, swipe gesture needs larger hit area.\n2. **PDF rendering** — react-pdf handles most PDFs well. Large files (>50MB) cause browser memory issues.\n3. **Read Locally mode** — IndexedDB storage works. Need to show storage usage somewhere.\n\n## Decisions\n\n- Ship current version, iterate on mobile\n- Add a 50MB soft warning on upload\n- Explore PDF.js worker for background rendering",
tags: ["review", "design"],
},
{
nbId: nb3Id, nbDocId: nb3DocId, title: "Getting Started with rSpace Development",
content: "## Prerequisites\n\n- Bun runtime (v1.3+)\n- Docker + Docker Compose\n- Git access to Gitea\n\n## Local Setup\n\n```bash\ngit clone ssh://git@gitea.jeffemmett.com:223/jeffemmett/rspace-online.git\ncd rspace-online\nbun install\nbun run dev\n```\n\n## Module Structure\n\nEach module lives in `modules/{name}/` and exports an `RSpaceModule` interface:\n\n```typescript\nexport interface RSpaceModule {\n id: string;\n name: string;\n icon: string;\n description: string;\n routes: Hono;\n}\n```\n\n## Adding a New Module\n\n1. Create `modules/{name}/mod.ts`\n2. Create `modules/{name}/components/` for web components\n3. Add build step in `vite.config.ts`\n4. Register in `server/index.ts`",
tags: ["onboarding"],
},
{
nbId: nb3Id, nbDocId: nb3DocId, title: "How to Add a Cosmolocal Provider",
content: "## Overview\n\nProviders are local print shops, makerspaces, or studios that can fulfill rCart orders.\n\n## Steps\n\n1. Visit `providers.mycofi.earth`\n2. Sign in with your rStack passkey\n3. Click \"Register Provider\"\n4. Fill in:\n - Name, location (address + coordinates)\n - Capabilities (laser-print, risograph, screen-print, etc.)\n - Substrates (paper types, fabric, vinyl)\n - Turnaround time and pricing\n5. Submit for review\n\n## Matching Algorithm\n\nWhen an order comes in, rCart matches based on:\n- Required capabilities vs. provider capabilities\n- Geographic distance (earthdistance extension)\n- Turnaround time\n- Price",
tags: ["cosmolocal", "onboarding"],
},
];
for (const n of notes) {
const noteId = newId();
const contentPlain = n.content.replace(/<[^>]*>/g, " ").replace(/[#*|`\-\[\]]/g, " ").replace(/\s+/g, " ").trim();
const item = createNoteItem(noteId, n.nbId, n.title, {
content: n.content,
contentPlain,
tags: n.tags,
isPinned: n.pinned || false,
});
_syncServer!.changeDoc<NotebookDoc>(n.nbDocId, `Seed note: ${n.title}`, (d) => {
d.items[noteId] = item;
});
}
console.log("[Notes] Demo data seeded: 3 notebooks, 7 notes");
} }
// ── Notebooks API ── // ── Notebooks API ──
// GET /api/notebooks — list notebooks (Automerge-first, PG fallback) // GET /api/notebooks — list notebooks
routes.get("/api/notebooks", async (c) => { routes.get("/api/notebooks", async (c) => {
const space = c.req.param("space") || "demo"; const space = c.req.param("space") || "demo";
// Try Automerge first const notebooks = listNotebooks(space).map(({ doc }) => notebookToRest(doc));
if (isLocalFirst(space)) { notebooks.sort((a, b) => new Date(b.updated_at).getTime() - new Date(a.updated_at).getTime());
const notebooks = listAutomergeNotebooks(space).map(({ doc }) => notebookToRest(doc)); return c.json({ notebooks, source: "automerge" });
notebooks.sort((a, b) => new Date(b.updated_at).getTime() - new Date(a.updated_at).getTime());
return c.json({ notebooks, source: "automerge" });
}
// PG fallback
const rows = await sql.unsafe(
`SELECT n.*, count(note.id) as note_count
FROM rnotes.notebooks n
LEFT JOIN rnotes.notes note ON note.notebook_id = n.id
GROUP BY n.id
ORDER BY n.updated_at DESC LIMIT 50`
);
return c.json({ notebooks: rows });
}); });
// POST /api/notebooks — create notebook (dual-write) // POST /api/notebooks — create notebook
routes.post("/api/notebooks", async (c) => { routes.post("/api/notebooks", async (c) => {
const space = c.req.param("space") || "demo"; const space = c.req.param("space") || "demo";
const token = extractToken(c.req.raw.headers); const token = extractToken(c.req.raw.headers);
@ -299,75 +238,48 @@ routes.post("/api/notebooks", async (c) => {
const body = await c.req.json(); const body = await c.req.json();
const { title, description, cover_color } = body; const { title, description, cover_color } = body;
// PG write const nbTitle = title || "Untitled Notebook";
const user = await getOrCreateUser(claims.sub, claims.username); const notebookId = newId();
const rows = await sql.unsafe( const now = Date.now();
`INSERT INTO rnotes.notebooks (title, description, cover_color, owner_id)
VALUES ($1, $2, $3, $4) RETURNING *`,
[title || "Untitled Notebook", description || null, cover_color || "#3b82f6", user.id]
);
const pgRow = rows[0];
// Automerge dual-write: create a new notebook doc const doc = ensureDoc(space, notebookId);
if (_syncServer && isLocalFirst(space)) { _syncServer!.changeDoc<NotebookDoc>(notebookDocId(space, notebookId), "Create notebook", (d) => {
const docId = notebookDocId(space, pgRow.id); d.notebook.id = notebookId;
if (!_syncServer.getDoc(docId)) { d.notebook.title = nbTitle;
const doc = Automerge.init<NotebookDoc>(); d.notebook.slug = slugify(nbTitle);
const initialized = Automerge.change(doc, "Create notebook", (d) => { d.notebook.description = description || "";
d.meta = { module: "notes", collection: "notebooks", version: 1, spaceSlug: space, createdAt: Date.now() }; d.notebook.coverColor = cover_color || "#3b82f6";
d.notebook = { d.notebook.isPublic = false;
id: pgRow.id, title: pgRow.title, slug: pgRow.slug || "", d.notebook.createdAt = now;
description: pgRow.description || "", coverColor: pgRow.cover_color || "#3b82f6", d.notebook.updatedAt = now;
isPublic: pgRow.is_public || false, createdAt: Date.now(), updatedAt: Date.now(), });
};
d.items = {};
});
_syncServer.setDoc(docId, initialized);
}
}
return c.json(pgRow, 201); const updatedDoc = _syncServer!.getDoc<NotebookDoc>(notebookDocId(space, notebookId))!;
return c.json(notebookToRest(updatedDoc), 201);
}); });
// GET /api/notebooks/:id — notebook detail with notes (Automerge-first) // GET /api/notebooks/:id — notebook detail with notes
routes.get("/api/notebooks/:id", async (c) => { routes.get("/api/notebooks/:id", async (c) => {
const space = c.req.param("space") || "demo"; const space = c.req.param("space") || "demo";
const id = c.req.param("id"); const id = c.req.param("id");
// Automerge first const docId = notebookDocId(space, id);
if (isLocalFirst(space)) { const doc = _syncServer?.getDoc<NotebookDoc>(docId);
const docId = notebookDocId(space, id); if (!doc || !doc.notebook || !doc.notebook.title) {
const doc = _syncServer?.getDoc<NotebookDoc>(docId); return c.json({ error: "Notebook not found" }, 404);
if (doc) {
const nb = notebookToRest(doc);
const notes = Object.values(doc.items)
.map(noteToRest)
.sort((a, b) => {
if (a.is_pinned !== b.is_pinned) return a.is_pinned ? -1 : 1;
return new Date(b.updated_at).getTime() - new Date(a.updated_at).getTime();
});
return c.json({ ...nb, notes, source: "automerge" });
}
} }
// PG fallback const nb = notebookToRest(doc);
const nb = await sql.unsafe("SELECT * FROM rnotes.notebooks WHERE id = $1", [id]); const notes = Object.values(doc.items)
if (nb.length === 0) return c.json({ error: "Notebook not found" }, 404); .map(noteToRest)
.sort((a, b) => {
const notes = await sql.unsafe( if (a.is_pinned !== b.is_pinned) return a.is_pinned ? -1 : 1;
`SELECT n.*, array_agg(t.name) FILTER (WHERE t.name IS NOT NULL) as tags return new Date(b.updated_at).getTime() - new Date(a.updated_at).getTime();
FROM rnotes.notes n });
LEFT JOIN rnotes.note_tags nt ON nt.note_id = n.id return c.json({ ...nb, notes, source: "automerge" });
LEFT JOIN rnotes.tags t ON t.id = nt.tag_id
WHERE n.notebook_id = $1
GROUP BY n.id
ORDER BY n.is_pinned DESC, n.sort_order ASC, n.updated_at DESC`,
[id]
);
return c.json({ ...nb[0], notes });
}); });
// PUT /api/notebooks/:id — update notebook (dual-write) // PUT /api/notebooks/:id — update notebook
routes.put("/api/notebooks/:id", async (c) => { routes.put("/api/notebooks/:id", async (c) => {
const space = c.req.param("space") || "demo"; const space = c.req.param("space") || "demo";
const token = extractToken(c.req.raw.headers); const token = extractToken(c.req.raw.headers);
@ -379,124 +291,90 @@ routes.put("/api/notebooks/:id", async (c) => {
const body = await c.req.json(); const body = await c.req.json();
const { title, description, cover_color, is_public } = body; const { title, description, cover_color, is_public } = body;
// PG write if (title === undefined && description === undefined && cover_color === undefined && is_public === undefined) {
const fields: string[] = []; return c.json({ error: "No fields to update" }, 400);
const params: any[] = [];
let idx = 1;
if (title !== undefined) { fields.push(`title = $${idx}`); params.push(title); idx++; }
if (description !== undefined) { fields.push(`description = $${idx}`); params.push(description); idx++; }
if (cover_color !== undefined) { fields.push(`cover_color = $${idx}`); params.push(cover_color); idx++; }
if (is_public !== undefined) { fields.push(`is_public = $${idx}`); params.push(is_public); idx++; }
if (fields.length === 0) return c.json({ error: "No fields to update" }, 400);
fields.push("updated_at = NOW()");
params.push(id);
const rows = await sql.unsafe(
`UPDATE rnotes.notebooks SET ${fields.join(", ")} WHERE id = $${idx} RETURNING *`,
params
);
if (rows.length === 0) return c.json({ error: "Notebook not found" }, 404);
// Automerge dual-write: update notebook metadata
if (_syncServer && isLocalFirst(space)) {
const docId = notebookDocId(space, id);
_syncServer.changeDoc<NotebookDoc>(docId, "Update notebook", (d) => {
if (title !== undefined) d.notebook.title = title;
if (description !== undefined) d.notebook.description = description;
if (cover_color !== undefined) d.notebook.coverColor = cover_color;
if (is_public !== undefined) d.notebook.isPublic = is_public;
d.notebook.updatedAt = Date.now();
});
} }
return c.json(rows[0]); const docId = notebookDocId(space, id);
const doc = _syncServer?.getDoc<NotebookDoc>(docId);
if (!doc || !doc.notebook || !doc.notebook.title) {
return c.json({ error: "Notebook not found" }, 404);
}
_syncServer!.changeDoc<NotebookDoc>(docId, "Update notebook", (d) => {
if (title !== undefined) d.notebook.title = title;
if (description !== undefined) d.notebook.description = description;
if (cover_color !== undefined) d.notebook.coverColor = cover_color;
if (is_public !== undefined) d.notebook.isPublic = is_public;
d.notebook.updatedAt = Date.now();
});
const updatedDoc = _syncServer!.getDoc<NotebookDoc>(docId)!;
return c.json(notebookToRest(updatedDoc));
}); });
// DELETE /api/notebooks/:id (dual-write) // DELETE /api/notebooks/:id
routes.delete("/api/notebooks/:id", async (c) => { routes.delete("/api/notebooks/:id", async (c) => {
const space = c.req.param("space") || "demo"; const space = c.req.param("space") || "demo";
const id = c.req.param("id"); const id = c.req.param("id");
const result = await sql.unsafe( const docId = notebookDocId(space, id);
"DELETE FROM rnotes.notebooks WHERE id = $1 RETURNING id", [id] const doc = _syncServer?.getDoc<NotebookDoc>(docId);
); if (!doc || !doc.notebook || !doc.notebook.title) {
if (result.length === 0) return c.json({ error: "Notebook not found" }, 404); return c.json({ error: "Notebook not found" }, 404);
}
// Automerge: remove the entire doc from SyncServer // Clear all items and blank the notebook title to mark as deleted.
// (SyncServer doesn't have a removeDoc — setting it to empty is the equivalent) // SyncServer has no removeDoc API, so we empty the doc instead.
// For now, the doc persists in Automerge but is effectively orphaned once PG row is gone. _syncServer!.changeDoc<NotebookDoc>(docId, "Delete notebook", (d) => {
for (const key of Object.keys(d.items)) {
delete d.items[key];
}
d.notebook.title = "";
d.notebook.updatedAt = Date.now();
});
return c.json({ ok: true }); return c.json({ ok: true });
}); });
// ── Notes API ── // ── Notes API ──
// GET /api/notes — list all notes (Automerge-first, PG fallback) // GET /api/notes — list all notes
routes.get("/api/notes", async (c) => { routes.get("/api/notes", async (c) => {
const space = c.req.param("space") || "demo"; const space = c.req.param("space") || "demo";
const { notebook_id, type, q, limit = "50", offset = "0" } = c.req.query(); const { notebook_id, type, q, limit = "50", offset = "0" } = c.req.query();
// Automerge first let allNotes: ReturnType<typeof noteToRest>[] = [];
if (isLocalFirst(space)) { const notebooks = notebook_id
let allNotes: ReturnType<typeof noteToRest>[] = []; ? (() => {
const notebooks = notebook_id const doc = _syncServer?.getDoc<NotebookDoc>(notebookDocId(space, notebook_id));
? [{ doc: _syncServer!.getDoc<NotebookDoc>(notebookDocId(space, notebook_id))! }].filter(x => x.doc) return doc ? [{ doc }] : [];
: listAutomergeNotebooks(space); })()
: listNotebooks(space);
for (const { doc } of notebooks) { for (const { doc } of notebooks) {
for (const item of Object.values(doc.items)) { for (const item of Object.values(doc.items)) {
if (type && item.type !== type) continue; if (type && item.type !== type) continue;
if (q) { if (q) {
const lower = q.toLowerCase(); const lower = q.toLowerCase();
if (!item.title.toLowerCase().includes(lower) && !item.contentPlain.toLowerCase().includes(lower)) continue; if (!item.title.toLowerCase().includes(lower) && !item.contentPlain.toLowerCase().includes(lower)) continue;
}
allNotes.push(noteToRest(item));
} }
allNotes.push(noteToRest(item));
} }
// Sort: pinned first, then by updated_at desc
allNotes.sort((a, b) => {
if (a.is_pinned !== b.is_pinned) return a.is_pinned ? -1 : 1;
return new Date(b.updated_at).getTime() - new Date(a.updated_at).getTime();
});
const lim = Math.min(parseInt(limit), 100);
const off = parseInt(offset) || 0;
return c.json({ notes: allNotes.slice(off, off + lim), source: "automerge" });
} }
// PG fallback // Sort: pinned first, then by updated_at desc
const conditions: string[] = []; allNotes.sort((a, b) => {
const params: any[] = []; if (a.is_pinned !== b.is_pinned) return a.is_pinned ? -1 : 1;
let idx = 1; return new Date(b.updated_at).getTime() - new Date(a.updated_at).getTime();
});
if (notebook_id) { conditions.push(`n.notebook_id = $${idx}`); params.push(notebook_id); idx++; } const lim = Math.min(parseInt(limit), 100);
if (type) { conditions.push(`n.type = $${idx}`); params.push(type); idx++; } const off = parseInt(offset) || 0;
if (q) { return c.json({ notes: allNotes.slice(off, off + lim), source: "automerge" });
conditions.push(`(n.title ILIKE $${idx} OR n.content_plain ILIKE $${idx})`);
params.push(`%${q}%`);
idx++;
}
const where = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
const rows = await sql.unsafe(
`SELECT n.*, array_agg(t.name) FILTER (WHERE t.name IS NOT NULL) as tags
FROM rnotes.notes n
LEFT JOIN rnotes.note_tags nt ON nt.note_id = n.id
LEFT JOIN rnotes.tags t ON t.id = nt.tag_id
${where}
GROUP BY n.id
ORDER BY n.is_pinned DESC, n.updated_at DESC
LIMIT ${Math.min(parseInt(limit), 100)} OFFSET ${parseInt(offset) || 0}`,
params
);
return c.json({ notes: rows });
}); });
// POST /api/notes — create note (dual-write) // POST /api/notes — create note
routes.post("/api/notes", async (c) => { routes.post("/api/notes", async (c) => {
const space = c.req.param("space") || "demo"; const space = c.req.param("space") || "demo";
const token = extractToken(c.req.raw.headers); const token = extractToken(c.req.raw.headers);
@ -508,151 +386,108 @@ routes.post("/api/notes", async (c) => {
const { notebook_id, title, content, type, url, language, file_url, mime_type, file_size, duration, tags } = body; const { notebook_id, title, content, type, url, language, file_url, mime_type, file_size, duration, tags } = body;
if (!title?.trim()) return c.json({ error: "Title is required" }, 400); if (!title?.trim()) return c.json({ error: "Title is required" }, 400);
if (!notebook_id) return c.json({ error: "notebook_id is required" }, 400);
// Strip HTML for plain text search // Strip HTML/markdown for plain text search
const contentPlain = content ? content.replace(/<[^>]*>/g, " ").replace(/\s+/g, " ").trim() : null; const contentPlain = content ? content.replace(/<[^>]*>/g, " ").replace(/\s+/g, " ").trim() : "";
// PG write // Normalize tags
const rows = await sql.unsafe(
`INSERT INTO rnotes.notes (notebook_id, title, content, content_plain, type, url, language, file_url, mime_type, file_size, duration)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11) RETURNING *`,
[notebook_id || null, title.trim(), content || "", contentPlain, type || "NOTE",
url || null, language || null, file_url || null, mime_type || null, file_size || null, duration || null]
);
// Handle tags in PG
const tagNames: string[] = []; const tagNames: string[] = [];
if (tags && Array.isArray(tags)) { if (tags && Array.isArray(tags)) {
for (const tagName of tags) { for (const tagName of tags) {
const name = tagName.trim().toLowerCase(); const name = (tagName as string).trim().toLowerCase();
if (!name) continue; if (name) tagNames.push(name);
tagNames.push(name);
const tag = await sql.unsafe(
"INSERT INTO rnotes.tags (name) VALUES ($1) ON CONFLICT (name) DO UPDATE SET name = $1 RETURNING id",
[name]
);
await sql.unsafe(
"INSERT INTO rnotes.note_tags (note_id, tag_id) VALUES ($1, $2) ON CONFLICT DO NOTHING",
[rows[0].id, tag[0].id]
);
} }
} }
// Automerge dual-write const noteId = newId();
if (notebook_id && isLocalFirst(space)) { const item = createNoteItem(noteId, notebook_id, title.trim(), {
writeNoteToAutomerge(space, notebook_id, rows[0].id, { authorId: claims.sub ?? null,
title: title.trim(), content: content || "",
content: content || '', contentPlain,
contentPlain: contentPlain || '', type: type || "NOTE",
type: type || 'NOTE', url: url || null,
url: url || null, language: language || null,
language: language || null, fileUrl: file_url || null,
fileUrl: file_url || null, mimeType: mime_type || null,
mimeType: mime_type || null, fileSize: file_size || null,
fileSize: file_size || null, duration: duration || null,
duration: duration || null, tags: tagNames,
tags: tagNames, });
});
}
return c.json(rows[0], 201); // Ensure the notebook doc exists, then add the note
ensureDoc(space, notebook_id);
const docId = notebookDocId(space, notebook_id);
_syncServer!.changeDoc<NotebookDoc>(docId, `Create note: ${title.trim()}`, (d) => {
d.items[noteId] = item;
d.notebook.updatedAt = Date.now();
});
return c.json(noteToRest(item), 201);
}); });
// GET /api/notes/:id — note detail (Automerge-first) // GET /api/notes/:id — note detail
routes.get("/api/notes/:id", async (c) => { routes.get("/api/notes/:id", async (c) => {
const space = c.req.param("space") || "demo"; const space = c.req.param("space") || "demo";
const id = c.req.param("id"); const id = c.req.param("id");
// Automerge first const found = findNote(space, id);
if (isLocalFirst(space)) { if (!found) return c.json({ error: "Note not found" }, 404);
const found = findNoteInAutomerge(space, id);
if (found) return c.json({ ...noteToRest(found.item), source: "automerge" });
}
// PG fallback return c.json({ ...noteToRest(found.item), source: "automerge" });
const rows = await sql.unsafe(
`SELECT n.*, array_agg(t.name) FILTER (WHERE t.name IS NOT NULL) as tags
FROM rnotes.notes n
LEFT JOIN rnotes.note_tags nt ON nt.note_id = n.id
LEFT JOIN rnotes.tags t ON t.id = nt.tag_id
WHERE n.id = $1
GROUP BY n.id`,
[id]
);
if (rows.length === 0) return c.json({ error: "Note not found" }, 404);
return c.json(rows[0]);
}); });
// PUT /api/notes/:id — update note (dual-write) // PUT /api/notes/:id — update note
routes.put("/api/notes/:id", async (c) => { routes.put("/api/notes/:id", async (c) => {
const space = c.req.param("space") || "demo"; const space = c.req.param("space") || "demo";
const id = c.req.param("id"); const id = c.req.param("id");
const body = await c.req.json(); const body = await c.req.json();
const { title, content, type, url, language, is_pinned, sort_order } = body; const { title, content, type, url, language, is_pinned, sort_order } = body;
// PG write if (title === undefined && content === undefined && type === undefined &&
const fields: string[] = []; url === undefined && language === undefined && is_pinned === undefined && sort_order === undefined) {
const params: any[] = []; return c.json({ error: "No fields to update" }, 400);
let idx = 1;
if (title !== undefined) { fields.push(`title = $${idx}`); params.push(title); idx++; }
if (content !== undefined) {
fields.push(`content = $${idx}`); params.push(content); idx++;
const plain = content.replace(/<[^>]*>/g, " ").replace(/\s+/g, " ").trim();
fields.push(`content_plain = $${idx}`); params.push(plain); idx++;
}
if (type !== undefined) { fields.push(`type = $${idx}`); params.push(type); idx++; }
if (url !== undefined) { fields.push(`url = $${idx}`); params.push(url); idx++; }
if (language !== undefined) { fields.push(`language = $${idx}`); params.push(language); idx++; }
if (is_pinned !== undefined) { fields.push(`is_pinned = $${idx}`); params.push(is_pinned); idx++; }
if (sort_order !== undefined) { fields.push(`sort_order = $${idx}`); params.push(sort_order); idx++; }
if (fields.length === 0) return c.json({ error: "No fields to update" }, 400);
fields.push("updated_at = NOW()");
params.push(id);
const rows = await sql.unsafe(
`UPDATE rnotes.notes SET ${fields.join(", ")} WHERE id = $${idx} RETURNING *`,
params
);
if (rows.length === 0) return c.json({ error: "Note not found" }, 404);
// Automerge dual-write
if (isLocalFirst(space)) {
const found = findNoteInAutomerge(space, id);
if (found) {
const contentPlain = content ? content.replace(/<[^>]*>/g, " ").replace(/\s+/g, " ").trim() : undefined;
writeNoteToAutomerge(space, found.item.notebookId, id, {
...(title !== undefined ? { title } : {}),
...(content !== undefined ? { content, contentPlain } : {}),
...(type !== undefined ? { type } : {}),
...(url !== undefined ? { url } : {}),
...(language !== undefined ? { language } : {}),
...(is_pinned !== undefined ? { isPinned: is_pinned } : {}),
...(sort_order !== undefined ? { sortOrder: sort_order } : {}),
});
}
} }
return c.json(rows[0]); const found = findNote(space, id);
if (!found) return c.json({ error: "Note not found" }, 404);
const contentPlain = content !== undefined
? content.replace(/<[^>]*>/g, " ").replace(/\s+/g, " ").trim()
: undefined;
_syncServer!.changeDoc<NotebookDoc>(found.docId, `Update note ${id}`, (d) => {
const item = d.items[id];
if (!item) return;
if (title !== undefined) item.title = title;
if (content !== undefined) item.content = content;
if (contentPlain !== undefined) item.contentPlain = contentPlain;
if (type !== undefined) item.type = type;
if (url !== undefined) item.url = url;
if (language !== undefined) item.language = language;
if (is_pinned !== undefined) item.isPinned = is_pinned;
if (sort_order !== undefined) item.sortOrder = sort_order;
item.updatedAt = Date.now();
});
// Return the updated note
const updatedDoc = _syncServer!.getDoc<NotebookDoc>(found.docId)!;
const updatedItem = updatedDoc.items[id];
return c.json(noteToRest(updatedItem));
}); });
// DELETE /api/notes/:id (dual-write) // DELETE /api/notes/:id
routes.delete("/api/notes/:id", async (c) => { routes.delete("/api/notes/:id", async (c) => {
const space = c.req.param("space") || "demo"; const space = c.req.param("space") || "demo";
const id = c.req.param("id"); const id = c.req.param("id");
// PG delete const found = findNote(space, id);
const result = await sql.unsafe("DELETE FROM rnotes.notes WHERE id = $1 RETURNING id, notebook_id", [id]); if (!found) return c.json({ error: "Note not found" }, 404);
if (result.length === 0) return c.json({ error: "Note not found" }, 404);
// Automerge dual-write: remove note from notebook doc _syncServer!.changeDoc<NotebookDoc>(found.docId, `Delete note ${id}`, (d) => {
if (isLocalFirst(space) && result[0].notebook_id && _syncServer) { delete d.items[id];
const docId = notebookDocId(space, result[0].notebook_id); d.notebook.updatedAt = Date.now();
_syncServer.changeDoc<NotebookDoc>(docId, `Delete note ${id}`, (d) => { });
delete d.items[id];
});
}
return c.json({ ok: true }); return c.json({ ok: true });
}); });
@ -691,11 +526,10 @@ export const notesModule: RSpaceModule = {
async onInit({ syncServer }) { async onInit({ syncServer }) {
_syncServer = syncServer; _syncServer = syncServer;
// Init PG (still needed during dual-write period) // Seed demo notebooks if the "demo" space is empty
await initDB(); seedDemoIfEmpty("demo");
await seedDemoIfEmpty();
console.log("[Notes] onInit complete (PG + schema registered)"); console.log("[Notes] onInit complete (Automerge-only)");
}, },
async onSpaceCreate(ctx: SpaceLifecycleContext) { async onSpaceCreate(ctx: SpaceLifecycleContext) {

View File

@ -3,13 +3,16 @@
* *
* Routes are relative to mount point (/:space/splat in unified). * Routes are relative to mount point (/:space/splat in unified).
* Three.js + GaussianSplats3D loaded via CDN importmap. * Three.js + GaussianSplats3D loaded via CDN importmap.
*
* All metadata is stored in Automerge documents via SyncServer.
* 3D files (.ply, .splat, .spz) remain on the filesystem.
*/ */
import { Hono } from "hono"; import { Hono } from "hono";
import { resolve } from "node:path"; import { resolve } from "node:path";
import { mkdir, readFile } from "node:fs/promises"; import { mkdir } from "node:fs/promises";
import { randomUUID } from "node:crypto"; import { randomUUID } from "node:crypto";
import { sql } from "../../shared/db/pool"; import * as Automerge from "@automerge/automerge";
import { renderShell } from "../../server/shell"; import { renderShell } from "../../server/shell";
import { getModuleInfoList } from "../../shared/module"; import { getModuleInfoList } from "../../shared/module";
import type { RSpaceModule, SpaceLifecycleContext } from "../../shared/module"; import type { RSpaceModule, SpaceLifecycleContext } from "../../shared/module";
@ -20,7 +23,13 @@ import {
} from "@encryptid/sdk/server"; } from "@encryptid/sdk/server";
import { setupX402FromEnv } from "../../shared/x402/hono-middleware"; import { setupX402FromEnv } from "../../shared/x402/hono-middleware";
import type { SyncServer } from '../../server/local-first/sync-server'; import type { SyncServer } from '../../server/local-first/sync-server';
import { splatScenesSchema } from './schemas'; import {
splatScenesSchema,
splatScenesDocId,
type SplatScenesDoc,
type SplatItem,
type SourceFile,
} from './schemas';
let _syncServer: SyncServer | null = null; let _syncServer: SyncServer | null = null;
@ -88,6 +97,88 @@ function getMimeType(format: string): string {
} }
} }
// ── Automerge helpers ──
/**
* Lazily create the Automerge doc for a space if it doesn't exist yet.
*/
function ensureDoc(space: string): SplatScenesDoc {
const docId = splatScenesDocId(space);
let doc = _syncServer!.getDoc<SplatScenesDoc>(docId);
if (!doc) {
doc = Automerge.change(Automerge.init<SplatScenesDoc>(), 'init', (d) => {
const init = splatScenesSchema.init();
d.meta = init.meta;
d.meta.spaceSlug = space;
d.items = {};
});
_syncServer!.setDoc(docId, doc);
}
return doc;
}
/**
* Find a splat item by slug or id within a doc's items map.
* Returns [itemKey, item] or undefined.
*/
function findItem(doc: SplatScenesDoc, idOrSlug: string): [string, SplatItem] | undefined {
for (const [key, item] of Object.entries(doc.items)) {
if (item.slug === idOrSlug || item.id === idOrSlug) {
return [key, item];
}
}
return undefined;
}
/**
* Convert a SplatItem (camelCase) to a snake_case row for API responses,
* preserving the shape the frontend expects.
*/
function itemToRow(item: SplatItem): SplatRow {
return {
id: item.id,
slug: item.slug,
title: item.title,
description: item.description || null,
file_path: item.filePath,
file_format: item.fileFormat,
file_size_bytes: item.fileSizeBytes,
tags: item.tags ?? [],
space_slug: item.spaceSlug,
contributor_id: item.contributorId,
contributor_name: item.contributorName,
source: item.source ?? 'upload',
status: item.status,
view_count: item.viewCount,
payment_tx: item.paymentTx,
payment_network: item.paymentNetwork,
processing_status: item.processingStatus ?? 'ready',
processing_error: item.processingError,
source_file_count: item.sourceFileCount,
created_at: new Date(item.createdAt).toISOString(),
};
}
/**
* Return the subset of SplatRow fields used in list/gallery responses.
*/
function itemToListRow(item: SplatItem) {
return {
id: item.id,
slug: item.slug,
title: item.title,
description: item.description || null,
file_format: item.fileFormat,
file_size_bytes: item.fileSizeBytes,
tags: item.tags ?? [],
contributor_name: item.contributorName,
view_count: item.viewCount,
processing_status: item.processingStatus ?? 'ready',
source_file_count: item.sourceFileCount,
created_at: new Date(item.createdAt).toISOString(),
};
}
// ── CDN importmap for Three.js + GaussianSplats3D ── // ── CDN importmap for Three.js + GaussianSplats3D ──
const IMPORTMAP = `<script type="importmap"> const IMPORTMAP = `<script type="importmap">
@ -118,60 +209,62 @@ routes.get("/api/splats", async (c) => {
const limit = Math.min(parseInt(c.req.query("limit") || "50"), 100); const limit = Math.min(parseInt(c.req.query("limit") || "50"), 100);
const offset = parseInt(c.req.query("offset") || "0"); const offset = parseInt(c.req.query("offset") || "0");
let query = `SELECT id, slug, title, description, file_format, file_size_bytes, const doc = ensureDoc(spaceSlug);
tags, contributor_name, view_count, processing_status, source_file_count, created_at
FROM rsplat.splats WHERE status = 'published' AND space_slug = $1`; let items = Object.values(doc.items)
const params: (string | number)[] = [spaceSlug]; .filter((item) => item.status === 'published');
if (tag) { if (tag) {
params.push(tag); items = items.filter((item) => item.tags?.includes(tag));
query += ` AND $${params.length} = ANY(tags)`;
} }
query += ` ORDER BY created_at DESC`; // Sort by createdAt descending
params.push(limit); items.sort((a, b) => b.createdAt - a.createdAt);
query += ` LIMIT $${params.length}`;
params.push(offset);
query += ` OFFSET $${params.length}`;
const rows = await sql.unsafe(query, params); // Apply offset and limit
return c.json({ splats: rows }); const paged = items.slice(offset, offset + limit);
return c.json({ splats: paged.map(itemToListRow) });
}); });
// ── API: Get splat details ── // ── API: Get splat details ──
routes.get("/api/splats/:id", async (c) => { routes.get("/api/splats/:id", async (c) => {
const spaceSlug = c.req.param("space") || "demo";
const id = c.req.param("id"); const id = c.req.param("id");
const rows = await sql.unsafe( const doc = ensureDoc(spaceSlug);
`SELECT * FROM rsplat.splats WHERE (slug = $1 OR id::text = $1) AND status = 'published'`, const found = findItem(doc, id);
[id]
);
if (rows.length === 0) return c.json({ error: "Splat not found" }, 404); if (!found || found[1].status !== 'published') {
return c.json({ error: "Splat not found" }, 404);
}
const [itemKey, item] = found;
// Increment view count // Increment view count
await sql.unsafe( const docId = splatScenesDocId(spaceSlug);
`UPDATE rsplat.splats SET view_count = view_count + 1 WHERE id = $1`, _syncServer!.changeDoc<SplatScenesDoc>(docId, 'increment view count', (d) => {
[rows[0].id] d.items[itemKey].viewCount += 1;
); });
return c.json(rows[0]); return c.json(itemToRow(item));
}); });
// ── API: Serve splat file ── // ── API: Serve splat file ──
// Matches both /api/splats/:id/file and /api/splats/:id/:filename (e.g. rainbow-sphere.splat) // Matches both /api/splats/:id/file and /api/splats/:id/:filename (e.g. rainbow-sphere.splat)
routes.get("/api/splats/:id/:filename", async (c) => { routes.get("/api/splats/:id/:filename", async (c) => {
const spaceSlug = c.req.param("space") || "demo";
const id = c.req.param("id"); const id = c.req.param("id");
const rows = await sql.unsafe( const doc = ensureDoc(spaceSlug);
`SELECT id, slug, file_path, file_format FROM rsplat.splats WHERE (slug = $1 OR id::text = $1) AND status = 'published'`, const found = findItem(doc, id);
[id]
);
if (rows.length === 0) return c.json({ error: "Splat not found" }, 404); if (!found || found[1].status !== 'published') {
return c.json({ error: "Splat not found" }, 404);
}
const splat = rows[0]; const splat = found[1];
const filepath = resolve(SPLATS_DIR, splat.file_path); const filepath = resolve(SPLATS_DIR, splat.filePath);
const file = Bun.file(filepath); const file = Bun.file(filepath);
if (!(await file.exists())) { if (!(await file.exists())) {
@ -180,8 +273,8 @@ routes.get("/api/splats/:id/:filename", async (c) => {
return new Response(file, { return new Response(file, {
headers: { headers: {
"Content-Type": getMimeType(splat.file_format), "Content-Type": getMimeType(splat.fileFormat),
"Content-Disposition": `inline; filename="${splat.slug}.${splat.file_format}"`, "Content-Disposition": `inline; filename="${splat.slug}.${splat.fileFormat}"`,
"Content-Length": String(file.size), "Content-Length": String(file.size),
"Access-Control-Allow-Origin": "*", "Access-Control-Allow-Origin": "*",
"Cache-Control": "public, max-age=86400", "Cache-Control": "public, max-age=86400",
@ -242,11 +335,10 @@ routes.post("/api/splats", async (c) => {
const shortId = randomUUID().slice(0, 8); const shortId = randomUUID().slice(0, 8);
let slug = slugify(title); let slug = slugify(title);
// Check slug collision // Check slug collision in Automerge doc
const existing = await sql.unsafe( const doc = ensureDoc(spaceSlug);
`SELECT 1 FROM rsplat.splats WHERE slug = $1`, [slug] const slugExists = Object.values(doc.items).some((item) => item.slug === slug);
); if (slugExists) {
if (existing.length > 0) {
slug = `${slug}-${shortId}`; slug = `${slug}-${shortId}`;
} }
@ -257,16 +349,48 @@ routes.post("/api/splats", async (c) => {
const buffer = Buffer.from(await file.arrayBuffer()); const buffer = Buffer.from(await file.arrayBuffer());
await Bun.write(filepath, buffer); await Bun.write(filepath, buffer);
// Insert into DB // Insert into Automerge doc
const splatId = randomUUID();
const now = Date.now();
const paymentTx = (c as any).get("x402Payment") || null; const paymentTx = (c as any).get("x402Payment") || null;
const rows = await sql.unsafe(
`INSERT INTO rsplat.splats (slug, title, description, file_path, file_format, file_size_bytes, tags, space_slug, contributor_id, contributor_name, payment_tx)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)
RETURNING id, slug, title, description, file_format, file_size_bytes, tags, created_at`,
[slug, title, description, filename, format, buffer.length, tags, spaceSlug, claims.sub, claims.username || null, paymentTx] as any[]
);
return c.json(rows[0], 201); const docId = splatScenesDocId(spaceSlug);
_syncServer!.changeDoc<SplatScenesDoc>(docId, 'add splat', (d) => {
d.items[splatId] = {
id: splatId,
slug,
title,
description: description ?? '',
filePath: filename,
fileFormat: format,
fileSizeBytes: buffer.length,
tags,
spaceSlug,
contributorId: claims.sub,
contributorName: claims.username || null,
source: 'upload',
status: 'published',
viewCount: 0,
paymentTx,
paymentNetwork: null,
createdAt: now,
processingStatus: 'ready',
processingError: null,
sourceFileCount: 0,
sourceFiles: [],
};
});
return c.json({
id: splatId,
slug,
title,
description,
file_format: format,
file_size_bytes: buffer.length,
tags,
created_at: new Date(now).toISOString(),
}, 201);
}); });
// ── API: Upload photos/video for splatting ── // ── API: Upload photos/video for splatting ──
@ -338,11 +462,10 @@ routes.post("/api/splats/from-media", async (c) => {
const shortId = randomUUID().slice(0, 8); const shortId = randomUUID().slice(0, 8);
let slug = slugify(title); let slug = slugify(title);
// Check slug collision // Check slug collision in Automerge doc
const existing = await sql.unsafe( const doc = ensureDoc(spaceSlug);
`SELECT 1 FROM rsplat.splats WHERE slug = $1`, [slug] const slugExists = Object.values(doc.items).some((item) => item.slug === slug);
); if (slugExists) {
if (existing.length > 0) {
slug = `${slug}-${shortId}`; slug = `${slug}-${shortId}`;
} }
@ -350,41 +473,67 @@ routes.post("/api/splats/from-media", async (c) => {
const sourceDir = resolve(SOURCES_DIR, slug); const sourceDir = resolve(SOURCES_DIR, slug);
await mkdir(sourceDir, { recursive: true }); await mkdir(sourceDir, { recursive: true });
const sourceRows: { path: string; name: string; mime: string; size: number }[] = []; const sourceFileEntries: SourceFile[] = [];
const sfId = () => randomUUID();
const splatId = randomUUID();
const now = Date.now();
for (const f of files) { for (const f of files) {
const safeName = f.name.replace(/[^a-zA-Z0-9._-]/g, "_"); const safeName = f.name.replace(/[^a-zA-Z0-9._-]/g, "_");
const filepath = resolve(sourceDir, safeName); const filepath = resolve(sourceDir, safeName);
const buffer = Buffer.from(await f.arrayBuffer()); const buffer = Buffer.from(await f.arrayBuffer());
await Bun.write(filepath, buffer); await Bun.write(filepath, buffer);
sourceRows.push({ sourceFileEntries.push({
path: `sources/${slug}/${safeName}`, id: sfId(),
name: f.name, splatId,
mime: f.type, filePath: `sources/${slug}/${safeName}`,
size: buffer.length, fileName: f.name,
mimeType: f.type,
fileSizeBytes: buffer.length,
createdAt: now,
}); });
} }
// Insert splat record (pending processing) // Insert splat record (pending processing) into Automerge doc
const paymentTx = (c as any).get("x402Payment") || null; const paymentTx = (c as any).get("x402Payment") || null;
const splatRows = await sql.unsafe( const docId = splatScenesDocId(spaceSlug);
`INSERT INTO rsplat.splats (slug, title, description, file_path, file_format, file_size_bytes, tags, space_slug, contributor_id, contributor_name, source, processing_status, source_file_count, payment_tx) _syncServer!.changeDoc<SplatScenesDoc>(docId, 'add splat from media', (d) => {
VALUES ($1, $2, $3, '', 'ply', 0, $4, $5, $6, $7, 'media', 'pending', $8, $9) d.items[splatId] = {
RETURNING id, slug, title, description, file_format, tags, processing_status, source_file_count, created_at`, id: splatId,
[slug, title, description, tags, spaceSlug, claims.sub, claims.username || null, files.length, paymentTx] as any[] slug,
); title,
description: description ?? '',
filePath: '',
fileFormat: 'ply',
fileSizeBytes: 0,
tags,
spaceSlug,
contributorId: claims.sub,
contributorName: claims.username || null,
source: 'media',
status: 'published',
viewCount: 0,
paymentTx,
paymentNetwork: null,
createdAt: now,
processingStatus: 'pending',
processingError: null,
sourceFileCount: files.length,
sourceFiles: sourceFileEntries,
};
});
const splatId = splatRows[0].id; return c.json({
id: splatId,
// Insert source file records slug,
for (const sf of sourceRows) { title,
await sql.unsafe( description,
`INSERT INTO rsplat.source_files (splat_id, file_path, file_name, mime_type, file_size_bytes) file_format: 'ply',
VALUES ($1, $2, $3, $4, $5)`, tags,
[splatId, sf.path, sf.name, sf.mime, sf.size] processing_status: 'pending',
); source_file_count: files.length,
} created_at: new Date(now).toISOString(),
}, 201);
return c.json(splatRows[0], 201);
}); });
// ── API: Delete splat (owner only) ── // ── API: Delete splat (owner only) ──
@ -399,21 +548,25 @@ routes.delete("/api/splats/:id", async (c) => {
return c.json({ error: "Invalid token" }, 401); return c.json({ error: "Invalid token" }, 401);
} }
const spaceSlug = c.req.param("space") || "demo";
const id = c.req.param("id"); const id = c.req.param("id");
const rows = await sql.unsafe(
`SELECT id, contributor_id FROM rsplat.splats WHERE (slug = $1 OR id::text = $1) AND status = 'published'`,
[id]
);
if (rows.length === 0) return c.json({ error: "Splat not found" }, 404); const doc = ensureDoc(spaceSlug);
if (rows[0].contributor_id !== claims.sub) { const found = findItem(doc, id);
if (!found || found[1].status !== 'published') {
return c.json({ error: "Splat not found" }, 404);
}
const [itemKey, item] = found;
if (item.contributorId !== claims.sub) {
return c.json({ error: "Not authorized" }, 403); return c.json({ error: "Not authorized" }, 403);
} }
await sql.unsafe( const docId = splatScenesDocId(spaceSlug);
`UPDATE rsplat.splats SET status = 'removed' WHERE id = $1`, _syncServer!.changeDoc<SplatScenesDoc>(docId, 'remove splat', (d) => {
[rows[0].id] d.items[itemKey].status = 'removed';
); });
return c.json({ ok: true }); return c.json({ ok: true });
}); });
@ -422,14 +575,14 @@ routes.delete("/api/splats/:id", async (c) => {
routes.get("/", async (c) => { routes.get("/", async (c) => {
const spaceSlug = c.req.param("space") || "demo"; const spaceSlug = c.req.param("space") || "demo";
const rows = await sql.unsafe( const doc = ensureDoc(spaceSlug);
`SELECT id, slug, title, description, file_format, file_size_bytes,
tags, contributor_name, view_count, processing_status, source_file_count, created_at
FROM rsplat.splats WHERE status = 'published' AND space_slug = $1
ORDER BY created_at DESC LIMIT 50`,
[spaceSlug]
);
const items = Object.values(doc.items)
.filter((item) => item.status === 'published')
.sort((a, b) => b.createdAt - a.createdAt)
.slice(0, 50);
const rows = items.map(itemToListRow);
const splatsJSON = JSON.stringify(rows); const splatsJSON = JSON.stringify(rows);
const html = renderShell({ const html = renderShell({
@ -461,12 +614,10 @@ routes.get("/view/:id", async (c) => {
const spaceSlug = c.req.param("space") || "demo"; const spaceSlug = c.req.param("space") || "demo";
const id = c.req.param("id"); const id = c.req.param("id");
const rows = await sql.unsafe( const doc = ensureDoc(spaceSlug);
`SELECT * FROM rsplat.splats WHERE (slug = $1 OR id::text = $1) AND status = 'published'`, const found = findItem(doc, id);
[id]
);
if (rows.length === 0) { if (!found || found[1].status !== 'published') {
const html = renderShell({ const html = renderShell({
title: "Splat not found | rSpace", title: "Splat not found | rSpace",
moduleId: "rsplat", moduleId: "rsplat",
@ -478,15 +629,15 @@ routes.get("/view/:id", async (c) => {
return c.html(html, 404); return c.html(html, 404);
} }
const splat = rows[0]; const [itemKey, splat] = found;
// Increment view count // Increment view count
await sql.unsafe( const docId = splatScenesDocId(spaceSlug);
`UPDATE rsplat.splats SET view_count = view_count + 1 WHERE id = $1`, _syncServer!.changeDoc<SplatScenesDoc>(docId, 'increment view count', (d) => {
[splat.id] d.items[itemKey].viewCount += 1;
); });
const fileUrl = `/${spaceSlug}/rsplat/api/splats/${splat.slug}/${splat.slug}.${splat.file_format}`; const fileUrl = `/${spaceSlug}/rsplat/api/splats/${splat.slug}/${splat.slug}.${splat.fileFormat}`;
const html = renderShell({ const html = renderShell({
title: `${splat.title} | rSplat`, title: `${splat.title} | rSplat`,
@ -518,24 +669,6 @@ routes.get("/view/:id", async (c) => {
return c.html(html); return c.html(html);
}); });
// ── Initialize DB schema ──
async function initDB(): Promise<void> {
try {
const schemaPath = resolve(import.meta.dir, "db/schema.sql");
const schemaSql = await readFile(schemaPath, "utf-8");
await sql.unsafe(`SET search_path TO rsplat, public`);
await sql.unsafe(schemaSql);
// Migration: add new columns to existing table
await sql.unsafe(`ALTER TABLE rsplat.splats ADD COLUMN IF NOT EXISTS processing_status TEXT DEFAULT 'ready'`);
await sql.unsafe(`ALTER TABLE rsplat.splats ADD COLUMN IF NOT EXISTS processing_error TEXT`);
await sql.unsafe(`ALTER TABLE rsplat.splats ADD COLUMN IF NOT EXISTS source_file_count INTEGER DEFAULT 0`);
await sql.unsafe(`SET search_path TO public`);
console.log("[Splat] Database schema initialized");
} catch (e) {
console.error("[Splat] Schema init failed:", e);
}
}
// ── Module export ── // ── Module export ──
export const splatModule: RSpaceModule = { export const splatModule: RSpaceModule = {
@ -554,9 +687,10 @@ export const splatModule: RSpaceModule = {
], ],
async onInit(ctx) { async onInit(ctx) {
_syncServer = ctx.syncServer; _syncServer = ctx.syncServer;
await initDB(); console.log("[Splat] Automerge document store ready");
}, },
async onSpaceCreate(ctx: SpaceLifecycleContext) { async onSpaceCreate(ctx: SpaceLifecycleContext) {
// Splats are scoped by space_slug column. No per-space setup needed. // Eagerly create the Automerge doc for new spaces
ensureDoc(ctx.spaceSlug);
}, },
}; };

View File

@ -3,52 +3,90 @@
* *
* Plan trips with destinations, itinerary, bookings, expenses, * Plan trips with destinations, itinerary, bookings, expenses,
* and packing lists. Collaborative with role-based access. * and packing lists. Collaborative with role-based access.
*
* Data layer: Automerge documents via SyncServer.
* One document per trip: {space}:trips:trips:{tripId}
*/ */
import { Hono } from "hono"; import { Hono } from "hono";
import { readFileSync } from "node:fs"; import * as Automerge from "@automerge/automerge";
import { resolve } from "node:path";
import { sql } from "../../shared/db/pool";
import { renderShell } from "../../server/shell"; import { renderShell } from "../../server/shell";
import { getModuleInfoList } from "../../shared/module"; import { getModuleInfoList } from "../../shared/module";
import type { RSpaceModule } from "../../shared/module"; import type { RSpaceModule } from "../../shared/module";
import { verifyEncryptIDToken, extractToken } from "@encryptid/sdk/server"; import { verifyEncryptIDToken, extractToken } from "@encryptid/sdk/server";
import { renderLanding } from "./landing"; import { renderLanding } from "./landing";
import type { SyncServer } from '../../server/local-first/sync-server'; import type { SyncServer } from '../../server/local-first/sync-server';
import { tripSchema } from './schemas'; import {
tripSchema, tripDocId,
type TripDoc, type TripMeta, type Destination, type ItineraryItem,
type Booking, type Expense, type PackingItem,
} from './schemas';
let _syncServer: SyncServer | null = null; let _syncServer: SyncServer | null = null;
const OSRM_URL = process.env.OSRM_URL || "http://osrm-backend:5000"; const OSRM_URL = process.env.OSRM_URL || "http://osrm-backend:5000";
const routes = new Hono(); // ── Helpers ──
// ── DB initialization ── /** Generate a short random ID (collision-safe enough for sub-collections). */
const SCHEMA_SQL = readFileSync(resolve(import.meta.dir, "db/schema.sql"), "utf-8"); function newId(): string {
return crypto.randomUUID().slice(0, 12);
async function initDB() {
try {
await sql.unsafe(SCHEMA_SQL);
console.log("[Trips] DB schema initialized");
} catch (e) {
console.error("[Trips] DB init error:", e);
}
} }
/** Ensure a trip document exists; create it lazily if not. */
function ensureDoc(space: string, tripId: string): TripDoc {
const docId = tripDocId(space, tripId);
let doc = _syncServer!.getDoc<TripDoc>(docId);
if (!doc) {
doc = Automerge.change(Automerge.init<TripDoc>(), 'init', (d) => {
const init = tripSchema.init();
d.meta = init.meta;
d.meta.spaceSlug = space;
d.trip = init.trip;
d.trip.id = tripId;
d.destinations = {};
d.itinerary = {};
d.bookings = {};
d.expenses = {};
d.packingItems = {};
});
_syncServer!.setDoc(docId, doc);
}
return doc;
}
/** List all trip doc IDs for a given space. */
function listTripDocIds(space: string): string[] {
const prefix = `${space}:trips:trips:`;
return _syncServer!.listDocs().filter((id) => id.startsWith(prefix));
}
const routes = new Hono();
// ── API: Trips ── // ── API: Trips ──
// GET /api/trips — list trips // GET /api/trips — list trips
routes.get("/api/trips", async (c) => { routes.get("/api/trips", async (c) => {
const rows = await sql.unsafe( const space = c.req.param("space") || "demo";
`SELECT t.*, const docIds = listTripDocIds(space);
count(DISTINCT d.id)::int as destination_count,
count(DISTINCT e.id)::int as expense_count, const rows = docIds.map((docId) => {
coalesce(sum(e.amount), 0)::numeric as total_spent const doc = _syncServer!.getDoc<TripDoc>(docId);
FROM rtrips.trips t if (!doc) return null;
LEFT JOIN rtrips.destinations d ON d.trip_id = t.id const t = doc.trip;
LEFT JOIN rtrips.expenses e ON e.trip_id = t.id const destinations = Object.values(doc.destinations);
GROUP BY t.id ORDER BY t.created_at DESC` const expenses = Object.values(doc.expenses);
); const totalSpent = expenses.reduce((sum, e) => sum + (e.amount || 0), 0);
return {
...t,
destination_count: destinations.length,
expense_count: expenses.length,
total_spent: totalSpent,
};
}).filter(Boolean);
// Sort by createdAt descending (newest first)
rows.sort((a, b) => (b!.createdAt ?? 0) - (a!.createdAt ?? 0));
return c.json(rows); return c.json(rows);
}); });
@ -63,61 +101,93 @@ routes.post("/api/trips", async (c) => {
const { title, description, start_date, end_date, budget_total, budget_currency } = body; const { title, description, start_date, end_date, budget_total, budget_currency } = body;
if (!title?.trim()) return c.json({ error: "Title required" }, 400); if (!title?.trim()) return c.json({ error: "Title required" }, 400);
const space = c.req.param("space") || "demo";
const tripId = newId();
const slug = title.trim().toLowerCase().replace(/[^a-z0-9]+/g, "-").replace(/^-|-$/g, ""); const slug = title.trim().toLowerCase().replace(/[^a-z0-9]+/g, "-").replace(/^-|-$/g, "");
const rows = await sql.unsafe( const now = Date.now();
`INSERT INTO rtrips.trips (title, slug, description, start_date, end_date, budget_total, budget_currency, created_by)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8) RETURNING *`, const docId = tripDocId(space, tripId);
[title.trim(), slug, description || null, start_date || null, end_date || null, let doc = Automerge.change(Automerge.init<TripDoc>(), 'create trip', (d) => {
budget_total || null, budget_currency || "USD", claims.sub] const init = tripSchema.init();
); d.meta = init.meta;
return c.json(rows[0], 201); d.meta.spaceSlug = space;
d.meta.createdAt = now;
d.trip = {
id: tripId,
title: title.trim(),
slug,
description: description || '',
startDate: start_date || null,
endDate: end_date || null,
budgetTotal: budget_total ?? null,
budgetCurrency: budget_currency || 'USD',
status: 'planning',
createdBy: claims.sub,
createdAt: now,
updatedAt: now,
};
d.destinations = {};
d.itinerary = {};
d.bookings = {};
d.expenses = {};
d.packingItems = {};
});
_syncServer!.setDoc(docId, doc);
return c.json(doc.trip, 201);
}); });
// GET /api/trips/:id — trip detail with all sub-resources // GET /api/trips/:id — trip detail with all sub-resources
routes.get("/api/trips/:id", async (c) => { routes.get("/api/trips/:id", async (c) => {
const id = c.req.param("id"); const space = c.req.param("space") || "demo";
const trip = await sql.unsafe("SELECT * FROM rtrips.trips WHERE id = $1", [id]); const tripId = c.req.param("id");
if (trip.length === 0) return c.json({ error: "Trip not found" }, 404); const docId = tripDocId(space, tripId);
const doc = _syncServer!.getDoc<TripDoc>(docId);
if (!doc) return c.json({ error: "Trip not found" }, 404);
const [destinations, itinerary, bookings, expenses, packing] = await Promise.all([ const destinations = Object.values(doc.destinations).sort((a, b) => a.sortOrder - b.sortOrder);
sql.unsafe("SELECT * FROM rtrips.destinations WHERE trip_id = $1 ORDER BY sort_order", [id]), const itinerary = Object.values(doc.itinerary).sort((a, b) => {
sql.unsafe("SELECT * FROM rtrips.itinerary_items WHERE trip_id = $1 ORDER BY date, sort_order", [id]), const dateCmp = (a.date || '').localeCompare(b.date || '');
sql.unsafe("SELECT * FROM rtrips.bookings WHERE trip_id = $1 ORDER BY start_date", [id]), return dateCmp !== 0 ? dateCmp : a.sortOrder - b.sortOrder;
sql.unsafe("SELECT * FROM rtrips.expenses WHERE trip_id = $1 ORDER BY date DESC", [id]), });
sql.unsafe("SELECT * FROM rtrips.packing_items WHERE trip_id = $1 ORDER BY category, sort_order", [id]), const bookings = Object.values(doc.bookings).sort((a, b) => (a.startDate || '').localeCompare(b.startDate || ''));
]); const expenses = Object.values(doc.expenses).sort((a, b) => (b.date || '').localeCompare(a.date || ''));
const packing = Object.values(doc.packingItems).sort((a, b) => {
const catCmp = (a.category || '').localeCompare(b.category || '');
return catCmp !== 0 ? catCmp : a.sortOrder - b.sortOrder;
});
return c.json({ ...trip[0], destinations, itinerary, bookings, expenses, packing }); return c.json({ ...doc.trip, destinations, itinerary, bookings, expenses, packing });
}); });
// PUT /api/trips/:id — update trip // PUT /api/trips/:id — update trip
routes.put("/api/trips/:id", async (c) => { routes.put("/api/trips/:id", async (c) => {
const id = c.req.param("id"); const space = c.req.param("space") || "demo";
const tripId = c.req.param("id");
const docId = tripDocId(space, tripId);
const doc = _syncServer!.getDoc<TripDoc>(docId);
if (!doc) return c.json({ error: "Not found" }, 404);
const body = await c.req.json(); const body = await c.req.json();
const { title, description, start_date, end_date, budget_total, budget_currency, status } = body; const { title, description, start_date, end_date, budget_total, budget_currency, status } = body;
const fields: string[] = []; const hasFields = [title, description, start_date, end_date, budget_total, budget_currency, status]
const params: any[] = []; .some((v) => v !== undefined);
let idx = 1; if (!hasFields) return c.json({ error: "No fields" }, 400);
if (title !== undefined) { fields.push(`title = $${idx}`); params.push(title); idx++; } _syncServer!.changeDoc<TripDoc>(docId, 'update trip', (d) => {
if (description !== undefined) { fields.push(`description = $${idx}`); params.push(description); idx++; } if (title !== undefined) d.trip.title = title;
if (start_date !== undefined) { fields.push(`start_date = $${idx}`); params.push(start_date); idx++; } if (description !== undefined) d.trip.description = description;
if (end_date !== undefined) { fields.push(`end_date = $${idx}`); params.push(end_date); idx++; } if (start_date !== undefined) d.trip.startDate = start_date;
if (budget_total !== undefined) { fields.push(`budget_total = $${idx}`); params.push(budget_total); idx++; } if (end_date !== undefined) d.trip.endDate = end_date;
if (budget_currency !== undefined) { fields.push(`budget_currency = $${idx}`); params.push(budget_currency); idx++; } if (budget_total !== undefined) d.trip.budgetTotal = budget_total;
if (status !== undefined) { fields.push(`status = $${idx}`); params.push(status); idx++; } if (budget_currency !== undefined) d.trip.budgetCurrency = budget_currency;
if (status !== undefined) d.trip.status = status;
d.trip.updatedAt = Date.now();
});
if (fields.length === 0) return c.json({ error: "No fields" }, 400); const updated = _syncServer!.getDoc<TripDoc>(docId);
fields.push("updated_at = NOW()"); return c.json(updated!.trip);
params.push(id);
const rows = await sql.unsafe(
`UPDATE rtrips.trips SET ${fields.join(", ")} WHERE id = $${idx} RETURNING *`,
params
);
if (rows.length === 0) return c.json({ error: "Not found" }, 404);
return c.json(rows[0]);
}); });
// ── API: Destinations ── // ── API: Destinations ──
@ -127,14 +197,33 @@ routes.post("/api/trips/:id/destinations", async (c) => {
if (!token) return c.json({ error: "Authentication required" }, 401); if (!token) return c.json({ error: "Authentication required" }, 401);
try { await verifyEncryptIDToken(token); } catch { return c.json({ error: "Invalid token" }, 401); } try { await verifyEncryptIDToken(token); } catch { return c.json({ error: "Invalid token" }, 401); }
const space = c.req.param("space") || "demo";
const tripId = c.req.param("id");
ensureDoc(space, tripId);
const docId = tripDocId(space, tripId);
const body = await c.req.json(); const body = await c.req.json();
const rows = await sql.unsafe( const destId = newId();
`INSERT INTO rtrips.destinations (trip_id, name, country, lat, lng, arrival_date, departure_date, notes, sort_order) const now = Date.now();
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9) RETURNING *`,
[c.req.param("id"), body.name, body.country || null, body.lat || null, body.lng || null, _syncServer!.changeDoc<TripDoc>(docId, 'add destination', (d) => {
body.arrival_date || null, body.departure_date || null, body.notes || null, body.sort_order ?? 0] d.destinations[destId] = {
); id: destId,
return c.json(rows[0], 201); tripId,
name: body.name,
country: body.country || null,
lat: body.lat ?? null,
lng: body.lng ?? null,
arrivalDate: body.arrival_date || null,
departureDate: body.departure_date || null,
notes: body.notes || '',
sortOrder: body.sort_order ?? 0,
createdAt: now,
};
});
const updated = _syncServer!.getDoc<TripDoc>(docId);
return c.json(updated!.destinations[destId], 201);
}); });
// ── API: Itinerary ── // ── API: Itinerary ──
@ -144,14 +233,33 @@ routes.post("/api/trips/:id/itinerary", async (c) => {
if (!token) return c.json({ error: "Authentication required" }, 401); if (!token) return c.json({ error: "Authentication required" }, 401);
try { await verifyEncryptIDToken(token); } catch { return c.json({ error: "Invalid token" }, 401); } try { await verifyEncryptIDToken(token); } catch { return c.json({ error: "Invalid token" }, 401); }
const space = c.req.param("space") || "demo";
const tripId = c.req.param("id");
ensureDoc(space, tripId);
const docId = tripDocId(space, tripId);
const body = await c.req.json(); const body = await c.req.json();
const rows = await sql.unsafe( const itemId = newId();
`INSERT INTO rtrips.itinerary_items (trip_id, destination_id, title, category, date, start_time, end_time, notes, sort_order) const now = Date.now();
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9) RETURNING *`,
[c.req.param("id"), body.destination_id || null, body.title, body.category || "ACTIVITY", _syncServer!.changeDoc<TripDoc>(docId, 'add itinerary item', (d) => {
body.date || null, body.start_time || null, body.end_time || null, body.notes || null, body.sort_order ?? 0] d.itinerary[itemId] = {
); id: itemId,
return c.json(rows[0], 201); tripId,
destinationId: body.destination_id || null,
title: body.title,
category: body.category || 'ACTIVITY',
date: body.date || null,
startTime: body.start_time || null,
endTime: body.end_time || null,
notes: body.notes || '',
sortOrder: body.sort_order ?? 0,
createdAt: now,
};
});
const updated = _syncServer!.getDoc<TripDoc>(docId);
return c.json(updated!.itinerary[itemId], 201);
}); });
// ── API: Bookings ── // ── API: Bookings ──
@ -161,14 +269,34 @@ routes.post("/api/trips/:id/bookings", async (c) => {
if (!token) return c.json({ error: "Authentication required" }, 401); if (!token) return c.json({ error: "Authentication required" }, 401);
try { await verifyEncryptIDToken(token); } catch { return c.json({ error: "Invalid token" }, 401); } try { await verifyEncryptIDToken(token); } catch { return c.json({ error: "Invalid token" }, 401); }
const space = c.req.param("space") || "demo";
const tripId = c.req.param("id");
ensureDoc(space, tripId);
const docId = tripDocId(space, tripId);
const body = await c.req.json(); const body = await c.req.json();
const rows = await sql.unsafe( const bookingId = newId();
`INSERT INTO rtrips.bookings (trip_id, type, provider, confirmation_number, cost, currency, start_date, end_date, notes) const now = Date.now();
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9) RETURNING *`,
[c.req.param("id"), body.type || "OTHER", body.provider || null, body.confirmation_number || null, _syncServer!.changeDoc<TripDoc>(docId, 'add booking', (d) => {
body.cost || null, body.currency || "USD", body.start_date || null, body.end_date || null, body.notes || null] d.bookings[bookingId] = {
); id: bookingId,
return c.json(rows[0], 201); tripId,
type: body.type || 'OTHER',
provider: body.provider || null,
confirmationNumber: body.confirmation_number || null,
cost: body.cost ?? null,
currency: body.currency || 'USD',
startDate: body.start_date || null,
endDate: body.end_date || null,
status: null,
notes: body.notes || '',
createdAt: now,
};
});
const updated = _syncServer!.getDoc<TripDoc>(docId);
return c.json(updated!.bookings[bookingId], 201);
}); });
// ── API: Expenses ── // ── API: Expenses ──
@ -178,23 +306,47 @@ routes.post("/api/trips/:id/expenses", async (c) => {
if (!token) return c.json({ error: "Authentication required" }, 401); if (!token) return c.json({ error: "Authentication required" }, 401);
try { await verifyEncryptIDToken(token); } catch { return c.json({ error: "Invalid token" }, 401); } try { await verifyEncryptIDToken(token); } catch { return c.json({ error: "Invalid token" }, 401); }
const space = c.req.param("space") || "demo";
const tripId = c.req.param("id");
ensureDoc(space, tripId);
const docId = tripDocId(space, tripId);
const body = await c.req.json(); const body = await c.req.json();
const rows = await sql.unsafe( const expenseId = newId();
`INSERT INTO rtrips.expenses (trip_id, description, amount, currency, category, date, split_type) const now = Date.now();
VALUES ($1, $2, $3, $4, $5, $6, $7) RETURNING *`,
[c.req.param("id"), body.description, body.amount, body.currency || "USD", _syncServer!.changeDoc<TripDoc>(docId, 'add expense', (d) => {
body.category || "OTHER", body.date || null, body.split_type || "EQUAL"] d.expenses[expenseId] = {
); id: expenseId,
return c.json(rows[0], 201); tripId,
paidBy: null,
description: body.description,
amount: body.amount,
currency: body.currency || 'USD',
category: body.category || 'OTHER',
date: body.date || null,
splitType: body.split_type || 'EQUAL',
createdAt: now,
};
});
const updated = _syncServer!.getDoc<TripDoc>(docId);
return c.json(updated!.expenses[expenseId], 201);
}); });
// ── API: Packing ── // ── API: Packing ──
routes.get("/api/trips/:id/packing", async (c) => { routes.get("/api/trips/:id/packing", async (c) => {
const rows = await sql.unsafe( const space = c.req.param("space") || "demo";
"SELECT * FROM rtrips.packing_items WHERE trip_id = $1 ORDER BY category, sort_order", const tripId = c.req.param("id");
[c.req.param("id")] const docId = tripDocId(space, tripId);
); const doc = _syncServer!.getDoc<TripDoc>(docId);
if (!doc) return c.json([]);
const rows = Object.values(doc.packingItems).sort((a, b) => {
const catCmp = (a.category || '').localeCompare(b.category || '');
return catCmp !== 0 ? catCmp : a.sortOrder - b.sortOrder;
});
return c.json(rows); return c.json(rows);
}); });
@ -203,23 +355,53 @@ routes.post("/api/trips/:id/packing", async (c) => {
if (!token) return c.json({ error: "Authentication required" }, 401); if (!token) return c.json({ error: "Authentication required" }, 401);
try { await verifyEncryptIDToken(token); } catch { return c.json({ error: "Invalid token" }, 401); } try { await verifyEncryptIDToken(token); } catch { return c.json({ error: "Invalid token" }, 401); }
const space = c.req.param("space") || "demo";
const tripId = c.req.param("id");
ensureDoc(space, tripId);
const docId = tripDocId(space, tripId);
const body = await c.req.json(); const body = await c.req.json();
const rows = await sql.unsafe( const itemId = newId();
`INSERT INTO rtrips.packing_items (trip_id, name, category, quantity, sort_order) const now = Date.now();
VALUES ($1, $2, $3, $4, $5) RETURNING *`,
[c.req.param("id"), body.name, body.category || "GENERAL", body.quantity || 1, body.sort_order ?? 0] _syncServer!.changeDoc<TripDoc>(docId, 'add packing item', (d) => {
); d.packingItems[itemId] = {
return c.json(rows[0], 201); id: itemId,
tripId,
addedBy: null,
name: body.name,
category: body.category || 'GENERAL',
packed: false,
quantity: body.quantity || 1,
sortOrder: body.sort_order ?? 0,
createdAt: now,
};
});
const updated = _syncServer!.getDoc<TripDoc>(docId);
return c.json(updated!.packingItems[itemId], 201);
}); });
routes.patch("/api/packing/:id", async (c) => { routes.patch("/api/packing/:id", async (c) => {
const body = await c.req.json(); const space = c.req.param("space") || "demo";
const rows = await sql.unsafe( const packingId = c.req.param("id");
"UPDATE rtrips.packing_items SET packed = $1 WHERE id = $2 RETURNING *",
[body.packed ?? false, c.req.param("id")] // Find the trip doc containing this packing item
); const docIds = listTripDocIds(space);
if (rows.length === 0) return c.json({ error: "Not found" }, 404); for (const docId of docIds) {
return c.json(rows[0]); const doc = _syncServer!.getDoc<TripDoc>(docId);
if (!doc || !doc.packingItems[packingId]) continue;
const body = await c.req.json();
_syncServer!.changeDoc<TripDoc>(docId, 'toggle packing item', (d) => {
d.packingItems[packingId].packed = body.packed ?? false;
});
const updated = _syncServer!.getDoc<TripDoc>(docId);
return c.json(updated!.packingItems[packingId]);
}
return c.json({ error: "Not found" }, 404);
}); });
// ── OSRM proxy for route planner ── // ── OSRM proxy for route planner ──
@ -279,7 +461,6 @@ export const tripsModule: RSpaceModule = {
landingPage: renderLanding, landingPage: renderLanding,
async onInit(ctx) { async onInit(ctx) {
_syncServer = ctx.syncServer; _syncServer = ctx.syncServer;
await initDB();
}, },
standaloneDomain: "rtrips.online", standaloneDomain: "rtrips.online",
feeds: [ feeds: [

View File

@ -3,116 +3,166 @@
* *
* Credit-weighted conviction voting for collaborative governance. * Credit-weighted conviction voting for collaborative governance.
* Spaces run ranked proposals with configurable parameters. * Spaces run ranked proposals with configurable parameters.
*
* All state stored in Automerge documents via SyncServer.
* Doc layout:
* {space}:vote:config SpaceConfig (stored on a ProposalDoc)
* {space}:vote:proposals:{proposalId} ProposalDoc with votes/finalVotes
*/ */
import { Hono } from "hono"; import { Hono } from "hono";
import { readFileSync } from "node:fs";
import { resolve } from "node:path";
import * as Automerge from '@automerge/automerge'; import * as Automerge from '@automerge/automerge';
import { sql } from "../../shared/db/pool";
import { renderShell } from "../../server/shell"; import { renderShell } from "../../server/shell";
import { getModuleInfoList } from "../../shared/module"; import { getModuleInfoList } from "../../shared/module";
import type { RSpaceModule, SpaceLifecycleContext } from "../../shared/module"; import type { RSpaceModule } from "../../shared/module";
import { verifyEncryptIDToken, extractToken } from "@encryptid/sdk/server"; import { verifyEncryptIDToken, extractToken } from "@encryptid/sdk/server";
import { renderLanding } from "./landing"; import { renderLanding } from "./landing";
import type { SyncServer } from '../../server/local-first/sync-server'; import type { SyncServer } from '../../server/local-first/sync-server';
import { proposalSchema, proposalDocId } from './schemas'; import { proposalSchema, proposalDocId } from './schemas';
import type { ProposalDoc } from './schemas'; import type { ProposalDoc, SpaceConfig } from './schemas';
const routes = new Hono(); const routes = new Hono();
// ── DB initialization ── // ── SyncServer ref (set during onInit) ──
const SCHEMA_SQL = readFileSync(resolve(import.meta.dir, "db/schema.sql"), "utf-8");
async function initDB() {
try {
await sql.unsafe(SCHEMA_SQL);
console.log("[Vote] DB schema initialized");
} catch (e) {
console.error("[Vote] DB init error:", e);
}
}
async function seedDemoIfEmpty() {
try {
const count = await sql.unsafe("SELECT count(*)::int as cnt FROM rvote.spaces");
if (parseInt(count[0].cnt) > 0) return;
// Create demo user
const user = await sql.unsafe(
`INSERT INTO rvote.users (did, username) VALUES ('did:demo:seed', 'demo')
ON CONFLICT (did) DO UPDATE SET username = 'demo' RETURNING id`
);
const userId = user[0].id;
// Create voting space (matches standalone rVote settings)
await sql.unsafe(
`INSERT INTO rvote.spaces (slug, name, description, owner_did, visibility, promotion_threshold, voting_period_days, credits_per_day, max_credits, starting_credits)
VALUES ('community', 'Community Governance', 'Proposals for the rSpace ecosystem', 'did:demo:seed', 'public_read', 100, 7, 10, 500, 50)`
);
// Seed proposals in various states
const proposals = [
{ title: "Add dark mode across all r* modules", desc: "Implement a consistent dark theme with a toggle in shell.css. Use CSS custom properties for theming so each module inherits automatically.", status: "RANKING", score: 45 },
{ title: "Implement real-time collaboration in rNotes", desc: "Use Automerge CRDTs (already in the stack) to enable simultaneous editing of notes, similar to how rSpace canvas works.", status: "RANKING", score: 72 },
{ title: "Adopt cosmolocal print-on-demand for all merch", desc: "Route all merchandise orders through the provider registry to find the closest printer. Reduces shipping emissions and supports local economies.", status: "VOTING", score: 105 },
{ title: "Use EncryptID passkeys for all authentication", desc: "Standardize on WebAuthn passkeys via EncryptID across the entire r* ecosystem. One passkey, all apps.", status: "PASSED", score: 150 },
{ title: "Switch from PostgreSQL to SQLite for simpler deployment", desc: "Evaluate replacing PostgreSQL with SQLite for modules that don't need concurrent writes.", status: "FAILED", score: 30 },
];
for (const p of proposals) {
const row = await sql.unsafe(
`INSERT INTO rvote.proposals (space_slug, author_id, title, description, status, score)
VALUES ('community', $1, $2, $3, $4, $5) RETURNING id`,
[userId, p.title, p.desc, p.status, p.score]
);
if (p.status === "VOTING") {
await sql.unsafe(
`UPDATE rvote.proposals SET voting_ends_at = NOW() + INTERVAL '5 days', final_yes = 5, final_no = 2 WHERE id = $1`,
[row[0].id]
);
} else if (p.status === "PASSED") {
await sql.unsafe(
`UPDATE rvote.proposals SET final_yes = 12, final_no = 3, final_abstain = 2 WHERE id = $1`,
[row[0].id]
);
} else if (p.status === "FAILED") {
await sql.unsafe(
`UPDATE rvote.proposals SET final_yes = 2, final_no = 8, final_abstain = 1 WHERE id = $1`,
[row[0].id]
);
}
}
console.log("[Vote] Demo data seeded: 1 space, 5 proposals");
} catch (e) {
console.error("[Vote] Seed error:", e);
}
}
// ── Local-first helpers ──
let _syncServer: SyncServer | null = null; let _syncServer: SyncServer | null = null;
function isLocalFirst(space: string): boolean { // ── DocId helpers ──
if (!_syncServer) return false;
return _syncServer.getDocIds().some((id) => id.startsWith(`${space}:vote:`)); function spaceConfigDocId(space: string) {
return `${space}:vote:config` as const;
} }
// ── Helper: get or create user by DID ── // ── Automerge helpers ──
async function getOrCreateUser(did: string, username?: string) {
const rows = await sql.unsafe( /** Ensure a proposal doc exists, creating it if needed. */
`INSERT INTO rvote.users (did, username) VALUES ($1, $2) function ensureProposalDoc(space: string, proposalId: string): ProposalDoc {
ON CONFLICT (did) DO UPDATE SET username = COALESCE($2, rvote.users.username) const docId = proposalDocId(space, proposalId);
RETURNING *`, let doc = _syncServer!.getDoc<ProposalDoc>(docId);
[did, username || null] if (!doc) {
); doc = Automerge.change(Automerge.init<ProposalDoc>(), 'init proposal', (d) => {
return rows[0]; const init = proposalSchema.init();
Object.assign(d, init);
d.meta.spaceSlug = space;
d.proposal.id = proposalId;
d.proposal.spaceSlug = space;
});
_syncServer!.setDoc(docId, doc);
}
return doc;
}
/**
 * Ensure a space config doc exists, creating it with defaults if needed.
 * Returns the current (immutable) doc snapshot.
 *
 * The config lives on a ProposalDoc whose `spaceConfig` field is populated;
 * callers detect a "real" (non-placeholder) space by checking for a
 * non-empty `spaceConfig.name`.
 */
function ensureSpaceConfigDoc(space: string): ProposalDoc {
  const docId = spaceConfigDocId(space);
  let doc = _syncServer!.getDoc<ProposalDoc>(docId);
  if (!doc) {
    // Single timestamp so createdAt and updatedAt agree at creation time.
    const now = Date.now();
    doc = Automerge.change(Automerge.init<ProposalDoc>(), 'init space config', (d) => {
      const init = proposalSchema.init();
      Object.assign(d, init);
      d.meta.spaceSlug = space;
      d.spaceConfig = {
        slug: space,
        name: '',
        description: '',
        ownerDid: '',
        visibility: 'public_read',
        promotionThreshold: 100,
        votingPeriodDays: 7,
        creditsPerDay: 10,
        maxCredits: 500,
        startingCredits: 50,
        createdAt: now,
        updatedAt: now,
      };
    });
    _syncServer!.setDoc(docId, doc);
  }
  return doc;
}
/** Collect every space config doc (one per space) known to the sync server. */
function listSpaceConfigDocs(): { docId: string; doc: ProposalDoc }[] {
  if (!_syncServer) return [];
  const server = _syncServer;
  // NOTE(review): this uses server.listDocs() while other modules in this
  // repo call getDocIds() — confirm both exist on SyncServer.
  return server
    .listDocs()
    .filter((docId) => docId.endsWith(':vote:config'))
    .flatMap((docId) => {
      const doc = server.getDoc<ProposalDoc>(docId);
      return doc?.spaceConfig ? [{ docId, doc }] : [];
    });
}
/** Collect every proposal doc belonging to a single space. */
function listProposalDocs(space: string): { docId: string; doc: ProposalDoc }[] {
  if (!_syncServer) return [];
  const server = _syncServer;
  const prefix = `${space}:vote:proposals:`;
  const found: { docId: string; doc: ProposalDoc }[] = [];
  for (const docId of server.listDocs()) {
    if (!docId.startsWith(prefix)) continue;
    const doc = server.getDoc<ProposalDoc>(docId);
    if (doc) found.push({ docId, doc });
  }
  return found;
}
/** Collect every proposal doc across all spaces. */
function listAllProposalDocs(): { docId: string; doc: ProposalDoc }[] {
  if (!_syncServer) return [];
  const server = _syncServer;
  return server
    .listDocs()
    .filter((docId) => docId.includes(':vote:proposals:'))
    .flatMap((docId) => {
      const doc = server.getDoc<ProposalDoc>(docId);
      return doc ? [{ docId, doc }] : [];
    });
}
// ── Conversion helpers (Automerge → REST format) ──
/**
 * Map a SpaceConfig (camelCase fields, epoch-ms timestamps) to the
 * snake_case/ISO-string shape the REST API exposed before the Automerge
 * migration, so existing clients keep working unchanged.
 */
function spaceConfigToRest(cfg: SpaceConfig) {
  const {
    slug, name, description, ownerDid, visibility,
    promotionThreshold, votingPeriodDays, creditsPerDay,
    maxCredits, startingCredits, createdAt, updatedAt,
  } = cfg;
  return {
    slug,
    name,
    description,
    owner_did: ownerDid,
    visibility,
    promotion_threshold: promotionThreshold,
    voting_period_days: votingPeriodDays,
    credits_per_day: creditsPerDay,
    max_credits: maxCredits,
    starting_credits: startingCredits,
    created_at: new Date(createdAt).toISOString(),
    updated_at: new Date(updatedAt).toISOString(),
  };
}
function proposalToRest(doc: ProposalDoc) {
const p = doc.proposal;
const voteCount = Object.keys(doc.votes).length;
return {
id: p.id,
space_slug: p.spaceSlug,
author_id: p.authorId,
title: p.title,
description: p.description,
status: p.status,
score: p.score,
voting_ends_at: p.votingEndsAt ? new Date(p.votingEndsAt).toISOString() : null,
final_yes: p.finalYes,
final_no: p.finalNo,
final_abstain: p.finalAbstain,
vote_count: String(voteCount),
created_at: new Date(p.createdAt).toISOString(),
updated_at: new Date(p.updatedAt).toISOString(),
};
} }
// ── Helper: calculate effective weight with decay ── // ── Helper: calculate effective weight with decay ──
function getEffectiveWeight(weight: number, createdAt: Date): number { function getEffectiveWeight(weight: number, createdAt: number): number {
const ageMs = Date.now() - createdAt.getTime(); const ageMs = Date.now() - createdAt;
const ageDays = ageMs / (1000 * 60 * 60 * 24); const ageDays = ageMs / (1000 * 60 * 60 * 24);
if (ageDays < 30) return weight; if (ageDays < 30) return weight;
if (ageDays >= 60) return 0; if (ageDays >= 60) return 0;
@ -120,31 +170,105 @@ function getEffectiveWeight(weight: number, createdAt: Date): number {
return Math.round(weight * (1 - decayProgress)); return Math.round(weight * (1 - decayProgress));
} }
// ── Helper: recalculate proposal score ── // ── Helper: recalculate proposal score from votes ──
async function recalcScore(proposalId: string) { function recalcScore(doc: ProposalDoc): number {
const votes = await sql.unsafe(
"SELECT weight, created_at FROM rvote.votes WHERE proposal_id = $1",
[proposalId]
);
let score = 0; let score = 0;
for (const v of votes) { for (const v of Object.values(doc.votes)) {
score += getEffectiveWeight(v.weight, new Date(v.created_at)); score += getEffectiveWeight(v.weight, v.createdAt);
} }
await sql.unsafe(
"UPDATE rvote.proposals SET score = $1, updated_at = NOW() WHERE id = $2",
[score, proposalId]
);
return score; return score;
} }
// ── Helper: generate unique ID ──
/** Generate a unique identifier (random UUID) for new proposals and votes. */
function newId(): string {
  return globalThis.crypto.randomUUID();
}
// ── Seed demo data into Automerge ──

/**
 * Seed a demo 'community' space and five proposals (covering each lifecycle
 * status) on first boot. No-op when any space config doc already exists.
 */
function seedDemoIfEmpty() {
  const existingSpaces = listSpaceConfigDocs();
  if (existingSpaces.length > 0) return;

  // Create the demo space config doc for its side effect (the original
  // bound the return value to an unused local), then fill in demo metadata.
  ensureSpaceConfigDoc('community');
  _syncServer!.changeDoc<ProposalDoc>(spaceConfigDocId('community'), 'seed space config', (d) => {
    d.spaceConfig!.name = 'Community Governance';
    d.spaceConfig!.description = 'Proposals for the rSpace ecosystem';
    d.spaceConfig!.ownerDid = 'did:demo:seed';
    d.spaceConfig!.promotionThreshold = 100;
    d.spaceConfig!.votingPeriodDays = 7;
    d.spaceConfig!.creditsPerDay = 10;
    d.spaceConfig!.maxCredits = 500;
    d.spaceConfig!.startingCredits = 50;
  });

  const demoUserId = 'did:demo:seed';
  const now = Date.now();

  const proposals = [
    { title: "Add dark mode across all r* modules", desc: "Implement a consistent dark theme with a toggle in shell.css. Use CSS custom properties for theming so each module inherits automatically.", status: "RANKING", score: 45 },
    { title: "Implement real-time collaboration in rNotes", desc: "Use Automerge CRDTs (already in the stack) to enable simultaneous editing of notes, similar to how rSpace canvas works.", status: "RANKING", score: 72 },
    { title: "Adopt cosmolocal print-on-demand for all merch", desc: "Route all merchandise orders through the provider registry to find the closest printer. Reduces shipping emissions and supports local economies.", status: "VOTING", score: 105 },
    { title: "Use EncryptID passkeys for all authentication", desc: "Standardize on WebAuthn passkeys via EncryptID across the entire r* ecosystem. One passkey, all apps.", status: "PASSED", score: 150 },
    { title: "Switch from PostgreSQL to SQLite for simpler deployment", desc: "Evaluate replacing PostgreSQL with SQLite for modules that don't need concurrent writes.", status: "FAILED", score: 30 },
  ];

  for (const p of proposals) {
    const pid = newId();
    const docId = proposalDocId('community', pid);
    // Base proposal doc with the shared demo author and timestamps.
    let doc = Automerge.change(Automerge.init<ProposalDoc>(), 'seed proposal', (d) => {
      const init = proposalSchema.init();
      Object.assign(d, init);
      d.meta.spaceSlug = 'community';
      d.proposal.id = pid;
      d.proposal.spaceSlug = 'community';
      d.proposal.authorId = demoUserId;
      d.proposal.title = p.title;
      d.proposal.description = p.desc;
      d.proposal.status = p.status;
      d.proposal.score = p.score;
      d.proposal.createdAt = now;
      d.proposal.updatedAt = now;
    });
    // Status-specific tallies so each demo proposal looks realistic.
    if (p.status === "VOTING") {
      doc = Automerge.change(doc, 'set voting tally', (d) => {
        d.proposal.votingEndsAt = now + 5 * 24 * 60 * 60 * 1000;
        d.proposal.finalYes = 5;
        d.proposal.finalNo = 2;
      });
    } else if (p.status === "PASSED") {
      doc = Automerge.change(doc, 'set passed tally', (d) => {
        d.proposal.finalYes = 12;
        d.proposal.finalNo = 3;
        d.proposal.finalAbstain = 2;
      });
    } else if (p.status === "FAILED") {
      doc = Automerge.change(doc, 'set failed tally', (d) => {
        d.proposal.finalYes = 2;
        d.proposal.finalNo = 8;
        d.proposal.finalAbstain = 1;
      });
    }
    _syncServer!.setDoc(docId, doc);
  }

  console.log("[Vote] Demo data seeded: 1 space, 5 proposals");
}
// ── Spaces API ── // ── Spaces API ──
// GET /api/spaces — list spaces // GET /api/spaces — list spaces
routes.get("/api/spaces", async (c) => { routes.get("/api/spaces", (c) => {
const rows = await sql.unsafe( const spaceDocs = listSpaceConfigDocs();
"SELECT * FROM rvote.spaces ORDER BY created_at DESC LIMIT 50" const spaces = spaceDocs
); .filter((s) => s.doc.spaceConfig !== null)
return c.json({ spaces: rows }); .map((s) => spaceConfigToRest(s.doc.spaceConfig!))
.sort((a, b) => new Date(b.created_at).getTime() - new Date(a.created_at).getTime())
.slice(0, 50);
return c.json({ spaces });
}); });
// POST /api/spaces — create space // POST /api/spaces — create space
@ -159,53 +283,65 @@ routes.post("/api/spaces", async (c) => {
if (!name || !slug) return c.json({ error: "name and slug required" }, 400); if (!name || !slug) return c.json({ error: "name and slug required" }, 400);
if (!/^[a-z0-9-]+$/.test(slug)) return c.json({ error: "Invalid slug" }, 400); if (!/^[a-z0-9-]+$/.test(slug)) return c.json({ error: "Invalid slug" }, 400);
try { // Check if space already exists
const rows = await sql.unsafe( const existing = _syncServer!.getDoc<ProposalDoc>(spaceConfigDocId(slug));
`INSERT INTO rvote.spaces (slug, name, description, owner_did, visibility) if (existing?.spaceConfig?.name) return c.json({ error: "Space already exists" }, 409);
VALUES ($1, $2, $3, $4, $5) RETURNING *`,
[slug, name, description || null, claims.sub, visibility] const now = Date.now();
); const doc = ensureSpaceConfigDoc(slug);
return c.json(rows[0], 201); _syncServer!.changeDoc<ProposalDoc>(spaceConfigDocId(slug), 'create space', (d) => {
} catch (e: any) { d.spaceConfig!.slug = slug;
if (e.code === "23505") return c.json({ error: "Space already exists" }, 409); d.spaceConfig!.name = name;
throw e; d.spaceConfig!.description = description || '';
} d.spaceConfig!.ownerDid = claims.sub;
d.spaceConfig!.visibility = visibility;
d.spaceConfig!.createdAt = now;
d.spaceConfig!.updatedAt = now;
});
const updated = _syncServer!.getDoc<ProposalDoc>(spaceConfigDocId(slug));
return c.json(spaceConfigToRest(updated!.spaceConfig!), 201);
}); });
// GET /api/spaces/:slug — space detail // GET /api/spaces/:slug — space detail
routes.get("/api/spaces/:slug", async (c) => { routes.get("/api/spaces/:slug", (c) => {
const slug = c.req.param("slug"); const slug = c.req.param("slug");
const rows = await sql.unsafe("SELECT * FROM rvote.spaces WHERE slug = $1", [slug]); const doc = _syncServer!.getDoc<ProposalDoc>(spaceConfigDocId(slug));
if (rows.length === 0) return c.json({ error: "Space not found" }, 404); if (!doc?.spaceConfig?.name) return c.json({ error: "Space not found" }, 404);
return c.json(rows[0]); return c.json(spaceConfigToRest(doc.spaceConfig));
}); });
// ── Proposals API ── // ── Proposals API ──
// GET /api/proposals — list proposals (query: space_slug, status) // GET /api/proposals — list proposals (query: space_slug, status)
routes.get("/api/proposals", async (c) => { routes.get("/api/proposals", (c) => {
const { space_slug, status, limit = "50", offset = "0" } = c.req.query(); const { space_slug, status, limit = "50", offset = "0" } = c.req.query();
const conditions: string[] = []; const maxLimit = Math.min(parseInt(limit) || 50, 100);
const params: any[] = []; const startOffset = parseInt(offset) || 0;
let idx = 1;
let docs: { docId: string; doc: ProposalDoc }[];
if (space_slug) { if (space_slug) {
conditions.push(`space_slug = $${idx}`); docs = listProposalDocs(space_slug);
params.push(space_slug); } else {
idx++; docs = listAllProposalDocs();
}
if (status) {
conditions.push(`status = $${idx}`);
params.push(status);
idx++;
} }
const where = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : ""; let proposals = docs
const rows = await sql.unsafe( .filter((d) => d.doc.proposal.title) // exclude empty/config docs
`SELECT * FROM rvote.proposals ${where} ORDER BY score DESC, created_at DESC LIMIT ${Math.min(parseInt(limit), 100)} OFFSET ${parseInt(offset) || 0}`, .map((d) => proposalToRest(d.doc));
params
); if (status) {
return c.json({ proposals: rows }); proposals = proposals.filter((p) => p.status === status);
}
// Sort by score descending, then created_at descending
proposals.sort((a, b) => {
if (b.score !== a.score) return b.score - a.score;
return new Date(b.created_at).getTime() - new Date(a.created_at).getTime();
});
proposals = proposals.slice(startOffset, startOffset + maxLimit);
return c.json({ proposals });
}); });
// POST /api/proposals — create proposal // POST /api/proposals — create proposal
@ -220,29 +356,40 @@ routes.post("/api/proposals", async (c) => {
if (!space_slug || !title) return c.json({ error: "space_slug and title required" }, 400); if (!space_slug || !title) return c.json({ error: "space_slug and title required" }, 400);
// Verify space exists // Verify space exists
const space = await sql.unsafe("SELECT slug FROM rvote.spaces WHERE slug = $1", [space_slug]); const spaceDoc = _syncServer!.getDoc<ProposalDoc>(spaceConfigDocId(space_slug));
if (space.length === 0) return c.json({ error: "Space not found" }, 404); if (!spaceDoc?.spaceConfig?.name) return c.json({ error: "Space not found" }, 404);
const user = await getOrCreateUser(claims.sub, claims.username); const pid = newId();
const rows = await sql.unsafe( const now = Date.now();
`INSERT INTO rvote.proposals (space_slug, author_id, title, description) const docId = proposalDocId(space_slug, pid);
VALUES ($1, $2, $3, $4) RETURNING *`,
[space_slug, user.id, title, description || null] const doc = Automerge.change(Automerge.init<ProposalDoc>(), 'create proposal', (d) => {
); const init = proposalSchema.init();
return c.json(rows[0], 201); Object.assign(d, init);
d.meta.spaceSlug = space_slug;
d.proposal.id = pid;
d.proposal.spaceSlug = space_slug;
d.proposal.authorId = claims.sub;
d.proposal.title = title;
d.proposal.description = description || '';
d.proposal.createdAt = now;
d.proposal.updatedAt = now;
});
_syncServer!.setDoc(docId, doc);
return c.json(proposalToRest(doc), 201);
}); });
// GET /api/proposals/:id — proposal detail // GET /api/proposals/:id — proposal detail
routes.get("/api/proposals/:id", async (c) => { routes.get("/api/proposals/:id", (c) => {
const id = c.req.param("id"); const id = c.req.param("id");
const rows = await sql.unsafe(
`SELECT p.*, // Search across all spaces for this proposal
(SELECT count(*) FROM rvote.votes WHERE proposal_id = p.id) as vote_count const allDocs = listAllProposalDocs();
FROM rvote.proposals p WHERE p.id = $1`, const match = allDocs.find((d) => d.doc.proposal.id === id);
[id] if (!match) return c.json({ error: "Proposal not found" }, 404);
);
if (rows.length === 0) return c.json({ error: "Proposal not found" }, 404); return c.json(proposalToRest(match.doc));
return c.json(rows[0]);
}); });
// POST /api/proposals/:id/vote — cast conviction vote // POST /api/proposals/:id/vote — cast conviction vote
@ -256,40 +403,53 @@ routes.post("/api/proposals/:id/vote", async (c) => {
const body = await c.req.json(); const body = await c.req.json();
const { weight = 1 } = body; const { weight = 1 } = body;
// Verify proposal is in RANKING // Find proposal
const proposal = await sql.unsafe( const allDocs = listAllProposalDocs();
"SELECT * FROM rvote.proposals WHERE id = $1", const match = allDocs.find((d) => d.doc.proposal.id === id);
[id] if (!match) return c.json({ error: "Proposal not found" }, 404);
); if (match.doc.proposal.status !== "RANKING") return c.json({ error: "Proposal not in ranking phase" }, 400);
if (proposal.length === 0) return c.json({ error: "Proposal not found" }, 404);
if (proposal[0].status !== "RANKING") return c.json({ error: "Proposal not in ranking phase" }, 400);
const user = await getOrCreateUser(claims.sub, claims.username); const userId = claims.sub;
const creditCost = weight * weight; // quadratic cost const creditCost = weight * weight; // quadratic cost
const now = Date.now();
const thirtyDaysMs = 30 * 24 * 60 * 60 * 1000;
// Upsert vote // Upsert vote (keyed by userId)
await sql.unsafe( _syncServer!.changeDoc<ProposalDoc>(match.docId, 'cast conviction vote', (d) => {
`INSERT INTO rvote.votes (user_id, proposal_id, weight, credit_cost, decays_at) d.votes[userId] = {
VALUES ($1, $2, $3, $4, NOW() + INTERVAL '30 days') id: d.votes[userId]?.id || newId(),
ON CONFLICT (user_id, proposal_id) userId,
DO UPDATE SET weight = $3, credit_cost = $4, created_at = NOW(), decays_at = NOW() + INTERVAL '30 days'`, proposalId: id,
[user.id, id, weight, creditCost] weight,
); creditCost,
createdAt: now,
decaysAt: now + thirtyDaysMs,
};
});
// Recalculate score and check for promotion // Re-read doc, recalculate score
const score = await recalcScore(id); const updatedDoc = _syncServer!.getDoc<ProposalDoc>(match.docId)!;
const space = await sql.unsafe( const score = recalcScore(updatedDoc);
"SELECT * FROM rvote.spaces WHERE slug = $1",
[proposal[0].space_slug]
);
const threshold = space[0]?.promotion_threshold || 100;
if (score >= threshold && proposal[0].status === "RANKING") { // Update score on the doc
const votingDays = space[0]?.voting_period_days || 7; _syncServer!.changeDoc<ProposalDoc>(match.docId, 'update score', (d) => {
await sql.unsafe( d.proposal.score = score;
`UPDATE rvote.proposals SET status = 'VOTING', voting_ends_at = NOW() + ($1 || ' days')::INTERVAL, updated_at = NOW() WHERE id = $2`, d.proposal.updatedAt = Date.now();
[votingDays, id] });
);
// Check for promotion to VOTING phase
const spaceSlug = updatedDoc.proposal.spaceSlug;
const spaceDoc = _syncServer!.getDoc<ProposalDoc>(spaceConfigDocId(spaceSlug));
const threshold = spaceDoc?.spaceConfig?.promotionThreshold || 100;
if (score >= threshold && updatedDoc.proposal.status === "RANKING") {
const votingDays = spaceDoc?.spaceConfig?.votingPeriodDays || 7;
const votingEndsAt = Date.now() + votingDays * 24 * 60 * 60 * 1000;
_syncServer!.changeDoc<ProposalDoc>(match.docId, 'promote to voting', (d) => {
d.proposal.status = 'VOTING';
d.proposal.votingEndsAt = votingEndsAt;
d.proposal.updatedAt = Date.now();
});
} }
return c.json({ ok: true, score, creditCost }); return c.json({ ok: true, score, creditCost });
@ -307,30 +467,39 @@ routes.post("/api/proposals/:id/final-vote", async (c) => {
const { vote } = body; const { vote } = body;
if (!["YES", "NO", "ABSTAIN"].includes(vote)) return c.json({ error: "Invalid vote" }, 400); if (!["YES", "NO", "ABSTAIN"].includes(vote)) return c.json({ error: "Invalid vote" }, 400);
const proposal = await sql.unsafe("SELECT * FROM rvote.proposals WHERE id = $1", [id]); // Find proposal
if (proposal.length === 0) return c.json({ error: "Proposal not found" }, 404); const allDocs = listAllProposalDocs();
if (proposal[0].status !== "VOTING") return c.json({ error: "Proposal not in voting phase" }, 400); const match = allDocs.find((d) => d.doc.proposal.id === id);
if (!match) return c.json({ error: "Proposal not found" }, 404);
if (match.doc.proposal.status !== "VOTING") return c.json({ error: "Proposal not in voting phase" }, 400);
const user = await getOrCreateUser(claims.sub, claims.username); const userId = claims.sub;
await sql.unsafe( const now = Date.now();
`INSERT INTO rvote.final_votes (user_id, proposal_id, vote)
VALUES ($1, $2, $3)
ON CONFLICT (user_id, proposal_id) DO UPDATE SET vote = $3`,
[user.id, id, vote]
);
// Update counts // Upsert final vote (keyed by userId)
const counts = await sql.unsafe( _syncServer!.changeDoc<ProposalDoc>(match.docId, 'cast final vote', (d) => {
`SELECT vote, count(*) as cnt FROM rvote.final_votes WHERE proposal_id = $1 GROUP BY vote`, d.finalVotes[userId] = {
[id] id: d.finalVotes[userId]?.id || newId(),
); userId,
proposalId: id,
vote: vote as 'YES' | 'NO' | 'ABSTAIN',
createdAt: now,
};
});
// Tally final votes
const updatedDoc = _syncServer!.getDoc<ProposalDoc>(match.docId)!;
const tally: Record<string, number> = { YES: 0, NO: 0, ABSTAIN: 0 }; const tally: Record<string, number> = { YES: 0, NO: 0, ABSTAIN: 0 };
for (const row of counts) tally[row.vote] = parseInt(row.cnt); for (const fv of Object.values(updatedDoc.finalVotes)) {
tally[fv.vote] = (tally[fv.vote] || 0) + 1;
}
await sql.unsafe( _syncServer!.changeDoc<ProposalDoc>(match.docId, 'update final tally', (d) => {
"UPDATE rvote.proposals SET final_yes = $1, final_no = $2, final_abstain = $3, updated_at = NOW() WHERE id = $4", d.proposal.finalYes = tally.YES;
[tally.YES, tally.NO, tally.ABSTAIN, id] d.proposal.finalNo = tally.NO;
); d.proposal.finalAbstain = tally.ABSTAIN;
d.proposal.updatedAt = Date.now();
});
return c.json({ ok: true, tally }); return c.json({ ok: true, tally });
}); });
@ -362,8 +531,7 @@ export const voteModule: RSpaceModule = {
landingPage: renderLanding, landingPage: renderLanding,
async onInit(ctx) { async onInit(ctx) {
_syncServer = ctx.syncServer; _syncServer = ctx.syncServer;
await initDB(); seedDemoIfEmpty();
await seedDemoIfEmpty();
}, },
feeds: [ feeds: [
{ {

View File

@ -3,51 +3,86 @@
* *
* Multi-tenant collaborative workspace with drag-and-drop kanban, * Multi-tenant collaborative workspace with drag-and-drop kanban,
* configurable statuses, and activity logging. * configurable statuses, and activity logging.
*
* All persistence uses Automerge documents via SyncServer
* no PostgreSQL dependency.
*/ */
import { Hono } from "hono"; import { Hono } from "hono";
import { readFileSync } from "node:fs";
import { resolve } from "node:path";
import * as Automerge from '@automerge/automerge'; import * as Automerge from '@automerge/automerge';
import { sql } from "../../shared/db/pool";
import { renderShell } from "../../server/shell"; import { renderShell } from "../../server/shell";
import { getModuleInfoList } from "../../shared/module"; import { getModuleInfoList } from "../../shared/module";
import type { RSpaceModule, SpaceLifecycleContext } from "../../shared/module"; import type { RSpaceModule, SpaceLifecycleContext } from "../../shared/module";
import { verifyEncryptIDToken, extractToken } from "@encryptid/sdk/server"; import { verifyEncryptIDToken, extractToken } from "@encryptid/sdk/server";
import { renderLanding } from "./landing"; import { renderLanding } from "./landing";
import type { SyncServer } from '../../server/local-first/sync-server'; import type { SyncServer } from '../../server/local-first/sync-server';
import { boardSchema, boardDocId } from './schemas'; import { boardSchema, boardDocId, createTaskItem } from './schemas';
import type { BoardDoc, TaskItem } from './schemas'; import type { BoardDoc, TaskItem, BoardMeta } from './schemas';
const routes = new Hono(); const routes = new Hono();
// ── DB initialization ── // ── Local-first helpers ──
const SCHEMA_SQL = readFileSync(resolve(import.meta.dir, "db/schema.sql"), "utf-8"); let _syncServer: SyncServer | null = null;
async function initDB() { /**
try { * Lazily create the board Automerge doc if it doesn't exist yet.
await sql.unsafe(SCHEMA_SQL); * Returns the current (immutable) doc snapshot.
console.log("[Work] DB schema initialized"); */
} catch (e) { function ensureDoc(space: string, boardId?: string): BoardDoc {
console.error("[Work] DB init error:", e); const id = boardDocId(space, boardId ?? space);
let doc = _syncServer!.getDoc<BoardDoc>(id);
if (!doc) {
doc = Automerge.change(Automerge.init<BoardDoc>(), 'init board', (d) => {
const init = boardSchema.init();
d.meta = init.meta;
d.meta.spaceSlug = space;
d.board = init.board;
d.board.id = boardId ?? space;
d.board.slug = boardId ?? space;
d.board.name = space;
d.tasks = {};
});
_syncServer!.setDoc(id, doc);
} }
return doc;
} }
async function seedDemoIfEmpty() { /**
try { * Get all board doc IDs for a given space.
const count = await sql.unsafe("SELECT count(*)::int as cnt FROM rwork.spaces"); */
if (parseInt(count[0].cnt) > 0) return; function getBoardDocIds(space: string): string[] {
return _syncServer!.getDocIds().filter((id) => id.startsWith(`${space}:work:boards:`));
}
// Create workspace /**
const space = await sql.unsafe( * Seed demo data if no boards exist yet.
`INSERT INTO rwork.spaces (name, slug, description, icon, owner_did) */
VALUES ('rSpace Development', 'rspace-dev', 'Building the cosmolocal r* ecosystem', '🚀', 'did:demo:seed') function seedDemoIfEmpty() {
RETURNING id` // Check if any work boards exist at all
); const allWorkDocs = _syncServer!.getDocIds().filter((id) => id.includes(':work:boards:'));
const spaceId = space[0].id; if (allWorkDocs.length > 0) return;
// Seed tasks across all kanban columns const space = 'rspace-dev';
const tasks = [ const docId = boardDocId(space, space);
const doc = Automerge.change(Automerge.init<BoardDoc>(), 'seed demo board', (d) => {
const now = Date.now();
d.meta = { module: 'work', collection: 'boards', version: 1, spaceSlug: space, createdAt: now };
d.board = {
id: space,
name: 'rSpace Development',
slug: space,
description: 'Building the cosmolocal r* ecosystem',
icon: null,
ownerDid: 'did:demo:seed',
statuses: ['TODO', 'IN_PROGRESS', 'REVIEW', 'DONE'],
labels: [],
createdAt: now,
updatedAt: now,
};
d.tasks = {};
const seedTasks: Array<{ title: string; status: string; priority: string; labels: string[]; sort: number }> = [
{ title: "Add dark mode toggle to settings page", status: "TODO", priority: "MEDIUM", labels: ["feature"], sort: 0 }, { title: "Add dark mode toggle to settings page", status: "TODO", priority: "MEDIUM", labels: ["feature"], sort: 0 },
{ title: "Write API documentation for rPubs endpoints", status: "TODO", priority: "LOW", labels: ["docs"], sort: 1 }, { title: "Write API documentation for rPubs endpoints", status: "TODO", priority: "LOW", labels: ["docs"], sort: 1 },
{ title: "Investigate slow PDF generation on large documents", status: "TODO", priority: "HIGH", labels: ["bug"], sort: 2 }, { title: "Investigate slow PDF generation on large documents", status: "TODO", priority: "HIGH", labels: ["bug"], sort: 2 },
@ -61,80 +96,52 @@ async function seedDemoIfEmpty() {
{ title: "Migrate email from Resend to self-hosted Mailcow", status: "DONE", priority: "MEDIUM", labels: ["chore"], sort: 3 }, { title: "Migrate email from Resend to self-hosted Mailcow", status: "DONE", priority: "MEDIUM", labels: ["chore"], sort: 3 },
]; ];
for (const t of tasks) { for (const t of seedTasks) {
await sql.unsafe( const taskId = crypto.randomUUID();
`INSERT INTO rwork.tasks (space_id, title, status, priority, labels, sort_order) d.tasks[taskId] = createTaskItem(taskId, space, t.title, {
VALUES ($1, $2, $3, $4, $5, $6)`, status: t.status,
[spaceId, t.title, t.status, t.priority, t.labels, t.sort] priority: t.priority,
); labels: t.labels,
} sortOrder: t.sort,
createdBy: 'did:demo:seed',
console.log("[Work] Demo data seeded: 1 workspace, 11 tasks"); });
} catch (e) {
console.error("[Work] Seed error:", e);
}
}
// ── Local-first helpers ──
let _syncServer: SyncServer | null = null;
function isLocalFirst(space: string): boolean {
if (!_syncServer) return false;
return _syncServer.getDocIds().some((id) => id.startsWith(`${space}:work:`));
}
function writeTaskToAutomerge(space: string, boardId: string, taskId: string, data: Partial<TaskItem>) {
if (!_syncServer) return;
const docId = boardDocId(space, boardId);
const existing = _syncServer.getDoc<BoardDoc>(docId);
if (!existing) return;
_syncServer.changeDoc<BoardDoc>(docId, `Update task ${taskId}`, (d) => {
if (!d.tasks[taskId]) {
d.tasks[taskId] = {
id: taskId,
spaceId: boardId,
title: '',
description: '',
status: 'TODO',
priority: null,
labels: [],
assigneeId: null,
createdBy: null,
sortOrder: 0,
createdAt: Date.now(),
updatedAt: Date.now(),
...data,
} as TaskItem;
} else {
Object.assign(d.tasks[taskId], data);
d.tasks[taskId].updatedAt = Date.now();
} }
}); });
_syncServer!.setDoc(docId, doc);
console.log("[Work] Demo data seeded: 1 board, 11 tasks");
} }
function deleteTaskFromAutomerge(space: string, boardId: string, taskId: string) { // ── API: Spaces (Boards) ──
if (!_syncServer) return;
const docId = boardDocId(space, boardId);
_syncServer.changeDoc<BoardDoc>(docId, `Delete task ${taskId}`, (d) => {
delete d.tasks[taskId];
});
}
// ── API: Spaces ── // GET /api/spaces — list workspaces (boards)
// GET /api/spaces — list workspaces
routes.get("/api/spaces", async (c) => { routes.get("/api/spaces", async (c) => {
const rows = await sql.unsafe( const allIds = _syncServer!.getDocIds().filter((id) => id.includes(':work:boards:'));
`SELECT s.*, count(DISTINCT sm.id)::int as member_count, count(DISTINCT t.id)::int as task_count const rows = allIds.map((docId) => {
FROM rwork.spaces s const doc = _syncServer!.getDoc<BoardDoc>(docId);
LEFT JOIN rwork.space_members sm ON sm.space_id = s.id if (!doc) return null;
LEFT JOIN rwork.tasks t ON t.space_id = s.id const taskCount = Object.keys(doc.tasks).length;
GROUP BY s.id ORDER BY s.created_at DESC` return {
); id: doc.board.id,
name: doc.board.name,
slug: doc.board.slug,
description: doc.board.description,
icon: doc.board.icon,
owner_did: doc.board.ownerDid,
statuses: doc.board.statuses,
created_at: new Date(doc.board.createdAt).toISOString(),
updated_at: new Date(doc.board.updatedAt).toISOString(),
member_count: 0,
task_count: taskCount,
};
}).filter(Boolean);
// Sort by created_at DESC
rows.sort((a, b) => (b!.created_at > a!.created_at ? 1 : -1));
return c.json(rows); return c.json(rows);
}); });
// POST /api/spaces — create workspace // POST /api/spaces — create workspace (board)
routes.post("/api/spaces", async (c) => { routes.post("/api/spaces", async (c) => {
const token = extractToken(c.req.raw.headers); const token = extractToken(c.req.raw.headers);
if (!token) return c.json({ error: "Authentication required" }, 401); if (!token) return c.json({ error: "Authentication required" }, 401);
@ -146,20 +153,63 @@ routes.post("/api/spaces", async (c) => {
if (!name?.trim()) return c.json({ error: "Name required" }, 400); if (!name?.trim()) return c.json({ error: "Name required" }, 400);
const slug = name.trim().toLowerCase().replace(/[^a-z0-9]+/g, "-").replace(/^-|-$/g, ""); const slug = name.trim().toLowerCase().replace(/[^a-z0-9]+/g, "-").replace(/^-|-$/g, "");
const rows = await sql.unsafe( const docId = boardDocId(slug, slug);
`INSERT INTO rwork.spaces (name, slug, description, icon, created_by)
VALUES ($1, $2, $3, $4, $5) RETURNING *`, // Check if board already exists
[name.trim(), slug, description || null, icon || null, claims.sub] const existing = _syncServer!.getDoc<BoardDoc>(docId);
); if (existing) return c.json({ error: "Space with this slug already exists" }, 409);
return c.json(rows[0], 201);
const now = Date.now();
const doc = Automerge.change(Automerge.init<BoardDoc>(), 'create board', (d) => {
d.meta = { module: 'work', collection: 'boards', version: 1, spaceSlug: slug, createdAt: now };
d.board = {
id: slug,
name: name.trim(),
slug,
description: description || '',
icon: icon || null,
ownerDid: claims.sub,
statuses: ['TODO', 'IN_PROGRESS', 'DONE'],
labels: [],
createdAt: now,
updatedAt: now,
};
d.tasks = {};
});
_syncServer!.setDoc(docId, doc);
return c.json({
id: slug,
name: name.trim(),
slug,
description: description || null,
icon: icon || null,
owner_did: claims.sub,
statuses: ['TODO', 'IN_PROGRESS', 'DONE'],
created_at: new Date(now).toISOString(),
updated_at: new Date(now).toISOString(),
}, 201);
}); });
// GET /api/spaces/:slug — workspace detail // GET /api/spaces/:slug — workspace detail
routes.get("/api/spaces/:slug", async (c) => { routes.get("/api/spaces/:slug", async (c) => {
const slug = c.req.param("slug"); const slug = c.req.param("slug");
const rows = await sql.unsafe("SELECT * FROM rwork.spaces WHERE slug = $1", [slug]); const docId = boardDocId(slug, slug);
if (rows.length === 0) return c.json({ error: "Space not found" }, 404); const doc = _syncServer!.getDoc<BoardDoc>(docId);
return c.json(rows[0]); if (!doc) return c.json({ error: "Space not found" }, 404);
return c.json({
id: doc.board.id,
name: doc.board.name,
slug: doc.board.slug,
description: doc.board.description,
icon: doc.board.icon,
owner_did: doc.board.ownerDid,
statuses: doc.board.statuses,
labels: doc.board.labels,
created_at: new Date(doc.board.createdAt).toISOString(),
updated_at: new Date(doc.board.updatedAt).toISOString(),
});
}); });
// ── API: Tasks ── // ── API: Tasks ──
@ -167,15 +217,36 @@ routes.get("/api/spaces/:slug", async (c) => {
// GET /api/spaces/:slug/tasks — list tasks in workspace // GET /api/spaces/:slug/tasks — list tasks in workspace
routes.get("/api/spaces/:slug/tasks", async (c) => { routes.get("/api/spaces/:slug/tasks", async (c) => {
const slug = c.req.param("slug"); const slug = c.req.param("slug");
const rows = await sql.unsafe( const doc = ensureDoc(slug);
`SELECT t.*, u.username as assignee_name
FROM rwork.tasks t const tasks = Object.values(doc.tasks).map((t) => ({
JOIN rwork.spaces s ON s.id = t.space_id AND s.slug = $1 id: t.id,
LEFT JOIN rwork.users u ON u.id = t.assignee_id space_id: t.spaceId,
ORDER BY t.status, t.sort_order, t.created_at DESC`, title: t.title,
[slug] description: t.description,
); status: t.status,
return c.json(rows); priority: t.priority,
labels: t.labels,
assignee_id: t.assigneeId,
assignee_name: null,
created_by: t.createdBy,
sort_order: t.sortOrder,
created_at: new Date(t.createdAt).toISOString(),
updated_at: new Date(t.updatedAt).toISOString(),
}));
// Sort by status, then sort_order, then created_at DESC
const statusOrder: Record<string, number> = {};
doc.board.statuses.forEach((s, i) => { statusOrder[s] = i; });
tasks.sort((a, b) => {
const sa = statusOrder[a.status] ?? 999;
const sb = statusOrder[b.status] ?? 999;
if (sa !== sb) return sa - sb;
if (a.sort_order !== b.sort_order) return a.sort_order - b.sort_order;
return b.created_at > a.created_at ? 1 : -1;
});
return c.json(tasks);
}); });
// POST /api/spaces/:slug/tasks — create task // POST /api/spaces/:slug/tasks — create task
@ -190,16 +261,36 @@ routes.post("/api/spaces/:slug/tasks", async (c) => {
const { title, description, status, priority, labels } = body; const { title, description, status, priority, labels } = body;
if (!title?.trim()) return c.json({ error: "Title required" }, 400); if (!title?.trim()) return c.json({ error: "Title required" }, 400);
const space = await sql.unsafe("SELECT id, statuses FROM rwork.spaces WHERE slug = $1", [slug]); const doc = ensureDoc(slug);
if (space.length === 0) return c.json({ error: "Space not found" }, 404); const taskStatus = status || doc.board.statuses[0] || "TODO";
const taskId = crypto.randomUUID();
const now = Date.now();
const taskStatus = status || space[0].statuses?.[0] || "TODO"; const docId = boardDocId(slug, slug);
const rows = await sql.unsafe( _syncServer!.changeDoc<BoardDoc>(docId, `Create task ${taskId}`, (d) => {
`INSERT INTO rwork.tasks (space_id, title, description, status, priority, labels, created_by) d.tasks[taskId] = createTaskItem(taskId, slug, title.trim(), {
VALUES ($1, $2, $3, $4, $5, $6, $7) RETURNING *`, description: description || '',
[space[0].id, title.trim(), description || null, taskStatus, priority || "MEDIUM", labels || [], claims.sub] status: taskStatus,
); priority: priority || 'MEDIUM',
return c.json(rows[0], 201); labels: labels || [],
createdBy: claims.sub,
});
});
return c.json({
id: taskId,
space_id: slug,
title: title.trim(),
description: description || null,
status: taskStatus,
priority: priority || "MEDIUM",
labels: labels || [],
assignee_id: null,
created_by: claims.sub,
sort_order: 0,
created_at: new Date(now).toISOString(),
updated_at: new Date(now).toISOString(),
}, 201);
}); });
// PATCH /api/tasks/:id — update task (status change, assignment, etc.) // PATCH /api/tasks/:id — update task (status change, assignment, etc.)
@ -215,51 +306,88 @@ routes.patch("/api/tasks/:id", async (c) => {
const body = await c.req.json(); const body = await c.req.json();
const { title, description, status, priority, labels, sort_order, assignee_id } = body; const { title, description, status, priority, labels, sort_order, assignee_id } = body;
const fields: string[] = []; // Check that at least one field is being updated
const params: any[] = []; if (title === undefined && description === undefined && status === undefined &&
let idx = 1; priority === undefined && labels === undefined && sort_order === undefined &&
assignee_id === undefined) {
return c.json({ error: "No fields to update" }, 400);
}
if (title !== undefined) { fields.push(`title = $${idx}`); params.push(title); idx++; } // Find which board doc contains this task
if (description !== undefined) { fields.push(`description = $${idx}`); params.push(description); idx++; } const allBoardIds = _syncServer!.getDocIds().filter((docId) => docId.includes(':work:boards:'));
if (status !== undefined) { fields.push(`status = $${idx}`); params.push(status); idx++; } let targetDocId: string | null = null;
if (priority !== undefined) { fields.push(`priority = $${idx}`); params.push(priority); idx++; } for (const docId of allBoardIds) {
if (labels !== undefined) { fields.push(`labels = $${idx}`); params.push(labels); idx++; } const doc = _syncServer!.getDoc<BoardDoc>(docId);
if (sort_order !== undefined) { fields.push(`sort_order = $${idx}`); params.push(sort_order); idx++; } if (doc && doc.tasks[id]) {
if (assignee_id !== undefined) { fields.push(`assignee_id = $${idx}`); params.push(assignee_id || null); idx++; } targetDocId = docId;
break;
}
}
if (!targetDocId) return c.json({ error: "Task not found" }, 404);
if (fields.length === 0) return c.json({ error: "No fields to update" }, 400); _syncServer!.changeDoc<BoardDoc>(targetDocId, `Update task ${id}`, (d) => {
fields.push("updated_at = NOW()"); const task = d.tasks[id];
params.push(id); if (!task) return;
if (title !== undefined) task.title = title;
if (description !== undefined) task.description = description;
if (status !== undefined) task.status = status;
if (priority !== undefined) task.priority = priority;
if (labels !== undefined) task.labels = labels;
if (sort_order !== undefined) task.sortOrder = sort_order;
if (assignee_id !== undefined) task.assigneeId = assignee_id || null;
task.updatedAt = Date.now();
});
const rows = await sql.unsafe( // Return the updated task
`UPDATE rwork.tasks SET ${fields.join(", ")} WHERE id = $${idx} RETURNING *`, const updatedDoc = _syncServer!.getDoc<BoardDoc>(targetDocId)!;
params const task = updatedDoc.tasks[id];
); return c.json({
if (rows.length === 0) return c.json({ error: "Task not found" }, 404); id: task.id,
return c.json(rows[0]); space_id: task.spaceId,
title: task.title,
description: task.description,
status: task.status,
priority: task.priority,
labels: task.labels,
assignee_id: task.assigneeId,
created_by: task.createdBy,
sort_order: task.sortOrder,
created_at: new Date(task.createdAt).toISOString(),
updated_at: new Date(task.updatedAt).toISOString(),
});
}); });
// DELETE /api/tasks/:id // DELETE /api/tasks/:id
routes.delete("/api/tasks/:id", async (c) => { routes.delete("/api/tasks/:id", async (c) => {
const result = await sql.unsafe("DELETE FROM rwork.tasks WHERE id = $1 RETURNING id", [c.req.param("id")]); const id = c.req.param("id");
if (result.length === 0) return c.json({ error: "Task not found" }, 404);
// Find which board doc contains this task
const allBoardIds = _syncServer!.getDocIds().filter((docId) => docId.includes(':work:boards:'));
let targetDocId: string | null = null;
for (const docId of allBoardIds) {
const doc = _syncServer!.getDoc<BoardDoc>(docId);
if (doc && doc.tasks[id]) {
targetDocId = docId;
break;
}
}
if (!targetDocId) return c.json({ error: "Task not found" }, 404);
_syncServer!.changeDoc<BoardDoc>(targetDocId, `Delete task ${id}`, (d) => {
delete d.tasks[id];
});
return c.json({ ok: true }); return c.json({ ok: true });
}); });
// ── API: Activity ── // ── API: Activity ──
// GET /api/spaces/:slug/activity — recent activity // GET /api/spaces/:slug/activity — recent activity
// With Automerge, activity is tracked via document change history.
// Return an empty array for now; real activity can be derived from
// Automerge.getHistory() or a dedicated activity doc in the future.
routes.get("/api/spaces/:slug/activity", async (c) => { routes.get("/api/spaces/:slug/activity", async (c) => {
const slug = c.req.param("slug"); return c.json([]);
const rows = await sql.unsafe(
`SELECT a.*, u.username
FROM rwork.activity_log a
JOIN rwork.spaces s ON s.id = a.space_id AND s.slug = $1
LEFT JOIN rwork.users u ON u.id = a.user_id
ORDER BY a.created_at DESC LIMIT 50`,
[slug]
);
return c.json(rows);
}); });
// ── Page route ── // ── Page route ──
@ -289,8 +417,7 @@ export const workModule: RSpaceModule = {
landingPage: renderLanding, landingPage: renderLanding,
async onInit(ctx) { async onInit(ctx) {
_syncServer = ctx.syncServer; _syncServer = ctx.syncServer;
await initDB(); seedDemoIfEmpty();
await seedDemoIfEmpty();
}, },
async onSpaceCreate(ctx: SpaceLifecycleContext) { async onSpaceCreate(ctx: SpaceLifecycleContext) {
if (!_syncServer) return; if (!_syncServer) return;

View File

@ -0,0 +1,258 @@
/**
* Test: Automerge round-trip create, save, load, sync, verify.
*
* Exercises the full local-first stack:
* 1. SyncServer (in-memory doc management)
* 2. Doc persistence (save to disk + load from disk)
* 3. Schema init factories (NotebookDoc, BoardDoc, etc.)
* 4. Doc change + onDocChange callback
*
* Usage: bun run scripts/test-automerge-roundtrip.ts
*/
// Must set env BEFORE imports (doc-persistence reads it at module level)
// NOTE(review): ES-module imports are hoisted; this ordering works when the
// script is executed by Bun as stated in the usage line — confirm if ported
// to plain Node ESM, where the hoisted import may run before this assignment.
const TEST_DIR = '/tmp/rspace-automerge-test';
process.env.DOCS_STORAGE_DIR = TEST_DIR;
import * as Automerge from '@automerge/automerge';
import { existsSync, mkdirSync, readdirSync, rmSync } from 'node:fs';
import { mkdir, readFile, writeFile } from 'node:fs/promises';
import { dirname, resolve } from 'node:path';
import { SyncServer } from '../server/local-first/sync-server';
import { docIdToPath, saveDoc, loadAllDocs } from '../server/local-first/doc-persistence';
// Cleanup from previous runs — start from an empty storage dir so the
// doc counts asserted below are deterministic.
if (existsSync(TEST_DIR)) rmSync(TEST_DIR, { recursive: true });
mkdirSync(TEST_DIR, { recursive: true });
// Global pass/fail tally; updated by assert() and reported in the summary,
// which also derives the process exit code from `failed`.
let passed = 0;
let failed = 0;
/**
 * Record one check result.
 *
 * Logs the label with a distinct pass/fail marker and bumps the module-level
 * `passed`/`failed` counters used by the summary and the exit code.
 *
 * Fix: the previous version printed the identical string on both branches
 * (`${label}` with no marker), making passes and failures indistinguishable
 * in the output.
 */
function assert(condition: boolean, label: string) {
  if (condition) {
    console.log(`✓ ${label}`);
    passed++;
  } else {
    console.error(`✗ ${label}`);
    failed++;
  }
}
// ─── Test 1: docIdToPath mapping ──────────────────────────
console.log('\n── Test 1: docId ↔ path mapping ──');
{
  // A docId is colon-separated (space:module:collection:id) and maps to a
  // nested `.automerge` file path under the storage dir.
  const notesPath = docIdToPath('demo:notes:notebooks:abc');
  assert(notesPath.endsWith('/demo/notes/notebooks/abc.automerge'), `docIdToPath → ${notesPath}`);

  const boardPath = docIdToPath('myspace:work:boards:board-1');
  assert(boardPath.endsWith('/myspace/work/boards/board-1.automerge'), `docIdToPath boards → ${boardPath}`);

  // IDs with too few segments must be rejected with a throw.
  let rejected = false;
  try {
    docIdToPath('invalid');
  } catch {
    rejected = true;
  }
  assert(rejected, 'docIdToPath rejects invalid docId (< 3 parts)');
}
// ─── Test 2: SyncServer in-memory CRUD ─────────────────────
console.log('\n── Test 2: SyncServer in-memory CRUD ──');
{
  interface TestDoc { title: string; items: Record<string, { text: string }> }
  const DOC_ID = 'test:notes:notebooks:nb1';

  // Record every onDocChange invocation so the callback wiring can be verified.
  const changeLog: string[] = [];
  const server = new SyncServer({
    participantMode: true,
    onDocChange: (docId) => changeLog.push(docId),
  });

  // Create and register a doc.
  const initial = Automerge.change(Automerge.init<TestDoc>(), 'init', (d) => {
    d.title = 'Test Notebook';
    d.items = {};
  });
  server.setDoc(DOC_ID, initial);
  assert(server.getDocIds().includes(DOC_ID), 'setDoc registers docId');

  // Read it back.
  const fetched = server.getDoc<TestDoc>(DOC_ID);
  assert(fetched !== undefined, 'getDoc returns the doc');
  assert(fetched!.title === 'Test Notebook', 'doc content preserved');

  // Mutate through changeDoc.
  const mutated = server.changeDoc<TestDoc>(DOC_ID, 'add item', (d) => {
    d.items['item-1'] = { text: 'Hello local-first' };
  });
  assert(mutated !== null, 'changeDoc returns updated doc');
  assert(mutated!.items['item-1'].text === 'Hello local-first', 'changeDoc content correct');
  assert(changeLog.length === 1, 'onDocChange callback fired');

  // The server's own copy must reflect the change as well.
  const roundTripped = server.getDoc<TestDoc>(DOC_ID);
  assert(roundTripped!.items['item-1'].text === 'Hello local-first', 'server copy updated after changeDoc');
}
// ─── Test 3: Relay mode ────────────────────────────────────
console.log('\n── Test 3: Relay mode (encrypted spaces) ──');
{
  const relayServer = new SyncServer({ participantMode: true });

  // Nothing is relay-only until explicitly flagged.
  assert(!relayServer.isRelayOnly('demo:notes:notebooks:x'), 'not relay by default');

  // Flagging a space makes the bare name AND every docId under it relay-only.
  relayServer.setRelayOnly('encrypted-space', true);
  assert(relayServer.isRelayOnly('encrypted-space'), 'exact match → relay');
  assert(relayServer.isRelayOnly('encrypted-space:notes:notebooks:x'), 'prefix match → relay');
  assert(!relayServer.isRelayOnly('other-space:notes:notebooks:x'), 'other space → not relay');

  // Unflagging reverts the prefix behaviour.
  relayServer.setRelayOnly('encrypted-space', false);
  assert(!relayServer.isRelayOnly('encrypted-space:notes:notebooks:x'), 'after removal → not relay');
}
// ─── Test 4: Disk persistence round-trip ───────────────────
// Note: We test Automerge binary serialization directly rather than using
// doc-persistence (which reads DOCS_STORAGE_DIR at module load time).
console.log('\n── Test 4: Disk persistence round-trip ──');
await (async () => {
  interface NoteDoc { title: string; content: string }

  // Create a doc with a single change.
  let doc = Automerge.init<NoteDoc>();
  doc = Automerge.change(doc, 'init', (d) => {
    d.title = 'Persistent Note';
    d.content = 'This should survive a restart';
  });

  // Serialize to the compact Automerge binary format.
  const binary = Automerge.save(doc);
  assert(binary.byteLength > 0, `Automerge.save produces ${binary.byteLength} bytes`);

  // Write to the temp dir. Uses the top-level fs/promises + path imports;
  // the previous version redundantly re-imported these via dynamic import().
  const filePath = resolve(TEST_DIR, 'roundtrip/notes/notebooks/persist-1.automerge');
  await mkdir(dirname(filePath), { recursive: true });
  await writeFile(filePath, binary);
  assert(existsSync(filePath), `file written to disk`);

  // Read back and deserialize.
  const rawBuf = await readFile(filePath);
  const reloaded = Automerge.load<NoteDoc>(new Uint8Array(rawBuf));
  assert(reloaded.title === 'Persistent Note', 'title preserved after load');
  assert(reloaded.content === 'This should survive a restart', 'content preserved after load');

  // One change so far → one history entry.
  const history = Automerge.getHistory(reloaded);
  assert(history.length === 1, `history has ${history.length} change(s)`);

  // Test: modify, save again, load again.
  const doc2 = Automerge.change(reloaded, 'update', (d) => {
    d.content = 'Updated content after reload';
  });
  await writeFile(filePath, Automerge.save(doc2));
  const rawBuf2 = await readFile(filePath);
  const reloaded2 = Automerge.load<NoteDoc>(new Uint8Array(rawBuf2));
  assert(reloaded2.content === 'Updated content after reload', 'content updated after second save/load');
  assert(Automerge.getHistory(reloaded2).length === 2, 'history has 2 changes after update');

  // Hand the on-disk doc to a fresh SyncServer and read it back.
  // Re-uses rawBuf2 instead of performing a third identical disk read.
  const server2 = new SyncServer({ participantMode: true });
  server2.setDoc('roundtrip:notes:notebooks:persist-1', Automerge.load<NoteDoc>(new Uint8Array(rawBuf2)));
  const fromServer = server2.getDoc<NoteDoc>('roundtrip:notes:notebooks:persist-1');
  assert(fromServer!.title === 'Persistent Note', 'SyncServer holds correct doc from disk');
})();
// ─── Test 5: Multiple docs + listDocs ──────────────────────
console.log('\n── Test 5: Multiple docs + listing ──');
await (async () => {
  const listServer = new SyncServer({ participantMode: true });
  const seedIds = ['space-a:work:boards:b1', 'space-a:work:boards:b2', 'space-b:cal:events'];

  // Register one labelled doc per id.
  for (const id of seedIds) {
    const labelled = Automerge.change(
      Automerge.init<{ label: string }>(),
      'init',
      (d) => { d.label = id; },
    );
    listServer.setDoc(id, labelled);
  }

  const ids = listServer.getDocIds();
  assert(ids.length === 3, `3 docs registered (got ${ids.length})`);
  assert(ids.includes('space-a:work:boards:b1'), 'board b1 listed');
  assert(ids.includes('space-b:cal:events'), 'cal events listed');
})();
// ─── Test 6: Peer subscribe + sync message flow ────────────
console.log('\n── Test 6: Peer subscribe + sync flow ──');
{
interface SimpleDoc { value: number }
const sent: Array<{ peerId: string; msg: string }> = [];
const server = new SyncServer({ participantMode: true });
// Create a doc on the server
let doc = Automerge.init<SimpleDoc>();
doc = Automerge.change(doc, 'set value', (d) => { d.value = 42; });
server.setDoc('sync-test:data:metrics', doc);
// Add a mock peer
const mockWs = {
send: (data: string) => sent.push({ peerId: 'peer-1', msg: data }),
readyState: 1,
};
server.addPeer('peer-1', mockWs);
// Subscribe peer to the doc
server.handleMessage('peer-1', JSON.stringify({
type: 'subscribe',
docIds: ['sync-test:data:metrics'],
}));
assert(server.getDocSubscribers('sync-test:data:metrics').includes('peer-1'), 'peer subscribed');
assert(sent.length > 0, `sync message sent to peer (${sent.length} message(s))`);
// Verify the sync message is valid JSON with type 'sync'
const firstMsg = JSON.parse(sent[0].msg);
assert(firstMsg.type === 'sync', `message type is 'sync'`);
assert(firstMsg.docId === 'sync-test:data:metrics', 'correct docId in sync message');
assert(Array.isArray(firstMsg.data), 'sync data is array (Uint8Array serialized)');
// Clean up peer
server.removePeer('peer-1');
assert(!server.getPeerIds().includes('peer-1'), 'peer removed');
assert(server.getDocSubscribers('sync-test:data:metrics').length === 0, 'subscriber cleaned up');
}
// ─── Test 7: Ping/pong ────────────────────────────────────
console.log('\n── Test 7: Ping/pong ──');
{
  const replies: string[] = [];
  const pingServer = new SyncServer({ participantMode: true });

  // Fake WebSocket capturing outbound frames.
  pingServer.addPeer('ping-peer', {
    send: (data: string) => replies.push(data),
    readyState: 1,
  });

  // A ping must elicit exactly one pong.
  pingServer.handleMessage('ping-peer', JSON.stringify({ type: 'ping' }));
  assert(replies.length === 1, 'pong sent');
  assert(JSON.parse(replies[0]).type === 'pong', 'response is pong');

  pingServer.removePeer('ping-peer');
}
// ─── Summary ───────────────────────────────────────────────
const rule = '═'.repeat(50);
console.log(`\n${rule}`);
console.log(`  ${passed} passed, ${failed} failed`);
console.log(`${rule}\n`);

// Remove the temp storage dir, then exit non-zero if any check failed.
rmSync(TEST_DIR, { recursive: true });
process.exit(failed > 0 ? 1 : 0);