Merge branch 'dev'

This commit is contained in:
Jeff Emmett 2026-03-02 15:48:13 -08:00
commit 125d3a3c40
23 changed files with 3771 additions and 1947 deletions

View File

@ -3,13 +3,16 @@
*
* Ported from rbooks-online (Next.js) to Hono routes.
* Routes are relative to mount point (/:space/books in unified, / in standalone).
*
* Storage: Automerge documents via SyncServer (one doc per space).
* PDF files stay on the filesystem — only metadata lives in Automerge.
*/
import { Hono } from "hono";
import { resolve } from "node:path";
import { mkdir, readFile } from "node:fs/promises";
import { mkdir } from "node:fs/promises";
import { randomUUID } from "node:crypto";
import { sql } from "../../shared/db/pool";
import * as Automerge from "@automerge/automerge";
import { renderShell } from "../../server/shell";
import { getModuleInfoList } from "../../shared/module";
import type { RSpaceModule, SpaceLifecycleContext } from "../../shared/module";
@ -19,37 +22,68 @@ import {
extractToken,
} from "@encryptid/sdk/server";
import type { SyncServer } from '../../server/local-first/sync-server';
import { booksCatalogSchema } from './schemas';
import {
booksCatalogSchema,
booksCatalogDocId,
type BooksCatalogDoc,
type BookItem,
} from './schemas';
let _syncServer: SyncServer | null = null;
const BOOKS_DIR = process.env.BOOKS_DIR || "/data/books";
// ── Types ──
// ── Helpers ──
export interface BookRow {
id: string;
slug: string;
title: string;
author: string | null;
description: string | null;
pdf_path: string;
pdf_size_bytes: number;
page_count: number;
tags: string[];
license: string;
cover_color: string;
contributor_id: string | null;
contributor_name: string | null;
status: string;
featured: boolean;
view_count: number;
download_count: number;
created_at: string;
updated_at: string;
/**
 * Lazily create the books-catalog Automerge doc for a space if it does
 * not exist yet, and return the current (immutable) doc snapshot.
 *
 * NOTE(review): assumes onInit has already assigned _syncServer — the
 * non-null assertion throws otherwise; confirm route/init ordering.
 */
function ensureDoc(space: string): BooksCatalogDoc {
  const docId = booksCatalogDocId(space);
  let doc = _syncServer!.getDoc<BooksCatalogDoc>(docId);
  if (!doc) {
    // First access for this space: build an empty catalog from the
    // schema's init shape and stamp it with the owning space slug.
    doc = Automerge.change(Automerge.init<BooksCatalogDoc>(), 'init', (d) => {
      const init = booksCatalogSchema.init();
      d.meta = init.meta;
      d.meta.spaceSlug = space;
      d.items = {};
    });
    _syncServer!.setDoc(docId, doc);
  }
  return doc;
}
// ── Helpers ──
/**
 * Resolve a book by id (direct map key) or by slug (linear scan).
 * Returns undefined when nothing matches.
 */
function findBook(doc: BooksCatalogDoc, idOrSlug: string): BookItem | undefined {
  // Fast path: items are keyed by book id.
  const byId = doc.items[idOrSlug];
  if (byId) return byId;
  // Slow path: scan every item, matching either slug or id.
  for (const book of Object.values(doc.items)) {
    if (book.slug === idOrSlug || book.id === idOrSlug) return book;
  }
  return undefined;
}
/**
 * Map a BookItem (camelCase fields, epoch-ms timestamps) to the
 * snake_case JSON row shape this API has always returned.
 */
function bookToRow(b: BookItem) {
  // Epoch milliseconds → ISO-8601 string, matching the old DB output.
  const toIso = (ms: number) => new Date(ms).toISOString();
  return {
    id: b.id,
    slug: b.slug,
    title: b.title,
    author: b.author,
    description: b.description,
    pdf_path: b.pdfPath,
    pdf_size_bytes: b.pdfSizeBytes,
    page_count: b.pageCount,
    tags: b.tags,
    license: b.license,
    cover_color: b.coverColor,
    contributor_id: b.contributorId,
    contributor_name: b.contributorName,
    status: b.status,
    featured: b.featured,
    view_count: b.viewCount,
    download_count: b.downloadCount,
    created_at: toIso(b.createdAt),
    updated_at: toIso(b.updatedAt),
  };
}
function slugify(text: string): string {
return text
@ -59,44 +93,69 @@ function slugify(text: string): string {
.slice(0, 80);
}
/**
 * Escape a string for safe embedding in an HTML attribute value.
 *
 * Escapes &, ", <, > as before, and additionally ' (&#39;) so the
 * result is also safe inside single-quoted attributes. A single
 * regex pass with a lookup table replaces the previous chain of
 * four .replace() calls (same output for &, ", <, >).
 */
function escapeAttr(s: string): string {
  const entities: Record<string, string> = {
    "&": "&amp;",
    '"': "&quot;",
    "'": "&#39;",
    "<": "&lt;",
    ">": "&gt;",
  };
  return s.replace(/[&"'<>]/g, (ch) => entities[ch] ?? ch);
}
// ── Routes ──
const routes = new Hono();
// ── API: List books ──
routes.get("/api/books", async (c) => {
const search = c.req.query("search");
const space = c.req.param("space") || "global";
const search = c.req.query("search")?.toLowerCase();
const tag = c.req.query("tag");
const limit = Math.min(parseInt(c.req.query("limit") || "50"), 100);
const offset = parseInt(c.req.query("offset") || "0");
let query = `SELECT id, slug, title, author, description, pdf_size_bytes,
page_count, tags, cover_color, contributor_name, featured,
view_count, created_at
FROM rbooks.books WHERE status = 'published'`;
const params: (string | number)[] = [];
const doc = ensureDoc(space);
let books = Object.values(doc.items).filter((b) => b.status === "published");
if (search) {
params.push(`%${search}%`);
query += ` AND (title ILIKE $${params.length} OR author ILIKE $${params.length} OR description ILIKE $${params.length})`;
books = books.filter(
(b) =>
b.title.toLowerCase().includes(search) ||
b.author.toLowerCase().includes(search) ||
b.description.toLowerCase().includes(search)
);
}
if (tag) {
params.push(tag);
query += ` AND $${params.length} = ANY(tags)`;
books = books.filter((b) => b.tags.includes(tag));
}
query += ` ORDER BY featured DESC, created_at DESC`;
params.push(limit);
query += ` LIMIT $${params.length}`;
params.push(offset);
query += ` OFFSET $${params.length}`;
// Sort: featured first, then newest
books.sort((a, b) => {
if (a.featured !== b.featured) return a.featured ? -1 : 1;
return b.createdAt - a.createdAt;
});
// Paginate
const paged = books.slice(offset, offset + limit);
// Return the subset of fields the old query returned
const rows = paged.map((b) => ({
id: b.id,
slug: b.slug,
title: b.title,
author: b.author,
description: b.description,
pdf_size_bytes: b.pdfSizeBytes,
page_count: b.pageCount,
tags: [...b.tags],
cover_color: b.coverColor,
contributor_name: b.contributorName,
featured: b.featured,
view_count: b.viewCount,
created_at: new Date(b.createdAt).toISOString(),
}));
const rows = await sql.unsafe(query, params);
return c.json({ books: rows });
});
// ── API: Upload book ──
routes.post("/api/books", async (c) => {
const space = c.req.param("space") || "global";
const token = extractToken(c.req.raw.headers);
if (!token) return c.json({ error: "Authentication required" }, 401);
@ -124,13 +183,13 @@ routes.post("/api/books", async (c) => {
const tags = tagsRaw ? tagsRaw.split(",").map((t) => t.trim()).filter(Boolean) : [];
const shortId = randomUUID().slice(0, 8);
const id = randomUUID();
let slug = slugify(title);
// Check slug collision
const existing = await sql.unsafe(
`SELECT 1 FROM rbooks.books WHERE slug = $1`, [slug]
);
if (existing.length > 0) {
const doc = ensureDoc(space);
const slugExists = Object.values(doc.items).some((b) => b.slug === slug);
if (slugExists) {
slug = `${slug}-${shortId}`;
}
@ -141,50 +200,82 @@ routes.post("/api/books", async (c) => {
const buffer = Buffer.from(await file.arrayBuffer());
await Bun.write(filepath, buffer);
// Insert into DB
const rows = await sql.unsafe(
`INSERT INTO rbooks.books (slug, title, author, description, pdf_path, pdf_size_bytes, tags, license, contributor_id, contributor_name)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
RETURNING id, slug, title, author, description, tags, created_at`,
[slug, title, author, description, filename, buffer.length, tags, license, claims.sub, claims.username || null]
);
const now = Date.now();
return c.json(rows[0], 201);
// Insert into Automerge doc
const docId = booksCatalogDocId(space);
_syncServer!.changeDoc<BooksCatalogDoc>(docId, `add book: ${slug}`, (d) => {
d.items[id] = {
id,
slug,
title,
author: author || "",
description: description || "",
pdfPath: filename,
pdfSizeBytes: buffer.length,
pageCount: 0,
tags,
license,
coverColor: null,
contributorId: claims.sub,
contributorName: claims.username || null,
status: "published",
featured: false,
viewCount: 0,
downloadCount: 0,
createdAt: now,
updatedAt: now,
};
});
return c.json({
id,
slug,
title,
author,
description,
tags,
created_at: new Date(now).toISOString(),
}, 201);
});
// ── API: Get book details ──
routes.get("/api/books/:id", async (c) => {
const space = c.req.param("space") || "global";
const id = c.req.param("id");
const rows = await sql.unsafe(
`SELECT * FROM rbooks.books WHERE (slug = $1 OR id::text = $1) AND status = 'published'`,
[id]
);
const doc = ensureDoc(space);
const book = findBook(doc, id);
if (rows.length === 0) return c.json({ error: "Book not found" }, 404);
if (!book || book.status !== "published") {
return c.json({ error: "Book not found" }, 404);
}
// Increment view count
await sql.unsafe(
`UPDATE rbooks.books SET view_count = view_count + 1 WHERE id = $1`,
[rows[0].id]
);
const docId = booksCatalogDocId(space);
_syncServer!.changeDoc<BooksCatalogDoc>(docId, `view: ${book.slug}`, (d) => {
if (d.items[book.id]) {
d.items[book.id].viewCount += 1;
d.items[book.id].updatedAt = Date.now();
}
});
return c.json(rows[0]);
return c.json(bookToRow(book));
});
// ── API: Serve PDF ──
routes.get("/api/books/:id/pdf", async (c) => {
const space = c.req.param("space") || "global";
const id = c.req.param("id");
const rows = await sql.unsafe(
`SELECT id, slug, title, pdf_path FROM rbooks.books WHERE (slug = $1 OR id::text = $1) AND status = 'published'`,
[id]
);
const doc = ensureDoc(space);
const book = findBook(doc, id);
if (rows.length === 0) return c.json({ error: "Book not found" }, 404);
if (!book || book.status !== "published") {
return c.json({ error: "Book not found" }, 404);
}
const book = rows[0];
const filepath = resolve(BOOKS_DIR, book.pdf_path);
const filepath = resolve(BOOKS_DIR, book.pdfPath);
const file = Bun.file(filepath);
if (!(await file.exists())) {
@ -192,10 +283,13 @@ routes.get("/api/books/:id/pdf", async (c) => {
}
// Increment download count
await sql.unsafe(
`UPDATE rbooks.books SET download_count = download_count + 1 WHERE id = $1`,
[book.id]
);
const docId = booksCatalogDocId(space);
_syncServer!.changeDoc<BooksCatalogDoc>(docId, `download: ${book.slug}`, (d) => {
if (d.items[book.id]) {
d.items[book.id].downloadCount += 1;
d.items[book.id].updatedAt = Date.now();
}
});
return new Response(file, {
headers: {
@ -226,12 +320,10 @@ routes.get("/read/:id", async (c) => {
const spaceSlug = c.req.param("space") || "personal";
const id = c.req.param("id");
const rows = await sql.unsafe(
`SELECT * FROM rbooks.books WHERE (slug = $1 OR id::text = $1) AND status = 'published'`,
[id]
);
const doc = ensureDoc(spaceSlug);
const book = findBook(doc, id);
if (rows.length === 0) {
if (!book || book.status !== "published") {
const html = renderShell({
title: "Book not found | rSpace",
moduleId: "rbooks",
@ -242,13 +334,14 @@ routes.get("/read/:id", async (c) => {
return c.html(html, 404);
}
const book = rows[0];
// Increment view count
await sql.unsafe(
`UPDATE rbooks.books SET view_count = view_count + 1 WHERE id = $1`,
[book.id]
);
const docId = booksCatalogDocId(spaceSlug);
_syncServer!.changeDoc<BooksCatalogDoc>(docId, `view: ${book.slug}`, (d) => {
if (d.items[book.id]) {
d.items[book.id].viewCount += 1;
d.items[book.id].updatedAt = Date.now();
}
});
// Build the PDF URL relative to this module's mount point
const pdfUrl = `/${spaceSlug}/rbooks/api/books/${book.slug}/pdf`;
@ -279,24 +372,6 @@ routes.get("/read/:id", async (c) => {
return c.html(html);
});
// ── Initialize DB schema ──
async function initDB(): Promise<void> {
try {
const schemaPath = resolve(import.meta.dir, "db/schema.sql");
const schemaSql = await readFile(schemaPath, "utf-8");
await sql.unsafe(`SET search_path TO rbooks, public`);
await sql.unsafe(schemaSql);
await sql.unsafe(`SET search_path TO public`);
console.log("[Books] Database schema initialized");
} catch (e) {
console.error("[Books] Schema init failed:", e);
}
}
function escapeAttr(s: string): string {
return s.replace(/&/g, "&amp;").replace(/"/g, "&quot;").replace(/</g, "&lt;").replace(/>/g, "&gt;");
}
// ── Module export ──
export const booksModule: RSpaceModule = {
@ -311,7 +386,7 @@ export const booksModule: RSpaceModule = {
landingPage: renderLanding,
async onInit(ctx) {
_syncServer = ctx.syncServer;
await initDB();
console.log("[Books] Module initialized (Automerge storage)");
},
feeds: [
{

View File

@ -3,69 +3,167 @@
*
* Group calendars with lunar/solar/seasonal time systems,
* location-aware events, and temporal-spatial zoom coupling.
*
* All persistence uses Automerge documents via SyncServer —
* no PostgreSQL dependency.
*/
import { Hono } from "hono";
import { readFileSync } from "node:fs";
import { resolve } from "node:path";
import { sql } from "../../shared/db/pool";
import * as Automerge from "@automerge/automerge";
import { renderShell } from "../../server/shell";
import { getModuleInfoList } from "../../shared/module";
import type { RSpaceModule } from "../../shared/module";
import { verifyEncryptIDToken, extractToken } from "@encryptid/sdk/server";
import { renderLanding } from "./landing";
import type { SyncServer } from '../../server/local-first/sync-server';
import { calendarSchema } from './schemas';
import { calendarSchema, calendarDocId } from './schemas';
import type { CalendarDoc, CalendarEvent, CalendarSource } from './schemas';
let _syncServer: SyncServer | null = null;
const routes = new Hono();
// ── DB initialization ──
const SCHEMA_SQL = readFileSync(resolve(import.meta.dir, "db/schema.sql"), "utf-8");
// ── Local-first helpers ──
async function initDB() {
try {
await sql.unsafe(SCHEMA_SQL);
console.log("[Cal] DB schema initialized");
} catch (e) {
console.error("[Cal] DB init error:", e);
/**
 * Lazily create the calendar Automerge doc if it doesn't exist yet.
 * Returns the current (immutable) doc snapshot.
 *
 * NOTE(review): relies on _syncServer being assigned during module
 * init — the non-null assertion throws if a route runs before that.
 */
function ensureDoc(space: string): CalendarDoc {
  const docId = calendarDocId(space);
  let doc = _syncServer!.getDoc<CalendarDoc>(docId);
  if (!doc) {
    // First access: seed an empty doc from the schema's init shape,
    // stamped with the owning space slug.
    doc = Automerge.change(Automerge.init<CalendarDoc>(), 'init calendar', (d) => {
      const init = calendarSchema.init();
      d.meta = init.meta;
      d.meta.spaceSlug = space;
      d.sources = {};
      d.events = {};
    });
    _syncServer!.setDoc(docId, doc);
  }
  return doc;
}
async function seedDemoIfEmpty() {
try {
const count = await sql.unsafe("SELECT count(*)::int as cnt FROM rcal.events");
if (parseInt(count[0].cnt) > 0) return;
/**
 * Build a Date `days` from today at the given local wall-clock time
 * (seconds/milliseconds zeroed). Negative `days` goes into the past.
 * Uses setDate so month/year rollover is handled by the Date object.
 */
function daysFromNow(days: number, hours: number, minutes: number): Date {
  const when = new Date();
  when.setDate(when.getDate() + days);
  when.setHours(hours, minutes, 0, 0);
  return when;
}
/**
 * Build an event row object suitable for JSON responses.
 *
 * Maps camelCase schema fields to the snake_case shape the API
 * previously returned. Source display fields prefer the live source
 * record (looked up by sourceId), falling back to values that were
 * denormalized onto the event itself, then null.
 */
function eventToRow(ev: CalendarEvent, sources: Record<string, CalendarSource>) {
  // Epoch-ms → ISO string; 0/null/undefined all become null.
  const iso = (ms: number | null | undefined) =>
    ms ? new Date(ms).toISOString() : null;
  const src = ev.sourceId ? sources[ev.sourceId] : undefined;
  return {
    id: ev.id,
    title: ev.title,
    description: ev.description,
    start_time: iso(ev.startTime),
    end_time: iso(ev.endTime),
    all_day: ev.allDay,
    timezone: ev.timezone,
    rrule: ev.rrule,
    status: ev.status,
    visibility: ev.visibility,
    source_id: ev.sourceId,
    source_name: src?.name ?? ev.sourceName ?? null,
    source_color: src?.color ?? ev.sourceColor ?? null,
    source_type: src?.sourceType ?? ev.sourceType ?? null,
    location_id: ev.locationId,
    location_name: ev.locationName,
    location_label: ev.locationName,
    location_lat: ev.locationLat,
    location_lng: ev.locationLng,
    location_granularity: ev.locationGranularity,
    is_virtual: ev.isVirtual,
    virtual_url: ev.virtualUrl,
    virtual_platform: ev.virtualPlatform,
    r_tool_source: ev.rToolSource,
    r_tool_entity_id: ev.rToolEntityId,
    attendees: ev.attendees,
    attendee_count: ev.attendeeCount,
    metadata: ev.metadata,
    created_at: iso(ev.createdAt),
    updated_at: iso(ev.updatedAt),
  };
}
/**
 * Build a calendar-source row for JSON responses: snake_case keys,
 * with last_synced_at/created_at rendered as ISO strings or null.
 */
function sourceToRow(src: CalendarSource) {
  // Epoch-ms → ISO string; 0/null/undefined all become null.
  const iso = (ms: number | null | undefined) =>
    ms ? new Date(ms).toISOString() : null;
  return {
    id: src.id,
    name: src.name,
    source_type: src.sourceType,
    url: src.url,
    color: src.color,
    is_active: src.isActive,
    is_visible: src.isVisible,
    sync_interval_minutes: src.syncIntervalMinutes,
    last_synced_at: iso(src.lastSyncedAt),
    owner_id: src.ownerId,
    created_at: iso(src.createdAt),
  };
}
/**
* Seed demo data if the doc has no events yet.
*/
function seedDemoIfEmpty(space: string) {
const docId = calendarDocId(space);
const doc = ensureDoc(space);
if (Object.keys(doc.events).length > 0) return;
_syncServer!.changeDoc<CalendarDoc>(docId, 'seed demo data', (d) => {
const now = Date.now();
// Create calendar sources
const community = await sql.unsafe(
`INSERT INTO rcal.calendar_sources (name, source_type, color, is_active, is_visible)
VALUES ('Community Events', 'MANUAL', '#6366f1', true, true) RETURNING id`
);
const sprints = await sql.unsafe(
`INSERT INTO rcal.calendar_sources (name, source_type, color, is_active, is_visible)
VALUES ('Development Sprints', 'MANUAL', '#f59e0b', true, true) RETURNING id`
);
const communityId = community[0].id;
const sprintsId = sprints[0].id;
const communityId = crypto.randomUUID();
const sprintsId = crypto.randomUUID();
// Create location hierarchy
const world = await sql.unsafe(
`INSERT INTO rcal.locations (name, granularity) VALUES ('Earth', 1) RETURNING id`
);
const europe = await sql.unsafe(
`INSERT INTO rcal.locations (name, granularity, parent_id, lat, lng) VALUES ('Europe', 2, $1, 48.8566, 2.3522) RETURNING id`,
[world[0].id]
);
const berlin = await sql.unsafe(
`INSERT INTO rcal.locations (name, granularity, parent_id, lat, lng) VALUES ('Berlin', 4, $1, 52.52, 13.405) RETURNING id`,
[europe[0].id]
);
d.sources[communityId] = {
id: communityId,
name: 'Community Events',
sourceType: 'MANUAL',
url: null,
color: '#6366f1',
isActive: true,
isVisible: true,
syncIntervalMinutes: null,
lastSyncedAt: 0,
ownerId: null,
createdAt: now,
};
d.sources[sprintsId] = {
id: sprintsId,
name: 'Development Sprints',
sourceType: 'MANUAL',
url: null,
color: '#f59e0b',
isActive: true,
isVisible: true,
syncIntervalMinutes: null,
lastSyncedAt: 0,
ownerId: null,
createdAt: now,
};
// Seed events — past, current week, and future
const now = new Date();
const events = [
// Location IDs (embedded on events, no separate locations table)
const berlinLocId = crypto.randomUUID();
// Seed events
const seedEvents: Array<{
title: string; desc: string; start: Date; end: Date;
sourceId: string; allDay?: boolean;
locationId?: string; locationName?: string;
locationLat?: number; locationLng?: number; locationGranularity?: string;
isVirtual?: boolean; virtualUrl?: string; virtualPlatform?: string;
}> = [
{
title: "rSpace Launch Party",
desc: "Celebrating the launch of the unified rSpace platform with all 22 modules live.",
@ -76,13 +174,15 @@ async function seedDemoIfEmpty() {
title: "Provider Onboarding Workshop",
desc: "Hands-on session for print providers joining the cosmolocal network.",
start: daysFromNow(-12, 14, 0), end: daysFromNow(-12, 17, 0),
sourceId: communityId, virtual: true, virtualUrl: "https://meet.jit.si/rspace-providers", virtualPlatform: "Jitsi",
sourceId: communityId, isVirtual: true,
virtualUrl: "https://meet.jit.si/rspace-providers", virtualPlatform: "Jitsi",
},
{
title: "Weekly Community Standup",
desc: "Open standup — share what you're working on, ask for help, coordinate.",
start: daysFromNow(0, 16, 0), end: daysFromNow(0, 16, 45),
sourceId: communityId, virtual: true, virtualUrl: "https://meet.jit.si/rspace-standup", virtualPlatform: "Jitsi",
sourceId: communityId, isVirtual: true,
virtualUrl: "https://meet.jit.si/rspace-standup", virtualPlatform: "Jitsi",
},
{
title: "Sprint: Module Seeding & Polish",
@ -94,77 +194,112 @@ async function seedDemoIfEmpty() {
title: "rFunds Budget Review",
desc: "Quarterly review of treasury flows, enoughness thresholds, and overflow routing.",
start: daysFromNow(6, 15, 0), end: daysFromNow(6, 17, 0),
sourceId: communityId, virtual: true, virtualUrl: "https://meet.jit.si/rfunds-review", virtualPlatform: "Jitsi",
sourceId: communityId, isVirtual: true,
virtualUrl: "https://meet.jit.si/rfunds-review", virtualPlatform: "Jitsi",
},
{
title: "Cosmolocal Design Sprint",
desc: "Two-day design sprint on the next generation of cosmolocal tooling.",
start: daysFromNow(11, 9, 0), end: daysFromNow(12, 18, 0),
sourceId: sprintsId, locationId: berlin[0].id, locationName: "Druckwerkstatt Berlin",
sourceId: sprintsId,
locationId: berlinLocId, locationName: "Druckwerkstatt Berlin",
locationLat: 52.52, locationLng: 13.405, locationGranularity: "city",
},
{
title: "Q1 Retrospective",
desc: "Looking back at what we built, what worked, and what to improve.",
start: daysFromNow(21, 16, 0), end: daysFromNow(21, 18, 0),
sourceId: communityId, virtual: true, virtualUrl: "https://meet.jit.si/rspace-retro", virtualPlatform: "Jitsi",
sourceId: communityId, isVirtual: true,
virtualUrl: "https://meet.jit.si/rspace-retro", virtualPlatform: "Jitsi",
},
];
for (const e of events) {
await sql.unsafe(
`INSERT INTO rcal.events (title, description, start_time, end_time, all_day, source_id,
location_id, location_name, is_virtual, virtual_url, virtual_platform)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)`,
[e.title, e.desc, e.start.toISOString(), e.end.toISOString(), e.allDay || false,
e.sourceId, e.locationId || null, e.locationName || null,
e.virtual || false, e.virtualUrl || null, e.virtualPlatform || null]
);
for (const e of seedEvents) {
const eventId = crypto.randomUUID();
d.events[eventId] = {
id: eventId,
title: e.title,
description: e.desc,
startTime: e.start.getTime(),
endTime: e.end.getTime(),
allDay: e.allDay || false,
timezone: 'UTC',
rrule: null,
status: null,
visibility: null,
sourceId: e.sourceId,
sourceName: null,
sourceType: null,
sourceColor: null,
locationId: e.locationId || null,
locationName: e.locationName || null,
coordinates: null,
locationGranularity: e.locationGranularity || null,
locationLat: e.locationLat ?? null,
locationLng: e.locationLng ?? null,
isVirtual: e.isVirtual || false,
virtualUrl: e.virtualUrl || null,
virtualPlatform: e.virtualPlatform || null,
rToolSource: null,
rToolEntityId: null,
attendees: [],
attendeeCount: 0,
metadata: null,
createdAt: now,
updatedAt: now,
};
}
});
console.log("[Cal] Demo data seeded: 2 sources, 3 locations, 7 events");
} catch (e) {
console.error("[Cal] Seed error:", e);
}
}
function daysFromNow(days: number, hours: number, minutes: number): Date {
const d = new Date();
d.setDate(d.getDate() + days);
d.setHours(hours, minutes, 0, 0);
return d;
console.log("[Cal] Demo data seeded: 2 sources, 7 events");
}
// ── API: Events ──
// GET /api/events — query events with filters
routes.get("/api/events", async (c) => {
const space = c.req.param("space") || "demo";
const { start, end, source, search, rTool, rEntityId, upcoming } = c.req.query();
let where = "WHERE 1=1";
const params: any[] = [];
let idx = 1;
const doc = ensureDoc(space);
let events = Object.values(doc.events);
if (start) { where += ` AND e.start_time >= $${idx}`; params.push(start); idx++; }
if (end) { where += ` AND e.start_time <= ($${idx}::date + interval '1 day')`; params.push(end); idx++; }
if (source) { where += ` AND e.source_id = $${idx}`; params.push(source); idx++; }
if (search) { where += ` AND (e.title ILIKE $${idx} OR e.description ILIKE $${idx})`; params.push(`%${search}%`); idx++; }
if (rTool) { where += ` AND e.r_tool_source = $${idx}`; params.push(rTool); idx++; }
if (rEntityId) { where += ` AND e.r_tool_entity_id = $${idx}`; params.push(rEntityId); idx++; }
// Apply filters
if (start) {
const startMs = new Date(start).getTime();
events = events.filter((e) => e.startTime >= startMs);
}
if (end) {
const endMs = new Date(end).getTime() + 86400000; // +1 day
events = events.filter((e) => e.startTime <= endMs);
}
if (source) {
events = events.filter((e) => e.sourceId === source);
}
if (search) {
const term = search.toLowerCase();
events = events.filter((e) =>
e.title.toLowerCase().includes(term) ||
(e.description && e.description.toLowerCase().includes(term))
);
}
if (rTool) {
events = events.filter((e) => e.rToolSource === rTool);
}
if (rEntityId) {
events = events.filter((e) => e.rToolEntityId === rEntityId);
}
if (upcoming) {
where += ` AND e.start_time >= NOW() AND e.start_time <= NOW() + ($${idx} || ' days')::interval`;
params.push(upcoming);
idx++;
const nowMs = Date.now();
const futureMs = nowMs + parseInt(upcoming) * 86400000;
events = events.filter((e) => e.startTime >= nowMs && e.startTime <= futureMs);
}
const rows = await sql.unsafe(
`SELECT e.*, cs.name as source_name, cs.color as source_color, l.name as location_label
FROM rcal.events e
LEFT JOIN rcal.calendar_sources cs ON cs.id = e.source_id
LEFT JOIN rcal.locations l ON l.id = e.location_id
${where}
ORDER BY e.start_time ASC LIMIT 500`,
params
);
// Sort by start time, limit to 500
events.sort((a, b) => a.startTime - b.startTime);
events = events.slice(0, 500);
const rows = events.map((e) => eventToRow(e, doc.sources));
return c.json({ count: rows.length, results: rows });
});
@ -175,32 +310,65 @@ routes.post("/api/events", async (c) => {
let claims;
try { claims = await verifyEncryptIDToken(token); } catch { return c.json({ error: "Invalid token" }, 401); }
const space = c.req.param("space") || "demo";
const body = await c.req.json();
const { title, description, start_time, end_time, all_day, timezone, source_id, location_id, location_name,
is_virtual, virtual_url, virtual_platform, r_tool_source, r_tool_entity_id } = body;
if (!title?.trim() || !start_time) return c.json({ error: "Title and start_time required" }, 400);
const rows = await sql.unsafe(
`INSERT INTO rcal.events (title, description, start_time, end_time, all_day, timezone, source_id,
location_id, location_name, is_virtual, virtual_url, virtual_platform, r_tool_source, r_tool_entity_id, created_by)
VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15) RETURNING *`,
[title.trim(), description || null, start_time, end_time || null, all_day || false, timezone || "UTC",
source_id || null, location_id || null, location_name || null, is_virtual || false,
virtual_url || null, virtual_platform || null, r_tool_source || null, r_tool_entity_id || null, claims.sub]
);
return c.json(rows[0], 201);
const docId = calendarDocId(space);
ensureDoc(space);
const eventId = crypto.randomUUID();
const now = Date.now();
_syncServer!.changeDoc<CalendarDoc>(docId, `create event ${eventId}`, (d) => {
d.events[eventId] = {
id: eventId,
title: title.trim(),
description: description || '',
startTime: new Date(start_time).getTime(),
endTime: end_time ? new Date(end_time).getTime() : 0,
allDay: all_day || false,
timezone: timezone || 'UTC',
rrule: null,
status: null,
visibility: null,
sourceId: source_id || null,
sourceName: null,
sourceType: null,
sourceColor: null,
locationId: location_id || null,
locationName: location_name || null,
coordinates: null,
locationGranularity: null,
locationLat: null,
locationLng: null,
isVirtual: is_virtual || false,
virtualUrl: virtual_url || null,
virtualPlatform: virtual_platform || null,
rToolSource: r_tool_source || null,
rToolEntityId: r_tool_entity_id || null,
attendees: [],
attendeeCount: 0,
metadata: null,
createdAt: now,
updatedAt: now,
};
});
const updated = _syncServer!.getDoc<CalendarDoc>(docId)!;
return c.json(eventToRow(updated.events[eventId], updated.sources), 201);
});
// GET /api/events/:id
routes.get("/api/events/:id", async (c) => {
const rows = await sql.unsafe(
`SELECT e.*, cs.name as source_name, cs.color as source_color
FROM rcal.events e LEFT JOIN rcal.calendar_sources cs ON cs.id = e.source_id
WHERE e.id = $1`,
[c.req.param("id")]
);
if (rows.length === 0) return c.json({ error: "Event not found" }, 404);
return c.json(rows[0]);
const space = c.req.param("space") || "demo";
const id = c.req.param("id");
const doc = ensureDoc(space);
const ev = doc.events[id];
if (!ev) return c.json({ error: "Event not found" }, 404);
return c.json(eventToRow(ev, doc.sources));
});
// PATCH /api/events/:id
@ -210,55 +378,93 @@ routes.patch("/api/events/:id", async (c) => {
let claims;
try { claims = await verifyEncryptIDToken(token); } catch { return c.json({ error: "Invalid token" }, 401); }
const space = c.req.param("space") || "demo";
const id = c.req.param("id");
const body = await c.req.json();
const fields: string[] = [];
const params: any[] = [];
let idx = 1;
const allowed = ["title", "description", "start_time", "end_time", "all_day", "timezone",
"status", "visibility", "location_name", "is_virtual", "virtual_url"];
const docId = calendarDocId(space);
const doc = ensureDoc(space);
if (!doc.events[id]) return c.json({ error: "Not found" }, 404);
for (const key of allowed) {
if (body[key] !== undefined) {
fields.push(`${key} = $${idx}`);
params.push(body[key]);
idx++;
// Map of allowed body keys to CalendarEvent fields
const fieldMap: Record<string, keyof CalendarEvent> = {
title: 'title',
description: 'description',
start_time: 'startTime',
end_time: 'endTime',
all_day: 'allDay',
timezone: 'timezone',
status: 'status',
visibility: 'visibility',
location_name: 'locationName',
is_virtual: 'isVirtual',
virtual_url: 'virtualUrl',
};
const updates: Array<{ field: keyof CalendarEvent; value: any }> = [];
for (const [bodyKey, docField] of Object.entries(fieldMap)) {
if (body[bodyKey] !== undefined) {
let value = body[bodyKey];
// Convert time strings to epoch ms
if (bodyKey === 'start_time' || bodyKey === 'end_time') {
value = new Date(value).getTime();
}
updates.push({ field: docField, value });
}
}
if (fields.length === 0) return c.json({ error: "No fields" }, 400);
fields.push("updated_at = NOW()");
params.push(id);
if (updates.length === 0) return c.json({ error: "No fields" }, 400);
const rows = await sql.unsafe(
`UPDATE rcal.events SET ${fields.join(", ")} WHERE id = $${idx} RETURNING *`,
params
);
if (rows.length === 0) return c.json({ error: "Not found" }, 404);
return c.json(rows[0]);
_syncServer!.changeDoc<CalendarDoc>(docId, `update event ${id}`, (d) => {
const ev = d.events[id];
for (const { field, value } of updates) {
(ev as any)[field] = value;
}
ev.updatedAt = Date.now();
});
const updated = _syncServer!.getDoc<CalendarDoc>(docId)!;
return c.json(eventToRow(updated.events[id], updated.sources));
});
// DELETE /api/events/:id
routes.delete("/api/events/:id", async (c) => {
const result = await sql.unsafe("DELETE FROM rcal.events WHERE id = $1 RETURNING id", [c.req.param("id")]);
if (result.length === 0) return c.json({ error: "Not found" }, 404);
const space = c.req.param("space") || "demo";
const id = c.req.param("id");
const docId = calendarDocId(space);
const doc = ensureDoc(space);
if (!doc.events[id]) return c.json({ error: "Not found" }, 404);
_syncServer!.changeDoc<CalendarDoc>(docId, `delete event ${id}`, (d) => {
delete d.events[id];
});
return c.json({ ok: true });
});
// ── API: Sources ──
routes.get("/api/sources", async (c) => {
const space = c.req.param("space") || "demo";
const { is_active, is_visible, source_type } = c.req.query();
let where = "WHERE 1=1";
const params: any[] = [];
let idx = 1;
const doc = ensureDoc(space);
if (is_active !== undefined) { where += ` AND is_active = $${idx}`; params.push(is_active === "true"); idx++; }
if (is_visible !== undefined) { where += ` AND is_visible = $${idx}`; params.push(is_visible === "true"); idx++; }
if (source_type) { where += ` AND source_type = $${idx}`; params.push(source_type); idx++; }
let sources = Object.values(doc.sources);
const rows = await sql.unsafe(`SELECT * FROM rcal.calendar_sources ${where} ORDER BY name`, params);
if (is_active !== undefined) {
const active = is_active === "true";
sources = sources.filter((s) => s.isActive === active);
}
if (is_visible !== undefined) {
const visible = is_visible === "true";
sources = sources.filter((s) => s.isVisible === visible);
}
if (source_type) {
sources = sources.filter((s) => s.sourceType === source_type);
}
sources.sort((a, b) => a.name.localeCompare(b.name));
const rows = sources.map(sourceToRow);
return c.json({ count: rows.length, results: rows });
});
@ -267,44 +473,99 @@ routes.post("/api/sources", async (c) => {
if (!token) return c.json({ error: "Authentication required" }, 401);
try { await verifyEncryptIDToken(token); } catch { return c.json({ error: "Invalid token" }, 401); }
const space = c.req.param("space") || "demo";
const body = await c.req.json();
const rows = await sql.unsafe(
`INSERT INTO rcal.calendar_sources (name, source_type, url, color, is_active, is_visible)
VALUES ($1, $2, $3, $4, $5, $6) RETURNING *`,
[body.name, body.source_type || "MANUAL", body.url || null, body.color || "#6366f1",
body.is_active ?? true, body.is_visible ?? true]
);
return c.json(rows[0], 201);
const docId = calendarDocId(space);
ensureDoc(space);
const sourceId = crypto.randomUUID();
const now = Date.now();
_syncServer!.changeDoc<CalendarDoc>(docId, `create source ${sourceId}`, (d) => {
d.sources[sourceId] = {
id: sourceId,
name: body.name,
sourceType: body.source_type || 'MANUAL',
url: body.url || null,
color: body.color || '#6366f1',
isActive: body.is_active ?? true,
isVisible: body.is_visible ?? true,
syncIntervalMinutes: null,
lastSyncedAt: 0,
ownerId: null,
createdAt: now,
};
});
const updated = _syncServer!.getDoc<CalendarDoc>(docId)!;
return c.json(sourceToRow(updated.sources[sourceId]), 201);
});
// ── API: Locations ──
// Locations are now derived from event data (no separate table).
// Each unique locationId/locationName combination is extracted from events.
interface DerivedLocation {
id: string;
name: string;
granularity: number | null;
parent_id: string | null;
lat: number | null;
lng: number | null;
}
function deriveLocations(doc: CalendarDoc): DerivedLocation[] {
const seen = new Map<string, DerivedLocation>();
for (const ev of Object.values(doc.events)) {
const key = ev.locationId || ev.locationName;
if (!key) continue;
if (seen.has(key)) continue;
seen.set(key, {
id: ev.locationId || key,
name: ev.locationName || key,
granularity: ev.locationGranularity ? parseInt(ev.locationGranularity) || null : null,
parent_id: null,
lat: ev.locationLat,
lng: ev.locationLng,
});
}
return Array.from(seen.values());
}
routes.get("/api/locations", async (c) => {
const space = c.req.param("space") || "demo";
const { granularity, parent, search, root } = c.req.query();
let where = "WHERE 1=1";
const params: any[] = [];
let idx = 1;
const doc = ensureDoc(space);
if (root === "true") { where += " AND parent_id IS NULL"; }
if (granularity) { where += ` AND granularity = $${idx}`; params.push(parseInt(granularity)); idx++; }
if (parent) { where += ` AND parent_id = $${idx}`; params.push(parent); idx++; }
if (search) { where += ` AND name ILIKE $${idx}`; params.push(`%${search}%`); idx++; }
let locations = deriveLocations(doc);
const rows = await sql.unsafe(`SELECT * FROM rcal.locations ${where} ORDER BY name`, params);
return c.json(rows);
if (root === "true") {
locations = locations.filter((l) => l.parent_id === null);
}
if (granularity) {
const g = parseInt(granularity);
locations = locations.filter((l) => l.granularity === g);
}
if (parent) {
locations = locations.filter((l) => l.parent_id === parent);
}
if (search) {
const term = search.toLowerCase();
locations = locations.filter((l) => l.name.toLowerCase().includes(term));
}
locations.sort((a, b) => a.name.localeCompare(b.name));
return c.json(locations);
});
routes.get("/api/locations/tree", async (c) => {
const rows = await sql.unsafe(
`WITH RECURSIVE tree AS (
SELECT id, name, granularity, parent_id, 0 as depth FROM rcal.locations WHERE parent_id IS NULL
UNION ALL
SELECT l.id, l.name, l.granularity, l.parent_id, t.depth + 1
FROM rcal.locations l JOIN tree t ON l.parent_id = t.id
)
SELECT * FROM tree ORDER BY depth, name`
);
return c.json(rows);
const space = c.req.param("space") || "demo";
const doc = ensureDoc(space);
// Flat list with depth=0 since hierarchical parent_id data is not stored in Automerge
const locations = deriveLocations(doc).map((l) => ({ ...l, depth: 0 }));
locations.sort((a, b) => a.name.localeCompare(b.name));
return c.json(locations);
});
// ── API: Lunar data (computed, not stored) ──
@@ -349,29 +610,30 @@ routes.get("/api/lunar", async (c) => {
// ── API: Stats ──
routes.get("/api/stats", async (c) => {
const [eventCount, sourceCount, locationCount] = await Promise.all([
sql.unsafe("SELECT count(*)::int as cnt FROM rcal.events"),
sql.unsafe("SELECT count(*)::int as cnt FROM rcal.calendar_sources WHERE is_active = true"),
sql.unsafe("SELECT count(*)::int as cnt FROM rcal.locations"),
]);
return c.json({
events: eventCount[0]?.cnt || 0,
sources: sourceCount[0]?.cnt || 0,
locations: locationCount[0]?.cnt || 0,
});
const space = c.req.param("space") || "demo";
const doc = ensureDoc(space);
const events = Object.values(doc.events).length;
const sources = Object.values(doc.sources).filter((s) => s.isActive).length;
const locations = deriveLocations(doc).length;
return c.json({ events, sources, locations });
});
// ── API: Context (r* tool bridge) ──
routes.get("/api/context/:tool", async (c) => {
const space = c.req.param("space") || "demo";
const tool = c.req.param("tool");
const entityId = c.req.query("entityId");
if (!entityId) return c.json({ error: "entityId required" }, 400);
const rows = await sql.unsafe(
"SELECT * FROM rcal.events WHERE r_tool_source = $1 AND r_tool_entity_id = $2 ORDER BY start_time",
[tool, entityId]
);
const doc = ensureDoc(space);
const matching = Object.values(doc.events)
.filter((e) => e.rToolSource === tool && e.rToolEntityId === entityId)
.sort((a, b) => a.startTime - b.startTime);
const rows = matching.map((e) => eventToRow(e, doc.sources));
return c.json({ count: rows.length, results: rows });
});
@@ -403,8 +665,8 @@ export const calModule: RSpaceModule = {
landingPage: renderLanding,
async onInit(ctx) {
_syncServer = ctx.syncServer;
await initDB();
await seedDemoIfEmpty();
// Seed demo data for the default space
seedDemoIfEmpty("demo");
},
feeds: [
{

View File

@@ -4,12 +4,12 @@
 * Ported from /opt/apps/rcart/ (Express → Hono).
* Handles catalog (artifact listings), orders, fulfillment resolution.
* Integrates with provider-registry for provider matching and flow-service for revenue splits.
*
* Storage: Automerge documents via SyncServer (no PostgreSQL).
*/
import * as Automerge from "@automerge/automerge";
import { Hono } from "hono";
import { readFileSync } from "node:fs";
import { resolve } from "node:path";
import { sql } from "../../shared/db/pool";
import { renderShell } from "../../server/shell";
import { getModuleInfoList } from "../../shared/module";
import { depositOrderRevenue } from "./flow";
@@ -17,24 +17,17 @@ import type { RSpaceModule } from "../../shared/module";
import { verifyEncryptIDToken, extractToken } from "@encryptid/sdk/server";
import { renderLanding } from "./landing";
import type { SyncServer } from '../../server/local-first/sync-server';
import { catalogSchema, orderSchema } from './schemas';
import {
catalogSchema, orderSchema,
catalogDocId, orderDocId,
type CatalogDoc, type CatalogEntry,
type OrderDoc, type OrderMeta,
} from './schemas';
let _syncServer: SyncServer | null = null;
const routes = new Hono();
// ── DB initialization ──
const SCHEMA_SQL = readFileSync(resolve(import.meta.dir, "db/schema.sql"), "utf-8");
async function initDB() {
try {
await sql.unsafe(SCHEMA_SQL);
console.log("[Cart] DB schema initialized");
} catch (e) {
console.error("[Cart] DB init error:", e);
}
}
// Provider registry URL (for fulfillment resolution)
const PROVIDER_REGISTRY_URL = process.env.PROVIDER_REGISTRY_URL || "";
@@ -44,10 +37,41 @@ function getProviderUrl(): string {
return PROVIDER_REGISTRY_URL || "http://localhost:3000/demo/providers";
}
// ── Automerge helpers ──
/** Lazily create (or retrieve) the catalog doc for a space. */
function ensureCatalogDoc(space: string): Automerge.Doc<CatalogDoc> {
const docId = catalogDocId(space);
let doc = _syncServer!.getDoc<CatalogDoc>(docId);
if (!doc) {
doc = Automerge.change(Automerge.init<CatalogDoc>(), 'init catalog', (d) => {
const init = catalogSchema.init();
Object.assign(d, init);
d.meta.spaceSlug = space;
});
_syncServer!.setDoc(docId, doc);
}
return doc;
}
/** Get all order docs for a space by scanning known doc IDs. */
function getSpaceOrderDocs(space: string): Array<{ docId: string; doc: Automerge.Doc<OrderDoc> }> {
const prefix = `${space}:cart:orders:`;
const results: Array<{ docId: string; doc: Automerge.Doc<OrderDoc> }> = [];
for (const id of _syncServer!.listDocs()) {
if (id.startsWith(prefix)) {
const doc = _syncServer!.getDoc<OrderDoc>(id);
if (doc) results.push({ docId: id, doc });
}
}
return results;
}
// ── CATALOG ROUTES ──
// POST /api/catalog/ingest — Add artifact to catalog
routes.post("/api/catalog/ingest", async (c) => {
const space = c.req.param("space") || "demo";
const artifact = await c.req.json();
if (!artifact.id || !artifact.schema_version || !artifact.type) {
@@ -60,121 +84,151 @@ routes.post("/api/catalog/ingest", async (c) => {
return c.json({ error: "print-ready artifacts must have at least one render_target" }, 400);
}
const existing = await sql.unsafe("SELECT id FROM rcart.catalog_entries WHERE artifact_id = $1", [artifact.id]);
if (existing.length > 0) {
return c.json({ error: "Artifact already listed", catalog_entry_id: existing[0].id }, 409);
const doc = ensureCatalogDoc(space);
// Check for duplicate artifact_id
for (const [, entry] of Object.entries(doc.items)) {
if (entry.artifactId === artifact.id) {
return c.json({ error: "Artifact already listed", catalog_entry_id: entry.id }, 409);
}
}
const result = await sql.unsafe(
`INSERT INTO rcart.catalog_entries (
artifact_id, artifact, title, product_type,
required_capabilities, substrates, creator_id,
source_space, tags
) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
RETURNING id, artifact_id, title, product_type, status, created_at`,
[
artifact.id, JSON.stringify(artifact),
artifact.payload?.title || "Untitled",
artifact.spec?.product_type || null,
artifact.spec?.required_capabilities || [],
artifact.spec?.substrates || [],
artifact.creator?.id || null,
artifact.source_space || null,
artifact.payload?.tags || [],
]
);
const entryId = crypto.randomUUID();
const now = Date.now();
return c.json(result[0], 201);
const docId = catalogDocId(space);
_syncServer!.changeDoc<CatalogDoc>(docId, 'ingest catalog entry', (d) => {
d.items[entryId] = {
id: entryId,
artifactId: artifact.id,
artifact: artifact,
title: artifact.payload?.title || "Untitled",
productType: artifact.spec?.product_type || null,
requiredCapabilities: artifact.spec?.required_capabilities || [],
substrates: artifact.spec?.substrates || [],
creatorId: artifact.creator?.id || null,
sourceSpace: artifact.source_space || space,
tags: artifact.payload?.tags || [],
status: "active",
createdAt: now,
updatedAt: now,
};
});
return c.json({
id: entryId,
artifact_id: artifact.id,
title: artifact.payload?.title || "Untitled",
product_type: artifact.spec?.product_type || null,
status: "active",
created_at: new Date(now).toISOString(),
}, 201);
});
// GET /api/catalog — Browse catalog
routes.get("/api/catalog", async (c) => {
const space = c.req.param("space") || "demo";
const { product_type, capability, tag, source_space, q, limit = "50", offset = "0" } = c.req.query();
const conditions: string[] = ["status = 'active'"];
const params: any[] = [];
let paramIdx = 1;
const doc = ensureCatalogDoc(space);
let entries = Object.values(doc.items);
if (product_type) {
conditions.push(`product_type = $${paramIdx}`);
params.push(product_type);
paramIdx++;
}
// Apply filters
entries = entries.filter((e) => e.status === "active");
if (product_type) entries = entries.filter((e) => e.productType === product_type);
if (capability) {
conditions.push(`required_capabilities && $${paramIdx}`);
params.push(capability.split(","));
paramIdx++;
}
if (tag) {
conditions.push(`$${paramIdx} = ANY(tags)`);
params.push(tag);
paramIdx++;
}
if (source_space) {
conditions.push(`source_space = $${paramIdx}`);
params.push(source_space);
paramIdx++;
const caps = capability.split(",");
entries = entries.filter((e) => caps.some((cap) => e.requiredCapabilities.includes(cap)));
}
if (tag) entries = entries.filter((e) => e.tags.includes(tag));
if (source_space) entries = entries.filter((e) => e.sourceSpace === source_space);
if (q) {
conditions.push(`title ILIKE $${paramIdx}`);
params.push(`%${q}%`);
paramIdx++;
const lower = q.toLowerCase();
entries = entries.filter((e) => e.title.toLowerCase().includes(lower));
}
const where = conditions.join(" AND ");
// Sort by createdAt descending
entries.sort((a, b) => b.createdAt - a.createdAt);
const limitNum = Math.min(parseInt(limit) || 50, 100);
const offsetNum = parseInt(offset) || 0;
const total = entries.length;
const paged = entries.slice(offsetNum, offsetNum + limitNum);
const [result, countResult] = await Promise.all([
sql.unsafe(
`SELECT id, artifact_id, title, product_type,
required_capabilities, tags, source_space,
artifact->'payload'->>'description' as description,
artifact->'pricing' as pricing,
artifact->'spec'->'dimensions' as dimensions,
status, created_at
FROM rcart.catalog_entries
WHERE ${where}
ORDER BY created_at DESC
LIMIT ${limitNum} OFFSET ${offsetNum}`,
params
),
sql.unsafe(`SELECT count(*) FROM rcart.catalog_entries WHERE ${where}`, params),
]);
// Map to response shape matching the original SQL response
const result = paged.map((e) => {
const art = e.artifact as Record<string, any> | undefined;
return {
id: e.id,
artifact_id: e.artifactId,
title: e.title,
product_type: e.productType,
required_capabilities: e.requiredCapabilities,
tags: e.tags,
source_space: e.sourceSpace,
description: art?.payload?.description || null,
pricing: art?.pricing || null,
dimensions: art?.spec?.dimensions || null,
status: e.status,
created_at: new Date(e.createdAt).toISOString(),
};
});
return c.json({ entries: result, total: parseInt(countResult[0].count as string), limit: limitNum, offset: offsetNum });
return c.json({ entries: result, total, limit: limitNum, offset: offsetNum });
});
// GET /api/catalog/:id — Single catalog entry
routes.get("/api/catalog/:id", async (c) => {
const space = c.req.param("space") || "demo";
const id = c.req.param("id");
const result = await sql.unsafe(
"SELECT * FROM rcart.catalog_entries WHERE id = $1 OR artifact_id = $1",
[id]
);
if (result.length === 0) return c.json({ error: "Catalog entry not found" }, 404);
const row = result[0];
return c.json({ id: row.id, artifact: row.artifact, status: row.status, created_at: row.created_at, updated_at: row.updated_at });
const doc = ensureCatalogDoc(space);
// Look up by entry id or artifact id
let entry: CatalogEntry | undefined;
if (doc.items[id]) {
entry = doc.items[id];
} else {
entry = Object.values(doc.items).find((e) => e.artifactId === id);
}
if (!entry) return c.json({ error: "Catalog entry not found" }, 404);
return c.json({
id: entry.id,
artifact: entry.artifact,
status: entry.status,
created_at: new Date(entry.createdAt).toISOString(),
updated_at: new Date(entry.updatedAt).toISOString(),
});
});
// PATCH /api/catalog/:id — Update listing status
routes.patch("/api/catalog/:id", async (c) => {
const space = c.req.param("space") || "demo";
const { status } = await c.req.json();
const valid = ["active", "paused", "sold_out", "removed"];
if (!valid.includes(status)) return c.json({ error: `status must be one of: ${valid.join(", ")}` }, 400);
const result = await sql.unsafe(
"UPDATE rcart.catalog_entries SET status = $1, updated_at = NOW() WHERE id = $2 RETURNING id, status",
[status, c.req.param("id")]
);
if (result.length === 0) return c.json({ error: "Catalog entry not found" }, 404);
return c.json(result[0]);
const doc = ensureCatalogDoc(space);
const entryId = c.req.param("id");
if (!doc.items[entryId]) return c.json({ error: "Catalog entry not found" }, 404);
const docId = catalogDocId(space);
_syncServer!.changeDoc<CatalogDoc>(docId, `update catalog status → ${status}`, (d) => {
d.items[entryId].status = status;
d.items[entryId].updatedAt = Date.now();
});
return c.json({ id: entryId, status });
});
// ── ORDER ROUTES ──
// POST /api/orders — Create an order
routes.post("/api/orders", async (c) => {
const space = c.req.param("space") || "demo";
// Optional auth — set buyer_did from claims if authenticated
const token = extractToken(c.req.raw.headers);
let buyerDid: string | null = null;
@@ -194,51 +248,70 @@ routes.post("/api/orders", async (c) => {
if (!catalog_entry_id && !artifact_id) return c.json({ error: "Required: catalog_entry_id or artifact_id" }, 400);
if (!provider_id || !total_price) return c.json({ error: "Required: provider_id, total_price" }, 400);
const entryResult = await sql.unsafe(
"SELECT id, artifact_id FROM rcart.catalog_entries WHERE id = $1 OR artifact_id = $1",
[catalog_entry_id || artifact_id]
);
if (entryResult.length === 0) return c.json({ error: "Catalog entry not found" }, 404);
const entry = entryResult[0];
// Look up catalog entry
const catalogDoc = ensureCatalogDoc(space);
const lookupId = catalog_entry_id || artifact_id;
let entry: CatalogEntry | undefined;
if (catalogDoc.items[lookupId]) {
entry = catalogDoc.items[lookupId];
} else {
entry = Object.values(catalogDoc.items).find((e) => e.artifactId === lookupId || e.id === lookupId);
}
if (!entry) return c.json({ error: "Catalog entry not found" }, 404);
// x402 detection
const x402Header = c.req.header("x-payment");
const effectiveMethod = x402Header ? "x402" : payment_method;
const initialStatus = x402Header ? "paid" : "pending";
const result = await sql.unsafe(
`INSERT INTO rcart.orders (
catalog_entry_id, artifact_id, buyer_id, buyer_location, buyer_contact,
provider_id, provider_name, provider_distance_km,
quantity, production_cost, creator_payout, community_payout,
total_price, currency, status, payment_method, payment_tx, payment_network
${initialStatus === "paid" ? ", paid_at" : ""}
) VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13,$14,$15,$16,$17,$18
${initialStatus === "paid" ? ", NOW()" : ""})
RETURNING *`,
[
entry.id, entry.artifact_id,
buyerDid || buyer_id || null,
buyer_location ? JSON.stringify(buyer_location) : null,
buyer_contact ? JSON.stringify(buyer_contact) : null,
provider_id, provider_name || null, provider_distance_km || null,
quantity, production_cost || null, creator_payout || null, community_payout || null,
total_price, currency, initialStatus, effectiveMethod,
payment_tx || null, payment_network || null,
]
);
const orderId = crypto.randomUUID();
const now = Date.now();
// Create order doc
const oDocId = orderDocId(space, orderId);
let orderDoc = Automerge.change(Automerge.init<OrderDoc>(), 'create order', (d) => {
const init = orderSchema.init();
Object.assign(d, init);
d.meta.spaceSlug = space;
d.order.id = orderId;
d.order.catalogEntryId = entry!.id;
d.order.artifactId = entry!.artifactId;
d.order.buyerId = buyerDid || buyer_id || null;
d.order.buyerLocation = buyer_location ? JSON.stringify(buyer_location) : null;
d.order.buyerContact = buyer_contact ? JSON.stringify(buyer_contact) : null;
d.order.providerId = provider_id;
d.order.providerName = provider_name || null;
d.order.providerDistanceKm = provider_distance_km || null;
d.order.quantity = quantity;
d.order.productionCost = production_cost || null;
d.order.creatorPayout = creator_payout || null;
d.order.communityPayout = community_payout || null;
d.order.totalPrice = total_price;
d.order.currency = currency;
d.order.status = initialStatus;
d.order.paymentMethod = effectiveMethod;
d.order.paymentTx = payment_tx || null;
d.order.paymentNetwork = payment_network || null;
d.order.createdAt = now;
d.order.updatedAt = now;
if (initialStatus === "paid") d.order.paidAt = now;
});
_syncServer!.setDoc(oDocId, orderDoc);
const order = orderDoc.order;
const order = result[0];
if (initialStatus === "paid") {
depositOrderRevenue(total_price, order.id);
depositOrderRevenue(total_price, orderId);
}
return c.json(order, 201);
// Return response matching original shape
return c.json(orderToResponse(order, entry), 201);
});
// GET /api/orders — List orders
routes.get("/api/orders", async (c) => {
const space = c.req.param("space") || "demo";
// Optional auth — filter by buyer if authenticated
const token = extractToken(c.req.raw.headers);
let authedBuyer: string | null = null;
@@ -248,73 +321,156 @@ routes.get("/api/orders", async (c) => {
const { status, provider_id, buyer_id, limit = "50", offset = "0" } = c.req.query();
const conditions: string[] = [];
const params: any[] = [];
let paramIdx = 1;
const orderDocs = getSpaceOrderDocs(space);
if (status) { conditions.push(`o.status = $${paramIdx}`); params.push(status); paramIdx++; }
if (provider_id) { conditions.push(`o.provider_id = $${paramIdx}`); params.push(provider_id); paramIdx++; }
// Build enriched order list with catalog info
const catalogDoc = ensureCatalogDoc(space);
let orders = orderDocs.map(({ doc }) => {
const o = doc.order;
const catEntry = catalogDoc.items[o.catalogEntryId];
const resp = orderToResponse(o);
resp.artifact_title = catEntry?.title || null;
resp.product_type = catEntry?.productType || null;
return resp;
});
// Apply filters
if (status) orders = orders.filter((o) => o.status === status);
if (provider_id) orders = orders.filter((o) => o.provider_id === provider_id);
const effectiveBuyerId = buyer_id || (authedBuyer && !status && !provider_id ? authedBuyer : null);
if (effectiveBuyerId) { conditions.push(`o.buyer_id = $${paramIdx}`); params.push(effectiveBuyerId); paramIdx++; }
if (effectiveBuyerId) orders = orders.filter((o) => o.buyer_id === effectiveBuyerId);
// Sort by created_at descending
orders.sort((a, b) => new Date(b.created_at).getTime() - new Date(a.created_at).getTime());
const where = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
const limitNum = Math.min(parseInt(limit) || 50, 100);
const offsetNum = parseInt(offset) || 0;
const paged = orders.slice(offsetNum, offsetNum + limitNum);
const result = await sql.unsafe(
`SELECT o.*, c.title as artifact_title, c.product_type
FROM rcart.orders o JOIN rcart.catalog_entries c ON c.id = o.catalog_entry_id
${where} ORDER BY o.created_at DESC LIMIT ${limitNum} OFFSET ${offsetNum}`,
params
);
return c.json({ orders: result });
return c.json({ orders: paged });
});
// GET /api/orders/:id — Single order
routes.get("/api/orders/:id", async (c) => {
const result = await sql.unsafe(
`SELECT o.*, c.artifact as artifact_envelope, c.title as artifact_title
FROM rcart.orders o JOIN rcart.catalog_entries c ON c.id = o.catalog_entry_id
WHERE o.id = $1`,
[c.req.param("id")]
);
if (result.length === 0) return c.json({ error: "Order not found" }, 404);
return c.json(result[0]);
const space = c.req.param("space") || "demo";
const orderId = c.req.param("id");
const oDocId = orderDocId(space, orderId);
const doc = _syncServer!.getDoc<OrderDoc>(oDocId);
if (!doc) return c.json({ error: "Order not found" }, 404);
const catalogDoc = ensureCatalogDoc(space);
const catEntry = catalogDoc.items[doc.order.catalogEntryId];
const resp = orderToResponse(doc.order);
resp.artifact_envelope = catEntry?.artifact || null;
resp.artifact_title = catEntry?.title || null;
return c.json(resp);
});
// PATCH /api/orders/:id/status — Update order status
routes.patch("/api/orders/:id/status", async (c) => {
const space = c.req.param("space") || "demo";
const body = await c.req.json();
const { status, payment_tx, payment_network } = body;
const valid = ["pending", "paid", "accepted", "in_production", "ready", "shipped", "completed", "cancelled"];
if (!valid.includes(status)) return c.json({ error: `status must be one of: ${valid.join(", ")}` }, 400);
const timestampField: Record<string, string> = { paid: "paid_at", accepted: "accepted_at", completed: "completed_at" };
const extraSet = timestampField[status] ? `, ${timestampField[status]} = NOW()` : "";
const orderId = c.req.param("id");
const oDocId = orderDocId(space, orderId);
const doc = _syncServer!.getDoc<OrderDoc>(oDocId);
if (!doc) return c.json({ error: "Order not found" }, 404);
// Use parameterized query for payment info
let paymentSet = "";
const params: any[] = [status, c.req.param("id")];
if (status === "paid" && payment_tx) {
paymentSet = `, payment_tx = $3, payment_network = $4`;
params.push(payment_tx, payment_network || null);
const now = Date.now();
const updated = _syncServer!.changeDoc<OrderDoc>(oDocId, `order status → ${status}`, (d) => {
d.order.status = status;
d.order.updatedAt = now;
if (status === "paid") d.order.paidAt = now;
if (status === "accepted") d.order.acceptedAt = now;
if (status === "completed") d.order.completedAt = now;
if (status === "paid" && payment_tx) {
d.order.paymentTx = payment_tx;
d.order.paymentNetwork = payment_network || null;
}
});
if (!updated) return c.json({ error: "Order not found" }, 404);
if (status === "paid" && updated.order.totalPrice) {
depositOrderRevenue(updated.order.totalPrice, orderId);
}
const result = await sql.unsafe(
`UPDATE rcart.orders SET status = $1, updated_at = NOW()${extraSet}${paymentSet} WHERE id = $2 RETURNING *`,
params
);
if (result.length === 0) return c.json({ error: "Order not found" }, 404);
const updated = result[0];
if (status === "paid" && updated.total_price) {
depositOrderRevenue(updated.total_price, c.req.param("id"));
}
return c.json(updated);
return c.json(orderToResponse(updated.order));
});
// ── Response helpers ──
interface OrderResponse {
id: string;
catalog_entry_id: string;
artifact_id: string;
buyer_id: string | null;
buyer_location: unknown;
buyer_contact: unknown;
provider_id: string | null;
provider_name: string | null;
provider_distance_km: number | null;
quantity: number;
production_cost: number | null;
creator_payout: number | null;
community_payout: number | null;
total_price: number | null;
currency: string;
status: string;
payment_method: string | null;
payment_tx: string | null;
payment_network: string | null;
created_at: string;
paid_at: string | null;
accepted_at: string | null;
completed_at: string | null;
updated_at: string;
artifact_title?: string | null;
product_type?: string | null;
artifact_envelope?: unknown;
}
/** Convert an OrderMeta to the flat response shape matching the original SQL rows. */
function orderToResponse(o: OrderMeta, catEntry?: CatalogEntry): OrderResponse {
return {
id: o.id,
catalog_entry_id: o.catalogEntryId,
artifact_id: o.artifactId,
buyer_id: o.buyerId,
buyer_location: o.buyerLocation ? tryParse(o.buyerLocation) : null,
buyer_contact: o.buyerContact ? tryParse(o.buyerContact) : null,
provider_id: o.providerId,
provider_name: o.providerName,
provider_distance_km: o.providerDistanceKm,
quantity: o.quantity,
production_cost: o.productionCost,
creator_payout: o.creatorPayout,
community_payout: o.communityPayout,
total_price: o.totalPrice,
currency: o.currency,
status: o.status,
payment_method: o.paymentMethod,
payment_tx: o.paymentTx,
payment_network: o.paymentNetwork,
created_at: new Date(o.createdAt).toISOString(),
paid_at: o.paidAt ? new Date(o.paidAt).toISOString() : null,
accepted_at: o.acceptedAt ? new Date(o.acceptedAt).toISOString() : null,
completed_at: o.completedAt ? new Date(o.completedAt).toISOString() : null,
updated_at: new Date(o.updatedAt).toISOString(),
...(catEntry ? { artifact_title: catEntry.title, product_type: catEntry.productType } : {}),
};
}
function tryParse(s: string): unknown {
try { return JSON.parse(s); } catch { return s; }
}
// ── FULFILLMENT ROUTES ──
function round2(n: number): number {
@@ -365,6 +521,7 @@ function composeCost(artifact: Record<string, unknown>, provider: ProviderMatch,
// POST /api/fulfill/resolve — Find fulfillment options
routes.post("/api/fulfill/resolve", async (c) => {
const space = c.req.param("space") || "demo";
const body = await c.req.json();
const { artifact_id, catalog_entry_id, buyer_location, quantity = 1 } = body;
@@ -375,14 +532,21 @@ routes.post("/api/fulfill/resolve", async (c) => {
return c.json({ error: "Required: artifact_id or catalog_entry_id" }, 400);
}
const entryResult = await sql.unsafe(
"SELECT * FROM rcart.catalog_entries WHERE (artifact_id = $1 OR id = $1) AND status = 'active'",
[artifact_id || catalog_entry_id]
);
if (entryResult.length === 0) return c.json({ error: "Artifact not found in catalog" }, 404);
const catalogDoc = ensureCatalogDoc(space);
const lookupId = artifact_id || catalog_entry_id;
const entry = entryResult[0];
const artifact = entry.artifact;
// Find entry by id or artifact_id, must be active
let entry: CatalogEntry | undefined;
if (catalogDoc.items[lookupId] && catalogDoc.items[lookupId].status === "active") {
entry = catalogDoc.items[lookupId];
} else {
entry = Object.values(catalogDoc.items).find(
(e) => (e.artifactId === lookupId || e.id === lookupId) && e.status === "active"
);
}
if (!entry) return c.json({ error: "Artifact not found in catalog" }, 404);
const artifact = entry.artifact as Record<string, any>;
const capabilities = artifact.spec?.required_capabilities || [];
const substrates = artifact.spec?.substrates || [];
@@ -471,7 +635,6 @@ export const cartModule: RSpaceModule = {
landingPage: renderLanding,
async onInit(ctx) {
_syncServer = ctx.syncServer;
await initDB();
},
feeds: [
{

View File

@@ -1,54 +1,141 @@
/**
 * Files module — file sharing, public share links, memory cards.
 * Ported from rfiles-online (Django → Bun/Hono).
*
* All metadata is stored in Automerge documents via SyncServer.
* Binary files remain on the filesystem.
*/
import { Hono } from "hono";
import { readFileSync } from "node:fs";
import { resolve } from "node:path";
import { mkdir, writeFile, unlink } from "node:fs/promises";
import { createHash, randomBytes } from "node:crypto";
import { sql } from "../../shared/db/pool";
import * as Automerge from "@automerge/automerge";
import { renderShell, renderExternalAppShell } from "../../server/shell";
import { getModuleInfoList } from "../../shared/module";
import type { RSpaceModule } from "../../shared/module";
import { verifyEncryptIDToken, extractToken } from "@encryptid/sdk/server";
import { renderLanding } from "./landing";
import type { SyncServer } from '../../server/local-first/sync-server';
import { filesSchema } from './schemas';
import { filesSchema, filesDocId } from './schemas';
import type { FilesDoc, MediaFile, MemoryCard } from './schemas';
// ── Extended doc types (shares + access logs live alongside files/cards) ──
interface PublicShare {
id: string;
token: string;
mediaFileId: string;
createdBy: string | null;
expiresAt: number | null; // epoch ms, null = never
maxDownloads: number | null;
downloadCount: number;
isActive: boolean;
isPasswordProtected: boolean;
passwordHash: string | null;
note: string | null;
createdAt: number;
}
interface AccessLog {
id: string;
mediaFileId: string;
shareId: string | null;
ipAddress: string | null;
userAgent: string | null;
accessType: string;
accessedAt: number;
}
/**
* Extended doc shape supplements FilesDoc with shares and access logs.
* The base FilesDoc from schemas.ts defines files + memoryCards;
* we add shares and accessLogs as additional top-level maps.
*/
interface FilesDocExt extends FilesDoc {
shares: Record<string, PublicShare>;
accessLogs: Record<string, AccessLog>;
}
let _syncServer: SyncServer | null = null;
const routes = new Hono();
const FILES_DIR = process.env.FILES_DIR || "/data/files";
const SCHEMA_SQL = readFileSync(resolve(import.meta.dir, "db/schema.sql"), "utf-8");
// ── DB initialization ──
async function initDB() {
try {
await sql.unsafe(SCHEMA_SQL);
console.log("[Files] DB schema initialized");
} catch (e: any) {
console.error("[Files] DB init error:", e.message);
// ── Automerge document helpers ──
/**
 * Load the files doc for a space, creating and registering it on first use.
 * Docs created before the shares/accessLogs maps existed are backfilled so
 * callers can rely on both maps being present.
 */
function ensureDoc(space: string, sharedSpace: string = "default"): FilesDocExt {
  const docId = filesDocId(space, sharedSpace);
  let doc = _syncServer!.getDoc<FilesDocExt>(docId);

  if (!doc) {
    // First access for this space: build a fresh doc from the schema seed.
    const fresh = Automerge.change(Automerge.init<FilesDocExt>(), 'init files doc', (d) => {
      const seed = filesSchema.init();
      d.meta = seed.meta;
      d.meta.spaceSlug = space;
      d.meta.sharedSpace = sharedSpace;
      d.files = {};
      d.memoryCards = {};
      d.shares = {};
      d.accessLogs = {};
    });
    _syncServer!.setDoc(docId, fresh);
    doc = fresh;
  }

  // Backfill maps on legacy docs that predate the shares/accessLogs fields.
  const needsBackfill = !doc.shares || !doc.accessLogs;
  if (needsBackfill) {
    doc = _syncServer!.changeDoc<FilesDocExt>(docId, 'add shares+logs maps', (d) => {
      if (!(d as any).shares) (d as any).shares = {};
      if (!(d as any).accessLogs) (d as any).accessLogs = {};
    })!;
  }

  return doc;
}
// ── Cleanup timers (replace Celery) ──
// Deactivate expired shares every hour
setInterval(async () => {
setInterval(() => {
if (!_syncServer) return;
try {
const result = await sql.unsafe(
"UPDATE rfiles.public_shares SET is_active = FALSE WHERE is_active = TRUE AND expires_at IS NOT NULL AND expires_at < NOW()"
);
if ((result as any).count > 0) console.log(`[Files] Deactivated ${(result as any).count} expired shares`);
const now = Date.now();
for (const docId of _syncServer.getDocIds()) {
if (!docId.includes(':files:cards:')) continue;
const doc = _syncServer.getDoc<FilesDocExt>(docId);
if (!doc?.shares) continue;
const toDeactivate = Object.values(doc.shares).filter(
(s) => s.isActive && s.expiresAt !== null && s.expiresAt < now
);
if (toDeactivate.length > 0) {
_syncServer.changeDoc<FilesDocExt>(docId, 'deactivate expired shares', (d) => {
for (const s of toDeactivate) {
if (d.shares[s.id]) d.shares[s.id].isActive = false;
}
});
console.log(`[Files] Deactivated ${toDeactivate.length} expired shares in ${docId}`);
}
}
} catch (e: any) { console.error("[Files] Cleanup error:", e.message); }
}, 3600_000);
// Delete access logs older than 90 days, daily
setInterval(async () => {
setInterval(() => {
if (!_syncServer) return;
try {
await sql.unsafe("DELETE FROM rfiles.access_logs WHERE accessed_at < NOW() - INTERVAL '90 days'");
const cutoff = Date.now() - 90 * 86400_000;
for (const docId of _syncServer.getDocIds()) {
if (!docId.includes(':files:cards:')) continue;
const doc = _syncServer.getDoc<FilesDocExt>(docId);
if (!doc?.accessLogs) continue;
const toDelete = Object.values(doc.accessLogs).filter(
(l) => l.accessedAt < cutoff
);
if (toDelete.length > 0) {
_syncServer.changeDoc<FilesDocExt>(docId, 'prune old access logs', (d) => {
for (const l of toDelete) {
delete d.accessLogs[l.id];
}
});
}
}
} catch (e: any) { console.error("[Files] Log cleanup error:", e.message); }
}, 86400_000);
@ -69,6 +156,11 @@ async function computeFileHash(buffer: ArrayBuffer): Promise<string> {
return hash.digest("hex");
}
/** Serialize a doc-sourced object for JSON responses (strip Automerge proxies). */
function toPlain<T>(obj: T): T {
  // A stringify/parse round-trip detaches values from their proxy wrappers,
  // leaving plain JSON-safe data for the response body.
  const json = JSON.stringify(obj);
  return JSON.parse(json) as T;
}
// ── File upload ──
routes.post("/api/files", async (c) => {
const token = extractToken(c.req.raw.headers);
@ -83,7 +175,9 @@ routes.post("/api/files", async (c) => {
const space = c.req.param("space") || formData.get("space")?.toString() || "default";
const title = formData.get("title")?.toString() || file.name.replace(/\.[^.]+$/, "");
const description = formData.get("description")?.toString() || "";
const tags = formData.get("tags")?.toString() || "[]";
const tagsRaw = formData.get("tags")?.toString() || "[]";
let tags: string[] = [];
try { tags = JSON.parse(tagsRaw); } catch { tags = []; }
const uploadedBy = claims.sub;
const buffer = await file.arrayBuffer();
@ -97,13 +191,32 @@ routes.post("/api/files", async (c) => {
await mkdir(resolve(fullPath, ".."), { recursive: true });
await writeFile(fullPath, Buffer.from(buffer));
const [row] = await sql.unsafe(
`INSERT INTO rfiles.media_files (original_filename, title, description, mime_type, file_size, file_hash, storage_path, tags, uploaded_by, shared_space)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8::jsonb, $9, $10) RETURNING *`,
[file.name, title, description, file.type || "application/octet-stream", file.size, fileHash, storagePath, tags, uploadedBy, space]
);
const docId = filesDocId(space, space);
ensureDoc(space, space);
return c.json({ file: row }, 201);
const mediaFile: MediaFile = {
id: fileId,
originalFilename: file.name,
title,
description,
mimeType: file.type || "application/octet-stream",
fileSize: file.size,
fileHash,
storagePath,
tags,
isProcessed: false,
processingError: null,
uploadedBy,
sharedSpace: space,
createdAt: Date.now(),
updatedAt: Date.now(),
};
_syncServer!.changeDoc<FilesDocExt>(docId, `upload file ${fileId}`, (d) => {
d.files[fileId] = mediaFile;
});
return c.json({ file: toPlain(mediaFile) }, 201);
});
// ── File listing ──
@ -113,60 +226,72 @@ routes.get("/api/files", async (c) => {
const limit = Math.min(Number(c.req.query("limit")) || 50, 200);
const offset = Number(c.req.query("offset")) || 0;
let query = "SELECT * FROM rfiles.media_files WHERE shared_space = $1";
const params: any[] = [space];
let paramIdx = 2;
const doc = ensureDoc(space, space);
let files = Object.values(doc.files)
.filter((f) => f.sharedSpace === space);
if (mimeType) {
query += ` AND mime_type LIKE $${paramIdx}`;
params.push(`${mimeType}%`);
paramIdx++;
files = files.filter((f) => f.mimeType && f.mimeType.startsWith(mimeType));
}
query += ` ORDER BY created_at DESC LIMIT $${paramIdx} OFFSET $${paramIdx + 1}`;
params.push(limit, offset);
// Sort by createdAt descending
files.sort((a, b) => b.createdAt - a.createdAt);
const rows = await sql.unsafe(query, params);
const [{ count }] = await sql.unsafe(
"SELECT COUNT(*) as count FROM rfiles.media_files WHERE shared_space = $1",
[space]
);
const total = files.length;
const paged = files.slice(offset, offset + limit);
return c.json({ files: rows, total: Number(count), limit, offset });
return c.json({ files: toPlain(paged), total, limit, offset });
});
// ── File download ──
routes.get("/api/files/:id/download", async (c) => {
const [file] = await sql.unsafe("SELECT * FROM rfiles.media_files WHERE id = $1", [c.req.param("id")]);
const fileId = c.req.param("id");
const space = c.req.param("space") || c.req.query("space") || "default";
const doc = ensureDoc(space, space);
const file = doc.files[fileId];
if (!file) return c.json({ error: "File not found" }, 404);
const fullPath = resolve(FILES_DIR, file.storage_path);
const fullPath = resolve(FILES_DIR, file.storagePath);
const bunFile = Bun.file(fullPath);
if (!await bunFile.exists()) return c.json({ error: "File missing from storage" }, 404);
return new Response(bunFile, {
headers: {
"Content-Type": file.mime_type || "application/octet-stream",
"Content-Disposition": `attachment; filename="${file.original_filename}"`,
"Content-Length": String(file.file_size),
"Content-Type": file.mimeType || "application/octet-stream",
"Content-Disposition": `attachment; filename="${file.originalFilename}"`,
"Content-Length": String(file.fileSize),
},
});
});
// ── File detail ──
routes.get("/api/files/:id", async (c) => {
const [file] = await sql.unsafe("SELECT * FROM rfiles.media_files WHERE id = $1", [c.req.param("id")]);
const fileId = c.req.param("id");
const space = c.req.param("space") || c.req.query("space") || "default";
const doc = ensureDoc(space, space);
const file = doc.files[fileId];
if (!file) return c.json({ error: "File not found" }, 404);
return c.json({ file });
return c.json({ file: toPlain(file) });
});
// ── File delete ──
routes.delete("/api/files/:id", async (c) => {
const [file] = await sql.unsafe("SELECT * FROM rfiles.media_files WHERE id = $1", [c.req.param("id")]);
const fileId = c.req.param("id");
const space = c.req.param("space") || c.req.query("space") || "default";
const docId = filesDocId(space, space);
const doc = ensureDoc(space, space);
const file = doc.files[fileId];
if (!file) return c.json({ error: "File not found" }, 404);
try { await unlink(resolve(FILES_DIR, file.storage_path)); } catch {}
await sql.unsafe("DELETE FROM rfiles.media_files WHERE id = $1", [c.req.param("id")]);
try { await unlink(resolve(FILES_DIR, file.storagePath)); } catch {}
_syncServer!.changeDoc<FilesDocExt>(docId, `delete file ${fileId}`, (d) => {
delete d.files[fileId];
// Also remove any shares referencing this file
for (const [sid, share] of Object.entries(d.shares)) {
if (share.mediaFileId === fileId) delete d.shares[sid];
}
});
return c.json({ message: "Deleted" });
});
@ -177,13 +302,18 @@ routes.post("/api/files/:id/share", async (c) => {
let claims;
try { claims = await verifyEncryptIDToken(authToken); } catch { return c.json({ error: "Invalid token" }, 401); }
const [file] = await sql.unsafe("SELECT * FROM rfiles.media_files WHERE id = $1", [c.req.param("id")]);
const fileId = c.req.param("id");
const space = c.req.param("space") || c.req.query("space") || "default";
const docId = filesDocId(space, space);
const doc = ensureDoc(space, space);
const file = doc.files[fileId];
if (!file) return c.json({ error: "File not found" }, 404);
if (file.uploaded_by && file.uploaded_by !== claims.sub) return c.json({ error: "Not authorized" }, 403);
if (file.uploadedBy && file.uploadedBy !== claims.sub) return c.json({ error: "Not authorized" }, 403);
const body = await c.req.json<{ expires_in_hours?: number; max_downloads?: number; password?: string; note?: string }>();
const token = generateToken();
const expiresAt = body.expires_in_hours ? new Date(Date.now() + body.expires_in_hours * 3600_000).toISOString() : null;
const shareToken = generateToken();
const expiresAt = body.expires_in_hours ? Date.now() + body.expires_in_hours * 3600_000 : null;
const createdBy = claims.sub;
let passwordHash: string | null = null;
@ -193,27 +323,52 @@ routes.post("/api/files/:id/share", async (c) => {
isPasswordProtected = true;
}
const [share] = await sql.unsafe(
`INSERT INTO rfiles.public_shares (token, media_file_id, created_by, expires_at, max_downloads, is_password_protected, password_hash, note)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8) RETURNING *`,
[token, file.id, createdBy, expiresAt, body.max_downloads || null, isPasswordProtected, passwordHash, body.note || null]
);
const shareId = crypto.randomUUID();
const logId = crypto.randomUUID();
const now = Date.now();
await sql.unsafe(
"INSERT INTO rfiles.access_logs (media_file_id, share_id, access_type) VALUES ($1, $2, 'share_created')",
[file.id, share.id]
);
const share: PublicShare = {
id: shareId,
token: shareToken,
mediaFileId: fileId,
createdBy,
expiresAt,
maxDownloads: body.max_downloads || null,
downloadCount: 0,
isActive: true,
isPasswordProtected,
passwordHash,
note: body.note || null,
createdAt: now,
};
return c.json({ share: { ...share, url: `/s/${token}` } }, 201);
_syncServer!.changeDoc<FilesDocExt>(docId, `create share for file ${fileId}`, (d) => {
d.shares[shareId] = share;
d.accessLogs[logId] = {
id: logId,
mediaFileId: fileId,
shareId,
ipAddress: null,
userAgent: null,
accessType: 'share_created',
accessedAt: now,
};
});
return c.json({ share: { ...toPlain(share), url: `/s/${shareToken}` } }, 201);
});
// ── List shares for a file ──
routes.get("/api/files/:id/shares", async (c) => {
const rows = await sql.unsafe(
"SELECT * FROM rfiles.public_shares WHERE media_file_id = $1 ORDER BY created_at DESC",
[c.req.param("id")]
);
return c.json({ shares: rows });
const fileId = c.req.param("id");
const space = c.req.param("space") || c.req.query("space") || "default";
const doc = ensureDoc(space, space);
const shares = Object.values(doc.shares)
.filter((s) => s.mediaFileId === fileId)
.sort((a, b) => b.createdAt - a.createdAt);
return c.json({ shares: toPlain(shares) });
});
// ── Revoke share ──
@ -223,83 +378,126 @@ routes.post("/api/shares/:shareId/revoke", async (c) => {
let claims;
try { claims = await verifyEncryptIDToken(authToken); } catch { return c.json({ error: "Invalid token" }, 401); }
const [share] = await sql.unsafe(
"SELECT s.*, f.uploaded_by FROM rfiles.public_shares s JOIN rfiles.media_files f ON s.media_file_id = f.id WHERE s.id = $1",
[c.req.param("shareId")]
);
if (!share) return c.json({ error: "Share not found" }, 404);
if (share.uploaded_by && share.uploaded_by !== claims.sub) return c.json({ error: "Not authorized" }, 403);
const shareId = c.req.param("shareId");
const space = c.req.param("space") || c.req.query("space") || "default";
const docId = filesDocId(space, space);
const doc = ensureDoc(space, space);
const [revoked] = await sql.unsafe(
"UPDATE rfiles.public_shares SET is_active = FALSE WHERE id = $1 RETURNING *",
[c.req.param("shareId")]
);
return c.json({ message: "Revoked", share: revoked });
const share = doc.shares[shareId];
if (!share) return c.json({ error: "Share not found" }, 404);
// Check authorization via the linked file
const file = doc.files[share.mediaFileId];
if (file?.uploadedBy && file.uploadedBy !== claims.sub) return c.json({ error: "Not authorized" }, 403);
_syncServer!.changeDoc<FilesDocExt>(docId, `revoke share ${shareId}`, (d) => {
d.shares[shareId].isActive = false;
});
const updated = _syncServer!.getDoc<FilesDocExt>(docId)!;
return c.json({ message: "Revoked", share: toPlain(updated.shares[shareId]) });
});
// ── Public share download ──
routes.get("/s/:token", async (c) => {
const [share] = await sql.unsafe(
`SELECT s.*, f.storage_path, f.mime_type, f.original_filename, f.file_size
FROM rfiles.public_shares s JOIN rfiles.media_files f ON s.media_file_id = f.id
WHERE s.token = $1`,
[c.req.param("token")]
);
if (!share) return c.json({ error: "Share not found" }, 404);
if (!share.is_active) return c.json({ error: "Share has been revoked" }, 410);
if (share.expires_at && new Date(share.expires_at) < new Date()) return c.json({ error: "Share has expired" }, 410);
if (share.max_downloads && share.download_count >= share.max_downloads) return c.json({ error: "Download limit reached" }, 410);
const shareToken = c.req.param("token");
if (share.is_password_protected) {
// Find the share across all files docs
let foundDocId: string | null = null;
let foundShare: PublicShare | null = null;
let foundFile: MediaFile | null = null;
for (const docId of _syncServer!.getDocIds()) {
if (!docId.includes(':files:cards:')) continue;
const doc = _syncServer!.getDoc<FilesDocExt>(docId);
if (!doc?.shares) continue;
for (const s of Object.values(doc.shares)) {
if (s.token === shareToken) {
foundDocId = docId;
foundShare = s;
foundFile = doc.files[s.mediaFileId] || null;
break;
}
}
if (foundShare) break;
}
if (!foundShare || !foundFile) return c.json({ error: "Share not found" }, 404);
if (!foundShare.isActive) return c.json({ error: "Share has been revoked" }, 410);
if (foundShare.expiresAt && foundShare.expiresAt < Date.now()) return c.json({ error: "Share has expired" }, 410);
if (foundShare.maxDownloads && foundShare.downloadCount >= foundShare.maxDownloads) return c.json({ error: "Download limit reached" }, 410);
if (foundShare.isPasswordProtected) {
const pw = c.req.query("password");
if (!pw) return c.json({ error: "Password required", is_password_protected: true }, 401);
const hash = await hashPassword(pw);
if (hash !== share.password_hash) return c.json({ error: "Invalid password" }, 401);
if (hash !== foundShare.passwordHash) return c.json({ error: "Invalid password" }, 401);
}
await sql.unsafe("UPDATE rfiles.public_shares SET download_count = download_count + 1 WHERE id = $1", [share.id]);
const logId = crypto.randomUUID();
const ip = c.req.header("X-Forwarded-For")?.split(",")[0]?.trim() || c.req.header("X-Real-IP") || null;
const ua = c.req.header("User-Agent") || "";
await sql.unsafe(
"INSERT INTO rfiles.access_logs (media_file_id, share_id, ip_address, user_agent, access_type) VALUES ($1, $2, $3, $4, 'download')",
[share.media_file_id, share.id, ip, ua.slice(0, 500)]
);
const fullPath = resolve(FILES_DIR, share.storage_path);
_syncServer!.changeDoc<FilesDocExt>(foundDocId!, `download via share ${foundShare.id}`, (d) => {
d.shares[foundShare!.id].downloadCount += 1;
d.accessLogs[logId] = {
id: logId,
mediaFileId: foundShare!.mediaFileId,
shareId: foundShare!.id,
ipAddress: ip,
userAgent: ua.slice(0, 500),
accessType: 'download',
accessedAt: Date.now(),
};
});
const fullPath = resolve(FILES_DIR, foundFile.storagePath);
const bunFile = Bun.file(fullPath);
if (!await bunFile.exists()) return c.json({ error: "File missing" }, 404);
return new Response(bunFile, {
headers: {
"Content-Type": share.mime_type || "application/octet-stream",
"Content-Disposition": `attachment; filename="${share.original_filename}"`,
"Content-Length": String(share.file_size),
"Content-Type": foundFile.mimeType || "application/octet-stream",
"Content-Disposition": `attachment; filename="${foundFile.originalFilename}"`,
"Content-Length": String(foundFile.fileSize),
},
});
});
// ── Share info (public) ──
routes.get("/s/:token/info", async (c) => {
const [share] = await sql.unsafe(
`SELECT s.is_password_protected, s.is_active, s.expires_at, s.max_downloads, s.download_count, s.note,
f.original_filename, f.mime_type, f.file_size
FROM rfiles.public_shares s JOIN rfiles.media_files f ON s.media_file_id = f.id
WHERE s.token = $1`,
[c.req.param("token")]
);
if (!share) return c.json({ error: "Share not found" }, 404);
const shareToken = c.req.param("token");
const isValid = share.is_active &&
(!share.expires_at || new Date(share.expires_at) > new Date()) &&
(!share.max_downloads || share.download_count < share.max_downloads);
let foundShare: PublicShare | null = null;
let foundFile: MediaFile | null = null;
for (const docId of _syncServer!.getDocIds()) {
if (!docId.includes(':files:cards:')) continue;
const doc = _syncServer!.getDoc<FilesDocExt>(docId);
if (!doc?.shares) continue;
for (const s of Object.values(doc.shares)) {
if (s.token === shareToken) {
foundShare = s;
foundFile = doc.files[s.mediaFileId] || null;
break;
}
}
if (foundShare) break;
}
if (!foundShare || !foundFile) return c.json({ error: "Share not found" }, 404);
const isValid = foundShare.isActive &&
(!foundShare.expiresAt || foundShare.expiresAt > Date.now()) &&
(!foundShare.maxDownloads || foundShare.downloadCount < foundShare.maxDownloads);
return c.json({
is_password_protected: share.is_password_protected,
is_password_protected: foundShare.isPasswordProtected,
is_valid: isValid,
expires_at: share.expires_at,
downloads_remaining: share.max_downloads ? share.max_downloads - share.download_count : null,
file_info: { filename: share.original_filename, mime_type: share.mime_type, size: share.file_size },
note: share.note,
expires_at: foundShare.expiresAt ? new Date(foundShare.expiresAt).toISOString() : null,
downloads_remaining: foundShare.maxDownloads ? foundShare.maxDownloads - foundShare.downloadCount : null,
file_info: { filename: foundFile.originalFilename, mime_type: foundFile.mimeType, size: foundFile.fileSize },
note: foundShare.note,
});
});
@ -313,13 +511,30 @@ routes.post("/api/cards", async (c) => {
const body = await c.req.json<{ title: string; body?: string; card_type?: string; tags?: string[]; shared_space?: string }>();
const space = c.req.param("space") || body.shared_space || "default";
const createdBy = claims.sub;
const docId = filesDocId(space, space);
ensureDoc(space, space);
const [card] = await sql.unsafe(
`INSERT INTO rfiles.memory_cards (shared_space, title, body, card_type, tags, created_by)
VALUES ($1, $2, $3, $4, $5::jsonb, $6) RETURNING *`,
[space, body.title, body.body || "", body.card_type || "note", JSON.stringify(body.tags || []), createdBy]
);
return c.json({ card }, 201);
const cardId = crypto.randomUUID();
const now = Date.now();
const card: MemoryCard = {
id: cardId,
sharedSpace: space,
title: body.title,
body: body.body || "",
cardType: body.card_type || "note",
tags: body.tags || [],
position: 0,
createdBy,
createdAt: now,
updatedAt: now,
};
_syncServer!.changeDoc<FilesDocExt>(docId, `create card ${cardId}`, (d) => {
d.memoryCards[cardId] = card;
});
return c.json({ card: toPlain(card) }, 201);
});
routes.get("/api/cards", async (c) => {
@ -327,43 +542,61 @@ routes.get("/api/cards", async (c) => {
const cardType = c.req.query("type");
const limit = Math.min(Number(c.req.query("limit")) || 50, 200);
let query = "SELECT * FROM rfiles.memory_cards WHERE shared_space = $1";
const params: any[] = [space];
if (cardType) { query += " AND card_type = $2"; params.push(cardType); }
query += " ORDER BY position, created_at DESC LIMIT $" + (params.length + 1);
params.push(limit);
const doc = ensureDoc(space, space);
const rows = await sql.unsafe(query, params);
return c.json({ cards: rows, total: rows.length });
let cards = Object.values(doc.memoryCards)
.filter((card) => card.sharedSpace === space);
if (cardType) {
cards = cards.filter((card) => card.cardType === cardType);
}
// Sort by position ascending, then createdAt descending
cards.sort((a, b) => a.position - b.position || b.createdAt - a.createdAt);
cards = cards.slice(0, limit);
return c.json({ cards: toPlain(cards), total: cards.length });
});
routes.patch("/api/cards/:id", async (c) => {
const body = await c.req.json<{ title?: string; body?: string; card_type?: string; tags?: string[]; position?: number }>();
const sets: string[] = [];
const params: any[] = [];
let idx = 1;
const cardId = c.req.param("id");
const space = c.req.param("space") || c.req.query("space") || "default";
const docId = filesDocId(space, space);
const doc = ensureDoc(space, space);
if (body.title !== undefined) { sets.push(`title = $${idx}`); params.push(body.title); idx++; }
if (body.body !== undefined) { sets.push(`body = $${idx}`); params.push(body.body); idx++; }
if (body.card_type !== undefined) { sets.push(`card_type = $${idx}`); params.push(body.card_type); idx++; }
if (body.tags !== undefined) { sets.push(`tags = $${idx}::jsonb`); params.push(JSON.stringify(body.tags)); idx++; }
if (body.position !== undefined) { sets.push(`position = $${idx}`); params.push(body.position); idx++; }
if (sets.length === 0) return c.json({ error: "No fields to update" }, 400);
sets.push(`updated_at = NOW()`);
params.push(c.req.param("id"));
const [card] = await sql.unsafe(
`UPDATE rfiles.memory_cards SET ${sets.join(", ")} WHERE id = $${idx} RETURNING *`,
params
);
const card = doc.memoryCards[cardId];
if (!card) return c.json({ error: "Card not found" }, 404);
return c.json({ card });
const body = await c.req.json<{ title?: string; body?: string; card_type?: string; tags?: string[]; position?: number }>();
if (body.title === undefined && body.body === undefined && body.card_type === undefined && body.tags === undefined && body.position === undefined) {
return c.json({ error: "No fields to update" }, 400);
}
_syncServer!.changeDoc<FilesDocExt>(docId, `update card ${cardId}`, (d) => {
const c = d.memoryCards[cardId];
if (body.title !== undefined) c.title = body.title;
if (body.body !== undefined) c.body = body.body;
if (body.card_type !== undefined) c.cardType = body.card_type;
if (body.tags !== undefined) c.tags = body.tags;
if (body.position !== undefined) c.position = body.position;
c.updatedAt = Date.now();
});
const updated = _syncServer!.getDoc<FilesDocExt>(docId)!;
return c.json({ card: toPlain(updated.memoryCards[cardId]) });
});
routes.delete("/api/cards/:id", async (c) => {
const [card] = await sql.unsafe("DELETE FROM rfiles.memory_cards WHERE id = $1 RETURNING id", [c.req.param("id")]);
if (!card) return c.json({ error: "Card not found" }, 404);
const cardId = c.req.param("id");
const space = c.req.param("space") || c.req.query("space") || "default";
const docId = filesDocId(space, space);
const doc = ensureDoc(space, space);
if (!doc.memoryCards[cardId]) return c.json({ error: "Card not found" }, 404);
_syncServer!.changeDoc<FilesDocExt>(docId, `delete card ${cardId}`, (d) => {
delete d.memoryCards[cardId];
});
return c.json({ message: "Deleted" });
});
@ -408,7 +641,6 @@ export const filesModule: RSpaceModule = {
landingPage: renderLanding,
async onInit(ctx) {
_syncServer = ctx.syncServer;
await initDB();
},
standaloneDomain: "rfiles.online",
externalApp: { url: "https://files.rfiles.online", name: "Seafile" },

View File

@ -5,31 +5,32 @@
*/
import { Hono } from "hono";
import { readFileSync } from "node:fs";
import { resolve } from "node:path";
import { sql } from "../../shared/db/pool";
import * as Automerge from "@automerge/automerge";
import { renderShell } from "../../server/shell";
import type { RSpaceModule } from "../../shared/module";
import { getModuleInfoList } from "../../shared/module";
import { verifyEncryptIDToken, extractToken } from "@encryptid/sdk/server";
import { renderLanding } from "./landing";
import type { SyncServer } from '../../server/local-first/sync-server';
import { fundsSchema } from './schemas';
import { fundsSchema, fundsDocId, type FundsDoc, type SpaceFlow } from './schemas';
let _syncServer: SyncServer | null = null;
const FLOW_SERVICE_URL = process.env.FLOW_SERVICE_URL || "http://payment-flow:3010";
// ── DB initialization ──
const SCHEMA_SQL = readFileSync(resolve(import.meta.dir, "db/schema.sql"), "utf-8");
async function initDB() {
try {
await sql.unsafe(SCHEMA_SQL);
console.log("[Funds] DB schema initialized");
} catch (e) {
console.error("[Funds] DB init error:", e);
/**
 * Fetch the funds doc for a space, initializing an empty one on first access.
 */
function ensureDoc(space: string): FundsDoc {
  const docId = fundsDocId(space);

  // Fast path: doc already registered with the sync server.
  const cached = _syncServer!.getDoc<FundsDoc>(docId);
  if (cached) return cached;

  // Slow path: seed a new doc from the schema and register it.
  const created = Automerge.change(Automerge.init<FundsDoc>(), 'init', (d) => {
    const seed = fundsSchema.init();
    d.meta = seed.meta;
    d.meta.spaceSlug = space;
    d.spaceFlows = {};
  });
  _syncServer!.setDoc(docId, created);
  return created;
}
const routes = new Hono();
@ -42,29 +43,22 @@ routes.get("/api/flows", async (c) => {
const owner = c.req.header("X-Owner-Address") || "";
const space = c.req.query("space") || "";
// If space filter provided, get flow IDs from space_flows table
// If space filter provided, get flow IDs from Automerge doc
if (space) {
try {
const rows = await sql.unsafe(
"SELECT flow_id FROM rfunds.space_flows WHERE space_slug = $1",
[space],
);
if (rows.length === 0) return c.json([]);
const doc = ensureDoc(space);
const flowIds = Object.values(doc.spaceFlows).map((sf) => sf.flowId);
if (flowIds.length === 0) return c.json([]);
// Fetch each flow from flow-service
const flows = await Promise.all(
rows.map(async (r: any) => {
try {
const res = await fetch(`${FLOW_SERVICE_URL}/api/flows/${r.flow_id}`);
if (res.ok) return await res.json();
} catch {}
return null;
}),
);
return c.json(flows.filter(Boolean));
} catch {
// Fall through to unfiltered fetch
}
const flows = await Promise.all(
flowIds.map(async (fid) => {
try {
const res = await fetch(`${FLOW_SERVICE_URL}/api/flows/${fid}`);
if (res.ok) return await res.json();
} catch {}
return null;
}),
);
return c.json(flows.filter(Boolean));
}
const res = await fetch(`${FLOW_SERVICE_URL}/api/flows?owner=${encodeURIComponent(owner)}`);
@ -163,11 +157,14 @@ routes.post("/api/space-flows", async (c) => {
const { space, flowId } = await c.req.json();
if (!space || !flowId) return c.json({ error: "space and flowId required" }, 400);
await sql.unsafe(
`INSERT INTO rfunds.space_flows (space_slug, flow_id, added_by)
VALUES ($1, $2, $3) ON CONFLICT DO NOTHING`,
[space, flowId, claims.sub],
);
const docId = fundsDocId(space);
ensureDoc(space);
_syncServer!.changeDoc<FundsDoc>(docId, 'add space flow', (d) => {
const key = `${space}:${flowId}`;
if (!d.spaceFlows[key]) {
d.spaceFlows[key] = { id: key, spaceSlug: space, flowId, addedBy: claims.sub, createdAt: Date.now() };
}
});
return c.json({ ok: true });
});
@ -181,10 +178,16 @@ routes.delete("/api/space-flows/:flowId", async (c) => {
const space = c.req.query("space") || "";
if (!space) return c.json({ error: "space query param required" }, 400);
await sql.unsafe(
"DELETE FROM rfunds.space_flows WHERE space_slug = $1 AND flow_id = $2",
[space, flowId],
);
const docId = fundsDocId(space);
const doc = _syncServer!.getDoc<FundsDoc>(docId);
if (doc) {
const key = `${space}:${flowId}`;
if (doc.spaceFlows[key]) {
_syncServer!.changeDoc<FundsDoc>(docId, 'remove space flow', (d) => {
delete d.spaceFlows[key];
});
}
}
return c.json({ ok: true });
});
@ -254,7 +257,6 @@ export const fundsModule: RSpaceModule = {
landingPage: renderLanding,
async onInit(ctx) {
_syncServer = ctx.syncServer;
await initDB();
},
standaloneDomain: "rfunds.online",
feeds: [

File diff suppressed because it is too large Load Diff

View File

@ -4,153 +4,68 @@
* Port of rnotes-online (Next.js + Prisma Hono + postgres.js).
* Supports multiple note types: text, code, bookmark, audio, image, file.
*
* Local-first migration: dual-write (Automerge + PG) during transition.
* Local-first: All data stored exclusively in Automerge documents via SyncServer.
*/
import { Hono } from "hono";
import { readFileSync } from "node:fs";
import { resolve } from "node:path";
import * as Automerge from "@automerge/automerge";
import { sql } from "../../shared/db/pool";
import { renderShell } from "../../server/shell";
import { getModuleInfoList } from "../../shared/module";
import type { RSpaceModule, SpaceLifecycleContext } from "../../shared/module";
import { verifyEncryptIDToken, extractToken } from "@encryptid/sdk/server";
import { renderLanding } from "./landing";
import { notebookSchema, notebookDocId } from "./schemas";
import { notebookSchema, notebookDocId, createNoteItem } from "./schemas";
import type { NotebookDoc, NoteItem } from "./schemas";
import type { SyncServer } from "../../server/local-first/sync-server";
const routes = new Hono();
// ── DB initialization ──
const SCHEMA_SQL = readFileSync(resolve(import.meta.dir, "db/schema.sql"), "utf-8");
// Apply the notes schema SQL. Errors are logged and swallowed (best-effort
// startup) rather than propagated to the caller.
// NOTE(review): this appears on the removed side of the diff — the new
// Automerge-only module no longer calls it.
async function initDB() {
  try {
    await sql.unsafe(SCHEMA_SQL);
    console.log("[Notes] DB schema initialized");
  } catch (e) {
    // Deliberate swallow: a DB failure should not block module startup.
    console.error("[Notes] DB init error:", e);
  }
}
/**
 * Seed demo data (3 notebooks, 7 notes, 7 tags) into Postgres.
 * No-op when any notebook already exists. Errors are logged, not thrown,
 * so a failed seed does not block startup.
 */
async function seedDemoIfEmpty() {
  try {
    // Skip seeding when the notebooks table already has rows.
    const count = await sql.unsafe("SELECT count(*)::int as cnt FROM rnotes.notebooks");
    if (parseInt(count[0].cnt) > 0) return;
    // Notebook 1: Project Ideas
    const nb1 = await sql.unsafe(
      `INSERT INTO rnotes.notebooks (title, description, cover_color, is_public)
       VALUES ('Project Ideas', 'Brainstorms and design notes for the r* ecosystem', '#6366f1', true) RETURNING id`
    );
    // Notebook 2: Meeting Notes
    const nb2 = await sql.unsafe(
      `INSERT INTO rnotes.notebooks (title, description, cover_color, is_public)
       VALUES ('Meeting Notes', 'Weekly standups, design reviews, and retrospectives', '#f59e0b', true) RETURNING id`
    );
    // Notebook 3: How-To Guides
    const nb3 = await sql.unsafe(
      `INSERT INTO rnotes.notebooks (title, description, cover_color, is_public)
       VALUES ('How-To Guides', 'Tutorials and onboarding guides for contributors', '#10b981', true) RETURNING id`
    );
    // Create tags (upsert so re-runs against a partially seeded DB are safe)
    const tagIds: Record<string, string> = {};
    for (const name of ["design", "architecture", "cosmolocal", "governance", "onboarding", "review", "standup"]) {
      const row = await sql.unsafe(
        `INSERT INTO rnotes.tags (name) VALUES ($1) ON CONFLICT (name) DO UPDATE SET name = $1 RETURNING id`,
        [name]
      );
      tagIds[name] = row[0].id;
    }
    // Seed notes
    const notes = [
      {
        nbId: nb1[0].id, title: "Cosmolocal Manufacturing Network",
        content: "## Vision\n\nDesign global, manufacture local. Every creative work should be producible by the nearest capable provider.\n\n## Key Components\n\n- **Artifact Spec**: Standardized envelope describing what to produce\n- **Provider Registry**: Directory of local makers with capabilities + pricing\n- **rCart**: Marketplace connecting creators to providers\n- **Revenue Splits**: 50% provider, 35% creator, 15% community\n\n## Open Questions\n\n- How do we handle quality assurance across distributed providers?\n- Should providers be able to set custom margins?\n- What's the minimum viable set of capabilities for launch?",
        tags: ["cosmolocal", "architecture"], pinned: true,
      },
      {
        nbId: nb1[0].id, title: "Revenue Sharing Model",
        content: "## Current Split\n\n| Recipient | Share | Rationale |\n|-----------|-------|-----------|\n| Provider | 50% | Covers materials, labor, shipping |\n| Creator | 35% | Design and creative work |\n| Community | 15% | Platform maintenance, commons fund |\n\n## Enoughness Thresholds\n\nOnce a funnel reaches its sufficient threshold, surplus flows to the next highest-need funnel. This prevents accumulation and keeps resources flowing.\n\n## Implementation\n\nrFunds Flow Service handles deposits from rCart. Each order total is routed through the configured flow → funnel → overflow splits.",
        tags: ["cosmolocal", "governance"],
      },
      {
        nbId: nb1[0].id, title: "FUN Model: Forget, Update, New",
        content: "## Replacing CRUD\n\nNothing is permanently destroyed in rSpace.\n\n- **Forget** replaces Delete — soft-delete with `forgotten: true`. Shapes stay in document, hidden from canvas. Memory panel lets you browse + Remember.\n- **Update** stays the same — public `sync.updateShape()` for programmatic updates\n- **New** replaces Create — language shift: toolbar says \"New X\", events are `new-shape`\n\n## Why?\n\nData sovereignty means users should always be able to recover their work. The Memory panel makes forgotten shapes discoverable, like a digital archive.",
        tags: ["design", "architecture"],
      },
      {
        nbId: nb2[0].id, title: "Weekly Standup — Feb 15, 2026",
        content: "## Attendees\n\nAlice, Bob, Carol\n\n## Updates\n\n**Alice**: Finished EncryptID guardian recovery flow. 2-of-3 guardian approval working. Next: device linking via QR code.\n\n**Bob**: Provider registry now has 6 printers globally. Working on proximity search with earthdistance extension.\n\n**Carol**: rFunds river visualization deployed. Enoughness layer showing golden glow on sufficient funnels.\n\n## Action Items\n\n- [ ] Alice: Document guardian recovery API endpoints\n- [ ] Bob: Add turnaround time estimates to provider matching\n- [ ] Carol: Add demo mode to river view with mock data",
        tags: ["standup"],
      },
      {
        nbId: nb2[0].id, title: "Design Review — rBooks Flipbook Reader",
        content: "## What We Reviewed\n\nThe react-pageflip integration for PDF reading in rBooks.\n\n## Feedback\n\n1. **Page turn animation** — smooth, feels good on desktop. On mobile, swipe gesture needs larger hit area.\n2. **PDF rendering** — react-pdf handles most PDFs well. Large files (>50MB) cause browser memory issues.\n3. **Read Locally mode** — IndexedDB storage works. Need to show storage usage somewhere.\n\n## Decisions\n\n- Ship current version, iterate on mobile\n- Add a 50MB soft warning on upload\n- Explore PDF.js worker for background rendering",
        tags: ["review", "design"],
      },
      {
        nbId: nb3[0].id, title: "Getting Started with rSpace Development",
        content: "## Prerequisites\n\n- Bun runtime (v1.3+)\n- Docker + Docker Compose\n- Git access to Gitea\n\n## Local Setup\n\n```bash\ngit clone ssh://git@gitea.jeffemmett.com:223/jeffemmett/rspace-online.git\ncd rspace-online\nbun install\nbun run dev\n```\n\n## Module Structure\n\nEach module lives in `modules/{name}/` and exports an `RSpaceModule` interface:\n\n```typescript\nexport interface RSpaceModule {\n id: string;\n name: string;\n icon: string;\n description: string;\n routes: Hono;\n}\n```\n\n## Adding a New Module\n\n1. Create `modules/{name}/mod.ts`\n2. Create `modules/{name}/components/` for web components\n3. Add build step in `vite.config.ts`\n4. Register in `server/index.ts`",
        tags: ["onboarding"],
      },
      {
        nbId: nb3[0].id, title: "How to Add a Cosmolocal Provider",
        content: "## Overview\n\nProviders are local print shops, makerspaces, or studios that can fulfill rCart orders.\n\n## Steps\n\n1. Visit `providers.mycofi.earth`\n2. Sign in with your rStack passkey\n3. Click \"Register Provider\"\n4. Fill in:\n - Name, location (address + coordinates)\n - Capabilities (laser-print, risograph, screen-print, etc.)\n - Substrates (paper types, fabric, vinyl)\n - Turnaround time and pricing\n5. Submit for review\n\n## Matching Algorithm\n\nWhen an order comes in, rCart matches based on:\n- Required capabilities vs. provider capabilities\n- Geographic distance (earthdistance extension)\n- Turnaround time\n- Price",
        tags: ["cosmolocal", "onboarding"],
      },
    ];
    for (const n of notes) {
      // content_plain is the markdown stripped for full-text search.
      const row = await sql.unsafe(
        `INSERT INTO rnotes.notes (notebook_id, title, content, content_plain, type, is_pinned)
         VALUES ($1, $2, $3, $4, 'NOTE', $5) RETURNING id`,
        [n.nbId, n.title, n.content, n.content.replace(/<[^>]*>/g, " ").replace(/[#*|`\-\[\]]/g, " ").replace(/\s+/g, " ").trim(), n.pinned || false]
      );
      // Link each note to its tags via the join table.
      for (const tagName of n.tags) {
        if (tagIds[tagName]) {
          await sql.unsafe(
            "INSERT INTO rnotes.note_tags (note_id, tag_id) VALUES ($1, $2) ON CONFLICT DO NOTHING",
            [row[0].id, tagIds[tagName]]
          );
        }
      }
    }
    console.log("[Notes] Demo data seeded: 3 notebooks, 7 notes, 7 tags");
  } catch (e) {
    console.error("[Notes] Seed error:", e);
  }
}
// initDB + seedDemo are called from onInit lifecycle hook (see module export below)
// ── SyncServer ref (set during onInit) ──
// Module-level handle; stays null until the lifecycle hook injects the SyncServer.
let _syncServer: SyncServer | null = null;
/** Check if a space has been migrated to local-first for notes. */
function isLocalFirst(space: string): boolean {
if (!_syncServer) return false;
// A space is local-first if any notebook doc exists for it in the SyncServer
// We check by looking for docs with the pattern {space}:notes:notebooks:*
return _syncServer.getDoc(`${space}:notes:notebooks:default`) !== undefined;
// ── Automerge helpers ──
/**
 * Lazily ensure a notebook doc exists for a given space + notebookId.
 * Returns the existing doc when present; otherwise creates one from the
 * schema's initial shape, stamps its identity, registers it, and returns it.
 */
function ensureDoc(space: string, notebookId: string): NotebookDoc {
  const docId = notebookDocId(space, notebookId);
  const existing = _syncServer!.getDoc<NotebookDoc>(docId);
  if (existing) return existing;
  // Seed a fresh Automerge doc from the schema's init shape, then stamp identity.
  const created = Automerge.change(Automerge.init<NotebookDoc>(), 'init', (d) => {
    Object.assign(d, notebookSchema.init());
    d.meta.spaceSlug = space;
    d.notebook.id = notebookId;
  });
  _syncServer!.setDoc(docId, created);
  return created;
}
/**
 * Generate a URL-safe slug from a title.
 * Lowercases, collapses each run of non-alphanumerics to a single "-",
 * strips a leading/trailing dash, caps at 80 chars, and falls back to
 * "untitled" when nothing survives.
 */
function slugify(title: string): string {
  const slug = title
    .toLowerCase()
    .replace(/[^a-z0-9]+/g, "-")
    .replace(/^-|-$/g, "")
    .slice(0, 80);
  return slug || "untitled";
}
/** Generate a compact unique ID (base-36 timestamp + random base-36 suffix). */
function newId(): string {
  const stamp = Date.now().toString(36);
  const suffix = Math.random().toString(36).slice(2, 8);
  return `${stamp}-${suffix}`;
}
// ── Automerge ↔ REST conversion helpers ──
/** List all notebook docs for a space from the SyncServer. */
function listAutomergeNotebooks(space: string): { docId: string; doc: NotebookDoc }[] {
function listNotebooks(space: string): { docId: string; doc: NotebookDoc }[] {
if (!_syncServer) return [];
const results: { docId: string; doc: NotebookDoc }[] = [];
const prefix = `${space}:notes:notebooks:`;
for (const docId of _syncServer.listDocs()) {
if (docId.startsWith(prefix)) {
const doc = _syncServer.getDoc<NotebookDoc>(docId);
if (doc) results.push({ docId, doc });
if (doc && doc.notebook && doc.notebook.title) results.push({ docId, doc });
}
}
return results;
@ -196,99 +111,123 @@ function noteToRest(item: NoteItem) {
}
/** Find the notebook doc that contains a given note ID. */
function findNoteInAutomerge(space: string, noteId: string): { docId: string; doc: NotebookDoc; item: NoteItem } | null {
for (const { docId, doc } of listAutomergeNotebooks(space)) {
function findNote(space: string, noteId: string): { docId: string; doc: NotebookDoc; item: NoteItem } | null {
for (const { docId, doc } of listNotebooks(space)) {
const item = doc.items[noteId];
if (item) return { docId, doc, item };
}
return null;
}
/** Write a note to the Automerge doc (creates/updates). */
function writeNoteToAutomerge(space: string, notebookPgId: string, noteId: string, data: Partial<NoteItem>): void {
// ── Seed demo data into Automerge (runs once if no notebooks exist) ──
function seedDemoIfEmpty(space: string) {
if (!_syncServer) return;
// Find the Automerge notebook doc for this PG notebook
// Convention: PG notebook UUID maps to docId suffix
const docId = notebookDocId(space, notebookPgId);
const doc = _syncServer.getDoc<NotebookDoc>(docId);
if (!doc) return; // not migrated yet
_syncServer.changeDoc<NotebookDoc>(docId, `Update note ${noteId}`, (d) => {
if (!d.items[noteId]) {
// New note
d.items[noteId] = {
id: noteId,
notebookId: notebookPgId,
authorId: data.authorId ?? null,
title: data.title ?? '',
content: data.content ?? '',
contentPlain: data.contentPlain ?? '',
type: data.type ?? 'NOTE',
url: data.url ?? null,
language: data.language ?? null,
fileUrl: data.fileUrl ?? null,
mimeType: data.mimeType ?? null,
fileSize: data.fileSize ?? null,
duration: data.duration ?? null,
isPinned: data.isPinned ?? false,
sortOrder: data.sortOrder ?? 0,
tags: data.tags ?? [],
createdAt: data.createdAt ?? Date.now(),
updatedAt: Date.now(),
};
} else {
// Update existing fields
const item = d.items[noteId];
if (data.title !== undefined) item.title = data.title;
if (data.content !== undefined) item.content = data.content;
if (data.contentPlain !== undefined) item.contentPlain = data.contentPlain;
if (data.type !== undefined) item.type = data.type;
if (data.url !== undefined) item.url = data.url;
if (data.language !== undefined) item.language = data.language;
if (data.isPinned !== undefined) item.isPinned = data.isPinned;
if (data.sortOrder !== undefined) item.sortOrder = data.sortOrder;
if (data.tags !== undefined) item.tags = data.tags;
item.updatedAt = Date.now();
}
// If the space already has notebooks, skip
if (listNotebooks(space).length > 0) return;
const now = Date.now();
// Notebook 1: Project Ideas
const nb1Id = newId();
const nb1DocId = notebookDocId(space, nb1Id);
const nb1Doc = Automerge.change(Automerge.init<NotebookDoc>(), "Seed: Project Ideas", (d) => {
d.meta = { module: "notes", collection: "notebooks", version: 1, spaceSlug: space, createdAt: now };
d.notebook = { id: nb1Id, title: "Project Ideas", slug: "project-ideas", description: "Brainstorms and design notes for the r* ecosystem", coverColor: "#6366f1", isPublic: true, createdAt: now, updatedAt: now };
d.items = {};
});
}
_syncServer.setDoc(nb1DocId, nb1Doc);
// ── Helper: get or create user ──
async function getOrCreateUser(did: string, username?: string) {
const rows = await sql.unsafe(
`INSERT INTO rnotes.users (did, username) VALUES ($1, $2)
ON CONFLICT (did) DO UPDATE SET username = COALESCE($2, rnotes.users.username)
RETURNING *`,
[did, username || null]
);
return rows[0];
// Notebook 2: Meeting Notes
const nb2Id = newId();
const nb2DocId = notebookDocId(space, nb2Id);
const nb2Doc = Automerge.change(Automerge.init<NotebookDoc>(), "Seed: Meeting Notes", (d) => {
d.meta = { module: "notes", collection: "notebooks", version: 1, spaceSlug: space, createdAt: now };
d.notebook = { id: nb2Id, title: "Meeting Notes", slug: "meeting-notes", description: "Weekly standups, design reviews, and retrospectives", coverColor: "#f59e0b", isPublic: true, createdAt: now, updatedAt: now };
d.items = {};
});
_syncServer.setDoc(nb2DocId, nb2Doc);
// Notebook 3: How-To Guides
const nb3Id = newId();
const nb3DocId = notebookDocId(space, nb3Id);
const nb3Doc = Automerge.change(Automerge.init<NotebookDoc>(), "Seed: How-To Guides", (d) => {
d.meta = { module: "notes", collection: "notebooks", version: 1, spaceSlug: space, createdAt: now };
d.notebook = { id: nb3Id, title: "How-To Guides", slug: "how-to-guides", description: "Tutorials and onboarding guides for contributors", coverColor: "#10b981", isPublic: true, createdAt: now, updatedAt: now };
d.items = {};
});
_syncServer.setDoc(nb3DocId, nb3Doc);
// Seed notes into notebooks
const notes = [
{
nbId: nb1Id, nbDocId: nb1DocId, title: "Cosmolocal Manufacturing Network",
content: "## Vision\n\nDesign global, manufacture local. Every creative work should be producible by the nearest capable provider.\n\n## Key Components\n\n- **Artifact Spec**: Standardized envelope describing what to produce\n- **Provider Registry**: Directory of local makers with capabilities + pricing\n- **rCart**: Marketplace connecting creators to providers\n- **Revenue Splits**: 50% provider, 35% creator, 15% community\n\n## Open Questions\n\n- How do we handle quality assurance across distributed providers?\n- Should providers be able to set custom margins?\n- What's the minimum viable set of capabilities for launch?",
tags: ["cosmolocal", "architecture"], pinned: true,
},
{
nbId: nb1Id, nbDocId: nb1DocId, title: "Revenue Sharing Model",
content: "## Current Split\n\n| Recipient | Share | Rationale |\n|-----------|-------|-----------|\n| Provider | 50% | Covers materials, labor, shipping |\n| Creator | 35% | Design and creative work |\n| Community | 15% | Platform maintenance, commons fund |\n\n## Enoughness Thresholds\n\nOnce a funnel reaches its sufficient threshold, surplus flows to the next highest-need funnel. This prevents accumulation and keeps resources flowing.\n\n## Implementation\n\nrFunds Flow Service handles deposits from rCart. Each order total is routed through the configured flow → funnel → overflow splits.",
tags: ["cosmolocal", "governance"],
},
{
nbId: nb1Id, nbDocId: nb1DocId, title: "FUN Model: Forget, Update, New",
content: "## Replacing CRUD\n\nNothing is permanently destroyed in rSpace.\n\n- **Forget** replaces Delete — soft-delete with `forgotten: true`. Shapes stay in document, hidden from canvas. Memory panel lets you browse + Remember.\n- **Update** stays the same — public `sync.updateShape()` for programmatic updates\n- **New** replaces Create — language shift: toolbar says \"New X\", events are `new-shape`\n\n## Why?\n\nData sovereignty means users should always be able to recover their work. The Memory panel makes forgotten shapes discoverable, like a digital archive.",
tags: ["design", "architecture"],
},
{
nbId: nb2Id, nbDocId: nb2DocId, title: "Weekly Standup — Feb 15, 2026",
content: "## Attendees\n\nAlice, Bob, Carol\n\n## Updates\n\n**Alice**: Finished EncryptID guardian recovery flow. 2-of-3 guardian approval working. Next: device linking via QR code.\n\n**Bob**: Provider registry now has 6 printers globally. Working on proximity search with earthdistance extension.\n\n**Carol**: rFunds river visualization deployed. Enoughness layer showing golden glow on sufficient funnels.\n\n## Action Items\n\n- [ ] Alice: Document guardian recovery API endpoints\n- [ ] Bob: Add turnaround time estimates to provider matching\n- [ ] Carol: Add demo mode to river view with mock data",
tags: ["standup"],
},
{
nbId: nb2Id, nbDocId: nb2DocId, title: "Design Review — rBooks Flipbook Reader",
content: "## What We Reviewed\n\nThe react-pageflip integration for PDF reading in rBooks.\n\n## Feedback\n\n1. **Page turn animation** — smooth, feels good on desktop. On mobile, swipe gesture needs larger hit area.\n2. **PDF rendering** — react-pdf handles most PDFs well. Large files (>50MB) cause browser memory issues.\n3. **Read Locally mode** — IndexedDB storage works. Need to show storage usage somewhere.\n\n## Decisions\n\n- Ship current version, iterate on mobile\n- Add a 50MB soft warning on upload\n- Explore PDF.js worker for background rendering",
tags: ["review", "design"],
},
{
nbId: nb3Id, nbDocId: nb3DocId, title: "Getting Started with rSpace Development",
content: "## Prerequisites\n\n- Bun runtime (v1.3+)\n- Docker + Docker Compose\n- Git access to Gitea\n\n## Local Setup\n\n```bash\ngit clone ssh://git@gitea.jeffemmett.com:223/jeffemmett/rspace-online.git\ncd rspace-online\nbun install\nbun run dev\n```\n\n## Module Structure\n\nEach module lives in `modules/{name}/` and exports an `RSpaceModule` interface:\n\n```typescript\nexport interface RSpaceModule {\n id: string;\n name: string;\n icon: string;\n description: string;\n routes: Hono;\n}\n```\n\n## Adding a New Module\n\n1. Create `modules/{name}/mod.ts`\n2. Create `modules/{name}/components/` for web components\n3. Add build step in `vite.config.ts`\n4. Register in `server/index.ts`",
tags: ["onboarding"],
},
{
nbId: nb3Id, nbDocId: nb3DocId, title: "How to Add a Cosmolocal Provider",
content: "## Overview\n\nProviders are local print shops, makerspaces, or studios that can fulfill rCart orders.\n\n## Steps\n\n1. Visit `providers.mycofi.earth`\n2. Sign in with your rStack passkey\n3. Click \"Register Provider\"\n4. Fill in:\n - Name, location (address + coordinates)\n - Capabilities (laser-print, risograph, screen-print, etc.)\n - Substrates (paper types, fabric, vinyl)\n - Turnaround time and pricing\n5. Submit for review\n\n## Matching Algorithm\n\nWhen an order comes in, rCart matches based on:\n- Required capabilities vs. provider capabilities\n- Geographic distance (earthdistance extension)\n- Turnaround time\n- Price",
tags: ["cosmolocal", "onboarding"],
},
];
for (const n of notes) {
const noteId = newId();
const contentPlain = n.content.replace(/<[^>]*>/g, " ").replace(/[#*|`\-\[\]]/g, " ").replace(/\s+/g, " ").trim();
const item = createNoteItem(noteId, n.nbId, n.title, {
content: n.content,
contentPlain,
tags: n.tags,
isPinned: n.pinned || false,
});
_syncServer!.changeDoc<NotebookDoc>(n.nbDocId, `Seed note: ${n.title}`, (d) => {
d.items[noteId] = item;
});
}
console.log("[Notes] Demo data seeded: 3 notebooks, 7 notes");
}
// ── Notebooks API ──
// GET /api/notebooks — list notebooks
// (Fix: merge residue interleaved the removed PG-fallback body with the
// Automerge-only body; this is the clean post-merge handler.)
routes.get("/api/notebooks", async (c) => {
  const space = c.req.param("space") || "demo";
  // Convert each notebook doc to its REST shape, newest-updated first.
  const notebooks = listNotebooks(space).map(({ doc }) => notebookToRest(doc));
  notebooks.sort((a, b) => new Date(b.updated_at).getTime() - new Date(a.updated_at).getTime());
  return c.json({ notebooks, source: "automerge" });
});
// POST /api/notebooks — create notebook (dual-write)
// POST /api/notebooks — create notebook
routes.post("/api/notebooks", async (c) => {
const space = c.req.param("space") || "demo";
const token = extractToken(c.req.raw.headers);
@ -299,75 +238,48 @@ routes.post("/api/notebooks", async (c) => {
const body = await c.req.json();
const { title, description, cover_color } = body;
// PG write
const user = await getOrCreateUser(claims.sub, claims.username);
const rows = await sql.unsafe(
`INSERT INTO rnotes.notebooks (title, description, cover_color, owner_id)
VALUES ($1, $2, $3, $4) RETURNING *`,
[title || "Untitled Notebook", description || null, cover_color || "#3b82f6", user.id]
);
const pgRow = rows[0];
const nbTitle = title || "Untitled Notebook";
const notebookId = newId();
const now = Date.now();
// Automerge dual-write: create a new notebook doc
if (_syncServer && isLocalFirst(space)) {
const docId = notebookDocId(space, pgRow.id);
if (!_syncServer.getDoc(docId)) {
const doc = Automerge.init<NotebookDoc>();
const initialized = Automerge.change(doc, "Create notebook", (d) => {
d.meta = { module: "notes", collection: "notebooks", version: 1, spaceSlug: space, createdAt: Date.now() };
d.notebook = {
id: pgRow.id, title: pgRow.title, slug: pgRow.slug || "",
description: pgRow.description || "", coverColor: pgRow.cover_color || "#3b82f6",
isPublic: pgRow.is_public || false, createdAt: Date.now(), updatedAt: Date.now(),
};
d.items = {};
});
_syncServer.setDoc(docId, initialized);
}
}
const doc = ensureDoc(space, notebookId);
_syncServer!.changeDoc<NotebookDoc>(notebookDocId(space, notebookId), "Create notebook", (d) => {
d.notebook.id = notebookId;
d.notebook.title = nbTitle;
d.notebook.slug = slugify(nbTitle);
d.notebook.description = description || "";
d.notebook.coverColor = cover_color || "#3b82f6";
d.notebook.isPublic = false;
d.notebook.createdAt = now;
d.notebook.updatedAt = now;
});
return c.json(pgRow, 201);
const updatedDoc = _syncServer!.getDoc<NotebookDoc>(notebookDocId(space, notebookId))!;
return c.json(notebookToRest(updatedDoc), 201);
});
// GET /api/notebooks/:id — notebook detail with notes (Automerge-first)
// GET /api/notebooks/:id — notebook detail with notes
routes.get("/api/notebooks/:id", async (c) => {
const space = c.req.param("space") || "demo";
const id = c.req.param("id");
// Automerge first
if (isLocalFirst(space)) {
const docId = notebookDocId(space, id);
const doc = _syncServer?.getDoc<NotebookDoc>(docId);
if (doc) {
const nb = notebookToRest(doc);
const notes = Object.values(doc.items)
.map(noteToRest)
.sort((a, b) => {
if (a.is_pinned !== b.is_pinned) return a.is_pinned ? -1 : 1;
return new Date(b.updated_at).getTime() - new Date(a.updated_at).getTime();
});
return c.json({ ...nb, notes, source: "automerge" });
}
const docId = notebookDocId(space, id);
const doc = _syncServer?.getDoc<NotebookDoc>(docId);
if (!doc || !doc.notebook || !doc.notebook.title) {
return c.json({ error: "Notebook not found" }, 404);
}
// PG fallback
const nb = await sql.unsafe("SELECT * FROM rnotes.notebooks WHERE id = $1", [id]);
if (nb.length === 0) return c.json({ error: "Notebook not found" }, 404);
const notes = await sql.unsafe(
`SELECT n.*, array_agg(t.name) FILTER (WHERE t.name IS NOT NULL) as tags
FROM rnotes.notes n
LEFT JOIN rnotes.note_tags nt ON nt.note_id = n.id
LEFT JOIN rnotes.tags t ON t.id = nt.tag_id
WHERE n.notebook_id = $1
GROUP BY n.id
ORDER BY n.is_pinned DESC, n.sort_order ASC, n.updated_at DESC`,
[id]
);
return c.json({ ...nb[0], notes });
const nb = notebookToRest(doc);
const notes = Object.values(doc.items)
.map(noteToRest)
.sort((a, b) => {
if (a.is_pinned !== b.is_pinned) return a.is_pinned ? -1 : 1;
return new Date(b.updated_at).getTime() - new Date(a.updated_at).getTime();
});
return c.json({ ...nb, notes, source: "automerge" });
});
// PUT /api/notebooks/:id — update notebook (dual-write)
// PUT /api/notebooks/:id — update notebook
routes.put("/api/notebooks/:id", async (c) => {
const space = c.req.param("space") || "demo";
const token = extractToken(c.req.raw.headers);
@ -379,124 +291,90 @@ routes.put("/api/notebooks/:id", async (c) => {
const body = await c.req.json();
const { title, description, cover_color, is_public } = body;
// PG write
const fields: string[] = [];
const params: any[] = [];
let idx = 1;
if (title !== undefined) { fields.push(`title = $${idx}`); params.push(title); idx++; }
if (description !== undefined) { fields.push(`description = $${idx}`); params.push(description); idx++; }
if (cover_color !== undefined) { fields.push(`cover_color = $${idx}`); params.push(cover_color); idx++; }
if (is_public !== undefined) { fields.push(`is_public = $${idx}`); params.push(is_public); idx++; }
if (fields.length === 0) return c.json({ error: "No fields to update" }, 400);
fields.push("updated_at = NOW()");
params.push(id);
const rows = await sql.unsafe(
`UPDATE rnotes.notebooks SET ${fields.join(", ")} WHERE id = $${idx} RETURNING *`,
params
);
if (rows.length === 0) return c.json({ error: "Notebook not found" }, 404);
// Automerge dual-write: update notebook metadata
if (_syncServer && isLocalFirst(space)) {
const docId = notebookDocId(space, id);
_syncServer.changeDoc<NotebookDoc>(docId, "Update notebook", (d) => {
if (title !== undefined) d.notebook.title = title;
if (description !== undefined) d.notebook.description = description;
if (cover_color !== undefined) d.notebook.coverColor = cover_color;
if (is_public !== undefined) d.notebook.isPublic = is_public;
d.notebook.updatedAt = Date.now();
});
if (title === undefined && description === undefined && cover_color === undefined && is_public === undefined) {
return c.json({ error: "No fields to update" }, 400);
}
return c.json(rows[0]);
const docId = notebookDocId(space, id);
const doc = _syncServer?.getDoc<NotebookDoc>(docId);
if (!doc || !doc.notebook || !doc.notebook.title) {
return c.json({ error: "Notebook not found" }, 404);
}
_syncServer!.changeDoc<NotebookDoc>(docId, "Update notebook", (d) => {
if (title !== undefined) d.notebook.title = title;
if (description !== undefined) d.notebook.description = description;
if (cover_color !== undefined) d.notebook.coverColor = cover_color;
if (is_public !== undefined) d.notebook.isPublic = is_public;
d.notebook.updatedAt = Date.now();
});
const updatedDoc = _syncServer!.getDoc<NotebookDoc>(docId)!;
return c.json(notebookToRest(updatedDoc));
});
// DELETE /api/notebooks/:id
// (Fix: merge residue interleaved the removed PG delete; this is the clean
// post-merge Automerge-only handler.)
routes.delete("/api/notebooks/:id", async (c) => {
  const space = c.req.param("space") || "demo";
  const id = c.req.param("id");
  const docId = notebookDocId(space, id);
  const doc = _syncServer?.getDoc<NotebookDoc>(docId);
  if (!doc || !doc.notebook || !doc.notebook.title) {
    return c.json({ error: "Notebook not found" }, 404);
  }
  // Clear all items and blank the notebook title to mark as deleted.
  // SyncServer has no removeDoc API, so we empty the doc instead.
  _syncServer!.changeDoc<NotebookDoc>(docId, "Delete notebook", (d) => {
    for (const key of Object.keys(d.items)) {
      delete d.items[key];
    }
    d.notebook.title = "";
    d.notebook.updatedAt = Date.now();
  });
  return c.json({ ok: true });
});
// ── Notes API ──
// GET /api/notes — list all notes (Automerge-first, PG fallback)
// GET /api/notes — list all notes
routes.get("/api/notes", async (c) => {
const space = c.req.param("space") || "demo";
const { notebook_id, type, q, limit = "50", offset = "0" } = c.req.query();
// Automerge first
if (isLocalFirst(space)) {
let allNotes: ReturnType<typeof noteToRest>[] = [];
const notebooks = notebook_id
? [{ doc: _syncServer!.getDoc<NotebookDoc>(notebookDocId(space, notebook_id))! }].filter(x => x.doc)
: listAutomergeNotebooks(space);
let allNotes: ReturnType<typeof noteToRest>[] = [];
const notebooks = notebook_id
? (() => {
const doc = _syncServer?.getDoc<NotebookDoc>(notebookDocId(space, notebook_id));
return doc ? [{ doc }] : [];
})()
: listNotebooks(space);
for (const { doc } of notebooks) {
for (const item of Object.values(doc.items)) {
if (type && item.type !== type) continue;
if (q) {
const lower = q.toLowerCase();
if (!item.title.toLowerCase().includes(lower) && !item.contentPlain.toLowerCase().includes(lower)) continue;
}
allNotes.push(noteToRest(item));
for (const { doc } of notebooks) {
for (const item of Object.values(doc.items)) {
if (type && item.type !== type) continue;
if (q) {
const lower = q.toLowerCase();
if (!item.title.toLowerCase().includes(lower) && !item.contentPlain.toLowerCase().includes(lower)) continue;
}
allNotes.push(noteToRest(item));
}
// Sort: pinned first, then by updated_at desc
allNotes.sort((a, b) => {
if (a.is_pinned !== b.is_pinned) return a.is_pinned ? -1 : 1;
return new Date(b.updated_at).getTime() - new Date(a.updated_at).getTime();
});
const lim = Math.min(parseInt(limit), 100);
const off = parseInt(offset) || 0;
return c.json({ notes: allNotes.slice(off, off + lim), source: "automerge" });
}
// PG fallback
const conditions: string[] = [];
const params: any[] = [];
let idx = 1;
// Sort: pinned first, then by updated_at desc
allNotes.sort((a, b) => {
if (a.is_pinned !== b.is_pinned) return a.is_pinned ? -1 : 1;
return new Date(b.updated_at).getTime() - new Date(a.updated_at).getTime();
});
if (notebook_id) { conditions.push(`n.notebook_id = $${idx}`); params.push(notebook_id); idx++; }
if (type) { conditions.push(`n.type = $${idx}`); params.push(type); idx++; }
if (q) {
conditions.push(`(n.title ILIKE $${idx} OR n.content_plain ILIKE $${idx})`);
params.push(`%${q}%`);
idx++;
}
const where = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
const rows = await sql.unsafe(
`SELECT n.*, array_agg(t.name) FILTER (WHERE t.name IS NOT NULL) as tags
FROM rnotes.notes n
LEFT JOIN rnotes.note_tags nt ON nt.note_id = n.id
LEFT JOIN rnotes.tags t ON t.id = nt.tag_id
${where}
GROUP BY n.id
ORDER BY n.is_pinned DESC, n.updated_at DESC
LIMIT ${Math.min(parseInt(limit), 100)} OFFSET ${parseInt(offset) || 0}`,
params
);
return c.json({ notes: rows });
const lim = Math.min(parseInt(limit), 100);
const off = parseInt(offset) || 0;
return c.json({ notes: allNotes.slice(off, off + lim), source: "automerge" });
});
// POST /api/notes — create note (dual-write)
// POST /api/notes — create note
routes.post("/api/notes", async (c) => {
const space = c.req.param("space") || "demo";
const token = extractToken(c.req.raw.headers);
@ -508,151 +386,108 @@ routes.post("/api/notes", async (c) => {
const { notebook_id, title, content, type, url, language, file_url, mime_type, file_size, duration, tags } = body;
if (!title?.trim()) return c.json({ error: "Title is required" }, 400);
if (!notebook_id) return c.json({ error: "notebook_id is required" }, 400);
// Strip HTML for plain text search
const contentPlain = content ? content.replace(/<[^>]*>/g, " ").replace(/\s+/g, " ").trim() : null;
// Strip HTML/markdown for plain text search
const contentPlain = content ? content.replace(/<[^>]*>/g, " ").replace(/\s+/g, " ").trim() : "";
// PG write
const rows = await sql.unsafe(
`INSERT INTO rnotes.notes (notebook_id, title, content, content_plain, type, url, language, file_url, mime_type, file_size, duration)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11) RETURNING *`,
[notebook_id || null, title.trim(), content || "", contentPlain, type || "NOTE",
url || null, language || null, file_url || null, mime_type || null, file_size || null, duration || null]
);
// Handle tags in PG
// Normalize tags
const tagNames: string[] = [];
if (tags && Array.isArray(tags)) {
for (const tagName of tags) {
const name = tagName.trim().toLowerCase();
if (!name) continue;
tagNames.push(name);
const tag = await sql.unsafe(
"INSERT INTO rnotes.tags (name) VALUES ($1) ON CONFLICT (name) DO UPDATE SET name = $1 RETURNING id",
[name]
);
await sql.unsafe(
"INSERT INTO rnotes.note_tags (note_id, tag_id) VALUES ($1, $2) ON CONFLICT DO NOTHING",
[rows[0].id, tag[0].id]
);
const name = (tagName as string).trim().toLowerCase();
if (name) tagNames.push(name);
}
}
// Automerge dual-write
if (notebook_id && isLocalFirst(space)) {
writeNoteToAutomerge(space, notebook_id, rows[0].id, {
title: title.trim(),
content: content || '',
contentPlain: contentPlain || '',
type: type || 'NOTE',
url: url || null,
language: language || null,
fileUrl: file_url || null,
mimeType: mime_type || null,
fileSize: file_size || null,
duration: duration || null,
tags: tagNames,
});
}
const noteId = newId();
const item = createNoteItem(noteId, notebook_id, title.trim(), {
authorId: claims.sub ?? null,
content: content || "",
contentPlain,
type: type || "NOTE",
url: url || null,
language: language || null,
fileUrl: file_url || null,
mimeType: mime_type || null,
fileSize: file_size || null,
duration: duration || null,
tags: tagNames,
});
return c.json(rows[0], 201);
// Ensure the notebook doc exists, then add the note
ensureDoc(space, notebook_id);
const docId = notebookDocId(space, notebook_id);
_syncServer!.changeDoc<NotebookDoc>(docId, `Create note: ${title.trim()}`, (d) => {
d.items[noteId] = item;
d.notebook.updatedAt = Date.now();
});
return c.json(noteToRest(item), 201);
});
// GET /api/notes/:id — note detail (Automerge-first)
// GET /api/notes/:id — note detail
routes.get("/api/notes/:id", async (c) => {
const space = c.req.param("space") || "demo";
const id = c.req.param("id");
// Automerge first
if (isLocalFirst(space)) {
const found = findNoteInAutomerge(space, id);
if (found) return c.json({ ...noteToRest(found.item), source: "automerge" });
}
const found = findNote(space, id);
if (!found) return c.json({ error: "Note not found" }, 404);
// PG fallback
const rows = await sql.unsafe(
`SELECT n.*, array_agg(t.name) FILTER (WHERE t.name IS NOT NULL) as tags
FROM rnotes.notes n
LEFT JOIN rnotes.note_tags nt ON nt.note_id = n.id
LEFT JOIN rnotes.tags t ON t.id = nt.tag_id
WHERE n.id = $1
GROUP BY n.id`,
[id]
);
if (rows.length === 0) return c.json({ error: "Note not found" }, 404);
return c.json(rows[0]);
return c.json({ ...noteToRest(found.item), source: "automerge" });
});
// PUT /api/notes/:id — update note (dual-write)
// PUT /api/notes/:id — update note
routes.put("/api/notes/:id", async (c) => {
const space = c.req.param("space") || "demo";
const id = c.req.param("id");
const body = await c.req.json();
const { title, content, type, url, language, is_pinned, sort_order } = body;
// PG write
const fields: string[] = [];
const params: any[] = [];
let idx = 1;
if (title !== undefined) { fields.push(`title = $${idx}`); params.push(title); idx++; }
if (content !== undefined) {
fields.push(`content = $${idx}`); params.push(content); idx++;
const plain = content.replace(/<[^>]*>/g, " ").replace(/\s+/g, " ").trim();
fields.push(`content_plain = $${idx}`); params.push(plain); idx++;
}
if (type !== undefined) { fields.push(`type = $${idx}`); params.push(type); idx++; }
if (url !== undefined) { fields.push(`url = $${idx}`); params.push(url); idx++; }
if (language !== undefined) { fields.push(`language = $${idx}`); params.push(language); idx++; }
if (is_pinned !== undefined) { fields.push(`is_pinned = $${idx}`); params.push(is_pinned); idx++; }
if (sort_order !== undefined) { fields.push(`sort_order = $${idx}`); params.push(sort_order); idx++; }
if (fields.length === 0) return c.json({ error: "No fields to update" }, 400);
fields.push("updated_at = NOW()");
params.push(id);
const rows = await sql.unsafe(
`UPDATE rnotes.notes SET ${fields.join(", ")} WHERE id = $${idx} RETURNING *`,
params
);
if (rows.length === 0) return c.json({ error: "Note not found" }, 404);
// Automerge dual-write
if (isLocalFirst(space)) {
const found = findNoteInAutomerge(space, id);
if (found) {
const contentPlain = content ? content.replace(/<[^>]*>/g, " ").replace(/\s+/g, " ").trim() : undefined;
writeNoteToAutomerge(space, found.item.notebookId, id, {
...(title !== undefined ? { title } : {}),
...(content !== undefined ? { content, contentPlain } : {}),
...(type !== undefined ? { type } : {}),
...(url !== undefined ? { url } : {}),
...(language !== undefined ? { language } : {}),
...(is_pinned !== undefined ? { isPinned: is_pinned } : {}),
...(sort_order !== undefined ? { sortOrder: sort_order } : {}),
});
}
if (title === undefined && content === undefined && type === undefined &&
url === undefined && language === undefined && is_pinned === undefined && sort_order === undefined) {
return c.json({ error: "No fields to update" }, 400);
}
return c.json(rows[0]);
const found = findNote(space, id);
if (!found) return c.json({ error: "Note not found" }, 404);
const contentPlain = content !== undefined
? content.replace(/<[^>]*>/g, " ").replace(/\s+/g, " ").trim()
: undefined;
_syncServer!.changeDoc<NotebookDoc>(found.docId, `Update note ${id}`, (d) => {
const item = d.items[id];
if (!item) return;
if (title !== undefined) item.title = title;
if (content !== undefined) item.content = content;
if (contentPlain !== undefined) item.contentPlain = contentPlain;
if (type !== undefined) item.type = type;
if (url !== undefined) item.url = url;
if (language !== undefined) item.language = language;
if (is_pinned !== undefined) item.isPinned = is_pinned;
if (sort_order !== undefined) item.sortOrder = sort_order;
item.updatedAt = Date.now();
});
// Return the updated note
const updatedDoc = _syncServer!.getDoc<NotebookDoc>(found.docId)!;
const updatedItem = updatedDoc.items[id];
return c.json(noteToRest(updatedItem));
});
// DELETE /api/notes/:id (dual-write)
// DELETE /api/notes/:id
routes.delete("/api/notes/:id", async (c) => {
const space = c.req.param("space") || "demo";
const id = c.req.param("id");
// PG delete
const result = await sql.unsafe("DELETE FROM rnotes.notes WHERE id = $1 RETURNING id, notebook_id", [id]);
if (result.length === 0) return c.json({ error: "Note not found" }, 404);
const found = findNote(space, id);
if (!found) return c.json({ error: "Note not found" }, 404);
// Automerge dual-write: remove note from notebook doc
if (isLocalFirst(space) && result[0].notebook_id && _syncServer) {
const docId = notebookDocId(space, result[0].notebook_id);
_syncServer.changeDoc<NotebookDoc>(docId, `Delete note ${id}`, (d) => {
delete d.items[id];
});
}
_syncServer!.changeDoc<NotebookDoc>(found.docId, `Delete note ${id}`, (d) => {
delete d.items[id];
d.notebook.updatedAt = Date.now();
});
return c.json({ ok: true });
});
@ -691,11 +526,10 @@ export const notesModule: RSpaceModule = {
async onInit({ syncServer }) {
_syncServer = syncServer;
// Init PG (still needed during dual-write period)
await initDB();
await seedDemoIfEmpty();
// Seed demo notebooks if the "demo" space is empty
seedDemoIfEmpty("demo");
console.log("[Notes] onInit complete (PG + schema registered)");
console.log("[Notes] onInit complete (Automerge-only)");
},
async onSpaceCreate(ctx: SpaceLifecycleContext) {

View File

@ -3,13 +3,16 @@
*
* Routes are relative to mount point (/:space/splat in unified).
* Three.js + GaussianSplats3D loaded via CDN importmap.
*
* All metadata is stored in Automerge documents via SyncServer.
* 3D files (.ply, .splat, .spz) remain on the filesystem.
*/
import { Hono } from "hono";
import { resolve } from "node:path";
import { mkdir, readFile } from "node:fs/promises";
import { mkdir } from "node:fs/promises";
import { randomUUID } from "node:crypto";
import { sql } from "../../shared/db/pool";
import * as Automerge from "@automerge/automerge";
import { renderShell } from "../../server/shell";
import { getModuleInfoList } from "../../shared/module";
import type { RSpaceModule, SpaceLifecycleContext } from "../../shared/module";
@ -20,7 +23,13 @@ import {
} from "@encryptid/sdk/server";
import { setupX402FromEnv } from "../../shared/x402/hono-middleware";
import type { SyncServer } from '../../server/local-first/sync-server';
import { splatScenesSchema } from './schemas';
import {
splatScenesSchema,
splatScenesDocId,
type SplatScenesDoc,
type SplatItem,
type SourceFile,
} from './schemas';
let _syncServer: SyncServer | null = null;
@ -88,6 +97,88 @@ function getMimeType(format: string): string {
}
}
// ── Automerge helpers ──
/**
* Lazily create the Automerge doc for a space if it doesn't exist yet.
*/
function ensureDoc(space: string): SplatScenesDoc {
const docId = splatScenesDocId(space);
let doc = _syncServer!.getDoc<SplatScenesDoc>(docId);
if (!doc) {
doc = Automerge.change(Automerge.init<SplatScenesDoc>(), 'init', (d) => {
const init = splatScenesSchema.init();
d.meta = init.meta;
d.meta.spaceSlug = space;
d.items = {};
});
_syncServer!.setDoc(docId, doc);
}
return doc;
}
/**
* Find a splat item by slug or id within a doc's items map.
* Returns [itemKey, item] or undefined.
*/
function findItem(doc: SplatScenesDoc, idOrSlug: string): [string, SplatItem] | undefined {
for (const [key, item] of Object.entries(doc.items)) {
if (item.slug === idOrSlug || item.id === idOrSlug) {
return [key, item];
}
}
return undefined;
}
/**
* Convert a SplatItem (camelCase) to a snake_case row for API responses,
* preserving the shape the frontend expects.
*/
function itemToRow(item: SplatItem): SplatRow {
return {
id: item.id,
slug: item.slug,
title: item.title,
description: item.description || null,
file_path: item.filePath,
file_format: item.fileFormat,
file_size_bytes: item.fileSizeBytes,
tags: item.tags ?? [],
space_slug: item.spaceSlug,
contributor_id: item.contributorId,
contributor_name: item.contributorName,
source: item.source ?? 'upload',
status: item.status,
view_count: item.viewCount,
payment_tx: item.paymentTx,
payment_network: item.paymentNetwork,
processing_status: item.processingStatus ?? 'ready',
processing_error: item.processingError,
source_file_count: item.sourceFileCount,
created_at: new Date(item.createdAt).toISOString(),
};
}
/**
* Return the subset of SplatRow fields used in list/gallery responses.
*/
function itemToListRow(item: SplatItem) {
return {
id: item.id,
slug: item.slug,
title: item.title,
description: item.description || null,
file_format: item.fileFormat,
file_size_bytes: item.fileSizeBytes,
tags: item.tags ?? [],
contributor_name: item.contributorName,
view_count: item.viewCount,
processing_status: item.processingStatus ?? 'ready',
source_file_count: item.sourceFileCount,
created_at: new Date(item.createdAt).toISOString(),
};
}
// ── CDN importmap for Three.js + GaussianSplats3D ──
const IMPORTMAP = `<script type="importmap">
@ -118,60 +209,62 @@ routes.get("/api/splats", async (c) => {
const limit = Math.min(parseInt(c.req.query("limit") || "50"), 100);
const offset = parseInt(c.req.query("offset") || "0");
let query = `SELECT id, slug, title, description, file_format, file_size_bytes,
tags, contributor_name, view_count, processing_status, source_file_count, created_at
FROM rsplat.splats WHERE status = 'published' AND space_slug = $1`;
const params: (string | number)[] = [spaceSlug];
const doc = ensureDoc(spaceSlug);
let items = Object.values(doc.items)
.filter((item) => item.status === 'published');
if (tag) {
params.push(tag);
query += ` AND $${params.length} = ANY(tags)`;
items = items.filter((item) => item.tags?.includes(tag));
}
query += ` ORDER BY created_at DESC`;
params.push(limit);
query += ` LIMIT $${params.length}`;
params.push(offset);
query += ` OFFSET $${params.length}`;
// Sort by createdAt descending
items.sort((a, b) => b.createdAt - a.createdAt);
const rows = await sql.unsafe(query, params);
return c.json({ splats: rows });
// Apply offset and limit
const paged = items.slice(offset, offset + limit);
return c.json({ splats: paged.map(itemToListRow) });
});
// ── API: Get splat details ──
routes.get("/api/splats/:id", async (c) => {
const spaceSlug = c.req.param("space") || "demo";
const id = c.req.param("id");
const rows = await sql.unsafe(
`SELECT * FROM rsplat.splats WHERE (slug = $1 OR id::text = $1) AND status = 'published'`,
[id]
);
const doc = ensureDoc(spaceSlug);
const found = findItem(doc, id);
if (rows.length === 0) return c.json({ error: "Splat not found" }, 404);
if (!found || found[1].status !== 'published') {
return c.json({ error: "Splat not found" }, 404);
}
const [itemKey, item] = found;
// Increment view count
await sql.unsafe(
`UPDATE rsplat.splats SET view_count = view_count + 1 WHERE id = $1`,
[rows[0].id]
);
const docId = splatScenesDocId(spaceSlug);
_syncServer!.changeDoc<SplatScenesDoc>(docId, 'increment view count', (d) => {
d.items[itemKey].viewCount += 1;
});
return c.json(rows[0]);
return c.json(itemToRow(item));
});
// ── API: Serve splat file ──
// Matches both /api/splats/:id/file and /api/splats/:id/:filename (e.g. rainbow-sphere.splat)
routes.get("/api/splats/:id/:filename", async (c) => {
const spaceSlug = c.req.param("space") || "demo";
const id = c.req.param("id");
const rows = await sql.unsafe(
`SELECT id, slug, file_path, file_format FROM rsplat.splats WHERE (slug = $1 OR id::text = $1) AND status = 'published'`,
[id]
);
const doc = ensureDoc(spaceSlug);
const found = findItem(doc, id);
if (rows.length === 0) return c.json({ error: "Splat not found" }, 404);
if (!found || found[1].status !== 'published') {
return c.json({ error: "Splat not found" }, 404);
}
const splat = rows[0];
const filepath = resolve(SPLATS_DIR, splat.file_path);
const splat = found[1];
const filepath = resolve(SPLATS_DIR, splat.filePath);
const file = Bun.file(filepath);
if (!(await file.exists())) {
@ -180,8 +273,8 @@ routes.get("/api/splats/:id/:filename", async (c) => {
return new Response(file, {
headers: {
"Content-Type": getMimeType(splat.file_format),
"Content-Disposition": `inline; filename="${splat.slug}.${splat.file_format}"`,
"Content-Type": getMimeType(splat.fileFormat),
"Content-Disposition": `inline; filename="${splat.slug}.${splat.fileFormat}"`,
"Content-Length": String(file.size),
"Access-Control-Allow-Origin": "*",
"Cache-Control": "public, max-age=86400",
@ -242,11 +335,10 @@ routes.post("/api/splats", async (c) => {
const shortId = randomUUID().slice(0, 8);
let slug = slugify(title);
// Check slug collision
const existing = await sql.unsafe(
`SELECT 1 FROM rsplat.splats WHERE slug = $1`, [slug]
);
if (existing.length > 0) {
// Check slug collision in Automerge doc
const doc = ensureDoc(spaceSlug);
const slugExists = Object.values(doc.items).some((item) => item.slug === slug);
if (slugExists) {
slug = `${slug}-${shortId}`;
}
@ -257,16 +349,48 @@ routes.post("/api/splats", async (c) => {
const buffer = Buffer.from(await file.arrayBuffer());
await Bun.write(filepath, buffer);
// Insert into DB
// Insert into Automerge doc
const splatId = randomUUID();
const now = Date.now();
const paymentTx = (c as any).get("x402Payment") || null;
const rows = await sql.unsafe(
`INSERT INTO rsplat.splats (slug, title, description, file_path, file_format, file_size_bytes, tags, space_slug, contributor_id, contributor_name, payment_tx)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)
RETURNING id, slug, title, description, file_format, file_size_bytes, tags, created_at`,
[slug, title, description, filename, format, buffer.length, tags, spaceSlug, claims.sub, claims.username || null, paymentTx] as any[]
);
return c.json(rows[0], 201);
const docId = splatScenesDocId(spaceSlug);
_syncServer!.changeDoc<SplatScenesDoc>(docId, 'add splat', (d) => {
d.items[splatId] = {
id: splatId,
slug,
title,
description: description ?? '',
filePath: filename,
fileFormat: format,
fileSizeBytes: buffer.length,
tags,
spaceSlug,
contributorId: claims.sub,
contributorName: claims.username || null,
source: 'upload',
status: 'published',
viewCount: 0,
paymentTx,
paymentNetwork: null,
createdAt: now,
processingStatus: 'ready',
processingError: null,
sourceFileCount: 0,
sourceFiles: [],
};
});
return c.json({
id: splatId,
slug,
title,
description,
file_format: format,
file_size_bytes: buffer.length,
tags,
created_at: new Date(now).toISOString(),
}, 201);
});
// ── API: Upload photos/video for splatting ──
@ -338,11 +462,10 @@ routes.post("/api/splats/from-media", async (c) => {
const shortId = randomUUID().slice(0, 8);
let slug = slugify(title);
// Check slug collision
const existing = await sql.unsafe(
`SELECT 1 FROM rsplat.splats WHERE slug = $1`, [slug]
);
if (existing.length > 0) {
// Check slug collision in Automerge doc
const doc = ensureDoc(spaceSlug);
const slugExists = Object.values(doc.items).some((item) => item.slug === slug);
if (slugExists) {
slug = `${slug}-${shortId}`;
}
@ -350,41 +473,67 @@ routes.post("/api/splats/from-media", async (c) => {
const sourceDir = resolve(SOURCES_DIR, slug);
await mkdir(sourceDir, { recursive: true });
const sourceRows: { path: string; name: string; mime: string; size: number }[] = [];
const sourceFileEntries: SourceFile[] = [];
const sfId = () => randomUUID();
const splatId = randomUUID();
const now = Date.now();
for (const f of files) {
const safeName = f.name.replace(/[^a-zA-Z0-9._-]/g, "_");
const filepath = resolve(sourceDir, safeName);
const buffer = Buffer.from(await f.arrayBuffer());
await Bun.write(filepath, buffer);
sourceRows.push({
path: `sources/${slug}/${safeName}`,
name: f.name,
mime: f.type,
size: buffer.length,
sourceFileEntries.push({
id: sfId(),
splatId,
filePath: `sources/${slug}/${safeName}`,
fileName: f.name,
mimeType: f.type,
fileSizeBytes: buffer.length,
createdAt: now,
});
}
// Insert splat record (pending processing)
// Insert splat record (pending processing) into Automerge doc
const paymentTx = (c as any).get("x402Payment") || null;
const splatRows = await sql.unsafe(
`INSERT INTO rsplat.splats (slug, title, description, file_path, file_format, file_size_bytes, tags, space_slug, contributor_id, contributor_name, source, processing_status, source_file_count, payment_tx)
VALUES ($1, $2, $3, '', 'ply', 0, $4, $5, $6, $7, 'media', 'pending', $8, $9)
RETURNING id, slug, title, description, file_format, tags, processing_status, source_file_count, created_at`,
[slug, title, description, tags, spaceSlug, claims.sub, claims.username || null, files.length, paymentTx] as any[]
);
const docId = splatScenesDocId(spaceSlug);
_syncServer!.changeDoc<SplatScenesDoc>(docId, 'add splat from media', (d) => {
d.items[splatId] = {
id: splatId,
slug,
title,
description: description ?? '',
filePath: '',
fileFormat: 'ply',
fileSizeBytes: 0,
tags,
spaceSlug,
contributorId: claims.sub,
contributorName: claims.username || null,
source: 'media',
status: 'published',
viewCount: 0,
paymentTx,
paymentNetwork: null,
createdAt: now,
processingStatus: 'pending',
processingError: null,
sourceFileCount: files.length,
sourceFiles: sourceFileEntries,
};
});
const splatId = splatRows[0].id;
// Insert source file records
for (const sf of sourceRows) {
await sql.unsafe(
`INSERT INTO rsplat.source_files (splat_id, file_path, file_name, mime_type, file_size_bytes)
VALUES ($1, $2, $3, $4, $5)`,
[splatId, sf.path, sf.name, sf.mime, sf.size]
);
}
return c.json(splatRows[0], 201);
return c.json({
id: splatId,
slug,
title,
description,
file_format: 'ply',
tags,
processing_status: 'pending',
source_file_count: files.length,
created_at: new Date(now).toISOString(),
}, 201);
});
// ── API: Delete splat (owner only) ──
@ -399,21 +548,25 @@ routes.delete("/api/splats/:id", async (c) => {
return c.json({ error: "Invalid token" }, 401);
}
const spaceSlug = c.req.param("space") || "demo";
const id = c.req.param("id");
const rows = await sql.unsafe(
`SELECT id, contributor_id FROM rsplat.splats WHERE (slug = $1 OR id::text = $1) AND status = 'published'`,
[id]
);
if (rows.length === 0) return c.json({ error: "Splat not found" }, 404);
if (rows[0].contributor_id !== claims.sub) {
const doc = ensureDoc(spaceSlug);
const found = findItem(doc, id);
if (!found || found[1].status !== 'published') {
return c.json({ error: "Splat not found" }, 404);
}
const [itemKey, item] = found;
if (item.contributorId !== claims.sub) {
return c.json({ error: "Not authorized" }, 403);
}
await sql.unsafe(
`UPDATE rsplat.splats SET status = 'removed' WHERE id = $1`,
[rows[0].id]
);
const docId = splatScenesDocId(spaceSlug);
_syncServer!.changeDoc<SplatScenesDoc>(docId, 'remove splat', (d) => {
d.items[itemKey].status = 'removed';
});
return c.json({ ok: true });
});
@ -422,14 +575,14 @@ routes.delete("/api/splats/:id", async (c) => {
routes.get("/", async (c) => {
const spaceSlug = c.req.param("space") || "demo";
const rows = await sql.unsafe(
`SELECT id, slug, title, description, file_format, file_size_bytes,
tags, contributor_name, view_count, processing_status, source_file_count, created_at
FROM rsplat.splats WHERE status = 'published' AND space_slug = $1
ORDER BY created_at DESC LIMIT 50`,
[spaceSlug]
);
const doc = ensureDoc(spaceSlug);
const items = Object.values(doc.items)
.filter((item) => item.status === 'published')
.sort((a, b) => b.createdAt - a.createdAt)
.slice(0, 50);
const rows = items.map(itemToListRow);
const splatsJSON = JSON.stringify(rows);
const html = renderShell({
@ -461,12 +614,10 @@ routes.get("/view/:id", async (c) => {
const spaceSlug = c.req.param("space") || "demo";
const id = c.req.param("id");
const rows = await sql.unsafe(
`SELECT * FROM rsplat.splats WHERE (slug = $1 OR id::text = $1) AND status = 'published'`,
[id]
);
const doc = ensureDoc(spaceSlug);
const found = findItem(doc, id);
if (rows.length === 0) {
if (!found || found[1].status !== 'published') {
const html = renderShell({
title: "Splat not found | rSpace",
moduleId: "rsplat",
@ -478,15 +629,15 @@ routes.get("/view/:id", async (c) => {
return c.html(html, 404);
}
const splat = rows[0];
const [itemKey, splat] = found;
// Increment view count
await sql.unsafe(
`UPDATE rsplat.splats SET view_count = view_count + 1 WHERE id = $1`,
[splat.id]
);
const docId = splatScenesDocId(spaceSlug);
_syncServer!.changeDoc<SplatScenesDoc>(docId, 'increment view count', (d) => {
d.items[itemKey].viewCount += 1;
});
const fileUrl = `/${spaceSlug}/rsplat/api/splats/${splat.slug}/${splat.slug}.${splat.file_format}`;
const fileUrl = `/${spaceSlug}/rsplat/api/splats/${splat.slug}/${splat.slug}.${splat.fileFormat}`;
const html = renderShell({
title: `${splat.title} | rSplat`,
@ -518,24 +669,6 @@ routes.get("/view/:id", async (c) => {
return c.html(html);
});
// ── Initialize DB schema ──
async function initDB(): Promise<void> {
try {
const schemaPath = resolve(import.meta.dir, "db/schema.sql");
const schemaSql = await readFile(schemaPath, "utf-8");
await sql.unsafe(`SET search_path TO rsplat, public`);
await sql.unsafe(schemaSql);
// Migration: add new columns to existing table
await sql.unsafe(`ALTER TABLE rsplat.splats ADD COLUMN IF NOT EXISTS processing_status TEXT DEFAULT 'ready'`);
await sql.unsafe(`ALTER TABLE rsplat.splats ADD COLUMN IF NOT EXISTS processing_error TEXT`);
await sql.unsafe(`ALTER TABLE rsplat.splats ADD COLUMN IF NOT EXISTS source_file_count INTEGER DEFAULT 0`);
await sql.unsafe(`SET search_path TO public`);
console.log("[Splat] Database schema initialized");
} catch (e) {
console.error("[Splat] Schema init failed:", e);
}
}
// ── Module export ──
export const splatModule: RSpaceModule = {
@ -554,9 +687,10 @@ export const splatModule: RSpaceModule = {
],
async onInit(ctx) {
_syncServer = ctx.syncServer;
await initDB();
console.log("[Splat] Automerge document store ready");
},
async onSpaceCreate(ctx: SpaceLifecycleContext) {
// Splats are scoped by space_slug column. No per-space setup needed.
// Eagerly create the Automerge doc for new spaces
ensureDoc(ctx.spaceSlug);
},
};

View File

@ -3,52 +3,90 @@
*
* Plan trips with destinations, itinerary, bookings, expenses,
* and packing lists. Collaborative with role-based access.
*
* Data layer: Automerge documents via SyncServer.
* One document per trip: {space}:trips:trips:{tripId}
*/
import { Hono } from "hono";
import { readFileSync } from "node:fs";
import { resolve } from "node:path";
import { sql } from "../../shared/db/pool";
import * as Automerge from "@automerge/automerge";
import { renderShell } from "../../server/shell";
import { getModuleInfoList } from "../../shared/module";
import type { RSpaceModule } from "../../shared/module";
import { verifyEncryptIDToken, extractToken } from "@encryptid/sdk/server";
import { renderLanding } from "./landing";
import type { SyncServer } from '../../server/local-first/sync-server';
import { tripSchema } from './schemas';
import {
tripSchema, tripDocId,
type TripDoc, type TripMeta, type Destination, type ItineraryItem,
type Booking, type Expense, type PackingItem,
} from './schemas';
let _syncServer: SyncServer | null = null;
const OSRM_URL = process.env.OSRM_URL || "http://osrm-backend:5000";
const routes = new Hono();
// ── Helpers ──
// ── DB initialization ──
const SCHEMA_SQL = readFileSync(resolve(import.meta.dir, "db/schema.sql"), "utf-8");
async function initDB() {
try {
await sql.unsafe(SCHEMA_SQL);
console.log("[Trips] DB schema initialized");
} catch (e) {
console.error("[Trips] DB init error:", e);
}
/** Generate a short random ID (collision-safe enough for sub-collections). */
function newId(): string {
return crypto.randomUUID().slice(0, 12);
}
/** Ensure a trip document exists; create it lazily if not. */
function ensureDoc(space: string, tripId: string): TripDoc {
const docId = tripDocId(space, tripId);
let doc = _syncServer!.getDoc<TripDoc>(docId);
if (!doc) {
doc = Automerge.change(Automerge.init<TripDoc>(), 'init', (d) => {
const init = tripSchema.init();
d.meta = init.meta;
d.meta.spaceSlug = space;
d.trip = init.trip;
d.trip.id = tripId;
d.destinations = {};
d.itinerary = {};
d.bookings = {};
d.expenses = {};
d.packingItems = {};
});
_syncServer!.setDoc(docId, doc);
}
return doc;
}
/** List all trip doc IDs for a given space. */
function listTripDocIds(space: string): string[] {
const prefix = `${space}:trips:trips:`;
return _syncServer!.listDocs().filter((id) => id.startsWith(prefix));
}
const routes = new Hono();
// ── API: Trips ──
// GET /api/trips — list trips
routes.get("/api/trips", async (c) => {
const rows = await sql.unsafe(
`SELECT t.*,
count(DISTINCT d.id)::int as destination_count,
count(DISTINCT e.id)::int as expense_count,
coalesce(sum(e.amount), 0)::numeric as total_spent
FROM rtrips.trips t
LEFT JOIN rtrips.destinations d ON d.trip_id = t.id
LEFT JOIN rtrips.expenses e ON e.trip_id = t.id
GROUP BY t.id ORDER BY t.created_at DESC`
);
const space = c.req.param("space") || "demo";
const docIds = listTripDocIds(space);
const rows = docIds.map((docId) => {
const doc = _syncServer!.getDoc<TripDoc>(docId);
if (!doc) return null;
const t = doc.trip;
const destinations = Object.values(doc.destinations);
const expenses = Object.values(doc.expenses);
const totalSpent = expenses.reduce((sum, e) => sum + (e.amount || 0), 0);
return {
...t,
destination_count: destinations.length,
expense_count: expenses.length,
total_spent: totalSpent,
};
}).filter(Boolean);
// Sort by createdAt descending (newest first)
rows.sort((a, b) => (b!.createdAt ?? 0) - (a!.createdAt ?? 0));
return c.json(rows);
});
@ -63,61 +101,93 @@ routes.post("/api/trips", async (c) => {
const { title, description, start_date, end_date, budget_total, budget_currency } = body;
if (!title?.trim()) return c.json({ error: "Title required" }, 400);
const space = c.req.param("space") || "demo";
const tripId = newId();
const slug = title.trim().toLowerCase().replace(/[^a-z0-9]+/g, "-").replace(/^-|-$/g, "");
const rows = await sql.unsafe(
`INSERT INTO rtrips.trips (title, slug, description, start_date, end_date, budget_total, budget_currency, created_by)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8) RETURNING *`,
[title.trim(), slug, description || null, start_date || null, end_date || null,
budget_total || null, budget_currency || "USD", claims.sub]
);
return c.json(rows[0], 201);
const now = Date.now();
const docId = tripDocId(space, tripId);
let doc = Automerge.change(Automerge.init<TripDoc>(), 'create trip', (d) => {
const init = tripSchema.init();
d.meta = init.meta;
d.meta.spaceSlug = space;
d.meta.createdAt = now;
d.trip = {
id: tripId,
title: title.trim(),
slug,
description: description || '',
startDate: start_date || null,
endDate: end_date || null,
budgetTotal: budget_total ?? null,
budgetCurrency: budget_currency || 'USD',
status: 'planning',
createdBy: claims.sub,
createdAt: now,
updatedAt: now,
};
d.destinations = {};
d.itinerary = {};
d.bookings = {};
d.expenses = {};
d.packingItems = {};
});
_syncServer!.setDoc(docId, doc);
return c.json(doc.trip, 201);
});
// GET /api/trips/:id — trip detail with all sub-resources
routes.get("/api/trips/:id", async (c) => {
const id = c.req.param("id");
const trip = await sql.unsafe("SELECT * FROM rtrips.trips WHERE id = $1", [id]);
if (trip.length === 0) return c.json({ error: "Trip not found" }, 404);
const space = c.req.param("space") || "demo";
const tripId = c.req.param("id");
const docId = tripDocId(space, tripId);
const doc = _syncServer!.getDoc<TripDoc>(docId);
if (!doc) return c.json({ error: "Trip not found" }, 404);
const [destinations, itinerary, bookings, expenses, packing] = await Promise.all([
sql.unsafe("SELECT * FROM rtrips.destinations WHERE trip_id = $1 ORDER BY sort_order", [id]),
sql.unsafe("SELECT * FROM rtrips.itinerary_items WHERE trip_id = $1 ORDER BY date, sort_order", [id]),
sql.unsafe("SELECT * FROM rtrips.bookings WHERE trip_id = $1 ORDER BY start_date", [id]),
sql.unsafe("SELECT * FROM rtrips.expenses WHERE trip_id = $1 ORDER BY date DESC", [id]),
sql.unsafe("SELECT * FROM rtrips.packing_items WHERE trip_id = $1 ORDER BY category, sort_order", [id]),
]);
const destinations = Object.values(doc.destinations).sort((a, b) => a.sortOrder - b.sortOrder);
const itinerary = Object.values(doc.itinerary).sort((a, b) => {
const dateCmp = (a.date || '').localeCompare(b.date || '');
return dateCmp !== 0 ? dateCmp : a.sortOrder - b.sortOrder;
});
const bookings = Object.values(doc.bookings).sort((a, b) => (a.startDate || '').localeCompare(b.startDate || ''));
const expenses = Object.values(doc.expenses).sort((a, b) => (b.date || '').localeCompare(a.date || ''));
const packing = Object.values(doc.packingItems).sort((a, b) => {
const catCmp = (a.category || '').localeCompare(b.category || '');
return catCmp !== 0 ? catCmp : a.sortOrder - b.sortOrder;
});
return c.json({ ...trip[0], destinations, itinerary, bookings, expenses, packing });
return c.json({ ...doc.trip, destinations, itinerary, bookings, expenses, packing });
});
// PUT /api/trips/:id — update trip
routes.put("/api/trips/:id", async (c) => {
const id = c.req.param("id");
const space = c.req.param("space") || "demo";
const tripId = c.req.param("id");
const docId = tripDocId(space, tripId);
const doc = _syncServer!.getDoc<TripDoc>(docId);
if (!doc) return c.json({ error: "Not found" }, 404);
const body = await c.req.json();
const { title, description, start_date, end_date, budget_total, budget_currency, status } = body;
const fields: string[] = [];
const params: any[] = [];
let idx = 1;
const hasFields = [title, description, start_date, end_date, budget_total, budget_currency, status]
.some((v) => v !== undefined);
if (!hasFields) return c.json({ error: "No fields" }, 400);
if (title !== undefined) { fields.push(`title = $${idx}`); params.push(title); idx++; }
if (description !== undefined) { fields.push(`description = $${idx}`); params.push(description); idx++; }
if (start_date !== undefined) { fields.push(`start_date = $${idx}`); params.push(start_date); idx++; }
if (end_date !== undefined) { fields.push(`end_date = $${idx}`); params.push(end_date); idx++; }
if (budget_total !== undefined) { fields.push(`budget_total = $${idx}`); params.push(budget_total); idx++; }
if (budget_currency !== undefined) { fields.push(`budget_currency = $${idx}`); params.push(budget_currency); idx++; }
if (status !== undefined) { fields.push(`status = $${idx}`); params.push(status); idx++; }
_syncServer!.changeDoc<TripDoc>(docId, 'update trip', (d) => {
if (title !== undefined) d.trip.title = title;
if (description !== undefined) d.trip.description = description;
if (start_date !== undefined) d.trip.startDate = start_date;
if (end_date !== undefined) d.trip.endDate = end_date;
if (budget_total !== undefined) d.trip.budgetTotal = budget_total;
if (budget_currency !== undefined) d.trip.budgetCurrency = budget_currency;
if (status !== undefined) d.trip.status = status;
d.trip.updatedAt = Date.now();
});
if (fields.length === 0) return c.json({ error: "No fields" }, 400);
fields.push("updated_at = NOW()");
params.push(id);
const rows = await sql.unsafe(
`UPDATE rtrips.trips SET ${fields.join(", ")} WHERE id = $${idx} RETURNING *`,
params
);
if (rows.length === 0) return c.json({ error: "Not found" }, 404);
return c.json(rows[0]);
const updated = _syncServer!.getDoc<TripDoc>(docId);
return c.json(updated!.trip);
});
// ── API: Destinations ──
@ -127,14 +197,33 @@ routes.post("/api/trips/:id/destinations", async (c) => {
if (!token) return c.json({ error: "Authentication required" }, 401);
try { await verifyEncryptIDToken(token); } catch { return c.json({ error: "Invalid token" }, 401); }
const space = c.req.param("space") || "demo";
const tripId = c.req.param("id");
ensureDoc(space, tripId);
const docId = tripDocId(space, tripId);
const body = await c.req.json();
const rows = await sql.unsafe(
`INSERT INTO rtrips.destinations (trip_id, name, country, lat, lng, arrival_date, departure_date, notes, sort_order)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9) RETURNING *`,
[c.req.param("id"), body.name, body.country || null, body.lat || null, body.lng || null,
body.arrival_date || null, body.departure_date || null, body.notes || null, body.sort_order ?? 0]
);
return c.json(rows[0], 201);
const destId = newId();
const now = Date.now();
_syncServer!.changeDoc<TripDoc>(docId, 'add destination', (d) => {
d.destinations[destId] = {
id: destId,
tripId,
name: body.name,
country: body.country || null,
lat: body.lat ?? null,
lng: body.lng ?? null,
arrivalDate: body.arrival_date || null,
departureDate: body.departure_date || null,
notes: body.notes || '',
sortOrder: body.sort_order ?? 0,
createdAt: now,
};
});
const updated = _syncServer!.getDoc<TripDoc>(docId);
return c.json(updated!.destinations[destId], 201);
});
// ── API: Itinerary ──
@ -144,14 +233,33 @@ routes.post("/api/trips/:id/itinerary", async (c) => {
if (!token) return c.json({ error: "Authentication required" }, 401);
try { await verifyEncryptIDToken(token); } catch { return c.json({ error: "Invalid token" }, 401); }
const space = c.req.param("space") || "demo";
const tripId = c.req.param("id");
ensureDoc(space, tripId);
const docId = tripDocId(space, tripId);
const body = await c.req.json();
const rows = await sql.unsafe(
`INSERT INTO rtrips.itinerary_items (trip_id, destination_id, title, category, date, start_time, end_time, notes, sort_order)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9) RETURNING *`,
[c.req.param("id"), body.destination_id || null, body.title, body.category || "ACTIVITY",
body.date || null, body.start_time || null, body.end_time || null, body.notes || null, body.sort_order ?? 0]
);
return c.json(rows[0], 201);
const itemId = newId();
const now = Date.now();
_syncServer!.changeDoc<TripDoc>(docId, 'add itinerary item', (d) => {
d.itinerary[itemId] = {
id: itemId,
tripId,
destinationId: body.destination_id || null,
title: body.title,
category: body.category || 'ACTIVITY',
date: body.date || null,
startTime: body.start_time || null,
endTime: body.end_time || null,
notes: body.notes || '',
sortOrder: body.sort_order ?? 0,
createdAt: now,
};
});
const updated = _syncServer!.getDoc<TripDoc>(docId);
return c.json(updated!.itinerary[itemId], 201);
});
// ── API: Bookings ──
@ -161,14 +269,34 @@ routes.post("/api/trips/:id/bookings", async (c) => {
if (!token) return c.json({ error: "Authentication required" }, 401);
try { await verifyEncryptIDToken(token); } catch { return c.json({ error: "Invalid token" }, 401); }
const space = c.req.param("space") || "demo";
const tripId = c.req.param("id");
ensureDoc(space, tripId);
const docId = tripDocId(space, tripId);
const body = await c.req.json();
const rows = await sql.unsafe(
`INSERT INTO rtrips.bookings (trip_id, type, provider, confirmation_number, cost, currency, start_date, end_date, notes)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9) RETURNING *`,
[c.req.param("id"), body.type || "OTHER", body.provider || null, body.confirmation_number || null,
body.cost || null, body.currency || "USD", body.start_date || null, body.end_date || null, body.notes || null]
);
return c.json(rows[0], 201);
const bookingId = newId();
const now = Date.now();
_syncServer!.changeDoc<TripDoc>(docId, 'add booking', (d) => {
d.bookings[bookingId] = {
id: bookingId,
tripId,
type: body.type || 'OTHER',
provider: body.provider || null,
confirmationNumber: body.confirmation_number || null,
cost: body.cost ?? null,
currency: body.currency || 'USD',
startDate: body.start_date || null,
endDate: body.end_date || null,
status: null,
notes: body.notes || '',
createdAt: now,
};
});
const updated = _syncServer!.getDoc<TripDoc>(docId);
return c.json(updated!.bookings[bookingId], 201);
});
// ── API: Expenses ──
@ -178,23 +306,47 @@ routes.post("/api/trips/:id/expenses", async (c) => {
if (!token) return c.json({ error: "Authentication required" }, 401);
try { await verifyEncryptIDToken(token); } catch { return c.json({ error: "Invalid token" }, 401); }
const space = c.req.param("space") || "demo";
const tripId = c.req.param("id");
ensureDoc(space, tripId);
const docId = tripDocId(space, tripId);
const body = await c.req.json();
const rows = await sql.unsafe(
`INSERT INTO rtrips.expenses (trip_id, description, amount, currency, category, date, split_type)
VALUES ($1, $2, $3, $4, $5, $6, $7) RETURNING *`,
[c.req.param("id"), body.description, body.amount, body.currency || "USD",
body.category || "OTHER", body.date || null, body.split_type || "EQUAL"]
);
return c.json(rows[0], 201);
const expenseId = newId();
const now = Date.now();
_syncServer!.changeDoc<TripDoc>(docId, 'add expense', (d) => {
d.expenses[expenseId] = {
id: expenseId,
tripId,
paidBy: null,
description: body.description,
amount: body.amount,
currency: body.currency || 'USD',
category: body.category || 'OTHER',
date: body.date || null,
splitType: body.split_type || 'EQUAL',
createdAt: now,
};
});
const updated = _syncServer!.getDoc<TripDoc>(docId);
return c.json(updated!.expenses[expenseId], 201);
});
// ── API: Packing ──
routes.get("/api/trips/:id/packing", async (c) => {
const rows = await sql.unsafe(
"SELECT * FROM rtrips.packing_items WHERE trip_id = $1 ORDER BY category, sort_order",
[c.req.param("id")]
);
const space = c.req.param("space") || "demo";
const tripId = c.req.param("id");
const docId = tripDocId(space, tripId);
const doc = _syncServer!.getDoc<TripDoc>(docId);
if (!doc) return c.json([]);
const rows = Object.values(doc.packingItems).sort((a, b) => {
const catCmp = (a.category || '').localeCompare(b.category || '');
return catCmp !== 0 ? catCmp : a.sortOrder - b.sortOrder;
});
return c.json(rows);
});
@ -203,23 +355,53 @@ routes.post("/api/trips/:id/packing", async (c) => {
if (!token) return c.json({ error: "Authentication required" }, 401);
try { await verifyEncryptIDToken(token); } catch { return c.json({ error: "Invalid token" }, 401); }
const space = c.req.param("space") || "demo";
const tripId = c.req.param("id");
ensureDoc(space, tripId);
const docId = tripDocId(space, tripId);
const body = await c.req.json();
const rows = await sql.unsafe(
`INSERT INTO rtrips.packing_items (trip_id, name, category, quantity, sort_order)
VALUES ($1, $2, $3, $4, $5) RETURNING *`,
[c.req.param("id"), body.name, body.category || "GENERAL", body.quantity || 1, body.sort_order ?? 0]
);
return c.json(rows[0], 201);
const itemId = newId();
const now = Date.now();
_syncServer!.changeDoc<TripDoc>(docId, 'add packing item', (d) => {
d.packingItems[itemId] = {
id: itemId,
tripId,
addedBy: null,
name: body.name,
category: body.category || 'GENERAL',
packed: false,
quantity: body.quantity || 1,
sortOrder: body.sort_order ?? 0,
createdAt: now,
};
});
const updated = _syncServer!.getDoc<TripDoc>(docId);
return c.json(updated!.packingItems[itemId], 201);
});
routes.patch("/api/packing/:id", async (c) => {
const body = await c.req.json();
const rows = await sql.unsafe(
"UPDATE rtrips.packing_items SET packed = $1 WHERE id = $2 RETURNING *",
[body.packed ?? false, c.req.param("id")]
);
if (rows.length === 0) return c.json({ error: "Not found" }, 404);
return c.json(rows[0]);
const space = c.req.param("space") || "demo";
const packingId = c.req.param("id");
// Find the trip doc containing this packing item
const docIds = listTripDocIds(space);
for (const docId of docIds) {
const doc = _syncServer!.getDoc<TripDoc>(docId);
if (!doc || !doc.packingItems[packingId]) continue;
const body = await c.req.json();
_syncServer!.changeDoc<TripDoc>(docId, 'toggle packing item', (d) => {
d.packingItems[packingId].packed = body.packed ?? false;
});
const updated = _syncServer!.getDoc<TripDoc>(docId);
return c.json(updated!.packingItems[packingId]);
}
return c.json({ error: "Not found" }, 404);
});
// ── OSRM proxy for route planner ──
@ -279,7 +461,6 @@ export const tripsModule: RSpaceModule = {
landingPage: renderLanding,
async onInit(ctx) {
_syncServer = ctx.syncServer;
await initDB();
},
standaloneDomain: "rtrips.online",
feeds: [

View File

@ -3,116 +3,166 @@
*
* Credit-weighted conviction voting for collaborative governance.
* Spaces run ranked proposals with configurable parameters.
*
* All state stored in Automerge documents via SyncServer.
* Doc layout:
 *   {space}:vote:config — SpaceConfig (stored on a ProposalDoc)
 *   {space}:vote:proposals:{proposalId} — ProposalDoc with votes/finalVotes
*/
import { Hono } from "hono";
import { readFileSync } from "node:fs";
import { resolve } from "node:path";
import * as Automerge from '@automerge/automerge';
import { sql } from "../../shared/db/pool";
import { renderShell } from "../../server/shell";
import { getModuleInfoList } from "../../shared/module";
import type { RSpaceModule, SpaceLifecycleContext } from "../../shared/module";
import type { RSpaceModule } from "../../shared/module";
import { verifyEncryptIDToken, extractToken } from "@encryptid/sdk/server";
import { renderLanding } from "./landing";
import type { SyncServer } from '../../server/local-first/sync-server';
import { proposalSchema, proposalDocId } from './schemas';
import type { ProposalDoc } from './schemas';
import type { ProposalDoc, SpaceConfig } from './schemas';
const routes = new Hono();
// ── DB initialization ──
const SCHEMA_SQL = readFileSync(resolve(import.meta.dir, "db/schema.sql"), "utf-8");
/**
 * Apply the rvote SQL schema at startup.
 * Failures are logged rather than thrown so a broken DB does not
 * prevent the rest of the module from initializing.
 */
async function initDB() {
  try {
    await sql.unsafe(SCHEMA_SQL);
  } catch (e) {
    console.error("[Vote] DB init error:", e);
    return;
  }
  console.log("[Vote] DB schema initialized");
}
// Seed a demo voting space plus five proposals — one per lifecycle state —
// but only when the rvote.spaces table is completely empty, so restarts
// never duplicate data. Any error is logged and swallowed; seeding is
// best-effort and must not block startup.
async function seedDemoIfEmpty() {
  try {
    const count = await sql.unsafe("SELECT count(*)::int as cnt FROM rvote.spaces");
    // Any existing space means we've already seeded (or real data exists).
    if (parseInt(count[0].cnt) > 0) return;
    // Create demo user
    const user = await sql.unsafe(
      `INSERT INTO rvote.users (did, username) VALUES ('did:demo:seed', 'demo')
       ON CONFLICT (did) DO UPDATE SET username = 'demo' RETURNING id`
    );
    const userId = user[0].id;
    // Create voting space (matches standalone rVote settings)
    await sql.unsafe(
      `INSERT INTO rvote.spaces (slug, name, description, owner_did, visibility, promotion_threshold, voting_period_days, credits_per_day, max_credits, starting_credits)
       VALUES ('community', 'Community Governance', 'Proposals for the rSpace ecosystem', 'did:demo:seed', 'public_read', 100, 7, 10, 500, 50)`
    );
    // Seed proposals in various states
    const proposals = [
      { title: "Add dark mode across all r* modules", desc: "Implement a consistent dark theme with a toggle in shell.css. Use CSS custom properties for theming so each module inherits automatically.", status: "RANKING", score: 45 },
      { title: "Implement real-time collaboration in rNotes", desc: "Use Automerge CRDTs (already in the stack) to enable simultaneous editing of notes, similar to how rSpace canvas works.", status: "RANKING", score: 72 },
      { title: "Adopt cosmolocal print-on-demand for all merch", desc: "Route all merchandise orders through the provider registry to find the closest printer. Reduces shipping emissions and supports local economies.", status: "VOTING", score: 105 },
      { title: "Use EncryptID passkeys for all authentication", desc: "Standardize on WebAuthn passkeys via EncryptID across the entire r* ecosystem. One passkey, all apps.", status: "PASSED", score: 150 },
      { title: "Switch from PostgreSQL to SQLite for simpler deployment", desc: "Evaluate replacing PostgreSQL with SQLite for modules that don't need concurrent writes.", status: "FAILED", score: 30 },
    ];
    for (const p of proposals) {
      const row = await sql.unsafe(
        `INSERT INTO rvote.proposals (space_slug, author_id, title, description, status, score)
         VALUES ('community', $1, $2, $3, $4, $5) RETURNING id`,
        [userId, p.title, p.desc, p.status, p.score]
      );
      // Backfill phase-specific columns for proposals past the RANKING phase.
      if (p.status === "VOTING") {
        await sql.unsafe(
          `UPDATE rvote.proposals SET voting_ends_at = NOW() + INTERVAL '5 days', final_yes = 5, final_no = 2 WHERE id = $1`,
          [row[0].id]
        );
      } else if (p.status === "PASSED") {
        await sql.unsafe(
          `UPDATE rvote.proposals SET final_yes = 12, final_no = 3, final_abstain = 2 WHERE id = $1`,
          [row[0].id]
        );
      } else if (p.status === "FAILED") {
        await sql.unsafe(
          `UPDATE rvote.proposals SET final_yes = 2, final_no = 8, final_abstain = 1 WHERE id = $1`,
          [row[0].id]
        );
      }
    }
    console.log("[Vote] Demo data seeded: 1 space, 5 proposals");
  } catch (e) {
    console.error("[Vote] Seed error:", e);
  }
}
// ── Local-first helpers ──
// ── SyncServer ref (set during onInit) ──
let _syncServer: SyncServer | null = null;
function isLocalFirst(space: string): boolean {
if (!_syncServer) return false;
return _syncServer.getDocIds().some((id) => id.startsWith(`${space}:vote:`));
// ── DocId helpers ──
/** Build the per-space docId under which the voting SpaceConfig is stored. */
function spaceConfigDocId(space: string) {
  const docId = `${space}:vote:config` as const;
  return docId;
}
// ── Helper: get or create user by DID ──
async function getOrCreateUser(did: string, username?: string) {
const rows = await sql.unsafe(
`INSERT INTO rvote.users (did, username) VALUES ($1, $2)
ON CONFLICT (did) DO UPDATE SET username = COALESCE($2, rvote.users.username)
RETURNING *`,
[did, username || null]
);
return rows[0];
// ── Automerge helpers ──
/**
 * Ensure a proposal doc exists for (space, proposalId).
 * On first access the doc is materialized from the schema template and
 * registered with the sync server; subsequent calls return the cached doc.
 */
function ensureProposalDoc(space: string, proposalId: string): ProposalDoc {
  const docId = proposalDocId(space, proposalId);
  const existing = _syncServer!.getDoc<ProposalDoc>(docId);
  if (existing) return existing;

  const created = Automerge.change(Automerge.init<ProposalDoc>(), 'init proposal', (d) => {
    Object.assign(d, proposalSchema.init());
    d.meta.spaceSlug = space;
    d.proposal.id = proposalId;
    d.proposal.spaceSlug = space;
  });
  _syncServer!.setDoc(docId, created);
  return created;
}
/**
 * Ensure a space config doc exists, creating it if needed. Returns the doc.
 * A freshly created config carries empty name/owner fields plus the default
 * conviction-voting parameters; callers overwrite these via changeDoc.
 */
function ensureSpaceConfigDoc(space: string): ProposalDoc {
  const docId = spaceConfigDocId(space);
  const existing = _syncServer!.getDoc<ProposalDoc>(docId);
  if (existing) return existing;

  const created = Automerge.change(Automerge.init<ProposalDoc>(), 'init space config', (d) => {
    Object.assign(d, proposalSchema.init());
    d.meta.spaceSlug = space;
    // Defaults mirror the standalone rVote settings.
    d.spaceConfig = {
      slug: space,
      name: '',
      description: '',
      ownerDid: '',
      visibility: 'public_read',
      promotionThreshold: 100,
      votingPeriodDays: 7,
      creditsPerDay: 10,
      maxCredits: 500,
      startingCredits: 50,
      createdAt: Date.now(),
      updatedAt: Date.now(),
    };
  });
  _syncServer!.setDoc(docId, created);
  return created;
}
/** Get all space config docs across all spaces (only those with a spaceConfig set). */
function listSpaceConfigDocs(): { docId: string; doc: ProposalDoc }[] {
  const sync = _syncServer;
  if (!sync) return [];
  return sync
    .listDocs()
    .filter((docId) => docId.endsWith(':vote:config'))
    .flatMap((docId) => {
      const doc = sync.getDoc<ProposalDoc>(docId);
      return doc?.spaceConfig ? [{ docId, doc }] : [];
    });
}
/** Get all proposal docs belonging to one space. */
function listProposalDocs(space: string): { docId: string; doc: ProposalDoc }[] {
  const sync = _syncServer;
  if (!sync) return [];
  const prefix = `${space}:vote:proposals:`;
  return sync
    .listDocs()
    .filter((docId) => docId.startsWith(prefix))
    .flatMap((docId) => {
      const doc = sync.getDoc<ProposalDoc>(docId);
      return doc ? [{ docId, doc }] : [];
    });
}
/** Get all proposal docs across every space. */
function listAllProposalDocs(): { docId: string; doc: ProposalDoc }[] {
  const sync = _syncServer;
  if (!sync) return [];
  return sync
    .listDocs()
    .filter((docId) => docId.includes(':vote:proposals:'))
    .flatMap((docId) => {
      const doc = sync.getDoc<ProposalDoc>(docId);
      return doc ? [{ docId, doc }] : [];
    });
}
// ── Conversion helpers (Automerge → REST format) ──
/**
 * Map an Automerge SpaceConfig (camelCase keys, epoch-ms timestamps) onto
 * the legacy snake_case REST shape with ISO-8601 date strings.
 */
function spaceConfigToRest(config: SpaceConfig) {
  const toIso = (ms: number) => new Date(ms).toISOString();
  return {
    slug: config.slug,
    name: config.name,
    description: config.description,
    owner_did: config.ownerDid,
    visibility: config.visibility,
    promotion_threshold: config.promotionThreshold,
    voting_period_days: config.votingPeriodDays,
    credits_per_day: config.creditsPerDay,
    max_credits: config.maxCredits,
    starting_credits: config.startingCredits,
    created_at: toIso(config.createdAt),
    updated_at: toIso(config.updatedAt),
  };
}
/**
 * Map a ProposalDoc onto the legacy REST proposal shape.
 * vote_count is serialized as a string to mirror the old SQL count(*) column;
 * voting_ends_at stays null when unset (falsy epoch values also map to null).
 */
function proposalToRest(doc: ProposalDoc) {
  const { proposal: p, votes } = doc;
  return {
    id: p.id,
    space_slug: p.spaceSlug,
    author_id: p.authorId,
    title: p.title,
    description: p.description,
    status: p.status,
    score: p.score,
    voting_ends_at: p.votingEndsAt ? new Date(p.votingEndsAt).toISOString() : null,
    final_yes: p.finalYes,
    final_no: p.finalNo,
    final_abstain: p.finalAbstain,
    vote_count: String(Object.keys(votes).length),
    created_at: new Date(p.createdAt).toISOString(),
    updated_at: new Date(p.updatedAt).toISOString(),
  };
}
// ── Helper: calculate effective weight with decay ──
function getEffectiveWeight(weight: number, createdAt: Date): number {
const ageMs = Date.now() - createdAt.getTime();
function getEffectiveWeight(weight: number, createdAt: number): number {
const ageMs = Date.now() - createdAt;
const ageDays = ageMs / (1000 * 60 * 60 * 24);
if (ageDays < 30) return weight;
if (ageDays >= 60) return 0;
@ -120,31 +170,105 @@ function getEffectiveWeight(weight: number, createdAt: Date): number {
return Math.round(weight * (1 - decayProgress));
}
// ── Helper: recalculate proposal score ──
async function recalcScore(proposalId: string) {
const votes = await sql.unsafe(
"SELECT weight, created_at FROM rvote.votes WHERE proposal_id = $1",
[proposalId]
);
// ── Helper: recalculate proposal score from votes ──
function recalcScore(doc: ProposalDoc): number {
let score = 0;
for (const v of votes) {
score += getEffectiveWeight(v.weight, new Date(v.created_at));
for (const v of Object.values(doc.votes)) {
score += getEffectiveWeight(v.weight, v.createdAt);
}
await sql.unsafe(
"UPDATE rvote.proposals SET score = $1, updated_at = NOW() WHERE id = $2",
[score, proposalId]
);
return score;
}
// ── Helper: generate unique ID ──
/** Generate a collision-resistant random identifier (UUID v4). */
function newId(): string {
  return globalThis.crypto.randomUUID();
}
// ── Seed demo data into Automerge ──
/**
 * Seed the 'community' demo voting space and five example proposals —
 * one per proposal lifecycle state (RANKING, VOTING, PASSED, FAILED) —
 * into Automerge docs via the SyncServer.
 *
 * Idempotent: bails out as soon as any space config doc exists, so
 * restarts never duplicate the demo data.
 *
 * Fix: the original bound ensureSpaceConfigDoc()'s return value to an
 * unused `spaceDoc` variable; only the creation side effect is needed.
 */
function seedDemoIfEmpty() {
  const existingSpaces = listSpaceConfigDocs();
  if (existingSpaces.length > 0) return;
  // Create demo space (call for its side effect of registering the doc).
  ensureSpaceConfigDoc('community');
  _syncServer!.changeDoc<ProposalDoc>(spaceConfigDocId('community'), 'seed space config', (d) => {
    d.spaceConfig!.name = 'Community Governance';
    d.spaceConfig!.description = 'Proposals for the rSpace ecosystem';
    d.spaceConfig!.ownerDid = 'did:demo:seed';
    d.spaceConfig!.promotionThreshold = 100;
    d.spaceConfig!.votingPeriodDays = 7;
    d.spaceConfig!.creditsPerDay = 10;
    d.spaceConfig!.maxCredits = 500;
    d.spaceConfig!.startingCredits = 50;
  });
  const demoUserId = 'did:demo:seed';
  const now = Date.now();
  // One proposal per lifecycle state so the UI has data in every view.
  const proposals = [
    { title: "Add dark mode across all r* modules", desc: "Implement a consistent dark theme with a toggle in shell.css. Use CSS custom properties for theming so each module inherits automatically.", status: "RANKING", score: 45 },
    { title: "Implement real-time collaboration in rNotes", desc: "Use Automerge CRDTs (already in the stack) to enable simultaneous editing of notes, similar to how rSpace canvas works.", status: "RANKING", score: 72 },
    { title: "Adopt cosmolocal print-on-demand for all merch", desc: "Route all merchandise orders through the provider registry to find the closest printer. Reduces shipping emissions and supports local economies.", status: "VOTING", score: 105 },
    { title: "Use EncryptID passkeys for all authentication", desc: "Standardize on WebAuthn passkeys via EncryptID across the entire r* ecosystem. One passkey, all apps.", status: "PASSED", score: 150 },
    { title: "Switch from PostgreSQL to SQLite for simpler deployment", desc: "Evaluate replacing PostgreSQL with SQLite for modules that don't need concurrent writes.", status: "FAILED", score: 30 },
  ];
  for (const p of proposals) {
    const pid = newId();
    const docId = proposalDocId('community', pid);
    let doc = Automerge.change(Automerge.init<ProposalDoc>(), 'seed proposal', (d) => {
      const init = proposalSchema.init();
      Object.assign(d, init);
      d.meta.spaceSlug = 'community';
      d.proposal.id = pid;
      d.proposal.spaceSlug = 'community';
      d.proposal.authorId = demoUserId;
      d.proposal.title = p.title;
      d.proposal.description = p.desc;
      d.proposal.status = p.status;
      d.proposal.score = p.score;
      d.proposal.createdAt = now;
      d.proposal.updatedAt = now;
    });
    // Layer phase-specific tallies onto proposals past the RANKING phase.
    if (p.status === "VOTING") {
      doc = Automerge.change(doc, 'set voting tally', (d) => {
        d.proposal.votingEndsAt = now + 5 * 24 * 60 * 60 * 1000;
        d.proposal.finalYes = 5;
        d.proposal.finalNo = 2;
      });
    } else if (p.status === "PASSED") {
      doc = Automerge.change(doc, 'set passed tally', (d) => {
        d.proposal.finalYes = 12;
        d.proposal.finalNo = 3;
        d.proposal.finalAbstain = 2;
      });
    } else if (p.status === "FAILED") {
      doc = Automerge.change(doc, 'set failed tally', (d) => {
        d.proposal.finalYes = 2;
        d.proposal.finalNo = 8;
        d.proposal.finalAbstain = 1;
      });
    }
    _syncServer!.setDoc(docId, doc);
  }
  console.log("[Vote] Demo data seeded: 1 space, 5 proposals");
}
// ── Spaces API ──
// GET /api/spaces — list spaces
routes.get("/api/spaces", async (c) => {
const rows = await sql.unsafe(
"SELECT * FROM rvote.spaces ORDER BY created_at DESC LIMIT 50"
);
return c.json({ spaces: rows });
routes.get("/api/spaces", (c) => {
const spaceDocs = listSpaceConfigDocs();
const spaces = spaceDocs
.filter((s) => s.doc.spaceConfig !== null)
.map((s) => spaceConfigToRest(s.doc.spaceConfig!))
.sort((a, b) => new Date(b.created_at).getTime() - new Date(a.created_at).getTime())
.slice(0, 50);
return c.json({ spaces });
});
// POST /api/spaces — create space
@ -159,53 +283,65 @@ routes.post("/api/spaces", async (c) => {
if (!name || !slug) return c.json({ error: "name and slug required" }, 400);
if (!/^[a-z0-9-]+$/.test(slug)) return c.json({ error: "Invalid slug" }, 400);
try {
const rows = await sql.unsafe(
`INSERT INTO rvote.spaces (slug, name, description, owner_did, visibility)
VALUES ($1, $2, $3, $4, $5) RETURNING *`,
[slug, name, description || null, claims.sub, visibility]
);
return c.json(rows[0], 201);
} catch (e: any) {
if (e.code === "23505") return c.json({ error: "Space already exists" }, 409);
throw e;
}
// Check if space already exists
const existing = _syncServer!.getDoc<ProposalDoc>(spaceConfigDocId(slug));
if (existing?.spaceConfig?.name) return c.json({ error: "Space already exists" }, 409);
const now = Date.now();
const doc = ensureSpaceConfigDoc(slug);
_syncServer!.changeDoc<ProposalDoc>(spaceConfigDocId(slug), 'create space', (d) => {
d.spaceConfig!.slug = slug;
d.spaceConfig!.name = name;
d.spaceConfig!.description = description || '';
d.spaceConfig!.ownerDid = claims.sub;
d.spaceConfig!.visibility = visibility;
d.spaceConfig!.createdAt = now;
d.spaceConfig!.updatedAt = now;
});
const updated = _syncServer!.getDoc<ProposalDoc>(spaceConfigDocId(slug));
return c.json(spaceConfigToRest(updated!.spaceConfig!), 201);
});
// GET /api/spaces/:slug — space detail
routes.get("/api/spaces/:slug", async (c) => {
routes.get("/api/spaces/:slug", (c) => {
const slug = c.req.param("slug");
const rows = await sql.unsafe("SELECT * FROM rvote.spaces WHERE slug = $1", [slug]);
if (rows.length === 0) return c.json({ error: "Space not found" }, 404);
return c.json(rows[0]);
const doc = _syncServer!.getDoc<ProposalDoc>(spaceConfigDocId(slug));
if (!doc?.spaceConfig?.name) return c.json({ error: "Space not found" }, 404);
return c.json(spaceConfigToRest(doc.spaceConfig));
});
// ── Proposals API ──
// GET /api/proposals — list proposals (query: space_slug, status)
routes.get("/api/proposals", async (c) => {
routes.get("/api/proposals", (c) => {
const { space_slug, status, limit = "50", offset = "0" } = c.req.query();
const conditions: string[] = [];
const params: any[] = [];
let idx = 1;
const maxLimit = Math.min(parseInt(limit) || 50, 100);
const startOffset = parseInt(offset) || 0;
let docs: { docId: string; doc: ProposalDoc }[];
if (space_slug) {
conditions.push(`space_slug = $${idx}`);
params.push(space_slug);
idx++;
}
if (status) {
conditions.push(`status = $${idx}`);
params.push(status);
idx++;
docs = listProposalDocs(space_slug);
} else {
docs = listAllProposalDocs();
}
const where = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
const rows = await sql.unsafe(
`SELECT * FROM rvote.proposals ${where} ORDER BY score DESC, created_at DESC LIMIT ${Math.min(parseInt(limit), 100)} OFFSET ${parseInt(offset) || 0}`,
params
);
return c.json({ proposals: rows });
let proposals = docs
.filter((d) => d.doc.proposal.title) // exclude empty/config docs
.map((d) => proposalToRest(d.doc));
if (status) {
proposals = proposals.filter((p) => p.status === status);
}
// Sort by score descending, then created_at descending
proposals.sort((a, b) => {
if (b.score !== a.score) return b.score - a.score;
return new Date(b.created_at).getTime() - new Date(a.created_at).getTime();
});
proposals = proposals.slice(startOffset, startOffset + maxLimit);
return c.json({ proposals });
});
// POST /api/proposals — create proposal
@ -220,29 +356,40 @@ routes.post("/api/proposals", async (c) => {
if (!space_slug || !title) return c.json({ error: "space_slug and title required" }, 400);
// Verify space exists
const space = await sql.unsafe("SELECT slug FROM rvote.spaces WHERE slug = $1", [space_slug]);
if (space.length === 0) return c.json({ error: "Space not found" }, 404);
const spaceDoc = _syncServer!.getDoc<ProposalDoc>(spaceConfigDocId(space_slug));
if (!spaceDoc?.spaceConfig?.name) return c.json({ error: "Space not found" }, 404);
const user = await getOrCreateUser(claims.sub, claims.username);
const rows = await sql.unsafe(
`INSERT INTO rvote.proposals (space_slug, author_id, title, description)
VALUES ($1, $2, $3, $4) RETURNING *`,
[space_slug, user.id, title, description || null]
);
return c.json(rows[0], 201);
const pid = newId();
const now = Date.now();
const docId = proposalDocId(space_slug, pid);
const doc = Automerge.change(Automerge.init<ProposalDoc>(), 'create proposal', (d) => {
const init = proposalSchema.init();
Object.assign(d, init);
d.meta.spaceSlug = space_slug;
d.proposal.id = pid;
d.proposal.spaceSlug = space_slug;
d.proposal.authorId = claims.sub;
d.proposal.title = title;
d.proposal.description = description || '';
d.proposal.createdAt = now;
d.proposal.updatedAt = now;
});
_syncServer!.setDoc(docId, doc);
return c.json(proposalToRest(doc), 201);
});
// GET /api/proposals/:id — proposal detail
routes.get("/api/proposals/:id", async (c) => {
routes.get("/api/proposals/:id", (c) => {
const id = c.req.param("id");
const rows = await sql.unsafe(
`SELECT p.*,
(SELECT count(*) FROM rvote.votes WHERE proposal_id = p.id) as vote_count
FROM rvote.proposals p WHERE p.id = $1`,
[id]
);
if (rows.length === 0) return c.json({ error: "Proposal not found" }, 404);
return c.json(rows[0]);
// Search across all spaces for this proposal
const allDocs = listAllProposalDocs();
const match = allDocs.find((d) => d.doc.proposal.id === id);
if (!match) return c.json({ error: "Proposal not found" }, 404);
return c.json(proposalToRest(match.doc));
});
// POST /api/proposals/:id/vote — cast conviction vote
@ -256,40 +403,53 @@ routes.post("/api/proposals/:id/vote", async (c) => {
const body = await c.req.json();
const { weight = 1 } = body;
// Verify proposal is in RANKING
const proposal = await sql.unsafe(
"SELECT * FROM rvote.proposals WHERE id = $1",
[id]
);
if (proposal.length === 0) return c.json({ error: "Proposal not found" }, 404);
if (proposal[0].status !== "RANKING") return c.json({ error: "Proposal not in ranking phase" }, 400);
// Find proposal
const allDocs = listAllProposalDocs();
const match = allDocs.find((d) => d.doc.proposal.id === id);
if (!match) return c.json({ error: "Proposal not found" }, 404);
if (match.doc.proposal.status !== "RANKING") return c.json({ error: "Proposal not in ranking phase" }, 400);
const user = await getOrCreateUser(claims.sub, claims.username);
const userId = claims.sub;
const creditCost = weight * weight; // quadratic cost
const now = Date.now();
const thirtyDaysMs = 30 * 24 * 60 * 60 * 1000;
// Upsert vote
await sql.unsafe(
`INSERT INTO rvote.votes (user_id, proposal_id, weight, credit_cost, decays_at)
VALUES ($1, $2, $3, $4, NOW() + INTERVAL '30 days')
ON CONFLICT (user_id, proposal_id)
DO UPDATE SET weight = $3, credit_cost = $4, created_at = NOW(), decays_at = NOW() + INTERVAL '30 days'`,
[user.id, id, weight, creditCost]
);
// Upsert vote (keyed by userId)
_syncServer!.changeDoc<ProposalDoc>(match.docId, 'cast conviction vote', (d) => {
d.votes[userId] = {
id: d.votes[userId]?.id || newId(),
userId,
proposalId: id,
weight,
creditCost,
createdAt: now,
decaysAt: now + thirtyDaysMs,
};
});
// Recalculate score and check for promotion
const score = await recalcScore(id);
const space = await sql.unsafe(
"SELECT * FROM rvote.spaces WHERE slug = $1",
[proposal[0].space_slug]
);
const threshold = space[0]?.promotion_threshold || 100;
// Re-read doc, recalculate score
const updatedDoc = _syncServer!.getDoc<ProposalDoc>(match.docId)!;
const score = recalcScore(updatedDoc);
if (score >= threshold && proposal[0].status === "RANKING") {
const votingDays = space[0]?.voting_period_days || 7;
await sql.unsafe(
`UPDATE rvote.proposals SET status = 'VOTING', voting_ends_at = NOW() + ($1 || ' days')::INTERVAL, updated_at = NOW() WHERE id = $2`,
[votingDays, id]
);
// Update score on the doc
_syncServer!.changeDoc<ProposalDoc>(match.docId, 'update score', (d) => {
d.proposal.score = score;
d.proposal.updatedAt = Date.now();
});
// Check for promotion to VOTING phase
const spaceSlug = updatedDoc.proposal.spaceSlug;
const spaceDoc = _syncServer!.getDoc<ProposalDoc>(spaceConfigDocId(spaceSlug));
const threshold = spaceDoc?.spaceConfig?.promotionThreshold || 100;
if (score >= threshold && updatedDoc.proposal.status === "RANKING") {
const votingDays = spaceDoc?.spaceConfig?.votingPeriodDays || 7;
const votingEndsAt = Date.now() + votingDays * 24 * 60 * 60 * 1000;
_syncServer!.changeDoc<ProposalDoc>(match.docId, 'promote to voting', (d) => {
d.proposal.status = 'VOTING';
d.proposal.votingEndsAt = votingEndsAt;
d.proposal.updatedAt = Date.now();
});
}
return c.json({ ok: true, score, creditCost });
@ -307,30 +467,39 @@ routes.post("/api/proposals/:id/final-vote", async (c) => {
const { vote } = body;
if (!["YES", "NO", "ABSTAIN"].includes(vote)) return c.json({ error: "Invalid vote" }, 400);
const proposal = await sql.unsafe("SELECT * FROM rvote.proposals WHERE id = $1", [id]);
if (proposal.length === 0) return c.json({ error: "Proposal not found" }, 404);
if (proposal[0].status !== "VOTING") return c.json({ error: "Proposal not in voting phase" }, 400);
// Find proposal
const allDocs = listAllProposalDocs();
const match = allDocs.find((d) => d.doc.proposal.id === id);
if (!match) return c.json({ error: "Proposal not found" }, 404);
if (match.doc.proposal.status !== "VOTING") return c.json({ error: "Proposal not in voting phase" }, 400);
const user = await getOrCreateUser(claims.sub, claims.username);
await sql.unsafe(
`INSERT INTO rvote.final_votes (user_id, proposal_id, vote)
VALUES ($1, $2, $3)
ON CONFLICT (user_id, proposal_id) DO UPDATE SET vote = $3`,
[user.id, id, vote]
);
const userId = claims.sub;
const now = Date.now();
// Update counts
const counts = await sql.unsafe(
`SELECT vote, count(*) as cnt FROM rvote.final_votes WHERE proposal_id = $1 GROUP BY vote`,
[id]
);
// Upsert final vote (keyed by userId)
_syncServer!.changeDoc<ProposalDoc>(match.docId, 'cast final vote', (d) => {
d.finalVotes[userId] = {
id: d.finalVotes[userId]?.id || newId(),
userId,
proposalId: id,
vote: vote as 'YES' | 'NO' | 'ABSTAIN',
createdAt: now,
};
});
// Tally final votes
const updatedDoc = _syncServer!.getDoc<ProposalDoc>(match.docId)!;
const tally: Record<string, number> = { YES: 0, NO: 0, ABSTAIN: 0 };
for (const row of counts) tally[row.vote] = parseInt(row.cnt);
for (const fv of Object.values(updatedDoc.finalVotes)) {
tally[fv.vote] = (tally[fv.vote] || 0) + 1;
}
await sql.unsafe(
"UPDATE rvote.proposals SET final_yes = $1, final_no = $2, final_abstain = $3, updated_at = NOW() WHERE id = $4",
[tally.YES, tally.NO, tally.ABSTAIN, id]
);
_syncServer!.changeDoc<ProposalDoc>(match.docId, 'update final tally', (d) => {
d.proposal.finalYes = tally.YES;
d.proposal.finalNo = tally.NO;
d.proposal.finalAbstain = tally.ABSTAIN;
d.proposal.updatedAt = Date.now();
});
return c.json({ ok: true, tally });
});
@ -362,8 +531,7 @@ export const voteModule: RSpaceModule = {
landingPage: renderLanding,
async onInit(ctx) {
_syncServer = ctx.syncServer;
await initDB();
await seedDemoIfEmpty();
seedDemoIfEmpty();
},
feeds: [
{

View File

@ -3,51 +3,86 @@
*
* Multi-tenant collaborative workspace with drag-and-drop kanban,
* configurable statuses, and activity logging.
*
 * All persistence uses Automerge documents via SyncServer —
 * no PostgreSQL dependency.
*/
import { Hono } from "hono";
import { readFileSync } from "node:fs";
import { resolve } from "node:path";
import * as Automerge from '@automerge/automerge';
import { sql } from "../../shared/db/pool";
import { renderShell } from "../../server/shell";
import { getModuleInfoList } from "../../shared/module";
import type { RSpaceModule, SpaceLifecycleContext } from "../../shared/module";
import { verifyEncryptIDToken, extractToken } from "@encryptid/sdk/server";
import { renderLanding } from "./landing";
import type { SyncServer } from '../../server/local-first/sync-server';
import { boardSchema, boardDocId } from './schemas';
import type { BoardDoc, TaskItem } from './schemas';
import { boardSchema, boardDocId, createTaskItem } from './schemas';
import type { BoardDoc, TaskItem, BoardMeta } from './schemas';
const routes = new Hono();
// ── DB initialization ──
const SCHEMA_SQL = readFileSync(resolve(import.meta.dir, "db/schema.sql"), "utf-8");
// ── Local-first helpers ──
let _syncServer: SyncServer | null = null;
async function initDB() {
try {
await sql.unsafe(SCHEMA_SQL);
console.log("[Work] DB schema initialized");
} catch (e) {
console.error("[Work] DB init error:", e);
/**
* Lazily create the board Automerge doc if it doesn't exist yet.
* Returns the current (immutable) doc snapshot.
*/
/**
 * Lazily create the board Automerge doc if it doesn't exist yet.
 * Returns the current (immutable) doc snapshot.
 */
function ensureDoc(space: string, boardId?: string): BoardDoc {
  const board = boardId ?? space;
  const docId = boardDocId(space, board);

  // Fast path: the doc is already registered with the sync server.
  const existing = _syncServer!.getDoc<BoardDoc>(docId);
  if (existing) return existing;

  // Seed a fresh doc from the schema factory, then pin the identity
  // fields (space slug, board id/slug/name) before registering it.
  const fresh = Automerge.change(Automerge.init<BoardDoc>(), 'init board', (d) => {
    const seed = boardSchema.init();
    d.meta = seed.meta;
    d.meta.spaceSlug = space;
    d.board = seed.board;
    d.board.id = board;
    d.board.slug = board;
    d.board.name = space;
    d.tasks = {};
  });
  _syncServer!.setDoc(docId, fresh);
  return fresh;
}
async function seedDemoIfEmpty() {
try {
const count = await sql.unsafe("SELECT count(*)::int as cnt FROM rwork.spaces");
if (parseInt(count[0].cnt) > 0) return;
/**
* Get all board doc IDs for a given space.
*/
/**
 * Get all board doc IDs for a given space.
 */
function getBoardDocIds(space: string): string[] {
  const prefix = `${space}:work:boards:`;
  return _syncServer!.getDocIds().filter((docId) => docId.startsWith(prefix));
}
// Create workspace
const space = await sql.unsafe(
`INSERT INTO rwork.spaces (name, slug, description, icon, owner_did)
VALUES ('rSpace Development', 'rspace-dev', 'Building the cosmolocal r* ecosystem', '🚀', 'did:demo:seed')
RETURNING id`
);
const spaceId = space[0].id;
/**
* Seed demo data if no boards exist yet.
*/
function seedDemoIfEmpty() {
// Check if any work boards exist at all
const allWorkDocs = _syncServer!.getDocIds().filter((id) => id.includes(':work:boards:'));
if (allWorkDocs.length > 0) return;
// Seed tasks across all kanban columns
const tasks = [
const space = 'rspace-dev';
const docId = boardDocId(space, space);
const doc = Automerge.change(Automerge.init<BoardDoc>(), 'seed demo board', (d) => {
const now = Date.now();
d.meta = { module: 'work', collection: 'boards', version: 1, spaceSlug: space, createdAt: now };
d.board = {
id: space,
name: 'rSpace Development',
slug: space,
description: 'Building the cosmolocal r* ecosystem',
icon: null,
ownerDid: 'did:demo:seed',
statuses: ['TODO', 'IN_PROGRESS', 'REVIEW', 'DONE'],
labels: [],
createdAt: now,
updatedAt: now,
};
d.tasks = {};
const seedTasks: Array<{ title: string; status: string; priority: string; labels: string[]; sort: number }> = [
{ title: "Add dark mode toggle to settings page", status: "TODO", priority: "MEDIUM", labels: ["feature"], sort: 0 },
{ title: "Write API documentation for rPubs endpoints", status: "TODO", priority: "LOW", labels: ["docs"], sort: 1 },
{ title: "Investigate slow PDF generation on large documents", status: "TODO", priority: "HIGH", labels: ["bug"], sort: 2 },
@ -61,80 +96,52 @@ async function seedDemoIfEmpty() {
{ title: "Migrate email from Resend to self-hosted Mailcow", status: "DONE", priority: "MEDIUM", labels: ["chore"], sort: 3 },
];
for (const t of tasks) {
await sql.unsafe(
`INSERT INTO rwork.tasks (space_id, title, status, priority, labels, sort_order)
VALUES ($1, $2, $3, $4, $5, $6)`,
[spaceId, t.title, t.status, t.priority, t.labels, t.sort]
);
}
console.log("[Work] Demo data seeded: 1 workspace, 11 tasks");
} catch (e) {
console.error("[Work] Seed error:", e);
}
}
// ── Local-first helpers ──
let _syncServer: SyncServer | null = null;
function isLocalFirst(space: string): boolean {
if (!_syncServer) return false;
return _syncServer.getDocIds().some((id) => id.startsWith(`${space}:work:`));
}
function writeTaskToAutomerge(space: string, boardId: string, taskId: string, data: Partial<TaskItem>) {
if (!_syncServer) return;
const docId = boardDocId(space, boardId);
const existing = _syncServer.getDoc<BoardDoc>(docId);
if (!existing) return;
_syncServer.changeDoc<BoardDoc>(docId, `Update task ${taskId}`, (d) => {
if (!d.tasks[taskId]) {
d.tasks[taskId] = {
id: taskId,
spaceId: boardId,
title: '',
description: '',
status: 'TODO',
priority: null,
labels: [],
assigneeId: null,
createdBy: null,
sortOrder: 0,
createdAt: Date.now(),
updatedAt: Date.now(),
...data,
} as TaskItem;
} else {
Object.assign(d.tasks[taskId], data);
d.tasks[taskId].updatedAt = Date.now();
for (const t of seedTasks) {
const taskId = crypto.randomUUID();
d.tasks[taskId] = createTaskItem(taskId, space, t.title, {
status: t.status,
priority: t.priority,
labels: t.labels,
sortOrder: t.sort,
createdBy: 'did:demo:seed',
});
}
});
_syncServer!.setDoc(docId, doc);
console.log("[Work] Demo data seeded: 1 board, 11 tasks");
}
function deleteTaskFromAutomerge(space: string, boardId: string, taskId: string) {
if (!_syncServer) return;
const docId = boardDocId(space, boardId);
_syncServer.changeDoc<BoardDoc>(docId, `Delete task ${taskId}`, (d) => {
delete d.tasks[taskId];
});
}
// ── API: Spaces (Boards) ──
// ── API: Spaces ──
// GET /api/spaces — list workspaces
// GET /api/spaces — list workspaces (boards)
routes.get("/api/spaces", async (c) => {
const rows = await sql.unsafe(
`SELECT s.*, count(DISTINCT sm.id)::int as member_count, count(DISTINCT t.id)::int as task_count
FROM rwork.spaces s
LEFT JOIN rwork.space_members sm ON sm.space_id = s.id
LEFT JOIN rwork.tasks t ON t.space_id = s.id
GROUP BY s.id ORDER BY s.created_at DESC`
);
const allIds = _syncServer!.getDocIds().filter((id) => id.includes(':work:boards:'));
const rows = allIds.map((docId) => {
const doc = _syncServer!.getDoc<BoardDoc>(docId);
if (!doc) return null;
const taskCount = Object.keys(doc.tasks).length;
return {
id: doc.board.id,
name: doc.board.name,
slug: doc.board.slug,
description: doc.board.description,
icon: doc.board.icon,
owner_did: doc.board.ownerDid,
statuses: doc.board.statuses,
created_at: new Date(doc.board.createdAt).toISOString(),
updated_at: new Date(doc.board.updatedAt).toISOString(),
member_count: 0,
task_count: taskCount,
};
}).filter(Boolean);
// Sort by created_at DESC
rows.sort((a, b) => (b!.created_at > a!.created_at ? 1 : -1));
return c.json(rows);
});
// POST /api/spaces — create workspace
// POST /api/spaces — create workspace (board)
routes.post("/api/spaces", async (c) => {
const token = extractToken(c.req.raw.headers);
if (!token) return c.json({ error: "Authentication required" }, 401);
@ -146,20 +153,63 @@ routes.post("/api/spaces", async (c) => {
if (!name?.trim()) return c.json({ error: "Name required" }, 400);
const slug = name.trim().toLowerCase().replace(/[^a-z0-9]+/g, "-").replace(/^-|-$/g, "");
const rows = await sql.unsafe(
`INSERT INTO rwork.spaces (name, slug, description, icon, created_by)
VALUES ($1, $2, $3, $4, $5) RETURNING *`,
[name.trim(), slug, description || null, icon || null, claims.sub]
);
return c.json(rows[0], 201);
const docId = boardDocId(slug, slug);
// Check if board already exists
const existing = _syncServer!.getDoc<BoardDoc>(docId);
if (existing) return c.json({ error: "Space with this slug already exists" }, 409);
const now = Date.now();
const doc = Automerge.change(Automerge.init<BoardDoc>(), 'create board', (d) => {
d.meta = { module: 'work', collection: 'boards', version: 1, spaceSlug: slug, createdAt: now };
d.board = {
id: slug,
name: name.trim(),
slug,
description: description || '',
icon: icon || null,
ownerDid: claims.sub,
statuses: ['TODO', 'IN_PROGRESS', 'DONE'],
labels: [],
createdAt: now,
updatedAt: now,
};
d.tasks = {};
});
_syncServer!.setDoc(docId, doc);
return c.json({
id: slug,
name: name.trim(),
slug,
description: description || null,
icon: icon || null,
owner_did: claims.sub,
statuses: ['TODO', 'IN_PROGRESS', 'DONE'],
created_at: new Date(now).toISOString(),
updated_at: new Date(now).toISOString(),
}, 201);
});
// GET /api/spaces/:slug — workspace detail
routes.get("/api/spaces/:slug", async (c) => {
const slug = c.req.param("slug");
const rows = await sql.unsafe("SELECT * FROM rwork.spaces WHERE slug = $1", [slug]);
if (rows.length === 0) return c.json({ error: "Space not found" }, 404);
return c.json(rows[0]);
const docId = boardDocId(slug, slug);
const doc = _syncServer!.getDoc<BoardDoc>(docId);
if (!doc) return c.json({ error: "Space not found" }, 404);
return c.json({
id: doc.board.id,
name: doc.board.name,
slug: doc.board.slug,
description: doc.board.description,
icon: doc.board.icon,
owner_did: doc.board.ownerDid,
statuses: doc.board.statuses,
labels: doc.board.labels,
created_at: new Date(doc.board.createdAt).toISOString(),
updated_at: new Date(doc.board.updatedAt).toISOString(),
});
});
// ── API: Tasks ──
@ -167,15 +217,36 @@ routes.get("/api/spaces/:slug", async (c) => {
// GET /api/spaces/:slug/tasks — list tasks in workspace
routes.get("/api/spaces/:slug/tasks", async (c) => {
const slug = c.req.param("slug");
const rows = await sql.unsafe(
`SELECT t.*, u.username as assignee_name
FROM rwork.tasks t
JOIN rwork.spaces s ON s.id = t.space_id AND s.slug = $1
LEFT JOIN rwork.users u ON u.id = t.assignee_id
ORDER BY t.status, t.sort_order, t.created_at DESC`,
[slug]
);
return c.json(rows);
const doc = ensureDoc(slug);
const tasks = Object.values(doc.tasks).map((t) => ({
id: t.id,
space_id: t.spaceId,
title: t.title,
description: t.description,
status: t.status,
priority: t.priority,
labels: t.labels,
assignee_id: t.assigneeId,
assignee_name: null,
created_by: t.createdBy,
sort_order: t.sortOrder,
created_at: new Date(t.createdAt).toISOString(),
updated_at: new Date(t.updatedAt).toISOString(),
}));
// Sort by status, then sort_order, then created_at DESC
const statusOrder: Record<string, number> = {};
doc.board.statuses.forEach((s, i) => { statusOrder[s] = i; });
tasks.sort((a, b) => {
const sa = statusOrder[a.status] ?? 999;
const sb = statusOrder[b.status] ?? 999;
if (sa !== sb) return sa - sb;
if (a.sort_order !== b.sort_order) return a.sort_order - b.sort_order;
return b.created_at > a.created_at ? 1 : -1;
});
return c.json(tasks);
});
// POST /api/spaces/:slug/tasks — create task
@ -190,16 +261,36 @@ routes.post("/api/spaces/:slug/tasks", async (c) => {
const { title, description, status, priority, labels } = body;
if (!title?.trim()) return c.json({ error: "Title required" }, 400);
const space = await sql.unsafe("SELECT id, statuses FROM rwork.spaces WHERE slug = $1", [slug]);
if (space.length === 0) return c.json({ error: "Space not found" }, 404);
const doc = ensureDoc(slug);
const taskStatus = status || doc.board.statuses[0] || "TODO";
const taskId = crypto.randomUUID();
const now = Date.now();
const taskStatus = status || space[0].statuses?.[0] || "TODO";
const rows = await sql.unsafe(
`INSERT INTO rwork.tasks (space_id, title, description, status, priority, labels, created_by)
VALUES ($1, $2, $3, $4, $5, $6, $7) RETURNING *`,
[space[0].id, title.trim(), description || null, taskStatus, priority || "MEDIUM", labels || [], claims.sub]
);
return c.json(rows[0], 201);
const docId = boardDocId(slug, slug);
_syncServer!.changeDoc<BoardDoc>(docId, `Create task ${taskId}`, (d) => {
d.tasks[taskId] = createTaskItem(taskId, slug, title.trim(), {
description: description || '',
status: taskStatus,
priority: priority || 'MEDIUM',
labels: labels || [],
createdBy: claims.sub,
});
});
return c.json({
id: taskId,
space_id: slug,
title: title.trim(),
description: description || null,
status: taskStatus,
priority: priority || "MEDIUM",
labels: labels || [],
assignee_id: null,
created_by: claims.sub,
sort_order: 0,
created_at: new Date(now).toISOString(),
updated_at: new Date(now).toISOString(),
}, 201);
});
// PATCH /api/tasks/:id — update task (status change, assignment, etc.)
@ -215,51 +306,88 @@ routes.patch("/api/tasks/:id", async (c) => {
const body = await c.req.json();
const { title, description, status, priority, labels, sort_order, assignee_id } = body;
const fields: string[] = [];
const params: any[] = [];
let idx = 1;
// Check that at least one field is being updated
if (title === undefined && description === undefined && status === undefined &&
priority === undefined && labels === undefined && sort_order === undefined &&
assignee_id === undefined) {
return c.json({ error: "No fields to update" }, 400);
}
if (title !== undefined) { fields.push(`title = $${idx}`); params.push(title); idx++; }
if (description !== undefined) { fields.push(`description = $${idx}`); params.push(description); idx++; }
if (status !== undefined) { fields.push(`status = $${idx}`); params.push(status); idx++; }
if (priority !== undefined) { fields.push(`priority = $${idx}`); params.push(priority); idx++; }
if (labels !== undefined) { fields.push(`labels = $${idx}`); params.push(labels); idx++; }
if (sort_order !== undefined) { fields.push(`sort_order = $${idx}`); params.push(sort_order); idx++; }
if (assignee_id !== undefined) { fields.push(`assignee_id = $${idx}`); params.push(assignee_id || null); idx++; }
// Find which board doc contains this task
const allBoardIds = _syncServer!.getDocIds().filter((docId) => docId.includes(':work:boards:'));
let targetDocId: string | null = null;
for (const docId of allBoardIds) {
const doc = _syncServer!.getDoc<BoardDoc>(docId);
if (doc && doc.tasks[id]) {
targetDocId = docId;
break;
}
}
if (!targetDocId) return c.json({ error: "Task not found" }, 404);
if (fields.length === 0) return c.json({ error: "No fields to update" }, 400);
fields.push("updated_at = NOW()");
params.push(id);
_syncServer!.changeDoc<BoardDoc>(targetDocId, `Update task ${id}`, (d) => {
const task = d.tasks[id];
if (!task) return;
if (title !== undefined) task.title = title;
if (description !== undefined) task.description = description;
if (status !== undefined) task.status = status;
if (priority !== undefined) task.priority = priority;
if (labels !== undefined) task.labels = labels;
if (sort_order !== undefined) task.sortOrder = sort_order;
if (assignee_id !== undefined) task.assigneeId = assignee_id || null;
task.updatedAt = Date.now();
});
const rows = await sql.unsafe(
`UPDATE rwork.tasks SET ${fields.join(", ")} WHERE id = $${idx} RETURNING *`,
params
);
if (rows.length === 0) return c.json({ error: "Task not found" }, 404);
return c.json(rows[0]);
// Return the updated task
const updatedDoc = _syncServer!.getDoc<BoardDoc>(targetDocId)!;
const task = updatedDoc.tasks[id];
return c.json({
id: task.id,
space_id: task.spaceId,
title: task.title,
description: task.description,
status: task.status,
priority: task.priority,
labels: task.labels,
assignee_id: task.assigneeId,
created_by: task.createdBy,
sort_order: task.sortOrder,
created_at: new Date(task.createdAt).toISOString(),
updated_at: new Date(task.updatedAt).toISOString(),
});
});
// DELETE /api/tasks/:id
routes.delete("/api/tasks/:id", async (c) => {
const result = await sql.unsafe("DELETE FROM rwork.tasks WHERE id = $1 RETURNING id", [c.req.param("id")]);
if (result.length === 0) return c.json({ error: "Task not found" }, 404);
const id = c.req.param("id");
// Find which board doc contains this task
const allBoardIds = _syncServer!.getDocIds().filter((docId) => docId.includes(':work:boards:'));
let targetDocId: string | null = null;
for (const docId of allBoardIds) {
const doc = _syncServer!.getDoc<BoardDoc>(docId);
if (doc && doc.tasks[id]) {
targetDocId = docId;
break;
}
}
if (!targetDocId) return c.json({ error: "Task not found" }, 404);
_syncServer!.changeDoc<BoardDoc>(targetDocId, `Delete task ${id}`, (d) => {
delete d.tasks[id];
});
return c.json({ ok: true });
});
// ── API: Activity ──
// GET /api/spaces/:slug/activity — recent activity
// With Automerge, activity is tracked via document change history.
// Return an empty array for now; real activity can be derived from
// Automerge.getHistory() or a dedicated activity doc in the future.
routes.get("/api/spaces/:slug/activity", async (c) => {
const slug = c.req.param("slug");
const rows = await sql.unsafe(
`SELECT a.*, u.username
FROM rwork.activity_log a
JOIN rwork.spaces s ON s.id = a.space_id AND s.slug = $1
LEFT JOIN rwork.users u ON u.id = a.user_id
ORDER BY a.created_at DESC LIMIT 50`,
[slug]
);
return c.json(rows);
return c.json([]);
});
// ── Page route ──
@ -289,8 +417,7 @@ export const workModule: RSpaceModule = {
landingPage: renderLanding,
async onInit(ctx) {
_syncServer = ctx.syncServer;
await initDB();
await seedDemoIfEmpty();
seedDemoIfEmpty();
},
async onSpaceCreate(ctx: SpaceLifecycleContext) {
if (!_syncServer) return;

View File

@ -0,0 +1,258 @@
/**
* Test: Automerge round-trip create, save, load, sync, verify.
*
* Exercises the full local-first stack:
* 1. SyncServer (in-memory doc management)
* 2. Doc persistence (save to disk + load from disk)
* 3. Schema init factories (NotebookDoc, BoardDoc, etc.)
* 4. Doc change + onDocChange callback
*
* Usage: bun run scripts/test-automerge-roundtrip.ts
*/
// Must set env BEFORE imports (doc-persistence reads it at module level)
const TEST_DIR = '/tmp/rspace-automerge-test';
process.env.DOCS_STORAGE_DIR = TEST_DIR;
import * as Automerge from '@automerge/automerge';
import { mkdirSync, rmSync, existsSync, readdirSync } from 'node:fs';
import { readFile } from 'node:fs/promises';
import { resolve } from 'node:path';
import { SyncServer } from '../server/local-first/sync-server';
import { docIdToPath, saveDoc, loadAllDocs } from '../server/local-first/doc-persistence';
// Cleanup from previous runs
if (existsSync(TEST_DIR)) rmSync(TEST_DIR, { recursive: true });
mkdirSync(TEST_DIR, { recursive: true });
let passed = 0;
let failed = 0;
// Minimal test helper: logs the label and bumps the pass/fail counters
// (declared at module level above).
function assert(condition: boolean, label: string) {
  if (!condition) {
    console.error(`${label}`);
    failed += 1;
    return;
  }
  console.log(`${label}`);
  passed += 1;
}
// ─── Test 1: docIdToPath mapping ──────────────────────────
console.log('\n── Test 1: docId ↔ path mapping ──');
{
  // Happy path: a 4-part docId maps to a nested directory plus .automerge file.
  const notebookPath = docIdToPath('demo:notes:notebooks:abc');
  assert(notebookPath.endsWith('/demo/notes/notebooks/abc.automerge'), `docIdToPath → ${notebookPath}`);
  const boardPath = docIdToPath('myspace:work:boards:board-1');
  assert(boardPath.endsWith('/myspace/work/boards/board-1.automerge'), `docIdToPath boards → ${boardPath}`);

  // A docId with fewer than 3 segments must be rejected, not mapped.
  let rejected = false;
  try {
    docIdToPath('invalid');
  } catch {
    rejected = true;
  }
  assert(rejected, 'docIdToPath rejects invalid docId (< 3 parts)');
}
// ─── Test 2: SyncServer in-memory CRUD ─────────────────────
console.log('\n── Test 2: SyncServer in-memory CRUD ──');
{
  interface TestDoc { title: string; items: Record<string, { text: string }> }
  const changeLog: string[] = [];
  const server = new SyncServer({
    participantMode: true,
    onDocChange: (docId) => changeLog.push(docId),
  });

  // Register a fresh doc with the server.
  const initial = Automerge.change(Automerge.init<TestDoc>(), 'init', (d) => {
    d.title = 'Test Notebook';
    d.items = {};
  });
  server.setDoc('test:notes:notebooks:nb1', initial);
  assert(server.getDocIds().includes('test:notes:notebooks:nb1'), 'setDoc registers docId');

  // Read it back.
  const fetched = server.getDoc<TestDoc>('test:notes:notebooks:nb1');
  assert(fetched !== undefined, 'getDoc returns the doc');
  assert(fetched!.title === 'Test Notebook', 'doc content preserved');

  // Mutate through changeDoc; the change callback should fire exactly once.
  const mutated = server.changeDoc<TestDoc>('test:notes:notebooks:nb1', 'add item', (d) => {
    d.items['item-1'] = { text: 'Hello local-first' };
  });
  assert(mutated !== null, 'changeDoc returns updated doc');
  assert(mutated!.items['item-1'].text === 'Hello local-first', 'changeDoc content correct');
  assert(changeLog.length === 1, 'onDocChange callback fired');

  // The server's own copy must reflect the mutation, not just the return value.
  const roundTrip = server.getDoc<TestDoc>('test:notes:notebooks:nb1');
  assert(roundTrip!.items['item-1'].text === 'Hello local-first', 'server copy updated after changeDoc');
}
// ─── Test 3: Relay mode ────────────────────────────────────
console.log('\n── Test 3: Relay mode (encrypted spaces) ──');
{
  const server = new SyncServer({ participantMode: true });
  assert(!server.isRelayOnly('demo:notes:notebooks:x'), 'not relay by default');

  // Marking a space relay-only applies to the space itself and any doc under it.
  server.setRelayOnly('encrypted-space', true);
  assert(server.isRelayOnly('encrypted-space'), 'exact match → relay');
  assert(server.isRelayOnly('encrypted-space:notes:notebooks:x'), 'prefix match → relay');
  assert(!server.isRelayOnly('other-space:notes:notebooks:x'), 'other space → not relay');

  // Clearing the flag restores normal participant behavior for the whole space.
  server.setRelayOnly('encrypted-space', false);
  assert(!server.isRelayOnly('encrypted-space:notes:notebooks:x'), 'after removal → not relay');
}
// ─── Test 4: Disk persistence round-trip ───────────────────
// Note: We test Automerge binary serialization directly rather than using
// doc-persistence (which reads DOCS_STORAGE_DIR at module load time).
console.log('\n── Test 4: Disk persistence round-trip ──');
await (async () => {
  interface NoteDoc { title: string; content: string }

  // Build a one-change doc and serialize it to Automerge binary form.
  const original = Automerge.change(Automerge.init<NoteDoc>(), 'init', (d) => {
    d.title = 'Persistent Note';
    d.content = 'This should survive a restart';
  });
  const binary = Automerge.save(original);
  assert(binary.byteLength > 0, `Automerge.save produces ${binary.byteLength} bytes`);

  // Write to the temp dir (fs helpers imported lazily, aliased short).
  const { mkdir: mk, writeFile: wf, readFile: rf } = await import('node:fs/promises');
  const { dirname: dn } = await import('node:path');
  const filePath = resolve(TEST_DIR, 'roundtrip/notes/notebooks/persist-1.automerge');
  await mk(dn(filePath), { recursive: true });
  await wf(filePath, binary);
  assert(existsSync(filePath), `file written to disk`);

  // Read back, deserialize, and verify content plus change history.
  const firstRead = await rf(filePath);
  const reloaded = Automerge.load<NoteDoc>(new Uint8Array(firstRead));
  assert(reloaded.title === 'Persistent Note', 'title preserved after load');
  assert(reloaded.content === 'This should survive a restart', 'content preserved after load');
  const history = Automerge.getHistory(reloaded);
  assert(history.length === 1, `history has ${history.length} change(s)`);

  // Modify, save again, load again — history should grow by one change.
  const doc2 = Automerge.change(reloaded, 'update', (d) => {
    d.content = 'Updated content after reload';
  });
  await wf(filePath, Automerge.save(doc2));
  const secondRead = await rf(filePath);
  const reloaded2 = Automerge.load<NoteDoc>(new Uint8Array(secondRead));
  assert(reloaded2.content === 'Updated content after reload', 'content updated after second save/load');
  assert(Automerge.getHistory(reloaded2).length === 2, 'history has 2 changes after update');

  // Hand the on-disk doc to a fresh SyncServer, as a process restart would.
  const server2 = new SyncServer({ participantMode: true });
  const raw = await rf(filePath);
  const loadedDoc = Automerge.load<NoteDoc>(new Uint8Array(raw));
  server2.setDoc('roundtrip:notes:notebooks:persist-1', loadedDoc);
  const fromServer = server2.getDoc<NoteDoc>('roundtrip:notes:notebooks:persist-1');
  assert(fromServer!.title === 'Persistent Note', 'SyncServer holds correct doc from disk');
})();
// ─── Test 5: Multiple docs + listDocs ──────────────────────
console.log('\n── Test 5: Multiple docs + listing ──');
await (async () => {
  const server = new SyncServer({ participantMode: true });

  // Register three docs across two spaces; the docId doubles as the label.
  const seedIds = ['space-a:work:boards:b1', 'space-a:work:boards:b2', 'space-b:cal:events'];
  for (const id of seedIds) {
    const doc = Automerge.change(Automerge.init<{ label: string }>(), 'init', (d) => {
      d.label = id;
    });
    server.setDoc(id, doc);
  }

  const ids = server.getDocIds();
  assert(ids.length === 3, `3 docs registered (got ${ids.length})`);
  assert(ids.includes('space-a:work:boards:b1'), 'board b1 listed');
  assert(ids.includes('space-b:cal:events'), 'cal events listed');
})();
// ─── Test 6: Peer subscribe + sync message flow ────────────
console.log('\n── Test 6: Peer subscribe + sync flow ──');
{
  interface SimpleDoc { value: number }
  const outbox: Array<{ peerId: string; msg: string }> = [];
  const server = new SyncServer({ participantMode: true });

  // Server-side doc the peer will subscribe to.
  const metrics = Automerge.change(Automerge.init<SimpleDoc>(), 'set value', (d) => {
    d.value = 42;
  });
  server.setDoc('sync-test:data:metrics', metrics);

  // Fake WebSocket that just records outgoing frames.
  const mockWs = {
    send: (data: string) => outbox.push({ peerId: 'peer-1', msg: data }),
    readyState: 1,
  };
  server.addPeer('peer-1', mockWs);

  // Subscribing should register the peer and push an initial sync message.
  server.handleMessage('peer-1', JSON.stringify({
    type: 'subscribe',
    docIds: ['sync-test:data:metrics'],
  }));
  assert(server.getDocSubscribers('sync-test:data:metrics').includes('peer-1'), 'peer subscribed');
  assert(outbox.length > 0, `sync message sent to peer (${outbox.length} message(s))`);

  // The first frame must be valid JSON of type 'sync' for the subscribed doc.
  const firstMsg = JSON.parse(outbox[0].msg);
  assert(firstMsg.type === 'sync', `message type is 'sync'`);
  assert(firstMsg.docId === 'sync-test:data:metrics', 'correct docId in sync message');
  assert(Array.isArray(firstMsg.data), 'sync data is array (Uint8Array serialized)');

  // Removing the peer should also drop its subscriptions.
  server.removePeer('peer-1');
  assert(!server.getPeerIds().includes('peer-1'), 'peer removed');
  assert(server.getDocSubscribers('sync-test:data:metrics').length === 0, 'subscriber cleaned up');
}
// ─── Test 7: Ping/pong ────────────────────────────────────
console.log('\n── Test 7: Ping/pong ──');
{
  const frames: string[] = [];
  const server = new SyncServer({ participantMode: true });
  const mockWs = {
    send: (data: string) => frames.push(data),
    readyState: 1,
  };
  server.addPeer('ping-peer', mockWs);

  // A ping frame should be answered with exactly one pong.
  server.handleMessage('ping-peer', JSON.stringify({ type: 'ping' }));
  assert(frames.length === 1, 'pong sent');
  assert(JSON.parse(frames[0]).type === 'pong', 'response is pong');
  server.removePeer('ping-peer');
}
// ─── Summary ───────────────────────────────────────────────
// Print the pass/fail summary, remove the temp dir, and exit non-zero on failure.
const bar = '═'.repeat(50);
console.log(`\n${bar}`);
console.log(`  ${passed} passed, ${failed} failed`);
console.log(`${bar}\n`);

rmSync(TEST_DIR, { recursive: true });
process.exit(failed > 0 ? 1 : 0);