Merge branch 'dev'
This commit is contained in:
commit
58a864b991
|
|
@ -0,0 +1,209 @@
|
|||
/**
|
||||
* rNotes Local-First Client
|
||||
*
|
||||
* Wraps the shared local-first stack (DocSyncManager + EncryptedDocStore)
|
||||
* into a notes-specific API. This replaces the manual WebSocket + REST
|
||||
* approach in folk-notes-app with proper offline support and encryption.
|
||||
*
|
||||
* Usage:
|
||||
* const client = new NotesLocalFirstClient(space);
|
||||
* await client.init();
|
||||
 * const notebooks = client.listNotebookIds();
|
||||
 * client.onChange(notebookId, (doc) => { ... });
|
||||
* client.disconnect();
|
||||
*/
|
||||
|
||||
import * as Automerge from '@automerge/automerge';
|
||||
import { DocumentManager } from '../../shared/local-first/document';
|
||||
import type { DocumentId } from '../../shared/local-first/document';
|
||||
import { EncryptedDocStore } from '../../shared/local-first/storage';
|
||||
import { DocSyncManager } from '../../shared/local-first/sync';
|
||||
import { DocCrypto } from '../../shared/local-first/crypto';
|
||||
import { notebookSchema, notebookDocId } from './schemas';
|
||||
import type { NotebookDoc, NoteItem, NotebookMeta } from './schemas';
|
||||
|
||||
export class NotesLocalFirstClient {
|
||||
#space: string;
|
||||
#documents: DocumentManager;
|
||||
#store: EncryptedDocStore;
|
||||
#sync: DocSyncManager;
|
||||
#initialized = false;
|
||||
|
||||
constructor(space: string, docCrypto?: DocCrypto) {
|
||||
this.#space = space;
|
||||
this.#documents = new DocumentManager();
|
||||
this.#store = new EncryptedDocStore(space, docCrypto);
|
||||
this.#sync = new DocSyncManager({
|
||||
documents: this.#documents,
|
||||
store: this.#store,
|
||||
});
|
||||
|
||||
// Register the notebook schema
|
||||
this.#documents.registerSchema(notebookSchema);
|
||||
}
|
||||
|
||||
get isConnected(): boolean { return this.#sync.isConnected; }
|
||||
get isInitialized(): boolean { return this.#initialized; }
|
||||
|
||||
/**
|
||||
* Initialize: open IndexedDB, load cached docs, connect to sync server.
|
||||
*/
|
||||
async init(): Promise<void> {
|
||||
if (this.#initialized) return;
|
||||
|
||||
// Open IndexedDB store
|
||||
await this.#store.open();
|
||||
|
||||
// Load any cached notebook docs from IndexedDB
|
||||
const cachedIds = await this.#store.listByModule('notes', 'notebooks');
|
||||
for (const docId of cachedIds) {
|
||||
const binary = await this.#store.load(docId);
|
||||
if (binary) {
|
||||
this.#documents.open<NotebookDoc>(docId, notebookSchema, binary);
|
||||
}
|
||||
}
|
||||
|
||||
// Connect to sync server
|
||||
const proto = location.protocol === 'https:' ? 'wss:' : 'ws:';
|
||||
const wsUrl = `${proto}//${location.host}/ws/${this.#space}`;
|
||||
try {
|
||||
await this.#sync.connect(wsUrl, this.#space);
|
||||
} catch {
|
||||
console.warn('[NotesClient] WebSocket connection failed, working offline');
|
||||
}
|
||||
|
||||
this.#initialized = true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Subscribe to a specific notebook doc for real-time sync.
|
||||
*/
|
||||
async subscribeNotebook(notebookId: string): Promise<NotebookDoc | null> {
|
||||
const docId = notebookDocId(this.#space, notebookId) as DocumentId;
|
||||
|
||||
// Open or get existing doc
|
||||
let doc = this.#documents.get<NotebookDoc>(docId);
|
||||
if (!doc) {
|
||||
// Try loading from IndexedDB
|
||||
const binary = await this.#store.load(docId);
|
||||
if (binary) {
|
||||
doc = this.#documents.open<NotebookDoc>(docId, notebookSchema, binary);
|
||||
} else {
|
||||
// Create empty placeholder — server will fill via sync
|
||||
doc = this.#documents.open<NotebookDoc>(docId, notebookSchema);
|
||||
}
|
||||
}
|
||||
|
||||
// Subscribe for sync
|
||||
await this.#sync.subscribe([docId]);
|
||||
|
||||
return doc ?? null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Unsubscribe from a notebook's sync.
|
||||
*/
|
||||
unsubscribeNotebook(notebookId: string): void {
|
||||
const docId = notebookDocId(this.#space, notebookId) as DocumentId;
|
||||
this.#sync.unsubscribe([docId]);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a notebook doc (already opened).
|
||||
*/
|
||||
getNotebook(notebookId: string): NotebookDoc | undefined {
|
||||
const docId = notebookDocId(this.#space, notebookId) as DocumentId;
|
||||
return this.#documents.get<NotebookDoc>(docId);
|
||||
}
|
||||
|
||||
/**
|
||||
* List all notebook IDs for this space.
|
||||
*/
|
||||
listNotebookIds(): string[] {
|
||||
return this.#documents.list(this.#space, 'notes');
|
||||
}
|
||||
|
||||
/**
|
||||
* Update a note within a notebook (creates if it doesn't exist).
|
||||
*/
|
||||
updateNote(notebookId: string, noteId: string, changes: Partial<NoteItem>): void {
|
||||
const docId = notebookDocId(this.#space, notebookId) as DocumentId;
|
||||
this.#sync.change<NotebookDoc>(docId, `Update note ${noteId}`, (d) => {
|
||||
if (!d.items[noteId]) {
|
||||
d.items[noteId] = {
|
||||
id: noteId,
|
||||
notebookId,
|
||||
authorId: null,
|
||||
title: '',
|
||||
content: '',
|
||||
contentPlain: '',
|
||||
type: 'NOTE',
|
||||
url: null,
|
||||
language: null,
|
||||
fileUrl: null,
|
||||
mimeType: null,
|
||||
fileSize: null,
|
||||
duration: null,
|
||||
isPinned: false,
|
||||
sortOrder: 0,
|
||||
tags: [],
|
||||
createdAt: Date.now(),
|
||||
updatedAt: Date.now(),
|
||||
...changes,
|
||||
};
|
||||
} else {
|
||||
const item = d.items[noteId];
|
||||
Object.assign(item, changes);
|
||||
item.updatedAt = Date.now();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a note from a notebook.
|
||||
*/
|
||||
deleteNote(notebookId: string, noteId: string): void {
|
||||
const docId = notebookDocId(this.#space, notebookId) as DocumentId;
|
||||
this.#sync.change<NotebookDoc>(docId, `Delete note ${noteId}`, (d) => {
|
||||
delete d.items[noteId];
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Update notebook metadata.
|
||||
*/
|
||||
updateNotebook(notebookId: string, changes: Partial<NotebookMeta>): void {
|
||||
const docId = notebookDocId(this.#space, notebookId) as DocumentId;
|
||||
this.#sync.change<NotebookDoc>(docId, 'Update notebook', (d) => {
|
||||
Object.assign(d.notebook, changes);
|
||||
d.notebook.updatedAt = Date.now();
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Listen for changes to a notebook doc.
|
||||
*/
|
||||
onChange(notebookId: string, cb: (doc: NotebookDoc) => void): () => void {
|
||||
const docId = notebookDocId(this.#space, notebookId) as DocumentId;
|
||||
return this.#sync.onChange(docId, cb as (doc: any) => void);
|
||||
}
|
||||
|
||||
/**
|
||||
* Listen for connection/disconnection events.
|
||||
*/
|
||||
onConnect(cb: () => void): () => void {
|
||||
return this.#sync.onConnect(cb);
|
||||
}
|
||||
|
||||
onDisconnect(cb: () => void): () => void {
|
||||
return this.#sync.onDisconnect(cb);
|
||||
}
|
||||
|
||||
/**
|
||||
* Flush all pending saves to IndexedDB and disconnect.
|
||||
*/
|
||||
async disconnect(): Promise<void> {
|
||||
await this.#sync.flush();
|
||||
this.#sync.disconnect();
|
||||
}
|
||||
}
|
||||
|
|
@ -3,17 +3,23 @@
|
|||
*
|
||||
* Port of rnotes-online (Next.js + Prisma → Hono + postgres.js).
|
||||
* Supports multiple note types: text, code, bookmark, audio, image, file.
|
||||
*
|
||||
* Local-first migration: dual-write (Automerge + PG) during transition.
|
||||
*/
|
||||
|
||||
import { Hono } from "hono";
|
||||
import { readFileSync } from "node:fs";
|
||||
import { resolve } from "node:path";
|
||||
import * as Automerge from "@automerge/automerge";
|
||||
import { sql } from "../../shared/db/pool";
|
||||
import { renderShell } from "../../server/shell";
|
||||
import { getModuleInfoList } from "../../shared/module";
|
||||
import type { RSpaceModule } from "../../shared/module";
|
||||
import type { RSpaceModule, SpaceLifecycleContext } from "../../shared/module";
|
||||
import { verifyEncryptIDToken, extractToken } from "@encryptid/sdk/server";
|
||||
import { renderLanding } from "./landing";
|
||||
import { notebookSchema, notebookDocId } from "./schemas";
|
||||
import type { NotebookDoc, NoteItem } from "./schemas";
|
||||
import type { SyncServer } from "../../server/local-first/sync-server";
|
||||
|
||||
const routes = new Hono();
|
||||
|
||||
|
|
@ -121,7 +127,131 @@ async function seedDemoIfEmpty() {
|
|||
}
|
||||
}
|
||||
|
||||
initDB().then(seedDemoIfEmpty);
|
||||
// initDB + seedDemo are called from onInit lifecycle hook (see module export below)
|
||||
|
||||
// ── SyncServer ref (set during onInit) ──
|
||||
let _syncServer: SyncServer | null = null;
|
||||
|
||||
/** Check if a space has been migrated to local-first for notes. */
|
||||
function isLocalFirst(space: string): boolean {
|
||||
if (!_syncServer) return false;
|
||||
// A space is local-first if any notebook doc exists for it in the SyncServer
|
||||
// We check by looking for docs with the pattern {space}:notes:notebooks:*
|
||||
return _syncServer.getDoc(`${space}:notes:notebooks:default`) !== undefined;
|
||||
}
|
||||
|
||||
// ── Automerge ↔ REST conversion helpers ──
|
||||
|
||||
/** List all notebook docs for a space from the SyncServer. */
|
||||
function listAutomergeNotebooks(space: string): { docId: string; doc: NotebookDoc }[] {
|
||||
if (!_syncServer) return [];
|
||||
const results: { docId: string; doc: NotebookDoc }[] = [];
|
||||
const prefix = `${space}:notes:notebooks:`;
|
||||
for (const docId of _syncServer.listDocs()) {
|
||||
if (docId.startsWith(prefix)) {
|
||||
const doc = _syncServer.getDoc<NotebookDoc>(docId);
|
||||
if (doc) results.push({ docId, doc });
|
||||
}
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
/** Convert an Automerge NotebookDoc to REST API format. */
|
||||
function notebookToRest(doc: NotebookDoc) {
|
||||
const nb = doc.notebook;
|
||||
return {
|
||||
id: nb.id,
|
||||
title: nb.title,
|
||||
slug: nb.slug,
|
||||
description: nb.description,
|
||||
cover_color: nb.coverColor,
|
||||
is_public: nb.isPublic,
|
||||
note_count: String(Object.keys(doc.items).length),
|
||||
created_at: new Date(nb.createdAt).toISOString(),
|
||||
updated_at: new Date(nb.updatedAt).toISOString(),
|
||||
};
|
||||
}
|
||||
|
||||
/** Convert an Automerge NoteItem to REST API format. */
|
||||
function noteToRest(item: NoteItem) {
|
||||
return {
|
||||
id: item.id,
|
||||
notebook_id: item.notebookId,
|
||||
title: item.title,
|
||||
content: item.content,
|
||||
content_plain: item.contentPlain,
|
||||
type: item.type,
|
||||
tags: item.tags.length > 0 ? item.tags : null,
|
||||
is_pinned: item.isPinned,
|
||||
sort_order: item.sortOrder,
|
||||
url: item.url,
|
||||
language: item.language,
|
||||
file_url: item.fileUrl,
|
||||
mime_type: item.mimeType,
|
||||
file_size: item.fileSize,
|
||||
duration: item.duration,
|
||||
created_at: new Date(item.createdAt).toISOString(),
|
||||
updated_at: new Date(item.updatedAt).toISOString(),
|
||||
};
|
||||
}
|
||||
|
||||
/** Find the notebook doc that contains a given note ID. */
|
||||
function findNoteInAutomerge(space: string, noteId: string): { docId: string; doc: NotebookDoc; item: NoteItem } | null {
|
||||
for (const { docId, doc } of listAutomergeNotebooks(space)) {
|
||||
const item = doc.items[noteId];
|
||||
if (item) return { docId, doc, item };
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/** Write a note to the Automerge doc (creates/updates). */
|
||||
function writeNoteToAutomerge(space: string, notebookPgId: string, noteId: string, data: Partial<NoteItem>): void {
|
||||
if (!_syncServer) return;
|
||||
// Find the Automerge notebook doc for this PG notebook
|
||||
// Convention: PG notebook UUID maps to docId suffix
|
||||
const docId = notebookDocId(space, notebookPgId);
|
||||
const doc = _syncServer.getDoc<NotebookDoc>(docId);
|
||||
if (!doc) return; // not migrated yet
|
||||
|
||||
_syncServer.changeDoc<NotebookDoc>(docId, `Update note ${noteId}`, (d) => {
|
||||
if (!d.items[noteId]) {
|
||||
// New note
|
||||
d.items[noteId] = {
|
||||
id: noteId,
|
||||
notebookId: notebookPgId,
|
||||
authorId: data.authorId ?? null,
|
||||
title: data.title ?? '',
|
||||
content: data.content ?? '',
|
||||
contentPlain: data.contentPlain ?? '',
|
||||
type: data.type ?? 'NOTE',
|
||||
url: data.url ?? null,
|
||||
language: data.language ?? null,
|
||||
fileUrl: data.fileUrl ?? null,
|
||||
mimeType: data.mimeType ?? null,
|
||||
fileSize: data.fileSize ?? null,
|
||||
duration: data.duration ?? null,
|
||||
isPinned: data.isPinned ?? false,
|
||||
sortOrder: data.sortOrder ?? 0,
|
||||
tags: data.tags ?? [],
|
||||
createdAt: data.createdAt ?? Date.now(),
|
||||
updatedAt: Date.now(),
|
||||
};
|
||||
} else {
|
||||
// Update existing fields
|
||||
const item = d.items[noteId];
|
||||
if (data.title !== undefined) item.title = data.title;
|
||||
if (data.content !== undefined) item.content = data.content;
|
||||
if (data.contentPlain !== undefined) item.contentPlain = data.contentPlain;
|
||||
if (data.type !== undefined) item.type = data.type;
|
||||
if (data.url !== undefined) item.url = data.url;
|
||||
if (data.language !== undefined) item.language = data.language;
|
||||
if (data.isPinned !== undefined) item.isPinned = data.isPinned;
|
||||
if (data.sortOrder !== undefined) item.sortOrder = data.sortOrder;
|
||||
if (data.tags !== undefined) item.tags = data.tags;
|
||||
item.updatedAt = Date.now();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// ── Helper: get or create user ──
|
||||
async function getOrCreateUser(did: string, username?: string) {
|
||||
|
|
@ -136,8 +266,18 @@ async function getOrCreateUser(did: string, username?: string) {
|
|||
|
||||
// ── Notebooks API ──
|
||||
|
||||
// GET /api/notebooks — list notebooks
|
||||
// GET /api/notebooks — list notebooks (Automerge-first, PG fallback)
|
||||
routes.get("/api/notebooks", async (c) => {
|
||||
const space = c.req.param("space") || "demo";
|
||||
|
||||
// Try Automerge first
|
||||
if (isLocalFirst(space)) {
|
||||
const notebooks = listAutomergeNotebooks(space).map(({ doc }) => notebookToRest(doc));
|
||||
notebooks.sort((a, b) => new Date(b.updated_at).getTime() - new Date(a.updated_at).getTime());
|
||||
return c.json({ notebooks, source: "automerge" });
|
||||
}
|
||||
|
||||
// PG fallback
|
||||
const rows = await sql.unsafe(
|
||||
`SELECT n.*, count(note.id) as note_count
|
||||
FROM rnotes.notebooks n
|
||||
|
|
@ -148,8 +288,9 @@ routes.get("/api/notebooks", async (c) => {
|
|||
return c.json({ notebooks: rows });
|
||||
});
|
||||
|
||||
// POST /api/notebooks — create notebook
|
||||
// POST /api/notebooks — create notebook (dual-write)
|
||||
routes.post("/api/notebooks", async (c) => {
|
||||
const space = c.req.param("space") || "demo";
|
||||
const token = extractToken(c.req.raw.headers);
|
||||
if (!token) return c.json({ error: "Authentication required" }, 401);
|
||||
let claims;
|
||||
|
|
@ -158,18 +299,58 @@ routes.post("/api/notebooks", async (c) => {
|
|||
const body = await c.req.json();
|
||||
const { title, description, cover_color } = body;
|
||||
|
||||
// PG write
|
||||
const user = await getOrCreateUser(claims.sub, claims.username);
|
||||
const rows = await sql.unsafe(
|
||||
`INSERT INTO rnotes.notebooks (title, description, cover_color, owner_id)
|
||||
VALUES ($1, $2, $3, $4) RETURNING *`,
|
||||
[title || "Untitled Notebook", description || null, cover_color || "#3b82f6", user.id]
|
||||
);
|
||||
return c.json(rows[0], 201);
|
||||
const pgRow = rows[0];
|
||||
|
||||
// Automerge dual-write: create a new notebook doc
|
||||
if (_syncServer && isLocalFirst(space)) {
|
||||
const docId = notebookDocId(space, pgRow.id);
|
||||
if (!_syncServer.getDoc(docId)) {
|
||||
const doc = Automerge.init<NotebookDoc>();
|
||||
const initialized = Automerge.change(doc, "Create notebook", (d) => {
|
||||
d.meta = { module: "notes", collection: "notebooks", version: 1, spaceSlug: space, createdAt: Date.now() };
|
||||
d.notebook = {
|
||||
id: pgRow.id, title: pgRow.title, slug: pgRow.slug || "",
|
||||
description: pgRow.description || "", coverColor: pgRow.cover_color || "#3b82f6",
|
||||
isPublic: pgRow.is_public || false, createdAt: Date.now(), updatedAt: Date.now(),
|
||||
};
|
||||
d.items = {};
|
||||
});
|
||||
_syncServer.setDoc(docId, initialized);
|
||||
}
|
||||
}
|
||||
|
||||
return c.json(pgRow, 201);
|
||||
});
|
||||
|
||||
// GET /api/notebooks/:id — notebook detail with notes
|
||||
// GET /api/notebooks/:id — notebook detail with notes (Automerge-first)
|
||||
routes.get("/api/notebooks/:id", async (c) => {
|
||||
const space = c.req.param("space") || "demo";
|
||||
const id = c.req.param("id");
|
||||
|
||||
// Automerge first
|
||||
if (isLocalFirst(space)) {
|
||||
const docId = notebookDocId(space, id);
|
||||
const doc = _syncServer?.getDoc<NotebookDoc>(docId);
|
||||
if (doc) {
|
||||
const nb = notebookToRest(doc);
|
||||
const notes = Object.values(doc.items)
|
||||
.map(noteToRest)
|
||||
.sort((a, b) => {
|
||||
if (a.is_pinned !== b.is_pinned) return a.is_pinned ? -1 : 1;
|
||||
return new Date(b.updated_at).getTime() - new Date(a.updated_at).getTime();
|
||||
});
|
||||
return c.json({ ...nb, notes, source: "automerge" });
|
||||
}
|
||||
}
|
||||
|
||||
// PG fallback
|
||||
const nb = await sql.unsafe("SELECT * FROM rnotes.notebooks WHERE id = $1", [id]);
|
||||
if (nb.length === 0) return c.json({ error: "Notebook not found" }, 404);
|
||||
|
||||
|
|
@ -186,8 +367,9 @@ routes.get("/api/notebooks/:id", async (c) => {
|
|||
return c.json({ ...nb[0], notes });
|
||||
});
|
||||
|
||||
// PUT /api/notebooks/:id — update notebook
|
||||
// PUT /api/notebooks/:id — update notebook (dual-write)
|
||||
routes.put("/api/notebooks/:id", async (c) => {
|
||||
const space = c.req.param("space") || "demo";
|
||||
const token = extractToken(c.req.raw.headers);
|
||||
if (!token) return c.json({ error: "Authentication required" }, 401);
|
||||
let claims;
|
||||
|
|
@ -197,6 +379,7 @@ routes.put("/api/notebooks/:id", async (c) => {
|
|||
const body = await c.req.json();
|
||||
const { title, description, cover_color, is_public } = body;
|
||||
|
||||
// PG write
|
||||
const fields: string[] = [];
|
||||
const params: any[] = [];
|
||||
let idx = 1;
|
||||
|
|
@ -215,24 +398,76 @@ routes.put("/api/notebooks/:id", async (c) => {
|
|||
params
|
||||
);
|
||||
if (rows.length === 0) return c.json({ error: "Notebook not found" }, 404);
|
||||
|
||||
// Automerge dual-write: update notebook metadata
|
||||
if (_syncServer && isLocalFirst(space)) {
|
||||
const docId = notebookDocId(space, id);
|
||||
_syncServer.changeDoc<NotebookDoc>(docId, "Update notebook", (d) => {
|
||||
if (title !== undefined) d.notebook.title = title;
|
||||
if (description !== undefined) d.notebook.description = description;
|
||||
if (cover_color !== undefined) d.notebook.coverColor = cover_color;
|
||||
if (is_public !== undefined) d.notebook.isPublic = is_public;
|
||||
d.notebook.updatedAt = Date.now();
|
||||
});
|
||||
}
|
||||
|
||||
return c.json(rows[0]);
|
||||
});
|
||||
|
||||
// DELETE /api/notebooks/:id
|
||||
// DELETE /api/notebooks/:id (dual-write)
|
||||
routes.delete("/api/notebooks/:id", async (c) => {
|
||||
const space = c.req.param("space") || "demo";
|
||||
const id = c.req.param("id");
|
||||
|
||||
const result = await sql.unsafe(
|
||||
"DELETE FROM rnotes.notebooks WHERE id = $1 RETURNING id",
|
||||
[c.req.param("id")]
|
||||
"DELETE FROM rnotes.notebooks WHERE id = $1 RETURNING id", [id]
|
||||
);
|
||||
if (result.length === 0) return c.json({ error: "Notebook not found" }, 404);
|
||||
|
||||
// Automerge: remove the entire doc from SyncServer
|
||||
// (SyncServer doesn't have a removeDoc — setting it to empty is the equivalent)
|
||||
// For now, the doc persists in Automerge but is effectively orphaned once PG row is gone.
|
||||
|
||||
return c.json({ ok: true });
|
||||
});
|
||||
|
||||
// ── Notes API ──
|
||||
|
||||
// GET /api/notes — list all notes (query: notebook_id, type, q)
|
||||
// GET /api/notes — list all notes (Automerge-first, PG fallback)
|
||||
routes.get("/api/notes", async (c) => {
|
||||
const space = c.req.param("space") || "demo";
|
||||
const { notebook_id, type, q, limit = "50", offset = "0" } = c.req.query();
|
||||
|
||||
// Automerge first
|
||||
if (isLocalFirst(space)) {
|
||||
let allNotes: ReturnType<typeof noteToRest>[] = [];
|
||||
const notebooks = notebook_id
|
||||
? [{ doc: _syncServer!.getDoc<NotebookDoc>(notebookDocId(space, notebook_id))! }].filter(x => x.doc)
|
||||
: listAutomergeNotebooks(space);
|
||||
|
||||
for (const { doc } of notebooks) {
|
||||
for (const item of Object.values(doc.items)) {
|
||||
if (type && item.type !== type) continue;
|
||||
if (q) {
|
||||
const lower = q.toLowerCase();
|
||||
if (!item.title.toLowerCase().includes(lower) && !item.contentPlain.toLowerCase().includes(lower)) continue;
|
||||
}
|
||||
allNotes.push(noteToRest(item));
|
||||
}
|
||||
}
|
||||
|
||||
// Sort: pinned first, then by updated_at desc
|
||||
allNotes.sort((a, b) => {
|
||||
if (a.is_pinned !== b.is_pinned) return a.is_pinned ? -1 : 1;
|
||||
return new Date(b.updated_at).getTime() - new Date(a.updated_at).getTime();
|
||||
});
|
||||
|
||||
const lim = Math.min(parseInt(limit), 100);
|
||||
const off = parseInt(offset) || 0;
|
||||
return c.json({ notes: allNotes.slice(off, off + lim), source: "automerge" });
|
||||
}
|
||||
|
||||
// PG fallback
|
||||
const conditions: string[] = [];
|
||||
const params: any[] = [];
|
||||
let idx = 1;
|
||||
|
|
@ -261,8 +496,9 @@ routes.get("/api/notes", async (c) => {
|
|||
return c.json({ notes: rows });
|
||||
});
|
||||
|
||||
// POST /api/notes — create note
|
||||
// POST /api/notes — create note (dual-write)
|
||||
routes.post("/api/notes", async (c) => {
|
||||
const space = c.req.param("space") || "demo";
|
||||
const token = extractToken(c.req.raw.headers);
|
||||
if (!token) return c.json({ error: "Authentication required" }, 401);
|
||||
let claims;
|
||||
|
|
@ -276,6 +512,7 @@ routes.post("/api/notes", async (c) => {
|
|||
// Strip HTML for plain text search
|
||||
const contentPlain = content ? content.replace(/<[^>]*>/g, " ").replace(/\s+/g, " ").trim() : null;
|
||||
|
||||
// PG write
|
||||
const rows = await sql.unsafe(
|
||||
`INSERT INTO rnotes.notes (notebook_id, title, content, content_plain, type, url, language, file_url, mime_type, file_size, duration)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11) RETURNING *`,
|
||||
|
|
@ -283,11 +520,13 @@ routes.post("/api/notes", async (c) => {
|
|||
url || null, language || null, file_url || null, mime_type || null, file_size || null, duration || null]
|
||||
);
|
||||
|
||||
// Handle tags
|
||||
// Handle tags in PG
|
||||
const tagNames: string[] = [];
|
||||
if (tags && Array.isArray(tags)) {
|
||||
for (const tagName of tags) {
|
||||
const name = tagName.trim().toLowerCase();
|
||||
if (!name) continue;
|
||||
tagNames.push(name);
|
||||
const tag = await sql.unsafe(
|
||||
"INSERT INTO rnotes.tags (name) VALUES ($1) ON CONFLICT (name) DO UPDATE SET name = $1 RETURNING id",
|
||||
[name]
|
||||
|
|
@ -299,12 +538,38 @@ routes.post("/api/notes", async (c) => {
|
|||
}
|
||||
}
|
||||
|
||||
// Automerge dual-write
|
||||
if (notebook_id && isLocalFirst(space)) {
|
||||
writeNoteToAutomerge(space, notebook_id, rows[0].id, {
|
||||
title: title.trim(),
|
||||
content: content || '',
|
||||
contentPlain: contentPlain || '',
|
||||
type: type || 'NOTE',
|
||||
url: url || null,
|
||||
language: language || null,
|
||||
fileUrl: file_url || null,
|
||||
mimeType: mime_type || null,
|
||||
fileSize: file_size || null,
|
||||
duration: duration || null,
|
||||
tags: tagNames,
|
||||
});
|
||||
}
|
||||
|
||||
return c.json(rows[0], 201);
|
||||
});
|
||||
|
||||
// GET /api/notes/:id — note detail
|
||||
// GET /api/notes/:id — note detail (Automerge-first)
|
||||
routes.get("/api/notes/:id", async (c) => {
|
||||
const space = c.req.param("space") || "demo";
|
||||
const id = c.req.param("id");
|
||||
|
||||
// Automerge first
|
||||
if (isLocalFirst(space)) {
|
||||
const found = findNoteInAutomerge(space, id);
|
||||
if (found) return c.json({ ...noteToRest(found.item), source: "automerge" });
|
||||
}
|
||||
|
||||
// PG fallback
|
||||
const rows = await sql.unsafe(
|
||||
`SELECT n.*, array_agg(t.name) FILTER (WHERE t.name IS NOT NULL) as tags
|
||||
FROM rnotes.notes n
|
||||
|
|
@ -318,12 +583,14 @@ routes.get("/api/notes/:id", async (c) => {
|
|||
return c.json(rows[0]);
|
||||
});
|
||||
|
||||
// PUT /api/notes/:id — update note
|
||||
// PUT /api/notes/:id — update note (dual-write)
|
||||
routes.put("/api/notes/:id", async (c) => {
|
||||
const space = c.req.param("space") || "demo";
|
||||
const id = c.req.param("id");
|
||||
const body = await c.req.json();
|
||||
const { title, content, type, url, language, is_pinned, sort_order } = body;
|
||||
|
||||
// PG write
|
||||
const fields: string[] = [];
|
||||
const params: any[] = [];
|
||||
let idx = 1;
|
||||
|
|
@ -349,13 +616,44 @@ routes.put("/api/notes/:id", async (c) => {
|
|||
params
|
||||
);
|
||||
if (rows.length === 0) return c.json({ error: "Note not found" }, 404);
|
||||
|
||||
// Automerge dual-write
|
||||
if (isLocalFirst(space)) {
|
||||
const found = findNoteInAutomerge(space, id);
|
||||
if (found) {
|
||||
const contentPlain = content ? content.replace(/<[^>]*>/g, " ").replace(/\s+/g, " ").trim() : undefined;
|
||||
writeNoteToAutomerge(space, found.item.notebookId, id, {
|
||||
...(title !== undefined ? { title } : {}),
|
||||
...(content !== undefined ? { content, contentPlain } : {}),
|
||||
...(type !== undefined ? { type } : {}),
|
||||
...(url !== undefined ? { url } : {}),
|
||||
...(language !== undefined ? { language } : {}),
|
||||
...(is_pinned !== undefined ? { isPinned: is_pinned } : {}),
|
||||
...(sort_order !== undefined ? { sortOrder: sort_order } : {}),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return c.json(rows[0]);
|
||||
});
|
||||
|
||||
// DELETE /api/notes/:id
|
||||
// DELETE /api/notes/:id (dual-write)
|
||||
routes.delete("/api/notes/:id", async (c) => {
|
||||
const result = await sql.unsafe("DELETE FROM rnotes.notes WHERE id = $1 RETURNING id", [c.req.param("id")]);
|
||||
const space = c.req.param("space") || "demo";
|
||||
const id = c.req.param("id");
|
||||
|
||||
// PG delete
|
||||
const result = await sql.unsafe("DELETE FROM rnotes.notes WHERE id = $1 RETURNING id, notebook_id", [id]);
|
||||
if (result.length === 0) return c.json({ error: "Note not found" }, 404);
|
||||
|
||||
// Automerge dual-write: remove note from notebook doc
|
||||
if (isLocalFirst(space) && result[0].notebook_id && _syncServer) {
|
||||
const docId = notebookDocId(space, result[0].notebook_id);
|
||||
_syncServer.changeDoc<NotebookDoc>(docId, `Delete note ${id}`, (d) => {
|
||||
delete d.items[id];
|
||||
});
|
||||
}
|
||||
|
||||
return c.json({ ok: true });
|
||||
});
|
||||
|
||||
|
|
@ -381,6 +679,60 @@ export const notesModule: RSpaceModule = {
|
|||
description: "Notebooks with rich-text notes, voice transcription, and collaboration",
|
||||
scoping: { defaultScope: 'global', userConfigurable: true },
|
||||
routes,
|
||||
|
||||
docSchemas: [
|
||||
{
|
||||
pattern: '{space}:notes:notebooks:{notebookId}',
|
||||
description: 'One Automerge doc per notebook, containing all notes as items',
|
||||
init: notebookSchema.init,
|
||||
},
|
||||
],
|
||||
|
||||
async onInit({ syncServer }) {
|
||||
_syncServer = syncServer;
|
||||
|
||||
// Init PG (still needed during dual-write period)
|
||||
await initDB();
|
||||
await seedDemoIfEmpty();
|
||||
|
||||
console.log("[Notes] onInit complete (PG + schema registered)");
|
||||
},
|
||||
|
||||
async onSpaceCreate(ctx: SpaceLifecycleContext) {
|
||||
if (!_syncServer) return;
|
||||
|
||||
// Create a default "My Notes" notebook doc for the new space
|
||||
const notebookId = "default";
|
||||
const docId = notebookDocId(ctx.spaceSlug, notebookId);
|
||||
|
||||
if (_syncServer.getDoc(docId)) return; // already exists
|
||||
|
||||
const doc = Automerge.init<NotebookDoc>();
|
||||
const initialized = Automerge.change(doc, "Create default notebook", (d) => {
|
||||
d.meta = {
|
||||
module: "notes",
|
||||
collection: "notebooks",
|
||||
version: 1,
|
||||
spaceSlug: ctx.spaceSlug,
|
||||
createdAt: Date.now(),
|
||||
};
|
||||
d.notebook = {
|
||||
id: notebookId,
|
||||
title: "My Notes",
|
||||
slug: "my-notes",
|
||||
description: "Default notebook",
|
||||
coverColor: "#3b82f6",
|
||||
isPublic: false,
|
||||
createdAt: Date.now(),
|
||||
updatedAt: Date.now(),
|
||||
};
|
||||
d.items = {};
|
||||
});
|
||||
|
||||
_syncServer.setDoc(docId, initialized);
|
||||
console.log(`[Notes] Created default notebook for space: ${ctx.spaceSlug}`);
|
||||
},
|
||||
|
||||
landingPage: renderLanding,
|
||||
standaloneDomain: "rnotes.online",
|
||||
feeds: [
|
||||
|
|
|
|||
|
|
@ -0,0 +1,124 @@
|
|||
/**
|
||||
* rNotes Automerge document schemas.
|
||||
*
|
||||
* Granularity: one Automerge document per notebook.
|
||||
* DocId format: {space}:notes:notebooks:{notebookId}
|
||||
*
|
||||
* The shape matches the PG→Automerge migration adapter
|
||||
* (server/local-first/migration/pg-to-automerge.ts:notesMigration)
|
||||
* and the client-side NotebookDoc type in folk-notes-app.ts.
|
||||
*/
|
||||
|
||||
import type { DocSchema } from '../../shared/local-first/document';
|
||||
|
||||
// ── Document types ──
|
||||
|
||||
/**
 * One note inside a notebook doc.
 * Field names are the camelCased mirror of the PG rnotes.notes columns
 * (see the noteToRest snake_case mapping in the routes file).
 */
export interface NoteItem {
  id: string;
  notebookId: string;        // owning notebook / doc ID
  authorId: string | null;   // creating user, when known
  title: string;
  content: string;           // rich-text body (HTML)
  contentPlain: string;      // HTML-stripped body used for plain-text search
  type: 'NOTE' | 'CLIP' | 'BOOKMARK' | 'CODE' | 'IMAGE' | 'FILE' | 'AUDIO';
  url: string | null;        // presumably the source URL for BOOKMARK/CLIP — verify
  language: string | null;   // presumably the syntax language for CODE notes — verify
  fileUrl: string | null;    // stored file location for file-backed types
  mimeType: string | null;
  fileSize: number | null;   // assumes bytes — TODO confirm against uploader
  duration: number | null;   // audio length; unit not shown here — verify
  isPinned: boolean;
  sortOrder: number;
  tags: string[];
  createdAt: number;         // epoch millis (Date.now())
  updatedAt: number;         // epoch millis (Date.now())
}
|
||||
|
||||
/**
 * Notebook metadata; camelCased mirror of the PG rnotes.notebooks columns
 * (see notebookToRest for the snake_case REST mapping).
 */
export interface NotebookMeta {
  id: string;
  title: string;
  slug: string;
  description: string;
  coverColor: string;   // hex color string, e.g. '#3b82f6'
  isPublic: boolean;
  createdAt: number;    // epoch millis (Date.now())
  updatedAt: number;    // epoch millis (Date.now())
}
|
||||
|
||||
/**
 * Top-level Automerge document — one per notebook.
 * DocId format: {space}:notes:notebooks:{notebookId}
 */
export interface NotebookDoc {
  // Envelope identifying which module/collection the doc belongs to.
  meta: {
    module: string;       // 'notes'
    collection: string;   // 'notebooks'
    version: number;      // schema version (currently 1)
    spaceSlug: string;
    createdAt: number;    // epoch millis
  };
  // Notebook-level metadata (title, slug, colors, timestamps).
  notebook: NotebookMeta;
  // Every note in the notebook, keyed by note ID.
  items: Record<string, NoteItem>;
}
|
||||
|
||||
// ── Schema registration ──
|
||||
|
||||
/**
 * Schema registered with the DocumentManager for notebook docs.
 * `init` produces a blank notebook; id/slug/spaceSlug are left empty and
 * populated later (e.g. by the creator or via sync).
 */
export const notebookSchema: DocSchema<NotebookDoc> = {
  module: 'notes',
  collection: 'notebooks',
  version: 1,
  init: (): NotebookDoc => ({
    meta: {
      module: 'notes',
      collection: 'notebooks',
      version: 1,
      spaceSlug: '',          // filled in after creation
      createdAt: Date.now(),
    },
    notebook: {
      id: '',                 // filled in after creation
      title: 'Untitled Notebook',
      slug: '',
      description: '',
      coverColor: '#3b82f6',  // default blue
      isPublic: false,
      createdAt: Date.now(),
      updatedAt: Date.now(),
    },
    items: {},                // no notes yet
  }),
};
|
||||
|
||||
// ── Helpers ──
|
||||
|
||||
/** Generate a docId for a notebook. */
|
||||
export function notebookDocId(space: string, notebookId: string) {
|
||||
return `${space}:notes:notebooks:${notebookId}` as const;
|
||||
}
|
||||
|
||||
/** Create a fresh NoteItem with defaults. */
|
||||
export function createNoteItem(
|
||||
id: string,
|
||||
notebookId: string,
|
||||
title: string,
|
||||
opts: Partial<NoteItem> = {},
|
||||
): NoteItem {
|
||||
const now = Date.now();
|
||||
return {
|
||||
id,
|
||||
notebookId,
|
||||
authorId: null,
|
||||
title,
|
||||
content: '',
|
||||
contentPlain: '',
|
||||
type: 'NOTE',
|
||||
url: null,
|
||||
language: null,
|
||||
fileUrl: null,
|
||||
mimeType: null,
|
||||
fileSize: null,
|
||||
duration: null,
|
||||
isPinned: false,
|
||||
sortOrder: 0,
|
||||
tags: [],
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
...opts,
|
||||
};
|
||||
}
|
||||
|
|
@ -1,8 +1,13 @@
|
|||
/**
|
||||
* Run all PG → Automerge migrations for real.
|
||||
* Run PG → Automerge migrations.
|
||||
*
|
||||
* Usage (inside rspace container):
|
||||
* bun run server/local-first/migration/run-migration.ts [space]
|
||||
* bun run server/local-first/migration/run-migration.ts [space] [flags]
|
||||
*
|
||||
* Flags:
|
||||
* --dry-run Preview without creating docs
|
||||
* --module=notes Run only a specific module migration
|
||||
* --verify After migrating, compare Automerge docs against PG
|
||||
*
|
||||
* Default space: "demo". Creates disk backups in /data/docs-backup/.
|
||||
* Idempotent: skips docs that already exist in the SyncServer.
|
||||
|
|
@ -11,6 +16,7 @@
|
|||
import postgres from 'postgres';
|
||||
import * as Automerge from '@automerge/automerge';
|
||||
import { mkdirSync, writeFileSync } from 'node:fs';
|
||||
import { dirname } from 'node:path';
|
||||
import {
|
||||
migrateModule,
|
||||
allMigrations,
|
||||
|
|
@ -36,47 +42,136 @@ const pool = {
|
|||
},
|
||||
};
|
||||
|
||||
const space = process.argv[2] || 'demo';
|
||||
// ── CLI args ──
|
||||
|
||||
const args = process.argv.slice(2);
|
||||
const flags = args.filter((a) => a.startsWith('--'));
|
||||
const positional = args.filter((a) => !a.startsWith('--'));
|
||||
|
||||
const space = positional[0] || 'demo';
|
||||
const dryRun = flags.includes('--dry-run');
|
||||
const verify = flags.includes('--verify');
|
||||
const moduleFlag = flags.find((f) => f.startsWith('--module='))?.split('=')[1];
|
||||
const BACKUP_DIR = '/data/docs-backup';
|
||||
|
||||
async function main() {
|
||||
console.log(`\n=== PG → AUTOMERGE MIGRATION (space: "${space}") ===\n`);
|
||||
const mode = dryRun ? 'DRY-RUN' : 'MIGRATE';
|
||||
console.log(`\n=== PG → AUTOMERGE ${mode} (space: "${space}") ===\n`);
|
||||
|
||||
// Load any existing docs so idempotency checks work
|
||||
await loadAllDocs(syncServer);
|
||||
|
||||
// Filter migrations if --module flag provided
|
||||
const migrations = moduleFlag
|
||||
? allMigrations.filter((m) => m.module === moduleFlag)
|
||||
: allMigrations;
|
||||
|
||||
if (moduleFlag && migrations.length === 0) {
|
||||
console.error(`No migration found for module "${moduleFlag}"`);
|
||||
console.error(`Available: ${allMigrations.map((m) => m.module).join(', ')}`);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const results: MigrationResult[] = [];
|
||||
|
||||
for (const migration of allMigrations) {
|
||||
for (const migration of migrations) {
|
||||
const result = await migrateModule(migration, pool, space, syncServer, {
|
||||
dryRun: false,
|
||||
backupDir: BACKUP_DIR,
|
||||
dryRun,
|
||||
backupDir: dryRun ? undefined : BACKUP_DIR,
|
||||
});
|
||||
results.push(result);
|
||||
console.log('');
|
||||
}
|
||||
|
||||
// Flush all docs to /data/docs/ (setDoc doesn't trigger onDocChange,
|
||||
// so debounced saves won't fire — we save explicitly here)
|
||||
console.log('[Migration] Saving all docs to /data/docs/...');
|
||||
const { mkdirSync: mkdir } = await import('node:fs');
|
||||
const { dirname } = await import('node:path');
|
||||
let saved = 0;
|
||||
for (const docId of syncServer.getDocIds()) {
|
||||
const doc = syncServer.getDoc(docId);
|
||||
if (!doc) continue;
|
||||
try {
|
||||
const filePath = docIdToPath(docId);
|
||||
mkdir(dirname(filePath), { recursive: true });
|
||||
const binary = Automerge.save(doc);
|
||||
writeFileSync(filePath, binary);
|
||||
saved++;
|
||||
} catch (e) {
|
||||
console.error(`[Migration] Failed to save ${docId}:`, e);
|
||||
// Save docs to disk (skip in dry-run)
|
||||
if (!dryRun) {
|
||||
console.log('[Migration] Saving all docs to /data/docs/...');
|
||||
let saved = 0;
|
||||
for (const docId of syncServer.getDocIds()) {
|
||||
const doc = syncServer.getDoc(docId);
|
||||
if (!doc) continue;
|
||||
try {
|
||||
const filePath = docIdToPath(docId);
|
||||
mkdirSync(dirname(filePath), { recursive: true });
|
||||
const binary = Automerge.save(doc);
|
||||
writeFileSync(filePath, binary);
|
||||
saved++;
|
||||
} catch (e) {
|
||||
console.error(`[Migration] Failed to save ${docId}:`, e);
|
||||
}
|
||||
}
|
||||
console.log(`[Migration] Saved ${saved} docs to disk.`);
|
||||
}
|
||||
console.log(`[Migration] Saved ${saved} docs to disk.`);
|
||||
|
||||
// ── Summary ──
|
||||
|
||||
printSummary(results);
|
||||
|
||||
// ── Verification ──
|
||||
|
||||
if (verify) {
|
||||
console.log('\n=== VERIFICATION ===\n');
|
||||
await verifyNotes(space);
|
||||
}
|
||||
|
||||
console.log(`\nBackups: ${BACKUP_DIR}/`);
|
||||
console.log(`Persistent: /data/docs/`);
|
||||
console.log(`Total docs in SyncServer: ${syncServer.getDocIds().length}`);
|
||||
|
||||
await sql.end();
|
||||
}
|
||||
|
||||
// ── Verification: compare PG vs Automerge for notes ──
|
||||
|
||||
async function verifyNotes(space: string) {
|
||||
try {
|
||||
// Count notebooks in PG
|
||||
const { rows: pgNotebooks } = await pool.query(
|
||||
'SELECT id, title FROM rnotes.notebooks ORDER BY created_at'
|
||||
);
|
||||
|
||||
// Count notebook docs in Automerge
|
||||
const notesDocs = syncServer.getDocIds().filter((id) =>
|
||||
id.includes(':notes:notebooks:')
|
||||
);
|
||||
|
||||
console.log(` PG notebooks: ${pgNotebooks.length}`);
|
||||
console.log(` Automerge notebooks: ${notesDocs.length}`);
|
||||
|
||||
if (pgNotebooks.length !== notesDocs.length) {
|
||||
console.warn(' ⚠ Count mismatch!');
|
||||
}
|
||||
|
||||
// Per-notebook: compare note counts
|
||||
let allMatch = true;
|
||||
for (const nb of pgNotebooks) {
|
||||
const { rows: pgNotes } = await pool.query(
|
||||
'SELECT COUNT(*) as count FROM rnotes.notes WHERE notebook_id = $1',
|
||||
[nb.id]
|
||||
);
|
||||
const pgCount = parseInt(pgNotes[0]?.count ?? '0', 10);
|
||||
|
||||
const docId = `${space}:notes:notebooks:${nb.id}`;
|
||||
const doc = syncServer.getDoc<{ items: Record<string, unknown> }>(docId);
|
||||
const amCount = doc ? Object.keys(doc.items ?? {}).length : 0;
|
||||
|
||||
if (pgCount !== amCount) {
|
||||
console.warn(` ⚠ "${nb.title}" (${nb.id}): PG=${pgCount} AM=${amCount}`);
|
||||
allMatch = false;
|
||||
}
|
||||
}
|
||||
|
||||
if (allMatch && pgNotebooks.length > 0) {
|
||||
console.log(' ✓ All note counts match between PG and Automerge');
|
||||
}
|
||||
} catch (e) {
|
||||
console.warn(` Verification skipped (notes tables may not exist): ${e}`);
|
||||
}
|
||||
}
|
||||
|
||||
// ── Print summary table ──
|
||||
|
||||
function printSummary(results: MigrationResult[]) {
|
||||
console.log('\n=== SUMMARY ===\n');
|
||||
console.log(
|
||||
`${'Module'.padEnd(12)} ${'Created'.padStart(8)} ${'Skipped'.padStart(8)} ${'Rows'.padStart(6)} ${'Errors'.padStart(7)} ${'Time'.padStart(8)}`
|
||||
|
|
@ -111,12 +206,6 @@ async function main() {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`\nBackups: ${BACKUP_DIR}/`);
|
||||
console.log(`Persistent: /data/docs/`);
|
||||
console.log(`Total docs in SyncServer: ${syncServer.getDocIds().length}`);
|
||||
|
||||
await sql.end();
|
||||
}
|
||||
|
||||
main().catch((e) => {
|
||||
|
|
|
|||
|
|
@ -165,6 +165,13 @@ export class SyncServer {
|
|||
return this.#docs.get(docId);
|
||||
}
|
||||
|
||||
/**
|
||||
* List all document IDs held by the server.
|
||||
*/
|
||||
listDocs(): string[] {
|
||||
return Array.from(this.#docs.keys());
|
||||
}
|
||||
|
||||
/**
|
||||
* Set/replace a server-side document and sync to all subscribed peers.
|
||||
*/
|
||||
|
|
|
|||
Loading…
Reference in New Issue