// rspace-online/modules/rnotes/mod.ts
/**
* rNotes module — vault sync and browse for Obsidian and Logseq.
*
* Replaces the old full-editor module (~1784 lines).
* Rich editing is now in rDocs. This module handles:
* - ZIP vault uploads (Obsidian / Logseq)
* - Automerge metadata storage (titles, tags, hashes, wikilinks)
* - On-demand note content served from ZIP on disk
* - Graph data for wikilink visualization
* - Browser-extension compat shims → rdocs
*/
import { Hono } from "hono";
import * as Automerge from "@automerge/automerge";
import JSZip from "jszip";
import { createHash } from "crypto";
import { mkdir, writeFile, readFile, unlink } from "fs/promises";
import { join } from "path";
import { renderShell } from "../../server/shell";
import { getModuleInfoList } from "../../shared/module";
import type { RSpaceModule, SpaceLifecycleContext } from "../../shared/module";
import { resolveDataSpace } from "../../shared/scope-resolver";
import { verifyToken, extractToken } from "../../server/auth";
import { renderLanding } from "./landing";
import { vaultSchema, vaultDocId } from "./schemas";
import type { VaultDoc, VaultNoteMeta } from "./schemas";
import type { SyncServer } from "../../server/local-first/sync-server";
// ── SyncServer ref (set during onInit) ──
// Module-level handle to the host's SyncServer; null until onInit runs.
// Route handlers below assume init has happened (they use `_syncServer!`).
let _syncServer: SyncServer | null = null;
// ── Constants ──
// On-disk home for uploaded vault ZIPs; note bodies are served from here.
const VAULT_UPLOAD_DIR = "/data/files/uploads/vaults";
// ── Helpers ──
/** Collect every vault docId belonging to `space` via a known-prefix scan. */
function findVaultDocIds(space: string): string[] {
  const server = _syncServer;
  if (server === null) return [];
  const wanted = `${space}:rnotes:vaults:`;
  const matches: string[] = [];
  for (const docId of server.getDocIds()) {
    if (docId.startsWith(wanted)) matches.push(docId);
  }
  return matches;
}
/** Metadata extracted from one markdown file inside an uploaded vault ZIP. */
interface ParsedNote {
  // Path inside the ZIP ("pages/" prefix stripped for Logseq vaults).
  path: string;
  // Frontmatter `title`, else first H1, else the filename.
  title: string;
  // Union of frontmatter tags and inline #tags, deduplicated, no leading '#'.
  tags: string[];
  // Flat key→value map from the YAML frontmatter block (values are raw strings).
  frontmatter: Record<string, unknown>;
  // sha256 hex digest of the full file content.
  contentHash: string;
  // Size of the note content; summed into vault.totalSizeBytes.
  sizeBytes: number;
  // [[Target]] / [[Target|Alias]] link targets found in the body (alias dropped).
  wikilinks: string[];
}
/**
 * Parse a ZIP buffer and extract metadata for each markdown file.
 *
 * Skips directories, non-markdown entries, and hidden/system files (any path
 * segment starting with "."). For Logseq vaults the "pages/" prefix is
 * stripped so note paths line up with Logseq page names.
 *
 * @param buffer - Raw ZIP file contents.
 * @param source - Vault flavor; only affects path normalization.
 * @returns Metadata for every markdown note found (may be empty).
 * @throws Propagates JSZip errors when the buffer is not a valid archive.
 */
async function parseVaultZip(
  buffer: ArrayBuffer,
  source: "obsidian" | "logseq",
): Promise<ParsedNote[]> {
  const zip = await JSZip.loadAsync(buffer);
  const results: ParsedNote[] = [];
  for (const [relativePath, zipEntry] of Object.entries(zip.files)) {
    if (zipEntry.dir) continue;
    if (!relativePath.endsWith(".md") && !relativePath.endsWith(".markdown")) continue;
    // Skip hidden / system files (e.g. .obsidian/, .git/)
    if (relativePath.includes("/.") || relativePath.startsWith(".")) continue;
    const content = await zipEntry.async("string");
    // FIX: measure true UTF-8 bytes — `content.length` counts UTF-16 code
    // units and undercounts multi-byte characters.
    const sizeBytes = Buffer.byteLength(content, "utf8");
    const contentHash = createHash("sha256").update(content).digest("hex");
    // Parse frontmatter (YAML between --- delimiters)
    const frontmatter: Record<string, unknown> = {};
    let bodyStart = 0;
    if (content.startsWith("---")) {
      const end = content.indexOf("\n---", 3);
      if (end !== -1) {
        const fmBlock = content.slice(3, end).trim();
        bodyStart = end + 4;
        // Simple "key: value" parser — no arrays, nesting, or quoting.
        for (const line of fmBlock.split("\n")) {
          const colon = line.indexOf(":");
          if (colon === -1) continue;
          const key = line.slice(0, colon).trim();
          const val = line.slice(colon + 1).trim();
          if (key) frontmatter[key] = val;
        }
      }
    }
    // Tags from frontmatter: accepts "a, b" or "[a, b]"; leading '#' stripped.
    let tags: string[] = [];
    if (typeof frontmatter.tags === "string") {
      tags = frontmatter.tags
        .replace(/[\[\]]/g, "")
        .split(",")
        .map((t) => t.trim().replace(/^#/, ""))
        .filter(Boolean);
    }
    // Merge in inline #tags from the body, deduplicated.
    const body = content.slice(bodyStart);
    const inlineTags = [...body.matchAll(/#([\w/-]+)/g)].map((m) => m[1]);
    tags = [...new Set([...tags, ...inlineTags])];
    // Title precedence: frontmatter title → first H1 → filename sans extension.
    let title = (frontmatter.title as string) ?? "";
    if (!title) {
      const h1 = body.match(/^#\s+(.+)/m);
      // FIX: also strip ".markdown" — the loop accepts both extensions, but
      // the old regex only removed ".md".
      title = h1
        ? h1[1].trim()
        : relativePath.split("/").pop()!.replace(/\.(md|markdown)$/i, "");
    }
    // Extract wikilinks [[Target]] and [[Target|Alias]] (alias discarded).
    const wikilinks = [...body.matchAll(/\[\[([^\]|]+)(?:\|[^\]]+)?\]\]/g)].map((m) =>
      m[1].trim(),
    );
    // Logseq: strip "pages/" prefix so paths match page names.
    const normalizedPath =
      source === "logseq" && relativePath.startsWith("pages/")
        ? relativePath.slice("pages/".length)
        : relativePath;
    results.push({
      path: normalizedPath,
      title,
      tags,
      frontmatter,
      contentHash,
      sizeBytes,
      wikilinks,
    });
  }
  return results;
}
// ── Routes ──
// Hono sub-router for this module. Handlers read a ":space" URL param
// (see c.req.param("space") below), so it is mounted under a space-scoped
// path by the host server.
const routes = new Hono();
// GET / — render the public landing page or the space's app shell
routes.get("/", (c) => {
  // FIX: the old `!space` guard was unreachable — `space` always falls back
  // to "demo" — so only the standalone-domain check decides the branch.
  const space = c.req.param("space") || "demo";
  if (space === "rnotes.online") {
    return c.html(renderLanding());
  }
  return c.html(
    renderShell({
      title: `${space} — rNotes | rSpace`,
      moduleId: "rnotes",
      spaceSlug: space,
      modules: getModuleInfoList(),
      theme: "dark",
      body: `<folk-notes-app space="${space}"></folk-notes-app>`,
      scripts: `<script type="module" src="/modules/rnotes/folk-notes-app.js?v=9"></script>`,
    }),
  );
});
// POST /api/vault/upload — accept ZIP + metadata, parse, store
routes.post("/api/vault/upload", async (c) => {
const token = extractToken(c.req.raw.headers);
if (!token) return c.json({ error: "Unauthorized" }, 401);
try {
await verifyToken(token);
} catch {
return c.json({ error: "Invalid token" }, 401);
}
const space = c.req.param("space") || "demo";
const dataSpace = resolveDataSpace("rnotes", space);
let formData: FormData;
try {
formData = await c.req.formData();
} catch {
return c.json({ error: "Expected multipart/form-data" }, 400);
}
const file = formData.get("file") as File | null;
const name = (formData.get("name") as string | null) ?? "My Vault";
const source = ((formData.get("source") as string | null) ?? "obsidian") as
| "obsidian"
| "logseq";
if (!file) return c.json({ error: "Missing file" }, 400);
if (!file.name.endsWith(".zip")) return c.json({ error: "File must be a .zip" }, 400);
const buffer = await file.arrayBuffer();
const vaultId = crypto.randomUUID();
// Parse ZIP metadata
let notes: ParsedNote[];
try {
notes = await parseVaultZip(buffer, source);
} catch (err) {
return c.json({ error: `Failed to parse ZIP: ${(err as Error).message}` }, 400);
}
if (notes.length === 0) {
return c.json({ error: "No markdown files found in ZIP" }, 400);
}
// Store ZIP to disk
await mkdir(VAULT_UPLOAD_DIR, { recursive: true });
const zipPath = join(VAULT_UPLOAD_DIR, `${vaultId}.zip`);
await writeFile(zipPath, Buffer.from(buffer));
// Build Automerge doc
const docId = vaultDocId(dataSpace, vaultId);
const now = Date.now();
const totalSize = notes.reduce((acc, n) => acc + n.sizeBytes, 0);
const doc = Automerge.change(Automerge.init<VaultDoc>(), "init vault", (d) => {
d.meta = {
module: "rnotes",
collection: "vaults",
version: 1,
spaceSlug: dataSpace,
createdAt: now,
};
d.vault = {
id: vaultId,
name,
source,
totalNotes: notes.length,
totalSizeBytes: totalSize,
lastSyncedAt: now,
createdAt: now,
};
d.notes = {};
d.wikilinks = {};
for (const n of notes) {
d.notes[n.path] = {
path: n.path,
title: n.title,
tags: n.tags,
contentHash: n.contentHash,
sizeBytes: n.sizeBytes,
lastModifiedAt: now,
syncStatus: "synced",
frontmatter: n.frontmatter as Record<string, any>,
} satisfies VaultNoteMeta;
if (n.wikilinks.length > 0) {
d.wikilinks[n.path] = n.wikilinks;
}
}
});
_syncServer!.setDoc(docId, doc);
return c.json({
vaultId,
name,
source,
totalNotes: notes.length,
totalSizeBytes: totalSize,
});
});
// GET /api/vault/list — every vault's summary record for this space
routes.get("/api/vault/list", (c) => {
  const space = c.req.param("space") || "demo";
  const dataSpace = resolveDataSpace("rnotes", space);
  const vaults: VaultDoc["vault"][] = [];
  for (const docId of findVaultDocIds(dataSpace)) {
    const doc = _syncServer!.getDoc<VaultDoc>(docId);
    if (doc) vaults.push(doc.vault);
  }
  return c.json({ vaults });
});
// GET /api/vault/:vaultId/status — per-vault sync-state summary
routes.get("/api/vault/:vaultId/status", (c) => {
  const space = c.req.param("space") || "demo";
  const dataSpace = resolveDataSpace("rnotes", space);
  const vaultId = c.req.param("vaultId");
  const doc = _syncServer!.getDoc<VaultDoc>(vaultDocId(dataSpace, vaultId));
  if (!doc) return c.json({ error: "Vault not found" }, 404);
  // Tally the three sync states in a single pass over the notes.
  let synced = 0;
  let conflicts = 0;
  let localModified = 0;
  for (const note of Object.values(doc.notes)) {
    if (note.syncStatus === "synced") synced += 1;
    else if (note.syncStatus === "conflict") conflicts += 1;
    else if (note.syncStatus === "local-modified") localModified += 1;
  }
  return c.json({
    vaultId,
    name: doc.vault.name,
    source: doc.vault.source,
    totalNotes: doc.vault.totalNotes,
    lastSyncedAt: doc.vault.lastSyncedAt,
    synced,
    conflicts,
    localModified,
  });
});
// GET /api/vault/:vaultId/notes — list notes, optionally narrowed by
// ?folder= (path prefix) and ?search= (title/path/tag substring).
routes.get("/api/vault/:vaultId/notes", (c) => {
  const space = c.req.param("space") || "demo";
  const dataSpace = resolveDataSpace("rnotes", space);
  const vaultId = c.req.param("vaultId");
  const { folder, search } = c.req.query();
  const doc = _syncServer!.getDoc<VaultDoc>(vaultDocId(dataSpace, vaultId));
  if (!doc) return c.json({ error: "Vault not found" }, 404);
  // Precompute filter inputs once, then apply both in a single predicate.
  const folderPrefix = folder ? (folder.endsWith("/") ? folder : `${folder}/`) : null;
  const term = search ? search.toLowerCase() : null;
  const notes = Object.values(doc.notes).filter((n) => {
    if (folderPrefix !== null && !n.path.startsWith(folderPrefix)) return false;
    if (term !== null) {
      const hit =
        n.title.toLowerCase().includes(term) ||
        n.path.toLowerCase().includes(term) ||
        n.tags.some((t) => t.toLowerCase().includes(term));
      if (!hit) return false;
    }
    return true;
  });
  // Newest first.
  notes.sort((a, b) => b.lastModifiedAt - a.lastModifiedAt);
  return c.json({ vaultId, total: notes.length, notes });
});
// GET /api/vault/:vaultId/note/* — read one note's markdown from the stored ZIP
routes.get("/api/vault/:vaultId/note/*", async (c) => {
  const space = c.req.param("space") || "demo";
  const dataSpace = resolveDataSpace("rnotes", space);
  const vaultId = c.req.param("vaultId");
  // Extract the wildcard path segment after /note/. Two prefixes are tried
  // because the route may be reached with or without the /rnotes segment.
  const url = new URL(c.req.url);
  const prefix = `/rnotes/api/vault/${vaultId}/note/`;
  const altPrefix = `/api/vault/${vaultId}/note/`;
  let notePath = url.pathname;
  try {
    if (notePath.includes(prefix)) {
      notePath = decodeURIComponent(notePath.slice(notePath.indexOf(prefix) + prefix.length));
    } else if (notePath.includes(altPrefix)) {
      notePath = decodeURIComponent(notePath.slice(notePath.indexOf(altPrefix) + altPrefix.length));
    }
  } catch {
    // FIX: malformed percent-encoding used to escape as an unhandled
    // URIError (500); report it as a client error instead.
    return c.json({ error: "Malformed note path" }, 400);
  }
  const docId = vaultDocId(dataSpace, vaultId);
  const doc = _syncServer!.getDoc<VaultDoc>(docId);
  if (!doc) return c.json({ error: "Vault not found" }, 404);
  if (!doc.notes[notePath]) return c.json({ error: "Note not found" }, 404);
  const zipPath = join(VAULT_UPLOAD_DIR, `${vaultId}.zip`);
  let zipBuffer: Buffer;
  try {
    zipBuffer = await readFile(zipPath);
  } catch {
    return c.json({ error: "Vault ZIP not found on disk" }, 404);
  }
  const zip = await JSZip.loadAsync(zipBuffer);
  // Logseq ZIPs keep pages under "pages/" while doc paths are normalized,
  // so fall back to the prefixed entry. (De-duplicates the old copy-pasted
  // response branches.)
  const zipEntry = zip.file(notePath) ?? zip.file(`pages/${notePath}`);
  if (!zipEntry) return c.json({ error: "File not in ZIP" }, 404);
  const content = await zipEntry.async("string");
  return new Response(content, {
    headers: { "Content-Type": "text/markdown; charset=utf-8" },
  });
});
// GET /api/vault/:vaultId/graph — wikilink nodes + edges for graph visualization
routes.get("/api/vault/:vaultId/graph", (c) => {
  const space = c.req.param("space") || "demo";
  const dataSpace = resolveDataSpace("rnotes", space);
  const vaultId = c.req.param("vaultId");
  const docId = vaultDocId(dataSpace, vaultId);
  const doc = _syncServer!.getDoc<VaultDoc>(docId);
  if (!doc) return c.json({ error: "Vault not found" }, 404);
  const notes = Object.values(doc.notes);
  const nodes = notes.map((n) => ({
    id: n.path,
    label: n.title,
    tags: n.tags,
  }));
  // FIX: the old code spread the node set and linearly scanned it for every
  // single link (O(notes × links)). Precompute a first-wins alias map with
  // the same match rules — exact path, path minus ".md", bare filename minus
  // ".md" — so each link resolves in O(1) with identical results.
  const aliasToPath = new Map<string, string>();
  for (const n of notes) {
    const stripped = n.path.replace(/\.md$/i, "");
    const basename = n.path.split("/").pop()?.replace(/\.md$/i, "");
    for (const alias of [n.path, stripped, basename]) {
      if (alias !== undefined && !aliasToPath.has(alias)) {
        aliasToPath.set(alias, n.path);
      }
    }
  }
  const edges: Array<{ source: string; target: string }> = [];
  for (const [sourcePath, targets] of Object.entries(doc.wikilinks)) {
    for (const target of targets) {
      // Unresolved targets keep the raw link text as a dangling node id.
      edges.push({ source: sourcePath, target: aliasToPath.get(target) ?? target });
    }
  }
  return c.json({ vaultId, nodes, edges });
});
// DELETE /api/vault/:vaultId — remove the vault's Automerge doc and its ZIP
routes.delete("/api/vault/:vaultId", async (c) => {
  const token = extractToken(c.req.raw.headers);
  if (!token) return c.json({ error: "Unauthorized" }, 401);
  try {
    await verifyToken(token);
  } catch {
    return c.json({ error: "Invalid token" }, 401);
  }
  const space = c.req.param("space") || "demo";
  const dataSpace = resolveDataSpace("rnotes", space);
  const vaultId = c.req.param("vaultId");
  const docId = vaultDocId(dataSpace, vaultId);
  if (!_syncServer!.getDoc<VaultDoc>(docId)) {
    return c.json({ error: "Vault not found" }, 404);
  }
  // NOTE(review): deleteDoc is invoked through `any` with optional chaining —
  // confirm it actually exists on SyncServer, otherwise docs are never freed.
  (_syncServer as any).deleteDoc?.(docId);
  // Best-effort ZIP cleanup: a missing file on disk is not an error.
  const zipPath = join(VAULT_UPLOAD_DIR, `${vaultId}.zip`);
  try {
    await unlink(zipPath);
  } catch {
    // already gone
  }
  return c.json({ ok: true, vaultId });
});
// ── Browser extension compat shims ──
// Old extension POSTed to /api/notes and GET /api/notebooks.
// Redirect to rdocs equivalents so existing installs keep working.
routes.post("/api/notes", (c) => {
  const space = c.req.param("space") || "demo";
  // FIX: 308 (not 301) — clients may rewrite a 301'd POST as GET and drop
  // the request body; 308 is the permanent redirect that preserves method.
  return c.redirect(`/${space}/rdocs/api/notes`, 308);
});
// Legacy notebook listing now lives in rdocs; permanently redirect.
routes.get("/api/notebooks", (c) =>
  c.redirect(`/${c.req.param("space") || "demo"}/rdocs/api/notebooks`, 301),
);
// ── Module definition ──
/** rNotes module registration: routes, scoping, doc schemas, lifecycle hooks. */
export const notesModule: RSpaceModule = {
  id: "rnotes",
  name: "rNotes",
  icon: "🔗",
  description: "Vault sync and browse for Obsidian and Logseq",
  // Apex domain served when this module runs standalone (see GET "/" above).
  standaloneDomain: "rnotes.online",
  routes,
  scoping: {
    // Vault data always lives at space scope; users cannot rescope it.
    defaultScope: "space",
    userConfigurable: false,
  },
  docSchemas: [
    {
      // Matches the ids produced by vaultDocId / scanned by findVaultDocIds.
      pattern: "{space}:rnotes:vaults:{vaultId}",
      description: "Vault metadata — notes, tags, wikilinks (content in ZIP on disk)",
      init: vaultSchema.init,
    },
  ],
  landingPage: renderLanding,
  onboardingActions: [
    {
      label: "Upload Vault ZIP",
      icon: "📦",
      description: "Export your Obsidian or Logseq vault as a ZIP and upload it here",
      type: "upload",
      // Handled by the POST /api/vault/upload route above.
      upload: { accept: ".zip", endpoint: "/api/vault/upload" },
    },
  ],
  outputPaths: [
    {
      path: "vaults",
      name: "Vaults",
      icon: "🗂️",
      description: "Synced Obsidian and Logseq vaults",
    },
  ],
  // Capture the SyncServer handle used by every route handler above.
  async onInit({ syncServer }) {
    _syncServer = syncServer;
  },
  async onSpaceCreate(_ctx: SpaceLifecycleContext) {
    // Vaults are user-uploaded — no auto-create needed
  },
};
export default notesModule;
// ── MI Integration ──
/** Slim note summary surfaced to the MI system prompt. */
export interface MIVaultNote {
  title: string;
  // Note path inside its vault (doc key in VaultDoc.notes).
  path: string;
  // Display name of the vault the note belongs to.
  vaultName: string;
  tags: string[];
  // Epoch millis; in this module it is set at vault upload time.
  lastModifiedAt: number;
}
/**
 * Return recently modified vault notes for the MI system prompt.
 * Scans every vault doc in the space and keeps the `limit` newest notes.
 */
export function getRecentVaultNotesForMI(space: string, limit = 10): MIVaultNote[] {
  const server = _syncServer;
  if (!server) return [];
  const collected: MIVaultNote[] = [];
  for (const docId of findVaultDocIds(space)) {
    const doc = server.getDoc<VaultDoc>(docId);
    if (!doc) continue;
    for (const note of Object.values(doc.notes)) {
      collected.push({
        title: note.title,
        path: note.path,
        vaultName: doc.vault.name,
        // Copy out of the Automerge proxy into a plain array.
        tags: Array.from(note.tags),
        lastModifiedAt: note.lastModifiedAt,
      });
    }
  }
  collected.sort((a, b) => b.lastModifiedAt - a.lastModifiedAt);
  return collected.slice(0, limit);
}