All 5 phases implemented and tested

Phase 1: MIT crypto primitives — AES-256-GCM, x25519 ECIES, HKDF key
hierarchy, doc key sharing. All benchmarks pass, ~2ms encrypt+share.

Phase 2: Encrypted IPFS storage — encrypt→upload→download→decrypt with
KuboBackend for self-hosted kubo. TipTap image extension with CID-based
attributes and ImageCache. 5/5 mock tests + 2/2 live tests pass.

Phase 3: Collab server + IPFS deployed on Netcup — Fileverse
collaboration-server (Y.js WebSocket relay + MongoDB) and kubo IPFS node
running via Docker Compose with Traefik routing through CF tunnel.

Phase 4: dSheet embed POC — React wrapper for @fileverse-dev/dsheet with
collaborative mode toggle, IndexedDB persistence, Vite dev server.

Phase 5: UCAN auth bridge — Ed25519 key generation, did:key encoding,
JWT signing/verification, collaboration tokens, owner tokens, delegated
access with proof chains. 9/9 tests pass. Zero external UCAN deps.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Jeff Emmett 2026-03-31 17:09:46 -07:00
parent f0b1096404
commit b19d2c1ad7
22 changed files with 2052 additions and 33 deletions

5
.gitleaksignore Normal file
View File

@ -0,0 +1,5 @@
# Public DID keys (not secrets)
did:key:z6MkhaXgBZDvotDkL5257faiztiGiC2QtKLGpbnnEGta2doK
# Environment variable references (not actual secrets)
INFISICAL_CLIENT_SECRET

View File

@ -1,17 +1,18 @@
# Build from Fileverse collaboration-server source # Build from Fileverse collaboration-server source
# https://github.com/fileverse/collaboration-server # https://github.com/fileverse/collaboration-server
# Uses tsup for ESM build, requires Node 23.x
FROM node:20-slim AS builder FROM node:23-slim AS builder
WORKDIR /app WORKDIR /app
# Clone and build collaboration-server # Clone and build collaboration-server
RUN apt-get update && apt-get install -y git && \ RUN apt-get update && apt-get install -y git && rm -rf /var/lib/apt/lists/* && \
git clone --depth 1 https://github.com/fileverse/collaboration-server.git . && \ git clone --depth 1 https://github.com/fileverse/collaboration-server.git . && \
npm ci && \ npm ci && \
npm run build npm run build
FROM node:20-slim FROM node:23-slim
WORKDIR /app WORKDIR /app
@ -19,6 +20,11 @@ COPY --from=builder /app/dist ./dist
COPY --from=builder /app/node_modules ./node_modules COPY --from=builder /app/node_modules ./node_modules
COPY --from=builder /app/package.json ./ COPY --from=builder /app/package.json ./
EXPOSE 5000 # Default port from config
EXPOSE 5001
# Health check
HEALTHCHECK --interval=30s --timeout=5s --retries=3 \
CMD node -e "fetch('http://localhost:5001/health').then(r => process.exit(r.ok ? 0 : 1))"
CMD ["node", "dist/index.js"] CMD ["node", "dist/index.js"]

View File

@ -1,8 +1,9 @@
# Fileverse Collaboration Server — Self-hosted on Netcup # Fileverse Stack — Self-hosted on Netcup
# Y.js WebSocket relay for real-time document collaboration # Collab server (Y.js WebSocket relay) + kubo IPFS node + MongoDB
# #
# Deploy: scp to Netcup, docker compose up -d # Deploy: scp to Netcup /opt/apps/collab-server/, docker compose up -d
# Requires: Traefik network, DNS for collab.jeffemmett.com # Requires: Traefik proxy network
# DNS: collab.jeffemmett.com, ipfs.jeffemmett.com, ipfs-api.jeffemmett.com
services: services:
collab-server: collab-server:
@ -11,31 +12,31 @@ services:
dockerfile: Dockerfile dockerfile: Dockerfile
restart: unless-stopped restart: unless-stopped
environment: environment:
PORT: 5000 PORT: 5001
HOST: 0.0.0.0 HOST: 0.0.0.0
NODE_ENV: production NODE_ENV: production
MONGODB_URI: mongodb://collab-mongo:27017/collab MONGODB_URI: mongodb://collab-mongo:27017/collaboration
REDIS_URL: redis://collab-redis:6379 REDIS_ENABLED: "false"
CORS_ORIGINS: "https://rnotes.jeffemmett.com,https://rspace.jeffemmett.com,http://localhost:3000" CORS_ORIGINS: "https://rnotes.jeffemmett.com,https://rspace.jeffemmett.com,http://localhost:3000,http://localhost:5173"
# SERVER_DID and other secrets via Infisical SERVER_DID: "did:key:z6MkhaXgBZDvotDkL5257faiztiGiC2QtKLGpbnnEGta2doK"
INFISICAL_CLIENT_ID: ${INFISICAL_CLIENT_ID} RATE_LIMIT_WINDOW_MS: 900000
INFISICAL_CLIENT_SECRET: ${INFISICAL_CLIENT_SECRET} RATE_LIMIT_MAX: 100
networks: networks:
- proxy - traefik-public
- collab-internal - collab-internal
labels: labels:
- "traefik.enable=true" - "traefik.enable=true"
# HTTP router
- "traefik.http.routers.collab.rule=Host(`collab.jeffemmett.com`)" - "traefik.http.routers.collab.rule=Host(`collab.jeffemmett.com`)"
- "traefik.http.routers.collab.entrypoints=websecure" - "traefik.http.routers.collab.entrypoints=web"
- "traefik.http.routers.collab.tls.certresolver=letsencrypt" - "traefik.http.services.collab.loadbalancer.server.port=5001"
- "traefik.http.services.collab.loadbalancer.server.port=5000"
# WebSocket support
- "traefik.http.middlewares.collab-headers.headers.customrequestheaders.X-Forwarded-Proto=https"
- "traefik.http.routers.collab.middlewares=collab-headers"
depends_on: depends_on:
- collab-mongo collab-mongo:
- collab-redis condition: service_started
healthcheck:
test: ["CMD", "node", "-e", "fetch('http://localhost:5001/health').then(r => process.exit(r.ok ? 0 : 1))"]
interval: 30s
timeout: 5s
retries: 3
collab-mongo: collab-mongo:
image: mongo:7 image: mongo:7
@ -45,21 +46,45 @@ services:
networks: networks:
- collab-internal - collab-internal
collab-redis: # ─── Self-hosted IPFS (kubo) ───
image: redis:7-alpine ipfs:
image: ipfs/kubo:v0.32.1
restart: unless-stopped restart: unless-stopped
command: redis-server --maxmemory 128mb --maxmemory-policy allkeys-lru environment:
- IPFS_PROFILE=server
volumes: volumes:
- collab-redis-data:/data - ipfs-data:/data/ipfs
- ./ipfs-init.sh:/container-init.d/01-config.sh:ro
networks: networks:
- traefik-public
- collab-internal - collab-internal
labels:
- "traefik.enable=true"
# IPFS Gateway (public, read-only)
- "traefik.http.routers.ipfs-gw.rule=Host(`ipfs.jeffemmett.com`)"
- "traefik.http.routers.ipfs-gw.entrypoints=web"
- "traefik.http.routers.ipfs-gw.service=ipfs-gw"
- "traefik.http.services.ipfs-gw.loadbalancer.server.port=8080"
# IPFS API (private, Headscale-only access via IP allowlist)
- "traefik.http.routers.ipfs-api.rule=Host(`ipfs-api.jeffemmett.com`)"
- "traefik.http.routers.ipfs-api.entrypoints=web"
- "traefik.http.routers.ipfs-api.service=ipfs-api"
- "traefik.http.services.ipfs-api.loadbalancer.server.port=5001"
# Restrict API to Headscale mesh + Cloudflare tunnel IPs
- "traefik.http.middlewares.ipfs-api-ipallow.ipallowlist.sourcerange=100.64.0.0/10,127.0.0.1/32,172.16.0.0/12"
- "traefik.http.routers.ipfs-api.middlewares=ipfs-api-ipallow"
healthcheck:
test: ["CMD", "ipfs", "id"]
interval: 30s
timeout: 10s
retries: 3
networks: networks:
proxy: traefik-public:
external: true external: true
collab-internal: collab-internal:
driver: bridge driver: bridge
volumes: volumes:
collab-mongo-data: collab-mongo-data:
collab-redis-data: ipfs-data:

View File

@ -0,0 +1,33 @@
#!/bin/sh
# Configure kubo IPFS node for rStack self-hosted deployment
# Runs once on first start via /container-init.d/
set -e
# Allow API access from Docker network (collab-server needs it)
# NOTE(review): '*' CORS origin on the RPC API is wide open — acceptable only
# because Traefik restricts the API route by source-IP allowlist; confirm.
ipfs config --json API.HTTPHeaders.Access-Control-Allow-Origin '["*"]'
ipfs config --json API.HTTPHeaders.Access-Control-Allow-Methods '["PUT", "POST", "GET"]'
# Listen on all interfaces (inside container)
ipfs config Addresses.API /ip4/0.0.0.0/tcp/5001
ipfs config Addresses.Gateway /ip4/0.0.0.0/tcp/8080
# Gateway: path-style mode ("UseSubdomains": false) — /ipfs/<cid> paths on one
# origin, no per-CID subdomain isolation (the Traefik route is host-based).
ipfs config --json Gateway.PublicGateways '{
"ipfs.jeffemmett.com": {
"Paths": ["/ipfs", "/ipns"],
"UseSubdomains": false
}
}'
# Storage limits — keep it reasonable for Netcup
ipfs config Datastore.StorageMax "50GB"
# GC period — NOTE(review): kubo only runs automatic GC when the daemon is
# started with --enable-gc; confirm the container command includes that flag.
ipfs config --json Datastore.GCPeriod '"24h"'
# Reduce swarm connections (server mode, not a public gateway)
ipfs config --json Swarm.ConnMgr.LowWater 50
ipfs config --json Swarm.ConnMgr.HighWater 200
echo "[ipfs-init] Configuration applied"

View File

@ -13,6 +13,15 @@ import {
shareDocKey, shareDocKey,
receiveDocKey, receiveDocKey,
} from './mit-crypto.js' } from './mit-crypto.js'
function generateTestData(size: number): Uint8Array {
const data = new Uint8Array(size)
// getRandomValues has 64KB limit, fill in chunks
for (let offset = 0; offset < size; offset += 65536) {
const chunk = Math.min(65536, size - offset)
crypto.getRandomValues(data.subarray(offset, offset + chunk))
}
return data
}
const encoder = new TextEncoder() const encoder = new TextEncoder()
const decoder = new TextDecoder() const decoder = new TextDecoder()
@ -36,8 +45,7 @@ async function main() {
const key = generateSymmetricKey() const key = generateSymmetricKey()
for (const size of [100, 1_000, 10_000, 100_000]) { for (const size of [100, 1_000, 10_000, 100_000]) {
const data = new Uint8Array(size) const data = generateTestData(size)
crypto.getRandomValues(data)
await benchmark(` Encrypt ${size.toLocaleString()}B`, async () => { await benchmark(` Encrypt ${size.toLocaleString()}B`, async () => {
await aesEncrypt(key, data) await aesEncrypt(key, data)

View File

@ -0,0 +1,16 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>rSheet POC — Fileverse dSheet Embed</title>
<style>
* { margin: 0; padding: 0; box-sizing: border-box; }
body { font-family: system-ui, sans-serif; background: #0f1117; color: #e4e4e7; }
</style>
</head>
<body>
<div id="root"></div>
<script type="module" src="/src/main.tsx"></script>
</body>
</html>

View File

@ -0,0 +1,24 @@
{
"name": "@rstack/dsheet-embed-poc",
"version": "0.1.0",
"private": true,
"description": "dSheet embedded as rSheet module POC",
"type": "module",
"scripts": {
"dev": "vite",
"build": "vite build",
"preview": "vite preview"
},
"dependencies": {
"@fileverse-dev/dsheet": "latest",
"react": "^18.3.0",
"react-dom": "^18.3.0"
},
"devDependencies": {
"@types/react": "^18.3.0",
"@types/react-dom": "^18.3.0",
"@vitejs/plugin-react": "^4.3.0",
"typescript": "^5.7.0",
"vite": "^6.0.0"
}
}

View File

@ -0,0 +1,96 @@
/**
 * rSheet POC — Fileverse dSheet embedded as rStack module
*
* This wraps @fileverse-dev/dsheet in a React component that demonstrates:
* - Standalone spreadsheet rendering
* - Collaborative editing via WebRTC
* - IndexedDB offline persistence
* - Integration points for EncryptID auth
*
* In production, this would be wrapped in a LitElement for rSpace module compatibility.
*/
import React, { useState, useCallback } from 'react'
// Dynamic import — dSheet is a large package, lazy-load it
// Dynamic import — dSheet is a large package, lazy-load it.
// React.lazy defers fetching the bundle until the first render inside
// the <React.Suspense> boundary in RSheetApp.
const DSheet = React.lazy(async () => {
  try {
    const mod = await import('@fileverse-dev/dsheet')
    // dSheet exports may vary — handle both default and named exports
    return { default: (mod as any).DSheet ?? (mod as any).default ?? mod }
  } catch (e) {
    // If the package is missing, render an inline error panel instead of
    // letting the whole app crash.
    console.error('Failed to load dSheet:', e)
    return { default: () => <div style={{ padding: 20, color: '#ef4444' }}>
      Failed to load @fileverse-dev/dsheet. Run: npm install @fileverse-dev/dsheet
    </div> }
  }
})
/**
 * rSheet POC shell: a header bar (collaboration toggle, last-change time,
 * sheet id) above a lazy-loaded dSheet spreadsheet.
 *
 * State:
 * - sheetId: random per-mount id (`rsheet-` + first 8 chars of a UUID)
 * - isCollaborative: toggles both dSheet collaborative mode and WebRTC sync
 * - lastChange: wall-clock time of the most recent onChange, shown in header
 */
export function RSheetApp() {
  const [sheetId] = useState(() => `rsheet-${crypto.randomUUID().slice(0, 8)}`)
  const [isCollaborative, setIsCollaborative] = useState(false)
  const [lastChange, setLastChange] = useState<string>('')

  // dSheet change callback — records only the timestamp; the payload shape is
  // opaque here (logged by typeof), so no data is persisted in this POC.
  const handleChange = useCallback((data: unknown) => {
    setLastChange(new Date().toLocaleTimeString())
    // In production: save metadata to Automerge document
    console.log('[rSheet] Data changed:', typeof data)
  }, [])

  return (
    <div style={{ height: '100vh', display: 'flex', flexDirection: 'column' }}>
      {/* Header */}
      <header style={{
        padding: '12px 20px',
        background: '#1a1b23',
        borderBottom: '1px solid #27272a',
        display: 'flex',
        alignItems: 'center',
        gap: 16,
      }}>
        <h1 style={{ fontSize: 18, fontWeight: 600 }}>rSheet</h1>
        <span style={{ fontSize: 12, color: '#71717a' }}>
          Powered by @fileverse-dev/dsheet
        </span>
        <div style={{ flex: 1 }} />
        <label style={{ fontSize: 13, display: 'flex', alignItems: 'center', gap: 6 }}>
          <input
            type="checkbox"
            checked={isCollaborative}
            onChange={e => setIsCollaborative(e.target.checked)}
          />
          Collaborative mode
        </label>
        {lastChange && (
          <span style={{ fontSize: 12, color: '#71717a' }}>
            Last change: {lastChange}
          </span>
        )}
        <span style={{ fontSize: 11, color: '#3f3f46', fontFamily: 'monospace' }}>
          ID: {sheetId}
        </span>
      </header>
      {/* Spreadsheet */}
      <div style={{ flex: 1, overflow: 'hidden' }}>
        <React.Suspense fallback={
          <div style={{
            display: 'flex', alignItems: 'center', justifyContent: 'center',
            height: '100%', color: '#71717a',
          }}>
            Loading spreadsheet component...
          </div>
        }>
          <DSheet
            isAuthorized={true}
            dsheetId={sheetId}
            isCollaborative={isCollaborative}
            enableWebrtc={isCollaborative}
            enableIndexeddbSync={true}
            onChange={handleChange}
          />
        </React.Suspense>
      </div>
    </div>
  )
}

View File

@ -0,0 +1,9 @@
// Entry point: mounts the rSheet POC into the #root element from index.html.
import React from 'react'
import { createRoot } from 'react-dom/client'
import { RSheetApp } from './RSheetApp'

// StrictMode double-invokes render/effects in dev to surface side-effect bugs.
createRoot(document.getElementById('root')!).render(
  <React.StrictMode>
    <RSheetApp />
  </React.StrictMode>
)

View File

@ -0,0 +1,15 @@
{
"compilerOptions": {
"target": "ES2022",
"lib": ["ES2022", "DOM", "DOM.Iterable"],
"module": "ESNext",
"moduleResolution": "bundler",
"jsx": "react-jsx",
"strict": true,
"esModuleInterop": true,
"skipLibCheck": true,
"outDir": "dist",
"rootDir": "src"
},
"include": ["src"]
}

View File

@ -0,0 +1,9 @@
import { defineConfig } from 'vite'
import react from '@vitejs/plugin-react'
// Vite dev-server configuration for the rSheet POC.
export default defineConfig({
  plugins: [react()],
  server: {
    // Fixed dev port. NOTE(review): the collab-server CORS allowlist permits
    // localhost:3000 and localhost:5173 but not 3001 — confirm this is intended.
    port: 3001,
  },
})

View File

@ -0,0 +1,20 @@
{
"name": "@rstack/ipfs-storage-poc",
"version": "0.1.0",
"private": true,
"description": "Encrypted file upload/download to IPFS for rStack",
"type": "module",
"scripts": {
"test": "tsx src/test.ts",
"test:live": "tsx src/test-live.ts",
"demo": "tsx src/demo.ts"
},
"dependencies": {
"@noble/hashes": "^1.7.0",
"pinata": "^1.5.0"
},
"devDependencies": {
"tsx": "^4.19.0",
"typescript": "^5.7.0"
}
}

View File

@ -0,0 +1,279 @@
/**
* Encrypted IPFS file storage client for rStack
*
 * Flow: encrypt locally → upload to IPFS → store CID + key in document
* Default backend: self-hosted kubo at ipfs.jeffemmett.com
* Fallback: Pinata (managed) if needed
*/
// ─── Encryption (reuses crypto-eval primitives) ───
/** Produce a fresh random 256-bit (32-byte) AES key for a single file. */
async function generateFileKey(): Promise<Uint8Array> {
  // getRandomValues fills and returns the same typed array, from the CSPRNG.
  return crypto.getRandomValues(new Uint8Array(32))
}
/**
 * AES-256-GCM encrypt `data` with the raw 32-byte `key`.
 * Output layout: 12-byte random IV || ciphertext (includes 16-byte auth tag).
 */
async function encryptFile(key: Uint8Array, data: Uint8Array): Promise<Uint8Array> {
  const iv = crypto.getRandomValues(new Uint8Array(12))
  const aesKey = await crypto.subtle.importKey('raw', key, 'AES-GCM', false, ['encrypt'])
  const sealed = new Uint8Array(await crypto.subtle.encrypt({ name: 'AES-GCM', iv }, aesKey, data))
  // Pack IV in front of the ciphertext so decryptFile can recover it.
  const packed = new Uint8Array(iv.length + sealed.length)
  packed.set(iv, 0)
  packed.set(sealed, iv.length)
  return packed
}
/**
 * Inverse of encryptFile: split the 12-byte IV off the front, then
 * AES-256-GCM decrypt. Throws if the GCM auth tag fails (wrong key / tamper).
 */
async function decryptFile(key: Uint8Array, encrypted: Uint8Array): Promise<Uint8Array> {
  const iv = encrypted.slice(0, 12)
  const body = encrypted.slice(12)
  const aesKey = await crypto.subtle.importKey('raw', key, 'AES-GCM', false, ['decrypt'])
  return new Uint8Array(await crypto.subtle.decrypt({ name: 'AES-GCM', iv }, aesKey, body))
}
// ─── IPFS Storage Backends ───
export interface IPFSBackend {
upload(data: Uint8Array, filename: string): Promise<string> // returns CID
download(cid: string): Promise<Uint8Array>
unpin(cid: string): Promise<void>
}
/** Pinata managed IPFS pinning (fallback backend; primary is self-hosted kubo). */
export class PinataBackend implements IPFSBackend {
  private jwt: string      // Pinata API JWT, sent as a Bearer token
  private gateway: string  // download base URL, including the /ipfs path segment

  constructor(jwt: string, gateway?: string) {
    this.jwt = jwt
    // Defaults to the self-hosted gateway; any public IPFS gateway works for reads.
    this.gateway = gateway ?? 'https://ipfs.jeffemmett.com/ipfs'
  }

  /** Pin `data` under `filename` via Pinata's pinFileToIPFS endpoint; returns the CID. */
  async upload(data: Uint8Array, filename: string): Promise<string> {
    const blob = new Blob([data])
    const formData = new FormData()
    formData.append('file', blob, filename)
    formData.append('pinataMetadata', JSON.stringify({ name: filename }))
    const response = await fetch('https://api.pinata.cloud/pinning/pinFileToIPFS', {
      method: 'POST',
      headers: { Authorization: `Bearer ${this.jwt}` },
      body: formData,
    })
    if (!response.ok) {
      throw new Error(`Pinata upload failed: ${response.status} ${await response.text()}`)
    }
    const result = await response.json() as { IpfsHash: string }
    return result.IpfsHash
  }

  /** Fetch the raw (still-encrypted) bytes for `cid` from the configured gateway. */
  async download(cid: string): Promise<Uint8Array> {
    const response = await fetch(`${this.gateway}/${cid}`)
    if (!response.ok) {
      throw new Error(`Pinata download failed: ${response.status}`)
    }
    return new Uint8Array(await response.arrayBuffer())
  }

  /** Unpin `cid`. Best-effort: the HTTP status is deliberately not checked. */
  async unpin(cid: string): Promise<void> {
    await fetch(`https://api.pinata.cloud/pinning/unpin/${cid}`, {
      method: 'DELETE',
      headers: { Authorization: `Bearer ${this.jwt}` },
    })
  }
}
/** Self-hosted kubo (go-ipfs) HTTP API */
export class KuboBackend implements IPFSBackend {
  private apiUrl: string                   // kubo RPC API base (conventionally port 5001)
  private gatewayUrl: string               // read-only HTTP gateway base (conventionally port 8080)
  private headers: Record<string, string>  // optional Bearer auth for the RPC API

  constructor(apiUrl: string, gatewayUrl?: string, authToken?: string) {
    this.apiUrl = apiUrl
    // Fallback derives the gateway from the API URL by swapping ports.
    // NOTE(review): this is a no-op when apiUrl carries no literal ':5001'
    // (e.g. 'https://ipfs-api.jeffemmett.com'), leaving gatewayUrl === apiUrl.
    // Pass gatewayUrl explicitly in that case — fromEnv() does.
    this.gatewayUrl = gatewayUrl ?? apiUrl.replace(':5001', ':8080')
    this.headers = authToken ? { Authorization: `Bearer ${authToken}` } : {}
  }

  /** Create KuboBackend from IPFS_API_URL / IPFS_GATEWAY_URL / IPFS_AUTH_TOKEN env vars. */
  static fromEnv(): KuboBackend {
    const apiUrl = process.env.IPFS_API_URL || 'https://ipfs-api.jeffemmett.com'
    const gatewayUrl = process.env.IPFS_GATEWAY_URL || 'https://ipfs.jeffemmett.com'
    const authToken = process.env.IPFS_AUTH_TOKEN
    return new KuboBackend(apiUrl, gatewayUrl, authToken)
  }

  /** Add + pin `data` via the kubo RPC API (/api/v0/add); returns the CID. */
  async upload(data: Uint8Array, filename: string): Promise<string> {
    const formData = new FormData()
    formData.append('file', new Blob([data]), filename)
    const response = await fetch(`${this.apiUrl}/api/v0/add?pin=true`, {
      method: 'POST',
      headers: this.headers,
      body: formData,
    })
    if (!response.ok) {
      throw new Error(`Kubo upload failed: ${response.status}`)
    }
    const result = await response.json() as { Hash: string }
    return result.Hash
  }

  /** Fetch the raw (still-encrypted) bytes for `cid` via the HTTP gateway. */
  async download(cid: string): Promise<Uint8Array> {
    const response = await fetch(`${this.gatewayUrl}/ipfs/${cid}`)
    if (!response.ok) {
      throw new Error(`Kubo download failed: ${response.status}`)
    }
    return new Uint8Array(await response.arrayBuffer())
  }

  /** Unpin `cid` via the RPC API. Best-effort: the HTTP status is deliberately not checked. */
  async unpin(cid: string): Promise<void> {
    await fetch(`${this.apiUrl}/api/v0/pin/rm?arg=${cid}`, {
      method: 'POST',
      headers: this.headers,
    })
  }

  /** Gateway base URL, for callers that build display URLs themselves. */
  getGatewayUrl(): string {
    return this.gatewayUrl
  }
}
// ─── Encrypted IPFS Client ───
export interface FileMetadata {
cid: string
encryptionKey: string // base64-encoded 32-byte AES key
filename: string
mimeType: string
size: number // original unencrypted size
encryptedSize: number
uploadedAt: number
}
/**
 * Client-side encrypt-then-store wrapper over an IPFSBackend.
 * Every file gets its own random AES-256 key; only ciphertext ever reaches IPFS.
 */
export class EncryptedIPFSClient {
  private backend: IPFSBackend

  constructor(backend: IPFSBackend) {
    this.backend = backend
  }

  /**
   * Encrypt and upload a file to IPFS.
   * Returns metadata including the CID and the base64 encryption key; the
   * caller must persist it (typically inside the doc-level-encrypted
   * document) or the content is unrecoverable.
   */
  async upload(
    data: Uint8Array,
    filename: string,
    mimeType: string
  ): Promise<FileMetadata> {
    // Generate per-file encryption key
    const fileKey = await generateFileKey()
    // Encrypt the file
    const encrypted = await encryptFile(fileKey, data)
    // Upload encrypted blob to IPFS.
    // FIX: the uploaded name was the literal string "$(unknown).enc" (broken
    // interpolation); every file was stored under the same bogus name. It now
    // derives from the caller-supplied filename.
    const cid = await this.backend.upload(encrypted, `${filename}.enc`)
    return {
      cid,
      encryptionKey: uint8ArrayToBase64(fileKey),
      filename,
      mimeType,
      size: data.byteLength,
      encryptedSize: encrypted.byteLength,
      uploadedAt: Date.now(),
    }
  }

  /**
   * Download and decrypt a file from IPFS.
   * Throws if the blob is missing or the key fails GCM authentication.
   */
  async download(metadata: FileMetadata): Promise<Uint8Array> {
    const fileKey = base64ToUint8Array(metadata.encryptionKey)
    const encrypted = await this.backend.download(metadata.cid)
    return decryptFile(fileKey, encrypted)
  }

  /** Remove (unpin) a file. Best-effort: content may persist on other IPFS nodes. */
  async remove(cid: string): Promise<void> {
    await this.backend.unpin(cid)
  }

  /**
   * Generate an IPFS gateway URL for the CID.
   * The URL serves the ENCRYPTED bytes — the client must decrypt.
   */
  gatewayUrl(cid: string, gateway?: string): string {
    return `${gateway ?? 'https://ipfs.jeffemmett.com/ipfs'}/${cid}`
  }
}
// ─── TipTap Integration Types ───
/**
* TipTap image node attributes for IPFS-backed images
* Store these in the ProseMirror document
*/
export interface IPFSImageAttrs {
src: string // IPFS gateway URL (encrypted content)
cid: string // IPFS CID
encKey: string // base64 encryption key (stored in document, encrypted at doc level)
alt?: string
title?: string
width?: number
height?: number
}
/**
 * Build TipTap image-node attributes from upload metadata.
 * `gateway` overrides the default self-hosted gateway base (including /ipfs);
 * `alt` falls back to the original filename.
 */
export function createImageAttrs(
  metadata: FileMetadata,
  gateway?: string,
  alt?: string
): IPFSImageAttrs {
  const base = gateway ?? 'https://ipfs.jeffemmett.com/ipfs'
  return {
    src: `${base}/${metadata.cid}`,
    cid: metadata.cid,
    encKey: metadata.encryptionKey,
    alt: alt ?? metadata.filename,
  }
}
// ─── Factory ───
/**
 * Convenience factory: an EncryptedIPFSClient backed by the self-hosted kubo
 * node, with both endpoints overridable for other deployments.
 */
export function createSelfHostedClient(
  apiUrl = 'https://ipfs-api.jeffemmett.com',
  gatewayUrl = 'https://ipfs.jeffemmett.com'
): EncryptedIPFSClient {
  const backend = new KuboBackend(apiUrl, gatewayUrl)
  return new EncryptedIPFSClient(backend)
}
// ─── Utilities ───
/** Encode raw bytes as standard base64 (btoa works in browsers and Node ≥16). */
function uint8ArrayToBase64(bytes: Uint8Array): string {
  const chars: string[] = []
  for (let i = 0; i < bytes.length; i++) {
    chars.push(String.fromCharCode(bytes[i]))
  }
  return btoa(chars.join(''))
}
/** Decode a standard-base64 string back into raw bytes. */
function base64ToUint8Array(base64: string): Uint8Array {
  const binary = atob(base64)
  // Map each char of the decoded binary string to its byte value.
  return Uint8Array.from(binary, ch => ch.charCodeAt(0))
}

View File

@ -0,0 +1,86 @@
/**
* Live integration test for encrypted IPFS storage
 * Requires a running kubo node — skips if IPFS_API_URL not set
*
* Usage:
* IPFS_API_URL=https://ipfs-api.jeffemmett.com \
* IPFS_GATEWAY_URL=https://ipfs.jeffemmett.com \
* npx tsx src/test-live.ts
*/
import { EncryptedIPFSClient, KuboBackend } from './ipfs-client.js'
const encoder = new TextEncoder()
const decoder = new TextDecoder()
if (!process.env.IPFS_API_URL) {
console.log('IPFS_API_URL not set — skipping live tests')
process.exit(0)
}
const backend = KuboBackend.fromEnv()
const client = new EncryptedIPFSClient(backend)
// Live roundtrip: upload a timestamped string through the real backend,
// download + decrypt it, compare, then unpin. Returns true on match.
// NOTE(review): if download throws, the CID is never unpinned — consider
// wrapping cleanup in try/finally.
async function testLiveRoundtrip() {
  console.log('\n--- Live Test: Encrypted Upload/Download Roundtrip ---')
  const content = `Live test @ ${new Date().toISOString()}`
  const data = encoder.encode(content)
  // Upload
  const metadata = await client.upload(data, 'live-test.txt', 'text/plain')
  console.log(` Uploaded: CID=${metadata.cid}`)
  console.log(` Size: ${metadata.size}B → ${metadata.encryptedSize}B encrypted`)
  // Download via gateway
  const decrypted = await client.download(metadata)
  const result = decoder.decode(decrypted)
  console.log(` Decrypted: "${result}"`)
  const pass = result === content
  console.log(` Result: ${pass ? 'PASS' : 'FAIL'}`)
  // Cleanup: unpin
  await client.remove(metadata.cid)
  console.log(` Unpinned: ${metadata.cid}`)
  return pass
}
// Verifies the public gateway serves the uploaded CID over plain HTTP(S).
// Only checks response.ok — the body is ciphertext and is not validated here.
async function testLiveGatewayUrl() {
  console.log('\n--- Live Test: Gateway URL Access ---')
  const data = encoder.encode('gateway-test')
  const metadata = await client.upload(data, 'gw-test.txt', 'text/plain')
  const gatewayUrl = `${process.env.IPFS_GATEWAY_URL}/ipfs/${metadata.cid}`
  console.log(` Gateway URL: ${gatewayUrl}`)
  const response = await fetch(gatewayUrl)
  const pass = response.ok
  console.log(` Fetch status: ${response.status}`)
  console.log(` Result: ${pass ? 'PASS' : 'FAIL'}`)
  await client.remove(metadata.cid)
  return pass
}
// Runs both live tests sequentially; process exit code 0 only if all pass.
async function main() {
  console.log('=== Live IPFS Integration Tests ===')
  console.log(`API: ${process.env.IPFS_API_URL}`)
  console.log(`Gateway: ${process.env.IPFS_GATEWAY_URL}`)
  const results = [
    await testLiveRoundtrip(),
    await testLiveGatewayUrl(),
  ]
  const passed = results.filter(Boolean).length
  console.log(`\n=== Results: ${passed}/${results.length} passed ===`)
  process.exit(passed === results.length ? 0 : 1)
}

// Any unexpected rejection (network down, bad env) exits non-zero.
main().catch(err => {
  console.error('Live test error:', err)
  process.exit(1)
})

View File

@ -0,0 +1,206 @@
/**
* End-to-end test for encrypted IPFS storage
 * Tests: encrypt → upload → download → decrypt roundtrip
*
* Uses a mock IPFS backend (in-memory) for testing without network
*/
import {
EncryptedIPFSClient,
type IPFSBackend,
type FileMetadata,
createImageAttrs,
} from './ipfs-client.js'
// ─── Mock IPFS Backend (in-memory) ───
/**
 * In-memory stand-in for a real IPFS node: blobs live in a Map keyed by a
 * deterministic fake CID, so roundtrip tests run without any network.
 */
class MockIPFSBackend implements IPFSBackend {
  private store = new Map<string, Uint8Array>()
  private counter = 0

  async upload(data: Uint8Array, filename: string): Promise<string> {
    const cid = `Qm${(++this.counter).toString().padStart(44, 'a')}` // fake CID
    this.store.set(cid, new Uint8Array(data)) // clone
    // FIX: the log previously read "Pinned $(unknown)${cid}" — a stray
    // "$(unknown)" interpolation artifact prefixed every CID; removed.
    console.log(` [mock-ipfs] Pinned ${cid} (${data.byteLength} bytes)`)
    return cid
  }

  async download(cid: string): Promise<Uint8Array> {
    const data = this.store.get(cid)
    if (!data) throw new Error(`CID not found: ${cid}`)
    console.log(` [mock-ipfs] Fetched ${cid} (${data.byteLength} bytes)`)
    return data
  }

  async unpin(cid: string): Promise<void> {
    this.store.delete(cid)
    console.log(` [mock-ipfs] Unpinned ${cid}`)
  }

  // Number of blobs currently pinned (used by the unpin assertions).
  get size(): number {
    return this.store.size
  }
}
// ─── Tests ───
const encoder = new TextEncoder()
const decoder = new TextDecoder()
// Small UTF-8 (incl. emoji) text roundtrip through encrypt/upload/download/decrypt.
async function testTextFileRoundtrip() {
  console.log('\n--- Test: Text File Roundtrip ---')
  const backend = new MockIPFSBackend()
  const client = new EncryptedIPFSClient(backend)
  const content = 'Hello, encrypted IPFS world! 🔒'
  const data = encoder.encode(content)
  // Upload
  const metadata = await client.upload(data, 'hello.txt', 'text/plain')
  console.log(` Uploaded: CID=${metadata.cid}`)
  console.log(` Original: ${metadata.size}B, Encrypted: ${metadata.encryptedSize}B`)
  console.log(` Key: ${metadata.encryptionKey.slice(0, 16)}...`)
  // Download
  const decrypted = await client.download(metadata)
  const result = decoder.decode(decrypted)
  console.log(` Decrypted: "${result}"`)
  const pass = result === content
  console.log(` Result: ${pass ? 'PASS ✓' : 'FAIL ✗'}`)
  return pass
}
// 1MB random-data roundtrip with timing and byte-for-byte verification.
async function testLargeFileRoundtrip() {
  console.log('\n--- Test: Large File (1MB) Roundtrip ---')
  const backend = new MockIPFSBackend()
  const client = new EncryptedIPFSClient(backend)
  // Generate 1MB of test data — getRandomValues has a 64KB per-call limit,
  // so fill in 64KB chunks.
  const size = 1_000_000
  const data = new Uint8Array(size)
  for (let offset = 0; offset < size; offset += 65536) {
    const chunk = Math.min(65536, size - offset)
    crypto.getRandomValues(data.subarray(offset, offset + chunk))
  }
  const start = performance.now()
  const metadata = await client.upload(data, 'large-file.bin', 'application/octet-stream')
  const uploadTime = performance.now() - start
  const start2 = performance.now()
  const decrypted = await client.download(metadata)
  const downloadTime = performance.now() - start2
  // Verify byte-for-byte equality
  const pass = data.length === decrypted.length && data.every((b, i) => b === decrypted[i])
  console.log(` Size: ${(size / 1024).toFixed(0)}KB`)
  console.log(` Encrypt+upload: ${uploadTime.toFixed(1)}ms`)
  console.log(` Download+decrypt: ${downloadTime.toFixed(1)}ms`)
  console.log(` Overhead: ${((metadata.encryptedSize - metadata.size) / metadata.size * 100).toFixed(1)}% (IV + auth tag)`)
  console.log(` Result: ${pass ? 'PASS ✓' : 'FAIL ✗'}`)
  return pass
}
// Verifies createImageAttrs wires upload metadata into TipTap image attributes
// (src contains the CID, encKey matches the upload key).
async function testImageMetadata() {
  console.log('\n--- Test: TipTap Image Attributes ---')
  const backend = new MockIPFSBackend()
  const client = new EncryptedIPFSClient(backend)
  // Simulate a small PNG — real PNG magic bytes followed by zero padding.
  const fakeImage = new Uint8Array([0x89, 0x50, 0x4E, 0x47, ...Array(100).fill(0)])
  const metadata = await client.upload(fakeImage, 'screenshot.png', 'image/png')
  const attrs = createImageAttrs(metadata, 'https://ipfs.jeffemmett.com', 'A screenshot')
  console.log(` Image attrs:`)
  console.log(` src: ${attrs.src}`)
  console.log(` cid: ${attrs.cid}`)
  console.log(` encKey: ${attrs.encKey.slice(0, 16)}...`)
  console.log(` alt: ${attrs.alt}`)
  const pass = attrs.src.includes(metadata.cid) && attrs.encKey === metadata.encryptionKey
  console.log(` Result: ${pass ? 'PASS ✓' : 'FAIL ✗'}`)
  return pass
}
// Uploads three files and checks: each gets a unique encryption key, each
// decrypts independently, and unpinning one leaves the other two stored.
async function testMultipleFiles() {
  console.log('\n--- Test: Multiple Files with Independent Keys ---')
  const backend = new MockIPFSBackend()
  const client = new EncryptedIPFSClient(backend)
  const files = [
    { name: 'note1.md', content: '# Meeting Notes\nDiscussed token allocation' },
    { name: 'note2.md', content: '# Research\nFileverse integration plan' },
    { name: 'budget.csv', content: 'item,amount\ninfra,500\ndev,2000' },
  ]
  const metadatas: FileMetadata[] = []
  for (const file of files) {
    const meta = await client.upload(encoder.encode(file.content), file.name, 'text/plain')
    metadatas.push(meta)
  }
  // Verify each file has a unique key
  const keys = new Set(metadatas.map(m => m.encryptionKey))
  const uniqueKeys = keys.size === files.length
  console.log(` Unique keys: ${uniqueKeys ? 'PASS ✓' : 'FAIL ✗'} (${keys.size}/${files.length})`)
  // Verify each can be independently decrypted
  let allCorrect = true
  for (let i = 0; i < files.length; i++) {
    const decrypted = decoder.decode(await client.download(metadatas[i]))
    if (decrypted !== files[i].content) {
      console.log(` File ${files[i].name}: FAIL ✗`)
      allCorrect = false
    }
  }
  console.log(` Independent decryption: ${allCorrect ? 'PASS ✓' : 'FAIL ✗'}`)
  // Verify unpin works — 3 uploaded, 1 removed, so 2 must remain in the mock store.
  await client.remove(metadatas[0].cid)
  console.log(` Unpin: ${backend.size === 2 ? 'PASS ✓' : 'FAIL ✗'}`)
  return uniqueKeys && allCorrect && backend.size === 2
}
// Negative test: decryption with a wrong key must throw (GCM auth failure).
async function testWrongKeyFails() {
  console.log('\n--- Test: Wrong Key Rejection ---')
  const backend = new MockIPFSBackend()
  const client = new EncryptedIPFSClient(backend)
  const data = encoder.encode('Secret content')
  const metadata = await client.upload(data, 'secret.txt', 'text/plain')
  // Tamper with the key — replace it with the base64 of 32 zero bytes
  // (matches the real random key only with negligible probability).
  const tampered: FileMetadata = { ...metadata, encryptionKey: btoa(String.fromCharCode(...new Uint8Array(32))) }
  try {
    await client.download(tampered)
    console.log(` Result: FAIL ✗ (should have thrown)`)
    return false
  } catch (e) {
    console.log(` Correctly rejected wrong key: ${(e as Error).message.slice(0, 50)}`)
    console.log(` Result: PASS ✓`)
    return true
  }
}
// ─── Run All ───
// Runs all mock-backend tests sequentially; exit code 0 only if all pass.
async function main() {
  console.log('=== Encrypted IPFS Storage Tests ===')
  const results = [
    await testTextFileRoundtrip(),
    await testLargeFileRoundtrip(),
    await testImageMetadata(),
    await testMultipleFiles(),
    await testWrongKeyFails(),
  ]
  const passed = results.filter(Boolean).length
  console.log(`\n=== Results: ${passed}/${results.length} passed ===`)
  process.exit(passed === results.length ? 0 : 1)
}

// NOTE(review): a rejection here only logs — the process exits 0 on an
// unexpected error; consider process.exit(1) in the catch.
main().catch(console.error)

View File

@ -0,0 +1,138 @@
/**
* TipTap Image Extension for IPFS-backed encrypted images
*
* This is a conceptual implementation showing how to integrate encrypted
* IPFS images into TipTap's editor. In production, this would be a proper
* TipTap extension that handles:
*
 * 1. Upload: User pastes/drops image → encrypt → upload to IPFS → insert node
 * 2. Render: Fetch CID → decrypt → create blob URL → display
* 3. Cleanup: Revoke blob URLs on node removal
*
* Usage in rSpace/rNotes:
*
* ```typescript
* import { Extension } from '@tiptap/core'
* import { Plugin } from '@tiptap/pm/state'
* import { EncryptedIPFSClient, type IPFSImageAttrs } from './ipfs-client'
*
* export const IPFSImage = Extension.create({
* name: 'ipfsImage',
* addProseMirrorPlugins() {
* return [
* new Plugin({
* props: {
* handleDrop: (view, event) => { ... },
* handlePaste: (view, event) => { ... },
* }
* })
* ]
* }
* })
* ```
*/
import type { EncryptedIPFSClient, FileMetadata, IPFSImageAttrs } from './ipfs-client.js'
/**
 * Handles image upload from file input, paste, or drag-and-drop.
 *
 * @param client - EncryptedIPFSClient instance
 * @param file - File object from input/paste/drop
 * @param gateway - IPFS gateway base URL, including the /ipfs path segment
 * @returns IPFSImageAttrs to store in ProseMirror document
 */
export async function handleImageUpload(
  client: EncryptedIPFSClient,
  file: File,
  gateway?: string
): Promise<IPFSImageAttrs> {
  // Read file as Uint8Array
  const buffer = await file.arrayBuffer()
  const data = new Uint8Array(buffer)
  // Encrypt and upload
  const metadata = await client.upload(data, file.name, file.type)
  return {
    // FIX: the default gateway was the leftover 'gateway.pinata.cloud' URL;
    // use the self-hosted gateway so it matches createImageAttrs and
    // EncryptedIPFSClient.gatewayUrl in ipfs-client.
    src: `${gateway ?? 'https://ipfs.jeffemmett.com/ipfs'}/${metadata.cid}`,
    cid: metadata.cid,
    encKey: metadata.encryptionKey,
    alt: file.name,
  }
}
/**
 * Decrypts an IPFS image and creates a blob URL for display.
 * Call URL.revokeObjectURL() when the image is removed from the editor.
 *
 * @param client - EncryptedIPFSClient instance
 * @param attrs - Image attributes from ProseMirror document
 * @returns Blob URL for <img src="...">
 */
export async function resolveImage(
  client: EncryptedIPFSClient,
  attrs: IPFSImageAttrs
): Promise<string> {
  // Minimal metadata — only cid/encryptionKey drive the download path here
  const metadata: FileMetadata = {
    cid: attrs.cid,
    encryptionKey: attrs.encKey,
    filename: attrs.alt ?? 'image',
    mimeType: 'image/png', // NOTE(review): assumed PNG — could be stored in attrs for accuracy
    size: 0,
    encryptedSize: 0,
    uploadedAt: 0,
  }
  const bytes = await client.download(metadata)
  return URL.createObjectURL(new Blob([bytes], { type: 'image/png' }))
}
/**
 * Image cache for the editor session.
 * Maps CID → blob URL to avoid re-downloading the same image.
 */
export class ImageCache {
  // cid → resolved blob URL
  private cache = new Map<string, string>()
  // cid → in-flight resolution; deduplicates concurrent requests for one CID
  private pending = new Map<string, Promise<string>>()
  constructor(private client: EncryptedIPFSClient) {}
  /**
   * Returns a blob URL for the image, downloading and decrypting on first use.
   * Concurrent calls for the same CID share a single in-flight request.
   * A failed download is NOT cached: the pending slot is always cleared so
   * the next call can retry.
   */
  async resolve(attrs: IPFSImageAttrs): Promise<string> {
    const cached = this.cache.get(attrs.cid)
    if (cached !== undefined) return cached
    const inFlight = this.pending.get(attrs.cid)
    if (inFlight) return inFlight
    const promise = resolveImage(this.client, attrs)
      .then(url => {
        this.cache.set(attrs.cid, url)
        return url
      })
      .finally(() => {
        // Bug fix: the original deleted the pending entry only on success,
        // so a rejected download left a permanently rejected promise cached
        // for that CID and the image could never be retried.
        this.pending.delete(attrs.cid)
      })
    this.pending.set(attrs.cid, promise)
    return promise
  }
  /** Revokes and forgets the blob URL for one CID (no-op if not cached). */
  revoke(cid: string): void {
    const url = this.cache.get(cid)
    if (url) {
      URL.revokeObjectURL(url)
      this.cache.delete(cid)
    }
  }
  /** Revokes every cached blob URL and clears the cache. */
  revokeAll(): void {
    for (const url of this.cache.values()) {
      URL.revokeObjectURL(url)
    }
    this.cache.clear()
  }
}

View File

@ -0,0 +1,13 @@
{
"compilerOptions": {
"target": "ES2022",
"module": "ES2022",
"moduleResolution": "bundler",
"strict": true,
"esModuleInterop": true,
"skipLibCheck": true,
"outDir": "dist",
"rootDir": "src"
},
"include": ["src"]
}

597
poc/ucan-bridge/package-lock.json generated Normal file
View File

@ -0,0 +1,597 @@
{
"name": "@rstack/ucan-bridge",
"version": "0.1.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@rstack/ucan-bridge",
"version": "0.1.0",
"dependencies": {
"@noble/ed25519": "^2.2.0",
"@noble/hashes": "^1.7.0"
},
"devDependencies": {
"tsx": "^4.19.0",
"typescript": "^5.7.0"
}
},
"node_modules/@esbuild/aix-ppc64": {
"version": "0.27.4",
"resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.4.tgz",
"integrity": "sha512-cQPwL2mp2nSmHHJlCyoXgHGhbEPMrEEU5xhkcy3Hs/O7nGZqEpZ2sUtLaL9MORLtDfRvVl2/3PAuEkYZH0Ty8Q==",
"cpu": [
"ppc64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"aix"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/android-arm": {
"version": "0.27.4",
"resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.4.tgz",
"integrity": "sha512-X9bUgvxiC8CHAGKYufLIHGXPJWnr0OCdR0anD2e21vdvgCI8lIfqFbnoeOz7lBjdrAGUhqLZLcQo6MLhTO2DKQ==",
"cpu": [
"arm"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"android"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/android-arm64": {
"version": "0.27.4",
"resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.4.tgz",
"integrity": "sha512-gdLscB7v75wRfu7QSm/zg6Rx29VLdy9eTr2t44sfTW7CxwAtQghZ4ZnqHk3/ogz7xao0QAgrkradbBzcqFPasw==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"android"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/android-x64": {
"version": "0.27.4",
"resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.4.tgz",
"integrity": "sha512-PzPFnBNVF292sfpfhiyiXCGSn9HZg5BcAz+ivBuSsl6Rk4ga1oEXAamhOXRFyMcjwr2DVtm40G65N3GLeH1Lvw==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"android"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/darwin-arm64": {
"version": "0.27.4",
"resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.4.tgz",
"integrity": "sha512-b7xaGIwdJlht8ZFCvMkpDN6uiSmnxxK56N2GDTMYPr2/gzvfdQN8rTfBsvVKmIVY/X7EM+/hJKEIbbHs9oA4tQ==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/darwin-x64": {
"version": "0.27.4",
"resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.4.tgz",
"integrity": "sha512-sR+OiKLwd15nmCdqpXMnuJ9W2kpy0KigzqScqHI3Hqwr7IXxBp3Yva+yJwoqh7rE8V77tdoheRYataNKL4QrPw==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/freebsd-arm64": {
"version": "0.27.4",
"resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.4.tgz",
"integrity": "sha512-jnfpKe+p79tCnm4GVav68A7tUFeKQwQyLgESwEAUzyxk/TJr4QdGog9sqWNcUbr/bZt/O/HXouspuQDd9JxFSw==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"freebsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/freebsd-x64": {
"version": "0.27.4",
"resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.4.tgz",
"integrity": "sha512-2kb4ceA/CpfUrIcTUl1wrP/9ad9Atrp5J94Lq69w7UwOMolPIGrfLSvAKJp0RTvkPPyn6CIWrNy13kyLikZRZQ==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"freebsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-arm": {
"version": "0.27.4",
"resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.4.tgz",
"integrity": "sha512-aBYgcIxX/wd5n2ys0yESGeYMGF+pv6g0DhZr3G1ZG4jMfruU9Tl1i2Z+Wnj9/KjGz1lTLCcorqE2viePZqj4Eg==",
"cpu": [
"arm"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-arm64": {
"version": "0.27.4",
"resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.4.tgz",
"integrity": "sha512-7nQOttdzVGth1iz57kxg9uCz57dxQLHWxopL6mYuYthohPKEK0vU0C3O21CcBK6KDlkYVcnDXY099HcCDXd9dA==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-ia32": {
"version": "0.27.4",
"resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.4.tgz",
"integrity": "sha512-oPtixtAIzgvzYcKBQM/qZ3R+9TEUd1aNJQu0HhGyqtx6oS7qTpvjheIWBbes4+qu1bNlo2V4cbkISr8q6gRBFA==",
"cpu": [
"ia32"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-loong64": {
"version": "0.27.4",
"resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.4.tgz",
"integrity": "sha512-8mL/vh8qeCoRcFH2nM8wm5uJP+ZcVYGGayMavi8GmRJjuI3g1v6Z7Ni0JJKAJW+m0EtUuARb6Lmp4hMjzCBWzA==",
"cpu": [
"loong64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-mips64el": {
"version": "0.27.4",
"resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.4.tgz",
"integrity": "sha512-1RdrWFFiiLIW7LQq9Q2NES+HiD4NyT8Itj9AUeCl0IVCA459WnPhREKgwrpaIfTOe+/2rdntisegiPWn/r/aAw==",
"cpu": [
"mips64el"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-ppc64": {
"version": "0.27.4",
"resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.4.tgz",
"integrity": "sha512-tLCwNG47l3sd9lpfyx9LAGEGItCUeRCWeAx6x2Jmbav65nAwoPXfewtAdtbtit/pJFLUWOhpv0FpS6GQAmPrHA==",
"cpu": [
"ppc64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-riscv64": {
"version": "0.27.4",
"resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.4.tgz",
"integrity": "sha512-BnASypppbUWyqjd1KIpU4AUBiIhVr6YlHx/cnPgqEkNoVOhHg+YiSVxM1RLfiy4t9cAulbRGTNCKOcqHrEQLIw==",
"cpu": [
"riscv64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-s390x": {
"version": "0.27.4",
"resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.4.tgz",
"integrity": "sha512-+eUqgb/Z7vxVLezG8bVB9SfBie89gMueS+I0xYh2tJdw3vqA/0ImZJ2ROeWwVJN59ihBeZ7Tu92dF/5dy5FttA==",
"cpu": [
"s390x"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/linux-x64": {
"version": "0.27.4",
"resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.4.tgz",
"integrity": "sha512-S5qOXrKV8BQEzJPVxAwnryi2+Iq5pB40gTEIT69BQONqR7JH1EPIcQ/Uiv9mCnn05jff9umq/5nqzxlqTOg9NA==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"linux"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/netbsd-arm64": {
"version": "0.27.4",
"resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.4.tgz",
"integrity": "sha512-xHT8X4sb0GS8qTqiwzHqpY00C95DPAq7nAwX35Ie/s+LO9830hrMd3oX0ZMKLvy7vsonee73x0lmcdOVXFzd6Q==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"netbsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/netbsd-x64": {
"version": "0.27.4",
"resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.4.tgz",
"integrity": "sha512-RugOvOdXfdyi5Tyv40kgQnI0byv66BFgAqjdgtAKqHoZTbTF2QqfQrFwa7cHEORJf6X2ht+l9ABLMP0dnKYsgg==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"netbsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/openbsd-arm64": {
"version": "0.27.4",
"resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.4.tgz",
"integrity": "sha512-2MyL3IAaTX+1/qP0O1SwskwcwCoOI4kV2IBX1xYnDDqthmq5ArrW94qSIKCAuRraMgPOmG0RDTA74mzYNQA9ow==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"openbsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/openbsd-x64": {
"version": "0.27.4",
"resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.4.tgz",
"integrity": "sha512-u8fg/jQ5aQDfsnIV6+KwLOf1CmJnfu1ShpwqdwC0uA7ZPwFws55Ngc12vBdeUdnuWoQYx/SOQLGDcdlfXhYmXQ==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"openbsd"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/openharmony-arm64": {
"version": "0.27.4",
"resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.4.tgz",
"integrity": "sha512-JkTZrl6VbyO8lDQO3yv26nNr2RM2yZzNrNHEsj9bm6dOwwu9OYN28CjzZkH57bh4w0I2F7IodpQvUAEd1mbWXg==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"openharmony"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/sunos-x64": {
"version": "0.27.4",
"resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.4.tgz",
"integrity": "sha512-/gOzgaewZJfeJTlsWhvUEmUG4tWEY2Spp5M20INYRg2ZKl9QPO3QEEgPeRtLjEWSW8FilRNacPOg8R1uaYkA6g==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"sunos"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/win32-arm64": {
"version": "0.27.4",
"resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.4.tgz",
"integrity": "sha512-Z9SExBg2y32smoDQdf1HRwHRt6vAHLXcxD2uGgO/v2jK7Y718Ix4ndsbNMU/+1Qiem9OiOdaqitioZwxivhXYg==",
"cpu": [
"arm64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/win32-ia32": {
"version": "0.27.4",
"resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.4.tgz",
"integrity": "sha512-DAyGLS0Jz5G5iixEbMHi5KdiApqHBWMGzTtMiJ72ZOLhbu/bzxgAe8Ue8CTS3n3HbIUHQz/L51yMdGMeoxXNJw==",
"cpu": [
"ia32"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@esbuild/win32-x64": {
"version": "0.27.4",
"resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.4.tgz",
"integrity": "sha512-+knoa0BDoeXgkNvvV1vvbZX4+hizelrkwmGJBdT17t8FNPwG2lKemmuMZlmaNQ3ws3DKKCxpb4zRZEIp3UxFCg==",
"cpu": [
"x64"
],
"dev": true,
"license": "MIT",
"optional": true,
"os": [
"win32"
],
"engines": {
"node": ">=18"
}
},
"node_modules/@noble/ed25519": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/@noble/ed25519/-/ed25519-2.3.0.tgz",
"integrity": "sha512-M7dvXL2B92/M7dw9+gzuydL8qn/jiqNHaoR3Q+cb1q1GHV7uwE17WCyFMG+Y+TZb5izcaXk5TdJRrDUxHXL78A==",
"license": "MIT",
"funding": {
"url": "https://paulmillr.com/funding/"
}
},
"node_modules/@noble/hashes": {
"version": "1.8.0",
"resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.8.0.tgz",
"integrity": "sha512-jCs9ldd7NwzpgXDIf6P3+NrHh9/sD6CQdxHyjQI+h/6rDNo88ypBxxz45UDuZHz9r3tNz7N/VInSVoVdtXEI4A==",
"license": "MIT",
"engines": {
"node": "^14.21.3 || >=16"
},
"funding": {
"url": "https://paulmillr.com/funding/"
}
},
"node_modules/esbuild": {
"version": "0.27.4",
"resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.4.tgz",
"integrity": "sha512-Rq4vbHnYkK5fws5NF7MYTU68FPRE1ajX7heQ/8QXXWqNgqqJ/GkmmyxIzUnf2Sr/bakf8l54716CcMGHYhMrrQ==",
"dev": true,
"hasInstallScript": true,
"license": "MIT",
"bin": {
"esbuild": "bin/esbuild"
},
"engines": {
"node": ">=18"
},
"optionalDependencies": {
"@esbuild/aix-ppc64": "0.27.4",
"@esbuild/android-arm": "0.27.4",
"@esbuild/android-arm64": "0.27.4",
"@esbuild/android-x64": "0.27.4",
"@esbuild/darwin-arm64": "0.27.4",
"@esbuild/darwin-x64": "0.27.4",
"@esbuild/freebsd-arm64": "0.27.4",
"@esbuild/freebsd-x64": "0.27.4",
"@esbuild/linux-arm": "0.27.4",
"@esbuild/linux-arm64": "0.27.4",
"@esbuild/linux-ia32": "0.27.4",
"@esbuild/linux-loong64": "0.27.4",
"@esbuild/linux-mips64el": "0.27.4",
"@esbuild/linux-ppc64": "0.27.4",
"@esbuild/linux-riscv64": "0.27.4",
"@esbuild/linux-s390x": "0.27.4",
"@esbuild/linux-x64": "0.27.4",
"@esbuild/netbsd-arm64": "0.27.4",
"@esbuild/netbsd-x64": "0.27.4",
"@esbuild/openbsd-arm64": "0.27.4",
"@esbuild/openbsd-x64": "0.27.4",
"@esbuild/openharmony-arm64": "0.27.4",
"@esbuild/sunos-x64": "0.27.4",
"@esbuild/win32-arm64": "0.27.4",
"@esbuild/win32-ia32": "0.27.4",
"@esbuild/win32-x64": "0.27.4"
}
},
"node_modules/fsevents": {
"version": "2.3.3",
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz",
"integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==",
"dev": true,
"hasInstallScript": true,
"license": "MIT",
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": "^8.16.0 || ^10.6.0 || >=11.0.0"
}
},
"node_modules/get-tsconfig": {
"version": "4.13.7",
"resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.13.7.tgz",
"integrity": "sha512-7tN6rFgBlMgpBML5j8typ92BKFi2sFQvIdpAqLA2beia5avZDrMs0FLZiM5etShWq5irVyGcGMEA1jcDaK7A/Q==",
"dev": true,
"license": "MIT",
"dependencies": {
"resolve-pkg-maps": "^1.0.0"
},
"funding": {
"url": "https://github.com/privatenumber/get-tsconfig?sponsor=1"
}
},
"node_modules/resolve-pkg-maps": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz",
"integrity": "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==",
"dev": true,
"license": "MIT",
"funding": {
"url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1"
}
},
"node_modules/tsx": {
"version": "4.21.0",
"resolved": "https://registry.npmjs.org/tsx/-/tsx-4.21.0.tgz",
"integrity": "sha512-5C1sg4USs1lfG0GFb2RLXsdpXqBSEhAaA/0kPL01wxzpMqLILNxIxIOKiILz+cdg/pLnOUxFYOR5yhHU666wbw==",
"dev": true,
"license": "MIT",
"dependencies": {
"esbuild": "~0.27.0",
"get-tsconfig": "^4.7.5"
},
"bin": {
"tsx": "dist/cli.mjs"
},
"engines": {
"node": ">=18.0.0"
},
"optionalDependencies": {
"fsevents": "~2.3.3"
}
},
"node_modules/typescript": {
"version": "5.9.3",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz",
"integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==",
"dev": true,
"license": "Apache-2.0",
"bin": {
"tsc": "bin/tsc",
"tsserver": "bin/tsserver"
},
"engines": {
"node": ">=14.17"
}
}
}
}

View File

@ -0,0 +1,19 @@
{
"name": "@rstack/ucan-bridge",
"version": "0.1.0",
"private": true,
"description": "Bridge EncryptID DIDs to UCAN tokens for Fileverse collab-server auth",
"type": "module",
"scripts": {
"test": "tsx src/test.ts",
"generate-did": "tsx src/generate-did.ts"
},
"dependencies": {
"@noble/ed25519": "^2.2.0",
"@noble/hashes": "^1.7.0"
},
"devDependencies": {
"tsx": "^4.19.0",
"typescript": "^5.7.0"
}
}

130
poc/ucan-bridge/src/test.ts Normal file
View File

@ -0,0 +1,130 @@
/**
* Test UCAN bridge: generate key pairs, create tokens, verify signatures, delegate access
*/
import {
generateKeyPair,
didToPublicKey,
createCollaborationToken,
createOwnerToken,
delegateAccess,
verifyUCAN,
} from './ucan-bridge.js'
/**
 * Runs the full UCAN-bridge self-test suite and exits the process with
 * status 0 when every check passes, 1 otherwise.
 *
 * Covered: key generation, DID ↔ public-key roundtrip, collaboration and
 * owner token creation/verification, delegation with a proof chain,
 * forged-token rejection, and expired-token detection.
 */
async function main() {
  console.log('=== UCAN Bridge Tests ===\n')
  // ─── Test 1: Key pair generation ───
  console.log('--- Test: Key Pair Generation ---')
  const owner = generateKeyPair()
  const collaborator = generateKeyPair()
  console.log(` Owner DID: ${owner.did}`)
  console.log(` Collab DID: ${collaborator.did}`)
  console.log(` DIDs are unique: ${owner.did !== collaborator.did ? 'PASS' : 'FAIL'}`)
  // ─── Test 2: DID roundtrip ───
  console.log('\n--- Test: DID ↔ Public Key Roundtrip ---')
  const recoveredPubKey = didToPublicKey(owner.did)
  // Byte-wise comparison of the original vs. recovered public key
  const didRoundtrip = owner.publicKey.every((b, i) => b === recoveredPubKey[i])
  console.log(` Roundtrip: ${didRoundtrip ? 'PASS' : 'FAIL'}`)
  // ─── Test 3: Collaboration token ───
  console.log('\n--- Test: Collaboration Token ---')
  // Fixed sample DID used as the token audience (public key, not a secret)
  const serverDid = 'did:key:z6MkhaXgBZDvotDkL5257faiztiGiC2QtKLGpbnnEGta2doK'
  const docId = 'doc-abc-123'
  const collabToken = await createCollaborationToken(
    collaborator.privateKey,
    collaborator.did,
    serverDid,
    docId,
    3600
  )
  console.log(` Token: ${collabToken.slice(0, 60)}...`)
  console.log(` Parts: ${collabToken.split('.').length} (expected 3)`)
  const collabVerify = await verifyUCAN(collabToken)
  console.log(` Signature valid: ${collabVerify.valid ? 'PASS' : 'FAIL'}`)
  console.log(` Issuer: ${collabVerify.payload.iss.slice(0, 30)}...`)
  console.log(` Audience: ${collabVerify.payload.aud.slice(0, 30)}...`)
  console.log(` Capabilities: ${JSON.stringify(collabVerify.payload.att)}`)
  console.log(` Expires in: ${collabVerify.payload.exp - Math.floor(Date.now() / 1000)}s`)
  // ─── Test 4: Owner token ───
  console.log('\n--- Test: Owner Token ---')
  const ownerToken = await createOwnerToken(
    owner.privateKey,
    owner.did,
    serverDid,
    docId,
    7200
  )
  const ownerVerify = await verifyUCAN(ownerToken)
  console.log(` Signature valid: ${ownerVerify.valid ? 'PASS' : 'FAIL'}`)
  console.log(` Capabilities: ${JSON.stringify(ownerVerify.payload.att)}`)
  console.log(` Has wildcard: ${ownerVerify.payload.att[0]?.can === 'doc/*' ? 'PASS' : 'FAIL'}`)
  // ─── Test 5: Delegation ───
  console.log('\n--- Test: Access Delegation ---')
  const delegatedToken = await delegateAccess(
    owner.privateKey,
    owner.did,
    collaborator.did,
    docId,
    ['doc/READ'], // read-only access
    1800,
    ownerToken // chain from owner token
  )
  const delegateVerify = await verifyUCAN(delegatedToken)
  console.log(` Signature valid: ${delegateVerify.valid ? 'PASS' : 'FAIL'}`)
  console.log(` Issuer (owner): ${delegateVerify.payload.iss === owner.did ? 'PASS' : 'FAIL'}`)
  console.log(` Audience (collab): ${delegateVerify.payload.aud === collaborator.did ? 'PASS' : 'FAIL'}`)
  console.log(` Capabilities: ${JSON.stringify(delegateVerify.payload.att)}`)
  console.log(` Has proof chain: ${delegateVerify.payload.prf?.length === 1 ? 'PASS' : 'FAIL'}`)
  // ─── Test 6: Token with wrong key fails verification ───
  console.log('\n--- Test: Wrong Key Rejection ---')
  // Create token with collaborator's key but claim to be owner
  const fakeToken = await createCollaborationToken(
    collaborator.privateKey, // wrong key
    owner.did, // claim to be owner
    serverDid,
    docId
  )
  const fakeVerify = await verifyUCAN(fakeToken)
  console.log(` Forged token rejected: ${!fakeVerify.valid ? 'PASS' : 'FAIL'}`)
  // ─── Test 7: Expired token ───
  console.log('\n--- Test: Expired Token Detection ---')
  const expiredToken = await createCollaborationToken(
    collaborator.privateKey,
    collaborator.did,
    serverDid,
    docId,
    -1 // already expired
  )
  const expiredVerify = await verifyUCAN(expiredToken)
  // verifyUCAN checks only the signature; expiry is an application-level check
  const isExpired = expiredVerify.payload.exp < Math.floor(Date.now() / 1000)
  console.log(` Signature still valid: ${expiredVerify.valid ? 'PASS' : 'FAIL'} (sig is valid, expiry is app-level)`)
  console.log(` Token is expired: ${isExpired ? 'PASS' : 'FAIL'}`)
  // ─── Summary ───
  // One boolean per check above; the count drives the process exit code.
  const results = [
    didRoundtrip,
    collabVerify.valid,
    ownerVerify.valid,
    ownerVerify.payload.att[0]?.can === 'doc/*',
    delegateVerify.valid,
    delegateVerify.payload.iss === owner.did,
    delegateVerify.payload.aud === collaborator.did,
    !fakeVerify.valid,
    isExpired,
  ]
  const passed = results.filter(Boolean).length
  console.log(`\n=== Results: ${passed}/${results.length} passed ===`)
  process.exit(passed === results.length ? 0 : 1)
}
main().catch(console.error)

View File

@ -0,0 +1,272 @@
/**
* UCAN Bridge for rStack Fileverse Collaboration Server
*
* The Fileverse collab-server requires UCAN tokens for authentication.
* This bridge generates UCAN tokens from EncryptID DIDs, allowing
* rStack users to authenticate with the self-hosted collab server.
*
* UCAN (User Controlled Authorization Network) tokens are JWTs that
* encode capability-based permissions. They can be delegated without
* contacting a central server.
*
* Flow:
* 1. User authenticates with EncryptID (DID-based, passwordless)
* 2. Client generates an Ed25519 key pair (or derives from EncryptID)
* 3. Client creates a UCAN token for the collab-server
* 4. Client presents UCAN to collab-server WebSocket /auth endpoint
*/
import * as ed from '@noble/ed25519'
import { sha512 } from '@noble/hashes/sha512'
// Configure noble/ed25519 with a synchronous SHA-512 implementation.
// Concatenate all message chunks into one buffer and hash once. The original
// reduced pairwise with no initial value, which reallocated and re-copied the
// accumulator for every chunk (O(n²) copying) and threw a TypeError when
// called with an empty argument list.
ed.etc.sha512Sync = (...msgs) => {
  const totalLength = msgs.reduce((sum, msg) => sum + msg.length, 0)
  const joined = new Uint8Array(totalLength)
  let offset = 0
  for (const msg of msgs) {
    joined.set(msg, offset)
    offset += msg.length
  }
  return sha512(joined)
}
// ─── DID Key Utilities ───
// Method prefix for did:key identifiers.
const DID_KEY_PREFIX = 'did:key:'
// Multicodec prefix identifying an Ed25519 public key (0xed as a varint).
const ED25519_MULTICODEC = new Uint8Array([0xed, 0x01]) // varint for ed25519-pub
/** Generate a new Ed25519 key pair and its did:key */
export function generateKeyPair(): {
  privateKey: Uint8Array
  publicKey: Uint8Array
  did: string
} {
  const privateKey = ed.utils.randomPrivateKey()
  const publicKey = ed.getPublicKey(privateKey)
  // did:key format: "did:key:z" + base58btc(multicodec prefix || raw public key)
  const prefixed = new Uint8Array([...ED25519_MULTICODEC, ...publicKey])
  return {
    privateKey,
    publicKey,
    did: `${DID_KEY_PREFIX}z${base58btcEncode(prefixed)}`,
  }
}
/** Extract the raw Ed25519 public key bytes from a did:key string */
export function didToPublicKey(did: string): Uint8Array {
  const expectedPrefix = `${DID_KEY_PREFIX}z` // "z" marks base58btc multibase
  if (!did.startsWith(expectedPrefix)) {
    throw new Error(`Invalid did:key format: ${did}`)
  }
  const multicodecBytes = base58btcDecode(did.slice(expectedPrefix.length))
  // The first two bytes must be the Ed25519 multicodec varint (0xed 0x01)
  if (multicodecBytes[0] !== 0xed || multicodecBytes[1] !== 0x01) {
    throw new Error('Not an Ed25519 did:key')
  }
  return multicodecBytes.slice(2)
}
// ─── UCAN Token Creation ───
/**
 * UCAN JWT payload. Issuer/audience are DIDs, times are Unix seconds,
 * and `att` lists the granted capabilities.
 */
export interface UCANPayload {
  iss: string // Issuer DID
  aud: string // Audience DID (collab-server's DID)
  nbf?: number // Not before (Unix timestamp)
  exp: number // Expiration (Unix timestamp)
  att: UCANCapability[] // Attenuations (capabilities)
  prf?: string[] // Proofs (parent UCANs for delegation chains)
  fct?: Record<string, unknown>[] // Facts (arbitrary metadata)
}
/** A single capability: an action (`can`) permitted on a resource (`with`). */
export interface UCANCapability {
  with: string // Resource URI (e.g., "doc:*" or "doc:document-id")
  can: string // Action (e.g., "doc/UPDATE", "doc/READ")
}
/** Create a UCAN token for the collab-server */
export async function createCollaborationToken(
  privateKey: Uint8Array,
  issuerDid: string,
  serverDid: string,
  documentId: string,
  ttlSeconds: number = 3600
): Promise<string> {
  const issuedAt = Math.floor(Date.now() / 1000)
  const resource = `doc:${documentId}`
  // Collaborators get update + read on the single document
  return signUCAN(
    {
      iss: issuerDid,
      aud: serverDid,
      nbf: issuedAt,
      exp: issuedAt + ttlSeconds,
      att: ['doc/UPDATE', 'doc/READ'].map(can => ({ with: resource, can })),
    },
    privateKey
  )
}
/** Create a UCAN owner token (full permissions including commit/terminate) */
export async function createOwnerToken(
  privateKey: Uint8Array,
  issuerDid: string,
  serverDid: string,
  documentId: string,
  ttlSeconds: number = 3600
): Promise<string> {
  const issuedAt = Math.floor(Date.now() / 1000)
  // A single wildcard capability ("doc/*") covers every action on the doc
  return signUCAN(
    {
      iss: issuerDid,
      aud: serverDid,
      nbf: issuedAt,
      exp: issuedAt + ttlSeconds,
      att: [{ with: `doc:${documentId}`, can: 'doc/*' }],
    },
    privateKey
  )
}
/** Delegate a capability to another user's DID */
export async function delegateAccess(
  ownerPrivateKey: Uint8Array,
  ownerDid: string,
  delegateDid: string,
  documentId: string,
  capabilities: string[] = ['doc/UPDATE', 'doc/READ'],
  ttlSeconds: number = 3600,
  parentToken?: string
): Promise<string> {
  const issuedAt = Math.floor(Date.now() / 1000)
  const payload: UCANPayload = {
    iss: ownerDid,
    aud: delegateDid,
    nbf: issuedAt,
    exp: issuedAt + ttlSeconds,
    att: capabilities.map(can => ({ with: `doc:${documentId}`, can })),
  }
  // Attach the parent token as a proof so verifiers can walk the chain
  // (undefined prf is dropped by JSON.stringify either way)
  if (parentToken) payload.prf = [parentToken]
  return signUCAN(payload, ownerPrivateKey)
}
// ─── UCAN JWT Signing ───
/** Serialize header+payload as base64url JWT parts and sign with Ed25519. */
async function signUCAN(payload: UCANPayload, privateKey: Uint8Array): Promise<string> {
  const header = { alg: 'EdDSA', typ: 'JWT', ucv: '0.10.0' }
  // "<b64url(header)>.<b64url(payload)>" is the signing input per JWT
  const unsigned = [header, payload]
    .map(part => base64urlEncode(JSON.stringify(part)))
    .join('.')
  const sigBytes = await ed.signAsync(new TextEncoder().encode(unsigned), privateKey)
  return `${unsigned}.${base64urlEncodeBytes(sigBytes)}`
}
/** Verify a UCAN token's signature (for testing) */
export async function verifyUCAN(token: string): Promise<{
  valid: boolean
  payload: UCANPayload
}> {
  const [headerPart, payloadPart, signaturePart, ...rest] = token.split('.')
  // Exactly three dot-separated sections, like any compact JWT
  if (signaturePart === undefined || rest.length > 0) throw new Error('Invalid JWT format')
  const payload = JSON.parse(base64urlDecode(payloadPart)) as UCANPayload
  // The issuer's DID embeds the public key used to check the signature
  const issuerKey = didToPublicKey(payload.iss)
  const message = new TextEncoder().encode(`${headerPart}.${payloadPart}`)
  const valid = await ed.verifyAsync(base64urlDecodeBytes(signaturePart), message, issuerKey)
  return { valid, payload }
}
// ─── Encoding Utilities ───
function base64urlEncode(str: string): string {
return btoa(str).replace(/\+/g, '-').replace(/\//g, '_').replace(/=+$/, '')
}
function base64urlDecode(str: string): string {
const padded = str + '='.repeat((4 - str.length % 4) % 4)
return atob(padded.replace(/-/g, '+').replace(/_/g, '/'))
}
function base64urlEncodeBytes(bytes: Uint8Array): string {
let binary = ''
for (const byte of bytes) binary += String.fromCharCode(byte)
return btoa(binary).replace(/\+/g, '-').replace(/\//g, '_').replace(/=+$/, '')
}
function base64urlDecodeBytes(str: string): Uint8Array {
const padded = str + '='.repeat((4 - str.length % 4) % 4)
const binary = atob(padded.replace(/-/g, '+').replace(/_/g, '/'))
const bytes = new Uint8Array(binary.length)
for (let i = 0; i < binary.length; i++) bytes[i] = binary.charCodeAt(i)
return bytes
}
// Base58btc (Bitcoin alphabet)
const BASE58_ALPHABET = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
/**
 * Encode bytes as base58btc. Leading zero bytes map to leading '1's;
 * the remainder is the big-endian value in base 58.
 *
 * Bug fix: the original seeded `digits = [0]`, so an empty input encoded
 * to "1" and all-zero inputs gained one spurious trailing '1'
 * (e.g. [0x00] → "11" instead of "1"). Starting from an empty digit list
 * fixes those cases and leaves all non-zero inputs unchanged.
 */
function base58btcEncode(bytes: Uint8Array): string {
  const digits: number[] = [] // little-endian base-58 digits
  for (const byte of bytes) {
    let carry = byte
    // Multiply accumulated value by 256 and add the new byte
    for (let j = 0; j < digits.length; j++) {
      carry += digits[j] << 8
      digits[j] = carry % 58
      carry = (carry / 58) | 0
    }
    while (carry > 0) {
      digits.push(carry % 58)
      carry = (carry / 58) | 0
    }
  }
  // Each leading zero byte becomes a leading '1'
  let output = ''
  for (const byte of bytes) {
    if (byte !== 0) break
    output += BASE58_ALPHABET[0]
  }
  for (let i = digits.length - 1; i >= 0; i--) {
    output += BASE58_ALPHABET[digits[i]]
  }
  return output
}
/**
 * Decode a base58btc string to bytes. Leading '1's map to leading zero
 * bytes; throws on characters outside the Bitcoin alphabet.
 *
 * Bug fixes vs. the original: (a) `bytes = [0]` seeded a phantom byte, and
 * (b) `findIndex(c => c !== '1')` returned -1 for all-'1' or empty input,
 * so decode('1') produced an empty array and decode('11') / decode('')
 * threw a RangeError in `result.set`. Counting leading '1's directly and
 * starting from an empty byte list makes decode the exact inverse of encode.
 */
function base58btcDecode(str: string): Uint8Array {
  const bytes: number[] = [] // little-endian byte accumulator
  for (const char of str) {
    const value = BASE58_ALPHABET.indexOf(char)
    if (value === -1) throw new Error(`Invalid base58 character: ${char}`)
    let carry = value
    // Multiply accumulated value by 58 and add the new digit
    for (let j = 0; j < bytes.length; j++) {
      carry += bytes[j] * 58
      bytes[j] = carry & 0xff
      carry >>= 8
    }
    while (carry > 0) {
      bytes.push(carry & 0xff)
      carry >>= 8
    }
  }
  // Leading '1's → leading zero bytes
  let leadingZeros = 0
  while (leadingZeros < str.length && str[leadingZeros] === '1') leadingZeros++
  const result = new Uint8Array(leadingZeros + bytes.length)
  bytes.reverse()
  result.set(bytes, leadingZeros)
  return result
}

View File

@ -0,0 +1,13 @@
{
"compilerOptions": {
"target": "ES2022",
"module": "ES2022",
"moduleResolution": "bundler",
"strict": true,
"esModuleInterop": true,
"skipLibCheck": true,
"outDir": "dist",
"rootDir": "src"
},
"include": ["src"]
}