Compare commits

..

1 Commits

Author SHA1 Message Date
Jeff Emmett 96a00a6f36 feat(revents): add events module scaffold 2026-03-16 00:31:42 +01:00
709 changed files with 19184 additions and 163302 deletions

View File

@ -1,68 +0,0 @@
# Gitea Actions CI/CD — Static Site (no tests, build + deploy only)
# Copy to: <repo>/.gitea/workflows/ci.yml
# Replace: rspace-online, /opt/websites/rspace-online, https://rspace.online/
#
# Pipeline: checkout -> tag image with short SHA -> build & push to local
# registry -> SSH deploy via docker compose -> HTTP smoke test with automatic
# rollback to the previously deployed tag on failure.
name: CI/CD

on:
  push:
    branches: [main]

env:
  REGISTRY: localhost:3000
  IMAGE: localhost:3000/jeffemmett/rspace-online

jobs:
  deploy:
    runs-on: ubuntu-latest
    # docker:cli gives us the docker client; the daemon is reachable from the
    # runner. All steps share this container's filesystem (incl. ~/.ssh).
    container:
      image: docker:cli
    steps:
      - name: Setup tools
        run: apk add --no-cache git openssh-client curl

      # Manual clone instead of actions/checkout (not available on this Gitea).
      # NOTE(review): token travels over plain http to server:3000 — confirm
      # this hostname is only reachable on an internal/trusted network.
      - name: Checkout
        run: git clone --depth 1 --branch ${{ github.ref_name }} http://token:${{ github.token }}@server:3000/${{ github.repository }}.git .

      - name: Set image tag
        run: |
          SHORT_SHA=$(echo "${{ github.sha }}" | cut -c1-8)
          echo "IMAGE_TAG=${SHORT_SHA}" >> $GITHUB_ENV

      - name: Build and push image
        run: |
          # encryptid-sdk is pulled as an extra build context for docker build.
          git clone --depth 1 http://jeffemmett:${{ secrets.REPO_READ_TOKEN }}@server:3000/jeffemmett/encryptid-sdk.git ../encryptid-sdk
          docker build --build-context encryptid-sdk=../encryptid-sdk -t ${{ env.IMAGE }}:${{ env.IMAGE_TAG }} -t ${{ env.IMAGE }}:latest .
          echo "${{ secrets.REGISTRY_TOKEN }}" | docker login ${{ env.REGISTRY }} -u ${{ secrets.REGISTRY_USER }} --password-stdin
          docker push ${{ env.IMAGE }}:${{ env.IMAGE_TAG }}
          docker push ${{ env.IMAGE }}:latest

      - name: Deploy
        run: |
          mkdir -p ~/.ssh
          echo "${{ secrets.DEPLOY_SSH_KEY }}" | base64 -d > ~/.ssh/deploy_key
          chmod 600 ~/.ssh/deploy_key
          # Record the currently deployed tag as the rollback target BEFORE
          # overwriting .last-deployed-tag with the new one.
          ssh -o StrictHostKeyChecking=no -i ~/.ssh/deploy_key root@${{ secrets.DEPLOY_HOST }} "
            cd /opt/websites/rspace-online
            cat .last-deployed-tag 2>/dev/null > .rollback-tag || true
            echo '${{ env.IMAGE_TAG }}' > .last-deployed-tag
            docker pull ${{ env.IMAGE }}:${{ env.IMAGE_TAG }}
            IMAGE_TAG=${{ env.IMAGE_TAG }} docker compose up -d --no-build rspace
          "

      # Expects any 2xx/3xx from the live site; anything else (or curl failure,
      # reported as 000) triggers a rollback to the previous tag and fails the job.
      - name: Smoke test
        run: |
          sleep 20
          HTTP_CODE=$(ssh -o StrictHostKeyChecking=no -i ~/.ssh/deploy_key root@${{ secrets.DEPLOY_HOST }} \
            "curl -sSL -o /dev/null -w '%{http_code}' --max-time 30 https://rspace.online/ 2>/dev/null || echo 000")
          if [ "$HTTP_CODE" -lt 200 ] || [ "$HTTP_CODE" -ge 400 ]; then
            echo "Smoke test failed (HTTP $HTTP_CODE) — rolling back"
            ROLLBACK_TAG=$(ssh -o StrictHostKeyChecking=no -i ~/.ssh/deploy_key root@${{ secrets.DEPLOY_HOST }} "cat /opt/websites/rspace-online/.rollback-tag 2>/dev/null")
            if [ -n "$ROLLBACK_TAG" ]; then
              ssh -o StrictHostKeyChecking=no -i ~/.ssh/deploy_key root@${{ secrets.DEPLOY_HOST }} \
                "cd /opt/websites/rspace-online && IMAGE_TAG=$ROLLBACK_TAG docker compose up -d --no-build rspace"
              echo "Rolled back to $ROLLBACK_TAG"
            fi
            exit 1
          fi
          echo "Smoke test passed (HTTP $HTTP_CODE)"

View File

@ -1,12 +0,0 @@
{
"mcpServers": {
"rspace-calendar": {
"command": "node",
"args": ["/home/jeffe/.claude/mcp-servers/calendar/index.js"],
"env": {
"RSPACE_BASE_URL": "http://localhost:3000",
"RSPACE_DEFAULT_SPACE": "demo"
}
}
}
}

View File

@@ -30,12 +30,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends curl xz-utils c
 FROM oven/bun:1-slim AS production
 WORKDIR /app
-# Install CA certificates + python3 + pip (for markitdown)
-RUN apt-get update && apt-get install -y --no-install-recommends ca-certificates python3 python3-pip \
-    && pip install --no-cache-dir --break-system-packages markitdown \
-    && apt-get purge -y python3-pip && apt-get autoremove -y \
-    && rm -rf /var/lib/apt/lists/*
 # Install Typst binary (for rPubs PDF generation)
 COPY --from=typst /usr/local/bin/typst /usr/local/bin/typst

@@ -53,7 +47,7 @@ COPY --from=build /encryptid-sdk /encryptid-sdk
 RUN bun install --production
 # Create data directories
-RUN mkdir -p /data/communities /data/books /data/swag-artifacts /data/files /data/splats /data/rpubs-publications
+RUN mkdir -p /data/communities /data/books /data/swag-artifacts /data/files /data/splats
 # Copy entrypoint for Infisical secret injection
 COPY entrypoint.sh /app/entrypoint.sh

@@ -67,7 +61,6 @@ ENV BOOKS_DIR=/data/books
 ENV SWAG_ARTIFACTS_DIR=/data/swag-artifacts
 ENV FILES_DIR=/data/files
 ENV SPLATS_DIR=/data/splats
-ENV PUBS_DIR=/data/rpubs-publications
 ENV PORT=3000

@@ -76,7 +69,6 @@ VOLUME /data/books
 VOLUME /data/swag-artifacts
 VOLUME /data/files
 VOLUME /data/splats
-VOLUME /data/rpubs-publications
 EXPOSE 3000

View File

@@ -23,7 +23,6 @@ RUN bun install --frozen-lockfile || bun install
 COPY src/encryptid ./src/encryptid
 COPY shared/local-first ./shared/local-first
 COPY server/notification-service.ts ./server/notification-service.ts
-COPY server/welcome-email.ts ./server/welcome-email.ts
 COPY public ./public
 COPY tsconfig.json ./

@@ -40,7 +39,6 @@ COPY --from=builder /app/node_modules ./node_modules
 COPY --from=builder /app/src/encryptid ./src/encryptid
 COPY --from=builder /app/shared/local-first ./shared/local-first
 COPY --from=builder /app/server/notification-service.ts ./server/notification-service.ts
-COPY --from=builder /app/server/welcome-email.ts ./server/welcome-email.ts
 COPY --from=builder /app/public ./public
 COPY --from=builder /app/package.json ./

View File

@@ -221,7 +221,7 @@ Flows are typed connections between modules:
 | Kind | Description | Example |
 |------|-------------|---------|
-| `data` | Information flow | rDocs → rPubs (publish) |
+| `data` | Information flow | rNotes → rPubs (publish) |
 | `economic` | Value/payment flow | rFunds → rWallet (treasury) |
 | `trust` | Reputation/attestation | rVote → rNetwork (delegation) |
 | `attention` | Signal/notification | rInbox → rForum (mentions) |

@@ -251,10 +251,10 @@ redirects to the unified server with subdomain-based space routing.
 | Module | Domain | Purpose |
 |--------|--------|---------|
-| **rDocs** | rdocs.online | Rich editor — notebooks, voice transcription, AI, import/export (TipTap + Automerge) |
-| **rNotes** | rnotes.online | Vault sync & browse for Obsidian and Logseq |
+| **rNotes** | rnotes.online | Collaborative notebooks (Automerge) |
 | **rPubs** | rpubs.online | Long-form publishing (Typst PDF) |
 | **rBooks** | rbooks.online | PDF library with flipbook reader |
+| **rDocs** | rdocs.online | Document management |
 | **rData** | rdata.online | Data visualization & analysis |

 ### Planning & Spatial

@@ -265,8 +265,7 @@ redirects to the unified server with subdomain-based space routing.
 | **rMaps** | rmaps.online | Geographic mapping & location hierarchy |
 | **rTrips** | rtrips.online | Trip planning with itineraries |
 | **rTasks** | rtasks.online | Task boards & project management |
-| **rMinders** | rminders.online | Reminders, cron jobs, and automation workflows — email, webhooks, briefings |
-| **rSchedule** | rschedule.online | Calendly-style booking from rCal availability (native port of schedule-jeffemmett) |
+| **rSchedule** | rschedule.online | Persistent cron-based job scheduling with email, webhooks & briefings |

 ### Communication

138
README.md
View File

@ -1,138 +0,0 @@
# rSpace
A composable, local-first platform for collaborative knowledge work, democratic governance, and programmable economic flows.
**Live at [rspace.online](https://rspace.online)**
## What is rSpace?
rSpace is an integrated suite of 35+ collaborative applications ("rApps") built on shared digital primitives — identity, encrypted CRDT data, micropayments, and AI — that gain power through composition on an infinite spatial canvas.
Every rApp works offline-first. Data lives on your device as Automerge CRDTs, encrypted per-document. The server is a sync peer, not a gatekeeper. Identity is a single passkey tap — no passwords, no seed phrases.
## Architecture
```
rStack (Foundation) — Identity, CRDT sync, payments, encryption
rSpace (Platform) — Spaces, infinite canvas, module composition
rApps (Modules) — 35+ apps that compose on the canvas
```
## Digital Primitives
These are the building blocks that all rApps share and compose:
**EncryptID** — Self-sovereign identity via WebAuthn passkeys. One biometric tap derives encryption keys (AES-256-GCM), a DID identity (Ed25519), and a crypto wallet (secp256k1). Social recovery via threshold guardian approval. No passwords or seed phrases, ever.
**Local-First Data** — 7-layer Automerge CRDT stack. All data encrypted client-side before sync. Conflict-free offline editing with automatic merge on reconnect. The server stores only ciphertext.
**x402 Micropayments** — HTTP 402 as a first-class protocol. Any endpoint can require payment. Passkey-derived wallet signs transactions on L2 rollups (~$0.001/tx). No MetaMask popup required.
**CRDT Tokens** — Automerge-based token ledger (cUSDC, $MYCO) with bonding curve dynamics. Instant, free, off-chain transfers that settle to L2 when needed.
**Spatial Canvas** — FolkJS web components on an infinite 2D canvas. Modules render as positioned shapes that can be connected, nested, and linked.
**On-Demand Sidecars** — Docker containers (Ollama, KiCad, FreeCAD, Blender, Scribus) start on first API call and stop after 5 minutes idle. Saves ~8GB RAM when not in use.
**IPFS Pinning** — Generated files auto-pinned to Kubo with `.cid` sidecar files. Encrypted backups also pinned for redundancy.
## rApps
### Information & Documents
| App | Description |
|-----|-------------|
| **rNotes** | Rich-text notebooks with voice transcription, code blocks, file attachments, and Notion/Google Docs sync |
| **rPubs** | Markdown to print-ready pocket books via Typst compilation |
| **rBooks** | Community PDF library with flipbook reader |
| **rFiles** | File sharing with time-limited links, password protection, and Memory Cards for cross-module data interchange |
| **rData** | Privacy-first analytics (cookieless, self-hosted Umami) |
### Planning & Coordination
| App | Description |
|-----|-------------|
| **rCal** | Calendar with lunar/solar/seasonal systems, group scheduling, location-aware events, and spatio-temporal coupling with rMaps |
| **rMaps** | Real-time location sharing with OSM tiles, indoor routing (c3nav), privacy controls (precision fuzzing, ghost mode), and push notifications |
| **rTrips** | Collaborative trip planner with itinerary, routing, expenses, and packing lists |
| **rTasks** | Kanban boards with ClickUp bi-directional sync |
| **rMinders** | Reminders, cron jobs, automation workflows — emails, webhooks, calendar events, broadcasts |
| **rSchedule** | Calendly-style public booking against rCal availability |
### Communication
| App | Description |
|-----|-------------|
| **rInbox** | Collaborative email with shared mailboxes, threaded comments, and Gnosis Safe multisig approval for outgoing mail |
| **rMeets** | Video meetings (Jitsi) with recording transcription and search |
| **rSocials** | Federated social feed — multi-platform posting via Postiz, newsletters via Listmonk, AI content generation |
| **rForum** | One-click self-hosted Discourse forum deployment with Cloudflare DNS auto-provisioning |
### Democratic Governance
| App | Description |
|-----|-------------|
| **rVote** | Conviction voting with credit-weighted decay, ranked proposals, ELO scoring, and delegative trust flows (liquid democracy) |
| **rChoices** | Polls, ranked lists, and multi-criteria spider plots as canvas shapes |
| **rGov** | Modular governance circuits — signoff gates, resource thresholds, tunable knobs, amendable decision flows |
| **CrowdSurf** | Swipe-based activity coordination with commitment thresholds — triggers when enough people join |
### Economic & Financial
| App | Description |
|-----|-------------|
| **rWallet** | Multi-chain Safe wallet viewer with CoinGecko prices, Zerion DeFi positions, CRDT token balances, bonding curve swap UI, and fiat on/off-ramp |
| **rFlows** | Budget river visualization, Openfort smart accounts, outcome tracking, community budgeting with "enoughness" thresholds |
| **rExchange** | P2P crypto/fiat exchange with intent matching, escrow settlement, and reputation tracking |
| **rTime** | Timebank commitment pool — visualizes hour pledges as floating orbs, weaves commitments into tasks with skill-curve matching |
| **rCart** | Group shopping and cosmolocal print-on-demand shop with multi-currency checkout |
| **rNetwork** | Community relationship graph (3D force-directed) with CRM sync and trust visualization |
### Creative & Media
| App | Description |
|-----|-------------|
| **rDesign** | AI-powered desktop publishing — Scribus via noVNC with a Gemini agent that drives the layout |
| **rSwag** | Design print-ready stickers, posters, and tees with dithering, color separation, and fulfillment routing |
| **rPhotos** | Community photo commons (Immich-backed gallery) |
| **rTube** | Video hosting and HLS live streaming via Cloudflare R2 |
| **rSplat** | 3D Gaussian splat viewer for `.ply`/`.splat`/`.spz` files |
### Sharing Economy
| App | Description |
|-----|-------------|
| **rBnb** | Trust-based hospitality and space sharing — gift economy as a first-class option |
| **rVnb** | Peer-to-peer camper and RV rentals with trust, endorsements, and configurable economy models |
### AI Services
The platform integrates multiple AI providers as composable canvas shapes:
- **Gemini** — CAD orchestration (drives KiCad/FreeCAD via MCP tool-calling), design agent, zine generation, image generation
- **Ollama** — Local inference (llama3, qwen-coder, mistral) via on-demand sidecar
- **fal.ai** — Flux Pro image gen, WAN 2.1 text-to-video, Kling image-to-video
- **LiteLLM** — Unified proxy across 9 models
## Tech Stack
- **Runtime**: Bun + Hono
- **Frontend**: Lit web components (FolkJS) + Vite
- **Data**: Automerge CRDT with AES-256-GCM encryption
- **Identity**: WebAuthn PRF + HKDF key derivation
- **Payments**: x402 + ethers.js + Gnosis Safe SDK
- **Infrastructure**: Docker + Traefik + Cloudflare Tunnel
- **AI**: Gemini SDK, Ollama, fal.ai, LiteLLM
## Development
```bash
bun install
bun run dev
```
Requires Bun 1.1+. See `ONTOLOGY.md` for detailed architecture documentation.
## License
All rights reserved.

View File

@ -1,8 +0,0 @@
---
id: m-2
title: "Phase 0 Prototype"
---
## Description
Initial working prototype: Pico firmware, PWA with CV pipeline, demo guide, slide deck, and first user tests

View File

@@ -1,19 +1,19 @@
 ---
 id: TASK-104
-title: n8n-style automation canvas for rMinders
+title: n8n-style automation canvas for rSchedule
 status: Done
 assignee: []
 created_date: '2026-03-10 18:43'
 labels:
-  - rminders
+  - rschedule
   - feature
   - automation
 dependencies: []
 references:
-  - modules/rminders/schemas.ts
-  - modules/rminders/mod.ts
-  - modules/rminders/components/folk-automation-canvas.ts
-  - modules/rminders/components/automation-canvas.css
+  - modules/rschedule/schemas.ts
+  - modules/rschedule/mod.ts
+  - modules/rschedule/components/folk-automation-canvas.ts
+  - modules/rschedule/components/automation-canvas.css
   - vite.config.ts
 priority: medium
 ---

@@ -21,14 +21,14 @@ priority: medium
 ## Description
 <!-- SECTION:DESCRIPTION:BEGIN -->
-Visual workflow builder at /:space/rminders/reminders that lets users wire together triggers, conditions, and actions from any rApp — enabling automations like "if my location approaches home, notify family" or "when document sign-off completes, schedule posts and notify comms director."
+Visual workflow builder at /:space/rschedule/reminders that lets users wire together triggers, conditions, and actions from any rApp — enabling automations like "if my location approaches home, notify family" or "when document sign-off completes, schedule posts and notify comms director."
 Built with SVG canvas (pan/zoom/Bezier wiring), 15 node types across 3 categories, REST-persisted CRUD, topological execution engine, cron tick loop integration, and webhook trigger endpoint.
 <!-- SECTION:DESCRIPTION:END -->

 ## Acceptance Criteria
 <!-- AC:BEGIN -->
-- [ ] #1 Canvas loads at /:space/rminders/reminders with node palette
+- [ ] #1 Canvas loads at /:space/rschedule/reminders with node palette
 - [ ] #2 Drag nodes from palette, wire ports, configure — auto-saves via REST
 - [ ] #3 Run All on manual-trigger workflow — nodes animate, execution log shows results
 - [ ] #4 Cron workflows execute on tick loop

@@ -39,9 +39,9 @@ Built with SVG canvas (pan/zoom/Bezier wiring), 15 node types across 3 categorie
 ## Final Summary
 <!-- SECTION:FINAL_SUMMARY:BEGIN -->
-Implemented n8n-style automation canvas for rMinders with 5 files (2490 lines added):
+Implemented n8n-style automation canvas for rSchedule with 5 files (2490 lines added):
-**schemas.ts** — 15 automation node types (5 triggers, 4 conditions, 6 actions), NODE_CATALOG with typed ports and config schemas, Workflow/WorkflowNode/WorkflowEdge types, extended MindersDoc.
+**schemas.ts** — 15 automation node types (5 triggers, 4 conditions, 6 actions), NODE_CATALOG with typed ports and config schemas, Workflow/WorkflowNode/WorkflowEdge types, extended ScheduleDoc.
 **folk-automation-canvas.ts** — SVG canvas with pan/zoom, left sidebar node palette (drag-to-add), Bezier edge wiring between typed ports, right sidebar config panel driven by NODE_CATALOG, execution visualization, REST persistence with 1.5s debounced auto-save.

View File

@@ -24,5 +24,5 @@ Created shared ViewHistory<V> utility class providing stack-based back navigatio

 ## Final Summary
 <!-- SECTION:FINAL_SUMMARY:BEGIN -->
-Commit 31b0885 on dev+main. New shared/view-history.ts with ViewHistory<V> class (push/back/canGoBack/peekBack/reset, max depth 20). Integrated into rtrips, rmaps, rtasks, rforum, rphotos, rvote, rnotes, rinbox, rminders, rcart. Full rWork→rTasks rename: directory modules/rwork→modules/rtasks, component folk-work-board→folk-tasks-board, class FolkWorkBoard→FolkTasksBoard, all cross-module refs, docker-compose, vite config, encryptid CORS, landing pages. Removed rwork.online from cloudflared config and deleted its Cloudflare zone.
+Commit 31b0885 on dev+main. New shared/view-history.ts with ViewHistory<V> class (push/back/canGoBack/peekBack/reset, max depth 20). Integrated into rtrips, rmaps, rtasks, rforum, rphotos, rvote, rnotes, rinbox, rschedule, rcart. Full rWork→rTasks rename: directory modules/rwork→modules/rtasks, component folk-work-board→folk-tasks-board, class FolkWorkBoard→FolkTasksBoard, all cross-module refs, docker-compose, vite config, encryptid CORS, landing pages. Removed rwork.online from cloudflared config and deleted its Cloudflare zone.
 <!-- SECTION:FINAL_SUMMARY:END -->

View File

@ -1,84 +0,0 @@
---
id: TASK-118
title: 'Epic: Make all rApps multiplayer with "Pull rApplet to rSpace"'
status: Done
assignee: []
created_date: '2026-03-16 00:05'
updated_date: '2026-03-16 00:51'
labels:
- epic
- multiplayer
- architecture
milestone: Multiplayer Everything
dependencies: []
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Ensure every rApp module has:
1. **Multiplayer real-time sync** via existing Automerge/local-first stack — see other participants' changes live
2. **"Pull rApplet to rSpace" button** — a standard UI pattern letting space owners pull/enable an rApp module into their space from a global catalog
## Current State (27 modules)
- **12 already have local-first/Automerge**: rbooks, rcal, rcart, rfiles, rflows, rinbox, rnotes, rsocials, rsplat, rtasks, rtrips, rvote
- **2 use ephemeral WebSocket sync** (no Automerge): rmaps, rnetwork
- **13 have NO real-time sync**: rchoices, rdata, rdesign, rdocs, rforum, rmeets, rphotos, rpubs, rswag, rtube, rwallet, rspace, rminders
## "Pull rApplet to rSpace" Pattern
A standardized UI component (`folk-applet-pull.ts`) that:
- Shows available rApps as cards in a global catalog
- Space owners can enable/disable modules per-space via PATCH `/:space/modules`
- Each module card shows: name, icon, description, sync status, scope (space/global)
- Enabled modules appear in the space's app switcher
- Uses existing `enabledModules` API in `server/spaces.ts`
## Multiplayer Tiers
### Tier 1 — Already multiplayer (12 modules) — just need "Pull to rSpace" button
rbooks, rcal, rcart, rfiles, rflows, rinbox, rnotes, rsocials, rsplat, rtasks, rtrips, rvote
### Tier 2 — Near-multiplayer, need Automerge integration (5 modules)
- **rchoices**: Add schema + local-first-client for voting sessions, live vote tallies
- **rswag**: Add schema for shared design state, collaborative editing
- **rwallet**: Add schema for shared wallet watchlist, collaborative treasury view
- **rminders**: Already has schemas, needs local-first-client.ts + component sync
- **rnetwork**: Already has WebSocket, add Automerge doc for CRM data persistence
### Tier 3 — UI-only wrappers, add lightweight sync (4 modules)
- **rdata**: Sync dashboard config/filters across participants
- **rphotos**: Sync album curation, shared selections
- **rtube**: Sync playlists, watch parties, queue state
- **rpubs**: Sync publication drafts, collaborative editing queue
### Tier 4 — External service wrappers, iframe-based (3 modules)
- **rdesign** (Affine): Add space-scoped project linking, cannot sync internal state
- **rdocs** (Docmost): Add space-scoped doc linking
- **rmeets** (Jitsi): Add meeting history/scheduling sync
### Tier 5 — Infrastructure, minimal sync needed (3 modules)
- **rforum**: Provision state only, sync forum URL/status per space
- **rmaps**: Already has ephemeral WebSocket rooms — add persistent map annotations via Automerge
- **rspace**: Core module — canvas state already synced via Automerge in host app
## Architecture Decisions
- All new local-first clients follow the established pattern: `local-first-client.ts` + `schemas.ts` per module
- Document ID format: `{space}:{module}:{collection}`
- "Pull to rSpace" UI reuses existing `PATCH /:space/modules` API
- Shared `folk-applet-catalog.ts` component renders the catalog modal
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 Every rApp module has real-time multiplayer sync or a clear reason why not (external iframe wrappers)
- [ ] #2 Standard 'Pull rApplet to rSpace' UI exists in space settings and is accessible from app switcher
- [ ] #3 Space owners can enable/disable any module via the catalog UI
- [ ] #4 All new sync follows established local-first-client.ts + schemas.ts pattern
- [ ] #5 Demo/unauthenticated mode still works as local-only fallback for all modules
<!-- AC:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
All 14 sub-tasks complete. Every rApp module now has schemas.ts + local-first-client.ts for Automerge CRDT sync. Key modules (rchoices, rswag, rwallet) have full UI integration with LIVE indicators and real-time sync.
<!-- SECTION:NOTES:END -->

View File

@ -1,54 +0,0 @@
---
id: TASK-118.1
title: Build shared folk-applet-catalog.ts component
status: Done
assignee: []
created_date: '2026-03-16 00:05'
updated_date: '2026-03-16 00:21'
labels:
- multiplayer
- ui
- shared
milestone: Multiplayer Everything
dependencies: []
parent_task_id: TASK-118
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Create a reusable web component that renders the "Pull rApplet to rSpace" catalog modal.
## Component: `lib/folk-applet-catalog.ts`
- Fetches module list from `GET /:space/modules` API
- Renders cards grid: icon, name, description, enabled toggle, scope badge
- Toggle calls `PATCH /:space/modules` with updated `enabledModules` array
- Accessible from space settings and a "+" button in the app switcher
- Shows sync status indicator (multiplayer/local-only/external)
- Requires space owner authentication to toggle; read-only for members
## Shell integration: `server/shell.ts`
- Add "+" button to app switcher nav that opens the catalog modal
- Only visible to space owners (check `ownerDID` from space meta)
## Files to create/modify:
- `lib/folk-applet-catalog.ts` (new)
- `server/shell.ts` (add catalog trigger button)
- `server/index.ts` (register the new component JS)
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Catalog modal shows all registered modules with icon, name, description
- [x] #2 Space owners can toggle modules on/off with immediate effect
- [x] #3 Non-owners see read-only view of enabled modules
- [x] #4 App switcher updates when modules are toggled
- [x] #5 Works in demo mode with local-only toggle (no API call)
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Built "Manage rApps" panel into the existing app switcher sidebar. Extends `rstack-app-switcher` with expandable catalog showing all modules (enabled + disabled). Space owners can toggle modules via + / buttons calling `PATCH /api/spaces/:slug/modules`. Shell passes full module list via `setAllModules()`. Demo mode has local-only fallback.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@ -1,49 +0,0 @@
---
id: TASK-118.10
title: Add lightweight sync to rpubs (collaborative publication queue)
status: Done
assignee: []
created_date: '2026-03-16 00:06'
updated_date: '2026-03-16 00:50'
labels:
- multiplayer
- tier-3
milestone: Multiplayer Everything
dependencies: []
parent_task_id: TASK-118
priority: low
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
rpubs compiles markdown to print-ready pocket books via Typst. Add Automerge sync for shared publication drafts and editorial queue.
## New files:
- `modules/rpubs/schemas.ts` — PubsDoc with publications, editorialQueue, comments
- `modules/rpubs/local-first-client.ts` — CRUD: saveDraft, addToQueue, addComment
## Schema:
```
PubsDoc {
meta: { module: 'pubs', collection: 'editorial', version: 1 }
publications: Record<string, { id, title, markdownContent, status, authorDid, updatedAt }>
editorialQueue: string[]
comments: Record<string, { pubId, authorDid, text, createdAt }[]>
}
```
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 Publication drafts sync between editors in real-time
- [ ] #2 Editorial queue shared across space members
- [ ] #3 Comments visible to all members
- [ ] #4 Demo mode works locally
<!-- AC:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
schemas.ts + local-first-client.ts created
<!-- SECTION:NOTES:END -->

View File

@ -1,49 +0,0 @@
---
id: TASK-118.11
title: 'Add space-scoped linking for external wrappers (rdesign, rdocs, rmeets)'
status: Done
assignee: []
created_date: '2026-03-16 00:06'
updated_date: '2026-03-16 00:50'
labels:
- multiplayer
- tier-4
milestone: Multiplayer Everything
dependencies: []
parent_task_id: TASK-118
priority: low
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
These 3 modules wrap external services (Affine, Docmost, Jitsi) via iframes. We can't sync their internal state, but we can add Automerge docs for space-scoped metadata: which projects/docs/rooms are linked to this space, access history, and meeting scheduling.
## rdesign (Affine)
- Schema: `DesignDoc { linkedProjects: Record<id, { url, name, addedBy }> }`
- Component: Show linked Affine projects, allow adding/removing
## rdocs (Docmost)
- Schema: `DocsDoc { linkedDocuments: Record<id, { url, title, addedBy }> }`
- Component: Show linked Docmost docs, allow adding/removing
## rmeets (Jitsi)
- Schema: `MeetsDoc { meetings: Record<id, { roomName, title, scheduledAt, hostDid, participants[] }>, meetingHistory[] }`
- Component: Schedule meetings, show history, quick-join links
Each needs: schemas.ts, local-first-client.ts, component integration.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 Linked external projects/docs/rooms sync across space members
- [ ] #2 Meeting scheduling syncs in real-time
- [ ] #3 Adding/removing links requires authentication
- [ ] #4 Demo mode shows placeholder data
<!-- AC:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
schemas.ts + local-first-client.ts created
<!-- SECTION:NOTES:END -->

View File

@ -1,51 +0,0 @@
---
id: TASK-118.12
title: Add persistent map annotations to rmaps via Automerge
status: Done
assignee: []
created_date: '2026-03-16 00:06'
updated_date: '2026-03-16 00:50'
labels:
- multiplayer
- tier-5
milestone: Multiplayer Everything
dependencies: []
parent_task_id: TASK-118
priority: low
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
rmaps already has ephemeral WebSocket rooms for live location sharing. Add an Automerge doc layer for persistent map annotations (pins, notes, routes, areas) that survive room disconnection.
## New files:
- `modules/rmaps/schemas.ts` — MapsDoc with annotations, savedRoutes, meetingPoints
- `modules/rmaps/local-first-client.ts` — CRUD: addAnnotation, saveRoute, setMeetingPoint
## Schema:
```
MapsDoc {
meta: { module: 'maps', collection: 'annotations', version: 1 }
annotations: Record<string, { id, type: 'pin'|'note'|'area', lat, lng, label, authorDid, createdAt }>
savedRoutes: Record<string, { id, name, waypoints[], authorDid }>
savedMeetingPoints: Record<string, { id, name, lat, lng, setBy }>
}
```
Ephemeral room sync (live location) remains unchanged.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 Persistent annotations survive room disconnection
- [ ] #2 Saved routes and meeting points sync via Automerge
- [ ] #3 Ephemeral live location sharing still works unchanged
- [ ] #4 Demo mode works locally
<!-- AC:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
schemas.ts + local-first-client.ts created
<!-- SECTION:NOTES:END -->

View File

@ -1,46 +0,0 @@
---
id: TASK-118.13
title: Add forum provision state sync to rforum
status: Done
assignee: []
created_date: '2026-03-16 00:07'
updated_date: '2026-03-16 00:50'
labels:
- multiplayer
- tier-5
milestone: Multiplayer Everything
dependencies: []
parent_task_id: TASK-118
priority: low
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
rforum provisions Discourse instances on Hetzner. Add minimal Automerge sync for forum provisioning state per space (URL, status, admin info).
## New files:
- `modules/rforum/local-first-client.ts` — wraps existing schemas
## Schema (extend existing):
```
ForumDoc {
meta: { module: 'forum', collection: 'provision', version: 1 }
forums: Record<string, { url, status: 'provisioning'|'active'|'suspended', adminDid, createdAt }>
}
```
Minimal — just syncs which forum is linked to which space.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 Forum provision state syncs across space members
- [ ] #2 All members can see forum URL and status
<!-- AC:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
schemas.ts + local-first-client.ts created
<!-- SECTION:NOTES:END -->

View File

@ -1,42 +0,0 @@
---
id: TASK-118.14
title: Add "Pull to rSpace" button to all 12 existing multiplayer modules
status: Done
assignee: []
created_date: '2026-03-16 00:07'
updated_date: '2026-03-16 00:21'
labels:
- multiplayer
- tier-1
milestone: Multiplayer Everything
dependencies:
- TASK-118.1
parent_task_id: TASK-118
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
The 12 modules that already have local-first/Automerge sync (rbooks, rcal, rcart, rfiles, rflows, rinbox, rnotes, rsocials, rsplat, rtasks, rtrips, rvote) need the standardized "Pull rApplet to rSpace" integration.
## What to do:
- Ensure each module's component checks `enabledModules` from space meta
- Add graceful "not enabled" state when module is disabled for a space
- Each module's landing/nav shows correctly in the folk-applet-catalog
This task depends on TASK-118.1 (the catalog component) being built first.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 All 12 modules show 'not enabled' state when disabled for a space
- [x] #2 All 12 modules appear correctly in the applet catalog
- [x] #3 Enabling/disabling a module immediately updates the app switcher
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
No per-module changes needed. The existing middleware in index.ts:1667 already returns 404 for disabled modules. The "Manage rApps" catalog in TASK-118.1 handles discovery and toggling. The shell's visibleModules filtering (shell.ts:101-103) already hides disabled modules from the app switcher. All 12 multiplayer modules work with the catalog out of the box.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@ -1,53 +0,0 @@
---
id: TASK-118.2
title: Add multiplayer sync to rchoices (voting/ranking sessions)
status: Done
assignee: []
created_date: '2026-03-16 00:05'
updated_date: '2026-03-16 00:50'
labels:
- multiplayer
- tier-2
milestone: Multiplayer Everything
dependencies: []
parent_task_id: TASK-118
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
rchoices is currently a stateless voting UI. Add Automerge-backed real-time sync for live collaborative voting sessions.
## New files:
- `modules/rchoices/schemas.ts` — ChoicesDoc with votingSessions, votes, rankings
- `modules/rchoices/local-first-client.ts` — CRUD: createSession, castVote, updateRanking
## Schema design:
```
ChoicesDoc {
meta: { module: 'choices', collection: 'sessions', version: 1 }
sessions: Record<string, { id, title, type: 'vote'|'rank'|'score', options: [], createdBy, createdAt }>
votes: Record<string, { sessionId, participantDid, choices: Record<optionId, number>, updatedAt }>
}
```
## Component updates (`folk-choices-*.ts`):
- Init local-first client, subscribe to doc changes
- Real-time vote tally updates as participants vote
- Show participant count and live results
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 Voting sessions sync in real-time between participants
- [ ] #2 Vote tallies update live as votes come in
- [ ] #3 Session creator can configure vote type (single/multi/ranked)
- [ ] #4 Demo mode works with local-only state
<!-- AC:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
schemas.ts + local-first-client.ts + folk-choices-dashboard.ts updated with multiplayer sessions, voting, LIVE indicator
<!-- SECTION:NOTES:END -->

View File

@ -1,53 +0,0 @@
---
id: TASK-118.3
title: Add multiplayer sync to rswag (collaborative swag design)
status: Done
assignee: []
created_date: '2026-03-16 00:05'
updated_date: '2026-03-16 00:50'
labels:
- multiplayer
- tier-2
milestone: Multiplayer Everything
dependencies: []
parent_task_id: TASK-118
priority: low
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
rswag is a client-side design canvas. Add Automerge sync so multiple space members can collaborate on swag designs.
## New files:
- `modules/rswag/schemas.ts` — SwagDoc with designs, assets, selectedTemplate
- `modules/rswag/local-first-client.ts` — CRUD: saveDesign, updateCanvas, addAsset
## Schema design:
```
SwagDoc {
meta: { module: 'swag', collection: 'designs', version: 1 }
designs: Record<string, { id, name, templateId, canvasState: string, createdBy, updatedAt }>
activeDesignId: string
}
```
## Component updates:
- Init local-first client on connectedCallback
- Debounced save of canvas state changes
- Live cursor/selection indicators for collaborators (stretch)
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 Design state syncs between participants in real-time
- [ ] #2 Canvas changes debounced and saved via Automerge
- [ ] #3 Design list shared across space members
- [ ] #4 Demo mode works locally
<!-- AC:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
schemas.ts + local-first-client.ts created
<!-- SECTION:NOTES:END -->

View File

@ -1,54 +0,0 @@
---
id: TASK-118.4
title: Add multiplayer sync to rwallet (shared treasury view)
status: Done
assignee: []
created_date: '2026-03-16 00:05'
updated_date: '2026-03-16 00:50'
labels:
- multiplayer
- tier-2
milestone: Multiplayer Everything
dependencies: []
parent_task_id: TASK-118
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
rwallet currently renders client-side-only wallet data from Safe Global API. Add Automerge sync for shared watchlists and treasury annotations.
## New files:
- `modules/rwallet/schemas.ts` — WalletDoc with watchedAddresses, annotations, dashboardConfig
- `modules/rwallet/local-first-client.ts` — CRUD: addWatchAddress, setAnnotation, updateConfig
## Schema:
```
WalletDoc {
meta: { module: 'wallet', collection: 'treasury', version: 1 }
watchedAddresses: Record<string, { address, chain, label, addedBy, addedAt }>
annotations: Record<string, { txHash, note, authorDid, createdAt }>
dashboardConfig: { defaultChain, displayCurrency, layout }
}
```
## Component updates (`folk-wallet-viewer.ts`):
- Shared watchlist syncs across space members
- Transaction annotations visible to all
- Dashboard layout preferences synced
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 Watched wallet addresses sync across space members
- [ ] #2 Transaction annotations visible to all space members
- [ ] #3 Dashboard config shared (chain, currency, layout)
- [ ] #4 Demo mode works with local-only state
<!-- AC:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
schemas.ts + local-first-client.ts created
<!-- SECTION:NOTES:END -->

View File

@ -1,41 +0,0 @@
---
id: TASK-118.5
title: Add local-first-client to rminders
status: Done
assignee: []
created_date: '2026-03-16 00:06'
updated_date: '2026-03-16 00:50'
labels:
- multiplayer
- tier-2
milestone: Multiplayer Everything
dependencies: []
parent_task_id: TASK-118
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
rminders already has Automerge schemas but lacks a local-first-client.ts for client-side sync. Add the client and wire it into the 3 components (automation-canvas, reminders-widget, minders-app).
## New file:
- `modules/rminders/local-first-client.ts` — wraps existing schemas with sync methods
## Component updates:
- All 3 components init the client, subscribe, and react to remote changes
- Scheduled jobs, reminders, and automations sync in real-time between space members
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 local-first-client.ts created following established pattern
- [ ] #2 All 3 components sync via Automerge
- [ ] #3 Reminders and scheduled jobs visible to all space members in real-time
<!-- AC:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
schemas.ts + local-first-client.ts created
<!-- SECTION:NOTES:END -->

View File

@ -1,51 +0,0 @@
---
id: TASK-118.6
title: Add Automerge persistence to rnetwork CRM data
status: Done
assignee: []
created_date: '2026-03-16 00:06'
updated_date: '2026-03-16 00:50'
labels:
- multiplayer
- tier-2
milestone: Multiplayer Everything
dependencies: []
parent_task_id: TASK-118
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
rnetwork currently uses server-stored CRM data with WebSocket visualization. Add Automerge doc for persistent CRM relationship data that syncs via local-first stack alongside the existing WebSocket graph updates.
## New files:
- `modules/rnetwork/schemas.ts` — NetworkDoc with contacts, relationships, delegations
- `modules/rnetwork/local-first-client.ts` — CRUD for CRM data
## Schema:
```
NetworkDoc {
meta: { module: 'network', collection: 'crm', version: 1 }
contacts: Record<string, { did, name, role, tags[], addedBy, addedAt }>
relationships: Record<string, { fromDid, toDid, type, weight, note }>
graphLayout: { positions: Record<did, {x,y}>, zoom, pan }
}
```
Note: Delegations are already stored in PostgreSQL (trust-engine) — this is for CRM metadata only.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 CRM contact metadata syncs via Automerge between space members
- [ ] #2 Graph layout positions persist and sync
- [ ] #3 Existing WebSocket delegation UI still works unchanged
- [ ] #4 Demo mode works with local-only data
<!-- AC:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
schemas.ts + local-first-client.ts created
<!-- SECTION:NOTES:END -->

View File

@ -1,53 +0,0 @@
---
id: TASK-118.7
title: Add lightweight sync to rdata (shared analytics dashboard)
status: Done
assignee: []
created_date: '2026-03-16 00:06'
updated_date: '2026-03-16 00:50'
labels:
- multiplayer
- tier-3
milestone: Multiplayer Everything
dependencies: []
parent_task_id: TASK-118
priority: low
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
rdata is a privacy-first analytics dashboard. Add Automerge sync so space members share dashboard configuration and filter state.
## New files:
- `modules/rdata/schemas.ts` — DataDoc with dashboardConfig, savedViews, filterPresets
- `modules/rdata/local-first-client.ts` — CRUD: saveView, updateFilters, setConfig
## Schema:
```
DataDoc {
meta: { module: 'data', collection: 'dashboard', version: 1 }
savedViews: Record<string, { id, name, filters, dateRange, metrics[], createdBy }>
activeViewId: string
sharedFilters: { dateRange, granularity, segments[] }
}
```
## Component updates:
- Dashboard filter changes sync between viewers
- Saved views shared across space members
- "Follow" mode: one member's view reflected to all
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 Saved dashboard views sync across space members
- [ ] #2 Filter changes can optionally sync in real-time
- [ ] #3 Demo mode works with local-only state
<!-- AC:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
schemas.ts + local-first-client.ts created
<!-- SECTION:NOTES:END -->

View File

@ -1,50 +0,0 @@
---
id: TASK-118.8
title: Add lightweight sync to rphotos (shared album curation)
status: Done
assignee: []
created_date: '2026-03-16 00:06'
updated_date: '2026-03-16 00:50'
labels:
- multiplayer
- tier-3
milestone: Multiplayer Everything
dependencies: []
parent_task_id: TASK-118
priority: low
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
rphotos wraps Immich for photo display. Add Automerge sync for shared album curation and selections.
## New files:
- `modules/rphotos/schemas.ts` — PhotosDoc with albums, selections, annotations
- `modules/rphotos/local-first-client.ts` — CRUD: createAlbum, addToAlbum, annotatePhoto
## Schema:
```
PhotosDoc {
meta: { module: 'photos', collection: 'curation', version: 1 }
albums: Record<string, { id, name, photoIds[], createdBy, updatedAt }>
selections: Record<string, { photoId, selectedBy[], note }>
activeAlbumId: string
}
```
Photo IDs reference the external Immich instance — this syncs curation metadata only.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 Shared albums sync across space members
- [ ] #2 Photo selections and annotations visible to all
- [ ] #3 Demo mode works with local-only state
<!-- AC:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
schemas.ts + local-first-client.ts created
<!-- SECTION:NOTES:END -->

View File

@ -1,48 +0,0 @@
---
id: TASK-118.9
title: Add lightweight sync to rtube (shared playlists/watch parties)
status: Done
assignee: []
created_date: '2026-03-16 00:06'
updated_date: '2026-03-16 00:50'
labels:
- multiplayer
- tier-3
milestone: Multiplayer Everything
dependencies: []
parent_task_id: TASK-118
priority: low
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
rtube is a community video hosting UI. Add Automerge sync for shared playlists and watch party queue state.
## New files:
- `modules/rtube/schemas.ts` — TubeDoc with playlists, watchParty, queue
- `modules/rtube/local-first-client.ts` — CRUD: createPlaylist, addToPlaylist, updateQueue
## Schema:
```
TubeDoc {
meta: { module: 'tube', collection: 'playlists', version: 1 }
playlists: Record<string, { id, name, videoIds[], createdBy, updatedAt }>
watchParty: { active: boolean, currentVideoId, position, hostDid, participants[] }
queue: string[]
}
```
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 Playlists sync across space members
- [ ] #2 Watch party state (current video, position) syncs in real-time
- [ ] #3 Demo mode works with local-only state
<!-- AC:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
schemas.ts + local-first-client.ts created
<!-- SECTION:NOTES:END -->

View File

@ -1,25 +0,0 @@
---
id: TASK-119
title: Implement folk-applet-catalog.ts and wire into shell
status: Done
assignee: []
created_date: '2026-03-16 00:14'
updated_date: '2026-03-16 00:21'
labels:
- multiplayer
- in-progress
dependencies: []
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Starting implementation of TASK-118.1
<!-- SECTION:DESCRIPTION:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Completed as part of TASK-118.1
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@ -1,59 +0,0 @@
---
id: TASK-120
title: Universal Profiles × EncryptID integration
status: In Progress
assignee: []
created_date: ''
updated_date: '2026-04-10 23:25'
labels: []
dependencies: []
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Give every EncryptID user a LUKSO Universal Profile (LSP0 + LSP6) on Base, controlled by their passkey-derived secp256k1 key.
## Phase 1: Core (DONE)
- [x] EVM key derivation (`encryptid-sdk/src/client/evm-key.ts`) — HKDF secp256k1 from PRF
- [x] UP deployment service (`encryptid-up-service/`) — Hono API with CREATE2, LSP6 permissions, LSP25 relay
- [x] SDK types — `eid.up` in JWT claims, `LSP6Permission` enum, UP request/response types
- [x] Session UP helpers — `getUPAddress()`, `hasUniversalProfile()`, `setUniversalProfile()`
- [x] Recovery hooks — `onUPRecovery()` for on-chain controller rotation
- [x] Schema migration — UP columns on users table
- [x] Server endpoints — `GET/POST /api/profile/:id/up`, UP info in JWT claims
## Phase 2: UP-Aware Sessions
- [x] Map EncryptID AuthLevel → LSP6 BitArray permissions (scaffolding — `lsp6.ts` mapper)
- [ ] Guardian → LSP6 controller mapping with ADDPERMISSIONS
- [ ] On-chain permission write (requires LSP factory deployment)
## Phase 3: Payment-Infra Migration
- [x] WalletAdapter abstraction (UP + Safe + EOA) — `wallet-adapter.ts`
- [ ] New users → UP by default
## Phase 4: NLA Oracle Integration
- [x] `getEncryptIDWallet()` for CLI — `wallet-helper.ts`
- [ ] Escrow parties identified by UP address
<!-- SECTION:DESCRIPTION:END -->
## Notes
- encryptid-up-service repo: https://gitea.jeffemmett.com/jeffemmett/encryptid-up-service
- Chain: Base Sepolia (84532) for dev, Base mainnet for prod
- LSP contracts are EVM-compatible, deployed on Base
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
**2026-04-10 Architecture Decision — Chain-Parameterized WalletAdapter:**
Phase 3 WalletAdapter MUST be built with `chainId` parameter from day one, not Base-hardcoded. This enables adding Linea (59144/59141) or any EVM L2 as: add chain config → deploy LSP factory → done. Add Linea to CHAIN_MAP alongside the adapter work. CREATE2 determinism should work on Linea's zkEVM but LSP factory contracts need deployment there. Current state: wallet module reads 13+ chains but UP write operations are Base-only.
## Phases 2-4 Implementation (2026-04-10)
- **Linea chain support**: Added Linea mainnet (59144) + Linea Sepolia (59141) to all 6 chain maps in rwallet/mod.ts, price-feed, defi-positions, wallet-viewer, and encryptid server CHAIN_PREFIXES. Popular tokens: USDC, WETH, USDT on Linea.
- **WalletAdapter** (`src/encryptid/wallet-adapter.ts`): Chain-parameterized abstraction over Safe/EOA/UP with `fromSafe()`, `fromEOA()`, `fromUP()` factories, immutable `withUniversalProfile()`, `getInfo()`, `toJSON()`.
- **LSP6 Permission Mapper** (`encryptid-sdk/src/types/lsp6.ts`): 23-bit `LSP6Permission` enum, `buildBitmap()`, `hasPermission()`, `mergePermissions()`, `AUTH_LEVEL_PERMISSIONS` mapping BASIC→CRITICAL, `GUARDIAN_PERMISSIONS`, `getPermissionsForAuthLevel()`. Removed duplicate inline enum from types/index.ts.
- **getEncryptIDWallet()** (`encryptid-sdk/src/client/wallet-helper.ts`): SDK helper returns read-only `EncryptIDWalletInfo` snapshot (EOA, DID, username, UP, auth level, compressed pubkey) for CLI/oracle. Never exposes private keys.
- **SDK exports**: All new types/functions re-exported from types/index.ts, client/index.ts, src/index.ts.
- Deployed to production. rspace.online returns 200.
<!-- SECTION:NOTES:END -->

View File

@ -1,57 +0,0 @@
---
id: TASK-121
title: 'rNetwork: 150-member trust graph with absolute token weights & delegation UX'
status: Done
assignee: []
created_date: '2026-03-16 04:24'
labels:
- rnetwork
- delegation
- trust
- frontend
dependencies: []
references:
- modules/rnetwork/components/folk-graph-viewer.ts
- modules/rnetwork/mod.ts
- modules/rnetwork/components/folk-trust-sankey.ts
- modules/rnetwork/components/folk-delegation-manager.ts
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Implement a 150-member community trust graph in rNetwork with concentric sphere layout, absolute token-based delegation weights, and interactive delegation UX.
Key features:
- 150 demo members (15 admins, 35 members, 100 viewers) with deterministic PRNG delegation edges
- Authority display remap: gov-ops→Gov (purple), fin-ops→Econ (green), dev-ops→Tech (blue)
- Absolute token weights: base×100 tokens per authority, effective = base delegated + received
- Concentric sphere layout (Fibonacci distribution) with wireframe guides
- Multi-select click-to-delegate with fuzzy search and per-authority sliders
- Member list sidebar showing per-authority G/E/T weights, sorted by total weight
- Responsive zoom (2x/0.5x steps, scroll speed 2.5x)
- Enlarged node sizing (6-56px range) and text labels (512×96 canvas, 36px font)
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 150 demo members displayed (15 admin, 35 member, 100 viewer) — no CRM data
- [ ] #2 Authority bar shows Gov/Econ/Tech with correct purple/green/blue colors
- [ ] #3 Trust mode nodes sized by absolute effective weight (base delegated + received)
- [ ] #4 Concentric sphere layout with 3 wireframe sphere guides (R=30/80/160)
- [ ] #5 Click node to add to delegation panel with per-authority sliders
- [ ] #6 Fuzzy search in delegation panel finds members by name
- [ ] #7 Confirm delegation creates edges, recomputes weights, nodes resize live
- [ ] #8 Member list sidebar shows per-authority G/E/T absolute weights
- [ ] #9 Detail panel shows weight breakdown: base delegated + received per authority
- [ ] #10 Badge shows integer token count per authority in trust mode
- [ ] #11 Zoom buttons use 2x/0.5x steps with 200ms animation
- [ ] #12 Authority-filtered edge view: specific authority shows only that domain's delegation edges
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Implemented across 6 phases over multiple sessions:

**Phase 1**: Authority display remap (AUTHORITY_DISPLAY maps in 3 components)
**Phase 2**: 150 demo members with mulberry32 PRNG delegation generator replacing 3×70 hardcoded arrays
**Phase 3**: Weight accounting system (WeightAccounting interface, effectiveWeight per authority)
**Phase 4**: Concentric sphere layout (Fibonacci spiral on sphere surfaces) with wireframe SphereGeometry guides
**Phase 5**: Multi-select delegation panel with fuzzy search, per-node per-authority sliders
**Phase 6**: Authority-filtered edge view in updateGraphData()

**Follow-up refinements**:
- Responsive zoom (2x/0.5x, scroll 2.5x, 200ms animation)
- Larger nodes (6-56px) and text labels (512×96 canvas, 36px font, 14×3.5 sprite)
- Member list sidebar with click-to-fly-to-node
- Absolute token weights: base×100 delegated + received (no averages)
- Detail panel weight breakdown per authority
- Fixed vite build: wasmBuild() wrapper for all sub-builds

Commits: 7cab8d6, 20c4a19, d4bb1da
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@ -1,41 +0,0 @@
---
id: TASK-122
title: Canvas element reminder scheduling UX enhancements
status: Done
assignee: []
created_date: '2026-03-17 01:01'
labels:
- canvas
- rminders
- UX
dependencies: []
references:
- website/canvas.html
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Add multiple UX affordances for scheduling reminders on canvas shapes: floating calendar icon on selected shapes, right-click context menu option, drag-to-calendar compact mode, and email notifications on reminder creation.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 Floating 📅 icon appears near top-right of selected shape
- [ ] #2 Clicking calendar icon toggles the reminder widget
- [ ] #3 Right-click context menu shows 'Schedule a reminder' option
- [ ] #4 Context menu option opens reminder widget for the target shape
- [ ] #5 Dragging a shape for 200ms+ shows compact calendar in bottom-right
- [ ] #6 Hovering over calendar days during drag highlights them
- [ ] #7 Releasing shape over a highlighted day creates the reminder
- [ ] #8 Reminder API call includes notifyEmail when user email is available
- [ ] #9 Email is fetched from EncryptID and cached for session
- [ ] #10 Feedback message indicates email notification when applicable
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Implemented 4 reminder scheduling UX enhancements in `website/canvas.html` (156 insertions):

1. **Right-click context menu** — "📅 Schedule a reminder" option in shape context menu opens reminder widget
2. **Email notification** — Fetches user email from EncryptID `/auth/api/account/security`, caches it, passes `notifyEmail` to rMinders API, shows confirmation in feedback
3. **Floating calendar icon** — 28px circular 📅 button positioned at selected shape's top-right corner, repositions on scroll/zoom, toggles widget on click
4. **Drag-to-calendar** — Compact calendar appears after 200ms of shape drag, day cells highlight on hover, releasing over a day creates the reminder
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@ -1,91 +0,0 @@
---
id: TASK-123
title: rSwag Feature Parity — Full 8-Phase Implementation
status: Done
assignee: []
created_date: '2026-03-21 06:21'
updated_date: '2026-03-21 06:21'
labels:
- rswag
- feature-parity
- pod
- dithering
- ai-generation
dependencies: []
references:
- modules/rswag/mod.ts
- modules/rswag/pod/printful.ts
- modules/rswag/pod/prodigi.ts
- modules/rswag/dither.ts
- modules/rswag/mockup.ts
- modules/rswag/fulfillment.ts
- modules/rswag/components/folk-swag-designer.ts
- modules/rswag/components/folk-revenue-sankey.ts
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Brought the rspace.online/rswag module to feature parity with the standalone rswag.online (Next.js + FastAPI + PostgreSQL) application. rSwag now owns design tools, product catalog, mockups, POD clients, dithering, and AI generation. rCart owns cart/checkout/payments/order lifecycle. A bridge connects them via catalog ingest and fulfillment routing.
## What was built
### Phase 1: POD Provider Clients
- `modules/rswag/pod/types.ts` — shared POD TypeScript interfaces
- `modules/rswag/pod/printful.ts` — Printful v2 API client (catalog variants, mockup generation, order creation, sandbox mode)
- `modules/rswag/pod/prodigi.ts` — Prodigi v4 API client (orders, quotes, status)
### Phase 2: Enhanced Image Processing
- `modules/rswag/dither.ts` — 11 dithering algorithms (8 error diffusion + 3 ordered), median-cut quantization, screen-print color separations
- `modules/rswag/mockup.ts` — Sharp-based mockup compositor with SVG templates + Printful API fallback
### Phase 3: AI Design Generation
- Gemini-powered design generation (gemini-2.5-flash-image)
- User artwork upload (PNG/JPEG/WebP, min 500x500, max 10MB)
- Design lifecycle: draft → active → paused → removed
### Phase 4: Product Catalog & Mockup Routes
- ~15 new API routes for designs, mockups, dithering, storefront, fulfillment
- Filesystem-based design storage with in-memory index
- 24hr cache for images, LRU caches for dithered/mockup results
### Phase 5: Fulfillment Bridge
- `modules/rswag/fulfillment.ts` — order routing to Printful/Prodigi
- Webhook parsers for shipment tracking updates
- Tracking info lookup
### Phase 6: Frontend Design Tools UI
- 4-tab layout in folk-swag-designer (Browse, Create, HitherDither, Orders)
- Browse: product grid with search/filter/add-to-cart
- Create: AI Generate, Upload, My Designs sub-modes
- HitherDither: algorithm picker, color count, live preview, screen-print separations
- Orders: fulfillment status and tracking
### Phase 7: Revenue Sankey & Enhanced Landing
- `folk-revenue-sankey` web component with animated SVG flow + draggable sliders
- Updated landing page with Sankey embed and new feature descriptions
### Phase 8: Admin & Polish
- Admin routes: design sync, product override, analytics summary
- Schema migration v1→v2 for existing designs
- Extended products.ts with POD SKUs and StorefrontProduct type
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 POD clients (Printful v2, Prodigi v4) implemented with sandbox mode
- [x] #2 11 dithering algorithms with screen-print color separations
- [x] #3 AI design generation via Gemini + user artwork upload
- [x] #4 ~15 new API routes for designs, mockups, dithering, storefront, fulfillment
- [x] #5 Fulfillment bridge routes orders to correct POD provider
- [x] #6 4-tab frontend UI (Browse, Create, HitherDither, Orders)
- [x] #7 Interactive revenue Sankey on landing page
- [x] #8 TypeScript compiles cleanly (zero errors)
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
All 8 phases implemented in a single session. Created 7 new files (pod/types.ts, pod/printful.ts, pod/prodigi.ts, dither.ts, mockup.ts, fulfillment.ts, folk-revenue-sankey.ts) and modified 6 existing files (schemas.ts, products.ts, mod.ts, folk-swag-designer.ts, landing.ts, swag.css). TypeScript compiles with zero errors. Ported Python reference code (printful_client.py, prodigi_client.py, dither_service.py, design_generator.py) to TypeScript.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@ -1,51 +0,0 @@
---
id: TASK-124
title: Encrypt all PII at rest in EncryptID database
status: Done
assignee: []
created_date: '2026-03-24 00:29'
updated_date: '2026-03-24 00:29'
labels:
- security
- encryptid
- database
dependencies: []
references:
- src/encryptid/server-crypto.ts
- src/encryptid/migrations/encrypt-pii.ts
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Server-side AES-256-GCM encryption for all PII fields stored in PostgreSQL. Keys derived from JWT_SECRET via HKDF with dedicated salts (`pii-v1` for encryption, `pii-hash-v1` for HMAC). HMAC-SHA256 hash indexes for equality lookups on email and UP address fields.
**Scope:** 18 fields across 6 tables (users, guardians, identity_invites, space_invites, notifications, fund_claims). Username and display_name excluded (public identifiers, needed for ILIKE search).
**Files:**
- `src/encryptid/server-crypto.ts` — NEW: encryptField(), decryptField(), hashForLookup()
- `src/encryptid/schema.sql` — 18 _enc/_hash columns + 4 indexes
- `src/encryptid/db.ts` — async row mappers with decrypt fallback, dual-write on inserts/updates, hash-based lookups
- `src/encryptid/server.ts` — replaced unkeyed hashEmail() with HMAC hashForLookup()
- `src/encryptid/migrations/encrypt-pii.ts` — NEW: idempotent backfill script
**Remaining:** Drop plaintext columns after extended verification period.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 All PII fields have corresponding _enc columns with AES-256-GCM ciphertext
- [x] #2 HMAC-SHA256 hash indexes enable email and UP address lookups without plaintext
- [x] #3 Row mappers decrypt transparently — callers receive plaintext
- [x] #4 Wrong encryption key cannot decrypt (verified with test)
- [x] #5 Same plaintext produces different ciphertext each time (random IV)
- [x] #6 Backfill migration encrypts all existing rows (0 remaining unencrypted)
- [x] #7 Legacy plaintext fallback works for pre-migration rows during transition
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Deployed 2026-03-23. Commit `9695e95`. Backfill completed: 1 user, 2 guardians, 8 identity invites, 2 fund claims encrypted. 19/19 verification tests passed (ciphertext format, decryption, HMAC determinism, wrong-key rejection, random IV uniqueness). Plaintext columns retained for rollback safety — drop in follow-up task after extended verification.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@ -1,63 +0,0 @@
---
id: TASK-125
title: Configure Stripe & Mollie API keys and test HyperSwitch payment channels
status: To Do
assignee: []
created_date: '2026-03-24 00:56'
labels:
- payments
- hyperswitch
- infrastructure
dependencies: []
references:
- 'https://pay.rspace.online/health'
- 'https://dashboard.stripe.com/test/apikeys'
- 'https://my.mollie.com/dashboard'
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
HyperSwitch payment orchestrator is deployed at `pay.rspace.online` with merchant account `rspace_merchant` and DB migrations complete. The connector configuration and end-to-end payment testing is blocked on obtaining real API keys from Stripe and Mollie.
## Context
- HyperSwitch is live: `https://pay.rspace.online/health`
- Merchant account created with publishable key `pk_snd_9167de4f...`
- Merchant API key saved to `.env` as `HS_MERCHANT_SECRET_KEY`
- Internal mint/escrow/confirm APIs verified working on rspace-online
- Bonding curve ($MYCO) endpoints live and tested
- `INTERNAL_API_KEY` and `RSPACE_INTERNAL_API_KEY` deployed to both repos
## Steps
1. **Obtain Stripe API key** — create Stripe account or use existing, get test mode API key (`sk_test_...`)
2. **Obtain Mollie API key** — create Mollie account, get test API key
3. **Add keys to Infisical** — `STRIPE_API_KEY`, `STRIPE_WEBHOOK_SECRET`, `MOLLIE_API_KEY` in rspace project
4. **Add keys to payment-infra `.env`** on Netcup
5. **Run `scripts/setup-hyperswitch.sh`** — configures Stripe + Mollie connectors, geo-based routing (EU→Mollie, US→Stripe), webhook endpoint
6. **Rebuild payment-infra onramp/offramp services** — they have new HyperSwitch integration code (`hyperswitch.ts`, `hyperswitch-offramp.ts`) but haven't been rebuilt
7. **Test Stripe channel** — create payment intent, complete with test card `4242424242424242`, verify cUSDC minted
8. **Test Mollie channel** — create payment intent with EU billing, complete via Mollie test mode, verify cUSDC minted
9. **Test off-ramp** — initiate withdrawal, verify escrow burn, simulate payout webhook, verify confirm/reverse
10. **Run `bun scripts/test-full-loop.ts`** — full loop: fiat in → cUSDC → $MYCO → cUSDC → fiat out
## Key files
- `payment-infra/scripts/setup-hyperswitch.sh` — connector + routing setup script
- `payment-infra/services/onramp-service/src/hyperswitch.ts` — on-ramp integration
- `payment-infra/services/offramp-service/src/hyperswitch-offramp.ts` — off-ramp integration
- `rspace-online/scripts/test-full-loop.ts` — end-to-end test script
- `rspace-online/server/index.ts` — internal mint/escrow/confirm endpoints (lines 570-680)
- `payment-infra/config/hyperswitch/config.toml` — HyperSwitch TOML config on Netcup
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 Stripe test API key obtained and added to Infisical + payment-infra .env
- [ ] #2 Mollie test API key obtained and added to Infisical + payment-infra .env
- [ ] #3 setup-hyperswitch.sh runs successfully — Stripe + Mollie connectors configured with geo-based routing
- [ ] #4 onramp-service and offramp-service rebuilt with HyperSwitch integration code
- [ ] #5 Stripe test payment completes end-to-end: card payment → webhook → cUSDC minted in CRDT ledger
- [ ] #6 Mollie test payment completes end-to-end: iDEAL/SEPA → webhook → cUSDC minted
- [ ] #7 Off-ramp escrow flow verified: escrow burn → payout → confirm (or reverse on failure)
- [ ] #8 Full loop test passes: fiat → cUSDC → $MYCO swap → cUSDC → fiat withdrawal
<!-- AC:END -->

View File

@ -1,32 +0,0 @@
---
id: TASK-126
title: Repo structure setup
status: Done
assignee: []
created_date: '2026-03-29 20:51'
labels:
- setup
- repo
milestone: m-2
dependencies: []
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Create directory structure (docs/, firmware/, app/js/, app/css/, app/guides/), move existing spec docs to docs/, write README.md
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 Directory structure matches plan
- [ ] #2 Existing docs moved to docs/
- [ ] #3 README.md with project overview, quick start, and structure
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Repo structured, docs moved, README written with full project overview, hardware setup, command protocol, and licence info.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@ -1,33 +0,0 @@
---
id: TASK-127
title: Guide JSON format + bicycle brake pad guide
status: Done
assignee: []
created_date: '2026-03-29 20:51'
labels:
- content
- guide
milestone: m-2
dependencies: []
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Define the guide JSON schema and create the demo guide (bicycle-brake-pads.json) with 10 steps including detection labels, fallback strategies, completion conditions, and timed hints.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 Guide JSON validates against schema
- [ ] #2 10 steps with audio_text, pointer_target, completion_condition
- [ ] #3 Fallback strategies for non-COCO parts
- [ ] #4 Timed hints on each step
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
bicycle-brake-pads.json created with 10 realistic steps, COCO anchor + relative_to fallbacks, dwell/manual completion, and 2 hints per step.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@ -1,33 +0,0 @@
---
id: TASK-128
title: Pico firmware — servo + LED drivers
status: Done
assignee: []
created_date: '2026-03-29 20:51'
labels:
- firmware
- hardware
milestone: m-2
dependencies: []
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Write config.py, servo_driver.py (PCA9685 I2C, smooth interpolation), and led_driver.py (GPIO PWM brightness) for Raspberry Pi Pico.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 PCA9685 init at 50Hz, set_angle with 500-2500us mapping
- [ ] #2 Smooth interpolation in configurable degree steps
- [ ] #3 LED PWM 0-255, blink helper, deinit cleanup
- [ ] #4 Config centralises all pin/calibration constants
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Three firmware files: config.py (constants), servo_driver.py (PCA9685 two-class design with smooth interp), led_driver.py (hardware PWM on GP15).
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@ -1,32 +0,0 @@
---
id: TASK-129
title: Pico firmware — command parser + USB serial
status: Done
assignee: []
created_date: '2026-03-29 20:51'
labels:
- firmware
milestone: m-2
dependencies: []
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Write command_parser.py (JSON line parser, validates & dispatches to servo/LED) and transport_usb.py (non-blocking USB serial listener at 115200 baud).
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 Parses all command types: pan/tilt, led, led_pwm, home, ping
- [ ] #2 Returns JSON ack with current position
- [ ] #3 Graceful error handling for malformed JSON
- [ ] #4 Non-blocking stdin poll with buffer overflow guard
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
command_parser.py: stateless dispatcher with strict validation, defined processing order. transport_usb.py: select.poll non-blocking with 512-char overflow guard.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@ -1,34 +0,0 @@
---
id: TASK-130
title: Pico firmware — BLE transport (Pico W)
status: Done
assignee: []
created_date: '2026-03-29 20:51'
labels:
- firmware
- ble
milestone: m-2
dependencies: []
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Write transport_ble.py (BLE GATT Nordic UART Service) and main.py (auto-detect transport, cooperative poll loop). BLE advertises as 'GaiaAR', chunks notifications to 20-byte MTU.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 NUS service with correct UUIDs registered
- [ ] #2 IRQ handler queues to pending list, no heavy work in interrupt
- [ ] #3 MTU chunking for notifications
- [ ] #4 Auto-reconnect advertising on disconnect
- [ ] #5 main.py soft-imports bluetooth for Pico/Pico W compat
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
transport_ble.py: GATT NUS with chunked notify, per-connection line buffers, auto-readvertise. main.py: soft BLE import, cooperative dual-transport loop, error-resilient.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@ -1,33 +0,0 @@
---
id: TASK-131
title: PWA — camera feed + shell
status: Done
assignee: []
created_date: '2026-03-29 20:51'
labels:
- pwa
- frontend
milestone: m-2
dependencies: []
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Write index.html (three views: setup/guide/complete), main.css (dark workshop theme, mobile-first), and camera.js (rear camera getUserMedia wrapper).
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 Three-view HTML shell with hidden attr toggling
- [ ] #2 Dark theme with amber/green accents, 48px+ touch targets
- [ ] #3 Camera feed fills viewport with environment-facing preference
- [ ] #4 Graceful fallback for desktop webcams
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
index.html (183 lines), main.css (732 lines dark workshop theme), camera.js (environment-facing, desktop fallback).
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@ -1,34 +0,0 @@
---
id: TASK-132
title: PWA — transport abstraction (simulator + serial + BLE)
status: Done
assignee: []
created_date: '2026-03-29 20:52'
labels:
- pwa
- transport
milestone: m-2
dependencies: []
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Write transport.js (GaiaTransport base class, SerialTransport via Web Serial, BLETransport via Web Bluetooth NUS, SimulatorTransport) and simulator.js (virtual pan/tilt panel with crosshair + LED indicator).
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 Base class with connect/disconnect/send/onMessage/isConnected
- [ ] #2 SerialTransport: Web Serial 115200 baud, JSON lines
- [ ] #3 BLETransport: NUS UUIDs, TX notify subscription, 20-byte MTU chunking
- [ ] #4 SimulatorTransport: passes to Simulator.update()
- [ ] #5 Simulator: floating panel with crosshair dot + LED circle + readout
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
transport.js: 3 implementations + factory. simulator.js: DOM panel with crosshair grid, LED indicator, angle readout. ?transport=simulator URL param works.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@ -1,33 +0,0 @@
---
id: TASK-133
title: PWA — guide engine (step sequencing + audio)
status: Done
assignee: []
created_date: '2026-03-29 20:52'
labels:
- pwa
- guide
milestone: m-2
dependencies: []
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Write guide-engine.js (load/sequence/advance steps, dwell timer completion detection, onStepChange/onGuideComplete callbacks), audio-engine.js (Web Speech TTS wrapper), and guide-store.js (IndexedDB offline cache).
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 Guide engine normalises both canonical and on-disk JSON schemas
- [ ] #2 Dwell timer tracks continuous detection, resets on break
- [ ] #3 TTS with English voice preference and queue support
- [ ] #4 IndexedDB store with save/get/list/delete/importFromUrl
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
guide-engine.js: dual schema normalisation, dwell timer. audio-engine.js: SpeechSynthesis with voice selection. guide-store.js: IndexedDB gaia-guides store.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@ -1,35 +0,0 @@
---
id: TASK-134
title: PWA — MediaPipe CV pipeline + pointer mapper
status: Done
assignee: []
created_date: '2026-03-29 20:52'
labels:
- pwa
- cv
milestone: m-2
dependencies: []
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Write cv-pipeline.js (MediaPipe ObjectDetector from CDN, EfficientDet Lite 0 float16, VIDEO mode, detection overlay drawing) and pointer-mapper.js (FOV-based bbox→pan/tilt angle conversion with exponential smoothing).
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 Dynamic import from MediaPipe CDN with GPU delegate + CPU fallback
- [ ] #2 detectForVideo returns normalised bboxes with centerX/centerY
- [ ] #3 drawDetections: green for target, amber dashed for others, HiDPI aware
- [ ] #4 pixelToAngles: mirrored X axis, 80/60 FOV default
- [ ] #5 relativeTarget for fallback anchor offsets
- [ ] #6 Exponential smoothing factor 0.3
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
cv-pipeline.js: MediaPipe Tasks Vision CDN, EfficientDet Lite 0, normalised detections, styled overlay. pointer-mapper.js: FOV mapping, EMA smoothing, relative fallback.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@ -1,34 +0,0 @@
---
id: TASK-135
title: PWA — offline support (SW + IndexedDB)
status: Done
assignee: []
created_date: '2026-03-29 20:52'
labels:
- pwa
- offline
milestone: m-2
dependencies: []
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Write sw.js (service worker with 3 cache buckets: shell/guides/mediapipe, cache-first for shell+CDN, network-first for guides, offline fallback) and manifest.json (PWA install metadata).
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 Three versioned cache buckets
- [ ] #2 Shell pre-cached on install, old caches cleaned on activate
- [ ] #3 CACHE_GUIDE_URL message handler for dynamic guide caching
- [ ] #4 Offline fallback: cached index.html for nav, error JSON for guides
- [ ] #5 PWA manifest with standalone display and portrait orientation
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
sw.js (210 lines): 3 cache buckets, pre-cache on install, clients.claim(), offline fallback. manifest.json with simulator shortcut.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@ -1,28 +0,0 @@
---
id: TASK-136
title: End-to-end hardware test
status: To Do
assignee: []
created_date: '2026-03-29 20:52'
labels:
- testing
- hardware
milestone: m-2
dependencies: []
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Test full pipeline with physical hardware: Pico + PCA9685 + servos + LED. Verify USB serial command/ack, BLE connect from nRF Connect, servo movement, LED control. Test PWA↔Pico over both transports.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 USB: echo JSON to /dev/ttyACM0, servo moves, LED on, ack received
- [ ] #2 BLE: nRF Connect write to NUS RX, verify TX response
- [ ] #3 PWA serial mode: connect, guide step sends pan/tilt, servo tracks
- [ ] #4 PWA BLE mode: same as serial but wireless
- [ ] #5 No firmware crash on malformed input
<!-- AC:END -->

View File

@ -1,35 +0,0 @@
---
id: TASK-137
title: 'Fill slide deck gaps (slides 8, 9, 13, 14, A3, A4)'
status: Done
assignee: []
created_date: '2026-03-29 20:52'
labels:
- docs
- slides
milestone: m-2
dependencies: []
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Write missing content for 4 slides in SLIDE_DECK_PROMPT.md (BT communication, single earbud, manufacturing, IP/Commons) and 2 slides in ANNEX_PROMPT.md (healthcare/first aid, closing platform statement).
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 Slide 8: BT 5.0 dual connection, NUS, <20ms latency, USB-C fallback
- [ ] #2 Slide 9: single earbud spec — IPX4, 4h+, physical button, TTS-driven
- [ ] #3 Slide 13: Phase 1 CEM 500 units, Phase 2 local 3D-print + kit
- [ ] #4 Slide 14: triple licence stack (CERN-OHL-S, AGPL, CC BY-SA)
- [ ] #5 A3: healthcare/first aid — wound care, CPR, tourniquet
- [ ] #6 A4: closing — platform not product, Commons logic
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
All 6 GAP placeholders replaced with full slide content matching deck tone and format. BT, earbud, manufacturing, IP, healthcare, closing slides complete.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@ -1,27 +0,0 @@
---
id: TASK-138
title: Document headlamp mounting interface
status: To Do
assignee: []
created_date: '2026-03-29 20:52'
labels:
- hardware
- docs
milestone: m-2
dependencies: []
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Measure and document the slide-and-lock interface on the Decathlon Forclaz headlamp. Create a technical drawing or dimensioned sketch for the custom module housing design. Include tolerances for 3D printing.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 Physical measurements of headlamp mounting interface
- [ ] #2 Dimensioned sketch or CAD file in docs/
- [ ] #3 Print tolerances documented for FDM PETG/ASA
- [ ] #4 Photos of reference headlamp for comparison
<!-- AC:END -->

View File

@ -1,28 +0,0 @@
---
id: TASK-139
title: First user test (5 users)
status: To Do
assignee: []
created_date: '2026-03-29 20:52'
labels:
- testing
- ux
milestone: m-2
dependencies: []
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Recruit 5 users with no prior bicycle repair experience. Test full end-to-end flow: put on headset, connect phone, follow brake pad guide to completion. Document usability issues, completion rate, time, and qualitative feedback.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 5 users complete the test session
- [ ] #2 Completion rate and time recorded per user
- [ ] #3 Usability issues documented with severity ratings
- [ ] #4 Post-test interview notes captured
- [ ] #5 Summary report with top 3 improvement priorities
<!-- AC:END -->

View File

@ -1,43 +0,0 @@
---
id: TASK-140
title: IPFS integration for backups and generated files
status: Done
assignee: []
created_date: '2026-04-02 22:11'
updated_date: '2026-04-02 22:11'
labels:
- infra
- ipfs
- storage
dependencies: []
references:
- server/ipfs.ts
- server/ipfs-routes.ts
- server/local-first/backup-store.ts
- server/local-first/backup-routes.ts
- server/index.ts
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Add IPFS as a redundant storage layer via Kubo (ipfs.jeffemmett.com). Pin encrypted backups and AI-generated files (images, 3D models, zines) to IPFS fire-and-forget. Filesystem remains primary — IPFS failures are non-fatal. API routes at /api/ipfs for status, pin/unpin, and gateway proxy.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 server/ipfs.ts client library with pin/unpin/status functions
- [x] #2 server/ipfs-routes.ts Hono router at /api/ipfs (status, pin, unpin, gateway proxy)
- [x] #3 Backup pinning in backup-store.ts (fire-and-forget, CID in manifest)
- [x] #4 IPFS URL route in backup-routes.ts (GET /:space/:docId/ipfs)
- [x] #5 Generated file pinning with .cid sidecar files for 8 producer endpoints
- [x] #6 IPFS_API_URL and IPFS_GATEWAY_URL env vars in docker-compose.yml
- [x] #7 Kubo reachable from rspace container via traefik-public network
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Implemented IPFS integration for rspace-online. Created server/ipfs.ts (client library) and server/ipfs-routes.ts (API routes at /api/ipfs). Modified backup-store.ts to pin encrypted backups fire-and-forget with CID stored in manifest. Added pinGeneratedFile() helper in server/index.ts called from 8 producer endpoints (3D models, fal.ai images, Gemini/Imagen images, zine pages). Each pinned file gets a .cid sidecar loaded into memory cache on startup. Kubo container is collab-server-ipfs-1 on traefik-public network. Deployed and verified on Netcup. Key deployment discovery: server uses local Gitea registry (localhost:3000/jeffemmett/rspace-online), not compose build — documented in MEMORY.md.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@ -1,51 +0,0 @@
---
id: TASK-141
title: SMS/text-based poll input for rSpace (magic links + optional Twilio 2-way)
status: To Do
assignee: []
created_date: '2026-04-09 16:42'
labels:
- rChoices
- rCal
- integration
- SMS
dependencies: []
references:
- modules/rchoices/mod.ts
- modules/rcal/mod.ts
- modules/rminders/mod.ts
- server/index.ts (webhook pattern examples around line 2808)
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Enable lightweight poll/RSVP responses via text message. Users send a text or click a magic link in SMS/email to respond to polls (e.g. "1" for Yes, "0" for No).
## Phased approach:
1. **Phase 1 — Magic links (minimal infra):** Generate per-participant short token URLs for polls/RSVPs. Send via Mailcow email (free) or any SMS API. Recipient clicks link → minimal no-auth 1-tap response page → updates Automerge doc (rChoices poll or rCal RSVP).
2. **Phase 2 — Twilio 2-way SMS:** Twilio number (~$1/mo), outbound SMS with poll question + response codes, inbound webhook at `POST /api/sms/inbound` parses reply digit, phone→DID mapping table to attribute responses.
## Key integration points:
- **rChoices** (`modules/rchoices/`) — simple polls (vote/rank/spider)
- **rCal** (`modules/rcal/`) — event RSVPs (attendee fields exist but stub)
- **rMinders** (`modules/rminders/`) — could add SMS as action type for scheduled sends
- **Existing webhook pattern** — follow payment webhook style (unauthenticated POST endpoints)
## Design notes from initial discussion:
- Magic link approach gets 90% of value with minimal new infra
- Twilio costs ~$0.02/round-trip, magic links ~$0.01 outbound only
- Email-based variant is free via existing Mailcow setup
- Need phone→DID mapping if doing 2-way SMS
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 Magic link generation for polls/RSVPs with unique per-participant tokens
- [ ] #2 Minimal no-auth response page (1-tap Yes/No) that updates Automerge doc
- [ ] #3 Email delivery of magic links via Mailcow
- [ ] #4 Optional: Twilio outbound SMS delivery
- [ ] #5 Optional: Twilio inbound webhook parsing for 2-way SMS replies
- [ ] #6 Optional: Phone-to-DID mapping table for SMS identity attribution
<!-- AC:END -->

View File

@ -1,25 +0,0 @@
---
id: TASK-142
title: miC — Voice Conversation Mode for MI Agent
status: Done
assignee: []
created_date: '2026-04-10 22:40'
labels: []
dependencies: []
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Add a "miC" toggle button to the MI agent that enables a full voice conversation loop: speak → transcribe → auto-submit to MI → speak response aloud → listen again.
## Implementation
- `lib/mi-voice-bridge.ts`: MiVoiceBridge class — Edge TTS via `claude-voice.jeffemmett.com` WebSocket + Web Speech Synthesis fallback
- `shared/components/rstack-mi.ts`: Voice mode state machine (IDLE → LISTENING → THINKING → SPEAKING → LISTENING), miC buttons in bar + panel header, voice status strip with waveform animation, auto-submit on 1.5s silence, TTS truncation (strips markdown/code, limits to ~4 sentences), echo prevention, interruption support
## Key Decisions
- Separate SpeechDictation instance from bar dictation (browser only allows one SpeechRecognition)
- No server changes — uses existing #ask() flow and parseMiActions()
- Edge TTS primary, browser speechSynthesis fallback
<!-- SECTION:DESCRIPTION:END -->

View File

@ -1,45 +0,0 @@
---
id: TASK-143
title: Customizable Dashboard with Persistent Home Icon
status: Done
assignee: []
created_date: '2026-04-11 03:18'
updated_date: '2026-04-11 03:18'
labels:
- dashboard
- ux
- tab-bar
dependencies: []
references:
- shared/components/rstack-tab-bar.ts
- shared/components/rstack-user-dashboard.ts
- server/dashboard-routes.ts
- server/shell.ts
- shared/tab-cache.ts
- server/index.ts
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Add always-visible home button in tab bar and customizable widget dashboard system. Persistent home icon toggles dashboard overlay even with tabs open. 8 widget cards (tasks, calendar, activity, members, tools, quick actions, wallet, flows) with toggle/reorder customization persisted to localStorage. Dashboard summary API aggregates data from multiple modules in a single endpoint.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Home icon always visible in tab bar, even with tabs open
- [x] #2 Click home icon toggles dashboard overlay on/off
- [x] #3 Dashboard shows when all tabs closed (existing behavior preserved)
- [x] #4 8 widget cards: tasks, calendar, activity, members, tools, quick actions, wallet, flows
- [x] #5 Customize mode with toggle checkboxes and reorder arrows
- [x] #6 Widget config persisted to localStorage per space
- [x] #7 Dashboard summary API at /api/dashboard-summary/:space
- [x] #8 Auth-gated widgets (activity, wallet) show sign-in prompts when logged out
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Implemented persistent home icon in tab bar and full widget-based dashboard system.

Files modified:
- `rstack-tab-bar.ts`: Permanent home button with home-click event and home-active observed attribute
- `rstack-user-dashboard.ts`: Full refactor with widget registry, config persistence, customize mode, 8 widget cards with per-widget data loading
- `server/shell.ts`: home-click listener for dashboard overlay toggle, home-active tracking on layer-switch and dashboard-navigate
- `shared/tab-cache.ts`: Clear home-active on popstate back-to-tab
- `server/dashboard-routes.ts` (NEW): GET /api/dashboard-summary/:space aggregation endpoint
- `server/index.ts`: Mount dashboard routes

Commit: e632858
Deployed to rspace.online and verified API returns tasks/calendar/flows data.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@ -1,62 +0,0 @@
---
id: TASK-144
title: Power Indices for DAO Governance Analysis
status: Done
assignee: []
created_date: '2026-04-16 18:50'
labels:
- rnetwork
- governance
- encryptid
- trust-engine
dependencies: []
references:
- src/encryptid/power-indices.ts
- src/encryptid/trust-engine.ts
- src/encryptid/schema.sql
- modules/rnetwork/components/folk-graph-viewer.ts
- modules/rnetwork/mod.ts
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Banzhaf & Shapley-Shubik power index computation for rSpace delegation system. Reveals who actually controls outcomes vs raw delegation weights.
## Implemented
- **Compute engine** (`src/encryptid/power-indices.ts`): Banzhaf DP O(n·Q), Shapley-Shubik DP O(n²·Q), Gini coefficient, HHI concentration
- **DB schema**: `power_indices` table (PK: did, space_slug, authority) with materialized results
- **Background job**: Hooks into trust engine 5-min recompute cycle
- **API**: GET `/api/power-indices?space=X&authority=Y`, GET `/api/power-indices/:did`, POST `/api/power-indices/simulate` (coalition what-if)
- **Visualization**: Power tab in rNetwork 3D graph viewer — animated Banzhaf bars, Gini/HHI gauges, node sizing by coalitional power
- **On-demand compute**: First API hit computes + caches if DB empty
## Future Integration Opportunities
- **Delegation Dashboard** (`folk-delegation-manager.ts`): Show each user their own Banzhaf power next to their delegation weights. "Your 10% weight gives you 23% voting power" insight.
- **rVote conviction voting**: Weight votes by Shapley-Shubik instead of raw tokens — prevents plutocratic capture
- **fin-ops blending**: Blend $MYCO token balances with delegation weights (configurable ratio) for fin-ops authority power indices
- **Trust Sankey** (`folk-trust-sankey.ts`): Color/thickness flows by marginal power contribution, not just raw weight
- **Space admin dashboard**: Alert when Gini > 0.6 or HHI > 0.25 (concentration warning)
- **rData analytics**: Time-series of power concentration metrics (Gini trend, effective voters trend)
- **Coalition builder UI**: Interactive "what if we form this coalition?" tool using the simulate endpoint
- **Quadratic power weighting**: Use sqrt(Banzhaf) as vote weight to reduce inequality
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 Banzhaf & Shapley-Shubik computed via DP (not brute force)
- [ ] #2 Results materialized in PG, recomputed every 5 min
- [ ] #3 3 API endpoints (list, per-user, simulate)
- [ ] #4 Power tab in rNetwork graph viewer with animated bars + gauges
- [ ] #5 Node sizes reflect Banzhaf power in power mode
- [ ] #6 On-demand computation when DB empty
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Implemented Banzhaf and Shapley-Shubik power index computation integrated into the trust engine's 5-min background cycle. Power indices table in PG stores materialized results per (did, space, authority). Three API endpoints on EncryptID server with rNetwork proxy routes. Visualization integrated into 3D graph viewer as Power tab — animated bar chart showing weight/Banzhaf/Shapley-Shubik per player, Gini and HHI concentration gauges, and Banzhaf-scaled node sizing. Also fixed encryptid Dockerfile missing welcome-email.ts and swapped mouse controls to left-drag=rotate.
Commits: 97c1b02 (feature), 1bc2a0a (Dockerfile fix). Deployed to Netcup, live at demo.rspace.online/rnetwork/power.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@ -1,45 +0,0 @@
---
id: TASK-145
title: Power Badge in Delegation Manager
status: To Do
assignee: []
created_date: '2026-04-16 18:56'
labels:
- rnetwork
- governance
- power-indices
dependencies:
- TASK-144
references:
- modules/rnetwork/components/folk-delegation-manager.ts
- src/encryptid/power-indices.ts
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Add Banzhaf power percentage badge to `folk-delegation-manager.ts` inbound delegation section.
## What
Each user's inbound delegation count already shows "3 delegations received". Add a power badge: **"3 delegations → 23% power"** fetched from `/api/power-indices/:did`.
## Primitive
- Fetch user's power index on component load: `GET /rnetwork/api/power-indices/{did}?space={space}`
- Display per-authority: weight% vs Banzhaf% with color coding (green if proportional, red if disproportionate)
- Tooltip: "You hold 10% of delegation weight but 23% of actual voting power because smaller players can't form winning coalitions without you"
## Implementation
- `folk-delegation-manager.ts`: Add `fetchPowerBadge()` in `connectedCallback`, cache result
- New `renderPowerBadge(authority)` method → returns HTML for the badge
- Insert into the inbound delegations header row per authority
- ~40 lines of code, one fetch call, zero new files
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 Banzhaf % shown next to inbound delegation count per authority
- [ ] #2 Color coded: green (proportional ±20%), red (overrepresented), blue (underrepresented)
- [ ] #3 Tooltip explains power vs weight difference
- [ ] #4 Graceful fallback when no power data available
<!-- AC:END -->

View File

@ -1,48 +0,0 @@
---
id: TASK-146
title: Sankey Power Overlay — dual-bar node sizing
status: To Do
assignee: []
created_date: '2026-04-16 18:56'
labels:
- rnetwork
- governance
- power-indices
dependencies:
- TASK-144
references:
- modules/rnetwork/components/folk-trust-sankey.ts
- src/encryptid/power-indices.ts
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Add power index overlay to `folk-trust-sankey.ts` right-column nodes.
## What
Right-column delegate nodes currently show rank badge + received weight %. Add a second bar showing Banzhaf power %, creating a visual comparison: raw weight vs actual coalitional power.
## Primitive
- Fetch power indices once on authority change: `GET /rnetwork/api/power-indices?space={space}&authority={authority}`
- Build `Map<did, { banzhaf, shapleyShubik }>` lookup
- Right-column nodes get dual horizontal bars:
- Top bar (gray): raw received weight %
- Bottom bar (authority color): Banzhaf power %
- Nodes where power >> weight glow red (disproportionate influence)
## Implementation
- `folk-trust-sankey.ts`: Add `powerMap` field, fetch in `loadData()`
- Modify `renderRightNodes()` to draw second bar below weight bar
- Add CSS for `.power-bar` with transition animation
- ~60 lines, one fetch, zero new files
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 Dual bars on right-column nodes: weight % and Banzhaf %
- [ ] #2 Red glow on nodes where Banzhaf > 1.5x weight share
- [ ] #3 Bars animate on authority tab switch
- [ ] #4 Toggle to show/hide power overlay
<!-- AC:END -->

View File

@ -1,49 +0,0 @@
---
id: TASK-147
title: Delegation-weighted voting mode for rVote
status: To Do
assignee: []
created_date: '2026-04-16 18:56'
labels:
- rvote
- governance
- power-indices
dependencies:
- TASK-144
references:
- modules/rvote/mod.ts
- src/encryptid/power-indices.ts
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Optional voting mode where a conviction vote's effective weight is scaled by the voter's Shapley-Shubik power index.
## What
Currently rVote uses credit-based quadratic voting (1 vote costs 1 credit, 2 votes cost 4 credits). Add an optional space-level toggle: "delegation-weighted voting", where each vote's effective weight is scaled by the voter's Shapley-Shubik index using the multiplier formula below. This lets delegated authority flow into proposal ranking.
## Primitive: Power Weight Multiplier
- New field in space voting config: `weightMode: 'credits-only' | 'delegation-weighted'`
- When `delegation-weighted`: fetch voter's power index at vote time
- `effectiveWeight = creditWeight × (1 + shapleyShubik × delegationMultiplier)`
- Default `delegationMultiplier = 2.0` (configurable per space)
- Fallback: if no power index data, effectiveWeight = creditWeight (graceful degradation)
## Implementation
- `modules/rvote/mod.ts`: In `POST /api/proposals/:id/vote` handler, check space config
- If delegation-weighted: fetch from EncryptID `/api/power-indices/:did?space={space}`
- Multiply vote weight before storing in Automerge doc
- Display in UI: "Your vote: 3 credits × 1.4x delegation = 4.2 effective weight"
- ~50 lines server, ~20 lines UI display
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 Space config toggle: credits-only vs delegation-weighted
- [ ] #2 Vote weight multiplied by Shapley-Shubik when delegation-weighted
- [ ] #3 Multiplier configurable per space (default 2.0)
- [ ] #4 UI shows breakdown: credits × delegation multiplier = effective
- [ ] #5 Graceful fallback to credits-only when no power data
<!-- AC:END -->

View File

@ -1,52 +0,0 @@
---
id: TASK-148
title: Concentration alerts for space admins
status: To Do
assignee: []
created_date: '2026-04-16 18:56'
labels:
- governance
- encryptid
- power-indices
- notifications
dependencies:
- TASK-144
references:
- src/encryptid/power-indices.ts
- src/encryptid/trust-engine.ts
- src/encryptid/server.ts
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Alert space admins when power concentration exceeds healthy thresholds.
## What
The power indices engine already computes Gini coefficient and HHI per space+authority every 5 minutes. Surface warnings when:
- HHI > 0.25 (highly concentrated — fewer than ~4 effective voters)
- Gini > 0.6 (severe inequality — top players hold most power)
- Single player Banzhaf > 0.5 (near-dictator — one person controls majority)
## Primitive: Concentration Monitor
- New function `checkConcentrationAlerts(spaceSlug)` in `power-indices.ts`
- Called after `computeSpacePowerIndices()` in trust engine cycle
- When threshold crossed: create notification via existing `createNotification()` for space admins
- Notification: category='system', event_type='power_concentration_warning'
- Debounce: only alert once per 24h per space+authority (store `last_alert_at` in power_indices or separate field)
## Implementation
- `src/encryptid/power-indices.ts`: Add `checkConcentrationAlerts()` function
- `src/encryptid/trust-engine.ts`: Call after power index computation
- Uses existing notification system — zero new infrastructure
- ~40 lines, zero new files, zero new tables
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 Alert when HHI > 0.25, Gini > 0.6, or single-player Banzhaf > 0.5
- [ ] #2 Notification sent to space admins via existing notification system
- [ ] #3 24h debounce per space+authority to avoid spam
- [ ] #4 Notification includes specific metric + suggestion (e.g. 'encourage more delegation diversity')
<!-- AC:END -->

View File

@ -1,67 +0,0 @@
---
id: TASK-149
title: Power index time-series snapshots
status: To Do
assignee: []
created_date: '2026-04-16 18:56'
labels:
- governance
- analytics
- power-indices
dependencies:
- TASK-144
references:
- src/encryptid/schema.sql
- src/encryptid/power-indices.ts
- modules/rnetwork/components/folk-graph-viewer.ts
priority: low
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Store daily snapshots of power concentration metrics for trend analysis.
## What
Currently the power_indices table is overwritten on each 5-minute cycle. Add a `power_snapshots` table that stores one row per space+authority per day with aggregate metrics. This enables analysis of whether power is becoming more or less concentrated over time.
## Primitive: Daily Snapshot Aggregation
- New table `power_snapshots`:
```sql
CREATE TABLE power_snapshots (
space_slug TEXT NOT NULL,
authority TEXT NOT NULL,
snapshot_date DATE NOT NULL,
player_count INTEGER,
gini_coefficient REAL,
herfindahl_index REAL,
top3_banzhaf_sum REAL,
effective_voters REAL,
PRIMARY KEY (space_slug, authority, snapshot_date)
);
```
- In trust engine cycle: after computing power indices, check if today's snapshot exists. If not, insert.
- One INSERT per space+authority per day — negligible DB cost.
## Frontend: Sparkline in power panel
- `folk-graph-viewer.ts` power panel: fetch `GET /api/power-snapshots?space=X&authority=Y&days=30`
- Render 30-day sparkline of Gini + HHI below the gauge metrics
- Red trend line = concentrating, green = dispersing
## Implementation
- `src/encryptid/schema.sql`: New table
- `src/encryptid/db.ts`: `upsertPowerSnapshot()`, `getPowerSnapshots(space, authority, days)`
- `src/encryptid/power-indices.ts`: `snapshotIfNeeded()` called from trust engine
- `src/encryptid/server.ts`: `GET /api/power-snapshots` endpoint
- `folk-graph-viewer.ts`: 30-day sparkline SVG in power panel
- ~80 lines backend, ~40 lines frontend
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 power_snapshots table with daily aggregates per space+authority
- [ ] #2 Auto-insert one snapshot per day during trust engine cycle
- [ ] #3 API endpoint returns N days of historical snapshots
- [ ] #4 30-day sparkline in power panel showing Gini + HHI trend
- [ ] #5 Red/green trend coloring based on direction
<!-- AC:END -->

View File

@ -1,50 +0,0 @@
---
id: TASK-150
title: Coalition simulator UI
status: To Do
assignee: []
created_date: '2026-04-16 18:57'
labels:
- rnetwork
- governance
- power-indices
dependencies:
- TASK-144
references:
- src/encryptid/power-indices.ts
- modules/rnetwork/components/folk-graph-viewer.ts
priority: low
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Interactive coalition builder using the existing `/api/power-indices/simulate` endpoint.
## What
Let users select a group of voters and instantly see: "Can this coalition pass a vote? Who is the swing voter?" Uses the simulate endpoint already built in TASK-144.
## Primitive: Coalition Picker Component
- New `<folk-coalition-sim>` element (or inline in power panel)
- Checkbox list of top N voters (sorted by Banzhaf)
- As checkboxes toggle: POST to simulate endpoint, show result:
- ✅ "Winning coalition (67% of weight, needs 50%+1)"
- Per-member: "Alice: swing voter ⚡" / "Bob: not swing (coalition wins without them)"
- "Add 1 more voter to win" suggestion when losing
## Implementation
- Can be embedded in the power panel of `folk-graph-viewer.ts` as a collapsible section
- Or standalone `folk-coalition-sim.ts` for embedding in delegation manager
- POST `/rnetwork/api/power-indices/simulate` with `{ space, authority, coalition: [did1, did2...] }`
- Response already returns `isWinning`, `marginalContributions[].isSwing`
- ~80 lines, zero backend changes (endpoint exists)
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 Checkbox selection of voters from top-N list
- [ ] #2 Live POST to simulate endpoint on selection change
- [ ] #3 Shows winning/losing status with weight vs quota
- [ ] #4 Identifies swing voters in the coalition
- [ ] #5 Suggests minimum additions to form winning coalition
<!-- AC:END -->

View File

@ -1,10 +1,9 @@
--- ---
id: TASK-29 id: TASK-29
title: Port folk-drawfast shape (collaborative drawing/gesture recognition) title: Port folk-drawfast shape (collaborative drawing/gesture recognition)
status: Done status: To Do
assignee: [] assignee: []
created_date: '2026-02-18 19:50' created_date: '2026-02-18 19:50'
updated_date: '2026-04-10 21:28'
labels: labels:
- shape-port - shape-port
- phase-2 - phase-2
@ -35,16 +34,8 @@ Features to implement:
## Acceptance Criteria ## Acceptance Criteria
<!-- AC:BEGIN --> <!-- AC:BEGIN -->
- [x] #1 Freehand drawing works with pointer/touch input - [ ] #1 Freehand drawing works with pointer/touch input
- [x] #2 Gesture recognition detects basic shapes - [ ] #2 Gesture recognition detects basic shapes
- [x] #3 Drawing state syncs across clients - [ ] #3 Drawing state syncs across clients
- [x] #4 Toolbar button added to canvas.html - [ ] #4 Toolbar button added to canvas.html
<!-- AC:END --> <!-- AC:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
2026-04-10: Added AI sketch-to-image generation (fal.ai + Gemini via /api/image-gen/img2img). Split-view layout with drawing canvas + AI result. Auto-generate toggle, strength slider, provider selector. Image preloading for smooth transitions. Port descriptors for folk-arrow connections. AC#1 (freehand drawing) and AC#4 (toolbar button) were already implemented. AC#2 (gesture recognition) and AC#3 (collaborative sync) still outstanding.
AC#2: Implemented Unistroke Recognizer with templates for circle, rectangle, triangle, line, arrow, checkmark. Freehand strokes matching >70% confidence are auto-converted to clean geometric shapes with a floating badge. AC#3: Fixed applyData() to restore strokes array, prompt text, and last result URL from Automerge sync data. toJSON() now exports prompt text for sync.
<!-- SECTION:NOTES:END -->

View File

@ -1,47 +0,0 @@
---
id: TASK-41
title: Build dynamic Shape Registry to replace hardcoded switch statements
status: Done
assignee: []
created_date: '2026-02-18 20:06'
updated_date: '2026-03-14 21:56'
labels:
- infrastructure
- phase-0
- ecosystem
milestone: m-1
dependencies: []
priority: high
status_history:
- status: Done
timestamp: '2026-03-14 21:56'
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Replace the 170-line switch statement in canvas.html's `createShapeElement()` and the 100-line type-switch in community-sync.ts's `#updateShapeElement()` with a dynamic ShapeRegistry.
Create lib/shape-registry.ts with:
- ShapeRegistration interface (tagName, elementClass, defaults, category, portDescriptors, eventDescriptors)
- ShapeRegistry class with register(), createElement(), updateElement(), listAll(), getByCategory()
- Each folk-*.ts gets a static `registration` property and static `fromData()` method
This is the prerequisite for all other ecosystem features (pipes, events, groups, nesting, embedding).
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 ShapeRegistry class created with register/createElement/updateElement methods
- [ ] #2 All 30+ folk-*.ts shapes have static registration property
- [ ] #3 canvas.html switch statement replaced with registry.createElement()
- [ ] #4 community-sync.ts type-switch replaced with registry.updateElement()
- [ ] #5 All existing shapes still create and sync correctly
- [ ] #6 No regression in shape creation or remote sync
<!-- AC:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Shape registry implemented in lib/shape-registry.ts. Switch statements in community-sync.ts removed. Registry used by ecosystem-bridge for dynamic shape loading.
<!-- SECTION:NOTES:END -->

View File

@ -1,67 +0,0 @@
---
id: TASK-42
title: 'Implement Data Pipes: typed data flow through arrows'
status: Done
assignee: []
created_date: '2026-02-18 20:06'
updated_date: '2026-03-15 00:43'
labels:
- feature
- phase-1
- ecosystem
milestone: m-1
dependencies:
- TASK-41
priority: high
status_history:
- status: Done
timestamp: '2026-03-15 00:43'
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Transform folk-arrow from visual-only connector into a typed data conduit between shapes.
New file lib/data-types.ts:
- DataType enum: string, number, boolean, image-url, video-url, text, json, trigger, any
- Type compatibility matrix and isCompatible() function
Add port mixin to FolkShape:
- ports map, getPort(), setPortValue(), onPortValueChanged()
- Port values stored in Automerge: doc.shapes[id].ports[name].value
- 100ms debounce on port propagation to prevent keystroke thrashing
Enhance folk-arrow:
- sourcePort/targetPort fields referencing named ports
- Listen for port-value-changed on source, push to target
- Type compatibility check before pushing
- Visual: arrows tinted by data type, flow animation when active
- Port handle UI during connect mode
Add port descriptors to AI shapes:
- folk-image-gen: input "prompt" (text), output "image" (image-url)
- folk-video-gen: input "prompt" (text), input "image" (image-url), output "video" (video-url)
- folk-prompt: input "context" (text), output "response" (text)
- folk-transcription: output "transcript" (text)
Example pipeline: Transcription →[text]→ Prompt →[text]→ ImageGen →[image-url]→ VideoGen
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 DataType system with compatibility matrix works
- [ ] #2 Shapes can declare input/output ports via registration
- [ ] #3 setPortValue() writes to Automerge and dispatches event
- [ ] #4 folk-arrow pipes data from source port to target port
- [ ] #5 Type incompatible connections show warning
- [ ] #6 Arrows visually indicate data type and active flow
- [ ] #7 Port values sync to remote clients via Automerge
- [ ] #8 100ms debounce prevents thrashing on rapid changes
<!-- AC:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Already implemented: data-types.ts with DataType enum + compatibility matrix, FolkShape has portDescriptors/getPort/setPortValue, folk-arrow connects ports with type checking and flow visualization, AI shapes (image-gen, prompt) have port descriptors.
<!-- SECTION:NOTES:END -->

View File

@ -1,19 +1,22 @@
--- ---
id: TASK-51 id: TASK-51
title: Consolidate standalone r*.online domains → rspace.online title: Consolidate standalone r*.online domains → rspace.online
status: Done status: To Do
assignee: [] assignee: []
created_date: '2026-02-25 07:46' created_date: '2026-02-25 07:46'
updated_date: '2026-03-14 21:55'
labels: labels:
- infrastructure - infrastructure
- domains - domains
- migration - migration
dependencies: [] dependencies: []
references:
- server/index.ts (lines 457-521 — standalone rewrite logic)
- shared/module.ts (standaloneDomain interface)
- shared/components/rstack-app-switcher.ts (external link arrows)
- docker-compose.yml (lines 44-114 — Traefik labels)
- src/encryptid/server.ts (allowedOrigins list)
- src/encryptid/session.ts (JWT aud claim)
priority: high priority: high
status_history:
- status: Done
timestamp: '2026-03-14 21:55'
--- ---
## Description ## Description
@ -39,9 +42,3 @@ Key risks:
- [ ] #5 Standalone .ts entry points deleted - [ ] #5 Standalone .ts entry points deleted
- [ ] #6 Domain registrations allowed to expire - [ ] #6 Domain registrations allowed to expire
<!-- AC:END --> <!-- AC:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Domain consolidation complete. All standalone domains 301 → rspace.online
<!-- SECTION:NOTES:END -->

View File

@ -1,27 +0,0 @@
---
id: TASK-HIGH.6
title: rtasks email checklist — HMAC-signed clickable AC items from email
status: Done
assignee: []
created_date: '2026-03-16 19:28'
updated_date: '2026-03-16 19:28'
labels:
- rtasks
- email
- checklist
- backlog
dependencies: []
parent_task_id: TASK-HIGH
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Email checklist micro-service integrated into rspace-online rtasks module. Sends emails with HMAC-signed links for each backlog task acceptance criterion. Clicking a link toggles the AC in the markdown file and re-renders a confirmation page. Routes: GET /rtasks/check/:token (verify + toggle + render), POST /api/rtasks/send (build + send email). Uses Web Crypto HMAC-SHA256 tokens, direct markdown AC parsing, Nodemailer via Mailcow SMTP (noreply@rmail.online). Mounted at top-level in server/index.ts to bypass space auth middleware.
<!-- SECTION:DESCRIPTION:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Implemented and deployed. Routes at /rtasks/check/:token and /api/rtasks/send. Infisical secrets: RTASKS_HMAC_SECRET, RTASKS_API_KEY. SMTP via mailcowdockerized-postfix-mailcow-1 as noreply@rmail.online. Volume mount /opt/dev-ops:/repos/dev-ops for task file access. E2E tested successfully — email sends, link toggles AC, page re-renders. Commits: integrated into rspace-online (not standalone).
<!-- SECTION:NOTES:END -->

View File

@ -1,35 +0,0 @@
---
id: TASK-HIGH.7
title: Intent-routed resource-backed commitments for rTime
status: Done
assignee: []
created_date: '2026-04-01 05:36'
updated_date: '2026-04-01 05:38'
labels: []
dependencies: []
parent_task_id: TASK-HIGH
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Anoma-style intent routing integrated into rTime. Members declare needs/capacities as intents, solver finds collaboration clusters via Mycelium Clustering algorithm, settlement locks tokens via CRDT escrow. New files: schemas-intent.ts, solver.ts, settlement.ts, skill-curve.ts, reputation.ts, intent-routes.ts. Frontend: Collaborate tab with intent cards, solver results, accept/reject, skill prices, status rings on pool orbs.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Intent CRUD routes working
- [x] #2 Solver produces valid cluster matches
- [x] #3 Settlement creates connections and tasks atomically
- [x] #4 Skill curve pricing responds to supply/demand
- [x] #5 Collaborate tab renders in frontend
- [x] #6 Status rings visible on pool orbs
<!-- AC:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Committed 08cae26, pushed to Gitea/GitHub, merged dev→main, rebuild triggered on Netcup.
Deployed to production. Commit 08cae26, built and running on Netcup. Live at rspace.online/{space}/rtime (Collaborate tab).
<!-- SECTION:NOTES:END -->

View File

@ -1,23 +0,0 @@
---
id: TASK-LOW.1
title: 'Netcup memory pressure: 7.9G in swap, 1.7G free'
status: To Do
assignee: []
created_date: '2026-04-16 23:18'
labels: []
dependencies: []
parent_task_id: TASK-LOW
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Snapshot 2026-04-17 01:13: 45G/62G used, 1.7G free, 7.9G in swap. Page-swapping regularly. Not urgent but warrants a pass: (a) audit which containers have mem caps > working-set (too generous) vs containers with no cap at all (already fixed by enforce script patch tonight), (b) restart long-running JVM/node containers that leaked, (c) consider killing 'nice-to-have' services if starved. Top mem consumers last checked: mailcow stack, p2pwiki-elasticsearch (3G cap), various twenty-* stacks, gitea (633M / 1G cap = 63%).
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 Free mem > 4G under normal load
- [ ] #2 Swap usage < 2G under normal load
- [ ] #3 Identified + documented over-allocated containers
<!-- AC:END -->

View File

@ -1,24 +0,0 @@
---
id: TASK-LOW.2
title: >-
Deploy enforce-container-limits.sh from dev-ops repo (replace unversioned
/opt/scripts/)
status: To Do
assignee: []
created_date: '2026-04-16 23:18'
labels: []
dependencies: []
parent_task_id: TASK-LOW
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Script is now in dev-ops at netcup/scripts/enforce-container-limits.sh (commit dev-ops/73acc6e → main/9b25487). /opt/scripts/enforce-container-limits.sh on Netcup is still a manual copy not tied to git. Consider: (a) symlink /opt/scripts/ → /opt/dev-ops/netcup/scripts/ so git pulls update the script, or (b) add a deploy hook that copies on commit. Option (a) is simpler but exposes directory structure; option (b) is safer.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 Production /opt/scripts/enforce-container-limits.sh tracks dev-ops
- [ ] #2 Script edit in dev-ops flows to Netcup without manual scp
<!-- AC:END -->

View File

@ -1,22 +0,0 @@
---
id: TASK-LOW.3
title: Sablier scale-to-zero for encryptid (original TASK-MEDIUM.7 scope)
status: To Do
assignee: []
created_date: '2026-04-16 23:18'
labels: []
dependencies: []
parent_task_id: TASK-LOW
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Retargeted tonight to sidecars (TASK-MEDIUM.7 Done). Original idea was to put encryptid + encryptid-db behind Sablier for 256MB RAM savings when auth is idle. Tradeoff: cold-start latency (few seconds) on first login after idle — user-facing annoyance. Probably not worth it for auth, but documenting for future consideration. If pursued: add Sablier labels to encryptid services, configure Traefik dynamic config to route auth.rspace.online / auth.ridentity.online / encryptid.jeffemmett.com through Sablier middleware (see dev-ops/netcup/traefik/config/sablier-voice.yml for the pattern).
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 Decision: pursue or permanently close
- [ ] #2 If pursued: labels + Traefik route + verified cold-start acceptable
<!-- AC:END -->

View File

@ -1,24 +0,0 @@
---
id: TASK-MEDIUM.10
title: Roll out canvas-with-widgets UX pattern to remaining rApps
status: To Do
assignee: []
created_date: '2026-04-16 23:17'
labels: []
dependencies: []
parent_task_id: TASK-MEDIUM
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Prototype landed tonight at demo.rspace.online/rtasks/canvas: folk-app-canvas + folk-widget + 3 rTasks widgets (Board/Backlog/Activity). Each rApp's root view becomes an integrated canvas of togglable widgets instead of siloed tab pages. 24 single-view rApps are candidates — feature inventory already done (see conversation log). Pending user review of rTasks prototype before rolling out. Planned tab groupings per rApp: rMeets, rcal, rmaps, rinbox, rtrips, rtime, rfiles, rdocs, rnotes, rfeeds, rchoices, rvote, rbnb, rvnb, rbooks, rdata, rphotos, rforum. Skip: rcred, rgov, rpast, rsplat, rtube, rchats (genuinely single-purpose).
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 User approves rTasks prototype after visual review
- [ ] #2 Widget registry pattern documented for new rApp authors
- [ ] #3 Each approved rApp has a /canvas route alongside existing root
- [ ] #4 Mobile fallback (stacked cards) tested on real device
<!-- AC:END -->

View File

@ -1,23 +0,0 @@
---
id: TASK-MEDIUM.4
title: Inline CrowdSurf swipe cards in rChoices dashboard
status: Done
assignee: []
created_date: '2026-03-17 00:30'
updated_date: '2026-03-17 00:30'
labels: []
dependencies: []
parent_task_id: TASK-MEDIUM
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Embed swipe-card interface in the CrowdSurf tab of folk-choices-dashboard.ts, populated with rChoices option data using seeded PRNG sortition. Right-swipe = approve (casts vote), left-swipe = skip. Includes gesture handling, localStorage persistence, summary view, and reset.
<!-- SECTION:DESCRIPTION:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Implemented inline CrowdSurf swipe cards with seeded PRNG sortition (mulberry32 + djb2), gesture handling adapted from folk-crowdsurf-dashboard.ts with bug fixes, localStorage persistence, vote casting via local-first client, summary view, and reset. Commit: 383441e on dev+main.
<!-- SECTION:NOTES:END -->

View File

@ -1,23 +0,0 @@
---
id: TASK-MEDIUM.5
title: 'Move CrowdSurf under rChoices sub-nav, fix header overlap'
status: Done
assignee: []
created_date: '2026-03-17 00:43'
updated_date: '2026-03-17 00:43'
labels: []
dependencies: []
parent_task_id: TASK-MEDIUM
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Hide CrowdSurf from app switcher, replace dead Polls/Results outputPaths with actual tab routes (Spider/Ranking/Voting/CrowdSurf), add /:tab route, component reads tab attribute, remove internal demo-tabs in favor of shell sub-nav.
<!-- SECTION:DESCRIPTION:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Completed: CrowdSurf hidden from app switcher, outputPaths updated to Spider/Ranking/Voting/CrowdSurf with working /:tab routes, internal demo-tabs removed (shell sub-nav handles navigation), JS cache bumped to v=6. Commit: 362bdd5 on dev+main.
<!-- SECTION:NOTES:END -->

View File

@ -1,25 +0,0 @@
---
id: TASK-MEDIUM.6
title: Platform Connections dashboard in space settings
status: Done
assignee: []
created_date: '2026-03-31 20:25'
updated_date: '2026-03-31 22:54'
labels: []
dependencies: []
parent_task_id: TASK-MEDIUM
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Add 5th 'Connections' tab to space settings modal with n8n-style visual dashboard. Shows platform cards (Google, Notion, ClickUp live + 7 coming soon) connected via SVG bezier lines to central rSpace hub node. Includes OAuth connect/disconnect flows and GET /api/oauth/status endpoint. Files: server/oauth/index.ts, server/index.ts, shared/components/rstack-space-switcher.ts.
<!-- SECTION:DESCRIPTION:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Implemented and deployed 2026-03-31. Commit 26aa643. Live at rspace.online — open any space gear icon → Connections tab.
Refactored 2026-03-31 (commit 32093a0): Moved connections dashboard from space settings 5th tab to My Account modal as collapsible section. Added selective sharing — users connect platforms to personal data store, then pick which community spaces to share data into via per-provider space checkboxes. New endpoints: GET/POST /api/oauth/sharing. Sharing config in Automerge doc {userSpace}:oauth:sharing.
<!-- SECTION:NOTES:END -->

View File

@ -1,61 +0,0 @@
---
id: TASK-MEDIUM.7
title: Migrate on-demand sidecars from sidecar-manager.ts to Sablier
status: To Do
assignee: []
created_date: '2026-04-16 22:44'
updated_date: '2026-04-16 22:56'
labels: []
dependencies: []
parent_task_id: TASK-MEDIUM
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
docker-compose.sablier-support.yml is currently a no-op placeholder. To activate scale-to-zero for encryptid + encryptid-db:
1. Add sablier labels to encryptid/encryptid-db in sablier-support.yml (copy from docker-compose.sablier-encryptid.yml):
- sablier.enable=true
- sablier.group=encryptid
- traefik.enable=false (on encryptid)
2. Configure Traefik dynamic config to route auth.rspace.online / auth.ridentity.online / encryptid.jeffemmett.com through the Sablier middleware (sablier.group=encryptid).
3. Verify Sablier container (running 45h healthy on Netcup) receives requests and wakes encryptid on demand.
Without step 2, flipping traefik.enable=false on encryptid will break auth immediately. Must sequence: Traefik route first, then compose-up with new labels.
Context: discovered 2026-04-16 while deploying rTasks canvas — .env on Netcup referenced missing docker-compose.sablier-support.yml, causing docker compose failures.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 Sablier labels present on encryptid services via sablier-support.yml
- [ ] #2 Traefik dynamic config routes encryptid hostnames through Sablier middleware
- [ ] #3 auth.rspace.online returns 200 after container idle timeout + wake
- [ ] #4 Sablier logs show wake events from real requests
<!-- AC:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Completed 2026-04-16.
Implementation:
- server/sidecar-manager.ts rewritten to call Sablier's /api/strategies/blocking HTTP endpoint instead of Docker Engine API (commit ee251fd → merged 000ee0d)
- Public API (ensureSidecar / markSidecarUsed / isSidecarRunning / startIdleWatcher) unchanged; all server/index.ts callers untouched
- SABLIER_URL defaults to http://sablier:10000; SIDECAR_SESSION_DURATION=5m matches previous idle timeout
- dev-ops/netcup/sablier/docker-compose.yml attaches sablier to rspace-online_rspace-internal (commit dev-ops/1a29e30 → merged d62b70a)
Verification (demo.rspace.online):
- rspace logs show "[sidecar] Lifecycle delegated to Sablier at http://sablier:10000 (ttl 5m)" on startup
- From rspace container: fetch(http://sablier:10000/health) → 200
- Sablier /api/strategies/blocking returns 200 for an existing running container (rspace-db test)
Outstanding:
- The 5 sidecar containers (kicad-mcp, freecad-mcp, blender-worker, scribus-novnc, open-notebook) do not currently exist on Netcup — run `docker compose --profile sidecar create` in /opt/rspace-online to create them before Sablier can wake anything on demand. Ollama is not in the rspace compose at all; sidecar-manager.ts still lists it but ensureSidecar("ollama") will be a no-op on wake until an ollama container is defined somewhere Sablier can see it.
- Docker socket mount at /var/run/docker.sock on rspace container is now unused — can be removed in a follow-up (security hygiene).
[AC GATE] Reverted to 'To Do': 4/4 ACs unchecked
<!-- SECTION:NOTES:END -->

View File

@ -1,26 +0,0 @@
---
id: TASK-MEDIUM.8
title: >-
Create on-demand sidecar containers
(kicad/freecad/blender/scribus/open-notebook)
status: To Do
assignee: []
created_date: '2026-04-16 23:17'
labels: []
dependencies: []
parent_task_id: TASK-MEDIUM
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
The 5 sidecar containers defined in /opt/rspace-online/docker-compose.yml under profiles:[sidecar] don't exist on Netcup. Sablier can't wake what doesn't exist. Run `cd /opt/rspace-online && docker compose --profile sidecar create` when server load is low — this triggers heavy Docker builds (KiCad/FreeCAD/Blender pull hundreds of MB + compile). Wait for load avg < 8 and free mem > 4GB before running.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 All 5 sidecar images built on Netcup
- [ ] #2 Containers in 'created' state (not started)
- [ ] #3 Sablier can wake each one via /api/strategies/blocking
- [ ] #4 ensureSidecar(name) from rspace server triggers actual container start
<!-- AC:END -->

View File

@ -1,23 +0,0 @@
---
id: TASK-MEDIUM.9
title: Wire ollama into rspace sidecar lifecycle
status: To Do
assignee: []
created_date: '2026-04-16 23:17'
labels: []
dependencies: []
parent_task_id: TASK-MEDIUM
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
server/sidecar-manager.ts lists ollama in SIDECARS but there's no ollama service in /opt/rspace-online/docker-compose.yml. ensureSidecar('ollama') calls from server/index.ts:2853 silently no-op. Either: (a) add an ollama service to the compose under profiles:[sidecar] so Sablier can wake it, or (b) drop ollama from sidecar-manager and adjust callers. Memory doc (2026-03-31 entry) suggests ollama was intended as a sidecar — option (a) is more likely correct.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 ollama container exists on rspace-internal network at host 'ollama' port 11434
- [ ] #2 Sablier can wake it via blocking API
- [ ] #3 fetch('http://ollama:11434/') from rspace returns 200 after ensureSidecar('ollama')
<!-- AC:END -->

View File

@ -44,7 +44,7 @@
<div class="field"> <div class="field">
<label for="slug">Space slug</label> <label for="slug">Space slug</label>
<input type="text" id="slug" placeholder="my-space" /> <input type="text" id="slug" placeholder="my-space" />
<div class="help">Your space name in the URL (e.g. "my-space" from my-space.rspace.online)</div> <div class="help">Your space name in the URL (e.g. "my-space" from rspace.online/my-space)</div>
</div> </div>
</div> </div>

1073
bun.lock

File diff suppressed because it is too large Load Diff

View File

@ -1,10 +0,0 @@
services:
encryptid:
labels:
- "sablier.enable=true"
- "sablier.group=encryptid"
- "traefik.enable=false"
encryptid-db:
labels:
- "sablier.enable=true"
- "sablier.group=encryptid"

View File

@ -1,13 +0,0 @@
# Sablier override — referenced by COMPOSE_FILE on Netcup.
#
# Currently a NO-OP placeholder so `docker compose` commands don't fail when
# COMPOSE_FILE includes this path. Real Sablier activation is deferred until
# Traefik middleware wiring is ready — see docker-compose.sablier-encryptid.yml
# for the encryptid label pattern, which cannot be applied in place because
# `traefik.enable=false` on the encryptid container would immediately break
# auth traffic without a Sablier-backed route in front of it.
#
# To activate: add sablier labels here AND configure Traefik dynamic config
# to route encryptid's hostname through the Sablier middleware.
services: {}

View File

@ -1,13 +1,9 @@
services: services:
rspace: rspace:
# CI pushes to localhost:3000/jeffemmett/rspace-online:<short-sha> and build:
# sets IMAGE_TAG via env when running `docker compose up -d --no-build`. context: .
# Falls back to :latest for local rebuilds. additional_contexts:
image: localhost:3000/jeffemmett/rspace-online:${IMAGE_TAG:-latest} encryptid-sdk: ../encryptid-sdk
# build:
# context: .
# additional_contexts:
# encryptid-sdk: ../encryptid-sdk
container_name: rspace-online container_name: rspace-online
restart: unless-stopped restart: unless-stopped
volumes: volumes:
@ -18,7 +14,6 @@ services:
- rspace-splats:/data/splats - rspace-splats:/data/splats
- rspace-docs:/data/docs - rspace-docs:/data/docs
- rspace-backups:/data/backups - rspace-backups:/data/backups
- /opt/dev-ops:/repos/dev-ops:rw
environment: environment:
- NODE_ENV=production - NODE_ENV=production
- STORAGE_DIR=/data/communities - STORAGE_DIR=/data/communities
@ -34,7 +29,6 @@ services:
- INFISICAL_PROJECT_SLUG=rspace - INFISICAL_PROJECT_SLUG=rspace
- INFISICAL_ENV=prod - INFISICAL_ENV=prod
- INFISICAL_URL=http://infisical:8080 - INFISICAL_URL=http://infisical:8080
- JWT_SECRET=${JWT_SECRET}
- FLOW_SERVICE_URL=http://payment-flow:3010 - FLOW_SERVICE_URL=http://payment-flow:3010
- FLOW_ID=a79144ec-e6a2-4e30-a42a-6d8237a5953d - FLOW_ID=a79144ec-e6a2-4e30-a42a-6d8237a5953d
- FUNNEL_ID=0ff6a9ac-1667-4fc7-9a01-b1620810509f - FUNNEL_ID=0ff6a9ac-1667-4fc7-9a01-b1620810509f
@ -44,33 +38,28 @@ services:
- IMAP_HOST=mail.rmail.online - IMAP_HOST=mail.rmail.online
- IMAP_PORT=993 - IMAP_PORT=993
- IMAP_TLS_REJECT_UNAUTHORIZED=false - IMAP_TLS_REJECT_UNAUTHORIZED=false
- SMTP_HOST=${SMTP_HOST:-mailcowdockerized-postfix-mailcow-1} - SMTP_HOST=${SMTP_HOST:-mail.rmail.online}
- SMTP_PORT=${SMTP_PORT:-587} - SMTP_PORT=${SMTP_PORT:-587}
- SMTP_USER=${SMTP_USER:-noreply@rmail.online} - SMTP_USER=${SMTP_USER:-noreply@rmail.online}
- SMTP_PASS=${SMTP_PASS} - SMTP_PASS=${SMTP_PASS}
- SMTP_FROM=${SMTP_FROM:-rSpace <noreply@rmail.online>}
- SITE_URL=https://rspace.online - SITE_URL=https://rspace.online
- RTASKS_REPO_BASE=/repos
- SPLAT_NOTIFY_EMAIL=jeffemmett@gmail.com - SPLAT_NOTIFY_EMAIL=jeffemmett@gmail.com
- TWENTY_API_URL=http://twenty-ch-server:3000 - TWENTY_API_URL=http://twenty-ch-server:3000
- TWENTY_API_TOKEN=${TWENTY_API_TOKEN:-}
- TRANSAK_API_KEY=${TRANSAK_API_KEY:-}
- TRANSAK_API_KEY_STAGING=${TRANSAK_API_KEY_STAGING:-}
- TRANSAK_API_KEY_PRODUCTION=${TRANSAK_API_KEY_PRODUCTION:-}
- TRANSAK_SECRET=${TRANSAK_SECRET:-}
- TRANSAK_WEBHOOK_SECRET_STAGING=${TRANSAK_WEBHOOK_SECRET_STAGING:-}
- TRANSAK_WEBHOOK_SECRET_PRODUCTION=${TRANSAK_WEBHOOK_SECRET_PRODUCTION:-}
- TRANSAK_ENV=${TRANSAK_ENV:-STAGING}
- OLLAMA_URL=http://ollama:11434 - OLLAMA_URL=http://ollama:11434
- INFISICAL_AI_CLIENT_ID=${INFISICAL_AI_CLIENT_ID} - INFISICAL_AI_CLIENT_ID=${INFISICAL_AI_CLIENT_ID}
- INFISICAL_AI_CLIENT_SECRET=${INFISICAL_AI_CLIENT_SECRET} - INFISICAL_AI_CLIENT_SECRET=${INFISICAL_AI_CLIENT_SECRET}
- INFISICAL_AI_PROJECT_SLUG=claude-ops - INFISICAL_AI_PROJECT_SLUG=claude-ops
- INFISICAL_AI_SECRET_PATH=/ai - INFISICAL_AI_SECRET_PATH=/ai
- INTERNAL_API_KEY=${INTERNAL_API_KEY}
- LISTMONK_URL=https://newsletter.cosmolocal.world - LISTMONK_URL=https://newsletter.cosmolocal.world
- NOTEBOOK_API_URL=http://open-notebook:5055 - NOTEBOOK_API_URL=http://open-notebook:5055
- SPLIT_360_URL=http://video360-splitter:5000
- SCRIBUS_BRIDGE_URL=http://scribus-novnc:8765
- TRANSAK_ENV=${TRANSAK_ENV:-STAGING}
- SCRIBUS_BRIDGE_SECRET=${SCRIBUS_BRIDGE_SECRET}
- SCRIBUS_NOVNC_URL=https://design.rspace.online
- IPFS_API_URL=http://collab-server-ipfs-1:5001
- IPFS_GATEWAY_URL=https://ipfs.jeffemmett.com
- MEETING_INTELLIGENCE_API_URL=${MEETING_INTELLIGENCE_API_URL:-http://meeting-intelligence-api:8000}
- MI_INTERNAL_KEY=${MI_INTERNAL_KEY}
- JITSI_URL=${JITSI_URL:-https://jeffsi.localvibe.live}
depends_on: depends_on:
rspace-db: rspace-db:
condition: service_healthy condition: service_healthy
@ -174,21 +163,9 @@ services:
- "traefik.http.routers.rspace-rsocials.entrypoints=web" - "traefik.http.routers.rspace-rsocials.entrypoints=web"
- "traefik.http.routers.rspace-rsocials.priority=120" - "traefik.http.routers.rspace-rsocials.priority=120"
- "traefik.http.routers.rspace-rsocials.service=rspace-online" - "traefik.http.routers.rspace-rsocials.service=rspace-online"
# Rate limiting middleware (coarse edge defense — token bucket per client IP)
# Without sourceCriterion Traefik groups by request Host, so one bucket is
# shared across ALL users of a domain — trips instantly under normal use.
# Scope per client IP using Cloudflare's CF-Connecting-IP header.
- "traefik.http.middlewares.rspace-ratelimit.ratelimit.average=600"
- "traefik.http.middlewares.rspace-ratelimit.ratelimit.burst=150"
- "traefik.http.middlewares.rspace-ratelimit.ratelimit.period=1m"
- "traefik.http.middlewares.rspace-ratelimit.ratelimit.sourcecriterion.requestheadername=CF-Connecting-IP"
- "traefik.http.routers.rspace-main.middlewares=rspace-ratelimit"
- "traefik.http.routers.rspace-canvas.middlewares=rspace-ratelimit"
# Service configuration # Service configuration
- "traefik.http.services.rspace-online.loadbalancer.server.port=3000" - "traefik.http.services.rspace-online.loadbalancer.server.port=3000"
- "traefik.docker.network=traefik-public" - "traefik.docker.network=traefik-public"
mem_limit: 1536m
cpus: 2
networks: networks:
- traefik-public - traefik-public
- rspace-internal - rspace-internal
@ -200,8 +177,6 @@ services:
image: postgres:16-alpine image: postgres:16-alpine
container_name: rspace-db container_name: rspace-db
restart: unless-stopped restart: unless-stopped
mem_limit: 256m
cpus: 1
volumes: volumes:
- rspace-pgdata:/var/lib/postgresql/data - rspace-pgdata:/var/lib/postgresql/data
- ./db/init.sql:/docker-entrypoint-initdb.d/init.sql:ro - ./db/init.sql:/docker-entrypoint-initdb.d/init.sql:ro
@ -226,8 +201,6 @@ services:
encryptid-sdk: ../encryptid-sdk encryptid-sdk: ../encryptid-sdk
container_name: encryptid container_name: encryptid
restart: unless-stopped restart: unless-stopped
mem_limit: 256m
cpus: 1
depends_on: depends_on:
encryptid-db: encryptid-db:
condition: service_healthy condition: service_healthy
@ -236,11 +209,11 @@ services:
- PORT=3000 - PORT=3000
- JWT_SECRET=${JWT_SECRET} - JWT_SECRET=${JWT_SECRET}
- DATABASE_URL=postgres://encryptid:${ENCRYPTID_DB_PASSWORD}@encryptid-db:5432/encryptid - DATABASE_URL=postgres://encryptid:${ENCRYPTID_DB_PASSWORD}@encryptid-db:5432/encryptid
- SMTP_HOST=${SMTP_HOST:-mailcowdockerized-postfix-mailcow-1} - SMTP_HOST=${SMTP_HOST:-mail.rmail.online}
- SMTP_PORT=${SMTP_PORT:-587} - SMTP_PORT=${SMTP_PORT:-587}
- SMTP_USER=${SMTP_USER:-noreply@rmail.online} - SMTP_USER=${SMTP_USER:-noreply@rspace.online}
- SMTP_PASS=${SMTP_PASS} - SMTP_PASS=${SMTP_PASS}
- SMTP_FROM=${SMTP_FROM:-EncryptID <noreply@rmail.online>} - SMTP_FROM=${SMTP_FROM:-EncryptID <noreply@rspace.online>}
- RECOVERY_URL=${RECOVERY_URL:-https://auth.rspace.online/recover} - RECOVERY_URL=${RECOVERY_URL:-https://auth.rspace.online/recover}
- MAILCOW_API_URL=${MAILCOW_API_URL:-http://nginx-mailcow:8080} - MAILCOW_API_URL=${MAILCOW_API_URL:-http://nginx-mailcow:8080}
- MAILCOW_API_KEY=${MAILCOW_API_KEY:-} - MAILCOW_API_KEY=${MAILCOW_API_KEY:-}
@ -275,8 +248,6 @@ services:
image: postgres:16-alpine image: postgres:16-alpine
container_name: encryptid-db container_name: encryptid-db
restart: unless-stopped restart: unless-stopped
mem_limit: 256m
cpus: 1
environment: environment:
- POSTGRES_DB=encryptid - POSTGRES_DB=encryptid
- POSTGRES_USER=encryptid - POSTGRES_USER=encryptid
@ -292,118 +263,31 @@ services:
retries: 5 retries: 5
start_period: 10s start_period: 10s
# ── Blender Multi-User replication server (always-on, persistent TCP) ── # ── Open Notebook (NotebookLM-like RAG service) ──
blender-multiuser:
image: registry.gitlab.com/slumber/multi-user/multi-user-server:0.5.8
container_name: blender-multiuser
restart: unless-stopped
mem_limit: 512m
cpus: 1
ports:
- "5555:5555"
- "5556:5556"
- "5557:5557"
- "5558:5558"
environment:
- port=5555
- password=${BLENDER_MULTIUSER_PASSWORD}
- timeout=5000
- log_level=INFO
- log_file=multiuser_server.log
networks:
- rspace-internal
healthcheck:
test: ["CMD-SHELL", "python3 -c 'import socket; s=socket.socket(); s.settimeout(2); s.connect((\"localhost\",5555)); s.close()' || exit 1"]
interval: 30s
timeout: 5s
retries: 3
start_period: 10s
# ── On-demand sidecars (started/stopped by server/sidecar-manager.ts) ──
# Build: docker compose --profile sidecar build
# Create: docker compose --profile sidecar create
# These containers are NOT started with `docker compose up -d`.
# The rspace server starts them on API request and stops them after 5min idle.
kicad-mcp:
build: ./docker/kicad-mcp
container_name: kicad-mcp
restart: "no"
profiles: ["sidecar"]
mem_limit: 2g
cpus: 1
volumes:
- rspace-files:/data/files
networks:
- rspace-internal
freecad-mcp:
build: ./docker/freecad-mcp
container_name: freecad-mcp
restart: "no"
profiles: ["sidecar"]
mem_limit: 1g
cpus: 1
volumes:
- rspace-files:/data/files
networks:
- rspace-internal
blender-worker:
build: ./docker/blender-worker
container_name: blender-worker
restart: "no"
profiles: ["sidecar"]
mem_limit: 1g
cpus: 2
volumes:
- rspace-files:/data/files
networks:
- rspace-internal
# ── Scribus noVNC (rDesign DTP workspace) — on-demand sidecar ──
scribus-novnc:
build:
context: ./docker/scribus-novnc
container_name: scribus-novnc
restart: "no"
profiles: ["sidecar"]
mem_limit: 512m
cpus: 1
volumes:
- scribus-designs:/data/designs
- rspace-files:/data/files
environment:
- BRIDGE_SECRET=${SCRIBUS_BRIDGE_SECRET}
- BRIDGE_PORT=8765
- NOVNC_PORT=6080
- SCREEN_WIDTH=1920
- SCREEN_HEIGHT=1080
- SCREEN_DEPTH=24
healthcheck:
test: ["CMD-SHELL", "curl -so /dev/null -w '%{http_code}' http://localhost:8765/ | grep -q '^[2-4]'"]
interval: 30s
timeout: 5s
retries: 3
start_period: 30s
networks:
- rspace-internal
# ── Open Notebook (NotebookLM-like RAG service) — on-demand sidecar ──
open-notebook: open-notebook:
image: ghcr.io/lfnovo/open-notebook:v1-latest-single image: ghcr.io/lfnovo/open-notebook:v1-latest-single
container_name: open-notebook container_name: open-notebook
restart: "no" restart: always
profiles: ["sidecar"]
mem_limit: 1g
cpus: 1
env_file: ./open-notebook.env env_file: ./open-notebook.env
volumes: volumes:
- open-notebook-data:/app/data - open-notebook-data:/app/data
- open-notebook-db:/mydata - open-notebook-db:/mydata
networks: networks:
- rspace-internal - traefik-public
- ai-internal - ai-internal
labels:
- "traefik.enable=true"
- "traefik.docker.network=traefik-public"
# Frontend UI
- "traefik.http.routers.rspace-notebook.rule=Host(`notebook.rspace.online`)"
- "traefik.http.routers.rspace-notebook.entrypoints=web"
- "traefik.http.routers.rspace-notebook.tls.certresolver=letsencrypt"
- "traefik.http.services.rspace-notebook.loadbalancer.server.port=8502"
# API endpoint (used by rNotes integration)
- "traefik.http.routers.rspace-notebook-api.rule=Host(`notebook-api.rspace.online`)"
- "traefik.http.routers.rspace-notebook-api.entrypoints=web"
- "traefik.http.routers.rspace-notebook-api.tls.certresolver=letsencrypt"
- "traefik.http.services.rspace-notebook-api.loadbalancer.server.port=5055"
volumes: volumes:
rspace-data: rspace-data:
@ -415,7 +299,6 @@ volumes:
rspace-backups: rspace-backups:
rspace-pgdata: rspace-pgdata:
encryptid-pgdata: encryptid-pgdata:
scribus-designs:
open-notebook-data: open-notebook-data:
open-notebook-db: open-notebook-db:

View File

@ -1,23 +0,0 @@
# blender-worker sidecar: headless Blender render service driven by server.py.
FROM debian:bookworm-slim
# Blender + Python runtime, plus Mesa GL/EGL libraries required for headless rendering.
RUN apt-get update && apt-get install -y --no-install-recommends \
    blender \
    python3 \
    ca-certificates \
    libegl1 \
    libgl1-mesa-dri \
    libglx-mesa0 \
    && rm -rf /var/lib/apt/lists/*
# Force offscreen Qt and no X display — the container runs without a GUI.
ENV QT_QPA_PLATFORM=offscreen
ENV DISPLAY=""
WORKDIR /app
COPY server.py .
# Shared-volume path where rendered images are published for the rspace server.
RUN mkdir -p /data/files/generated
EXPOSE 8810
CMD ["python3", "server.py"]

View File

@ -1,105 +0,0 @@
"""Headless Blender render worker — accepts scripts via HTTP, returns rendered images."""
import json
import os
import random
import shutil
import string
import subprocess
import time
from http.server import HTTPServer, BaseHTTPRequestHandler
GENERATED_DIR = "/data/files/generated"
BLENDER_TIMEOUT = 90  # seconds (fits within CF 100s limit)


class RenderHandler(BaseHTTPRequestHandler):
    """HTTP handler: GET /health for liveness, POST /render to run a Blender script.

    POST /render expects JSON {"script": "<python source>"}. The script is
    executed by headless Blender and must write its output to
    /tmp/render.png; the image is then moved into GENERATED_DIR under a
    unique name, which is returned as JSON.
    """

    def do_GET(self):
        # Liveness probe used by the sidecar manager.
        if self.path == "/health":
            self._json_response(200, {"ok": True, "service": "blender-worker"})
        else:
            self._json_response(404, {"error": "not found"})

    def do_POST(self):
        if self.path != "/render":
            self._json_response(404, {"error": "not found"})
            return
        try:
            length = int(self.headers.get("Content-Length", 0))
            body = json.loads(self.rfile.read(length))
        except (json.JSONDecodeError, ValueError):
            self._json_response(400, {"error": "invalid JSON"})
            return
        script = body.get("script", "").strip()
        if not script:
            self._json_response(400, {"error": "script required"})
            return
        # Write script to a temp file. HTTPServer is single-threaded, so the
        # fixed /tmp paths cannot race with a concurrent request.
        script_path = "/tmp/scene.py"
        render_path = "/tmp/render.png"
        with open(script_path, "w") as f:
            f.write(script)
        # Clean any previous render so a stale image is never returned.
        if os.path.exists(render_path):
            os.remove(render_path)
        # Run Blender headless, bounded by BLENDER_TIMEOUT.
        try:
            result = subprocess.run(
                ["blender", "--background", "--python", script_path],
                capture_output=True,
                text=True,
                timeout=BLENDER_TIMEOUT,
            )
        except subprocess.TimeoutExpired:
            self._json_response(504, {
                "success": False,
                "error": f"Blender timed out after {BLENDER_TIMEOUT}s",
            })
            return
        # Check if a render was produced; surface the tail of Blender's
        # output to aid debugging script errors.
        if not os.path.exists(render_path):
            self._json_response(422, {
                "success": False,
                "error": "Blender finished but no render output at /tmp/render.png",
                "stdout": result.stdout[-2000:] if result.stdout else "",
                "stderr": result.stderr[-2000:] if result.stderr else "",
            })
            return
        # Move the render to the shared volume under a unique name.
        rand = "".join(random.choices(string.ascii_lowercase + string.digits, k=6))
        filename = f"blender-{int(time.time())}-{rand}.png"
        dest = os.path.join(GENERATED_DIR, filename)
        os.makedirs(GENERATED_DIR, exist_ok=True)
        shutil.move(render_path, dest)
        self._json_response(200, {
            "success": True,
            # BUG FIX: render_url previously contained a literal placeholder
            # instead of interpolating the generated file name.
            "render_url": f"/data/files/generated/{filename}",
            "filename": filename,
        })

    def _json_response(self, status, data):
        # Serialize `data` and write a complete HTTP response with headers.
        body = json.dumps(data).encode("utf-8")
        self.send_response(status)
        self.send_header("Content-Type", "application/json")
        self.send_header("Content-Length", str(len(body)))
        self.end_headers()
        self.wfile.write(body)

    def log_message(self, fmt, *args):
        # Route request logging to stdout with a service prefix.
        print(f"[blender-worker] {fmt % args}")
if __name__ == "__main__":
    # Bind on all interfaces; port 8810 matches the EXPOSE in the Dockerfile.
    server = HTTPServer(("0.0.0.0", 8810), RenderHandler)
    print("[blender-worker] listening on :8810")
    # Single-threaded serve loop — requests are handled one at a time.
    server.serve_forever()

View File

@ -1,28 +0,0 @@
# freecad-mcp sidecar: headless FreeCAD exposed as an MCP server over HTTP.
FROM node:20-slim
# Install FreeCAD headless (freecad-cmd) and dependencies
RUN apt-get update && apt-get install -y --no-install-recommends \
    freecad \
    ca-certificates \
    && rm -rf /var/lib/apt/lists/*
# Set headless Qt/FreeCAD env (no X display inside the container)
ENV QT_QPA_PLATFORM=offscreen
ENV DISPLAY=""
ENV FREECAD_USER_CONFIG=/tmp/.FreeCAD
WORKDIR /app
# Copy MCP server source
COPY freecad-mcp-server/ .
# Install Node deps + supergateway (stdio→HTTP bridge)
RUN npm install && npm install -g supergateway
# Ensure generated files dir exists (shared-volume mount point)
RUN mkdir -p /data/files/generated
EXPOSE 8808
# Use StreamableHttp (supports multiple concurrent connections, unlike SSE)
CMD ["supergateway", "--stdio", "node build/index.js", "--port", "8808", "--outputTransport", "streamableHttp"]

View File

@ -1,38 +0,0 @@
# kicad-mcp sidecar: KiCad's pcbnew scripting exposed as an MCP server over HTTP.
FROM node:20-slim
# Install KiCad (includes pcbnew Python module), Python, and build dependencies
RUN apt-get update && apt-get install -y --no-install-recommends \
    kicad \
    python3 \
    python3-pip \
    ca-certificates \
    && rm -rf /var/lib/apt/lists/*
# Use SWIG backend (headless — no KiCad GUI needed)
ENV KICAD_BACKEND=swig
# Ensure pcbnew module is findable (installed by kicad package)
ENV PYTHONPATH=/usr/lib/python3/dist-packages
# Point KiCad MCP to system Python (absolute path for existsSync validation)
ENV KICAD_PYTHON=/usr/bin/python3
WORKDIR /app
# Copy MCP server source
COPY KiCAD-MCP-Server/ .
# Remove any venv so the server uses system Python (which has pcbnew)
RUN rm -rf .venv venv
# Install Node deps + supergateway (stdio→SSE bridge)
RUN npm install && npm install -g supergateway
# Install Python requirements into system Python (Pillow, cairosvg, requests, etc.)
RUN pip3 install --break-system-packages -r python/requirements.txt requests
# Ensure generated files dir exists (shared-volume mount point)
RUN mkdir -p /data/files/generated
EXPOSE 8809
# Use StreamableHttp (supports multiple concurrent connections, unlike SSE)
CMD ["supergateway", "--stdio", "node dist/index.js", "--port", "8809", "--outputTransport", "streamableHttp"]

View File

@ -1,51 +0,0 @@
# scribus-novnc sidecar: Scribus under Xvfb, viewable via noVNC, scriptable via
# an HTTP bridge (see bridge/server.py and supervisord.conf).
FROM ubuntu:22.04
# Defaults are overridable from docker-compose (screen geometry and ports).
ENV DEBIAN_FRONTEND=noninteractive \
    DISPLAY=:1 \
    VNC_PORT=5900 \
    NOVNC_PORT=6080 \
    BRIDGE_PORT=8765 \
    SCREEN_WIDTH=1920 \
    SCREEN_HEIGHT=1080 \
    SCREEN_DEPTH=24
# System packages: Scribus, Xvfb, VNC, noVNC, Python, supervisor
RUN apt-get update && apt-get install -y --no-install-recommends \
    scribus \
    xvfb \
    x11vnc \
    novnc \
    websockify \
    supervisor \
    python3 \
    python3-pip \
    fonts-liberation \
    fonts-dejavu \
    wget \
    curl \
    procps \
    && rm -rf /var/lib/apt/lists/*
# Python bridge dependencies (installed before copying sources for layer caching)
COPY bridge/requirements.txt /opt/bridge/requirements.txt
RUN pip3 install --no-cache-dir -r /opt/bridge/requirements.txt
# Copy bridge server and Scribus runner
COPY bridge/ /opt/bridge/
# Supervisord config (manages Xvfb, x11vnc, websockify, runner, bridge)
COPY supervisord.conf /etc/supervisor/conf.d/supervisord.conf
# Startup script
COPY startup.sh /opt/startup.sh
RUN chmod +x /opt/startup.sh
# Data directory for design files
RUN mkdir -p /data/designs
EXPOSE ${NOVNC_PORT} ${BRIDGE_PORT}
HEALTHCHECK --interval=30s --timeout=5s --retries=3 \
    CMD curl -sf http://localhost:${BRIDGE_PORT}/health || exit 1
ENTRYPOINT ["/opt/startup.sh"]

View File

@ -1,3 +0,0 @@
flask==3.1.0
flask-socketio==5.4.1
watchdog==6.0.0

View File

@ -1,337 +0,0 @@
"""
Scribus Bridge Runner runs inside the Scribus Python scripting environment.
Listens on a Unix socket for JSON commands from the Flask bridge server
and dispatches them to the Scribus Python API.
Launched via: scribus --python-script scribus_runner.py
"""
import json
import os
import socket
import sys
import threading
import traceback
from pathlib import Path
try:
import scribus
except ImportError:
# Running outside Scribus for testing
scribus = None
print("[runner] WARNING: scribus module not available (running outside Scribus?)")
SOCKET_PATH = "/tmp/scribus_bridge.sock"
DESIGNS_DIR = Path("/data/designs")
SCREENSHOT_DIR = Path("/tmp/scribus_screenshots")
def _ensure_dirs():
    """Create the design and screenshot directories if they are missing."""
    for directory in (DESIGNS_DIR, SCREENSHOT_DIR):
        directory.mkdir(parents=True, exist_ok=True)
# ── Command handlers ──
def cmd_new_document(args: dict) -> dict:
    """Create a new Scribus document from the given page geometry.

    Recognised args (all optional): width/height in mm (A4 default),
    margins, pages, and unit (0=points, 1=mm, 2=inches, 3=picas).
    """
    width = args.get("width", 210)  # mm, A4 default
    height = args.get("height", 297)
    margins = args.get("margins", 10)
    pages = args.get("pages", 1)
    unit = args.get("unit", 0)
    if scribus:
        margin_box = (margins, margins, margins, margins)
        # newDocument(size, margins, orientation, pages, unit, pagesType, firstPageOrder, firstPageNumber)
        scribus.newDocument(
            (width, height),
            margin_box,
            scribus.PORTRAIT, pages, unit, scribus.FACINGPAGES, scribus.FIRSTPAGELEFT, 1,
        )
    return {"ok": True, "message": f"Created {width}x{height}mm document with {pages} page(s)"}
def cmd_add_text_frame(args: dict) -> dict:
    """Create a text frame and optionally fill it with text at the given font."""
    x, y = args.get("x", 10), args.get("y", 10)
    w, h = args.get("width", 100), args.get("height", 30)
    text = args.get("text", "")
    font_size = args.get("fontSize", 12)
    font_name = args.get("fontName", "Liberation Sans")
    name = args.get("name")
    if not scribus:
        # Outside Scribus: report a synthetic frame name so callers can proceed.
        return {"ok": True, "frame": name or f"text_{x}_{y}", "simulated": True}
    frame = scribus.createText(x, y, w, h, name or "")
    if text:
        scribus.setText(text, frame)
        scribus.setFontSize(font_size, frame)
        try:
            scribus.setFont(font_name, frame)
        except Exception:
            # Requested font unavailable — fall back to a font shipped in the image.
            scribus.setFont("Liberation Sans", frame)
    return {"ok": True, "frame": frame}
def cmd_add_image_frame(args: dict) -> dict:
    """Create an image frame and optionally load an image file into it."""
    x, y = args.get("x", 10), args.get("y", 10)
    w, h = args.get("width", 100), args.get("height", 100)
    image_path = args.get("imagePath", "")
    name = args.get("name")
    if not scribus:
        return {"ok": True, "frame": name or f"image_{x}_{y}", "simulated": True}
    frame = scribus.createImage(x, y, w, h, name or "")
    if image_path and os.path.exists(image_path):
        scribus.loadImage(image_path, frame)
        # Scale the image to fill the frame (both axes, keep proportions flag).
        scribus.setScaleImageToFrame(True, True, frame)
    return {"ok": True, "frame": frame}
def cmd_add_shape(args: dict) -> dict:
    """Create a rectangle (default) or ellipse, optionally filled with a color."""
    shape_type = args.get("shapeType", "rect")
    x, y = args.get("x", 10), args.get("y", 10)
    w, h = args.get("width", 50), args.get("height", 50)
    fill = args.get("fill")
    name = args.get("name")
    if not scribus:
        return {"ok": True, "frame": name or f"{shape_type}_{x}_{y}", "simulated": True}
    creator = scribus.createEllipse if shape_type == "ellipse" else scribus.createRect
    frame = creator(x, y, w, h, name or "")
    if fill:
        # Scribus fills require a named color; define one per frame, then apply it.
        color_name = f"fill_{frame}"
        r, g, b = _parse_color(fill)
        scribus.defineColorRGB(color_name, r, g, b)
        scribus.setFillColor(color_name, frame)
    return {"ok": True, "frame": frame}
def cmd_get_doc_state(args: dict) -> dict:
    """Return a full snapshot of the current document state.

    Shape: {"pages": [{number, width, height}], "frames": [{name, type,
    x, y, width, height, plus text/fontSize/fontName for TextFrames}]}.
    """
    if not scribus:
        return {"error": "No scribus module", "simulated": True}
    try:
        page_count = scribus.pageCount()
    except Exception:
        # pageCount() raises when no document is open — report an empty state.
        return {"pages": [], "frames": [], "message": "No document open"}
    pages = []
    for p in range(1, page_count + 1):
        # getPageSize() reports the size of the *current* page, so navigate first.
        scribus.gotoPage(p)
        w, h = scribus.getPageSize()
        pages.append({"number": p, "width": w, "height": h})
    frames = []
    all_objects = scribus.getAllObjects()
    for obj_name in all_objects:
        obj_type = scribus.getObjectType(obj_name)
        x, y = scribus.getPosition(obj_name)
        w, h = scribus.getSize(obj_name)
        frame_info = {
            "name": obj_name,
            "type": obj_type,
            "x": x, "y": y,
            "width": w, "height": h,
        }
        if obj_type == "TextFrame":
            try:
                frame_info["text"] = scribus.getText(obj_name)
                frame_info["fontSize"] = scribus.getFontSize(obj_name)
                frame_info["fontName"] = scribus.getFont(obj_name)
            except Exception:
                # Text attributes are best-effort; omit them on any API error.
                pass
        frames.append(frame_info)
    return {"pages": pages, "frames": frames}
def cmd_screenshot(args: dict) -> dict:
    """Export the current page as PNG.

    NOTE(review): Scribus 1.5 scripting has no direct PNG export; this
    saves an EPS and an SLA next to the requested path and returns the
    .png path even though no PNG is actually written — the bridge's
    /screenshot endpoint will report "Screenshot not generated" unless an
    external conversion step exists. Confirm intended behaviour.
    """
    dpi = args.get("dpi", 72)  # accepted but unused by the EPS/SLA fallback below
    _ensure_dirs()
    path = str(SCREENSHOT_DIR / "current_page.png")
    if scribus:
        try:
            scribus.savePageAsEPS(path.replace(".png", ".eps"))
            # Fallback: use scribus PDF export + convert, or direct image export
            # Scribus 1.5 has limited direct PNG export; use saveDocAs + external convert
            scribus.saveDocAs(path.replace(".png", ".sla"))
            return {"ok": True, "path": path, "note": "SLA saved; PNG conversion may require external tool"}
        except Exception as e:
            return {"error": f"Screenshot failed: {str(e)}"}
    return {"ok": True, "path": path, "simulated": True}
def cmd_save_as_sla(args: dict) -> dict:
    """Persist the current document as a .sla file under the space's folder."""
    space = args.get("space", "default")
    filename = args.get("filename", "design.sla")
    _ensure_dirs()
    target_dir = DESIGNS_DIR / space
    target_dir.mkdir(parents=True, exist_ok=True)
    target = str(target_dir / filename)
    if not scribus:
        return {"ok": True, "path": target, "simulated": True}
    scribus.saveDocAs(target)
    return {"ok": True, "path": target}
def cmd_move_frame(args: dict) -> dict:
    """Move a frame to absolute coordinates or by a relative offset."""
    name = args.get("name", "")
    x = args.get("x", 0)
    y = args.get("y", 0)
    if scribus and name:
        # Pick the Scribus API variant based on the addressing mode.
        mover = scribus.moveObjectAbs if args.get("absolute", False) else scribus.moveObject
        mover(x, y, name)
        return {"ok": True}
    return {"ok": True, "simulated": True}
def cmd_delete_frame(args: dict) -> dict:
    """Delete the named frame from the document."""
    name = args.get("name", "")
    if not (scribus and name):
        return {"ok": True, "simulated": True}
    scribus.deleteObject(name)
    return {"ok": True}
def cmd_set_background_color(args: dict) -> dict:
    """Simulate a page background color by drawing a full-page rectangle.

    Scribus has no direct page-background setting, so a borderless rect
    covering the whole page is created and pushed to the back of the
    z-order. Returns the name of the created rect.
    """
    color = args.get("color", "#ffffff")
    if not scribus:
        return {"ok": True, "simulated": True}
    r, g, b = _parse_color(color)
    color_name = "page_bg"
    scribus.defineColorRGB(color_name, r, g, b)
    # Scribus doesn't have direct page background — create a full-page rect.
    w, h = scribus.getPageSize()
    bg = scribus.createRect(0, 0, w, h, "background_rect")
    scribus.setFillColor(color_name, bg)
    scribus.setLineWidth(0, bg)
    # BUG FIX: removed a dead `scribus.sentToLayer(...) if False else None`
    # expression that could never execute (and named a nonexistent API).
    # Send to back. NOTE(review): 50 repeated calls look like a best-effort
    # z-order push — confirm a single moveSelectionToBack() wouldn't suffice.
    try:
        for _ in range(50):
            scribus.moveSelectionToBack()
    except Exception:
        pass
    return {"ok": True, "frame": bg}
# ── Helpers ──
def _parse_color(color_str: str) -> tuple:
"""Parse hex color string to (r, g, b) tuple."""
color_str = color_str.lstrip("#")
if len(color_str) == 6:
return (int(color_str[0:2], 16), int(color_str[2:4], 16), int(color_str[4:6], 16))
return (0, 0, 0)
# Maps wire-protocol action names (received from the Flask bridge over the
# Unix socket) to their handler functions. handle_command() dispatches here.
COMMAND_MAP = {
    "new_document": cmd_new_document,
    "add_text_frame": cmd_add_text_frame,
    "add_image_frame": cmd_add_image_frame,
    "add_shape": cmd_add_shape,
    "get_doc_state": cmd_get_doc_state,
    "screenshot": cmd_screenshot,
    "save_as_sla": cmd_save_as_sla,
    "move_frame": cmd_move_frame,
    "delete_frame": cmd_delete_frame,
    "set_background_color": cmd_set_background_color,
}
def handle_command(data: dict) -> dict:
    """Look up and invoke the handler for a wire command.

    Unknown actions and handler exceptions are reported as error dicts so
    the bridge always receives a JSON-serialisable reply.
    """
    action = data.get("action", "")
    handler = COMMAND_MAP.get(action)
    if handler is None:
        return {"error": f"Unknown action: {action}", "available": list(COMMAND_MAP.keys())}
    try:
        return handler(data.get("args", {}))
    except Exception as e:
        return {"error": f"Command '{action}' failed: {str(e)}", "traceback": traceback.format_exc()}
def run_socket_server():
    """Accept JSON commands on SOCKET_PATH and reply with JSON results.

    Protocol: one newline-delimited JSON command per connection; the
    response is a single newline-delimited JSON object.
    """
    if os.path.exists(SOCKET_PATH):
        os.remove(SOCKET_PATH)
    server = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    server.bind(SOCKET_PATH)
    # World-writable so the bridge process can connect regardless of its uid.
    os.chmod(SOCKET_PATH, 0o666)
    server.listen(5)
    print(f"[runner] Listening on {SOCKET_PATH}")
    while True:
        try:
            conn, _ = server.accept()
        except Exception as e:
            print(f"[runner] Socket error: {e}", file=sys.stderr)
            continue
        try:
            data = b""
            while True:
                chunk = conn.recv(4096)
                if not chunk:
                    break
                data += chunk
                if b"\n" in data:
                    break
            if data:
                cmd = json.loads(data.decode("utf-8").strip())
                result = handle_command(cmd)
                conn.sendall((json.dumps(result) + "\n").encode("utf-8"))
        except Exception as e:
            print(f"[runner] Socket error: {e}", file=sys.stderr)
        finally:
            # BUG FIX: previously the connection fd leaked whenever decoding
            # or sending raised; always close it.
            conn.close()
# Always run — Scribus --python-script doesn't set __name__ to "__main__"
_ensure_dirs()
print("[runner] Scribus bridge runner starting...")
# Run socket server in a thread so Scribus event loop can continue
t = threading.Thread(target=run_socket_server, daemon=True)
t.start()
print("[runner] Socket server thread started")
# Keep the script alive. FIX: hoist the `time` import out of the busy loop
# (it was previously re-executed every second).
import time
try:
    while True:
        time.sleep(1)
except KeyboardInterrupt:
    print("[runner] Shutting down")

View File

@ -1,137 +0,0 @@
"""
Scribus Bridge Server HTTP API for controlling Scribus from rSpace.
Architecture:
    rspace container --HTTP--> this Flask server (port 8765)
                     --Unix socket-->
                         scribus --python-script scribus_runner.py
The runner script executes inside the Scribus process (required by Scribus
Python API). It listens on a Unix socket for JSON commands. This Flask
server translates HTTP requests into socket commands.
"""
import json
import os
import socket
import time
from pathlib import Path
from flask import Flask, request, jsonify
app = Flask(__name__)
BRIDGE_SECRET = os.environ.get("BRIDGE_SECRET", "")
SOCKET_PATH = "/tmp/scribus_bridge.sock"
DESIGNS_DIR = Path("/data/designs")
def _check_auth():
    """Verify the X-Bridge-Secret header against BRIDGE_SECRET.

    Returns None when the request is allowed, or a (response, status)
    pair to short-circuit it. When no secret is configured all requests
    are allowed (internal-network deployment).
    """
    if not BRIDGE_SECRET:
        return None  # No secret configured, allow all
    token = request.headers.get("X-Bridge-Secret", "")
    # SECURITY FIX: compare in constant time so the header check does not
    # leak the secret one byte at a time via response timing.
    import hmac
    if not hmac.compare_digest(token, BRIDGE_SECRET):
        return jsonify({"error": "Unauthorized"}), 401
    return None
def _send_command(cmd: dict, timeout: float = 30.0) -> dict:
    """Send a JSON command to the Scribus runner via its Unix socket.

    Returns the runner's decoded JSON reply, or an {"error": ...} dict on
    any transport failure. `timeout` bounds connect and each recv/send.
    """
    if not os.path.exists(SOCKET_PATH):
        return {"error": "Scribus runner not connected. Call /api/scribus/start first."}
    sock = None
    try:
        sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        sock.settimeout(timeout)
        sock.connect(SOCKET_PATH)
        sock.sendall((json.dumps(cmd) + "\n").encode("utf-8"))
        # Read response (newline-delimited JSON)
        buf = b""
        while True:
            chunk = sock.recv(4096)
            if not chunk:
                break
            buf += chunk
            if b"\n" in buf:
                break
        return json.loads(buf.decode("utf-8").strip())
    except socket.timeout:
        return {"error": "Command timed out"}
    except ConnectionRefusedError:
        return {"error": "Scribus runner not responding"}
    except Exception as e:
        return {"error": f"Bridge error: {str(e)}"}
    finally:
        # BUG FIX: the socket previously leaked on every timeout/error path
        # (close() was only reached on success); always close it.
        if sock is not None:
            sock.close()
@app.before_request
def before_request():
    """Reject the request early when the bridge-secret check fails."""
    denial = _check_auth()
    if denial is not None:
        return denial
@app.route("/health", methods=["GET"])
def health():
    """Liveness probe; also reports whether the runner socket exists."""
    return jsonify({
        "ok": True,
        "service": "scribus-bridge",
        "runner_connected": os.path.exists(SOCKET_PATH),
    })
@app.route("/api/scribus/start", methods=["POST"])
def start_scribus():
    """Wait briefly for the supervisor-managed runner socket to appear.

    The runner process itself is managed by supervisor; this endpoint only
    polls for its socket (up to ~5s) and reports readiness.
    """
    for _attempt in range(10):
        if os.path.exists(SOCKET_PATH):
            return jsonify({"ok": True, "message": "Runner connected", "runner_connected": True})
        time.sleep(0.5)
    return jsonify({"ok": False, "error": "Runner socket not available. Check supervisor logs."}), 500
@app.route("/api/scribus/command", methods=["POST"])
def scribus_command():
    """Forward a JSON command {action, args} to the runner."""
    body = request.get_json(silent=True)
    if not body or "action" not in body:
        return jsonify({"error": "Missing 'action' in request body"}), 400
    result = _send_command(body)
    return jsonify(result), (500 if "error" in result else 200)
@app.route("/api/scribus/state", methods=["GET"])
def scribus_state():
    """Fetch the current document snapshot from the runner."""
    result = _send_command({"action": "get_doc_state"})
    return jsonify(result), (500 if "error" in result else 200)
@app.route("/api/scribus/screenshot", methods=["GET"])
def scribus_screenshot():
    """Export the current page as PNG and return the raw image bytes."""
    dpi = request.args.get("dpi", "72", type=str)
    result = _send_command({"action": "screenshot", "args": {"dpi": int(dpi)}})
    if "error" in result:
        return jsonify(result), 500
    png_path = result.get("path")
    if png_path and os.path.exists(png_path):
        # FIX: removed the dead `app.send_static_file(...) if False` branch
        # and use a context manager so the file handle is closed deterministically.
        with open(png_path, "rb") as fh:
            return fh.read(), 200, {"Content-Type": "image/png"}
    return jsonify({"error": "Screenshot not generated"}), 500
if __name__ == "__main__":
    # Ensure the designs volume exists, then serve on all interfaces.
    # BRIDGE_PORT defaults to 8765 (matches the container HEALTHCHECK).
    DESIGNS_DIR.mkdir(parents=True, exist_ok=True)
    app.run(host="0.0.0.0", port=int(os.environ.get("BRIDGE_PORT", 8765)))

View File

@ -1,12 +0,0 @@
#!/bin/bash
set -e

# Prepare writable directories for design files and supervisor logs.
mkdir -p /data/designs /var/log/supervisor

echo "[rDesign] Starting Scribus noVNC container..."
echo "[rDesign] Screen: ${SCREEN_WIDTH}x${SCREEN_HEIGHT}x${SCREEN_DEPTH}"
echo "[rDesign] noVNC port: ${NOVNC_PORT}, Bridge port: ${BRIDGE_PORT}"

# Replace the shell with supervisord so it runs as PID 1 and receives signals.
exec /usr/bin/supervisord -c /etc/supervisor/conf.d/supervisord.conf

View File

@ -1,38 +0,0 @@
[supervisord]
; Run in the foreground — supervisord is the container's main process.
nodaemon=true
logfile=/var/log/supervisor/supervisord.log
pidfile=/var/run/supervisord.pid
[program:xvfb]
; Virtual X display :1; geometry comes from the SCREEN_* env vars.
; Lowest priority = started first; everything below needs the display.
command=Xvfb :1 -screen 0 %(ENV_SCREEN_WIDTH)sx%(ENV_SCREEN_HEIGHT)sx%(ENV_SCREEN_DEPTH)s
autorestart=true
priority=10
[program:x11vnc]
; VNC server for display :1. NOTE(review): -nopw means no VNC password and
; -listen 0.0.0.0 binds all interfaces — assumes the port is only reachable
; inside the container network; confirm before exposing.
command=x11vnc -display :1 -nopw -listen 0.0.0.0 -xkb -ncache 10 -ncache_cr -forever -shared
autorestart=true
priority=20
startsecs=3
[program:websockify]
; Bridges the noVNC web client (NOVNC_PORT) to the VNC server (VNC_PORT).
command=websockify --web /usr/share/novnc %(ENV_NOVNC_PORT)s localhost:%(ENV_VNC_PORT)s
autorestart=true
priority=30
startsecs=5
[program:runner]
; Scribus command runner; DISPLAY points it at the Xvfb display above.
command=python3 /opt/bridge/scribus_runner.py
autorestart=true
priority=35
environment=DISPLAY=":1"
stdout_logfile=/var/log/supervisor/runner.log
stderr_logfile=/var/log/supervisor/runner_err.log
startsecs=2
[program:bridge]
; HTTP bridge server, started last (highest priority) after the runner.
command=python3 /opt/bridge/server.py
autorestart=true
priority=40
environment=DISPLAY=":1"
stdout_logfile=/var/log/supervisor/bridge.log
stderr_logfile=/var/log/supervisor/bridge_err.log

View File

@ -33,6 +33,6 @@ export const MODULES: ModuleEntry[] = [
{ id: "rsplat", name: "rSplat", primarySelector: "folk-splat-viewer" }, { id: "rsplat", name: "rSplat", primarySelector: "folk-splat-viewer" },
{ id: "rphotos", name: "rPhotos", primarySelector: "folk-photo-gallery" }, { id: "rphotos", name: "rPhotos", primarySelector: "folk-photo-gallery" },
{ id: "rsocials", name: "rSocials", primarySelector: undefined }, // HTML hub page, no main element { id: "rsocials", name: "rSocials", primarySelector: undefined }, // HTML hub page, no main element
{ id: "rminders", name: "rMinders", primarySelector: "folk-minders-app" }, { id: "rschedule", name: "rSchedule", primarySelector: "folk-schedule-app" },
{ id: "rmeets", name: "rMeets", primarySelector: undefined }, // HTML hub page { id: "rmeets", name: "rMeets", primarySelector: undefined }, // HTML hub page
]; ];

View File

@ -1,280 +0,0 @@
/**
* Smoke tests for the unified rSocials campaign flow UX.
*
* Covers the integration points from the recent refactor:
* 1. /campaigns dashboard lists flows + routes to /campaign-flow?id=X
* 2. Brief canvas node replaces the old slide-out generate + preview banner
* 3. Markdown import modal adds post nodes wired to a platform
* 4. Wizard success page links into /campaign-flow?id=<flowId>
*
 * Set BASE_URL=http://localhost:3000 for local runs. The /api/campaign/flow/from-brief
 * and /api/campaign/wizard/:id/content endpoints require GEMINI_API_KEY — tests that
 * depend on AI are skipped automatically when the API returns 503.
*/
import { test, expect, type Page } from "@playwright/test";
import { ConsoleCollector } from "../helpers/console-collector";
const SPACE = "demo";
// ── 1. Dashboard integration ──
test.describe("rSocials /campaigns dashboard", () => {
  test("loads, renders folk-campaigns-dashboard, no JS errors", async ({ page }) => {
    // Collects console/page errors for the final assertNoErrors() check.
    const collector = new ConsoleCollector(page);
    const res = await page.goto(`/${SPACE}/rsocials/campaigns`);
    expect(res?.status()).toBe(200);
    await expect(page.locator("folk-campaigns-dashboard")).toBeAttached();
    collector.assertNoErrors();
  });
  test("shows Campaigns header and Wizard button", async ({ page }) => {
    await page.goto(`/${SPACE}/rsocials/campaigns`);
    const dashboard = page.locator("folk-campaigns-dashboard");
    await expect(dashboard).toBeAttached();
    // Header lives in shadow DOM
    const header = dashboard.locator('h2', { hasText: "Campaigns" });
    await expect(header).toBeVisible({ timeout: 10_000 });
    const wizardBtn = dashboard.locator('#btn-wizard');
    await expect(wizardBtn).toBeVisible();
  });
  test("+ New Campaign creates a flow and navigates to /campaign-flow?id=<id>", async ({ page }) => {
    await page.goto(`/${SPACE}/rsocials/campaigns`);
    const dashboard = page.locator("folk-campaigns-dashboard");
    await expect(dashboard).toBeAttached();
    // Either #btn-new (when flows exist) or empty-state button
    const newBtn = dashboard.locator('#btn-new, .cd-btn--new-empty:not(#btn-wizard-empty)').first();
    await expect(newBtn).toBeVisible({ timeout: 10_000 });
    // Start waiting for the navigation before clicking to avoid a race.
    await Promise.all([
      page.waitForURL(/\/campaign-flow\?id=/, { timeout: 15_000 }),
      newBtn.click(),
    ]);
    // New flow IDs carry a "flow-" prefix.
    expect(page.url()).toMatch(/\/campaign-flow\?id=flow-/);
    await expect(page.locator("folk-campaign-planner")).toBeAttached();
  });
});
// ── 2. Brief node + preview ──
test.describe("rSocials /campaign-flow brief node", () => {
  test("page loads with folk-campaign-planner attached", async ({ page }) => {
    // Collects console/page errors for the final assertNoErrors() check.
    const collector = new ConsoleCollector(page);
    const res = await page.goto(`/${SPACE}/rsocials/campaign-flow`);
    expect(res?.status()).toBe(200);
    await expect(page.locator("folk-campaign-planner")).toBeAttached();
    collector.assertNoErrors();
  });
  test("flow-id attribute is forwarded from ?id= query", async ({ page }) => {
    await page.goto(`/${SPACE}/rsocials/campaign-flow?id=smoke-test-id`);
    const attr = await page.locator("folk-campaign-planner").getAttribute("flow-id");
    expect(attr).toBe("smoke-test-id");
  });
  test('toolbar exposes "+ Brief" and "Import" buttons (not the old From Brief drawer)', async ({ page }) => {
    await page.goto(`/${SPACE}/rsocials/campaign-flow`);
    const planner = page.locator("folk-campaign-planner");
    await expect(planner).toBeAttached();
    const briefBtn = planner.locator('#add-brief');
    await expect(briefBtn).toBeVisible({ timeout: 10_000 });
    await expect(briefBtn).toContainText('Brief');
    const importBtn = planner.locator('#open-import');
    await expect(importBtn).toBeVisible();
    // The old slide-out drawer is gone
    await expect(planner.locator('#brief-panel')).toHaveCount(0);
    await expect(planner.locator('#toggle-brief')).toHaveCount(0);
  });
  test('clicking "+ Brief" adds a brief node to the canvas', async ({ page }) => {
    await page.goto(`/${SPACE}/rsocials/campaign-flow`);
    const planner = page.locator("folk-campaign-planner");
    await expect(planner).toBeAttached();
    // Baseline count so pre-existing brief nodes don't break the assertion.
    const before = await planner.locator('g.cp-node[data-node-type="brief"]').count();
    await planner.locator('#add-brief').click();
    // Brief node appears
    const briefNodes = planner.locator('g.cp-node[data-node-type="brief"]');
    await expect(briefNodes).toHaveCount(before + 1, { timeout: 5_000 });
  });
  test("brief Generate: preview banner + Keep/Regen/Discard (skipped if no GEMINI_API_KEY)", async ({ page }) => {
    // Dismiss any native dialogs and surface page/console errors for debugging.
    page.on('dialog', (dialog) => { dialog.dismiss().catch(() => {}); });
    page.on('pageerror', (err) => console.log('[pageerror]', err.message, '\n', err.stack));
    page.on('console', (msg) => { if (msg.type() === 'error') console.log('[console.error]', msg.text()); });
    // Track whether the from-brief request fires and what status it returns,
    // so we can distinguish "never requested" from "backend unavailable".
    let briefResponse: { status: number } | null = null;
    let briefRequested = false;
    page.on('request', (r) => {
      if (r.url().includes('/api/campaign/flow/from-brief')) briefRequested = true;
    });
    page.on('response', (r) => {
      if (r.url().includes('/api/campaign/flow/from-brief')) {
        briefResponse = { status: r.status() };
      }
    });
    await page.goto(`/${SPACE}/rsocials/campaign-flow`);
    const planner = page.locator("folk-campaign-planner");
    await planner.locator('#add-brief').click();
    // Fill the brief textarea inside the auto-opened inline config
    const textarea = planner.locator('.cp-inline-config textarea[data-field="text"]').first();
    await expect(textarea).toBeVisible({ timeout: 5_000 });
    await textarea.fill(
      "Launch a week-long hackathon for regen finance builders. 3 phases: tease, announce, countdown. " +
      "Platforms: X and LinkedIn. Target web3 devs."
    );
    const genBtn = planner.locator('.cp-inline-config [data-action="generate-brief"]').first();
    await expect(genBtn).toBeEnabled({ timeout: 5_000 });
    // Click via evaluate — Playwright's click() can mis-target buttons inside
    // <foreignObject> within SVG + shadow DOM. Dispatching directly guarantees
    // the listener runs if it is bound.
    await genBtn.evaluate((el) => {
      (el as HTMLButtonElement).click();
    });
    // (Removed a dead waitForFunction on window.__briefResp: nothing ever set
    // that global and its 100 ms timeout was swallowed — pure no-op.)
    // Wait up to 25s for a response via our listener
    const deadline = Date.now() + 25_000;
    while (!briefResponse && Date.now() < deadline) {
      await page.waitForTimeout(250);
    }
    if (!briefResponse) {
      console.log('[debug] briefRequested=', briefRequested);
      test.skip(true, `from-brief ${briefRequested ? 'request fired but no response' : 'fetch NEVER fired'}`);
      return;
    }
    const resp: { status: number } = briefResponse;
    if (resp.status !== 200 && resp.status !== 201) {
      test.skip(true, `from-brief returned ${resp.status} — likely GEMINI_API_KEY missing`);
      return;
    }
    // Preview banner appears with Keep / Regenerate / Discard
    await expect(planner.locator('.cp-preview-banner')).toBeVisible({ timeout: 10_000 });
    await expect(planner.locator('#preview-keep')).toBeVisible();
    await expect(planner.locator('#preview-regen')).toBeVisible();
    await expect(planner.locator('#preview-discard')).toBeVisible();
    // Preview nodes are visually marked
    await expect(planner.locator('g.cp-node--preview').first()).toBeAttached();
    // Discard cleans them up and removes the banner
    await planner.locator('#preview-discard').click();
    await expect(planner.locator('.cp-preview-banner')).toHaveCount(0);
    await expect(planner.locator('g.cp-node--preview')).toHaveCount(0);
  });
});
// ── 3. Markdown import ──
test.describe("rSocials /campaign-flow markdown import", () => {
  test("Import modal parses --- separated posts and adds post nodes", async ({ page }) => {
    await page.goto(`/${SPACE}/rsocials/campaign-flow`);
    const planner = page.locator("folk-campaign-planner");
    await expect(planner).toBeAttached();
    // Baseline count so the assertion tolerates pre-existing post nodes.
    const beforePosts = await planner.locator('g.cp-node[data-node-type="post"]').count();
    // Open modal
    await planner.locator('#open-import').click();
    const modal = planner.locator('#import-modal');
    await expect(modal).toBeVisible();
    // Fill 3 tweets
    const textarea = planner.locator('#import-text');
    await textarea.fill("First imported tweet\n---\nSecond tweet with more content\n---\nThird tweet, final one");
    // Choose platform
    await planner.locator('#import-platform').selectOption('linkedin');
    // Submit
    await planner.locator('#import-submit').click();
    // Modal closes
    await expect(modal).toBeHidden();
    // 3 new post nodes appear
    await expect(planner.locator('g.cp-node[data-node-type="post"]')).toHaveCount(beforePosts + 3, { timeout: 5_000 });
    // A linkedin platform node exists (created or already present)
    const linkedinPlatform = planner.locator('g.cp-node[data-node-type="platform"]').filter({ hasText: /linkedin/i });
    await expect(linkedinPlatform.first()).toBeAttached();
  });
});
// ── 4. Wizard → planner handoff ──
// This test only validates the wizard URL loads and dashboard→wizard link works.
// Exercising the full wizard requires Gemini + commit, which is out of scope for smoke.
test.describe("rSocials /campaign-wizard", () => {
  test("loads and renders folk-campaign-wizard", async ({ page }) => {
    const res = await page.goto(`/${SPACE}/rsocials/campaign-wizard`);
    expect(res?.status()).toBe(200);
    await expect(page.locator("folk-campaign-wizard")).toBeAttached();
  });
  test("dashboard Wizard button navigates to /campaign-wizard", async ({ page }) => {
    await page.goto(`/${SPACE}/rsocials/campaigns`);
    const dashboard = page.locator("folk-campaigns-dashboard");
    await expect(dashboard).toBeAttached();
    const wizardBtn = dashboard.locator('#btn-wizard');
    await expect(wizardBtn).toBeVisible({ timeout: 10_000 });
    // Start waiting for the navigation before clicking to avoid a race.
    await Promise.all([
      page.waitForURL(/\/campaign-wizard(\/|$)/, { timeout: 15_000 }),
      wizardBtn.click(),
    ]);
    await expect(page.locator("folk-campaign-wizard")).toBeAttached();
  });
});
// ── API sanity ──
test.describe("rSocials campaign flow API", () => {
  test("GET /api/campaign/flows returns array shape", async ({ request }) => {
    const res = await request.get(`/${SPACE}/rsocials/api/campaign/flows`);
    expect(res.status()).toBe(200);
    const body = await res.json();
    // List envelope shape: { results: Flow[], count: number }
    expect(body).toHaveProperty('results');
    expect(Array.isArray(body.results)).toBe(true);
    expect(body).toHaveProperty('count');
  });
  test("POST creates a flow, DELETE removes it", async ({ request }) => {
    const created = await request.post(`/${SPACE}/rsocials/api/campaign/flows`, {
      data: { name: "Playwright Smoke Flow" },
    });
    expect(created.status()).toBe(201);
    const flow = await created.json();
    expect(flow.id).toMatch(/^flow-/);
    expect(flow.name).toBe("Playwright Smoke Flow");
    const deleted = await request.delete(`/${SPACE}/rsocials/api/campaign/flows/${flow.id}`);
    expect(deleted.status()).toBe(200);
    // A second delete must 404, proving the flow is really gone.
    const missing = await request.delete(`/${SPACE}/rsocials/api/campaign/flows/${flow.id}`);
    expect(missing.status()).toBe(404);
  });
});

View File

@ -1,368 +0,0 @@
#!/usr/bin/env bash
#
# Space Creation & Member Management API Test
#
# Tests the full lifecycle:
# 1. Create a test space
# 2. Verify space exists
# 3. Add member by EncryptID username (each role)
# 4. List members & verify roles
# 5. Change member role
# 6. Invite by email
# 7. Remove member
# 8. Delete the test space
#
# Usage:
# ./e2e/tests/space-members-api.sh <AUTH_TOKEN>
#
# Get your token from browser: localStorage.getItem("encryptid_session") → .token
#
# Optionally set:
# BASE_URL (default: https://rspace.online)
# TEST_USER (default: jeff) — an existing EncryptID username to add as member
set -euo pipefail
# ── Config ──
TOKEN="${1:-}"
BASE="${BASE_URL:-https://rspace.online}"
TEST_USER="${TEST_USER:-jeff}"
TEST_SLUG="api-test-$(date +%s)"
PASS=0
FAIL=0
WARN=0
# Colors
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[0;33m'
CYAN='\033[0;36m'
BOLD='\033[1m'
NC='\033[0m'
if [[ -z "$TOKEN" ]]; then
echo -e "${RED}Usage: $0 <AUTH_TOKEN>${NC}"
echo ""
echo "Get your token from the browser console:"
echo " JSON.parse(localStorage.getItem('encryptid_session')).token"
exit 1
fi
AUTH="Authorization: Bearer $TOKEN"
CT="Content-Type: application/json"
# ── Helpers ──
# pass <label> — count and print a passing check.
pass() {
  PASS=$((PASS + 1))
  echo -e "  ${GREEN}PASS${NC} $1"
}
# fail <label> [detail] — count and print a failing check with optional detail.
fail() {
  FAIL=$((FAIL + 1))
  echo -e "  ${RED}FAIL${NC} $1"
  if [[ -n "${2:-}" ]]; then
    echo -e "       ${RED}$2${NC}"
  fi
}
# warn <label> — count and print a non-fatal warning.
warn() {
  WARN=$((WARN + 1))
  echo -e "  ${YELLOW}WARN${NC} $1"
}
# assert_status <label> <expected> <actual> [body] — compare HTTP status codes.
assert_status() {
  local label="$1" expected="$2" actual="$3" body="${4:-}"
  if [[ "$actual" == "$expected" ]]; then
    pass "$label (HTTP $actual)"
  else
    fail "$label — expected HTTP $expected, got $actual" "$body"
  fi
}
# assert_json_field <label> <json> <jq-filter> <expected> — compare a JSON field.
assert_json_field() {
  local label="$1" json="$2" field="$3" expected="$4"
  local actual
  actual=$(echo "$json" | jq -r "$field" 2>/dev/null || echo "PARSE_ERROR")
  if [[ "$actual" == "$expected" ]]; then
    pass "$label ($field = $actual)"
  else
    # BUGFIX: label and jq filter were concatenated with no separator,
    # producing messages like "Space slug.slug: expected …".
    fail "$label — $field: expected '$expected', got '$actual'"
  fi
}
# api <method> <path> [curl-args…] — authed request; output is body + "\n" + status.
api() {
  local method="$1" path="$2"
  shift 2
  curl -s -w "\n%{http_code}" -X "$method" "$BASE$path" -H "$AUTH" "$@"
}
# api_with_body <method> <path> <json> — authed JSON request.
api_with_body() {
  local method="$1" path="$2" body="$3"
  curl -s -w "\n%{http_code}" -X "$method" "$BASE$path" -H "$AUTH" -H "$CT" -d "$body"
}
# Split the combined "body\nstatus" output produced by api/api_with_body.
extract_body() { echo "$1" | sed '$d'; }
extract_status() { echo "$1" | tail -1; }
# ── Preamble ──
echo -e "${BOLD}${CYAN}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}"
echo -e "${BOLD} rSpace — Space Creation & Member Management API Test${NC}"
echo -e "${BOLD}${CYAN}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}"
echo ""
echo -e " Base URL: ${CYAN}$BASE${NC}"
echo -e " Test slug: ${CYAN}$TEST_SLUG${NC}"
echo -e " Test user: ${CYAN}$TEST_USER${NC}"
echo ""
# ── 0. Verify auth token works ──
echo -e "${BOLD}[0] Verify authentication${NC}"
RES=$(api GET "/api/spaces")
STATUS=$(extract_status "$RES")
BODY=$(extract_body "$RES")
assert_status "GET /api/spaces — token valid" "200" "$STATUS" "$BODY"
echo ""
# ── 1. Create a test space ──
echo -e "${BOLD}[1] Create test space${NC}"
RES=$(api_with_body POST "/api/spaces" "{\"name\":\"API Test Space\",\"slug\":\"$TEST_SLUG\",\"visibility\":\"private\"}")
STATUS=$(extract_status "$RES")
BODY=$(extract_body "$RES")
assert_status "POST /api/spaces — create space" "201" "$STATUS" "$BODY"
assert_json_field "Space slug" "$BODY" ".slug" "$TEST_SLUG"
assert_json_field "Space visibility" "$BODY" ".visibility" "private"
echo ""
# ── 2. Verify space exists ──
echo -e "${BOLD}[2] Verify space exists${NC}"
RES=$(api GET "/api/spaces/$TEST_SLUG")
STATUS=$(extract_status "$RES")
BODY=$(extract_body "$RES")
assert_status "GET /api/spaces/$TEST_SLUG" "200" "$STATUS" "$BODY"
assert_json_field "Space name" "$BODY" ".name" "API Test Space"
echo ""
# ── 3. Cannot create duplicate ──
echo -e "${BOLD}[3] Duplicate slug rejected${NC}"
RES=$(api_with_body POST "/api/spaces" "{\"name\":\"Dupe\",\"slug\":\"$TEST_SLUG\"}")
STATUS=$(extract_status "$RES")
BODY=$(extract_body "$RES")
assert_status "POST /api/spaces — duplicate slug" "409" "$STATUS" "$BODY"
echo ""
# ── 4. Add member by username (as 'viewer') ──
echo -e "${BOLD}[4] Add member by username (viewer)${NC}"
RES=$(api_with_body POST "/api/spaces/$TEST_SLUG/members/add" "{\"username\":\"$TEST_USER\",\"role\":\"viewer\"}")
STATUS=$(extract_status "$RES")
BODY=$(extract_body "$RES")
assert_status "POST /members/add — viewer" "200" "$STATUS" "$BODY"
assert_json_field "Role assigned" "$BODY" ".role" "viewer"
# Capture the DID for later use
MEMBER_DID=$(echo "$BODY" | jq -r '.did // empty' 2>/dev/null)
if [[ -n "$MEMBER_DID" ]]; then
pass "Got member DID: ${MEMBER_DID:0:24}..."
else
warn "Could not extract member DID from response"
fi
echo ""
# ── 5. List members — verify viewer is present ──
echo -e "${BOLD}[5] List members${NC}"
RES=$(api GET "/api/spaces/$TEST_SLUG/members")
STATUS=$(extract_status "$RES")
BODY=$(extract_body "$RES")
assert_status "GET /members" "200" "$STATUS" "$BODY"
MEMBER_COUNT=$(echo "$BODY" | jq '.members | length' 2>/dev/null || echo "0")
if [[ "$MEMBER_COUNT" -ge 1 ]]; then
pass "Members list has $MEMBER_COUNT entries"
else
fail "Expected at least 1 member, got $MEMBER_COUNT"
fi
echo ""
# ── 6. Change role: viewer → member ──
echo -e "${BOLD}[6] Change role: viewer → member${NC}"
if [[ -n "$MEMBER_DID" ]]; then
  # Pass the DID as argv instead of interpolating it into the Python source:
  # a quote or backslash inside a DID would otherwise break (or inject into)
  # the -c snippet. ENCODED_DID is reused by later steps.
  ENCODED_DID=$(python3 -c "import sys, urllib.parse; print(urllib.parse.quote(sys.argv[1], safe=''))" "$MEMBER_DID")
  RES=$(api_with_body PATCH "/api/spaces/$TEST_SLUG/members/$ENCODED_DID" "{\"role\":\"member\"}")
  STATUS=$(extract_status "$RES")
  BODY=$(extract_body "$RES")
  assert_status "PATCH /members/:did — viewer→member" "200" "$STATUS" "$BODY"
  assert_json_field "New role" "$BODY" ".role" "member"
else
  warn "Skipped — no DID captured"
fi
echo ""
# ── 7. Change role: member → admin ──
echo -e "${BOLD}[7] Change role: member → admin${NC}"
if [[ -n "$MEMBER_DID" ]]; then
RES=$(api_with_body PATCH "/api/spaces/$TEST_SLUG/members/$ENCODED_DID" "{\"role\":\"admin\"}")
STATUS=$(extract_status "$RES")
BODY=$(extract_body "$RES")
assert_status "PATCH /members/:did — member→admin" "200" "$STATUS" "$BODY"
assert_json_field "New role" "$BODY" ".role" "admin"
else
warn "Skipped — no DID captured"
fi
echo ""
# ── 8. Change role: admin → viewer (demote) ──
echo -e "${BOLD}[8] Demote role: admin → viewer${NC}"
if [[ -n "$MEMBER_DID" ]]; then
RES=$(api_with_body PATCH "/api/spaces/$TEST_SLUG/members/$ENCODED_DID" "{\"role\":\"viewer\"}")
STATUS=$(extract_status "$RES")
BODY=$(extract_body "$RES")
assert_status "PATCH /members/:did — admin→viewer" "200" "$STATUS" "$BODY"
assert_json_field "New role" "$BODY" ".role" "viewer"
else
warn "Skipped — no DID captured"
fi
echo ""
# ── 9. Invalid role rejected ──
echo -e "${BOLD}[9] Invalid role rejected${NC}"
if [[ -n "$MEMBER_DID" ]]; then
RES=$(api_with_body PATCH "/api/spaces/$TEST_SLUG/members/$ENCODED_DID" "{\"role\":\"superadmin\"}")
STATUS=$(extract_status "$RES")
BODY=$(extract_body "$RES")
assert_status "PATCH /members/:did — invalid role" "400" "$STATUS" "$BODY"
else
warn "Skipped — no DID captured"
fi
echo ""
# ── 10. Invite by email ──
echo -e "${BOLD}[10] Invite by email${NC}"
RES=$(api_with_body POST "/api/spaces/$TEST_SLUG/invite" "{\"email\":\"test@example.com\",\"role\":\"member\"}")
STATUS=$(extract_status "$RES")
BODY=$(extract_body "$RES")
# May be 200 (ok) or 500 (SMTP not configured) — both indicate the route works
if [[ "$STATUS" == "200" ]]; then
pass "POST /invite — email invite created (HTTP $STATUS)"
INVITE_URL=$(echo "$BODY" | jq -r '.inviteUrl // empty' 2>/dev/null)
if [[ -n "$INVITE_URL" ]]; then
pass "Invite URL generated: ${INVITE_URL:0:50}..."
fi
elif [[ "$STATUS" == "500" ]]; then
warn "POST /invite — SMTP not configured (HTTP 500, expected in dev)"
else
fail "POST /invite — unexpected HTTP $STATUS" "$BODY"
fi
echo ""
# ── 11. Invite with invalid role rejected ──
echo -e "${BOLD}[11] Invite with invalid role rejected${NC}"
RES=$(api_with_body POST "/api/spaces/$TEST_SLUG/invite" "{\"email\":\"test@example.com\",\"role\":\"overlord\"}")
STATUS=$(extract_status "$RES")
BODY=$(extract_body "$RES")
assert_status "POST /invite — invalid role" "400" "$STATUS" "$BODY"
echo ""
# ── 12. Add member by nonexistent username ──
echo -e "${BOLD}[12] Add nonexistent username${NC}"
RES=$(api_with_body POST "/api/spaces/$TEST_SLUG/members/add" "{\"username\":\"nonexistent-user-xyz-99999\"}")
STATUS=$(extract_status "$RES")
BODY=$(extract_body "$RES")
assert_status "POST /members/add — nonexistent user" "404" "$STATUS" "$BODY"
echo ""
# ── 13. Remove member ──
echo -e "${BOLD}[13] Remove member${NC}"
if [[ -n "$MEMBER_DID" ]]; then
RES=$(api DELETE "/api/spaces/$TEST_SLUG/members/$ENCODED_DID")
STATUS=$(extract_status "$RES")
BODY=$(extract_body "$RES")
assert_status "DELETE /members/:did" "200" "$STATUS" "$BODY"
else
warn "Skipped — no DID captured"
fi
echo ""
# ── 14. Verify member removed ──
echo -e "${BOLD}[14] Verify member removed${NC}"
RES=$(api GET "/api/spaces/$TEST_SLUG/members")
STATUS=$(extract_status "$RES")
BODY=$(extract_body "$RES")
assert_status "GET /members after removal" "200" "$STATUS" "$BODY"
# Should have 0 non-owner members (owner isn't in members by default)
MEMBER_COUNT=$(echo "$BODY" | jq '.members | length' 2>/dev/null || echo "?")
pass "Members after removal: $MEMBER_COUNT"
echo ""
# ── 15. Re-add as admin for multi-role verification ──
echo -e "${BOLD}[15] Add member as admin${NC}"
RES=$(api_with_body POST "/api/spaces/$TEST_SLUG/members/add" "{\"username\":\"$TEST_USER\",\"role\":\"admin\"}")
STATUS=$(extract_status "$RES")
BODY=$(extract_body "$RES")
assert_status "POST /members/add — admin role" "200" "$STATUS" "$BODY"
assert_json_field "Role assigned" "$BODY" ".role" "admin"
echo ""
# ── 16. Re-add as moderator (overwrite) ──
echo -e "${BOLD}[16] Overwrite role via add (admin → moderator)${NC}"
RES=$(api_with_body POST "/api/spaces/$TEST_SLUG/members/add" "{\"username\":\"$TEST_USER\",\"role\":\"moderator\"}")
STATUS=$(extract_status "$RES")
BODY=$(extract_body "$RES")
assert_status "POST /members/add — moderator overwrite" "200" "$STATUS" "$BODY"
assert_json_field "Role assigned" "$BODY" ".role" "moderator"
echo ""
# ── 17. Unauthenticated access denied ──
echo -e "${BOLD}[17] Unauthenticated access denied${NC}"
RES=$(curl -s -w "\n%{http_code}" -X POST "$BASE/api/spaces" -H "$CT" -d '{"name":"Nope","slug":"nope"}')
STATUS=$(extract_status "$RES")
assert_status "POST /api/spaces — no auth" "401" "$STATUS"
echo ""
# ── 18. Cleanup: remove member, then delete space ──
echo -e "${BOLD}[18] Cleanup — remove member & delete space${NC}"
if [[ -n "$MEMBER_DID" ]]; then
api DELETE "/api/spaces/$TEST_SLUG/members/$ENCODED_DID" > /dev/null 2>&1 || true
fi
RES=$(api DELETE "/api/spaces/$TEST_SLUG")
STATUS=$(extract_status "$RES")
BODY=$(extract_body "$RES")
assert_status "DELETE /api/spaces/$TEST_SLUG" "200" "$STATUS" "$BODY"
echo ""
# ── 19. Verify space gone ──
echo -e "${BOLD}[19] Verify space deleted${NC}"
RES=$(api GET "/api/spaces/$TEST_SLUG")
STATUS=$(extract_status "$RES")
assert_status "GET deleted space" "404" "$STATUS"
echo ""
# ── Summary ──
TOTAL=$((PASS + FAIL))
echo -e "${BOLD}${CYAN}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}"
echo -e "${BOLD} Results: ${GREEN}$PASS passed${NC} / ${RED}$FAIL failed${NC} / ${YELLOW}$WARN warnings${NC} (${TOTAL} total)"
echo -e "${BOLD}${CYAN}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}"
if [[ $FAIL -gt 0 ]]; then
exit 1
fi

View File

@ -1,277 +0,0 @@
/**
 * applet-circuit-canvas — reusable SVG node graph renderer.
 *
 * Lightweight pan/zoom SVG canvas for rendering sub-node graphs
 * inside expanded folk-applet shapes. Extracted from folk-gov-circuit patterns.
 *
 * NOT a FolkShape — just an HTMLElement used inside folk-applet's shadow DOM.
*/
import type { AppletSubNode, AppletSubEdge } from "../shared/applet-types";
const NODE_WIDTH = 200;
const NODE_HEIGHT = 80;
const PORT_RADIUS = 5;
/** Escape &, <, >, and " so `s` is safe inside markup text and attributes. */
function esc(s: string): string {
  const entities: Record<string, string> = {
    "&": "&amp;",
    "<": "&lt;",
    ">": "&gt;",
    '"': "&quot;",
  };
  return s.replace(/[&<>"]/g, (ch) => entities[ch] ?? ch);
}
/** Cubic Bézier path from (x1,y1) to (x2,y2) with horizontal control handles. */
function bezierPath(x1: number, y1: number, x2: number, y2: number): string {
  const handle = 0.5 * Math.abs(x2 - x1);
  const c1 = `${x1 + handle} ${y1}`;
  const c2 = `${x2 - handle} ${y2}`;
  return `M ${x1} ${y1} C ${c1}, ${c2}, ${x2} ${y2}`;
}
const STYLES = `
:host {
display: block;
width: 100%;
height: 100%;
background: #0f172a;
border-radius: 0 0 8px 8px;
overflow: hidden;
}
svg {
width: 100%;
height: 100%;
}
.acc-node-body {
width: 100%;
height: 100%;
box-sizing: border-box;
background: #1e293b;
border: 1.5px solid #334155;
border-radius: 6px;
padding: 8px;
display: flex;
flex-direction: column;
justify-content: center;
gap: 4px;
font-family: inherit;
}
.acc-node-label {
font-size: 11px;
font-weight: 600;
color: #e2e8f0;
display: flex;
align-items: center;
gap: 4px;
}
.acc-node-meta {
font-size: 10px;
color: #94a3b8;
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
}
.acc-edge-path {
fill: none;
stroke-width: 1.5;
stroke-opacity: 0.5;
pointer-events: none;
}
.acc-edge-hit {
fill: none;
stroke: transparent;
stroke-width: 10;
cursor: pointer;
}
.acc-edge-hit:hover + .acc-edge-path {
stroke-opacity: 1;
stroke-width: 2.5;
}
.acc-port-dot {
transition: r 0.1s;
}
.acc-port-hit {
cursor: crosshair;
}
.acc-port-hit:hover ~ .acc-port-dot {
r: 8;
}
.acc-grid-line {
stroke: #1e293b;
stroke-width: 0.5;
}
`;
export class AppletCircuitCanvas extends HTMLElement {
  #shadow: ShadowRoot;
  #nodes: AppletSubNode[] = [];
  #edges: AppletSubEdge[] = [];
  // Viewport state applied as translate(panX, panY) scale(zoom) on #canvas-transform.
  #panX = 0;
  #panY = 0;
  #zoom = 1;
  #isPanning = false;
  #panStart = { x: 0, y: 0 };

  constructor() {
    super();
    this.#shadow = this.attachShadow({ mode: "open" });
  }

  get nodes() { return this.#nodes; }
  /** Replace the node list and re-render (also refits the viewport). */
  set nodes(v: AppletSubNode[]) {
    this.#nodes = v;
    this.#render();
  }

  get edges() { return this.#edges; }
  /** Replace the edge list and re-render (also refits the viewport). */
  set edges(v: AppletSubEdge[]) {
    this.#edges = v;
    this.#render();
  }

  connectedCallback() {
    // #render() binds interaction handlers itself — see the note there.
    this.#render();
  }

  /**
   * Rebuild the whole shadow DOM (grid, edge layer, node layer), refit the
   * viewport to the content, and rebind pan/zoom on the fresh <svg>.
   */
  #render(): void {
    const gridDef = `
      <defs>
        <pattern id="acc-grid" width="30" height="30" patternUnits="userSpaceOnUse">
          <line x1="30" y1="0" x2="30" y2="30" class="acc-grid-line"/>
          <line x1="0" y1="30" x2="30" y2="30" class="acc-grid-line"/>
        </pattern>
      </defs>
      <rect width="8000" height="8000" x="-4000" y="-4000" fill="url(#acc-grid)"/>
    `;
    // Edges: cubic curve from the right-center of the source node to the
    // left-center of the target; a wide transparent "hit" path sits under
    // the visible path for easier hover/click.
    const edgesHtml = this.#edges.map(edge => {
      const fromNode = this.#nodes.find(n => n.id === edge.fromNode);
      const toNode = this.#nodes.find(n => n.id === edge.toNode);
      if (!fromNode || !toNode) return "";  // dangling edge: skip silently
      const x1 = fromNode.position.x + NODE_WIDTH;
      const y1 = fromNode.position.y + NODE_HEIGHT / 2;
      const x2 = toNode.position.x;
      const y2 = toNode.position.y + NODE_HEIGHT / 2;
      const d = bezierPath(x1, y1, x2, y2);
      return `
        <g data-edge-id="${esc(edge.id)}">
          <path class="acc-edge-hit" d="${d}"/>
          <path class="acc-edge-path" d="${d}" stroke="#6366f1" stroke-opacity="0.5"/>
        </g>
      `;
    }).join("");
    // Nodes: HTML bodies embedded via <foreignObject>; show at most the
    // first two config entries as a summary line.
    const nodesHtml = this.#nodes.map(node => {
      const configSummary = Object.entries(node.config)
        .slice(0, 2)
        .map(([k, v]) => `${k}: ${v}`)
        .join(", ");
      return `
        <g data-node-id="${esc(node.id)}">
          <foreignObject x="${node.position.x}" y="${node.position.y}" width="${NODE_WIDTH}" height="${NODE_HEIGHT}">
            <div xmlns="http://www.w3.org/1999/xhtml" class="acc-node-body">
              <div class="acc-node-label">${esc(node.icon)} ${esc(node.label)}</div>
              ${configSummary ? `<div class="acc-node-meta">${esc(configSummary)}</div>` : ""}
            </div>
          </foreignObject>
        </g>
      `;
    }).join("");
    this.#shadow.innerHTML = `
      <style>${STYLES}</style>
      <svg xmlns="http://www.w3.org/2000/svg">
        <g id="canvas-transform" transform="translate(${this.#panX},${this.#panY}) scale(${this.#zoom})">
          ${gridDef}
          <g id="edge-layer">${edgesHtml}</g>
          <g id="node-layer">${nodesHtml}</g>
        </g>
      </svg>
    `;
    this.#fitView();
    // BUGFIX: assigning innerHTML above destroys the previous <svg> along
    // with its event listeners. Interaction used to be bound once from
    // connectedCallback, so pan/zoom died after the first nodes/edges
    // update. Rebinding after every render fixes that; the svg is always
    // fresh here, so listeners are never attached twice.
    this.#setupInteraction();
  }

  /** Center and scale the viewport so all nodes fit (max zoom 1.5x). */
  #fitView(): void {
    if (this.#nodes.length === 0) return;
    const svg = this.#shadow.querySelector("svg");
    if (!svg) return;
    const rect = svg.getBoundingClientRect();
    if (rect.width === 0 || rect.height === 0) return;  // not laid out yet
    let minX = Infinity, minY = Infinity, maxX = -Infinity, maxY = -Infinity;
    for (const n of this.#nodes) {
      minX = Math.min(minX, n.position.x);
      minY = Math.min(minY, n.position.y);
      maxX = Math.max(maxX, n.position.x + NODE_WIDTH);
      maxY = Math.max(maxY, n.position.y + NODE_HEIGHT);
    }
    const pad = 30;
    const contentW = maxX - minX + pad * 2;
    const contentH = maxY - minY + pad * 2;
    const scaleX = rect.width / contentW;
    const scaleY = rect.height / contentH;
    this.#zoom = Math.min(scaleX, scaleY, 1.5);
    this.#panX = (rect.width - contentW * this.#zoom) / 2 - (minX - pad) * this.#zoom;
    this.#panY = (rect.height - contentH * this.#zoom) / 2 - (minY - pad) * this.#zoom;
    this.#updateTransform();
  }

  /** Push the current pan/zoom onto the transform group without re-rendering. */
  #updateTransform(): void {
    const g = this.#shadow.getElementById("canvas-transform");
    if (g) g.setAttribute("transform", `translate(${this.#panX},${this.#panY}) scale(${this.#zoom})`);
  }

  /** Bind pan (left/middle drag) and wheel-zoom handlers on the current <svg>. */
  #setupInteraction(): void {
    const svg = this.#shadow.querySelector("svg");
    if (!svg) return;
    // Pan
    svg.addEventListener("pointerdown", (e) => {
      if (e.button !== 0 && e.button !== 1) return;
      this.#isPanning = true;
      this.#panStart = { x: e.clientX - this.#panX, y: e.clientY - this.#panY };
      svg.setPointerCapture(e.pointerId);
      e.preventDefault();
    });
    svg.addEventListener("pointermove", (e) => {
      if (!this.#isPanning) return;
      this.#panX = e.clientX - this.#panStart.x;
      this.#panY = e.clientY - this.#panStart.y;
      this.#updateTransform();
    });
    svg.addEventListener("pointerup", () => {
      this.#isPanning = false;
    });
    // Zoom, anchored at the cursor position (clamped to 0.2x–3x).
    svg.addEventListener("wheel", (e) => {
      e.preventDefault();
      const factor = e.deltaY > 0 ? 0.9 : 1.1;
      const oldZoom = this.#zoom;
      const newZoom = Math.max(0.2, Math.min(3, oldZoom * factor));
      const rect = svg.getBoundingClientRect();
      const mx = e.clientX - rect.left;
      const my = e.clientY - rect.top;
      // Keep the point under the cursor fixed while zooming.
      this.#panX = mx - (mx - this.#panX) * (newZoom / oldZoom);
      this.#panY = my - (my - this.#panY) * (newZoom / oldZoom);
      this.#zoom = newZoom;
      this.#updateTransform();
    }, { passive: false });
  }
}
customElements.define("applet-circuit-canvas", AppletCircuitCanvas);

View File

@ -1,24 +0,0 @@
/**
* Barrel file re-exporting all module applet definitions.
* Imported in canvas.html to register applets client-side
* (applet defs contain functions, can't be JSON-serialized).
*/
export { govApplets } from "../modules/rgov/applets";
export { flowsApplets } from "../modules/rflows/applets";
export { walletApplets } from "../modules/rwallet/applets";
export { tasksApplets } from "../modules/rtasks/applets";
export { timeApplets } from "../modules/rtime/applets";
export { calApplets } from "../modules/rcal/applets";
export { chatsApplets } from "../modules/rchats/applets";
export { dataApplets } from "../modules/rdata/applets";
export { docsApplets } from "../modules/rdocs/applets";
export { notesApplets } from "../modules/rnotes/applets";
export { photosApplets } from "../modules/rphotos/applets";
export { mapsApplets } from "../modules/rmaps/applets";
export { networkApplets } from "../modules/rnetwork/applets";
export { choicesApplets } from "../modules/rchoices/applets";
export { inboxApplets } from "../modules/rinbox/applets";
export { socialsApplets } from "../modules/rsocials/applets";
export { booksApplets } from "../modules/rbooks/applets";
export { exchangeApplets } from "../modules/rexchange/applets";

View File

@ -1,209 +0,0 @@
/**
 * AppletTemplateManager — save/instantiate/list/delete applet templates.
*
* Templates capture a selection of shapes + their inter-connecting arrows,
* storing relative positions in CommunityDoc.templates. Instantiation
* generates new IDs, remaps arrow refs, and places at cursor position.
*/
import type { CommunitySync, CommunityDoc, ShapeData } from "./community-sync";
import type { AppletTemplateRecord, AppletTemplateShape, AppletTemplateArrow } from "../shared/applet-types";
import * as Automerge from "@automerge/automerge";
/**
 * Manages applet templates stored in CommunityDoc.templates: saving a
 * selection of shapes plus their internal arrows as a reusable template,
 * stamping a template back onto the canvas at a position, and
 * listing/fetching/deleting stored templates.
 */
export class AppletTemplateManager {
  #sync: CommunitySync;

  constructor(sync: CommunitySync) {
    this.#sync = sync;
  }

  /** Read the (possibly absent) templates map off the current doc. */
  #readTemplates(): Record<string, AppletTemplateRecord> {
    return (this.#sync.doc as any).templates || {};
  }

  /** Run a named Automerge change, guaranteeing `templates` exists first. */
  #mutate(message: string, mutator: (doc: any) => void): void {
    const current = this.#sync.doc;
    const next = Automerge.change(current, message, (draft: any) => {
      if (!draft.templates) draft.templates = {};
      mutator(draft);
    });
    // CommunitySync keeps the canonical doc; hand the new head back to it.
    (this.#sync as any)._applyDocChange(next);
  }

  // ── Save ──
  /**
   * Save selected shapes + their internal arrows as a template.
   * Only captures arrows where both source AND target are in the selection.
   * Returns the stored record, or null when nothing in the selection exists.
   */
  saveTemplate(
    selectedIds: string[],
    meta: { name: string; description?: string; icon?: string; color?: string; createdBy?: string },
  ): AppletTemplateRecord | null {
    const shapes = this.#sync.doc.shapes || {};
    const selection = new Set(selectedIds);
    // Drop IDs that no longer resolve to a shape.
    const presentIds = selectedIds.filter((sid) => shapes[sid]);
    if (presentIds.length === 0) return null;

    // Bounding box over every selected shape (positions become relative to it).
    let left = Infinity;
    let top = Infinity;
    let right = -Infinity;
    let bottom = -Infinity;
    for (const sid of presentIds) {
      const shape = shapes[sid];
      left = Math.min(left, shape.x);
      top = Math.min(top, shape.y);
      right = Math.max(right, shape.x + shape.width);
      bottom = Math.max(bottom, shape.y + shape.height);
    }

    // Shared counter so shape and arrow relative IDs never collide.
    const relIdByShapeId = new Map<string, string>();
    let nextRel = 0;
    const shapeRecords: AppletTemplateShape[] = [];
    const arrowRecords: AppletTemplateArrow[] = [];

    // Pass 1: non-arrow shapes, in selection order.
    for (const sid of presentIds) {
      const shape = shapes[sid];
      if (shape.type === "folk-arrow") continue; // arrows handled in pass 2
      const rel = `rel-${nextRel++}`;
      relIdByShapeId.set(sid, rel);
      // Destructure off the geometry/identity keys; everything else is props.
      const { id: _id, x, y, width, height, rotation, type, ...extraProps } = shape;
      shapeRecords.push({
        relativeId: rel,
        type,
        relX: x - left,
        relY: y - top,
        width,
        height,
        rotation: rotation || 0,
        props: extraProps as Record<string, unknown>,
      });
    }

    // Pass 2: arrows whose endpoints are BOTH inside the selection.
    for (const shape of Object.values(shapes)) {
      if (shape.type !== "folk-arrow") continue;
      if (!shape.sourceId || !shape.targetId) continue;
      if (!selection.has(shape.sourceId) || !selection.has(shape.targetId)) continue;
      const fromRel = relIdByShapeId.get(shape.sourceId);
      const toRel = relIdByShapeId.get(shape.targetId);
      if (!fromRel || !toRel) continue;
      arrowRecords.push({
        relativeId: `rel-${nextRel++}`,
        sourceRelId: fromRel,
        targetRelId: toRel,
        sourcePort: shape.sourcePort,
        targetPort: shape.targetPort,
      });
    }

    const record: AppletTemplateRecord = {
      id: `tpl-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`,
      name: meta.name,
      description: meta.description || "",
      icon: meta.icon || "📋",
      color: meta.color || "#6366f1",
      createdAt: Date.now(),
      createdBy: meta.createdBy || "unknown",
      shapes: shapeRecords,
      arrows: arrowRecords,
      boundingWidth: right - left,
      boundingHeight: bottom - top,
    };
    this.#mutate(`Save template "${meta.name}"`, (draft) => {
      draft.templates[record.id] = record;
    });
    return record;
  }

  // ── Instantiate ──
  /**
   * Create new shapes + arrows from a template at the given position.
   * Returns array of new shape IDs (for optional group creation).
   */
  instantiateTemplate(templateId: string, x: number, y: number): string[] {
    const record = this.#readTemplates()[templateId];
    if (!record) return [];

    // relativeId → freshly minted canvas ID.
    const realIdByRel = new Map<string, string>();
    const created: string[] = [];

    // Shapes first, so arrows can resolve their endpoints.
    for (const tplShape of record.shapes) {
      const realId = `shape-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`;
      realIdByRel.set(tplShape.relativeId, realId);
      const data: ShapeData = {
        type: tplShape.type,
        id: realId,
        x: x + tplShape.relX,
        y: y + tplShape.relY,
        width: tplShape.width,
        height: tplShape.height,
        rotation: tplShape.rotation,
        ...tplShape.props,
      };
      this.#sync.addShapeData(data);
      created.push(realId);
    }

    // Arrows with endpoints remapped to the new shape IDs.
    for (const tplArrow of record.arrows) {
      const from = realIdByRel.get(tplArrow.sourceRelId);
      const to = realIdByRel.get(tplArrow.targetRelId);
      if (!from || !to) continue;
      const realId = `arrow-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`;
      const data: ShapeData = {
        type: "folk-arrow",
        id: realId,
        x: 0,
        y: 0,
        width: 0,
        height: 0,
        rotation: 0,
        sourceId: from,
        targetId: to,
        sourcePort: tplArrow.sourcePort,
        targetPort: tplArrow.targetPort,
      };
      this.#sync.addShapeData(data);
      created.push(realId);
    }
    return created;
  }

  // ── List / Get / Delete ──
  /** All templates, newest first. */
  listTemplates(): AppletTemplateRecord[] {
    const all = Object.values(this.#readTemplates());
    return all.sort((a, b) => b.createdAt - a.createdAt);
  }

  getTemplate(id: string): AppletTemplateRecord | undefined {
    return this.#readTemplates()[id];
  }

  deleteTemplate(id: string): void {
    this.#mutate(`Delete template "${id}"`, (draft) => {
      delete draft.templates[id];
    });
  }
}

View File

@ -1,850 +0,0 @@
/**
* Canvas Tool Registry shared by server (Gemini function declarations) and client (shape spawning).
* Pure TypeScript, no DOM or server dependencies.
*/
/**
 * One canvas tool: the Gemini function declaration advertised to the model
 * (server side) plus the client-side recipe for spawning the matching shape.
 */
export interface CanvasToolDefinition {
  /** Function declaration sent to Gemini (name, description, JSON-schema-ish parameters). */
  declaration: {
    name: string;
    description: string;
    parameters: {
      type: "object";
      properties: Record<string, { type: string; description: string; enum?: string[] }>;
      required: string[];
    };
  };
  /** Custom-element tag instantiated on the canvas when this tool fires. */
  tagName: string;
  /** Module that owns this tool (omit for core/always-available tools) */
  moduleId?: string;
  /** Maps the model's call arguments onto the spawned element's properties. */
  buildProps: (args: Record<string, any>) => Record<string, any>;
  /** Human-readable label describing the performed action (shown in chat/log). */
  actionLabel: (args: Record<string, any>) => string;
}
// Built-in tools. Module-owned entries carry a moduleId so they can be
// filtered out when the owning module is disabled (see getToolsForModules).
const registry: CanvasToolDefinition[] = [
  {
    declaration: {
      name: "create_map",
      description: "Create an interactive map centered on a location. Use when the user wants to see a place, get directions, or explore a geographic area.",
      parameters: {
        type: "object",
        properties: {
          latitude: { type: "number", description: "Latitude of the center point" },
          longitude: { type: "number", description: "Longitude of the center point" },
          zoom: { type: "number", description: "Zoom level (1-18, default 12)" },
          location_name: { type: "string", description: "Human-readable name of the location" },
        },
        required: ["latitude", "longitude", "location_name"],
      },
    },
    tagName: "folk-map",
    moduleId: "rmaps",
    buildProps: (args) => ({
      // Map element expects [lng, lat] order.
      center: [args.longitude, args.latitude],
      zoom: args.zoom || 12,
    }),
    actionLabel: (args) => `Created map: ${args.location_name}`,
  },
  {
    declaration: {
      name: "create_note",
      description: "Create a markdown note on the canvas. Use for text content, lists, summaries, instructions, or any written information.",
      parameters: {
        type: "object",
        properties: {
          content: { type: "string", description: "Markdown content for the note" },
          title: { type: "string", description: "Optional title for the note" },
        },
        required: ["content"],
      },
    },
    tagName: "folk-markdown",
    buildProps: (args) => ({
      value: args.title ? `# ${args.title}\n\n${args.content}` : args.content,
    }),
    actionLabel: (args) => `Created note${args.title ? `: ${args.title}` : ""}`,
  },
  {
    declaration: {
      name: "create_embed",
      description: "Embed a webpage or web app on the canvas. Use for websites, search results, booking sites, videos, or any URL the user wants to view inline.",
      parameters: {
        type: "object",
        properties: {
          url: { type: "string", description: "The URL to embed" },
          title: { type: "string", description: "Descriptive title for the embed" },
        },
        required: ["url"],
      },
    },
    tagName: "folk-embed",
    buildProps: (args) => ({
      url: args.url,
    }),
    actionLabel: (args) => `Embedded: ${args.title || args.url}`,
  },
  {
    declaration: {
      name: "create_image",
      description: "Display an image on the canvas from a URL. Use when showing an existing image, photo, diagram, or any direct image link.",
      parameters: {
        type: "object",
        properties: {
          src: { type: "string", description: "Image URL" },
          alt: { type: "string", description: "Alt text describing the image" },
        },
        required: ["src"],
      },
    },
    tagName: "folk-image",
    buildProps: (args) => ({
      src: args.src,
      alt: args.alt || "",
    }),
    actionLabel: (args) => `Created image${args.alt ? `: ${args.alt}` : ""}`,
  },
  {
    declaration: {
      name: "create_bookmark",
      description: "Create a bookmark card for a URL. Use when the user wants to save or reference a link without embedding the full page.",
      parameters: {
        type: "object",
        properties: {
          url: { type: "string", description: "The URL to bookmark" },
        },
        required: ["url"],
      },
    },
    tagName: "folk-bookmark",
    buildProps: (args) => ({
      url: args.url,
    }),
    actionLabel: (args) => `Bookmarked: ${args.url}`,
  },
  {
    declaration: {
      name: "create_image_gen",
      description: "Generate an AI image from a text prompt. Use when the user wants to create, generate, or imagine a new image that doesn't exist yet.",
      parameters: {
        type: "object",
        properties: {
          prompt: { type: "string", description: "Text prompt describing the image to generate" },
          style: {
            type: "string",
            description: "Visual style for the generated image",
            enum: ["photorealistic", "illustration", "painting", "sketch", "punk-zine", "collage", "vintage", "minimalist"],
          },
        },
        required: ["prompt"],
      },
    },
    tagName: "folk-image-gen",
    buildProps: (args) => ({
      prompt: args.prompt,
      style: args.style || "photorealistic",
    }),
    actionLabel: (args) => `Generating image: ${args.prompt.slice(0, 50)}${args.prompt.length > 50 ? "..." : ""}`,
  },
  // ── Trip Planning Tools ──
  {
    declaration: {
      name: "create_destination",
      description: "Create a destination card for a trip location. Use when the user mentions a city, place, or stop on their trip.",
      parameters: {
        type: "object",
        properties: {
          destName: { type: "string", description: "Name of the destination (city or place)" },
          country: { type: "string", description: "Country name" },
          lat: { type: "number", description: "Latitude coordinate" },
          lng: { type: "number", description: "Longitude coordinate" },
          arrivalDate: { type: "string", description: "Arrival date in YYYY-MM-DD format" },
          departureDate: { type: "string", description: "Departure date in YYYY-MM-DD format" },
          notes: { type: "string", description: "Additional notes about this destination" },
        },
        required: ["destName"],
      },
    },
    tagName: "folk-destination",
    moduleId: "rtrips",
    buildProps: (args) => ({
      destName: args.destName,
      // Optional fields are only set when present so element defaults apply.
      ...(args.country ? { country: args.country } : {}),
      ...(args.lat != null ? { lat: args.lat } : {}),
      ...(args.lng != null ? { lng: args.lng } : {}),
      ...(args.arrivalDate ? { arrivalDate: args.arrivalDate } : {}),
      ...(args.departureDate ? { departureDate: args.departureDate } : {}),
      ...(args.notes ? { notes: args.notes } : {}),
    }),
    actionLabel: (args) => `Created destination: ${args.destName}${args.country ? `, ${args.country}` : ""}`,
  },
  {
    declaration: {
      name: "create_itinerary",
      description: "Create an itinerary card with a list of activities/events organized by date. Use when planning a schedule or day-by-day plan.",
      parameters: {
        type: "object",
        properties: {
          tripTitle: { type: "string", description: "Title for the itinerary" },
          itemsJson: { type: "string", description: 'JSON array of items. Each: {"id":"<uuid>","title":"...","date":"YYYY-MM-DD","startTime":"HH:MM","category":"ACTIVITY|TRANSPORT|MEAL|FREE_TIME|FLIGHT"}' },
        },
        required: ["tripTitle", "itemsJson"],
      },
    },
    tagName: "folk-itinerary",
    moduleId: "rtrips",
    buildProps: (args) => {
      // Model-supplied JSON may be malformed; fall back to an empty list.
      let items: any[] = [];
      try { items = JSON.parse(args.itemsJson); } catch { items = []; }
      return { tripTitle: args.tripTitle, items };
    },
    actionLabel: (args) => `Created itinerary: ${args.tripTitle}`,
  },
  {
    declaration: {
      name: "create_booking",
      description: "Create a booking card for a flight, hotel, transport, activity, or restaurant reservation.",
      parameters: {
        type: "object",
        properties: {
          bookingType: {
            type: "string",
            description: "Type of booking",
            enum: ["FLIGHT", "HOTEL", "CAR_RENTAL", "TRAIN", "BUS", "FERRY", "ACTIVITY", "RESTAURANT", "OTHER"],
          },
          provider: { type: "string", description: "Provider/company name (e.g. airline, hotel name)" },
          cost: { type: "number", description: "Cost amount" },
          currency: { type: "string", description: "ISO currency code (e.g. USD, EUR)" },
          startDate: { type: "string", description: "Start/check-in date in YYYY-MM-DD format" },
          endDate: { type: "string", description: "End/check-out date in YYYY-MM-DD format" },
          bookingStatus: { type: "string", description: "Booking status", enum: ["PENDING", "CONFIRMED", "CANCELLED"] },
          details: { type: "string", description: "Additional booking details or notes" },
        },
        required: ["bookingType", "provider"],
      },
    },
    tagName: "folk-booking",
    moduleId: "rtrips",
    buildProps: (args) => ({
      bookingType: args.bookingType,
      provider: args.provider,
      ...(args.cost != null ? { cost: args.cost } : {}),
      ...(args.currency ? { currency: args.currency } : {}),
      ...(args.startDate ? { startDate: args.startDate } : {}),
      ...(args.endDate ? { endDate: args.endDate } : {}),
      ...(args.bookingStatus ? { bookingStatus: args.bookingStatus } : {}),
      ...(args.details ? { details: args.details } : {}),
    }),
    // Fix: type and provider were concatenated with no separator ("FLIGHTDelta").
    actionLabel: (args) => `Created booking: ${args.bookingType} — ${args.provider}`,
  },
  {
    declaration: {
      name: "create_budget",
      description: "Create a budget tracker card with total budget and expense line items. Use when the user wants to track trip costs.",
      parameters: {
        type: "object",
        properties: {
          budgetTotal: { type: "number", description: "Total budget amount" },
          currency: { type: "string", description: "ISO currency code (e.g. USD, EUR)" },
          expensesJson: { type: "string", description: 'JSON array of expenses. Each: {"id":"<uuid>","category":"TRANSPORT|ACCOMMODATION|FOOD|ACTIVITY|SHOPPING|OTHER","description":"...","amount":123,"date":"YYYY-MM-DD"}' },
        },
        required: ["budgetTotal"],
      },
    },
    tagName: "folk-budget",
    moduleId: "rtrips",
    buildProps: (args) => {
      let expenses: any[] = [];
      try { expenses = JSON.parse(args.expensesJson); } catch { expenses = []; }
      return {
        budgetTotal: args.budgetTotal,
        ...(args.currency ? { currency: args.currency } : {}),
        expenses,
      };
    },
    actionLabel: (args) => `Created budget: ${args.currency || "USD"} ${args.budgetTotal}`,
  },
  {
    declaration: {
      name: "create_packing_list",
      description: "Create a packing list card with checkable items organized by category. Use when the user needs help with what to pack.",
      parameters: {
        type: "object",
        properties: {
          itemsJson: { type: "string", description: 'JSON array of packing items. Each: {"id":"<uuid>","name":"...","category":"CLOTHING|FOOTWEAR|ELECTRONICS|GEAR|PERSONAL|DOCUMENTS|SAFETY|SUPPLIES","quantity":1,"packed":false}' },
        },
        required: ["itemsJson"],
      },
    },
    tagName: "folk-packing-list",
    moduleId: "rtrips",
    buildProps: (args) => {
      let items: any[] = [];
      try { items = JSON.parse(args.itemsJson); } catch { items = []; }
      return { items };
    },
    actionLabel: (args) => {
      let count = 0;
      try { count = JSON.parse(args.itemsJson).length; } catch {}
      return `Created packing list (${count} items)`;
    },
  },
];
// ── Mermaid Diagram Tool ──
// Core tool (no moduleId): spawns a folk-mermaid-gen element that turns the prompt into a diagram.
registry.push({
  declaration: {
    name: "create_mermaid_diagram",
    description: "Create a mermaid diagram on the canvas. Use when the user wants to create flowcharts, sequence diagrams, class diagrams, state diagrams, ER diagrams, Gantt charts, or any diagram that can be expressed in Mermaid syntax.",
    parameters: {
      type: "object",
      properties: {
        prompt: { type: "string", description: "Description of the diagram to generate (e.g. 'CI/CD pipeline with build, test, deploy stages')" },
      },
      required: ["prompt"],
    },
  },
  tagName: "folk-mermaid-gen",
  buildProps: (args) => ({
    prompt: args.prompt,
  }),
  // Truncate long prompts to keep the action label readable.
  actionLabel: (args) => `Creating diagram: ${args.prompt.slice(0, 50)}${args.prompt.length > 50 ? "..." : ""}`,
});
// ── Social Media / Campaign Tools ──
// All owned by the rsocials module: posts, threads, campaigns, newsletters.
registry.push(
  {
    declaration: {
      name: "create_social_post",
      description: "Create a social media post card for scheduling across platforms.",
      parameters: {
        type: "object",
        properties: {
          platform: { type: "string", description: "Target platform", enum: ["x", "linkedin", "instagram", "youtube", "threads", "bluesky", "tiktok", "facebook"] },
          content: { type: "string", description: "Post text content" },
          postType: { type: "string", description: "Format", enum: ["text", "image", "video", "carousel", "thread", "article"] },
          scheduledAt: { type: "string", description: "ISO datetime to schedule" },
          hashtags: { type: "string", description: "Comma-separated hashtags" },
        },
        required: ["platform", "content"],
      },
    },
    tagName: "folk-social-post",
    moduleId: "rsocials",
    buildProps: (args) => ({
      platform: args.platform || "x",
      content: args.content,
      postType: args.postType || "text",
      scheduledAt: args.scheduledAt || "",
      // Split the comma-separated string into a trimmed, non-empty tag list.
      hashtags: args.hashtags ? args.hashtags.split(",").map((t: string) => t.trim()).filter(Boolean) : [],
      status: "draft",
    }),
    actionLabel: (args) => `Created ${args.platform || "social"} post`,
  },
  {
    declaration: {
      name: "create_social_thread",
      description: "Create a tweet thread card on the canvas. Use when the user wants to draft a multi-tweet thread.",
      parameters: {
        type: "object",
        properties: {
          title: { type: "string", description: "Thread title" },
          platform: { type: "string", description: "Target platform", enum: ["x", "bluesky", "threads"] },
          tweetsJson: { type: "string", description: "JSON array of tweet strings" },
          status: { type: "string", description: "Thread status", enum: ["draft", "ready", "published"] },
        },
        required: ["title"],
      },
    },
    tagName: "folk-social-thread",
    moduleId: "rsocials",
    buildProps: (args) => {
      // Malformed or missing JSON falls back to an empty thread.
      let tweets: string[] = [];
      try { tweets = JSON.parse(args.tweetsJson || "[]"); } catch { tweets = []; }
      return {
        title: args.title,
        platform: args.platform || "x",
        tweets,
        status: args.status || "draft",
      };
    },
    actionLabel: (args) => `Created thread: ${args.title}`,
  },
  {
    declaration: {
      name: "create_campaign_card",
      description: "Create a campaign dashboard card on the canvas. Use when the user wants to plan or track a social media campaign.",
      parameters: {
        type: "object",
        properties: {
          title: { type: "string", description: "Campaign title" },
          description: { type: "string", description: "Campaign description" },
          platforms: { type: "string", description: "Comma-separated platform names" },
          duration: { type: "string", description: "Campaign duration (e.g. '4 weeks')" },
        },
        required: ["title"],
      },
    },
    tagName: "folk-social-campaign",
    moduleId: "rsocials",
    buildProps: (args) => ({
      title: args.title,
      description: args.description || "",
      // Normalize platform names to lowercase, trimmed, non-empty entries.
      platforms: args.platforms ? args.platforms.split(",").map((p: string) => p.trim().toLowerCase()).filter(Boolean) : [],
      duration: args.duration || "",
    }),
    actionLabel: (args) => `Created campaign: ${args.title}`,
  },
  {
    declaration: {
      name: "create_newsletter_card",
      description: "Create a newsletter/email campaign card on the canvas. Use when the user wants to draft or schedule an email newsletter.",
      parameters: {
        type: "object",
        properties: {
          subject: { type: "string", description: "Email subject line" },
          listName: { type: "string", description: "Mailing list name" },
          status: { type: "string", description: "Newsletter status", enum: ["draft", "scheduled", "sent"] },
          scheduledAt: { type: "string", description: "ISO datetime to schedule" },
        },
        required: ["subject"],
      },
    },
    tagName: "folk-social-newsletter",
    moduleId: "rsocials",
    buildProps: (args) => ({
      subject: args.subject,
      listName: args.listName || "",
      status: args.status || "draft",
      scheduledAt: args.scheduledAt || "",
    }),
    actionLabel: (args) => `Created newsletter: ${args.subject}`,
  },
);
// ── rTime Commitment/Task Tools ──
// Owned by the rtime module; both default to the "demo" space when no slug is given.
registry.push(
  {
    declaration: {
      name: "create_commitment_pool",
      description: "Create a commitment pool basket on the canvas. Shows floating orbs representing community time pledges that can be dragged onto task cards.",
      parameters: {
        type: "object",
        properties: {
          spaceSlug: { type: "string", description: "The space slug to load commitments from" },
        },
        required: ["spaceSlug"],
      },
    },
    tagName: "folk-commitment-pool",
    moduleId: "rtime",
    buildProps: (args) => ({
      spaceSlug: args.spaceSlug || "demo",
    }),
    actionLabel: (args) => `Created commitment pool for ${args.spaceSlug || "demo"}`,
  },
  {
    declaration: {
      name: "create_task_request",
      description: "Create a task request card on the canvas with skill slots. Commitments can be dragged from the pool onto matching skill slots.",
      parameters: {
        type: "object",
        properties: {
          taskName: { type: "string", description: "Name of the task" },
          spaceSlug: { type: "string", description: "The space slug this task belongs to" },
          needsJson: { type: "string", description: 'JSON object of skill needs, e.g. {"facilitation":3,"design":2}' },
        },
        required: ["taskName"],
      },
    },
    tagName: "folk-task-request",
    moduleId: "rtime",
    buildProps: (args) => {
      // Malformed or missing JSON means no declared skill needs.
      let needs: Record<string, number> = {};
      try { needs = JSON.parse(args.needsJson || "{}"); } catch { needs = {}; }
      return {
        taskName: args.taskName,
        spaceSlug: args.spaceSlug || "demo",
        needs,
      };
    },
    actionLabel: (args) => `Created task request: ${args.taskName}`,
  },
);
// ── rTime Weaving Coverage Applet ──
// Parameterless tool: spawns a generic folk-applet configured by moduleId/appletId.
registry.push({
  declaration: {
    name: "create_weaving_coverage",
    description: "Create a weaving coverage applet card on the canvas. Shows per-task skill fulfillment bars from the commitment weaving system. Self-fetches weaving data and outputs coverage summary for downstream applets.",
    parameters: {
      type: "object",
      properties: {},
      required: [],
    },
  },
  tagName: "folk-applet",
  moduleId: "rtime",
  buildProps: () => ({
    moduleId: "rtime",
    appletId: "weaving-coverage",
  }),
  actionLabel: () => "Created weaving coverage applet",
});
// ── rTasks Resource Coverage Applet ──
// Companion applet: consumes the coverage data emitted by weaving-coverage.
registry.push({
  declaration: {
    name: "create_resource_coverage",
    description: "Create a resource coverage applet card on the canvas. Shows task readiness status (ready/partial/unresourced) based on commitment coverage data piped in via the coverage-in port.",
    parameters: {
      type: "object",
      properties: {},
      required: [],
    },
  },
  tagName: "folk-applet",
  moduleId: "rtasks",
  buildProps: () => ({
    moduleId: "rtasks",
    appletId: "resource-coverage",
  }),
  actionLabel: () => "Created resource coverage applet",
});
// ── rExchange P2P Exchange Tool ──
// Owned by the rexchange module; defaults to the "demo" space.
registry.push({
  declaration: {
    name: "create_exchange_node",
    description: "Create a P2P exchange order board on the canvas. Shows buy/sell intents as colored orbs with live matching status. Use when the user wants to visualize or interact with the community exchange.",
    parameters: {
      type: "object",
      properties: {
        spaceSlug: { type: "string", description: "The space slug to load exchange intents from" },
      },
      required: ["spaceSlug"],
    },
  },
  tagName: "folk-exchange-node",
  moduleId: "rexchange",
  buildProps: (args) => ({
    spaceSlug: args.spaceSlug || "demo",
  }),
  actionLabel: (args) => `Created exchange board for ${args.spaceSlug || "demo"}`,
});
// ── ASCII Art Tool ──
// Core tool: optional pattern/palette/size args are only forwarded when supplied.
registry.push({
  declaration: {
    name: "create_ascii_art",
    description: "Generate ASCII art from patterns like plasma, mandelbrot, spiral, waves, nebula, kaleidoscope, aurora, lava, crystals, or fractal_tree.",
    parameters: {
      type: "object",
      properties: {
        prompt: { type: "string", description: "Pattern name or description of what to generate" },
        pattern: {
          type: "string",
          description: "Pattern type",
          enum: ["plasma", "mandelbrot", "spiral", "waves", "nebula", "kaleidoscope", "aurora", "lava", "crystals", "fractal_tree", "random"],
        },
        palette: {
          type: "string",
          description: "Character palette to use",
          enum: ["classic", "blocks", "braille", "dots", "shades", "emoji", "cosmic", "runes", "geometric", "kanji", "hieroglyph", "alchemical"],
        },
        width: { type: "number", description: "Width in characters (default 80)" },
        height: { type: "number", description: "Height in characters (default 40)" },
      },
      required: ["prompt"],
    },
  },
  tagName: "folk-ascii-gen",
  buildProps: (args) => ({
    prompt: args.prompt,
    ...(args.pattern ? { pattern: args.pattern } : {}),
    ...(args.palette ? { palette: args.palette } : {}),
    ...(args.width ? { width: args.width } : {}),
    ...(args.height ? { height: args.height } : {}),
  }),
  actionLabel: (args) => `Generating ASCII art: ${args.prompt?.slice(0, 50) || args.pattern || "random"}`,
});
// ── MakeReal (Sketch-to-HTML) Tool ──
// Core tool: converts a sketch into HTML/CSS via the folk-makereal element.
registry.push({
  declaration: {
    name: "create_makereal",
    description: "Convert a sketch or wireframe into functional HTML/CSS code with live preview. Use when the user wants to turn a drawing into a working web page.",
    parameters: {
      type: "object",
      properties: {
        prompt: { type: "string", description: "Description of the UI to generate from the sketch (e.g. 'A login page with email and password fields')" },
        framework: {
          type: "string",
          description: "CSS/JS framework to use",
          enum: ["html", "tailwind", "react"],
        },
      },
      required: ["prompt"],
    },
  },
  tagName: "folk-makereal",
  buildProps: (args) => ({
    prompt: args.prompt,
    ...(args.framework ? { framework: args.framework } : {}),
  }),
  actionLabel: (args) => `Opening MakeReal: ${args.prompt?.slice(0, 50) || "wireframe"}${(args.prompt?.length || 0) > 50 ? "..." : ""}`,
});
// ── Design Agent Tool ──
// Owned by the rdesign module; opens a Scribus-backed print-layout agent.
registry.push({
  declaration: {
    name: "create_design_agent",
    description: "Open the design agent to create print layouts in Scribus. Use when the user wants to design a poster, flyer, brochure, or any print-ready document.",
    parameters: {
      type: "object",
      properties: {
        brief: { type: "string", description: "Design brief describing what to create (e.g. 'A4 event poster for Mushroom Festival with title, date, and image area')" },
      },
      required: ["brief"],
    },
  },
  tagName: "folk-design-agent",
  moduleId: "rdesign",
  buildProps: (args) => ({ brief: args.brief || "" }),
  actionLabel: (args) => `Opened design agent${args.brief ? `: ${args.brief.slice(0, 50)}` : ""}`,
});
// ── rGov Governance Circuit Tools ──
// Owned by the rgov module: gates, thresholds, knobs, projects, amendments,
// weight transforms, conviction/multisig gates, and a Sankey visualizer.
registry.push(
  {
    declaration: {
      name: "create_binary_gate",
      description: "Create a Yes/No signoff gate on the canvas. Use when a decision requires someone's explicit approval or sign-off.",
      parameters: {
        type: "object",
        properties: {
          title: { type: "string", description: "Title for the signoff gate (e.g. 'Proprietor Approval')" },
          assignee: { type: "string", description: "Who must sign off (leave empty for 'anyone')" },
        },
        required: ["title"],
      },
    },
    tagName: "folk-gov-binary",
    moduleId: "rgov",
    buildProps: (args) => ({
      title: args.title,
      ...(args.assignee ? { assignee: args.assignee } : {}),
    }),
    actionLabel: (args) => `Created binary gate: ${args.title}`,
  },
  {
    declaration: {
      name: "create_threshold",
      description: "Create a numeric threshold gate on the canvas. Use when a decision requires accumulating a target amount (hours, dollars, signatures, etc.).",
      parameters: {
        type: "object",
        properties: {
          title: { type: "string", description: "Title for the threshold (e.g. 'Capital Required')" },
          target: { type: "number", description: "Target value to reach" },
          unit: { type: "string", description: "Unit of measurement (e.g. '$', 'hours', 'signatures')" },
        },
        required: ["title", "target"],
      },
    },
    tagName: "folk-gov-threshold",
    moduleId: "rgov",
    buildProps: (args) => ({
      title: args.title,
      target: args.target,
      ...(args.unit ? { unit: args.unit } : {}),
    }),
    actionLabel: (args) => `Created threshold: ${args.title} (${args.target} ${args.unit || ""})`,
  },
  {
    declaration: {
      name: "create_gov_knob",
      description: "Create an adjustable parameter knob on the canvas. Use when a governance parameter needs to be tunable (e.g. quorum percentage, budget cap).",
      parameters: {
        type: "object",
        properties: {
          title: { type: "string", description: "Parameter name (e.g. 'Quorum %')" },
          min: { type: "number", description: "Minimum value" },
          max: { type: "number", description: "Maximum value" },
          value: { type: "number", description: "Initial value" },
          unit: { type: "string", description: "Unit label (e.g. '%', '$', 'hours')" },
          cooldown: { type: "number", description: "Cooldown in seconds before value propagates (0 for instant)" },
        },
        required: ["title"],
      },
    },
    tagName: "folk-gov-knob",
    moduleId: "rgov",
    buildProps: (args) => ({
      title: args.title,
      // `!= null` (not truthiness) so 0 is forwarded as a valid numeric value.
      ...(args.min != null ? { min: args.min } : {}),
      ...(args.max != null ? { max: args.max } : {}),
      ...(args.value != null ? { value: args.value } : {}),
      ...(args.unit ? { unit: args.unit } : {}),
      ...(args.cooldown != null ? { cooldown: args.cooldown } : {}),
    }),
    actionLabel: (args) => `Created knob: ${args.title}`,
  },
  {
    declaration: {
      name: "create_gov_project",
      description: "Create a governance project aggregator on the canvas. It automatically tracks all upstream gates wired to it and shows overall completion progress.",
      parameters: {
        type: "object",
        properties: {
          title: { type: "string", description: "Project title (e.g. 'Build a Climbing Wall')" },
          description: { type: "string", description: "Project description" },
          status: { type: "string", description: "Initial status", enum: ["draft", "active", "completed", "archived"] },
        },
        required: ["title"],
      },
    },
    tagName: "folk-gov-project",
    moduleId: "rgov",
    buildProps: (args) => ({
      title: args.title,
      ...(args.description ? { description: args.description } : {}),
      ...(args.status ? { status: args.status } : {}),
    }),
    actionLabel: (args) => `Created project: ${args.title}`,
  },
  {
    declaration: {
      name: "create_amendment",
      description: "Create a governance amendment proposal on the canvas. An amendment proposes replacing one gate with another (e.g. converting a dollar threshold into a binary checkbox).",
      parameters: {
        type: "object",
        properties: {
          title: { type: "string", description: "Amendment title" },
          targetShapeId: { type: "string", description: "ID of the shape to modify" },
          replacementType: { type: "string", description: "Type of replacement shape (e.g. 'folk-gov-binary')" },
          approvalMode: { type: "string", description: "How approval works", enum: ["single", "majority", "unanimous"] },
          description: { type: "string", description: "Description of what the amendment changes" },
        },
        required: ["title"],
      },
    },
    tagName: "folk-gov-amendment",
    moduleId: "rgov",
    buildProps: (args) => ({
      title: args.title,
      ...(args.targetShapeId ? { targetShapeId: args.targetShapeId } : {}),
      ...(args.replacementType ? { replacementType: args.replacementType } : {}),
      ...(args.approvalMode ? { approvalMode: args.approvalMode } : {}),
      ...(args.description ? { description: args.description } : {}),
    }),
    actionLabel: (args) => `Created amendment: ${args.title}`,
  },
  {
    declaration: {
      name: "create_quadratic_transform",
      description: "Create a quadratic weight transformer on the canvas. Accepts raw weights and applies sqrt/log/linear dampening — useful for reducing whale dominance in voting.",
      parameters: {
        type: "object",
        properties: {
          title: { type: "string", description: "Transform title (e.g. 'Vote Weight Dampener')" },
          mode: { type: "string", description: "Transform mode", enum: ["sqrt", "log", "linear"] },
        },
        required: ["title"],
      },
    },
    tagName: "folk-gov-quadratic",
    moduleId: "rgov",
    buildProps: (args) => ({
      title: args.title,
      ...(args.mode ? { mode: args.mode } : {}),
    }),
    actionLabel: (args) => `Created quadratic transform: ${args.title}`,
  },
  {
    declaration: {
      name: "create_conviction_gate",
      description: "Create a conviction accumulator on the canvas. Accumulates time-weighted conviction from stakes. Gate mode triggers at threshold; tuner mode continuously emits score.",
      parameters: {
        type: "object",
        properties: {
          title: { type: "string", description: "Gate title (e.g. 'Community Support')" },
          convictionMode: { type: "string", description: "Operating mode", enum: ["gate", "tuner"] },
          threshold: { type: "number", description: "Conviction threshold for gate mode" },
        },
        required: ["title"],
      },
    },
    tagName: "folk-gov-conviction",
    moduleId: "rgov",
    buildProps: (args) => ({
      title: args.title,
      ...(args.convictionMode ? { convictionMode: args.convictionMode } : {}),
      ...(args.threshold != null ? { threshold: args.threshold } : {}),
    }),
    actionLabel: (args) => `Created conviction gate: ${args.title}`,
  },
  {
    declaration: {
      name: "create_multisig_gate",
      description: "Create an M-of-N multisig gate on the canvas. Requires M named signers before passing. Signers can sign manually or auto-populate from upstream binary gates.",
      parameters: {
        type: "object",
        properties: {
          title: { type: "string", description: "Multisig title (e.g. 'Council Approval')" },
          requiredM: { type: "number", description: "Number of required signatures (M)" },
          signerNames: { type: "string", description: "Comma-separated signer names" },
        },
        required: ["title"],
      },
    },
    tagName: "folk-gov-multisig",
    moduleId: "rgov",
    buildProps: (args) => ({
      title: args.title,
      ...(args.requiredM != null ? { requiredM: args.requiredM } : {}),
      // Expand the comma-separated names into unsigned signer records.
      ...(args.signerNames ? {
        signers: args.signerNames.split(",").map((n: string) => ({
          name: n.trim(), signed: false, timestamp: 0,
        })),
      } : {}),
    }),
    actionLabel: (args) => `Created multisig: ${args.title}`,
  },
  {
    declaration: {
      name: "create_sankey_visualizer",
      description: "Create a governance flow Sankey visualizer on the canvas. Auto-discovers all nearby gov shapes and renders an animated flow diagram. No ports — purely visual.",
      parameters: {
        type: "object",
        properties: {
          title: { type: "string", description: "Visualizer title (e.g. 'Governance Flow')" },
        },
        required: ["title"],
      },
    },
    tagName: "folk-gov-sankey",
    moduleId: "rgov",
    buildProps: (args) => ({
      title: args.title,
    }),
    actionLabel: (args) => `Created Sankey visualizer: ${args.title}`,
  },
);
// Public registry of canvas tools. Seeded from the module-private `registry`
// array; registerCanvasTool() appends to this array at runtime.
export const CANVAS_TOOLS: CanvasToolDefinition[] = [...registry];
// Snapshot of every tool's declaration, computed once at module load.
// NOTE(review): tools added later via registerCanvasTool() are NOT reflected
// here — confirm callers of CANVAS_TOOL_DECLARATIONS don't expect live updates.
export const CANVAS_TOOL_DECLARATIONS = CANVAS_TOOLS.map((t) => t.declaration);
/**
 * Look up a registered canvas tool by its declaration name.
 * Returns the first matching tool, or undefined when none is registered.
 */
export function findTool(name: string): CanvasToolDefinition | undefined {
  for (const tool of CANVAS_TOOLS) {
    if (tool.declaration.name === name) {
      return tool;
    }
  }
  return undefined;
}
/**
 * Append a tool definition to the runtime registry.
 * NOTE(review): no duplicate-name check — a second registration with the same
 * declaration name is shadowed by the first in findTool(); confirm intended.
 */
export function registerCanvasTool(def: CanvasToolDefinition): void {
  CANVAS_TOOLS.push(def);
}
/**
 * Filter the tool registry down to tools whose module is enabled.
 * A null/undefined `enabledIds` means "everything enabled" and returns the
 * full registry. Tools with no moduleId are always included.
 */
export function getToolsForModules(enabledIds: string[] | null | undefined): CanvasToolDefinition[] {
  if (!enabledIds) return CANVAS_TOOLS;
  const allowed = new Set(enabledIds);
  const available: CanvasToolDefinition[] = [];
  for (const tool of CANVAS_TOOLS) {
    if (!tool.moduleId || allowed.has(tool.moduleId)) {
      available.push(tool);
    }
  }
  return available;
}

View File

@ -11,7 +11,6 @@ import { computeMembranePermeability } from "./connection-types";
import { makeChangeMessage, parseChangeMessage } from "../shared/local-first/change-message"; import { makeChangeMessage, parseChangeMessage } from "../shared/local-first/change-message";
import type { HistoryEntry } from "../shared/components/rstack-history-panel"; import type { HistoryEntry } from "../shared/components/rstack-history-panel";
import type { EventEntry } from "./event-bus"; import type { EventEntry } from "./event-bus";
import type { CommentPinData } from "../shared/comment-pin-types";
// Shape data stored in Automerge document // Shape data stored in Automerge document
export interface ShapeData { export interface ShapeData {
@ -50,15 +49,6 @@ export interface ShapeData {
[key: string]: unknown; [key: string]: unknown;
} }
// ── Undo/Redo entry ──
export interface UndoEntry {
shapeId: string;
before: ShapeData | null; // null = shape didn't exist (creation)
after: ShapeData | null; // null = shape hard-deleted
ts: number;
}
// ── Nested space types (client-side) ── // ── Nested space types (client-side) ──
export interface NestPermissions { export interface NestPermissions {
@ -137,12 +127,6 @@ export interface CommunityDoc {
layerViewMode?: "flat" | "stack"; layerViewMode?: "flat" | "stack";
/** Pub/sub event log — bounded ring buffer (last 100 entries) */ /** Pub/sub event log — bounded ring buffer (last 100 entries) */
eventLog?: EventEntry[]; eventLog?: EventEntry[];
/** Comment pins — Figma-style overlay markers */
commentPins?: { [pinId: string]: CommentPinData };
/** Saved applet templates (reusable wired shape groups) */
templates?: {
[templateId: string]: import("../shared/applet-types").AppletTemplateRecord;
};
} }
type SyncState = Automerge.SyncState; type SyncState = Automerge.SyncState;
@ -161,8 +145,6 @@ export class CommunitySync extends EventTarget {
#disconnectedIntentionally = false; #disconnectedIntentionally = false;
#communitySlug: string; #communitySlug: string;
#shapes: Map<string, FolkShape> = new Map(); #shapes: Map<string, FolkShape> = new Map();
#shapeListeners: Map<string, { transform: EventListener; content: EventListener }> = new Map();
#changeCount = 0;
#pendingChanges: boolean = false; #pendingChanges: boolean = false;
#reconnectAttempts = 0; #reconnectAttempts = 0;
#maxReconnectAttempts = 5; #maxReconnectAttempts = 5;
@ -170,15 +152,7 @@ export class CommunitySync extends EventTarget {
#offlineStore: OfflineStore | null = null; #offlineStore: OfflineStore | null = null;
#saveDebounceTimer: ReturnType<typeof setTimeout> | null = null; #saveDebounceTimer: ReturnType<typeof setTimeout> | null = null;
#syncedDebounceTimer: ReturnType<typeof setTimeout> | null = null; #syncedDebounceTimer: ReturnType<typeof setTimeout> | null = null;
#initialSyncFired = false;
#wsUrl: string | null = null; #wsUrl: string | null = null;
#localDID: string = '';
// ── Undo/Redo state ──
#undoStack: UndoEntry[] = [];
#redoStack: UndoEntry[] = [];
#maxUndoDepth = 50;
#isUndoRedoing = false;
constructor(communitySlug: string, offlineStore?: OfflineStore) { constructor(communitySlug: string, offlineStore?: OfflineStore) {
super(); super();
@ -202,11 +176,6 @@ export class CommunitySync extends EventTarget {
} }
} }
/** Set the local user's DID so forgotten-shape filtering is per-user. */
setLocalDID(did: string): void {
this.#localDID = did;
}
/** /**
* Load document and sync state from offline cache. * Load document and sync state from offline cache.
* Call BEFORE connect() to show cached content immediately. * Call BEFORE connect() to show cached content immediately.
@ -308,7 +277,6 @@ export class CommunitySync extends EventTarget {
this.#ws.onclose = () => { this.#ws.onclose = () => {
console.log(`[CommunitySync] Disconnected from ${this.#communitySlug}`); console.log(`[CommunitySync] Disconnected from ${this.#communitySlug}`);
this.#initialSyncFired = false;
this.dispatchEvent(new CustomEvent("disconnected")); this.dispatchEvent(new CustomEvent("disconnected"));
if (!this.#disconnectedIntentionally) { if (!this.#disconnectedIntentionally) {
@ -317,7 +285,7 @@ export class CommunitySync extends EventTarget {
}; };
this.#ws.onerror = (error) => { this.#ws.onerror = (error) => {
console.warn("[CommunitySync] WebSocket error (will reconnect):", error); console.error("[CommunitySync] WebSocket error:", error);
this.dispatchEvent(new CustomEvent("error", { detail: error })); this.dispatchEvent(new CustomEvent("error", { detail: error }));
}; };
} }
@ -538,32 +506,19 @@ export class CommunitySync extends EventTarget {
registerShape(shape: FolkShape): void { registerShape(shape: FolkShape): void {
this.#shapes.set(shape.id, shape); this.#shapes.set(shape.id, shape);
// Remove stale listeners if shape is re-registered // Listen for transform events
const old = this.#shapeListeners.get(shape.id); shape.addEventListener("folk-transform", ((e: CustomEvent) => {
if (old) {
shape.removeEventListener("folk-transform", old.transform);
shape.removeEventListener("content-change", old.content);
}
// Create named listener refs so they can be removed later
const transformListener = (() => {
this.#handleShapeChange(shape); this.#handleShapeChange(shape);
}) as EventListener; }) as EventListener);
const contentListener = (() => {
// Listen for content changes (for markdown shapes)
shape.addEventListener("content-change", ((e: CustomEvent) => {
this.#handleShapeChange(shape); this.#handleShapeChange(shape);
}) as EventListener; }) as EventListener);
this.#shapeListeners.set(shape.id, { transform: transformListener, content: contentListener });
shape.addEventListener("folk-transform", transformListener);
shape.addEventListener("content-change", contentListener);
// Add to document if not exists // Add to document if not exists
if (!this.#doc.shapes[shape.id]) { if (!this.#doc.shapes[shape.id]) {
this.#updateShapeInDoc(shape); this.#updateShapeInDoc(shape);
// Record creation for undo (before=null means shape was new)
const afterData = this.#cloneShapeData(shape.id);
this.#pushUndo(shape.id, null, afterData);
} }
} }
@ -571,13 +526,6 @@ export class CommunitySync extends EventTarget {
* Unregister a shape * Unregister a shape
*/ */
unregisterShape(shapeId: string): void { unregisterShape(shapeId: string): void {
const shape = this.#shapes.get(shapeId);
const listeners = this.#shapeListeners.get(shapeId);
if (shape && listeners) {
shape.removeEventListener("folk-transform", listeners.transform);
shape.removeEventListener("content-change", listeners.content);
}
this.#shapeListeners.delete(shapeId);
this.#shapes.delete(shapeId); this.#shapes.delete(shapeId);
} }
@ -597,7 +545,6 @@ export class CommunitySync extends EventTarget {
* Update shape data in Automerge document * Update shape data in Automerge document
*/ */
#updateShapeInDoc(shape: FolkShape): void { #updateShapeInDoc(shape: FolkShape): void {
const beforeData = this.#cloneShapeData(shape.id);
const shapeData = this.#shapeToData(shape); const shapeData = this.#shapeToData(shape);
this.#doc = Automerge.change(this.#doc, makeChangeMessage(`Update shape ${shape.id}`), (doc) => { this.#doc = Automerge.change(this.#doc, makeChangeMessage(`Update shape ${shape.id}`), (doc) => {
@ -605,17 +552,6 @@ export class CommunitySync extends EventTarget {
doc.shapes[shape.id] = JSON.parse(JSON.stringify(shapeData)); doc.shapes[shape.id] = JSON.parse(JSON.stringify(shapeData));
}); });
// Compact Automerge history periodically to prevent unbounded WASM heap growth
this.#changeCount++;
if (this.#changeCount % 500 === 0) {
this.#doc = Automerge.clone(this.#doc);
}
// Record for undo (skip if this is a brand-new shape — registerShape handles that)
if (beforeData) {
this.#pushUndo(shape.id, beforeData, this.#cloneShapeData(shape.id));
}
this.#scheduleSave(); this.#scheduleSave();
} }
@ -719,7 +655,6 @@ export class CommunitySync extends EventTarget {
* Three-state: present forgotten (faded) deleted * Three-state: present forgotten (faded) deleted
*/ */
forgetShape(shapeId: string, did: string): void { forgetShape(shapeId: string, did: string): void {
const beforeData = this.#cloneShapeData(shapeId);
this.#doc = Automerge.change(this.#doc, makeChangeMessage(`Forget shape ${shapeId}`), (doc) => { this.#doc = Automerge.change(this.#doc, makeChangeMessage(`Forget shape ${shapeId}`), (doc) => {
if (doc.shapes && doc.shapes[shapeId]) { if (doc.shapes && doc.shapes[shapeId]) {
const shape = doc.shapes[shapeId] as Record<string, unknown>; const shape = doc.shapes[shapeId] as Record<string, unknown>;
@ -733,13 +668,11 @@ export class CommunitySync extends EventTarget {
} }
}); });
this.#pushUndo(shapeId, beforeData, this.#cloneShapeData(shapeId));
// Don't remove from DOM — just update visual state // Don't remove from DOM — just update visual state
this.dispatchEvent(new CustomEvent("shape-state-changed", { this.dispatchEvent(new CustomEvent("shape-state-changed", {
detail: { shapeId, state: 'forgotten', data: this.#doc.shapes?.[shapeId] } detail: { shapeId, state: 'forgotten', data: this.#doc.shapes?.[shapeId] }
})); }));
this.#saveImmediate(); this.#scheduleSave();
this.#syncToServer(); this.#syncToServer();
} }
@ -750,7 +683,6 @@ export class CommunitySync extends EventTarget {
const shapeData = this.#doc.shapes?.[shapeId]; const shapeData = this.#doc.shapes?.[shapeId];
if (!shapeData) return; if (!shapeData) return;
const beforeData = this.#cloneShapeData(shapeId);
const wasDeleted = !!(shapeData as Record<string, unknown>).deleted; const wasDeleted = !!(shapeData as Record<string, unknown>).deleted;
this.#doc = Automerge.change(this.#doc, makeChangeMessage(`Remember shape ${shapeId}`), (doc) => { this.#doc = Automerge.change(this.#doc, makeChangeMessage(`Remember shape ${shapeId}`), (doc) => {
@ -764,8 +696,6 @@ export class CommunitySync extends EventTarget {
} }
}); });
this.#pushUndo(shapeId, beforeData, this.#cloneShapeData(shapeId));
if (wasDeleted) { if (wasDeleted) {
// Re-add to DOM if was hard-deleted // Re-add to DOM if was hard-deleted
this.#applyShapeToDOM(this.#doc.shapes[shapeId]); this.#applyShapeToDOM(this.#doc.shapes[shapeId]);
@ -774,7 +704,7 @@ export class CommunitySync extends EventTarget {
this.dispatchEvent(new CustomEvent("shape-state-changed", { this.dispatchEvent(new CustomEvent("shape-state-changed", {
detail: { shapeId, state: 'present', data: this.#doc.shapes?.[shapeId] } detail: { shapeId, state: 'present', data: this.#doc.shapes?.[shapeId] }
})); }));
this.#saveImmediate(); this.#scheduleSave();
this.#syncToServer(); this.#syncToServer();
} }
@ -783,66 +713,17 @@ export class CommunitySync extends EventTarget {
* Shape stays in Automerge doc for restore from memory panel. * Shape stays in Automerge doc for restore from memory panel.
*/ */
hardDeleteShape(shapeId: string): void { hardDeleteShape(shapeId: string): void {
const beforeData = this.#cloneShapeData(shapeId);
this.#doc = Automerge.change(this.#doc, makeChangeMessage(`Delete shape ${shapeId}`), (doc) => { this.#doc = Automerge.change(this.#doc, makeChangeMessage(`Delete shape ${shapeId}`), (doc) => {
if (doc.shapes && doc.shapes[shapeId]) { if (doc.shapes && doc.shapes[shapeId]) {
(doc.shapes[shapeId] as Record<string, unknown>).deleted = true; (doc.shapes[shapeId] as Record<string, unknown>).deleted = true;
} }
}); });
this.#pushUndo(shapeId, beforeData, null);
this.#removeShapeFromDOM(shapeId); this.#removeShapeFromDOM(shapeId);
this.dispatchEvent(new CustomEvent("shape-state-changed", { this.dispatchEvent(new CustomEvent("shape-state-changed", {
detail: { shapeId, state: 'deleted', data: this.#doc.shapes?.[shapeId] } detail: { shapeId, state: 'deleted', data: this.#doc.shapes?.[shapeId] }
})); }));
this.#saveImmediate(); this.#scheduleSave();
this.#syncToServer();
}
/**
* Bulk forget/delete shapes in a single Automerge transaction.
* Shapes already forgotten get hard-deleted; others get soft-forgotten.
*/
bulkForget(shapeIds: string[], did: string): void {
const changes: Array<{ id: string; before: ShapeData | null; action: 'forget' | 'delete' }> = [];
for (const id of shapeIds) {
const state = this.getShapeVisualState(id);
changes.push({ id, before: this.#cloneShapeData(id), action: state === 'forgotten' ? 'delete' : 'forget' });
}
this.#doc = Automerge.change(this.#doc, makeChangeMessage(`Bulk forget ${shapeIds.length} shapes`), (doc) => {
if (!doc.shapes) return;
for (const c of changes) {
const shape = doc.shapes[c.id] as Record<string, unknown> | undefined;
if (!shape) continue;
if (c.action === 'delete') {
shape.deleted = true;
} else {
if (!shape.forgottenBy || typeof shape.forgottenBy !== 'object') {
shape.forgottenBy = {};
}
(shape.forgottenBy as Record<string, number>)[did] = Date.now();
shape.forgotten = true;
shape.forgottenAt = Date.now();
}
}
});
// Post-transaction: undo stack, DOM updates, events
for (const c of changes) {
if (c.action === 'delete') {
this.#pushUndo(c.id, c.before, null);
this.#removeShapeFromDOM(c.id);
} else {
this.#pushUndo(c.id, c.before, this.#cloneShapeData(c.id));
}
this.dispatchEvent(new CustomEvent("shape-state-changed", {
detail: { shapeId: c.id, state: c.action === 'delete' ? 'deleted' : 'forgotten', data: this.#doc.shapes?.[c.id] }
}));
}
this.#saveImmediate();
this.#syncToServer(); this.#syncToServer();
} }
@ -909,20 +790,13 @@ export class CommunitySync extends EventTarget {
*/ */
#applyDocToDOM(): void { #applyDocToDOM(): void {
const shapes = this.#doc.shapes || {}; const shapes = this.#doc.shapes || {};
const validIds = new Set<string>();
for (const [id, shapeData] of Object.entries(shapes)) { for (const [id, shapeData] of Object.entries(shapes)) {
const d = shapeData as Record<string, unknown>; const d = shapeData as Record<string, unknown>;
if (d.deleted === true) continue; // Deleted: not in DOM if (d.deleted === true) continue; // Deleted: not in DOM
// Skip shapes this user has forgotten — one delete = gone from their view
const fb = d.forgottenBy;
if (this.#localDID && fb && typeof fb === 'object'
&& (fb as Record<string, number>)[this.#localDID]) {
continue;
}
validIds.add(id);
this.#applyShapeToDOM(shapeData); this.#applyShapeToDOM(shapeData);
// If forgotten by others (but not this user), emit state-changed for fade visual // If forgotten (faded), emit state-changed so canvas can apply visual
const fb = d.forgottenBy;
if (fb && typeof fb === 'object' && Object.keys(fb).length > 0) { if (fb && typeof fb === 'object' && Object.keys(fb).length > 0) {
this.dispatchEvent(new CustomEvent("shape-state-changed", { this.dispatchEvent(new CustomEvent("shape-state-changed", {
detail: { shapeId: id, state: 'forgotten', data: shapeData } detail: { shapeId: id, state: 'forgotten', data: shapeData }
@ -930,31 +804,17 @@ export class CommunitySync extends EventTarget {
} }
} }
// Prune stale DOM shapes that are deleted, forgotten, or no longer in the doc
for (const id of this.#shapes.keys()) {
if (!validIds.has(id)) {
this.#removeShapeFromDOM(id);
}
}
// Notify event bus if there are any events to process // Notify event bus if there are any events to process
if (this.#doc.eventLog && this.#doc.eventLog.length > 0) { if (this.#doc.eventLog && this.#doc.eventLog.length > 0) {
this.dispatchEvent(new CustomEvent("eventlog-changed")); this.dispatchEvent(new CustomEvent("eventlog-changed"));
} }
// Notify comment pin manager of any pin data
if (this.#doc.commentPins && Object.keys(this.#doc.commentPins).length > 0) {
this.dispatchEvent(new CustomEvent("comment-pins-changed"));
}
// Debounce the synced event — during initial sync negotiation, #applyDocToDOM() // Debounce the synced event — during initial sync negotiation, #applyDocToDOM()
// is called for every Automerge sync message (100+ round-trips). Debounce to // is called for every Automerge sync message (100+ round-trips). Debounce to
// fire once after the burst settles. Only fires once per connection cycle. // fire once after the burst settles.
if (this.#initialSyncFired) return;
if (this.#syncedDebounceTimer) clearTimeout(this.#syncedDebounceTimer); if (this.#syncedDebounceTimer) clearTimeout(this.#syncedDebounceTimer);
this.#syncedDebounceTimer = setTimeout(() => { this.#syncedDebounceTimer = setTimeout(() => {
this.#syncedDebounceTimer = null; this.#syncedDebounceTimer = null;
this.#initialSyncFired = true;
this.dispatchEvent(new CustomEvent("synced", { detail: { shapes } })); this.dispatchEvent(new CustomEvent("synced", { detail: { shapes } }));
}, 300); }, 300);
} }
@ -965,7 +825,6 @@ export class CommunitySync extends EventTarget {
*/ */
#applyPatchesToDOM(patches: Automerge.Patch[]): void { #applyPatchesToDOM(patches: Automerge.Patch[]): void {
let eventLogChanged = false; let eventLogChanged = false;
let commentPinsChanged = false;
for (const patch of patches) { for (const patch of patches) {
const path = patch.path; const path = patch.path;
@ -976,12 +835,6 @@ export class CommunitySync extends EventTarget {
continue; continue;
} }
// Detect commentPins changes
if (path[0] === "commentPins") {
commentPinsChanged = true;
continue;
}
// Handle shape updates: ["shapes", shapeId, ...] // Handle shape updates: ["shapes", shapeId, ...]
if (path[0] === "shapes" && typeof path[1] === "string") { if (path[0] === "shapes" && typeof path[1] === "string") {
const shapeId = path[1]; const shapeId = path[1];
@ -994,14 +847,6 @@ export class CommunitySync extends EventTarget {
const d = shapeData as Record<string, unknown>; const d = shapeData as Record<string, unknown>;
const state = this.getShapeVisualState(shapeId); const state = this.getShapeVisualState(shapeId);
// Skip shapes this user has forgotten — don't create/update DOM
const fb = d.forgottenBy;
if (this.#localDID && fb && typeof fb === 'object'
&& (fb as Record<string, number>)[this.#localDID]) {
this.#removeShapeFromDOM(shapeId);
continue;
}
if (state === 'deleted') { if (state === 'deleted') {
// Hard-deleted: remove from DOM // Hard-deleted: remove from DOM
this.#removeShapeFromDOM(shapeId); this.#removeShapeFromDOM(shapeId);
@ -1009,7 +854,7 @@ export class CommunitySync extends EventTarget {
detail: { shapeId, state: 'deleted', data: shapeData } detail: { shapeId, state: 'deleted', data: shapeData }
})); }));
} else if (state === 'forgotten') { } else if (state === 'forgotten') {
// Forgotten by others: keep in DOM, emit state change for fade visual // Forgotten: keep in DOM, emit state change for fade visual
this.#applyShapeToDOM(shapeData); this.#applyShapeToDOM(shapeData);
this.dispatchEvent(new CustomEvent("shape-state-changed", { this.dispatchEvent(new CustomEvent("shape-state-changed", {
detail: { shapeId, state: 'forgotten', data: shapeData } detail: { shapeId, state: 'forgotten', data: shapeData }
@ -1031,11 +876,6 @@ export class CommunitySync extends EventTarget {
if (eventLogChanged) { if (eventLogChanged) {
this.dispatchEvent(new CustomEvent("eventlog-changed")); this.dispatchEvent(new CustomEvent("eventlog-changed"));
} }
// Notify comment pin manager of remote pin changes
if (commentPinsChanged) {
this.dispatchEvent(new CustomEvent("comment-pins-changed"));
}
} }
/** /**
@ -1140,18 +980,6 @@ export class CommunitySync extends EventTarget {
}, 2000); }, 2000);
} }
/** Flush doc to IndexedDB immediately (no debounce). Use for destructive ops. */
#saveImmediate(): void {
if (!this.#offlineStore) return;
if (this.#saveDebounceTimer) {
clearTimeout(this.#saveDebounceTimer);
this.#saveDebounceTimer = null;
}
const binary = Automerge.save(this.#doc);
this.#offlineStore.saveDocImmediate(this.#communitySlug, binary);
this.#offlineStore.saveDocEmergency(this.#communitySlug, binary);
}
#persistSyncState(): void { #persistSyncState(): void {
if (!this.#offlineStore) return; if (!this.#offlineStore) return;
@ -1163,132 +991,6 @@ export class CommunitySync extends EventTarget {
} }
} }
// ── Undo/Redo API ──
/**
* Record an undo entry. Batches rapid changes to the same shape (<500ms)
* by keeping the original `before` and updating the timestamp.
*/
#pushUndo(shapeId: string, before: ShapeData | null, after: ShapeData | null): void {
if (this.#isUndoRedoing) return;
const now = Date.now();
const top = this.#undoStack[this.#undoStack.length - 1];
// Batch: same shape within 500ms — keep original `before`, update after + ts
if (top && top.shapeId === shapeId && (now - top.ts) < 500) {
top.after = after;
top.ts = now;
return;
}
this.#undoStack.push({ shapeId, before, after, ts: now });
if (this.#undoStack.length > this.#maxUndoDepth) {
this.#undoStack.shift();
}
// Any new change clears redo
this.#redoStack.length = 0;
}
/** Deep-clone shape data from the Automerge doc (returns null if absent). */
#cloneShapeData(shapeId: string): ShapeData | null {
const data = this.#doc.shapes?.[shapeId];
if (!data) return null;
return JSON.parse(JSON.stringify(data));
}
/** Undo the last local shape operation. */
undo(): void {
const entry = this.#undoStack.pop();
if (!entry) return;
this.#isUndoRedoing = true;
try {
if (entry.before === null) {
// Was a creation — soft-delete (forget) the shape
if (this.#doc.shapes?.[entry.shapeId]) {
this.forgetShape(entry.shapeId, 'undo');
// Snapshot after for redo
entry.after = this.#cloneShapeData(entry.shapeId);
}
} else if (entry.after === null || (entry.after as Record<string, unknown>).deleted === true) {
// Was a hard-delete — restore via rememberShape
this.rememberShape(entry.shapeId);
// Also restore full data if we have it
if (entry.before) {
this.#doc = Automerge.change(this.#doc, makeChangeMessage(`Undo delete ${entry.shapeId}`), (doc) => {
if (doc.shapes && doc.shapes[entry.shapeId]) {
const restored = JSON.parse(JSON.stringify(entry.before));
for (const [key, value] of Object.entries(restored)) {
(doc.shapes[entry.shapeId] as Record<string, unknown>)[key] = value;
}
}
});
const shape = this.#shapes.get(entry.shapeId);
if (shape) this.#updateShapeElement(shape, entry.before);
}
} else if ((entry.after as Record<string, unknown>).forgottenBy &&
Object.keys((entry.after as Record<string, unknown>).forgottenBy as Record<string, unknown>).length > 0) {
// Was a forget — restore via rememberShape
this.rememberShape(entry.shapeId);
} else {
// Was a property change — restore `before` data
this.#doc = Automerge.change(this.#doc, makeChangeMessage(`Undo ${entry.shapeId}`), (doc) => {
if (doc.shapes) {
doc.shapes[entry.shapeId] = JSON.parse(JSON.stringify(entry.before));
}
});
const shape = this.#shapes.get(entry.shapeId);
if (shape && entry.before) this.#updateShapeElement(shape, entry.before);
}
this.#redoStack.push(entry);
this.#scheduleSave();
this.#syncToServer();
} finally {
this.#isUndoRedoing = false;
}
}
/** Redo the last undone operation. */
redo(): void {
const entry = this.#redoStack.pop();
if (!entry) return;
this.#isUndoRedoing = true;
try {
if (entry.before === null && entry.after) {
// Was a creation that got undone (forgotten) — remember it back
this.rememberShape(entry.shapeId);
} else if (entry.after === null || (entry.after as Record<string, unknown>).deleted === true) {
// Re-delete
this.hardDeleteShape(entry.shapeId);
} else if ((entry.after as Record<string, unknown>).forgottenBy &&
Object.keys((entry.after as Record<string, unknown>).forgottenBy as Record<string, unknown>).length > 0) {
// Re-forget
this.forgetShape(entry.shapeId, 'undo');
} else {
// Re-apply `after` data
this.#doc = Automerge.change(this.#doc, makeChangeMessage(`Redo ${entry.shapeId}`), (doc) => {
if (doc.shapes) {
doc.shapes[entry.shapeId] = JSON.parse(JSON.stringify(entry.after));
}
});
const shape = this.#shapes.get(entry.shapeId);
if (shape && entry.after) this.#updateShapeElement(shape, entry.after);
}
this.#undoStack.push(entry);
this.#scheduleSave();
this.#syncToServer();
} finally {
this.#isUndoRedoing = false;
}
}
get canUndo(): boolean { return this.#undoStack.length > 0; }
get canRedo(): boolean { return this.#redoStack.length > 0; }
// ── Layer & Flow API ── // ── Layer & Flow API ──
/** Add a layer to the document */ /** Add a layer to the document */
@ -1340,8 +1042,13 @@ export class CommunitySync extends EventTarget {
this.#syncToServer(); this.#syncToServer();
} }
/** Set active layer — local-only, never broadcast to other tabs/devices */ /** Set active layer */
setActiveLayer(layerId: string): void { setActiveLayer(layerId: string): void {
this.#doc = Automerge.change(this.#doc, makeChangeMessage(`Switch to layer ${layerId}`), (doc) => {
doc.activeLayerId = layerId;
});
this.#scheduleSave();
this.#syncToServer();
this.dispatchEvent(new CustomEvent("active-layer-changed", { detail: { layerId } })); this.dispatchEvent(new CustomEvent("active-layer-changed", { detail: { layerId } }));
} }
@ -1538,33 +1245,6 @@ export class CommunitySync extends EventTarget {
return Automerge.view(this.#doc, heads); return Automerge.view(this.#doc, heads);
} }
/**
* Revert document content to the state at a given change hash.
* Creates a forward change (preserving full history) and syncs to peers.
* Meta (space name/slug/config) is preserved only content is reverted.
*/
revertToHash(hash: string): void {
const snapshot = Automerge.view(this.#doc, [hash]);
const data = JSON.parse(JSON.stringify(snapshot)) as CommunityDoc;
const newDoc = Automerge.change(this.#doc, makeChangeMessage(`Revert to change ${hash.slice(0, 8)}`), (doc) => {
doc.shapes = data.shapes || {};
if (data.layers) doc.layers = data.layers;
if (data.flows) doc.flows = data.flows;
if (data.connections) doc.connections = data.connections;
if (data.groups) doc.groups = data.groups;
if (data.nestedSpaces) doc.nestedSpaces = data.nestedSpaces;
if (data.activeLayerId !== undefined) doc.activeLayerId = data.activeLayerId;
if (data.layerViewMode !== undefined) doc.layerViewMode = data.layerViewMode;
if (data.commentPins) doc.commentPins = data.commentPins;
// Preserve meta — don't revert space name/slug/config
});
this._applyDocChange(newDoc);
this.#undoStack.length = 0;
this.#redoStack.length = 0;
}
/** /**
* Get parsed history entries for the activity feed. * Get parsed history entries for the activity feed.
*/ */

View File

@ -1,133 +0,0 @@
/**
* Gemini function declarations for the design agent.
* These map to Scribus bridge commands executed via the Python bridge server.
*/
// Function declarations in Gemini's function-calling schema format; each entry
// maps 1:1 to a Scribus bridge command (see module docstring).
// NOTE(review): the array is not `as const`, so every `name` widens to
// `string` and the derived DesignToolName alias collapses to plain `string` —
// confirm whether a literal-name union was intended before tightening.
export const DESIGN_TOOL_DECLARATIONS = [
  // Create/initialize the working document — must run before any frame/shape tool.
  {
    name: "new_document",
    description: "Create a new Scribus document with specified dimensions and margins.",
    parameters: {
      type: "object",
      properties: {
        width: { type: "number", description: "Document width in mm (default: 210 for A4)" },
        height: { type: "number", description: "Document height in mm (default: 297 for A4)" },
        margins: { type: "number", description: "Page margins in mm (default: 10)" },
        pages: { type: "integer", description: "Number of pages (default: 1)" },
      },
      required: [],
    },
  },
  // Text frame; font/size optional, position+size required.
  {
    name: "add_text_frame",
    description: "Add a text frame to the page at the specified position. Coordinates and dimensions in mm from top-left.",
    parameters: {
      type: "object",
      properties: {
        x: { type: "number", description: "X position in mm from left edge" },
        y: { type: "number", description: "Y position in mm from top edge" },
        width: { type: "number", description: "Frame width in mm" },
        height: { type: "number", description: "Frame height in mm" },
        text: { type: "string", description: "Text content for the frame" },
        fontSize: { type: "number", description: "Font size in points (default: 12)" },
        fontName: { type: "string", description: "Font name. Safe fonts: Liberation Sans, Liberation Serif, DejaVu Sans" },
        name: { type: "string", description: "Optional frame name for later reference" },
      },
      required: ["x", "y", "width", "height"],
    },
  },
  // Image frame; loads imagePath into the frame when provided.
  {
    name: "add_image_frame",
    description: "Add an image frame to the page. If imagePath is provided, the image will be loaded into the frame.",
    parameters: {
      type: "object",
      properties: {
        x: { type: "number", description: "X position in mm from left edge" },
        y: { type: "number", description: "Y position in mm from top edge" },
        width: { type: "number", description: "Frame width in mm" },
        height: { type: "number", description: "Frame height in mm" },
        imagePath: { type: "string", description: "Path to image file to load into frame" },
        name: { type: "string", description: "Optional frame name for later reference" },
      },
      required: ["x", "y", "width", "height"],
    },
  },
  // Rectangle or ellipse primitive with optional fill color.
  {
    name: "add_shape",
    description: "Add a geometric shape (rectangle or ellipse) to the page.",
    parameters: {
      type: "object",
      properties: {
        shapeType: { type: "string", description: "Shape type: 'rect' or 'ellipse'", enum: ["rect", "ellipse"] },
        x: { type: "number", description: "X position in mm from left edge" },
        y: { type: "number", description: "Y position in mm from top edge" },
        width: { type: "number", description: "Shape width in mm" },
        height: { type: "number", description: "Shape height in mm" },
        fill: { type: "string", description: "Fill color as hex string (e.g. '#ff6600')" },
        name: { type: "string", description: "Optional shape name for later reference" },
      },
      required: ["x", "y", "width", "height"],
    },
  },
  // Background color is implemented as a full-page rectangle (per description).
  {
    name: "set_background_color",
    description: "Set the page background color by creating a full-page rectangle.",
    parameters: {
      type: "object",
      properties: {
        color: { type: "string", description: "Background color as hex string (e.g. '#1a1a2e')" },
      },
      required: ["color"],
    },
  },
  // Read-only inspection of current document state (pages + frames).
  {
    name: "get_state",
    description: "Get the current document state including all pages and frames. Use this to verify layout after making changes.",
    parameters: {
      type: "object",
      properties: {},
      required: [],
    },
  },
  // Persist the document as a .sla file; `space` selects the save directory.
  {
    name: "save_document",
    description: "Save the current document as a .sla file.",
    parameters: {
      type: "object",
      properties: {
        space: { type: "string", description: "Space slug for the save directory" },
        filename: { type: "string", description: "Filename for the .sla file" },
      },
      required: ["filename"],
    },
  },
  // AI image generation (fal.ai) placed directly into an image frame.
  {
    name: "generate_image",
    description: "Generate an AI image from a text prompt using fal.ai and place it in an image frame on the page.",
    parameters: {
      type: "object",
      properties: {
        prompt: { type: "string", description: "Text prompt describing the image to generate" },
        x: { type: "number", description: "X position for the image frame in mm" },
        y: { type: "number", description: "Y position for the image frame in mm" },
        width: { type: "number", description: "Image frame width in mm" },
        height: { type: "number", description: "Image frame height in mm" },
      },
      required: ["prompt", "x", "y", "width", "height"],
    },
  },
];
// Union of declared tool names.
// NOTE(review): because DESIGN_TOOL_DECLARATIONS is not `as const`, each
// `name` is typed as `string`, so this alias currently resolves to plain
// `string` rather than a literal union — confirm whether that is intended.
export type DesignToolName = (typeof DESIGN_TOOL_DECLARATIONS)[number]["name"];
// System prompt steering the Gemini design agent through a fixed workflow
// (create doc → text hierarchy → images → shapes → verify → save).
export const DESIGN_SYSTEM_PROMPT = `You are a professional graphic designer using Scribus DTP software. Given a design brief:
1. Create a document with appropriate dimensions
2. Establish visual hierarchy with text frames (heading > subheading > body)
3. Place image frames for visual elements
4. Add geometric shapes for structure and decoration
5. Verify layout with get_state
6. Save the document
Coordinates are in mm from top-left. Safe fonts: Liberation Sans, Liberation Serif, DejaVu Sans.
Minimum margins: 10mm. Standard sizes: A4 (210x297), A5 (148x210), Letter (216x279).
Always create the document first before adding frames.`;

View File

@ -1,84 +0,0 @@
/**
* extractArtifactToCanvas Pull a generated artifact out of a generator shape
* and place it as a standalone canvas object (folk-image, folk-embed, or folk-bookmark).
*/
import type { FolkShape } from "./folk-shape";
export type ArtifactMediaType = "image" | "video" | "pdf" | "download";
interface ExtractOptions {
url: string;
mediaType: ArtifactMediaType;
title?: string;
sourceShape: FolkShape;
}
interface CanvasApi {
newShape: (tagName: string, props?: Record<string, any>, atPosition?: { x: number; y: number }) => any;
findFreePosition: (w: number, h: number, px?: number, py?: number, exclude?: any) => { x: number; y: number };
SHAPE_DEFAULTS: Record<string, { width: number; height: number }>;
}
const TAG_MAP: Record<ArtifactMediaType, string> = {
image: "folk-image",
video: "folk-embed",
pdf: "folk-embed",
download: "folk-bookmark",
};
export function extractArtifactToCanvas({ url, mediaType, title, sourceShape }: ExtractOptions): boolean {
const api = (window as any).__canvasApi as CanvasApi | undefined;
if (!api) {
console.warn("[extract-artifact] Canvas API not available");
return false;
}
const tagName = TAG_MAP[mediaType];
const defaults = api.SHAPE_DEFAULTS[tagName] || { width: 400, height: 300 };
// Position to the right of the source shape
const preferX = sourceShape.x + sourceShape.width + 40 + defaults.width / 2;
const preferY = sourceShape.y + sourceShape.height / 2;
const pos = api.findFreePosition(defaults.width, defaults.height, preferX, preferY, sourceShape);
const props: Record<string, any> = {};
if (mediaType === "image") {
props.src = url;
if (title) props.alt = title;
} else if (mediaType === "video" || mediaType === "pdf") {
props.url = url;
} else {
props.url = url;
if (title) props.title = title;
}
api.newShape(tagName, props, { x: pos.x + defaults.width / 2, y: pos.y + defaults.height / 2 });
return true;
}
/** CSS for the extract button — inject into each component's stylesheet */
export const extractBtnCss = `
.extract-btn {
position: absolute;
top: 6px;
right: 6px;
padding: 3px 8px;
background: rgba(0, 0, 0, 0.6);
color: white;
border: none;
border-radius: 12px;
font-size: 12px;
cursor: pointer;
opacity: 0;
transition: opacity 0.15s;
z-index: 1;
line-height: 1;
}
.image-item:hover .extract-btn,
.video-item:hover .extract-btn,
.section:hover .extract-btn,
.render-preview:hover .extract-btn,
.preview-area:hover > .extract-btn { opacity: 1; }
.extract-btn:hover { background: rgba(0, 0, 0, 0.8); }
`;

View File

@ -1,496 +0,0 @@
/**
* folk-applet Generic rApplet shape for the canvas.
*
* Compact mode (default): 300×200 card with module-provided HTML body + port indicators.
* Expanded mode: 600×400 with applet-circuit-canvas sub-graph or iframe fallback.
*
* Persisted fields: moduleId, appletId, instanceConfig, mode.
* Live data arrives via updateLiveData() no direct module imports.
*/
import { FolkShape } from "./folk-shape";
import { css, html } from "./tags";
import { dataTypeColor } from "./data-types";
import type { PortDescriptor } from "./data-types";
import type { AppletDefinition, AppletLiveData, AppletContext } from "../shared/applet-types";
// ── Applet registry (populated by modules at init) ──
const appletDefs = new Map<string, AppletDefinition>();
/** Register an applet definition. Key = "moduleId:appletId". */
export function registerAppletDef(moduleId: string, def: AppletDefinition): void {
appletDefs.set(`${moduleId}:${def.id}`, def);
}
/** Look up a registered applet definition. */
export function getAppletDef(moduleId: string, appletId: string): AppletDefinition | undefined {
return appletDefs.get(`${moduleId}:${appletId}`);
}
/** List all registered applet definitions. */
export function listAppletDefs(): Array<{ moduleId: string; def: AppletDefinition }> {
const result: Array<{ moduleId: string; def: AppletDefinition }> = [];
for (const [key, def] of appletDefs) {
const moduleId = key.split(":")[0];
result.push({ moduleId, def });
}
return result;
}
// ── Styles ──
const COMPACT_W = 300;
const COMPACT_H = 200;
const EXPANDED_W = 600;
const EXPANDED_H = 400;
const styles = css`
:host {
background: var(--rs-bg-surface, #1e293b);
border-radius: 10px;
box-shadow: 0 2px 12px rgba(0, 0, 0, 0.25);
overflow: visible;
}
.applet-wrapper {
width: 100%;
height: 100%;
display: flex;
flex-direction: column;
position: relative;
}
.header {
display: flex;
align-items: center;
justify-content: space-between;
padding: 8px 12px;
color: white;
font-size: 12px;
font-weight: 600;
cursor: move;
border-radius: 10px 10px 0 0;
min-height: 32px;
}
.header-title {
display: flex;
align-items: center;
gap: 6px;
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
}
.header-actions {
display: flex;
gap: 2px;
}
.header-actions button {
background: transparent;
border: none;
color: white;
cursor: pointer;
padding: 2px 6px;
border-radius: 4px;
font-size: 14px;
line-height: 1;
}
.header-actions button:hover {
background: rgba(255, 255, 255, 0.2);
}
.body {
flex: 1;
padding: 12px;
overflow: hidden;
font-size: 12px;
color: var(--rs-text-primary, #e2e8f0);
border-radius: 0 0 10px 10px;
}
.body-empty {
display: flex;
align-items: center;
justify-content: center;
color: var(--rs-text-muted, #64748b);
font-style: italic;
}
/* Port chips on edges */
.port-chip {
position: absolute;
display: flex;
align-items: center;
gap: 4px;
padding: 1px 6px;
border-radius: 8px;
border: 1.5px solid;
background: var(--rs-bg-surface, #1e293b);
font-size: 9px;
color: var(--rs-text-muted, #94a3b8);
white-space: nowrap;
cursor: crosshair;
z-index: 2;
transform: translateY(-50%);
transition: filter 0.15s;
}
.port-chip:hover {
filter: brightness(1.3);
}
.port-chip.input {
left: -2px;
}
.port-chip.output {
right: -2px;
flex-direction: row-reverse;
}
.chip-dot {
width: 6px;
height: 6px;
border-radius: 50%;
flex-shrink: 0;
}
/* Expanded mode circuit container */
.circuit-container {
flex: 1;
border-radius: 0 0 10px 10px;
overflow: hidden;
}
.circuit-container applet-circuit-canvas {
width: 100%;
height: 100%;
}
`;
declare global {
interface HTMLElementTagNameMap {
"folk-applet": FolkApplet;
}
}
export class FolkApplet extends FolkShape {
static override tagName = "folk-applet";
// Dynamic port descriptors set from the applet definition
static override portDescriptors: PortDescriptor[] = [];
static {
const sheet = new CSSStyleSheet();
const parentRules = Array.from(FolkShape.styles.cssRules).map(r => r.cssText).join("\n");
const childRules = Array.from(styles.cssRules).map(r => r.cssText).join("\n");
sheet.replaceSync(`${parentRules}\n${childRules}`);
this.styles = sheet;
}
#moduleId = "";
#appletId = "";
#mode: "compact" | "expanded" = "compact";
#instanceConfig: Record<string, unknown> = {};
#liveData: AppletLiveData | null = null;
// DOM refs
#bodyEl!: HTMLElement;
#wrapper!: HTMLElement;
// Instance-level port descriptors (override static)
#instancePorts: PortDescriptor[] = [];
// Live data polling timer
#liveDataTimer: ReturnType<typeof setInterval> | null = null;
get moduleId() { return this.#moduleId; }
set moduleId(v: string) {
this.#moduleId = v;
this.#syncDefPorts();
}
get appletId() { return this.#appletId; }
set appletId(v: string) {
this.#appletId = v;
this.#syncDefPorts();
}
get mode() { return this.#mode; }
set mode(v: "compact" | "expanded") {
if (this.#mode === v) return;
this.#mode = v;
this.#updateMode();
}
get instanceConfig() { return this.#instanceConfig; }
set instanceConfig(v: Record<string, unknown>) { this.#instanceConfig = v; }
/** Sync port descriptors from the applet definition. */
#syncDefPorts(): void {
const def = getAppletDef(this.#moduleId, this.#appletId);
if (def) {
this.#instancePorts = def.ports;
}
}
/** Override: use instance ports instead of static. */
override getInputPorts(): PortDescriptor[] {
return this.#instancePorts.filter(p => p.direction === "input");
}
override getOutputPorts(): PortDescriptor[] {
return this.#instancePorts.filter(p => p.direction === "output");
}
override getPort(name: string): PortDescriptor | undefined {
return this.#instancePorts.find(p => p.name === name);
}
/** Bridge FolkArrow piping → applet def's onInputReceived. */
override setPortValue(name: string, value: unknown): void {
super.setPortValue(name, value);
const port = this.getPort(name);
if (port?.direction !== "input") return;
const def = getAppletDef(this.#moduleId, this.#appletId);
if (!def?.onInputReceived) return;
const ctx: AppletContext = {
space: (this.closest("[space]") as any)?.getAttribute("space") || "",
shapeId: this.id,
emitOutput: (portName, val) => super.setPortValue(portName, val),
};
def.onInputReceived(name, value, ctx);
this.#renderBody();
}
/** Update live data and re-render compact body. */
updateLiveData(snapshot: Record<string, unknown>): void {
this.#liveData = {
space: (this.closest("[space]") as any)?.getAttribute("space") || "",
moduleId: this.#moduleId,
appletId: this.#appletId,
snapshot,
outputValues: {},
};
this.#renderBody();
}
override createRenderRoot() {
const root = super.createRenderRoot();
this.#syncDefPorts();
this.initPorts();
const def = getAppletDef(this.#moduleId, this.#appletId);
const accentColor = def?.accentColor || "#475569";
const icon = def?.icon || "📦";
const label = def?.label || this.#appletId;
this.#wrapper = document.createElement("div");
this.#wrapper.className = "applet-wrapper";
this.#wrapper.innerHTML = html`
<div class="header" data-drag style="background: ${accentColor}">
<span class="header-title">${icon} ${label}</span>
<span class="header-actions">
<button class="expand-btn" title="Toggle expanded"></button>
<button class="close-btn" title="Close">&times;</button>
</span>
</div>
<div class="body body-empty">Loading...</div>
`;
const slot = root.querySelector("slot");
const container = slot?.parentElement as HTMLElement;
if (container) container.replaceWith(this.#wrapper);
this.#bodyEl = this.#wrapper.querySelector(".body") as HTMLElement;
// Wire events
this.#wrapper.querySelector(".expand-btn")!.addEventListener("click", (e) => {
e.stopPropagation();
this.mode = this.#mode === "compact" ? "expanded" : "compact";
this.dispatchEvent(new CustomEvent("content-change"));
});
this.#wrapper.querySelector(".close-btn")!.addEventListener("click", (e) => {
e.stopPropagation();
this.dispatchEvent(new CustomEvent("close"));
});
// Render port indicators
this.#renderPorts();
// Render initial body
this.#renderBody();
// Notify canvas we want live data
this.dispatchEvent(new CustomEvent("applet-subscribe", {
bubbles: true,
detail: { moduleId: this.#moduleId, appletId: this.#appletId, shapeId: this.id },
}));
// Start self-fetch polling if the applet defines fetchLiveData
this.#startLiveDataPolling();
return root;
}
disconnectedCallback() {
if (this.#liveDataTimer) {
clearInterval(this.#liveDataTimer);
this.#liveDataTimer = null;
}
}
#startLiveDataPolling(): void {
const def = getAppletDef(this.#moduleId, this.#appletId);
if (!def?.fetchLiveData) return;
const space = (this.closest("[space]") as any)?.getAttribute("space") || "";
const doFetch = () => {
def.fetchLiveData!(space).then(snapshot => {
this.updateLiveData(snapshot);
}).catch(() => {});
};
// Fetch immediately, then every 30s
doFetch();
this.#liveDataTimer = setInterval(doFetch, 30_000);
}
#renderPorts(): void {
this.#wrapper.querySelectorAll(".port-chip").forEach(el => el.remove());
const renderChips = (ports: PortDescriptor[], dir: "input" | "output") => {
ports.forEach((port, i) => {
const yPct = ((i + 1) / (ports.length + 1)) * 100;
const color = dataTypeColor(port.type);
const chip = document.createElement("div");
chip.className = `port-chip ${dir}`;
chip.style.top = `${yPct}%`;
chip.style.borderColor = color;
chip.dataset.portName = port.name;
chip.dataset.portDir = dir;
chip.title = `${port.name} (${port.type})`;
const dot = document.createElement("span");
dot.className = "chip-dot";
dot.style.background = color;
const label = document.createTextNode(port.name);
chip.appendChild(dot);
chip.appendChild(label);
this.#wrapper.appendChild(chip);
});
};
renderChips(this.getInputPorts(), "input");
renderChips(this.getOutputPorts(), "output");
}
#renderBody(): void {
if (!this.#bodyEl) return;
const def = getAppletDef(this.#moduleId, this.#appletId);
if (!def) {
this.#bodyEl.className = "body body-empty";
this.#bodyEl.textContent = `Unknown applet: ${this.#moduleId}:${this.#appletId}`;
return;
}
if (this.#mode === "expanded" && def.getCircuit) {
this.#renderExpanded(def);
return;
}
// Compact mode — module-provided HTML
const data: AppletLiveData = this.#liveData || {
space: "",
moduleId: this.#moduleId,
appletId: this.#appletId,
snapshot: {},
outputValues: {},
};
try {
const bodyHtml = def.renderCompact(data);
this.#bodyEl.className = "body";
this.#bodyEl.innerHTML = bodyHtml;
} catch (err) {
this.#bodyEl.className = "body body-empty";
this.#bodyEl.textContent = `Render error: ${err}`;
}
}
#renderExpanded(def: AppletDefinition): void {
if (!def.getCircuit) return;
const space = (this.closest("[space]") as any)?.getAttribute("space") || "";
const { nodes, edges } = def.getCircuit(space);
this.#bodyEl.className = "body circuit-container";
this.#bodyEl.innerHTML = "";
const canvas = document.createElement("applet-circuit-canvas") as any;
canvas.nodes = nodes;
canvas.edges = edges;
this.#bodyEl.appendChild(canvas);
}
#updateMode(): void {
if (!this.#wrapper) return;
if (this.#mode === "expanded") {
this.width = EXPANDED_W;
this.height = EXPANDED_H;
} else {
this.width = COMPACT_W;
this.height = COMPACT_H;
}
this.#renderBody();
// Update expand button icon
const btn = this.#wrapper.querySelector(".expand-btn");
if (btn) btn.textContent = this.#mode === "expanded" ? "⊟" : "⊞";
}
// ── Serialization ──
override toJSON() {
return {
...super.toJSON(),
type: "folk-applet",
moduleId: this.#moduleId,
appletId: this.#appletId,
mode: this.#mode,
instanceConfig: this.#instanceConfig,
};
}
static override fromData(data: Record<string, any>): FolkApplet {
const shape = FolkShape.fromData.call(this, data) as FolkApplet;
if (data.moduleId) shape.moduleId = data.moduleId;
if (data.appletId) shape.appletId = data.appletId;
if (data.mode) shape.mode = data.mode;
if (data.instanceConfig) shape.instanceConfig = data.instanceConfig;
return shape;
}
override applyData(data: Record<string, any>): void {
super.applyData(data);
if (data.moduleId !== undefined && data.moduleId !== this.#moduleId) this.moduleId = data.moduleId;
if (data.appletId !== undefined && data.appletId !== this.#appletId) this.appletId = data.appletId;
if (data.mode !== undefined && data.mode !== this.#mode) this.mode = data.mode;
if (data.instanceConfig !== undefined) this.instanceConfig = data.instanceConfig;
}
}

View File

@ -138,7 +138,7 @@ export type ArrowStyle = "smooth" | "straight" | "curved" | "sketchy";
export interface ArrowGate { export interface ArrowGate {
shapeId: string; // governance shape ID shapeId: string; // governance shape ID
portName: string; // port to watch (e.g. "decision-out") portName: string; // port to watch (e.g. "decision-out")
condition: "truthy" | "passed" | "threshold" | "satisfied"; condition: "truthy" | "passed" | "threshold";
threshold?: number; threshold?: number;
} }
@ -464,8 +464,6 @@ export class FolkArrow extends FolkElement {
const v = value as any; const v = value as any;
const num = typeof v === "number" ? v : (v?.margin ?? v?.score ?? 0); const num = typeof v === "number" ? v : (v?.margin ?? v?.score ?? 0);
this.#gateOpen = num >= (this.#gate.threshold ?? 0.5); this.#gateOpen = num >= (this.#gate.threshold ?? 0.5);
} else if (this.#gate.condition === "satisfied") {
this.#gateOpen = (value as any)?.satisfied === true;
} }
if (wasOpen !== this.#gateOpen) this.#updateArrow(); if (wasOpen !== this.#gateOpen) this.#updateArrow();

Some files were not shown because too many files have changed in this diff Show More