Compare commits
323 Commits
main
...
add-runpod
| Author | SHA1 | Date |
|---|---|---|
|
|
083095c821 | |
|
|
05197f8430 | |
|
|
b52b715340 | |
|
|
0882648565 | |
|
|
f963152238 | |
|
|
c2a56f08e1 | |
|
|
88a162d6ae | |
|
|
48dac00f59 | |
|
|
776526c65e | |
|
|
64d4a65613 | |
|
|
432e90d9ef | |
|
|
52c1af6864 | |
|
|
71e7e5de05 | |
|
|
9b7cde262a | |
|
|
f0f7c47775 | |
|
|
90605bee09 | |
|
|
7a17b0944a | |
|
|
f5582fc7d1 | |
|
|
f66fac74d0 | |
|
|
4d7b05efa2 | |
|
|
ab1d0344a5 | |
|
|
04f6fe5192 | |
|
|
2528ad4726 | |
|
|
ffef04df50 | |
|
|
5fd83944fc | |
|
|
a3950baf17 | |
|
|
ef4a84e8f1 | |
|
|
d1179169cc | |
|
|
0e90e2d097 | |
|
|
eafbf6c9fe | |
|
|
edbe76ebda | |
|
|
ef39328d95 | |
|
|
229f4d6b41 | |
|
|
0fa1652f72 | |
|
|
1b172d7529 | |
|
|
c1df50c49b | |
|
|
053bd95d4a | |
|
|
73ac456e17 | |
|
|
92cac8dee5 | |
|
|
b8fb64c01b | |
|
|
680b6a5359 | |
|
|
fec80ddd18 | |
|
|
5b32184012 | |
|
|
be5f1a5a3a | |
|
|
bf5d214e45 | |
|
|
f8e4fa3802 | |
|
|
a063abdf77 | |
|
|
04135a5487 | |
|
|
5e11183557 | |
|
|
b5463d4d64 | |
|
|
bda2523e3b | |
|
|
3072dc70c0 | |
|
|
62afed445e | |
|
|
f2b05a8fe6 | |
|
|
0a34c0ab3e | |
|
|
0c2ca28d0e | |
|
|
5cfa2d683c | |
|
|
b5785f059f | |
|
|
fa6b874313 | |
|
|
657df72534 | |
|
|
9664439f31 | |
|
|
8cce96ea20 | |
|
|
5375f63e70 | |
|
|
663c845cab | |
|
|
a82f8faa00 | |
|
|
065a3b3483 | |
|
|
f688851764 | |
|
|
5c99a82c14 | |
|
|
39c1e2251b | |
|
|
8a8568d042 | |
|
|
822b979864 | |
|
|
067dae1ba6 | |
|
|
d1ad51c8ab | |
|
|
d3f2029521 | |
|
|
119146e094 | |
|
|
38d1f28e35 | |
|
|
4815fa4a23 | |
|
|
f8e4647e1a | |
|
|
368732e3b1 | |
|
|
719a4eb918 | |
|
|
8fa8c388d9 | |
|
|
356a262114 | |
|
|
1abeeaea10 | |
|
|
808b37425a | |
|
|
8385e30d25 | |
|
|
391e13c350 | |
|
|
d0233c0eb6 | |
|
|
3b137b0b55 | |
|
|
ec9db36a50 | |
|
|
e78f9a8281 | |
|
|
c99b9710b5 | |
|
|
a8c9bd845b | |
|
|
9a9cab1b8e | |
|
|
1d1b64fe7c | |
|
|
17ba57ce6e | |
|
|
fa2f16c019 | |
|
|
0c980f5f48 | |
|
|
fdc14a1a92 | |
|
|
956463d43f | |
|
|
125e565c55 | |
|
|
129d72cd58 | |
|
|
b01cb9abf8 | |
|
|
f949f323de | |
|
|
5eb5789c23 | |
|
|
15fa9b8d19 | |
|
|
7e3cca656e | |
|
|
6e373e57f1 | |
|
|
545372dcba | |
|
|
d7fcf121f8 | |
|
|
c5e606e326 | |
|
|
bb144428d0 | |
|
|
33f1aa4e90 | |
|
|
411fc99201 | |
|
|
4364743555 | |
|
|
6dd387613b | |
|
|
04705665f5 | |
|
|
c13d8720d2 | |
|
|
df72890577 | |
|
|
4e88428706 | |
|
|
52736e9812 | |
|
|
7b84d34c98 | |
|
|
e936d1c597 | |
|
|
b0beefe516 | |
|
|
49f11dc6e5 | |
|
|
30c0dfc3ba | |
|
|
d7b1e348e9 | |
|
|
2a3b79df15 | |
|
|
b11aecffa4 | |
|
|
4b5ba9eab3 | |
|
|
0add9bd514 | |
|
|
a770d516df | |
|
|
47db716af3 | |
|
|
e7e911c5bb | |
|
|
1126fc4a1c | |
|
|
59e9025336 | |
|
|
7d6afb6c6b | |
|
|
3a99af257d | |
|
|
12256c5b9c | |
|
|
87854883c6 | |
|
|
ebe2d4c0a2 | |
|
|
d733b61a66 | |
|
|
61143d2c20 | |
|
|
f47c3e0007 | |
|
|
536e1e7a87 | |
|
|
ab2a9f6a79 | |
|
|
9b33efdcb3 | |
|
|
86b37b9cc8 | |
|
|
7805a1e961 | |
|
|
fdb96b6ae1 | |
|
|
1783d1b6eb | |
|
|
bfbe7b8325 | |
|
|
e3e2c474ac | |
|
|
7b1fe2b803 | |
|
|
02f816e613 | |
|
|
198109a919 | |
|
|
c6370c0fde | |
|
|
c75acca85b | |
|
|
d7f4d61b55 | |
|
|
221a453411 | |
|
|
ce3063e9ba | |
|
|
7987c3a8e4 | |
|
|
8f94ee3a6f | |
|
|
201e489cef | |
|
|
d23dca3ba8 | |
|
|
42e5afbb21 | |
|
|
997f690d22 | |
|
|
7978772d7b | |
|
|
9f54400f18 | |
|
|
34681a3f4f | |
|
|
3bb7eda655 | |
|
|
72a7a54866 | |
|
|
e714233f67 | |
|
|
cca1a06b9f | |
|
|
84e737216d | |
|
|
bf5b3239dd | |
|
|
5858775483 | |
|
|
b74ae75fa8 | |
|
|
6e1e03d05b | |
|
|
ce50366985 | |
|
|
d9fb9637bd | |
|
|
5d39baaea8 | |
|
|
9def6c52b5 | |
|
|
1f6b693ec1 | |
|
|
b2e06ad76b | |
|
|
ac69e09aca | |
|
|
08f31a0bbd | |
|
|
2bdd6a8dba | |
|
|
9ff366c80b | |
|
|
cc216eb07f | |
|
|
d2ff445ddf | |
|
|
a8ca366bb6 | |
|
|
4901a56d61 | |
|
|
2d562b3e4c | |
|
|
a9a23e27e3 | |
|
|
cee2bfa336 | |
|
|
5924b0cc97 | |
|
|
4ec6b73fb3 | |
|
|
ce50026cc3 | |
|
|
0ff9c64908 | |
|
|
cf722c2490 | |
|
|
64d7581e6b | |
|
|
1190848222 | |
|
|
11c88ec0de | |
|
|
95307ed453 | |
|
|
bfe6b238e9 | |
|
|
fe4b40a3fe | |
|
|
4fda800e8b | |
|
|
7c28758204 | |
|
|
75c769a774 | |
|
|
5d8781462d | |
|
|
b2d6b1599b | |
|
|
c81238c45a | |
|
|
f012632cde | |
|
|
78e396d11e | |
|
|
cba62a453b | |
|
|
923f61ac9e | |
|
|
94bec533c4 | |
|
|
e286a120f1 | |
|
|
2e0a05ab32 | |
|
|
110fc19b94 | |
|
|
111be03907 | |
|
|
39e6cccc3f | |
|
|
08175d3a7c | |
|
|
3006e85375 | |
|
|
632e7979a2 | |
|
|
71fc07133a | |
|
|
97b00c1569 | |
|
|
c4198e1faf | |
|
|
6f6c924f66 | |
|
|
0eb4407219 | |
|
|
3a2a38c0b6 | |
|
|
02124ce920 | |
|
|
b700846a9c | |
|
|
f7310919f8 | |
|
|
949062941f | |
|
|
7f497ae8d8 | |
|
|
1d817c8e0f | |
|
|
7dd045bb33 | |
|
|
11d13a03d3 | |
|
|
3bcfa83168 | |
|
|
b0a3cd7328 | |
|
|
c71b67e24c | |
|
|
d582be49b2 | |
|
|
46ee4e7906 | |
|
|
c34418e964 | |
|
|
1c8909ce69 | |
|
|
5f2c90219d | |
|
|
fef2ca0eb3 | |
|
|
eab574e130 | |
|
|
b2656c911b | |
|
|
6ba124b038 | |
|
|
1cd7208ddf | |
|
|
d555910c77 | |
|
|
d1a8407a9b | |
|
|
db3205f97a | |
|
|
100b88268b | |
|
|
202971f343 | |
|
|
b26b9e6384 | |
|
|
4d69340a6b | |
|
|
14e0126995 | |
|
|
04782854d2 | |
|
|
4eff918bd3 | |
|
|
4e2103aab2 | |
|
|
895d02a19c | |
|
|
375f69b365 | |
|
|
09a729c787 | |
|
|
bb8a76026e | |
|
|
4319a6b1ee | |
|
|
2ca6705599 | |
|
|
07556dd53a | |
|
|
c93b3066bd | |
|
|
d282f6b650 | |
|
|
c34cae40b6 | |
|
|
46b54394ad | |
|
|
b05aa413e3 | |
|
|
2435f3f495 | |
|
|
49bca38b5f | |
|
|
0d7ee5889c | |
|
|
a0bba93055 | |
|
|
a2d7ab4af0 | |
|
|
99f7f131ed | |
|
|
c369762001 | |
|
|
d81ae56de0 | |
|
|
f384673cf9 | |
|
|
670c9ff0b0 | |
|
|
2ac4ec8de3 | |
|
|
7e16f6e6b0 | |
|
|
63cd76e919 | |
|
|
91df5214c6 | |
|
|
900833c06c | |
|
|
700875434f | |
|
|
9d5d0d6655 | |
|
|
8ce8dec8f7 | |
|
|
836d37df76 | |
|
|
2c35a0c53c | |
|
|
a8c8d62e63 | |
|
|
807637eae0 | |
|
|
572608f878 | |
|
|
6747c5df02 | |
|
|
2c4b2f6c91 | |
|
|
80cda32cba | |
|
|
032e4e1199 | |
|
|
04676b3788 | |
|
|
d6f3830884 | |
|
|
50c7c52c3d | |
|
|
a6eb2abed0 | |
|
|
1c38cb1bdb | |
|
|
932c9935d5 | |
|
|
249031619d | |
|
|
408df0d11e | |
|
|
fc602ff943 | |
|
|
d34e586215 | |
|
|
ee2484f1d0 | |
|
|
0ac03dec60 | |
|
|
5f3cf2800c | |
|
|
206d2a57ec | |
|
|
87118b86d5 | |
|
|
58cb4da348 | |
|
|
d087b61ce5 | |
|
|
9d73295702 | |
|
|
3e6db31c69 | |
|
|
b8038a6a97 | |
|
|
ee49689416 |
|
|
@ -1,4 +1,7 @@
|
||||||
# Ignore Cloudflare Worker configuration files during Pages deployment
|
# Ignore Cloudflare Worker configuration files during Pages deployment
|
||||||
# These are only used for separate Worker deployments
|
# These are only used for separate Worker deployments
|
||||||
|
wrangler.toml
|
||||||
|
wrangler.dev.toml
|
||||||
|
worker/wrangler.toml
|
||||||
worker/
|
worker/
|
||||||
*.toml
|
*.toml
|
||||||
|
|
|
||||||
21
.env.example
21
.env.example
|
|
@ -4,21 +4,16 @@ VITE_GOOGLE_MAPS_API_KEY='your_google_maps_api_key'
|
||||||
VITE_DAILY_DOMAIN='your_daily_domain'
|
VITE_DAILY_DOMAIN='your_daily_domain'
|
||||||
VITE_TLDRAW_WORKER_URL='your_worker_url'
|
VITE_TLDRAW_WORKER_URL='your_worker_url'
|
||||||
|
|
||||||
# AI Configuration
|
# AI Orchestrator (Primary - Netcup RS 8000)
|
||||||
# AI Orchestrator with Ollama (FREE local AI - highest priority)
|
VITE_AI_ORCHESTRATOR_URL='http://159.195.32.209:8000'
|
||||||
VITE_OLLAMA_URL='https://ai.jeffemmett.com'
|
# Or use domain when DNS is configured:
|
||||||
|
# VITE_AI_ORCHESTRATOR_URL='https://ai-api.jeffemmett.com'
|
||||||
|
|
||||||
# RunPod API (Primary AI provider when Ollama unavailable)
|
# RunPod API (Fallback/Direct Access)
|
||||||
# Users don't need their own API keys - RunPod is pre-configured
|
|
||||||
VITE_RUNPOD_API_KEY='your_runpod_api_key_here'
|
VITE_RUNPOD_API_KEY='your_runpod_api_key_here'
|
||||||
VITE_RUNPOD_TEXT_ENDPOINT_ID='your_text_endpoint_id' # vLLM for chat/text
|
VITE_RUNPOD_TEXT_ENDPOINT_ID='your_text_endpoint_id'
|
||||||
VITE_RUNPOD_IMAGE_ENDPOINT_ID='your_image_endpoint_id' # Automatic1111/SD
|
VITE_RUNPOD_IMAGE_ENDPOINT_ID='your_image_endpoint_id'
|
||||||
VITE_RUNPOD_VIDEO_ENDPOINT_ID='your_video_endpoint_id' # Wan2.2
|
VITE_RUNPOD_VIDEO_ENDPOINT_ID='your_video_endpoint_id'
|
||||||
VITE_RUNPOD_WHISPER_ENDPOINT_ID='your_whisper_endpoint_id' # WhisperX
|
|
||||||
|
|
||||||
# WalletConnect (Web3 wallet integration)
|
|
||||||
# Get your project ID at https://cloud.walletconnect.com/
|
|
||||||
VITE_WALLETCONNECT_PROJECT_ID='your_walletconnect_project_id'
|
|
||||||
|
|
||||||
# Worker-only Variables (Do not prefix with VITE_)
|
# Worker-only Variables (Do not prefix with VITE_)
|
||||||
CLOUDFLARE_API_TOKEN='your_cloudflare_token'
|
CLOUDFLARE_API_TOKEN='your_cloudflare_token'
|
||||||
|
|
|
||||||
|
|
@ -1,28 +0,0 @@
|
||||||
name: Mirror to Gitea
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- main
|
|
||||||
- master
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
mirror:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- name: Checkout repository
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
with:
|
|
||||||
fetch-depth: 0
|
|
||||||
|
|
||||||
- name: Mirror to Gitea
|
|
||||||
env:
|
|
||||||
GITEA_TOKEN: ${{ secrets.GITEA_TOKEN }}
|
|
||||||
GITEA_USERNAME: ${{ secrets.GITEA_USERNAME }}
|
|
||||||
run: |
|
|
||||||
REPO_NAME=$(basename $GITHUB_REPOSITORY)
|
|
||||||
git remote add gitea https://$GITEA_USERNAME:$GITEA_TOKEN@gitea.jeffemmett.com/jeffemmett/$REPO_NAME.git || true
|
|
||||||
git push gitea --all --force
|
|
||||||
git push gitea --tags --force
|
|
||||||
|
|
||||||
|
|
@ -1,129 +0,0 @@
|
||||||
name: Tests
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [dev, main]
|
|
||||||
pull_request:
|
|
||||||
branches: [dev, main]
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
unit-tests:
|
|
||||||
name: Unit & Integration Tests
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout code
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Setup Node.js
|
|
||||||
uses: actions/setup-node@v4
|
|
||||||
with:
|
|
||||||
node-version: '20'
|
|
||||||
cache: 'npm'
|
|
||||||
|
|
||||||
- name: Install dependencies
|
|
||||||
run: npm ci
|
|
||||||
|
|
||||||
- name: Run TypeScript check
|
|
||||||
run: npm run types
|
|
||||||
|
|
||||||
- name: Run unit tests with coverage
|
|
||||||
run: npm run test:coverage
|
|
||||||
|
|
||||||
- name: Run worker tests
|
|
||||||
run: npm run test:worker
|
|
||||||
|
|
||||||
- name: Upload coverage to Codecov
|
|
||||||
uses: codecov/codecov-action@v4
|
|
||||||
with:
|
|
||||||
files: ./coverage/lcov.info
|
|
||||||
fail_ci_if_error: false
|
|
||||||
verbose: true
|
|
||||||
|
|
||||||
e2e-tests:
|
|
||||||
name: E2E Tests
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
timeout-minutes: 30
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout code
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Setup Node.js
|
|
||||||
uses: actions/setup-node@v4
|
|
||||||
with:
|
|
||||||
node-version: '20'
|
|
||||||
cache: 'npm'
|
|
||||||
|
|
||||||
- name: Install dependencies
|
|
||||||
run: npm ci
|
|
||||||
|
|
||||||
- name: Install Playwright browsers
|
|
||||||
run: npx playwright install chromium --with-deps
|
|
||||||
|
|
||||||
- name: Run E2E tests
|
|
||||||
run: npm run test:e2e
|
|
||||||
env:
|
|
||||||
CI: true
|
|
||||||
|
|
||||||
- name: Upload Playwright report
|
|
||||||
uses: actions/upload-artifact@v4
|
|
||||||
if: failure()
|
|
||||||
with:
|
|
||||||
name: playwright-report
|
|
||||||
path: playwright-report/
|
|
||||||
retention-days: 7
|
|
||||||
|
|
||||||
- name: Upload Playwright traces
|
|
||||||
uses: actions/upload-artifact@v4
|
|
||||||
if: failure()
|
|
||||||
with:
|
|
||||||
name: playwright-traces
|
|
||||||
path: test-results/
|
|
||||||
retention-days: 7
|
|
||||||
|
|
||||||
build-check:
|
|
||||||
name: Build Check
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout code
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Setup Node.js
|
|
||||||
uses: actions/setup-node@v4
|
|
||||||
with:
|
|
||||||
node-version: '20'
|
|
||||||
cache: 'npm'
|
|
||||||
|
|
||||||
- name: Install dependencies
|
|
||||||
run: npm ci
|
|
||||||
|
|
||||||
- name: Build project
|
|
||||||
run: npm run build
|
|
||||||
env:
|
|
||||||
NODE_OPTIONS: '--max-old-space-size=8192'
|
|
||||||
|
|
||||||
# Gate job that requires all tests to pass before merge
|
|
||||||
merge-ready:
|
|
||||||
name: Merge Ready
|
|
||||||
needs: [unit-tests, e2e-tests, build-check]
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
if: always()
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Check all jobs passed
|
|
||||||
run: |
|
|
||||||
if [[ "${{ needs.unit-tests.result }}" != "success" ]]; then
|
|
||||||
echo "Unit tests failed"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
if [[ "${{ needs.e2e-tests.result }}" != "success" ]]; then
|
|
||||||
echo "E2E tests failed"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
if [[ "${{ needs.build-check.result }}" != "success" ]]; then
|
|
||||||
echo "Build check failed"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
echo "All checks passed - ready to merge!"
|
|
||||||
|
|
@ -175,8 +175,3 @@ dist
|
||||||
.env.*.local
|
.env.*.local
|
||||||
.dev.vars
|
.dev.vars
|
||||||
.env.production
|
.env.production
|
||||||
.aider*
|
|
||||||
|
|
||||||
# Playwright
|
|
||||||
playwright-report/
|
|
||||||
test-results/
|
|
||||||
|
|
|
||||||
63
CHANGELOG.md
63
CHANGELOG.md
|
|
@ -1,63 +0,0 @@
|
||||||
# Changelog
|
|
||||||
|
|
||||||
Activity log of changes to canvas boards, organized by contributor.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 2026-01-06
|
|
||||||
|
|
||||||
### Claude
|
|
||||||
- Added per-board Activity Logger feature
|
|
||||||
- Automatically tracks shape creates, deletes, and updates
|
|
||||||
- Collapsible sidebar panel showing activity timeline
|
|
||||||
- Groups activities by date (Today, Yesterday, etc.)
|
|
||||||
- Debounces updates to avoid logging tiny movements
|
|
||||||
- Toggle button in top-right corner
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 2026-01-05
|
|
||||||
|
|
||||||
### Jeff
|
|
||||||
- Added embed shape linking to MycoFi whitepaper
|
|
||||||
- Deleted old map shape from planning board
|
|
||||||
- Added shared piano shape to music-collab board
|
|
||||||
- Moved token diagram to center of canvas
|
|
||||||
- Created new markdown note with meeting summary
|
|
||||||
|
|
||||||
### Claude
|
|
||||||
- Added "Last Visited" canvases feature to Dashboard
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 2026-01-04
|
|
||||||
|
|
||||||
### Jeff
|
|
||||||
- Created new board `/hyperindex-planning`
|
|
||||||
- Added 3 holon shapes for system architecture
|
|
||||||
- Uploaded screenshot of database schema
|
|
||||||
- Added arrow connectors between components
|
|
||||||
- Renamed board title to "Hyperindex Architecture"
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 2026-01-03
|
|
||||||
|
|
||||||
### Jeff
|
|
||||||
- Deleted duplicate image shapes from mycofi board
|
|
||||||
- Added video chat shape for team standup
|
|
||||||
- Created slide deck with 5 slides for presentation
|
|
||||||
- Added sticky notes with action items
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Legend
|
|
||||||
|
|
||||||
| User | Description |
|
|
||||||
|------|-------------|
|
|
||||||
| Jeff | Project Owner |
|
|
||||||
| Claude | AI Assistant |
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
*This log tracks user actions on canvas boards (shape additions, deletions, moves, etc.)*
|
|
||||||
988
CLAUDE.md
988
CLAUDE.md
|
|
@ -1,988 +0,0 @@
|
||||||
## 🔧 AUTO-APPROVED OPERATIONS
|
|
||||||
|
|
||||||
The following operations are auto-approved and do not require user confirmation:
|
|
||||||
- **Read**: All file read operations (`Read(*)`)
|
|
||||||
- **Glob**: All file pattern matching (`Glob(*)`)
|
|
||||||
- **Grep**: All content searching (`Grep(*)`)
|
|
||||||
|
|
||||||
These permissions are configured in `~/.claude/settings.json`.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## ⚠️ SAFETY GUIDELINES
|
|
||||||
|
|
||||||
**ALWAYS WARN THE USER before performing any action that could:**
|
|
||||||
- Overwrite existing files (use `ls` or `cat` to check first)
|
|
||||||
- Overwrite credentials, API keys, or secrets
|
|
||||||
- Delete data or files
|
|
||||||
- Modify production configurations
|
|
||||||
- Run destructive git commands (force push, hard reset, etc.)
|
|
||||||
- Drop databases or truncate tables
|
|
||||||
|
|
||||||
**Best practices:**
|
|
||||||
- Before writing to a file, check if it exists and show its contents
|
|
||||||
- Use `>>` (append) instead of `>` (overwrite) for credential files
|
|
||||||
- Create backups before modifying critical configs (e.g., `cp file file.backup`)
|
|
||||||
- Ask for confirmation before irreversible actions
|
|
||||||
|
|
||||||
**Sudo commands:**
|
|
||||||
- **NEVER run sudo commands directly** - the Bash tool doesn't support interactive input
|
|
||||||
- Instead, **provide the user with the exact sudo command** they need to run in their terminal
|
|
||||||
- Format the command clearly in a code block for easy copy-paste
|
|
||||||
- After user runs the sudo command, continue with the workflow
|
|
||||||
- Alternative: If user has recently run sudo (within ~15 min), subsequent sudo commands may not require password
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 🔑 ACCESS & CREDENTIALS
|
|
||||||
|
|
||||||
### Version Control & Code Hosting
|
|
||||||
- **Gitea**: Self-hosted at `gitea.jeffemmett.com` - PRIMARY repository
|
|
||||||
- Push here FIRST, then mirror to GitHub
|
|
||||||
- Private repos and source of truth
|
|
||||||
- SSH Key: `~/.ssh/gitea_ed25519` (private), `~/.ssh/gitea_ed25519.pub` (public)
|
|
||||||
- Public Key: `ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIIE2+2UZElEYptgZ9GFs2CXW0PIA57BfQcU9vlyV6fz4 gitea@jeffemmett.com`
|
|
||||||
- **Gitea CLI (tea)**: ✅ Installed at `~/bin/tea` (added to PATH)
|
|
||||||
|
|
||||||
- **GitHub**: Public mirror and collaboration
|
|
||||||
- Receives pushes from Gitea via mirror sync
|
|
||||||
- Token: `(REDACTED-GITHUB-TOKEN)`
|
|
||||||
- SSH Key: `~/.ssh/github_deploy_key` (private), `~/.ssh/github_deploy_key.pub` (public)
|
|
||||||
- **GitHub CLI (gh)**: ✅ Installed and available for PR/issue management
|
|
||||||
|
|
||||||
### Git Workflow
|
|
||||||
**Two-way sync between Gitea and GitHub:**
|
|
||||||
|
|
||||||
**Gitea-Primary Repos (Default):**
|
|
||||||
1. Develop locally in `/home/jeffe/Github/`
|
|
||||||
2. Commit and push to Gitea first
|
|
||||||
3. Gitea automatically mirrors TO GitHub (built-in push mirror)
|
|
||||||
4. GitHub used for public collaboration and visibility
|
|
||||||
|
|
||||||
**GitHub-Primary Repos (Mirror Repos):**
|
|
||||||
For repos where GitHub is source of truth (v0.dev exports, client collabs):
|
|
||||||
1. Push to GitHub
|
|
||||||
2. Deploy webhook pulls from GitHub and deploys
|
|
||||||
3. Webhook triggers Gitea to sync FROM GitHub
|
|
||||||
|
|
||||||
### 🔀 DEV BRANCH WORKFLOW (MANDATORY)
|
|
||||||
|
|
||||||
**CRITICAL: All development work on canvas-website (and other active projects) MUST use a dev branch.**
|
|
||||||
|
|
||||||
#### Branch Strategy
|
|
||||||
```
|
|
||||||
main (production)
|
|
||||||
└── dev (integration/staging)
|
|
||||||
└── feature/* (optional feature branches)
|
|
||||||
```
|
|
||||||
|
|
||||||
#### Development Rules
|
|
||||||
|
|
||||||
1. **ALWAYS work on the `dev` branch** for new features and changes:
|
|
||||||
```bash
|
|
||||||
cd /home/jeffe/Github/canvas-website
|
|
||||||
git checkout dev
|
|
||||||
git pull origin dev
|
|
||||||
```
|
|
||||||
|
|
||||||
2. **After completing a feature**, push to dev:
|
|
||||||
```bash
|
|
||||||
git add .
|
|
||||||
git commit -m "feat: description of changes"
|
|
||||||
git push origin dev
|
|
||||||
```
|
|
||||||
|
|
||||||
3. **Update backlog task** immediately after pushing:
|
|
||||||
```bash
|
|
||||||
backlog task edit <task-id> --status "Done" --append-notes "Pushed to dev branch"
|
|
||||||
```
|
|
||||||
|
|
||||||
4. **NEVER push directly to main** - main is for tested, verified features only
|
|
||||||
|
|
||||||
5. **Merge dev → main manually** when features are verified working:
|
|
||||||
```bash
|
|
||||||
git checkout main
|
|
||||||
git pull origin main
|
|
||||||
git merge dev
|
|
||||||
git push origin main
|
|
||||||
git checkout dev # Return to dev for continued work
|
|
||||||
```
|
|
||||||
|
|
||||||
#### Complete Feature Deployment Checklist
|
|
||||||
|
|
||||||
- [ ] Work on `dev` branch (not main)
|
|
||||||
- [ ] Test locally before committing
|
|
||||||
- [ ] Commit with descriptive message
|
|
||||||
- [ ] Push to `dev` branch on Gitea
|
|
||||||
- [ ] Update backlog task status to "Done"
|
|
||||||
- [ ] Add notes to backlog task about what was implemented
|
|
||||||
- [ ] (Later) When verified working: merge dev → main manually
|
|
||||||
|
|
||||||
#### Why This Matters
|
|
||||||
- **Protects production**: main branch always has known-working code
|
|
||||||
- **Enables testing**: dev branch can be deployed to staging for verification
|
|
||||||
- **Clean history**: main only gets complete, tested features
|
|
||||||
- **Easy rollback**: if dev breaks, main is still stable
|
|
||||||
|
|
||||||
### Server Infrastructure
|
|
||||||
- **Netcup RS 8000 G12 Pro**: Primary application & AI server
|
|
||||||
- IP: `159.195.32.209`
|
|
||||||
- 20 cores, 64GB RAM, 3TB storage
|
|
||||||
- Hosts local AI models (Ollama, Stable Diffusion)
|
|
||||||
- All websites and apps deployed here in Docker containers
|
|
||||||
- Location: Germany (low latency EU)
|
|
||||||
- SSH Key (local): `~/.ssh/netcup_ed25519` (private), `~/.ssh/netcup_ed25519.pub` (public)
|
|
||||||
- Public Key: `ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIKmp4A2klKv/YIB1C6JAsb2UzvlzzE+0EcJ0jtkyFuhO netcup-rs8000@jeffemmett.com`
|
|
||||||
- SSH Access: `ssh netcup`
|
|
||||||
- **SSH Keys ON the server** (for git operations):
|
|
||||||
- Gitea: `~/.ssh/gitea_ed25519` → `ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIIE2+2UZElEYptgZ9GFs2CXW0PIA57BfQcU9vlyV6fz4 gitea@jeffemmett.com`
|
|
||||||
- GitHub: `~/.ssh/github_ed25519` → `ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIC6xXNICy0HXnqHO+U7+y7ui+pZBGe0bm0iRMS23pR1E github-deploy@netcup-rs8000`
|
|
||||||
|
|
||||||
- **RunPod**: GPU burst capacity for AI workloads
|
|
||||||
- Host: `ssh.runpod.io`
|
|
||||||
- Serverless GPU pods (pay-per-use)
|
|
||||||
- Used for: SDXL/SD3, video generation, training
|
|
||||||
- Smart routing from RS 8000 orchestrator
|
|
||||||
- SSH Key: `~/.ssh/runpod_ed25519` (private), `~/.ssh/runpod_ed25519.pub` (public)
|
|
||||||
- Public Key: `ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIAC7NYjI0U/2ChGaZBBWP7gKt/V12Ts6FgatinJOQ8JG runpod@jeffemmett.com`
|
|
||||||
- SSH Access: `ssh runpod`
|
|
||||||
- **API Key**: `(REDACTED-RUNPOD-KEY)`
|
|
||||||
- **CLI Config**: `~/.runpod/config.toml`
|
|
||||||
- **Serverless Endpoints**:
|
|
||||||
- Image (SD): `tzf1j3sc3zufsy` (Automatic1111)
|
|
||||||
- Video (Wan2.2): `4jql4l7l0yw0f3`
|
|
||||||
- Text (vLLM): `03g5hz3hlo8gr2`
|
|
||||||
- Whisper: `lrtisuv8ixbtub`
|
|
||||||
- ComfyUI: `5zurj845tbf8he`
|
|
||||||
|
|
||||||
### API Keys & Services
|
|
||||||
|
|
||||||
**IMPORTANT**: All API keys and tokens are stored securely on the Netcup server. Never store credentials locally.
|
|
||||||
- Access credentials via: `ssh netcup "cat ~/.cloudflare-credentials.env"` or `ssh netcup "cat ~/.porkbun_credentials"`
|
|
||||||
- All API operations should be performed FROM the Netcup server, not locally
|
|
||||||
|
|
||||||
#### Credential Files on Netcup (`/root/`)
|
|
||||||
| File | Contents |
|
|
||||||
|------|----------|
|
|
||||||
| `~/.cloudflare-credentials.env` | Cloudflare API tokens, account ID, tunnel token |
|
|
||||||
| `~/.cloudflare_credentials` | Legacy/DNS token |
|
|
||||||
| `~/.porkbun_credentials` | Porkbun API key and secret |
|
|
||||||
| `~/.v0_credentials` | V0.dev API key |
|
|
||||||
|
|
||||||
#### Cloudflare
|
|
||||||
- **Account ID**: `0e7b3338d5278ed1b148e6456b940913`
|
|
||||||
- **Tokens stored on Netcup** - source `~/.cloudflare-credentials.env`:
|
|
||||||
- `CLOUDFLARE_API_TOKEN` - Zone read, Worker:read/edit, R2:read/edit
|
|
||||||
- `CLOUDFLARE_TUNNEL_TOKEN` - Tunnel management
|
|
||||||
- `CLOUDFLARE_ZONE_TOKEN` - Zone:Edit, DNS:Edit (for adding domains)
|
|
||||||
|
|
||||||
#### Porkbun (Domain Registrar)
|
|
||||||
- **Credentials stored on Netcup** - source `~/.porkbun_credentials`:
|
|
||||||
- `PORKBUN_API_KEY` and `PORKBUN_SECRET_KEY`
|
|
||||||
- **API Endpoint**: `https://api-ipv4.porkbun.com/api/json/v3/`
|
|
||||||
- **API Docs**: https://porkbun.com/api/json/v3/documentation
|
|
||||||
- **Important**: JSON must have `secretapikey` before `apikey` in requests
|
|
||||||
- **Capabilities**: Update nameservers, get auth codes for transfers, manage DNS
|
|
||||||
- **Note**: Each domain must have "API Access" enabled individually in Porkbun dashboard
|
|
||||||
|
|
||||||
#### Domain Onboarding Workflow (Porkbun → Cloudflare)
|
|
||||||
Run these commands FROM Netcup (`ssh netcup`):
|
|
||||||
1. Add domain to Cloudflare (creates zone, returns nameservers)
|
|
||||||
2. Update nameservers at Porkbun to point to Cloudflare
|
|
||||||
3. Add CNAME record pointing to Cloudflare tunnel
|
|
||||||
4. Add hostname to tunnel config and restart cloudflared
|
|
||||||
5. Domain is live through the tunnel!
|
|
||||||
|
|
||||||
#### V0.dev (AI UI Generation)
|
|
||||||
- **Credentials stored on Netcup** - source `~/.v0_credentials`:
|
|
||||||
- `V0_API_KEY` - Platform API access
|
|
||||||
- **API Key**: `v1:5AwJbit4j9rhGcAKPU4XlVWs:05vyCcJLiWRVQW7Xu4u5E03G`
|
|
||||||
- **SDK**: `npm install v0-sdk` (use `v0` CLI for adding components)
|
|
||||||
- **Docs**: https://v0.app/docs/v0-platform-api
|
|
||||||
- **Capabilities**:
|
|
||||||
- List/create/update/delete projects
|
|
||||||
- Manage chats and versions
|
|
||||||
- Download generated code
|
|
||||||
- Create deployments
|
|
||||||
- Manage environment variables
|
|
||||||
- **Limitations**: GitHub-only for git integration (no Gitea/GitLab support)
|
|
||||||
- **Usage**:
|
|
||||||
```javascript
|
|
||||||
const { v0 } = require('v0-sdk');
|
|
||||||
// Uses V0_API_KEY env var automatically
|
|
||||||
const projects = await v0.projects.find();
|
|
||||||
const chats = await v0.chats.find();
|
|
||||||
```
|
|
||||||
|
|
||||||
#### Other Services
|
|
||||||
- **HuggingFace**: CLI access available for model downloads
|
|
||||||
- **RunPod**: API access for serverless GPU orchestration (see Server Infrastructure above)
|
|
||||||
|
|
||||||
### Dev Ops Stack & Principles
|
|
||||||
- **Platform**: Linux WSL2 (Ubuntu on Windows) for development
|
|
||||||
- **Working Directory**: `/home/jeffe/Github`
|
|
||||||
- **Container Strategy**:
|
|
||||||
- ALL repos should be Dockerized
|
|
||||||
- Optimized containers for production deployment
|
|
||||||
- Docker Compose for multi-service orchestration
|
|
||||||
- **Process Management**: PM2 available for Node.js services
|
|
||||||
- **Version Control**: Git configured with GitHub + Gitea mirrors
|
|
||||||
- **Package Managers**: npm/pnpm/yarn available
|
|
||||||
|
|
||||||
### 🚀 Traefik Reverse Proxy (Central Routing)
|
|
||||||
All HTTP services on Netcup RS 8000 route through Traefik for automatic service discovery.
|
|
||||||
|
|
||||||
**Architecture:**
|
|
||||||
```
|
|
||||||
Internet → Cloudflare Tunnel → Traefik (:80/:443) → Docker Services
|
|
||||||
│
|
|
||||||
├── gitea.jeffemmett.com → gitea:3000
|
|
||||||
├── mycofi.earth → mycofi:3000
|
|
||||||
├── games.jeffemmett.com → games:80
|
|
||||||
└── [auto-discovered via Docker labels]
|
|
||||||
```
|
|
||||||
|
|
||||||
**Location:** `/root/traefik/` on Netcup RS 8000
|
|
||||||
|
|
||||||
**Adding a New Service:**
|
|
||||||
```yaml
|
|
||||||
# In your docker-compose.yml, add these labels:
|
|
||||||
services:
|
|
||||||
myapp:
|
|
||||||
image: myapp:latest
|
|
||||||
labels:
|
|
||||||
- "traefik.enable=true"
|
|
||||||
- "traefik.http.routers.myapp.rule=Host(`myapp.jeffemmett.com`)"
|
|
||||||
- "traefik.http.services.myapp.loadbalancer.server.port=3000"
|
|
||||||
networks:
|
|
||||||
- traefik-public
|
|
||||||
networks:
|
|
||||||
traefik-public:
|
|
||||||
external: true
|
|
||||||
```
|
|
||||||
|
|
||||||
**Traefik Dashboard:** `http://159.195.32.209:8888` (internal only)
|
|
||||||
|
|
||||||
**SSH Git Access:**
|
|
||||||
- SSH goes direct (not through Traefik): `git.jeffemmett.com:223` → `159.195.32.209:223`
|
|
||||||
- Web UI goes through Traefik: `gitea.jeffemmett.com` → Traefik → gitea:3000
|
|
||||||
|
|
||||||
### ☁️ Cloudflare Tunnel Configuration
|
|
||||||
**Location:** `/root/cloudflared/` on Netcup RS 8000
|
|
||||||
|
|
||||||
The tunnel uses a token-based configuration managed via Cloudflare Zero Trust Dashboard.
|
|
||||||
All public hostnames should point to `http://localhost:80` (Traefik), which routes based on Host header.
|
|
||||||
|
|
||||||
**Managed hostnames:**
|
|
||||||
- `gitea.jeffemmett.com` → Traefik → Gitea
|
|
||||||
- `photos.jeffemmett.com` → Traefik → Immich
|
|
||||||
- `movies.jeffemmett.com` → Traefik → Jellyfin
|
|
||||||
- `search.jeffemmett.com` → Traefik → Semantic Search
|
|
||||||
- `mycofi.earth` → Traefik → MycoFi
|
|
||||||
- `games.jeffemmett.com` → Traefik → Games Platform
|
|
||||||
- `decolonizeti.me` → Traefik → Decolonize Time
|
|
||||||
|
|
||||||
**Tunnel ID:** `a838e9dc-0af5-4212-8af2-6864eb15e1b5`
|
|
||||||
**Tunnel CNAME Target:** `a838e9dc-0af5-4212-8af2-6864eb15e1b5.cfargotunnel.com`
|
|
||||||
|
|
||||||
**To deploy a new website/service:**
|
|
||||||
|
|
||||||
1. **Dockerize the project** with Traefik labels in `docker-compose.yml`:
|
|
||||||
```yaml
|
|
||||||
services:
|
|
||||||
myapp:
|
|
||||||
build: .
|
|
||||||
labels:
|
|
||||||
- "traefik.enable=true"
|
|
||||||
- "traefik.http.routers.myapp.rule=Host(`mydomain.com`) || Host(`www.mydomain.com`)"
|
|
||||||
- "traefik.http.services.myapp.loadbalancer.server.port=3000"
|
|
||||||
networks:
|
|
||||||
- traefik-public
|
|
||||||
networks:
|
|
||||||
traefik-public:
|
|
||||||
external: true
|
|
||||||
```
|
|
||||||
|
|
||||||
2. **Deploy to Netcup:**
|
|
||||||
```bash
|
|
||||||
ssh netcup "cd /opt/websites && git clone <repo-url>"
|
|
||||||
ssh netcup "cd /opt/websites/<project> && docker compose up -d --build"
|
|
||||||
```
|
|
||||||
|
|
||||||
3. **Add hostname to tunnel config** (`/root/cloudflared/config.yml`):
|
|
||||||
```yaml
|
|
||||||
- hostname: mydomain.com
|
|
||||||
service: http://localhost:80
|
|
||||||
- hostname: www.mydomain.com
|
|
||||||
service: http://localhost:80
|
|
||||||
```
|
|
||||||
Then restart: `ssh netcup "docker restart cloudflared"`
|
|
||||||
|
|
||||||
4. **Configure DNS in Cloudflare dashboard** (CRITICAL - prevents 525 SSL errors):
|
|
||||||
- Go to Cloudflare Dashboard → select domain → DNS → Records
|
|
||||||
- Delete any existing A/AAAA records for `@` and `www`
|
|
||||||
- Add CNAME records:
|
|
||||||
| Type | Name | Target | Proxy |
|
|
||||||
|------|------|--------|-------|
|
|
||||||
| CNAME | `@` | `a838e9dc-0af5-4212-8af2-6864eb15e1b5.cfargotunnel.com` | Proxied ✓ |
|
|
||||||
| CNAME | `www` | `a838e9dc-0af5-4212-8af2-6864eb15e1b5.cfargotunnel.com` | Proxied ✓ |
|
|
||||||
|
|
||||||
**API Credentials** (on Netcup at `~/.cloudflare*`):
|
|
||||||
- `CLOUDFLARE_API_TOKEN` - Zone read access only
|
|
||||||
- `CLOUDFLARE_TUNNEL_TOKEN` - Tunnel management only
|
|
||||||
- See **API Keys & Services** section above for Domain Management Token (required for DNS automation)
|
|
||||||
|
|
||||||
### 🔄 Auto-Deploy Webhook System
|
|
||||||
**Location:** `/opt/deploy-webhook/` on Netcup RS 8000
|
|
||||||
**Endpoint:** `https://deploy.jeffemmett.com/deploy/<repo-name>`
|
|
||||||
**Secret:** `gitea-deploy-secret-2025`
|
|
||||||
|
|
||||||
Pushes to Gitea automatically trigger rebuilds. The webhook receiver:
|
|
||||||
1. Validates HMAC signature from Gitea
|
|
||||||
2. Runs `git pull && docker compose up -d --build`
|
|
||||||
3. Returns build status
|
|
||||||
|
|
||||||
**Adding a new repo to auto-deploy:**
|
|
||||||
1. Add entry to `/opt/deploy-webhook/webhook.py` REPOS dict
|
|
||||||
2. Restart: `ssh netcup "cd /opt/deploy-webhook && docker compose up -d --build"`
|
|
||||||
3. Add Gitea webhook:
|
|
||||||
```bash
|
|
||||||
curl -X POST "https://gitea.jeffemmett.com/api/v1/repos/jeffemmett/<repo>/hooks" \
|
|
||||||
-H "Authorization: token <gitea-token>" \
|
|
||||||
-H "Content-Type: application/json" \
|
|
||||||
-d '{"type":"gitea","active":true,"events":["push"],"config":{"url":"https://deploy.jeffemmett.com/deploy/<repo>","content_type":"json","secret":"gitea-deploy-secret-2025"}}'
|
|
||||||
```
|
|
||||||
|
|
||||||
**Currently auto-deploying:**
|
|
||||||
- `decolonize-time-website` → /opt/websites/decolonize-time-website
|
|
||||||
- `mycofi-earth-website` → /opt/websites/mycofi-earth-website
|
|
||||||
- `games-platform` → /opt/apps/games-platform
|
|
||||||
|
|
||||||
### 🔐 SSH Keys Quick Reference
|
|
||||||
|
|
||||||
**Local keys** (in `~/.ssh/` on your laptop):
|
|
||||||
|
|
||||||
| Service | Private Key | Public Key | Purpose |
|
|
||||||
|---------|-------------|------------|---------|
|
|
||||||
| **Gitea** | `gitea_ed25519` | `gitea_ed25519.pub` | Primary git repository |
|
|
||||||
| **GitHub** | `github_deploy_key` | `github_deploy_key.pub` | Public mirror sync |
|
|
||||||
| **Netcup RS 8000** | `netcup_ed25519` | `netcup_ed25519.pub` | Primary server SSH |
|
|
||||||
| **RunPod** | `runpod_ed25519` | `runpod_ed25519.pub` | GPU pods SSH |
|
|
||||||
| **Default** | `id_ed25519` | `id_ed25519.pub` | General purpose/legacy |
|
|
||||||
|
|
||||||
**Server-side keys** (in `/root/.ssh/` on Netcup RS 8000):
|
|
||||||
|
|
||||||
| Service | Key File | Purpose |
|
|
||||||
|---------|----------|---------|
|
|
||||||
| **Gitea** | `gitea_ed25519` | Server pulls from Gitea repos |
|
|
||||||
| **GitHub** | `github_ed25519` | Server pulls from GitHub (mirror repos) |
|
|
||||||
|
|
||||||
**SSH Config**: `~/.ssh/config` contains all host configurations
|
|
||||||
**Quick Access**:
|
|
||||||
- `ssh netcup` - Connect to Netcup RS 8000
|
|
||||||
- `ssh runpod` - Connect to RunPod
|
|
||||||
- `ssh gitea.jeffemmett.com` - Git operations
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 🤖 AI ORCHESTRATION ARCHITECTURE
|
|
||||||
|
|
||||||
### Smart Routing Strategy
|
|
||||||
All AI requests go through intelligent orchestration layer on RS 8000:
|
|
||||||
|
|
||||||
**Routing Logic:**
|
|
||||||
- **Text/Code (70-80% of workload)**: Always local RS 8000 CPU (Ollama) → FREE
|
|
||||||
- **Images - Low Priority**: RS 8000 CPU (SD 1.5/2.1) → FREE but slow (~60s)
|
|
||||||
- **Images - High Priority**: RunPod GPU (SDXL/SD3) → $0.02/image, fast
|
|
||||||
- **Video Generation**: Always RunPod GPU → $0.50/video (only option)
|
|
||||||
- **Training/Fine-tuning**: RunPod GPU on-demand
|
|
||||||
|
|
||||||
**Queue System:**
|
|
||||||
- Redis-based queues: text, image, code, video
|
|
||||||
- Priority-based routing (low/normal/high)
|
|
||||||
- Worker pools scale based on load
|
|
||||||
- Cost tracking per job, per user
|
|
||||||
|
|
||||||
**Cost Optimization:**
|
|
||||||
- Target: $90-120/mo (vs $136-236/mo current)
|
|
||||||
- Savings: $552-1,392/year
|
|
||||||
- 70-80% of workload FREE (local CPU)
|
|
||||||
- GPU only when needed (serverless = no idle costs)
|
|
||||||
|
|
||||||
### Deployment Architecture
|
|
||||||
```
|
|
||||||
RS 8000 G12 Pro (Netcup)
|
|
||||||
├── Cloudflare Tunnel (secure ingress)
|
|
||||||
├── Traefik Reverse Proxy (auto-discovery)
|
|
||||||
│ └── Routes to all services via Docker labels
|
|
||||||
├── Core Services
|
|
||||||
│ ├── Gitea (git hosting) - gitea.jeffemmett.com
|
|
||||||
│ └── Other internal tools
|
|
||||||
├── AI Services
|
|
||||||
│ ├── Ollama (text/code models)
|
|
||||||
│ ├── Stable Diffusion (CPU fallback)
|
|
||||||
│ └── Smart Router API (FastAPI)
|
|
||||||
├── Queue Infrastructure
|
|
||||||
│ ├── Redis (job queues)
|
|
||||||
│ └── PostgreSQL (job history/analytics)
|
|
||||||
├── Monitoring
|
|
||||||
│ ├── Prometheus (metrics)
|
|
||||||
│ ├── Grafana (dashboards)
|
|
||||||
│ └── Cost tracking API
|
|
||||||
└── Application Hosting
|
|
||||||
├── All websites (Dockerized + Traefik labels)
|
|
||||||
├── All apps (Dockerized + Traefik labels)
|
|
||||||
└── Backend services (Dockerized)
|
|
||||||
|
|
||||||
RunPod Serverless (GPU Burst)
|
|
||||||
├── SDXL/SD3 endpoints
|
|
||||||
├── Video generation (Wan2.1)
|
|
||||||
└── Training/fine-tuning jobs
|
|
||||||
```
|
|
||||||
|
|
||||||
### Integration Pattern for Projects
|
|
||||||
All projects use unified AI client SDK:
|
|
||||||
```python
|
|
||||||
from orchestrator_client import AIOrchestrator
|
|
||||||
ai = AIOrchestrator("http://rs8000-ip:8000")
|
|
||||||
|
|
||||||
# Automatically routes based on priority & model
|
|
||||||
result = await ai.generate_text(prompt, priority="low") # → FREE CPU
|
|
||||||
result = await ai.generate_image(prompt, priority="high") # → RunPod GPU
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 💰 GPU COST ANALYSIS & MIGRATION PLAN
|
|
||||||
|
|
||||||
### Current Infrastructure Costs (Monthly)
|
|
||||||
|
|
||||||
| Service | Type | Cost | Notes |
|
|
||||||
|---------|------|------|-------|
|
|
||||||
| Netcup RS 8000 G12 Pro | Fixed | ~€45 | 20 cores, 64GB RAM, 3TB (CPU-only) |
|
|
||||||
| RunPod Serverless | Variable | $50-100 | Pay-per-use GPU (images, video) |
|
|
||||||
| DigitalOcean Droplets | Fixed | ~$48 | ⚠️ DEPRECATED - migrate ASAP |
|
|
||||||
| **Current Total** | | **~$140-190/mo** | |
|
|
||||||
|
|
||||||
### GPU Provider Comparison
|
|
||||||
|
|
||||||
#### Netcup vGPU (NEW - Early Access, Ends July 7, 2025)
|
|
||||||
|
|
||||||
| Plan | GPU | VRAM | vCores | RAM | Storage | Price/mo | Price/hr equiv |
|
|
||||||
|------|-----|------|--------|-----|---------|----------|----------------|
|
|
||||||
| RS 2000 vGPU 7 | H200 | 7 GB dedicated | 8 | 16 GB DDR5 | 512 GB NVMe | €137.31 (~$150) | $0.21/hr |
|
|
||||||
| RS 4000 vGPU 14 | H200 | 14 GB dedicated | 12 | 32 GB DDR5 | 1 TB NVMe | €261.39 (~$285) | $0.40/hr |
|
|
||||||
|
|
||||||
**Pros:**
|
|
||||||
- NVIDIA H200 (latest gen, better than H100 for inference)
|
|
||||||
- Dedicated VRAM (no noisy neighbors)
|
|
||||||
- Germany location (EU data sovereignty, low latency to RS 8000)
|
|
||||||
- Fixed monthly cost = predictable budgeting
|
|
||||||
- 24/7 availability, no cold starts
|
|
||||||
|
|
||||||
**Cons:**
|
|
||||||
- Pay even when idle
|
|
||||||
- Limited to 7GB or 14GB VRAM options
|
|
||||||
- Early access = limited availability
|
|
||||||
|
|
||||||
#### RunPod Serverless (Current)
|
|
||||||
|
|
||||||
| GPU | VRAM | Price/hr | Typical Use |
|
|
||||||
|-----|------|----------|-------------|
|
|
||||||
| RTX 4090 | 24 GB | ~$0.44/hr | SDXL, medium models |
|
|
||||||
| A100 40GB | 40 GB | ~$1.14/hr | Large models, training |
|
|
||||||
| H100 80GB | 80 GB | ~$2.49/hr | Largest models |
|
|
||||||
|
|
||||||
**Current Endpoint Costs:**
|
|
||||||
- Image (SD/SDXL): ~$0.02/image (~2s compute)
|
|
||||||
- Video (Wan2.2): ~$0.50/video (~60s compute)
|
|
||||||
- Text (vLLM): ~$0.001/request
|
|
||||||
- Whisper: ~$0.01/minute audio
|
|
||||||
|
|
||||||
**Pros:**
|
|
||||||
- Zero idle costs
|
|
||||||
- Unlimited burst capacity
|
|
||||||
- Wide GPU selection (up to 80GB VRAM)
|
|
||||||
- Pay only for actual compute
|
|
||||||
|
|
||||||
**Cons:**
|
|
||||||
- Cold start delays (10-30s first request)
|
|
||||||
- Variable availability during peak times
|
|
||||||
- Per-request costs add up at scale
|
|
||||||
|
|
||||||
### Break-even Analysis
|
|
||||||
|
|
||||||
**When does Netcup vGPU become cheaper than RunPod?**
|
|
||||||
|
|
||||||
| Scenario | RunPod Cost | Netcup RS 2000 vGPU 7 | Netcup RS 4000 vGPU 14 |
|
|
||||||
|----------|-------------|----------------------|------------------------|
|
|
||||||
| 1,000 images/mo | $20 | $150 ❌ | $285 ❌ |
|
|
||||||
| 5,000 images/mo | $100 | $150 ❌ | $285 ❌ |
|
|
||||||
| **7,500 images/mo** | **$150** | **$150 ✅** | $285 ❌ |
|
|
||||||
| 10,000 images/mo | $200 | $150 ✅ | $285 ❌ |
|
|
||||||
| **14,250 images/mo** | **$285** | $150 ✅ | **$285 ✅** |
|
|
||||||
| 100 videos/mo | $50 | $150 ❌ | $285 ❌ |
|
|
||||||
| **300 videos/mo** | **$150** | **$150 ✅** | $285 ❌ |
|
|
||||||
| 500 videos/mo | $250 | $150 ✅ | $285 ❌ |
|
|
||||||
|
|
||||||
**Recommendation by Usage Pattern:**
|
|
||||||
|
|
||||||
| Monthly Usage | Best Option | Est. Cost |
|
|
||||||
|---------------|-------------|-----------|
|
|
||||||
| < 5,000 images OR < 250 videos | RunPod Serverless | $50-100 |
|
|
||||||
| 5,000-10,000 images OR 250-500 videos | **Netcup RS 2000 vGPU 7** | $150 fixed |
|
|
||||||
| > 10,000 images OR > 500 videos + training | **Netcup RS 4000 vGPU 14** | $285 fixed |
|
|
||||||
| Unpredictable/bursty workloads | RunPod Serverless | Variable |
|
|
||||||
|
|
||||||
### Migration Strategy
|
|
||||||
|
|
||||||
#### Phase 1: Immediate (Before July 7, 2025)
|
|
||||||
**Decision Point: Secure Netcup vGPU Early Access?**
|
|
||||||
|
|
||||||
- [ ] Monitor actual GPU usage for 2-4 weeks
|
|
||||||
- [ ] Calculate average monthly image/video generation
|
|
||||||
- [ ] If consistently > 5,000 images/mo → Consider RS 2000 vGPU 7
|
|
||||||
- [ ] If consistently > 10,000 images/mo → Consider RS 4000 vGPU 14
|
|
||||||
- [ ] **ACTION**: Redeem early access code if usage justifies fixed GPU
|
|
||||||
|
|
||||||
#### Phase 2: Hybrid Architecture (If vGPU Acquired)
|
|
||||||
|
|
||||||
```
|
|
||||||
RS 8000 G12 Pro (CPU - Current)
|
|
||||||
├── Ollama (text/code) → FREE
|
|
||||||
├── SD 1.5/2.1 CPU fallback → FREE
|
|
||||||
└── Orchestrator API
|
|
||||||
|
|
||||||
Netcup vGPU Server (NEW - If purchased)
|
|
||||||
├── Primary GPU workloads
|
|
||||||
├── SDXL/SD3 generation
|
|
||||||
├── Video generation (Wan2.1 I2V)
|
|
||||||
├── Model inference (14B params with 14GB VRAM)
|
|
||||||
└── Connected via internal Netcup network (low latency)
|
|
||||||
|
|
||||||
RunPod Serverless (Burst Only)
|
|
||||||
├── Overflow capacity
|
|
||||||
├── Models requiring > 14GB VRAM
|
|
||||||
├── Training/fine-tuning jobs
|
|
||||||
└── Geographic distribution needs
|
|
||||||
```
|
|
||||||
|
|
||||||
#### Phase 3: Cost Optimization Targets
|
|
||||||
|
|
||||||
| Scenario | Current | With vGPU Migration | Savings |
|
|
||||||
|----------|---------|---------------------|---------|
|
|
||||||
| Low usage | $140/mo | $95/mo (RS8000 + minimal RunPod) | $540/yr |
|
|
||||||
| Medium usage | $190/mo | $195/mo (RS8000 + vGPU 7) | Break-even |
|
|
||||||
| High usage | $250/mo | $195/mo (RS8000 + vGPU 7) | $660/yr |
|
|
||||||
| Very high usage | $350/mo | $330/mo (RS8000 + vGPU 14) | $240/yr |
|
|
||||||
|
|
||||||
### Model VRAM Requirements Reference
|
|
||||||
|
|
||||||
| Model | VRAM Needed | Fits vGPU 7? | Fits vGPU 14? |
|
|
||||||
|-------|-------------|--------------|---------------|
|
|
||||||
| SD 1.5 | ~4 GB | ✅ | ✅ |
|
|
||||||
| SD 2.1 | ~5 GB | ✅ | ✅ |
|
|
||||||
| SDXL | ~7 GB | ⚠️ Tight | ✅ |
|
|
||||||
| SD3 Medium | ~8 GB | ❌ | ✅ |
|
|
||||||
| Wan2.1 I2V 14B | ~12 GB | ❌ | ✅ |
|
|
||||||
| Wan2.1 T2V 14B | ~14 GB | ❌ | ⚠️ Tight |
|
|
||||||
| Flux.1 Dev | ~12 GB | ❌ | ✅ |
|
|
||||||
| LLaMA 3 8B (Q4) | ~6 GB | ✅ | ✅ |
|
|
||||||
| LLaMA 3 70B (Q4) | ~40 GB | ❌ | ❌ (RunPod) |
|
|
||||||
|
|
||||||
### Decision Framework
|
|
||||||
|
|
||||||
```
|
|
||||||
┌─────────────────────────────────────────────────────────┐
|
|
||||||
│ GPU WORKLOAD DECISION TREE │
|
|
||||||
├─────────────────────────────────────────────────────────┤
|
|
||||||
│ │
|
|
||||||
│ Is usage predictable and consistent? │
|
|
||||||
│ ├── YES → Is monthly GPU spend > $150? │
|
|
||||||
│ │ ├── YES → Netcup vGPU (fixed cost wins) │
|
|
||||||
│ │ └── NO → RunPod Serverless (no idle cost) │
|
|
||||||
│ └── NO → RunPod Serverless (pay for what you use) │
|
|
||||||
│ │
|
|
||||||
│ Does model require > 14GB VRAM? │
|
|
||||||
│ ├── YES → RunPod (A100/H100 on-demand) │
|
|
||||||
│ └── NO → Netcup vGPU or RS 8000 CPU │
|
|
||||||
│ │
|
|
||||||
│ Is low latency critical? │
|
|
||||||
│ ├── YES → Netcup vGPU (same datacenter as RS 8000) │
|
|
||||||
│ └── NO → RunPod Serverless (acceptable for batch) │
|
|
||||||
│ │
|
|
||||||
└─────────────────────────────────────────────────────────┘
|
|
||||||
```
|
|
||||||
|
|
||||||
### Monitoring & Review Schedule
|
|
||||||
|
|
||||||
- **Weekly**: Review RunPod spend dashboard
|
|
||||||
- **Monthly**: Calculate total GPU costs, compare to vGPU break-even
|
|
||||||
- **Quarterly**: Re-evaluate architecture, consider plan changes
|
|
||||||
- **Annually**: Full infrastructure cost audit
|
|
||||||
|
|
||||||
### Action Items
|
|
||||||
|
|
||||||
- [ ] **URGENT**: Decide on Netcup vGPU early access before July 7, 2025
|
|
||||||
- [ ] Set up GPU usage tracking in orchestrator
|
|
||||||
- [ ] Create Grafana dashboard for cost monitoring
|
|
||||||
- [ ] Test Wan2.1 I2V 14B model on vGPU 14 (if acquired)
|
|
||||||
- [ ] Document migration runbook for vGPU setup
|
|
||||||
- [ ] Complete DigitalOcean deprecation (separate from GPU decision)
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 📁 PROJECT PORTFOLIO STRUCTURE
|
|
||||||
|
|
||||||
### Repository Organization
|
|
||||||
- **Location**: `/home/jeffe/Github/`
|
|
||||||
- **Primary Flow**: Gitea (source of truth) → GitHub (public mirror)
|
|
||||||
- **Containerization**: ALL repos must be Dockerized with optimized production containers
|
|
||||||
|
|
||||||
### 🎯 MAIN PROJECT: canvas-website
|
|
||||||
**Location**: `/home/jeffe/Github/canvas-website`
|
|
||||||
**Description**: Collaborative canvas deployment - the integration hub where all tools come together
|
|
||||||
- Tldraw-based collaborative canvas platform
|
|
||||||
- Integrates Hyperindex, rSpace, MycoFi, and other tools
|
|
||||||
- Real-time collaboration features
|
|
||||||
- Deployed on RS 8000 in Docker
|
|
||||||
- Uses AI orchestrator for intelligent features
|
|
||||||
|
|
||||||
### Project Categories
|
|
||||||
|
|
||||||
**AI & Infrastructure:**
|
|
||||||
- AI Orchestrator (smart routing between RS 8000 & RunPod)
|
|
||||||
- Model hosting & fine-tuning pipelines
|
|
||||||
- Cost optimization & monitoring dashboards
|
|
||||||
|
|
||||||
**Web Applications & Sites:**
|
|
||||||
- **canvas-website**: Main collaborative canvas (integration hub)
|
|
||||||
- All deployed in Docker containers on RS 8000
|
|
||||||
- Cloudflare Workers for edge functions (Hyperindex)
|
|
||||||
- Static sites + dynamic backends containerized
|
|
||||||
|
|
||||||
**Supporting Projects:**
|
|
||||||
- **Hyperindex**: Tldraw canvas integration (Cloudflare stack) - integrates into canvas-website
|
|
||||||
- **rSpace**: Real-time collaboration platform - integrates into canvas-website
|
|
||||||
- **MycoFi**: DeFi/Web3 project - integrates into canvas-website
|
|
||||||
- **Canvas-related tools**: Knowledge graph & visualization components
|
|
||||||
|
|
||||||
### Deployment Strategy
|
|
||||||
1. **Development**: Local WSL2 environment (`/home/jeffe/Github/`)
|
|
||||||
2. **Version Control**: Push to Gitea FIRST → Auto-mirror to GitHub
|
|
||||||
3. **Containerization**: Build optimized Docker images with Traefik labels
|
|
||||||
4. **Deployment**: Deploy to RS 8000 via Docker Compose (join `traefik-public` network)
|
|
||||||
5. **Routing**: Traefik auto-discovers service via labels, no config changes needed
|
|
||||||
6. **DNS**: Add hostname to Cloudflare tunnel (if new domain) or it just works (existing domains)
|
|
||||||
7. **AI Integration**: Connect to local orchestrator API
|
|
||||||
8. **Monitoring**: Grafana dashboards for all services
|
|
||||||
|
|
||||||
### Infrastructure Philosophy
|
|
||||||
- **Self-hosted first**: Own your infrastructure (RS 8000 + Gitea)
|
|
||||||
- **Cloud for edge cases**: Cloudflare (edge), RunPod (GPU burst)
|
|
||||||
- **Cost-optimized**: Local CPU for 70-80% of workload
|
|
||||||
- **Dockerized everything**: Reproducible, scalable, maintainable
|
|
||||||
- **Smart orchestration**: Right compute for the right job
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
- can you make sure you are running the hf download for a non-deprecated version? After that, you can proceed with Image-to-Video 14B 720p (RECOMMENDED)
|
|
||||||
huggingface-cli download Wan-AI/Wan2.1-I2V-14B-720P \
|
|
||||||
--include "*.safetensors" \
|
|
||||||
--local-dir models/diffusion_models/wan2.1_i2v_14b
|
|
||||||
|
|
||||||
## 🕸️ HYPERINDEX PROJECT - TOP PRIORITY
|
|
||||||
|
|
||||||
**Location:** `/home/jeffe/Github/hyperindex-system/`
|
|
||||||
|
|
||||||
When user is ready to work on the hyperindexing system:
|
|
||||||
1. Reference `HYPERINDEX_PROJECT.md` for complete architecture and implementation details
|
|
||||||
2. Follow `HYPERINDEX_TODO.md` for step-by-step checklist
|
|
||||||
3. Start with Phase 1 (Database & Core Types), then proceed sequentially through Phase 5
|
|
||||||
4. This is a tldraw canvas integration project using Cloudflare Workers, D1, R2, and Durable Objects
|
|
||||||
5. Creates a "living, mycelial network" of web discoveries that spawn on the canvas in real-time
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 📋 BACKLOG.MD - UNIFIED TASK MANAGEMENT
|
|
||||||
|
|
||||||
**All projects use Backlog.md for task tracking.** Tasks are managed as markdown files and can be viewed at `backlog.jeffemmett.com` for a unified cross-project view.
|
|
||||||
|
|
||||||
### MCP Integration
|
|
||||||
Backlog.md is integrated via MCP server. Available tools:
|
|
||||||
- `backlog.task_create` - Create new tasks
|
|
||||||
- `backlog.task_list` - List tasks with filters
|
|
||||||
- `backlog.task_update` - Update task status/details
|
|
||||||
- `backlog.task_view` - View task details
|
|
||||||
- `backlog.search` - Search across tasks, docs, decisions
|
|
||||||
|
|
||||||
### Task Lifecycle Workflow
|
|
||||||
|
|
||||||
**CRITICAL: Claude agents MUST follow this workflow for ALL development tasks:**
|
|
||||||
|
|
||||||
#### 1. Task Discovery (Before Starting Work)
|
|
||||||
```bash
|
|
||||||
# Check if task already exists
|
|
||||||
backlog search "<task description>" --plain
|
|
||||||
|
|
||||||
# List current tasks
|
|
||||||
backlog task list --plain
|
|
||||||
```
|
|
||||||
|
|
||||||
#### 2. Task Creation (If Not Exists)
|
|
||||||
```bash
|
|
||||||
# Create task with full details
|
|
||||||
backlog task create "Task Title" \
|
|
||||||
--desc "Detailed description" \
|
|
||||||
--priority high \
|
|
||||||
--status "To Do"
|
|
||||||
```
|
|
||||||
|
|
||||||
#### 3. Starting Work (Move to In Progress)
|
|
||||||
```bash
|
|
||||||
# Update status when starting
|
|
||||||
backlog task edit <task-id> --status "In Progress"
|
|
||||||
```
|
|
||||||
|
|
||||||
#### 4. During Development (Update Notes)
|
|
||||||
```bash
|
|
||||||
# Append progress notes
|
|
||||||
backlog task edit <task-id> --append-notes "Completed X, working on Y"
|
|
||||||
|
|
||||||
# Update acceptance criteria
|
|
||||||
backlog task edit <task-id> --check-ac 1
|
|
||||||
```
|
|
||||||
|
|
||||||
#### 5. Completion (Move to Done)
|
|
||||||
```bash
|
|
||||||
# Mark complete when finished
|
|
||||||
backlog task edit <task-id> --status "Done"
|
|
||||||
```
|
|
||||||
|
|
||||||
### Project Initialization
|
|
||||||
|
|
||||||
When starting work in a new repository that doesn't have backlog:
|
|
||||||
```bash
|
|
||||||
cd /path/to/repo
|
|
||||||
backlog init "Project Name" --integration-mode mcp --defaults
|
|
||||||
```
|
|
||||||
|
|
||||||
This creates the `backlog/` directory structure:
|
|
||||||
```
|
|
||||||
backlog/
|
|
||||||
├── config.yml # Project configuration
|
|
||||||
├── tasks/ # Active tasks
|
|
||||||
├── completed/ # Finished tasks
|
|
||||||
├── drafts/ # Draft tasks
|
|
||||||
├── docs/ # Project documentation
|
|
||||||
├── decisions/ # Architecture decision records
|
|
||||||
└── archive/ # Archived tasks
|
|
||||||
```
|
|
||||||
|
|
||||||
### Task File Format
|
|
||||||
Tasks are markdown files with YAML frontmatter:
|
|
||||||
```yaml
|
|
||||||
---
|
|
||||||
id: task-001
|
|
||||||
title: Feature implementation
|
|
||||||
status: In Progress
|
|
||||||
assignee: [@claude]
|
|
||||||
created_date: '2025-12-03 14:30'
|
|
||||||
labels: [feature, backend]
|
|
||||||
priority: high
|
|
||||||
dependencies: [task-002]
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
What needs to be done...
|
|
||||||
|
|
||||||
## Plan
|
|
||||||
1. Step one
|
|
||||||
2. Step two
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
- [ ] Criterion 1
|
|
||||||
- [x] Criterion 2 (completed)
|
|
||||||
|
|
||||||
## Notes
|
|
||||||
Progress updates go here...
|
|
||||||
```
|
|
||||||
|
|
||||||
### Cross-Project Aggregation (backlog.jeffemmett.com)
|
|
||||||
|
|
||||||
**Architecture:**
|
|
||||||
```
|
|
||||||
┌─────────────────────────────────────────────────────────────┐
|
|
||||||
│ backlog.jeffemmett.com │
|
|
||||||
│ (Unified Kanban Dashboard) │
|
|
||||||
├─────────────────────────────────────────────────────────────┤
|
|
||||||
│ │
|
|
||||||
│ ┌─────────────┐ ┌─────────────┐ ┌─────────────┐ │
|
|
||||||
│ │ canvas-web │ │ hyperindex │ │ mycofi │ ... │
|
|
||||||
│ │ (purple) │ │ (green) │ │ (blue) │ │
|
|
||||||
│ └──────┬──────┘ └──────┬──────┘ └──────┬──────┘ │
|
|
||||||
│ │ │ │ │
|
|
||||||
│ └────────────────┴────────────────┘ │
|
|
||||||
│ │ │
|
|
||||||
│ ┌───────────┴───────────┐ │
|
|
||||||
│ │ Aggregation API │ │
|
|
||||||
│ │ (polls all projects) │ │
|
|
||||||
│ └───────────────────────┘ │
|
|
||||||
│ │
|
|
||||||
└─────────────────────────────────────────────────────────────┘
|
|
||||||
|
|
||||||
Data Sources:
|
|
||||||
├── Local: /home/jeffe/Github/*/backlog/
|
|
||||||
└── Remote: ssh netcup "ls /opt/*/backlog/"
|
|
||||||
```
|
|
||||||
|
|
||||||
**Color Coding by Project:**
|
|
||||||
| Project | Color | Location |
|
|
||||||
|---------|-------|----------|
|
|
||||||
| canvas-website | Purple | Local + Netcup |
|
|
||||||
| hyperindex-system | Green | Local |
|
|
||||||
| mycofi-earth | Blue | Local + Netcup |
|
|
||||||
| decolonize-time | Orange | Local + Netcup |
|
|
||||||
| ai-orchestrator | Red | Netcup |
|
|
||||||
|
|
||||||
**Aggregation Service** (to be deployed on Netcup):
|
|
||||||
- Polls all project `backlog/tasks/` directories
|
|
||||||
- Serves unified JSON API at `api.backlog.jeffemmett.com`
|
|
||||||
- Web UI at `backlog.jeffemmett.com` shows combined Kanban
|
|
||||||
- Real-time updates via WebSocket
|
|
||||||
- Filter by project, status, priority, assignee
|
|
||||||
|
|
||||||
### Agent Behavior Requirements
|
|
||||||
|
|
||||||
**When Claude starts working on ANY task:**
|
|
||||||
|
|
||||||
1. **Check for existing backlog** in the repo:
|
|
||||||
```bash
|
|
||||||
ls backlog/config.yml 2>/dev/null || echo "Backlog not initialized"
|
|
||||||
```
|
|
||||||
|
|
||||||
2. **If backlog exists**, search for related tasks:
|
|
||||||
```bash
|
|
||||||
backlog search "<relevant keywords>" --plain
|
|
||||||
```
|
|
||||||
|
|
||||||
3. **Create or update task** before writing code:
|
|
||||||
```bash
|
|
||||||
# If new task needed:
|
|
||||||
backlog task create "Task title" --status "In Progress"
|
|
||||||
|
|
||||||
# If task exists:
|
|
||||||
backlog task edit <id> --status "In Progress"
|
|
||||||
```
|
|
||||||
|
|
||||||
4. **Update task on completion**:
|
|
||||||
```bash
|
|
||||||
backlog task edit <id> --status "Done" --append-notes "Implementation complete"
|
|
||||||
```
|
|
||||||
|
|
||||||
5. **Never leave tasks in "In Progress"** when stopping work - either complete them or add notes explaining blockers.
|
|
||||||
|
|
||||||
### Viewing Tasks
|
|
||||||
|
|
||||||
**Terminal Kanban Board:**
|
|
||||||
```bash
|
|
||||||
backlog board
|
|
||||||
```
|
|
||||||
|
|
||||||
**Web Interface (single project):**
|
|
||||||
```bash
|
|
||||||
backlog browser --port 6420
|
|
||||||
```
|
|
||||||
|
|
||||||
**Unified View (all projects):**
|
|
||||||
Visit `backlog.jeffemmett.com` (served from Netcup)
|
|
||||||
|
|
||||||
### Backlog CLI Quick Reference
|
|
||||||
|
|
||||||
#### Task Operations
|
|
||||||
| Action | Command |
|
|
||||||
|--------|---------|
|
|
||||||
| View task | `backlog task 42 --plain` |
|
|
||||||
| List tasks | `backlog task list --plain` |
|
|
||||||
| Search tasks | `backlog search "topic" --plain` |
|
|
||||||
| Filter by status | `backlog task list -s "In Progress" --plain` |
|
|
||||||
| Create task | `backlog task create "Title" -d "Description" --ac "Criterion 1"` |
|
|
||||||
| Edit task | `backlog task edit 42 -t "New Title" -s "In Progress"` |
|
|
||||||
| Assign task | `backlog task edit 42 -a @claude` |
|
|
||||||
|
|
||||||
#### Acceptance Criteria Management
|
|
||||||
| Action | Command |
|
|
||||||
|--------|---------|
|
|
||||||
| Add AC | `backlog task edit 42 --ac "New criterion"` |
|
|
||||||
| Check AC #1 | `backlog task edit 42 --check-ac 1` |
|
|
||||||
| Check multiple | `backlog task edit 42 --check-ac 1 --check-ac 2` |
|
|
||||||
| Uncheck AC | `backlog task edit 42 --uncheck-ac 1` |
|
|
||||||
| Remove AC | `backlog task edit 42 --remove-ac 2` |
|
|
||||||
|
|
||||||
#### Multi-line Input (Description/Plan/Notes)
|
|
||||||
The CLI preserves input literally. Use shell-specific syntax for real newlines:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Bash/Zsh (ANSI-C quoting)
|
|
||||||
backlog task edit 42 --notes $'Line1\nLine2\nLine3'
|
|
||||||
backlog task edit 42 --plan $'1. Step one\n2. Step two'
|
|
||||||
|
|
||||||
# POSIX portable
|
|
||||||
backlog task edit 42 --notes "$(printf 'Line1\nLine2')"
|
|
||||||
|
|
||||||
# Append notes progressively
|
|
||||||
backlog task edit 42 --append-notes $'- Completed X\n- Working on Y'
|
|
||||||
```
|
|
||||||
|
|
||||||
#### Definition of Done (DoD)
|
|
||||||
A task is **Done** only when ALL of these are complete:
|
|
||||||
|
|
||||||
**Via CLI:**
|
|
||||||
1. All acceptance criteria checked: `--check-ac <index>` for each
|
|
||||||
2. Implementation notes added: `--notes "..."` or `--append-notes "..."`
|
|
||||||
3. Status set to Done: `-s Done`
|
|
||||||
|
|
||||||
**Via Code/Testing:**
|
|
||||||
4. Tests pass (run test suite and linting)
|
|
||||||
5. Documentation updated if needed
|
|
||||||
6. Code self-reviewed
|
|
||||||
7. No regressions
|
|
||||||
|
|
||||||
**NEVER mark a task as Done without completing ALL items above.**
|
|
||||||
|
|
||||||
### Configuration Reference
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 🔧 TROUBLESHOOTING
|
|
||||||
|
|
||||||
### tmux "server exited unexpectedly"
|
|
||||||
This error occurs when a stale socket file exists from a crashed tmux server.
|
|
||||||
|
|
||||||
**Fix:**
|
|
||||||
```bash
|
|
||||||
rm -f /tmp/tmux-$(id -u)/default
|
|
||||||
```
|
|
||||||
|
|
||||||
Then start a new session normally with `tmux` or `tmux new -s <name>`.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
Default `backlog/config.yml`:
|
|
||||||
```yaml
|
|
||||||
project_name: "Project Name"
|
|
||||||
default_status: "To Do"
|
|
||||||
statuses: ["To Do", "In Progress", "Done"]
|
|
||||||
labels: []
|
|
||||||
milestones: []
|
|
||||||
date_format: yyyy-mm-dd
|
|
||||||
max_column_width: 20
|
|
||||||
auto_open_browser: true
|
|
||||||
default_port: 6420
|
|
||||||
remote_operations: true
|
|
||||||
auto_commit: true
|
|
||||||
zero_padded_ids: 3
|
|
||||||
bypass_git_hooks: false
|
|
||||||
check_active_branches: true
|
|
||||||
active_branch_days: 60
|
|
||||||
```
|
|
||||||
55
Dockerfile
55
Dockerfile
|
|
@ -1,55 +0,0 @@
|
||||||
# Canvas Website Dockerfile
|
|
||||||
# Builds Vite frontend and serves with nginx
|
|
||||||
# Backend (sync) still uses Cloudflare Workers
|
|
||||||
|
|
||||||
# Build stage
|
|
||||||
FROM node:20-alpine AS build
|
|
||||||
WORKDIR /app
|
|
||||||
|
|
||||||
# Install dependencies
|
|
||||||
COPY package*.json ./
|
|
||||||
RUN npm ci --legacy-peer-deps
|
|
||||||
|
|
||||||
# Copy source
|
|
||||||
COPY . .
|
|
||||||
|
|
||||||
# Build args for environment
|
|
||||||
ARG VITE_WORKER_ENV=production
|
|
||||||
ARG VITE_DAILY_API_KEY
|
|
||||||
ARG VITE_RUNPOD_API_KEY
|
|
||||||
ARG VITE_RUNPOD_IMAGE_ENDPOINT_ID
|
|
||||||
ARG VITE_RUNPOD_VIDEO_ENDPOINT_ID
|
|
||||||
ARG VITE_RUNPOD_TEXT_ENDPOINT_ID
|
|
||||||
ARG VITE_RUNPOD_WHISPER_ENDPOINT_ID
|
|
||||||
|
|
||||||
# Set environment for build
|
|
||||||
# VITE_WORKER_ENV: 'production' | 'staging' | 'dev' | 'local'
|
|
||||||
ENV VITE_WORKER_ENV=$VITE_WORKER_ENV
|
|
||||||
ENV VITE_DAILY_API_KEY=$VITE_DAILY_API_KEY
|
|
||||||
ENV VITE_RUNPOD_API_KEY=$VITE_RUNPOD_API_KEY
|
|
||||||
ENV VITE_RUNPOD_IMAGE_ENDPOINT_ID=$VITE_RUNPOD_IMAGE_ENDPOINT_ID
|
|
||||||
ENV VITE_RUNPOD_VIDEO_ENDPOINT_ID=$VITE_RUNPOD_VIDEO_ENDPOINT_ID
|
|
||||||
ENV VITE_RUNPOD_TEXT_ENDPOINT_ID=$VITE_RUNPOD_TEXT_ENDPOINT_ID
|
|
||||||
ENV VITE_RUNPOD_WHISPER_ENDPOINT_ID=$VITE_RUNPOD_WHISPER_ENDPOINT_ID
|
|
||||||
|
|
||||||
# Build the app
|
|
||||||
RUN npm run build
|
|
||||||
|
|
||||||
# Production stage
|
|
||||||
FROM nginx:alpine AS production
|
|
||||||
WORKDIR /usr/share/nginx/html
|
|
||||||
|
|
||||||
# Remove default nginx static assets
|
|
||||||
RUN rm -rf ./*
|
|
||||||
|
|
||||||
# Copy built assets from build stage
|
|
||||||
COPY --from=build /app/dist .
|
|
||||||
|
|
||||||
# Copy nginx config
|
|
||||||
COPY nginx.conf /etc/nginx/conf.d/default.conf
|
|
||||||
|
|
||||||
# Expose port
|
|
||||||
EXPOSE 80
|
|
||||||
|
|
||||||
# Start nginx
|
|
||||||
CMD ["nginx", "-g", "daemon off;"]
|
|
||||||
|
|
@ -1,232 +0,0 @@
|
||||||
# mulTmux Integration
|
|
||||||
|
|
||||||
mulTmux is now integrated into the canvas-website project as a collaborative terminal tool. This allows multiple developers to work together in the same terminal session.
|
|
||||||
|
|
||||||
## Installation
|
|
||||||
|
|
||||||
From the root of the canvas-website project:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Install all dependencies including mulTmux packages
|
|
||||||
npm run multmux:install
|
|
||||||
|
|
||||||
# Build mulTmux packages
|
|
||||||
npm run multmux:build
|
|
||||||
```
|
|
||||||
|
|
||||||
## Available Commands
|
|
||||||
|
|
||||||
All commands are run from the **root** of the canvas-website project:
|
|
||||||
|
|
||||||
| Command | Description |
|
|
||||||
|---------|-------------|
|
|
||||||
| `npm run multmux:install` | Install mulTmux dependencies |
|
|
||||||
| `npm run multmux:build` | Build server and CLI packages |
|
|
||||||
| `npm run multmux:dev:server` | Run server in development mode |
|
|
||||||
| `npm run multmux:dev:cli` | Run CLI in development mode |
|
|
||||||
| `npm run multmux:start` | Start the production server |
|
|
||||||
|
|
||||||
## Quick Start
|
|
||||||
|
|
||||||
### 1. Build mulTmux
|
|
||||||
|
|
||||||
```bash
|
|
||||||
npm run multmux:build
|
|
||||||
```
|
|
||||||
|
|
||||||
### 2. Start the Server Locally (for testing)
|
|
||||||
|
|
||||||
```bash
|
|
||||||
npm run multmux:start
|
|
||||||
```
|
|
||||||
|
|
||||||
Server will be available at:
|
|
||||||
- HTTP API: `http://localhost:3000`
|
|
||||||
- WebSocket: `ws://localhost:3001`
|
|
||||||
|
|
||||||
### 3. Install CLI Globally
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cd multmux/packages/cli
|
|
||||||
npm link
|
|
||||||
```
|
|
||||||
|
|
||||||
Now you can use the `multmux` command anywhere!
|
|
||||||
|
|
||||||
### 4. Create a Session
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Local testing
|
|
||||||
multmux create my-session
|
|
||||||
|
|
||||||
# Or specify your AI server (when deployed)
|
|
||||||
multmux create my-session --server http://your-ai-server:3000
|
|
||||||
```
|
|
||||||
|
|
||||||
### 5. Join from Another Terminal
|
|
||||||
|
|
||||||
```bash
|
|
||||||
multmux join <token-from-above> --server ws://your-ai-server:3001
|
|
||||||
```
|
|
||||||
|
|
||||||
## Deploying to AI Server
|
|
||||||
|
|
||||||
### Option 1: Using the Deploy Script
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cd multmux
|
|
||||||
./infrastructure/deploy.sh
|
|
||||||
```
|
|
||||||
|
|
||||||
This will:
|
|
||||||
- Install system dependencies (tmux, Node.js)
|
|
||||||
- Build the project
|
|
||||||
- Set up PM2 for process management
|
|
||||||
- Start the server
|
|
||||||
|
|
||||||
### Option 2: Manual Deployment
|
|
||||||
|
|
||||||
1. **SSH to your AI server**
|
|
||||||
```bash
|
|
||||||
ssh your-ai-server
|
|
||||||
```
|
|
||||||
|
|
||||||
2. **Clone or copy the project**
|
|
||||||
```bash
|
|
||||||
git clone <your-repo>
|
|
||||||
cd canvas-website
|
|
||||||
git checkout mulTmux-webtree
|
|
||||||
```
|
|
||||||
|
|
||||||
3. **Install and build**
|
|
||||||
```bash
|
|
||||||
npm install
|
|
||||||
npm run multmux:build
|
|
||||||
```
|
|
||||||
|
|
||||||
4. **Start with PM2**
|
|
||||||
```bash
|
|
||||||
cd multmux
|
|
||||||
npm install -g pm2
|
|
||||||
pm2 start packages/server/dist/index.js --name multmux-server
|
|
||||||
pm2 save
|
|
||||||
pm2 startup
|
|
||||||
```
|
|
||||||
|
|
||||||
## Project Structure
|
|
||||||
|
|
||||||
```
|
|
||||||
canvas-website/
|
|
||||||
├── multmux/
|
|
||||||
│ ├── packages/
|
|
||||||
│ │ ├── server/ # Backend (Node.js + tmux)
|
|
||||||
│ │ └── cli/ # Command-line client
|
|
||||||
│ ├── infrastructure/
|
|
||||||
│ │ ├── deploy.sh # Auto-deployment script
|
|
||||||
│ │ └── nginx.conf # Reverse proxy config
|
|
||||||
│ └── README.md # Full documentation
|
|
||||||
├── package.json # Now includes workspace config
|
|
||||||
└── MULTMUX_INTEGRATION.md # This file
|
|
||||||
```
|
|
||||||
|
|
||||||
## Usage Examples
|
|
||||||
|
|
||||||
### Collaborative Coding Session
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Developer 1: Create session in project directory
|
|
||||||
cd /path/to/project
|
|
||||||
multmux create coding-session --repo $(pwd)
|
|
||||||
|
|
||||||
# Developer 2: Join and start coding together
|
|
||||||
multmux join <token>
|
|
||||||
|
|
||||||
# Both can now type in the same terminal!
|
|
||||||
```
|
|
||||||
|
|
||||||
### Debugging Together
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Create a session for debugging
|
|
||||||
multmux create debug-auth-issue
|
|
||||||
|
|
||||||
# Share token with teammate
|
|
||||||
# Both can run commands, check logs, etc.
|
|
||||||
```
|
|
||||||
|
|
||||||
### List Active Sessions
|
|
||||||
|
|
||||||
```bash
|
|
||||||
multmux list
|
|
||||||
```
|
|
||||||
|
|
||||||
## Configuration
|
|
||||||
|
|
||||||
### Environment Variables
|
|
||||||
|
|
||||||
You can customize ports by setting environment variables:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
export PORT=3000 # HTTP API port
|
|
||||||
export WS_PORT=3001 # WebSocket port
|
|
||||||
```
|
|
||||||
|
|
||||||
### Token Expiration
|
|
||||||
|
|
||||||
Default: 60 minutes. To change, edit `/home/jeffe/Github/canvas-website/multmux/packages/server/src/managers/TokenManager.ts:11`
|
|
||||||
|
|
||||||
### Session Cleanup
|
|
||||||
|
|
||||||
Sessions auto-cleanup when all users disconnect. To change this behavior, edit `/home/jeffe/Github/canvas-website/multmux/packages/server/src/managers/SessionManager.ts:64`
|
|
||||||
|
|
||||||
## Troubleshooting
|
|
||||||
|
|
||||||
### "Command not found: multmux"
|
|
||||||
|
|
||||||
Run `npm link` from the CLI package:
|
|
||||||
```bash
|
|
||||||
cd multmux/packages/cli
|
|
||||||
npm link
|
|
||||||
```
|
|
||||||
|
|
||||||
### "Connection refused"
|
|
||||||
|
|
||||||
1. Check server is running:
|
|
||||||
```bash
|
|
||||||
pm2 status
|
|
||||||
```
|
|
||||||
|
|
||||||
2. Check ports are available:
|
|
||||||
```bash
|
|
||||||
netstat -tlnp | grep -E '3000|3001'
|
|
||||||
```
|
|
||||||
|
|
||||||
3. Check logs:
|
|
||||||
```bash
|
|
||||||
pm2 logs multmux-server
|
|
||||||
```
|
|
||||||
|
|
||||||
### Token Expired
|
|
||||||
|
|
||||||
Generate a new token:
|
|
||||||
```bash
|
|
||||||
curl -X POST http://localhost:3000/api/sessions/<session-id>/tokens \
|
|
||||||
-H "Content-Type: application/json" \
|
|
||||||
-d '{"expiresInMinutes": 60}'
|
|
||||||
```
|
|
||||||
|
|
||||||
## Security Notes
|
|
||||||
|
|
||||||
- Tokens expire after 60 minutes
|
|
||||||
- Sessions are isolated per tmux instance
|
|
||||||
- All input is validated on the server
|
|
||||||
- Use nginx + SSL for production deployments
|
|
||||||
|
|
||||||
## Next Steps
|
|
||||||
|
|
||||||
1. **Test locally first**: Run `npm run multmux:start` and try creating/joining sessions
|
|
||||||
2. **Deploy to AI server**: Use `./infrastructure/deploy.sh`
|
|
||||||
3. **Set up nginx**: Copy config from `infrastructure/nginx.conf` for SSL/reverse proxy
|
|
||||||
4. **Share with team**: Send them tokens to collaborate!
|
|
||||||
|
|
||||||
For full documentation, see `multmux/README.md`.
|
|
||||||
|
|
@ -1,236 +0,0 @@
|
||||||
# Offline Storage Feasibility Assessment
|
|
||||||
|
|
||||||
|
|
||||||
Summary
|
|
||||||
Difficulty: Medium — Feasible with Automerge's built-in support.
|
|
||||||
Why it's feasible
|
|
||||||
Automerge Repo supports storage adapters — you can add an IndexedDB adapter alongside the network adapter
|
|
||||||
CRDT merging — Automerge handles conflict resolution automatically
|
|
||||||
Existing architecture — minimal changes needed; mainly adding a storage adapter
|
|
||||||
What needs to be done
|
|
||||||
Add IndexedDB storage adapter (easy)
|
|
||||||
Install/create an IndexedDB storage adapter
|
|
||||||
Add it to the Repo configuration
|
|
||||||
Fix document ID consistency (important)
|
|
||||||
Currently uses repo.create() which generates new IDs each time
|
|
||||||
Need to use repo.find(roomId) or store a roomId → documentId mapping
|
|
||||||
This is the main blocker for local storage persistence
|
|
||||||
Handle online/offline transitions (medium)
|
|
||||||
Listen for network status changes
|
|
||||||
Trigger sync when coming back online
|
|
||||||
The existing network adapter already handles reconnection
|
|
||||||
Update initialization logic (medium)
|
|
||||||
Load from IndexedDB first (instant offline access)
|
|
||||||
Then sync with server when online
|
|
||||||
Automerge will automatically merge changes
|
|
||||||
Estimated effort
|
|
||||||
Research & setup: 1-2 hours
|
|
||||||
Implementation: 4-6 hours
|
|
||||||
Testing: 2-3 hours
|
|
||||||
Total: ~1 day of focused work
|
|
||||||
Main challenge
|
|
||||||
The document ID consistency issue. The code comment says "We can't use repo.find() with a custom ID because Automerge requires specific document ID formats" — this needs to be resolved. Options:
|
|
||||||
Store a mapping of roomId → documentId in IndexedDB
|
|
||||||
Use a deterministic algorithm to convert roomId to a valid Automerge document ID
|
|
||||||
Use Automerge's document ID generation but persist the mapping
|
|
||||||
Benefits
|
|
||||||
Instant offline access — no loading spinners
|
|
||||||
Automatic conflict resolution — Automerge handles merging
|
|
||||||
Better UX — works seamlessly offline
|
|
||||||
Resilience — works even if server is temporarily unavailable
|
|
||||||
I've created a detailed feasibility document at OFFLINE_STORAGE_FEASIBILITY.md with implementation steps, code examples, and considerations.
|
|
||||||
Should I start implementing this, or do you want to review the feasibility document first?
|
|
||||||
|
|
||||||
|
|
||||||
## Summary
|
|
||||||
|
|
||||||
**Difficulty: Medium** - The implementation is straightforward thanks to Automerge's built-in support for storage adapters, but requires careful integration with the existing sync architecture.
|
|
||||||
|
|
||||||
## Current Architecture
|
|
||||||
|
|
||||||
1. **Client-side**: Uses `@automerge/automerge-repo` with `CloudflareNetworkAdapter` for WebSocket sync
|
|
||||||
2. **Server-side**: `AutomergeDurableObject` stores documents in R2 and handles WebSocket connections
|
|
||||||
3. **Persistence flow**:
|
|
||||||
- Client saves to worker via POST `/room/:roomId`
|
|
||||||
- Worker persists to R2 (throttled to every 2 seconds)
|
|
||||||
- Client loads initial data from server via GET `/room/:roomId`
|
|
||||||
|
|
||||||
## What's Needed
|
|
||||||
|
|
||||||
### 1. Add IndexedDB Storage Adapter (Easy)
|
|
||||||
|
|
||||||
Automerge Repo supports storage adapters out of the box. You'll need to:
|
|
||||||
|
|
||||||
- Install `@automerge/automerge-repo-storage-indexeddb` (if available) or create a custom IndexedDB adapter
|
|
||||||
- Add the storage adapter to the Repo configuration alongside the network adapter
|
|
||||||
- The Repo will automatically persist document changes to IndexedDB
|
|
||||||
|
|
||||||
**Code changes needed:**
|
|
||||||
```typescript
|
|
||||||
// In useAutomergeSyncRepo.ts
|
|
||||||
import { IndexedDBStorageAdapter } from "@automerge/automerge-repo-storage-indexeddb"
|
|
||||||
|
|
||||||
const [repo] = useState(() => {
|
|
||||||
const adapter = new CloudflareNetworkAdapter(workerUrl, roomId, applyJsonSyncData)
|
|
||||||
const storageAdapter = new IndexedDBStorageAdapter() // Add this
|
|
||||||
return new Repo({
|
|
||||||
network: [adapter],
|
|
||||||
storage: [storageAdapter] // Add this
|
|
||||||
})
|
|
||||||
})
|
|
||||||
```
|
|
||||||
|
|
||||||
### 2. Load from Local Storage on Startup (Medium)
|
|
||||||
|
|
||||||
Modify the initialization logic to:
|
|
||||||
- Check IndexedDB for existing document data
|
|
||||||
- Load from IndexedDB first (for instant offline access)
|
|
||||||
- Then sync with server when online
|
|
||||||
- Automerge will automatically merge local and remote changes
|
|
||||||
|
|
||||||
**Code changes needed:**
|
|
||||||
```typescript
|
|
||||||
// In useAutomergeSyncRepo.ts - modify initializeHandle
|
|
||||||
const initializeHandle = async () => {
|
|
||||||
// Check if document exists in IndexedDB first
|
|
||||||
const localDoc = await repo.find(roomId) // This will load from IndexedDB if available
|
|
||||||
|
|
||||||
// Then sync with server (if online)
|
|
||||||
if (navigator.onLine) {
|
|
||||||
// Existing server sync logic
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### 3. Handle Online/Offline Transitions (Medium)
|
|
||||||
|
|
||||||
- Detect network status changes
|
|
||||||
- When coming online, ensure sync happens
|
|
||||||
- The existing `CloudflareNetworkAdapter` already handles reconnection, but you may want to add explicit sync triggers
|
|
||||||
|
|
||||||
**Code changes needed:**
|
|
||||||
```typescript
|
|
||||||
// Add network status listener
|
|
||||||
useEffect(() => {
|
|
||||||
const handleOnline = () => {
|
|
||||||
console.log('🌐 Back online - syncing with server')
|
|
||||||
// Trigger sync - Automerge will handle merging automatically
|
|
||||||
if (handle) {
|
|
||||||
// The network adapter will automatically reconnect and sync
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
window.addEventListener('online', handleOnline)
|
|
||||||
return () => window.removeEventListener('online', handleOnline)
|
|
||||||
}, [handle])
|
|
||||||
```
|
|
||||||
|
|
||||||
### 4. Document ID Consistency (Important)
|
|
||||||
|
|
||||||
Currently, the code creates a new document handle each time (`repo.create()`). For local storage to work properly, you need:
|
|
||||||
- Consistent document IDs per room
|
|
||||||
- The challenge: Automerge requires specific document ID formats (like `automerge:xxxxx`)
|
|
||||||
- **Solution options:**
|
|
||||||
1. Use `repo.find()` with a properly formatted Automerge document ID (derive from roomId)
|
|
||||||
2. Store a mapping of roomId → documentId in IndexedDB
|
|
||||||
3. Use a deterministic way to generate document IDs from roomId
|
|
||||||
|
|
||||||
**Code changes needed:**
|
|
||||||
```typescript
|
|
||||||
// Option 1: Generate deterministic Automerge document ID from roomId
|
|
||||||
const documentId = `automerge:${roomId}` // May need proper formatting
|
|
||||||
const handle = repo.find(documentId) // This will load from IndexedDB or create new
|
|
||||||
|
|
||||||
// Option 2: Store mapping in IndexedDB
|
|
||||||
const storedMapping = await getDocumentIdMapping(roomId)
|
|
||||||
const documentId = storedMapping || generateNewDocumentId()
|
|
||||||
const handle = repo.find(documentId)
|
|
||||||
await saveDocumentIdMapping(roomId, documentId)
|
|
||||||
```
|
|
||||||
|
|
||||||
**Note**: The current code comment says "We can't use repo.find() with a custom ID because Automerge requires specific document ID formats" - this needs to be resolved. You may need to:
|
|
||||||
- Use Automerge's document ID generation but store the mapping
|
|
||||||
- Or use a deterministic algorithm to convert roomId to valid Automerge document ID format
|
|
||||||
|
|
||||||
## Benefits
|
|
||||||
|
|
||||||
1. **Instant Offline Access**: Users can immediately see and edit their data without waiting for server response
|
|
||||||
2. **Automatic Merging**: Automerge's CRDT nature means local and remote changes merge automatically without conflicts
|
|
||||||
3. **Better UX**: No loading spinners when offline - data is instantly available
|
|
||||||
4. **Resilience**: Works even if server is temporarily unavailable
|
|
||||||
|
|
||||||
## Challenges & Considerations
|
|
||||||
|
|
||||||
### 1. Storage Quota Limits
|
|
||||||
- IndexedDB has browser-specific limits (typically 50% of disk space)
|
|
||||||
- Large documents could hit quota limits
|
|
||||||
- **Solution**: Monitor storage usage and implement cleanup for old documents
|
|
||||||
|
|
||||||
### 2. Document ID Management
|
|
||||||
- Need to ensure consistent document IDs per room
|
|
||||||
- Current code uses `repo.create()` which generates new IDs
|
|
||||||
- **Solution**: Use `repo.find(roomId)` with a consistent ID format
|
|
||||||
|
|
||||||
### 3. Initial Load Strategy
|
|
||||||
- Should load from IndexedDB first (fast) or server first (fresh)?
|
|
||||||
- **Recommendation**: Load from IndexedDB first for instant UI, then sync with server in background
|
|
||||||
|
|
||||||
### 4. Conflict Resolution
|
|
||||||
- Automerge handles this automatically, but you may want to show users when their offline changes were merged
|
|
||||||
- **Solution**: Use Automerge's change tracking to show merge notifications
|
|
||||||
|
|
||||||
### 5. Storage Adapter Availability
|
|
||||||
- Need to verify if `@automerge/automerge-repo-storage-indexeddb` exists
|
|
||||||
- If not, you'll need to create a custom adapter (still straightforward)
|
|
||||||
|
|
||||||
## Implementation Steps
|
|
||||||
|
|
||||||
1. **Research**: Check if `@automerge/automerge-repo-storage-indexeddb` package exists
|
|
||||||
2. **Install**: Add storage adapter package or create custom adapter
|
|
||||||
3. **Modify Repo Setup**: Add storage adapter to Repo configuration
|
|
||||||
4. **Update Document Loading**: Use `repo.find()` instead of `repo.create()` for consistent IDs
|
|
||||||
5. **Add Network Detection**: Listen for online/offline events
|
|
||||||
6. **Test**: Verify offline editing works and syncs correctly when back online
|
|
||||||
7. **Handle Edge Cases**: Storage quota, document size limits, etc.
|
|
||||||
|
|
||||||
## Estimated Effort
|
|
||||||
|
|
||||||
- **Research & Setup**: 1-2 hours
|
|
||||||
- **Implementation**: 4-6 hours
|
|
||||||
- **Testing**: 2-3 hours
|
|
||||||
- **Total**: ~1 day of focused work
|
|
||||||
|
|
||||||
## Code Locations to Modify
|
|
||||||
|
|
||||||
1. `src/automerge/useAutomergeSyncRepo.ts` - Main sync hook (add storage adapter, modify initialization)
|
|
||||||
2. `src/automerge/CloudflareAdapter.ts` - Network adapter (may need minor changes for offline detection)
|
|
||||||
3. Potentially create: `src/automerge/IndexedDBStorageAdapter.ts` - If custom adapter needed
|
|
||||||
|
|
||||||
## Conclusion
|
|
||||||
|
|
||||||
This is a **medium-complexity** feature that's very feasible. Automerge's architecture is designed for this exact use case, and the main work is:
|
|
||||||
1. Adding the storage adapter (straightforward)
|
|
||||||
2. Ensuring consistent document IDs (important fix)
|
|
||||||
3. Handling online/offline transitions (moderate complexity)
|
|
||||||
|
|
||||||
The biggest benefit is that Automerge's CRDT nature means you don't need to write complex merge logic - it handles conflict resolution automatically.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Related: Google Data Sovereignty
|
|
||||||
|
|
||||||
Beyond canvas document storage, we also support importing and securely storing Google Workspace data locally. See **[docs/GOOGLE_DATA_SOVEREIGNTY.md](./docs/GOOGLE_DATA_SOVEREIGNTY.md)** for the complete architecture covering:
|
|
||||||
|
|
||||||
- **Gmail** - Import and encrypt emails locally
|
|
||||||
- **Drive** - Import and encrypt documents locally
|
|
||||||
- **Photos** - Import thumbnails with on-demand full resolution
|
|
||||||
- **Calendar** - Import and encrypt events locally
|
|
||||||
|
|
||||||
Key principles:
|
|
||||||
1. **Local-first**: All data stored in encrypted IndexedDB
|
|
||||||
2. **User-controlled encryption**: Keys derived from WebCrypto auth, never leave browser
|
|
||||||
3. **Selective sharing**: Choose what to share to canvas boards
|
|
||||||
4. **Optional R2 backup**: Encrypted cloud backup (you hold the keys)
|
|
||||||
|
|
||||||
This builds on the same IndexedDB + Automerge foundation described above.
|
|
||||||
|
|
||||||
|
|
@ -1,139 +0,0 @@
|
||||||
# Open Mapping Project
|
|
||||||
|
|
||||||
## Overview
|
|
||||||
|
|
||||||
**Open Mapping** is a collaborative route planning module for canvas-website that provides advanced mapping functionality beyond traditional tools like Google Maps. Built on open-source foundations (OpenStreetMap, OSRM, Valhalla, MapLibre), it integrates seamlessly with the tldraw canvas environment.
|
|
||||||
|
|
||||||
## Vision
|
|
||||||
|
|
||||||
Create a "living map" that exists as a layer within the collaborative canvas, enabling teams to:
|
|
||||||
- Plan multi-destination trips with optimized routing
|
|
||||||
- Compare alternative routes visually
|
|
||||||
- Share and collaborate on itineraries in real-time
|
|
||||||
- Track budgets and schedules alongside geographic planning
|
|
||||||
- Work offline with cached map data
|
|
||||||
|
|
||||||
## Core Features
|
|
||||||
|
|
||||||
### 1. Map Canvas Integration
|
|
||||||
- MapLibre GL JS as the rendering engine
|
|
||||||
- Seamless embedding within tldraw canvas
|
|
||||||
- Pan/zoom synchronized with canvas viewport
|
|
||||||
|
|
||||||
### 2. Multi-Path Routing
|
|
||||||
- Support for multiple routing profiles (car, bike, foot, transit)
|
|
||||||
- Side-by-side route comparison
|
|
||||||
- Alternative route suggestions
|
|
||||||
- Turn-by-turn directions with elevation profiles
|
|
||||||
|
|
||||||
### 3. Collaborative Editing
|
|
||||||
- Real-time waypoint sharing via Y.js/CRDT
|
|
||||||
- Cursor presence on map
|
|
||||||
- Concurrent route editing without conflicts
|
|
||||||
- Share links for view-only or edit access
|
|
||||||
|
|
||||||
### 4. Layer Management
|
|
||||||
- Multiple basemap options (OSM, satellite, terrain)
|
|
||||||
- Custom overlay layers (GeoJSON import)
|
|
||||||
- Route-specific layers (cycling, hiking trails)
|
|
||||||
|
|
||||||
### 5. Calendar Integration
|
|
||||||
- Attach time windows to waypoints
|
|
||||||
- Visualize itinerary timeline
|
|
||||||
- Sync with external calendars (iCal export)
|
|
||||||
|
|
||||||
### 6. Budget Tracking
|
|
||||||
- Cost estimates per route (fuel, tolls)
|
|
||||||
- Per-waypoint expense tracking
|
|
||||||
- Trip budget aggregation
|
|
||||||
|
|
||||||
### 7. Offline Capability
|
|
||||||
- Tile caching for offline use
|
|
||||||
- Route pre-computation and storage
|
|
||||||
- PWA support
|
|
||||||
|
|
||||||
## Technology Stack
|
|
||||||
|
|
||||||
| Component | Technology | License |
|
|
||||||
|-----------|------------|---------|
|
|
||||||
| Map Renderer | MapLibre GL JS | BSD-3 |
|
|
||||||
| Base Maps | OpenStreetMap | ODbL |
|
|
||||||
| Routing Engine | OSRM / Valhalla | BSD-2 / MIT |
|
|
||||||
| Optimization | VROOM | BSD |
|
|
||||||
| Collaboration | Y.js | MIT |
|
|
||||||
|
|
||||||
## Implementation Phases
|
|
||||||
|
|
||||||
### Phase 1: Foundation (MVP)
|
|
||||||
- [ ] MapLibre GL JS integration with tldraw
|
|
||||||
- [ ] Basic waypoint placement and rendering
|
|
||||||
- [ ] Single-route calculation via OSRM
|
|
||||||
- [ ] Route polyline display
|
|
||||||
|
|
||||||
### Phase 2: Multi-Route & Comparison
|
|
||||||
- [ ] Alternative routes visualization
|
|
||||||
- [ ] Route comparison panel
|
|
||||||
- [ ] Elevation profile display
|
|
||||||
- [ ] Drag-to-reroute functionality
|
|
||||||
|
|
||||||
### Phase 3: Collaboration
|
|
||||||
- [ ] Y.js integration for real-time sync
|
|
||||||
- [ ] Cursor presence on map
|
|
||||||
- [ ] Share link generation
|
|
||||||
|
|
||||||
### Phase 4: Layers & Customization
|
|
||||||
- [ ] Layer panel UI
|
|
||||||
- [ ] Multiple basemap options
|
|
||||||
- [ ] Overlay layer support
|
|
||||||
|
|
||||||
### Phase 5: Calendar & Budget
|
|
||||||
- [ ] Time window attachment
|
|
||||||
- [ ] Budget tracking per waypoint
|
|
||||||
- [ ] iCal export
|
|
||||||
|
|
||||||
### Phase 6: Optimization & Offline
|
|
||||||
- [ ] VROOM integration for TSP/VRP
|
|
||||||
- [ ] Tile caching via Service Worker
|
|
||||||
- [ ] PWA manifest
|
|
||||||
|
|
||||||
## File Structure
|
|
||||||
|
|
||||||
```
|
|
||||||
src/open-mapping/
|
|
||||||
├── index.ts # Public exports
|
|
||||||
├── types/index.ts # TypeScript definitions
|
|
||||||
├── components/
|
|
||||||
│ ├── MapCanvas.tsx # Main map component
|
|
||||||
│ ├── RouteLayer.tsx # Route rendering
|
|
||||||
│ ├── WaypointMarker.tsx # Interactive markers
|
|
||||||
│ └── LayerPanel.tsx # Layer management UI
|
|
||||||
├── hooks/
|
|
||||||
│ ├── useMapInstance.ts # MapLibre instance
|
|
||||||
│ ├── useRouting.ts # Route calculation
|
|
||||||
│ ├── useCollaboration.ts # Y.js sync
|
|
||||||
│ └── useLayers.ts # Layer state
|
|
||||||
├── services/
|
|
||||||
│ ├── RoutingService.ts # Multi-provider routing
|
|
||||||
│ ├── TileService.ts # Tile management
|
|
||||||
│ └── OptimizationService.ts # VROOM integration
|
|
||||||
└── utils/index.ts # Helper functions
|
|
||||||
```
|
|
||||||
|
|
||||||
## Docker Deployment
|
|
||||||
|
|
||||||
Backend services deploy to `/opt/apps/open-mapping/` on Netcup RS 8000:
|
|
||||||
|
|
||||||
- **OSRM** - Primary routing engine
|
|
||||||
- **Valhalla** - Extended routing with transit/isochrones
|
|
||||||
- **TileServer GL** - Vector tiles
|
|
||||||
- **VROOM** - Route optimization
|
|
||||||
|
|
||||||
See `open-mapping.docker-compose.yml` for full configuration.
|
|
||||||
|
|
||||||
## References
|
|
||||||
|
|
||||||
- [OSRM Documentation](https://project-osrm.org/docs/v5.24.0/api/)
|
|
||||||
- [Valhalla API](https://valhalla.github.io/valhalla/api/)
|
|
||||||
- [MapLibre GL JS](https://maplibre.org/maplibre-gl-js-docs/api/)
|
|
||||||
- [VROOM Project](http://vroom-project.org/)
|
|
||||||
- [Y.js Documentation](https://docs.yjs.dev/)
|
|
||||||
|
|
@ -1,341 +0,0 @@
|
||||||
# Git Worktree Automation Setup
|
|
||||||
|
|
||||||
This repository is configured to automatically create Git worktrees for new branches, allowing you to work on multiple branches simultaneously without switching contexts.
|
|
||||||
|
|
||||||
## What Are Worktrees?
|
|
||||||
|
|
||||||
Git worktrees allow you to have multiple working directories (copies of your repo) checked out to different branches at the same time. This means:
|
|
||||||
|
|
||||||
- No need to stash or commit work when switching branches
|
|
||||||
- Run dev servers on multiple branches simultaneously
|
|
||||||
- Compare code across branches easily
|
|
||||||
- Keep your main branch clean while working on features
|
|
||||||
|
|
||||||
## Automatic Worktree Creation
|
|
||||||
|
|
||||||
A Git hook (`.git/hooks/post-checkout`) is installed that automatically creates worktrees when you create a new branch from `main`:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# This will automatically create a worktree at ../canvas-website-feature-name
|
|
||||||
git checkout -b feature/new-feature
|
|
||||||
```
|
|
||||||
|
|
||||||
**Worktree Location Pattern:**
|
|
||||||
```
|
|
||||||
/home/jeffe/Github/
|
|
||||||
├── canvas-website/ # Main repo (main branch)
|
|
||||||
├── canvas-website-feature-name/ # Worktree for feature branch
|
|
||||||
└── canvas-website-bugfix-something/ # Worktree for bugfix branch
|
|
||||||
```
|
|
||||||
|
|
||||||
## Manual Worktree Management
|
|
||||||
|
|
||||||
Use the `worktree-manager.sh` script for manual management:
|
|
||||||
|
|
||||||
### List All Worktrees
|
|
||||||
```bash
|
|
||||||
./scripts/worktree-manager.sh list
|
|
||||||
```
|
|
||||||
|
|
||||||
### Create a New Worktree
|
|
||||||
```bash
|
|
||||||
# Creates worktree for existing branch
|
|
||||||
./scripts/worktree-manager.sh create feature/my-feature
|
|
||||||
|
|
||||||
# Or create new branch with worktree
|
|
||||||
./scripts/worktree-manager.sh create feature/new-branch
|
|
||||||
```
|
|
||||||
|
|
||||||
### Remove a Worktree
|
|
||||||
```bash
|
|
||||||
./scripts/worktree-manager.sh remove feature/old-feature
|
|
||||||
```
|
|
||||||
|
|
||||||
### Clean Up All Worktrees (Keep Main)
|
|
||||||
```bash
|
|
||||||
./scripts/worktree-manager.sh clean
|
|
||||||
```
|
|
||||||
|
|
||||||
### Show Status of All Worktrees
|
|
||||||
```bash
|
|
||||||
./scripts/worktree-manager.sh status
|
|
||||||
```
|
|
||||||
|
|
||||||
### Navigate to a Worktree
|
|
||||||
```bash
|
|
||||||
# Get worktree path
|
|
||||||
./scripts/worktree-manager.sh goto feature/my-feature
|
|
||||||
|
|
||||||
# Or use with cd
|
|
||||||
cd $(./scripts/worktree-manager.sh goto feature/my-feature)
|
|
||||||
```
|
|
||||||
|
|
||||||
### Help
|
|
||||||
```bash
|
|
||||||
./scripts/worktree-manager.sh help
|
|
||||||
```
|
|
||||||
|
|
||||||
## Workflow Examples
|
|
||||||
|
|
||||||
### Starting a New Feature
|
|
||||||
|
|
||||||
**With automatic worktree creation:**
|
|
||||||
```bash
|
|
||||||
# In main repo
|
|
||||||
cd /home/jeffe/Github/canvas-website
|
|
||||||
|
|
||||||
# Create and switch to new branch (worktree auto-created)
|
|
||||||
git checkout -b feature/terminal-tool
|
|
||||||
|
|
||||||
# Notification appears:
|
|
||||||
# 🌳 Creating worktree for branch: feature/terminal-tool
|
|
||||||
# 📁 Location: /home/jeffe/Github/canvas-website-feature-terminal-tool
|
|
||||||
|
|
||||||
# Continue working in current directory or switch to worktree
|
|
||||||
cd ../canvas-website-feature-terminal-tool
|
|
||||||
```
|
|
||||||
|
|
||||||
**Manual worktree creation:**
|
|
||||||
```bash
|
|
||||||
./scripts/worktree-manager.sh create feature/my-feature
|
|
||||||
cd $(./scripts/worktree-manager.sh goto feature/my-feature)
|
|
||||||
```
|
|
||||||
|
|
||||||
### Working on Multiple Features Simultaneously
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Terminal 1: Main repo (main branch)
|
|
||||||
cd /home/jeffe/Github/canvas-website
|
|
||||||
npm run dev # Port 5173
|
|
||||||
|
|
||||||
# Terminal 2: Feature branch 1
|
|
||||||
cd /home/jeffe/Github/canvas-website-feature-auth
|
|
||||||
npm run dev # Different port
|
|
||||||
|
|
||||||
# Terminal 3: Feature branch 2
|
|
||||||
cd /home/jeffe/Github/canvas-website-feature-ui
|
|
||||||
npm run dev # Another port
|
|
||||||
|
|
||||||
# All running simultaneously, no conflicts!
|
|
||||||
```
|
|
||||||
|
|
||||||
### Comparing Code Across Branches
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Use diff or your IDE to compare files
|
|
||||||
diff /home/jeffe/Github/canvas-website/src/App.tsx \
|
|
||||||
/home/jeffe/Github/canvas-website-feature-auth/src/App.tsx
|
|
||||||
|
|
||||||
# Or open both in VS Code
|
|
||||||
code /home/jeffe/Github/canvas-website \
|
|
||||||
/home/jeffe/Github/canvas-website-feature-auth
|
|
||||||
```
|
|
||||||
|
|
||||||
### Cleaning Up After Merging
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# After merging feature/my-feature to main
|
|
||||||
cd /home/jeffe/Github/canvas-website
|
|
||||||
|
|
||||||
# Remove the worktree
|
|
||||||
./scripts/worktree-manager.sh remove feature/my-feature
|
|
||||||
|
|
||||||
# Or clean all worktrees except main
|
|
||||||
./scripts/worktree-manager.sh clean
|
|
||||||
```
|
|
||||||
|
|
||||||
## How It Works
|
|
||||||
|
|
||||||
### Post-Checkout Hook
|
|
||||||
|
|
||||||
The `.git/hooks/post-checkout` script runs automatically after `git checkout` and:
|
|
||||||
|
|
||||||
1. Detects if you're creating a new branch from `main`
|
|
||||||
2. Creates a worktree in `../canvas-website-{branch-name}`
|
|
||||||
3. Links the worktree to the new branch
|
|
||||||
4. Shows a notification with the worktree path
|
|
||||||
|
|
||||||
**Hook Behavior:**
|
|
||||||
- ✅ Creates worktree when: `git checkout -b new-branch` (from main)
|
|
||||||
- ❌ Skips creation when:
|
|
||||||
- Switching to existing branches
|
|
||||||
- Already in a worktree
|
|
||||||
- Worktree already exists for that branch
|
|
||||||
- Not branching from main/master
|
|
||||||
|
|
||||||
### Worktree Manager Script
|
|
||||||
|
|
||||||
The `scripts/worktree-manager.sh` script provides:
|
|
||||||
- User-friendly commands for worktree operations
|
|
||||||
- Colored output for better readability
|
|
||||||
- Error handling and validation
|
|
||||||
- Status reporting across all worktrees
|
|
||||||
|
|
||||||
## Git Commands with Worktrees
|
|
||||||
|
|
||||||
Most Git commands work the same way in worktrees:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# In any worktree
|
|
||||||
git status # Shows status of current worktree
|
|
||||||
git add . # Stages files in current worktree
|
|
||||||
git commit -m "..." # Commits in current branch
|
|
||||||
git push # Pushes current branch
|
|
||||||
git pull # Pulls current branch
|
|
||||||
|
|
||||||
# List all worktrees (works from any worktree)
|
|
||||||
git worktree list
|
|
||||||
|
|
||||||
# Remove a worktree (from main repo)
|
|
||||||
git worktree remove feature/branch-name
|
|
||||||
|
|
||||||
# Prune deleted worktrees
|
|
||||||
git worktree prune
|
|
||||||
```
|
|
||||||
|
|
||||||
## Important Notes
|
|
||||||
|
|
||||||
### Shared Git Directory
|
|
||||||
|
|
||||||
All worktrees share the same `.git` directory (in the main repo), which means:
|
|
||||||
- ✅ Commits, branches, and remotes are shared across all worktrees
|
|
||||||
- ✅ One `git fetch` or `git pull` in main updates all worktrees
|
|
||||||
- ⚠️ Don't delete the main repo while worktrees exist
|
|
||||||
- ⚠️ Stashes are shared (stash in one worktree, pop in another)
|
|
||||||
|
|
||||||
### Node Modules
|
|
||||||
|
|
||||||
Each worktree has its own `node_modules`:
|
|
||||||
- First time entering a worktree: run `npm install`
|
|
||||||
- Dependencies may differ across branches
|
|
||||||
- More disk space usage (one `node_modules` per worktree)
|
|
||||||
|
|
||||||
### Port Conflicts
|
|
||||||
|
|
||||||
When running dev servers in multiple worktrees:
|
|
||||||
```bash
|
|
||||||
# Main repo
|
|
||||||
npm run dev # Uses default port 5173
|
|
||||||
|
|
||||||
# In worktree, specify different port
|
|
||||||
npm run dev -- --port 5174
|
|
||||||
```
|
|
||||||
|
|
||||||
### IDE Integration
|
|
||||||
|
|
||||||
**VS Code:**
|
|
||||||
```bash
|
|
||||||
# Open specific worktree
|
|
||||||
code /home/jeffe/Github/canvas-website-feature-name
|
|
||||||
|
|
||||||
# Or open multiple worktrees as workspace
|
|
||||||
code --add /home/jeffe/Github/canvas-website \
|
|
||||||
--add /home/jeffe/Github/canvas-website-feature-name
|
|
||||||
```
|
|
||||||
|
|
||||||
## Troubleshooting
|
|
||||||
|
|
||||||
### Worktree Path Already Exists
|
|
||||||
|
|
||||||
If you see:
|
|
||||||
```
|
|
||||||
fatal: '/path/to/worktree' already exists
|
|
||||||
```
|
|
||||||
|
|
||||||
Remove the directory manually:
|
|
||||||
```bash
|
|
||||||
rm -rf /home/jeffe/Github/canvas-website-feature-name
|
|
||||||
git worktree prune
|
|
||||||
```
|
|
||||||
|
|
||||||
### Can't Delete Main Repo
|
|
||||||
|
|
||||||
If you have active worktrees, you can't delete the main repo. Clean up first:
|
|
||||||
```bash
|
|
||||||
./scripts/worktree-manager.sh clean
|
|
||||||
```
|
|
||||||
|
|
||||||
### Worktree Out of Sync
|
|
||||||
|
|
||||||
If a worktree seems out of sync:
|
|
||||||
```bash
|
|
||||||
cd /path/to/worktree
|
|
||||||
git fetch origin
|
|
||||||
git reset --hard origin/branch-name
|
|
||||||
```
|
|
||||||
|
|
||||||
### Hook Not Running
|
|
||||||
|
|
||||||
If the post-checkout hook isn't running:
|
|
||||||
```bash
|
|
||||||
# Check if it's executable
|
|
||||||
ls -la .git/hooks/post-checkout
|
|
||||||
|
|
||||||
# Make it executable if needed
|
|
||||||
chmod +x .git/hooks/post-checkout
|
|
||||||
|
|
||||||
# Test the hook manually
|
|
||||||
.git/hooks/post-checkout HEAD HEAD 1
|
|
||||||
```
|
|
||||||
|
|
||||||
## Disabling Automatic Worktrees
|
|
||||||
|
|
||||||
To disable automatic worktree creation:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Remove or rename the hook
|
|
||||||
mv .git/hooks/post-checkout .git/hooks/post-checkout.disabled
|
|
||||||
```
|
|
||||||
|
|
||||||
To re-enable:
|
|
||||||
```bash
|
|
||||||
mv .git/hooks/post-checkout.disabled .git/hooks/post-checkout
|
|
||||||
```
|
|
||||||
|
|
||||||
## Advanced Usage
|
|
||||||
|
|
||||||
### Custom Worktree Location
|
|
||||||
|
|
||||||
Modify the `post-checkout` hook to change the worktree location:
|
|
||||||
```bash
|
|
||||||
# Edit .git/hooks/post-checkout
|
|
||||||
# Change this line:
|
|
||||||
WORKTREE_BASE=$(dirname "$REPO_ROOT")
|
|
||||||
|
|
||||||
# To (example):
|
|
||||||
WORKTREE_BASE="$HOME/worktrees"
|
|
||||||
```
|
|
||||||
|
|
||||||
### Worktree for Remote Branches
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Create worktree for remote branch
|
|
||||||
git worktree add ../canvas-website-remote-branch origin/feature-branch
|
|
||||||
|
|
||||||
# Or use the script
|
|
||||||
./scripts/worktree-manager.sh create origin/feature-branch
|
|
||||||
```
|
|
||||||
|
|
||||||
### Detached HEAD Worktree
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Create worktree at specific commit
|
|
||||||
git worktree add ../canvas-website-commit-abc123 abc123
|
|
||||||
```
|
|
||||||
|
|
||||||
## Best Practices
|
|
||||||
|
|
||||||
1. **Clean up regularly**: Remove worktrees for merged branches
|
|
||||||
2. **Name branches clearly**: Worktree names mirror branch names
|
|
||||||
3. **Run npm install**: Always run in new worktrees
|
|
||||||
4. **Check branch**: Always verify which branch you're on before committing
|
|
||||||
5. **Use status command**: Check all worktrees before major operations
|
|
||||||
|
|
||||||
## Resources
|
|
||||||
|
|
||||||
- [Git Worktree Documentation](https://git-scm.com/docs/git-worktree)
|
|
||||||
- [Git Hooks Documentation](https://git-scm.com/docs/githooks)
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
**Setup Complete!** New branches will automatically create worktrees. Use `./scripts/worktree-manager.sh help` for manual management.
|
|
||||||
29
_redirects
29
_redirects
|
|
@ -1,25 +1,14 @@
|
||||||
# Cloudflare Pages redirects and rewrites
|
# Cloudflare Pages redirects and rewrites
|
||||||
# This file handles SPA routing and URL rewrites (replaces vercel.json rewrites)
|
# This file handles SPA routing and URL rewrites (replaces vercel.json rewrites)
|
||||||
|
|
||||||
# Specific route rewrites (matching vercel.json)
|
# SPA fallback - all routes should serve index.html
|
||||||
# Handle both with and without trailing slashes
|
|
||||||
/board/* /index.html 200
|
|
||||||
/board /index.html 200
|
|
||||||
/board/ /index.html 200
|
|
||||||
/inbox /index.html 200
|
|
||||||
/inbox/ /index.html 200
|
|
||||||
/contact /index.html 200
|
|
||||||
/contact/ /index.html 200
|
|
||||||
/presentations /index.html 200
|
|
||||||
/presentations/ /index.html 200
|
|
||||||
/presentations/* /index.html 200
|
|
||||||
/dashboard /index.html 200
|
|
||||||
/dashboard/ /index.html 200
|
|
||||||
/login /index.html 200
|
|
||||||
/login/ /index.html 200
|
|
||||||
/debug /index.html 200
|
|
||||||
/debug/ /index.html 200
|
|
||||||
|
|
||||||
# SPA fallback - all routes should serve index.html (must be last)
|
|
||||||
/* /index.html 200
|
/* /index.html 200
|
||||||
|
|
||||||
|
# Specific route rewrites (matching vercel.json)
|
||||||
|
/board/* /index.html 200
|
||||||
|
/board /index.html 200
|
||||||
|
/inbox /index.html 200
|
||||||
|
/contact /index.html 200
|
||||||
|
/presentations /index.html 200
|
||||||
|
/dashboard /index.html 200
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,15 +0,0 @@
|
||||||
project_name: "Canvas Feature List"
|
|
||||||
default_status: "To Do"
|
|
||||||
statuses: ["To Do", "In Progress", "Done"]
|
|
||||||
labels: []
|
|
||||||
milestones: []
|
|
||||||
date_format: yyyy-mm-dd
|
|
||||||
max_column_width: 20
|
|
||||||
auto_open_browser: true
|
|
||||||
default_port: 6420
|
|
||||||
remote_operations: true
|
|
||||||
auto_commit: true
|
|
||||||
zero_padded_ids: 3
|
|
||||||
bypass_git_hooks: false
|
|
||||||
check_active_branches: true
|
|
||||||
active_branch_days: 60
|
|
||||||
|
|
@ -1,665 +0,0 @@
|
||||||
---
|
|
||||||
id: doc-001
|
|
||||||
title: Web3 Wallet Integration Architecture
|
|
||||||
type: other
|
|
||||||
created_date: '2026-01-02 16:07'
|
|
||||||
---
|
|
||||||
# Web3 Wallet Integration Architecture
|
|
||||||
|
|
||||||
**Status:** Planning
|
|
||||||
**Created:** 2026-01-02
|
|
||||||
**Related Task:** task-007
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 1. Overview
|
|
||||||
|
|
||||||
This document outlines the architecture for integrating Web3 wallet capabilities into the canvas-website, enabling CryptID users to link Ethereum wallets for on-chain transactions, voting, and token-gated features.
|
|
||||||
|
|
||||||
### Key Constraint: Cryptographic Curve Mismatch
|
|
||||||
|
|
||||||
| System | Curve | Usage |
|
|
||||||
|--------|-------|-------|
|
|
||||||
| **CryptID (WebCrypto)** | ECDSA P-256 (NIST) | Authentication, passwordless login |
|
|
||||||
| **Ethereum** | ECDSA secp256k1 | Transactions, message signing |
|
|
||||||
|
|
||||||
These curves are **incompatible**. A CryptID key cannot sign Ethereum transactions. Therefore, we use a **wallet linking** approach where:
|
|
||||||
1. CryptID handles authentication (who you are)
|
|
||||||
2. Linked wallet handles on-chain actions (what you can do)
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 2. Database Schema
|
|
||||||
|
|
||||||
### Migration: `002_linked_wallets.sql`
|
|
||||||
|
|
||||||
```sql
|
|
||||||
-- Migration: Add Linked Wallets for Web3 Integration
|
|
||||||
-- Date: 2026-01-02
|
|
||||||
-- Description: Enables CryptID users to link Ethereum wallets for
|
|
||||||
-- on-chain transactions, voting, and token-gated features.
|
|
||||||
|
|
||||||
-- =============================================================================
|
|
||||||
-- LINKED WALLETS TABLE
|
|
||||||
-- =============================================================================
|
|
||||||
-- Each CryptID user can link multiple Ethereum wallets (EOA, Safe, hardware)
|
|
||||||
-- Linking requires signature verification to prove wallet ownership
|
|
||||||
|
|
||||||
CREATE TABLE IF NOT EXISTS linked_wallets (
|
|
||||||
id TEXT PRIMARY KEY, -- UUID for the link record
|
|
||||||
user_id TEXT NOT NULL, -- References users.id (CryptID account)
|
|
||||||
wallet_address TEXT NOT NULL, -- Ethereum address (checksummed, 0x-prefixed)
|
|
||||||
|
|
||||||
-- Wallet metadata
|
|
||||||
wallet_type TEXT DEFAULT 'eoa' CHECK (wallet_type IN ('eoa', 'safe', 'hardware', 'contract')),
|
|
||||||
chain_id INTEGER DEFAULT 1, -- Primary chain (1 = Ethereum mainnet)
|
|
||||||
label TEXT, -- User-provided label (e.g., "Main Wallet")
|
|
||||||
|
|
||||||
-- Verification proof
|
|
||||||
signature_message TEXT NOT NULL, -- The message that was signed
|
|
||||||
signature TEXT NOT NULL, -- EIP-191 personal_sign signature
|
|
||||||
verified_at TEXT NOT NULL, -- When signature was verified
|
|
||||||
|
|
||||||
-- ENS integration
|
|
||||||
ens_name TEXT, -- Resolved ENS name (if any)
|
|
||||||
ens_avatar TEXT, -- ENS avatar URL (if any)
|
|
||||||
ens_resolved_at TEXT, -- When ENS was last resolved
|
|
||||||
|
|
||||||
-- Flags
|
|
||||||
is_primary INTEGER DEFAULT 0, -- 1 = primary wallet for this user
|
|
||||||
is_active INTEGER DEFAULT 1, -- 0 = soft-deleted
|
|
||||||
|
|
||||||
-- Timestamps
|
|
||||||
created_at TEXT DEFAULT (datetime('now')),
|
|
||||||
updated_at TEXT DEFAULT (datetime('now')),
|
|
||||||
last_used_at TEXT, -- Last time wallet was used for action
|
|
||||||
|
|
||||||
-- Constraints
|
|
||||||
FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE,
|
|
||||||
UNIQUE(user_id, wallet_address) -- Can't link same wallet twice
|
|
||||||
);
|
|
||||||
|
|
||||||
-- Indexes for efficient lookups
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_linked_wallets_user ON linked_wallets(user_id);
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_linked_wallets_address ON linked_wallets(wallet_address);
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_linked_wallets_active ON linked_wallets(is_active);
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_linked_wallets_primary ON linked_wallets(user_id, is_primary);
|
|
||||||
|
|
||||||
-- =============================================================================
|
|
||||||
-- WALLET LINKING TOKENS TABLE (for Safe/multisig delayed verification)
|
|
||||||
-- =============================================================================
|
|
||||||
-- For contract wallets that require on-chain signature verification
|
|
||||||
|
|
||||||
CREATE TABLE IF NOT EXISTS wallet_link_tokens (
|
|
||||||
id TEXT PRIMARY KEY,
|
|
||||||
user_id TEXT NOT NULL,
|
|
||||||
wallet_address TEXT NOT NULL,
|
|
||||||
nonce TEXT NOT NULL, -- Random nonce for signature message
|
|
||||||
token TEXT NOT NULL UNIQUE, -- Secret token for verification callback
|
|
||||||
expires_at TEXT NOT NULL,
|
|
||||||
used INTEGER DEFAULT 0,
|
|
||||||
created_at TEXT DEFAULT (datetime('now')),
|
|
||||||
|
|
||||||
FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE
|
|
||||||
);
|
|
||||||
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_wallet_link_tokens_token ON wallet_link_tokens(token);
|
|
||||||
|
|
||||||
-- =============================================================================
|
|
||||||
-- TOKEN BALANCES CACHE (optional, for token-gating)
|
|
||||||
-- =============================================================================
|
|
||||||
-- Cache of token balances for faster permission checks
|
|
||||||
|
|
||||||
CREATE TABLE IF NOT EXISTS wallet_token_balances (
|
|
||||||
id TEXT PRIMARY KEY,
|
|
||||||
wallet_address TEXT NOT NULL,
|
|
||||||
token_address TEXT NOT NULL, -- ERC-20/721/1155 contract address
|
|
||||||
token_type TEXT CHECK (token_type IN ('erc20', 'erc721', 'erc1155')),
|
|
||||||
chain_id INTEGER NOT NULL,
|
|
||||||
balance TEXT NOT NULL, -- String to handle big numbers
|
|
||||||
last_updated TEXT DEFAULT (datetime('now')),
|
|
||||||
|
|
||||||
UNIQUE(wallet_address, token_address, chain_id)
|
|
||||||
);
|
|
||||||
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_token_balances_wallet ON wallet_token_balances(wallet_address);
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_token_balances_token ON wallet_token_balances(token_address);
|
|
||||||
```
|
|
||||||
|
|
||||||
### TypeScript Types
|
|
||||||
|
|
||||||
Add to `worker/types.ts`:
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
// =============================================================================
|
|
||||||
// Linked Wallet Types
|
|
||||||
// =============================================================================
|
|
||||||
|
|
||||||
export type WalletType = 'eoa' | 'safe' | 'hardware' | 'contract';
|
|
||||||
|
|
||||||
export interface LinkedWallet {
|
|
||||||
id: string;
|
|
||||||
user_id: string;
|
|
||||||
wallet_address: string;
|
|
||||||
wallet_type: WalletType;
|
|
||||||
chain_id: number;
|
|
||||||
label: string | null;
|
|
||||||
signature_message: string;
|
|
||||||
signature: string;
|
|
||||||
verified_at: string;
|
|
||||||
ens_name: string | null;
|
|
||||||
ens_avatar: string | null;
|
|
||||||
ens_resolved_at: string | null;
|
|
||||||
is_primary: number; // SQLite boolean
|
|
||||||
is_active: number; // SQLite boolean
|
|
||||||
created_at: string;
|
|
||||||
updated_at: string;
|
|
||||||
last_used_at: string | null;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface WalletLinkToken {
|
|
||||||
id: string;
|
|
||||||
user_id: string;
|
|
||||||
wallet_address: string;
|
|
||||||
nonce: string;
|
|
||||||
token: string;
|
|
||||||
expires_at: string;
|
|
||||||
used: number;
|
|
||||||
created_at: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface WalletTokenBalance {
|
|
||||||
id: string;
|
|
||||||
wallet_address: string;
|
|
||||||
token_address: string;
|
|
||||||
token_type: 'erc20' | 'erc721' | 'erc1155';
|
|
||||||
chain_id: number;
|
|
||||||
balance: string;
|
|
||||||
last_updated: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
// API Response types
|
|
||||||
export interface LinkedWalletResponse {
|
|
||||||
id: string;
|
|
||||||
address: string;
|
|
||||||
type: WalletType;
|
|
||||||
chainId: number;
|
|
||||||
label: string | null;
|
|
||||||
ensName: string | null;
|
|
||||||
ensAvatar: string | null;
|
|
||||||
isPrimary: boolean;
|
|
||||||
linkedAt: string;
|
|
||||||
lastUsedAt: string | null;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface WalletLinkRequest {
|
|
||||||
walletAddress: string;
|
|
||||||
signature: string;
|
|
||||||
message: string;
|
|
||||||
walletType?: WalletType;
|
|
||||||
chainId?: number;
|
|
||||||
label?: string;
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 3. API Endpoints
|
|
||||||
|
|
||||||
### Base Path: `/api/wallet`
|
|
||||||
|
|
||||||
All endpoints require CryptID authentication via `X-CryptID-PublicKey` header.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### `POST /api/wallet/link`
|
|
||||||
|
|
||||||
Link a new wallet to the authenticated CryptID account.
|
|
||||||
|
|
||||||
**Request:**
|
|
||||||
```typescript
|
|
||||||
{
|
|
||||||
walletAddress: string; // 0x-prefixed Ethereum address
|
|
||||||
signature: string; // EIP-191 signature of the message
|
|
||||||
message: string; // Must match server-generated format
|
|
||||||
walletType?: 'eoa' | 'safe' | 'hardware' | 'contract';
|
|
||||||
chainId?: number; // Default: 1 (mainnet)
|
|
||||||
label?: string; // Optional user label
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
**Message Format (must be signed):**
|
|
||||||
```
|
|
||||||
Link wallet to CryptID
|
|
||||||
|
|
||||||
Account: ${cryptidUsername}
|
|
||||||
Wallet: ${walletAddress}
|
|
||||||
Timestamp: ${isoTimestamp}
|
|
||||||
Nonce: ${randomNonce}
|
|
||||||
|
|
||||||
This signature proves you own this wallet.
|
|
||||||
```
|
|
||||||
|
|
||||||
**Response (201 Created):**
|
|
||||||
```typescript
|
|
||||||
{
|
|
||||||
success: true;
|
|
||||||
wallet: LinkedWalletResponse;
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
**Errors:**
|
|
||||||
- `400` - Invalid request body or signature
|
|
||||||
- `401` - Not authenticated
|
|
||||||
- `409` - Wallet already linked to this account
|
|
||||||
- `422` - Signature verification failed
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### `GET /api/wallet/list`
|
|
||||||
|
|
||||||
Get all wallets linked to the authenticated user.
|
|
||||||
|
|
||||||
**Response:**
|
|
||||||
```typescript
|
|
||||||
{
|
|
||||||
wallets: LinkedWalletResponse[];
|
|
||||||
count: number;
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### `GET /api/wallet/:address`
|
|
||||||
|
|
||||||
Get details for a specific linked wallet.
|
|
||||||
|
|
||||||
**Response:**
|
|
||||||
```typescript
|
|
||||||
{
|
|
||||||
wallet: LinkedWalletResponse;
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### `PATCH /api/wallet/:address`
|
|
||||||
|
|
||||||
Update a linked wallet (label, primary status).
|
|
||||||
|
|
||||||
**Request:**
|
|
||||||
```typescript
|
|
||||||
{
|
|
||||||
label?: string;
|
|
||||||
isPrimary?: boolean;
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
**Response:**
|
|
||||||
```typescript
|
|
||||||
{
|
|
||||||
success: true;
|
|
||||||
wallet: LinkedWalletResponse;
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### `DELETE /api/wallet/:address`
|
|
||||||
|
|
||||||
Unlink a wallet from the account.
|
|
||||||
|
|
||||||
**Response:**
|
|
||||||
```typescript
|
|
||||||
{
|
|
||||||
success: true;
|
|
||||||
message: 'Wallet unlinked';
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### `GET /api/wallet/verify/:address`
|
|
||||||
|
|
||||||
Check if a wallet address is linked to any CryptID account.
|
|
||||||
(Public endpoint - no auth required)
|
|
||||||
|
|
||||||
**Response:**
|
|
||||||
```typescript
|
|
||||||
{
|
|
||||||
linked: boolean;
|
|
||||||
cryptidUsername?: string; // Only if user allows public display
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### `POST /api/wallet/refresh-ens`
|
|
||||||
|
|
||||||
Refresh ENS name resolution for a linked wallet.
|
|
||||||
|
|
||||||
**Request:**
|
|
||||||
```typescript
|
|
||||||
{
|
|
||||||
walletAddress: string;
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
**Response:**
|
|
||||||
```typescript
|
|
||||||
{
|
|
||||||
ensName: string | null;
|
|
||||||
ensAvatar: string | null;
|
|
||||||
resolvedAt: string;
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 4. Signature Verification Implementation
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
// worker/walletAuth.ts
|
|
||||||
|
|
||||||
import { verifyMessage, getAddress } from 'viem';
|
|
||||||
|
|
||||||
export function generateLinkMessage(
|
|
||||||
username: string,
|
|
||||||
address: string,
|
|
||||||
timestamp: string,
|
|
||||||
nonce: string
|
|
||||||
): string {
|
|
||||||
return `Link wallet to CryptID
|
|
||||||
|
|
||||||
Account: ${username}
|
|
||||||
Wallet: ${address}
|
|
||||||
Timestamp: ${timestamp}
|
|
||||||
Nonce: ${nonce}
|
|
||||||
|
|
||||||
This signature proves you own this wallet.`;
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function verifyWalletSignature(
|
|
||||||
address: string,
|
|
||||||
message: string,
|
|
||||||
signature: `0x${string}`
|
|
||||||
): Promise<boolean> {
|
|
||||||
try {
|
|
||||||
// Normalize address
|
|
||||||
const checksumAddress = getAddress(address);
|
|
||||||
|
|
||||||
// Verify EIP-191 personal_sign signature
|
|
||||||
const valid = await verifyMessage({
|
|
||||||
address: checksumAddress,
|
|
||||||
message,
|
|
||||||
signature,
|
|
||||||
});
|
|
||||||
|
|
||||||
return valid;
|
|
||||||
} catch (error) {
|
|
||||||
console.error('Signature verification error:', error);
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// For ERC-1271 contract wallet verification (Safe, etc.)
|
|
||||||
export async function verifyContractSignature(
|
|
||||||
address: string,
|
|
||||||
message: string,
|
|
||||||
signature: string,
|
|
||||||
rpcUrl: string
|
|
||||||
): Promise<boolean> {
|
|
||||||
// ERC-1271 magic value: 0x1626ba7e
|
|
||||||
// Implementation needed for Safe/contract wallet support
|
|
||||||
// Uses eth_call to isValidSignature(bytes32,bytes)
|
|
||||||
throw new Error('Contract signature verification not yet implemented');
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 5. Library Comparison
|
|
||||||
|
|
||||||
### Recommendation: **wagmi v2 + viem**
|
|
||||||
|
|
||||||
| Library | Bundle Size | Type Safety | React Hooks | Maintenance | Recommendation |
|
|
||||||
|---------|-------------|-------------|-------------|-------------|----------------|
|
|
||||||
| **wagmi v2** | ~40KB | Excellent | Native | Active (wevm team) | ✅ **Best for React** |
|
|
||||||
| **viem** | ~25KB | Excellent | N/A | Active (wevm team) | ✅ **Best for worker** |
|
|
||||||
| **ethers v6** | ~120KB | Good | None | Active | ⚠️ Larger bundle |
|
|
||||||
| **web3.js** | ~400KB | Poor | None | Declining | ❌ Avoid |
|
|
||||||
|
|
||||||
### Why wagmi + viem?
|
|
||||||
|
|
||||||
1. **Same team** - wagmi and viem are both from wevm, designed to work together
|
|
||||||
2. **Tree-shakeable** - Only import what you use
|
|
||||||
3. **TypeScript-first** - Excellent type inference and autocomplete
|
|
||||||
4. **Modern React** - Hooks-based, works with React 18+ and Suspense
|
|
||||||
5. **WalletConnect v2** - Built-in support via Web3Modal
|
|
||||||
6. **No ethers dependency** - Pure viem underneath
|
|
||||||
|
|
||||||
### Package Configuration
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"dependencies": {
|
|
||||||
"wagmi": "^2.12.0",
|
|
||||||
"viem": "^2.19.0",
|
|
||||||
"@tanstack/react-query": "^5.45.0",
|
|
||||||
"@web3modal/wagmi": "^5.0.0"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### Supported Wallets (via Web3Modal)
|
|
||||||
|
|
||||||
- MetaMask (injected)
|
|
||||||
- WalletConnect v2 (mobile wallets)
|
|
||||||
- Coinbase Wallet
|
|
||||||
- Rainbow
|
|
||||||
- Safe (via WalletConnect)
|
|
||||||
- Hardware wallets (via MetaMask bridge)
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 6. Frontend Architecture
|
|
||||||
|
|
||||||
### Provider Setup (`src/providers/Web3Provider.tsx`)
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
import { WagmiProvider, createConfig, http } from 'wagmi';
|
|
||||||
import { mainnet, optimism, arbitrum, base } from 'wagmi/chains';
|
|
||||||
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
|
|
||||||
import { createWeb3Modal } from '@web3modal/wagmi/react';
|
|
||||||
|
|
||||||
// Configure chains
|
|
||||||
const chains = [mainnet, optimism, arbitrum, base] as const;
|
|
||||||
|
|
||||||
// Create wagmi config
|
|
||||||
const config = createConfig({
|
|
||||||
chains,
|
|
||||||
transports: {
|
|
||||||
[mainnet.id]: http(),
|
|
||||||
[optimism.id]: http(),
|
|
||||||
[arbitrum.id]: http(),
|
|
||||||
[base.id]: http(),
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
// Create Web3Modal
|
|
||||||
const projectId = process.env.WALLETCONNECT_PROJECT_ID!;
|
|
||||||
|
|
||||||
createWeb3Modal({
|
|
||||||
wagmiConfig: config,
|
|
||||||
projectId,
|
|
||||||
chains,
|
|
||||||
themeMode: 'dark',
|
|
||||||
});
|
|
||||||
|
|
||||||
const queryClient = new QueryClient();
|
|
||||||
|
|
||||||
export function Web3Provider({ children }: { children: React.ReactNode }) {
|
|
||||||
return (
|
|
||||||
<WagmiProvider config={config}>
|
|
||||||
<QueryClientProvider client={queryClient}>
|
|
||||||
{children}
|
|
||||||
</QueryClientProvider>
|
|
||||||
</WagmiProvider>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### Wallet Link Hook (`src/hooks/useWalletLink.ts`)
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
import { useAccount, useSignMessage, useDisconnect } from 'wagmi';
|
|
||||||
import { useAuth } from '../context/AuthContext';
|
|
||||||
import { useState } from 'react';
|
|
||||||
|
|
||||||
export function useWalletLink() {
|
|
||||||
const { address, isConnected } = useAccount();
|
|
||||||
const { signMessageAsync } = useSignMessage();
|
|
||||||
const { disconnect } = useDisconnect();
|
|
||||||
const { session } = useAuth();
|
|
||||||
const [isLinking, setIsLinking] = useState(false);
|
|
||||||
|
|
||||||
const linkWallet = async (label?: string) => {
|
|
||||||
if (!address || !session.username) return;
|
|
||||||
|
|
||||||
setIsLinking(true);
|
|
||||||
try {
|
|
||||||
// Generate link message
|
|
||||||
const timestamp = new Date().toISOString();
|
|
||||||
const nonce = crypto.randomUUID();
|
|
||||||
const message = generateLinkMessage(
|
|
||||||
session.username,
|
|
||||||
address,
|
|
||||||
timestamp,
|
|
||||||
nonce
|
|
||||||
);
|
|
||||||
|
|
||||||
// Request signature from wallet
|
|
||||||
const signature = await signMessageAsync({ message });
|
|
||||||
|
|
||||||
// Send to backend for verification
|
|
||||||
const response = await fetch('/api/wallet/link', {
|
|
||||||
method: 'POST',
|
|
||||||
headers: {
|
|
||||||
'Content-Type': 'application/json',
|
|
||||||
'X-CryptID-PublicKey': session.publicKey,
|
|
||||||
},
|
|
||||||
body: JSON.stringify({
|
|
||||||
walletAddress: address,
|
|
||||||
signature,
|
|
||||||
message,
|
|
||||||
label,
|
|
||||||
}),
|
|
||||||
});
|
|
||||||
|
|
||||||
if (!response.ok) {
|
|
||||||
throw new Error('Failed to link wallet');
|
|
||||||
}
|
|
||||||
|
|
||||||
return await response.json();
|
|
||||||
} finally {
|
|
||||||
setIsLinking(false);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
return {
|
|
||||||
address,
|
|
||||||
isConnected,
|
|
||||||
isLinking,
|
|
||||||
linkWallet,
|
|
||||||
disconnect,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 7. Integration Points
|
|
||||||
|
|
||||||
### A. AuthContext Extension
|
|
||||||
|
|
||||||
Add to `Session` type:
|
|
||||||
```typescript
|
|
||||||
interface Session {
|
|
||||||
// ... existing fields
|
|
||||||
linkedWallets?: LinkedWalletResponse[];
|
|
||||||
primaryWallet?: LinkedWalletResponse;
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### B. Token-Gated Features
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
// Check if user holds specific tokens
|
|
||||||
async function checkTokenGate(
|
|
||||||
walletAddress: string,
|
|
||||||
requirement: {
|
|
||||||
tokenAddress: string;
|
|
||||||
minBalance: string;
|
|
||||||
chainId: number;
|
|
||||||
}
|
|
||||||
): Promise<boolean> {
|
|
||||||
// Query on-chain balance or use cached value
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### C. Snapshot Voting (Future)
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
// Vote on Snapshot proposal
|
|
||||||
async function voteOnProposal(
|
|
||||||
space: string,
|
|
||||||
proposal: string,
|
|
||||||
choice: number,
|
|
||||||
walletAddress: string
|
|
||||||
): Promise<void> {
|
|
||||||
// Use Snapshot.js SDK with linked wallet
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 8. Security Considerations
|
|
||||||
|
|
||||||
1. **Signature Replay Prevention**
|
|
||||||
- Include timestamp and nonce in message
|
|
||||||
- Server validates timestamp is recent (within 5 minutes)
|
|
||||||
- Nonces are single-use
|
|
||||||
|
|
||||||
2. **Address Validation**
|
|
||||||
- Always checksum addresses before storing/comparing
|
|
||||||
- Validate address format (0x + 40 hex chars)
|
|
||||||
|
|
||||||
3. **Rate Limiting**
|
|
||||||
- Limit link attempts per user (e.g., 5/hour)
|
|
||||||
- Limit total wallets per user (e.g., 10)
|
|
||||||
|
|
||||||
4. **Wallet Verification**
|
|
||||||
- EOA: EIP-191 personal_sign
|
|
||||||
- Safe: ERC-1271 isValidSignature
|
|
||||||
- Hardware: Same as EOA (via MetaMask bridge)
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 9. Next Steps
|
|
||||||
|
|
||||||
1. **Phase 1 (This Sprint)**
|
|
||||||
- [ ] Add migration file
|
|
||||||
- [ ] Install wagmi/viem dependencies
|
|
||||||
- [ ] Implement link/list/unlink endpoints
|
|
||||||
- [ ] Create WalletLinkPanel UI
|
|
||||||
- [ ] Add wallet section to settings
|
|
||||||
|
|
||||||
2. **Phase 2 (Next Sprint)**
|
|
||||||
- [ ] Snapshot.js integration
|
|
||||||
- [ ] VotingShape for canvas
|
|
||||||
- [ ] Token balance caching
|
|
||||||
|
|
||||||
3. **Phase 3 (Future)**
|
|
||||||
- [ ] Safe SDK integration
|
|
||||||
- [ ] TransactionBuilderShape
|
|
||||||
- [ ] Account Abstraction exploration
|
|
||||||
|
|
@ -1,54 +0,0 @@
|
||||||
---
|
|
||||||
id: task-001
|
|
||||||
title: offline local storage
|
|
||||||
status: Done
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-03 23:42'
|
|
||||||
updated_date: '2025-12-07 20:50'
|
|
||||||
labels:
|
|
||||||
- feature
|
|
||||||
- offline
|
|
||||||
- persistence
|
|
||||||
- indexeddb
|
|
||||||
dependencies: []
|
|
||||||
priority: high
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
IndexedDB persistence is already implemented via @automerge/automerge-repo-storage-indexeddb. The remaining work is:
|
|
||||||
|
|
||||||
1. Add real online/offline detection (currently always returns "online")
|
|
||||||
2. Create UI indicator showing connection status
|
|
||||||
3. Handle Safari's 7-day IndexedDB eviction
|
|
||||||
|
|
||||||
Existing code locations:
|
|
||||||
- src/automerge/useAutomergeSyncRepo.ts (lines 346, 380-432)
|
|
||||||
- src/automerge/useAutomergeStoreV2.ts (connectionStatus property)
|
|
||||||
- src/automerge/documentIdMapping.ts (room→document mapping)
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [x] #1 Real WebSocket connection state tracking (not hardcoded 'online')
|
|
||||||
- [x] #2 navigator.onLine integration for network detection
|
|
||||||
- [x] #3 UI indicator component showing connection status
|
|
||||||
- [x] #4 Visual feedback when working offline
|
|
||||||
- [x] #5 Auto-reconnect with status updates
|
|
||||||
- [ ] #6 Safari 7-day eviction mitigation (service worker or periodic touch)
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
Implemented connection status tracking:
|
|
||||||
- Added ConnectionState type and tracking in CloudflareAdapter
|
|
||||||
- Added navigator.onLine integration for network detection
|
|
||||||
- Exposed connectionState and isNetworkOnline from useAutomergeSync hook
|
|
||||||
- Created ConnectionStatusIndicator component with visual feedback
|
|
||||||
- Shows status only when not connected (connecting/reconnecting/disconnected/offline)
|
|
||||||
- Auto-hides when connected and online
|
|
||||||
|
|
||||||
Model files downloaded successfully: tiny.en-encoder.int8.onnx (13MB), tiny.en-decoder.int8.onnx (87MB), tokens.txt (816KB)
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
@ -1,26 +0,0 @@
|
||||||
---
|
|
||||||
id: task-002
|
|
||||||
title: RunPod AI API Integration
|
|
||||||
status: Done
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-03'
|
|
||||||
labels: [feature, ai, integration]
|
|
||||||
priority: high
|
|
||||||
branch: add-runpod-AI-API
|
|
||||||
worktree: /home/jeffe/Github/canvas-website-branch-worktrees/add-runpod-AI-API
|
|
||||||
updated_date: '2025-12-04 13:43'
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
Integrate RunPod serverless AI API for image generation and other AI features on the canvas.
|
|
||||||
|
|
||||||
## Branch Info
|
|
||||||
- **Branch**: `add-runpod-AI-API`
|
|
||||||
- **Worktree**: `/home/jeffe/Github/canvas-website-branch-worktrees/add-runpod-AI-API`
|
|
||||||
- **Commit**: 083095c
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
- [ ] Connect to RunPod serverless endpoints
|
|
||||||
- [ ] Implement image generation from canvas
|
|
||||||
- [ ] Handle AI responses and display on canvas
|
|
||||||
- [ ] Error handling and loading states
|
|
||||||
|
|
@ -1,24 +0,0 @@
|
||||||
---
|
|
||||||
id: task-003
|
|
||||||
title: MulTmux Web Integration
|
|
||||||
status: In Progress
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-03'
|
|
||||||
labels: [feature, terminal, integration]
|
|
||||||
priority: medium
|
|
||||||
branch: mulTmux-webtree
|
|
||||||
worktree: /home/jeffe/Github/canvas-website-branch-worktrees/mulTmux-webtree
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
Integrate MulTmux web terminal functionality into the canvas for terminal-based interactions.
|
|
||||||
|
|
||||||
## Branch Info
|
|
||||||
- **Branch**: `mulTmux-webtree`
|
|
||||||
- **Worktree**: `/home/jeffe/Github/canvas-website-branch-worktrees/mulTmux-webtree`
|
|
||||||
- **Commit**: 8ea3490
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
- [ ] Embed terminal component in canvas
|
|
||||||
- [ ] Handle terminal I/O within canvas context
|
|
||||||
- [ ] Support multiple terminal sessions
|
|
||||||
|
|
@ -1,38 +0,0 @@
|
||||||
---
|
|
||||||
id: task-004
|
|
||||||
title: IO Chip Feature
|
|
||||||
status: In Progress
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-03'
|
|
||||||
updated_date: '2025-12-07 06:43'
|
|
||||||
labels:
|
|
||||||
- feature
|
|
||||||
- io
|
|
||||||
- ui
|
|
||||||
dependencies: []
|
|
||||||
priority: medium
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Implement IO chip feature for the canvas - enabling input/output connections between canvas elements.
|
|
||||||
|
|
||||||
## Branch Info
|
|
||||||
- **Branch**: `feature/io-chip`
|
|
||||||
- **Worktree**: `/home/jeffe/Github/canvas-website-io-chip`
|
|
||||||
- **Commit**: 527462a
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [ ] #1 Create IO chip component
|
|
||||||
- [ ] #2 Enable connections between canvas elements
|
|
||||||
- [ ] #3 Handle data flow between connected chips
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
Native Android app scaffolded and committed to main (0b1dac0). Dev branch created for future work.
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
@ -1,24 +0,0 @@
|
||||||
---
|
|
||||||
id: task-004
|
|
||||||
title: IO Chip Feature
|
|
||||||
status: In Progress
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-03'
|
|
||||||
labels: [feature, io, ui]
|
|
||||||
priority: medium
|
|
||||||
branch: feature/io-chip
|
|
||||||
worktree: /home/jeffe/Github/canvas-website-io-chip
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
Implement IO chip feature for the canvas - enabling input/output connections between canvas elements.
|
|
||||||
|
|
||||||
## Branch Info
|
|
||||||
- **Branch**: `feature/io-chip`
|
|
||||||
- **Worktree**: `/home/jeffe/Github/canvas-website-io-chip`
|
|
||||||
- **Commit**: 527462a
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
- [ ] Create IO chip component
|
|
||||||
- [ ] Enable connections between canvas elements
|
|
||||||
- [ ] Handle data flow between connected chips
|
|
||||||
|
|
@ -1,42 +0,0 @@
|
||||||
---
|
|
||||||
id: task-005
|
|
||||||
title: Automerge CRDT Sync
|
|
||||||
status: Done
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-03'
|
|
||||||
updated_date: '2025-12-05 03:41'
|
|
||||||
labels:
|
|
||||||
- feature
|
|
||||||
- sync
|
|
||||||
- collaboration
|
|
||||||
dependencies: []
|
|
||||||
priority: high
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Implement Automerge CRDT-based synchronization for real-time collaborative canvas editing.
|
|
||||||
|
|
||||||
## Branch Info
|
|
||||||
- **Branch**: `Automerge`
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [ ] #1 Integrate Automerge library
|
|
||||||
- [ ] #2 Enable real-time sync between clients
|
|
||||||
- [ ] #3 Handle conflict resolution automatically
|
|
||||||
- [ ] #4 Persist state across sessions
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
Binary Automerge sync implemented:
|
|
||||||
- CloudflareNetworkAdapter sends/receives binary sync messages
|
|
||||||
- Worker sends initial sync on connect
|
|
||||||
- Message buffering for early server messages
|
|
||||||
- documentId tracking for proper Automerge Repo routing
|
|
||||||
- Multi-client sync verified working
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
@ -1,22 +0,0 @@
|
||||||
---
|
|
||||||
id: task-006
|
|
||||||
title: Stripe Payment Integration
|
|
||||||
status: To Do
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-03'
|
|
||||||
labels: [feature, payments, integration]
|
|
||||||
priority: medium
|
|
||||||
branch: stripe-integration
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
Integrate Stripe for payment processing and subscription management.
|
|
||||||
|
|
||||||
## Branch Info
|
|
||||||
- **Branch**: `stripe-integration`
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
- [ ] Set up Stripe API connection
|
|
||||||
- [ ] Implement payment flow
|
|
||||||
- [ ] Handle subscriptions
|
|
||||||
- [ ] Add billing management UI
|
|
||||||
|
|
@ -1,182 +0,0 @@
|
||||||
---
|
|
||||||
id: task-007
|
|
||||||
title: Web3 Wallet Linking & Blockchain Integration
|
|
||||||
status: Done
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-03'
|
|
||||||
updated_date: '2026-01-02 17:05'
|
|
||||||
labels:
|
|
||||||
- feature
|
|
||||||
- web3
|
|
||||||
- blockchain
|
|
||||||
dependencies: []
|
|
||||||
priority: high
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Integrate Web3 wallet capabilities to enable CryptID users to link EOA wallets and Safe multisigs for on-chain transactions, voting (Snapshot), and token-gated features.
|
|
||||||
|
|
||||||
## Architecture Overview
|
|
||||||
|
|
||||||
CryptID uses ECDSA P-256 (WebCrypto), while Ethereum uses secp256k1. These curves are incompatible, so we use a **wallet linking** approach rather than key reuse.
|
|
||||||
|
|
||||||
### Core Concept
|
|
||||||
1. CryptID remains the primary authentication layer (passwordless)
|
|
||||||
2. Users can link one or more Ethereum wallets to their CryptID
|
|
||||||
3. Linking requires signing a verification message with the wallet
|
|
||||||
4. Linked wallets enable: transactions, voting, token-gating, NFT features
|
|
||||||
|
|
||||||
### Tech Stack
|
|
||||||
- **wagmi v2** + **viem** - Modern React hooks for wallet connection
|
|
||||||
- **WalletConnect v2** - Multi-wallet support (MetaMask, Rainbow, etc.)
|
|
||||||
- **Safe SDK** - Multisig wallet integration
|
|
||||||
- **Snapshot.js** - Off-chain governance voting
|
|
||||||
|
|
||||||
## Implementation Phases
|
|
||||||
|
|
||||||
### Phase 1: Wallet Linking Foundation (This Task)
|
|
||||||
- Add wagmi/viem/walletconnect dependencies
|
|
||||||
- Create linked_wallets D1 table
|
|
||||||
- Implement wallet linking API endpoints
|
|
||||||
- Build WalletLinkPanel UI component
|
|
||||||
- Display linked wallets in user settings
|
|
||||||
|
|
||||||
### Phase 2: Snapshot Voting (Future Task)
|
|
||||||
- Integrate Snapshot.js SDK
|
|
||||||
- Create VotingShape for canvas visualization
|
|
||||||
- Implement vote signing flow
|
|
||||||
|
|
||||||
### Phase 3: Safe Multisig (Future Task)
|
|
||||||
- Safe SDK integration
|
|
||||||
- TransactionBuilderShape for visual tx composition
|
|
||||||
- Collaborative signing UI
|
|
||||||
|
|
||||||
### Phase 4: Account Abstraction (Future Task)
|
|
||||||
- ERC-4337 smart wallet with P-256 signature validation
|
|
||||||
- Gasless transactions via paymaster
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [x] #1 Install and configure wagmi v2, viem, and @walletconnect/web3modal
|
|
||||||
- [x] #2 Create linked_wallets table in Cloudflare D1 with proper schema
|
|
||||||
- [x] #3 Implement POST /api/wallet/link endpoint with signature verification
|
|
||||||
- [ ] #4 Implement GET /api/wallet/list endpoint to retrieve linked wallets
|
|
||||||
- [ ] #5 Implement DELETE /api/wallet/unlink endpoint to remove wallet links
|
|
||||||
- [ ] #6 Create WalletConnectButton component using wagmi hooks
|
|
||||||
- [ ] #7 Create WalletLinkPanel component for linking flow UI
|
|
||||||
- [ ] #8 Add wallet section to user settings/profile panel
|
|
||||||
- [ ] #9 Display linked wallet addresses with ENS resolution
|
|
||||||
- [ ] #10 Support multiple wallet types: EOA, Safe, Hardware
|
|
||||||
- [ ] #11 Add wallet connection state to AuthContext
|
|
||||||
- [ ] #12 Write tests for wallet linking flow
|
|
||||||
- [ ] #13 Update CLAUDE.md with Web3 architecture documentation
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Plan
|
|
||||||
|
|
||||||
<!-- SECTION:PLAN:BEGIN -->
|
|
||||||
## Implementation Plan
|
|
||||||
|
|
||||||
### Step 1: Dependencies & Configuration
|
|
||||||
```bash
|
|
||||||
npm install wagmi viem @tanstack/react-query @walletconnect/web3modal
|
|
||||||
```
|
|
||||||
|
|
||||||
Configure wagmi with WalletConnect projectId and supported chains.
|
|
||||||
|
|
||||||
### Step 2: Database Schema
|
|
||||||
Add to D1 migration:
|
|
||||||
- linked_wallets table (user_id, wallet_address, wallet_type, chain_id, verified_at, signature_proof, ens_name, is_primary)
|
|
||||||
|
|
||||||
### Step 3: API Endpoints
|
|
||||||
Worker routes:
|
|
||||||
- POST /api/wallet/link - Verify signature, create link
|
|
||||||
- GET /api/wallet/list - List user's linked wallets
|
|
||||||
- DELETE /api/wallet/unlink - Remove a linked wallet
|
|
||||||
- GET /api/wallet/verify/:address - Check if address is linked to any CryptID
|
|
||||||
|
|
||||||
### Step 4: Frontend Components
|
|
||||||
- WagmiProvider wrapper in App.tsx
|
|
||||||
- WalletConnectButton - Connect/disconnect wallet
|
|
||||||
- WalletLinkPanel - Full linking flow with signature
|
|
||||||
- WalletBadge - Display linked wallet in UI
|
|
||||||
|
|
||||||
### Step 5: Integration
|
|
||||||
- Add linkedWallets to Session type
|
|
||||||
- Update AuthContext with wallet state
|
|
||||||
- Add wallet section to settings panel
|
|
||||||
|
|
||||||
### Step 6: Testing
|
|
||||||
- Unit tests for signature verification
|
|
||||||
- Integration tests for linking flow
|
|
||||||
- E2E test for full wallet link journey
|
|
||||||
<!-- SECTION:PLAN:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
## Planning Complete (2026-01-02)
|
|
||||||
|
|
||||||
Comprehensive planning phase completed:
|
|
||||||
|
|
||||||
### Created Architecture Document (doc-001)
|
|
||||||
- Full technical architecture for wallet linking
|
|
||||||
- Database schema design
|
|
||||||
- API endpoint specifications
|
|
||||||
- Library comparison (wagmi/viem recommended)
|
|
||||||
- Security considerations
|
|
||||||
- Frontend component designs
|
|
||||||
|
|
||||||
### Created Migration File
|
|
||||||
- `worker/migrations/002_linked_wallets.sql`
|
|
||||||
- Tables: linked_wallets, wallet_link_tokens, wallet_token_balances
|
|
||||||
- Proper indexes and foreign keys
|
|
||||||
|
|
||||||
### Created Follow-up Tasks
|
|
||||||
- task-060: Snapshot Voting Integration
|
|
||||||
- task-061: Safe Multisig Integration
|
|
||||||
- task-062: Account Abstraction Exploration
|
|
||||||
|
|
||||||
### Key Architecture Decisions
|
|
||||||
1. **Wallet Linking** approach (not key reuse) due to P-256/secp256k1 incompatibility
|
|
||||||
2. **wagmi v2 + viem** for frontend (React hooks, tree-shakeable)
|
|
||||||
3. **viem** for worker (signature verification)
|
|
||||||
4. **EIP-191 personal_sign** for EOA verification
|
|
||||||
5. **ERC-1271** for Safe/contract wallet verification (future)
|
|
||||||
|
|
||||||
### Next Steps
|
|
||||||
1. Install dependencies: wagmi, viem, @tanstack/react-query, @web3modal/wagmi
|
|
||||||
2. Run migration on D1
|
|
||||||
3. Implement API endpoints in worker
|
|
||||||
4. Build WalletLinkPanel UI component
|
|
||||||
|
|
||||||
## Implementation Complete (Phase 1: Wallet Linking)
|
|
||||||
|
|
||||||
### Files Created:
|
|
||||||
- `src/providers/Web3Provider.tsx` - Wagmi v2 config with WalletConnect
|
|
||||||
- `src/hooks/useWallet.ts` - React hooks for wallet connection/linking
|
|
||||||
- `src/components/WalletLinkPanel.tsx` - UI component for wallet management
|
|
||||||
- `worker/walletAuth.ts` - Backend signature verification and API handlers
|
|
||||||
- `worker/migrations/002_linked_wallets.sql` - Database schema
|
|
||||||
|
|
||||||
### Files Modified:
|
|
||||||
- `worker/types.ts` - Added wallet types
|
|
||||||
- `worker/worker.ts` - Added wallet API routes
|
|
||||||
- `src/App.tsx` - Integrated Web3Provider
|
|
||||||
- `src/ui/UserSettingsModal.tsx` - Added wallet section to Integrations tab
|
|
||||||
|
|
||||||
### Features:
|
|
||||||
- Connect wallets via MetaMask, WalletConnect, Coinbase Wallet
|
|
||||||
- Link wallets to CryptID accounts via EIP-191 signature
|
|
||||||
- View/manage linked wallets
|
|
||||||
- Set primary wallet, unlink wallets
|
|
||||||
- Supports mainnet, Optimism, Arbitrum, Base, Polygon
|
|
||||||
|
|
||||||
### Remaining Work:
|
|
||||||
- Add @noble/hashes (keccak256) and @noble/curves (secp256k1 signature recovery / ecrecover) to replace the placeholder functions
|
|
||||||
- Run D1 migration on production
|
|
||||||
- Get WalletConnect Project ID from cloud.walletconnect.com
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
@ -1,22 +0,0 @@
|
||||||
---
|
|
||||||
id: task-008
|
|
||||||
title: Audio Recording Feature
|
|
||||||
status: To Do
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-03'
|
|
||||||
labels: [feature, audio, media]
|
|
||||||
priority: medium
|
|
||||||
branch: audio-recording-attempt
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
Implement audio recording capability for voice notes and audio annotations on the canvas.
|
|
||||||
|
|
||||||
## Branch Info
|
|
||||||
- **Branch**: `audio-recording-attempt`
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
- [ ] Record audio from microphone
|
|
||||||
- [ ] Save audio clips to canvas
|
|
||||||
- [ ] Playback audio annotations
|
|
||||||
- [ ] Transcription integration
|
|
||||||
|
|
@ -1,22 +0,0 @@
|
||||||
---
|
|
||||||
id: task-009
|
|
||||||
title: Web Speech API Transcription
|
|
||||||
status: To Do
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-03'
|
|
||||||
labels: [feature, transcription, speech]
|
|
||||||
priority: medium
|
|
||||||
branch: transcribe-webspeechAPI
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
Implement speech-to-text transcription using the Web Speech API for voice input on the canvas.
|
|
||||||
|
|
||||||
## Branch Info
|
|
||||||
- **Branch**: `transcribe-webspeechAPI`
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
- [ ] Capture speech via Web Speech API
|
|
||||||
- [ ] Convert to text in real-time
|
|
||||||
- [ ] Display transcription on canvas
|
|
||||||
- [ ] Support multiple languages
|
|
||||||
|
|
@ -1,21 +0,0 @@
|
||||||
---
|
|
||||||
id: task-010
|
|
||||||
title: Holon Integration
|
|
||||||
status: To Do
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-03'
|
|
||||||
labels: [feature, holon, integration]
|
|
||||||
priority: medium
|
|
||||||
branch: holon-integration
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
Integrate Holon framework for hierarchical canvas organization and nested structures.
|
|
||||||
|
|
||||||
## Branch Info
|
|
||||||
- **Branch**: `holon-integration`
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
- [ ] Implement holon data structure
|
|
||||||
- [ ] Enable nested canvas elements
|
|
||||||
- [ ] Support hierarchical navigation
|
|
||||||
|
|
@ -1,21 +0,0 @@
|
||||||
---
|
|
||||||
id: task-011
|
|
||||||
title: Terminal Tool
|
|
||||||
status: To Do
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-03'
|
|
||||||
labels: [feature, terminal, tool]
|
|
||||||
priority: medium
|
|
||||||
branch: feature/terminal-tool
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
Add a terminal tool to the canvas toolbar for embedding terminal sessions.
|
|
||||||
|
|
||||||
## Branch Info
|
|
||||||
- **Branch**: `feature/terminal-tool`
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
- [ ] Add terminal tool to toolbar
|
|
||||||
- [ ] Spawn terminal instances on canvas
|
|
||||||
- [ ] Handle terminal sizing and positioning
|
|
||||||
|
|
@ -1,67 +0,0 @@
|
||||||
---
|
|
||||||
id: task-012
|
|
||||||
title: Dark Mode Theme
|
|
||||||
status: Done
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-03'
|
|
||||||
updated_date: '2025-12-04 06:29'
|
|
||||||
labels:
|
|
||||||
- feature
|
|
||||||
- ui
|
|
||||||
- theme
|
|
||||||
dependencies: []
|
|
||||||
priority: medium
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Implement dark mode theme support for the canvas interface.
|
|
||||||
|
|
||||||
## Branch Info
|
|
||||||
- **Branch**: `dark-mode`
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [x] #1 Create dark theme colors
|
|
||||||
- [x] #2 Add theme toggle
|
|
||||||
- [x] #3 Persist user preference
|
|
||||||
- [x] #4 System theme detection
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
## Implementation Complete (2025-12-03)
|
|
||||||
|
|
||||||
### Components Updated:
|
|
||||||
|
|
||||||
1. **Mycelial Intelligence (MI) Bar** (`src/ui/MycelialIntelligenceBar.tsx`)
|
|
||||||
- Added dark mode color palette with automatic switching based on `isDark` state
|
|
||||||
- Dark backgrounds, lighter text, adjusted shadows
|
|
||||||
- Inline code blocks use CSS class for proper dark mode styling
|
|
||||||
|
|
||||||
2. **Comprehensive CSS Dark Mode** (`src/css/style.css`)
|
|
||||||
- Added CSS variables: `--card-bg`, `--input-bg`, `--muted-text`
|
|
||||||
- Dark mode styles for: blockquotes, tables, navigation, command palette, MDXEditor, chat containers, form inputs, error/success messages
|
|
||||||
|
|
||||||
3. **UserSettingsModal** (`src/ui/UserSettingsModal.tsx`)
|
|
||||||
- Added `colors` object with dark/light mode variants
|
|
||||||
- Updated all inline styles to use theme-aware colors
|
|
||||||
|
|
||||||
4. **StandardizedToolWrapper** (`src/components/StandardizedToolWrapper.tsx`)
|
|
||||||
- Added `useIsDarkMode` hook for dark mode detection
|
|
||||||
- Updated wrapper backgrounds, shadows, borders, tags styling
|
|
||||||
|
|
||||||
5. **Markdown Tool** (`src/shapes/MarkdownShapeUtil.tsx`)
|
|
||||||
- Dark mode detection with automatic background switching
|
|
||||||
- Fixed scrollbar: vertical only, hidden when not needed
|
|
||||||
- Added toolbar minimize/expand button
|
|
||||||
|
|
||||||
### Technical Details:
|
|
||||||
- Automatic detection via `document.documentElement.classList` observer
|
|
||||||
- CSS variables for base styles that auto-switch in dark mode
|
|
||||||
- Inline style support with conditional color objects
|
|
||||||
- Comprehensive coverage of all major UI components and tools
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
@ -1,44 +0,0 @@
|
||||||
---
|
|
||||||
id: task-013
|
|
||||||
title: Markdown Tool UX Improvements
|
|
||||||
status: Done
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-04 06:29'
|
|
||||||
updated_date: '2025-12-04 06:29'
|
|
||||||
labels:
|
|
||||||
- feature
|
|
||||||
- ui
|
|
||||||
- markdown
|
|
||||||
dependencies: []
|
|
||||||
priority: medium
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Improve the Markdown tool user experience with better scrollbar behavior and collapsible toolbar.
|
|
||||||
|
|
||||||
## Changes Implemented:
|
|
||||||
- Scrollbar is now vertical only (no horizontal scrollbar)
|
|
||||||
- Scrollbar auto-hides when not needed
|
|
||||||
- Added minimize/expand button for the formatting toolbar
|
|
||||||
- Full editing area uses available space
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [x] #1 Scrollbar is vertical only
|
|
||||||
- [x] #2 Scrollbar hides when not needed
|
|
||||||
- [x] #3 Toolbar has minimize/expand toggle
|
|
||||||
- [x] #4 Full window is editing area
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
Implementation completed in `src/shapes/MarkdownShapeUtil.tsx`:
|
|
||||||
- Added `overflow-x: hidden` to content area
|
|
||||||
- Custom scrollbar styling with thin width and auto-hide
|
|
||||||
- Added toggle button in toolbar that collapses/expands formatting options
|
|
||||||
- `isToolbarMinimized` state controls toolbar visibility
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
@ -1,351 +0,0 @@
|
||||||
---
|
|
||||||
id: task-014
|
|
||||||
title: Implement WebGPU-based local image generation to reduce RunPod costs
|
|
||||||
status: To Do
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-04 11:46'
|
|
||||||
updated_date: '2025-12-04 11:47'
|
|
||||||
labels:
|
|
||||||
- performance
|
|
||||||
- cost-optimization
|
|
||||||
- webgpu
|
|
||||||
- ai
|
|
||||||
- image-generation
|
|
||||||
dependencies: []
|
|
||||||
priority: high
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Integrate WebGPU-powered browser-based image generation (SD-Turbo) to reduce RunPod API costs and eliminate cold start delays. This creates a hybrid pipeline where quick drafts/iterations run locally in the browser (FREE, ~1-3 seconds), while high-quality final renders still use RunPod SDXL.
|
|
||||||
|
|
||||||
**Problem:**
|
|
||||||
- Current image generation always hits RunPod (~$0.02/image + 10-30s cold starts)
|
|
||||||
- No instant feedback loop for creative iteration
|
|
||||||
- 100% of compute costs are cloud-based
|
|
||||||
|
|
||||||
**Solution:**
|
|
||||||
- Add WebGPU capability detection
|
|
||||||
- Integrate SD-Turbo for instant browser-based previews
|
|
||||||
- Smart routing: drafts → browser, final renders → RunPod
|
|
||||||
- Potential 70% reduction in RunPod image generation costs
|
|
||||||
|
|
||||||
**Cost Impact (projected):**
|
|
||||||
- 1,000 images/mo: $20 → $6 (save $14/mo)
|
|
||||||
- 5,000 images/mo: $100 → $30 (save $70/mo)
|
|
||||||
- 10,000 images/mo: $200 → $60 (save $140/mo)
|
|
||||||
|
|
||||||
**Browser Support:**
|
|
||||||
- Chrome/Edge: Full WebGPU (v113+)
|
|
||||||
- Firefox: Windows (July 2025)
|
|
||||||
- Safari: v26 beta
|
|
||||||
- Fallback: WASM backend for unsupported browsers
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [ ] #1 WebGPU capability detection added to clientConfig.ts
|
|
||||||
- [ ] #2 SD-Turbo model loads and runs in browser via WebGPU
|
|
||||||
- [ ] #3 ImageGenShapeUtil has Quick Preview vs High Quality toggle
|
|
||||||
- [ ] #4 Smart routing in aiOrchestrator routes drafts to browser
|
|
||||||
- [ ] #5 Fallback to WASM for browsers without WebGPU
|
|
||||||
- [ ] #6 User can generate preview images with zero cold start
|
|
||||||
- [ ] #7 RunPod only called for High Quality final renders
|
|
||||||
- [ ] #8 Model download progress indicator shown to user
|
|
||||||
- [ ] #9 Works offline after initial model download
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Plan
|
|
||||||
|
|
||||||
<!-- SECTION:PLAN:BEGIN -->
|
|
||||||
## Phase 1: Foundation (Quick Wins)
|
|
||||||
|
|
||||||
### 1.1 WebGPU Capability Detection
|
|
||||||
**File:** `src/lib/clientConfig.ts`
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
export async function detectWebGPUCapabilities(): Promise<{
|
|
||||||
hasWebGPU: boolean
|
|
||||||
hasF16: boolean
|
|
||||||
adapterInfo?: GPUAdapterInfo
|
|
||||||
estimatedVRAM?: number
|
|
||||||
}> {
|
|
||||||
if (!navigator.gpu) {
|
|
||||||
return { hasWebGPU: false, hasF16: false }
|
|
||||||
}
|
|
||||||
|
|
||||||
const adapter = await navigator.gpu.requestAdapter()
|
|
||||||
if (!adapter) {
|
|
||||||
return { hasWebGPU: false, hasF16: false }
|
|
||||||
}
|
|
||||||
|
|
||||||
const hasF16 = adapter.features.has('shader-f16')
|
|
||||||
  const adapterInfo = adapter.info // NOTE: requestAdapterInfo() was removed from the WebGPU spec; adapter.info is the current API
|
|
||||||
|
|
||||||
return {
|
|
||||||
hasWebGPU: true,
|
|
||||||
hasF16,
|
|
||||||
adapterInfo,
|
|
||||||
estimatedVRAM: adapterInfo.memoryHeaps?.[0]?.size
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### 1.2 Install Dependencies
|
|
||||||
```bash
|
|
||||||
npm install onnxruntime-web   # (@anthropic-ai/sdk is unrelated to local image generation — verify before adding it here)
|
|
||||||
# Or for transformers.js v3:
|
|
||||||
npm install @huggingface/transformers
|
|
||||||
```
|
|
||||||
|
|
||||||
### 1.3 Vite Config Updates
|
|
||||||
**File:** `vite.config.ts`
|
|
||||||
- Ensure WASM/ONNX assets are properly bundled
|
|
||||||
- Add WebGPU shader compilation support
|
|
||||||
- Configure chunk splitting for ML models
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Phase 2: Browser Diffusion Integration
|
|
||||||
|
|
||||||
### 2.1 Create WebGPU Diffusion Module
|
|
||||||
**New File:** `src/lib/webgpuDiffusion.ts`
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
import { pipeline } from '@huggingface/transformers'
|
|
||||||
|
|
||||||
let generator: any = null
|
|
||||||
let loadingPromise: Promise<void> | null = null
|
|
||||||
|
|
||||||
export async function initSDTurbo(
|
|
||||||
onProgress?: (progress: number, status: string) => void
|
|
||||||
): Promise<void> {
|
|
||||||
if (generator) return
|
|
||||||
if (loadingPromise) return loadingPromise
|
|
||||||
|
|
||||||
loadingPromise = (async () => {
|
|
||||||
onProgress?.(0, 'Loading SD-Turbo model...')
|
|
||||||
|
|
||||||
generator = await pipeline(
|
|
||||||
'text-to-image',
|
|
||||||
'Xenova/sdxl-turbo', // or 'stabilityai/sd-turbo'
|
|
||||||
{
|
|
||||||
device: 'webgpu',
|
|
||||||
dtype: 'fp16',
|
|
||||||
progress_callback: (p) => onProgress?.(p.progress, p.status)
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
onProgress?.(100, 'Ready')
|
|
||||||
})()
|
|
||||||
|
|
||||||
return loadingPromise
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function generateLocalImage(
|
|
||||||
prompt: string,
|
|
||||||
options?: {
|
|
||||||
width?: number
|
|
||||||
height?: number
|
|
||||||
steps?: number
|
|
||||||
seed?: number
|
|
||||||
}
|
|
||||||
): Promise<string> {
|
|
||||||
if (!generator) {
|
|
||||||
throw new Error('SD-Turbo not initialized. Call initSDTurbo() first.')
|
|
||||||
}
|
|
||||||
|
|
||||||
const result = await generator(prompt, {
|
|
||||||
width: options?.width || 512,
|
|
||||||
height: options?.height || 512,
|
|
||||||
num_inference_steps: options?.steps || 1, // SD-Turbo = 1 step
|
|
||||||
seed: options?.seed
|
|
||||||
})
|
|
||||||
|
|
||||||
// Returns base64 data URL
|
|
||||||
return result[0].image
|
|
||||||
}
|
|
||||||
|
|
||||||
export function isSDTurboReady(): boolean {
|
|
||||||
return generator !== null
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function unloadSDTurbo(): Promise<void> {
|
|
||||||
generator = null
|
|
||||||
loadingPromise = null
|
|
||||||
// Force garbage collection of GPU memory
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### 2.2 Create Model Download Manager
|
|
||||||
**New File:** `src/lib/modelDownloadManager.ts`
|
|
||||||
|
|
||||||
Handle progressive model downloads with:
|
|
||||||
- IndexedDB caching for persistence
|
|
||||||
- Progress tracking UI
|
|
||||||
- Resume capability for interrupted downloads
|
|
||||||
- Storage quota management
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Phase 3: UI Integration
|
|
||||||
|
|
||||||
### 3.1 Update ImageGenShapeUtil
|
|
||||||
**File:** `src/shapes/ImageGenShapeUtil.tsx`
|
|
||||||
|
|
||||||
Add to shape props:
|
|
||||||
```typescript
|
|
||||||
type IImageGen = TLBaseShape<"ImageGen", {
|
|
||||||
// ... existing props
|
|
||||||
generationMode: 'auto' | 'local' | 'cloud' // NEW
|
|
||||||
localModelStatus: 'not-loaded' | 'loading' | 'ready' | 'error' // NEW
|
|
||||||
localModelProgress: number // NEW (0-100)
|
|
||||||
}>
|
|
||||||
```
|
|
||||||
|
|
||||||
Add UI toggle:
|
|
||||||
```tsx
|
|
||||||
<div className="generation-mode-toggle">
|
|
||||||
<button
|
|
||||||
onClick={() => setMode('local')}
|
|
||||||
disabled={!hasWebGPU}
|
|
||||||
title={!hasWebGPU ? 'WebGPU not supported' : 'Fast preview (~1-3s)'}
|
|
||||||
>
|
|
||||||
⚡ Quick Preview
|
|
||||||
</button>
|
|
||||||
<button
|
|
||||||
onClick={() => setMode('cloud')}
|
|
||||||
title="High quality SDXL (~10-30s)"
|
|
||||||
>
|
|
||||||
✨ High Quality
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
```
|
|
||||||
|
|
||||||
### 3.2 Smart Generation Logic
|
|
||||||
```typescript
|
|
||||||
const generateImage = async (prompt: string) => {
|
|
||||||
const mode = shape.props.generationMode
|
|
||||||
const capabilities = await detectWebGPUCapabilities()
|
|
||||||
|
|
||||||
// Auto mode: local for iterations, cloud for final
|
|
||||||
if (mode === 'auto' || mode === 'local') {
|
|
||||||
if (capabilities.hasWebGPU && isSDTurboReady()) {
|
|
||||||
// Generate locally - instant!
|
|
||||||
const imageUrl = await generateLocalImage(prompt)
|
|
||||||
updateShape({ imageUrl, source: 'local' })
|
|
||||||
return
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Fall back to RunPod
|
|
||||||
await generateWithRunPod(prompt)
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Phase 4: AI Orchestrator Integration
|
|
||||||
|
|
||||||
### 4.1 Update aiOrchestrator.ts
|
|
||||||
**File:** `src/lib/aiOrchestrator.ts`
|
|
||||||
|
|
||||||
Add browser as compute target:
|
|
||||||
```typescript
|
|
||||||
type ComputeTarget = 'browser' | 'netcup' | 'runpod'
|
|
||||||
|
|
||||||
interface ImageGenerationOptions {
|
|
||||||
prompt: string
|
|
||||||
priority: 'draft' | 'final'
|
|
||||||
preferLocal?: boolean
|
|
||||||
}
|
|
||||||
|
|
||||||
async function generateImage(options: ImageGenerationOptions) {
|
|
||||||
const { hasWebGPU } = await detectWebGPUCapabilities()
|
|
||||||
|
|
||||||
// Routing logic
|
|
||||||
if (options.priority === 'draft' && hasWebGPU && isSDTurboReady()) {
|
|
||||||
return { target: 'browser', cost: 0 }
|
|
||||||
}
|
|
||||||
|
|
||||||
if (options.priority === 'final') {
|
|
||||||
return { target: 'runpod', cost: 0.02 }
|
|
||||||
}
|
|
||||||
|
|
||||||
// Fallback chain
|
|
||||||
return { target: 'runpod', cost: 0.02 }
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Phase 5: Advanced Features (Future)
|
|
||||||
|
|
||||||
### 5.1 Real-time img2img Refinement
|
|
||||||
- Start with browser SD-Turbo draft
|
|
||||||
- User adjusts/annotates
|
|
||||||
- Send to RunPod SDXL for final with img2img
|
|
||||||
|
|
||||||
### 5.2 Browser-based Upscaling
|
|
||||||
- Add Real-ESRGAN-lite via ONNX Runtime
|
|
||||||
- 2x/4x upscale locally before cloud render
|
|
||||||
|
|
||||||
### 5.3 Background Removal
|
|
||||||
- U2Net in browser via transformers.js
|
|
||||||
- Zero-cost background removal
|
|
||||||
|
|
||||||
### 5.4 Style Transfer
|
|
||||||
- Fast neural style transfer via WebGPU shaders
|
|
||||||
- Real-time preview on canvas
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Technical Considerations
|
|
||||||
|
|
||||||
### Model Sizes
|
|
||||||
| Model | Size | Load Time | Generation |
|
|
||||||
|-------|------|-----------|------------|
|
|
||||||
| SD-Turbo | ~2GB | 30-60s (first) | 1-3s |
|
|
||||||
| SD-Turbo (quantized) | ~1GB | 15-30s | 2-4s |
|
|
||||||
|
|
||||||
### Memory Management
|
|
||||||
- Unload model when tab backgrounded
|
|
||||||
- Clear GPU memory on low-memory warnings
|
|
||||||
- IndexedDB for model caching (survives refresh)
|
|
||||||
|
|
||||||
### Error Handling
|
|
||||||
- Graceful degradation to WASM if WebGPU fails
|
|
||||||
- Clear error messages for unsupported browsers
|
|
||||||
- Automatic fallback to RunPod on local failure
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Files to Create/Modify
|
|
||||||
|
|
||||||
**New Files:**
|
|
||||||
- `src/lib/webgpuDiffusion.ts` - SD-Turbo wrapper
|
|
||||||
- `src/lib/modelDownloadManager.ts` - Model caching
|
|
||||||
- `src/lib/webgpuCapabilities.ts` - Detection utilities
|
|
||||||
- `src/components/ModelDownloadProgress.tsx` - UI component
|
|
||||||
|
|
||||||
**Modified Files:**
|
|
||||||
- `src/lib/clientConfig.ts` - Add WebGPU detection
|
|
||||||
- `src/lib/aiOrchestrator.ts` - Add browser routing
|
|
||||||
- `src/shapes/ImageGenShapeUtil.tsx` - Add mode toggle
|
|
||||||
- `vite.config.ts` - ONNX/WASM config
|
|
||||||
- `package.json` - New dependencies
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Testing Checklist
|
|
||||||
|
|
||||||
- [ ] WebGPU detection works on Chrome, Edge, Firefox
|
|
||||||
- [ ] WASM fallback works on Safari/older browsers
|
|
||||||
- [ ] Model downloads and caches correctly
|
|
||||||
- [ ] Generation completes in <5s on modern GPU
|
|
||||||
- [ ] Memory cleaned up properly on unload
|
|
||||||
- [ ] Offline generation works after model cached
|
|
||||||
- [ ] RunPod fallback triggers correctly
|
|
||||||
- [ ] Cost tracking reflects local vs cloud usage
|
|
||||||
<!-- SECTION:PLAN:END -->
|
|
||||||
|
|
@ -1,146 +0,0 @@
|
||||||
---
|
|
||||||
id: task-015
|
|
||||||
title: Set up Cloudflare D1 email-collector database for cross-site subscriptions
|
|
||||||
status: To Do
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-04 12:00'
|
|
||||||
updated_date: '2025-12-04 12:03'
|
|
||||||
labels:
|
|
||||||
- infrastructure
|
|
||||||
- cloudflare
|
|
||||||
- d1
|
|
||||||
- email
|
|
||||||
- cross-site
|
|
||||||
dependencies: []
|
|
||||||
priority: medium
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Create a standalone Cloudflare D1 database for collecting email subscriptions across all websites (mycofi.earth, canvas.jeffemmett.com, decolonizeti.me, etc.) with easy export capabilities.
|
|
||||||
|
|
||||||
**Purpose:**
|
|
||||||
- Unified email collection from all sites
|
|
||||||
- Page-separated lists (e.g., /newsletter, /waitlist, /landing)
|
|
||||||
- Simple CSV/JSON export for email campaigns
|
|
||||||
- GDPR-compliant with unsubscribe tracking
|
|
||||||
|
|
||||||
**Sites to integrate:**
|
|
||||||
- mycofi.earth
|
|
||||||
- canvas.jeffemmett.com
|
|
||||||
- decolonizeti.me
|
|
||||||
- games.jeffemmett.com
|
|
||||||
- Future sites
|
|
||||||
|
|
||||||
**Key Features:**
|
|
||||||
- Double opt-in verification
|
|
||||||
- Source tracking (which site, which page)
|
|
||||||
- Export in multiple formats (CSV, JSON, Mailchimp)
|
|
||||||
- Basic admin dashboard or CLI for exports
|
|
||||||
- Rate limiting to prevent abuse
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [ ] #1 D1 database 'email-collector' created on Cloudflare
|
|
||||||
- [ ] #2 Schema deployed with subscribers, verification_tokens tables
|
|
||||||
- [ ] #3 POST /api/subscribe endpoint accepts email + source_site + source_page
|
|
||||||
- [ ] #4 Email verification flow with token-based double opt-in
|
|
||||||
- [ ] #5 GET /api/emails/export returns CSV with filters (site, date, verified)
|
|
||||||
- [ ] #6 Unsubscribe endpoint and tracking
|
|
||||||
- [ ] #7 Rate limiting prevents spam submissions
|
|
||||||
- [ ] #8 At least one site integrated and collecting emails
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Plan
|
|
||||||
|
|
||||||
<!-- SECTION:PLAN:BEGIN -->
|
|
||||||
## Implementation Steps
|
|
||||||
|
|
||||||
### 1. Create D1 Database
|
|
||||||
```bash
|
|
||||||
wrangler d1 create email-collector
|
|
||||||
```
|
|
||||||
|
|
||||||
### 2. Create Schema File
|
|
||||||
Create `worker/email-collector-schema.sql`:
|
|
||||||
|
|
||||||
```sql
|
|
||||||
-- Email Collector Schema
|
|
||||||
-- Cross-site email subscription management
|
|
||||||
|
|
||||||
CREATE TABLE IF NOT EXISTS subscribers (
|
|
||||||
id TEXT PRIMARY KEY,
|
|
||||||
email TEXT NOT NULL,
|
|
||||||
email_hash TEXT NOT NULL, -- For duplicate checking
|
|
||||||
source_site TEXT NOT NULL,
|
|
||||||
source_page TEXT,
|
|
||||||
referrer TEXT,
|
|
||||||
ip_country TEXT,
|
|
||||||
subscribed_at TEXT DEFAULT (datetime('now')),
|
|
||||||
verified INTEGER DEFAULT 0,
|
|
||||||
verified_at TEXT,
|
|
||||||
unsubscribed INTEGER DEFAULT 0,
|
|
||||||
unsubscribed_at TEXT,
|
|
||||||
metadata TEXT -- JSON for custom fields
|
|
||||||
);
|
|
||||||
|
|
||||||
CREATE TABLE IF NOT EXISTS verification_tokens (
|
|
||||||
id TEXT PRIMARY KEY,
|
|
||||||
email TEXT NOT NULL,
|
|
||||||
token TEXT UNIQUE NOT NULL,
|
|
||||||
expires_at TEXT NOT NULL,
|
|
||||||
used INTEGER DEFAULT 0,
|
|
||||||
created_at TEXT DEFAULT (datetime('now'))
|
|
||||||
);
|
|
||||||
|
|
||||||
-- Rate limiting table
|
|
||||||
CREATE TABLE IF NOT EXISTS rate_limits (
|
|
||||||
ip_hash TEXT PRIMARY KEY,
|
|
||||||
request_count INTEGER DEFAULT 1,
|
|
||||||
window_start TEXT DEFAULT (datetime('now'))
|
|
||||||
);
|
|
||||||
|
|
||||||
-- Indexes
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_subs_email_hash ON subscribers(email_hash);
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_subs_site ON subscribers(source_site);
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_subs_page ON subscribers(source_site, source_page);
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_subs_verified ON subscribers(verified);
|
|
||||||
CREATE UNIQUE INDEX IF NOT EXISTS idx_subs_unique ON subscribers(email_hash, source_site);
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_tokens_token ON verification_tokens(token);
|
|
||||||
```
|
|
||||||
|
|
||||||
### 3. Create Worker Endpoints
|
|
||||||
Create `worker/emailCollector.ts`:
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
// POST /api/subscribe
|
|
||||||
// GET /api/verify/:token
|
|
||||||
// POST /api/unsubscribe
|
|
||||||
// GET /api/emails/export (auth required)
|
|
||||||
// GET /api/emails/stats
|
|
||||||
```
|
|
||||||
|
|
||||||
### 4. Export Formats
|
|
||||||
- CSV: `email,source_site,source_page,subscribed_at,verified`
|
|
||||||
- JSON: Full object array
|
|
||||||
- Mailchimp: CSV with required headers
|
|
||||||
|
|
||||||
### 5. Admin Authentication
|
|
||||||
- Use simple API key for export endpoint
|
|
||||||
- Store in Worker secret: `EMAIL_ADMIN_KEY`
|
|
||||||
|
|
||||||
### 6. Integration
|
|
||||||
Add to each site's signup form:
|
|
||||||
```javascript
|
|
||||||
fetch('https://canvas.jeffemmett.com/api/subscribe', {
|
|
||||||
method: 'POST',
|
|
||||||
body: JSON.stringify({
|
|
||||||
email: 'user@example.com',
|
|
||||||
source_site: 'mycofi.earth',
|
|
||||||
source_page: '/newsletter'
|
|
||||||
})
|
|
||||||
})
|
|
||||||
```
|
|
||||||
<!-- SECTION:PLAN:END -->
|
|
||||||
|
|
@ -1,56 +0,0 @@
|
||||||
---
|
|
||||||
id: task-016
|
|
||||||
title: Add encryption for CryptID emails at rest
|
|
||||||
status: To Do
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-04 12:01'
|
|
||||||
labels:
|
|
||||||
- security
|
|
||||||
- cryptid
|
|
||||||
- encryption
|
|
||||||
- privacy
|
|
||||||
- d1
|
|
||||||
dependencies:
|
|
||||||
- task-017
|
|
||||||
priority: medium
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Enhance CryptID security by encrypting email addresses stored in D1 database. This protects user privacy even if the database is compromised.
|
|
||||||
|
|
||||||
**Encryption Strategy:**
|
|
||||||
- Encrypt email addresses before storing in D1
|
|
||||||
- Use Cloudflare Workers KV or environment secret for encryption key
|
|
||||||
- Store encrypted email + hash for lookups
|
|
||||||
- Decrypt only when needed (sending emails, display)
|
|
||||||
|
|
||||||
**Implementation Options:**
|
|
||||||
1. **AES-GCM encryption** with key in Worker secret
|
|
||||||
2. **Deterministic encryption** for email lookups (hash-based)
|
|
||||||
3. **Hybrid approach**: Hash for lookup index, AES for actual email
|
|
||||||
|
|
||||||
**Schema Changes:**
|
|
||||||
```sql
|
|
||||||
ALTER TABLE users ADD COLUMN email_encrypted TEXT;
|
|
||||||
ALTER TABLE users ADD COLUMN email_hash TEXT; -- For lookups
|
|
||||||
-- Migrate existing emails, then drop plaintext column
|
|
||||||
```
|
|
||||||
|
|
||||||
**Considerations:**
|
|
||||||
- Key rotation strategy
|
|
||||||
- Performance impact on lookups
|
|
||||||
- Backup/recovery implications
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [ ] #1 Encryption key securely stored in Worker secrets
|
|
||||||
- [ ] #2 Emails encrypted before D1 insert
|
|
||||||
- [ ] #3 Email lookup works via hash index
|
|
||||||
- [ ] #4 Decryption works for email display and sending
|
|
||||||
- [ ] #5 Existing emails migrated to encrypted format
|
|
||||||
- [ ] #6 Key rotation procedure documented
|
|
||||||
- [ ] #7 No plaintext emails in database
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
@ -1,63 +0,0 @@
|
||||||
---
|
|
||||||
id: task-017
|
|
||||||
title: Deploy CryptID email recovery to dev branch and test
|
|
||||||
status: In Progress
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-04 12:00'
|
|
||||||
updated_date: '2025-12-11 15:15'
|
|
||||||
labels:
|
|
||||||
- feature
|
|
||||||
- cryptid
|
|
||||||
- auth
|
|
||||||
- testing
|
|
||||||
- dev-branch
|
|
||||||
dependencies:
|
|
||||||
- task-018
|
|
||||||
- task-019
|
|
||||||
priority: high
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Push the existing CryptID email recovery code changes to dev branch and test the full flow before merging to main.
|
|
||||||
|
|
||||||
**Code Changes Ready:**
|
|
||||||
- src/App.tsx - Routes for /verify-email, /link-device
|
|
||||||
- src/components/auth/CryptID.tsx - Email linking flow
|
|
||||||
- src/components/auth/Profile.tsx - Email management UI, device list
|
|
||||||
- src/css/crypto-auth.css - Styling for email/device modals
|
|
||||||
- worker/types.ts - Updated D1 types
|
|
||||||
- worker/worker.ts - Auth API routes
|
|
||||||
- worker/cryptidAuth.ts - Auth handlers (already committed)
|
|
||||||
|
|
||||||
**Test Scenarios:**
|
|
||||||
1. Link email to existing CryptID account
|
|
||||||
2. Verify email via link
|
|
||||||
3. Request device link from new device
|
|
||||||
4. Approve device link via email
|
|
||||||
5. View and revoke linked devices
|
|
||||||
6. Recover account on new device via email
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [ ] #1 All CryptID changes committed to dev branch
|
|
||||||
- [ ] #2 Worker deployed to dev environment
|
|
||||||
- [ ] #3 Link email flow works end-to-end
|
|
||||||
- [ ] #4 Email verification completes successfully
|
|
||||||
- [ ] #5 Device linking via email works
|
|
||||||
- [ ] #6 Device revocation works
|
|
||||||
- [ ] #7 Profile shows linked email and devices
|
|
||||||
- [ ] #8 No console errors in happy path
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
Branch created: `feature/cryptid-email-recovery`
|
|
||||||
|
|
||||||
Code committed and pushed to Gitea
|
|
||||||
|
|
||||||
PR available at: https://gitea.jeffemmett.com/jeffemmett/canvas-website/compare/main...feature/cryptid-email-recovery
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
@ -1,118 +0,0 @@
|
||||||
---
|
|
||||||
id: task-018
|
|
||||||
title: Create Cloudflare D1 cryptid-auth database
|
|
||||||
status: Done
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-04 12:02'
|
|
||||||
updated_date: '2025-12-06 06:39'
|
|
||||||
labels:
|
|
||||||
- infrastructure
|
|
||||||
- cloudflare
|
|
||||||
- d1
|
|
||||||
- cryptid
|
|
||||||
- auth
|
|
||||||
dependencies: []
|
|
||||||
priority: high
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Create the D1 database on Cloudflare for the CryptID authentication system. This is the first step before deploying the email recovery feature.
|
|
||||||
|
|
||||||
**Database Purpose:**
|
|
||||||
- Store user accounts linked to CryptID usernames
|
|
||||||
- Store device public keys for multi-device auth
|
|
||||||
- Store verification tokens for email/device linking
|
|
||||||
- Enable account recovery via verified email
|
|
||||||
|
|
||||||
**Security Considerations:**
|
|
||||||
- Emails should be encrypted at rest (task-016)
|
|
||||||
- Public keys are safe to store (not secrets)
|
|
||||||
- Tokens are time-limited and single-use
|
|
||||||
- No passwords stored (WebCrypto key-based auth)
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [ ] #1 D1 database 'cryptid-auth' created via wrangler d1 create
|
|
||||||
- [ ] #2 D1 database 'cryptid-auth-dev' created for dev environment
|
|
||||||
- [ ] #3 Database IDs added to wrangler.toml (replacing placeholders)
|
|
||||||
- [ ] #4 Schema from worker/schema.sql deployed to both databases
|
|
||||||
- [ ] #5 Verified tables exist: users, device_keys, verification_tokens
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Plan
|
|
||||||
|
|
||||||
<!-- SECTION:PLAN:BEGIN -->
|
|
||||||
## Implementation Steps
|
|
||||||
|
|
||||||
### 1. Create D1 Databases
|
|
||||||
Run from local machine or Netcup (requires wrangler CLI):
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cd /home/jeffe/Github/canvas-website
|
|
||||||
|
|
||||||
# Create production database
|
|
||||||
wrangler d1 create cryptid-auth
|
|
||||||
|
|
||||||
# Create dev database
|
|
||||||
wrangler d1 create cryptid-auth-dev
|
|
||||||
```
|
|
||||||
|
|
||||||
### 2. Update wrangler.toml
|
|
||||||
Replace placeholder IDs with actual database IDs from step 1:
|
|
||||||
|
|
||||||
```toml
|
|
||||||
[[d1_databases]]
|
|
||||||
binding = "CRYPTID_DB"
|
|
||||||
database_name = "cryptid-auth"
|
|
||||||
database_id = "<PROD_ID_FROM_STEP_1>"
|
|
||||||
|
|
||||||
[[env.dev.d1_databases]]
|
|
||||||
binding = "CRYPTID_DB"
|
|
||||||
database_name = "cryptid-auth-dev"
|
|
||||||
database_id = "<DEV_ID_FROM_STEP_1>"
|
|
||||||
```
|
|
||||||
|
|
||||||
### 3. Deploy Schema
|
|
||||||
```bash
|
|
||||||
# Deploy to dev first
|
|
||||||
wrangler d1 execute cryptid-auth-dev --file=./worker/schema.sql
|
|
||||||
|
|
||||||
# Then production
|
|
||||||
wrangler d1 execute cryptid-auth --file=./worker/schema.sql
|
|
||||||
```
|
|
||||||
|
|
||||||
### 4. Verify Tables
|
|
||||||
```bash
|
|
||||||
# Check dev
|
|
||||||
wrangler d1 execute cryptid-auth-dev --command="SELECT name FROM sqlite_master WHERE type='table';"
|
|
||||||
|
|
||||||
# Expected output:
|
|
||||||
# - users
|
|
||||||
# - device_keys
|
|
||||||
# - verification_tokens
|
|
||||||
```
|
|
||||||
|
|
||||||
### 5. Commit wrangler.toml Changes
|
|
||||||
```bash
|
|
||||||
git add wrangler.toml
|
|
||||||
git commit -m "chore: add D1 database IDs for cryptid-auth"
|
|
||||||
```
|
|
||||||
<!-- SECTION:PLAN:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
Feature branch: `feature/cryptid-email-recovery`
|
|
||||||
|
|
||||||
Code is ready - waiting for D1 database creation
|
|
||||||
|
|
||||||
Schema deployed to production D1 (35fbe755-0e7c-4b9a-a454-34f945e5f7cc)
|
|
||||||
|
|
||||||
Tables created:
|
|
||||||
- users, device_keys, verification_tokens (CryptID auth)
|
|
||||||
- boards, board_permissions (permissions system)
|
|
||||||
- user_profiles, user_connections, connection_metadata (social graph)
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
@ -1,41 +0,0 @@
|
||||||
---
|
|
||||||
id: task-019
|
|
||||||
title: Configure CryptID secrets and SendGrid integration
|
|
||||||
status: To Do
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-04 12:02'
|
|
||||||
labels:
|
|
||||||
- infrastructure
|
|
||||||
- cloudflare
|
|
||||||
- cryptid
|
|
||||||
- secrets
|
|
||||||
- sendgrid
|
|
||||||
dependencies:
|
|
||||||
- task-018
|
|
||||||
priority: high
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Set up the required secrets and environment variables for CryptID email functionality on Cloudflare Workers.
|
|
||||||
|
|
||||||
**Required Secrets:**
|
|
||||||
- SENDGRID_API_KEY - For sending verification emails
|
|
||||||
- CRYPTID_EMAIL_FROM - Sender email address (e.g., auth@jeffemmett.com)
|
|
||||||
- APP_URL - Base URL for verification links (e.g., https://canvas.jeffemmett.com); note this is a plain environment variable set in wrangler.toml, not a secret (see acceptance criterion #3)
|
|
||||||
|
|
||||||
**Configuration:**
|
|
||||||
- Secrets set for both production and dev environments
|
|
||||||
- SendGrid account configured with verified sender domain
|
|
||||||
- Email templates tested
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [ ] #1 SENDGRID_API_KEY secret set via wrangler secret put
|
|
||||||
- [ ] #2 CRYPTID_EMAIL_FROM secret configured
|
|
||||||
- [ ] #3 APP_URL environment variable set in wrangler.toml
|
|
||||||
- [ ] #4 SendGrid sender domain verified (jeffemmett.com or subdomain)
|
|
||||||
- [ ] #5 Test email sends successfully from Worker
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
@ -1,184 +0,0 @@
|
||||||
---
|
|
||||||
id: task-024
|
|
||||||
title: 'Open Mapping: Collaborative Route Planning Module'
|
|
||||||
status: Done
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-04 14:30'
|
|
||||||
updated_date: '2025-12-07 06:43'
|
|
||||||
labels:
|
|
||||||
- feature
|
|
||||||
- mapping
|
|
||||||
dependencies:
|
|
||||||
- task-029
|
|
||||||
- task-030
|
|
||||||
- task-031
|
|
||||||
- task-036
|
|
||||||
- task-037
|
|
||||||
priority: high
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Implement an open-source mapping and routing layer for the canvas that provides advanced route planning capabilities beyond Google Maps. Built on OpenStreetMap, OSRM/Valhalla, and MapLibre GL JS.
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [x] #1 MapLibre GL JS integrated with tldraw canvas
|
|
||||||
- [x] #2 OSRM routing backend deployed to Netcup
|
|
||||||
- [x] #3 Waypoint placement and route calculation working
|
|
||||||
- [ ] #4 Multi-route comparison UI implemented
|
|
||||||
- [ ] #5 Y.js collaboration for shared route editing
|
|
||||||
- [ ] #6 Layer management panel with basemap switching
|
|
||||||
- [ ] #7 Offline tile caching via Service Worker
|
|
||||||
- [ ] #8 Budget tracking per waypoint/route
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Plan
|
|
||||||
|
|
||||||
<!-- SECTION:PLAN:BEGIN -->
|
|
||||||
Phase 1 - Foundation:
|
|
||||||
- Integrate MapLibre GL JS with tldraw
|
|
||||||
- Deploy OSRM to /opt/apps/open-mapping/
|
|
||||||
- Basic waypoint and route UI
|
|
||||||
|
|
||||||
Phase 2 - Multi-Route:
|
|
||||||
- Alternative routes visualization
|
|
||||||
- Route comparison panel
|
|
||||||
- Elevation profiles
|
|
||||||
|
|
||||||
Phase 3 - Collaboration:
|
|
||||||
- Y.js integration
|
|
||||||
- Real-time cursor presence
|
|
||||||
- Share links
|
|
||||||
|
|
||||||
Phase 4 - Layers:
|
|
||||||
- Layer panel UI
|
|
||||||
- Multiple basemaps
|
|
||||||
- Custom overlays
|
|
||||||
|
|
||||||
Phase 5 - Calendar/Budget:
|
|
||||||
- Time windows on waypoints
|
|
||||||
- Cost estimation
|
|
||||||
- iCal export
|
|
||||||
|
|
||||||
Phase 6 - Optimization:
|
|
||||||
- VROOM TSP/VRP
|
|
||||||
- Offline PWA
|
|
||||||
<!-- SECTION:PLAN:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
**Subsystem implementations completed:**
|
|
||||||
- task-029: zkGPS Privacy Protocol (src/open-mapping/privacy/)
|
|
||||||
- task-030: Mycelial Signal Propagation (src/open-mapping/mycelium/)
|
|
||||||
- task-031: Alternative Map Lens System (src/open-mapping/lenses/)
|
|
||||||
- task-036: Possibility Cones & Constraints (src/open-mapping/conics/)
|
|
||||||
- task-037: Location Games & Discovery (src/open-mapping/discovery/)
|
|
||||||
|
|
||||||
**Still needs:**
|
|
||||||
- MapLibre GL JS canvas integration
|
|
||||||
- OSRM backend deployment
|
|
||||||
- UI components for all subsystems
|
|
||||||
- Automerge sync for collaborative editing
|
|
||||||
|
|
||||||
Pushed to feature/open-mapping branch:
|
|
||||||
- MapShapeUtil for tldraw canvas integration
|
|
||||||
- Presence layer with location sharing
|
|
||||||
- Mycelium network visualization
|
|
||||||
- Discovery system (spores, hunts, collectibles)
|
|
||||||
- Privacy system with ZK-GPS protocol concepts
|
|
||||||
|
|
||||||
**Merged to dev branch (2025-12-05):**
|
|
||||||
- All subsystem TypeScript implementations merged
|
|
||||||
- MapShapeUtil integrated with canvas
|
|
||||||
- ConnectionStatusIndicator added
|
|
||||||
- Merged with PrivateWorkspace feature (no conflicts)
|
|
||||||
- Ready for staging/production testing
|
|
||||||
|
|
||||||
**Remaining work:**
|
|
||||||
- MapLibre GL JS full canvas integration
|
|
||||||
- OSRM backend deployment to Netcup
|
|
||||||
- UI polish and testing
|
|
||||||
|
|
||||||
**OSRM Backend Deployed (2025-12-05):**
|
|
||||||
- Docker container running on Netcup RS 8000
|
|
||||||
- Location: /opt/apps/osrm-routing/
|
|
||||||
- Public URL: https://routing.jeffemmett.com
|
|
||||||
- Uses Traefik for routing via Docker network
|
|
||||||
- Currently loaded with Monaco OSM data (for testing)
|
|
||||||
- MapShapeUtil updated to use self-hosted OSRM
|
|
||||||
- Verified working: curl returns valid route responses
|
|
||||||
|
|
||||||
Map refactoring completed:
|
|
||||||
- Created simplified MapShapeUtil.tsx (836 lines) with MapLibre + search + routing
|
|
||||||
- Created GPSCollaborationLayer.ts as standalone module for GPS sharing
|
|
||||||
- Added layers/index.ts and updated open-mapping exports
|
|
||||||
- Server running without compilation errors
|
|
||||||
- Architecture now follows layer pattern: Base Map → Collaboration Layers
|
|
||||||
|
|
||||||
Enhanced MapShapeUtil (1326 lines) with:
|
|
||||||
- Touch/pen/mouse support with proper z-index (1000+) and touchAction styles
|
|
||||||
- Search with autocomplete as you type (Nominatim, 400ms debounce)
|
|
||||||
- Directions panel with waypoint management, reverse route, clear
|
|
||||||
- GPS location sharing panel with start/stop, accuracy display
|
|
||||||
- Quick action toolbar: search, directions (🚗), GPS (📍), style picker
|
|
||||||
- Larger touch targets (44px buttons) for mobile
|
|
||||||
- Pulse animation on user GPS marker
|
|
||||||
- "Fit All" button to zoom to all GPS users
|
|
||||||
- Route info badge when panel is closed
|
|
||||||
|
|
||||||
Fixed persistence issue with two changes:
|
|
||||||
|
|
||||||
1. Server-side: handlePeerDisconnect now flushes pending saves immediately (prevents data loss on page close)
|
|
||||||
|
|
||||||
2. Client-side: Changed merge strategy from 'local takes precedence' to 'server takes precedence' for initial load
|
|
||||||
|
|
||||||
**D1 Database & Networking Fixes (2025-12-06):**
|
|
||||||
- Added CRYPTID_DB D1 binding to wrangler.dev.toml
|
|
||||||
- Applied schema.sql to local D1 database
|
|
||||||
- All 25 SQL commands executed successfully
|
|
||||||
- Networking API now working locally (returns 401 without auth as expected)
|
|
||||||
- Added d1_persist=true to miniflare config for data persistence
|
|
||||||
|
|
||||||
**CryptID Connections Feature:**
|
|
||||||
- Enhanced CustomToolbar.tsx with "People in Canvas" section
|
|
||||||
- Shows all tldraw collaborators with connection status colors
|
|
||||||
- Green border = trusted, Yellow = connected, Grey = unconnected
|
|
||||||
- Connect/Trust/Demote/Remove buttons for connection management
|
|
||||||
- Uses tldraw useValue hook for reactive collaborator updates
|
|
||||||
|
|
||||||
**Build Script Updates:**
|
|
||||||
- Added NODE_OPTIONS="--max-old-space-size=8192" to build, deploy, deploy:pages scripts
|
|
||||||
- Prevents memory issues during TypeScript compilation and Vite build
|
|
||||||
|
|
||||||
Completed Mapus-inspired MapShapeUtil enhancements:
|
|
||||||
- Left sidebar with title/description editing
|
|
||||||
- Search bar with Nominatim geocoding
|
|
||||||
- Find Nearby categories (8 types: Food, Drinks, Groceries, Hotels, Health, Services, Shopping, Transport)
|
|
||||||
- Collaborators list with Observe mode
|
|
||||||
- Annotations list with visibility toggle
|
|
||||||
- Drawing toolbar (cursor, marker, line, area, eraser)
|
|
||||||
- Color picker with 8 Mapus colors
|
|
||||||
- Style picker (Voyager, Light, Dark, Satellite)
|
|
||||||
- Zoom controls + GPS location button
|
|
||||||
- Fixed TypeScript errors (3 issues resolved)
|
|
||||||
|
|
||||||
**MapLibre Cleanup Fixes (2025-12-07):**
|
|
||||||
- Added isMountedRef to track component mount state
|
|
||||||
- Fixed map initialization cleanup with named event handlers
|
|
||||||
- Added try/catch blocks for all MapLibre operations
|
|
||||||
- Fixed style change, resize, and annotations effects with mounted checks
|
|
||||||
- Updated callbacks (observeUser, selectSearchResult, findNearby) with null checks
|
|
||||||
- Added legacy property support (interactive, showGPS, showSearch, showDirections, sharingLocation, gpsUsers)
|
|
||||||
- Prevents 'getLayer' and 'map' undefined errors during component unmount
|
|
||||||
- All schema validation errors resolved
|
|
||||||
|
|
||||||
**Feature Branch Created (2025-12-07):**
|
|
||||||
- Branch: feature/mapshapeutil-fixes
|
|
||||||
- Pushed to Gitea: https://gitea.jeffemmett.com/jeffemmett/canvas-website/compare/main...feature/mapshapeutil-fixes
|
|
||||||
- Includes all MapLibre cleanup fixes and z-index/pointer-event style improvements
|
|
||||||
- Ready for testing before merging to dev
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
@ -1,105 +0,0 @@
|
||||||
---
|
|
||||||
id: task-025
|
|
||||||
title: 'Google Export: Local-First Data Sovereignty'
|
|
||||||
status: Done
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-04 20:25'
|
|
||||||
updated_date: '2025-12-05 01:53'
|
|
||||||
labels:
|
|
||||||
- feature
|
|
||||||
- google
|
|
||||||
- encryption
|
|
||||||
- privacy
|
|
||||||
dependencies: []
|
|
||||||
priority: medium
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Import Google Workspace data (Gmail, Drive, Photos, Calendar) locally, encrypt it with WebCrypto, and store it in IndexedDB. The user controls what gets shared to the board or backed up to R2.
|
|
||||||
|
|
||||||
Worktree: /home/jeffe/Github/canvas-website-branch-worktrees/google-export
|
|
||||||
Branch: feature/google-export
|
|
||||||
|
|
||||||
Architecture docs in: docs/GOOGLE_DATA_SOVEREIGNTY.md
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [x] #1 OAuth 2.0 with PKCE flow for Google APIs
|
|
||||||
- [x] #2 IndexedDB schema for encrypted data storage
|
|
||||||
- [x] #3 WebCrypto key derivation from master key
|
|
||||||
- [x] #4 Gmail import with pagination and progress
|
|
||||||
- [x] #5 Drive document import
|
|
||||||
- [x] #6 Photos thumbnail import
|
|
||||||
- [x] #7 Calendar event import
|
|
||||||
- [x] #8 Share to board functionality
|
|
||||||
- [x] #9 R2 encrypted backup/restore
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
Starting implementation - reviewed architecture doc GOOGLE_DATA_SOVEREIGNTY.md
|
|
||||||
|
|
||||||
Implemented core Google Data Sovereignty module:
|
|
||||||
|
|
||||||
- types.ts: Type definitions for all encrypted data structures
|
|
||||||
|
|
||||||
- encryption.ts: WebCrypto AES-256-GCM encryption, HKDF key derivation, PKCE utilities
|
|
||||||
|
|
||||||
- database.ts: IndexedDB schema with stores for gmail, drive, photos, calendar, sync metadata, encryption metadata, tokens
|
|
||||||
|
|
||||||
- oauth.ts: OAuth 2.0 PKCE flow for Google APIs with encrypted token storage
|
|
||||||
|
|
||||||
- importers/gmail.ts: Gmail import with pagination, progress tracking, batch storage
|
|
||||||
|
|
||||||
- importers/drive.ts: Drive import with folder navigation, Google Docs export
|
|
||||||
|
|
||||||
- importers/photos.ts: Photos import with thumbnail caching, album support
|
|
||||||
|
|
||||||
- importers/calendar.ts: Calendar import with date range filtering, recurring events
|
|
||||||
|
|
||||||
- share.ts: Share service for creating tldraw shapes from encrypted data
|
|
||||||
|
|
||||||
- backup.ts: R2 backup service with encrypted manifest, checksum verification
|
|
||||||
|
|
||||||
- index.ts: Main module with GoogleDataService class and singleton pattern
|
|
||||||
|
|
||||||
TypeScript compilation passes - all core modules implemented
|
|
||||||
|
|
||||||
Committed and pushed to feature/google-export branch (e69ed0e)
|
|
||||||
|
|
||||||
All core modules implemented and working: OAuth, encryption, database, share, backup
|
|
||||||
|
|
||||||
Gmail, Drive, and Calendar importers working correctly
|
|
||||||
|
|
||||||
Photos importer has 403 error on some thumbnail URLs - needs investigation:
|
|
||||||
|
|
||||||
- May require proper OAuth consent screen verification
|
|
||||||
|
|
||||||
- baseUrl might need different approach for non-public photos
|
|
||||||
|
|
||||||
- Consider using Photos API mediaItems.get for base URLs instead of direct thumbnail access
|
|
||||||
|
|
||||||
Phase 2 complete: Renamed GoogleDataBrowser to GoogleExportBrowser (commit 33f5dc7)
|
|
||||||
|
|
||||||
Pushed to feature/google-export branch
|
|
||||||
|
|
||||||
Phase 3 complete: Added Private Workspace zone (commit 052c984)
|
|
||||||
|
|
||||||
- PrivateWorkspaceShapeUtil: Frosted glass container with pin/collapse/close
|
|
||||||
|
|
||||||
- usePrivateWorkspace hook for event handling
|
|
||||||
|
|
||||||
- PrivateWorkspaceManager component integrated into Board.tsx
|
|
||||||
|
|
||||||
Phase 4 complete: Added GoogleItemShape with privacy badges (commit 84c6bf8)
|
|
||||||
|
|
||||||
- GoogleItemShapeUtil: Visual distinction for local vs shared items
|
|
||||||
|
|
||||||
- Privacy badge with 🔒/🌐 icons
|
|
||||||
|
|
||||||
- Updated ShareableItem type with service and thumbnailUrl
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
@ -1,57 +0,0 @@
|
||||||
---
|
|
||||||
id: task-026
|
|
||||||
title: Fix text shape sync between clients
|
|
||||||
status: Done
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-04 20:48'
|
|
||||||
updated_date: '2025-12-25 23:30'
|
|
||||||
labels:
|
|
||||||
- bug
|
|
||||||
- sync
|
|
||||||
- automerge
|
|
||||||
dependencies: []
|
|
||||||
priority: high
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Text shapes created with the "T" text tool show up on the creating client but not on other clients viewing the same board.
|
|
||||||
|
|
||||||
Root cause investigation:
|
|
||||||
- Text shapes ARE being persisted to R2 (confirmed in server logs)
|
|
||||||
- Issue is on receiving client side in AutomergeToTLStore.ts
|
|
||||||
- Line 1142: 'text' is in invalidTextProps list and gets deleted
|
|
||||||
- If richText isn't properly populated before text is deleted, content is lost
|
|
||||||
|
|
||||||
Files to investigate:
|
|
||||||
- src/automerge/AutomergeToTLStore.ts (sanitization logic)
|
|
||||||
- src/automerge/TLStoreToAutomerge.ts (serialization logic)
|
|
||||||
- src/automerge/useAutomergeStoreV2.ts (store updates)
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [x] #1 Text shapes sync correctly between multiple clients
|
|
||||||
- [x] #2 Text content preserved during automerge serialization/deserialization
|
|
||||||
- [x] #3 Both new and existing text shapes display correctly on all clients
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
## Fix Applied (2025-12-25)
|
|
||||||
|
|
||||||
Root cause: Text shapes arriving from other clients had `props.text` but the deserialization code was:
|
|
||||||
1. Initializing `richText` to empty `{ content: [], type: 'doc' }`
|
|
||||||
2. Then deleting `props.text`
|
|
||||||
3. Result: content lost
|
|
||||||
|
|
||||||
Fix: Added text → richText conversion for text shapes in `AutomergeToTLStore.ts` (lines 1162-1191), similar to the existing conversion for geo shapes.
|
|
||||||
|
|
||||||
The fix:
|
|
||||||
- Checks if `props.text` exists before initializing richText
|
|
||||||
- Converts text content to richText format
|
|
||||||
- Preserves original text in `meta.text` for backward compatibility
|
|
||||||
- Logs conversion for debugging
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
@ -1,119 +0,0 @@
|
||||||
---
|
|
||||||
id: task-027
|
|
||||||
title: Implement proper Automerge CRDT sync for offline-first support
|
|
||||||
status: In Progress
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-04 21:06'
|
|
||||||
updated_date: '2025-12-25 23:59'
|
|
||||||
labels:
|
|
||||||
- offline-sync
|
|
||||||
- crdt
|
|
||||||
- automerge
|
|
||||||
- architecture
|
|
||||||
dependencies: []
|
|
||||||
priority: high
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Replace the current "last-write-wins" full document replacement with proper Automerge CRDT sync protocol. This ensures deletions are preserved across offline/reconnect scenarios and concurrent edits merge correctly.
|
|
||||||
|
|
||||||
Current problem: Server does `currentDoc.store = { ...newDoc.store }`, which is a full replacement, not a merge. This causes "ghost resurrection" of deleted shapes when offline clients reconnect.
|
|
||||||
|
|
||||||
Solution: Use Automerge's native binary sync protocol with proper CRDT merge semantics.
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [x] #1 Server stores Automerge binary documents in R2 (not JSON)
|
|
||||||
- [ ] #2 Client-server communication uses Automerge sync protocol (binary messages)
|
|
||||||
- [ ] #3 Deletions persist correctly when offline client reconnects
|
|
||||||
- [ ] #4 Concurrent edits merge deterministically without data loss
|
|
||||||
- [x] #5 Existing JSON rooms are migrated to Automerge format
|
|
||||||
- [ ] #6 All existing functionality continues to work
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
## Progress Update (2025-12-04)
|
|
||||||
|
|
||||||
### Implemented:
|
|
||||||
1. **automerge-init.ts** - WASM initialization for Cloudflare Workers using slim variant
|
|
||||||
2. **automerge-sync-manager.ts** - Core CRDT sync manager with proper merge semantics
|
|
||||||
3. **automerge-r2-storage.ts** - Binary R2 storage for Automerge documents
|
|
||||||
4. **wasm.d.ts** - TypeScript declarations for WASM imports
|
|
||||||
|
|
||||||
### Integration Fixes:
|
|
||||||
- `getDocument()` now returns CRDT document when sync manager is active
|
|
||||||
- `handleBinaryMessage()` syncs `currentDoc` with CRDT state after updates
|
|
||||||
- `schedulePersistToR2()` delegates to sync manager when CRDT mode is enabled
|
|
||||||
- Fixed CloudflareAdapter TypeScript errors (peer-candidate peerMetadata)
|
|
||||||
|
|
||||||
### Current State:
|
|
||||||
- `useCrdtSync = true` flag is enabled
|
|
||||||
- Worker compiles and runs successfully
|
|
||||||
- JSON sync fallback works for backward compatibility
|
|
||||||
- Binary sync infrastructure is in place
|
|
||||||
- Needs production testing with multi-client sync and delete operations
|
|
||||||
|
|
||||||
**Merged to dev branch (2025-12-05):**
|
|
||||||
- All Automerge CRDT infrastructure merged
|
|
||||||
- WASM initialization, sync manager, R2 storage
|
|
||||||
- Integration fixes for getDocument(), handleBinaryMessage(), schedulePersistToR2()
|
|
||||||
- Ready for production testing
|
|
||||||
|
|
||||||
### 2025-12-05: Data Safety Mitigations Added
|
|
||||||
|
|
||||||
Added safety mitigations for Automerge format conversion (commit f8092d8 on feature/google-export):
|
|
||||||
|
|
||||||
**Pre-conversion backups:**
|
|
||||||
- Before any format migration, raw document backed up to R2
|
|
||||||
- Location: `pre-conversion-backups/{roomId}/{timestamp}_{formatType}.json`
|
|
||||||
|
|
||||||
**Conversion threshold guards:**
|
|
||||||
- 10% loss threshold: Conversion aborts if too many records would be lost
|
|
||||||
- 5% shape loss warning: Emits warning if shapes are lost
|
|
||||||
|
|
||||||
**Unknown format handling:**
|
|
||||||
- Unknown formats backed up before creating empty document
|
|
||||||
- Raw document keys logged for investigation
|
|
||||||
|
|
||||||
**Also fixed:**
|
|
||||||
- Keyboard shortcuts dialog error (tldraw i18n objects)
|
|
||||||
- Google Workspace integration now first in Settings > Integrations
|
|
||||||
|
|
||||||
Fixed persistence issue: modified handlePeerDisconnect to flush pending saves, and updated the client-side merge strategy in useAutomergeSyncRepo.ts to properly bootstrap from the server when local state is empty, while still preserving offline changes.
|
|
||||||
|
|
||||||
Fixed TypeScript errors in the networking module: corrected the useSession → useAuth import, added myConnections to the NetworkGraph type, and fixed GraphEdge type alignment between client and worker.
|
|
||||||
|
|
||||||
## Investigation Summary (2025-12-25)
|
|
||||||
|
|
||||||
**Current Architecture:**
|
|
||||||
- Worker: CRDT sync enabled with SyncManager
|
|
||||||
- Client: CloudflareNetworkAdapter with binary message support
|
|
||||||
- Storage: IndexedDB for offline persistence
|
|
||||||
|
|
||||||
**Issue:** Automerge Repo not generating sync messages when `handle.change()` is called. JSON sync workaround in use.
|
|
||||||
|
|
||||||
**Suspected Root Cause:**
|
|
||||||
The Automerge Repo requires proper peer discovery. The adapter emits `peer-candidate` for server, but Repo may not be establishing proper sync relationship.
|
|
||||||
|
|
||||||
**Remaining ACs:**
|
|
||||||
- #2 Client-server binary protocol (partially working - needs Repo to generate messages)
|
|
||||||
- #3 Deletions persist (needs testing once binary sync works)
|
|
||||||
- #4 Concurrent edits merge (needs testing)
|
|
||||||
- #6 All functionality works (JSON workaround is functional)
|
|
||||||
|
|
||||||
**Next Steps:**
|
|
||||||
1. Add debug logging to adapter.send() to verify Repo calls
|
|
||||||
2. Check sync states between local peer and server
|
|
||||||
3. May need to manually trigger sync or fix Repo configuration
|
|
||||||
|
|
||||||
Dec 25: Added debug logging and peer-candidate re-emission fix to CloudflareAdapter.ts
|
|
||||||
|
|
||||||
Key fix: Re-emit peer-candidate after documentId is set to trigger Repo sync (timing issue)
|
|
||||||
|
|
||||||
Committed and pushed to dev branch - needs testing to verify binary sync is now working
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
@ -1,93 +0,0 @@
|
||||||
---
|
|
||||||
id: task-028
|
|
||||||
title: OSM Canvas Integration Foundation
|
|
||||||
status: Done
|
|
||||||
assignee:
|
|
||||||
- '@claude'
|
|
||||||
created_date: '2025-12-04 21:12'
|
|
||||||
updated_date: '2025-12-04 21:44'
|
|
||||||
labels:
|
|
||||||
- feature
|
|
||||||
- mapping
|
|
||||||
- foundation
|
|
||||||
dependencies: []
|
|
||||||
priority: high
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Implement the foundational layer for rendering OpenStreetMap data on the tldraw canvas. This includes coordinate transformation (geographic ↔ canvas), tile rendering as canvas background, and basic interaction patterns.
|
|
||||||
|
|
||||||
Core components:
|
|
||||||
- Geographic coordinate system (lat/lng to canvas x/y transforms)
|
|
||||||
- OSM tile layer rendering (raster tiles as background)
|
|
||||||
- Zoom level handling that respects geographic scale
|
|
||||||
- Pan/zoom gestures that work with map context
|
|
||||||
- Basic marker/shape placement with geographic coordinates
|
|
||||||
- Vector tile support for interactive OSM elements
|
|
||||||
|
|
||||||
This is the foundation that task-024 (Route Planning) and other spatial features build upon.
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [x] #1 OSM raster tiles render as canvas background layer
|
|
||||||
- [x] #2 Coordinate transformation functions (geo ↔ canvas) working accurately
|
|
||||||
- [x] #3 Zoom levels map to appropriate tile zoom levels
|
|
||||||
- [x] #4 Pan/zoom gestures work smoothly with tile loading
|
|
||||||
- [x] #5 Shapes can be placed with lat/lng coordinates
|
|
||||||
- [x] #6 Basic MapLibre GL or Leaflet integration pattern established
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
## Progress (2025-12-04)
|
|
||||||
|
|
||||||
### Completed:
|
|
||||||
- Reviewed existing open-mapping module scaffolding
|
|
||||||
- Installed maplibre-gl npm package
|
|
||||||
- Created comprehensive geo-canvas coordinate transformation utilities (geoTransform.ts)
|
|
||||||
- GeoCanvasTransform class for bidirectional geo ↔ canvas transforms
|
|
||||||
- Web Mercator projection support
|
|
||||||
- Tile coordinate utilities
|
|
||||||
- Haversine distance calculations
|
|
||||||
|
|
||||||
### In Progress:
|
|
||||||
- Wiring up MapLibre GL JS in useMapInstance hook
|
|
||||||
- Creating MapShapeUtil for tldraw canvas integration
|
|
||||||
|
|
||||||
### Additional Progress:
|
|
||||||
- Fixed MapLibre attributionControl type issue
|
|
||||||
- Created MapShapeUtil.tsx with full tldraw integration
|
|
||||||
- Created MapTool.ts for placing map shapes
|
|
||||||
- Registered MapShape and MapTool in Board.tsx
|
|
||||||
- Map shape features:
|
|
||||||
- Resizable map window
|
|
||||||
- Interactive pan/zoom toggle
|
|
||||||
- Location presets (NYC, London, Tokyo, SF, Paris)
|
|
||||||
- Live coordinate display
|
|
||||||
- Pin to view support
|
|
||||||
- Tag system integration
|
|
||||||
|
|
||||||
### Completion Summary:
|
|
||||||
- All core OSM canvas integration foundation is complete
|
|
||||||
- MapShape can be placed on canvas via MapTool
|
|
||||||
- MapLibre GL JS renders OpenStreetMap tiles
|
|
||||||
- Coordinate transforms enable geo ↔ canvas mapping
|
|
||||||
- Ready for testing on dev server at localhost:5173
|
|
||||||
|
|
||||||
### Files Created/Modified:
|
|
||||||
- src/open-mapping/utils/geoTransform.ts (NEW)
|
|
||||||
- src/open-mapping/hooks/useMapInstance.ts (UPDATED with MapLibre)
|
|
||||||
- src/shapes/MapShapeUtil.tsx (NEW)
|
|
||||||
- src/tools/MapTool.ts (NEW)
|
|
||||||
- src/routes/Board.tsx (UPDATED with MapShape/MapTool)
|
|
||||||
- package.json (added maplibre-gl)
|
|
||||||
|
|
||||||
### Next Steps (task-024):
|
|
||||||
- Add OSRM routing backend
|
|
||||||
- Implement waypoint placement
|
|
||||||
- Route calculation and display
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
@ -1,70 +0,0 @@
|
||||||
---
|
|
||||||
id: task-029
|
|
||||||
title: zkGPS Protocol Design
|
|
||||||
status: Done
|
|
||||||
assignee:
|
|
||||||
- '@claude'
|
|
||||||
created_date: '2025-12-04 21:12'
|
|
||||||
updated_date: '2025-12-04 23:29'
|
|
||||||
labels:
|
|
||||||
- feature
|
|
||||||
- privacy
|
|
||||||
- cryptography
|
|
||||||
- research
|
|
||||||
dependencies: []
|
|
||||||
priority: medium
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Design and implement a zero-knowledge proof system for privacy-preserving location sharing. Enables users to prove location claims without revealing exact coordinates.
|
|
||||||
|
|
||||||
Key capabilities:
|
|
||||||
- Proximity proofs: Prove "I am within X distance of Y" without revealing exact location
|
|
||||||
- Region membership: Prove "I am in Central Park" without revealing which part
|
|
||||||
- Temporal proofs: Prove "I was in region R between T1 and T2"
|
|
||||||
- Group rendezvous: N people prove they are all nearby without revealing locations to each other
|
|
||||||
|
|
||||||
Technical approaches to evaluate:
|
|
||||||
- ZK-SNARKs (Groth16, PLONK) for succinct proofs
|
|
||||||
- Bulletproofs for range proofs on coordinates
|
|
||||||
- Geohash commitments for variable precision
|
|
||||||
- Homomorphic encryption for distance calculations
|
|
||||||
- Ring signatures for group privacy
|
|
||||||
|
|
||||||
Integration with canvas:
|
|
||||||
- Share location with configurable precision per trust circle
|
|
||||||
- Verify location claims from network participants
|
|
||||||
- Display verified presence without exact coordinates
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [x] #1 Protocol specification document complete
|
|
||||||
- [x] #2 Proof-of-concept proximity proof working
|
|
||||||
- [x] #3 Geohash commitment scheme implemented
|
|
||||||
- [x] #4 Trust circle precision configuration UI
|
|
||||||
- [x] #5 Integration with canvas presence system
|
|
||||||
- [ ] #6 Performance benchmarks acceptable for real-time use
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
Completed all zkGPS Protocol Design implementation:
|
|
||||||
|
|
||||||
- ZKGPS_PROTOCOL.md: Full specification document with design goals, proof types, wire protocol, security considerations
|
|
||||||
|
|
||||||
- geohash.ts: Complete geohash encoding/decoding with precision levels, neighbor finding, radius/polygon cell intersection
|
|
||||||
|
|
||||||
- types.ts: Comprehensive TypeScript types for commitments, trust circles, proofs, and protocol messages
|
|
||||||
|
|
||||||
- commitments.ts: Hash-based commitment scheme with salt, signing, and verification
|
|
||||||
|
|
||||||
- proofs.ts: Proximity, region, temporal, and group proximity proof generation/verification
|
|
||||||
|
|
||||||
- trustCircles.ts: TrustCircleManager class for managing social layer and precision-per-contact
|
|
||||||
|
|
||||||
- index.ts: Barrel export for clean module API
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
@ -1,64 +0,0 @@
|
||||||
---
|
|
||||||
id: task-030
|
|
||||||
title: Mycelial Signal Propagation System
|
|
||||||
status: Done
|
|
||||||
assignee:
|
|
||||||
- '@claude'
|
|
||||||
created_date: '2025-12-04 21:12'
|
|
||||||
updated_date: '2025-12-04 23:37'
|
|
||||||
labels:
|
|
||||||
- feature
|
|
||||||
- mapping
|
|
||||||
- intelligence
|
|
||||||
- research
|
|
||||||
dependencies: []
|
|
||||||
priority: medium
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Implement a biologically-inspired signal propagation system for the canvas network, modeling how information, attention, and value flow through the collaborative space like nutrients through mycelium.
|
|
||||||
|
|
||||||
Core concepts:
|
|
||||||
- Nodes: Points of interest, events, people, resources, discoveries
|
|
||||||
- Hyphae: Connections/paths between nodes (relationships, routes, attention threads)
|
|
||||||
- Signals: Urgency, relevance, trust, novelty gradients
|
|
||||||
- Behaviors: Gradient following, path optimization, emergence detection
|
|
||||||
|
|
||||||
Features:
|
|
||||||
- Signal emission when events/discoveries occur
|
|
||||||
- Decay with spatial, relational, and temporal distance
|
|
||||||
- Aggregation at nodes (multiple weak signals → strong signal)
|
|
||||||
- Spore dispersal pattern for notifications
|
|
||||||
- Resonance detection (alerting when otherwise-unconnected participants focus on the same location)
|
|
||||||
- Collective blindspot visualization (unmapped areas)
|
|
||||||
|
|
||||||
The map becomes a living organism that breathes with activity cycles and grows where attention focuses.
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [x] #1 Signal propagation algorithm implemented
|
|
||||||
- [x] #2 Decay functions configurable (spatial, relational, temporal)
|
|
||||||
- [x] #3 Visualization of signal gradients on canvas
|
|
||||||
- [x] #4 Resonance detection alerts working
|
|
||||||
- [x] #5 Spore-style notification system
|
|
||||||
- [x] #6 Blindspot/unknown area highlighting
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
Completed Mycelial Signal Propagation System - 5 files in src/open-mapping/mycelium/:
|
|
||||||
|
|
||||||
types.ts: Node/Hypha/Signal/Decay/Propagation/Resonance type definitions with event system
|
|
||||||
|
|
||||||
signals.ts: Decay functions (exponential, linear, inverse, step, gaussian) + 4 propagation algorithms (flood, gradient, random-walk, diffusion)
|
|
||||||
|
|
||||||
network.ts: MyceliumNetwork class with node/hypha CRUD, signal emission/queue, resonance detection, maintenance loop, stats
|
|
||||||
|
|
||||||
visualization.ts: Color palettes, dynamic sizing, Canvas 2D rendering, heat maps, CSS keyframes
|
|
||||||
|
|
||||||
index.ts: Clean barrel export for entire module
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
@ -1,65 +0,0 @@
|
||||||
---
|
|
||||||
id: task-031
|
|
||||||
title: Alternative Map Lens System
|
|
||||||
status: Done
|
|
||||||
assignee:
|
|
||||||
- '@claude'
|
|
||||||
created_date: '2025-12-04 21:12'
|
|
||||||
updated_date: '2025-12-04 23:42'
|
|
||||||
labels:
|
|
||||||
- feature
|
|
||||||
- mapping
|
|
||||||
- visualization
|
|
||||||
dependencies: []
|
|
||||||
priority: medium
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Implement multiple "lens" views that project different data dimensions onto the canvas coordinate space. The same underlying data can be viewed through different lenses.
|
|
||||||
|
|
||||||
Lens types:
|
|
||||||
- Geographic: Traditional OSM basemap, physical locations
|
|
||||||
- Temporal: Time as X-axis, events as nodes, time-scrubbing UI
|
|
||||||
- Attention: Heatmap of collective focus, nodes sized by current attention
|
|
||||||
- Incentive: Value gradients, token flows, MycoFi integration
|
|
||||||
- Relational: Social graph topology, force-directed layout
|
|
||||||
- Possibility: Branching futures, what-if scenarios, alternate timelines
|
|
||||||
|
|
||||||
Features:
|
|
||||||
- Smooth transitions between lens types
|
|
||||||
- Lens blending (e.g., 50% geographic + 50% attention)
|
|
||||||
- Temporal scrubber for historical playback
|
|
||||||
- Temporal portals (click location to see across time)
|
|
||||||
- Living maps that grow/fade based on attention
|
|
||||||
|
|
||||||
Each lens uses the same canvas shapes but transforms their positions and styling based on the active projection.
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [ ] #1 Lens switcher UI implemented
|
|
||||||
- [x] #2 Geographic lens working with OSM
|
|
||||||
- [x] #3 Temporal lens with time scrubber
|
|
||||||
- [x] #4 Attention heatmap visualization
|
|
||||||
- [x] #5 Smooth transitions between lenses
|
|
||||||
- [x] #6 Lens blending capability
|
|
||||||
- [ ] #7 Temporal portal feature (click to see history)
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
Completed Alternative Map Lens System - 5 files in src/open-mapping/lenses/:
|
|
||||||
|
|
||||||
types.ts: All lens type definitions (Geographic, Temporal, Attention, Incentive, Relational, Possibility) with configs, transitions, events
|
|
||||||
|
|
||||||
transforms.ts: Coordinate transform functions for each lens type + force-directed layout algorithm for relational lens
|
|
||||||
|
|
||||||
blending.ts: Easing functions, transition creation/interpolation, point blending for multi-lens views
|
|
||||||
|
|
||||||
manager.ts: LensManager class with lens activation/deactivation, transitions, viewport control, temporal playback, temporal portals
|
|
||||||
|
|
||||||
index.ts: Clean barrel export for entire lens system
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
@ -1,69 +0,0 @@
|
||||||
---
|
|
||||||
id: task-032
|
|
||||||
title: Privacy Gradient Trust Circle System
|
|
||||||
status: To Do
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-04 21:12'
|
|
||||||
updated_date: '2025-12-05 01:42'
|
|
||||||
labels:
|
|
||||||
- feature
|
|
||||||
- privacy
|
|
||||||
- social
|
|
||||||
dependencies:
|
|
||||||
- task-029
|
|
||||||
priority: medium
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Implement a non-binary privacy system where location and presence information is shared at different precision levels based on trust circles.
|
|
||||||
|
|
||||||
Trust circle levels (configurable):
|
|
||||||
- Intimate: Exact coordinates, real-time updates
|
|
||||||
- Close: Street/block level precision
|
|
||||||
- Friends: Neighborhood/district level
|
|
||||||
- Network: City/region only
|
|
||||||
- Public: Just "online" status or timezone
|
|
||||||
|
|
||||||
Features:
|
|
||||||
- Per-contact trust level configuration
|
|
||||||
- Group trust levels (share more with "coworkers" group)
|
|
||||||
- Automatic precision degradation over time
|
|
||||||
- Selective disclosure controls per-session
|
|
||||||
- Trust level visualization on map (concentric circles of precision)
|
|
||||||
- Integration with zkGPS for cryptographic enforcement
|
|
||||||
- Consent management and audit logs
|
|
||||||
|
|
||||||
The system should default to maximum privacy and require explicit opt-in to share more precise information.
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [ ] #1 Trust circle configuration UI
|
|
||||||
- [ ] #2 Per-contact precision settings
|
|
||||||
- [x] #3 Group-based trust levels
|
|
||||||
- [x] #4 Precision degradation over time working
|
|
||||||
- [ ] #5 Visual representation of trust circles on map
|
|
||||||
- [ ] #6 Consent management interface
|
|
||||||
- [x] #7 Integration points with zkGPS task
|
|
||||||
- [x] #8 Privacy-by-default enforced
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
**TypeScript foundation completed in task-029:**
|
|
||||||
- TrustCircleManager class (src/open-mapping/privacy/trustCircles.ts)
|
|
||||||
- 5 trust levels with precision mapping
|
|
||||||
- Per-contact trust configuration
|
|
||||||
- Group trust levels
|
|
||||||
- Precision degradation over time
|
|
||||||
- Integration with zkGPS commitments
|
|
||||||
|
|
||||||
**Still needs UI components:**
|
|
||||||
- Trust circle configuration panel
|
|
||||||
- Contact management interface
|
|
||||||
- Visual concentric circles on map
|
|
||||||
- Consent management dialog
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
@ -1,87 +0,0 @@
|
||||||
---
|
|
||||||
id: task-033
|
|
||||||
title: Version History & Reversion System with Visual Diffs
|
|
||||||
status: Done
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-04 21:44'
|
|
||||||
updated_date: '2025-12-05 00:46'
|
|
||||||
labels:
|
|
||||||
- feature
|
|
||||||
- version-control
|
|
||||||
- automerge
|
|
||||||
- r2
|
|
||||||
- ui
|
|
||||||
dependencies: []
|
|
||||||
priority: high
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Implement a comprehensive version history and reversion system that allows users to:
|
|
||||||
1. View and revert to historical board states
|
|
||||||
2. See visual diffs highlighting new/deleted shapes since their last visit
|
|
||||||
3. Walk through CRDT history step-by-step
|
|
||||||
4. Restore accidentally deleted shapes
|
|
||||||
|
|
||||||
Key features:
|
|
||||||
- Time rewind button next to the star dashboard button
|
|
||||||
- Popup menu showing historical versions
|
|
||||||
- Yellow glow on newly added shapes (first time user sees them)
|
|
||||||
- Dim grey on deleted shapes with "undo discard" option
|
|
||||||
- Permission-based (admin, editor, viewer)
|
|
||||||
- Integration with R2 backups and Automerge CRDT history
|
|
||||||
- Compare user's local state with server state to highlight diffs
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [x] #1 Version history button renders next to star button with time-rewind icon
|
|
||||||
- [x] #2 Clicking button opens popup showing list of historical versions
|
|
||||||
- [x] #3 User can select a version to preview or revert to
|
|
||||||
- [x] #4 Newly added shapes since last user visit have yellow glow effect
|
|
||||||
- [x] #5 Deleted shapes show dimmed with 'undo discard' option
|
|
||||||
- [x] #6 Version navigation respects user permissions (admin/editor/viewer)
|
|
||||||
- [x] #7 Works with R2 backup snapshots for coarse-grained history
|
|
||||||
- [ ] #8 Leverages Automerge CRDT for fine-grained change tracking
|
|
||||||
- [x] #9 User's last-seen state stored in localStorage for diff comparison
|
|
||||||
- [x] #10 Visual effects are subtle and non-intrusive
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
Implementation complete in feature/version-reversion worktree:
|
|
||||||
|
|
||||||
**Files Created:**
|
|
||||||
- src/lib/versionHistory.ts - Core version history utilities
|
|
||||||
- src/lib/permissions.ts - Role-based permission system
|
|
||||||
- src/components/VersionHistoryButton.tsx - Time-rewind icon button
|
|
||||||
- src/components/VersionHistoryPanel.tsx - Panel with 3 tabs
|
|
||||||
- src/components/DeletedShapesOverlay.tsx - Floating deleted shapes indicator
|
|
||||||
- src/hooks/useVersionHistory.ts - React hook for state management
|
|
||||||
- src/hooks/usePermissions.ts - Permission context hook
|
|
||||||
- src/css/version-history.css - Visual effects CSS
|
|
||||||
|
|
||||||
**Files Modified:**
|
|
||||||
- src/ui/CustomToolbar.tsx - Added VersionHistoryButton
|
|
||||||
- src/ui/components.tsx - Added DeletedShapesOverlay
|
|
||||||
- src/css/style.css - Imported version-history.css
|
|
||||||
- worker/worker.ts - Added /api/versions endpoints
|
|
||||||
|
|
||||||
**Features Implemented:**
|
|
||||||
1. Time-rewind button next to star dashboard
|
|
||||||
2. Version History Panel with Changes/Versions/Deleted tabs
|
|
||||||
3. localStorage tracking of user's last-seen state
|
|
||||||
4. Yellow glow animation for new shapes
|
|
||||||
5. Dim grey effect for deleted shapes
|
|
||||||
6. Floating indicator with restore options
|
|
||||||
7. R2 integration for version snapshots
|
|
||||||
8. Permission system (admin/editor/viewer roles)
|
|
||||||
|
|
||||||
Commit: 03894d2
|
|
||||||
|
|
||||||
Renamed GoogleDataBrowser to GoogleExportBrowser as requested by user
|
|
||||||
|
|
||||||
Pushed to feature/google-export branch (commit 33f5dc7)
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
@ -1,42 +0,0 @@
|
||||||
---
|
|
||||||
id: task-034
|
|
||||||
title: Fix Google Photos 403 error on thumbnail URLs
|
|
||||||
status: To Do
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-04 23:24'
|
|
||||||
labels:
|
|
||||||
- bug
|
|
||||||
- google
|
|
||||||
- photos
|
|
||||||
dependencies:
|
|
||||||
- task-025
|
|
||||||
priority: low
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Debug and fix the 403 Forbidden errors when fetching Google Photos thumbnails in the Google Data Sovereignty module.
|
|
||||||
|
|
||||||
Current behavior:
|
|
||||||
- Photos metadata imports successfully
|
|
||||||
- Thumbnail URLs (baseUrl with =w200-h200 suffix) return 403
|
|
||||||
- Error occurs even with valid OAuth token
|
|
||||||
|
|
||||||
Investigation areas:
|
|
||||||
1. OAuth consent screen verification status (test mode vs published)
|
|
||||||
2. Photo sharing status (private vs shared photos may behave differently)
|
|
||||||
3. baseUrl expiration - Google Photos baseUrls expire after ~1 hour
|
|
||||||
4. May need to use mediaItems.get API to refresh baseUrl before each fetch
|
|
||||||
5. Consider adding Authorization header to thumbnail fetch requests
|
|
||||||
|
|
||||||
Reference: src/lib/google/importers/photos.ts in feature/google-export branch
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [ ] #1 Photos thumbnails download without 403 errors
|
|
||||||
- [ ] #2 OAuth consent screen properly configured if needed
|
|
||||||
- [ ] #3 baseUrl refresh mechanism implemented if required
|
|
||||||
- [ ] #4 Test with both private and shared photos
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
@ -1,90 +0,0 @@
|
||||||
---
|
|
||||||
id: task-035
|
|
||||||
title: 'Data Sovereignty Zone: Private Workspace UI'
|
|
||||||
status: Done
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-04 23:36'
|
|
||||||
updated_date: '2025-12-05 02:00'
|
|
||||||
labels:
|
|
||||||
- feature
|
|
||||||
- privacy
|
|
||||||
- google
|
|
||||||
- ui
|
|
||||||
dependencies:
|
|
||||||
- task-025
|
|
||||||
priority: high
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Implement privacy-first UX for managing LOCAL (encrypted IndexedDB) vs SHARED (collaborative) data on the canvas.
|
|
||||||
|
|
||||||
Key features:
|
|
||||||
- Google Integration card in Settings modal
|
|
||||||
- Data Browser popup for selecting encrypted items
|
|
||||||
- Private Workspace zone (toggleable, frosted glass container)
|
|
||||||
- Visual distinction: 🔒 shaded overlay for local, normal for shared
|
|
||||||
- Permission prompt when dragging items outside workspace
|
|
||||||
|
|
||||||
Design decisions:
|
|
||||||
- Toggleable workspace that can pin to viewport
|
|
||||||
- Items always start private, explicit share action required
|
|
||||||
- ZK integration deferred to future phase
|
|
||||||
- R2 upload visual-only for now
|
|
||||||
|
|
||||||
Worktree: /home/jeffe/Github/canvas-website-branch-worktrees/google-export
|
|
||||||
Branch: feature/google-export
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [x] #1 Google Workspace integration card in Settings Integrations tab
|
|
||||||
- [x] #2 Data Browser popup with service tabs and item selection
|
|
||||||
- [x] #3 Private Workspace zone shape with frosted glass effect
|
|
||||||
- [x] #4 Privacy badges (lock/globe) on items showing visibility
|
|
||||||
- [x] #5 Permission modal when changing visibility from local to shared
|
|
||||||
- [ ] #6 Zone can be toggled visible/hidden and pinned to viewport
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
Phase 1 complete (c9c8c00):
|
|
||||||
|
|
||||||
- Added Google Workspace section to Settings > Integrations tab
|
|
||||||
|
|
||||||
- Connection status badge and import counts display
|
|
||||||
|
|
||||||
- Connect/Disconnect buttons with loading states
|
|
||||||
|
|
||||||
- Added getStoredCounts() method to GoogleDataService
|
|
||||||
|
|
||||||
- Privacy messaging about AES-256 encryption
|
|
||||||
|
|
||||||
Phase 2 complete (a754ffa):
|
|
||||||
|
|
||||||
- GoogleDataBrowser component with service tabs
|
|
||||||
|
|
||||||
- Searchable, multi-select item list
|
|
||||||
|
|
||||||
- Dark mode support
|
|
||||||
|
|
||||||
- Privacy messaging and 'Add to Private Workspace' action
|
|
||||||
|
|
||||||
Phase 5 completed: Implemented permission flow and drag detection
|
|
||||||
|
|
||||||
Created VisibilityChangeModal.tsx for confirming visibility changes
|
|
||||||
|
|
||||||
Created VisibilityChangeManager.tsx to handle events and drag detection
|
|
||||||
|
|
||||||
GoogleItem shapes dispatch visibility change events on badge click
|
|
||||||
|
|
||||||
Support both local->shared and shared->local transitions
|
|
||||||
|
|
||||||
Auto-detect when GoogleItems are dragged outside PrivateWorkspace
|
|
||||||
|
|
||||||
Session storage for 'don't ask again' preference
|
|
||||||
|
|
||||||
All 5 phases complete - full data sovereignty UI implementation done
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
@ -1,35 +0,0 @@
|
||||||
---
|
|
||||||
id: task-036
|
|
||||||
title: Implement Possibility Cones and Constraint Propagation System
|
|
||||||
status: Done
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-05 00:45'
|
|
||||||
labels:
|
|
||||||
- feature
|
|
||||||
- open-mapping
|
|
||||||
- visualization
|
|
||||||
dependencies: []
|
|
||||||
priority: high
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Implemented a mathematical framework for visualizing how constraints propagate through decision pipelines. Each decision point creates a "possibility cone" - a light-cone-like structure representing reachable futures. Subsequent constraints act as apertures that narrow these cones.
|
|
||||||
|
|
||||||
Key components:
|
|
||||||
- types.ts: Core type definitions (SpacePoint, PossibilityCone, ConeConstraint, ConeIntersection, etc.)
|
|
||||||
- geometry.ts: Vector operations, cone math, conic sections, intersection algorithms
|
|
||||||
- pipeline.ts: ConstraintPipelineManager for constraint propagation through stages
|
|
||||||
- optimization.ts: PathOptimizer with A*, Dijkstra, gradient descent, simulated annealing
|
|
||||||
- visualization.ts: Rendering helpers for 2D/3D projections, SVG paths, canvas rendering
|
|
||||||
|
|
||||||
Features:
|
|
||||||
- N-dimensional possibility space with configurable dimensions
|
|
||||||
- Constraint pipeline with stages and dependency analysis
|
|
||||||
- Multiple constraint surface types (hyperplane, sphere, cone, custom)
|
|
||||||
- Value-weighted path optimization through constrained space
|
|
||||||
- Waist detection (bottleneck finding)
|
|
||||||
- Caustic point detection (convergence analysis)
|
|
||||||
- Animation helpers for cone narrowing visualization
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
@ -1,114 +0,0 @@
|
||||||
---
|
|
||||||
id: task-037
|
|
||||||
title: zkGPS Location Games and Discovery System
|
|
||||||
status: In Progress
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-05 00:49'
|
|
||||||
updated_date: '2025-12-05 03:52'
|
|
||||||
labels:
|
|
||||||
- feature
|
|
||||||
- open-mapping
|
|
||||||
- games
|
|
||||||
- zkGPS
|
|
||||||
dependencies: []
|
|
||||||
priority: high
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Build a location-based game framework combining zkGPS privacy proofs with collaborative mapping for treasure hunts, collectibles, and IoT-anchored discoveries.
|
|
||||||
|
|
||||||
Use cases:
|
|
||||||
- Conference treasure hunts with provable location without disclosure
|
|
||||||
- Collectible elements anchored to physical locations
|
|
||||||
- Crafting/combining discovered items
|
|
||||||
- Mycelial network growth between discovered nodes
|
|
||||||
- IoT hardware integration (NFC tags, BLE beacons)
|
|
||||||
|
|
||||||
Game mechanics:
|
|
||||||
- Proximity proofs ("I'm within 50m of X" without revealing where)
|
|
||||||
- Hot/cold navigation using geohash precision degradation
|
|
||||||
- First-finder rewards with timestamp proofs
|
|
||||||
- Group discovery requiring N players in proximity
|
|
||||||
- Spore collection and mycelium cultivation
|
|
||||||
- Fruiting bodies when networks connect
|
|
||||||
|
|
||||||
Integration points:
|
|
||||||
- zkGPS commitments for hidden locations
|
|
||||||
- Mycelium network for discovery propagation
|
|
||||||
- Trust circles for team-based play
|
|
||||||
- Possibility cones for "reachable discoveries" visualization
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [x] #1 Discovery anchor types (physical, virtual, IoT)
|
|
||||||
- [x] #2 Proximity proof verification for discoveries
|
|
||||||
- [x] #3 Collectible item system with crafting
|
|
||||||
- [x] #4 Mycelium growth between discovered locations
|
|
||||||
- [x] #5 Team/group discovery mechanics
|
|
||||||
- [x] #6 Hot/cold navigation hints
|
|
||||||
- [x] #7 First-finder and timestamp proofs
|
|
||||||
- [x] #8 IoT anchor protocol (NFC/BLE/QR)
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
Implemented complete discovery game system with:
|
|
||||||
|
|
||||||
**types.ts** - Comprehensive type definitions:
|
|
||||||
- Discovery anchors (physical, NFC, BLE, QR, virtual, temporal, social)
|
|
||||||
- IoT requirements and social requirements
|
|
||||||
- Collectibles, crafting recipes, inventory slots
|
|
||||||
- Spores, planted spores, fruiting bodies
|
|
||||||
- Treasure hunts, scoring, leaderboards
|
|
||||||
- Hot/cold navigation hints
|
|
||||||
|
|
||||||
**anchors.ts** - Anchor management:
|
|
||||||
- Create anchors with zkGPS commitments
|
|
||||||
- Proximity-based discovery verification
|
|
||||||
- Hot/cold navigation hints
|
|
||||||
- Prerequisite and cooldown checking
|
|
||||||
- IoT and social requirement verification
|
|
||||||
|
|
||||||
**collectibles.ts** - Item and crafting system:
|
|
||||||
- ItemRegistry for item definitions
|
|
||||||
- InventoryManager with stacking
|
|
||||||
- CraftingManager with recipes
|
|
||||||
- Default spore, fragment, and artifact items
|
|
||||||
|
|
||||||
**spores.ts** - Mycelium integration:
|
|
||||||
- 7 spore types (explorer, connector, amplifier, guardian, harvester, temporal, social)
|
|
||||||
- Planting spores at discovered locations
|
|
||||||
- Hypha connections between nearby spores
|
|
||||||
- Fruiting body emergence when networks connect
|
|
||||||
- Growth simulation with nutrient decay
|
|
||||||
|
|
||||||
**hunts.ts** - Treasure hunt management:
|
|
||||||
- Create hunts with multiple anchors
|
|
||||||
- Sequential or free-form discovery
|
|
||||||
- Scoring with bonuses (first finder, time, sequence, group)
|
|
||||||
- Leaderboards and prizes
|
|
||||||
- Hunt templates (quick, standard, epic, team)
|
|
||||||
|
|
||||||
Moving to In Progress - core TypeScript implementation complete, still needs:
|
|
||||||
- UI components for discovery/hunt interfaces
|
|
||||||
- Canvas integration for map visualization
|
|
||||||
- Real IoT hardware testing (NFC/BLE)
|
|
||||||
- Backend persistence layer
|
|
||||||
- Multiplayer sync via Automerge
|
|
||||||
|
|
||||||
**Merged to dev branch (2025-12-05):**
|
|
||||||
- Complete discovery game system TypeScript merged
|
|
||||||
- Anchor, collectible, spore, and hunt systems in place
|
|
||||||
- All type definitions and core logic implemented
|
|
||||||
|
|
||||||
**Still needs for production:**
|
|
||||||
- React UI components for discovery/hunt interfaces
|
|
||||||
- Canvas map visualization integration
|
|
||||||
- IoT hardware testing (NFC/BLE)
|
|
||||||
- Backend persistence layer
|
|
||||||
- Multiplayer sync testing
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
@ -1,59 +0,0 @@
|
||||||
---
|
|
||||||
id: task-038
|
|
||||||
title: Real-Time Location Presence with Privacy Controls
|
|
||||||
status: Done
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-05 02:00'
|
|
||||||
updated_date: '2025-12-05 02:00'
|
|
||||||
labels:
|
|
||||||
- feature
|
|
||||||
- open-mapping
|
|
||||||
- privacy
|
|
||||||
- collaboration
|
|
||||||
dependencies: []
|
|
||||||
priority: high
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Implemented real-time location sharing with trust-based privacy controls for collaborative mapping.
|
|
||||||
|
|
||||||
Key features:
|
|
||||||
- Privacy-preserving location via zkGPS commitments
|
|
||||||
- Trust circle precision controls (intimate ~2.4m → public ~630km)
|
|
||||||
- Real-time broadcasting and receiving of presence
|
|
||||||
- Proximity detection without revealing exact location
|
|
||||||
- React hook for easy canvas integration
|
|
||||||
- Map visualization components (PresenceLayer, PresenceList)
|
|
||||||
|
|
||||||
Files created in src/open-mapping/presence/:
|
|
||||||
- types.ts: Comprehensive type definitions
|
|
||||||
- manager.ts: PresenceManager class with location watch, broadcasting, trust circles
|
|
||||||
- useLocationPresence.ts: React hook for canvas integration
|
|
||||||
- PresenceLayer.tsx: Map visualization components
|
|
||||||
- index.ts: Barrel export
|
|
||||||
|
|
||||||
Integration pattern:
|
|
||||||
```typescript
|
|
||||||
const presence = useLocationPresence({
|
|
||||||
channelId: 'room-id',
|
|
||||||
user: { pubKey, privKey, displayName, color },
|
|
||||||
broadcastFn: (data) => automergeAdapter.broadcast(data),
|
|
||||||
});
|
|
||||||
|
|
||||||
// Set trust levels for contacts
|
|
||||||
presence.setTrustLevel(bobKey, 'friends'); // ~2.4km precision
|
|
||||||
presence.setTrustLevel(aliceKey, 'intimate'); // ~2.4m precision
|
|
||||||
```
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [x] #1 Location presence types defined
|
|
||||||
- [x] #2 PresenceManager with broadcasting
|
|
||||||
- [x] #3 Trust-based precision controls
|
|
||||||
- [x] #4 React hook for canvas integration
|
|
||||||
- [x] #5 Map visualization components
|
|
||||||
- [x] #6 Proximity detection without exact location
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
@ -1,154 +0,0 @@
|
||||||
---
|
|
||||||
id: task-039
|
|
||||||
title: 'MapShape Integration: Connect Subsystems to Canvas Shape'
|
|
||||||
status: Done
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-05 02:12'
|
|
||||||
updated_date: '2025-12-05 03:41'
|
|
||||||
labels:
|
|
||||||
- feature
|
|
||||||
- mapping
|
|
||||||
- integration
|
|
||||||
dependencies:
|
|
||||||
- task-024
|
|
||||||
priority: high
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Evolve MapShapeUtil.tsx to integrate the 6 implemented subsystems (privacy, mycelium, lenses, conics, discovery, presence) into the canvas map shape. Currently the MapShape is a standalone map viewer - it needs to become the central hub for all open-mapping features.
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [x] #1 MapShape props extended for subsystem toggles
|
|
||||||
- [x] #2 Presence layer integrated with opt-in location sharing
|
|
||||||
- [x] #3 Lens system accessible via UI
|
|
||||||
- [x] #4 Route/waypoint visualization working
|
|
||||||
- [x] #5 Collaboration sync via Automerge
|
|
||||||
- [x] #6 Discovery game elements visible on map
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
**MapShape Evolution Progress (Dec 5, 2025):**
|
|
||||||
|
|
||||||
### Completed:
|
|
||||||
|
|
||||||
1. **Extended IMapShape Props** - Added comprehensive subsystem configuration types:
|
|
||||||
- `MapPresenceConfig` - Location sharing with privacy levels
|
|
||||||
- `MapLensConfig` - Alternative map projections
|
|
||||||
- `MapDiscoveryConfig` - Games, anchors, spores, hunts
|
|
||||||
- `MapRoutingConfig` - Waypoints, routes, alternatives
|
|
||||||
- `MapConicsConfig` - Possibility cones visualization
|
|
||||||
|
|
||||||
2. **Header UI Controls** - Subsystem toolbar with:
|
|
||||||
- ⚙️ Expandable subsystem panel
|
|
||||||
- Toggle buttons for each subsystem
|
|
||||||
- Lens selector dropdown (6 lens types)
|
|
||||||
- Share location button for presence
|
|
||||||
- Active subsystem indicators in header
|
|
||||||
|
|
||||||
3. **Visualization Layers Added:**
|
|
||||||
- Route polyline layer (MapLibre GeoJSON source/layer)
|
|
||||||
- Waypoint markers management
|
|
||||||
- Routing panel (bottom-right) with stats
|
|
||||||
- Presence panel (bottom-left) with share button
|
|
||||||
- Discovery panel (top-right) with checkboxes
|
|
||||||
- Lens indicator badge (top-left when active)
|
|
||||||
|
|
||||||
### Still Needed:
|
|
||||||
- Actual MapLibre marker implementation for waypoints
|
|
||||||
- Integration with OSRM routing backend
|
|
||||||
- Connect presence system to actual location services
|
|
||||||
- Wire up discovery system to anchor/spore data
|
|
||||||
|
|
||||||
**Additional Implementation (Dec 5, 2025):**
|
|
||||||
|
|
||||||
### Routing System - Fully Working:
|
|
||||||
- ✅ MapLibre.Marker implementation with draggable waypoints
|
|
||||||
- ✅ Click-to-add-waypoint when routing enabled
|
|
||||||
- ✅ OSRM routing service integration (public server)
|
|
||||||
- ✅ Auto-route calculation after adding/dragging waypoints
|
|
||||||
- ✅ Route polyline rendering with GeoJSON layer
|
|
||||||
- ✅ Clear route button with full state reset
|
|
||||||
- ✅ Loading indicator during route calculation
|
|
||||||
- ✅ Distance/duration display in routing panel
|
|
||||||
|
|
||||||
### Presence System - Fully Working:
|
|
||||||
- ✅ Browser Geolocation API integration
|
|
||||||
- ✅ Location watching with configurable accuracy
|
|
||||||
- ✅ User location marker with pulsing animation
|
|
||||||
- ✅ Error handling (permission denied, unavailable, timeout)
|
|
||||||
- ✅ "Go to My Location" button with flyTo animation
|
|
||||||
- ✅ Privacy level affects GPS accuracy settings
|
|
||||||
- ✅ Real-time coordinate display when sharing
|
|
||||||
|
|
||||||
### Still TODO:
|
|
||||||
- Discovery system anchor visualization
|
|
||||||
- Automerge sync for collaborative editing
|
|
||||||
|
|
||||||
Phase 5: Automerge Sync Integration - Analyzing existing sync architecture. TLDraw shapes sync automatically via TLStoreToAutomerge.ts. MapShape props should already sync since they're part of the shape record.
|
|
||||||
|
|
||||||
**Automerge Sync Implementation Complete (Dec 5, 2025):**
|
|
||||||
|
|
||||||
1. **Collaborative sharedLocations** - Added `sharedLocations: Record<string, SharedLocation>` to MapPresenceConfig props
|
|
||||||
|
|
||||||
2. **Conflict-free updates** - Each user updates only their own key in sharedLocations, allowing Automerge CRDT to handle concurrent updates automatically
|
|
||||||
|
|
||||||
3. **Location sync effect** - When user shares location, their coordinate is published to sharedLocations with userId, userName, color, timestamp, and privacyLevel
|
|
||||||
|
|
||||||
4. **Auto-cleanup** - User's entry is removed from sharedLocations when they stop sharing
|
|
||||||
|
|
||||||
5. **Collaborator markers** - Renders MapLibre markers for all other users' shared locations (different from user's own pulsing marker)
|
|
||||||
|
|
||||||
6. **Stale location filtering** - Collaborator locations older than 5 minutes are not rendered
|
|
||||||
|
|
||||||
7. **UI updates** - Presence panel now shows count of online collaborators
|
|
||||||
|
|
||||||
**How it works:**
|
|
||||||
|
|
||||||
- MapShape props sync automatically via existing TLDraw → Automerge infrastructure
|
|
||||||
|
|
||||||
- When user calls editor.updateShape() to update MapShape props, changes flow through TLStoreToAutomerge.ts
|
|
||||||
|
|
||||||
- Remote changes come back via Automerge patches and update the shape's props
|
|
||||||
|
|
||||||
- Each user only writes to their own key in sharedLocations, so no conflicts occur
|
|
||||||
|
|
||||||
**Discovery Visualization Complete (Dec 5, 2025):**
|
|
||||||
|
|
||||||
### Added Display Types for Automerge Sync:
|
|
||||||
- `DiscoveryAnchorMarker` - Simplified anchor data for map markers
|
|
||||||
- `SporeMarker` - Mycelium spore data with strength and connections
|
|
||||||
- `HuntMarker` - Treasure hunt waypoints with sequence numbers
|
|
||||||
|
|
||||||
### MapDiscoveryConfig Extended:
|
|
||||||
- `anchors: DiscoveryAnchorMarker[]` - Synced anchor data
|
|
||||||
- `spores: SporeMarker[]` - Synced spore data with connection graph
|
|
||||||
- `hunts: HuntMarker[]` - Synced treasure hunt waypoints
|
|
||||||
|
|
||||||
### Marker Rendering Implemented:
|
|
||||||
1. **Anchor Markers** - Circular markers with type-specific colors (physical=green, nfc=blue, qr=purple, virtual=amber). Hidden anchors shown with reduced opacity until discovered.
|
|
||||||
|
|
||||||
2. **Spore Markers** - Pulsing circular markers with radial gradients. Size scales with spore strength (40-100%). Animation keyframes for organic feel.
|
|
||||||
|
|
||||||
3. **Mycelium Network** - GeoJSON LineString layer connecting spores. Dashed green lines with 60% opacity visualize the network connections.
|
|
||||||
|
|
||||||
4. **Hunt Markers** - Numbered square markers for treasure hunts. Amber when not found, green with checkmark when discovered.
|
|
||||||
|
|
||||||
### Discovery Panel Enhanced:
|
|
||||||
- Stats display showing counts: 📍 anchors, 🍄 spores, 🏆 hunts
|
|
||||||
- "+Add Anchor" button - Creates demo anchor at map center
|
|
||||||
- "+Add Spore" button - Creates demo spore with random connection
|
|
||||||
- "+Add Hunt Point" button - Creates treasure hunt waypoint
|
|
||||||
- "Clear All" button - Removes all discovery elements
|
|
||||||
|
|
||||||
### How Automerge Sync Works:
|
|
||||||
- Discovery data stored in MapShape.props.discovery
|
|
||||||
- Shape updates via editor.updateShape() flow through TLStoreToAutomerge
|
|
||||||
- All collaborators see markers appear in real-time
|
|
||||||
- Each user can add/modify elements, CRDT handles conflicts
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
@ -1,39 +0,0 @@
|
||||||
---
|
|
||||||
id: task-040
|
|
||||||
title: 'Open-Mapping Production Ready: Fix TypeScript, Enable Build, Polish UI'
|
|
||||||
status: In Progress
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-05 21:58'
|
|
||||||
labels:
|
|
||||||
- feature
|
|
||||||
- mapping
|
|
||||||
- typescript
|
|
||||||
- build
|
|
||||||
dependencies: []
|
|
||||||
priority: high
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Make the open-mapping module production-ready by fixing TypeScript errors, re-enabling it in the build, and polishing the UI components.
|
|
||||||
|
|
||||||
Currently the open-mapping directory is excluded from tsconfig due to TypeScript errors. This task covers:
|
|
||||||
1. Fix TypeScript errors in src/open-mapping/**
|
|
||||||
2. Re-enable in tsconfig.json
|
|
||||||
3. Add NODE_OPTIONS for build memory
|
|
||||||
4. Polish MapShapeUtil UI (multi-route, layer panel)
|
|
||||||
5. Test collaboration features
|
|
||||||
6. Deploy to staging
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [ ] #1 open-mapping included in tsconfig without errors
|
|
||||||
- [ ] #2 npm run build succeeds
|
|
||||||
- [ ] #3 MapShapeUtil renders and functions correctly
|
|
||||||
- [ ] #4 Routing via OSRM works
|
|
||||||
- [ ] #5 GPS sharing works between clients
|
|
||||||
- [ ] #6 Layer switching works
|
|
||||||
- [ ] #7 Search with autocomplete works
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
@ -1,91 +0,0 @@
|
||||||
---
|
|
||||||
id: task-041
|
|
||||||
title: User Networking & Social Graph Visualization
|
|
||||||
status: Done
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-06 06:17'
|
|
||||||
updated_date: '2025-12-06 06:46'
|
|
||||||
labels:
|
|
||||||
- feature
|
|
||||||
- social
|
|
||||||
- visualization
|
|
||||||
- networking
|
|
||||||
dependencies: []
|
|
||||||
priority: high
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Build a social networking layer on the canvas that allows users to:
|
|
||||||
1. Tag other users as "connected" to them
|
|
||||||
2. Search by username to add connections
|
|
||||||
3. Track connected network of CryptIDs
|
|
||||||
4. Replace top-right presence icons with bottom-right graph visualization
|
|
||||||
5. Create 3D interactive graph at graph.jeffemmett.com
|
|
||||||
|
|
||||||
Key Components:
|
|
||||||
- Connection storage (extend trust circles in D1/Automerge)
|
|
||||||
- User search API
|
|
||||||
- 2D mini-graph in bottom-right (like minimap)
|
|
||||||
- 3D force-graph visualization (Three.js/react-force-graph-3d)
|
|
||||||
- Edge metadata (relationship types, clickable edges)
|
|
||||||
|
|
||||||
Architecture: Extends existing presence system in open-mapping/presence/ and trust circles in privacy/trustCircles.ts
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [x] #1 Users can search and add connections to other CryptIDs
|
|
||||||
- [x] #2 Connections persist across sessions in D1 database
|
|
||||||
- [x] #3 Bottom-right graph visualization shows room users and connections
|
|
||||||
- [ ] #4 3D graph at graph.jeffemmett.com is interactive (spin, zoom, click)
|
|
||||||
- [ ] #5 Clicking edges allows defining relationship metadata
|
|
||||||
- [x] #6 Real-time updates when connections change
|
|
||||||
- [x] #7 Privacy-respecting (honors trust circle permissions)
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
Design decisions made:
|
|
||||||
- Binary connections only: 'connected' or 'not connected'
|
|
||||||
- All usernames publicly searchable
|
|
||||||
- One-way following allowed (no acceptance required)
|
|
||||||
- Graph scope: full network in grey, room participants colored by presence
|
|
||||||
- Edge metadata private to the two connected parties
|
|
||||||
|
|
||||||
Implementation complete:
|
|
||||||
|
|
||||||
**Files Created:**
|
|
||||||
- worker/schema.sql: Added user_profiles, user_connections, connection_metadata tables
|
|
||||||
- worker/types.ts: Added TrustLevel, UserConnection, GraphEdge, NetworkGraph types
|
|
||||||
- worker/networkingApi.ts: Full API implementation for connections, search, graph
|
|
||||||
- src/lib/networking/types.ts: Client-side types with trust levels
|
|
||||||
- src/lib/networking/connectionService.ts: API client
|
|
||||||
- src/lib/networking/index.ts: Module exports
|
|
||||||
- src/components/networking/useNetworkGraph.ts: React hook for graph state
|
|
||||||
- src/components/networking/UserSearchModal.tsx: User search UI
|
|
||||||
- src/components/networking/NetworkGraphMinimap.tsx: 2D force graph with d3
|
|
||||||
- src/components/networking/NetworkGraphPanel.tsx: Tldraw integration wrapper
|
|
||||||
- src/components/networking/index.ts: Component exports
|
|
||||||
|
|
||||||
**Modified Files:**
|
|
||||||
- worker/worker.ts: Added networking API routes
|
|
||||||
- src/ui/components.tsx: Added NetworkGraphPanel to InFrontOfCanvas
|
|
||||||
|
|
||||||
**Trust Levels:**
|
|
||||||
- unconnected (grey): No permissions
|
|
||||||
- connected (yellow): View permission
|
|
||||||
- trusted (green): Edit permission
|
|
||||||
|
|
||||||
**Features:**
|
|
||||||
- One-way following (no acceptance required)
|
|
||||||
- Trust level upgrade/downgrade
|
|
||||||
- Edge metadata (private labels, notes, colors)
|
|
||||||
- Room participants highlighted with presence colors
|
|
||||||
- Full network shown in grey, room subset colored
|
|
||||||
- Expandable to 3D view (future: graph.jeffemmett.com)
|
|
||||||
|
|
||||||
2D implementation complete. Follow-up task-042 created for 3D graph and edge metadata editor modal.
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
@ -1,52 +0,0 @@
|
||||||
---
|
|
||||||
id: task-042
|
|
||||||
title: 3D Network Graph Visualization & Edge Metadata Editor
|
|
||||||
status: To Do
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-06 06:46'
|
|
||||||
labels:
|
|
||||||
- feature
|
|
||||||
- visualization
|
|
||||||
- 3d
|
|
||||||
- networking
|
|
||||||
dependencies:
|
|
||||||
- task-041
|
|
||||||
priority: medium
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Build the 3D interactive network visualization at graph.jeffemmett.com and implement the edge metadata editor modal. This extends the 2D minimap created in task-041.
|
|
||||||
|
|
||||||
Key Features:
|
|
||||||
1. **3D Force Graph** at graph.jeffemmett.com
|
|
||||||
- Three.js / react-force-graph-3d visualization
|
|
||||||
- Full-screen, interactive (spin, zoom, pan)
|
|
||||||
- Click nodes to view user profiles
|
|
||||||
- Click edges to edit metadata
|
|
||||||
- Same trust level coloring (grey/yellow/green)
|
|
||||||
- Real-time presence sync with canvas rooms
|
|
||||||
|
|
||||||
2. **Edge Metadata Editor Modal**
|
|
||||||
- Opens on edge click in 2D minimap or 3D view
|
|
||||||
- Edit: label, notes, color, strength (1-10)
|
|
||||||
- Private to each party on the edge
|
|
||||||
- Bidirectional - each user has their own metadata view
|
|
||||||
|
|
||||||
3. **Expand Button Integration**
|
|
||||||
- 2D minimap expand button opens 3D view
|
|
||||||
- URL sharing for specific graph views
|
|
||||||
- Optional: embed 3D graph back in canvas as iframe
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [ ] #1 3D force graph at graph.jeffemmett.com renders user network
|
|
||||||
- [ ] #2 Graph is interactive: spin, zoom, pan, click nodes/edges
|
|
||||||
- [ ] #3 Edge metadata editor modal allows editing label, notes, color, strength
|
|
||||||
- [ ] #4 Edge metadata persists to D1 and is private per-user
|
|
||||||
- [ ] #5 Expand button in 2D minimap opens 3D view
|
|
||||||
- [ ] #6 Real-time updates when connections change
|
|
||||||
- [ ] #7 Trust level colors match 2D minimap (grey/yellow/green)
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
@ -1,79 +0,0 @@
|
||||||
---
|
|
||||||
id: task-042
|
|
||||||
title: User Permissions - View, Edit, Admin Levels
|
|
||||||
status: In Progress
|
|
||||||
assignee: ['@claude']
|
|
||||||
created_date: '2025-12-05 14:00'
|
|
||||||
updated_date: '2025-12-05 14:00'
|
|
||||||
labels:
|
|
||||||
- feature
|
|
||||||
- auth
|
|
||||||
- permissions
|
|
||||||
- cryptid
|
|
||||||
- security
|
|
||||||
dependencies:
|
|
||||||
- task-018
|
|
||||||
priority: high
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Implement a three-tier permission system for canvas boards:
|
|
||||||
|
|
||||||
**Permission Levels:**
|
|
||||||
1. **View** - Can see board contents, cannot edit. Default for anonymous/unauthenticated users.
|
|
||||||
2. **Edit** - Can see and modify board contents. Requires CryptID authentication.
|
|
||||||
3. **Admin** - Full access + can manage board settings and user permissions. Board owner by default.
|
|
||||||
|
|
||||||
**Key Features:**
|
|
||||||
- Anonymous users can view any shared board but cannot edit
|
|
||||||
- Creating a CryptID (username only, no password) grants edit access
|
|
||||||
- CryptID uses WebCrypto API for browser-based cryptographic keys (W3C standard)
|
|
||||||
- Session state encrypted and stored offline for authenticated users
|
|
||||||
- Admins can invite users with specific permission levels
|
|
||||||
|
|
||||||
**Anonymous User Banner:**
|
|
||||||
Display a banner for unauthenticated users:
|
|
||||||
> "If you want to edit this board, just sign in by creating a username as your CryptID - no password required! Your CryptID is secured with encrypted keys, right in your browser, by a W3C standard algorithm. As a bonus, your session will be stored for offline access, encrypted in your browser storage by the same key, allowing you to use it securely any time you like, with full data portability."
|
|
||||||
|
|
||||||
**Technical Foundation:**
|
|
||||||
- Builds on existing CryptID WebCrypto authentication (`auth-webcrypto` branch)
|
|
||||||
- Extends D1 database schema for board-level permissions
|
|
||||||
- Read-only mode in tldraw editor for view-only users
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [ ] #1 Anonymous users can view any shared board content
|
|
||||||
- [ ] #2 Anonymous users cannot create, edit, or delete shapes
|
|
||||||
- [ ] #3 Anonymous users see a dismissible banner prompting CryptID sign-up
|
|
||||||
- [ ] #4 Creating a CryptID grants immediate edit access to current board
|
|
||||||
- [ ] #5 Board creator automatically becomes admin
|
|
||||||
- [ ] #6 Admins can view and manage board permissions
|
|
||||||
- [ ] #7 Permission levels enforced on both client and server (worker)
|
|
||||||
- [ ] #8 Authenticated user sessions stored encrypted in browser storage
|
|
||||||
- [ ] #9 Read-only toolbar/UI state for view-only users
|
|
||||||
- [ ] #10 Permission state syncs correctly across devices via CryptID
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
**Branch:** `feature/user-permissions`
|
|
||||||
|
|
||||||
**Completed:**
|
|
||||||
- [x] Database schema for boards and board_permissions tables
|
|
||||||
- [x] Permission types (PermissionLevel) in worker and client
|
|
||||||
- [x] Permission API handlers (boardPermissions.ts)
|
|
||||||
- [x] AuthContext updated with permission fetching/caching
|
|
||||||
- [x] AnonymousViewerBanner component with CryptID signup
|
|
||||||
|
|
||||||
**In Progress:**
|
|
||||||
- [ ] Board component read-only mode integration
|
|
||||||
- [ ] Automerge sync permission checking
|
|
||||||
|
|
||||||
**Dependencies:**
|
|
||||||
- `task-018` - D1 database creation (blocking for production)
|
|
||||||
- `auth-webcrypto` branch - WebCrypto authentication (merged)
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
@ -1,38 +0,0 @@
|
||||||
---
|
|
||||||
id: task-043
|
|
||||||
title: Build and publish Voice Command Android APK
|
|
||||||
status: To Do
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-07 06:31'
|
|
||||||
labels:
|
|
||||||
- android
|
|
||||||
- voice-command
|
|
||||||
- mobile
|
|
||||||
dependencies: []
|
|
||||||
priority: high
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Native Android app for voice-to-text transcription with on-device Whisper processing has been scaffolded. Next steps:
|
|
||||||
|
|
||||||
1. Download Whisper model files (run download-models.sh)
|
|
||||||
2. Set up Android signing keystore
|
|
||||||
3. Build debug APK and test on device
|
|
||||||
4. Fix any runtime issues
|
|
||||||
5. Build release APK
|
|
||||||
6. Publish to GitHub releases
|
|
||||||
|
|
||||||
The app uses sherpa-onnx for on-device transcription, supports floating button, volume button triggers, and Quick Settings tile.
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [ ] #1 Model files downloaded and bundled
|
|
||||||
- [ ] #2 APK builds successfully
|
|
||||||
- [ ] #3 Recording works on real device
|
|
||||||
- [ ] #4 Transcription produces accurate results
|
|
||||||
- [ ] #5 All trigger methods functional
|
|
||||||
- [ ] #6 Release APK signed and published
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
@ -1,39 +0,0 @@
|
||||||
---
|
|
||||||
id: task-044
|
|
||||||
title: Test dev branch UI redesign and Map fixes
|
|
||||||
status: Done
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-07 23:26'
|
|
||||||
updated_date: '2025-12-08 01:19'
|
|
||||||
labels: []
|
|
||||||
dependencies: []
|
|
||||||
priority: high
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Test the changes pushed to dev branch in commit 8123f0f
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [ ] #1 CryptID dropdown works (sign in/out, Google integration)
|
|
||||||
- [ ] #2 Settings gear dropdown shows dark mode toggle
|
|
||||||
- [ ] #3 Social Network graph shows user as lone node when solo
|
|
||||||
- [ ] #4 Map marker tool adds markers on click
|
|
||||||
- [ ] #5 Map scroll wheel zooms correctly
|
|
||||||
- [ ] #6 Old boards with Map shapes load without validation errors
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
Session completed. All changes pushed to dev branch:
|
|
||||||
- UI redesign: unified top-right menu with grey oval container
|
|
||||||
- Social Network graph: dark theme with directional arrows
|
|
||||||
- MI bar: responsive layout (bottom on mobile)
|
|
||||||
- Map fixes: tool clicks work, scroll zoom works
|
|
||||||
- Automerge: Map shape schema validation fix
|
|
||||||
- Network graph: graceful fallback on API errors
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
@ -1,19 +0,0 @@
|
||||||
---
|
|
||||||
id: task-045
|
|
||||||
title: Implement offline-first loading from IndexedDB
|
|
||||||
status: Done
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-08 08:47'
|
|
||||||
labels:
|
|
||||||
- bug-fix
|
|
||||||
- offline
|
|
||||||
- automerge
|
|
||||||
dependencies: []
|
|
||||||
priority: high
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Fixed a bug where the app would hang indefinitely when the server wasn't running because `await adapter.whenReady()` blocked IndexedDB loading. Now the app loads from IndexedDB first (offline-first), then syncs with server in the background with a 5-second timeout.
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
@ -1,26 +0,0 @@
|
||||||
---
|
|
||||||
id: task-046
|
|
||||||
title: Add maximize button to StandardizedToolWrapper
|
|
||||||
status: Done
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-08 08:51'
|
|
||||||
updated_date: '2025-12-08 09:03'
|
|
||||||
labels:
|
|
||||||
- feature
|
|
||||||
- ui
|
|
||||||
- shapes
|
|
||||||
dependencies: []
|
|
||||||
priority: medium
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Added a maximize/fullscreen button to the standardized header bar. When clicked, the tool fills the viewport. Press Esc or click again to restore original dimensions. Created useMaximize hook that shape utils can use. Implemented on ChatBoxShapeUtil as example.
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
Added maximize to ALL 16 shapes using StandardizedToolWrapper (not just ChatBox)
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
@ -1,49 +0,0 @@
|
||||||
---
|
|
||||||
id: task-047
|
|
||||||
title: Improve mobile touch/pen interactions across custom tools
|
|
||||||
status: Done
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-10 18:28'
|
|
||||||
updated_date: '2025-12-10 18:28'
|
|
||||||
labels:
|
|
||||||
- mobile
|
|
||||||
- touch
|
|
||||||
- ux
|
|
||||||
- accessibility
|
|
||||||
dependencies: []
|
|
||||||
priority: medium
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Fixed touch and pen interaction issues across all custom canvas tools to ensure they work properly on mobile devices and with stylus input.
|
|
||||||
|
|
||||||
Changes made:
|
|
||||||
- Added onTouchStart/onTouchEnd handlers to all interactive elements
|
|
||||||
- Added touchAction: 'manipulation' CSS to prevent 300ms click delay
|
|
||||||
- Increased minimum touch target sizes to 44px for accessibility
|
|
||||||
- Fixed ImageGen: Generate button, Copy/Download/Delete, input field
|
|
||||||
- Fixed VideoGen: Upload, URL input, prompt, duration, Generate button
|
|
||||||
- Fixed Transcription: Start/Stop/Pause buttons, textarea, Save/Cancel
|
|
||||||
- Fixed Multmux: Create Session, Refresh, session list, input fields
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [x] #1 All buttons respond to touch on mobile devices
|
|
||||||
- [x] #2 No 300ms click delay on interactive elements
|
|
||||||
- [x] #3 Touch targets are at least 44px for accessibility
|
|
||||||
- [x] #4 Image generation works on mobile
|
|
||||||
- [x] #5 Video generation works on mobile
|
|
||||||
- [x] #6 Transcription controls work on mobile
|
|
||||||
- [x] #7 Terminal (Multmux) controls work on mobile
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
Pushed to dev branch: b6af3ec
|
|
||||||
|
|
||||||
Files modified: ImageGenShapeUtil.tsx, VideoGenShapeUtil.tsx, TranscriptionShapeUtil.tsx, MultmuxShapeUtil.tsx
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
@ -1,58 +0,0 @@
|
||||||
---
|
|
||||||
id: task-048
|
|
||||||
title: Version History & CryptID Registration Enhancements
|
|
||||||
status: Done
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-10 22:22'
|
|
||||||
updated_date: '2025-12-10 22:22'
|
|
||||||
labels:
|
|
||||||
- feature
|
|
||||||
- auth
|
|
||||||
- history
|
|
||||||
dependencies: []
|
|
||||||
priority: high
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Add version history feature with diff visualization and enhance CryptID registration flow with email backup
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
## Implementation Summary
|
|
||||||
|
|
||||||
### Email Service (SendGrid → Resend)
|
|
||||||
- Updated `worker/types.ts` to use `RESEND_API_KEY`
|
|
||||||
- Updated `worker/cryptidAuth.ts` sendEmail() to use Resend API
|
|
||||||
|
|
||||||
### CryptID Registration Flow
|
|
||||||
- Multi-step registration: welcome → username → email → success
|
|
||||||
- Detailed explainer about passwordless authentication
|
|
||||||
- Email backup for multi-device access
|
|
||||||
- Added `email` field to Session type
|
|
||||||
|
|
||||||
### Version History Feature
|
|
||||||
|
|
||||||
**Backend API Endpoints:**
|
|
||||||
- `GET /room/:roomId/history` - Get version history
|
|
||||||
- `GET /room/:roomId/snapshot/:hash` - Get snapshot at version
|
|
||||||
- `POST /room/:roomId/diff` - Compute diff between versions
|
|
||||||
- `POST /room/:roomId/revert` - Revert to a version
|
|
||||||
|
|
||||||
**Frontend Components:**
|
|
||||||
- `VersionHistoryPanel.tsx` - Timeline with diff visualization
|
|
||||||
- `useVersionHistory.ts` - React hook for programmatic access
|
|
||||||
- GREEN highlighting for added shapes
|
|
||||||
- RED highlighting for removed shapes
|
|
||||||
- PURPLE highlighting for modified shapes
|
|
||||||
|
|
||||||
### Other Fixes
|
|
||||||
- Network graph connect/trust buttons now work
|
|
||||||
- CryptID dropdown integration buttons improved
|
|
||||||
- Obsidian vault connection modal added
|
|
||||||
|
|
||||||
Pushed to dev branch: commit 195cc7f
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
@ -1,35 +0,0 @@
|
||||||
---
|
|
||||||
id: task-049
|
|
||||||
title: Implement second device verification for CryptID
|
|
||||||
status: To Do
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-10 22:24'
|
|
||||||
labels:
|
|
||||||
- cryptid
|
|
||||||
- auth
|
|
||||||
- security
|
|
||||||
- testing
|
|
||||||
dependencies: []
|
|
||||||
priority: high
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Set up and test second device verification flow for the CryptID authentication system. This ensures users can recover their account and verify identity across multiple devices.
|
|
||||||
|
|
||||||
Key areas to implement/verify:
|
|
||||||
- QR code scanning between devices for key sharing
|
|
||||||
- Email backup verification flow
|
|
||||||
- Device linking and trust establishment
|
|
||||||
- Recovery flow when primary device is lost
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [ ] #1 Second device can scan QR code to link account
|
|
||||||
- [ ] #2 Email backup sends verification code correctly (via Resend)
|
|
||||||
- [ ] #3 Linked devices can both access the same account
|
|
||||||
- [ ] #4 Recovery flow works when primary device unavailable
|
|
||||||
- [ ] #5 Test across different browsers/devices
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
@ -1,52 +0,0 @@
|
||||||
---
|
|
||||||
id: task-050
|
|
||||||
title: Implement Make-Real Feature (Wireframe to Working Prototype)
|
|
||||||
status: To Do
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-14 18:32'
|
|
||||||
labels:
|
|
||||||
- feature
|
|
||||||
- ai
|
|
||||||
- canvas
|
|
||||||
dependencies: []
|
|
||||||
priority: medium
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Implement the full make-real workflow that converts wireframe sketches/designs on the canvas into working HTML/CSS/JS prototypes using AI.
|
|
||||||
|
|
||||||
## Current State
|
|
||||||
The backend infrastructure is ~60% complete:
|
|
||||||
- ✅ `makeRealSettings` atom in `src/lib/settings.tsx` with provider/model/API key configs
|
|
||||||
- ✅ System prompt in `src/prompt.ts` for wireframe-to-prototype conversion
|
|
||||||
- ✅ LLM backend in `src/utils/llmUtils.ts` with OpenAI, Anthropic, Ollama, RunPod support
|
|
||||||
- ✅ Settings migration in `src/routes/Board.tsx` loading `makereal_settings_2`
|
|
||||||
- ✅ "Make Real" placeholder in AI_TOOLS dropdown
|
|
||||||
|
|
||||||
## Missing Components
|
|
||||||
1. **Selection-to-image capture** - Export selected shapes as base64 PNG
|
|
||||||
2. **`makeReal()` action function** - Orchestrate the capture → AI → render pipeline
|
|
||||||
3. **ResponseShape/PreviewShape** - Custom tldraw shape to render generated HTML in iframe
|
|
||||||
4. **UI trigger** - Button/keyboard shortcut to invoke make-real on selection
|
|
||||||
5. **Iteration support** - Allow annotations on generated output for refinement
|
|
||||||
|
|
||||||
## Reference Implementation
|
|
||||||
- tldraw make-real demo: https://github.com/tldraw/make-real
|
|
||||||
- Key files to reference: `makeReal.ts`, `ResponseShape.tsx`, `getSelectionAsImageDataUrl.ts`
|
|
||||||
|
|
||||||
## Old Branch
|
|
||||||
`remotes/origin/make-real-integration` exists but is very outdated with errors - needs complete rewrite rather than merge.
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [ ] #1 User can select shapes on canvas and trigger make-real action
|
|
||||||
- [ ] #2 Selection is captured as image and sent to configured AI provider
|
|
||||||
- [ ] #3 AI generates HTML/CSS/JS prototype based on wireframe and system prompt
|
|
||||||
- [ ] #4 Generated prototype renders in interactive iframe on canvas (ResponseShape)
|
|
||||||
- [ ] #5 User can annotate/modify and re-run make-real for iterations
|
|
||||||
- [ ] #6 Settings modal allows configuring provider/model/API keys
|
|
||||||
- [ ] #7 Works with Ollama (free), OpenAI, and Anthropic backends
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
@ -1,88 +0,0 @@
|
||||||
---
|
|
||||||
id: task-051
|
|
||||||
title: Offline storage and cold reload from offline state
|
|
||||||
status: Done
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-15 04:58'
|
|
||||||
updated_date: '2025-12-25 23:38'
|
|
||||||
labels:
|
|
||||||
- feature
|
|
||||||
- offline
|
|
||||||
- storage
|
|
||||||
- IndexedDB
|
|
||||||
dependencies: []
|
|
||||||
priority: high
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Implement offline storage fallback so that when a browser reloads without network connectivity, it automatically loads from local IndexedDB storage and renders the last known state of the board for that user.
|
|
||||||
|
|
||||||
## Implementation Summary (Completed)
|
|
||||||
|
|
||||||
### Changes Made:
|
|
||||||
1. **Board.tsx** - Updated render condition to allow rendering when offline with local data (`isOfflineWithLocalData` flag)
|
|
||||||
2. **useAutomergeStoreV2** - Added `isNetworkOnline` parameter and offline fast path that immediately loads records from Automerge doc without waiting for network patches
|
|
||||||
3. **useAutomergeSyncRepo** - Passes `isNetworkOnline` to `useAutomergeStoreV2`
|
|
||||||
4. **ConnectionStatusIndicator** - Updated messaging to clarify users are viewing locally cached canvas when offline
|
|
||||||
|
|
||||||
### How It Works:
|
|
||||||
1. useAutomergeSyncRepo detects no network and loads data from IndexedDB
|
|
||||||
2. useAutomergeStoreV2 receives handle with local data and detects offline state
|
|
||||||
3. Offline Fast Path immediately loads records into TLDraw store
|
|
||||||
4. Board.tsx renders with local data
|
|
||||||
5. ConnectionStatusIndicator shows "Working Offline - Viewing locally saved canvas"
|
|
||||||
6. When back online, Automerge automatically syncs via CRDT merge
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [x] #1 Board renders from local IndexedDB when browser reloads offline
|
|
||||||
- [x] #2 User sees 'Working Offline' indicator with clear messaging
|
|
||||||
- [x] #3 Changes made offline are saved locally
|
|
||||||
- [x] #4 Auto-sync when network connectivity returns
|
|
||||||
- [x] #5 No data loss during offline/online transitions
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
## Testing Required
|
|
||||||
- Test cold reload while offline (airplane mode)
|
|
||||||
- Test with board containing various shape types
|
|
||||||
- Test transition from offline to online (auto-sync)
|
|
||||||
- Test making changes while offline and syncing
|
|
||||||
- Verify no data loss scenarios
|
|
||||||
|
|
||||||
Commit: 4df9e42 pushed to dev branch
|
|
||||||
|
|
||||||
## Code Review Complete (2025-12-25)
|
|
||||||
|
|
||||||
All acceptance criteria implemented:
|
|
||||||
|
|
||||||
**AC #1 - Board renders from IndexedDB offline:**
|
|
||||||
- Board.tsx line 1225: `isOfflineWithLocalData = !isNetworkOnline && hasStore`
|
|
||||||
- Line 1229: `shouldRender = hasStore && (isSynced || isOfflineWithLocalData)`
|
|
||||||
|
|
||||||
**AC #2 - Working Offline indicator:**
|
|
||||||
- ConnectionStatusIndicator shows 'Working Offline' with purple badge
|
|
||||||
- Detailed message explains local caching and auto-sync
|
|
||||||
|
|
||||||
**AC #3 - Changes saved locally:**
|
|
||||||
- Automerge Repo uses IndexedDBStorageAdapter
|
|
||||||
- Changes persisted via handle.change() automatically
|
|
||||||
|
|
||||||
**AC #4 - Auto-sync on reconnect:**
|
|
||||||
- CloudflareAdapter has networkOnlineHandler/networkOfflineHandler
|
|
||||||
- Triggers reconnect when network returns
|
|
||||||
|
|
||||||
**AC #5 - No data loss:**
|
|
||||||
- CRDT merge semantics preserve all changes
|
|
||||||
- JSON sync fallback also handles offline changes
|
|
||||||
|
|
||||||
**Manual testing recommended:**
|
|
||||||
- Test in airplane mode with browser reload
|
|
||||||
- Verify data persists across offline sessions
|
|
||||||
- Test online/offline transitions
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
@ -1,79 +0,0 @@
|
||||||
---
|
|
||||||
id: task-052
|
|
||||||
title: 'Flip permissions model: everyone edits by default, protected boards opt-in'
|
|
||||||
status: Done
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-15 17:23'
|
|
||||||
updated_date: '2025-12-15 19:26'
|
|
||||||
labels: []
|
|
||||||
dependencies: []
|
|
||||||
priority: high
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Change the default permission model so ALL users (including anonymous) can edit by default. Boards can be marked as "protected" by an admin, making them view-only for non-designated users.
|
|
||||||
|
|
||||||
Key changes:
|
|
||||||
1. Add is_protected column to boards table
|
|
||||||
2. Add global_admins table (jeffemmett@gmail.com as initial admin)
|
|
||||||
3. Flip getEffectivePermission logic
|
|
||||||
4. Create BoardSettingsDropdown component with view-only toggle
|
|
||||||
5. Add user invite for protected boards
|
|
||||||
6. Admin request email flow
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [x] #1 Anonymous users can edit unprotected boards
|
|
||||||
- [x] #2 Protected boards are view-only for non-editors
|
|
||||||
- [x] #3 Global admin (jeffemmett@gmail.com) has admin on all boards
|
|
||||||
- [x] #4 Settings dropdown shows view-only toggle for admins
|
|
||||||
- [x] #5 Can add/remove editors on protected boards
|
|
||||||
- [x] #6 Admin request button sends email
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
## Implementation Complete (Dec 15, 2025)
|
|
||||||
|
|
||||||
### Backend Changes (commit 2fe96fa)
|
|
||||||
- **worker/schema.sql**: Added `is_protected` column to boards, created `global_admins` table
|
|
||||||
- **worker/types.ts**: Added `GlobalAdmin` interface, extended `PermissionCheckResult`
|
|
||||||
- **worker/boardPermissions.ts**: Rewrote `getEffectivePermission()` with new logic, added `isGlobalAdmin()`, new API handlers
|
|
||||||
- **worker/worker.ts**: Added routes for `/boards/:boardId/info`, `/boards/:boardId/editors`, `/admin/request`
|
|
||||||
- **worker/migrations/001_add_protected_boards.sql**: Migration script created
|
|
||||||
|
|
||||||
### D1 Migration (executed manually)
|
|
||||||
```sql
|
|
||||||
ALTER TABLE boards ADD COLUMN is_protected INTEGER DEFAULT 0;
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_boards_protected ON boards(is_protected);
|
|
||||||
CREATE TABLE IF NOT EXISTS global_admins (email TEXT PRIMARY KEY, added_at TEXT, added_by TEXT);
|
|
||||||
INSERT OR IGNORE INTO global_admins (email) VALUES ('jeffemmett@gmail.com');
|
|
||||||
```
|
|
||||||
|
|
||||||
### Frontend Changes (commit 3f71222)
|
|
||||||
- **src/ui/components.tsx**: Integrated board protection settings into existing settings dropdown
|
|
||||||
- Protection toggle (view-only mode)
|
|
||||||
- Editor list management (add/remove)
|
|
||||||
- Global Admin badge display
|
|
||||||
- **src/context/AuthContext.tsx**: Changed default permission to 'edit' for everyone
|
|
||||||
- **src/routes/Board.tsx**: Updated `isReadOnly` logic for new permission model
|
|
||||||
- **src/components/BoardSettingsDropdown.tsx**: Created standalone component (kept for reference)
|
|
||||||
|
|
||||||
### Worker Deployment
|
|
||||||
- Deployed to Cloudflare Workers (version 5ddd1e23-d32f-459f-bc5c-cf3f799ab93f)
|
|
||||||
|
|
||||||
### Remaining
|
|
||||||
- [ ] AC #6: Admin request email flow (Resend integration needed)
|
|
||||||
|
|
||||||
### Resend Email Integration (commit a46ce44)
|
|
||||||
- Added `RESEND_API_KEY` secret to Cloudflare Worker
|
|
||||||
- Fixed from email to use verified domain: `Canvas <noreply@jeffemmett.com>`
|
|
||||||
- Admin request emails will be sent to jeffemmett@gmail.com
|
|
||||||
- Test email sent successfully: ID 7113526b-ce1e-43e7-b18d-42b3d54823d1
|
|
||||||
|
|
||||||
**All acceptance criteria now complete!**
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
@ -1,44 +0,0 @@
|
||||||
---
|
|
||||||
id: task-053
|
|
||||||
title: Initial mycro-zine toolkit setup
|
|
||||||
status: Done
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-15 23:41'
|
|
||||||
updated_date: '2025-12-15 23:41'
|
|
||||||
labels:
|
|
||||||
- setup
|
|
||||||
- feature
|
|
||||||
dependencies: []
|
|
||||||
priority: high
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Created the mycro-zine repository with:
|
|
||||||
- Single-page print layout generator (2x4 grid, all 8 pages on one 8.5"x11" sheet)
|
|
||||||
- Prompt templates for AI content/image generation
|
|
||||||
- Example Undernet zine pages
|
|
||||||
- Support for US Letter and A4 paper sizes
|
|
||||||
- CLI and programmatic API
|
|
||||||
- Pushed to Gitea and GitHub
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [x] #1 Repository structure created
|
|
||||||
- [x] #2 Layout script generates single-page output
|
|
||||||
- [x] #3 Prompt templates created
|
|
||||||
- [x] #4 Example zine pages included
|
|
||||||
- [x] #5 Pushed to Gitea and GitHub
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
Completed 2025-12-15. Repository at:
|
|
||||||
- Gitea: gitea.jeffemmett.com:jeffemmett/mycro-zine
|
|
||||||
- GitHub: github.com/Jeff-Emmett/mycro-zine
|
|
||||||
|
|
||||||
Test with: cd /home/jeffe/Github/mycro-zine && npm run example
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
@ -1,42 +0,0 @@
|
||||||
---
|
|
||||||
id: task-054
|
|
||||||
title: Re-enable Map tool with GPS location sharing
|
|
||||||
status: Done
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-15 23:40'
|
|
||||||
updated_date: '2025-12-15 23:40'
|
|
||||||
labels:
|
|
||||||
- feature
|
|
||||||
- map
|
|
||||||
- collaboration
|
|
||||||
dependencies: []
|
|
||||||
priority: medium
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Re-enabled the Map tool in the toolbar and context menu. Added GPS location sharing feature allowing collaborators to share their real-time location on the map with colored markers.
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [x] #1 Map tool visible in toolbar (globe icon)
|
|
||||||
- [x] #2 Map tool available in context menu under Create Tool
|
|
||||||
- [x] #3 GPS location sharing toggle button works
|
|
||||||
- [x] #4 Collaborator locations shown as colored markers
|
|
||||||
- [x] #5 GPS watch cleaned up on component unmount
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
Implemented in commit 2d9d216.
|
|
||||||
|
|
||||||
Changes:
|
|
||||||
- CustomToolbar.tsx: Uncommented Map tool
|
|
||||||
- CustomContextMenu.tsx: Uncommented Map tool in Create Tool submenu
|
|
||||||
- MapShapeUtil.tsx: Added GPS location sharing with collaborator markers
|
|
||||||
|
|
||||||
GPS feature includes toggle button, real-time location updates, colored markers for each collaborator, and proper cleanup on unmount.
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
@ -1,75 +0,0 @@
|
||||||
---
|
|
||||||
id: task-055
|
|
||||||
title: Integrate MycroZine generator tool into canvas
|
|
||||||
status: In Progress
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-15 23:41'
|
|
||||||
updated_date: '2025-12-18 23:24'
|
|
||||||
labels:
|
|
||||||
- feature
|
|
||||||
- canvas
|
|
||||||
- ai
|
|
||||||
- gemini
|
|
||||||
dependencies: []
|
|
||||||
priority: high
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Create a MycroZineGeneratorShape - an interactive tool on the canvas that allows users to generate complete 8-page mini-zines from a topic/prompt.
|
|
||||||
|
|
||||||
5-phase iterative workflow:
|
|
||||||
1. Ideation: User discusses content with Claude (conversational)
|
|
||||||
2. Drafts: Claude generates 8 draft pages using Gemini, spawns on canvas
|
|
||||||
3. Feedback: User gives spatial feedback on each page
|
|
||||||
4. Finalization: Claude integrates feedback into final versions
|
|
||||||
5. Print: Aggregate into single-page printable (2x4 grid)
|
|
||||||
|
|
||||||
Key requirements:
|
|
||||||
- Always use Gemini for image generation (latest model)
|
|
||||||
- Store completed zines as templates for reprinting
|
|
||||||
- Individual image shapes spawned on canvas for spatial feedback
|
|
||||||
- Single-page print layout (all 8 pages on one 8.5"x11" sheet)
|
|
||||||
|
|
||||||
References mycro-zine repo at /home/jeffe/Github/mycro-zine for layout utilities and prompt templates.
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [x] #1 MycroZineGeneratorShapeUtil.tsx created
|
|
||||||
- [x] #2 MycroZineGeneratorTool.ts created and registered
|
|
||||||
- [ ] #3 Ideation phase with embedded chat UI
|
|
||||||
- [ ] #4 Drafts phase generates 8 images via Gemini and spawns on canvas
|
|
||||||
- [ ] #5 Feedback phase collects user input per page
|
|
||||||
- [ ] #6 Finalizing phase regenerates pages with feedback
|
|
||||||
- [ ] #7 Complete phase with print-ready download and template save
|
|
||||||
- [ ] #8 Templates stored in localStorage for reprinting
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
Starting implementation of full 5-phase MycroZineGenerator shape
|
|
||||||
|
|
||||||
Created MycroZineGeneratorShapeUtil.tsx with full 5-phase workflow (ideation, drafts, feedback, finalizing, complete)
|
|
||||||
|
|
||||||
Created MycroZineGeneratorTool.ts
|
|
||||||
|
|
||||||
Registered in Board.tsx
|
|
||||||
|
|
||||||
Build successful - no TypeScript errors
|
|
||||||
|
|
||||||
Integrated Gemini Nano Banana Pro for image generation:
|
|
||||||
- Updated standalone mycro-zine app (generate-page/route.ts) with fallback chain: Nano Banana Pro → Imagen 3 → Gemini 2.0 Flash → placeholder
|
|
||||||
- Updated canvas MycroZineGeneratorShapeUtil.tsx to call Gemini API directly with proper types
|
|
||||||
- Added getGeminiConfig() to clientConfig.ts for API key management
|
|
||||||
- Aspect ratio: 3:4 portrait for zine pages (825x1275 target dimensions)
|
|
||||||
|
|
||||||
2025-12-18: Fixed geo-restriction issue for image generation
|
|
||||||
- Direct Gemini API calls were blocked in EU (Netcup server location)
|
|
||||||
- Created RunPod serverless proxy (US-based) to bypass geo-restrictions
|
|
||||||
- Added /api/generate-image endpoint to zine.jeffemmett.com that returns base64
|
|
||||||
- Updated canvas MycroZineGeneratorShapeUtil to call zine.jeffemmett.com API instead of Gemini directly
|
|
||||||
- Image generation now works reliably from any location
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
@ -1,75 +0,0 @@
|
||||||
---
|
|
||||||
id: task-056
|
|
||||||
title: Test Infrastructure & Merge Readiness Tests
|
|
||||||
status: Done
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-18 07:25'
|
|
||||||
updated_date: '2025-12-18 07:26'
|
|
||||||
labels:
|
|
||||||
- testing
|
|
||||||
- ci-cd
|
|
||||||
- infrastructure
|
|
||||||
dependencies: []
|
|
||||||
priority: high
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Established comprehensive testing infrastructure to verify readiness for merging dev to main. Includes:
|
|
||||||
|
|
||||||
- Vitest for unit/integration tests
|
|
||||||
- Playwright for E2E tests
|
|
||||||
- Miniflare setup for worker tests
|
|
||||||
- GitHub Actions CI/CD pipeline with 80% coverage gate
|
|
||||||
|
|
||||||
Test coverage for:
|
|
||||||
- Automerge CRDT sync (collaboration tests)
|
|
||||||
- Offline storage/cold reload
|
|
||||||
- CryptID authentication (registration, login, device linking)
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [x] #1 Vitest configured with jsdom environment
|
|
||||||
- [x] #2 Playwright configured for E2E tests
|
|
||||||
- [x] #3 Unit tests for crypto and IndexedDB document mapping
|
|
||||||
- [x] #4 E2E tests for collaboration, offline mode, authentication
|
|
||||||
- [x] #5 GitHub Actions workflow for CI/CD
|
|
||||||
- [x] #6 All current tests passing
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
## Implementation Summary
|
|
||||||
|
|
||||||
### Files Created:
|
|
||||||
- `vitest.config.ts` - Vitest configuration with jsdom, coverage thresholds
|
|
||||||
- `playwright.config.ts` - Playwright E2E test configuration
|
|
||||||
- `tests/setup.ts` - Global test setup (mocks for matchMedia, ResizeObserver, etc.)
|
|
||||||
- `tests/mocks/indexeddb.ts` - fake-indexeddb utilities
|
|
||||||
- `tests/mocks/websocket.ts` - MockWebSocket for sync tests
|
|
||||||
- `tests/mocks/automerge.ts` - Test helpers for CRDT documents
|
|
||||||
- `tests/unit/cryptid/crypto.test.ts` - WebCrypto unit tests (14 tests)
|
|
||||||
- `tests/unit/offline/document-mapping.test.ts` - IndexedDB tests (13 tests)
|
|
||||||
- `tests/e2e/collaboration.spec.ts` - CRDT sync E2E tests
|
|
||||||
- `tests/e2e/offline-mode.spec.ts` - Offline storage E2E tests
|
|
||||||
- `tests/e2e/authentication.spec.ts` - CryptID auth E2E tests
|
|
||||||
- `.github/workflows/test.yml` - CI/CD pipeline
|
|
||||||
|
|
||||||
### Test Commands Added to package.json:
|
|
||||||
- `npm run test` - Run Vitest in watch mode
|
|
||||||
- `npm run test:run` - Run once
|
|
||||||
- `npm run test:coverage` - With coverage report
|
|
||||||
- `npm run test:e2e` - Run Playwright E2E tests
|
|
||||||
|
|
||||||
### Current Test Results:
|
|
||||||
- 27 unit tests passing
|
|
||||||
- E2E tests ready to run against dev server
|
|
||||||
|
|
||||||
### Next Steps:
|
|
||||||
- Add worker tests with Miniflare (task-056 continuation)
|
|
||||||
- Run E2E tests to verify collaboration/offline/auth flows
|
|
||||||
- Increase unit test coverage to 80%
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
@ -1,24 +0,0 @@
|
||||||
---
|
|
||||||
id: task-057
|
|
||||||
title: Set up Cloudflare WARP split tunnels for Claude Code
|
|
||||||
status: Done
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-19 01:10'
|
|
||||||
labels: []
|
|
||||||
dependencies: []
|
|
||||||
priority: medium
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Configured Cloudflare Zero Trust split tunnel excludes to allow Claude Code to work in WSL2 with WARP enabled on Windows.
|
|
||||||
|
|
||||||
Completed:
|
|
||||||
- Created Zero Trust API token with device config permissions
|
|
||||||
- Added localhost (127.0.0.0/8) to excludes
|
|
||||||
- Added Anthropic domains (api.anthropic.com, claude.ai, anthropic.com)
|
|
||||||
- Private networks already excluded (172.16.0.0/12, 192.168.0.0/16, 10.0.0.0/8)
|
|
||||||
- Created ~/bin/warp-split-tunnel CLI tool for future management
|
|
||||||
- Saved token to Netcup ~/.cloudflare-credentials.env
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
@ -1,48 +0,0 @@
|
||||||
---
|
|
||||||
id: task-058
|
|
||||||
title: Set FAL_API_KEY and RUNPOD_API_KEY secrets in Cloudflare Worker
|
|
||||||
status: Done
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-25 23:30'
|
|
||||||
updated_date: '2025-12-26 01:26'
|
|
||||||
labels:
|
|
||||||
- security
|
|
||||||
- infrastructure
|
|
||||||
- canvas-website
|
|
||||||
dependencies: []
|
|
||||||
priority: high
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
SECURITY FIX: API keys were exposed in browser bundle. They've been removed from client code and proxy endpoints added to the worker. Need to set the secrets server-side for the proxy to work.
|
|
||||||
|
|
||||||
Run these commands:
|
|
||||||
```bash
|
|
||||||
cd /home/jeffe/Github/canvas-website
|
|
||||||
wrangler secret put FAL_API_KEY
|
|
||||||
# Paste: (REDACTED-FAL-KEY)
|
|
||||||
|
|
||||||
wrangler secret put RUNPOD_API_KEY
|
|
||||||
# Paste: (REDACTED-RUNPOD-KEY)
|
|
||||||
|
|
||||||
wrangler deploy
|
|
||||||
```
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [x] #1 FAL_API_KEY secret set in Cloudflare Worker
|
|
||||||
- [x] #2 RUNPOD_API_KEY secret set in Cloudflare Worker
|
|
||||||
- [x] #3 Worker deployed with new secrets
|
|
||||||
- [x] #4 Browser console no longer shows 'fal credentials exposed' warning
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
||||||
## Implementation Notes
|
|
||||||
|
|
||||||
<!-- SECTION:NOTES:BEGIN -->
|
|
||||||
Secrets set and deployed on 2025-12-25
|
|
||||||
|
|
||||||
Dec 25: Completed full client migration to server-side proxies. Pushed to dev branch.
|
|
||||||
<!-- SECTION:NOTES:END -->
|
|
||||||
|
|
@ -1,32 +0,0 @@
|
||||||
---
|
|
||||||
id: task-059
|
|
||||||
title: Debug Drawfast tool output
|
|
||||||
status: To Do
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-26 04:37'
|
|
||||||
labels:
|
|
||||||
- bug
|
|
||||||
- ai
|
|
||||||
- shapes
|
|
||||||
dependencies: []
|
|
||||||
priority: medium
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
The Drawfast tool has been temporarily disabled due to output issues that need debugging.
|
|
||||||
|
|
||||||
## Background
|
|
||||||
Drawfast is a real-time AI image generation tool that generates images as users draw. The tool has been disabled in Board.tsx pending debugging.
|
|
||||||
|
|
||||||
## Files to investigate
|
|
||||||
- `src/shapes/DrawfastShapeUtil.tsx` - Shape rendering and state
|
|
||||||
- `src/tools/DrawfastTool.ts` - Tool interaction logic
|
|
||||||
- `src/hooks/useLiveImage.tsx` - Live image generation hook
|
|
||||||
|
|
||||||
## To re-enable
|
|
||||||
1. Uncomment imports in Board.tsx (lines 50-52)
|
|
||||||
2. Uncomment DrawfastShape in customShapeUtils array (line 173)
|
|
||||||
3. Uncomment DrawfastTool in customTools array (line 199)
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
@ -1,60 +0,0 @@
|
||||||
---
|
|
||||||
id: task-060
|
|
||||||
title: Snapshot Voting Integration
|
|
||||||
status: To Do
|
|
||||||
assignee: []
|
|
||||||
created_date: '2026-01-02 16:08'
|
|
||||||
labels:
|
|
||||||
- feature
|
|
||||||
- web3
|
|
||||||
- governance
|
|
||||||
- voting
|
|
||||||
dependencies:
|
|
||||||
- task-007
|
|
||||||
priority: medium
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Integrate Snapshot.js SDK for off-chain governance voting through the canvas interface.
|
|
||||||
|
|
||||||
## Overview
|
|
||||||
Enable CryptID users with linked wallets to participate in Snapshot governance votes directly from the canvas. Proposals and voting can be visualized as shapes on the canvas.
|
|
||||||
|
|
||||||
## Dependencies
|
|
||||||
- Requires task-007 (Web3 Wallet Linking) to be completed first
|
|
||||||
- User must have at least one linked wallet with voting power
|
|
||||||
|
|
||||||
## Technical Approach
|
|
||||||
- Use Snapshot.js SDK for proposal fetching and vote submission
|
|
||||||
- Create VotingShape to visualize proposals on canvas
|
|
||||||
- Support EIP-712 signature-based voting via linked wallet
|
|
||||||
- Cache voting power from linked wallets
|
|
||||||
|
|
||||||
## Features
|
|
||||||
1. **Proposal Browser** - List active proposals from configured spaces
|
|
||||||
2. **VotingShape** - Canvas shape to display proposal details and vote
|
|
||||||
3. **Vote Signing** - Use wagmi's signTypedData for EIP-712 votes
|
|
||||||
4. **Voting Power Display** - Show user's voting power per space
|
|
||||||
5. **Vote History** - Track user's past votes
|
|
||||||
|
|
||||||
## Spaces to Support Initially
|
|
||||||
- mycofi.eth (MycoFi DAO)
|
|
||||||
- Add configuration for additional spaces
|
|
||||||
|
|
||||||
## References
|
|
||||||
- Snapshot.js: https://docs.snapshot.org/tools/snapshot.js
|
|
||||||
- Snapshot API: https://docs.snapshot.org/tools/api
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [ ] #1 Install and configure Snapshot.js SDK
|
|
||||||
- [ ] #2 Create VotingShape with proposal details display
|
|
||||||
- [ ] #3 Implement vote signing flow with EIP-712
|
|
||||||
- [ ] #4 Add proposal browser panel to canvas UI
|
|
||||||
- [ ] #5 Display voting power from linked wallets
|
|
||||||
- [ ] #6 Support multiple Snapshot spaces via configuration
|
|
||||||
- [ ] #7 Cache and display vote history
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
@ -1,68 +0,0 @@
|
||||||
---
|
|
||||||
id: task-061
|
|
||||||
title: Safe Multisig Integration for Collaborative Transactions
|
|
||||||
status: To Do
|
|
||||||
assignee: []
|
|
||||||
created_date: '2026-01-02 16:08'
|
|
||||||
labels:
|
|
||||||
- feature
|
|
||||||
- web3
|
|
||||||
- multisig
|
|
||||||
- safe
|
|
||||||
- governance
|
|
||||||
dependencies:
|
|
||||||
- task-007
|
|
||||||
priority: medium
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Integrate Safe (Gnosis Safe) SDK to enable collaborative transaction building and signing through the canvas interface.
|
|
||||||
|
|
||||||
## Overview
|
|
||||||
Allow CryptID users to create, propose, and sign Safe multisig transactions visually on the canvas. Multiple signers can collaborate in real-time to approve transactions.
|
|
||||||
|
|
||||||
## Dependencies
|
|
||||||
- Requires task-007 (Web3 Wallet Linking) to be completed first
|
|
||||||
- Users must link their Safe wallet or EOA that is a Safe signer
|
|
||||||
|
|
||||||
## Technical Approach
|
|
||||||
- Use Safe{Core} SDK for transaction building and signing
|
|
||||||
- Create TransactionBuilderShape for visual tx composition
|
|
||||||
- Use Safe Transaction Service API for proposal queue
|
|
||||||
- Real-time signature collection via canvas collaboration
|
|
||||||
|
|
||||||
## Features
|
|
||||||
1. **Safe Linking** - Link Safe addresses (detect via ERC-1271)
|
|
||||||
2. **TransactionBuilderShape** - Visual transaction composer
|
|
||||||
3. **Signature Collection UI** - See who has signed, who is pending
|
|
||||||
4. **Transaction Queue** - View pending transactions for linked Safes
|
|
||||||
5. **Execution** - Execute transactions when threshold is met
|
|
||||||
|
|
||||||
## Visual Transaction Builder Capabilities
|
|
||||||
- Transfer ETH/tokens
|
|
||||||
- Contract interactions (with ABI import)
|
|
||||||
- Batch transactions
|
|
||||||
- Scheduled transactions (via delay module)
|
|
||||||
|
|
||||||
## Collaboration Features
|
|
||||||
- Real-time signature status on canvas
|
|
||||||
- Notifications when signatures are needed
|
|
||||||
- Discussion threads on pending transactions
|
|
||||||
|
|
||||||
## References
|
|
||||||
- Safe{Core} SDK: https://docs.safe.global/sdk/overview
|
|
||||||
- Safe Transaction Service API: https://docs.safe.global/core-api/transaction-service-overview
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [ ] #1 Install and configure Safe{Core} SDK
|
|
||||||
- [ ] #2 Implement ERC-1271 signature verification for Safe linking
|
|
||||||
- [ ] #3 Create TransactionBuilderShape for visual tx composition
|
|
||||||
- [ ] #4 Build signature collection UI with real-time updates
|
|
||||||
- [ ] #5 Display pending transaction queue for linked Safes
|
|
||||||
- [ ] #6 Enable transaction execution when threshold is met
|
|
||||||
- [ ] #7 Support basic transfer and contract interaction transactions
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
@ -1,72 +0,0 @@
|
||||||
---
|
|
||||||
id: task-062
|
|
||||||
title: Account Abstraction (ERC-4337) Exploration
|
|
||||||
status: To Do
|
|
||||||
assignee: []
|
|
||||||
created_date: '2026-01-02 16:08'
|
|
||||||
labels:
|
|
||||||
- research
|
|
||||||
- web3
|
|
||||||
- account-abstraction
|
|
||||||
- erc-4337
|
|
||||||
dependencies:
|
|
||||||
- task-007
|
|
||||||
priority: low
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Research and prototype using ERC-4337 Account Abstraction to enable CryptID's P-256 keys to directly control smart contract wallets.
|
|
||||||
|
|
||||||
## Overview
|
|
||||||
Explore the possibility of using Account Abstraction (ERC-4337) to bridge CryptID's WebCrypto P-256 keys with Ethereum transactions. This would eliminate the need for wallet linking by allowing CryptID keys to directly sign UserOperations that control a smart wallet.
|
|
||||||
|
|
||||||
## Background
|
|
||||||
- CryptID uses ECDSA P-256 (NIST curve) via WebCrypto API
|
|
||||||
- Ethereum uses ECDSA secp256k1
|
|
||||||
- These curves are incompatible for direct signing
|
|
||||||
- ERC-4337 allows any signature scheme via custom validation logic
|
|
||||||
|
|
||||||
## Research Questions
|
|
||||||
1. Is P-256 signature verification gas-efficient on-chain?
|
|
||||||
2. What existing implementations exist? (Clave, Daimo)
|
|
||||||
3. What are the wallet deployment costs per user?
|
|
||||||
4. How do we handle gas sponsorship (paymaster)?
|
|
||||||
5. Which bundler/paymaster providers support this?
|
|
||||||
|
|
||||||
## Potential Benefits
|
|
||||||
- Single key for auth AND transactions
|
|
||||||
- Gasless transactions via paymaster
|
|
||||||
- Social recovery using CryptID email
|
|
||||||
- No MetaMask/wallet app needed
|
|
||||||
- True passwordless Web3
|
|
||||||
|
|
||||||
## Risks & Challenges
|
|
||||||
- Complex implementation
|
|
||||||
- Gas costs for P-256 verification (~100k gas)
|
|
||||||
- Not all L2s support ERC-4337 yet
|
|
||||||
- User education on new paradigm
|
|
||||||
|
|
||||||
## Providers to Evaluate
|
|
||||||
- Pimlico (bundler + paymaster)
|
|
||||||
- Alchemy Account Kit
|
|
||||||
- Stackup
|
|
||||||
- Biconomy
|
|
||||||
|
|
||||||
## References
|
|
||||||
- ERC-4337 Spec: https://eips.ethereum.org/EIPS/eip-4337
|
|
||||||
- Clave (P-256 wallet): https://getclave.io/
|
|
||||||
- Daimo (P-256 wallet): https://daimo.com/
|
|
||||||
- viem Account Abstraction: https://viem.sh/account-abstraction
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [ ] #1 Research P-256 on-chain verification gas costs
|
|
||||||
- [ ] #2 Evaluate existing P-256 wallet implementations (Clave, Daimo)
|
|
||||||
- [ ] #3 Prototype UserOperation signing with CryptID keys
|
|
||||||
- [ ] #4 Evaluate bundler/paymaster providers
|
|
||||||
- [ ] #5 Document architecture proposal if viable
|
|
||||||
- [ ] #6 Estimate implementation timeline and costs
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
@ -1,23 +0,0 @@
|
||||||
---
|
|
||||||
id: task-high.01
|
|
||||||
title: 'MI Bar UX: Modal Fade & Scrollable Try Next'
|
|
||||||
status: Done
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-04 06:34'
|
|
||||||
labels: []
|
|
||||||
dependencies: []
|
|
||||||
parent_task_id: task-high
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Improved Mycelial Intelligence bar UX: fades when modals/popups are open, combined Tools + Follow-up suggestions into a single scrollable 'Try Next' section
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [ ] #1 MI bar fades when settings modal is open
|
|
||||||
- [ ] #2 MI bar fades when auth modal is open
|
|
||||||
- [ ] #3 Suggested tools and follow-ups in single scrollable row
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
@ -1,24 +0,0 @@
|
||||||
---
|
|
||||||
id: task-high.02
|
|
||||||
title: CryptID Email Recovery in Settings
|
|
||||||
status: Done
|
|
||||||
assignee: []
|
|
||||||
created_date: '2025-12-04 06:35'
|
|
||||||
labels: []
|
|
||||||
dependencies: []
|
|
||||||
parent_task_id: task-high
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
|
||||||
Added email linking to User Settings modal General tab - allows users to attach their email to their CryptID account for device recovery and verification
|
|
||||||
<!-- SECTION:DESCRIPTION:END -->
|
|
||||||
|
|
||||||
## Acceptance Criteria
|
|
||||||
<!-- AC:BEGIN -->
|
|
||||||
- [ ] #1 Email linking UI in General settings tab
|
|
||||||
- [ ] #2 Shows email verification status
|
|
||||||
- [ ] #3 Sends verification email on link
|
|
||||||
- [ ] #4 Dark mode aware styling
|
|
||||||
<!-- AC:END -->
|
|
||||||
|
|
@ -1 +0,0 @@
|
||||||
if('serviceWorker' in navigator) navigator.serviceWorker.register('/dev-sw.js?dev-sw', { scope: '/', type: 'classic' })
|
|
||||||
114
dev-dist/sw.js
114
dev-dist/sw.js
|
|
@ -1,114 +0,0 @@
|
||||||
/**
|
|
||||||
* Copyright 2018 Google Inc. All Rights Reserved.
|
|
||||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
* you may not use this file except in compliance with the License.
|
|
||||||
* You may obtain a copy of the License at
|
|
||||||
* http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
* Unless required by applicable law or agreed to in writing, software
|
|
||||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
* See the License for the specific language governing permissions and
|
|
||||||
* limitations under the License.
|
|
||||||
*/
|
|
||||||
|
|
||||||
// If the loader is already loaded, just stop.
|
|
||||||
if (!self.define) {
|
|
||||||
let registry = {};
|
|
||||||
|
|
||||||
// Used for `eval` and `importScripts` where we can't get script URL by other means.
|
|
||||||
// In both cases, it's safe to use a global var because those functions are synchronous.
|
|
||||||
let nextDefineUri;
|
|
||||||
|
|
||||||
const singleRequire = (uri, parentUri) => {
|
|
||||||
uri = new URL(uri + ".js", parentUri).href;
|
|
||||||
return registry[uri] || (
|
|
||||||
|
|
||||||
new Promise(resolve => {
|
|
||||||
if ("document" in self) {
|
|
||||||
const script = document.createElement("script");
|
|
||||||
script.src = uri;
|
|
||||||
script.onload = resolve;
|
|
||||||
document.head.appendChild(script);
|
|
||||||
} else {
|
|
||||||
nextDefineUri = uri;
|
|
||||||
importScripts(uri);
|
|
||||||
resolve();
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
.then(() => {
|
|
||||||
let promise = registry[uri];
|
|
||||||
if (!promise) {
|
|
||||||
throw new Error(`Module ${uri} didn’t register its module`);
|
|
||||||
}
|
|
||||||
return promise;
|
|
||||||
})
|
|
||||||
);
|
|
||||||
};
|
|
||||||
|
|
||||||
self.define = (depsNames, factory) => {
|
|
||||||
const uri = nextDefineUri || ("document" in self ? document.currentScript.src : "") || location.href;
|
|
||||||
if (registry[uri]) {
|
|
||||||
// Module is already loading or loaded.
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
let exports = {};
|
|
||||||
const require = depUri => singleRequire(depUri, uri);
|
|
||||||
const specialDeps = {
|
|
||||||
module: { uri },
|
|
||||||
exports,
|
|
||||||
require
|
|
||||||
};
|
|
||||||
registry[uri] = Promise.all(depsNames.map(
|
|
||||||
depName => specialDeps[depName] || require(depName)
|
|
||||||
)).then(deps => {
|
|
||||||
factory(...deps);
|
|
||||||
return exports;
|
|
||||||
});
|
|
||||||
};
|
|
||||||
}
|
|
||||||
define(['./workbox-52f2a342'], (function (workbox) { 'use strict';
|
|
||||||
|
|
||||||
self.skipWaiting();
|
|
||||||
workbox.clientsClaim();
|
|
||||||
|
|
||||||
/**
|
|
||||||
* The precacheAndRoute() method efficiently caches and responds to
|
|
||||||
* requests for URLs in the manifest.
|
|
||||||
* See https://goo.gl/S9QRab
|
|
||||||
*/
|
|
||||||
workbox.precacheAndRoute([{
|
|
||||||
"url": "registerSW.js",
|
|
||||||
"revision": "3ca0b8505b4bec776b69afdba2768812"
|
|
||||||
}, {
|
|
||||||
"url": "index.html",
|
|
||||||
"revision": "0.n708e9nairg"
|
|
||||||
}], {});
|
|
||||||
workbox.cleanupOutdatedCaches();
|
|
||||||
workbox.registerRoute(new workbox.NavigationRoute(workbox.createHandlerBoundToURL("index.html"), {
|
|
||||||
allowlist: [/^\/$/]
|
|
||||||
}));
|
|
||||||
workbox.registerRoute(/^https?:\/\/.*\/api\/.*/i, new workbox.NetworkFirst({
|
|
||||||
"cacheName": "api-cache",
|
|
||||||
"networkTimeoutSeconds": 10,
|
|
||||||
plugins: [new workbox.ExpirationPlugin({
|
|
||||||
maxEntries: 100,
|
|
||||||
maxAgeSeconds: 86400
|
|
||||||
})]
|
|
||||||
}), 'GET');
|
|
||||||
workbox.registerRoute(/^https:\/\/fonts\.googleapis\.com\/.*/i, new workbox.CacheFirst({
|
|
||||||
"cacheName": "google-fonts-cache",
|
|
||||||
plugins: [new workbox.ExpirationPlugin({
|
|
||||||
maxEntries: 10,
|
|
||||||
maxAgeSeconds: 31536000
|
|
||||||
})]
|
|
||||||
}), 'GET');
|
|
||||||
workbox.registerRoute(/^https:\/\/fonts\.gstatic\.com\/.*/i, new workbox.CacheFirst({
|
|
||||||
"cacheName": "gstatic-fonts-cache",
|
|
||||||
plugins: [new workbox.ExpirationPlugin({
|
|
||||||
maxEntries: 10,
|
|
||||||
maxAgeSeconds: 31536000
|
|
||||||
})]
|
|
||||||
}), 'GET');
|
|
||||||
|
|
||||||
}));
|
|
||||||
File diff suppressed because it is too large
Load Diff
|
|
@ -1,31 +0,0 @@
|
||||||
# Canvas Website - Dev Branch Deployment
|
|
||||||
# Automatically deploys from `dev` branch for testing
|
|
||||||
# Access at: staging.jeffemmett.com
|
|
||||||
|
|
||||||
services:
|
|
||||||
canvas-dev:
|
|
||||||
build:
|
|
||||||
context: .
|
|
||||||
dockerfile: Dockerfile
|
|
||||||
args:
|
|
||||||
- VITE_WORKER_ENV=staging
|
|
||||||
container_name: canvas-dev
|
|
||||||
restart: unless-stopped
|
|
||||||
labels:
|
|
||||||
- "traefik.enable=true"
|
|
||||||
- "traefik.docker.network=traefik-public"
|
|
||||||
- "traefik.http.services.canvas-dev.loadbalancer.server.port=80"
|
|
||||||
- "traefik.http.routers.canvas-dev.rule=Host(`staging.jeffemmett.com`)"
|
|
||||||
- "traefik.http.routers.canvas-dev.entrypoints=web"
|
|
||||||
- "traefik.http.routers.canvas-dev.service=canvas-dev"
|
|
||||||
networks:
|
|
||||||
- traefik-public
|
|
||||||
healthcheck:
|
|
||||||
test: ["CMD", "curl", "-f", "http://localhost/health"]
|
|
||||||
interval: 30s
|
|
||||||
timeout: 10s
|
|
||||||
retries: 3
|
|
||||||
|
|
||||||
networks:
|
|
||||||
traefik-public:
|
|
||||||
external: true
|
|
||||||
|
|
@ -1,33 +0,0 @@
|
||||||
# Canvas Website Docker Compose
|
|
||||||
# Production: jeffemmett.com, www.jeffemmett.com
|
|
||||||
# Dev branch: staging.jeffemmett.com (separate container via docker-compose.dev.yml)
|
|
||||||
|
|
||||||
services:
|
|
||||||
canvas-website:
|
|
||||||
build:
|
|
||||||
context: .
|
|
||||||
dockerfile: Dockerfile
|
|
||||||
args:
|
|
||||||
- VITE_WORKER_ENV=production
|
|
||||||
# Add other build args from .env if needed
|
|
||||||
container_name: canvas-website
|
|
||||||
restart: unless-stopped
|
|
||||||
labels:
|
|
||||||
- "traefik.enable=true"
|
|
||||||
- "traefik.docker.network=traefik-public"
|
|
||||||
- "traefik.http.services.canvas.loadbalancer.server.port=80"
|
|
||||||
# Production deployment (jeffemmett.com and www)
|
|
||||||
- "traefik.http.routers.canvas-prod.rule=Host(`jeffemmett.com`) || Host(`www.jeffemmett.com`)"
|
|
||||||
- "traefik.http.routers.canvas-prod.entrypoints=web"
|
|
||||||
- "traefik.http.routers.canvas-prod.service=canvas"
|
|
||||||
networks:
|
|
||||||
- traefik-public
|
|
||||||
healthcheck:
|
|
||||||
test: ["CMD", "curl", "-f", "http://localhost/health"]
|
|
||||||
interval: 30s
|
|
||||||
timeout: 10s
|
|
||||||
retries: 3
|
|
||||||
|
|
||||||
networks:
|
|
||||||
traefik-public:
|
|
||||||
external: true
|
|
||||||
|
|
@ -1,913 +0,0 @@
|
||||||
# Google Data Sovereignty: Local-First Secure Storage
|
|
||||||
|
|
||||||
This document outlines the architecture for securely importing, storing, and optionally sharing Google Workspace data (Gmail, Drive, Photos, Calendar) using a **local-first, data sovereign** approach.
|
|
||||||
|
|
||||||
## Overview
|
|
||||||
|
|
||||||
**Philosophy**: Your data should be yours. Import it locally, encrypt it client-side, and choose when/what to share.
|
|
||||||
|
|
||||||
```
|
|
||||||
┌─────────────────────────────────────────────────────────────────────────┐
|
|
||||||
│ USER'S BROWSER (Data Sovereign Zone) │
|
|
||||||
├─────────────────────────────────────────────────────────────────────────┤
|
|
||||||
│ │
|
|
||||||
│ ┌─────────────┐ ┌──────────────────────────────────────────────┐ │
|
|
||||||
│ │ Google APIs │───>│ Local Processing Layer │ │
|
|
||||||
│ │ (OAuth 2.0) │ │ ├── Fetch data │ │
|
|
||||||
│ └─────────────┘ │ ├── Encrypt with user's WebCrypto keys │ │
|
|
||||||
│ │ └── Store to IndexedDB │ │
|
|
||||||
│ └────────────────────────┬─────────────────────┘ │
|
|
||||||
│ │ │
|
|
||||||
│ ┌───────────────────────────────────────────┴───────────────────────┐ │
|
|
||||||
│ │ IndexedDB Encrypted Storage │ │
|
|
||||||
│ │ ├── gmail_messages (encrypted blobs) │ │
|
|
||||||
│ │ ├── drive_documents (encrypted blobs) │ │
|
|
||||||
│ │ ├── photos_media (encrypted references) │ │
|
|
||||||
│ │ ├── calendar_events (encrypted data) │ │
|
|
||||||
│ │ └── encryption_metadata (key derivation info) │ │
|
|
||||||
│ └─────────────────────────────────────────────────────────────────── │
|
|
||||||
│ │ │
|
|
||||||
│ ┌────────────────────────┴───────────────────────┐ │
|
|
||||||
│ │ Share Decision Layer (User Controlled) │ │
|
|
||||||
│ │ ├── Keep Private (local only) │ │
|
|
||||||
│ │ ├── Share to Board (Automerge sync) │ │
|
|
||||||
│ │ └── Backup to R2 (encrypted cloud backup) │ │
|
|
||||||
│ └────────────────────────────────────────────────┘ │
|
|
||||||
│ │
|
|
||||||
└─────────────────────────────────────────────────────────────────────────┘
|
|
||||||
```
|
|
||||||
|
|
||||||
## Browser Storage Capabilities & Limitations
|
|
||||||
|
|
||||||
### IndexedDB Storage
|
|
||||||
|
|
||||||
| Browser | Default Quota | Max Quota | Persistence |
|
|
||||||
|---------|--------------|-----------|-------------|
|
|
||||||
| Chrome/Edge | 60% of disk | Unlimited* | Persistent with permission |
|
|
||||||
| Firefox | 10% up to 10GB | 50% of disk | Persistent with permission |
|
|
||||||
| Safari | 1GB (lax) | ~1GB per origin | Non-persistent (7-day eviction) |
|
|
||||||
|
|
||||||
*Chrome "Unlimited" requires `navigator.storage.persist()` permission
|
|
||||||
|
|
||||||
### Storage API Persistence
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
// Request persistent storage (prevents automatic eviction)
|
|
||||||
async function requestPersistentStorage(): Promise<boolean> {
|
|
||||||
if (navigator.storage && navigator.storage.persist) {
|
|
||||||
const isPersisted = await navigator.storage.persist();
|
|
||||||
console.log(`Persistent storage ${isPersisted ? 'granted' : 'denied'}`);
|
|
||||||
return isPersisted;
|
|
||||||
}
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check current storage quota
|
|
||||||
async function checkStorageQuota(): Promise<{used: number, quota: number}> {
|
|
||||||
if (navigator.storage && navigator.storage.estimate) {
|
|
||||||
const estimate = await navigator.storage.estimate();
|
|
||||||
return {
|
|
||||||
used: estimate.usage || 0,
|
|
||||||
quota: estimate.quota || 0
|
|
||||||
};
|
|
||||||
}
|
|
||||||
return { used: 0, quota: 0 };
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### Safari's 7-Day Eviction Rule
|
|
||||||
|
|
||||||
**CRITICAL for Safari users**: Safari evicts IndexedDB data after 7 days of non-use.
|
|
||||||
|
|
||||||
**Mitigations**:
|
|
||||||
1. Use a Service Worker with periodic background sync to "touch" data
|
|
||||||
2. Prompt Safari users to add to Home Screen (PWA mode bypasses some restrictions)
|
|
||||||
3. Automatically sync important data to R2 backup
|
|
||||||
4. Show clear warnings about Safari limitations
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
// Detect Safari's storage limitations
|
|
||||||
function hasSafariLimitations(): boolean {
|
|
||||||
const isSafari = /^((?!chrome|android).)*safari/i.test(navigator.userAgent);
|
|
||||||
const isIOS = /iPad|iPhone|iPod/.test(navigator.userAgent);
|
|
||||||
return isSafari || isIOS;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Register touch activity to prevent eviction
|
|
||||||
async function touchLocalData(): Promise<void> {
|
|
||||||
const db = await openDatabase();
|
|
||||||
const tx = db.transaction('metadata', 'readwrite');
|
|
||||||
tx.objectStore('metadata').put({
|
|
||||||
key: 'last_accessed',
|
|
||||||
timestamp: Date.now()
|
|
||||||
});
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
## Data Types & Storage Strategies
|
|
||||||
|
|
||||||
### 1. Gmail Messages
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
interface EncryptedEmailStore {
|
|
||||||
id: string; // Gmail message ID
|
|
||||||
threadId: string; // Thread ID for grouping
|
|
||||||
encryptedSubject: ArrayBuffer; // AES-GCM encrypted
|
|
||||||
encryptedBody: ArrayBuffer; // AES-GCM encrypted
|
|
||||||
encryptedFrom: ArrayBuffer; // Sender info
|
|
||||||
encryptedTo: ArrayBuffer[]; // Recipients
|
|
||||||
date: number; // Timestamp (unencrypted for sorting)
|
|
||||||
labels: string[]; // Gmail labels (encrypted or not based on sensitivity)
|
|
||||||
hasAttachments: boolean; // Flag only, attachments stored separately
|
|
||||||
snippet: ArrayBuffer; // Encrypted preview
|
|
||||||
|
|
||||||
// Metadata for search (encrypted bloom filter or encrypted index)
|
|
||||||
searchIndex: ArrayBuffer;
|
|
||||||
|
|
||||||
// Sync metadata
|
|
||||||
syncedAt: number;
|
|
||||||
localOnly: boolean; // Not yet synced to any external storage
|
|
||||||
}
|
|
||||||
|
|
||||||
// Storage estimate per email:
|
|
||||||
// - Average email: ~20KB raw → ~25KB encrypted
|
|
||||||
// - With attachments: varies, but reference stored, not full attachment
|
|
||||||
// - 10,000 emails ≈ 250MB
|
|
||||||
```
|
|
||||||
|
|
||||||
### 2. Google Drive Documents
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
interface EncryptedDriveDocument {
|
|
||||||
id: string; // Drive file ID
|
|
||||||
encryptedName: ArrayBuffer;
|
|
||||||
encryptedMimeType: ArrayBuffer;
|
|
||||||
encryptedContent: ArrayBuffer; // For text-based docs
|
|
||||||
encryptedPreview: ArrayBuffer; // Thumbnail or preview
|
|
||||||
|
|
||||||
// Large files: store reference, not content
|
|
||||||
contentStrategy: 'inline' | 'reference' | 'chunked';
|
|
||||||
chunks?: string[]; // IDs of content chunks if chunked
|
|
||||||
|
|
||||||
// Hierarchy
|
|
||||||
parentId: string | null;
|
|
||||||
path: ArrayBuffer; // Encrypted path string
|
|
||||||
|
|
||||||
// Sharing & permissions (for UI display)
|
|
||||||
isShared: boolean;
|
|
||||||
|
|
||||||
modifiedTime: number;
|
|
||||||
size: number; // Unencrypted for quota management
|
|
||||||
|
|
||||||
syncedAt: number;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Storage considerations:
|
|
||||||
// - Google Docs: Convert to markdown/HTML, typically 10-100KB
|
|
||||||
// - Spreadsheets: JSON export, 100KB-10MB depending on size
|
|
||||||
// - PDFs: Store reference only, load on demand
|
|
||||||
// - Images: Thumbnail locally, full resolution on demand
|
|
||||||
```
|
|
||||||
|
|
||||||
### 3. Google Photos
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
interface EncryptedPhotoReference {
|
|
||||||
id: string; // Photos media item ID
|
|
||||||
encryptedFilename: ArrayBuffer;
|
|
||||||
encryptedDescription: ArrayBuffer;
|
|
||||||
|
|
||||||
// Thumbnails stored locally (encrypted)
|
|
||||||
thumbnail: {
|
|
||||||
width: number;
|
|
||||||
height: number;
|
|
||||||
encryptedData: ArrayBuffer; // Base64 or blob
|
|
||||||
};
|
|
||||||
|
|
||||||
// Full resolution: reference only (fetch on demand)
|
|
||||||
fullResolution: {
|
|
||||||
width: number;
|
|
||||||
height: number;
|
|
||||||
// NOT storing full image - too large
|
|
||||||
// Fetch via API when user requests
|
|
||||||
};
|
|
||||||
|
|
||||||
mediaType: 'image' | 'video';
|
|
||||||
creationTime: number;
|
|
||||||
|
|
||||||
// Album associations
|
|
||||||
albumIds: string[];
|
|
||||||
|
|
||||||
// Location data (highly sensitive - always encrypted)
|
|
||||||
encryptedLocation?: ArrayBuffer;
|
|
||||||
|
|
||||||
syncedAt: number;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Storage strategy:
|
|
||||||
// - Thumbnails: ~50KB each, store locally
|
|
||||||
// - Full images: NOT stored locally (too large)
|
|
||||||
// - 1,000 photos thumbnails ≈ 50MB
|
|
||||||
// - Full resolution loaded via API on demand
|
|
||||||
```
|
|
||||||
|
|
||||||
### 4. Google Calendar Events
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
interface EncryptedCalendarEvent {
|
|
||||||
id: string; // Calendar event ID
|
|
||||||
calendarId: string;
|
|
||||||
|
|
||||||
encryptedSummary: ArrayBuffer;
|
|
||||||
encryptedDescription: ArrayBuffer;
|
|
||||||
encryptedLocation: ArrayBuffer;
|
|
||||||
|
|
||||||
// Time data (unencrypted for query/sort performance)
|
|
||||||
startTime: number;
|
|
||||||
endTime: number;
|
|
||||||
isAllDay: boolean;
|
|
||||||
timezone: string;
|
|
||||||
|
|
||||||
// Recurrence
|
|
||||||
isRecurring: boolean;
|
|
||||||
encryptedRecurrence?: ArrayBuffer;
|
|
||||||
|
|
||||||
// Attendees (encrypted)
|
|
||||||
encryptedAttendees: ArrayBuffer;
|
|
||||||
|
|
||||||
// Reminders
|
|
||||||
reminders: { method: string; minutes: number }[];
|
|
||||||
|
|
||||||
// Meeting links (encrypted - sensitive)
|
|
||||||
encryptedMeetingLink?: ArrayBuffer;
|
|
||||||
|
|
||||||
syncedAt: number;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Storage estimate:
|
|
||||||
// - Average event: ~5KB encrypted
|
|
||||||
// - 2 years of events (~3000): ~15MB
|
|
||||||
```
|
|
||||||
|
|
||||||
## Encryption Strategy
|
|
||||||
|
|
||||||
### Key Derivation
|
|
||||||
|
|
||||||
Using the existing WebCrypto infrastructure, derive data encryption keys from the user's master key:
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
// Derive a data-specific encryption key from master key
|
|
||||||
async function deriveDataEncryptionKey(
|
|
||||||
masterKey: CryptoKey,
|
|
||||||
purpose: 'gmail' | 'drive' | 'photos' | 'calendar'
|
|
||||||
): Promise<CryptoKey> {
|
|
||||||
const encoder = new TextEncoder();
|
|
||||||
const purposeBytes = encoder.encode(`canvas-data-${purpose}`);
|
|
||||||
|
|
||||||
// Import master key for HKDF
|
|
||||||
const baseKey = await crypto.subtle.importKey(
|
|
||||||
'raw',
|
|
||||||
await crypto.subtle.exportKey('raw', masterKey),
|
|
||||||
'HKDF',
|
|
||||||
false,
|
|
||||||
['deriveKey']
|
|
||||||
);
|
|
||||||
|
|
||||||
// Derive purpose-specific key
|
|
||||||
return await crypto.subtle.deriveKey(
|
|
||||||
{
|
|
||||||
name: 'HKDF',
|
|
||||||
hash: 'SHA-256',
|
|
||||||
salt: purposeBytes,
|
|
||||||
info: new ArrayBuffer(0)
|
|
||||||
},
|
|
||||||
baseKey,
|
|
||||||
{ name: 'AES-GCM', length: 256 },
|
|
||||||
false,
|
|
||||||
['encrypt', 'decrypt']
|
|
||||||
);
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### Encryption/Decryption
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
// Encrypt data before storing
|
|
||||||
async function encryptData(
|
|
||||||
data: string | ArrayBuffer,
|
|
||||||
key: CryptoKey
|
|
||||||
): Promise<{encrypted: ArrayBuffer, iv: Uint8Array}> {
|
|
||||||
const iv = crypto.getRandomValues(new Uint8Array(12)); // 96-bit IV for AES-GCM
|
|
||||||
|
|
||||||
const dataBuffer = typeof data === 'string'
|
|
||||||
? new TextEncoder().encode(data)
|
|
||||||
: data;
|
|
||||||
|
|
||||||
const encrypted = await crypto.subtle.encrypt(
|
|
||||||
{ name: 'AES-GCM', iv },
|
|
||||||
key,
|
|
||||||
dataBuffer
|
|
||||||
);
|
|
||||||
|
|
||||||
return { encrypted, iv };
|
|
||||||
}
|
|
||||||
|
|
||||||
// Decrypt data when reading
|
|
||||||
async function decryptData(
|
|
||||||
encrypted: ArrayBuffer,
|
|
||||||
iv: Uint8Array,
|
|
||||||
key: CryptoKey
|
|
||||||
): Promise<ArrayBuffer> {
|
|
||||||
return await crypto.subtle.decrypt(
|
|
||||||
{ name: 'AES-GCM', iv },
|
|
||||||
key,
|
|
||||||
encrypted
|
|
||||||
);
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
## IndexedDB Schema
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
// Database schema for encrypted Google data
|
|
||||||
const GOOGLE_DATA_DB = 'canvas-google-data';
|
|
||||||
const DB_VERSION = 1;
|
|
||||||
|
|
||||||
interface GoogleDataSchema {
|
|
||||||
gmail: {
|
|
||||||
key: string; // message ID
|
|
||||||
indexes: ['threadId', 'date', 'syncedAt'];
|
|
||||||
};
|
|
||||||
drive: {
|
|
||||||
key: string; // file ID
|
|
||||||
indexes: ['parentId', 'modifiedTime', 'mimeType'];
|
|
||||||
};
|
|
||||||
photos: {
|
|
||||||
key: string; // media item ID
|
|
||||||
indexes: ['creationTime', 'mediaType'];
|
|
||||||
};
|
|
||||||
calendar: {
|
|
||||||
key: string; // event ID
|
|
||||||
indexes: ['calendarId', 'startTime', 'endTime'];
|
|
||||||
};
|
|
||||||
syncMetadata: {
|
|
||||||
key: string; // 'gmail' | 'drive' | 'photos' | 'calendar'
|
|
||||||
// Stores last sync token, sync progress, etc.
|
|
||||||
};
|
|
||||||
encryptionKeys: {
|
|
||||||
key: string; // purpose
|
|
||||||
// Stores IV, salt for key derivation
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
async function initGoogleDataDB(): Promise<IDBDatabase> {
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
const request = indexedDB.open(GOOGLE_DATA_DB, DB_VERSION);
|
|
||||||
|
|
||||||
request.onerror = () => reject(request.error);
|
|
||||||
request.onsuccess = () => resolve(request.result);
|
|
||||||
|
|
||||||
request.onupgradeneeded = (event) => {
|
|
||||||
const db = (event.target as IDBOpenDBRequest).result;
|
|
||||||
|
|
||||||
// Gmail store
|
|
||||||
if (!db.objectStoreNames.contains('gmail')) {
|
|
||||||
const gmailStore = db.createObjectStore('gmail', { keyPath: 'id' });
|
|
||||||
gmailStore.createIndex('threadId', 'threadId', { unique: false });
|
|
||||||
gmailStore.createIndex('date', 'date', { unique: false });
|
|
||||||
gmailStore.createIndex('syncedAt', 'syncedAt', { unique: false });
|
|
||||||
}
|
|
||||||
|
|
||||||
// Drive store
|
|
||||||
if (!db.objectStoreNames.contains('drive')) {
|
|
||||||
const driveStore = db.createObjectStore('drive', { keyPath: 'id' });
|
|
||||||
driveStore.createIndex('parentId', 'parentId', { unique: false });
|
|
||||||
driveStore.createIndex('modifiedTime', 'modifiedTime', { unique: false });
|
|
||||||
}
|
|
||||||
|
|
||||||
// Photos store
|
|
||||||
if (!db.objectStoreNames.contains('photos')) {
|
|
||||||
const photosStore = db.createObjectStore('photos', { keyPath: 'id' });
|
|
||||||
photosStore.createIndex('creationTime', 'creationTime', { unique: false });
|
|
||||||
photosStore.createIndex('mediaType', 'mediaType', { unique: false });
|
|
||||||
}
|
|
||||||
|
|
||||||
// Calendar store
|
|
||||||
if (!db.objectStoreNames.contains('calendar')) {
|
|
||||||
const calendarStore = db.createObjectStore('calendar', { keyPath: 'id' });
|
|
||||||
calendarStore.createIndex('calendarId', 'calendarId', { unique: false });
|
|
||||||
calendarStore.createIndex('startTime', 'startTime', { unique: false });
|
|
||||||
}
|
|
||||||
|
|
||||||
// Sync metadata
|
|
||||||
if (!db.objectStoreNames.contains('syncMetadata')) {
|
|
||||||
db.createObjectStore('syncMetadata', { keyPath: 'service' });
|
|
||||||
}
|
|
||||||
|
|
||||||
// Encryption metadata
|
|
||||||
if (!db.objectStoreNames.contains('encryptionMeta')) {
|
|
||||||
db.createObjectStore('encryptionMeta', { keyPath: 'purpose' });
|
|
||||||
}
|
|
||||||
};
|
|
||||||
});
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
## Google OAuth & API Integration
|
|
||||||
|
|
||||||
### OAuth 2.0 Scopes
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
const GOOGLE_SCOPES = {
|
|
||||||
// Read-only access (data sovereignty - we import, not modify)
|
|
||||||
gmail: 'https://www.googleapis.com/auth/gmail.readonly',
|
|
||||||
drive: 'https://www.googleapis.com/auth/drive.readonly',
|
|
||||||
photos: 'https://www.googleapis.com/auth/photoslibrary.readonly',
|
|
||||||
calendar: 'https://www.googleapis.com/auth/calendar.readonly',
|
|
||||||
|
|
||||||
// Profile for user identification
|
|
||||||
profile: 'https://www.googleapis.com/auth/userinfo.profile',
|
|
||||||
email: 'https://www.googleapis.com/auth/userinfo.email'
|
|
||||||
};
|
|
||||||
|
|
||||||
// Selective scope request - user chooses what to import
|
|
||||||
function getRequestedScopes(services: string[]): string {
|
|
||||||
const scopes = [GOOGLE_SCOPES.profile, GOOGLE_SCOPES.email];
|
|
||||||
|
|
||||||
services.forEach(service => {
|
|
||||||
if (GOOGLE_SCOPES[service as keyof typeof GOOGLE_SCOPES]) {
|
|
||||||
scopes.push(GOOGLE_SCOPES[service as keyof typeof GOOGLE_SCOPES]);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
return scopes.join(' ');
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### OAuth Flow with PKCE
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
interface GoogleAuthState {
|
|
||||||
codeVerifier: string;
|
|
||||||
redirectUri: string;
|
|
||||||
state: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
async function initiateGoogleAuth(services: string[]): Promise<void> {
|
|
||||||
const codeVerifier = generateCodeVerifier();
|
|
||||||
const codeChallenge = await generateCodeChallenge(codeVerifier);
|
|
||||||
const state = crypto.randomUUID();
|
|
||||||
|
|
||||||
// Store state for verification
|
|
||||||
sessionStorage.setItem('google_auth_state', JSON.stringify({
|
|
||||||
codeVerifier,
|
|
||||||
state,
|
|
||||||
redirectUri: window.location.origin + '/oauth/google/callback'
|
|
||||||
}));
|
|
||||||
|
|
||||||
const params = new URLSearchParams({
|
|
||||||
client_id: import.meta.env.VITE_GOOGLE_CLIENT_ID,
|
|
||||||
redirect_uri: window.location.origin + '/oauth/google/callback',
|
|
||||||
response_type: 'code',
|
|
||||||
scope: getRequestedScopes(services),
|
|
||||||
access_type: 'offline', // Get refresh token
|
|
||||||
prompt: 'consent',
|
|
||||||
code_challenge: codeChallenge,
|
|
||||||
code_challenge_method: 'S256',
|
|
||||||
state
|
|
||||||
});
|
|
||||||
|
|
||||||
window.location.href = `https://accounts.google.com/o/oauth2/v2/auth?${params}`;
|
|
||||||
}
|
|
||||||
|
|
||||||
// PKCE helpers
|
|
||||||
function generateCodeVerifier(): string {
|
|
||||||
const array = new Uint8Array(32);
|
|
||||||
crypto.getRandomValues(array);
|
|
||||||
return base64UrlEncode(array);
|
|
||||||
}
|
|
||||||
|
|
||||||
async function generateCodeChallenge(verifier: string): Promise<string> {
|
|
||||||
const encoder = new TextEncoder();
|
|
||||||
const data = encoder.encode(verifier);
|
|
||||||
const hash = await crypto.subtle.digest('SHA-256', data);
|
|
||||||
return base64UrlEncode(new Uint8Array(hash));
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### Token Storage (Encrypted)
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
interface EncryptedTokens {
|
|
||||||
accessToken: ArrayBuffer; // Encrypted
|
|
||||||
refreshToken: ArrayBuffer; // Encrypted
|
|
||||||
accessTokenIv: Uint8Array;
|
|
||||||
refreshTokenIv: Uint8Array;
|
|
||||||
expiresAt: number; // Unencrypted for refresh logic
|
|
||||||
scopes: string[]; // Unencrypted for UI display
|
|
||||||
}
|
|
||||||
|
|
||||||
async function storeGoogleTokens(
|
|
||||||
tokens: { access_token: string; refresh_token?: string; expires_in: number },
|
|
||||||
encryptionKey: CryptoKey
|
|
||||||
): Promise<void> {
|
|
||||||
const { encrypted: encAccessToken, iv: accessIv } = await encryptData(
|
|
||||||
tokens.access_token,
|
|
||||||
encryptionKey
|
|
||||||
);
|
|
||||||
|
|
||||||
const encryptedTokens: Partial<EncryptedTokens> = {
|
|
||||||
accessToken: encAccessToken,
|
|
||||||
accessTokenIv: accessIv,
|
|
||||||
expiresAt: Date.now() + (tokens.expires_in * 1000)
|
|
||||||
};
|
|
||||||
|
|
||||||
if (tokens.refresh_token) {
|
|
||||||
const { encrypted: encRefreshToken, iv: refreshIv } = await encryptData(
|
|
||||||
tokens.refresh_token,
|
|
||||||
encryptionKey
|
|
||||||
);
|
|
||||||
encryptedTokens.refreshToken = encRefreshToken;
|
|
||||||
encryptedTokens.refreshTokenIv = refreshIv;
|
|
||||||
}
|
|
||||||
|
|
||||||
const db = await initGoogleDataDB();
|
|
||||||
const tx = db.transaction('encryptionMeta', 'readwrite');
|
|
||||||
tx.objectStore('encryptionMeta').put({
|
|
||||||
purpose: 'google_tokens',
|
|
||||||
...encryptedTokens
|
|
||||||
});
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
## Data Import Workflow
|
|
||||||
|
|
||||||
### Progressive Import with Background Sync
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
/**
 * Per-service import progress surfaced to the UI and persisted so an
 * interrupted import can resume.
 */
interface ImportProgress {
  service: 'gmail' | 'drive' | 'photos' | 'calendar';
  total: number; // best-effort total item count for the progress bar
  imported: number; // items written to IndexedDB so far
  lastSyncToken?: string; // resume cursor (page/sync token from the Google API)
  status: 'idle' | 'importing' | 'paused' | 'error';
  errorMessage?: string; // only meaningful when status === 'error'
}
|
|
||||||
|
|
||||||
/**
 * Client-side importer: fetches the user's data from Google APIs in the
 * browser and writes it to IndexedDB in encrypted form.
 *
 * NOTE: getAccessToken, fetchGmailMessage, updateProgress, extractHeader and
 * extractBody are defined elsewhere in the full class (this is an excerpt).
 */
class GoogleDataImporter {
  // Master key from which per-service data keys are derived.
  private encryptionKey: CryptoKey;
  // Open handle to the canvas-google-data database.
  private db: IDBDatabase;

  /**
   * Import Gmail messages page-by-page (100 per request), encrypting and
   * storing each one, and yielding to the event loop every 10 messages so
   * the UI stays responsive.
   *
   * @param options.maxMessages  stop once at least this many are imported
   *                             (checked between pages, so the count can
   *                             overshoot by up to one page — NOTE(review):
   *                             confirm that is acceptable)
   * @param options.labelsFilter restrict to these Gmail label IDs
   * @param options.dateAfter    only messages received after this date
   */
  async importGmail(options: {
    maxMessages?: number;
    labelsFilter?: string[];
    dateAfter?: Date;
  }): Promise<void> {
    const accessToken = await this.getAccessToken();

    // Use pagination for large mailboxes
    let pageToken: string | undefined;
    let imported = 0;

    do {
      // List one page of message IDs; conditional spreads keep optional
      // query params out of the URL entirely when unset.
      const response = await fetch(
        `https://gmail.googleapis.com/gmail/v1/users/me/messages?${new URLSearchParams({
          maxResults: '100',
          ...(pageToken && { pageToken }),
          ...(options.labelsFilter && { labelIds: options.labelsFilter.join(',') }),
          ...(options.dateAfter && { q: `after:${Math.floor(options.dateAfter.getTime() / 1000)}` })
        })}`,
        { headers: { Authorization: `Bearer ${accessToken}` } }
      );

      const data = await response.json();

      // Fetch and encrypt each message
      for (const msg of data.messages || []) {
        const fullMessage = await this.fetchGmailMessage(msg.id, accessToken);
        await this.storeEncryptedEmail(fullMessage);
        imported++;

        // Update progress
        this.updateProgress('gmail', imported);

        // Yield to UI periodically
        if (imported % 10 === 0) {
          await new Promise(r => setTimeout(r, 0));
        }
      }

      pageToken = data.nextPageToken;
    } while (pageToken && (!options.maxMessages || imported < options.maxMessages));
  }

  /**
   * Encrypt the sensitive fields of one full Gmail message and persist it.
   *
   * NOTE(review): deriveDataEncryptionKey is re-run for every message;
   * consider deriving the 'gmail' key once per import run.
   * NOTE(review): the IV returned by encryptData is discarded here — unless
   * EncryptedEmailStore captures it elsewhere, these fields cannot be
   * decrypted later. Confirm against the store definition.
   * NOTE(review): the transaction is not awaited, so write failures are
   * silently dropped — confirm intended.
   */
  private async storeEncryptedEmail(message: any): Promise<void> {
    const emailKey = await deriveDataEncryptionKey(this.encryptionKey, 'gmail');

    const encrypted: EncryptedEmailStore = {
      id: message.id,
      threadId: message.threadId,
      encryptedSubject: (await encryptData(
        this.extractHeader(message, 'Subject') || '',
        emailKey
      )).encrypted,
      encryptedBody: (await encryptData(
        this.extractBody(message),
        emailKey
      )).encrypted,
      // ... other fields
      date: parseInt(message.internalDate),
      syncedAt: Date.now(),
      localOnly: true
    };

    const tx = this.db.transaction('gmail', 'readwrite');
    tx.objectStore('gmail').put(encrypted);
  }
}
|
|
||||||
```
|
|
||||||
|
|
||||||
## Sharing to Canvas Board
|
|
||||||
|
|
||||||
### Selective Sharing Model
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
interface ShareableItem {
|
|
||||||
type: 'email' | 'document' | 'photo' | 'event';
|
|
||||||
id: string;
|
|
||||||
// Decrypted data for sharing
|
|
||||||
decryptedData: any;
|
|
||||||
}
|
|
||||||
|
|
||||||
class DataSharingService {
|
|
||||||
/**
|
|
||||||
* Share a specific item to the current board
|
|
||||||
* This decrypts the item and adds it to the Automerge document
|
|
||||||
*/
|
|
||||||
async shareToBoard(
|
|
||||||
item: ShareableItem,
|
|
||||||
boardHandle: DocumentHandle<CanvasDoc>,
|
|
||||||
userKey: CryptoKey
|
|
||||||
): Promise<void> {
|
|
||||||
// 1. Decrypt the item
|
|
||||||
const decrypted = await this.decryptItem(item, userKey);
|
|
||||||
|
|
||||||
// 2. Create a canvas shape representation
|
|
||||||
const shape = this.createShapeFromItem(decrypted, item.type);
|
|
||||||
|
|
||||||
// 3. Add to Automerge document (syncs to other board users)
|
|
||||||
boardHandle.change(doc => {
|
|
||||||
doc.shapes[shape.id] = shape;
|
|
||||||
});
|
|
||||||
|
|
||||||
// 4. Mark item as shared (no longer localOnly)
|
|
||||||
await this.markAsShared(item.id, item.type);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Create a visual shape from data
|
|
||||||
*/
|
|
||||||
private createShapeFromItem(data: any, type: string): TLShape {
|
|
||||||
switch (type) {
|
|
||||||
case 'email':
|
|
||||||
return {
|
|
||||||
id: createShapeId(),
|
|
||||||
type: 'email-card',
|
|
||||||
props: {
|
|
||||||
subject: data.subject,
|
|
||||||
from: data.from,
|
|
||||||
date: data.date,
|
|
||||||
snippet: data.snippet
|
|
||||||
}
|
|
||||||
};
|
|
||||||
case 'event':
|
|
||||||
return {
|
|
||||||
id: createShapeId(),
|
|
||||||
type: 'calendar-event',
|
|
||||||
props: {
|
|
||||||
title: data.summary,
|
|
||||||
startTime: data.startTime,
|
|
||||||
endTime: data.endTime,
|
|
||||||
location: data.location
|
|
||||||
}
|
|
||||||
};
|
|
||||||
// ... other types
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
## R2 Encrypted Backup
|
|
||||||
|
|
||||||
### Backup Architecture
|
|
||||||
|
|
||||||
```
|
|
||||||
User Browser Cloudflare Worker R2 Storage
|
|
||||||
│ │ │
|
|
||||||
│ 1. Encrypt data locally │ │
|
|
||||||
│ (already encrypted in IndexedDB) │ │
|
|
||||||
│ │ │
|
|
||||||
│ 2. Generate backup key │ │
|
|
||||||
│ (derived from master key) │ │
|
|
||||||
│ │ │
|
|
||||||
│ 3. POST encrypted blob ──────────> 4. Validate user │
|
|
||||||
│ │ (CryptID auth) │
|
|
||||||
│ │ │
|
|
||||||
│ │ 5. Store blob ─────────────────> │
|
|
||||||
│ │ (already encrypted, │
|
|
||||||
│ │ worker can't read) │
|
|
||||||
│ │ │
|
|
||||||
│ <──────────────────────────────── 6. Return backup ID │
|
|
||||||
```
|
|
||||||
|
|
||||||
### Backup Implementation
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
interface BackupMetadata {
|
|
||||||
id: string;
|
|
||||||
createdAt: number;
|
|
||||||
services: ('gmail' | 'drive' | 'photos' | 'calendar')[];
|
|
||||||
itemCount: number;
|
|
||||||
sizeBytes: number;
|
|
||||||
// Encrypted with user's key - only they can read
|
|
||||||
encryptedManifest: ArrayBuffer;
|
|
||||||
}
|
|
||||||
|
|
||||||
class R2BackupService {
|
|
||||||
private workerUrl = '/api/backup';
|
|
||||||
|
|
||||||
async createBackup(
|
|
||||||
services: string[],
|
|
||||||
encryptionKey: CryptoKey
|
|
||||||
): Promise<BackupMetadata> {
|
|
||||||
// 1. Gather all encrypted data from IndexedDB
|
|
||||||
const dataToBackup = await this.gatherData(services);
|
|
||||||
|
|
||||||
// 2. Create a manifest (encrypted)
|
|
||||||
const manifest = {
|
|
||||||
version: 1,
|
|
||||||
createdAt: Date.now(),
|
|
||||||
services,
|
|
||||||
itemCounts: dataToBackup.counts
|
|
||||||
};
|
|
||||||
const { encrypted: encManifest } = await encryptData(
|
|
||||||
JSON.stringify(manifest),
|
|
||||||
encryptionKey
|
|
||||||
);
|
|
||||||
|
|
||||||
// 3. Serialize and chunk if large
|
|
||||||
const blob = await this.serializeForBackup(dataToBackup);
|
|
||||||
|
|
||||||
// 4. Upload to R2 via worker
|
|
||||||
const response = await fetch(this.workerUrl, {
|
|
||||||
method: 'POST',
|
|
||||||
headers: {
|
|
||||||
'Content-Type': 'application/octet-stream',
|
|
||||||
'X-Backup-Manifest': base64Encode(encManifest)
|
|
||||||
},
|
|
||||||
body: blob
|
|
||||||
});
|
|
||||||
|
|
||||||
const { backupId } = await response.json();
|
|
||||||
|
|
||||||
return {
|
|
||||||
id: backupId,
|
|
||||||
createdAt: Date.now(),
|
|
||||||
services: services as any,
|
|
||||||
itemCount: Object.values(dataToBackup.counts).reduce((a, b) => a + b, 0),
|
|
||||||
sizeBytes: blob.size,
|
|
||||||
encryptedManifest: encManifest
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
async restoreBackup(
|
|
||||||
backupId: string,
|
|
||||||
encryptionKey: CryptoKey
|
|
||||||
): Promise<void> {
|
|
||||||
// 1. Fetch encrypted blob from R2
|
|
||||||
const response = await fetch(`${this.workerUrl}/${backupId}`);
|
|
||||||
const encryptedBlob = await response.arrayBuffer();
|
|
||||||
|
|
||||||
// 2. Data is already encrypted with user's key
|
|
||||||
// Just write directly to IndexedDB
|
|
||||||
await this.writeToIndexedDB(encryptedBlob);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
## Privacy & Security Guarantees
|
|
||||||
|
|
||||||
### What Never Leaves the Browser (Unencrypted)
|
|
||||||
|
|
||||||
1. **Email content** - body, subject, attachments
|
|
||||||
2. **Document content** - file contents, names
|
|
||||||
3. **Photo data** - images, location metadata
|
|
||||||
4. **Calendar details** - event descriptions, attendee info
|
|
||||||
5. **OAuth tokens** - access/refresh tokens
|
|
||||||
|
|
||||||
### What the Server Never Sees
|
|
||||||
|
|
||||||
1. **Encryption keys** - derived locally, never transmitted
|
|
||||||
2. **Plaintext data** - all API calls are client-side
|
|
||||||
3. **User's Google account data** - we use read-only scopes
|
|
||||||
|
|
||||||
### Data Flow Summary
|
|
||||||
|
|
||||||
```
|
|
||||||
┌─────────────────────┐
|
|
||||||
│ Google APIs │
|
|
||||||
│ (authenticated) │
|
|
||||||
└──────────┬──────────┘
|
|
||||||
│
|
|
||||||
┌─────────▼─────────┐
|
|
||||||
│ Browser Fetch │
|
|
||||||
│ (client-side) │
|
|
||||||
└─────────┬─────────┘
|
|
||||||
│
|
|
||||||
┌─────────▼─────────┐
|
|
||||||
│ Encrypt with │
|
|
||||||
│ WebCrypto │
|
|
||||||
│ (AES-256-GCM) │
|
|
||||||
└─────────┬─────────┘
|
|
||||||
│
|
|
||||||
┌────────────────────┼────────────────────┐
|
|
||||||
│ │ │
|
|
||||||
┌─────────▼─────────┐ ┌───────▼────────┐ ┌────────▼───────┐
|
|
||||||
│ IndexedDB │ │ Share to │ │ R2 Backup │
|
|
||||||
│ (local only) │ │ Board │ │ (encrypted) │
|
|
||||||
│ │ │ (Automerge) │ │ │
|
|
||||||
└───────────────────┘ └────────────────┘ └────────────────┘
|
|
||||||
│ │ │
|
|
||||||
▼ ▼ ▼
|
|
||||||
Only you can read Board members Only you can
|
|
||||||
(your keys) see shared items decrypt backup
|
|
||||||
```
|
|
||||||
|
|
||||||
## Implementation Phases
|
|
||||||
|
|
||||||
### Phase 1: Foundation
|
|
||||||
- [ ] IndexedDB schema for encrypted data
|
|
||||||
- [ ] Key derivation from existing WebCrypto keys
|
|
||||||
- [ ] Encrypt/decrypt utility functions
|
|
||||||
- [ ] Storage quota monitoring
|
|
||||||
|
|
||||||
### Phase 2: Google OAuth
|
|
||||||
- [ ] OAuth 2.0 with PKCE flow
|
|
||||||
- [ ] Token encryption and storage
|
|
||||||
- [ ] Token refresh logic
|
|
||||||
- [ ] Scope selection UI
|
|
||||||
|
|
||||||
### Phase 3: Data Import
|
|
||||||
- [ ] Gmail import with pagination
|
|
||||||
- [ ] Drive document import
|
|
||||||
- [ ] Photos thumbnail import
|
|
||||||
- [ ] Calendar event import
|
|
||||||
- [ ] Progress tracking UI
|
|
||||||
|
|
||||||
### Phase 4: Canvas Integration
|
|
||||||
- [ ] Email card shape
|
|
||||||
- [ ] Document preview shape
|
|
||||||
- [ ] Photo thumbnail shape
|
|
||||||
- [ ] Calendar event shape
|
|
||||||
- [ ] Share to board functionality
|
|
||||||
|
|
||||||
### Phase 5: R2 Backup
|
|
||||||
- [ ] Encrypted backup creation
|
|
||||||
- [ ] Backup restore
|
|
||||||
- [ ] Backup management UI
|
|
||||||
- [ ] Automatic backup scheduling
|
|
||||||
|
|
||||||
### Phase 6: Polish
|
|
||||||
- [ ] Safari storage warnings
|
|
||||||
- [ ] Offline data access
|
|
||||||
- [ ] Search within encrypted data
|
|
||||||
- [ ] Data export (Google Takeout style)
|
|
||||||
|
|
||||||
## Security Checklist
|
|
||||||
|
|
||||||
- [ ] All data encrypted before storage
|
|
||||||
- [ ] Keys never leave browser unencrypted
|
|
||||||
- [ ] OAuth tokens encrypted at rest
|
|
||||||
- [ ] PKCE used for OAuth flow
|
|
||||||
- [ ] Read-only Google API scopes
|
|
||||||
- [ ] Safari 7-day eviction handled
|
|
||||||
- [ ] Storage quota warnings
|
|
||||||
- [ ] Secure context required (HTTPS)
|
|
||||||
- [ ] CSP headers configured
|
|
||||||
- [ ] No sensitive data in console logs
|
|
||||||
|
|
||||||
## Related Documents
|
|
||||||
|
|
||||||
- [Local File Upload](./LOCAL_FILE_UPLOAD.md) - Multi-item upload with same encryption model
|
|
||||||
- [Offline Storage Feasibility](../OFFLINE_STORAGE_FEASIBILITY.md) - IndexedDB + Automerge foundation
|
|
||||||
|
|
||||||
## References
|
|
||||||
|
|
||||||
- [IndexedDB API](https://developer.mozilla.org/en-US/docs/Web/API/IndexedDB_API)
|
|
||||||
- [Web Crypto API](https://developer.mozilla.org/en-US/docs/Web/API/Web_Crypto_API)
|
|
||||||
- [Storage API](https://developer.mozilla.org/en-US/docs/Web/API/Storage_API)
|
|
||||||
- [Google OAuth 2.0](https://developers.google.com/identity/protocols/oauth2)
|
|
||||||
- [Gmail API](https://developers.google.com/gmail/api)
|
|
||||||
- [Drive API](https://developers.google.com/drive/api)
|
|
||||||
- [Photos Library API](https://developers.google.com/photos/library/reference/rest)
|
|
||||||
- [Calendar API](https://developers.google.com/calendar/api)
|
|
||||||
|
|
@ -1,862 +0,0 @@
|
||||||
# Local File Upload: Multi-Item Encrypted Import
|
|
||||||
|
|
||||||
A simpler, more broadly compatible approach to importing local files into the canvas with the same privacy-first, encrypted storage model.
|
|
||||||
|
|
||||||
## Overview
|
|
||||||
|
|
||||||
Instead of maintaining persistent folder connections (which have browser compatibility issues), provide a **drag-and-drop / file picker** interface for batch importing files into encrypted local storage.
|
|
||||||
|
|
||||||
```
|
|
||||||
┌─────────────────────────────────────────────────────────────────────────┐
|
|
||||||
│ UPLOAD INTERFACE │
|
|
||||||
├─────────────────────────────────────────────────────────────────────────┤
|
|
||||||
│ │
|
|
||||||
│ ┌─────────────────────────────────────────────────────────────────┐ │
|
|
||||||
│ │ │ │
|
|
||||||
│ │ 📁 Drop files here or click to browse │ │
|
|
||||||
│ │ │ │
|
|
||||||
│ │ Supports: Images, PDFs, Documents, Text, Audio, Video │ │
|
|
||||||
│ │ │ │
|
|
||||||
│ └─────────────────────────────────────────────────────────────────┘ │
|
|
||||||
│ │
|
|
||||||
│ ┌──────────────────────────────────────────────────────────────────┐ │
|
|
||||||
│ │ Import Queue [Upload] │ │
|
|
||||||
│ ├──────────────────────────────────────────────────────────────────┤ │
|
|
||||||
│ │ ☑ photo_001.jpg (2.4 MB) 🔒 Encrypt 📤 Share │ │
|
|
||||||
│ │ ☑ meeting_notes.pdf (450 KB) 🔒 Encrypt ☐ Private │ │
|
|
||||||
│ │ ☑ project_plan.md (12 KB) 🔒 Encrypt ☐ Private │ │
|
|
||||||
│ │ ☐ sensitive_doc.docx (1.2 MB) 🔒 Encrypt ☐ Private │ │
|
|
||||||
│ └──────────────────────────────────────────────────────────────────┘ │
|
|
||||||
│ │
|
|
||||||
│ Storage: 247 MB used / ~5 GB available │
|
|
||||||
│ │
|
|
||||||
└─────────────────────────────────────────────────────────────────────────┘
|
|
||||||
```
|
|
||||||
|
|
||||||
## Why Multi-Item Upload vs. Folder Connection
|
|
||||||
|
|
||||||
| Feature | Folder Connection | Multi-Item Upload |
|
|
||||||
|---------|------------------|-------------------|
|
|
||||||
| Browser Support | Chrome/Edge only | All browsers |
|
|
||||||
| Persistent Access | Yes (with permission) | No (one-time import) |
|
|
||||||
| Implementation | Complex | Simple |
|
|
||||||
| User Control | Less explicit | Very explicit |
|
|
||||||
| Privacy UX | Hidden | Clear per-file choices |
|
|
||||||
|
|
||||||
**Recommendation**: Multi-item upload is better for privacy-conscious users who want explicit control over what enters the system.
|
|
||||||
|
|
||||||
## Supported File Types
|
|
||||||
|
|
||||||
### Documents
|
|
||||||
| Type | Extension | Processing | Storage Strategy |
|
|
||||||
|------|-----------|-----------|------------------|
|
|
||||||
| Markdown | `.md` | Parse frontmatter, render | Full content |
|
|
||||||
| PDF | `.pdf` | Extract text, thumbnail | Text + thumbnail |
|
|
||||||
| Word | `.docx` | Convert to markdown | Converted content |
|
|
||||||
| Text | `.txt`, `.csv`, `.json` | Direct | Full content |
|
|
||||||
| Code | `.js`, `.ts`, `.py`, etc. | Syntax highlight | Full content |
|
|
||||||
|
|
||||||
### Images
|
|
||||||
| Type | Extension | Processing | Storage Strategy |
|
|
||||||
|------|-----------|-----------|------------------|
|
|
||||||
| Photos | `.jpg`, `.png`, `.webp` | Generate thumbnail | Thumbnail + full |
|
|
||||||
| Vector | `.svg` | Direct | Full content |
|
|
||||||
| GIF | `.gif` | First frame thumb | Thumbnail + full |
|
|
||||||
|
|
||||||
### Media
|
|
||||||
| Type | Extension | Processing | Storage Strategy |
|
|
||||||
|------|-----------|-----------|------------------|
|
|
||||||
| Audio | `.mp3`, `.wav`, `.m4a` | Waveform preview | Reference + metadata |
|
|
||||||
| Video | `.mp4`, `.webm` | Frame thumbnail | Reference + metadata |
|
|
||||||
|
|
||||||
### Archives (Future)
|
|
||||||
| Type | Extension | Processing |
|
|
||||||
|------|-----------|-----------|
|
|
||||||
| ZIP | `.zip` | List contents, selective extract |
|
|
||||||
| Obsidian Export | `.zip` | Vault structure import |
|
|
||||||
|
|
||||||
## Architecture
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
/**
 * Record for one locally imported file: processing artifacts kept plaintext
 * for preview/search, the file content itself stored only as ciphertext,
 * plus the user's explicit sharing choices.
 */
interface UploadedFile {
  id: string; // Generated UUID
  originalName: string; // User's filename
  mimeType: string;
  size: number; // original byte size, pre-encryption

  // Processing results
  processed: {
    thumbnail?: ArrayBuffer; // For images/PDFs/videos
    extractedText?: string; // For searchable docs
    metadata?: Record<string, any>; // EXIF, frontmatter, etc.
  };

  // Encryption
  encrypted: {
    content: ArrayBuffer; // Encrypted file content
    iv: Uint8Array; // per-file AES-GCM IV, required for decryption
    keyId: string; // Reference to encryption key
  };

  // User choices
  sharing: {
    localOnly: boolean; // Default true
    sharedToBoard?: string; // Board ID if shared
    backedUpToR2?: boolean;
  };

  // Timestamps
  importedAt: number;
  lastAccessedAt: number;
}
|
|
||||||
```
|
|
||||||
|
|
||||||
## Implementation
|
|
||||||
|
|
||||||
### 1. File Input Component
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
import React, { useCallback, useState } from 'react';
|
|
||||||
|
|
||||||
interface FileUploadProps {
|
|
||||||
onFilesSelected: (files: File[]) => void;
|
|
||||||
maxFileSize?: number; // bytes
|
|
||||||
maxFiles?: number;
|
|
||||||
acceptedTypes?: string[];
|
|
||||||
}
|
|
||||||
|
|
||||||
export function FileUploadZone({
|
|
||||||
onFilesSelected,
|
|
||||||
maxFileSize = 100 * 1024 * 1024, // 100MB default
|
|
||||||
maxFiles = 50,
|
|
||||||
acceptedTypes
|
|
||||||
}: FileUploadProps) {
|
|
||||||
const [isDragging, setIsDragging] = useState(false);
|
|
||||||
const [errors, setErrors] = useState<string[]>([]);
|
|
||||||
|
|
||||||
const handleDrop = useCallback((e: React.DragEvent) => {
|
|
||||||
e.preventDefault();
|
|
||||||
setIsDragging(false);
|
|
||||||
|
|
||||||
const files = Array.from(e.dataTransfer.files);
|
|
||||||
validateAndProcess(files);
|
|
||||||
}, []);
|
|
||||||
|
|
||||||
const handleFileInput = useCallback((e: React.ChangeEvent<HTMLInputElement>) => {
|
|
||||||
const files = Array.from(e.target.files || []);
|
|
||||||
validateAndProcess(files);
|
|
||||||
}, []);
|
|
||||||
|
|
||||||
const validateAndProcess = (files: File[]) => {
|
|
||||||
const errors: string[] = [];
|
|
||||||
const validFiles: File[] = [];
|
|
||||||
|
|
||||||
for (const file of files.slice(0, maxFiles)) {
|
|
||||||
if (file.size > maxFileSize) {
|
|
||||||
errors.push(`${file.name}: exceeds ${maxFileSize / 1024 / 1024}MB limit`);
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (acceptedTypes && !acceptedTypes.some(t => file.type.match(t))) {
|
|
||||||
errors.push(`${file.name}: unsupported file type`);
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
validFiles.push(file);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (files.length > maxFiles) {
|
|
||||||
errors.push(`Only first ${maxFiles} files will be imported`);
|
|
||||||
}
|
|
||||||
|
|
||||||
setErrors(errors);
|
|
||||||
if (validFiles.length > 0) {
|
|
||||||
onFilesSelected(validFiles);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
return (
|
|
||||||
<div
|
|
||||||
onDrop={handleDrop}
|
|
||||||
onDragOver={(e) => { e.preventDefault(); setIsDragging(true); }}
|
|
||||||
onDragLeave={() => setIsDragging(false)}
|
|
||||||
className={`upload-zone ${isDragging ? 'dragging' : ''}`}
|
|
||||||
>
|
|
||||||
<input
|
|
||||||
type="file"
|
|
||||||
multiple
|
|
||||||
onChange={handleFileInput}
|
|
||||||
accept={acceptedTypes?.join(',')}
|
|
||||||
id="file-upload"
|
|
||||||
hidden
|
|
||||||
/>
|
|
||||||
<label htmlFor="file-upload">
|
|
||||||
<span className="upload-icon">📁</span>
|
|
||||||
<span>Drop files here or click to browse</span>
|
|
||||||
<span className="upload-hint">
|
|
||||||
Images, PDFs, Documents, Text files
|
|
||||||
</span>
|
|
||||||
</label>
|
|
||||||
|
|
||||||
{errors.length > 0 && (
|
|
||||||
<div className="upload-errors">
|
|
||||||
{errors.map((err, i) => <div key={i}>{err}</div>)}
|
|
||||||
</div>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### 2. File Processing Pipeline
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
interface ProcessedFile {
|
|
||||||
file: File;
|
|
||||||
thumbnail?: Blob;
|
|
||||||
extractedText?: string;
|
|
||||||
metadata?: Record<string, any>;
|
|
||||||
}
|
|
||||||
|
|
||||||
class FileProcessor {
|
|
||||||
|
|
||||||
async process(file: File): Promise<ProcessedFile> {
|
|
||||||
const result: ProcessedFile = { file };
|
|
||||||
|
|
||||||
// Route based on MIME type
|
|
||||||
if (file.type.startsWith('image/')) {
|
|
||||||
return this.processImage(file, result);
|
|
||||||
} else if (file.type === 'application/pdf') {
|
|
||||||
return this.processPDF(file, result);
|
|
||||||
} else if (file.type.startsWith('text/') || this.isTextFile(file)) {
|
|
||||||
return this.processText(file, result);
|
|
||||||
} else if (file.type.startsWith('video/')) {
|
|
||||||
return this.processVideo(file, result);
|
|
||||||
} else if (file.type.startsWith('audio/')) {
|
|
||||||
return this.processAudio(file, result);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Default: store as-is
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
private async processImage(file: File, result: ProcessedFile): Promise<ProcessedFile> {
|
|
||||||
// Generate thumbnail
|
|
||||||
const img = await createImageBitmap(file);
|
|
||||||
const canvas = new OffscreenCanvas(200, 200);
|
|
||||||
const ctx = canvas.getContext('2d')!;
|
|
||||||
|
|
||||||
// Calculate aspect-ratio preserving dimensions
|
|
||||||
const scale = Math.min(200 / img.width, 200 / img.height);
|
|
||||||
const w = img.width * scale;
|
|
||||||
const h = img.height * scale;
|
|
||||||
|
|
||||||
ctx.drawImage(img, (200 - w) / 2, (200 - h) / 2, w, h);
|
|
||||||
result.thumbnail = await canvas.convertToBlob({ type: 'image/webp', quality: 0.8 });
|
|
||||||
|
|
||||||
// Extract EXIF if available
|
|
||||||
if (file.type === 'image/jpeg') {
|
|
||||||
result.metadata = await this.extractExif(file);
|
|
||||||
}
|
|
||||||
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
private async processPDF(file: File, result: ProcessedFile): Promise<ProcessedFile> {
|
|
||||||
// Use pdf.js for text extraction and thumbnail
|
|
||||||
const pdfjsLib = await import('pdfjs-dist');
|
|
||||||
const arrayBuffer = await file.arrayBuffer();
|
|
||||||
const pdf = await pdfjsLib.getDocument({ data: arrayBuffer }).promise;
|
|
||||||
|
|
||||||
// Get first page as thumbnail
|
|
||||||
const page = await pdf.getPage(1);
|
|
||||||
const viewport = page.getViewport({ scale: 0.5 });
|
|
||||||
const canvas = new OffscreenCanvas(viewport.width, viewport.height);
|
|
||||||
const ctx = canvas.getContext('2d')!;
|
|
||||||
|
|
||||||
await page.render({ canvasContext: ctx, viewport }).promise;
|
|
||||||
result.thumbnail = await canvas.convertToBlob({ type: 'image/webp' });
|
|
||||||
|
|
||||||
// Extract text from all pages
|
|
||||||
let text = '';
|
|
||||||
for (let i = 1; i <= pdf.numPages; i++) {
|
|
||||||
const page = await pdf.getPage(i);
|
|
||||||
const content = await page.getTextContent();
|
|
||||||
text += content.items.map((item: any) => item.str).join(' ') + '\n';
|
|
||||||
}
|
|
||||||
result.extractedText = text;
|
|
||||||
|
|
||||||
result.metadata = { pageCount: pdf.numPages };
|
|
||||||
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
private async processText(file: File, result: ProcessedFile): Promise<ProcessedFile> {
|
|
||||||
result.extractedText = await file.text();
|
|
||||||
|
|
||||||
// Parse markdown frontmatter if applicable
|
|
||||||
if (file.name.endsWith('.md')) {
|
|
||||||
const frontmatter = this.parseFrontmatter(result.extractedText);
|
|
||||||
if (frontmatter) {
|
|
||||||
result.metadata = frontmatter;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
private async processVideo(file: File, result: ProcessedFile): Promise<ProcessedFile> {
|
|
||||||
// Generate thumbnail from first frame
|
|
||||||
const video = document.createElement('video');
|
|
||||||
video.preload = 'metadata';
|
|
||||||
video.src = URL.createObjectURL(file);
|
|
||||||
|
|
||||||
await new Promise(resolve => video.addEventListener('loadedmetadata', resolve));
|
|
||||||
video.currentTime = 1; // First second
|
|
||||||
await new Promise(resolve => video.addEventListener('seeked', resolve));
|
|
||||||
|
|
||||||
const canvas = new OffscreenCanvas(200, 200);
|
|
||||||
const ctx = canvas.getContext('2d')!;
|
|
||||||
const scale = Math.min(200 / video.videoWidth, 200 / video.videoHeight);
|
|
||||||
ctx.drawImage(video, 0, 0, video.videoWidth * scale, video.videoHeight * scale);
|
|
||||||
|
|
||||||
result.thumbnail = await canvas.convertToBlob({ type: 'image/webp' });
|
|
||||||
result.metadata = {
|
|
||||||
duration: video.duration,
|
|
||||||
width: video.videoWidth,
|
|
||||||
height: video.videoHeight
|
|
||||||
};
|
|
||||||
|
|
||||||
URL.revokeObjectURL(video.src);
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
private async processAudio(file: File, result: ProcessedFile): Promise<ProcessedFile> {
|
|
||||||
// Extract duration and basic metadata
|
|
||||||
const audio = document.createElement('audio');
|
|
||||||
audio.src = URL.createObjectURL(file);
|
|
||||||
|
|
||||||
await new Promise(resolve => audio.addEventListener('loadedmetadata', resolve));
|
|
||||||
|
|
||||||
result.metadata = {
|
|
||||||
duration: audio.duration
|
|
||||||
};
|
|
||||||
|
|
||||||
URL.revokeObjectURL(audio.src);
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
private isTextFile(file: File): boolean {
|
|
||||||
const textExtensions = ['.md', '.txt', '.json', '.csv', '.yaml', '.yml', '.xml', '.html', '.css', '.js', '.ts', '.py', '.sh'];
|
|
||||||
return textExtensions.some(ext => file.name.toLowerCase().endsWith(ext));
|
|
||||||
}
|
|
||||||
|
|
||||||
private parseFrontmatter(content: string): Record<string, any> | null {
|
|
||||||
const match = content.match(/^---\n([\s\S]*?)\n---/);
|
|
||||||
if (!match) return null;
|
|
||||||
|
|
||||||
try {
|
|
||||||
// Simple YAML-like parsing (or use a proper YAML parser)
|
|
||||||
const lines = match[1].split('\n');
|
|
||||||
const result: Record<string, any> = {};
|
|
||||||
for (const line of lines) {
|
|
||||||
const [key, ...valueParts] = line.split(':');
|
|
||||||
if (key && valueParts.length) {
|
|
||||||
result[key.trim()] = valueParts.join(':').trim();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return result;
|
|
||||||
} catch {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private async extractExif(file: File): Promise<Record<string, any>> {
|
|
||||||
// Would use exif-js or similar library
|
|
||||||
return {};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### 3. Encryption & Storage
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
class LocalFileStore {
|
|
||||||
private db: IDBDatabase;
|
|
||||||
private encryptionKey: CryptoKey;
|
|
||||||
|
|
||||||
async storeFile(processed: ProcessedFile, options: {
|
|
||||||
shareToBoard?: boolean;
|
|
||||||
} = {}): Promise<UploadedFile> {
|
|
||||||
const fileId = crypto.randomUUID();
|
|
||||||
|
|
||||||
// Read file content
|
|
||||||
const content = await processed.file.arrayBuffer();
|
|
||||||
|
|
||||||
// Encrypt content
|
|
||||||
const iv = crypto.getRandomValues(new Uint8Array(12));
|
|
||||||
const encryptedContent = await crypto.subtle.encrypt(
|
|
||||||
{ name: 'AES-GCM', iv },
|
|
||||||
this.encryptionKey,
|
|
||||||
content
|
|
||||||
);
|
|
||||||
|
|
||||||
// Encrypt thumbnail if present
|
|
||||||
let encryptedThumbnail: ArrayBuffer | undefined;
|
|
||||||
let thumbnailIv: Uint8Array | undefined;
|
|
||||||
if (processed.thumbnail) {
|
|
||||||
thumbnailIv = crypto.getRandomValues(new Uint8Array(12));
|
|
||||||
const thumbBuffer = await processed.thumbnail.arrayBuffer();
|
|
||||||
encryptedThumbnail = await crypto.subtle.encrypt(
|
|
||||||
{ name: 'AES-GCM', iv: thumbnailIv },
|
|
||||||
this.encryptionKey,
|
|
||||||
thumbBuffer
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
const uploadedFile: UploadedFile = {
|
|
||||||
id: fileId,
|
|
||||||
originalName: processed.file.name,
|
|
||||||
mimeType: processed.file.type,
|
|
||||||
size: processed.file.size,
|
|
||||||
processed: {
|
|
||||||
extractedText: processed.extractedText,
|
|
||||||
metadata: processed.metadata
|
|
||||||
},
|
|
||||||
encrypted: {
|
|
||||||
content: encryptedContent,
|
|
||||||
iv,
|
|
||||||
keyId: 'user-master-key'
|
|
||||||
},
|
|
||||||
sharing: {
|
|
||||||
localOnly: !options.shareToBoard,
|
|
||||||
sharedToBoard: options.shareToBoard ? getCurrentBoardId() : undefined
|
|
||||||
},
|
|
||||||
importedAt: Date.now(),
|
|
||||||
lastAccessedAt: Date.now()
|
|
||||||
};
|
|
||||||
|
|
||||||
// Store encrypted thumbnail separately (for faster listing)
|
|
||||||
if (encryptedThumbnail && thumbnailIv) {
|
|
||||||
await this.storeThumbnail(fileId, encryptedThumbnail, thumbnailIv);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Store to IndexedDB
|
|
||||||
const tx = this.db.transaction('files', 'readwrite');
|
|
||||||
tx.objectStore('files').put(uploadedFile);
|
|
||||||
|
|
||||||
return uploadedFile;
|
|
||||||
}
|
|
||||||
|
|
||||||
async getFile(fileId: string): Promise<{
|
|
||||||
file: UploadedFile;
|
|
||||||
decryptedContent: ArrayBuffer;
|
|
||||||
} | null> {
|
|
||||||
const tx = this.db.transaction('files', 'readonly');
|
|
||||||
const file = await new Promise<UploadedFile | undefined>(resolve => {
|
|
||||||
const req = tx.objectStore('files').get(fileId);
|
|
||||||
req.onsuccess = () => resolve(req.result);
|
|
||||||
});
|
|
||||||
|
|
||||||
if (!file) return null;
|
|
||||||
|
|
||||||
// Decrypt content
|
|
||||||
const decryptedContent = await crypto.subtle.decrypt(
|
|
||||||
{ name: 'AES-GCM', iv: file.encrypted.iv },
|
|
||||||
this.encryptionKey,
|
|
||||||
file.encrypted.content
|
|
||||||
);
|
|
||||||
|
|
||||||
return { file, decryptedContent };
|
|
||||||
}
|
|
||||||
|
|
||||||
async listFiles(options?: {
|
|
||||||
mimeTypeFilter?: string;
|
|
||||||
limit?: number;
|
|
||||||
offset?: number;
|
|
||||||
}): Promise<UploadedFile[]> {
|
|
||||||
const tx = this.db.transaction('files', 'readonly');
|
|
||||||
const store = tx.objectStore('files');
|
|
||||||
|
|
||||||
return new Promise(resolve => {
|
|
||||||
const files: UploadedFile[] = [];
|
|
||||||
const req = store.openCursor();
|
|
||||||
|
|
||||||
req.onsuccess = (e) => {
|
|
||||||
const cursor = (e.target as IDBRequest).result;
|
|
||||||
if (cursor) {
|
|
||||||
const file = cursor.value as UploadedFile;
|
|
||||||
|
|
||||||
// Filter by MIME type if specified
|
|
||||||
if (!options?.mimeTypeFilter || file.mimeType.startsWith(options.mimeTypeFilter)) {
|
|
||||||
files.push(file);
|
|
||||||
}
|
|
||||||
|
|
||||||
cursor.continue();
|
|
||||||
} else {
|
|
||||||
resolve(files);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### 4. IndexedDB Schema
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
const LOCAL_FILES_DB = 'canvas-local-files';
|
|
||||||
const DB_VERSION = 1;
|
|
||||||
|
|
||||||
async function initLocalFilesDB(): Promise<IDBDatabase> {
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
const request = indexedDB.open(LOCAL_FILES_DB, DB_VERSION);
|
|
||||||
|
|
||||||
request.onerror = () => reject(request.error);
|
|
||||||
request.onsuccess = () => resolve(request.result);
|
|
||||||
|
|
||||||
request.onupgradeneeded = (event) => {
|
|
||||||
const db = (event.target as IDBOpenDBRequest).result;
|
|
||||||
|
|
||||||
// Main files store
|
|
||||||
if (!db.objectStoreNames.contains('files')) {
|
|
||||||
const store = db.createObjectStore('files', { keyPath: 'id' });
|
|
||||||
store.createIndex('mimeType', 'mimeType', { unique: false });
|
|
||||||
store.createIndex('importedAt', 'importedAt', { unique: false });
|
|
||||||
store.createIndex('originalName', 'originalName', { unique: false });
|
|
||||||
store.createIndex('sharedToBoard', 'sharing.sharedToBoard', { unique: false });
|
|
||||||
}
|
|
||||||
|
|
||||||
// Thumbnails store (separate for faster listing)
|
|
||||||
if (!db.objectStoreNames.contains('thumbnails')) {
|
|
||||||
db.createObjectStore('thumbnails', { keyPath: 'fileId' });
|
|
||||||
}
|
|
||||||
|
|
||||||
// Search index (encrypted full-text search)
|
|
||||||
if (!db.objectStoreNames.contains('searchIndex')) {
|
|
||||||
const searchStore = db.createObjectStore('searchIndex', { keyPath: 'fileId' });
|
|
||||||
searchStore.createIndex('tokens', 'tokens', { unique: false, multiEntry: true });
|
|
||||||
}
|
|
||||||
};
|
|
||||||
});
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
## UI Components
|
|
||||||
|
|
||||||
### Import Dialog
|
|
||||||
|
|
||||||
```tsx
|
|
||||||
/**
 * Modal that lets the user drop/select files, preview the processed list,
 * and import everything into the encrypted local store with a progress bar.
 */
function ImportFilesDialog({ isOpen, onClose }: { isOpen: boolean; onClose: () => void }) {
  const [selectedFiles, setSelectedFiles] = useState<ProcessedFile[]>([]);
  const [importing, setImporting] = useState(false);
  const [progress, setProgress] = useState(0);
  const fileStore = useLocalFileStore();

  // Run each dropped file through the processing pipeline before listing it.
  const handleFilesSelected = async (files: File[]) => {
    const processor = new FileProcessor();
    const processed: ProcessedFile[] = [];

    for (const file of files) {
      processed.push(await processor.process(file));
    }

    setSelectedFiles(prev => [...prev, ...processed]);
  };

  const handleImport = async () => {
    setImporting(true);
    setProgress(0); // fix: reset leftover progress from a previous run

    try {
      for (let i = 0; i < selectedFiles.length; i++) {
        await fileStore.storeFile(selectedFiles[i]);
        setProgress((i + 1) / selectedFiles.length * 100);
      }
    } finally {
      // Fix: a failed storeFile no longer leaves the dialog stuck in
      // the "importing" state with the button disabled.
      setImporting(false);
    }

    onClose();
  };

  return (
    <Dialog open={isOpen} onClose={onClose}>
      <DialogTitle>Import Files</DialogTitle>

      <FileUploadZone onFilesSelected={handleFilesSelected} />

      {selectedFiles.length > 0 && (
        <div className="file-list">
          {selectedFiles.map((pf, i) => (
            <FilePreviewRow
              key={i}
              file={pf}
              onRemove={() => setSelectedFiles(prev => prev.filter((_, j) => j !== i))}
            />
          ))}
        </div>
      )}

      {importing && (
        <progress value={progress} max={100} />
      )}

      <DialogActions>
        <button onClick={onClose}>Cancel</button>
        <button
          onClick={handleImport}
          disabled={selectedFiles.length === 0 || importing}
        >
          Import {selectedFiles.length} files
        </button>
      </DialogActions>
    </Dialog>
  );
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### File Browser Panel
|
|
||||||
|
|
||||||
```tsx
|
|
||||||
/**
 * Side panel that lists locally stored files, filterable by broad type,
 * each card draggable onto the canvas.
 */
function LocalFilesBrowser() {
  const [files, setFiles] = useState<UploadedFile[]>([]);
  const [filter, setFilter] = useState<string>('all');
  const fileStore = useLocalFileStore();

  useEffect(() => {
    // Fix: ignore stale responses — if the filter changes while a
    // listFiles() call is still in flight, the late result no longer
    // overwrites the fresher one.
    let cancelled = false;
    const mimeFilter = filter === 'all' ? undefined : filter;
    fileStore.listFiles({ mimeTypeFilter: mimeFilter }).then(result => {
      if (!cancelled) setFiles(result);
    });
    return () => { cancelled = true; };
  }, [filter]);

  const handleDragToCanvas = (file: UploadedFile) => {
    // Create a shape from the file and add to canvas
  };

  return (
    <div className="local-files-browser">
      <div className="filter-bar">
        <button onClick={() => setFilter('all')}>All</button>
        <button onClick={() => setFilter('image/')}>Images</button>
        <button onClick={() => setFilter('application/pdf')}>PDFs</button>
        <button onClick={() => setFilter('text/')}>Documents</button>
      </div>

      <div className="files-grid">
        {files.map(file => (
          <FileCard
            key={file.id}
            file={file}
            onDragStart={() => handleDragToCanvas(file)}
          />
        ))}
      </div>
    </div>
  );
}
|
|
||||||
```
|
|
||||||
|
|
||||||
## Canvas Integration
|
|
||||||
|
|
||||||
### Drag Files to Canvas
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
// When user drags a local file onto the canvas
|
|
||||||
async function createShapeFromLocalFile(
|
|
||||||
file: UploadedFile,
|
|
||||||
position: { x: number; y: number },
|
|
||||||
editor: Editor
|
|
||||||
): Promise<TLShapeId> {
|
|
||||||
const fileStore = getLocalFileStore();
|
|
||||||
const { decryptedContent } = await fileStore.getFile(file.id);
|
|
||||||
|
|
||||||
if (file.mimeType.startsWith('image/')) {
|
|
||||||
// Create image shape
|
|
||||||
const blob = new Blob([decryptedContent], { type: file.mimeType });
|
|
||||||
const assetId = AssetRecordType.createId();
|
|
||||||
|
|
||||||
await editor.createAssets([{
|
|
||||||
id: assetId,
|
|
||||||
type: 'image',
|
|
||||||
typeName: 'asset',
|
|
||||||
props: {
|
|
||||||
name: file.originalName,
|
|
||||||
src: URL.createObjectURL(blob),
|
|
||||||
w: 400,
|
|
||||||
h: 300,
|
|
||||||
mimeType: file.mimeType,
|
|
||||||
isAnimated: file.mimeType === 'image/gif'
|
|
||||||
}
|
|
||||||
}]);
|
|
||||||
|
|
||||||
return editor.createShape({
|
|
||||||
type: 'image',
|
|
||||||
x: position.x,
|
|
||||||
y: position.y,
|
|
||||||
props: { assetId, w: 400, h: 300 }
|
|
||||||
}).id;
|
|
||||||
|
|
||||||
} else if (file.mimeType === 'application/pdf') {
|
|
||||||
// Create PDF embed or preview shape
|
|
||||||
return editor.createShape({
|
|
||||||
type: 'pdf-preview',
|
|
||||||
x: position.x,
|
|
||||||
y: position.y,
|
|
||||||
props: {
|
|
||||||
fileId: file.id,
|
|
||||||
name: file.originalName,
|
|
||||||
pageCount: file.processed.metadata?.pageCount
|
|
||||||
}
|
|
||||||
}).id;
|
|
||||||
|
|
||||||
} else if (file.mimeType.startsWith('text/') || file.originalName.endsWith('.md')) {
|
|
||||||
// Create note shape with content
|
|
||||||
const text = new TextDecoder().decode(decryptedContent);
|
|
||||||
return editor.createShape({
|
|
||||||
type: 'note',
|
|
||||||
x: position.x,
|
|
||||||
y: position.y,
|
|
||||||
props: {
|
|
||||||
text: text.slice(0, 1000), // Truncate for display
|
|
||||||
fileId: file.id,
|
|
||||||
fullContentAvailable: text.length > 1000
|
|
||||||
}
|
|
||||||
}).id;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Default: generic file card
|
|
||||||
return editor.createShape({
|
|
||||||
type: 'file-card',
|
|
||||||
x: position.x,
|
|
||||||
y: position.y,
|
|
||||||
props: {
|
|
||||||
fileId: file.id,
|
|
||||||
name: file.originalName,
|
|
||||||
size: file.size,
|
|
||||||
mimeType: file.mimeType
|
|
||||||
}
|
|
||||||
}).id;
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
## Storage Considerations
|
|
||||||
|
|
||||||
### Size Limits & Recommendations
|
|
||||||
|
|
||||||
| File Type | Max Recommended | Notes |
|
|
||||||
|-----------|----------------|-------|
|
|
||||||
| Images | 20MB each | Larger images get resized |
|
|
||||||
| PDFs | 50MB each | Text extracted for search |
|
|
||||||
| Videos | 100MB each | Store reference, thumbnail only |
|
|
||||||
| Audio | 50MB each | Store with waveform preview |
|
|
||||||
| Documents | 10MB each | Full content stored |
|
|
||||||
|
|
||||||
### Total Storage Budget
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
const STORAGE_CONFIG = {
|
|
||||||
// Soft warning at 500MB
|
|
||||||
warningThreshold: 500 * 1024 * 1024,
|
|
||||||
|
|
||||||
// Hard limit at 2GB (leaves room for other data)
|
|
||||||
maxStorage: 2 * 1024 * 1024 * 1024,
|
|
||||||
|
|
||||||
// Auto-cleanup: remove thumbnails for files not accessed in 30 days
|
|
||||||
thumbnailRetentionDays: 30
|
|
||||||
};
|
|
||||||
|
|
||||||
async function checkStorageQuota(): Promise<{
|
|
||||||
used: number;
|
|
||||||
available: number;
|
|
||||||
warning: boolean;
|
|
||||||
}> {
|
|
||||||
const estimate = await navigator.storage.estimate();
|
|
||||||
const used = estimate.usage || 0;
|
|
||||||
const quota = estimate.quota || 0;
|
|
||||||
|
|
||||||
return {
|
|
||||||
used,
|
|
||||||
available: Math.min(quota - used, STORAGE_CONFIG.maxStorage - used),
|
|
||||||
warning: used > STORAGE_CONFIG.warningThreshold
|
|
||||||
};
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
## Privacy Features
|
|
||||||
|
|
||||||
### Per-File Privacy Controls
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
interface FilePrivacySettings {
|
|
||||||
// Encryption is always on - this is about sharing
|
|
||||||
localOnly: boolean; // Never leaves browser
|
|
||||||
shareableToBoard: boolean; // Can be added to shared board
|
|
||||||
includeInR2Backup: boolean; // Include in cloud backup
|
|
||||||
|
|
||||||
// Metadata privacy
|
|
||||||
stripExif: boolean; // Remove location/camera data from images
|
|
||||||
anonymizeFilename: boolean; // Use generated name instead of original
|
|
||||||
}
|
|
||||||
|
|
||||||
const DEFAULT_PRIVACY: FilePrivacySettings = {
|
|
||||||
localOnly: true,
|
|
||||||
shareableToBoard: false,
|
|
||||||
includeInR2Backup: true,
|
|
||||||
stripExif: true,
|
|
||||||
anonymizeFilename: false
|
|
||||||
};
|
|
||||||
```
|
|
||||||
|
|
||||||
### Sharing Flow
|
|
||||||
|
|
||||||
```
|
|
||||||
┌─────────────────────────────────────────────────────────────────┐
|
|
||||||
│ User drags local file onto shared board │
|
|
||||||
├─────────────────────────────────────────────────────────────────┤
|
|
||||||
│ │
|
|
||||||
│ ⚠️ Share "meeting_notes.pdf" to this board? │
|
|
||||||
│ │
|
|
||||||
│ This file is currently private. Sharing it will: │
|
|
||||||
│ • Make it visible to all board members │
|
|
||||||
│ • Upload an encrypted copy to sync storage │
|
|
||||||
│ • Keep the original encrypted on your device │
|
|
||||||
│ │
|
|
||||||
│ [Keep Private] [Share to Board] │
|
|
||||||
│ │
|
|
||||||
└─────────────────────────────────────────────────────────────────┘
|
|
||||||
```
|
|
||||||
|
|
||||||
## Implementation Checklist
|
|
||||||
|
|
||||||
### Phase 1: Core Upload
|
|
||||||
- [ ] File drop zone component
|
|
||||||
- [ ] File type detection
|
|
||||||
- [ ] Image thumbnail generation
|
|
||||||
- [ ] PDF text extraction & thumbnail
|
|
||||||
- [ ] Encryption before storage
|
|
||||||
- [ ] IndexedDB schema & storage
|
|
||||||
|
|
||||||
### Phase 2: File Management
|
|
||||||
- [ ] File browser panel
|
|
||||||
- [ ] Filter by type
|
|
||||||
- [ ] Search within files
|
|
||||||
- [ ] Delete files
|
|
||||||
- [ ] Storage quota display
|
|
||||||
|
|
||||||
### Phase 3: Canvas Integration
|
|
||||||
- [ ] Drag files to canvas
|
|
||||||
- [ ] Image shape from file
|
|
||||||
- [ ] PDF preview shape
|
|
||||||
- [ ] Document/note shape
|
|
||||||
- [ ] Generic file card shape
|
|
||||||
|
|
||||||
### Phase 4: Sharing & Backup
|
|
||||||
- [ ] Share confirmation dialog
|
|
||||||
- [ ] Upload to Automerge sync
|
|
||||||
- [ ] Include in R2 backup
|
|
||||||
- [ ] Privacy settings per file
|
|
||||||
|
|
||||||
## Related Documents
|
|
||||||
|
|
||||||
- [Google Data Sovereignty](./GOOGLE_DATA_SOVEREIGNTY.md) - Same encryption model for Google imports
|
|
||||||
- [Offline Storage Feasibility](../OFFLINE_STORAGE_FEASIBILITY.md) - IndexedDB + Automerge foundation
|
|
||||||
|
|
@ -4,7 +4,7 @@ This document describes the complete WebCryptoAPI authentication system implemen
|
||||||
|
|
||||||
## Overview
|
## Overview
|
||||||
|
|
||||||
The WebCryptoAPI authentication system provides cryptographic authentication using ECDSA P-256 key pairs, challenge-response authentication, and secure key storage. This is the primary authentication mechanism for the application.
|
The WebCryptoAPI authentication system provides cryptographic authentication using ECDSA P-256 key pairs, challenge-response authentication, and secure key storage. It integrates with the existing ODD (Open Data Directory) framework while providing a fallback authentication mechanism.
|
||||||
|
|
||||||
## Architecture
|
## Architecture
|
||||||
|
|
||||||
|
|
@ -23,14 +23,13 @@ The WebCryptoAPI authentication system provides cryptographic authentication usi
|
||||||
- User registration and login
|
- User registration and login
|
||||||
- Credential verification
|
- Credential verification
|
||||||
|
|
||||||
3. **AuthService** (`src/lib/auth/authService.ts`)
|
3. **Enhanced AuthService** (`src/lib/auth/authService.ts`)
|
||||||
- Simplified authentication service
|
- Integrates crypto authentication with ODD
|
||||||
|
- Fallback mechanisms
|
||||||
- Session management
|
- Session management
|
||||||
- Integration with CryptoAuthService
|
|
||||||
|
|
||||||
4. **UI Components**
|
4. **UI Components**
|
||||||
- `CryptID.tsx` - Cryptographic authentication UI
|
- `CryptoLogin.tsx` - Cryptographic authentication UI
|
||||||
- `CryptoDebug.tsx` - Debug component for verification
|
|
||||||
- `CryptoTest.tsx` - Test component for verification
|
- `CryptoTest.tsx` - Test component for verification
|
||||||
|
|
||||||
## Features
|
## Features
|
||||||
|
|
@ -42,6 +41,7 @@ The WebCryptoAPI authentication system provides cryptographic authentication usi
|
||||||
- **Public Key Infrastructure**: Store and verify public keys
|
- **Public Key Infrastructure**: Store and verify public keys
|
||||||
- **Browser Support Detection**: Checks for WebCryptoAPI availability
|
- **Browser Support Detection**: Checks for WebCryptoAPI availability
|
||||||
- **Secure Context Validation**: Ensures HTTPS requirement
|
- **Secure Context Validation**: Ensures HTTPS requirement
|
||||||
|
- **Fallback Authentication**: Works with existing ODD system
|
||||||
- **Modern UI**: Responsive design with dark mode support
|
- **Modern UI**: Responsive design with dark mode support
|
||||||
- **Comprehensive Testing**: Test component for verification
|
- **Comprehensive Testing**: Test component for verification
|
||||||
|
|
||||||
|
|
@ -98,26 +98,26 @@ const isSecure = window.isSecureContext;
|
||||||
1. **Secure Context Requirement**: Only works over HTTPS
|
1. **Secure Context Requirement**: Only works over HTTPS
|
||||||
2. **ECDSA P-256**: Industry-standard elliptic curve
|
2. **ECDSA P-256**: Industry-standard elliptic curve
|
||||||
3. **Challenge-Response**: Prevents replay attacks
|
3. **Challenge-Response**: Prevents replay attacks
|
||||||
4. **Key Storage**: Public keys stored securely in localStorage
|
4. **Key Storage**: Public keys stored securely
|
||||||
5. **Input Validation**: Username format validation
|
5. **Input Validation**: Username format validation
|
||||||
6. **Error Handling**: Comprehensive error management
|
6. **Error Handling**: Comprehensive error management
|
||||||
|
|
||||||
### ⚠️ Security Notes
|
### ⚠️ Security Notes
|
||||||
|
|
||||||
1. **Private Key Storage**: Currently uses localStorage for demo purposes
|
1. **Private Key Storage**: Currently simplified for demo purposes
|
||||||
- In production, consider using Web Crypto API's non-extractable keys
|
- In production, use Web Crypto API's key storage
|
||||||
- Consider hardware security modules (HSM)
|
- Consider hardware security modules (HSM)
|
||||||
- Implement proper key derivation
|
- Implement proper key derivation
|
||||||
|
|
||||||
2. **Session Management**:
|
2. **Session Management**:
|
||||||
- Uses localStorage for session persistence
|
- Integrates with existing ODD session system
|
||||||
- Consider implementing JWT tokens for server-side verification
|
- Consider implementing JWT tokens
|
||||||
- Add session expiration and refresh logic
|
- Add session expiration
|
||||||
|
|
||||||
3. **Network Security**:
|
3. **Network Security**:
|
||||||
- All crypto operations happen client-side
|
- All crypto operations happen client-side
|
||||||
- No private keys transmitted over network
|
- No private keys transmitted over network
|
||||||
- Consider adding server-side signature verification
|
- Consider adding server-side verification
|
||||||
|
|
||||||
## Usage
|
## Usage
|
||||||
|
|
||||||
|
|
@ -146,22 +146,11 @@ import { useAuth } from './context/AuthContext';
|
||||||
|
|
||||||
const { login, register } = useAuth();
|
const { login, register } = useAuth();
|
||||||
|
|
||||||
// AuthService automatically uses crypto auth
|
// The AuthService automatically tries crypto auth first,
|
||||||
|
// then falls back to ODD authentication
|
||||||
const success = await login('username');
|
const success = await login('username');
|
||||||
```
|
```
|
||||||
|
|
||||||
### Using the CryptID Component
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
import CryptID from './components/auth/CryptID';
|
|
||||||
|
|
||||||
// Render the authentication component
|
|
||||||
<CryptID
|
|
||||||
onSuccess={() => console.log('Login successful')}
|
|
||||||
onCancel={() => console.log('Login cancelled')}
|
|
||||||
/>
|
|
||||||
```
|
|
||||||
|
|
||||||
### Testing the Implementation
|
### Testing the Implementation
|
||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
|
|
@ -179,18 +168,14 @@ src/
|
||||||
│ ├── auth/
|
│ ├── auth/
|
||||||
│ │ ├── crypto.ts # WebCryptoAPI wrapper
|
│ │ ├── crypto.ts # WebCryptoAPI wrapper
|
||||||
│ │ ├── cryptoAuthService.ts # High-level auth service
|
│ │ ├── cryptoAuthService.ts # High-level auth service
|
||||||
│ │ ├── authService.ts # Simplified auth service
|
│ │ ├── authService.ts # Enhanced auth service
|
||||||
│ │ ├── sessionPersistence.ts # Session storage utilities
|
│ │ └── account.ts # User account management
|
||||||
│ │ └── types.ts # TypeScript types
|
|
||||||
│ └── utils/
|
│ └── utils/
|
||||||
│ └── browser.ts # Browser support detection
|
│ └── browser.ts # Browser support detection
|
||||||
├── components/
|
├── components/
|
||||||
│ └── auth/
|
│ └── auth/
|
||||||
│ ├── CryptID.tsx # Main crypto auth UI
|
│ ├── CryptoLogin.tsx # Crypto auth UI
|
||||||
│ ├── CryptoDebug.tsx # Debug component
|
|
||||||
│ └── CryptoTest.tsx # Test component
|
│ └── CryptoTest.tsx # Test component
|
||||||
├── context/
|
|
||||||
│ └── AuthContext.tsx # React context for auth state
|
|
||||||
└── css/
|
└── css/
|
||||||
└── crypto-auth.css # Styles for crypto components
|
└── crypto-auth.css # Styles for crypto components
|
||||||
```
|
```
|
||||||
|
|
@ -199,20 +184,13 @@ src/
|
||||||
|
|
||||||
### Required Packages
|
### Required Packages
|
||||||
- `one-webcrypto`: WebCryptoAPI polyfill (^1.0.3)
|
- `one-webcrypto`: WebCryptoAPI polyfill (^1.0.3)
|
||||||
|
- `@oddjs/odd`: Open Data Directory framework (^0.37.2)
|
||||||
|
|
||||||
### Browser APIs Used
|
### Browser APIs Used
|
||||||
- `window.crypto.subtle`: WebCryptoAPI
|
- `window.crypto.subtle`: WebCryptoAPI
|
||||||
- `window.localStorage`: Key and session storage
|
- `window.localStorage`: Key storage
|
||||||
- `window.isSecureContext`: Security context check
|
- `window.isSecureContext`: Security context check
|
||||||
|
|
||||||
## Storage
|
|
||||||
|
|
||||||
### localStorage Keys Used
|
|
||||||
- `registeredUsers`: Array of registered usernames
|
|
||||||
- `${username}_publicKey`: User's public key (Base64)
|
|
||||||
- `${username}_authData`: Authentication data (challenge, signature, timestamp)
|
|
||||||
- `session`: Current user session data
|
|
||||||
|
|
||||||
## Testing
|
## Testing
|
||||||
|
|
||||||
### Manual Testing
|
### Manual Testing
|
||||||
|
|
@ -230,7 +208,6 @@ src/
|
||||||
- [x] User registration
|
- [x] User registration
|
||||||
- [x] User login
|
- [x] User login
|
||||||
- [x] Credential verification
|
- [x] Credential verification
|
||||||
- [x] Session persistence
|
|
||||||
|
|
||||||
## Troubleshooting
|
## Troubleshooting
|
||||||
|
|
||||||
|
|
@ -251,13 +228,13 @@ src/
|
||||||
- Try refreshing the page
|
- Try refreshing the page
|
||||||
|
|
||||||
4. **"Authentication failed"**
|
4. **"Authentication failed"**
|
||||||
- Verify user exists in localStorage
|
- Verify user exists
|
||||||
- Check stored credentials
|
- Check stored credentials
|
||||||
- Clear browser data and retry
|
- Clear browser data and retry
|
||||||
|
|
||||||
### Debug Mode
|
### Debug Mode
|
||||||
|
|
||||||
Enable debug logging by opening the browser console:
|
Enable debug logging by setting:
|
||||||
```typescript
|
```typescript
|
||||||
localStorage.setItem('debug_crypto', 'true');
|
localStorage.setItem('debug_crypto', 'true');
|
||||||
```
|
```
|
||||||
|
|
@ -265,7 +242,7 @@ localStorage.setItem('debug_crypto', 'true');
|
||||||
## Future Enhancements
|
## Future Enhancements
|
||||||
|
|
||||||
### Planned Improvements
|
### Planned Improvements
|
||||||
1. **Enhanced Key Storage**: Use Web Crypto API's non-extractable keys
|
1. **Enhanced Key Storage**: Use Web Crypto API's key storage
|
||||||
2. **Server-Side Verification**: Add server-side signature verification
|
2. **Server-Side Verification**: Add server-side signature verification
|
||||||
3. **Multi-Factor Authentication**: Add additional authentication factors
|
3. **Multi-Factor Authentication**: Add additional authentication factors
|
||||||
4. **Key Rotation**: Implement automatic key rotation
|
4. **Key Rotation**: Implement automatic key rotation
|
||||||
|
|
@ -277,15 +254,6 @@ localStorage.setItem('debug_crypto', 'true');
|
||||||
3. **Post-Quantum Cryptography**: Prepare for quantum threats
|
3. **Post-Quantum Cryptography**: Prepare for quantum threats
|
||||||
4. **Biometric Integration**: Add biometric authentication
|
4. **Biometric Integration**: Add biometric authentication
|
||||||
|
|
||||||
## Integration with Automerge Sync
|
|
||||||
|
|
||||||
The authentication system works seamlessly with the Automerge-based real-time collaboration:
|
|
||||||
|
|
||||||
- **User Identification**: Each user is identified by their username in Automerge
|
|
||||||
- **Session Management**: Sessions persist across page reloads via localStorage
|
|
||||||
- **Collaboration**: Authenticated users can join shared canvas rooms
|
|
||||||
- **Privacy**: Only authenticated users can access canvas data
|
|
||||||
|
|
||||||
## Contributing
|
## Contributing
|
||||||
|
|
||||||
When contributing to the WebCryptoAPI authentication system:
|
When contributing to the WebCryptoAPI authentication system:
|
||||||
|
|
|
||||||
20
index.html
20
index.html
|
|
@ -4,42 +4,32 @@
|
||||||
<head>
|
<head>
|
||||||
<title>Jeff Emmett</title>
|
<title>Jeff Emmett</title>
|
||||||
<meta charset="UTF-8" />
|
<meta charset="UTF-8" />
|
||||||
<link rel="icon" href="data:image/svg+xml,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 100 100'><text y='.9em' font-size='90'>🍄</text></svg>" />
|
|
||||||
<link rel="apple-touch-icon" href="/pwa-192x192.svg" />
|
|
||||||
<link rel="manifest" href="/manifest.webmanifest" />
|
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no">
|
<meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no">
|
||||||
<meta http-equiv="Permissions-Policy" content="midi=*, microphone=*, camera=*, autoplay=*">
|
<meta http-equiv="Permissions-Policy" content="midi=*, microphone=*, camera=*, autoplay=*">
|
||||||
<!-- Preconnect to critical origins for faster loading -->
|
|
||||||
<link rel="preconnect" href="https://fonts.googleapis.com" />
|
<link rel="preconnect" href="https://fonts.googleapis.com" />
|
||||||
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin />
|
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin />
|
||||||
<link rel="dns-prefetch" href="https://jeffemmett-canvas.jeffemmett.workers.dev" />
|
|
||||||
<link rel="dns-prefetch" href="https://jeffemmett-canvas-dev.jeffemmett.workers.dev" />
|
|
||||||
<link rel="preconnect" href="https://jeffemmett-canvas.jeffemmett.workers.dev" crossorigin />
|
|
||||||
<link rel="preconnect" href="https://jeffemmett-canvas-dev.jeffemmett.workers.dev" crossorigin />
|
|
||||||
<link
|
<link
|
||||||
href="https://fonts.googleapis.com/css2?family=Recursive:slnt,wght,CASL,CRSV,MONO@-15..0,300..1000,0..1,0..1,0..1&display=swap"
|
href="https://fonts.googleapis.com/css2?family=Recursive:slnt,wght,CASL,CRSV,MONO@-15..0,300..1000,0..1,0..1,0..1&display=swap"
|
||||||
rel="stylesheet">
|
rel="stylesheet">
|
||||||
|
|
||||||
<!-- Social Meta Tags -->
|
<!-- Social Meta Tags -->
|
||||||
<meta name="description"
|
<meta name="description"
|
||||||
content="Exploring mycoeconomics, token engineering, psilo-cybernetics, zero-knowledge local-first systems, and institutional neuroplasticity. Research at the intersection of regenerative systems, crypto commons, and emancipatory technology.">
|
content="My research investigates the intersection of computing, human-system interfaces, and emancipatory politics. I am interested in the potential of computing as a medium for thought, as a tool for collective action, and as a means of emancipation.">
|
||||||
|
|
||||||
<meta property="og:url" content="https://jeffemmett.com">
|
<meta property="og:url" content="https://jeffemmett.com">
|
||||||
<meta property="og:type" content="website">
|
<meta property="og:type" content="website">
|
||||||
<meta property="og:title" content="Jeff Emmett">
|
<meta property="og:title" content="Jeff Emmett">
|
||||||
<meta property="og:description"
|
<meta property="og:description"
|
||||||
content="Exploring mycoeconomics, token engineering, psilo-cybernetics, zero-knowledge local-first systems, and institutional neuroplasticity. Research at the intersection of regenerative systems, crypto commons, and emancipatory technology.">
|
content="My research doesn't investigate the intersection of computing, human-system interfaces, and emancipatory politics. I am interested in the potential of computing as a medium for thought, as a tool for collective action, and as a means of emancipation.">
|
||||||
<meta property="og:image" content="https://jeffemmett.com/og-image.jpg">
|
<meta property="og:image" content="/website-embed.png">
|
||||||
<meta property="og:image:width" content="1200">
|
|
||||||
<meta property="og:image:height" content="630">
|
|
||||||
|
|
||||||
<meta name="twitter:card" content="summary_large_image">
|
<meta name="twitter:card" content="summary_large_image">
|
||||||
<meta property="twitter:domain" content="jeffemmett.com">
|
<meta property="twitter:domain" content="jeffemmett.com">
|
||||||
<meta property="twitter:url" content="https://jeffemmett.com">
|
<meta property="twitter:url" content="https://jeffemmett.com">
|
||||||
<meta name="twitter:title" content="Jeff Emmett">
|
<meta name="twitter:title" content="Jeff Emmett">
|
||||||
<meta name="twitter:description"
|
<meta name="twitter:description"
|
||||||
content="Exploring mycoeconomics, token engineering, psilo-cybernetics, zero-knowledge local-first systems, and institutional neuroplasticity. Research at the intersection of regenerative systems, crypto commons, and emancipatory technology.">
|
content="My research doesn't investigate the intersection of computing, human-system interfaces, and emancipatory politics. I am interested in the potential of computing as a medium for thought, as a tool for collective action, and as a means of emancipation.">
|
||||||
<meta name="twitter:image" content="https://jeffemmett.com/og-image.jpg">
|
<meta name="twitter:image" content="/website-embed.png">
|
||||||
|
|
||||||
<!-- Analytics -->
|
<!-- Analytics -->
|
||||||
<script data-goatcounter="https://jeff.goatcounter.com/count" async src="//gc.zgo.at/count.js"></script>
|
<script data-goatcounter="https://jeff.goatcounter.com/count" async src="//gc.zgo.at/count.js"></script>
|
||||||
|
|
|
||||||
|
|
@ -1,8 +0,0 @@
|
||||||
node_modules
|
|
||||||
packages/*/node_modules
|
|
||||||
packages/*/dist
|
|
||||||
*.log
|
|
||||||
.git
|
|
||||||
.gitignore
|
|
||||||
README.md
|
|
||||||
infrastructure/
|
|
||||||
|
|
@ -1,35 +0,0 @@
|
||||||
# Dependencies
|
|
||||||
node_modules/
|
|
||||||
package-lock.json
|
|
||||||
|
|
||||||
# Build outputs
|
|
||||||
dist/
|
|
||||||
*.tsbuildinfo
|
|
||||||
|
|
||||||
# Logs
|
|
||||||
logs/
|
|
||||||
*.log
|
|
||||||
npm-debug.log*
|
|
||||||
yarn-debug.log*
|
|
||||||
yarn-error.log*
|
|
||||||
pm2.log
|
|
||||||
|
|
||||||
# Environment variables
|
|
||||||
.env
|
|
||||||
.env.local
|
|
||||||
.env.*.local
|
|
||||||
|
|
||||||
# IDE
|
|
||||||
.vscode/
|
|
||||||
.idea/
|
|
||||||
*.swp
|
|
||||||
*.swo
|
|
||||||
*~
|
|
||||||
|
|
||||||
# OS
|
|
||||||
.DS_Store
|
|
||||||
Thumbs.db
|
|
||||||
|
|
||||||
# PM2
|
|
||||||
ecosystem.config.js
|
|
||||||
.pm2/
|
|
||||||
|
|
@ -1,32 +0,0 @@
|
||||||
# mulTmux Server Dockerfile
|
|
||||||
FROM node:20-slim
|
|
||||||
|
|
||||||
# Install tmux and build dependencies for node-pty
|
|
||||||
RUN apt-get update && apt-get install -y \
|
|
||||||
tmux \
|
|
||||||
python3 \
|
|
||||||
make \
|
|
||||||
g++ \
|
|
||||||
&& rm -rf /var/lib/apt/lists/*
|
|
||||||
|
|
||||||
WORKDIR /app
|
|
||||||
|
|
||||||
# Copy workspace root files
|
|
||||||
COPY package.json ./
|
|
||||||
COPY tsconfig.json ./
|
|
||||||
|
|
||||||
# Copy packages
|
|
||||||
COPY packages/server ./packages/server
|
|
||||||
COPY packages/cli ./packages/cli
|
|
||||||
|
|
||||||
# Install dependencies (including node-pty native compilation)
|
|
||||||
RUN npm install --workspaces
|
|
||||||
|
|
||||||
# Build TypeScript
|
|
||||||
RUN npm run build
|
|
||||||
|
|
||||||
# Expose port
|
|
||||||
EXPOSE 3002
|
|
||||||
|
|
||||||
# Run the server
|
|
||||||
CMD ["node", "packages/server/dist/index.js"]
|
|
||||||
|
|
@ -1,240 +0,0 @@
|
||||||
# mulTmux
|
|
||||||
|
|
||||||
A collaborative terminal tool that lets multiple users interact with the same tmux session in real-time.
|
|
||||||
|
|
||||||
## Features
|
|
||||||
|
|
||||||
- **Real-time Collaboration**: Multiple users can connect to the same terminal session
|
|
||||||
- **tmux Backend**: Leverages tmux for robust terminal multiplexing
|
|
||||||
- **Token-based Auth**: Secure invite links with expiration
|
|
||||||
- **Presence Indicators**: See who's connected to your session
|
|
||||||
- **Low Resource Usage**: ~200-300MB RAM for typical usage
|
|
||||||
- **Easy Deployment**: Works alongside existing services on your server
|
|
||||||
|
|
||||||
## Architecture
|
|
||||||
|
|
||||||
```
|
|
||||||
┌─────────────┐ ┌──────────────────┐
|
|
||||||
│ Client │ ──── WebSocket ────────> │ Server │
|
|
||||||
│ (CLI) │ (token auth) │ │
|
|
||||||
└─────────────┘ │ ┌────────────┐ │
|
|
||||||
│ │ Node.js │ │
|
|
||||||
┌─────────────┐ │ │ Backend │ │
|
|
||||||
│ Client 2 │ ──── Invite Link ──────> │ └─────┬──────┘ │
|
|
||||||
│ (CLI) │ │ │ │
|
|
||||||
└─────────────┘ │ ┌─────▼──────┐ │
|
|
||||||
│ │ tmux │ │
|
|
||||||
│ │ Sessions │ │
|
|
||||||
│ └────────────┘ │
|
|
||||||
└──────────────────┘
|
|
||||||
```
|
|
||||||
|
|
||||||
## Installation
|
|
||||||
|
|
||||||
### Server Setup
|
|
||||||
|
|
||||||
1. **Deploy to your AI server:**
|
|
||||||
```bash
|
|
||||||
cd multmux
|
|
||||||
chmod +x infrastructure/deploy.sh
|
|
||||||
./infrastructure/deploy.sh
|
|
||||||
```
|
|
||||||
|
|
||||||
This will:
|
|
||||||
- Install tmux if needed
|
|
||||||
- Build the server
|
|
||||||
- Set up PM2 for process management
|
|
||||||
- Start the server
|
|
||||||
|
|
||||||
2. **(Optional) Set up nginx reverse proxy:**
|
|
||||||
```bash
|
|
||||||
sudo cp infrastructure/nginx.conf /etc/nginx/sites-available/multmux
|
|
||||||
sudo ln -s /etc/nginx/sites-available/multmux /etc/nginx/sites-enabled/
|
|
||||||
# Edit the file to set your domain
|
|
||||||
sudo nano /etc/nginx/sites-available/multmux
|
|
||||||
sudo nginx -t
|
|
||||||
sudo systemctl reload nginx
|
|
||||||
```
|
|
||||||
|
|
||||||
### CLI Installation
|
|
||||||
|
|
||||||
**On your local machine:**
|
|
||||||
```bash
|
|
||||||
cd multmux/packages/cli
|
|
||||||
npm install
|
|
||||||
npm run build
|
|
||||||
npm link # Installs 'multmux' command globally
|
|
||||||
```
|
|
||||||
|
|
||||||
## Usage
|
|
||||||
|
|
||||||
### Create a Session
|
|
||||||
|
|
||||||
```bash
|
|
||||||
multmux create my-project --repo /path/to/repo
|
|
||||||
```
|
|
||||||
|
|
||||||
This outputs an invite link like:
|
|
||||||
```
|
|
||||||
multmux join a1b2c3d4e5f6g7h8i9j0k1l2m3n4o5p6
|
|
||||||
```
|
|
||||||
|
|
||||||
### Join a Session
|
|
||||||
|
|
||||||
```bash
|
|
||||||
multmux join a1b2c3d4e5f6g7h8i9j0k1l2m3n4o5p6
|
|
||||||
```
|
|
||||||
|
|
||||||
### List Active Sessions
|
|
||||||
|
|
||||||
```bash
|
|
||||||
multmux list
|
|
||||||
```
|
|
||||||
|
|
||||||
### Using a Remote Server
|
|
||||||
|
|
||||||
If your server is on a different machine:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Create session
|
|
||||||
multmux create my-project --server http://your-server:3000
|
|
||||||
|
|
||||||
# Join session
|
|
||||||
multmux join <token> --server ws://your-server:3001
|
|
||||||
```
|
|
||||||
|
|
||||||
## CLI Commands
|
|
||||||
|
|
||||||
| Command | Description |
|
|
||||||
|---------|-------------|
|
|
||||||
| `multmux create <name>` | Create a new collaborative session |
|
|
||||||
| `multmux join <token>` | Join an existing session |
|
|
||||||
| `multmux list` | List all active sessions |
|
|
||||||
|
|
||||||
### Options
|
|
||||||
|
|
||||||
**create:**
|
|
||||||
- `-s, --server <url>` - Server URL (default: http://localhost:3000)
|
|
||||||
- `-r, --repo <path>` - Repository path to cd into
|
|
||||||
|
|
||||||
**join:**
|
|
||||||
- `-s, --server <url>` - WebSocket server URL (default: ws://localhost:3001)
|
|
||||||
|
|
||||||
**list:**
|
|
||||||
- `-s, --server <url>` - Server URL (default: http://localhost:3000)
|
|
||||||
|
|
||||||
## Server Management
|
|
||||||
|
|
||||||
### PM2 Commands
|
|
||||||
|
|
||||||
```bash
|
|
||||||
pm2 status # Check server status
|
|
||||||
pm2 logs multmux-server # View server logs
|
|
||||||
pm2 restart multmux-server # Restart server
|
|
||||||
pm2 stop multmux-server # Stop server
|
|
||||||
```
|
|
||||||
|
|
||||||
### Resource Usage
|
|
||||||
|
|
||||||
- **Idle**: ~100-150MB RAM
|
|
||||||
- **Per session**: ~5-10MB RAM
|
|
||||||
- **Per user**: ~1-2MB RAM
|
|
||||||
- **Typical usage**: 200-300MB RAM total
|
|
||||||
|
|
||||||
## API Reference
|
|
||||||
|
|
||||||
### HTTP API (default: port 3000)
|
|
||||||
|
|
||||||
| Endpoint | Method | Description |
|
|
||||||
|----------|--------|-------------|
|
|
||||||
| `/api/sessions` | POST | Create a new session |
|
|
||||||
| `/api/sessions` | GET | List active sessions |
|
|
||||||
| `/api/sessions/:id` | GET | Get session info |
|
|
||||||
| `/api/sessions/:id/tokens` | POST | Generate new invite token |
|
|
||||||
| `/api/health` | GET | Health check |
|
|
||||||
|
|
||||||
### WebSocket (default: port 3001)
|
|
||||||
|
|
||||||
Connect with: `ws://localhost:3001?token=<your-token>`
|
|
||||||
|
|
||||||
**Message Types:**
|
|
||||||
- `output` - Terminal output from server
|
|
||||||
- `input` - User input to terminal
|
|
||||||
- `resize` - Terminal resize event
|
|
||||||
- `presence` - User join/leave notifications
|
|
||||||
- `joined` - Connection confirmation
|
|
||||||
|
|
||||||
## Security
|
|
||||||
|
|
||||||
- **Token Expiration**: Invite tokens expire after 60 minutes (configurable)
|
|
||||||
- **Session Isolation**: Each session runs in its own tmux instance
|
|
||||||
- **Input Validation**: All terminal input is validated
|
|
||||||
- **No Persistence**: Sessions are destroyed when all users leave
|
|
||||||
|
|
||||||
## Troubleshooting
|
|
||||||
|
|
||||||
### Server won't start
|
|
||||||
|
|
||||||
Check if ports are available:
|
|
||||||
```bash
|
|
||||||
netstat -tlnp | grep -E '3000|3001'
|
|
||||||
```
|
|
||||||
|
|
||||||
### Can't connect to server
|
|
||||||
|
|
||||||
1. Check server is running: `pm2 status`
|
|
||||||
2. Check logs: `pm2 logs multmux-server`
|
|
||||||
3. Verify firewall allows ports 3000 and 3001
|
|
||||||
|
|
||||||
### Terminal not responding
|
|
||||||
|
|
||||||
1. Check WebSocket connection in browser console
|
|
||||||
2. Verify token hasn't expired
|
|
||||||
3. Restart session: `pm2 restart multmux-server`
|
|
||||||
|
|
||||||
## Development
|
|
||||||
|
|
||||||
### Project Structure
|
|
||||||
|
|
||||||
```
|
|
||||||
multmux/
|
|
||||||
├── packages/
|
|
||||||
│ ├── server/ # Backend server
|
|
||||||
│ │ ├── src/
|
|
||||||
│ │ │ ├── managers/ # Session & token management
|
|
||||||
│ │ │ ├── websocket/ # WebSocket handler
|
|
||||||
│ │ │ └── api/ # HTTP routes
|
|
||||||
│ └── cli/ # CLI client
|
|
||||||
│ ├── src/
|
|
||||||
│ │ ├── commands/ # CLI commands
|
|
||||||
│ │ ├── connection/ # WebSocket client
|
|
||||||
│ │ └── ui/ # Terminal UI
|
|
||||||
└── infrastructure/ # Deployment scripts
|
|
||||||
```
|
|
||||||
|
|
||||||
### Running in Development
|
|
||||||
|
|
||||||
**Terminal 1 - Server:**
|
|
||||||
```bash
|
|
||||||
npm run dev:server
|
|
||||||
```
|
|
||||||
|
|
||||||
**Terminal 2 - CLI:**
|
|
||||||
```bash
|
|
||||||
cd packages/cli
|
|
||||||
npm run dev -- create test-session
|
|
||||||
```
|
|
||||||
|
|
||||||
### Building
|
|
||||||
|
|
||||||
```bash
|
|
||||||
npm run build # Builds both packages
|
|
||||||
```
|
|
||||||
|
|
||||||
## License
|
|
||||||
|
|
||||||
MIT
|
|
||||||
|
|
||||||
## Contributing
|
|
||||||
|
|
||||||
Contributions welcome! Please open an issue or PR.
|
|
||||||
|
|
@ -1,33 +0,0 @@
|
||||||
version: '3.8'
|
|
||||||
|
|
||||||
services:
|
|
||||||
multmux:
|
|
||||||
build: .
|
|
||||||
container_name: multmux-server
|
|
||||||
restart: unless-stopped
|
|
||||||
environment:
|
|
||||||
- NODE_ENV=production
|
|
||||||
- PORT=3002
|
|
||||||
labels:
|
|
||||||
- "traefik.enable=true"
|
|
||||||
# HTTP router
|
|
||||||
- "traefik.http.routers.multmux.rule=Host(`terminal.jeffemmett.com`)"
|
|
||||||
- "traefik.http.routers.multmux.entrypoints=web"
|
|
||||||
- "traefik.http.services.multmux.loadbalancer.server.port=3002"
|
|
||||||
# WebSocket support - Traefik handles this automatically for HTTP/1.1 upgrades
|
|
||||||
# Enable sticky sessions for WebSocket connections
|
|
||||||
- "traefik.http.services.multmux.loadbalancer.sticky.cookie=true"
|
|
||||||
- "traefik.http.services.multmux.loadbalancer.sticky.cookie.name=multmux_session"
|
|
||||||
networks:
|
|
||||||
- traefik-public
|
|
||||||
# Health check
|
|
||||||
healthcheck:
|
|
||||||
test: ["CMD", "curl", "-f", "http://localhost:3002/api/health"]
|
|
||||||
interval: 30s
|
|
||||||
timeout: 10s
|
|
||||||
retries: 3
|
|
||||||
start_period: 10s
|
|
||||||
|
|
||||||
networks:
|
|
||||||
traefik-public:
|
|
||||||
external: true
|
|
||||||
|
|
@ -1,91 +0,0 @@
|
||||||
#!/bin/bash
|
|
||||||
|
|
||||||
# mulTmux Deployment Script for AI Server
|
|
||||||
# This script sets up mulTmux on your existing droplet
|
|
||||||
|
|
||||||
set -e
|
|
||||||
|
|
||||||
echo "🚀 mulTmux Deployment Script"
|
|
||||||
echo "============================"
|
|
||||||
echo ""
|
|
||||||
|
|
||||||
# Check if tmux is installed
|
|
||||||
if ! command -v tmux &> /dev/null; then
|
|
||||||
echo "📦 Installing tmux..."
|
|
||||||
sudo apt-get update
|
|
||||||
sudo apt-get install -y tmux
|
|
||||||
else
|
|
||||||
echo "✅ tmux is already installed"
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Check if Node.js is installed
|
|
||||||
if ! command -v node &> /dev/null; then
|
|
||||||
echo "📦 Installing Node.js..."
|
|
||||||
curl -fsSL https://deb.nodesource.com/setup_20.x | sudo -E bash -
|
|
||||||
sudo apt-get install -y nodejs
|
|
||||||
else
|
|
||||||
echo "✅ Node.js is already installed ($(node --version))"
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Check if npm is installed
|
|
||||||
if ! command -v npm &> /dev/null; then
|
|
||||||
echo "❌ npm is not installed. Please install npm first."
|
|
||||||
exit 1
|
|
||||||
else
|
|
||||||
echo "✅ npm is already installed ($(npm --version))"
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Build the server
|
|
||||||
echo ""
|
|
||||||
echo "🔨 Building mulTmux..."
|
|
||||||
cd "$(dirname "$0")/.."
|
|
||||||
npm install
|
|
||||||
npm run build
|
|
||||||
|
|
||||||
echo ""
|
|
||||||
echo "📝 Setting up PM2 for process management..."
|
|
||||||
if ! command -v pm2 &> /dev/null; then
|
|
||||||
sudo npm install -g pm2
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Create PM2 ecosystem file
|
|
||||||
cat > ecosystem.config.js << EOF
|
|
||||||
module.exports = {
|
|
||||||
apps: [{
|
|
||||||
name: 'multmux-server',
|
|
||||||
script: './packages/server/dist/index.js',
|
|
||||||
instances: 1,
|
|
||||||
autorestart: true,
|
|
||||||
watch: false,
|
|
||||||
max_memory_restart: '500M',
|
|
||||||
env: {
|
|
||||||
NODE_ENV: 'production',
|
|
||||||
PORT: 3000,
|
|
||||||
WS_PORT: 3001
|
|
||||||
}
|
|
||||||
}]
|
|
||||||
};
|
|
||||||
EOF
|
|
||||||
|
|
||||||
echo ""
|
|
||||||
echo "🚀 Starting mulTmux server with PM2..."
|
|
||||||
pm2 start ecosystem.config.js
|
|
||||||
pm2 save
|
|
||||||
pm2 startup | tail -n 1 | bash || true
|
|
||||||
|
|
||||||
echo ""
|
|
||||||
echo "✅ mulTmux deployed successfully!"
|
|
||||||
echo ""
|
|
||||||
echo "Server is running on:"
|
|
||||||
echo " HTTP API: http://localhost:3000"
|
|
||||||
echo " WebSocket: ws://localhost:3001"
|
|
||||||
echo ""
|
|
||||||
echo "Useful PM2 commands:"
|
|
||||||
echo " pm2 status - Check server status"
|
|
||||||
echo " pm2 logs multmux-server - View logs"
|
|
||||||
echo " pm2 restart multmux-server - Restart server"
|
|
||||||
echo " pm2 stop multmux-server - Stop server"
|
|
||||||
echo ""
|
|
||||||
echo "To install the CLI globally:"
|
|
||||||
echo " cd packages/cli && npm link"
|
|
||||||
echo ""
|
|
||||||
|
|
@ -1,53 +0,0 @@
|
||||||
# nginx configuration for mulTmux
|
|
||||||
# Place this in /etc/nginx/sites-available/multmux
|
|
||||||
# Then: sudo ln -s /etc/nginx/sites-available/multmux /etc/nginx/sites-enabled/
|
|
||||||
|
|
||||||
upstream multmux_api {
|
|
||||||
server localhost:3000;
|
|
||||||
}
|
|
||||||
|
|
||||||
upstream multmux_ws {
|
|
||||||
server localhost:3001;
|
|
||||||
}
|
|
||||||
|
|
||||||
server {
|
|
||||||
listen 80;
|
|
||||||
server_name your-server-domain.com; # Change this to your domain or IP
|
|
||||||
|
|
||||||
# HTTP API
|
|
||||||
location /api {
|
|
||||||
proxy_pass http://multmux_api;
|
|
||||||
proxy_http_version 1.1;
|
|
||||||
proxy_set_header Upgrade $http_upgrade;
|
|
||||||
proxy_set_header Connection 'upgrade';
|
|
||||||
proxy_set_header Host $host;
|
|
||||||
proxy_cache_bypass $http_upgrade;
|
|
||||||
proxy_set_header X-Real-IP $remote_addr;
|
|
||||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
|
||||||
proxy_set_header X-Forwarded-Proto $scheme;
|
|
||||||
}
|
|
||||||
|
|
||||||
# WebSocket
|
|
||||||
location /ws {
|
|
||||||
proxy_pass http://multmux_ws;
|
|
||||||
proxy_http_version 1.1;
|
|
||||||
proxy_set_header Upgrade $http_upgrade;
|
|
||||||
proxy_set_header Connection "upgrade";
|
|
||||||
proxy_set_header Host $host;
|
|
||||||
proxy_set_header X-Real-IP $remote_addr;
|
|
||||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
|
||||||
proxy_set_header X-Forwarded-Proto $scheme;
|
|
||||||
proxy_read_timeout 86400;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
# Optional: SSL configuration (if using Let's Encrypt)
|
|
||||||
# server {
|
|
||||||
# listen 443 ssl http2;
|
|
||||||
# server_name your-server-domain.com;
|
|
||||||
#
|
|
||||||
# ssl_certificate /etc/letsencrypt/live/your-server-domain.com/fullchain.pem;
|
|
||||||
# ssl_certificate_key /etc/letsencrypt/live/your-server-domain.com/privkey.pem;
|
|
||||||
#
|
|
||||||
# # Same location blocks as above...
|
|
||||||
# }
|
|
||||||
|
|
@ -1,19 +0,0 @@
|
||||||
{
|
|
||||||
"name": "multmux",
|
|
||||||
"version": "0.1.0",
|
|
||||||
"private": true,
|
|
||||||
"description": "Collaborative terminal tool with tmux backend",
|
|
||||||
"workspaces": [
|
|
||||||
"packages/*"
|
|
||||||
],
|
|
||||||
"scripts": {
|
|
||||||
"build": "npm run build -ws",
|
|
||||||
"dev:server": "npm run dev -w @multmux/server",
|
|
||||||
"dev:cli": "npm run dev -w @multmux/cli",
|
|
||||||
"start:server": "npm run start -w @multmux/server"
|
|
||||||
},
|
|
||||||
"devDependencies": {
|
|
||||||
"@types/node": "^20.0.0",
|
|
||||||
"typescript": "^5.0.0"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
@ -1,30 +0,0 @@
|
||||||
{
|
|
||||||
"name": "@multmux/cli",
|
|
||||||
"version": "0.1.0",
|
|
||||||
"description": "mulTmux CLI - collaborative terminal client",
|
|
||||||
"main": "dist/index.js",
|
|
||||||
"bin": {
|
|
||||||
"multmux": "./dist/index.js"
|
|
||||||
},
|
|
||||||
"scripts": {
|
|
||||||
"build": "tsc",
|
|
||||||
"dev": "tsx src/index.ts",
|
|
||||||
"start": "node dist/index.js"
|
|
||||||
},
|
|
||||||
"dependencies": {
|
|
||||||
"commander": "^11.1.0",
|
|
||||||
"ws": "^8.16.0",
|
|
||||||
"blessed": "^0.1.81",
|
|
||||||
"chalk": "^4.1.2",
|
|
||||||
"ora": "^5.4.1",
|
|
||||||
"node-fetch": "^2.7.0"
|
|
||||||
},
|
|
||||||
"devDependencies": {
|
|
||||||
"@types/ws": "^8.5.10",
|
|
||||||
"@types/node": "^20.0.0",
|
|
||||||
"@types/blessed": "^0.1.25",
|
|
||||||
"@types/node-fetch": "^2.6.9",
|
|
||||||
"tsx": "^4.7.0",
|
|
||||||
"typescript": "^5.0.0"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
@ -1,50 +0,0 @@
|
||||||
import fetch from 'node-fetch';
|
|
||||||
import chalk from 'chalk';
|
|
||||||
import ora from 'ora';
|
|
||||||
|
|
||||||
export async function createSession(
|
|
||||||
name: string,
|
|
||||||
options: { server?: string; repo?: string }
|
|
||||||
): Promise<void> {
|
|
||||||
const serverUrl = options.server || 'http://localhost:3000';
|
|
||||||
const spinner = ora('Creating session...').start();
|
|
||||||
|
|
||||||
try {
|
|
||||||
const response = await fetch(`${serverUrl}/api/sessions`, {
|
|
||||||
method: 'POST',
|
|
||||||
headers: {
|
|
||||||
'Content-Type': 'application/json',
|
|
||||||
},
|
|
||||||
body: JSON.stringify({
|
|
||||||
name,
|
|
||||||
repoPath: options.repo,
|
|
||||||
}),
|
|
||||||
});
|
|
||||||
|
|
||||||
if (!response.ok) {
|
|
||||||
throw new Error(`Failed to create session: ${response.statusText}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
const data: any = await response.json();
|
|
||||||
|
|
||||||
spinner.succeed('Session created!');
|
|
||||||
|
|
||||||
console.log('');
|
|
||||||
console.log(chalk.bold('Session Details:'));
|
|
||||||
console.log(` Name: ${chalk.cyan(data.session.name)}`);
|
|
||||||
console.log(` ID: ${chalk.gray(data.session.id)}`);
|
|
||||||
console.log(` Created: ${new Date(data.session.createdAt).toLocaleString()}`);
|
|
||||||
console.log('');
|
|
||||||
console.log(chalk.bold('To join this session:'));
|
|
||||||
console.log(chalk.green(` ${data.inviteUrl}`));
|
|
||||||
console.log('');
|
|
||||||
console.log(chalk.bold('Or share this token:'));
|
|
||||||
console.log(` ${chalk.yellow(data.token)}`);
|
|
||||||
console.log('');
|
|
||||||
console.log(chalk.dim('Token expires in 60 minutes'));
|
|
||||||
} catch (error) {
|
|
||||||
spinner.fail('Failed to create session');
|
|
||||||
console.error(chalk.red((error as Error).message));
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
@ -1,45 +0,0 @@
|
||||||
import chalk from 'chalk';
|
|
||||||
import ora from 'ora';
|
|
||||||
import { WebSocketClient } from '../connection/WebSocketClient';
|
|
||||||
import { TerminalUI } from '../ui/Terminal';
|
|
||||||
|
|
||||||
export async function joinSession(
|
|
||||||
token: string,
|
|
||||||
options: { server?: string }
|
|
||||||
): Promise<void> {
|
|
||||||
const serverUrl = options.server || 'ws://localhost:3001';
|
|
||||||
const spinner = ora('Connecting to session...').start();
|
|
||||||
|
|
||||||
try {
|
|
||||||
const client = new WebSocketClient(serverUrl, token);
|
|
||||||
|
|
||||||
// Wait for connection
|
|
||||||
await client.connect();
|
|
||||||
spinner.succeed('Connected!');
|
|
||||||
|
|
||||||
// Wait a moment for the 'joined' event
|
|
||||||
await new Promise((resolve) => {
|
|
||||||
client.once('joined', resolve);
|
|
||||||
setTimeout(resolve, 1000); // Fallback timeout
|
|
||||||
});
|
|
||||||
|
|
||||||
console.log(chalk.green('\nJoined session! Press ESC or Ctrl-C to exit.\n'));
|
|
||||||
|
|
||||||
// Create terminal UI
|
|
||||||
const ui = new TerminalUI(client);
|
|
||||||
|
|
||||||
// Handle errors
|
|
||||||
client.on('error', (error: Error) => {
|
|
||||||
console.error(chalk.red('\nConnection error:'), error.message);
|
|
||||||
});
|
|
||||||
|
|
||||||
client.on('reconnect-failed', () => {
|
|
||||||
console.error(chalk.red('\nFailed to reconnect. Exiting...'));
|
|
||||||
process.exit(1);
|
|
||||||
});
|
|
||||||
} catch (error) {
|
|
||||||
spinner.fail('Failed to connect');
|
|
||||||
console.error(chalk.red((error as Error).message));
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
@ -1,38 +0,0 @@
|
||||||
import fetch from 'node-fetch';
|
|
||||||
import chalk from 'chalk';
|
|
||||||
import ora from 'ora';
|
|
||||||
|
|
||||||
export async function listSessions(options: { server?: string }): Promise<void> {
|
|
||||||
const serverUrl = options.server || 'http://localhost:3000';
|
|
||||||
const spinner = ora('Fetching sessions...').start();
|
|
||||||
|
|
||||||
try {
|
|
||||||
const response = await fetch(`${serverUrl}/api/sessions`);
|
|
||||||
|
|
||||||
if (!response.ok) {
|
|
||||||
throw new Error(`Failed to fetch sessions: ${response.statusText}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
const data: any = await response.json();
|
|
||||||
spinner.stop();
|
|
||||||
|
|
||||||
if (data.sessions.length === 0) {
|
|
||||||
console.log(chalk.yellow('No active sessions found.'));
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
console.log(chalk.bold(`\nActive Sessions (${data.sessions.length}):\n`));
|
|
||||||
|
|
||||||
data.sessions.forEach((session: any) => {
|
|
||||||
console.log(chalk.cyan(` ${session.name}`));
|
|
||||||
console.log(` ID: ${chalk.gray(session.id)}`);
|
|
||||||
console.log(` Clients: ${session.activeClients}`);
|
|
||||||
console.log(` Created: ${new Date(session.createdAt).toLocaleString()}`);
|
|
||||||
console.log('');
|
|
||||||
});
|
|
||||||
} catch (error) {
|
|
||||||
spinner.fail('Failed to fetch sessions');
|
|
||||||
console.error(chalk.red((error as Error).message));
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
@ -1,120 +0,0 @@
|
||||||
import WebSocket from 'ws';
|
|
||||||
import { EventEmitter } from 'events';
|
|
||||||
|
|
||||||
export interface TerminalMessage {
|
|
||||||
type: 'output' | 'input' | 'resize' | 'join' | 'leave' | 'presence' | 'joined' | 'error';
|
|
||||||
data?: any;
|
|
||||||
clientId?: string;
|
|
||||||
timestamp?: number;
|
|
||||||
sessionId?: string;
|
|
||||||
sessionName?: string;
|
|
||||||
message?: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
export class WebSocketClient extends EventEmitter {
|
|
||||||
private ws: WebSocket | null = null;
|
|
||||||
private reconnectAttempts = 0;
|
|
||||||
private maxReconnectAttempts = 5;
|
|
||||||
|
|
||||||
constructor(private url: string, private token: string) {
|
|
||||||
super();
|
|
||||||
}
|
|
||||||
|
|
||||||
connect(): Promise<void> {
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
const wsUrl = `${this.url}?token=${this.token}`;
|
|
||||||
this.ws = new WebSocket(wsUrl);
|
|
||||||
|
|
||||||
this.ws.on('open', () => {
|
|
||||||
this.reconnectAttempts = 0;
|
|
||||||
this.emit('connected');
|
|
||||||
resolve();
|
|
||||||
});
|
|
||||||
|
|
||||||
this.ws.on('message', (data) => {
|
|
||||||
try {
|
|
||||||
const message: TerminalMessage = JSON.parse(data.toString());
|
|
||||||
this.handleMessage(message);
|
|
||||||
} catch (error) {
|
|
||||||
console.error('Failed to parse message:', error);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
this.ws.on('close', () => {
|
|
||||||
this.emit('disconnected');
|
|
||||||
this.attemptReconnect();
|
|
||||||
});
|
|
||||||
|
|
||||||
this.ws.on('error', (error) => {
|
|
||||||
this.emit('error', error);
|
|
||||||
reject(error);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
private handleMessage(message: TerminalMessage): void {
|
|
||||||
switch (message.type) {
|
|
||||||
case 'output':
|
|
||||||
this.emit('output', message.data);
|
|
||||||
break;
|
|
||||||
case 'joined':
|
|
||||||
this.emit('joined', {
|
|
||||||
sessionId: message.sessionId,
|
|
||||||
sessionName: message.sessionName,
|
|
||||||
clientId: message.clientId,
|
|
||||||
});
|
|
||||||
break;
|
|
||||||
case 'presence':
|
|
||||||
this.emit('presence', message.data);
|
|
||||||
break;
|
|
||||||
case 'error':
|
|
||||||
this.emit('error', new Error(message.message || 'Unknown error'));
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
sendInput(data: string): void {
|
|
||||||
if (this.ws && this.ws.readyState === WebSocket.OPEN) {
|
|
||||||
this.ws.send(
|
|
||||||
JSON.stringify({
|
|
||||||
type: 'input',
|
|
||||||
data,
|
|
||||||
timestamp: Date.now(),
|
|
||||||
})
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
resize(cols: number, rows: number): void {
|
|
||||||
if (this.ws && this.ws.readyState === WebSocket.OPEN) {
|
|
||||||
this.ws.send(
|
|
||||||
JSON.stringify({
|
|
||||||
type: 'resize',
|
|
||||||
data: { cols, rows },
|
|
||||||
timestamp: Date.now(),
|
|
||||||
})
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
disconnect(): void {
|
|
||||||
if (this.ws) {
|
|
||||||
this.ws.close();
|
|
||||||
this.ws = null;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private attemptReconnect(): void {
|
|
||||||
if (this.reconnectAttempts < this.maxReconnectAttempts) {
|
|
||||||
this.reconnectAttempts++;
|
|
||||||
setTimeout(() => {
|
|
||||||
this.emit('reconnecting', this.reconnectAttempts);
|
|
||||||
this.connect().catch(() => {
|
|
||||||
// Reconnection failed, will retry
|
|
||||||
});
|
|
||||||
}, 1000 * this.reconnectAttempts);
|
|
||||||
} else {
|
|
||||||
this.emit('reconnect-failed');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue