Merge pull request #15 from Jeff-Emmett/add-runpod-AI-API
Add RunPod AI API
commit 6039481d0c
@@ -11,9 +11,9 @@
     "build": "tsc && vite build",
     "build:worker": "wrangler build --config wrangler.dev.toml",
     "preview": "vite preview",
-    "deploy": "tsc && vite build && wrangler deploy",
+    "deploy": "tsc && vite build && wrangler deploy --config worker/wrangler.toml",
     "deploy:pages": "tsc && vite build",
-    "deploy:worker": "wrangler deploy",
+    "deploy:worker": "wrangler deploy --config worker/wrangler.toml",
     "deploy:worker:dev": "wrangler deploy --config wrangler.dev.toml",
     "types": "tsc --noEmit"
   },
@@ -207,6 +207,49 @@ export function useAutomergeStoreV2({
   // once into the automerge doc and then back again.
   let isLocalChange = false

+  // Helper function to manually trigger sync after document changes
+  // The Automerge Repo doesn't auto-broadcast because our WebSocket setup doesn't use peer discovery
+  const triggerSync = () => {
+    try {
+      const repo = (handle as any).repo
+      if (repo) {
+        // Try multiple approaches to trigger sync
+
+        // Approach 1: Use networkSubsystem.syncDoc if available
+        if (repo.networkSubsystem && typeof repo.networkSubsystem.syncDoc === 'function') {
+          console.log('🔄 Triggering sync via networkSubsystem.syncDoc()')
+          repo.networkSubsystem.syncDoc(handle.documentId)
+        }
+        // Approach 2: Broadcast to all network adapters directly
+        else if (repo.networkSubsystem && repo.networkSubsystem.adapters) {
+          console.log('🔄 Broadcasting sync to all network adapters')
+          const adapters = Array.from(repo.networkSubsystem.adapters.values())
+          adapters.forEach((adapter: any) => {
+            if (adapter && typeof adapter.send === 'function') {
+              // Send a sync message via the adapter
+              // The adapter should handle converting this to the right format
+              adapter.send({
+                type: 'sync',
+                documentId: handle.documentId,
+                data: handle.doc()
+              })
+            }
+          })
+        }
+        // Approach 3: Emit an event to trigger sync
+        else if (repo.emit && typeof repo.emit === 'function') {
+          console.log('🔄 Emitting document change event')
+          repo.emit('change', { documentId: handle.documentId, doc: handle.doc() })
+        }
+        else {
+          console.warn('⚠️ No known method to trigger sync broadcast found')
+        }
+      }
+    } catch (error) {
+      console.error('❌ Error triggering manual sync:', error)
+    }
+  }
+
   // Listen for changes from Automerge and apply them to TLDraw
   const automergeChangeHandler = (payload: DocHandleChangePayload<any>) => {
     if (isLocalChange) {
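
For context on the helper added above: it reaches into internal Repo fields ((handle as any).repo, networkSubsystem, adapter.send) because this project's WebSocket setup doesn't use peer discovery. As a point of comparison, here is a minimal sketch of the stock wiring under which handle.change() broadcasts on its own; the package names are the standard @automerge/automerge-repo ones, but the adapter class name, server URL, and document URL are placeholders and may differ by version and deployment.

// Sketch only: with a network adapter registered on the Repo, every
// handle.change() is synced to connected peers automatically and no
// manual triggerSync() is needed.
import { Repo } from "@automerge/automerge-repo"
import { BrowserWebSocketClientAdapter } from "@automerge/automerge-repo-network-websocket"

const repo = new Repo({
  network: [new BrowserWebSocketClientAdapter("wss://sync.example.com")], // placeholder URL
})

// Placeholder document URL; in newer automerge-repo releases find() returns a Promise.
const handle = repo.find<any>("automerge:exampleDocId" as any)
handle.change((doc: any) => {
  doc.lastTouched = Date.now() // broadcast to connected peers by the Repo itself
})
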
@@ -230,7 +273,10 @@ export function useAutomergeStoreV2({
       const recordsBefore = store.allRecords()
       const shapesBefore = recordsBefore.filter((r: any) => r.typeName === 'shape')

-      applyAutomergePatchesToTLStore(payload.patches, store)
+      // CRITICAL: Pass Automerge document to patch handler so it can read full records
+      // This prevents coordinates from defaulting to 0,0 when patches create new records
+      const automergeDoc = handle.doc()
+      applyAutomergePatchesToTLStore(payload.patches, store, automergeDoc)

       const recordsAfter = store.allRecords()
       const shapesAfter = recordsAfter.filter((r: any) => r.typeName === 'shape')
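
The body of applyAutomergePatchesToTLStore is not part of this diff. The sketch below shows roughly what the new third argument implies, assuming records are keyed under a top-level store map in the Automerge document; the parameter name, doc layout, and path handling here are assumptions for illustration, not taken from the source.

// Sketch of the assumed signature change; the real helper lives elsewhere in this repo.
import type { Patch } from "@automerge/automerge"
import type { TLRecord, TLStore } from "tldraw"

function applyAutomergePatchesToTLStore(
  patches: Patch[],
  store: TLStore,
  automergeDoc?: Record<string, any>, // full document snapshot from handle.doc()
) {
  for (const patch of patches) {
    const recordId = String(patch.path[1] ?? patch.path[0]) // assumed path layout
    // When a patch introduces a record, prefer the complete record from the document
    // over one rebuilt from the patch alone, so x/y and other fields aren't lost.
    const fullRecord = automergeDoc?.store?.[recordId] as TLRecord | undefined
    if (patch.action === "put" && fullRecord) {
      store.put([fullRecord])
      continue
    }
    // ...existing handling for incremental updates and deletions...
  }
}
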
@@ -249,9 +295,11 @@ export function useAutomergeStoreV2({
           // This is a fallback - ideally we should fix the data at the source
           let successCount = 0
           let failedPatches: any[] = []
+          // CRITICAL: Pass Automerge document to patch handler so it can read full records
+          const automergeDoc = handle.doc()
           for (const patch of payload.patches) {
             try {
-              applyAutomergePatchesToTLStore([patch], store)
+              applyAutomergePatchesToTLStore([patch], store, automergeDoc)
               successCount++
             } catch (individualPatchError) {
               failedPatches.push({ patch, error: individualPatchError })
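
What is done with successCount and failedPatches after the loop falls outside this hunk; a typical summary, sketched here as an assumption rather than taken from the source, might be:

// Hypothetical post-loop reporting; not part of this diff.
if (failedPatches.length > 0) {
  console.warn(
    `⚠️ Applied ${successCount}/${payload.patches.length} patches individually;`,
    `${failedPatches.length} failed`,
    failedPatches,
  )
}
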
@@ -404,6 +452,8 @@ export function useAutomergeStoreV2({
         handle.change((doc) => {
           applyTLStoreChangesToAutomerge(doc, queuedChanges)
         })
+        // Trigger sync to broadcast position updates
+        triggerSync()
         setTimeout(() => {
           isLocalChange = false
         }, 100)
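
This hunk and the next two repeat the same commit-then-broadcast sequence (handle.change, triggerSync, delayed isLocalChange reset). Inside the hook, where handle, isLocalChange, applyTLStoreChangesToAutomerge, and triggerSync are already in scope, the repetition could be folded into one helper; the sketch below is an illustration with an invented name, not code from this change.

// Hypothetical helper consolidating the repeated pattern in the three hunks.
const commitAndBroadcast = (changes: any) => {
  isLocalChange = true // suppress the Automerge -> TLDraw echo of our own change
  handle.change((doc: any) => {
    applyTLStoreChangesToAutomerge(doc, changes)
  })
  triggerSync() // manual broadcast, as in the hunks above and below
  setTimeout(() => {
    isLocalChange = false // re-enable the incoming change handler
  }, 100)
}
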
@@ -1044,6 +1094,8 @@ export function useAutomergeStoreV2({
         handle.change((doc) => {
           applyTLStoreChangesToAutomerge(doc, queuedChanges)
         })
+        // Trigger sync to broadcast eraser changes
+        triggerSync()
         setTimeout(() => {
           isLocalChange = false
         }, 100)
@@ -1079,6 +1131,8 @@ export function useAutomergeStoreV2({
         handle.change((doc) => {
           applyTLStoreChangesToAutomerge(doc, mergedChanges)
         })
+        // Trigger sync to broadcast merged changes
+        triggerSync()
         setTimeout(() => {
           isLocalChange = false
         }, 100)
@@ -1091,11 +1145,15 @@ export function useAutomergeStoreV2({
     const applyChanges = () => {
       // Set flag to prevent feedback loop when this change comes back from Automerge
       isLocalChange = true

       handle.change((doc) => {
         applyTLStoreChangesToAutomerge(doc, finalFilteredChanges)
       })

+      // CRITICAL: Manually trigger Automerge Repo to broadcast changes
+      // Use requestAnimationFrame to defer this slightly so the change is fully processed
+      requestAnimationFrame(triggerSync)
+
       // Reset flag after a short delay to allow Automerge change handler to process
       // This prevents feedback loops while ensuring all changes are saved
       setTimeout(() => {
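
One property of the requestAnimationFrame deferral above: rAF callbacks are paused while the tab is hidden, so a change made in a background tab would not broadcast until the tab becomes visible again. If that matters, a timer-based defer is an alternative; the sketch below is an option to consider, not part of this change.

// Hypothetical deferral that still fires in background tabs; not part of this diff.
const deferSync = () => {
  if (typeof document !== "undefined" && document.visibilityState === "hidden") {
    setTimeout(triggerSync, 0) // rAF would stall here until the tab is visible again
  } else {
    requestAnimationFrame(triggerSync)
  }
}
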
@@ -1,3 +1,6 @@
+# Worker configuration
+# Note: This wrangler.toml is for the Worker backend only.
+# Pages deployment is configured separately in the Cloudflare dashboard.
 main = "worker/worker.ts"
 compatibility_date = "2024-07-01"
 name = "jeffemmett-canvas"