Final update: fix old data conversion

This commit is contained in:
Jeff Emmett 2025-11-10 15:38:53 -08:00
parent f250eb3145
commit 1c50f2eeb0
5 changed files with 114 additions and 397 deletions

View File

@ -166,11 +166,13 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
private maxReconnectAttempts: number = 5
private reconnectDelay: number = 1000
private isConnecting: boolean = false
private onJsonSyncData?: (data: any) => void
constructor(workerUrl: string, roomId?: string) {
constructor(workerUrl: string, roomId?: string, onJsonSyncData?: (data: any) => void) {
super()
this.workerUrl = workerUrl
this.roomId = roomId || 'default-room'
this.onJsonSyncData = onJsonSyncData
this.readyPromise = new Promise((resolve) => {
this.readyResolve = resolve
})
@ -199,7 +201,10 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
// Use the room ID from constructor or default
// Add sessionId as a query parameter as required by AutomergeDurableObject
const sessionId = peerId || `session-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`
const wsUrl = `${this.workerUrl.replace('http', 'ws')}/connect/${this.roomId}?sessionId=${sessionId}`
// Convert https:// to wss:// or http:// to ws://
const protocol = this.workerUrl.startsWith('https://') ? 'wss://' : 'ws://'
const baseUrl = this.workerUrl.replace(/^https?:\/\//, '')
const wsUrl = `${protocol}${baseUrl}/connect/${this.roomId}?sessionId=${sessionId}`
this.isConnecting = true
@ -265,15 +270,45 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
if (message.type === 'sync' && message.data) {
console.log('🔌 CloudflareAdapter: Received sync message with data:', {
hasStore: !!message.data.store,
storeKeys: message.data.store ? Object.keys(message.data.store).length : 0
storeKeys: message.data.store ? Object.keys(message.data.store).length : 0,
documentId: message.documentId,
documentIdType: typeof message.documentId
})
// For backward compatibility, handle JSON sync data
// Check if this is a JSON sync message with full document data
// These should NOT go through Automerge's sync protocol (which expects binary messages)
// Instead, apply the data directly to the handle via callback
const isJsonDocumentData = message.data && typeof message.data === 'object' && message.data.store
if (isJsonDocumentData && this.onJsonSyncData) {
console.log('🔌 CloudflareAdapter: Applying JSON document data directly to handle (bypassing sync protocol)')
this.onJsonSyncData(message.data)
return // Don't emit as sync message
}
// Validate documentId - Automerge requires a valid Automerge URL format
// Valid formats: "automerge:xxxxx" or other valid URL formats
// Invalid: plain strings like "default", "default-room", etc.
const isValidDocumentId = message.documentId &&
(typeof message.documentId === 'string' &&
(message.documentId.startsWith('automerge:') ||
message.documentId.includes(':') ||
/^[a-f0-9-]{36,}$/i.test(message.documentId))) // UUID-like format
// For binary sync messages, use Automerge's sync protocol
// Only include documentId if it's a valid Automerge document ID format
const syncMessage: Message = {
type: 'sync',
senderId: message.senderId || this.peerId || ('unknown' as PeerId),
targetId: message.targetId || this.peerId || ('unknown' as PeerId),
data: message.data
data: message.data,
...(isValidDocumentId && { documentId: message.documentId })
}
if (message.documentId && !isValidDocumentId) {
console.warn('⚠️ CloudflareAdapter: Ignoring invalid documentId from server:', message.documentId)
}
this.emit('message', syncMessage)
} else if (message.senderId && message.targetId) {
this.emit('message', message as Message)

View File

@ -1,347 +1,2 @@
import { useMemo, useEffect, useState, useCallback } from "react"
import { TLStoreSnapshot } from "@tldraw/tldraw"
import { CloudflareAdapter } from "./CloudflareAdapter"
import { useAutomergeStoreV2, useAutomergePresence } from "./useAutomergeStoreV2"
import { TLStoreWithStatus } from "@tldraw/tldraw"
export { useAutomergeSync } from "./useAutomergeSyncRepo"
interface AutomergeSyncConfig {
uri: string
assets?: any
shapeUtils?: any[]
bindingUtils?: any[]
user?: {
id: string
name: string
}
}
export function useAutomergeSync(config: AutomergeSyncConfig): TLStoreWithStatus & { handle: any | null } {
const { uri, user } = config
// Extract roomId from URI (e.g., "https://worker.com/connect/room123" -> "room123")
const roomId = useMemo(() => {
const match = uri.match(/\/connect\/([^\/]+)$/)
return match ? match[1] : "default-room"
}, [uri])
// Extract worker URL from URI (remove /connect/roomId part)
const workerUrl = useMemo(() => {
return uri.replace(/\/connect\/.*$/, '')
}, [uri])
const [adapter] = useState(() => new CloudflareAdapter(workerUrl, roomId))
const [handle, setHandle] = useState<any>(null)
const [isLoading, setIsLoading] = useState(true)
// Initialize Automerge document handle
useEffect(() => {
let mounted = true
const initializeHandle = async () => {
// Add a small delay to ensure the server is ready
await new Promise(resolve => setTimeout(resolve, 500));
try {
// Try to load existing document from Cloudflare
const existingDoc = await adapter.loadFromCloudflare(roomId)
if (mounted) {
const handle = await adapter.getHandle(roomId)
// If we loaded an existing document, properly initialize it
if (existingDoc) {
console.log("Initializing Automerge document with existing data:", {
hasStore: !!existingDoc.store,
storeKeys: existingDoc.store ? Object.keys(existingDoc.store).length : 0,
sampleKeys: existingDoc.store ? Object.keys(existingDoc.store).slice(0, 5) : []
})
handle.change((doc) => {
// Always load R2 data if it exists and has content
const r2StoreKeys = existingDoc.store ? Object.keys(existingDoc.store).length : 0
console.log("Loading R2 data:", {
r2StoreKeys,
hasR2Data: r2StoreKeys > 0,
sampleStoreKeys: existingDoc.store ? Object.keys(existingDoc.store).slice(0, 5) : []
})
if (r2StoreKeys > 0) {
console.log("Loading R2 data into Automerge document")
if (existingDoc.store) {
// Debug: Log what we're about to load
const storeEntries = Object.entries(existingDoc.store)
const shapeCount = storeEntries.filter(([_, v]: [string, any]) => v?.typeName === 'shape').length
console.log("📊 R2 data to load:", {
totalRecords: storeEntries.length,
shapeCount,
recordTypes: storeEntries.reduce((acc: any, [_, v]: [string, any]) => {
const type = v?.typeName || 'unknown'
acc[type] = (acc[type] || 0) + 1
return acc
}, {}),
sampleRecords: storeEntries.slice(0, 5).map(([k, v]: [string, any]) => {
// Log full structure for debugging
try {
const fullRecord = JSON.parse(JSON.stringify(v))
return {
key: k,
id: v?.id,
typeName: v?.typeName,
type: v?.type,
hasProps: !!v?.props,
propsKeys: v?.props ? Object.keys(v.props).slice(0, 5) : [],
allKeys: v ? Object.keys(v).slice(0, 10) : [],
fullRecord: fullRecord // Include full record for debugging
}
} catch (e) {
return {
key: k,
id: v?.id,
typeName: v?.typeName,
type: v?.type,
error: String(e)
}
}
})
})
// Initialize store if it doesn't exist
if (!doc.store) {
doc.store = {}
}
// Assign each record individually with deep copy to ensure Automerge properly handles nested objects
// This matches how records are saved in TLStoreToAutomerge.ts
// Cast to any to allow string indexing (Automerge handles the typing internally)
const store = doc.store as any
let assignedCount = 0
for (const [key, record] of Object.entries(existingDoc.store)) {
try {
// Create a deep copy to ensure Automerge properly handles nested objects
// This is critical for preserving nested structures like props, richText, etc.
// Cast record to any to access properties that may not exist on all TLRecord types
const recordAny = record as any
let recordToSave: any
try {
recordToSave = JSON.parse(JSON.stringify(record))
// Verify essential properties are preserved
if (!recordToSave.typeName && recordAny.typeName) {
recordToSave.typeName = recordAny.typeName
}
if (!recordToSave.id && recordAny.id) {
recordToSave.id = recordAny.id
}
// Use bracket notation to avoid TypeScript errors on union types
if (!recordToSave.type && (recordAny as any).type) {
recordToSave.type = (recordAny as any).type
}
if (!recordToSave.props && (recordAny as any).props) {
recordToSave.props = (recordAny as any).props
}
// Copy all enumerable properties that might have been lost
for (const prop in recordAny) {
if (!(prop in recordToSave)) {
try {
// Use bracket notation with explicit any cast to avoid indexing errors
(recordToSave as any)[prop] = (recordAny as any)[prop]
} catch (e) {
// Skip properties that can't be accessed
}
}
}
} catch (jsonError) {
// If JSON serialization fails, manually copy properties
console.warn(`⚠️ JSON serialization failed for record ${key}, using manual copy`)
recordToSave = {}
for (const prop in recordAny) {
try {
// Use bracket notation with explicit any cast to avoid indexing errors
(recordToSave as any)[prop] = (recordAny as any)[prop]
} catch (e) {
// Skip properties that can't be accessed
}
}
}
store[key] = recordToSave
assignedCount++
} catch (e) {
console.error(`❌ Error copying record ${key}:`, e)
// Fallback: assign directly (might not work for nested objects)
store[key] = record
}
}
console.log("Loaded store data into Automerge document:", {
loadedStoreKeys: Object.keys(doc.store).length,
assignedCount,
sampleLoadedKeys: Object.keys(doc.store).slice(0, 5)
})
// Verify what was actually loaded
const loadedValues = Object.values(doc.store)
const loadedShapeCount = loadedValues.filter((v: any) => v?.typeName === 'shape').length
console.log("📊 Verification after loading:", {
totalLoaded: loadedValues.length,
loadedShapeCount,
loadedRecordTypes: loadedValues.reduce((acc: any, v: any) => {
const type = v?.typeName || 'unknown'
acc[type] = (acc[type] || 0) + 1
return acc
}, {})
})
}
if (existingDoc.schema) {
doc.schema = existingDoc.schema
}
} else {
console.log("No R2 data to load")
}
})
} else {
console.log("No existing document found, loading snapshot data")
// Load snapshot data for new rooms
try {
const snapshotResponse = await fetch('/src/snapshot.json')
if (snapshotResponse.ok) {
const snapshotData = await snapshotResponse.json() as TLStoreSnapshot
console.log("Loaded snapshot data:", {
hasStore: !!snapshotData.store,
storeKeys: snapshotData.store ? Object.keys(snapshotData.store).length : 0,
shapeCount: snapshotData.store ? Object.values(snapshotData.store).filter((r: any) => r.typeName === 'shape').length : 0
})
handle.change((doc) => {
if (snapshotData.store) {
// Pre-sanitize snapshot data to remove invalid properties
const sanitizedStore = { ...snapshotData.store }
let sanitizedCount = 0
Object.keys(sanitizedStore).forEach(key => {
const record = (sanitizedStore as any)[key]
if (record && record.typeName === 'shape') {
// Remove invalid properties from embed shapes (both custom Embed and default embed)
if ((record.type === 'Embed' || record.type === 'embed') && record.props) {
const invalidEmbedProps = ['doesResize', 'doesResizeHeight', 'richText']
invalidEmbedProps.forEach(prop => {
if (prop in record.props) {
console.log(`🔧 Pre-sanitizing snapshot: Removing invalid prop '${prop}' from embed shape ${record.id}`)
delete record.props[prop]
sanitizedCount++
}
})
}
// Remove invalid properties from text shapes
if (record.type === 'text' && record.props) {
const invalidTextProps = ['text', 'richText']
invalidTextProps.forEach(prop => {
if (prop in record.props) {
console.log(`🔧 Pre-sanitizing snapshot: Removing invalid prop '${prop}' from text shape ${record.id}`)
delete record.props[prop]
sanitizedCount++
}
})
}
}
})
if (sanitizedCount > 0) {
console.log(`🔧 Pre-sanitized ${sanitizedCount} invalid properties from snapshot data`)
}
doc.store = sanitizedStore
console.log("Loaded snapshot store data into Automerge document:", {
storeKeys: Object.keys(doc.store).length,
shapeCount: Object.values(doc.store).filter((r: any) => r.typeName === 'shape').length,
sampleKeys: Object.keys(doc.store).slice(0, 5)
})
}
if (snapshotData.schema) {
doc.schema = snapshotData.schema
}
})
}
} catch (error) {
console.error('Error loading snapshot data:', error)
}
}
// Wait a bit more to ensure the handle is fully ready with data
await new Promise(resolve => setTimeout(resolve, 500))
setHandle(handle)
setIsLoading(false)
console.log("Automerge handle initialized and loading completed")
}
} catch (error) {
console.error('Error initializing Automerge handle:', error)
if (mounted) {
setIsLoading(false)
}
}
}
initializeHandle()
return () => {
mounted = false
}
}, [adapter, roomId])
// Auto-save to Cloudflare on every change (with debouncing to prevent excessive calls)
useEffect(() => {
if (!handle) return
let saveTimeout: NodeJS.Timeout
const scheduleSave = () => {
// Clear existing timeout
if (saveTimeout) clearTimeout(saveTimeout)
// Schedule save with a short debounce (500ms) to batch rapid changes
saveTimeout = setTimeout(async () => {
try {
await adapter.saveToCloudflare(roomId)
} catch (error) {
console.error('Error in change-triggered save:', error)
}
}, 500)
}
// Listen for changes to the Automerge document
const changeHandler = (_payload: any) => {
scheduleSave()
}
handle.on('change', changeHandler)
return () => {
handle.off('change', changeHandler)
if (saveTimeout) clearTimeout(saveTimeout)
}
}, [handle, adapter, roomId])
// Use the Automerge store (only when handle is ready and not loading)
const store = useAutomergeStoreV2({
handle: !isLoading && handle ? handle : null,
userId: user?.id || 'anonymous',
})
// Set up presence if user is provided (always call hooks, but handle null internally)
useAutomergePresence({
handle,
store,
userMetadata: {
userId: user?.id || 'anonymous',
name: user?.name || 'Anonymous',
color: '#000000', // Default color
},
})
// Return loading state while initializing
if (isLoading || !handle) {
return { ...store, handle: null }
}
return { ...store, handle }
}

View File

@ -1,4 +1,4 @@
import { useMemo, useEffect, useState, useCallback } from "react"
import { useMemo, useEffect, useState, useCallback, useRef } from "react"
import { TLStoreSnapshot } from "@tldraw/tldraw"
import { CloudflareNetworkAdapter } from "./CloudflareAdapter"
import { useAutomergeStoreV2, useAutomergePresence } from "./useAutomergeStoreV2"
@ -30,11 +30,59 @@ export function useAutomergeSync(config: AutomergeSyncConfig): TLStoreWithStatus
return uri.replace(/\/connect\/.*$/, '')
}, [uri])
const [repo] = useState(() => new Repo({
network: [new CloudflareNetworkAdapter(workerUrl, roomId)]
}))
const [handle, setHandle] = useState<any>(null)
const [isLoading, setIsLoading] = useState(true)
const handleRef = useRef<any>(null)
// Update ref when handle changes
useEffect(() => {
handleRef.current = handle
}, [handle])
// Callback to apply JSON sync data directly to handle (bypassing Automerge sync protocol)
const applyJsonSyncData = useCallback((data: TLStoreSnapshot) => {
const currentHandle = handleRef.current
if (!currentHandle) {
console.warn('⚠️ Cannot apply JSON sync data: handle not ready yet')
return
}
try {
console.log('🔌 Applying JSON sync data directly to handle:', {
hasStore: !!data.store,
storeKeys: data.store ? Object.keys(data.store).length : 0
})
// Apply the data directly to the handle
currentHandle.change((doc: any) => {
// Merge the store data into the document
if (data.store) {
if (!doc.store) {
doc.store = {}
}
// Merge all records from the sync data
Object.entries(data.store).forEach(([id, record]) => {
doc.store[id] = record
})
}
// Preserve schema if provided
if (data.schema) {
doc.schema = data.schema
}
})
console.log('✅ Successfully applied JSON sync data to handle')
} catch (error) {
console.error('❌ Error applying JSON sync data to handle:', error)
}
}, [])
const [repo] = useState(() => {
const adapter = new CloudflareNetworkAdapter(workerUrl, roomId, applyJsonSyncData)
return new Repo({
network: [adapter]
})
})
// Initialize Automerge document handle
useEffect(() => {
@ -123,36 +171,7 @@ export function useAutomergeSync(config: AutomergeSyncConfig): TLStoreWithStatus
}
}, [handle])
// Get the store from the Automerge document
const store = useMemo(() => {
if (!handle?.doc()) {
return null
}
const doc = handle.doc()
if (!doc.store) {
return null
}
return doc.store
}, [handle])
// Get the store with status
const storeWithStatus = useMemo((): TLStoreWithStatus => {
if (!store) {
return {
status: 'loading' as const
}
}
return {
status: 'synced-remote' as const,
connectionStatus: 'online' as const,
store
}
}, [store, isLoading])
// Get presence data (only when handle is ready)
// Get user metadata for presence
const userMetadata: { userId: string; name: string; color: string } = (() => {
if (user && 'userId' in user) {
return {
@ -168,14 +187,22 @@ export function useAutomergeSync(config: AutomergeSyncConfig): TLStoreWithStatus
}
})()
// Use useAutomergeStoreV2 to create a proper TLStore instance that syncs with Automerge
const storeWithStatus = useAutomergeStoreV2({
handle: handle || null as any,
userId: userMetadata.userId
})
// Get presence data (only when handle is ready)
const presence = useAutomergePresence({
handle: handle || null,
store: store || null,
store: storeWithStatus.store || null,
userMetadata
})
return {
...storeWithStatus,
handle,
presence
} as TLStoreWithStatus & { presence: typeof presence }
} as TLStoreWithStatus & { presence: typeof presence; handle: typeof handle }
}

View File

@ -10,7 +10,7 @@ const WORKER_ENV = import.meta.env.VITE_WORKER_ENV || 'production' // Default to
const WORKER_URLS = {
local: `http://${window.location.hostname}:5172`,
dev: "https://jeffemmett-canvas-automerge-dev.jeffemmett.workers.dev",
dev: `http://${window.location.hostname}:5172`,
production: "https://jeffemmett-canvas.jeffemmett.workers.dev"
}

View File

@ -239,7 +239,7 @@ export function Board() {
console.log(`📊 Board: Editor can see ${editorShapes.length} shapes for rendering`)
// Debug: Check all shapes in the store vs what editor can see
const storeShapes = store.store?.allRecords().filter(r => r.typeName === 'shape') || []
const storeShapes = store.store?.allRecords().filter((r: any) => r.typeName === 'shape') || []
console.log(`📊 Board: Store has ${storeShapes.length} shapes, editor sees ${editorShapes.length}`)
if (editorShapes.length > 0 && editor) {
@ -256,8 +256,8 @@ export function Board() {
const currentPageId = editor.getCurrentPageId()
console.log(`📊 Board: Current page ID: ${currentPageId}`)
const pageRecords = store.store?.allRecords().filter(r => r.typeName === 'page') || []
console.log(`📊 Board: Available pages:`, pageRecords.map(p => ({
const pageRecords = store.store?.allRecords().filter((r: any) => r.typeName === 'page') || []
console.log(`📊 Board: Available pages:`, pageRecords.map((p: any) => ({
id: p.id,
name: (p as any).name
})))
@ -265,8 +265,8 @@ export function Board() {
// Check if there are shapes in store that editor can't see
if (storeShapes.length > editorShapes.length) {
const editorShapeIds = new Set(editorShapes.map(s => s.id))
const missingShapes = storeShapes.filter(s => !editorShapeIds.has(s.id))
console.warn(`📊 Board: ${missingShapes.length} shapes in store but not visible to editor:`, missingShapes.map(s => ({
const missingShapes = storeShapes.filter((s: any) => !editorShapeIds.has(s.id))
console.warn(`📊 Board: ${missingShapes.length} shapes in store but not visible to editor:`, missingShapes.map((s: any) => ({
id: s.id,
type: s.type,
x: s.x,
@ -275,19 +275,19 @@ export function Board() {
})))
// Check if missing shapes are on a different page
const shapesOnCurrentPage = missingShapes.filter(s => s.parentId === currentPageId)
const shapesOnOtherPages = missingShapes.filter(s => s.parentId !== currentPageId)
const shapesOnCurrentPage = missingShapes.filter((s: any) => s.parentId === currentPageId)
const shapesOnOtherPages = missingShapes.filter((s: any) => s.parentId !== currentPageId)
console.log(`📊 Board: Missing shapes on current page: ${shapesOnCurrentPage.length}, on other pages: ${shapesOnOtherPages.length}`)
if (shapesOnOtherPages.length > 0) {
console.log(`📊 Board: Shapes on other pages:`, shapesOnOtherPages.map(s => ({
console.log(`📊 Board: Shapes on other pages:`, shapesOnOtherPages.map((s: any) => ({
id: s.id,
parentId: s.parentId
})))
// Fix: Move shapes to the current page
console.log(`📊 Board: Moving ${shapesOnOtherPages.length} shapes to current page ${currentPageId}`)
const shapesToMove = shapesOnOtherPages.map(s => ({
const shapesToMove = shapesOnOtherPages.map((s: any) => ({
id: s.id,
type: s.type,
parentId: currentPageId