diff --git a/src/automerge/AutomergeToTLStore.ts b/src/automerge/AutomergeToTLStore.ts
index d132c0c..78ff84f 100644
--- a/src/automerge/AutomergeToTLStore.ts
+++ b/src/automerge/AutomergeToTLStore.ts
@@ -204,37 +204,49 @@ export function applyAutomergePatchesToTLStore(
     console.error("Failed to sanitize records:", failedRecords)
   }
 
-  // CRITICAL: Final safety check - ensure no geo shapes have w/h/geo at top level
-  // Also ensure text shapes don't have props.text (should use props.richText instead)
-  const finalSanitized = toPut.map(record => {
-    if (record.typeName === 'shape' && record.type === 'geo') {
-      // Store values before removing from top level
-      const wValue = 'w' in record ? (record as any).w : undefined
-      const hValue = 'h' in record ? (record as any).h : undefined
-      const geoValue = 'geo' in record ? (record as any).geo : undefined
-
-      // Create cleaned record without w/h/geo at top level
-      const cleaned: any = {}
-      for (const key in record) {
-        if (key !== 'w' && key !== 'h' && key !== 'geo') {
-          cleaned[key] = (record as any)[key]
-        }
-      }
-
-      // Ensure props exists and move values there if needed
-      if (!cleaned.props) cleaned.props = {}
-      if (wValue !== undefined && (!('w' in cleaned.props) || cleaned.props.w === undefined)) {
-        cleaned.props.w = wValue
-      }
-      if (hValue !== undefined && (!('h' in cleaned.props) || cleaned.props.h === undefined)) {
-        cleaned.props.h = hValue
-      }
-      if (geoValue !== undefined && (!('geo' in cleaned.props) || cleaned.props.geo === undefined)) {
-        cleaned.props.geo = geoValue
-      }
-
-      return cleaned as TLRecord
-    }
+  // CRITICAL: Final safety check - ensure no geo shapes have w/h/geo at top level
+  // Also ensure text shapes don't have props.text (should use props.richText instead)
+  const finalSanitized = toPut.map(record => {
+    if (record.typeName === 'shape' && record.type === 'geo') {
+      // Store values before removing from top level
+      const wValue = 'w' in record ? (record as any).w : undefined
+      const hValue = 'h' in record ? (record as any).h : undefined
+      const geoValue = 'geo' in record ? (record as any).geo : undefined
+
+      // Create cleaned record without w/h/geo at top level
+      const cleaned: any = {}
+      for (const key in record) {
+        if (key !== 'w' && key !== 'h' && key !== 'geo') {
+          cleaned[key] = (record as any)[key]
+        }
+      }
+
+      // Ensure props exists and move values there if needed
+      if (!cleaned.props) cleaned.props = {}
+      if (wValue !== undefined && (!('w' in cleaned.props) || cleaned.props.w === undefined)) {
+        cleaned.props.w = wValue
+      }
+      if (hValue !== undefined && (!('h' in cleaned.props) || cleaned.props.h === undefined)) {
+        cleaned.props.h = hValue
+      }
+
+      // CRITICAL: props.geo is REQUIRED for geo shapes - TLDraw validation will fail without it
+      // Use geoValue if available, otherwise default to 'rectangle'
+      if (geoValue !== undefined) {
+        cleaned.props.geo = geoValue
+      } else if (!cleaned.props.geo || cleaned.props.geo === undefined || cleaned.props.geo === null) {
+        // Default to rectangle if geo is missing
+        cleaned.props.geo = 'rectangle'
+      }
+
+      // CRITICAL: props.dash is REQUIRED for geo shapes - TLDraw validation will fail without it
+      // Ensure it's always set, defaulting to 'draw' if missing
+      if (!cleaned.props.dash || cleaned.props.dash === undefined || cleaned.props.dash === null) {
+        cleaned.props.dash = 'draw'
+      }
+
+      return cleaned as TLRecord
+    }
 
     // CRITICAL: Remove props.text from text shapes (TLDraw schema doesn't allow it)
     if (record.typeName === 'shape' && record.type === 'text' && (record as any).props && 'text' in (record as any).props) {
@@ -418,6 +430,18 @@ function sanitizeRecord(record: any): TLRecord {
       delete (sanitized as any).geo
     }
 
+    // CRITICAL: props.geo is REQUIRED for geo shapes - TLDraw validation will fail without it
+    // Ensure it's always set, defaulting to 'rectangle' if missing
+    if (!sanitized.props.geo || sanitized.props.geo === undefined || sanitized.props.geo === null) {
+      sanitized.props.geo = 'rectangle'
+    }
+
+    // CRITICAL: props.dash is REQUIRED for geo shapes - TLDraw validation will fail without it
+    // Ensure it's always set, defaulting to 'draw' if missing
+    if (!sanitized.props.dash || sanitized.props.dash === undefined || sanitized.props.dash === null) {
+      sanitized.props.dash = 'draw'
+    }
   }
 
   // Only fix type if completely missing
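Review note: with this change, the geo/dash backfill exists in three places (the final safety check above, `sanitizeRecord`, and the load path in `useAutomergeStoreV2.ts`). A minimal sketch of a shared helper that could consolidate the invariant; the helper name and defaults table are illustrative, not part of this PR:

```ts
// Sketch: one helper for the geo-shape invariants this PR enforces in three
// places. Names (ensureGeoShapeProps, REQUIRED_GEO_DEFAULTS) are hypothetical.
type AnyRecord = { typeName?: string; type?: string; props?: Record<string, unknown> } & Record<string, unknown>

const REQUIRED_GEO_DEFAULTS: Record<string, unknown> = {
  geo: 'rectangle', // TLDraw rejects geo shapes without props.geo
  dash: 'draw',     // ...and without props.dash
}

function ensureGeoShapeProps<T extends AnyRecord>(record: T): T {
  if (record.typeName !== 'shape' || record.type !== 'geo') return record
  const { w, h, geo, ...rest } = record as Record<string, unknown>
  const props: Record<string, unknown> = { ...(record.props ?? {}) }
  // Move stray top-level w/h/geo into props without clobbering existing values
  if (w !== undefined && props.w === undefined) props.w = w
  if (h !== undefined && props.h === undefined) props.h = h
  if (geo !== undefined && props.geo === undefined) props.geo = geo
  // Backfill the required enum props so TLDraw validation cannot fail on them
  for (const [key, fallback] of Object.entries(REQUIRED_GEO_DEFAULTS)) {
    if (props[key] === undefined || props[key] === null) props[key] = fallback
  }
  return { ...rest, props } as unknown as T
}
```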
diff --git a/src/automerge/CloudflareAdapter.ts b/src/automerge/CloudflareAdapter.ts
index 4ca8a8e..08c6ac4 100644
--- a/src/automerge/CloudflareAdapter.ts
+++ b/src/automerge/CloudflareAdapter.ts
@@ -275,15 +275,14 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
       documentIdType: typeof message.documentId
     })
 
-    // Check if this is a JSON sync message with full document data
-    // These should NOT go through Automerge's sync protocol (which expects binary messages)
-    // Instead, apply the data directly to the handle via callback
+    // JSON sync is deprecated - all data flows through Automerge sync protocol
+    // Old format content is converted server-side and saved to R2 in Automerge format
+    // Skip JSON sync messages - they should not be sent anymore
     const isJsonDocumentData = message.data && typeof message.data === 'object' && message.data.store
 
-    if (isJsonDocumentData && this.onJsonSyncData) {
-      console.log('🔌 CloudflareAdapter: Applying JSON document data directly to handle (bypassing sync protocol)')
-      this.onJsonSyncData(message.data)
-      return // Don't emit as sync message
+    if (isJsonDocumentData) {
+      console.warn('⚠️ CloudflareAdapter: Received JSON sync message (deprecated). Ignoring - all data should flow through Automerge sync protocol.')
+      return // Don't process JSON sync messages
    }
 
     // Validate documentId - Automerge requires a valid Automerge URL format
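Aside: the adapter distinguishes legacy JSON payloads from real sync traffic by duck-typing `message.data.store`. A hedged sketch of that check as a named type guard; the message interface below is inferred from this diff, since Automerge's own sync payloads are opaque binary:

```ts
// Sketch: the legacy-message check as a reusable type guard. The message
// shape is an assumption based on this diff, not an Automerge type.
interface LegacyJsonSyncMessage {
  type: 'sync'
  senderId: string
  targetId: string
  documentId: string
  data: { store: Record<string, unknown>; schema?: unknown }
}

function isLegacyJsonSyncMessage(message: any): message is LegacyJsonSyncMessage {
  return Boolean(
    message &&
    message.type === 'sync' &&
    message.data &&
    typeof message.data === 'object' &&
    !(message.data instanceof Uint8Array) && // real Automerge sync payloads are binary
    message.data.store
  )
}
```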
diff --git a/src/automerge/useAutomergeStoreV2.ts b/src/automerge/useAutomergeStoreV2.ts
index 7ad7e6d..038e3e8 100644
--- a/src/automerge/useAutomergeStoreV2.ts
+++ b/src/automerge/useAutomergeStoreV2.ts
@@ -126,44 +126,90 @@ export function useAutomergeStoreV2({
       try {
         // Apply patches from Automerge to TLDraw store
         if (payload.patches && payload.patches.length > 0) {
+          // Debug: Check if patches contain shapes
+          const shapePatches = payload.patches.filter((p: any) => {
+            const id = p.path?.[1]
+            return id && typeof id === 'string' && id.startsWith('shape:')
+          })
+          if (shapePatches.length > 0) {
+            console.log(`🔌 Automerge patches contain ${shapePatches.length} shape patches out of ${payload.patches.length} total patches`)
+          }
+
           try {
+            const recordsBefore = store.allRecords()
+            const shapesBefore = recordsBefore.filter((r: any) => r.typeName === 'shape')
+
             applyAutomergePatchesToTLStore(payload.patches, store)
+
+            const recordsAfter = store.allRecords()
+            const shapesAfter = recordsAfter.filter((r: any) => r.typeName === 'shape')
+
+            if (shapesAfter.length !== shapesBefore.length) {
+              console.log(`✅ Applied ${payload.patches.length} patches: shapes changed from ${shapesBefore.length} to ${shapesAfter.length}`)
+            }
+
             // Only log if there are many patches or if debugging is needed
             if (payload.patches.length > 5) {
               console.log(`✅ Successfully applied ${payload.patches.length} patches`)
             }
           } catch (patchError) {
-            console.error("Error applying patches, attempting individual patch application:", patchError)
+            console.error("Error applying patches batch, attempting individual patch application:", patchError)
 
             // Try applying patches one by one to identify problematic ones
+            // This is a fallback - ideally we should fix the data at the source
             let successCount = 0
+            let failedPatches: any[] = []
             for (const patch of payload.patches) {
               try {
                 applyAutomergePatchesToTLStore([patch], store)
                 successCount++
               } catch (individualPatchError) {
+                failedPatches.push({ patch, error: individualPatchError })
                 console.error(`Failed to apply individual patch:`, individualPatchError)
+
                 // Log the problematic patch for debugging
+                const recordId = patch.path[1] as string
                 console.error("Problematic patch details:", {
                   action: patch.action,
                   path: patch.path,
+                  recordId: recordId,
                   value: 'value' in patch ? patch.value : undefined,
-                  patchId: patch.path[1],
                   errorMessage: individualPatchError instanceof Error ? individualPatchError.message : String(individualPatchError)
                 })
 
                 // Try to get more context about the failing record
-                const recordId = patch.path[1] as string
                 try {
                   const existingRecord = store.get(recordId as any)
                   console.error("Existing record that failed:", existingRecord)
+
+                  // If it's a geo shape missing props.geo, try to fix it
+                  if (existingRecord && (existingRecord as any).typeName === 'shape' && (existingRecord as any).type === 'geo') {
+                    const geoRecord = existingRecord as any
+                    if (!geoRecord.props || !geoRecord.props.geo) {
+                      console.log(`🔧 Attempting to fix geo shape ${recordId} missing props.geo`)
+                      // This won't help with the current patch, but might help future patches
+                      // The real fix should happen in AutomergeToTLStore sanitization
+                    }
+                  }
                 } catch (e) {
                   console.error("Could not retrieve existing record:", e)
                 }
               }
             }
-            // Only log if there are failures or many patches
+
+            // Log summary
+            if (failedPatches.length > 0) {
+              console.error(`❌ Failed to apply ${failedPatches.length} out of ${payload.patches.length} patches`)
+              // Most common issue: geo shapes missing props.geo - this should be fixed in sanitization
+              const geoShapeErrors = failedPatches.filter(p =>
+                p.error instanceof Error && p.error.message.includes('props.geo')
+              )
+              if (geoShapeErrors.length > 0) {
+                console.error(`⚠️ ${geoShapeErrors.length} failures due to missing props.geo - this should be fixed in AutomergeToTLStore sanitization`)
+              }
+            }
+
             if (successCount < payload.patches.length || payload.patches.length > 5) {
-              console.log(`Successfully applied ${successCount} out of ${payload.patches.length} patches`)
+              console.log(`✅ Successfully applied ${successCount} out of ${payload.patches.length} patches`)
             }
           }
         }
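The batch-then-per-patch retry above is inlined in the change handler. A sketch of the same fallback as a standalone function that returns the failures for the caller to aggregate; the name and signature are hypothetical:

```ts
// Sketch: batch-then-individual patch application as a reusable function.
// `applyPatches` stands in for applyAutomergePatchesToTLStore.
interface PatchFailure<P> { patch: P; error: unknown }

function applyPatchesWithFallback<P, S>(
  patches: P[],
  store: S,
  applyPatches: (patches: P[], store: S) => void,
): PatchFailure<P>[] {
  try {
    applyPatches(patches, store) // fast path: apply the whole batch at once
    return []
  } catch {
    // Slow path: replay one by one so a single bad patch cannot block the
    // rest, and collect the failures for diagnostics.
    const failures: PatchFailure<P>[] = []
    for (const patch of patches) {
      try {
        applyPatches([patch], store)
      } catch (error) {
        failures.push({ patch, error })
      }
    }
    return failures
  }
}
```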
@@ -320,8 +366,28 @@ export function useAutomergeStoreV2({
     // Force cache refresh - pre-sanitization code has been removed
 
     // Initialize store with existing records from Automerge
+    // NOTE: JSON sync might have already loaded data into the store
+    // Check if store is already populated before loading from Automerge
+    const existingStoreRecords = store.allRecords()
+    const existingStoreShapes = existingStoreRecords.filter((r: any) => r.typeName === 'shape')
+
     if (doc.store) {
       const storeKeys = Object.keys(doc.store)
+      const docShapes = Object.values(doc.store).filter((r: any) => r?.typeName === 'shape').length
+      console.log(`📊 Automerge store initialization: doc has ${storeKeys.length} records (${docShapes} shapes), store already has ${existingStoreRecords.length} records (${existingStoreShapes.length} shapes)`)
+
+      // If store already has shapes (from JSON sync), skip Automerge initialization
+      // JSON sync happened first and loaded the data
+      if (existingStoreShapes.length > 0 && docShapes === 0) {
+        console.log(`ℹ️ Store already populated from JSON sync (${existingStoreShapes.length} shapes). Skipping Automerge initialization to prevent overwriting.`)
+        setStoreWithStatus({
+          store,
+          status: "synced-remote",
+          connectionStatus: "online",
+        })
+        return // Skip Automerge initialization
+      }
+
       console.log(`📊 Store keys count: ${storeKeys.length}`, storeKeys.slice(0, 10))
 
       // Get all store values - Automerge should handle this correctly
@@ -372,7 +438,16 @@ export function useAutomergeStoreV2({
         return true
       })
 
+      // Track shape types before processing to ensure all are loaded
+      const shapeRecordsBefore = records.filter((r: any) => r.typeName === 'shape')
+      const shapeTypeCountsBefore = shapeRecordsBefore.reduce((acc: any, r: any) => {
+        const type = r.type || 'unknown'
+        acc[type] = (acc[type] || 0) + 1
+        return acc
+      }, {})
+
       console.log(`📊 After filtering: ${records.length} valid records from ${allStoreValues.length} total store values`)
+      console.log(`📊 Shape type breakdown before processing (${shapeRecordsBefore.length} shapes):`, shapeTypeCountsBefore)
 
       // Only log if there are many records or if debugging is needed
       if (records.length > 50) {
@@ -977,11 +1052,31 @@ export function useAutomergeStoreV2({
           }
 
           // Validate that the shape type is supported by our schema
+          // CRITICAL: Include ALL original tldraw shapes to ensure they're preserved
           const validCustomShapes = ['ObsNote', 'VideoChat', 'Transcription', 'SharedPiano', 'Prompt', 'ChatBox', 'Embed', 'Markdown', 'MycrozineTemplate', 'Slide', 'FathomTranscript', 'Holon', 'ObsidianBrowser', 'HolonBrowser', 'FathomMeetingsBrowser', 'LocationShare']
           const validDefaultShapes = ['arrow', 'bookmark', 'draw', 'embed', 'frame', 'geo', 'group', 'highlight', 'image', 'line', 'note', 'text', 'video']
           const allValidShapes = [...validCustomShapes, ...validDefaultShapes]
 
-          if (!allValidShapes.includes(processedRecord.type)) {
+          // Normalize shape type to handle case variations and known aliases
+          const normalizedType = processedRecord.type?.toLowerCase()
+          const isDefaultShape = validDefaultShapes.includes(normalizedType)
+          const isCustomShape = validCustomShapes.includes(processedRecord.type)
+
+          // Handle known shape type aliases/variations
+          const shapeTypeAliases: Record<string, string> = {
+            'transcribe': 'Transcription',    // "Transcribe" -> "Transcription"
+            'transcription': 'Transcription', // lowercase -> proper case
+          }
+          const aliasType = shapeTypeAliases[normalizedType] || shapeTypeAliases[processedRecord.type]
+
+          if (aliasType) {
+            console.log(`🔧 Normalizing shape type from "${processedRecord.type}" to "${aliasType}" for shape:`, processedRecord.id)
+            processedRecord.type = aliasType
+          } else if (isDefaultShape && processedRecord.type !== normalizedType) {
+            // If it's a valid default shape but with wrong casing, normalize it
+            console.log(`🔧 Normalizing shape type from "${processedRecord.type}" to "${normalizedType}" for shape:`, processedRecord.id)
+            processedRecord.type = normalizedType
+          } else if (!isDefaultShape && !isCustomShape) {
+            // Only convert to text if it's truly unknown
             console.log(`🔧 Unknown shape type ${processedRecord.type}, converting to text shape for shape:`, processedRecord.id)
             processedRecord.type = 'text'
             if (!processedRecord.props) processedRecord.props = {}
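The alias and casing handling above could be expressed as a single pure function. A sketch under the same alias table; `normalizeShapeType` is a hypothetical name, and returning `null` leaves the unknown-type fallback (this PR uses `'text'`) to the caller:

```ts
// Sketch: alias + casing normalization as one pure function.
const DEFAULT_SHAPES = ['arrow', 'bookmark', 'draw', 'embed', 'frame', 'geo', 'group',
  'highlight', 'image', 'line', 'note', 'text', 'video'] as const

const SHAPE_TYPE_ALIASES: Record<string, string> = {
  transcribe: 'Transcription',
  transcription: 'Transcription',
}

function normalizeShapeType(rawType: string | undefined, customShapes: string[]): string | null {
  if (!rawType) return null
  const lower = rawType.toLowerCase()
  if (SHAPE_TYPE_ALIASES[lower]) return SHAPE_TYPE_ALIASES[lower]          // known alias
  if ((DEFAULT_SHAPES as readonly string[]).includes(lower)) return lower  // built-in, fix casing
  if (customShapes.includes(rawType)) return rawType                       // custom, exact match
  return null                                                              // truly unknown
}

// Usage: normalizeShapeType('Geo', validCustomShapes) === 'geo'
//        normalizeShapeType('Transcribe', validCustomShapes) === 'Transcription'
```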
@@ -1351,9 +1446,15 @@ export function useAutomergeStoreV2({
         hasProps: !!r.props
       })))
 
-      // Debug: Log shape structures before loading
+      // Debug: Log shape structures before loading - track ALL shape types
       const shapesToLoad = processedRecords.filter(r => r.typeName === 'shape')
+      const shapeTypeCountsToLoad = shapesToLoad.reduce((acc: any, r: any) => {
+        const type = r.type || 'unknown'
+        acc[type] = (acc[type] || 0) + 1
+        return acc
+      }, {})
       console.log(`📊 About to load ${shapesToLoad.length} shapes into store`)
+      console.log(`📊 Shape type breakdown to load:`, shapeTypeCountsToLoad)
 
       if (shapesToLoad.length > 0) {
         console.log("📊 Sample processed shape structure:", {
@@ -1366,8 +1467,9 @@ export function useAutomergeStoreV2({
           allKeys: Object.keys(shapesToLoad[0])
         })
 
-        // Log all shapes with their positions
-        console.log("📊 All processed shapes:", shapesToLoad.map(s => ({
+        // Log all shapes with their positions (first 20)
+        const shapesToLog = shapesToLoad.slice(0, 20)
+        console.log("📊 Processed shapes (first 20):", shapesToLog.map(s => ({
           id: s.id,
           type: s.type,
           x: s.x,
@@ -1377,6 +1479,9 @@ export function useAutomergeStoreV2({
           propsH: s.props?.h,
           parentId: s.parentId
         })))
+        if (shapesToLoad.length > 20) {
+          console.log(`📊 ... and ${shapesToLoad.length - 20} more shapes`)
+        }
       }
 
       // Load records into store
@@ -1521,11 +1626,18 @@ export function useAutomergeStoreV2({
           delete (record as any).geo
         }
 
-        // Ensure geo property exists in props
+        // CRITICAL: props.geo is REQUIRED for geo shapes - TLDraw validation will fail without it
+        // Ensure it's always set, defaulting to 'rectangle' if missing
         if (!record.props) record.props = {}
-        if (!record.props.geo) {
+        if (!record.props.geo || record.props.geo === undefined || record.props.geo === null) {
           record.props.geo = 'rectangle'
         }
+
+        // CRITICAL: props.dash is REQUIRED for geo shapes - TLDraw validation will fail without it
+        // Ensure it's always set, defaulting to 'draw' if missing
+        if (!record.props.dash || record.props.dash === undefined || record.props.dash === null) {
+          record.props.dash = 'draw'
+        }
       }
 
       // CRITICAL: Final safety check - ensure no geo shapes have w/h/geo at top level
@@ -1963,10 +2075,16 @@ export function useAutomergeStoreV2({
         }
       }
 
-      // Verify loading
+      // Verify loading - track ALL shape types that were successfully loaded
       const storeRecords = store.allRecords()
      const shapes = storeRecords.filter(r => r.typeName === 'shape')
+      const shapeTypeCountsAfter = shapes.reduce((acc: any, r: any) => {
+        const type = (r as any).type || 'unknown'
+        acc[type] = (acc[type] || 0) + 1
+        return acc
+      }, {})
       console.log(`📊 Store verification: ${processedRecords.length} processed records, ${storeRecords.length} total store records, ${shapes.length} shapes`)
+      console.log(`📊 Shape type breakdown after loading:`, shapeTypeCountsAfter)
 
       // Debug: Check if shapes have the right structure
       if (shapes.length > 0) {
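The `reduce`-based shape-type breakdown appears several more times below (save path, worker merge, persistence). A typed `countBy` would collapse those copies into one call; sketch only, the helper is not part of this PR:

```ts
// Sketch: a typed countBy replacing the repeated
// `.reduce((acc, r) => { acc[type] = (acc[type] || 0) + 1 }, {})` blocks.
function countBy<T>(items: Iterable<T>, key: (item: T) => string): Record<string, number> {
  const counts: Record<string, number> = {}
  for (const item of items) {
    const k = key(item)
    counts[k] = (counts[k] ?? 0) + 1
  }
  return counts
}

// Usage matching the logs above (record shape assumed from this diff):
// const breakdown = countBy(
//   Object.values(doc.store).filter((r: any) => r?.typeName === 'shape'),
//   (r: any) => r?.type ?? 'unknown',
// )
```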
diff --git a/src/automerge/useAutomergeSyncRepo.ts b/src/automerge/useAutomergeSyncRepo.ts
index 7fc6a57..f28eb28 100644
--- a/src/automerge/useAutomergeSyncRepo.ts
+++ b/src/automerge/useAutomergeSyncRepo.ts
@@ -34,48 +34,20 @@ export function useAutomergeSync(config: AutomergeSyncConfig): TLStoreWithStatus
   const [handle, setHandle] = useState(null)
   const [isLoading, setIsLoading] = useState(true)
   const handleRef = useRef(null)
+  const storeRef = useRef(null)
 
-  // Update ref when handle changes
+  // Update refs when handle/store changes
   useEffect(() => {
     handleRef.current = handle
   }, [handle])
 
-  // Callback to apply JSON sync data directly to handle (bypassing Automerge sync protocol)
+  // JSON sync is deprecated - all data now flows through Automerge sync protocol
+  // Old format content is converted server-side and saved to R2 in Automerge format
+  // This callback is kept for backwards compatibility but should not be used
   const applyJsonSyncData = useCallback((data: TLStoreSnapshot) => {
-    const currentHandle = handleRef.current
-    if (!currentHandle) {
-      console.warn('⚠️ Cannot apply JSON sync data: handle not ready yet')
-      return
-    }
-
-    try {
-      console.log('🔌 Applying JSON sync data directly to handle:', {
-        hasStore: !!data.store,
-        storeKeys: data.store ? Object.keys(data.store).length : 0
-      })
-
-      // Apply the data directly to the handle
-      currentHandle.change((doc: any) => {
-        // Merge the store data into the document
-        if (data.store) {
-          if (!doc.store) {
-            doc.store = {}
-          }
-          // Merge all records from the sync data
-          Object.entries(data.store).forEach(([id, record]) => {
-            doc.store[id] = record
-          })
-        }
-        // Preserve schema if provided
-        if (data.schema) {
-          doc.schema = data.schema
-        }
-      })
-
-      console.log('✅ Successfully applied JSON sync data to handle')
-    } catch (error) {
-      console.error('❌ Error applying JSON sync data to handle:', error)
-    }
+    console.warn('⚠️ JSON sync callback called but JSON sync is deprecated. All data should flow through Automerge sync protocol.')
+    // Don't apply JSON sync - let Automerge sync handle everything
+    return
   }, [])
 
   const [repo] = useState(() => {
@@ -91,11 +63,12 @@ export function useAutomergeSync(config: AutomergeSyncConfig): TLStoreWithStatus
 
     const initializeHandle = async () => {
       try {
-        console.log("🔌 Initializing Automerge Repo with NetworkAdapter")
+        console.log("🔌 Initializing Automerge Repo with NetworkAdapter for room:", roomId)
 
         if (mounted) {
-          // Create a new document - Automerge will generate the proper document ID
-          // Force refresh to clear cache
+          // CRITICAL: Create a new Automerge document (repo.create() generates a proper document ID)
+          // We can't use repo.find() with a custom ID because Automerge requires specific document ID formats
+          // Instead, we'll create a new document and load initial data from the server
          const handle = repo.create()
 
           console.log("Created Automerge handle via Repo:", {
@@ -106,9 +79,53 @@ export function useAutomergeSync(config: AutomergeSyncConfig): TLStoreWithStatus
           // Wait for the handle to be ready
           await handle.whenReady()
 
-          console.log("Automerge handle is ready:", {
-            hasDoc: !!handle.doc(),
-            docKeys: handle.doc() ? Object.keys(handle.doc()).length : 0
+          // CRITICAL: Always load initial data from the server
+          // The server stores documents in R2 as JSON, so we need to load and initialize the Automerge document
+          console.log("📥 Loading initial data from server...")
+          try {
+            const response = await fetch(`${workerUrl}/room/${roomId}`)
+            if (response.ok) {
+              const serverDoc = await response.json() as TLStoreSnapshot
+              const serverShapeCount = serverDoc.store ? Object.values(serverDoc.store).filter((r: any) => r?.typeName === 'shape').length : 0
+              const serverRecordCount = Object.keys(serverDoc.store || {}).length
+
+              console.log(`📥 Loaded document from server: ${serverRecordCount} records, ${serverShapeCount} shapes`)
+
+              // Initialize the Automerge document with server data
+              if (serverDoc.store && serverRecordCount > 0) {
+                handle.change((doc: any) => {
+                  // Initialize store if it doesn't exist
+                  if (!doc.store) {
+                    doc.store = {}
+                  }
+                  // Copy all records from server document
+                  Object.entries(serverDoc.store).forEach(([id, record]) => {
+                    doc.store[id] = record
+                  })
+                })
+
+                console.log(`✅ Initialized Automerge document with ${serverRecordCount} records from server`)
+              } else {
+                console.log("📥 Server document is empty - starting with empty Automerge document")
+              }
+            } else if (response.status === 404) {
+              console.log("📥 No document found on server (404) - starting with empty document")
+            } else {
+              console.warn(`⚠️ Failed to load document from server: ${response.status} ${response.statusText}`)
+            }
+          } catch (error) {
+            console.error("❌ Error loading initial document from server:", error)
+            // Continue anyway - user can still create new content
+          }
+
+          const finalDoc = handle.doc()
+          const finalStoreKeys = finalDoc?.store ? Object.keys(finalDoc.store).length : 0
+          const finalShapeCount = finalDoc?.store ? Object.values(finalDoc.store).filter((r: any) => r?.typeName === 'shape').length : 0
+
+          console.log("Automerge handle initialized:", {
+            hasDoc: !!finalDoc,
+            storeKeys: finalStoreKeys,
+            shapeCount: finalShapeCount
           })
 
           setHandle(handle)
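The bootstrap step above fetches the room's JSON snapshot and copies it into the fresh document inside a single `change()` call, i.e. one Automerge transaction. A sketch of that step isolated as a function; the endpoint and snapshot shape are taken from this diff, and the loose handle type is an assumption:

```ts
// Sketch: server bootstrap as a standalone step. Returns the number of
// records loaded; 0 means "start with an empty document".
interface SnapshotLike { store?: Record<string, unknown>; schema?: unknown }

async function bootstrapFromServer(
  handle: { change(fn: (doc: any) => void): void },
  workerUrl: string,
  roomId: string,
): Promise<number> {
  const response = await fetch(`${workerUrl}/room/${roomId}`)
  if (response.status === 404) return 0 // no document yet: start empty
  if (!response.ok) throw new Error(`load failed: ${response.status} ${response.statusText}`)

  const snapshot = (await response.json()) as SnapshotLike
  const records = Object.entries(snapshot.store ?? {})
  if (records.length === 0) return 0

  // One change() call = one Automerge transaction for the whole snapshot
  handle.change((doc) => {
    if (!doc.store) doc.store = {}
    for (const [id, record] of records) doc.store[id] = record
  })
  return records.length
}
```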
@@ -130,47 +147,98 @@ export function useAutomergeSync(config: AutomergeSyncConfig): TLStoreWithStatus
   }, [repo, roomId])
 
   // Auto-save to Cloudflare on every change (with debouncing to prevent excessive calls)
+  // CRITICAL: This ensures new shapes are persisted to R2
   useEffect(() => {
     if (!handle) return
 
     let saveTimeout: NodeJS.Timeout
 
+    const saveDocumentToWorker = async () => {
+      try {
+        const doc = handle.doc()
+        if (!doc || !doc.store) {
+          console.log("🔍 No document to save yet")
+          return
+        }
+
+        const shapeCount = Object.values(doc.store).filter((r: any) => r?.typeName === 'shape').length
+        const storeKeys = Object.keys(doc.store).length
+
+        // Track shape types being persisted
+        const shapeTypeCounts = Object.values(doc.store)
+          .filter((r: any) => r?.typeName === 'shape')
+          .reduce((acc: any, r: any) => {
+            const type = r?.type || 'unknown'
+            acc[type] = (acc[type] || 0) + 1
+            return acc
+          }, {})
+
+        console.log(`💾 Persisting document to worker for R2 storage: ${storeKeys} records, ${shapeCount} shapes`)
+        console.log(`💾 Shape type breakdown being persisted:`, shapeTypeCounts)
+
+        // Send document state to worker via POST /room/:roomId
+        // This updates the worker's currentDoc so it can be persisted to R2
+        const response = await fetch(`${workerUrl}/room/${roomId}`, {
+          method: 'POST',
+          headers: {
+            'Content-Type': 'application/json',
+          },
+          body: JSON.stringify(doc),
+        })
+
+        if (!response.ok) {
+          throw new Error(`Failed to save to worker: ${response.statusText}`)
+        }
+
+        console.log(`✅ Successfully sent document state to worker for persistence (${shapeCount} shapes)`)
+      } catch (error) {
+        console.error('❌ Error saving document to worker:', error)
+      }
+    }
+
     const scheduleSave = () => {
       // Clear existing timeout
       if (saveTimeout) clearTimeout(saveTimeout)
 
-      // Schedule save with a short debounce (500ms) to batch rapid changes
-      saveTimeout = setTimeout(async () => {
-        try {
-          // With Repo, we don't need manual saving - the NetworkAdapter handles sync
-          console.log("🔍 Automerge changes detected - NetworkAdapter will handle sync")
-        } catch (error) {
-          console.error('Error in change-triggered save:', error)
-        }
-      }, 500)
+      // Schedule save with a debounce (2 seconds) to batch rapid changes
+      // This matches the worker's persistence throttle
+      saveTimeout = setTimeout(saveDocumentToWorker, 2000)
     }
 
     // Listen for changes to the Automerge document
     const changeHandler = (payload: any) => {
-      console.log('🔍 Automerge document changed:', {
-        hasPatches: !!payload.patches,
-        patchCount: payload.patches?.length || 0,
-        patches: payload.patches?.map((p: any) => ({
-          action: p.action,
-          path: p.path,
-          value: p.value ? (typeof p.value === 'object' ? 'object' : p.value) : 'undefined'
-        }))
+      const patchCount = payload.patches?.length || 0
+
+      // Check if patches contain shape changes
+      const hasShapeChanges = payload.patches?.some((p: any) => {
+        const id = p.path?.[1]
+        return id && typeof id === 'string' && id.startsWith('shape:')
       })
+
+      if (hasShapeChanges) {
+        console.log('🔍 Automerge document changed with shape patches:', {
+          patchCount: patchCount,
+          shapePatches: payload.patches.filter((p: any) => {
+            const id = p.path?.[1]
+            return id && typeof id === 'string' && id.startsWith('shape:')
+          }).length
+        })
+      }
+
+      // Schedule save to worker for persistence
      scheduleSave()
     }
 
     handle.on('change', changeHandler)
 
+    // Also save immediately on mount to ensure initial state is persisted
+    setTimeout(saveDocumentToWorker, 3000)
+
     return () => {
       handle.off('change', changeHandler)
       if (saveTimeout) clearTimeout(saveTimeout)
     }
-  }, [handle])
+  }, [handle, roomId, workerUrl])
 
   // Get user metadata for presence
   const userMetadata: { userId: string; name: string; color: string } = (() => {
@@ -194,6 +262,13 @@ export function useAutomergeSync(config: AutomergeSyncConfig): TLStoreWithStatus
     userId: userMetadata.userId
   })
 
+  // Update store ref when store is available
+  useEffect(() => {
+    if (storeWithStatus.store) {
+      storeRef.current = storeWithStatus.store
+    }
+  }, [storeWithStatus.store])
+
   // Get presence data (only when handle is ready)
   const presence = useAutomergePresence({
     handle: handle || null,
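The save path hand-rolls a `setTimeout` debounce. A sketch of the same thing as a reusable debounce with a cancel handle for the effect cleanup; the utility is hypothetical, and the 2-second delay mirrors the worker's persistence throttle:

```ts
// Sketch: a reusable trailing-edge debounce with cancel for effect cleanup.
function debounce<Args extends unknown[]>(
  fn: (...args: Args) => void,
  waitMs: number,
) {
  let timer: ReturnType<typeof setTimeout> | undefined
  return Object.assign(
    (...args: Args) => {
      // Each call resets the timer, so only the last call in a burst fires
      if (timer !== undefined) clearTimeout(timer)
      timer = setTimeout(() => fn(...args), waitMs)
    },
    {
      cancel() {
        if (timer !== undefined) clearTimeout(timer)
        timer = undefined
      },
    },
  )
}

// Usage in the effect above (2s to match the worker's throttle):
// const scheduleSave = debounce(saveDocumentToWorker, 2000)
// handle.on('change', scheduleSave)
// return () => { handle.off('change', scheduleSave); scheduleSave.cancel() }
```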
diff --git a/src/routes/Board.tsx b/src/routes/Board.tsx
index cc10862..01a921d 100644
--- a/src/routes/Board.tsx
+++ b/src/routes/Board.tsx
@@ -266,12 +266,17 @@ export function Board() {
 
         // Try to get the shapes from the editor to see if they exist but aren't being returned
         const missingShapeIds = missingShapes.map((s: any) => s.id)
-        const shapesFromEditor = missingShapeIds.map(id => editor.getShape(id)).filter(Boolean)
+        const shapesFromEditor = missingShapeIds
+          .map(id => editor.getShape(id))
+          .filter((s): s is NonNullable<typeof s> => s !== undefined)
 
         if (shapesFromEditor.length > 0) {
           console.log(`📊 Board: ${shapesFromEditor.length} missing shapes actually exist in editor but aren't in getCurrentPageShapes()`)
           // Try to select them to make them visible
-          editor.setSelectedShapes(shapesFromEditor.map(s => s.id))
+          const shapeIds = shapesFromEditor.map(s => s.id).filter((id): id is TLShapeId => id !== undefined)
+          if (shapeIds.length > 0) {
+            editor.setSelectedShapes(shapeIds)
+          }
         } else {
           // Shapes don't exist in editor - might be a sync issue
           console.error(`📊 Board: ${missingShapes.length} shapes are in store but don't exist in editor - possible sync issue`)
@@ -314,6 +319,54 @@ export function Board() {
         const shapesOnOtherPages = storeShapes.filter((s: any) => s.parentId !== currentPageId)
         if (shapesOnOtherPages.length > 0) {
           console.log(`📊 Board: ${shapesOnOtherPages.length} shapes exist on other pages (not current page ${currentPageId})`)
+
+          // Find which page has the most shapes
+          const pageShapeCounts = new Map<string, number>()
+          storeShapes.forEach((s: any) => {
+            if (s.parentId) {
+              pageShapeCounts.set(s.parentId, (pageShapeCounts.get(s.parentId) || 0) + 1)
+            }
+          })
+
+          // Find the page with the most shapes
+          let maxShapes = 0
+          let pageWithMostShapes: string | null = null
+          pageShapeCounts.forEach((count, pageId) => {
+            if (count > maxShapes) {
+              maxShapes = count
+              pageWithMostShapes = pageId
+            }
+          })
+
+          // If current page has no shapes but another page does, switch to that page
+          if (editorShapes.length === 0 && pageWithMostShapes && pageWithMostShapes !== currentPageId) {
+            console.log(`📊 Board: Current page has no shapes. Switching to page ${pageWithMostShapes} which has ${maxShapes} shapes`)
+            try {
+              editor.setCurrentPage(pageWithMostShapes as any)
+              // Focus camera on shapes after switching
+              setTimeout(() => {
+                const newPageShapes = editor.getCurrentPageShapes()
+                if (newPageShapes.length > 0) {
+                  const bounds = editor.getShapePageBounds(newPageShapes[0])
+                  if (bounds) {
+                    editor.setCamera({
+                      x: bounds.x - editor.getViewportPageBounds().w / 2 + bounds.w / 2,
+                      y: bounds.y - editor.getViewportPageBounds().h / 2 + bounds.h / 2,
+                      z: editor.getCamera().z
+                    }, { animation: { duration: 300 } })
+                  }
+                }
+              }, 100)
+            } catch (error) {
+              console.error(`❌ Board: Error switching to page ${pageWithMostShapes}:`, error)
+            }
+          } else if (pageWithMostShapes) {
+            console.log(`📊 Board: Page breakdown:`, Array.from(pageShapeCounts.entries()).map(([pageId, count]) => ({
+              pageId,
+              shapeCount: count,
+              isCurrent: pageId === currentPageId
+            })))
+          }
         }
       }
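The page-switch branch above centers the camera by hand. A worked form of that arithmetic as a pure function, assuming tldraw v2's convention that `screenPoint = (pagePoint + camera) * zoom`; if the tldraw version in use exposes `editor.zoomToBounds()` or `editor.centerOnPoint()`, those avoid the manual math entirely:

```ts
// Sketch: camera position that centers `bounds` in the viewport, under the
// assumed convention screenPoint = (pagePoint + camera) * zoom. Verify the
// sign against the tldraw version in use before relying on it.
interface Box { x: number; y: number; w: number; h: number }

function cameraToCenter(bounds: Box, viewportScreen: Box, zoom: number) {
  const cx = bounds.x + bounds.w / 2 // page-space center of the target
  const cy = bounds.y + bounds.h / 2
  return {
    x: viewportScreen.w / (2 * zoom) - cx,
    y: viewportScreen.h / (2 * zoom) - cy,
    z: zoom, // keep the current zoom, as this PR does
  }
}
```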
diff --git a/worker/AutomergeDurableObject.ts b/worker/AutomergeDurableObject.ts
index 611852e..fc5b3b6 100644
--- a/worker/AutomergeDurableObject.ts
+++ b/worker/AutomergeDurableObject.ts
@@ -22,6 +22,8 @@ export class AutomergeDurableObject {
   private clients: Map<string, WebSocket> = new Map()
   // Track last persisted state to detect changes
   private lastPersistedHash: string | null = null
+  // Track if document was converted from old format (for JSON sync decision)
+  private wasConvertedFromOldFormat: boolean = false
 
   constructor(private readonly ctx: DurableObjectState, env: Environment) {
     this.r2 = env.TLDRAW_BUCKET
@@ -211,26 +213,23 @@ export class AutomergeDurableObject {
       }))
      console.log(`🔌 AutomergeDurableObject: Test message sent to client: ${sessionId}`)
 
-      // Then try to send the document
-      console.log(`🔌 AutomergeDurableObject: Getting document for session ${sessionId}`)
+      // CRITICAL: No JSON sync - all data flows through Automerge sync protocol
+      // Old format content is converted to Automerge format server-side during getDocument()
+      // and saved back to R2, then Automerge sync loads it normally
+      console.log(`🔌 AutomergeDurableObject: Document ready for Automerge sync (was converted: ${this.wasConvertedFromOldFormat})`)
+
       const doc = await this.getDocument()
-      console.log(`🔌 AutomergeDurableObject: Document loaded, sending to client:`, {
+      const shapeCount = doc.store ? Object.values(doc.store).filter((record: any) => record.typeName === 'shape').length : 0
+
+      console.log(`🔌 AutomergeDurableObject: Document loaded:`, {
         hasStore: !!doc.store,
         storeKeys: doc.store ? Object.keys(doc.store).length : 0,
-        shapes: doc.store ? Object.values(doc.store).filter((record: any) => record.typeName === 'shape').length : 0,
-        pages: doc.store ? Object.values(doc.store).filter((record: any) => record.typeName === 'page').length : 0
+        shapes: shapeCount,
+        wasConvertedFromOldFormat: this.wasConvertedFromOldFormat
       })
 
-      // Send the document using Automerge's sync protocol
-      console.log(`🔌 AutomergeDurableObject: Sending document data to client ${sessionId}`)
-      serverWebSocket.send(JSON.stringify({
-        type: "sync",
-        senderId: "server",
-        targetId: sessionId,
-        documentId: "default",
-        data: doc
-      }))
-      console.log(`🔌 AutomergeDurableObject: Document sent to client: ${sessionId}`)
+      // Automerge sync protocol will handle loading the document
+      // No JSON sync needed - everything goes through Automerge's native sync
     } catch (error) {
       console.error(`❌ AutomergeDurableObject: Error sending document to client ${sessionId}:`, error)
       console.error(`❌ AutomergeDurableObject: Error stack:`, error instanceof Error ? error.stack : 'No stack trace')
@@ -301,12 +300,15 @@ export class AutomergeDurableObject {
         const doc = await this.getDocument()
         const client = this.clients.get(sessionId)
         if (client) {
+          // Use consistent document ID format: automerge:${roomId}
+          // This matches what the client uses when calling repo.find()
+          const documentId = message.documentId || `automerge:${this.roomId}`
           // Send the document as a sync message
           client.send(JSON.stringify({
             type: "sync",
             senderId: "server",
             targetId: sessionId,
-            documentId: message.documentId || this.roomId,
+            documentId: documentId,
             data: doc
           }))
         }
@@ -317,15 +319,22 @@ export class AutomergeDurableObject {
         const doc = await this.getDocument()
         const requestClient = this.clients.get(sessionId)
         if (requestClient) {
+          // Use consistent document ID format: automerge:${roomId}
+          // This matches what the client uses when calling repo.find()
+          const documentId = message.documentId || `automerge:${this.roomId}`
           requestClient.send(JSON.stringify({
             type: "sync",
             senderId: "server",
             targetId: sessionId,
-            documentId: message.documentId || this.roomId,
+            documentId: documentId,
             data: doc
           }))
         }
         break
+      case "request-document-state":
+        // Handle document state request from worker (for persistence)
+        await this.handleDocumentStateRequest(sessionId)
+        break
       default:
         console.log("Unknown message type:", message.type)
     }
@@ -338,6 +347,9 @@ export class AutomergeDurableObject {
     // Broadcast binary data directly to other clients for Automerge's native sync protocol
     // Automerge Repo handles the binary sync protocol internally
     this.broadcastBinaryToOthers(sessionId, data)
+
+    // NOTE: Clients will periodically POST their document state to /room/:roomId
+    // which updates this.currentDoc and triggers persistence to R2
   }
 
   private async handleSyncMessage(sessionId: string, message: any) {
@@ -441,12 +453,27 @@ export class AutomergeDurableObject {
   async getDocument() {
     if (!this.roomId) throw new Error("Missing roomId")
 
-    // If we already have a current document, return it
-    if (this.currentDoc) {
-      return this.currentDoc
+    // CRITICAL: Always load from R2 first if we haven't loaded yet
+    // Don't return currentDoc if it was set by a client POST before R2 load
+    // This ensures we get all shapes from R2, not just what the client sent
+
+    // If R2 load is in progress or completed, wait for it and return the result
+    if (this.roomPromise) {
+      const doc = await this.roomPromise
+      // After R2 load, merge any client updates that happened during load
+      if (this.currentDoc && this.currentDoc !== doc) {
+        // Merge client updates into R2-loaded document
+        if (doc.store && this.currentDoc.store) {
+          Object.entries(this.currentDoc.store).forEach(([id, record]) => {
+            doc.store[id] = record
+          })
+        }
+        this.currentDoc = doc
+      }
+      return this.currentDoc || doc
     }
 
-    // Otherwise, load from R2 (only once)
+    // Otherwise, start loading from R2 (only once)
     if (!this.roomPromise) {
       this.roomPromise = (async () => {
         let initialDoc: any
@@ -459,11 +486,17 @@ export class AutomergeDurableObject {
         if (docFromBucket) {
           try {
             const rawDoc = await docFromBucket.json()
+            const r2ShapeCount = (rawDoc as any).store ?
+              Object.values((rawDoc as any).store).filter((r: any) => r?.typeName === 'shape').length :
+              (Array.isArray(rawDoc) ? rawDoc.filter((r: any) => r?.state?.typeName === 'shape').length : 0)
+
             console.log(`Loaded raw document from R2 for room ${this.roomId}:`, {
               isArray: Array.isArray(rawDoc),
               length: Array.isArray(rawDoc) ? rawDoc.length : 'not array',
               hasStore: !!(rawDoc as any).store,
               hasDocuments: !!(rawDoc as any).documents,
+              shapeCount: r2ShapeCount,
+              storeKeys: (rawDoc as any).store ? Object.keys((rawDoc as any).store).length : 0,
               sampleKeys: Array.isArray(rawDoc) ? rawDoc.slice(0, 3).map((r: any) => r.state?.id) : []
             })
@@ -535,13 +568,16 @@ export class AutomergeDurableObject {
         }
 
         this.currentDoc = initialDoc
+        // Store conversion flag for JSON sync decision
+        this.wasConvertedFromOldFormat = wasConverted
 
         // Initialize the last persisted hash with the loaded document
         this.lastPersistedHash = this.generateDocHash(initialDoc)
 
         // If document was converted/migrated, persist it immediately to save in new format
         if (wasConverted && initialDoc.store && Object.keys(initialDoc.store).length > 0) {
-          console.log(`📦 Persisting converted document to R2 in new format for room ${this.roomId}`)
+          const shapeCount = Object.values(initialDoc.store).filter((r: any) => r.typeName === 'shape').length
+          console.log(`📦 Persisting converted document to R2 in new format for room ${this.roomId} (${shapeCount} shapes)`)
           // Persist immediately without throttling for converted documents
           try {
             const docJson = JSON.stringify(initialDoc)
@@ -551,7 +587,7 @@ export class AutomergeDurableObject {
               }
             })
             this.lastPersistedHash = this.generateDocHash(initialDoc)
-            console.log(`✅ Successfully persisted converted document for room ${this.roomId}`)
+            console.log(`✅ Successfully persisted converted document for room ${this.roomId} with ${shapeCount} shapes`)
           } catch (persistError) {
             console.error(`❌ Error persisting converted document for room ${this.roomId}:`, persistError)
           }
@@ -777,8 +813,100 @@ export class AutomergeDurableObject {
   }
 
   private async updateDocument(newDoc: any) {
-    this.currentDoc = newDoc
-    this.schedulePersistToR2()
+    // CRITICAL: Wait for R2 load to complete before processing updates
+    // This ensures we have all shapes from R2 before merging client updates
+    if (this.roomPromise) {
+      try {
+        await this.roomPromise
+      } catch (e) {
+        // R2 load might have failed, continue anyway
+        console.warn(`⚠️ R2 load failed, continuing with client update:`, e)
+      }
+    }
+
+    const oldShapeCount = this.currentDoc?.store ? Object.values(this.currentDoc.store).filter((r: any) => r?.typeName === 'shape').length : 0
+    const newShapeCount = newDoc?.store ? Object.values(newDoc.store).filter((r: any) => r?.typeName === 'shape').length : 0
+
+    // Get list of old shape IDs to check if we're losing any
+    const oldShapeIds = this.currentDoc?.store ?
+      Object.values(this.currentDoc.store)
+        .filter((r: any) => r?.typeName === 'shape')
+        .map((r: any) => r.id) : []
+    const newShapeIds = newDoc?.store ?
+      Object.values(newDoc.store)
+        .filter((r: any) => r?.typeName === 'shape')
+        .map((r: any) => r.id) : []
+
+    // CRITICAL: Merge stores instead of replacing entire document
+    // This prevents client from overwriting old shapes when it only has partial data
+    if (this.currentDoc && newDoc?.store) {
+      // Merge new records into existing store, but don't delete old ones
+      if (!this.currentDoc.store) {
+        this.currentDoc.store = {}
+      }
+
+      // Count records before merge
+      const recordsBefore = Object.keys(this.currentDoc.store).length
+
+      // Merge: add/update records from newDoc, but keep existing ones that aren't in newDoc
+      Object.entries(newDoc.store).forEach(([id, record]) => {
+        this.currentDoc.store[id] = record
+      })
+
+      // Count records after merge
+      const recordsAfter = Object.keys(this.currentDoc.store).length
+
+      // Update schema if provided
+      if (newDoc.schema) {
+        this.currentDoc.schema = newDoc.schema
+      }
+
+      console.log(`📊 updateDocument: Merged ${Object.keys(newDoc.store).length} records from client into ${recordsBefore} existing records (total: ${recordsAfter})`)
+    } else {
+      // If no current doc yet, set it (R2 load should have completed by now)
+      console.log(`📊 updateDocument: No current doc, setting to new doc (${newShapeCount} shapes)`)
+      this.currentDoc = newDoc
+    }
+
+    const finalShapeCount = this.currentDoc?.store ? Object.values(this.currentDoc.store).filter((r: any) => r?.typeName === 'shape').length : 0
+    const finalShapeIds = this.currentDoc?.store ?
+      Object.values(this.currentDoc.store)
+        .filter((r: any) => r?.typeName === 'shape')
+        .map((r: any) => r.id) : []
+
+    // Check for lost shapes
+    const lostShapes = oldShapeIds.filter(id => !finalShapeIds.includes(id))
+    if (lostShapes.length > 0) {
+      console.error(`❌ CRITICAL: Lost ${lostShapes.length} shapes during merge! Lost IDs:`, lostShapes)
+    }
+
+    if (finalShapeCount !== oldShapeCount) {
+      console.log(`📊 Document updated: shape count changed from ${oldShapeCount} to ${finalShapeCount} (merged from client with ${newShapeCount} shapes)`)
+      // CRITICAL: Always persist when shape count changes
+      this.schedulePersistToR2()
+    } else if (newShapeCount < oldShapeCount) {
+      console.log(`⚠️ Client sent ${newShapeCount} shapes but server has ${oldShapeCount}. Merged to preserve all shapes (final: ${finalShapeCount})`)
+      // Persist to ensure we save the merged state
+      this.schedulePersistToR2()
+    } else if (newShapeCount === oldShapeCount && oldShapeCount > 0) {
+      // Check if any records were actually added/updated (not just same count)
+      const recordsAdded = Object.keys(newDoc.store || {}).filter(id =>
+        !this.currentDoc?.store?.[id] ||
+        JSON.stringify(this.currentDoc.store[id]) !== JSON.stringify(newDoc.store[id])
+      ).length
+
+      if (recordsAdded > 0) {
+        console.log(`ℹ️ Client sent ${newShapeCount} shapes, server had ${oldShapeCount}. ${recordsAdded} records were updated. Merge complete (final: ${finalShapeCount})`)
+        // Persist if records were updated
+        this.schedulePersistToR2()
+      } else {
+        console.log(`ℹ️ Client sent ${newShapeCount} shapes, server had ${oldShapeCount}. No changes detected, skipping persistence.`)
+      }
+    } else {
+      // New shapes or other changes - always persist
+      console.log(`📊 Document updated: scheduling persistence (old: ${oldShapeCount}, new: ${newShapeCount}, final: ${finalShapeCount})`)
+      this.schedulePersistToR2()
+    }
   }
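The additive record merge above now appears in `getDocument`, `updateDocument`, and `schedulePersistToR2`. A sketch of it as one helper, with the trade-off made explicit; the helper name and `DocLike` shape are hypothetical:

```ts
// Sketch: the additive store merge used in three places in this file.
// Per-record last-writer-wins: incoming records overwrite same-id records
// and nothing is ever deleted - the trade-off this PR makes to avoid
// losing shapes.
interface DocLike { store?: Record<string, unknown>; schema?: unknown }

function mergeStores(base: DocLike, incoming: DocLike): DocLike {
  const merged: DocLike = {
    ...base,
    store: { ...(base.store ?? {}) },
  }
  for (const [id, record] of Object.entries(incoming.store ?? {})) {
    merged.store![id] = record // incoming wins per record id
  }
  if (incoming.schema) merged.schema = incoming.schema
  return merged
}
```

Because nothing is ever removed, client-side deletions can resurrect on the next merge; a deletion marker (tombstone), or treating the Automerge history itself as the source of truth for removals, would be the longer-term fix.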
 // Migrate old documents format to new store format
@@ -859,6 +987,9 @@ export class AutomergeDurableObject {
       console.warn(`⚠️ migrateDocumentsToStore: oldDoc.documents is not an array or doesn't exist`)
     }
 
+    // Count shapes after migration
+    const shapeCount = Object.values(newDoc.store).filter((r: any) => r?.typeName === 'shape').length
+
     console.log(`📊 Documents to Store migration statistics:`, {
       total: migrationStats.total,
       converted: migrationStats.converted,
@@ -866,11 +997,21 @@ export class AutomergeDurableObject {
       errors: migrationStats.errors,
       storeKeys: Object.keys(newDoc.store).length,
       recordTypes: migrationStats.recordTypes,
+      shapeCount: shapeCount,
       customRecordCount: migrationStats.customRecords.length,
       customRecordIds: migrationStats.customRecords.slice(0, 10),
       errorCount: migrationStats.errorDetails.length
     })
 
+    // CRITICAL: Log if shapes are missing after migration
+    if (shapeCount === 0 && migrationStats.recordTypes['shape'] === undefined) {
+      console.warn(`⚠️ Migration completed but NO shapes found! This might indicate old format didn't have shapes or they were filtered out.`)
+    } else if (migrationStats.recordTypes['shape'] && shapeCount !== migrationStats.recordTypes['shape']) {
+      console.warn(`⚠️ Shape count mismatch: Expected ${migrationStats.recordTypes['shape']} shapes but found ${shapeCount} after migration`)
+    } else if (shapeCount > 0) {
+      console.log(`✅ Migration successfully converted ${shapeCount} shapes from old format to new format`)
+    }
+
     // Verify custom records are preserved
     if (migrationStats.customRecords.length > 0) {
       console.log(`✅ Verified ${migrationStats.customRecords.length} custom records preserved during migration`)
@@ -1107,41 +1248,155 @@ export class AutomergeDurableObject {
   // we throttle persistence so it only happens every 2 seconds, batching all updates
   schedulePersistToR2 = throttle(async () => {
-    if (!this.roomId || !this.currentDoc) return
+    if (!this.roomId || !this.currentDoc) {
+      console.log(`⚠️ Cannot persist to R2: roomId=${this.roomId}, currentDoc=${!!this.currentDoc}`)
+      return
+    }
 
-    // Generate hash of current document state
-    const currentHash = this.generateDocHash(this.currentDoc)
+    // CRITICAL: Load current R2 state and merge with this.currentDoc before saving
+    // This ensures we never overwrite old shapes that might be in R2 but not in currentDoc
+    let mergedDoc = { ...this.currentDoc }
+    let r2ShapeCount = 0
+    let mergedShapeCount = 0
 
-    console.log(`Server checking R2 persistence for room ${this.roomId}:`, {
+    try {
+      // Try to load current R2 state
+      const docFromBucket = await this.r2.get(`rooms/${this.roomId}`)
+      if (docFromBucket) {
+        try {
+          const r2Doc = await docFromBucket.json()
+          r2ShapeCount = r2Doc.store ?
+            Object.values(r2Doc.store).filter((r: any) => r?.typeName === 'shape').length : 0
+
+          // Merge R2 document with current document
+          if (r2Doc.store && mergedDoc.store) {
+            // Start with R2 document (has all old shapes)
+            mergedDoc = { ...r2Doc }
+
+            // Merge currentDoc into R2 doc (adds/updates new shapes)
+            Object.entries(this.currentDoc.store).forEach(([id, record]) => {
+              mergedDoc.store[id] = record
+            })
+
+            // Update schema from currentDoc if it exists
+            if (this.currentDoc.schema) {
+              mergedDoc.schema = this.currentDoc.schema
+            }
+
+            mergedShapeCount = Object.values(mergedDoc.store).filter((r: any) => r?.typeName === 'shape').length
+
+            // Track shape types in merged document
+            const mergedShapeTypeCounts = Object.values(mergedDoc.store)
+              .filter((r: any) => r?.typeName === 'shape')
+              .reduce((acc: any, r: any) => {
+                const type = r?.type || 'unknown'
+                acc[type] = (acc[type] || 0) + 1
+                return acc
+              }, {})
+
+            console.log(`🔀 Merging R2 state with current state before persistence:`, {
+              r2Shapes: r2ShapeCount,
+              currentShapes: this.currentDoc.store ? Object.values(this.currentDoc.store).filter((r: any) => r?.typeName === 'shape').length : 0,
+              mergedShapes: mergedShapeCount,
+              r2Records: Object.keys(r2Doc.store || {}).length,
+              currentRecords: Object.keys(this.currentDoc.store || {}).length,
+              mergedRecords: Object.keys(mergedDoc.store || {}).length
+            })
+            console.log(`🔀 Merged shape type breakdown:`, mergedShapeTypeCounts)
+
+            // Check if we're preserving all shapes
+            if (mergedShapeCount < r2ShapeCount) {
+              console.error(`❌ CRITICAL: Merged document has fewer shapes (${mergedShapeCount}) than R2 (${r2ShapeCount})! This should not happen.`)
+            } else if (mergedShapeCount > r2ShapeCount) {
+              console.log(`✅ Merged document has ${mergedShapeCount - r2ShapeCount} new shapes added to R2's ${r2ShapeCount} shapes`)
+            }
+          } else if (r2Doc.store && !mergedDoc.store) {
+            // R2 has store but currentDoc doesn't - use R2
+            mergedDoc = r2Doc
+            mergedShapeCount = r2ShapeCount
+            console.log(`⚠️ Current doc has no store, using R2 document (${r2ShapeCount} shapes)`)
+          } else {
+            // Neither has store or R2 doesn't have store - use currentDoc
+            mergedDoc = this.currentDoc
+            mergedShapeCount = this.currentDoc.store ? Object.values(this.currentDoc.store).filter((r: any) => r?.typeName === 'shape').length : 0
+            console.log(`ℹ️ R2 has no store, using current document (${mergedShapeCount} shapes)`)
+          }
+        } catch (r2ParseError) {
+          console.warn(`⚠️ Error parsing R2 document, using current document:`, r2ParseError)
+          mergedDoc = this.currentDoc
+          mergedShapeCount = this.currentDoc.store ? Object.values(this.currentDoc.store).filter((r: any) => r?.typeName === 'shape').length : 0
+        }
+      } else {
+        // No R2 document exists yet - use currentDoc
+        mergedDoc = this.currentDoc
+        mergedShapeCount = this.currentDoc.store ? Object.values(this.currentDoc.store).filter((r: any) => r?.typeName === 'shape').length : 0
+        console.log(`ℹ️ No existing R2 document, using current document (${mergedShapeCount} shapes)`)
+      }
+    } catch (r2LoadError) {
+      // If R2 load fails, use currentDoc (better than losing data)
+      console.warn(`⚠️ Error loading from R2, using current document:`, r2LoadError)
+      mergedDoc = this.currentDoc
+      mergedShapeCount = this.currentDoc.store ? Object.values(this.currentDoc.store).filter((r: any) => r?.typeName === 'shape').length : 0
+    }
+
+    // Generate hash of merged document state
+    const currentHash = this.generateDocHash(mergedDoc)
+
+    console.log(`🔍 Server checking R2 persistence for room ${this.roomId}:`, {
       currentHash: currentHash.substring(0, 8) + '...',
       lastHash: this.lastPersistedHash ? this.lastPersistedHash.substring(0, 8) + '...' : 'none',
-      hasStore: !!this.currentDoc.store,
-      storeKeys: this.currentDoc.store ? Object.keys(this.currentDoc.store).length : 0
+      hasStore: !!mergedDoc.store,
+      storeKeys: mergedDoc.store ? Object.keys(mergedDoc.store).length : 0,
+      shapeCount: mergedShapeCount,
+      hashesMatch: currentHash === this.lastPersistedHash
     })
 
     // Skip persistence if document hasn't changed
     if (currentHash === this.lastPersistedHash) {
-      console.log(`Skipping R2 persistence for room ${this.roomId} - no changes detected`)
+      console.log(`⏭️ Skipping R2 persistence for room ${this.roomId} - no changes detected (hash matches)`)
       return
     }
 
     try {
-      // convert the document to JSON and upload it to R2
-      const docJson = JSON.stringify(this.currentDoc)
+      // Update currentDoc to the merged version
+      this.currentDoc = mergedDoc
+
+      // convert the merged document to JSON and upload it to R2
+      const docJson = JSON.stringify(mergedDoc)
       await this.r2.put(`rooms/${this.roomId}`, docJson, {
         httpMetadata: {
           contentType: 'application/json'
         }
       })
 
+      // Track shape types in final persisted document
+      const persistedShapeTypeCounts = Object.values(mergedDoc.store || {})
+        .filter((r: any) => r?.typeName === 'shape')
+        .reduce((acc: any, r: any) => {
+          const type = r?.type || 'unknown'
+          acc[type] = (acc[type] || 0) + 1
+          return acc
+        }, {})
+
       // Update last persisted hash only after successful save
       this.lastPersistedHash = currentHash
-      console.log(`Successfully persisted room ${this.roomId} to R2 (batched):`, {
-        storeKeys: this.currentDoc.store ? Object.keys(this.currentDoc.store).length : 0,
-        docSize: docJson.length
+      console.log(`✅ Successfully persisted room ${this.roomId} to R2 (merged):`, {
+        storeKeys: mergedDoc.store ? Object.keys(mergedDoc.store).length : 0,
+        shapeCount: mergedShapeCount,
+        docSize: docJson.length,
+        preservedR2Shapes: r2ShapeCount > 0 ? `${r2ShapeCount} from R2` : 'none'
       })
+      console.log(`✅ Persisted shape type breakdown:`, persistedShapeTypeCounts)
     } catch (error) {
-      console.error(`Error persisting room ${this.roomId} to R2:`, error)
+      console.error(`❌ Error persisting room ${this.roomId} to R2:`, error)
     }
   }, 2_000)
+
+  // Handle request-document-state message from worker
+  // This allows the worker to request current document state from clients for persistence
+  private async handleDocumentStateRequest(sessionId: string) {
+    // When worker requests document state, we'll respond via the existing POST endpoint
+    // Clients should periodically send their document state, so this is mainly for logging
+    console.log(`📡 Worker: Document state requested from ${sessionId} (clients should send via POST /room/:roomId)`)
+  }
 }
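Finally, the hash gate. `generateDocHash` is not shown in this diff, so the hashing below (Web Crypto over the serialized JSON, available in Workers) is an assumption rather than the PR's implementation; the skip-if-unchanged flow is the point:

```ts
// Sketch: hash-gated persistence, isolated from the throttle and merge logic.
async function docHash(doc: unknown): Promise<string> {
  // Caveat: JSON.stringify is key-order sensitive, so two semantically equal
  // docs can hash differently; a sorted-key stringify would avoid spurious writes.
  const bytes = new TextEncoder().encode(JSON.stringify(doc))
  const digest = await crypto.subtle.digest('SHA-256', bytes)
  return Array.from(new Uint8Array(digest))
    .map((b) => b.toString(16).padStart(2, '0'))
    .join('')
}

async function persistIfChanged(
  doc: unknown,
  lastHash: string | null,
  put: (json: string) => Promise<void>,
): Promise<string> {
  const hash = await docHash(doc)
  if (hash === lastHash) return hash // unchanged: skip the R2 write
  await put(JSON.stringify(doc))
  return hash // caller stores this only after a successful write, as the PR does
}
```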