Compare commits

2 Commits: 802e61abfe ... d92077e4f7

| Author | SHA1 | Date |
|---|---|---|
| | d92077e4f7 | |
| | c6b53aebdb | |
@@ -300,11 +300,9 @@ export function applyAutomergePatchesToTLStore(
      case "unmark":
      case "conflict": {
        // These actions are not currently supported for TLDraw
        console.log("Unsupported patch action:", patch.action)
        break
      }
      default: {
        console.log("Unsupported patch:", patch)
      }
    }
@@ -422,7 +420,6 @@ export function applyAutomergePatchesToTLStore(

    // Filter out SharedPiano shapes since they're no longer supported
    if (record.typeName === 'shape' && (record as any).type === 'SharedPiano') {
      console.log(`⚠️ Filtering out deprecated SharedPiano shape: ${record.id}`)
      return // Skip - SharedPiano is deprecated
    }
@@ -444,24 +441,7 @@ export function applyAutomergePatchesToTLStore(

    // put / remove the records in the store
    // Log patch application for debugging
    console.log(`🔧 AutomergeToTLStore: Applying ${patches.length} patches, ${toPut.length} records to put, ${toRemove.length} records to remove`)

    // DEBUG: Log shape updates being applied to store
    toPut.forEach(record => {
      if (record.typeName === 'shape' && (record as any).props?.w) {
        console.log(`🔧 AutomergeToTLStore: Putting shape ${(record as any).type} ${record.id}:`, {
          w: (record as any).props.w,
          h: (record as any).props.h,
          x: (record as any).x,
          y: (record as any).y
        })
      }
    })

    if (failedRecords.length > 0) {
      console.log({ patches, toPut: toPut.length, failed: failedRecords.length })
    }

    if (failedRecords.length > 0) {
      console.error("Failed to sanitize records:", failedRecords)
    }
@@ -695,14 +675,12 @@ export function sanitizeRecord(record: any): TLRecord {

  // Normalize the shape type if it's a custom type with incorrect case
  if (sanitized.type && typeof sanitized.type === 'string' && customShapeTypeMap[sanitized.type]) {
    console.log(`🔧 Normalizing shape type: "${sanitized.type}" → "${customShapeTypeMap[sanitized.type]}"`)
    sanitized.type = customShapeTypeMap[sanitized.type]
  }

  // CRITICAL: Sanitize Multmux shapes AFTER case normalization - ensure all required props exist
  // Old shapes may have wsUrl (removed) or undefined values
  if (sanitized.type === 'Multmux') {
    console.log(`🔧 Sanitizing Multmux shape ${sanitized.id}:`, JSON.stringify(sanitized.props))
    // Remove deprecated wsUrl prop
    if ('wsUrl' in sanitized.props) {
      delete sanitized.props.wsUrl
@@ -762,7 +740,6 @@ export function sanitizeRecord(record: any): TLRecord {
    }

    sanitized.props = cleanProps
    console.log(`🔧 Sanitized Multmux shape ${sanitized.id} props:`, JSON.stringify(sanitized.props))
  }

  // CRITICAL: Sanitize Map shapes - ensure all required props have defaults
@@ -835,7 +812,6 @@ export function sanitizeRecord(record: any): TLRecord {
    if (typeof sanitized.props.h !== 'number' || isNaN(sanitized.props.h)) {
      sanitized.props.h = 550
    }
    console.log(`🔧 Sanitized Map shape ${sanitized.id}`)
  }

  // CRITICAL: Infer type from properties BEFORE defaulting to 'geo'
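For context on the hunks above: the case-normalization step that remains in `sanitizeRecord` relies on a lookup map from wrongly-cased type names to canonical ones. A minimal sketch of that pattern follows; the concrete map entries are assumptions for illustration, not the repository's actual table.

```ts
// Hypothetical illustration of the normalization lookup used by sanitizeRecord.
// The entries below are assumed; only the pattern mirrors the diff above.
const customShapeTypeMap: Record<string, string> = {
  multmux: 'Multmux',
  obsnote: 'ObsNote',
}

function normalizeShapeType(sanitized: { type?: unknown }): void {
  if (typeof sanitized.type === 'string' && customShapeTypeMap[sanitized.type]) {
    // Rewrite the type in place so later per-type sanitization matches.
    sanitized.type = customShapeTypeMap[sanitized.type]
  }
}
```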
@ -462,49 +462,6 @@ export function applyTLStoreChangesToAutomerge(
|
|||
originalX = (record as any).x
|
||||
originalY = (record as any).y
|
||||
}
|
||||
// DEBUG: Log richText, meta.text, and Obsidian note properties before sanitization
|
||||
if (record.typeName === 'shape') {
|
||||
if (record.type === 'geo' && (record.props as any)?.richText) {
|
||||
console.log(`🔍 TLStoreToAutomerge: Geo shape ${record.id} has richText before sanitization:`, {
|
||||
hasRichText: !!(record.props as any).richText,
|
||||
richTextType: typeof (record.props as any).richText,
|
||||
richTextContent: Array.isArray((record.props as any).richText) ? 'array' : (record.props as any).richText?.content ? 'object with content' : 'object without content'
|
||||
})
|
||||
}
|
||||
if (record.type === 'geo' && (record.meta as any)?.text !== undefined) {
|
||||
console.log(`🔍 TLStoreToAutomerge: Geo shape ${record.id} has meta.text before sanitization:`, {
|
||||
hasMetaText: !!(record.meta as any).text,
|
||||
metaTextValue: (record.meta as any).text,
|
||||
metaTextType: typeof (record.meta as any).text
|
||||
})
|
||||
}
|
||||
if (record.type === 'note' && (record.props as any)?.richText) {
|
||||
console.log(`🔍 TLStoreToAutomerge: Note shape ${record.id} has richText before sanitization:`, {
|
||||
hasRichText: !!(record.props as any).richText,
|
||||
richTextType: typeof (record.props as any).richText,
|
||||
richTextContent: Array.isArray((record.props as any).richText) ? 'array' : (record.props as any).richText?.content ? 'object with content' : 'object without content',
|
||||
richTextContentLength: Array.isArray((record.props as any).richText?.content) ? (record.props as any).richText.content.length : 'not array'
|
||||
})
|
||||
}
|
||||
if (record.type === 'arrow' && (record.props as any)?.text !== undefined) {
|
||||
console.log(`🔍 TLStoreToAutomerge: Arrow shape ${record.id} has text before sanitization:`, {
|
||||
hasText: !!(record.props as any).text,
|
||||
textValue: (record.props as any).text,
|
||||
textType: typeof (record.props as any).text
|
||||
})
|
||||
}
|
||||
if (record.type === 'ObsNote') {
|
||||
console.log(`🔍 TLStoreToAutomerge: ObsNote shape ${record.id} before sanitization:`, {
|
||||
hasTitle: !!(record.props as any).title,
|
||||
hasContent: !!(record.props as any).content,
|
||||
hasTags: Array.isArray((record.props as any).tags),
|
||||
title: (record.props as any).title,
|
||||
contentLength: (record.props as any).content?.length || 0,
|
||||
tagsCount: Array.isArray((record.props as any).tags) ? (record.props as any).tags.length : 0
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const sanitizedRecord = sanitizeRecord(record)
|
||||
|
||||
// CRITICAL: Restore original coordinates if they were valid
|
||||
|
|
@ -518,99 +475,11 @@ export function applyTLStoreChangesToAutomerge(
|
|||
}
|
||||
}
|
||||
|
||||
// DEBUG: Log richText, meta.text, and Obsidian note properties after sanitization
|
||||
if (sanitizedRecord.typeName === 'shape') {
|
||||
if (sanitizedRecord.type === 'geo' && (sanitizedRecord.props as any)?.richText) {
|
||||
console.log(`🔍 TLStoreToAutomerge: Geo shape ${sanitizedRecord.id} has richText after sanitization:`, {
|
||||
hasRichText: !!(sanitizedRecord.props as any).richText,
|
||||
richTextType: typeof (sanitizedRecord.props as any).richText,
|
||||
richTextContent: Array.isArray((sanitizedRecord.props as any).richText) ? 'array' : (sanitizedRecord.props as any).richText?.content ? 'object with content' : 'object without content'
|
||||
})
|
||||
}
|
||||
if (sanitizedRecord.type === 'geo' && (sanitizedRecord.meta as any)?.text !== undefined) {
|
||||
console.log(`🔍 TLStoreToAutomerge: Geo shape ${sanitizedRecord.id} has meta.text after sanitization:`, {
|
||||
hasMetaText: !!(sanitizedRecord.meta as any).text,
|
||||
metaTextValue: (sanitizedRecord.meta as any).text,
|
||||
metaTextType: typeof (sanitizedRecord.meta as any).text
|
||||
})
|
||||
}
|
||||
if (sanitizedRecord.type === 'note' && (sanitizedRecord.props as any)?.richText) {
|
||||
console.log(`🔍 TLStoreToAutomerge: Note shape ${sanitizedRecord.id} has richText after sanitization:`, {
|
||||
hasRichText: !!(sanitizedRecord.props as any).richText,
|
||||
richTextType: typeof (sanitizedRecord.props as any).richText,
|
||||
richTextContent: Array.isArray((sanitizedRecord.props as any).richText) ? 'array' : (sanitizedRecord.props as any).richText?.content ? 'object with content' : 'object without content',
|
||||
richTextContentLength: Array.isArray((sanitizedRecord.props as any).richText?.content) ? (sanitizedRecord.props as any).richText.content.length : 'not array'
|
||||
})
|
||||
}
|
||||
if (sanitizedRecord.type === 'arrow' && (sanitizedRecord.props as any)?.text !== undefined) {
|
||||
console.log(`🔍 TLStoreToAutomerge: Arrow shape ${sanitizedRecord.id} has text after sanitization:`, {
|
||||
hasText: !!(sanitizedRecord.props as any).text,
|
||||
textValue: (sanitizedRecord.props as any).text,
|
||||
textType: typeof (sanitizedRecord.props as any).text
|
||||
})
|
||||
}
|
||||
if (sanitizedRecord.type === 'ObsNote') {
|
||||
console.log(`🔍 TLStoreToAutomerge: ObsNote shape ${sanitizedRecord.id} after sanitization:`, {
|
||||
hasTitle: !!(sanitizedRecord.props as any).title,
|
||||
hasContent: !!(sanitizedRecord.props as any).content,
|
||||
hasTags: Array.isArray((sanitizedRecord.props as any).tags),
|
||||
title: (sanitizedRecord.props as any).title,
|
||||
contentLength: (sanitizedRecord.props as any).content?.length || 0,
|
||||
tagsCount: Array.isArray((sanitizedRecord.props as any).tags) ? (sanitizedRecord.props as any).tags.length : 0
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// CRITICAL: Create a deep copy to ensure all properties (including richText and text) are preserved
|
||||
// This prevents Automerge from treating the object as read-only
|
||||
// Note: sanitizedRecord.props is already a deep copy from sanitizeRecord, but we need to deep copy the entire record
|
||||
const recordToSave = JSON.parse(JSON.stringify(sanitizedRecord))
|
||||
|
||||
// DEBUG: Log richText, meta.text, and Obsidian note properties after deep copy
|
||||
if (recordToSave.typeName === 'shape') {
|
||||
if (recordToSave.type === 'geo' && recordToSave.props?.richText) {
|
||||
console.log(`🔍 TLStoreToAutomerge: Geo shape ${recordToSave.id} has richText after deep copy:`, {
|
||||
hasRichText: !!recordToSave.props.richText,
|
||||
richTextType: typeof recordToSave.props.richText,
|
||||
richTextContent: Array.isArray(recordToSave.props.richText) ? 'array' : recordToSave.props.richText?.content ? 'object with content' : 'object without content',
|
||||
richTextContentLength: Array.isArray(recordToSave.props.richText?.content) ? recordToSave.props.richText.content.length : 'not array'
|
||||
})
|
||||
}
|
||||
if (recordToSave.type === 'geo' && recordToSave.meta?.text !== undefined) {
|
||||
console.log(`🔍 TLStoreToAutomerge: Geo shape ${recordToSave.id} has meta.text after deep copy:`, {
|
||||
hasMetaText: !!recordToSave.meta.text,
|
||||
metaTextValue: recordToSave.meta.text,
|
||||
metaTextType: typeof recordToSave.meta.text
|
||||
})
|
||||
}
|
||||
if (recordToSave.type === 'note' && recordToSave.props?.richText) {
|
||||
console.log(`🔍 TLStoreToAutomerge: Note shape ${recordToSave.id} has richText after deep copy:`, {
|
||||
hasRichText: !!recordToSave.props.richText,
|
||||
richTextType: typeof recordToSave.props.richText,
|
||||
richTextContent: Array.isArray(recordToSave.props.richText) ? 'array' : recordToSave.props.richText?.content ? 'object with content' : 'object without content',
|
||||
richTextContentLength: Array.isArray(recordToSave.props.richText?.content) ? recordToSave.props.richText.content.length : 'not array'
|
||||
})
|
||||
}
|
||||
if (recordToSave.type === 'arrow' && recordToSave.props?.text !== undefined) {
|
||||
console.log(`🔍 TLStoreToAutomerge: Arrow shape ${recordToSave.id} has text after deep copy:`, {
|
||||
hasText: !!recordToSave.props.text,
|
||||
textValue: recordToSave.props.text,
|
||||
textType: typeof recordToSave.props.text
|
||||
})
|
||||
}
|
||||
if (recordToSave.type === 'ObsNote') {
|
||||
console.log(`🔍 TLStoreToAutomerge: ObsNote shape ${recordToSave.id} after deep copy:`, {
|
||||
hasTitle: !!recordToSave.props.title,
|
||||
hasContent: !!recordToSave.props.content,
|
||||
hasTags: Array.isArray(recordToSave.props.tags),
|
||||
title: recordToSave.props.title,
|
||||
contentLength: recordToSave.props.content?.length || 0,
|
||||
tagsCount: Array.isArray(recordToSave.props.tags) ? recordToSave.props.tags.length : 0,
|
||||
allPropsKeys: Object.keys(recordToSave.props || {})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Replace the entire record - Automerge will handle merging with concurrent changes
|
||||
doc.store[record.id] = recordToSave
|
||||
})
|
||||
|
|
|
|||
|
|
@@ -115,7 +115,6 @@ export async function saveDocumentId(roomId: string, documentId: string): Promis
    }

    request.onsuccess = () => {
      console.log(`Saved document mapping: ${roomId} -> ${documentId}`)
      resolve()
    }
  })
@@ -171,7 +170,6 @@ export async function deleteDocumentMapping(roomId: string): Promise<void> {
    }

    request.onsuccess = () => {
      console.log(`Deleted document mapping for: ${roomId}`)
      resolve()
    }
  })
@@ -238,7 +236,6 @@ export async function cleanupOldMappings(maxAgeDays: number = 30): Promise<numbe
          deletedCount++
          cursor.continue()
        } else {
          console.log(`Cleaned up ${deletedCount} old document mappings`)
          resolve(deletedCount)
        }
      }
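The three hunks above all touch the IndexedDB-backed roomId → documentId mapping (`saveDocumentId`, `deleteDocumentMapping`, `cleanupOldMappings`). A rough sketch of the surrounding request pattern is below; the object store name and record shape are assumptions, only the onsuccess/onerror wiring matches the fragments shown.

```ts
// Hedged sketch of an IndexedDB-backed mapping write, assuming an object store
// named "documentMappings" keyed by roomId. Names are illustrative only.
function saveDocumentIdSketch(db: IDBDatabase, roomId: string, documentId: string): Promise<void> {
  return new Promise((resolve, reject) => {
    const tx = db.transaction('documentMappings', 'readwrite')
    const request = tx.objectStore('documentMappings').put({ roomId, documentId, updatedAt: Date.now() })
    request.onerror = () => reject(request.error)
    request.onsuccess = () => resolve()
  })
}
```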
@ -363,7 +363,6 @@ export function useAutomergeStoreV2({
|
|||
if (existingRecord && (existingRecord as any).typeName === 'shape' && (existingRecord as any).type === 'geo') {
|
||||
const geoRecord = existingRecord as any
|
||||
if (!geoRecord.props || !geoRecord.props.geo) {
|
||||
console.log(`🔧 Attempting to fix geo shape ${recordId} missing props.geo`)
|
||||
// This won't help with the current patch, but might help future patches
|
||||
// The real fix should happen in AutomergeToTLStore sanitization
|
||||
}
|
||||
|
|
@ -421,7 +420,6 @@ export function useAutomergeStoreV2({
|
|||
const storeShapeCount = store.allRecords().filter((r: any) => r.typeName === 'shape').length
|
||||
|
||||
if (docShapeCount > 0 && storeShapeCount === 0) {
|
||||
console.log(`🔧 Handler set up after data was written. Manually processing ${docShapeCount} shapes that were loaded before handler was ready...`)
|
||||
// Since patches were already emitted when handle.change() was called in useAutomergeSyncRepo,
|
||||
// we need to manually process the data that's already in the doc
|
||||
try {
|
||||
|
|
@ -451,14 +449,12 @@ export function useAutomergeStoreV2({
|
|||
// Filter out SharedPiano shapes since they're no longer supported
|
||||
const filteredRecords = allRecords.filter((record: any) => {
|
||||
if (record.typeName === 'shape' && record.type === 'SharedPiano') {
|
||||
console.log(`⚠️ Filtering out deprecated SharedPiano shape: ${record.id}`)
|
||||
return false
|
||||
}
|
||||
return true
|
||||
})
|
||||
|
||||
if (filteredRecords.length > 0) {
|
||||
console.log(`🔧 Manually applying ${filteredRecords.length} records to store (patches were missed during initial load, filtered out ${allRecords.length - filteredRecords.length} SharedPiano shapes)`)
|
||||
store.mergeRemoteChanges(() => {
|
||||
const pageRecords = filteredRecords.filter(r => r.typeName === 'page')
|
||||
const shapeRecords = filteredRecords.filter(r => r.typeName === 'shape')
|
||||
|
|
@ -466,7 +462,6 @@ export function useAutomergeStoreV2({
|
|||
const recordsToAdd = [...pageRecords, ...otherRecords, ...shapeRecords]
|
||||
store.put(recordsToAdd)
|
||||
})
|
||||
console.log(`✅ Manually applied ${filteredRecords.length} records to store`)
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(`❌ Error manually processing initial data:`, error)
|
||||
|
|
@ -807,7 +802,6 @@ export function useAutomergeStoreV2({
|
|||
|
||||
// If only position changed (x/y), restore original coordinates
|
||||
if (!otherPropsChanged && (newX !== originalX || newY !== originalY)) {
|
||||
console.log(`🚫 Filtering out x/y coordinate change for pinned shape ${id}: (${newX}, ${newY}) -> keeping original (${originalX}, ${originalY})`)
|
||||
// Restore original coordinates
|
||||
const recordWithOriginalCoords = {
|
||||
...record,
|
||||
|
|
@ -1098,11 +1092,9 @@ export function useAutomergeStoreV2({
|
|||
if (doc.store) {
|
||||
const storeKeys = Object.keys(doc.store)
|
||||
const docShapes = Object.values(doc.store).filter((r: any) => r?.typeName === 'shape').length
|
||||
console.log(`📊 Patch-based initialization: doc has ${storeKeys.length} records (${docShapes} shapes), store has ${existingStoreRecords.length} records (${existingStoreShapes.length} shapes), network: ${connectionStatus}`)
|
||||
|
||||
// If store already has shapes, patches have been applied (dev mode behavior)
|
||||
if (existingStoreShapes.length > 0) {
|
||||
console.log(`✅ Store already populated from patches (${existingStoreShapes.length} shapes) - using patch-based loading like dev`)
|
||||
|
||||
// REMOVED: Aggressive shape refresh that was causing coordinate loss
|
||||
// Shapes should be visible through normal patch application
|
||||
|
|
@ -1119,7 +1111,6 @@ export function useAutomergeStoreV2({
|
|||
// OFFLINE FAST PATH: When offline with local data, load immediately
|
||||
// Don't wait for patches that will never come from the network
|
||||
if (!isNetworkOnline && docShapes > 0) {
|
||||
console.log(`📴 Offline mode with ${docShapes} shapes in local storage - loading immediately`)
|
||||
|
||||
// Manually load data from Automerge doc since patches won't come through
|
||||
try {
|
||||
|
|
@ -1155,7 +1146,6 @@ export function useAutomergeStoreV2({
|
|||
})
|
||||
|
||||
if (filteredRecords.length > 0) {
|
||||
console.log(`📴 Loading ${filteredRecords.length} records from offline storage`)
|
||||
store.mergeRemoteChanges(() => {
|
||||
const pageRecords = filteredRecords.filter(r => r.typeName === 'page')
|
||||
const shapeRecords = filteredRecords.filter(r => r.typeName === 'shape')
|
||||
|
|
@ -1163,7 +1153,6 @@ export function useAutomergeStoreV2({
|
|||
const recordsToAdd = [...pageRecords, ...otherRecords, ...shapeRecords]
|
||||
store.put(recordsToAdd)
|
||||
})
|
||||
console.log(`✅ Offline data loaded: ${filteredRecords.filter(r => r.typeName === 'shape').length} shapes`)
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(`❌ Error loading offline data:`, error)
|
||||
|
|
@ -1181,7 +1170,6 @@ export function useAutomergeStoreV2({
|
|||
// The automergeChangeHandler (set up above) should process them automatically
|
||||
// Just wait a bit for patches to be processed, then set status
|
||||
if (docShapes > 0 && existingStoreShapes.length === 0) {
|
||||
console.log(`📊 Doc has ${docShapes} shapes but store is empty. Waiting for patches to be processed by handler...`)
|
||||
|
||||
// Wait briefly for patches to be processed by automergeChangeHandler
|
||||
// The handler is already set up, so it should catch patches from the initial data load
|
||||
|
|
@ -1194,7 +1182,6 @@ export function useAutomergeStoreV2({
|
|||
const currentShapes = store.allRecords().filter((r: any) => r.typeName === 'shape')
|
||||
|
||||
if (currentShapes.length > 0) {
|
||||
console.log(`✅ Patches applied successfully: ${currentShapes.length} shapes loaded via patches`)
|
||||
|
||||
// REMOVED: Aggressive shape refresh that was causing coordinate loss
|
||||
// Shapes loaded via patches should be visible without forced refresh
|
||||
|
|
@ -1237,7 +1224,6 @@ export function useAutomergeStoreV2({
|
|||
|
||||
// If doc is empty, just set status
|
||||
if (docShapes === 0) {
|
||||
console.log(`📊 Empty document - starting fresh (patch-based loading)`)
|
||||
setStoreWithStatus({
|
||||
store,
|
||||
status: "synced-remote",
|
||||
|
|
@ -1247,7 +1233,6 @@ export function useAutomergeStoreV2({
|
|||
}
|
||||
} else {
|
||||
// No store in doc - empty document
|
||||
console.log(`📊 No store in Automerge doc - starting fresh (patch-based loading)`)
|
||||
setStoreWithStatus({
|
||||
store,
|
||||
status: "synced-remote",
|
||||
|
|
|
|||
|
|
@ -68,7 +68,6 @@ function migrateStoreData(store: Record<string, any>): Record<string, any> {
|
|||
return store
|
||||
}
|
||||
|
||||
console.log('🔄 Migrating store data: fixing invalid shape indices')
|
||||
|
||||
// Copy non-shape records as-is
|
||||
for (const [id, record] of nonShapes) {
|
||||
|
|
@ -99,7 +98,6 @@ function migrateStoreData(store: Record<string, any>): Record<string, any> {
|
|||
migratedStore[id] = migratedRecord
|
||||
}
|
||||
|
||||
console.log(`✅ Migrated ${shapes.length} shapes with new indices`)
|
||||
return migratedStore
|
||||
}
|
||||
|
||||
|
|
@ -160,22 +158,6 @@ export function useAutomergeSync(config: AutomergeSyncConfig): TLStoreWithStatus
|
|||
const deletedRecordIds = data.deleted || []
|
||||
const deletedShapes = deletedRecordIds.filter(id => id.startsWith('shape:'))
|
||||
|
||||
// Log incoming sync data for debugging
|
||||
console.log(`📥 Received JSON sync: ${changedRecordCount} records (${shapeRecords.length} shapes), ${deletedRecordIds.length} deletions (${deletedShapes.length} shapes)`)
|
||||
if (shapeRecords.length > 0) {
|
||||
shapeRecords.forEach((shape: any) => {
|
||||
console.log(`📥 Shape update: ${shape.type} ${shape.id}`, {
|
||||
x: shape.x,
|
||||
y: shape.y,
|
||||
w: shape.props?.w,
|
||||
h: shape.props?.h
|
||||
})
|
||||
})
|
||||
}
|
||||
if (deletedShapes.length > 0) {
|
||||
console.log(`📥 Shape deletions:`, deletedShapes)
|
||||
}
|
||||
|
||||
// Apply changes to the Automerge document
|
||||
// This will trigger patches which will update the TLDraw store
|
||||
// NOTE: We do NOT increment pendingLocalChanges here because these are REMOTE changes
|
||||
|
|
@ -200,7 +182,6 @@ export function useAutomergeSync(config: AutomergeSyncConfig): TLStoreWithStatus
|
|||
}
|
||||
})
|
||||
|
||||
console.log(`✅ Applied ${changedRecordCount} records and ${deletedRecordIds.length} deletions to Automerge document`)
|
||||
}, [])
|
||||
|
||||
// Presence update batching to prevent "Maximum update depth exceeded" errors
|
||||
|
|
@ -315,7 +296,6 @@ export function useAutomergeSync(config: AutomergeSyncConfig): TLStoreWithStatus
|
|||
)
|
||||
|
||||
if (presenceRecord) {
|
||||
console.log('👋 Removing presence record for session:', sessionId, presenceRecord.id)
|
||||
currentStore.remove([presenceRecord.id])
|
||||
}
|
||||
} catch (error) {
|
||||
|
|
@ -380,7 +360,6 @@ export function useAutomergeSync(config: AutomergeSyncConfig): TLStoreWithStatus
|
|||
const storedDocumentId = await getDocumentId(roomId)
|
||||
|
||||
if (storedDocumentId) {
|
||||
console.log(`Found stored document ID for room ${roomId}: ${storedDocumentId}`)
|
||||
try {
|
||||
// Parse the URL to get the DocumentId
|
||||
const parsed = parseAutomergeUrl(storedDocumentId as AutomergeUrl)
|
||||
|
|
@ -392,7 +371,6 @@ export function useAutomergeSync(config: AutomergeSyncConfig): TLStoreWithStatus
|
|||
|
||||
let foundHandle: DocHandle<TLStoreSnapshot>
|
||||
if (existingHandle) {
|
||||
console.log(`Document ${docId} already in repo cache, reusing handle`)
|
||||
foundHandle = existingHandle
|
||||
} else {
|
||||
// Try to find the existing document in the repo (loads from IndexedDB)
|
||||
|
|
@ -408,14 +386,12 @@ export function useAutomergeSync(config: AutomergeSyncConfig): TLStoreWithStatus
|
|||
const localShapeCount = localDoc?.store ? Object.values(localDoc.store).filter((r: any) => r?.typeName === 'shape').length : 0
|
||||
|
||||
if (localRecordCount > 0) {
|
||||
console.log(`📦 Loaded document from IndexedDB: ${localRecordCount} records, ${localShapeCount} shapes`)
|
||||
|
||||
// CRITICAL: Migrate local IndexedDB data to fix any invalid indices
|
||||
// This ensures shapes with old-format indices like "b1" are fixed
|
||||
if (localDoc?.store) {
|
||||
const migratedStore = migrateStoreData(localDoc.store)
|
||||
if (migratedStore !== localDoc.store) {
|
||||
console.log('🔄 Applying index migration to local IndexedDB data')
|
||||
handle.change((doc: any) => {
|
||||
doc.store = migratedStore
|
||||
})
|
||||
|
|
@ -424,7 +400,6 @@ export function useAutomergeSync(config: AutomergeSyncConfig): TLStoreWithStatus
|
|||
|
||||
loadedFromLocal = true
|
||||
} else {
|
||||
console.log(`Document found in IndexedDB but is empty, will load from server`)
|
||||
}
|
||||
} catch (error) {
|
||||
console.warn(`Failed to load document ${storedDocumentId} from IndexedDB:`, error)
|
||||
|
|
@ -434,7 +409,6 @@ export function useAutomergeSync(config: AutomergeSyncConfig): TLStoreWithStatus
|
|||
|
||||
// If we didn't load from local storage, create a new document
|
||||
if (!loadedFromLocal || !handle!) {
|
||||
console.log(`Creating new Automerge document for room ${roomId}`)
|
||||
handle = repo.create<TLStoreSnapshot>()
|
||||
await handle.whenReady()
|
||||
|
||||
|
|
@ -442,7 +416,6 @@ export function useAutomergeSync(config: AutomergeSyncConfig): TLStoreWithStatus
|
|||
const documentId = handle.url
|
||||
if (documentId) {
|
||||
await saveDocumentId(roomId, documentId)
|
||||
console.log(`Saved new document mapping: ${roomId} -> ${documentId}`)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -452,14 +425,12 @@ export function useAutomergeSync(config: AutomergeSyncConfig): TLStoreWithStatus
|
|||
// This allows the UI to render immediately with local data
|
||||
if (handle.url) {
|
||||
adapter.setDocumentId(handle.url)
|
||||
console.log(`📋 Set documentId on adapter: ${handle.url}`)
|
||||
}
|
||||
|
||||
// If we loaded from local, set handle immediately so UI can render
|
||||
if (loadedFromLocal) {
|
||||
const localDoc = handle.doc() as any
|
||||
const localShapeCount = localDoc?.store ? Object.values(localDoc.store).filter((r: any) => r?.typeName === 'shape').length : 0
|
||||
console.log(`📴 Offline-ready: ${localShapeCount} shapes available from IndexedDB`)
|
||||
setHandle(handle)
|
||||
setIsLoading(false)
|
||||
}
|
||||
|
|
@ -477,7 +448,6 @@ export function useAutomergeSync(config: AutomergeSyncConfig): TLStoreWithStatus
|
|||
const result = await Promise.race([networkReadyPromise, timeoutPromise])
|
||||
|
||||
if (result === 'timeout') {
|
||||
console.log(`⏱️ Network adapter timeout - continuing in offline mode`)
|
||||
// If we haven't set the handle yet (no local data), set it now
|
||||
if (!loadedFromLocal && mounted) {
|
||||
setHandle(handle)
|
||||
|
|
@ -541,22 +511,17 @@ export function useAutomergeSync(config: AutomergeSyncConfig): TLStoreWithStatus
|
|||
}
|
||||
})
|
||||
|
||||
console.log(`📥 Merge strategy: local was ${localIsEmpty ? 'EMPTY' : 'populated'}, added ${addedFromServer} from server, preserved ${skippedExisting} local records`)
|
||||
})
|
||||
|
||||
const finalDoc = handle.doc()
|
||||
const finalRecordCount = finalDoc?.store ? Object.keys(finalDoc.store).length : 0
|
||||
console.log(`🔄 Merged server data: server had ${serverRecordCount}, local had ${localRecordCount}, final has ${finalRecordCount} records`)
|
||||
} else if (!loadedFromLocal) {
|
||||
// Server is empty and we didn't load from local - fresh start
|
||||
console.log(`Starting fresh - no data on server or locally`)
|
||||
}
|
||||
} else if (response.status === 404) {
|
||||
// No document found on server
|
||||
if (loadedFromLocal) {
|
||||
console.log(`No server document, but loaded ${handle.doc()?.store ? Object.keys(handle.doc()!.store).length : 0} records from local storage`)
|
||||
} else {
|
||||
console.log(`No document found on server - starting fresh`)
|
||||
}
|
||||
} else {
|
||||
console.warn(`Failed to load document from server: ${response.status} ${response.statusText}`)
|
||||
|
|
@ -564,7 +529,6 @@ export function useAutomergeSync(config: AutomergeSyncConfig): TLStoreWithStatus
|
|||
} catch (error) {
|
||||
// Network error - continue with local data if available
|
||||
if (loadedFromLocal) {
|
||||
console.log(`📴 Offline mode: using local data from IndexedDB`)
|
||||
} else {
|
||||
console.error("Error loading from server (offline?):", error)
|
||||
}
|
||||
|
|
@ -574,7 +538,6 @@ export function useAutomergeSync(config: AutomergeSyncConfig): TLStoreWithStatus
|
|||
const finalDoc = handle.doc() as any
|
||||
const finalStoreKeys = finalDoc?.store ? Object.keys(finalDoc.store).length : 0
|
||||
const finalShapeCount = finalDoc?.store ? Object.values(finalDoc.store).filter((r: any) => r?.typeName === 'shape').length : 0
|
||||
console.log(`✅ Automerge handle ready: ${finalStoreKeys} records, ${finalShapeCount} shapes (loaded from ${loadedFromLocal ? 'IndexedDB' : 'server/new'})`)
|
||||
|
||||
// If we haven't set the handle yet (no local data), set it now after server sync
|
||||
if (!loadedFromLocal && mounted) {
|
||||
|
|
@ -645,18 +608,6 @@ export function useAutomergeSync(config: AutomergeSyncConfig): TLStoreWithStatus
|
|||
return
|
||||
}
|
||||
|
||||
// Log significant changes for debugging
|
||||
const shapePatches = payload.patches.filter((p: any) => {
|
||||
const id = p.path?.[1]
|
||||
return id && typeof id === 'string' && id.startsWith('shape:')
|
||||
})
|
||||
|
||||
if (shapePatches.length > 0) {
|
||||
console.log('🔄 Automerge document changed (binary sync will propagate):', {
|
||||
patchCount: patchCount,
|
||||
shapePatches: shapePatches.length
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
handle.on('change', changeHandler)
|
||||
|
|
|
|||
|
|
@@ -80,7 +80,6 @@ export function FathomMeetingsPanel({ onClose, onMeetingSelect, shapeMode = fals
        }
      })
    } catch (error) {
      console.log('Production worker failed, trying local worker...')
      response = await fetch(`${LOCAL_WORKER_URL}/fathom/meetings`, {
        headers: {
          'X-Api-Key': key,
@ -150,13 +149,6 @@ export function FathomMeetingsPanel({ onClose, onMeetingSelect, shapeMode = fals
|
|||
|
||||
// Handler for individual data type buttons - creates shapes directly
|
||||
const handleDataButtonClick = async (meeting: FathomMeeting, dataType: 'summary' | 'transcript' | 'actionItems' | 'video') => {
|
||||
// Log to verify the correct meeting is being used
|
||||
console.log('🔵 handleDataButtonClick called with meeting:', {
|
||||
recording_id: meeting.recording_id,
|
||||
title: meeting.title,
|
||||
dataType
|
||||
})
|
||||
|
||||
if (!onMeetingSelect) {
|
||||
// Fallback for non-browser mode
|
||||
const options = {
|
||||
|
|
@@ -251,7 +243,6 @@ export function FathomMeetingsPanel({ onClose, onMeetingSelect, shapeMode = fals
      (callId ? `https://fathom.video/calls/${callId}` : null)

    if (videoUrl) {
      console.log('Opening Fathom video URL:', videoUrl, 'for meeting:', { callId, recording_id: meeting.recording_id })
      window.open(videoUrl, '_blank', 'noopener,noreferrer')
    } else {
      console.error('Could not determine Fathom video URL for meeting:', meeting)
@@ -272,7 +263,6 @@ export function FathomMeetingsPanel({ onClose, onMeetingSelect, shapeMode = fals
        }
      })
    } catch (error) {
      console.log('Production worker failed, trying local worker...')
      response = await fetch(`${LOCAL_WORKER_URL}/fathom/meetings/${meeting.recording_id}${includeTranscript ? '?include_transcript=true' : ''}`, {
        headers: {
          'X-Api-Key': apiKey,
@@ -37,7 +37,6 @@ export function GoogleDataTest() {
  const [viewItems, setViewItems] = useState<ShareableItem[]>([]);

  const addLog = (msg: string) => {
    console.log(msg);
    setLogs(prev => [...prev.slice(-20), `${new Date().toLocaleTimeString()}: ${msg}`]);
  };
@@ -142,7 +142,6 @@ export function HolonBrowser({ isOpen, onClose, onSelectHolon, shapeMode = false
      try {
        metadata = await holosphereService.getData(holonId, 'metadata')
      } catch (error) {
        console.log('No metadata found for holon')
      }

      // Get available lenses by trying to fetch data from common lens types
@@ -161,7 +160,6 @@ export function HolonBrowser({ isOpen, onClose, onSelectHolon, shapeMode = false
          const data = await holosphereService.getDataWithWait(holonId, lens, 1000)
          if (data && (Array.isArray(data) ? data.length > 0 : Object.keys(data).length > 0)) {
            availableLenses.push(lens)
            console.log(`✓ Found lens: ${lens} with ${Object.keys(data).length} keys`)
          }
        } catch (error) {
          // Lens doesn't exist or is empty, skip
@@ -207,7 +205,6 @@ export function HolonBrowser({ isOpen, onClose, onSelectHolon, shapeMode = false
      // Use getDataWithWait for better Gun data retrieval
      const data = await holosphereService.getDataWithWait(holonInfo.id, lens, 2000)
      setLensData(data)
      console.log(`📊 Loaded lens data for ${lens}:`, data)
    } catch (error) {
      console.error('Error loading lens data:', error)
      setLensData(null)
@@ -136,7 +136,6 @@ const CryptIDDropdown: React.FC<CryptIDDropdownProps> = ({ isDarkMode = false })
  // Listen for session-cleared event to immediately clear connections state
  useEffect(() => {
    const handleSessionCleared = () => {
      console.log('🔐 CryptIDDropdown: Session cleared - resetting connections state');
      setConnections([]);
      setConnectionsLoading(false);
      setShowDropdown(false);
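Both this dropdown and `useNetworkGraph` further down react to a session-cleared event so that stale per-user state is dropped immediately. A minimal sketch of that listener pattern is shown here; the event name `'session-cleared'` and its dispatch on `window` are assumptions inferred from the comments, not confirmed wiring.

```ts
import { useEffect } from 'react'

// Sketch only: assumes the app does window.dispatchEvent(new Event('session-cleared'))
// when credentials are wiped. The actual event source in the repo may differ.
function useSessionCleared(onCleared: () => void): void {
  useEffect(() => {
    const handleSessionCleared = () => onCleared()
    window.addEventListener('session-cleared', handleSessionCleared)
    return () => window.removeEventListener('session-cleared', handleSessionCleared)
  }, [onCleared])
}
```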
@@ -32,7 +32,6 @@ export const Profile: React.FC<ProfileProps> = ({ onLogout, onOpenVaultBrowser }
      obsidianVaultName: undefined
    });
    setIsEditingVault(false);
    console.log('🔧 Vault disconnected from profile');
  };

  const handleChangeVault = () => {
@@ -205,7 +205,6 @@ export function NetworkGraphPanel({ onExpand }: NetworkGraphPanelProps) {
      window.history.replaceState(null, '', url.toString());
    }

    console.log('Stopped following user');
  }, [editor]);

  // Keyboard handler for ESC and X to exit broadcast mode
@@ -297,9 +296,8 @@ export function NetworkGraphPanel({ onExpand }: NetworkGraphPanelProps) {
  }, [disconnect]);

  // Handle node click
  const handleNodeClick = useCallback((node: any) => {
  const handleNodeClick = useCallback((_node: any) => {
    // Could open a profile modal or navigate to user
    console.log('Node clicked:', node);
  }, []);

  // Handle going to a user's cursor on canvas (navigate/pan to their location)
@@ -320,7 +318,6 @@ export function NetworkGraphPanel({ onExpand }: NetworkGraphPanelProps) {
      editor.centerOnPoint({ x, y });
    } else {
      // If no cursor position, try to find any presence data
      console.log('Could not find cursor position for user:', node.username);
    }
  }, [editor, collaborators]);
@@ -353,9 +350,7 @@ export function NetworkGraphPanel({ onExpand }: NetworkGraphPanelProps) {
      url.searchParams.set('followId', userId);
      window.history.replaceState(null, '', url.toString());

      console.log('Now following user:', node.username, '- Press ESC or X to exit');
    } else {
      console.log('Could not find user to follow:', node.username);
    }
  }, [editor, collaborators]);
@@ -371,7 +366,6 @@ export function NetworkGraphPanel({ onExpand }: NetworkGraphPanelProps) {
  const handleEdgeClick = useCallback((edge: GraphEdge) => {
    setSelectedEdge(edge);
    // Could open an edge metadata editor modal
    console.log('Edge clicked:', edge);
  }, []);

  // Handle expand to full 3D view
@@ -313,7 +313,6 @@ export function useNetworkGraph(options: UseNetworkGraphOptions = {}): UseNetwor
  // Listen for session-cleared event to immediately clear graph state
  useEffect(() => {
    const handleSessionCleared = () => {
      console.log('🔐 useNetworkGraph: Session cleared - resetting graph state');
      clearGraphCache();
      setState({
        nodes: [],
@@ -134,7 +134,6 @@ export function saveQuartzSyncSettings(settings: Partial<QuartzSyncSettings>): v
    const currentSettings = getQuartzSyncSettings()
    const newSettings = { ...currentSettings, ...settings }
    localStorage.setItem('quartz_sync_settings', JSON.stringify(newSettings))
    console.log('✅ Quartz sync settings saved')
  } catch (error) {
    console.error('❌ Failed to save Quartz sync settings:', error)
  }
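The settings writer above merges partial updates into whatever is already persisted under `quartz_sync_settings`. A matching reader might look like the sketch below; the storage key comes from the diff, while the settings fields and defaults are assumptions.

```ts
// Assumed shape and defaults, for illustration only.
interface QuartzSyncSettings {
  enabled: boolean
  repoUrl?: string
}

const DEFAULT_QUARTZ_SYNC_SETTINGS: QuartzSyncSettings = { enabled: false }

function getQuartzSyncSettingsSketch(): QuartzSyncSettings {
  try {
    const raw = localStorage.getItem('quartz_sync_settings')
    // Merge persisted values over defaults, mirroring the partial-update writer above.
    return raw ? { ...DEFAULT_QUARTZ_SYNC_SETTINGS, ...JSON.parse(raw) } : DEFAULT_QUARTZ_SYNC_SETTINGS
  } catch {
    return DEFAULT_QUARTZ_SYNC_SETTINGS
  }
}
```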
@@ -30,7 +30,3 @@ export const getWorkerInfo = () => ({
})

// Log current environment on import (for debugging)
console.log(`🔧 Worker Environment: ${WORKER_ENV}`)
console.log(`🔧 Worker URL: ${WORKER_URL}`)
console.log(`🔧 Available environments: local, dev, production`)
console.log(`🔧 To switch: Set VITE_WORKER_ENV environment variable or change WORKER_ENV in this file`)
@@ -145,7 +145,6 @@ export const useAdvancedSpeakerDiarization = ({
      source.connect(processor)
      processor.connect(audioContext.destination)

      console.log('🎤 Advanced speaker diarization started')

    } catch (error) {
      console.error('❌ Error starting speaker diarization:', error)
@@ -172,7 +171,6 @@ export const useAdvancedSpeakerDiarization = ({
    }

    setIsProcessing(false)
    console.log('🛑 Advanced speaker diarization stopped')
  }, [])

  // Cleanup on unmount
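The diarization hook wires a media-stream source through a processor node and back to the destination, then tears the graph down on stop. A minimal Web Audio sketch of that start/stop pattern is below; the ScriptProcessorNode choice and buffer size are assumptions, not the hook's confirmed implementation.

```ts
// Illustrative start/stop wiring for an audio-analysis pipeline.
// Assumes a ScriptProcessorNode-based processor; the real hook may differ.
function startProcessingSketch(audioContext: AudioContext, stream: MediaStream): () => void {
  const source = audioContext.createMediaStreamSource(stream)
  const processor = audioContext.createScriptProcessor(4096, 1, 1)
  source.connect(processor)
  processor.connect(audioContext.destination)
  // Return a teardown that mirrors the stop path in the hook above.
  return () => {
    processor.disconnect()
    source.disconnect()
  }
}
```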
@ -44,7 +44,6 @@ export function LiveImageProvider({ children, apiKey: initialApiKey }: LiveImage
|
|||
if (apiKey) {
|
||||
fal.config({ credentials: apiKey })
|
||||
setIsConnected(true)
|
||||
console.log('LiveImage: Fal.ai client configured with default key')
|
||||
} else {
|
||||
setIsConnected(false)
|
||||
}
|
||||
|
|
@ -181,7 +180,6 @@ export function useLiveImage({
|
|||
// Generate AI image from the sketch
|
||||
const generateImage = useCallback(async () => {
|
||||
if (!context?.isConnected || !enabled) {
|
||||
console.log('LiveImage: Not connected or disabled')
|
||||
return
|
||||
}
|
||||
|
||||
|
|
@ -198,7 +196,6 @@ export function useLiveImage({
|
|||
|
||||
// Check if this request is still valid (not superseded by newer request)
|
||||
if (currentVersion !== requestVersionRef.current) {
|
||||
console.log('LiveImage: Request superseded, skipping')
|
||||
return
|
||||
}
|
||||
|
||||
|
|
@ -209,7 +206,6 @@ export function useLiveImage({
|
|||
? `${prompt}, hd, award-winning, impressive, detailed`
|
||||
: 'hd, award-winning, impressive, detailed illustration'
|
||||
|
||||
console.log('LiveImage: Generating with prompt:', fullPrompt)
|
||||
|
||||
const result = await fal.subscribe(modelEndpoint, {
|
||||
input: {
|
||||
|
|
@ -228,7 +224,6 @@ export function useLiveImage({
|
|||
|
||||
// Check if this result is still relevant
|
||||
if (currentVersion !== requestVersionRef.current) {
|
||||
console.log('LiveImage: Result from old request, discarding')
|
||||
return
|
||||
}
|
||||
|
||||
|
|
@ -247,7 +242,6 @@ export function useLiveImage({
|
|||
}
|
||||
|
||||
if (imageUrl) {
|
||||
console.log('LiveImage: Generated image:', imageUrl)
|
||||
setState(prev => ({
|
||||
...prev,
|
||||
isGenerating: false,
|
||||
|
|
|
|||
|
|
@@ -79,7 +79,6 @@ export function usePinnedToView(

  // If just became pinned (transition from false to true)
  if (isPinned && !wasPinnedRef.current) {
    console.log('📌 usePinnedToView: Shape became PINNED', shapeId)
    // Clear any leftover state from previous pin sessions
    clearPinState()
@@ -152,7 +151,6 @@ export function usePinnedToView(

  // If just became unpinned, animate back to original coordinates
  if (!isPinned && wasPinnedRef.current) {
    console.log('📌 usePinnedToView: Shape became UNPINNED', shapeId)
    // Cancel any ongoing animations
    if (driftAnimationRef.current) {
      cancelAnimationFrame(driftAnimationRef.current)
@ -163,7 +163,6 @@ export const useWebSpeechTranscription = ({
|
|||
// Reduced debug logging
|
||||
} else {
|
||||
setIsSupported(false)
|
||||
console.log('❌ Web Speech API is not supported')
|
||||
onError?.(new Error('Web Speech API is not supported in this browser'))
|
||||
}
|
||||
}, [onError])
|
||||
|
|
@ -181,7 +180,6 @@ export const useWebSpeechTranscription = ({
|
|||
recognition.maxAlternatives = 1
|
||||
|
||||
recognition.onstart = () => {
|
||||
console.log('🎤 Web Speech API started')
|
||||
setIsRecording(true)
|
||||
setIsTranscribing(true)
|
||||
}
|
||||
|
|
@ -221,7 +219,6 @@ export const useWebSpeechTranscription = ({
|
|||
finalTranscriptRef.current += newText
|
||||
setTranscript(finalTranscriptRef.current)
|
||||
onTranscriptUpdate?.(newText) // Only send the new text portion
|
||||
console.log(`✅ Final transcript: "${processedFinal}" (confidence: ${confidence.toFixed(2)})`)
|
||||
|
||||
// Trigger pause detection
|
||||
handlePauseDetection()
|
||||
|
|
@ -232,7 +229,6 @@ export const useWebSpeechTranscription = ({
|
|||
const processedInterim = processTranscript(interimTranscript, false)
|
||||
interimTranscriptRef.current = processedInterim
|
||||
setInterimTranscript(processedInterim)
|
||||
console.log(`🔄 Interim transcript: "${processedInterim}"`)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -244,7 +240,6 @@ export const useWebSpeechTranscription = ({
|
|||
}
|
||||
|
||||
recognition.onend = () => {
|
||||
console.log('🛑 Web Speech API ended')
|
||||
setIsRecording(false)
|
||||
setIsTranscribing(false)
|
||||
}
|
||||
|
|
@ -260,7 +255,6 @@ export const useWebSpeechTranscription = ({
|
|||
}
|
||||
|
||||
try {
|
||||
console.log('🎤 Starting Web Speech API recording...')
|
||||
|
||||
// Don't reset transcripts for continuous transcription - keep existing content
|
||||
// finalTranscriptRef.current = ''
|
||||
|
|
@ -291,7 +285,6 @@ export const useWebSpeechTranscription = ({
|
|||
// Stop recording
|
||||
const stopRecording = useCallback(() => {
|
||||
if (recognitionRef.current) {
|
||||
console.log('🛑 Stopping Web Speech API recording...')
|
||||
recognitionRef.current.stop()
|
||||
recognitionRef.current = null
|
||||
}
|
||||
|
|
|
|||
|
|
@ -207,7 +207,6 @@ export const useWhisperTranscription = ({
|
|||
const initializeTranscriber = useCallback(async () => {
|
||||
// Skip model loading if using RunPod
|
||||
if (shouldUseRunPod) {
|
||||
console.log('🚀 Using RunPod WhisperX endpoint - skipping local model loading')
|
||||
setModelLoaded(true) // Mark as "loaded" since we don't need a local model
|
||||
return null
|
||||
}
|
||||
|
|
@ -215,7 +214,6 @@ export const useWhisperTranscription = ({
|
|||
if (transcriberRef.current) return transcriberRef.current
|
||||
|
||||
try {
|
||||
console.log('🤖 Loading Whisper model...')
|
||||
|
||||
// Check if we're running in a CORS-restricted environment
|
||||
if (typeof window !== 'undefined' && window.location.protocol === 'file:') {
|
||||
|
|
@ -230,16 +228,13 @@ export const useWhisperTranscription = ({
|
|||
|
||||
for (const modelOption of modelOptions) {
|
||||
try {
|
||||
console.log(`🔄 Trying model: ${modelOption.name}`)
|
||||
transcriber = await pipeline('automatic-speech-recognition', modelOption.name, {
|
||||
...modelOption.options,
|
||||
progress_callback: (progress: any) => {
|
||||
if (progress.status === 'downloading') {
|
||||
console.log(`📦 Downloading model: ${progress.file} (${Math.round(progress.progress * 100)}%)`)
|
||||
}
|
||||
}
|
||||
})
|
||||
console.log(`✅ Successfully loaded model: ${modelOption.name}`)
|
||||
break
|
||||
} catch (error) {
|
||||
console.warn(`⚠️ Failed to load model ${modelOption.name}:`, error)
|
||||
|
|
@ -273,9 +268,7 @@ export const useWhisperTranscription = ({
|
|||
quantized: true,
|
||||
progress_callback: (progress: any) => {
|
||||
if (progress.status === 'downloading') {
|
||||
console.log(`📦 Downloading model: ${progress.file} (${Math.round(progress.progress * 100)}%)`)
|
||||
} else if (progress.status === 'loading') {
|
||||
console.log(`🔄 Loading model: ${progress.file}`)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
|
@ -288,7 +281,6 @@ export const useWhisperTranscription = ({
|
|||
|
||||
transcriberRef.current = transcriber
|
||||
setModelLoaded(true)
|
||||
console.log(`✅ Whisper model loaded: ${modelName}`)
|
||||
|
||||
return transcriber
|
||||
} catch (error) {
|
||||
|
|
@ -356,8 +348,6 @@ export const useWhisperTranscription = ({
|
|||
previousTranscriptLengthRef.current = processedTranscript.length
|
||||
}
|
||||
|
||||
console.log(`📝 Real-time transcript updated: "${newTextTrimmed}" -> Total: "${processedTranscript}"`)
|
||||
console.log(`🔄 Streaming transcript state updated, calling onTranscriptUpdate with: "${processedTranscript}"`)
|
||||
}
|
||||
}, [onTranscriptUpdate, processTranscript])
|
||||
|
||||
|
|
@ -372,7 +362,6 @@ export const useWhisperTranscription = ({
|
|||
|
||||
const chunks = audioChunksRef.current || []
|
||||
if (chunks.length === 0 || chunks.length < 2) {
|
||||
console.log(`⚠️ Not enough chunks for real-time processing: ${chunks.length}`)
|
||||
return
|
||||
}
|
||||
|
||||
|
|
@ -381,13 +370,11 @@ export const useWhisperTranscription = ({
|
|||
const validChunks = recentChunks.filter(chunk => chunk && chunk.size > 2000) // Filter out small chunks
|
||||
|
||||
if (validChunks.length < 2) {
|
||||
console.log(`⚠️ Not enough valid chunks for real-time processing: ${validChunks.length}`)
|
||||
return
|
||||
}
|
||||
|
||||
const totalSize = validChunks.reduce((sum, chunk) => sum + chunk.size, 0)
|
||||
if (totalSize < 20000) { // Increased to 20KB for reliable decoding
|
||||
console.log(`⚠️ Not enough audio data for real-time processing: ${totalSize} bytes`)
|
||||
return
|
||||
}
|
||||
|
||||
|
|
@ -397,16 +384,12 @@ export const useWhisperTranscription = ({
|
|||
mimeType = mediaRecorderRef.current.mimeType
|
||||
}
|
||||
|
||||
console.log(`🔄 Real-time processing ${validChunks.length} chunks, total size: ${totalSize} bytes, type: ${mimeType}`)
|
||||
console.log(`🔄 Chunk sizes:`, validChunks.map(c => c.size))
|
||||
console.log(`🔄 Chunk types:`, validChunks.map(c => c.type))
|
||||
|
||||
// Create a more robust blob with proper headers
|
||||
const tempBlob = new Blob(validChunks, { type: mimeType })
|
||||
|
||||
// Validate blob size
|
||||
if (tempBlob.size < 10000) {
|
||||
console.log(`⚠️ Blob too small for processing: ${tempBlob.size} bytes`)
|
||||
return
|
||||
}
|
||||
|
||||
|
|
@ -414,7 +397,6 @@ export const useWhisperTranscription = ({
|
|||
|
||||
// Validate audio buffer
|
||||
if (audioBuffer.byteLength < 10000) {
|
||||
console.log(`⚠️ Audio buffer too small: ${audioBuffer.byteLength} bytes`)
|
||||
return
|
||||
}
|
||||
|
||||
|
|
@ -424,18 +406,14 @@ export const useWhisperTranscription = ({
|
|||
try {
|
||||
// Try to decode the audio buffer
|
||||
audioBufferFromBlob = await audioContext.decodeAudioData(audioBuffer)
|
||||
console.log(`✅ Successfully decoded real-time audio buffer: ${audioBufferFromBlob.length} samples`)
|
||||
} catch (decodeError) {
|
||||
console.log('⚠️ Real-time chunk decode failed, trying alternative approach:', decodeError)
|
||||
|
||||
// Try alternative approach: create a new blob with different MIME type
|
||||
try {
|
||||
const alternativeBlob = new Blob(validChunks, { type: 'audio/webm' })
|
||||
const alternativeBuffer = await alternativeBlob.arrayBuffer()
|
||||
audioBufferFromBlob = await audioContext.decodeAudioData(alternativeBuffer)
|
||||
console.log(`✅ Successfully decoded with alternative approach: ${audioBufferFromBlob.length} samples`)
|
||||
} catch (altError) {
|
||||
console.log('⚠️ Alternative decode also failed, skipping:', altError)
|
||||
await audioContext.close()
|
||||
return
|
||||
}
|
||||
|
|
@ -459,15 +437,12 @@ export const useWhisperTranscription = ({
|
|||
const maxAmplitude = Math.max(...processedAudioData.map(Math.abs))
|
||||
const dynamicRange = maxAmplitude - Math.min(...processedAudioData.map(Math.abs))
|
||||
|
||||
console.log(`🔊 Real-time audio analysis: RMS=${rms.toFixed(6)}, Max=${maxAmplitude.toFixed(6)}, Range=${dynamicRange.toFixed(6)}`)
|
||||
|
||||
if (rms < 0.001) {
|
||||
console.log('⚠️ Audio too quiet for transcription (RMS < 0.001)')
|
||||
return // Skip very quiet audio
|
||||
}
|
||||
|
||||
if (dynamicRange < 0.01) {
|
||||
console.log('⚠️ Audio has very low dynamic range, may be mostly noise')
|
||||
return
|
||||
}
|
||||
|
||||
|
|
@ -481,20 +456,17 @@ export const useWhisperTranscription = ({
|
|||
return // Skip very short audio
|
||||
}
|
||||
|
||||
console.log(`🎵 Real-time audio: ${processedAudioData.length} samples (${(processedAudioData.length / 16000).toFixed(2)}s)`)
|
||||
|
||||
let transcriptionText = ''
|
||||
|
||||
// Use RunPod if configured, otherwise use local model
|
||||
if (shouldUseRunPod) {
|
||||
console.log('🚀 Using RunPod WhisperX API for real-time transcription...')
|
||||
// Convert processed audio data back to blob for RunPod
|
||||
const wavBlob = await createWavBlob(processedAudioData, 16000)
|
||||
transcriptionText = await transcribeWithRunPod(wavBlob, language)
|
||||
} else {
|
||||
// Use local Whisper model
|
||||
if (!transcriberRef.current) {
|
||||
console.log('⚠️ Transcriber not available for real-time processing')
|
||||
return
|
||||
}
|
||||
const result = await transcriberRef.current(processedAudioData, {
|
||||
|
|
@ -512,11 +484,8 @@ export const useWhisperTranscription = ({
|
|||
}
|
||||
if (transcriptionText.trim()) {
|
||||
lastTranscriptionTimeRef.current = Date.now()
|
||||
console.log(`✅ Real-time transcript: "${transcriptionText.trim()}"`)
|
||||
console.log(`🔄 Calling handleStreamingTranscriptUpdate with: "${transcriptionText.trim()}"`)
|
||||
handleStreamingTranscriptUpdate(transcriptionText.trim())
|
||||
} else {
|
||||
console.log('⚠️ No real-time transcription text produced, trying fallback parameters...')
|
||||
|
||||
// Try with more permissive parameters for real-time processing (only for local model)
|
||||
if (!shouldUseRunPod && transcriberRef.current) {
|
||||
|
|
@ -533,14 +502,11 @@ export const useWhisperTranscription = ({
|
|||
|
||||
const fallbackText = fallbackResult?.text || ''
|
||||
if (fallbackText.trim()) {
|
||||
console.log(`✅ Fallback real-time transcript: "${fallbackText.trim()}"`)
|
||||
lastTranscriptionTimeRef.current = Date.now()
|
||||
handleStreamingTranscriptUpdate(fallbackText.trim())
|
||||
} else {
|
||||
console.log('⚠️ Fallback transcription also produced no text')
|
||||
}
|
||||
} catch (fallbackError) {
|
||||
console.log('⚠️ Fallback transcription failed:', fallbackError)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -553,20 +519,17 @@ export const useWhisperTranscription = ({
|
|||
// Process recorded audio chunks (final processing)
|
||||
const processAudioChunks = useCallback(async () => {
|
||||
if (audioChunksRef.current.length === 0) {
|
||||
console.log('⚠️ No audio chunks to process')
|
||||
return
|
||||
}
|
||||
|
||||
// For local model, ensure transcriber is loaded
|
||||
if (!shouldUseRunPod) {
|
||||
if (!transcriberRef.current) {
|
||||
console.log('⚠️ No transcriber available')
|
||||
return
|
||||
}
|
||||
|
||||
// Ensure model is loaded
|
||||
if (!modelLoaded) {
|
||||
console.log('⚠️ Model not loaded yet, waiting...')
|
||||
try {
|
||||
await initializeTranscriber()
|
||||
} catch (error) {
|
||||
|
|
@ -579,7 +542,6 @@ export const useWhisperTranscription = ({
|
|||
|
||||
try {
|
||||
setIsTranscribing(true)
|
||||
console.log('🔄 Processing final audio chunks...')
|
||||
|
||||
// Create a blob from all chunks with proper MIME type detection
|
||||
let mimeType = 'audio/webm;codecs=opus'
|
||||
|
|
@ -591,17 +553,14 @@ export const useWhisperTranscription = ({
|
|||
const validChunks = audioChunksRef.current.filter(chunk => chunk && chunk.size > 1000)
|
||||
|
||||
if (validChunks.length === 0) {
|
||||
console.log('⚠️ No valid audio chunks to process')
|
||||
return
|
||||
}
|
||||
|
||||
console.log(`🔄 Processing ${validChunks.length} valid chunks out of ${audioChunksRef.current.length} total chunks`)
|
||||
|
||||
const audioBlob = new Blob(validChunks, { type: mimeType })
|
||||
|
||||
// Validate blob size
|
||||
if (audioBlob.size < 10000) {
|
||||
console.log(`⚠️ Audio blob too small for processing: ${audioBlob.size} bytes`)
|
||||
return
|
||||
}
|
||||
|
||||
|
|
@ -610,7 +569,6 @@ export const useWhisperTranscription = ({
|
|||
|
||||
// Validate array buffer
|
||||
if (arrayBuffer.byteLength < 10000) {
|
||||
console.log(`⚠️ Audio buffer too small: ${arrayBuffer.byteLength} bytes`)
|
||||
return
|
||||
}
|
||||
|
||||
|
|
@ -620,17 +578,14 @@ export const useWhisperTranscription = ({
|
|||
let audioBuffer: AudioBuffer
|
||||
try {
|
||||
audioBuffer = await audioContext.decodeAudioData(arrayBuffer)
|
||||
console.log(`✅ Successfully decoded final audio buffer: ${audioBuffer.length} samples`)
|
||||
} catch (decodeError) {
|
||||
console.error('❌ Failed to decode final audio buffer:', decodeError)
|
||||
|
||||
// Try alternative approach with different MIME type
|
||||
try {
|
||||
console.log('🔄 Trying alternative MIME type for final processing...')
|
||||
const alternativeBlob = new Blob(validChunks, { type: 'audio/webm' })
|
||||
const alternativeBuffer = await alternativeBlob.arrayBuffer()
|
||||
audioBuffer = await audioContext.decodeAudioData(alternativeBuffer)
|
||||
console.log(`✅ Successfully decoded with alternative approach: ${audioBuffer.length} samples`)
|
||||
} catch (altError) {
|
||||
console.error('❌ Alternative decode also failed:', altError)
|
||||
await audioContext.close()
|
||||
|
|
@ -643,38 +598,29 @@ export const useWhisperTranscription = ({
|
|||
// Get the first channel as Float32Array
|
||||
const audioData = audioBuffer.getChannelData(0)
|
||||
|
||||
console.log(`🔍 Audio buffer info: sampleRate=${audioBuffer.sampleRate}, length=${audioBuffer.length}, duration=${audioBuffer.duration}s`)
|
||||
console.log(`🔍 Audio data: length=${audioData.length}, first 10 values:`, Array.from(audioData.slice(0, 10)))
|
||||
|
||||
// Check for meaningful audio content
|
||||
const rms = Math.sqrt(audioData.reduce((sum, val) => sum + val * val, 0) / audioData.length)
|
||||
console.log(`🔊 Audio RMS level: ${rms.toFixed(6)}`)
|
||||
|
||||
if (rms < 0.001) {
|
||||
console.log('⚠️ Audio appears to be mostly silence (RMS < 0.001)')
|
||||
}
|
||||
|
||||
// Resample if necessary
|
||||
let processedAudioData: Float32Array = audioData
|
||||
if (audioBuffer.sampleRate !== 16000) {
|
||||
console.log(`🔄 Resampling from ${audioBuffer.sampleRate}Hz to 16000Hz`)
|
||||
processedAudioData = resampleAudio(audioData as Float32Array, audioBuffer.sampleRate, 16000)
|
||||
}
|
||||
|
||||
console.log(`🎵 Processing audio: ${processedAudioData.length} samples (${(processedAudioData.length / 16000).toFixed(2)}s)`)
|
||||
|
||||
console.log('🔄 Starting transcription...')
|
||||
|
||||
let newText = ''
|
||||
|
||||
// Use RunPod if configured, otherwise use local model
|
||||
if (shouldUseRunPod) {
|
||||
console.log('🚀 Using RunPod WhisperX API...')
|
||||
// Convert processed audio data back to blob for RunPod
|
||||
// Create a WAV blob from the Float32Array
|
||||
const wavBlob = await createWavBlob(processedAudioData, 16000)
|
||||
newText = await transcribeWithRunPod(wavBlob, language)
|
||||
console.log('✅ RunPod transcription result:', newText)
|
||||
} else {
|
||||
// Use local Whisper model
|
||||
if (!transcriberRef.current) {
|
||||
|
|
@ -686,7 +632,6 @@ export const useWhisperTranscription = ({
|
|||
return_timestamps: false
|
||||
})
|
||||
|
||||
console.log('🔍 Transcription result:', result)
|
||||
newText = result?.text?.trim() || ''
|
||||
}
|
||||
if (newText) {
|
||||
|
|
@ -710,24 +655,19 @@ export const useWhisperTranscription = ({
|
|||
previousTranscriptLengthRef.current = updatedTranscript.length
|
||||
}
|
||||
|
||||
console.log(`✅ Transcription: "${processedText}" -> Total: "${updatedTranscript}"`)
|
||||
}
|
||||
} else {
|
||||
console.log('⚠️ No transcription text produced')
|
||||
|
||||
// Try alternative transcription parameters (only for local model)
|
||||
if (!shouldUseRunPod && transcriberRef.current) {
|
||||
console.log('🔄 Trying alternative transcription parameters...')
|
||||
try {
|
||||
const altResult = await transcriberRef.current(processedAudioData, {
|
||||
task: 'transcribe',
|
||||
return_timestamps: false
|
||||
})
|
||||
console.log('🔍 Alternative transcription result:', altResult)
|
||||
|
||||
if (altResult?.text?.trim()) {
|
||||
const processedAltText = processTranscript(altResult.text, enableStreaming)
|
||||
console.log('✅ Alternative transcription successful:', processedAltText)
|
||||
const currentTranscript = transcriptRef.current
|
||||
const updatedTranscript = currentTranscript ? `${currentTranscript} ${processedAltText}` : processedAltText
|
||||
|
||||
|
|
@ -742,7 +682,6 @@ export const useWhisperTranscription = ({
|
|||
}
|
||||
}
|
||||
} catch (altError) {
|
||||
console.log('⚠️ Alternative transcription also failed:', altError)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -761,12 +700,9 @@ export const useWhisperTranscription = ({
|
|||
// Start recording
|
||||
const startRecording = useCallback(async () => {
|
||||
try {
|
||||
console.log('🎤 Starting recording...')
|
||||
console.log('🔍 enableStreaming in startRecording:', enableStreaming)
|
||||
|
||||
// Ensure model is loaded before starting (skip for RunPod)
|
||||
if (!shouldUseRunPod && !modelLoaded) {
|
||||
console.log('🔄 Model not loaded, initializing...')
|
||||
await initializeTranscriber()
|
||||
} else if (shouldUseRunPod) {
|
||||
// For RunPod, just mark as ready
|
||||
|
|
@ -813,7 +749,6 @@ export const useWhisperTranscription = ({
|
|||
|
||||
for (const option of options) {
|
||||
if (MediaRecorder.isTypeSupported(option.mimeType)) {
|
||||
console.log('🎵 Using MIME type:', option.mimeType)
|
||||
mediaRecorder = new MediaRecorder(stream, option)
|
||||
break
|
||||
}
|
||||
|
|
@ -825,7 +760,6 @@ export const useWhisperTranscription = ({
|
|||
|
||||
// Store the MIME type for later use
|
||||
const mimeType = mediaRecorder.mimeType
|
||||
console.log('🎵 Final MIME type:', mimeType)
|
||||
|
||||
mediaRecorderRef.current = mediaRecorder
|
||||
|
||||
|
|
@ -835,56 +769,44 @@ export const useWhisperTranscription = ({
|
|||
// Validate chunk before adding
|
||||
if (event.data.size > 1000) { // Only add chunks with meaningful size
|
||||
audioChunksRef.current.push(event.data)
|
||||
console.log(`📦 Received chunk ${audioChunksRef.current.length}, size: ${event.data.size} bytes, type: ${event.data.type}`)
|
||||
|
||||
// Limit the number of chunks to prevent memory issues
|
||||
if (audioChunksRef.current.length > 20) {
|
||||
audioChunksRef.current = audioChunksRef.current.slice(-15) // Keep last 15 chunks
|
||||
}
|
||||
} else {
|
||||
console.log(`⚠️ Skipping small chunk: ${event.data.size} bytes`)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Handle recording stop
|
||||
mediaRecorder.onstop = () => {
|
||||
console.log('🛑 Recording stopped, processing audio...')
|
||||
processAudioChunks()
|
||||
}
|
||||
|
||||
// Handle MediaRecorder state changes
|
||||
mediaRecorder.onstart = () => {
|
||||
console.log('🎤 MediaRecorder started')
|
||||
console.log('🔍 enableStreaming value:', enableStreaming)
|
||||
setIsRecording(true)
|
||||
isRecordingRef.current = true
|
||||
|
||||
// Start periodic transcription processing for streaming mode
|
||||
if (enableStreaming) {
|
||||
console.log('🔄 Starting streaming transcription (every 0.8 seconds)')
|
||||
periodicTranscriptionRef.current = setInterval(() => {
|
||||
console.log('🔄 Interval triggered, isRecordingRef.current:', isRecordingRef.current)
|
||||
if (isRecordingRef.current) {
|
||||
console.log('🔄 Running periodic streaming transcription...')
|
||||
processAccumulatedAudioChunks()
|
||||
} else {
|
||||
console.log('⚠️ Not running transcription - recording stopped')
|
||||
}
|
||||
}, 800) // Update every 0.8 seconds for better responsiveness
|
||||
} else {
|
||||
console.log('ℹ️ Streaming transcription disabled - enableStreaming is false')
|
||||
}
|
||||
}
|
||||
|
||||
// Start recording with appropriate timeslice
|
||||
const timeslice = enableStreaming ? 1000 : 2000 // Larger chunks for more stable processing
|
||||
console.log(`🎵 Starting recording with ${timeslice}ms timeslice`)
|
||||
mediaRecorder.start(timeslice)
|
||||
isRecordingRef.current = true
|
||||
setIsRecording(true)
|
||||
|
||||
console.log('✅ Recording started - MediaRecorder state:', mediaRecorder.state)
|
||||
|
||||
} catch (error) {
|
||||
console.error('❌ Error starting recording:', error)
|
||||
|
|
@ -895,7 +817,6 @@ export const useWhisperTranscription = ({
|
|||
// Stop recording
|
||||
const stopRecording = useCallback(async () => {
|
||||
try {
|
||||
console.log('🛑 Stopping recording...')
|
||||
|
||||
// Clear periodic transcription timer
|
||||
if (periodicTranscriptionRef.current) {
|
||||
|
|
@ -915,7 +836,6 @@ export const useWhisperTranscription = ({
|
|||
isRecordingRef.current = false
|
||||
setIsRecording(false)
|
||||
|
||||
console.log('✅ Recording stopped')
|
||||
|
||||
} catch (error) {
|
||||
console.error('❌ Error stopping recording:', error)
|
||||
|
|
@ -925,12 +845,10 @@ export const useWhisperTranscription = ({
|
|||
|
||||
// Pause recording (placeholder for compatibility)
|
||||
const pauseRecording = useCallback(async () => {
|
||||
console.log('⏸️ Pause recording not implemented')
|
||||
}, [])
|
||||
|
||||
// Cleanup function
|
||||
const cleanup = useCallback(() => {
|
||||
console.log('🧹 Cleaning up transcription resources...')
|
||||
|
||||
// Stop recording if active
|
||||
if (isRecordingRef.current) {
|
||||
|
|
@ -958,13 +876,11 @@ export const useWhisperTranscription = ({
|
|||
// Clear chunks
|
||||
audioChunksRef.current = []
|
||||
|
||||
console.log('✅ Cleanup completed')
|
||||
}, [])
|
||||
|
||||
// Convenience functions for compatibility
|
||||
const startTranscription = useCallback(async () => {
|
||||
try {
|
||||
console.log('🎤 Starting transcription...')
|
||||
|
||||
// Reset all transcription state for clean start
|
||||
streamingTranscriptRef.current = ''
|
||||
|
|
@ -987,7 +903,6 @@ export const useWhisperTranscription = ({
|
|||
}
|
||||
|
||||
await startRecording()
|
||||
console.log('✅ Transcription started')
|
||||
|
||||
} catch (error) {
|
||||
console.error('❌ Error starting transcription:', error)
|
||||
|
|
@ -997,9 +912,7 @@ export const useWhisperTranscription = ({
|
|||
|
||||
const stopTranscription = useCallback(async () => {
|
||||
try {
|
||||
console.log('🛑 Stopping transcription...')
|
||||
await stopRecording()
|
||||
console.log('✅ Transcription stopped')
|
||||
} catch (error) {
|
||||
console.error('❌ Error stopping transcription:', error)
|
||||
onError?.(error as Error)
|
||||
|
|
@ -1008,9 +921,7 @@ export const useWhisperTranscription = ({
|
|||
|
||||
const pauseTranscription = useCallback(async () => {
|
||||
try {
|
||||
console.log('⏸️ Pausing transcription...')
|
||||
await pauseRecording()
|
||||
console.log('✅ Transcription paused')
|
||||
} catch (error) {
|
||||
console.error('❌ Error pausing transcription:', error)
|
||||
onError?.(error as Error)
|
||||
@ -51,7 +51,6 @@ export class HoloSphereService {
|
|||
this.isInitialized = true
|
||||
// Only log if Holon functionality is enabled
|
||||
if (HOLON_ENABLED) {
|
||||
console.log('⚠️ HoloSphere service initialized (STUB MODE - awaiting Nostr integration)')
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -308,20 +308,16 @@ export async function isAIOrchestratorAvailable(): Promise<boolean> {
|
|||
const url = import.meta.env.VITE_AI_ORCHESTRATOR_URL
|
||||
|
||||
if (!url) {
|
||||
console.log('🔍 AI Orchestrator URL not configured')
|
||||
return false
|
||||
}
|
||||
|
||||
try {
|
||||
const available = await aiOrchestrator.isAvailable()
|
||||
if (available) {
|
||||
console.log('✅ AI Orchestrator is available at', url)
|
||||
} else {
|
||||
console.log('⚠️ AI Orchestrator configured but not responding at', url)
|
||||
}
|
||||
return available
|
||||
} catch (error) {
|
||||
console.log('❌ Error checking AI Orchestrator availability:', error)
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -31,7 +31,6 @@ export class AuthService {
|
|||
obsidianVaultPath: storedSession.obsidianVaultPath,
|
||||
obsidianVaultName: storedSession.obsidianVaultName
|
||||
};
|
||||
console.log('🔐 Restored authenticated session for:', storedSession.username);
|
||||
} else {
|
||||
// No valid session - user is anonymous
|
||||
// Note: User may still have crypto keys stored from previous sessions,
|
||||
|
|
@ -42,7 +41,6 @@ export class AuthService {
|
|||
loading: false,
|
||||
backupCreated: null
|
||||
};
|
||||
console.log('🔐 No valid session found - user is anonymous');
|
||||
}
|
||||
|
||||
return { session };
|
||||
|
|
|
|||
|
|
@ -39,7 +39,6 @@ export const addRegisteredUser = (username: string): void => {
|
|||
|
||||
// Check if a username is available
|
||||
export const isUsernameAvailable = async (username: string): Promise<boolean> => {
|
||||
console.log('Checking if username is available:', username);
|
||||
|
||||
try {
|
||||
// Get the list of registered users
|
||||
|
|
@ -48,7 +47,6 @@ export const isUsernameAvailable = async (username: string): Promise<boolean> =>
|
|||
// Check if the username is already taken
|
||||
const isAvailable = !users.includes(username);
|
||||
|
||||
console.log('Username availability result:', isAvailable);
|
||||
return isAvailable;
|
||||
} catch (error) {
|
||||
console.error('Error checking username availability:', error);
|
||||
|
|
|
|||
|
|
@ -37,7 +37,6 @@ export const saveSession = (session: Session): boolean => {
|
|||
window.dispatchEvent(new CustomEvent('session-logged-in', {
|
||||
detail: { username: session.username }
|
||||
}));
|
||||
console.log('🔐 Session saved and session-logged-in event dispatched for:', session.username);
|
||||
}
|
||||
|
||||
return true;
|
||||
|
|
@ -56,21 +55,14 @@ export const loadSession = (): StoredSession | null => {
|
|||
try {
|
||||
const stored = localStorage.getItem(SESSION_STORAGE_KEY);
|
||||
if (!stored) {
|
||||
console.log('🔐 loadSession: No stored session found');
|
||||
return null;
|
||||
}
|
||||
|
||||
const parsed = JSON.parse(stored) as StoredSession;
|
||||
console.log('🔐 loadSession: Found stored session:', {
|
||||
username: parsed.username,
|
||||
authed: parsed.authed,
|
||||
timestamp: new Date(parsed.timestamp).toISOString()
|
||||
});
|
||||
|
||||
// Check if session is not too old (7 days)
|
||||
const maxAge = 7 * 24 * 60 * 60 * 1000; // 7 days in milliseconds
|
||||
if (Date.now() - parsed.timestamp > maxAge) {
|
||||
console.log('🔐 loadSession: Session expired, removing');
|
||||
localStorage.removeItem(SESSION_STORAGE_KEY);
|
||||
return null;
|
||||
}
|
||||
|
|
@ -129,7 +121,6 @@ export const clearStoredSession = (): boolean => {
|
|||
detail: { previousUsername: username }
|
||||
}));
|
||||
|
||||
console.log('🔐 Session cleared - removed session state, preserved account data (crypto keys, tldraw IDs)');
|
||||
return true;
|
||||
} catch (error) {
|
||||
console.error('🔧 Error clearing session:', error);
|
||||
|
|
|
|||
|
|
@ -324,11 +324,9 @@ export function getOpenAIConfig(): { apiKey: string } | null {
|
|||
try {
|
||||
const parsed = JSON.parse(userApiKeys)
|
||||
if (parsed.keys && parsed.keys.openai && parsed.keys.openai.trim() !== '') {
|
||||
console.log('🔑 Found user-specific OpenAI API key')
|
||||
return { apiKey: parsed.keys.openai }
|
||||
}
|
||||
} catch (e) {
|
||||
console.log('🔑 Error parsing user-specific API keys:', e)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -339,22 +337,18 @@ export function getOpenAIConfig(): { apiKey: string } | null {
try {
const parsed = JSON.parse(settings)
if (parsed.keys && parsed.keys.openai && parsed.keys.openai.trim() !== '') {
console.log('🔑 Found global OpenAI API key')
return { apiKey: parsed.keys.openai }
}
} catch (e) {
// If it's not JSON, it might be the old format (just a string)
if (settings.startsWith('sk-') && settings.trim() !== '') {
console.log('🔑 Found old format OpenAI API key')
return { apiKey: settings }
}
}
}

console.log('🔑 No OpenAI API key found')
return null
} catch (e) {
console.log('🔑 Error getting OpenAI config:', e)
return null
}
}
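The lookup order here is: user-specific keys stored as JSON, then the global settings entry, then a legacy raw `sk-` string. A condensed sketch of that fallback chain (the exact localStorage key names are assumptions for illustration, not confirmed by this diff):

```typescript
// Sketch of the key-resolution order described above; "user_api_keys" and
// "openai_api_key" are assumed storage keys.
function resolveOpenAIKey(): string | null {
  // 1. User-specific keys stored as JSON: { keys: { openai: "sk-..." } }
  const userKeys = localStorage.getItem('user_api_keys')
  if (userKeys) {
    try {
      const parsed = JSON.parse(userKeys)
      if (parsed?.keys?.openai?.trim()) return parsed.keys.openai
    } catch { /* fall through to the next source */ }
  }
  // 2. Global settings entry, either JSON or a legacy bare key string
  const settings = localStorage.getItem('openai_api_key')
  if (settings) {
    try {
      const parsed = JSON.parse(settings)
      if (parsed?.keys?.openai?.trim()) return parsed.keys.openai
    } catch {
      if (settings.startsWith('sk-')) return settings // old format: bare key
    }
  }
  return null
}
```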
@ -413,11 +407,9 @@ export function getGeminiConfig(): { apiKey: string } | null {
|
|||
try {
|
||||
const parsed = JSON.parse(userApiKeys)
|
||||
if (parsed.keys && parsed.keys.gemini && parsed.keys.gemini.trim() !== '') {
|
||||
console.log('🔑 Found user-specific Gemini API key')
|
||||
return { apiKey: parsed.keys.gemini }
|
||||
}
|
||||
} catch (e) {
|
||||
console.log('🔑 Error parsing user-specific API keys:', e)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -425,21 +417,17 @@ export function getGeminiConfig(): { apiKey: string } | null {
|
|||
// Fallback to global API keys in localStorage
|
||||
const settings = localStorage.getItem("gemini_api_key")
|
||||
if (settings && settings.trim() !== '') {
|
||||
console.log('🔑 Found global Gemini API key in localStorage')
|
||||
return { apiKey: settings }
|
||||
}
|
||||
|
||||
// Fallback to environment variable
|
||||
const config = getClientConfig()
|
||||
if (config.geminiApiKey && config.geminiApiKey.trim() !== '') {
|
||||
console.log('🔑 Found Gemini API key in environment')
|
||||
return { apiKey: config.geminiApiKey }
|
||||
}
|
||||
|
||||
console.log('🔑 No Gemini API key found')
|
||||
return null
|
||||
} catch (e) {
|
||||
console.log('🔑 Error getting Gemini config:', e)
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -101,27 +101,5 @@ export function getGitHubSetupInstructions(): string[] {
|
|||
export function logGitHubSetupStatus(): void {
|
||||
const status = validateGitHubSetup()
|
||||
|
||||
console.log('🔧 GitHub Integration Setup Status:')
|
||||
|
||||
if (status.isValid) {
|
||||
console.log('✅ GitHub integration is properly configured!')
|
||||
} else {
|
||||
console.log('❌ GitHub integration has issues:')
|
||||
status.issues.forEach(issue => console.log(` - ${issue}`))
|
||||
}
|
||||
|
||||
if (status.warnings.length > 0) {
|
||||
console.log('⚠️ Warnings:')
|
||||
status.warnings.forEach(warning => console.log(` - ${warning}`))
|
||||
}
|
||||
|
||||
if (status.suggestions.length > 0) {
|
||||
console.log('💡 Suggestions:')
|
||||
status.suggestions.forEach(suggestion => console.log(` - ${suggestion}`))
|
||||
}
|
||||
|
||||
if (!status.isValid) {
|
||||
console.log('\n📋 Setup Instructions:')
|
||||
getGitHubSetupInstructions().forEach(instruction => console.log(instruction))
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -444,7 +444,6 @@ export const tokensStore = {
export async function requestPersistentStorage(): Promise<boolean> {
if (navigator.storage && navigator.storage.persist) {
const isPersisted = await navigator.storage.persist();
console.log(`Persistent storage ${isPersisted ? 'granted' : 'denied'}`);
return isPersisted;
}
return false;
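A companion sketch using the same standard StorageManager API: check whether persistence was already granted and how much quota is in use before (or after) requesting it.

```typescript
// Sketch only — uses navigator.storage.persisted() and estimate(), both part of
// the StorageManager spec alongside persist().
async function logStorageStatus(): Promise<void> {
  if (!navigator.storage) return
  const persisted = await navigator.storage.persisted() // true if storage is already durable
  const { usage, quota } = await navigator.storage.estimate()
  console.log(`Storage persisted: ${persisted}, usage: ${usage} of ${quota} bytes`)
}
```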
@ -309,7 +309,6 @@ export class GoogleDataService {
|
|||
setInterval(async () => {
|
||||
try {
|
||||
await touchLocalData();
|
||||
console.log('Touched local data to prevent Safari eviction');
|
||||
} catch (error) {
|
||||
console.warn('Failed to touch local data:', error);
|
||||
}
|
||||
|
|
|
|||
|
|
@ -807,7 +807,6 @@ A collection of creative project ideas and concepts.
|
|||
|
||||
try {
|
||||
// First, try to fetch the main page to discover content
|
||||
console.log('🔍 Fetching main page to discover content structure...')
|
||||
const mainPageResponse = await fetch(baseUrl)
|
||||
if (mainPageResponse.ok) {
|
||||
const mainPageContent = await mainPageResponse.text()
|
||||
|
|
|
|||
|
|
@ -40,23 +40,14 @@ export class QuartzSync {
|
|||
try {
|
||||
const { githubToken, githubRepo } = this.config
|
||||
const [owner, repo] = githubRepo.split('/')
|
||||
|
||||
console.log('🔧 GitHub sync details:', {
|
||||
owner,
|
||||
repo,
|
||||
noteTitle: note.title,
|
||||
noteFilePath: note.filePath
|
||||
})
|
||||
|
||||
|
||||
// Get the current file content to check if it exists
|
||||
const filePath = `content/${note.filePath}`
|
||||
let sha: string | undefined
|
||||
|
||||
console.log('🔍 Checking for existing file:', filePath)
|
||||
|
||||
try {
|
||||
const apiUrl = `https://api.github.com/repos/${owner}/${repo}/contents/${filePath}`
|
||||
console.log('🌐 Making API call to:', apiUrl)
|
||||
|
||||
const existingFile = await fetch(apiUrl, {
|
||||
headers: {
|
||||
|
|
@ -65,18 +56,14 @@ export class QuartzSync {
}
})

console.log('📡 API response status:', existingFile.status)

if (existingFile.ok) {
const fileData = await existingFile.json() as { sha: string }
sha = fileData.sha
console.log('✅ File exists, will update with SHA:', sha)
} else {
console.log('ℹ️ File does not exist, will create new one')
}
} catch (error) {
// File doesn't exist, that's okay
console.log('ℹ️ File does not exist, will create new one:', error)
}

// Create the markdown content
@ -113,9 +100,6 @@ ${note.content}`

if (response.ok) {
const result = await response.json() as { commit: { sha: string } }
console.log('✅ Successfully synced note to GitHub:', note.title)
console.log('📁 File path:', filePath)
console.log('🔗 Commit SHA:', result.commit.sha)
return true
} else {
const error = await response.text()
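The surrounding hunks implement a create-or-update against the GitHub Contents API: fetch the existing file to get its blob SHA, then PUT base64-encoded content, including the SHA only when updating. A minimal self-contained sketch of that flow (token-based auth assumed):

```typescript
// Sketch of the create-or-update flow using PUT /repos/{owner}/{repo}/contents/{path}.
async function upsertFile(
  owner: string, repo: string, path: string,
  markdown: string, token: string, message: string
): Promise<void> {
  const url = `https://api.github.com/repos/${owner}/${repo}/contents/${path}`
  const headers = { Authorization: `Bearer ${token}`, Accept: 'application/vnd.github+json' }

  // Look up the current blob SHA; a 404 means we are creating a new file.
  let sha: string | undefined
  const existing = await fetch(url, { headers })
  if (existing.ok) sha = ((await existing.json()) as { sha: string }).sha

  const res = await fetch(url, {
    method: 'PUT',
    headers: { ...headers, 'Content-Type': 'application/json' },
    body: JSON.stringify({
      message,
      // base64-encode UTF-8 content for the Contents API
      content: btoa(unescape(encodeURIComponent(markdown))),
      ...(sha ? { sha } : {}), // required when updating an existing file
    }),
  })
  if (!res.ok) throw new Error(`GitHub sync failed: ${res.status} ${await res.text()}`)
}
```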
@ -162,7 +146,6 @@ ${note.content}`
|
|||
})
|
||||
|
||||
if (response.ok) {
|
||||
console.log('✅ Successfully synced note to Cloudflare:', note.title)
|
||||
return true
|
||||
} else {
|
||||
throw new Error(`Cloudflare sync failed: ${response.statusText}`)
|
||||
|
|
@ -192,7 +175,6 @@ ${note.content}`
|
|||
})
|
||||
|
||||
if (response.ok) {
|
||||
console.log('✅ Successfully synced note to Quartz API:', note.title)
|
||||
return true
|
||||
} else {
|
||||
throw new Error(`Quartz API error: ${response.statusText}`)
|
||||
|
|
@ -222,7 +204,6 @@ ${note.content}`
|
|||
})
|
||||
|
||||
if (response.ok) {
|
||||
console.log('✅ Successfully sent note to webhook:', note.title)
|
||||
return true
|
||||
} else {
|
||||
throw new Error(`Webhook error: ${response.statusText}`)
|
||||
|
|
@ -238,33 +219,16 @@ ${note.content}`
|
|||
* Prioritizes GitHub integration for Quartz sites
|
||||
*/
|
||||
async smartSync(note: QuartzNote): Promise<boolean> {
|
||||
console.log('🔄 Starting smart sync for note:', note.title)
|
||||
console.log('🔧 Sync config available:', {
|
||||
hasGitHubToken: !!this.config.githubToken,
|
||||
hasGitHubRepo: !!this.config.githubRepo,
|
||||
hasCloudflareApiKey: !!this.config.cloudflareApiKey,
|
||||
hasCloudflareAccountId: !!this.config.cloudflareAccountId,
|
||||
hasQuartzUrl: !!this.config.quartzUrl
|
||||
})
|
||||
|
||||
// Check if GitHub integration is available and preferred
|
||||
if (this.config.githubToken && this.config.githubRepo) {
|
||||
try {
|
||||
console.log('🔄 Attempting GitHub sync (preferred method)')
|
||||
const result = await this.syncToGitHub(note)
|
||||
if (result) {
|
||||
console.log('✅ GitHub sync successful!')
|
||||
return true
|
||||
}
|
||||
} catch (error) {
|
||||
console.warn('⚠️ GitHub sync failed, trying other methods:', error)
|
||||
console.warn('⚠️ GitHub sync error details:', {
|
||||
message: error instanceof Error ? error.message : 'Unknown error',
|
||||
stack: error instanceof Error ? error.stack : 'No stack trace'
|
||||
})
|
||||
}
|
||||
} else {
|
||||
console.log('⚠️ GitHub sync not available - missing token or repo')
|
||||
}
|
||||
|
||||
// Fallback to other methods
|
||||
|
|
|
|||
|
|
@ -110,11 +110,9 @@ export async function transcribeWithRunPod(
|
|||
|
||||
const data: RunPodTranscriptionResponse = await response.json()
|
||||
|
||||
console.log('RunPod initial response:', data)
|
||||
|
||||
// Handle async job pattern (RunPod often returns job IDs)
|
||||
if (data.id && (data.status === 'IN_QUEUE' || data.status === 'IN_PROGRESS')) {
|
||||
console.log('Job is async, polling for results...', data.id)
|
||||
return await pollRunPodJob(data.id, config.apiKey, config.endpointId)
|
||||
}
|
||||
|
||||
|
|
@ -157,7 +155,6 @@ async function pollRunPodJob(
): Promise<string> {
const statusUrl = `https://api.runpod.ai/v2/${endpointId}/status/${jobId}`

console.log(`Polling job ${jobId} (max ${maxAttempts} attempts, ${pollInterval}ms interval)`)

for (let attempt = 0; attempt < maxAttempts; attempt++) {
try {

@ -185,7 +182,6 @@ async function pollRunPodJob(

// Don't fail immediately on 404 - job might still be processing
if (response.status === 404 && attempt < maxAttempts - 1) {
console.log('Job not found yet, continuing to poll...')
await new Promise(resolve => setTimeout(resolve, pollInterval))
continue
}

@ -195,10 +191,8 @@ async function pollRunPodJob(

const data: RunPodTranscriptionResponse = await response.json()

console.log(`Job status (attempt ${attempt + 1}/${maxAttempts}):`, data.status)

if (data.status === 'COMPLETED') {
console.log('Job completed, extracting transcription...')

if (data.output?.text) {
return data.output.text.trim()

@ -220,7 +214,6 @@ async function pollRunPodJob(

// Job still in progress, wait and retry
if (attempt % 10 === 0) {
console.log(`Job still processing... (${attempt + 1}/${maxAttempts} attempts)`)
}
await new Promise(resolve => setTimeout(resolve, pollInterval))
} catch (error: any) {
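Condensed sketch of the polling loop above. The `/status/{jobId}` endpoint, status values, and 404 tolerance come from the code in this diff; the Authorization header format is an assumption.

```typescript
// Poll a RunPod job until it completes, fails, or times out.
async function pollUntilDone(
  endpointId: string, jobId: string, apiKey: string,
  maxAttempts = 60, pollInterval = 2000
): Promise<string> {
  const statusUrl = `https://api.runpod.ai/v2/${endpointId}/status/${jobId}`
  for (let attempt = 0; attempt < maxAttempts; attempt++) {
    const res = await fetch(statusUrl, { headers: { Authorization: `Bearer ${apiKey}` } })
    if (res.ok) {
      const data = await res.json() as { status: string; output?: { text?: string }; error?: string }
      if (data.status === 'COMPLETED') return data.output?.text?.trim() ?? ''
      if (data.status === 'FAILED') throw new Error(data.error ?? 'RunPod job failed')
    } else if (res.status !== 404) {
      throw new Error(`Status check failed: ${res.status}`)
    }
    // Still queued/in progress (or transiently 404) — wait and try again
    await new Promise(resolve => setTimeout(resolve, pollInterval))
  }
  throw new Error('Timed out waiting for RunPod transcription job')
}
```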
@ -16,7 +16,6 @@ export const generateCanvasScreenshot = async (editor: Editor): Promise<string |
|
|||
const shapes = editor.getCurrentPageShapes();
|
||||
|
||||
if (shapes.length === 0) {
|
||||
console.log('No shapes found, no screenshot generated');
|
||||
return null;
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -48,11 +48,9 @@ async function initializeModel(): Promise<void> {
const { pipeline: pipelineFn } = await import('@xenova/transformers')
pipeline = pipelineFn

console.log('🔄 Loading embedding model...')
embeddingModel = await pipeline('feature-extraction', MODEL_NAME, {
quantized: true, // Use quantized model for faster inference
})
console.log('✅ Embedding model loaded')
} catch (error) {
console.error('❌ Failed to load embedding model:', error)
throw error
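Once the feature-extraction pipeline is loaded, shapes are scored by comparing embeddings. A sketch of embedding a text and computing cosine similarity; the `{ pooling: 'mean', normalize: true }` options and `.data` accessor reflect the @xenova/transformers call shape as commonly documented, flagged here as an assumption rather than this repo's exact code.

```typescript
// Sketch: embed text with the pipeline loaded above and score by cosine similarity.
async function embed(text: string): Promise<Float32Array> {
  const output = await embeddingModel(text, { pooling: 'mean', normalize: true })
  return output.data as Float32Array
}

function cosineSimilarity(a: Float32Array, b: Float32Array): number {
  let dot = 0, normA = 0, normB = 0
  for (let i = 0; i < a.length; i++) {
    dot += a[i] * b[i]
    normA += a[i] * a[i]
    normB += b[i] * b[i]
  }
  return dot / (Math.sqrt(normA) * Math.sqrt(normB) || 1)
}

// With normalize: true the embeddings are unit length, so the dot product
// alone already equals the cosine similarity.
```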
@ -264,7 +262,6 @@ export class SemanticSearchService {
|
|||
const shapes = this.editor.getCurrentPageShapes()
|
||||
const shapesWithText = shapes.filter(s => extractShapeText(s).length > 10) // Only shapes with meaningful text
|
||||
|
||||
console.log(`🔍 Indexing ${shapesWithText.length} shapes with text content...`)
|
||||
|
||||
for (let i = 0; i < shapesWithText.length; i++) {
|
||||
const shape = shapesWithText[i]
|
||||
|
|
@ -292,7 +289,6 @@ export class SemanticSearchService {
|
|||
onProgress?.(this.indexingProgress)
|
||||
}
|
||||
|
||||
console.log('✅ Canvas indexing complete')
|
||||
} finally {
|
||||
this.isIndexing = false
|
||||
}
|
||||
|
|
@ -467,7 +463,6 @@ export class SemanticSearchService {
|
|||
}
|
||||
|
||||
if (removed > 0) {
|
||||
console.log(`🧹 Cleaned up ${removed} stale embeddings`)
|
||||
}
|
||||
|
||||
return removed
|
||||
|
|
@ -478,7 +473,6 @@ export class SemanticSearchService {
|
|||
*/
|
||||
async clearIndex(): Promise<void> {
|
||||
await embeddingStore.clear()
|
||||
console.log('🗑️ Embedding index cleared')
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
|||
|
|
@ -6,22 +6,10 @@
|
|||
import { getClientConfig, isGitHubConfigured, getGitHubConfig } from './clientConfig'
|
||||
|
||||
export function testClientConfig() {
|
||||
console.log('🧪 Testing client configuration...')
|
||||
|
||||
const config = getClientConfig()
|
||||
console.log('📋 Client config:', {
|
||||
hasGithubToken: !!config.githubToken,
|
||||
hasQuartzRepo: !!config.quartzRepo,
|
||||
githubTokenLength: config.githubToken?.length || 0,
|
||||
quartzRepo: config.quartzRepo
|
||||
})
|
||||
|
||||
const isConfigured = isGitHubConfigured()
|
||||
console.log('✅ GitHub configured:', isConfigured)
|
||||
|
||||
const githubConfig = getGitHubConfig()
|
||||
console.log('🔧 GitHub config:', githubConfig)
|
||||
|
||||
|
||||
return {
|
||||
config,
|
||||
isConfigured,
|
||||
|
|
|
|||
|
|
@ -2,21 +2,17 @@
|
|||
import { holosphereService } from './HoloSphereService'
|
||||
|
||||
export async function testHolonFunctionality() {
|
||||
console.log('🧪 Testing Holon functionality...')
|
||||
|
||||
try {
|
||||
// Test initialization
|
||||
const isInitialized = await holosphereService.initialize()
|
||||
console.log('✅ HoloSphere initialized:', isInitialized)
|
||||
|
||||
if (!isInitialized) {
|
||||
console.log('❌ HoloSphere not initialized, skipping tests')
|
||||
return false
|
||||
}
|
||||
|
||||
// Test getting a holon
|
||||
const holonId = await holosphereService.getHolon(40.7128, -74.0060, 7)
|
||||
console.log('✅ Got holon ID:', holonId)
|
||||
|
||||
if (holonId) {
|
||||
// Test storing data
|
||||
|
|
@ -27,22 +23,17 @@ export async function testHolonFunctionality() {
|
|||
}
|
||||
|
||||
const storeSuccess = await holosphereService.putData(holonId, 'test', testData)
|
||||
console.log('✅ Stored data:', storeSuccess)
|
||||
|
||||
// Test retrieving data
|
||||
const retrievedData = await holosphereService.getData(holonId, 'test')
|
||||
console.log('✅ Retrieved data:', retrievedData)
|
||||
|
||||
// Test getting hierarchy
|
||||
const hierarchy = holosphereService.getHolonHierarchy(holonId)
|
||||
console.log('✅ Holon hierarchy:', hierarchy)
|
||||
|
||||
// Test getting scalespace
|
||||
const scalespace = holosphereService.getHolonScalespace(holonId)
|
||||
console.log('✅ Holon scalespace:', scalespace)
|
||||
}
|
||||
|
||||
console.log('✅ All Holon tests passed!')
|
||||
return true
|
||||
|
||||
} catch (error) {
|
||||
|
|
|
|||
|
|
@ -257,9 +257,6 @@ registerBlockExecutor('ai.llm', async (_ctx, inputs, config) => {
|
|||
const systemPrompt = config.systemPrompt as string
|
||||
|
||||
// Placeholder - would integrate with actual LLM API
|
||||
console.log('[AI LLM] Prompt:', prompt)
|
||||
console.log('[AI LLM] Context:', context)
|
||||
console.log('[AI LLM] System:', systemPrompt)
|
||||
|
||||
return {
|
||||
response: `[LLM Response placeholder for: ${prompt}]`,
|
||||
|
|
@ -272,7 +269,6 @@ registerBlockExecutor('ai.imageGen', async (_ctx, inputs, config) => {
|
|||
const prompt = inputs.prompt as string
|
||||
const size = config.size as string
|
||||
|
||||
console.log('[AI Image] Prompt:', prompt, 'Size:', size)
|
||||
|
||||
return {
|
||||
image: `[Generated image placeholder for: ${prompt}]`,
|
||||
|
|
@ -291,7 +287,6 @@ registerBlockExecutor('output.display', async (_ctx, inputs, config) => {
|
|||
displayValue = String(value)
|
||||
}
|
||||
|
||||
console.log('[Display]:', displayValue)
|
||||
return { displayed: displayValue }
|
||||
})
|
||||
|
||||
|
|
@ -308,7 +303,6 @@ registerBlockExecutor('output.log', async (_ctx, inputs, config) => {
|
|||
console.warn('[Workflow Log]:', message)
|
||||
break
|
||||
default:
|
||||
console.log('[Workflow Log]:', message)
|
||||
}
|
||||
|
||||
return { logged: true }
|
||||
|
|
@ -460,7 +454,6 @@ export async function executeWorkflow(
return results
}

console.log(`[Workflow] Executing ${executionOrder.length} blocks`)

for (const blockId of executionOrder) {
// Gather inputs from upstream blocks

@ -479,7 +472,6 @@ export async function executeWorkflow(
}
}

console.log('[Workflow] Execution complete')
return results
}
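executeWorkflow runs blocks in an executionOrder that respects upstream dependencies. A hedged sketch of how such an order can be derived with Kahn's algorithm (the Block/Edge shapes here are illustrative, not the repo's actual types):

```typescript
// Hypothetical sketch of computing executionOrder via topological sort.
interface Edge { from: string; to: string }

function topologicalOrder(blockIds: string[], edges: Edge[]): string[] {
  const indegree = new Map<string, number>(blockIds.map(id => [id, 0]))
  const downstream = new Map<string, string[]>(blockIds.map(id => [id, []]))
  for (const { from, to } of edges) {
    indegree.set(to, (indegree.get(to) ?? 0) + 1)
    downstream.get(from)?.push(to)
  }
  const queue = blockIds.filter(id => (indegree.get(id) ?? 0) === 0)
  const order: string[] = []
  while (queue.length > 0) {
    const id = queue.shift()!
    order.push(id)
    for (const next of downstream.get(id) ?? []) {
      const remaining = (indegree.get(next) ?? 1) - 1
      indegree.set(next, remaining)
      if (remaining === 0) queue.push(next)
    }
  }
  // If the graph has a cycle, blocks on the cycle never reach indegree 0 and are omitted.
  return order
}
```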
@ -183,7 +183,6 @@ export function MapCanvas({
|
|||
useEffect(() => {
|
||||
if (!mapRef.current || !isLoaded) return;
|
||||
// TODO: Add layer management
|
||||
console.log('MapCanvas: Updating layers', layers);
|
||||
}, [layers, isLoaded]);
|
||||
|
||||
// Project function for presence layer
|
||||
|
|
|
|||
|
|
@ -62,7 +62,6 @@ export function useCollaboration({
useEffect(() => {
if (!sessionId) return;

console.log('useCollaboration: Would connect to session', sessionId);
// const ydoc = new Y.Doc();
// const provider = new WebsocketProvider(serverUrl, sessionId, ydoc);
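The commented-out lines point at a future Y.js integration. A sketch of one way that wiring could look with the standard yjs / y-websocket API (an illustration, not the project's final code):

```typescript
// Sketch of connecting a collaboration session with yjs + y-websocket.
import * as Y from 'yjs'
import { WebsocketProvider } from 'y-websocket'

function connectToSession(serverUrl: string, sessionId: string) {
  const ydoc = new Y.Doc()
  const provider = new WebsocketProvider(serverUrl, sessionId, ydoc)

  provider.on('status', (event: { status: string }) => {
    console.log('useCollaboration: provider status', event.status) // "connected" | "disconnected"
  })

  // Shared structures the hook's broadcast* callbacks could write into
  const routes = ydoc.getArray('routes')
  const waypoints = ydoc.getArray('waypoints')

  return {
    ydoc, provider, routes, waypoints,
    disconnect: () => { provider.destroy(); ydoc.destroy() },
  }
}
```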
@ -75,21 +74,18 @@ export function useCollaboration({
|
|||
};
|
||||
}, [sessionId, serverUrl]);
|
||||
|
||||
const createSession = useCallback(async (name: string): Promise<string> => {
|
||||
const createSession = useCallback(async (_name: string): Promise<string> => {
|
||||
// TODO: Create new Y.js document and return session ID
|
||||
const newSessionId = `session-${Date.now()}`;
|
||||
console.log('useCollaboration: Creating session', name, newSessionId);
|
||||
return newSessionId;
|
||||
}, []);
|
||||
|
||||
const joinSession = useCallback(async (sessionIdToJoin: string): Promise<void> => {
|
||||
const joinSession = useCallback(async (_sessionIdToJoin: string): Promise<void> => {
|
||||
// TODO: Join existing Y.js session
|
||||
console.log('useCollaboration: Joining session', sessionIdToJoin);
|
||||
}, []);
|
||||
|
||||
const leaveSession = useCallback(() => {
|
||||
// TODO: Disconnect from session
|
||||
console.log('useCollaboration: Leaving session');
|
||||
setSession(null);
|
||||
setParticipants([]);
|
||||
setIsConnected(false);
|
||||
|
|
@ -100,19 +96,16 @@ export function useCollaboration({
|
|||
// awareness.setLocalStateField('cursor', coordinate);
|
||||
}, []);
|
||||
|
||||
const broadcastRouteChange = useCallback((route: Route) => {
|
||||
const broadcastRouteChange = useCallback((_route: Route) => {
|
||||
// TODO: Update Y.js shared route array
|
||||
console.log('useCollaboration: Broadcasting route change', route.id);
|
||||
}, []);
|
||||
|
||||
const broadcastWaypointChange = useCallback((waypoint: Waypoint) => {
|
||||
const broadcastWaypointChange = useCallback((_waypoint: Waypoint) => {
|
||||
// TODO: Update Y.js shared waypoint array
|
||||
console.log('useCollaboration: Broadcasting waypoint change', waypoint.id);
|
||||
}, []);
|
||||
|
||||
const broadcastLayerChange = useCallback((layer: MapLayer) => {
|
||||
const broadcastLayerChange = useCallback((_layer: MapLayer) => {
|
||||
// TODO: Update Y.js shared layer array
|
||||
console.log('useCollaboration: Broadcasting layer change', layer.id);
|
||||
}, []);
|
||||
|
||||
return {
|
||||
|
|
|
|||
|
|
@ -324,7 +324,6 @@ export function Board() {
|
|||
// When auth state changes, reset permission to trigger fresh fetch
|
||||
setPermission(null)
|
||||
setPermissionLoading(true)
|
||||
console.log('🔄 Auth changed, forcing tldraw remount. New auth state:', session.authed)
|
||||
}, [session.authed])
|
||||
|
||||
// Fetch permission when board loads or auth changes
|
||||
|
|
@ -337,7 +336,6 @@ export function Board() {
|
|||
const perm = await fetchBoardPermission(roomId)
|
||||
if (mounted) {
|
||||
setPermission(perm)
|
||||
console.log('🔐 Permission fetched:', perm)
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Failed to fetch permission:', error)
|
||||
|
|
@ -378,26 +376,6 @@ export function Board() {
|
|||
? false // Don't restrict while loading/transitioning - assume can edit
|
||||
: permission === 'view' // Only restrict if explicitly view (protected board)
|
||||
|
||||
// Debug logging for permission issues
|
||||
console.log('🔐 Permission Debug:', {
|
||||
permission,
|
||||
permissionLoading,
|
||||
sessionAuthed: session.authed,
|
||||
sessionLoading: session.loading,
|
||||
sessionUsername: session.username,
|
||||
authJustChanged,
|
||||
isReadOnly,
|
||||
reason: session.loading
|
||||
? 'auth loading - allowing edit temporarily'
|
||||
: authJustChanged
|
||||
? 'auth just changed - allowing edit until effects run'
|
||||
: permissionLoading
|
||||
? 'permission loading - allowing edit temporarily'
|
||||
: permission === 'view'
|
||||
? 'protected board - user not an editor (view-only)'
|
||||
: 'open board or user is editor (can edit)'
|
||||
})
|
||||
|
||||
// Handler for when user tries to edit in read-only mode
|
||||
const handleEditAttempt = () => {
|
||||
if (isReadOnly) {
|
||||
|
|
@ -415,7 +393,6 @@ export function Board() {
|
|||
// Force permission state reset - the useEffect will fetch fresh permissions
|
||||
setPermission(null)
|
||||
setPermissionLoading(true)
|
||||
console.log('🔐 handleAuthenticated: Cleared permission state, useEffect will fetch fresh')
|
||||
}
|
||||
|
||||
// Store roomId in localStorage for VideoChatShapeUtil to access
|
||||
|
|
@ -431,7 +408,6 @@ export function Board() {
|
|||
|
||||
oldStorageKeys.forEach(key => {
|
||||
if (localStorage.getItem(key)) {
|
||||
console.log(`Migrating: clearing old video chat storage entry: ${key}`);
|
||||
localStorage.removeItem(key);
|
||||
localStorage.removeItem(`${key}_token`);
|
||||
}
|
||||
|
|
@ -491,7 +467,6 @@ export function Board() {
|
|||
color: session.username ? generateUserColor(session.username) : generateUserColor(uniqueUserId),
|
||||
colorScheme: getColorScheme(),
|
||||
})
|
||||
console.log('🔐 User preferences set for authenticated user:', session.username)
|
||||
} else {
|
||||
// Not authenticated - reset to anonymous with fresh ID
|
||||
const anonymousId = `anonymous-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`
|
||||
|
|
@ -501,7 +476,6 @@ export function Board() {
|
|||
color: '#6b7280', // Gray for anonymous
|
||||
colorScheme: getColorScheme(),
|
||||
})
|
||||
console.log('🔐 User preferences reset to anonymous')
|
||||
}
|
||||
}, [uniqueUserId, session.username, session.authed])
|
||||
|
||||
|
|
@ -562,10 +536,8 @@ export function Board() {
|
|||
|
||||
if (isReadOnly) {
|
||||
editor.updateInstanceState({ isReadonly: true })
|
||||
console.log('🔒 Permission changed: Board is now read-only')
|
||||
} else {
|
||||
editor.updateInstanceState({ isReadonly: false })
|
||||
console.log('🔓 Permission changed: Board is now editable')
|
||||
}
|
||||
}, [editor, isReadOnly])
|
||||
|
||||
|
|
@ -576,7 +548,6 @@ export function Board() {
|
|||
|
||||
const handleSessionLoggedIn = (event: Event) => {
|
||||
const customEvent = event as CustomEvent<{ username: string }>;
|
||||
console.log('🔐 Board: session-logged-in event received for:', customEvent.detail.username);
|
||||
|
||||
// Immediately enable editing - user just logged in
|
||||
editor.updateInstanceState({ isReadonly: false });
|
||||
|
|
@ -584,7 +555,6 @@ export function Board() {
|
|||
// Switch to select tool to ensure tools are available
|
||||
editor.setCurrentTool('select');
|
||||
|
||||
console.log('🔓 Board: Enabled editing mode after login');
|
||||
};
|
||||
|
||||
window.addEventListener('session-logged-in', handleSessionLoggedIn);
|
||||
|
|
@ -691,36 +661,6 @@ export function Board() {
|
|||
|
||||
// Debug: Log page information
|
||||
const allPages = store.store.allRecords().filter((r: any) => r.typeName === 'page')
|
||||
console.log(`📊 Board: Current page ID: ${currentPageId}`)
|
||||
console.log(`📊 Board: Available pages:`, allPages.map((p: any) => ({ id: p.id, name: p.name })))
|
||||
console.log(`📊 Board: Store has ${storeShapes.length} total shapes, ${storeShapesOnCurrentPage.length} on current page. Editor sees ${editorShapes.length} shapes on current page.`)
|
||||
|
||||
// CRITICAL DEBUG: Check if shapes exist in editor but aren't returned by getCurrentPageShapes
|
||||
if (storeShapesOnCurrentPage.length > 0 && editorShapes.length === 0) {
|
||||
console.log(`🔍 DEBUG: Checking why ${storeShapesOnCurrentPage.length} shapes aren't visible...`)
|
||||
const sampleShape = storeShapesOnCurrentPage[0]
|
||||
const shapeInEditor = editor.getShape(sampleShape.id as TLShapeId)
|
||||
console.log(`🔍 DEBUG: Sample shape ${sampleShape.id} in editor:`, shapeInEditor ? 'EXISTS' : 'MISSING')
|
||||
if (shapeInEditor) {
|
||||
console.log(`🔍 DEBUG: Shape details:`, {
|
||||
id: shapeInEditor.id,
|
||||
type: shapeInEditor.type,
|
||||
parentId: shapeInEditor.parentId,
|
||||
pageId: editor.getCurrentPageId(),
|
||||
matches: shapeInEditor.parentId === editor.getCurrentPageId()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Debug: Log shape parent IDs to see if there's a mismatch
|
||||
if (storeShapes.length > 0 && editorShapes.length === 0) {
|
||||
const parentIdCounts = new Map<string, number>()
|
||||
storeShapes.forEach((s: any) => {
|
||||
const pid = s.parentId || 'no-parent'
|
||||
parentIdCounts.set(pid, (parentIdCounts.get(pid) || 0) + 1)
|
||||
})
|
||||
console.log(`📊 Board: Shape parent ID distribution:`, Array.from(parentIdCounts.entries()))
|
||||
}
|
||||
|
||||
// REMOVED: Aggressive force refresh that was causing coordinate loss
|
||||
// If shapes are in store but editor doesn't see them, it's likely a different issue
|
||||
|
|
@ -751,7 +691,6 @@ export function Board() {
|
|||
.filter((s): s is NonNullable<typeof s> => s !== undefined)
|
||||
|
||||
if (shapesFromEditor.length > 0) {
|
||||
console.log(`📊 Board: ${shapesFromEditor.length} missing shapes actually exist in editor but aren't in getCurrentPageShapes()`)
|
||||
// Try to select them to make them visible
|
||||
const shapeIds = shapesFromEditor.map((s: any) => s.id).filter((id: string): id is TLShapeId => id !== undefined)
|
||||
if (shapeIds.length > 0) {
|
||||
|
|
@ -763,7 +702,6 @@ export function Board() {
|
|||
|
||||
// REMOVED: Force refresh that was causing coordinate loss
|
||||
// Re-putting shapes was resetting coordinates to 0,0
|
||||
console.log(`📊 Board: ${missingShapes.length} shapes are in store but not visible in editor - this may indicate a sync issue`)
|
||||
}
|
||||
|
||||
// Check if shapes are outside viewport
|
||||
|
|
@ -785,7 +723,6 @@ export function Board() {
|
|||
})
|
||||
|
||||
if (shapesOutsideViewport.length > 0) {
|
||||
console.log(`📊 Board: ${shapesOutsideViewport.length} missing shapes are outside viewport - focusing on them`)
|
||||
// Focus on the first missing shape
|
||||
const firstShape = shapesOutsideViewport[0] as any
|
||||
if (firstShape && firstShape.x !== undefined && firstShape.y !== undefined) {
|
||||
|
|
@ -807,7 +744,6 @@ export function Board() {
|
|||
s.parentId !== currentPageId
|
||||
)
|
||||
if (shapesOnOtherPages.length > 0) {
|
||||
console.log(`📊 Board: ${shapesOnOtherPages.length} shapes exist on other pages (not current page ${currentPageId})`)
|
||||
|
||||
// Find which page has the most shapes
|
||||
// CRITICAL: Only count shapes that are DIRECT children of pages, not frame/group children
|
||||
|
|
@ -897,7 +833,6 @@ export function Board() {
|
|||
}
|
||||
})
|
||||
}
|
||||
console.log(`📊 Board: Fixed ${fixedShapes.length} shapes by assigning them to current page ${currentPageId} (coordinates preserved)`)
|
||||
} catch (error) {
|
||||
console.error(`📊 Board: Error fixing shapes with invalid parentId:`, error)
|
||||
}
|
||||
|
|
@ -915,13 +850,11 @@ export function Board() {
|
|||
|
||||
// If current page has no shapes but another page does, switch to that page
|
||||
if (editorShapes.length === 0 && pageWithMostShapes && pageWithMostShapes !== currentPageId) {
|
||||
console.log(`📊 Board: Current page has no shapes. Switching to page ${pageWithMostShapes} which has ${maxShapes} shapes`)
|
||||
try {
|
||||
editor.setCurrentPage(pageWithMostShapes as any)
|
||||
// Focus camera on shapes after switching
|
||||
setTimeout(() => {
|
||||
const newPageShapes = editor.getCurrentPageShapes()
|
||||
console.log(`📊 Board: After page switch, editor sees ${newPageShapes.length} shapes on page ${pageWithMostShapes}`)
|
||||
if (newPageShapes.length > 0) {
|
||||
const bounds = editor.getShapePageBounds(newPageShapes[0])
|
||||
if (bounds) {
|
||||
|
|
@ -935,17 +868,14 @@ export function Board() {
|
|||
// Still no shapes after switching - might be a validation issue
|
||||
console.warn(`📊 Board: After switching to page ${pageWithMostShapes}, still no shapes visible. Checking store...`)
|
||||
const shapesOnNewPage = storeShapes.filter((s: any) => s.parentId === pageWithMostShapes)
|
||||
console.log(`📊 Board: Store has ${shapesOnNewPage.length} shapes on page ${pageWithMostShapes}`)
|
||||
if (shapesOnNewPage.length > 0) {
|
||||
// Try to manually add shapes that might have validation issues
|
||||
console.log(`📊 Board: Attempting to force visibility by selecting all shapes on page`)
|
||||
const shapeIds = shapesOnNewPage.map((s: any) => s.id as TLShapeId).filter((id): id is TLShapeId => id !== undefined)
|
||||
if (shapeIds.length > 0) {
|
||||
// Try to get shapes from editor to see if they exist
|
||||
const existingShapes = shapeIds
|
||||
.map((id: TLShapeId) => editor.getShape(id))
|
||||
.filter((s): s is NonNullable<typeof s> => s !== undefined)
|
||||
console.log(`📊 Board: ${existingShapes.length} of ${shapeIds.length} shapes exist in editor`)
|
||||
if (existingShapes.length > 0) {
|
||||
editor.setSelectedShapes(existingShapes.map((s: any) => s.id))
|
||||
editor.zoomToFit()
|
||||
|
|
@ -957,12 +887,6 @@ export function Board() {
|
|||
} catch (error) {
|
||||
console.error(`❌ Board: Error switching to page ${pageWithMostShapes}:`, error)
|
||||
}
|
||||
} else if (pageWithMostShapes) {
|
||||
console.log(`📊 Board: Page breakdown:`, Array.from(pageShapeCounts.entries()).map(([pageId, count]) => ({
|
||||
pageId,
|
||||
shapeCount: count,
|
||||
isCurrent: pageId === currentPageId
|
||||
})))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -1007,7 +931,6 @@ export function Board() {
|
|||
})
|
||||
|
||||
if (presencesToRemove.length > 0) {
|
||||
console.log(`🧹 Force cleaning ${presencesToRemove.length} non-current presence record(s) on auth change`)
|
||||
editor.store.remove(presencesToRemove.map((r: any) => r.id))
|
||||
}
|
||||
} else {
|
||||
|
|
@ -1019,7 +942,6 @@ export function Board() {
|
|||
)
|
||||
|
||||
if (stalePresences.length > 0) {
|
||||
console.log(`🧹 Cleaning up ${stalePresences.length} stale presence record(s)`)
|
||||
editor.store.remove(stalePresences.map((r: any) => r.id))
|
||||
}
|
||||
}
|
||||
|
|
@ -1041,10 +963,8 @@ export function Board() {
|
|||
const handleSessionCleared = (event: Event) => {
|
||||
const customEvent = event as CustomEvent<{ previousUsername: string }>;
|
||||
const previousUsername = customEvent.detail?.previousUsername;
|
||||
console.log('🧹 Session cleared event received for user:', previousUsername)
|
||||
|
||||
if (!previousUsername) {
|
||||
console.log('🧹 No previous username, skipping presence cleanup')
|
||||
return
|
||||
}
|
||||
|
||||
|
|
@ -1054,7 +974,6 @@ export function Board() {
|
|||
const previousUserId = localStorage.getItem(storageKey);
|
||||
|
||||
if (!previousUserId) {
|
||||
console.log('🧹 No tldraw user ID found for', previousUsername)
|
||||
return
|
||||
}
|
||||
|
||||
|
|
@ -1071,7 +990,6 @@ export function Board() {
|
|||
})
|
||||
|
||||
if (userPresences.length > 0) {
|
||||
console.log(`🧹 Removing ${userPresences.length} presence record(s) for logged-out user: ${previousUsername}`)
|
||||
editor.store.remove(userPresences.map((r: any) => r.id))
|
||||
}
|
||||
} catch (error) {
|
||||
|
|
@ -1463,10 +1381,6 @@ export function Board() {
|
|||
const isAuthenticated = checkAuthFromStorage();
|
||||
const initialReadOnly = !isAuthenticated;
|
||||
editor.updateInstanceState({ isReadonly: initialReadOnly })
|
||||
console.log('🔄 onMount: isAuthenticated (from storage) =', isAuthenticated, ', setting isReadonly =', initialReadOnly)
|
||||
console.log(initialReadOnly
|
||||
? '🔒 Board is in read-only mode (not authenticated)'
|
||||
: '🔓 Board is editable (authenticated)')
|
||||
|
||||
// Also ensure the current tool is appropriate for the mode
|
||||
if (!initialReadOnly) {
|
||||
|
|
|
|||
|
|
@ -35,8 +35,8 @@ export class DrawfastShape extends BaseBoxShapeUtil<IDrawfastShape> {
|
|||
|
||||
getDefaultProps(): IDrawfastShape["props"] {
|
||||
return {
|
||||
w: 512,
|
||||
h: 512,
|
||||
w: 900,
|
||||
h: 500,
|
||||
prompt: "",
|
||||
generatedImageUrl: null,
|
||||
overlayMode: true,
|
||||
|
|
@ -160,16 +160,6 @@ export class DrawfastShape extends BaseBoxShapeUtil<IDrawfastShape> {
|
|||
})
|
||||
}
|
||||
|
||||
const handleToggleOverlay = () => {
|
||||
editor.updateShape<IDrawfastShape>({
|
||||
id: shape.id,
|
||||
type: 'Drawfast',
|
||||
props: {
|
||||
overlayMode: !shape.props.overlayMode,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
const handleToggleAutoGenerate = () => {
|
||||
editor.updateShape<IDrawfastShape>({
|
||||
id: shape.id,
|
||||
|
|
@ -212,7 +202,6 @@ export class DrawfastShape extends BaseBoxShapeUtil<IDrawfastShape> {
|
|||
})
|
||||
|
||||
if (childShapes.length === 0) {
|
||||
console.log('Drawfast: No shapes to capture')
|
||||
editor.updateShape<IDrawfastShape>({
|
||||
id: shape.id,
|
||||
type: 'Drawfast',
|
||||
|
|
@ -246,7 +235,6 @@ export class DrawfastShape extends BaseBoxShapeUtil<IDrawfastShape> {
? `${shape.props.prompt}, hd, award-winning, impressive, detailed`
: 'hd, award-winning, impressive, detailed illustration'

console.log('Drawfast: Generating with prompt:', fullPrompt)

const result = await fal.subscribe('fal-ai/lcm-sd15-i2i', {
input: {
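For context, a sketch of what a fal.subscribe call against the LCM image-to-image endpoint can look like. The import path, input field names (image_url, prompt, strength), and result shape are assumptions for illustration; the model's actual schema should be taken from the fal.ai docs.

```typescript
// Illustrative sketch only — field names and result shape are assumed.
import * as fal from '@fal-ai/serverless-client'

async function generateFromSketch(dataUrl: string, prompt: string): Promise<string | null> {
  const result = await fal.subscribe('fal-ai/lcm-sd15-i2i', {
    input: {
      image_url: dataUrl,   // the captured drawing, as a data URL
      prompt,               // e.g. "hd, award-winning, impressive, detailed"
      strength: 0.65,       // how far to move away from the source sketch
    },
    logs: false,
  }) as { images?: Array<{ url: string }> }
  return result.images?.[0]?.url ?? null
}
```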
@ -277,7 +265,6 @@ export class DrawfastShape extends BaseBoxShapeUtil<IDrawfastShape> {
|
|||
}
|
||||
|
||||
if (imageUrl) {
|
||||
console.log('Drawfast: Generated image:', imageUrl)
|
||||
editor.updateShape<IDrawfastShape>({
|
||||
id: shape.id,
|
||||
type: 'Drawfast',
|
||||
|
|
@ -364,107 +351,151 @@ export class DrawfastShape extends BaseBoxShapeUtil<IDrawfastShape> {
|
|||
backgroundColor: '#1a1a2e',
|
||||
overflow: 'hidden',
|
||||
}}>
|
||||
{/* Drawing Area / Result Display */}
|
||||
{/* Main content area - INPUT and OUTPUT side by side */}
|
||||
<div style={{
|
||||
flex: 1,
|
||||
position: 'relative',
|
||||
display: 'flex',
|
||||
alignItems: 'center',
|
||||
justifyContent: 'center',
|
||||
backgroundColor: '#fff',
|
||||
flexDirection: 'row',
|
||||
overflow: 'hidden',
|
||||
}}>
|
||||
{/* Generated Image (if available and overlay mode) */}
|
||||
{shape.props.generatedImageUrl && shape.props.overlayMode && (
|
||||
<img
|
||||
src={shape.props.generatedImageUrl}
|
||||
alt="AI Generated"
|
||||
style={{
|
||||
position: 'absolute',
|
||||
top: 0,
|
||||
left: 0,
|
||||
width: '100%',
|
||||
height: '100%',
|
||||
objectFit: 'contain',
|
||||
pointerEvents: 'none',
|
||||
opacity: 0.9,
|
||||
zIndex: 10,
|
||||
}}
|
||||
/>
|
||||
)}
|
||||
|
||||
{/* Instructions when empty */}
|
||||
{!shape.props.generatedImageUrl && (
|
||||
{/* INPUT - Drawing Area (Left Side) */}
|
||||
<div style={{
|
||||
flex: 1,
|
||||
position: 'relative',
|
||||
display: 'flex',
|
||||
flexDirection: 'column',
|
||||
backgroundColor: '#fff',
|
||||
overflow: 'hidden',
|
||||
borderRight: '2px solid #333',
|
||||
}}>
|
||||
{/* INPUT Label */}
|
||||
<div style={{
|
||||
position: 'absolute',
|
||||
top: '50%',
|
||||
left: '50%',
|
||||
transform: 'translate(-50%, -50%)',
|
||||
textAlign: 'center',
|
||||
color: '#666',
|
||||
fontSize: '14px',
|
||||
padding: '20px',
|
||||
pointerEvents: 'none',
|
||||
zIndex: 5,
|
||||
padding: '4px 8px',
|
||||
backgroundColor: '#2a2a3e',
|
||||
color: '#888',
|
||||
fontSize: '10px',
|
||||
fontWeight: 600,
|
||||
textTransform: 'uppercase',
|
||||
letterSpacing: '0.5px',
|
||||
borderBottom: '1px solid #333',
|
||||
}}>
|
||||
<div style={{ fontSize: '32px', marginBottom: '8px' }}>✏️</div>
|
||||
<div>Draw inside this frame</div>
|
||||
<div style={{ fontSize: '12px', marginTop: '4px', color: '#999' }}>
|
||||
Use the pencil, pen, or other tools to sketch
|
||||
</div>
|
||||
📝 Input (Draw Here)
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Loading indicator */}
|
||||
{(shape.props.isGenerating || liveImageState.isGenerating) && (
|
||||
{/* Drawing canvas area */}
|
||||
<div style={{
|
||||
position: 'absolute',
|
||||
top: '50%',
|
||||
left: '50%',
|
||||
transform: 'translate(-50%, -50%)',
|
||||
zIndex: 20,
|
||||
backgroundColor: 'rgba(0,0,0,0.7)',
|
||||
padding: '16px 24px',
|
||||
borderRadius: '8px',
|
||||
color: 'white',
|
||||
flex: 1,
|
||||
position: 'relative',
|
||||
display: 'flex',
|
||||
alignItems: 'center',
|
||||
gap: '12px',
|
||||
justifyContent: 'center',
|
||||
}}>
|
||||
{/* Instructions when empty */}
|
||||
<div style={{
|
||||
width: 24,
|
||||
height: 24,
|
||||
border: '3px solid rgba(255,255,255,0.3)',
|
||||
borderTopColor: DrawfastShape.PRIMARY_COLOR,
|
||||
borderRadius: '50%',
|
||||
animation: 'spin 1s linear infinite',
|
||||
}} />
|
||||
Generating...
|
||||
position: 'absolute',
|
||||
top: '50%',
|
||||
left: '50%',
|
||||
transform: 'translate(-50%, -50%)',
|
||||
textAlign: 'center',
|
||||
color: '#666',
|
||||
fontSize: '14px',
|
||||
padding: '20px',
|
||||
pointerEvents: 'none',
|
||||
zIndex: 5,
|
||||
}}>
|
||||
<div style={{ fontSize: '32px', marginBottom: '8px' }}>✏️</div>
|
||||
<div>Draw inside this frame</div>
|
||||
<div style={{ fontSize: '12px', marginTop: '4px', color: '#999' }}>
|
||||
Use the pencil, pen, or other tools to sketch
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Side-by-side result (when not overlay mode) */}
|
||||
{shape.props.generatedImageUrl && !shape.props.overlayMode && (
|
||||
<div style={{
|
||||
height: '40%',
|
||||
borderTop: '2px solid #333',
|
||||
backgroundColor: '#111',
|
||||
display: 'flex',
|
||||
alignItems: 'center',
|
||||
justifyContent: 'center',
|
||||
}}>
|
||||
<img
|
||||
src={shape.props.generatedImageUrl}
|
||||
alt="AI Generated"
|
||||
style={{
|
||||
maxWidth: '100%',
|
||||
maxHeight: '100%',
|
||||
objectFit: 'contain',
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* OUTPUT - Generated Image (Right Side) */}
|
||||
<div style={{
|
||||
flex: 1,
|
||||
position: 'relative',
|
||||
display: 'flex',
|
||||
flexDirection: 'column',
|
||||
backgroundColor: '#111',
|
||||
overflow: 'hidden',
|
||||
}}>
|
||||
{/* OUTPUT Label */}
|
||||
<div style={{
|
||||
padding: '4px 8px',
|
||||
backgroundColor: '#2a2a3e',
|
||||
color: '#888',
|
||||
fontSize: '10px',
|
||||
fontWeight: 600,
|
||||
textTransform: 'uppercase',
|
||||
letterSpacing: '0.5px',
|
||||
borderBottom: '1px solid #333',
|
||||
}}>
|
||||
✨ Output (AI Generated)
|
||||
</div>
|
||||
|
||||
{/* Output content area */}
|
||||
<div style={{
|
||||
flex: 1,
|
||||
display: 'flex',
|
||||
alignItems: 'center',
|
||||
justifyContent: 'center',
|
||||
position: 'relative',
|
||||
}}>
|
||||
{/* Generated Image */}
|
||||
{shape.props.generatedImageUrl ? (
|
||||
<img
|
||||
src={shape.props.generatedImageUrl}
|
||||
alt="AI Generated"
|
||||
style={{
|
||||
maxWidth: '100%',
|
||||
maxHeight: '100%',
|
||||
objectFit: 'contain',
|
||||
}}
|
||||
/>
|
||||
) : (
|
||||
<div style={{
|
||||
textAlign: 'center',
|
||||
color: '#555',
|
||||
fontSize: '12px',
|
||||
padding: '20px',
|
||||
}}>
|
||||
<div style={{ fontSize: '24px', marginBottom: '8px', opacity: 0.5 }}>🖼️</div>
|
||||
<div>Generated image will appear here</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Loading indicator */}
|
||||
{(shape.props.isGenerating || liveImageState.isGenerating) && (
|
||||
<div style={{
|
||||
position: 'absolute',
|
||||
top: '50%',
|
||||
left: '50%',
|
||||
transform: 'translate(-50%, -50%)',
|
||||
zIndex: 20,
|
||||
backgroundColor: 'rgba(0,0,0,0.8)',
|
||||
padding: '16px 24px',
|
||||
borderRadius: '8px',
|
||||
color: 'white',
|
||||
display: 'flex',
|
||||
alignItems: 'center',
|
||||
gap: '12px',
|
||||
}}>
|
||||
<div style={{
|
||||
width: 24,
|
||||
height: 24,
|
||||
border: '3px solid rgba(255,255,255,0.3)',
|
||||
borderTopColor: DrawfastShape.PRIMARY_COLOR,
|
||||
borderRadius: '50%',
|
||||
animation: 'spin 1s linear infinite',
|
||||
}} />
|
||||
Generating...
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Controls */}
|
||||
<div style={{
|
||||
|
|
@ -600,23 +631,6 @@ export class DrawfastShape extends BaseBoxShapeUtil<IDrawfastShape> {
|
|||
/>
|
||||
Real-time
|
||||
</label>
|
||||
|
||||
{/* Overlay toggle */}
|
||||
<button
|
||||
onClick={handleToggleOverlay}
|
||||
onPointerDown={(e) => e.stopPropagation()}
|
||||
style={{
|
||||
padding: '4px 8px',
|
||||
borderRadius: '4px',
|
||||
border: '1px solid #444',
|
||||
backgroundColor: shape.props.overlayMode ? DrawfastShape.PRIMARY_COLOR : '#2a2a3e',
|
||||
color: '#fff',
|
||||
fontSize: '10px',
|
||||
cursor: 'pointer',
|
||||
}}
|
||||
>
|
||||
{shape.props.overlayMode ? 'Overlay' : 'Side-by-side'}
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
|
|
|||
|
|
@ -106,14 +106,6 @@ export class FathomMeetingsBrowserShape extends BaseBoxShapeUtil<IFathomMeetings
|
|||
return
|
||||
}
|
||||
|
||||
// Log to verify the correct meeting is being received
|
||||
console.log('🔵 handleMeetingSelect called with meeting:', {
|
||||
recording_id: meetingRecordingId,
|
||||
title: meetingTitle,
|
||||
options,
|
||||
fullMeetingObject: meeting
|
||||
})
|
||||
|
||||
// Get API key from user identity
|
||||
const apiKey = getFathomApiKey(session.username)
|
||||
if (!apiKey) {
|
||||
|
|
@ -128,13 +120,11 @@ export class FathomMeetingsBrowserShape extends BaseBoxShapeUtil<IFathomMeetings
|
|||
const includeTranscript = options.transcript
|
||||
|
||||
// Use the stored meetingRecordingId (already extracted above)
|
||||
console.log('🔵 Fetching data for meeting recording_id:', meetingRecordingId)
|
||||
|
||||
let response
|
||||
try {
|
||||
// Fetch data for THIS specific meeting using its recording_id
|
||||
const apiUrl = `${WORKER_URL}/fathom/meetings/${meetingRecordingId}${includeTranscript ? '?include_transcript=true' : ''}`
|
||||
console.log('🔵 API URL:', apiUrl)
|
||||
response = await fetch(apiUrl, {
|
||||
headers: {
|
||||
'X-Api-Key': apiKey,
|
||||
|
|
@ -159,15 +149,9 @@ export class FathomMeetingsBrowserShape extends BaseBoxShapeUtil<IFathomMeetings
|
|||
const fullMeeting = await response.json() as any
|
||||
|
||||
// Debug: Log the meeting response structure
|
||||
console.log('Full meeting response:', fullMeeting)
|
||||
console.log('Meeting keys:', Object.keys(fullMeeting))
|
||||
console.log('Has default_summary:', !!fullMeeting.default_summary)
|
||||
console.log('Has action_items:', !!fullMeeting.action_items)
|
||||
if (fullMeeting.default_summary) {
|
||||
console.log('default_summary structure:', fullMeeting.default_summary)
|
||||
}
|
||||
if (fullMeeting.action_items) {
|
||||
console.log('action_items length:', fullMeeting.action_items.length)
|
||||
}
|
||||
|
||||
// Helper function to format date as YYYY.MM.DD
|
||||
|
|
@ -461,7 +445,6 @@ export class FathomMeetingsBrowserShape extends BaseBoxShapeUtil<IFathomMeetings
|
|||
(callId ? `https://fathom.video/calls/${callId}` : null)
|
||||
|
||||
if (videoUrl) {
|
||||
console.log('Opening Fathom video URL:', videoUrl, 'for meeting:', { callId, recording_id: meeting.recording_id })
|
||||
window.open(videoUrl, '_blank', 'noopener,noreferrer')
|
||||
} else {
|
||||
console.error('Could not determine Fathom video URL for meeting:', { meeting, fullMeeting })
|
||||
|
|
|
|||
|
|
@ -102,7 +102,6 @@ export class HolonBrowserShape extends BaseBoxShapeUtil<IHolonBrowser> {
|
|||
}
|
||||
})
|
||||
|
||||
console.log('✅ Created Holon shape from browser:', holonShape.id)
|
||||
|
||||
// Restore camera position if it changed
|
||||
const newCamera = this.editor.getCamera()
|
||||
|
|
|
|||
|
|

@ -164,10 +164,8 @@ export class HolonShape extends BaseBoxShapeUtil<IHolon> {
// This prevents the shape from auto-generating IDs based on coordinates.

const loadHolonData = useCallback(async () => {
console.log('🔄 loadHolonData called with holonId:', holonId)

if (!holonId) {
console.log('⚠️ No holonId, skipping data load')
return
}

@ -175,7 +173,6 @@ export class HolonShape extends BaseBoxShapeUtil<IHolon> {
setIsLoading(true)
setError(null)

console.log('📡 Starting to load data from GunDB for holon:', holonId)

// Load data from specific categories
const lensesToCheck = [

@ -203,21 +200,16 @@ export class HolonShape extends BaseBoxShapeUtil<IHolon> {
// This properly waits for Gun data to load from the network
for (const lens of lensesToCheck) {
try {
console.log(`📂 Checking lens: ${lens}`)
// Use getDataWithWait which subscribes and waits for Gun data (5 second timeout for network sync)
const lensData = await holosphereService.getDataWithWait(holonId, lens, 5000)
if (lensData && Object.keys(lensData).length > 0) {
console.log(`✓ Found data in lens ${lens}:`, Object.keys(lensData).length, 'keys')
allData[lens] = lensData
} else {
console.log(`⚠️ No data found in lens ${lens} after waiting`)
}
} catch (err) {
console.log(`⚠️ Error loading data from lens ${lens}:`, err)
}
}

console.log(`📊 Total data loaded: ${Object.keys(allData).length} categories`)

// If no data was loaded, check for connection issues
if (Object.keys(allData).length === 0) {

@ -241,7 +233,6 @@ export class HolonShape extends BaseBoxShapeUtil<IHolon> {
}
})

console.log(`✅ Successfully loaded data from ${Object.keys(allData).length} categories:`, Object.keys(allData))
} catch (error) {
console.error('❌ Error loading holon data:', error)
setError('Failed to load data')

@ -252,15 +243,12 @@ export class HolonShape extends BaseBoxShapeUtil<IHolon> {
// Load data when holon is connected
useEffect(() => {
console.log('🔍 useEffect triggered - holonId:', holonId, 'isConnected:', isConnected, 'selectedLens:', selectedLens)

if (holonId && isConnected) {
console.log('✓ Conditions met, calling loadHolonData')
loadHolonData()
} else {
console.log('⚠️ Conditions not met for loading data')
if (!holonId) console.log(' - Missing holonId')
if (!isConnected) console.log(' - Not connected')
if (!holonId) {}
if (!isConnected) {}
}
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [holonId, isConnected, selectedLens])

@ -342,7 +330,6 @@ export class HolonShape extends BaseBoxShapeUtil<IHolon> {
return
}

console.log('🔌 Connecting to Holon:', trimmedHolonId)
setError(null)

// Extract H3 cell info if applicable (coordinates and resolution)

@ -357,14 +344,12 @@ export class HolonShape extends BaseBoxShapeUtil<IHolon> {
cellLatitude = lat
cellLongitude = lng
cellResolution = h3.getResolution(trimmedHolonId)
console.log(`📍 H3 Cell Info: lat=${lat}, lng=${lng}, resolution=${cellResolution}`)
} catch (e) {
console.warn('Could not extract H3 cell coordinates:', e)
}
} else {
// For numeric/alphanumeric Holon IDs, use default coordinates
// The holon is not geospatially indexed
console.log(`📍 Numeric Holon ID detected: ${trimmedHolonId} (not geospatially indexed)`)
cellResolution = -1 // Indicate non-H3 holon
}

@ -422,7 +407,6 @@ export class HolonShape extends BaseBoxShapeUtil<IHolon> {
}
}
} catch (error) {
console.log('⚠️ Could not load metadata, using default name:', error)
}

// Explicitly load holon data after connecting

@ -434,7 +418,6 @@ export class HolonShape extends BaseBoxShapeUtil<IHolon> {
setIsLoading(true)
setError(null)

console.log('📡 Starting to load data from GunDB for holon:', trimmedHolonId)

// Load data from specific categories
const lensesToCheck = [

@ -461,20 +444,15 @@ export class HolonShape extends BaseBoxShapeUtil<IHolon> {
// Load data from each lens
for (const lens of lensesToCheck) {
try {
console.log(`📂 Checking lens: ${lens}`)
const lensData = await holosphereService.getDataWithWait(trimmedHolonId, lens, 2000)
if (lensData && Object.keys(lensData).length > 0) {
console.log(`✓ Found data in lens ${lens}:`, Object.keys(lensData).length, 'keys')
allData[lens] = lensData
} else {
console.log(`⚠️ No data found in lens ${lens} after waiting`)
}
} catch (err) {
console.log(`⚠️ Error loading data from lens ${lens}:`, err)
}
}

console.log(`📊 Total data loaded: ${Object.keys(allData).length} categories`)

// Update current data for selected lens
const currentLensData = allData[shape.props.selectedLens || 'users']

@ -493,7 +471,6 @@ export class HolonShape extends BaseBoxShapeUtil<IHolon> {
},
})

console.log(`✅ Successfully loaded data from ${Object.keys(allData).length} categories:`, Object.keys(allData))
} catch (error) {
console.error('❌ Error loading holon data:', error)
setError('Failed to load data')

@ -510,11 +487,6 @@ export class HolonShape extends BaseBoxShapeUtil<IHolon> {
// If holonId is provided, mark as connected
const shouldConnect = !!(holonId && holonId.trim() !== '')

console.log('💾 Saving Holon shape')
console.log(' holonId:', holonId)
console.log(' shouldConnect:', shouldConnect)
console.log(' newName:', newName)
console.log(' newDescription:', newDescription)

// Create new props without the editing fields
const { editingName: _editingName, editingDescription: _editingDescription, ...restProps } = shape.props

@ -528,7 +500,6 @@ export class HolonShape extends BaseBoxShapeUtil<IHolon> {
holonId: holonId, // Explicitly set holonId
}

console.log(' New props:', newProps)

// Update the shape
this.editor.updateShape<IHolon>({

@ -537,18 +508,15 @@ export class HolonShape extends BaseBoxShapeUtil<IHolon> {
props: newProps,
})

console.log('✅ Shape updated, isConnected:', shouldConnect)

// If we have a connected holon, store the metadata
if (holonId && shouldConnect) {
console.log('📝 Storing metadata to GunDB for holon:', holonId)
try {
await holosphereService.putData(holonId, 'metadata', {
name: newName,
description: newDescription,
lastUpdated: Date.now()
})
console.log('✅ Metadata saved to GunDB')
} catch (error) {
console.error('❌ Error saving metadata:', error)
}

@ -70,7 +70,6 @@ async function pollRunPodJob(
pollInterval: number = 2000
): Promise<string> {
const statusUrl = `https://api.runpod.ai/v2/${endpointId}/status/${jobId}`
console.log('🔄 ImageGen: Polling job:', jobId)

for (let attempt = 0; attempt < maxAttempts; attempt++) {
try {

@ -88,11 +87,8 @@ async function pollRunPodJob(
}

const data = await response.json() as RunPodJobResponse
console.log(`🔄 ImageGen: Poll attempt ${attempt + 1}/${maxAttempts}, status:`, data.status)
console.log(`📋 ImageGen: Full response data:`, JSON.stringify(data, null, 2))

if (data.status === 'COMPLETED') {
console.log('✅ ImageGen: Job completed, processing output...')

// Extract image URL from various possible response formats
let imageUrl = ''

@ -101,22 +97,17 @@ async function pollRunPodJob(
if (!data.output) {
// Only retry 2-3 times, then proceed to check alternatives
if (attempt < 3) {
console.log(`⏳ ImageGen: COMPLETED but no output yet, waiting briefly (attempt ${attempt + 1}/3)...`)
await new Promise(resolve => setTimeout(resolve, 500))
continue
}

// Try alternative ways to get the output - maybe it's at the top level
console.log('⚠️ ImageGen: No output field found, checking for alternative response formats...')
console.log('📋 ImageGen: All available fields:', Object.keys(data))

// Check if image data is at top level
if (data.image) {
imageUrl = data.image
console.log('✅ ImageGen: Found image at top level')
} else if (data.url) {
imageUrl = data.url
console.log('✅ ImageGen: Found url at top level')
} else if (data.result) {
// Some endpoints return result instead of output
if (typeof data.result === 'string') {

@ -126,10 +117,8 @@ async function pollRunPodJob(
} else if (data.result.url) {
imageUrl = data.result.url
}
console.log('✅ ImageGen: Found result field')
} else {
// Last resort: try to fetch output via stream endpoint (some RunPod endpoints use this)
console.log('⚠️ ImageGen: Trying alternative endpoint to retrieve output...')
try {
const streamUrl = `https://api.runpod.ai/v2/${endpointId}/stream/${jobId}`
const streamResponse = await fetch(streamUrl, {

@ -141,7 +130,6 @@ async function pollRunPodJob(
if (streamResponse.ok) {
const streamData = await streamResponse.json() as RunPodJobResponse
console.log('📥 ImageGen: Stream endpoint response:', JSON.stringify(streamData, null, 2))

if (streamData.output) {
if (typeof streamData.output === 'string') {

@ -160,13 +148,11 @@ async function pollRunPodJob(
}

if (imageUrl) {
console.log('✅ ImageGen: Found image URL via stream endpoint')
return imageUrl
}
}
}
} catch (streamError) {
console.log('⚠️ ImageGen: Stream endpoint not available or failed:', streamError)
}

console.error('❌ ImageGen: Job completed but no output field in response after retries:', JSON.stringify(data, null, 2))

@ -232,13 +218,10 @@ async function pollRunPodJob(
// Assume base64 without prefix
imageUrl = `data:image/${firstImage.type || 'png'};base64,${firstImage.data}`
}
console.log('✅ ImageGen: Found image in ComfyUI format (images array)')
} else if (firstImage.url) {
imageUrl = firstImage.url
console.log('✅ ImageGen: Found image URL in ComfyUI format')
} else if (firstImage.filename) {
// Try to construct URL from filename (may need endpoint-specific handling)
console.log('⚠️ ImageGen: Found filename but no URL, filename:', firstImage.filename)
}
}
}

@ -345,7 +328,6 @@ export class ImageGenShape extends BaseBoxShapeUtil<IImageGen> {
}

const generateImage = async (prompt: string) => {
console.log("🎨 ImageGen: Generating image with prompt:", prompt)

// Store the prompt being used and clear any previous errors
editor.updateShape<IImageGen>({

@ -366,8 +348,6 @@ export class ImageGenShape extends BaseBoxShapeUtil<IImageGen> {
// Mock API mode: Return placeholder image without calling RunPod
if (USE_MOCK_API) {
console.log("🎭 ImageGen: Using MOCK API mode (no real RunPod call)")
console.log("🎨 ImageGen: Mock prompt:", prompt)

// Simulate API delay
await new Promise(resolve => setTimeout(resolve, 1500))

@ -375,7 +355,6 @@ export class ImageGenShape extends BaseBoxShapeUtil<IImageGen> {
// Use a placeholder image service
const mockImageUrl = `https://via.placeholder.com/512x512/4F46E5/FFFFFF?text=${encodeURIComponent(prompt.substring(0, 30))}`

console.log("✅ ImageGen: Mock image generated:", mockImageUrl)

// Get current shape to access existing history
const currentShape = editor.getShape<IImageGen>(shape.id)

@ -411,7 +390,6 @@ export class ImageGenShape extends BaseBoxShapeUtil<IImageGen> {
// Use runsync for synchronous execution - returns output directly without polling
const url = `https://api.runpod.ai/v2/${endpointId}/runsync`

console.log("📤 ImageGen: Sending request to:", url)

const response = await fetch(url, {
method: "POST",

@ -433,7 +411,6 @@ export class ImageGenShape extends BaseBoxShapeUtil<IImageGen> {
}

const data = await response.json() as RunPodJobResponse
console.log("📥 ImageGen: Response data:", JSON.stringify(data, null, 2).substring(0, 500) + '...')

// With runsync, we get the output directly (no polling needed)
if (data.output) {

@ -446,7 +423,6 @@ export class ImageGenShape extends BaseBoxShapeUtil<IImageGen> {
// Base64 encoded image string
if (typeof firstImage === 'string') {
imageUrl = firstImage.startsWith('data:') ? firstImage : `data:image/png;base64,${firstImage}`
console.log('✅ ImageGen: Found base64 image in output.images array')
} else if (firstImage.data) {
imageUrl = firstImage.data.startsWith('data:') ? firstImage.data : `data:image/png;base64,${firstImage.data}`
} else if (firstImage.url) {

@ -470,7 +446,6 @@ export class ImageGenShape extends BaseBoxShapeUtil<IImageGen> {
}

if (imageUrl) {
console.log('✅ ImageGen: Image generated successfully')

// Get current shape to access existing history
const currentShape = editor.getShape<IImageGen>(shape.id)

@ -795,12 +770,10 @@ export class ImageGenShape extends BaseBoxShapeUtil<IImageGen> {
new ClipboardItem({ [blob.type]: blob })
])
}
console.log('✅ ImageGen: Image copied to clipboard')
} catch (err) {
console.error('❌ ImageGen: Failed to copy image:', err)
// Fallback: copy the URL
await navigator.clipboard.writeText(image.imageUrl)
console.log('✅ ImageGen: Image URL copied to clipboard (fallback)')
}
}}
onPointerDown={(e) => e.stopPropagation()}

@ -851,7 +824,6 @@ export class ImageGenShape extends BaseBoxShapeUtil<IImageGen> {
document.body.appendChild(link)
link.click()
document.body.removeChild(link)
console.log('✅ ImageGen: Image download initiated')
}}
onPointerDown={(e) => e.stopPropagation()}
onTouchStart={(e) => e.stopPropagation()}

@ -527,7 +527,6 @@ function MapComponent({ shape, editor, isSelected }: { shape: IMapShape; editor:
const currentTool = activeToolRef.current;
const currentDrawingPoints = drawingPointsRef.current;

console.log('Map click with tool:', currentTool, 'at', coord, 'points:', currentDrawingPoints.length);

if (currentTool === 'marker') {
addAnnotation('marker', [coord]);

@ -570,7 +569,6 @@ function MapComponent({ shape, editor, isSelected }: { shape: IMapShape; editor:
const currentTool = activeToolRef.current;
const currentDrawingPoints = drawingPointsRef.current;

console.log('Map double-click with tool:', currentTool, 'points:', currentDrawingPoints.length);

if (currentTool === 'line' && currentDrawingPoints.length >= 2) {
addAnnotation('line', currentDrawingPoints);

@ -658,15 +656,6 @@ function MapComponent({ shape, editor, isSelected }: { shape: IMapShape; editor:
);
const currentCollaboratorIds = new Set(collaboratorsWithLocation.map((c: CollaboratorPresence) => c.id));

// Debug logging
if (collaboratorsWithLocation.length > 0) {
console.log('📍 GPS Markers Update:', {
total: allCollaborators.length,
withLocation: collaboratorsWithLocation.length,
users: collaboratorsWithLocation.map(c => ({ id: c.id.slice(0, 8), name: c.name, loc: c.location })),
});
}

// Remove old collaborator markers that are no longer sharing
collaboratorMarkersRef.current.forEach((marker, id) => {
if (!currentCollaboratorIds.has(id)) {

@ -1282,13 +1271,11 @@ function MapComponent({ shape, editor, isSelected }: { shape: IMapShape; editor:
const findNearby = useCallback(async (category: typeof NEARBY_CATEGORIES[0]) => {
if (!mapRef.current || !isMountedRef.current) return;

console.log('🗺️ findNearby called for category:', category.label);
setIsFetchingNearby(true);

let bounds;
try {
bounds = mapRef.current.getBounds();
console.log('🗺️ Map bounds:', bounds.toString());
} catch (err) {
console.error('🗺️ Error getting bounds:', err);
setIsFetchingNearby(false);

@ -1303,7 +1290,6 @@ function MapComponent({ shape, editor, isSelected }: { shape: IMapShape; editor:
);
out body 10;
`;
console.log('🗺️ Overpass query:', query);

const response = await fetch('https://overpass-api.de/api/interpreter', {
method: 'POST',

@ -1315,9 +1301,7 @@ function MapComponent({ shape, editor, isSelected }: { shape: IMapShape; editor:
return;
}

console.log('🗺️ Overpass response status:', response.status);
const data = await response.json() as { elements: { id: number; lat: number; lon: number; tags?: { name?: string; amenity?: string } }[] };
console.log('🗺️ Found', data.elements.length, 'places');

const places = data.elements.slice(0, 10).map((el) => ({
id: el.id,

@ -1335,7 +1319,6 @@ function MapComponent({ shape, editor, isSelected }: { shape: IMapShape; editor:
setNearbyPlaces(places);

// Add markers for nearby places
console.log('🗺️ Adding', places.length, 'markers');
places.forEach((place: any) => {
if (isMountedRef.current) {
addAnnotation('marker', [{ lat: place.lat, lng: place.lng }], {

@ -1388,7 +1371,6 @@ function MapComponent({ shape, editor, isSelected }: { shape: IMapShape; editor:
const userColor = editor.user.getColor();

setIsSharingLocation(true);
console.log('📍 Starting location sharing for user:', userName);

watchIdRef.current = navigator.geolocation.watchPosition(
(position) => {

@ -1419,7 +1401,6 @@ function MapComponent({ shape, editor, isSelected }: { shape: IMapShape; editor:
lastSeen: Date.now(),
};

console.log('📍 Broadcasting location:', newLocation, 'Total collaborators:', existingCollaborators.length + 1);

editor.updateShape<IMapShape>({
id: shape.id,

@ -1453,7 +1434,6 @@ function MapComponent({ shape, editor, isSelected }: { shape: IMapShape; editor:
// Get current shape to avoid stale closure
const currentShape = editor.getShape<IMapShape>(shape.id);
if (!currentShape) {
console.log('📍 Shape not found, skipping collaborator removal');
return;
}

@ -1463,7 +1443,6 @@ function MapComponent({ shape, editor, isSelected }: { shape: IMapShape; editor:
(c: CollaboratorPresence) => c.id !== userId
);

console.log('📍 Stopping location sharing, remaining collaborators:', filteredCollaborators.length);

editor.updateShape<IMapShape>({
id: shape.id,

@ -470,7 +470,6 @@ export class MultmuxShape extends BaseBoxShapeUtil<IMultmuxShape> {
})

// Session created - terminal will connect via WebSocket
console.log('✓ Created session:', session.name)
} catch (error) {
console.error('Failed to create session:', error)
}

@ -122,7 +122,6 @@ export class PromptShape extends BaseBoxShapeUtil<IPrompt> {
}, {} as Record<string, TLShape>)

const generateText = async (prompt: string) => {
console.log("🎯 generateText called with prompt:", prompt);

// Clear any previous errors
this.editor.updateShape<IPrompt>({

@ -137,8 +136,6 @@ export class PromptShape extends BaseBoxShapeUtil<IPrompt> {
const escapedPrompt = prompt.replace(/[\\"]/g, '\\$&').replace(/\n/g, '\\n')
const userMessage = `{"role": "user", "content": "${escapedPrompt}"}`

console.log("💬 User message:", userMessage);
console.log("📚 Conversation history:", conversationHistory);

// Update with user message and trigger scroll
this.editor.updateShape<IPrompt>({

@ -153,23 +150,19 @@ export class PromptShape extends BaseBoxShapeUtil<IPrompt> {
let fullResponse = ''

console.log("🚀 Calling llm function...");
try {
await llm(prompt, (partial: string, done?: boolean) => {
console.log(`📝 LLM callback received - partial: "${partial}", done: ${done}`);
if (partial) {
fullResponse = partial
const escapedResponse = partial.replace(/[\\"]/g, '\\$&').replace(/\n/g, '\\n')
const assistantMessage = `{"role": "assistant", "content": "${escapedResponse}"}`

console.log("🤖 Assistant message:", assistantMessage);

try {
JSON.parse(assistantMessage)

// Use requestAnimationFrame to ensure smooth scrolling during streaming
requestAnimationFrame(() => {
console.log("🔄 Updating shape with partial response...");
this.editor.updateShape<IPrompt>({
id: shape.id,
type: "Prompt",

@ -185,7 +178,6 @@ export class PromptShape extends BaseBoxShapeUtil<IPrompt> {
}
}
}, shape.props.personality)
console.log("✅ LLM function completed successfully");
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
console.error("❌ Error in LLM function:", errorMessage);

@ -211,7 +203,6 @@ export class PromptShape extends BaseBoxShapeUtil<IPrompt> {
// Ensure the final message is saved after streaming is complete
if (fullResponse) {
console.log("💾 Saving final response:", fullResponse);
const escapedResponse = fullResponse.replace(/[\\"]/g, '\\$&').replace(/\n/g, '\\n')
const assistantMessage = `{"role": "assistant", "content": "${escapedResponse}"}`

@ -228,7 +219,6 @@ export class PromptShape extends BaseBoxShapeUtil<IPrompt> {
error: null // Clear any errors on success
},
})
console.log("✅ Final response saved successfully");
} catch (error) {
console.error('❌ Invalid JSON in final message:', error)
}

@ -392,7 +382,6 @@ export class PromptShape extends BaseBoxShapeUtil<IPrompt> {
}

const handlePinToggle = () => {
console.log('📌 Pin toggle clicked, current state:', shape.props.pinnedToView, '-> new state:', !shape.props.pinnedToView)
this.editor.updateShape<IPrompt>({
id: shape.id,
type: shape.type,

@ -313,7 +313,6 @@ export class TranscriptionShape extends BaseBoxShapeUtil<ITranscription> {
props: cleanProps
})

console.log(`🔄 Updated shape state: isTranscribing=${cleanProps.isTranscribing}, hookIsTranscribing=${hookIsTranscribing}, isRecording=${isRecording}`)
}
}, [hookIsTranscribing, isRecording, shape.id]) // Removed shape.props from dependencies

@ -468,7 +467,6 @@ export class TranscriptionShape extends BaseBoxShapeUtil<ITranscription> {
try {
if (isRecording) {
// Currently recording, stop it
console.log('🛑 Stopping transcription...')
stopRecording()
this.editor.updateShape<ITranscription>({
id: shape.id,

@ -482,7 +480,6 @@ export class TranscriptionShape extends BaseBoxShapeUtil<ITranscription> {
} else {
// Not recording, start it (or resume if paused)
if (isPaused) {
console.log('▶️ Resuming transcription...')
startRecording()
this.editor.updateShape<ITranscription>({
id: shape.id,

@ -494,7 +491,6 @@ export class TranscriptionShape extends BaseBoxShapeUtil<ITranscription> {
}
})
} else {
console.log('🎤 Starting transcription...')

// Clear editing content and live edit transcript when starting new recording session
if (isLiveEditing) {

@ -523,7 +519,6 @@ export class TranscriptionShape extends BaseBoxShapeUtil<ITranscription> {
try {
if (isPaused) {
// Currently paused, resume
console.log('▶️ Resuming transcription...')
if (useWebSpeech) {
// For Web Speech, restart recording
startRecording()

@ -543,7 +538,6 @@ export class TranscriptionShape extends BaseBoxShapeUtil<ITranscription> {
})
} else {
// Currently recording, pause it
console.log('⏸️ Pausing transcription...')
if (useWebSpeech) {
// For Web Speech, stop recording (pause not natively supported)
stopRecording()

@ -77,7 +77,6 @@ export class VideoGenShape extends BaseBoxShapeUtil<IVideoGen> {
const editor = this.editor

// Debug: log what's in shape props on each render
console.log('🎬 VideoGen render - shape.props.videoUrl:', shape.props.videoUrl?.substring(0, 80) || 'null')

const [prompt, setPrompt] = useState(shape.props.prompt)
const [imageUrl, setImageUrl] = useState(shape.props.imageUrl)

@ -97,7 +96,6 @@ export class VideoGenShape extends BaseBoxShapeUtil<IVideoGen> {
// This ensures the displayed video matches the shape's stored videoUrl
useEffect(() => {
if (shape.props.videoUrl !== videoUrl) {
console.log('🎬 VideoGen: Syncing videoUrl from shape props:', shape.props.videoUrl?.substring(0, 50))
setVideoUrl(shape.props.videoUrl)
}
}, [shape.props.videoUrl])

@ -176,10 +174,7 @@ export class VideoGenShape extends BaseBoxShapeUtil<IVideoGen> {
}

const currentMode = (imageUrl.trim() || imageBase64) ? 'i2v' : 't2v'
console.log(`🎬 VideoGen: Starting ${currentMode.toUpperCase()} generation via fal.ai`)
console.log('🎬 VideoGen: Prompt:', prompt)
if (currentMode === 'i2v') {
console.log('🎬 VideoGen: Image source:', imageUrl ? 'URL' : 'Uploaded')
}

// Clear any existing video and set loading state

@ -209,7 +204,6 @@ export class VideoGenShape extends BaseBoxShapeUtil<IVideoGen> {
// WAN 2.1 models: fast startup, good quality
const endpoint = currentMode === 'i2v' ? 'fal-ai/wan-i2v' : 'fal-ai/wan-t2v'

console.log('🎬 VideoGen: Submitting to fal.ai endpoint:', endpoint)
const submitUrl = `https://queue.fal.run/${endpoint}`

// Build input payload for fal.ai

@ -248,7 +242,6 @@ export class VideoGenShape extends BaseBoxShapeUtil<IVideoGen> {
}

const jobData = await response.json() as FalQueueResponse
console.log('🎬 VideoGen: Job submitted:', jobData.request_id)

if (!jobData.request_id) {
throw new Error('No request_id returned from fal.ai')

@ -275,7 +268,6 @@ export class VideoGenShape extends BaseBoxShapeUtil<IVideoGen> {
}

const statusData = await statusResponse.json() as FalQueueResponse
console.log(`🎬 VideoGen: Poll ${attempts}/${maxAttempts}, status:`, statusData.status)

if (statusData.status === 'COMPLETED') {
// Fetch the result

@ -289,13 +281,11 @@ export class VideoGenShape extends BaseBoxShapeUtil<IVideoGen> {
}

const resultData = await resultResponse.json() as { video?: { url: string }; output?: { video?: { url: string } } }
console.log('🎬 VideoGen: Result data:', JSON.stringify(resultData).substring(0, 200))

// Extract video URL from result
const videoResultUrl = resultData.video?.url || resultData.output?.video?.url

if (videoResultUrl) {
console.log('✅ VideoGen: Generation complete, URL:', videoResultUrl.substring(0, 100))

// Update local state immediately
setVideoUrl(videoResultUrl)

@ -319,7 +309,6 @@ export class VideoGenShape extends BaseBoxShapeUtil<IVideoGen> {
}
return
} else {
console.log('⚠️ VideoGen: Completed but no video in result:', JSON.stringify(resultData))
throw new Error('Video generation completed but no video URL returned')
}
} else if (statusData.status === 'FAILED') {

@ -702,7 +691,7 @@ export class VideoGenShape extends BaseBoxShapeUtil<IVideoGen> {
playsInline
onPointerDown={(e) => e.stopPropagation()}
onTouchStart={(e) => e.stopPropagation()}
onLoadedData={() => console.log('🎬 VideoGen: Video loaded successfully')}
onLoadedData={() => {}}
onError={(e) => console.error('🎬 VideoGen: Video load error:', e)}
style={{
width: '100%',
@ -9,7 +9,6 @@ export class FathomMeetingsTool extends StateNode {
|
|||
onSelect() {
|
||||
// Don't create a shape immediately when tool is selected
|
||||
// The user will create one by clicking on the canvas (onPointerDown in idle state)
|
||||
console.log('🎯 FathomMeetingsTool parent: tool selected - waiting for user click')
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -83,11 +82,9 @@ export class FathomMeetingsIdle extends StateNode {
|
|||
}
|
||||
|
||||
override onPointerDown = (info?: any) => {
|
||||
console.log('📍 FathomMeetingsTool: onPointerDown called', { info, fullInfo: JSON.stringify(info) })
|
||||
|
||||
// Prevent multiple shapes from being created if user clicks multiple times
|
||||
if (this.isCreatingShape) {
|
||||
console.log('📍 FathomMeetingsTool: Shape creation already in progress, ignoring click')
|
||||
return
|
||||
}
|
||||
|
||||
|
|
@ -106,7 +103,6 @@ export class FathomMeetingsIdle extends StateNode {
|
|||
// CRITICAL: Ensure this is a primary button click (left mouse button = 0)
|
||||
// This prevents accidental triggers from other pointer events
|
||||
if (info.button !== 0) {
|
||||
console.log('📍 FathomMeetingsTool: Non-primary button click, ignoring', { button: info.button })
|
||||
return
|
||||
}
|
||||
|
||||
|
|
@ -120,7 +116,6 @@ export class FathomMeetingsIdle extends StateNode {
|
|||
target.closest('.tlui-toolbar') ||
|
||||
target.closest('[role="menu"]') ||
|
||||
target.closest('[role="toolbar"]')) {
|
||||
console.log('📍 FathomMeetingsTool: Click on UI element, ignoring')
|
||||
return
|
||||
}
|
||||
}
|
||||
|
|
@ -137,7 +132,6 @@ export class FathomMeetingsIdle extends StateNode {
|
|||
const pagePoint = this.editor.screenToPage(info.point)
|
||||
clickX = pagePoint.x
|
||||
clickY = pagePoint.y
|
||||
console.log('📍 FathomMeetingsTool: Using info.point converted to page:', { screen: info.point, page: { x: clickX, y: clickY } })
|
||||
} catch (e) {
|
||||
console.error('📍 FathomMeetingsTool: Failed to convert info.point to page coordinates', e)
|
||||
}
|
||||
|
|
@ -193,7 +187,6 @@ export class FathomMeetingsIdle extends StateNode {
|
|||
onSelect() {
|
||||
// Don't create a shape immediately when tool is selected
|
||||
// The user will create one by clicking on the canvas (onPointerDown)
|
||||
console.log('🎯 FathomMeetings tool selected - waiting for user click')
|
||||
}
|
||||
|
||||
override onExit = () => {
|
||||
|
|
@ -221,7 +214,6 @@ export class FathomMeetingsIdle extends StateNode {
|
|||
this.isCreatingShape = true
|
||||
|
||||
try {
|
||||
console.log('📍 FathomMeetingsTool: createFathomMeetingsBrowserShape called', { clickX, clickY })
|
||||
|
||||
// Store current camera position to prevent it from changing
|
||||
const currentCamera = this.editor.getCamera()
|
||||
|
|
@ -234,20 +226,12 @@ export class FathomMeetingsIdle extends StateNode {
|
|||
// Position new browser shape at click location (centered on click)
|
||||
const baseX = clickX - shapeWidth / 2 // Center the shape on click
|
||||
const baseY = clickY - shapeHeight / 2 // Center the shape on click
|
||||
console.log('📍 FathomMeetingsTool: Using click position:', { clickX, clickY, baseX, baseY })
|
||||
|
||||
// User clicked - ALWAYS use that exact position, no collision detection
|
||||
// This ensures the shape appears exactly where the user clicked
|
||||
const finalX = baseX
|
||||
const finalY = baseY
|
||||
console.log('📍 FathomMeetingsTool: Using click position directly (no collision check):', {
|
||||
clickPosition: { x: clickX, y: clickY },
|
||||
shapePosition: { x: finalX, y: finalY },
|
||||
shapeSize: { w: shapeWidth, h: shapeHeight }
|
||||
})
|
||||
|
||||
console.log('📍 FathomMeetingsTool: Final position for shape:', { finalX, finalY })
|
||||
|
||||
const browserShape = this.editor.createShape({
|
||||
type: 'FathomMeetingsBrowser',
|
||||
x: finalX,
|
||||
|
|
@ -258,7 +242,6 @@ export class FathomMeetingsIdle extends StateNode {
|
|||
}
|
||||
})
|
||||
|
||||
console.log('✅ Created FathomMeetingsBrowser shape:', browserShape.id)
|
||||
|
||||
// Restore camera position if it changed
|
||||
const newCamera = this.editor.getCamera()
|
||||
|
|
|
|||
|
|
@ -88,9 +88,7 @@ export class HolonIdle extends StateNode {
|
|||
const pagePoint = this.editor.screenToPage(info.point)
|
||||
clickX = pagePoint.x
|
||||
clickY = pagePoint.y
|
||||
console.log('📍 HolonTool: Method 1 - info.point converted:', { screen: info.point, page: { x: clickX, y: clickY } })
|
||||
} catch (e) {
|
||||
console.log('📍 HolonTool: Failed to convert info.point, trying other methods')
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -100,7 +98,6 @@ export class HolonIdle extends StateNode {
|
|||
if (currentPagePoint && currentPagePoint.x !== undefined && currentPagePoint.y !== undefined) {
|
||||
clickX = currentPagePoint.x
|
||||
clickY = currentPagePoint.y
|
||||
console.log('📍 HolonTool: Method 2 - currentPagePoint:', { x: clickX, y: clickY })
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -110,7 +107,6 @@ export class HolonIdle extends StateNode {
|
|||
if (originPagePoint && originPagePoint.x !== undefined && originPagePoint.y !== undefined) {
|
||||
clickX = originPagePoint.x
|
||||
clickY = originPagePoint.y
|
||||
console.log('📍 HolonTool: Method 3 - originPagePoint:', { x: clickX, y: clickY })
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -145,7 +141,6 @@ export class HolonIdle extends StateNode {
|
|||
}
|
||||
} catch (e) {
|
||||
// Element might already be removed, ignore error
|
||||
console.log('Tooltip element already removed')
|
||||
}
|
||||
this.tooltipElement = undefined
|
||||
}
|
||||
|
|
@ -169,7 +164,6 @@ export class HolonIdle extends StateNode {
|
|||
// Position new Holon shape at click location (centered on click)
|
||||
baseX = clickX - shapeWidth / 2 // Center the shape on click
|
||||
baseY = clickY - shapeHeight / 2 // Center the shape on click
|
||||
console.log('📍 HolonTool: Calculated base position from click:', { clickX, clickY, baseX, baseY, shapeWidth, shapeHeight })
|
||||
} else {
|
||||
// Fallback to viewport center if no click coordinates
|
||||
const viewport = this.editor.getViewportPageBounds()
|
||||
|
|
@ -185,32 +179,14 @@ export class HolonIdle extends StateNode {
|
|||
|
||||
// ALWAYS use click position directly when provided - user clicked where they want it
|
||||
// Skip collision detection entirely for user clicks to ensure it appears exactly where clicked
|
||||
let finalX = baseX
|
||||
let finalY = baseY
|
||||
|
||||
if (clickX !== undefined && clickY !== undefined) {
|
||||
// User clicked - ALWAYS use that exact position, no collision detection
|
||||
// This ensures the shape appears exactly where the user clicked
|
||||
finalX = baseX
|
||||
finalY = baseY
|
||||
console.log('📍 Using click position directly (no collision check):', {
|
||||
clickPosition: { x: clickX, y: clickY },
|
||||
shapePosition: { x: finalX, y: finalY },
|
||||
shapeSize: { w: shapeWidth, h: shapeHeight }
|
||||
})
|
||||
} else {
|
||||
// For fallback (no click), use base position directly
|
||||
finalX = baseX
|
||||
finalY = baseY
|
||||
console.log('📍 No click position - using base position:', { finalX, finalY })
|
||||
}
|
||||
|
||||
const finalX = baseX
|
||||
const finalY = baseY
|
||||
|
||||
// Default coordinates (can be changed by user)
|
||||
const defaultLat = 40.7128 // NYC
|
||||
const defaultLng = -74.0060
|
||||
const defaultResolution = 7 // City level
|
||||
|
||||
console.log('📍 HolonTool: Final position for shape:', { finalX, finalY, wasOverlap: clickX !== undefined && clickY !== undefined && (finalX !== baseX || finalY !== baseY) })
|
||||
|
||||
const holonShape = this.editor.createShape({
|
||||
type: 'Holon',
|
||||
|
|
@ -234,7 +210,6 @@ export class HolonIdle extends StateNode {
|
|||
}
|
||||
})
|
||||
|
||||
console.log('✅ Created Holon shape:', holonShape.id)
|
||||
|
||||
// Restore camera position if it changed
|
||||
const newCamera = this.editor.getCamera()
|
||||
|
|
@ -262,21 +237,18 @@ export class HolonIdle extends StateNode {
|
|||
// If Holon shapes exist, select them and center the view
|
||||
this.editor.setSelectedShapes(holonShapes.map(shape => shape.id))
|
||||
this.editor.zoomToFit()
|
||||
console.log('🎯 Holon tool selected - showing existing Holon shapes:', holonShapes.length)
|
||||
|
||||
// Add refresh all functionality
|
||||
this.addRefreshAllListener()
|
||||
} else {
|
||||
// If no Holon shapes exist, don't automatically create one
|
||||
// The user will create one by clicking on the canvas (onPointerDown)
|
||||
console.log('🎯 Holon tool selected - no Holon shapes found, waiting for user interaction')
|
||||
}
|
||||
}
|
||||
|
||||
private addRefreshAllListener() {
|
||||
// Listen for refresh-all-holons event
|
||||
const handleRefreshAll = async () => {
|
||||
console.log('🔄 Refreshing all Holon shapes...')
|
||||
const shapeUtil = new HolonShape(this.editor)
|
||||
shapeUtil.editor = this.editor
|
||||
|
||||
|
|
|
|||
|
|

@ -6,7 +6,6 @@ export class ImageGenTool extends BaseBoxShapeTool {
override shapeType = 'ImageGen'

override onComplete: TLEventHandlers["onComplete"] = () => {
console.log('🎨 ImageGenTool: Shape creation completed')
this.editor.setCurrentTool('select')
}
}

@ -7,7 +7,6 @@ export class MultmuxTool extends StateNode {
static override children = () => [MultmuxIdle]

onSelect() {
console.log('🖥️ MultmuxTool: tool selected - waiting for user click')
}
}

@ -118,7 +117,6 @@ export class MultmuxIdle extends StateNode {
}
})

console.log('🖥️ Created Multmux shape:', multmuxShape.id)

const newCamera = this.editor.getCamera()
if (currentCamera.x !== newCamera.x || currentCamera.y !== newCamera.y || currentCamera.z !== newCamera.z) {

@ -161,7 +161,6 @@ export class ObsNoteIdle extends StateNode {

if (existingBrowserShapes.length > 0) {
// If a browser already exists, just select it
console.log('✅ ObsidianBrowser already exists, selecting it')
this.editor.setSelectedShapes([existingBrowserShapes[0].id])
this.editor.setCurrentTool('select')
return

@ -15,7 +15,6 @@ export class TranscriptionTool extends StateNode {
if (transcriptionShapes.length > 0) {
// If Transcription shapes exist, start whisper audio processing on the first one
const firstTranscriptionShape = transcriptionShapes[0]
console.log('🎯 Transcription tool selected - starting whisper audio processing on existing shape:', firstTranscriptionShape.id)

// Select the first transcription shape
this.editor.setSelectedShapes([`shape:${firstTranscriptionShape.id}`] as any)

@ -34,7 +33,6 @@ export class TranscriptionTool extends StateNode {
this.editor.setCurrentTool('select')
} else {
// If no Transcription shapes exist, create a new one
console.log('🎯 Transcription tool selected - creating new Transcription shape')
this.createTranscriptionShape()
}
}

@ -91,7 +89,6 @@ export class TranscriptionTool extends StateNode {
}
})

console.log('✅ Created transcription shape:', transcriptionShape.id)

// Select the new shape and switch to select tool
this.editor.setSelectedShapes([`shape:${transcriptionShape.id}`] as any)

@ -7,7 +7,6 @@ export class VideoGenTool extends StateNode {
static override children = () => [VideoGenIdle]

onSelect() {
console.log('🎬 VideoGenTool: tool selected - waiting for user click')
}
}

@ -112,7 +111,6 @@ export class VideoGenIdle extends StateNode {
}
})

console.log('🎬 Created VideoGen shape:', videoGenShape.id)

const newCamera = this.editor.getCamera()
if (currentCamera.x !== newCamera.x || currentCamera.y !== newCamera.y || currentCamera.z !== newCamera.z) {
@ -37,10 +37,8 @@ export const overrides: TLUiOverrides = {
|
|||
onSelect: () => {
|
||||
const onlySelectedShape = editor.getOnlySelectedShape()
|
||||
if (!onlySelectedShape || onlySelectedShape.type !== "draw") return
|
||||
console.log("recognizing")
|
||||
const verts = editor.getShapeGeometry(onlySelectedShape).vertices
|
||||
const result = R.recognize(verts)
|
||||
console.log(result)
|
||||
},
|
||||
},
|
||||
addGesture: {
|
||||
|
|
@ -51,7 +49,6 @@ export const overrides: TLUiOverrides = {
|
|||
if (!onlySelectedShape || onlySelectedShape.type !== "draw") return
|
||||
const name = onlySelectedShape.meta.name
|
||||
if (!name) return
|
||||
console.log("adding gesture:", name)
|
||||
const points = editor.getShapeGeometry(onlySelectedShape).vertices
|
||||
R.addGesture(name as string, points)
|
||||
},
|
||||
|
|
@ -64,7 +61,6 @@ export const overrides: TLUiOverrides = {
|
|||
if (!onlySelectedShape || onlySelectedShape.type !== "draw") return
|
||||
const points = editor.getShapeGeometry(onlySelectedShape).vertices
|
||||
const result = R.recognize(points)
|
||||
console.log("morphing to closest:", result.name)
|
||||
const newShape: TLShapePartial<TLDrawShape> = {
|
||||
...onlySelectedShape,
|
||||
type: "draw",
|
||||
|
|
|
|||
|
|
@ -26,7 +26,6 @@ export function CustomMainMenu() {
|
|||
}
|
||||
try {
|
||||
const jsonData = JSON.parse(event.target.result)
|
||||
console.log('Parsed JSON data:', jsonData)
|
||||
|
||||
// Helper function to validate and normalize shape types
|
||||
const validateAndNormalizeShapeType = (shape: any): string => {
|
||||
|
|
@ -243,7 +242,6 @@ export function CustomMainMenu() {
|
|||
// CRITICAL: Validate and normalize shape type
|
||||
const normalizedType = validateAndNormalizeShapeType(fixedShape)
|
||||
if (normalizedType !== fixedShape.type) {
|
||||
console.log(`🔧 Normalizing shape type "${fixedShape.type}" to "${normalizedType}" for shape:`, fixedShape.id)
|
||||
fixedShape.type = normalizedType
|
||||
|
||||
// If converted to text, set up proper text shape props
|
||||
|
|
@ -349,7 +347,6 @@ export function CustomMainMenu() {
|
|||
|
||||
// Check if it's a worker export format (has documents array)
|
||||
if (jsonData.documents && Array.isArray(jsonData.documents)) {
|
||||
console.log('Detected worker export format with', jsonData.documents.length, 'documents')
|
||||
|
||||
// Convert worker export format to TLContent format
|
||||
const pageId = jsonData.documents.find((doc: any) => doc.state?.typeName === 'page')?.state?.id || 'page:default'
|
||||
|
|
@ -365,7 +362,6 @@ export function CustomMainMenu() {
|
|||
.filter((doc: any) => doc.state?.typeName === 'asset')
|
||||
.map((doc: any) => doc.state)
|
||||
|
||||
console.log('Extracted:', { shapes: shapes.length, bindings: bindings.length, assets: assets.length })
|
||||
|
||||
// CRITICAL: rootShapeIds should only include shapes that are direct children of the page
|
||||
// Shapes inside frames should NOT be in rootShapeIds (they're children of frames)
|
||||
|
|
@ -382,7 +378,6 @@ export function CustomMainMenu() {
|
|||
assets: assets,
|
||||
}
|
||||
} else if (jsonData.store && jsonData.schema) {
|
||||
console.log('Detected Automerge format')
|
||||
// Convert Automerge format to TLContent format
|
||||
const store = jsonData.store
|
||||
const shapes: any[] = []
|
||||
|
|
@ -411,7 +406,6 @@ export function CustomMainMenu() {
|
|||
}
|
||||
})
|
||||
|
||||
console.log('Extracted from Automerge format:', { shapes: shapes.length, bindings: bindings.length, assets: assets.length })
|
||||
|
||||
// CRITICAL: rootShapeIds should only include shapes that are direct children of the page
|
||||
// Shapes inside frames should NOT be in rootShapeIds (they're children of frames)
|
||||
|
|
@ -428,7 +422,6 @@ export function CustomMainMenu() {
|
|||
assets: assets,
|
||||
}
|
||||
} else if (jsonData.shapes && Array.isArray(jsonData.shapes)) {
|
||||
console.log('Detected standard TLContent format with', jsonData.shapes.length, 'shapes')
|
||||
// Find page ID from imported data or use current page
|
||||
const importedPageId = jsonData.pages?.[0]?.id || 'page:default'
|
||||
const currentPageId = editor.getCurrentPageId()
|
||||
|
|
@ -455,7 +448,6 @@ export function CustomMainMenu() {
|
|||
assets: jsonData.assets || [],
|
||||
}
|
||||
} else {
|
||||
console.log('Detected unknown format, attempting fallback')
|
||||
// Try to extract shapes from any other format
|
||||
const pageId = 'page:default'
|
||||
// Filter out null shapes (shapes that failed validation)
|
||||
|
|
@ -480,7 +472,6 @@ export function CustomMainMenu() {
|
|||
}
|
||||
|
||||
// Validate all required properties
|
||||
console.log('Final contentToImport:', contentToImport)
|
||||
|
||||
if (!contentToImport.shapes || !Array.isArray(contentToImport.shapes)) {
|
||||
console.error('Invalid JSON format: missing or invalid shapes array')
|
||||
|
|
@ -530,7 +521,6 @@ export function CustomMainMenu() {
|
|||
// Validate and normalize shape type
|
||||
const normalizedType = validateAndNormalizeShapeType(shape)
|
||||
if (normalizedType !== shape.type) {
|
||||
console.log(`🔧 Normalizing shape type "${shape.type}" to "${normalizedType}" for shape:`, shape.id)
|
||||
shape.type = normalizedType
|
||||
|
||||
// If converted to text, set up proper text shape props
|
||||
|
|
@ -584,7 +574,6 @@ export function CustomMainMenu() {
|
|||
})
|
||||
}
|
||||
|
||||
console.log('About to call putContentOntoCurrentPage with:', contentToImport)
|
||||
|
||||
try {
|
||||
editor.putContentOntoCurrentPage(contentToImport, { select: true })
|
||||
|
|
@ -593,7 +582,6 @@ export function CustomMainMenu() {
|
|||
|
||||
// Fallback: Create shapes individually
|
||||
if (contentToImport.shapes && contentToImport.shapes.length > 0) {
|
||||
console.log('Attempting to create shapes individually...')
|
||||
|
||||
// Clear current page first
|
||||
const currentShapes = editor.getCurrentPageShapes()
|
||||
|
|
@ -614,7 +602,6 @@ export function CustomMainMenu() {
|
|||
// CRITICAL: Validate and normalize shape type
|
||||
const normalizedType = validateAndNormalizeShapeType(shape)
|
||||
if (normalizedType !== shape.type) {
|
||||
console.log(`🔧 Normalizing shape type "${shape.type}" to "${normalizedType}" for shape:`, shape.id)
|
||||
shape.type = normalizedType
|
||||
|
||||
// If converted to text, set up proper text shape props
|
||||
|
|
@ -690,7 +677,6 @@ export function CustomMainMenu() {
|
|||
})
|
||||
}
|
||||
|
||||
console.log('Individual shape creation completed')
|
||||
} else {
|
||||
alert('No valid shapes found in the JSON file.')
|
||||
}
|
||||
|
|
@ -770,7 +756,6 @@ export function CustomMainMenu() {
|
|||
// Get all shapes on the current page
|
||||
const shapes = editor.getCurrentPageShapes()
|
||||
if (shapes.length === 0) {
|
||||
console.log("No shapes to fit to")
|
||||
return
|
||||
}
|
||||
|
||||
|
|
@ -789,7 +774,6 @@ export function CustomMainMenu() {
|
|||
const maxDimension = Math.max(width, height)
|
||||
const zoom = Math.min(1, 800 / maxDimension) // Fit in 800px viewport
|
||||
|
||||
console.log("Fitting to content:", { bounds, centerX, centerY, zoom })
|
||||
|
||||
// Set camera to show all shapes
|
||||
editor.setCamera({ x: centerX, y: centerY, z: zoom })
|
||||
|
|
|
|||
|
|
@ -1392,7 +1392,6 @@ export function MycelialIntelligenceBar() {
|
|||
break
|
||||
|
||||
default:
|
||||
console.log(`Direct input not implemented for ${toolType}`)
|
||||
}
|
||||
|
||||
// Clear the prompt
|
||||
|
|
@ -1431,7 +1430,6 @@ export function MycelialIntelligenceBar() {
|
|||
})
|
||||
setFollowUpSuggestions(newFollowUps)
|
||||
|
||||
console.log(`Spawned ${tool.displayName} on canvas`)
|
||||
}
|
||||
}, [editor])
|
||||
|
||||
|
|
@ -1450,7 +1448,6 @@ export function MycelialIntelligenceBar() {
|
|||
|
||||
if (ids.length > 0) {
|
||||
setSpawnedToolIds(prev => new Set([...prev, ...toolsToSpawn.map(t => t.id)]))
|
||||
console.log(`Spawned ${ids.length} tools on canvas`)
|
||||
}
|
||||
}, [editor, suggestedTools, spawnedToolIds])
|
||||
|
||||
|
|
|
|||
|
|

@ -154,13 +154,11 @@ export function SettingsDialog({ onClose }: TLUiDialogProps) {

// If user is logged in, save to user-specific storage
if (session.authed && session.username) {
console.log(`💾 Saving user-specific settings for ${session.username}:`, settings);
localStorage.setItem(`${session.username}_api_keys`, JSON.stringify(settings))

// Also save to global storage as fallback
localStorage.setItem("openai_api_key", JSON.stringify(settings))
} else {
console.log("💾 Saving global settings to localStorage:", settings);
localStorage.setItem("openai_api_key", JSON.stringify(settings))
}
}

@ -1640,7 +1640,6 @@ function CustomSharePanel() {
roomId={boardId}
onClose={() => setShowVersionHistory(false)}
onRevert={(hash) => {
console.log('Reverted to version:', hash)
window.location.reload()
}}
isDarkMode={isDarkMode}
@ -480,15 +480,13 @@ export const overrides: TLUiOverrides = {
|
|||
onSelect: async () => {
|
||||
try {
|
||||
// Create a simple modal/prompt for AI response
|
||||
const answer = await askCanvasAI(editor, undefined, (partial, done) => {
|
||||
const answer = await askCanvasAI(editor, undefined, (_partial, done) => {
|
||||
// Log streaming response to console for now
|
||||
if (!done) {
|
||||
console.log("AI response:", partial)
|
||||
}
|
||||
})
|
||||
if (answer) {
|
||||
// Could display in a UI element - for now show alert with result
|
||||
console.log("Canvas AI answer:", answer)
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Canvas AI error:", error)
|
||||
|
|
@ -502,11 +500,8 @@ export const overrides: TLUiOverrides = {
|
|||
readonlyOk: true,
|
||||
onSelect: async () => {
|
||||
try {
|
||||
console.log("Starting canvas indexing...")
|
||||
await indexCanvasForSearch(editor, (progress) => {
|
||||
console.log(`Indexing progress: ${progress.toFixed(1)}%`)
|
||||
await indexCanvasForSearch(editor, (_progress) => {
|
||||
})
|
||||
console.log("Canvas indexing complete!")
|
||||
} catch (error) {
|
||||
console.error("Canvas indexing error:", error)
|
||||
}
|
||||
|
|
@ -519,10 +514,8 @@ export const overrides: TLUiOverrides = {
|
|||
readonlyOk: true,
|
||||
onSelect: async () => {
|
||||
try {
|
||||
console.log("Analyzing viewport...")
|
||||
await explainViewport(editor, (partial, done) => {
|
||||
await explainViewport(editor, (_partial, done) => {
|
||||
if (!done) {
|
||||
console.log("Viewport analysis:", partial)
|
||||
}
|
||||
})
|
||||
} catch (error) {
|
||||
|
|
@ -537,12 +530,10 @@ export const overrides: TLUiOverrides = {
|
|||
readonlyOk: true,
|
||||
onSelect: async () => {
|
||||
if (editor.getSelectedShapeIds().length === 0) {
|
||||
console.log("Select a shape first to find similar ones")
|
||||
return
|
||||
}
|
||||
try {
|
||||
const results = await findSimilarToSelection(editor)
|
||||
console.log(`Found ${results.length} similar shapes`)
|
||||
} catch (error) {
|
||||
console.error("Find similar error:", error)
|
||||
}
|
||||
|
|
|
|||
|
|

@ -64,7 +64,6 @@ export class AudioAnalyzer {
if (this.audioContext && this.analyser) {
this.microphone = this.audioContext.createMediaStreamSource(stream)
this.microphone.connect(this.analyser)
console.log('🎤 Microphone connected to audio analyzer')
}
}

@ -233,7 +232,6 @@ export class AudioAnalyzer {
}
this.speakers.set(newSpeakerId, newSpeaker)
bestMatch = newSpeakerId
console.log(`🎤 New speaker identified: ${newSpeaker.name} (${newSpeakerId})`)
} else {
// Update existing speaker profile
const speaker = this.speakers.get(bestMatch)!

@ -307,7 +305,6 @@ export class AudioAnalyzer {
const speaker = this.speakers.get(speakerId)
if (speaker) {
speaker.name = name
console.log(`🎤 Updated speaker name: ${speakerId} -> ${name}`)
}
}

@ -320,7 +320,6 @@ export function setupMultiPasteHandler(editor: Editor): () => void {
e.preventDefault()
e.stopPropagation()

console.log(`📋 Multi-paste: ${imageFiles.length} images, ${urls.length} URLs`)

// Calculate grid positions
const positions = calculateGridPositions(center, totalItems)