fix: register Calendar and Drawfast shapes in automerge store

Added missing Calendar and Drawfast shapes to the automerge store
schema registration to fix ValidationError when using these tools.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
Jeff Emmett 2025-12-24 10:36:51 -05:00
parent c4cb97c0bf
commit c6ed0b77d8
4 changed files with 134 additions and 509 deletions
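Background for the fix (a minimal sketch, not the project's actual hook): a tldraw store only accepts records whose shape type is registered in its schema, so a document that already contains Calendar or Drawfast shapes fails validation when loaded into a store built without those shape utils. The sketch below assumes tldraw v2's `createTLStore`/`defaultShapeUtils` API; the shape-util module paths are taken from the diff that follows.

```typescript
import { createTLStore, defaultShapeUtils } from "tldraw"
// Project shape utils; paths as they appear in the useAutomergeStoreV2 hunks below
import { CalendarShape } from "@/shapes/CalendarShapeUtil"
import { DrawfastShape } from "@/shapes/DrawfastShapeUtil"

// Leaving CalendarShape/DrawfastShape out of this list is what produced the
// ValidationError: records of type 'Calendar'/'Drawfast' had no schema entry.
const store = createTLStore({
  shapeUtils: [...defaultShapeUtils, CalendarShape, DrawfastShape],
})
```

The useAutomergeStoreV2 hunks below do the equivalent inside the app's Automerge-backed store: the new shape utils are appended to the custom shape-util array and their type names to the explicit type-name list the schema is built from.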


@@ -23,20 +23,16 @@ export class CloudflareAdapter {
   async getHandle(roomId: string): Promise<DocHandle<TLStoreSnapshot>> {
     if (!this.handles.has(roomId)) {
-      console.log(`Creating new Automerge handle for room ${roomId}`)
       const handle = this.repo.create<TLStoreSnapshot>()
       // Initialize with default store if this is a new document
       handle.change((doc) => {
         if (!doc.store) {
-          console.log("Initializing new document with default store")
           init(doc)
         }
       })
       this.handles.set(roomId, handle)
-    } else {
-      console.log(`Reusing existing Automerge handle for room ${roomId}`)
     }
     return this.handles.get(roomId)!
@@ -72,13 +68,11 @@ export class CloudflareAdapter {
   async saveToCloudflare(roomId: string): Promise<void> {
     const handle = this.handles.get(roomId)
     if (!handle) {
-      console.log(`No handle found for room ${roomId}`)
       return
     }
     const doc = handle.doc()
     if (!doc) {
-      console.log(`No document found for room ${roomId}`)
       return
     }
@@ -114,7 +108,6 @@ export class CloudflareAdapter {
   async loadFromCloudflare(roomId: string): Promise<TLStoreSnapshot | null> {
     try {
       // Add retry logic for connection issues
       let response: Response;
       let retries = 3;
@@ -131,7 +124,7 @@ export class CloudflareAdapter {
           }
         }
       }
       if (!response!.ok) {
         if (response!.status === 404) {
           return null // Room doesn't exist yet
@@ -141,12 +134,7 @@ export class CloudflareAdapter {
       }
       const doc = await response!.json() as TLStoreSnapshot
-      console.log(`Successfully loaded document from Cloudflare for room ${roomId}:`, {
-        hasStore: !!doc.store,
-        storeKeys: doc.store ? Object.keys(doc.store).length : 0
-      })
       // Initialize the last persisted state with the loaded document
       if (doc) {
         const docHash = this.generateDocHash(doc)
@@ -202,7 +190,6 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
   private setConnectionState(state: ConnectionState): void {
     if (this._connectionState !== state) {
-      console.log(`🔌 Connection state: ${this._connectionState}${state}`)
       this._connectionState = state
       this.connectionStateListeners.forEach(listener => listener(state))
     }
@@ -237,7 +224,6 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
     // Set up network online/offline listeners
     this.networkOnlineHandler = () => {
-      console.log('🌐 Network: online')
       this._isNetworkOnline = true
       // Trigger reconnect if we were disconnected
       if (this._connectionState === 'disconnected' && this.peerId) {
@@ -246,7 +232,6 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
       }
     }
     this.networkOfflineHandler = () => {
-      console.log('🌐 Network: offline')
       this._isNetworkOnline = false
       if (this._connectionState === 'connected') {
         this.setConnectionState('disconnected')
@@ -273,12 +258,10 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
    * @param documentId The Automerge document ID to use for incoming messages
    */
   setDocumentId(documentId: string): void {
-    console.log('📋 CloudflareAdapter: Setting documentId:', documentId)
     this.currentDocumentId = documentId
     // Process any buffered binary messages now that we have a documentId
     if (this.pendingBinaryMessages.length > 0) {
-      console.log(`📦 CloudflareAdapter: Processing ${this.pendingBinaryMessages.length} buffered binary messages`)
       const bufferedMessages = this.pendingBinaryMessages
       this.pendingBinaryMessages = []
@@ -290,7 +273,6 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
         targetId: this.peerId || ('unknown' as PeerId),
         documentId: this.currentDocumentId as any
       }
-      console.log('📥 CloudflareAdapter: Emitting buffered sync message with documentId:', this.currentDocumentId, 'size:', binaryData.byteLength)
       this.emit('message', message)
     }
   }
@@ -305,7 +287,6 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
   connect(peerId: PeerId, peerMetadata?: PeerMetadata): void {
     if (this.isConnecting) {
-      console.log('🔌 CloudflareAdapter: Connection already in progress, skipping')
       return
     }
@@ -329,33 +310,27 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
     const wsUrl = `${protocol}${baseUrl}/connect/${this.roomId}?sessionId=${sessionId}`
     this.isConnecting = true
     // Add a small delay to ensure the server is ready
     setTimeout(() => {
       try {
-        console.log('🔌 CloudflareAdapter: Creating WebSocket connection to:', wsUrl)
         this.websocket = new WebSocket(wsUrl)
         this.websocket.onopen = () => {
-          console.log('🔌 CloudflareAdapter: WebSocket connection opened successfully')
           this.isConnecting = false
           this.reconnectAttempts = 0
           this.setConnectionState('connected')
           this.readyResolve?.()
           this.startKeepAlive()
-          // CRITICAL: Emit 'ready' event for Automerge Repo
-          // This tells the Repo that the network adapter is ready to sync
+          // Emit 'ready' event for Automerge Repo
           // @ts-expect-error - 'ready' event is valid but not in NetworkAdapterEvents type
           this.emit('ready', { network: this })
           // Create a server peer ID based on the room
-          // The server acts as a "hub" peer that all clients sync with
           this.serverPeerId = `server-${this.roomId}` as PeerId
-          // CRITICAL: Emit 'peer-candidate' to announce the server as a sync peer
-          // This tells the Automerge Repo there's a peer to sync documents with
-          console.log('🔌 CloudflareAdapter: Announcing server peer for Automerge sync:', this.serverPeerId)
+          // Emit 'peer-candidate' to announce the server as a sync peer
           this.emit('peer-candidate', {
             peerId: this.serverPeerId,
             peerMetadata: { storageId: undefined, isEphemeral: false }
@@ -367,16 +342,8 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
       // Automerge's native protocol uses binary messages
       // We need to handle both binary and text messages
       if (event.data instanceof ArrayBuffer) {
-        console.log('🔌 CloudflareAdapter: Received binary message (Automerge protocol)', event.data.byteLength, 'bytes')
-        // Handle binary Automerge sync messages - convert ArrayBuffer to Uint8Array
-        // Automerge Repo expects binary sync messages as Uint8Array
-        // CRITICAL: senderId should be the SERVER (where the message came from)
-        // targetId should be US (where the message is going to)
-        // CRITICAL: Include documentId for Automerge Repo to route the message correctly
        const binaryData = new Uint8Array(event.data)
        if (!this.currentDocumentId) {
-          console.log('📦 CloudflareAdapter: Buffering binary sync message (no documentId yet), size:', binaryData.byteLength)
-          // Buffer for later processing when we have a documentId
          this.pendingBinaryMessages.push(binaryData)
          return
        }
@@ -385,17 +352,13 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
          data: binaryData,
          senderId: this.serverPeerId || ('server' as PeerId),
          targetId: this.peerId || ('unknown' as PeerId),
-          documentId: this.currentDocumentId as any // DocumentId type
+          documentId: this.currentDocumentId as any
        }
-        console.log('📥 CloudflareAdapter: Emitting sync message with documentId:', this.currentDocumentId)
        this.emit('message', message)
      } else if (event.data instanceof Blob) {
-        // Handle Blob messages (convert to Uint8Array)
        event.data.arrayBuffer().then((buffer) => {
-          console.log('🔌 CloudflareAdapter: Received Blob message, converted to Uint8Array', buffer.byteLength, 'bytes')
          const binaryData = new Uint8Array(buffer)
          if (!this.currentDocumentId) {
-            console.log('📦 CloudflareAdapter: Buffering Blob sync message (no documentId yet), size:', binaryData.byteLength)
            this.pendingBinaryMessages.push(binaryData)
            return
          }
@@ -406,18 +369,12 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
            targetId: this.peerId || ('unknown' as PeerId),
            documentId: this.currentDocumentId as any
          }
-          console.log('📥 CloudflareAdapter: Emitting Blob sync message with documentId:', this.currentDocumentId)
          this.emit('message', message)
        })
      } else {
        // Handle text messages (our custom protocol for backward compatibility)
        const message = JSON.parse(event.data)
-        // Only log non-presence messages to reduce console spam
-        if (message.type !== 'presence' && message.type !== 'pong') {
-          console.log('🔌 CloudflareAdapter: Received WebSocket message:', message.type)
-        }
        // Handle ping/pong messages for keep-alive
        if (message.type === 'ping') {
          this.sendPong()
@@ -426,13 +383,11 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
        // Handle test messages
        if (message.type === 'test') {
-          console.log('🔌 CloudflareAdapter: Received test message:', message.message)
          return
        }
        // Handle presence updates from other clients
        if (message.type === 'presence') {
-          // Pass senderId, userName, and userColor so we can create proper instance_presence records
          if (this.onPresenceUpdate && message.userId && message.data) {
            this.onPresenceUpdate(message.userId, message.data, message.senderId, message.userName, message.userColor)
          }
@@ -441,49 +396,31 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
        // Handle leave messages (user disconnected)
        if (message.type === 'leave') {
-          console.log('👋 CloudflareAdapter: User left:', message.sessionId)
          if (this.onPresenceLeave && message.sessionId) {
            this.onPresenceLeave(message.sessionId)
          }
          return
        }
        // Convert the message to the format expected by Automerge
        if (message.type === 'sync' && message.data) {
-          console.log('🔌 CloudflareAdapter: Received sync message with data:', {
-            hasStore: !!message.data.store,
-            storeKeys: message.data.store ? Object.keys(message.data.store).length : 0,
-            documentId: message.documentId,
-            documentIdType: typeof message.documentId
-          })
          // JSON sync for real-time collaboration
-          // When we receive TLDraw changes from other clients, apply them locally
          const isJsonDocumentData = message.data && typeof message.data === 'object' && message.data.store
          if (isJsonDocumentData) {
-            console.log('📥 CloudflareAdapter: Received JSON sync message with store data')
-            // Call the JSON sync callback to apply changes
            if (this.onJsonSyncData) {
              this.onJsonSyncData(message.data)
-            } else {
-              console.warn('⚠️ No JSON sync callback registered')
            }
-            return // JSON sync handled
+            return
          }
-          // Validate documentId - Automerge requires a valid Automerge URL format
-          // Valid formats: "automerge:xxxxx" or other valid URL formats
-          // Invalid: plain strings like "default", "default-room", etc.
-          const isValidDocumentId = message.documentId &&
-            (typeof message.documentId === 'string' &&
-              (message.documentId.startsWith('automerge:') ||
-                message.documentId.includes(':') ||
-                /^[a-f0-9-]{36,}$/i.test(message.documentId))) // UUID-like format
+          // Validate documentId format
+          const isValidDocumentId = message.documentId &&
+            (typeof message.documentId === 'string' &&
+              (message.documentId.startsWith('automerge:') ||
+                message.documentId.includes(':') ||
+                /^[a-f0-9-]{36,}$/i.test(message.documentId)))
-          // For binary sync messages, use Automerge's sync protocol
-          // Only include documentId if it's a valid Automerge document ID format
          const syncMessage: Message = {
            type: 'sync',
            senderId: message.senderId || this.peerId || ('unknown' as PeerId),
@@ -491,42 +428,22 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
            data: message.data,
            ...(isValidDocumentId && { documentId: message.documentId })
          }
-          if (message.documentId && !isValidDocumentId) {
-            console.warn('⚠️ CloudflareAdapter: Ignoring invalid documentId from server:', message.documentId)
-          }
          this.emit('message', syncMessage)
        } else if (message.senderId && message.targetId) {
          this.emit('message', message as Message)
        }
      }
    } catch (error) {
-      console.error('❌ CloudflareAdapter: Error parsing WebSocket message:', error)
+      console.error('Error parsing WebSocket message:', error)
    }
  }
  this.websocket.onclose = (event) => {
-    console.log('Disconnected from Cloudflare WebSocket', {
-      code: event.code,
-      reason: event.reason,
-      wasClean: event.wasClean,
-      url: wsUrl,
-      reconnectAttempts: this.reconnectAttempts
-    })
    this.isConnecting = false
    this.stopKeepAlive()
-    // Log specific error codes for debugging
-    if (event.code === 1005) {
-      console.error('❌ WebSocket closed with code 1005 (No Status Received) - this usually indicates a connection issue or idle timeout')
-    } else if (event.code === 1006) {
-      console.error('❌ WebSocket closed with code 1006 (Abnormal Closure) - connection was lost unexpectedly')
-    } else if (event.code === 1011) {
-      console.error('❌ WebSocket closed with code 1011 (Server Error) - server encountered an error')
-    } else if (event.code === 1000) {
-      console.log('✅ WebSocket closed normally (code 1000)')
+    if (event.code === 1000) {
      this.setConnectionState('disconnected')
      return // Don't reconnect on normal closure
    }
@@ -544,15 +461,7 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
      this.scheduleReconnect(peerId, peerMetadata)
    }
-    this.websocket.onerror = (error) => {
-      console.error('WebSocket error:', error)
-      console.error('WebSocket readyState:', this.websocket?.readyState)
-      console.error('WebSocket URL:', wsUrl)
-      console.error('Error event details:', {
-        type: error.type,
-        target: error.target,
-        isTrusted: error.isTrusted
-      })
+    this.websocket.onerror = () => {
      this.isConnecting = false
    }
  } catch (error) {
@@ -564,25 +473,10 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
   }
   send(message: Message): void {
-    // Only log non-presence messages to reduce console spam
-    if (message.type !== 'presence') {
-      console.log('📤 CloudflareAdapter.send() called:', {
-        messageType: message.type,
-        dataType: (message as any).data?.constructor?.name || typeof (message as any).data,
-        dataLength: (message as any).data?.byteLength || (message as any).data?.length,
-        documentId: (message as any).documentId,
-        hasTargetId: !!message.targetId,
-        hasSenderId: !!message.senderId,
-        useBinarySync: this.useBinarySync
-      })
-    }
-    // CRITICAL: Capture documentId from outgoing sync messages
-    // This allows us to use it for incoming messages from the server
+    // Capture documentId from outgoing sync messages
     if (message.type === 'sync' && (message as any).documentId) {
       const docId = (message as any).documentId
       if (this.currentDocumentId !== docId) {
-        console.log('📋 CloudflareAdapter: Captured documentId from outgoing sync:', docId)
         this.currentDocumentId = docId
       }
     }
@@ -590,49 +484,14 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
     if (this.websocket && this.websocket.readyState === WebSocket.OPEN) {
       // Check if this is a binary sync message from Automerge Repo
       if (message.type === 'sync' && (message as any).data instanceof ArrayBuffer) {
-        console.log('📤 CloudflareAdapter: Sending binary sync message (Automerge protocol)', {
-          dataLength: (message as any).data.byteLength,
-          documentId: (message as any).documentId,
-          targetId: message.targetId
-        })
-        // Send binary data directly for Automerge's native sync protocol
         this.websocket.send((message as any).data)
-        return // CRITICAL: Don't fall through to JSON send
+        return
       } else if (message.type === 'sync' && (message as any).data instanceof Uint8Array) {
-        console.log('📤 CloudflareAdapter: Sending Uint8Array sync message (Automerge protocol)', {
-          dataLength: (message as any).data.length,
-          documentId: (message as any).documentId,
-          targetId: message.targetId
-        })
-        // Send Uint8Array directly - WebSocket accepts Uint8Array
        this.websocket.send((message as any).data)
-        return // CRITICAL: Don't fall through to JSON send
+        return
      } else {
-        // Handle text-based messages (backward compatibility and control messages)
-        // Only log non-presence messages
-        if (message.type !== 'presence') {
-          console.log('📤 Sending WebSocket message:', message.type)
-        }
-        // Debug: Log patch content if it's a patch message
-        if (message.type === 'patch' && (message as any).patches) {
-          console.log('🔍 Sending patches:', (message as any).patches.length, 'patches')
-          ;(message as any).patches.forEach((patch: any, index: number) => {
-            console.log(` Patch ${index}:`, {
-              action: patch.action,
-              path: patch.path,
-              value: patch.value ? (typeof patch.value === 'object' ? 'object' : patch.value) : 'undefined'
-            })
-          })
-        }
        this.websocket.send(JSON.stringify(message))
      }
-    } else {
-      if (message.type !== 'presence') {
-        console.warn('⚠️ CloudflareAdapter: Cannot send message - WebSocket not open', {
-          messageType: message.type,
-          readyState: this.websocket?.readyState
-        })
-      }
    }
  }
@@ -669,7 +528,6 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
          type: 'leave',
          sessionId: this.sessionId
        }))
-        console.log('👋 CloudflareAdapter: Sent leave message for session:', this.sessionId)
      } catch (e) {
        // Ignore errors when sending leave message
      }
@@ -683,13 +541,12 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
    // Send ping every 30 seconds to prevent idle timeout
    this.keepAliveInterval = setInterval(() => {
      if (this.websocket && this.websocket.readyState === WebSocket.OPEN) {
-        console.log('🔌 CloudflareAdapter: Sending keep-alive ping')
        this.websocket.send(JSON.stringify({
          type: 'ping',
          timestamp: Date.now()
        }))
      }
-    }, 30000) // 30 seconds
+    }, 30000)
  }
  private stopKeepAlive(): void {
@@ -710,18 +567,14 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
  private scheduleReconnect(peerId: PeerId, peerMetadata?: PeerMetadata): void {
    if (this.reconnectAttempts >= this.maxReconnectAttempts) {
-      console.error('❌ CloudflareAdapter: Max reconnection attempts reached, giving up')
      return
    }
    this.reconnectAttempts++
-    const delay = Math.min(this.reconnectDelay * Math.pow(2, this.reconnectAttempts - 1), 30000) // Max 30 seconds
-    console.log(`🔄 CloudflareAdapter: Scheduling reconnect attempt ${this.reconnectAttempts}/${this.maxReconnectAttempts} in ${delay}ms`)
+    const delay = Math.min(this.reconnectDelay * Math.pow(2, this.reconnectAttempts - 1), 30000)
    this.reconnectTimeout = setTimeout(() => {
      if (this.roomId) {
-        console.log(`🔄 CloudflareAdapter: Attempting reconnect ${this.reconnectAttempts}/${this.maxReconnectAttempts}`)
        this.connect(peerId, peerMetadata)
      }
    }, delay)


@@ -132,6 +132,10 @@ import { MultmuxShape } from "@/shapes/MultmuxShapeUtil"
 import { MycelialIntelligenceShape } from "@/shapes/MycelialIntelligenceShapeUtil"
 // Open Mapping - OSM map shape for geographic visualization
 import { MapShape } from "@/shapes/MapShapeUtil"
+// Calendar shape for calendar functionality
+import { CalendarShape } from "@/shapes/CalendarShapeUtil"
+// Drawfast shape for quick drawing/sketching
+import { DrawfastShape } from "@/shapes/DrawfastShapeUtil"
 export function useAutomergeStoreV2({
   handle,
@@ -169,6 +173,8 @@ export function useAutomergeStoreV2({
     MultmuxShape,
     MycelialIntelligenceShape, // Deprecated - kept for backwards compatibility
     MapShape, // Open Mapping - OSM map shape
+    CalendarShape, // Calendar with view switching
+    DrawfastShape, // Drawfast quick sketching
   ]
   // CRITICAL: Explicitly list ALL custom shape types to ensure they're registered
@@ -193,6 +199,8 @@ export function useAutomergeStoreV2({
     'Multmux',
     'MycelialIntelligence', // Deprecated - kept for backwards compatibility
     'Map', // Open Mapping - OSM map shape
+    'Calendar', // Calendar with view switching
+    'Drawfast', // Drawfast quick sketching
   ]
   // Build schema with explicit entries for all custom shapes


@@ -82,58 +82,48 @@ export const ObsidianVaultBrowser: React.FC<ObsidianVaultBrowserProps> = ({
   // Save vault to Automerge store
   const saveVaultToAutomerge = (vault: ObsidianVault) => {
     if (!automergeHandle) {
-      console.warn('⚠️ Automerge handle not available, saving to localStorage only')
       try {
         const vaultRecord = importer.vaultToRecord(vault)
         localStorage.setItem(`obsidian_vault_cache:${vault.name}`, JSON.stringify({
           ...vaultRecord,
           lastImported: vaultRecord.lastImported instanceof Date ? vaultRecord.lastImported.toISOString() : vaultRecord.lastImported
         }))
-        console.log('🔧 Saved vault to localStorage (Automerge handle not available):', vaultRecord.id)
       } catch (localStorageError) {
-        console.warn('⚠️ Could not save vault to localStorage:', localStorageError)
+        console.warn('Could not save vault to localStorage:', localStorageError)
       }
       return
     }
     try {
       const vaultRecord = importer.vaultToRecord(vault)
       // Save directly to Automerge, bypassing TLDraw store validation
-      // This allows us to save custom record types like obsidian_vault
       automergeHandle.change((doc: any) => {
-        // Ensure doc.store exists
         if (!doc.store) {
           doc.store = {}
         }
-        // Save the vault record directly to Automerge store
-        // Convert Date to ISO string for serialization
         const recordToSave = {
           ...vaultRecord,
           lastImported: vaultRecord.lastImported instanceof Date
             ? vaultRecord.lastImported.toISOString()
             : vaultRecord.lastImported
         }
         doc.store[vaultRecord.id] = recordToSave
       })
-      console.log('🔧 Saved vault to Automerge:', vaultRecord.id)
       // Also save to localStorage as a backup
       try {
         localStorage.setItem(`obsidian_vault_cache:${vault.name}`, JSON.stringify({
           ...vaultRecord,
           lastImported: vaultRecord.lastImported instanceof Date ? vaultRecord.lastImported.toISOString() : vaultRecord.lastImported
         }))
-        console.log('🔧 Saved vault to localStorage as backup:', vaultRecord.id)
       } catch (localStorageError) {
-        console.warn('⚠️ Could not save vault to localStorage:', localStorageError)
+        // Silent fail for backup
       }
     } catch (error) {
-      console.error('❌ Error saving vault to Automerge:', error)
-      // Don't throw - allow vault loading to continue even if saving fails
+      console.error('Error saving vault to Automerge:', error)
       // Try localStorage as fallback
       try {
         const vaultRecord = importer.vaultToRecord(vault)
@@ -141,9 +131,8 @@ export const ObsidianVaultBrowser: React.FC<ObsidianVaultBrowserProps> = ({
           ...vaultRecord,
           lastImported: vaultRecord.lastImported instanceof Date ? vaultRecord.lastImported.toISOString() : vaultRecord.lastImported
         }))
-        console.log('🔧 Saved vault to localStorage as fallback:', vaultRecord.id)
       } catch (localStorageError) {
-        console.warn('⚠️ Could not save vault to localStorage:', localStorageError)
+        console.warn('Could not save vault to localStorage:', localStorageError)
       }
     }
   }
@@ -157,10 +146,8 @@ export const ObsidianVaultBrowser: React.FC<ObsidianVaultBrowserProps> = ({
       if (doc && doc.store) {
         const vaultId = `obsidian_vault:${vaultName}`
         const vaultRecord = doc.store[vaultId] as ObsidianVaultRecord | undefined
         if (vaultRecord && vaultRecord.typeName === 'obsidian_vault') {
-          console.log('🔧 Loaded vault from Automerge:', vaultId)
-          // Convert date string back to Date object if needed
           const recordCopy = JSON.parse(JSON.stringify(vaultRecord))
           if (typeof recordCopy.lastImported === 'string') {
             recordCopy.lastImported = new Date(recordCopy.lastImported)
@@ -169,18 +156,16 @@ export const ObsidianVaultBrowser: React.FC<ObsidianVaultBrowserProps> = ({
         }
       }
     } catch (error) {
-      console.warn('⚠️ Could not load vault from Automerge:', error)
+      // Fall through to localStorage
     }
   }
   // Try localStorage as fallback
   try {
     const cached = localStorage.getItem(`obsidian_vault_cache:${vaultName}`)
     if (cached) {
       const vaultRecord = JSON.parse(cached) as ObsidianVaultRecord
       if (vaultRecord && vaultRecord.typeName === 'obsidian_vault') {
-        console.log('🔧 Loaded vault from localStorage cache:', vaultName)
-        // Convert date string back to Date object
         if (typeof vaultRecord.lastImported === 'string') {
           vaultRecord.lastImported = new Date(vaultRecord.lastImported)
         }
@@ -188,9 +173,9 @@ export const ObsidianVaultBrowser: React.FC<ObsidianVaultBrowserProps> = ({
       }
     }
   } catch (e) {
-    console.warn('⚠️ Could not load vault from localStorage:', e)
+    // Silent fail
   }
   return null
 }
@@ -198,47 +183,31 @@ export const ObsidianVaultBrowser: React.FC<ObsidianVaultBrowserProps> = ({
   useEffect(() => {
     // Prevent multiple loads if already loading or already loaded once
     if (isLoadingVault || hasLoadedOnce) {
-      console.log('🔧 ObsidianVaultBrowser: Skipping load - already loading or loaded once')
       return
     }
-    console.log('🔧 ObsidianVaultBrowser: Component mounted, checking user identity for vault...')
-    console.log('🔧 Current session vault data:', {
-      path: session.obsidianVaultPath,
-      name: session.obsidianVaultName,
-      authed: session.authed,
-      username: session.username
-    })
     // FIRST PRIORITY: Try to load from user's configured vault in session (user identity)
     if (session.obsidianVaultPath && session.obsidianVaultPath !== 'folder-selected') {
-      console.log('✅ Found configured vault in user identity:', session.obsidianVaultPath)
-      console.log('🔧 Loading vault from user identity...')
       // First try to load from Automerge cache for faster loading
       if (session.obsidianVaultName) {
         const cachedVault = loadVaultFromAutomerge(session.obsidianVaultName)
         if (cachedVault) {
-          console.log('✅ Loaded vault from Automerge cache')
           setVault(cachedVault)
           setIsLoading(false)
           setHasLoadedOnce(true)
           return
         }
       }
       // If not in cache, load from source (Quartz URL or local path)
-      console.log('🔧 Loading vault from source:', session.obsidianVaultPath)
       loadVault(session.obsidianVaultPath)
     } else if (session.obsidianVaultPath === 'folder-selected' && session.obsidianVaultName) {
-      console.log('🔧 Vault was previously selected via folder picker, showing reselect interface')
       // For folder-selected vaults, we can't reload them, so show a special reselect interface
       setVault(null)
       setShowFolderReselect(true)
       setIsLoading(false)
       setHasLoadedOnce(true)
     } else {
-      console.log('⚠️ No vault configured in user identity, showing empty state...')
       setVault(null)
       setIsLoading(false)
       setHasLoadedOnce(true)
@@ -250,30 +219,28 @@ export const ObsidianVaultBrowser: React.FC<ObsidianVaultBrowserProps> = ({
     // Check if values actually changed (not just object reference)
     const vaultPathChanged = previousVaultPathRef.current !== session.obsidianVaultPath
     const vaultNameChanged = previousVaultNameRef.current !== session.obsidianVaultName
     // If vault is already loaded and values haven't changed, don't do anything
     if (hasLoadedOnce && !vaultPathChanged && !vaultNameChanged) {
-      return // Already loaded and nothing changed, no need to reload
+      return
     }
     // Update refs to current values
     previousVaultPathRef.current = session.obsidianVaultPath
     previousVaultNameRef.current = session.obsidianVaultName
     // Only proceed if values actually changed and we haven't loaded yet
     if (!vaultPathChanged && !vaultNameChanged) {
-      return // Values haven't changed, no need to reload
+      return
     }
     if (hasLoadedOnce || isLoadingVault) {
-      return // Don't reload if we've already loaded or are currently loading
+      return
     }
     if (session.obsidianVaultPath && session.obsidianVaultPath !== 'folder-selected') {
-      console.log('🔧 Session vault path changed, loading vault:', session.obsidianVaultPath)
       loadVault(session.obsidianVaultPath)
     } else if (session.obsidianVaultPath === 'folder-selected' && session.obsidianVaultName) {
-      console.log('🔧 Session shows folder-selected vault, showing reselect interface')
       setVault(null)
       setShowFolderReselect(true)
       setIsLoading(false)
@@ -284,7 +251,6 @@ export const ObsidianVaultBrowser: React.FC<ObsidianVaultBrowserProps> = ({
   // Auto-open folder picker if requested
   useEffect(() => {
     if (autoOpenFolderPicker) {
-      console.log('Auto-opening folder picker...')
       handleFolderPicker()
     }
   }, [autoOpenFolderPicker])
@@ -312,7 +278,6 @@ export const ObsidianVaultBrowser: React.FC<ObsidianVaultBrowserProps> = ({
   useEffect(() => {
     const handleKeyDown = (event: KeyboardEvent) => {
       if (event.key === 'Escape') {
-        console.log('🔧 ESC key pressed, closing vault browser')
         onClose()
       }
     }
@@ -326,57 +291,38 @@ export const ObsidianVaultBrowser: React.FC<ObsidianVaultBrowserProps> = ({
   const loadVault = async (path?: string) => {
     // Prevent concurrent loading operations
     if (isLoadingVault) {
-      console.log('🔧 loadVault: Already loading, skipping concurrent request')
       return
     }
     setIsLoadingVault(true)
     setIsLoading(true)
     setError(null)
     try {
       if (path) {
         // Check if it's a Quartz URL
         if (path.startsWith('http') || path.includes('quartz') || path.includes('.xyz') || path.includes('.com')) {
-          // Load from Quartz URL - always get latest data
-          console.log('🔧 Loading Quartz vault from URL (getting latest data):', path)
           const loadedVault = await importer.importFromQuartzUrl(path)
-          console.log('Loaded Quartz vault from URL:', loadedVault)
           setVault(loadedVault)
           setShowVaultInput(false)
           setShowFolderReselect(false)
-          // Save the vault path and name to user session
-          console.log('🔧 Saving Quartz vault to session:', { path, name: loadedVault.name })
           updateSession({
             obsidianVaultPath: path,
             obsidianVaultName: loadedVault.name
           })
-          console.log('🔧 Quartz vault saved to session successfully')
-          // Save vault to Automerge for persistence
           saveVaultToAutomerge(loadedVault)
         } else {
-          // Load from local directory
-          console.log('🔧 Loading vault from local directory:', path)
           const loadedVault = await importer.importFromDirectory(path)
-          console.log('Loaded vault from path:', loadedVault)
           setVault(loadedVault)
           setShowVaultInput(false)
           setShowFolderReselect(false)
-          // Save the vault path and name to user session
-          console.log('🔧 Saving vault to session:', { path, name: loadedVault.name })
          updateSession({
            obsidianVaultPath: path,
            obsidianVaultName: loadedVault.name
          })
-          console.log('🔧 Vault saved to session successfully')
-          // Save vault to Automerge for persistence
          saveVaultToAutomerge(loadedVault)
        }
      } else {
-        // No vault configured - show empty state
-        console.log('No vault configured, showing empty state...')
        setVault(null)
        setShowVaultInput(false)
      }
@@ -384,8 +330,6 @@ export const ObsidianVaultBrowser: React.FC<ObsidianVaultBrowserProps> = ({
      console.error('Failed to load vault:', err)
      setError('Failed to load Obsidian vault. Please try again.')
      setVault(null)
-      // Don't show vault input if user already has a vault configured
-      // Only show vault input if this is a fresh attempt
      if (!session.obsidianVaultPath) {
        setShowVaultInput(true)
      }
@@ -401,11 +345,8 @@ export const ObsidianVaultBrowser: React.FC<ObsidianVaultBrowserProps> = ({
      setError('Please enter a vault path or URL')
      return
    }
-    console.log('📝 Submitting vault path:', vaultPath.trim(), 'Method:', inputMethod)
    if (inputMethod === 'quartz') {
-      // Handle Quartz URL
      try {
        setIsLoading(true)
        setError(null)
@@ -413,70 +354,49 @@ export const ObsidianVaultBrowser: React.FC<ObsidianVaultBrowserProps> = ({
        setVault(loadedVault)
        setShowVaultInput(false)
        setShowFolderReselect(false)
-        // Save Quartz vault to user identity (session)
-        console.log('🔧 Saving Quartz vault to user identity:', {
-          path: vaultPath.trim(),
-          name: loadedVault.name
-        })
        updateSession({
          obsidianVaultPath: vaultPath.trim(),
          obsidianVaultName: loadedVault.name
        })
      } catch (error) {
        console.error('Error loading Quartz vault:', error)
        setError(error instanceof Error ? error.message : 'Failed to load Quartz vault')
      } finally {
        setIsLoading(false)
      }
    } else {
-      // Handle regular vault path (local folder or URL)
      loadVault(vaultPath.trim())
    }
  }
  const handleFolderPicker = async () => {
-    console.log('📁 Folder picker button clicked')
    if (!('showDirectoryPicker' in window)) {
      setError('File System Access API is not supported in this browser. Please use "Enter Path" instead.')
      setShowVaultInput(true)
      return
    }
    try {
      setIsLoading(true)
      setError(null)
-      console.log('📁 Opening directory picker...')
      const loadedVault = await importer.importFromFileSystem()
-      console.log('✅ Vault loaded from folder picker:', loadedVault.name)
      setVault(loadedVault)
      setShowVaultInput(false)
      setShowFolderReselect(false)
-      // Note: We can't get the actual path from importFromFileSystem,
-      // but we can save a flag that a folder was selected
-      console.log('🔧 Saving folder-selected vault to user identity:', {
-        path: 'folder-selected',
-        name: loadedVault.name
-      })
      updateSession({
        obsidianVaultPath: 'folder-selected',
        obsidianVaultName: loadedVault.name
      })
-      console.log('✅ Folder-selected vault saved to user identity successfully')
-      // Save vault to Automerge for persistence
      saveVaultToAutomerge(loadedVault)
    } catch (err) {
-      console.error('❌ Failed to load vault from folder picker:', err)
      if ((err as any).name === 'AbortError') {
-        // User cancelled the folder picker
-        console.log('📁 User cancelled folder picker')
-        setError(null) // Don't show error for cancellation
+        setError(null)
      } else {
+        console.error('Failed to load vault from folder picker:', err)
        setError('Failed to load Obsidian vault. Please try again.')
      }
    } finally {
@@ -514,45 +434,27 @@ export const ObsidianVaultBrowser: React.FC<ObsidianVaultBrowserProps> = ({
        const folderNotes = importer.getAllNotesFromTree(folder)
        obs_notes = obs_notes.filter(note => folderNotes.some(folderNote => folderNote.id === note.id))
      }
-    } else if (viewMode === 'tree' && selectedFolder === null) {
-      // In tree view but no folder selected, show all notes
-      // This allows users to see all notes when no specific folder is selected
    }
-    // Debug logging
-    console.log('Search query:', debouncedSearchQuery)
-    console.log('View mode:', viewMode)
-    console.log('Selected folder:', selectedFolder)
-    console.log('Total notes:', vault.obs_notes.length)
-    console.log('Filtered notes:', obs_notes.length)
    return obs_notes
  }, [vault, debouncedSearchQuery, viewMode, selectedFolder, folderTree, importer])
  // Listen for trigger-obsnote-creation event from CustomToolbar
  useEffect(() => {
    const handleTriggerCreation = () => {
-      console.log('🎯 ObsidianVaultBrowser: Received trigger-obsnote-creation event')
      if (selectedNotes.size > 0) {
-        // Create shapes from currently selected notes
        const selectedObsNotes = filteredObsNotes.filter(obs_note => selectedNotes.has(obs_note.id))
-        console.log('🎯 Creating shapes from selected notes:', selectedObsNotes.length)
        onObsNotesSelect(selectedObsNotes)
      } else {
-        // If no notes are selected, select all visible notes
        const allVisibleNotes = filteredObsNotes
        if (allVisibleNotes.length > 0) {
-          console.log('🎯 No notes selected, creating shapes from all visible notes:', allVisibleNotes.length)
          onObsNotesSelect(allVisibleNotes)
-        } else {
-          console.log('🎯 No notes available to create shapes from')
        }
      }
    }
    window.addEventListener('trigger-obsnote-creation', handleTriggerCreation as EventListener)
    return () => {
      window.removeEventListener('trigger-obsnote-creation', handleTriggerCreation as EventListener)
    }
@@ -663,7 +565,6 @@ export const ObsidianVaultBrowser: React.FC<ObsidianVaultBrowserProps> = ({
  }
  const handleObsNoteClick = (obs_note: ObsidianObsNote) => {
-    console.log('🎯 ObsidianVaultBrowser: handleObsNoteClick called with:', obs_note)
    onObsNoteSelect(obs_note)
  }
@@ -679,7 +580,6 @@ export const ObsidianVaultBrowser: React.FC<ObsidianVaultBrowserProps> = ({
  const handleBulkImport = () => {
    const selectedObsNotes = filteredObsNotes.filter(obs_note => selectedNotes.has(obs_note.id))
-    console.log('🎯 ObsidianVaultBrowser: handleBulkImport called with:', selectedObsNotes.length, 'notes')
    onObsNotesSelect(selectedObsNotes)
    setSelectedNotes(new Set())
  }
@@ -730,13 +630,11 @@ export const ObsidianVaultBrowser: React.FC<ObsidianVaultBrowserProps> = ({
  const handleDisconnectVault = () => {
-    // Clear the vault from session
    updateSession({
      obsidianVaultPath: undefined,
      obsidianVaultName: undefined
    })
-    // Reset component state
    setVault(null)
    setSearchQuery('')
    setDebouncedSearchQuery('')
@@ -746,8 +644,6 @@ export const ObsidianVaultBrowser: React.FC<ObsidianVaultBrowserProps> = ({
    setError(null)
    setHasLoadedOnce(false)
    setIsLoadingVault(false)
-    console.log('🔧 Vault disconnected successfully')
  }
  const handleBackdropClick = (e: React.MouseEvent<HTMLDivElement>) => {
@@ -841,24 +737,19 @@ export const ObsidianVaultBrowser: React.FC<ObsidianVaultBrowserProps> = ({
          <h3>Load Obsidian Vault</h3>
          <p>Choose how you'd like to load your Obsidian vault:</p>
          <div className="vault-options">
            <button
-              onClick={() => {
-                console.log('📁 Select Folder button clicked')
-                handleFolderPicker()
-              }}
+              onClick={handleFolderPicker}
              className="load-vault-button primary"
            >
              📁 Select Folder
            </button>
            <button
              onClick={() => {
-                console.log('📝 Enter Path button clicked')
-                // Pre-populate with session vault path if available
                if (session.obsidianVaultPath && session.obsidianVaultPath !== 'folder-selected') {
                  setVaultPath(session.obsidianVaultPath)
                }
                setShowVaultInput(true)
              }}
              className="load-vault-button secondary"
            >
              📝 Enter Path


@ -65,16 +65,8 @@ export async function llm(
// Get all available providers with valid keys // Get all available providers with valid keys
const availableProviders = getAvailableProviders(availableKeys, settings); const availableProviders = getAvailableProviders(availableKeys, settings);
console.log(`🔍 Found ${availableProviders.length} available AI providers:`,
availableProviders.map(p => `${p.provider} (${p.model})`).join(', '));
if (availableProviders.length === 0) { if (availableProviders.length === 0) {
const runpodConfig = getRunPodConfig();
if (runpodConfig && runpodConfig.apiKey && runpodConfig.endpointId) {
// RunPod should have been added, but if not, try one more time
console.log('⚠️ No user API keys found, but RunPod is configured - this should not happen');
}
throw new Error("No valid API key found for any provider. Please configure API keys in settings or set up RunPod environment variables (VITE_RUNPOD_API_KEY and VITE_RUNPOD_ENDPOINT_ID).") throw new Error("No valid API key found for any provider. Please configure API keys in settings or set up RunPod environment variables (VITE_RUNPOD_API_KEY and VITE_RUNPOD_ENDPOINT_ID).")
} }
@ -95,55 +87,43 @@ export async function llm(
for (const providerInfo of availableProviders) { for (const providerInfo of availableProviders) {
const { provider, apiKey, model, endpointId } = providerInfo as any; const { provider, apiKey, model, endpointId } = providerInfo as any;
try { try {
console.log(`🔄 Attempting to use ${provider} API (${model})...`);
attemptedProviders.push(`${provider} (${model})`); attemptedProviders.push(`${provider} (${model})`);
// Add retry logic for temporary failures // Add retry logic for temporary failures
await callProviderAPIWithRetry(provider, apiKey, model, userPrompt, onToken, settings, endpointId, customSystemPrompt);
- console.log(`✅ Successfully used ${provider} API (${model})`);
return; // Success, exit the function
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
// Check if it's a model not found error (404) for Anthropic - try fallback models
if (provider === 'anthropic' && (errorMessage.includes('404') || errorMessage.includes('not_found_error') || errorMessage.includes('model:'))) {
- console.warn(`${provider} model ${model} not found, trying fallback models...`);
// Try fallback models
let fallbackSucceeded = false;
for (const fallbackModel of anthropicFallbackModels) {
if (fallbackModel === model) continue; // Skip the one we already tried
try {
- console.log(`🔄 Trying fallback model: ${fallbackModel}...`);
attemptedProviders.push(`${provider} (${fallbackModel})`);
const providerInfo = availableProviders.find(p => p.provider === provider);
const endpointId = (providerInfo as any)?.endpointId;
await callProviderAPIWithRetry(provider, apiKey, fallbackModel, userPrompt, onToken, settings, endpointId, customSystemPrompt);
- console.log(`✅ Successfully used ${provider} API with fallback model ${fallbackModel}`);
fallbackSucceeded = true;
return; // Success, exit the function
} catch (fallbackError) {
- const fallbackErrorMessage = fallbackError instanceof Error ? fallbackError.message : String(fallbackError);
- console.warn(`❌ Fallback model ${fallbackModel} also failed:`, fallbackErrorMessage);
// Continue to next fallback model
}
}
if (!fallbackSucceeded) {
- console.warn(`❌ All ${provider} models failed`);
lastError = error as Error;
}
} else if (errorMessage.includes('401') || errorMessage.includes('403') ||
errorMessage.includes('Unauthorized') || errorMessage.includes('Invalid API key') ||
errorMessage.includes('expired') || errorMessage.includes('Expired')) {
- console.warn(`${provider} API authentication failed (invalid/expired API key):`, errorMessage);
// Mark this specific API key as invalid for future attempts
markApiKeyAsInvalid(provider, apiKey);
- console.log(`🔄 Will try next available API key...`);
lastError = error as Error;
} else {
- console.warn(`${provider} API failed (non-auth error):`, errorMessage);
lastError = error as Error;
}
// Continue to next provider/key
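For orientation, the failover loop these hunks trim reduces to the shape sketched below. This is an illustrative condensation, not the file's exact code; the wrapper name tryProvidersInOrder and its parameter list are assumed, while callProviderAPIWithRetry and markApiKeyAsInvalid are the file's own helpers used above.

async function tryProvidersInOrder(
  providers: Array<{ provider: string; apiKey: string; model: string; endpointId?: string }>,
  userPrompt: string,
  onToken: (text: string, done: boolean) => void
): Promise<void> {
  let lastError: Error | undefined;
  for (const { provider, apiKey, model, endpointId } of providers) {
    try {
      // First provider/key that succeeds ends the whole attempt.
      await callProviderAPIWithRetry(provider, apiKey, model, userPrompt, onToken, undefined, endpointId);
      return;
    } catch (error) {
      const message = error instanceof Error ? error.message : String(error);
      if (message.includes('401') || message.includes('403')) {
        // Auth failures poison this key so later calls skip it.
        markApiKeyAsInvalid(provider, apiKey);
      }
      lastError = error as Error;
    }
  }
  throw lastError ?? new Error('No AI provider succeeded');
}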
@@ -173,8 +153,6 @@ function getAvailableProviders(availableKeys: Record<string, string>, settings:
}
providers.push(providerInfo);
return true;
- } else if (isApiKeyInvalid(provider, apiKey)) {
- console.log(`⏭️ Skipping ${provider} API key (marked as invalid)`);
}
return false;
};
@@ -184,7 +162,6 @@ function getAvailableProviders(availableKeys: Record<string, string>, settings:
if (ollamaConfig && ollamaConfig.url) {
// Get the selected Ollama model from settings
const selectedOllamaModel = settings.ollamaModel || 'llama3.1:8b';
- console.log(`🦙 Found Ollama configuration - using as primary AI provider (FREE) with model: ${selectedOllamaModel}`);
providers.push({
provider: 'ollama',
apiKey: 'ollama', // Ollama doesn't need an API key
@@ -197,7 +174,6 @@ function getAvailableProviders(availableKeys: Record<string, string>, settings:
// RunPod vLLM text endpoint is used as fallback when Ollama is not available
const runpodTextConfig = getRunPodTextConfig();
if (runpodTextConfig && runpodTextConfig.apiKey && runpodTextConfig.endpointId) {
- console.log('🔑 Found RunPod TEXT endpoint configuration from environment variables');
providers.push({
provider: 'runpod',
apiKey: runpodTextConfig.apiKey,
@@ -208,7 +184,6 @@ function getAvailableProviders(availableKeys: Record<string, string>, settings:
// Fallback to generic RunPod config if text endpoint not configured
const runpodConfig = getRunPodConfig();
if (runpodConfig && runpodConfig.apiKey && runpodConfig.endpointId) {
- console.log('🔑 Found RunPod configuration from environment variables (generic endpoint)');
providers.push({
provider: 'runpod',
apiKey: runpodConfig.apiKey,
@@ -262,8 +237,6 @@ function getAvailableProviders(availableKeys: Record<string, string>, settings:
apiKey: directSettings,
model: getDefaultModel('openai')
});
- } else if (isApiKeyInvalid('openai', directSettings)) {
- console.log(`⏭️ Skipping OpenAI API key (marked as invalid)`);
}
} else {
// Try to parse as JSON
@@ -277,8 +250,6 @@ function getAvailableProviders(availableKeys: Record<string, string>, settings:
apiKey: value,
model: parsed.models?.[key] || getDefaultModel(key)
});
- } else if (isApiKeyInvalid(key, value as string)) {
- console.log(`⏭️ Skipping ${key} API key (marked as invalid)`);
}
}
}
@@ -290,8 +261,6 @@ function getAvailableProviders(availableKeys: Record<string, string>, settings:
apiKey: directSettings,
model: getDefaultModel('openai')
});
- } else if (isApiKeyInvalid('openai', directSettings)) {
- console.log(`⏭️ Skipping OpenAI API key (marked as invalid)`);
}
}
}
@@ -336,13 +305,11 @@ function getUserSpecificApiKeys() {
apiKey,
model: parsed.models?.[provider] || getDefaultModel(provider)
});
- } else if (isApiKeyInvalid(provider, apiKey as string)) {
- console.log(`⏭️ Skipping ${provider} API key (marked as invalid)`);
}
}
}
} catch (parseError) {
- console.warn('Failed to parse user-specific API keys:', parseError);
+ // Silently skip invalid JSON
}
}
@@ -356,8 +323,6 @@ function getUserSpecificApiKeys() {
apiKey: key,
model: getDefaultModel(provider)
});
- } else if (isApiKeyInvalid(provider, key as string)) {
- console.log(`⏭️ Skipping ${provider} API key (marked as invalid)`);
}
}
}
@@ -389,8 +354,6 @@ function getUserSpecificApiKeys() {
apiKey,
model: parsed.models?.[provider] || getDefaultModel(provider)
});
- } else if (isApiKeyInvalid(provider, apiKey as string)) {
- console.log(`⏭️ Skipping ${provider} API key (marked as invalid)`);
}
}
}
@@ -400,11 +363,11 @@ function getUserSpecificApiKeys() {
}
}
} catch (parseError) {
- console.warn('Failed to parse registered users:', parseError);
+ // Silently skip parse errors
}
}
} catch (error) {
- console.warn('Error checking user-specific API keys:', error);
+ // Silently skip errors
}
return providers;
@@ -463,10 +426,9 @@ async function callProviderAPIWithRetry(
if (attempt === maxRetries) {
throw error;
}
// Wait before retry (exponential backoff)
const delay = Math.pow(2, attempt - 1) * 1000; // 1s, 2s, 4s...
- console.log(`⏳ Retrying ${provider} API in ${delay}ms... (attempt ${attempt + 1}/${maxRetries})`);
await new Promise(resolve => setTimeout(resolve, delay));
}
}
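The retry helper above follows a plain exponential-backoff schedule; stripped of provider-specific details it is essentially the following (a generic sketch, withRetry is an illustrative name, not a function in this file):

// Generic sketch of the backoff schedule used by callProviderAPIWithRetry:
// attempt 1 fails -> wait 1s, attempt 2 -> 2s, attempt 3 -> 4s, then give up.
async function withRetry<T>(fn: () => Promise<T>, maxRetries = 3): Promise<T> {
  for (let attempt = 1; attempt <= maxRetries; attempt++) {
    try {
      return await fn();
    } catch (error) {
      if (attempt === maxRetries) throw error;
      const delay = Math.pow(2, attempt - 1) * 1000; // 1s, 2s, 4s...
      await new Promise(resolve => setTimeout(resolve, delay));
    }
  }
  throw new Error('unreachable'); // the loop above always returns or throws
}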
@@ -499,7 +461,6 @@ function markApiKeyAsInvalid(provider: string, apiKey: string) {
if (!invalidKeys[provider].includes(apiKey)) {
invalidKeys[provider].push(apiKey);
localStorage.setItem(invalidKeysKey, JSON.stringify(invalidKeys));
- console.log(`🚫 Marked ${provider} API key as invalid`);
}
} catch (e) {
// Silently handle errors
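markApiKeyAsInvalid and its counterpart isApiKeyInvalid (seen in the hunks above) share a small localStorage record keyed by provider. A minimal sketch of the read side, assuming the stored shape is a provider-to-key-array map under 'invalid_api_keys' (the key cleared by clearInvalidApiKeys at the end of this file):

// Sketch of the lookup that isApiKeyInvalid presumably performs; the stored shape is assumed.
// Assumed shape: { "openai": ["sk-bad-1"], "anthropic": ["sk-ant-bad"] }
function isApiKeyInvalidSketch(provider: string, apiKey: string): boolean {
  try {
    const raw = localStorage.getItem('invalid_api_keys');
    if (!raw) return false;
    const invalidKeys = JSON.parse(raw) as Record<string, string[]>;
    return Array.isArray(invalidKeys[provider]) && invalidKeys[provider].includes(apiKey);
  } catch {
    return false; // unreadable state is treated as "not invalid"
  }
}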
@@ -549,21 +510,12 @@ async function callProviderAPI(
// Use custom system prompt if provided, otherwise fall back to personality-based prompt
const systemPrompt = customSystemPrompt || (settings ? getSystemPrompt(settings) : 'You are a helpful assistant.');
- // Debug: log which system prompt is being used
- if (customSystemPrompt) {
- console.log(`🧠 Using custom system prompt (${customSystemPrompt.length} chars)`);
- } else {
- console.log(`🧠 Using personality-based system prompt: ${settings?.personality || 'default'}`);
- }
if (provider === 'ollama') {
// Ollama API integration via AI Orchestrator
// The orchestrator provides /api/chat endpoint that routes to local Ollama
const ollamaConfig = getOllamaConfig();
const baseUrl = (settings as any)?.baseUrl || ollamaConfig?.url || 'http://localhost:11434';
- console.log(`🦙 Ollama API: Using ${baseUrl}/api/chat with model ${model}`);
const messages = [];
if (systemPrompt) {
messages.push({ role: 'system', content: systemPrompt });
@@ -586,12 +538,10 @@ async function callProviderAPI(
if (!response.ok) {
const errorText = await response.text();
- console.error('❌ Ollama API error:', response.status, errorText);
throw new Error(`Ollama API error: ${response.status} - ${errorText}`);
}
const data = await response.json() as Record<string, any>;
- console.log('📥 Ollama API: Response received:', JSON.stringify(data, null, 2).substring(0, 500));
// Extract response from AI Orchestrator format
let responseText = '';
@@ -615,11 +565,9 @@ async function callProviderAPI(
}
}
- console.log('✅ Ollama API: Response complete, length:', partial.length);
onToken(partial, true);
return;
} catch (error) {
- console.error('❌ Ollama API error:', error);
throw error;
}
} else if (provider === 'runpod') {
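For reference, the Ollama branch above amounts to one non-streaming chat request against the orchestrator's /api/chat route. A self-contained sketch (request and response fields are assumptions based on the code above and Ollama's chat API, not a copy of this function):

// Sketch: non-streaming chat call to an Ollama-compatible /api/chat route.
async function ollamaChatSketch(baseUrl: string, model: string, systemPrompt: string, userPrompt: string): Promise<string> {
  const messages = [
    { role: 'system', content: systemPrompt },
    { role: 'user', content: userPrompt },
  ];
  const response = await fetch(`${baseUrl}/api/chat`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ model, messages, stream: false }),
  });
  if (!response.ok) {
    throw new Error(`Ollama API error: ${response.status} - ${await response.text()}`);
  }
  const data = await response.json() as Record<string, any>;
  // Ollama's chat endpoint returns { message: { content } }; the orchestrator may wrap this differently.
  return data.message?.content ?? data.response ?? '';
}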
@@ -660,10 +608,7 @@ async function callProviderAPI(
stream: false // vLLM can handle streaming, but we'll process it synchronously for now
}
};
- console.log('📤 RunPod API: Trying synchronous endpoint first:', syncUrl);
- console.log('📤 RunPod API: Using OpenAI-compatible messages format');
try {
// First, try synchronous endpoint (/runsync) - this returns output immediately
try {
@@ -675,11 +620,10 @@ async function callProviderAPI(
},
body: JSON.stringify(requestBody)
});
if (syncResponse.ok) {
const syncData = await syncResponse.json() as Record<string, any>;
- console.log('📥 RunPod API: Synchronous response:', JSON.stringify(syncData, null, 2));
// Check if we got output directly
if (syncData.output) {
let responseText = '';
@@ -695,9 +639,8 @@ async function callProviderAPI(
} else if (syncData.output.response) {
responseText = syncData.output.response;
}
if (responseText) {
- console.log('✅ RunPod API: Got output from synchronous endpoint, length:', responseText.length);
// Stream the response character by character to simulate streaming
for (let i = 0; i < responseText.length; i++) {
partial += responseText[i];
@@ -708,23 +651,20 @@ async function callProviderAPI(
return;
}
}
// If sync endpoint returned a job ID, fall through to async polling
if (syncData.id && (syncData.status === 'IN_QUEUE' || syncData.status === 'IN_PROGRESS')) {
- console.log('⏳ RunPod API: Sync endpoint returned job ID, polling:', syncData.id);
const result = await pollRunPodJob(syncData.id, apiKey, runpodEndpointId);
- console.log('✅ RunPod API: Job completed, result length:', result.length);
partial = result;
onToken(partial, true);
return;
}
}
} catch (syncError) {
- console.log('⚠️ RunPod API: Synchronous endpoint not available, trying async:', syncError);
+ // Synchronous endpoint not available, fall back to async
}
// Fall back to async endpoint (/run) if sync didn't work
- console.log('📤 RunPod API: Using async endpoint:', asyncUrl);
const response = await fetch(asyncUrl, {
method: 'POST',
headers: {
@@ -733,36 +673,28 @@ async function callProviderAPI(
},
body: JSON.stringify(requestBody)
});
- console.log('📥 RunPod API: Response status:', response.status, response.statusText);
if (!response.ok) {
const errorText = await response.text();
- console.error('❌ RunPod API: Error response:', errorText);
throw new Error(`RunPod API error: ${response.status} - ${errorText}`);
}
const data = await response.json() as Record<string, any>;
- console.log('📥 RunPod API: Response data:', JSON.stringify(data, null, 2));
// Handle async job pattern (RunPod often returns job IDs)
if (data.id && (data.status === 'IN_QUEUE' || data.status === 'IN_PROGRESS')) {
- console.log('⏳ RunPod API: Job queued/in progress, polling job ID:', data.id);
const result = await pollRunPodJob(data.id, apiKey, runpodEndpointId);
- console.log('✅ RunPod API: Job completed, result length:', result.length);
partial = result;
onToken(partial, true);
return;
}
// Handle OpenAI-compatible response format (vLLM endpoints)
if (data.output && data.output.choices && Array.isArray(data.output.choices)) {
- console.log('📥 RunPod API: Detected OpenAI-compatible response format');
const choice = data.output.choices[0];
if (choice && choice.message && choice.message.content) {
const responseText = choice.message.content;
- console.log('✅ RunPod API: Extracted content from OpenAI-compatible format, length:', responseText.length);
// Stream the response character by character to simulate streaming
for (let i = 0; i < responseText.length; i++) {
partial += responseText[i];
@@ -774,33 +706,26 @@ async function callProviderAPI(
return;
}
}
// Handle direct response
if (data.output) {
- console.log('📥 RunPod API: Processing output:', typeof data.output, Array.isArray(data.output) ? 'array' : 'object');
// Try to extract text from various possible response formats
let responseText = '';
if (typeof data.output === 'string') {
responseText = data.output;
- console.log('✅ RunPod API: Extracted string output, length:', responseText.length);
} else if (data.output.text) {
responseText = data.output.text;
- console.log('✅ RunPod API: Extracted text from output.text, length:', responseText.length);
} else if (data.output.response) {
responseText = data.output.response;
- console.log('✅ RunPod API: Extracted response from output.response, length:', responseText.length);
} else if (data.output.content) {
responseText = data.output.content;
- console.log('✅ RunPod API: Extracted content from output.content, length:', responseText.length);
} else if (Array.isArray(data.output.segments)) {
responseText = data.output.segments.map((seg: any) => seg.text || seg).join(' ');
- console.log('✅ RunPod API: Extracted text from segments, length:', responseText.length);
} else {
// Fallback: stringify the output
- console.warn('⚠️ RunPod API: Unknown output format, stringifying:', Object.keys(data.output));
responseText = JSON.stringify(data.output);
}
// Stream the response character by character to simulate streaming
for (let i = 0; i < responseText.length; i++) {
partial += responseText[i];
@@ -811,28 +736,23 @@ async function callProviderAPI(
onToken(partial, true);
return;
}
// Handle error response
if (data.error) {
- console.error('❌ RunPod API: Error in response:', data.error);
throw new Error(`RunPod API error: ${data.error}`);
}
// Check for status messages that might indicate endpoint is starting up
if (data.status) {
- console.log(' RunPod API: Response status:', data.status);
if (data.status === 'STARTING' || data.status === 'PENDING') {
- console.log('⏳ RunPod API: Endpoint appears to be starting up, this may take a moment...');
// Wait a bit and retry
await new Promise(resolve => setTimeout(resolve, 2000));
throw new Error('RunPod endpoint is starting up. Please wait a moment and try again.');
}
}
- console.error('❌ RunPod API: No valid response format detected. Full response:', JSON.stringify(data, null, 2));
throw new Error('No valid response from RunPod API');
} catch (error) {
- console.error('❌ RunPod API error:', error);
throw error;
}
} else if (provider === 'openai') {
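The vLLM/OpenAI-compatible branches above all reduce to reading choices[0].message.content off the payload; roughly (a small sketch, the helper name is illustrative):

// Sketch: pull the assistant text out of an OpenAI-style completion payload (vLLM endpoints).
function extractOpenAICompatibleText(output: { choices?: Array<{ message?: { content?: string } }> }): string {
  return output.choices?.[0]?.message?.content ?? '';
}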
@@ -924,8 +844,7 @@ async function pollRunPodJob(
pollInterval: number = 1000
): Promise<string> {
const statusUrl = `https://api.runpod.ai/v2/${endpointId}/status/${jobId}`;
- console.log('🔄 RunPod API: Starting to poll job:', jobId);
for (let attempt = 0; attempt < maxAttempts; attempt++) {
try {
const response = await fetch(statusUrl, {
@@ -937,30 +856,21 @@ async function pollRunPodJob(
if (!response.ok) {
const errorText = await response.text();
- console.error(`❌ RunPod API: Poll error (attempt ${attempt + 1}/${maxAttempts}):`, response.status, errorText);
throw new Error(`Failed to check job status: ${response.status} - ${errorText}`);
}
const data = await response.json() as Record<string, any>;
- console.log(`🔄 RunPod API: Poll attempt ${attempt + 1}/${maxAttempts}, status:`, data.status);
- console.log(`📥 RunPod API: Full poll response:`, JSON.stringify(data, null, 2));
if (data.status === 'COMPLETED') {
- console.log('✅ RunPod API: Job completed, processing output...');
- console.log('📥 RunPod API: Output structure:', typeof data.output, data.output ? Object.keys(data.output) : 'null');
- console.log('📥 RunPod API: Full data object keys:', Object.keys(data));
// If no output after a couple of retries, try the stream endpoint as fallback
if (!data.output) {
if (attempt < 3) {
// Only retry 2-3 times, then try stream endpoint
- console.log(`⏳ RunPod API: COMPLETED but no output yet, waiting briefly (attempt ${attempt + 1}/3)...`);
await new Promise(resolve => setTimeout(resolve, 500));
continue;
}
// After a few retries, try the stream endpoint as fallback
- console.log('⚠️ RunPod API: Status endpoint not returning output, trying stream endpoint...');
try {
const streamUrl = `https://api.runpod.ai/v2/${endpointId}/stream/${jobId}`;
const streamResponse = await fetch(streamUrl, {
@@ -969,52 +879,41 @@ async function pollRunPodJob(
'Authorization': `Bearer ${apiKey}`
}
});
if (streamResponse.ok) {
const streamData = await streamResponse.json() as Record<string, any>;
- console.log('📥 RunPod API: Stream endpoint response:', JSON.stringify(streamData, null, 2));
if (streamData.output) {
// Use stream endpoint output
data.output = streamData.output;
- console.log('✅ RunPod API: Found output via stream endpoint');
} else if (streamData.choices && Array.isArray(streamData.choices)) {
// Handle OpenAI-compatible format from stream endpoint
data.output = { choices: streamData.choices };
- console.log('✅ RunPod API: Found choices via stream endpoint');
}
- } else {
- console.log(`⚠️ RunPod API: Stream endpoint returned ${streamResponse.status}`);
}
} catch (streamError) {
- console.log('⚠️ RunPod API: Stream endpoint not available or failed:', streamError);
+ // Stream endpoint not available or failed
}
}
// Extract text from various possible response formats
let result = '';
if (typeof data.output === 'string') {
result = data.output;
- console.log('✅ RunPod API: Extracted string output from job, length:', result.length);
} else if (data.output?.text) {
result = data.output.text;
- console.log('✅ RunPod API: Extracted text from output.text, length:', result.length);
} else if (data.output?.response) {
result = data.output.response;
- console.log('✅ RunPod API: Extracted response from output.response, length:', result.length);
} else if (data.output?.content) {
result = data.output.content;
- console.log('✅ RunPod API: Extracted content from output.content, length:', result.length);
} else if (data.output?.choices && Array.isArray(data.output.choices)) {
// Handle OpenAI-compatible response format (vLLM endpoints)
const choice = data.output.choices[0];
if (choice && choice.message && choice.message.content) {
result = choice.message.content;
- console.log('✅ RunPod API: Extracted content from OpenAI-compatible format, length:', result.length);
}
} else if (data.output?.segments && Array.isArray(data.output.segments)) {
result = data.output.segments.map((seg: any) => seg.text || seg).join(' ');
- console.log('✅ RunPod API: Extracted text from segments, length:', result.length);
} else if (Array.isArray(data.output)) {
// Handle array responses (some vLLM endpoints return arrays)
result = data.output.map((item: any) => {
@@ -1023,63 +922,37 @@ async function pollRunPodJob(
if (item.response) return item.response;
return JSON.stringify(item);
}).join('\n');
- console.log('✅ RunPod API: Extracted text from array output, length:', result.length);
} else if (!data.output) {
// No output field - check alternative structures or return empty
- console.warn('⚠️ RunPod API: No output field found, checking alternative structures...');
- console.log('📥 RunPod API: Full data structure:', JSON.stringify(data, null, 2));
// Try checking if output is directly in data (not data.output)
if (typeof data === 'string') {
result = data;
- console.log('✅ RunPod API: Data itself is a string, length:', result.length);
} else if (data.text) {
result = data.text;
- console.log('✅ RunPod API: Found text at top level, length:', result.length);
} else if (data.response) {
result = data.response;
- console.log('✅ RunPod API: Found response at top level, length:', result.length);
} else if (data.content) {
result = data.content;
- console.log('✅ RunPod API: Found content at top level, length:', result.length);
} else {
- // Stream endpoint already tried above (around line 848), just log that we couldn't find output
- if (attempt >= 3) {
- console.warn('⚠️ RunPod API: Could not find output in status or stream endpoint after multiple attempts');
- }
// If still no result, return empty string instead of throwing error
// This allows the UI to render something instead of failing
- if (!result) {
- console.warn('⚠️ RunPod API: No output found in response. Returning empty result.');
- console.log('📥 RunPod API: Available fields:', Object.keys(data));
- result = ''; // Return empty string so UI can render
- }
+ result = '';
}
}
// Return result even if empty - don't loop forever
if (result !== undefined) {
- // Return empty string if no result found - allows UI to render
- console.log('✅ RunPod API: Returning result (may be empty):', result ? `length ${result.length}` : 'empty');
return result || '';
}
// If we get here, no output was found - return empty string instead of looping
- console.warn('⚠️ RunPod API: No output found after checking all formats. Returning empty result.');
return '';
}
if (data.status === 'FAILED') {
- console.error('❌ RunPod API: Job failed:', data.error || 'Unknown error');
throw new Error(`Job failed: ${data.error || 'Unknown error'}`);
}
- // Check for starting/pending status
- if (data.status === 'STARTING' || data.status === 'PENDING') {
- console.log(`⏳ RunPod API: Endpoint still starting (attempt ${attempt + 1}/${maxAttempts})...`);
- }
// Job still in progress, wait and retry
await new Promise(resolve => setTimeout(resolve, pollInterval));
} catch (error) {
@@ -1090,7 +963,7 @@ async function pollRunPodJob(
await new Promise(resolve => setTimeout(resolve, pollInterval));
}
}
throw new Error('Job polling timeout - job did not complete in time');
}
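Condensed, the submit-then-poll flow that callProviderAPI and pollRunPodJob implement looks like the sketch below. Endpoint paths and status values are the ones used above; retries, the /runsync fast path, and the /stream fallback are omitted, and the helper name is illustrative.

// Sketch: submit a RunPod job via /run, then poll /status/{id} until it finishes.
async function runAndPollSketch(endpointId: string, apiKey: string, input: unknown): Promise<string> {
  const headers = { 'Content-Type': 'application/json', 'Authorization': `Bearer ${apiKey}` };
  const submit = await fetch(`https://api.runpod.ai/v2/${endpointId}/run`, {
    method: 'POST',
    headers,
    body: JSON.stringify({ input }),
  });
  const job = await submit.json() as { id: string };
  for (let attempt = 0; attempt < 60; attempt++) {
    const poll = await fetch(`https://api.runpod.ai/v2/${endpointId}/status/${job.id}`, { headers });
    const data = await poll.json() as Record<string, any>;
    if (data.status === 'COMPLETED') {
      return typeof data.output === 'string' ? data.output : JSON.stringify(data.output ?? '');
    }
    if (data.status === 'FAILED') {
      throw new Error(`Job failed: ${data.error || 'Unknown error'}`);
    }
    await new Promise(resolve => setTimeout(resolve, 1000)); // IN_QUEUE / IN_PROGRESS: wait and retry
  }
  throw new Error('Job polling timeout - job did not complete in time');
}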
@@ -1119,7 +992,7 @@ async function autoMigrateAPIKeys() {
}
if (needsUpdate) {
localStorage.setItem("openai_api_key", JSON.stringify(parsed));
- console.log('🔄 Migrated invalid Anthropic model name to claude-3-opus-20240229');
}
}
return; // Already migrated
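For context, autoMigrateAPIKeys rewrites the JSON blob stored under the "openai_api_key" localStorage key. Judging from how getAvailableProviders parses it, the shape is presumably along these lines (field names inferred from the parsing code, model ids are placeholders, not an authoritative schema):

// Inferred shape of the localStorage "openai_api_key" blob.
interface StoredApiKeys {
  [provider: string]: string | Record<string, string> | undefined; // provider name -> API key
  models?: Record<string, string>; // provider name -> model id
}

const exampleStoredKeys: StoredApiKeys = {
  anthropic: 'sk-ant-...', // placeholder key
  openai: 'sk-...', // placeholder key
  models: { anthropic: 'claude-3-opus-20240229', openai: 'gpt-4o' }, // placeholder/migrated model ids
};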
@@ -1260,7 +1133,7 @@ export function getFirstAvailableApiKeyAndProvider(): { key: string; provider: s
export function clearInvalidApiKeys() {
try {
localStorage.removeItem('invalid_api_keys');
- console.log('🧹 Cleared all invalid API key markers');
} catch (e) {
console.warn('Failed to clear invalid API keys:', e);
}