fix: register Calendar and Drawfast shapes in automerge store

Added missing Calendar and Drawfast shapes to the automerge store
schema registration to fix ValidationError when using these tools.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
Jeff Emmett 2025-12-24 10:36:51 -05:00
parent c4cb97c0bf
commit c6ed0b77d8
4 changed files with 134 additions and 509 deletions

View File

@ -23,20 +23,16 @@ export class CloudflareAdapter {
async getHandle(roomId: string): Promise<DocHandle<TLStoreSnapshot>> {
if (!this.handles.has(roomId)) {
console.log(`Creating new Automerge handle for room ${roomId}`)
const handle = this.repo.create<TLStoreSnapshot>()
// Initialize with default store if this is a new document
handle.change((doc) => {
if (!doc.store) {
console.log("Initializing new document with default store")
init(doc)
}
})
this.handles.set(roomId, handle)
} else {
console.log(`Reusing existing Automerge handle for room ${roomId}`)
}
return this.handles.get(roomId)!
@ -72,13 +68,11 @@ export class CloudflareAdapter {
async saveToCloudflare(roomId: string): Promise<void> {
const handle = this.handles.get(roomId)
if (!handle) {
console.log(`No handle found for room ${roomId}`)
return
}
const doc = handle.doc()
if (!doc) {
console.log(`No document found for room ${roomId}`)
return
}
@ -114,7 +108,6 @@ export class CloudflareAdapter {
async loadFromCloudflare(roomId: string): Promise<TLStoreSnapshot | null> {
try {
// Add retry logic for connection issues
let response: Response;
let retries = 3;
@ -131,7 +124,7 @@ export class CloudflareAdapter {
}
}
}
if (!response!.ok) {
if (response!.status === 404) {
return null // Room doesn't exist yet
@ -141,12 +134,7 @@ export class CloudflareAdapter {
}
const doc = await response!.json() as TLStoreSnapshot
console.log(`Successfully loaded document from Cloudflare for room ${roomId}:`, {
hasStore: !!doc.store,
storeKeys: doc.store ? Object.keys(doc.store).length : 0
})
// Initialize the last persisted state with the loaded document
if (doc) {
const docHash = this.generateDocHash(doc)
@ -202,7 +190,6 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
private setConnectionState(state: ConnectionState): void {
if (this._connectionState !== state) {
console.log(`🔌 Connection state: ${this._connectionState}${state}`)
this._connectionState = state
this.connectionStateListeners.forEach(listener => listener(state))
}
@ -237,7 +224,6 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
// Set up network online/offline listeners
this.networkOnlineHandler = () => {
console.log('🌐 Network: online')
this._isNetworkOnline = true
// Trigger reconnect if we were disconnected
if (this._connectionState === 'disconnected' && this.peerId) {
@ -246,7 +232,6 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
}
}
this.networkOfflineHandler = () => {
console.log('🌐 Network: offline')
this._isNetworkOnline = false
if (this._connectionState === 'connected') {
this.setConnectionState('disconnected')
@ -273,12 +258,10 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
* @param documentId The Automerge document ID to use for incoming messages
*/
setDocumentId(documentId: string): void {
console.log('📋 CloudflareAdapter: Setting documentId:', documentId)
this.currentDocumentId = documentId
// Process any buffered binary messages now that we have a documentId
if (this.pendingBinaryMessages.length > 0) {
console.log(`📦 CloudflareAdapter: Processing ${this.pendingBinaryMessages.length} buffered binary messages`)
const bufferedMessages = this.pendingBinaryMessages
this.pendingBinaryMessages = []
@ -290,7 +273,6 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
targetId: this.peerId || ('unknown' as PeerId),
documentId: this.currentDocumentId as any
}
console.log('📥 CloudflareAdapter: Emitting buffered sync message with documentId:', this.currentDocumentId, 'size:', binaryData.byteLength)
this.emit('message', message)
}
}
@ -305,7 +287,6 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
connect(peerId: PeerId, peerMetadata?: PeerMetadata): void {
if (this.isConnecting) {
console.log('🔌 CloudflareAdapter: Connection already in progress, skipping')
return
}
@ -329,33 +310,27 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
const wsUrl = `${protocol}${baseUrl}/connect/${this.roomId}?sessionId=${sessionId}`
this.isConnecting = true
// Add a small delay to ensure the server is ready
setTimeout(() => {
try {
console.log('🔌 CloudflareAdapter: Creating WebSocket connection to:', wsUrl)
this.websocket = new WebSocket(wsUrl)
this.websocket.onopen = () => {
console.log('🔌 CloudflareAdapter: WebSocket connection opened successfully')
this.isConnecting = false
this.reconnectAttempts = 0
this.setConnectionState('connected')
this.readyResolve?.()
this.startKeepAlive()
// CRITICAL: Emit 'ready' event for Automerge Repo
// This tells the Repo that the network adapter is ready to sync
// Emit 'ready' event for Automerge Repo
// @ts-expect-error - 'ready' event is valid but not in NetworkAdapterEvents type
this.emit('ready', { network: this })
// Create a server peer ID based on the room
// The server acts as a "hub" peer that all clients sync with
this.serverPeerId = `server-${this.roomId}` as PeerId
// CRITICAL: Emit 'peer-candidate' to announce the server as a sync peer
// This tells the Automerge Repo there's a peer to sync documents with
console.log('🔌 CloudflareAdapter: Announcing server peer for Automerge sync:', this.serverPeerId)
// Emit 'peer-candidate' to announce the server as a sync peer
this.emit('peer-candidate', {
peerId: this.serverPeerId,
peerMetadata: { storageId: undefined, isEphemeral: false }
@ -367,16 +342,8 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
// Automerge's native protocol uses binary messages
// We need to handle both binary and text messages
if (event.data instanceof ArrayBuffer) {
console.log('🔌 CloudflareAdapter: Received binary message (Automerge protocol)', event.data.byteLength, 'bytes')
// Handle binary Automerge sync messages - convert ArrayBuffer to Uint8Array
// Automerge Repo expects binary sync messages as Uint8Array
// CRITICAL: senderId should be the SERVER (where the message came from)
// targetId should be US (where the message is going to)
// CRITICAL: Include documentId for Automerge Repo to route the message correctly
const binaryData = new Uint8Array(event.data)
if (!this.currentDocumentId) {
console.log('📦 CloudflareAdapter: Buffering binary sync message (no documentId yet), size:', binaryData.byteLength)
// Buffer for later processing when we have a documentId
this.pendingBinaryMessages.push(binaryData)
return
}
@ -385,17 +352,13 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
data: binaryData,
senderId: this.serverPeerId || ('server' as PeerId),
targetId: this.peerId || ('unknown' as PeerId),
documentId: this.currentDocumentId as any // DocumentId type
documentId: this.currentDocumentId as any
}
console.log('📥 CloudflareAdapter: Emitting sync message with documentId:', this.currentDocumentId)
this.emit('message', message)
} else if (event.data instanceof Blob) {
// Handle Blob messages (convert to Uint8Array)
event.data.arrayBuffer().then((buffer) => {
console.log('🔌 CloudflareAdapter: Received Blob message, converted to Uint8Array', buffer.byteLength, 'bytes')
const binaryData = new Uint8Array(buffer)
if (!this.currentDocumentId) {
console.log('📦 CloudflareAdapter: Buffering Blob sync message (no documentId yet), size:', binaryData.byteLength)
this.pendingBinaryMessages.push(binaryData)
return
}
@ -406,18 +369,12 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
targetId: this.peerId || ('unknown' as PeerId),
documentId: this.currentDocumentId as any
}
console.log('📥 CloudflareAdapter: Emitting Blob sync message with documentId:', this.currentDocumentId)
this.emit('message', message)
})
} else {
// Handle text messages (our custom protocol for backward compatibility)
const message = JSON.parse(event.data)
// Only log non-presence messages to reduce console spam
if (message.type !== 'presence' && message.type !== 'pong') {
console.log('🔌 CloudflareAdapter: Received WebSocket message:', message.type)
}
// Handle ping/pong messages for keep-alive
if (message.type === 'ping') {
this.sendPong()
@ -426,13 +383,11 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
// Handle test messages
if (message.type === 'test') {
console.log('🔌 CloudflareAdapter: Received test message:', message.message)
return
}
// Handle presence updates from other clients
if (message.type === 'presence') {
// Pass senderId, userName, and userColor so we can create proper instance_presence records
if (this.onPresenceUpdate && message.userId && message.data) {
this.onPresenceUpdate(message.userId, message.data, message.senderId, message.userName, message.userColor)
}
@ -441,49 +396,31 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
// Handle leave messages (user disconnected)
if (message.type === 'leave') {
console.log('👋 CloudflareAdapter: User left:', message.sessionId)
if (this.onPresenceLeave && message.sessionId) {
this.onPresenceLeave(message.sessionId)
}
return
}
// Convert the message to the format expected by Automerge
if (message.type === 'sync' && message.data) {
console.log('🔌 CloudflareAdapter: Received sync message with data:', {
hasStore: !!message.data.store,
storeKeys: message.data.store ? Object.keys(message.data.store).length : 0,
documentId: message.documentId,
documentIdType: typeof message.documentId
})
// JSON sync for real-time collaboration
// When we receive TLDraw changes from other clients, apply them locally
const isJsonDocumentData = message.data && typeof message.data === 'object' && message.data.store
if (isJsonDocumentData) {
console.log('📥 CloudflareAdapter: Received JSON sync message with store data')
// Call the JSON sync callback to apply changes
if (this.onJsonSyncData) {
this.onJsonSyncData(message.data)
} else {
console.warn('⚠️ No JSON sync callback registered')
}
return // JSON sync handled
return
}
// Validate documentId - Automerge requires a valid Automerge URL format
// Valid formats: "automerge:xxxxx" or other valid URL formats
// Invalid: plain strings like "default", "default-room", etc.
const isValidDocumentId = message.documentId &&
(typeof message.documentId === 'string' &&
(message.documentId.startsWith('automerge:') ||
message.documentId.includes(':') ||
/^[a-f0-9-]{36,}$/i.test(message.documentId))) // UUID-like format
// For binary sync messages, use Automerge's sync protocol
// Only include documentId if it's a valid Automerge document ID format
// Validate documentId format
const isValidDocumentId = message.documentId &&
(typeof message.documentId === 'string' &&
(message.documentId.startsWith('automerge:') ||
message.documentId.includes(':') ||
/^[a-f0-9-]{36,}$/i.test(message.documentId)))
const syncMessage: Message = {
type: 'sync',
senderId: message.senderId || this.peerId || ('unknown' as PeerId),
@ -491,42 +428,22 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
data: message.data,
...(isValidDocumentId && { documentId: message.documentId })
}
if (message.documentId && !isValidDocumentId) {
console.warn('⚠️ CloudflareAdapter: Ignoring invalid documentId from server:', message.documentId)
}
this.emit('message', syncMessage)
} else if (message.senderId && message.targetId) {
this.emit('message', message as Message)
}
}
} catch (error) {
console.error('❌ CloudflareAdapter: Error parsing WebSocket message:', error)
console.error('Error parsing WebSocket message:', error)
}
}
this.websocket.onclose = (event) => {
console.log('Disconnected from Cloudflare WebSocket', {
code: event.code,
reason: event.reason,
wasClean: event.wasClean,
url: wsUrl,
reconnectAttempts: this.reconnectAttempts
})
this.isConnecting = false
this.stopKeepAlive()
// Log specific error codes for debugging
if (event.code === 1005) {
console.error('❌ WebSocket closed with code 1005 (No Status Received) - this usually indicates a connection issue or idle timeout')
} else if (event.code === 1006) {
console.error('❌ WebSocket closed with code 1006 (Abnormal Closure) - connection was lost unexpectedly')
} else if (event.code === 1011) {
console.error('❌ WebSocket closed with code 1011 (Server Error) - server encountered an error')
} else if (event.code === 1000) {
console.log('✅ WebSocket closed normally (code 1000)')
if (event.code === 1000) {
this.setConnectionState('disconnected')
return // Don't reconnect on normal closure
}
@ -544,15 +461,7 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
this.scheduleReconnect(peerId, peerMetadata)
}
this.websocket.onerror = (error) => {
console.error('WebSocket error:', error)
console.error('WebSocket readyState:', this.websocket?.readyState)
console.error('WebSocket URL:', wsUrl)
console.error('Error event details:', {
type: error.type,
target: error.target,
isTrusted: error.isTrusted
})
this.websocket.onerror = () => {
this.isConnecting = false
}
} catch (error) {
@ -564,25 +473,10 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
}
send(message: Message): void {
// Only log non-presence messages to reduce console spam
if (message.type !== 'presence') {
console.log('📤 CloudflareAdapter.send() called:', {
messageType: message.type,
dataType: (message as any).data?.constructor?.name || typeof (message as any).data,
dataLength: (message as any).data?.byteLength || (message as any).data?.length,
documentId: (message as any).documentId,
hasTargetId: !!message.targetId,
hasSenderId: !!message.senderId,
useBinarySync: this.useBinarySync
})
}
// CRITICAL: Capture documentId from outgoing sync messages
// This allows us to use it for incoming messages from the server
// Capture documentId from outgoing sync messages
if (message.type === 'sync' && (message as any).documentId) {
const docId = (message as any).documentId
if (this.currentDocumentId !== docId) {
console.log('📋 CloudflareAdapter: Captured documentId from outgoing sync:', docId)
this.currentDocumentId = docId
}
}
@ -590,49 +484,14 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
if (this.websocket && this.websocket.readyState === WebSocket.OPEN) {
// Check if this is a binary sync message from Automerge Repo
if (message.type === 'sync' && (message as any).data instanceof ArrayBuffer) {
console.log('📤 CloudflareAdapter: Sending binary sync message (Automerge protocol)', {
dataLength: (message as any).data.byteLength,
documentId: (message as any).documentId,
targetId: message.targetId
})
// Send binary data directly for Automerge's native sync protocol
this.websocket.send((message as any).data)
return // CRITICAL: Don't fall through to JSON send
return
} else if (message.type === 'sync' && (message as any).data instanceof Uint8Array) {
console.log('📤 CloudflareAdapter: Sending Uint8Array sync message (Automerge protocol)', {
dataLength: (message as any).data.length,
documentId: (message as any).documentId,
targetId: message.targetId
})
// Send Uint8Array directly - WebSocket accepts Uint8Array
this.websocket.send((message as any).data)
return // CRITICAL: Don't fall through to JSON send
return
} else {
// Handle text-based messages (backward compatibility and control messages)
// Only log non-presence messages
if (message.type !== 'presence') {
console.log('📤 Sending WebSocket message:', message.type)
}
// Debug: Log patch content if it's a patch message
if (message.type === 'patch' && (message as any).patches) {
console.log('🔍 Sending patches:', (message as any).patches.length, 'patches')
;(message as any).patches.forEach((patch: any, index: number) => {
console.log(` Patch ${index}:`, {
action: patch.action,
path: patch.path,
value: patch.value ? (typeof patch.value === 'object' ? 'object' : patch.value) : 'undefined'
})
})
}
this.websocket.send(JSON.stringify(message))
}
} else {
if (message.type !== 'presence') {
console.warn('⚠️ CloudflareAdapter: Cannot send message - WebSocket not open', {
messageType: message.type,
readyState: this.websocket?.readyState
})
}
}
}
@ -669,7 +528,6 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
type: 'leave',
sessionId: this.sessionId
}))
console.log('👋 CloudflareAdapter: Sent leave message for session:', this.sessionId)
} catch (e) {
// Ignore errors when sending leave message
}
@ -683,13 +541,12 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
// Send ping every 30 seconds to prevent idle timeout
this.keepAliveInterval = setInterval(() => {
if (this.websocket && this.websocket.readyState === WebSocket.OPEN) {
console.log('🔌 CloudflareAdapter: Sending keep-alive ping')
this.websocket.send(JSON.stringify({
type: 'ping',
timestamp: Date.now()
}))
}
}, 30000) // 30 seconds
}, 30000)
}
private stopKeepAlive(): void {
@ -710,18 +567,14 @@ export class CloudflareNetworkAdapter extends NetworkAdapter {
private scheduleReconnect(peerId: PeerId, peerMetadata?: PeerMetadata): void {
if (this.reconnectAttempts >= this.maxReconnectAttempts) {
console.error('❌ CloudflareAdapter: Max reconnection attempts reached, giving up')
return
}
this.reconnectAttempts++
const delay = Math.min(this.reconnectDelay * Math.pow(2, this.reconnectAttempts - 1), 30000) // Max 30 seconds
console.log(`🔄 CloudflareAdapter: Scheduling reconnect attempt ${this.reconnectAttempts}/${this.maxReconnectAttempts} in ${delay}ms`)
const delay = Math.min(this.reconnectDelay * Math.pow(2, this.reconnectAttempts - 1), 30000)
this.reconnectTimeout = setTimeout(() => {
if (this.roomId) {
console.log(`🔄 CloudflareAdapter: Attempting reconnect ${this.reconnectAttempts}/${this.maxReconnectAttempts}`)
this.connect(peerId, peerMetadata)
}
}, delay)

View File

@ -132,6 +132,10 @@ import { MultmuxShape } from "@/shapes/MultmuxShapeUtil"
import { MycelialIntelligenceShape } from "@/shapes/MycelialIntelligenceShapeUtil"
// Open Mapping - OSM map shape for geographic visualization
import { MapShape } from "@/shapes/MapShapeUtil"
// Calendar shape for calendar functionality
import { CalendarShape } from "@/shapes/CalendarShapeUtil"
// Drawfast shape for quick drawing/sketching
import { DrawfastShape } from "@/shapes/DrawfastShapeUtil"
export function useAutomergeStoreV2({
handle,
@ -169,6 +173,8 @@ export function useAutomergeStoreV2({
MultmuxShape,
MycelialIntelligenceShape, // Deprecated - kept for backwards compatibility
MapShape, // Open Mapping - OSM map shape
CalendarShape, // Calendar with view switching
DrawfastShape, // Drawfast quick sketching
]
// CRITICAL: Explicitly list ALL custom shape types to ensure they're registered
@ -193,6 +199,8 @@ export function useAutomergeStoreV2({
'Multmux',
'MycelialIntelligence', // Deprecated - kept for backwards compatibility
'Map', // Open Mapping - OSM map shape
'Calendar', // Calendar with view switching
'Drawfast', // Drawfast quick sketching
]
// Build schema with explicit entries for all custom shapes

View File

@ -82,58 +82,48 @@ export const ObsidianVaultBrowser: React.FC<ObsidianVaultBrowserProps> = ({
// Save vault to Automerge store
const saveVaultToAutomerge = (vault: ObsidianVault) => {
if (!automergeHandle) {
console.warn('⚠️ Automerge handle not available, saving to localStorage only')
try {
const vaultRecord = importer.vaultToRecord(vault)
localStorage.setItem(`obsidian_vault_cache:${vault.name}`, JSON.stringify({
...vaultRecord,
lastImported: vaultRecord.lastImported instanceof Date ? vaultRecord.lastImported.toISOString() : vaultRecord.lastImported
}))
console.log('🔧 Saved vault to localStorage (Automerge handle not available):', vaultRecord.id)
} catch (localStorageError) {
console.warn('⚠️ Could not save vault to localStorage:', localStorageError)
console.warn('Could not save vault to localStorage:', localStorageError)
}
return
}
try {
const vaultRecord = importer.vaultToRecord(vault)
// Save directly to Automerge, bypassing TLDraw store validation
// This allows us to save custom record types like obsidian_vault
automergeHandle.change((doc: any) => {
// Ensure doc.store exists
if (!doc.store) {
doc.store = {}
}
// Save the vault record directly to Automerge store
// Convert Date to ISO string for serialization
const recordToSave = {
...vaultRecord,
lastImported: vaultRecord.lastImported instanceof Date
? vaultRecord.lastImported.toISOString()
lastImported: vaultRecord.lastImported instanceof Date
? vaultRecord.lastImported.toISOString()
: vaultRecord.lastImported
}
doc.store[vaultRecord.id] = recordToSave
})
console.log('🔧 Saved vault to Automerge:', vaultRecord.id)
// Also save to localStorage as a backup
try {
localStorage.setItem(`obsidian_vault_cache:${vault.name}`, JSON.stringify({
...vaultRecord,
lastImported: vaultRecord.lastImported instanceof Date ? vaultRecord.lastImported.toISOString() : vaultRecord.lastImported
}))
console.log('🔧 Saved vault to localStorage as backup:', vaultRecord.id)
} catch (localStorageError) {
console.warn('⚠️ Could not save vault to localStorage:', localStorageError)
// Silent fail for backup
}
} catch (error) {
console.error('❌ Error saving vault to Automerge:', error)
// Don't throw - allow vault loading to continue even if saving fails
console.error('Error saving vault to Automerge:', error)
// Try localStorage as fallback
try {
const vaultRecord = importer.vaultToRecord(vault)
@ -141,9 +131,8 @@ export const ObsidianVaultBrowser: React.FC<ObsidianVaultBrowserProps> = ({
...vaultRecord,
lastImported: vaultRecord.lastImported instanceof Date ? vaultRecord.lastImported.toISOString() : vaultRecord.lastImported
}))
console.log('🔧 Saved vault to localStorage as fallback:', vaultRecord.id)
} catch (localStorageError) {
console.warn('⚠️ Could not save vault to localStorage:', localStorageError)
console.warn('Could not save vault to localStorage:', localStorageError)
}
}
}
@ -157,10 +146,8 @@ export const ObsidianVaultBrowser: React.FC<ObsidianVaultBrowserProps> = ({
if (doc && doc.store) {
const vaultId = `obsidian_vault:${vaultName}`
const vaultRecord = doc.store[vaultId] as ObsidianVaultRecord | undefined
if (vaultRecord && vaultRecord.typeName === 'obsidian_vault') {
console.log('🔧 Loaded vault from Automerge:', vaultId)
// Convert date string back to Date object if needed
const recordCopy = JSON.parse(JSON.stringify(vaultRecord))
if (typeof recordCopy.lastImported === 'string') {
recordCopy.lastImported = new Date(recordCopy.lastImported)
@ -169,18 +156,16 @@ export const ObsidianVaultBrowser: React.FC<ObsidianVaultBrowserProps> = ({
}
}
} catch (error) {
console.warn('⚠️ Could not load vault from Automerge:', error)
// Fall through to localStorage
}
}
// Try localStorage as fallback
try {
const cached = localStorage.getItem(`obsidian_vault_cache:${vaultName}`)
if (cached) {
const vaultRecord = JSON.parse(cached) as ObsidianVaultRecord
if (vaultRecord && vaultRecord.typeName === 'obsidian_vault') {
console.log('🔧 Loaded vault from localStorage cache:', vaultName)
// Convert date string back to Date object
if (typeof vaultRecord.lastImported === 'string') {
vaultRecord.lastImported = new Date(vaultRecord.lastImported)
}
@ -188,9 +173,9 @@ export const ObsidianVaultBrowser: React.FC<ObsidianVaultBrowserProps> = ({
}
}
} catch (e) {
console.warn('⚠️ Could not load vault from localStorage:', e)
// Silent fail
}
return null
}
@ -198,47 +183,31 @@ export const ObsidianVaultBrowser: React.FC<ObsidianVaultBrowserProps> = ({
useEffect(() => {
// Prevent multiple loads if already loading or already loaded once
if (isLoadingVault || hasLoadedOnce) {
console.log('🔧 ObsidianVaultBrowser: Skipping load - already loading or loaded once')
return
}
console.log('🔧 ObsidianVaultBrowser: Component mounted, checking user identity for vault...')
console.log('🔧 Current session vault data:', {
path: session.obsidianVaultPath,
name: session.obsidianVaultName,
authed: session.authed,
username: session.username
})
// FIRST PRIORITY: Try to load from user's configured vault in session (user identity)
if (session.obsidianVaultPath && session.obsidianVaultPath !== 'folder-selected') {
console.log('✅ Found configured vault in user identity:', session.obsidianVaultPath)
console.log('🔧 Loading vault from user identity...')
// First try to load from Automerge cache for faster loading
if (session.obsidianVaultName) {
const cachedVault = loadVaultFromAutomerge(session.obsidianVaultName)
if (cachedVault) {
console.log('✅ Loaded vault from Automerge cache')
setVault(cachedVault)
setIsLoading(false)
setHasLoadedOnce(true)
return
}
}
// If not in cache, load from source (Quartz URL or local path)
console.log('🔧 Loading vault from source:', session.obsidianVaultPath)
loadVault(session.obsidianVaultPath)
} else if (session.obsidianVaultPath === 'folder-selected' && session.obsidianVaultName) {
console.log('🔧 Vault was previously selected via folder picker, showing reselect interface')
// For folder-selected vaults, we can't reload them, so show a special reselect interface
setVault(null)
setShowFolderReselect(true)
setIsLoading(false)
setHasLoadedOnce(true)
} else {
console.log('⚠️ No vault configured in user identity, showing empty state...')
setVault(null)
setIsLoading(false)
setHasLoadedOnce(true)
@ -250,30 +219,28 @@ export const ObsidianVaultBrowser: React.FC<ObsidianVaultBrowserProps> = ({
// Check if values actually changed (not just object reference)
const vaultPathChanged = previousVaultPathRef.current !== session.obsidianVaultPath
const vaultNameChanged = previousVaultNameRef.current !== session.obsidianVaultName
// If vault is already loaded and values haven't changed, don't do anything
if (hasLoadedOnce && !vaultPathChanged && !vaultNameChanged) {
return // Already loaded and nothing changed, no need to reload
return
}
// Update refs to current values
previousVaultPathRef.current = session.obsidianVaultPath
previousVaultNameRef.current = session.obsidianVaultName
// Only proceed if values actually changed and we haven't loaded yet
if (!vaultPathChanged && !vaultNameChanged) {
return // Values haven't changed, no need to reload
return
}
if (hasLoadedOnce || isLoadingVault) {
return // Don't reload if we've already loaded or are currently loading
return
}
if (session.obsidianVaultPath && session.obsidianVaultPath !== 'folder-selected') {
console.log('🔧 Session vault path changed, loading vault:', session.obsidianVaultPath)
loadVault(session.obsidianVaultPath)
} else if (session.obsidianVaultPath === 'folder-selected' && session.obsidianVaultName) {
console.log('🔧 Session shows folder-selected vault, showing reselect interface')
setVault(null)
setShowFolderReselect(true)
setIsLoading(false)
@ -284,7 +251,6 @@ export const ObsidianVaultBrowser: React.FC<ObsidianVaultBrowserProps> = ({
// Auto-open folder picker if requested
useEffect(() => {
if (autoOpenFolderPicker) {
console.log('Auto-opening folder picker...')
handleFolderPicker()
}
}, [autoOpenFolderPicker])
@ -312,7 +278,6 @@ export const ObsidianVaultBrowser: React.FC<ObsidianVaultBrowserProps> = ({
useEffect(() => {
const handleKeyDown = (event: KeyboardEvent) => {
if (event.key === 'Escape') {
console.log('🔧 ESC key pressed, closing vault browser')
onClose()
}
}
@ -326,57 +291,38 @@ export const ObsidianVaultBrowser: React.FC<ObsidianVaultBrowserProps> = ({
const loadVault = async (path?: string) => {
// Prevent concurrent loading operations
if (isLoadingVault) {
console.log('🔧 loadVault: Already loading, skipping concurrent request')
return
}
setIsLoadingVault(true)
setIsLoading(true)
setError(null)
try {
if (path) {
// Check if it's a Quartz URL
if (path.startsWith('http') || path.includes('quartz') || path.includes('.xyz') || path.includes('.com')) {
// Load from Quartz URL - always get latest data
console.log('🔧 Loading Quartz vault from URL (getting latest data):', path)
const loadedVault = await importer.importFromQuartzUrl(path)
console.log('Loaded Quartz vault from URL:', loadedVault)
setVault(loadedVault)
setShowVaultInput(false)
setShowFolderReselect(false)
// Save the vault path and name to user session
console.log('🔧 Saving Quartz vault to session:', { path, name: loadedVault.name })
updateSession({
updateSession({
obsidianVaultPath: path,
obsidianVaultName: loadedVault.name
})
console.log('🔧 Quartz vault saved to session successfully')
// Save vault to Automerge for persistence
saveVaultToAutomerge(loadedVault)
} else {
// Load from local directory
console.log('🔧 Loading vault from local directory:', path)
const loadedVault = await importer.importFromDirectory(path)
console.log('Loaded vault from path:', loadedVault)
setVault(loadedVault)
setShowVaultInput(false)
setShowFolderReselect(false)
// Save the vault path and name to user session
console.log('🔧 Saving vault to session:', { path, name: loadedVault.name })
updateSession({
updateSession({
obsidianVaultPath: path,
obsidianVaultName: loadedVault.name
})
console.log('🔧 Vault saved to session successfully')
// Save vault to Automerge for persistence
saveVaultToAutomerge(loadedVault)
}
} else {
// No vault configured - show empty state
console.log('No vault configured, showing empty state...')
setVault(null)
setShowVaultInput(false)
}
@ -384,8 +330,6 @@ export const ObsidianVaultBrowser: React.FC<ObsidianVaultBrowserProps> = ({
console.error('Failed to load vault:', err)
setError('Failed to load Obsidian vault. Please try again.')
setVault(null)
// Don't show vault input if user already has a vault configured
// Only show vault input if this is a fresh attempt
if (!session.obsidianVaultPath) {
setShowVaultInput(true)
}
@ -401,11 +345,8 @@ export const ObsidianVaultBrowser: React.FC<ObsidianVaultBrowserProps> = ({
setError('Please enter a vault path or URL')
return
}
console.log('📝 Submitting vault path:', vaultPath.trim(), 'Method:', inputMethod)
if (inputMethod === 'quartz') {
// Handle Quartz URL
try {
setIsLoading(true)
setError(null)
@ -413,70 +354,49 @@ export const ObsidianVaultBrowser: React.FC<ObsidianVaultBrowserProps> = ({
setVault(loadedVault)
setShowVaultInput(false)
setShowFolderReselect(false)
// Save Quartz vault to user identity (session)
console.log('🔧 Saving Quartz vault to user identity:', {
path: vaultPath.trim(),
name: loadedVault.name
})
updateSession({
updateSession({
obsidianVaultPath: vaultPath.trim(),
obsidianVaultName: loadedVault.name
})
} catch (error) {
console.error('Error loading Quartz vault:', error)
console.error('Error loading Quartz vault:', error)
setError(error instanceof Error ? error.message : 'Failed to load Quartz vault')
} finally {
setIsLoading(false)
}
} else {
// Handle regular vault path (local folder or URL)
loadVault(vaultPath.trim())
}
}
const handleFolderPicker = async () => {
console.log('📁 Folder picker button clicked')
if (!('showDirectoryPicker' in window)) {
setError('File System Access API is not supported in this browser. Please use "Enter Path" instead.')
setShowVaultInput(true)
return
}
try {
setIsLoading(true)
setError(null)
console.log('📁 Opening directory picker...')
const loadedVault = await importer.importFromFileSystem()
console.log('✅ Vault loaded from folder picker:', loadedVault.name)
setVault(loadedVault)
setShowVaultInput(false)
setShowFolderReselect(false)
// Note: We can't get the actual path from importFromFileSystem,
// but we can save a flag that a folder was selected
console.log('🔧 Saving folder-selected vault to user identity:', {
path: 'folder-selected',
name: loadedVault.name
})
updateSession({
updateSession({
obsidianVaultPath: 'folder-selected',
obsidianVaultName: loadedVault.name
})
console.log('✅ Folder-selected vault saved to user identity successfully')
// Save vault to Automerge for persistence
saveVaultToAutomerge(loadedVault)
} catch (err) {
console.error('❌ Failed to load vault from folder picker:', err)
if ((err as any).name === 'AbortError') {
// User cancelled the folder picker
console.log('📁 User cancelled folder picker')
setError(null) // Don't show error for cancellation
setError(null)
} else {
console.error('Failed to load vault from folder picker:', err)
setError('Failed to load Obsidian vault. Please try again.')
}
} finally {
@ -514,45 +434,27 @@ export const ObsidianVaultBrowser: React.FC<ObsidianVaultBrowserProps> = ({
const folderNotes = importer.getAllNotesFromTree(folder)
obs_notes = obs_notes.filter(note => folderNotes.some(folderNote => folderNote.id === note.id))
}
} else if (viewMode === 'tree' && selectedFolder === null) {
// In tree view but no folder selected, show all notes
// This allows users to see all notes when no specific folder is selected
}
// Debug logging
console.log('Search query:', debouncedSearchQuery)
console.log('View mode:', viewMode)
console.log('Selected folder:', selectedFolder)
console.log('Total notes:', vault.obs_notes.length)
console.log('Filtered notes:', obs_notes.length)
return obs_notes
}, [vault, debouncedSearchQuery, viewMode, selectedFolder, folderTree, importer])
// Listen for trigger-obsnote-creation event from CustomToolbar
useEffect(() => {
const handleTriggerCreation = () => {
console.log('🎯 ObsidianVaultBrowser: Received trigger-obsnote-creation event')
if (selectedNotes.size > 0) {
// Create shapes from currently selected notes
const selectedObsNotes = filteredObsNotes.filter(obs_note => selectedNotes.has(obs_note.id))
console.log('🎯 Creating shapes from selected notes:', selectedObsNotes.length)
onObsNotesSelect(selectedObsNotes)
} else {
// If no notes are selected, select all visible notes
const allVisibleNotes = filteredObsNotes
if (allVisibleNotes.length > 0) {
console.log('🎯 No notes selected, creating shapes from all visible notes:', allVisibleNotes.length)
onObsNotesSelect(allVisibleNotes)
} else {
console.log('🎯 No notes available to create shapes from')
}
}
}
window.addEventListener('trigger-obsnote-creation', handleTriggerCreation as EventListener)
return () => {
window.removeEventListener('trigger-obsnote-creation', handleTriggerCreation as EventListener)
}
@ -663,7 +565,6 @@ export const ObsidianVaultBrowser: React.FC<ObsidianVaultBrowserProps> = ({
}
// Forward a single clicked note to the parent via the onObsNoteSelect callback.
// NOTE(review): removed the leftover emoji debug console.log — it logged the
// whole note object on every click and added no information beyond what the
// parent callback already receives.
const handleObsNoteClick = (obs_note: ObsidianObsNote) => {
  onObsNoteSelect(obs_note)
}
@ -679,7 +580,6 @@ export const ObsidianVaultBrowser: React.FC<ObsidianVaultBrowserProps> = ({
// Import all currently-selected notes in one batch, then clear the selection
// so the note checkboxes reset for the next batch.
// NOTE(review): removed the leftover emoji debug console.log (selection count),
// consistent with the debug-log cleanup elsewhere in this file.
const handleBulkImport = () => {
  // Resolve selected ids against the filtered list so hidden notes are excluded.
  const selectedObsNotes = filteredObsNotes.filter(obs_note => selectedNotes.has(obs_note.id))
  onObsNotesSelect(selectedObsNotes)
  setSelectedNotes(new Set())
}
@ -730,13 +630,11 @@ export const ObsidianVaultBrowser: React.FC<ObsidianVaultBrowserProps> = ({
const handleDisconnectVault = () => {
// Clear the vault from session
updateSession({
updateSession({
obsidianVaultPath: undefined,
obsidianVaultName: undefined
})
// Reset component state
setVault(null)
setSearchQuery('')
setDebouncedSearchQuery('')
@ -746,8 +644,6 @@ export const ObsidianVaultBrowser: React.FC<ObsidianVaultBrowserProps> = ({
setError(null)
setHasLoadedOnce(false)
setIsLoadingVault(false)
console.log('🔧 Vault disconnected successfully')
}
const handleBackdropClick = (e: React.MouseEvent<HTMLDivElement>) => {
@ -841,24 +737,19 @@ export const ObsidianVaultBrowser: React.FC<ObsidianVaultBrowserProps> = ({
<h3>Load Obsidian Vault</h3>
<p>Choose how you'd like to load your Obsidian vault:</p>
<div className="vault-options">
<button
onClick={() => {
console.log('📁 Select Folder button clicked')
handleFolderPicker()
}}
<button
onClick={handleFolderPicker}
className="load-vault-button primary"
>
📁 Select Folder
</button>
<button
<button
onClick={() => {
console.log('📝 Enter Path button clicked')
// Pre-populate with session vault path if available
if (session.obsidianVaultPath && session.obsidianVaultPath !== 'folder-selected') {
setVaultPath(session.obsidianVaultPath)
}
setShowVaultInput(true)
}}
}}
className="load-vault-button secondary"
>
📝 Enter Path

View File

@ -65,16 +65,8 @@ export async function llm(
// Get all available providers with valid keys
const availableProviders = getAvailableProviders(availableKeys, settings);
console.log(`🔍 Found ${availableProviders.length} available AI providers:`,
availableProviders.map(p => `${p.provider} (${p.model})`).join(', '));
if (availableProviders.length === 0) {
const runpodConfig = getRunPodConfig();
if (runpodConfig && runpodConfig.apiKey && runpodConfig.endpointId) {
// RunPod should have been added, but if not, try one more time
console.log('⚠️ No user API keys found, but RunPod is configured - this should not happen');
}
throw new Error("No valid API key found for any provider. Please configure API keys in settings or set up RunPod environment variables (VITE_RUNPOD_API_KEY and VITE_RUNPOD_ENDPOINT_ID).")
}
@ -95,55 +87,43 @@ export async function llm(
for (const providerInfo of availableProviders) {
const { provider, apiKey, model, endpointId } = providerInfo as any;
try {
console.log(`🔄 Attempting to use ${provider} API (${model})...`);
attemptedProviders.push(`${provider} (${model})`);
// Add retry logic for temporary failures
await callProviderAPIWithRetry(provider, apiKey, model, userPrompt, onToken, settings, endpointId, customSystemPrompt);
console.log(`✅ Successfully used ${provider} API (${model})`);
return; // Success, exit the function
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
// Check if it's a model not found error (404) for Anthropic - try fallback models
if (provider === 'anthropic' && (errorMessage.includes('404') || errorMessage.includes('not_found_error') || errorMessage.includes('model:'))) {
console.warn(`${provider} model ${model} not found, trying fallback models...`);
// Try fallback models
let fallbackSucceeded = false;
for (const fallbackModel of anthropicFallbackModels) {
if (fallbackModel === model) continue; // Skip the one we already tried
try {
console.log(`🔄 Trying fallback model: ${fallbackModel}...`);
attemptedProviders.push(`${provider} (${fallbackModel})`);
const providerInfo = availableProviders.find(p => p.provider === provider);
const endpointId = (providerInfo as any)?.endpointId;
await callProviderAPIWithRetry(provider, apiKey, fallbackModel, userPrompt, onToken, settings, endpointId, customSystemPrompt);
console.log(`✅ Successfully used ${provider} API with fallback model ${fallbackModel}`);
fallbackSucceeded = true;
return; // Success, exit the function
} catch (fallbackError) {
const fallbackErrorMessage = fallbackError instanceof Error ? fallbackError.message : String(fallbackError);
console.warn(`❌ Fallback model ${fallbackModel} also failed:`, fallbackErrorMessage);
// Continue to next fallback model
}
}
if (!fallbackSucceeded) {
console.warn(`❌ All ${provider} models failed`);
lastError = error as Error;
}
} else if (errorMessage.includes('401') || errorMessage.includes('403') ||
} else if (errorMessage.includes('401') || errorMessage.includes('403') ||
errorMessage.includes('Unauthorized') || errorMessage.includes('Invalid API key') ||
errorMessage.includes('expired') || errorMessage.includes('Expired')) {
console.warn(`${provider} API authentication failed (invalid/expired API key):`, errorMessage);
// Mark this specific API key as invalid for future attempts
markApiKeyAsInvalid(provider, apiKey);
console.log(`🔄 Will try next available API key...`);
lastError = error as Error;
} else {
console.warn(`${provider} API failed (non-auth error):`, errorMessage);
lastError = error as Error;
}
// Continue to next provider/key
@ -173,8 +153,6 @@ function getAvailableProviders(availableKeys: Record<string, string>, settings:
}
providers.push(providerInfo);
return true;
} else if (isApiKeyInvalid(provider, apiKey)) {
console.log(`⏭️ Skipping ${provider} API key (marked as invalid)`);
}
return false;
};
@ -184,7 +162,6 @@ function getAvailableProviders(availableKeys: Record<string, string>, settings:
if (ollamaConfig && ollamaConfig.url) {
// Get the selected Ollama model from settings
const selectedOllamaModel = settings.ollamaModel || 'llama3.1:8b';
console.log(`🦙 Found Ollama configuration - using as primary AI provider (FREE) with model: ${selectedOllamaModel}`);
providers.push({
provider: 'ollama',
apiKey: 'ollama', // Ollama doesn't need an API key
@ -197,7 +174,6 @@ function getAvailableProviders(availableKeys: Record<string, string>, settings:
// RunPod vLLM text endpoint is used as fallback when Ollama is not available
const runpodTextConfig = getRunPodTextConfig();
if (runpodTextConfig && runpodTextConfig.apiKey && runpodTextConfig.endpointId) {
console.log('🔑 Found RunPod TEXT endpoint configuration from environment variables');
providers.push({
provider: 'runpod',
apiKey: runpodTextConfig.apiKey,
@ -208,7 +184,6 @@ function getAvailableProviders(availableKeys: Record<string, string>, settings:
// Fallback to generic RunPod config if text endpoint not configured
const runpodConfig = getRunPodConfig();
if (runpodConfig && runpodConfig.apiKey && runpodConfig.endpointId) {
console.log('🔑 Found RunPod configuration from environment variables (generic endpoint)');
providers.push({
provider: 'runpod',
apiKey: runpodConfig.apiKey,
@ -262,8 +237,6 @@ function getAvailableProviders(availableKeys: Record<string, string>, settings:
apiKey: directSettings,
model: getDefaultModel('openai')
});
} else if (isApiKeyInvalid('openai', directSettings)) {
console.log(`⏭️ Skipping OpenAI API key (marked as invalid)`);
}
} else {
// Try to parse as JSON
@ -277,8 +250,6 @@ function getAvailableProviders(availableKeys: Record<string, string>, settings:
apiKey: value,
model: parsed.models?.[key] || getDefaultModel(key)
});
} else if (isApiKeyInvalid(key, value as string)) {
console.log(`⏭️ Skipping ${key} API key (marked as invalid)`);
}
}
}
@ -290,8 +261,6 @@ function getAvailableProviders(availableKeys: Record<string, string>, settings:
apiKey: directSettings,
model: getDefaultModel('openai')
});
} else if (isApiKeyInvalid('openai', directSettings)) {
console.log(`⏭️ Skipping OpenAI API key (marked as invalid)`);
}
}
}
@ -336,13 +305,11 @@ function getUserSpecificApiKeys() {
apiKey,
model: parsed.models?.[provider] || getDefaultModel(provider)
});
} else if (isApiKeyInvalid(provider, apiKey as string)) {
console.log(`⏭️ Skipping ${provider} API key (marked as invalid)`);
}
}
}
} catch (parseError) {
console.warn('Failed to parse user-specific API keys:', parseError);
// Silently skip invalid JSON
}
}
@ -356,8 +323,6 @@ function getUserSpecificApiKeys() {
apiKey: key,
model: getDefaultModel(provider)
});
} else if (isApiKeyInvalid(provider, key as string)) {
console.log(`⏭️ Skipping ${provider} API key (marked as invalid)`);
}
}
}
@ -389,8 +354,6 @@ function getUserSpecificApiKeys() {
apiKey,
model: parsed.models?.[provider] || getDefaultModel(provider)
});
} else if (isApiKeyInvalid(provider, apiKey as string)) {
console.log(`⏭️ Skipping ${provider} API key (marked as invalid)`);
}
}
}
@ -400,11 +363,11 @@ function getUserSpecificApiKeys() {
}
}
} catch (parseError) {
console.warn('Failed to parse registered users:', parseError);
// Silently skip parse errors
}
}
} catch (error) {
console.warn('Error checking user-specific API keys:', error);
// Silently skip errors
}
return providers;
@ -463,10 +426,9 @@ async function callProviderAPIWithRetry(
if (attempt === maxRetries) {
throw error;
}
// Wait before retry (exponential backoff)
const delay = Math.pow(2, attempt - 1) * 1000; // 1s, 2s, 4s...
console.log(`⏳ Retrying ${provider} API in ${delay}ms... (attempt ${attempt + 1}/${maxRetries})`);
await new Promise(resolve => setTimeout(resolve, delay));
}
}
@ -499,7 +461,6 @@ function markApiKeyAsInvalid(provider: string, apiKey: string) {
if (!invalidKeys[provider].includes(apiKey)) {
invalidKeys[provider].push(apiKey);
localStorage.setItem(invalidKeysKey, JSON.stringify(invalidKeys));
console.log(`🚫 Marked ${provider} API key as invalid`);
}
} catch (e) {
// Silently handle errors
@ -549,21 +510,12 @@ async function callProviderAPI(
// Use custom system prompt if provided, otherwise fall back to personality-based prompt
const systemPrompt = customSystemPrompt || (settings ? getSystemPrompt(settings) : 'You are a helpful assistant.');
// Debug: log which system prompt is being used
if (customSystemPrompt) {
console.log(`🧠 Using custom system prompt (${customSystemPrompt.length} chars)`);
} else {
console.log(`🧠 Using personality-based system prompt: ${settings?.personality || 'default'}`);
}
if (provider === 'ollama') {
// Ollama API integration via AI Orchestrator
// The orchestrator provides /api/chat endpoint that routes to local Ollama
const ollamaConfig = getOllamaConfig();
const baseUrl = (settings as any)?.baseUrl || ollamaConfig?.url || 'http://localhost:11434';
console.log(`🦙 Ollama API: Using ${baseUrl}/api/chat with model ${model}`);
const messages = [];
if (systemPrompt) {
messages.push({ role: 'system', content: systemPrompt });
@ -586,12 +538,10 @@ async function callProviderAPI(
if (!response.ok) {
const errorText = await response.text();
console.error('❌ Ollama API error:', response.status, errorText);
throw new Error(`Ollama API error: ${response.status} - ${errorText}`);
}
const data = await response.json() as Record<string, any>;
console.log('📥 Ollama API: Response received:', JSON.stringify(data, null, 2).substring(0, 500));
// Extract response from AI Orchestrator format
let responseText = '';
@ -615,11 +565,9 @@ async function callProviderAPI(
}
}
console.log('✅ Ollama API: Response complete, length:', partial.length);
onToken(partial, true);
return;
} catch (error) {
console.error('❌ Ollama API error:', error);
throw error;
}
} else if (provider === 'runpod') {
@ -660,10 +608,7 @@ async function callProviderAPI(
stream: false // vLLM can handle streaming, but we'll process it synchronously for now
}
};
console.log('📤 RunPod API: Trying synchronous endpoint first:', syncUrl);
console.log('📤 RunPod API: Using OpenAI-compatible messages format');
try {
// First, try synchronous endpoint (/runsync) - this returns output immediately
try {
@ -675,11 +620,10 @@ async function callProviderAPI(
},
body: JSON.stringify(requestBody)
});
if (syncResponse.ok) {
const syncData = await syncResponse.json() as Record<string, any>;
console.log('📥 RunPod API: Synchronous response:', JSON.stringify(syncData, null, 2));
// Check if we got output directly
if (syncData.output) {
let responseText = '';
@ -695,9 +639,8 @@ async function callProviderAPI(
} else if (syncData.output.response) {
responseText = syncData.output.response;
}
if (responseText) {
console.log('✅ RunPod API: Got output from synchronous endpoint, length:', responseText.length);
// Stream the response character by character to simulate streaming
for (let i = 0; i < responseText.length; i++) {
partial += responseText[i];
@ -708,23 +651,20 @@ async function callProviderAPI(
return;
}
}
// If sync endpoint returned a job ID, fall through to async polling
if (syncData.id && (syncData.status === 'IN_QUEUE' || syncData.status === 'IN_PROGRESS')) {
console.log('⏳ RunPod API: Sync endpoint returned job ID, polling:', syncData.id);
const result = await pollRunPodJob(syncData.id, apiKey, runpodEndpointId);
console.log('✅ RunPod API: Job completed, result length:', result.length);
partial = result;
onToken(partial, true);
return;
}
}
} catch (syncError) {
console.log('⚠️ RunPod API: Synchronous endpoint not available, trying async:', syncError);
// Synchronous endpoint not available, fall back to async
}
// Fall back to async endpoint (/run) if sync didn't work
console.log('📤 RunPod API: Using async endpoint:', asyncUrl);
const response = await fetch(asyncUrl, {
method: 'POST',
headers: {
@ -733,36 +673,28 @@ async function callProviderAPI(
},
body: JSON.stringify(requestBody)
});
console.log('📥 RunPod API: Response status:', response.status, response.statusText);
if (!response.ok) {
const errorText = await response.text();
console.error('❌ RunPod API: Error response:', errorText);
throw new Error(`RunPod API error: ${response.status} - ${errorText}`);
}
const data = await response.json() as Record<string, any>;
console.log('📥 RunPod API: Response data:', JSON.stringify(data, null, 2));
// Handle async job pattern (RunPod often returns job IDs)
if (data.id && (data.status === 'IN_QUEUE' || data.status === 'IN_PROGRESS')) {
console.log('⏳ RunPod API: Job queued/in progress, polling job ID:', data.id);
const result = await pollRunPodJob(data.id, apiKey, runpodEndpointId);
console.log('✅ RunPod API: Job completed, result length:', result.length);
partial = result;
onToken(partial, true);
return;
}
// Handle OpenAI-compatible response format (vLLM endpoints)
if (data.output && data.output.choices && Array.isArray(data.output.choices)) {
console.log('📥 RunPod API: Detected OpenAI-compatible response format');
const choice = data.output.choices[0];
if (choice && choice.message && choice.message.content) {
const responseText = choice.message.content;
console.log('✅ RunPod API: Extracted content from OpenAI-compatible format, length:', responseText.length);
// Stream the response character by character to simulate streaming
for (let i = 0; i < responseText.length; i++) {
partial += responseText[i];
@ -774,33 +706,26 @@ async function callProviderAPI(
return;
}
}
// Handle direct response
if (data.output) {
console.log('📥 RunPod API: Processing output:', typeof data.output, Array.isArray(data.output) ? 'array' : 'object');
// Try to extract text from various possible response formats
let responseText = '';
if (typeof data.output === 'string') {
responseText = data.output;
console.log('✅ RunPod API: Extracted string output, length:', responseText.length);
} else if (data.output.text) {
responseText = data.output.text;
console.log('✅ RunPod API: Extracted text from output.text, length:', responseText.length);
} else if (data.output.response) {
responseText = data.output.response;
console.log('✅ RunPod API: Extracted response from output.response, length:', responseText.length);
} else if (data.output.content) {
responseText = data.output.content;
console.log('✅ RunPod API: Extracted content from output.content, length:', responseText.length);
} else if (Array.isArray(data.output.segments)) {
responseText = data.output.segments.map((seg: any) => seg.text || seg).join(' ');
console.log('✅ RunPod API: Extracted text from segments, length:', responseText.length);
} else {
// Fallback: stringify the output
console.warn('⚠️ RunPod API: Unknown output format, stringifying:', Object.keys(data.output));
responseText = JSON.stringify(data.output);
}
// Stream the response character by character to simulate streaming
for (let i = 0; i < responseText.length; i++) {
partial += responseText[i];
@ -811,28 +736,23 @@ async function callProviderAPI(
onToken(partial, true);
return;
}
// Handle error response
if (data.error) {
console.error('❌ RunPod API: Error in response:', data.error);
throw new Error(`RunPod API error: ${data.error}`);
}
// Check for status messages that might indicate endpoint is starting up
if (data.status) {
console.log(' RunPod API: Response status:', data.status);
if (data.status === 'STARTING' || data.status === 'PENDING') {
console.log('⏳ RunPod API: Endpoint appears to be starting up, this may take a moment...');
// Wait a bit and retry
await new Promise(resolve => setTimeout(resolve, 2000));
throw new Error('RunPod endpoint is starting up. Please wait a moment and try again.');
}
}
console.error('❌ RunPod API: No valid response format detected. Full response:', JSON.stringify(data, null, 2));
throw new Error('No valid response from RunPod API');
} catch (error) {
console.error('❌ RunPod API error:', error);
throw error;
}
} else if (provider === 'openai') {
@ -924,8 +844,7 @@ async function pollRunPodJob(
pollInterval: number = 1000
): Promise<string> {
const statusUrl = `https://api.runpod.ai/v2/${endpointId}/status/${jobId}`;
console.log('🔄 RunPod API: Starting to poll job:', jobId);
for (let attempt = 0; attempt < maxAttempts; attempt++) {
try {
const response = await fetch(statusUrl, {
@ -937,30 +856,21 @@ async function pollRunPodJob(
if (!response.ok) {
const errorText = await response.text();
console.error(`❌ RunPod API: Poll error (attempt ${attempt + 1}/${maxAttempts}):`, response.status, errorText);
throw new Error(`Failed to check job status: ${response.status} - ${errorText}`);
}
const data = await response.json() as Record<string, any>;
console.log(`🔄 RunPod API: Poll attempt ${attempt + 1}/${maxAttempts}, status:`, data.status);
console.log(`📥 RunPod API: Full poll response:`, JSON.stringify(data, null, 2));
if (data.status === 'COMPLETED') {
console.log('✅ RunPod API: Job completed, processing output...');
console.log('📥 RunPod API: Output structure:', typeof data.output, data.output ? Object.keys(data.output) : 'null');
console.log('📥 RunPod API: Full data object keys:', Object.keys(data));
// If no output after a couple of retries, try the stream endpoint as fallback
if (!data.output) {
if (attempt < 3) {
// Only retry 2-3 times, then try stream endpoint
console.log(`⏳ RunPod API: COMPLETED but no output yet, waiting briefly (attempt ${attempt + 1}/3)...`);
await new Promise(resolve => setTimeout(resolve, 500));
continue;
}
// After a few retries, try the stream endpoint as fallback
console.log('⚠️ RunPod API: Status endpoint not returning output, trying stream endpoint...');
try {
const streamUrl = `https://api.runpod.ai/v2/${endpointId}/stream/${jobId}`;
const streamResponse = await fetch(streamUrl, {
@ -969,52 +879,41 @@ async function pollRunPodJob(
'Authorization': `Bearer ${apiKey}`
}
});
if (streamResponse.ok) {
const streamData = await streamResponse.json() as Record<string, any>;
console.log('📥 RunPod API: Stream endpoint response:', JSON.stringify(streamData, null, 2));
if (streamData.output) {
// Use stream endpoint output
data.output = streamData.output;
console.log('✅ RunPod API: Found output via stream endpoint');
} else if (streamData.choices && Array.isArray(streamData.choices)) {
// Handle OpenAI-compatible format from stream endpoint
data.output = { choices: streamData.choices };
console.log('✅ RunPod API: Found choices via stream endpoint');
}
} else {
console.log(`⚠️ RunPod API: Stream endpoint returned ${streamResponse.status}`);
}
} catch (streamError) {
console.log('⚠️ RunPod API: Stream endpoint not available or failed:', streamError);
// Stream endpoint not available or failed
}
}
// Extract text from various possible response formats
let result = '';
if (typeof data.output === 'string') {
result = data.output;
console.log('✅ RunPod API: Extracted string output from job, length:', result.length);
} else if (data.output?.text) {
result = data.output.text;
console.log('✅ RunPod API: Extracted text from output.text, length:', result.length);
} else if (data.output?.response) {
result = data.output.response;
console.log('✅ RunPod API: Extracted response from output.response, length:', result.length);
} else if (data.output?.content) {
result = data.output.content;
console.log('✅ RunPod API: Extracted content from output.content, length:', result.length);
} else if (data.output?.choices && Array.isArray(data.output.choices)) {
// Handle OpenAI-compatible response format (vLLM endpoints)
const choice = data.output.choices[0];
if (choice && choice.message && choice.message.content) {
result = choice.message.content;
console.log('✅ RunPod API: Extracted content from OpenAI-compatible format, length:', result.length);
}
} else if (data.output?.segments && Array.isArray(data.output.segments)) {
result = data.output.segments.map((seg: any) => seg.text || seg).join(' ');
console.log('✅ RunPod API: Extracted text from segments, length:', result.length);
} else if (Array.isArray(data.output)) {
// Handle array responses (some vLLM endpoints return arrays)
result = data.output.map((item: any) => {
@ -1023,63 +922,37 @@ async function pollRunPodJob(
if (item.response) return item.response;
return JSON.stringify(item);
}).join('\n');
console.log('✅ RunPod API: Extracted text from array output, length:', result.length);
} else if (!data.output) {
// No output field - check alternative structures or return empty
console.warn('⚠️ RunPod API: No output field found, checking alternative structures...');
console.log('📥 RunPod API: Full data structure:', JSON.stringify(data, null, 2));
// Try checking if output is directly in data (not data.output)
if (typeof data === 'string') {
result = data;
console.log('✅ RunPod API: Data itself is a string, length:', result.length);
} else if (data.text) {
result = data.text;
console.log('✅ RunPod API: Found text at top level, length:', result.length);
} else if (data.response) {
result = data.response;
console.log('✅ RunPod API: Found response at top level, length:', result.length);
} else if (data.content) {
result = data.content;
console.log('✅ RunPod API: Found content at top level, length:', result.length);
} else {
// Stream endpoint already tried above (around line 848), just log that we couldn't find output
if (attempt >= 3) {
console.warn('⚠️ RunPod API: Could not find output in status or stream endpoint after multiple attempts');
}
// If still no result, return empty string instead of throwing error
// This allows the UI to render something instead of failing
if (!result) {
console.warn('⚠️ RunPod API: No output found in response. Returning empty result.');
console.log('📥 RunPod API: Available fields:', Object.keys(data));
result = ''; // Return empty string so UI can render
}
}
} else if (!data.output) {
// No output field - check alternative structures or return empty
// Try checking if output is directly in data (not data.output)
if (typeof data === 'string') {
result = data;
} else if (data.text) {
result = data.text;
} else if (data.response) {
result = data.response;
} else if (data.content) {
result = data.content;
} else {
// If still no result, return empty string instead of throwing error
// This allows the UI to render something instead of failing
result = '';
}
}
// Return result even if empty - don't loop forever
if (result !== undefined) {
// Return empty string if no result found - allows UI to render
console.log('✅ RunPod API: Returning result (may be empty):', result ? `length ${result.length}` : 'empty');
return result || '';
}
// If we get here, no output was found - return empty string instead of looping
console.warn('⚠️ RunPod API: No output found after checking all formats. Returning empty result.');
return '';
}
if (data.status === 'FAILED') {
console.error('❌ RunPod API: Job failed:', data.error || 'Unknown error');
throw new Error(`Job failed: ${data.error || 'Unknown error'}`);
}
// Check for starting/pending status
if (data.status === 'STARTING' || data.status === 'PENDING') {
console.log(`⏳ RunPod API: Endpoint still starting (attempt ${attempt + 1}/${maxAttempts})...`);
}
// Job still in progress, wait and retry
await new Promise(resolve => setTimeout(resolve, pollInterval));
} catch (error) {
@ -1090,7 +963,7 @@ async function pollRunPodJob(
await new Promise(resolve => setTimeout(resolve, pollInterval));
}
}
throw new Error('Job polling timeout - job did not complete in time');
}
@ -1119,7 +992,7 @@ async function autoMigrateAPIKeys() {
}
if (needsUpdate) {
localStorage.setItem("openai_api_key", JSON.stringify(parsed));
console.log('🔄 Migrated invalid Anthropic model name to claude-3-opus-20240229');
}
}
return; // Already migrated
@ -1260,7 +1133,7 @@ export function getFirstAvailableApiKeyAndProvider(): { key: string; provider: s
export function clearInvalidApiKeys() {
try {
localStorage.removeItem('invalid_api_keys');
console.log('🧹 Cleared all invalid API key markers');
} catch (e) {
console.warn('Failed to clear invalid API keys:', e);
}