diff --git a/src/shapes/PromptShapeUtil.tsx b/src/shapes/PromptShapeUtil.tsx
index 1557453..7fe2591 100644
--- a/src/shapes/PromptShapeUtil.tsx
+++ b/src/shapes/PromptShapeUtil.tsx
@@ -75,7 +75,7 @@ export class PromptShape extends BaseBoxShapeUtil {
     }, {} as Record)
 
     const generateText = async (prompt: string) => {
-      await llm(prompt, (partial: string, done: boolean) => {
+      await llm(prompt, localStorage.getItem("openai_api_key") || "", (partial: string, done: boolean) => {
        console.log("DONE??", done)
        this.editor.updateShape({
          id: shape.id,
diff --git a/src/ui/CustomContextMenu.tsx b/src/ui/CustomContextMenu.tsx
index 298f049..c262c4b 100644
--- a/src/ui/CustomContextMenu.tsx
+++ b/src/ui/CustomContextMenu.tsx
@@ -122,7 +122,8 @@ export function CustomContextMenu(props: TLUiContextMenuProps) {
         llm(
           `Instruction: ${edge.text}
           ${sourceText ? `Context: ${sourceText}` : ""}`,
-          (partialResponse) => {
+          localStorage.getItem("openai_api_key") || "",
+          (partialResponse: string) => {
            editor.updateShape({
              id: edge.to,
              type: "geo",
diff --git a/src/ui/overrides.tsx b/src/ui/overrides.tsx
index bc71b15..1c02522 100644
--- a/src/ui/overrides.tsx
+++ b/src/ui/overrides.tsx
@@ -409,7 +409,8 @@ export const overrides: TLUiOverrides = {
         llm(
           `Instruction: ${edge.text}
           ${sourceText ? `Context: ${sourceText}` : ""}`,
-          (partialResponse) => {
+          localStorage.getItem("openai_api_key") || "",
+          (partialResponse: string) => {
            editor.updateShape({
              id: edge.to,
              type: "geo",
diff --git a/src/utils/llmUtils.ts b/src/utils/llmUtils.ts
index 58a8b2d..5127c95 100644
--- a/src/utils/llmUtils.ts
+++ b/src/utils/llmUtils.ts
@@ -1,14 +1,9 @@
 import OpenAI from "openai";
 
-const apiKey = localStorage.getItem("openai_api_key") || ""
-const openai = new OpenAI({
-  apiKey,
-  dangerouslyAllowBrowser: true,
-});
-
 export async function llm(
   //systemPrompt: string,
   userPrompt: string,
+  apiKey: string,
   onToken: (partialResponse: string, done: boolean) => void,
 ) {
   if (!apiKey) {
@@ -17,6 +12,10 @@ export async function llm(
   //console.log("System Prompt:", systemPrompt);
   //console.log("User Prompt:", userPrompt);
   let partial = "";
+  const openai = new OpenAI({
+    apiKey,
+    dangerouslyAllowBrowser: true,
+  });
   const stream = await openai.chat.completions.create({
     model: "gpt-4o",
     messages: [
diff --git a/worker/TldrawDurableObject.ts b/worker/TldrawDurableObject.ts
index 2a467e2..add19fe 100644
--- a/worker/TldrawDurableObject.ts
+++ b/worker/TldrawDurableObject.ts
@@ -18,6 +18,7 @@ import { EmbedShape } from "@/shapes/EmbedShapeUtil"
 import { MarkdownShape } from "@/shapes/MarkdownShapeUtil"
 import { MycrozineTemplateShape } from "@/shapes/MycrozineTemplateShapeUtil"
 import { SlideShape } from "@/shapes/SlideShapeUtil"
+import { PromptShape } from "@/shapes/PromptShapeUtil"
 
 // add custom shapes and bindings here if needed:
 export const customSchema = createTLSchema({
@@ -47,6 +48,10 @@ export const customSchema = createTLSchema({
       props: SlideShape.props,
       migrations: SlideShape.migrations,
     },
+    Prompt: {
+      props: PromptShape.props,
+      migrations: PromptShape.migrations,
+    },
   },
   bindings: defaultBindingSchemas,
 })
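
For reference, a minimal sketch of the call pattern after this change, mirroring what PromptShapeUtil.tsx, CustomContextMenu.tsx, and overrides.tsx now do. It assumes the "@/utils/llmUtils" path alias resolves to src/utils/llmUtils.ts (matching the "@/shapes/..." imports above); the wrapper name generateFromPrompt is illustrative only, not part of the diff.

import { llm } from "@/utils/llmUtils"

// Callers now read the key at call time and pass it explicitly, so the
// OpenAI client in llmUtils.ts is constructed per call rather than once at
// module load, where a key saved to localStorage after page load would
// never be picked up.
async function generateFromPrompt(prompt: string) {
  const apiKey = localStorage.getItem("openai_api_key") || ""

  await llm(prompt, apiKey, (partialResponse: string, done: boolean) => {
    // partialResponse carries the streamed text so far; done flags the
    // final chunk. Real callers update a shape here instead of logging.
    console.log(partialResponse, done)
  })
}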