From d733b61a6691dcd61f55aec0a0e918225277fdef Mon Sep 17 00:00:00 2001
From: Jeff-Emmett
Date: Sat, 8 Feb 2025 20:29:06 +0100
Subject: [PATCH] fix llm prompt for mobile

---
 src/routes/Board.tsx              |  2 +-
 src/shapes/PromptShapeUtil.tsx    |  6 ++---
 src/ui/CustomContextMenu.tsx      | 38 +++++++++++++++++++++++++++++++
 src/ui/overrides.tsx              |  3 +--
 src/utils/{llm.ts => llmUtils.ts} |  6 ++---
 5 files changed, 46 insertions(+), 9 deletions(-)
 rename src/utils/{llm.ts => llmUtils.ts} (86%)

diff --git a/src/routes/Board.tsx b/src/routes/Board.tsx
index b5d43b1..1600a9f 100644
--- a/src/routes/Board.tsx
+++ b/src/routes/Board.tsx
@@ -29,7 +29,7 @@ import { SlideShape } from "@/shapes/SlideShapeUtil"
 import { makeRealSettings, applySettingsMigrations } from "@/lib/settings"
 import { PromptShapeTool } from "@/tools/PromptShapeTool"
 import { PromptShape } from "@/shapes/PromptShapeUtil"
-import { llm } from "@/utils/llm"
+import { llm } from "@/utils/llmUtils"
 
 // Default to production URL if env var isn't available
 export const WORKER_URL = "https://jeffemmett-canvas.jeffemmett.workers.dev"
diff --git a/src/shapes/PromptShapeUtil.tsx b/src/shapes/PromptShapeUtil.tsx
index bc74207..1557453 100644
--- a/src/shapes/PromptShapeUtil.tsx
+++ b/src/shapes/PromptShapeUtil.tsx
@@ -6,7 +6,7 @@ import {
   TLShape,
 } from "tldraw"
 import { getEdge } from "@/propagators/tlgraph"
-import { llm } from "@/utils/llm"
+import { llm } from "@/utils/llmUtils"
 import { isShapeOfType } from "@/propagators/utils"
 
 type IPrompt = TLBaseShape<
@@ -74,8 +74,8 @@ export class PromptShape extends BaseBoxShapeUtil<IPrompt> {
       return acc
     }, {} as Record)
 
-    const generateText = async (prompt: string) => {
-      await llm("", prompt, (partial: string, done: boolean) => {
+    const generateText = async (prompt: string) => {
+      await llm(prompt, (partial: string, done: boolean) => {
         console.log("DONE??", done)
         this.editor.updateShape({
           id: shape.id,
diff --git a/src/ui/CustomContextMenu.tsx b/src/ui/CustomContextMenu.tsx
index 1379f8b..298f049 100644
--- a/src/ui/CustomContextMenu.tsx
+++ b/src/ui/CustomContextMenu.tsx
@@ -3,6 +3,7 @@ import {
   TldrawUiMenuActionItem,
   TldrawUiMenuItem,
   TldrawUiMenuSubmenu,
+  TLGeoShape,
   TLShape,
 } from "tldraw"
 import { TldrawUiMenuGroup } from "tldraw"
@@ -19,6 +20,8 @@ import { useState, useEffect } from "react"
 import { saveToPdf } from "../utils/pdfUtils"
 import { TLFrameShape } from "tldraw"
 import { searchText } from "../utils/searchUtils"
+import { llm } from "../utils/llmUtils"
+import { getEdge } from "@/propagators/tlgraph"
 
 const getAllFrames = (editor: Editor) => {
   return editor
@@ -97,6 +100,41 @@ export function CustomContextMenu(props: TLUiContextMenuProps) {
           disabled={!hasSelection}
           onSelect={() => saveToPdf(editor)}
         />
+        <TldrawUiMenuItem
+          onSelect={() => {
+            const selectedShape = editor.getSelectedShapes()[0];
+            if (!selectedShape || selectedShape.type !== 'arrow') return;
+
+            const edge = getEdge(selectedShape, editor);
+            if (!edge) return;
+
+            const sourceShape = editor.getShape(edge.from);
+            const sourceText =
+              sourceShape && sourceShape.type === "geo"
+                ? (sourceShape as TLGeoShape).props.text
+                : "";
+
+            llm(
+              `Instruction: ${edge.text}
+${sourceText ? 
+`Context: ${sourceText}` : ""}`,
+              (partialResponse) => {
+                editor.updateShape({
+                  id: edge.to,
+                  type: "geo",
+                  props: {
+                    ...(editor.getShape(edge.to) as TLGeoShape).props,
+                    text: partialResponse
+                  }
+                });
+              }
+            )
+          }}
+        />
 
         {/* Creation Tools Group */}
diff --git a/src/ui/overrides.tsx b/src/ui/overrides.tsx
index 683424e..bc71b15 100644
--- a/src/ui/overrides.tsx
+++ b/src/ui/overrides.tsx
@@ -17,7 +17,7 @@ import { EmbedShape, IEmbedShape } from "@/shapes/EmbedShapeUtil"
 import { moveToSlide } from "@/slides/useSlides"
 import { ISlideShape } from "@/shapes/SlideShapeUtil"
 import { getEdge } from "@/propagators/tlgraph"
-import { llm } from "@/utils/llm"
+import { llm } from "@/utils/llmUtils"
 
 export const overrides: TLUiOverrides = {
   tools(editor, tools) {
@@ -407,7 +407,6 @@ export const overrides: TLUiOverrides = {
           ? (sourceShape as TLGeoShape).props.text
           : ""
         llm(
-          "You are a helpful assistant.",
           `Instruction: ${edge.text}
 ${sourceText ? `Context: ${sourceText}` : ""}`,
           (partialResponse) => {
diff --git a/src/utils/llm.ts b/src/utils/llmUtils.ts
similarity index 86%
rename from src/utils/llm.ts
rename to src/utils/llmUtils.ts
index 2c09c31..58a8b2d 100644
--- a/src/utils/llm.ts
+++ b/src/utils/llmUtils.ts
@@ -7,15 +7,15 @@ const openai = new OpenAI({
 });
 
 export async function llm(
-  systemPrompt: string,
+  //systemPrompt: string,
   userPrompt: string,
   onToken: (partialResponse: string, done: boolean) => void,
 ) {
   if (!apiKey) { throw new Error("No API key found") }
 
-  console.log("System Prompt:", systemPrompt);
-  console.log("User Prompt:", userPrompt);
+  //console.log("System Prompt:", systemPrompt);
+  //console.log("User Prompt:", userPrompt);
 
   let partial = "";
   const stream = await openai.chat.completions.create({
     model: "gpt-4o",
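
For reference, a minimal usage sketch of the reworked helper after this patch, based only on the llm() signature shown in src/utils/llmUtils.ts above; the wrapper function name and prompt text are illustrative and not part of the change:

    // Sketch: stream a completion with the two-argument llm() (system prompt dropped).
    import { llm } from "@/utils/llmUtils"

    async function runPrompt(prompt: string) {
      // onToken receives the streamed text plus a done flag, which the callers
      // above write straight into the target shape's text prop as it arrives.
      await llm(prompt, (partialResponse: string, done: boolean) => {
        console.log(partialResponse, done)
      })
    }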