fix llm prompt for mobile

This commit is contained in:
Jeff-Emmett 2025-02-08 20:29:06 +01:00
parent 62fb60420b
commit f739b1f78a
5 changed files with 46 additions and 9 deletions

View File

@@ -29,7 +29,7 @@ import { SlideShape } from "@/shapes/SlideShapeUtil"
import { makeRealSettings, applySettingsMigrations } from "@/lib/settings"
import { PromptShapeTool } from "@/tools/PromptShapeTool"
import { PromptShape } from "@/shapes/PromptShapeUtil"
-import { llm } from "@/utils/llm"
+import { llm } from "@/utils/llmUtils"
// Default to production URL if env var isn't available
export const WORKER_URL = "https://jeffemmett-canvas.jeffemmett.workers.dev"

View File

@@ -6,7 +6,7 @@ import {
TLShape,
} from "tldraw"
import { getEdge } from "@/propagators/tlgraph"
-import { llm } from "@/utils/llm"
+import { llm } from "@/utils/llmUtils"
import { isShapeOfType } from "@/propagators/utils"
type IPrompt = TLBaseShape<
@@ -74,8 +74,8 @@ export class PromptShape extends BaseBoxShapeUtil<IPrompt> {
return acc
}, {} as Record<string, TLShape>)
-const generateText = async (prompt: string) => {
-await llm("", prompt, (partial: string, done: boolean) => {
+const generateText = async (prompt: string) => {
+await llm(prompt, (partial: string, done: boolean) => {
console.log("DONE??", done)
this.editor.updateShape<IPrompt>({
id: shape.id,
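The diff view cuts the callback off here. A minimal sketch of how the streaming handler plausibly continues, assuming the prompt shape keeps its output in a text prop (the "prompt" type string and the props layout are assumptions, not confirmed by this diff):

const generateText = async (prompt: string) => {
  await llm(prompt, (partial: string, done: boolean) => {
    // Push each partial completion into the shape as it streams in.
    this.editor.updateShape<IPrompt>({
      id: shape.id,
      type: "prompt", // assumed shape type name
      props: { ...shape.props, text: partial }, // assumed prop layout
    })
    if (done) console.log("LLM stream finished")
  })
}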

View File

@@ -3,6 +3,7 @@ import {
TldrawUiMenuActionItem,
TldrawUiMenuItem,
TldrawUiMenuSubmenu,
+TLGeoShape,
TLShape,
} from "tldraw"
import { TldrawUiMenuGroup } from "tldraw"
@@ -19,6 +20,8 @@ import { useState, useEffect } from "react"
import { saveToPdf } from "../utils/pdfUtils"
import { TLFrameShape } from "tldraw"
import { searchText } from "../utils/searchUtils"
+import { llm } from "../utils/llmUtils"
+import { getEdge } from "@/propagators/tlgraph"
const getAllFrames = (editor: Editor) => {
return editor
@@ -97,6 +100,41 @@ export function CustomContextMenu(props: TLUiContextMenuProps) {
disabled={!hasSelection}
onSelect={() => saveToPdf(editor)}
/>
+<TldrawUiMenuItem
+id="run-llm-prompt"
+label="Run LLM Prompt"
+icon="file"
+kbd="g"
+disabled={!hasSelection}
+onSelect={() => {
+const selectedShape = editor.getSelectedShapes()[0];
+if (!selectedShape || selectedShape.type !== 'arrow') return;
+const edge = getEdge(selectedShape, editor);
+if (!edge) return;
+const sourceShape = editor.getShape(edge.from);
+const sourceText =
+sourceShape && sourceShape.type === "geo"
+? (sourceShape as TLGeoShape).props.text
+: "";
+llm(
+`Instruction: ${edge.text}
+${sourceText ? `Context: ${sourceText}` : ""}`,
+(partialResponse) => {
+editor.updateShape({
+id: edge.to,
+type: "geo",
+props: {
+...(editor.getShape(edge.to) as TLGeoShape).props,
+text: partialResponse
+}
+});
+}
+)
+}}
+/>
</TldrawUiMenuGroup>
{/* Creation Tools Group */}
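For readers following the new "Run LLM Prompt" item: it resolves the selected arrow to a directed edge via getEdge, reads the source geo shape's text as context, and streams the model output into the target geo shape. A rough sketch of the edge value the handler assumes, with field names inferred from the usage above rather than taken from the tlgraph module itself:

// Assumed return shape of getEdge, inferred from how the handler uses it.
type Edge = {
  from: TLShapeId // shape the arrow starts at; its text becomes the context
  to: TLShapeId   // shape the arrow points to; receives the streamed LLM output
  text: string    // the arrow's label, used as the instruction
}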

View File

@@ -17,7 +17,7 @@ import { EmbedShape, IEmbedShape } from "@/shapes/EmbedShapeUtil"
import { moveToSlide } from "@/slides/useSlides"
import { ISlideShape } from "@/shapes/SlideShapeUtil"
import { getEdge } from "@/propagators/tlgraph"
-import { llm } from "@/utils/llm"
+import { llm } from "@/utils/llmUtils"
export const overrides: TLUiOverrides = {
tools(editor, tools) {
@@ -407,7 +407,6 @@ export const overrides: TLUiOverrides = {
? (sourceShape as TLGeoShape).props.text
: ""
llm(
"You are a helpful assistant.",
`Instruction: ${edge.text}
${sourceText ? `Context: ${sourceText}` : ""}`,
(partialResponse) => {

View File

@@ -7,15 +7,15 @@ const openai = new OpenAI({
});
export async function llm(
-systemPrompt: string,
+//systemPrompt: string,
userPrompt: string,
onToken: (partialResponse: string, done: boolean) => void,
) {
if (!apiKey) {
throw new Error("No API key found")
}
console.log("System Prompt:", systemPrompt);
console.log("User Prompt:", userPrompt);
//console.log("System Prompt:", systemPrompt);
//console.log("User Prompt:", userPrompt);
let partial = "";
const stream = await openai.chat.completions.create({
model: "gpt-4o",