Fix localStorage error in worker context and in PromptShape

This commit is contained in:
Jeff-Emmett 2025-02-11 14:35:22 +01:00
parent d733b61a66
commit ebe2d4c0a2
5 changed files with 15 additions and 9 deletions

View File

@ -75,7 +75,7 @@ export class PromptShape extends BaseBoxShapeUtil<IPrompt> {
}, {} as Record<string, TLShape>) }, {} as Record<string, TLShape>)
const generateText = async (prompt: string) => { const generateText = async (prompt: string) => {
await llm(prompt, (partial: string, done: boolean) => { await llm(prompt, localStorage.getItem("openai_api_key") || "", (partial: string, done: boolean) => {
console.log("DONE??", done) console.log("DONE??", done)
this.editor.updateShape<IPrompt>({ this.editor.updateShape<IPrompt>({
id: shape.id, id: shape.id,

View File

@ -122,7 +122,8 @@ export function CustomContextMenu(props: TLUiContextMenuProps) {
llm( llm(
`Instruction: ${edge.text} `Instruction: ${edge.text}
${sourceText ? `Context: ${sourceText}` : ""}`, ${sourceText ? `Context: ${sourceText}` : ""}`,
(partialResponse) => { localStorage.getItem("openai_api_key") || "",
(partialResponse: string) => {
editor.updateShape({ editor.updateShape({
id: edge.to, id: edge.to,
type: "geo", type: "geo",

View File

@ -409,7 +409,8 @@ export const overrides: TLUiOverrides = {
llm( llm(
`Instruction: ${edge.text} `Instruction: ${edge.text}
${sourceText ? `Context: ${sourceText}` : ""}`, ${sourceText ? `Context: ${sourceText}` : ""}`,
(partialResponse) => { localStorage.getItem("openai_api_key") || "",
(partialResponse: string) => {
editor.updateShape({ editor.updateShape({
id: edge.to, id: edge.to,
type: "geo", type: "geo",

View File

@ -1,14 +1,9 @@
import OpenAI from "openai"; import OpenAI from "openai";
const apiKey = localStorage.getItem("openai_api_key") || ""
const openai = new OpenAI({
apiKey,
dangerouslyAllowBrowser: true,
});
export async function llm( export async function llm(
//systemPrompt: string, //systemPrompt: string,
userPrompt: string, userPrompt: string,
apiKey: string,
onToken: (partialResponse: string, done: boolean) => void, onToken: (partialResponse: string, done: boolean) => void,
) { ) {
if (!apiKey) { if (!apiKey) {
@ -17,6 +12,10 @@ export async function llm(
//console.log("System Prompt:", systemPrompt); //console.log("System Prompt:", systemPrompt);
//console.log("User Prompt:", userPrompt); //console.log("User Prompt:", userPrompt);
let partial = ""; let partial = "";
const openai = new OpenAI({
apiKey,
dangerouslyAllowBrowser: true,
});
const stream = await openai.chat.completions.create({ const stream = await openai.chat.completions.create({
model: "gpt-4o", model: "gpt-4o",
messages: [ messages: [

View File

@ -18,6 +18,7 @@ import { EmbedShape } from "@/shapes/EmbedShapeUtil"
import { MarkdownShape } from "@/shapes/MarkdownShapeUtil" import { MarkdownShape } from "@/shapes/MarkdownShapeUtil"
import { MycrozineTemplateShape } from "@/shapes/MycrozineTemplateShapeUtil" import { MycrozineTemplateShape } from "@/shapes/MycrozineTemplateShapeUtil"
import { SlideShape } from "@/shapes/SlideShapeUtil" import { SlideShape } from "@/shapes/SlideShapeUtil"
import { PromptShape } from "@/shapes/PromptShapeUtil"
// add custom shapes and bindings here if needed: // add custom shapes and bindings here if needed:
export const customSchema = createTLSchema({ export const customSchema = createTLSchema({
@ -47,6 +48,10 @@ export const customSchema = createTLSchema({
props: SlideShape.props, props: SlideShape.props,
migrations: SlideShape.migrations, migrations: SlideShape.migrations,
}, },
Prompt: {
props: PromptShape.props,
migrations: PromptShape.migrations,
},
}, },
bindings: defaultBindingSchemas, bindings: defaultBindingSchemas,
}) })