failed attempt at audio and transcriptions

Jeff-Emmett 2025-02-22 12:45:38 -05:00
parent 3a99af257d
commit b2cf4280ca
10 changed files with 3655 additions and 341 deletions

package-lock.json (generated): 2936 changed lines
File diff suppressed because it is too large

View File

@@ -26,6 +26,7 @@
     "@tldraw/tlschema": "^3.6.0",
     "@types/markdown-it": "^14.1.1",
     "@types/marked": "^5.0.2",
+    "@uiw/react-md-editor": "^4.0.5",
     "@vercel/analytics": "^1.2.2",
     "ai": "^4.1.0",
     "cherry-markdown": "^0.8.57",
@@ -48,7 +49,7 @@
   },
   "devDependencies": {
     "@cloudflare/types": "^6.0.0",
-    "@cloudflare/workers-types": "^4.20240821.1",
+    "@cloudflare/workers-types": "^4.20250214.0",
     "@types/lodash.throttle": "^4",
     "@types/rbush": "^4.0.0",
     "@types/react": "^19.0.1",
@@ -57,7 +58,7 @@
     "concurrently": "^9.1.0",
     "typescript": "^5.6.3",
     "vite": "^6.0.3",
-    "wrangler": "^3.107.3"
+    "wrangler": "^3.109.1"
   },
   "engines": {
     "node": ">=18.0.0"

View File

@@ -1,6 +1,8 @@
 import { inject } from "@vercel/analytics"
 import "tldraw/tldraw.css"
 import "@/css/style.css"
+import "@uiw/react-md-editor/markdown-editor.css"
+import "@uiw/react-markdown-preview/markdown.css"
 import { Default } from "@/routes/Default"
 import { BrowserRouter, Route, Routes } from "react-router-dom"
 import { Contact } from "@/routes/Contact"

View File

@@ -41,10 +41,24 @@ export class MarkdownShape extends BaseBoxShapeUtil<
 > {
   static override type = "MarkdownTool"
-  styles = {
-    color: MarkdownColor,
-    size: MarkdownSize,
-    font: MarkdownFont,
+  static styles = {
+    color: DefaultColorStyle,
+    size: DefaultSizeStyle,
+    font: DefaultFontStyle,
+  } as any // Type assertion to allow dynamic property addition
+
+  styles = MarkdownShape.styles
+
+  constructor(props: any) {
+    super(props)
+    console.log('MarkdownShape constructor - styles:', this.styles)
+    // Add a fallback get method if it doesn't exist
+    if (!this.styles.get) {
+      this.styles.get = function(style: string) {
+        console.log('Fallback get called for style:', style)
+        return this[style] || null
+      }
+    }
   }
 
   getDefaultProps(): IMarkdownShape["props"] & { w: number; h: number } {

View File

@@ -1,5 +1,6 @@
 import { BaseBoxShapeUtil, TLBaseShape } from "tldraw"
-import { useEffect, useState } from "react"
+import { useEffect, useState, useRef } from "react"
+import { useParticipantCounts, DailyProvider, useDaily, useTranscription } from '@daily-co/daily-react';
 
 interface DailyApiResponse {
   url: string;
@@ -9,6 +10,16 @@ interface DailyRecordingResponse {
   id: string;
 }
 
+interface TranscriptResponse {
+  data: TranscriptItem[];
+}
+
+interface TranscriptItem {
+  user_name?: string;
+  text: string;
+  transcriptId: string;
+}
+
 export type IVideoChatShape = TLBaseShape<
   "VideoChat",
   {
@@ -18,7 +29,8 @@ export type IVideoChatShape = TLBaseShape<
     allowCamera: boolean
     allowMicrophone: boolean
    enableRecording: boolean
-    recordingId: string | null // Track active recording
+    recordingId: string | null
+    isTranscribing: boolean
   }
 >
@@ -37,7 +49,8 @@ export class VideoChatShape extends BaseBoxShapeUtil<IVideoChatShape> {
       allowCamera: false,
       allowMicrophone: false,
       enableRecording: true,
-      recordingId: null
+      recordingId: null,
+      isTranscribing: false
     }
   }
@@ -102,13 +115,19 @@ export class VideoChatShape extends BaseBoxShapeUtil<IVideoChatShape> {
           start_cloud_recording: true,
           start_cloud_recording_opts: {
             layout: {
-              preset: "active-speaker"
+              preset: "audio-only"
             },
             format: "mp4",
             mode: "audio-only"
           },
           auto_start_transcription: true,
-          recordings_template: "{room_name}/audio-{epoch_time}.mp4"
+          recordings_template: "{room_name}/audio-{epoch_time}.mp4",
+          permissions: {
+            hasPresence: true,
+            canSend: true,
+            canAdmin: false
+          },
+          enable_transcription: true,
         }
       })
     });
@@ -152,30 +171,51 @@ export class VideoChatShape extends BaseBoxShapeUtil<IVideoChatShape> {
     const apiKey = import.meta.env.VITE_DAILY_API_KEY;
 
     try {
-      const response = await fetch(`${workerUrl}/daily/recordings/start`, {
+      const url = new URL(shape.props.roomUrl);
+      const roomName = url.pathname.substring(1);
+
+      // Get the board name and timestamp
+      const boardName = this.editor.getCurrentPage().name || 'untitled';
+      const timestamp = new Date().toISOString()
+        .replace('T', '_')
+        .replace(/:/g, '-')
+        .slice(0, 16);
+
+      console.log('Starting recording for room:', roomName);
+      console.log('Recording access URL will be available at:',
+        `https://api.daily.co/v1/recordings?room=${roomName}`);
+
+      console.log('Making request to:', `${workerUrl}/daily/recordings/${roomName}/start`);
+      const response = await fetch(`${workerUrl}/daily/recordings/${roomName}/start`, {
         method: 'POST',
         headers: {
           'Authorization': `Bearer ${apiKey}`,
           'Content-Type': 'application/json'
         },
         body: JSON.stringify({
-          room_name: shape.id,
           layout: {
-            preset: "active-speaker"
-          }
+            preset: "audio-only"
+          },
+          recording_name: `${boardName}_${timestamp}_audio`
         })
       });
 
-      if (!response.ok) throw new Error('Failed to start recording');
+      console.log('Recording start response status:', response.status);
+      const responseData = await response.json();
+      console.log('Recording start response data:', responseData);
+
+      if (!response.ok) {
+        throw new Error(`Failed to start recording: ${response.statusText} (${JSON.stringify(responseData)})`);
+      }
 
-      const data = await response.json() as DailyRecordingResponse;
+      const data = responseData as DailyRecordingResponse;
       await this.editor.updateShape<IVideoChatShape>({
         id: shape.id,
         type: shape.type,
         props: {
           ...shape.props,
-          recordingId: data.id
+          recordingId: data.id || null
         }
       });
@@ -186,19 +226,33 @@ export class VideoChatShape extends BaseBoxShapeUtil<IVideoChatShape> {
   }
 
   async stopRecording(shape: IVideoChatShape) {
-    if (!shape.props.recordingId) return;
+    if (!shape.props.roomUrl) return;
 
     const workerUrl = import.meta.env.VITE_TLDRAW_WORKER_URL;
     const apiKey = import.meta.env.VITE_DAILY_API_KEY;
 
     try {
-      await fetch(`${workerUrl}/daily/recordings/${shape.props.recordingId}/stop`, {
+      // Parse the URL to get just the room name
+      const url = new URL(shape.props.roomUrl);
+      const roomName = url.pathname.substring(1); // Remove leading slash
+
+      // Verify we have a room name
+      if (!roomName) {
+        throw new Error('Room name not found in URL');
+      }
+
+      const response = await fetch(`${workerUrl}/daily/recordings/${roomName}/stop`, {
         method: 'POST',
         headers: {
-          'Authorization': `Bearer ${apiKey}`
+          'Authorization': `Bearer ${apiKey}`,
+          'Content-Type': 'application/json'
         }
       });
 
+      if (!response.ok) {
+        throw new Error(`Failed to stop recording: ${response.statusText}`);
+      }
+
       await this.editor.updateShape<IVideoChatShape>({
         id: shape.id,
         type: shape.type,
@@ -214,11 +268,334 @@ export class VideoChatShape extends BaseBoxShapeUtil<IVideoChatShape> {
     }
   }
async startTranscription(shape: IVideoChatShape) {
// Get room URL directly from shape props
const roomUrl = shape.props.roomUrl;
if (!roomUrl) {
console.log('Cannot start transcription: no room URL');
return;
}
const workerUrl = import.meta.env.VITE_TLDRAW_WORKER_URL;
const apiKey = import.meta.env.VITE_DAILY_API_KEY;
try {
// Ensure we have a valid URL and extract the room name
const url = new URL(roomUrl); // Use roomUrl from props
const roomName = url.pathname.substring(1);
console.log('Starting transcription for room:', roomName);
console.log('Making request to:', `${workerUrl}/daily/transcription/${roomName}/start`);
const response = await fetch(`${workerUrl}/daily/transcription/${roomName}/start`, {
method: 'POST',
headers: {
'Authorization': `Bearer ${apiKey}`,
'Content-Type': 'application/json'
},
body: JSON.stringify({
language: 'en',
punctuate: true,
profanity_filter: true
})
});
console.log('Transcription start response status:', response.status);
const responseData = await response.json();
console.log('Transcription start response data:', responseData);
if (!response.ok) {
throw new Error(`Failed to start transcription: ${response.statusText} (${JSON.stringify(responseData)})`);
}
console.log('Updating shape with transcription status');
await this.editor.updateShape<IVideoChatShape>({
id: shape.id,
type: shape.type,
props: {
...shape.props,
isTranscribing: true
}
});
console.log('Transcription started successfully');
} catch (error) {
console.error('Error starting transcription:', error);
throw error;
}
}
async stopTranscription(shape: IVideoChatShape) {
const roomUrl = shape.props.roomUrl;
if (!roomUrl) {
console.log('Cannot stop transcription: no room URL');
return;
}
const workerUrl = import.meta.env.VITE_TLDRAW_WORKER_URL;
const apiKey = import.meta.env.VITE_DAILY_API_KEY;
try {
const url = new URL(roomUrl);
const roomName = url.pathname.substring(1);
console.log('Stopping transcription for room:', roomName);
// First, stop the transcription
const stopResponse = await fetch(`${workerUrl}/daily/transcription/${roomName}/stop`, {
method: 'POST',
headers: {
'Authorization': `Bearer ${apiKey}`,
'Content-Type': 'application/json'
}
});
if (!stopResponse.ok) {
throw new Error(`Failed to stop transcription: ${stopResponse.statusText}`);
}
console.log('Transcription stopped, waiting for processing...');
// Increase wait time for processing
await new Promise(resolve => setTimeout(resolve, 10000));
// Get list of transcripts
console.log('Fetching transcripts list...');
const transcriptsResponse = await fetch(`${workerUrl}/daily/transcription/${roomName}`, {
headers: {
'Authorization': `Bearer ${apiKey}`,
'Content-Type': 'application/json'
}
});
if (!transcriptsResponse.ok) {
console.error('Transcripts list response:', await transcriptsResponse.text());
throw new Error(`Failed to fetch transcripts list: ${transcriptsResponse.statusText}`);
}
const transcriptsData = await transcriptsResponse.json() as TranscriptResponse;
console.log('Transcripts list:', transcriptsData);
// Get the most recent transcript ID
if (!transcriptsData.data || transcriptsData.data.length === 0) {
console.warn('No transcripts found in response');
throw new Error('No transcripts found');
}
// Sort by most recent and get the first one
const latestTranscript = transcriptsData.data
.sort((a: any, b: any) => new Date(b.created_at || 0).getTime() - new Date(a.created_at || 0).getTime())[0];
const transcriptId = latestTranscript.transcriptId;
console.log('Latest transcript ID:', transcriptId);
// Fetch the actual transcript content
console.log('Fetching transcript content...');
const contentResponse = await fetch(`${workerUrl}/daily/transcription/${roomName}/${transcriptId}`, {
headers: {
'Authorization': `Bearer ${apiKey}`,
'Content-Type': 'application/json'
}
});
if (!contentResponse.ok) {
console.error('Content response:', await contentResponse.text());
throw new Error(`Failed to fetch transcript content: ${contentResponse.statusText}`);
}
const contentData = await contentResponse.json() as TranscriptResponse;
console.log('Transcript content:', contentData);
// Create a text note with the transcript
if (contentData.data && contentData.data.length > 0) {
const transcriptText = contentData.data
.filter((item: any) => item.text) // Filter out items with no text
.map((item: any) => {
const speaker = item.user_name || 'Speaker';
const text = item.text.trim();
return `${speaker}: ${text}`;
})
.join('\n');
if (!transcriptText) {
console.warn('Transcript text is empty after processing');
return;
}
// Get the current shape's position
const currentShape = this.editor.getShape(shape.id) as IVideoChatShape;
if (!currentShape) {
console.error('Failed to find current shape for positioning transcript');
throw new Error('Shape not found');
}
console.log('Creating transcript note at position:', {
x: currentShape.x,
y: currentShape.y + currentShape.props.h + 20
});
console.log('Transcript text to be added:', transcriptText);
try {
const newShape = await this.editor.createShape({
type: 'note',
x: currentShape.x,
y: currentShape.y + currentShape.props.h + 20,
props: {
text: `Transcript from ${new Date().toLocaleString()}\n\n${transcriptText}`,
color: 'blue',
size: 'l',
},
});
console.log('Successfully created transcript note:', newShape);
} catch (error) {
console.error('Failed to create transcript shape:', error);
throw new Error('Failed to create transcript shape');
}
} else {
console.warn('No valid transcript content found in response:', contentData);
}
// Update shape transcription status
await this.editor.updateShape<IVideoChatShape>({
id: shape.id,
type: shape.type,
props: {
...shape.props,
isTranscribing: false
}
});
console.log('Transcription stopped successfully');
} catch (error) {
console.error('Error stopping transcription:', error);
throw error;
}
}
   component(shape: IVideoChatShape) {
+    const [roomUrl, setRoomUrl] = useState<string | null>(shape.props.roomUrl)
+    const daily = useDaily()
+    const { present, hidden } = useParticipantCounts()
     const [hasPermissions, setHasPermissions] = useState(false)
     const [error, setError] = useState<Error | null>(null)
     const [isLoading, setIsLoading] = useState(true)
-    const [roomUrl, setRoomUrl] = useState<string | null>(shape.props.roomUrl)
+    const [isRecording, setIsRecording] = useState(!!shape.props.recordingId)
+    const [isTranscribing, setIsTranscribing] = useState(shape.props.isTranscribing)
// Add loading states for buttons
const [isRecordingLoading, setIsRecordingLoading] = useState(false)
const [isTranscribingLoading, setIsTranscribingLoading] = useState(false)
// Add timeout ref to handle stuck states
const transcriptionTimeoutRef = useRef<NodeJS.Timeout | undefined>(undefined)
// Cleanup timeout on unmount
useEffect(() => {
return () => {
if (transcriptionTimeoutRef.current) {
clearTimeout(transcriptionTimeoutRef.current)
}
}
}, [])
// Updated recording toggle handler
const handleRecordingToggle = async () => {
if (isRecordingLoading) return; // Prevent multiple clicks
try {
setIsRecordingLoading(true);
if (isRecording) {
await this.stopRecording(shape);
setIsRecording(false);
} else {
await this.startRecording(shape);
setIsRecording(true);
}
} catch (err) {
console.error("Recording toggle failed:", err);
} finally {
setIsRecordingLoading(false);
}
};
// Add useTranscription hook with callbacks
const {
isTranscribing: dailyTranscribing,
transcriptions,
error: transcriptionError,
startTranscription: dailyStartTranscription,
stopTranscription: dailyStopTranscription
} = useTranscription({
onTranscriptionStarted: () => {
console.log('Transcription started');
},
onTranscriptionStopped: async () => {
console.log('Transcription stopped, transcriptions:', transcriptions);
if (transcriptions && transcriptions.length > 0) {
// Create transcript text from the transcriptions array
const transcriptText = transcriptions
.map((t: any) => `${t.user_name || 'Speaker'}: ${t.text}`)
.join('\n');
// Get the current shape for positioning
const currentShape = this.editor.getShape(shape.id) as IVideoChatShape;
if (!currentShape) throw new Error('Shape not found');
try {
const newShape = await this.editor.createShape({
type: 'note',
x: currentShape.x,
y: currentShape.y + currentShape.props.h + 20,
props: {
text: `Transcript from ${new Date().toLocaleString()}\n\n${transcriptText}`,
color: 'blue',
size: 'l',
},
});
console.log('Created transcript note:', newShape);
} catch (error) {
console.error('Failed to create transcript shape:', error);
}
}
},
onTranscriptionError: (error) => {
console.error('Transcription error:', error);
}
});
// Update the transcription toggle handler
const handleTranscriptionToggle = async () => {
if (isTranscribingLoading) return;
try {
setIsTranscribingLoading(true);
if (isTranscribing) {
await dailyStopTranscription();
} else {
await dailyStartTranscription({
language: 'en',
punctuate: true,
profanity_filter: true
});
}
setIsTranscribing(!isTranscribing);
} catch (err) {
console.error("Transcription toggle failed:", err);
} finally {
setIsTranscribingLoading(false);
}
};
// Add a useEffect to monitor and reset stuck states
useEffect(() => {
if (isTranscribingLoading) {
const timeout = setTimeout(() => {
console.warn('Forcing reset of transcription loading state');
setIsTranscribingLoading(false);
}, 15000); // 15 second failsafe
return () => clearTimeout(timeout);
}
}, [isTranscribingLoading]);
     useEffect(() => {
       let mounted = true;
@@ -282,11 +659,58 @@ export class VideoChatShape extends BaseBoxShapeUtil<IVideoChatShape> {
       }
     }, [shape.props.allowCamera, shape.props.allowMicrophone])
 
-    if (error) {
-      return <div>Error creating room: {error.message}</div>
-    }
+    // Add effect to log participant counts
+    useEffect(() => {
+      console.log('Participant counts:', { present, hidden })
+      console.log('Daily instance:', daily)
+      console.log('Current participants:', daily?.participants())
+    }, [present, hidden, daily])
 
-    if (isLoading || !roomUrl || roomUrl === 'undefined') {
+    // Add effect to join the room when Daily is ready
+    useEffect(() => {
+      if (daily && roomUrl) {
+        console.log('Attempting to join room:', roomUrl);
+        daily.join({ url: roomUrl }).then(() => {
+          console.log('Successfully joined room');
+          console.log('Initial participants:', daily.participants());
+        }).catch(error => {
+          console.error('Failed to join room:', error);
+        });
+      }
+
+      // Cleanup: leave the room when component unmounts
+      return () => {
+        if (daily) {
+          console.log('Leaving room');
+          daily.leave();
+        }
+      };
+    }, [daily, roomUrl]);
+
+    // Add effect to track participant changes
+    useEffect(() => {
+      if (!daily) return;
+
+      const handleParticipantJoined = (event: any) => {
+        console.log('Participant joined:', event.participant);
+        console.log('All participants:', daily.participants());
+      };
+
+      const handleParticipantLeft = (event: any) => {
+        console.log('Participant left:', event.participant);
+        console.log('All participants:', daily.participants());
+      };
+
+      daily.on('participant-joined', handleParticipantJoined);
+      daily.on('participant-left', handleParticipantLeft);
+
+      return () => {
+        daily.off('participant-joined', handleParticipantJoined);
+        daily.off('participant-left', handleParticipantLeft);
+      };
+    }, [daily]);
+
+    if (error || isLoading || !roomUrl) {
       return (
         <div
           style={{
@@ -304,94 +728,119 @@ export class VideoChatShape extends BaseBoxShapeUtil<IVideoChatShape> {
       )
     }
 
-    // Construct URL with permission parameters
+    // Add recording UI parameter to the URL
     const roomUrlWithParams = new URL(roomUrl)
-    roomUrlWithParams.searchParams.set(
-      "allow_camera",
-      String(shape.props.allowCamera),
-    )
-    roomUrlWithParams.searchParams.set(
-      "allow_mic",
-      String(shape.props.allowMicrophone),
-    )
-
-    console.log(roomUrl)
+    roomUrlWithParams.searchParams.set("show_record_button", "true")
+    roomUrlWithParams.searchParams.set("allow_camera", String(shape.props.allowCamera))
+    roomUrlWithParams.searchParams.set("allow_mic", String(shape.props.allowMicrophone))
 
     return (
-      <div
-        style={{
-          width: `${shape.props.w}px`,
-          height: `${shape.props.h}px`,
-          position: "relative",
-          pointerEvents: "all",
-          overflow: "hidden",
-        }}
-      >
-        <iframe
-          src={roomUrlWithParams.toString()}
-          width="100%"
-          height="100%"
-          style={{
-            border: "none",
-            position: "absolute",
-            top: 0,
-            left: 0,
-            right: 0,
-            bottom: 0,
-          }}
-          allow={`camera ${shape.props.allowCamera ? "self" : ""}; microphone ${
-            shape.props.allowMicrophone ? "self" : ""
-          }`}
-        ></iframe>
-        {shape.props.enableRecording && (
-          <button
-            onClick={async () => {
-              try {
-                if (shape.props.recordingId) {
-                  await this.stopRecording(shape);
-                } else {
-                  await this.startRecording(shape);
-                }
-              } catch (err) {
-                console.error('Recording error:', err);
-              }
-            }}
-            style={{
-              position: "absolute",
-              top: "8px",
-              right: "8px",
-              padding: "4px 8px",
-              background: shape.props.recordingId ? "#ff4444" : "#ffffff",
-              border: "1px solid #ccc",
-              borderRadius: "4px",
-              cursor: "pointer",
-              zIndex: 1,
-            }}
-          >
-            {shape.props.recordingId ? "Stop Recording" : "Start Recording"}
-          </button>
-        )}
-        <p
-          style={{
-            position: "absolute",
-            bottom: 0,
-            left: 0,
-            margin: "8px",
-            padding: "4px 8px",
-            background: "rgba(255, 255, 255, 0.9)",
-            borderRadius: "4px",
-            fontSize: "12px",
-            pointerEvents: "all",
-            cursor: "text",
-            userSelect: "text",
-            zIndex: 1,
-          }}
-        >
-          url: {roomUrl}
-        </p>
-      </div>
+      <DailyProvider>
+        <div
+          style={{
+            width: `${shape.props.w}px`,
+            display: "flex",
+            flexDirection: "column",
+            pointerEvents: "all",
+          }}
+        >
+          {/* Video container */}
+          <div
+            style={{
+              width: "100%",
+              height: `${shape.props.h}px`,
+              position: "relative",
+              overflow: "hidden",
+            }}
+          >
+            <iframe
+              src={roomUrlWithParams.toString()}
+              width="100%"
+              height="100%"
+              style={{
+                border: "none",
+                position: "absolute",
+                top: 0,
+                left: 0,
+                right: 0,
+                bottom: 0,
+              }}
+              allow={`camera ${shape.props.allowCamera ? "self" : ""}; microphone ${
+                shape.props.allowMicrophone ? "self" : ""
+              }`}
+            ></iframe>
+          </div>
+
+          {/* Controls container below video */}
+          <div
+            style={{
+              width: "100%",
+              display: "flex",
+              justifyContent: "space-between",
+              alignItems: "center",
+              padding: "8px",
+              background: "#ffffff",
+              borderTop: "1px solid #eee",
+              marginTop: "4px",
+            }}
+          >
+            <p
+              style={{
+                margin: 0,
+                padding: "4px 8px",
+                borderRadius: "4px",
+                fontSize: "12px",
+                userSelect: "text",
+              }}
+            >
+              url: {roomUrl}
+            </p>
+            <div style={{ display: 'flex', gap: '8px' }}>
+              {shape.props.enableRecording && (
+                <button
+                  onClick={handleRecordingToggle}
+                  onPointerDown={(e) => e.stopPropagation()}
+                  disabled={isRecordingLoading}
+                  style={{
+                    padding: "4px 8px",
+                    borderRadius: "4px",
+                    border: "1px solid #ccc",
+                    background: isRecording ? "#ff4444" : "#ffffff",
+                    cursor: isRecordingLoading ? "not-allowed" : "pointer",
+                    fontSize: "12px",
+                    pointerEvents: "auto",
+                    opacity: isRecordingLoading ? 0.7 : 1,
+                  }}
+                >
+                  {isRecordingLoading
+                    ? (isRecording ? "Stopping..." : "Starting...")
+                    : (isRecording ? "Stop Recording" : "Start Recording")}
+                </button>
+              )}
+              <button
+                onClick={handleTranscriptionToggle}
+                onPointerDown={(e) => e.stopPropagation()}
+                disabled={isTranscribingLoading}
+                style={{
+                  padding: "4px 8px",
+                  borderRadius: "4px",
+                  border: "1px solid #ccc",
+                  background: isTranscribing ? "#ff4444" : "#ffffff",
+                  cursor: isTranscribingLoading ? "not-allowed" : "pointer",
+                  fontSize: "12px",
+                  pointerEvents: "auto",
+                  opacity: isTranscribingLoading ? 0.7 : 1,
+                }}
+              >
+                {isTranscribingLoading
+                  ? (isTranscribing ? "Stopping..." : "Starting...")
+                  : (isTranscribing ? "Stop Transcription" : "Start Transcription")}
+              </button>
+            </div>
+          </div>
+        </div>
+      </DailyProvider>
     )
   }
 }

View File

@@ -1,3 +1,5 @@
+// FIX DEFAULT MENU (copy, paste, etc)
+
 import {
   Editor,
   TldrawUiMenuActionItem,
@@ -67,6 +69,8 @@ export function CustomContextMenu(props: TLUiContextMenuProps) {
   return (
     <DefaultContextMenu {...props}>
+      {props.children}
+
       {/* Camera Controls Group */}
       <TldrawUiMenuGroup id="camera-controls">
         <TldrawUiMenuItem

View File

@@ -0,0 +1,44 @@
import type { Environment } from './types';
interface RecordingPayload {
recordingUrl: string;
roomName: string;
recordingId: string;
}
export class RecordingProcessor {
async fetch(request: Request, env: Environment) {
const payload = await request.json() as RecordingPayload;
const { recordingUrl, roomName, recordingId } = payload;
// Step 1: Download the recording
const response = await fetch(recordingUrl, {
headers: {
'Authorization': `Bearer ${env.DAILY_API_KEY}`
}
});
if (!response.ok) {
throw new Error(`Failed to download recording: ${response.statusText}`);
}
const downloadResult = await response.arrayBuffer();
// Step 2: Upload to R2
const bucket = env.TLDRAW_BUCKET;
const key = `uploads/recordings/${roomName}/${recordingId}.mp4`;
await bucket.put(key, downloadResult, {
httpMetadata: {
contentType: 'video/mp4',
}
});
return new Response(JSON.stringify({
status: "success",
location: key
}), {
headers: { 'Content-Type': 'application/json' }
});
}
}

View File

@@ -8,4 +8,5 @@ export interface Environment {
   TLDRAW_DURABLE_OBJECT: DurableObjectNamespace
   DAILY_API_KEY: string;
   DAILY_DOMAIN: string;
+  //RECORDING_PROCESSOR: Workflow;
 }

View File

@@ -2,6 +2,10 @@ import { handleUnfurlRequest } from "cloudflare-workers-unfurl"
 import { AutoRouter, cors, error, IRequest } from "itty-router"
 import { handleAssetDownload, handleAssetUpload } from "./assetUploads"
 import { Environment } from "./types"
+import { RecordingProcessor } from './RecordingProcessor'
+
+// At the top with other exports
+export { RecordingProcessor } from './RecordingProcessor'
 
 // make sure our sync durable object is made available to cloudflare
 export { TldrawDurableObject } from "./TldrawDurableObject"
@@ -40,7 +44,7 @@ const { preflight, corsify } = cors({
     // For development - check if it's a localhost or local IP
     if (
       origin.match(
-        /^http:\/\/(localhost|127\.0\.0\.1|192\.168\.|169\.254\.|10\.)/,
+        /^http:\/\/(localhost|127\.0\.0\.1|192\.168\.|169\.254\.|10\.255\.255\.254|10\.)/,
       )
     ) {
       return origin
@@ -155,6 +159,169 @@ const router = AutoRouter<IRequest, [env: Environment, ctx: ExecutionContext]>({
   }
 })
// Update the recording start endpoint to use room name in the URL
.post("/daily/recordings/:roomName/start", async (req) => {
const apiKey = req.headers.get('Authorization')?.split('Bearer ')[1]
const roomName = req.params.roomName
if (!apiKey) {
return new Response(JSON.stringify({ error: 'No API key provided' }), {
status: 401,
headers: { 'Content-Type': 'application/json' }
})
}
try {
const body = await req.json() as { layout?: { preset: string } };
const requestBody = {
layout: body.layout
};
const response = await fetch(`https://api.daily.co/v1/rooms/${roomName}/recordings/start`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${apiKey}`
},
body: JSON.stringify(requestBody)
});
const data = await response.json();
// Return the response with the same status code
return new Response(JSON.stringify(data), {
status: response.status,
headers: { 'Content-Type': 'application/json' }
});
} catch (error) {
return new Response(JSON.stringify({ error: (error as Error).message }), {
status: 500,
headers: { 'Content-Type': 'application/json' }
});
}
})
.post("/daily/recordings/:roomName/stop", async (req) => {
const apiKey = req.headers.get('Authorization')?.split('Bearer ')[1]
const roomName = req.params.roomName
if (!apiKey) {
return new Response(JSON.stringify({ error: 'No API key provided' }), {
status: 401,
headers: { 'Content-Type': 'application/json' }
})
}
try {
const response = await fetch(`https://api.daily.co/v1/rooms/${roomName}/recordings/stop`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${apiKey}`
}
})
const data = await response.json()
return new Response(JSON.stringify(data), {
status: response.status,
headers: { 'Content-Type': 'application/json' }
})
} catch (error) {
return new Response(JSON.stringify({ error: (error as Error).message }), {
status: 500,
headers: { 'Content-Type': 'application/json' }
})
}
})
// Add new endpoint to list recordings for a room
.get("/daily/recordings/:roomName", async (req) => {
const apiKey = req.headers.get('Authorization')?.split('Bearer ')[1]
const roomName = req.params.roomName
if (!apiKey) {
return new Response(JSON.stringify({ error: 'No API key provided' }), {
status: 401,
headers: { 'Content-Type': 'application/json' }
})
}
try {
// Add query parameters to get recent recordings
const timeframeStart = new Date(Date.now() - 24 * 60 * 60 * 1000).toISOString(); // Last 24 hours
const response = await fetch(`https://api.daily.co/v1/recordings?room_name=${roomName}&timeframe_start=${timeframeStart}`, {
method: 'GET',
headers: {
'Authorization': `Bearer ${apiKey}`,
'Content-Type': 'application/json'
}
});
if (!response.ok) {
const errorData = await response.json();
throw new Error(`Failed to fetch recordings: ${JSON.stringify(errorData)}`);
}
const data = await response.json();
return new Response(JSON.stringify(data), {
headers: { 'Content-Type': 'application/json' }
});
} catch (error) {
console.error('Fetching recordings failed:', error);
return new Response(JSON.stringify({ error: (error as Error).message }), {
status: 500,
headers: { 'Content-Type': 'application/json' }
});
}
});
interface DailyWebhookPayload {
event: string;
recording: {
download_url: string;
id: string;
};
room: {
name: string;
};
}
// Add a new webhook endpoint for Daily.co
router
.post("/webhooks/daily", async (request: IRequest, env: Environment) => {
// 1. Verify webhook signature
const signature = request.headers.get('X-Webhook-Signature');
const timestamp = request.headers.get('X-Webhook-Timestamp');
if (!signature || !timestamp) {
return new Response('Missing webhook signature headers', { status: 401 });
}
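// NOTE: only the presence of the signature headers is checked above; the signature value itself is not cryptographically verified here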
const payload = await request.json() as DailyWebhookPayload;
// Only process recording-ready events
if (payload.event !== 'recording-ready') {
return new Response('Ignored non-recording event', { status: 200 });
}
try {
const processor = new RecordingProcessor();
const response = await processor.fetch(new Request(payload.recording.download_url), env);
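// NOTE: RecordingProcessor.fetch expects a JSON body of { recordingUrl, roomName, recordingId }; the Request created here has no body, so its request.json() call will likely fail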
const result = await response.json() as { location: string };
return Response.json({
success: true,
location: result.location
});
} catch (error) {
console.error('Failed to process recording:', error);
return Response.json({
success: false,
error: (error as Error).message
}, { status: 500 });
}
})
 async function backupAllBoards(env: Environment) {
   try {
     // List all room files from TLDRAW_BUCKET
@@ -211,5 +378,160 @@ router
     })
   })
// Start transcription
router
.post('/daily/transcription/:room/start', async (req, _env) => {
const { room } = req.params;
const apiKey = req.headers.get('Authorization')?.split(' ')[1];
if (!apiKey) {
return new Response('API key required', { status: 401 });
}
try {
const response = await fetch(`https://api.daily.co/v1/rooms/${room}/transcription/start`, {
method: 'POST',
headers: {
'Authorization': `Bearer ${apiKey}`,
'Content-Type': 'application/json'
},
body: JSON.stringify(req.body)
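// NOTE: req.body here is the raw (unparsed) request body, so the client's transcription options are likely not forwarded as intended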
});
const data = await response.json();
if (!response.ok) {
throw new Error((data as { error?: string })?.error || 'Failed to start transcription');
}
return new Response(JSON.stringify(data), {
headers: { 'Content-Type': 'application/json' }
});
} catch (error) {
console.error('Error starting transcription:', error);
return new Response(JSON.stringify({ error: 'Failed to start transcription' }), {
status: 500,
headers: { 'Content-Type': 'application/json' }
});
}
})
// Stop transcription
router
.post('/daily/transcription/:room/stop', async (req, _env) => {
const { room } = req.params;
const apiKey = req.headers.get('Authorization')?.split(' ')[1];
if (!apiKey) {
return new Response('API key required', { status: 401 });
}
try {
const response = await fetch(`https://api.daily.co/v1/rooms/${room}/transcription/stop`, {
method: 'POST',
headers: {
'Authorization': `Bearer ${apiKey}`,
'Content-Type': 'application/json'
}
});
const data = await response.json();
if (!response.ok) {
throw new Error((data as { error?: string })?.error || 'Failed to stop transcription');
}
return new Response(JSON.stringify(data), {
headers: { 'Content-Type': 'application/json' }
});
} catch (error) {
console.error('Error stopping transcription:', error);
return new Response(JSON.stringify({ error: 'Failed to stop transcription' }), {
status: 500,
headers: { 'Content-Type': 'application/json' }
});
}
})
// Add new endpoint to list transcripts for a room
router
.get("/daily/transcription/:roomName", async (req) => {
const apiKey = req.headers.get('Authorization')?.split('Bearer ')[1]
const roomName = req.params.roomName
if (!apiKey) {
return new Response(JSON.stringify({ error: 'No API key provided' }), {
status: 401,
headers: { 'Content-Type': 'application/json' }
})
}
try {
// Use the correct query parameter name: room_name instead of roomId
const response = await fetch(`https://api.daily.co/v1/transcript?room_name=${roomName}`, {
method: 'GET',
headers: {
'Authorization': `Bearer ${apiKey}`,
'Content-Type': 'application/json'
}
});
if (!response.ok) {
const errorData = await response.json();
throw new Error(`Failed to fetch transcripts: ${JSON.stringify(errorData)}`);
}
const data = await response.json();
return new Response(JSON.stringify(data), {
headers: { 'Content-Type': 'application/json' }
});
} catch (error) {
console.error('Fetching transcripts failed:', error);
return new Response(JSON.stringify({ error: (error as Error).message }), {
status: 500,
headers: { 'Content-Type': 'application/json' }
});
}
});
// Add new endpoint to get transcript access link
router.get("/daily/transcription/:id/access-link", async (req) => {
const apiKey = req.headers.get('Authorization')?.split('Bearer ')[1];
const transcriptId = req.params.id;
if (!apiKey) {
return new Response(JSON.stringify({ error: 'No API key provided' }), {
status: 401,
headers: { 'Content-Type': 'application/json' }
});
}
try {
const response = await fetch(`https://api.daily.co/v1/transcript/${transcriptId}/access-link`, {
method: 'GET',
headers: {
'Authorization': `Bearer ${apiKey}`,
'Content-Type': 'application/json'
}
});
if (!response.ok) {
const errorData = await response.json();
throw new Error(`Failed to get transcript link: ${JSON.stringify(errorData)}`);
}
const data = await response.json();
return new Response(JSON.stringify(data), {
headers: { 'Content-Type': 'application/json' }
});
} catch (error) {
console.error('Getting transcript link failed:', error);
return new Response(JSON.stringify({ error: (error as Error).message }), {
status: 500,
headers: { 'Content-Type': 'application/json' }
});
}
});
 // export our router for cloudflare
 export default router

View File

@@ -33,6 +33,11 @@ binding = 'BOARD_BACKUPS_BUCKET'
 bucket_name = 'board-backups'
 preview_bucket_name = 'board-backups-preview'
 
+# [[workflows]]
+# name = "recording-processor"
+# binding = "RECORDING_PROCESSOR"
+# class_name = "RecordingProcessor"
+
 [miniflare]
 kv_persist = true
 r2_persist = true