'use client';

import { useState, useRef, useCallback, useEffect } from 'react';

/*
 * Web Speech API types — not part of the default TypeScript DOM lib,
 * so we declare the minimal surface this component actually uses.
 */
interface SpeechRecognitionResult {
  readonly isFinal: boolean;
  readonly length: number;
  item(index: number): { transcript: string; confidence: number };
  [index: number]: { transcript: string; confidence: number };
}

interface SpeechRecognitionResultList {
  readonly length: number;
  item(index: number): SpeechRecognitionResult;
  [index: number]: SpeechRecognitionResult;
}

interface SpeechRecognitionEvent extends Event {
  readonly resultIndex: number;
  readonly results: SpeechRecognitionResultList;
}

interface SpeechRecognitionErrorEvent extends Event {
  readonly error: string;
}

interface ISpeechRecognition extends EventTarget {
  continuous: boolean;
  interimResults: boolean;
  lang: string;
  onresult: ((event: SpeechRecognitionEvent) => void) | null;
  onerror: ((event: SpeechRecognitionErrorEvent) => void) | null;
  onend: (() => void) | null;
  start(): void;
  stop(): void;
}

type SpeechRecognitionCtor = new () => ISpeechRecognition;

/** UI state of the demo: waiting, actively transcribing, or API unavailable. */
type DemoStatus = 'idle' | 'listening' | 'unsupported';

/** One transcript segment. `final` is true once recognition committed it. */
interface TranscriptLine {
  id: number;
  text: string;
  final: boolean;
}

/**
 * Live speech-to-text demo built on the browser Web Speech API
 * (`SpeechRecognition` / `webkitSpeechRecognition`).
 *
 * Behavior:
 * - `start` constructs a continuous, interim-result recognizer (en-US),
 *   resets all transcript state, and starts a 1-second elapsed timer.
 * - Final results are appended to `lines`; non-final text is shown via
 *   `interim`. The transcript pane auto-scrolls as text arrives.
 * - The `onend` handler restarts recognition while this component still
 *   owns it (browsers stop recognition after silence), so `stop` clears
 *   `recognitionRef` FIRST to break that restart loop before calling
 *   `.stop()`.
 * - Shows an unsupported-browser message when the API is missing.
 */
export function TranscriptionDemo() {
  const [status, setStatus] = useState<DemoStatus>('idle');
  const [lines, setLines] = useState<TranscriptLine[]>([]);
  const [interim, setInterim] = useState('');
  const [elapsed, setElapsed] = useState(0);

  const recognitionRef = useRef<ISpeechRecognition | null>(null);
  const timerRef = useRef<ReturnType<typeof setInterval> | null>(null);
  const startTimeRef = useRef(0);
  const scrollRef = useRef<HTMLDivElement | null>(null);
  const lineIdRef = useRef(0);

  // Guard for SSR: `window` does not exist during server rendering.
  const supported =
    typeof window !== 'undefined' &&
    ('SpeechRecognition' in window || 'webkitSpeechRecognition' in window);

  // Tear down the timer and any active recognizer on unmount.
  // `.stop()` can throw if recognition already ended; that is harmless here.
  useEffect(() => {
    return () => {
      if (timerRef.current) clearInterval(timerRef.current);
      if (recognitionRef.current) {
        try {
          recognitionRef.current.stop();
        } catch {
          /* already stopped */
        }
      }
    };
  }, []);

  // Keep the transcript pane pinned to the newest text.
  useEffect(() => {
    if (scrollRef.current) {
      scrollRef.current.scrollTop = scrollRef.current.scrollHeight;
    }
  }, [lines, interim]);

  const start = useCallback(() => {
    if (!supported) {
      setStatus('unsupported');
      return;
    }
    // Chrome/Safari expose the constructor under the `webkit` prefix.
    const speechWindow = window as unknown as Record<
      string,
      SpeechRecognitionCtor | undefined
    >;
    const Ctor =
      speechWindow.SpeechRecognition ?? speechWindow.webkitSpeechRecognition;
    if (!Ctor) {
      setStatus('unsupported');
      return;
    }

    const recognition = new Ctor();
    recognition.continuous = true;
    recognition.interimResults = true;
    recognition.lang = 'en-US';

    recognition.onresult = (event: SpeechRecognitionEvent) => {
      let interimText = '';
      // Only results from `resultIndex` onward changed in this event.
      for (let i = event.resultIndex; i < event.results.length; i++) {
        const result = event.results[i];
        if (result.isFinal) {
          const id = ++lineIdRef.current;
          setLines((prev) => [
            ...prev,
            { id, text: result[0].transcript.trim(), final: true },
          ]);
          interimText = '';
        } else {
          interimText += result[0].transcript;
        }
      }
      setInterim(interimText);
    };

    recognition.onerror = (event: SpeechRecognitionErrorEvent) => {
      // 'aborted' fires on our own stop(); only surface real errors.
      if (event.error !== 'aborted') {
        console.warn('Speech recognition error:', event.error);
      }
    };

    recognition.onend = () => {
      // Browsers end recognition after silence; restart while this
      // component still considers the session active.
      if (recognitionRef.current === recognition) {
        try {
          recognition.start();
        } catch {
          /* restart raced with teardown */
        }
      }
    };

    recognitionRef.current = recognition;
    setLines([]);
    setInterim('');
    lineIdRef.current = 0;
    setElapsed(0);
    startTimeRef.current = Date.now();
    timerRef.current = setInterval(() => {
      setElapsed(Math.floor((Date.now() - startTimeRef.current) / 1000));
    }, 1000);
    recognition.start();
    setStatus('listening');
    // `status` was listed as a dependency but is never read here, which
    // only forced needless re-creation of this callback; dropped.
  }, [supported]);

  const stop = useCallback(() => {
    if (recognitionRef.current) {
      const ref = recognitionRef.current;
      // Clear the ref BEFORE stopping so onend does not auto-restart.
      recognitionRef.current = null;
      try {
        ref.stop();
      } catch {
        /* already stopped */
      }
    }
    if (timerRef.current) {
      clearInterval(timerRef.current);
      timerRef.current = null;
    }
    setInterim('');
    setStatus('idle');
  }, []);

  const reset = useCallback(() => {
    stop();
    setLines([]);
    setElapsed(0);
  }, [stop]);

  /** Formats whole seconds as MM:SS (zero-padded). */
  const formatTime = (s: number) => {
    const m = Math.floor(s / 60).toString().padStart(2, '0');
    const sec = (s % 60).toString().padStart(2, '0');
    return `${m}:${sec}`;
  };

  return (
    <div>
      {/* NOTE(review): the original JSX tags and classNames were destroyed
          by angle-bracket stripping during extraction (the same mangling
          that removed the hook generics restored above). The markup below
          is a minimal, compilable reconstruction that preserves every
          visible text string exactly — restore the original structure and
          styling from version control. */}

      {/* Header bar */}
      <header>
        <span>Live Transcription</span>
        {status === 'listening' && <span>LIVE</span>}
        {status === 'listening' && <span>{formatTime(elapsed)}</span>}
      </header>

      {/* Transcript area */}
      <div ref={scrollRef}>
        {status === 'idle' && lines.length === 0 && (
          <div>
            <p>Tap the mic to start live transcription</p>
            <p>Works in your browser — no download needed</p>
          </div>
        )}
        {status === 'unsupported' && (
          <div>
            <p>Speech recognition requires Chrome, Edge, or Safari.</p>
            <p>
              rNotes also supports offline transcription with Parakeet.js
              (NVIDIA) for full privacy.
            </p>
          </div>
        )}
        {lines.length > 0 && (
          <ul>
            {lines.map((line) => (
              <li key={line.id}>{line.text}</li>
            ))}
          </ul>
        )}
        {interim && <p>{interim}</p>}
        {status === 'idle' && lines.length > 0 && (
          <p>
            {lines.length} segment{lines.length !== 1 ? 's' : ''} transcribed
            {/* `reset` was defined but unreferenced in the garbled source;
                presumably its control lived near this summary — confirm
                placement against the original markup. */}
            <button type="button" onClick={reset}>
              Reset
            </button>
          </p>
        )}
      </div>

      {/* Controls — original button contents (mic icon etc.) were lost;
          labels below are placeholders. */}
      {status === 'idle' ? (
        <button type="button" onClick={start}>
          Start
        </button>
      ) : status === 'listening' ? (
        <button type="button" onClick={stop}>
          Stop
        </button>
      ) : null}

      {/* Capability badges */}
      <footer>
        <span>Live streaming</span>
        <span>Audio file upload</span>
        <span>Video transcription</span>
        <span>Offline (Parakeet.js)</span>
      </footer>
    </div>
  );
}