From e3be857703529bad92c2cdcbc9fcdd07b1db3cdf Mon Sep 17 00:00:00 2001
From: Jeff Emmett
Date: Wed, 18 Feb 2026 10:05:48 +0000
Subject: [PATCH] feat: add live transcription demo to landing page and demo
page
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Adds an interactive Web Speech API transcription widget to the homepage
("Try Live Transcription") and the demo page. Zero-download, no auth
needed — it works instantly in Chrome/Edge/Safari. Also updates the
feature cards to highlight live transcription, audio/video support, and
offline privacy.
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
---
src/app/demo/demo-content.tsx | 27 ++-
src/components/TranscriptionDemo.tsx | 289 +++++++++++++++++++++++++++
2 files changed, 314 insertions(+), 2 deletions(-)
create mode 100644 src/components/TranscriptionDemo.tsx
diff --git a/src/app/demo/demo-content.tsx b/src/app/demo/demo-content.tsx
index cfc9133..a57a44e 100644
--- a/src/app/demo/demo-content.tsx
+++ b/src/app/demo/demo-content.tsx
@@ -3,6 +3,7 @@
import Link from 'next/link'
import { useState, useMemo, useCallback } from 'react'
import { useDemoSync, type DemoShape } from '@/lib/demo-sync'
+import { TranscriptionDemo } from '@/components/TranscriptionDemo'
/* --- Types -------------------------------------------------------------- */
@@ -652,8 +653,9 @@ export default function DemoContent() {
{notebook?.description || 'A collaborative knowledge base for your team'}
+ Live transcription
+ Audio & video
Organized notebooks
- Flexible tagging
Canvas sync
Real-time collaboration
@@ -786,11 +788,27 @@ export default function DemoContent() {
+ {/* Live transcription demo */}
+
+
+
Live Voice Transcription
+
+ Speak and see your words appear in real time. rNotes transcribes audio and video — live or from files — with offline privacy via NVIDIA Parakeet.
+
+
+
+
+
{/* Features showcase */}
Everything you need to capture knowledge
-
+
{[
+ {
+ icon: 'voice',
+ title: 'Live Transcription',
+ desc: 'Record and transcribe in real time. Stream audio via WebSocket or transcribe offline with Parakeet.js.',
+ },
{
icon: 'rich-edit',
title: 'Rich Editing',
@@ -817,6 +835,11 @@ export default function DemoContent() {
className="bg-slate-800/50 rounded-xl border border-slate-700/50 p-5"
>
+ {feature.icon === 'voice' && (
+
+
+
+ )}
{feature.icon === 'rich-edit' && (
diff --git a/src/components/TranscriptionDemo.tsx b/src/components/TranscriptionDemo.tsx
new file mode 100644
index 0000000..bb5a195
--- /dev/null
+++ b/src/components/TranscriptionDemo.tsx
@@ -0,0 +1,289 @@
+'use client';
+
+import { useState, useRef, useCallback, useEffect } from 'react';
+
+/* Web Speech API types — not in default TS lib */
+interface SpeechRecognitionResult {
+ readonly isFinal: boolean;
+ readonly length: number;
+ item(index: number): { transcript: string; confidence: number };
+ [index: number]: { transcript: string; confidence: number };
+}
+interface SpeechRecognitionResultList {
+ readonly length: number;
+ item(index: number): SpeechRecognitionResult;
+ [index: number]: SpeechRecognitionResult;
+}
+interface SpeechRecognitionEvent extends Event {
+ readonly resultIndex: number;
+ readonly results: SpeechRecognitionResultList;
+}
+interface SpeechRecognitionErrorEvent extends Event {
+ readonly error: string;
+}
+interface ISpeechRecognition extends EventTarget {
+ continuous: boolean;
+ interimResults: boolean;
+ lang: string;
+ onresult: ((event: SpeechRecognitionEvent) => void) | null;
+ onerror: ((event: SpeechRecognitionErrorEvent) => void) | null;
+ onend: (() => void) | null;
+ start(): void;
+ stop(): void;
+}
+type SpeechRecognitionCtor = new () => ISpeechRecognition;
+
+type DemoStatus = 'idle' | 'listening' | 'unsupported';
+
+interface TranscriptLine {
+ id: number;
+ text: string;
+ final: boolean;
+}
+
+export function TranscriptionDemo() {
+ const [status, setStatus] = useState('idle');
+ const [lines, setLines] = useState([]);
+ const [interim, setInterim] = useState('');
+ const [elapsed, setElapsed] = useState(0);
+ const recognitionRef = useRef(null);
+ const timerRef = useRef | null>(null);
+ const startTimeRef = useRef(0);
+ const scrollRef = useRef(null);
+ const lineIdRef = useRef(0);
+
+ const supported =
+ typeof window !== 'undefined' &&
+ ('SpeechRecognition' in window || 'webkitSpeechRecognition' in window);
+
+ useEffect(() => {
+ return () => {
+ if (timerRef.current) clearInterval(timerRef.current);
+ if (recognitionRef.current) {
+ try { recognitionRef.current.stop(); } catch {}
+ }
+ };
+ }, []);
+
+ useEffect(() => {
+ if (scrollRef.current) {
+ scrollRef.current.scrollTop = scrollRef.current.scrollHeight;
+ }
+ }, [lines, interim]);
+
+ const start = useCallback(() => {
+ if (!supported) {
+ setStatus('unsupported');
+ return;
+ }
+
+ const Ctor: SpeechRecognitionCtor | undefined =
+ (window as unknown as Record).SpeechRecognition ||
+ (window as unknown as Record).webkitSpeechRecognition;
+ if (!Ctor) {
+ setStatus('unsupported');
+ return;
+ }
+
+ const recognition = new Ctor();
+ recognition.continuous = true;
+ recognition.interimResults = true;
+ recognition.lang = 'en-US';
+
+ recognition.onresult = (event: SpeechRecognitionEvent) => {
+ let interimText = '';
+ for (let i = event.resultIndex; i < event.results.length; i++) {
+ const result = event.results[i];
+ if (result.isFinal) {
+ const id = ++lineIdRef.current;
+ setLines((prev) => [...prev, { id, text: result[0].transcript.trim(), final: true }]);
+ interimText = '';
+ } else {
+ interimText += result[0].transcript;
+ }
+ }
+ setInterim(interimText);
+ };
+
+ recognition.onerror = (event: SpeechRecognitionErrorEvent) => {
+ if (event.error !== 'aborted') {
+ console.warn('Speech recognition error:', event.error);
+ }
+ };
+
+ recognition.onend = () => {
+ if (recognitionRef.current === recognition) {
+ try { recognition.start(); } catch {}
+ }
+ };
+
+ recognitionRef.current = recognition;
+ setLines([]);
+ setInterim('');
+ lineIdRef.current = 0;
+ setElapsed(0);
+ startTimeRef.current = Date.now();
+
+ timerRef.current = setInterval(() => {
+ setElapsed(Math.floor((Date.now() - startTimeRef.current) / 1000));
+ }, 1000);
+
+ recognition.start();
+ setStatus('listening');
+ }, [supported, status]);
+
+ const stop = useCallback(() => {
+ if (recognitionRef.current) {
+ const ref = recognitionRef.current;
+ recognitionRef.current = null;
+ try { ref.stop(); } catch {}
+ }
+ if (timerRef.current) {
+ clearInterval(timerRef.current);
+ timerRef.current = null;
+ }
+ setInterim('');
+ setStatus('idle');
+ }, []);
+
+ const reset = useCallback(() => {
+ stop();
+ setLines([]);
+ setElapsed(0);
+ }, [stop]);
+
+ const formatTime = (s: number) => {
+ const m = Math.floor(s / 60).toString().padStart(2, '0');
+ const sec = (s % 60).toString().padStart(2, '0');
+ return `${m}:${sec}`;
+ };
+
+ return (
+
+
+ {/* Header bar */}
+
+
+
+
+
+
Live Transcription
+ {status === 'listening' && (
+
+
+ LIVE
+
+ )}
+
+ {status === 'listening' && (
+
{formatTime(elapsed)}
+ )}
+
+
+ {/* Transcript area */}
+
+ {status === 'idle' && lines.length === 0 && (
+
+
+
+
+
+ Tap the mic to start live transcription
+
+
+ Works in your browser — no download needed
+
+
+ )}
+
+ {status === 'unsupported' && (
+
+
+ Speech recognition requires Chrome, Edge, or Safari.
+
+
+ rNotes also supports offline transcription with Parakeet.js (NVIDIA) for full privacy.
+
+
+ )}
+
+ {lines.length > 0 && (
+
+ {lines.map((line) => (
+
+ {line.text}
+
+ ))}
+
+ )}
+
+ {interim && (
+
+ {interim}
+
+ )}
+
+ {status === 'idle' && lines.length > 0 && (
+
+
+ {lines.length} segment{lines.length !== 1 ? 's' : ''} transcribed
+
+
+ Clear
+
+
+ )}
+
+
+ {/* Controls */}
+
+ {status === 'idle' ? (
+
+
+
+
+
+ Start Transcribing
+
+ ) : status === 'listening' ? (
+
+
+ Stop
+
+ ) : null}
+
+
+ {/* Capability badges */}
+
+
+ Live streaming
+
+
+ Audio file upload
+
+
+ Video transcription
+
+
+ Offline (Parakeet.js)
+
+
+
+
+ );
+}