Merge dev into main: add rVoice PWA + browser extension voice recorder

Resolves merge conflicts between dev (voice/transcription features) and
main (logseq import/export, memory cards, attachments). Both feature
sets coexist cleanly.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Jeff Emmett 2026-02-24 17:47:31 -08:00
commit 6fc4bd7c17
21 changed files with 6170 additions and 24 deletions

View File

@ -34,10 +34,13 @@ COPY --from=builder /app/scripts ./scripts
COPY --from=builder /app/src/lib/content-convert.ts ./src/lib/content-convert.ts
COPY --from=builder /app/node_modules/.prisma ./node_modules/.prisma
COPY --from=builder /app/node_modules/@prisma ./node_modules/@prisma
COPY rnotes-online/entrypoint.sh /app/entrypoint.sh
RUN chmod +x /app/entrypoint.sh
USER nextjs
EXPOSE 3000
ENV PORT=3000
ENV HOSTNAME="0.0.0.0"
ENTRYPOINT ["/app/entrypoint.sh"]
CMD ["node", "server.js"]

View File

@ -26,6 +26,12 @@ chrome.runtime.onInstalled.addListener(() => {
title: 'Clip selection to rNotes',
contexts: ['selection'],
});
chrome.contextMenus.create({
id: 'unlock-article',
title: 'Unlock & Clip article to rNotes',
contexts: ['page', 'link'],
});
});
// --- Helpers ---
@ -132,6 +138,31 @@ async function uploadImage(imageUrl) {
return response.json();
}
/**
 * Ask the rNotes server for a readable/archived version of `url`.
 * Returns the parsed JSON result, null when the user is signed out,
 * and throws on a non-2xx server response.
 */
async function unlockArticle(url) {
  const token = await getToken();
  if (!token) {
    showNotification('rNotes Error', 'Not signed in. Open extension settings to sign in.');
    return null;
  }
  const { host } = await getSettings();
  const response = await fetch(`${host}/api/articles/unlock`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'Authorization': `Bearer ${token}`,
    },
    body: JSON.stringify({ url }),
  });
  if (response.ok) return response.json();
  const text = await response.text();
  throw new Error(`Unlock failed: ${response.status} ${text}`);
}
// --- Context Menu Handler ---
chrome.contextMenus.onClicked.addListener(async (info, tab) => {
@ -197,6 +228,28 @@ chrome.contextMenus.onClicked.addListener(async (info, tab) => {
break;
}
case 'unlock-article': {
const targetUrl = info.linkUrl || tab.url;
showNotification('Unlocking Article', `Finding readable version of ${new URL(targetUrl).hostname}...`);
const result = await unlockArticle(targetUrl);
if (result && result.success && result.archiveUrl) {
// Create a CLIP note with the archive URL
await createNote({
title: tab.title || 'Unlocked Article',
content: `<p>Unlocked via ${result.strategy}</p><p>Original: <a href="${targetUrl}">${targetUrl}</a></p><p>Archive: <a href="${result.archiveUrl}">${result.archiveUrl}</a></p>`,
type: 'CLIP',
url: targetUrl,
});
showNotification('Article Unlocked', `Readable version found via ${result.strategy}`);
// Open the unlocked article in a new tab
chrome.tabs.create({ url: result.archiveUrl });
} else {
showNotification('Unlock Failed', result?.error || 'No archived version found');
}
break;
}
case 'clip-selection': {
// Get selection HTML
let content = '';
@ -238,6 +291,20 @@ chrome.contextMenus.onClicked.addListener(async (info, tab) => {
}
});
// --- Keyboard shortcut handler ---
// Opens the rVoice recorder in its own small popup window.
chrome.commands.onCommand.addListener((command) => {
  if (command !== 'open-voice-recorder') return;
  chrome.windows.create({
    url: chrome.runtime.getURL('voice.html'),
    type: 'popup',
    width: 380,
    height: 520,
    focused: true,
  });
});
// --- Message Handler (from popup) ---
chrome.runtime.onMessage.addListener((message, sender, sendResponse) => {

View File

@ -1,13 +1,14 @@
{
"manifest_version": 3,
"name": "rNotes Web Clipper",
"version": "1.0.0",
"description": "Clip pages, text, links, and images to rNotes.online",
"name": "rNotes Web Clipper & Voice",
"version": "1.1.0",
"description": "Clip pages, text, links, and images to rNotes.online. Record voice notes with transcription.",
"permissions": [
"activeTab",
"contextMenus",
"storage",
"notifications"
"notifications",
"offscreen"
],
"host_permissions": [
"https://rnotes.online/*",
@ -33,5 +34,17 @@
"options_ui": {
"page": "options.html",
"open_in_tab": false
},
"content_security_policy": {
"extension_pages": "script-src 'self' https://esm.sh; object-src 'self'"
},
"commands": {
"open-voice-recorder": {
"suggested_key": {
"default": "Ctrl+Shift+V",
"mac": "Command+Shift+V"
},
"description": "Open rVoice recorder"
}
}
}

View File

@ -0,0 +1,147 @@
/**
* Offline transcription using parakeet.js (NVIDIA Parakeet TDT 0.6B v2).
* Loaded at runtime from CDN. Model ~634 MB (int8) on first download,
* cached in IndexedDB after. Works fully offline after first download.
*
* Port of src/lib/parakeetOffline.ts for the browser extension.
*/
// localStorage flag noting the model finished downloading at least once
// (the weight files themselves are cached separately, per the header note).
const CACHE_KEY = 'parakeet-offline-cached';
// Singleton model — don't reload on subsequent calls
let cachedModel = null; // resolved model instance once loading succeeds
let loadingPromise = null; // in-flight load shared by concurrent callers
/**
 * True when the Parakeet model finished downloading on a prior run.
 */
function isModelCached() {
  let flag = null;
  try {
    flag = localStorage.getItem(CACHE_KEY);
  } catch {
    // localStorage unavailable (e.g. blocked) — treat as not cached
  }
  return flag === 'true';
}
/**
 * Probe for a usable WebGPU adapter; resolves false on any failure.
 */
async function detectWebGPU() {
  if (!navigator.gpu) return false;
  return navigator.gpu
    .requestAdapter()
    .then((adapter) => adapter != null)
    .catch(() => false);
}
/**
 * Get or create the Parakeet model singleton.
 *
 * Concurrent callers share one in-flight load; the resolved model is cached
 * for the lifetime of the page. On failure the in-flight promise is cleared
 * so a later call can retry — previously a failed load left `loadingPromise`
 * holding a rejected promise forever, making every retry fail instantly.
 *
 * @param {function} onProgress - callback({ status, progress, file, message })
 */
async function getModel(onProgress) {
  if (cachedModel) return cachedModel;
  if (loadingPromise) return loadingPromise;
  loadingPromise = (async () => {
    onProgress?.({ status: 'loading', message: 'Loading Parakeet model...' });
    // Dynamic import from CDN at runtime
    const { fromHub } = await import('https://esm.sh/parakeet.js@1.1.2');
    const backend = (await detectWebGPU()) ? 'webgpu' : 'wasm';
    // Per-file progress, aggregated into one overall percentage.
    const fileProgress = {};
    const model = await fromHub('parakeet-tdt-0.6b-v2', {
      backend,
      progress: ({ file, loaded, total }) => {
        fileProgress[file] = { loaded, total };
        let totalBytes = 0;
        let loadedBytes = 0;
        for (const fp of Object.values(fileProgress)) {
          totalBytes += fp.total || 0;
          loadedBytes += fp.loaded || 0;
        }
        if (totalBytes > 0) {
          const pct = Math.round((loadedBytes / totalBytes) * 100);
          onProgress?.({
            status: 'downloading',
            progress: pct,
            file,
            message: `Downloading model... ${pct}%`,
          });
        }
      },
    });
    // Guarded like isModelCached(): localStorage may be unavailable.
    try {
      localStorage.setItem(CACHE_KEY, 'true');
    } catch {
      // Non-fatal: the model still works, it just won't report as cached.
    }
    onProgress?.({ status: 'loading', message: 'Model loaded' });
    cachedModel = model;
    return model;
  })();
  try {
    return await loadingPromise;
  } finally {
    // Success leaves the model in cachedModel; failure must clear the
    // promise so the next call retries instead of awaiting a cached rejection.
    loadingPromise = null;
  }
}
/**
 * Decode an audio Blob into mono 16 kHz PCM samples (Float32Array),
 * resampling/downmixing through an OfflineAudioContext when needed.
 */
async function decodeAudioBlob(blob) {
  const bytes = await blob.arrayBuffer();
  const ctx = new AudioContext({ sampleRate: 16000 });
  try {
    const decoded = await ctx.decodeAudioData(bytes);
    const alreadyMono16k =
      decoded.sampleRate === 16000 && decoded.numberOfChannels === 1;
    if (alreadyMono16k) return decoded.getChannelData(0);
    // Render through a 1-channel, 16 kHz offline graph to convert.
    const frameCount = Math.ceil(decoded.duration * 16000);
    const offline = new OfflineAudioContext(1, frameCount, 16000);
    const src = offline.createBufferSource();
    src.buffer = decoded;
    src.connect(offline.destination);
    src.start();
    const rendered = await offline.startRendering();
    return rendered.getChannelData(0);
  } finally {
    await ctx.close();
  }
}
/**
 * Transcribe an audio Blob offline with Parakeet, entirely in the browser.
 * The first call downloads the model (~634 MB); later calls reuse the cache.
 *
 * @param {Blob} audioBlob
 * @param {function} onProgress - callback({ status, progress, file, message })
 * @returns {Promise<string>} transcribed text ('' when nothing recognized)
 */
async function transcribeOffline(audioBlob, onProgress) {
  const model = await getModel(onProgress);
  onProgress?.({ status: 'transcribing', message: 'Transcribing audio...' });
  const samples = await decodeAudioBlob(audioBlob);
  const result = await model.transcribe(samples, 16000, {
    returnTimestamps: false,
    enableProfiling: false,
  });
  const text = result.utterance_text?.trim() || '';
  onProgress?.({ status: 'done', message: 'Transcription complete' });
  return text;
}
// Expose the public API for voice.js (this file is loaded as an ES module).
const ParakeetOffline = { isModelCached, transcribeOffline };
window.ParakeetOffline = ParakeetOffline;

View File

@ -133,6 +133,23 @@
color: #e5e5e5;
border: 1px solid #404040;
}
.btn-voice {
background: #450a0a;
color: #fca5a5;
border: 1px solid #991b1b;
}
.btn-voice svg {
flex-shrink: 0;
}
.btn-unlock {
background: #172554;
color: #93c5fd;
border: 1px solid #1e40af;
}
.btn-unlock svg {
flex-shrink: 0;
}
.status {
margin: 0 14px 10px;
@ -212,6 +229,28 @@
</button>
</div>
<div class="actions">
<button class="btn-voice" id="voiceBtn" disabled>
<svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
<path d="M12 1a3 3 0 0 0-3 3v8a3 3 0 0 0 6 0V4a3 3 0 0 0-3-3z"></path>
<path d="M19 10v2a7 7 0 0 1-14 0v-2"></path>
<line x1="12" y1="19" x2="12" y2="23"></line>
<line x1="8" y1="23" x2="16" y2="23"></line>
</svg>
Voice Note
</button>
</div>
<div class="actions">
<button class="btn-unlock" id="unlockBtn" disabled>
<svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round">
<rect x="3" y="11" width="18" height="11" rx="2" ry="2"></rect>
<path d="M7 11V7a5 5 0 0 1 9.9-1"></path>
</svg>
Unlock Article
</button>
</div>
<div id="status" class="status"></div>
<div class="footer">

View File

@ -152,6 +152,8 @@ async function init() {
// Enable buttons
document.getElementById('clipPageBtn').disabled = false;
document.getElementById('unlockBtn').disabled = false;
document.getElementById('voiceBtn').disabled = false;
// Load notebooks
await populateNotebooks();
@ -255,6 +257,62 @@ document.getElementById('clipSelectionBtn').addEventListener('click', async () =
}
});
// Unlock the current tab's article via the server, save it as a CLIP note,
// and open the archived copy in a new tab.
document.getElementById('unlockBtn').addEventListener('click', async () => {
  const btn = document.getElementById('unlockBtn');
  btn.disabled = true;
  showStatus('Unlocking article...', 'loading');
  try {
    const token = await getToken();
    if (!token) {
      // Mirrors background.js unlockArticle(): fail fast when signed out
      // instead of sending "Bearer null".
      showStatus('Not signed in. Open settings to sign in.', 'error');
      return;
    }
    const settings = await getSettings();
    const response = await fetch(`${settings.host}/api/articles/unlock`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'Authorization': `Bearer ${token}`,
      },
      body: JSON.stringify({ url: currentTab.url }),
    });
    if (!response.ok) {
      // Surface HTTP failures instead of letting .json() throw a cryptic
      // SyntaxError when the server returns a non-JSON error page.
      const text = await response.text();
      throw new Error(`${response.status}: ${text}`);
    }
    const result = await response.json();
    if (result.success && result.archiveUrl) {
      // Also save as a note
      await createNote({
        title: currentTab.title || 'Unlocked Article',
        content: `<p>Unlocked via ${result.strategy}</p><p>Original: <a href="${currentTab.url}">${currentTab.url}</a></p><p>Archive: <a href="${result.archiveUrl}">${result.archiveUrl}</a></p>`,
        type: 'CLIP',
        url: currentTab.url,
      });
      showStatus(`Unlocked via ${result.strategy}! Opening...`, 'success');
      // Open archive in new tab
      chrome.tabs.create({ url: result.archiveUrl });
    } else {
      showStatus(result.error || 'No archived version found', 'error');
    }
  } catch (err) {
    showStatus(`Error: ${err.message}`, 'error');
  } finally {
    btn.disabled = false;
  }
});
// Hand off to the dedicated rVoice recorder window, then dismiss this popup.
document.getElementById('voiceBtn').addEventListener('click', () => {
  const recorderWindow = {
    url: chrome.runtime.getURL('voice.html'),
    type: 'popup',
    width: 380,
    height: 520,
    focused: true,
  };
  chrome.windows.create(recorderWindow);
  window.close();
});
document.getElementById('optionsLink').addEventListener('click', (e) => {
e.preventDefault();
chrome.runtime.openOptionsPage();

View File

@ -0,0 +1,414 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<style>
* { margin: 0; padding: 0; box-sizing: border-box; }
body {
width: 360px;
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif;
background: #0a0a0a;
color: #e5e5e5;
font-size: 13px;
overflow: hidden;
}
.header {
display: flex;
align-items: center;
justify-content: space-between;
padding: 8px 14px;
background: #171717;
border-bottom: 1px solid #262626;
-webkit-app-region: drag;
}
.header .brand {
font-weight: 700;
font-size: 14px;
color: #ef4444;
}
.header .brand-sub {
color: #a3a3a3;
font-weight: 400;
font-size: 12px;
}
.header .close-btn {
-webkit-app-region: no-drag;
background: none;
border: none;
color: #737373;
cursor: pointer;
font-size: 18px;
padding: 2px 6px;
border-radius: 4px;
}
.header .close-btn:hover {
color: #e5e5e5;
background: #262626;
}
.auth-warning {
padding: 10px 14px;
background: #451a03;
border-bottom: 1px solid #78350f;
text-align: center;
font-size: 12px;
color: #fbbf24;
}
.recorder {
padding: 20px 14px;
display: flex;
flex-direction: column;
align-items: center;
gap: 12px;
}
/* Record button */
.rec-btn {
width: 72px;
height: 72px;
border-radius: 50%;
border: 3px solid #404040;
background: #171717;
cursor: pointer;
display: flex;
align-items: center;
justify-content: center;
transition: all 0.2s;
position: relative;
}
.rec-btn:hover {
border-color: #ef4444;
}
.rec-btn .inner {
width: 32px;
height: 32px;
background: #ef4444;
border-radius: 50%;
transition: all 0.2s;
}
.rec-btn.recording {
border-color: #ef4444;
}
.rec-btn.recording .inner {
width: 24px;
height: 24px;
border-radius: 4px;
background: #ef4444;
}
.rec-btn.recording::after {
content: '';
position: absolute;
inset: -6px;
border-radius: 50%;
border: 2px solid rgba(239, 68, 68, 0.3);
animation: pulse-ring 1.5s infinite;
}
@keyframes pulse-ring {
0% { transform: scale(1); opacity: 1; }
100% { transform: scale(1.15); opacity: 0; }
}
.timer {
font-size: 28px;
font-family: 'SF Mono', 'Consolas', 'Courier New', monospace;
font-weight: 600;
color: #e5e5e5;
letter-spacing: 2px;
}
.timer.recording {
color: #ef4444;
}
.status-label {
font-size: 11px;
text-transform: uppercase;
letter-spacing: 1.5px;
font-weight: 600;
}
.status-label.idle { color: #737373; }
.status-label.recording { color: #ef4444; }
.status-label.processing { color: #f59e0b; }
.status-label.done { color: #4ade80; }
/* Transcript area */
.transcript-area {
width: 100%;
padding: 0 14px 12px;
display: none;
}
.transcript-area.visible {
display: block;
}
.transcript-label {
font-size: 10px;
text-transform: uppercase;
letter-spacing: 1px;
color: #737373;
margin-bottom: 6px;
font-weight: 600;
}
.transcript-text {
background: #171717;
border: 1px solid #262626;
border-radius: 6px;
padding: 10px 12px;
font-size: 13px;
line-height: 1.5;
color: #d4d4d4;
max-height: 120px;
overflow-y: auto;
min-height: 40px;
white-space: pre-wrap;
}
.transcript-text.editable {
outline: none;
border-color: #404040;
cursor: text;
}
.transcript-text.editable:focus {
border-color: #f59e0b;
}
.transcript-text .placeholder {
color: #525252;
font-style: italic;
}
.transcript-text .final-text {
color: #d4d4d4;
}
.transcript-text .interim-text {
color: #737373;
font-style: italic;
}
/* Controls row */
.controls {
width: 100%;
padding: 0 14px 10px;
}
.controls select {
width: 100%;
padding: 6px 8px;
background: #171717;
border: 1px solid #404040;
border-radius: 4px;
color: #e5e5e5;
font-size: 12px;
outline: none;
}
.controls select:focus {
border-color: #f59e0b;
}
.controls label {
display: block;
font-size: 10px;
color: #737373;
margin-bottom: 3px;
font-weight: 600;
text-transform: uppercase;
letter-spacing: 0.5px;
}
/* Action buttons */
.actions {
width: 100%;
padding: 0 14px 12px;
display: flex;
gap: 8px;
}
.actions button {
flex: 1;
padding: 8px 12px;
border: none;
border-radius: 6px;
font-size: 12px;
font-weight: 600;
cursor: pointer;
transition: opacity 0.15s;
}
.actions button:hover:not(:disabled) { opacity: 0.85; }
.actions button:disabled { opacity: 0.35; cursor: not-allowed; }
.btn-save {
background: #f59e0b;
color: #0a0a0a;
}
.btn-discard {
background: #262626;
color: #a3a3a3;
border: 1px solid #404040;
}
.btn-copy {
background: #172554;
color: #93c5fd;
border: 1px solid #1e40af;
}
/* Status bar */
.status-bar {
padding: 8px 14px;
border-top: 1px solid #262626;
font-size: 11px;
color: #525252;
text-align: center;
display: none;
}
.status-bar.visible {
display: block;
}
.status-bar.success { color: #4ade80; background: #052e16; border-top-color: #166534; }
.status-bar.error { color: #fca5a5; background: #450a0a; border-top-color: #991b1b; }
.status-bar.loading { color: #93c5fd; background: #172554; border-top-color: #1e40af; }
/* Live indicator */
.live-indicator {
display: none;
align-items: center;
gap: 5px;
font-size: 10px;
font-weight: 700;
text-transform: uppercase;
letter-spacing: 1.5px;
color: #4ade80;
}
.live-indicator.visible {
display: flex;
}
.live-indicator .dot {
width: 6px;
height: 6px;
border-radius: 50%;
background: #4ade80;
animation: pulse-dot 1s infinite;
}
@keyframes pulse-dot {
0%, 100% { opacity: 1; }
50% { opacity: 0.3; }
}
/* Progress bar (for model download) */
.progress-area {
width: 100%;
padding: 0 14px 8px;
display: none;
}
.progress-area.visible {
display: block;
}
.progress-label {
font-size: 11px;
color: #a3a3a3;
margin-bottom: 4px;
}
.progress-bar {
width: 100%;
height: 6px;
background: #262626;
border-radius: 3px;
overflow: hidden;
}
.progress-bar .fill {
height: 100%;
background: #f59e0b;
border-radius: 3px;
transition: width 0.3s;
width: 0%;
}
/* Audio preview */
.audio-preview {
width: 100%;
padding: 0 14px 8px;
display: none;
}
.audio-preview.visible {
display: block;
}
.audio-preview audio {
width: 100%;
height: 32px;
}
/* Keyboard hint */
.kbd-hint {
padding: 4px 14px 8px;
text-align: center;
font-size: 10px;
color: #404040;
}
.kbd-hint kbd {
background: #1a1a1a;
border: 1px solid #333;
border-radius: 3px;
padding: 1px 5px;
font-family: inherit;
font-size: 10px;
}
</style>
</head>
<body>
<div class="header">
<span>
<span class="brand">rVoice</span>
<span class="brand-sub">voice notes</span>
</span>
<button class="close-btn" id="closeBtn" title="Close">&times;</button>
</div>
<div id="authWarning" class="auth-warning" style="display: none;">
Sign in via rNotes Clipper settings first.
</div>
<div class="recorder">
<div class="status-label idle" id="statusLabel">Ready</div>
<button class="rec-btn" id="recBtn" title="Start recording">
<div class="inner"></div>
</button>
<div class="timer" id="timer">00:00</div>
<div class="live-indicator" id="liveIndicator">
<span class="dot"></span>
Live transcribe
</div>
</div>
<div class="progress-area" id="progressArea">
<div class="progress-label" id="progressLabel">Loading model...</div>
<div class="progress-bar"><div class="fill" id="progressFill"></div></div>
</div>
<div class="audio-preview" id="audioPreview">
<audio controls id="audioPlayer"></audio>
</div>
<div class="transcript-area" id="transcriptArea">
<div class="transcript-label">Transcript</div>
<div class="transcript-text editable" id="transcriptText" contenteditable="true">
<span class="placeholder">Transcribing...</span>
</div>
</div>
<div class="controls" id="notebookControls">
<label for="notebook">Save to notebook</label>
<select id="notebook">
<option value="">Default notebook</option>
</select>
</div>
<div class="actions" id="postActions" style="display: none;">
<button class="btn-discard" id="discardBtn">Discard</button>
<button class="btn-copy" id="copyBtn" title="Copy transcript">Copy</button>
<button class="btn-save" id="saveBtn">Save to rNotes</button>
</div>
<div class="status-bar" id="statusBar"></div>
<div class="kbd-hint">
<kbd>Space</kbd> to record &middot; <kbd>Esc</kbd> to close &middot; Offline ready
</div>
<script src="parakeet-offline.js" type="module"></script>
<script src="voice.js"></script>
</body>
</html>

610
browser-extension/voice.js Normal file
View File

@ -0,0 +1,610 @@
const DEFAULT_HOST = 'https://rnotes.online'; // fallback when no host is configured
// --- State ---
// Simple state machine driving the recorder UI.
let state = 'idle'; // idle | recording | processing | done
let mediaRecorder = null; // active MediaRecorder, or null
let audioChunks = []; // data chunks emitted while recording
let timerInterval = null; // setInterval handle for the elapsed-time display
let startTime = 0; // Date.now() when recording began
let audioBlob = null; // finalized recording, set after stop
let audioUrl = null; // object URL for the <audio> preview (revoked on reset)
let transcript = ''; // best transcript chosen by the cascade in stopRecording
let liveTranscript = ''; // accumulated from Web Speech API
let uploadedFileUrl = ''; // server URL of the uploaded audio file
let uploadedMimeType = ''; // mime type reported by the upload endpoint
let uploadedFileSize = 0; // byte size reported by the upload endpoint
let duration = 0; // recording length in whole seconds
// Web Speech API
let recognition = null; // active SpeechRecognition instance, or null
let speechSupported = !!(window.SpeechRecognition || window.webkitSpeechRecognition);
// --- DOM refs ---
const recBtn = document.getElementById('recBtn');
const timerEl = document.getElementById('timer');
const statusLabel = document.getElementById('statusLabel');
const transcriptArea = document.getElementById('transcriptArea');
const transcriptText = document.getElementById('transcriptText');
const liveIndicator = document.getElementById('liveIndicator');
const audioPreview = document.getElementById('audioPreview');
const audioPlayer = document.getElementById('audioPlayer');
const notebookSelect = document.getElementById('notebook');
const postActions = document.getElementById('postActions');
const saveBtn = document.getElementById('saveBtn');
const discardBtn = document.getElementById('discardBtn');
const copyBtn = document.getElementById('copyBtn');
const statusBar = document.getElementById('statusBar');
const authWarning = document.getElementById('authWarning');
const closeBtn = document.getElementById('closeBtn');
// --- Helpers ---
// Read the configured rNotes host from synced extension storage.
async function getSettings() {
  const { rnotesHost } = await chrome.storage.sync.get(['rnotesHost']);
  return { host: rnotesHost || DEFAULT_HOST };
}
// Read the stored auth token, or null when the user is signed out.
async function getToken() {
  const stored = await chrome.storage.local.get(['encryptid_token']);
  return stored.encryptid_token || null;
}
/**
 * Decode a JWT's payload without verifying its signature.
 * Returns the payload object, or null when the token is malformed or expired.
 *
 * JWT segments are base64url-encoded (RFC 7515): '-' and '_' replace '+' and
 * '/', and '=' padding is stripped. atob() only accepts standard base64, so
 * normalize first — previously valid tokens containing '-'/'_' were rejected.
 */
function decodeToken(token) {
  try {
    const seg = token.split('.')[1];
    const b64 = seg
      .replace(/-/g, '+')
      .replace(/_/g, '/')
      .padEnd(seg.length + ((4 - (seg.length % 4)) % 4), '=');
    const payload = JSON.parse(atob(b64));
    if (payload.exp && payload.exp * 1000 < Date.now()) return null;
    return payload;
  } catch { return null; }
}
// Format a whole number of seconds as MM:SS (minutes may exceed 59).
function formatTime(seconds) {
  const mins = Math.floor(seconds / 60);
  const secs = seconds % 60;
  return `${String(mins).padStart(2, '0')}:${String(secs).padStart(2, '0')}`;
}
// Update the big status label's text and its color class.
function setStatusLabel(text, cls) {
  statusLabel.className = `status-label ${cls}`;
  statusLabel.textContent = text;
}
// Show a message in the bottom status bar; success messages auto-hide after 3s.
function showStatusBar(message, type) {
  statusBar.className = `status-bar visible ${type}`;
  statusBar.textContent = message;
  if (type !== 'success') return;
  setTimeout(() => { statusBar.className = 'status-bar'; }, 3000);
}
// --- Parakeet progress UI ---
// Widgets showing offline-model download/transcription progress
// (events come from parakeet-offline.js via the onProgress callback).
const progressArea = document.getElementById('progressArea');
const progressLabel = document.getElementById('progressLabel');
const progressFill = document.getElementById('progressFill');
// Reflect a Parakeet progress event ({ status, progress, message }) in the UI.
function showParakeetProgress(p) {
  if (!progressArea) return;
  progressArea.classList.add('visible');
  if (p.message) progressLabel.textContent = p.message;
  switch (p.status) {
    case 'downloading':
      if (p.progress !== undefined) progressFill.style.width = `${p.progress}%`;
      break;
    case 'transcribing':
      progressFill.style.width = '100%';
      break;
    case 'loading':
      progressFill.style.width = '0%';
      break;
  }
}
// Hide the model progress widgets and reset the bar to empty.
function hideParakeetProgress() {
  if (!progressArea) return;
  progressArea.classList.remove('visible');
  progressFill.style.width = '0%';
}
// --- Notebook loader ---
// Populate the notebook <select> from the server and restore the last-used pick.
// Silently does nothing when signed out or on any fetch/HTTP failure.
async function loadNotebooks() {
  const token = await getToken();
  if (!token) return;
  const { host } = await getSettings();
  try {
    const res = await fetch(`${host}/api/notebooks`, {
      headers: { 'Authorization': `Bearer ${token}` },
    });
    if (!res.ok) return;
    const notebooks = await res.json();
    notebooks.forEach((nb) => {
      const opt = document.createElement('option');
      opt.value = nb.id;
      opt.textContent = nb.title;
      notebookSelect.appendChild(opt);
    });
    // Restore last used
    const { lastNotebookId } = await chrome.storage.local.get(['lastNotebookId']);
    if (lastNotebookId) notebookSelect.value = lastNotebookId;
  } catch (err) {
    console.error('Failed to load notebooks:', err);
  }
}
// Remember the chosen notebook across recorder sessions.
notebookSelect.addEventListener('change', (event) => {
  chrome.storage.local.set({ lastNotebookId: event.target.value });
});
// --- Live transcription (Web Speech API) ---
// Start streaming speech-to-text alongside the MediaRecorder. Final results
// accumulate into the module-level `liveTranscript`; interim results are only
// shown in the UI. No-op when the Web Speech API is unavailable.
function startLiveTranscription() {
  if (!speechSupported) return;
  const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
  recognition = new SpeechRecognition();
  recognition.continuous = true; // keep listening across utterances
  recognition.interimResults = true; // deliver partial hypotheses as they form
  recognition.lang = 'en-US'; // NOTE(review): hard-coded locale — consider making this a setting
  let finalizedText = '';
  recognition.onresult = (event) => {
    let interimText = '';
    // Rebuild finalized text from all final results
    finalizedText = '';
    for (let i = 0; i < event.results.length; i++) {
      const result = event.results[i];
      if (result.isFinal) {
        finalizedText += result[0].transcript.trim() + ' ';
      } else {
        interimText += result[0].transcript;
      }
    }
    liveTranscript = finalizedText.trim();
    // Update the live transcript display
    updateLiveDisplay(finalizedText.trim(), interimText.trim());
  };
  recognition.onerror = (event) => {
    // 'aborted' (we stopped it) and 'no-speech' are routine; log everything else.
    if (event.error !== 'aborted' && event.error !== 'no-speech') {
      console.warn('Speech recognition error:', event.error);
    }
  };
  // Auto-restart on end (Chrome stops after ~60s of silence)
  recognition.onend = () => {
    // Restart only while still recording AND stopLiveTranscription() hasn't
    // nulled `recognition` — that null-ing is what makes a stop stick.
    if (state === 'recording' && recognition) {
      try { recognition.start(); } catch {}
    }
  };
  try {
    recognition.start();
    if (liveIndicator) liveIndicator.classList.add('visible');
  } catch (err) {
    // Degrade gracefully: keep recording audio without live text.
    console.warn('Could not start speech recognition:', err);
    speechSupported = false;
  }
}
// Tear down live transcription. `recognition` is cleared before stop() so the
// onend handler sees null and does not auto-restart.
function stopLiveTranscription() {
  const active = recognition;
  recognition = null;
  if (active) {
    try { active.stop(); } catch {}
  }
  if (liveIndicator) liveIndicator.classList.remove('visible');
}
// Render finalized (solid) and interim (dimmed) speech text while recording.
function updateLiveDisplay(finalText, interimText) {
  if (state !== 'recording') return;
  // Show transcript area while recording
  transcriptArea.classList.add('visible');
  const parts = [];
  if (finalText) {
    parts.push(`<span class="final-text">${escapeHtml(finalText)}</span>`);
  }
  if (interimText) {
    parts.push(`<span class="interim-text">${escapeHtml(interimText)}</span>`);
  }
  transcriptText.innerHTML = parts.length
    ? parts.join('')
    : '<span class="placeholder">Listening...</span>';
  // Keep the newest text scrolled into view.
  transcriptText.scrollTop = transcriptText.scrollHeight;
}
/**
 * Escape text for safe interpolation into innerHTML.
 * String replacement instead of the previous DOM round-trip: same escapes
 * for '&', '<' and '>', but no throwaway element per call and no DOM
 * dependency, which also makes it unit-testable.
 */
function escapeHtml(text) {
  return String(text)
    .replace(/&/g, '&amp;')
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;');
}
// --- Recording ---
// Begin capturing microphone audio with MediaRecorder and, when available,
// live Web Speech transcription. Flips state -> 'recording' and resets the UI.
async function startRecording() {
  try {
    const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
    // Prefer Opus-in-WebM; fall back to the container default.
    const mimeType = MediaRecorder.isTypeSupported('audio/webm;codecs=opus')
      ? 'audio/webm;codecs=opus'
      : 'audio/webm';
    mediaRecorder = new MediaRecorder(stream, { mimeType });
    audioChunks = [];
    liveTranscript = '';
    mediaRecorder.ondataavailable = (e) => {
      if (e.data.size > 0) audioChunks.push(e.data);
    };
    mediaRecorder.start(1000); // emit a data chunk every second
    startTime = Date.now();
    state = 'recording';
    // UI updates
    recBtn.classList.add('recording');
    timerEl.classList.add('recording');
    setStatusLabel('Recording', 'recording');
    postActions.style.display = 'none';
    audioPreview.classList.remove('visible');
    statusBar.className = 'status-bar';
    // Show transcript area with listening placeholder
    if (speechSupported) {
      transcriptArea.classList.add('visible');
      transcriptText.innerHTML = '<span class="placeholder">Listening...</span>';
    } else {
      transcriptArea.classList.remove('visible');
    }
    timerInterval = setInterval(() => {
      const elapsed = Math.floor((Date.now() - startTime) / 1000);
      timerEl.textContent = formatTime(elapsed);
    }, 1000);
    // Start live transcription alongside recording
    startLiveTranscription();
  } catch (err) {
    // Most commonly NotAllowedError from a denied microphone prompt.
    showStatusBar(err.message || 'Microphone access denied', 'error');
  }
}
/**
 * Stop recording: finalize the audio blob, upload it, then obtain the best
 * transcript via a three-tier cascade (server API -> captured Web Speech
 * text -> offline Parakeet). Ends with state = 'done' and the post-recording
 * action buttons visible, even when upload or transcription fails.
 */
async function stopRecording() {
  // This checks the MediaRecorder's own state, not the module-level `state`.
  if (!mediaRecorder || mediaRecorder.state === 'inactive') return;
  clearInterval(timerInterval);
  timerInterval = null;
  duration = Math.floor((Date.now() - startTime) / 1000);
  // Capture live transcript before stopping recognition
  const capturedLiveTranscript = liveTranscript;
  // Stop live transcription
  stopLiveTranscription();
  state = 'processing';
  recBtn.classList.remove('recording');
  timerEl.classList.remove('recording');
  setStatusLabel('Processing...', 'processing');
  // Stop recorder and collect blob
  audioBlob = await new Promise((resolve) => {
    mediaRecorder.onstop = () => {
      // Release the microphone before assembling the final blob.
      mediaRecorder.stream.getTracks().forEach(t => t.stop());
      resolve(new Blob(audioChunks, { type: mediaRecorder.mimeType }));
    };
    mediaRecorder.stop();
  });
  // Show audio preview
  if (audioUrl) URL.revokeObjectURL(audioUrl); // free the previous take's URL
  audioUrl = URL.createObjectURL(audioBlob);
  audioPlayer.src = audioUrl;
  audioPreview.classList.add('visible');
  // Show live transcript while we process (if we have one)
  transcriptArea.classList.add('visible');
  if (capturedLiveTranscript) {
    transcriptText.textContent = capturedLiveTranscript;
    showStatusBar('Improving transcript...', 'loading');
  } else {
    transcriptText.innerHTML = '<span class="placeholder">Transcribing...</span>';
    showStatusBar('Uploading & transcribing...', 'loading');
  }
  // Upload audio file
  // NOTE(review): token may be null here (signed out), in which case the
  // requests below send "Bearer null" — confirm the auth warning gates this.
  const token = await getToken();
  const settings = await getSettings();
  try {
    const uploadForm = new FormData();
    uploadForm.append('file', audioBlob, 'voice-note.webm');
    const uploadRes = await fetch(`${settings.host}/api/uploads`, {
      method: 'POST',
      headers: { 'Authorization': `Bearer ${token}` },
      body: uploadForm,
    });
    if (!uploadRes.ok) throw new Error('Upload failed');
    const uploadResult = await uploadRes.json();
    // Remembered for the note payload built later in saveToRNotes().
    uploadedFileUrl = uploadResult.url;
    uploadedMimeType = uploadResult.mimeType;
    uploadedFileSize = uploadResult.size;
    // --- Three-tier transcription cascade ---
    // Tier 1: Batch API (Whisper on server — highest quality)
    let bestTranscript = '';
    try {
      showStatusBar('Transcribing via server...', 'loading');
      const transcribeForm = new FormData();
      transcribeForm.append('audio', audioBlob, 'voice-note.webm');
      const transcribeRes = await fetch(`${settings.host}/api/voice/transcribe`, {
        method: 'POST',
        headers: { 'Authorization': `Bearer ${token}` },
        body: transcribeForm,
      });
      if (transcribeRes.ok) {
        const transcribeResult = await transcribeRes.json();
        bestTranscript = transcribeResult.text || '';
      }
    } catch {
      // Network-level failure only; non-OK responses fall through silently.
      console.warn('Tier 1 (batch API) unavailable');
    }
    // Tier 2: Live transcript from Web Speech API (already captured)
    if (!bestTranscript && capturedLiveTranscript) {
      bestTranscript = capturedLiveTranscript;
    }
    // Tier 3: Offline Parakeet.js (NVIDIA, runs in browser)
    if (!bestTranscript && window.ParakeetOffline) {
      try {
        showStatusBar('Transcribing offline (Parakeet)...', 'loading');
        bestTranscript = await window.ParakeetOffline.transcribeOffline(audioBlob, (p) => {
          showParakeetProgress(p);
        });
        hideParakeetProgress();
      } catch (offlineErr) {
        console.warn('Tier 3 (Parakeet offline) failed:', offlineErr);
        hideParakeetProgress();
      }
    }
    transcript = bestTranscript;
    // Show transcript (editable)
    if (transcript) {
      transcriptText.textContent = transcript;
    } else {
      transcriptText.innerHTML = '<span class="placeholder">No transcript available - you can type one here</span>';
    }
    state = 'done';
    setStatusLabel('Done', 'done');
    postActions.style.display = 'flex';
    statusBar.className = 'status-bar';
  } catch (err) {
    // On upload error, try offline transcription directly
    let fallbackTranscript = capturedLiveTranscript || '';
    if (!fallbackTranscript && window.ParakeetOffline) {
      try {
        showStatusBar('Upload failed, transcribing offline...', 'loading');
        fallbackTranscript = await window.ParakeetOffline.transcribeOffline(audioBlob, (p) => {
          showParakeetProgress(p);
        });
        hideParakeetProgress();
      } catch {
        hideParakeetProgress();
      }
    }
    transcript = fallbackTranscript;
    if (transcript) {
      transcriptText.textContent = transcript;
    }
    showStatusBar(`Error: ${err.message}`, 'error');
    state = 'done';
    setStatusLabel('Error', 'idle');
    postActions.style.display = 'flex';
  }
}
// Record-button behavior: idle/done starts a new take, recording stops it,
// and clicks during 'processing' are ignored.
function toggleRecording() {
  switch (state) {
    case 'idle':
    case 'done':
      startRecording();
      break;
    case 'recording':
      stopRecording();
      break;
    default:
      // 'processing' — ignore clicks
      break;
  }
}
// --- Save to rNotes ---
// Persist the uploaded recording plus the (possibly user-edited) transcript
// as an AUDIO note, then reset the recorder after a short delay.
async function saveToRNotes() {
  saveBtn.disabled = true;
  showStatusBar('Saving to rNotes...', 'loading');
  const token = await getToken();
  const settings = await getSettings();
  // Get current transcript text (user may have edited it)
  const edited = transcriptText.textContent.trim();
  const hasPlaceholder = transcriptText.querySelector('.placeholder') !== null;
  const finalTranscript = hasPlaceholder ? '' : edited;
  const timeStr = new Date().toLocaleString('en-US', {
    month: 'short', day: 'numeric',
    hour: 'numeric', minute: '2-digit',
    hour12: true
  });
  const body = {
    title: `Voice note - ${timeStr}`,
    content: finalTranscript
      ? `<p>${finalTranscript.replace(/\n/g, '</p><p>')}</p>`
      : '<p><em>Voice recording (no transcript)</em></p>',
    type: 'AUDIO',
    mimeType: uploadedMimeType || 'audio/webm',
    fileUrl: uploadedFileUrl,
    fileSize: uploadedFileSize,
    duration: duration,
    tags: ['voice'],
  };
  if (notebookSelect.value) body.notebookId = notebookSelect.value;
  try {
    const res = await fetch(`${settings.host}/api/notes`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'Authorization': `Bearer ${token}`,
      },
      body: JSON.stringify(body),
    });
    if (!res.ok) {
      const text = await res.text();
      throw new Error(`${res.status}: ${text}`);
    }
    showStatusBar('Saved to rNotes!', 'success');
    // Notify
    chrome.runtime.sendMessage({
      type: 'notify',
      title: 'Voice Note Saved',
      message: `${formatTime(duration)} recording saved to rNotes`,
    });
    // Reset after short delay
    setTimeout(resetState, 1500);
  } catch (err) {
    showStatusBar(`Save failed: ${err.message}`, 'error');
  } finally {
    saveBtn.disabled = false;
  }
}
// --- Copy to clipboard ---
// Copies the current transcript text; refuses when only the placeholder
// is present or the text is empty.
async function copyTranscript() {
  const hasPlaceholder = transcriptText.querySelector('.placeholder');
  const text = transcriptText.textContent.trim();
  if (hasPlaceholder || !text) {
    showStatusBar('No transcript to copy', 'error');
    return;
  }
  try {
    await navigator.clipboard.writeText(text);
    showStatusBar('Copied to clipboard', 'success');
  } catch {
    showStatusBar('Copy failed', 'error');
  }
}
// --- Discard ---
// Returns the popup to its initial 'idle' state: clears all recording and
// upload bookkeeping, releases the audio object URL, and restores the UI.
function resetState() {
  state = 'idle';

  // Recording artifacts
  mediaRecorder = null;
  audioChunks = [];
  audioBlob = null;
  transcript = '';
  liveTranscript = '';
  duration = 0;

  // Upload bookkeeping
  uploadedFileUrl = '';
  uploadedMimeType = '';
  uploadedFileSize = 0;

  stopLiveTranscription();
  if (audioUrl) {
    URL.revokeObjectURL(audioUrl);
    audioUrl = null;
  }

  // UI back to defaults
  timerEl.textContent = '00:00';
  timerEl.classList.remove('recording');
  recBtn.classList.remove('recording');
  setStatusLabel('Ready', 'idle');
  postActions.style.display = 'none';
  audioPreview.classList.remove('visible');
  transcriptArea.classList.remove('visible');
  hideParakeetProgress();
  statusBar.className = 'status-bar';
}
// --- Keyboard shortcuts ---
document.addEventListener('keydown', (e) => {
  const editingTranscript = document.activeElement === transcriptText;
  switch (e.code) {
    case 'Space':
      // Toggle recording unless the user is editing the transcript.
      if (!editingTranscript) {
        e.preventDefault();
        toggleRecording();
      }
      break;
    case 'Escape':
      // Close the popup window.
      window.close();
      break;
    case 'Enter':
      // Ctrl/Cmd+Enter saves once a recording is finished.
      if ((e.ctrlKey || e.metaKey) && state === 'done') {
        e.preventDefault();
        saveToRNotes();
      }
      break;
  }
});

// Clear the placeholder the first time the transcript receives focus.
transcriptText.addEventListener('focus', () => {
  if (transcriptText.querySelector('.placeholder')) {
    transcriptText.textContent = '';
  }
});

// --- Event listeners ---
recBtn.addEventListener('click', toggleRecording);
saveBtn.addEventListener('click', saveToRNotes);
discardBtn.addEventListener('click', resetState);
copyBtn.addEventListener('click', copyTranscript);
closeBtn.addEventListener('click', () => window.close());
// --- Init ---
// Gates the UI on authentication: without a decodable token the record
// button is disabled and a warning is shown; otherwise load notebooks.
async function init() {
  const token = await getToken();
  const signedIn = Boolean(token && decodeToken(token));
  if (!signedIn) {
    authWarning.style.display = 'block';
    recBtn.style.opacity = '0.3';
    recBtn.style.pointerEvents = 'none';
    return;
  }
  authWarning.style.display = 'none';
  await loadNotebooks();
}
document.addEventListener('DOMContentLoaded', init);

View File

@ -6,13 +6,12 @@ services:
container_name: rnotes-online
restart: unless-stopped
environment:
- INFISICAL_CLIENT_ID=${INFISICAL_CLIENT_ID}
- INFISICAL_CLIENT_SECRET=${INFISICAL_CLIENT_SECRET}
- INFISICAL_PROJECT_SLUG=rnotes
- INFISICAL_ENV=prod
- INFISICAL_URL=http://infisical:8080
- DATABASE_URL=postgresql://rnotes:${DB_PASSWORD}@rnotes-postgres:5432/rnotes
- NEXT_PUBLIC_RSPACE_URL=${NEXT_PUBLIC_RSPACE_URL:-https://rspace.online}
- RSPACE_INTERNAL_URL=${RSPACE_INTERNAL_URL:-http://rspace-online:3000}
- NEXT_PUBLIC_ENCRYPTID_SERVER_URL=${NEXT_PUBLIC_ENCRYPTID_SERVER_URL:-https://encryptid.jeffemmett.com}
- RSPACE_INTERNAL_KEY=${RSPACE_INTERNAL_KEY}
- VOICE_API_URL=${VOICE_API_URL:-http://voice-command-api:8000}
- NEXT_PUBLIC_VOICE_WS_URL=${NEXT_PUBLIC_VOICE_WS_URL:-wss://voice.jeffemmett.com}
volumes:
- uploads_data:/app/uploads
labels:

82
entrypoint.sh Normal file
View File

@ -0,0 +1,82 @@
#!/bin/sh
# Infisical secret injection entrypoint
# Fetches secrets from Infisical API and injects them as env vars before starting the app.
# Required env vars: INFISICAL_CLIENT_ID, INFISICAL_CLIENT_SECRET
# Optional: INFISICAL_PROJECT_SLUG (default: rnotes), INFISICAL_ENV (default: prod),
# INFISICAL_URL (default: http://infisical:8080)
set -e

# In-cluster defaults; override via the container environment.
INFISICAL_URL="${INFISICAL_URL:-http://infisical:8080}"
INFISICAL_ENV="${INFISICAL_ENV:-prod}"
INFISICAL_PROJECT_SLUG="${INFISICAL_PROJECT_SLUG:-rnotes}"

# Without machine-identity credentials, skip injection entirely and run the
# app with whatever env vars the container already has (plain-env deploys).
if [ -z "$INFISICAL_CLIENT_ID" ] || [ -z "$INFISICAL_CLIENT_SECRET" ]; then
  echo "[infisical] No credentials set, starting without secret injection"
  exec "$@"
fi

echo "[infisical] Fetching secrets from ${INFISICAL_PROJECT_SLUG}/${INFISICAL_ENV}..."

# Use Node.js (already in the image) for reliable JSON parsing and HTTP calls.
# The inline script logs in via universal-auth, fetches all secrets for the
# project/env, and prints one `export KEY='value'` line per secret to stdout.
# NOTE(review): Content-Length uses data.length (chars, not bytes) — fine for
# ASCII client IDs/secrets; confirm if credentials could ever be non-ASCII.
EXPORTS=$(node -e "
const http = require('http');
const https = require('https');
const url = new URL(process.env.INFISICAL_URL);
const client = url.protocol === 'https:' ? https : http;
const post = (path, body) => new Promise((resolve, reject) => {
  const data = JSON.stringify(body);
  const req = client.request({ hostname: url.hostname, port: url.port, path, method: 'POST',
    headers: { 'Content-Type': 'application/json', 'Content-Length': data.length }
  }, res => { let d = ''; res.on('data', c => d += c); res.on('end', () => resolve(JSON.parse(d))); });
  req.on('error', reject);
  req.end(data);
});
const get = (path, token) => new Promise((resolve, reject) => {
  const req = client.request({ hostname: url.hostname, port: url.port, path, method: 'GET',
    headers: { 'Authorization': 'Bearer ' + token }
  }, res => { let d = ''; res.on('data', c => d += c); res.on('end', () => resolve(JSON.parse(d))); });
  req.on('error', reject);
  req.end();
});
(async () => {
  try {
    const auth = await post('/api/v1/auth/universal-auth/login', {
      clientId: process.env.INFISICAL_CLIENT_ID,
      clientSecret: process.env.INFISICAL_CLIENT_SECRET
    });
    if (!auth.accessToken) { console.error('[infisical] Auth failed'); process.exit(1); }
    const slug = process.env.INFISICAL_PROJECT_SLUG;
    const env = process.env.INFISICAL_ENV;
    const secrets = await get('/api/v3/secrets/raw?workspaceSlug=' + slug + '&environment=' + env + '&secretPath=/&recursive=true', auth.accessToken);
    if (!secrets.secrets) { console.error('[infisical] No secrets returned'); process.exit(1); }
    // Output as shell-safe export statements
    for (const s of secrets.secrets) {
      // Single-quote the value to prevent shell expansion, escape existing single quotes
      const escaped = s.secretValue.replace(/'/g, \"'\\\\''\" );
      console.log('export ' + s.secretKey + \"='\" + escaped + \"'\");
    }
  } catch (e) { console.error('[infisical] Error:', e.message); process.exit(1); }
})();
" 2>&1) || {
  # node exited non-zero (auth/network error): degrade gracefully rather than
  # blocking startup — the app starts with the existing environment.
  echo "[infisical] WARNING: Failed to fetch secrets, starting with existing env vars"
  exec "$@"
}

# stderr was merged into EXPORTS above, so distinguish real export lines from
# error output before eval'ing anything.
# Check if we got export statements or error messages
if echo "$EXPORTS" | grep -q "^export "; then
  COUNT=$(echo "$EXPORTS" | grep -c "^export ")
  eval "$EXPORTS"
  echo "[infisical] Injected ${COUNT} secrets"
else
  echo "[infisical] WARNING: $EXPORTS"
  echo "[infisical] Starting with existing env vars"
fi

# Hand off to the container CMD (e.g. `node server.js`).
exec "$@"

3563
package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

View File

@ -24,7 +24,7 @@
"@tiptap/pm": "^3.19.0",
"@tiptap/react": "^3.19.0",
"@tiptap/starter-kit": "^3.19.0",
"archiver": "^7.0.0",
"archiver": "^7.0.1",
"dompurify": "^3.2.0",
"lowlight": "^3.3.0",
"marked": "^15.0.0",
@ -35,7 +35,7 @@
"zustand": "^5.0.11"
},
"devDependencies": {
"@types/archiver": "^6",
"@types/archiver": "^6.0.4",
"@types/dompurify": "^3",
"@types/node": "^20",
"@types/react": "^18",

View File

@ -76,6 +76,7 @@ model Note {
contentPlain String? @db.Text
type NoteType @default(NOTE)
url String?
archiveUrl String?
language String?
mimeType String?
fileUrl String?

View File

@ -35,5 +35,33 @@
"type": "image/png",
"purpose": "maskable"
}
],
"shortcuts": [
{
"name": "Voice Note",
"short_name": "Voice",
"description": "Record a voice note with live transcription",
"url": "/voice",
"icons": [
{
"src": "/icon-192.png",
"sizes": "192x192",
"type": "image/png"
}
]
},
{
"name": "New Note",
"short_name": "Note",
"description": "Create a new note",
"url": "/notes/new",
"icons": [
{
"src": "/icon-192.png",
"sizes": "192x192",
"type": "image/png"
}
]
}
]
}

View File

@ -0,0 +1,61 @@
import { NextRequest, NextResponse } from 'next/server';
import { prisma } from '@/lib/prisma';
import { requireAuth, isAuthed } from '@/lib/auth';
import { unlockArticle } from '@/lib/article-unlock';
/**
* POST /api/articles/unlock
*
* Attempts to find an archived/readable version of a paywalled article.
*
* Body: { url: string, noteId?: string }
* - url: The article URL to unlock
* - noteId: (optional) If provided, updates the note's archiveUrl on success
*
* Returns: { success, strategy, archiveUrl, error? }
*/
/**
 * POST handler: find an archived/readable version of the given article URL.
 * When a noteId is supplied and the caller may update that note (no author,
 * or the caller is the author), the discovered archiveUrl is persisted on it.
 */
export async function POST(request: NextRequest) {
  try {
    const auth = await requireAuth(request);
    if (!isAuthed(auth)) return auth;

    const { url, noteId } = await request.json();

    if (typeof url !== 'string' || !url) {
      return NextResponse.json({ error: 'URL is required' }, { status: 400 });
    }

    // Reject anything that does not parse as a URL.
    try {
      new URL(url);
    } catch {
      return NextResponse.json({ error: 'Invalid URL format' }, { status: 400 });
    }

    const result = await unlockArticle(url);

    // Persist the archive link on the note when requested and permitted.
    if (noteId && result.success && result.archiveUrl) {
      const existing = await prisma.note.findUnique({
        where: { id: noteId },
        select: { authorId: true },
      });
      const mayUpdate =
        existing && (!existing.authorId || existing.authorId === auth.user.id);
      if (mayUpdate) {
        await prisma.note.update({
          where: { id: noteId },
          data: { archiveUrl: result.archiveUrl },
        });
      }
    }

    return NextResponse.json(result);
  } catch (error) {
    console.error('Article unlock error:', error);
    return NextResponse.json(
      { success: false, strategy: 'none', error: 'Internal server error' },
      { status: 500 }
    );
  }
}

View File

@ -61,7 +61,7 @@ export async function PUT(
const body = await request.json();
const {
title, content, type, url, language, isPinned, notebookId, tags,
title, content, type, url, archiveUrl, language, isPinned, notebookId, tags,
// Memory Card fields
parentId, cardType, visibility, properties, summary, position,
bodyJson: clientBodyJson,
@ -71,6 +71,7 @@ export async function PUT(
if (title !== undefined) data.title = title.trim();
if (type !== undefined) data.type = type;
if (url !== undefined) data.url = url || null;
if (archiveUrl !== undefined) data.archiveUrl = archiveUrl || null;
if (language !== undefined) data.language = language || null;
if (isPinned !== undefined) data.isPinned = isPinned;
if (notebookId !== undefined) data.notebookId = notebookId || null;

View File

@ -52,7 +52,7 @@ export async function POST(request: NextRequest) {
const { user } = auth;
const body = await request.json();
const {
title, content, type, notebookId, url, language, tags,
title, content, type, notebookId, url, archiveUrl, language, tags,
fileUrl, mimeType, fileSize, duration,
// Memory Card fields
parentId, cardType: cardTypeOverride, visibility, properties, summary, position, bodyJson: clientBodyJson,
@ -107,6 +107,7 @@ export async function POST(request: NextRequest) {
notebookId: notebookId || null,
authorId: user.id,
url: url || null,
archiveUrl: archiveUrl || null,
language: language || null,
fileUrl: fileUrl || null,
mimeType: mimeType || null,

View File

@ -39,6 +39,7 @@ interface NoteData {
type: string;
cardType: string;
url: string | null;
archiveUrl: string | null;
language: string | null;
fileUrl: string | null;
mimeType: string | null;
@ -70,6 +71,8 @@ export default function NoteDetailPage() {
const [saving, setSaving] = useState(false);
const [diarizing, setDiarizing] = useState(false);
const [speakers, setSpeakers] = useState<{ speaker: string; start: number; end: number }[] | null>(null);
const [unlocking, setUnlocking] = useState(false);
const [unlockError, setUnlockError] = useState<string | null>(null);
useEffect(() => {
fetch(`/api/notes/${params.id}`)
@ -168,6 +171,30 @@ export default function NoteDetailPage() {
}
};
const handleUnlock = async () => {
if (!note?.url || unlocking) return;
setUnlocking(true);
setUnlockError(null);
try {
const res = await authFetch('/api/articles/unlock', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ url: note.url, noteId: note.id }),
});
const result = await res.json();
if (result.success && result.archiveUrl) {
setNote({ ...note, archiveUrl: result.archiveUrl });
} else {
setUnlockError(result.error || 'No archived version found');
}
} catch (error) {
setUnlockError('Failed to unlock article');
console.error('Unlock error:', error);
} finally {
setUnlocking(false);
}
};
if (loading) {
return (
<div className="min-h-screen bg-[#0a0a0a] flex items-center justify-center">
@ -313,16 +340,63 @@ export default function NoteDetailPage() {
</div>
)}
{/* URL */}
{/* URL + Unlock */}
{note.url && (
<a
href={note.url}
target="_blank"
rel="noopener noreferrer"
className="text-sm text-blue-400 hover:text-blue-300 mb-4 block truncate"
>
{note.url}
</a>
<div className="mb-4 space-y-2">
<a
href={note.url}
target="_blank"
rel="noopener noreferrer"
className="text-sm text-blue-400 hover:text-blue-300 block truncate"
>
{note.url}
</a>
{note.archiveUrl ? (
<div className="flex items-center gap-2">
<a
href={note.archiveUrl}
target="_blank"
rel="noopener noreferrer"
className="inline-flex items-center gap-1.5 px-3 py-1.5 text-xs font-medium bg-emerald-500/10 text-emerald-400 border border-emerald-500/20 rounded-lg hover:bg-emerald-500/20 transition-colors"
>
<svg className="w-3.5 h-3.5" fill="none" viewBox="0 0 24 24" stroke="currentColor">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M8 11V7a4 4 0 118 0m-4 8v2m-6 4h12a2 2 0 002-2v-6a2 2 0 00-2-2H6a2 2 0 00-2 2v6a2 2 0 002 2z" />
</svg>
View Unlocked Article
</a>
<span className="text-[10px] text-slate-500 truncate max-w-[200px]">{note.archiveUrl}</span>
</div>
) : (
<div className="flex items-center gap-2">
<button
onClick={handleUnlock}
disabled={unlocking}
className="inline-flex items-center gap-1.5 px-3 py-1.5 text-xs font-medium bg-amber-500/10 text-amber-400 border border-amber-500/20 rounded-lg hover:bg-amber-500/20 transition-colors disabled:opacity-50"
>
{unlocking ? (
<>
<svg className="animate-spin w-3.5 h-3.5" viewBox="0 0 24 24">
<circle className="opacity-25" cx="12" cy="12" r="10" stroke="currentColor" strokeWidth="4" fill="none" />
<path className="opacity-75" fill="currentColor" d="M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4zm2 5.291A7.962 7.962 0 014 12H0c0 3.042 1.135 5.824 3 7.938l3-2.647z" />
</svg>
Unlocking...
</>
) : (
<>
<svg className="w-3.5 h-3.5" fill="none" viewBox="0 0 24 24" stroke="currentColor">
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M12 15v2m-6 4h12a2 2 0 002-2v-6a2 2 0 00-2-2H6a2 2 0 00-2 2v6a2 2 0 002 2zm10-10V7a4 4 0 00-8 0v4h8z" />
</svg>
Unlock Article
</>
)}
</button>
{unlockError && (
<span className="text-[10px] text-red-400">{unlockError}</span>
)}
</div>
)}
</div>
)}
{/* Uploaded file/image */}

747
src/app/voice/page.tsx Normal file
View File

@ -0,0 +1,747 @@
'use client';
import { useState, useRef, useCallback, useEffect } from 'react';
import { useRouter } from 'next/navigation';
import { authFetch } from '@/lib/authFetch';
// --- Types ---

/** One finalized transcript segment streamed back from the voice server. */
interface Segment {
  id: number;
  text: string;
  start: number; // segment start time in seconds
  end: number;   // segment end time in seconds
}

/** Progress events emitted while the offline in-browser model runs. */
interface WhisperProgress {
  status: 'checking' | 'downloading' | 'loading' | 'transcribing' | 'done' | 'error';
  progress?: number; // assumes a numeric fraction/percentage — TODO confirm scale against parakeetOffline
  message?: string;
}

/** Minimal notebook info for the destination selector. */
interface NotebookOption {
  id: string;
  title: string;
}

/** Recorder UI state machine. */
type RecorderState = 'idle' | 'recording' | 'processing' | 'done';

// --- Constants ---

// WebSocket base URL for live server-side transcription.
const VOICE_WS_URL =
  process.env.NEXT_PUBLIC_VOICE_WS_URL || 'wss://voice.jeffemmett.com';

// Web Speech API types
// Minimal typing for the non-standard (vendor-prefixed) browser API.
interface ISpeechRecognition extends EventTarget {
  continuous: boolean;
  interimResults: boolean;
  lang: string;
  onresult: ((event: any) => void) | null;
  onerror: ((event: any) => void) | null;
  onend: (() => void) | null;
  start(): void;
  stop(): void;
}
// Resolve the browser's SpeechRecognition constructor (standard or webkit-
// prefixed), or null during SSR / on unsupported browsers.
function getSpeechRecognition(): (new () => ISpeechRecognition) | null {
  if (typeof window === 'undefined') return null;
  const w = window as any;
  return w.SpeechRecognition ?? w.webkitSpeechRecognition ?? null;
}
// --- Component ---
export default function VoicePage() {
const router = useRouter();
// Recording state
const [state, setState] = useState<RecorderState>('idle');
const [elapsed, setElapsed] = useState(0);
const [streaming, setStreaming] = useState(false);
// Transcript
const [segments, setSegments] = useState<Segment[]>([]);
const [liveText, setLiveText] = useState('');
const [interimText, setInterimText] = useState('');
const [finalTranscript, setFinalTranscript] = useState('');
const [isEditing, setIsEditing] = useState(false);
// Audio
const [audioUrl, setAudioUrl] = useState<string | null>(null);
const [duration, setDuration] = useState(0);
// Upload state
const [uploadedFileUrl, setUploadedFileUrl] = useState('');
const [uploadedMimeType, setUploadedMimeType] = useState('');
const [uploadedFileSize, setUploadedFileSize] = useState(0);
// UI
const [notebooks, setNotebooks] = useState<NotebookOption[]>([]);
const [notebookId, setNotebookId] = useState('');
const [status, setStatus] = useState<{ message: string; type: 'success' | 'error' | 'loading' } | null>(null);
const [offlineProgress, setOfflineProgress] = useState<WhisperProgress | null>(null);
const [saving, setSaving] = useState(false);
// Refs
const mediaRecorderRef = useRef<MediaRecorder | null>(null);
const chunksRef = useRef<Blob[]>([]);
const audioBlobRef = useRef<Blob | null>(null);
const timerRef = useRef<ReturnType<typeof setInterval> | null>(null);
const startTimeRef = useRef(0);
const recognitionRef = useRef<ISpeechRecognition | null>(null);
const liveTextRef = useRef('');
const segmentsRef = useRef<Segment[]>([]);
const wsRef = useRef<WebSocket | null>(null);
const audioContextRef = useRef<AudioContext | null>(null);
const workletNodeRef = useRef<AudioWorkletNode | null>(null);
const sourceNodeRef = useRef<MediaStreamAudioSourceNode | null>(null);
const transcriptRef = useRef<HTMLDivElement>(null);
const editRef = useRef<HTMLTextAreaElement>(null);
  // Load notebooks once on mount to populate the destination selector.
  // Failures are non-fatal: the note is simply saved without a notebook.
  useEffect(() => {
    authFetch('/api/notebooks')
      .then((res) => res.json())
      .then((data) => {
        if (Array.isArray(data)) {
          setNotebooks(data.map((nb: any) => ({ id: nb.id, title: nb.title })));
        }
      })
      .catch(() => {});
  }, []);

  // Cleanup on unmount: stop the elapsed timer and release the object URL.
  // NOTE(review): with audioUrl in the deps, this cleanup also runs whenever
  // the URL changes, revoking the previous one — confirm that is intended.
  useEffect(() => {
    return () => {
      if (timerRef.current) clearInterval(timerRef.current);
      if (audioUrl) URL.revokeObjectURL(audioUrl);
    };
  }, [audioUrl]);

  // Auto-scroll transcript: keep the view pinned to the newest text.
  useEffect(() => {
    if (transcriptRef.current) {
      transcriptRef.current.scrollTop = transcriptRef.current.scrollHeight;
    }
  }, [segments, liveText, interimText]);
const formatTime = (s: number) => {
const m = Math.floor(s / 60).toString().padStart(2, '0');
const sec = (s % 60).toString().padStart(2, '0');
return `${m}:${sec}`;
};
  // --- WebSocket live streaming ---
  // Opens a WS to the voice server and streams mic audio captured through an
  // AudioWorklet at a 16 kHz AudioContext; finalized segments come back as
  // JSON 'segment' messages. On any failure, streaming is simply disabled and
  // the recorder falls back to the other transcription tiers.
  const setupWebSocket = useCallback(async (stream: MediaStream) => {
    try {
      const ws = new WebSocket(`${VOICE_WS_URL}/api/voice/stream`);
      wsRef.current = ws;
      // Wait up to 5s for the socket to open before wiring audio into it.
      await new Promise<void>((resolve, reject) => {
        const timeout = setTimeout(() => { ws.close(); reject(new Error('timeout')); }, 5000);
        ws.onopen = () => { clearTimeout(timeout); resolve(); };
        ws.onerror = () => { clearTimeout(timeout); reject(new Error('failed')); };
      });
      ws.onmessage = (event) => {
        try {
          const data = JSON.parse(event.data);
          if (data.type === 'segment') {
            const seg = { id: data.id, text: data.text, start: data.start, end: data.end };
            // The ref is the source of truth; the state copy drives rendering.
            segmentsRef.current = [...segmentsRef.current, seg];
            setSegments([...segmentsRef.current]);
          }
        } catch {}
      };
      // AudioWorklet for PCM16 streaming at 16kHz
      const audioCtx = new AudioContext({ sampleRate: 16000 });
      audioContextRef.current = audioCtx;
      const source = audioCtx.createMediaStreamSource(stream);
      sourceNodeRef.current = source;
      await audioCtx.audioWorklet.addModule('/pcm-processor.js');
      const workletNode = new AudioWorkletNode(audioCtx, 'pcm-processor');
      workletNodeRef.current = workletNode;
      // Forward worklet-produced buffers only while the socket is open.
      workletNode.port.onmessage = (e) => {
        if (ws.readyState === WebSocket.OPEN) ws.send(e.data as ArrayBuffer);
      };
      source.connect(workletNode);
      setStreaming(true);
    } catch {
      setStreaming(false);
    }
  }, []);
  // --- Web Speech API (live local) ---
  // Starts the browser's built-in recognizer for live local captions.
  // Finalized text accumulates into liveText/liveTextRef; in-progress words
  // go to interimText. No-op when the API is unavailable.
  const startSpeechRecognition = useCallback(() => {
    const SpeechRecognition = getSpeechRecognition();
    if (!SpeechRecognition) return;
    const recognition = new SpeechRecognition();
    recognition.continuous = true;
    recognition.interimResults = true;
    recognition.lang = 'en-US';
    recognition.onresult = (event: any) => {
      // Rebuild the transcript from scratch each event: event.results holds
      // the complete result list so far, final and interim alike.
      let finalized = '';
      let interim = '';
      for (let i = 0; i < event.results.length; i++) {
        if (event.results[i].isFinal) {
          finalized += event.results[i][0].transcript.trim() + ' ';
        } else {
          interim += event.results[i][0].transcript;
        }
      }
      liveTextRef.current = finalized.trim();
      setLiveText(finalized.trim());
      setInterimText(interim.trim());
    };
    recognition.onerror = () => {};
    recognition.onend = () => {
      // Auto-restart (Chrome stops after ~60s silence). stopSpeechRecognition
      // clears the ref first, which intentionally breaks this restart loop.
      if (recognitionRef.current === recognition) {
        try { recognition.start(); } catch {}
      }
    };
    recognitionRef.current = recognition;
    try { recognition.start(); } catch {}
  }, []);
const stopSpeechRecognition = useCallback(() => {
if (recognitionRef.current) {
const ref = recognitionRef.current;
recognitionRef.current = null;
try { ref.stop(); } catch {}
}
setInterimText('');
}, []);
// --- Cleanup streaming ---
const cleanupStreaming = useCallback(() => {
if (workletNodeRef.current) { workletNodeRef.current.disconnect(); workletNodeRef.current = null; }
if (sourceNodeRef.current) { sourceNodeRef.current.disconnect(); sourceNodeRef.current = null; }
if (audioContextRef.current && audioContextRef.current.state !== 'closed') {
audioContextRef.current.close().catch(() => {});
audioContextRef.current = null;
}
if (wsRef.current) {
if (wsRef.current.readyState === WebSocket.OPEN) wsRef.current.close();
wsRef.current = null;
}
setStreaming(false);
}, []);
  // --- Start recording ---
  // Resets all per-take state, requests the microphone, starts MediaRecorder
  // (1s chunks), the elapsed timer, and both live transcription paths.
  // On getUserMedia failure, surfaces the error in the status bar.
  const startRecording = useCallback(async () => {
    setSegments([]);
    segmentsRef.current = [];
    setLiveText('');
    liveTextRef.current = '';
    setInterimText('');
    setFinalTranscript('');
    setIsEditing(false);
    setStatus(null);
    setOfflineProgress(null);
    try {
      const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
      // Prefer opus-in-webm; fall back to the browser's default webm codec.
      const mimeType = MediaRecorder.isTypeSupported('audio/webm;codecs=opus')
        ? 'audio/webm;codecs=opus'
        : 'audio/webm';
      const recorder = new MediaRecorder(stream, { mimeType });
      chunksRef.current = [];
      recorder.ondataavailable = (e) => { if (e.data.size > 0) chunksRef.current.push(e.data); };
      recorder.start(1000); // emit a chunk every second
      mediaRecorderRef.current = recorder;
      startTimeRef.current = Date.now();
      setState('recording');
      setElapsed(0);
      timerRef.current = setInterval(() => {
        setElapsed(Math.floor((Date.now() - startTimeRef.current) / 1000));
      }, 1000);
      // Start both transcription methods in parallel (fire-and-forget).
      setupWebSocket(stream);
      startSpeechRecognition();
    } catch (err) {
      setStatus({ message: err instanceof Error ? err.message : 'Microphone access denied', type: 'error' });
    }
  }, [setupWebSocket, startSpeechRecognition]);
  // --- Stop recording ---
  // Finalizes a take: stops live transcription, drains the WS for its final
  // full text, stops the MediaRecorder and assembles the audio blob, then
  // runs a three-tier transcription cascade (batch API → live captures →
  // offline model) and settles into the 'done' state.
  const stopRecording = useCallback(async () => {
    const recorder = mediaRecorderRef.current;
    if (!recorder || recorder.state === 'inactive') return;
    if (timerRef.current) { clearInterval(timerRef.current); timerRef.current = null; }
    const dur = Math.floor((Date.now() - startTimeRef.current) / 1000);
    setDuration(dur);
    // Capture live text before stopping (stopping clears interim text).
    const capturedLive = liveTextRef.current;
    stopSpeechRecognition();
    // Get WS final text: send 'end' and wait up to 5s for a 'done' message;
    // late 'segment' messages can still arrive while draining.
    let wsFullText = '';
    if (wsRef.current && wsRef.current.readyState === WebSocket.OPEN) {
      try {
        const ws = wsRef.current;
        wsFullText = await new Promise<string>((resolve) => {
          const timeout = setTimeout(() => resolve(''), 5000);
          const handler = (event: MessageEvent) => {
            try {
              const data = JSON.parse(event.data);
              if (data.type === 'segment') {
                const seg = { id: data.id, text: data.text, start: data.start, end: data.end };
                segmentsRef.current = [...segmentsRef.current, seg];
                setSegments([...segmentsRef.current]);
              }
              if (data.type === 'done') {
                clearTimeout(timeout);
                ws.removeEventListener('message', handler);
                resolve(data.fullText || '');
              }
            } catch {}
          };
          ws.addEventListener('message', handler);
          ws.send(JSON.stringify({ type: 'end' }));
        });
      } catch {}
    }
    cleanupStreaming();
    setState('processing');
    // Stop recorder and build the full blob from the buffered chunks.
    const blob = await new Promise<Blob>((resolve) => {
      recorder.onstop = () => {
        recorder.stream.getTracks().forEach((t) => t.stop());
        resolve(new Blob(chunksRef.current, { type: recorder.mimeType }));
      };
      recorder.stop();
    });
    audioBlobRef.current = blob;
    if (audioUrl) URL.revokeObjectURL(audioUrl);
    const url = URL.createObjectURL(blob);
    setAudioUrl(url);
    // --- Three-tier transcription cascade ---
    // Show immediate live text while we process: prefer the WS full text,
    // then joined WS segments, then the Web Speech capture.
    const immediateLive = wsFullText || (segmentsRef.current.length > 0
      ? segmentsRef.current.map(s => s.text).join(' ')
      : capturedLive);
    if (immediateLive) setFinalTranscript(immediateLive);
    // Tier 1: Upload + batch API (only transcribe if the upload succeeded).
    let bestTranscript = '';
    try {
      setStatus({ message: 'Uploading recording...', type: 'loading' });
      const uploadForm = new FormData();
      uploadForm.append('file', blob, 'voice-note.webm');
      const uploadRes = await authFetch('/api/uploads', { method: 'POST', body: uploadForm });
      if (uploadRes.ok) {
        const uploadResult = await uploadRes.json();
        setUploadedFileUrl(uploadResult.url);
        setUploadedMimeType(uploadResult.mimeType);
        setUploadedFileSize(uploadResult.size);
        setStatus({ message: 'Transcribing...', type: 'loading' });
        const tForm = new FormData();
        tForm.append('audio', blob, 'voice-note.webm');
        const tRes = await authFetch('/api/voice/transcribe', { method: 'POST', body: tForm });
        if (tRes.ok) {
          const tResult = await tRes.json();
          bestTranscript = tResult.text || '';
        }
      }
    } catch {
      console.warn('Tier 1 (batch API) failed');
    }
    // Tier 2: WebSocket / Web Speech API (already captured)
    if (!bestTranscript) bestTranscript = immediateLive || '';
    // Tier 3: Offline Parakeet.js (lazy-loaded; best-effort).
    if (!bestTranscript) {
      try {
        setStatus({ message: 'Loading offline model...', type: 'loading' });
        const { transcribeOffline } = await import('@/lib/parakeetOffline');
        bestTranscript = await transcribeOffline(blob, (p) => setOfflineProgress(p));
        setOfflineProgress(null);
      } catch {
        setOfflineProgress(null);
      }
    }
    setFinalTranscript(bestTranscript);
    setStatus(null);
    setState('done');
  }, [audioUrl, stopSpeechRecognition, cleanupStreaming]);
// --- Toggle ---
const toggleRecording = useCallback(() => {
if (state === 'idle' || state === 'done') startRecording();
else if (state === 'recording') stopRecording();
}, [state, startRecording, stopRecording]);
  // --- Save ---
  // Creates an AUDIO note from the transcript plus the uploaded audio
  // metadata. If the earlier upload failed (no uploadedFileUrl), retries the
  // upload here best-effort, mutating the request body with the result.
  // On success, navigates to the newly created note after a brief delay.
  const saveToRNotes = useCallback(async () => {
    setSaving(true);
    setStatus({ message: 'Saving...', type: 'loading' });
    const now = new Date();
    const timeStr = now.toLocaleString('en-US', {
      month: 'short', day: 'numeric', hour: 'numeric', minute: '2-digit', hour12: true
    });
    const transcript = finalTranscript.trim();
    const body: Record<string, unknown> = {
      title: `Voice note - ${timeStr}`,
      content: transcript
        ? `<p>${transcript.replace(/\n/g, '</p><p>')}</p>`
        : '<p><em>Voice recording (no transcript)</em></p>',
      type: 'AUDIO',
      mimeType: uploadedMimeType || 'audio/webm',
      fileUrl: uploadedFileUrl,
      fileSize: uploadedFileSize,
      duration,
      tags: ['voice'],
    };
    if (notebookId) body.notebookId = notebookId;
    // If upload failed earlier, try uploading now; failure here is tolerated
    // and the note is saved with just the transcript.
    if (!uploadedFileUrl && audioBlobRef.current) {
      try {
        const form = new FormData();
        form.append('file', audioBlobRef.current, 'voice-note.webm');
        const res = await authFetch('/api/uploads', { method: 'POST', body: form });
        if (res.ok) {
          const result = await res.json();
          body.fileUrl = result.url;
          body.mimeType = result.mimeType;
          body.fileSize = result.size;
        }
      } catch {}
    }
    try {
      const res = await authFetch('/api/notes', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify(body),
      });
      if (!res.ok) throw new Error('Save failed');
      const note = await res.json();
      setStatus({ message: 'Saved!', type: 'success' });
      // Brief success flash, then jump to the created note's page.
      setTimeout(() => router.push(`/notes/${note.id}`), 1000);
    } catch (err) {
      setStatus({ message: err instanceof Error ? err.message : 'Save failed', type: 'error' });
    } finally {
      setSaving(false);
    }
  }, [finalTranscript, uploadedFileUrl, uploadedMimeType, uploadedFileSize, duration, notebookId, router]);
// --- Copy ---
const copyTranscript = useCallback(async () => {
if (!finalTranscript.trim()) return;
try {
await navigator.clipboard.writeText(finalTranscript);
setStatus({ message: 'Copied!', type: 'success' });
setTimeout(() => setStatus(null), 2000);
} catch {
setStatus({ message: 'Copy failed', type: 'error' });
}
}, [finalTranscript]);
// --- Reset ---
const discard = useCallback(() => {
setState('idle');
setSegments([]);
segmentsRef.current = [];
setLiveText('');
liveTextRef.current = '';
setInterimText('');
setFinalTranscript('');
setIsEditing(false);
setElapsed(0);
setDuration(0);
setStatus(null);
setOfflineProgress(null);
setUploadedFileUrl('');
setUploadedMimeType('');
setUploadedFileSize(0);
if (audioUrl) { URL.revokeObjectURL(audioUrl); setAudioUrl(null); }
audioBlobRef.current = null;
}, [audioUrl]);
// --- Keyboard ---
useEffect(() => {
const handler = (e: KeyboardEvent) => {
const target = e.target as HTMLElement;
if (target.tagName === 'TEXTAREA' || target.tagName === 'INPUT' || target.isContentEditable) return;
if (e.code === 'Space') {
e.preventDefault();
toggleRecording();
}
if ((e.ctrlKey || e.metaKey) && e.code === 'Enter' && state === 'done') {
e.preventDefault();
saveToRNotes();
}
};
window.addEventListener('keydown', handler);
return () => window.removeEventListener('keydown', handler);
}, [toggleRecording, saveToRNotes, state]);
// --- Render ---
const hasLiveText = liveText || interimText || segments.length > 0;
const hasTranscript = state === 'done' && finalTranscript.trim().length > 0;
return (
<div className="min-h-screen bg-[#0a0a0a] flex flex-col">
{/* Header */}
<header className="border-b border-slate-800 px-4 py-3 flex items-center justify-between">
<div className="flex items-center gap-3">
<div className="w-8 h-8 rounded-lg bg-gradient-to-br from-red-500 to-rose-600 flex items-center justify-center">
<svg className="w-4 h-4 text-white" fill="currentColor" viewBox="0 0 24 24">
<path d="M12 14c1.66 0 3-1.34 3-3V5c0-1.66-1.34-3-3-3S9 3.34 9 5v6c0 1.66 1.34 3 3 3z" />
<path d="M17 11c0 2.76-2.24 5-5 5s-5-2.24-5-5H5c0 3.53 2.61 6.43 6 6.92V21h2v-3.08c3.39-.49 6-3.39 6-6.92h-2z" />
</svg>
</div>
<div>
<h1 className="text-white font-bold text-sm">rVoice</h1>
<p className="text-[10px] text-slate-500 uppercase tracking-wider">Voice notes for rNotes</p>
</div>
</div>
<div className="flex items-center gap-2">
{streaming && (
<span className="flex items-center gap-1.5 text-[10px] font-bold text-green-400 uppercase tracking-wider">
<span className="w-1.5 h-1.5 rounded-full bg-green-400 animate-pulse" />
Live
</span>
)}
{getSpeechRecognition() && state === 'recording' && !streaming && (
<span className="flex items-center gap-1.5 text-[10px] font-bold text-blue-400 uppercase tracking-wider">
<span className="w-1.5 h-1.5 rounded-full bg-blue-400 animate-pulse" />
Local
</span>
)}
</div>
</header>
{/* Main content */}
<main className="flex-1 flex flex-col items-center justify-center px-4 py-8 gap-6 max-w-lg mx-auto w-full">
{/* Record button + timer */}
<div className="flex flex-col items-center gap-4">
<button
onClick={toggleRecording}
disabled={state === 'processing'}
className={`w-24 h-24 rounded-full border-[3px] flex items-center justify-center transition-all relative ${
state === 'recording'
? 'border-red-500 bg-slate-900'
: state === 'processing'
? 'border-slate-600 bg-slate-900 opacity-50'
: 'border-slate-600 bg-slate-900 hover:border-red-500'
}`}
>
<div className={`transition-all ${
state === 'recording'
? 'w-8 h-8 rounded-md bg-red-500'
: 'w-10 h-10 rounded-full bg-red-500'
}`} />
{state === 'recording' && (
<span className="absolute inset-[-6px] rounded-full border-2 border-red-500/30 animate-ping" />
)}
</button>
<div className={`text-3xl font-mono font-bold tracking-wider ${
state === 'recording' ? 'text-red-500' : 'text-slate-300'
}`}>
{formatTime(state === 'done' ? duration : elapsed)}
</div>
<p className="text-xs text-slate-500">
{state === 'idle' && 'Tap to record or press Space'}
{state === 'recording' && 'Recording... tap to stop'}
{state === 'processing' && (offlineProgress?.message || 'Processing...')}
{state === 'done' && 'Recording complete'}
</p>
</div>
{/* Offline model progress bar */}
{offlineProgress && offlineProgress.status === 'downloading' && (
<div className="w-full max-w-xs">
<div className="text-xs text-slate-400 mb-1 text-center">{offlineProgress.message}</div>
<div className="h-1.5 bg-slate-800 rounded-full overflow-hidden">
<div
className="h-full bg-amber-500 rounded-full transition-all duration-300"
style={{ width: `${offlineProgress.progress || 0}%` }}
/>
</div>
</div>
)}
{/* Live transcript (while recording) */}
{state === 'recording' && hasLiveText && (
<div className="w-full">
<div className="text-[10px] text-slate-500 uppercase tracking-wider font-semibold mb-2">Live transcript</div>
<div
ref={transcriptRef}
className="bg-slate-900/50 border border-slate-800 rounded-lg p-4 max-h-40 overflow-y-auto"
>
{segments.length > 0 && (
<div className="space-y-1">
{segments.map((seg) => (
<p key={seg.id} className="text-sm text-slate-300">{seg.text}</p>
))}
</div>
)}
{segments.length === 0 && liveText && (
<p className="text-sm text-slate-300">{liveText}</p>
)}
{interimText && (
<p className="text-sm text-slate-500 italic">{interimText}</p>
)}
</div>
</div>
)}
{/* Audio player + transcript (after recording) */}
{(state === 'done' || state === 'processing') && audioUrl && (
<div className="w-full space-y-4">
<audio controls src={audioUrl} className="w-full h-10" />
{/* Transcript */}
<div>
<div className="flex items-center justify-between mb-2">
<span className="text-[10px] text-slate-500 uppercase tracking-wider font-semibold">Transcript</span>
{state === 'done' && finalTranscript && (
<button
onClick={() => {
setIsEditing(!isEditing);
if (!isEditing) setTimeout(() => editRef.current?.focus(), 50);
}}
className="text-[10px] text-slate-500 hover:text-amber-400 transition-colors"
>
{isEditing ? 'Done editing' : 'Edit'}
</button>
)}
</div>
{isEditing ? (
<textarea
ref={editRef}
value={finalTranscript}
onChange={(e) => setFinalTranscript(e.target.value)}
className="w-full min-h-[100px] bg-slate-900/50 border border-amber-500/30 rounded-lg p-4 text-sm text-slate-200 leading-relaxed resize-y focus:outline-none focus:border-amber-500/50"
/>
) : (
<div className="bg-slate-900/50 border border-slate-800 rounded-lg p-4 min-h-[60px] max-h-48 overflow-y-auto">
{finalTranscript ? (
<p className="text-sm text-slate-200 leading-relaxed whitespace-pre-wrap">{finalTranscript}</p>
) : state === 'processing' ? (
<p className="text-sm text-slate-500 italic">Transcribing...</p>
) : (
<p className="text-sm text-slate-500 italic">No transcript available</p>
)}
</div>
)}
</div>
</div>
)}
{/* Notebook + actions (after recording) */}
{state === 'done' && (
<div className="w-full space-y-3">
<div>
<label className="block text-[10px] text-slate-500 uppercase tracking-wider font-semibold mb-1">
Save to notebook
</label>
<select
value={notebookId}
onChange={(e) => setNotebookId(e.target.value)}
className="w-full px-3 py-2 bg-slate-900/50 border border-slate-700 rounded-lg text-sm text-white focus:outline-none focus:border-amber-500/50"
>
<option value="">No notebook (standalone)</option>
{notebooks.map((nb) => (
<option key={nb.id} value={nb.id}>{nb.title}</option>
))}
</select>
</div>
<div className="flex gap-2">
<button
onClick={discard}
className="flex-1 px-4 py-2.5 bg-slate-800 border border-slate-700 rounded-lg text-sm text-slate-400 hover:text-white transition-colors"
>
Discard
</button>
{hasTranscript && (
<button
onClick={copyTranscript}
className="px-4 py-2.5 bg-slate-800 border border-blue-500/30 rounded-lg text-sm text-blue-400 hover:text-blue-300 transition-colors"
>
Copy
</button>
)}
<button
onClick={saveToRNotes}
disabled={saving}
className="flex-1 px-4 py-2.5 bg-amber-500 hover:bg-amber-400 disabled:bg-slate-700 disabled:text-slate-500 text-black font-semibold rounded-lg text-sm transition-colors"
>
{saving ? 'Saving...' : 'Save to rNotes'}
</button>
</div>
</div>
)}
{/* Status bar */}
{status && (
<div className={`w-full text-center text-xs px-4 py-2 rounded-lg ${
status.type === 'success' ? 'bg-green-900/30 text-green-400 border border-green-800' :
status.type === 'error' ? 'bg-red-900/30 text-red-400 border border-red-800' :
'bg-blue-900/30 text-blue-400 border border-blue-800'
}`}>
{status.message}
</div>
)}
</main>
{/* Footer */}
<footer className="border-t border-slate-800 px-4 py-3 flex items-center justify-between text-[10px] text-slate-600">
<div className="flex gap-3">
<kbd className="px-1.5 py-0.5 bg-slate-900 border border-slate-700 rounded text-[10px]">Space</kbd>
<span>record</span>
<kbd className="px-1.5 py-0.5 bg-slate-900 border border-slate-700 rounded text-[10px]">Ctrl+Enter</kbd>
<span>save</span>
</div>
<a href="/" className="hover:text-amber-400 transition-colors">rNotes.online</a>
</footer>
</div>
);
}

View File

@ -34,6 +34,7 @@ interface NoteCardProps {
updatedAt: string;
tags: { id: string; name: string; color: string | null }[];
url?: string | null;
archiveUrl?: string | null;
visibility?: string;
children?: { id: string }[];
properties?: Record<string, unknown>;
@ -41,7 +42,7 @@ interface NoteCardProps {
export function NoteCard({
id, title, type, cardType = 'note', contentPlain, summary,
isPinned, updatedAt, tags, url, visibility, children, properties,
isPinned, updatedAt, tags, url, archiveUrl, visibility, children, properties,
}: NoteCardProps) {
const snippet = summary || (contentPlain || '').slice(0, 120);
const cardStyle = CARD_TYPE_STYLES[cardType] || CARD_TYPE_STYLES.note;
@ -77,6 +78,11 @@ export function NoteCard({
{visibility}
</span>
)}
{archiveUrl && (
<span className="text-emerald-400 text-[10px] font-bold uppercase px-1 py-0.5 rounded bg-emerald-500/10" title="Unlocked article">
unlocked
</span>
)}
<span className="text-[10px] text-slate-500 ml-auto">
{new Date(updatedAt).toLocaleDateString()}
</span>

232
src/lib/article-unlock.ts Normal file
View File

@ -0,0 +1,232 @@
/**
* Article Unlock multi-strategy approach to get readable versions of
* paywalled or permissioned articles.
*
* Strategies (tried in order):
* 1. Wayback Machine check for existing snapshot, or request a new one
* 2. Google Web Cache fast lookup, often has full text
* 3. archive.ph check for existing snapshots (read-only, no submission)
*/
/** Outcome of an article-unlock attempt. */
export interface UnlockResult {
  // True when a readable archived version was found (or freshly captured).
  success: boolean;
  // Strategy that produced the result: 'wayback', 'wayback-save',
  // 'google-cache', 'archive-ph', or 'none' on failure.
  strategy: string;
  // URL of the archived/readable copy, when one was found.
  archiveUrl?: string;
  // Raw page HTML, only set by the Google-cache strategy.
  content?: string;
  // Human-readable failure reason, only set when success is false.
  error?: string;
}

// Desktop-Chrome User-Agent sent on all outbound requests so the archive
// services treat us like a normal browser rather than a bot.
const BROWSER_UA =
  'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/131.0.0.0 Safari/537.36';
// ---------------------------------------------------------------------------
// Strategy 1: Internet Archive Wayback Machine
// ---------------------------------------------------------------------------

/**
 * Looks for an existing Wayback snapshot of `url`; if none exists, asks
 * Save Page Now (SPN) to capture one.
 *
 * @param url - Original article URL.
 * @returns A successful UnlockResult, or null so the caller can fall through
 *   to the next strategy. Never throws.
 */
async function tryWaybackMachine(url: string): Promise<UnlockResult | null> {
  // First check if a snapshot already exists.
  try {
    const checkUrl = `https://archive.org/wayback/available?url=${encodeURIComponent(url)}`;
    const res = await fetch(checkUrl, {
      headers: { 'User-Agent': BROWSER_UA },
      signal: AbortSignal.timeout(10000),
    });
    if (res.ok) {
      const data = await res.json();
      const snapshot = data?.archived_snapshots?.closest;
      if (snapshot?.available && snapshot?.url) {
        // The availability API returns http:// links; upgrade to https.
        return {
          success: true,
          strategy: 'wayback',
          archiveUrl: snapshot.url.replace('http://', 'https://'),
        };
      }
    }
  } catch {
    // availability check failed, try Save Page Now
  }

  // No existing snapshot — request one via Save Page Now (SPN).
  try {
    const saveRes = await fetch('https://web.archive.org/save', {
      method: 'POST',
      headers: {
        'Content-Type': 'application/x-www-form-urlencoded',
        'User-Agent': BROWSER_UA,
        Accept: 'application/json',
      },
      body: `url=${encodeURIComponent(url)}&capture_all=1`,
      signal: AbortSignal.timeout(30000),
    });

    // Bug fix: fetch() follows redirects by default, so a 301/302 from SPN
    // never surfaces as saveRes.status — the old `status === 302 || 301`
    // check was unreachable. Detect "redirected straight to the archived
    // page" via Response.redirected / Response.url instead.
    if (saveRes.redirected && saveRes.url.includes('web.archive.org/web/')) {
      return { success: true, strategy: 'wayback-save', archiveUrl: saveRes.url };
    }

    if (saveRes.ok) {
      let data: { url?: string; job_id?: string };
      try {
        data = await saveRes.json();
      } catch {
        // SPN answered with non-JSON (e.g. an HTML interstitial) — give up
        // on this strategy rather than throwing.
        return null;
      }
      // SPN sometimes returns the archive URL directly…
      if (data.url) {
        return {
          success: true,
          strategy: 'wayback-save',
          archiveUrl: data.url,
        };
      }
      // …otherwise it returns a job_id we can poll for completion (up to ~30 s).
      if (data.job_id) {
        const archiveUrl = await pollWaybackJob(data.job_id);
        if (archiveUrl) {
          return { success: true, strategy: 'wayback-save', archiveUrl };
        }
      }
    }
  } catch {
    // SPN failed
  }

  return null;
}
/**
 * Polls a Wayback Save-Page-Now capture job until it succeeds, errors out,
 * or the attempt budget is exhausted (6 attempts, 5 s apart ≈ 30 s).
 *
 * @param jobId - Job id returned by the SPN submit endpoint.
 * @returns The snapshot URL on success, otherwise null. Never throws.
 */
async function pollWaybackJob(jobId: string): Promise<string | null> {
  const statusUrl = `https://web.archive.org/save/status/${jobId}`;
  let attemptsLeft = 6;
  while (attemptsLeft-- > 0) {
    // Wait before each status check — captures take a few seconds at minimum.
    await new Promise((resolve) => setTimeout(resolve, 5000));
    try {
      const res = await fetch(statusUrl, {
        headers: { Accept: 'application/json', 'User-Agent': BROWSER_UA },
        signal: AbortSignal.timeout(10000),
      });
      if (!res.ok) continue;
      const body = await res.json();
      if (body.status === 'success' && body.original_url && body.timestamp) {
        return `https://web.archive.org/web/${body.timestamp}/${body.original_url}`;
      }
      if (body.status === 'error') return null;
    } catch {
      // transient failure — keep polling
    }
  }
  return null;
}
// ---------------------------------------------------------------------------
// Strategy 2: Google Web Cache
// ---------------------------------------------------------------------------

/**
 * Fetches Google's cached copy of `url`.
 *
 * NOTE(review): Google retired its public web cache during 2024, so this
 * endpoint presumably no longer serves cached pages — confirm and consider
 * removing this strategy.
 *
 * @param url - Original article URL.
 * @returns A successful UnlockResult (raw page HTML in `content`), or null
 *   so the caller can try the next strategy. Never throws.
 */
async function tryGoogleCache(url: string): Promise<UnlockResult | null> {
  const cacheUrl = `https://webcache.googleusercontent.com/search?q=cache:${encodeURIComponent(url)}`;
  try {
    const res = await fetch(cacheUrl, {
      headers: { 'User-Agent': BROWSER_UA },
      redirect: 'follow',
      signal: AbortSignal.timeout(10000),
    });
    if (res.ok) {
      // Google cache returns the full page — verify it's not an error page
      // (heuristic: real pages are > 1 kB and lack the "no match" message).
      const text = await res.text();
      if (text.length > 1000 && !text.includes('did not match any documents')) {
        return {
          success: true,
          strategy: 'google-cache',
          archiveUrl: cacheUrl,
          content: text,
        };
      }
    }
  } catch {
    // Google cache not available
  }
  return null;
}
// ---------------------------------------------------------------------------
// Strategy 3: archive.ph (read-only — check for existing snapshots)
// ---------------------------------------------------------------------------

/**
 * Checks archive.ph for an existing snapshot of `url`.
 * Deliberately read-only: we do NOT submit new pages (archive.ph has no API
 * and aggressive anti-bot + security concerns).
 *
 * @param url - Original article URL.
 * @returns A successful UnlockResult, or null so the caller can continue.
 *   Never throws.
 */
async function tryArchivePh(url: string): Promise<UnlockResult | null> {
  const checkUrl = `https://archive.ph/newest/${encodeURIComponent(url)}`;
  try {
    const res = await fetch(checkUrl, {
      headers: { 'User-Agent': BROWSER_UA },
      redirect: 'manual', // archive.ph redirects to the snapshot
      signal: AbortSignal.timeout(10000),
    });
    // Any 3xx with a Location header means a snapshot exists. Accept the
    // whole redirect class (not just 301/302) in case the service switches
    // to 303/307/308.
    if (res.status >= 300 && res.status < 400) {
      const location = res.headers.get('location');
      // Guard against being bounced to the submit page, which would mean
      // "no snapshot — please create one" rather than a hit.
      if (location && location.includes('archive.ph/') && !location.includes('/submit')) {
        return {
          success: true,
          strategy: 'archive-ph',
          archiveUrl: location,
        };
      }
    }
    // Bug fix note: with redirect: 'manual' the response URL always equals
    // the request URL, so the previous `res.ok && res.url !== checkUrl`
    // fallback could never fire and has been removed.
  } catch {
    // archive.ph not reachable
  }
  return null;
}
// ---------------------------------------------------------------------------
// Main unlock function
// ---------------------------------------------------------------------------

/**
 * Attempts to obtain a readable archived version of `url`, trying each
 * strategy in order of reliability (Wayback Machine, Google Cache,
 * archive.ph) and returning the first success.
 *
 * @param url - Article URL to unlock; must be an absolute, parseable URL.
 * @returns Always resolves (never rejects). On failure, `error` lists the
 *   strategies tried plus any errors they surfaced.
 */
export async function unlockArticle(url: string): Promise<UnlockResult> {
  // Validate up front so the strategies never see garbage input.
  try {
    new URL(url);
  } catch {
    return { success: false, strategy: 'none', error: 'Invalid URL' };
  }

  // Try strategies in order of reliability.
  const strategies = [
    { name: 'Wayback Machine', fn: tryWaybackMachine },
    { name: 'Google Cache', fn: tryGoogleCache },
    { name: 'archive.ph', fn: tryArchivePh },
  ];

  const errors: string[] = [];
  for (const { name, fn } of strategies) {
    try {
      const result = await fn(url);
      if (result?.success) {
        return result;
      }
    } catch (err) {
      errors.push(`${name}: ${err instanceof Error ? err.message : 'unknown error'}`);
    }
  }

  // Bug fix: the collected per-strategy errors were previously discarded;
  // append them so callers can see *why* each strategy failed. The message
  // is unchanged when no errors were recorded.
  return {
    success: false,
    strategy: 'none',
    error:
      `No archived version found. Tried: ${strategies.map((s) => s.name).join(', ')}` +
      (errors.length > 0 ? ` (${errors.join('; ')})` : ''),
  };
}