diff --git a/Dockerfile b/Dockerfile
index 195921b..6e0ce32 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -34,10 +34,13 @@ COPY --from=builder /app/scripts ./scripts
COPY --from=builder /app/src/lib/content-convert.ts ./src/lib/content-convert.ts
COPY --from=builder /app/node_modules/.prisma ./node_modules/.prisma
COPY --from=builder /app/node_modules/@prisma ./node_modules/@prisma
+COPY rnotes-online/entrypoint.sh /app/entrypoint.sh
+RUN chmod +x /app/entrypoint.sh
USER nextjs
EXPOSE 3000
ENV PORT=3000
ENV HOSTNAME="0.0.0.0"
+ENTRYPOINT ["/app/entrypoint.sh"]
CMD ["node", "server.js"]
diff --git a/browser-extension/background.js b/browser-extension/background.js
index 31120cb..20ac1c8 100644
--- a/browser-extension/background.js
+++ b/browser-extension/background.js
@@ -26,6 +26,12 @@ chrome.runtime.onInstalled.addListener(() => {
title: 'Clip selection to rNotes',
contexts: ['selection'],
});
+
+ chrome.contextMenus.create({
+ id: 'unlock-article',
+ title: 'Unlock & Clip article to rNotes',
+ contexts: ['page', 'link'],
+ });
});
// --- Helpers ---
@@ -132,6 +138,31 @@ async function uploadImage(imageUrl) {
return response.json();
}
/**
 * Ask the rNotes backend to find a readable/archived version of a URL.
 *
 * @param {string} url - the article URL to unlock
 * @returns {Promise<object|null>} the parsed API response on success,
 *   `{ success: false, error }` on an HTTP failure, or `null` when the
 *   user is not signed in (a notification is shown in that case).
 */
async function unlockArticle(url) {
  const token = await getToken();
  if (!token) {
    showNotification('rNotes Error', 'Not signed in. Open extension settings to sign in.');
    return null;
  }

  const settings = await getSettings();
  const response = await fetch(`${settings.host}/api/articles/unlock`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'Authorization': `Bearer ${token}`,
    },
    body: JSON.stringify({ url }),
  });

  if (!response.ok) {
    // Surface HTTP failures as a result object rather than throwing: the
    // context-menu handler awaits this call without try/catch and reads
    // `result?.error`, so a throw here would become an unhandled rejection
    // and leave the user stuck on the "Unlocking Article" notification.
    const text = await response.text();
    return { success: false, error: `Unlock failed: ${response.status} ${text}` };
  }

  return response.json();
}
+
// --- Context Menu Handler ---
chrome.contextMenus.onClicked.addListener(async (info, tab) => {
@@ -197,6 +228,28 @@ chrome.contextMenus.onClicked.addListener(async (info, tab) => {
break;
}
+ case 'unlock-article': {
+ const targetUrl = info.linkUrl || tab.url;
+ showNotification('Unlocking Article', `Finding readable version of ${new URL(targetUrl).hostname}...`);
+
+ const result = await unlockArticle(targetUrl);
+ if (result && result.success && result.archiveUrl) {
+ // Create a CLIP note with the archive URL
+ await createNote({
+ title: tab.title || 'Unlocked Article',
+            content: `
+Unlocked via ${result.strategy}
+Original: ${targetUrl}
+Archive: ${result.archiveUrl}
+`,
+ type: 'CLIP',
+ url: targetUrl,
+ });
+ showNotification('Article Unlocked', `Readable version found via ${result.strategy}`);
+ // Open the unlocked article in a new tab
+ chrome.tabs.create({ url: result.archiveUrl });
+ } else {
+ showNotification('Unlock Failed', result?.error || 'No archived version found');
+ }
+ break;
+ }
+
case 'clip-selection': {
// Get selection HTML
let content = '';
@@ -238,6 +291,20 @@ chrome.contextMenus.onClicked.addListener(async (info, tab) => {
}
});
+// --- Keyboard shortcut handler ---
+
// Global keyboard shortcut (see "commands" in manifest.json): open the
// rVoice recorder in a small focused popup window.
chrome.commands.onCommand.addListener((command) => {
  if (command !== 'open-voice-recorder') return;
  chrome.windows.create({
    url: chrome.runtime.getURL('voice.html'),
    type: 'popup',
    width: 380,
    height: 520,
    focused: true,
  });
});
+
// --- Message Handler (from popup) ---
chrome.runtime.onMessage.addListener((message, sender, sendResponse) => {
diff --git a/browser-extension/manifest.json b/browser-extension/manifest.json
index 315fc00..7317a84 100644
--- a/browser-extension/manifest.json
+++ b/browser-extension/manifest.json
@@ -1,13 +1,14 @@
{
"manifest_version": 3,
- "name": "rNotes Web Clipper",
- "version": "1.0.0",
- "description": "Clip pages, text, links, and images to rNotes.online",
+ "name": "rNotes Web Clipper & Voice",
+ "version": "1.1.0",
+ "description": "Clip pages, text, links, and images to rNotes.online. Record voice notes with transcription.",
"permissions": [
"activeTab",
"contextMenus",
"storage",
- "notifications"
+ "notifications",
+ "offscreen"
],
"host_permissions": [
"https://rnotes.online/*",
@@ -33,5 +34,17 @@
"options_ui": {
"page": "options.html",
"open_in_tab": false
+ },
+ "content_security_policy": {
+ "extension_pages": "script-src 'self' https://esm.sh; object-src 'self'"
+ },
+ "commands": {
+ "open-voice-recorder": {
+ "suggested_key": {
+ "default": "Ctrl+Shift+V",
+ "mac": "Command+Shift+V"
+ },
+ "description": "Open rVoice recorder"
+ }
}
}
diff --git a/browser-extension/parakeet-offline.js b/browser-extension/parakeet-offline.js
new file mode 100644
index 0000000..2aa4443
--- /dev/null
+++ b/browser-extension/parakeet-offline.js
@@ -0,0 +1,147 @@
+/**
+ * Offline transcription using parakeet.js (NVIDIA Parakeet TDT 0.6B v2).
+ * Loaded at runtime from CDN. Model ~634 MB (int8) on first download,
+ * cached in IndexedDB after. Works fully offline after first download.
+ *
+ * Port of src/lib/parakeetOffline.ts for the browser extension.
+ */
+
// localStorage flag recording that the model files finished downloading once
// (the model data itself is cached in IndexedDB by parakeet.js).
const CACHE_KEY = 'parakeet-offline-cached';

// Singleton model — don't reload on subsequent calls
let cachedModel = null; // resolved model instance once loading completes
let loadingPromise = null; // in-flight load, shared by concurrent callers
+
/**
 * Report whether a previous session finished downloading the Parakeet
 * model (tracked via a localStorage flag; the model bytes themselves are
 * cached in IndexedDB). Returns false if localStorage is unavailable.
 */
function isModelCached() {
  let flag;
  try {
    flag = localStorage.getItem(CACHE_KEY);
  } catch {
    return false;
  }
  return flag === 'true';
}
+
/**
 * Probe for WebGPU support.
 * Resolves true only when the API exists AND an adapter is granted;
 * resolves false on a missing API, a null adapter, or a probe error.
 */
async function detectWebGPU() {
  if (!navigator.gpu) {
    return false;
  }
  try {
    const adapter = await navigator.gpu.requestAdapter();
    return adapter != null;
  } catch {
    return false;
  }
}
+
/**
 * Get or create the Parakeet model singleton.
 *
 * Concurrent callers share one in-flight load via `loadingPromise`. On a
 * failed load the in-flight promise is cleared so a later call can retry;
 * without this, one transient network error would permanently cache the
 * rejection and every future call would fail.
 *
 * @param {function} [onProgress] - callback({ status, progress, file, message })
 * @returns {Promise<object>} the loaded parakeet.js model
 */
async function getModel(onProgress) {
  if (cachedModel) return cachedModel;
  if (loadingPromise) return loadingPromise;

  const load = (async () => {
    onProgress?.({ status: 'loading', message: 'Loading Parakeet model...' });

    // Dynamic import from CDN at runtime.
    // NOTE(review): MV3 generally disallows remotely hosted code on
    // extension pages even with a custom CSP — confirm Chrome actually
    // accepts the manifest's esm.sh CSP entry and this import.
    const { fromHub } = await import('https://esm.sh/parakeet.js@1.1.2');

    const backend = (await detectWebGPU()) ? 'webgpu' : 'wasm';
    const fileProgress = {};

    const model = await fromHub('parakeet-tdt-0.6b-v2', {
      backend,
      progress: ({ file, loaded, total }) => {
        fileProgress[file] = { loaded, total };

        // Aggregate per-file byte counts into one overall percentage.
        let totalBytes = 0;
        let loadedBytes = 0;
        for (const fp of Object.values(fileProgress)) {
          totalBytes += fp.total || 0;
          loadedBytes += fp.loaded || 0;
        }

        if (totalBytes > 0) {
          const pct = Math.round((loadedBytes / totalBytes) * 100);
          onProgress?.({
            status: 'downloading',
            progress: pct,
            file,
            message: `Downloading model... ${pct}%`,
          });
        }
      },
    });

    // Record that the download completed; guarded like isModelCached(),
    // since localStorage can throw in restricted contexts. Non-fatal:
    // only the "already cached" hint is lost.
    try {
      localStorage.setItem(CACHE_KEY, 'true');
    } catch {
      // ignore
    }
    onProgress?.({ status: 'loading', message: 'Model loaded' });

    cachedModel = model;
    loadingPromise = null;
    return model;
  })();

  // Cache the in-flight promise, but clear it on failure so callers can
  // retry instead of receiving the same cached rejection forever.
  loadingPromise = load.catch((err) => {
    loadingPromise = null;
    throw err;
  });

  return loadingPromise;
}
+
/**
 * Decode an audio Blob into a mono Float32Array of samples at 16 kHz.
 * Audio already in that format is returned directly; anything else is
 * rendered through an OfflineAudioContext to resample and downmix.
 */
async function decodeAudioBlob(blob) {
  const bytes = await blob.arrayBuffer();
  const audioCtx = new AudioContext({ sampleRate: 16000 });
  try {
    const decoded = await audioCtx.decodeAudioData(bytes);

    // Fast path: already 16 kHz mono — hand back channel 0 as-is.
    if (decoded.sampleRate === 16000 && decoded.numberOfChannels === 1) {
      return decoded.getChannelData(0);
    }

    // Resample/downmix via an offline render pass.
    const frameCount = Math.ceil(decoded.duration * 16000);
    const offlineCtx = new OfflineAudioContext(1, frameCount, 16000);
    const sourceNode = offlineCtx.createBufferSource();
    sourceNode.buffer = decoded;
    sourceNode.connect(offlineCtx.destination);
    sourceNode.start();
    const rendered = await offlineCtx.startRendering();
    return rendered.getChannelData(0);
  } finally {
    // Always release the AudioContext, even when decoding fails.
    await audioCtx.close();
  }
}
+
/**
 * Transcribe an audio Blob offline with Parakeet in the browser.
 * The first call downloads the model (~634 MB); later calls reuse the
 * cached singleton.
 *
 * @param {Blob} audioBlob - recorded audio to transcribe
 * @param {function} [onProgress] - callback({ status, progress, file, message })
 * @returns {Promise<string>} the transcribed text ('' when empty)
 */
async function transcribeOffline(audioBlob, onProgress) {
  const model = await getModel(onProgress);

  onProgress?.({ status: 'transcribing', message: 'Transcribing audio...' });

  const samples = await decodeAudioBlob(audioBlob);
  const result = await model.transcribe(samples, 16000, {
    returnTimestamps: false,
    enableProfiling: false,
  });

  const text = result.utterance_text?.trim() || '';
  onProgress?.({ status: 'done', message: 'Transcription complete' });
  return text;
}
+
// Expose the public API to voice.html scripts via a window global.
// NOTE(review): the file header says "loaded as ES module", but nothing is
// `export`ed here — consumers must read window.ParakeetOffline. Confirm
// voice.js actually uses the global rather than an import.
window.ParakeetOffline = {
  isModelCached,
  transcribeOffline,
};
diff --git a/browser-extension/popup.html b/browser-extension/popup.html
index d0db5ac..dcb72a9 100644
--- a/browser-extension/popup.html
+++ b/browser-extension/popup.html
@@ -133,6 +133,23 @@
color: #e5e5e5;
border: 1px solid #404040;
}
+ .btn-voice {
+ background: #450a0a;
+ color: #fca5a5;
+ border: 1px solid #991b1b;
+ }
+ .btn-voice svg {
+ flex-shrink: 0;
+ }
+
+ .btn-unlock {
+ background: #172554;
+ color: #93c5fd;
+ border: 1px solid #1e40af;
+ }
+ .btn-unlock svg {
+ flex-shrink: 0;
+ }
.status {
margin: 0 14px 10px;
@@ -212,6 +229,28 @@
+
+
+
+
+
+
+
+