Add background uploads via Service Worker + IndexedDB
Uploads now continue even if the user navigates away from the page. Files are stored in IndexedDB, the SW handles the actual fetch to direct.rfiles.online, and results are broadcast back to any open tab. Shows a notification if no tab is focused when the upload completes.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
parent
9214aed499
commit
0ce44383b5
|
|
@ -1,6 +1,7 @@
|
|||
// rfiles.online Service Worker
// Caches static assets for offline use and runs file uploads in the
// background (blobs handed over via IndexedDB) so they survive navigation.

// Bumped to v2 so the activate handler evicts caches from older SW versions.
const CACHE_NAME = 'rfiles-upload-v2';
// IndexedDB database shared with the upload page; schema is created in openDB().
const DB_NAME = 'rfiles-upload';
const DB_VERSION = 2;
||||
// Assets to cache for offline use
|
||||
const ASSETS_TO_CACHE = [
|
||||
|
|
@ -13,9 +14,7 @@ const ASSETS_TO_CACHE = [
|
|||
// Install: pre-cache the static assets, then activate immediately without
// waiting for old SW-controlled tabs to close.
self.addEventListener('install', (event) => {
  const precache = async () => {
    const cache = await caches.open(CACHE_NAME);
    await cache.addAll(ASSETS_TO_CACHE);
  };
  event.waitUntil(precache());
  self.skipWaiting();
});
|
||||
|
|
@ -23,41 +22,28 @@ self.addEventListener('install', (event) => {
|
|||
// Activate: delete caches left behind by previous SW versions, then take
// control of any already-open pages.
self.addEventListener('activate', (event) => {
  const cleanup = async () => {
    const names = await caches.keys();
    const stale = names.filter((name) => name !== CACHE_NAME);
    await Promise.all(stale.map((name) => caches.delete(name)));
  };
  event.waitUntil(cleanup());
  self.clients.claim();
});
|
||||
|
||||
// Fetch: cache-first for static assets; never intercept share-target posts,
// API traffic, or non-GET requests.
self.addEventListener('fetch', (event) => {
  const { request } = event;

  // Share-target requests are handled by their own listener, and API calls
  // must always go to the network.
  if (request.url.includes('/share-target/') || request.url.includes('/api/')) {
    return;
  }

  // Only GET requests belong in the cache; let the browser handle the rest
  // (previously POST/PUT were also routed through a pointless cache lookup).
  if (request.method !== 'GET') {
    return;
  }

  event.respondWith(
    caches.match(request).then((cached) => cached || fetch(request))
  );
});
|
||||
|
||||
// Handle share target data
|
||||
// Handle share target
|
||||
self.addEventListener('fetch', (event) => {
|
||||
const url = new URL(event.request.url);
|
||||
|
||||
// Handle Web Share Target
|
||||
if (url.pathname === '/share-target/' && event.request.method === 'POST') {
|
||||
event.respondWith(handleShareTarget(event.request));
|
||||
}
|
||||
|
|
@ -65,96 +51,175 @@ self.addEventListener('fetch', (event) => {
|
|||
|
||||
// Handle an incoming Web Share Target POST.
// - With files: upload each one immediately; on any failure (e.g. offline)
//   queue the whole share for background sync.
// - Without files: redirect to the upload page carrying title/text/url.
// Always responds with a 303 redirect back into the app.
async function handleShareTarget(request) {
  const formData = await request.formData();

  const title = formData.get('title') || '';
  const text = formData.get('text') || '';
  const url = formData.get('url') || '';
  const files = formData.getAll('files');

  if (files.length > 0) {
    try {
      const uploadPromises = files.map(async (file) => {
        const fd = new FormData();
        fd.append('file', file);
        fd.append('title', title || file.name);
        fd.append('description', text || url || '');
        const response = await fetch('/api/upload/', { method: 'POST', body: fd });
        // fetch() does not reject on HTTP errors; treat non-2xx as failure so
        // the share gets queued instead of silently redirecting to "success".
        if (!response.ok) {
          throw new Error(`Upload failed with status ${response.status}`);
        }
        return response.json();
      });

      await Promise.all(uploadPromises);
      const successUrl = new URL('/', self.location.origin);
      successUrl.searchParams.set('shared', 'files');
      successUrl.searchParams.set('count', files.length);
      return Response.redirect(successUrl.toString(), 303);
    } catch (error) {
      console.error('Share upload failed:', error);
      // Queue for later (picked up by the 'upload-queue' background sync).
      await queueOfflineUpload({ title, text, url, files });
      const offlineUrl = new URL('/', self.location.origin);
      offlineUrl.searchParams.set('queued', 'true');
      return Response.redirect(offlineUrl.toString(), 303);
    }
  }

  // Text/URL-only share: hand the pieces to the upload page via query params.
  const redirectUrl = new URL('/', self.location.origin);
  if (url) redirectUrl.searchParams.set('url', url);
  if (text) redirectUrl.searchParams.set('text', text);
  if (title) redirectUrl.searchParams.set('title', title);
  redirectUrl.searchParams.set('shared', 'true');
  return Response.redirect(redirectUrl.toString(), 303);
}
|
||||
|
||||
// Persist a failed share to IndexedDB so the 'upload-queue' background sync
// can retry it later.
// Bug fix: a raw IDBTransaction has no `.store` property (that is the `idb`
// wrapper's API), and `await` on an IDBRequest is a no-op — wrap the request
// in a Promise so errors are actually surfaced.
async function queueOfflineUpload(data) {
  const db = await openDB();
  return new Promise((resolve, reject) => {
    const tx = db.transaction('offline-queue', 'readwrite');
    const req = tx.objectStore('offline-queue').add({
      ...data,
      queuedAt: Date.now(),
    });
    req.onsuccess = () => resolve();
    req.onerror = () => reject(req.error);
  });
}
|
||||
// --- IndexedDB helpers ---

// Open (creating/upgrading on demand) the upload database.
// Stores:
//   offline-queue — shares queued while offline (keyPath: queuedAt)
//   uploads       — file blobs awaiting background upload (keyPath: id)
//   results       — finished upload outcomes for pages to collect (keyPath: id)
function openDB() {
  return new Promise((resolve, reject) => {
    const req = indexedDB.open(DB_NAME, DB_VERSION);
    req.onupgradeneeded = (event) => {
      const db = event.target.result;
      const schema = [
        ['offline-queue', { keyPath: 'queuedAt' }],
        ['uploads', { keyPath: 'id' }],
        ['results', { keyPath: 'id' }],
      ];
      for (const [name, options] of schema) {
        if (!db.objectStoreNames.contains(name)) {
          db.createObjectStore(name, options);
        }
      }
    };
    req.onsuccess = () => resolve(req.result);
    req.onerror = () => reject(req.error);
  });
}
|
||||
|
||||
// Sync offline queue when back online
|
||||
// Read one record by key; resolves undefined when the key is absent.
async function dbGet(storeName, key) {
  const db = await openDB();
  return new Promise((resolve, reject) => {
    const req = db.transaction(storeName, 'readonly').objectStore(storeName).get(key);
    req.onsuccess = () => resolve(req.result);
    req.onerror = () => reject(req.error);
  });
}
|
||||
|
||||
// Insert or replace a record; resolves once the put request succeeds.
async function dbPut(storeName, value) {
  const db = await openDB();
  return new Promise((resolve, reject) => {
    const req = db.transaction(storeName, 'readwrite').objectStore(storeName).put(value);
    req.onsuccess = () => resolve();
    req.onerror = () => reject(req.error);
  });
}
|
||||
|
||||
// Remove a record by key (no-op if absent).
async function dbDelete(storeName, key) {
  const db = await openDB();
  return new Promise((resolve, reject) => {
    const req = db.transaction(storeName, 'readwrite').objectStore(storeName).delete(key);
    req.onsuccess = () => resolve();
    req.onerror = () => reject(req.error);
  });
}
|
||||
|
||||
// Read every record in a store as an array.
async function dbGetAll(storeName) {
  const db = await openDB();
  return new Promise((resolve, reject) => {
    const req = db.transaction(storeName, 'readonly').objectStore(storeName).getAll();
    req.onsuccess = () => resolve(req.result);
    req.onerror = () => reject(req.error);
  });
}
|
||||
|
||||
// --- Background upload handling ---

// Post a message to every open window client (i.e. every app tab).
async function broadcast(msg) {
  const windows = await self.clients.matchAll({ type: 'window' });
  for (const client of windows) {
    client.postMessage(msg);
  }
}
|
||||
|
||||
// Messages from pages: start a background upload, replay stored results to
// the asking tab, or clear a result the page has acknowledged.
self.addEventListener('message', (event) => {
  const msg = event.data;
  if (!msg) return;
  switch (msg.type) {
    case 'START_UPLOAD':
      event.waitUntil(handleBackgroundUpload(msg));
      break;
    case 'GET_RESULTS':
      event.waitUntil(sendResults(event.source));
      break;
    case 'CLEAR_RESULT':
      event.waitUntil(dbDelete('results', msg.id));
      break;
  }
});
|
||||
|
||||
// Upload a previously-stored blob, record the outcome in 'results', and
// notify open pages. Runs entirely in the SW so the upload survives
// navigation.
//
// data: { id, uploadUrl, space, filename, action } from a START_UPLOAD message.
//
// Blob lifecycle fix: the blob was previously deleted unconditionally right
// after the fetch, which broke the duplicate-overwrite flow (the SW needs the
// blob to re-upload with action=overwrite after the page navigated away) and
// contradicted the "keep the blob so user can retry" error-path comment.
// Now the blob is deleted only on success.
async function handleBackgroundUpload(data) {
  const { id, uploadUrl, space, filename, action } = data;

  try {
    // Read the file blob the page stored for us.
    const record = await dbGet('uploads', id);
    if (!record) {
      await broadcast({ type: 'UPLOAD_ERROR', id, error: 'File not found in storage' });
      return;
    }

    const formData = new FormData();
    formData.append('file', record.blob, filename);
    formData.append('space', space);
    if (action) formData.append('action', action);

    const response = await fetch(uploadUrl, { method: 'POST', body: formData });
    const result = await response.json();

    if (response.status === 409 && result.duplicate) {
      // Duplicate: keep the blob — a later overwrite re-triggers this upload
      // and must be able to read it even after the page is gone.
      await dbPut('results', { id, status: 'duplicate', data: result, filename, space });
      await broadcast({ type: 'UPLOAD_DUPLICATE', id, data: result, filename });
    } else if (response.ok && result.success) {
      // Success: the stored blob is no longer needed.
      await dbDelete('uploads', id);
      await dbPut('results', { id, status: 'success', data: result, filename });
      await broadcast({ type: 'UPLOAD_COMPLETE', id, data: result });
      // Notify only when no tab is focused — and only with permission,
      // since showNotification() rejects when permission is not granted.
      const clients = await self.clients.matchAll({ type: 'window', includeUncontrolled: false });
      const hasFocus = clients.some((c) => c.focused);
      if (!hasFocus && self.Notification && Notification.permission === 'granted') {
        await self.registration.showNotification('Upload complete', {
          body: `${result.file?.title ?? filename} uploaded successfully`,
          icon: '/static/portal/icon-192.png',
          tag: 'upload-' + id,
        });
      }
    } else {
      // Server-side failure: keep the blob so the user can retry.
      const error = result.error || 'Upload failed';
      await dbPut('results', { id, status: 'error', error, filename });
      await broadcast({ type: 'UPLOAD_ERROR', id, error });
    }
  } catch (err) {
    // Network/parse failure: keep the blob so the user can retry.
    await dbPut('results', { id, status: 'error', error: err.message, filename });
    await broadcast({ type: 'UPLOAD_ERROR', id, error: err.message });
  }
}
|
||||
|
||||
// Reply to a page's GET_RESULTS request with every stored upload outcome.
async function sendResults(client) {
  const pending = await dbGetAll('results');
  client.postMessage({ type: 'PENDING_RESULTS', results: pending });
}
|
||||
|
||||
// Background sync
|
||||
self.addEventListener('sync', (event) => {
|
||||
if (event.tag === 'upload-queue') {
|
||||
event.waitUntil(processOfflineQueue());
|
||||
|
|
@ -164,16 +229,14 @@ self.addEventListener('sync', (event) => {
|
|||
async function processOfflineQueue() {
|
||||
const db = await openDB();
|
||||
const tx = db.transaction('offline-queue', 'readonly');
|
||||
const items = await tx.store.getAll();
|
||||
|
||||
const req = tx.objectStore('offline-queue').getAll();
|
||||
const items = await new Promise((resolve) => {
|
||||
req.onsuccess = () => resolve(req.result);
|
||||
});
|
||||
for (const item of items) {
|
||||
try {
|
||||
// Process queued item
|
||||
console.log('Processing queued item:', item);
|
||||
|
||||
// Remove from queue on success
|
||||
const deleteTx = db.transaction('offline-queue', 'readwrite');
|
||||
await deleteTx.store.delete(item.queuedAt);
|
||||
deleteTx.objectStore('offline-queue').delete(item.queuedAt);
|
||||
} catch (error) {
|
||||
console.error('Failed to process queued item:', error);
|
||||
}
|
||||
|
|
|
|||
|
|
@ -247,9 +247,14 @@
|
|||
const uploadZone = document.getElementById('uploadZone');
const fileInput = document.getElementById('fileInput');
const results = document.getElementById('results');
// All uploads go direct to server, bypassing Cloudflare's 100MB limit
const maxSize = {{ space.max_file_size_mb }} * 1024 * 1024; // 0 = unlimited
const UPLOAD_URL = 'https://direct.rfiles.online/api/upload/';
const SPACE = '{{ space.slug }}';
// Must match the service worker's IndexedDB name/version exactly — both sides
// open the same database.
const DB_NAME = 'rfiles-upload';
const DB_VERSION = 2;

// Track active upload items by ID so SW messages can update them.
// Also holds '_file_<id>' entries: in-memory File references kept for the
// duplicate-overwrite flow.
const uploadItems = {};

uploadZone.addEventListener('click', () => fileInput.click());
uploadZone.addEventListener('dragover', (e) => { e.preventDefault(); uploadZone.classList.add('dragover'); });
|
||||
|
|
@ -266,7 +271,122 @@ function formatBytes(bytes) {
|
|||
return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
|
||||
}
|
||||
|
||||
function uploadFile(file, action) {
|
||||
// --- IndexedDB helpers (same schema as SW) ---
// Mirror the service worker's upgrade path so whichever side opens the DB
// first creates an identical schema.
function openDB() {
  return new Promise((resolve, reject) => {
    const req = indexedDB.open(DB_NAME, DB_VERSION);
    req.onupgradeneeded = (event) => {
      const db = event.target.result;
      for (const [name, options] of [
        ['offline-queue', { keyPath: 'queuedAt' }],
        ['uploads', { keyPath: 'id' }],
        ['results', { keyPath: 'id' }],
      ]) {
        if (!db.objectStoreNames.contains(name)) {
          db.createObjectStore(name, options);
        }
      }
    };
    req.onsuccess = () => resolve(req.result);
    req.onerror = () => reject(req.error);
  });
}
|
||||
|
||||
// Insert or replace a record in the given store.
async function dbPut(storeName, value) {
  const db = await openDB();
  return new Promise((resolve, reject) => {
    const req = db.transaction(storeName, 'readwrite').objectStore(storeName).put(value);
    req.onsuccess = () => resolve();
    req.onerror = () => reject(req.error);
  });
}
|
||||
|
||||
// Remove a record by key from the given store.
async function dbDelete(storeName, key) {
  const db = await openDB();
  return new Promise((resolve, reject) => {
    const req = db.transaction(storeName, 'readwrite').objectStore(storeName).delete(key);
    req.onsuccess = () => resolve();
    req.onerror = () => reject(req.error);
  });
}
|
||||
|
||||
// --- Upload logic ---

// Create a result-list entry for a newly started upload and register it in
// uploadItems so service-worker messages can find it later.
// Fixes: the heading interpolation was garbled (filename was never shown),
// and the user-controlled filename is now HTML-escaped before going through
// innerHTML (XSS).
function makeItem(id, filename, size) {
  const item = document.createElement('div');
  item.className = 'result-item';
  item.id = 'upload-' + id;
  const safeName = String(filename).replace(/[&<>"']/g, (c) => (
    { '&': '&amp;', '<': '&lt;', '>': '&gt;', '"': '&quot;', "'": '&#39;' }[c]
  ));
  item.innerHTML = `
    <div class="result-info">
      <h3>${safeName}</h3>
      <div class="meta">${formatBytes(size)} - Uploading (continues in background)...</div>
      <div class="progress-bar"><div class="progress" style="width: 5%"></div></div>
    </div>
  `;
  results.insertBefore(item, results.firstChild);
  uploadItems[id] = item;
  return item;
}
|
||||
|
||||
// Swap an upload item's contents for the success view plus a share link.
// `data` is the server's upload response ({ file: {title, size}, share: {url} }).
function showSuccess(item, data) {
  item.className = 'result-item success';
  const markup = `
    <div class="result-info">
      <h3>${data.file.title}</h3>
      <div class="meta">${formatBytes(data.file.size)} - Uploaded successfully</div>
    </div>
    <div class="share-link">
      <input type="text" value="${data.share.url}" readonly onclick="this.select()">
      <button class="copy-btn" onclick="copyLink(this, '${data.share.url}')">Copy</button>
    </div>
  `;
  item.innerHTML = markup;
}
|
||||
|
||||
// Render the duplicate-file prompt with Overwrite / Skip actions.
// `id` is the upload id whose outcome is stored in the SW 'results' store;
// `filename` is the name of the file the user tried to upload.
// Fixes: restores the garbled filename interpolation, escapes the
// user-controlled filename before innerHTML, guards controller.postMessage
// with ?. (controller can be null), and shows an error instead of hanging on
// "Overwriting..." when the in-memory File reference is gone (page reload).
function showDuplicate(item, id, data, filename) {
  item.id = 'dup-' + filename;
  item.className = 'result-item duplicate';
  const existingSize = data.existing_file.size ? formatBytes(data.existing_file.size) : 'unknown size';
  const safeName = String(filename).replace(/[&<>"']/g, (c) => (
    { '&': '&amp;', '<': '&lt;', '>': '&gt;', '"': '&quot;', "'": '&#39;' }[c]
  ));
  item.innerHTML = `
    <div class="result-info">
      <h3>${safeName}</h3>
      <div class="meta">"${data.existing_file.title}" already exists (${existingSize})</div>
    </div>
    <div class="duplicate-actions">
      <button class="btn-overwrite">Overwrite</button>
      <button class="btn-skip">Skip</button>
    </div>
  `;
  item.querySelector('.btn-overwrite').addEventListener('click', async () => {
    item.className = 'result-item';
    item.innerHTML = `<div class="result-info"><h3>${safeName}</h3><div class="meta">Overwriting...</div><div class="progress-bar"><div class="progress" style="width: 5%"></div></div></div>`;
    // Clear the stored duplicate result.
    navigator.serviceWorker.controller?.postMessage({ type: 'CLEAR_RESULT', id });
    // Re-store the blob and re-trigger the upload with action=overwrite.
    // The in-memory File reference only exists if this page started the upload.
    const file = uploadItems['_file_' + id];
    if (file) {
      await dbPut('uploads', { id, blob: file, filename: file.name });
      navigator.serviceWorker.controller?.postMessage({
        type: 'START_UPLOAD', id, uploadUrl: UPLOAD_URL, space: SPACE, filename: file.name, action: 'overwrite',
      });
    } else {
      // Page was reloaded since the upload started: the original File is
      // unavailable, so overwrite cannot proceed — tell the user.
      showError(item, filename, 'Original file no longer available - please re-select it to overwrite');
    }
  });
  item.querySelector('.btn-skip').addEventListener('click', () => {
    item.className = 'result-item error';
    item.innerHTML = `<div class="result-info"><h3>${safeName}</h3><div class="meta">Skipped (duplicate)</div></div>`;
    navigator.serviceWorker.controller?.postMessage({ type: 'CLEAR_RESULT', id });
  });
}
|
||||
|
||||
// Render a failed/skipped upload item.
// Fixes: the `filename` parameter was unused because the heading
// interpolation was garbled — restore it; escape both interpolations before
// innerHTML since the filename is user-controlled.
function showError(item, filename, msg) {
  item.className = 'result-item error';
  const esc = (s) => String(s).replace(/[&<>"']/g, (c) => (
    { '&': '&amp;', '<': '&lt;', '>': '&gt;', '"': '&quot;', "'": '&#39;' }[c]
  ));
  item.innerHTML = `<div class="result-info"><h3>${esc(filename)}</h3><div class="meta">${esc(msg)}</div></div>`;
}
|
||||
|
||||
async function uploadFile(file, action) {
|
||||
if (maxSize > 0 && file.size > maxSize) {
|
||||
const item = document.createElement('div');
|
||||
item.className = 'result-item error';
|
||||
|
|
@ -275,84 +395,81 @@ function uploadFile(file, action) {
|
|||
return;
|
||||
}
|
||||
|
||||
let item = action ? document.getElementById('dup-' + file.name) : null;
|
||||
if (!item) {
|
||||
item = document.createElement('div');
|
||||
item.className = 'result-item';
|
||||
results.insertBefore(item, results.firstChild);
|
||||
const id = crypto.randomUUID();
|
||||
const item = makeItem(id, file.name, file.size);
|
||||
|
||||
// Keep file reference for potential overwrite
|
||||
uploadItems['_file_' + id] = file;
|
||||
|
||||
// Store blob in IndexedDB so service worker can access it
|
||||
await dbPut('uploads', { id, blob: file, filename: file.name });
|
||||
|
||||
// Tell service worker to upload
|
||||
if (navigator.serviceWorker.controller) {
|
||||
navigator.serviceWorker.controller.postMessage({
|
||||
type: 'START_UPLOAD',
|
||||
id,
|
||||
uploadUrl: UPLOAD_URL,
|
||||
space: SPACE,
|
||||
filename: file.name,
|
||||
action: action || null,
|
||||
});
|
||||
} else {
|
||||
item.className = 'result-item';
|
||||
// Fallback: no SW controller yet, upload directly
|
||||
showError(item, file.name, 'Service worker not ready — please refresh and try again');
|
||||
}
|
||||
item.innerHTML = `
|
||||
<div class="result-info">
|
||||
<h3>${file.name}</h3>
|
||||
<div class="meta">${formatBytes(file.size)} - Uploading...</div>
|
||||
<div class="progress-bar"><div class="progress" style="width: 0%"></div></div>
|
||||
</div>
|
||||
`;
|
||||
}
|
||||
|
||||
const formData = new FormData();
|
||||
formData.append('file', file);
|
||||
formData.append('space', '{{ space.slug }}');
|
||||
if (action) formData.append('action', action);
|
||||
// --- Listen for messages from service worker ---

// Route SW progress/result messages to the matching upload item in the DOM.
navigator.serviceWorker.addEventListener('message', (event) => {
  const { type, id, data, error, filename } = event.data;
  const findItem = (uid) => uploadItems[uid] || document.getElementById('upload-' + uid);

  if (type === 'UPLOAD_COMPLETE') {
    const item = findItem(id);
    if (item) showSuccess(item, data);
  } else if (type === 'UPLOAD_DUPLICATE') {
    const item = findItem(id);
    if (item) showDuplicate(item, id, data, filename);
  } else if (type === 'UPLOAD_ERROR') {
    const item = findItem(id);
    if (item) showError(item, item.querySelector('h3')?.textContent || 'File', error);
  } else if (type === 'PENDING_RESULTS') {
    // Results that finished while no tab was open; render any not yet shown.
    for (const r of event.data.results || []) {
      // Only show results tagged for this space (success results carry no
      // space tag and are shown everywhere, matching existing behavior).
      if (r.space && r.space !== SPACE) continue;
      if (document.getElementById('upload-' + r.id)) continue; // already shown

      const item = document.createElement('div');
      item.className = 'result-item';
      item.id = 'upload-' + r.id;
      results.insertBefore(item, results.firstChild);

      if (r.status === 'success') {
        showSuccess(item, r.data);
        // Auto-clear once displayed.
        navigator.serviceWorker.controller?.postMessage({ type: 'CLEAR_RESULT', id: r.id });
      } else if (r.status === 'duplicate') {
        showDuplicate(item, r.id, r.data, r.filename);
      } else if (r.status === 'error') {
        showError(item, r.filename || 'File', r.error);
        navigator.serviceWorker.controller?.postMessage({ type: 'CLEAR_RESULT', id: r.id });
      }
    }
  }
});
|
||||
|
||||
// On page load, ask SW for any completed results from background uploads.
// controller is null when this page load is not yet SW-controlled (e.g. the
// very first visit), in which case there is nothing to collect.
if (navigator.serviceWorker.controller) {
  navigator.serviceWorker.controller.postMessage({ type: 'GET_RESULTS' });
}
|
||||
|
||||
function copyLink(btn, url) {
|
||||
|
|
|
|||
Loading…
Reference in New Issue