Remove background SW uploads, use simple direct fetch instead
SW was causing issues. Reverted to straightforward XHR uploads with progress bar, duplicate detection, and overwrite/skip. No IndexedDB, no SW messaging. SW is now minimal (cache-only for static assets). Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
parent
6c4c21591f
commit
5aae2776ed
|
|
@ -1,17 +1,12 @@
|
||||||
// rfiles.online Service Worker
|
// rfiles.online Service Worker — minimal cache-only
|
||||||
const CACHE_NAME = 'rfiles-upload-v2';
|
const CACHE_NAME = 'rfiles-v3';
|
||||||
const DB_NAME = 'rfiles-upload';
|
|
||||||
const DB_VERSION = 2;
|
|
||||||
|
|
||||||
// Assets to cache for offline use
|
|
||||||
const ASSETS_TO_CACHE = [
|
const ASSETS_TO_CACHE = [
|
||||||
'/',
|
|
||||||
'/static/portal/manifest.json',
|
'/static/portal/manifest.json',
|
||||||
'/static/portal/icon-192.png',
|
'/static/portal/icon-192.png',
|
||||||
'/static/portal/icon-512.png',
|
'/static/portal/icon-512.png',
|
||||||
];
|
];
|
||||||
|
|
||||||
// Install event - cache assets
|
|
||||||
// Install: pre-cache the static app-shell assets so they load offline.
self.addEventListener('install', (event) => {
  event.waitUntil(
    caches.open(CACHE_NAME).then((cache) => cache.addAll(ASSETS_TO_CACHE))
  );
  // Activate the updated worker immediately instead of waiting for old tabs.
  self.skipWaiting();
});
|
||||||
|
|
||||||
// Activate: delete caches left over from previous versions, then take
// control of all open pages without requiring a reload.
self.addEventListener('activate', (event) => {
  event.waitUntil(
    caches.keys().then((names) => {
      const stale = names.filter((n) => n !== CACHE_NAME);
      return Promise.all(stale.map((n) => caches.delete(n)));
    })
  );
  self.clients.claim();
});
|
||||||
|
|
||||||
// Fetch: cache-first for static assets; API requests always go to the network.
self.addEventListener('fetch', (event) => {
  if (event.request.url.includes('/api/')) return;
  event.respondWith(
    caches.match(event.request).then((cached) => cached || fetch(event.request))
  );
});
|
||||||
|
|
||||||
// Route Web Share Target POSTs to the dedicated handler.
self.addEventListener('fetch', (event) => {
  const { request } = event;
  if (request.method !== 'POST') return;
  if (new URL(request.url).pathname !== '/share-target/') return;
  event.respondWith(handleShareTarget(request));
});
|
|
||||||
|
|
||||||
/**
 * Handle a Web Share Target POST: upload each shared file, then redirect
 * (303) back to the app with query params describing the outcome.
 *
 * Fix: a non-2xx upload response was previously treated as success because
 * only fetch() rejection was ever checked; each response is now verified
 * with response.ok so server-side failures reach the catch branch.
 */
async function handleShareTarget(request) {
  const formData = await request.formData();
  const title = formData.get('title') || '';
  const text = formData.get('text') || '';
  const url = formData.get('url') || '';
  const files = formData.getAll('files');

  if (files.length > 0) {
    try {
      await Promise.all(files.map(async (file) => {
        const fd = new FormData();
        fd.append('file', file);
        fd.append('title', title || file.name);
        fd.append('description', text || url || '');
        const response = await fetch('/api/upload/', { method: 'POST', body: fd });
        if (!response.ok) {
          throw new Error(`Upload failed with status ${response.status}`);
        }
        return response.json();
      }));
      const successUrl = new URL('/', self.location.origin);
      successUrl.searchParams.set('shared', 'files');
      successUrl.searchParams.set('count', String(files.length));
      return Response.redirect(successUrl.toString(), 303);
    } catch (error) {
      // NOTE(review): nothing is actually queued on this path — the page
      // interprets ?queued=true as "will retry later"; confirm that contract.
      const offlineUrl = new URL('/', self.location.origin);
      offlineUrl.searchParams.set('queued', 'true');
      return Response.redirect(offlineUrl.toString(), 303);
    }
  }

  // No files shared: forward text/url/title to the page via query params.
  const redirectUrl = new URL('/', self.location.origin);
  if (url) redirectUrl.searchParams.set('url', url);
  if (text) redirectUrl.searchParams.set('text', text);
  if (title) redirectUrl.searchParams.set('title', title);
  redirectUrl.searchParams.set('shared', 'true');
  return Response.redirect(redirectUrl.toString(), 303);
}
|
|
||||||
|
|
||||||
// --- IndexedDB helpers ---

// Open (and lazily migrate) the upload database; resolves with the handle.
function openDB() {
  return new Promise((resolve, reject) => {
    const req = indexedDB.open(DB_NAME, DB_VERSION);
    req.onupgradeneeded = () => {
      const db = req.result;
      // Create each store on first open / version bump; keyPaths must match
      // what the put/get helpers store.
      if (!db.objectStoreNames.contains('offline-queue')) {
        db.createObjectStore('offline-queue', { keyPath: 'queuedAt' });
      }
      if (!db.objectStoreNames.contains('uploads')) {
        db.createObjectStore('uploads', { keyPath: 'id' });
      }
      if (!db.objectStoreNames.contains('results')) {
        db.createObjectStore('results', { keyPath: 'id' });
      }
    };
    req.onsuccess = () => resolve(req.result);
    req.onerror = () => reject(req.error);
  });
}
|
|
||||||
|
|
||||||
// Read one record by key from the given store; resolves undefined if absent.
function dbGet(storeName, key) {
  return openDB().then((db) => new Promise((resolve, reject) => {
    const request = db.transaction(storeName, 'readonly')
      .objectStore(storeName)
      .get(key);
    request.onsuccess = () => resolve(request.result);
    request.onerror = () => reject(request.error);
  }));
}
|
|
||||||
|
|
||||||
// Insert or replace one record in the given store.
function dbPut(storeName, value) {
  return openDB().then((db) => new Promise((resolve, reject) => {
    const request = db.transaction(storeName, 'readwrite')
      .objectStore(storeName)
      .put(value);
    request.onsuccess = () => resolve();
    request.onerror = () => reject(request.error);
  }));
}
|
|
||||||
|
|
||||||
// Remove one record by key from the given store (no-op if absent).
function dbDelete(storeName, key) {
  return openDB().then((db) => new Promise((resolve, reject) => {
    const request = db.transaction(storeName, 'readwrite')
      .objectStore(storeName)
      .delete(key);
    request.onsuccess = () => resolve();
    request.onerror = () => reject(request.error);
  }));
}
|
|
||||||
|
|
||||||
// Read every record from the given store.
function dbGetAll(storeName) {
  return openDB().then((db) => new Promise((resolve, reject) => {
    const request = db.transaction(storeName, 'readonly')
      .objectStore(storeName)
      .getAll();
    request.onsuccess = () => resolve(request.result);
    request.onerror = () => reject(request.error);
  }));
}
|
|
||||||
|
|
||||||
// --- Background upload handling ---

// Send one message to every open window controlled by this worker.
async function broadcast(msg) {
  const windows = await self.clients.matchAll({ type: 'window' });
  for (const client of windows) {
    client.postMessage(msg);
  }
}
|
|
||||||
|
|
||||||
// Page -> worker commands: start an upload, fetch stored results, clear one.
self.addEventListener('message', (event) => {
  const msg = event.data;
  if (!msg) return;
  switch (msg.type) {
    case 'START_UPLOAD':
      event.waitUntil(handleBackgroundUpload(msg));
      break;
    case 'GET_RESULTS':
      event.waitUntil(sendResults(event.source));
      break;
    case 'CLEAR_RESULT':
      event.waitUntil(dbDelete('results', msg.id));
      break;
  }
});
|
|
||||||
|
|
||||||
/**
 * Perform one background upload: read the queued blob from IndexedDB,
 * POST it, persist the outcome in the 'results' store, and notify pages.
 *
 * Fixes: the stored blob used to be deleted immediately after the fetch,
 * before the status check — so the error path's stated intent ("keep the
 * blob so user can retry") could never hold. The blob is now removed only
 * on success or duplicate. The notification body also guards against a
 * missing result.file.
 */
async function handleBackgroundUpload(data) {
  const { id, uploadUrl, space, filename, action } = data;

  try {
    // Read the file blob staged by the page.
    const record = await dbGet('uploads', id);
    if (!record) {
      await broadcast({ type: 'UPLOAD_ERROR', id, error: 'File not found in storage' });
      return;
    }

    const formData = new FormData();
    formData.append('file', record.blob, filename);
    formData.append('space', space);
    if (action) formData.append('action', action);

    const response = await fetch(uploadUrl, { method: 'POST', body: formData });
    const result = await response.json();

    if (response.status === 409 && result.duplicate) {
      // Duplicate detected server-side: surface it to the page to resolve.
      await dbDelete('uploads', id);
      await dbPut('results', { id, status: 'duplicate', data: result, filename, space });
      await broadcast({ type: 'UPLOAD_DUPLICATE', id, data: result, filename });
    } else if (response.ok && result.success) {
      await dbDelete('uploads', id);
      await dbPut('results', { id, status: 'success', data: result, filename });
      await broadcast({ type: 'UPLOAD_COMPLETE', id, data: result });
      // Only show an OS notification when no window has focus — the in-page
      // UI covers the focused case.
      const clients = await self.clients.matchAll({ type: 'window', includeUncontrolled: false });
      const hasFocus = clients.some((c) => c.focused);
      if (!hasFocus) {
        self.registration.showNotification('Upload complete', {
          body: `${result.file?.title ?? filename} uploaded successfully`,
          icon: '/static/portal/icon-192.png',
          tag: 'upload-' + id,
        });
      }
    } else {
      // Server rejected the upload: keep the blob so the user can retry.
      const error = result.error || 'Upload failed';
      await dbPut('results', { id, status: 'error', error, filename });
      await broadcast({ type: 'UPLOAD_ERROR', id, error });
    }
  } catch (err) {
    // Network or parse failure: keep the blob so the user can retry.
    await dbPut('results', { id, status: 'error', error: err.message, filename });
    await broadcast({ type: 'UPLOAD_ERROR', id, error: err.message });
  }
}
|
|
||||||
|
|
||||||
// Reply to one client with every stored upload outcome.
async function sendResults(client) {
  const results = await dbGetAll('results');
  client.postMessage({ type: 'PENDING_RESULTS', results });
}
|
|
||||||
|
|
||||||
// Background Sync: process the offline queue when connectivity returns.
self.addEventListener('sync', (event) => {
  if (event.tag !== 'upload-queue') return;
  event.waitUntil(processOfflineQueue());
});
|
|
||||||
|
|
||||||
/**
 * Drain the offline upload queue.
 *
 * Fixes: the getAll request had no onerror handler, so a read failure left
 * the promise (and the sync event's waitUntil) hanging forever; per-item
 * deletions were fire-and-forget, so the worker could be terminated before
 * the transactions committed. Both paths are now settled/awaited.
 *
 * NOTE(review): queued items are removed without being re-uploaded — this
 * looks like an unfinished feature; confirm the intended behavior.
 */
async function processOfflineQueue() {
  const db = await openDB();
  const items = await new Promise((resolve, reject) => {
    const req = db.transaction('offline-queue', 'readonly')
      .objectStore('offline-queue')
      .getAll();
    req.onsuccess = () => resolve(req.result);
    req.onerror = () => reject(req.error);
  });
  for (const item of items) {
    try {
      await new Promise((resolve, reject) => {
        const del = db.transaction('offline-queue', 'readwrite')
          .objectStore('offline-queue')
          .delete(item.queuedAt);
        del.onsuccess = () => resolve();
        del.onerror = () => reject(del.error);
      });
    } catch (error) {
      console.error('Failed to process queued item:', error);
    }
  }
}
|
|
||||||
|
|
|
||||||
|
|
@ -161,18 +161,6 @@
|
||||||
{% block content %}{% endblock %}
|
{% block content %}{% endblock %}
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<script>
// Register the service worker that caches static assets.
// Fix: the previous version attached an empty 'controllerchange' listener
// whose comment promised a one-time reload that never happened (dead code),
// and registration failures were silently dropped.
if ('serviceWorker' in navigator) {
  navigator.serviceWorker.register('/static/portal/sw.js').catch((err) => {
    console.warn('Service worker registration failed:', err);
  });
}
</script>
|
|
||||||
{% block extra_js %}{% endblock %}
|
{% block extra_js %}{% endblock %}
|
||||||
</body>
|
</body>
|
||||||
</html>
|
</html>
|
||||||
|
|
|
||||||
|
|
@ -246,15 +246,10 @@
|
||||||
<script>
|
<script>
|
||||||
const uploadZone = document.getElementById('uploadZone');
|
const uploadZone = document.getElementById('uploadZone');
|
||||||
const fileInput = document.getElementById('fileInput');
|
const fileInput = document.getElementById('fileInput');
|
||||||
const results = document.getElementById('results');
|
const resultsEl = document.getElementById('results');
|
||||||
const maxSize = {{ space.max_file_size_mb }} * 1024 * 1024;
|
const maxSize = {{ space.max_file_size_mb }} * 1024 * 1024;
|
||||||
const UPLOAD_URL = 'https://direct.rfiles.online/api/upload/';
|
const UPLOAD_URL = 'https://direct.rfiles.online/api/upload/';
|
||||||
const SPACE = '{{ space.slug }}';
|
const SPACE = '{{ space.slug }}';
|
||||||
const DB_NAME = 'rfiles-upload';
|
|
||||||
const DB_VERSION = 2;
|
|
||||||
|
|
||||||
// Track active upload items by ID so SW messages can update them
|
|
||||||
const uploadItems = {};
|
|
||||||
|
|
||||||
uploadZone.addEventListener('click', () => fileInput.click());
|
uploadZone.addEventListener('click', () => fileInput.click());
|
||||||
uploadZone.addEventListener('dragover', (e) => { e.preventDefault(); uploadZone.classList.add('dragover'); });
|
uploadZone.addEventListener('dragover', (e) => { e.preventDefault(); uploadZone.classList.add('dragover'); });
|
||||||
|
|
@ -271,60 +266,6 @@ function formatBytes(bytes) {
|
||||||
return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
|
return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
|
||||||
}
|
}
|
||||||
|
|
||||||
// --- IndexedDB helpers (same schema as SW) ---
|
|
||||||
function openDB() {
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
const req = indexedDB.open(DB_NAME, DB_VERSION);
|
|
||||||
req.onerror = () => reject(req.error);
|
|
||||||
req.onsuccess = () => resolve(req.result);
|
|
||||||
req.onupgradeneeded = (event) => {
|
|
||||||
const db = event.target.result;
|
|
||||||
if (!db.objectStoreNames.contains('offline-queue'))
|
|
||||||
db.createObjectStore('offline-queue', { keyPath: 'queuedAt' });
|
|
||||||
if (!db.objectStoreNames.contains('uploads'))
|
|
||||||
db.createObjectStore('uploads', { keyPath: 'id' });
|
|
||||||
if (!db.objectStoreNames.contains('results'))
|
|
||||||
db.createObjectStore('results', { keyPath: 'id' });
|
|
||||||
};
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
function dbPut(storeName, value) {
|
|
||||||
return openDB().then(db => new Promise((resolve, reject) => {
|
|
||||||
const tx = db.transaction(storeName, 'readwrite');
|
|
||||||
const req = tx.objectStore(storeName).put(value);
|
|
||||||
req.onsuccess = () => resolve();
|
|
||||||
req.onerror = () => reject(req.error);
|
|
||||||
}));
|
|
||||||
}
|
|
||||||
|
|
||||||
function dbDelete(storeName, key) {
|
|
||||||
return openDB().then(db => new Promise((resolve, reject) => {
|
|
||||||
const tx = db.transaction(storeName, 'readwrite');
|
|
||||||
const req = tx.objectStore(storeName).delete(key);
|
|
||||||
req.onsuccess = () => resolve();
|
|
||||||
req.onerror = () => reject(req.error);
|
|
||||||
}));
|
|
||||||
}
|
|
||||||
|
|
||||||
// --- Upload logic ---

/**
 * Create a progress row for a new upload, insert it at the top of the
 * results list, and register it in uploadItems by id.
 *
 * Fix: the filename was interpolated into innerHTML unescaped (rendered as
 * "$(unknown)" in this revision) — a hostile filename could inject markup.
 * It is now assigned via textContent.
 */
function makeItem(id, filename, size) {
  const item = document.createElement('div');
  item.className = 'result-item';
  item.id = 'upload-' + id;
  item.innerHTML = `
    <div class="result-info">
      <h3></h3>
      <div class="meta">${formatBytes(size)} - Uploading (continues in background)...</div>
      <div class="progress-bar"><div class="progress" style="width: 5%"></div></div>
    </div>
  `;
  item.querySelector('h3').textContent = filename;
  results.insertBefore(item, results.firstChild);
  uploadItems[id] = item;
  return item;
}
|
|
||||||
|
|
||||||
function showSuccess(item, data) {
|
function showSuccess(item, data) {
|
||||||
item.className = 'result-item success';
|
item.className = 'result-item success';
|
||||||
item.innerHTML = `
|
item.innerHTML = `
|
||||||
|
|
@ -339,13 +280,12 @@ function showSuccess(item, data) {
|
||||||
`;
|
`;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
 * Render a duplicate-file prompt with Overwrite / Skip actions.
 * Overwrite re-runs uploadFile with action='overwrite'; Skip marks the row.
 *
 * Fix: file.name was interpolated into innerHTML unescaped — a hostile
 * filename could inject markup; it is now assigned via textContent.
 */
function showDuplicate(item, file, data) {
  item.className = 'result-item duplicate';
  const existingSize = data.existing_file.size ? formatBytes(data.existing_file.size) : 'unknown size';
  item.innerHTML = `
    <div class="result-info">
      <h3></h3>
      <div class="meta">"${data.existing_file.title}" already exists (${existingSize})</div>
    </div>
    <div class="duplicate-actions">
      <button class="btn-overwrite">Overwrite</button>
      <button class="btn-skip">Skip</button>
    </div>
  `;
  item.querySelector('h3').textContent = file.name;
  item.querySelector('.btn-overwrite').addEventListener('click', () => {
    // Re-upload the original File object with the overwrite action set.
    uploadFile(file, 'overwrite');
    item.remove();
  });
  item.querySelector('.btn-skip').addEventListener('click', () => {
    item.className = 'result-item error';
    item.innerHTML = `<div class="result-info"><h3></h3><div class="meta">Skipped (duplicate)</div></div>`;
    item.querySelector('h3').textContent = file.name;
  });
}
|
||||||
|
|
||||||
|
|
/**
 * Validate size, render a progress row, and upload one file to UPLOAD_URL.
 * XHR is used instead of fetch so upload progress events are available.
 * A 409 + duplicate response opens the duplicate prompt; a 2xx + success
 * response shows the success row; anything else shows an error.
 *
 * Fix: only onerror was handled on the XHR, so an aborted or timed-out
 * request left the awaited promise unsettled forever; onabort/ontimeout
 * now reject as well.
 */
async function uploadFile(file, action) {
  if (file.size > maxSize) {
    const item = document.createElement('div');
    item.className = 'result-item error';
    item.innerHTML = `<div class="result-info"><h3>${file.name}</h3><div class="meta">File too large (${formatBytes(file.size)}). Max: {{ space.max_file_size_mb }}MB</div></div>`;
    resultsEl.insertBefore(item, resultsEl.firstChild);
    return;
  }

  const item = document.createElement('div');
  item.className = 'result-item';
  item.innerHTML = `
    <div class="result-info">
      <h3>${file.name}</h3>
      <div class="meta">${formatBytes(file.size)} - Uploading...</div>
      <div class="progress-bar"><div class="progress" style="width: 5%"></div></div>
    </div>
  `;
  resultsEl.insertBefore(item, resultsEl.firstChild);

  try {
    const fd = new FormData();
    fd.append('file', file, file.name);
    fd.append('space', SPACE);
    if (action) fd.append('action', action);

    const xhr = new XMLHttpRequest();
    xhr.open('POST', UPLOAD_URL);

    // Live progress: update the bar and meta line as bytes go out.
    xhr.upload.addEventListener('progress', (e) => {
      if (!e.lengthComputable) return;
      const pct = Math.round((e.loaded / e.total) * 100);
      const bar = item.querySelector('.progress');
      if (bar) bar.style.width = pct + '%';
      const meta = item.querySelector('.meta');
      if (meta) meta.textContent = `${formatBytes(file.size)} - Uploading... ${pct}%`;
    });

    const result = await new Promise((resolve, reject) => {
      xhr.onload = () => {
        try {
          resolve({ status: xhr.status, data: JSON.parse(xhr.responseText) });
        } catch (e) {
          reject(new Error('Invalid response'));
        }
      };
      xhr.onerror = () => reject(new Error('Network error'));
      xhr.onabort = () => reject(new Error('Upload aborted'));
      xhr.ontimeout = () => reject(new Error('Upload timed out'));
      xhr.send(fd);
    });

    if (result.status === 409 && result.data.duplicate) {
      showDuplicate(item, file, result.data);
    } else if (result.status >= 200 && result.status < 300 && result.data.success) {
      showSuccess(item, result.data);
    } else {
      showError(item, file.name, result.data.error || 'Upload failed');
    }
  } catch (err) {
    showError(item, file.name, err.message);
  }
}
|
||||||
|
|
||||||
// Resolve with the service worker controlling this page, waiting up to 10s
// for the worker to claim it (first install); rejects on timeout.
function getSWController() {
  return new Promise((resolve, reject) => {
    const current = navigator.serviceWorker.controller;
    if (current) {
      resolve(current);
      return;
    }
    const timer = setTimeout(() => reject(new Error('SW timeout')), 10000);
    navigator.serviceWorker.addEventListener('controllerchange', () => {
      clearTimeout(timer);
      resolve(navigator.serviceWorker.controller);
    }, { once: true });
  });
}
|
|
||||||
|
|
||||||
// --- Listen for messages from service worker ---

// Worker -> page events: route each upload outcome to its row in the UI.
navigator.serviceWorker.addEventListener('message', (event) => {
  const { type, id, data, error, filename } = event.data;
  const findItem = () => uploadItems[id] || document.getElementById('upload-' + id);

  if (type === 'UPLOAD_COMPLETE') {
    const item = findItem();
    if (item) showSuccess(item, data);
  } else if (type === 'UPLOAD_DUPLICATE') {
    const item = findItem();
    if (item) showDuplicate(item, id, data, filename);
  } else if (type === 'UPLOAD_ERROR') {
    const item = findItem();
    if (item) showError(item, item.querySelector('h3')?.textContent || 'File', error);
  } else if (type === 'PENDING_RESULTS') {
    // Outcomes persisted while the page was closed: render any not yet shown.
    for (const r of event.data.results || []) {
      if (r.space && r.space !== SPACE) continue; // belongs to another space
      if (document.getElementById('upload-' + r.id)) continue; // already shown

      const item = document.createElement('div');
      item.className = 'result-item';
      item.id = 'upload-' + r.id;
      results.insertBefore(item, results.firstChild);

      if (r.status === 'success') {
        showSuccess(item, r.data);
        // Auto-clear once displayed.
        navigator.serviceWorker.controller?.postMessage({ type: 'CLEAR_RESULT', id: r.id });
      } else if (r.status === 'duplicate') {
        showDuplicate(item, r.id, r.data, r.filename);
      } else if (r.status === 'error') {
        showError(item, r.filename || 'File', r.error);
        navigator.serviceWorker.controller?.postMessage({ type: 'CLEAR_RESULT', id: r.id });
      }
    }
  }
});
|
|
||||||
|
|
||||||
// On load, ask the worker for results that completed while the page was away.
getSWController()
  .then((ctrl) => ctrl.postMessage({ type: 'GET_RESULTS' }))
  .catch(() => {});
|
|
||||||
|
|
||||||
function copyLink(btn, url) {
|
function copyLink(btn, url) {
|
||||||
navigator.clipboard.writeText(url).then(() => {
|
navigator.clipboard.writeText(url).then(() => {
|
||||||
const orig = btn.textContent;
|
const orig = btn.textContent;
|
||||||
|
|
|
||||||
Loading…
Reference in New Issue