Add chunked upload to bypass Cloudflare 100MB limit

Cloudflare free plan caps request bodies at 100MB. Files > 90MB now
upload in 80MB chunks: init session, send chunks sequentially, server
reassembles and creates MediaFile. Small files still use direct upload.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Jeff Emmett 2026-02-10 16:47:29 +00:00
parent d1b44e9b1e
commit 6eabd8eaa2
3 changed files with 319 additions and 106 deletions

View File

@ -10,6 +10,8 @@ from portal.views_shared_space import (
SharedSpaceLogoutView,
SharedSpaceUploadAPIView,
SharedSpaceFileListView,
ChunkedUploadInitView,
ChunkedUploadChunkView,
)
@ -18,5 +20,7 @@ urlpatterns = [
path('login/', SharedSpaceLoginView.as_view(), name='shared_space_login'),
path('logout/', SharedSpaceLogoutView.as_view(), name='shared_space_logout'),
path('api/upload/', SharedSpaceUploadAPIView.as_view(), name='shared_space_upload'),
path('api/upload/init/', ChunkedUploadInitView.as_view(), name='chunked_upload_init'),
path('api/upload/chunk/', ChunkedUploadChunkView.as_view(), name='chunked_upload_chunk'),
path('files/', SharedSpaceFileListView.as_view(), name='shared_space_files'),
]

View File

@ -248,60 +248,25 @@ const uploadZone = document.getElementById('uploadZone');
// Upload UI handles and size policy. maxSize is rendered server-side from the
// SharedSpace model; CHUNK_SIZE/CHUNK_THRESHOLD keep each request body under
// Cloudflare's 100MB free-plan cap.
const fileInput = document.getElementById('fileInput');
const results = document.getElementById('results');
const maxSize = {{ space.max_file_size_mb }} * 1024 * 1024; // 0 = unlimited
const CHUNK_SIZE = 80 * 1024 * 1024; // 80MB chunks (under Cloudflare's 100MB limit)
const CHUNK_THRESHOLD = 90 * 1024 * 1024; // Use chunked upload for files > 90MB
// Wire the upload zone: click-to-browse, drag & drop, and file-picker change.
// (The old multi-line listeners were left in alongside the compacted ones by a
// bad merge, registering each handler twice; only one registration belongs here.)
uploadZone.addEventListener('click', () => fileInput.click());
uploadZone.addEventListener('dragover', (e) => { e.preventDefault(); uploadZone.classList.add('dragover'); });
uploadZone.addEventListener('dragleave', () => { uploadZone.classList.remove('dragover'); });
uploadZone.addEventListener('drop', (e) => { e.preventDefault(); uploadZone.classList.remove('dragover'); handleFiles(e.dataTransfer.files); });
// Clearing .value lets the user pick the same file twice and still get 'change'.
fileInput.addEventListener('change', () => { handleFiles(fileInput.files); fileInput.value = ''; });
// Dispatch each selected/dropped file to the uploader. (A stale duplicate
// definition from the pre-refactor version was removed.)
function handleFiles(files) {
  Array.from(files).forEach((f) => uploadFile(f));
}
// Human-readable byte count, e.g. 1536 -> "1.5 KB". (The merged diff left
// `const k` declared twice — a SyntaxError; this is the deduplicated version.)
function formatBytes(bytes) {
  if (bytes === 0) return '0 Bytes';
  const k = 1024;
  const sizes = ['Bytes', 'KB', 'MB', 'GB'];
  const i = Math.floor(Math.log(bytes) / Math.log(k));
  return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
}
function uploadFile(file, action) {
// Check file size before upload (skip if unlimited)
if (maxSize > 0 && file.size > maxSize) {
const item = document.createElement('div');
item.className = 'result-item error';
item.innerHTML = `
<div class="result-info">
<h3>${file.name}</h3>
<div class="meta">File too large (${formatBytes(file.size)}). Max: {{ space.max_file_size_mb }}MB</div>
</div>
`;
results.insertBefore(item, results.firstChild);
return;
}
// Create or reuse result item
function makeItem(file, action) {
let item = action ? document.getElementById('dup-' + file.name) : null;
if (!item) {
item = document.createElement('div');
@ -317,89 +282,161 @@ function uploadFile(file, action) {
<div class="progress-bar"><div class="progress" style="width: 0%"></div></div>
</div>
`;
return item;
}
// Render the success state for a finished upload: title, size, and a
// copyable share link. `data` is the server payload ({file, share}).
function showSuccess(item, data) {
  const { file, share } = data;
  const markup = `
<div class="result-info">
<h3>${file.title}</h3>
<div class="meta">${formatBytes(file.size)} - Uploaded successfully</div>
</div>
<div class="share-link">
<input type="text" value="${share.url}" readonly onclick="this.select()">
<button class="copy-btn" onclick="copyLink(this, '${share.url}')">Copy</button>
</div>
`;
  item.classList.add('success');
  item.innerHTML = markup;
}
// Render the duplicate-conflict state (server 409) and wire the
// Overwrite / Skip buttons. All call sites pass (item, file, data); the old
// trailing `action` parameter was never used and has been dropped.
// NOTE(review): file.name / existing_file.title are interpolated into
// innerHTML unescaped — potential XSS if filenames are attacker-controlled.
function showDuplicate(item, file, data) {
  item.id = 'dup-' + file.name;
  item.classList.add('duplicate');
  // Only null/undefined mean "unknown"; a legitimate 0-byte file shows "0 Bytes".
  const existingSize = data.existing_file.size != null ? formatBytes(data.existing_file.size) : 'unknown size';
  item.innerHTML = `
<div class="result-info">
<h3>${file.name}</h3>
<div class="meta">"${data.existing_file.title}" already exists (${existingSize})</div>
</div>
<div class="duplicate-actions">
<button class="btn-overwrite">Overwrite</button>
<button class="btn-skip">Skip</button>
</div>
`;
  // Retrying with action='overwrite' reuses this row via its 'dup-' id.
  item.querySelector('.btn-overwrite').addEventListener('click', () => uploadFile(file, 'overwrite'));
  item.querySelector('.btn-skip').addEventListener('click', () => {
    item.classList.remove('duplicate');
    item.classList.add('error');
    item.innerHTML = `<div class="result-info"><h3>${file.name}</h3><div class="meta">Skipped (duplicate)</div></div>`;
  });
}
// Swap the result row into its error state, preserving the filename heading
// when the row already has one (falls back to the generic label "File").
function showError(item, msg) {
  const heading = item.querySelector('h3');
  const label = heading?.textContent || 'File';
  item.classList.add('error');
  item.innerHTML = `<div class="result-info"><h3>${label}</h3><div class="meta">${msg}</div></div>`;
}
// Upload entry point: reject files over the per-space cap up front, then
// route to chunked upload (large files) or a single direct POST.
function uploadFile(file, action) {
  const tooLarge = maxSize > 0 && file.size > maxSize;
  if (tooLarge) {
    const row = document.createElement('div');
    row.className = 'result-item error';
    row.innerHTML = `<div class="result-info"><h3>${file.name}</h3><div class="meta">File too large (${formatBytes(file.size)}). Max: {{ space.max_file_size_mb }}MB</div></div>`;
    results.insertBefore(row, results.firstChild);
    return;
  }
  const send = file.size > CHUNK_THRESHOLD ? uploadFileChunked : uploadFileSimple;
  send(file, action);
}
// Direct single-request upload for files at or below CHUNK_THRESHOLD.
// Reconstructed clean version: the merged diff had left the pre-refactor
// inline rendering interleaved with the new showSuccess/showDuplicate/showError
// calls, so progress fired twice and every result was rendered twice.
function uploadFileSimple(file, action) {
  const item = makeItem(file, action);
  const formData = new FormData();
  formData.append('file', file);
  if (action) formData.append('action', action);
  const xhr = new XMLHttpRequest();
  // Browser reports real byte-level progress for the single POST.
  xhr.upload.addEventListener('progress', (e) => {
    if (e.lengthComputable) item.querySelector('.progress').style.width = (e.loaded / e.total * 100) + '%';
  });
  xhr.addEventListener('load', () => {
    if (xhr.status === 200) {
      showSuccess(item, JSON.parse(xhr.responseText));
    } else if (xhr.status === 409) {
      // Duplicate filename on the server; let the user overwrite or skip.
      showDuplicate(item, file, JSON.parse(xhr.responseText));
    } else {
      let msg = 'Upload failed';
      try { msg = JSON.parse(xhr.responseText).error || msg; } catch (e) {}
      showError(item, msg);
    }
  });
  xhr.addEventListener('error', () => showError(item, 'Upload failed - network error'));
  xhr.open('POST', '{% url "shared_space_upload" %}');
  xhr.send(formData);
}
// Chunked upload for files over CHUNK_THRESHOLD: init a session, then send
// CHUNK_SIZE slices sequentially; the server finalizes on the last chunk and
// returns the created file + share in that chunk's response.
async function uploadFileChunked(file, action) {
  const item = makeItem(file, action);
  const totalChunks = Math.ceil(file.size / CHUNK_SIZE);
  try {
    // Step 1: initialize the session (server also runs the duplicate check here).
    const initForm = new FormData();
    initForm.append('filename', file.name);
    initForm.append('total_size', file.size);
    initForm.append('total_chunks', totalChunks);
    initForm.append('mime_type', file.type || 'application/octet-stream');
    if (action) initForm.append('action', action);
    const initResp = await fetch('{% url "chunked_upload_init" %}', { method: 'POST', body: initForm });
    const initData = await initResp.json();
    if (initResp.status === 409) {
      showDuplicate(item, file, initData);
      return;
    }
    if (!initResp.ok) {
      showError(item, initData.error || 'Failed to start upload');
      return;
    }
    const uploadId = initData.upload_id;
    // Step 2: send chunks strictly in order — the server counts received
    // chunks and assembles when the set is complete.
    for (let i = 0; i < totalChunks; i++) {
      const start = i * CHUNK_SIZE;
      const end = Math.min(start + CHUNK_SIZE, file.size);
      const chunkForm = new FormData();
      chunkForm.append('upload_id', uploadId);
      chunkForm.append('chunk_index', i);
      chunkForm.append('chunk', file.slice(start, end), `chunk_${i}`);
      const chunkResp = await fetch('{% url "chunked_upload_chunk" %}', { method: 'POST', body: chunkForm });
      const chunkData = await chunkResp.json();
      if (!chunkResp.ok) {
        showError(item, chunkData.error || 'Chunk upload failed');
        return;
      }
      const percent = ((i + 1) / totalChunks) * 100;
      item.querySelector('.progress').style.width = percent + '%';
      item.querySelector('.meta').textContent = `${formatBytes(file.size)} - Uploading chunk ${i + 1}/${totalChunks}...`;
      // The final chunk's response carries {success, file, share}.
      if (chunkData.success) {
        showSuccess(item, chunkData);
        return;
      }
    }
    // Every chunk was accepted but the server never reported completion
    // (e.g. a total_chunks mismatch). Surface it instead of leaving the row
    // stuck on "Uploading chunk n/n...".
    showError(item, 'Upload incomplete - server did not finalize the file');
  } catch (err) {
    showError(item, 'Upload failed - ' + err.message);
  }
}
// Copy the share URL to the clipboard and flash "Copied!" on the button for
// two seconds. (The merged diff left both the old and new locals and two
// setTimeout calls; this is the deduplicated version.)
function copyLink(btn, url) {
  navigator.clipboard.writeText(url).then(() => {
    const orig = btn.textContent;
    btn.textContent = 'Copied!';
    setTimeout(() => { btn.textContent = orig; }, 2000);
  });
}
</script>

View File

@ -5,6 +5,11 @@ Each topic is accessible via a subdomain (e.g., cofi.rfiles.online).
Anyone can view files and upload new ones - no password required.
"""
import os
import uuid
from django.conf import settings
from django.core.files.base import File
from django.shortcuts import render, redirect, get_object_or_404
from django.http import JsonResponse, Http404
from django.views import View
@ -13,6 +18,8 @@ from django.utils.decorators import method_decorator
from files.models import SharedSpace, MediaFile, PublicShare
CHUNK_UPLOAD_DIR = os.path.join(settings.MEDIA_ROOT, 'chunks')
def get_topic_or_404(request):
"""Get the topic (shared space) from the request's subdomain slug."""
@ -132,6 +139,171 @@ class SharedSpaceUploadAPIView(View):
})
@method_decorator(csrf_exempt, name='dispatch')
class ChunkedUploadInitView(View):
    """Initialize a chunked upload session.

    The client POSTs filename/total_size/total_chunks/mime_type (plus an
    optional action='overwrite'). On success a per-session directory is
    created under CHUNK_UPLOAD_DIR, its metadata written to meta.json, and
    the returned upload_id keys the subsequent ChunkedUploadChunkView calls.
    """

    def post(self, request):
        import json

        space = get_topic_or_404(request)
        filename = request.POST.get('filename', '')
        mime_type = request.POST.get('mime_type', 'application/octet-stream')
        action = request.POST.get('action', '')
        # Malformed numeric fields are a client error (400), not a 500.
        try:
            total_size = int(request.POST.get('total_size', 0))
            total_chunks = int(request.POST.get('total_chunks', 0))
        except (TypeError, ValueError):
            return JsonResponse({'error': 'Invalid parameters'}, status=400)

        if not filename or total_chunks < 1 or total_size < 0:
            return JsonResponse({'error': 'Invalid parameters'}, status=400)

        # Enforce the per-space cap on the declared total so oversize uploads
        # are rejected before any chunk bytes are transferred.
        if space.max_file_size_mb > 0:
            max_size_bytes = space.max_file_size_mb * 1024 * 1024
            if total_size > max_size_bytes:
                return JsonResponse({
                    'error': f'File too large. Maximum size is {space.max_file_size_mb}MB'
                }, status=400)

        # Duplicate check mirrors the direct-upload endpoint: 409 unless the
        # client explicitly asked to overwrite.
        existing = MediaFile.objects.filter(
            shared_space=space,
            original_filename=filename,
        ).first()
        if existing and action != 'overwrite':
            existing_share = existing.public_shares.filter(is_active=True).first()
            return JsonResponse({
                'duplicate': True,
                'existing_file': {
                    'id': str(existing.id),
                    'title': existing.title,
                    'filename': existing.original_filename,
                    'size': existing.file_size,
                    'share_url': existing_share.get_public_url() if existing_share else None,
                },
            }, status=409)

        # The uuid4 session id doubles as the directory name, so the
        # client-supplied filename never touches a filesystem path.
        upload_id = str(uuid.uuid4())
        chunk_dir = os.path.join(CHUNK_UPLOAD_DIR, upload_id)
        os.makedirs(chunk_dir, exist_ok=True)

        meta = {
            'filename': filename,
            'total_size': total_size,
            'total_chunks': total_chunks,
            'mime_type': mime_type,
            'space_id': str(space.id),
            'action': action,
        }
        with open(os.path.join(chunk_dir, 'meta.json'), 'w') as f:
            json.dump(meta, f)

        return JsonResponse({'upload_id': upload_id})
@method_decorator(csrf_exempt, name='dispatch')
class ChunkedUploadChunkView(View):
    """Receive one chunk; assemble and finalize once every chunk is on disk.

    Chunks are stored as zero-padded index files inside the session directory
    created by ChunkedUploadInitView. The response to the final chunk carries
    the created MediaFile and its PublicShare.
    """

    def post(self, request):
        import json
        import shutil

        space = get_topic_or_404(request)
        upload_id = request.POST.get('upload_id', '')
        chunk_file = request.FILES.get('chunk')
        # A non-numeric chunk_index is a client error, not a server crash.
        try:
            chunk_index = int(request.POST.get('chunk_index', -1))
        except (TypeError, ValueError):
            return JsonResponse({'error': 'Invalid parameters'}, status=400)
        if not upload_id or chunk_index < 0 or not chunk_file:
            return JsonResponse({'error': 'Invalid parameters'}, status=400)

        chunk_dir = os.path.join(CHUNK_UPLOAD_DIR, upload_id)
        meta_path = os.path.join(chunk_dir, 'meta.json')
        if not os.path.isdir(chunk_dir) or not os.path.exists(meta_path):
            return JsonResponse({'error': 'Invalid upload_id'}, status=400)

        with open(meta_path) as f:
            meta = json.load(f)

        # A session may only be completed from the space that opened it.
        if meta['space_id'] != str(space.id):
            return JsonResponse({'error': 'Space mismatch'}, status=403)

        total_chunks = meta['total_chunks']
        # Reject out-of-range indexes so stray files can't satisfy the
        # completeness check below.
        if chunk_index >= total_chunks:
            return JsonResponse({'error': 'Invalid chunk index'}, status=400)

        # Persist this chunk under its zero-padded index.
        chunk_path = os.path.join(chunk_dir, f'{chunk_index:06d}')
        with open(chunk_path, 'wb') as f:
            for part in chunk_file.chunks():
                f.write(part)

        # Count only the expected chunk files — not everything in the
        # directory — so meta.json or a leftover 'assembled' file from an
        # interrupted finalize can never fake completeness.
        received = sum(
            1 for i in range(total_chunks)
            if os.path.exists(os.path.join(chunk_dir, f'{i:06d}'))
        )
        if received < total_chunks:
            return JsonResponse({
                'received': received,
                'total': total_chunks,
            })

        # All chunks present: concatenate them in index order.
        assembled_path = os.path.join(chunk_dir, 'assembled')
        with open(assembled_path, 'wb') as out:
            for i in range(total_chunks):
                with open(os.path.join(chunk_dir, f'{i:06d}'), 'rb') as part:
                    shutil.copyfileobj(part, out)

        # Honor an overwrite request recorded at init time.
        if meta['action'] == 'overwrite':
            existing = MediaFile.objects.filter(
                shared_space=space,
                original_filename=meta['filename'],
            ).first()
            if existing:
                existing.file.delete(save=False)
                existing.delete()

        # Create the MediaFile (Django copies the assembled file into storage)
        # and a public share, mirroring the direct-upload endpoint.
        with open(assembled_path, 'rb') as f:
            django_file = File(f, name=meta['filename'])
            media_file = MediaFile.objects.create(
                file=django_file,
                original_filename=meta['filename'],
                title=meta['filename'],
                mime_type=meta['mime_type'],
                uploaded_by=request.user if request.user.is_authenticated else None,
                shared_space=space,
            )
        share = PublicShare.objects.create(
            media_file=media_file,
            created_by=request.user if request.user.is_authenticated else None,
            note=f'Uploaded to topic: {space.slug}',
        )

        # The session directory (chunks + assembled copy) is no longer needed.
        shutil.rmtree(chunk_dir, ignore_errors=True)

        return JsonResponse({
            'success': True,
            'file': {
                'id': str(media_file.id),
                'title': media_file.title,
                'filename': media_file.original_filename,
                'size': media_file.file_size,
                'mime_type': media_file.mime_type,
            },
            'share': {
                'token': share.token,
                'url': share.get_public_url(),
            }
        })
class SharedSpaceFileListView(View):
"""List all files in the topic."""