diff --git a/config/middleware.py b/config/middleware.py
index e3c3e98..5389aac 100644
--- a/config/middleware.py
+++ b/config/middleware.py
@@ -36,7 +36,7 @@ class HostBasedURLConfMiddleware:
match = self.RFILES_SUBDOMAIN_PATTERN.match(host)
if match:
subdomain = match.group(1)
- if subdomain not in ('www',):
+ if subdomain not in ('www', 'direct'):
request.shared_space_slug = subdomain
request.urlconf = 'config.urls_shared_space'
set_urlconf('config.urls_shared_space')
diff --git a/config/settings.py b/config/settings.py
index 2702c4e..9ff07ca 100644
--- a/config/settings.py
+++ b/config/settings.py
@@ -122,6 +122,9 @@ REST_FRAMEWORK = {
CORS_ALLOWED_ORIGINS = [
h.strip() for h in os.environ.get('CORS_ALLOWED_ORIGINS', 'http://localhost:3000,http://localhost:8000').split(',') if h.strip()
]
+CORS_ALLOWED_ORIGIN_REGEXES = [
+    r'^https://[a-z0-9-]+\.rfiles\.online$',
+]
CORS_ALLOW_ALL_ORIGINS = DEBUG
# Celery
diff --git a/config/urls.py b/config/urls.py
index b14047a..e0cd356 100644
--- a/config/urls.py
+++ b/config/urls.py
@@ -11,6 +11,7 @@ from django.db import connection
from files.urls import api_urlpatterns as files_api_urls, public_urlpatterns as files_public_urls
from portal.views import ServiceWorkerView, ManifestView
+from portal.views_shared_space import DirectUploadAPIView
def health_check(request):
@@ -38,6 +39,7 @@ urlpatterns = [
path("manifest.json", ManifestView.as_view(), name="manifest"),
path("api/health/", health_check, name="health_check"),
+ path("api/upload/", DirectUploadAPIView.as_view(), name="direct_upload"),
path("admin/", admin.site.urls),
path("api/v1/", include(files_api_urls)),
path("s/", include(files_public_urls)),
diff --git a/docker-compose.prod.yml b/docker-compose.prod.yml
index 8589555..1634225 100644
--- a/docker-compose.prod.yml
+++ b/docker-compose.prod.yml
@@ -47,7 +47,7 @@ services:
- CELERY_RESULT_BACKEND=redis://:${REDIS_PASSWORD}@redis:6379/0
- DJANGO_SETTINGS_MODULE=config.settings
- DEBUG=False
- - ALLOWED_HOSTS=rfiles.online,www.rfiles.online,.rfiles.online,localhost
+ - ALLOWED_HOSTS=rfiles.online,www.rfiles.online,.rfiles.online,direct.rfiles.online,localhost
- SHARE_BASE_URL=https://rfiles.online
- SECRET_KEY=${SECRET_KEY}
depends_on:
@@ -57,8 +57,14 @@ services:
condition: service_healthy
labels:
- "traefik.enable=true"
+ # Main router (via Cloudflare tunnel → port 80)
- "traefik.http.routers.rfiles.rule=Host(`rfiles.online`) || Host(`www.rfiles.online`) || HostRegexp(`{subdomain:[a-z0-9-]+}.rfiles.online`)"
- "traefik.http.routers.rfiles.entrypoints=web"
+ # Direct upload router (bypasses Cloudflare, TLS via Let's Encrypt)
+ - "traefik.http.routers.rfiles-direct.rule=Host(`direct.rfiles.online`)"
+ - "traefik.http.routers.rfiles-direct.entrypoints=websecure"
+ - "traefik.http.routers.rfiles-direct.tls=true"
+ - "traefik.http.routers.rfiles-direct.tls.certresolver=letsencrypt"
- "traefik.http.services.rfiles.loadbalancer.server.port=8000"
- "traefik.docker.network=traefik-public"
networks:
diff --git a/portal/templates/portal/shared_space/home.html b/portal/templates/portal/shared_space/home.html
index 5e17a1f..5254042 100644
--- a/portal/templates/portal/shared_space/home.html
+++ b/portal/templates/portal/shared_space/home.html
@@ -248,8 +248,8 @@ const uploadZone = document.getElementById('uploadZone');
const fileInput = document.getElementById('fileInput');
const results = document.getElementById('results');
const maxSize = {{ space.max_file_size_mb }} * 1024 * 1024; // 0 = unlimited
-const CHUNK_SIZE = 80 * 1024 * 1024; // 80MB chunks (under Cloudflare's 100MB limit)
-const CHUNK_THRESHOLD = 90 * 1024 * 1024; // Use chunked upload for files > 90MB
+// All uploads go direct to server, bypassing Cloudflare's 100MB limit
+const UPLOAD_URL = location.hostname.endsWith('rfiles.online') ? 'https://direct.rfiles.online/api/upload/' : '/api/upload/';
uploadZone.addEventListener('click', () => fileInput.click());
uploadZone.addEventListener('dragover', (e) => { e.preventDefault(); uploadZone.classList.add('dragover'); });
@@ -266,7 +266,15 @@ function formatBytes(bytes) {
return parseFloat((bytes / Math.pow(k, i)).toFixed(2)) + ' ' + sizes[i];
}
-function makeItem(file, action) {
+function uploadFile(file, action) {
+ if (maxSize > 0 && file.size > maxSize) {
+ const item = document.createElement('div');
+ item.className = 'result-item error';
+ item.innerHTML = `
${file.name}
File too large (${formatBytes(file.size)}). Max: {{ space.max_file_size_mb }}MB
`;
+ results.insertBefore(item, results.firstChild);
+ return;
+ }
+
let item = action ? document.getElementById('dup-' + file.name) : null;
if (!item) {
item = document.createElement('div');
@@ -282,71 +290,10 @@ function makeItem(file, action) {
`;
- return item;
-}
-function showSuccess(item, data) {
- item.classList.add('success');
- item.innerHTML = `
-
-
${data.file.title}
-
${formatBytes(data.file.size)} - Uploaded successfully
-
-
-
-
-
- `;
-}
-
-function showDuplicate(item, file, data, action) {
- item.id = 'dup-' + file.name;
- item.classList.add('duplicate');
- const existingSize = data.existing_file.size ? formatBytes(data.existing_file.size) : 'unknown size';
- item.innerHTML = `
-
-
${file.name}
-
"${data.existing_file.title}" already exists (${existingSize})
-
-
-
-
-
- `;
- item.querySelector('.btn-overwrite').addEventListener('click', () => uploadFile(file, 'overwrite'));
- item.querySelector('.btn-skip').addEventListener('click', () => {
- item.classList.remove('duplicate');
- item.classList.add('error');
- item.innerHTML = `${file.name}
Skipped (duplicate)
`;
- });
-}
-
-function showError(item, msg) {
- item.classList.add('error');
- item.innerHTML = `${item.querySelector('h3')?.textContent || 'File'}
${msg}
`;
-}
-
-function uploadFile(file, action) {
- if (maxSize > 0 && file.size > maxSize) {
- const item = document.createElement('div');
- item.className = 'result-item error';
- item.innerHTML = `${file.name}
File too large (${formatBytes(file.size)}). Max: {{ space.max_file_size_mb }}MB
`;
- results.insertBefore(item, results.firstChild);
- return;
- }
-
- if (file.size > CHUNK_THRESHOLD) {
- uploadFileChunked(file, action);
- } else {
- uploadFileSimple(file, action);
- }
-}
-
-// Simple upload for files < 90MB
-function uploadFileSimple(file, action) {
- const item = makeItem(file, action);
const formData = new FormData();
formData.append('file', file);
+ formData.append('space', '{{ space.slug }}');
if (action) formData.append('action', action);
const xhr = new XMLHttpRequest();
@@ -355,83 +302,59 @@ function uploadFileSimple(file, action) {
});
xhr.addEventListener('load', () => {
if (xhr.status === 200) {
- showSuccess(item, JSON.parse(xhr.responseText));
+ const data = JSON.parse(xhr.responseText);
+ item.classList.add('success');
+ item.innerHTML = `
+
+
${data.file.title}
+
${formatBytes(data.file.size)} - Uploaded successfully
+
+
+
+
+
+ `;
} else if (xhr.status === 409) {
- showDuplicate(item, file, JSON.parse(xhr.responseText));
+ const data = JSON.parse(xhr.responseText);
+ item.id = 'dup-' + file.name;
+ item.classList.add('duplicate');
+ const existingSize = data.existing_file.size ? formatBytes(data.existing_file.size) : 'unknown size';
+ item.innerHTML = `
+
+
${file.name}
+
"${data.existing_file.title}" already exists (${existingSize})
+
+
+
+
+
+ `;
+ item.querySelector('.btn-overwrite').addEventListener('click', () => uploadFile(file, 'overwrite'));
+ item.querySelector('.btn-skip').addEventListener('click', () => {
+ item.classList.remove('duplicate');
+ item.classList.add('error');
+ item.innerHTML = `${file.name}
Skipped (duplicate)
`;
+ });
} else {
let msg = 'Upload failed';
try { msg = JSON.parse(xhr.responseText).error || msg; } catch(e) {}
- showError(item, msg);
+ item.classList.add('error');
+ item.querySelector('.meta').textContent = msg;
+ const pb = item.querySelector('.progress-bar');
+ if (pb) pb.remove();
}
});
- xhr.addEventListener('error', () => showError(item, 'Upload failed - network error'));
- xhr.open('POST', '{% url "shared_space_upload" %}');
+ xhr.addEventListener('error', () => {
+ item.classList.add('error');
+ item.querySelector('.meta').textContent = 'Upload failed - network error';
+ const pb = item.querySelector('.progress-bar');
+ if (pb) pb.remove();
+ });
+
+ xhr.open('POST', UPLOAD_URL);
xhr.send(formData);
}
-// Chunked upload for files >= 90MB
-async function uploadFileChunked(file, action) {
- const item = makeItem(file, action);
- const totalChunks = Math.ceil(file.size / CHUNK_SIZE);
-
- try {
- // Step 1: Initialize upload
- const initForm = new FormData();
- initForm.append('filename', file.name);
- initForm.append('total_size', file.size);
- initForm.append('total_chunks', totalChunks);
- initForm.append('mime_type', file.type || 'application/octet-stream');
- if (action) initForm.append('action', action);
-
- const initResp = await fetch('{% url "chunked_upload_init" %}', { method: 'POST', body: initForm });
- const initData = await initResp.json();
-
- if (initResp.status === 409) {
- showDuplicate(item, file, initData);
- return;
- }
- if (!initResp.ok) {
- showError(item, initData.error || 'Failed to start upload');
- return;
- }
-
- const uploadId = initData.upload_id;
-
- // Step 2: Send chunks sequentially
- for (let i = 0; i < totalChunks; i++) {
- const start = i * CHUNK_SIZE;
- const end = Math.min(start + CHUNK_SIZE, file.size);
- const chunk = file.slice(start, end);
-
- const chunkForm = new FormData();
- chunkForm.append('upload_id', uploadId);
- chunkForm.append('chunk_index', i);
- chunkForm.append('chunk', chunk, `chunk_${i}`);
-
- const chunkResp = await fetch('{% url "chunked_upload_chunk" %}', { method: 'POST', body: chunkForm });
- const chunkData = await chunkResp.json();
-
- if (!chunkResp.ok) {
- showError(item, chunkData.error || 'Chunk upload failed');
- return;
- }
-
- // Update progress
- const percent = ((i + 1) / totalChunks) * 100;
- item.querySelector('.progress').style.width = percent + '%';
- item.querySelector('.meta').textContent = `${formatBytes(file.size)} - Uploading chunk ${i + 1}/${totalChunks}...`;
-
- // If this was the last chunk, the response includes the final file data
- if (chunkData.success) {
- showSuccess(item, chunkData);
- return;
- }
- }
- } catch (err) {
- showError(item, 'Upload failed - ' + err.message);
- }
-}
-
function copyLink(btn, url) {
navigator.clipboard.writeText(url).then(() => {
const orig = btn.textContent;
diff --git a/portal/views_shared_space.py b/portal/views_shared_space.py
index 98dbcf1..19d743e 100644
--- a/portal/views_shared_space.py
+++ b/portal/views_shared_space.py
@@ -139,6 +139,91 @@ class SharedSpaceUploadAPIView(View):
})
+@method_decorator(csrf_exempt, name='dispatch')
+class DirectUploadAPIView(View):
+    """Handle uploads sent directly to direct.rfiles.online, bypassing Cloudflare's 100MB body limit.
+
+    CSRF-exempt and unauthenticated: the target space comes from the 'space' form field (an active SharedSpace slug) rather than from the subdomain.
+    """
+
+ def post(self, request):
+ space_slug = request.POST.get('space', '')
+ if not space_slug:
+ return JsonResponse({'error': 'Missing space parameter'}, status=400)
+
+ space = get_object_or_404(SharedSpace, slug=space_slug, is_active=True)
+
+ if not request.FILES.get('file'):
+ return JsonResponse({'error': 'No file provided'}, status=400)
+
+ uploaded_file = request.FILES['file']
+
+ if space.max_file_size_mb > 0:
+ max_size_bytes = space.max_file_size_mb * 1024 * 1024
+ if uploaded_file.size > max_size_bytes:
+ return JsonResponse({
+ 'error': f'File too large. Maximum size is {space.max_file_size_mb}MB'
+ }, status=400)
+
+ action = request.POST.get('action', '')
+
+ existing = MediaFile.objects.filter(
+ shared_space=space,
+ original_filename=uploaded_file.name,
+ ).first()
+
+ if existing and action != 'overwrite':
+ existing_share = existing.public_shares.filter(is_active=True).first()
+ return JsonResponse({
+ 'duplicate': True,
+ 'existing_file': {
+ 'id': str(existing.id),
+ 'title': existing.title,
+ 'filename': existing.original_filename,
+ 'size': existing.file_size,
+ 'share_url': existing_share.get_public_url() if existing_share else None,
+ },
+ }, status=409)
+
+ if existing and action == 'overwrite':
+ existing.file.delete(save=False)
+ existing.delete()
+
+ title = request.POST.get('title', '') or uploaded_file.name
+ description = request.POST.get('description', '')
+
+ media_file = MediaFile.objects.create(
+ file=uploaded_file,
+ original_filename=uploaded_file.name,
+ title=title,
+ description=description,
+ mime_type=uploaded_file.content_type or 'application/octet-stream',
+ uploaded_by=request.user if request.user.is_authenticated else None,
+ shared_space=space,
+ )
+
+ share = PublicShare.objects.create(
+ media_file=media_file,
+ created_by=request.user if request.user.is_authenticated else None,
+ note=f'Uploaded to topic: {space.slug}',
+ )
+
+ return JsonResponse({
+ 'success': True,
+ 'file': {
+ 'id': str(media_file.id),
+ 'title': media_file.title,
+ 'filename': media_file.original_filename,
+ 'size': media_file.file_size,
+ 'mime_type': media_file.mime_type,
+ },
+ 'share': {
+ 'token': share.token,
+ 'url': share.get_public_url(),
+ }
+ })
+
+
@method_decorator(csrf_exempt, name='dispatch')
class ChunkedUploadInitView(View):
"""Initialize a chunked upload session."""