Initial rDesign service — Scribus-based document design & PDF generation
Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
commit
a686a4a142
|
|
@ -0,0 +1,9 @@
|
||||||
|
# rDesign environment
|
||||||
|
# Copy to .env and fill in values
|
||||||
|
|
||||||
|
# Infisical (secrets fetched at runtime)
|
||||||
|
INFISICAL_CLIENT_ID=
|
||||||
|
INFISICAL_CLIENT_SECRET=
|
||||||
|
|
||||||
|
# VNC password for the studio container
|
||||||
|
VNC_PASSWORD=changeme
|
||||||
|
|
@ -0,0 +1,7 @@
|
||||||
|
output/
|
||||||
|
jobs/
|
||||||
|
__pycache__/
|
||||||
|
*.pyc
|
||||||
|
.env
|
||||||
|
*.egg-info/
|
||||||
|
.venv/
|
||||||
|
|
@ -0,0 +1,41 @@
|
||||||
|
FROM python:3.11-slim AS base
|
||||||
|
|
||||||
|
# Install Scribus and dependencies
|
||||||
|
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||||
|
scribus \
|
||||||
|
scribus-data \
|
||||||
|
xvfb \
|
||||||
|
fonts-liberation \
|
||||||
|
fonts-dejavu \
|
||||||
|
fonts-noto \
|
||||||
|
fonts-noto-color-emoji \
|
||||||
|
imagemagick \
|
||||||
|
ghostscript \
|
||||||
|
curl \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
# Install Python dependencies
|
||||||
|
COPY requirements.txt /app/requirements.txt
|
||||||
|
RUN pip install --no-cache-dir -r /app/requirements.txt
|
||||||
|
|
||||||
|
# Copy application code
|
||||||
|
COPY server/ /app/server/
|
||||||
|
COPY scripts/ /app/scripts/
|
||||||
|
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
# Create directories
|
||||||
|
RUN mkdir -p /app/templates /app/output /app/jobs
|
||||||
|
|
||||||
|
# Allow ImageMagick to process PDFs (needed for thumbnails)
|
||||||
|
RUN if [ -f /etc/ImageMagick-6/policy.xml ]; then \
|
||||||
|
sed -i 's/rights="none" pattern="PDF"/rights="read|write" pattern="PDF"/' /etc/ImageMagick-6/policy.xml; \
|
||||||
|
fi
|
||||||
|
|
||||||
|
ENV SCRIBUS_PATH=/usr/bin/scribus
|
||||||
|
EXPOSE 8080
|
||||||
|
|
||||||
|
HEALTHCHECK --interval=30s --timeout=10s --retries=3 \
|
||||||
|
CMD curl -f http://localhost:8080/health || exit 1
|
||||||
|
|
||||||
|
CMD ["uvicorn", "server.app:app", "--host", "0.0.0.0", "--port", "8080"]
|
||||||
|
|
@ -0,0 +1,42 @@
|
||||||
|
FROM ubuntu:24.04
|
||||||
|
|
||||||
|
ENV DEBIAN_FRONTEND=noninteractive
|
||||||
|
ENV DISPLAY=:1
|
||||||
|
|
||||||
|
# Install Scribus, VNC, noVNC, and a lightweight window manager
|
||||||
|
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||||
|
scribus \
|
||||||
|
scribus-data \
|
||||||
|
tigervnc-standalone-server \
|
||||||
|
novnc \
|
||||||
|
websockify \
|
||||||
|
openbox \
|
||||||
|
xterm \
|
||||||
|
fonts-liberation \
|
||||||
|
fonts-dejavu \
|
||||||
|
fonts-noto \
|
||||||
|
fonts-noto-color-emoji \
|
||||||
|
dbus-x11 \
|
||||||
|
x11-utils \
|
||||||
|
procps \
|
||||||
|
curl \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
# Create app user
|
||||||
|
RUN useradd -m -s /bin/bash designer
|
||||||
|
|
||||||
|
# Create directories
|
||||||
|
RUN mkdir -p /app/templates /app/output /app/rswag-designs \
|
||||||
|
&& chown -R designer:designer /app
|
||||||
|
|
||||||
|
# Copy startup script
|
||||||
|
COPY studio/start.sh /start.sh
|
||||||
|
COPY studio/openbox-rc.xml /home/designer/.config/openbox/rc.xml
|
||||||
|
RUN chmod +x /start.sh && chown -R designer:designer /home/designer
|
||||||
|
|
||||||
|
EXPOSE 6080
|
||||||
|
|
||||||
|
HEALTHCHECK --interval=30s --timeout=10s --retries=3 \
|
||||||
|
CMD curl -sf http://localhost:6080/ || exit 1
|
||||||
|
|
||||||
|
CMD ["/start.sh"]
|
||||||
|
|
@ -0,0 +1,76 @@
|
||||||
|
services:
|
||||||
|
rdesign-api:
|
||||||
|
build:
|
||||||
|
context: .
|
||||||
|
dockerfile: Dockerfile
|
||||||
|
container_name: rdesign-api
|
||||||
|
restart: unless-stopped
|
||||||
|
volumes:
|
||||||
|
- ./templates:/app/templates
|
||||||
|
- ./output:/app/output
|
||||||
|
- ./jobs:/app/jobs
|
||||||
|
- ./scripts:/app/scripts
|
||||||
|
# Access rSwag designs for integration
|
||||||
|
- /opt/apps/rswag/designs:/app/rswag-designs:ro
|
||||||
|
environment:
|
||||||
|
- PYTHONUNBUFFERED=1
|
||||||
|
- BASE_URL=https://scribus.rspace.online
|
||||||
|
- RSWAG_DESIGNS_PATH=/app/rswag-designs
|
||||||
|
labels:
|
||||||
|
- "traefik.enable=true"
|
||||||
|
- "traefik.http.routers.rdesign-api.rule=Host(`scribus.rspace.online`) && !PathPrefix(`/vnc`)"
|
||||||
|
- "traefik.http.routers.rdesign-api.entrypoints=websecure"
|
||||||
|
- "traefik.http.routers.rdesign-api.tls.certresolver=letsencrypt"
|
||||||
|
- "traefik.http.services.rdesign-api.loadbalancer.server.port=8080"
|
||||||
|
- "traefik.http.middlewares.rdesign-cors.headers.accesscontrolallowmethods=GET,POST,OPTIONS"
|
||||||
|
- "traefik.http.middlewares.rdesign-cors.headers.accesscontrolallowheaders=*"
|
||||||
|
- "traefik.http.middlewares.rdesign-cors.headers.accesscontrolalloworiginlist=https://scribus.rspace.online,https://rswag.online"
|
||||||
|
- "traefik.http.middlewares.rdesign-cors.headers.accesscontrolmaxage=86400"
|
||||||
|
- "traefik.http.routers.rdesign-api.middlewares=rdesign-cors"
|
||||||
|
networks:
|
||||||
|
- traefik-public
|
||||||
|
deploy:
|
||||||
|
resources:
|
||||||
|
limits:
|
||||||
|
cpus: "4"
|
||||||
|
memory: 8G
|
||||||
|
reservations:
|
||||||
|
cpus: "1"
|
||||||
|
memory: 2G
|
||||||
|
|
||||||
|
rdesign-studio:
|
||||||
|
build:
|
||||||
|
context: .
|
||||||
|
dockerfile: Dockerfile.studio
|
||||||
|
container_name: rdesign-studio
|
||||||
|
restart: unless-stopped
|
||||||
|
volumes:
|
||||||
|
- ./templates:/app/templates
|
||||||
|
- ./output:/app/output
|
||||||
|
- /opt/apps/rswag/designs:/app/rswag-designs:ro
|
||||||
|
environment:
|
||||||
|
- VNC_PASSWORD=changeme
|
||||||
|
- DISPLAY_WIDTH=1920
|
||||||
|
- DISPLAY_HEIGHT=1080
|
||||||
|
labels:
|
||||||
|
- "traefik.enable=true"
|
||||||
|
- "traefik.http.routers.rdesign-studio.rule=Host(`scribus.rspace.online`) && PathPrefix(`/vnc`)"
|
||||||
|
- "traefik.http.routers.rdesign-studio.entrypoints=websecure"
|
||||||
|
- "traefik.http.routers.rdesign-studio.tls.certresolver=letsencrypt"
|
||||||
|
- "traefik.http.services.rdesign-studio.loadbalancer.server.port=6080"
|
||||||
|
- "traefik.http.routers.rdesign-studio.middlewares=rdesign-studio-strip"
|
||||||
|
- "traefik.http.middlewares.rdesign-studio-strip.stripprefix.prefixes=/vnc"
|
||||||
|
networks:
|
||||||
|
- traefik-public
|
||||||
|
deploy:
|
||||||
|
resources:
|
||||||
|
limits:
|
||||||
|
cpus: "2"
|
||||||
|
memory: 4G
|
||||||
|
reservations:
|
||||||
|
cpus: "0.5"
|
||||||
|
memory: 1G
|
||||||
|
|
||||||
|
networks:
|
||||||
|
traefik-public:
|
||||||
|
external: true
|
||||||
|
|
@ -0,0 +1,8 @@
|
||||||
|
fastapi==0.115.6
|
||||||
|
uvicorn[standard]==0.34.0
|
||||||
|
pydantic==2.10.3
|
||||||
|
python-multipart==0.0.19
|
||||||
|
pyyaml==6.0.2
|
||||||
|
aiofiles==24.1.0
|
||||||
|
httpx==0.28.1
|
||||||
|
Pillow==11.1.0
|
||||||
|
|
@ -0,0 +1,99 @@
|
||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
Scribus script: Import an IDML file (InDesign Markup Language) and export
|
||||||
|
to SLA (Scribus native) and/or PDF.
|
||||||
|
|
||||||
|
Scribus has a built-in IDML import filter. This script leverages it headlessly.
|
||||||
|
|
||||||
|
Called via: scribus -g -ns -py convert_idml.py -- [args]
|
||||||
|
"""
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
|
||||||
|
|
||||||
|
def parse_args():
|
||||||
|
args = {}
|
||||||
|
argv = sys.argv[1:]
|
||||||
|
i = 0
|
||||||
|
while i < len(argv):
|
||||||
|
if argv[i] == "--input" and i + 1 < len(argv):
|
||||||
|
args["input"] = argv[i + 1]
|
||||||
|
i += 2
|
||||||
|
elif argv[i] == "--output-sla" and i + 1 < len(argv):
|
||||||
|
args["output_sla"] = argv[i + 1]
|
||||||
|
i += 2
|
||||||
|
elif argv[i] == "--output-pdf" and i + 1 < len(argv):
|
||||||
|
args["output_pdf"] = argv[i + 1]
|
||||||
|
i += 2
|
||||||
|
elif argv[i] == "--dpi" and i + 1 < len(argv):
|
||||||
|
args["dpi"] = int(argv[i + 1])
|
||||||
|
i += 2
|
||||||
|
else:
|
||||||
|
i += 1
|
||||||
|
return args
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
try:
|
||||||
|
import scribus
|
||||||
|
except ImportError:
|
||||||
|
print("ERROR: Must run inside Scribus (scribus -g -py)")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
args = parse_args()
|
||||||
|
input_file = args.get("input")
|
||||||
|
output_sla = args.get("output_sla")
|
||||||
|
output_pdf = args.get("output_pdf")
|
||||||
|
dpi = args.get("dpi", 300)
|
||||||
|
|
||||||
|
if not input_file:
|
||||||
|
print("ERROR: --input required")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
if not os.path.exists(input_file):
|
||||||
|
print(f"ERROR: File not found: {input_file}")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
if not output_sla and not output_pdf:
|
||||||
|
print("ERROR: At least one of --output-sla or --output-pdf required")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
# Open the IDML file — Scribus handles import via its built-in filter
|
||||||
|
try:
|
||||||
|
scribus.openDoc(input_file)
|
||||||
|
print(f"Opened: {input_file}")
|
||||||
|
print(f"Pages: {scribus.pageCount()}")
|
||||||
|
except Exception as e:
|
||||||
|
print(f"ERROR: Could not open {input_file}: {e}")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
# Save as SLA (Scribus native format)
|
||||||
|
if output_sla:
|
||||||
|
try:
|
||||||
|
scribus.saveDocAs(output_sla)
|
||||||
|
print(f"Saved SLA: {output_sla}")
|
||||||
|
except Exception as e:
|
||||||
|
print(f"ERROR saving SLA: {e}")
|
||||||
|
|
||||||
|
# Export to PDF
|
||||||
|
if output_pdf:
|
||||||
|
try:
|
||||||
|
pdf = scribus.PDFfile()
|
||||||
|
pdf.file = output_pdf
|
||||||
|
pdf.quality = 0
|
||||||
|
pdf.resolution = dpi
|
||||||
|
pdf.version = 14
|
||||||
|
pdf.compress = True
|
||||||
|
pdf.compressmtd = 0
|
||||||
|
pdf.save()
|
||||||
|
print(f"Exported PDF: {output_pdf}")
|
||||||
|
except Exception as e:
|
||||||
|
print(f"ERROR exporting PDF: {e}")
|
||||||
|
|
||||||
|
scribus.closeDoc()
|
||||||
|
print("Done.")
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
|
|
@ -0,0 +1,115 @@
|
||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
Scribus headless export script.
|
||||||
|
Called via: scribus -g -ns -py export_document.py -- [args]
|
||||||
|
|
||||||
|
Exports a .sla template to PDF or PNG, optionally substituting variables.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
|
||||||
|
# Parse args after '--' separator
|
||||||
|
def parse_args():
|
||||||
|
args = {}
|
||||||
|
variables = {}
|
||||||
|
argv = sys.argv[1:] # Scribus passes script args after '--'
|
||||||
|
i = 0
|
||||||
|
while i < len(argv):
|
||||||
|
if argv[i] == "--input" and i + 1 < len(argv):
|
||||||
|
args["input"] = argv[i + 1]
|
||||||
|
i += 2
|
||||||
|
elif argv[i] == "--output" and i + 1 < len(argv):
|
||||||
|
args["output"] = argv[i + 1]
|
||||||
|
i += 2
|
||||||
|
elif argv[i] == "--format" and i + 1 < len(argv):
|
||||||
|
args["format"] = argv[i + 1]
|
||||||
|
i += 2
|
||||||
|
elif argv[i] == "--dpi" and i + 1 < len(argv):
|
||||||
|
args["dpi"] = int(argv[i + 1])
|
||||||
|
i += 2
|
||||||
|
elif argv[i] == "--var" and i + 1 < len(argv):
|
||||||
|
key, _, value = argv[i + 1].partition("=")
|
||||||
|
variables[key] = value
|
||||||
|
i += 2
|
||||||
|
else:
|
||||||
|
i += 1
|
||||||
|
args["variables"] = variables
|
||||||
|
return args
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
try:
|
||||||
|
import scribus
|
||||||
|
except ImportError:
|
||||||
|
print("ERROR: This script must be run inside Scribus (scribus -g -py)")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
args = parse_args()
|
||||||
|
input_file = args.get("input")
|
||||||
|
output_file = args.get("output")
|
||||||
|
fmt = args.get("format", "pdf")
|
||||||
|
dpi = args.get("dpi", 300)
|
||||||
|
variables = args.get("variables", {})
|
||||||
|
|
||||||
|
if not input_file or not output_file:
|
||||||
|
print("ERROR: --input and --output required")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
# Open the document
|
||||||
|
try:
|
||||||
|
scribus.openDoc(input_file)
|
||||||
|
except Exception as e:
|
||||||
|
print(f"ERROR: Could not open {input_file}: {e}")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
# Substitute variables in text frames
|
||||||
|
if variables:
|
||||||
|
page_count = scribus.pageCount()
|
||||||
|
for page_num in range(1, page_count + 1):
|
||||||
|
scribus.gotoPage(page_num)
|
||||||
|
items = scribus.getPageItems()
|
||||||
|
for item_name, item_type, _ in items:
|
||||||
|
# item_type 4 = text frame
|
||||||
|
if item_type == 4:
|
||||||
|
try:
|
||||||
|
text = scribus.getText(item_name)
|
||||||
|
modified = False
|
||||||
|
for key, value in variables.items():
|
||||||
|
placeholder = f"%{key}%"
|
||||||
|
if placeholder in text:
|
||||||
|
text = text.replace(placeholder, value)
|
||||||
|
modified = True
|
||||||
|
if modified:
|
||||||
|
scribus.selectText(0, scribus.getTextLength(item_name), item_name)
|
||||||
|
scribus.deleteText(item_name)
|
||||||
|
scribus.insertText(text, 0, item_name)
|
||||||
|
except Exception:
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Export
|
||||||
|
if fmt == "pdf":
|
||||||
|
pdf = scribus.PDFfile()
|
||||||
|
pdf.file = output_file
|
||||||
|
pdf.quality = 0 # Maximum quality
|
||||||
|
pdf.resolution = dpi
|
||||||
|
pdf.version = 14 # PDF 1.4
|
||||||
|
pdf.compress = True
|
||||||
|
pdf.compressmtd = 0 # Automatic
|
||||||
|
pdf.save()
|
||||||
|
elif fmt == "png":
|
||||||
|
# Export each page as PNG
|
||||||
|
scribus.savePageAsEPS(output_file.replace(".png", ".eps"))
|
||||||
|
# Note: for PNG, we rely on post-processing with ghostscript/imagemagick
|
||||||
|
print(f"EPS exported to {output_file.replace('.png', '.eps')}")
|
||||||
|
print("Convert with: convert -density {dpi} file.eps file.png")
|
||||||
|
elif fmt == "svg":
|
||||||
|
# Scribus can export to SVG
|
||||||
|
scribus.saveDoc()
|
||||||
|
|
||||||
|
scribus.closeDoc()
|
||||||
|
print(f"Exported: {output_file}")
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
|
|
@ -0,0 +1,180 @@
|
||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
Scribus script: Create a print-ready PDF from an rSwag design image.
|
||||||
|
Generates a new Scribus document with the design placed on a page with
|
||||||
|
optional bleed and crop marks.
|
||||||
|
|
||||||
|
Called via: scribus -g -ns -py rswag_export.py -- [args]
|
||||||
|
"""
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
|
||||||
|
# Paper sizes in mm (width, height)
|
||||||
|
PAPER_SIZES = {
|
||||||
|
"A4": (210, 297),
|
||||||
|
"A3": (297, 420),
|
||||||
|
"A5": (148, 210),
|
||||||
|
"Letter": (215.9, 279.4),
|
||||||
|
"Tabloid": (279.4, 431.8),
|
||||||
|
}
|
||||||
|
|
||||||
|
BLEED_MM = 3.0
|
||||||
|
CROP_MARK_LEN = 5.0
|
||||||
|
CROP_MARK_OFFSET = 3.0
|
||||||
|
|
||||||
|
|
||||||
|
def parse_args():
|
||||||
|
args = {"bleed": False, "crop_marks": False}
|
||||||
|
argv = sys.argv[1:]
|
||||||
|
i = 0
|
||||||
|
while i < len(argv):
|
||||||
|
if argv[i] == "--image" and i + 1 < len(argv):
|
||||||
|
args["image"] = argv[i + 1]
|
||||||
|
i += 2
|
||||||
|
elif argv[i] == "--output" and i + 1 < len(argv):
|
||||||
|
args["output"] = argv[i + 1]
|
||||||
|
i += 2
|
||||||
|
elif argv[i] == "--paper" and i + 1 < len(argv):
|
||||||
|
args["paper"] = argv[i + 1]
|
||||||
|
i += 2
|
||||||
|
elif argv[i] == "--dpi" and i + 1 < len(argv):
|
||||||
|
args["dpi"] = int(argv[i + 1])
|
||||||
|
i += 2
|
||||||
|
elif argv[i] == "--title" and i + 1 < len(argv):
|
||||||
|
args["title"] = argv[i + 1]
|
||||||
|
i += 2
|
||||||
|
elif argv[i] == "--bleed":
|
||||||
|
args["bleed"] = True
|
||||||
|
i += 1
|
||||||
|
elif argv[i] == "--crop-marks":
|
||||||
|
args["crop_marks"] = True
|
||||||
|
i += 1
|
||||||
|
else:
|
||||||
|
i += 1
|
||||||
|
return args
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
try:
|
||||||
|
import scribus
|
||||||
|
except ImportError:
|
||||||
|
print("ERROR: Must run inside Scribus")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
args = parse_args()
|
||||||
|
image_path = args.get("image")
|
||||||
|
output_path = args.get("output")
|
||||||
|
paper = args.get("paper", "A4")
|
||||||
|
dpi = args.get("dpi", 300)
|
||||||
|
title = args.get("title", "rSwag Design")
|
||||||
|
|
||||||
|
if not image_path or not output_path:
|
||||||
|
print("ERROR: --image and --output required")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
if not os.path.exists(image_path):
|
||||||
|
print(f"ERROR: Image not found: {image_path}")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
# Get paper dimensions
|
||||||
|
paper_w, paper_h = PAPER_SIZES.get(paper, PAPER_SIZES["A4"])
|
||||||
|
|
||||||
|
# Calculate page size with bleed
|
||||||
|
bleed = BLEED_MM if args["bleed"] else 0
|
||||||
|
page_w = paper_w + (bleed * 2)
|
||||||
|
page_h = paper_h + (bleed * 2)
|
||||||
|
|
||||||
|
# Create new document (dimensions in mm, SCRIBUS_UNIT_MILLIMETERS = 1)
|
||||||
|
scribus.newDocument(
|
||||||
|
(page_w, page_h), # page size
|
||||||
|
(10, 10, 10, 10), # margins (left, right, top, bottom)
|
||||||
|
scribus.PORTRAIT,
|
||||||
|
1, # first page number
|
||||||
|
scribus.UNIT_MILLIMETERS,
|
||||||
|
scribus.PAGE_1, # single page facing
|
||||||
|
0, # first page left
|
||||||
|
1, # number of pages
|
||||||
|
)
|
||||||
|
|
||||||
|
# Place the image centered on the page
|
||||||
|
# Calculate placement: center the image on the page
|
||||||
|
margin = 15 # mm margin around image
|
||||||
|
img_area_w = paper_w - (margin * 2)
|
||||||
|
img_area_h = paper_h - (margin * 2)
|
||||||
|
|
||||||
|
img_x = bleed + margin
|
||||||
|
img_y = bleed + margin
|
||||||
|
|
||||||
|
# Create image frame
|
||||||
|
img_frame = scribus.createImage(img_x, img_y, img_area_w, img_area_h, "design")
|
||||||
|
scribus.loadImage(image_path, img_frame)
|
||||||
|
|
||||||
|
# Scale image to fit frame proportionally
|
||||||
|
scribus.setScaleImageToFrame(True, True, img_frame)
|
||||||
|
|
||||||
|
# Add title text below the image
|
||||||
|
title_y = bleed + margin + img_area_h + 5
|
||||||
|
title_frame = scribus.createText(
|
||||||
|
bleed + margin, title_y,
|
||||||
|
img_area_w, 10,
|
||||||
|
"title"
|
||||||
|
)
|
||||||
|
scribus.setText(title, title_frame)
|
||||||
|
scribus.setFontSize(12, title_frame)
|
||||||
|
scribus.setTextAlignment(scribus.ALIGN_CENTERED, title_frame)
|
||||||
|
|
||||||
|
# Add crop marks if requested
|
||||||
|
if args["crop_marks"]:
|
||||||
|
mark_color = "Registration"
|
||||||
|
# Ensure Registration color exists (it's a default Scribus color)
|
||||||
|
line_width = 0.25 # mm
|
||||||
|
|
||||||
|
# Top-left corner
|
||||||
|
for mark_args in [
|
||||||
|
# Top-left
|
||||||
|
(bleed - CROP_MARK_OFFSET - CROP_MARK_LEN, bleed, CROP_MARK_LEN, 0),
|
||||||
|
(bleed, bleed - CROP_MARK_OFFSET - CROP_MARK_LEN, 0, CROP_MARK_LEN),
|
||||||
|
# Top-right
|
||||||
|
(bleed + paper_w + CROP_MARK_OFFSET, bleed, CROP_MARK_LEN, 0),
|
||||||
|
(bleed + paper_w, bleed - CROP_MARK_OFFSET - CROP_MARK_LEN, 0, CROP_MARK_LEN),
|
||||||
|
# Bottom-left
|
||||||
|
(bleed - CROP_MARK_OFFSET - CROP_MARK_LEN, bleed + paper_h, CROP_MARK_LEN, 0),
|
||||||
|
(bleed, bleed + paper_h + CROP_MARK_OFFSET, 0, CROP_MARK_LEN),
|
||||||
|
# Bottom-right
|
||||||
|
(bleed + paper_w + CROP_MARK_OFFSET, bleed + paper_h, CROP_MARK_LEN, 0),
|
||||||
|
(bleed + paper_w, bleed + paper_h + CROP_MARK_OFFSET, 0, CROP_MARK_LEN),
|
||||||
|
]:
|
||||||
|
x, y, w, h = mark_args
|
||||||
|
if w > 0:
|
||||||
|
line = scribus.createLine(x, y, x + w, y)
|
||||||
|
else:
|
||||||
|
line = scribus.createLine(x, y, x, y + h)
|
||||||
|
scribus.setLineWidth(line_width, line)
|
||||||
|
|
||||||
|
# Export to PDF
|
||||||
|
pdf = scribus.PDFfile()
|
||||||
|
pdf.file = output_path
|
||||||
|
pdf.quality = 0 # Maximum
|
||||||
|
pdf.resolution = dpi
|
||||||
|
pdf.version = 14 # PDF 1.4
|
||||||
|
pdf.compress = True
|
||||||
|
pdf.compressmtd = 0
|
||||||
|
if args["bleed"]:
|
||||||
|
pdf.useDocBleeds = False
|
||||||
|
pdf.bleedt = BLEED_MM
|
||||||
|
pdf.bleedb = BLEED_MM
|
||||||
|
pdf.bleedl = BLEED_MM
|
||||||
|
pdf.bleedr = BLEED_MM
|
||||||
|
if args["crop_marks"]:
|
||||||
|
pdf.cropMarks = True
|
||||||
|
pdf.markLength = CROP_MARK_LEN
|
||||||
|
pdf.markOffset = CROP_MARK_OFFSET
|
||||||
|
pdf.save()
|
||||||
|
|
||||||
|
scribus.closeDoc()
|
||||||
|
print(f"Exported: {output_path}")
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
|
|
@ -0,0 +1,716 @@
|
||||||
|
"""
|
||||||
|
rDesign — Document design & PDF generation service.
|
||||||
|
Headless Scribus automation API, following the blender-automation pattern.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import os
|
||||||
|
import uuid
|
||||||
|
import glob as globmod
|
||||||
|
import time
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
import yaml
|
||||||
|
import aiofiles
|
||||||
|
from fastapi import FastAPI, BackgroundTasks, HTTPException, UploadFile, File, Query
|
||||||
|
from fastapi.middleware.cors import CORSMiddleware
|
||||||
|
from fastapi.responses import FileResponse, JSONResponse
|
||||||
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Config
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
BASE_URL = os.getenv("BASE_URL", "https://scribus.rspace.online")
|
||||||
|
SCRIBUS_PATH = os.getenv("SCRIBUS_PATH", "/usr/bin/scribus")
|
||||||
|
TEMPLATES_DIR = Path(os.getenv("TEMPLATES_DIR", "/app/templates"))
|
||||||
|
OUTPUT_DIR = Path(os.getenv("OUTPUT_DIR", "/app/output"))
|
||||||
|
JOBS_DIR = Path(os.getenv("JOBS_DIR", "/app/jobs"))
|
||||||
|
RSWAG_DESIGNS_PATH = Path(os.getenv("RSWAG_DESIGNS_PATH", "/app/rswag-designs"))
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# App
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
app = FastAPI(
|
||||||
|
title="rDesign",
|
||||||
|
description="Document design & PDF generation — headless Scribus automation",
|
||||||
|
version="0.1.0",
|
||||||
|
)
|
||||||
|
|
||||||
|
app.add_middleware(
|
||||||
|
CORSMiddleware,
|
||||||
|
allow_origins=[
|
||||||
|
"https://scribus.rspace.online",
|
||||||
|
"https://rswag.online",
|
||||||
|
],
|
||||||
|
allow_methods=["*"],
|
||||||
|
allow_headers=["*"],
|
||||||
|
)
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# In-memory job store (same pattern as blender-automation)
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
jobs: dict[str, dict] = {}
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Models
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
class ExportRequest(BaseModel):
|
||||||
|
"""Generate a PDF from a Scribus template."""
|
||||||
|
template: str = Field(..., description="Template slug (directory name under /templates)")
|
||||||
|
output_format: str = Field("pdf", description="Output format: pdf, png, svg")
|
||||||
|
variables: dict[str, str] = Field(default_factory=dict, description="Template variables to substitute")
|
||||||
|
dpi: int = Field(300, ge=72, le=600)
|
||||||
|
|
||||||
|
|
||||||
|
class BatchExportRequest(BaseModel):
|
||||||
|
"""Generate multiple PDFs from a template + data rows (ScribusGenerator pattern)."""
|
||||||
|
template: str
|
||||||
|
output_format: str = Field("pdf", description="pdf or png")
|
||||||
|
rows: list[dict[str, str]] = Field(..., description="List of variable dicts, one per output document")
|
||||||
|
dpi: int = Field(300, ge=72, le=600)
|
||||||
|
|
||||||
|
|
||||||
|
class RswagExportRequest(BaseModel):
|
||||||
|
"""Export an rSwag design to print-ready PDF with bleed/crop marks."""
|
||||||
|
design_slug: str
|
||||||
|
category: str = Field("stickers", description="rSwag category: stickers, shirts, prints")
|
||||||
|
paper_size: str = Field("A4", description="Paper size: A4, A3, Letter, custom")
|
||||||
|
add_bleed: bool = Field(True, description="Add 3mm bleed")
|
||||||
|
add_crop_marks: bool = Field(True, description="Add crop/trim marks")
|
||||||
|
|
||||||
|
|
||||||
|
class ConvertIdmlRequest(BaseModel):
|
||||||
|
"""Convert an IDML file (InDesign interchange) to SLA and/or PDF."""
|
||||||
|
output_sla: bool = Field(True, description="Save as Scribus .sla file")
|
||||||
|
output_pdf: bool = Field(True, description="Export to PDF")
|
||||||
|
dpi: int = Field(300, ge=72, le=600)
|
||||||
|
|
||||||
|
|
||||||
|
class TemplateInfo(BaseModel):
|
||||||
|
slug: str
|
||||||
|
name: str
|
||||||
|
description: str
|
||||||
|
category: str
|
||||||
|
variables: list[str]
|
||||||
|
preview_url: Optional[str] = None
|
||||||
|
created: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
|
class JobStatus(BaseModel):
|
||||||
|
job_id: str
|
||||||
|
status: str # queued, processing, completed, failed
|
||||||
|
progress: int = 0
|
||||||
|
result_url: Optional[str] = None
|
||||||
|
error: Optional[str] = None
|
||||||
|
created_at: str
|
||||||
|
completed_at: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Template management
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
def scan_templates() -> list[TemplateInfo]:
|
||||||
|
"""Scan templates directory for available Scribus templates."""
|
||||||
|
templates = []
|
||||||
|
for meta_path in sorted(TEMPLATES_DIR.glob("*/metadata.yaml")):
|
||||||
|
try:
|
||||||
|
with open(meta_path) as f:
|
||||||
|
meta = yaml.safe_load(f)
|
||||||
|
slug = meta_path.parent.name
|
||||||
|
preview = None
|
||||||
|
for ext in ("png", "jpg", "webp"):
|
||||||
|
pf = meta_path.parent / f"preview.{ext}"
|
||||||
|
if pf.exists():
|
||||||
|
preview = f"{BASE_URL}/templates/{slug}/preview"
|
||||||
|
break
|
||||||
|
templates.append(TemplateInfo(
|
||||||
|
slug=slug,
|
||||||
|
name=meta.get("name", slug),
|
||||||
|
description=meta.get("description", ""),
|
||||||
|
category=meta.get("category", "general"),
|
||||||
|
variables=meta.get("variables", []),
|
||||||
|
preview_url=preview,
|
||||||
|
created=meta.get("created"),
|
||||||
|
))
|
||||||
|
except Exception:
|
||||||
|
continue
|
||||||
|
return templates
|
||||||
|
|
||||||
|
|
||||||
|
def find_template_sla(slug: str) -> Path:
|
||||||
|
"""Find the .sla file for a template slug."""
|
||||||
|
tpl_dir = TEMPLATES_DIR / slug
|
||||||
|
if not tpl_dir.is_dir():
|
||||||
|
raise HTTPException(404, f"Template '{slug}' not found")
|
||||||
|
sla_files = list(tpl_dir.glob("*.sla"))
|
||||||
|
if not sla_files:
|
||||||
|
raise HTTPException(404, f"No .sla file found in template '{slug}'")
|
||||||
|
return sla_files[0]
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Scribus headless execution
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
async def run_scribus_script(script_path: str, args: list[str], timeout: int = 120) -> tuple[str, str, int]:
|
||||||
|
"""Run a Scribus Python script in headless mode via xvfb-run."""
|
||||||
|
cmd = [
|
||||||
|
"xvfb-run", "--auto-servernum", "--server-args=-screen 0 1920x1080x24",
|
||||||
|
SCRIBUS_PATH, "-g", "-ns", "-py", script_path, "--", *args,
|
||||||
|
]
|
||||||
|
proc = await asyncio.create_subprocess_exec(
|
||||||
|
*cmd,
|
||||||
|
stdout=asyncio.subprocess.PIPE,
|
||||||
|
stderr=asyncio.subprocess.PIPE,
|
||||||
|
)
|
||||||
|
try:
|
||||||
|
stdout, stderr = await asyncio.wait_for(proc.communicate(), timeout=timeout)
|
||||||
|
except asyncio.TimeoutError:
|
||||||
|
proc.kill()
|
||||||
|
await proc.communicate()
|
||||||
|
return "", "Process timed out", -1
|
||||||
|
return stdout.decode(), stderr.decode(), proc.returncode
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Job processing
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
async def process_export_job(job_id: str, req: ExportRequest):
|
||||||
|
"""Process a single template export job."""
|
||||||
|
jobs[job_id]["status"] = "processing"
|
||||||
|
jobs[job_id]["progress"] = 10
|
||||||
|
|
||||||
|
try:
|
||||||
|
sla_path = find_template_sla(req.template)
|
||||||
|
timestamp = int(time.time())
|
||||||
|
output_filename = f"{req.template}_{timestamp}.{req.output_format}"
|
||||||
|
output_path = OUTPUT_DIR / output_filename
|
||||||
|
|
||||||
|
# Build script args
|
||||||
|
script_args = [
|
||||||
|
"--input", str(sla_path),
|
||||||
|
"--output", str(output_path),
|
||||||
|
"--format", req.output_format,
|
||||||
|
"--dpi", str(req.dpi),
|
||||||
|
]
|
||||||
|
# Pass variables as --var key=value
|
||||||
|
for k, v in req.variables.items():
|
||||||
|
script_args.extend(["--var", f"{k}={v}"])
|
||||||
|
|
||||||
|
jobs[job_id]["progress"] = 30
|
||||||
|
|
||||||
|
stdout, stderr, returncode = await run_scribus_script(
|
||||||
|
"/app/scripts/export_document.py", script_args
|
||||||
|
)
|
||||||
|
|
||||||
|
if returncode != 0 or not output_path.exists():
|
||||||
|
jobs[job_id]["status"] = "failed"
|
||||||
|
jobs[job_id]["error"] = stderr[:500] if stderr else "Export failed — no output produced"
|
||||||
|
return
|
||||||
|
|
||||||
|
jobs[job_id]["progress"] = 100
|
||||||
|
jobs[job_id]["status"] = "completed"
|
||||||
|
jobs[job_id]["result_url"] = f"{BASE_URL}/output/{output_filename}"
|
||||||
|
jobs[job_id]["completed_at"] = datetime.now(timezone.utc).isoformat()
|
||||||
|
|
||||||
|
except HTTPException as e:
|
||||||
|
jobs[job_id]["status"] = "failed"
|
||||||
|
jobs[job_id]["error"] = e.detail
|
||||||
|
except Exception as e:
|
||||||
|
jobs[job_id]["status"] = "failed"
|
||||||
|
jobs[job_id]["error"] = str(e)[:500]
|
||||||
|
|
||||||
|
|
||||||
|
async def process_batch_job(job_id: str, req: BatchExportRequest):
|
||||||
|
"""Process a batch export job (ScribusGenerator pattern)."""
|
||||||
|
jobs[job_id]["status"] = "processing"
|
||||||
|
total = len(req.rows)
|
||||||
|
results = []
|
||||||
|
|
||||||
|
try:
|
||||||
|
sla_path = find_template_sla(req.template)
|
||||||
|
|
||||||
|
for i, row in enumerate(req.rows):
|
||||||
|
jobs[job_id]["progress"] = int((i / total) * 100)
|
||||||
|
timestamp = int(time.time())
|
||||||
|
row_id = row.get("id", str(i))
|
||||||
|
output_filename = f"{req.template}_{row_id}_{timestamp}.{req.output_format}"
|
||||||
|
output_path = OUTPUT_DIR / output_filename
|
||||||
|
|
||||||
|
script_args = [
|
||||||
|
"--input", str(sla_path),
|
||||||
|
"--output", str(output_path),
|
||||||
|
"--format", req.output_format,
|
||||||
|
"--dpi", str(req.dpi),
|
||||||
|
]
|
||||||
|
for k, v in row.items():
|
||||||
|
script_args.extend(["--var", f"{k}={v}"])
|
||||||
|
|
||||||
|
stdout, stderr, returncode = await run_scribus_script(
|
||||||
|
"/app/scripts/export_document.py", script_args
|
||||||
|
)
|
||||||
|
|
||||||
|
if returncode == 0 and output_path.exists():
|
||||||
|
results.append({
|
||||||
|
"row_id": row_id,
|
||||||
|
"url": f"{BASE_URL}/output/{output_filename}",
|
||||||
|
"status": "ok",
|
||||||
|
})
|
||||||
|
else:
|
||||||
|
results.append({
|
||||||
|
"row_id": row_id,
|
||||||
|
"status": "failed",
|
||||||
|
"error": (stderr or "unknown error")[:200],
|
||||||
|
})
|
||||||
|
|
||||||
|
jobs[job_id]["progress"] = 100
|
||||||
|
jobs[job_id]["status"] = "completed"
|
||||||
|
jobs[job_id]["results"] = results
|
||||||
|
jobs[job_id]["completed_at"] = datetime.now(timezone.utc).isoformat()
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
jobs[job_id]["status"] = "failed"
|
||||||
|
jobs[job_id]["error"] = str(e)[:500]
|
||||||
|
|
||||||
|
|
||||||
|
async def process_rswag_export(job_id: str, req: RswagExportRequest):
    """Export an rSwag design to print-ready PDF.

    Locates the design image under RSWAG_DESIGNS_PATH, optionally reads
    its metadata.yaml for a document title, then hands everything to the
    rswag_export.py Scribus script.  All state is reported via the
    module-level ``jobs`` registry under ``job_id``.
    """
    jobs[job_id]["status"] = "processing"
    jobs[job_id]["progress"] = 10

    try:
        # Find the rSwag design directory: <root>/<category>/<slug>
        design_dir = RSWAG_DESIGNS_PATH / req.category / req.design_slug
        if not design_dir.is_dir():
            raise FileNotFoundError(f"rSwag design not found: {req.category}/{req.design_slug}")

        # Find the design image — preferred locations first
        # (300dpi export, then a slug-named PNG at the design root).
        design_image = None
        for candidate in [
            design_dir / "exports" / "300dpi" / f"{req.design_slug}.png",
            design_dir / f"{req.design_slug}.png",
        ]:
            if candidate.exists():
                design_image = candidate
                break

        # Fallback: find any PNG in the design directory.
        if not design_image:
            pngs = list(design_dir.glob("*.png"))
            if pngs:
                design_image = pngs[0]

        if not design_image:
            raise FileNotFoundError(f"No image found for design: {req.design_slug}")

        # Load design metadata if available (used only for the PDF title).
        meta_path = design_dir / "metadata.yaml"
        meta = {}
        if meta_path.exists():
            with open(meta_path) as f:
                # safe_load may return None for an empty file; keep {} then.
                meta = yaml.safe_load(f) or {}

        jobs[job_id]["progress"] = 30

        # Timestamped output name avoids collisions between exports
        # of the same design.
        timestamp = int(time.time())
        output_filename = f"rswag_{req.design_slug}_{timestamp}.pdf"
        output_path = OUTPUT_DIR / output_filename

        script_args = [
            "--image", str(design_image),
            "--output", str(output_path),
            "--paper", req.paper_size,
            "--dpi", str(300),
            "--title", meta.get("name", req.design_slug),
        ]
        # Optional print-production flags.
        if req.add_bleed:
            script_args.append("--bleed")
        if req.add_crop_marks:
            script_args.append("--crop-marks")

        stdout, stderr, returncode = await run_scribus_script(
            "/app/scripts/rswag_export.py", script_args, timeout=180
        )

        # Treat a zero exit code without an output file as failure too.
        if returncode != 0 or not output_path.exists():
            jobs[job_id]["status"] = "failed"
            jobs[job_id]["error"] = stderr[:500] if stderr else "rSwag export failed"
            return

        jobs[job_id]["progress"] = 100
        jobs[job_id]["status"] = "completed"
        jobs[job_id]["result_url"] = f"{BASE_URL}/output/{output_filename}"
        jobs[job_id]["completed_at"] = datetime.now(timezone.utc).isoformat()

    except Exception as e:
        # Truncated error message keeps the job record small.
        jobs[job_id]["status"] = "failed"
        jobs[job_id]["error"] = str(e)[:500]
|
||||||
|
|
||||||
|
|
||||||
|
async def process_idml_convert(job_id: str, idml_path: Path, req: ConvertIdmlRequest):
    """Convert an IDML file to SLA and/or PDF.

    Runs convert_idml.py under Scribus, publishes resulting file URLs in
    ``jobs[job_id]["results"]``, and — when an SLA was produced — also
    registers it as a reusable template under TEMPLATES_DIR.
    """
    jobs[job_id]["status"] = "processing"
    jobs[job_id]["progress"] = 10

    try:
        timestamp = int(time.time())
        # stem is the uploaded file's basename without extension; it is
        # reused as the template slug below.
        stem = idml_path.stem
        results = {}

        script_args = ["--input", str(idml_path)]

        # NOTE: sla_path / pdf_path are only bound when the matching flag
        # is set; all later uses are guarded by the same flag.
        if req.output_sla:
            sla_filename = f"{stem}_{timestamp}.sla"
            sla_path = OUTPUT_DIR / sla_filename
            script_args.extend(["--output-sla", str(sla_path)])

        if req.output_pdf:
            pdf_filename = f"{stem}_{timestamp}.pdf"
            pdf_path = OUTPUT_DIR / pdf_filename
            script_args.extend(["--output-pdf", str(pdf_path)])

        script_args.extend(["--dpi", str(req.dpi)])

        jobs[job_id]["progress"] = 30

        stdout, stderr, returncode = await run_scribus_script(
            "/app/scripts/convert_idml.py", script_args, timeout=300
        )

        if returncode != 0:
            jobs[job_id]["status"] = "failed"
            jobs[job_id]["error"] = stderr[:500] if stderr else "IDML conversion failed"
            return

        # Only advertise outputs that actually materialized on disk.
        if req.output_sla and sla_path.exists():
            results["sla_url"] = f"{BASE_URL}/output/{sla_filename}"
        if req.output_pdf and pdf_path.exists():
            results["pdf_url"] = f"{BASE_URL}/output/{pdf_filename}"

        # Save as template too (SLA goes into templates dir)
        if req.output_sla and sla_path.exists():
            tpl_dir = TEMPLATES_DIR / stem
            # exist_ok: re-importing the same IDML overwrites the template.
            tpl_dir.mkdir(parents=True, exist_ok=True)
            import shutil
            shutil.copy2(sla_path, tpl_dir / f"{stem}.sla")
            meta = {
                "name": stem.replace("-", " ").replace("_", " ").title(),
                "description": f"Imported from IDML: {idml_path.name}",
                "category": "imported",
                "variables": [],
                "created": datetime.now(timezone.utc).strftime("%Y-%m-%d"),
                "source": {"format": "idml", "original": idml_path.name},
            }
            with open(tpl_dir / "metadata.yaml", "w") as f:
                yaml.dump(meta, f, default_flow_style=False)
            results["template_slug"] = stem

        jobs[job_id]["progress"] = 100
        jobs[job_id]["status"] = "completed"
        jobs[job_id]["results"] = results
        jobs[job_id]["completed_at"] = datetime.now(timezone.utc).isoformat()

    except Exception as e:
        # Truncated error message keeps the job record small.
        jobs[job_id]["status"] = "failed"
        jobs[job_id]["error"] = str(e)[:500]
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Routes
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
|
||||||
|
@app.get("/", tags=["health"])
async def root():
    """Service identity endpoint: static name, version, and ok status."""
    payload = {
        "service": "rDesign",
        "version": "0.1.0",
        "status": "ok",
    }
    return payload
|
||||||
|
|
||||||
|
|
||||||
|
@app.get("/health", tags=["health"])
async def health():
    """Health probe: reports degraded when the Scribus binary is missing."""
    has_scribus = os.path.isfile(SCRIBUS_PATH)
    if has_scribus:
        status, scribus = "ok", "available"
    else:
        status, scribus = "degraded", "missing"
    return {
        "status": status,
        "scribus": scribus,
        "templates": len(scan_templates()),
    }
|
||||||
|
|
||||||
|
|
||||||
|
# --- Templates ---
|
||||||
|
|
||||||
|
@app.get("/templates", response_model=list[TemplateInfo], tags=["templates"])
async def list_templates(category: Optional[str] = None):
    """List available Scribus templates, optionally filtered by category."""
    found = scan_templates()
    if not category:
        return found
    return [tpl for tpl in found if tpl.category == category]
|
||||||
|
|
||||||
|
|
||||||
|
@app.get("/templates/{slug}", response_model=TemplateInfo, tags=["templates"])
async def get_template(slug: str):
    """Return metadata for one template; 404 when the slug is unknown."""
    match = next((tpl for tpl in scan_templates() if tpl.slug == slug), None)
    if match is None:
        raise HTTPException(404, f"Template '{slug}' not found")
    return match
|
||||||
|
|
||||||
|
|
||||||
|
@app.get("/templates/{slug}/preview", tags=["templates"])
async def get_template_preview(slug: str):
    """Get template preview image.

    Serves the first of preview.png / preview.jpg / preview.webp found
    inside the template directory.
    """
    tpl_dir = TEMPLATES_DIR / slug
    # Path traversal protection, consistent with GET /output/{filename}:
    # a slug such as ".." must not resolve outside TEMPLATES_DIR.
    if not tpl_dir.resolve().is_relative_to(TEMPLATES_DIR.resolve()):
        raise HTTPException(403, "Access denied")
    if not tpl_dir.is_dir():
        raise HTTPException(404, "Template not found")
    for ext in ("png", "jpg", "webp"):
        preview = tpl_dir / f"preview.{ext}"
        if preview.exists():
            return FileResponse(preview)
    raise HTTPException(404, "No preview available")
|
||||||
|
|
||||||
|
|
||||||
|
@app.post("/templates/upload", tags=["templates"])
async def upload_template(
    file: UploadFile = File(...),
    name: str = Query(...),
    description: str = Query(""),
    category: str = Query("general"),
):
    """Upload a new Scribus template (.sla file).

    Saves the .sla under TEMPLATES_DIR/<slug>/ and writes a metadata.yaml
    next to it.  The slug is derived from ``name``.
    """
    if not file.filename or not file.filename.endswith(".sla"):
        raise HTTPException(400, "File must be a .sla Scribus document")

    slug = name.lower().replace(" ", "-").replace("_", "-")
    # Reject slugs that would escape TEMPLATES_DIR (e.g. name="../x")
    # or collide with hidden/special directory entries.
    if not slug or "/" in slug or "\\" in slug or slug.startswith("."):
        raise HTTPException(400, "Invalid template name")
    tpl_dir = TEMPLATES_DIR / slug
    tpl_dir.mkdir(parents=True, exist_ok=True)

    # Use only the basename of the client-supplied filename: a value like
    # "../../evil.sla" must not traverse out of tpl_dir.
    sla_path = tpl_dir / Path(file.filename).name
    async with aiofiles.open(sla_path, "wb") as f:
        content = await file.read()
        await f.write(content)

    # Create metadata alongside the template.
    meta = {
        "name": name,
        "description": description,
        "category": category,
        "variables": [],
        "created": datetime.now(timezone.utc).strftime("%Y-%m-%d"),
    }
    meta_path = tpl_dir / "metadata.yaml"
    with open(meta_path, "w") as f:
        yaml.dump(meta, f, default_flow_style=False)

    return {"slug": slug, "message": f"Template '{name}' uploaded", "path": str(sla_path)}
|
||||||
|
|
||||||
|
|
||||||
|
# --- IDML Import / Convert ---
|
||||||
|
|
||||||
|
@app.post("/convert/idml", tags=["convert"])
async def convert_idml(
    file: UploadFile = File(...),
    output_sla: bool = Query(True, description="Save as Scribus .sla"),
    output_pdf: bool = Query(True, description="Export to PDF"),
    dpi: int = Query(300, ge=72, le=600),
    background_tasks: BackgroundTasks = None,
):
    """Upload an IDML file (InDesign interchange format) and convert to SLA/PDF.

    IDML is Adobe's open interchange format — export from InDesign via
    File → Save As → InDesign Markup (IDML). Scribus imports IDML natively.

    Note: Native .indd files cannot be converted directly. Use IDML instead.
    """
    if not file.filename or not file.filename.lower().endswith((".idml", ".idms")):
        raise HTTPException(400, "File must be .idml or .idms (InDesign Markup). "
        "Native .indd files are not supported — export as IDML from InDesign first.")

    # Save uploaded file.  Only the basename of the client-supplied name is
    # used so a crafted filename (e.g. "../../x.idml") cannot traverse out
    # of the uploads directory; the uuid prefix prevents collisions.
    upload_dir = JOBS_DIR / "uploads"
    upload_dir.mkdir(parents=True, exist_ok=True)
    idml_path = upload_dir / f"{uuid.uuid4()}_{Path(file.filename).name}"
    async with aiofiles.open(idml_path, "wb") as f:
        content = await file.read()
        await f.write(content)

    req = ConvertIdmlRequest(output_sla=output_sla, output_pdf=output_pdf, dpi=dpi)

    # Register the job, then run the conversion in the background.
    job_id = str(uuid.uuid4())
    jobs[job_id] = {
        "job_id": job_id,
        "status": "queued",
        "progress": 0,
        "created_at": datetime.now(timezone.utc).isoformat(),
        "source": file.filename,
        "type": "idml_convert",
    }
    background_tasks.add_task(process_idml_convert, job_id, idml_path, req)
    return {
        "job_id": job_id,
        "status": "queued",
        "poll_url": f"{BASE_URL}/jobs/{job_id}",
        "note": "IDML import may take a few minutes for complex documents.",
    }
|
||||||
|
|
||||||
|
|
||||||
|
# --- Export (single document) ---
|
||||||
|
|
||||||
|
@app.post("/export", tags=["export"])
async def export_document(req: ExportRequest, background_tasks: BackgroundTasks):
    """Export a Scribus template to PDF/PNG (async job)."""
    job_id = str(uuid.uuid4())
    record = {
        "job_id": job_id,
        "status": "queued",
        "progress": 0,
        "created_at": datetime.now(timezone.utc).isoformat(),
        "template": req.template,
    }
    jobs[job_id] = record
    background_tasks.add_task(process_export_job, job_id, req)
    return {"job_id": job_id, "status": "queued", "poll_url": f"{BASE_URL}/jobs/{job_id}"}
|
||||||
|
|
||||||
|
|
||||||
|
@app.post("/export/sync", tags=["export"])
async def export_document_sync(req: ExportRequest):
    """Export a Scribus template synchronously (blocks until done)."""
    job_id = str(uuid.uuid4())
    record = {
        "job_id": job_id,
        "status": "queued",
        "progress": 0,
        "created_at": datetime.now(timezone.utc).isoformat(),
        "template": req.template,
    }
    jobs[job_id] = record
    # Run the job inline instead of queueing it; the final job record
    # (including results or error) is returned directly.
    await process_export_job(job_id, req)
    return jobs[job_id]
|
||||||
|
|
||||||
|
|
||||||
|
# --- Batch export ---
|
||||||
|
|
||||||
|
@app.post("/export/batch", tags=["export"])
async def export_batch(req: BatchExportRequest, background_tasks: BackgroundTasks):
    """Batch export: one template + multiple data rows = multiple PDFs."""
    job_id = str(uuid.uuid4())
    row_count = len(req.rows)
    record = {
        "job_id": job_id,
        "status": "queued",
        "progress": 0,
        "created_at": datetime.now(timezone.utc).isoformat(),
        "template": req.template,
        "total_rows": row_count,
    }
    jobs[job_id] = record
    background_tasks.add_task(process_batch_job, job_id, req)
    return {"job_id": job_id, "status": "queued", "total_rows": row_count, "poll_url": f"{BASE_URL}/jobs/{job_id}"}
|
||||||
|
|
||||||
|
|
||||||
|
# --- rSwag integration ---
|
||||||
|
|
||||||
|
@app.get("/rswag/designs", tags=["rswag"])
async def list_rswag_designs(category: Optional[str] = None):
    """List available rSwag designs that can be exported to print-ready PDFs.

    Directory layout is RSWAG_DESIGNS_PATH/<category>/<design>/metadata.yaml.
    Without a category filter the walk is root -> category -> design; with a
    filter the walk starts one level deeper, so cat_dir already IS a design
    directory (see the comment below).
    """
    designs = []
    search_dirs = [RSWAG_DESIGNS_PATH]
    if category:
        # Narrow the walk to one category subtree.
        search_dirs = [RSWAG_DESIGNS_PATH / category]

    for search_dir in search_dirs:
        if not search_dir.is_dir():
            continue
        for cat_dir in sorted(search_dir.iterdir()):
            if not cat_dir.is_dir():
                continue
            # If filtering by category, cat_dir IS the design dir
            design_dirs = [cat_dir] if category else sorted(cat_dir.iterdir())
            for ddir in design_dirs:
                if not ddir.is_dir():
                    continue
                # Only directories with metadata.yaml count as designs.
                meta_path = ddir / "metadata.yaml"
                if not meta_path.exists():
                    continue
                try:
                    with open(meta_path) as f:
                        meta = yaml.safe_load(f)
                    designs.append({
                        "slug": ddir.name,
                        "name": meta.get("name", ddir.name),
                        "category": cat_dir.name if not category else category,
                        "description": meta.get("description", ""),
                        "status": meta.get("status", "unknown"),
                    })
                except Exception:
                    # Unreadable/malformed metadata: skip the design
                    # rather than failing the whole listing.
                    continue
    return designs
|
||||||
|
|
||||||
|
|
||||||
|
@app.post("/rswag/export", tags=["rswag"])
async def export_rswag_design(req: RswagExportRequest, background_tasks: BackgroundTasks):
    """Export an rSwag design to a print-ready PDF with bleed and crop marks."""
    job_id = str(uuid.uuid4())
    record = {
        "job_id": job_id,
        "status": "queued",
        "progress": 0,
        "created_at": datetime.now(timezone.utc).isoformat(),
        "design": req.design_slug,
    }
    jobs[job_id] = record
    background_tasks.add_task(process_rswag_export, job_id, req)
    return {"job_id": job_id, "status": "queued", "poll_url": f"{BASE_URL}/jobs/{job_id}"}
|
||||||
|
|
||||||
|
|
||||||
|
# --- Jobs ---
|
||||||
|
|
||||||
|
@app.get("/jobs/{job_id}", tags=["jobs"])
async def get_job(job_id: str):
    """Get job status."""
    job = jobs.get(job_id)
    if job is None:
        raise HTTPException(404, "Job not found")
    return job
|
||||||
|
|
||||||
|
|
||||||
|
@app.get("/jobs", tags=["jobs"])
async def list_jobs(limit: int = Query(20, le=100)):
    """List recent jobs, newest first."""
    recent = sorted(jobs.values(), key=lambda job: job["created_at"], reverse=True)
    return recent[:limit]
|
||||||
|
|
||||||
|
|
||||||
|
@app.delete("/jobs/{job_id}", tags=["jobs"])
async def delete_job(job_id: str):
    """Delete a job and its output file(s).

    Removes the job record plus every file it produced: the single
    ``result_url`` of export jobs and each per-row ``url`` in the
    ``results`` list of batch jobs (previously only ``result_url`` was
    cleaned up, orphaning batch outputs).
    """
    if job_id not in jobs:
        raise HTTPException(404, "Job not found")
    job = jobs.pop(job_id)

    # Collect every output URL this job recorded.
    urls = []
    if "result_url" in job:
        urls.append(job["result_url"])
    # Batch jobs store a list of per-row dicts; IDML jobs store a dict
    # here instead, which this loop safely skips.
    results = job.get("results", [])
    if isinstance(results, list):
        for row in results:
            if isinstance(row, dict) and "url" in row:
                urls.append(row["url"])

    for url in urls:
        filename = url.split("/")[-1]
        output_file = OUTPUT_DIR / filename
        if output_file.exists():
            output_file.unlink()
    return {"message": "Job deleted"}
|
||||||
|
|
||||||
|
|
||||||
|
# --- Output files ---
|
||||||
|
|
||||||
|
@app.get("/output/{filename}", tags=["output"])
async def get_output(filename: str):
    """Serve an output file from OUTPUT_DIR.

    The route path must declare the ``{filename}`` path parameter that the
    handler signature consumes (the previous literal path could never
    match and left ``filename`` unbound).
    """
    file_path = OUTPUT_DIR / filename
    # Path traversal protection: the resolved path must stay inside
    # OUTPUT_DIR even for names like "..%2Fsecret".
    if not file_path.resolve().is_relative_to(OUTPUT_DIR.resolve()):
        raise HTTPException(403, "Access denied")
    if not file_path.exists():
        raise HTTPException(404, "File not found")
    return FileResponse(
        file_path,
        # Outputs are timestamp-named, hence safely cacheable for a day.
        headers={"Cache-Control": "public, max-age=86400"},
    )
|
||||||
|
|
@ -0,0 +1,27 @@
|
||||||
|
<?xml version="1.0" encoding="UTF-8"?>
<!-- Openbox window-manager configuration for the rDesign studio (VNC)
     session.  Minimal setup: one desktop, classic theme, and Scribus
     started maximized. -->
<openbox_config xmlns="http://openbox.org/3.4/rc">
  <!-- Snap resistance (pixels) when dragging windows against each other
       and against screen edges. -->
  <resistance>
    <strength>10</strength>
    <screen_edge_strength>20</screen_edge_strength>
  </resistance>
  <!-- Click-to-focus behaviour: new windows get focus, focus does not
       follow the mouse. -->
  <focus>
    <focusNew>yes</focusNew>
    <followMouse>no</followMouse>
  </focus>
  <theme>
    <name>Clearlooks</name>
    <!-- Titlebar button order: icoN, Label, Iconify, Maximize, Close. -->
    <titleLayout>NLIMC</titleLayout>
    <font place="ActiveWindow"><name>DejaVu Sans</name><size>10</size></font>
    <font place="InactiveWindow"><name>DejaVu Sans</name><size>10</size></font>
  </theme>
  <!-- Single workspace named for the service. -->
  <desktops>
    <number>1</number>
    <names><name>rDesign Studio</name></names>
  </desktops>
  <!-- Per-application rules: launch Scribus maximized with window
       decorations. -->
  <applications>
    <application name="scribus">
      <maximized>yes</maximized>
      <decor>yes</decor>
    </application>
  </applications>
</openbox_config>
|
||||||
|
|
@ -0,0 +1,28 @@
|
||||||
|
#!/bin/bash
# Entrypoint for the rDesign studio container: starts a password-protected
# VNC server running an Openbox session, then bridges it to the browser
# via noVNC/websockify on port 6080.
set -e

# Configuration from the environment, with defaults.
VNC_PASSWORD="${VNC_PASSWORD:-changeme}"
DISPLAY_WIDTH="${DISPLAY_WIDTH:-1920}"
DISPLAY_HEIGHT="${DISPLAY_HEIGHT:-1080}"

# Set VNC password (vncpasswd -f reads the plaintext password on stdin
# and writes the obfuscated passwd file to stdout).
mkdir -p /home/designer/.vnc
echo "$VNC_PASSWORD" | vncpasswd -f > /home/designer/.vnc/passwd
chmod 600 /home/designer/.vnc/passwd
chown -R designer:designer /home/designer/.vnc

# Start VNC server on display :1 as the unprivileged "designer" user.
# -localhost no: accept external connections (the container network is
# assumed to restrict exposure — TODO confirm).
su - designer -c "vncserver :1 \
    -geometry ${DISPLAY_WIDTH}x${DISPLAY_HEIGHT} \
    -depth 24 \
    -SecurityTypes VncAuth \
    -localhost no \
    -xstartup /usr/bin/openbox-session" &

# Give the VNC server a moment to bind :5901 before the proxy connects.
sleep 2

# Start noVNC (websockify bridge); runs in the foreground and keeps the
# container alive.
/usr/share/novnc/utils/novnc_proxy \
    --vnc localhost:5901 \
    --listen 6080 \
    --web /usr/share/novnc
|
||||||
|
|
@ -0,0 +1,10 @@
|
||||||
|
# Template metadata for the "Example Flyer" Scribus template, read by the
# rDesign template scanner (see /templates endpoints).
name: Example Flyer
description: A simple A4 flyer template with placeholder text and image. Good starting point for event flyers, announcements, or product sheets.
category: flyer
# Placeholder names that can be substituted at export time (--var key=value).
variables:
  - title
  - subtitle
  - body
  - date
  - location
# Quoted so YAML keeps it a string rather than parsing a date object.
created: "2026-03-24"
|
||||||
Loading…
Reference in New Issue