// immich-docker/search-app/server.js
const http = require('http');
const net = require('net');
const fs = require('fs');
const path = require('path');
const crypto = require('crypto');
// Port this proxy listens on (defaults to 3000).
const PORT = process.env.PORT || 3000;
// Upstream Immich server the proxy fronts.
const IMMICH_URL = process.env.IMMICH_URL || 'http://immich-server:2283';
// Pre-parsed form of IMMICH_URL, used by the WebSocket upgrade handler.
const immichParsed = new URL(IMMICH_URL);
// Timeouts for large uploads (10 minutes)
const UPLOAD_TIMEOUT = 10 * 60 * 1000;
// Load the custom JS to inject. Version hash lets PWAs detect stale caches.
const customJS = fs.readFileSync(path.join(__dirname, 'live-search.js'), 'utf8');
// Short (12 hex chars) SHA-256 fingerprint of the injected script content.
const CUSTOM_JS_HASH = crypto
.createHash('sha256')
.update(customJS)
.digest('hex')
.slice(0, 12);
// `currentVersion` combines our custom script with a fingerprint of Immich's
// served HTML so that any upstream update (chunk-hash rotation, etc.) also
// bumps the version. Stuck PWAs polling /api/custom/inject-version will see
// the change and self-reload. Initialized with just the custom hash until the
// first HTML response lands.
let currentVersion = CUSTOM_JS_HASH;
console.log(`Injected script hash: ${CUSTOM_JS_HASH} (initial version: ${currentVersion})`);
// Build the HTML snippet injected before </body>: first expose the current
// version on window.__LS_VERSION, then inline the live-search script itself.
function makeScriptTag(version) {
  const versionTag = `<script>window.__LS_VERSION=${JSON.stringify(version)};</script>`;
  const scriptTag = `<script>${customJS}</script>`;
  return versionTag + scriptTag;
}
// Extract just the chunk/manifest references from Immich's HTML (the parts
// that actually change on upgrades). Hashing the full HTML would include
// server-rendered state (CSRF tokens, session user id) that change per-user
// per-request and would cause spurious version churn.
function extractShellFingerprint(html) {
  const assetRefs = html.match(/\/_app\/immutable\/[^"'\s]+/g) ?? [];
  assetRefs.sort();
  return assetRefs.join('|');
}
// Helper: make an internal request to Immich (for small JSON requests only).
// Resolves with { status, headers, body } where body is the buffered UTF-8
// response text; rejects on socket-level errors.
function immichRequest(method, apiPath, headers, body) {
  const url = new URL(apiPath, IMMICH_URL);
  const options = {
    hostname: url.hostname,
    port: url.port || 80,
    path: url.pathname + url.search,
    method,
    headers: { ...headers, host: url.host }
  };
  // Force identity encoding so the buffered body is plain text.
  delete options.headers['accept-encoding'];
  return new Promise((resolve, reject) => {
    const upstream = http.request(options, (response) => {
      const pieces = [];
      response.on('data', (piece) => pieces.push(piece));
      response.on('end', () => {
        resolve({
          status: response.statusCode,
          headers: response.headers,
          body: Buffer.concat(pieces).toString('utf8')
        });
      });
    });
    upstream.on('error', reject);
    if (body) upstream.write(body);
    upstream.end();
  });
}
// Main request handler: two custom JSON endpoints plus a streaming reverse
// proxy to Immich that rewrites HTML responses to inject the live-search
// script (and a version marker) just before </body>.
const server = http.createServer((req, res) => {
  // --- Custom endpoint: report current injected-script version ---
  // Polled by the injected client script; never cached so a stuck PWA can
  // notice a version bump and self-reload.
  if (req.url === '/api/custom/inject-version' && req.method === 'GET') {
    res.writeHead(200, {
      'Content-Type': 'application/json',
      'Cache-Control': 'no-store, no-cache, must-revalidate'
    });
    res.end(JSON.stringify({ version: currentVersion }));
    return;
  }
  // --- Custom endpoint: batch fetch exif (needs buffered body) ---
  if (req.url === '/api/custom/batch-exif' && req.method === 'POST') {
    const bodyChunks = [];
    req.on('data', (chunk) => bodyChunks.push(chunk));
    // Fix: without an error handler a client abort mid-body left the
    // request dangling; drop the response instead.
    req.on('error', (e) => {
      console.error('batch-exif request error:', e.message);
      res.destroy();
    });
    req.on('end', async () => {
      try {
        const { ids } = JSON.parse(Buffer.concat(bodyChunks).toString());
        // Fix: validate the payload shape up front for a clearer 400
        // (previously a non-array fell through to a TypeError message).
        if (!Array.isArray(ids)) {
          res.writeHead(400, { 'Content-Type': 'application/json' });
          res.end(JSON.stringify({ error: 'ids must be an array' }));
          return;
        }
        // Forward whichever credential the caller used (bearer token from
        // the web app, or an API key).
        const hdrs = { 'Content-Type': 'application/json' };
        const authHeader = req.headers['authorization'] || '';
        const apiKey = req.headers['x-api-key'] || '';
        if (authHeader) hdrs['Authorization'] = authHeader;
        if (apiKey) hdrs['x-api-key'] = apiKey;
        // Fetch all assets in parallel; a failed/missing asset degrades to
        // null coordinates rather than failing the whole batch.
        const results = await Promise.all(ids.map(async (id) => {
          try {
            const r = await immichRequest('GET', `/api/assets/${id}`, hdrs);
            if (r.status === 200) {
              const exif = JSON.parse(r.body).exifInfo || {};
              // Fix: use ?? instead of || so a legitimate 0 coordinate
              // (equator / prime meridian) is not coerced to null.
              return {
                id,
                latitude: exif.latitude ?? null,
                longitude: exif.longitude ?? null,
                city: exif.city ?? null,
                state: exif.state ?? null,
                country: exif.country ?? null
              };
            }
          } catch {} // best-effort: treat upstream errors as "no exif"
          return { id, latitude: null, longitude: null };
        }));
        res.writeHead(200, { 'Content-Type': 'application/json' });
        res.end(JSON.stringify(results));
      } catch (e) {
        // Malformed JSON body or similar caller error.
        res.writeHead(400, { 'Content-Type': 'application/json' });
        res.end(JSON.stringify({ error: e.message }));
      }
    });
    return;
  }
  // --- Standard streaming proxy for everything else ---
  const targetUrl = new URL(req.url, IMMICH_URL);
  const headers = { ...req.headers };
  headers.host = targetUrl.host;
  // Request identity encoding so HTML bodies can be rewritten as plain text.
  delete headers['accept-encoding'];
  const proxyOpts = {
    hostname: targetUrl.hostname,
    port: targetUrl.port || 80,
    path: req.url,
    method: req.method,
    headers,
    timeout: UPLOAD_TIMEOUT
  };
  const proxyReq = http.request(proxyOpts, (proxyRes) => {
    const contentType = proxyRes.headers['content-type'] || '';
    const isHTML = contentType.includes('text/html');
    if (isHTML) {
      // Buffer HTML (only) to inject the script tag before </body>.
      const htmlChunks = [];
      proxyRes.on('data', (chunk) => htmlChunks.push(chunk));
      // Fix: a truncated upstream stream previously never fired 'end' and
      // left the client hanging until the socket timeout.
      proxyRes.on('error', (e) => {
        console.error('Upstream HTML stream error:', e.message);
        res.destroy();
      });
      proxyRes.on('end', () => {
        let html = Buffer.concat(htmlChunks).toString('utf8');
        // Per-response version: custom script hash combined with the
        // upstream HTML's chunk/manifest fingerprint. An Immich upgrade
        // that rotates chunk hashes bumps our version and stuck PWAs will
        // self-heal on the next poll.
        const shellHash = crypto
          .createHash('sha256')
          .update(customJS)
          .update(extractShellFingerprint(html))
          .digest('hex')
          .slice(0, 12);
        currentVersion = shellHash;
        html = html.replace('</body>', makeScriptTag(shellHash) + '</body>');
        const resHeaders = { ...proxyRes.headers };
        // The rewrite changed the body length/encoding.
        delete resHeaders['content-length'];
        delete resHeaders['content-encoding'];
        resHeaders['content-type'] = 'text/html; charset=utf-8';
        // Prevent browser + CDN from caching our modified HTML so the
        // inline script version check stays fresh on every navigation.
        resHeaders['cache-control'] = 'no-store, no-cache, must-revalidate';
        delete resHeaders['etag'];
        delete resHeaders['last-modified'];
        // Drop CSP/XFO so we can iframe heatmap.jeffemmett.com and
        // inject arbitrary elements into pages.
        delete resHeaders['content-security-policy'];
        delete resHeaders['content-security-policy-report-only'];
        delete resHeaders['x-frame-options'];
        res.writeHead(proxyRes.statusCode, resHeaders);
        res.end(html);
      });
    } else {
      // Stream non-HTML responses directly (videos, images, API JSON)
      res.writeHead(proxyRes.statusCode, proxyRes.headers);
      proxyRes.pipe(res);
    }
  });
  proxyReq.on('error', (e) => {
    console.error('Proxy error:', e.message);
    if (!res.headersSent) {
      res.writeHead(502, { 'Content-Type': 'text/plain' });
      res.end('Bad Gateway');
    }
  });
  proxyReq.on('timeout', () => {
    console.error('Proxy request timed out');
    proxyReq.destroy();
    if (!res.headersSent) {
      res.writeHead(504, { 'Content-Type': 'text/plain' });
      res.end('Gateway Timeout');
    }
  });
  // Fix: if the client disconnects early, tear down the upstream request so
  // the Immich connection doesn't linger for the full upload timeout.
  res.on('close', () => {
    if (!res.writableEnded) proxyReq.destroy();
  });
  // Stream request body directly to Immich (no buffering)
  req.pipe(proxyReq);
});
// WebSocket proxy — handle HTTP upgrade events by opening a raw TCP socket
// to Immich, replaying the upgrade request, then splicing the two sockets.
server.on('upgrade', (req, clientSocket, head) => {
  const targetHost = immichParsed.hostname;
  const targetPort = immichParsed.port || 80;
  const proxySocket = net.connect(targetPort, targetHost, () => {
    // Rebuild the original request line + headers for the upstream,
    // swapping the Host header for the Immich host:port.
    const lines = [`${req.method} ${req.url} HTTP/1.1`];
    for (const [name, value] of Object.entries(req.headers)) {
      const line = name.toLowerCase() === 'host'
        ? `Host: ${targetHost}:${targetPort}`
        : `${name}: ${value}`;
      lines.push(line);
    }
    lines.push('', '');
    proxySocket.write(lines.join('\r\n'));
    // Forward any bytes that arrived bundled with the upgrade request.
    if (head.length > 0) proxySocket.write(head);
    // Bidirectional pipe between client and Immich.
    proxySocket.pipe(clientSocket);
    clientSocket.pipe(proxySocket);
  });
  proxySocket.on('error', (e) => {
    console.error('WebSocket proxy error:', e.message);
    clientSocket.destroy();
  });
  clientSocket.on('error', (e) => {
    console.error('WebSocket client error:', e.message);
    proxySocket.destroy();
  });
});
// Increase server timeouts for large uploads
server.timeout = UPLOAD_TIMEOUT;
server.requestTimeout = UPLOAD_TIMEOUT;
// headersTimeout is kept slightly above requestTimeout so Node does not
// reject slow requests while headers are still arriving.
server.headersTimeout = UPLOAD_TIMEOUT + 1000;
server.keepAliveTimeout = UPLOAD_TIMEOUT;
// Bind on all interfaces so the container port mapping reaches the server.
server.listen(PORT, '0.0.0.0', () => {
console.log(`Immich proxy with live search running on port ${PORT}`);
console.log(`Proxying to: ${IMMICH_URL}`);
console.log(`Upload timeout: ${UPLOAD_TIMEOUT / 1000}s`);
});