// Generates Bark Battle card art via the VectorEngine image API.
//
// Usage:
//   node <script> [--dry-run | --live] [--only id1,id2,...]
//
// Dry-run is the default: the request plan is printed as JSON and the
// process exits. With --live, each prompt is POSTed to the
// image-generation endpoint and the resulting images are written to
// public/bark-battle-assets/generated/.

import { Buffer } from 'node:buffer';
import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'node:fs';
import path from 'node:path';

const repoRoot = process.cwd();
const promptsPath = path.join(repoRoot, 'public', 'bark-battle-assets', 'bark-battle-image-prompts.json');
const outDir = path.join(repoRoot, 'public', 'bark-battle-assets', 'generated');
const args = new Set(process.argv.slice(2));

// Fallback request timeout when VECTOR_ENGINE_IMAGE_REQUEST_TIMEOUT_MS is
// unset or not a positive number.
const DEFAULT_TIMEOUT_MS = 180000;

/**
 * Parse a dotenv-style file into a plain key/value object.
 * Supports `KEY=value` lines, `#` comments, and values wrapped in single
 * or double quotes. Returns {} when the file does not exist.
 * @param {string} fileName - File name relative to the repo root.
 * @returns {Record<string, string>}
 */
function readDotenv(fileName) {
  const filePath = path.join(repoRoot, fileName);
  if (!existsSync(filePath)) return {};
  const values = {};
  for (const line of readFileSync(filePath, 'utf8').split(/\r?\n/u)) {
    const trimmed = line.trim();
    if (!trimmed || trimmed.startsWith('#')) continue;
    const match = /^([A-Za-z_][A-Za-z0-9_]*)=(.*)$/u.exec(trimmed);
    if (!match) continue;
    let value = match[2].trim();
    // Strip matching surrounding quotes. The length check prevents a value
    // that is a single quote character from collapsing to the empty string
    // (startsWith and endsWith would both match the same character).
    if (
      value.length >= 2 &&
      ((value.startsWith('"') && value.endsWith('"')) ||
        (value.startsWith("'") && value.endsWith("'")))
    ) {
      value = value.slice(1, -1);
    }
    values[match[1]] = value;
  }
  return values;
}

/**
 * Resolve connection settings. Later sources win:
 * .env.example < .env.local < .env.secrets.local < real environment.
 * @returns {{ baseUrl: string, apiKey: string, timeoutMs: number }}
 */
function resolveEnv() {
  const loaded = {
    ...readDotenv('.env.example'),
    ...readDotenv('.env.local'),
    ...readDotenv('.env.secrets.local'),
    ...process.env,
  };
  const parsedTimeout = Number.parseInt(
    String(loaded.VECTOR_ENGINE_IMAGE_REQUEST_TIMEOUT_MS || DEFAULT_TIMEOUT_MS),
    10,
  );
  return {
    baseUrl: String(loaded.VECTOR_ENGINE_BASE_URL || '').trim().replace(/\/+$/u, ''),
    apiKey: String(loaded.VECTOR_ENGINE_API_KEY || '').trim(),
    // A non-numeric env value would yield NaN here; setTimeout(NaN) coerces
    // the delay to 0 and would abort every request immediately, so guard it.
    timeoutMs:
      Number.isFinite(parsedTimeout) && parsedTimeout > 0 ? parsedTimeout : DEFAULT_TIMEOUT_MS,
  };
}

/**
 * Build the image-generation endpoint URL, tolerating base URLs that
 * already carry the /v1 suffix.
 * @param {string} baseUrl - Base URL with trailing slashes already stripped.
 * @returns {string}
 */
function generationUrl(baseUrl) {
  return baseUrl.endsWith('/v1')
    ? `${baseUrl}/images/generations`
    : `${baseUrl}/v1/images/generations`;
}

/**
 * Recursively collect every non-empty string found under `targetKey`
 * anywhere inside a JSON-ish value (nested objects and arrays), pushing
 * trimmed matches onto `output`. Handles both a string value and an array
 * of strings at the key.
 * @param {unknown} value
 * @param {string} targetKey
 * @param {string[]} output - Mutated in place.
 */
function collectStringsByKey(value, targetKey, output) {
  if (Array.isArray(value)) {
    value.forEach((entry) => collectStringsByKey(entry, targetKey, output));
    return;
  }
  if (!value || typeof value !== 'object') return;
  for (const [key, nested] of Object.entries(value)) {
    if (key === targetKey) {
      if (typeof nested === 'string' && nested.trim()) output.push(nested.trim());
      if (Array.isArray(nested)) {
        nested.forEach(
          (entry) => typeof entry === 'string' && entry.trim() && output.push(entry.trim()),
        );
      }
    }
    collectStringsByKey(nested, targetKey, output);
  }
}

/**
 * Sniff an image file extension from magic bytes (PNG / JPEG / WEBP),
 * defaulting to 'png' when the signature is unrecognized.
 * @param {Buffer} bytes
 * @returns {'png' | 'jpg' | 'webp'}
 */
function inferExtensionFromBytes(bytes) {
  if (bytes.subarray(0, 8).equals(Buffer.from('\x89PNG\r\n\x1A\n', 'binary'))) return 'png';
  if (bytes.subarray(0, 3).equals(Buffer.from([0xff, 0xd8, 0xff]))) return 'jpg';
  if (
    bytes.subarray(0, 4).toString('ascii') === 'RIFF' &&
    bytes.subarray(8, 12).toString('ascii') === 'WEBP'
  ) {
    return 'webp';
  }
  return 'png';
}

/**
 * Run `work(signal)` with an AbortSignal that fires after `timeoutMs`;
 * the timer is always cleared, even on failure.
 * @template T
 * @param {number} timeoutMs
 * @param {(signal: AbortSignal) => Promise<T>} work
 * @returns {Promise<T>}
 */
async function withTimeout(timeoutMs, work) {
  const abortController = new AbortController();
  const timer = setTimeout(() => abortController.abort(), timeoutMs);
  try {
    return await work(abortController.signal);
  } finally {
    clearTimeout(timer);
  }
}

/**
 * Fetch a URL and parse the response body as JSON, aborting after
 * `timeoutMs`. Throws with the status and a truncated body on non-2xx.
 */
function fetchJson(url, options, timeoutMs) {
  return withTimeout(timeoutMs, async (signal) => {
    const response = await fetch(url, { ...options, signal });
    const text = await response.text();
    if (!response.ok) throw new Error(`VectorEngine ${response.status}: ${text.slice(0, 300)}`);
    return JSON.parse(text);
  });
}

/**
 * Download binary image data, deriving the file extension from the
 * Content-Type header (png when unrecognized).
 * @returns {Promise<{ bytes: Buffer, extension: string }>}
 */
function downloadUrl(url, timeoutMs) {
  return withTimeout(timeoutMs, async (signal) => {
    const response = await fetch(url, { signal });
    if (!response.ok) throw new Error(`download ${response.status}`);
    const bytes = Buffer.from(await response.arrayBuffer());
    const type = response.headers.get('content-type') || '';
    const extension = type.includes('webp') ? 'webp' : type.includes('jpeg') ? 'jpg' : 'png';
    return { bytes, extension };
  });
}

const rawTemplates = JSON.parse(readFileSync(promptsPath, 'utf8'));
if (!Array.isArray(rawTemplates)) {
  // Fail with a clear message instead of an opaque TypeError from .filter.
  throw new Error(`Expected an array of prompt templates in ${promptsPath}`);
}

// --only a,b,c restricts generation to the listed template ids.
const onlyIds = process.argv
  .slice(2)
  .flatMap((arg, index, values) =>
    arg === '--only' ? String(values[index + 1] || '').split(',') : [],
  )
  .map((value) => value.trim())
  .filter(Boolean);

const templates = rawTemplates.filter(
  (template) => !onlyIds.length || onlyIds.includes(template.id),
);

// Dry-run is the default; generation happens only with an explicit --live.
const dryRun = args.has('--dry-run') || !args.has('--live');

const requests = templates.map((template) => ({
  id: template.id,
  title: template.title,
  body: { model: 'gpt-image-2-all', prompt: template.prompt, n: 1, size: '1024x1024' },
}));

if (dryRun) {
  console.log(JSON.stringify({ mode: 'dry-run', outDir, count: requests.length, requests }, null, 2));
  process.exit(0);
}

const env = resolveEnv();
if (!env.baseUrl || !env.apiKey) {
  console.error(
    JSON.stringify({
      ok: false,
      error: 'Missing VECTOR_ENGINE_BASE_URL or VECTOR_ENGINE_API_KEY',
      hasBaseUrl: Boolean(env.baseUrl),
      hasApiKey: Boolean(env.apiKey),
    }),
  );
  process.exit(1);
}

mkdirSync(outDir, { recursive: true });

const files = [];
for (const request of requests) {
  console.log(`Generating ${request.id}...`);
  const payload = await fetchJson(
    generationUrl(env.baseUrl),
    {
      method: 'POST',
      headers: {
        Authorization: `Bearer ${env.apiKey}`,
        Accept: 'application/json',
        'Content-Type': 'application/json',
      },
      body: JSON.stringify(request.body),
    },
    env.timeoutMs,
  );

  // Providers differ in where they put the image: look for URLs under
  // several common keys, then fall back to inline base64 payloads.
  const urls = [];
  const b64 = [];
  collectStringsByKey(payload, 'url', urls);
  collectStringsByKey(payload, 'image', urls);
  collectStringsByKey(payload, 'image_url', urls);
  collectStringsByKey(payload, 'b64_json', b64);

  let image;
  const url = [...new Set(urls)].find((item) => /^https?:\/\//u.test(item));
  if (url) {
    image = await downloadUrl(url, env.timeoutMs);
  } else if (b64[0]) {
    const bytes = Buffer.from(b64[0], 'base64');
    image = { bytes, extension: inferExtensionFromBytes(bytes) };
  } else {
    throw new Error(`VectorEngine returned no image for ${request.id}`);
  }

  const outputPath = path.join(outDir, `${request.id}.${image.extension}`);
  writeFileSync(outputPath, image.bytes);
  files.push(outputPath);
}

console.log(JSON.stringify({ ok: true, count: files.length, files }, null, 2));