初始仓库迁移 (Initial repository migration)
Some checks failed
CI / verify (push) Has been cancelled

This commit is contained in:
2026-04-04 23:57:06 +08:00
parent 80986b790d
commit c49c64896a
18446 changed files with 532435 additions and 2 deletions

228
src/services/llmClient.ts Normal file
View File

@@ -0,0 +1,228 @@
// HTTP client for the LLM chat-completions proxy used by the app.
import type {TextStreamOptions} from './aiTypes';
// Base URL of the dev-server proxy that forwards requests to the LLM provider.
const API_BASE_URL = import.meta.env.VITE_LLM_PROXY_BASE_URL || '/api/llm';
// Model identifier sent with every completion request.
const MODEL = import.meta.env.VITE_LLM_MODEL || 'doubao-1-5-pro-32k-character-250715';
// Verbose prompt/response logging: always on in dev builds, opt-in via env var otherwise.
const ENABLE_LLM_DEBUG_LOG = import.meta.env.DEV || import.meta.env.VITE_LLM_DEBUG_LOG === 'true';
/** Per-request tuning knobs for the plain-text completion helpers. */
export interface PlainTextCompletionOptions {
  /** Abort the request after this many milliseconds; defaults to REQUEST_TIMEOUT_MS. */
  timeoutMs?: number;
  /** Label used to tag debug/error log lines for this request; defaults to 'chat'. */
  debugLabel?: string;
}
/**
 * Error thrown when the LLM endpoint cannot be reached or the request
 * times out, letting callers distinguish connectivity problems from
 * other failures (see isLlmConnectivityError).
 */
export class LlmConnectivityError extends Error {
  constructor(detail: string) {
    super(detail);
    this.name = 'LlmConnectivityError';
  }
}
/**
 * Converts an environment-variable string into a timeout in milliseconds.
 * Returns the fallback when the value is missing, non-numeric, infinite,
 * or not strictly positive; otherwise rounds to the nearest integer.
 */
export function resolveTimeoutMs(rawValue: string | undefined, fallback: number) {
  const candidate = Number(rawValue);
  if (!Number.isFinite(candidate) || candidate <= 0) {
    return fallback;
  }
  return Math.round(candidate);
}
// Default timeout for ordinary chat/completion requests (override via env).
export const REQUEST_TIMEOUT_MS = resolveTimeoutMs(import.meta.env.VITE_LLM_REQUEST_TIMEOUT_MS, 15000);
// Longer timeout for custom-world generation; never shorter than the base
// request timeout, and at least 45s by default.
export const CUSTOM_WORLD_REQUEST_TIMEOUT_MS = resolveTimeoutMs(
  import.meta.env.VITE_LLM_CUSTOM_WORLD_TIMEOUT_MS,
  Math.max(REQUEST_TIMEOUT_MS, 45000),
);
/**
 * Emits a debug log line via console.warn, but only when debug logging
 * is enabled for this build (dev mode or VITE_LLM_DEBUG_LOG=true).
 */
function logLlmDebug(title: string, payload: unknown) {
  if (ENABLE_LLM_DEBUG_LOG) {
    console.warn(title, payload);
  }
}
/**
 * Rethrows a fetch-layer failure as an LlmConnectivityError when it is a
 * timeout (AbortError) or a network-level failure (TypeError); every other
 * error is rethrown unchanged. Never returns.
 */
function normalizeLlmError(error: unknown): never {
  const isTimeout = error instanceof DOMException && error.name === 'AbortError';
  if (isTimeout) {
    throw new LlmConnectivityError('The LLM request timed out. Please check the network or endpoint.');
  }
  const isNetworkFailure = error instanceof TypeError;
  if (isNetworkFailure) {
    throw new LlmConnectivityError('Unable to reach the LLM endpoint. The network or proxy may be unavailable.');
  }
  throw error;
}
/** Type guard: true when the error is a connectivity failure raised by this module. */
export function isLlmConnectivityError(error: unknown): error is LlmConnectivityError {
  return error instanceof LlmConnectivityError;
}
/**
 * Sends a system/user prompt pair to the chat-completions proxy and returns
 * the assistant's message content, trimmed of surrounding whitespace.
 *
 * Aborts after options.timeoutMs (default REQUEST_TIMEOUT_MS). Throws a
 * plain Error for HTTP failures or a malformed response body; timeout and
 * network failures are converted by normalizeLlmError.
 */
async function requestMessageContent(
  systemPrompt: string,
  userPrompt: string,
  options: PlainTextCompletionOptions = {},
) {
  const timeoutMs = options.timeoutMs ?? REQUEST_TIMEOUT_MS;
  const debugLabel = options.debugLabel ?? 'chat';
  const abortController = new AbortController();
  const abortTimer = setTimeout(() => abortController.abort(), timeoutMs);
  const startTime = performance.now();
  try {
    logLlmDebug(`[LLM:${debugLabel}] prompt text`, `[System]\n${systemPrompt}\n\n[User]\n${userPrompt}`);
    const response = await fetch(`${API_BASE_URL}/chat/completions`, {
      method: 'POST',
      headers: {'Content-Type': 'application/json'},
      body: JSON.stringify({
        model: MODEL,
        messages: [
          {role: 'system' as const, content: systemPrompt},
          {role: 'user' as const, content: userPrompt},
        ],
      }),
      signal: abortController.signal,
    });
    const rawResponseText = await response.text();
    if (!response.ok) {
      // 401 gets a dedicated message because the fix (server-side key) differs
      // from generic transport/HTTP failures.
      if (response.status === 401) {
        throw new Error('LLM authentication failed. Check the configured API key on the dev server.');
      }
      throw new Error(`LLM request failed: ${response.status} ${rawResponseText}`);
    }
    const payload = JSON.parse(rawResponseText);
    const content = payload?.choices?.[0]?.message?.content;
    if (!content || typeof content !== 'string') {
      throw new Error('LLM response did not include message content.');
    }
    logLlmDebug(`[LLM:${debugLabel}] output text`, content);
    logLlmDebug(`[LLM:${debugLabel}] completion success`, {
      model: MODEL,
      elapsedMs: Math.round(performance.now() - startTime),
      responseLength: content.length,
      timeoutMs,
    });
    return content.trim();
  } catch (error) {
    console.error(`[LLM:${debugLabel}] completion failed`, {
      model: MODEL,
      elapsedMs: Math.round(performance.now() - startTime),
      timeoutMs,
      error: error instanceof Error ? error.message : String(error),
    });
    return normalizeLlmError(error);
  } finally {
    clearTimeout(abortTimer);
  }
}
/**
 * Public wrapper for conversational chat requests; delegates directly to
 * requestMessageContent and returns the trimmed assistant message text.
 */
export async function requestChatMessageContent(
  systemPrompt: string,
  userPrompt: string,
  options: PlainTextCompletionOptions = {},
) {
  return requestMessageContent(systemPrompt, userPrompt, options);
}
/**
 * Public wrapper for one-shot plain-text completions; delegates directly to
 * requestMessageContent and returns the trimmed assistant message text.
 */
export async function requestPlainTextCompletion(
  systemPrompt: string,
  userPrompt: string,
  options: PlainTextCompletionOptions = {},
) {
  return requestMessageContent(systemPrompt, userPrompt, options);
}
/**
 * Streams a chat completion over SSE, invoking options.onUpdate with the
 * accumulated text after each delta, and resolves with the full trimmed text.
 *
 * When the response has no readable body, falls back to a non-streaming
 * request and replays it character-by-character through onUpdate.
 * Times out after REQUEST_TIMEOUT_MS; timeout/network failures are converted
 * by normalizeLlmError.
 *
 * Fix: the previous implementation never flushed the TextDecoder after the
 * stream ended and never parsed a leftover buffer, so a final SSE frame that
 * arrived without a trailing blank line (or with a multi-byte character split
 * across the last chunk) was silently dropped.
 */
export async function streamPlainTextCompletion(
  systemPrompt: string,
  userPrompt: string,
  options: TextStreamOptions = {},
) {
  const controller = new AbortController();
  const timeout = setTimeout(() => controller.abort(), REQUEST_TIMEOUT_MS);
  try {
    const response = await fetch(`${API_BASE_URL}/chat/completions`, {
      method: 'POST',
      headers: {'Content-Type': 'application/json'},
      body: JSON.stringify({
        model: MODEL,
        stream: true,
        messages: [
          {role: 'system' as const, content: systemPrompt},
          {role: 'user' as const, content: userPrompt},
        ],
      }),
      signal: controller.signal,
    });
    if (!response.ok) {
      const rawResponseText = await response.text();
      if (response.status === 401) {
        throw new Error('LLM authentication failed. Check the configured API key on the dev server.');
      }
      throw new Error(`LLM request failed: ${response.status} ${rawResponseText}`);
    }
    if (!response.body) {
      // No streaming support (e.g. certain proxies): fall back to a blocking
      // request and simulate progressive updates for the caller.
      const fallbackText = await requestPlainTextCompletion(systemPrompt, userPrompt);
      let progressiveText = '';
      for (const char of fallbackText) {
        progressiveText += char;
        options.onUpdate?.(progressiveText);
      }
      return fallbackText;
    }
    const reader = response.body.getReader();
    const decoder = new TextDecoder('utf-8');
    let buffer = '';
    let accumulatedText = '';
    // Parses one SSE event block ("data: ..." lines) and appends any delta
    // content to accumulatedText, notifying the caller after each delta.
    const consumeEventBlock = (eventBlock: string) => {
      for (const rawLine of eventBlock.split(/\r?\n/u)) {
        const line = rawLine.trim();
        if (!line.startsWith('data:')) {
          continue;
        }
        const data = line.slice(5).trim();
        if (!data || data === '[DONE]') {
          continue;
        }
        try {
          const parsed = JSON.parse(data);
          const delta = parsed?.choices?.[0]?.delta?.content;
          if (typeof delta === 'string' && delta.length > 0) {
            accumulatedText += delta;
            options.onUpdate?.(accumulatedText);
          }
        } catch {
          // Ignore malformed SSE frames and continue consuming the stream.
        }
      }
    };
    for (;;) {
      const {done, value} = await reader.read();
      if (done) {
        break;
      }
      buffer += decoder.decode(value, {stream: true});
      // Event blocks are separated by a blank line; consume every complete one.
      let boundary = buffer.indexOf('\n\n');
      while (boundary !== -1) {
        consumeEventBlock(buffer.slice(0, boundary));
        buffer = buffer.slice(boundary + 2);
        boundary = buffer.indexOf('\n\n');
      }
    }
    // Flush any bytes still held by the decoder, then parse the residual
    // event block that some servers send without a terminating blank line.
    buffer += decoder.decode();
    if (buffer.trim().length > 0) {
      consumeEventBlock(buffer);
    }
    return accumulatedText.trim();
  } catch (error) {
    return normalizeLlmError(error);
  } finally {
    clearTimeout(timeout);
  }
}