Files
Genarrative/server-node/src/services/runtimeItemService.ts
2026-04-08 16:41:29 +08:00

105 lines
3.5 KiB
TypeScript

import { buildRuntimeItemAiIntent } from '../../../src/data/runtimeItemNarrative.js';
import { parseJsonResponseText } from '../../../src/services/llmParsers.js';
import {
buildRuntimeItemIntentPrompt,
RUNTIME_ITEM_INTENT_SYSTEM_PROMPT,
} from '../../../src/services/runtimeItemAiPrompt.js';
import type {
RuntimeItemAiIntent,
RuntimeItemGenerationContext,
RuntimeItemPlan,
} from '../../../src/types/runtimeItem.js';
import type { UpstreamLlmClient } from './llmClient.js';
/**
 * Returns `value` trimmed when it is a non-empty string; otherwise the
 * supplied fallback. Strings that are empty or whitespace-only count as
 * missing and yield the fallback.
 */
function coerceString(value: unknown, fallback: string) {
  if (typeof value !== 'string') {
    return fallback;
  }
  const trimmed = value.trim();
  return trimmed.length > 0 ? trimmed : fallback;
}
/**
 * Coerces an unknown value into a capped array of trimmed, non-empty
 * strings. Non-array input, or input that yields no usable entries
 * after trimming/capping, returns the fallback instead.
 */
function coerceStringArray(value: unknown, fallback: string[], limit: number) {
  if (!Array.isArray(value)) {
    return fallback;
  }
  // Keep only trimmed, non-empty string entries.
  const cleaned: string[] = [];
  for (const entry of value) {
    if (typeof entry !== 'string') {
      continue;
    }
    const trimmed = entry.trim();
    if (trimmed) {
      cleaned.push(trimmed);
    }
  }
  const capped = cleaned.slice(0, limit);
  return capped.length > 0 ? capped : fallback;
}
/**
 * Normalizes an untrusted, LLM-produced intent object into a valid
 * RuntimeItemAiIntent, substituting each field from `fallback` whenever
 * the raw value is missing, mistyped, or fails enum validation.
 *
 * @param rawIntent - Untrusted value parsed from the model response.
 * @param fallback - Deterministic intent supplying defaults per field.
 * @returns A fully populated, validated intent.
 */
function sanitizeRuntimeItemAiIntent(
  rawIntent: unknown,
  fallback: RuntimeItemAiIntent,
): RuntimeItemAiIntent {
  if (!rawIntent || typeof rawIntent !== 'object') {
    return fallback;
  }
  const intent = rawIntent as Record<string, unknown>;
  // Fix: validate entries BEFORE applying the 2-item cap. Previously the
  // cap was applied first (limit 2 in coerceStringArray), so an invalid
  // leading entry consumed a slot and pushed a valid one past the limit:
  // ['bogus', 'heal', 'mana'] produced ['heal'] instead of ['heal', 'mana'].
  const desiredFunctionalBias = coerceStringArray(
    intent.desiredFunctionalBias,
    fallback.desiredFunctionalBias,
    Number.POSITIVE_INFINITY,
  )
    .filter(
      (
        item,
      ): item is RuntimeItemAiIntent['desiredFunctionalBias'][number] =>
        ['heal', 'mana', 'cooldown', 'guard', 'damage'].includes(item),
    )
    .slice(0, 2);
  const tone = coerceString(intent.tone, fallback.tone);
  return {
    shortNameSeed: coerceString(intent.shortNameSeed, fallback.shortNameSeed),
    sourcePhrase: coerceString(intent.sourcePhrase, fallback.sourcePhrase),
    reasonToAppear: coerceString(intent.reasonToAppear, fallback.reasonToAppear),
    relationHooks: coerceStringArray(intent.relationHooks, fallback.relationHooks, 2),
    desiredBuildTags: coerceStringArray(intent.desiredBuildTags, fallback.desiredBuildTags, 3),
    // If no valid bias value survives filtering, fall back rather than
    // emit an empty bias list.
    desiredFunctionalBias:
      desiredFunctionalBias.length > 0
        ? desiredFunctionalBias
        : fallback.desiredFunctionalBias,
    // Tone must be one of the known enum values; anything else reverts
    // to the fallback tone.
    tone: ['grim', 'mysterious', 'martial', 'ritual', 'survival'].includes(tone)
      ? (tone as RuntimeItemAiIntent['tone'])
      : fallback.tone,
    // Optional narrative fields default to '' when absent on the fallback too.
    visibleClue: coerceString(intent.visibleClue, fallback.visibleClue ?? ''),
    witnessMark: coerceString(intent.witnessMark, fallback.witnessMark ?? ''),
    unfinishedBusiness: coerceString(
      intent.unfinishedBusiness,
      fallback.unfinishedBusiness ?? '',
    ),
    hiddenHook: coerceString(intent.hiddenHook, fallback.hiddenHook ?? ''),
    reactionHooks: coerceStringArray(
      intent.reactionHooks,
      fallback.reactionHooks ?? [],
      4,
    ),
    namingPattern: coerceString(intent.namingPattern, fallback.namingPattern ?? ''),
  };
}
/**
 * Asks the upstream LLM for one narrative intent per item plan, then
 * sanitizes each slot of the response. Slots that are missing or
 * malformed fall back to deterministic intents built from the plan.
 */
export async function generateRuntimeItemIntents(
  llmClient: UpstreamLlmClient,
  params: {
    context: RuntimeItemGenerationContext;
    plans: RuntimeItemPlan[];
  },
) {
  // Deterministic per-plan fallbacks, used whenever the model's answer
  // for the matching slot is unusable.
  const fallbackIntents = params.plans.map((plan) =>
    buildRuntimeItemAiIntent(params.context, plan),
  );
  const responseText = await llmClient.requestMessageContent({
    systemPrompt: RUNTIME_ITEM_INTENT_SYSTEM_PROMPT,
    userPrompt: buildRuntimeItemIntentPrompt(params),
  });
  const payload = parseJsonResponseText(responseText) as {
    intents?: unknown[];
  };
  const candidates = Array.isArray(payload.intents) ? payload.intents : [];
  // fallbackIntents has exactly one entry per plan, so mapping over it
  // (instead of plans + index lookup) avoids the non-null assertion.
  return fallbackIntents.map((fallback, index) =>
    sanitizeRuntimeItemAiIntent(candidates[index], fallback),
  );
}