Files
Genarrative/server-rs/crates/api-server/src/runtime_story/compat/ai.rs

359 lines
13 KiB
Rust
Raw Blame History

This file contains ambiguous Unicode characters
This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.
use super::*;
/// Builds the full AI response for a runtime story turn.
///
/// Options are derived deterministically from the request; the narrative text
/// comes from the LLM when available and falls back to a deterministic
/// template otherwise, so this function itself never fails.
pub(super) async fn build_runtime_story_ai_response(
    state: &AppState,
    payload: RuntimeStoryAiRequest,
    initial: bool,
) -> RuntimeStoryAiResponse {
    let options = build_ai_response_options(&payload);
    // The fallback is only needed when the LLM is unavailable or returns
    // whitespace; build it lazily instead of up front so the happy path
    // skips the extra string formatting (clippy: or_fun_call).
    let story_text = generate_ai_story_text(state, &payload, initial)
        .await
        .filter(|text| !text.trim().is_empty())
        .unwrap_or_else(|| build_ai_fallback_story_text(&payload, initial));
    RuntimeStoryAiResponse {
        story_text,
        options,
        encounter: None,
    }
}
/// Asks the configured LLM for a narrative paragraph.
///
/// Returns `None` when no LLM client is configured, the request fails, or the
/// reply is empty after trimming — callers supply their own fallback text.
pub(super) async fn generate_ai_story_text(
    state: &AppState,
    payload: &RuntimeStoryAiRequest,
    initial: bool,
) -> Option<String> {
    let client = state.llm_client()?;
    // The system prompt differs between the opening scene and a continuation.
    let system_prompt = match initial {
        true => "你是游戏运行时剧情导演。请用中文输出一段可直接展示给玩家的开局剧情,不要输出 JSON。",
        false => "你是游戏运行时剧情导演。请用中文根据玩家选择续写一段剧情,不要输出 JSON。",
    };
    let prompt_payload = json!({
        "worldType": payload.world_type,
        "character": payload.character,
        "monsters": payload.monsters,
        "history": payload.history,
        "choice": payload.choice,
        "context": payload.context,
        "availableOptions": payload.request_options.available_options,
    });
    let mut text_request = LlmTextRequest::new(vec![
        LlmMessage::system(system_prompt),
        LlmMessage::user(prompt_payload.to_string()),
    ]);
    text_request.max_tokens = Some(700);
    let response = client.request_text(text_request).await.ok()?;
    let trimmed = response.content.trim();
    if trimmed.is_empty() {
        None
    } else {
        Some(trimmed.to_string())
    }
}
/// Routes an already-settled action to the appropriate LLM rewriter.
///
/// Action settlement is still done by deterministic rules; the LLM only
/// rewrites the settled result into presentable text, and a failure here
/// never breaks the main chain (callers treat `None` as "use default text").
pub(super) async fn generate_action_story_payload(
    state: &AppState,
    game_state: &Value,
    request: &RuntimeStoryActionRequest,
    function_id: &str,
    action_text: &str,
    result_text: &str,
    options: &[RuntimeStoryOptionView],
    battle: Option<&RuntimeBattlePresentation>,
) -> Option<GeneratedStoryPayload> {
    let llm_client = state.llm_client()?;
    if matches!(function_id, "npc_chat" | "story_opening_camp_dialogue") {
        // NPC conversations get dialogue-shaped output.
        generate_npc_dialogue_payload(
            llm_client,
            game_state,
            request,
            action_text,
            result_text,
            options,
        )
        .await
    } else if should_generate_reasoned_combat_story(battle) {
        // Finished battles get a short narrative recap.
        generate_reasoned_story_payload(
            llm_client,
            game_state,
            request,
            action_text,
            result_text,
            options,
            battle,
        )
        .await
    } else {
        None
    }
}
/// Rewrites a settled NPC-dialogue turn into a few lines of presentable dialogue.
///
/// Returns `None` when required runtime-state fields are missing, when the
/// current encounter is not an NPC encounter, or when the LLM call fails or
/// returns an empty reply — callers fall back to the deterministic text.
pub(super) async fn generate_npc_dialogue_payload(
    llm_client: &LlmClient,
    game_state: &Value,
    request: &RuntimeStoryActionRequest,
    action_text: &str,
    result_text: &str,
    deferred_options: &[RuntimeStoryOptionView],
) -> Option<GeneratedStoryPayload> {
    let world_type = current_world_type(game_state)?;
    let character = read_object_field(game_state, "playerCharacter")?.clone();
    let encounter = read_object_field(game_state, "currentEncounter")?;
    // Dialogue rewriting only applies to NPC encounters; bail out otherwise.
    if read_required_string_field(encounter, "kind").as_deref() != Some("npc") {
        return None;
    }
    // Prefer the explicit npcName, then the generic name, then a neutral label.
    let npc_name = read_optional_string_field(encounter, "npcName")
        .or_else(|| read_optional_string_field(encounter, "name"))
        .unwrap_or_else(|| "对方".to_string());
    // Everything the model needs: state, settled result, and the option set
    // that will be shown after the dialogue.
    let user_prompt = json!({
        "worldType": world_type,
        "character": character,
        "encounter": encounter,
        "monsters": read_array_field(game_state, "sceneHostileNpcs").into_iter().cloned().collect::<Vec<_>>(),
        "history": build_action_story_history(game_state, action_text, result_text),
        "context": build_action_story_prompt_context(game_state, None),
        "topic": action_text,
        "resultSummary": result_text,
        "requestedOption": request.action.payload,
        "availableOptions": build_action_prompt_options(deferred_options),
    })
    .to_string();
    let mut llm_request = LlmTextRequest::new(vec![
        LlmMessage::system(
            "你是游戏运行时 NPC 对话导演。只输出中文正文,不要输出 JSON、Markdown 或规则说明;不要新增系统尚未结算的奖励、任务结果或战斗结果。",
        ),
        LlmMessage::user(format!(
            "请基于以下运行时状态,把玩家这一轮选择改写成 2 到 5 行可直接展示的 NPC 对话。可以使用“你:”和“{npc_name}:”格式,必须保留既有结算含义。\n{user_prompt}"
        )),
    ]);
    llm_request.max_tokens = Some(700);
    // Any failure or whitespace-only reply aborts the rewrite (propagates None).
    let dialogue_text = llm_client
        .request_text(llm_request)
        .await
        .ok()
        .map(|response| response.content.trim().to_string())
        .filter(|text| !text.is_empty())?;
    // During dialogue the player only sees a single "continue" option; the
    // original options are deferred into the saved story state.
    let presentation_options = vec![build_continue_adventure_runtime_story_option()];
    let saved_current_story =
        build_dialogue_current_story(npc_name.as_str(), dialogue_text.as_str(), deferred_options);
    Some(GeneratedStoryPayload {
        story_text: dialogue_text.clone(),
        history_result_text: dialogue_text,
        presentation_options,
        saved_current_story,
    })
}
/// Rewrites a settled combat turn into a short narrative recap (the prompt asks
/// for at most 120 characters) that leads into the next option set.
///
/// Returns `None` when required runtime-state fields are missing or when the
/// LLM call fails / returns an empty reply — callers fall back to the
/// deterministic result text in that case.
pub(super) async fn generate_reasoned_story_payload(
    llm_client: &LlmClient,
    game_state: &Value,
    request: &RuntimeStoryActionRequest,
    action_text: &str,
    result_text: &str,
    options: &[RuntimeStoryOptionView],
    battle: Option<&RuntimeBattlePresentation>,
) -> Option<GeneratedStoryPayload> {
    let world_type = current_world_type(game_state)?;
    let character = read_object_field(game_state, "playerCharacter")?.clone();
    // Everything the model needs: state, settled battle result, and the
    // options that will be offered next.
    let user_prompt = json!({
        "worldType": world_type,
        "character": character,
        "monsters": read_array_field(game_state, "sceneHostileNpcs").into_iter().cloned().collect::<Vec<_>>(),
        "history": build_action_story_history(game_state, action_text, result_text),
        "context": build_action_story_prompt_context(game_state, battle),
        "choice": action_text,
        "resultSummary": result_text,
        "requestedOption": request.action.payload,
        "availableOptions": build_action_prompt_options(options),
    })
    .to_string();
    let mut llm_request = LlmTextRequest::new(vec![
        LlmMessage::system(
            "你是游戏运行时剧情导演。只输出中文剧情正文,不要输出 JSON、Markdown 或规则说明;必须尊重已结算的战斗 outcome、伤害和状态不要发明额外奖励。",
        ),
        LlmMessage::user(format!(
            "请基于以下运行时状态,为这一轮战斗结算生成一段 120 字以内的结果叙事,并自然引出下一组选项。\n{user_prompt}"
        )),
    ]);
    llm_request.max_tokens = Some(700);
    // Any failure or whitespace-only reply aborts the rewrite (propagates None).
    let story_text = llm_client
        .request_text(llm_request)
        .await
        .ok()
        .map(|response| response.content.trim().to_string())
        .filter(|text| !text.is_empty())?;
    // Build the saved story before constructing the payload so `story_text`
    // only needs one clone (it was previously cloned twice).
    let saved_current_story = build_legacy_current_story(story_text.as_str(), options);
    Some(GeneratedStoryPayload {
        history_result_text: story_text.clone(),
        story_text,
        presentation_options: options.to_vec(),
        saved_current_story,
    })
}
/// True when a battle presentation exists and its outcome is one of the
/// terminal states that deserves a narrated recap.
pub(super) fn should_generate_reasoned_combat_story(
    battle: Option<&RuntimeBattlePresentation>,
) -> bool {
    let outcome = battle.and_then(|presentation| presentation.outcome.as_deref());
    match outcome {
        Some("victory") | Some("spar_complete") | Some("escaped") => true,
        _ => false,
    }
}
/// Builds the prompt history: prior well-formed `storyHistory` entries plus the
/// current action/result pair, capped at the 12 most recent entries.
pub(super) fn build_action_story_history(
    game_state: &Value,
    action_text: &str,
    result_text: &str,
) -> Vec<Value> {
    // Keep only entries with a `text` field; default a missing role to "result".
    let mut history = read_array_field(game_state, "storyHistory")
        .into_iter()
        .filter_map(|entry| {
            let text = read_optional_string_field(entry, "text")?;
            let history_role = read_optional_string_field(entry, "historyRole")
                .unwrap_or_else(|| "result".to_string());
            Some(json!({
                "text": text,
                "historyRole": history_role,
            }))
        })
        .collect::<Vec<_>>();
    history.push(json!({
        "text": action_text,
        "historyRole": "action",
    }));
    history.push(json!({
        "text": result_text,
        "historyRole": "result",
    }));
    // Cap at the last 12 entries. `split_off` hands back the tail directly
    // instead of rebuilding the whole vector via `into_iter().skip().collect()`.
    let keep_from = history.len().saturating_sub(12);
    history.split_off(keep_from)
}
pub(super) fn build_action_story_prompt_context(
game_state: &Value,
battle: Option<&RuntimeBattlePresentation>,
) -> Value {
let scene_preset = read_object_field(game_state, "currentScenePreset");
let battle_value = battle
.and_then(|presentation| serde_json::to_value(presentation).ok())
.unwrap_or(Value::Null);
json!({
"sceneName": scene_preset
.and_then(|scene| read_optional_string_field(scene, "name"))
.or_else(|| read_optional_string_field(game_state, "currentScene"))
.unwrap_or_else(|| "当前区域".to_string()),
"sceneDescription": scene_preset
.and_then(|scene| read_optional_string_field(scene, "description"))
.or_else(|| read_optional_string_field(game_state, "sceneDescription"))
.unwrap_or_else(|| "周围气氛仍在继续变化。".to_string()),
"encounterName": read_object_field(game_state, "currentEncounter")
.and_then(|encounter| {
read_optional_string_field(encounter, "npcName")
.or_else(|| read_optional_string_field(encounter, "name"))
}),
"encounterId": current_encounter_id(game_state),
"playerHp": read_i32_field(game_state, "playerHp").unwrap_or(0),
"playerMaxHp": read_i32_field(game_state, "playerMaxHp").unwrap_or(1),
"playerMana": read_i32_field(game_state, "playerMana").unwrap_or(0),
"playerMaxMana": read_i32_field(game_state, "playerMaxMana").unwrap_or(1),
"inBattle": read_bool_field(game_state, "inBattle").unwrap_or(false),
"currentNpcBattleOutcome": read_optional_string_field(game_state, "currentNpcBattleOutcome"),
"battle": battle_value,
})
}
/// Projects the enabled options into the compact JSON form used in prompts.
/// Disabled options are omitted; `text` mirrors `actionText` for clients that
/// read either key.
pub(super) fn build_action_prompt_options(options: &[RuntimeStoryOptionView]) -> Vec<Value> {
    let mut prompt_options = Vec::with_capacity(options.len());
    for option in options {
        if option.disabled.unwrap_or(false) {
            continue;
        }
        prompt_options.push(json!({
            "functionId": option.function_id,
            "actionText": option.action_text,
            "text": option.action_text,
        }));
    }
    prompt_options
}
/// Normalizes the option list returned with an AI story response.
///
/// Prefers the request's explicit available options, falls back to the full
/// catalog, and — if neither yields a valid option — returns three built-in
/// idle options so the player always has something to pick.
pub(super) fn build_ai_response_options(payload: &RuntimeStoryAiRequest) -> Vec<Value> {
    let request_options = &payload.request_options;
    let source = if request_options.available_options.is_empty() {
        &request_options.option_catalog
    } else {
        &request_options.available_options
    };
    let normalized: Vec<Value> = source.iter().filter_map(normalize_ai_story_option).collect();
    if normalized.is_empty() {
        // Last-resort defaults.
        vec![
            build_ai_story_option_value("idle_observe_signs", "观察周围迹象"),
            build_ai_story_option_value("idle_explore_forward", "继续向前探索"),
            build_ai_story_option_value("idle_rest_focus", "原地调息"),
        ]
    } else {
        normalized
    }
}
/// Normalizes a raw option object: requires `functionId`, derives `actionText`
/// (falling back to `text`, then the function id), and guarantees a `text` key.
/// Returns `None` when the value is not an object or lacks a `functionId`.
pub(super) fn normalize_ai_story_option(value: &Value) -> Option<Value> {
    let function_id = read_required_string_field(value, "functionId")?;
    // Display text: actionText > text > the function id itself.
    let action_text = read_required_string_field(value, "actionText")
        .or_else(|| read_required_string_field(value, "text"))
        .unwrap_or_else(|| function_id.clone());
    let mut normalized = value.as_object()?.clone();
    normalized.insert("functionId".to_string(), Value::String(function_id));
    normalized.insert("actionText".to_string(), Value::String(action_text.clone()));
    // Preserve any pre-existing `text`; only fill it in when absent.
    if !normalized.contains_key("text") {
        normalized.insert("text".to_string(), Value::String(action_text));
    }
    Some(Value::Object(normalized))
}
/// Builds a default AI story option value.
///
/// The `visuals` block keeps the player idle and the world static so these
/// fallback options never trigger movement, scrolling, or monster changes
/// on the client.
pub(super) fn build_ai_story_option_value(function_id: &str, action_text: &str) -> Value {
    json!({
        "functionId": function_id,
        "actionText": action_text,
        // Clients may read either `text` or `actionText`; keep both in sync.
        "text": action_text,
        "visuals": {
            "playerAnimation": "idle",
            "playerMoveMeters": 0,
            "playerOffsetY": 0,
            "playerFacing": "right",
            "scrollWorld": false,
            "monsterChanges": []
        }
    })
}
/// Deterministic fallback narrative used when the LLM is unavailable or its
/// reply is empty. Produces an opening line for the first turn and a
/// choice-echo line for subsequent turns.
pub(super) fn build_ai_fallback_story_text(
    payload: &RuntimeStoryAiRequest,
    initial: bool,
) -> String {
    // Character name may be absent; an empty string reads acceptably in both templates.
    let character_name =
        read_optional_string_field(&payload.character, "name").unwrap_or_default();
    let scene_name = read_optional_string_field(&payload.context, "sceneName")
        .or_else(|| read_optional_string_field(&payload.context, "scene"))
        .unwrap_or_else(|| "当前区域".to_string());
    if initial {
        format!(
            "{character_name} 在 {scene_name} 稳住脚步,周围的气息正在变化,第一轮选择已经摆到眼前。"
        )
    } else {
        let choice = normalize_required_string(payload.choice.as_str())
            .unwrap_or_else(|| "继续推进".to_string());
        format!("{character_name} 选择了「{choice}」,{scene_name} 的局势随之向下一步展开。")
    }
}