// File: Genarrative/server-rs/crates/api-server/src/runtime_chat.rs
// Last modified: 2026-05-02 14:18:12 +08:00 — 1017 lines, 35 KiB, Rust.
use axum::{
Json,
extract::{Extension, State},
http::StatusCode,
response::{
IntoResponse, Response,
sse::{Event, Sse},
},
};
use platform_llm::{LlmMessage, LlmTextRequest};
use serde::Deserialize;
use serde_json::{Value, json};
use shared_contracts::story::StoryRuntimeSnapshotPayload as RuntimeStorySnapshotPayload;
use std::convert::Infallible;
use module_runtime_story::{
RuntimeStoryPromptContextExtras, build_runtime_story_prompt_context, current_world_type,
normalize_required_string, read_array_field, read_field, read_i32_field, read_object_field,
read_optional_string_field, read_runtime_session_id,
};
use crate::{
auth::AuthenticatedAccessToken,
http_error::AppError,
llm_model_routing::RPG_STORY_LLM_MODEL,
prompt::runtime_chat::{
NPC_CHAT_TURN_REPLY_SYSTEM_PROMPT, NPC_CHAT_TURN_SUGGESTION_SYSTEM_PROMPT,
NpcChatTurnPromptInput, build_deterministic_chat_suggestions,
build_deterministic_hostile_breakoff_reply, build_deterministic_npc_reply,
build_fallback_function_suggestions, build_fallback_npc_chat_suggestions,
build_npc_chat_turn_reply_prompt, build_npc_chat_turn_suggestion_prompt,
},
request_context::RequestContext,
state::AppState,
};
/// Request body for the streaming NPC chat-turn endpoint.
///
/// Two calling modes exist: legacy callers send the full prompt projection
/// inline, while session-based callers send `session_id` (optionally with a
/// `snapshot`) and the server hydrates the remaining fields from the runtime
/// snapshot (see `hydrate_npc_chat_turn_request_from_session`).
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct NpcChatTurnRequest {
    // Runtime session to hydrate from; `None` for legacy inline-payload calls.
    #[serde(default)]
    session_id: Option<String>,
    // Client-provided snapshot; when present and matching the session, it is
    // preferred over the persisted server-side snapshot.
    #[serde(default)]
    snapshot: Option<RuntimeStorySnapshotPayload>,
    // World type (e.g. "WUXIA"); overwritten from the snapshot during hydration.
    #[serde(default)]
    world_type: String,
    // Player character projection; `player` is kept as a legacy alias and is
    // mirrored from `character` during hydration.
    #[serde(default)]
    character: Option<Value>,
    #[serde(default)]
    player: Option<Value>,
    // Current encounter (the NPC being talked to). The only required object.
    encounter: Value,
    // Hostile NPCs in the scene ("sceneHostileNpcs" in the snapshot).
    #[serde(default)]
    monsters: Vec<Value>,
    // Recent story history entries (trimmed to the last 12 during hydration).
    #[serde(default)]
    history: Vec<Value>,
    // Prompt context blob (built by `build_runtime_story_prompt_context`).
    #[serde(default)]
    context: Value,
    #[serde(default)]
    conversation_history: Vec<Value>,
    #[serde(default)]
    dialogue: Vec<Value>,
    #[serde(default)]
    combat_context: Option<Value>,
    // The player's utterance; may be empty only when the NPC initiates.
    player_message: String,
    // Per-NPC state object (affinity, chattedCount, recruited, ...).
    #[serde(default)]
    npc_state: Value,
    // True when the NPC opens the conversation (skips affinity settlement).
    #[serde(default)]
    npc_initiates_conversation: bool,
    #[serde(default)]
    quest_offer_context: Option<Value>,
    // Directive controlling turn limits / forced exit / hostile handling.
    #[serde(default)]
    chat_directive: Option<Value>,
}
/// Axum handler: streams one NPC chat turn over SSE.
///
/// Flow: hydrate the request from the persisted runtime session (when a
/// `sessionId` is present), validate `playerMessage`, then run the LLM turn
/// while forwarding incremental reply text as `reply_delta` events. If the
/// LLM path is unavailable, a deterministic fallback reply/suggestion set is
/// emitted instead. The stream always ends with a `complete` event followed
/// by a `[DONE]` marker.
pub async fn stream_runtime_npc_chat_turn(
    State(state): State<AppState>,
    Extension(request_context): Extension<RequestContext>,
    Extension(authenticated): Extension<AuthenticatedAccessToken>,
    Json(mut payload): Json<NpcChatTurnRequest>,
) -> Result<Response, Response> {
    hydrate_npc_chat_turn_request_from_session(
        &state,
        &request_context,
        authenticated.claims().user_id().to_string(),
        &mut payload,
    )
    .await?;
    // Prefer the explicit `npcName`, fall back to `name`, then a generic label.
    let npc_name = read_string_field(&payload.encounter, "npcName")
        .or_else(|| read_string_field(&payload.encounter, "name"))
        .unwrap_or_else(|| "对方".to_string());
    let player_message = payload.player_message.trim().to_string();
    // An empty player message is only valid when the NPC opens the conversation.
    if player_message.is_empty() && !payload.npc_initiates_conversation {
        return Err(runtime_chat_error_response(
            &request_context,
            AppError::from_status(StatusCode::BAD_REQUEST).with_details(json!({
                "provider": "runtime-chat",
                "message": "playerMessage 不能为空",
            })),
        ));
    }
    let stream = async_stream::stream! {
        let (reply_tx, mut reply_rx) = tokio::sync::mpsc::unbounded_channel::<String>();
        // `platform-llm` keeps invoking the callback with incremental text inside
        // the current task; the channel converts those increments into real SSE
        // chunks out here.
        let llm_turn = generate_llm_npc_chat_turn(
            &state,
            &payload,
            &npc_name,
            move |text| {
                let _ = reply_tx.send(text.to_string());
            },
        );
        tokio::pin!(llm_turn);
        let llm_result = loop {
            // While the model has not finished, flush the accumulated reply text
            // first so the client does not wait until the full suggestion set is
            // generated before seeing anything.
            tokio::select! {
                result = &mut llm_turn => break result,
                maybe_text = reply_rx.recv() => {
                    if let Some(text) = maybe_text {
                        yield Ok::<Event, Infallible>(runtime_chat_sse_json_event_or_error(
                            "reply_delta",
                            json!({ "text": text }),
                        ));
                    }
                }
            }
        };
        // Drain any deltas still queued when the LLM future completed; the sender
        // was dropped with that future, so this loop terminates.
        while let Some(text) = reply_rx.recv().await {
            yield Ok::<Event, Infallible>(runtime_chat_sse_json_event_or_error(
                "reply_delta",
                json!({ "text": text }),
            ));
        }
        let (npc_reply, suggestions, function_suggestions, force_exit) = match llm_result {
            Some(result) => result,
            None => {
                // LLM unavailable or failed: fall back to deterministic content,
                // still honoring hostile break-off and forced-exit directives.
                let deterministic_hostile_breakoff =
                    should_hostile_chat_breakoff_deterministically(
                        player_message.as_str(),
                        payload.chat_directive.as_ref(),
                        Some(&payload.npc_state),
                    );
                let force_exit = should_force_chat_exit(payload.chat_directive.as_ref())
                    || deterministic_hostile_breakoff;
                let npc_reply = if deterministic_hostile_breakoff {
                    build_deterministic_hostile_breakoff_reply(
                        npc_name.as_str(),
                        player_message.as_str(),
                    )
                } else {
                    build_deterministic_npc_reply(
                        npc_name.as_str(),
                        player_message.as_str(),
                        payload.npc_initiates_conversation,
                    )
                };
                // No follow-up suggestions once the chat is being force-closed.
                let suggestions = if force_exit {
                    Vec::new()
                } else {
                    build_deterministic_chat_suggestions(npc_name.as_str(), player_message.as_str())
                };
                let function_suggestions = if force_exit {
                    Vec::new()
                } else {
                    build_fallback_function_suggestions(payload.chat_directive.as_ref())
                };
                // Fallback path emitted nothing incrementally; send the whole
                // reply as a single delta so clients behave uniformly.
                yield Ok::<Event, Infallible>(runtime_chat_sse_json_event_or_error(
                    "reply_delta",
                    json!({ "text": npc_reply }),
                ));
                (npc_reply, suggestions, function_suggestions, force_exit)
            }
        };
        let chatted_count = read_number_field(&payload.npc_state, "chattedCount").unwrap_or(0.0);
        // NPC-initiated openings are not a player-utterance settlement: no delta.
        let affinity_delta = if payload.npc_initiates_conversation {
            0
        } else {
            compute_npc_chat_affinity_delta(player_message.as_str(), npc_reply.as_str(), chatted_count)
        };
        let complete_payload = json!({
            "npcReply": npc_reply,
            "affinityDelta": affinity_delta,
            "affinityText": describe_affinity_shift(affinity_delta),
            "suggestions": suggestions,
            "functionSuggestions": function_suggestions,
            "pendingQuestOffer": null,
            "chatDirective": build_completion_directive(payload.chat_directive.as_ref(), force_exit),
        });
        yield Ok::<Event, Infallible>(runtime_chat_sse_json_event_or_error(
            "complete",
            complete_payload,
        ));
        yield Ok::<Event, Infallible>(Event::default().data("[DONE]"));
    };
    Ok(Sse::new(stream).into_response())
}
/// Runs the full LLM path for one NPC chat turn.
///
/// Returns `Some((npc_reply, suggestions, function_suggestions, force_exit))`
/// on success, or `None` when no LLM client is configured or the streaming
/// reply request fails — the caller then falls back to deterministic content.
/// `on_reply_update` is invoked with the *accumulated* reply text on every
/// streaming delta.
async fn generate_llm_npc_chat_turn<F>(
    state: &AppState,
    payload: &NpcChatTurnRequest,
    npc_name: &str,
    mut on_reply_update: F,
) -> Option<(String, Vec<String>, Vec<Value>, bool)>
where
    F: FnMut(&str),
{
    let llm_client = state.llm_client()?;
    // `character` takes precedence over the legacy `player` field.
    let character = payload
        .character
        .as_ref()
        .or(payload.player.as_ref())
        .unwrap_or(&Value::Null);
    let prompt_input = NpcChatTurnPromptInput {
        world_type: payload.world_type.as_str(),
        character,
        encounter: &payload.encounter,
        monsters: &payload.monsters,
        history: &payload.history,
        context: &payload.context,
        conversation_history: &payload.conversation_history,
        dialogue: &payload.dialogue,
        combat_context: payload.combat_context.as_ref(),
        player_message: payload.player_message.as_str(),
        npc_state: &payload.npc_state,
        npc_initiates_conversation: payload.npc_initiates_conversation,
        chat_directive: payload.chat_directive.as_ref(),
    };
    // First LLM call: the NPC reply itself, streamed.
    let reply_prompt = build_npc_chat_turn_reply_prompt(&prompt_input);
    let mut reply_request = LlmTextRequest::new(vec![
        LlmMessage::system(NPC_CHAT_TURN_REPLY_SYSTEM_PROMPT),
        LlmMessage::user(reply_prompt),
    ]);
    reply_request.max_tokens = Some(700);
    reply_request.enable_web_search = state.config.rpg_llm_web_search_enabled;
    reply_request.model = Some(RPG_STORY_LLM_MODEL.to_string());
    let reply_response = llm_client
        .stream_text(reply_request, |delta| {
            on_reply_update(delta.accumulated_text.as_str());
        })
        .await
        .ok()?;
    // An empty/whitespace reply falls back to the deterministic one.
    let npc_reply = normalize_required_text(reply_response.content.as_str()).unwrap_or_else(|| {
        build_deterministic_npc_reply(
            npc_name,
            payload.player_message.as_str(),
            payload.npc_initiates_conversation,
        )
    });
    // Forced exit: skip the suggestion call entirely.
    if should_force_chat_exit(payload.chat_directive.as_ref()) {
        return Some((npc_reply, Vec::new(), Vec::new(), true));
    }
    // Second LLM call: follow-up suggestions, non-streaming. A failure here is
    // tolerated (empty text -> fallback suggestions).
    let suggestion_prompt =
        build_npc_chat_turn_suggestion_prompt(&prompt_input, npc_reply.as_str());
    let mut suggestion_request = LlmTextRequest::new(vec![
        LlmMessage::system(NPC_CHAT_TURN_SUGGESTION_SYSTEM_PROMPT),
        LlmMessage::user(suggestion_prompt),
    ]);
    suggestion_request.max_tokens = Some(200);
    suggestion_request.enable_web_search = state.config.rpg_llm_web_search_enabled;
    suggestion_request.model = Some(RPG_STORY_LLM_MODEL.to_string());
    let suggestion_text = llm_client
        .request_text(suggestion_request)
        .await
        .ok()
        .map(|response| response.content)
        .unwrap_or_default();
    let (mut suggestions, mut function_suggestions, should_end_chat) =
        parse_npc_chat_suggestion_resolution(
            suggestion_text.as_str(),
            payload.chat_directive.as_ref(),
        );
    // The deterministic hostile break-off also applies on the LLM path.
    let force_exit = should_end_chat
        || should_hostile_chat_breakoff_deterministically(
            payload.player_message.as_str(),
            payload.chat_directive.as_ref(),
            Some(&payload.npc_state),
        );
    if force_exit {
        suggestions.clear();
        function_suggestions.clear();
    } else if suggestions.is_empty() {
        suggestions = build_fallback_npc_chat_suggestions(payload.player_message.as_str());
    }
    Some((npc_reply, suggestions, function_suggestions, force_exit))
}
/// Hydrates the request payload from the runtime session identified by
/// `payload.session_id`.
///
/// Resolution order:
/// 1. No `session_id` — legacy mode, use request-body fields as-is.
/// 2. A request-supplied `snapshot` matching the session — project from it
///    (preview/test runtimes), without touching persisted state.
/// 3. Otherwise — load the persisted snapshot record; errors with 502 on a
///    storage failure, 409 when the record is missing or belongs to another
///    session.
async fn hydrate_npc_chat_turn_request_from_session(
    state: &AppState,
    request_context: &RequestContext,
    user_id: String,
    payload: &mut NpcChatTurnRequest,
) -> Result<(), Response> {
    let Some(session_id) = payload
        .session_id
        .as_deref()
        .and_then(normalize_required_string)
    else {
        // Legacy calls without a sessionId keep using the request-body fields;
        // in the real runtime the backend snapshot projects the context.
        return Ok(());
    };
    if let Some(game_state) = resolve_request_snapshot_game_state(
        request_context,
        session_id.as_str(),
        payload.snapshot.as_ref(),
    )? {
        apply_npc_chat_turn_game_state(payload, game_state);
        return Ok(());
    }
    let record = state
        .get_runtime_snapshot_record(user_id)
        .await
        .map_err(|error| {
            runtime_chat_error_response(
                request_context,
                AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
                    "provider": "spacetimedb",
                    "message": error.to_string(),
                })),
            )
        })?
        .ok_or_else(|| {
            runtime_chat_error_response(
                request_context,
                AppError::from_status(StatusCode::CONFLICT).with_details(json!({
                    "provider": "runtime-chat",
                    "message": "运行时快照不存在,请先初始化并保存一次游戏",
                })),
            )
        })?;
    let game_state = record.game_state;
    // A snapshot without its own session id is treated as matching the request.
    let snapshot_session_id =
        read_runtime_session_id(&game_state).unwrap_or_else(|| session_id.clone());
    if snapshot_session_id != session_id {
        return Err(runtime_chat_error_response(
            request_context,
            AppError::from_status(StatusCode::CONFLICT).with_details(json!({
                "provider": "runtime-chat",
                "message": "请求的运行时会话与服务端快照不一致,请重新进入游戏",
                "sessionId": session_id,
                "snapshotSessionId": snapshot_session_id,
            })),
        ));
    }
    apply_npc_chat_turn_game_state(payload, game_state);
    Ok(())
}
/// Validates a request-supplied snapshot and, when present and consistent with
/// `session_id`, returns a clone of its `gameState` for this turn's projection.
///
/// Returns `Ok(None)` when no snapshot was sent (caller falls back to the
/// persisted record), 400 when `gameState` is not a JSON object, and 409 when
/// the snapshot's embedded session id does not match the requested one.
fn resolve_request_snapshot_game_state(
    request_context: &RequestContext,
    session_id: &str,
    snapshot: Option<&RuntimeStorySnapshotPayload>,
) -> Result<Option<Value>, Response> {
    let Some(snapshot) = snapshot else {
        return Ok(None);
    };
    if !snapshot.game_state.is_object() {
        return Err(runtime_chat_error_response(
            request_context,
            AppError::from_status(StatusCode::BAD_REQUEST).with_details(json!({
                "provider": "runtime-chat",
                "field": "snapshot.gameState",
                "message": "snapshot.gameState 必须是 JSON object",
            })),
        ));
    }
    // A snapshot without its own session id is treated as matching the request.
    let snapshot_session_id =
        read_runtime_session_id(&snapshot.game_state).unwrap_or_else(|| session_id.to_string());
    if snapshot_session_id != session_id {
        return Err(runtime_chat_error_response(
            request_context,
            AppError::from_status(StatusCode::CONFLICT).with_details(json!({
                "provider": "runtime-chat",
                "message": "请求的运行时会话与服务端快照不一致,请重新进入游戏",
                "sessionId": session_id,
                "snapshotSessionId": snapshot_session_id,
            })),
        ));
    }
    // Preview/test/persistence-disabled runtimes use the request snapshot only
    // for this turn's prompt projection; it is never written to the real save.
    Ok(Some(snapshot.game_state.clone()))
}
fn apply_npc_chat_turn_game_state(payload: &mut NpcChatTurnRequest, game_state: Value) {
payload.world_type = current_world_type(&game_state).unwrap_or_default();
payload.character = read_field(&game_state, "playerCharacter").cloned();
payload.player = payload.character.clone();
payload.encounter = read_field(&game_state, "currentEncounter")
.cloned()
.unwrap_or_else(|| payload.encounter.clone());
payload.monsters = read_array_field(&game_state, "sceneHostileNpcs")
.into_iter()
.cloned()
.collect();
payload.history = read_array_field(&game_state, "storyHistory")
.into_iter()
.rev()
.take(12)
.collect::<Vec<_>>()
.into_iter()
.rev()
.cloned()
.collect();
payload.context = build_runtime_story_prompt_context(
&game_state,
RuntimeStoryPromptContextExtras {
last_function_id: Some("npc_chat".to_string()),
..RuntimeStoryPromptContextExtras::default()
},
);
payload.npc_state =
resolve_current_request_npc_state(&game_state).unwrap_or_else(|| payload.npc_state.clone());
if let Some(quest_context) = payload.quest_offer_context.as_mut() {
if let Some(object) = quest_context.as_object_mut() {
object.insert("state".to_string(), game_state);
}
}
}
/// Looks up the state entry for the NPC of the current encounter and projects
/// it to the minimal `{ affinity, chattedCount, recruited }` object used in
/// prompts. Returns `None` when there is no encounter or no matching entry.
fn resolve_current_request_npc_state(game_state: &Value) -> Option<Value> {
    let encounter = read_object_field(game_state, "currentEncounter")?;
    let npc_name = read_optional_string_field(encounter, "npcName")
        .or_else(|| read_optional_string_field(encounter, "name"))
        .unwrap_or_else(|| "当前遭遇".to_string());
    // Entries are keyed by id first, then by display name.
    let npc_id = read_optional_string_field(encounter, "id").unwrap_or_else(|| npc_name.clone());
    let states = read_object_field(game_state, "npcStates")?;
    let state = states
        .get(npc_id.as_str())
        .or_else(|| states.get(npc_name.as_str()))?;
    Some(json!({
        "affinity": read_i32_field(state, "affinity").unwrap_or(0),
        "chattedCount": read_i32_field(state, "chattedCount").unwrap_or(0),
        "recruited": state.get("recruited").and_then(Value::as_bool).unwrap_or(false),
    }))
}
/// Builds the `chatDirective` object for the `complete` SSE event.
///
/// Returns `Value::Null` when the request carried no directive. Otherwise the
/// exit decision combines the caller-computed `force_exit` with the directive's
/// own `closingMode`/`forceExitAfterTurn`, and a termination reason is only
/// reported when exiting.
fn build_completion_directive(chat_directive: Option<&Value>, force_exit: bool) -> Value {
    let Some(directive) = chat_directive else {
        return Value::Null;
    };
    // Normalize: anything other than "foreshadow_close" collapses to "free".
    let closing_mode = match read_string_field(directive, "closingMode") {
        Some(mode) if mode == "foreshadow_close" => mode,
        _ => "free".to_string(),
    };
    let force_exit_after_turn = directive
        .get("forceExitAfterTurn")
        .and_then(Value::as_bool)
        .unwrap_or(false);
    let exit = force_exit || closing_mode == "foreshadow_close" || force_exit_after_turn;
    let termination_reason = if exit {
        // Only the two known reasons pass through; hostile chats default to
        // "hostile_breakoff" when the directive carries no valid reason.
        match read_string_field(directive, "terminationReason") {
            Some(reason) if reason == "player_exit" || reason == "hostile_breakoff" => Some(reason),
            _ if is_hostile_model_chat(chat_directive) => Some("hostile_breakoff".to_string()),
            _ => None,
        }
    } else {
        None
    };
    json!({
        "turnLimit": directive.get("turnLimit").cloned().unwrap_or(Value::Null),
        "remainingTurns": directive.get("remainingTurns").cloned().unwrap_or(Value::Null),
        "forceExit": exit,
        "closingMode": if exit { "foreshadow_close" } else { closing_mode.as_str() },
        "terminationReason": termination_reason,
    })
}
/// Parses the suggestion-model output into `(suggestions, function_suggestions,
/// should_end_chat)`.
///
/// Valid JSON is interpreted structurally (up to three non-empty suggestion
/// strings; `shouldEndChat` honored only for hostile-model chats). Non-JSON
/// text is treated as a plain line list; empty text yields only fallback
/// function suggestions.
fn parse_npc_chat_suggestion_resolution(
    text: &str,
    chat_directive: Option<&Value>,
) -> (Vec<String>, Vec<Value>, bool) {
    let normalized = text.trim();
    if normalized.is_empty() {
        return (
            Vec::new(),
            build_fallback_function_suggestions(chat_directive),
            false,
        );
    }
    match serde_json::from_str::<Value>(normalized) {
        Ok(value) => {
            let model_wants_exit = value
                .get("shouldEndChat")
                .and_then(Value::as_bool)
                .unwrap_or(false);
            // Only hostile-model chats are allowed to end via the model signal.
            let should_end_chat = model_wants_exit && is_hostile_model_chat(chat_directive);
            let mut suggestions = Vec::new();
            if let Some(items) = value.get("suggestions").and_then(Value::as_array) {
                for item in items {
                    if suggestions.len() == 3 {
                        break;
                    }
                    if let Some(text) = item.as_str() {
                        let text = text.trim();
                        if !text.is_empty() {
                            suggestions.push(text.to_owned());
                        }
                    }
                }
            }
            let function_suggestions =
                parse_function_suggestions(value.get("functionSuggestions"), chat_directive);
            (suggestions, function_suggestions, should_end_chat)
        }
        Err(_) => (
            parse_line_list_content(normalized, 3),
            build_fallback_function_suggestions(chat_directive),
            false,
        ),
    }
}
/// Validates model-proposed function suggestions against the directive's
/// `functionOptions` allowlist.
///
/// Rules: at most three entries, `npc_chat` itself is never suggested,
/// duplicates are dropped, ids outside a non-empty allowlist are dropped, and
/// a missing/blank `actionText` falls back to the allowlisted option's text
/// (or drops the entry when none exists).
fn parse_function_suggestions(value: Option<&Value>, chat_directive: Option<&Value>) -> Vec<Value> {
    let allowed_options = read_function_options(chat_directive);
    let allowed_ids: Vec<String> = allowed_options
        .iter()
        .filter_map(|option| read_string_field(option, "functionId"))
        .collect();
    let candidates = value.and_then(Value::as_array).map(Vec::as_slice).unwrap_or(&[]);
    let mut used_ids: Vec<String> = Vec::new();
    let mut resolved: Vec<Value> = Vec::new();
    for item in candidates {
        if resolved.len() == 3 {
            break;
        }
        let Some(function_id) = read_string_field(item, "functionId") else {
            continue;
        };
        if function_id == "npc_chat"
            || (!allowed_ids.is_empty() && !allowed_ids.contains(&function_id))
            || used_ids.contains(&function_id)
        {
            continue;
        }
        let fallback_text = allowed_options
            .iter()
            .find(|option| {
                read_string_field(option, "functionId").as_deref() == Some(function_id.as_str())
            })
            .and_then(|option| read_string_field(option, "actionText"));
        let Some(action_text) = read_string_field(item, "actionText")
            .or(fallback_text)
            .filter(|text| !text.trim().is_empty())
        else {
            continue;
        };
        used_ids.push(function_id.clone());
        resolved.push(json!({
            "functionId": function_id,
            "actionText": action_text,
        }));
    }
    resolved
}
/// Returns references to the directive's `functionOptions` entries, or an
/// empty list when the directive or the array is absent.
fn read_function_options(chat_directive: Option<&Value>) -> Vec<&Value> {
    match chat_directive
        .and_then(|directive| directive.get("functionOptions"))
        .and_then(Value::as_array)
    {
        Some(items) => items.iter().collect(),
        None => Vec::new(),
    }
}
fn read_string_field(value: &Value, field: &str) -> Option<String> {
value
.get(field)
.and_then(Value::as_str)
.map(str::trim)
.filter(|text| !text.is_empty())
.map(ToOwned::to_owned)
}
fn read_number_field(value: &Value, field: &str) -> Option<f64> {
value
.get(field)
.and_then(Value::as_f64)
.filter(|number| number.is_finite())
}
fn read_bool_field(value: &Value, field: &str) -> Option<bool> {
value.get(field).and_then(Value::as_bool)
}
/// True when the directive demands ending the chat after this turn: closing
/// mode "foreshadow_close", termination reason "player_exit", or an explicit
/// `forceExitAfterTurn` flag.
fn should_force_chat_exit(chat_directive: Option<&Value>) -> bool {
    let Some(directive) = chat_directive else {
        return false;
    };
    if read_string_field(directive, "closingMode").as_deref() == Some("foreshadow_close") {
        return true;
    }
    if read_string_field(directive, "terminationReason").as_deref() == Some("player_exit") {
        return true;
    }
    directive
        .get("forceExitAfterTurn")
        .and_then(Value::as_bool)
        .unwrap_or(false)
}
/// True when the directive marks this as a hostile chat, either via
/// `terminationMode == "hostile_model"` or the `isHostileChat` flag.
fn is_hostile_model_chat(chat_directive: Option<&Value>) -> bool {
    match chat_directive {
        Some(directive) => {
            let hostile_mode = read_string_field(directive, "terminationMode").as_deref()
                == Some("hostile_model");
            hostile_mode || read_bool_field(directive, "isHostileChat").unwrap_or(false)
        }
        None => false,
    }
}
/// Deterministic break-off rules for hostile-model chats (used when model
/// suggestions are unavailable, and as a backstop on the LLM path).
///
/// A hostile chat breaks off when the directive already records a
/// "player_exit" reason, when the NPC has been chatted with 4+ times, or when
/// the player's message contains an aggressive keyword. Non-hostile chats
/// never break off here.
fn should_hostile_chat_breakoff_deterministically(
    player_message: &str,
    chat_directive: Option<&Value>,
    npc_state: Option<&Value>,
) -> bool {
    if !is_hostile_model_chat(chat_directive) {
        return false;
    }
    let Some(directive) = chat_directive else {
        return false;
    };
    if read_string_field(directive, "terminationReason").as_deref() == Some("player_exit") {
        return true;
    }
    // Fallback cap so negative provocation cannot be dragged out into small
    // talk when model suggestions are unavailable.
    if npc_state
        .and_then(|state| read_number_field(state, "chattedCount"))
        .is_some_and(|chatted_count| chatted_count >= 4.0)
    {
        return true;
    }
    // Bug fix: the previous table contained empty-string entries, and
    // `str::contains("")` is always true, which forced a break-off on every
    // hostile turn regardless of the message. The empty entries are removed.
    let hostile_break_words = [
        "动手",
        "开战",
        "拔刀",
        "闭嘴",
        "少废话",
        "别挡路",
        "废话",
        "威胁",
        "找死",
        "送死",
        "住口",
        "让开",
        "滚开",
        "不退",
        "不会退",
        "别装",
        "骗子",
        "叛徒",
    ];
    count_keyword_matches(player_message, &hostile_break_words) > 0
}
/// Trims `value` and returns it as an owned string, or `None` when nothing
/// but whitespace remains.
fn normalize_required_text(value: &str) -> Option<String> {
    let trimmed = value.trim();
    (!trimmed.is_empty()).then(|| trimmed.to_string())
}
/// Splits `text` into up to `max_items` list entries: CRs are removed, each
/// line is stripped of leading list markers (digits, '-', '*', '.', ')') and
/// surrounding whitespace, and empty lines are skipped.
fn parse_line_list_content(text: &str, max_items: usize) -> Vec<String> {
    // Marker characters that may prefix a list line ("1. ", "- ", "* ", ...).
    let is_marker = |character: char| {
        matches!(character, '-' | '*' | '.' | ')')
            || character.is_ascii_digit()
            || character.is_whitespace()
    };
    text.replace('\r', "")
        .lines()
        .filter_map(|line| {
            let content = line.trim().trim_start_matches(is_marker).trim();
            (!content.is_empty()).then(|| content.to_string())
        })
        .take(max_items)
        .collect()
}
/// Strips leading list markers from a line: any run of digits, '-', '*', '.',
/// ')' and whitespace characters at the start.
fn trim_line_list_marker(line: &str) -> &str {
    line.trim_start_matches(|character: char| {
        matches!(character, '-' | '*' | '.' | ')')
            || character.is_ascii_digit()
            || character.is_whitespace()
    })
}
/// Counts how many of `keywords` occur as substrings of `text`. Each keyword
/// is counted at most once, regardless of how often it appears.
fn count_keyword_matches(text: &str, keywords: &[&str]) -> i32 {
    let mut matches = 0;
    for keyword in keywords {
        if text.contains(keyword) {
            matches += 1;
        }
    }
    matches
}
/// Restricts a raw affinity delta to the allowed per-turn range of [-3, 3].
fn clamp_affinity_delta(value: i32) -> i32 {
    if value > 3 {
        3
    } else if value < -3 {
        -3
    } else {
        value
    }
}
/// Heuristic affinity change for one chat turn, mirroring the legacy Node
/// keyword rules (pinned by the unit tests in this file).
///
/// Scoring: positive keywords in the player's message plus warm keywords in
/// the NPC reply versus negative player keywords plus cold reply keywords.
/// * Both scores zero: the very first chat (`chatted_count == 0`) gives +1,
///   otherwise 0.
/// * Positive wins: difference plus an early-conversation bonus
///   (`chatted_count <= 1`), clamped to [-3, 3].
/// * Negative wins: the (negative) difference, clamped to [-3, 3].
/// * Tie: 0.
///
/// Bug fix: the keyword tables previously contained empty-string entries, and
/// `str::contains("")` is always true, which skewed every score (e.g. neutral
/// small talk scored -1 instead of +1, breaking the in-file unit tests). The
/// empty entries are removed.
fn compute_npc_chat_affinity_delta(
    player_message: &str,
    npc_reply: &str,
    chatted_count: f64,
) -> i32 {
    let positive_keywords = [
        "谢谢", "辛苦", "抱歉", "理解", "相信", "放心", "一起", "帮你", "在意", "关心",
    ];
    let negative_keywords = [
        "闭嘴", "少废话", "威胁", "不信", "别装", "快说", "审问", "怀疑",
    ];
    let warm_reply_keywords = ["可以", "愿意", "放心", "谢谢", "明白"];
    let cold_reply_keywords = ["没必要", "不想", "别问", "与你无关", "算了", "住口"];
    // Each keyword counts at most once, however often it appears.
    let count_matches = |text: &str, keywords: &[&str]| -> i32 {
        keywords
            .iter()
            .filter(|keyword| text.contains(**keyword))
            .count() as i32
    };
    let player = player_message.trim();
    let reply = npc_reply.trim();
    let positive_score =
        count_matches(player, &positive_keywords) + count_matches(reply, &warm_reply_keywords);
    let negative_score =
        count_matches(player, &negative_keywords) + count_matches(reply, &cold_reply_keywords);
    if positive_score == 0 && negative_score == 0 {
        // Neutral small talk: only the very first chat nudges affinity upward.
        return if chatted_count == 0.0 { 1 } else { 0 };
    }
    if positive_score > negative_score {
        let opening_bonus = if chatted_count <= 1.0 { 1 } else { 0 };
        return (positive_score - negative_score + opening_bonus).clamp(-3, 3);
    }
    if negative_score > positive_score {
        return (positive_score - negative_score).clamp(-3, 3);
    }
    0
}
/// Maps an affinity delta to the user-facing Chinese flavour text.
///
/// NOTE(review): the `>= 8` / `>= 5` tiers are unreachable from
/// `compute_npc_chat_affinity_delta`, whose output is clamped to [-3, 3];
/// they only matter if a caller ever passes a larger (e.g. cumulative) delta
/// — confirm whether that is intended.
fn describe_affinity_shift(affinity_delta: i32) -> &'static str {
    match affinity_delta {
        delta if delta >= 8 => "态度明显软化了下来。",
        delta if delta >= 5 => "态度比刚才亲近了一些。",
        delta if delta > 0 => "对话气氛稍微松动了一点。",
        delta if delta < 0 => "这轮对话让气氛变得更紧了一些。",
        _ => "这轮对话暂时没有带来明显关系变化。",
    }
}
/// Builds an SSE event named `event_name` carrying `payload` as JSON; on a
/// serialization failure, an `error` event is emitted instead of the payload.
fn runtime_chat_sse_json_event_or_error(event_name: &str, payload: Value) -> Event {
    let Ok(payload_text) = serde_json::to_string(&payload) else {
        return runtime_chat_sse_error_event_message("SSE payload 序列化失败".to_string());
    };
    Event::default().event(event_name).data(payload_text)
}
/// Builds an `error` SSE event carrying `{"message": <message>}`. The JSON is
/// assembled by hand so this path cannot itself fail on serialization; the
/// message string is still JSON-escaped (with a last-resort fallback text).
fn runtime_chat_sse_error_event_message(message: String) -> Event {
    let encoded = serde_json::to_string(&message)
        .unwrap_or_else(|_| "\"SSE 错误事件序列化失败\"".to_string());
    Event::default()
        .event("error")
        .data(format!("{{\"message\":{encoded}}}"))
}
/// Converts an `AppError` into an HTTP `Response`, attaching the request
/// context so error reporting can correlate it with the originating request.
fn runtime_chat_error_response(request_context: &RequestContext, error: AppError) -> Response {
    error.into_response_with_context(Some(request_context))
}
#[cfg(test)]
mod tests {
    //! Unit tests mirroring the legacy Node behaviour for affinity scoring,
    //! suggestion parsing, hostile break-off fallbacks, and session hydration.
    use super::*;
    use crate::{config::AppConfig, request_context::RequestContext, state::AppState};
    use std::time::Duration;

    // Pins the keyword-based affinity scoring against the legacy Node rules.
    #[test]
    fn npc_chat_affinity_delta_keeps_node_keyword_rules() {
        assert_eq!(
            compute_npc_chat_affinity_delta("谢谢你愿意帮忙", "放心,我明白。", 0.0),
            3
        );
        assert_eq!(
            compute_npc_chat_affinity_delta("快说,别装。", "与你无关。", 2.0),
            -3
        );
        assert_eq!(
            compute_npc_chat_affinity_delta("这里怎么了", "我还在想。", 0.0),
            1
        );
        assert_eq!(
            compute_npc_chat_affinity_delta("这里怎么了", "我还在想。", 2.0),
            0
        );
    }

    #[test]
    fn npc_initiated_opening_keeps_neutral_affinity_delta() {
        // A first-encounter NPC-initiated opening is not a player-utterance
        // settlement: an empty playerMessage or placeholder text must not
        // trigger an affinity change.
        let npc_initiates_conversation = true;
        let player_message = "";
        let npc_reply = "你来了。先别急着走,我正有话想和你说。";
        let affinity_delta = if npc_initiates_conversation {
            0
        } else {
            compute_npc_chat_affinity_delta(player_message, npc_reply, 0.0)
        };
        assert_eq!(affinity_delta, 0);
    }

    // Numbered/bulleted list markers must be stripped and extras truncated.
    #[test]
    fn npc_chat_suggestion_parser_strips_list_markers() {
        assert_eq!(
            parse_line_list_content("1. 继续问线索\n- 表明立场\n* 拉近关系\n4. 多余", 3),
            vec!["继续问线索", "表明立场", "拉近关系"]
        );
    }

    // Aggressive wording in a hostile chat triggers the deterministic break-off.
    #[test]
    fn hostile_chat_breakoff_fallback_triggers_on_negative_words() {
        let chat_directive = json!({
            "terminationMode": "hostile_model",
            "isHostileChat": true,
        });
        let npc_state = json!({ "chattedCount": 1 });
        assert!(should_hostile_chat_breakoff_deterministically(
            "少废话,让开,不然现在就动手。",
            Some(&chat_directive),
            Some(&npc_state),
        ));
    }

    // The chattedCount >= 4 cap breaks off even a polite hostile chat.
    #[test]
    fn hostile_chat_breakoff_fallback_triggers_after_four_turns() {
        let chat_directive = json!({
            "terminationMode": "hostile_model",
            "isHostileChat": true,
        });
        let npc_state = json!({ "chattedCount": 4 });
        assert!(should_hostile_chat_breakoff_deterministically(
            "我还想再问一个问题。",
            Some(&chat_directive),
            Some(&npc_state),
        ));
    }

    // Non-hostile chats never break off, regardless of wording or turn count.
    #[test]
    fn hostile_chat_breakoff_fallback_ignores_non_hostile_chat() {
        let chat_directive = json!({
            "terminationMode": "none",
            "isHostileChat": false,
        });
        let npc_state = json!({ "chattedCount": 6 });
        assert!(!should_hostile_chat_breakoff_deterministically(
            "少废话,让开。",
            Some(&chat_directive),
            Some(&npc_state),
        ));
    }

    // A request snapshot matching the session id must win over the persisted
    // record, so preview/test runtimes never leak the real save into prompts.
    #[tokio::test]
    async fn npc_chat_turn_prefers_request_snapshot_over_persisted_session() {
        let state = AppState::new(AppConfig::default()).expect("state should build");
        state
            .put_runtime_snapshot_record(
                "user_00000001".to_string(),
                1,
                "adventure".to_string(),
                json!({
                    "worldType": "WUXIA",
                    "runtimeSessionId": "runtime-main",
                    "playerCharacter": { "id": "hero-main", "name": "旧存档" },
                    "currentEncounter": { "id": "npc-main", "npcName": "旧 NPC" },
                    "sceneHostileNpcs": [],
                    "storyHistory": [],
                }),
                None,
                1,
            )
            .await
            .expect("snapshot should seed");
        let request_context = test_request_context();
        let mut payload = test_npc_chat_turn_payload(
            "runtime-preview",
            Some(json!({
                "worldType": "CUSTOM",
                "runtimeSessionId": "runtime-preview",
                "runtimePersistenceDisabled": true,
                "playerCharacter": { "id": "hero-preview", "name": "临时角色" },
                "currentEncounter": { "id": "npc-preview", "npcName": "临时 NPC" },
                "sceneHostileNpcs": [{ "id": "monster-preview", "name": "雾影" }],
                "storyHistory": [{ "text": "临时故事" }],
                "npcStates": {
                    "npc-preview": {
                        "affinity": 12,
                        "helpUsed": false,
                        "chattedCount": 2,
                        "giftsGiven": 0,
                        "recruited": false
                    }
                }
            })),
        );
        hydrate_npc_chat_turn_request_from_session(
            &state,
            &request_context,
            "user_00000001".to_string(),
            &mut payload,
        )
        .await
        .expect("request snapshot should hydrate");
        // All hydrated fields come from the request snapshot, not "runtime-main".
        assert_eq!(payload.world_type, "CUSTOM");
        assert_eq!(
            read_optional_string_field(&payload.encounter, "npcName").as_deref(),
            Some("临时 NPC")
        );
        assert_eq!(payload.monsters.len(), 1);
        assert_eq!(read_i32_field(&payload.npc_state, "affinity"), Some(12));
    }

    // A snapshot whose embedded session id differs from the request must be
    // rejected with 409 instead of silently projecting stale context.
    #[tokio::test]
    async fn npc_chat_turn_rejects_request_snapshot_session_mismatch() {
        let state = AppState::new(AppConfig::default()).expect("state should build");
        let request_context = test_request_context();
        let mut payload = test_npc_chat_turn_payload(
            "runtime-preview",
            Some(json!({
                "worldType": "WUXIA",
                "runtimeSessionId": "runtime-other",
            })),
        );
        let response = hydrate_npc_chat_turn_request_from_session(
            &state,
            &request_context,
            "user_00000001".to_string(),
            &mut payload,
        )
        .await
        .expect_err("snapshot session mismatch should fail");
        assert_eq!(response.status(), StatusCode::CONFLICT);
    }

    // Minimal request context fixture for the hydration tests.
    fn test_request_context() -> RequestContext {
        RequestContext::new(
            "runtime-chat-test".to_string(),
            "POST /api/runtime/chat/npc/turn/stream".to_string(),
            Duration::ZERO,
            false,
        )
    }

    // Builds a baseline request payload with an optional inline snapshot.
    fn test_npc_chat_turn_payload(
        session_id: &str,
        game_state: Option<Value>,
    ) -> NpcChatTurnRequest {
        NpcChatTurnRequest {
            session_id: Some(session_id.to_string()),
            snapshot: game_state.map(|game_state| RuntimeStorySnapshotPayload {
                saved_at: None,
                bottom_tab: "adventure".to_string(),
                game_state,
                current_story: None,
            }),
            world_type: String::new(),
            character: None,
            player: None,
            encounter: json!({ "id": "npc-request", "npcName": "请求 NPC" }),
            monsters: Vec::new(),
            history: Vec::new(),
            context: Value::Null,
            conversation_history: Vec::new(),
            dialogue: Vec::new(),
            combat_context: None,
            player_message: "你刚才看见了什么?".to_string(),
            npc_state: Value::Null,
            npc_initiates_conversation: false,
            quest_offer_context: None,
            chat_directive: None,
        }
    }
}