1
Some checks failed
CI / verify (push) Has been cancelled

This commit is contained in:
2026-04-26 21:07:55 +08:00
609 changed files with 4601 additions and 14501 deletions

View File

@@ -1,12 +1,16 @@
use axum::{
Json,
extract::{Extension, State},
http::{StatusCode, header},
response::{IntoResponse, Response},
http::StatusCode,
response::{
IntoResponse, Response,
sse::{Event, Sse},
},
};
use platform_llm::{LlmMessage, LlmTextRequest};
use serde::Deserialize;
use serde_json::{Value, json};
use std::convert::Infallible;
use crate::{
http_error::AppError,
@@ -58,7 +62,7 @@ pub async fn stream_runtime_npc_chat_turn(
let npc_name = read_string_field(&payload.encounter, "npcName")
.or_else(|| read_string_field(&payload.encounter, "name"))
.unwrap_or_else(|| "对方".to_string());
let player_message = payload.player_message.trim();
let player_message = payload.player_message.trim().to_string();
if player_message.is_empty() && !payload.npc_initiates_conversation {
return Err(runtime_chat_error_response(
&request_context,
@@ -69,75 +73,106 @@ pub async fn stream_runtime_npc_chat_turn(
));
}
let llm_result =
generate_llm_npc_chat_turn(&state, &request_context, &payload, &npc_name).await;
let (mut body, npc_reply, suggestions, function_suggestions, force_exit) = match llm_result {
Some(result) => result,
None => {
let npc_reply = build_deterministic_npc_reply(
npc_name.as_str(),
player_message,
payload.npc_initiates_conversation,
);
let force_exit = should_force_chat_exit(payload.chat_directive.as_ref())
|| should_hostile_chat_breakoff_deterministically(
player_message,
payload.chat_directive.as_ref(),
);
let suggestions = if force_exit {
Vec::new()
} else {
build_deterministic_chat_suggestions(npc_name.as_str(), player_message)
};
let function_suggestions = if force_exit {
Vec::new()
} else {
build_fallback_function_suggestions(payload.chat_directive.as_ref())
};
let mut body = String::new();
append_sse_event(
&request_context,
&mut body,
let stream = async_stream::stream! {
let (reply_tx, mut reply_rx) = tokio::sync::mpsc::unbounded_channel::<String>();
// `platform-llm` 在当前任务内持续回调增量文本;外层用 channel 把增量转成真正的 SSE 分片。
let llm_turn = generate_llm_npc_chat_turn(
&state,
&payload,
&npc_name,
move |text| {
let _ = reply_tx.send(text.to_string());
},
);
tokio::pin!(llm_turn);
let llm_result = loop {
// 模型尚未结束时优先把已收到的累计回复推出去,避免等完整建议生成后才一次性返回。
tokio::select! {
result = &mut llm_turn => break result,
maybe_text = reply_rx.recv() => {
if let Some(text) = maybe_text {
yield Ok::<Event, Infallible>(runtime_chat_sse_json_event_or_error(
"reply_delta",
json!({ "text": text }),
));
}
}
}
};
while let Some(text) = reply_rx.recv().await {
yield Ok::<Event, Infallible>(runtime_chat_sse_json_event_or_error(
"reply_delta",
&json!({ "text": npc_reply }),
)?;
(
body,
npc_reply,
suggestions,
function_suggestions,
force_exit,
)
json!({ "text": text }),
));
}
};
let chatted_count = read_number_field(&payload.npc_state, "chattedCount").unwrap_or(0.0);
let affinity_delta = if payload.npc_initiates_conversation {
0
} else {
compute_npc_chat_affinity_delta(player_message, npc_reply.as_str(), chatted_count)
};
let complete_payload = json!({
"npcReply": npc_reply,
"affinityDelta": affinity_delta,
"affinityText": describe_affinity_shift(affinity_delta),
"suggestions": suggestions,
"functionSuggestions": function_suggestions,
"pendingQuestOffer": null,
"chatDirective": build_completion_directive(payload.chat_directive.as_ref(), force_exit),
});
let (npc_reply, suggestions, function_suggestions, force_exit) = match llm_result {
Some(result) => result,
None => {
let npc_reply = build_deterministic_npc_reply(
npc_name.as_str(),
player_message.as_str(),
payload.npc_initiates_conversation,
);
let force_exit = should_force_chat_exit(payload.chat_directive.as_ref())
|| should_hostile_chat_breakoff_deterministically(
player_message.as_str(),
payload.chat_directive.as_ref(),
);
let suggestions = if force_exit {
Vec::new()
} else {
build_deterministic_chat_suggestions(npc_name.as_str(), player_message.as_str())
};
let function_suggestions = if force_exit {
Vec::new()
} else {
build_fallback_function_suggestions(payload.chat_directive.as_ref())
};
yield Ok::<Event, Infallible>(runtime_chat_sse_json_event_or_error(
"reply_delta",
json!({ "text": npc_reply }),
));
(npc_reply, suggestions, function_suggestions, force_exit)
}
};
append_sse_event(&request_context, &mut body, "complete", &complete_payload)?;
body.push_str("data: [DONE]\n\n");
Ok(build_event_stream_response(body))
let chatted_count = read_number_field(&payload.npc_state, "chattedCount").unwrap_or(0.0);
let affinity_delta = if payload.npc_initiates_conversation {
0
} else {
compute_npc_chat_affinity_delta(player_message.as_str(), npc_reply.as_str(), chatted_count)
};
let complete_payload = json!({
"npcReply": npc_reply,
"affinityDelta": affinity_delta,
"affinityText": describe_affinity_shift(affinity_delta),
"suggestions": suggestions,
"functionSuggestions": function_suggestions,
"pendingQuestOffer": null,
"chatDirective": build_completion_directive(payload.chat_directive.as_ref(), force_exit),
});
yield Ok::<Event, Infallible>(runtime_chat_sse_json_event_or_error(
"complete",
complete_payload,
));
yield Ok::<Event, Infallible>(Event::default().data("[DONE]"));
};
Ok(Sse::new(stream).into_response())
}
async fn generate_llm_npc_chat_turn(
async fn generate_llm_npc_chat_turn<F>(
state: &AppState,
request_context: &RequestContext,
payload: &NpcChatTurnRequest,
npc_name: &str,
) -> Option<(String, String, Vec<String>, Vec<Value>, bool)> {
mut on_reply_update: F,
) -> Option<(String, Vec<String>, Vec<Value>, bool)>
where
F: FnMut(&str),
{
let llm_client = state.llm_client()?;
let character = payload
.character
@@ -160,7 +195,6 @@ async fn generate_llm_npc_chat_turn(
chat_directive: payload.chat_directive.as_ref(),
};
let mut body = String::new();
let reply_prompt = build_npc_chat_turn_reply_prompt(&prompt_input);
let mut reply_request = LlmTextRequest::new(vec![
LlmMessage::system(NPC_CHAT_TURN_REPLY_SYSTEM_PROMPT),
@@ -171,12 +205,7 @@ async fn generate_llm_npc_chat_turn(
let reply_response = llm_client
.stream_text(reply_request, |delta| {
let _ = append_sse_event(
request_context,
&mut body,
"reply_delta",
&json!({ "text": delta.accumulated_text }),
);
on_reply_update(delta.accumulated_text.as_str());
})
.await
.ok()?;
@@ -189,7 +218,7 @@ async fn generate_llm_npc_chat_turn(
});
if should_force_chat_exit(payload.chat_directive.as_ref()) {
return Some((body, npc_reply, Vec::new(), Vec::new(), true));
return Some((npc_reply, Vec::new(), Vec::new(), true));
}
let suggestion_prompt =
@@ -224,13 +253,7 @@ async fn generate_llm_npc_chat_turn(
suggestions = build_fallback_npc_chat_suggestions(payload.player_message.as_str());
}
Some((
body,
npc_reply,
suggestions,
function_suggestions,
force_exit,
))
Some((npc_reply, suggestions, function_suggestions, force_exit))
}
fn build_deterministic_npc_reply(
@@ -595,39 +618,20 @@ fn describe_affinity_shift(affinity_delta: i32) -> &'static str {
"这轮对话暂时没有带来明显关系变化。"
}
fn append_sse_event(
request_context: &RequestContext,
body: &mut String,
event: &str,
payload: &Value,
) -> Result<(), Response> {
let payload_text = serde_json::to_string(payload).map_err(|error| {
runtime_chat_error_response(
request_context,
AppError::from_status(StatusCode::INTERNAL_SERVER_ERROR).with_details(json!({
"provider": "runtime-chat",
"message": format!("SSE payload 序列化失败:{error}"),
})),
)
})?;
body.push_str("event: ");
body.push_str(event);
body.push('\n');
body.push_str("data: ");
body.push_str(&payload_text);
body.push_str("\n\n");
Ok(())
/// Serialize `payload` into an SSE event named `event_name`.
///
/// If JSON serialization fails (unlikely for an in-memory `Value`), fall back
/// to the shared `error` event so the stream still emits something well-formed.
fn runtime_chat_sse_json_event_or_error(event_name: &str, payload: Value) -> Event {
    serde_json::to_string(&payload).map_or_else(
        |_| runtime_chat_sse_error_event_message("SSE payload 序列化失败".to_string()),
        |payload_text| Event::default().event(event_name).data(payload_text),
    )
}
fn build_event_stream_response(body: String) -> Response {
(
[
(header::CONTENT_TYPE, "text/event-stream; charset=utf-8"),
(header::CACHE_CONTROL, "no-cache"),
],
body,
)
.into_response()
fn runtime_chat_sse_error_event_message(message: String) -> Event {
let payload = format!(
"{{\"message\":{}}}",
serde_json::to_string(&message)
.unwrap_or_else(|_| "\"SSE 错误事件序列化失败\"".to_string())
);
Event::default().event("error").data(payload)
}
fn runtime_chat_error_response(request_context: &RequestContext, error: AppError) -> Response {