This commit is contained in:
2026-04-25 22:19:04 +08:00
parent 2ebfd1cf55
commit 8404081d7b
149 changed files with 10508 additions and 2732 deletions

View File

@@ -1,5 +1,5 @@
use module_big_fish::{BigFishAnchorPack, BigFishAnchorStatus, BigFishCreationStage};
use platform_llm::{LlmClient, LlmMessage, LlmStreamDelta, LlmTextRequest};
use platform_llm::LlmClient;
use serde::{Deserialize, Serialize};
use serde_json::{Value as JsonValue, json};
use spacetime_client::{
@@ -10,6 +10,9 @@ use crate::creation_agent_anchor_templates::{
get_creation_agent_anchor_template, render_anchor_question_block,
};
use crate::creation_agent_chat::render_quick_fill_extra_rules;
use crate::creation_agent_llm_turn::{
CreationAgentLlmTurnErrorMessages, stream_creation_agent_json_turn,
};
#[derive(Clone, Debug)]
pub(crate) struct BigFishAgentTurnRequest<'a> {
@@ -109,41 +112,26 @@ const BIG_FISH_AGENT_OUTPUT_CONTRACT: &str = r#"请严格按以下 JSON 输出
pub(crate) async fn run_big_fish_agent_turn<F>(
request: BigFishAgentTurnRequest<'_>,
mut on_reply_update: F,
on_reply_update: F,
) -> Result<BigFishAgentTurnResult, BigFishAgentTurnError>
where
F: FnMut(&str),
{
let llm_client = request
.llm_client
.ok_or_else(|| BigFishAgentTurnError::new("当前模型不可用,请稍后重试。"))?;
let prompt = build_big_fish_agent_prompt(request.session, request.quick_fill_requested);
let mut latest_reply_text = String::new();
let response = llm_client
.stream_text(
LlmTextRequest::new(vec![
LlmMessage::system(format!("{BIG_FISH_AGENT_SYSTEM_PROMPT}\n\n{prompt}")),
LlmMessage::user("请按约定输出这一轮的 JSON"),
]),
|delta: &LlmStreamDelta| {
if let Some(reply_progress) =
extract_reply_text_from_partial_json(delta.accumulated_text.as_str())
&& reply_progress != latest_reply_text
{
latest_reply_text = reply_progress.clone();
on_reply_update(reply_progress.as_str());
}
},
)
.await
.map_err(|_| BigFishAgentTurnError::new("大鱼吃小鱼聊天生成失败,请稍后重试。"))?;
let parsed = parse_json_response_text(response.content.as_str())
.map_err(|_| BigFishAgentTurnError::new("大鱼吃小鱼聊天结果解析失败,请稍后重试。"))?;
let output = parse_big_fish_model_output(&parsed)?;
if output.reply_text != latest_reply_text {
on_reply_update(output.reply_text.as_str());
}
let turn_output = stream_creation_agent_json_turn(
request.llm_client,
format!("{BIG_FISH_AGENT_SYSTEM_PROMPT}\n\n{prompt}"),
"请按约定输出这一轮的 JSON。",
CreationAgentLlmTurnErrorMessages {
model_unavailable: "当前模型不可用,请稍后重试",
generation_failed: "大鱼吃小鱼聊天生成失败,请稍后重试。",
parse_failed: "大鱼吃小鱼聊天结果解析失败,请稍后重试。",
},
on_reply_update,
BigFishAgentTurnError::new,
)
.await?;
let output = parse_big_fish_model_output(&turn_output.parsed)?;
Ok(BigFishAgentTurnResult {
assistant_reply_text: output.reply_text,
@@ -373,57 +361,6 @@ fn parse_big_fish_anchor_status(value: &str) -> BigFishAnchorStatus {
}
}
/// Parses the model's response text as JSON, tolerating prose around the
/// JSON object (e.g. markdown fences or commentary before/after it).
///
/// Tries the whole text first; on failure, retries on the span from the
/// first `{` to the last `}`. When no such well-formed span exists, the
/// parse error for the full text is returned.
fn parse_json_response_text(text: &str) -> Result<JsonValue, serde_json::Error> {
    if let Ok(value) = serde_json::from_str::<JsonValue>(text) {
        return Ok(value);
    }
    let Some(start) = text.find('{') else {
        return serde_json::from_str(text);
    };
    let Some(end) = text.rfind('}') else {
        return serde_json::from_str(text);
    };
    // Guard against the last `}` appearing before the first `{`
    // (e.g. "} junk {"): slicing with start > end would panic.
    if start > end {
        return serde_json::from_str(text);
    }
    serde_json::from_str(&text[start..=end])
}
/// Best-effort extraction of the `replyText` string value from a
/// possibly-incomplete JSON document produced by a streaming model.
///
/// Scans for the `"replyText"` key, skips the colon and opening quote,
/// then decodes JSON string escapes (`\n`, `\r`, `\t`, `\"`, `\\`, and
/// `\uXXXX` including UTF-16 surrogate pairs) up to the closing quote.
/// On a truncated stream the text decoded so far is returned; `None`
/// means the key/colon/opening quote is missing or no content has
/// arrived yet.
fn extract_reply_text_from_partial_json(text: &str) -> Option<String> {
    let marker = "\"replyText\"";
    let marker_index = text.find(marker)?;
    let after_marker = &text[marker_index + marker.len()..];
    let colon_index = after_marker.find(':')?;
    let after_colon = after_marker[colon_index + 1..].trim_start();
    let content = after_colon.strip_prefix('"')?;
    let mut result = String::new();
    let mut chars = content.chars();
    while let Some(character) = chars.next() {
        match character {
            // Unescaped quote: the value is complete.
            '"' => return Some(result),
            '\\' => {
                // Escape sequence; the stream may end mid-escape.
                let Some(escaped) = chars.next() else { break };
                match escaped {
                    'n' => result.push('\n'),
                    'r' => result.push('\r'),
                    't' => result.push('\t'),
                    'u' => match decode_unicode_escape(&mut chars) {
                        Some(decoded) => result.push(decoded),
                        // Truncated or malformed \uXXXX: stop extracting
                        // and report what we have so far.
                        None => break,
                    },
                    // Covers \" and \\, and passes unknown escapes verbatim.
                    other => result.push(other),
                }
            }
            other => result.push(other),
        }
    }
    // Partial value: report progress only once there is some content.
    if result.is_empty() { None } else { Some(result) }
}

/// Reads the four hex digits of a `\uXXXX` escape and decodes UTF-16
/// surrogate pairs (a high surrogate must be followed by an escaped low
/// surrogate). Returns `None` when the sequence is truncated or
/// structurally malformed; invalid lone/low surrogates map to U+FFFD.
fn decode_unicode_escape(chars: &mut std::str::Chars<'_>) -> Option<char> {
    let first = read_hex4(chars)?;
    if !(0xD800..=0xDBFF).contains(&first) {
        // BMP scalar, or a lone low surrogate (invalid -> U+FFFD).
        return Some(char::from_u32(first).unwrap_or('\u{FFFD}'));
    }
    // High surrogate: expect a following `\uXXXX` low surrogate.
    if chars.next()? != '\\' || chars.next()? != 'u' {
        return None;
    }
    let second = read_hex4(chars)?;
    if !(0xDC00..=0xDFFF).contains(&second) {
        return Some('\u{FFFD}');
    }
    // Combine the pair; the result is always a valid scalar <= 0x10FFFF.
    char::from_u32(0x1_0000 + ((first - 0xD800) << 10) + (second - 0xDC00))
}

/// Consumes exactly four characters and interprets them as a hex number.
fn read_hex4(chars: &mut std::str::Chars<'_>) -> Option<u32> {
    let mut value = 0u32;
    for _ in 0..4 {
        value = value * 16 + chars.next()?.to_digit(16)?;
    }
    Some(value)
}
#[cfg(test)]
mod tests {
use super::build_big_fish_agent_prompt;