chore: checkpoint local workspace changes

This commit is contained in:
2026-04-23 12:45:15 +08:00
parent 3eb9390e8f
commit a6cd9afcbb
47 changed files with 2154 additions and 529 deletions

View File

@@ -248,9 +248,29 @@ struct SingleTurnModelOutput {
next_anchor_content: EightAnchorContent,
progress_percent: u32,
reply_text: String,
fallback_error: Option<String>,
}
/// Error surfaced to callers when a custom-world agent turn cannot complete.
///
/// Carries a single user-facing message; the message itself is what callers
/// persist and display, so `Display` renders it verbatim.
#[derive(Clone, Debug)]
pub(crate) struct CustomWorldTurnError {
    message: String,
}

impl CustomWorldTurnError {
    /// Creates an error wrapping the given user-facing message.
    fn new(message: impl Into<String>) -> Self {
        let message = message.into();
        Self { message }
    }
}

impl std::fmt::Display for CustomWorldTurnError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.message)
    }
}

impl std::error::Error for CustomWorldTurnError {}
const BASE_SYSTEM_PROMPT: &str = r#"你是一个负责共创游戏世界设定的专业策划。
你正在和用户一起共创一个游戏世界。每一轮你都必须读取:
@@ -477,7 +497,7 @@ const OUTPUT_CONTRACT_REMINDER: &str = r#"请严格按以下 JSON 结构输出
pub(crate) async fn run_custom_world_agent_turn<F>(
request: CustomWorldAgentTurnRequest<'_>,
on_reply_update: F,
) -> CustomWorldAgentTurnResult
) -> Result<CustomWorldAgentTurnResult, CustomWorldTurnError>
where
F: FnMut(&str),
{
@@ -495,7 +515,7 @@ where
&current_anchor_content,
on_reply_update,
)
.await;
.await?;
let next_anchor_content = assistant_turn.next_anchor_content.clone();
let next_creator_intent = build_creator_intent_from_eight_anchor_content(&next_anchor_content);
@@ -607,30 +627,12 @@ where
))
};
let (phase_label, phase_detail, operation_status, operation_progress, error_message) =
match assistant_turn.fallback_error {
Some(message) => (
"模型暂不可用".to_string(),
message.clone(),
"failed".to_string(),
100,
Some(message),
),
None => (
"消息已处理".to_string(),
"本轮回复已由大模型生成并回写会话。".to_string(),
"completed".to_string(),
100,
None,
),
};
CustomWorldAgentTurnResult {
Ok(CustomWorldAgentTurnResult {
assistant_reply_text: assistant_turn.reply_text,
phase_label,
phase_detail,
operation_status,
operation_progress,
phase_label: "消息已处理".to_string(),
phase_detail: "本轮回复已由大模型生成并回写会话。".to_string(),
operation_status: "completed".to_string(),
operation_progress: 100,
stage: next_stage,
progress_percent,
focus_card_id: if should_stay_in_draft_stage {
@@ -648,8 +650,8 @@ where
recommended_replies_json,
quality_findings_json,
asset_coverage_json,
error_message,
}
error_message: None,
})
}
pub(crate) fn build_finalize_record_input(
@@ -664,8 +666,8 @@ pub(crate) fn build_finalize_record_input(
session_id,
owner_user_id,
operation_id,
assistant_message_id,
assistant_reply_text: result.assistant_reply_text,
assistant_message_id: Some(assistant_message_id),
assistant_reply_text: Some(result.assistant_reply_text),
phase_label: result.phase_label,
phase_detail: result.phase_detail,
operation_status: result.operation_status,
@@ -688,6 +690,75 @@ pub(crate) fn build_finalize_record_input(
}
}
/// Serializes `value` to a JSON string, or returns `None` when it is JSON `null`.
fn serialize_optional_json_object(value: &JsonValue) -> Option<String> {
    (!value.is_null()).then(|| serialize_json(value, &empty_json_object()))
}
fn serialize_string_array(values: &[String]) -> String {
serialize_json(
&JsonValue::Array(
values
.iter()
.cloned()
.map(JsonValue::String)
.collect::<Vec<_>>(),
),
&empty_json_array(),
)
}
/// Builds the finalize record for a turn that ended in failure.
///
/// The session's last-known state (stage, progress, anchor content, drafts,
/// suggestions) is written back unchanged, while the record is marked
/// `failed` and carries `error_message` both as the phase detail and as the
/// structured error field.
pub(crate) fn build_failed_finalize_record_input(
    session_id: String,
    owner_user_id: String,
    operation_id: String,
    session: &CustomWorldAgentSessionRecord,
    error_message: String,
    updated_at_micros: i64,
) -> CustomWorldAgentMessageFinalizeRecordInput {
    // Serialize the session snapshot up front so the struct literal stays flat.
    let anchor_content_json =
        serialize_json(&session.anchor_content, &empty_agent_anchor_content_json());
    let creator_intent_json = serialize_optional_json_object(&session.creator_intent);
    let creator_intent_readiness_json = serialize_json(
        &session.creator_intent_readiness,
        &empty_agent_creator_intent_readiness_json(),
    );
    let anchor_pack_json = serialize_optional_json_object(&session.anchor_pack);
    let draft_profile_json = serialize_optional_json_object(&session.draft_profile);
    let pending_clarifications_json = serialize_json(
        &JsonValue::Array(session.pending_clarifications.clone()),
        &empty_json_array(),
    );
    let suggested_actions_json = serialize_json(
        &JsonValue::Array(session.suggested_actions.clone()),
        &empty_json_array(),
    );
    let quality_findings_json = serialize_json(
        &JsonValue::Array(session.quality_findings.clone()),
        &empty_json_array(),
    );
    let asset_coverage_json =
        serialize_json(&session.asset_coverage, &empty_agent_asset_coverage_json());

    CustomWorldAgentMessageFinalizeRecordInput {
        session_id,
        owner_user_id,
        operation_id,
        // A failed turn produced no assistant message or reply.
        assistant_message_id: None,
        assistant_reply_text: None,
        phase_label: "消息处理失败".to_string(),
        phase_detail: error_message.clone(),
        operation_status: "failed".to_string(),
        operation_progress: 100,
        // Keep the session exactly where it was before the failed turn.
        stage: session.stage.clone(),
        progress_percent: session.progress_percent,
        focus_card_id: session.focus_card_id.clone(),
        anchor_content_json,
        creator_intent_json,
        creator_intent_readiness_json,
        anchor_pack_json,
        draft_profile_json,
        pending_clarifications_json,
        suggested_actions_json,
        recommended_replies_json: serialize_string_array(&session.recommended_replies),
        quality_findings_json,
        asset_coverage_json,
        error_message: Some(error_message),
        updated_at_micros,
    }
}
async fn stream_single_turn<F>(
llm_client: Option<&LlmClient>,
messages: &[CustomWorldAgentMessageRecord],
@@ -696,17 +767,13 @@ async fn stream_single_turn<F>(
quick_fill_requested: bool,
current_anchor_content: &EightAnchorContent,
mut on_reply_update: F,
) -> SingleTurnModelOutput
) -> Result<SingleTurnModelOutput, CustomWorldTurnError>
where
F: FnMut(&str),
{
if llm_client.is_none() {
let fallback = build_unavailable_output(current_anchor_content, progress_percent, true);
on_reply_update(fallback.reply_text.as_str());
return fallback;
}
let llm_client = llm_client.expect("checked above");
let llm_client = llm_client.ok_or_else(|| {
CustomWorldTurnError::new("当前模型不可用,请稍后重试。")
})?;
let chat_history = build_chat_history(messages);
let dynamic_state =
resolve_dynamic_state(llm_client, current_turn, progress_percent, quick_fill_requested, current_anchor_content, &chat_history)
@@ -739,21 +806,13 @@ where
)
.await;
let Ok(response) = response else {
let fallback = build_unavailable_output(current_anchor_content, progress_percent, false);
if fallback.reply_text != latest_reply_text {
on_reply_update(fallback.reply_text.as_str());
}
return fallback;
};
let response = response.map_err(|_| {
CustomWorldTurnError::new("这一轮设定生成失败,请稍后重试。")
})?;
let Ok(parsed) = parse_json_response_text(response.content.as_str()) else {
let fallback = build_unavailable_output(current_anchor_content, progress_percent, false);
if fallback.reply_text != latest_reply_text {
on_reply_update(fallback.reply_text.as_str());
}
return fallback;
};
let parsed = parse_json_response_text(response.content.as_str()).map_err(|_| {
CustomWorldTurnError::new("模型返回结果解析失败,请稍后重试。")
})?;
let next_anchor_content = normalize_eight_anchor_content(
parsed
@@ -765,19 +824,18 @@ where
} else {
clamp_progress_percent(parsed.get("progressPercent"))
};
let reply_text = to_text(parsed.get("replyText")).unwrap_or_else(|| {
build_unavailable_output(current_anchor_content, progress_percent, false).reply_text
});
let reply_text = to_text(parsed.get("replyText")).ok_or_else(|| {
CustomWorldTurnError::new("模型返回结果缺少有效回复,请稍后重试。")
})?;
if reply_text != latest_reply_text {
on_reply_update(reply_text.as_str());
}
SingleTurnModelOutput {
Ok(SingleTurnModelOutput {
next_anchor_content,
progress_percent,
reply_text,
fallback_error: None,
}
})
}
async fn resolve_dynamic_state(
@@ -1605,28 +1663,6 @@ fn render_chat_history_context(chat_history: &[JsonValue]) -> String {
)
}
/// Builds the fallback output for a turn the model could not produce: the
/// previous anchor content and progress are preserved, and a user-facing
/// notice plus a `fallback_error` are attached.
///
/// `unavailable` distinguishes "no model client configured" from "this
/// particular turn failed" — the wording of both strings differs accordingly.
fn build_unavailable_output(
    current_anchor_content: &EightAnchorContent,
    progress_percent: u32,
    unavailable: bool,
) -> SingleTurnModelOutput {
    let (reply_text, fallback_error) = if unavailable {
        (
            "当前模型不可用,这一轮设定先保留上一版。你可以稍后重试。".to_string(),
            "当前模型不可用,这一轮设定先保留上一版。".to_string(),
        )
    } else {
        (
            "这一轮设定还没成功更新,我先保留上一版。你可以再发一次,我继续接着收。".to_string(),
            "这一轮设定还没成功更新,我先保留上一版。".to_string(),
        )
    };
    SingleTurnModelOutput {
        next_anchor_content: current_anchor_content.clone(),
        progress_percent,
        reply_text,
        fallback_error: Some(fallback_error),
    }
}
fn parse_json_response_text(text: &str) -> Result<JsonValue, serde_json::Error> {
let trimmed = text.trim();
if let Some(start) = trimmed.find('{')
@@ -1642,53 +1678,47 @@ fn extract_reply_text_from_partial_json(text: &str) -> Option<String> {
let key_index = text.find("\"replyText\"")?;
let colon_index = text[key_index..].find(':')? + key_index;
let mut cursor = colon_index + 1;
let bytes = text.as_bytes();
while cursor < bytes.len() && bytes[cursor].is_ascii_whitespace() {
while cursor < text.len() && text.as_bytes()[cursor].is_ascii_whitespace() {
cursor += 1;
}
if bytes.get(cursor).copied() != Some(b'"') {
if text.as_bytes().get(cursor).copied() != Some(b'"') {
return None;
}
cursor += 1;
let mut decoded = String::new();
while cursor < bytes.len() {
let current = bytes[cursor];
if current == b'"' {
let remainder = text.get(cursor..)?;
let mut characters = remainder.chars().peekable();
while let Some(current) = characters.next() {
if current == '"' {
return Some(decoded);
}
if current == b'\\' {
cursor += 1;
if cursor >= bytes.len() {
break;
}
match bytes[cursor] {
b'"' => decoded.push('"'),
b'\\' => decoded.push('\\'),
b'/' => decoded.push('/'),
b'b' => decoded.push('\u{0008}'),
b'f' => decoded.push('\u{000C}'),
b'n' => decoded.push('\n'),
b'r' => decoded.push('\r'),
b't' => decoded.push('\t'),
b'u' => {
if cursor + 4 < bytes.len()
&& let Ok(hex) = std::str::from_utf8(&bytes[cursor + 1..cursor + 5])
&& let Ok(code) = u16::from_str_radix(hex, 16)
if current == '\\' {
let escaped = characters.next()?;
match escaped {
'"' => decoded.push('"'),
'\\' => decoded.push('\\'),
'/' => decoded.push('/'),
'b' => decoded.push('\u{0008}'),
'f' => decoded.push('\u{000C}'),
'n' => decoded.push('\n'),
'r' => decoded.push('\r'),
't' => decoded.push('\t'),
'u' => {
let mut hex = String::new();
for _ in 0..4 {
hex.push(characters.next()?);
}
if let Ok(code) = u16::from_str_radix(hex.as_str(), 16)
&& let Some(character) = char::from_u32(code as u32)
{
decoded.push(character);
cursor += 5;
continue;
}
decoded.push('u');
}
other => decoded.push(other as char),
other => decoded.push(other),
}
cursor += 1;
continue;
}
decoded.push(current as char);
cursor += 1;
decoded.push(current);
}
Some(decoded)
}
@@ -2010,3 +2040,17 @@ impl PromptConversationMode {
}
}
}
#[cfg(test)]
mod tests {
    use super::extract_reply_text_from_partial_json;

    // Multi-byte (Chinese) characters must survive extraction from a
    // truncated/partial JSON stream untouched.
    #[test]
    fn extract_reply_text_from_partial_json_preserves_chinese_characters() {
        let partial_json = r#"{"replyText":"你好,潮雾列岛","progressPercent":32"#;
        assert_eq!(
            extract_reply_text_from_partial_json(partial_json).as_deref(),
            Some("你好,潮雾列岛")
        );
    }
}