Auto-open draft result after foundation completes

This commit is contained in:
2026-04-25 10:52:39 +08:00
parent 35c2bce6f1
commit 03acbc5cb1
31 changed files with 36472 additions and 232 deletions

View File

@@ -315,7 +315,7 @@ fn default_big_fish_anchor_label(field_name: &str) -> &'static str {
/// Serializes a big-fish anchor-pack record to pretty-printed JSON.
///
/// Falls back to the empty JSON object (`"{}"`) if serialization fails, so
/// callers always receive a syntactically valid JSON string.
fn serialize_record_anchor_pack(anchor_pack: &spacetime_client::BigFishAnchorPackRecord) -> String {
    // `to_string_pretty` yields Result<String, _>; exactly one fallback is needed —
    // a second chained `.unwrap_or_else` would not compile (String has no such method).
    serde_json::to_string_pretty(&map_big_fish_record_anchor_pack(anchor_pack))
        .unwrap_or_else(|_| "{}".to_string())
}
fn map_big_fish_record_anchor_pack(

View File

@@ -82,7 +82,11 @@ pub struct AppConfig {
pub llm_retry_backoff_ms: u64,
pub dashscope_base_url: String,
pub dashscope_api_key: Option<String>,
pub dashscope_scene_image_model: String,
pub dashscope_reference_image_model: String,
pub dashscope_cover_image_model: String,
pub dashscope_image_request_timeout_ms: u64,
pub draft_asset_generation_max_concurrent_requests: usize,
pub ark_character_video_base_url: String,
pub ark_character_video_api_key: Option<String>,
pub ark_character_video_request_timeout_ms: u64,
@@ -166,7 +170,11 @@ impl Default for AppConfig {
llm_retry_backoff_ms: DEFAULT_RETRY_BACKOFF_MS,
dashscope_base_url: "https://dashscope.aliyuncs.com/api/v1".to_string(),
dashscope_api_key: None,
dashscope_scene_image_model: "wan2.2-t2i-flash".to_string(),
dashscope_reference_image_model: "qwen-image-2.0".to_string(),
dashscope_cover_image_model: "wan2.2-t2i-flash".to_string(),
dashscope_image_request_timeout_ms: 150_000,
draft_asset_generation_max_concurrent_requests: 4,
ark_character_video_base_url: DEFAULT_ARK_BASE_URL.to_string(),
ark_character_video_api_key: None,
ark_character_video_request_timeout_ms: 420_000,
@@ -397,16 +405,14 @@ impl AppConfig {
if let Some(spacetime_server_url) = read_first_non_empty_env(&[
"GENARRATIVE_SPACETIME_SERVER_URL",
"GENARRATIVE_SPACETIME_MAINCLOUD_SERVER_URL",
])
{
]) {
config.spacetime_server_url = spacetime_server_url;
}
if let Some(spacetime_database) = read_first_non_empty_env(&[
"GENARRATIVE_SPACETIME_DATABASE",
"GENARRATIVE_SPACETIME_MAINCLOUD_DATABASE",
])
{
]) {
config.spacetime_database = spacetime_database;
}
@@ -466,12 +472,38 @@ impl AppConfig {
config.dashscope_api_key = read_first_non_empty_env(&["DASHSCOPE_API_KEY"]);
if let Some(dashscope_scene_image_model) =
read_first_non_empty_env(&["DASHSCOPE_SCENE_IMAGE_MODEL", "DASHSCOPE_IMAGE_MODEL"])
{
config.dashscope_scene_image_model = dashscope_scene_image_model;
}
if let Some(dashscope_reference_image_model) = read_first_non_empty_env(&[
"DASHSCOPE_REFERENCE_IMAGE_MODEL",
"DASHSCOPE_IMAGE_EDIT_MODEL",
]) {
config.dashscope_reference_image_model = dashscope_reference_image_model;
}
if let Some(dashscope_cover_image_model) =
read_first_non_empty_env(&["DASHSCOPE_COVER_IMAGE_MODEL", "DASHSCOPE_IMAGE_MODEL"])
{
config.dashscope_cover_image_model = dashscope_cover_image_model;
}
if let Some(dashscope_image_request_timeout_ms) =
read_first_positive_u64_env(&["DASHSCOPE_IMAGE_REQUEST_TIMEOUT_MS"])
{
config.dashscope_image_request_timeout_ms = dashscope_image_request_timeout_ms;
}
if let Some(max_concurrent_requests) = read_first_usize_env(&[
"GENARRATIVE_DRAFT_ASSET_GENERATION_MAX_CONCURRENT_REQUESTS",
"DRAFT_ASSET_GENERATION_MAX_CONCURRENT_REQUESTS",
]) {
config.draft_asset_generation_max_concurrent_requests = max_concurrent_requests;
}
if let Some(ark_character_video_base_url) = read_first_non_empty_env(&[
"ARK_CHARACTER_VIDEO_BASE_URL",
"ARK_BASE_URL",
@@ -625,6 +657,14 @@ fn read_first_u64_env(keys: &[&str]) -> Option<u64> {
.find_map(|key| env::var(key).ok().and_then(|value| parse_u64(&value)))
}
fn read_first_usize_env(keys: &[&str]) -> Option<usize> {
keys.iter().find_map(|key| {
env::var(key)
.ok()
.and_then(|value| parse_positive_usize(&value))
})
}
fn read_first_u8_env(keys: &[&str]) -> Option<u8> {
keys.iter()
.find_map(|key| env::var(key).ok().and_then(|value| parse_u8(&value)))
@@ -706,6 +746,15 @@ fn parse_u64(raw: &str) -> Option<u64> {
raw.trim().parse::<u64>().ok()
}
/// Parses `raw` (surrounding whitespace ignored) as a strictly positive `usize`.
///
/// Returns `None` for unparsable input and for the literal value `0`, so
/// "invalid" and "explicitly disabled" are treated uniformly by callers.
fn parse_positive_usize(raw: &str) -> Option<usize> {
    raw.trim().parse::<usize>().ok().filter(|&value| value > 0)
}
/// Parses `raw` as a `u8`, ignoring surrounding whitespace.
///
/// Returns `None` on parse failure, including values outside `0..=255`.
fn parse_u8(raw: &str) -> Option<u8> {
    let trimmed = raw.trim();
    trimmed.parse().ok()
}

View File

@@ -1,3 +1,5 @@
use std::collections::BTreeMap;
use axum::{
Json,
extract::{Extension, Path, State, rejection::JsonRejection},
@@ -31,14 +33,14 @@ use spacetime_client::{
CustomWorldAgentMessageFinalizeRecordInput, CustomWorldAgentMessageRecord,
CustomWorldAgentMessageSubmitRecordInput, CustomWorldAgentOperationProgressRecordInput,
CustomWorldAgentOperationRecord, CustomWorldAgentSessionCreateRecordInput,
CustomWorldAgentSessionRecord,
CustomWorldDraftCardDetailRecord, CustomWorldDraftCardDetailSectionRecord,
CustomWorldDraftCardRecord, CustomWorldGalleryEntryRecord, CustomWorldLibraryEntryRecord,
CustomWorldAgentSessionRecord, CustomWorldDraftCardDetailRecord,
CustomWorldDraftCardDetailSectionRecord, CustomWorldDraftCardRecord,
CustomWorldGalleryEntryRecord, CustomWorldLibraryEntryRecord,
CustomWorldProfileUpsertRecordInput, CustomWorldPublishGateRecord,
CustomWorldResultPreviewBlockerRecord, CustomWorldSupportedActionRecord,
CustomWorldWorkSummaryRecord, SpacetimeClientError,
};
use std::{collections::BTreeSet, convert::Infallible, sync::Arc};
use std::{collections::BTreeSet, convert::Infallible, sync::Arc, time::Instant};
use tokio::sync::Semaphore;
use tokio::task::JoinSet;
use tracing::info;
@@ -66,7 +68,6 @@ use crate::{
};
const DRAFT_ASSET_GENERATION_MAX_ATTEMPTS: u32 = 3;
const DRAFT_ASSET_GENERATION_MAX_CONCURRENT_REQUESTS: usize = 2;
pub async fn get_custom_world_library(
State(state): State<AppState>,
@@ -1229,7 +1230,7 @@ fn spawn_custom_world_draft_foundation_job(
};
let image_generation_limiter = Arc::new(Semaphore::new(
DRAFT_ASSET_GENERATION_MAX_CONCURRENT_REQUESTS,
state.config.draft_asset_generation_max_concurrent_requests,
));
let role_visual_profile_input = draft_profile_value.clone();
let act_background_profile_input = draft_profile_value.clone();
@@ -1270,7 +1271,9 @@ fn spawn_custom_world_draft_foundation_job(
Err(message) => asset_generation_errors.push(("生成角色主形象失败", message)),
}
match act_background_result {
Ok(profile) => merge_generated_act_backgrounds(&mut draft_profile_with_assets, &profile),
Ok(profile) => {
merge_generated_act_backgrounds(&mut draft_profile_with_assets, &profile)
}
Err(message) => asset_generation_errors.push(("生成幕背景图失败", message)),
}
draft_profile_value = draft_profile_with_assets;
@@ -1465,13 +1468,12 @@ async fn generate_draft_foundation_role_visuals(
)
.await
};
match generation_result
{
match generation_result {
Ok(generated) => {
return Ok::<_, String>((role_ref.key, role_ref.index, generated));
}
Err(error) => {
last_error = Some(error.message().to_string());
last_error = Some(error.body_text());
if attempt < DRAFT_ASSET_GENERATION_MAX_ATTEMPTS {
tokio::time::sleep(std::time::Duration::from_millis(
300 * u64::from(attempt),
@@ -1531,8 +1533,16 @@ async fn generate_draft_foundation_act_backgrounds(
let world_name =
json_text_from_value(draft_profile, "name").unwrap_or_else(|| "未命名世界".to_string());
let profile_id = json_text_from_value(draft_profile, "id");
let scene_image_profile_input = draft_profile.clone();
let act_refs = collect_scene_act_refs(draft_profile);
validate_scene_act_background_prompts(&act_refs)?;
tracing::info!(
operation_id,
session_id = %session.session_id,
act_count = act_refs.len(),
max_concurrent_requests = state.config.draft_asset_generation_max_concurrent_requests,
"开始并行生成草稿幕背景图"
);
upsert_custom_world_draft_foundation_progress(
state,
&session.session_id,
@@ -1553,38 +1563,79 @@ async fn generate_draft_foundation_act_backgrounds(
let task_owner_user_id = owner_user_id.to_string();
let task_profile_id = profile_id.clone();
let task_world_name = world_name.clone();
let task_profile = scene_image_profile_input.clone();
let task_limiter = image_generation_limiter.clone();
let task_operation_id = operation_id.to_string();
let task_session_id = session.session_id.clone();
generation_tasks.spawn(async move {
let mut last_error = None;
for attempt in 1..=DRAFT_ASSET_GENERATION_MAX_ATTEMPTS {
let attempt_started_at = Instant::now();
tracing::info!(
operation_id = %task_operation_id,
session_id = %task_session_id,
chapter_index = act_ref.chapter_index,
act_index = act_ref.act_index,
scene_id = %act_ref.scene_id,
scene_name = %act_ref.scene_name,
attempt,
"开始生成单幕背景图"
);
let generation_result = {
let _permit = task_limiter
.acquire()
.await
.map_err(|error| format!("图片生成并发控制失效:{error}"))?;
let _permit = task_limiter.acquire().await.map_err(|error| {
(
act_ref.chapter_index,
act_ref.act_index,
format!("图片生成并发控制失效:{error}"),
)
})?;
generate_custom_world_scene_image_for_profile(
&task_state,
task_owner_user_id.as_str(),
&task_profile,
task_profile_id.as_deref(),
task_world_name.as_str(),
act_ref.scene_id.as_str(),
act_ref.title.as_str(),
act_ref.summary.as_str(),
act_ref.scene_name.as_str(),
act_ref.scene_description.as_str(),
act_ref.prompt.as_str(),
)
.await
};
match generation_result
{
match generation_result {
Ok(generated) => {
return Ok::<_, String>((
tracing::info!(
operation_id = %task_operation_id,
session_id = %task_session_id,
chapter_index = act_ref.chapter_index,
act_index = act_ref.act_index,
scene_id = %act_ref.scene_id,
scene_name = %act_ref.scene_name,
attempt,
elapsed_ms = attempt_started_at.elapsed().as_millis(),
"单幕背景图生成成功"
);
return Ok::<_, (usize, usize, String)>((
act_ref.chapter_index,
act_ref.act_index,
generated,
));
}
Err(error) => {
last_error = Some(error.message().to_string());
let error_message = error.body_text();
tracing::warn!(
operation_id = %task_operation_id,
session_id = %task_session_id,
chapter_index = act_ref.chapter_index,
act_index = act_ref.act_index,
scene_id = %act_ref.scene_id,
scene_name = %act_ref.scene_name,
attempt,
elapsed_ms = attempt_started_at.elapsed().as_millis(),
error_message = %error_message,
"单幕背景图生成失败"
);
last_error = Some(error_message);
if attempt < DRAFT_ASSET_GENERATION_MAX_ATTEMPTS {
tokio::time::sleep(std::time::Duration::from_millis(
300 * u64::from(attempt),
@@ -1595,23 +1646,34 @@ async fn generate_draft_foundation_act_backgrounds(
}
}
Err(format!(
"{}章第{}幕「{}」背景图连续生成 {} 次失败:{}",
act_ref.chapter_index + 1,
act_ref.act_index + 1,
act_ref.title,
DRAFT_ASSET_GENERATION_MAX_ATTEMPTS,
last_error.unwrap_or_else(|| "未知错误".to_string())
Err((
act_ref.chapter_index,
act_ref.act_index,
format!(
"{}章第{}幕「{}」背景图连续生成 {} 次失败:{}",
act_ref.chapter_index + 1,
act_ref.act_index + 1,
act_ref.scene_name,
DRAFT_ASSET_GENERATION_MAX_ATTEMPTS,
last_error.unwrap_or_else(|| "未知错误".to_string())
),
))
});
}
let mut errors = Vec::new();
let mut generated_count = 0usize;
while let Some(result) = generation_tasks.join_next().await {
let task_result = result.map_err(|error| error.to_string())?;
let (chapter_index, act_index, generated) = match task_result {
Ok(value) => value,
Err(message) => {
Err((chapter_index, act_index, message)) => {
mark_scene_act_background_generation_error(
draft_profile,
chapter_index,
act_index,
&message,
);
errors.push(message);
continue;
}
@@ -1641,14 +1703,48 @@ async fn generate_draft_foundation_act_backgrounds(
"generatedSceneModel".to_string(),
Value::String(generated.model),
);
generated_count += 1;
}
}
if !errors.is_empty() {
if generated_count > 0 {
// 自动草稿生成和手动生成用的是同一套生图与资产入库能力;这里不能因为批量中的个别幕失败,
// 把已经写入 profile 分支的 backgroundImageSrc 一起丢掉,否则前端就看不到已经生成好的图。
tracing::warn!(
generated_count,
failed_count = errors.len(),
error_message = %join_unique_error_messages(errors),
"部分幕背景图生成失败,已保留成功生成的幕图"
);
return Ok(());
}
return Err(join_unique_error_messages(errors));
}
Ok(())
}
/// Records a background-generation failure on the act at
/// `sceneChapterBlueprints[chapter_index].acts[act_index]`.
///
/// Writes the trimmed `message` into the act object's
/// `backgroundGenerationError` field. Silently does nothing when the indices
/// do not resolve to an act object inside `draft_profile`.
fn mark_scene_act_background_generation_error(
    draft_profile: &mut Value,
    chapter_index: usize,
    act_index: usize,
    message: &str,
) {
    let act_slot = draft_profile
        .get_mut("sceneChapterBlueprints")
        .and_then(Value::as_array_mut)
        .and_then(|chapters| chapters.get_mut(chapter_index))
        .and_then(|chapter| chapter.get_mut("acts"))
        .and_then(Value::as_array_mut)
        .and_then(|acts| acts.get_mut(act_index))
        .and_then(Value::as_object_mut);
    let Some(act_object) = act_slot else {
        return;
    };
    act_object.insert(
        "backgroundGenerationError".to_string(),
        Value::String(message.trim().to_string()),
    );
}
fn join_unique_error_messages(messages: Vec<String>) -> String {
// 并行图片任务可能从同一个上游故障返回完全相同的业务错误;用户侧只需要看到去重后的失败项。
messages
@@ -1673,12 +1769,13 @@ struct SceneActGenerationRef {
chapter_index: usize,
act_index: usize,
scene_id: String,
title: String,
summary: String,
scene_name: String,
scene_description: String,
prompt: String,
}
fn collect_scene_act_refs(draft_profile: &Value) -> Vec<SceneActGenerationRef> {
let scene_context_by_id = collect_scene_context_by_id(draft_profile);
draft_profile
.get("sceneChapterBlueprints")
.and_then(Value::as_array)
@@ -1689,21 +1786,31 @@ fn collect_scene_act_refs(draft_profile: &Value) -> Vec<SceneActGenerationRef> {
let chapter_scene_id = json_text_from_value(chapter, "sceneId")
.or_else(|| json_text_from_value(chapter, "id"))
.unwrap_or_else(|| format!("chapter-{chapter_index}"));
let chapter_scene_name = json_first_text_from_value(
chapter,
&["sceneName", "landmarkName", "name", "title"],
)
.unwrap_or_else(|| chapter_scene_id.clone());
let chapter_scene_context = scene_context_by_id
.get(&chapter_scene_id)
.cloned()
.unwrap_or_else(|| SceneImageContext {
id: chapter_scene_id.clone(),
name: chapter_scene_name.clone(),
description: json_text_from_value(chapter, "description")
.or_else(|| json_text_from_value(chapter, "summary"))
.unwrap_or_default(),
danger_level: json_text_from_value(chapter, "dangerLevel").unwrap_or_default(),
});
let scene_contexts = scene_context_by_id.clone();
chapter
.get("acts")
.and_then(Value::as_array)
.into_iter()
.flatten()
.enumerate()
.map(move |(act_index, act)| SceneActGenerationRef {
chapter_index,
act_index,
scene_id: json_text_from_value(act, "sceneId")
.unwrap_or_else(|| chapter_scene_id.clone()),
title: json_text_from_value(act, "title")
.unwrap_or_else(|| format!("{}", act_index + 1)),
summary: json_text_from_value(act, "summary").unwrap_or_default(),
prompt: json_first_text_from_value(
.map(move |(act_index, act)| {
let prompt = json_first_text_from_value(
act,
&[
"backgroundPromptText",
@@ -1715,19 +1822,90 @@ fn collect_scene_act_refs(draft_profile: &Value) -> Vec<SceneActGenerationRef> {
"visualPrompt",
],
)
.unwrap_or_default(),
.unwrap_or_default();
let scene_name = json_first_text_from_value(
act,
&["sceneName", "landmarkName", "locationName"],
)
.unwrap_or_else(|| chapter_scene_context.name.clone());
let act_scene_id = json_text_from_value(act, "sceneId")
.unwrap_or_else(|| chapter_scene_context.id.clone());
let scene_context =
scene_contexts
.get(&act_scene_id)
.cloned()
.unwrap_or_else(|| SceneImageContext {
id: act_scene_id.clone(),
name: scene_name,
description: chapter_scene_context.description.clone(),
danger_level: chapter_scene_context.danger_level.clone(),
});
SceneActGenerationRef {
chapter_index,
act_index,
scene_id: act_scene_id,
scene_name: scene_context.name,
scene_description: scene_context.description,
prompt: prompt.clone(),
}
})
})
.collect()
}
/// Snapshot of a scene's descriptive fields used when building image prompts.
#[derive(Clone, Debug)]
struct SceneImageContext {
    // Scene identifier; upstream falls back to a synthesized id when absent.
    id: String,
    // Human-readable scene name; falls back to the id when unnamed.
    name: String,
    // Free-text visual description of the scene; may be empty.
    description: String,
    // Danger-level label as stored in the profile JSON; may be empty.
    danger_level: String,
}
/// Indexes every known scene in the draft profile by its scene id.
///
/// Contexts come from the profile's `camp` object (first) and `landmarks`
/// array; a later entry with the same id overwrites an earlier one. A
/// `BTreeMap` keeps iteration order deterministic.
fn collect_scene_context_by_id(draft_profile: &Value) -> BTreeMap<String, SceneImageContext> {
    let camp_context = draft_profile
        .get("camp")
        .and_then(Value::as_object)
        .and_then(|camp| scene_context_from_object(camp, "camp"));
    let landmark_contexts = draft_profile
        .get("landmarks")
        .and_then(Value::as_array)
        .into_iter()
        .flatten()
        .filter_map(Value::as_object)
        .filter_map(|landmark| scene_context_from_object(landmark, "landmark"));
    camp_context
        .into_iter()
        .chain(landmark_contexts)
        .map(|context| (context.id.clone(), context))
        .collect()
}
/// Builds a `SceneImageContext` from a raw profile object (camp or landmark).
///
/// The id falls back from `id` to `sceneId` to `fallback_id`; the name falls
/// back from `name` to `sceneName` to the resolved id; the description falls
/// back from `description` to `visualDescription` to empty. Always returns
/// `Some` — the `Option` return type is kept for call-site compatibility.
fn scene_context_from_object(
    object: &Map<String, Value>,
    fallback_id: &str,
) -> Option<SceneImageContext> {
    let resolved_id = read_string_field(object, "id")
        .or_else(|| read_string_field(object, "sceneId"))
        .unwrap_or_else(|| fallback_id.to_string());
    let resolved_name = read_string_field(object, "name")
        .or_else(|| read_string_field(object, "sceneName"))
        .unwrap_or_else(|| resolved_id.clone());
    let description = read_string_field(object, "description")
        .or_else(|| read_string_field(object, "visualDescription"))
        .unwrap_or_default();
    let danger_level = read_string_field(object, "dangerLevel").unwrap_or_default();
    Some(SceneImageContext {
        id: resolved_id,
        name: resolved_name,
        description,
        danger_level,
    })
}
fn validate_scene_act_background_prompts(act_refs: &[SceneActGenerationRef]) -> Result<(), String> {
if let Some(act_ref) = act_refs.iter().find(|act_ref| act_ref.prompt.is_empty()) {
return Err(format!(
"{}章第{}幕「{}」缺少 backgroundPromptText不能在幕背景图描述文本生成前直接生图。",
act_ref.chapter_index + 1,
act_ref.act_index + 1,
act_ref.title
act_ref.scene_name
));
}
@@ -2480,13 +2658,28 @@ mod tests {
#[test]
fn collect_scene_act_refs_accepts_scene_prompt_text_alias() {
let draft_profile = json!({
"name": "雾港纪元",
"tone": "潮湿、悬疑、低照度",
"landmarks": [
{
"id": "scene-office",
"name": "旧港办公室",
"description": "旧港边缘的玻璃办公室,窗外能看到潮湿码头。",
"dangerLevel": "low"
}
],
"sceneChapterBlueprints": [
{
"sceneId": "scene-office",
"sceneName": "旧港办公室",
"acts": [
{
"title": "深夜工位",
"summary": "团队在凌晨三点继续赶版本。",
"actGoal": "找到丢失的部署钥匙",
"transitionHook": "电梯门在无人操作时打开",
"primaryRoleName": "林澈",
"supportRoleNames": ["阿岚"],
"scenePromptText": "现代创业公司办公室,凌晨灯光,紧张忙碌"
}
]
@@ -2498,6 +2691,12 @@ mod tests {
assert_eq!(act_refs.len(), 1);
assert_eq!(act_refs[0].prompt, "现代创业公司办公室,凌晨灯光,紧张忙碌");
assert_eq!(act_refs[0].scene_id, "scene-office");
assert_eq!(act_refs[0].scene_name, "旧港办公室");
assert_eq!(
act_refs[0].scene_description,
"旧港边缘的玻璃办公室,窗外能看到潮湿码头。"
);
assert!(validate_scene_act_background_prompts(&act_refs).is_ok());
}
}

View File

@@ -323,7 +323,6 @@ struct NormalizedSceneImageRequest {
prompt: String,
negative_prompt: String,
reference_image_src: Option<String>,
model: String,
}
#[derive(Debug)]
@@ -341,10 +340,6 @@ struct OptimizedCoverUpload {
bytes: Vec<u8>,
}
const TEXT_TO_IMAGE_SCENE_MODEL: &str = "wan2.2-t2i-flash";
const REFERENCE_IMAGE_SCENE_MODEL: &str = "qwen-image-2.0";
const TEXT_TO_IMAGE_COVER_MODEL: &str = "wan2.2-t2i-flash";
const REFERENCE_IMAGE_COVER_MODEL: &str = "qwen-image-2.0";
const DEFAULT_CUSTOM_WORLD_SCENE_IMAGE_NEGATIVE_PROMPT: &str = "文字水印logoUI界面对话框边框人物近景特写多人合照模糊低清晰度畸形建筑现代车辆监控摄像头";
const COVER_OUTPUT_WIDTH: u32 = 1600;
const COVER_OUTPUT_HEIGHT: u32 = 900;
@@ -467,7 +462,7 @@ pub async fn generate_custom_world_scene_image(
create_reference_image_generation(
&http_client,
&settings,
REFERENCE_IMAGE_SCENE_MODEL,
state.config.dashscope_reference_image_model.as_str(),
normalized.prompt.as_str(),
normalized.size.as_str(),
&[reference_image.to_string()],
@@ -481,7 +476,7 @@ pub async fn generate_custom_world_scene_image(
create_text_to_image_generation(
&http_client,
&settings,
TEXT_TO_IMAGE_SCENE_MODEL,
state.config.dashscope_scene_image_model.as_str(),
normalized.prompt.as_str(),
Some(normalized.negative_prompt.as_str()),
normalized.size.as_str(),
@@ -493,6 +488,11 @@ pub async fn generate_custom_world_scene_image(
.await
}
.map_err(|error| custom_world_ai_error_response(&request_context, error))?;
let scene_model = if reference_image.is_some() {
state.config.dashscope_reference_image_model.clone()
} else {
state.config.dashscope_scene_image_model.clone()
};
let downloaded = download_remote_image(
&http_client,
generated.image_url.as_str(),
@@ -532,7 +532,7 @@ pub async fn generate_custom_world_scene_image(
image_src: String::new(),
asset_id: asset_id.clone(),
source_type: "generated".to_string(),
model: Some(normalized.model),
model: Some(scene_model),
size: Some(normalized.size),
task_id: Some(generated.task_id),
prompt: Some(normalized.prompt),
@@ -548,6 +548,7 @@ pub async fn generate_custom_world_scene_image(
pub(crate) async fn generate_custom_world_scene_image_for_profile(
state: &AppState,
owner_user_id: &str,
profile: &Value,
profile_id: Option<&str>,
world_name: &str,
scene_id: &str,
@@ -560,20 +561,16 @@ pub(crate) async fn generate_custom_world_scene_image_for_profile(
world_name: Some(world_name.to_string()),
landmark_id: Some(scene_id.to_string()),
landmark_name: Some(scene_name.to_string()),
prompt: Some(prompt_text.to_string()),
size: Some("1600*900".to_string()),
// 自动草稿生成必须和草稿页手动生成走同一条 prompt 编译链:
// 只把幕级描述作为 userPrompt 输入,仍交给 normalize_scene_image_request 组装世界名、地点名、风格与负面词。
prompt: None,
size: Some("1280*720".to_string()),
negative_prompt: None,
reference_image_src: None,
user_prompt: Some(prompt_text.to_string()),
profile: Some(SceneImageProfileInput {
id: profile_id.map(ToOwned::to_owned),
name: Some(world_name.to_string()),
subtitle: None,
summary: None,
tone: None,
player_goal: None,
setting_text: None,
}),
profile: Some(scene_image_profile_input_from_value(
profile, profile_id, world_name,
)),
landmark: Some(SceneImageLandmarkInput {
id: Some(scene_id.to_string()),
name: Some(scene_name.to_string()),
@@ -587,7 +584,7 @@ pub(crate) async fn generate_custom_world_scene_image_for_profile(
let generated = create_text_to_image_generation(
&http_client,
&settings,
TEXT_TO_IMAGE_SCENE_MODEL,
state.config.dashscope_scene_image_model.as_str(),
normalized.prompt.as_str(),
Some(normalized.negative_prompt.as_str()),
normalized.size.as_str(),
@@ -627,7 +624,7 @@ pub(crate) async fn generate_custom_world_scene_image_for_profile(
slot: "scene_image",
source_job_id: Some(generated.task_id.clone()),
};
let model = normalized.model.clone();
let model = state.config.dashscope_scene_image_model.clone();
let prompt = normalized.prompt.clone();
let asset = persist_custom_world_asset(
state,
@@ -653,6 +650,31 @@ pub(crate) async fn generate_custom_world_scene_image_for_profile(
})
}
/// Assembles a `SceneImageProfileInput` from a raw draft-profile JSON value.
///
/// Identity fields reuse the caller-resolved `profile_id` and `world_name`;
/// the descriptive fields are read from the JSON and are `None` when absent
/// or blank.
fn scene_image_profile_input_from_value(
    profile: &Value,
    profile_id: Option<&str>,
    world_name: &str,
) -> SceneImageProfileInput {
    let text_field = |key: &str| json_text_from_value(profile, key);
    SceneImageProfileInput {
        id: profile_id.map(String::from),
        name: Some(world_name.to_string()),
        subtitle: text_field("subtitle"),
        summary: text_field("summary"),
        tone: text_field("tone"),
        player_goal: text_field("playerGoal"),
        setting_text: text_field("settingText"),
    }
}
fn json_text_from_value(value: &Value, key: &str) -> Option<String> {
value
.get(key)
.and_then(Value::as_str)
.map(str::trim)
.filter(|value| !value.is_empty())
.map(ToOwned::to_owned)
}
pub async fn generate_custom_world_cover_image(
State(state): State<AppState>,
Extension(request_context): Extension<RequestContext>,
@@ -707,7 +729,7 @@ pub async fn generate_custom_world_cover_image(
create_text_to_image_generation(
&http_client,
&settings,
TEXT_TO_IMAGE_COVER_MODEL,
state.config.dashscope_cover_image_model.as_str(),
prompt.as_str(),
None,
size.as_str(),
@@ -721,7 +743,7 @@ pub async fn generate_custom_world_cover_image(
create_reference_image_generation(
&http_client,
&settings,
REFERENCE_IMAGE_COVER_MODEL,
state.config.dashscope_reference_image_model.as_str(),
prompt.as_str(),
size.as_str(),
&reference_images,
@@ -766,9 +788,9 @@ pub async fn generate_custom_world_cover_image(
asset_id: asset_id.clone(),
source_type: "generated".to_string(),
model: Some(if reference_images.is_empty() {
TEXT_TO_IMAGE_COVER_MODEL.to_string()
state.config.dashscope_cover_image_model.clone()
} else {
REFERENCE_IMAGE_COVER_MODEL.to_string()
state.config.dashscope_reference_image_model.clone()
}),
size: Some(size),
task_id: Some(generated.task_id),
@@ -1187,11 +1209,6 @@ fn normalize_scene_image_request(
negative_prompt: trim_to_option(payload.negative_prompt.as_deref())
.unwrap_or_else(|| DEFAULT_CUSTOM_WORLD_SCENE_IMAGE_NEGATIVE_PROMPT.to_string()),
reference_image_src: reference_image_src.clone(),
model: if reference_image_src.is_some() {
REFERENCE_IMAGE_SCENE_MODEL.to_string()
} else {
TEXT_TO_IMAGE_SCENE_MODEL.to_string()
},
})
}
@@ -2580,6 +2597,103 @@ mod tests {
);
}
#[test]
fn automatic_scene_image_payload_reuses_manual_prompt_compiler() {
let profile = json!({
"id": "profile_001",
"name": "雾海群岛",
"subtitle": "失落航线",
"summary": "玩家在雾海中追查沉没王冠。",
"tone": "潮湿、神秘、低魔奇幻",
"playerGoal": "找到王冠并阻止海妖复苏",
"settingText": "群岛被永恒雾潮包围。"
});
let payload = CustomWorldSceneImageRequest {
profile_id: Some("profile_001".to_string()),
world_name: Some("雾海群岛".to_string()),
landmark_id: Some("reef_temple".to_string()),
landmark_name: Some("礁石神殿".to_string()),
prompt: None,
size: Some("1280*720".to_string()),
negative_prompt: None,
reference_image_src: None,
user_prompt: Some("破碎神殿矗立在蓝绿色雾潮中,潮湿石阶上有幽光贝壳。".to_string()),
profile: Some(scene_image_profile_input_from_value(
&profile,
Some("profile_001"),
"雾海群岛",
)),
landmark: Some(SceneImageLandmarkInput {
id: Some("reef_temple".to_string()),
name: Some("礁石神殿".to_string()),
description: Some("古老礁石上的半沉神殿。".to_string()),
danger_level: None,
}),
};
let normalized = normalize_scene_image_request(payload).expect("payload should normalize");
assert!(normalized.prompt.contains("世界名:雾海群岛"));
assert!(normalized.prompt.contains("世界副标题:失落航线"));
assert!(normalized.prompt.contains("场景名称:礁石神殿"));
assert!(
normalized
.prompt
.contains("本次想要生成的画面内容:破碎神殿")
);
assert_ne!(
normalized.prompt,
"破碎神殿矗立在蓝绿色雾潮中,潮湿石阶上有幽光贝壳。"
);
}
#[test]
fn automatic_default_scene_image_context_matches_manual_default_context() {
let profile = json!({
"id": "profile_001",
"name": "雾海群岛",
"subtitle": "失落航线",
"summary": "玩家在雾海中追查沉没王冠。",
"tone": "潮湿、神秘、低魔奇幻",
"playerGoal": "找到王冠并阻止海妖复苏",
"settingText": "群岛被永恒雾潮包围。"
});
let user_prompt = "破碎神殿矗立在蓝绿色雾潮中,潮湿石阶上有幽光贝壳。";
let profile_input =
scene_image_profile_input_from_value(&profile, Some("profile_001"), "雾海群岛");
let landmark = SceneImageLandmarkInput {
id: Some("reef_temple".to_string()),
name: Some("礁石神殿".to_string()),
description: Some("古老礁石上的半沉神殿。".to_string()),
danger_level: Some("high".to_string()),
};
let manual_prompt = build_custom_world_scene_image_prompt(
&profile_input,
&landmark,
user_prompt,
false,
Some("礁石神殿"),
"雾海群岛",
);
let normalized = normalize_scene_image_request(CustomWorldSceneImageRequest {
profile_id: Some("profile_001".to_string()),
world_name: Some("雾海群岛".to_string()),
landmark_id: Some("reef_temple".to_string()),
landmark_name: Some("礁石神殿".to_string()),
prompt: None,
size: Some("1280*720".to_string()),
negative_prompt: None,
reference_image_src: None,
user_prompt: Some(user_prompt.to_string()),
profile: Some(profile_input),
landmark: Some(landmark),
})
.expect("payload should normalize");
assert_eq!(normalized.prompt, manual_prompt);
}
#[tokio::test]
async fn cover_image_returns_service_unavailable_when_dashscope_missing() {
let state = AppState::new(AppConfig::default()).expect("state should build");

View File

@@ -38,6 +38,18 @@ impl AppError {
&self.message
}
pub fn body_text(&self) -> String {
// 批处理任务不能只读 HTTP 状态文案,否则 DashScope 返回的真实失败原因会被压成“上游服务请求失败”。
self.details
.as_ref()
.and_then(|details| details.get("message"))
.and_then(Value::as_str)
.map(str::trim)
.filter(|message| !message.is_empty())
.unwrap_or(self.message.as_str())
.to_string()
}
pub fn with_message(mut self, message: impl Into<String>) -> Self {
self.message = message.into();
self

View File

@@ -1518,13 +1518,17 @@ struct GeneratedPuzzleAssetResponse {
asset_id: String,
}
fn require_puzzle_dashscope_settings(state: &AppState) -> Result<PuzzleDashScopeSettings, AppError> {
fn require_puzzle_dashscope_settings(
state: &AppState,
) -> Result<PuzzleDashScopeSettings, AppError> {
let base_url = state.config.dashscope_base_url.trim().trim_end_matches('/');
if base_url.is_empty() {
return Err(AppError::from_status(StatusCode::SERVICE_UNAVAILABLE).with_details(json!({
"provider": "dashscope",
"reason": "DASHSCOPE_BASE_URL 未配置",
})));
return Err(
AppError::from_status(StatusCode::SERVICE_UNAVAILABLE).with_details(json!({
"provider": "dashscope",
"reason": "DASHSCOPE_BASE_URL 未配置",
})),
);
}
let api_key = state
@@ -1613,7 +1617,9 @@ async fn create_puzzle_text_to_image_generation(
}))
.send()
.await
.map_err(|error| map_puzzle_dashscope_request_error(format!("创建拼图图片生成任务失败:{error}")))?;
.map_err(|error| {
map_puzzle_dashscope_request_error(format!("创建拼图图片生成任务失败:{error}"))
})?;
let status = response.status();
let response_text = response.text().await.map_err(|error| {
map_puzzle_dashscope_request_error(format!("读取拼图图片生成响应失败:{error}"))
@@ -1655,7 +1661,8 @@ async fn create_puzzle_text_to_image_generation(
"查询拼图图片生成任务失败",
));
}
let poll_payload = parse_puzzle_json_payload(poll_text.as_str(), "解析拼图图片生成任务响应失败")?;
let poll_payload =
parse_puzzle_json_payload(poll_text.as_str(), "解析拼图图片生成任务响应失败")?;
let task_status = find_first_puzzle_string_by_key(&poll_payload, "task_status")
.unwrap_or_default()
.trim()
@@ -1663,13 +1670,18 @@ async fn create_puzzle_text_to_image_generation(
if task_status == "SUCCEEDED" {
let image_urls = extract_puzzle_image_urls(&poll_payload);
if image_urls.is_empty() {
return Err(AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
"provider": "dashscope",
"message": "拼图图片生成成功但未返回图片地址",
})));
return Err(
AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
"provider": "dashscope",
"message": "拼图图片生成成功但未返回图片地址",
})),
);
}
let mut images = Vec::with_capacity(image_urls.len());
for image_url in image_urls.into_iter().take(candidate_count.clamp(1, 2) as usize) {
for image_url in image_urls
.into_iter()
.take(candidate_count.clamp(1, 2) as usize)
{
images.push(download_puzzle_remote_image(http_client, image_url.as_str()).await?);
}
return Ok(PuzzleGeneratedImages { task_id, images });
@@ -1683,10 +1695,12 @@ async fn create_puzzle_text_to_image_generation(
sleep(Duration::from_secs(2)).await;
}
Err(AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
"provider": "dashscope",
"message": "拼图图片生成超时或未返回图片地址",
})))
Err(
AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
"provider": "dashscope",
"message": "拼图图片生成超时或未返回图片地址",
})),
)
}
async fn download_puzzle_remote_image(
@@ -1707,11 +1721,13 @@ async fn download_puzzle_remote_image(
map_puzzle_dashscope_request_error(format!("读取拼图正式图片内容失败:{error}"))
})?;
if !status.is_success() {
return Err(AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
"provider": "dashscope",
"message": "下载拼图正式图片失败",
"status": status.as_u16(),
})));
return Err(
AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
"provider": "dashscope",
"message": "下载拼图正式图片失败",
"status": status.as_u16(),
})),
);
}
let mime_type = normalize_puzzle_downloaded_image_mime_type(content_type.as_str());
Ok(PuzzleDownloadedImage {

View File

@@ -1,4 +1,12 @@
use std::{error::Error, fmt, str as std_str, time::Duration};
use std::{
env,
error::Error,
fmt, fs,
path::PathBuf,
str as std_str,
sync::atomic::{AtomicU64, Ordering},
time::{Duration, SystemTime, UNIX_EPOCH},
};
use log::{debug, warn};
use reqwest::{Client, StatusCode};
@@ -10,6 +18,9 @@ pub const DEFAULT_REQUEST_TIMEOUT_MS: u64 = 30_000;
pub const DEFAULT_MAX_RETRIES: u32 = 1;
pub const DEFAULT_RETRY_BACKOFF_MS: u64 = 500;
pub const CHAT_COMPLETIONS_PATH: &str = "/chat/completions";
const DEFAULT_LLM_RAW_LOG_DIR: &str = "logs/llm-raw";
static LLM_RAW_LOG_SEQUENCE: AtomicU64 = AtomicU64::new(1);
// 冻结平台来源,避免上层继续散落 provider 字符串。
#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
@@ -113,6 +124,17 @@ struct ChatCompletionsRequestBody<'a> {
max_tokens: Option<u32>,
}
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct LlmRawFailureInputLog<'a> {
provider: &'static str,
model: &'a str,
stream: bool,
attempt: u32,
max_tokens: Option<u32>,
messages: &'a [LlmMessage],
}
#[derive(Deserialize)]
struct ChatCompletionsResponseEnvelope {
id: Option<String>,
@@ -156,6 +178,7 @@ struct ChatCompletionsContentPart {
#[derive(Default)]
struct OpenAiCompatibleSseParser {
buffer: String,
raw_text: String,
}
#[derive(Debug)]
@@ -382,12 +405,31 @@ impl LlmClient {
request.validate()?;
let resolved_model = request.resolved_model(self.config.model()).to_string();
let response = self.execute_request(&request, false).await?;
let raw_text = response
.text()
.await
.map_err(|error| map_stream_read_error(error, 1))?;
let raw_text = response.text().await.map_err(|error| {
let llm_error = map_stream_read_error(error, 1);
log_llm_raw_failure(
&self.config,
&request,
false,
1,
"read_response_failed",
llm_error.to_string().as_str(),
);
llm_error
})?;
parse_chat_completions_response(self.config.provider(), &resolved_model, raw_text.as_str())
.map_err(|error| {
log_llm_raw_failure(
&self.config,
&request,
false,
1,
"parse_response_failed",
raw_text.as_str(),
);
error
})
}
pub async fn request_single_message_text(
@@ -422,10 +464,18 @@ impl LlmClient {
let mut undecoded_chunk_bytes = Vec::new();
loop {
let next_chunk = response
.chunk()
.await
.map_err(|error| map_stream_read_error(error, 1))?;
let next_chunk = response.chunk().await.map_err(|error| {
let llm_error = map_stream_read_error(error, 1);
log_llm_raw_failure(
&self.config,
&request,
true,
1,
"read_stream_failed",
parser.raw_text().as_str(),
);
llm_error
})?;
let Some(chunk) = next_chunk else {
break;
@@ -433,12 +483,33 @@ impl LlmClient {
undecoded_chunk_bytes.extend_from_slice(chunk.as_ref());
let (chunk_text, remaining_bytes) =
decode_utf8_stream_chunk(undecoded_chunk_bytes.as_slice())?;
decode_utf8_stream_chunk(undecoded_chunk_bytes.as_slice()).map_err(|error| {
log_llm_raw_failure(
&self.config,
&request,
true,
1,
"decode_stream_failed",
parser.raw_text().as_str(),
);
error
})?;
undecoded_chunk_bytes = remaining_bytes;
if chunk_text.is_empty() {
continue;
}
for event in parser.push_chunk(chunk_text.as_ref())? {
let stream_events = parser.push_chunk(chunk_text.as_ref()).map_err(|error| {
log_llm_raw_failure(
&self.config,
&request,
true,
1,
"parse_stream_failed",
parser.raw_text().as_str(),
);
error
})?;
for event in stream_events {
if let Some(delta_text) = event.delta_text
&& !delta_text.is_empty()
{
@@ -460,10 +531,29 @@ impl LlmClient {
if !undecoded_chunk_bytes.is_empty() {
let trailing_text =
std_str::from_utf8(undecoded_chunk_bytes.as_slice()).map_err(|error| {
log_llm_raw_failure(
&self.config,
&request,
true,
1,
"decode_stream_failed",
parser.raw_text().as_str(),
);
LlmError::Deserialize(format!("解析 LLM 流式 UTF-8 响应失败:{error}"))
})?;
if !trailing_text.is_empty() {
for event in parser.push_chunk(trailing_text)? {
let trailing_events = parser.push_chunk(trailing_text).map_err(|error| {
log_llm_raw_failure(
&self.config,
&request,
true,
1,
"parse_stream_failed",
parser.raw_text().as_str(),
);
error
})?;
for event in trailing_events {
if let Some(delta_text) = event.delta_text
&& !delta_text.is_empty()
{
@@ -483,7 +573,18 @@ impl LlmClient {
}
}
for event in parser.finish()? {
let remaining_events = parser.finish().map_err(|error| {
log_llm_raw_failure(
&self.config,
&request,
true,
1,
"parse_stream_failed",
parser.raw_text().as_str(),
);
error
})?;
for event in remaining_events {
if let Some(delta_text) = event.delta_text
&& !delta_text.is_empty()
{
@@ -503,6 +604,14 @@ impl LlmClient {
let content = accumulated_text.trim().to_string();
if content.is_empty() {
log_llm_raw_failure(
&self.config,
&request,
true,
1,
"empty_stream_response",
parser.raw_text().as_str(),
);
return Err(LlmError::EmptyResponse);
}
@@ -591,6 +700,14 @@ impl LlmClient {
continue;
}
log_llm_raw_failure(
&self.config,
request,
stream,
attempt,
"upstream_status_failed",
raw_text.as_str(),
);
return Err(LlmError::Upstream {
status_code: status.as_u16(),
message,
@@ -607,7 +724,16 @@ impl LlmClient {
continue;
}
return Err(LlmError::Timeout { attempts: attempt });
let error = LlmError::Timeout { attempts: attempt };
log_llm_raw_failure(
&self.config,
request,
stream,
attempt,
"request_timeout",
error.to_string().as_str(),
);
return Err(error);
}
Err(error) if error.is_connect() => {
let message = error.to_string();
@@ -622,13 +748,31 @@ impl LlmClient {
continue;
}
return Err(LlmError::Connectivity {
let error = LlmError::Connectivity {
attempts: attempt,
message,
});
};
log_llm_raw_failure(
&self.config,
request,
stream,
attempt,
"request_connectivity_failed",
error.to_string().as_str(),
);
return Err(error);
}
Err(error) => {
return Err(LlmError::Transport(error.to_string()));
let error = LlmError::Transport(error.to_string());
log_llm_raw_failure(
&self.config,
request,
stream,
attempt,
"request_transport_failed",
error.to_string().as_str(),
);
return Err(error);
}
}
}
@@ -652,11 +796,16 @@ impl LlmClient {
impl OpenAiCompatibleSseParser {
    /// Feeds one decoded chunk of the SSE stream into the parser and returns
    /// any complete events that became parseable.
    ///
    /// The raw chunk is also appended verbatim to `raw_text` so that failure
    /// logging can dump the entire upstream response seen so far.
    // NOTE(review): `replace("\r\n", "\n")` rescans the whole accumulated
    // buffer on every chunk (accidental O(n²) over the stream). It is done on
    // the full buffer because a "\r\n" pair may straddle two chunks; any
    // optimization must still handle that boundary case — confirm before
    // changing.
    fn push_chunk(&mut self, chunk: &str) -> Result<Vec<ParsedStreamEvent>, LlmError> {
        self.raw_text.push_str(chunk);
        self.buffer.push_str(chunk);
        self.buffer = self.buffer.replace("\r\n", "\n");
        self.drain_complete_events()
    }
    /// Returns everything received from upstream so far, for failure logging.
    // NOTE(review): clones the accumulated text on each call; acceptable here
    // because it is only invoked on error paths. Returning `&str` would be
    // cheaper but would break the existing `.as_str()` call sites.
    fn raw_text(&self) -> String {
        self.raw_text.clone()
    }
fn finish(&mut self) -> Result<Vec<ParsedStreamEvent>, LlmError> {
if self.buffer.trim().is_empty() {
return Ok(Vec::new());
@@ -691,6 +840,87 @@ fn normalize_non_empty(value: String, error_message: &str) -> Result<String, Llm
Ok(trimmed)
}
fn log_llm_raw_failure(
config: &LlmConfig,
request: &LlmTextRequest,
stream: bool,
attempt: u32,
failure_stage: &str,
raw_output: &str,
) {
if let Err(error) =
write_llm_raw_failure(config, request, stream, attempt, failure_stage, raw_output)
{
warn!(
"LLM 失败原文日志落盘失败,主错误流程继续执行: failure_stage={}, error={}",
failure_stage, error
);
}
}
/// Writes a `<prefix>.input.json` / `<prefix>.output.txt` pair describing a
/// failed LLM call into the raw-log directory.
///
/// The directory comes from the `LLM_RAW_LOG_DIR` environment variable,
/// falling back to `DEFAULT_LLM_RAW_LOG_DIR`, and is created if missing.
/// Returns a human-readable error string on any I/O or serialization failure;
/// the caller decides whether that is fatal.
fn write_llm_raw_failure(
    config: &LlmConfig,
    request: &LlmTextRequest,
    stream: bool,
    attempt: u32,
    failure_stage: &str,
    raw_output: &str,
) -> Result<(), String> {
    // Resolve the target directory, preferring the environment override.
    let log_dir = match env::var("LLM_RAW_LOG_DIR") {
        Ok(dir) => PathBuf::from(dir),
        Err(_) => PathBuf::from(DEFAULT_LLM_RAW_LOG_DIR),
    };
    if let Err(error) = fs::create_dir_all(&log_dir) {
        return Err(format!("创建日志目录失败:{error}"));
    }
    // One shared prefix keeps the input/output files of a failure paired.
    let prefix = build_llm_raw_log_prefix(failure_stage);
    let input_log = LlmRawFailureInputLog {
        provider: config.provider().as_str(),
        model: request.resolved_model(config.model()),
        stream,
        attempt,
        max_tokens: request.max_tokens,
        messages: request.messages.as_slice(),
    };
    let input_text = match serde_json::to_string_pretty(&input_log) {
        Ok(text) => text,
        Err(error) => return Err(format!("序列化模型输入日志失败:{error}")),
    };
    let input_path = log_dir.join(format!("{prefix}.input.json"));
    fs::write(input_path, input_text).map_err(|error| format!("写入模型输入日志失败:{error}"))?;
    // The raw upstream output is stored verbatim, untouched by serialization.
    let output_path = log_dir.join(format!("{prefix}.output.txt"));
    fs::write(output_path, raw_output).map_err(|error| format!("写入模型输出日志失败:{error}"))?;
    Ok(())
}
/// Builds a collision-resistant file-name prefix for a raw-failure log pair:
/// `<unix-millis>-<pid>-<seq>-<stage>`.
///
/// The zero-padded process-wide sequence counter disambiguates failures that
/// land within the same millisecond; the stage label is sanitized so the
/// result is always a safe file-name segment.
fn build_llm_raw_log_prefix(failure_stage: &str) -> String {
    // A clock before the epoch degrades to 0 rather than panicking.
    let timestamp_millis = match SystemTime::now().duration_since(UNIX_EPOCH) {
        Ok(duration) => duration.as_millis(),
        Err(_) => 0,
    };
    let sequence = LLM_RAW_LOG_SEQUENCE.fetch_add(1, Ordering::Relaxed);
    format!(
        "{timestamp_millis}-{}-{sequence:06}-{}",
        std::process::id(),
        sanitize_log_file_segment(failure_stage)
    )
}
/// Maps an arbitrary string to a file-name-safe segment.
///
/// ASCII letters, digits, `-` and `_` pass through unchanged; every other
/// character becomes a single `_`. An empty input yields `"unknown"` so the
/// segment is never empty.
fn sanitize_log_file_segment(value: &str) -> String {
    if value.is_empty() {
        return "unknown".to_string();
    }
    let mut sanitized = String::with_capacity(value.len());
    for character in value.chars() {
        match character {
            'a'..='z' | 'A'..='Z' | '0'..='9' | '-' | '_' => sanitized.push(character),
            _ => sanitized.push('_'),
        }
    }
    sanitized
}
fn parse_chat_completions_response(
provider: LlmProvider,
fallback_model: &str,
@@ -1028,6 +1258,62 @@ mod tests {
assert_eq!(response.response_id.as_deref(), Some("req_stream_01"));
}
#[tokio::test]
// Verifies that a non-JSON 200 response makes `request_text` fail with
// `LlmError::Deserialize` AND write exactly one input/output raw-log pair,
// with the prompt preserved, the API key absent, and the raw body verbatim.
// NOTE(review): this test mutates process-global env (`LLM_RAW_LOG_DIR`) and
// cleans up only on success — a failing assertion leaks the env var and temp
// dir, and concurrent tests touching the same var would race. Consider a
// serial guard or scoped cleanup; confirm against the test-runner setup.
async fn request_text_writes_raw_failure_logs_after_parse_error() {
    // Unique temp directory per run, derived from the prefix builder itself.
    let log_dir = std::env::temp_dir().join(format!(
        "platform-llm-raw-log-test-{}",
        build_llm_raw_log_prefix("parse_error")
    ));
    // SAFETY-adjacent: set_var is `unsafe` on this toolchain because env
    // mutation is not thread-safe; accepted here as test-only setup.
    unsafe {
        std::env::set_var("LLM_RAW_LOG_DIR", &log_dir);
    }
    // 200 OK with a body that is deliberately not valid JSON.
    let server_url = spawn_mock_server(vec![MockResponse {
        status_line: "200 OK",
        content_type: "application/json; charset=utf-8",
        body: "不是合法 JSON".to_string(),
        extra_headers: Vec::new(),
    }]);
    let client = build_test_client(server_url, 0);
    let error = client
        .request_single_message_text("系统原文", "用户原文")
        .await
        .expect_err("invalid json should fail");
    assert!(matches!(error, LlmError::Deserialize(_)));
    // Collect the written log files, split by suffix convention.
    let mut input_logs = Vec::new();
    let mut output_logs = Vec::new();
    for entry in fs::read_dir(&log_dir).expect("log dir should exist") {
        let path = entry.expect("log entry should be readable").path();
        let file_name = path
            .file_name()
            .and_then(|name| name.to_str())
            .unwrap_or_default()
            .to_string();
        if file_name.ends_with(".input.json") {
            input_logs.push(path);
        } else if file_name.ends_with(".output.txt") {
            output_logs.push(path);
        }
    }
    // Exactly one failure was logged, so exactly one pair exists.
    assert_eq!(input_logs.len(), 1);
    assert_eq!(output_logs.len(), 1);
    let input_text = fs::read_to_string(&input_logs[0]).expect("input log should be readable");
    let output_text =
        fs::read_to_string(&output_logs[0]).expect("output log should be readable");
    // Prompt content must be captured; the API key must never be.
    assert!(input_text.contains("系统原文"));
    assert!(input_text.contains("用户原文"));
    assert!(!input_text.contains("test-key"));
    // Raw output is stored byte-for-byte.
    assert_eq!(output_text, "不是合法 JSON");
    unsafe {
        std::env::remove_var("LLM_RAW_LOG_DIR");
    }
    fs::remove_dir_all(log_dir).expect("log dir should be removed");
}
fn build_test_client(base_url: String, max_retries: u32) -> LlmClient {
let config = LlmConfig::new(
LlmProvider::Ark,

View File

@@ -520,38 +520,4 @@ impl SpacetimeClient {
.await
}
/// Upserts progress for a custom-world agent operation via the SpacetimeDB
/// `upsert_custom_world_agent_operation_progress` procedure.
///
/// Converts the string-typed `operation_type` / `operation_status` fields of
/// the record input into their module-binding enums (failing fast with a
/// `SpacetimeClientError` on unknown values) before any connection work, then
/// invokes the procedure once connected and maps its result back into a
/// `CustomWorldAgentOperationRecord`.
///
/// # Errors
/// Returns `SpacetimeClientError` when either string field cannot be parsed,
/// when the procedure call fails, or when the procedure result cannot be
/// mapped.
pub async fn upsert_custom_world_agent_operation_progress(
    &self,
    input: CustomWorldAgentOperationProgressRecordInput,
) -> Result<CustomWorldAgentOperationRecord, SpacetimeClientError> {
    // Validate/convert the enum-like strings up front so a bad input never
    // reaches the network layer.
    let procedure_input = CustomWorldAgentOperationProgressInput {
        session_id: input.session_id,
        owner_user_id: input.owner_user_id,
        operation_id: input.operation_id,
        operation_type: parse_rpg_agent_operation_type_record(input.operation_type.as_str())?,
        operation_status: parse_rpg_agent_operation_status_record(
            input.operation_status.as_str(),
        )?,
        phase_label: input.phase_label,
        phase_detail: input.phase_detail,
        operation_progress: input.operation_progress,
        error_message: input.error_message,
        updated_at_micros: input.updated_at_micros,
    };
    self.call_after_connect(move |connection, sender| {
        connection
            .procedures()
            .upsert_custom_world_agent_operation_progress_then(
                procedure_input,
                move |_, result| {
                    // Map procedure errors and payloads inside the callback,
                    // then hand the result back over the oneshot channel.
                    let mapped = result
                        .map_err(|error| SpacetimeClientError::Procedure(error.to_string()))
                        .and_then(map_custom_world_agent_operation_procedure_result);
                    send_once(&sender, mapped);
                },
            );
    })
    .await
}
}

View File

@@ -2762,41 +2762,6 @@ pub(crate) fn format_rpg_agent_operation_status(
}
}
pub(crate) fn parse_rpg_agent_operation_type_record(
value: &str,
) -> Result<crate::module_bindings::RpgAgentOperationType, SpacetimeClientError> {
match value.trim() {
"process_message" => Ok(crate::module_bindings::RpgAgentOperationType::ProcessMessage),
"draft_foundation" => Ok(crate::module_bindings::RpgAgentOperationType::DraftFoundation),
"update_draft_card" => Ok(crate::module_bindings::RpgAgentOperationType::UpdateDraftCard),
"sync_result_profile" => {
Ok(crate::module_bindings::RpgAgentOperationType::SyncResultProfile)
}
"generate_characters" => {
Ok(crate::module_bindings::RpgAgentOperationType::GenerateCharacters)
}
"generate_landmarks" => {
Ok(crate::module_bindings::RpgAgentOperationType::GenerateLandmarks)
}
"generate_role_assets" => {
Ok(crate::module_bindings::RpgAgentOperationType::GenerateRoleAssets)
}
"sync_role_assets" => Ok(crate::module_bindings::RpgAgentOperationType::SyncRoleAssets),
"generate_scene_assets" => {
Ok(crate::module_bindings::RpgAgentOperationType::GenerateSceneAssets)
}
"sync_scene_assets" => Ok(crate::module_bindings::RpgAgentOperationType::SyncSceneAssets),
"expand_long_tail" => Ok(crate::module_bindings::RpgAgentOperationType::ExpandLongTail),
"publish_world" => Ok(crate::module_bindings::RpgAgentOperationType::PublishWorld),
"revert_checkpoint" => Ok(crate::module_bindings::RpgAgentOperationType::RevertCheckpoint),
"delete_characters" => Ok(crate::module_bindings::RpgAgentOperationType::DeleteCharacters),
"delete_landmarks" => Ok(crate::module_bindings::RpgAgentOperationType::DeleteLandmarks),
other => Err(SpacetimeClientError::Runtime(format!(
"未知 rpg agent operation type: {other}"
))),
}
}
pub(crate) fn parse_rpg_agent_operation_status_record(
value: &str,
) -> Result<crate::module_bindings::RpgAgentOperationStatus, SpacetimeClientError> {
@@ -3745,20 +3710,6 @@ pub struct CustomWorldAgentMessageFinalizeRecordInput {
pub updated_at_micros: i64,
}
#[derive(Clone, Debug, PartialEq, Eq)]
// String-typed input record for upserting custom-world agent operation
// progress. `operation_type` / `operation_status` carry snake_case values
// that are parsed into module-binding enums at call time.
pub struct CustomWorldAgentOperationProgressRecordInput {
    // Agent session the operation belongs to.
    pub session_id: String,
    // User who owns the session.
    pub owner_user_id: String,
    // Identifier of the specific operation being updated.
    pub operation_id: String,
    // Operation kind as a snake_case string (e.g. "draft_foundation");
    // validated by `parse_rpg_agent_operation_type_record`.
    pub operation_type: String,
    // Operation status as a snake_case string; validated by
    // `parse_rpg_agent_operation_status_record`.
    pub operation_status: String,
    // Short human-readable label for the current phase.
    pub phase_label: String,
    // Longer detail text for the current phase.
    pub phase_detail: String,
    // Progress value for the operation; unit/range defined by the server
    // procedure — presumably a percentage, confirm against the module schema.
    pub operation_progress: u32,
    // Error text when the operation has failed, otherwise `None`.
    pub error_message: Option<String>,
    // Last-update timestamp in microseconds since the Unix epoch.
    pub updated_at_micros: i64,
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct CustomWorldAgentActionExecuteRecordInput {
pub session_id: String,

View File

@@ -117,7 +117,6 @@ pub mod custom_world_agent_operation_type;
pub mod custom_world_agent_operation_get_input_type;
pub mod custom_world_agent_operation_progress_input_type;
pub mod custom_world_agent_operation_procedure_result_type;
pub mod custom_world_agent_operation_progress_input_type;
pub mod custom_world_agent_operation_snapshot_type;
pub mod custom_world_agent_session_type;
pub mod custom_world_agent_session_create_input_type;
@@ -343,7 +342,6 @@ pub mod start_ai_task_reducer;
pub mod start_ai_task_stage_reducer;
pub mod turn_in_quest_reducer;
pub mod unpublish_custom_world_profile_reducer;
pub mod upsert_custom_world_agent_operation_progress_procedure;
pub mod upsert_chapter_progression_reducer;
pub mod upsert_custom_world_profile_reducer;
pub mod upsert_npc_state_reducer;
@@ -591,7 +589,6 @@ pub use custom_world_agent_operation_type::CustomWorldAgentOperation;
pub use custom_world_agent_operation_get_input_type::CustomWorldAgentOperationGetInput;
pub use custom_world_agent_operation_progress_input_type::CustomWorldAgentOperationProgressInput;
pub use custom_world_agent_operation_procedure_result_type::CustomWorldAgentOperationProcedureResult;
pub use custom_world_agent_operation_progress_input_type::CustomWorldAgentOperationProgressInput;
pub use custom_world_agent_operation_snapshot_type::CustomWorldAgentOperationSnapshot;
pub use custom_world_agent_session_type::CustomWorldAgentSession;
pub use custom_world_agent_session_create_input_type::CustomWorldAgentSessionCreateInput;
@@ -859,7 +856,6 @@ pub use start_ai_task_reducer::start_ai_task;
pub use start_ai_task_stage_reducer::start_ai_task_stage;
pub use turn_in_quest_reducer::turn_in_quest;
pub use unpublish_custom_world_profile_reducer::unpublish_custom_world_profile;
pub use upsert_custom_world_agent_operation_progress_procedure::upsert_custom_world_agent_operation_progress;
pub use upsert_chapter_progression_reducer::upsert_chapter_progression;
pub use upsert_custom_world_profile_reducer::upsert_custom_world_profile;
pub use upsert_npc_state_reducer::upsert_npc_state;