feat: restore generation draft persistence

This commit is contained in:
2026-04-25 11:41:09 +08:00
60 changed files with 38221 additions and 382 deletions

View File

@@ -0,0 +1,233 @@
use module_ai::{
AiTaskCreateInput, AiTaskKind, AiTaskStageBlueprint, AiTaskStageKind, AiTaskStageStartInput,
AiTextChunkAppendInput,
};
use serde_json::json;
use spacetime_client::{SpacetimeClient, SpacetimeClientError};
use std::sync::{Arc, Mutex};
use tracing::warn;
/// Stable identifiers binding one generation draft to a specific
/// template/session/operation triple.
#[derive(Clone, Debug)]
pub(crate) struct AiGenerationDraftContext {
    /// Deterministic draft task id (`aitask_draft_<template>_<session>_<operation>`)
    /// so retries of the same operation locate the existing draft content.
    pub task_id: String,
    /// Trimmed id of the user that owns the draft task.
    pub owner_user_id: String,
    /// Trimmed human-readable label attached to the draft task request.
    pub request_label: String,
    /// Normalized template key, reused as the originating module name.
    pub source_module: String,
    /// Trimmed session id the draft belongs to.
    pub source_entity_id: String,
    /// Trimmed raw template key.
    pub template_key: String,
    /// Trimmed raw operation id.
    pub operation_id: String,
}
impl AiGenerationDraftContext {
    /// Builds a draft context whose `task_id` is deterministic for a given
    /// template/session/operation triple, so a retried operation keeps
    /// appending to the draft it already created.
    pub fn new(
        template_key: &str,
        owner_user_id: &str,
        session_id: &str,
        operation_id: &str,
        request_label: &str,
    ) -> Self {
        let template_segment = normalize_identifier_segment(template_key);
        let session_segment = normalize_identifier_segment(session_id);
        let operation_segment = normalize_identifier_segment(operation_id);
        // Stable id: retries of the same template/session/operation must be
        // able to locate the previously written draft content.
        let task_id = format!(
            "aitask_draft_{template_segment}_{session_segment}_{operation_segment}"
        );
        Self {
            task_id,
            owner_user_id: owner_user_id.trim().to_string(),
            request_label: request_label.trim().to_string(),
            source_module: template_segment,
            source_entity_id: session_id.trim().to_string(),
            template_key: template_key.trim().to_string(),
            operation_id: operation_id.trim().to_string(),
        }
    }
}
/// Cloneable sink that persists streamed model text to the draft task via
/// fire-and-forget background tasks.
#[derive(Clone, Debug)]
pub(crate) struct AiGenerationDraftSink {
    /// Draft identifiers the chunks are appended under.
    context: AiGenerationDraftContext,
    /// Client handed to each spawned persistence task.
    client: SpacetimeClient,
    /// Next chunk sequence number; shared so clones stay in step.
    next_sequence: Arc<Mutex<u32>>,
    /// Full text already handed off for persistence; used to compute deltas.
    persisted_text: Arc<Mutex<String>>,
}
impl AiGenerationDraftSink {
    /// Wraps a draft context and a Spacetime client; the chunk sequence starts
    /// at 1 with no text persisted yet.
    pub fn new(context: AiGenerationDraftContext, client: SpacetimeClient) -> Self {
        Self {
            context,
            client,
            next_sequence: Arc::new(Mutex::new(1)),
            persisted_text: Arc::new(Mutex::new(String::new())),
        }
    }
    /// Computes the not-yet-persisted suffix of `visible_text` and spawns a
    /// background task that appends it as the next sequenced chunk.
    ///
    /// Persistence failures are logged and swallowed so the main generation
    /// flow is never interrupted.
    pub fn persist_visible_text_async(&self, visible_text: &str) {
        // Both mutexes are held together so the (sequence, delta) pairing stays
        // consistent across concurrent callers. Poisoned locks are recovered
        // because the guarded state remains usable.
        let mut previous_text = match self.persisted_text.lock() {
            Ok(guard) => guard,
            Err(poisoned) => poisoned.into_inner(),
        };
        let delta_text = match visible_text.strip_prefix(previous_text.as_str()) {
            Some(suffix) => suffix.to_string(),
            // The new text no longer extends what we stored (e.g. a reset);
            // fall back to persisting the full text as one chunk.
            None => visible_text.to_string(),
        };
        *previous_text = visible_text.to_string();
        if delta_text.trim().is_empty() {
            return;
        }
        let mut sequence_counter = match self.next_sequence.lock() {
            Ok(guard) => guard,
            Err(poisoned) => poisoned.into_inner(),
        };
        let sequence = *sequence_counter;
        *sequence_counter = sequence_counter.saturating_add(1);
        // Release both locks before handing off to the background task.
        drop(sequence_counter);
        drop(previous_text);
        let context = self.context.clone();
        let client = self.client.clone();
        tokio::spawn(async move {
            let append_result = client
                .append_ai_text_chunk(AiTextChunkAppendInput {
                    task_id: context.task_id.clone(),
                    stage_kind: AiTaskStageKind::RequestModel,
                    sequence,
                    delta_text,
                    created_at_micros: current_utc_micros(),
                })
                .await;
            if let Err(error) = append_result {
                warn!(
                    task_id = %context.task_id,
                    sequence,
                    error = %error,
                    "AI 生成草稿后台增量落库失败,主生成流程继续执行"
                );
            }
        });
    }
}
/// Inline counterpart of the sink: each chunk append is awaited by the caller
/// instead of being spawned onto a background task.
#[derive(Debug)]
pub(crate) struct AiGenerationDraftWriter {
    /// Draft identifiers the chunks are appended under.
    context: AiGenerationDraftContext,
    /// Sequence number for the next chunk (starts at 1).
    next_sequence: u32,
    /// Full text already persisted; used to compute deltas.
    persisted_text: String,
}
impl AiGenerationDraftWriter {
    /// Creates a writer whose chunk sequence starts at 1 with no persisted text.
    pub fn new(context: AiGenerationDraftContext) -> Self {
        Self {
            context,
            next_sequence: 1,
            persisted_text: String::new(),
        }
    }
    /// Creates the draft AI task (tolerating "already exists" so operation
    /// retries reuse it) and starts its single RequestModel stage.
    pub async fn ensure_started(
        &mut self,
        client: &SpacetimeClient,
    ) -> Result<(), SpacetimeClientError> {
        let now_micros = current_utc_micros();
        match client
            .create_ai_task(AiTaskCreateInput {
                task_id: self.context.task_id.clone(),
                task_kind: AiTaskKind::CustomWorldGeneration,
                owner_user_id: self.context.owner_user_id.clone(),
                request_label: self.context.request_label.clone(),
                source_module: self.context.source_module.clone(),
                source_entity_id: Some(self.context.source_entity_id.clone()),
                request_payload_json: Some(
                    json!({
                        "templateKey": self.context.template_key,
                        "operationId": self.context.operation_id,
                    })
                    .to_string(),
                ),
                stages: vec![AiTaskStageBlueprint {
                    stage_kind: AiTaskStageKind::RequestModel,
                    label: "请求模型".to_string(),
                    detail: "模板生成过程中持续写入模型已生成文本。".to_string(),
                    order: 1,
                }],
                created_at_micros: now_micros,
            })
            .await
        {
            Ok(_) => {}
            // A duplicate task id means this operation was retried; keep the
            // existing task and just (re)start its stage below.
            Err(error) if is_duplicate_ai_task_error(&error) => {}
            Err(error) => return Err(error),
        }
        client
            .start_ai_task_stage(AiTaskStageStartInput {
                task_id: self.context.task_id.clone(),
                stage_kind: AiTaskStageKind::RequestModel,
                started_at_micros: now_micros,
            })
            .await
    }
    /// Appends the not-yet-persisted suffix of `visible_text` as the next
    /// sequenced chunk. Append failures are logged and otherwise ignored so
    /// the main generation flow keeps running.
    pub async fn persist_visible_text(&mut self, client: &SpacetimeClient, visible_text: &str) {
        // If the new text does not extend the stored prefix (e.g. the stream
        // restarted), fall back to persisting the full text as one chunk.
        let delta_text = match visible_text.strip_prefix(self.persisted_text.as_str()) {
            Some(delta) => delta,
            None => visible_text,
        };
        if delta_text.trim().is_empty() {
            self.persisted_text = visible_text.to_string();
            return;
        }
        let sequence = self.next_sequence;
        self.next_sequence = self.next_sequence.saturating_add(1);
        // State advances before the network call; a failed append is not retried.
        self.persisted_text = visible_text.to_string();
        if let Err(error) = client
            .append_ai_text_chunk(AiTextChunkAppendInput {
                task_id: self.context.task_id.clone(),
                stage_kind: AiTaskStageKind::RequestModel,
                sequence,
                delta_text: delta_text.to_string(),
                created_at_micros: current_utc_micros(),
            })
            .await
        {
            warn!(
                task_id = %self.context.task_id,
                sequence,
                error = %error,
                "AI 生成草稿增量落库失败,主生成流程继续执行"
            );
        }
    }
}
/// Normalizes `value` into an identifier-safe segment: it is trimmed, every
/// character outside `[A-Za-z0-9_-]` becomes `'_'`, and an empty result is
/// replaced by `"unknown"`.
fn normalize_identifier_segment(value: &str) -> String {
    let trimmed = value.trim();
    let mut normalized = String::with_capacity(trimmed.len());
    for character in trimmed.chars() {
        if character.is_ascii_alphanumeric() || matches!(character, '-' | '_') {
            normalized.push(character);
        } else {
            normalized.push('_');
        }
    }
    if normalized.is_empty() {
        return "unknown".to_string();
    }
    normalized
}
/// Detects the "task already exists" failure from a task creation attempt.
///
/// The client surfaces duplicate creation as a plain business message, so
/// matching on the rendered error text is the only available detection hook.
fn is_duplicate_ai_task_error(error: &SpacetimeClientError) -> bool {
    let rendered = error.to_string();
    rendered.contains("ai_task.task_id 已存在")
}
/// Returns the current UTC wall-clock time in microseconds since the Unix epoch.
///
/// The nanosecond timestamp is divided down to microseconds while still in
/// `i128` and only then narrowed to `i64`. The previous form cast the raw
/// nanosecond count to `i64` before dividing, which silently overflows once
/// the nanosecond value exceeds `i64::MAX` (year 2262); dividing first keeps
/// the value comfortably in range.
fn current_utc_micros() -> i64 {
    (time::OffsetDateTime::now_utc().unix_timestamp_nanos() / 1_000) as i64
}

View File

@@ -42,8 +42,14 @@ use crate::big_fish_agent_turn::{
run_big_fish_agent_turn,
};
use crate::{
api_response::json_success_body, auth::AuthenticatedAccessToken, http_error::AppError,
request_context::RequestContext, state::AppState,
ai_generation_drafts::{
AiGenerationDraftContext, AiGenerationDraftSink, AiGenerationDraftWriter,
},
api_response::json_success_body,
auth::AuthenticatedAccessToken,
http_error::AppError,
request_context::RequestContext,
state::AppState,
};
pub async fn create_big_fish_session(
@@ -203,15 +209,45 @@ pub async fn submit_big_fish_message(
.map_err(|error| {
big_fish_error_response(&request_context, map_big_fish_client_error(error))
})?;
let mut draft_writer = AiGenerationDraftWriter::new(AiGenerationDraftContext::new(
"big_fish",
owner_user_id.as_str(),
session_id.as_str(),
payload.client_message_id.as_str(),
"大鱼吃小鱼模板生成草稿",
));
if let Err(error) = draft_writer.ensure_started(state.spacetime_client()).await {
tracing::warn!(error = %error, "大鱼吃小鱼模板生成草稿任务启动失败,主生成流程继续执行");
}
let draft_sink = AiGenerationDraftSink::new(
AiGenerationDraftContext::new(
"big_fish",
owner_user_id.as_str(),
session_id.as_str(),
payload.client_message_id.as_str(),
"大鱼吃小鱼模板生成草稿",
),
state.spacetime_client().clone(),
);
let turn_result = run_big_fish_agent_turn(
BigFishAgentTurnRequest {
llm_client: state.llm_client(),
session: &submitted_session,
quick_fill_requested: payload.quick_fill_requested.unwrap_or(false),
},
|_| {},
move |text| {
draft_sink.persist_visible_text_async(text);
},
)
.await;
if let Ok(result) = &turn_result {
draft_writer
.persist_visible_text(
state.spacetime_client(),
result.assistant_reply_text.as_str(),
)
.await;
}
let finalize_input = match turn_result {
Ok(turn_result) => build_finalize_record_input(
session_id.clone(),
@@ -287,6 +323,26 @@ pub async fn stream_big_fish_message(
big_fish_error_response(&request_context, map_big_fish_client_error(error))
})?;
let quick_fill_requested = payload.quick_fill_requested.unwrap_or(false);
let mut draft_writer = AiGenerationDraftWriter::new(AiGenerationDraftContext::new(
"big_fish",
owner_user_id.as_str(),
session_id.as_str(),
payload.client_message_id.as_str(),
"大鱼吃小鱼模板生成草稿",
));
if let Err(error) = draft_writer.ensure_started(state.spacetime_client()).await {
tracing::warn!(error = %error, "大鱼吃小鱼模板生成草稿任务启动失败,主生成流程继续执行");
}
let draft_sink = AiGenerationDraftSink::new(
AiGenerationDraftContext::new(
"big_fish",
owner_user_id.as_str(),
session_id.as_str(),
payload.client_message_id.as_str(),
"大鱼吃小鱼模板生成草稿",
),
state.spacetime_client().clone(),
);
let mut streamed_reply_text = String::new();
let turn_result = run_big_fish_agent_turn(
BigFishAgentTurnRequest {
@@ -295,10 +351,16 @@ pub async fn stream_big_fish_message(
quick_fill_requested,
},
|text| {
draft_sink.persist_visible_text_async(text);
streamed_reply_text = text.to_string();
},
)
.await;
if !streamed_reply_text.is_empty() {
draft_writer
.persist_visible_text(state.spacetime_client(), streamed_reply_text.as_str())
.await;
}
let reply_text = match &turn_result {
Ok(result) => result.assistant_reply_text.clone(),
Err(error) => error.to_string(),

View File

@@ -82,7 +82,11 @@ pub struct AppConfig {
pub llm_retry_backoff_ms: u64,
pub dashscope_base_url: String,
pub dashscope_api_key: Option<String>,
pub dashscope_scene_image_model: String,
pub dashscope_reference_image_model: String,
pub dashscope_cover_image_model: String,
pub dashscope_image_request_timeout_ms: u64,
pub draft_asset_generation_max_concurrent_requests: usize,
pub ark_character_video_base_url: String,
pub ark_character_video_api_key: Option<String>,
pub ark_character_video_request_timeout_ms: u64,
@@ -166,7 +170,11 @@ impl Default for AppConfig {
llm_retry_backoff_ms: DEFAULT_RETRY_BACKOFF_MS,
dashscope_base_url: "https://dashscope.aliyuncs.com/api/v1".to_string(),
dashscope_api_key: None,
dashscope_scene_image_model: "wan2.2-t2i-flash".to_string(),
dashscope_reference_image_model: "qwen-image-2.0".to_string(),
dashscope_cover_image_model: "wan2.2-t2i-flash".to_string(),
dashscope_image_request_timeout_ms: 150_000,
draft_asset_generation_max_concurrent_requests: 4,
ark_character_video_base_url: DEFAULT_ARK_BASE_URL.to_string(),
ark_character_video_api_key: None,
ark_character_video_request_timeout_ms: 420_000,
@@ -464,12 +472,38 @@ impl AppConfig {
config.dashscope_api_key = read_first_non_empty_env(&["DASHSCOPE_API_KEY"]);
if let Some(dashscope_scene_image_model) =
read_first_non_empty_env(&["DASHSCOPE_SCENE_IMAGE_MODEL", "DASHSCOPE_IMAGE_MODEL"])
{
config.dashscope_scene_image_model = dashscope_scene_image_model;
}
if let Some(dashscope_reference_image_model) = read_first_non_empty_env(&[
"DASHSCOPE_REFERENCE_IMAGE_MODEL",
"DASHSCOPE_IMAGE_EDIT_MODEL",
]) {
config.dashscope_reference_image_model = dashscope_reference_image_model;
}
if let Some(dashscope_cover_image_model) =
read_first_non_empty_env(&["DASHSCOPE_COVER_IMAGE_MODEL", "DASHSCOPE_IMAGE_MODEL"])
{
config.dashscope_cover_image_model = dashscope_cover_image_model;
}
if let Some(dashscope_image_request_timeout_ms) =
read_first_positive_u64_env(&["DASHSCOPE_IMAGE_REQUEST_TIMEOUT_MS"])
{
config.dashscope_image_request_timeout_ms = dashscope_image_request_timeout_ms;
}
if let Some(max_concurrent_requests) = read_first_usize_env(&[
"GENARRATIVE_DRAFT_ASSET_GENERATION_MAX_CONCURRENT_REQUESTS",
"DRAFT_ASSET_GENERATION_MAX_CONCURRENT_REQUESTS",
]) {
config.draft_asset_generation_max_concurrent_requests = max_concurrent_requests;
}
if let Some(ark_character_video_base_url) = read_first_non_empty_env(&[
"ARK_CHARACTER_VIDEO_BASE_URL",
"ARK_BASE_URL",
@@ -623,6 +657,14 @@ fn read_first_u64_env(keys: &[&str]) -> Option<u64> {
.find_map(|key| env::var(key).ok().and_then(|value| parse_u64(&value)))
}
/// Scans the candidate environment variable names in priority order and
/// returns the first one that parses to a strictly positive `usize`.
fn read_first_usize_env(keys: &[&str]) -> Option<usize> {
    for key in keys {
        let candidate = env::var(key)
            .ok()
            .and_then(|value| parse_positive_usize(&value));
        if candidate.is_some() {
            return candidate;
        }
    }
    None
}
fn read_first_u8_env(keys: &[&str]) -> Option<u8> {
keys.iter()
.find_map(|key| env::var(key).ok().and_then(|value| parse_u8(&value)))
@@ -704,6 +746,15 @@ fn parse_u64(raw: &str) -> Option<u64> {
raw.trim().parse::<u64>().ok()
}
/// Parses a trimmed decimal string into a strictly positive `usize`;
/// zero, negative, and malformed input all yield `None`.
fn parse_positive_usize(raw: &str) -> Option<usize> {
    raw.trim()
        .parse::<usize>()
        .ok()
        .filter(|&value| value > 0)
}
/// Parses a trimmed decimal string into a `u8`, returning `None` for
/// out-of-range or malformed input.
fn parse_u8(raw: &str) -> Option<u8> {
    let trimmed = raw.trim();
    trimmed.parse().ok()
}

View File

@@ -1,3 +1,5 @@
use std::collections::BTreeMap;
use axum::{
Json,
extract::{Extension, Path, State, rejection::JsonRejection},
@@ -28,19 +30,25 @@ use shared_contracts::runtime::{
use shared_kernel::build_prefixed_uuid_id;
use spacetime_client::{
CustomWorldAgentActionExecuteRecordInput, CustomWorldAgentCheckpointRecord,
CustomWorldAgentMessageRecord, CustomWorldAgentMessageSubmitRecordInput,
CustomWorldAgentOperationProgressRecordInput, CustomWorldAgentOperationRecord,
CustomWorldAgentSessionCreateRecordInput, CustomWorldAgentSessionRecord,
CustomWorldDraftCardDetailRecord, CustomWorldDraftCardDetailSectionRecord,
CustomWorldDraftCardRecord, CustomWorldGalleryEntryRecord, CustomWorldLibraryEntryRecord,
CustomWorldAgentMessageFinalizeRecordInput, CustomWorldAgentMessageRecord,
CustomWorldAgentMessageSubmitRecordInput, CustomWorldAgentOperationProgressRecordInput,
CustomWorldAgentOperationRecord, CustomWorldAgentSessionCreateRecordInput,
CustomWorldAgentSessionRecord, CustomWorldDraftCardDetailRecord,
CustomWorldDraftCardDetailSectionRecord, CustomWorldDraftCardRecord,
CustomWorldGalleryEntryRecord, CustomWorldLibraryEntryRecord,
CustomWorldProfileUpsertRecordInput, CustomWorldPublishGateRecord,
CustomWorldResultPreviewBlockerRecord, CustomWorldSupportedActionRecord,
CustomWorldWorkSummaryRecord, SpacetimeClientError,
};
use std::convert::Infallible;
use std::{collections::BTreeSet, convert::Infallible, sync::Arc, time::Instant};
use tokio::sync::Semaphore;
use tokio::task::JoinSet;
use tracing::info;
use crate::{
ai_generation_drafts::{
AiGenerationDraftContext, AiGenerationDraftSink, AiGenerationDraftWriter,
},
api_response::json_success_body,
auth::AuthenticatedAccessToken,
character_visual_assets::generate_character_primary_visual_for_profile,
@@ -59,6 +67,8 @@ use crate::{
state::AppState,
};
const DRAFT_ASSET_GENERATION_MAX_ATTEMPTS: u32 = 3;
pub async fn get_custom_world_library(
State(state): State<AppState>,
Extension(request_context): Extension<RequestContext>,
@@ -633,6 +643,26 @@ pub async fn submit_custom_world_agent_message(
.map_err(|error| {
custom_world_error_response(&request_context, map_custom_world_client_error(error))
})?;
let mut draft_writer = AiGenerationDraftWriter::new(AiGenerationDraftContext::new(
"custom_world",
owner_user_id.as_str(),
session_id.as_str(),
operation_id.as_str(),
"自定义世界模板生成草稿",
));
if let Err(error) = draft_writer.ensure_started(state.spacetime_client()).await {
tracing::warn!(error = %error, "自定义世界模板生成草稿任务启动失败,主生成流程继续执行");
}
let draft_sink = AiGenerationDraftSink::new(
AiGenerationDraftContext::new(
"custom_world",
owner_user_id.as_str(),
session_id.as_str(),
operation_id.as_str(),
"自定义世界模板生成草稿",
),
state.spacetime_client().clone(),
);
let turn_result = run_custom_world_agent_turn(
CustomWorldAgentTurnRequest {
llm_client: state.llm_client(),
@@ -640,9 +670,19 @@ pub async fn submit_custom_world_agent_message(
quick_fill_requested: payload.quick_fill_requested.unwrap_or(false),
focus_card_id: payload.focus_card_id.clone(),
},
|_| {},
move |text| {
draft_sink.persist_visible_text_async(text);
},
)
.await;
if let Ok(result) = &turn_result {
draft_writer
.persist_visible_text(
state.spacetime_client(),
result.assistant_reply_text.as_str(),
)
.await;
}
let finalize_input = match turn_result {
Ok(turn_result) => build_finalize_record_input(
session_id.clone(),
@@ -761,6 +801,26 @@ pub async fn stream_custom_world_agent_message(
let owner_user_id_for_stream = owner_user_id.clone();
let operation_id = operation.operation_id.clone();
let stream = async_stream::stream! {
let mut draft_writer = AiGenerationDraftWriter::new(AiGenerationDraftContext::new(
"custom_world",
owner_user_id_for_stream.as_str(),
session_id_for_stream.as_str(),
operation_id.as_str(),
"自定义世界模板生成草稿",
));
if let Err(error) = draft_writer.ensure_started(state.spacetime_client()).await {
tracing::warn!(error = %error, "自定义世界模板生成草稿任务启动失败,主生成流程继续执行");
}
let draft_sink = AiGenerationDraftSink::new(
AiGenerationDraftContext::new(
"custom_world",
owner_user_id_for_stream.as_str(),
session_id_for_stream.as_str(),
operation_id.as_str(),
"自定义世界模板生成草稿",
),
state.spacetime_client().clone(),
);
// 聊天回复必须等本轮模型解析、进度与会话快照全部落库后,
// 再随最终 session 一次性返回,避免玩家先看到回复而进度仍停在旧状态。
let turn_result = run_custom_world_agent_turn(
@@ -770,9 +830,16 @@ pub async fn stream_custom_world_agent_message(
quick_fill_requested,
focus_card_id,
},
|_| {},
move |text| {
draft_sink.persist_visible_text_async(text);
},
)
.await;
if let Ok(result) = &turn_result {
draft_writer
.persist_visible_text(state.spacetime_client(), result.assistant_reply_text.as_str())
.await;
}
let finalize_input = match turn_result {
Ok(turn_result) => build_finalize_record_input(
@@ -1162,49 +1229,73 @@ fn spawn_custom_world_draft_foundation_job(
}
};
if let Err(message) = generate_draft_foundation_role_visuals(
&state,
&session,
&owner_user_id,
&operation_id,
&mut draft_profile_value,
)
.await
{
let _ = upsert_custom_world_draft_foundation_progress(
&state,
&session.session_id,
&owner_user_id,
&operation_id,
"failed",
"生成角色主形象失败",
message.as_str(),
100,
Some(message.clone()),
)
.await;
return;
}
let image_generation_limiter = Arc::new(Semaphore::new(
state.config.draft_asset_generation_max_concurrent_requests,
));
let role_visual_profile_input = draft_profile_value.clone();
let act_background_profile_input = draft_profile_value.clone();
// 角色主形象与幕背景图互不依赖,必须并行发起;上游生图请求统一限流,避免同批草稿瞬时打满供应商接口。
let (role_visual_result, act_background_result) = tokio::join!(
async {
let mut profile = role_visual_profile_input;
generate_draft_foundation_role_visuals(
&state,
&session,
&owner_user_id,
&operation_id,
&mut profile,
image_generation_limiter.clone(),
)
.await
.map(|_| profile)
},
async {
let mut profile = act_background_profile_input;
generate_draft_foundation_act_backgrounds(
&state,
&session,
&owner_user_id,
&operation_id,
&mut profile,
image_generation_limiter.clone(),
)
.await
.map(|_| profile)
}
);
if let Err(message) = generate_draft_foundation_act_backgrounds(
&state,
&session,
&owner_user_id,
&operation_id,
&mut draft_profile_value,
)
.await
{
let _ = upsert_custom_world_draft_foundation_progress(
let mut draft_profile_with_assets = draft_profile_value.clone();
let mut asset_generation_errors = Vec::new();
match role_visual_result {
Ok(profile) => draft_profile_with_assets = profile,
Err(message) => asset_generation_errors.push(("生成角色主形象失败", message)),
}
match act_background_result {
Ok(profile) => {
merge_generated_act_backgrounds(&mut draft_profile_with_assets, &profile)
}
Err(message) => asset_generation_errors.push(("生成幕背景图失败", message)),
}
draft_profile_value = draft_profile_with_assets;
if !asset_generation_errors.is_empty() {
let message = asset_generation_errors
.iter()
.map(|(_, message)| message.as_str())
.collect::<Vec<_>>()
.join("");
let phase_label = asset_generation_errors
.first()
.map(|(label, _)| *label)
.unwrap_or("素材生成失败");
persist_partial_draft_foundation_after_asset_failure(
&state,
&session.session_id,
&session,
&owner_user_id,
&operation_id,
"failed",
"生成幕背景图失败",
&draft_profile_value,
phase_label,
message.as_str(),
100,
Some(message.clone()),
)
.await;
return;
@@ -1305,6 +1396,7 @@ async fn generate_draft_foundation_role_visuals(
owner_user_id: &str,
operation_id: &str,
draft_profile: &mut Value,
image_generation_limiter: Arc<Semaphore>,
) -> Result<(), String> {
let Some(profile_object) = draft_profile.as_object_mut() else {
return Err("foundation draft JSON 必须是 object".to_string());
@@ -1317,8 +1409,8 @@ async fn generate_draft_foundation_role_visuals(
}
}
}
let total = role_refs.len().max(1);
for (completed, (key, index)) in role_refs.into_iter().enumerate() {
let mut role_generation_refs = Vec::new();
for (key, index) in role_refs {
let role = profile_object
.get(key.as_str())
.and_then(Value::as_array)
@@ -1328,31 +1420,89 @@ async fn generate_draft_foundation_role_visuals(
let name =
json_text_from_value(&role, "name").unwrap_or_else(|| format!("角色{}", index + 1));
let role_id = json_text_from_value(&role, "id").unwrap_or_else(|| format!("{key}-{index}"));
let visual_prompt = json_text_from_value(&role, "visualDescription")
.or_else(|| json_text_from_value(&role, "description"))
.unwrap_or_else(|| name.clone());
upsert_custom_world_draft_foundation_progress(
state,
&session.session_id,
owner_user_id,
operation_id,
"running",
"生成角色主形象",
format!("正在生成角色主形象 {}/{}{}", completed + 1, total, name).as_str(),
97 + ((completed as u32).min(1)),
None,
)
.await
.map_err(|error| error.to_string())?;
let generated = generate_character_primary_visual_for_profile(
state,
owner_user_id,
role_id.as_str(),
visual_prompt.as_str(),
Some(name.as_str()),
)
.await
.map_err(|error| error.message().to_string())?;
let visual_prompt = json_text_from_value(&role, "visualDescription").ok_or_else(|| {
format!("角色「{name}」缺少 visualDescription不能在角色形象设定文本生成前直接生图。")
})?;
role_generation_refs.push(RoleVisualGenerationRef {
key,
index,
role_id,
name,
prompt: visual_prompt,
});
}
upsert_custom_world_draft_foundation_progress(
state,
&session.session_id,
owner_user_id,
operation_id,
"running",
"并行生成角色主形象",
format!("正在同时生成 {} 张角色主形象。", role_generation_refs.len()).as_str(),
97,
None,
)
.await
.map_err(|error| error.to_string())?;
let mut generation_tasks = JoinSet::new();
for role_ref in role_generation_refs {
let task_state = (*state).clone();
let task_owner_user_id = owner_user_id.to_string();
let task_limiter = image_generation_limiter.clone();
generation_tasks.spawn(async move {
let mut last_error = None;
for attempt in 1..=DRAFT_ASSET_GENERATION_MAX_ATTEMPTS {
let generation_result = {
let _permit = task_limiter
.acquire()
.await
.map_err(|error| format!("图片生成并发控制失效:{error}"))?;
generate_character_primary_visual_for_profile(
&task_state,
task_owner_user_id.as_str(),
role_ref.role_id.as_str(),
role_ref.prompt.as_str(),
Some(role_ref.name.as_str()),
)
.await
};
match generation_result {
Ok(generated) => {
return Ok::<_, String>((role_ref.key, role_ref.index, generated));
}
Err(error) => {
last_error = Some(error.body_text());
if attempt < DRAFT_ASSET_GENERATION_MAX_ATTEMPTS {
tokio::time::sleep(std::time::Duration::from_millis(
300 * u64::from(attempt),
))
.await;
}
}
}
}
Err(format!(
"角色「{}」主形象连续生成 {} 次失败:{}",
role_ref.name,
DRAFT_ASSET_GENERATION_MAX_ATTEMPTS,
last_error.unwrap_or_else(|| "未知错误".to_string())
))
});
}
let mut errors = Vec::new();
while let Some(result) = generation_tasks.join_next().await {
let task_result = result.map_err(|error| error.to_string())?;
let (key, index, generated) = match task_result {
Ok(value) => value,
Err(message) => {
errors.push(message);
continue;
}
};
if let Some(role_object) = profile_object
.get_mut(key.as_str())
.and_then(Value::as_array_mut)
@@ -1366,6 +1516,9 @@ async fn generate_draft_foundation_role_visuals(
);
}
}
if !errors.is_empty() {
return Err(join_unique_error_messages(errors));
}
Ok(())
}
@@ -1375,51 +1528,163 @@ async fn generate_draft_foundation_act_backgrounds(
owner_user_id: &str,
operation_id: &str,
draft_profile: &mut Value,
image_generation_limiter: Arc<Semaphore>,
) -> Result<(), String> {
let world_name =
json_text_from_value(draft_profile, "name").unwrap_or_else(|| "未命名世界".to_string());
let profile_id = json_text_from_value(draft_profile, "id");
let scene_image_profile_input = draft_profile.clone();
let act_refs = collect_scene_act_refs(draft_profile);
let total = act_refs.len().max(1);
for (completed, act_ref) in act_refs.into_iter().enumerate() {
upsert_custom_world_draft_foundation_progress(
state,
&session.session_id,
owner_user_id,
operation_id,
"running",
"生成幕背景图",
format!(
"正在生成幕背景图 {}/{}{}",
completed + 1,
total,
act_ref.title
)
.as_str(),
98,
None,
)
.await
.map_err(|error| error.to_string())?;
let generated = generate_custom_world_scene_image_for_profile(
state,
owner_user_id,
profile_id.as_deref(),
world_name.as_str(),
act_ref.scene_id.as_str(),
act_ref.title.as_str(),
act_ref.summary.as_str(),
act_ref.prompt.as_str(),
)
.await
.map_err(|error| error.message().to_string())?;
validate_scene_act_background_prompts(&act_refs)?;
tracing::info!(
operation_id,
session_id = %session.session_id,
act_count = act_refs.len(),
max_concurrent_requests = state.config.draft_asset_generation_max_concurrent_requests,
"开始并行生成草稿幕背景图"
);
upsert_custom_world_draft_foundation_progress(
state,
&session.session_id,
owner_user_id,
operation_id,
"running",
"并行生成幕背景图",
format!("正在同时生成 {} 张幕背景图。", act_refs.len()).as_str(),
98,
None,
)
.await
.map_err(|error| error.to_string())?;
let mut generation_tasks = JoinSet::new();
for act_ref in act_refs {
let task_state = (*state).clone();
let task_owner_user_id = owner_user_id.to_string();
let task_profile_id = profile_id.clone();
let task_world_name = world_name.clone();
let task_profile = scene_image_profile_input.clone();
let task_limiter = image_generation_limiter.clone();
let task_operation_id = operation_id.to_string();
let task_session_id = session.session_id.clone();
generation_tasks.spawn(async move {
let mut last_error = None;
for attempt in 1..=DRAFT_ASSET_GENERATION_MAX_ATTEMPTS {
let attempt_started_at = Instant::now();
tracing::info!(
operation_id = %task_operation_id,
session_id = %task_session_id,
chapter_index = act_ref.chapter_index,
act_index = act_ref.act_index,
scene_id = %act_ref.scene_id,
scene_name = %act_ref.scene_name,
attempt,
"开始生成单幕背景图"
);
let generation_result = {
let _permit = task_limiter.acquire().await.map_err(|error| {
(
act_ref.chapter_index,
act_ref.act_index,
format!("图片生成并发控制失效:{error}"),
)
})?;
generate_custom_world_scene_image_for_profile(
&task_state,
task_owner_user_id.as_str(),
&task_profile,
task_profile_id.as_deref(),
task_world_name.as_str(),
act_ref.scene_id.as_str(),
act_ref.scene_name.as_str(),
act_ref.scene_description.as_str(),
act_ref.prompt.as_str(),
)
.await
};
match generation_result {
Ok(generated) => {
tracing::info!(
operation_id = %task_operation_id,
session_id = %task_session_id,
chapter_index = act_ref.chapter_index,
act_index = act_ref.act_index,
scene_id = %act_ref.scene_id,
scene_name = %act_ref.scene_name,
attempt,
elapsed_ms = attempt_started_at.elapsed().as_millis(),
"单幕背景图生成成功"
);
return Ok::<_, (usize, usize, String)>((
act_ref.chapter_index,
act_ref.act_index,
generated,
));
}
Err(error) => {
let error_message = error.body_text();
tracing::warn!(
operation_id = %task_operation_id,
session_id = %task_session_id,
chapter_index = act_ref.chapter_index,
act_index = act_ref.act_index,
scene_id = %act_ref.scene_id,
scene_name = %act_ref.scene_name,
attempt,
elapsed_ms = attempt_started_at.elapsed().as_millis(),
error_message = %error_message,
"单幕背景图生成失败"
);
last_error = Some(error_message);
if attempt < DRAFT_ASSET_GENERATION_MAX_ATTEMPTS {
tokio::time::sleep(std::time::Duration::from_millis(
300 * u64::from(attempt),
))
.await;
}
}
}
}
Err((
act_ref.chapter_index,
act_ref.act_index,
format!(
"{}章第{}幕「{}」背景图连续生成 {} 次失败:{}",
act_ref.chapter_index + 1,
act_ref.act_index + 1,
act_ref.scene_name,
DRAFT_ASSET_GENERATION_MAX_ATTEMPTS,
last_error.unwrap_or_else(|| "未知错误".to_string())
),
))
});
}
let mut errors = Vec::new();
let mut generated_count = 0usize;
while let Some(result) = generation_tasks.join_next().await {
let task_result = result.map_err(|error| error.to_string())?;
let (chapter_index, act_index, generated) = match task_result {
Ok(value) => value,
Err((chapter_index, act_index, message)) => {
mark_scene_act_background_generation_error(
draft_profile,
chapter_index,
act_index,
&message,
);
errors.push(message);
continue;
}
};
if let Some(act_object) = draft_profile
.get_mut("sceneChapterBlueprints")
.and_then(Value::as_array_mut)
.and_then(|chapters| chapters.get_mut(act_ref.chapter_index))
.and_then(|chapters| chapters.get_mut(chapter_index))
.and_then(|chapter| chapter.get_mut("acts"))
.and_then(Value::as_array_mut)
.and_then(|acts| acts.get_mut(act_ref.act_index))
.and_then(|acts| acts.get_mut(act_index))
.and_then(Value::as_object_mut)
{
act_object.insert(
@@ -1438,21 +1703,79 @@ async fn generate_draft_foundation_act_backgrounds(
"generatedSceneModel".to_string(),
Value::String(generated.model),
);
generated_count += 1;
}
}
if !errors.is_empty() {
if generated_count > 0 {
// 自动草稿生成和手动生成用的是同一套生图与资产入库能力;这里不能因为批量中的个别幕失败,
// 把已经写入 profile 分支的 backgroundImageSrc 一起丢掉,否则前端就看不到已经生成好的图。
tracing::warn!(
generated_count,
failed_count = errors.len(),
error_message = %join_unique_error_messages(errors),
"部分幕背景图生成失败,已保留成功生成的幕图"
);
return Ok(());
}
return Err(join_unique_error_messages(errors));
}
Ok(())
}
/// Records a background-generation failure message on the targeted act object
/// at `sceneChapterBlueprints[chapter_index].acts[act_index]`, if that path
/// resolves to a JSON object; otherwise this is a no-op.
fn mark_scene_act_background_generation_error(
    draft_profile: &mut Value,
    chapter_index: usize,
    act_index: usize,
    message: &str,
) {
    let act_object = draft_profile
        .get_mut("sceneChapterBlueprints")
        .and_then(Value::as_array_mut)
        .and_then(|chapters| chapters.get_mut(chapter_index))
        .and_then(|chapter| chapter.get_mut("acts"))
        .and_then(Value::as_array_mut)
        .and_then(|acts| acts.get_mut(act_index))
        .and_then(Value::as_object_mut);
    let Some(act_object) = act_object else {
        return;
    };
    act_object.insert(
        "backgroundGenerationError".to_string(),
        Value::String(message.trim().to_string()),
    );
}
/// Trims, drops empty entries, deduplicates, sorts, and concatenates error
/// messages into a single string.
///
/// Parallel image tasks can surface the exact same upstream business error;
/// the user only needs to see each distinct failure once.
fn join_unique_error_messages(messages: Vec<String>) -> String {
    let mut unique = BTreeSet::new();
    for message in messages {
        let trimmed = message.trim().to_string();
        if !trimmed.is_empty() {
            unique.insert(trimmed);
        }
    }
    // NOTE(review): entries are joined with an empty separator and therefore
    // run together in the output — confirm whether a delimiter was intended.
    unique.into_iter().collect::<Vec<_>>().join("")
}
/// One role whose primary visual should be generated in the parallel batch.
struct RoleVisualGenerationRef {
    /// Profile key of the role array this entry came from.
    key: String,
    /// Index of the role within that array.
    index: usize,
    /// Role id (the "id" field, or a "<key>-<index>" fallback).
    role_id: String,
    /// Display name used in progress and error messages.
    name: String,
    /// Visual-description prompt fed to the image generator.
    prompt: String,
}
/// One chapter/act pair whose background image should be generated.
struct SceneActGenerationRef {
    /// Index into `sceneChapterBlueprints`.
    chapter_index: usize,
    /// Index into that chapter's `acts` array.
    act_index: usize,
    /// Scene id resolved from the act, falling back to the chapter's scene.
    scene_id: String,
    // NOTE(review): `title`/`summary` appear unused by the parallel generation
    // path, which reads `scene_name`/`scene_description` instead — confirm
    // whether both field sets are still constructed anywhere.
    title: String,
    summary: String,
    scene_name: String,
    scene_description: String,
    /// Prompt text for the background-image request; may be empty until validated.
    prompt: String,
}
fn collect_scene_act_refs(draft_profile: &Value) -> Vec<SceneActGenerationRef> {
let scene_context_by_id = collect_scene_context_by_id(draft_profile);
draft_profile
.get("sceneChapterBlueprints")
.and_then(Value::as_array)
@@ -1463,28 +1786,188 @@ fn collect_scene_act_refs(draft_profile: &Value) -> Vec<SceneActGenerationRef> {
let chapter_scene_id = json_text_from_value(chapter, "sceneId")
.or_else(|| json_text_from_value(chapter, "id"))
.unwrap_or_else(|| format!("chapter-{chapter_index}"));
let chapter_scene_name = json_first_text_from_value(
chapter,
&["sceneName", "landmarkName", "name", "title"],
)
.unwrap_or_else(|| chapter_scene_id.clone());
let chapter_scene_context = scene_context_by_id
.get(&chapter_scene_id)
.cloned()
.unwrap_or_else(|| SceneImageContext {
id: chapter_scene_id.clone(),
name: chapter_scene_name.clone(),
description: json_text_from_value(chapter, "description")
.or_else(|| json_text_from_value(chapter, "summary"))
.unwrap_or_default(),
danger_level: json_text_from_value(chapter, "dangerLevel").unwrap_or_default(),
});
let scene_contexts = scene_context_by_id.clone();
chapter
.get("acts")
.and_then(Value::as_array)
.into_iter()
.flatten()
.enumerate()
.map(move |(act_index, act)| SceneActGenerationRef {
chapter_index,
act_index,
scene_id: json_text_from_value(act, "sceneId")
.unwrap_or_else(|| chapter_scene_id.clone()),
title: json_text_from_value(act, "title")
.unwrap_or_else(|| format!("{}", act_index + 1)),
summary: json_text_from_value(act, "summary").unwrap_or_default(),
prompt: json_text_from_value(act, "backgroundPromptText")
.or_else(|| json_text_from_value(act, "summary"))
.unwrap_or_else(|| "场景幕背景图,突出探索空间与局势氛围。".to_string()),
.map(move |(act_index, act)| {
let prompt = json_first_text_from_value(
act,
&[
"backgroundPromptText",
"scenePromptText",
"visualPromptText",
"promptText",
"imagePromptText",
"backgroundPrompt",
"visualPrompt",
],
)
.unwrap_or_default();
let scene_name = json_first_text_from_value(
act,
&["sceneName", "landmarkName", "locationName"],
)
.unwrap_or_else(|| chapter_scene_context.name.clone());
let act_scene_id = json_text_from_value(act, "sceneId")
.unwrap_or_else(|| chapter_scene_context.id.clone());
let scene_context =
scene_contexts
.get(&act_scene_id)
.cloned()
.unwrap_or_else(|| SceneImageContext {
id: act_scene_id.clone(),
name: scene_name,
description: chapter_scene_context.description.clone(),
danger_level: chapter_scene_context.danger_level.clone(),
});
SceneActGenerationRef {
chapter_index,
act_index,
scene_id: act_scene_id,
scene_name: scene_context.name,
scene_description: scene_context.description,
prompt: prompt.clone(),
}
})
})
.collect()
}
/// Minimal per-scene context used when resolving act background image prompts.
#[derive(Clone, Debug)]
struct SceneImageContext {
    /// Stable scene identifier (taken from `id`/`sceneId` in the draft profile).
    id: String,
    /// Human-readable scene name used in prompts and error messages.
    name: String,
    /// Scene description; empty when the source object provides none.
    description: String,
    /// Danger level string; empty when the source object provides none.
    danger_level: String,
}
/// Builds a scene-id → context index from the draft profile's `camp` object
/// and its `landmarks` array. Entries sharing an id overwrite earlier ones,
/// landmarks taking precedence over the camp entry.
fn collect_scene_context_by_id(draft_profile: &Value) -> BTreeMap<String, SceneImageContext> {
    let mut index = BTreeMap::new();
    let camp_context = draft_profile
        .get("camp")
        .and_then(Value::as_object)
        .and_then(|camp| scene_context_from_object(camp, "camp"));
    if let Some(context) = camp_context {
        index.insert(context.id.clone(), context);
    }
    let landmark_objects = draft_profile
        .get("landmarks")
        .and_then(Value::as_array)
        .into_iter()
        .flatten()
        .filter_map(Value::as_object);
    for landmark in landmark_objects {
        if let Some(context) = scene_context_from_object(landmark, "landmark") {
            index.insert(context.id.clone(), context);
        }
    }
    index
}
/// Reads a `SceneImageContext` out of a JSON object, falling back to
/// `fallback_id` when neither `id` nor `sceneId` is present and to the id
/// when no name is present. Always returns `Some`; the `Option` return
/// matches the `if let` shape at the call sites.
fn scene_context_from_object(
    object: &Map<String, Value>,
    fallback_id: &str,
) -> Option<SceneImageContext> {
    let id = read_string_field(object, "id")
        .or_else(|| read_string_field(object, "sceneId"))
        .unwrap_or_else(|| fallback_id.to_string());
    let name = read_string_field(object, "name")
        .or_else(|| read_string_field(object, "sceneName"))
        .unwrap_or_else(|| id.clone());
    let description = read_string_field(object, "description")
        .or_else(|| read_string_field(object, "visualDescription"))
        .unwrap_or_default();
    let danger_level = read_string_field(object, "dangerLevel").unwrap_or_default();
    Some(SceneImageContext {
        id,
        name,
        description,
        danger_level,
    })
}
/// Rejects generation when any act is missing its background prompt text,
/// so image generation never runs ahead of the prompt-authoring step.
fn validate_scene_act_background_prompts(act_refs: &[SceneActGenerationRef]) -> Result<(), String> {
    match act_refs.iter().find(|act_ref| act_ref.prompt.is_empty()) {
        Some(missing) => Err(format!(
            "{}章第{}幕「{}」缺少 backgroundPromptText不能在幕背景图描述文本生成前直接生图。",
            missing.chapter_index + 1,
            missing.act_index + 1,
            missing.scene_name
        )),
        None => Ok(()),
    }
}
/// Returns the first non-empty text found under any of `keys`, in order.
fn json_first_text_from_value(value: &Value, keys: &[&str]) -> Option<String> {
    for key in keys {
        if let Some(text) = json_text_from_value(value, key) {
            return Some(text);
        }
    }
    None
}
/// Copies image-generation output fields from `background_profile` into
/// `target_profile`, matching chapters and acts positionally by index.
/// Only the four generated-image keys are merged so parallel generation
/// branches cannot clobber each other's unrelated draft edits.
fn merge_generated_act_backgrounds(target_profile: &mut Value, background_profile: &Value) {
    const MERGED_KEYS: [&str; 4] = [
        "backgroundImageSrc",
        "backgroundAssetId",
        "generatedScenePrompt",
        "generatedSceneModel",
    ];
    let (Some(target_chapters), Some(background_chapters)) = (
        target_profile
            .get_mut("sceneChapterBlueprints")
            .and_then(Value::as_array_mut),
        background_profile
            .get("sceneChapterBlueprints")
            .and_then(Value::as_array),
    ) else {
        return;
    };
    for (chapter_index, background_chapter) in background_chapters.iter().enumerate() {
        let background_acts = match background_chapter.get("acts").and_then(Value::as_array) {
            Some(acts) => acts,
            None => continue,
        };
        let Some(target_acts) = target_chapters
            .get_mut(chapter_index)
            .and_then(|chapter| chapter.get_mut("acts"))
            .and_then(Value::as_array_mut)
        else {
            continue;
        };
        for (act_index, background_act) in background_acts.iter().enumerate() {
            let source_fields = match background_act.as_object() {
                Some(fields) => fields,
                None => continue,
            };
            let Some(target_fields) = target_acts
                .get_mut(act_index)
                .and_then(Value::as_object_mut)
            else {
                continue;
            };
            for key in MERGED_KEYS {
                if let Some(generated_value) = source_fields.get(key) {
                    target_fields.insert(key.to_string(), generated_value.clone());
                }
            }
        }
    }
}
fn json_text_from_value(value: &Value, key: &str) -> Option<String> {
value
.get(key)
@@ -1494,6 +1977,68 @@ fn json_text_from_value(value: &Value, key: &str) -> Option<String> {
.map(ToOwned::to_owned)
}
/// Persists whatever draft-foundation content survived an asset-generation
/// failure, so retries do not lose already-generated material.
///
/// The session is finalized with `operation_status = "failed"` and the partial
/// `draft_profile` attached. If even that finalize call fails, a lighter
/// progress upsert records the failure state instead. Both paths are
/// best-effort: errors are logged, never propagated.
async fn persist_partial_draft_foundation_after_asset_failure(
    state: &AppState,
    session: &CustomWorldAgentSessionRecord,
    owner_user_id: &str,
    operation_id: &str,
    draft_profile: &Value,
    phase_label: &str,
    error_message: &str,
) {
    // Serialization failure is tolerated: finalize still runs, just without
    // the partial profile payload.
    let draft_profile_json = match serde_json::to_string(draft_profile) {
        Ok(value) => Some(value),
        Err(error) => {
            tracing::warn!(error = %error, "素材失败后的部分底稿序列化失败");
            None
        }
    };
    let finalize_result = state
        .spacetime_client()
        .finalize_custom_world_agent_message(CustomWorldAgentMessageFinalizeRecordInput {
            session_id: session.session_id.clone(),
            owner_user_id: owner_user_id.to_string(),
            operation_id: operation_id.to_string(),
            assistant_message_id: None,
            assistant_reply_text: None,
            phase_label: phase_label.to_string(),
            phase_detail: format!("已保存成功生成的素材,失败项超过 {DRAFT_ASSET_GENERATION_MAX_ATTEMPTS} 次重试:{error_message}"),
            operation_status: "failed".to_string(),
            operation_progress: 100,
            stage: session.stage.clone(),
            progress_percent: session.progress_percent,
            focus_card_id: session.focus_card_id.clone(),
            anchor_content_json: session.anchor_content.to_string(),
            creator_intent_json: Some(session.creator_intent.to_string()),
            creator_intent_readiness_json: session.creator_intent_readiness.to_string(),
            anchor_pack_json: Some(session.anchor_pack.to_string()),
            draft_profile_json,
            pending_clarifications_json: Value::Array(session.pending_clarifications.clone()).to_string(),
            suggested_actions_json: Value::Array(session.suggested_actions.clone()).to_string(),
            recommended_replies_json: json!(session.recommended_replies).to_string(),
            quality_findings_json: Value::Array(session.quality_findings.clone()).to_string(),
            asset_coverage_json: session.asset_coverage.to_string(),
            error_message: Some(error_message.to_string()),
            updated_at_micros: current_utc_micros(),
        })
        .await;
    if let Err(error) = finalize_result {
        tracing::warn!(error = %error, "素材失败后的部分底稿持久化失败");
        // Fallback: record the failed progress so the client is not left on a
        // stale running state; its own error is intentionally ignored.
        let _ = upsert_custom_world_draft_foundation_progress(
            state,
            &session.session_id,
            owner_user_id,
            operation_id,
            "failed",
            phase_label,
            error_message,
            100,
            Some(error_message.to_string()),
        )
        .await;
    }
}
async fn upsert_custom_world_draft_foundation_progress(
state: &AppState,
session_id: &str,
@@ -2105,3 +2650,53 @@ fn current_utc_micros() -> i64 {
.expect("system clock should be after unix epoch");
i64::try_from(duration.as_micros()).expect("current unix micros should fit in i64")
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Regression test: an act that only carries `scenePromptText` (an alias
    /// of `backgroundPromptText`) must still yield a usable generation ref,
    /// with scene id/name/description resolved from the matching landmark.
    #[test]
    fn collect_scene_act_refs_accepts_scene_prompt_text_alias() {
        let draft_profile = json!({
            "name": "雾港纪元",
            "tone": "潮湿、悬疑、低照度",
            "landmarks": [
                {
                    "id": "scene-office",
                    "name": "旧港办公室",
                    "description": "旧港边缘的玻璃办公室,窗外能看到潮湿码头。",
                    "dangerLevel": "low"
                }
            ],
            "sceneChapterBlueprints": [
                {
                    "sceneId": "scene-office",
                    "sceneName": "旧港办公室",
                    "acts": [
                        {
                            "title": "深夜工位",
                            "summary": "团队在凌晨三点继续赶版本。",
                            "actGoal": "找到丢失的部署钥匙",
                            "transitionHook": "电梯门在无人操作时打开",
                            "primaryRoleName": "林澈",
                            "supportRoleNames": ["阿岚"],
                            "scenePromptText": "现代创业公司办公室,凌晨灯光,紧张忙碌"
                        }
                    ]
                }
            ]
        });
        let act_refs = collect_scene_act_refs(&draft_profile);
        assert_eq!(act_refs.len(), 1);
        assert_eq!(act_refs[0].prompt, "现代创业公司办公室,凌晨灯光,紧张忙碌");
        assert_eq!(act_refs[0].scene_id, "scene-office");
        assert_eq!(act_refs[0].scene_name, "旧港办公室");
        assert_eq!(
            act_refs[0].scene_description,
            "旧港边缘的玻璃办公室,窗外能看到潮湿码头。"
        );
        // The alias-sourced prompt must also pass the pre-generation check.
        assert!(validate_scene_act_background_prompts(&act_refs).is_ok());
    }
}

View File

@@ -323,7 +323,6 @@ struct NormalizedSceneImageRequest {
prompt: String,
negative_prompt: String,
reference_image_src: Option<String>,
model: String,
}
#[derive(Debug)]
@@ -341,10 +340,6 @@ struct OptimizedCoverUpload {
bytes: Vec<u8>,
}
const TEXT_TO_IMAGE_SCENE_MODEL: &str = "wan2.2-t2i-flash";
const REFERENCE_IMAGE_SCENE_MODEL: &str = "qwen-image-2.0";
const TEXT_TO_IMAGE_COVER_MODEL: &str = "wan2.2-t2i-flash";
const REFERENCE_IMAGE_COVER_MODEL: &str = "qwen-image-2.0";
const DEFAULT_CUSTOM_WORLD_SCENE_IMAGE_NEGATIVE_PROMPT: &str = "文字水印logoUI界面对话框边框人物近景特写多人合照模糊低清晰度畸形建筑现代车辆监控摄像头";
const COVER_OUTPUT_WIDTH: u32 = 1600;
const COVER_OUTPUT_HEIGHT: u32 = 900;
@@ -467,7 +462,7 @@ pub async fn generate_custom_world_scene_image(
create_reference_image_generation(
&http_client,
&settings,
REFERENCE_IMAGE_SCENE_MODEL,
state.config.dashscope_reference_image_model.as_str(),
normalized.prompt.as_str(),
normalized.size.as_str(),
&[reference_image.to_string()],
@@ -481,7 +476,7 @@ pub async fn generate_custom_world_scene_image(
create_text_to_image_generation(
&http_client,
&settings,
TEXT_TO_IMAGE_SCENE_MODEL,
state.config.dashscope_scene_image_model.as_str(),
normalized.prompt.as_str(),
Some(normalized.negative_prompt.as_str()),
normalized.size.as_str(),
@@ -493,6 +488,11 @@ pub async fn generate_custom_world_scene_image(
.await
}
.map_err(|error| custom_world_ai_error_response(&request_context, error))?;
let scene_model = if reference_image.is_some() {
state.config.dashscope_reference_image_model.clone()
} else {
state.config.dashscope_scene_image_model.clone()
};
let downloaded = download_remote_image(
&http_client,
generated.image_url.as_str(),
@@ -532,7 +532,7 @@ pub async fn generate_custom_world_scene_image(
image_src: String::new(),
asset_id: asset_id.clone(),
source_type: "generated".to_string(),
model: Some(normalized.model),
model: Some(scene_model),
size: Some(normalized.size),
task_id: Some(generated.task_id),
prompt: Some(normalized.prompt),
@@ -548,6 +548,7 @@ pub async fn generate_custom_world_scene_image(
pub(crate) async fn generate_custom_world_scene_image_for_profile(
state: &AppState,
owner_user_id: &str,
profile: &Value,
profile_id: Option<&str>,
world_name: &str,
scene_id: &str,
@@ -560,20 +561,16 @@ pub(crate) async fn generate_custom_world_scene_image_for_profile(
world_name: Some(world_name.to_string()),
landmark_id: Some(scene_id.to_string()),
landmark_name: Some(scene_name.to_string()),
prompt: Some(prompt_text.to_string()),
size: Some("1600*900".to_string()),
// 自动草稿生成必须和草稿页手动生成走同一条 prompt 编译链:
// 只把幕级描述作为 userPrompt 输入,仍交给 normalize_scene_image_request 组装世界名、地点名、风格与负面词。
prompt: None,
size: Some("1280*720".to_string()),
negative_prompt: None,
reference_image_src: None,
user_prompt: Some(prompt_text.to_string()),
profile: Some(SceneImageProfileInput {
id: profile_id.map(ToOwned::to_owned),
name: Some(world_name.to_string()),
subtitle: None,
summary: None,
tone: None,
player_goal: None,
setting_text: None,
}),
profile: Some(scene_image_profile_input_from_value(
profile, profile_id, world_name,
)),
landmark: Some(SceneImageLandmarkInput {
id: Some(scene_id.to_string()),
name: Some(scene_name.to_string()),
@@ -587,7 +584,7 @@ pub(crate) async fn generate_custom_world_scene_image_for_profile(
let generated = create_text_to_image_generation(
&http_client,
&settings,
TEXT_TO_IMAGE_SCENE_MODEL,
state.config.dashscope_scene_image_model.as_str(),
normalized.prompt.as_str(),
Some(normalized.negative_prompt.as_str()),
normalized.size.as_str(),
@@ -627,7 +624,7 @@ pub(crate) async fn generate_custom_world_scene_image_for_profile(
slot: "scene_image",
source_job_id: Some(generated.task_id.clone()),
};
let model = normalized.model.clone();
let model = state.config.dashscope_scene_image_model.clone();
let prompt = normalized.prompt.clone();
let asset = persist_custom_world_asset(
state,
@@ -653,6 +650,31 @@ pub(crate) async fn generate_custom_world_scene_image_for_profile(
})
}
/// Builds a `SceneImageProfileInput` from the raw draft-profile JSON so the
/// automatic pipeline feeds the same world context into the prompt compiler
/// as the manual generation panel does.
fn scene_image_profile_input_from_value(
    profile: &Value,
    profile_id: Option<&str>,
    world_name: &str,
) -> SceneImageProfileInput {
    let text_field = |key: &str| json_text_from_value(profile, key);
    SceneImageProfileInput {
        id: profile_id.map(str::to_string),
        name: Some(world_name.to_string()),
        subtitle: text_field("subtitle"),
        summary: text_field("summary"),
        tone: text_field("tone"),
        player_goal: text_field("playerGoal"),
        setting_text: text_field("settingText"),
    }
}
/// Reads `value[key]` as a trimmed string, returning `None` when the field is
/// absent, not a string, or trims to empty.
fn json_text_from_value(value: &Value, key: &str) -> Option<String> {
    match value.get(key).and_then(Value::as_str) {
        Some(raw) => {
            let trimmed = raw.trim();
            if trimmed.is_empty() {
                None
            } else {
                Some(trimmed.to_string())
            }
        }
        None => None,
    }
}
pub async fn generate_custom_world_cover_image(
State(state): State<AppState>,
Extension(request_context): Extension<RequestContext>,
@@ -707,7 +729,7 @@ pub async fn generate_custom_world_cover_image(
create_text_to_image_generation(
&http_client,
&settings,
TEXT_TO_IMAGE_COVER_MODEL,
state.config.dashscope_cover_image_model.as_str(),
prompt.as_str(),
None,
size.as_str(),
@@ -721,7 +743,7 @@ pub async fn generate_custom_world_cover_image(
create_reference_image_generation(
&http_client,
&settings,
REFERENCE_IMAGE_COVER_MODEL,
state.config.dashscope_reference_image_model.as_str(),
prompt.as_str(),
size.as_str(),
&reference_images,
@@ -766,9 +788,9 @@ pub async fn generate_custom_world_cover_image(
asset_id: asset_id.clone(),
source_type: "generated".to_string(),
model: Some(if reference_images.is_empty() {
TEXT_TO_IMAGE_COVER_MODEL.to_string()
state.config.dashscope_cover_image_model.clone()
} else {
REFERENCE_IMAGE_COVER_MODEL.to_string()
state.config.dashscope_reference_image_model.clone()
}),
size: Some(size),
task_id: Some(generated.task_id),
@@ -1187,11 +1209,6 @@ fn normalize_scene_image_request(
negative_prompt: trim_to_option(payload.negative_prompt.as_deref())
.unwrap_or_else(|| DEFAULT_CUSTOM_WORLD_SCENE_IMAGE_NEGATIVE_PROMPT.to_string()),
reference_image_src: reference_image_src.clone(),
model: if reference_image_src.is_some() {
REFERENCE_IMAGE_SCENE_MODEL.to_string()
} else {
TEXT_TO_IMAGE_SCENE_MODEL.to_string()
},
})
}
@@ -2580,6 +2597,103 @@ mod tests {
);
}
/// The automatic scene-image path must compile its prompt through the same
/// builder as the manual draft page: the user prompt is never sent verbatim,
/// and world/scene metadata is folded into the compiled prompt.
#[test]
fn automatic_scene_image_payload_reuses_manual_prompt_compiler() {
    let profile = json!({
        "id": "profile_001",
        "name": "雾海群岛",
        "subtitle": "失落航线",
        "summary": "玩家在雾海中追查沉没王冠。",
        "tone": "潮湿、神秘、低魔奇幻",
        "playerGoal": "找到王冠并阻止海妖复苏",
        "settingText": "群岛被永恒雾潮包围。"
    });
    let payload = CustomWorldSceneImageRequest {
        profile_id: Some("profile_001".to_string()),
        world_name: Some("雾海群岛".to_string()),
        landmark_id: Some("reef_temple".to_string()),
        landmark_name: Some("礁石神殿".to_string()),
        prompt: None,
        size: Some("1280*720".to_string()),
        negative_prompt: None,
        reference_image_src: None,
        user_prompt: Some("破碎神殿矗立在蓝绿色雾潮中,潮湿石阶上有幽光贝壳。".to_string()),
        profile: Some(scene_image_profile_input_from_value(
            &profile,
            Some("profile_001"),
            "雾海群岛",
        )),
        landmark: Some(SceneImageLandmarkInput {
            id: Some("reef_temple".to_string()),
            name: Some("礁石神殿".to_string()),
            description: Some("古老礁石上的半沉神殿。".to_string()),
            danger_level: None,
        }),
    };
    let normalized = normalize_scene_image_request(payload).expect("payload should normalize");
    // Compiled prompt embeds the world metadata...
    assert!(normalized.prompt.contains("世界名:雾海群岛"));
    assert!(normalized.prompt.contains("世界副标题:失落航线"));
    assert!(normalized.prompt.contains("场景名称:礁石神殿"));
    assert!(
        normalized
            .prompt
            .contains("本次想要生成的画面内容:破碎神殿")
    );
    // ...and is therefore never the raw user prompt passed through unchanged.
    assert_ne!(
        normalized.prompt,
        "破碎神殿矗立在蓝绿色雾潮中,潮湿石阶上有幽光贝壳。"
    );
}
/// With identical inputs, the automatic path's compiled prompt must be
/// byte-for-byte equal to the manual prompt builder's output.
#[test]
fn automatic_default_scene_image_context_matches_manual_default_context() {
    let profile = json!({
        "id": "profile_001",
        "name": "雾海群岛",
        "subtitle": "失落航线",
        "summary": "玩家在雾海中追查沉没王冠。",
        "tone": "潮湿、神秘、低魔奇幻",
        "playerGoal": "找到王冠并阻止海妖复苏",
        "settingText": "群岛被永恒雾潮包围。"
    });
    let user_prompt = "破碎神殿矗立在蓝绿色雾潮中,潮湿石阶上有幽光贝壳。";
    let profile_input =
        scene_image_profile_input_from_value(&profile, Some("profile_001"), "雾海群岛");
    let landmark = SceneImageLandmarkInput {
        id: Some("reef_temple".to_string()),
        name: Some("礁石神殿".to_string()),
        description: Some("古老礁石上的半沉神殿。".to_string()),
        danger_level: Some("high".to_string()),
    };
    // Reference output from the manual builder.
    let manual_prompt = build_custom_world_scene_image_prompt(
        &profile_input,
        &landmark,
        user_prompt,
        false,
        Some("礁石神殿"),
        "雾海群岛",
    );
    let normalized = normalize_scene_image_request(CustomWorldSceneImageRequest {
        profile_id: Some("profile_001".to_string()),
        world_name: Some("雾海群岛".to_string()),
        landmark_id: Some("reef_temple".to_string()),
        landmark_name: Some("礁石神殿".to_string()),
        prompt: None,
        size: Some("1280*720".to_string()),
        negative_prompt: None,
        reference_image_src: None,
        user_prompt: Some(user_prompt.to_string()),
        profile: Some(profile_input),
        landmark: Some(landmark),
    })
    .expect("payload should normalize");
    assert_eq!(normalized.prompt, manual_prompt);
}
#[tokio::test]
async fn cover_image_returns_service_unavailable_when_dashscope_missing() {
let state = AppState::new(AppConfig::default()).expect("state should build");

View File

@@ -1,4 +1,4 @@
use platform_llm::{LlmClient, LlmMessage, LlmTextRequest};
use platform_llm::{LlmClient, LlmMessage, LlmTextRequest};
use serde_json::{Map as JsonMap, Value as JsonValue, json};
use shared_contracts::runtime::ExecuteCustomWorldAgentActionRequest;
use spacetime_client::CustomWorldAgentSessionRecord;
@@ -738,7 +738,7 @@ fn build_custom_world_landmark_seed_batch_prompt(
) -> String {
[
"请根据下面的世界核心信息,生成一批关键场景框架名单。".to_string(),
"后续我会继续补全场景网络,所以这一步每个地点只保留场景骨架默认生图描述。".to_string(),
"后续我会继续补全场景网络,所以这一步每个地点只保留场景骨架、地点默认生图描述和逐幕背景描述".to_string(),
"你必须只输出一个能被 JSON.parse 直接解析的 JSON 对象,不要输出 Markdown、代码块、注释或解释。".to_string(),
"世界核心信息:".to_string(),
build_framework_summary_text(framework, 0),
@@ -751,6 +751,7 @@ fn build_custom_world_landmark_seed_batch_prompt(
" \"name\": \"场景名称\",".to_string(),
" \"description\": \"场景极简描述\",".to_string(),
" \"visualDescription\": \"默认场景生图描述\",".to_string(),
" \"actBackgroundPromptTexts\": [\"第一幕背景画面描述\", \"第二幕背景画面描述\", \"第三幕背景画面描述\"],".to_string(),
" \"dangerLevel\": \"low|medium|high|extreme\"".to_string(),
" }".to_string(),
" ]".to_string(),
@@ -760,8 +761,10 @@ fn build_custom_world_landmark_seed_batch_prompt(
format!("- 必须生成恰好 {batch_count} 个关键场景。"),
"- 这是一个完全独立的自定义世界;地点名称必须直接服务玩家输入主题。".to_string(),
"- 名称必须具体且互不重复,不要使用 地点1、场景1 之类的占位名。".to_string(),
"- 每个地点只保留name、description、visualDescription、dangerLevel。".to_string(),
"- 每个地点只保留name、description、visualDescription、actBackgroundPromptTexts、dangerLevel。".to_string(),
"- visualDescription 是打开场景背景图像生成面板时默认填入的场景描述,必须具体到画面主体、远近景层次、地面可站立区域和氛围识别点,控制在 32 到 80 个汉字内。".to_string(),
"- actBackgroundPromptTexts 必须恰好 3 条,分别对应这个场景章节的第 1/2/3 幕背景图画面内容描述;每条都必须是大模型根据当前地点、主线阶段和可出场角色直接写出的画面描述,控制在 40 到 90 个汉字内。".to_string(),
"- actBackgroundPromptTexts 禁止使用“某某第1幕背景玩家会在……”这类标题、摘要、规则句拼接格式必须像可直接交给生图模型的自然画面描述。".to_string(),
"- description 控制在 12 到 24 个汉字内。".to_string(),
"- dangerLevel 只能是 low、medium、high、extreme 之一。".to_string(),
"- 所有生成文本都必须使用中文。".to_string(),
@@ -780,8 +783,8 @@ fn build_custom_world_landmark_seed_batch_json_repair_prompt(
"顶层必须只包含一个 landmarks 数组。".to_string(),
format!("必须保留恰好 {expected_count} 个地点对象。"),
if forbidden_names.is_empty() { "".to_string() } else { format!("禁止使用这些重复名:{}", forbidden_names.join("")) },
"每个地点只包含name、description、visualDescription、dangerLevel。".to_string(),
"如果缺少字段字符串补空字符串dangerLevel 补 medium。".to_string(),
"每个地点只包含name、description、visualDescription、actBackgroundPromptTexts、dangerLevel。".to_string(),
"如果缺少字段:字符串补空字符串,actBackgroundPromptTexts 补空数组,dangerLevel 补 medium。".to_string(),
"不要输出 sceneNpcNames、connectedLandmarks、items 或任何其他字段。".to_string(),
"原始文本:".to_string(),
response_text.trim().to_string(),
@@ -945,6 +948,7 @@ fn build_custom_world_role_batch_json_repair_prompt(
response_text.trim().to_string(),
].join("\n")
}
#[cfg(test)]
fn build_foundation_draft_user_prompt(session: &CustomWorldAgentSessionRecord) -> String {
let anchor_content = to_pretty_json(&session.anchor_content);
let creator_intent = to_pretty_json(&session.creator_intent);
@@ -1063,11 +1067,80 @@ fn build_foundation_draft_profile_from_framework(
JsonValue::Array(playable_detailed),
);
object.insert("storyNpcs".to_string(), JsonValue::Array(story_detailed));
let scene_chapter_blueprints = build_scene_chapter_blueprints_from_landmarks(&landmarks);
object.insert("landmarks".to_string(), JsonValue::Array(landmarks));
object.insert("chapters".to_string(), JsonValue::Array(Vec::new()));
object.insert(
"sceneChapterBlueprints".to_string(),
JsonValue::Array(scene_chapter_blueprints),
);
normalize_foundation_draft_profile(JsonValue::Object(object), session)
}
fn build_scene_chapter_blueprints_from_landmarks(landmarks: &[JsonValue]) -> Vec<JsonValue> {
// 幕背景描述必须来自关键场景生成步骤,不能在草稿合成阶段再用规则句拼接。
landmarks
.iter()
.enumerate()
.map(|(chapter_index, landmark)| {
let scene_name = json_text(landmark, "name")
.unwrap_or_else(|| format!("关键场景{}", chapter_index + 1));
let scene_id = json_text(landmark, "id")
.unwrap_or_else(|| format!("saved-landmark-{}", chapter_index + 1));
let summary = json_text(landmark, "description").unwrap_or_default();
let act_prompts =
json_string_array(landmark, "actBackgroundPromptTexts").unwrap_or_default();
let scene_npc_names = json_string_array(landmark, "sceneNpcNames").unwrap_or_default();
json!({
"id": scene_id.clone(),
"sceneId": scene_id.clone(),
"title": scene_name,
"summary": summary,
"linkedLandmarkIds": [scene_id.clone()],
"acts": (0..3)
.map(|act_index| build_scene_act_blueprint_from_landmark(
&scene_id,
&summary,
&act_prompts,
&scene_npc_names,
act_index,
))
.collect::<Vec<_>>(),
})
})
.collect()
}
/// Builds a single act blueprint for a scene chapter. A missing or blank
/// prompt is kept as an empty string on purpose, so the pre-generation
/// validation can surface draft-quality gaps instead of hiding them behind
/// synthesized text.
fn build_scene_act_blueprint_from_landmark(
    scene_id: &str,
    scene_summary: &str,
    act_prompts: &[String],
    scene_npc_names: &[String],
    act_index: usize,
) -> JsonValue {
    let act_title = match act_index {
        0 => "第1幕".to_string(),
        _ => format!("{}幕", act_index + 1),
    };
    let background_prompt = act_prompts
        .get(act_index)
        .map(|prompt| prompt.trim())
        .filter(|prompt| !prompt.is_empty())
        .unwrap_or("");
    json!({
        "id": format!("{}-act-{}", scene_id, act_index + 1),
        "sceneId": scene_id,
        "title": act_title,
        "summary": scene_summary,
        "backgroundPromptText": background_prompt,
        "encounterNpcIds": scene_npc_names,
    })
}
fn normalize_framework_shape(framework: &mut JsonValue, setting_text: &str) {
if !framework.is_object() {
*framework = json!({});
@@ -1469,12 +1542,6 @@ fn normalize_scene_chapter_blueprint(chapter: JsonValue) -> JsonValue {
fn normalize_scene_act_blueprint(act: JsonValue, index: usize) -> JsonValue {
let mut object = act.as_object().cloned().unwrap_or_default();
let fallback_act = build_fallback_scene_act_with_index(index);
let fallback_prompt = fallback_act
.get("backgroundPromptText")
.and_then(JsonValue::as_str)
.unwrap_or("当前幕场景背景,突出可探索空间、站位地面和局势氛围。")
.to_string();
let title = object
.get("title")
.and_then(JsonValue::as_str)
@@ -1497,7 +1564,7 @@ fn normalize_scene_act_blueprint(act: JsonValue, index: usize) -> JsonValue {
.map(str::trim)
.filter(|value| !value.is_empty())
.map(ToOwned::to_owned)
.unwrap_or_else(|| format!("{title}{summary}{fallback_prompt}"));
.unwrap_or_default();
object.insert(
"backgroundPromptText".to_string(),
JsonValue::String(background_prompt),
@@ -1523,7 +1590,7 @@ fn build_fallback_scene_act_with_index(index: usize) -> JsonValue {
"id": format!("scene-act-{}", index + 1),
"title": if index == 0 { "开场场景幕".to_string() } else { format!("{}", index + 1) },
"summary": "玩家被推入第一波局势,必须先确认站位、威胁和下一步追查方向。",
"backgroundPromptText": "第一幕场景背景,突出玩家初入现场时的空间轮廓、可站立地面、远近景层次和第一波威胁氛围。",
"backgroundPromptText": "",
})
}
@@ -1642,10 +1709,12 @@ fn parse_json_response_text(text: &str) -> Result<JsonValue, serde_json::Error>
serde_json::from_str::<JsonValue>(trimmed)
}
#[cfg(test)]
fn to_pretty_json(value: &JsonValue) -> String {
serde_json::to_string_pretty(value).unwrap_or_else(|_| "null".to_string())
}
#[cfg(test)]
fn is_non_null_json(value: &JsonValue) -> bool {
!matches!(value, JsonValue::Null)
}
@@ -1665,6 +1734,54 @@ mod tests {
use super::*;
/// Chapter blueprints must carry the landmark-authored act background prompts
/// through verbatim, never regenerating title-style placeholder text.
#[test]
fn scene_chapter_blueprints_use_landmark_act_background_prompts() {
    let landmarks = vec![json!({
        "name": "雾港码头",
        "description": "旧船骨露出黑潮。",
        "actBackgroundPromptTexts": [
            "潮湿木栈桥在青灰雾里延伸,近处有可站立的破旧甲板,远处旧船骨与灯塔剪影压低天空。",
            "封锁绳与巡海灯横切码头,中景堆满浸水货箱,远景黑潮拍打沉船残骸。",
            "退潮后的泥滩露出父亲留下的海图匣,雾中灯火错位闪烁,岸边留出对峙站位。"
        ],
        "sceneNpcNames": ["灯童丁"]
    })];
    let blueprints = build_scene_chapter_blueprints_from_landmarks(&landmarks);
    let acts = blueprints[0]
        .get("acts")
        .and_then(JsonValue::as_array)
        .expect("acts should exist");
    assert_eq!(acts.len(), 3);
    assert_eq!(
        acts[0].get("backgroundPromptText"),
        Some(&json!(
            "潮湿木栈桥在青灰雾里延伸,近处有可站立的破旧甲板,远处旧船骨与灯塔剪影压低天空。"
        ))
    );
    // Guard against the old rule-based fallback that produced "第1幕背景…" text.
    assert!(
        !acts[0]
            .get("backgroundPromptText")
            .and_then(JsonValue::as_str)
            .unwrap_or_default()
            .contains("第1幕背景")
    );
}
/// A missing background prompt must normalize to an empty string (not a
/// synthesized fallback) so downstream validation can flag the gap.
#[test]
fn normalize_scene_act_keeps_missing_background_prompt_empty() {
    let act = normalize_scene_act_blueprint(
        json!({
            "title": "第1幕",
            "summary": "玩家进入雾港码头。"
        }),
        0,
    );
    assert_eq!(act.get("backgroundPromptText"), Some(&json!("")));
}
#[test]
fn foundation_prompt_uses_real_seed_text() {
let session = build_test_session();
@@ -1740,7 +1857,7 @@ mod tests {
r#"{"playableNpcs":[{"name":"岑灯","title":"返乡守灯人","role":"主角代理","description":"追查旧案的人","initialAffinity":24,"relationshipHooks":["旧案牵连"],"tags":["守灯人"]}]}"#,
),
llm_response(
r#"{"storyNpcs":[{"name":"议长甲","title":"群岛议长","role":"遮掩者","description":"压住旧档的人","initialAffinity":-10,"relationshipHooks":["旧档案"],"tags":["议会"]},{"name":"潮医乙","title":"潮汐医师","role":"证人","description":"知道沉船伤痕","initialAffinity":20,"relationshipHooks":["救治记录"],"tags":["证人"]}]}"#,
r#"{"storyNpcs":[{"name":"议长甲","title":"群岛议长","role":"遮掩者","description":"压住旧档的人","visualDescription":"深色议会长袍垂到靴边,银扣像封蜡,手里总夹着旧档袋。","actionDescription":"抬手下令封锁,动作缓慢却压迫感强。","sceneVisualDescription":"他常出现在议会石厅高处,旧档柜阴影切过半张脸。","initialAffinity":-10,"relationshipHooks":["旧档案"],"tags":["议会"]},{"name":"潮医乙","title":"潮汐医师","role":"证人","description":"知道沉船伤痕","initialAffinity":20,"relationshipHooks":["救治记录"],"tags":["证人"]}]}"#,
),
llm_response(
r#"{"storyNpcs":[{"name":"雾商丙","title":"雾港商人","role":"中间人","description":"贩卖航线的人","initialAffinity":5,"relationshipHooks":["伪造海图"],"tags":["商人"]},{"name":"灯童丁","title":"灯塔学徒","role":"目击者","description":"听见夜钟的人","initialAffinity":30,"relationshipHooks":["夜钟"],"tags":["学徒"]}]}"#,
@@ -1795,6 +1912,24 @@ mod tests {
assert!(request_text.contains("叙事档案"));
assert!(request_text.contains("养成档案"));
assert!(!request_text.contains("seedText\\uff1acustom-world-agent-session-1"));
assert_eq!(
draft_profile
.get("playableNpcs")
.and_then(JsonValue::as_array)
.and_then(|entries| entries.first())
.and_then(|entry| entry.get("visualDescription"))
.and_then(JsonValue::as_str),
Some("灰蓝旧灯披风压着海盐痕,腰侧挂旧海图筒和短灯杖。")
);
assert_eq!(
draft_profile
.get("storyNpcs")
.and_then(JsonValue::as_array)
.and_then(|entries| entries.first())
.and_then(|entry| entry.get("visualDescription"))
.and_then(JsonValue::as_str),
Some("深色议会长袍垂到靴边,银扣像封蜡,手里总夹着旧档袋。")
);
assert_eq!(draft_profile.get("name"), Some(&json!("雾港归航")));
assert!(
draft_profile

View File

@@ -38,6 +38,18 @@ impl AppError {
&self.message
}
/// Returns the most specific human-readable failure text available.
///
/// Batch jobs must not reduce errors to the generic HTTP status message:
/// the upstream (DashScope) reports the real failure reason inside
/// `details.message`, so that field wins whenever it is non-empty after
/// trimming; otherwise the plain `message` is returned.
pub fn body_text(&self) -> String {
    let detail_message = self
        .details
        .as_ref()
        .and_then(|details| details.get("message"))
        .and_then(Value::as_str)
        .map(str::trim)
        .filter(|message| !message.is_empty());
    match detail_message {
        Some(message) => message.to_string(),
        None => self.message.clone(),
    }
}
pub fn with_message(mut self, message: impl Into<String>) -> Self {
self.message = message.into();
self

View File

@@ -1,4 +1,5 @@
mod admin;
mod ai_generation_drafts;
mod ai_tasks;
mod api_response;
mod app;

View File

@@ -58,6 +58,7 @@ use std::convert::Infallible;
use tokio::time::sleep;
use crate::{
ai_generation_drafts::{AiGenerationDraftContext, AiGenerationDraftWriter},
api_response::json_success_body,
auth::AuthenticatedAccessToken,
http_error::AppError,
@@ -300,6 +301,16 @@ pub async fn stream_puzzle_agent_message(
let session_id_for_stream = session_id.clone();
let owner_user_id_for_stream = owner_user_id.clone();
let stream = async_stream::stream! {
let mut draft_writer = AiGenerationDraftWriter::new(AiGenerationDraftContext::new(
"puzzle",
owner_user_id_for_stream.as_str(),
session_id_for_stream.as_str(),
payload.client_message_id.as_str(),
"拼图模板生成草稿",
));
if let Err(error) = draft_writer.ensure_started(state.spacetime_client()).await {
tracing::warn!(error = %error, "拼图模板生成草稿任务启动失败,主生成流程继续执行");
}
let (reply_tx, mut reply_rx) = tokio::sync::mpsc::unbounded_channel::<String>();
let turn_result = {
let run_turn = run_puzzle_agent_turn(
@@ -319,6 +330,7 @@ pub async fn stream_puzzle_agent_message(
result = &mut run_turn => break result,
maybe_text = reply_rx.recv() => {
if let Some(text) = maybe_text {
draft_writer.persist_visible_text(state.spacetime_client(), text.as_str()).await;
yield Ok::<Event, Infallible>(puzzle_sse_json_event_or_error(
"reply_delta",
json!({ "text": text }),
@@ -330,6 +342,7 @@ pub async fn stream_puzzle_agent_message(
};
while let Some(text) = reply_rx.recv().await {
draft_writer.persist_visible_text(state.spacetime_client(), text.as_str()).await;
yield Ok::<Event, Infallible>(puzzle_sse_json_event_or_error(
"reply_delta",
json!({ "text": text }),

View File

@@ -1,4 +1,12 @@
use std::{error::Error, fmt, str as std_str, time::Duration};
use std::{
env,
error::Error,
fmt, fs,
path::PathBuf,
str as std_str,
sync::atomic::{AtomicU64, Ordering},
time::{Duration, SystemTime, UNIX_EPOCH},
};
use log::{debug, warn};
use reqwest::{Client, StatusCode};
@@ -10,6 +18,9 @@ pub const DEFAULT_REQUEST_TIMEOUT_MS: u64 = 30_000;
pub const DEFAULT_MAX_RETRIES: u32 = 1;
pub const DEFAULT_RETRY_BACKOFF_MS: u64 = 500;
pub const CHAT_COMPLETIONS_PATH: &str = "/chat/completions";
const DEFAULT_LLM_RAW_LOG_DIR: &str = "logs/llm-raw";
static LLM_RAW_LOG_SEQUENCE: AtomicU64 = AtomicU64::new(1);
// 冻结平台来源,避免上层继续散落 provider 字符串。
#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
@@ -113,6 +124,17 @@ struct ChatCompletionsRequestBody<'a> {
max_tokens: Option<u32>,
}
/// Serializable snapshot of a failed LLM request's input — presumably written
/// to the raw-failure log by `log_llm_raw_failure` so the exact prompt can be
/// inspected offline (TODO confirm: the writer is outside this view).
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct LlmRawFailureInputLog<'a> {
    /// Upstream platform identifier (static per provider).
    provider: &'static str,
    /// Model name the request targeted.
    model: &'a str,
    /// Whether the request used the streaming endpoint.
    stream: bool,
    /// Attempt counter at the time of failure.
    attempt: u32,
    /// Optional max-token cap forwarded with the request.
    max_tokens: Option<u32>,
    /// Full message list sent upstream (borrowed, not cloned).
    messages: &'a [LlmMessage],
}
#[derive(Deserialize)]
struct ChatCompletionsResponseEnvelope {
id: Option<String>,
@@ -156,6 +178,7 @@ struct ChatCompletionsContentPart {
#[derive(Default)]
struct OpenAiCompatibleSseParser {
buffer: String,
raw_text: String,
}
#[derive(Debug)]
@@ -382,12 +405,31 @@ impl LlmClient {
request.validate()?;
let resolved_model = request.resolved_model(self.config.model()).to_string();
let response = self.execute_request(&request, false).await?;
let raw_text = response
.text()
.await
.map_err(|error| map_stream_read_error(error, 1))?;
let raw_text = response.text().await.map_err(|error| {
let llm_error = map_stream_read_error(error, 1);
log_llm_raw_failure(
&self.config,
&request,
false,
1,
"read_response_failed",
llm_error.to_string().as_str(),
);
llm_error
})?;
parse_chat_completions_response(self.config.provider(), &resolved_model, raw_text.as_str())
.map_err(|error| {
log_llm_raw_failure(
&self.config,
&request,
false,
1,
"parse_response_failed",
raw_text.as_str(),
);
error
})
}
pub async fn request_single_message_text(
@@ -422,10 +464,18 @@ impl LlmClient {
let mut undecoded_chunk_bytes = Vec::new();
loop {
let next_chunk = response
.chunk()
.await
.map_err(|error| map_stream_read_error(error, 1))?;
let next_chunk = response.chunk().await.map_err(|error| {
let llm_error = map_stream_read_error(error, 1);
log_llm_raw_failure(
&self.config,
&request,
true,
1,
"read_stream_failed",
parser.raw_text().as_str(),
);
llm_error
})?;
let Some(chunk) = next_chunk else {
break;
@@ -433,12 +483,33 @@ impl LlmClient {
undecoded_chunk_bytes.extend_from_slice(chunk.as_ref());
let (chunk_text, remaining_bytes) =
decode_utf8_stream_chunk(undecoded_chunk_bytes.as_slice())?;
decode_utf8_stream_chunk(undecoded_chunk_bytes.as_slice()).map_err(|error| {
log_llm_raw_failure(
&self.config,
&request,
true,
1,
"decode_stream_failed",
parser.raw_text().as_str(),
);
error
})?;
undecoded_chunk_bytes = remaining_bytes;
if chunk_text.is_empty() {
continue;
}
for event in parser.push_chunk(chunk_text.as_ref())? {
let stream_events = parser.push_chunk(chunk_text.as_ref()).map_err(|error| {
log_llm_raw_failure(
&self.config,
&request,
true,
1,
"parse_stream_failed",
parser.raw_text().as_str(),
);
error
})?;
for event in stream_events {
if let Some(delta_text) = event.delta_text
&& !delta_text.is_empty()
{
@@ -460,10 +531,29 @@ impl LlmClient {
if !undecoded_chunk_bytes.is_empty() {
let trailing_text =
std_str::from_utf8(undecoded_chunk_bytes.as_slice()).map_err(|error| {
log_llm_raw_failure(
&self.config,
&request,
true,
1,
"decode_stream_failed",
parser.raw_text().as_str(),
);
LlmError::Deserialize(format!("解析 LLM 流式 UTF-8 响应失败:{error}"))
})?;
if !trailing_text.is_empty() {
for event in parser.push_chunk(trailing_text)? {
let trailing_events = parser.push_chunk(trailing_text).map_err(|error| {
log_llm_raw_failure(
&self.config,
&request,
true,
1,
"parse_stream_failed",
parser.raw_text().as_str(),
);
error
})?;
for event in trailing_events {
if let Some(delta_text) = event.delta_text
&& !delta_text.is_empty()
{
@@ -483,7 +573,18 @@ impl LlmClient {
}
}
for event in parser.finish()? {
let remaining_events = parser.finish().map_err(|error| {
log_llm_raw_failure(
&self.config,
&request,
true,
1,
"parse_stream_failed",
parser.raw_text().as_str(),
);
error
})?;
for event in remaining_events {
if let Some(delta_text) = event.delta_text
&& !delta_text.is_empty()
{
@@ -503,6 +604,14 @@ impl LlmClient {
let content = accumulated_text.trim().to_string();
if content.is_empty() {
log_llm_raw_failure(
&self.config,
&request,
true,
1,
"empty_stream_response",
parser.raw_text().as_str(),
);
return Err(LlmError::EmptyResponse);
}
@@ -591,6 +700,14 @@ impl LlmClient {
continue;
}
log_llm_raw_failure(
&self.config,
request,
stream,
attempt,
"upstream_status_failed",
raw_text.as_str(),
);
return Err(LlmError::Upstream {
status_code: status.as_u16(),
message,
@@ -607,7 +724,16 @@ impl LlmClient {
continue;
}
return Err(LlmError::Timeout { attempts: attempt });
let error = LlmError::Timeout { attempts: attempt };
log_llm_raw_failure(
&self.config,
request,
stream,
attempt,
"request_timeout",
error.to_string().as_str(),
);
return Err(error);
}
Err(error) if error.is_connect() => {
let message = error.to_string();
@@ -622,13 +748,31 @@ impl LlmClient {
continue;
}
return Err(LlmError::Connectivity {
let error = LlmError::Connectivity {
attempts: attempt,
message,
});
};
log_llm_raw_failure(
&self.config,
request,
stream,
attempt,
"request_connectivity_failed",
error.to_string().as_str(),
);
return Err(error);
}
Err(error) => {
return Err(LlmError::Transport(error.to_string()));
let error = LlmError::Transport(error.to_string());
log_llm_raw_failure(
&self.config,
request,
stream,
attempt,
"request_transport_failed",
error.to_string().as_str(),
);
return Err(error);
}
}
}
@@ -652,11 +796,16 @@ impl LlmClient {
impl OpenAiCompatibleSseParser {
/// Feeds one decoded text chunk into the SSE parser and returns any stream
/// events that became complete as a result.
fn push_chunk(&mut self, chunk: &str) -> Result<Vec<ParsedStreamEvent>, LlmError> {
    // Keep an untouched copy of everything received so far; the failure
    // logger reads it back via `raw_text()`.
    self.raw_text.push_str(chunk);
    self.buffer.push_str(chunk);
    // Normalize over the whole buffer, not just the new chunk, so a "\r\n"
    // pair split across two chunks still collapses to "\n".
    self.buffer = self.buffer.replace("\r\n", "\n");
    self.drain_complete_events()
}
/// Snapshot of every raw chunk received so far, used when persisting
/// failure logs. Returns an owned `String` because call sites invoke
/// `.as_str()` on the result; the clone keeps the text independent of the
/// parser's lifetime.
fn raw_text(&self) -> String {
    self.raw_text.clone()
}
fn finish(&mut self) -> Result<Vec<ParsedStreamEvent>, LlmError> {
if self.buffer.trim().is_empty() {
return Ok(Vec::new());
@@ -691,6 +840,87 @@ fn normalize_non_empty(value: String, error_message: &str) -> Result<String, Llm
Ok(trimmed)
}
/// Best-effort persistence of raw LLM failure artifacts.
///
/// Delegates to `write_llm_raw_failure` and downgrades any disk error to a
/// warning so logging can never interrupt the primary error path.
fn log_llm_raw_failure(
    config: &LlmConfig,
    request: &LlmTextRequest,
    stream: bool,
    attempt: u32,
    failure_stage: &str,
    raw_output: &str,
) {
    let outcome =
        write_llm_raw_failure(config, request, stream, attempt, failure_stage, raw_output);
    if let Err(error) = outcome {
        warn!(
            "LLM 失败原文日志落盘失败,主错误流程继续执行: failure_stage={}, error={}",
            failure_stage, error
        );
    }
}
/// Writes a matched pair of log files for one failed LLM call:
/// `<prefix>.input.json` (provider/model/attempt plus the request messages)
/// and `<prefix>.output.txt` (the raw upstream text, byte for byte).
///
/// The target directory comes from `LLM_RAW_LOG_DIR`, falling back to
/// `DEFAULT_LLM_RAW_LOG_DIR`. Errors are rendered as human-readable strings
/// for the caller to log.
fn write_llm_raw_failure(
    config: &LlmConfig,
    request: &LlmTextRequest,
    stream: bool,
    attempt: u32,
    failure_stage: &str,
    raw_output: &str,
) -> Result<(), String> {
    // Resolve the target directory (env override wins) and make sure it exists.
    let log_dir = match env::var("LLM_RAW_LOG_DIR") {
        Ok(dir) => PathBuf::from(dir),
        Err(_) => PathBuf::from(DEFAULT_LLM_RAW_LOG_DIR),
    };
    fs::create_dir_all(&log_dir).map_err(|error| format!("创建日志目录失败:{error}"))?;
    let prefix = build_llm_raw_log_prefix(failure_stage);
    let input_log = LlmRawFailureInputLog {
        provider: config.provider().as_str(),
        model: request.resolved_model(config.model()),
        stream,
        attempt,
        max_tokens: request.max_tokens,
        messages: request.messages.as_slice(),
    };
    let serialized_input = serde_json::to_string_pretty(&input_log)
        .map_err(|error| format!("序列化模型输入日志失败:{error}"))?;
    let input_path = log_dir.join(format!("{prefix}.input.json"));
    fs::write(input_path, serialized_input)
        .map_err(|error| format!("写入模型输入日志失败:{error}"))?;
    let output_path = log_dir.join(format!("{prefix}.output.txt"));
    fs::write(output_path, raw_output)
        .map_err(|error| format!("写入模型输出日志失败:{error}"))?;
    Ok(())
}
/// Builds a collision-resistant file-name prefix for one failure:
/// `<epoch-millis>-<pid>-<sequence>-<sanitized stage>`. The atomic sequence
/// keeps names unique within a process even when failures land in the same
/// millisecond.
fn build_llm_raw_log_prefix(failure_stage: &str) -> String {
    // A clock set before the Unix epoch degrades to 0 instead of panicking.
    let millis = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .map_or(0, |duration| duration.as_millis());
    let sequence = LLM_RAW_LOG_SEQUENCE.fetch_add(1, Ordering::Relaxed);
    format!(
        "{millis}-{}-{sequence:06}-{}",
        std::process::id(),
        sanitize_log_file_segment(failure_stage)
    )
}
/// Makes an arbitrary stage label safe to embed in a file name.
///
/// ASCII alphanumerics, `-` and `_` pass through; every other character
/// (including each char of non-ASCII text) becomes `_`. An empty input maps
/// to `"unknown"` so the file name never loses its stage segment.
fn sanitize_log_file_segment(value: &str) -> String {
    let cleaned: String = value
        .chars()
        .map(|ch| match ch {
            'a'..='z' | 'A'..='Z' | '0'..='9' | '-' | '_' => ch,
            _ => '_',
        })
        .collect();
    if cleaned.is_empty() {
        "unknown".to_string()
    } else {
        cleaned
    }
}
fn parse_chat_completions_response(
provider: LlmProvider,
fallback_model: &str,
@@ -1028,6 +1258,62 @@ mod tests {
assert_eq!(response.response_id.as_deref(), Some("req_stream_01"));
}
#[tokio::test]
async fn request_text_writes_raw_failure_logs_after_parse_error() {
    // Point the raw-failure logger at a unique temp dir so runs don't collide.
    // NOTE(review): mutating process env is racy if other tests read
    // LLM_RAW_LOG_DIR concurrently — confirm these tests run serially.
    let log_dir = std::env::temp_dir().join(format!(
        "platform-llm-raw-log-test-{}",
        build_llm_raw_log_prefix("parse_error")
    ));
    unsafe {
        std::env::set_var("LLM_RAW_LOG_DIR", &log_dir);
    }
    // Mock server answers 200 with a body that is not valid JSON, forcing the
    // parse-failure path that should trigger raw-log persistence.
    let server_url = spawn_mock_server(vec![MockResponse {
        status_line: "200 OK",
        content_type: "application/json; charset=utf-8",
        body: "不是合法 JSON".to_string(),
        extra_headers: Vec::new(),
    }]);
    let client = build_test_client(server_url, 0);
    let error = client
        .request_single_message_text("系统原文", "用户原文")
        .await
        .expect_err("invalid json should fail");
    assert!(matches!(error, LlmError::Deserialize(_)));
    // Exactly one input/output log pair should exist in the directory.
    let mut input_logs = Vec::new();
    let mut output_logs = Vec::new();
    for entry in fs::read_dir(&log_dir).expect("log dir should exist") {
        let path = entry.expect("log entry should be readable").path();
        let file_name = path
            .file_name()
            .and_then(|name| name.to_str())
            .unwrap_or_default()
            .to_string();
        if file_name.ends_with(".input.json") {
            input_logs.push(path);
        } else if file_name.ends_with(".output.txt") {
            output_logs.push(path);
        }
    }
    assert_eq!(input_logs.len(), 1);
    assert_eq!(output_logs.len(), 1);
    let input_text = fs::read_to_string(&input_logs[0]).expect("input log should be readable");
    let output_text =
        fs::read_to_string(&output_logs[0]).expect("output log should be readable");
    // Input log carries both prompt messages but must not leak the API key;
    // output log is the raw upstream body, byte for byte.
    assert!(input_text.contains("系统原文"));
    assert!(input_text.contains("用户原文"));
    assert!(!input_text.contains("test-key"));
    assert_eq!(output_text, "不是合法 JSON");
    // NOTE(review): if an assertion above fails, the env var and temp dir
    // leak — cleanup only runs on the happy path.
    unsafe {
        std::env::remove_var("LLM_RAW_LOG_DIR");
    }
    fs::remove_dir_all(log_dir).expect("log dir should be removed");
}
fn build_test_client(base_url: String, max_retries: u32) -> LlmClient {
let config = LlmConfig::new(
LlmProvider::Ark,

View File

@@ -519,4 +519,5 @@ impl SpacetimeClient {
})
.await
}
}

View File

@@ -18,7 +18,8 @@ pub use mapper::{
CustomWorldAgentMessageFinalizeRecordInput, CustomWorldAgentMessageRecord,
CustomWorldAgentMessageSubmitRecordInput, CustomWorldAgentOperationProgressRecordInput,
CustomWorldAgentOperationRecord, CustomWorldAgentSessionCreateRecordInput,
CustomWorldAgentSessionRecord, CustomWorldCheckpointRecord, CustomWorldDraftCardDetailRecord,
CustomWorldAgentSessionRecord,
CustomWorldCheckpointRecord, CustomWorldDraftCardDetailRecord,
CustomWorldDraftCardDetailSectionRecord, CustomWorldDraftCardRecord,
CustomWorldGalleryEntryRecord, CustomWorldLibraryEntryRecord, CustomWorldLibraryMutationRecord,
CustomWorldProfileUpsertRecordInput, CustomWorldPublishGateRecord,

View File

@@ -2726,17 +2726,6 @@ pub(crate) fn format_rpg_agent_operation_type(
}
}
pub(crate) fn format_rpg_agent_operation_status(
value: crate::module_bindings::RpgAgentOperationStatus,
) -> &'static str {
match value {
crate::module_bindings::RpgAgentOperationStatus::Queued => "queued",
crate::module_bindings::RpgAgentOperationStatus::Running => "running",
crate::module_bindings::RpgAgentOperationStatus::Completed => "completed",
crate::module_bindings::RpgAgentOperationStatus::Failed => "failed",
}
}
pub(crate) fn parse_rpg_agent_operation_type_record(
value: &str,
) -> Result<crate::module_bindings::RpgAgentOperationType, SpacetimeClientError> {
@@ -2744,22 +2733,12 @@ pub(crate) fn parse_rpg_agent_operation_type_record(
"process_message" => Ok(crate::module_bindings::RpgAgentOperationType::ProcessMessage),
"draft_foundation" => Ok(crate::module_bindings::RpgAgentOperationType::DraftFoundation),
"update_draft_card" => Ok(crate::module_bindings::RpgAgentOperationType::UpdateDraftCard),
"sync_result_profile" => {
Ok(crate::module_bindings::RpgAgentOperationType::SyncResultProfile)
}
"generate_characters" => {
Ok(crate::module_bindings::RpgAgentOperationType::GenerateCharacters)
}
"generate_landmarks" => {
Ok(crate::module_bindings::RpgAgentOperationType::GenerateLandmarks)
}
"generate_role_assets" => {
Ok(crate::module_bindings::RpgAgentOperationType::GenerateRoleAssets)
}
"sync_result_profile" => Ok(crate::module_bindings::RpgAgentOperationType::SyncResultProfile),
"generate_characters" => Ok(crate::module_bindings::RpgAgentOperationType::GenerateCharacters),
"generate_landmarks" => Ok(crate::module_bindings::RpgAgentOperationType::GenerateLandmarks),
"generate_role_assets" => Ok(crate::module_bindings::RpgAgentOperationType::GenerateRoleAssets),
"sync_role_assets" => Ok(crate::module_bindings::RpgAgentOperationType::SyncRoleAssets),
"generate_scene_assets" => {
Ok(crate::module_bindings::RpgAgentOperationType::GenerateSceneAssets)
}
"generate_scene_assets" => Ok(crate::module_bindings::RpgAgentOperationType::GenerateSceneAssets),
"sync_scene_assets" => Ok(crate::module_bindings::RpgAgentOperationType::SyncSceneAssets),
"expand_long_tail" => Ok(crate::module_bindings::RpgAgentOperationType::ExpandLongTail),
"publish_world" => Ok(crate::module_bindings::RpgAgentOperationType::PublishWorld),
@@ -2772,6 +2751,17 @@ pub(crate) fn parse_rpg_agent_operation_type_record(
}
}
pub(crate) fn format_rpg_agent_operation_status(
value: crate::module_bindings::RpgAgentOperationStatus,
) -> &'static str {
match value {
crate::module_bindings::RpgAgentOperationStatus::Queued => "queued",
crate::module_bindings::RpgAgentOperationStatus::Running => "running",
crate::module_bindings::RpgAgentOperationStatus::Completed => "completed",
crate::module_bindings::RpgAgentOperationStatus::Failed => "failed",
}
}
pub(crate) fn parse_rpg_agent_operation_status_record(
value: &str,
) -> Result<crate::module_bindings::RpgAgentOperationStatus, SpacetimeClientError> {
@@ -2841,22 +2831,6 @@ impl TryFrom<&str> for BigFishAssetKind {
}
}
/// Maps the module-side Big Fish creation stage enum onto the client-facing
/// `BigFishCreationStage` variant of the same name (1:1, exhaustive, no
/// fallthrough arm).
pub(crate) fn map_big_fish_creation_stage(
    value: module_big_fish::BigFishCreationStage,
) -> BigFishCreationStage {
    match value {
        module_big_fish::BigFishCreationStage::CollectingAnchors => {
            BigFishCreationStage::CollectingAnchors
        }
        module_big_fish::BigFishCreationStage::DraftReady => BigFishCreationStage::DraftReady,
        module_big_fish::BigFishCreationStage::AssetRefining => BigFishCreationStage::AssetRefining,
        module_big_fish::BigFishCreationStage::ReadyToPublish => {
            BigFishCreationStage::ReadyToPublish
        }
        module_big_fish::BigFishCreationStage::Published => BigFishCreationStage::Published,
    }
}
pub(crate) fn parse_big_fish_creation_stage(
value: &str,
) -> Result<BigFishCreationStage, SpacetimeClientError> {
@@ -3501,6 +3475,21 @@ pub struct CustomWorldAgentOperationRecord {
pub error_message: Option<String>,
}
#[derive(Clone, Debug, PartialEq, Eq)]
/// Input payload for upserting the progress of one custom-world agent
/// operation.
pub struct CustomWorldAgentOperationProgressRecordInput {
    // Agent session the operation belongs to.
    pub session_id: String,
    // Owner used for access checks on the session.
    pub owner_user_id: String,
    // Identifier of the operation whose progress is being reported.
    pub operation_id: String,
    // The SpacetimeDB module side stores the operation type as an enum; a
    // plain string is kept here for lightweight passing at the API layer.
    pub operation_type: String,
    // Status as a string for the same lightweight-transport reason as
    // `operation_type`.
    pub operation_status: String,
    // Short human-readable label for the current phase.
    pub phase_label: String,
    // Longer human-readable detail for the current phase.
    pub phase_detail: String,
    // Progress value — presumably a percentage in 0..=100; TODO confirm
    // against the module-side reducer.
    pub operation_progress: u32,
    // Populated when the operation failed; `None` otherwise.
    pub error_message: Option<String>,
    // Last-update timestamp in microseconds since the Unix epoch.
    pub updated_at_micros: i64,
}
#[derive(Clone, Debug, PartialEq)]
pub struct CustomWorldDraftCardRecord {
pub card_id: String,
@@ -3721,20 +3710,6 @@ pub struct CustomWorldAgentMessageFinalizeRecordInput {
pub updated_at_micros: i64,
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct CustomWorldAgentOperationProgressRecordInput {
pub session_id: String,
pub owner_user_id: String,
pub operation_id: String,
pub operation_type: String,
pub operation_status: String,
pub phase_label: String,
pub phase_detail: String,
pub operation_progress: u32,
pub error_message: Option<String>,
pub updated_at_micros: i64,
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct CustomWorldAgentActionExecuteRecordInput {
pub session_id: String,

View File

@@ -342,7 +342,6 @@ pub mod start_ai_task_reducer;
pub mod start_ai_task_stage_reducer;
pub mod turn_in_quest_reducer;
pub mod unpublish_custom_world_profile_reducer;
pub mod upsert_custom_world_agent_operation_progress_procedure;
pub mod upsert_chapter_progression_reducer;
pub mod upsert_custom_world_profile_reducer;
pub mod upsert_npc_state_reducer;
@@ -477,6 +476,7 @@ pub mod unpublish_custom_world_profile_and_return_procedure;
pub mod update_puzzle_work_procedure;
pub mod upsert_auth_store_snapshot_procedure;
pub mod upsert_chapter_progression_and_return_procedure;
pub mod upsert_custom_world_agent_operation_progress_procedure;
pub mod upsert_custom_world_profile_and_return_procedure;
pub mod upsert_npc_state_and_return_procedure;
pub mod upsert_platform_browse_history_and_return_procedure;
@@ -856,7 +856,6 @@ pub use start_ai_task_reducer::start_ai_task;
pub use start_ai_task_stage_reducer::start_ai_task_stage;
pub use turn_in_quest_reducer::turn_in_quest;
pub use unpublish_custom_world_profile_reducer::unpublish_custom_world_profile;
pub use upsert_custom_world_agent_operation_progress_procedure::upsert_custom_world_agent_operation_progress;
pub use upsert_chapter_progression_reducer::upsert_chapter_progression;
pub use upsert_custom_world_profile_reducer::upsert_custom_world_profile;
pub use upsert_npc_state_reducer::upsert_npc_state;
@@ -946,6 +945,7 @@ pub use unpublish_custom_world_profile_and_return_procedure::unpublish_custom_wo
pub use update_puzzle_work_procedure::update_puzzle_work;
pub use upsert_auth_store_snapshot_procedure::upsert_auth_store_snapshot;
pub use upsert_chapter_progression_and_return_procedure::upsert_chapter_progression_and_return;
pub use upsert_custom_world_agent_operation_progress_procedure::upsert_custom_world_agent_operation_progress;
pub use upsert_custom_world_profile_and_return_procedure::upsert_custom_world_profile_and_return;
pub use upsert_npc_state_and_return_procedure::upsert_npc_state_and_return;
pub use upsert_platform_browse_history_and_return_procedure::upsert_platform_browse_history_and_return;

View File

@@ -1572,7 +1572,34 @@ fn delete_custom_world_agent_session_tx(
.filter(|row| row.owner_user_id == input.owner_user_id)
.ok_or_else(|| "custom_world_agent_session 不存在".to_string())?;
if session.stage == RpgAgentStage::Published {
return Err("已发布 RPG 作品请通过 profile 删除".to_string());
let published_profile = ctx
.db
.custom_world_profile()
.iter()
.find(|row| {
row.owner_user_id == input.owner_user_id
&& row.source_agent_session_id.as_deref() == Some(input.session_id.as_str())
&& row.deleted_at.is_none()
})
.ok_or_else(|| "已发布 RPG 作品缺少关联 profile无法删除".to_string())?;
// 作品卡可能只携带源 Agent sessionId。这里把“按 session 删除已发布作品”收敛为
// profile 软删除,避免前端误入草稿删除接口时把业务分支放大成上游 502。
delete_custom_world_profile_record(
ctx,
CustomWorldProfileDeleteInput {
profile_id: published_profile.profile_id,
owner_user_id: input.owner_user_id.clone(),
deleted_at_micros: ctx.timestamp.to_micros_since_unix_epoch(),
},
)?;
return list_custom_world_work_snapshots(
ctx,
CustomWorldWorksListInput {
owner_user_id: input.owner_user_id,
},
);
}
// 删除纯 Agent 草稿时同步清理消息、操作与草稿卡,避免作品列表消失后残留孤儿数据。