Files
Genarrative/server-rs/crates/api-server/src/character_animation_assets.rs

3463 lines
119 KiB
Rust
Raw Blame History

This file contains ambiguous Unicode characters
This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.
use std::{
collections::BTreeMap,
fs,
path::{Path, PathBuf},
process::{Command, Stdio},
thread,
time::{Duration, Instant},
};
use axum::{
Json,
extract::{Extension, Path as AxumPath, Query, State, rejection::JsonRejection},
http::StatusCode,
response::Response,
};
use image::{
ColorType, ImageEncoder, ImageFormat, Rgba, RgbaImage, codecs::png::PngEncoder,
imageops::FilterType,
};
use module_ai::{
AiStageCompletionInput, AiTaskCreateInput, AiTaskKind, AiTaskServiceError, AiTaskSnapshot,
AiTaskStageKind, AiTaskStatus, generate_ai_task_id,
};
use module_assets::{
AssetObjectAccessPolicy, AssetObjectFieldError, build_asset_entity_binding_input,
build_asset_object_upsert_input, generate_asset_binding_id, generate_asset_object_id,
};
use platform_oss::{
LegacyAssetPrefix, OssHeadObjectRequest, OssObjectAccess, OssPutObjectRequest,
OssSignedGetObjectUrlRequest,
};
use serde::Deserialize;
use serde_json::{Value, json};
use shared_contracts::assets::{
CharacterAnimationDraftPayload, CharacterAnimationGenerateRequest,
CharacterAnimationGenerateResponse, CharacterAnimationImportVideoRequest,
CharacterAnimationImportVideoResponse, CharacterAnimationPublishRequest,
CharacterAnimationPublishResponse, CharacterAnimationStrategy,
CharacterAnimationTemplatePayload, CharacterAnimationTemplatesResponse,
CharacterAssetJobStatusPayload, CharacterAssetJobStatusText, CharacterVisualDraftPayload,
CharacterWorkflowCacheGetResponse, CharacterWorkflowCachePayload,
CharacterWorkflowCacheSaveRequest, CharacterWorkflowCacheSaveResponse,
};
use spacetime_client::SpacetimeClientError;
use crate::{
api_response::json_success_body,
custom_world_asset_prompts::{
build_character_animation_prompt, build_fallback_moderation_safe_animation_prompt,
},
http_error::AppError,
request_context::RequestContext,
state::AppState,
};
use tokio::time::sleep;
// Default Ark model id used when the request does not pin one explicitly.
const CHARACTER_ANIMATION_MODEL: &str = "doubao-seedance-2-0-fast-260128";
// Asset-kind discriminators stored with each OSS object / asset record.
const CHARACTER_ANIMATION_ASSET_KIND: &str = "character_animation";
const CHARACTER_ANIMATION_REFERENCE_ASSET_KIND: &str = "character_animation_reference_video";
const CHARACTER_WORKFLOW_CACHE_ASSET_KIND: &str = "character_workflow_cache";
// Entity kind used when binding animation assets to a character.
const CHARACTER_ANIMATION_ENTITY_KIND: &str = "character";
/// Query string accepted by the workflow-cache GET endpoint.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CharacterWorkflowCacheQuery {
    // Optional scope id narrowing which cached workflow snapshot to load.
    #[serde(default)]
    pub cache_scope_id: Option<String>,
}
// Asset slot names under which the various animation artefacts are bound.
const CHARACTER_ANIMATION_SLOT: &str = "animation_set";
const CHARACTER_ANIMATION_REFERENCE_SLOT: &str = "animation_reference_video";
const CHARACTER_WORKFLOW_CACHE_SLOT: &str = "workflow_cache";
const CHARACTER_ANIMATION_DRAFT_SLOT: &str = "animation_draft";
const CHARACTER_ANIMATION_PREVIEW_SLOT: &str = "animation_preview";
// Fallback sprite-frame dimensions when a draft does not specify its own.
const DEFAULT_ANIMATION_FRAME_WIDTH: u32 = 192;
const DEFAULT_ANIMATION_FRAME_HEIGHT: u32 = 256;
// Fixed Ark video-generation parameters (resolution / aspect ratio / duration).
const FIXED_ARK_CHARACTER_VIDEO_RESOLUTION: &str = "480p";
const FIXED_ARK_CHARACTER_VIDEO_RATIO: &str = "1:1";
const FIXED_ARK_CHARACTER_VIDEO_DURATION_SECONDS: u32 = 4;
// Interval between status polls of an in-flight Ark generation task.
const ARK_VIDEO_TASK_POLL_INTERVAL_MS: u64 = 5_000;
// Built-in motion templates served by `list_character_animation_templates`.
// Labels, prompt suffixes and notes are user-facing Chinese strings and are
// returned to the client verbatim.
const BUILT_IN_MOTION_TEMPLATES: [MotionTemplate; 4] = [
    // Idle loop: breathing and subtle weight shift.
    MotionTemplate {
        id: "idle_loop",
        label: "待机循环",
        animation: "idle",
        prompt_suffix: "保持呼吸感和轻微重心起伏。",
        notes: "适合方案三的默认待机模板。",
    },
    // Side-scrolling run cycle.
    MotionTemplate {
        id: "run_side",
        label: "奔跑侧移",
        animation: "run",
        prompt_suffix: "保持平稳横向移动,脚步连续。",
        notes: "适合横版角色的标准奔跑模板。",
    },
    // Basic melee slash attack.
    MotionTemplate {
        id: "attack_slash",
        label: "横斩攻击",
        animation: "attack",
        prompt_suffix: "短促前踏后横斩,收招干净。",
        notes: "适合近战角色的基础攻击模板。",
    },
    // Death / fall-down finisher.
    MotionTemplate {
        id: "die_fall",
        label: "倒地死亡",
        animation: "die",
        prompt_suffix: "失衡倒地,动作完整结束。",
        notes: "适合终结动作模板。",
    },
];
/// Returns the built-in motion template catalogue in a success envelope.
///
/// Purely static data: every entry of `BUILT_IN_MOTION_TEMPLATES` is converted
/// to its wire payload form; no state or I/O is touched.
pub async fn list_character_animation_templates(
    Extension(request_context): Extension<RequestContext>,
) -> Json<Value> {
    let templates = BUILT_IN_MOTION_TEMPLATES
        .iter()
        .map(MotionTemplate::to_payload)
        .collect();
    let response = CharacterAnimationTemplatesResponse {
        ok: true,
        templates,
    };
    json_success_body(Some(&request_context), response)
}
/// Imports a caller-supplied reference video (a `data:video/*;base64` URL)
/// into the OSS draft area for one character/clip and returns its public path
/// plus a freshly minted draft id.
pub async fn import_character_animation_video(
    State(state): State<AppState>,
    Extension(request_context): Extension<RequestContext>,
    payload: Result<Json<CharacterAnimationImportVideoRequest>, JsonRejection>,
) -> Result<Json<Value>, Response> {
    // Surface JSON-body rejections as a 400 in the provider error envelope.
    let Json(payload) = payload.map_err(|error| {
        character_animation_error_response(
            &request_context,
            AppError::from_status(StatusCode::BAD_REQUEST).with_details(json!({
                "provider": "character-animation",
                "message": error.body_text(),
            })),
        )
    })?;
    // The legacy asset-workshop API carries no explicit Bearer header; the Rust
    // compatibility layer attributes ownership to the tool user for now.
    let owner_user_id = "asset-tool".to_string();
    // Normalize ids, falling back to generic placeholders when blank.
    let character_id = normalize_required_text(payload.character_id.as_str(), "character");
    let animation = normalize_required_text(payload.animation.as_str(), "clip");
    let source_label = normalize_required_text(
        payload.source_label.as_deref().unwrap_or("imported-video"),
        "imported-video",
    );
    // Only base64 video data URLs are accepted as the source; anything else is a 400.
    let parsed_video = parse_video_data_url(payload.video_source.as_str()).ok_or_else(|| {
        character_animation_error_response(
            &request_context,
            AppError::from_status(StatusCode::BAD_REQUEST).with_details(json!({
                "provider": "character-animation",
                "message": "videoSource 当前只支持 data:video/*;base64,...",
            })),
        )
    })?;
    // Draft ids are timestamp-based so repeated imports never collide.
    let draft_id = format!("animation-import-{}", current_utc_millis());
    let put_result = put_imported_video_object(
        &state,
        &owner_user_id,
        character_id.as_str(),
        animation.as_str(),
        draft_id.as_str(),
        source_label.as_str(),
        parsed_video,
    )
    .await
    .map_err(|error| character_animation_error_response(&request_context, error))?;
    Ok(json_success_body(
        Some(&request_context),
        CharacterAnimationImportVideoResponse {
            ok: true,
            imported_video_path: put_result.legacy_public_path,
            draft_id,
            save_message: "参考视频已导入 OSS 草稿区。".to_string(),
        },
    ))
}
/// Generates a character-animation draft, tracking progress through the AI
/// task service's staged pipeline (PreparePrompt → RequestModel →
/// NormalizeResult → PersistResult).
///
/// Strategy dispatch:
/// - `ImageToVideo`: real Ark image-to-video generation;
/// - `ImageSequence`: Stage-1 placeholder SVG frame chain;
/// - anything else: Stage-1 placeholder video chain fed by the first
///   reference video data URL.
///
/// On any failure inside the staged block the task is marked failed before the
/// error is returned to the client.
pub async fn generate_character_animation(
    State(state): State<AppState>,
    Extension(request_context): Extension<RequestContext>,
    payload: Result<Json<CharacterAnimationGenerateRequest>, JsonRejection>,
) -> Result<Json<Value>, Response> {
    // Surface JSON-body rejections as a 400 in the provider error envelope.
    let Json(payload) = payload.map_err(|error| {
        character_animation_error_response(
            &request_context,
            AppError::from_status(StatusCode::BAD_REQUEST).with_details(json!({
                "provider": "character-animation",
                "message": error.body_text(),
            })),
        )
    })?;
    // A main visual must exist before any animation can be generated.
    if payload.visual_source.trim().is_empty() {
        return Err(character_animation_error_response(
            &request_context,
            AppError::from_status(StatusCode::BAD_REQUEST).with_details(json!({
                "provider": "character-animation",
                "message": "请先准备主形象,再生成动作。",
            })),
        ));
    }
    // The legacy asset-workshop API carries no explicit Bearer header; the Rust
    // compatibility layer attributes ownership to the tool user for now.
    let owner_user_id = "asset-tool".to_string();
    let task_id = generate_ai_task_id(current_utc_micros());
    let strategy = payload.strategy.clone();
    let character_id = normalize_required_text(payload.character_id.as_str(), "character");
    let animation = normalize_required_text(payload.animation.as_str(), "idle");
    // Build the full generation prompt from the request's creative inputs.
    let prompt = build_character_animation_prompt(
        &strategy,
        payload.prompt_text.as_str(),
        payload.character_brief_text.as_deref(),
        payload.action_template_id.as_deref(),
        animation.as_str(),
        payload.frame_count,
        payload.fps,
        payload.duration_seconds,
        payload.loop_,
        payload.use_chroma_key,
    );
    let model = resolve_character_animation_model(&payload);
    // Register the task first so failures below can be recorded against it.
    let created = create_animation_task(
        &state,
        task_id.as_str(),
        owner_user_id.as_str(),
        character_id.as_str(),
        animation.as_str(),
        &strategy,
        model.as_str(),
        prompt.as_str(),
    )
    .map_err(|error| character_animation_error_response(&request_context, error))?;
    // Run the whole staged pipeline inside one async block so that any early
    // `?` exit can be converted into a single `fail_task` call afterwards.
    let result = async {
        state
            .ai_task_service()
            .start_task(task_id.as_str(), current_utc_micros())
            .map_err(map_ai_task_error)?;
        // Stage 1: PreparePrompt — record the final prompt and input counts.
        state
            .ai_task_service()
            .start_stage(
                task_id.as_str(),
                AiTaskStageKind::PreparePrompt,
                current_utc_micros(),
            )
            .map_err(map_ai_task_error)?;
        state
            .ai_task_service()
            .complete_stage(AiStageCompletionInput {
                task_id: task_id.clone(),
                stage_kind: AiTaskStageKind::PreparePrompt,
                text_output: Some(prompt.clone()),
                structured_payload_json: Some(
                    json!({
                        "characterId": character_id,
                        "animation": animation,
                        "strategy": strategy,
                        "referenceImageCount": payload.reference_image_data_urls.len(),
                        "referenceVideoCount": payload.reference_video_data_urls.len(),
                    })
                    .to_string(),
                ),
                warning_messages: Vec::new(),
                completed_at_micros: current_utc_micros(),
            })
            .map_err(map_ai_task_error)?;
        // Stage 2: RequestModel — per-strategy generation.
        state
            .ai_task_service()
            .start_stage(
                task_id.as_str(),
                AiTaskStageKind::RequestModel,
                current_utc_micros(),
            )
            .map_err(map_ai_task_error)?;
        let generated = match strategy {
            CharacterAnimationStrategy::ImageToVideo => {
                // Real Ark generation: resolve first/last frame media to data
                // URLs, then create + poll the upstream task.
                let settings = require_ark_video_settings(&state, &payload)?;
                let http_client = build_upstream_http_client(settings.request_timeout_ms)?;
                let visual_data_url = resolve_media_source_as_data_url(
                    &state,
                    &http_client,
                    payload.visual_source.as_str(),
                    "visualSource",
                )
                .await?;
                // The last frame defaults to the main visual when not supplied.
                let last_frame_data_url = resolve_media_source_as_data_url(
                    &state,
                    &http_client,
                    payload
                        .last_frame_image_data_url
                        .as_deref()
                        .unwrap_or(payload.visual_source.as_str()),
                    "lastFrameImageDataUrl",
                )
                .await?;
                // Pre-built safer prompt used if the upstream moderation
                // rejects the original one.
                let fallback_prompt = build_fallback_moderation_safe_animation_prompt(
                    animation.as_str(),
                    payload.loop_,
                    payload.use_chroma_key,
                );
                let generated = request_image_to_video_preview(
                    &state,
                    &http_client,
                    &settings,
                    owner_user_id.as_str(),
                    character_id.as_str(),
                    animation.as_str(),
                    task_id.as_str(),
                    prompt.as_str(),
                    fallback_prompt.as_str(),
                    visual_data_url.as_str(),
                    last_frame_data_url.as_str(),
                )
                .await?;
                state
                    .ai_task_service()
                    .complete_stage(AiStageCompletionInput {
                        task_id: task_id.clone(),
                        stage_kind: AiTaskStageKind::RequestModel,
                        text_output: Some(generated.submitted_prompt.clone()),
                        structured_payload_json: Some(
                            json!({
                                "provider": "ark",
                                "taskId": generated.upstream_task_id,
                                "model": settings.model,
                                "moderationFallbackApplied": generated.moderation_fallback_applied,
                            })
                            .to_string(),
                        ),
                        warning_messages: Vec::new(),
                        completed_at_micros: current_utc_micros(),
                    })
                    .map_err(map_ai_task_error)?;
                CharacterAnimationGeneratedDraft {
                    image_sources: Vec::new(),
                    preview_video_path: Some(generated.preview_video_path),
                }
            }
            CharacterAnimationStrategy::ImageSequence => {
                // Stage-1 placeholder chain: writes synthetic SVG frames.
                let image_sources = persist_animation_draft_frames(
                    &state,
                    owner_user_id.as_str(),
                    character_id.as_str(),
                    animation.as_str(),
                    task_id.as_str(),
                    prompt.as_str(),
                    normalize_frame_count(payload.frame_count),
                )
                .await?;
                state
                    .ai_task_service()
                    .complete_stage(AiStageCompletionInput {
                        task_id: task_id.clone(),
                        stage_kind: AiTaskStageKind::RequestModel,
                        text_output: Some("当前仍使用 Stage 1 序列帧占位链。".to_string()),
                        structured_payload_json: Some(
                            json!({
                                "provider": "character-animation",
                                "mode": "stage1-image-sequence-placeholder",
                            })
                            .to_string(),
                        ),
                        warning_messages: Vec::new(),
                        completed_at_micros: current_utc_micros(),
                    })
                    .map_err(map_ai_task_error)?;
                CharacterAnimationGeneratedDraft {
                    image_sources,
                    preview_video_path: None,
                }
            }
            _ => {
                // Remaining strategies: persist the first caller-supplied
                // reference video as the preview (no generation).
                let preview_video_path = persist_animation_preview_video(
                    &state,
                    owner_user_id.as_str(),
                    character_id.as_str(),
                    animation.as_str(),
                    task_id.as_str(),
                    payload
                        .reference_video_data_urls
                        .first()
                        .map(String::as_str),
                )
                .await?;
                state
                    .ai_task_service()
                    .complete_stage(AiStageCompletionInput {
                        task_id: task_id.clone(),
                        stage_kind: AiTaskStageKind::RequestModel,
                        text_output: Some("当前仍使用 Stage 1 视频占位链。".to_string()),
                        structured_payload_json: Some(
                            json!({
                                "provider": "character-animation",
                                "mode": "stage1-video-placeholder",
                                "strategy": strategy,
                            })
                            .to_string(),
                        ),
                        warning_messages: Vec::new(),
                        completed_at_micros: current_utc_micros(),
                    })
                    .map_err(map_ai_task_error)?;
                CharacterAnimationGeneratedDraft {
                    image_sources: Vec::new(),
                    preview_video_path: Some(preview_video_path),
                }
            }
        };
        // Stage 3/4: NormalizeResult and PersistResult share the same
        // structured result payload.
        let result_payload = build_animation_generate_result_payload(&generated);
        state
            .ai_task_service()
            .start_stage(
                task_id.as_str(),
                AiTaskStageKind::NormalizeResult,
                current_utc_micros(),
            )
            .map_err(map_ai_task_error)?;
        state
            .ai_task_service()
            .complete_stage(AiStageCompletionInput {
                task_id: task_id.clone(),
                stage_kind: AiTaskStageKind::NormalizeResult,
                text_output: None,
                structured_payload_json: Some(result_payload.to_string()),
                warning_messages: Vec::new(),
                completed_at_micros: current_utc_micros(),
            })
            .map_err(map_ai_task_error)?;
        state
            .ai_task_service()
            .complete_stage(AiStageCompletionInput {
                task_id: task_id.clone(),
                stage_kind: AiTaskStageKind::PersistResult,
                text_output: Some("角色动作草稿已写入 OSS。".to_string()),
                structured_payload_json: Some(result_payload.to_string()),
                warning_messages: Vec::new(),
                completed_at_micros: current_utc_micros(),
            })
            .map_err(map_ai_task_error)?;
        state
            .ai_task_service()
            .complete_task(task_id.as_str(), current_utc_micros())
            .map_err(map_ai_task_error)?;
        Ok::<_, AppError>(generated)
    }
    .await;
    // Any pipeline error marks the task failed (best effort) before returning.
    let generated = match result {
        Ok(generated) => generated,
        Err(error) => {
            let _ = state.ai_task_service().fail_task(
                created.task_id.as_str(),
                error.message().to_string(),
                current_utc_micros(),
            );
            return Err(character_animation_error_response(&request_context, error));
        }
    };
    Ok(json_success_body(
        Some(&request_context),
        CharacterAnimationGenerateResponse {
            ok: true,
            task_id,
            strategy,
            model,
            prompt,
            image_sources: generated.image_sources,
            preview_video_path: generated.preview_video_path,
        },
    ))
}
pub async fn get_character_animation_job(
State(state): State<AppState>,
Extension(request_context): Extension<RequestContext>,
AxumPath(task_id): AxumPath<String>,
) -> Result<Json<Value>, Response> {
let task = state
.ai_task_service()
.get_task(task_id.as_str())
.map_err(map_ai_task_error)
.map_err(|error| character_animation_error_response(&request_context, error))?;
Ok(json_success_body(
Some(&request_context),
build_character_animation_job_payload(task),
))
}
/// Publishes a set of finalized animation drafts as a bound animation-set
/// asset for one character, returning the resulting animation map.
pub async fn publish_character_animation(
    State(state): State<AppState>,
    Extension(request_context): Extension<RequestContext>,
    payload: Result<Json<CharacterAnimationPublishRequest>, JsonRejection>,
) -> Result<Json<Value>, Response> {
    // Surface JSON-body rejections as a 400 in the provider error envelope.
    let Json(payload) = payload.map_err(|error| {
        character_animation_error_response(
            &request_context,
            AppError::from_status(StatusCode::BAD_REQUEST).with_details(json!({
                "provider": "character-animation",
                "message": error.body_text(),
            })),
        )
    })?;
    // The legacy asset-workshop API carries no explicit Bearer header; the Rust
    // compatibility layer attributes ownership to the tool user for now.
    let owner_user_id = "asset-tool".to_string();
    // Empty fallback + explicit is_empty checks below give field-specific 400s.
    let character_id = normalize_required_text(payload.character_id.as_str(), "");
    let visual_asset_id = normalize_required_text(payload.visual_asset_id.as_str(), "");
    if character_id.is_empty() {
        return Err(character_animation_error_response(
            &request_context,
            AppError::from_status(StatusCode::BAD_REQUEST).with_details(json!({
                "provider": "character-animation",
                "message": "characterId is required.",
            })),
        ));
    }
    if visual_asset_id.is_empty() {
        return Err(character_animation_error_response(
            &request_context,
            AppError::from_status(StatusCode::BAD_REQUEST).with_details(json!({
                "provider": "character-animation",
                "message": "visualAssetId is required.",
            })),
        ));
    }
    if payload.animations.is_empty() {
        return Err(character_animation_error_response(
            &request_context,
            AppError::from_status(StatusCode::BAD_REQUEST).with_details(json!({
                "provider": "character-animation",
                "message": "animations is required.",
            })),
        ));
    }
    // Timestamp-based set id keeps successive publishes distinct.
    let animation_set_id = format!("animation-set-{}", current_utc_millis());
    let published = publish_animation_set(
        &state,
        owner_user_id.as_str(),
        character_id.as_str(),
        visual_asset_id.as_str(),
        animation_set_id.as_str(),
        payload.animations,
    )
    .await
    .map_err(|error| character_animation_error_response(&request_context, error))?;
    Ok(json_success_body(
        Some(&request_context),
        CharacterAnimationPublishResponse {
            ok: true,
            animation_set_id,
            override_map: json!({}),
            animation_map: published.animation_map,
            // NOTE(review): the branch condition looks possibly inverted —
            // `Some(false)` (override update disabled) selects the "可直接写回"
            // message. Confirm against the legacy Node behavior.
            save_message: if payload.update_character_override == Some(false) {
                "基础动作资源已写入 OSS 并绑定当前角色,可直接写回当前自定义世界角色。".to_string()
            } else {
                "基础动作资源已写入 OSS 并绑定当前角色Rust 后端不再写本地角色覆盖文件。"
                    .to_string()
            },
        },
    ))
}
/// Loads the cached visual-workflow snapshot for one character (optionally
/// narrowed by `cacheScopeId`) and returns it in a success envelope.
pub async fn get_character_workflow_cache(
    State(state): State<AppState>,
    Extension(request_context): Extension<RequestContext>,
    AxumPath(character_id): AxumPath<String>,
    Query(query): Query<CharacterWorkflowCacheQuery>,
) -> Result<Json<Value>, Response> {
    let normalized_character_id = normalize_required_text(character_id.as_str(), "");
    if normalized_character_id.is_empty() {
        let app_error = AppError::from_status(StatusCode::BAD_REQUEST).with_details(json!({
            "provider": "character-workflow-cache",
            "message": "characterId is required.",
        }));
        return Err(character_animation_error_response(&request_context, app_error));
    }
    let scope = trim_optional_text(query.cache_scope_id.as_deref());
    let cache = load_workflow_cache(&state, normalized_character_id.as_str(), scope.as_deref())
        .await
        .map_err(|error| character_animation_error_response(&request_context, error))?;
    Ok(json_success_body(
        Some(&request_context),
        CharacterWorkflowCacheGetResponse { ok: true, cache },
    ))
}
pub async fn save_character_workflow_cache(
State(state): State<AppState>,
Extension(request_context): Extension<RequestContext>,
payload: Result<Json<CharacterWorkflowCacheSaveRequest>, JsonRejection>,
) -> Result<Json<Value>, Response> {
let Json(payload) = payload.map_err(|error| {
character_animation_error_response(
&request_context,
AppError::from_status(StatusCode::BAD_REQUEST).with_details(json!({
"provider": "character-workflow-cache",
"message": error.body_text(),
})),
)
})?;
let character_id = normalize_required_text(payload.character_id.as_str(), "");
if character_id.is_empty() {
return Err(character_animation_error_response(
&request_context,
AppError::from_status(StatusCode::BAD_REQUEST).with_details(json!({
"provider": "character-workflow-cache",
"message": "characterId is required.",
})),
));
}
let cache = normalize_workflow_cache_payload(payload, current_utc_iso_text());
save_workflow_cache(&state, cache.clone())
.await
.map_err(|error| character_animation_error_response(&request_context, error))?;
Ok(json_success_body(
Some(&request_context),
CharacterWorkflowCacheSaveResponse {
ok: true,
cache,
save_message: "角色形象生成缓存已更新到 OSS。".to_string(),
},
))
}
/// Registers a new `CustomWorldGeneration` AI task for an animation-generation
/// request, seeding it with the default stage blueprints and a structured copy
/// of the request parameters.
fn create_animation_task(
    state: &AppState,
    task_id: &str,
    owner_user_id: &str,
    character_id: &str,
    animation: &str,
    strategy: &CharacterAnimationStrategy,
    model: &str,
    prompt: &str,
) -> Result<AiTaskSnapshot, AppError> {
    state
        .ai_task_service()
        .create_task(AiTaskCreateInput {
            task_id: task_id.to_string(),
            task_kind: AiTaskKind::CustomWorldGeneration,
            owner_user_id: owner_user_id.to_string(),
            request_label: "生成角色动作草稿".to_string(),
            source_module: "assets.character_animation".to_string(),
            source_entity_id: Some(character_id.to_string()),
            // Full request snapshot for debugging / audit.
            request_payload_json: Some(
                json!({
                    "characterId": character_id,
                    "animation": animation,
                    "strategy": strategy,
                    "model": model,
                    "prompt": prompt,
                })
                .to_string(),
            ),
            stages: AiTaskKind::CustomWorldGeneration.default_stage_blueprints(),
            created_at_micros: current_utc_micros(),
        })
        .map_err(map_ai_task_error)
}
/// Writes `frame_count` placeholder SVG frames for one animation clip into the
/// character draft area and returns their public OSS paths in frame order.
async fn persist_animation_draft_frames(
    state: &AppState,
    owner_user_id: &str,
    character_id: &str,
    animation: &str,
    task_id: &str,
    prompt: &str,
    frame_count: u32,
) -> Result<Vec<String>, AppError> {
    let mut frame_paths = Vec::with_capacity(frame_count as usize);
    for frame_index in 0..frame_count {
        // Object names are 1-based and zero-padded: frame-01.svg, frame-02.svg, …
        let object_name = format!("frame-{:02}.svg", frame_index + 1);
        let svg_bytes = build_animation_frame_svg(
            animation,
            prompt,
            frame_index,
            frame_count,
            DEFAULT_ANIMATION_FRAME_WIDTH,
            DEFAULT_ANIMATION_FRAME_HEIGHT,
        )
        .into_bytes();
        let storage_segments = vec![
            sanitize_storage_segment(character_id, "character"),
            "animation".to_string(),
            sanitize_storage_segment(animation, "clip"),
            task_id.to_string(),
        ];
        let metadata = build_asset_metadata(
            CHARACTER_ANIMATION_ASSET_KIND,
            owner_user_id,
            CHARACTER_ANIMATION_ENTITY_KIND,
            character_id,
            CHARACTER_ANIMATION_DRAFT_SLOT,
            animation,
        );
        let stored = put_character_animation_object(
            state,
            LegacyAssetPrefix::CharacterDrafts,
            storage_segments,
            object_name,
            "image/svg+xml".to_string(),
            svg_bytes,
            metadata,
        )
        .await?;
        frame_paths.push(stored.legacy_public_path);
    }
    Ok(frame_paths)
}
/// Persists a caller-supplied reference video as the clip's preview in the
/// character draft area. Fails with 400 when no parseable data URL is given —
/// the placeholder-video fallback has been removed on purpose.
async fn persist_animation_preview_video(
    state: &AppState,
    owner_user_id: &str,
    character_id: &str,
    animation: &str,
    task_id: &str,
    reference_video_data_url: Option<&str>,
) -> Result<String, AppError> {
    let parsed_video = reference_video_data_url
        .and_then(parse_video_data_url)
        .ok_or_else(|| {
            AppError::from_status(StatusCode::BAD_REQUEST).with_details(json!({
                "provider": "character-animation",
                "message": "当前策略需要真实生成视频结果,不再支持回退到仓库占位预览视频。",
            }))
        })?;
    let preview_payload = MediaPayload {
        mime_type: parsed_video.mime_type,
        extension: parsed_video.extension,
        bytes: parsed_video.bytes,
    };
    let storage_segments = vec![
        sanitize_storage_segment(character_id, "character"),
        "animation".to_string(),
        sanitize_storage_segment(animation, "clip"),
        task_id.to_string(),
    ];
    let metadata = build_asset_metadata(
        CHARACTER_ANIMATION_ASSET_KIND,
        owner_user_id,
        CHARACTER_ANIMATION_ENTITY_KIND,
        character_id,
        CHARACTER_ANIMATION_PREVIEW_SLOT,
        animation,
    );
    let stored = put_character_animation_object(
        state,
        LegacyAssetPrefix::CharacterDrafts,
        storage_segments,
        format!("preview.{}", preview_payload.extension),
        preview_payload.mime_type,
        preview_payload.bytes,
        metadata,
    )
    .await?;
    Ok(stored.legacy_public_path)
}
/// Resolves the Ark video-generation settings from application config plus the
/// incoming request, failing with 503 when the base URL or API key is missing.
fn require_ark_video_settings(
    state: &AppState,
    payload: &CharacterAnimationGenerateRequest,
) -> Result<ArkVideoSettings, AppError> {
    // Strip surrounding whitespace and any trailing slashes from the base URL.
    let configured_base = state
        .config
        .ark_character_video_base_url
        .trim()
        .trim_end_matches('/');
    if configured_base.is_empty() {
        return Err(
            AppError::from_status(StatusCode::SERVICE_UNAVAILABLE).with_details(json!({
                "provider": "ark",
                "reason": "ARK_CHARACTER_VIDEO_BASE_URL 未配置",
            })),
        );
    }
    let configured_key = state
        .config
        .ark_character_video_api_key
        .as_deref()
        .map(str::trim)
        .filter(|value| !value.is_empty());
    let Some(api_key) = configured_key else {
        return Err(
            AppError::from_status(StatusCode::SERVICE_UNAVAILABLE).with_details(json!({
                "provider": "ark",
                "reason": "ARK_CHARACTER_VIDEO_API_KEY 未配置",
            })),
        );
    };
    // The request may pin a model; otherwise fall back to the configured one.
    let model = normalize_required_text(
        payload.video_model.as_str(),
        state.config.ark_character_video_model.as_str(),
    );
    Ok(ArkVideoSettings {
        base_url: configured_base.to_string(),
        api_key: api_key.to_string(),
        // Clamp to at least 1 ms so the HTTP client timeout is never zero.
        request_timeout_ms: state.config.ark_character_video_request_timeout_ms.max(1),
        model,
    })
}
/// Builds the reqwest client used for all upstream Ark calls, applying one
/// overall request timeout.
fn build_upstream_http_client(timeout_ms: u64) -> Result<reqwest::Client, AppError> {
    let builder = reqwest::Client::builder().timeout(Duration::from_millis(timeout_ms));
    builder.build().map_err(|error| {
        AppError::from_status(StatusCode::INTERNAL_SERVER_ERROR).with_details(json!({
            "provider": "character-animation",
            "message": format!("构造上游 HTTP 客户端失败:{error}"),
        }))
    })
}
/// Loads a media source (path or data URL) and re-encodes it as a base64 data
/// URL; only image/* and video/* MIME types are accepted.
async fn resolve_media_source_as_data_url(
    state: &AppState,
    http_client: &reqwest::Client,
    source: &str,
    field: &str,
) -> Result<String, AppError> {
    // Kept for signature parity: loading currently goes through
    // `load_media_source_payload`, not this client.
    let _ = http_client;
    let payload = load_media_source_payload(state, source).await?;
    let is_image = payload.mime_type.starts_with("image/");
    let is_video = payload.mime_type.starts_with("video/");
    if !is_image && !is_video {
        return Err(
            AppError::from_status(StatusCode::BAD_REQUEST).with_details(json!({
                "provider": "character-animation",
                "field": field,
                "message": "媒体资源必须是图片或视频。",
            })),
        );
    }
    let encoded = encode_base64(payload.bytes.as_slice());
    Ok(format!("data:{};base64,{}", payload.mime_type, encoded))
}
/// Runs the full Ark image-to-video round trip: create the upstream task
/// (with moderation-safe prompt fallback), poll until a video URL appears,
/// download the clip, and store it as the draft preview.
async fn request_image_to_video_preview(
    state: &AppState,
    http_client: &reqwest::Client,
    settings: &ArkVideoSettings,
    owner_user_id: &str,
    character_id: &str,
    animation: &str,
    task_id: &str,
    prompt: &str,
    fallback_prompt: &str,
    first_frame_data_url: &str,
    last_frame_data_url: &str,
) -> Result<GeneratedAnimationPreview, AppError> {
    // `submitted_prompt` is whichever prompt the upstream finally accepted.
    let (submitted_prompt, upstream_task_id, moderation_fallback_applied) =
        create_ark_image_to_video_task(
            http_client,
            settings,
            prompt,
            fallback_prompt,
            first_frame_data_url,
            last_frame_data_url,
        )
        .await?;
    let video_url =
        wait_for_ark_content_generation_task(http_client, settings, upstream_task_id.as_str())
            .await?;
    let preview_payload =
        download_generated_video(http_client, video_url.as_str(), "下载角色动作视频失败。").await?;
    let preview_video_path = put_generated_preview_video(
        state,
        owner_user_id,
        character_id,
        animation,
        task_id,
        preview_payload,
    )
    .await?;
    Ok(GeneratedAnimationPreview {
        preview_video_path,
        upstream_task_id,
        submitted_prompt,
        moderation_fallback_applied,
    })
}
/// Parses an Ark task-creation response body and extracts the upstream task
/// id, mapping a missing id to a 502 with the standard provider details.
fn extract_created_ark_video_task_id(body: &str) -> Result<String, AppError> {
    let payload = parse_animation_json_payload(body, "创建图生视频任务失败。")?;
    extract_animation_task_id(&payload.payload).ok_or_else(|| {
        AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
            "provider": "ark",
            "message": "图生视频任务未返回任务 id。",
        }))
    })
}
/// Creates an Ark image-to-video generation task.
///
/// Tries the caller's prompt first; when the upstream rejects it with an
/// inappropriate-content moderation error and a distinct non-empty fallback
/// prompt exists, retries once with the fallback.
///
/// Returns `(submitted_prompt, upstream_task_id, moderation_fallback_applied)`.
async fn create_ark_image_to_video_task(
    http_client: &reqwest::Client,
    settings: &ArkVideoSettings,
    prompt: &str,
    fallback_prompt: &str,
    first_frame_data_url: &str,
    last_frame_data_url: &str,
) -> Result<(String, String, bool), AppError> {
    let first_try = send_ark_image_to_video_request(
        http_client,
        settings,
        prompt,
        first_frame_data_url,
        last_frame_data_url,
    )
    .await?;
    if first_try.status().is_success() {
        let body = first_try.text().await.map_err(|error| {
            map_character_animation_upstream_error(format!("读取 Ark 视频任务响应失败:{error}"))
        })?;
        let task_id = extract_created_ark_video_task_id(body.as_str())?;
        return Ok((prompt.to_string(), task_id, false));
    }
    let error_text = first_try.text().await.map_err(|error| {
        map_character_animation_upstream_error(format!("读取 Ark 视频错误响应失败:{error}"))
    })?;
    // Only retry when a usable fallback exists AND the failure is a moderation
    // rejection; any other upstream error is surfaced directly.
    if fallback_prompt.trim().is_empty()
        || fallback_prompt.trim() == prompt.trim()
        || !is_inappropriate_content_message(error_text.as_str())
    {
        return Err(parse_animation_upstream_error(
            error_text.as_str(),
            "创建图生视频任务失败。",
        ));
    }
    let second_try = send_ark_image_to_video_request(
        http_client,
        settings,
        fallback_prompt,
        first_frame_data_url,
        last_frame_data_url,
    )
    .await?;
    let second_status = second_try.status();
    let second_text = second_try.text().await.map_err(|error| {
        map_character_animation_upstream_error(format!("读取 Ark 视频重试响应失败:{error}"))
    })?;
    if !second_status.is_success() {
        return Err(parse_animation_upstream_error(
            second_text.as_str(),
            "创建图生视频任务失败。",
        ));
    }
    let task_id = extract_created_ark_video_task_id(second_text.as_str())?;
    Ok((fallback_prompt.to_string(), task_id, true))
}
/// Sends one Ark `contents/generations/tasks` creation request with the text
/// prompt plus first/last frame images, using the fixed resolution, ratio and
/// duration constants. Returns the raw HTTP response for the caller to judge.
async fn send_ark_image_to_video_request(
    http_client: &reqwest::Client,
    settings: &ArkVideoSettings,
    prompt: &str,
    first_frame_data_url: &str,
    last_frame_data_url: &str,
) -> Result<reqwest::Response, AppError> {
    http_client
        .post(format!("{}/contents/generations/tasks", settings.base_url))
        .header(
            reqwest::header::AUTHORIZATION,
            format!("Bearer {}", settings.api_key),
        )
        .header(reqwest::header::CONTENT_TYPE, "application/json")
        // Wire shape expected by the Ark content-generation API: a text part
        // followed by role-tagged first/last frame images.
        .json(&json!({
            "model": settings.model,
            "content": [
                {
                    "type": "text",
                    "text": prompt,
                },
                {
                    "type": "image_url",
                    "image_url": {
                        "url": first_frame_data_url,
                    },
                    "role": "first_frame",
                },
                {
                    "type": "image_url",
                    "image_url": {
                        "url": last_frame_data_url,
                    },
                    "role": "last_frame",
                }
            ],
            "resolution": FIXED_ARK_CHARACTER_VIDEO_RESOLUTION,
            "ratio": FIXED_ARK_CHARACTER_VIDEO_RATIO,
            "duration": FIXED_ARK_CHARACTER_VIDEO_DURATION_SECONDS,
            "watermark": false,
        }))
        .send()
        .await
        .map_err(|error| {
            map_character_animation_upstream_error(format!("请求 Ark 视频服务失败:{error}"))
        })
}
/// Polls an Ark generation task until it yields a `video_url`, fails, or the
/// deadline passes, sleeping `ARK_VIDEO_TASK_POLL_INTERVAL_MS` between polls.
///
/// NOTE(review): the overall polling deadline reuses `request_timeout_ms`,
/// which is also the per-request HTTP timeout — confirm this double duty is
/// intended.
async fn wait_for_ark_content_generation_task(
    http_client: &reqwest::Client,
    settings: &ArkVideoSettings,
    task_id: &str,
) -> Result<String, AppError> {
    let deadline = Instant::now() + Duration::from_millis(settings.request_timeout_ms);
    while Instant::now() < deadline {
        let response = http_client
            .get(format!(
                "{}/contents/generations/tasks/{}",
                settings.base_url, task_id
            ))
            .header(
                reqwest::header::AUTHORIZATION,
                format!("Bearer {}", settings.api_key),
            )
            .send()
            .await
            .map_err(|error| {
                map_character_animation_upstream_error(format!("查询 Ark 视频任务失败:{error}"))
            })?;
        let status = response.status();
        let text = response.text().await.map_err(|error| {
            map_character_animation_upstream_error(format!("读取 Ark 视频任务响应失败:{error}"))
        })?;
        if !status.is_success() {
            return Err(parse_animation_upstream_error(
                text.as_str(),
                "查询视频生成任务失败。",
            ));
        }
        let payload = parse_animation_json_payload(text.as_str(), "查询视频生成任务失败。")?;
        let normalized_status = normalize_generation_task_status(
            extract_generation_task_status(&payload.payload).as_str(),
        );
        // A video URL wins regardless of the reported status.
        if let Some(video_url) = extract_video_url(&payload.payload) {
            return Ok(video_url);
        }
        // Completed without a URL is treated as an upstream contract violation.
        if is_completed_generation_task_status(normalized_status.as_str()) {
            return Err(
                AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
                    "provider": "ark",
                    "message": "视频任务完成但没有返回 video_url。",
                    "taskId": task_id,
                })),
            );
        }
        if is_failed_generation_task_status(normalized_status.as_str()) {
            return Err(parse_animation_upstream_error(
                text.as_str(),
                "视频生成任务执行失败。",
            ));
        }
        sleep(Duration::from_millis(ARK_VIDEO_TASK_POLL_INTERVAL_MS)).await;
    }
    // Deadline exhausted without a terminal state.
    Err(
        AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
            "provider": "ark",
            "message": "视频生成任务执行超时,请稍后重试。",
            "taskId": task_id,
        })),
    )
}
/// Downloads a generated video from `video_url` and wraps it as a
/// `MediaPayload`, defaulting the MIME type to `video/mp4` when the response
/// carries no usable Content-Type header.
///
/// The body is read before the status check so a transport failure while
/// streaming is reported distinctly from an HTTP error status.
async fn download_generated_video(
    http_client: &reqwest::Client,
    video_url: &str,
    fallback_message: &str,
) -> Result<MediaPayload, AppError> {
    let response = http_client.get(video_url).send().await.map_err(|error| {
        map_character_animation_upstream_error(format!("{fallback_message}{error}"))
    })?;
    let status = response.status();
    let content_type = response
        .headers()
        .get(reqwest::header::CONTENT_TYPE)
        .and_then(|value| value.to_str().ok())
        .unwrap_or("video/mp4")
        .to_string();
    let bytes = response.bytes().await.map_err(|error| {
        map_character_animation_upstream_error(format!("{fallback_message}{error}"))
    })?;
    if !status.is_success() {
        return Err(
            AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
                "provider": "character-animation",
                "message": fallback_message,
                "status": status.as_u16(),
            })),
        );
    }
    // Derive the file extension before moving the MIME string into the
    // payload; this drops the redundant clone the original carried.
    let extension = mime_to_extension(content_type.as_str()).to_string();
    Ok(MediaPayload {
        mime_type: content_type,
        extension,
        bytes: bytes.to_vec(),
    })
}
/// Uploads a freshly generated preview video into the character draft area and
/// returns its legacy public path.
async fn put_generated_preview_video(
    state: &AppState,
    owner_user_id: &str,
    character_id: &str,
    animation: &str,
    task_id: &str,
    preview_payload: MediaPayload,
) -> Result<String, AppError> {
    let storage_segments = vec![
        sanitize_storage_segment(character_id, "character"),
        "animation".to_string(),
        sanitize_storage_segment(animation, "clip"),
        task_id.to_string(),
    ];
    let object_name = format!("preview.{}", preview_payload.extension);
    let metadata = build_asset_metadata(
        CHARACTER_ANIMATION_ASSET_KIND,
        owner_user_id,
        CHARACTER_ANIMATION_ENTITY_KIND,
        character_id,
        CHARACTER_ANIMATION_PREVIEW_SLOT,
        animation,
    );
    let stored = put_character_animation_object(
        state,
        LegacyAssetPrefix::CharacterDrafts,
        storage_segments,
        object_name,
        preview_payload.mime_type,
        preview_payload.bytes,
        metadata,
    )
    .await?;
    Ok(stored.legacy_public_path)
}
/// Publishes every animation draft in the set, then writes the aggregate
/// manifest, confirms the resulting asset object, and binds it to the
/// character. Returns the per-action animation map for the response.
async fn publish_animation_set(
    state: &AppState,
    owner_user_id: &str,
    character_id: &str,
    visual_asset_id: &str,
    animation_set_id: &str,
    animations: BTreeMap<String, CharacterAnimationDraftPayload>,
) -> Result<PublishedAnimationSet, AppError> {
    let mut action_manifests = Vec::new();
    let mut animation_map = serde_json::Map::new();
    let extraction_settings = resolve_backend_frame_extraction_settings(state);
    // BTreeMap iteration gives deterministic (sorted) action order.
    for (action, draft) in animations {
        let published = publish_single_animation_action(
            state,
            owner_user_id,
            character_id,
            visual_asset_id,
            animation_set_id,
            action.as_str(),
            draft,
            &extraction_settings,
        )
        .await?;
        animation_map.insert(action, published.animation_config);
        action_manifests.push(published.manifest);
    }
    // Guard: with an empty input map the loop produced nothing to publish.
    if action_manifests.is_empty() {
        return Err(
            AppError::from_status(StatusCode::BAD_REQUEST).with_details(json!({
                "provider": "character-animation",
                "message": "animations 至少需要包含一个有效动作帧。",
            })),
        );
    }
    // Aggregate manifest ties the set id, character, visual and all actions together.
    let manifest_body = serde_json::to_vec_pretty(&json!({
        "animationSetId": animation_set_id,
        "characterId": character_id,
        "visualAssetId": visual_asset_id,
        "actions": action_manifests,
        "animationMap": animation_map.clone(),
    }))
    .map_err(|error| {
        AppError::from_status(StatusCode::INTERNAL_SERVER_ERROR).with_details(json!({
            "provider": "character-animation",
            "message": format!("序列化角色动作总 manifest 失败:{error}"),
        }))
    })?;
    let put_result = put_character_animation_object(
        state,
        LegacyAssetPrefix::Animations,
        vec![
            sanitize_storage_segment(character_id, "character"),
            animation_set_id.to_string(),
        ],
        "manifest.json".to_string(),
        "application/json; charset=utf-8".to_string(),
        manifest_body,
        build_asset_metadata(
            CHARACTER_ANIMATION_ASSET_KIND,
            owner_user_id,
            CHARACTER_ANIMATION_ENTITY_KIND,
            character_id,
            CHARACTER_ANIMATION_SLOT,
            "animation-set",
        ),
    )
    .await?;
    // Confirm the stored manifest as an asset object, then bind it to the character.
    let confirmed = confirm_character_animation_asset_object(
        state,
        owner_user_id,
        character_id,
        animation_set_id,
        put_result.object_key,
        "application/json; charset=utf-8".to_string(),
    )
    .await?;
    bind_character_animation_asset(
        state,
        owner_user_id,
        character_id,
        confirmed.record.asset_object_id,
    )
    .await?;
    Ok(PublishedAnimationSet {
        animation_map: Value::Object(animation_map),
    })
}
/// Publishes a single animation action: resolves the final frame images,
/// uploads each frame plus a per-action `manifest.json`, and returns both the
/// manifest and the client-facing animation config for this action.
///
/// Frames come either from `draft.frames_data_urls` (pre-rendered images) or,
/// when that list is empty, from server-side extraction out of
/// `draft.preview_video_path` via ffmpeg.
async fn publish_single_animation_action(
    state: &AppState,
    owner_user_id: &str,
    character_id: &str,
    visual_asset_id: &str,
    animation_set_id: &str,
    action: &str,
    draft: CharacterAnimationDraftPayload,
    extraction_settings: &BackendFrameExtractionSettings,
) -> Result<PublishedAnimationAction, AppError> {
    let action_segment = sanitize_storage_segment(action, "clip");
    let frame_width = normalize_dimension(draft.frame_width, DEFAULT_ANIMATION_FRAME_WIDTH);
    let frame_height = normalize_dimension(draft.frame_height, DEFAULT_ANIMATION_FRAME_HEIGHT);
    // Keep playback speed inside a sane range regardless of client input.
    let fps = draft.fps.clamp(1, 60);
    let preview_video_path = draft.preview_video_path.clone();
    let loop_ = draft.loop_;
    let frame_plan = normalize_animation_frame_extraction_plan(&draft);
    // Either finalize client-provided frames, or extract frames from the
    // preview video; at least one of the two inputs must be present.
    let finalized_frames = if draft.frames_data_urls.is_empty() {
        let preview_video_path = preview_video_path.as_deref().ok_or_else(|| {
            AppError::from_status(StatusCode::BAD_REQUEST).with_details(json!({
                "provider": "character-animation",
                "message": format!("动作 {action} 缺少 framesDataUrls 与 previewVideoPath,无法发布。"),
            }))
        })?;
        extract_animation_frames_from_preview_video(
            state,
            preview_video_path,
            frame_width,
            frame_height,
            extraction_settings,
            &frame_plan,
        )
        .await?
    } else {
        let mut frames = Vec::with_capacity(draft.frames_data_urls.len());
        for frame_source in &draft.frames_data_urls {
            let frame_payload = load_media_source_payload(state, frame_source.as_str()).await?;
            let finalized = finalize_animation_frame_payload(
                frame_payload.bytes.as_slice(),
                frame_payload.mime_type.as_str(),
                frame_width,
                frame_height,
                frame_plan.apply_chroma_key,
            )?;
            frames.push(finalized);
        }
        frames
    };
    let mut frame_paths = Vec::with_capacity(finalized_frames.len());
    // Last frame's extension wins; in practice finalize_animation_frame_payload
    // always yields "png", so all frames share one extension.
    let mut frame_extension = "png".to_string();
    for (index, frame_payload) in finalized_frames.iter().enumerate() {
        frame_extension = frame_payload.extension.clone();
        // 1-based, zero-padded file names: frame01, frame02, ...
        let frame_file_name = format!("frame{:02}.{}", index + 1, frame_payload.extension);
        let put_result = put_character_animation_object(
            state,
            LegacyAssetPrefix::Animations,
            vec![
                sanitize_storage_segment(character_id, "character"),
                animation_set_id.to_string(),
                action_segment.clone(),
            ],
            frame_file_name,
            frame_payload.mime_type.clone(),
            frame_payload.bytes.clone(),
            build_asset_metadata(
                CHARACTER_ANIMATION_ASSET_KIND,
                owner_user_id,
                CHARACTER_ANIMATION_ENTITY_KIND,
                character_id,
                CHARACTER_ANIMATION_SLOT,
                action,
            ),
        )
        .await?;
        frame_paths.push(put_result.legacy_public_path);
    }
    // Public legacy path clients use as the base URL for this action's frames.
    let base_path = format!(
        "/generated-animations/{}/{}/{}",
        sanitize_storage_segment(character_id, "character"),
        animation_set_id,
        action_segment
    );
    let manifest = json!({
        "id": format!("{animation_set_id}-{action_segment}"),
        "animationSetId": animation_set_id,
        "characterId": character_id,
        "visualAssetId": visual_asset_id,
        "action": action,
        "frameCount": frame_paths.len(),
        "fps": fps,
        "loop": loop_,
        "frameWidth": frame_width,
        "frameHeight": frame_height,
        "previewVideoPath": preview_video_path,
        "framePaths": frame_paths,
    });
    let manifest_body = serde_json::to_vec_pretty(&manifest).map_err(|error| {
        AppError::from_status(StatusCode::INTERNAL_SERVER_ERROR).with_details(json!({
            "provider": "character-animation",
            "message": format!("序列化角色动作 manifest 失败:{error}"),
        }))
    })?;
    // Per-action manifest lives alongside the frame files.
    put_character_animation_object(
        state,
        LegacyAssetPrefix::Animations,
        vec![
            sanitize_storage_segment(character_id, "character"),
            animation_set_id.to_string(),
            action_segment,
        ],
        "manifest.json".to_string(),
        "application/json; charset=utf-8".to_string(),
        manifest_body,
        build_asset_metadata(
            CHARACTER_ANIMATION_ASSET_KIND,
            owner_user_id,
            CHARACTER_ANIMATION_ENTITY_KIND,
            character_id,
            CHARACTER_ANIMATION_SLOT,
            action,
        ),
    )
    .await?;
    // Client-facing config describing how to locate and play the frames.
    let mut animation_config = serde_json::Map::from_iter([
        ("folder".to_string(), json!(action)),
        ("prefix".to_string(), json!("frame")),
        ("frames".to_string(), json!(frame_paths.len())),
        ("startFrame".to_string(), json!(1)),
        ("extension".to_string(), json!(frame_extension)),
        ("basePath".to_string(), json!(base_path)),
        ("frameWidth".to_string(), json!(frame_width)),
        ("frameHeight".to_string(), json!(frame_height)),
        ("fps".to_string(), json!(fps)),
        ("loop".to_string(), json!(loop_)),
    ]);
    if let Some(preview_video_path) = preview_video_path {
        animation_config.insert("previewVideoPath".to_string(), json!(preview_video_path));
    }
    Ok(PublishedAnimationAction {
        manifest,
        animation_config: Value::Object(animation_config),
    })
}
/// Uploads one private object to OSS under the given legacy asset prefix and
/// path segments, attaching the provided metadata.
///
/// NOTE(review): a fresh `reqwest::Client` (and its connection pool) is built
/// on every call; reusing a shared client from `state` would avoid repeated
/// TLS/pool setup — confirm whether `AppState` exposes one.
async fn put_character_animation_object(
    state: &AppState,
    prefix: LegacyAssetPrefix,
    path_segments: Vec<String>,
    file_name: String,
    content_type: String,
    body: Vec<u8>,
    metadata: BTreeMap<String, String>,
) -> Result<platform_oss::OssPutObjectResponse, AppError> {
    require_oss_client(state)?
        .put_object(
            &reqwest::Client::new(),
            OssPutObjectRequest {
                prefix,
                path_segments,
                file_name,
                content_type: Some(content_type),
                access: OssObjectAccess::Private,
                metadata,
                body,
            },
        )
        .await
        .map_err(map_character_animation_oss_error)
}
/// Confirms an already-uploaded OSS object as an asset record in Spacetime.
///
/// HEADs the object first so the stored record carries the authoritative
/// bucket, size, etag, and content type (falling back to the caller-supplied
/// `content_type` when OSS reports none).
async fn confirm_character_animation_asset_object(
    state: &AppState,
    owner_user_id: &str,
    character_id: &str,
    source_job_id: &str,
    object_key: String,
    content_type: String,
) -> Result<module_assets::ConfirmAssetObjectResult, AppError> {
    let oss_client = require_oss_client(state)?;
    let head = oss_client
        .head_object(&reqwest::Client::new(), OssHeadObjectRequest { object_key })
        .await
        .map_err(map_character_animation_oss_error)?;
    let now_micros = current_utc_micros();
    let record = state
        .spacetime_client()
        .confirm_asset_object(
            build_asset_object_upsert_input(
                generate_asset_object_id(now_micros),
                head.bucket,
                head.object_key,
                AssetObjectAccessPolicy::Private,
                // Prefer the content type OSS actually stored.
                head.content_type.or(Some(content_type)),
                head.content_length,
                head.etag,
                CHARACTER_ANIMATION_ASSET_KIND.to_string(),
                Some(source_job_id.to_string()),
                Some(owner_user_id.to_string()),
                None,
                Some(character_id.to_string()),
                now_micros,
            )
            .map_err(map_asset_object_prepare_error)?,
        )
        .await
        .map_err(map_character_animation_spacetime_error)?;
    Ok(module_assets::ConfirmAssetObjectResult { record })
}
/// Binds a confirmed asset object to the character entity under the
/// character-animation slot, so lookups by entity find the animation set.
async fn bind_character_animation_asset(
    state: &AppState,
    owner_user_id: &str,
    character_id: &str,
    asset_object_id: String,
) -> Result<(), AppError> {
    let now_micros = current_utc_micros();
    state
        .spacetime_client()
        .bind_asset_object_to_entity(
            build_asset_entity_binding_input(
                generate_asset_binding_id(now_micros),
                asset_object_id,
                CHARACTER_ANIMATION_ENTITY_KIND.to_string(),
                character_id.to_string(),
                CHARACTER_ANIMATION_SLOT.to_string(),
                CHARACTER_ANIMATION_ASSET_KIND.to_string(),
                Some(owner_user_id.to_string()),
                None,
                now_micros,
            )
            .map_err(map_asset_binding_prepare_error)?,
        )
        .await
        .map_err(map_character_animation_spacetime_error)?;
    Ok(())
}
/// Uploads a user-imported reference video (already parsed from a data URL)
/// into the character-drafts area, keyed by character/animation/draft id.
///
/// The file name is derived from `source_label` (sanitized) plus the
/// extension inferred from the video's MIME type.
async fn put_imported_video_object(
    state: &AppState,
    owner_user_id: &str,
    character_id: &str,
    animation: &str,
    draft_id: &str,
    source_label: &str,
    parsed_video: ParsedVideoDataUrl,
) -> Result<platform_oss::OssPutObjectResponse, AppError> {
    let oss_client = require_oss_client(state)?;
    let file_name = format!(
        "{}.{}",
        sanitize_storage_segment(source_label, "imported-video"),
        parsed_video.extension
    );
    oss_client
        .put_object(
            &reqwest::Client::new(),
            OssPutObjectRequest {
                prefix: LegacyAssetPrefix::CharacterDrafts,
                path_segments: vec![
                    sanitize_storage_segment(character_id, "character"),
                    "animation".to_string(),
                    sanitize_storage_segment(animation, "clip"),
                    draft_id.to_string(),
                ],
                file_name,
                content_type: Some(parsed_video.mime_type),
                access: OssObjectAccess::Private,
                metadata: build_asset_metadata(
                    CHARACTER_ANIMATION_REFERENCE_ASSET_KIND,
                    owner_user_id,
                    CHARACTER_ANIMATION_ENTITY_KIND,
                    character_id,
                    CHARACTER_ANIMATION_REFERENCE_SLOT,
                    animation,
                ),
                body: parsed_video.bytes,
            },
        )
        .await
        .map_err(map_character_animation_oss_error)
}
/// Loads the persisted character workflow cache from OSS, if any.
///
/// Returns `Ok(None)` when the object does not exist (signing fails with
/// `ObjectNotFound`, or the GET returns 404) or when the stored payload does
/// not match the requested character/scope — a mismatch is treated as a miss
/// rather than an error.
async fn load_workflow_cache(
    state: &AppState,
    character_id: &str,
    cache_scope_id: Option<&str>,
) -> Result<Option<CharacterWorkflowCachePayload>, AppError> {
    let oss_client = require_oss_client(state)?;
    let object_key = workflow_cache_object_key(character_id, cache_scope_id);
    let signed = match oss_client.sign_get_object_url(OssSignedGetObjectUrlRequest {
        object_key,
        expire_seconds: Some(60),
    }) {
        Ok(signed) => signed,
        Err(platform_oss::OssError::ObjectNotFound(_)) => return Ok(None),
        Err(error) => return Err(map_character_animation_oss_error(error)),
    };
    let response = reqwest::Client::new()
        .get(signed.signed_url)
        .send()
        .await
        .map_err(|error| {
            AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
                "provider": "aliyun-oss",
                "message": format!("读取角色工作流缓存失败:{error}"),
            }))
        })?;
    // 404 from the signed URL also means "no cache yet".
    if response.status() == reqwest::StatusCode::NOT_FOUND {
        return Ok(None);
    }
    let response = response.error_for_status().map_err(|error| {
        AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
            "provider": "aliyun-oss",
            "message": format!("读取角色工作流缓存失败:{error}"),
        }))
    })?;
    let cache = response
        .json::<CharacterWorkflowCachePayload>()
        .await
        .map_err(|error| {
            AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
                "provider": "aliyun-oss",
                "message": format!("解析角色工作流缓存失败:{error}"),
            }))
        })?;
    // Defensive identity check: discard a payload stored under the right key
    // but describing a different character or scope.
    if cache.character_id == character_id && cache.cache_scope_id.as_deref() == cache_scope_id {
        Ok(Some(cache))
    } else {
        Ok(None)
    }
}
/// Serializes and uploads the character workflow cache to its well-known OSS
/// location (`.../workflow-cache/workflow-cache.json`), overwriting any
/// previous version.
async fn save_workflow_cache(
    state: &AppState,
    cache: CharacterWorkflowCachePayload,
) -> Result<(), AppError> {
    let oss_client = require_oss_client(state)?;
    let body = serde_json::to_vec_pretty(&cache).map_err(|error| {
        AppError::from_status(StatusCode::INTERNAL_SERVER_ERROR).with_details(json!({
            "provider": "character-workflow-cache",
            "message": format!("序列化角色工作流缓存失败:{error}"),
        }))
    })?;
    oss_client
        .put_object(
            &reqwest::Client::new(),
            OssPutObjectRequest {
                prefix: LegacyAssetPrefix::CharacterDrafts,
                path_segments: workflow_cache_path_segments(&cache),
                file_name: "workflow-cache.json".to_string(),
                content_type: Some("application/json; charset=utf-8".to_string()),
                access: OssObjectAccess::Private,
                metadata: build_workflow_cache_metadata(
                    "asset-tool",
                    cache.character_id.as_str(),
                    cache.cache_scope_id.as_deref(),
                ),
                body,
            },
        )
        .await
        .map_err(map_character_animation_oss_error)?;
    Ok(())
}
/// Converts a client save request into the canonical cache payload:
/// trims/defaults every text field, normalizes draft entries, and stamps the
/// provided `updated_at` timestamp.
fn normalize_workflow_cache_payload(
    payload: CharacterWorkflowCacheSaveRequest,
    updated_at: String,
) -> CharacterWorkflowCachePayload {
    let character_id = normalize_required_text(payload.character_id.as_str(), "character");
    let cache_scope_id = trim_optional_text(payload.cache_scope_id.as_deref());
    CharacterWorkflowCachePayload {
        character_id: character_id.clone(),
        cache_scope_id,
        visual_prompt_text: clamp_prompt_seed_text(payload.visual_prompt_text.as_deref()),
        animation_prompt_text: clamp_prompt_seed_text(payload.animation_prompt_text.as_deref()),
        visual_drafts: normalize_visual_drafts(character_id.as_str(), payload.visual_drafts),
        selected_visual_draft_id: trim_optional_text(payload.selected_visual_draft_id.as_deref())
            .unwrap_or_default(),
        // Fall back to the default "idle" clip when no selection was saved.
        selected_animation: trim_optional_text(payload.selected_animation.as_deref())
            .unwrap_or_else(|| "idle".to_string()),
        image_src: trim_optional_text(payload.image_src.as_deref()),
        generated_visual_asset_id: trim_optional_text(payload.generated_visual_asset_id.as_deref()),
        generated_animation_set_id: trim_optional_text(
            payload.generated_animation_set_id.as_deref(),
        ),
        // Only keep the map when it is a JSON object; anything else is dropped.
        animation_map: payload.animation_map.filter(Value::is_object),
        updated_at: Some(updated_at),
    }
}
/// Normalizes client-supplied visual drafts: drafts without a usable
/// `image_src` are dropped, blank ids/labels get positional defaults, and
/// zero dimensions fall back to 1024x1536.
fn normalize_visual_drafts(
    character_id: &str,
    visual_drafts: Vec<CharacterVisualDraftPayload>,
) -> Vec<CharacterVisualDraftPayload> {
    let mut normalized = Vec::with_capacity(visual_drafts.len());
    for (index, draft) in visual_drafts.into_iter().enumerate() {
        // A draft is only kept when its image source is non-blank.
        let Some(image_src) = trim_optional_text(Some(draft.image_src.as_str())) else {
            continue;
        };
        let ordinal = index + 1;
        let id = trim_optional_text(Some(draft.id.as_str()))
            .unwrap_or_else(|| format!("{character_id}-draft-{ordinal}"));
        let label = trim_optional_text(Some(draft.label.as_str()))
            .unwrap_or_else(|| format!("候选 {ordinal}"));
        let width = if draft.width == 0 { 1024 } else { draft.width };
        let height = if draft.height == 0 { 1536 } else { draft.height };
        normalized.push(CharacterVisualDraftPayload {
            id,
            label,
            image_src,
            width,
            height,
        });
    }
    normalized
}
/// Builds the OSS path segments for a workflow cache: an optional scope
/// ("world") segment, the character segment, then the fixed
/// `workflow-cache` folder.
fn workflow_cache_path_segments(cache: &CharacterWorkflowCachePayload) -> Vec<String> {
    let character_segment = sanitize_storage_segment(cache.character_id.as_str(), "character");
    let mut segments = Vec::with_capacity(3);
    if let Some(cache_scope_id) = cache.cache_scope_id.as_deref() {
        segments.push(sanitize_storage_segment(cache_scope_id, "world"));
    }
    segments.push(character_segment);
    segments.push("workflow-cache".to_string());
    segments
}
/// Computes the full OSS object key for a character's workflow cache.
/// A blank/whitespace scope id is treated the same as no scope.
fn workflow_cache_object_key(character_id: &str, cache_scope_id: Option<&str>) -> String {
    let character_segment = sanitize_storage_segment(character_id, "character");
    let scope = cache_scope_id.and_then(|value| trim_optional_text(Some(value)));
    match scope {
        Some(scope_id) => format!(
            "generated-character-drafts/{}/{}/workflow-cache/workflow-cache.json",
            sanitize_storage_segment(scope_id.as_str(), "world"),
            character_segment
        ),
        None => format!(
            "generated-character-drafts/{}/workflow-cache/workflow-cache.json",
            character_segment
        ),
    }
}
/// Maps an AI task snapshot into the client-facing animation job status
/// payload, pulling request fields back out of the stored request JSON.
///
/// Missing or unparsable JSON degrades gracefully: the request payload falls
/// back to `{}` and the structured result to `None`.
fn build_character_animation_job_payload(task: AiTaskSnapshot) -> CharacterAssetJobStatusPayload {
    let request_payload = task
        .request_payload_json
        .as_deref()
        .and_then(|value| serde_json::from_str::<Value>(value).ok())
        .unwrap_or_else(|| json!({}));
    let result = task
        .latest_structured_payload_json
        .as_deref()
        .and_then(|value| serde_json::from_str::<Value>(value).ok());
    CharacterAssetJobStatusPayload {
        task_id: task.task_id,
        kind: "animation".to_string(),
        // Cancelled tasks surface as Failed to the client.
        status: match task.status {
            AiTaskStatus::Pending => CharacterAssetJobStatusText::Queued,
            AiTaskStatus::Running => CharacterAssetJobStatusText::Running,
            AiTaskStatus::Completed => CharacterAssetJobStatusText::Completed,
            AiTaskStatus::Failed | AiTaskStatus::Cancelled => CharacterAssetJobStatusText::Failed,
        },
        character_id: request_payload
            .get("characterId")
            .and_then(Value::as_str)
            .unwrap_or_default()
            .to_string(),
        animation: request_payload
            .get("animation")
            .and_then(Value::as_str)
            .map(ToOwned::to_owned),
        strategy: request_payload
            .get("strategy")
            .and_then(Value::as_str)
            .map(ToOwned::to_owned),
        model: request_payload
            .get("model")
            .and_then(Value::as_str)
            .unwrap_or(CHARACTER_ANIMATION_MODEL)
            .to_string(),
        prompt: request_payload
            .get("prompt")
            .and_then(Value::as_str)
            .unwrap_or_default()
            .to_string(),
        created_at: format_utc_micros(task.created_at_micros),
        updated_at: format_utc_micros(task.updated_at_micros),
        result,
        error_message: task.failure_message,
    }
}
/// Looks up a built-in motion template by id; surrounding whitespace in `id`
/// is ignored.
pub(crate) fn find_motion_template(id: &str) -> Option<&'static MotionTemplate> {
    let wanted = id.trim();
    BUILT_IN_MOTION_TEMPLATES
        .iter()
        .find(|template| template.id == wanted)
}
/// Picks the model name matching the requested generation strategy, falling
/// back to `CHARACTER_ANIMATION_MODEL` when the chosen field is blank.
fn resolve_character_animation_model(payload: &CharacterAnimationGenerateRequest) -> String {
    let candidate = match payload.strategy {
        CharacterAnimationStrategy::ImageSequence => payload.image_sequence_model.as_str(),
        CharacterAnimationStrategy::ImageToVideo => payload.video_model.as_str(),
        CharacterAnimationStrategy::MotionTransfer => payload.motion_transfer_model.as_str(),
        CharacterAnimationStrategy::ReferenceToVideo => payload.reference_video_model.as_str(),
    };
    normalize_required_text(candidate, CHARACTER_ANIMATION_MODEL)
}
/// Builds the generation result payload: a preview video path when one was
/// produced, otherwise the list of generated image sources.
fn build_animation_generate_result_payload(generated: &CharacterAnimationGeneratedDraft) -> Value {
    if let Some(preview_video_path) = generated.preview_video_path.as_ref() {
        json!({
            "previewVideoPath": preview_video_path,
        })
    } else {
        json!({
            "imageSources": generated.image_sources,
        })
    }
}
/// Reads ffmpeg/ffprobe binary paths and the extraction timeout from app
/// config, substituting sane defaults ("ffmpeg"/"ffprobe" on PATH, and a
/// minimum 1s timeout) for blank or too-small values.
fn resolve_backend_frame_extraction_settings(state: &AppState) -> BackendFrameExtractionSettings {
    BackendFrameExtractionSettings {
        ffmpeg_path: normalize_required_text(
            state.config.character_animation_ffmpeg_path.as_str(),
            "ffmpeg",
        ),
        ffprobe_path: normalize_required_text(
            state.config.character_animation_ffprobe_path.as_str(),
            "ffprobe",
        ),
        timeout_ms: state
            .config
            .character_animation_frame_extract_timeout_ms
            .max(1_000),
    }
}
/// Derives a sanitized frame-extraction plan from a draft payload.
///
/// Looping clips default to sampling the 12%–94% window of the video so the
/// first and last extracted frames do not duplicate each other; non-looping
/// clips cover the full duration. The end ratio is always kept at least 0.05
/// above the start ratio and capped at 1.0.
fn normalize_animation_frame_extraction_plan(
    draft: &CharacterAnimationDraftPayload,
) -> AnimationFrameExtractionPlan {
    let (default_start, default_end): (f32, f32) = if draft.loop_ {
        (0.12, 0.94)
    } else {
        (0.0, 1.0)
    };
    let sample_start_ratio = normalize_sample_ratio(draft.sample_start_ratio, default_start);
    let min_end = sample_start_ratio + 0.05;
    let sample_end_ratio = normalize_sample_ratio(draft.sample_end_ratio, default_end.max(min_end))
        .max(min_end)
        .min(1.0);
    AnimationFrameExtractionPlan {
        frame_count: normalize_frame_count(draft.frame_count.unwrap_or(8)),
        apply_chroma_key: draft.apply_chroma_key.unwrap_or(true),
        sample_start_ratio,
        sample_end_ratio,
    }
}
/// Resolves an optional sampling ratio to a value in `[0.0, 1.0]`.
/// Non-finite candidates (NaN/inf) are replaced by the fallback before
/// clamping.
fn normalize_sample_ratio(value: Option<f32>, fallback: f32) -> f32 {
    let candidate = value.unwrap_or(fallback);
    let chosen = if candidate.is_finite() {
        candidate
    } else {
        fallback
    };
    chosen.clamp(0.0, 1.0)
}
/// Downloads the preview video, then extracts `plan.frame_count` evenly
/// spaced frames with ffmpeg and finalizes each (chroma-key + resize to the
/// target frame size).
///
/// Uses a per-call temp directory that is removed (best-effort) whether
/// extraction succeeds or fails.
///
/// # Errors
/// Returns `400 BAD_REQUEST` when the source is not a video, and propagates
/// ffprobe/ffmpeg/file-system failures otherwise.
async fn extract_animation_frames_from_preview_video(
    state: &AppState,
    preview_video_path: &str,
    frame_width: u32,
    frame_height: u32,
    extraction_settings: &BackendFrameExtractionSettings,
    plan: &AnimationFrameExtractionPlan,
) -> Result<Vec<FinalizedAnimationFrame>, AppError> {
    let preview_payload = load_media_source_payload(state, preview_video_path).await?;
    if !preview_payload.mime_type.starts_with("video/") {
        return Err(
            AppError::from_status(StatusCode::BAD_REQUEST).with_details(json!({
                "provider": "character-animation",
                "message": "previewVideoPath 必须指向视频资源。",
            })),
        );
    }
    let temp_dir = create_animation_temp_dir()?;
    let input_path = temp_dir.join(format!("preview.{}", preview_payload.extension));
    fs::write(&input_path, preview_payload.bytes).map_err(|error| {
        AppError::from_status(StatusCode::INTERNAL_SERVER_ERROR).with_details(json!({
            "provider": "character-animation",
            "message": format!("写入动作抽帧临时视频失败:{error}"),
        }))
    })?;
    // The closure scopes the fallible extraction so the temp dir cleanup
    // below runs on both success and failure paths.
    let extraction_result = (|| {
        let duration_seconds =
            probe_video_duration_seconds(&input_path, extraction_settings)?.max(0.001);
        let mut finalized_frames = Vec::with_capacity(plan.frame_count as usize);
        for frame_index in 0..plan.frame_count {
            let target_seconds = compute_sample_time_seconds(
                duration_seconds,
                frame_index,
                plan.frame_count,
                plan.sample_start_ratio,
                plan.sample_end_ratio,
                plan.frame_count > 1 && plan.sample_end_ratio < 1.0,
            );
            let raw_frame_path = temp_dir.join(format!("raw-frame-{:02}.png", frame_index + 1));
            extract_video_frame_to_png(
                &input_path,
                &raw_frame_path,
                target_seconds,
                extraction_settings,
            )?;
            let frame_bytes = fs::read(&raw_frame_path).map_err(|error| {
                AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
                    "provider": "character-animation",
                    "message": format!("读取动作抽帧结果失败:{error}"),
                }))
            })?;
            finalized_frames.push(finalize_animation_frame_payload(
                frame_bytes.as_slice(),
                "image/png",
                frame_width,
                frame_height,
                plan.apply_chroma_key,
            )?);
        }
        Ok::<_, AppError>(finalized_frames)
    })();
    // Best-effort cleanup; a failure to delete temp files is not an error.
    let _ = fs::remove_dir_all(&temp_dir);
    extraction_result
}
/// Creates a unique per-call temp directory for frame extraction, named with
/// the current microsecond timestamp to avoid collisions.
fn create_animation_temp_dir() -> Result<PathBuf, AppError> {
    let dir_name = format!(
        "genarrative-character-animation-{}",
        current_utc_micros()
    );
    let temp_dir = std::env::temp_dir().join(dir_name);
    if let Err(error) = fs::create_dir_all(&temp_dir) {
        return Err(
            AppError::from_status(StatusCode::INTERNAL_SERVER_ERROR).with_details(json!({
                "provider": "character-animation",
                "message": format!("创建动作抽帧临时目录失败:{error}"),
            })),
        );
    }
    Ok(temp_dir)
}
/// Runs ffprobe against the input file and parses its reported duration in
/// seconds from stdout.
fn probe_video_duration_seconds(
    input_path: &Path,
    extraction_settings: &BackendFrameExtractionSettings,
) -> Result<f64, AppError> {
    let input_arg = input_path.to_string_lossy();
    // `-of default=noprint_wrappers=1:nokey=1` makes ffprobe print only the
    // bare duration value.
    let args = [
        "-v",
        "error",
        "-show_entries",
        "format=duration",
        "-of",
        "default=noprint_wrappers=1:nokey=1",
        input_arg.as_ref(),
    ];
    let output = run_process_with_timeout(
        &extraction_settings.ffprobe_path,
        &args,
        extraction_settings.timeout_ms,
        "探测动作预览视频时长失败",
    )?;
    let raw_duration = String::from_utf8_lossy(&output.stdout);
    raw_duration.trim().parse::<f64>().map_err(|error| {
        AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
            "provider": "character-animation",
            "message": format!("解析动作预览视频时长失败:{error}"),
        }))
    })
}
/// Maps a frame index to a timestamp (seconds) inside the configured sampling
/// window of the video.
///
/// In loop mode progress runs over `frame_count` steps (so the last frame
/// stops short of the window end, avoiding a duplicate of the first frame);
/// otherwise it spans the window inclusively over `frame_count - 1` steps.
/// The result is capped 1 ms before the end of the video.
fn compute_sample_time_seconds(
    duration_seconds: f64,
    frame_index: u32,
    frame_count: u32,
    sample_start_ratio: f32,
    sample_end_ratio: f32,
    loop_mode: bool,
) -> f64 {
    let duration_seconds = duration_seconds.max(0.001);
    let window_start = duration_seconds * f64::from(sample_start_ratio);
    let window_end = duration_seconds * f64::from(sample_end_ratio);
    let window = (window_end - window_start).max(0.001);
    let denominator = if loop_mode {
        frame_count.max(1)
    } else {
        frame_count.saturating_sub(1).max(1)
    };
    let progress = f64::from(frame_index) / f64::from(denominator);
    let upper_bound = (duration_seconds - 0.001).max(0.0);
    (window_start + window * progress).min(upper_bound)
}
/// Extracts a single frame at `target_seconds` from the input video into a
/// PNG file with ffmpeg, verifying the output file actually exists.
fn extract_video_frame_to_png(
    input_path: &Path,
    output_path: &Path,
    target_seconds: f64,
    extraction_settings: &BackendFrameExtractionSettings,
) -> Result<(), AppError> {
    // Millisecond precision is plenty for frame seeking.
    let seek_arg = format!("{target_seconds:.3}");
    let input_arg = input_path.to_string_lossy();
    let output_arg = output_path.to_string_lossy();
    let args = [
        "-y",
        "-ss",
        seek_arg.as_str(),
        "-i",
        input_arg.as_ref(),
        "-frames:v",
        "1",
        "-f",
        "image2",
        output_arg.as_ref(),
    ];
    run_process_with_timeout(
        &extraction_settings.ffmpeg_path,
        &args,
        extraction_settings.timeout_ms,
        "抽取动作视频帧失败",
    )?;
    if output_path.is_file() {
        Ok(())
    } else {
        // ffmpeg can exit 0 without producing output (e.g. seek past EOF).
        Err(
            AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
                "provider": "character-animation",
                "message": "ffmpeg 已执行但未产出动作帧文件。",
            })),
        )
    }
}
/// Spawns `program` with `args` and polls it until it exits or `timeout_ms`
/// elapses (20 ms poll interval). On timeout the child is killed and reaped.
///
/// A non-zero exit status is turned into a `502` carrying the child's stderr
/// (preferred), stdout, or a generic exit-code message.
///
/// NOTE(review): stdout/stderr pipes are only drained after the child exits;
/// a child that writes more than the OS pipe buffer before exiting would
/// block and then hit the timeout. ffprobe/ffmpeg output here is expected to
/// be small — confirm if usage changes.
fn run_process_with_timeout(
    program: &str,
    args: &[&str],
    timeout_ms: u64,
    fallback_message: &str,
) -> Result<std::process::Output, AppError> {
    let mut child = Command::new(program)
        .args(args)
        .stdin(Stdio::null())
        .stdout(Stdio::piped())
        .stderr(Stdio::piped())
        .spawn()
        .map_err(|error| {
            AppError::from_status(StatusCode::SERVICE_UNAVAILABLE).with_details(json!({
                "provider": "character-animation",
                "message": format!("{fallback_message}:无法启动进程 {program}:{error}"),
            }))
        })?;
    let deadline = Instant::now() + Duration::from_millis(timeout_ms);
    loop {
        // try_wait is non-blocking; Some(status) means the child has exited.
        if let Some(status) = child.try_wait().map_err(|error| {
            AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
                "provider": "character-animation",
                "message": format!("{fallback_message}:等待进程状态失败:{error}"),
            }))
        })? {
            let output = child.wait_with_output().map_err(|error| {
                AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
                    "provider": "character-animation",
                    "message": format!("{fallback_message}:读取进程输出失败:{error}"),
                }))
            })?;
            if !status.success() {
                let stderr = String::from_utf8_lossy(&output.stderr).trim().to_string();
                let stdout = String::from_utf8_lossy(&output.stdout).trim().to_string();
                // Prefer stderr, then stdout, then a generic exit-code note.
                let detail = if !stderr.is_empty() {
                    stderr
                } else if !stdout.is_empty() {
                    stdout
                } else {
                    format!("{program} 返回非零退出码:{status}")
                };
                return Err(
                    AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
                        "provider": "character-animation",
                        "message": format!("{fallback_message}:{detail}"),
                    })),
                );
            }
            return Ok(output);
        }
        if Instant::now() >= deadline {
            // Kill and reap to avoid leaving a zombie; errors are ignored
            // since we are already reporting the timeout.
            let _ = child.kill();
            let _ = child.wait();
            return Err(
                AppError::from_status(StatusCode::GATEWAY_TIMEOUT).with_details(json!({
                    "provider": "character-animation",
                    "message": format!("{fallback_message}:执行超时,已等待 {} ms。", timeout_ms),
                })),
            );
        }
        thread::sleep(Duration::from_millis(20));
    }
}
/// Decodes a raw frame image, optionally removes its background (chroma key),
/// letterboxes it into the target frame size on a transparent canvas, and
/// re-encodes the result as PNG.
///
/// Unknown MIME types fall back to format auto-detection from the bytes.
fn finalize_animation_frame_payload(
    source: &[u8],
    mime_type: &str,
    frame_width: u32,
    frame_height: u32,
    apply_chroma_key: bool,
) -> Result<FinalizedAnimationFrame, AppError> {
    let image_format = match mime_type {
        "image/png" => Some(ImageFormat::Png),
        "image/jpeg" | "image/jpg" => Some(ImageFormat::Jpeg),
        "image/webp" => Some(ImageFormat::WebP),
        _ => None,
    };
    let mut image = match image_format {
        Some(format) => image::load_from_memory_with_format(source, format),
        None => image::load_from_memory(source),
    }
    .map_err(|error| {
        AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
            "provider": "character-animation",
            "message": format!("解析动作帧图片失败:{error}"),
        }))
    })?
    // Work in RGBA so background removal and transparent padding are possible.
    .to_rgba8();
    let (source_width, source_height) = image.dimensions();
    if apply_chroma_key {
        remove_background_from_rgba(
            image.as_mut(),
            source_width as usize,
            source_height as usize,
        );
    }
    // Guard against zero target dimensions before the contain-fit resize.
    let normalized = contain_rgba_image(&image, frame_width.max(1), frame_height.max(1));
    let mut encoded = Vec::new();
    let encoder = PngEncoder::new(&mut encoded);
    encoder
        .write_image(
            normalized.as_raw(),
            normalized.width(),
            normalized.height(),
            ColorType::Rgba8.into(),
        )
        .map_err(|error| {
            AppError::from_status(StatusCode::INTERNAL_SERVER_ERROR).with_details(json!({
                "provider": "character-animation",
                "message": format!("编码动作帧 PNG 失败:{error}"),
            }))
        })?;
    Ok(FinalizedAnimationFrame {
        bytes: encoded,
        mime_type: "image/png".to_string(),
        extension: "png".to_string(),
    })
}
/// Fits `source` into a `target_width` x `target_height` canvas using
/// "contain" semantics: uniform scale preserving aspect ratio, centered, with
/// transparent padding around the resized image.
fn contain_rgba_image(source: &RgbaImage, target_width: u32, target_height: u32) -> RgbaImage {
    let source_width = source.width().max(1);
    let source_height = source.height().max(1);
    // Choose the smaller axis scale so the whole image fits inside the target.
    let width_scale = target_width as f32 / source_width as f32;
    let height_scale = target_height as f32 / source_height as f32;
    let scale = width_scale.min(height_scale);
    let draw_width = ((source_width as f32 * scale).round() as u32)
        .max(1)
        .min(target_width);
    let draw_height = ((source_height as f32 * scale).round() as u32)
        .max(1)
        .min(target_height);
    let resized = image::imageops::resize(source, draw_width, draw_height, FilterType::Triangle);
    let mut canvas = RgbaImage::from_pixel(target_width, target_height, Rgba([0, 0, 0, 0]));
    let offset_x = i64::from((target_width - draw_width) / 2);
    let offset_y = i64::from((target_height - draw_height) / 2);
    image::imageops::overlay(&mut canvas, &resized, offset_x, offset_y);
    canvas
}
/// Resolves a media source string into bytes + MIME type.
///
/// A `data:` URL is decoded inline; anything else is treated as a legacy OSS
/// path: the object is HEADed for its content type, fetched through a
/// short-lived (60 s) signed URL, and returned as a [`MediaPayload`].
async fn load_media_source_payload(
    state: &AppState,
    source: &str,
) -> Result<MediaPayload, AppError> {
    if let Some(payload) = parse_media_data_url(source) {
        return Ok(payload);
    }
    let object_key = resolve_object_key_from_legacy_path(source, "framesDataUrls")?;
    let oss_client = require_oss_client(state)?;
    // HEAD first so we know the stored content type before downloading.
    let head = oss_client
        .head_object(
            &reqwest::Client::new(),
            OssHeadObjectRequest {
                object_key: object_key.clone(),
            },
        )
        .await
        .map_err(map_character_animation_oss_error)?;
    let signed = oss_client
        .sign_get_object_url(OssSignedGetObjectUrlRequest {
            object_key,
            expire_seconds: Some(60),
        })
        .map_err(map_character_animation_oss_error)?;
    let bytes = reqwest::Client::new()
        .get(signed.signed_url)
        .send()
        .await
        .map_err(|error| {
            AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
                "provider": "aliyun-oss",
                "message": format!("读取角色动作帧失败:{error}"),
            }))
        })?
        .error_for_status()
        .map_err(|error| {
            AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
                "provider": "aliyun-oss",
                "message": format!("读取角色动作帧失败:{error}"),
            }))
        })?
        .bytes()
        .await
        .map_err(|error| {
            AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
                "provider": "aliyun-oss",
                "message": format!("读取角色动作帧内容失败:{error}"),
            }))
        })?
        .to_vec();
    let mime_type = head
        .content_type
        .unwrap_or_else(|| "application/octet-stream".to_string());
    let extension = mime_to_extension(mime_type.as_str()).to_string();
    Ok(MediaPayload {
        mime_type,
        extension,
        bytes,
    })
}
/// Parses a base64 `data:` URL into a media payload.
///
/// Returns `None` unless the URL is well-formed, the MIME type is an image or
/// video type, and the decoded body is non-empty.
fn parse_media_data_url(value: &str) -> Option<MediaPayload> {
    let without_scheme = value.trim().strip_prefix("data:")?;
    let (raw_mime, encoded) = without_scheme.split_once(";base64,")?;
    let mime_type = raw_mime.trim();
    let is_media = mime_type.starts_with("image/") || mime_type.starts_with("video/");
    if !is_media {
        return None;
    }
    let bytes = decode_base64(encoded)?;
    if bytes.is_empty() {
        return None;
    }
    Some(MediaPayload {
        mime_type: mime_type.to_string(),
        extension: mime_to_extension(mime_type).to_string(),
        bytes,
    })
}
/// Converts a legacy public media path into an OSS object key by trimming
/// whitespace and the leading slash.
///
/// # Errors
/// Returns `400 BAD_REQUEST` for empty paths and for `data:` URLs (which
/// should have been handled by the data-URL parser instead).
fn resolve_object_key_from_legacy_path(value: &str, field: &str) -> Result<String, AppError> {
    let trimmed = value.trim();
    // Shared 400 builder so both rejection branches emit identical shapes.
    let reject = |message: &str| {
        AppError::from_status(StatusCode::BAD_REQUEST).with_details(json!({
            "provider": "character-animation",
            "field": field,
            "message": message,
        }))
    };
    if trimmed.is_empty() {
        return Err(reject("媒体资源路径不能为空。"));
    }
    if trimmed.starts_with("data:") {
        return Err(reject("无法解析的 data URL 媒体资源。"));
    }
    Ok(trimmed.trim_start_matches('/').to_string())
}
/// Parses a base64 `data:` URL that must carry a `video/*` MIME type.
/// Returns `None` for malformed URLs, non-video types, or empty bodies.
fn parse_video_data_url(value: &str) -> Option<ParsedVideoDataUrl> {
    let without_scheme = value.trim().strip_prefix("data:")?;
    let (raw_mime, encoded) = without_scheme.split_once(";base64,")?;
    let mime_type = raw_mime.trim();
    if !mime_type.starts_with("video/") {
        return None;
    }
    let bytes = decode_base64(encoded)?;
    if bytes.is_empty() {
        return None;
    }
    Some(ParsedVideoDataUrl {
        mime_type: mime_type.to_string(),
        extension: mime_to_extension(mime_type).to_string(),
        bytes,
    })
}
/// Encodes bytes as standard (RFC 4648) base64 with `=` padding.
fn encode_base64(bytes: &[u8]) -> String {
    const TABLE: &[u8; 64] = b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
    let mut encoded = String::with_capacity(bytes.len().div_ceil(3) * 4);
    for group in bytes.chunks(3) {
        // Pack up to three bytes into a 24-bit word (missing bytes are zero).
        let mut packed = 0u32;
        for (slot, byte) in group.iter().enumerate() {
            packed |= u32::from(*byte) << (16 - 8 * slot);
        }
        encoded.push(TABLE[((packed >> 18) & 0x3f) as usize] as char);
        encoded.push(TABLE[((packed >> 12) & 0x3f) as usize] as char);
        // Third and fourth symbols become '=' padding for short groups.
        encoded.push(if group.len() >= 2 {
            TABLE[((packed >> 6) & 0x3f) as usize] as char
        } else {
            '='
        });
        encoded.push(if group.len() == 3 {
            TABLE[(packed & 0x3f) as usize] as char
        } else {
            '='
        });
    }
    encoded
}
/// Maps a known image/video MIME type to its file extension; anything
/// unrecognized becomes "bin".
fn mime_to_extension(mime_type: &str) -> &str {
    match mime_type {
        "image/png" => "png",
        "image/jpeg" | "image/jpg" => "jpg",
        "image/webp" => "webp",
        "image/gif" => "gif",
        "image/svg+xml" => "svg",
        "video/mp4" => "mp4",
        "video/quicktime" => "mov",
        "video/x-msvideo" => "avi",
        "video/webm" => "webm",
        _ => "bin",
    }
}
/// Decodes standard base64, ignoring all whitespace and stopping at the first
/// `=` padding character. Returns `None` on any non-alphabet character.
fn decode_base64(value: &str) -> Option<Vec<u8>> {
    let cleaned: String = value.chars().filter(|c| !c.is_whitespace()).collect();
    let mut decoded = Vec::with_capacity(cleaned.len() * 3 / 4);
    let mut accumulator = 0u32;
    let mut bit_count = 0u8;
    for byte in cleaned.bytes() {
        let sextet = match byte {
            // Padding terminates the payload.
            b'=' => break,
            b'A'..=b'Z' => byte - b'A',
            b'a'..=b'z' => byte - b'a' + 26,
            b'0'..=b'9' => byte - b'0' + 52,
            b'+' => 62,
            b'/' => 63,
            _ => return None,
        };
        accumulator = (accumulator << 6) | u32::from(sextet);
        bit_count += 6;
        // Emit a byte whenever eight or more bits have accumulated.
        if bit_count >= 8 {
            bit_count -= 8;
            decoded.push((accumulator >> bit_count) as u8);
        }
    }
    Some(decoded)
}
/// Parses an upstream response body as JSON, wrapping parse failures in a
/// 502 with the caller-supplied fallback message.
fn parse_animation_json_payload(
    raw_text: &str,
    fallback_message: &str,
) -> Result<ParsedAnimationJsonPayload, AppError> {
    match serde_json::from_str::<Value>(raw_text) {
        Ok(payload) => Ok(ParsedAnimationJsonPayload { payload }),
        Err(error) => Err(
            AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
                "provider": "character-animation",
                "message": format!("{fallback_message}:解析响应失败:{error}"),
            })),
        ),
    }
}
/// Extracts a task id from an upstream payload: the top-level `id` field when
/// it is a non-empty string, otherwise the first `task_id` found anywhere in
/// the document.
fn extract_animation_task_id(payload: &Value) -> Option<String> {
    let top_level_id = payload
        .get("id")
        .and_then(Value::as_str)
        .map(str::trim)
        .filter(|candidate| !candidate.is_empty())
        .map(str::to_owned);
    top_level_id.or_else(|| find_first_string_by_key(payload, "task_id"))
}
/// Finds a generated video URL anywhere in the payload, preferring the
/// `video_url` key over the generic `url` key.
fn extract_video_url(payload: &Value) -> Option<String> {
    for key in ["video_url", "url"] {
        if let Some(found) = find_first_string_by_key(payload, key) {
            return Some(found);
        }
    }
    None
}
/// Recursively walks a JSON value collecting every non-empty, trimmed string
/// stored under `target_key` (in document order).
///
/// When the matching key's value is not a non-empty string, the walk still
/// descends into it (it may contain nested matches).
fn collect_strings_by_key(value: &Value, target_key: &str, results: &mut Vec<String>) {
    match value {
        Value::Object(object) => {
            for (key, nested_value) in object {
                if key == target_key {
                    let matched = nested_value
                        .as_str()
                        .map(str::trim)
                        .filter(|text| !text.is_empty());
                    if let Some(text) = matched {
                        results.push(text.to_string());
                        continue;
                    }
                }
                collect_strings_by_key(nested_value, target_key, results);
            }
        }
        Value::Array(entries) => {
            for entry in entries {
                collect_strings_by_key(entry, target_key, results);
            }
        }
        _ => {}
    }
}
/// Returns the first non-empty string stored under `target_key` anywhere in
/// the JSON value, in document order.
fn find_first_string_by_key(value: &Value, target_key: &str) -> Option<String> {
    let mut matches = Vec::new();
    collect_strings_by_key(value, target_key, &mut matches);
    if matches.is_empty() {
        None
    } else {
        Some(matches.swap_remove(0))
    }
}
/// Extracts the raw task status string from an upstream payload: top-level
/// `status` first, then any nested `task_status`, then any nested `status`;
/// empty string when nothing is found.
fn extract_generation_task_status(payload: &Value) -> String {
    if let Some(status) = payload
        .get("status")
        .and_then(Value::as_str)
        .map(str::trim)
        .filter(|candidate| !candidate.is_empty())
    {
        return status.to_string();
    }
    find_first_string_by_key(payload, "task_status")
        .or_else(|| find_first_string_by_key(payload, "status"))
        .unwrap_or_default()
}
/// Canonicalizes an upstream status string: trimmed, ASCII-lowercased, and
/// with inner spaces turned into underscores (e.g. "In Progress" -> "in_progress").
fn normalize_generation_task_status(value: &str) -> String {
    value
        .trim()
        .chars()
        .map(|character| {
            if character == ' ' {
                '_'
            } else {
                character.to_ascii_lowercase()
            }
        })
        .collect()
}
/// Returns true when a normalized status string denotes successful completion.
fn is_completed_generation_task_status(status: &str) -> bool {
    const COMPLETED_STATUSES: [&str; 7] = [
        "completed",
        "complete",
        "done",
        "finished",
        "success",
        "succeeded",
        "succeed",
    ];
    COMPLETED_STATUSES.contains(&status)
}
/// Returns true when a normalized status string denotes a terminal failure.
fn is_failed_generation_task_status(status: &str) -> bool {
    const FAILED_STATUSES: [&str; 8] = [
        "failed",
        "canceled",
        "cancelled",
        "error",
        "aborted",
        "rejected",
        "expired",
        "unknown",
    ];
    FAILED_STATUSES.contains(&status)
}
/// Extracts a human-readable error message from an upstream error body.
///
/// Tries, in order: JSON `error.message`, JSON top-level `message`, the
/// trimmed raw text; `fallback_message` is used only when the body is blank.
fn parse_animation_api_error_message(raw_text: &str, fallback_message: &str) -> String {
    let trimmed = raw_text.trim();
    if trimmed.is_empty() {
        return fallback_message.to_string();
    }
    if let Ok(parsed) = serde_json::from_str::<Value>(raw_text) {
        let candidates = [parsed.pointer("/error/message"), parsed.get("message")];
        for candidate in candidates.into_iter().flatten() {
            if let Some(message) = candidate
                .as_str()
                .map(str::trim)
                .filter(|message| !message.is_empty())
            {
                return message.to_string();
            }
        }
    }
    trimmed.to_string()
}
fn parse_animation_upstream_error(raw_text: &str, fallback_message: &str) -> AppError {
AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
"provider": "character-animation",
"message": parse_animation_api_error_message(raw_text, fallback_message),
}))
}
/// Wraps an already-formatted upstream error message into a 502 `AppError`.
fn map_character_animation_upstream_error(message: String) -> AppError {
    let details = json!({
        "provider": "character-animation",
        "message": message,
    });
    AppError::from_status(StatusCode::BAD_GATEWAY).with_details(details)
}
/// Detects content-moderation rejections in upstream error text so callers
/// can apply a safer fallback prompt.
///
/// Matches the English phrase case-insensitively in both its spaced and
/// hyphenated forms, plus the two Chinese moderation wordings verbatim.
///
/// Bug fix: the hyphenated needle previously read "finappropriate-content"
/// (stray leading `f`), so hyphenated moderation error codes such as
/// "inappropriate-content" were never recognized.
fn is_inappropriate_content_message(value: &str) -> bool {
    let normalized = value.to_ascii_lowercase();
    normalized.contains("inappropriate content")
        || normalized.contains("inappropriate-content")
        || value.contains("不适当内容")
        || value.contains("违规内容")
}
/// Builds the standard six-entry metadata map stored alongside a generated
/// asset object (kind, owner, entity, slot, and animation identifiers).
fn build_asset_metadata(
    asset_kind: &str,
    owner_user_id: &str,
    entity_kind: &str,
    entity_id: &str,
    slot: &str,
    animation: &str,
) -> BTreeMap<String, String> {
    let mut metadata = BTreeMap::new();
    metadata.insert("asset_kind".to_string(), asset_kind.to_string());
    metadata.insert("owner_user_id".to_string(), owner_user_id.to_string());
    metadata.insert("entity_kind".to_string(), entity_kind.to_string());
    metadata.insert("entity_id".to_string(), entity_id.to_string());
    metadata.insert("slot".to_string(), slot.to_string());
    metadata.insert("animation".to_string(), animation.to_string());
    metadata
}
/// Builds the metadata map for a character workflow-cache object; the
/// optional `cache_scope_id` entry is added only when it is non-blank.
fn build_workflow_cache_metadata(
    owner_user_id: &str,
    character_id: &str,
    cache_scope_id: Option<&str>,
) -> BTreeMap<String, String> {
    let mut metadata = BTreeMap::new();
    metadata.insert(
        "asset_kind".to_string(),
        CHARACTER_WORKFLOW_CACHE_ASSET_KIND.to_string(),
    );
    metadata.insert("owner_user_id".to_string(), owner_user_id.to_string());
    metadata.insert(
        "entity_kind".to_string(),
        CHARACTER_ANIMATION_ENTITY_KIND.to_string(),
    );
    metadata.insert("entity_id".to_string(), character_id.to_string());
    metadata.insert("slot".to_string(), CHARACTER_WORKFLOW_CACHE_SLOT.to_string());
    // Trimmed-and-non-blank scope ids only; blank values are treated as absent.
    if let Some(scope_id) = cache_scope_id.and_then(|value| trim_optional_text(Some(value))) {
        metadata.insert("cache_scope_id".to_string(), scope_id);
    }
    metadata
}
/// Fetches the configured OSS client from app state, or fails with a 503
/// explaining that the OSS environment variables are not configured.
fn require_oss_client(state: &AppState) -> Result<&platform_oss::OssClient, AppError> {
    match state.oss_client() {
        Some(client) => Ok(client),
        None => Err(
            AppError::from_status(StatusCode::SERVICE_UNAVAILABLE).with_details(json!({
                "provider": "aliyun-oss",
                "reason": "OSS 未完成环境变量配置",
            })),
        ),
    }
}
/// Collapses all whitespace runs in `value` to single spaces, clips the
/// result to 180 characters (re-trimming a possible trailing space), and
/// substitutes `fallback` when nothing remains.
fn normalize_required_text(value: &str, fallback: &str) -> String {
    // Rebuild the text word by word so every whitespace run becomes one space.
    let mut compact = String::new();
    for word in value.split_whitespace() {
        if !compact.is_empty() {
            compact.push(' ');
        }
        compact.push_str(word);
    }
    let clipped: String = compact.chars().take(180).collect();
    let normalized = clipped.trim();
    if normalized.is_empty() {
        fallback.to_string()
    } else {
        normalized.to_string()
    }
}
/// Trims an optional string and maps blank results to `None`.
fn trim_optional_text(value: Option<&str>) -> Option<String> {
    let trimmed = value?.trim();
    if trimmed.is_empty() {
        None
    } else {
        Some(trimmed.to_string())
    }
}
/// Trims an optional prompt seed, mapping blank/absent input to an empty
/// string, and clips the result to at most 280 characters.
fn clamp_prompt_seed_text(value: Option<&str>) -> String {
    value
        .map(str::trim)
        .filter(|seed| !seed.is_empty())
        .unwrap_or("")
        .chars()
        .take(280)
        .collect()
}
/// Restricts a requested frame count to the supported 2..=16 range.
fn normalize_frame_count(value: u32) -> u32 {
    value.max(2).min(16)
}
/// Substitutes `fallback` for a zero dimension and caps any other value at 4096.
fn normalize_dimension(value: u32, fallback: u32) -> u32 {
    match value {
        0 => fallback,
        value => value.min(4096),
    }
}
/// Renders one deterministic placeholder SVG frame for an animation preview.
///
/// The scene is a simple stick figure on a chroma-key green background
/// (`#00ff00`): `frame_index` / `frame_count` drive a sine-wave bob so the
/// frame sequence loops seamlessly, and animations whose name contains
/// "attack" or "skill" fade in a slash arc. The animation name plus the
/// first 12 characters of the prompt are printed in the frame header.
fn build_animation_frame_svg(
    animation: &str,
    prompt: &str,
    frame_index: u32,
    frame_count: u32,
    width: u32,
    height: u32,
) -> String {
    // Normalized position of this frame inside the sequence (0.0..=1.0).
    let progress = if frame_count <= 1 {
        0.0
    } else {
        frame_index as f32 / (frame_count - 1) as f32
    };
    // One full sine period over the sequence makes the motion loopable.
    let wave = (progress * std::f32::consts::TAU).sin();
    let body_offset_x = (wave * 12.0).round() as i32;
    let body_offset_y = (wave.abs() * -10.0).round() as i32;
    // The slash overlay is only prominent for attack/skill-style animations.
    let slash_alpha = if animation.contains("attack") || animation.contains("skill") {
        (0.25 + progress * 0.5).min(0.75)
    } else {
        0.12
    };
    // All geometry below is expressed as integer percentages of width/height
    // so the drawing scales with the requested frame size.
    format!(
        r##"<svg xmlns="http://www.w3.org/2000/svg" width="{width}" height="{height}" viewBox="0 0 {width} {height}">
  <rect width="100%" height="100%" fill="#00ff00"/>
  <ellipse cx="{shadow_x}" cy="{shadow_y}" rx="{shadow_rx}" ry="{shadow_ry}" fill="rgba(0,0,0,0.24)"/>
  <g transform="translate({body_offset_x} {body_offset_y})">
    <path d="M {body_x} {body_y} C {body_c1x} {body_c1y}, {body_c2x} {body_c2y}, {body_x2} {body_y2} L {leg_x} {leg_y} L {leg2_x} {leg_y} Z" fill="#1f2937"/>
    <circle cx="{head_x}" cy="{head_y}" r="{head_r}" fill="#f8d7b0"/>
    <path d="M {arm_x} {arm_y} L {arm_x2} {arm_y2}" stroke="#e5e7eb" stroke-width="6" stroke-linecap="round"/>
  </g>
  <path d="M {slash_x} {slash_y} C {slash_cx} {slash_cy}, {slash_x2} {slash_y2}, {slash_x3} {slash_y3}" fill="none" stroke="#fef3c7" stroke-width="8" stroke-linecap="round" opacity="{slash_alpha}"/>
  <text x="50%" y="{title_y}" text-anchor="middle" fill="#052e16" font-size="14" font-family="Microsoft YaHei, PingFang SC, sans-serif">{title}</text>
  <text x="50%" y="{frame_y}" text-anchor="middle" fill="#052e16" font-size="11" font-family="Microsoft YaHei, PingFang SC, sans-serif">frame {frame}</text>
</svg>"##,
        width = width,
        height = height,
        shadow_x = width / 2,
        shadow_y = height * 91 / 100,
        shadow_rx = width / 5,
        shadow_ry = height / 26,
        body_offset_x = body_offset_x,
        body_offset_y = body_offset_y,
        body_x = width * 43 / 100,
        body_y = height * 34 / 100,
        body_c1x = width * 36 / 100,
        body_c1y = height * 55 / 100,
        body_c2x = width * 43 / 100,
        body_c2y = height * 78 / 100,
        body_x2 = width * 57 / 100,
        body_y2 = height * 78 / 100,
        leg_x = width * 47 / 100,
        leg_y = height * 88 / 100,
        leg2_x = width * 63 / 100,
        head_x = width * 54 / 100,
        head_y = height * 24 / 100,
        // Head radius scales with the smaller dimension but never below 16px.
        head_r = (width.min(height) / 9).max(16),
        arm_x = width * 57 / 100,
        arm_y = height * 45 / 100,
        arm_x2 = width * 72 / 100,
        arm_y2 = height * 35 / 100,
        slash_x = width * 62 / 100,
        slash_y = height * 28 / 100,
        slash_cx = width * 80 / 100,
        slash_cy = height * 42 / 100,
        slash_x2 = width * 74 / 100,
        slash_y2 = height * 62 / 100,
        slash_x3 = width * 55 / 100,
        slash_y3 = height * 72 / 100,
        slash_alpha = slash_alpha,
        title_y = height * 7 / 100,
        frame_y = height * 13 / 100,
        // Title text is escaped for SVG and the prompt is clipped to 12 chars.
        title = escape_svg_text(&format!(
            "{animation} {}",
            prompt.chars().take(12).collect::<String>()
        )),
        frame = frame_index + 1,
    )
}
/// Converts arbitrary text into a safe storage path segment: lowercase
/// `a-z0-9-_` only, runs of dashes collapsed, and `fallback` substituted
/// when nothing usable remains (e.g. all-CJK labels).
fn sanitize_storage_segment(value: &str, fallback: &str) -> String {
    let trimmed = value.trim();
    let mut mapped = String::with_capacity(trimmed.len());
    for character in trimmed.chars() {
        let replacement = match character {
            'a'..='z' | '0'..='9' | '-' | '_' => character,
            'A'..='Z' => character.to_ascii_lowercase(),
            // Anything else (spaces, punctuation, non-ASCII) becomes a dash.
            _ => '-',
        };
        mapped.push(replacement);
    }
    let collapsed = collapse_dashes(&mapped);
    if collapsed.is_empty() {
        fallback.to_string()
    } else {
        collapsed
    }
}
/// Collapses consecutive dashes into one and strips leading/trailing dashes.
fn collapse_dashes(value: &str) -> String {
    let mut collapsed = String::with_capacity(value.len());
    let mut previous_was_dash = false;
    for character in value.chars() {
        if character == '-' {
            // Skip every dash that directly follows another dash.
            if previous_was_dash {
                continue;
            }
            previous_was_dash = true;
        } else {
            previous_was_dash = false;
        }
        collapsed.push(character);
    }
    collapsed.trim_matches('-').to_string()
}
/// Current unix time in milliseconds, derived from the microsecond clock.
fn current_utc_millis() -> i64 {
    let micros = current_utc_micros();
    micros / 1_000
}
/// Current unix time in microseconds.
///
/// # Panics
/// Panics if the system clock reads before the unix epoch, or if the
/// microsecond count no longer fits in an `i64` (far future).
fn current_utc_micros() -> i64 {
    let elapsed = std::time::SystemTime::now()
        .duration_since(std::time::UNIX_EPOCH)
        .expect("system clock should be after unix epoch");
    i64::try_from(elapsed.as_micros()).expect("current unix micros should fit in i64")
}
/// Current UTC time as an RFC 3339 string; if formatting ever fails we fall
/// back to a millisecond-based pseudo-timestamp text.
fn current_utc_iso_text() -> String {
    let formatted =
        time::OffsetDateTime::now_utc().format(&time::format_description::well_known::Rfc3339);
    match formatted {
        Ok(text) => text,
        // NOTE(review): the fallback is not valid RFC 3339 — it renders the
        // raw millisecond count; presumably acceptable for log/debug use only.
        Err(_) => format!("{}.000000Z", current_utc_millis()),
    }
}
/// Formats a unix-epoch microsecond timestamp as text; thin wrapper over the
/// shared `module_runtime` implementation so this file keeps a single local
/// entry point for timestamp formatting.
fn format_utc_micros(micros: i64) -> String {
    module_runtime::format_utc_micros(micros)
}
/// Escapes `&`, `<` and `>` for safe embedding inside SVG text nodes.
fn escape_svg_text(value: &str) -> String {
    let mut escaped = String::with_capacity(value.len());
    for character in value.chars() {
        match character {
            '&' => escaped.push_str("&amp;"),
            '<' => escaped.push_str("&lt;"),
            '>' => escaped.push_str("&gt;"),
            other => escaped.push(other),
        }
    }
    escaped
}
/// Clamps a value into the 0.0..=1.0 range (NaN passes through, per `f32::clamp`).
fn clamp01(value: f32) -> f32 {
    f32::clamp(value, 0.0, 1.0)
}
/// Linear interpolation between `from` and `to`, with `t` clamped to 0.0..=1.0.
fn lerp(from: f32, to: f32, t: f32) -> f32 {
    let factor = t.clamp(0.0, 1.0);
    from + (to - from) * factor
}
/// Scores how strongly a pixel resembles a chroma-key green background,
/// returning 0.0 (definitely foreground) to 1.0 (definitely background).
/// Fully transparent pixels always score 1.0.
fn compute_green_background_score(red: u8, green: u8, blue: u8, alpha: u8) -> f32 {
    if alpha == 0 {
        return 1.0;
    }
    let (red, green, blue) = (red as f32, green as f32, blue as f32);
    // How far green exceeds the stronger of the other two channels.
    let green_lead = green - red.max(blue);
    if green < 52.0 || green_lead <= 8.0 {
        return 0.0;
    }
    let green_ratio = green / (red + blue).max(1.0);
    if green_ratio <= 0.52 {
        return 0.0;
    }
    // Weighted blend of absolute green, green dominance, and green ratio.
    let score = ((green - 52.0) / 168.0) * 0.22
        + ((green_lead - 8.0) / 96.0) * 0.53
        + ((green_ratio - 0.52) / 0.82) * 0.25;
    score.clamp(0.0, 1.0)
}
/// Scores how strongly a pixel resembles a flat white background, returning
/// 0.0 (foreground) to 1.0 (background). Fully transparent pixels score 1.0.
fn compute_white_background_score(red: u8, green: u8, blue: u8, alpha: u8) -> f32 {
    if alpha == 0 {
        return 1.0;
    }
    let (red, green, blue) = (red as f32, green as f32, blue as f32);
    let max_channel = red.max(green).max(blue);
    let min_channel = red.min(green).min(blue);
    let average = (red + green + blue) / 3.0;
    // Must be both bright on average and bright in every channel.
    if average < 188.0 || min_channel < 168.0 {
        return 0.0;
    }
    let spread = max_channel - min_channel;
    // Low channel spread means a neutral (gray/white) color.
    let neutrality = 1.0 - ((spread - 6.0) / 34.0).clamp(0.0, 1.0);
    let brightness = ((average - 188.0) / 55.0).clamp(0.0, 1.0);
    let floor = ((min_channel - 168.0) / 60.0).clamp(0.0, 1.0);
    (neutrality * (brightness * 0.85 + floor * 0.15)).clamp(0.0, 1.0)
}
/// Computes a weighted-average RGB color from the foreground pixels in the
/// 5x5 neighborhood of (`x`, `y`).
///
/// Used by the background-removal pass to "heal" color-contaminated edge
/// pixels: neighbors that are flagged background (`background_mask`),
/// strongly background-hinted (`background_hints` >= 0.18), or nearly
/// transparent (alpha < 96) are skipped. Returns `None` when no usable
/// neighbor exists. `pixels` is row-major RGBA8.
fn collect_foreground_neighbor_color(
    pixels: &[u8],
    width: usize,
    height: usize,
    x: usize,
    y: usize,
    background_mask: &[u8],
    background_hints: &[f32],
) -> Option<(u8, u8, u8)> {
    let mut total_weight = 0.0f32;
    let mut total_red = 0.0f32;
    let mut total_green = 0.0f32;
    let mut total_blue = 0.0f32;
    // Scan the 5x5 window around the pixel, excluding the center itself.
    for offset_y in -2i32..=2 {
        for offset_x in -2i32..=2 {
            if offset_x == 0 && offset_y == 0 {
                continue;
            }
            let next_x = x as i32 + offset_x;
            let next_y = y as i32 + offset_y;
            // Skip neighbors outside the image bounds.
            if next_x < 0 || next_x >= width as i32 || next_y < 0 || next_y >= height as i32 {
                continue;
            }
            let next_pixel_index = next_y as usize * width + next_x as usize;
            // Skip pixels already classified as background.
            if background_mask[next_pixel_index] != 0 {
                continue;
            }
            // Skip pixels that merely look like background.
            if background_hints[next_pixel_index] >= 0.18 {
                continue;
            }
            let next_offset = next_pixel_index * 4;
            let next_alpha = pixels[next_offset + 3];
            // Mostly-transparent pixels carry too little reliable color.
            if next_alpha < 96 {
                continue;
            }
            // Weight by opacity, scaled by Manhattan distance from the center
            // (closer neighbors contribute more).
            let distance = offset_x.unsigned_abs() + offset_y.unsigned_abs();
            let weight = (next_alpha as f32 / 255.0)
                * if distance <= 1 {
                    1.8
                } else if distance == 2 {
                    1.2
                } else {
                    0.7
                };
            total_weight += weight;
            total_red += pixels[next_offset] as f32 * weight;
            total_green += pixels[next_offset + 1] as f32 * weight;
            total_blue += pixels[next_offset + 2] as f32 * weight;
        }
    }
    if total_weight <= 0.0 {
        return None;
    }
    Some((
        (total_red / total_weight).round() as u8,
        (total_green / total_weight).round() as u8,
        (total_blue / total_weight).round() as u8,
    ))
}
/// Best-effort background removal on a row-major RGBA8 pixel buffer
/// (`width * height` pixels, 4 bytes each). Returns `true` when any pixel
/// was modified.
///
/// Pipeline:
/// 1. score every pixel for green-screen / white-background likeness;
/// 2. flood-fill a background mask starting from the image borders;
/// 3. dilate the mask twice to catch soft edges;
/// 4. fade the alpha of masked pixels toward transparent;
/// 5. de-contaminate colors along the resulting edges (green/white spill).
fn remove_background_from_rgba(pixels: &mut [u8], width: usize, height: usize) -> bool {
    // Alpha at or above this is considered a solid edge we must not erode.
    const SOFT_EDGE_ALPHA_THRESHOLD: u8 = 224;
    // Neighbors at or above this alpha count as supporting foreground.
    const FOREGROUND_NEIGHBOR_ALPHA_THRESHOLD: u8 = 96;
    let pixel_count = width * height;
    if pixel_count == 0 {
        return false;
    }
    let mut background_mask = vec![0u8; pixel_count];
    let mut green_scores = vec![0.0f32; pixel_count];
    let mut white_scores = vec![0.0f32; pixel_count];
    let mut background_hints = vec![0.0f32; pixel_count];
    // BFS queue for the flood fill; `queue_index` is the read cursor.
    let mut queue = Vec::<usize>::new();
    let mut queue_index = 0usize;
    let mut changed = false;
    // Phase 1: per-pixel background-likeness scores. The "hint" is the max of
    // the green score, the white score, and a near-transparency bonus.
    for pixel_index in 0..pixel_count {
        let offset = pixel_index * 4;
        let red = pixels[offset];
        let green = pixels[offset + 1];
        let blue = pixels[offset + 2];
        let alpha = pixels[offset + 3];
        let green_score = compute_green_background_score(red, green, blue, alpha);
        let white_score = compute_white_background_score(red, green, blue, alpha);
        let transparency_hint = clamp01((56.0 - alpha as f32) / 56.0) * 0.75;
        green_scores[pixel_index] = green_score;
        white_scores[pixel_index] = white_score;
        background_hints[pixel_index] = green_score.max(white_score).max(transparency_hint);
    }
    // Phase 2: seed the flood fill. Only strongly background-like border
    // pixels become seeds.
    let try_seed_background =
        |pixel_index: usize, background_mask: &mut [u8], queue: &mut Vec<usize>| {
            if background_mask[pixel_index] != 0 {
                return;
            }
            let offset = pixel_index * 4;
            let alpha = pixels[offset + 3];
            let strong_candidate =
                alpha < 40 || green_scores[pixel_index] > 0.12 || white_scores[pixel_index] > 0.32;
            if !strong_candidate {
                return;
            }
            background_mask[pixel_index] = 1;
            queue.push(pixel_index);
        };
    // Top and bottom rows, then left and right columns (corners already done).
    for x in 0..width {
        try_seed_background(x, &mut background_mask, &mut queue);
        try_seed_background((height - 1) * width + x, &mut background_mask, &mut queue);
    }
    for y in 1..height.saturating_sub(1) {
        try_seed_background(y * width, &mut background_mask, &mut queue);
        try_seed_background(y * width + width - 1, &mut background_mask, &mut queue);
    }
    // Phase 3: 4-connected BFS flood fill. Soft edges (translucent pixels that
    // still look somewhat like background) are reachable too.
    while queue_index < queue.len() {
        let pixel_index = queue[queue_index];
        queue_index += 1;
        let x = pixel_index % width;
        let y = pixel_index / width;
        let neighbor_indexes = [
            if x > 0 { Some(pixel_index - 1) } else { None },
            if x + 1 < width {
                Some(pixel_index + 1)
            } else {
                None
            },
            if y > 0 {
                Some(pixel_index - width)
            } else {
                None
            },
            if y + 1 < height {
                Some(pixel_index + width)
            } else {
                None
            },
        ];
        for next_pixel_index in neighbor_indexes.into_iter().flatten() {
            if background_mask[next_pixel_index] != 0 {
                continue;
            }
            let next_offset = next_pixel_index * 4;
            let next_alpha = pixels[next_offset + 3];
            let next_green_score = green_scores[next_pixel_index];
            let next_white_score = white_scores[next_pixel_index];
            let next_hint = background_hints[next_pixel_index];
            let reachable_soft_edge = next_hint > 0.08
                && next_alpha < SOFT_EDGE_ALPHA_THRESHOLD
                && (next_green_score > 0.04 || next_white_score > 0.08 || next_alpha < 180);
            if next_alpha < 40
                || next_green_score > 0.12
                || next_white_score > 0.32
                || reachable_soft_edge
            {
                background_mask[next_pixel_index] = 1;
                queue.push(next_pixel_index);
            }
        }
    }
    // Phase 4: two dilation passes — absorb soft, background-hinted pixels
    // that touch enough already-masked neighbors (8-connected).
    for _ in 0..2 {
        let mut expanded_mask = background_mask.clone();
        for y in 0..height {
            for x in 0..width {
                let pixel_index = y * width + x;
                if expanded_mask[pixel_index] != 0 {
                    continue;
                }
                let alpha = pixels[pixel_index * 4 + 3];
                let hint = background_hints[pixel_index];
                // Solid or clearly-foreground pixels are never absorbed.
                if alpha >= SOFT_EDGE_ALPHA_THRESHOLD || hint <= 0.06 {
                    continue;
                }
                let mut adjacent_background_count = 0usize;
                for offset_y in -1i32..=1 {
                    for offset_x in -1i32..=1 {
                        if offset_x == 0 && offset_y == 0 {
                            continue;
                        }
                        let next_x = x as i32 + offset_x;
                        let next_y = y as i32 + offset_y;
                        if next_x < 0
                            || next_x >= width as i32
                            || next_y < 0
                            || next_y >= height as i32
                        {
                            continue;
                        }
                        if background_mask[next_y as usize * width + next_x as usize] != 0 {
                            adjacent_background_count += 1;
                        }
                    }
                }
                if adjacent_background_count >= 2 || (adjacent_background_count >= 1 && hint > 0.18)
                {
                    expanded_mask[pixel_index] = 1;
                }
            }
        }
        background_mask = expanded_mask;
    }
    // Phase 5: fade masked pixels toward transparency. How hard a pixel fades
    // depends on its matte score and how many opaque foreground neighbors it
    // still has (isolated masked pixels vanish outright).
    for y in 0..height {
        for x in 0..width {
            let pixel_index = y * width + x;
            if background_mask[pixel_index] == 0 {
                continue;
            }
            let offset = pixel_index * 4;
            let alpha = pixels[offset + 3];
            if alpha == 0 {
                continue;
            }
            let matte_score = background_hints[pixel_index]
                .max(green_scores[pixel_index])
                .max(white_scores[pixel_index]);
            let mut foreground_support = 0usize;
            for offset_y in -1i32..=1 {
                for offset_x in -1i32..=1 {
                    if offset_x == 0 && offset_y == 0 {
                        continue;
                    }
                    let next_x = x as i32 + offset_x;
                    let next_y = y as i32 + offset_y;
                    if next_x < 0 || next_x >= width as i32 || next_y < 0 || next_y >= height as i32
                    {
                        continue;
                    }
                    let next_pixel_index = next_y as usize * width + next_x as usize;
                    if background_mask[next_pixel_index] != 0 {
                        continue;
                    }
                    let next_alpha = pixels[next_pixel_index * 4 + 3];
                    if next_alpha >= FOREGROUND_NEIGHBOR_ALPHA_THRESHOLD {
                        foreground_support += 1;
                    }
                }
            }
            let next_alpha = if matte_score > 0.9 || foreground_support == 0 {
                0
            } else if matte_score > 0.72 && foreground_support <= 1 {
                ((alpha as f32) * 0.08).round() as u8
            } else {
                ((alpha as f32) * (0.08f32.max(1.0 - matte_score * 0.95))).round() as u8
            };
            let mut next_alpha = next_alpha;
            // Pixels with solid foreground support keep a minimum alpha so
            // edges do not become jagged.
            if foreground_support >= 3 && matte_score < 0.55 {
                next_alpha = next_alpha.max(((alpha as f32) * 0.22).round() as u8);
            }
            // Snap near-invisible alpha to fully transparent.
            if next_alpha < 10 {
                next_alpha = 0;
            }
            if next_alpha != alpha {
                pixels[offset + 3] = next_alpha;
                changed = true;
            }
        }
    }
    // Phase 6: edge decontamination. Pixels that touch transparency (or the
    // image border) get their green/white color spill blended toward a
    // weighted sample of nearby clean foreground pixels.
    for y in 0..height {
        for x in 0..width {
            let pixel_index = y * width + x;
            let offset = pixel_index * 4;
            let alpha = pixels[offset + 3];
            if alpha == 0 {
                continue;
            }
            let mut touches_transparent_edge = false;
            for offset_y in -1i32..=1 {
                for offset_x in -1i32..=1 {
                    if offset_x == 0 && offset_y == 0 {
                        continue;
                    }
                    let next_x = x as i32 + offset_x;
                    let next_y = y as i32 + offset_y;
                    // Image borders count as transparent neighbors.
                    if next_x < 0 || next_x >= width as i32 || next_y < 0 || next_y >= height as i32
                    {
                        touches_transparent_edge = true;
                        continue;
                    }
                    let next_pixel_index = next_y as usize * width + next_x as usize;
                    if background_mask[next_pixel_index] != 0
                        || pixels[next_pixel_index * 4 + 3] < 16
                    {
                        touches_transparent_edge = true;
                    }
                }
            }
            if !touches_transparent_edge {
                continue;
            }
            // Estimate how much background color bled into this pixel.
            let green_score = green_scores[pixel_index];
            let white_score = white_scores[pixel_index];
            let contamination = green_score
                .max(white_score)
                .max(if background_mask[pixel_index] != 0 {
                    0.35
                } else {
                    0.0
                })
                .max(if alpha < 220 {
                    ((220 - alpha) as f32 / 220.0) * 0.25
                } else {
                    0.0
                });
            if contamination < 0.06 {
                continue;
            }
            let mut red = pixels[offset] as f32;
            let mut green = pixels[offset + 1] as f32;
            let mut blue = pixels[offset + 2] as f32;
            let sample = collect_foreground_neighbor_color(
                pixels,
                width,
                height,
                x,
                y,
                &background_mask,
                &background_hints,
            );
            let blend =
                clamp01(contamination.max(if touches_transparent_edge { 0.22 } else { 0.0 }));
            if let Some((sample_red, sample_green, sample_blue)) = sample {
                // Pull the pixel toward the clean neighbor color, then cap
                // channels that still show green/white spill.
                red = lerp(red, sample_red as f32, blend);
                green = lerp(green, sample_green as f32, blend);
                blue = lerp(blue, sample_blue as f32, blend);
                if green_score > 0.04 {
                    green = green.min(sample_green as f32 + 18.0);
                }
                if white_score > 0.1 {
                    red = red.min(sample_red as f32 + 26.0);
                    green = green.min(sample_green as f32 + 26.0);
                    blue = blue.min(sample_blue as f32 + 26.0);
                }
            } else {
                // No clean neighbor available: desaturate the spill in place.
                if green_score > 0.04 {
                    green = green
                        .max(red.max(blue))
                        .max((green - (green - red.max(blue)) * 0.78).round());
                }
                if white_score > 0.12 {
                    let spread = red.max(green).max(blue) - red.min(green).min(blue);
                    if spread < 20.0 {
                        let toned_value = ((red + green + blue) / 3.0 * 0.88).round();
                        red = red.min(toned_value);
                        green = green.min(toned_value);
                        blue = blue.min(toned_value);
                    }
                }
            }
            // Fade alpha slightly on strongly contaminated edges.
            let mut next_alpha = alpha;
            let edge_fade = (green_score * 0.35).max(white_score * 0.28);
            if edge_fade > 0.08 {
                next_alpha = ((alpha as f32) * (1.0 - edge_fade)).round() as u8;
                if next_alpha < 10 {
                    next_alpha = 0;
                }
            }
            let next_red = red.round() as u8;
            let next_green = green.round() as u8;
            let next_blue = blue.round() as u8;
            if next_red != pixels[offset]
                || next_green != pixels[offset + 1]
                || next_blue != pixels[offset + 2]
                || next_alpha != alpha
            {
                pixels[offset] = next_red;
                pixels[offset + 1] = next_green;
                pixels[offset + 2] = next_blue;
                pixels[offset + 3] = next_alpha;
                changed = true;
            }
        }
    }
    changed
}
/// Maps an AI-task service error to an `AppError` with an appropriate HTTP
/// status (404 missing, 409 duplicate, 400 validation, 500 storage).
fn map_ai_task_error(error: AiTaskServiceError) -> AppError {
    let status = match &error {
        AiTaskServiceError::TaskNotFound => StatusCode::NOT_FOUND,
        AiTaskServiceError::TaskAlreadyExists => StatusCode::CONFLICT,
        AiTaskServiceError::Field(_) | AiTaskServiceError::StageNotFound => {
            StatusCode::BAD_REQUEST
        }
        AiTaskServiceError::Store(_) => StatusCode::INTERNAL_SERVER_ERROR,
    };
    let details = json!({
        "provider": "ai-task",
        "message": error.to_string(),
    });
    AppError::from_status(status).with_details(details)
}
/// Maps an asset-object field validation error to a 400 `AppError`.
fn map_asset_object_prepare_error(error: AssetObjectFieldError) -> AppError {
    let details = json!({
        "provider": "asset-object",
        "message": error.to_string(),
    });
    AppError::from_status(StatusCode::BAD_REQUEST).with_details(details)
}
/// Maps an asset-entity-binding field validation error to a 400 `AppError`.
fn map_asset_binding_prepare_error(error: AssetObjectFieldError) -> AppError {
    let details = json!({
        "provider": "asset-entity-binding",
        "message": error.to_string(),
    });
    AppError::from_status(StatusCode::BAD_REQUEST).with_details(details)
}
/// Maps a SpacetimeDB client error to a 502 `AppError`.
fn map_character_animation_spacetime_error(error: SpacetimeClientError) -> AppError {
    let details = json!({
        "provider": "spacetimedb",
        "message": error.to_string(),
    });
    AppError::from_status(StatusCode::BAD_GATEWAY).with_details(details)
}
/// Maps an OSS error to an `AppError`: config/request problems become 400,
/// a missing object becomes 404, and transport/signing issues become 502.
fn map_character_animation_oss_error(error: platform_oss::OssError) -> AppError {
    let status = match &error {
        platform_oss::OssError::InvalidConfig(_) | platform_oss::OssError::InvalidRequest(_) => {
            StatusCode::BAD_REQUEST
        }
        platform_oss::OssError::ObjectNotFound(_) => StatusCode::NOT_FOUND,
        platform_oss::OssError::Request(_)
        | platform_oss::OssError::SerializePolicy(_)
        | platform_oss::OssError::Sign(_) => StatusCode::BAD_GATEWAY,
    };
    let details = json!({
        "provider": "aliyun-oss",
        "message": error.to_string(),
    });
    AppError::from_status(status).with_details(details)
}
/// Renders an `AppError` into an HTTP response, attaching the request
/// context so the shared formatter can include correlation details.
fn character_animation_error_response(
    request_context: &RequestContext,
    error: AppError,
) -> Response {
    let context = Some(request_context);
    error.into_response_with_context(context)
}
/// Compile-time definition of a motion (animation) template offered to
/// clients; converted to the wire payload via `to_payload`.
pub(crate) struct MotionTemplate {
    /// Stable template identifier.
    pub(crate) id: &'static str,
    /// Human-readable display label.
    pub(crate) label: &'static str,
    /// Animation slot name this template targets.
    pub(crate) animation: &'static str,
    /// Text appended to the user's prompt when this template is chosen.
    pub(crate) prompt_suffix: &'static str,
    /// Free-form notes surfaced alongside the template.
    pub(crate) notes: &'static str,
}
impl MotionTemplate {
    /// Converts this static template definition into the serializable
    /// payload returned by the templates endpoint.
    fn to_payload(&self) -> CharacterAnimationTemplatePayload {
        CharacterAnimationTemplatePayload {
            id: String::from(self.id),
            label: String::from(self.label),
            animation: String::from(self.animation),
            prompt_suffix: String::from(self.prompt_suffix),
            notes: String::from(self.notes),
        }
    }
}
/// Result of parsing a `data:video/...;base64,...` URL into raw bytes
/// (see `parse_video_data_url` tests below for the accepted shape).
struct ParsedVideoDataUrl {
    mime_type: String,
    extension: String,
    bytes: Vec<u8>,
}
/// JSON payload successfully parsed from an upstream animation API response.
struct ParsedAnimationJsonPayload {
    payload: Value,
}
/// Connection settings for the upstream video-generation API.
// NOTE(review): presumably Volcano Engine "Ark" given the name — confirm.
struct ArkVideoSettings {
    base_url: String,
    api_key: String,
    request_timeout_ms: u64,
    model: String,
}
/// Outcome of a successful animation preview generation.
struct GeneratedAnimationPreview {
    preview_video_path: String,
    upstream_task_id: String,
    submitted_prompt: String,
    // True when the prompt was swapped for a safer one after a moderation rejection.
    moderation_fallback_applied: bool,
}
/// Local ffmpeg/ffprobe configuration used for server-side frame extraction.
struct BackendFrameExtractionSettings {
    ffmpeg_path: String,
    ffprobe_path: String,
    timeout_ms: u64,
}
/// Parameters controlling how frames are sampled out of a preview video.
struct AnimationFrameExtractionPlan {
    frame_count: u32,
    apply_chroma_key: bool,
    // Sampling window expressed as ratios of the video duration.
    sample_start_ratio: f32,
    sample_end_ratio: f32,
}
/// A single extracted, fully-processed animation frame ready for upload.
struct FinalizedAnimationFrame {
    bytes: Vec<u8>,
    mime_type: String,
    extension: String,
}
// Unifies the draft payload returned by the animation-generation stage so the
// image-sequence and video-preview branches don't fan out inside the handler.
struct CharacterAnimationGeneratedDraft {
    image_sources: Vec<String>,
    preview_video_path: Option<String>,
}
// Uniform description of a media object loaded from a data URL, OSS, or an
// in-repo placeholder asset.
struct MediaPayload {
    mime_type: String,
    extension: String,
    bytes: Vec<u8>,
}
// After publishing a full animation set, the current stage only needs to
// return the animationMap that the legacy frontend depends on.
struct PublishedAnimationSet {
    animation_map: Value,
}
// Publishing a single action produces both the action-level manifest and the
// animationMap config consumed directly by the frontend.
struct PublishedAnimationAction {
    manifest: Value,
    animation_config: Value,
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn parse_video_data_url_accepts_mp4_payload() {
let parsed =
parse_video_data_url("data:video/mp4;base64,aGVsbG8=").expect("video should parse");
assert_eq!(parsed.mime_type, "video/mp4");
assert_eq!(parsed.extension, "mp4");
assert_eq!(parsed.bytes, b"hello".to_vec());
}
#[test]
fn parse_video_data_url_rejects_image_payload() {
assert!(parse_video_data_url("data:image/png;base64,aGVsbG8=").is_none());
}
#[test]
fn sanitize_storage_segment_falls_back_for_chinese_label() {
assert_eq!(
sanitize_storage_segment("参考视频", "imported-video"),
"imported-video"
);
}
#[test]
fn normalize_workflow_cache_payload_keeps_legacy_shape() {
let cache = normalize_workflow_cache_payload(
CharacterWorkflowCacheSaveRequest {
character_id: "hero".to_string(),
cache_scope_id: None,
visual_prompt_text: Some("主形象".to_string()),
animation_prompt_text: Some("待机".to_string()),
visual_drafts: vec![CharacterVisualDraftPayload {
id: "".to_string(),
label: "".to_string(),
image_src: " /generated-character-drafts/hero/candidate.svg ".to_string(),
width: 0,
height: 0,
}],
selected_visual_draft_id: None,
selected_animation: None,
image_src: Some("".to_string()),
generated_visual_asset_id: None,
generated_animation_set_id: None,
animation_map: Some(json!({ "idle": { "frames": 4 } })),
},
"2026-04-22T12:00:00Z".to_string(),
);
assert_eq!(cache.character_id, "hero");
assert_eq!(cache.selected_animation, "idle");
assert_eq!(cache.visual_drafts[0].id, "hero-draft-1");
assert_eq!(cache.visual_drafts[0].width, 1024);
assert_eq!(cache.image_src, None);
assert!(cache.animation_map.is_some());
}
#[test]
fn workflow_cache_object_key_uses_character_drafts_prefix() {
assert_eq!(
workflow_cache_object_key("Hero 01", None),
"generated-character-drafts/hero-01/workflow-cache/workflow-cache.json"
);
}
#[test]
fn workflow_cache_object_key_can_scope_by_world() {
assert_eq!(
workflow_cache_object_key("Hero 01", Some("World 99")),
"generated-character-drafts/world-99/hero-01/workflow-cache/workflow-cache.json"
);
}
#[test]
fn build_animation_generate_result_payload_keeps_image_sequence_shape() {
let payload = build_animation_generate_result_payload(&CharacterAnimationGeneratedDraft {
image_sources: vec![
"/generated-character-drafts/hero/animation/idle/task/frame-01.svg".to_string(),
],
preview_video_path: None,
});
assert_eq!(
payload,
json!({
"imageSources": [
"/generated-character-drafts/hero/animation/idle/task/frame-01.svg"
]
})
);
}
#[test]
fn build_animation_generate_result_payload_keeps_video_shape() {
let payload = build_animation_generate_result_payload(&CharacterAnimationGeneratedDraft {
image_sources: Vec::new(),
preview_video_path: Some(
"/generated-character-drafts/hero/animation/idle/task/preview.mp4".to_string(),
),
});
assert_eq!(
payload,
json!({
"previewVideoPath": "/generated-character-drafts/hero/animation/idle/task/preview.mp4"
})
);
}
#[test]
fn resolve_character_animation_model_uses_strategy_specific_field() {
let payload = CharacterAnimationGenerateRequest {
character_id: "hero".to_string(),
strategy: CharacterAnimationStrategy::MotionTransfer,
animation: "attack".to_string(),
prompt_text: "横斩".to_string(),
character_brief_text: None,
action_template_id: None,
visual_source: "/generated-characters/hero/master.svg".to_string(),
reference_image_data_urls: Vec::new(),
reference_video_data_urls: Vec::new(),
last_frame_image_data_url: None,
frame_count: 8,
fps: 8,
duration_seconds: 4,
loop_: false,
use_chroma_key: true,
resolution: "480p".to_string(),
ratio: "1:1".to_string(),
image_sequence_model: "wan-seq".to_string(),
video_model: "wan-video".to_string(),
reference_video_model: "wan-r2v".to_string(),
motion_transfer_model: "wan-move".to_string(),
};
assert_eq!(resolve_character_animation_model(&payload), "wan-move");
}
}