//! Character visual asset endpoints: candidate image generation via
//! DashScope, job-status lookup, and publishing the selected candidate to
//! OSS with asset-object confirmation and character binding.
use std::{
collections::BTreeMap,
time::{Duration, Instant},
};
use axum::{
Json,
extract::{Extension, Path, State, rejection::JsonRejection},
http::StatusCode,
response::Response,
};
use base64::{Engine as _, engine::general_purpose::STANDARD as BASE64_STANDARD};
use image::{ColorType, ImageEncoder, ImageFormat, codecs::png::PngEncoder};
use module_ai::{
AiResultReferenceKind, AiStageCompletionInput, AiTaskCreateInput, AiTaskKind,
AiTaskServiceError, AiTaskSnapshot, AiTaskStageKind, AiTaskStatus, generate_ai_task_id,
};
use module_assets::{
AssetObjectAccessPolicy, AssetObjectFieldError, build_asset_entity_binding_input,
build_asset_object_upsert_input, generate_asset_binding_id, generate_asset_object_id,
};
use platform_oss::{
LegacyAssetPrefix, OssHeadObjectRequest, OssObjectAccess, OssPutObjectRequest,
OssSignedGetObjectUrlRequest,
};
use serde_json::{Value, json};
use shared_contracts::assets::{
CharacterAssetJobStatusPayload, CharacterAssetJobStatusText, CharacterVisualDraftPayload,
CharacterVisualGenerateRequest, CharacterVisualGenerateResponse, CharacterVisualPublishRequest,
CharacterVisualPublishResponse,
};
use spacetime_client::SpacetimeClientError;
use crate::{
api_response::json_success_body,
custom_world_asset_prompts::{
build_character_visual_negative_prompt, build_character_visual_prompt,
},
http_error::AppError,
request_context::RequestContext,
state::AppState,
};
use tokio::time::sleep;
/// Default DashScope image model when the request omits/blanks `imageModel`.
const CHARACTER_VISUAL_MODEL: &str = "wan2.7-image-pro";
/// Asset kind recorded in OSS metadata and asset-object rows.
const CHARACTER_VISUAL_ASSET_KIND: &str = "character_visual";
/// Entity kind used when binding the published asset to a character.
const CHARACTER_VISUAL_ENTITY_KIND: &str = "character";
/// Binding slot name for a character's primary visual.
const CHARACTER_VISUAL_SLOT: &str = "primary_visual";
/// Delay between DashScope async-task status polls.
const CHARACTER_VISUAL_TASK_POLL_INTERVAL_MS: u64 = 2_500;
/// `POST` handler: generates candidate primary-visual images for a character.
///
/// Flow: validate the JSON body, create a staged AI task for progress
/// tracking, build the prompt, call DashScope (with normalized reference
/// images unless the mode is text-to-image), persist the downloaded
/// candidates to OSS as drafts, and return the draft descriptors. Any
/// failure inside the inner async block marks the task failed (best effort)
/// and is returned as a structured error response.
pub async fn generate_character_visual(
    State(state): State<AppState>,
    Extension(request_context): Extension<RequestContext>,
    payload: Result<Json<CharacterVisualGenerateRequest>, JsonRejection>,
) -> Result<Json<Value>, Response> {
    // Body deserialization failures become 400s in the shared error envelope.
    let Json(payload) = payload.map_err(|error| {
        character_visual_error_response(
            &request_context,
            AppError::from_status(StatusCode::BAD_REQUEST).with_details(json!({
                "provider": "character-visual",
                "message": error.body_text(),
            })),
        )
    })?;
    // The legacy asset-workshop API sends no explicit Bearer header; the Rust
    // compatibility layer attributes ownership to a tool user so existing
    // frontend calls keep working.
    let owner_user_id = "asset-tool".to_string();
    let task_id = generate_ai_task_id(current_utc_micros());
    let prompt = build_character_visual_prompt(
        payload.prompt_text.as_str(),
        payload.character_brief_text.as_deref(),
    );
    // Sanitize user-provided identifiers/settings, falling back to defaults.
    let character_id = normalize_required_text(payload.character_id.as_str(), "character");
    let model = normalize_required_text(payload.image_model.as_str(), CHARACTER_VISUAL_MODEL);
    let size = normalize_required_text(payload.size.as_str(), "1024*1024");
    let candidate_count = payload.candidate_count.clamp(1, 4);
    // Create the task record first so even early failures can be attributed.
    let created = create_visual_task(
        &state,
        &task_id,
        &owner_user_id,
        &character_id,
        &model,
        &prompt,
    )
    .map_err(|error| character_visual_error_response(&request_context, error))?;
    // Everything that can fail after task creation runs inside this block so
    // the single `match` below can mark the task failed on any error.
    let result = async {
        let settings = require_dashscope_settings(&state)?;
        let http_client = build_dashscope_http_client(&settings)?;
        state
            .ai_task_service()
            .start_task(task_id.as_str(), current_utc_micros())
            .map_err(map_ai_task_error)?;
        // Stage: PreparePrompt (prompt already computed above; record it).
        state
            .ai_task_service()
            .start_stage(
                task_id.as_str(),
                AiTaskStageKind::PreparePrompt,
                current_utc_micros(),
            )
            .map_err(map_ai_task_error)?;
        state
            .ai_task_service()
            .complete_stage(AiStageCompletionInput {
                task_id: task_id.clone(),
                stage_kind: AiTaskStageKind::PreparePrompt,
                text_output: Some(prompt.clone()),
                structured_payload_json: Some(
                    json!({
                        "characterId": character_id,
                        "sourceMode": payload.source_mode,
                        "size": size,
                        "referenceImageCount": payload.reference_image_data_urls.len(),
                    })
                    .to_string(),
                ),
                warning_messages: Vec::new(),
                completed_at_micros: current_utc_micros(),
            })
            .map_err(map_ai_task_error)?;
        // Stage: RequestModel.
        state
            .ai_task_service()
            .start_stage(
                task_id.as_str(),
                AiTaskStageKind::RequestModel,
                current_utc_micros(),
            )
            .map_err(map_ai_task_error)?;
        // Image-to-image modes require at least one reference image, each
        // normalized into a data URL DashScope can consume.
        let reference_images = match payload.source_mode {
            shared_contracts::assets::CharacterVisualSourceMode::TextToImage => Vec::new(),
            _ => {
                if payload.reference_image_data_urls.is_empty() {
                    return Err(AppError::from_status(StatusCode::BAD_REQUEST).with_details(
                        json!({
                            "provider": "character-visual",
                            "message": "图生主形象至少需要一张参考图。",
                        }),
                    ));
                }
                let mut normalized_reference_images =
                    Vec::with_capacity(payload.reference_image_data_urls.len());
                for (index, source) in payload.reference_image_data_urls.iter().enumerate() {
                    normalized_reference_images.push(
                        resolve_reference_image_as_data_url(
                            &state,
                            &http_client,
                            source,
                            format!("referenceImageDataUrls[{index}]").as_str(),
                        )
                        .await?,
                    );
                }
                normalized_reference_images
            }
        };
        let generated = create_character_visual_generation(
            &http_client,
            &settings,
            model.as_str(),
            prompt.as_str(),
            size.as_str(),
            candidate_count,
            &reference_images,
        )
        .await?;
        state
            .ai_task_service()
            .complete_stage(AiStageCompletionInput {
                task_id: task_id.clone(),
                stage_kind: AiTaskStageKind::RequestModel,
                // Prefer the provider's rewritten prompt when available.
                text_output: Some(
                    generated
                        .actual_prompt
                        .clone()
                        .unwrap_or_else(|| prompt.clone()),
                ),
                structured_payload_json: Some(
                    json!({
                        "provider": "dashscope",
                        "taskId": generated.task_id,
                        "model": model,
                        "imageCount": generated.images.len(),
                    })
                    .to_string(),
                ),
                warning_messages: Vec::new(),
                completed_at_micros: current_utc_micros(),
            })
            .map_err(map_ai_task_error)?;
        // Write the downloaded candidates to OSS as drafts.
        let drafts = persist_visual_drafts(
            &state,
            &owner_user_id,
            &character_id,
            &task_id,
            generated.images,
            size.as_str(),
        )
        .await?;
        let result_payload = json!({
            "drafts": drafts,
            "draftRelativeDir": format!(
                "generated-character-drafts/{}/visual/{}",
                sanitize_storage_segment(character_id.as_str(), "character"),
                task_id
            ),
        });
        // Stage: NormalizeResult.
        state
            .ai_task_service()
            .start_stage(
                task_id.as_str(),
                AiTaskStageKind::NormalizeResult,
                current_utc_micros(),
            )
            .map_err(map_ai_task_error)?;
        state
            .ai_task_service()
            .complete_stage(AiStageCompletionInput {
                task_id: task_id.clone(),
                stage_kind: AiTaskStageKind::NormalizeResult,
                text_output: None,
                structured_payload_json: Some(result_payload.to_string()),
                warning_messages: Vec::new(),
                completed_at_micros: current_utc_micros(),
            })
            .map_err(map_ai_task_error)?;
        // NOTE(review): PersistResult is completed without a matching
        // start_stage call — confirm the task service tolerates this.
        state
            .ai_task_service()
            .complete_stage(AiStageCompletionInput {
                task_id: task_id.clone(),
                stage_kind: AiTaskStageKind::PersistResult,
                text_output: Some("角色主形象候选草稿已写入 OSS。".to_string()),
                structured_payload_json: Some(result_payload.to_string()),
                warning_messages: Vec::new(),
                completed_at_micros: current_utc_micros(),
            })
            .map_err(map_ai_task_error)?;
        state
            .ai_task_service()
            .complete_task(task_id.as_str(), current_utc_micros())
            .map_err(map_ai_task_error)?;
        Ok::<_, AppError>(drafts)
    }
    .await;
    // Any failure above marks the task failed (best effort) before replying.
    let drafts = match result {
        Ok(drafts) => drafts,
        Err(error) => {
            let _ = state.ai_task_service().fail_task(
                created.task_id.as_str(),
                error.message().to_string(),
                current_utc_micros(),
            );
            return Err(character_visual_error_response(&request_context, error));
        }
    };
    Ok(json_success_body(
        Some(&request_context),
        CharacterVisualGenerateResponse {
            ok: true,
            task_id,
            model,
            prompt,
            drafts,
        },
    ))
}
pub async fn get_character_visual_job(
State(state): State<AppState>,
Extension(request_context): Extension<RequestContext>,
Path(task_id): Path<String>,
) -> Result<Json<Value>, Response> {
let task = state
.ai_task_service()
.get_task(task_id.as_str())
.map_err(map_ai_task_error)
.map_err(|error| character_visual_error_response(&request_context, error))?;
Ok(json_success_body(
Some(&request_context),
build_character_visual_job_payload(task),
))
}
/// `POST` handler: publishes a previously generated draft as the character's
/// primary visual (copy to permanent OSS prefix, confirm asset object, bind
/// to the character) and reports the resulting portrait path.
pub async fn publish_character_visual(
    State(state): State<AppState>,
    Extension(request_context): Extension<RequestContext>,
    payload: Result<Json<CharacterVisualPublishRequest>, JsonRejection>,
) -> Result<Json<Value>, Response> {
    // Body deserialization failures become 400s in the shared error envelope.
    let Json(payload) = payload.map_err(|error| {
        character_visual_error_response(
            &request_context,
            AppError::from_status(StatusCode::BAD_REQUEST).with_details(json!({
                "provider": "character-visual",
                "message": error.body_text(),
            })),
        )
    })?;
    // The legacy asset-workshop API sends no explicit Bearer header; the Rust
    // compatibility layer attributes ownership to a tool user so existing
    // frontend calls keep working.
    let owner_user_id = "asset-tool".to_string();
    let character_id = normalize_required_text(payload.character_id.as_str(), "character");
    if payload.selected_preview_source.trim().is_empty() {
        return Err(character_visual_error_response(
            &request_context,
            AppError::from_status(StatusCode::BAD_REQUEST).with_details(json!({
                "provider": "character-visual",
                "message": "selectedPreviewSource is required.",
            })),
        ));
    }
    // Millisecond timestamp doubles as the published asset id.
    let asset_id = format!("visual-{}", current_utc_millis());
    let published = persist_published_visual(
        &state,
        &owner_user_id,
        &character_id,
        asset_id.as_str(),
        payload.selected_preview_source.as_str(),
        payload.prompt_text.as_deref(),
    )
    .await
    .map_err(|error| character_visual_error_response(&request_context, error))?;
    Ok(json_success_body(
        Some(&request_context),
        CharacterVisualPublishResponse {
            ok: true,
            asset_id,
            portrait_path: published,
            // The Rust backend no longer writes a local override file; the
            // map stays empty for frontend compatibility.
            override_map: json!({}),
            save_message: if payload.update_character_override == Some(false) {
                "主形象已写入 OSS 并绑定当前角色,可直接写回当前自定义世界角色。".to_string()
            } else {
                "主形象已写入 OSS 并绑定当前角色Rust 后端不再写本地角色覆盖文件。".to_string()
            },
        },
    ))
}
/// Creates the staged AI task record that tracks one visual-generation run.
///
/// The request payload is stored as a JSON string and read back later by the
/// job-status endpoint (`build_character_visual_job_payload`).
fn create_visual_task(
    state: &AppState,
    task_id: &str,
    owner_user_id: &str,
    character_id: &str,
    model: &str,
    prompt: &str,
) -> Result<AiTaskSnapshot, AppError> {
    state
        .ai_task_service()
        .create_task(AiTaskCreateInput {
            task_id: task_id.to_string(),
            task_kind: AiTaskKind::CustomWorldGeneration,
            owner_user_id: owner_user_id.to_string(),
            request_label: "生成角色主形象".to_string(),
            source_module: "assets.character_visual".to_string(),
            source_entity_id: Some(character_id.to_string()),
            request_payload_json: Some(
                json!({
                    "characterId": character_id,
                    "model": model,
                    "prompt": prompt,
                })
                .to_string(),
            ),
            // Reuse the standard stage blueprint for custom-world generation.
            stages: AiTaskKind::CustomWorldGeneration.default_stage_blueprints(),
            created_at_micros: current_utc_micros(),
        })
        .map_err(map_ai_task_error)
}
/// Uploads each downloaded candidate image to the legacy character-drafts
/// OSS prefix and returns the draft payloads shown to the frontend.
async fn persist_visual_drafts(
    state: &AppState,
    owner_user_id: &str,
    character_id: &str,
    task_id: &str,
    images: Vec<DownloadedGeneratedImage>,
    size: &str,
) -> Result<Vec<CharacterVisualDraftPayload>, AppError> {
    let mut drafts = Vec::with_capacity(images.len());
    for (index, image) in images.into_iter().enumerate() {
        // e.g. candidate-01.png, candidate-02.jpg, ...
        let file_name = format!("candidate-{:02}.{}", index + 1, image.extension);
        let put_result = put_character_visual_object(
            state,
            LegacyAssetPrefix::CharacterDrafts,
            vec![
                sanitize_storage_segment(character_id, "character"),
                "visual".to_string(),
                task_id.to_string(),
            ],
            file_name,
            image.mime_type,
            image.bytes,
            build_asset_metadata(
                CHARACTER_VISUAL_ASSET_KIND,
                owner_user_id,
                CHARACTER_VISUAL_ENTITY_KIND,
                character_id,
                "draft",
            ),
        )
        .await?;
        drafts.push(CharacterVisualDraftPayload {
            id: format!("candidate-{}", index + 1),
            label: format!("候选 {}", index + 1),
            image_src: put_result.legacy_public_path,
            // Width/height echo the requested size string, not the actual
            // decoded image dimensions.
            width: parse_size(size).0,
            height: parse_size(size).1,
        });
    }
    Ok(drafts)
}
/// Copies the selected draft object into the permanent characters prefix,
/// confirms an asset-object row in SpacetimeDB, and binds it to the
/// character's primary-visual slot.
///
/// Returns the legacy public path of the published object.
async fn persist_published_visual(
    state: &AppState,
    owner_user_id: &str,
    character_id: &str,
    asset_id: &str,
    selected_preview_source: &str,
    prompt_text: Option<&str>,
) -> Result<String, AppError> {
    let oss_client = require_oss_client(state)?;
    let http_client = reqwest::Client::new();
    let source_object_key = resolve_object_key_from_legacy_path(selected_preview_source)?;
    // HEAD first to learn the source object's content type.
    let head = oss_client
        .head_object(
            &http_client,
            OssHeadObjectRequest {
                object_key: source_object_key.clone(),
            },
        )
        .await
        .map_err(map_character_visual_oss_error)?;
    // Drafts are private objects, so download via a short-lived signed URL.
    let signed = oss_client
        .sign_get_object_url(OssSignedGetObjectUrlRequest {
            object_key: source_object_key,
            expire_seconds: Some(60),
        })
        .map_err(map_character_visual_oss_error)?;
    let source_body = http_client
        .get(signed.signed_url)
        .send()
        .await
        .map_err(|error| {
            AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
                "provider": "aliyun-oss",
                "message": format!("读取候选主形象失败:{error}"),
            }))
        })?
        .error_for_status()
        .map_err(|error| {
            AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
                "provider": "aliyun-oss",
                "message": format!("读取候选主形象失败:{error}"),
            }))
        })?
        .bytes()
        .await
        .map_err(|error| {
            AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
                "provider": "aliyun-oss",
                "message": format!("读取候选主形象内容失败:{error}"),
            }))
        })?
        .to_vec();
    // NOTE(review): SVG is assumed when the source object carries no content
    // type — presumably placeholder drafts are SVG; confirm.
    let content_type = head
        .content_type
        .clone()
        .unwrap_or_else(|| "image/svg+xml".to_string());
    let file_name = match content_type.as_str() {
        "image/png" => "master.png",
        "image/jpeg" => "master.jpg",
        "image/webp" => "master.webp",
        _ => "master.svg",
    }
    .to_string();
    // Re-upload under the permanent characters prefix.
    let put_result = put_character_visual_object(
        state,
        LegacyAssetPrefix::Characters,
        vec![
            sanitize_storage_segment(character_id, "character"),
            "visual".to_string(),
            asset_id.to_string(),
        ],
        file_name,
        content_type.clone(),
        source_body,
        build_asset_metadata(
            CHARACTER_VISUAL_ASSET_KIND,
            owner_user_id,
            CHARACTER_VISUAL_ENTITY_KIND,
            character_id,
            CHARACTER_VISUAL_SLOT,
        ),
    )
    .await?;
    // Record the object in SpacetimeDB, then bind it to the character.
    let confirmed = confirm_character_visual_asset_object(
        state,
        owner_user_id,
        character_id,
        asset_id,
        put_result.object_key.clone(),
        content_type,
        prompt_text.map(str::to_string),
    )
    .await?;
    bind_character_visual_asset(
        state,
        owner_user_id,
        character_id,
        confirmed.record.asset_object_id,
    )
    .await?;
    Ok(put_result.legacy_public_path)
}
async fn put_character_visual_object(
state: &AppState,
prefix: LegacyAssetPrefix,
path_segments: Vec<String>,
file_name: String,
content_type: String,
body: Vec<u8>,
metadata: BTreeMap<String, String>,
) -> Result<platform_oss::OssPutObjectResponse, AppError> {
let oss_client = require_oss_client(state)?;
oss_client
.put_object(
&reqwest::Client::new(),
OssPutObjectRequest {
prefix,
path_segments,
file_name,
content_type: Some(content_type),
access: OssObjectAccess::Private,
metadata,
body,
},
)
.await
.map_err(map_character_visual_oss_error)
}
/// HEADs the uploaded object, upserts an asset-object row in SpacetimeDB,
/// and (best effort) attaches that row as a result reference on the AI task
/// identified by `source_job_id`.
async fn confirm_character_visual_asset_object(
    state: &AppState,
    owner_user_id: &str,
    character_id: &str,
    source_job_id: &str,
    object_key: String,
    content_type: String,
    prompt_text: Option<String>,
) -> Result<module_assets::ConfirmAssetObjectResult, AppError> {
    let oss_client = require_oss_client(state)?;
    // Re-HEAD so size/etag/content-type describe what was actually stored.
    let head = oss_client
        .head_object(&reqwest::Client::new(), OssHeadObjectRequest { object_key })
        .await
        .map_err(map_character_visual_oss_error)?;
    let now_micros = current_utc_micros();
    let record = state
        .spacetime_client()
        .confirm_asset_object(
            build_asset_object_upsert_input(
                generate_asset_object_id(now_micros),
                head.bucket,
                head.object_key,
                AssetObjectAccessPolicy::Private,
                // Prefer the stored content type; fall back to what we uploaded.
                head.content_type.or(Some(content_type)),
                head.content_length,
                // NOTE(review): falls back to the object ETag when no prompt
                // text is provided — confirm this field's intended semantics.
                prompt_text.or(head.etag),
                CHARACTER_VISUAL_ASSET_KIND.to_string(),
                Some(source_job_id.to_string()),
                Some(owner_user_id.to_string()),
                None,
                Some(character_id.to_string()),
                now_micros,
            )
            .map_err(map_asset_object_prepare_error)?,
        )
        .await
        .map_err(map_character_visual_spacetime_error)?;
    // Best effort: publishing must not fail just because the task is gone.
    let _ = state.ai_task_service().attach_result_reference(
        source_job_id,
        AiResultReferenceKind::AssetObject,
        record.asset_object_id.clone(),
        Some("角色主形象正式对象".to_string()),
        now_micros,
    );
    Ok(module_assets::ConfirmAssetObjectResult { record })
}
/// Binds a confirmed asset object to the character's primary-visual slot in
/// SpacetimeDB.
async fn bind_character_visual_asset(
    state: &AppState,
    owner_user_id: &str,
    character_id: &str,
    asset_object_id: String,
) -> Result<(), AppError> {
    let now_micros = current_utc_micros();
    state
        .spacetime_client()
        .bind_asset_object_to_entity(
            build_asset_entity_binding_input(
                generate_asset_binding_id(now_micros),
                asset_object_id,
                CHARACTER_VISUAL_ENTITY_KIND.to_string(),
                character_id.to_string(),
                CHARACTER_VISUAL_SLOT.to_string(),
                CHARACTER_VISUAL_ASSET_KIND.to_string(),
                Some(owner_user_id.to_string()),
                None,
                now_micros,
            )
            .map_err(map_asset_binding_prepare_error)?,
        )
        .await
        .map_err(map_character_visual_spacetime_error)?;
    Ok(())
}
/// Converts an AI task snapshot into the frontend job-status payload,
/// recovering prompt/model/characterId from the stored request JSON.
fn build_character_visual_job_payload(task: AiTaskSnapshot) -> CharacterAssetJobStatusPayload {
    // Both payloads are persisted as JSON strings; unparsable or missing
    // values degrade gracefully to defaults.
    let parse_json = |raw: Option<&str>| raw.and_then(|text| serde_json::from_str::<Value>(text).ok());
    let request_payload = parse_json(task.request_payload_json.as_deref()).unwrap_or_else(|| json!({}));
    let result = parse_json(task.latest_structured_payload_json.as_deref());
    let string_field = |key: &str, fallback: &str| {
        request_payload
            .get(key)
            .and_then(Value::as_str)
            .unwrap_or(fallback)
            .to_string()
    };
    // Collapse the task lifecycle into the coarser frontend status text.
    let status = match task.status {
        AiTaskStatus::Pending => CharacterAssetJobStatusText::Queued,
        AiTaskStatus::Running => CharacterAssetJobStatusText::Running,
        AiTaskStatus::Completed => CharacterAssetJobStatusText::Completed,
        AiTaskStatus::Failed | AiTaskStatus::Cancelled => CharacterAssetJobStatusText::Failed,
    };
    CharacterAssetJobStatusPayload {
        task_id: task.task_id,
        kind: "visual".to_string(),
        status,
        character_id: string_field("characterId", ""),
        animation: None,
        strategy: None,
        model: string_field("model", CHARACTER_VISUAL_MODEL),
        prompt: string_field("prompt", ""),
        created_at: format_utc_micros(task.created_at_micros),
        updated_at: format_utc_micros(task.updated_at_micros),
        result,
        error_message: task.failure_message,
    }
}
/// Validates that DashScope is configured, returning normalized settings.
///
/// Stage-2 image generation goes through DashScope; missing configuration is
/// rejected here, before any business work starts.
fn require_dashscope_settings(state: &AppState) -> Result<DashScopeSettings, AppError> {
    let unavailable = |reason: &str| {
        AppError::from_status(StatusCode::SERVICE_UNAVAILABLE).with_details(json!({
            "provider": "dashscope",
            "reason": reason,
        }))
    };
    // Trailing slashes are stripped so URL joins below stay clean.
    let base_url = state.config.dashscope_base_url.trim().trim_end_matches('/');
    if base_url.is_empty() {
        return Err(unavailable("DASHSCOPE_BASE_URL 未配置"));
    }
    let api_key = match state.config.dashscope_api_key.as_deref().map(str::trim) {
        Some(value) if !value.is_empty() => value,
        _ => return Err(unavailable("DASHSCOPE_API_KEY 未配置")),
    };
    Ok(DashScopeSettings {
        base_url: base_url.to_string(),
        api_key: api_key.to_string(),
        // Guard against a zero timeout from configuration.
        request_timeout_ms: state.config.dashscope_image_request_timeout_ms.max(1),
    })
}
/// Builds the HTTP client used for DashScope calls, applying the configured
/// per-request timeout.
fn build_dashscope_http_client(settings: &DashScopeSettings) -> Result<reqwest::Client, AppError> {
    let timeout = Duration::from_millis(settings.request_timeout_ms);
    match reqwest::Client::builder().timeout(timeout).build() {
        Ok(client) => Ok(client),
        Err(error) => Err(
            AppError::from_status(StatusCode::INTERNAL_SERVER_ERROR).with_details(json!({
                "provider": "dashscope",
                "message": format!("构造 DashScope HTTP 客户端失败:{error}"),
            })),
        ),
    }
}
/// Normalizes a caller-supplied reference image into a `data:` URL.
///
/// Accepts either an inline data URL (decoded and re-encoded so the base64
/// payload is canonical) or a legacy `/generated-*` OSS path, which is
/// fetched through a short-lived signed URL. Anything else is a 400.
async fn resolve_reference_image_as_data_url(
    state: &AppState,
    http_client: &reqwest::Client,
    source: &str,
    field: &str,
) -> Result<String, AppError> {
    let trimmed = source.trim();
    if trimmed.is_empty() {
        return Err(
            AppError::from_status(StatusCode::BAD_REQUEST).with_details(json!({
                "provider": "character-visual",
                "field": field,
                "message": "参考图不能为空。",
            })),
        );
    }
    // Inline data URL: re-encode its bytes into a fresh data URL.
    if let Some(parsed) = parse_image_data_url(trimmed) {
        return Ok(format!(
            "data:{};base64,{}",
            parsed.mime_type,
            BASE64_STANDARD.encode(parsed.bytes)
        ));
    }
    if !trimmed.starts_with('/') {
        return Err(
            AppError::from_status(StatusCode::BAD_REQUEST).with_details(json!({
                "provider": "character-visual",
                "field": field,
                "message": "参考图必须是 Data URL 或 /generated-* 旧路径。",
            })),
        );
    }
    let object_key = trimmed.trim_start_matches('/');
    // Only recognized legacy generated-asset prefixes may be referenced.
    if LegacyAssetPrefix::from_object_key(object_key).is_none() {
        return Err(
            AppError::from_status(StatusCode::BAD_REQUEST).with_details(json!({
                "provider": "character-visual",
                "field": field,
                "message": "参考图当前只支持 /generated-* 旧路径。",
            })),
        );
    }
    let oss_client = require_oss_client(state)?;
    let signed = oss_client
        .sign_get_object_url(OssSignedGetObjectUrlRequest {
            object_key: object_key.to_string(),
            expire_seconds: Some(60),
        })
        .map_err(map_character_visual_oss_error)?;
    let response = http_client
        .get(signed.signed_url)
        .send()
        .await
        .map_err(|error| {
            map_dashscope_request_error(format!("读取角色主形象参考图失败:{error}"))
        })?;
    let status = response.status();
    // assumes PNG when no usable content-type header is present — TODO confirm
    let content_type = response
        .headers()
        .get(reqwest::header::CONTENT_TYPE)
        .and_then(|value| value.to_str().ok())
        .unwrap_or("image/png")
        .to_string();
    // Read the body before checking the status code.
    let body = response.bytes().await.map_err(|error| {
        map_dashscope_request_error(format!("读取角色主形象参考图内容失败:{error}"))
    })?;
    if !status.is_success() {
        return Err(
            AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
                "provider": "aliyun-oss",
                "field": field,
                "message": format!("读取参考图失败,状态码:{status}"),
                "objectKey": object_key,
            })),
        );
    }
    if body.is_empty() {
        return Err(
            AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
                "provider": "aliyun-oss",
                "field": field,
                "message": "读取参考图失败:对象内容为空",
                "objectKey": object_key,
            })),
        );
    }
    Ok(format!(
        "data:{};base64,{}",
        content_type,
        BASE64_STANDARD.encode(body)
    ))
}
/// Submits an async DashScope image-generation task and polls it until it
/// succeeds, fails, or the deadline elapses; on success every returned image
/// URL is downloaded.
///
/// `reference_images` are appended to the user message as `image` content
/// entries (empty for text-to-image).
async fn create_character_visual_generation(
    http_client: &reqwest::Client,
    settings: &DashScopeSettings,
    model: &str,
    prompt: &str,
    size: &str,
    candidate_count: u32,
    reference_images: &[String],
) -> Result<GeneratedCharacterVisuals, AppError> {
    let mut content = vec![json!({ "text": prompt })];
    for image in reference_images {
        content.push(json!({ "image": image }));
    }
    // X-DashScope-Async makes the API return a task id instead of blocking.
    let response = http_client
        .post(format!(
            "{}/services/aigc/image-generation/generation",
            settings.base_url
        ))
        .header(
            reqwest::header::AUTHORIZATION,
            format!("Bearer {}", settings.api_key),
        )
        .header(reqwest::header::CONTENT_TYPE, "application/json")
        .header("X-DashScope-Async", "enable")
        .json(&json!({
            "model": model,
            "input": {
                "messages": [
                    {
                        "role": "user",
                        "content": content,
                    }
                ],
            },
            "parameters": {
                "n": candidate_count,
                "size": size,
                "negative_prompt": build_character_visual_negative_prompt(),
                "prompt_extend": true,
                "watermark": false,
            },
        }))
        .send()
        .await
        .map_err(|error| map_dashscope_request_error(format!("创建角色主形象任务失败:{error}")))?;
    let response_status = response.status();
    let response_text = response.text().await.map_err(|error| {
        map_dashscope_request_error(format!("读取角色主形象任务响应失败:{error}"))
    })?;
    if !response_status.is_success() {
        return Err(map_dashscope_upstream_error(
            response_text.as_str(),
            "创建角色主形象任务失败。",
        ));
    }
    let response_json = parse_json_payload(response_text.as_str(), "创建角色主形象任务失败。")?;
    let task_id = extract_task_id(&response_json.payload).ok_or_else(|| {
        AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
            "provider": "dashscope",
            "message": "角色主形象任务未返回 task_id",
        }))
    })?;
    // NOTE(review): the polling deadline reuses `request_timeout_ms`, which
    // is also the per-request HTTP timeout — confirm this budget is intended.
    let deadline = Instant::now() + Duration::from_millis(settings.request_timeout_ms);
    while Instant::now() < deadline {
        let poll_response = http_client
            .get(format!("{}/tasks/{}", settings.base_url, task_id))
            .header(
                reqwest::header::AUTHORIZATION,
                format!("Bearer {}", settings.api_key),
            )
            .send()
            .await
            .map_err(|error| {
                map_dashscope_request_error(format!("查询角色主形象任务失败:{error}"))
            })?;
        let poll_status = poll_response.status();
        let poll_text = poll_response.text().await.map_err(|error| {
            map_dashscope_request_error(format!("读取角色主形象任务状态失败:{error}"))
        })?;
        if !poll_status.is_success() {
            return Err(map_dashscope_upstream_error(
                poll_text.as_str(),
                "查询角色主形象任务失败。",
            ));
        }
        let poll_json = parse_json_payload(poll_text.as_str(), "查询角色主形象任务失败。")?;
        let task_status = find_first_string_by_key(&poll_json.payload, "task_status")
            .unwrap_or_default()
            .trim()
            .to_string();
        if task_status == "SUCCEEDED" {
            let image_urls = extract_image_urls(&poll_json.payload);
            if image_urls.is_empty() {
                return Err(
                    AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
                        "provider": "dashscope",
                        "message": "角色主形象生成成功,但没有返回可下载图片。",
                    })),
                );
            }
            // Download every candidate before reporting success.
            let mut images = Vec::with_capacity(image_urls.len());
            for image_url in image_urls {
                images.push(
                    download_generated_image(
                        http_client,
                        image_url.as_str(),
                        "下载角色主形象候选图失败。",
                    )
                    .await?,
                );
            }
            return Ok(GeneratedCharacterVisuals {
                task_id,
                actual_prompt: find_first_string_by_key(&poll_json.payload, "actual_prompt"),
                images,
            });
        }
        // Terminal failure states stop polling immediately.
        if matches!(task_status.as_str(), "FAILED" | "UNKNOWN" | "CANCELED") {
            return Err(map_dashscope_upstream_error(
                poll_text.as_str(),
                "角色主形象任务执行失败。",
            ));
        }
        sleep(Duration::from_millis(
            CHARACTER_VISUAL_TASK_POLL_INTERVAL_MS,
        ))
        .await;
    }
    Err(
        AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
            "provider": "dashscope",
            "message": "角色主形象任务执行超时,请稍后重试。",
        })),
    )
}
/// Downloads one generated image, normalizes its MIME type/extension, and —
/// for PNGs — optionally rewrites the bytes with background alpha applied.
async fn download_generated_image(
    http_client: &reqwest::Client,
    image_url: &str,
    fallback_message: &str,
) -> Result<DownloadedGeneratedImage, AppError> {
    let response = http_client
        .get(image_url)
        .send()
        .await
        .map_err(|error| map_dashscope_request_error(format!("{fallback_message}{error}")))?;
    let status = response.status();
    // assumes JPEG when no usable content-type header is present — TODO confirm
    let content_type = response
        .headers()
        .get(reqwest::header::CONTENT_TYPE)
        .and_then(|value| value.to_str().ok())
        .unwrap_or("image/jpeg")
        .to_string();
    // Read the body before checking the status code.
    let body = response
        .bytes()
        .await
        .map_err(|error| map_dashscope_request_error(format!("{fallback_message}{error}")))?;
    if !status.is_success() {
        return Err(
            AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
                "provider": "dashscope",
                "message": fallback_message,
                "status": status.as_u16(),
            })),
        );
    }
    let normalized_mime_type = normalize_downloaded_image_mime_type(content_type.as_str());
    let mut bytes = body.to_vec();
    let mut extension = mime_to_extension(normalized_mime_type.as_str()).to_string();
    let mut mime_type = normalized_mime_type;
    // PNGs get a best-effort background-removal pass; a `None` from the
    // helper (decode/encode failure) keeps the original bytes.
    if mime_type == "image/png"
        && let Some(optimized) = try_apply_background_alpha_to_png(bytes.as_slice())
    {
        bytes = optimized;
        extension = "png".to_string();
        mime_type = "image/png".to_string();
    }
    Ok(DownloadedGeneratedImage {
        bytes,
        mime_type,
        extension,
    })
}
/// Decodes a PNG, runs the shared background-removal pass over its RGBA
/// pixels, and re-encodes it as PNG.
///
/// Returns the original bytes unchanged when the pass reports `false`, and
/// `None` when decoding or encoding fails.
fn try_apply_background_alpha_to_png(source: &[u8]) -> Option<Vec<u8>> {
    let mut image = image::load_from_memory_with_format(source, ImageFormat::Png)
        .ok()?
        .to_rgba8();
    let (width, height) = image.dimensions();
    // `remove_background_from_rgba` mutates the pixel buffer in place;
    // presumably its bool says whether anything changed — confirm against
    // its definition.
    if !remove_background_from_rgba(image.as_mut(), width as usize, height as usize) {
        return Some(source.to_vec());
    }
    let mut encoded = Vec::new();
    let encoder = PngEncoder::new(&mut encoded);
    encoder
        .write_image(image.as_raw(), width, height, ColorType::Rgba8.into())
        .ok()?;
    Some(encoded)
}
/// Converts a legacy `/generated-*` public path into an OSS object key.
/// Data URLs are rejected: drafts must already live in OSS before publish.
fn resolve_object_key_from_legacy_path(value: &str) -> Result<String, AppError> {
    let bad_request = |message: &str| {
        AppError::from_status(StatusCode::BAD_REQUEST).with_details(json!({
            "provider": "character-visual",
            "message": message,
        }))
    };
    let trimmed = value.trim();
    if trimmed.is_empty() {
        Err(bad_request("selectedPreviewSource is required."))
    } else if trimmed.starts_with("data:") {
        Err(bad_request("Rust 版 publish 当前要求 selectedPreviewSource 为已写入 OSS 的 /generated-* 路径。"))
    } else {
        // Object keys have no leading slash.
        Ok(trimmed.trim_start_matches('/').to_string())
    }
}
/// Builds the OSS object metadata recorded with every character-visual
/// upload (kind, owner, entity, slot).
fn build_asset_metadata(
    asset_kind: &str,
    owner_user_id: &str,
    entity_kind: &str,
    entity_id: &str,
    slot: &str,
) -> BTreeMap<String, String> {
    let mut metadata = BTreeMap::new();
    metadata.insert("asset_kind".to_string(), asset_kind.to_string());
    metadata.insert("owner_user_id".to_string(), owner_user_id.to_string());
    metadata.insert("entity_kind".to_string(), entity_kind.to_string());
    metadata.insert("entity_id".to_string(), entity_id.to_string());
    metadata.insert("slot".to_string(), slot.to_string());
    metadata
}
/// Returns the configured OSS client, or a 503 when OSS env vars are absent.
fn require_oss_client(state: &AppState) -> Result<&platform_oss::OssClient, AppError> {
    match state.oss_client() {
        Some(client) => Ok(client),
        None => Err(
            AppError::from_status(StatusCode::SERVICE_UNAVAILABLE).with_details(json!({
                "provider": "aliyun-oss",
                "reason": "OSS 未完成环境变量配置",
            })),
        ),
    }
}
/// Collapses runs of whitespace to single spaces, truncates to 180 chars,
/// and substitutes `fallback` when the normalized text ends up empty.
///
/// Replaces the original's `.if_empty_then(fallback)` call — a non-standard
/// extension method with no supporting trait in this file's imports — with
/// explicit standard-library logic of the same intent.
fn normalize_required_text(value: &str, fallback: &str) -> String {
    // `split_whitespace` both trims the ends and collapses interior runs,
    // so no separate leading `trim` is needed.
    let normalized = value
        .split_whitespace()
        .collect::<Vec<_>>()
        .join(" ")
        .chars()
        .take(180) // length cap counted in chars, not bytes
        .collect::<String>()
        .trim() // truncation can leave a trailing space
        .to_string();
    if normalized.is_empty() {
        fallback.to_string()
    } else {
        normalized
    }
}
/// Sanitizes a value for use as an OSS path segment: lowercase ASCII
/// letters, digits, '-' and '_' pass through; everything else becomes '-',
/// dash runs are collapsed, and `fallback` covers the empty result.
fn sanitize_storage_segment(value: &str, fallback: &str) -> String {
    let trimmed = value.trim();
    let mut mapped = String::with_capacity(trimmed.len());
    for character in trimmed.chars() {
        match character {
            'a'..='z' | '0'..='9' | '-' | '_' => mapped.push(character),
            'A'..='Z' => mapped.push(character.to_ascii_lowercase()),
            _ => mapped.push('-'),
        }
    }
    let collapsed = collapse_dashes(&mapped);
    if collapsed.is_empty() {
        fallback.to_string()
    } else {
        collapsed
    }
}
/// Squeezes runs of '-' down to a single dash, then strips leading and
/// trailing dashes.
fn collapse_dashes(value: &str) -> String {
    let mut collapsed = String::with_capacity(value.len());
    let mut previous_was_dash = false;
    for character in value.chars() {
        let is_dash = character == '-';
        // Skip a dash that directly follows another dash.
        if !(is_dash && previous_was_dash) {
            collapsed.push(character);
        }
        previous_was_dash = is_dash;
    }
    collapsed.trim_matches('-').to_string()
}
/// Parses a "W*H" size string into (width, height); a missing, invalid, or
/// zero dimension falls back to 1024.
fn parse_size(size: &str) -> (u32, u32) {
    let mut dimensions = size.split('*').map(|part| {
        part.trim()
            .parse::<u32>()
            .ok()
            .filter(|parsed| *parsed > 0)
            .unwrap_or(1024)
    });
    // Extra "*"-separated parts beyond the first two are ignored.
    let width = dimensions.next().unwrap_or(1024);
    let height = dimensions.next().unwrap_or(1024);
    (width, height)
}
/// Formats a microsecond UTC timestamp using the shared runtime formatter.
fn format_utc_micros(micros: i64) -> String {
    module_runtime::format_utc_micros(micros)
}
/// Current Unix time in milliseconds, derived from the microsecond clock.
fn current_utc_millis() -> i64 {
    current_utc_micros() / 1_000
}
/// Current Unix time in microseconds.
///
/// Panics only when the host clock sits before the Unix epoch or past the
/// `i64` microsecond range — both treated as unrecoverable bugs.
fn current_utc_micros() -> i64 {
    use std::time::{SystemTime, UNIX_EPOCH};
    let since_epoch = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .expect("system clock should be after unix epoch");
    i64::try_from(since_epoch.as_micros()).expect("current unix micros should fit in i64")
}
/// Translates AI-task service errors into HTTP statuses wrapped in the
/// shared error envelope.
fn map_ai_task_error(error: AiTaskServiceError) -> AppError {
    let status = match &error {
        AiTaskServiceError::TaskNotFound => StatusCode::NOT_FOUND,
        AiTaskServiceError::TaskAlreadyExists => StatusCode::CONFLICT,
        AiTaskServiceError::Field(_) | AiTaskServiceError::StageNotFound => StatusCode::BAD_REQUEST,
        AiTaskServiceError::Store(_) => StatusCode::INTERNAL_SERVER_ERROR,
    };
    let details = json!({
        "provider": "ai-task",
        "message": error.to_string(),
    });
    AppError::from_status(status).with_details(details)
}
/// Invalid asset-object field input counts as a caller error (400).
fn map_asset_object_prepare_error(error: AssetObjectFieldError) -> AppError {
    let details = json!({
        "provider": "asset-object",
        "message": error.to_string(),
    });
    AppError::from_status(StatusCode::BAD_REQUEST).with_details(details)
}
/// Invalid asset-binding field input counts as a caller error (400).
fn map_asset_binding_prepare_error(error: AssetObjectFieldError) -> AppError {
    let details = json!({
        "provider": "asset-entity-binding",
        "message": error.to_string(),
    });
    AppError::from_status(StatusCode::BAD_REQUEST).with_details(details)
}
/// SpacetimeDB failures surface as upstream (502) errors.
fn map_character_visual_spacetime_error(error: SpacetimeClientError) -> AppError {
    let details = json!({
        "provider": "spacetimedb",
        "message": error.to_string(),
    });
    AppError::from_status(StatusCode::BAD_GATEWAY).with_details(details)
}
fn map_character_visual_oss_error(error: platform_oss::OssError) -> AppError {
let status = match error {
platform_oss::OssError::InvalidConfig(_) | platform_oss::OssError::InvalidRequest(_) => {
StatusCode::BAD_REQUEST
}
platform_oss::OssError::ObjectNotFound(_) => StatusCode::NOT_FOUND,
platform_oss::OssError::Request(_)
| platform_oss::OssError::SerializePolicy(_)
| platform_oss::OssError::Sign(_) => StatusCode::BAD_GATEWAY,
};
AppError::from_status(status).with_details(json!({
"provider": "aliyun-oss",
"message": error.to_string(),
}))
}
/// Parses upstream response text as JSON; parse failures become 502s whose
/// message leads with `fallback_message`.
fn parse_json_payload(
    raw_text: &str,
    fallback_message: &str,
) -> Result<ParsedJsonPayload, AppError> {
    match serde_json::from_str::<Value>(raw_text) {
        Ok(payload) => Ok(ParsedJsonPayload { payload }),
        Err(error) => Err(
            AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
                "provider": "dashscope",
                "message": format!("{fallback_message}:解析响应失败:{error}"),
            })),
        ),
    }
}
/// Extracts a human-readable error message from an upstream response body.
///
/// Preference order: `/error/message`, top-level `message`, `/error/code`,
/// top-level `code` (codes are appended to `fallback_message`). A blank
/// body yields `fallback_message`; a non-JSON body is returned trimmed.
/// The four near-identical extraction chains of the original are collapsed
/// into one `non_empty_text` helper.
fn parse_api_error_message(raw_text: &str, fallback_message: &str) -> String {
    // Extracts a non-empty, trimmed string from an optional JSON node.
    fn non_empty_text(node: Option<&Value>) -> Option<&str> {
        node.and_then(Value::as_str)
            .map(str::trim)
            .filter(|text| !text.is_empty())
    }
    let trimmed = raw_text.trim();
    if trimmed.is_empty() {
        return fallback_message.to_string();
    }
    if let Ok(parsed) = serde_json::from_str::<Value>(raw_text) {
        // Prefer an explicit error message (nested, then top-level)…
        if let Some(message) = non_empty_text(parsed.pointer("/error/message"))
            .or_else(|| non_empty_text(parsed.get("message")))
        {
            return message.to_string();
        }
        // …otherwise fall back to an error code appended to our message.
        if let Some(code) = non_empty_text(parsed.pointer("/error/code"))
            .or_else(|| non_empty_text(parsed.get("code")))
        {
            return format!("{fallback_message}{code}");
        }
    }
    trimmed.to_string()
}
/// Wrap a transport-level DashScope failure message in a 502 API error.
fn map_dashscope_request_error(message: String) -> AppError {
    let details = json!({
        "provider": "dashscope",
        "message": message,
    });
    AppError::from_status(StatusCode::BAD_GATEWAY).with_details(details)
}
fn map_dashscope_upstream_error(raw_text: &str, fallback_message: &str) -> AppError {
AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
"provider": "dashscope",
"message": parse_api_error_message(raw_text, fallback_message),
}))
}
/// Depth-first walk of a JSON tree, collecting every trimmed non-empty string
/// stored under `target_key`, in document order.
///
/// A matching key whose value is not a non-empty string is still descended
/// into, so nested matches under a same-named key are not lost.
fn collect_strings_by_key(value: &Value, target_key: &str, results: &mut Vec<String>) {
    match value {
        Value::Array(entries) => {
            entries
                .iter()
                .for_each(|entry| collect_strings_by_key(entry, target_key, results));
        }
        Value::Object(object) => {
            for (key, nested_value) in object {
                let matched = if key == target_key {
                    nested_value
                        .as_str()
                        .map(str::trim)
                        .filter(|text| !text.is_empty())
                } else {
                    None
                };
                match matched {
                    Some(text) => results.push(text.to_string()),
                    None => collect_strings_by_key(nested_value, target_key, results),
                }
            }
        }
        _ => {}
    }
}
fn find_first_string_by_key(value: &Value, target_key: &str) -> Option<String> {
let mut results = Vec::new();
collect_strings_by_key(value, target_key, &mut results);
results.into_iter().next()
}
/// Locate the DashScope `task_id` anywhere in the response payload.
fn extract_task_id(payload: &Value) -> Option<String> {
    const TASK_ID_KEY: &str = "task_id";
    find_first_string_by_key(payload, TASK_ID_KEY)
}
fn extract_image_urls(payload: &Value) -> Vec<String> {
let mut urls = Vec::new();
collect_strings_by_key(payload, "image", &mut urls);
collect_strings_by_key(payload, "url", &mut urls);
let mut deduped = Vec::new();
for url in urls {
if !deduped.contains(&url) {
deduped.push(url);
}
}
deduped
}
/// Normalize a downloaded image's `Content-Type` value to a supported MIME type.
///
/// Strips any parameters (e.g. `; charset=...`), trims whitespace, and
/// compares case-insensitively — MIME types are case-insensitive per
/// RFC 2045, so `IMAGE/PNG` now normalizes to `image/png` instead of
/// silently falling back. Anything outside the supported set becomes
/// `image/jpeg`.
fn normalize_downloaded_image_mime_type(content_type: &str) -> String {
    let mime_type = content_type
        .split(';')
        .next()
        .map(str::trim)
        .unwrap_or("image/jpeg")
        .to_ascii_lowercase();
    match mime_type.as_str() {
        "image/png" | "image/webp" | "image/jpeg" | "image/jpg" | "image/gif" => mime_type,
        _ => "image/jpeg".to_string(),
    }
}
/// Map a normalized image MIME type to its file extension.
///
/// Unknown types — and the jpeg family — fall back to `jpg`.
fn mime_to_extension(mime_type: &str) -> &str {
    if mime_type == "image/png" {
        "png"
    } else if mime_type == "image/webp" {
        "webp"
    } else if mime_type == "image/gif" {
        "gif"
    } else {
        "jpg"
    }
}
/// Parse a `data:image/...;base64,...` URL into its MIME type and raw bytes.
///
/// Returns `None` for non-image MIME types, malformed data URLs, undecodable
/// base64, or empty payloads.
fn parse_image_data_url(value: &str) -> Option<ParsedImageDataUrl> {
    let trimmed = value.trim();
    let body = trimmed.strip_prefix("data:")?;
    let (mime_type, encoded) = body.split_once(";base64,")?;
    if !mime_type.starts_with("image/") {
        return None;
    }
    let bytes = decode_base64(encoded)?;
    if bytes.is_empty() {
        return None;
    }
    Some(ParsedImageDataUrl {
        mime_type: mime_type.to_string(),
        bytes,
    })
}
/// Lenient standard-alphabet base64 decoder.
///
/// Whitespace anywhere in the input is ignored, decoding stops at the first
/// `=` (anything after it is never examined), and any other character outside
/// the standard alphabet yields `None`. Trailing bits that do not fill a full
/// byte are silently discarded, so unpadded input is accepted.
fn decode_base64(value: &str) -> Option<Vec<u8>> {
    // Map one alphabet character to its 6-bit value.
    fn sextet(byte: u8) -> Option<u32> {
        match byte {
            b'A'..=b'Z' => Some((byte - b'A') as u32),
            b'a'..=b'z' => Some((byte - b'a' + 26) as u32),
            b'0'..=b'9' => Some((byte - b'0' + 52) as u32),
            b'+' => Some(62),
            b'/' => Some(63),
            _ => None,
        }
    }
    let compact = value.trim().replace(char::is_whitespace, "");
    let mut decoded = Vec::with_capacity(compact.len() * 3 / 4);
    let mut accumulator = 0u32;
    let mut pending_bits = 0u8;
    for byte in compact.bytes() {
        if byte == b'=' {
            break;
        }
        accumulator = (accumulator << 6) | sextet(byte)?;
        pending_bits += 6;
        // At most 12 bits are ever pending, so one flush suffices.
        if pending_bits >= 8 {
            pending_bits -= 8;
            decoded.push(((accumulator >> pending_bits) & 0xFF) as u8);
        }
    }
    Some(decoded)
}
/// Clamp a value into the closed unit interval [0.0, 1.0].
fn clamp01(value: f32) -> f32 {
    f32::clamp(value, 0.0, 1.0)
}
/// Linearly interpolate between `from` and `to`; `t` is clamped to [0, 1]
/// before use.
fn lerp(from: f32, to: f32, t: f32) -> f32 {
    let factor = clamp01(t);
    from + (to - from) * factor
}
/// Score how strongly a pixel looks like chroma-key green background,
/// in `[0.0, 1.0]`.
///
/// Fully transparent pixels count as background (1.0). A pixel scores 0.0
/// unless its green channel is reasonably strong, clearly leads red/blue,
/// and dominates the red+blue sum; otherwise the score is a weighted blend
/// of green strength, green lead, and green-to-(red+blue) ratio.
fn compute_green_background_score(red: u8, green: u8, blue: u8, alpha: u8) -> f32 {
    if alpha == 0 {
        return 1.0;
    }
    let (red, green, blue) = (red as f32, green as f32, blue as f32);
    let green_lead = green - red.max(blue);
    if green < 52.0 || green_lead <= 8.0 {
        return 0.0;
    }
    // `.max(1.0)` guards the division when red and blue are both near zero.
    let green_ratio = green / (red + blue).max(1.0);
    if green_ratio <= 0.52 {
        return 0.0;
    }
    let strength_term = ((green - 52.0) / 168.0) * 0.22;
    let lead_term = ((green_lead - 8.0) / 96.0) * 0.53;
    let ratio_term = ((green_ratio - 0.52) / 0.82) * 0.25;
    clamp01(strength_term + lead_term + ratio_term)
}
/// Score how strongly a pixel looks like a white / near-white background,
/// in `[0.0, 1.0]`.
///
/// Fully transparent pixels count as background (1.0). Dark or strongly
/// saturated pixels score 0.0; otherwise the score combines channel
/// neutrality (low spread between channels), overall brightness, and the
/// brightness of the darkest channel.
fn compute_white_background_score(red: u8, green: u8, blue: u8, alpha: u8) -> f32 {
    if alpha == 0 {
        return 1.0;
    }
    let (red, green, blue) = (red as f32, green as f32, blue as f32);
    let max_channel = red.max(green).max(blue);
    let min_channel = red.min(green).min(blue);
    let average = (red + green + blue) / 3.0;
    if average < 188.0 || min_channel < 168.0 {
        return 0.0;
    }
    let spread = max_channel - min_channel;
    let neutrality = 1.0 - clamp01((spread - 6.0) / 34.0);
    let brightness = clamp01((average - 188.0) / 55.0);
    let floor = clamp01((min_channel - 168.0) / 60.0);
    clamp01(neutrality * (brightness * 0.85 + floor * 0.15))
}
/// Weighted average color of nearby confident-foreground pixels.
///
/// Scans the 5x5 neighborhood around `(x, y)` (excluding the center pixel),
/// skipping samples that are masked as background, hinted as background
/// (`hint >= 0.18`), or mostly transparent (`alpha < 96`). Closer and more
/// opaque neighbors carry larger weights. Returns `None` when no usable
/// neighbor exists.
fn collect_foreground_neighbor_color(
    pixels: &[u8],
    width: usize,
    height: usize,
    x: usize,
    y: usize,
    background_mask: &[u8],
    background_hints: &[f32],
) -> Option<(u8, u8, u8)> {
    let mut weight_sum = 0.0f32;
    let mut red_sum = 0.0f32;
    let mut green_sum = 0.0f32;
    let mut blue_sum = 0.0f32;
    for delta_y in -2i32..=2 {
        for delta_x in -2i32..=2 {
            if delta_x == 0 && delta_y == 0 {
                continue;
            }
            let sample_x = x as i32 + delta_x;
            let sample_y = y as i32 + delta_y;
            let in_bounds = sample_x >= 0
                && sample_x < width as i32
                && sample_y >= 0
                && sample_y < height as i32;
            if !in_bounds {
                continue;
            }
            let sample_index = sample_y as usize * width + sample_x as usize;
            if background_mask[sample_index] != 0 || background_hints[sample_index] >= 0.18 {
                continue;
            }
            let sample_offset = sample_index * 4;
            let sample_alpha = pixels[sample_offset + 3];
            if sample_alpha < 96 {
                continue;
            }
            // Manhattan distance drives the proximity weight.
            let manhattan = delta_x.unsigned_abs() + delta_y.unsigned_abs();
            let proximity = match manhattan {
                0 | 1 => 1.8,
                2 => 1.2,
                _ => 0.7,
            };
            let weight = (sample_alpha as f32 / 255.0) * proximity;
            weight_sum += weight;
            red_sum += pixels[sample_offset] as f32 * weight;
            green_sum += pixels[sample_offset + 1] as f32 * weight;
            blue_sum += pixels[sample_offset + 2] as f32 * weight;
        }
    }
    if weight_sum <= 0.0 {
        return None;
    }
    Some((
        (red_sum / weight_sum).round() as u8,
        (green_sum / weight_sum).round() as u8,
        (blue_sum / weight_sum).round() as u8,
    ))
}
/// Heuristically strip chroma-green / white studio backgrounds from a tightly
/// packed RGBA buffer (4 bytes per pixel, row-major, `width * height` pixels)
/// in place.
///
/// The pass runs in phases:
/// 1. score every pixel for green-screen / white-background likeness;
/// 2. flood-fill a background mask inward from the image borders;
/// 3. dilate the mask twice so soft anti-aliased edges are absorbed;
/// 4. fade masked pixels toward transparency;
/// 5. de-spill: blend foreground edge colors toward nearby clean foreground
///    to remove green/white fringes.
///
/// Returns `true` when any pixel byte was modified.
pub(crate) fn remove_background_from_rgba(pixels: &mut [u8], width: usize, height: usize) -> bool {
    // Pixels at or above this alpha are considered solid and never treated
    // as soft edge; below it they may be absorbed into the background mask.
    const SOFT_EDGE_ALPHA_THRESHOLD: u8 = 224;
    // Minimum alpha for a neighbor to count as supporting foreground.
    const FOREGROUND_NEIGHBOR_ALPHA_THRESHOLD: u8 = 96;
    let pixel_count = width * height;
    if pixel_count == 0 {
        return false;
    }
    // Phase 1: per-pixel background-likeness scores. `background_hints` is the
    // max of the green score, the white score, and a near-transparency hint.
    let mut background_mask = vec![0u8; pixel_count];
    let mut green_scores = vec![0.0f32; pixel_count];
    let mut white_scores = vec![0.0f32; pixel_count];
    let mut background_hints = vec![0.0f32; pixel_count];
    let mut queue = Vec::<usize>::new();
    let mut queue_index = 0usize;
    let mut changed = false;
    for pixel_index in 0..pixel_count {
        let offset = pixel_index * 4;
        let red = pixels[offset];
        let green = pixels[offset + 1];
        let blue = pixels[offset + 2];
        let alpha = pixels[offset + 3];
        let green_score = compute_green_background_score(red, green, blue, alpha);
        let white_score = compute_white_background_score(red, green, blue, alpha);
        let transparency_hint = clamp01((56.0 - alpha as f32) / 56.0) * 0.75;
        green_scores[pixel_index] = green_score;
        white_scores[pixel_index] = white_score;
        background_hints[pixel_index] = green_score.max(white_score).max(transparency_hint);
    }
    // Phase 2: seed the flood fill from border pixels that look strongly like
    // background (near-transparent, clearly green, or clearly white).
    let try_seed_background =
        |pixel_index: usize, background_mask: &mut [u8], queue: &mut Vec<usize>| {
            if background_mask[pixel_index] != 0 {
                return;
            }
            let offset = pixel_index * 4;
            let alpha = pixels[offset + 3];
            let strong_candidate =
                alpha < 40 || green_scores[pixel_index] > 0.12 || white_scores[pixel_index] > 0.32;
            if !strong_candidate {
                return;
            }
            background_mask[pixel_index] = 1;
            queue.push(pixel_index);
        };
    // Top and bottom rows, then the left/right columns between them
    // (corners are already covered by the row pass).
    for x in 0..width {
        try_seed_background(x, &mut background_mask, &mut queue);
        try_seed_background((height - 1) * width + x, &mut background_mask, &mut queue);
    }
    for y in 1..height.saturating_sub(1) {
        try_seed_background(y * width, &mut background_mask, &mut queue);
        try_seed_background(y * width + width - 1, &mut background_mask, &mut queue);
    }
    // BFS over 4-connected neighbors. Soft (partially transparent / weakly
    // tinted) edge pixels are absorbed only when reachable from a pixel that
    // is already masked as background, so interior greens/whites survive.
    while queue_index < queue.len() {
        let pixel_index = queue[queue_index];
        queue_index += 1;
        let x = pixel_index % width;
        let y = pixel_index / width;
        let neighbor_indexes = [
            if x > 0 { Some(pixel_index - 1) } else { None },
            if x + 1 < width {
                Some(pixel_index + 1)
            } else {
                None
            },
            if y > 0 {
                Some(pixel_index - width)
            } else {
                None
            },
            if y + 1 < height {
                Some(pixel_index + width)
            } else {
                None
            },
        ];
        for next_pixel_index in neighbor_indexes.into_iter().flatten() {
            if background_mask[next_pixel_index] != 0 {
                continue;
            }
            let next_offset = next_pixel_index * 4;
            let next_alpha = pixels[next_offset + 3];
            let next_green_score = green_scores[next_pixel_index];
            let next_white_score = white_scores[next_pixel_index];
            let next_hint = background_hints[next_pixel_index];
            let reachable_soft_edge = next_hint > 0.08
                && next_alpha < SOFT_EDGE_ALPHA_THRESHOLD
                && (next_green_score > 0.04 || next_white_score > 0.08 || next_alpha < 180);
            if next_alpha < 40
                || next_green_score > 0.12
                || next_white_score > 0.32
                || reachable_soft_edge
            {
                background_mask[next_pixel_index] = 1;
                queue.push(next_pixel_index);
            }
        }
    }
    // Phase 3: two dilation passes. A soft, hinted pixel joins the mask when
    // enough of its 8-neighbors are already background (or one neighbor plus
    // a strong hint).
    for _ in 0..2 {
        let mut expanded_mask = background_mask.clone();
        for y in 0..height {
            for x in 0..width {
                let pixel_index = y * width + x;
                if expanded_mask[pixel_index] != 0 {
                    continue;
                }
                let alpha = pixels[pixel_index * 4 + 3];
                let hint = background_hints[pixel_index];
                if alpha >= SOFT_EDGE_ALPHA_THRESHOLD || hint <= 0.06 {
                    continue;
                }
                let mut adjacent_background_count = 0usize;
                for offset_y in -1i32..=1 {
                    for offset_x in -1i32..=1 {
                        if offset_x == 0 && offset_y == 0 {
                            continue;
                        }
                        let next_x = x as i32 + offset_x;
                        let next_y = y as i32 + offset_y;
                        if next_x < 0
                            || next_x >= width as i32
                            || next_y < 0
                            || next_y >= height as i32
                        {
                            continue;
                        }
                        if background_mask[next_y as usize * width + next_x as usize] != 0 {
                            adjacent_background_count += 1;
                        }
                    }
                }
                if adjacent_background_count >= 2 || (adjacent_background_count >= 1 && hint > 0.18)
                {
                    expanded_mask[pixel_index] = 1;
                }
            }
        }
        background_mask = expanded_mask;
    }
    // Phase 4: fade masked pixels. Strong mattes or pixels with no foreground
    // support go fully transparent; weaker mattes keep a small residual alpha,
    // with extra retention when well supported by foreground neighbors.
    for y in 0..height {
        for x in 0..width {
            let pixel_index = y * width + x;
            if background_mask[pixel_index] == 0 {
                continue;
            }
            let offset = pixel_index * 4;
            let alpha = pixels[offset + 3];
            if alpha == 0 {
                continue;
            }
            let matte_score = background_hints[pixel_index]
                .max(green_scores[pixel_index])
                .max(white_scores[pixel_index]);
            // Count non-masked, sufficiently opaque 8-neighbors.
            let mut foreground_support = 0usize;
            for offset_y in -1i32..=1 {
                for offset_x in -1i32..=1 {
                    if offset_x == 0 && offset_y == 0 {
                        continue;
                    }
                    let next_x = x as i32 + offset_x;
                    let next_y = y as i32 + offset_y;
                    if next_x < 0 || next_x >= width as i32 || next_y < 0 || next_y >= height as i32
                    {
                        continue;
                    }
                    let next_pixel_index = next_y as usize * width + next_x as usize;
                    if background_mask[next_pixel_index] != 0 {
                        continue;
                    }
                    let next_alpha = pixels[next_pixel_index * 4 + 3];
                    if next_alpha >= FOREGROUND_NEIGHBOR_ALPHA_THRESHOLD {
                        foreground_support += 1;
                    }
                }
            }
            let next_alpha = if matte_score > 0.9 || foreground_support == 0 {
                0
            } else if matte_score > 0.72 && foreground_support <= 1 {
                ((alpha as f32) * 0.08).round() as u8
            } else {
                ((alpha as f32) * (0.08f32.max(1.0 - matte_score * 0.95))).round() as u8
            };
            let mut next_alpha = next_alpha;
            if foreground_support >= 3 && matte_score < 0.55 {
                next_alpha = next_alpha.max(((alpha as f32) * 0.22).round() as u8);
            }
            // Snap very low alphas to fully transparent to avoid ghost specks.
            if next_alpha < 10 {
                next_alpha = 0;
            }
            if next_alpha != alpha {
                pixels[offset + 3] = next_alpha;
                changed = true;
            }
        }
    }
    // Phase 5: de-spill. Foreground pixels touching transparency, the image
    // border, or the mask get their color pulled toward nearby clean
    // foreground, with green/white casts clamped and a slight alpha fade
    // where the cast is strong.
    for y in 0..height {
        for x in 0..width {
            let pixel_index = y * width + x;
            let offset = pixel_index * 4;
            let alpha = pixels[offset + 3];
            if alpha == 0 {
                continue;
            }
            // Does this pixel sit on the visible silhouette edge?
            let mut touches_transparent_edge = false;
            for offset_y in -1i32..=1 {
                for offset_x in -1i32..=1 {
                    if offset_x == 0 && offset_y == 0 {
                        continue;
                    }
                    let next_x = x as i32 + offset_x;
                    let next_y = y as i32 + offset_y;
                    if next_x < 0 || next_x >= width as i32 || next_y < 0 || next_y >= height as i32
                    {
                        touches_transparent_edge = true;
                        continue;
                    }
                    let next_pixel_index = next_y as usize * width + next_x as usize;
                    if background_mask[next_pixel_index] != 0
                        || pixels[next_pixel_index * 4 + 3] < 16
                    {
                        touches_transparent_edge = true;
                    }
                }
            }
            if !touches_transparent_edge {
                continue;
            }
            // Estimate how much background color has bled into this pixel.
            let green_score = green_scores[pixel_index];
            let white_score = white_scores[pixel_index];
            let contamination = green_score
                .max(white_score)
                .max(if background_mask[pixel_index] != 0 {
                    0.35
                } else {
                    0.0
                })
                .max(if alpha < 220 {
                    ((220 - alpha) as f32 / 220.0) * 0.25
                } else {
                    0.0
                });
            if contamination < 0.06 {
                continue;
            }
            let mut red = pixels[offset] as f32;
            let mut green = pixels[offset + 1] as f32;
            let mut blue = pixels[offset + 2] as f32;
            let sample = collect_foreground_neighbor_color(
                pixels,
                width,
                height,
                x,
                y,
                &background_mask,
                &background_hints,
            );
            let blend =
                clamp01(contamination.max(if touches_transparent_edge { 0.22 } else { 0.0 }));
            if let Some((sample_red, sample_green, sample_blue)) = sample {
                // Blend toward the sampled clean-foreground color, then cap
                // the channel that carries the detected cast.
                red = lerp(red, sample_red as f32, blend);
                green = lerp(green, sample_green as f32, blend);
                blue = lerp(blue, sample_blue as f32, blend);
                if green_score > 0.04 {
                    green = green.min(sample_green as f32 + 18.0);
                }
                if white_score > 0.1 {
                    red = red.min(sample_red as f32 + 26.0);
                    green = green.min(sample_green as f32 + 26.0);
                    blue = blue.min(sample_blue as f32 + 26.0);
                }
            } else {
                // No clean neighbor available: suppress the cast in place.
                if green_score > 0.04 {
                    green = green
                        .max(red.max(blue))
                        .max((green - (green - red.max(blue)) * 0.78).round());
                }
                if white_score > 0.12 {
                    let spread = red.max(green).max(blue) - red.min(green).min(blue);
                    if spread < 20.0 {
                        let toned_value = ((red + green + blue) / 3.0 * 0.88).round();
                        red = red.min(toned_value);
                        green = green.min(toned_value);
                        blue = blue.min(toned_value);
                    }
                }
            }
            let mut next_alpha = alpha;
            let edge_fade = (green_score * 0.35).max(white_score * 0.28);
            if edge_fade > 0.08 {
                next_alpha = ((alpha as f32) * (1.0 - edge_fade)).round() as u8;
                if next_alpha < 10 {
                    next_alpha = 0;
                }
            }
            let next_red = red.round() as u8;
            let next_green = green.round() as u8;
            let next_blue = blue.round() as u8;
            if next_red != pixels[offset]
                || next_green != pixels[offset + 1]
                || next_blue != pixels[offset + 2]
                || next_alpha != alpha
            {
                pixels[offset] = next_red;
                pixels[offset + 1] = next_green;
                pixels[offset + 2] = next_blue;
                pixels[offset + 3] = next_alpha;
                changed = true;
            }
        }
    }
    changed
}
/// Render an `AppError` as an HTTP response that carries the request context.
fn character_visual_error_response(request_context: &RequestContext, error: AppError) -> Response {
    let context = Some(request_context);
    error.into_response_with_context(context)
}
/// Substitute a fallback when the receiver is empty.
trait EmptyFallback {
    /// Return `self` unless it is empty, in which case return `fallback`
    /// as an owned `String`.
    fn if_empty_then(self, fallback: &str) -> String;
}
impl EmptyFallback for String {
    fn if_empty_then(self, fallback: &str) -> String {
        match self.is_empty() {
            true => fallback.to_string(),
            false => self,
        }
    }
}
/// Connection settings for calling the DashScope API.
struct DashScopeSettings {
    // Endpoint base URL requests are issued against.
    base_url: String,
    // API credential; presumably sent as a bearer token — confirm at call site.
    api_key: String,
    // Request timeout, in milliseconds.
    request_timeout_ms: u64,
}
/// Outcome of a character visual generation run: the upstream task id, the
/// prompt the provider reports having actually used (when available), and
/// the downloaded images.
struct GeneratedCharacterVisuals {
    task_id: String,
    actual_prompt: Option<String>,
    images: Vec<DownloadedGeneratedImage>,
}
/// One generated image fetched from the provider, with its normalized MIME
/// type and the file extension derived from that MIME type.
struct DownloadedGeneratedImage {
    bytes: Vec<u8>,
    mime_type: String,
    extension: String,
}
/// Wrapper around a JSON response body produced by `parse_json_payload`.
struct ParsedJsonPayload {
    payload: Value,
}
/// MIME type and decoded bytes extracted from a `data:` image URL by
/// `parse_image_data_url`.
struct ParsedImageDataUrl {
    mime_type: String,
    bytes: Vec<u8>,
}
#[cfg(test)]
mod tests {
    use super::*;
    // Pins the key generation constraints in the (Chinese-language) prompt:
    // the character name plus the pose and chroma-green background directives.
    #[test]
    fn build_character_visual_prompt_keeps_generation_constraints() {
        let prompt = build_character_visual_prompt("潮雾港向导", Some("旧港守望者"));
        assert!(prompt.contains("潮雾港向导"));
        assert!(prompt.contains("右向斜侧身"));
        assert!(prompt.contains("纯绿色背景"));
    }
    // Storage segments stay lowercase ASCII with hyphens; per this fixture,
    // non-ASCII characters are dropped rather than transliterated.
    #[test]
    fn sanitize_storage_segment_keeps_legacy_safe_shape() {
        assert_eq!(
            sanitize_storage_segment("Harbor Guide/潮雾", "character"),
            "harbor-guide"
        );
    }
}