1798 lines
59 KiB
Rust
1798 lines
59 KiB
Rust
use std::collections::BTreeMap;
|
||
|
||
use axum::{
|
||
Json,
|
||
extract::{Extension, Path, State, rejection::JsonRejection},
|
||
http::StatusCode,
|
||
response::Response,
|
||
};
|
||
use base64::{Engine as _, engine::general_purpose::STANDARD as BASE64_STANDARD};
|
||
use image::{ColorType, ImageEncoder, ImageFormat, codecs::png::PngEncoder};
|
||
use module_ai::{
|
||
AiResultReferenceKind, AiStageCompletionInput, AiTaskCreateInput, AiTaskKind,
|
||
AiTaskServiceError, AiTaskSnapshot, AiTaskStageKind, AiTaskStatus, generate_ai_task_id,
|
||
};
|
||
use module_assets::{
|
||
AssetObjectAccessPolicy, AssetObjectFieldError, build_asset_entity_binding_input,
|
||
build_asset_object_upsert_input, generate_asset_binding_id, generate_asset_object_id,
|
||
};
|
||
use platform_oss::{
|
||
LegacyAssetPrefix, OssHeadObjectRequest, OssObjectAccess, OssPutObjectRequest,
|
||
OssSignedGetObjectUrlRequest,
|
||
};
|
||
use serde_json::{Value, json};
|
||
use shared_contracts::assets::{
|
||
CharacterAssetJobStatusPayload, CharacterAssetJobStatusText, CharacterVisualDraftPayload,
|
||
CharacterVisualGenerateRequest, CharacterVisualGenerateResponse, CharacterVisualPublishRequest,
|
||
CharacterVisualPublishResponse,
|
||
};
|
||
use spacetime_client::SpacetimeClientError;
|
||
|
||
use crate::{
|
||
api_response::json_success_body,
|
||
custom_world_asset_prompts::{
|
||
build_character_visual_negative_prompt, build_character_visual_prompt,
|
||
build_fallback_moderation_safe_character_visual_prompt,
|
||
},
|
||
http_error::AppError,
|
||
openai_image_generation::{
|
||
DownloadedOpenAiImage, GPT_IMAGE_2_MODEL, OpenAiImageSettings,
|
||
build_openai_image_http_client, create_openai_image_generation,
|
||
require_openai_image_settings,
|
||
},
|
||
platform_errors::map_oss_error,
|
||
request_context::RequestContext,
|
||
state::AppState,
|
||
};
|
||
|
||
// Image model used for all character primary visuals; any other requested
// model id is normalized to this in resolve_character_visual_model.
const CHARACTER_VISUAL_MODEL: &str = GPT_IMAGE_2_MODEL;
// Asset taxonomy identifiers used in OSS metadata, asset-object records and
// entity bindings for the character primary visual.
const CHARACTER_VISUAL_ASSET_KIND: &str = "character_visual";
const CHARACTER_VISUAL_ENTITY_KIND: &str = "character";
const CHARACTER_VISUAL_SLOT: &str = "primary_visual";
// Total generation attempts when upstream content moderation rejects the
// prompt: one with the original prompt, one with the moderation-safe fallback.
const CHARACTER_VISUAL_MODERATION_FALLBACK_MAX_ATTEMPTS: u8 = 2;
|
||
|
||
/// Result of generating and immediately publishing a character's primary
/// visual in one shot (used by the profile-generation flow).
#[derive(Clone, Debug)]
pub(crate) struct GeneratedCharacterPrimaryVisual {
    // Legacy public path of the published image in OSS.
    pub image_src: String,
    // Asset id under which the published visual was stored and bound.
    pub asset_id: String,
}
|
||
|
||
/// HTTP handler: generate candidate primary-visual images for a character.
///
/// Creates an AI task, walks it through the PreparePrompt / RequestModel /
/// NormalizeResult / PersistResult stages, uploads the generated candidates to
/// OSS as drafts, and returns them to the caller. On any error inside the
/// generation pipeline the task is marked failed (best effort) before the
/// error response is returned.
pub async fn generate_character_visual(
    State(state): State<AppState>,
    Extension(request_context): Extension<RequestContext>,
    payload: Result<Json<CharacterVisualGenerateRequest>, JsonRejection>,
) -> Result<Json<Value>, Response> {
    // Surface JSON body rejections as a structured 400 instead of axum's default.
    let Json(payload) = payload.map_err(|error| {
        character_visual_error_response(
            &request_context,
            AppError::from_status(StatusCode::BAD_REQUEST).with_details(json!({
                "provider": "character-visual",
                "message": error.body_text(),
            })),
        )
    })?;

    // The legacy asset-workshop endpoints carry no explicit Bearer header; the
    // Rust compatibility layer attributes ownership to the tool user for now
    // so existing front-end calls keep working.
    let owner_user_id = "asset-tool".to_string();
    let task_id = generate_ai_task_id(current_utc_micros());
    let prompt = build_character_visual_prompt(payload.prompt_text.as_str());
    let fallback_prompt =
        build_fallback_moderation_safe_character_visual_prompt(payload.prompt_text.as_str());
    let character_id = normalize_required_text(payload.character_id.as_str(), "character");
    let model = resolve_character_visual_model(payload.image_model.as_str());
    let size = normalize_required_text(payload.size.as_str(), "1024*1024");
    // Bound the number of candidates to a sane range.
    let candidate_count = payload.candidate_count.clamp(1, 4);

    // Task creation failures are reported directly (nothing to roll back yet).
    let created = create_visual_task(
        &state,
        &task_id,
        &owner_user_id,
        &character_id,
        &model,
        &prompt,
    )
    .map_err(|error| character_visual_error_response(&request_context, error))?;

    // The whole pipeline runs in an inner async block so a single error path
    // below can mark the task failed.
    let result = async {
        let settings = require_openai_image_settings(&state)?;
        let http_client = build_openai_image_http_client(&settings)?;

        state
            .ai_task_service()
            .start_task(task_id.as_str(), current_utc_micros())
            .map_err(map_ai_task_error)?;
        // Stage 1: PreparePrompt — records the final prompt and request shape.
        state
            .ai_task_service()
            .start_stage(
                task_id.as_str(),
                AiTaskStageKind::PreparePrompt,
                current_utc_micros(),
            )
            .map_err(map_ai_task_error)?;
        state
            .ai_task_service()
            .complete_stage(AiStageCompletionInput {
                task_id: task_id.clone(),
                stage_kind: AiTaskStageKind::PreparePrompt,
                text_output: Some(prompt.clone()),
                structured_payload_json: Some(
                    json!({
                        "characterId": character_id,
                        "sourceMode": payload.source_mode,
                        "size": size,
                        "referenceImageCount": payload.reference_image_data_urls.len(),
                        "provider": "apimart",
                    })
                    .to_string(),
                ),
                warning_messages: Vec::new(),
                completed_at_micros: current_utc_micros(),
            })
            .map_err(map_ai_task_error)?;

        // Stage 2: RequestModel — resolve reference images and call the model.
        state
            .ai_task_service()
            .start_stage(
                task_id.as_str(),
                AiTaskStageKind::RequestModel,
                current_utc_micros(),
            )
            .map_err(map_ai_task_error)?;

        let reference_images = match payload.source_mode {
            shared_contracts::assets::CharacterVisualSourceMode::TextToImage => Vec::new(),
            // Any image-to-image mode requires at least one reference image.
            _ => {
                if payload.reference_image_data_urls.is_empty() {
                    return Err(AppError::from_status(StatusCode::BAD_REQUEST).with_details(
                        json!({
                            "provider": "character-visual",
                            "message": "图生主形象至少需要一张参考图。",
                        }),
                    ));
                }

                // Normalize every reference (data URL or legacy OSS path) to a
                // base64 data URL; resolution is sequential and fail-fast.
                let mut normalized_reference_images =
                    Vec::with_capacity(payload.reference_image_data_urls.len());
                for (index, source) in payload.reference_image_data_urls.iter().enumerate() {
                    normalized_reference_images.push(
                        resolve_reference_image_as_data_url(
                            &state,
                            &http_client,
                            source,
                            format!("referenceImageDataUrls[{index}]").as_str(),
                        )
                        .await?,
                    );
                }
                normalized_reference_images
            }
        };

        let generated = create_character_visual_generation(
            &http_client,
            &settings,
            model.as_str(),
            prompt.as_str(),
            fallback_prompt.as_str(),
            size.as_str(),
            candidate_count,
            &reference_images,
        )
        .await?;

        state
            .ai_task_service()
            .complete_stage(AiStageCompletionInput {
                task_id: task_id.clone(),
                stage_kind: AiTaskStageKind::RequestModel,
                // Prefer the prompt the provider says it actually used.
                text_output: Some(
                    generated
                        .actual_prompt
                        .clone()
                        .unwrap_or_else(|| generated.submitted_prompt.clone()),
                ),
                structured_payload_json: Some(
                    json!({
                        "provider": "apimart",
                        "taskId": generated.task_id,
                        "model": model,
                        "imageCount": generated.images.len(),
                        "moderationFallbackApplied": generated.moderation_fallback_applied,
                    })
                    .to_string(),
                ),
                warning_messages: Vec::new(),
                completed_at_micros: current_utc_micros(),
            })
            .map_err(map_ai_task_error)?;

        // Upload candidates to OSS under the draft prefix.
        let drafts = persist_visual_drafts(
            &state,
            &owner_user_id,
            &character_id,
            &task_id,
            generated.images,
            size.as_str(),
        )
        .await?;

        let result_payload = json!({
            "drafts": drafts,
            "draftRelativeDir": format!(
                "generated-character-drafts/{}/visual/{}",
                sanitize_storage_segment(character_id.as_str(), "character"),
                task_id
            ),
        });

        // Stages 3 & 4: NormalizeResult and PersistResult both record the same
        // result payload; PersistResult's stage is completed without an
        // explicit start_stage call (mirrors the service's accepted flow).
        state
            .ai_task_service()
            .start_stage(
                task_id.as_str(),
                AiTaskStageKind::NormalizeResult,
                current_utc_micros(),
            )
            .map_err(map_ai_task_error)?;
        state
            .ai_task_service()
            .complete_stage(AiStageCompletionInput {
                task_id: task_id.clone(),
                stage_kind: AiTaskStageKind::NormalizeResult,
                text_output: None,
                structured_payload_json: Some(result_payload.to_string()),
                warning_messages: Vec::new(),
                completed_at_micros: current_utc_micros(),
            })
            .map_err(map_ai_task_error)?;
        state
            .ai_task_service()
            .complete_stage(AiStageCompletionInput {
                task_id: task_id.clone(),
                stage_kind: AiTaskStageKind::PersistResult,
                text_output: Some("角色主形象候选草稿已写入 OSS。".to_string()),
                structured_payload_json: Some(result_payload.to_string()),
                warning_messages: Vec::new(),
                completed_at_micros: current_utc_micros(),
            })
            .map_err(map_ai_task_error)?;
        state
            .ai_task_service()
            .complete_task(task_id.as_str(), current_utc_micros())
            .map_err(map_ai_task_error)?;

        Ok::<_, AppError>(drafts)
    }
    .await;

    let drafts = match result {
        Ok(drafts) => drafts,
        Err(error) => {
            // Best effort: failure to mark the task failed must not mask the
            // original error.
            let _ = state.ai_task_service().fail_task(
                created.task_id.as_str(),
                error.message().to_string(),
                current_utc_micros(),
            );
            return Err(character_visual_error_response(&request_context, error));
        }
    };

    Ok(json_success_body(
        Some(&request_context),
        CharacterVisualGenerateResponse {
            ok: true,
            task_id,
            model,
            prompt,
            drafts,
        },
    ))
}
|
||
|
||
/// Generate and immediately publish a character's primary visual as part of
/// the profile-generation flow (no manual candidate selection).
///
/// Reuses the HTTP handler's request shape so prompt/model/size normalization
/// stays in one place: text-to-image, single candidate, fixed 1024*1024.
///
/// NOTE(review): unlike generate_character_visual, errors here propagate
/// without marking the AI task failed — confirm this is intentional.
pub(crate) async fn generate_character_primary_visual_for_profile(
    state: &AppState,
    owner_user_id: &str,
    character_id: &str,
    prompt_text: &str,
) -> Result<GeneratedCharacterPrimaryVisual, AppError> {
    let payload = CharacterVisualGenerateRequest {
        character_id: character_id.to_string(),
        source_mode: shared_contracts::assets::CharacterVisualSourceMode::TextToImage,
        prompt_text: prompt_text.to_string(),
        reference_image_data_urls: Vec::new(),
        candidate_count: 1,
        image_model: CHARACTER_VISUAL_MODEL.to_string(),
        size: "1024*1024".to_string(),
    };
    let task_id = generate_ai_task_id(current_utc_micros());
    let prompt = build_character_visual_prompt(payload.prompt_text.as_str());
    let fallback_prompt =
        build_fallback_moderation_safe_character_visual_prompt(payload.prompt_text.as_str());
    // Shadowed: normalized copy used from here on.
    let character_id = normalize_required_text(payload.character_id.as_str(), "character");
    let model = resolve_character_visual_model(payload.image_model.as_str());
    let size = normalize_required_text(payload.size.as_str(), "1024*1024");
    create_visual_task(
        state,
        &task_id,
        owner_user_id,
        &character_id,
        &model,
        &prompt,
    )?;
    let settings = require_openai_image_settings(state)?;
    let http_client = build_openai_image_http_client(&settings)?;
    state
        .ai_task_service()
        .start_task(task_id.as_str(), current_utc_micros())
        .map_err(map_ai_task_error)?;
    // Single-candidate text-to-image generation (no reference images).
    let generated = create_character_visual_generation(
        &http_client,
        &settings,
        model.as_str(),
        prompt.as_str(),
        fallback_prompt.as_str(),
        size.as_str(),
        1,
        &[],
    )
    .await?;
    let drafts = persist_visual_drafts(
        state,
        owner_user_id,
        &character_id,
        &task_id,
        generated.images,
        size.as_str(),
    )
    .await?;
    // Exactly one candidate was requested; an empty result is an upstream fault.
    let draft = drafts.into_iter().next().ok_or_else(|| {
        AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
            "provider": "character-visual",
            "message": "角色主形象生成没有返回候选图。",
        }))
    })?;
    let asset_id = format!("visual-{character_id}-{task_id}");
    // Publish the sole candidate right away.
    let image_src = persist_published_visual(
        state,
        owner_user_id,
        &character_id,
        asset_id.as_str(),
        draft.image_src.as_str(),
        Some(prompt.as_str()),
    )
    .await?;
    state
        .ai_task_service()
        .complete_task(task_id.as_str(), current_utc_micros())
        .map_err(map_ai_task_error)?;
    Ok(GeneratedCharacterPrimaryVisual {
        image_src,
        asset_id,
    })
}
|
||
|
||
pub async fn get_character_visual_job(
|
||
State(state): State<AppState>,
|
||
Extension(request_context): Extension<RequestContext>,
|
||
Path(task_id): Path<String>,
|
||
) -> Result<Json<Value>, Response> {
|
||
let task = state
|
||
.ai_task_service()
|
||
.get_task(task_id.as_str())
|
||
.map_err(map_ai_task_error)
|
||
.map_err(|error| character_visual_error_response(&request_context, error))?;
|
||
|
||
Ok(json_success_body(
|
||
Some(&request_context),
|
||
build_character_visual_job_payload(task),
|
||
))
|
||
}
|
||
|
||
/// HTTP handler: publish a selected draft as the character's primary visual.
///
/// Copies the draft object into the published location, registers and binds
/// it, and returns the public portrait path plus a status message.
pub async fn publish_character_visual(
    State(state): State<AppState>,
    Extension(request_context): Extension<RequestContext>,
    payload: Result<Json<CharacterVisualPublishRequest>, JsonRejection>,
) -> Result<Json<Value>, Response> {
    // Surface JSON body rejections as a structured 400.
    let Json(payload) = payload.map_err(|error| {
        character_visual_error_response(
            &request_context,
            AppError::from_status(StatusCode::BAD_REQUEST).with_details(json!({
                "provider": "character-visual",
                "message": error.body_text(),
            })),
        )
    })?;

    // The legacy asset-workshop endpoints carry no explicit Bearer header; the
    // Rust compatibility layer attributes ownership to the tool user for now
    // so existing front-end calls keep working.
    let owner_user_id = "asset-tool".to_string();
    let character_id = normalize_required_text(payload.character_id.as_str(), "character");
    if payload.selected_preview_source.trim().is_empty() {
        return Err(character_visual_error_response(
            &request_context,
            AppError::from_status(StatusCode::BAD_REQUEST).with_details(json!({
                "provider": "character-visual",
                "message": "selectedPreviewSource is required.",
            })),
        ));
    }

    // Millisecond timestamp id for the published asset.
    let asset_id = format!("visual-{}", current_utc_millis());
    let published = persist_published_visual(
        &state,
        &owner_user_id,
        &character_id,
        asset_id.as_str(),
        payload.selected_preview_source.as_str(),
        payload.prompt_text.as_deref(),
    )
    .await
    .map_err(|error| character_visual_error_response(&request_context, error))?;

    Ok(json_success_body(
        Some(&request_context),
        CharacterVisualPublishResponse {
            ok: true,
            asset_id,
            portrait_path: published,
            // Local character-override files are no longer written by the Rust
            // backend; the message differs only in wording.
            override_map: json!({}),
            save_message: if payload.update_character_override == Some(false) {
                "主形象已写入 OSS 并绑定当前角色,可直接写回当前自定义世界角色。".to_string()
            } else {
                "主形象已写入 OSS 并绑定当前角色;Rust 后端不再写本地角色覆盖文件。".to_string()
            },
        },
    ))
}
|
||
|
||
fn create_visual_task(
|
||
state: &AppState,
|
||
task_id: &str,
|
||
owner_user_id: &str,
|
||
character_id: &str,
|
||
model: &str,
|
||
prompt: &str,
|
||
) -> Result<AiTaskSnapshot, AppError> {
|
||
state
|
||
.ai_task_service()
|
||
.create_task(AiTaskCreateInput {
|
||
task_id: task_id.to_string(),
|
||
task_kind: AiTaskKind::CustomWorldGeneration,
|
||
owner_user_id: owner_user_id.to_string(),
|
||
request_label: "生成角色主形象".to_string(),
|
||
source_module: "assets.character_visual".to_string(),
|
||
source_entity_id: Some(character_id.to_string()),
|
||
request_payload_json: Some(
|
||
json!({
|
||
"characterId": character_id,
|
||
"model": model,
|
||
"prompt": prompt,
|
||
})
|
||
.to_string(),
|
||
),
|
||
stages: AiTaskKind::CustomWorldGeneration.default_stage_blueprints(),
|
||
created_at_micros: current_utc_micros(),
|
||
})
|
||
.map_err(map_ai_task_error)
|
||
}
|
||
|
||
async fn persist_visual_drafts(
|
||
state: &AppState,
|
||
owner_user_id: &str,
|
||
character_id: &str,
|
||
task_id: &str,
|
||
images: Vec<DownloadedGeneratedImage>,
|
||
size: &str,
|
||
) -> Result<Vec<CharacterVisualDraftPayload>, AppError> {
|
||
let mut drafts = Vec::with_capacity(images.len());
|
||
for (index, image) in images.into_iter().enumerate() {
|
||
let file_name = format!("candidate-{:02}.{}", index + 1, image.extension);
|
||
let put_result = put_character_visual_object(
|
||
state,
|
||
LegacyAssetPrefix::CharacterDrafts,
|
||
vec![
|
||
sanitize_storage_segment(character_id, "character"),
|
||
"visual".to_string(),
|
||
task_id.to_string(),
|
||
],
|
||
file_name,
|
||
image.mime_type,
|
||
image.bytes,
|
||
build_asset_metadata(
|
||
CHARACTER_VISUAL_ASSET_KIND,
|
||
owner_user_id,
|
||
CHARACTER_VISUAL_ENTITY_KIND,
|
||
character_id,
|
||
"draft",
|
||
),
|
||
)
|
||
.await?;
|
||
|
||
drafts.push(CharacterVisualDraftPayload {
|
||
id: format!("candidate-{}", index + 1),
|
||
label: format!("候选 {}", index + 1),
|
||
image_src: put_result.legacy_public_path,
|
||
width: parse_size(size).0,
|
||
height: parse_size(size).1,
|
||
});
|
||
}
|
||
|
||
Ok(drafts)
|
||
}
|
||
|
||
/// Copy a selected draft image into the published character-visual location,
/// register the object in Spacetime, and bind it to the character's
/// primary-visual slot. Returns the legacy public path of the published copy.
async fn persist_published_visual(
    state: &AppState,
    owner_user_id: &str,
    character_id: &str,
    asset_id: &str,
    selected_preview_source: &str,
    prompt_text: Option<&str>,
) -> Result<String, AppError> {
    let oss_client = require_oss_client(state)?;
    let http_client = reqwest::Client::new();
    let source_object_key = resolve_object_key_from_legacy_path(selected_preview_source)?;
    // HEAD first so the published copy can reuse the stored content type.
    let head = oss_client
        .head_object(
            &http_client,
            OssHeadObjectRequest {
                object_key: source_object_key.clone(),
            },
        )
        .await
        .map_err(map_character_visual_oss_error)?;
    // Short-lived signed URL (60s) to read the private source object.
    let signed = oss_client
        .sign_get_object_url(OssSignedGetObjectUrlRequest {
            object_key: source_object_key,
            expire_seconds: Some(60),
        })
        .map_err(map_character_visual_oss_error)?;
    // Download the draft bytes; every failure mode maps to a 502 with a
    // provider-tagged message.
    let source_body = http_client
        .get(signed.signed_url)
        .send()
        .await
        .map_err(|error| {
            AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
                "provider": "aliyun-oss",
                "message": format!("读取候选主形象失败:{error}"),
            }))
        })?
        .error_for_status()
        .map_err(|error| {
            AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
                "provider": "aliyun-oss",
                "message": format!("读取候选主形象失败:{error}"),
            }))
        })?
        .bytes()
        .await
        .map_err(|error| {
            AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
                "provider": "aliyun-oss",
                "message": format!("读取候选主形象内容失败:{error}"),
            }))
        })?
        .to_vec();

    // NOTE(review): missing content type defaults to SVG here — presumably
    // legacy placeholder drafts were SVG; confirm this default is still right.
    let content_type = head
        .content_type
        .clone()
        .unwrap_or_else(|| "image/svg+xml".to_string());
    let file_name = match content_type.as_str() {
        "image/png" => "master.png",
        "image/jpeg" => "master.jpg",
        "image/webp" => "master.webp",
        _ => "master.svg",
    }
    .to_string();
    // Re-upload under the published (Characters) prefix with the
    // primary-visual slot metadata.
    let put_result = put_character_visual_object(
        state,
        LegacyAssetPrefix::Characters,
        vec![
            sanitize_storage_segment(character_id, "character"),
            "visual".to_string(),
            asset_id.to_string(),
        ],
        file_name,
        content_type.clone(),
        source_body,
        build_asset_metadata(
            CHARACTER_VISUAL_ASSET_KIND,
            owner_user_id,
            CHARACTER_VISUAL_ENTITY_KIND,
            character_id,
            CHARACTER_VISUAL_SLOT,
        ),
    )
    .await?;
    // Record the object in Spacetime, then bind it to the character entity.
    let confirmed = confirm_character_visual_asset_object(
        state,
        owner_user_id,
        character_id,
        asset_id,
        put_result.object_key.clone(),
        content_type,
        prompt_text.map(str::to_string),
    )
    .await?;
    bind_character_visual_asset(
        state,
        owner_user_id,
        character_id,
        confirmed.record.asset_object_id,
    )
    .await?;

    Ok(put_result.legacy_public_path)
}
|
||
|
||
async fn put_character_visual_object(
|
||
state: &AppState,
|
||
prefix: LegacyAssetPrefix,
|
||
path_segments: Vec<String>,
|
||
file_name: String,
|
||
content_type: String,
|
||
body: Vec<u8>,
|
||
metadata: BTreeMap<String, String>,
|
||
) -> Result<platform_oss::OssPutObjectResponse, AppError> {
|
||
let oss_client = require_oss_client(state)?;
|
||
oss_client
|
||
.put_object(
|
||
&reqwest::Client::new(),
|
||
OssPutObjectRequest {
|
||
prefix,
|
||
path_segments,
|
||
file_name,
|
||
content_type: Some(content_type),
|
||
access: OssObjectAccess::Private,
|
||
metadata,
|
||
body,
|
||
},
|
||
)
|
||
.await
|
||
.map_err(map_character_visual_oss_error)
|
||
}
|
||
|
||
/// Register the uploaded visual in Spacetime as a confirmed asset object and
/// attach it to the originating AI task as a result reference.
async fn confirm_character_visual_asset_object(
    state: &AppState,
    owner_user_id: &str,
    character_id: &str,
    source_job_id: &str,
    object_key: String,
    content_type: String,
    prompt_text: Option<String>,
) -> Result<module_assets::ConfirmAssetObjectResult, AppError> {
    let oss_client = require_oss_client(state)?;
    // Re-read object metadata so the record reflects what is actually stored.
    let head = oss_client
        .head_object(&reqwest::Client::new(), OssHeadObjectRequest { object_key })
        .await
        .map_err(map_character_visual_oss_error)?;
    let now_micros = current_utc_micros();
    let record = state
        .spacetime_client()
        .confirm_asset_object(
            build_asset_object_upsert_input(
                generate_asset_object_id(now_micros),
                head.bucket,
                head.object_key,
                AssetObjectAccessPolicy::Private,
                // Prefer the stored content type; fall back to the caller's.
                head.content_type.or(Some(content_type)),
                head.content_length,
                // NOTE(review): prompt text falls back to the object's ETag —
                // presumably a free-form description field; confirm.
                prompt_text.or(head.etag),
                CHARACTER_VISUAL_ASSET_KIND.to_string(),
                Some(source_job_id.to_string()),
                Some(owner_user_id.to_string()),
                None,
                Some(character_id.to_string()),
                now_micros,
            )
            .map_err(map_asset_object_prepare_error)?,
        )
        .await
        .map_err(map_character_visual_spacetime_error)?;
    // Best effort: failing to attach the reference must not fail publishing.
    let _ = state.ai_task_service().attach_result_reference(
        source_job_id,
        AiResultReferenceKind::AssetObject,
        record.asset_object_id.clone(),
        Some("角色主形象正式对象".to_string()),
        now_micros,
    );
    Ok(module_assets::ConfirmAssetObjectResult { record })
}
|
||
|
||
async fn bind_character_visual_asset(
|
||
state: &AppState,
|
||
owner_user_id: &str,
|
||
character_id: &str,
|
||
asset_object_id: String,
|
||
) -> Result<(), AppError> {
|
||
let now_micros = current_utc_micros();
|
||
state
|
||
.spacetime_client()
|
||
.bind_asset_object_to_entity(
|
||
build_asset_entity_binding_input(
|
||
generate_asset_binding_id(now_micros),
|
||
asset_object_id,
|
||
CHARACTER_VISUAL_ENTITY_KIND.to_string(),
|
||
character_id.to_string(),
|
||
CHARACTER_VISUAL_SLOT.to_string(),
|
||
CHARACTER_VISUAL_ASSET_KIND.to_string(),
|
||
Some(owner_user_id.to_string()),
|
||
None,
|
||
now_micros,
|
||
)
|
||
.map_err(map_asset_binding_prepare_error)?,
|
||
)
|
||
.await
|
||
.map_err(map_character_visual_spacetime_error)?;
|
||
Ok(())
|
||
}
|
||
|
||
fn build_character_visual_job_payload(task: AiTaskSnapshot) -> CharacterAssetJobStatusPayload {
|
||
let request_payload = task
|
||
.request_payload_json
|
||
.as_deref()
|
||
.and_then(|value| serde_json::from_str::<Value>(value).ok())
|
||
.unwrap_or_else(|| json!({}));
|
||
let result = task
|
||
.latest_structured_payload_json
|
||
.as_deref()
|
||
.and_then(|value| serde_json::from_str::<Value>(value).ok());
|
||
|
||
CharacterAssetJobStatusPayload {
|
||
task_id: task.task_id,
|
||
kind: "visual".to_string(),
|
||
status: match task.status {
|
||
AiTaskStatus::Pending => CharacterAssetJobStatusText::Queued,
|
||
AiTaskStatus::Running => CharacterAssetJobStatusText::Running,
|
||
AiTaskStatus::Completed => CharacterAssetJobStatusText::Completed,
|
||
AiTaskStatus::Failed | AiTaskStatus::Cancelled => CharacterAssetJobStatusText::Failed,
|
||
},
|
||
character_id: request_payload
|
||
.get("characterId")
|
||
.and_then(Value::as_str)
|
||
.unwrap_or_default()
|
||
.to_string(),
|
||
animation: None,
|
||
strategy: None,
|
||
model: request_payload
|
||
.get("model")
|
||
.and_then(Value::as_str)
|
||
.unwrap_or(CHARACTER_VISUAL_MODEL)
|
||
.to_string(),
|
||
prompt: request_payload
|
||
.get("prompt")
|
||
.and_then(Value::as_str)
|
||
.unwrap_or_default()
|
||
.to_string(),
|
||
created_at: format_utc_micros(task.created_at_micros),
|
||
updated_at: format_utc_micros(task.updated_at_micros),
|
||
result,
|
||
error_message: task.failure_message,
|
||
}
|
||
}
|
||
|
||
fn resolve_character_visual_model(value: &str) -> String {
|
||
// 中文注释:旧前端和历史草稿可能仍传 wan2.7-image-pro;RPG 主图当前统一归一到 gpt-image-2。
|
||
let trimmed = value.trim();
|
||
if !trimmed.is_empty() && trimmed != CHARACTER_VISUAL_MODEL {
|
||
tracing::warn!(
|
||
requested_model = trimmed,
|
||
effective_model = CHARACTER_VISUAL_MODEL,
|
||
"角色主形象图片模型已归一到 gpt-image-2"
|
||
);
|
||
}
|
||
CHARACTER_VISUAL_MODEL.to_string()
|
||
}
|
||
|
||
/// Normalize a reference-image source into a base64 data URL.
///
/// Accepts either an inline image data URL (re-encoded canonically) or a
/// legacy `/generated-*` OSS path (downloaded through a short-lived signed
/// URL). Anything else is rejected with a field-tagged 400.
async fn resolve_reference_image_as_data_url(
    state: &AppState,
    http_client: &reqwest::Client,
    source: &str,
    field: &str,
) -> Result<String, AppError> {
    let trimmed = source.trim();
    if trimmed.is_empty() {
        return Err(
            AppError::from_status(StatusCode::BAD_REQUEST).with_details(json!({
                "provider": "character-visual",
                "field": field,
                "message": "参考图不能为空。",
            })),
        );
    }

    // Inline data URL: re-encode the parsed bytes so downstream always sees a
    // canonical standard-base64 data URL.
    if let Some(parsed) = parse_image_data_url(trimmed) {
        return Ok(format!(
            "data:{};base64,{}",
            parsed.mime_type,
            BASE64_STANDARD.encode(parsed.bytes)
        ));
    }

    if !trimmed.starts_with('/') {
        return Err(
            AppError::from_status(StatusCode::BAD_REQUEST).with_details(json!({
                "provider": "character-visual",
                "field": field,
                "message": "参考图必须是 Data URL 或 /generated-* 旧路径。",
            })),
        );
    }

    // Only keys under a known legacy prefix may be fetched.
    let object_key = trimmed.trim_start_matches('/');
    if LegacyAssetPrefix::from_object_key(object_key).is_none() {
        return Err(
            AppError::from_status(StatusCode::BAD_REQUEST).with_details(json!({
                "provider": "character-visual",
                "field": field,
                "message": "参考图当前只支持 /generated-* 旧路径。",
            })),
        );
    }

    let oss_client = require_oss_client(state)?;
    // Short-lived (60s) signed URL for the private object.
    let signed = oss_client
        .sign_get_object_url(OssSignedGetObjectUrlRequest {
            object_key: object_key.to_string(),
            expire_seconds: Some(60),
        })
        .map_err(map_character_visual_oss_error)?;
    let response = http_client
        .get(signed.signed_url)
        .send()
        .await
        .map_err(|error| map_image_request_error(format!("读取角色主形象参考图失败:{error}")))?;
    // Capture status/content type before consuming the body.
    let status = response.status();
    let content_type = response
        .headers()
        .get(reqwest::header::CONTENT_TYPE)
        .and_then(|value| value.to_str().ok())
        .unwrap_or("image/png")
        .to_string();
    let body = response.bytes().await.map_err(|error| {
        map_image_request_error(format!("读取角色主形象参考图内容失败:{error}"))
    })?;
    // Non-2xx and empty bodies are both treated as gateway failures.
    if !status.is_success() {
        return Err(
            AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
                "provider": "aliyun-oss",
                "field": field,
                "message": format!("读取参考图失败,状态码:{status}"),
                "objectKey": object_key,
            })),
        );
    }
    if body.is_empty() {
        return Err(
            AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
                "provider": "aliyun-oss",
                "field": field,
                "message": "读取参考图失败:对象内容为空",
                "objectKey": object_key,
            })),
        );
    }

    Ok(format!(
        "data:{};base64,{}",
        content_type,
        BASE64_STANDARD.encode(body)
    ))
}
|
||
|
||
/// Run image generation with a one-shot moderation fallback: if the first
/// attempt is rejected by upstream content moderation, retry once with the
/// moderation-safe fallback prompt. Any other error is returned immediately.
async fn create_character_visual_generation(
    http_client: &reqwest::Client,
    settings: &OpenAiImageSettings,
    model: &str,
    prompt: &str,
    fallback_prompt: &str,
    size: &str,
    candidate_count: u32,
    reference_images: &[String],
) -> Result<GeneratedCharacterVisuals, AppError> {
    let mut active_prompt = prompt;
    let mut moderation_fallback_applied = false;
    let mut last_moderation_error = String::new();
    // Normalize defensively; callers may pass through raw model names.
    let model = resolve_character_visual_model(model);

    for attempt_index in 0..CHARACTER_VISUAL_MODERATION_FALLBACK_MAX_ATTEMPTS {
        match create_character_visual_generation_once(
            http_client,
            settings,
            model.as_str(),
            active_prompt,
            size,
            candidate_count,
            reference_images,
        )
        .await
        {
            Ok(mut generated) => {
                // Record which prompt actually produced the images and whether
                // the fallback path was taken.
                generated.submitted_prompt = active_prompt.to_string();
                generated.moderation_fallback_applied = moderation_fallback_applied;
                return Ok(generated);
            }
            // Fall back only on the first attempt, only for moderation errors,
            // and only when the fallback prompt is non-empty and actually
            // different from the original.
            Err(error)
                if attempt_index == 0
                    && !fallback_prompt.trim().is_empty()
                    && fallback_prompt.trim() != prompt.trim()
                    && is_image_moderation_error(&error) =>
            {
                last_moderation_error = error.body_text();
                active_prompt = fallback_prompt;
                moderation_fallback_applied = true;
            }
            Err(error) => return Err(error),
        }
    }

    // Both attempts were rejected by moderation.
    Err(map_image_request_error(format!(
        "角色主形象安全兜底重试未返回结果:{}",
        last_moderation_error.if_empty_then("上游内容审核仍未通过。")
    )))
}
|
||
|
||
async fn create_character_visual_generation_once(
|
||
http_client: &reqwest::Client,
|
||
settings: &OpenAiImageSettings,
|
||
_model: &str,
|
||
prompt: &str,
|
||
size: &str,
|
||
candidate_count: u32,
|
||
reference_images: &[String],
|
||
) -> Result<GeneratedCharacterVisuals, AppError> {
|
||
let generated = create_openai_image_generation(
|
||
http_client,
|
||
settings,
|
||
prompt,
|
||
Some(build_character_visual_negative_prompt().as_str()),
|
||
size,
|
||
candidate_count,
|
||
reference_images,
|
||
"角色主形象生成失败",
|
||
)
|
||
.await?;
|
||
|
||
Ok(GeneratedCharacterVisuals {
|
||
task_id: generated.task_id,
|
||
actual_prompt: generated.actual_prompt,
|
||
submitted_prompt: prompt.to_string(),
|
||
moderation_fallback_applied: false,
|
||
images: generated
|
||
.images
|
||
.into_iter()
|
||
.map(downloaded_openai_to_character_visual_image)
|
||
.collect(),
|
||
})
|
||
}
|
||
|
||
fn downloaded_openai_to_character_visual_image(
|
||
image: DownloadedOpenAiImage,
|
||
) -> DownloadedGeneratedImage {
|
||
let mut bytes = image.bytes;
|
||
let mut extension = image.extension;
|
||
let mut mime_type = image.mime_type;
|
||
|
||
if mime_type == "image/png"
|
||
&& let Some(optimized) = try_apply_background_alpha_to_png(bytes.as_slice())
|
||
{
|
||
bytes = optimized;
|
||
extension = "png".to_string();
|
||
mime_type = "image/png".to_string();
|
||
}
|
||
|
||
DownloadedGeneratedImage {
|
||
bytes,
|
||
mime_type,
|
||
extension,
|
||
}
|
||
}
|
||
|
||
/// Unified entry point for PNG transparent-background post-processing.
/// RPG character primary visuals and any other image asset that needs the
/// same "primary-visual transparent background" treatment reuse this logic.
///
/// Returns `None` only when decoding or re-encoding fails; when no background
/// pixel needed changing the original bytes are returned unchanged (avoiding
/// a pointless re-encode).
pub(crate) fn try_apply_background_alpha_to_png(source: &[u8]) -> Option<Vec<u8>> {
    // Decode strictly as PNG into RGBA8 for in-place alpha editing.
    let mut image = image::load_from_memory_with_format(source, ImageFormat::Png)
        .ok()?
        .to_rgba8();
    let (width, height) = image.dimensions();
    // remove_background_from_rgba mutates the buffer; false means nothing changed.
    if !remove_background_from_rgba(image.as_mut(), width as usize, height as usize) {
        return Some(source.to_vec());
    }

    // Re-encode the modified RGBA buffer back to PNG.
    let mut encoded = Vec::new();
    let encoder = PngEncoder::new(&mut encoded);
    encoder
        .write_image(image.as_raw(), width, height, ColorType::Rgba8.into())
        .ok()?;
    Some(encoded)
}
|
||
|
||
fn resolve_object_key_from_legacy_path(value: &str) -> Result<String, AppError> {
|
||
let trimmed = value.trim();
|
||
if trimmed.is_empty() {
|
||
return Err(
|
||
AppError::from_status(StatusCode::BAD_REQUEST).with_details(json!({
|
||
"provider": "character-visual",
|
||
"message": "selectedPreviewSource is required.",
|
||
})),
|
||
);
|
||
}
|
||
if trimmed.starts_with("data:") {
|
||
return Err(AppError::from_status(StatusCode::BAD_REQUEST).with_details(json!({
|
||
"provider": "character-visual",
|
||
"message": "Rust 版 publish 当前要求 selectedPreviewSource 为已写入 OSS 的 /generated-* 路径。",
|
||
})));
|
||
}
|
||
Ok(trimmed.trim_start_matches('/').to_string())
|
||
}
|
||
|
||
/// Build the standard OSS metadata map attached to character-visual objects.
fn build_asset_metadata(
    asset_kind: &str,
    owner_user_id: &str,
    entity_kind: &str,
    entity_id: &str,
    slot: &str,
) -> BTreeMap<String, String> {
    let mut metadata = BTreeMap::new();
    metadata.insert("asset_kind".to_string(), asset_kind.to_string());
    metadata.insert("owner_user_id".to_string(), owner_user_id.to_string());
    metadata.insert("entity_kind".to_string(), entity_kind.to_string());
    metadata.insert("entity_id".to_string(), entity_id.to_string());
    metadata.insert("slot".to_string(), slot.to_string());
    metadata
}
|
||
|
||
fn require_oss_client(state: &AppState) -> Result<&platform_oss::OssClient, AppError> {
|
||
state.oss_client().ok_or_else(|| {
|
||
AppError::from_status(StatusCode::SERVICE_UNAVAILABLE).with_details(json!({
|
||
"provider": "aliyun-oss",
|
||
"reason": "OSS 未完成环境变量配置",
|
||
}))
|
||
})
|
||
}
|
||
|
||
/// Normalizes free-form text: collapses runs of whitespace into single
/// spaces, caps the result at 180 characters, and substitutes `fallback`
/// when nothing remains.
fn normalize_required_text(value: &str, fallback: &str) -> String {
    // split_whitespace already ignores leading/trailing whitespace.
    let collapsed = value.split_whitespace().collect::<Vec<_>>().join(" ");
    // Cap by character count (not bytes) so multi-byte text stays valid.
    let truncated: String = collapsed.chars().take(180).collect();
    let cleaned = truncated.trim();
    if cleaned.is_empty() {
        fallback.to_string()
    } else {
        cleaned.to_string()
    }
}
|
||
|
||
fn sanitize_storage_segment(value: &str, fallback: &str) -> String {
|
||
let normalized = value
|
||
.trim()
|
||
.chars()
|
||
.map(|character| match character {
|
||
'a'..='z' | '0'..='9' | '-' | '_' => character,
|
||
'A'..='Z' => character.to_ascii_lowercase(),
|
||
_ => '-',
|
||
})
|
||
.collect::<String>();
|
||
let normalized = collapse_dashes(&normalized);
|
||
if normalized.is_empty() {
|
||
fallback.to_string()
|
||
} else {
|
||
normalized
|
||
}
|
||
}
|
||
|
||
/// Collapses consecutive `-` characters into one and strips leading and
/// trailing dashes.
fn collapse_dashes(value: &str) -> String {
    let mut collapsed = String::with_capacity(value.len());
    let mut previous_was_dash = false;
    for character in value.chars() {
        let is_dash = character == '-';
        // Skip a dash that directly follows another dash.
        if !(is_dash && previous_was_dash) {
            collapsed.push(character);
        }
        previous_was_dash = is_dash;
    }
    collapsed.trim_matches('-').to_string()
}
|
||
|
||
/// Parses a size string like `"1024*768"` or `"1024x768"` into
/// `(width, height)`.
///
/// The legacy provider format uses `*` as the delimiter, while OpenAI-style
/// sizes use `x` (e.g. `"1024x1024"`); both are accepted — previously an
/// `x`-delimited size silently fell back to `(1024, 1024)`. Any missing,
/// zero, or unparsable dimension falls back to 1024, so the result is
/// always usable.
fn parse_size(size: &str) -> (u32, u32) {
    // One dimension: trimmed, parsed, must be positive, else default 1024.
    fn dimension(part: Option<&str>) -> u32 {
        part.and_then(|value| value.trim().parse::<u32>().ok())
            .filter(|value| *value > 0)
            .unwrap_or(1024)
    }

    // Accept the legacy `*` delimiter as well as `x`/`X`/`×` variants.
    let mut parts = size.split(|character: char| matches!(character, '*' | 'x' | 'X' | '×'));
    let width = dimension(parts.next());
    let height = dimension(parts.next());
    (width, height)
}
|
||
|
||
/// Formats a unix timestamp in microseconds as a UTC string.
///
/// Thin delegation to the shared runtime helper so call sites in this
/// module stay short.
fn format_utc_micros(micros: i64) -> String {
    module_runtime::format_utc_micros(micros)
}
|
||
|
||
fn current_utc_millis() -> i64 {
|
||
current_utc_micros() / 1_000
|
||
}
|
||
|
||
/// Current unix time in microseconds.
///
/// Panics only if the system clock reads before the unix epoch or the value
/// overflows `i64` — both treated as unrecoverable environment bugs.
fn current_utc_micros() -> i64 {
    use std::time::{SystemTime, UNIX_EPOCH};

    let elapsed_micros = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .expect("system clock should be after unix epoch")
        .as_micros();
    i64::try_from(elapsed_micros).expect("current unix micros should fit in i64")
}
|
||
|
||
fn map_ai_task_error(error: AiTaskServiceError) -> AppError {
|
||
let status = match error {
|
||
AiTaskServiceError::TaskNotFound => StatusCode::NOT_FOUND,
|
||
AiTaskServiceError::TaskAlreadyExists => StatusCode::CONFLICT,
|
||
AiTaskServiceError::Field(_) | AiTaskServiceError::StageNotFound => StatusCode::BAD_REQUEST,
|
||
AiTaskServiceError::Store(_) => StatusCode::INTERNAL_SERVER_ERROR,
|
||
};
|
||
AppError::from_status(status).with_details(json!({
|
||
"provider": "ai-task",
|
||
"message": error.to_string(),
|
||
}))
|
||
}
|
||
|
||
fn map_asset_object_prepare_error(error: AssetObjectFieldError) -> AppError {
|
||
AppError::from_status(StatusCode::BAD_REQUEST).with_details(json!({
|
||
"provider": "asset-object",
|
||
"message": error.to_string(),
|
||
}))
|
||
}
|
||
|
||
fn map_asset_binding_prepare_error(error: AssetObjectFieldError) -> AppError {
|
||
AppError::from_status(StatusCode::BAD_REQUEST).with_details(json!({
|
||
"provider": "asset-entity-binding",
|
||
"message": error.to_string(),
|
||
}))
|
||
}
|
||
|
||
fn map_character_visual_spacetime_error(error: SpacetimeClientError) -> AppError {
|
||
AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
|
||
"provider": "spacetimedb",
|
||
"message": error.to_string(),
|
||
}))
|
||
}
|
||
|
||
/// Maps an OSS failure to an HTTP error via the shared mapper, tagging the
/// provider as `aliyun-oss`.
fn map_character_visual_oss_error(error: platform_oss::OssError) -> AppError {
    map_oss_error(error, "aliyun-oss")
}
|
||
|
||
fn map_image_request_error(message: String) -> AppError {
|
||
AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
|
||
"provider": "apimart",
|
||
"message": message,
|
||
}))
|
||
}
|
||
|
||
#[cfg(test)]
/// Test-only helper: builds a 502 from the upstream's raw error text,
/// falling back to `fallback_message` when the response body is blank.
fn map_image_upstream_error(raw_text: &str, fallback_message: &str) -> AppError {
    let trimmed = raw_text.trim();
    let message = if trimmed.is_empty() {
        fallback_message.to_string()
    } else {
        trimmed.to_string()
    };
    AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
        "provider": "apimart",
        "message": message,
        "raw": trimmed,
    }))
}
|
||
|
||
#[cfg(test)]
/// Test-only alias for `is_image_moderation_error`, kept so unit tests can
/// exercise the moderation detection without exporting the inner helper.
fn is_image_test_moderation_error(error: &AppError) -> bool {
    is_image_moderation_error(error)
}
|
||
|
||
fn is_image_moderation_error(error: &AppError) -> bool {
|
||
let text = error.body_text();
|
||
let normalized = text.to_ascii_lowercase();
|
||
normalized.contains("ipinfringementsuspect")
|
||
|| normalized.contains("inappropriate")
|
||
|| normalized.contains("sensitive")
|
||
|| normalized.contains("risk")
|
||
|| text.contains("内容审核")
|
||
|| text.contains("疑似侵权")
|
||
|| text.contains("IP 侵权")
|
||
|| text.contains("知识产权")
|
||
}
|
||
|
||
fn parse_image_data_url(value: &str) -> Option<ParsedImageDataUrl> {
|
||
let body = value.trim().strip_prefix("data:")?;
|
||
let (mime_type, data) = body.split_once(";base64,")?;
|
||
if !mime_type.starts_with("image/") {
|
||
return None;
|
||
}
|
||
let bytes = decode_base64(data)?;
|
||
if bytes.is_empty() {
|
||
return None;
|
||
}
|
||
Some(ParsedImageDataUrl {
|
||
mime_type: mime_type.to_string(),
|
||
bytes,
|
||
})
|
||
}
|
||
|
||
/// Decodes standard-alphabet base64 after stripping all whitespace.
///
/// Decoding stops at the first `=` (padding); any other character outside
/// the base64 alphabet yields `None`. Unpadded input is accepted.
fn decode_base64(value: &str) -> Option<Vec<u8>> {
    let compact = value.trim().replace(char::is_whitespace, "");
    let mut decoded = Vec::with_capacity(compact.len() * 3 / 4);
    // Bit accumulator: sextets shift in from the right, bytes pop from the left.
    let mut accumulator = 0u32;
    let mut pending_bits = 0u8;

    for raw in compact.bytes() {
        let sextet = match raw {
            b'A'..=b'Z' => raw - b'A',
            b'a'..=b'z' => raw - b'a' + 26,
            b'0'..=b'9' => raw - b'0' + 52,
            b'+' => 62,
            b'/' => 63,
            b'=' => break,
            _ => return None,
        };
        accumulator = (accumulator << 6) | u32::from(sextet);
        pending_bits += 6;
        while pending_bits >= 8 {
            pending_bits -= 8;
            decoded.push((accumulator >> pending_bits) as u8);
        }
    }

    Some(decoded)
}
|
||
|
||
/// Clamps a value into the inclusive unit interval [0.0, 1.0].
fn clamp01(value: f32) -> f32 {
    f32::clamp(value, 0.0, 1.0)
}
|
||
|
||
fn lerp(from: f32, to: f32, t: f32) -> f32 {
|
||
from + (to - from) * clamp01(t)
|
||
}
|
||
|
||
/// Scores how strongly an RGBA pixel resembles a green-screen backdrop,
/// in [0, 1]. Fully transparent pixels score 1.0 outright; pixels that are
/// too dark or whose green channel does not clearly dominate score 0.0.
fn compute_green_background_score(red: u8, green: u8, blue: u8, alpha: u8) -> f32 {
    if alpha == 0 {
        return 1.0;
    }
    let red_f = red as f32;
    let green_f = green as f32;
    let blue_f = blue as f32;

    // How far green exceeds the stronger of red/blue.
    let green_lead = green_f - red_f.max(blue_f);
    if green_f < 52.0 || green_lead <= 8.0 {
        return 0.0;
    }
    let green_ratio = green_f / (red_f + blue_f).max(1.0);
    if green_ratio <= 0.52 {
        return 0.0;
    }

    // Weighted blend of brightness, channel lead, and ratio evidence.
    let brightness_term = ((green_f - 52.0) / 168.0) * 0.22;
    let lead_term = ((green_lead - 8.0) / 96.0) * 0.53;
    let ratio_term = ((green_ratio - 0.52) / 0.82) * 0.25;
    (brightness_term + lead_term + ratio_term).clamp(0.0, 1.0)
}
|
||
|
||
/// Scores how strongly an RGBA pixel resembles a white/neutral backdrop,
/// in [0, 1]. Fully transparent pixels score 1.0 outright; pixels that are
/// not bright enough on every channel score 0.0.
fn compute_white_background_score(red: u8, green: u8, blue: u8, alpha: u8) -> f32 {
    if alpha == 0 {
        return 1.0;
    }
    let red_f = red as f32;
    let green_f = green as f32;
    let blue_f = blue as f32;

    let max_channel = red_f.max(green_f).max(blue_f);
    let min_channel = red_f.min(green_f).min(blue_f);
    let average = (red_f + green_f + blue_f) / 3.0;
    if average < 188.0 || min_channel < 168.0 {
        return 0.0;
    }

    // Neutrality penalizes color cast; brightness/floor reward whiteness.
    let spread = max_channel - min_channel;
    let neutrality = 1.0 - ((spread - 6.0) / 34.0).clamp(0.0, 1.0);
    let brightness = ((average - 188.0) / 55.0).clamp(0.0, 1.0);
    let floor = ((min_channel - 168.0) / 60.0).clamp(0.0, 1.0);
    (neutrality * (brightness * 0.85 + floor * 0.15)).clamp(0.0, 1.0)
}
|
||
|
||
/// Computes a weighted average color of the solid foreground pixels in the
/// 5x5 window around `(x, y)`, skipping the center pixel, masked background
/// pixels, strongly background-hinted pixels (hint >= 0.18), and anything
/// with alpha below 96. Closer and more opaque neighbors weigh more.
/// Returns `None` when no qualifying neighbor exists.
fn collect_foreground_neighbor_color(
    pixels: &[u8],
    width: usize,
    height: usize,
    x: usize,
    y: usize,
    background_mask: &[u8],
    background_hints: &[f32],
) -> Option<(u8, u8, u8)> {
    let mut weight_sum = 0.0f32;
    // Accumulated weighted red/green/blue sums.
    let mut channel_sums = [0.0f32; 3];

    for offset_y in -2i32..=2 {
        for offset_x in -2i32..=2 {
            if offset_x == 0 && offset_y == 0 {
                continue;
            }
            let next_x = x as i32 + offset_x;
            let next_y = y as i32 + offset_y;
            if next_x < 0 || next_x >= width as i32 || next_y < 0 || next_y >= height as i32 {
                continue;
            }
            let neighbor = next_y as usize * width + next_x as usize;
            // Skip confirmed or strongly suspected background pixels.
            if background_mask[neighbor] != 0 || background_hints[neighbor] >= 0.18 {
                continue;
            }
            let base = neighbor * 4;
            let neighbor_alpha = pixels[base + 3];
            if neighbor_alpha < 96 {
                continue;
            }
            // Manhattan distance picks the ring weight; opacity scales it.
            let manhattan = offset_x.unsigned_abs() + offset_y.unsigned_abs();
            let ring_weight = if manhattan <= 1 {
                1.8
            } else if manhattan == 2 {
                1.2
            } else {
                0.7
            };
            let weight = (neighbor_alpha as f32 / 255.0) * ring_weight;

            weight_sum += weight;
            channel_sums[0] += pixels[base] as f32 * weight;
            channel_sums[1] += pixels[base + 1] as f32 * weight;
            channel_sums[2] += pixels[base + 2] as f32 * weight;
        }
    }

    if weight_sum <= 0.0 {
        return None;
    }

    Some((
        (channel_sums[0] / weight_sum).round() as u8,
        (channel_sums[1] / weight_sum).round() as u8,
        (channel_sums[2] / weight_sum).round() as u8,
    ))
}
|
||
|
||
/// Removes the solid green-screen / white backdrop from an RGBA buffer in
/// place, leaving a transparent background around the foreground subject.
///
/// `pixels` is tightly packed RGBA (4 bytes per pixel, row-major,
/// `width * height` pixels). Returns `true` when at least one pixel was
/// modified and `false` when the buffer was empty or nothing was changed.
pub(crate) fn remove_background_from_rgba(pixels: &mut [u8], width: usize, height: usize) -> bool {
    // Pixels with alpha below this may still be claimed by the flood fill as
    // "soft" anti-aliased edge pixels.
    const SOFT_EDGE_ALPHA_THRESHOLD: u8 = 224;
    // Minimum alpha for a neighbor to count as solid foreground support.
    const FOREGROUND_NEIGHBOR_ALPHA_THRESHOLD: u8 = 96;

    let pixel_count = width * height;
    if pixel_count == 0 {
        return false;
    }

    // Per-pixel state: mask (1 = background), green/white backdrop scores,
    // and a combined "looks like background" hint in 0..=1.
    let mut background_mask = vec![0u8; pixel_count];
    let mut green_scores = vec![0.0f32; pixel_count];
    let mut white_scores = vec![0.0f32; pixel_count];
    let mut background_hints = vec![0.0f32; pixel_count];
    // BFS queue realized as a grow-only Vec plus a read cursor.
    let mut queue = Vec::<usize>::new();
    let mut queue_index = 0usize;
    let mut changed = false;

    // Pass 1: score every pixel for backdrop likelihood.
    for pixel_index in 0..pixel_count {
        let offset = pixel_index * 4;
        let red = pixels[offset];
        let green = pixels[offset + 1];
        let blue = pixels[offset + 2];
        let alpha = pixels[offset + 3];
        let green_score = compute_green_background_score(red, green, blue, alpha);
        let white_score = compute_white_background_score(red, green, blue, alpha);
        // Nearly transparent pixels are also a background hint.
        let transparency_hint = clamp01((56.0 - alpha as f32) / 56.0) * 0.75;

        green_scores[pixel_index] = green_score;
        white_scores[pixel_index] = white_score;
        background_hints[pixel_index] = green_score.max(white_score).max(transparency_hint);
    }

    // Seed helper: mark a border pixel as background when it is a strong
    // candidate (very transparent, clearly green, or clearly white).
    let try_seed_background =
        |pixel_index: usize, background_mask: &mut [u8], queue: &mut Vec<usize>| {
            if background_mask[pixel_index] != 0 {
                return;
            }
            let offset = pixel_index * 4;
            let alpha = pixels[offset + 3];
            let strong_candidate =
                alpha < 40 || green_scores[pixel_index] > 0.12 || white_scores[pixel_index] > 0.32;
            if !strong_candidate {
                return;
            }
            background_mask[pixel_index] = 1;
            queue.push(pixel_index);
        };

    // Seed from the top and bottom rows, then the left/right columns
    // (the corners are already covered by the row loop).
    for x in 0..width {
        try_seed_background(x, &mut background_mask, &mut queue);
        try_seed_background((height - 1) * width + x, &mut background_mask, &mut queue);
    }
    for y in 1..height.saturating_sub(1) {
        try_seed_background(y * width, &mut background_mask, &mut queue);
        try_seed_background(y * width + width - 1, &mut background_mask, &mut queue);
    }

    // Pass 2: 4-connected flood fill growing the background region inward.
    while queue_index < queue.len() {
        let pixel_index = queue[queue_index];
        queue_index += 1;

        let x = pixel_index % width;
        let y = pixel_index / width;
        let neighbor_indexes = [
            if x > 0 { Some(pixel_index - 1) } else { None },
            if x + 1 < width {
                Some(pixel_index + 1)
            } else {
                None
            },
            if y > 0 {
                Some(pixel_index - width)
            } else {
                None
            },
            if y + 1 < height {
                Some(pixel_index + width)
            } else {
                None
            },
        ];

        for next_pixel_index in neighbor_indexes.into_iter().flatten() {
            if background_mask[next_pixel_index] != 0 {
                continue;
            }
            let next_offset = next_pixel_index * 4;
            let next_alpha = pixels[next_offset + 3];
            let next_green_score = green_scores[next_pixel_index];
            let next_white_score = white_scores[next_pixel_index];
            let next_hint = background_hints[next_pixel_index];
            // A softly anti-aliased edge pixel may join the region even when
            // its own color score alone would not qualify it.
            let reachable_soft_edge = next_hint > 0.08
                && next_alpha < SOFT_EDGE_ALPHA_THRESHOLD
                && (next_green_score > 0.04 || next_white_score > 0.08 || next_alpha < 180);

            if next_alpha < 40
                || next_green_score > 0.12
                || next_white_score > 0.32
                || reachable_soft_edge
            {
                background_mask[next_pixel_index] = 1;
                queue.push(next_pixel_index);
            }
        }
    }

    // Pass 3: two dilation rounds pulling weakly hinted pixels adjacent to
    // confirmed background into the mask (closes pinholes along the edge).
    for _ in 0..2 {
        let mut expanded_mask = background_mask.clone();
        for y in 0..height {
            for x in 0..width {
                let pixel_index = y * width + x;
                if expanded_mask[pixel_index] != 0 {
                    continue;
                }
                let alpha = pixels[pixel_index * 4 + 3];
                let hint = background_hints[pixel_index];
                if alpha >= SOFT_EDGE_ALPHA_THRESHOLD || hint <= 0.06 {
                    continue;
                }

                // Count 8-connected neighbors already in the background mask.
                let mut adjacent_background_count = 0usize;
                for offset_y in -1i32..=1 {
                    for offset_x in -1i32..=1 {
                        if offset_x == 0 && offset_y == 0 {
                            continue;
                        }
                        let next_x = x as i32 + offset_x;
                        let next_y = y as i32 + offset_y;
                        if next_x < 0
                            || next_x >= width as i32
                            || next_y < 0
                            || next_y >= height as i32
                        {
                            continue;
                        }
                        if background_mask[next_y as usize * width + next_x as usize] != 0 {
                            adjacent_background_count += 1;
                        }
                    }
                }

                if adjacent_background_count >= 2 || (adjacent_background_count >= 1 && hint > 0.18)
                {
                    expanded_mask[pixel_index] = 1;
                }
            }
        }
        background_mask = expanded_mask;
    }

    // Pass 4: alpha matting — fade out masked pixels, keeping a faint alpha
    // where solid foreground neighbors suggest the pixel is a mixed edge.
    for y in 0..height {
        for x in 0..width {
            let pixel_index = y * width + x;
            if background_mask[pixel_index] == 0 {
                continue;
            }

            let offset = pixel_index * 4;
            let alpha = pixels[offset + 3];
            if alpha == 0 {
                continue;
            }

            let matte_score = background_hints[pixel_index]
                .max(green_scores[pixel_index])
                .max(white_scores[pixel_index]);
            // Count 8-connected non-background neighbors opaque enough to
            // indicate real foreground next to this pixel.
            let mut foreground_support = 0usize;
            for offset_y in -1i32..=1 {
                for offset_x in -1i32..=1 {
                    if offset_x == 0 && offset_y == 0 {
                        continue;
                    }
                    let next_x = x as i32 + offset_x;
                    let next_y = y as i32 + offset_y;
                    if next_x < 0 || next_x >= width as i32 || next_y < 0 || next_y >= height as i32
                    {
                        continue;
                    }
                    let next_pixel_index = next_y as usize * width + next_x as usize;
                    if background_mask[next_pixel_index] != 0 {
                        continue;
                    }
                    let next_alpha = pixels[next_pixel_index * 4 + 3];
                    if next_alpha >= FOREGROUND_NEIGHBOR_ALPHA_THRESHOLD {
                        foreground_support += 1;
                    }
                }
            }

            // Certain background (or isolated) pixels go fully transparent;
            // pixels near foreground keep a score-scaled fraction of alpha.
            let next_alpha = if matte_score > 0.9 || foreground_support == 0 {
                0
            } else if matte_score > 0.72 && foreground_support <= 1 {
                ((alpha as f32) * 0.08).round() as u8
            } else {
                ((alpha as f32) * (0.08f32.max(1.0 - matte_score * 0.95))).round() as u8
            };
            let mut next_alpha = next_alpha;

            // Strong foreground support with a weak matte score keeps more.
            if foreground_support >= 3 && matte_score < 0.55 {
                next_alpha = next_alpha.max(((alpha as f32) * 0.22).round() as u8);
            }
            // Snap near-invisible alpha down to fully transparent.
            if next_alpha < 10 {
                next_alpha = 0;
            }

            if next_alpha != alpha {
                pixels[offset + 3] = next_alpha;
                changed = true;
            }
        }
    }

    // Pass 5: despill — recolor edge pixels contaminated by green/white
    // backdrop bleed, blending toward the nearby foreground color.
    for y in 0..height {
        for x in 0..width {
            let pixel_index = y * width + x;
            let offset = pixel_index * 4;
            let alpha = pixels[offset + 3];
            if alpha == 0 {
                continue;
            }

            // Only pixels touching the transparent/background boundary (or
            // the image border) need despilling.
            let mut touches_transparent_edge = false;
            for offset_y in -1i32..=1 {
                for offset_x in -1i32..=1 {
                    if offset_x == 0 && offset_y == 0 {
                        continue;
                    }
                    let next_x = x as i32 + offset_x;
                    let next_y = y as i32 + offset_y;
                    if next_x < 0 || next_x >= width as i32 || next_y < 0 || next_y >= height as i32
                    {
                        touches_transparent_edge = true;
                        continue;
                    }
                    let next_pixel_index = next_y as usize * width + next_x as usize;
                    if background_mask[next_pixel_index] != 0
                        || pixels[next_pixel_index * 4 + 3] < 16
                    {
                        touches_transparent_edge = true;
                    }
                }
            }

            if !touches_transparent_edge {
                continue;
            }

            // Contamination estimate: color scores, background-mask
            // membership, and partial transparency all contribute.
            let green_score = green_scores[pixel_index];
            let white_score = white_scores[pixel_index];
            let contamination = green_score
                .max(white_score)
                .max(if background_mask[pixel_index] != 0 {
                    0.35
                } else {
                    0.0
                })
                .max(if alpha < 220 {
                    ((220 - alpha) as f32 / 220.0) * 0.25
                } else {
                    0.0
                });

            if contamination < 0.06 {
                continue;
            }

            let mut red = pixels[offset] as f32;
            let mut green = pixels[offset + 1] as f32;
            let mut blue = pixels[offset + 2] as f32;
            // Weighted average color of solid foreground pixels nearby.
            let sample = collect_foreground_neighbor_color(
                pixels,
                width,
                height,
                x,
                y,
                &background_mask,
                &background_hints,
            );
            // NOTE(review): touches_transparent_edge is always true at this
            // point, so the blend factor is effectively at least 0.22.
            let blend =
                clamp01(contamination.max(if touches_transparent_edge { 0.22 } else { 0.0 }));

            if let Some((sample_red, sample_green, sample_blue)) = sample {
                // Pull the pixel toward the sampled foreground color, then
                // cap channels so backdrop spill cannot dominate.
                red = lerp(red, sample_red as f32, blend);
                green = lerp(green, sample_green as f32, blend);
                blue = lerp(blue, sample_blue as f32, blend);

                if green_score > 0.04 {
                    green = green.min(sample_green as f32 + 18.0);
                }
                if white_score > 0.1 {
                    red = red.min(sample_red as f32 + 26.0);
                    green = green.min(sample_green as f32 + 26.0);
                    blue = blue.min(sample_blue as f32 + 26.0);
                }
            } else {
                // NOTE(review): when green dominates, chaining two `.max`
                // calls here leaves `green` unchanged (both alternatives are
                // <= green) — this branch looks like it intended to *reduce*
                // green spill; confirm whether `.min`/clamping was meant.
                if green_score > 0.04 {
                    green = green
                        .max(red.max(blue))
                        .max((green - (green - red.max(blue)) * 0.78).round());
                }

                if white_score > 0.12 {
                    let spread = red.max(green).max(blue) - red.min(green).min(blue);
                    if spread < 20.0 {
                        // Darken near-neutral bright pixels slightly.
                        let toned_value = ((red + green + blue) / 3.0 * 0.88).round();
                        red = red.min(toned_value);
                        green = green.min(toned_value);
                        blue = blue.min(toned_value);
                    }
                }
            }

            // Fade alpha a little where the pixel still scores as backdrop.
            let mut next_alpha = alpha;
            let edge_fade = (green_score * 0.35).max(white_score * 0.28);
            if edge_fade > 0.08 {
                next_alpha = ((alpha as f32) * (1.0 - edge_fade)).round() as u8;
                if next_alpha < 10 {
                    next_alpha = 0;
                }
            }

            let next_red = red.round() as u8;
            let next_green = green.round() as u8;
            let next_blue = blue.round() as u8;
            if next_red != pixels[offset]
                || next_green != pixels[offset + 1]
                || next_blue != pixels[offset + 2]
                || next_alpha != alpha
            {
                pixels[offset] = next_red;
                pixels[offset + 1] = next_green;
                pixels[offset + 2] = next_blue;
                pixels[offset + 3] = next_alpha;
                changed = true;
            }
        }
    }

    changed
}
|
||
|
||
/// Converts an `AppError` into an HTTP response, attaching the
/// request-scoped context for error reporting.
fn character_visual_error_response(request_context: &RequestContext, error: AppError) -> Response {
    error.into_response_with_context(Some(request_context))
}
|
||
|
||
/// Small helper trait for substituting a fallback when a string ends up
/// empty after normalization.
trait EmptyFallback {
    // Returns `self` unless it is empty, in which case returns `fallback`.
    fn if_empty_then(self, fallback: &str) -> String;
}
|
||
|
||
impl EmptyFallback for String {
|
||
fn if_empty_then(self, fallback: &str) -> String {
|
||
if self.is_empty() {
|
||
fallback.to_string()
|
||
} else {
|
||
self
|
||
}
|
||
}
|
||
}
|
||
|
||
// Result bundle of one character-visual generation run.
struct GeneratedCharacterVisuals {
    // Identifier of the AI task recorded for this generation.
    task_id: String,
    // Prompt the upstream actually used, when available.
    actual_prompt: Option<String>,
    // Prompt we submitted (possibly the moderation-safe fallback).
    submitted_prompt: String,
    // True when the moderation-safe fallback prompt was used.
    moderation_fallback_applied: bool,
    // Downloaded image payloads produced by the generation.
    images: Vec<DownloadedGeneratedImage>,
}
|
||
|
||
// A single generated image downloaded from the provider.
struct DownloadedGeneratedImage {
    // Raw image bytes.
    bytes: Vec<u8>,
    // MIME type reported/derived for the image (e.g. "image/png").
    mime_type: String,
    // File extension matching the mime type (e.g. "png").
    extension: String,
}
|
||
|
||
// Decoded contents of a `data:image/...;base64,...` URL.
struct ParsedImageDataUrl {
    // MIME type taken from the data URL header (always starts with "image/").
    mime_type: String,
    // Decoded, non-empty base64 payload.
    bytes: Vec<u8>,
}
|
||
|
||
#[cfg(test)]
mod tests {
    use super::*;

    // The generated prompt must embed the character description and keep the
    // fixed pose/green-screen constraints.
    #[test]
    fn build_character_visual_prompt_keeps_generation_constraints() {
        let prompt = build_character_visual_prompt("潮雾港向导");

        assert!(prompt.contains("潮雾港向导"));
        assert!(prompt.contains("右向斜侧身"));
        assert!(prompt.contains("纯绿色绿幕"));
    }

    // The moderation-safe fallback prompt must drop specific names and any
    // reference to existing IP while asserting originality.
    #[test]
    fn fallback_character_visual_prompt_removes_risky_specific_names() {
        let prompt =
            build_fallback_moderation_safe_character_visual_prompt("艾瑞克,银发剑士,红色长披风");

        assert!(prompt.contains("原创"));
        assert!(prompt.contains("不参考任何现有"));
        assert!(!prompt.contains("艾瑞克"));
        assert!(!prompt.contains("某知名设定参考"));
    }

    // Legacy model names (and blanks) normalize to the current gpt-image-2.
    #[test]
    fn legacy_character_visual_model_normalizes_to_gpt_image_2() {
        assert_eq!(
            resolve_character_visual_model("wan2.7-image-pro"),
            "gpt-image-2"
        );
        assert_eq!(resolve_character_visual_model(""), "gpt-image-2");
    }

    // An upstream IP-infringement rejection must be detected as a moderation
    // error so the fallback prompt path triggers.
    #[test]
    fn image_ip_infringement_error_uses_moderation_fallback() {
        let error = map_image_upstream_error(
            r#"{"request_id":"a18fb05d","output":{"task_id":"cb768c95","task_status":"FAILED","code":"IPInfringementSuspect","message":"Input data is suspected of being involved in IP infringement."}}"#,
            "角色主形象任务执行失败。",
        );

        assert!(is_image_test_moderation_error(&error));
    }

    // Mixed-case/space/CJK input must sanitize to a lowercase dash-joined
    // segment, with non-ASCII runs collapsed and trimmed away.
    #[test]
    fn sanitize_storage_segment_keeps_legacy_safe_shape() {
        assert_eq!(
            sanitize_storage_segment("Harbor Guide/潮雾", "character"),
            "harbor-guide"
        );
    }
}
|