This commit is contained in:
2026-05-09 17:15:23 +08:00
parent 80a4183b45
commit a0ed128bde
43 changed files with 2573 additions and 381 deletions

View File

@@ -1,14 +1,15 @@
use std::time::{Duration, Instant};
use std::time::Duration;
use axum::http::StatusCode;
use base64::{Engine as _, engine::general_purpose::STANDARD as BASE64_STANDARD};
use reqwest::header;
use serde_json::{Map, Value, json};
use tokio::time::sleep;
use crate::{http_error::AppError, state::AppState};
pub(crate) const GPT_IMAGE_2_MODEL: &str = "gpt-image-2";
pub(crate) const VECTOR_ENGINE_GPT_IMAGE_2_MODEL: &str = "gpt-image-2-all";
const VECTOR_ENGINE_PROVIDER: &str = "vector-engine";
#[derive(Clone, Debug)]
pub(crate) struct OpenAiImageSettings {
@@ -31,37 +32,41 @@ pub(crate) struct DownloadedOpenAiImage {
pub extension: String,
}
// RPG and related image assets are generated through VectorEngine's
// OpenAI-compatible gpt-image-2-all endpoint so that API keys and the
// vendor protocol are never exposed to the frontend.
pub(crate) fn require_openai_image_settings(
    state: &AppState,
) -> Result<OpenAiImageSettings, AppError> {
    // Normalize the configured base URL: surrounding whitespace and a
    // trailing slash would otherwise produce malformed request URLs.
    let base_url = state
        .config
        .vector_engine_base_url
        .trim()
        .trim_end_matches('/');
    if base_url.is_empty() {
        return Err(
            AppError::from_status(StatusCode::SERVICE_UNAVAILABLE).with_details(json!({
                "provider": VECTOR_ENGINE_PROVIDER,
                "reason": "VECTOR_ENGINE_BASE_URL 未配置",
            })),
        );
    }
    // A missing or blank API key is reported as "service unavailable" so
    // callers get a clear configuration error instead of an upstream
    // authentication failure later.
    let api_key = state
        .config
        .vector_engine_api_key
        .as_deref()
        .map(str::trim)
        .filter(|value| !value.is_empty())
        .ok_or_else(|| {
            AppError::from_status(StatusCode::SERVICE_UNAVAILABLE).with_details(json!({
                "provider": VECTOR_ENGINE_PROVIDER,
                "reason": "VECTOR_ENGINE_API_KEY 未配置",
            }))
        })?;
    Ok(OpenAiImageSettings {
        base_url: base_url.to_string(),
        api_key: api_key.to_string(),
        // Guard against a configured timeout of zero, which would make
        // every outbound request fail immediately.
        request_timeout_ms: state.config.vector_engine_image_request_timeout_ms.max(1),
    })
}
@@ -73,8 +78,8 @@ pub(crate) fn build_openai_image_http_client(
.build()
.map_err(|error| {
AppError::from_status(StatusCode::INTERNAL_SERVER_ERROR).with_details(json!({
"provider": "apimart",
"message": format!("构造 APIMart 图片生成 HTTP 客户端失败:{error}"),
"provider": VECTOR_ENGINE_PROVIDER,
"message": format!("构造 VectorEngine 图片生成 HTTP 客户端失败:{error}"),
}))
})
}
@@ -97,11 +102,12 @@ pub(crate) async fn create_openai_image_generation(
reference_images,
);
let response = http_client
.post(format!("{}/images/generations", settings.base_url))
.post(vector_engine_images_generation_url(settings))
.header(
header::AUTHORIZATION,
format!("Bearer {}", settings.api_key),
)
.header(header::ACCEPT, "application/json")
.header(header::CONTENT_TYPE, "application/json")
.json(&request_body)
.send()
@@ -124,40 +130,29 @@ pub(crate) async fn create_openai_image_generation(
}
let response_json = parse_json_payload(response_text.as_str(), failure_context)?;
let generation_id = extract_generation_id(&response_json.payload)
.unwrap_or_else(|| format!("vector-engine-{}", current_utc_micros()));
let actual_prompt = find_first_string_by_key(&response_json.payload, "revised_prompt")
.or_else(|| find_first_string_by_key(&response_json.payload, "actual_prompt"));
let image_urls = extract_image_urls(&response_json.payload);
if !image_urls.is_empty() {
return download_images_from_urls(
http_client,
format!("apimart-{}", current_utc_micros()),
image_urls,
candidate_count,
)
.await;
let mut generated =
download_images_from_urls(http_client, generation_id, image_urls, candidate_count)
.await?;
generated.actual_prompt = actual_prompt;
return Ok(generated);
}
let b64_images = extract_b64_images(&response_json.payload);
if !b64_images.is_empty() {
return Ok(images_from_base64(
format!("apimart-{}", current_utc_micros()),
b64_images,
candidate_count,
));
let mut generated = images_from_base64(generation_id, b64_images, candidate_count);
generated.actual_prompt = actual_prompt;
return Ok(generated);
}
let task_id = extract_task_id(&response_json.payload).ok_or_else(|| {
AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
"provider": "apimart",
"message": format!("{failure_context}:上游未返回 task_id 或图片"),
}))
})?;
wait_openai_generated_images(
http_client,
settings,
task_id.as_str(),
candidate_count,
failure_context,
)
.await
Err(AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
"provider": VECTOR_ENGINE_PROVIDER,
"message": format!("{failure_context}VectorEngine 未返回图片地址"),
})))
}
pub(crate) fn build_openai_image_request_body(
@@ -170,14 +165,13 @@ pub(crate) fn build_openai_image_request_body(
let mut body = Map::from_iter([
(
"model".to_string(),
Value::String(GPT_IMAGE_2_MODEL.to_string()),
Value::String(VECTOR_ENGINE_GPT_IMAGE_2_MODEL.to_string()),
),
(
"prompt".to_string(),
Value::String(build_prompt_with_negative(prompt, negative_prompt)),
),
("n".to_string(), json!(candidate_count.clamp(1, 4))),
("official_fallback".to_string(), Value::Bool(true)),
(
"size".to_string(),
Value::String(normalize_image_size(size)),
@@ -185,7 +179,7 @@ pub(crate) fn build_openai_image_request_body(
]);
if !reference_images.is_empty() {
body.insert("image_urls".to_string(), json!(reference_images));
body.insert("image".to_string(), json!(reference_images));
}
Value::Object(body)
@@ -205,109 +199,16 @@ fn build_prompt_with_negative(prompt: &str, negative_prompt: Option<&str>) -> St
/// Map legacy/shorthand size aliases onto the pixel sizes accepted by the
/// gpt-image-2 endpoint. Unknown non-empty values pass through unchanged;
/// an empty value falls back to the square default.
///
/// The merged diff residue had both the old ratio arms ("1:1"/"16:9") and
/// the new pixel-size arms, producing duplicate patterns and a second,
/// unreachable `_` arm; only the new mapping is kept here.
fn normalize_image_size(size: &str) -> String {
    match size.trim() {
        "1024*1024" | "1024x1024" | "1:1" => "1024x1024",
        // Landscape aliases, including the legacy 16:9 presets.
        "1280*720" | "1280x720" | "1600*900" | "1600x900" | "16:9"
        | "1536x1024" | "2048x1152" | "2k" => "1536x1024",
        // Portrait aliases.
        "1024*1536" | "1024x1536" | "9:16" => "1024x1536",
        // Unknown but non-empty: trust the caller's explicit value.
        value if !value.is_empty() => value,
        _ => "1024x1024",
    }
    .to_string()
}
/// Poll the legacy asynchronous task endpoint (`/tasks/{id}`) until the
/// image-generation task finishes, the overall deadline elapses, or the
/// upstream reports a terminal failure.
///
/// On success, returns the downloaded (or base64-decoded) images with
/// `actual_prompt` populated when the upstream echoed one back. Any
/// transport, parse, upstream-status, or timeout problem is mapped to an
/// `AppError` whose message includes `failure_context`.
async fn wait_openai_generated_images(
    http_client: &reqwest::Client,
    settings: &OpenAiImageSettings,
    task_id: &str,
    candidate_count: u32,
    failure_context: &str,
) -> Result<OpenAiGeneratedImages, AppError> {
    // One shared deadline for the whole polling loop, derived from the
    // configured per-request timeout.
    let deadline = Instant::now() + Duration::from_millis(settings.request_timeout_ms);
    // Generation rarely finishes instantly; wait before the first poll to
    // avoid a guaranteed-miss request.
    sleep(Duration::from_secs(10)).await;
    while Instant::now() < deadline {
        let poll_response = http_client
            .get(format!("{}/tasks/{}", settings.base_url, task_id))
            .header(
                header::AUTHORIZATION,
                format!("Bearer {}", settings.api_key),
            )
            .send()
            .await
            .map_err(|error| {
                map_openai_image_request_error(format!(
                    "{failure_context}:查询图片生成任务失败:{error}"
                ))
            })?;
        let poll_status = poll_response.status();
        // Read the raw body up front so it can be echoed into error details.
        let poll_text = poll_response.text().await.map_err(|error| {
            map_openai_image_request_error(format!(
                "{failure_context}:读取图片生成任务响应失败:{error}"
            ))
        })?;
        if !poll_status.is_success() {
            return Err(map_openai_image_upstream_error(
                poll_status.as_u16(),
                poll_text.as_str(),
                failure_context,
            ));
        }
        let poll_json = parse_json_payload(poll_text.as_str(), failure_context)?;
        // The upstream has used both `status` and `task_status` keys;
        // accept either, compared case-insensitively after trimming.
        let task_status = find_first_string_by_key(&poll_json.payload, "status")
            .or_else(|| find_first_string_by_key(&poll_json.payload, "task_status"))
            .unwrap_or_default()
            .trim()
            .to_ascii_lowercase();
        if matches!(task_status.as_str(), "completed" | "succeeded" | "success") {
            let image_urls = extract_image_urls(&poll_json.payload);
            if image_urls.is_empty() {
                // No URLs: fall back to inline base64 payloads before
                // declaring this "successful" response unusable.
                let b64_images = extract_b64_images(&poll_json.payload);
                if b64_images.is_empty() {
                    return Err(AppError::from_status(StatusCode::BAD_GATEWAY).with_details(
                        json!({
                            "provider": "apimart",
                            "message": format!("{failure_context}:任务成功但未返回图片"),
                        }),
                    ));
                }
                let mut generated =
                    images_from_base64(task_id.to_string(), b64_images, candidate_count);
                generated.actual_prompt =
                    find_first_string_by_key(&poll_json.payload, "actual_prompt");
                return Ok(generated);
            }
            let mut generated = download_images_from_urls(
                http_client,
                task_id.to_string(),
                image_urls,
                candidate_count,
            )
            .await?;
            generated.actual_prompt = find_first_string_by_key(&poll_json.payload, "actual_prompt");
            return Ok(generated);
        }
        if matches!(
            task_status.as_str(),
            "failed" | "error" | "canceled" | "cancelled" | "unknown"
        ) {
            // NOTE(review): `poll_status` is the HTTP status of a successful
            // poll here (we returned earlier on !is_success), so the reported
            // upstream status is ~200 even though the task failed — confirm
            // this is intended.
            return Err(map_openai_image_upstream_error(
                poll_status.as_u16(),
                poll_text.as_str(),
                failure_context,
            ));
        }
        // Still pending: back off briefly before the next poll.
        sleep(Duration::from_secs(3)).await;
    }
    // Deadline exhausted without a terminal task state.
    Err(
        AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
            "provider": "apimart",
            "message": format!("{failure_context}:图片生成超时或未返回图片地址"),
        })),
    )
}
async fn download_images_from_urls(
http_client: &reqwest::Client,
task_id: String,
@@ -377,7 +278,7 @@ pub(crate) async fn download_remote_image(
if !status.is_success() {
return Err(
AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
"provider": "apimart",
"provider": VECTOR_ENGINE_PROVIDER,
"message": "下载生成图片失败",
"status": status.as_u16(),
})),
@@ -400,7 +301,7 @@ fn parse_json_payload(
.map(|payload| ParsedJsonPayload { payload })
.map_err(|error| {
AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
"provider": "apimart",
"provider": VECTOR_ENGINE_PROVIDER,
"message": format!("{failure_context}:解析响应失败:{error}"),
"rawExcerpt": truncate_raw(raw_text),
}))
@@ -409,7 +310,7 @@ fn parse_json_payload(
/// Wrap a transport-level failure (send/read error while talking to the
/// image upstream) into a 502 response with a provider-tagged detail
/// payload. The merged diff residue carried both the old "apimart" and
/// the new provider key; only the VectorEngine tag is kept.
fn map_openai_image_request_error(message: String) -> AppError {
    AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
        "provider": VECTOR_ENGINE_PROVIDER,
        "message": message,
    }))
}
@@ -421,14 +322,14 @@ fn map_openai_image_upstream_error(
) -> AppError {
let message = parse_api_error_message(raw_text, failure_context);
tracing::warn!(
provider = "apimart",
provider = VECTOR_ENGINE_PROVIDER,
upstream_status,
raw_excerpt = %truncate_raw(raw_text),
message,
"APIMart 图片生成上游错误"
"VectorEngine 图片生成上游错误"
);
AppError::from_status(StatusCode::BAD_GATEWAY).with_details(json!({
"provider": "apimart",
"provider": VECTOR_ENGINE_PROVIDER,
"message": message,
"upstreamStatus": upstream_status,
"rawExcerpt": truncate_raw(raw_text),
@@ -516,10 +417,10 @@ fn find_first_string_by_key(value: &Value, target_key: &str) -> Option<String> {
results.into_iter().next()
}
fn extract_task_id(payload: &Value) -> Option<String> {
find_first_string_by_key(payload, "task_id")
.or_else(|| find_first_string_by_key(payload, "taskId"))
.or_else(|| find_first_string_by_key(payload, "id"))
fn extract_generation_id(payload: &Value) -> Option<String> {
find_first_string_by_key(payload, "id")
.or_else(|| find_first_string_by_key(payload, "created"))
.or_else(|| find_first_string_by_key(payload, "request_id"))
}
fn extract_image_urls(payload: &Value) -> Vec<String> {
@@ -542,6 +443,14 @@ fn extract_b64_images(payload: &Value) -> Vec<String> {
values
}
/// Build the `images/generations` endpoint URL, inserting the `/v1` path
/// prefix only when the configured base URL does not already end with it.
fn vector_engine_images_generation_url(settings: &OpenAiImageSettings) -> String {
    let path = if settings.base_url.ends_with("/v1") {
        "/images/generations"
    } else {
        "/v1/images/generations"
    };
    format!("{}{}", settings.base_url, path)
}
fn normalize_downloaded_image_mime_type(content_type: &str) -> String {
let mime_type = content_type
.split(';')
@@ -602,7 +511,7 @@ mod tests {
use super::*;
#[test]
fn gpt_image_2_request_normalizes_legacy_sizes_and_reference_images() {
fn gpt_image_2_request_uses_vector_engine_contract() {
let body = build_openai_image_request_body(
"雾海神殿",
Some("文字,水印"),
@@ -611,11 +520,11 @@ mod tests {
&["data:image/png;base64,abcd".to_string()],
);
assert_eq!(body["model"], GPT_IMAGE_2_MODEL);
assert_eq!(body["size"], "16:9");
assert_eq!(body["model"], VECTOR_ENGINE_GPT_IMAGE_2_MODEL);
assert_eq!(body["size"], "1536x1024");
assert_eq!(body["n"], 2);
assert_eq!(body["official_fallback"], true);
assert_eq!(body["image_urls"][0], "data:image/png;base64,abcd");
assert!(body.get("official_fallback").is_none());
assert_eq!(body["image"][0], "data:image/png;base64,abcd");
assert!(body["prompt"].as_str().unwrap_or_default().contains("避免"));
}