推进 server-rs DDD 分层与新接口接线

This commit is contained in:
Codex
2026-04-29 15:46:16 +08:00
parent 9d3fcfae77
commit f82775b852
89 changed files with 3657 additions and 9636 deletions

View File

@@ -0,0 +1,100 @@
use crate::*;
/// AI task event kinds.
///
/// The event table exists so subscribers and the BFF can consume state
/// changes incrementally; the authoritative task state still lives in
/// `ai_task`, `ai_task_stage`, `ai_text_chunk` and `ai_result_reference`.
#[derive(Clone, Copy, Debug, PartialEq, Eq, SpacetimeType)]
pub enum AiTaskEventKind {
    // A new task row was created.
    TaskCreated,
    // The task's overall status changed (started/completed/failed/cancelled).
    TaskStatusChanged,
    // A stage within the task began.
    StageStarted,
    // A stage within the task finished.
    StageCompleted,
    // A streaming text chunk was appended to a stage.
    TextChunkAppended,
    // A result reference (output artifact pointer) was attached.
    ResultReferenceAttached,
}
/// Append-only event row mirroring AI task state transitions.
///
/// Indexed by `task_id` and `owner_user_id` so consumers can subscribe to a
/// single task's timeline or to everything a user owns.
#[spacetimedb::table(
    accessor = ai_task_event,
    public,
    event,
    index(accessor = by_ai_task_event_task_id, btree(columns = [task_id])),
    index(accessor = by_ai_task_event_owner_user_id, btree(columns = [owner_user_id]))
)]
pub struct AiTaskEvent {
    // Unique id, built as `aievt_{task_id}_{occurred_at_micros}_{suffix}`
    // by `emit_ai_task_event`.
    #[primary_key]
    pub(crate) event_id: String,
    // Task this event belongs to.
    pub(crate) task_id: String,
    // Owner of the task at emission time.
    pub(crate) owner_user_id: String,
    // What kind of transition this row records.
    pub(crate) event_kind: AiTaskEventKind,
    // Task status at emission time (always populated by `emit_ai_task_event`).
    pub(crate) task_status: Option<AiTaskStatus>,
    // Stage involved, for stage/chunk events; `None` otherwise.
    pub(crate) stage_kind: Option<AiTaskStageKind>,
    // Row id of the appended text chunk, for `TextChunkAppended` events.
    pub(crate) text_chunk_row_id: Option<String>,
    // Row id of the attached result reference, for `ResultReferenceAttached` events.
    pub(crate) result_reference_row_id: Option<String>,
    // When the underlying change happened (caller-supplied micros).
    pub(crate) occurred_at: Timestamp,
}
/// Inserts one `ai_task_event` row describing a state change of `task`.
///
/// The event id embeds the task id, the microsecond timestamp and a
/// kind-specific suffix, so events of different kinds emitted at the same
/// microsecond get distinct primary keys.
pub(crate) fn emit_ai_task_event(
    ctx: &ReducerContext,
    task: &AiTaskSnapshot,
    event_kind: AiTaskEventKind,
    stage_kind: Option<AiTaskStageKind>,
    text_chunk_row_id: Option<String>,
    result_reference_row_id: Option<String>,
    occurred_at_micros: i64,
) {
    // Kind-specific discriminator appended to the event id.
    let suffix = match event_kind {
        AiTaskEventKind::TaskCreated => String::from("created"),
        AiTaskEventKind::TaskStatusChanged => {
            format!("status_{}", task.status.as_event_slug())
        }
        AiTaskEventKind::StageStarted => {
            format!("stage_started_{}", stage_kind_slug(stage_kind))
        }
        AiTaskEventKind::StageCompleted => {
            format!("stage_completed_{}", stage_kind_slug(stage_kind))
        }
        AiTaskEventKind::TextChunkAppended => {
            let row = text_chunk_row_id.as_deref().unwrap_or("unknown");
            format!("chunk_{row}")
        }
        AiTaskEventKind::ResultReferenceAttached => {
            let row = result_reference_row_id.as_deref().unwrap_or("unknown");
            format!("result_{row}")
        }
    };
    let event_row = AiTaskEvent {
        event_id: format!("aievt_{}_{}_{}", task.task_id, occurred_at_micros, suffix),
        task_id: task.task_id.clone(),
        owner_user_id: task.owner_user_id.clone(),
        event_kind,
        task_status: Some(task.status),
        stage_kind,
        text_chunk_row_id,
        result_reference_row_id,
        occurred_at: Timestamp::from_micros_since_unix_epoch(occurred_at_micros),
    };
    ctx.db.ai_task_event().insert(event_row);
}
fn stage_kind_slug(stage_kind: Option<AiTaskStageKind>) -> &'static str {
stage_kind.map(AiTaskStageKind::as_str).unwrap_or("unknown")
}
/// Internal helper trait mapping a task status to the slug used inside
/// `TaskStatusChanged` event ids.
trait AiTaskStatusEventSlug {
    // Returns a `'static` lowercase slug for this status.
    fn as_event_slug(self) -> &'static str;
}
impl AiTaskStatusEventSlug for AiTaskStatus {
fn as_event_slug(self) -> &'static str {
match self {
Self::Pending => "pending",
Self::Running => "running",
Self::Completed => "completed",
Self::Failed => "failed",
Self::Cancelled => "cancelled",
}
}
}

View File

@@ -1,7 +1,9 @@
// AI task domain: submodule wiring.
mod events;
mod snapshots;
mod stages;
mod tasks;
// Event emission and snapshot mapping are crate-internal implementation
// details; the stage and task APIs are the module's public surface.
pub(crate) use events::*;
pub(crate) use snapshots::*;
pub use stages::*;
pub use tasks::*;

View File

@@ -119,13 +119,7 @@ pub(crate) fn build_ai_task_stage_snapshot_from_row(row: &AiTaskStage) -> AiTask
pub(crate) fn build_ai_text_chunk_row(snapshot: &AiTextChunkSnapshot) -> AiTextChunk {
AiTextChunk {
text_chunk_row_id: format!(
"{}{}_{}_{}",
AI_TEXT_CHUNK_ID_PREFIX,
snapshot.task_id,
snapshot.stage_kind.as_str(),
snapshot.sequence
),
text_chunk_row_id: build_ai_text_chunk_row_id(snapshot),
chunk_id: snapshot.chunk_id.clone(),
task_id: snapshot.task_id.clone(),
stage_kind: snapshot.stage_kind,
@@ -135,6 +129,16 @@ pub(crate) fn build_ai_text_chunk_row(snapshot: &AiTextChunkSnapshot) -> AiTextC
}
}
/// Builds the deterministic `ai_text_chunk` row id:
/// `{prefix}{task_id}_{stage_slug}_{sequence}`.
pub(crate) fn build_ai_text_chunk_row_id(snapshot: &AiTextChunkSnapshot) -> String {
    format!(
        "{prefix}{task}_{stage}_{sequence}",
        prefix = AI_TEXT_CHUNK_ID_PREFIX,
        task = snapshot.task_id,
        stage = snapshot.stage_kind.as_str(),
        sequence = snapshot.sequence,
    )
}
pub(crate) fn build_ai_text_chunk_snapshot_from_row(row: &AiTextChunk) -> AiTextChunkSnapshot {
AiTextChunkSnapshot {
chunk_id: row.chunk_id.clone(),
@@ -150,10 +154,7 @@ pub(crate) fn build_ai_result_reference_row(
snapshot: &AiResultReferenceSnapshot,
) -> AiResultReference {
AiResultReference {
result_reference_row_id: format!(
"{}{}_{}",
AI_RESULT_REF_ID_PREFIX, snapshot.task_id, snapshot.result_ref_id
),
result_reference_row_id: build_ai_result_reference_row_id(snapshot),
result_ref_id: snapshot.result_ref_id.clone(),
task_id: snapshot.task_id.clone(),
reference_kind: snapshot.reference_kind,
@@ -163,6 +164,13 @@ pub(crate) fn build_ai_result_reference_row(
}
}
/// Builds the deterministic `ai_result_reference` row id:
/// `{prefix}{task_id}_{result_ref_id}`.
pub(crate) fn build_ai_result_reference_row_id(snapshot: &AiResultReferenceSnapshot) -> String {
    format!(
        "{prefix}{task}_{reference}",
        prefix = AI_RESULT_REF_ID_PREFIX,
        task = snapshot.task_id,
        reference = snapshot.result_ref_id,
    )
}
pub(crate) fn build_ai_result_reference_snapshot_from_row(
row: &AiResultReference,
) -> AiResultReferenceSnapshot {

View File

@@ -156,6 +156,15 @@ pub(crate) fn start_ai_task_stage_tx(
snapshot.version += 1;
persist_ai_task_snapshot(ctx, &snapshot)?;
emit_ai_task_event(
ctx,
&snapshot,
AiTaskEventKind::StageStarted,
Some(input.stage_kind),
None,
None,
input.started_at_micros,
);
Ok(snapshot)
}
@@ -207,6 +216,15 @@ pub(crate) fn append_ai_text_chunk_tx(
snapshot.version += 1;
persist_ai_task_snapshot(ctx, &snapshot)?;
emit_ai_task_event(
ctx,
&snapshot,
AiTaskEventKind::TextChunkAppended,
Some(chunk.stage_kind),
Some(build_ai_text_chunk_row_id(&chunk)),
None,
chunk.created_at_micros,
);
Ok((snapshot, chunk))
}
@@ -235,6 +253,15 @@ pub(crate) fn complete_ai_stage_tx(
snapshot.version += 1;
persist_ai_task_snapshot(ctx, &snapshot)?;
emit_ai_task_event(
ctx,
&snapshot,
AiTaskEventKind::StageCompleted,
Some(input.stage_kind),
None,
None,
input.completed_at_micros,
);
Ok(snapshot)
}
@@ -267,6 +294,19 @@ pub(crate) fn attach_ai_result_reference_tx(
snapshot.version += 1;
persist_ai_task_snapshot(ctx, &snapshot)?;
let reference = snapshot
.result_references
.last()
.ok_or_else(|| "ai_result_reference 写入后缺少快照".to_string())?;
emit_ai_task_event(
ctx,
&snapshot,
AiTaskEventKind::ResultReferenceAttached,
None,
None,
Some(build_ai_result_reference_row_id(reference)),
input.created_at_micros,
);
Ok(snapshot)
}

View File

@@ -135,6 +135,15 @@ fn create_ai_task_tx(
let task_snapshot = build_ai_task_snapshot_from_create_input(&input);
ctx.db.ai_task().insert(build_ai_task_row(&task_snapshot));
replace_ai_task_stages(ctx, &task_snapshot.task_id, &task_snapshot.stages);
emit_ai_task_event(
ctx,
&task_snapshot,
AiTaskEventKind::TaskCreated,
None,
None,
None,
task_snapshot.created_at_micros,
);
get_ai_task_snapshot_tx(ctx, &task_snapshot.task_id)
}
@@ -154,6 +163,15 @@ fn start_ai_task_tx(
snapshot.version += 1;
persist_ai_task_snapshot(ctx, &snapshot)?;
emit_ai_task_event(
ctx,
&snapshot,
AiTaskEventKind::TaskStatusChanged,
None,
None,
None,
input.started_at_micros,
);
Ok(snapshot)
}
@@ -170,6 +188,15 @@ fn complete_ai_task_tx(
snapshot.version += 1;
persist_ai_task_snapshot(ctx, &snapshot)?;
emit_ai_task_event(
ctx,
&snapshot,
AiTaskEventKind::TaskStatusChanged,
None,
None,
None,
input.completed_at_micros,
);
Ok(snapshot)
}
@@ -192,6 +219,15 @@ fn fail_ai_task_tx(
snapshot.version += 1;
persist_ai_task_snapshot(ctx, &snapshot)?;
emit_ai_task_event(
ctx,
&snapshot,
AiTaskEventKind::TaskStatusChanged,
None,
None,
None,
input.completed_at_micros,
);
Ok(snapshot)
}
@@ -208,6 +244,15 @@ fn cancel_ai_task_tx(
snapshot.version += 1;
persist_ai_task_snapshot(ctx, &snapshot)?;
emit_ai_task_event(
ctx,
&snapshot,
AiTaskEventKind::TaskStatusChanged,
None,
None,
None,
input.completed_at_micros,
);
Ok(snapshot)
}

View File

@@ -1,5 +1,6 @@
use crate::big_fish::tables::{big_fish_asset_slot, big_fish_creation_session};
use crate::*;
use module_big_fish::{EvaluateBigFishPublishReadinessCommand, evaluate_publish_readiness};
#[spacetimedb::procedure]
pub fn generate_big_fish_asset(
@@ -70,6 +71,16 @@ pub(crate) fn generate_big_fish_asset_tx(
upsert_big_fish_asset_slot(ctx, slot);
let asset_slots = list_big_fish_asset_slots(ctx, &session.session_id);
let readiness = evaluate_publish_readiness(
EvaluateBigFishPublishReadinessCommand {
session_id: session.session_id.clone(),
owner_user_id: session.owner_user_id.clone(),
draft: Some(draft.clone()),
evaluated_at_micros: input.generated_at_micros,
},
&asset_slots,
)
.map_err(|error| error.to_string())?;
let coverage = build_asset_coverage(Some(&draft), &asset_slots);
let updated_at = Timestamp::from_micros_since_unix_epoch(input.generated_at_micros);
let uses_placeholder = input
@@ -90,7 +101,7 @@ pub(crate) fn generate_big_fish_asset_tx(
}
}
.to_string();
let next_stage = if coverage.publish_ready {
let next_stage = if readiness.readiness.publish_ready {
BigFishCreationStage::ReadyToPublish
} else {
BigFishCreationStage::AssetRefining
@@ -100,19 +111,26 @@ pub(crate) fn generate_big_fish_asset_tx(
owner_user_id: session.owner_user_id.clone(),
seed_text: session.seed_text.clone(),
current_turn: session.current_turn,
progress_percent: if coverage.publish_ready { 96 } else { 88 },
progress_percent: if readiness.readiness.publish_ready {
96
} else {
88
},
stage: next_stage,
anchor_pack_json: session.anchor_pack_json.clone(),
draft_json: session.draft_json.clone(),
asset_coverage_json: serialize_asset_coverage(&coverage)
.map_err(|error| error.to_string())?,
last_assistant_reply: Some(reply.clone()),
publish_ready: coverage.publish_ready,
publish_ready: readiness.readiness.publish_ready,
play_count: session.play_count,
created_at: session.created_at,
updated_at,
};
replace_big_fish_session(ctx, &session, next_session);
for event in readiness.events {
emit_big_fish_publish_readiness_event(ctx, event)?;
}
get_big_fish_session_tx(
ctx,
@@ -140,14 +158,22 @@ pub(crate) fn publish_big_fish_game_tx(
.as_deref()
.ok_or_else(|| "big_fish.draft 尚未编译".to_string())
.and_then(|value| deserialize_draft(value).map_err(|error| error.to_string()))?;
let coverage = build_asset_coverage(
Some(&draft),
&list_big_fish_asset_slots(ctx, &session.session_id),
);
if !coverage.publish_ready {
let asset_slots = list_big_fish_asset_slots(ctx, &session.session_id);
let readiness = evaluate_publish_readiness(
EvaluateBigFishPublishReadinessCommand {
session_id: session.session_id.clone(),
owner_user_id: session.owner_user_id.clone(),
draft: Some(draft.clone()),
evaluated_at_micros: input.published_at_micros,
},
&asset_slots,
)
.map_err(|error| error.to_string())?;
let coverage = build_asset_coverage(Some(&draft), &asset_slots);
if !readiness.readiness.publish_ready {
return Err(format!(
"big_fish 发布校验未通过:{}",
coverage.blockers.join("")
readiness.readiness.blockers.join("")
));
}
@@ -170,6 +196,9 @@ pub(crate) fn publish_big_fish_game_tx(
updated_at: published_at,
};
replace_big_fish_session(ctx, &session, next_session);
for event in readiness.events {
emit_big_fish_publish_readiness_event(ctx, event)?;
}
get_big_fish_session_tx(
ctx,

View File

@@ -0,0 +1,56 @@
use crate::*;
/// Big Fish creation event kinds.
///
/// The event table only carries the lightweight facts needed for cross-layer
/// subscription and auditing; the authoritative work state still lives in
/// `big_fish_creation_session` and `big_fish_asset_slot`.
#[derive(Clone, Copy, Debug, PartialEq, Eq, SpacetimeType)]
pub enum BigFishEventKind {
    // A publish-readiness evaluation was performed for a session.
    PublishReadinessEvaluated,
}
/// Append-only event row for Big Fish creation-flow facts.
///
/// Indexed by `session_id` and `owner_user_id` so consumers can follow a
/// single creation session or all of a user's sessions.
#[spacetimedb::table(
    accessor = big_fish_event,
    public,
    event,
    index(accessor = by_big_fish_event_session_id, btree(columns = [session_id])),
    index(accessor = by_big_fish_event_owner_user_id, btree(columns = [owner_user_id]))
)]
pub struct BigFishEvent {
    // Unique id, built as `bfevt_{session_id}_{occurred_at_micros}_{state_slug}`
    // by `emit_big_fish_publish_readiness_event`.
    #[primary_key]
    pub(crate) event_id: String,
    // Creation session this event belongs to.
    pub(crate) session_id: String,
    // Owner of the session at emission time.
    pub(crate) owner_user_id: String,
    // What kind of fact this row records.
    pub(crate) event_kind: BigFishEventKind,
    // Whether the session was judged ready to publish.
    pub(crate) publish_ready: bool,
    // JSON-serialized list of blockers preventing publish (empty when ready).
    pub(crate) blockers_json: String,
    // When the evaluation happened (caller-supplied micros).
    pub(crate) occurred_at: Timestamp,
}
/// Persists one `big_fish_event` row for a domain-level publish-readiness
/// evaluation.
///
/// # Errors
/// Returns an error string when the blockers list cannot be serialized to JSON.
pub(crate) fn emit_big_fish_publish_readiness_event(
    ctx: &ReducerContext,
    event: BigFishDomainEvent,
) -> Result<(), String> {
    // Irrefutable today: the domain enum has a single variant.
    let BigFishDomainEvent::PublishReadinessEvaluated {
        session_id,
        owner_user_id,
        publish_ready,
        blockers,
        occurred_at_micros,
    } = event;
    // Readiness state becomes part of the event id for easy de-duplication.
    let state_slug = match publish_ready {
        true => "ready",
        false => "blocked",
    };
    let blockers_json = serde_json::to_string(&blockers)
        .map_err(|error| format!("big_fish.publish_readiness.blockers 序列化失败: {error}"))?;
    let event_row = BigFishEvent {
        event_id: format!("bfevt_{session_id}_{occurred_at_micros}_{state_slug}"),
        session_id,
        owner_user_id,
        event_kind: BigFishEventKind::PublishReadinessEvaluated,
        publish_ready,
        blockers_json,
        occurred_at: Timestamp::from_micros_since_unix_epoch(occurred_at_micros),
    };
    ctx.db.big_fish_event().insert(event_row);
    Ok(())
}

View File

@@ -1,7 +1,9 @@
mod assets;
mod events;
mod session;
mod tables;
pub use assets::*;
pub(crate) use events::*;
pub use session::*;
pub use tables::*;

View File

@@ -3,6 +3,7 @@ use crate::runtime::{
ProfilePlayedWorkUpsertInput, add_profile_observed_play_time, upsert_profile_played_work,
};
use crate::*;
use module_big_fish::{EvaluateBigFishPublishReadinessCommand, evaluate_publish_readiness};
const INITIAL_BIG_FISH_CREATION_PROGRESS_PERCENT: u32 = 0;
@@ -552,6 +553,16 @@ pub(crate) fn compile_big_fish_draft_tx(
.map_err(|error| format!("big_fish.draft_json 非法: {error}"))?
.unwrap_or_else(|| compile_default_draft(&anchor_pack));
let asset_slots = list_big_fish_asset_slots(ctx, &session.session_id);
let readiness = evaluate_publish_readiness(
EvaluateBigFishPublishReadinessCommand {
session_id: session.session_id.clone(),
owner_user_id: session.owner_user_id.clone(),
draft: Some(draft.clone()),
evaluated_at_micros: input.compiled_at_micros,
},
&asset_slots,
)
.map_err(|error| error.to_string())?;
let coverage = build_asset_coverage(Some(&draft), &asset_slots);
let compiled_at = Timestamp::from_micros_since_unix_epoch(input.compiled_at_micros);
let reply = "第一版玩法草稿已编译完成,可以在结果页逐级生成主图、动作和场地背景。".to_string();
@@ -568,12 +579,15 @@ pub(crate) fn compile_big_fish_draft_tx(
asset_coverage_json: serialize_asset_coverage(&coverage)
.map_err(|error| error.to_string())?,
last_assistant_reply: Some(reply.clone()),
publish_ready: coverage.publish_ready,
publish_ready: readiness.readiness.publish_ready,
play_count: session.play_count,
created_at: session.created_at,
updated_at: compiled_at,
};
replace_big_fish_session(ctx, &session, next_session);
for event in readiness.events {
emit_big_fish_publish_readiness_event(ctx, event)?;
}
get_big_fish_session_tx(
ctx,

View File

@@ -104,6 +104,7 @@ macro_rules! migration_tables {
ai_task_stage,
ai_text_chunk,
ai_result_reference,
ai_task_event,
runtime_snapshot,
runtime_setting,
user_browse_history,
@@ -142,7 +143,8 @@ macro_rules! migration_tables {
puzzle_runtime_run,
big_fish_creation_session,
big_fish_agent_message,
big_fish_asset_slot
big_fish_asset_slot,
big_fish_event
}
};
}