fix: sync rust api-server runtime and bindings

This commit is contained in:
2026-04-23 20:32:06 +08:00
parent 9d25a47b23
commit 27e84c46a0
82 changed files with 9534 additions and 2222 deletions

View File

@@ -55,6 +55,76 @@
- `cancel_ai_task_and_return`
18. `turn_in_quest`、`resolve_combat_action(Victory)` 与 `player_progression / chapter_progression` 的最小经验联动
## 2.1 `src/lib.rs` 拆分路由规则
`2026-04-23` 起,`src/lib.rs` 不再允许继续承载具体业务域的 table / reducer / procedure / tx helper。
根入口后续只允许保留:
1. `use` 聚合
2. `mod` 声明
3. 少量跨域共享 helper
4. 迁移过渡期测试
根入口与子模块的导入导出规则同步冻结为:
1. `src/lib.rs` 对外统一优先使用 `pub use xxx::*;` 重新导出模块内容
2. 已拆业务模块内部统一优先使用 `use crate::*;` 复用主入口已聚合的类型与函数
3. 只有当 `use crate::*;` 无法覆盖或会引入明显歧义时,才补局部显式 `use`
4. 新增业务域内容禁止为了堆 `use` 列表再回写到 `src/lib.rs`
具体内容必须落到下面的模块:
1. `src/entry.rs`
- SpacetimeDB `init` 入口
2. `src/domain_types.rs`
- 跨域共享的 SpacetimeDB 类型
3. `src/asset_metadata/`
- 资产对象与资产绑定真相表
4. `src/big_fish/`
- Big Fish 创作与运行态
5. `src/runtime/`
- runtime setting / snapshot / browse history / profile 投影
6. `src/gameplay/`
- `story / combat / inventory / npc / quest / runtime_item / progression`
7. `src/custom_world/`
- custom world profile / session / agent / publishing / gallery / works
8. `src/ai/`
- ai task / stage / chunk / result reference
9. `src/puzzle.rs`
- 拼图玩法当前仍为单文件域模块
### 已冻结的二级模块落位点
1. `src/asset_metadata/objects.rs`
2. `src/asset_metadata/bindings.rs`
3. `src/big_fish/tables.rs`
4. `src/big_fish/session.rs`
5. `src/big_fish/assets.rs`
6. `src/big_fish/runtime.rs`
7. `src/runtime/settings.rs`
8. `src/runtime/snapshots.rs`
9. `src/runtime/browse_history.rs`
10. `src/runtime/profile.rs`
11. `src/gameplay/combat.rs`
12. `src/gameplay/inventory.rs`
13. `src/gameplay/npc.rs`
14. `src/gameplay/progression.rs`
15. `src/gameplay/quest.rs`
16. `src/gameplay/runtime_item.rs`
17. `src/gameplay/story.rs`
18. `src/custom_world/profile.rs`
19. `src/custom_world/session.rs`
20. `src/custom_world/agent.rs`
21. `src/custom_world/publishing.rs`
22. `src/custom_world/gallery.rs`
23. `src/custom_world/works.rs`
24. `src/ai/tasks.rs`
25. `src/ai/stages.rs`
26. `src/ai/snapshots.rs`
后续如果新增 SpacetimeDB 表、reducer、procedure 或同域 helper,必须先判断属于哪个一级模块与二级落位点,再写入对应文件,禁止直接追加到 `src/lib.rs`。
`asset_object` 的详细设计见:
1. [../../../docs/technical/SPACETIMEDB_ASSET_OBJECT_TABLE_DESIGN_2026-04-21.md](../../../docs/technical/SPACETIMEDB_ASSET_OBJECT_TABLE_DESIGN_2026-04-21.md)

View File

@@ -0,0 +1 @@
// AI snapshot / row 转换 helper 落位点。

View File

@@ -0,0 +1 @@
// AI stage、chunk、reference 与阶段级 helper 落位点。

View File

@@ -0,0 +1 @@
// AI task reducer / procedure 与任务状态迁移落位点。

View File

@@ -0,0 +1,142 @@
use crate::*;
// Generic truth table binding a confirmed asset object to one entity slot.
// One logical row per (entity_kind, entity_id, slot); by_asset_object_id
// supports reverse lookups from the asset side.
#[spacetimedb::table(
    accessor = asset_entity_binding,
    index(accessor = by_entity_slot, btree(columns = [entity_kind, entity_id, slot])),
    index(accessor = by_asset_object_id, btree(columns = [asset_object_id]))
)]
pub struct AssetEntityBinding {
    // Primary key; preserved across upserts of the same slot.
    #[primary_key]
    binding_id: String,
    // Id of the confirmed asset_object this binding points at.
    asset_object_id: String,
    entity_kind: String,
    entity_id: String,
    slot: String,
    asset_kind: String,
    owner_user_id: Option<String>,
    profile_id: Option<String>,
    created_at: Timestamp,
    updated_at: Timestamp,
}
/// Reducer entry point: bind a confirmed asset object to an entity slot.
/// Delegates to `upsert_asset_entity_binding`; the snapshot is discarded
/// because reducers cannot return data to the caller. Until strongly-typed
/// per-domain asset tables stabilize, the generic binding table carries the
/// relationship.
#[spacetimedb::reducer]
pub fn bind_asset_object_to_entity(
    ctx: &ReducerContext,
    input: AssetEntityBindingInput,
) -> Result<(), String> {
    upsert_asset_entity_binding(ctx, input)?;
    Ok(())
}
/// Procedure entry point for the synchronous Axum binding endpoint. Returns
/// the final persisted binding snapshot so the HTTP layer never has to read
/// the private table itself.
#[spacetimedb::procedure]
pub fn bind_asset_object_to_entity_and_return(
    ctx: &mut ProcedureContext,
    input: AssetEntityBindingInput,
) -> AssetEntityBindingProcedureResult {
    let outcome = ctx.try_with_tx(|tx| upsert_asset_entity_binding(tx, input.clone()));
    let (record, error_message) = match outcome {
        Ok(snapshot) => (Some(snapshot), None),
        Err(message) => (None, Some(message)),
    };
    AssetEntityBindingProcedureResult {
        ok: error_message.is_none(),
        record,
        error_message,
    }
}
/// Shared tx helper: idempotently create or replace the binding for one
/// (entity_kind, entity_id, slot) triple and return the resulting snapshot.
/// On replace, the original binding_id and created_at are preserved.
fn upsert_asset_entity_binding(
    ctx: &ReducerContext,
    input: AssetEntityBindingInput,
) -> Result<AssetEntityBindingSnapshot, String> {
    // Scalar field validation happens before any table access.
    validate_asset_entity_binding_fields(
        &input.binding_id,
        &input.asset_object_id,
        &input.entity_kind,
        &input.entity_id,
        &input.slot,
        &input.asset_kind,
    )
    .map_err(|error| error.to_string())?;
    // Referential check: the referenced asset object must already exist.
    if !has_asset_object(ctx, &input.asset_object_id) {
        return Err("asset_entity_binding.asset_object_id 对应的 asset_object 不存在".to_string());
    }
    let updated_at = Timestamp::from_micros_since_unix_epoch(input.updated_at_micros);
    // First version locates the binding idempotently by entity_kind +
    // entity_id + slot with a linear scan; switch to the by_entity_slot
    // composite-index scan once access volume is clearer.
    let current = ctx.db.asset_entity_binding().iter().find(|row| {
        row.entity_kind == input.entity_kind
            && row.entity_id == input.entity_id
            && row.slot == input.slot
    });
    let snapshot = match current {
        Some(existing) => {
            // Replace = delete by primary key, then insert the refreshed row.
            ctx.db
                .asset_entity_binding()
                .binding_id()
                .delete(&existing.binding_id);
            let row = AssetEntityBinding {
                binding_id: existing.binding_id.clone(),
                asset_object_id: input.asset_object_id.clone(),
                entity_kind: input.entity_kind.clone(),
                entity_id: input.entity_id.clone(),
                slot: input.slot.clone(),
                asset_kind: input.asset_kind.clone(),
                owner_user_id: input.owner_user_id.clone(),
                profile_id: input.profile_id.clone(),
                created_at: existing.created_at,
                updated_at,
            };
            ctx.db.asset_entity_binding().insert(row);
            AssetEntityBindingSnapshot {
                binding_id: existing.binding_id,
                asset_object_id: input.asset_object_id,
                entity_kind: input.entity_kind,
                entity_id: input.entity_id,
                slot: input.slot,
                asset_kind: input.asset_kind,
                owner_user_id: input.owner_user_id,
                profile_id: input.profile_id,
                created_at_micros: existing.created_at.to_micros_since_unix_epoch(),
                updated_at_micros: input.updated_at_micros,
            }
        }
        None => {
            // First write: created_at mirrors updated_at.
            let created_at = updated_at;
            let row = AssetEntityBinding {
                binding_id: input.binding_id.clone(),
                asset_object_id: input.asset_object_id.clone(),
                entity_kind: input.entity_kind.clone(),
                entity_id: input.entity_id.clone(),
                slot: input.slot.clone(),
                asset_kind: input.asset_kind.clone(),
                owner_user_id: input.owner_user_id.clone(),
                profile_id: input.profile_id.clone(),
                created_at,
                updated_at,
            };
            ctx.db.asset_entity_binding().insert(row);
            AssetEntityBindingSnapshot {
                binding_id: input.binding_id,
                asset_object_id: input.asset_object_id,
                entity_kind: input.entity_kind,
                entity_id: input.entity_id,
                slot: input.slot,
                asset_kind: input.asset_kind,
                owner_user_id: input.owner_user_id,
                profile_id: input.profile_id,
                created_at_micros: input.updated_at_micros,
                updated_at_micros: input.updated_at_micros,
            }
        }
    };
    Ok(snapshot)
}

View File

@@ -1,305 +1,5 @@
// Truth table for confirmed asset objects, keyed by asset_object_id and
// deduplicated by (bucket, object_key).
#[spacetimedb::table(
    accessor = asset_object,
    index(accessor = by_bucket_object_key, btree(columns = [bucket, object_key]))
)]
pub struct AssetObject {
    #[primary_key]
    asset_object_id: String,
    // The canonical object location is fixed as two columns (bucket +
    // object_key) to avoid a later schema split from a single string path.
    bucket: String,
    object_key: String,
    access_policy: AssetObjectAccessPolicy,
    content_type: Option<String>,
    content_length: u64,
    content_hash: Option<String>,
    version: u32,
    source_job_id: Option<String>,
    owner_user_id: Option<String>,
    profile_id: Option<String>,
    entity_id: Option<String>,
    #[index(btree)]
    asset_kind: String,
    created_at: Timestamp,
    updated_at: Timestamp,
}
mod bindings;
mod objects;
// Generic truth table binding a confirmed asset object to one entity slot.
// One logical row per (entity_kind, entity_id, slot); by_asset_object_id
// supports reverse lookups from the asset side.
#[spacetimedb::table(
    accessor = asset_entity_binding,
    index(accessor = by_entity_slot, btree(columns = [entity_kind, entity_id, slot])),
    index(accessor = by_asset_object_id, btree(columns = [asset_object_id]))
)]
pub struct AssetEntityBinding {
    // Primary key; preserved across upserts of the same slot.
    #[primary_key]
    binding_id: String,
    asset_object_id: String,
    entity_kind: String,
    entity_id: String,
    slot: String,
    asset_kind: String,
    owner_user_id: Option<String>,
    profile_id: Option<String>,
    created_at: Timestamp,
    updated_at: Timestamp,
}
/// Reducer entry point: apply the canonical write rules for an asset object
/// so later internal module logic can reuse them. Delegates to
/// `upsert_asset_object` and drops the returned snapshot.
#[spacetimedb::reducer]
pub fn confirm_asset_object(
    ctx: &ReducerContext,
    input: AssetObjectUpsertInput,
) -> Result<(), String> {
    upsert_asset_object(ctx, input)?;
    Ok(())
}
/// Procedure entry point for the synchronous Axum confirmation endpoint.
/// Returns the final persisted record so the HTTP layer does not need an
/// extra private-table query.
#[spacetimedb::procedure]
pub fn confirm_asset_object_and_return(
    ctx: &mut ProcedureContext,
    input: AssetObjectUpsertInput,
) -> AssetObjectProcedureResult {
    let outcome = ctx.try_with_tx(|tx| upsert_asset_object(tx, input.clone()));
    let (record, error_message) = match outcome {
        Ok(snapshot) => (Some(snapshot), None),
        Err(message) => (None, Some(message)),
    };
    AssetObjectProcedureResult {
        ok: error_message.is_none(),
        record,
        error_message,
    }
}
/// Reducer entry point: bind a confirmed asset object to an entity slot via
/// the generic binding table. The snapshot from the shared helper is
/// intentionally discarded — reducers return no payload.
#[spacetimedb::reducer]
pub fn bind_asset_object_to_entity(
    ctx: &ReducerContext,
    input: AssetEntityBindingInput,
) -> Result<(), String> {
    upsert_asset_entity_binding(ctx, input)?;
    Ok(())
}
/// Procedure entry point for the synchronous Axum binding endpoint; hands
/// back the persisted binding snapshot so HTTP never reads the private table.
#[spacetimedb::procedure]
pub fn bind_asset_object_to_entity_and_return(
    ctx: &mut ProcedureContext,
    input: AssetEntityBindingInput,
) -> AssetEntityBindingProcedureResult {
    let outcome = ctx.try_with_tx(|tx| upsert_asset_entity_binding(tx, input.clone()));
    let (record, error_message) = match outcome {
        Ok(snapshot) => (Some(snapshot), None),
        Err(message) => (None, Some(message)),
    };
    AssetEntityBindingProcedureResult {
        ok: error_message.is_none(),
        record,
        error_message,
    }
}
/// Shared tx helper: create or replace the `asset_object` row identified by
/// (bucket, object_key) and return the persisted snapshot. On replace, the
/// original asset_object_id and created_at are preserved; every other column
/// is refreshed from `input`.
fn upsert_asset_object(
    ctx: &ReducerContext,
    input: AssetObjectUpsertInput,
) -> Result<AssetObjectUpsertSnapshot, String> {
    // Validate scalar fields before touching the table.
    validate_asset_object_fields(
        &input.bucket,
        &input.object_key,
        &input.asset_kind,
        input.version,
    )
    .map_err(|error| error.to_string())?;
    let updated_at = Timestamp::from_micros_since_unix_epoch(input.updated_at_micros);
    // Keep the minimal shippable implementation for now: the dedupe semantics
    // are frozen; the linear scan can later be optimized back to the
    // by_bucket_object_key composite-index scan.
    let current = ctx
        .db
        .asset_object()
        .iter()
        .find(|row| row.bucket == input.bucket && row.object_key == input.object_key);
    let snapshot = match current {
        Some(existing) => {
            // Replace = delete by primary key, then insert the refreshed row.
            ctx.db
                .asset_object()
                .asset_object_id()
                .delete(&existing.asset_object_id);
            let row = AssetObject {
                asset_object_id: existing.asset_object_id.clone(),
                bucket: input.bucket.clone(),
                object_key: input.object_key.clone(),
                access_policy: input.access_policy,
                content_type: input.content_type.clone(),
                content_length: input.content_length,
                content_hash: input.content_hash.clone(),
                version: input.version,
                source_job_id: input.source_job_id.clone(),
                owner_user_id: input.owner_user_id.clone(),
                profile_id: input.profile_id.clone(),
                entity_id: input.entity_id.clone(),
                asset_kind: input.asset_kind.clone(),
                created_at: existing.created_at,
                updated_at,
            };
            ctx.db.asset_object().insert(row);
            AssetObjectUpsertSnapshot {
                asset_object_id: existing.asset_object_id,
                bucket: input.bucket,
                object_key: input.object_key,
                access_policy: input.access_policy,
                content_type: input.content_type,
                content_length: input.content_length,
                content_hash: input.content_hash,
                version: input.version,
                source_job_id: input.source_job_id,
                owner_user_id: input.owner_user_id,
                profile_id: input.profile_id,
                entity_id: input.entity_id,
                asset_kind: input.asset_kind,
                created_at_micros: existing.created_at.to_micros_since_unix_epoch(),
                updated_at_micros: input.updated_at_micros,
            }
        }
        None => {
            // First write: created_at mirrors updated_at.
            let created_at = updated_at;
            let row = AssetObject {
                asset_object_id: input.asset_object_id.clone(),
                bucket: input.bucket.clone(),
                object_key: input.object_key.clone(),
                access_policy: input.access_policy,
                content_type: input.content_type.clone(),
                content_length: input.content_length,
                content_hash: input.content_hash.clone(),
                version: input.version,
                source_job_id: input.source_job_id.clone(),
                owner_user_id: input.owner_user_id.clone(),
                profile_id: input.profile_id.clone(),
                entity_id: input.entity_id.clone(),
                asset_kind: input.asset_kind.clone(),
                created_at,
                updated_at,
            };
            ctx.db.asset_object().insert(row);
            AssetObjectUpsertSnapshot {
                asset_object_id: input.asset_object_id,
                bucket: input.bucket,
                object_key: input.object_key,
                access_policy: input.access_policy,
                content_type: input.content_type,
                content_length: input.content_length,
                content_hash: input.content_hash,
                version: input.version,
                source_job_id: input.source_job_id,
                owner_user_id: input.owner_user_id,
                profile_id: input.profile_id,
                entity_id: input.entity_id,
                asset_kind: input.asset_kind,
                created_at_micros: input.updated_at_micros,
                updated_at_micros: input.updated_at_micros,
            }
        }
    };
    Ok(snapshot)
}
/// Shared tx helper: idempotently create or replace the binding for one
/// (entity_kind, entity_id, slot) triple and return the resulting snapshot.
/// On replace, the original binding_id and created_at are preserved.
fn upsert_asset_entity_binding(
    ctx: &ReducerContext,
    input: AssetEntityBindingInput,
) -> Result<AssetEntityBindingSnapshot, String> {
    // Scalar field validation happens before any table access.
    validate_asset_entity_binding_fields(
        &input.binding_id,
        &input.asset_object_id,
        &input.entity_kind,
        &input.entity_id,
        &input.slot,
        &input.asset_kind,
    )
    .map_err(|error| error.to_string())?;
    // Referential check via the asset_object primary-key accessor.
    if ctx
        .db
        .asset_object()
        .asset_object_id()
        .find(&input.asset_object_id)
        .is_none()
    {
        return Err("asset_entity_binding.asset_object_id 对应的 asset_object 不存在".to_string());
    }
    let updated_at = Timestamp::from_micros_since_unix_epoch(input.updated_at_micros);
    // First version locates the binding idempotently by entity_kind +
    // entity_id + slot with a linear scan; switch to the composite-index scan
    // once access volume is clearer.
    let current = ctx.db.asset_entity_binding().iter().find(|row| {
        row.entity_kind == input.entity_kind
            && row.entity_id == input.entity_id
            && row.slot == input.slot
    });
    let snapshot = match current {
        Some(existing) => {
            // Replace = delete by primary key, then insert the refreshed row.
            ctx.db
                .asset_entity_binding()
                .binding_id()
                .delete(&existing.binding_id);
            let row = AssetEntityBinding {
                binding_id: existing.binding_id.clone(),
                asset_object_id: input.asset_object_id.clone(),
                entity_kind: input.entity_kind.clone(),
                entity_id: input.entity_id.clone(),
                slot: input.slot.clone(),
                asset_kind: input.asset_kind.clone(),
                owner_user_id: input.owner_user_id.clone(),
                profile_id: input.profile_id.clone(),
                created_at: existing.created_at,
                updated_at,
            };
            ctx.db.asset_entity_binding().insert(row);
            AssetEntityBindingSnapshot {
                binding_id: existing.binding_id,
                asset_object_id: input.asset_object_id,
                entity_kind: input.entity_kind,
                entity_id: input.entity_id,
                slot: input.slot,
                asset_kind: input.asset_kind,
                owner_user_id: input.owner_user_id,
                profile_id: input.profile_id,
                created_at_micros: existing.created_at.to_micros_since_unix_epoch(),
                updated_at_micros: input.updated_at_micros,
            }
        }
        None => {
            // First write: created_at mirrors updated_at.
            let created_at = updated_at;
            let row = AssetEntityBinding {
                binding_id: input.binding_id.clone(),
                asset_object_id: input.asset_object_id.clone(),
                entity_kind: input.entity_kind.clone(),
                entity_id: input.entity_id.clone(),
                slot: input.slot.clone(),
                asset_kind: input.asset_kind.clone(),
                owner_user_id: input.owner_user_id.clone(),
                profile_id: input.profile_id.clone(),
                created_at,
                updated_at,
            };
            ctx.db.asset_entity_binding().insert(row);
            AssetEntityBindingSnapshot {
                binding_id: input.binding_id,
                asset_object_id: input.asset_object_id,
                entity_kind: input.entity_kind,
                entity_id: input.entity_id,
                slot: input.slot,
                asset_kind: input.asset_kind,
                owner_user_id: input.owner_user_id,
                profile_id: input.profile_id,
                created_at_micros: input.updated_at_micros,
                updated_at_micros: input.updated_at_micros,
            }
        }
    };
    Ok(snapshot)
}
pub use bindings::*;
pub use objects::*;

View File

@@ -0,0 +1,169 @@
use crate::*;
// Truth table for confirmed asset objects, keyed by asset_object_id and
// deduplicated by (bucket, object_key).
#[spacetimedb::table(
    accessor = asset_object,
    index(accessor = by_bucket_object_key, btree(columns = [bucket, object_key]))
)]
pub struct AssetObject {
    #[primary_key]
    asset_object_id: String,
    // The canonical object location is fixed as two columns (bucket +
    // object_key) to avoid a later schema split from a single string path.
    bucket: String,
    object_key: String,
    access_policy: AssetObjectAccessPolicy,
    content_type: Option<String>,
    content_length: u64,
    content_hash: Option<String>,
    version: u32,
    source_job_id: Option<String>,
    owner_user_id: Option<String>,
    profile_id: Option<String>,
    entity_id: Option<String>,
    #[index(btree)]
    asset_kind: String,
    created_at: Timestamp,
    updated_at: Timestamp,
}
/// Reducer entry point: apply the canonical asset-object write rules so
/// later internal module logic can reuse them. Delegates to
/// `upsert_asset_object`; the returned snapshot is dropped.
#[spacetimedb::reducer]
pub fn confirm_asset_object(
    ctx: &ReducerContext,
    input: AssetObjectUpsertInput,
) -> Result<(), String> {
    upsert_asset_object(ctx, input)?;
    Ok(())
}
/// Procedure entry point for the synchronous Axum confirmation endpoint.
/// Returns the final persisted record so the HTTP layer does not need an
/// extra private-table query.
#[spacetimedb::procedure]
pub fn confirm_asset_object_and_return(
    ctx: &mut ProcedureContext,
    input: AssetObjectUpsertInput,
) -> AssetObjectProcedureResult {
    let outcome = ctx.try_with_tx(|tx| upsert_asset_object(tx, input.clone()));
    let (record, error_message) = match outcome {
        Ok(snapshot) => (Some(snapshot), None),
        Err(message) => (None, Some(message)),
    };
    AssetObjectProcedureResult {
        ok: error_message.is_none(),
        record,
        error_message,
    }
}
/// Shared tx helper: create or replace the `asset_object` row identified by
/// (bucket, object_key) and return the persisted snapshot. On replace, the
/// original asset_object_id and created_at are preserved; every other column
/// is refreshed from `input`.
pub(crate) fn upsert_asset_object(
    ctx: &ReducerContext,
    input: AssetObjectUpsertInput,
) -> Result<AssetObjectUpsertSnapshot, String> {
    // Validate scalar fields before touching the table.
    validate_asset_object_fields(
        &input.bucket,
        &input.object_key,
        &input.asset_kind,
        input.version,
    )
    .map_err(|error| error.to_string())?;
    let updated_at = Timestamp::from_micros_since_unix_epoch(input.updated_at_micros);
    // Keep the minimal shippable implementation for now: the dedupe semantics
    // are frozen; the linear scan can later be optimized back to the
    // by_bucket_object_key composite-index scan.
    let current = ctx
        .db
        .asset_object()
        .iter()
        .find(|row| row.bucket == input.bucket && row.object_key == input.object_key);
    let snapshot = match current {
        Some(existing) => {
            // Replace = delete by primary key, then insert the refreshed row.
            ctx.db
                .asset_object()
                .asset_object_id()
                .delete(&existing.asset_object_id);
            let row = AssetObject {
                asset_object_id: existing.asset_object_id.clone(),
                bucket: input.bucket.clone(),
                object_key: input.object_key.clone(),
                access_policy: input.access_policy,
                content_type: input.content_type.clone(),
                content_length: input.content_length,
                content_hash: input.content_hash.clone(),
                version: input.version,
                source_job_id: input.source_job_id.clone(),
                owner_user_id: input.owner_user_id.clone(),
                profile_id: input.profile_id.clone(),
                entity_id: input.entity_id.clone(),
                asset_kind: input.asset_kind.clone(),
                created_at: existing.created_at,
                updated_at,
            };
            ctx.db.asset_object().insert(row);
            AssetObjectUpsertSnapshot {
                asset_object_id: existing.asset_object_id,
                bucket: input.bucket,
                object_key: input.object_key,
                access_policy: input.access_policy,
                content_type: input.content_type,
                content_length: input.content_length,
                content_hash: input.content_hash,
                version: input.version,
                source_job_id: input.source_job_id,
                owner_user_id: input.owner_user_id,
                profile_id: input.profile_id,
                entity_id: input.entity_id,
                asset_kind: input.asset_kind,
                created_at_micros: existing.created_at.to_micros_since_unix_epoch(),
                updated_at_micros: input.updated_at_micros,
            }
        }
        None => {
            // First write: created_at mirrors updated_at.
            let created_at = updated_at;
            let row = AssetObject {
                asset_object_id: input.asset_object_id.clone(),
                bucket: input.bucket.clone(),
                object_key: input.object_key.clone(),
                access_policy: input.access_policy,
                content_type: input.content_type.clone(),
                content_length: input.content_length,
                content_hash: input.content_hash.clone(),
                version: input.version,
                source_job_id: input.source_job_id.clone(),
                owner_user_id: input.owner_user_id.clone(),
                profile_id: input.profile_id.clone(),
                entity_id: input.entity_id.clone(),
                asset_kind: input.asset_kind.clone(),
                created_at,
                updated_at,
            };
            ctx.db.asset_object().insert(row);
            AssetObjectUpsertSnapshot {
                asset_object_id: input.asset_object_id,
                bucket: input.bucket,
                object_key: input.object_key,
                access_policy: input.access_policy,
                content_type: input.content_type,
                content_length: input.content_length,
                content_hash: input.content_hash,
                version: input.version,
                source_job_id: input.source_job_id,
                owner_user_id: input.owner_user_id,
                profile_id: input.profile_id,
                entity_id: input.entity_id,
                asset_kind: input.asset_kind,
                created_at_micros: input.updated_at_micros,
                updated_at_micros: input.updated_at_micros,
            }
        }
    };
    Ok(snapshot)
}
/// Returns true when an `asset_object` row with the given id already exists.
///
/// Uses the `asset_object_id` primary-key accessor for an indexed point
/// lookup (the same accessor this module already uses for deletes) instead
/// of scanning the whole table with `iter().any`, which is O(n) per call.
pub(crate) fn has_asset_object(ctx: &ReducerContext, asset_object_id: &str) -> bool {
    ctx.db
        .asset_object()
        .asset_object_id()
        .find(&asset_object_id.to_owned())
        .is_some()
}

View File

@@ -0,0 +1,238 @@
use crate::*;
use crate::big_fish::tables::{big_fish_asset_slot, big_fish_creation_session};
/// Procedure: generate (or regenerate) one asset slot for a creation session
/// and hand back the refreshed session snapshot.
#[spacetimedb::procedure]
pub fn generate_big_fish_asset(
    ctx: &mut ProcedureContext,
    input: BigFishAssetGenerateInput,
) -> BigFishSessionProcedureResult {
    let outcome = ctx.try_with_tx(|tx| generate_big_fish_asset_tx(tx, input.clone()));
    let (session, error_message) = match outcome {
        Ok(snapshot) => (Some(snapshot), None),
        Err(message) => (None, Some(message)),
    };
    BigFishSessionProcedureResult {
        ok: error_message.is_none(),
        session,
        error_message,
    }
}
/// Procedure: publish a creation session once its asset coverage passes, and
/// hand back the refreshed session snapshot.
#[spacetimedb::procedure]
pub fn publish_big_fish_game(
    ctx: &mut ProcedureContext,
    input: BigFishPublishInput,
) -> BigFishSessionProcedureResult {
    let outcome = ctx.try_with_tx(|tx| publish_big_fish_game_tx(tx, input.clone()));
    let (session, error_message) = match outcome {
        Ok(snapshot) => (Some(snapshot), None),
        Err(message) => (None, Some(message)),
    };
    BigFishSessionProcedureResult {
        ok: error_message.is_none(),
        session,
        error_message,
    }
}
/// Tx body for asset generation: validate the request against the compiled
/// draft, upsert the targeted slot, recompute asset coverage, advance the
/// session stage/progress, append a system message, and return the refreshed
/// session snapshot.
pub(crate) fn generate_big_fish_asset_tx(
    ctx: &ReducerContext,
    input: BigFishAssetGenerateInput,
) -> Result<BigFishSessionSnapshot, String> {
    // Owner-scoped lookup: a session id belonging to another user reads as
    // "not found".
    let session = ctx
        .db
        .big_fish_creation_session()
        .session_id()
        .find(&input.session_id)
        .filter(|row| row.owner_user_id == input.owner_user_id)
        .ok_or_else(|| "big_fish_creation_session 不存在".to_string())?;
    // Assets can only be generated against an already-compiled draft.
    let draft = session
        .draft_json
        .as_deref()
        .ok_or_else(|| "big_fish.draft 尚未编译".to_string())
        .and_then(|value| deserialize_draft(value).map_err(|error| error.to_string()))?;
    validate_asset_generate_input(&input, &draft).map_err(|error| error.to_string())?;
    let slot = build_generated_asset_slot(
        &input.session_id,
        &draft,
        input.asset_kind,
        input.level,
        input.motion_key.clone(),
        input.asset_url.clone(),
        input.generated_at_micros,
    )
    .map_err(|error| error.to_string())?;
    upsert_big_fish_asset_slot(ctx, slot);
    // Coverage is recomputed from all persisted slots, not just this one.
    let asset_slots = list_big_fish_asset_slots(ctx, &session.session_id);
    let coverage = build_asset_coverage(Some(&draft), &asset_slots);
    let updated_at = Timestamp::from_micros_since_unix_epoch(input.generated_at_micros);
    // A missing or whitespace-only asset_url means a placeholder was used.
    let uses_placeholder = input
        .asset_url
        .as_deref()
        .map(str::trim)
        .is_none_or(str::is_empty);
    // Assistant reply copy depends on (asset kind, placeholder); the
    // user-facing strings are kept verbatim.
    let reply = match (input.asset_kind, uses_placeholder) {
        (BigFishAssetKind::LevelMainImage, true) => "本级主图占位图已生成,可在结果页继续预览。",
        (BigFishAssetKind::LevelMainImage, false) => "本级主图已正式生成,可在结果页继续预览。",
        (BigFishAssetKind::LevelMotion, true) => "本级动作占位图已生成,可在结果页继续预览。",
        (BigFishAssetKind::LevelMotion, false) => "本级动作图已正式生成,可在结果页继续预览。",
        (BigFishAssetKind::StageBackground, true) => {
            "活动区域背景占位图已生成,可在结果页继续预览。"
        }
        (BigFishAssetKind::StageBackground, false) => {
            "活动区域背景已正式生成,可在结果页继续预览。"
        }
    }
    .to_string();
    // The session becomes publishable only once coverage reports every
    // required slot is filled.
    let next_stage = if coverage.publish_ready {
        BigFishCreationStage::ReadyToPublish
    } else {
        BigFishCreationStage::AssetRefining
    };
    let next_session = BigFishCreationSession {
        session_id: session.session_id.clone(),
        owner_user_id: session.owner_user_id.clone(),
        seed_text: session.seed_text.clone(),
        current_turn: session.current_turn,
        // Progress jumps to 96 once publish-ready, otherwise sits at 88.
        progress_percent: if coverage.publish_ready { 96 } else { 88 },
        stage: next_stage,
        anchor_pack_json: session.anchor_pack_json.clone(),
        draft_json: session.draft_json.clone(),
        asset_coverage_json: serialize_asset_coverage(&coverage)
            .map_err(|error| error.to_string())?,
        last_assistant_reply: Some(reply.clone()),
        publish_ready: coverage.publish_ready,
        created_at: session.created_at,
        updated_at,
    };
    replace_big_fish_session(ctx, &session, next_session);
    // The same reply is also appended to the agent message log, keyed by the
    // generation timestamp.
    append_big_fish_system_message(
        ctx,
        &input.session_id,
        format!("big-fish-message-asset-{}", input.generated_at_micros),
        reply,
        input.generated_at_micros,
    );
    // Re-read through the getter so the returned snapshot matches what a
    // follow-up get would observe.
    get_big_fish_session_tx(
        ctx,
        BigFishSessionGetInput {
            session_id: input.session_id,
            owner_user_id: input.owner_user_id,
        },
    )
}
/// Tx body for publishing: re-validates asset coverage against the compiled
/// draft and, when publishable, flips the session to the Published stage at
/// 100% progress.
pub(crate) fn publish_big_fish_game_tx(
    ctx: &ReducerContext,
    input: BigFishPublishInput,
) -> Result<BigFishSessionSnapshot, String> {
    validate_publish_input(&input).map_err(|error| error.to_string())?;
    // Owner-scoped lookup: other users' sessions read as "not found".
    let session = ctx
        .db
        .big_fish_creation_session()
        .session_id()
        .find(&input.session_id)
        .filter(|row| row.owner_user_id == input.owner_user_id)
        .ok_or_else(|| "big_fish_creation_session 不存在".to_string())?;
    let draft = session
        .draft_json
        .as_deref()
        .ok_or_else(|| "big_fish.draft 尚未编译".to_string())
        .and_then(|value| deserialize_draft(value).map_err(|error| error.to_string()))?;
    // Coverage is recomputed at publish time from the persisted slots; a
    // stale asset_coverage_json on the session is not trusted.
    let coverage = build_asset_coverage(
        Some(&draft),
        &list_big_fish_asset_slots(ctx, &session.session_id),
    );
    if !coverage.publish_ready {
        // NOTE(review): blockers are joined with an empty separator — confirm
        // each blocker string carries its own trailing punctuation, otherwise
        // the message runs entries together.
        return Err(format!(
            "big_fish 发布校验未通过:{}",
            coverage.blockers.join("")
        ));
    }
    let published_at = Timestamp::from_micros_since_unix_epoch(input.published_at_micros);
    let next_session = BigFishCreationSession {
        session_id: session.session_id.clone(),
        owner_user_id: session.owner_user_id.clone(),
        seed_text: session.seed_text.clone(),
        current_turn: session.current_turn,
        progress_percent: 100,
        stage: BigFishCreationStage::Published,
        anchor_pack_json: session.anchor_pack_json.clone(),
        draft_json: session.draft_json.clone(),
        asset_coverage_json: serialize_asset_coverage(&coverage)
            .map_err(|error| error.to_string())?,
        last_assistant_reply: Some("玩法已发布,可以进入测试运行态。".to_string()),
        publish_ready: true,
        created_at: session.created_at,
        updated_at: published_at,
    };
    replace_big_fish_session(ctx, &session, next_session);
    // Return the freshly persisted state via the common getter.
    get_big_fish_session_tx(
        ctx,
        BigFishSessionGetInput {
            session_id: input.session_id,
            owner_user_id: input.owner_user_id,
        },
    )
}
/// Collect every asset slot belonging to `session_id` as snapshots, sorted by
/// (level, asset_kind, motion_key, slot_id) for a stable presentation order.
pub(crate) fn list_big_fish_asset_slots(
    ctx: &ReducerContext,
    session_id: &str,
) -> Vec<BigFishAssetSlotSnapshot> {
    let mut snapshots = Vec::new();
    for slot in ctx.db.big_fish_asset_slot().iter() {
        if slot.session_id != session_id {
            continue;
        }
        snapshots.push(BigFishAssetSlotSnapshot {
            slot_id: slot.slot_id,
            session_id: slot.session_id,
            asset_kind: slot.asset_kind,
            level: slot.level,
            motion_key: slot.motion_key,
            status: slot.status,
            asset_url: slot.asset_url,
            prompt_snapshot: slot.prompt_snapshot,
            updated_at_micros: slot.updated_at.to_micros_since_unix_epoch(),
        });
    }
    snapshots.sort_by_key(|snapshot| {
        (
            snapshot.level.unwrap_or(0),
            snapshot.asset_kind.as_str().to_string(),
            snapshot.motion_key.clone().unwrap_or_default(),
            snapshot.slot_id.clone(),
        )
    });
    snapshots
}
/// Insert or replace a single asset slot row keyed by slot_id; the snapshot's
/// micros timestamp is converted back into a table Timestamp on write.
pub(crate) fn upsert_big_fish_asset_slot(ctx: &ReducerContext, slot: BigFishAssetSlotSnapshot) {
    let existing = ctx.db.big_fish_asset_slot().slot_id().find(&slot.slot_id);
    if let Some(previous) = existing {
        ctx.db
            .big_fish_asset_slot()
            .slot_id()
            .delete(&previous.slot_id);
    }
    let updated_at = Timestamp::from_micros_since_unix_epoch(slot.updated_at_micros);
    ctx.db.big_fish_asset_slot().insert(BigFishAssetSlot {
        slot_id: slot.slot_id,
        session_id: slot.session_id,
        asset_kind: slot.asset_kind,
        level: slot.level,
        motion_key: slot.motion_key,
        status: slot.status,
        asset_url: slot.asset_url,
        prompt_snapshot: slot.prompt_snapshot,
        updated_at,
    });
}

View File

@@ -0,0 +1,9 @@
mod assets;
mod runtime;
mod session;
mod tables;
pub use assets::*;
pub use runtime::*;
pub use session::*;
pub use tables::*;

View File

@@ -0,0 +1,190 @@
use crate::*;
use crate::big_fish::tables::{big_fish_creation_session, big_fish_runtime_run};
/// Procedure: start a new runtime run for a published/compiled session and
/// hand back the initial runtime snapshot.
#[spacetimedb::procedure]
pub fn start_big_fish_run(
    ctx: &mut ProcedureContext,
    input: BigFishRunStartInput,
) -> BigFishRunProcedureResult {
    let outcome = ctx.try_with_tx(|tx| start_big_fish_run_tx(tx, input.clone()));
    let (run, error_message) = match outcome {
        Ok(snapshot) => (Some(snapshot), None),
        Err(message) => (None, Some(message)),
    };
    BigFishRunProcedureResult {
        ok: error_message.is_none(),
        run,
        error_message,
    }
}
/// Procedure: feed one (x, y) input sample into a run and hand back the
/// advanced runtime snapshot.
#[spacetimedb::procedure]
pub fn submit_big_fish_input(
    ctx: &mut ProcedureContext,
    input: BigFishRunInputSubmitInput,
) -> BigFishRunProcedureResult {
    let outcome = ctx.try_with_tx(|tx| submit_big_fish_input_tx(tx, input.clone()));
    let (run, error_message) = match outcome {
        Ok(snapshot) => (Some(snapshot), None),
        Err(message) => (None, Some(message)),
    };
    BigFishRunProcedureResult {
        ok: error_message.is_none(),
        run,
        error_message,
    }
}
/// Procedure: read back the current runtime snapshot of an owner-scoped run.
#[spacetimedb::procedure]
pub fn get_big_fish_run(
    ctx: &mut ProcedureContext,
    input: BigFishRunGetInput,
) -> BigFishRunProcedureResult {
    let outcome = ctx.try_with_tx(|tx| get_big_fish_run_tx(tx, input.clone()));
    let (run, error_message) = match outcome {
        Ok(snapshot) => (Some(snapshot), None),
        Err(message) => (None, Some(message)),
    };
    BigFishRunProcedureResult {
        ok: error_message.is_none(),
        run,
        error_message,
    }
}
/// Tx body for starting a run: rejects duplicate run ids, requires an
/// owner-scoped session with a compiled draft, builds the initial runtime
/// snapshot, and persists the run row.
fn start_big_fish_run_tx(
    ctx: &ReducerContext,
    input: BigFishRunStartInput,
) -> Result<BigFishRuntimeSnapshot, String> {
    validate_run_start_input(&input).map_err(|error| error.to_string())?;
    // run_id is caller-supplied, so an existing row means a duplicate start.
    if ctx
        .db
        .big_fish_runtime_run()
        .run_id()
        .find(&input.run_id)
        .is_some()
    {
        return Err("big_fish_runtime_run.run_id 已存在".to_string());
    }
    // Owner-scoped lookup: other users' sessions read as "not found".
    let session = ctx
        .db
        .big_fish_creation_session()
        .session_id()
        .find(&input.session_id)
        .filter(|row| row.owner_user_id == input.owner_user_id)
        .ok_or_else(|| "big_fish_creation_session 不存在".to_string())?;
    // A compiled draft is required to seed the runtime state.
    let draft = session
        .draft_json
        .as_deref()
        .ok_or_else(|| "big_fish.draft 尚未编译".to_string())
        .and_then(|value| deserialize_draft(value).map_err(|error| error.to_string()))?;
    let snapshot = build_initial_runtime_snapshot(
        input.run_id.clone(),
        input.session_id.clone(),
        &draft,
        input.started_at_micros,
    );
    let now = Timestamp::from_micros_since_unix_epoch(input.started_at_micros);
    // The run row starts with a neutral (0, 0) last input; the serialized
    // snapshot JSON is the authoritative runtime state.
    ctx.db.big_fish_runtime_run().insert(BigFishRuntimeRun {
        run_id: input.run_id,
        session_id: input.session_id,
        owner_user_id: input.owner_user_id,
        status: snapshot.status,
        snapshot_json: serialize_runtime_snapshot(&snapshot).map_err(|error| error.to_string())?,
        last_input_x: 0.0,
        last_input_y: 0.0,
        tick: snapshot.tick,
        created_at: now,
        updated_at: now,
    });
    Ok(snapshot)
}
/// Tx body for input submission: loads the owner-scoped run and its session,
/// advances the deserialized runtime snapshot by one step using the draft's
/// runtime params, and replaces the persisted run row.
fn submit_big_fish_input_tx(
    ctx: &ReducerContext,
    input: BigFishRunInputSubmitInput,
) -> Result<BigFishRuntimeSnapshot, String> {
    validate_run_input_submit_input(&input).map_err(|error| error.to_string())?;
    // Owner-scoped run lookup: someone else's run reads as "not found".
    let run = ctx
        .db
        .big_fish_runtime_run()
        .run_id()
        .find(&input.run_id)
        .filter(|row| row.owner_user_id == input.owner_user_id)
        .ok_or_else(|| "big_fish_runtime_run 不存在".to_string())?;
    // The backing session must still exist and belong to the same owner.
    let session = ctx
        .db
        .big_fish_creation_session()
        .session_id()
        .find(&run.session_id)
        .filter(|row| row.owner_user_id == input.owner_user_id)
        .ok_or_else(|| "big_fish_creation_session 不存在".to_string())?;
    let draft = session
        .draft_json
        .as_deref()
        .ok_or_else(|| "big_fish.draft 尚未编译".to_string())
        .and_then(|value| deserialize_draft(value).map_err(|error| error.to_string()))?;
    let current_snapshot =
        deserialize_runtime_snapshot(&run.snapshot_json).map_err(|error| error.to_string())?;
    // Pure state transition: advance the snapshot with this input sample.
    let next_snapshot = advance_runtime_snapshot(
        current_snapshot,
        &draft.runtime_params,
        input.input_x,
        input.input_y,
        input.submitted_at_micros,
    );
    // Persist the advanced state, recording the last input and new tick.
    replace_big_fish_run(
        ctx,
        &run,
        BigFishRuntimeRun {
            run_id: run.run_id.clone(),
            session_id: run.session_id.clone(),
            owner_user_id: run.owner_user_id.clone(),
            status: next_snapshot.status,
            snapshot_json: serialize_runtime_snapshot(&next_snapshot)
                .map_err(|error| error.to_string())?,
            last_input_x: input.input_x,
            last_input_y: input.input_y,
            tick: next_snapshot.tick,
            created_at: run.created_at,
            updated_at: Timestamp::from_micros_since_unix_epoch(input.submitted_at_micros),
        },
    );
    Ok(next_snapshot)
}
/// Tx body for run reads: validates the request, resolves the run scoped to
/// its owner, and deserializes the persisted snapshot JSON.
fn get_big_fish_run_tx(
    ctx: &ReducerContext,
    input: BigFishRunGetInput,
) -> Result<BigFishRuntimeSnapshot, String> {
    validate_run_get_input(&input).map_err(|error| error.to_string())?;
    let Some(run) = ctx
        .db
        .big_fish_runtime_run()
        .run_id()
        .find(&input.run_id)
        .filter(|row| row.owner_user_id == input.owner_user_id)
    else {
        return Err("big_fish_runtime_run 不存在".to_string());
    };
    deserialize_runtime_snapshot(&run.snapshot_json).map_err(|error| error.to_string())
}
/// Swap the persisted run row: remove the current row by run_id, then insert
/// the replacement in the same transaction.
fn replace_big_fish_run(
    ctx: &ReducerContext,
    current: &BigFishRuntimeRun,
    next: BigFishRuntimeRun,
) {
    ctx.db.big_fish_runtime_run().run_id().delete(&current.run_id);
    ctx.db.big_fish_runtime_run().insert(next);
}

View File

@@ -0,0 +1,494 @@
use crate::*;
use crate::big_fish::tables::{big_fish_agent_message, big_fish_creation_session};
#[spacetimedb::procedure]
pub fn create_big_fish_session(
    ctx: &mut ProcedureContext,
    input: BigFishSessionCreateInput,
) -> BigFishSessionProcedureResult {
    // Run the transactional body, then fold either arm of the Result into the
    // flat wire struct the client consumes.
    ctx.try_with_tx(|tx| create_big_fish_session_tx(tx, input.clone()))
        .map(|session| BigFishSessionProcedureResult {
            ok: true,
            session: Some(session),
            error_message: None,
        })
        .unwrap_or_else(|message| BigFishSessionProcedureResult {
            ok: false,
            session: None,
            error_message: Some(message),
        })
}
#[spacetimedb::procedure]
pub fn get_big_fish_session(
    ctx: &mut ProcedureContext,
    input: BigFishSessionGetInput,
) -> BigFishSessionProcedureResult {
    // Transactional read; success and failure both map onto the same wire struct.
    ctx.try_with_tx(|tx| get_big_fish_session_tx(tx, input.clone()))
        .map(|session| BigFishSessionProcedureResult {
            ok: true,
            session: Some(session),
            error_message: None,
        })
        .unwrap_or_else(|message| BigFishSessionProcedureResult {
            ok: false,
            session: None,
            error_message: Some(message),
        })
}
#[spacetimedb::procedure]
pub fn list_big_fish_works(
ctx: &mut ProcedureContext,
input: BigFishWorksListInput,
) -> BigFishWorksProcedureResult {
match ctx.try_with_tx(|tx| list_big_fish_works_tx(tx, input.clone())) {
Ok(items) => match serde_json::to_string(&items) {
Ok(items_json) => BigFishWorksProcedureResult {
ok: true,
items_json: Some(items_json),
error_message: None,
},
Err(error) => BigFishWorksProcedureResult {
ok: false,
items_json: None,
error_message: Some(error.to_string()),
},
},
Err(message) => BigFishWorksProcedureResult {
ok: false,
items_json: None,
error_message: Some(message),
},
}
}
#[spacetimedb::procedure]
pub fn submit_big_fish_message(
    ctx: &mut ProcedureContext,
    input: BigFishMessageSubmitInput,
) -> BigFishSessionProcedureResult {
    // Delegate to the tx body and collapse Ok/Err into the wire result.
    ctx.try_with_tx(|tx| submit_big_fish_message_tx(tx, input.clone()))
        .map(|session| BigFishSessionProcedureResult {
            ok: true,
            session: Some(session),
            error_message: None,
        })
        .unwrap_or_else(|message| BigFishSessionProcedureResult {
            ok: false,
            session: None,
            error_message: Some(message),
        })
}
#[spacetimedb::procedure]
pub fn compile_big_fish_draft(
    ctx: &mut ProcedureContext,
    input: BigFishDraftCompileInput,
) -> BigFishSessionProcedureResult {
    // Compile inside a tx; fold the Result into the flat wire struct.
    ctx.try_with_tx(|tx| compile_big_fish_draft_tx(tx, input.clone()))
        .map(|session| BigFishSessionProcedureResult {
            ok: true,
            session: Some(session),
            error_message: None,
        })
        .unwrap_or_else(|message| BigFishSessionProcedureResult {
            ok: false,
            session: None,
            error_message: Some(message),
        })
}
/// Transactional body of `create_big_fish_session`.
///
/// Rejects duplicate session / welcome-message ids, seeds a fresh
/// `BigFishCreationSession` row in the `CollectingAnchors` stage together with
/// the assistant welcome message, then re-reads the session as a snapshot.
///
/// Errors: validation failure, id collisions, or serializer failures for the
/// anchor pack / asset coverage JSON columns.
pub(crate) fn create_big_fish_session_tx(
    ctx: &ReducerContext,
    input: BigFishSessionCreateInput,
) -> Result<BigFishSessionSnapshot, String> {
    validate_session_create_input(&input).map_err(|error| error.to_string())?;
    // Uniqueness checks run before any insert so a duplicate leaves no rows behind.
    if ctx
        .db
        .big_fish_creation_session()
        .session_id()
        .find(&input.session_id)
        .is_some()
    {
        return Err("big_fish_creation_session.session_id 已存在".to_string());
    }
    if ctx
        .db
        .big_fish_agent_message()
        .message_id()
        .find(&input.welcome_message_id)
        .is_some()
    {
        return Err("big_fish_agent_message.message_id 已存在".to_string());
    }
    let created_at = Timestamp::from_micros_since_unix_epoch(input.created_at_micros);
    // A brand-new session starts with an inferred anchor pack and empty coverage.
    let anchor_pack = infer_anchor_pack(&input.seed_text, None);
    let asset_coverage = build_asset_coverage(None, &[]);
    ctx.db
        .big_fish_creation_session()
        .insert(BigFishCreationSession {
            session_id: input.session_id.clone(),
            owner_user_id: input.owner_user_id.clone(),
            seed_text: input.seed_text.trim().to_string(),
            current_turn: 0,
            // Initial progress is fixed at 20% for a freshly created session.
            progress_percent: 20,
            stage: BigFishCreationStage::CollectingAnchors,
            anchor_pack_json: serialize_anchor_pack(&anchor_pack)
                .map_err(|error| error.to_string())?,
            draft_json: None,
            asset_coverage_json: serialize_asset_coverage(&asset_coverage)
                .map_err(|error| error.to_string())?,
            last_assistant_reply: Some(input.welcome_message_text.clone()),
            publish_ready: false,
            created_at,
            updated_at: created_at,
        });
    // The welcome message shares the session's creation timestamp.
    ctx.db.big_fish_agent_message().insert(BigFishAgentMessage {
        message_id: input.welcome_message_id,
        session_id: input.session_id.clone(),
        role: BigFishAgentMessageRole::Assistant,
        kind: BigFishAgentMessageKind::Chat,
        text: input.welcome_message_text,
        created_at,
    });
    // Re-read through the shared getter so the returned snapshot matches reads.
    get_big_fish_session_tx(
        ctx,
        BigFishSessionGetInput {
            session_id: input.session_id,
            owner_user_id: input.owner_user_id,
        },
    )
}
/// Validate the request, resolve the owner-scoped session row, and project it
/// into a `BigFishSessionSnapshot`.
pub(crate) fn get_big_fish_session_tx(
    ctx: &ReducerContext,
    input: BigFishSessionGetInput,
) -> Result<BigFishSessionSnapshot, String> {
    validate_session_get_input(&input).map_err(|error| error.to_string())?;
    // A row owned by someone else is treated exactly like a missing row.
    let session = match ctx
        .db
        .big_fish_creation_session()
        .session_id()
        .find(&input.session_id)
    {
        Some(row) if row.owner_user_id == input.owner_user_id => row,
        _ => return Err("big_fish_creation_session 不存在".to_string()),
    };
    build_big_fish_session_snapshot(ctx, &session)
}
/// Build work summaries for every creation session owned by the caller,
/// newest first (ties broken by ascending work_id for a stable order).
pub(crate) fn list_big_fish_works_tx(
    ctx: &ReducerContext,
    input: BigFishWorksListInput,
) -> Result<Vec<BigFishWorkSummarySnapshot>, String> {
    validate_works_list_input(&input).map_err(|error| error.to_string())?;
    // Materialize summaries one by one, failing fast on the first error.
    let mut items: Vec<BigFishWorkSummarySnapshot> = Vec::new();
    for row in ctx.db.big_fish_creation_session().iter() {
        if row.owner_user_id != input.owner_user_id {
            continue;
        }
        items.push(build_big_fish_work_summary(ctx, &row)?);
    }
    items.sort_by(|left, right| {
        right
            .updated_at_micros
            .cmp(&left.updated_at_micros)
            .then_with(|| left.work_id.cmp(&right.work_id))
    });
    Ok(items)
}
/// Transactional body of `submit_big_fish_message`.
///
/// Records the user's chat message plus a canned assistant summary reply,
/// re-infers the anchor pack from the seed text and the new message, bumps the
/// session turn counter, and returns the refreshed session snapshot.
///
/// Errors: validation failure, missing/foreign session, duplicate message ids,
/// or anchor-pack serialization failure.
pub(crate) fn submit_big_fish_message_tx(
    ctx: &ReducerContext,
    input: BigFishMessageSubmitInput,
) -> Result<BigFishSessionSnapshot, String> {
    validate_message_submit_input(&input).map_err(|error| error.to_string())?;
    // Ownership check: a session owned by another user reads as "not found".
    let session = ctx
        .db
        .big_fish_creation_session()
        .session_id()
        .find(&input.session_id)
        .filter(|row| row.owner_user_id == input.owner_user_id)
        .ok_or_else(|| "big_fish_creation_session 不存在".to_string())?;
    // Both message ids are checked up front so neither insert happens on a dup.
    if ctx
        .db
        .big_fish_agent_message()
        .message_id()
        .find(&input.user_message_id)
        .is_some()
    {
        return Err("big_fish_agent_message.user_message_id 已存在".to_string());
    }
    if ctx
        .db
        .big_fish_agent_message()
        .message_id()
        .find(&input.assistant_message_id)
        .is_some()
    {
        return Err("big_fish_agent_message.assistant_message_id 已存在".to_string());
    }
    let submitted_at = Timestamp::from_micros_since_unix_epoch(input.submitted_at_micros);
    // User message is stored trimmed; the raw text is still used for inference below.
    ctx.db.big_fish_agent_message().insert(BigFishAgentMessage {
        message_id: input.user_message_id,
        session_id: input.session_id.clone(),
        role: BigFishAgentMessageRole::User,
        kind: BigFishAgentMessageKind::Chat,
        text: input.user_message_text.trim().to_string(),
        created_at: submitted_at,
    });
    let anchor_pack = infer_anchor_pack(&session.seed_text, Some(&input.user_message_text));
    // Fixed assistant reply; both messages share the submission timestamp.
    let assistant_text =
        "我已经把这版方向收束成 4 个高杠杆锚点,可以继续细化,也可以直接编译第一版玩法草稿。"
            .to_string();
    ctx.db.big_fish_agent_message().insert(BigFishAgentMessage {
        message_id: input.assistant_message_id,
        session_id: input.session_id.clone(),
        role: BigFishAgentMessageRole::Assistant,
        kind: BigFishAgentMessageKind::Summary,
        text: assistant_text.clone(),
        created_at: submitted_at,
    });
    // Rebuild the session row: turn +1 (saturating), progress pinned to 60%,
    // stage stays CollectingAnchors, anchor pack refreshed, draft untouched.
    let next_session = BigFishCreationSession {
        session_id: session.session_id.clone(),
        owner_user_id: session.owner_user_id.clone(),
        seed_text: session.seed_text.clone(),
        current_turn: session.current_turn.saturating_add(1),
        progress_percent: 60,
        stage: BigFishCreationStage::CollectingAnchors,
        anchor_pack_json: serialize_anchor_pack(&anchor_pack).map_err(|error| error.to_string())?,
        draft_json: session.draft_json.clone(),
        asset_coverage_json: session.asset_coverage_json.clone(),
        last_assistant_reply: Some(assistant_text),
        publish_ready: session.publish_ready,
        created_at: session.created_at,
        updated_at: submitted_at,
    };
    replace_big_fish_session(ctx, &session, next_session);
    // Return the state as clients will read it back.
    get_big_fish_session_tx(
        ctx,
        BigFishSessionGetInput {
            session_id: input.session_id,
            owner_user_id: input.owner_user_id,
        },
    )
}
/// Transactional body of `compile_big_fish_draft`.
///
/// Compiles a default draft from the session's stored anchor pack, recomputes
/// asset coverage against current slots, advances the session to `DraftReady`
/// (progress 80%), appends an idempotent system message, and returns the
/// refreshed snapshot.
///
/// Errors: validation failure, missing/foreign session, unreadable anchor pack
/// JSON, or draft/coverage serialization failure.
pub(crate) fn compile_big_fish_draft_tx(
    ctx: &ReducerContext,
    input: BigFishDraftCompileInput,
) -> Result<BigFishSessionSnapshot, String> {
    validate_draft_compile_input(&input).map_err(|error| error.to_string())?;
    let session = ctx
        .db
        .big_fish_creation_session()
        .session_id()
        .find(&input.session_id)
        .filter(|row| row.owner_user_id == input.owner_user_id)
        .ok_or_else(|| "big_fish_creation_session 不存在".to_string())?;
    // Unlike snapshot reads, an unreadable anchor pack is a hard error here:
    // the draft is derived directly from it.
    let anchor_pack =
        deserialize_anchor_pack(&session.anchor_pack_json).map_err(|error| error.to_string())?;
    let draft = compile_default_draft(&anchor_pack);
    let asset_slots = list_big_fish_asset_slots(ctx, &session.session_id);
    let coverage = build_asset_coverage(Some(&draft), &asset_slots);
    let compiled_at = Timestamp::from_micros_since_unix_epoch(input.compiled_at_micros);
    let reply = "第一版玩法草稿已编译完成,可以在结果页逐级生成主图、动作和场地背景。".to_string();
    // Rebuild the row: stage -> DraftReady, progress pinned to 80%, draft JSON
    // filled in, publish readiness taken from the fresh coverage computation.
    let next_session = BigFishCreationSession {
        session_id: session.session_id.clone(),
        owner_user_id: session.owner_user_id.clone(),
        seed_text: session.seed_text.clone(),
        current_turn: session.current_turn,
        progress_percent: 80,
        stage: BigFishCreationStage::DraftReady,
        anchor_pack_json: session.anchor_pack_json.clone(),
        draft_json: Some(serialize_draft(&draft).map_err(|error| error.to_string())?),
        asset_coverage_json: serialize_asset_coverage(&coverage)
            .map_err(|error| error.to_string())?,
        last_assistant_reply: Some(reply.clone()),
        publish_ready: coverage.publish_ready,
        created_at: session.created_at,
        updated_at: compiled_at,
    };
    replace_big_fish_session(ctx, &session, next_session);
    // Message id is derived from the compile timestamp, so re-running the same
    // compile does not duplicate the announcement (append is a no-op on dup id).
    append_big_fish_system_message(
        ctx,
        &input.session_id,
        format!("big-fish-message-compile-{}", input.compiled_at_micros),
        reply,
        input.compiled_at_micros,
    );
    get_big_fish_session_tx(
        ctx,
        BigFishSessionGetInput {
            session_id: input.session_id,
            owner_user_id: input.owner_user_id,
        },
    )
}
pub(crate) fn build_big_fish_session_snapshot(
ctx: &ReducerContext,
row: &BigFishCreationSession,
) -> Result<BigFishSessionSnapshot, String> {
let anchor_pack =
deserialize_anchor_pack(&row.anchor_pack_json).unwrap_or_else(|_| empty_anchor_pack());
let draft = row
.draft_json
.as_deref()
.map(deserialize_draft)
.transpose()
.map_err(|error| format!("big_fish.draft_json 非法: {error}"))?;
let asset_slots = list_big_fish_asset_slots(ctx, &row.session_id);
let asset_coverage = build_asset_coverage(draft.as_ref(), &asset_slots);
let mut messages = ctx
.db
.big_fish_agent_message()
.iter()
.filter(|message| message.session_id == row.session_id)
.map(|message| BigFishAgentMessageSnapshot {
message_id: message.message_id,
session_id: message.session_id,
role: message.role,
kind: message.kind,
text: message.text,
created_at_micros: message.created_at.to_micros_since_unix_epoch(),
})
.collect::<Vec<_>>();
messages.sort_by_key(|message| (message.created_at_micros, message.message_id.clone()));
Ok(BigFishSessionSnapshot {
session_id: row.session_id.clone(),
owner_user_id: row.owner_user_id.clone(),
seed_text: row.seed_text.clone(),
current_turn: row.current_turn,
progress_percent: row.progress_percent,
stage: row.stage,
anchor_pack,
draft,
asset_slots,
asset_coverage,
messages,
last_assistant_reply: row.last_assistant_reply.clone(),
publish_ready: row.publish_ready,
created_at_micros: row.created_at.to_micros_since_unix_epoch(),
updated_at_micros: row.updated_at.to_micros_since_unix_epoch(),
})
}
pub(crate) fn build_big_fish_work_summary(
ctx: &ReducerContext,
row: &BigFishCreationSession,
) -> Result<BigFishWorkSummarySnapshot, String> {
let draft = row
.draft_json
.as_deref()
.map(deserialize_draft)
.transpose()
.map_err(|error| format!("big_fish.draft_json 非法: {error}"))?;
let asset_slots = list_big_fish_asset_slots(ctx, &row.session_id);
let coverage = build_asset_coverage(draft.as_ref(), &asset_slots);
let cover_image_src = asset_slots
.iter()
.find(|slot| slot.asset_kind == BigFishAssetKind::StageBackground)
.and_then(|slot| slot.asset_url.clone())
.or_else(|| {
asset_slots
.iter()
.find(|slot| slot.asset_kind == BigFishAssetKind::LevelMainImage)
.and_then(|slot| slot.asset_url.clone())
});
let title = draft
.as_ref()
.map(|value| value.title.clone())
.filter(|value| !value.trim().is_empty())
.unwrap_or_else(|| "未命名大鱼草稿".to_string());
let subtitle = draft
.as_ref()
.map(|value| value.subtitle.clone())
.filter(|value| !value.trim().is_empty())
.unwrap_or_else(|| "等待整理玩法草稿".to_string());
let summary = draft
.as_ref()
.map(|value| value.core_fun.clone())
.filter(|value| !value.trim().is_empty())
.unwrap_or_else(|| {
row.last_assistant_reply
.clone()
.unwrap_or_else(|| "继续补齐锚点后即可生成玩法草稿。".to_string())
});
Ok(BigFishWorkSummarySnapshot {
work_id: format!("big-fish-work-{}", row.session_id),
source_session_id: row.session_id.clone(),
title,
subtitle,
summary,
cover_image_src,
status: if row.stage == BigFishCreationStage::Published {
"published".to_string()
} else {
"draft".to_string()
},
updated_at_micros: row.updated_at.to_micros_since_unix_epoch(),
publish_ready: coverage.publish_ready,
level_count: draft
.as_ref()
.map(|value| value.runtime_params.level_count)
.unwrap_or(BIG_FISH_DEFAULT_LEVEL_COUNT),
level_main_image_ready_count: coverage.level_main_image_ready_count,
level_motion_ready_count: coverage.level_motion_ready_count,
background_ready: coverage.background_ready,
})
}
/// Replace the persisted session row keyed by `session_id`.
///
/// Delete-then-insert emulates an upsert on the primary key; callers pass a
/// fully rebuilt row carrying forward whatever fields must survive.
pub(crate) fn replace_big_fish_session(
    ctx: &ReducerContext,
    stale: &BigFishCreationSession,
    replacement: BigFishCreationSession,
) {
    ctx.db
        .big_fish_creation_session()
        .session_id()
        .delete(&stale.session_id);
    ctx.db.big_fish_creation_session().insert(replacement);
}
/// Append an assistant `ActionResult` message to a session.
///
/// Idempotent: if a message with this id is already on record the call is a
/// silent no-op, which lets callers derive deterministic ids and re-run safely.
pub(crate) fn append_big_fish_system_message(
    ctx: &ReducerContext,
    session_id: &str,
    message_id: String,
    text: String,
    created_at_micros: i64,
) {
    let already_recorded = ctx
        .db
        .big_fish_agent_message()
        .message_id()
        .find(&message_id)
        .is_some();
    if already_recorded {
        return;
    }
    ctx.db.big_fish_agent_message().insert(BigFishAgentMessage {
        message_id,
        session_id: session_id.to_string(),
        role: BigFishAgentMessageRole::Assistant,
        kind: BigFishAgentMessageKind::ActionResult,
        text,
        created_at: Timestamp::from_micros_since_unix_epoch(created_at_micros),
    });
}

View File

@@ -0,0 +1,72 @@
use crate::*;
/// Truth table for one Big Fish creation session, scoped to a single owner.
///
/// The `*_json` columns hold snapshots written by the big_fish serializers
/// (`serialize_anchor_pack`, `serialize_draft`, `serialize_asset_coverage`);
/// `draft_json` stays `None` until the first draft compile.
#[spacetimedb::table(
    accessor = big_fish_creation_session,
    index(accessor = by_big_fish_session_owner_user_id, btree(columns = [owner_user_id]))
)]
pub struct BigFishCreationSession {
    // Primary key; also the foreign key carried by messages, asset slots and runs.
    #[primary_key]
    pub(crate) session_id: String,
    // Owning user; rows are only ever served back to this user.
    pub(crate) owner_user_id: String,
    // Creator's seed prompt, stored trimmed.
    pub(crate) seed_text: String,
    // Conversation turn counter, bumped per submitted user message.
    pub(crate) current_turn: u32,
    // Coarse creation progress (fixed stage milestones: 20 / 60 / 80 observed).
    pub(crate) progress_percent: u32,
    pub(crate) stage: BigFishCreationStage,
    // Serialized anchor pack; readers fall back to an empty pack on decode failure.
    pub(crate) anchor_pack_json: String,
    // Serialized compiled draft; None until compile_big_fish_draft runs.
    pub(crate) draft_json: Option<String>,
    // Serialized asset coverage summary for the session's slots.
    pub(crate) asset_coverage_json: String,
    // Cached text of the most recent assistant reply, if any.
    pub(crate) last_assistant_reply: Option<String>,
    // Mirrors the coverage computation's publish gate at last update.
    pub(crate) publish_ready: bool,
    pub(crate) created_at: Timestamp,
    pub(crate) updated_at: Timestamp,
}
/// Append-only chat log for a Big Fish creation session.
///
/// Rows are keyed by a caller-supplied `message_id`; writers check for an
/// existing id before inserting, which makes appends idempotent.
#[spacetimedb::table(
    accessor = big_fish_agent_message,
    index(accessor = by_big_fish_message_session_id, btree(columns = [session_id]))
)]
pub struct BigFishAgentMessage {
    // Primary key supplied by the caller (client ids or derived deterministic ids).
    #[primary_key]
    pub(crate) message_id: String,
    // Owning session (BigFishCreationSession.session_id).
    pub(crate) session_id: String,
    // User vs. Assistant authorship.
    pub(crate) role: BigFishAgentMessageRole,
    // Message flavor: Chat, Summary, or ActionResult observed in writers.
    pub(crate) kind: BigFishAgentMessageKind,
    pub(crate) text: String,
    // Used as the primary sort key when projecting the session transcript.
    pub(crate) created_at: Timestamp,
}
/// Generated-asset slot attached to a creation session (e.g. stage background,
/// per-level main image); the works list picks its cover image from these rows.
#[spacetimedb::table(
    accessor = big_fish_asset_slot,
    index(accessor = by_big_fish_asset_session_id, btree(columns = [session_id]))
)]
pub struct BigFishAssetSlot {
    #[primary_key]
    pub(crate) slot_id: String,
    // Owning session (BigFishCreationSession.session_id).
    pub(crate) session_id: String,
    // Which asset this slot holds (StageBackground / LevelMainImage / ... ).
    pub(crate) asset_kind: BigFishAssetKind,
    // NOTE(review): presumably the level index for per-level assets — confirm
    // against the slot writers (not visible in this file).
    pub(crate) level: Option<u32>,
    // NOTE(review): presumably identifies a motion/animation variant — confirm.
    pub(crate) motion_key: Option<String>,
    pub(crate) status: BigFishAssetStatus,
    // URL of the generated asset once available; None while pending.
    pub(crate) asset_url: Option<String>,
    // Prompt captured at generation time for reproducibility/debugging.
    pub(crate) prompt_snapshot: String,
    pub(crate) updated_at: Timestamp,
}
/// One runtime play-through of a compiled Big Fish session.
///
/// The authoritative game state lives in `snapshot_json` (written via
/// `serialize_runtime_snapshot`); `status` and `tick` are denormalized copies
/// of the snapshot's fields, refreshed on every input step.
#[spacetimedb::table(
    accessor = big_fish_runtime_run,
    index(accessor = by_big_fish_run_owner_user_id, btree(columns = [owner_user_id])),
    index(accessor = by_big_fish_run_session_id, btree(columns = [session_id]))
)]
pub struct BigFishRuntimeRun {
    #[primary_key]
    pub(crate) run_id: String,
    // Source creation session this run was started from.
    pub(crate) session_id: String,
    // Owning user; lookups filter on this before returning a run.
    pub(crate) owner_user_id: String,
    // Denormalized from the snapshot's status on each update.
    pub(crate) status: BigFishRunStatus,
    // Serialized BigFishRuntimeSnapshot — the source of truth for run state.
    pub(crate) snapshot_json: String,
    // Most recent player input vector, echoed from the step request.
    pub(crate) last_input_x: f32,
    pub(crate) last_input_y: f32,
    // Denormalized from the snapshot's tick counter.
    pub(crate) tick: u64,
    pub(crate) created_at: Timestamp,
    // Set to the submitting request's timestamp on each step.
    pub(crate) updated_at: Timestamp,
}

View File

@@ -0,0 +1 @@
// Custom World agent message、operation、draft card 与 action 执行落位点。

View File

@@ -0,0 +1 @@
// Custom World gallery 与 detail 读模型落位点。

View File

@@ -995,15 +995,14 @@ fn upsert_custom_world_profile_record(
.find(&input.profile_id)
.filter(|row| row.owner_user_id == input.owner_user_id)
.or_else(|| {
input.source_agent_session_id.as_ref().and_then(|session_id| {
ctx.db.custom_world_profile().iter().find(|row| {
is_same_agent_draft_profile_candidate(
row,
&input.owner_user_id,
session_id,
)
input
.source_agent_session_id
.as_ref()
.and_then(|session_id| {
ctx.db.custom_world_profile().iter().find(|row| {
is_same_agent_draft_profile_candidate(row, &input.owner_user_id, session_id)
})
})
})
});
let next_row = match current {
@@ -1432,18 +1431,16 @@ fn list_custom_world_work_snapshots(
let mut items = Vec::new();
for session in ctx
.db
.custom_world_agent_session()
.iter()
.filter(|row| row.owner_user_id == input.owner_user_id && row.stage != RpgAgentStage::Published)
{
for session in ctx.db.custom_world_agent_session().iter().filter(|row| {
row.owner_user_id == input.owner_user_id && row.stage != RpgAgentStage::Published
}) {
let gate = build_custom_world_publish_gate_from_session(&session);
let draft_profile = parse_optional_session_object(session.draft_profile_json.as_deref());
let title = resolve_session_work_title(&session, draft_profile.as_ref());
let summary = resolve_session_work_summary(&session, draft_profile.as_ref());
let stage_label = Some(resolve_rpg_agent_stage_label(session.stage).to_string());
let subtitle = resolve_session_work_subtitle(draft_profile.as_ref(), stage_label.as_deref());
let subtitle =
resolve_session_work_subtitle(draft_profile.as_ref(), stage_label.as_deref());
let (playable_npc_count, landmark_count) =
resolve_session_work_counts(ctx, &session, draft_profile.as_ref());
@@ -1516,8 +1513,16 @@ fn list_custom_world_work_snapshots(
.updated_at_micros
.cmp(&left.updated_at_micros)
.then_with(|| {
let left_rank = if left.source_type == "agent_session" { 0 } else { 1 };
let right_rank = if right.source_type == "agent_session" { 0 } else { 1 };
let left_rank = if left.source_type == "agent_session" {
0
} else {
1
};
let right_rank = if right.source_type == "agent_session" {
0
} else {
1
};
left_rank.cmp(&right_rank)
})
.then(left.work_id.cmp(&right.work_id))
@@ -1578,7 +1583,9 @@ fn execute_custom_world_agent_action_tx(
match input.action.trim() {
"draft_foundation" => execute_draft_foundation_action(ctx, &session, &input, &payload),
"update_draft_card" => execute_update_draft_card_action(ctx, &session, &input, &payload),
"sync_result_profile" => execute_sync_result_profile_action(ctx, &session, &input, &payload),
"sync_result_profile" => {
execute_sync_result_profile_action(ctx, &session, &input, &payload)
}
"publish_world" => execute_publish_world_action(ctx, &session, &input, &payload),
"revert_checkpoint" => execute_revert_checkpoint_action(ctx, &session, &input, &payload),
"generate_characters"
@@ -1603,18 +1610,16 @@ fn execute_draft_foundation_action(
}
let updated_at = input.submitted_at_micros;
let draft_profile = if let Some(profile) = payload.get("draftProfile").and_then(JsonValue::as_object) {
profile.clone()
} else if let Some(existing) = parse_optional_session_object(session.draft_profile_json.as_deref()) {
ensure_minimal_draft_profile(existing, &session.seed_text)
} else {
build_minimal_draft_profile_from_seed(&session.seed_text)
};
let draft_profile_json =
serde_json::to_string(&JsonValue::Object(draft_profile.clone())).map_err(|error| {
format!("draft_foundation 无法序列化 draft_profile_json: {error}")
let draft_profile = payload
.get("draftProfile")
.and_then(JsonValue::as_object)
.cloned()
.ok_or_else(|| {
"draft_foundation requires externally generated payload.draftProfile".to_string()
})?;
let draft_profile_json = serde_json::to_string(&JsonValue::Object(draft_profile.clone()))
.map_err(|error| format!("draft_foundation 无法序列化 draft_profile_json: {error}"))?;
let gate = summarize_publish_gate_from_json(
&input.session_id,
RpgAgentStage::ObjectRefining,
@@ -1627,8 +1632,12 @@ fn execute_draft_foundation_action(
progress_percent: Some(100),
stage: Some(RpgAgentStage::ObjectRefining),
draft_profile_json: Some(Some(draft_profile_json.clone())),
last_assistant_reply: Some(Some("世界底稿已整理完成,接下来可以继续细化卡片和发布预览。".to_string())),
publish_gate_json: Some(Some(serialize_json_value(&publish_gate_to_json_value(&gate))?)),
last_assistant_reply: Some(Some(
"世界底稿已整理完成,接下来可以继续细化卡片和发布预览。".to_string(),
)),
publish_gate_json: Some(Some(serialize_json_value(&publish_gate_to_json_value(
&gate,
))?)),
result_preview_json: Some(build_result_preview_json(
Some(&draft_profile),
&gate,
@@ -1675,7 +1684,8 @@ fn execute_update_draft_card_action(
) -> Result<CustomWorldAgentOperationSnapshot, String> {
ensure_refining_stage(session.stage, "update_draft_card")?;
let card_id = read_required_payload_text(payload, "cardId", "update_draft_card requires cardId")?;
let card_id =
read_required_payload_text(payload, "cardId", "update_draft_card requires cardId")?;
let card = ctx
.db
.custom_world_draft_card()
@@ -1691,7 +1701,8 @@ fn execute_update_draft_card_action(
return Err("update_draft_card requires sections".to_string());
}
let mut detail_object = parse_optional_session_object(card.detail_payload_json.as_deref()).unwrap_or_default();
let mut detail_object =
parse_optional_session_object(card.detail_payload_json.as_deref()).unwrap_or_default();
let mut detail_sections = detail_object
.get("sections")
.and_then(JsonValue::as_array)
@@ -1735,27 +1746,36 @@ fn execute_update_draft_card_action(
}
detail_object.insert("id".to_string(), JsonValue::String(card.card_id.clone()));
detail_object.insert("kind".to_string(), JsonValue::String(card.kind.as_str().to_string()));
detail_object.insert(
"kind".to_string(),
JsonValue::String(card.kind.as_str().to_string()),
);
detail_object.insert("title".to_string(), JsonValue::String(card.title.clone()));
detail_object.insert("sections".to_string(), JsonValue::Array(detail_sections.clone()));
detail_object.insert(
"sections".to_string(),
JsonValue::Array(detail_sections.clone()),
);
detail_object.insert(
"linkedIds".to_string(),
serde_json::from_str::<JsonValue>(&card.linked_ids_json).unwrap_or_else(|_| JsonValue::Array(Vec::new())),
serde_json::from_str::<JsonValue>(&card.linked_ids_json)
.unwrap_or_else(|_| JsonValue::Array(Vec::new())),
);
detail_object.insert("locked".to_string(), JsonValue::Bool(false));
detail_object.insert("editable".to_string(), JsonValue::Bool(false));
detail_object.insert("editableSectionIds".to_string(), JsonValue::Array(Vec::new()));
detail_object.insert(
"editableSectionIds".to_string(),
JsonValue::Array(Vec::new()),
);
detail_object.insert("warningMessages".to_string(), JsonValue::Array(Vec::new()));
let updated_title = extract_detail_section_value(&detail_sections, "title").unwrap_or_else(|| card.title.clone());
let updated_subtitle =
extract_detail_section_value(&detail_sections, "subtitle").unwrap_or_else(|| card.subtitle.clone());
let updated_summary =
extract_detail_section_value(&detail_sections, "summary").unwrap_or_else(|| card.summary.clone());
let detail_payload_json =
serde_json::to_string(&JsonValue::Object(detail_object)).map_err(|error| {
format!("update_draft_card 无法序列化 detail_payload_json: {error}")
})?;
let updated_title = extract_detail_section_value(&detail_sections, "title")
.unwrap_or_else(|| card.title.clone());
let updated_subtitle = extract_detail_section_value(&detail_sections, "subtitle")
.unwrap_or_else(|| card.subtitle.clone());
let updated_summary = extract_detail_section_value(&detail_sections, "summary")
.unwrap_or_else(|| card.summary.clone());
let detail_payload_json = serde_json::to_string(&JsonValue::Object(detail_object))
.map_err(|error| format!("update_draft_card 无法序列化 detail_payload_json: {error}"))?;
replace_custom_world_draft_card(
ctx,
@@ -1778,7 +1798,14 @@ fn execute_update_draft_card_action(
},
);
let next_session = sync_session_draft_profile_from_card_update(session, &card, &updated_title, &updated_subtitle, &updated_summary, input.submitted_at_micros)?;
let next_session = sync_session_draft_profile_from_card_update(
session,
&card,
&updated_title,
&updated_subtitle,
&updated_summary,
input.submitted_at_micros,
)?;
replace_custom_world_agent_session(ctx, session, next_session);
append_custom_world_action_result_message(
@@ -1816,7 +1843,10 @@ fn execute_sync_result_profile_action(
.ok_or_else(|| "sync_result_profile requires profile".to_string())?;
if let Some(stable_profile_id) = resolve_stable_agent_draft_profile_id(session) {
// 结果页回写时必须沿用当前草稿的稳定身份,避免把同一草稿写成新条目。
profile.insert("id".to_string(), JsonValue::String(stable_profile_id.clone()));
profile.insert(
"id".to_string(),
JsonValue::String(stable_profile_id.clone()),
);
upsert_nested_result_profile_id(&mut profile, &stable_profile_id);
}
let draft_profile = ensure_minimal_draft_profile(profile, &session.seed_text);
@@ -1830,9 +1860,13 @@ fn execute_sync_result_profile_action(
let next_session = rebuild_custom_world_agent_session_row(
session,
CustomWorldAgentSessionPatch {
draft_profile_json: Some(Some(serialize_json_value(&JsonValue::Object(draft_profile.clone()))?)),
draft_profile_json: Some(Some(serialize_json_value(&JsonValue::Object(
draft_profile.clone(),
))?)),
last_assistant_reply: Some(Some("结果页草稿已同步回当前会话。".to_string())),
publish_gate_json: Some(Some(serialize_json_value(&publish_gate_to_json_value(&gate))?)),
publish_gate_json: Some(Some(serialize_json_value(&publish_gate_to_json_value(
&gate,
))?)),
result_preview_json: Some(build_result_preview_json(
Some(&draft_profile),
&gate,
@@ -1871,12 +1905,14 @@ fn execute_sync_result_profile_action(
}
fn resolve_stable_agent_draft_profile_id(session: &CustomWorldAgentSession) -> Option<String> {
parse_optional_session_object(session.draft_profile_json.as_deref()).and_then(|profile| {
read_optional_text_field(&profile, &["legacyResultProfile.id", "id"])
})
parse_optional_session_object(session.draft_profile_json.as_deref())
.and_then(|profile| read_optional_text_field(&profile, &["legacyResultProfile.id", "id"]))
}
fn upsert_nested_result_profile_id(profile: &mut JsonMap<String, JsonValue>, stable_profile_id: &str) {
fn upsert_nested_result_profile_id(
profile: &mut JsonMap<String, JsonValue>,
stable_profile_id: &str,
) {
let legacy_result_profile = profile
.entry("legacyResultProfile".to_string())
.or_insert_with(|| JsonValue::Object(JsonMap::new()));
@@ -1907,12 +1943,13 @@ fn execute_publish_world_action(
) -> Result<CustomWorldAgentOperationSnapshot, String> {
ensure_publishable_stage(session.stage, "publish_world")?;
let draft_profile = if let Some(explicit) = payload.get("draftProfile").and_then(JsonValue::as_object) {
explicit.clone()
} else {
parse_optional_session_object(session.draft_profile_json.as_deref())
.ok_or_else(|| "publish_world requires draft_profile_json".to_string())?
};
let draft_profile =
if let Some(explicit) = payload.get("draftProfile").and_then(JsonValue::as_object) {
explicit.clone()
} else {
parse_optional_session_object(session.draft_profile_json.as_deref())
.ok_or_else(|| "publish_world requires draft_profile_json".to_string())?
};
let gate = summarize_publish_gate_from_json(
&session.session_id,
session.stage,
@@ -1972,7 +2009,10 @@ fn execute_publish_world_action(
&session.session_id,
RpgAgentOperationType::PublishWorld,
"世界已发布",
&format!("正式世界档案已写入作品库:{}", publish_result.1.profile_id),
&format!(
"正式世界档案已写入作品库:{}",
publish_result.1.profile_id
),
input.submitted_at_micros,
);
@@ -2046,9 +2086,15 @@ fn execute_revert_checkpoint_action(
.map(|value| serialize_json_value(&JsonValue::Object(value.clone())))
.transpose()?,
),
last_assistant_reply: Some(Some("已恢复到所选 checkpoint 的世界草稿状态。".to_string())),
quality_findings_json: Some(serialize_json_value(&JsonValue::Array(restored_quality_findings))?),
publish_gate_json: Some(Some(serialize_json_value(&publish_gate_to_json_value(&gate))?)),
last_assistant_reply: Some(Some(
"已恢复到所选 checkpoint 的世界草稿状态。".to_string(),
)),
quality_findings_json: Some(serialize_json_value(&JsonValue::Array(
restored_quality_findings,
))?),
publish_gate_json: Some(Some(serialize_json_value(&publish_gate_to_json_value(
&gate,
))?)),
result_preview_json: Some(build_result_preview_json(
restored_draft_profile.as_ref(),
&gate,
@@ -2099,7 +2145,10 @@ fn execute_placeholder_custom_world_action(
ctx,
&session.session_id,
&input.operation_id,
&format!("动作 {} 已接入最小兼容占位,后续会继续补真实编排。", input.action),
&format!(
"动作 {} 已接入最小兼容占位,后续会继续补真实编排。",
input.action
),
input.submitted_at_micros,
);
let operation = build_and_insert_custom_world_operation(
@@ -2201,7 +2250,8 @@ fn summarize_publish_gate_from_json(
blockers.push(CustomWorldPublishBlockerSnapshot {
blocker_id: "publish_missing_player_premise".to_string(),
code: "publish_missing_player_premise".to_string(),
message: "当前世界缺少玩家身份与切入前提,发布前需要先补齐玩家 premise。".to_string(),
message: "当前世界缺少玩家身份与切入前提,发布前需要先补齐玩家 premise。"
.to_string(),
});
}
if !json_array_has_non_empty_text(profile.get("coreConflicts")) {
@@ -2342,8 +2392,10 @@ fn build_supported_actions_json(
let has_checkpoint = checkpoints
.iter()
.any(|entry| entry.get("snapshot").is_some());
let draft_refining_enabled =
matches!(stage, RpgAgentStage::ObjectRefining | RpgAgentStage::VisualRefining);
let draft_refining_enabled = matches!(
stage,
RpgAgentStage::ObjectRefining | RpgAgentStage::VisualRefining
);
let long_tail_enabled = matches!(
stage,
RpgAgentStage::ObjectRefining
@@ -2462,8 +2514,10 @@ fn build_custom_world_draft_card_detail_snapshot(
card: &CustomWorldDraftCard,
) -> Result<CustomWorldDraftCardDetailSnapshot, String> {
if let Some(detail_payload_json) = card.detail_payload_json.as_deref() {
let detail_value = serde_json::from_str::<JsonValue>(detail_payload_json)
.map_err(|error| format!("custom_world_draft_card.detail_payload_json 非法: {error}"))?;
let detail_value =
serde_json::from_str::<JsonValue>(detail_payload_json).map_err(|error| {
format!("custom_world_draft_card.detail_payload_json 非法: {error}")
})?;
if let Some(object) = detail_value.as_object() {
let sections = object
.get("sections")
@@ -2501,8 +2555,14 @@ fn build_custom_world_draft_card_detail_snapshot(
.to_string(),
sections,
linked_ids_json: card.linked_ids_json.clone(),
locked: object.get("locked").and_then(JsonValue::as_bool).unwrap_or(false),
editable: object.get("editable").and_then(JsonValue::as_bool).unwrap_or(false),
locked: object
.get("locked")
.and_then(JsonValue::as_bool)
.unwrap_or(false),
editable: object
.get("editable")
.and_then(JsonValue::as_bool)
.unwrap_or(false),
editable_section_ids_json: serialize_json_value(
object
.get("editableSectionIds")
@@ -2534,7 +2594,9 @@ fn build_custom_world_draft_card_detail_snapshot(
})
}
fn build_fallback_card_sections(card: &CustomWorldDraftCard) -> Vec<CustomWorldDraftCardDetailSectionSnapshot> {
fn build_fallback_card_sections(
card: &CustomWorldDraftCard,
) -> Vec<CustomWorldDraftCardDetailSectionSnapshot> {
vec![
CustomWorldDraftCardDetailSectionSnapshot {
section_id: "title".to_string(),
@@ -2578,7 +2640,9 @@ fn rebuild_custom_world_agent_session_row(
current_turn: current.current_turn,
progress_percent: patch.progress_percent.unwrap_or(current.progress_percent),
stage: patch.stage.unwrap_or(current.stage),
focus_card_id: patch.focus_card_id.unwrap_or_else(|| current.focus_card_id.clone()),
focus_card_id: patch
.focus_card_id
.unwrap_or_else(|| current.focus_card_id.clone()),
anchor_content_json: patch
.anchor_content_json
.unwrap_or_else(|| current.anchor_content_json.clone()),
@@ -2588,8 +2652,12 @@ fn rebuild_custom_world_agent_session_row(
creator_intent_readiness_json: patch
.creator_intent_readiness_json
.unwrap_or_else(|| current.creator_intent_readiness_json.clone()),
anchor_pack_json: patch.anchor_pack_json.unwrap_or_else(|| current.anchor_pack_json.clone()),
lock_state_json: patch.lock_state_json.unwrap_or_else(|| current.lock_state_json.clone()),
anchor_pack_json: patch
.anchor_pack_json
.unwrap_or_else(|| current.anchor_pack_json.clone()),
lock_state_json: patch
.lock_state_json
.unwrap_or_else(|| current.lock_state_json.clone()),
draft_profile_json: patch
.draft_profile_json
.unwrap_or_else(|| current.draft_profile_json.clone()),
@@ -2741,7 +2809,8 @@ fn upsert_world_foundation_card(
status: RpgAgentDraftCardStatus::Confirmed,
title: read_optional_text_field(draft_profile, &["name", "title"])
.unwrap_or_else(|| "世界底稿".to_string()),
subtitle: read_optional_text_field(draft_profile, &["subtitle"]).unwrap_or_default(),
subtitle: read_optional_text_field(draft_profile, &["subtitle"])
.unwrap_or_default(),
summary: read_optional_text_field(draft_profile, &["summary"])
.unwrap_or_else(|| "第一版世界底稿已生成。".to_string()),
linked_ids_json: "[]".to_string(),
@@ -2754,24 +2823,27 @@ fn upsert_world_foundation_card(
},
);
} else {
ctx.db.custom_world_draft_card().insert(CustomWorldDraftCard {
card_id,
session_id: session_id.to_string(),
kind: RpgAgentDraftCardKind::World,
status: RpgAgentDraftCardStatus::Confirmed,
title: read_optional_text_field(draft_profile, &["name", "title"])
.unwrap_or_else(|| "世界底稿".to_string()),
subtitle: read_optional_text_field(draft_profile, &["subtitle"]).unwrap_or_default(),
summary: read_optional_text_field(draft_profile, &["summary"])
.unwrap_or_else(|| "第一版世界底稿已生成。".to_string()),
linked_ids_json: "[]".to_string(),
warning_count: 0,
asset_status: None,
asset_status_label: None,
detail_payload_json: Some(detail_payload_json),
created_at: Timestamp::from_micros_since_unix_epoch(updated_at_micros),
updated_at: Timestamp::from_micros_since_unix_epoch(updated_at_micros),
});
ctx.db
.custom_world_draft_card()
.insert(CustomWorldDraftCard {
card_id,
session_id: session_id.to_string(),
kind: RpgAgentDraftCardKind::World,
status: RpgAgentDraftCardStatus::Confirmed,
title: read_optional_text_field(draft_profile, &["name", "title"])
.unwrap_or_else(|| "世界底稿".to_string()),
subtitle: read_optional_text_field(draft_profile, &["subtitle"])
.unwrap_or_default(),
summary: read_optional_text_field(draft_profile, &["summary"])
.unwrap_or_else(|| "第一版世界底稿已生成。".to_string()),
linked_ids_json: "[]".to_string(),
warning_count: 0,
asset_status: None,
asset_status_label: None,
detail_payload_json: Some(detail_payload_json),
created_at: Timestamp::from_micros_since_unix_epoch(updated_at_micros),
updated_at: Timestamp::from_micros_since_unix_epoch(updated_at_micros),
});
}
Ok(())
@@ -2788,7 +2860,10 @@ fn sync_session_draft_profile_from_card_update(
let mut draft_profile = parse_optional_session_object(session.draft_profile_json.as_deref())
.unwrap_or_else(|| build_minimal_draft_profile_from_seed(&session.seed_text));
if card.kind == RpgAgentDraftCardKind::World {
draft_profile.insert("name".to_string(), JsonValue::String(updated_title.to_string()));
draft_profile.insert(
"name".to_string(),
JsonValue::String(updated_title.to_string()),
);
draft_profile.insert(
"subtitle".to_string(),
JsonValue::String(updated_subtitle.to_string()),
@@ -2808,8 +2883,12 @@ fn sync_session_draft_profile_from_card_update(
rebuild_custom_world_agent_session_row(
session,
CustomWorldAgentSessionPatch {
draft_profile_json: Some(Some(serialize_json_value(&JsonValue::Object(draft_profile.clone()))?)),
publish_gate_json: Some(Some(serialize_json_value(&publish_gate_to_json_value(&gate))?)),
draft_profile_json: Some(Some(serialize_json_value(&JsonValue::Object(
draft_profile.clone(),
))?)),
publish_gate_json: Some(Some(serialize_json_value(&publish_gate_to_json_value(
&gate,
))?)),
result_preview_json: Some(build_result_preview_json(
Some(&draft_profile),
&gate,
@@ -2824,7 +2903,10 @@ fn sync_session_draft_profile_from_card_update(
}
fn ensure_refining_stage(stage: RpgAgentStage, action: &str) -> Result<(), String> {
if matches!(stage, RpgAgentStage::ObjectRefining | RpgAgentStage::VisualRefining) {
if matches!(
stage,
RpgAgentStage::ObjectRefining | RpgAgentStage::VisualRefining
) {
Ok(())
} else {
Err(format!(
@@ -2933,10 +3015,7 @@ fn read_required_payload_text(
.ok_or_else(|| error_message.to_string())
}
fn read_optional_text_field(
object: &JsonMap<String, JsonValue>,
keys: &[&str],
) -> Option<String> {
fn read_optional_text_field(object: &JsonMap<String, JsonValue>, keys: &[&str]) -> Option<String> {
for key in keys {
let mut current = JsonValue::Object(object.clone());
let mut found = true;
@@ -2949,7 +3028,11 @@ fn read_optional_text_field(
}
}
if found {
if let Some(value) = current.as_str().map(str::trim).filter(|value| !value.is_empty()) {
if let Some(value) = current
.as_str()
.map(str::trim)
.filter(|value| !value.is_empty())
{
return Some(value.to_string());
}
}
@@ -3144,21 +3227,28 @@ fn append_checkpoint_json(current: &str, checkpoint: &JsonValue) -> Result<Strin
fn extract_detail_section_value(sections: &[JsonValue], target_id: &str) -> Option<String> {
sections.iter().find_map(|entry| {
let object = entry.as_object()?;
(object.get("id").and_then(JsonValue::as_str) == Some(target_id))
.then(|| {
object
.get("value")
.and_then(JsonValue::as_str)
.unwrap_or_default()
.to_string()
})
(object.get("id").and_then(JsonValue::as_str) == Some(target_id)).then(|| {
object
.get("value")
.and_then(JsonValue::as_str)
.unwrap_or_default()
.to_string()
})
})
}
fn json_array_has_non_empty_text(value: Option<&JsonValue>) -> bool {
value
.and_then(JsonValue::as_array)
.map(|entries| entries.iter().any(|entry| entry.as_str().map(str::trim).filter(|text| !text.is_empty()).is_some()))
.map(|entries| {
entries.iter().any(|entry| {
entry
.as_str()
.map(str::trim)
.filter(|text| !text.is_empty())
.is_some()
})
})
.unwrap_or(false)
}
@@ -3341,12 +3431,14 @@ fn build_custom_world_agent_session_snapshot(
recommended_replies_json: row.recommended_replies_json.clone(),
asset_coverage_json: row.asset_coverage_json.clone(),
checkpoints_json: row.checkpoints_json.clone(),
supported_actions_json: serialize_json_value(&JsonValue::Array(build_supported_actions_json(
row.stage,
row.progress_percent,
&build_custom_world_publish_gate_from_session(row),
&parse_json_array_or_empty(&row.checkpoints_json),
)))
supported_actions_json: serialize_json_value(&JsonValue::Array(
build_supported_actions_json(
row.stage,
row.progress_percent,
&build_custom_world_publish_gate_from_session(row),
&parse_json_array_or_empty(&row.checkpoints_json),
),
))
.unwrap_or_else(|_| "[]".to_string()),
messages,
draft_cards,

View File

@@ -0,0 +1 @@
// Custom World profile 读写落位点。

View File

@@ -0,0 +1 @@
// Custom World publish gate、published profile compile 与 publish_world 落位点。

View File

@@ -0,0 +1 @@
// Custom World 旧 session 与 agent session 真相表落位点。

View File

@@ -0,0 +1 @@
// Custom World works 聚合与 work summary 落位点。

View File

@@ -1,3 +1,5 @@
use crate::*;
#[derive(Clone, Debug, PartialEq, Eq, SpacetimeType)]
pub struct ResolveNpcBattleInteractionInput {
pub npc_interaction: ResolveNpcInteractionInput,
@@ -17,7 +19,7 @@ pub struct ResolveNpcBattleInteractionInput {
// 输出同时返回 NPC 交互结果与 battle_state 快照,避免 Axum 再回头读取 private table。
#[derive(Clone, Debug, PartialEq, Eq, SpacetimeType)]
pub struct NpcBattleInteractionResult {
pub interaction: module_npc::NpcInteractionResult,
pub interaction: NpcInteractionResult,
pub battle_state: BattleStateSnapshot,
}

View File

@@ -1,3 +1,5 @@
use crate::*;
// 当前阶段先落可发布的最小模块入口,后续再补对象确认、业务绑定与任务编排 reducer。
#[spacetimedb::reducer(init)]
pub fn init(_ctx: &ReducerContext) {

View File

@@ -0,0 +1 @@
// Combat 相关表、procedure 与 helper 落位点。

View File

@@ -0,0 +1 @@
// Inventory 相关表、procedure 与 helper 落位点。

View File

@@ -0,0 +1 @@
// NPC 相关表、procedure 与 helper 落位点。

View File

@@ -0,0 +1 @@
// Progression 相关表、procedure 与 helper 落位点。

View File

@@ -0,0 +1 @@
// Quest 相关表、procedure 与 helper 落位点。

View File

@@ -0,0 +1 @@
// Runtime item / treasure 相关表、procedure 与 helper 落位点。

View File

@@ -0,0 +1 @@
// Story session / story event 相关表、procedure 与 helper 落位点。

File diff suppressed because it is too large Load Diff

View File

@@ -1,8 +1,8 @@
use module_puzzle::{
PUZZLE_MAX_TAG_COUNT, PuzzleAgentMessageKind, PuzzleAgentMessageRole,
PuzzleAgentMessageSnapshot, PuzzleAgentSessionCreateInput, PuzzleAgentSessionGetInput,
PuzzleAgentSessionProcedureResult, PuzzleAgentSessionSnapshot, PuzzleAgentStage,
PuzzleAnchorPack, PuzzleDraftCompileInput, PuzzleGeneratedImageCandidate,
PUZZLE_MAX_TAG_COUNT, PuzzleAgentMessageFinalizeInput, PuzzleAgentMessageKind,
PuzzleAgentMessageRole, PuzzleAgentMessageSnapshot, PuzzleAgentSessionCreateInput,
PuzzleAgentSessionGetInput, PuzzleAgentSessionProcedureResult, PuzzleAgentSessionSnapshot,
PuzzleAgentStage, PuzzleAnchorPack, PuzzleDraftCompileInput, PuzzleGeneratedImageCandidate,
PuzzleGeneratedImagesSaveInput, PuzzlePublicationStatus, PuzzlePublishInput, PuzzleResultDraft,
PuzzleRunDragInput, PuzzleRunGetInput, PuzzleRunNextLevelInput, PuzzleRunProcedureResult,
PuzzleRunSnapshot, PuzzleRunStartInput, PuzzleRunSwapInput, PuzzleRuntimeLevelStatus,
@@ -158,6 +158,25 @@ pub fn submit_puzzle_agent_message(
}
}
#[spacetimedb::procedure]
pub fn finalize_puzzle_agent_message_turn(
ctx: &mut ProcedureContext,
input: PuzzleAgentMessageFinalizeInput,
) -> PuzzleAgentSessionProcedureResult {
match ctx.try_with_tx(|tx| finalize_puzzle_agent_message_turn_tx(tx, input.clone())) {
Ok(session) => PuzzleAgentSessionProcedureResult {
ok: true,
session_json: Some(serialize_json(&session)),
error_message: None,
},
Err(message) => PuzzleAgentSessionProcedureResult {
ok: false,
session_json: None,
error_message: Some(message),
},
}
}
#[spacetimedb::procedure]
pub fn compile_puzzle_agent_draft(
ctx: &mut ProcedureContext,
@@ -472,11 +491,9 @@ fn submit_puzzle_agent_message_tx(
ctx: &TxContext,
input: module_puzzle::PuzzleAgentMessageSubmitInput,
) -> Result<PuzzleAgentSessionSnapshot, String> {
let row = get_owned_session_row(ctx, &input.session_id, &input.owner_user_id)?;
get_owned_session_row(ctx, &input.session_id, &input.owner_user_id)?;
ensure_message_missing(ctx, &input.user_message_id)?;
let submitted_at = Timestamp::from_micros_since_unix_epoch(input.submitted_at_micros);
let next_anchor_pack = infer_anchor_pack(&row.seed_text, Some(&input.user_message_text));
let assistant_message_text = build_puzzle_assistant_reply(&next_anchor_pack);
ctx.db.puzzle_agent_message().insert(PuzzleAgentMessageRow {
message_id: input.user_message_id.clone(),
@@ -486,19 +503,75 @@ fn submit_puzzle_agent_message_tx(
text: input.user_message_text.clone(),
created_at: submitted_at,
});
let assistant_message_id = format!(
"{}assistant-{}",
input.session_id, input.submitted_at_micros
);
get_puzzle_agent_session_tx(
ctx,
PuzzleAgentSessionGetInput {
session_id: input.session_id,
owner_user_id: input.owner_user_id,
},
)
}
fn finalize_puzzle_agent_message_turn_tx(
ctx: &TxContext,
input: PuzzleAgentMessageFinalizeInput,
) -> Result<PuzzleAgentSessionSnapshot, String> {
let row = get_owned_session_row(ctx, &input.session_id, &input.owner_user_id)?;
let updated_at = Timestamp::from_micros_since_unix_epoch(input.updated_at_micros);
if let Some(error_message) = input
.error_message
.as_deref()
.map(str::trim)
.filter(|value| !value.is_empty())
{
replace_puzzle_agent_session(
ctx,
&row,
PuzzleAgentSessionRow {
session_id: row.session_id.clone(),
owner_user_id: row.owner_user_id.clone(),
seed_text: row.seed_text.clone(),
current_turn: row.current_turn,
progress_percent: row.progress_percent,
stage: row.stage,
anchor_pack_json: row.anchor_pack_json.clone(),
draft_json: row.draft_json.clone(),
last_assistant_reply: row.last_assistant_reply.clone(),
published_profile_id: row.published_profile_id.clone(),
created_at: row.created_at,
updated_at,
},
);
return Err(error_message.to_string());
}
let assistant_message_id = input
.assistant_message_id
.as_deref()
.map(str::trim)
.filter(|value| !value.is_empty())
.ok_or_else(|| "拼图 assistant_message_id 不能为空".to_string())?
.to_string();
let assistant_reply_text = input
.assistant_reply_text
.as_deref()
.map(str::trim)
.filter(|value| !value.is_empty())
.ok_or_else(|| "拼图 assistant_reply_text 不能为空".to_string())?
.to_string();
ensure_message_missing(ctx, &assistant_message_id)?;
let next_anchor_pack = deserialize_anchor_pack(&input.anchor_pack_json)?;
ctx.db.puzzle_agent_message().insert(PuzzleAgentMessageRow {
message_id: assistant_message_id,
session_id: input.session_id.clone(),
role: PuzzleAgentMessageRole::Assistant,
kind: PuzzleAgentMessageKind::Summary,
text: assistant_message_text.clone(),
created_at: submitted_at,
kind: PuzzleAgentMessageKind::Chat,
text: assistant_reply_text.clone(),
created_at: updated_at,
});
replace_puzzle_agent_session(
ctx,
&row,
@@ -507,14 +580,14 @@ fn submit_puzzle_agent_message_tx(
owner_user_id: row.owner_user_id.clone(),
seed_text: row.seed_text.clone(),
current_turn: row.current_turn.saturating_add(1),
progress_percent: (row.progress_percent + 18).min(82),
stage: PuzzleAgentStage::CollectingAnchors,
progress_percent: input.progress_percent.min(100),
stage: input.stage,
anchor_pack_json: serialize_json(&next_anchor_pack),
draft_json: row.draft_json.clone(),
last_assistant_reply: Some(assistant_message_text),
last_assistant_reply: Some(assistant_reply_text),
published_profile_id: row.published_profile_id.clone(),
created_at: row.created_at,
updated_at: submitted_at,
updated_at,
},
);
@@ -535,6 +608,15 @@ fn compile_puzzle_agent_draft_tx(
let anchor_pack = deserialize_anchor_pack(&row.anchor_pack_json)?;
let messages = list_session_messages(ctx, &row.session_id);
let draft = compile_result_draft(&anchor_pack, &messages);
// 创作中心的拼图草稿卡只是 Agent session 的列表投影,
// 每次编译结果页时同步 upsert保证后续能按 source_session_id 恢复聊天。
upsert_puzzle_draft_work_profile(
ctx,
&row.session_id,
&row.owner_user_id,
&draft,
input.compiled_at_micros,
)?;
let compiled_at = Timestamp::from_micros_since_unix_epoch(input.compiled_at_micros);
replace_puzzle_agent_session(
ctx,
@@ -601,6 +683,14 @@ fn save_puzzle_generated_images_tx(
} else {
PuzzleAgentStage::ImageRefining
};
// 结果页草稿封面和候选图发生变化后,草稿卡需要同步刷新。
upsert_puzzle_draft_work_profile(
ctx,
&row.session_id,
&row.owner_user_id,
&draft,
input.saved_at_micros,
)?;
replace_puzzle_agent_session(
ctx,
&row,
@@ -642,6 +732,14 @@ fn select_puzzle_cover_image_tx(
} else {
PuzzleAgentStage::ImageRefining
};
// 选定正式封面后,创作中心草稿卡要立即反映最新正式图。
upsert_puzzle_draft_work_profile(
ctx,
&row.session_id,
&row.owner_user_id,
&draft,
input.selected_at_micros,
)?;
replace_puzzle_agent_session(
ctx,
&row,
@@ -682,9 +780,10 @@ fn publish_puzzle_work_tx(
input.theme_tags.clone(),
)
.map_err(|error| error.to_string())?;
let (work_id, profile_id) = build_puzzle_work_ids_from_session_id(&input.session_id);
let mut profile = create_work_profile(
input.work_id.clone(),
input.profile_id.clone(),
work_id,
profile_id,
input.owner_user_id.clone(),
Some(input.session_id.clone()),
input.author_display_name.clone(),
@@ -996,6 +1095,42 @@ fn build_puzzle_work_profile_from_row(
})
}
fn build_puzzle_work_ids_from_session_id(session_id: &str) -> (String, String) {
let stable_suffix = session_id
.strip_prefix("puzzle-session-")
.unwrap_or(session_id);
(
format!("puzzle-work-{stable_suffix}"),
format!("puzzle-profile-{stable_suffix}"),
)
}
fn upsert_puzzle_draft_work_profile(
ctx: &TxContext,
session_id: &str,
owner_user_id: &str,
draft: &PuzzleResultDraft,
updated_at_micros: i64,
) -> Result<(), String> {
let (work_id, profile_id) = build_puzzle_work_ids_from_session_id(session_id);
if let Some(existing) = ctx.db.puzzle_work_profile().profile_id().find(&profile_id) {
if existing.publication_status == PuzzlePublicationStatus::Published {
return Ok(());
}
}
let profile = create_work_profile(
work_id,
profile_id,
owner_user_id.to_string(),
Some(session_id.to_string()),
"创作者".to_string(),
draft,
updated_at_micros,
)
.map_err(|error| error.to_string())?;
upsert_puzzle_work_profile(ctx, profile)
}
fn list_session_messages(ctx: &TxContext, session_id: &str) -> Vec<PuzzleAgentMessageSnapshot> {
let mut items = ctx
.db
@@ -1045,15 +1180,6 @@ fn build_puzzle_suggested_actions(
}
}
fn build_puzzle_assistant_reply(anchor_pack: &PuzzleAnchorPack) -> String {
format!(
"我先帮你收束成一版拼图方向:题材是“{}”,主体聚焦“{}”,氛围偏“{}”。",
anchor_pack.theme_promise.value,
anchor_pack.visual_subject.value,
anchor_pack.visual_mood.value
)
}
fn append_system_message(
ctx: &TxContext,
session_id: &str,

View File

@@ -0,0 +1 @@
// Browse history 相关表、procedure 与 helper 落位点。

View File

@@ -0,0 +1 @@
// Profile dashboard、wallet 与 played world 投影落位点。

View File

@@ -0,0 +1 @@
// Runtime settings 相关表、procedure 与 helper 落位点。

View File

@@ -0,0 +1 @@
// Runtime snapshot 与 save archive 相关表、procedure 与 helper 落位点。