Merge remote-tracking branch 'origin/master' into codex/ddd

# Conflicts:
#	docs/technical/README.md
#	docs/technical/RUST_API_SERVER_ROUTE_INDEX_2026-04-22.md
#	docs/technical/SPACETIMEDB_TABLE_CATALOG.md
#	scripts/generate-spacetime-bindings.mjs
#	server-rs/crates/api-server/src/app.rs
#	server-rs/crates/api-server/src/assets.rs
#	server-rs/crates/api-server/src/big_fish.rs
#	server-rs/crates/api-server/src/custom_world_ai.rs
#	server-rs/crates/api-server/src/llm.rs
#	server-rs/crates/api-server/src/main.rs
#	server-rs/crates/api-server/src/puzzle.rs
#	server-rs/crates/api-server/src/runtime_profile.rs
#	server-rs/crates/api-server/src/runtime_story/compat/ai.rs
#	server-rs/crates/api-server/src/runtime_story/compat/npc_actions.rs
#	server-rs/crates/api-server/src/runtime_story/compat/presentation.rs
#	server-rs/crates/api-server/src/runtime_story/compat/tests.rs
#	server-rs/crates/api-server/src/state.rs
#	server-rs/crates/module-auth/src/lib.rs
#	server-rs/crates/module-big-fish/src/lib.rs
#	server-rs/crates/module-custom-world/src/lib.rs
#	server-rs/crates/module-puzzle/src/lib.rs
#	server-rs/crates/module-runtime/src/lib.rs
#	server-rs/crates/spacetime-client/src/big_fish.rs
#	server-rs/crates/spacetime-client/src/lib.rs
#	server-rs/crates/spacetime-client/src/mapper.rs
#	server-rs/crates/spacetime-client/src/module_bindings/admin_disable_profile_redeem_code_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/admin_upsert_profile_redeem_code_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/advance_puzzle_next_level_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/append_ai_text_chunk_and_return_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/apply_chapter_progression_ledger_entry_and_return_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/attach_ai_result_reference_and_return_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/authorize_database_migration_operator_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/begin_story_session_and_return_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/big_fish_runtime_run_type.rs
#	server-rs/crates/spacetime-client/src/module_bindings/bind_asset_object_to_entity_and_return_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/cancel_ai_task_and_return_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/clear_platform_browse_history_and_return_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/compile_big_fish_draft_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/compile_custom_world_published_profile_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/compile_puzzle_agent_draft_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/complete_ai_stage_and_return_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/complete_ai_task_and_return_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/confirm_asset_object_and_return_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/consume_profile_wallet_points_and_return_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/continue_story_and_return_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/create_ai_task_and_return_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/create_battle_state_and_return_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/create_big_fish_session_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/create_custom_world_agent_session_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/create_profile_recharge_order_and_return_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/create_puzzle_agent_session_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/delete_big_fish_work_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/delete_custom_world_agent_session_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/delete_custom_world_profile_and_return_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/delete_puzzle_work_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/delete_runtime_snapshot_and_return_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/drag_puzzle_piece_or_group_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/execute_custom_world_agent_action_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/export_auth_store_snapshot_from_tables_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/export_database_migration_to_file_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/fail_ai_task_and_return_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/finalize_big_fish_agent_message_turn_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/finalize_custom_world_agent_message_turn_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/finalize_puzzle_agent_message_turn_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/generate_big_fish_asset_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/get_auth_store_snapshot_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/get_battle_state_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/get_big_fish_session_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/get_chapter_progression_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/get_custom_world_agent_card_detail_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/get_custom_world_agent_operation_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/get_custom_world_agent_session_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/get_custom_world_gallery_detail_by_code_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/get_custom_world_gallery_detail_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/get_custom_world_library_detail_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/get_player_progression_or_default_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/get_profile_dashboard_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/get_profile_play_stats_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/get_profile_recharge_center_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/get_profile_referral_invite_center_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/get_puzzle_agent_session_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/get_puzzle_gallery_detail_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/get_puzzle_run_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/get_puzzle_work_detail_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/get_runtime_inventory_state_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/get_runtime_setting_or_default_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/get_runtime_snapshot_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/get_story_session_state_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/grant_player_progression_experience_and_return_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/import_auth_store_snapshot_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/import_database_migration_from_file_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/import_database_migration_incremental_from_file_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/list_asset_history_and_return_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/list_big_fish_works_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/list_custom_world_gallery_entries_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/list_custom_world_profiles_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/list_custom_world_works_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/list_platform_browse_history_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/list_profile_save_archives_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/list_profile_wallet_ledger_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/list_puzzle_gallery_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/list_puzzle_works_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/mod.rs
#	server-rs/crates/spacetime-client/src/module_bindings/publish_big_fish_game_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/publish_custom_world_profile_and_return_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/publish_custom_world_world_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/publish_puzzle_work_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/record_big_fish_play_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/redeem_profile_referral_invite_code_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/redeem_profile_reward_code_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/refund_profile_wallet_points_and_return_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/resolve_combat_action_and_return_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/resolve_npc_battle_interaction_and_return_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/resolve_npc_interaction_and_return_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/resolve_npc_social_action_and_return_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/resolve_treasure_interaction_and_return_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/resume_profile_save_archive_and_return_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/revoke_database_migration_operator_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/save_puzzle_generated_images_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/select_puzzle_cover_image_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/start_puzzle_run_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/submit_big_fish_message_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/submit_custom_world_agent_message_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/submit_puzzle_agent_message_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/submit_puzzle_leaderboard_entry_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/swap_puzzle_pieces_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/unpublish_custom_world_profile_and_return_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/update_puzzle_work_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/upsert_auth_store_snapshot_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/upsert_chapter_progression_and_return_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/upsert_custom_world_agent_operation_progress_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/upsert_custom_world_profile_and_return_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/upsert_npc_state_and_return_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/upsert_platform_browse_history_and_return_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/upsert_runtime_setting_and_return_procedure.rs
#	server-rs/crates/spacetime-client/src/module_bindings/upsert_runtime_snapshot_and_return_procedure.rs
#	server-rs/crates/spacetime-module/src/auth/procedures.rs
#	server-rs/crates/spacetime-module/src/custom_world/mod.rs
#	server-rs/crates/spacetime-module/src/lib.rs
#	server-rs/crates/spacetime-module/src/migration.rs
#	server-rs/crates/spacetime-module/src/puzzle.rs
#	server-rs/crates/spacetime-module/src/runtime/profile.rs
#	src/components/platform-entry/PlatformEntryFlowShellImpl.tsx
#	src/components/rpg-entry/RpgEntryFlowShell.agent.interaction.test.tsx
#	src/services/aiService.ts
#	src/services/puzzle-runtime/puzzleRuntimeClient.ts
This commit is contained in:
kdletters
2026-05-02 03:35:59 +08:00
513 changed files with 52813 additions and 6013 deletions

View File

@@ -50,6 +50,8 @@ pub(super) struct AuthUserSnapshot {
pub(super) public_user_code: String,
pub(super) username: String,
pub(super) display_name: String,
#[serde(default)]
pub(super) avatar_url: Option<String>,
pub(super) phone_number_masked: Option<String>,
pub(super) login_method: String,
pub(super) binding_status: String,

View File

@@ -201,6 +201,7 @@ fn import_auth_store_snapshot_tx(
public_user_code: user.public_user_code,
username: user.username,
display_name: user.display_name,
avatar_url: user.avatar_url,
phone_number_masked: user.phone_number_masked,
phone_number_e164: stored_user.phone_number.clone(),
login_method: user.login_method,
@@ -332,6 +333,7 @@ fn export_auth_store_snapshot_from_tables_tx(
public_user_code: user.public_user_code,
username: user.username.clone(),
display_name: user.display_name,
avatar_url: user.avatar_url,
phone_number_masked: user.phone_number_masked,
login_method: user.login_method,
binding_status: user.binding_status,

View File

@@ -19,6 +19,7 @@ pub struct UserAccount {
pub(crate) public_user_code: String,
pub(crate) username: String,
pub(crate) display_name: String,
pub(crate) avatar_url: Option<String>,
pub(crate) phone_number_masked: Option<String>,
pub(crate) phone_number_e164: Option<String>,
pub(crate) login_method: String,

View File

@@ -124,6 +124,9 @@ pub(crate) fn generate_big_fish_asset_tx(
last_assistant_reply: Some(reply.clone()),
publish_ready: readiness.readiness.publish_ready,
play_count: session.play_count,
remix_count: session.remix_count,
like_count: session.like_count,
published_at: session.published_at,
created_at: session.created_at,
updated_at,
};
@@ -192,6 +195,9 @@ pub(crate) fn publish_big_fish_game_tx(
last_assistant_reply: Some("玩法已发布,可以进入测试运行态。".to_string()),
publish_ready: true,
play_count: session.play_count,
remix_count: session.remix_count,
like_count: session.like_count,
published_at: Some(published_at),
created_at: session.created_at,
updated_at: published_at,
};

View File

@@ -2,7 +2,9 @@ use crate::big_fish::tables::{
big_fish_agent_message, big_fish_creation_session, big_fish_runtime_run,
};
use crate::runtime::{
ProfilePlayedWorkUpsertInput, add_profile_observed_play_time, upsert_profile_played_work,
ProfilePlayedWorkUpsertInput, PublicWorkLikeRecordInput, PublicWorkPlayRecordInput,
add_profile_observed_play_time, count_recent_public_work_plays, record_public_work_like,
record_public_work_play, upsert_profile_played_work,
};
use crate::*;
use module_big_fish::{EvaluateBigFishPublishReadinessCommand, evaluate_publish_readiness};
@@ -125,6 +127,51 @@ pub fn record_big_fish_play(
}
}
#[spacetimedb::procedure]
pub fn record_big_fish_like(
ctx: &mut ProcedureContext,
input: BigFishWorkLikeRecordInput,
) -> BigFishWorksProcedureResult {
match ctx.try_with_tx(|tx| record_big_fish_like_tx(tx, input.clone())) {
Ok(items) => match serde_json::to_string(&items) {
Ok(items_json) => BigFishWorksProcedureResult {
ok: true,
items_json: Some(items_json),
error_message: None,
},
Err(error) => BigFishWorksProcedureResult {
ok: false,
items_json: None,
error_message: Some(error.to_string()),
},
},
Err(message) => BigFishWorksProcedureResult {
ok: false,
items_json: None,
error_message: Some(message),
},
}
}
#[spacetimedb::procedure]
pub fn remix_big_fish_work(
    ctx: &mut ProcedureContext,
    input: BigFishWorkRemixInput,
) -> BigFishSessionProcedureResult {
    // Fork a published work into a new draft session within one transaction
    // and report the outcome on the procedure result envelope.
    let (ok, session, error_message) =
        match ctx.try_with_tx(|tx| remix_big_fish_work_tx(tx, input.clone())) {
            Ok(session) => (true, Some(session), None),
            Err(message) => (false, None, Some(message)),
        };
    BigFishSessionProcedureResult {
        ok,
        session,
        error_message,
    }
}
#[spacetimedb::procedure]
pub fn submit_big_fish_message(
ctx: &mut ProcedureContext,
@@ -227,6 +274,9 @@ pub(crate) fn create_big_fish_session_tx(
last_assistant_reply: Some(input.welcome_message_text.clone()),
publish_ready: false,
play_count: 0,
remix_count: 0,
like_count: 0,
published_at: None,
created_at,
updated_at: created_at,
});
@@ -269,6 +319,7 @@ pub(crate) fn list_big_fish_works_tx(
input: BigFishWorksListInput,
) -> Result<Vec<BigFishWorkSummarySnapshot>, String> {
validate_works_list_input(&input).map_err(|error| error.to_string())?;
let now_micros = ctx.timestamp.to_micros_since_unix_epoch();
let mut items = ctx
.db
@@ -281,7 +332,7 @@ pub(crate) fn list_big_fish_works_tx(
row.owner_user_id == input.owner_user_id && should_include_big_fish_work(ctx, row)
})
.map(|row| build_big_fish_work_summary(ctx, &row))
.map(|row| build_big_fish_work_summary(ctx, &row, now_micros))
.collect::<Result<Vec<_>, _>>()?;
items.sort_by(|left, right| {
@@ -426,6 +477,9 @@ pub(crate) fn submit_big_fish_message_tx(
last_assistant_reply: session.last_assistant_reply.clone(),
publish_ready: session.publish_ready,
play_count: session.play_count,
remix_count: session.remix_count,
like_count: session.like_count,
published_at: session.published_at,
created_at: session.created_at,
updated_at: submitted_at,
};
@@ -473,6 +527,9 @@ pub(crate) fn finalize_big_fish_agent_message_turn_tx(
last_assistant_reply: session.last_assistant_reply.clone(),
publish_ready: session.publish_ready,
play_count: session.play_count,
remix_count: session.remix_count,
like_count: session.like_count,
published_at: session.published_at,
created_at: session.created_at,
updated_at,
};
@@ -528,6 +585,9 @@ pub(crate) fn finalize_big_fish_agent_message_turn_tx(
last_assistant_reply: Some(assistant_reply_text),
publish_ready: session.publish_ready,
play_count: session.play_count,
remix_count: session.remix_count,
like_count: session.like_count,
published_at: session.published_at,
created_at: session.created_at,
updated_at,
};
@@ -592,6 +652,9 @@ pub(crate) fn compile_big_fish_draft_tx(
last_assistant_reply: Some(reply.clone()),
publish_ready: readiness.readiness.publish_ready,
play_count: session.play_count,
remix_count: session.remix_count,
like_count: session.like_count,
published_at: session.published_at,
created_at: session.created_at,
updated_at: compiled_at,
};
@@ -667,6 +730,15 @@ pub(crate) fn record_big_fish_play_tx(
input.elapsed_ms,
input.played_at_micros,
)?;
record_public_work_play(
ctx,
PublicWorkPlayRecordInput {
source_type: "big-fish".to_string(),
owner_user_id: session.owner_user_id.clone(),
profile_id: session.session_id.clone(),
played_at_micros: input.played_at_micros,
},
)?;
let next_session = BigFishCreationSession {
session_id: session.session_id.clone(),
owner_user_id: session.owner_user_id.clone(),
@@ -681,16 +753,184 @@ pub(crate) fn record_big_fish_play_tx(
publish_ready: session.publish_ready,
// 中文注释:正式进入已发布作品时同时累加作品播放数,用户侧去重由 profile_played_world 保证。
play_count: session.play_count.saturating_add(1),
remix_count: session.remix_count,
like_count: session.like_count,
published_at: session.published_at,
created_at: session.created_at,
updated_at: played_at,
};
replace_big_fish_session(ctx, &session, next_session);
list_big_fish_works_tx(
list_big_fish_works_tx(ctx, build_public_big_fish_gallery_list_input())
}
pub(crate) fn record_big_fish_like_tx(
ctx: &ReducerContext,
input: BigFishWorkLikeRecordInput,
) -> Result<Vec<BigFishWorkSummarySnapshot>, String> {
let session_id = input.session_id.trim();
let user_id = input.user_id.trim();
if session_id.is_empty() || user_id.is_empty() {
return Err("big_fish like 参数不能为空".to_string());
}
let session = ctx
.db
.big_fish_creation_session()
.session_id()
.find(&session_id.to_string())
.filter(|row| row.stage == BigFishCreationStage::Published)
.ok_or_else(|| "big_fish 已发布作品不存在,无法点赞".to_string())?;
let inserted_like = record_public_work_like(
ctx,
BigFishWorksListInput {
owner_user_id: String::new(),
published_only: true,
PublicWorkLikeRecordInput {
source_type: "big-fish".to_string(),
owner_user_id: session.owner_user_id.clone(),
profile_id: session.session_id.clone(),
user_id: user_id.to_string(),
liked_at_micros: input.liked_at_micros,
},
)?;
if inserted_like {
let liked_at = Timestamp::from_micros_since_unix_epoch(input.liked_at_micros);
let next_session = BigFishCreationSession {
session_id: session.session_id.clone(),
owner_user_id: session.owner_user_id.clone(),
seed_text: session.seed_text.clone(),
current_turn: session.current_turn,
progress_percent: session.progress_percent,
stage: session.stage,
anchor_pack_json: session.anchor_pack_json.clone(),
draft_json: session.draft_json.clone(),
asset_coverage_json: session.asset_coverage_json.clone(),
last_assistant_reply: session.last_assistant_reply.clone(),
publish_ready: session.publish_ready,
play_count: session.play_count,
remix_count: session.remix_count,
like_count: session.like_count.saturating_add(1),
published_at: session.published_at,
created_at: session.created_at,
updated_at: liked_at,
};
replace_big_fish_session(ctx, &session, next_session);
}
list_big_fish_works_tx(ctx, build_public_big_fish_gallery_list_input())
}
/// Forks a published big-fish work into a brand-new draft session owned by
/// `target_owner_user_id`, bumping the source's `remix_count`.
///
/// Fails when any id is blank, when the target session or welcome message id
/// already exists (idempotency guard against client retries), or when the
/// source session is not in the `Published` stage. Returns the snapshot of
/// the freshly created target session.
fn remix_big_fish_work_tx(
    ctx: &ReducerContext,
    input: BigFishWorkRemixInput,
) -> Result<BigFishSessionSnapshot, String> {
    let source_session_id = input.source_session_id.trim();
    let target_session_id = input.target_session_id.trim();
    let target_owner_user_id = input.target_owner_user_id.trim();
    let welcome_message_id = input.welcome_message_id.trim();
    if source_session_id.is_empty()
        || target_session_id.is_empty()
        || target_owner_user_id.is_empty()
        || welcome_message_id.is_empty()
    {
        return Err("big_fish remix 参数不能为空".to_string());
    }
    // Both the target session id and the welcome message id are supplied by
    // the caller, so an existing row means this remix was already applied.
    if ctx
        .db
        .big_fish_creation_session()
        .session_id()
        .find(&target_session_id.to_string())
        .is_some()
    {
        return Err("big_fish remix 目标 session 已存在".to_string());
    }
    if ctx
        .db
        .big_fish_agent_message()
        .message_id()
        .find(&welcome_message_id.to_string())
        .is_some()
    {
        return Err("big_fish remix 消息已存在".to_string());
    }
    // Only published works may be remixed.
    let source = ctx
        .db
        .big_fish_creation_session()
        .session_id()
        .find(&source_session_id.to_string())
        .filter(|row| row.stage == BigFishCreationStage::Published)
        .ok_or_else(|| "big_fish 已发布源作品不存在".to_string())?;
    let remixed_at = Timestamp::from_micros_since_unix_epoch(input.remixed_at_micros);
    // Bump the source's remix counter (saturating to avoid overflow) and
    // touch its updated_at; every other field is carried over unchanged.
    let next_source = BigFishCreationSession {
        session_id: source.session_id.clone(),
        owner_user_id: source.owner_user_id.clone(),
        seed_text: source.seed_text.clone(),
        current_turn: source.current_turn,
        progress_percent: source.progress_percent,
        stage: source.stage,
        anchor_pack_json: source.anchor_pack_json.clone(),
        draft_json: source.draft_json.clone(),
        asset_coverage_json: source.asset_coverage_json.clone(),
        last_assistant_reply: source.last_assistant_reply.clone(),
        publish_ready: source.publish_ready,
        play_count: source.play_count,
        remix_count: source.remix_count.saturating_add(1),
        like_count: source.like_count,
        published_at: source.published_at,
        created_at: source.created_at,
        updated_at: remixed_at,
    };
    replace_big_fish_session(ctx, &source, next_source);
    // The fork starts as an unpublished DraftReady session (turn 1, 80%
    // progress) with zeroed interaction counters, reusing the source's
    // seed/anchor/draft/coverage payloads verbatim.
    let target_session = BigFishCreationSession {
        session_id: target_session_id.to_string(),
        owner_user_id: target_owner_user_id.to_string(),
        seed_text: source.seed_text.clone(),
        current_turn: 1,
        progress_percent: 80,
        stage: BigFishCreationStage::DraftReady,
        anchor_pack_json: source.anchor_pack_json.clone(),
        draft_json: source.draft_json.clone(),
        asset_coverage_json: source.asset_coverage_json.clone(),
        last_assistant_reply: Some("已从公开作品 Remix 出新的大鱼吃小鱼草稿。".to_string()),
        publish_ready: source.publish_ready,
        play_count: 0,
        remix_count: 0,
        like_count: 0,
        published_at: None,
        created_at: remixed_at,
        updated_at: remixed_at,
    };
    ctx.db.big_fish_creation_session().insert(target_session);
    // Seed the new session's chat with a single assistant summary message.
    ctx.db.big_fish_agent_message().insert(BigFishAgentMessage {
        message_id: welcome_message_id.to_string(),
        session_id: target_session_id.to_string(),
        role: BigFishAgentMessageRole::Assistant,
        kind: BigFishAgentMessageKind::Summary,
        text: "已复制公开作品为你的草稿。".to_string(),
        created_at: remixed_at,
    });
    // Copy every asset slot, rewriting the source session id embedded in the
    // slot id so the clone points at the target session.
    // NOTE(review): assumes slot ids embed the session id exactly once —
    // `str::replace` rewrites every occurrence; confirm slot-id format.
    for slot in list_big_fish_asset_slots(ctx, &source.session_id) {
        upsert_big_fish_asset_slot(
            ctx,
            BigFishAssetSlotSnapshot {
                slot_id: slot.slot_id.replace(&source.session_id, target_session_id),
                session_id: target_session_id.to_string(),
                asset_kind: slot.asset_kind,
                level: slot.level,
                motion_key: slot.motion_key,
                status: slot.status,
                asset_url: slot.asset_url,
                prompt_snapshot: slot.prompt_snapshot,
                updated_at_micros: input.remixed_at_micros,
            },
        );
    }
    // Return the freshly created target session as a snapshot.
    get_big_fish_session_tx(
        ctx,
        BigFishSessionGetInput {
            session_id: target_session_id.to_string(),
            owner_user_id: target_owner_user_id.to_string(),
        },
    )
}
@@ -747,6 +987,7 @@ pub(crate) fn build_big_fish_session_snapshot(
pub(crate) fn build_big_fish_work_summary(
ctx: &ReducerContext,
row: &BigFishCreationSession,
now_micros: i64,
) -> Result<BigFishWorkSummarySnapshot, String> {
let draft = row
.draft_json
@@ -809,9 +1050,29 @@ pub(crate) fn build_big_fish_work_summary(
level_motion_ready_count: coverage.level_motion_ready_count,
background_ready: coverage.background_ready,
play_count: row.play_count,
remix_count: row.remix_count,
like_count: row.like_count,
recent_play_count_7d: count_recent_public_work_plays(
ctx,
"big-fish",
&row.session_id,
now_micros,
),
published_at_micros: row
.published_at
.or_else(|| (row.stage == BigFishCreationStage::Published).then_some(row.updated_at))
.map(|value| value.to_micros_since_unix_epoch()),
})
}
/// Builds the canonical list input used to refresh the public big-fish
/// gallery after a play/like/remix mutation.
fn build_public_big_fish_gallery_list_input() -> BigFishWorksListInput {
    BigFishWorksListInput {
        // The published_only branch does not filter by owner; a non-empty
        // placeholder owner id keeps older deployed modules' up-front input
        // validation satisfied.
        owner_user_id: PUBLIC_BIG_FISH_GALLERY_OWNER_USER_ID.to_string(),
        published_only: true,
    }
}
pub(crate) fn replace_big_fish_session(
ctx: &ReducerContext,
current: &BigFishCreationSession,
@@ -846,6 +1107,13 @@ mod tests {
last_assistant_reply: Some("欢迎来到大鱼吃小鱼共创。".to_string()),
publish_ready: false,
play_count: 0,
remix_count: 0,
like_count: 0,
published_at: if stage == BigFishCreationStage::Published {
Some(Timestamp::from_micros_since_unix_epoch(1))
} else {
None
},
created_at: Timestamp::from_micros_since_unix_epoch(1),
updated_at: Timestamp::from_micros_since_unix_epoch(1),
}

View File

@@ -17,9 +17,16 @@ pub struct BigFishCreationSession {
pub(crate) asset_coverage_json: String,
pub(crate) last_assistant_reply: Option<String>,
pub(crate) publish_ready: bool,
pub(crate) play_count: u32,
pub(crate) created_at: Timestamp,
pub(crate) updated_at: Timestamp,
#[default(0)]
pub(crate) play_count: u32,
#[default(0)]
pub(crate) remix_count: u32,
#[default(0)]
pub(crate) like_count: u32,
#[default(None::<Timestamp>)]
pub(crate) published_at: Option<Timestamp>,
}
#[spacetimedb::table(

View File

@@ -28,6 +28,13 @@ pub struct CustomWorldProfile {
profile_payload_json: String,
playable_npc_count: u32,
landmark_count: u32,
// 公开消费计数随 profile 真相持久化,发布、编辑和取消发布都不能重置。
#[default(0)]
play_count: u32,
#[default(0)]
remix_count: u32,
#[default(0)]
like_count: u32,
author_display_name: String,
published_at: Option<Timestamp>,
// 软删除后保留 profile 真相,供审计与幂等删除使用。
@@ -175,6 +182,13 @@ pub struct CustomWorldGalleryEntry {
theme_mode: CustomWorldThemeMode,
playable_npc_count: u32,
landmark_count: u32,
// 画廊读模型直接同步互动计数,避免前端临时把评分或游玩数改名成点赞。
#[default(0)]
play_count: u32,
#[default(0)]
remix_count: u32,
#[default(0)]
like_count: u32,
published_at: Timestamp,
updated_at: Timestamp,
}
@@ -979,6 +993,69 @@ pub fn get_custom_world_gallery_detail_by_code(
}
}
#[spacetimedb::procedure]
pub fn remix_custom_world_profile(
    ctx: &mut ProcedureContext,
    input: module_custom_world::CustomWorldProfileRemixInput,
) -> CustomWorldLibraryMutationResult {
    // Fork the profile inside one transaction. The gallery entry comes back
    // as an Option from the record helper and is forwarded as-is.
    let (ok, entry, gallery_entry, error_message) =
        match ctx.try_with_tx(|tx| remix_custom_world_profile_record(tx, input.clone())) {
            Ok((entry, gallery_entry)) => (true, Some(entry), gallery_entry, None),
            Err(message) => (false, None, None, Some(message)),
        };
    CustomWorldLibraryMutationResult {
        ok,
        entry,
        gallery_entry,
        error_message,
    }
}
#[spacetimedb::procedure]
pub fn record_custom_world_profile_play(
    ctx: &mut ProcedureContext,
    input: module_custom_world::CustomWorldProfilePlayRecordInput,
) -> CustomWorldLibraryMutationResult {
    // Record the play inside one transaction; on success both the profile
    // snapshot and its gallery entry are returned to the caller.
    let (ok, entry, gallery_entry, error_message) =
        match ctx.try_with_tx(|tx| record_custom_world_profile_play_record(tx, input.clone())) {
            Ok((entry, gallery_entry)) => (true, Some(entry), Some(gallery_entry), None),
            Err(message) => (false, None, None, Some(message)),
        };
    CustomWorldLibraryMutationResult {
        ok,
        entry,
        gallery_entry,
        error_message,
    }
}
#[spacetimedb::procedure]
pub fn record_custom_world_profile_like(
    ctx: &mut ProcedureContext,
    input: module_custom_world::CustomWorldProfileLikeRecordInput,
) -> CustomWorldLibraryMutationResult {
    // Record the like inside one transaction; on success both the profile
    // snapshot and its gallery entry are returned to the caller.
    let (ok, entry, gallery_entry, error_message) =
        match ctx.try_with_tx(|tx| record_custom_world_profile_like_record(tx, input.clone())) {
            Ok((entry, gallery_entry)) => (true, Some(entry), Some(gallery_entry), None),
            Err(message) => (false, None, None, Some(message)),
        };
    CustomWorldLibraryMutationResult {
        ok,
        entry,
        gallery_entry,
        error_message,
    }
}
#[spacetimedb::procedure]
pub fn list_custom_world_works(
ctx: &mut ProcedureContext,
@@ -1134,6 +1211,9 @@ fn upsert_custom_world_profile_record(
profile_payload_json: input.profile_payload_json.clone(),
playable_npc_count: input.playable_npc_count,
landmark_count: input.landmark_count,
play_count: existing.play_count,
remix_count: existing.remix_count,
like_count: existing.like_count,
author_display_name: input.author_display_name.clone(),
published_at: existing.published_at,
deleted_at: None,
@@ -1156,6 +1236,9 @@ fn upsert_custom_world_profile_record(
profile_payload_json: input.profile_payload_json.clone(),
playable_npc_count: input.playable_npc_count,
landmark_count: input.landmark_count,
play_count: 0,
remix_count: 0,
like_count: 0,
author_display_name: input.author_display_name.clone(),
published_at: None,
deleted_at: None,
@@ -1300,6 +1383,9 @@ fn publish_custom_world_profile_record(
profile_payload_json: existing.profile_payload_json.clone(),
playable_npc_count: existing.playable_npc_count,
landmark_count: existing.landmark_count,
play_count: existing.play_count,
remix_count: existing.remix_count,
like_count: existing.like_count,
author_display_name: input.author_display_name.clone(),
published_at: Some(published_at),
deleted_at: None,
@@ -1363,6 +1449,9 @@ fn unpublish_custom_world_profile_record(
profile_payload_json: existing.profile_payload_json.clone(),
playable_npc_count: existing.playable_npc_count,
landmark_count: existing.landmark_count,
play_count: existing.play_count,
remix_count: existing.remix_count,
like_count: existing.like_count,
author_display_name: input.author_display_name.clone(),
published_at: None,
deleted_at: None,
@@ -1422,6 +1511,9 @@ fn delete_custom_world_profile_record(
profile_payload_json: existing.profile_payload_json.clone(),
playable_npc_count: existing.playable_npc_count,
landmark_count: existing.landmark_count,
play_count: existing.play_count,
remix_count: existing.remix_count,
like_count: existing.like_count,
author_display_name: existing.author_display_name.clone(),
published_at: None,
deleted_at: Some(deleted_at),
@@ -1461,7 +1553,7 @@ fn list_custom_world_gallery_snapshots(
.db
.custom_world_gallery_entry()
.iter()
.map(|row| build_custom_world_gallery_entry_snapshot(&row))
.map(|row| build_custom_world_gallery_entry_snapshot(ctx, &row))
.collect::<Vec<_>>();
entries.sort_by(|left, right| {
@@ -1508,7 +1600,7 @@ fn get_custom_world_library_detail_record(
profile.as_ref().map(build_custom_world_profile_snapshot),
gallery_entry
.as_ref()
.map(build_custom_world_gallery_entry_snapshot),
.map(|row| build_custom_world_gallery_entry_snapshot(ctx, row)),
))
}
@@ -1546,7 +1638,7 @@ fn get_custom_world_gallery_detail_record(
profile.as_ref().map(build_custom_world_profile_snapshot),
gallery_entry
.as_ref()
.map(build_custom_world_gallery_entry_snapshot),
.map(|row| build_custom_world_gallery_entry_snapshot(ctx, row)),
))
}
@@ -1588,7 +1680,273 @@ fn get_custom_world_gallery_detail_record_by_code(
profile.as_ref().map(build_custom_world_profile_snapshot),
gallery_entry
.as_ref()
.map(build_custom_world_gallery_entry_snapshot),
.map(|row| build_custom_world_gallery_entry_snapshot(ctx, row)),
))
}
// Remix a published custom-world work into a fresh draft for another user.
//
// Flow:
// 1. Validate that all four id parameters and the author display name are non-blank.
// 2. Look up the source profile; it must be owned by `source_owner_user_id`,
//    be Published, not soft-deleted, and carry a `published_at` timestamp.
// 3. Bump the source's `remix_count` (saturating) via delete + re-insert,
//    then re-sync the source's gallery entry.
// 4. Create (or overwrite) the target user's draft row, copying the content
//    fields but resetting popularity counters and publication state.
//
// Returns the new draft snapshot plus the refreshed source gallery snapshot.
fn remix_custom_world_profile_record(
    ctx: &ReducerContext,
    input: module_custom_world::CustomWorldProfileRemixInput,
) -> Result<
    (
        CustomWorldProfileSnapshot,
        Option<CustomWorldGalleryEntrySnapshot>,
    ),
    String,
> {
    let source_owner_user_id = input.source_owner_user_id.trim();
    let source_profile_id = input.source_profile_id.trim();
    let target_owner_user_id = input.target_owner_user_id.trim();
    let target_profile_id = input.target_profile_id.trim();
    // Reject blank identifiers up front (error text is user-facing copy; do not edit).
    if source_owner_user_id.is_empty()
        || source_profile_id.is_empty()
        || target_owner_user_id.is_empty()
        || target_profile_id.is_empty()
    {
        return Err("custom_world remix 参数不能为空".to_string());
    }
    if input.author_display_name.trim().is_empty() {
        return Err("custom_world remix 作者名不能为空".to_string());
    }
    // Only a live, published work may be remixed.
    let source = ctx
        .db
        .custom_world_profile()
        .profile_id()
        .find(&source_profile_id.to_string())
        .filter(|row| row.owner_user_id == source_owner_user_id)
        .filter(|row| {
            row.publication_status == CustomWorldPublicationStatus::Published
                && row.deleted_at.is_none()
                && row.published_at.is_some()
        })
        .ok_or_else(|| "custom_world 已发布源作品不存在,无法改编".to_string())?;
    let remixed_at = Timestamp::from_micros_since_unix_epoch(input.remixed_at_micros);
    // Update-by-replacement: delete the source row, then insert a copy with
    // remix_count incremented and updated_at refreshed.
    ctx.db
        .custom_world_profile()
        .profile_id()
        .delete(&source.profile_id);
    let next_source = CustomWorldProfile {
        profile_id: source.profile_id.clone(),
        owner_user_id: source.owner_user_id.clone(),
        public_work_code: source.public_work_code.clone(),
        author_public_user_code: source.author_public_user_code.clone(),
        source_agent_session_id: source.source_agent_session_id.clone(),
        publication_status: source.publication_status,
        world_name: source.world_name.clone(),
        subtitle: source.subtitle.clone(),
        summary_text: source.summary_text.clone(),
        theme_mode: source.theme_mode,
        cover_image_src: source.cover_image_src.clone(),
        profile_payload_json: source.profile_payload_json.clone(),
        playable_npc_count: source.playable_npc_count,
        landmark_count: source.landmark_count,
        play_count: source.play_count,
        // The only counter mutated here; saturating guards against overflow.
        remix_count: source.remix_count.saturating_add(1),
        like_count: source.like_count,
        author_display_name: source.author_display_name.clone(),
        published_at: source.published_at,
        deleted_at: source.deleted_at,
        created_at: source.created_at,
        updated_at: remixed_at,
    };
    let updated_source = ctx.db.custom_world_profile().insert(next_source);
    let source_gallery = sync_custom_world_gallery_entry_from_profile(ctx, &updated_source)?;
    // The remix becomes a draft for the target user: content fields are copied,
    // but the source work's popularity counters are not carried over.
    let draft = CustomWorldProfile {
        profile_id: target_profile_id.to_string(),
        owner_user_id: target_owner_user_id.to_string(),
        public_work_code: None,
        author_public_user_code: None,
        source_agent_session_id: None,
        publication_status: CustomWorldPublicationStatus::Draft,
        world_name: source.world_name.clone(),
        subtitle: source.subtitle.clone(),
        summary_text: source.summary_text.clone(),
        theme_mode: source.theme_mode,
        cover_image_src: source.cover_image_src.clone(),
        profile_payload_json: source.profile_payload_json.clone(),
        playable_npc_count: source.playable_npc_count,
        landmark_count: source.landmark_count,
        play_count: 0,
        remix_count: 0,
        like_count: 0,
        author_display_name: input.author_display_name.trim().to_string(),
        published_at: None,
        deleted_at: None,
        created_at: remixed_at,
        updated_at: remixed_at,
    };
    // Replace any draft the target user already stored under this profile_id.
    // NOTE(review): a row with the same profile_id but a different owner would
    // survive this delete and make the insert below conflict on the primary
    // key — confirm callers guarantee profile_id uniqueness per target user.
    if let Some(existing_target) = ctx
        .db
        .custom_world_profile()
        .profile_id()
        .find(&target_profile_id.to_string())
        .filter(|row| row.owner_user_id == target_owner_user_id)
    {
        ctx.db
            .custom_world_profile()
            .profile_id()
            .delete(&existing_target.profile_id);
    }
    let inserted_draft = ctx.db.custom_world_profile().insert(draft);
    Ok((
        build_custom_world_profile_snapshot(&inserted_draft),
        Some(source_gallery),
    ))
}
// Record one play of a published custom-world work.
//
// First persists the play into the shared public-work play ledger (used for
// the 7-day recency stats), then bumps the profile's own `play_count`
// (saturating) via the table's delete + re-insert update pattern, and finally
// re-syncs the derived gallery entry. Returns the updated profile and gallery
// snapshots.
fn record_custom_world_profile_play_record(
    ctx: &ReducerContext,
    input: module_custom_world::CustomWorldProfilePlayRecordInput,
) -> Result<(CustomWorldProfileSnapshot, CustomWorldGalleryEntrySnapshot), String> {
    let owner_user_id = input.owner_user_id.trim();
    let profile_id = input.profile_id.trim();
    if owner_user_id.is_empty() || profile_id.is_empty() {
        return Err("custom_world play 参数不能为空".to_string());
    }
    // Plays are only counted against live, published works.
    let existing = ctx
        .db
        .custom_world_profile()
        .profile_id()
        .find(&profile_id.to_string())
        .filter(|row| row.owner_user_id == owner_user_id)
        .filter(|row| {
            row.publication_status == CustomWorldPublicationStatus::Published
                && row.deleted_at.is_none()
                && row.published_at.is_some()
        })
        .ok_or_else(|| "custom_world 已发布作品不存在,无法记录游玩".to_string())?;
    let played_at = Timestamp::from_micros_since_unix_epoch(input.played_at_micros);
    // Shared ledger write happens before the per-profile counter update;
    // on failure the profile row is left untouched.
    record_public_work_play(
        ctx,
        PublicWorkPlayRecordInput {
            source_type: "custom-world".to_string(),
            owner_user_id: owner_user_id.to_string(),
            profile_id: profile_id.to_string(),
            played_at_micros: input.played_at_micros,
        },
    )?;
    ctx.db
        .custom_world_profile()
        .profile_id()
        .delete(&existing.profile_id);
    let next_row = CustomWorldProfile {
        profile_id: existing.profile_id.clone(),
        owner_user_id: existing.owner_user_id.clone(),
        public_work_code: existing.public_work_code.clone(),
        author_public_user_code: existing.author_public_user_code.clone(),
        source_agent_session_id: existing.source_agent_session_id.clone(),
        publication_status: existing.publication_status,
        world_name: existing.world_name.clone(),
        subtitle: existing.subtitle.clone(),
        summary_text: existing.summary_text.clone(),
        theme_mode: existing.theme_mode,
        cover_image_src: existing.cover_image_src.clone(),
        profile_payload_json: existing.profile_payload_json.clone(),
        playable_npc_count: existing.playable_npc_count,
        landmark_count: existing.landmark_count,
        // The only counter mutated here; saturating guards against overflow.
        play_count: existing.play_count.saturating_add(1),
        remix_count: existing.remix_count,
        like_count: existing.like_count,
        author_display_name: existing.author_display_name.clone(),
        published_at: existing.published_at,
        deleted_at: existing.deleted_at,
        created_at: existing.created_at,
        updated_at: played_at,
    };
    let inserted = ctx.db.custom_world_profile().insert(next_row);
    let gallery_entry = sync_custom_world_gallery_entry_from_profile(ctx, &inserted)?;
    Ok((
        build_custom_world_profile_snapshot(&inserted),
        gallery_entry,
    ))
}
// Record a like from `user_id` on a published custom-world work.
//
// `record_public_work_like` is the source of truth for like uniqueness: it
// returns `false` when this user has already liked the work, in which case
// the counters are left untouched and the current snapshots are returned
// (idempotent path). Only a newly inserted like bumps `like_count`
// (saturating) via the delete + re-insert update pattern.
fn record_custom_world_profile_like_record(
    ctx: &ReducerContext,
    input: module_custom_world::CustomWorldProfileLikeRecordInput,
) -> Result<(CustomWorldProfileSnapshot, CustomWorldGalleryEntrySnapshot), String> {
    let owner_user_id = input.owner_user_id.trim();
    let profile_id = input.profile_id.trim();
    let user_id = input.user_id.trim();
    if owner_user_id.is_empty() || profile_id.is_empty() || user_id.is_empty() {
        return Err("custom_world like 参数不能为空".to_string());
    }
    // Likes are only accepted on live, published works.
    let existing = ctx
        .db
        .custom_world_profile()
        .profile_id()
        .find(&profile_id.to_string())
        .filter(|row| row.owner_user_id == owner_user_id)
        .filter(|row| {
            row.publication_status == CustomWorldPublicationStatus::Published
                && row.deleted_at.is_none()
                && row.published_at.is_some()
        })
        .ok_or_else(|| "custom_world 已发布作品不存在,无法点赞".to_string())?;
    let liked_at = Timestamp::from_micros_since_unix_epoch(input.liked_at_micros);
    let inserted_like = record_public_work_like(
        ctx,
        PublicWorkLikeRecordInput {
            source_type: "custom-world".to_string(),
            owner_user_id: owner_user_id.to_string(),
            profile_id: profile_id.to_string(),
            user_id: user_id.to_string(),
            liked_at_micros: input.liked_at_micros,
        },
    )?;
    // Duplicate like: return current state without mutating the profile row.
    if !inserted_like {
        let gallery_entry = sync_custom_world_gallery_entry_from_profile(ctx, &existing)?;
        return Ok((
            build_custom_world_profile_snapshot(&existing),
            gallery_entry,
        ));
    }
    ctx.db
        .custom_world_profile()
        .profile_id()
        .delete(&existing.profile_id);
    let next_row = CustomWorldProfile {
        profile_id: existing.profile_id.clone(),
        owner_user_id: existing.owner_user_id.clone(),
        public_work_code: existing.public_work_code.clone(),
        author_public_user_code: existing.author_public_user_code.clone(),
        source_agent_session_id: existing.source_agent_session_id.clone(),
        publication_status: existing.publication_status,
        world_name: existing.world_name.clone(),
        subtitle: existing.subtitle.clone(),
        summary_text: existing.summary_text.clone(),
        theme_mode: existing.theme_mode,
        cover_image_src: existing.cover_image_src.clone(),
        profile_payload_json: existing.profile_payload_json.clone(),
        playable_npc_count: existing.playable_npc_count,
        landmark_count: existing.landmark_count,
        play_count: existing.play_count,
        remix_count: existing.remix_count,
        // The only counter mutated here; saturating guards against overflow.
        like_count: existing.like_count.saturating_add(1),
        author_display_name: existing.author_display_name.clone(),
        published_at: existing.published_at,
        deleted_at: existing.deleted_at,
        created_at: existing.created_at,
        updated_at: liked_at,
    };
    let inserted = ctx.db.custom_world_profile().insert(next_row);
    let gallery_entry = sync_custom_world_gallery_entry_from_profile(ctx, &inserted)?;
    Ok((
        build_custom_world_profile_snapshot(&inserted),
        gallery_entry,
    ))
}
@@ -4438,13 +4796,16 @@ fn sync_custom_world_gallery_entry_from_profile(
theme_mode: profile.theme_mode,
playable_npc_count: profile.playable_npc_count,
landmark_count: profile.landmark_count,
play_count: profile.play_count,
remix_count: profile.remix_count,
like_count: profile.like_count,
published_at,
updated_at: profile.updated_at,
};
let inserted = ctx.db.custom_world_gallery_entry().insert(row);
Ok(build_custom_world_gallery_entry_snapshot(&inserted))
Ok(build_custom_world_gallery_entry_snapshot(ctx, &inserted))
}
fn sync_missing_custom_world_gallery_entries(ctx: &ReducerContext) -> Result<(), String> {
@@ -4519,6 +4880,9 @@ fn ensure_custom_world_profile_public_fields(
profile_payload_json: profile.profile_payload_json.clone(),
playable_npc_count: profile.playable_npc_count,
landmark_count: profile.landmark_count,
play_count: profile.play_count,
remix_count: profile.remix_count,
like_count: profile.like_count,
author_display_name: profile.author_display_name.clone(),
published_at: profile.published_at,
deleted_at: profile.deleted_at,
@@ -4545,6 +4909,9 @@ fn build_custom_world_profile_row_copy(profile: &CustomWorldProfile) -> CustomWo
profile_payload_json: profile.profile_payload_json.clone(),
playable_npc_count: profile.playable_npc_count,
landmark_count: profile.landmark_count,
play_count: profile.play_count,
remix_count: profile.remix_count,
like_count: profile.like_count,
author_display_name: profile.author_display_name.clone(),
published_at: profile.published_at,
deleted_at: profile.deleted_at,
@@ -4569,6 +4936,9 @@ fn build_custom_world_profile_snapshot(row: &CustomWorldProfile) -> CustomWorldP
profile_payload_json: row.profile_payload_json.clone(),
playable_npc_count: row.playable_npc_count,
landmark_count: row.landmark_count,
play_count: row.play_count,
remix_count: row.remix_count,
like_count: row.like_count,
author_display_name: row.author_display_name.clone(),
published_at_micros: row
.published_at
@@ -4706,6 +5076,7 @@ fn build_custom_world_draft_card_snapshot(
}
fn build_custom_world_gallery_entry_snapshot(
ctx: &ReducerContext,
row: &CustomWorldGalleryEntry,
) -> CustomWorldGalleryEntrySnapshot {
CustomWorldGalleryEntrySnapshot {
@@ -4721,6 +5092,15 @@ fn build_custom_world_gallery_entry_snapshot(
theme_mode: row.theme_mode,
playable_npc_count: row.playable_npc_count,
landmark_count: row.landmark_count,
play_count: row.play_count,
remix_count: row.remix_count,
like_count: row.like_count,
recent_play_count_7d: count_recent_public_work_plays(
ctx,
"custom-world",
&row.profile_id,
ctx.timestamp.to_micros_since_unix_epoch(),
),
published_at_micros: row.published_at.to_micros_since_unix_epoch(),
updated_at_micros: row.updated_at.to_micros_since_unix_epoch(),
}
@@ -4871,6 +5251,9 @@ mod tests {
profile_payload_json: "{}".to_string(),
playable_npc_count: 0,
landmark_count: 0,
play_count: 0,
remix_count: 0,
like_count: 0,
author_display_name: "玩家".to_string(),
published_at: None,
deleted_at: None,
@@ -4892,6 +5275,9 @@ mod tests {
profile_payload_json: "{}".to_string(),
playable_npc_count: 0,
landmark_count: 0,
play_count: 0,
remix_count: 0,
like_count: 0,
author_display_name: "玩家".to_string(),
published_at: None,
deleted_at: Some(Timestamp::from_micros_since_unix_epoch(2)),
@@ -4913,6 +5299,9 @@ mod tests {
profile_payload_json: "{}".to_string(),
playable_npc_count: 0,
landmark_count: 0,
play_count: 0,
remix_count: 0,
like_count: 0,
author_display_name: "玩家".to_string(),
published_at: None,
deleted_at: None,
@@ -4973,6 +5362,9 @@ mod tests {
profile_payload_json: "{}".to_string(),
playable_npc_count: 0,
landmark_count: 0,
play_count: 0,
remix_count: 0,
like_count: 0,
author_display_name: "玩家".to_string(),
published_at: if publication_status == CustomWorldPublicationStatus::Published {
Some(Timestamp::from_micros_since_unix_epoch(2))
@@ -5034,6 +5426,9 @@ mod tests {
profile_payload_json: "{}".to_string(),
playable_npc_count: 0,
landmark_count: 0,
play_count: 0,
remix_count: 0,
like_count: 0,
author_display_name: "玩家".to_string(),
published_at: None,
deleted_at: None,

View File

@@ -1,4 +1,4 @@
// 中文注释SpacetimeDB 绑定生成依赖根模块继续公开 re-export 各领域类型;
// 中文注释SpacetimeDB 绑定生成依赖根模块继续公开 re-export 各领域类型;
// 少数领域 helper 同名只影响 value namespace 导出,不影响 table / reducer 类型。
#![allow(ambiguous_glob_reexports)]
@@ -28,6 +28,7 @@ mod custom_world;
mod domain_types;
mod entry;
mod gameplay;
mod match3d;
mod migration;
mod puzzle;
mod runtime;
@@ -40,5 +41,6 @@ pub use custom_world::*;
pub use domain_types::*;
pub use entry::*;
pub use gameplay::*;
pub use match3d::*;
pub use migration::*;
pub use runtime::*;

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,86 @@
use crate::*;
/// One match-3D creator agent session; keyed by `session_id` and indexed by
/// owner for per-user listing.
#[spacetimedb::table(
    accessor = match3d_agent_session,
    index(accessor = by_match3d_agent_session_owner_user_id, btree(columns = [owner_user_id]))
)]
pub struct Match3DAgentSessionRow {
    #[primary_key]
    pub(crate) session_id: String,
    pub(crate) owner_user_id: String,
    // Initial user prompt that seeded the creation flow.
    pub(crate) seed_text: String,
    pub(crate) current_turn: u32,
    pub(crate) progress_percent: u32,
    // Stage label; presumably one of the MATCH3D_STAGE_* constants — confirm.
    pub(crate) stage: String,
    // Serialized creator config / compiled draft payloads (JSON strings).
    pub(crate) config_json: String,
    pub(crate) draft_json: String,
    pub(crate) last_assistant_reply: String,
    // Set once the session's work has been published; empty/placeholder before.
    pub(crate) published_profile_id: String,
    pub(crate) created_at: Timestamp,
    pub(crate) updated_at: Timestamp,
}
/// One chat message inside a match-3D creator session; indexed by session for
/// transcript retrieval.
#[spacetimedb::table(
    accessor = match3d_agent_message,
    index(accessor = by_match3d_agent_message_session_id, btree(columns = [session_id]))
)]
pub struct Match3DAgentMessageRow {
    #[primary_key]
    pub(crate) message_id: String,
    pub(crate) session_id: String,
    // Presumably MATCH3D_ROLE_* / MATCH3D_KIND_* values — confirm at write sites.
    pub(crate) role: String,
    pub(crate) kind: String,
    pub(crate) text: String,
    pub(crate) created_at: Timestamp,
}
/// A match-3D work (game) profile; indexed by owner and by publication status
/// so both "my works" and "published gallery" queries use btree lookups.
#[spacetimedb::table(
    accessor = match3d_work_profile,
    index(accessor = by_match3d_work_owner_user_id, btree(columns = [owner_user_id])),
    index(accessor = by_match3d_work_publication_status, btree(columns = [publication_status]))
)]
pub struct Match3DWorkProfileRow {
    #[primary_key]
    pub(crate) profile_id: String,
    pub(crate) owner_user_id: String,
    // Creator agent session this work was compiled from.
    pub(crate) source_session_id: String,
    pub(crate) author_display_name: String,
    pub(crate) game_name: String,
    pub(crate) theme_text: String,
    pub(crate) summary_text: String,
    // Tags stored as a JSON-encoded array string.
    pub(crate) tags_json: String,
    pub(crate) cover_image_src: String,
    pub(crate) cover_asset_id: String,
    pub(crate) clear_count: u32,
    pub(crate) difficulty: u32,
    pub(crate) config_json: String,
    // Presumably MATCH3D_PUBLICATION_DRAFT / _PUBLISHED — confirm at write sites.
    pub(crate) publication_status: String,
    pub(crate) play_count: u32,
    pub(crate) updated_at: Timestamp,
    // None while the work is still a draft.
    pub(crate) published_at: Option<Timestamp>,
}
/// One runtime play-through of a match-3D work; indexed by owner and by the
/// work profile it runs.
#[spacetimedb::table(
    accessor = match3d_runtime_run,
    index(accessor = by_match3d_run_owner_user_id, btree(columns = [owner_user_id])),
    index(accessor = by_match3d_run_profile_id, btree(columns = [profile_id]))
)]
pub struct Match3DRuntimeRunRow {
    #[primary_key]
    pub(crate) run_id: String,
    pub(crate) owner_user_id: String,
    pub(crate) profile_id: String,
    // Presumably MATCH3D_RUN_* lifecycle values — confirm at write sites.
    pub(crate) status: String,
    // Optimistic-concurrency version for client click submissions.
    pub(crate) snapshot_version: u32,
    // Run timing in client milliseconds.
    pub(crate) started_at_ms: i64,
    pub(crate) duration_limit_ms: i64,
    pub(crate) finished_at_ms: i64,
    pub(crate) elapsed_ms: i64,
    pub(crate) clear_count: u32,
    pub(crate) total_item_count: u32,
    pub(crate) cleared_item_count: u32,
    // Presumably MATCH3D_FAILURE_* when status is Failed; empty otherwise — confirm.
    pub(crate) failure_reason: String,
    // Full board/tray state serialized as JSON.
    pub(crate) snapshot_json: String,
    pub(crate) created_at: Timestamp,
    pub(crate) updated_at: Timestamp,
}

View File

@@ -0,0 +1,332 @@
use crate::*;
use serde::{Deserialize, Serialize};
// ---- Gameplay tuning defaults ----
pub const MATCH3D_DEFAULT_DURATION_LIMIT_MS: i64 = 600_000;
pub const MATCH3D_TRAY_SLOT_COUNT: u32 = 7;
pub const MATCH3D_VISUAL_VARIANT_COUNT: u32 = 10;
pub const MATCH3D_MIN_DIFFICULTY: u32 = 1;
pub const MATCH3D_MAX_DIFFICULTY: u32 = 10;
// ---- Creator agent session stages ----
pub const MATCH3D_STAGE_COLLECTING: &str = "Collecting";
pub const MATCH3D_STAGE_READY_TO_COMPILE: &str = "ReadyToCompile";
pub const MATCH3D_STAGE_DRAFT_COMPILED: &str = "DraftCompiled";
pub const MATCH3D_STAGE_PUBLISHED: &str = "Published";
// ---- Agent message roles / kinds ----
pub const MATCH3D_ROLE_USER: &str = "user";
pub const MATCH3D_ROLE_ASSISTANT: &str = "assistant";
pub const MATCH3D_KIND_TEXT: &str = "text";
// ---- Work publication states ----
pub const MATCH3D_PUBLICATION_DRAFT: &str = "Draft";
pub const MATCH3D_PUBLICATION_PUBLISHED: &str = "Published";
// ---- Runtime run lifecycle states ----
pub const MATCH3D_RUN_RUNNING: &str = "Running";
pub const MATCH3D_RUN_WON: &str = "Won";
pub const MATCH3D_RUN_FAILED: &str = "Failed";
pub const MATCH3D_RUN_STOPPED: &str = "Stopped";
// ---- Run failure reasons ----
pub const MATCH3D_FAILURE_TIME_UP: &str = "TimeUp";
pub const MATCH3D_FAILURE_TRAY_FULL: &str = "TrayFull";
// ---- Click submission outcome codes ----
pub const MATCH3D_CLICK_ACCEPTED: &str = "Accepted";
pub const MATCH3D_CLICK_REJECTED_NOT_CLICKABLE: &str = "RejectedNotClickable";
pub const MATCH3D_CLICK_REJECTED_ALREADY_MOVED: &str = "RejectedAlreadyMoved";
pub const MATCH3D_CLICK_REJECTED_TRAY_FULL: &str = "RejectedTrayFull";
pub const MATCH3D_CLICK_VERSION_CONFLICT: &str = "VersionConflict";
pub const MATCH3D_CLICK_RUN_FINISHED: &str = "RunFinished";
// ---- Item placement states ----
pub const MATCH3D_ITEM_IN_BOARD: &str = "InBoard";
pub const MATCH3D_ITEM_IN_TRAY: &str = "InTray";
pub const MATCH3D_ITEM_CLEARED: &str = "Cleared";
/// Input for creating a creator agent session with its welcome message.
#[derive(Clone, Debug, PartialEq, Eq, SpacetimeType)]
pub struct Match3DAgentSessionCreateInput {
    pub session_id: String,
    pub owner_user_id: String,
    pub seed_text: String,
    pub welcome_message_id: String,
    pub welcome_message_text: String,
    pub config_json: Option<String>,
    pub created_at_micros: i64,
}
/// Input for fetching a session scoped to its owner.
#[derive(Clone, Debug, PartialEq, Eq, SpacetimeType)]
pub struct Match3DAgentSessionGetInput {
    pub session_id: String,
    pub owner_user_id: String,
}
/// Input for appending a user chat message to a session.
#[derive(Clone, Debug, PartialEq, Eq, SpacetimeType)]
pub struct Match3DAgentMessageSubmitInput {
    pub session_id: String,
    pub owner_user_id: String,
    pub user_message_id: String,
    pub user_message_text: String,
    pub submitted_at_micros: i64,
}
/// Input for finalizing an assistant turn: optional reply/config plus
/// updated progress and stage; `error_message` reports an upstream failure.
#[derive(Clone, Debug, PartialEq, Eq, SpacetimeType)]
pub struct Match3DAgentMessageFinalizeInput {
    pub session_id: String,
    pub owner_user_id: String,
    pub assistant_message_id: Option<String>,
    pub assistant_reply_text: Option<String>,
    pub config_json: Option<String>,
    pub progress_percent: u32,
    pub stage: String,
    pub updated_at_micros: i64,
    pub error_message: Option<String>,
}
/// Input for compiling a session's collected config into a draft work profile.
#[derive(Clone, Debug, PartialEq, Eq, SpacetimeType)]
pub struct Match3DDraftCompileInput {
    pub session_id: String,
    pub owner_user_id: String,
    pub profile_id: String,
    pub author_display_name: String,
    pub game_name: Option<String>,
    pub summary_text: Option<String>,
    pub tags_json: Option<String>,
    pub cover_image_src: Option<String>,
    pub cover_asset_id: Option<String>,
    pub compiled_at_micros: i64,
}
/// Input for editing an existing work's metadata and tuning values.
#[derive(Clone, Debug, PartialEq, Eq, SpacetimeType)]
pub struct Match3DWorkUpdateInput {
    pub profile_id: String,
    pub owner_user_id: String,
    pub game_name: String,
    pub theme_text: String,
    pub summary_text: String,
    pub tags_json: String,
    pub cover_image_src: String,
    pub cover_asset_id: String,
    pub clear_count: u32,
    pub difficulty: u32,
    pub updated_at_micros: i64,
}
/// Input for publishing a work at the given timestamp.
#[derive(Clone, Debug, PartialEq, Eq, SpacetimeType)]
pub struct Match3DWorkPublishInput {
    pub profile_id: String,
    pub owner_user_id: String,
    pub published_at_micros: i64,
}
/// Input for listing a user's works, optionally published-only.
#[derive(Clone, Debug, PartialEq, Eq, SpacetimeType)]
pub struct Match3DWorksListInput {
    pub owner_user_id: String,
    pub published_only: bool,
}
/// Input for fetching a single work scoped to its owner.
#[derive(Clone, Debug, PartialEq, Eq, SpacetimeType)]
pub struct Match3DWorkGetInput {
    pub profile_id: String,
    pub owner_user_id: String,
}
/// Input for deleting a work scoped to its owner.
#[derive(Clone, Debug, PartialEq, Eq, SpacetimeType)]
pub struct Match3DWorkDeleteInput {
    pub profile_id: String,
    pub owner_user_id: String,
}
/// Input for starting a runtime run of a work.
#[derive(Clone, Debug, PartialEq, Eq, SpacetimeType)]
pub struct Match3DRunStartInput {
    pub run_id: String,
    pub owner_user_id: String,
    pub profile_id: String,
    pub started_at_ms: i64,
}
/// Input for fetching a run scoped to its owner.
#[derive(Clone, Debug, PartialEq, Eq, SpacetimeType)]
pub struct Match3DRunGetInput {
    pub run_id: String,
    pub owner_user_id: String,
}
/// Input for submitting a board click; carries the client's snapshot version
/// and event id for optimistic-concurrency / dedup checks.
#[derive(Clone, Debug, PartialEq, Eq, SpacetimeType)]
pub struct Match3DRunClickInput {
    pub run_id: String,
    pub owner_user_id: String,
    pub item_instance_id: String,
    pub client_snapshot_version: u32,
    pub client_event_id: String,
    pub clicked_at_ms: i64,
}
/// Input for explicitly stopping a run.
#[derive(Clone, Debug, PartialEq, Eq, SpacetimeType)]
pub struct Match3DRunStopInput {
    pub run_id: String,
    pub owner_user_id: String,
    pub stopped_at_ms: i64,
}
/// Input for restarting: closes `source_run_id`, opens `next_run_id`.
#[derive(Clone, Debug, PartialEq, Eq, SpacetimeType)]
pub struct Match3DRunRestartInput {
    pub source_run_id: String,
    pub next_run_id: String,
    pub owner_user_id: String,
    pub restarted_at_ms: i64,
}
/// Input for marking a run as finished due to time expiry.
#[derive(Clone, Debug, PartialEq, Eq, SpacetimeType)]
pub struct Match3DRunTimeUpInput {
    pub run_id: String,
    pub owner_user_id: String,
    pub finished_at_ms: i64,
}
/// Procedure envelope: session payload as JSON on success, message on failure.
#[derive(Clone, Debug, PartialEq, Eq, SpacetimeType)]
pub struct Match3DAgentSessionProcedureResult {
    pub ok: bool,
    pub session_json: Option<String>,
    pub error_message: Option<String>,
}
/// Procedure envelope: single work payload as JSON.
#[derive(Clone, Debug, PartialEq, Eq, SpacetimeType)]
pub struct Match3DWorkProcedureResult {
    pub ok: bool,
    pub work_json: Option<String>,
    pub error_message: Option<String>,
}
/// Procedure envelope: list-of-works payload as JSON.
#[derive(Clone, Debug, PartialEq, Eq, SpacetimeType)]
pub struct Match3DWorksProcedureResult {
    pub ok: bool,
    pub items_json: Option<String>,
    pub error_message: Option<String>,
}
/// Procedure envelope: run snapshot payload as JSON.
#[derive(Clone, Debug, PartialEq, Eq, SpacetimeType)]
pub struct Match3DRunProcedureResult {
    pub ok: bool,
    pub run_json: Option<String>,
    pub error_message: Option<String>,
}
/// Click-procedure envelope: outcome `status` (presumably a MATCH3D_CLICK_*
/// value — confirm), the resulting run JSON, and which items were accepted /
/// cleared by the click.
#[derive(Clone, Debug, PartialEq, Eq, SpacetimeType)]
pub struct Match3DClickItemProcedureResult {
    pub ok: bool,
    pub status: String,
    pub run_json: Option<String>,
    pub accepted_item_instance_id: Option<String>,
    pub cleared_item_instance_ids: Vec<String>,
    pub failure_reason: Option<String>,
    pub error_message: Option<String>,
}
/// Creator-side config snapshot; serialized as camelCase JSON for clients.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Match3DCreatorConfigSnapshot {
    pub theme_text: String,
    pub reference_image_src: Option<String>,
    pub clear_count: u32,
    pub difficulty: u32,
}
/// One transcript message as exposed to clients.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Match3DAgentMessageSnapshot {
    pub message_id: String,
    pub session_id: String,
    pub role: String,
    pub kind: String,
    pub text: String,
    pub created_at_micros: i64,
}
/// Compiled draft summary produced by the creator session.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Match3DDraftSnapshot {
    pub profile_id: String,
    pub game_name: String,
    pub theme_text: String,
    pub summary_text: String,
    pub tags: Vec<String>,
    pub clear_count: u32,
    pub difficulty: u32,
}
/// Full creator session view: config, optional compiled draft, transcript.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Match3DAgentSessionSnapshot {
    pub session_id: String,
    pub owner_user_id: String,
    pub seed_text: String,
    pub current_turn: u32,
    pub progress_percent: u32,
    pub stage: String,
    pub config: Match3DCreatorConfigSnapshot,
    pub draft: Option<Match3DDraftSnapshot>,
    pub messages: Vec<Match3DAgentMessageSnapshot>,
    pub last_assistant_reply: String,
    pub published_profile_id: Option<String>,
    pub created_at_micros: i64,
    pub updated_at_micros: i64,
}
/// Client-facing view of a match-3D work, including derived `publish_ready`.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Match3DWorkSnapshot {
    pub profile_id: String,
    pub owner_user_id: String,
    pub source_session_id: String,
    pub author_display_name: String,
    pub game_name: String,
    pub theme_text: String,
    pub summary_text: String,
    pub tags: Vec<String>,
    pub cover_image_src: String,
    pub cover_asset_id: String,
    pub clear_count: u32,
    pub difficulty: u32,
    pub config: Match3DCreatorConfigSnapshot,
    pub publication_status: String,
    pub publish_ready: bool,
    pub play_count: u32,
    pub updated_at_micros: i64,
    pub published_at_micros: Option<i64>,
}
/// One board item with its position; f32 coordinates, so no `Eq` derive.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Match3DItemSnapshot {
    pub item_instance_id: String,
    pub item_type_id: String,
    pub visual_key: String,
    pub x: f32,
    pub y: f32,
    pub radius: f32,
    pub layer: u32,
    pub state: String,
    pub clickable: bool,
}
/// One tray slot; all item fields are None when the slot is empty.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Match3DTraySlotSnapshot {
    pub slot_index: u32,
    pub item_instance_id: Option<String>,
    pub item_type_id: Option<String>,
    pub visual_key: Option<String>,
}
/// Full run view sent to clients, including server time so the client can
/// compute the countdown (`remaining_ms`) without clock trust.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Match3DRunSnapshot {
    pub run_id: String,
    pub profile_id: String,
    pub status: String,
    pub snapshot_version: u32,
    pub started_at_ms: i64,
    pub duration_limit_ms: i64,
    pub server_now_ms: i64,
    pub remaining_ms: i64,
    pub clear_count: u32,
    pub total_item_count: u32,
    pub cleared_item_count: u32,
    pub tray_slots: Vec<Match3DTraySlotSnapshot>,
    pub items: Vec<Match3DItemSnapshot>,
    pub failure_reason: Option<String>,
}

View File

@@ -5,6 +5,9 @@ use spacetimedb_lib::sats::ser::serde::SerializeWrapper;
use std::collections::HashSet;
use crate::big_fish::big_fish_runtime_run;
use crate::match3d::tables::{
match3d_agent_message, match3d_agent_session, match3d_runtime_run, match3d_work_profile,
};
use crate::puzzle::{
puzzle_agent_message, puzzle_agent_session, puzzle_event, puzzle_leaderboard_entry,
puzzle_runtime_run, puzzle_work_profile,
@@ -12,6 +15,8 @@ use crate::puzzle::{
const MIGRATION_SCHEMA_VERSION: u32 = 1;
const MIGRATION_MAX_TABLE_NAME_LEN: usize = 96;
const MIGRATION_MAX_IMPORT_UPLOAD_ID_LEN: usize = 128;
const MIGRATION_MAX_IMPORT_CHUNK_BYTES: usize = 1024 * 1024;
const MIGRATION_MAX_OPERATOR_NOTE_CHARS: usize = 160;
const MIGRATION_MIN_BOOTSTRAP_SECRET_LEN: usize = 16;
const MIGRATION_BOOTSTRAP_SECRET: Option<&str> =
@@ -26,6 +31,21 @@ pub struct DatabaseMigrationOperator {
pub note: String,
}
/// Staging row holding one chunk of a large migration-import JSON upload.
/// Chunks are grouped by `upload_id` (indexed) and reassembled at commit time.
#[spacetimedb::table(
    accessor = database_migration_import_chunk,
    index(accessor = by_database_migration_import_upload, btree(columns = [upload_id]))
)]
pub struct DatabaseMigrationImportChunk {
    // Unique key for this chunk row.
    #[primary_key]
    pub chunk_key: String,
    pub upload_id: String,
    // Position of this chunk and the total expected chunk count for the upload.
    pub chunk_index: u32,
    pub chunk_count: u32,
    // Identity of the operator that staged the chunk.
    pub operator_identity: Identity,
    pub created_at: Timestamp,
    // Raw chunk payload (a slice of the migration JSON string).
    pub chunk: String,
}
#[derive(Clone, Debug, PartialEq, Eq, SpacetimeType)]
pub struct DatabaseMigrationExportInput {
pub include_tables: Vec<String>,
@@ -39,6 +59,27 @@ pub struct DatabaseMigrationImportInput {
pub dry_run: bool,
}
/// Input for staging one chunk of a chunked migration import.
#[derive(Clone, Debug, PartialEq, Eq, SpacetimeType)]
pub struct DatabaseMigrationImportChunkInput {
    pub upload_id: String,
    pub chunk_index: u32,
    pub chunk_count: u32,
    pub chunk: String,
}
/// Input for committing all staged chunks of `upload_id` as one import.
#[derive(Clone, Debug, PartialEq, Eq, SpacetimeType)]
pub struct DatabaseMigrationImportChunksInput {
    pub upload_id: String,
    pub include_tables: Vec<String>,
    pub replace_existing: bool,
    pub dry_run: bool,
}
/// Input for discarding all staged chunks of `upload_id`.
#[derive(Clone, Debug, PartialEq, Eq, SpacetimeType)]
pub struct DatabaseMigrationImportChunksClearInput {
    pub upload_id: String,
}
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum DatabaseMigrationImportMode {
Strict,
@@ -65,12 +106,20 @@ pub struct DatabaseMigrationTableStat {
pub skipped_row_count: u64,
}
/// Non-fatal issue raised while importing a table during a migration.
#[derive(Clone, Debug, PartialEq, Eq, SpacetimeType)]
pub struct DatabaseMigrationWarning {
    pub table_name: String,
    // Machine-readable warning category.
    pub warning_kind: String,
    pub message: String,
}
/// Uniform result envelope for every migration procedure: per-table stats and
/// warnings on success, `error_message` on failure; `migration_json` is only
/// populated by the export path.
#[derive(Clone, Debug, PartialEq, Eq, SpacetimeType)]
pub struct DatabaseMigrationProcedureResult {
    pub ok: bool,
    pub schema_version: u32,
    pub migration_json: Option<String>,
    pub table_stats: Vec<DatabaseMigrationTableStat>,
    pub warnings: Vec<DatabaseMigrationWarning>,
    pub error_message: Option<String>,
}
@@ -117,6 +166,8 @@ macro_rules! migration_tables {
profile_invite_code,
profile_referral_relation,
profile_played_world,
public_work_play_daily_stat,
public_work_like,
profile_membership,
profile_recharge_order,
profile_save_archive,
@@ -146,6 +197,10 @@ macro_rules! migration_tables {
puzzle_event,
puzzle_runtime_run,
puzzle_leaderboard_entry,
match3d_agent_session,
match3d_agent_message,
match3d_work_profile,
match3d_runtime_run,
big_fish_creation_session,
big_fish_agent_message,
big_fish_asset_slot,
@@ -249,6 +304,7 @@ pub fn export_database_migration_to_file(
schema_version: MIGRATION_SCHEMA_VERSION,
migration_json: Some(migration_json),
table_stats: stats,
warnings: Vec::new(),
error_message: None,
},
Err(error) => DatabaseMigrationProcedureResult {
@@ -256,6 +312,7 @@ pub fn export_database_migration_to_file(
schema_version: MIGRATION_SCHEMA_VERSION,
migration_json: None,
table_stats: Vec::new(),
warnings: Vec::new(),
error_message: Some(error),
},
}
@@ -269,11 +326,12 @@ pub fn import_database_migration_from_file(
) -> DatabaseMigrationProcedureResult {
match import_database_migration_from_file_inner(ctx, input, DatabaseMigrationImportMode::Strict)
{
Ok(stats) => DatabaseMigrationProcedureResult {
Ok((stats, warnings)) => DatabaseMigrationProcedureResult {
ok: true,
schema_version: MIGRATION_SCHEMA_VERSION,
migration_json: None,
table_stats: stats,
warnings,
error_message: None,
},
Err(error) => DatabaseMigrationProcedureResult {
@@ -281,6 +339,7 @@ pub fn import_database_migration_from_file(
schema_version: MIGRATION_SCHEMA_VERSION,
migration_json: None,
table_stats: Vec::new(),
warnings: Vec::new(),
error_message: Some(error),
},
}
@@ -297,11 +356,12 @@ pub fn import_database_migration_incremental_from_file(
input,
DatabaseMigrationImportMode::Incremental,
) {
Ok(stats) => DatabaseMigrationProcedureResult {
Ok((stats, warnings)) => DatabaseMigrationProcedureResult {
ok: true,
schema_version: MIGRATION_SCHEMA_VERSION,
migration_json: None,
table_stats: stats,
warnings,
error_message: None,
},
Err(error) => DatabaseMigrationProcedureResult {
@@ -309,11 +369,82 @@ pub fn import_database_migration_incremental_from_file(
schema_version: MIGRATION_SCHEMA_VERSION,
migration_json: None,
table_stats: Vec::new(),
warnings: Vec::new(),
error_message: Some(error),
},
}
}
// 大迁移 JSON 先按分片写入私有临时表,避免单次 HTTP request body 触发 SpacetimeDB 413。
#[spacetimedb::procedure]
pub fn put_database_migration_import_chunk(
ctx: &mut ProcedureContext,
input: DatabaseMigrationImportChunkInput,
) -> DatabaseMigrationProcedureResult {
match put_database_migration_import_chunk_inner(ctx, input) {
Ok(()) => empty_database_migration_result(true, None),
Err(error) => empty_database_migration_result(false, Some(error)),
}
}
// Committing staged chunks keeps the same strict append semantics as a direct
// import; the temporary chunk rows are cleaned up after a successful commit.
#[spacetimedb::procedure]
pub fn import_database_migration_from_chunks(
    ctx: &mut ProcedureContext,
    input: DatabaseMigrationImportChunksInput,
) -> DatabaseMigrationProcedureResult {
    let outcome = import_database_migration_from_chunks_inner(
        ctx,
        input,
        DatabaseMigrationImportMode::Strict,
    );
    match outcome {
        Err(error) => empty_database_migration_result(false, Some(error)),
        Ok((table_stats, warnings)) => DatabaseMigrationProcedureResult {
            ok: true,
            schema_version: MIGRATION_SCHEMA_VERSION,
            migration_json: None,
            table_stats,
            warnings,
            error_message: None,
        },
    }
}
// Incremental chunked commits only insert rows missing from the target
// database; rows that hit primary-key or unique-constraint conflicts are skipped.
#[spacetimedb::procedure]
pub fn import_database_migration_incremental_from_chunks(
    ctx: &mut ProcedureContext,
    input: DatabaseMigrationImportChunksInput,
) -> DatabaseMigrationProcedureResult {
    let outcome = import_database_migration_from_chunks_inner(
        ctx,
        input,
        DatabaseMigrationImportMode::Incremental,
    );
    match outcome {
        Err(error) => empty_database_migration_result(false, Some(error)),
        Ok((table_stats, warnings)) => DatabaseMigrationProcedureResult {
            ok: true,
            schema_version: MIGRATION_SCHEMA_VERSION,
            migration_json: None,
            table_stats,
            warnings,
            error_message: None,
        },
    }
}
// 调用方上传失败或提交失败时可显式清理同一 upload_id 的临时分片。
#[spacetimedb::procedure]
pub fn clear_database_migration_import_chunks(
ctx: &mut ProcedureContext,
input: DatabaseMigrationImportChunksClearInput,
) -> DatabaseMigrationProcedureResult {
match clear_database_migration_import_chunks_inner(ctx, input) {
Ok(()) => empty_database_migration_result(true, None),
Err(error) => empty_database_migration_result(false, Some(error)),
}
}
fn export_database_migration_to_file_inner(
ctx: &mut ProcedureContext,
input: DatabaseMigrationExportInput,
@@ -337,7 +468,13 @@ fn import_database_migration_from_file_inner(
ctx: &mut ProcedureContext,
input: DatabaseMigrationImportInput,
import_mode: DatabaseMigrationImportMode,
) -> Result<Vec<DatabaseMigrationTableStat>, String> {
) -> Result<
(
Vec<DatabaseMigrationTableStat>,
Vec<DatabaseMigrationWarning>,
),
String,
> {
let caller = ctx.sender();
let included_tables = normalize_include_tables(&input.include_tables)?;
if import_mode == DatabaseMigrationImportMode::Incremental && input.replace_existing {
@@ -348,16 +485,9 @@ fn import_database_migration_from_file_inner(
}
ctx.try_with_tx(|tx| require_migration_operator(tx, caller))?;
let migration_file = serde_json::from_str::<MigrationFile>(&input.migration_json)
.map_err(|error| format!("迁移文件 JSON 解析失败: {error}"))?;
if migration_file.schema_version != MIGRATION_SCHEMA_VERSION {
return Err(format!(
"迁移文件 schema_version 不匹配,期望 {},实际 {}",
MIGRATION_SCHEMA_VERSION, migration_file.schema_version
));
}
let migration_file = parse_migration_file(&input.migration_json)?;
let stats = if input.dry_run {
let (stats, warnings) = if input.dry_run {
build_import_dry_run_stats(&migration_file.tables, included_tables.as_ref())?
} else {
ctx.try_with_tx(|tx| {
@@ -372,7 +502,159 @@ fn import_database_migration_from_file_inner(
})?
};
Ok(stats)
Ok((stats, warnings))
}
/// Validates and stages one migration-JSON chunk under its composite key.
///
/// Bounds and size checks run before any transaction. Inside the transaction
/// the caller must be an authorized migration operator; a chunk the same
/// caller already uploaded under the same key is replaced, while a chunk
/// uploaded by a different identity is never overwritten.
fn put_database_migration_import_chunk_inner(
    ctx: &mut ProcedureContext,
    input: DatabaseMigrationImportChunkInput,
) -> Result<(), String> {
    let caller = ctx.sender();
    let upload_id = normalize_import_upload_id(&input.upload_id)?;
    if input.chunk_count == 0 {
        return Err("分片总数必须大于 0".to_string());
    }
    if input.chunk_index >= input.chunk_count {
        return Err(format!(
            "分片序号越界: {} / {}",
            input.chunk_index, input.chunk_count
        ));
    }
    if input.chunk.is_empty() {
        return Err("迁移 JSON 分片不能为空".to_string());
    }
    // Per-chunk size cap mirrors the transport limit the chunking exists to avoid.
    if input.chunk.len() > MIGRATION_MAX_IMPORT_CHUNK_BYTES {
        return Err(format!(
            "迁移 JSON 分片过大,单片最多 {} bytes",
            MIGRATION_MAX_IMPORT_CHUNK_BYTES
        ));
    }
    let chunk_key = build_import_chunk_key(&upload_id, input.chunk_index);
    ctx.try_with_tx(|tx| {
        require_migration_operator(tx, caller)?;
        // Re-uploads by the same operator replace the staged chunk in place.
        if let Some(existing) = tx
            .db
            .database_migration_import_chunk()
            .chunk_key()
            .find(&chunk_key)
        {
            if existing.operator_identity != caller {
                return Err("同名迁移分片已由其他 identity 上传,已拒绝覆盖".to_string());
            }
            tx.db
                .database_migration_import_chunk()
                .chunk_key()
                .delete(&chunk_key);
        }
        tx.db
            .database_migration_import_chunk()
            .insert(DatabaseMigrationImportChunk {
                chunk_key: chunk_key.clone(),
                upload_id: upload_id.clone(),
                chunk_index: input.chunk_index,
                chunk_count: input.chunk_count,
                operator_identity: caller,
                created_at: tx.timestamp,
                chunk: input.chunk.clone(),
            });
        Ok(())
    })?;
    Ok(())
}
/// Reassembles the staged chunks for `upload_id`, parses the combined
/// migration JSON, applies it (or computes dry-run stats), then clears the
/// staged chunks. Returns per-table stats plus compatibility warnings.
///
/// Each stage runs in its own transaction and re-checks operator
/// authorization. NOTE(review): the staged chunks are cleared even after a
/// dry run, so a dry run followed by a real commit requires re-uploading the
/// chunks — confirm this is intended.
fn import_database_migration_from_chunks_inner(
    ctx: &mut ProcedureContext,
    input: DatabaseMigrationImportChunksInput,
    import_mode: DatabaseMigrationImportMode,
) -> Result<
    (
        Vec<DatabaseMigrationTableStat>,
        Vec<DatabaseMigrationWarning>,
    ),
    String,
> {
    let caller = ctx.sender();
    let upload_id = normalize_import_upload_id(&input.upload_id)?;
    let included_tables = normalize_include_tables(&input.include_tables)?;
    // replace_existing wipes existing rows first, which contradicts incremental mode.
    if import_mode == DatabaseMigrationImportMode::Incremental && input.replace_existing {
        return Err("增量导入不能同时启用 replace_existing".to_string());
    }
    let migration_json = ctx.try_with_tx(|tx| {
        require_migration_operator(tx, caller)?;
        read_database_migration_import_chunks(tx, &upload_id, caller)
    })?;
    let migration_file = parse_migration_file(&migration_json)?;
    let (stats, warnings) = if input.dry_run {
        build_import_dry_run_stats(&migration_file.tables, included_tables.as_ref())?
    } else {
        ctx.try_with_tx(|tx| {
            require_migration_operator(tx, caller)?;
            apply_migration_file(
                tx,
                &migration_file,
                included_tables.as_ref(),
                input.replace_existing,
                import_mode,
            )
        })?
    };
    // Staged chunks are removed only after the import (or dry run) succeeded.
    ctx.try_with_tx(|tx| {
        require_migration_operator(tx, caller)?;
        clear_database_migration_import_chunks_tx(tx, &upload_id);
        Ok::<(), String>(())
    })?;
    Ok((stats, warnings))
}
/// Deletes every staged chunk of the caller-supplied upload id after
/// confirming the caller is an authorized migration operator.
fn clear_database_migration_import_chunks_inner(
    ctx: &mut ProcedureContext,
    input: DatabaseMigrationImportChunksClearInput,
) -> Result<(), String> {
    let operator = ctx.sender();
    let normalized_upload_id = normalize_import_upload_id(&input.upload_id)?;
    ctx.try_with_tx(|tx| {
        require_migration_operator(tx, operator)?;
        clear_database_migration_import_chunks_tx(tx, &normalized_upload_id);
        Ok::<(), String>(())
    })?;
    Ok(())
}
/// Builds a `DatabaseMigrationProcedureResult` with no payload — no migration
/// JSON, no table stats, no warnings — just the status flag and an optional
/// error message.
fn empty_database_migration_result(
    ok: bool,
    error_message: Option<String>,
) -> DatabaseMigrationProcedureResult {
    DatabaseMigrationProcedureResult {
        ok,
        error_message,
        schema_version: MIGRATION_SCHEMA_VERSION,
        migration_json: None,
        table_stats: Vec::new(),
        warnings: Vec::new(),
    }
}
/// Parses a migration payload, rejecting empty input, malformed JSON, and a
/// `schema_version` that does not match the current module's version.
fn parse_migration_file(migration_json: &str) -> Result<MigrationFile, String> {
    if migration_json.trim().is_empty() {
        return Err("migration_json 不能为空".to_string());
    }
    let parsed: MigrationFile = serde_json::from_str(migration_json)
        .map_err(|error| format!("迁移文件 JSON 解析失败: {error}"))?;
    if parsed.schema_version == MIGRATION_SCHEMA_VERSION {
        Ok(parsed)
    } else {
        Err(format!(
            "迁移文件 schema_version 不匹配,期望 {},实际 {}",
            MIGRATION_SCHEMA_VERSION, parsed.schema_version
        ))
    }
}
fn authorize_database_migration_operator_inner(
@@ -516,6 +798,96 @@ fn normalize_migration_operator_note(input: &str) -> Result<String, String> {
Ok(note.to_string())
}
/// Trims and validates an upload id: non-empty, at most
/// `MIGRATION_MAX_IMPORT_UPLOAD_ID_LEN` bytes, and restricted to ASCII
/// alphanumerics plus `-` and `_`.
fn normalize_import_upload_id(input: &str) -> Result<String, String> {
    let trimmed = input.trim();
    if trimmed.is_empty() {
        return Err("upload_id 不能为空".to_string());
    }
    if trimmed.len() > MIGRATION_MAX_IMPORT_UPLOAD_ID_LEN {
        return Err(format!(
            "upload_id 过长,最多 {} bytes",
            MIGRATION_MAX_IMPORT_UPLOAD_ID_LEN
        ));
    }
    let is_allowed =
        |character: char| character.is_ascii_alphanumeric() || character == '-' || character == '_';
    if trimmed.chars().all(is_allowed) {
        Ok(trimmed.to_string())
    } else {
        Err("upload_id 只能使用 ASCII 字母、数字、短横线或下划线".to_string())
    }
}
/// Primary key for one staged chunk: the upload id plus a zero-padded
/// ten-digit chunk index, so lexicographic order matches numeric order.
fn build_import_chunk_key(upload_id: &str, chunk_index: u32) -> String {
    format!("{}:{:010}", upload_id, chunk_index)
}
/// Loads and validates every staged chunk of `upload_id`, then concatenates
/// them in chunk-index order into the full migration JSON string.
///
/// Rejects uploads that mix identities, disagree on `chunk_count`, or are
/// incomplete.
fn read_database_migration_import_chunks(
    ctx: &ReducerContext,
    upload_id: &str,
    caller: Identity,
) -> Result<String, String> {
    let mut chunks = ctx
        .db
        .database_migration_import_chunk()
        .by_database_migration_import_upload()
        .filter(upload_id)
        .collect::<Vec<_>>();
    if chunks.is_empty() {
        return Err(format!("未找到迁移 JSON 分片: {upload_id}"));
    }
    // Every chunk must come from the committing operator.
    if chunks.iter().any(|chunk| chunk.operator_identity != caller) {
        return Err("迁移 JSON 分片包含其他 identity 上传的片段,已拒绝提交".to_string());
    }
    let chunk_count = chunks[0].chunk_count;
    if chunk_count == 0 {
        return Err("迁移 JSON 分片总数不合法".to_string());
    }
    // All chunks must agree on the declared total and the target upload id.
    if chunks
        .iter()
        .any(|chunk| chunk.chunk_count != chunk_count || chunk.upload_id != upload_id)
    {
        return Err("迁移 JSON 分片总数不一致".to_string());
    }
    if chunks.len() != chunk_count as usize {
        return Err(format!(
            "迁移 JSON 分片未上传完整,已收到 {} / {}",
            chunks.len(),
            chunk_count
        ));
    }
    chunks.sort_by_key(|chunk| chunk.chunk_index);
    // chunk_key is the primary key, so indices are unique; after sorting, a
    // complete upload must be exactly the sequence 0..chunk_count.
    let mut expected_index = 0u32;
    let mut migration_json = String::new();
    for chunk in chunks {
        if chunk.chunk_index != expected_index {
            return Err(format!("迁移 JSON 分片缺失序号: {expected_index}"));
        }
        migration_json.push_str(&chunk.chunk);
        expected_index = expected_index.saturating_add(1);
    }
    Ok(migration_json)
}
/// Removes every staged chunk row belonging to `upload_id`.
///
/// The keys are collected up front so the deletions do not run while the
/// index iterator is still borrowed.
fn clear_database_migration_import_chunks_tx(ctx: &ReducerContext, upload_id: &str) {
    let keys_to_delete: Vec<_> = ctx
        .db
        .database_migration_import_chunk()
        .by_database_migration_import_upload()
        .filter(upload_id)
        .map(|chunk| chunk.chunk_key)
        .collect();
    keys_to_delete.into_iter().for_each(|key| {
        ctx.db
            .database_migration_import_chunk()
            .chunk_key()
            .delete(&key);
    });
}
fn normalize_include_tables(input: &[String]) -> Result<Option<HashSet<String>>, String> {
if input.is_empty() {
return Ok(None);
@@ -574,11 +946,25 @@ fn build_export_stats(tables: &[MigrationTable]) -> Vec<DatabaseMigrationTableSt
fn build_import_dry_run_stats(
tables: &[MigrationTable],
include_tables: Option<&HashSet<String>>,
) -> Result<Vec<DatabaseMigrationTableStat>, String> {
) -> Result<
(
Vec<DatabaseMigrationTableStat>,
Vec<DatabaseMigrationWarning>,
),
String,
> {
let mut stats = Vec::new();
let mut warnings = Vec::new();
for table in tables {
if !is_supported_migration_table(&table.name) {
return Err(format!("迁移文件包含不支持的表: {}", table.name));
warnings.push(build_dropped_table_warning(table));
stats.push(DatabaseMigrationTableStat {
table_name: table.name.clone(),
exported_row_count: 0,
imported_row_count: 0,
skipped_row_count: table.rows.len() as u64,
});
continue;
}
if should_include_table(include_tables, &table.name) {
stats.push(DatabaseMigrationTableStat {
@@ -596,7 +982,7 @@ fn build_import_dry_run_stats(
});
}
}
Ok(stats)
Ok((stats, warnings))
}
fn apply_migration_file(
@@ -605,13 +991,15 @@ fn apply_migration_file(
include_tables: Option<&HashSet<String>>,
replace_existing: bool,
import_mode: DatabaseMigrationImportMode,
) -> Result<Vec<DatabaseMigrationTableStat>, String> {
) -> Result<
(
Vec<DatabaseMigrationTableStat>,
Vec<DatabaseMigrationWarning>,
),
String,
> {
let mut stats = Vec::new();
for table in &migration_file.tables {
if !is_supported_migration_table(&table.name) {
return Err(format!("迁移文件包含不支持的表: {}", table.name));
}
}
let mut warnings = Vec::new();
let import_table_names = build_import_table_name_set(migration_file, include_tables);
if replace_existing {
@@ -620,6 +1008,17 @@ fn apply_migration_file(
}
for table in &migration_file.tables {
if !is_supported_migration_table(&table.name) {
warnings.push(build_dropped_table_warning(table));
stats.push(DatabaseMigrationTableStat {
table_name: table.name.clone(),
exported_row_count: 0,
imported_row_count: 0,
skipped_row_count: table.rows.len() as u64,
});
continue;
}
if !should_include_table(include_tables, &table.name) {
stats.push(DatabaseMigrationTableStat {
table_name: table.name.clone(),
@@ -631,7 +1030,7 @@ fn apply_migration_file(
}
let (imported_row_count, skipped_row_count) =
insert_migration_table_rows(ctx, table, import_mode)?;
insert_migration_table_rows(ctx, table, import_mode, &mut warnings)?;
stats.push(DatabaseMigrationTableStat {
table_name: table.name.clone(),
exported_row_count: 0,
@@ -640,7 +1039,7 @@ fn apply_migration_file(
});
}
Ok(stats)
Ok((stats, warnings))
}
fn build_import_table_name_set(
@@ -655,37 +1054,192 @@ fn build_import_table_name_set(
.collect()
}
/// Warning for a migration table that the current module no longer declares
/// (or that is not whitelisted); all of its rows are skipped.
fn build_dropped_table_warning(table: &MigrationTable) -> DatabaseMigrationWarning {
    let message = format!(
        "迁移文件包含当前模块已删除或未加入白名单的表 {},已跳过 {} 行",
        table.name,
        table.rows.len()
    );
    DatabaseMigrationWarning {
        table_name: table.name.clone(),
        warning_kind: "dropped_table".to_string(),
        message,
    }
}
/// Warning for a legacy row field that no longer exists on the current table
/// schema and was dropped during import.
fn build_dropped_field_warning(table_name: &str, field_name: &str) -> DatabaseMigrationWarning {
    DatabaseMigrationWarning {
        warning_kind: "dropped_field".to_string(),
        table_name: table_name.to_string(),
        message: format!("表 {table_name} 的旧字段 {field_name} 当前已不存在,已在导入时丢弃"),
    }
}
/// Serializes one table row to a JSON value via the module's serialize wrapper.
fn row_to_json<T: spacetimedb::Serialize>(row: &T) -> Result<serde_json::Value, String> {
    match serde_json::to_value(SerializeWrapper::from_ref(row)) {
        Ok(value) => Ok(value),
        Err(error) => Err(format!("迁移行序列化失败: {error}")),
    }
}
fn row_from_json<T>(value: &serde_json::Value) -> Result<T, String>
fn row_from_json<T>(
table_name: &str,
value: &serde_json::Value,
warnings: &mut Vec<DatabaseMigrationWarning>,
) -> Result<T, String>
where
T: for<'de> spacetimedb::Deserialize<'de>,
{
let wrapped: DeserializeWrapper<T> = serde_json::from_value(value.clone())
.map_err(|error| format!("迁移行反序列化失败: {error}"))?;
let wrapped = match serde_json::from_value::<DeserializeWrapper<T>>(value.clone()) {
Ok(row) => row,
Err(original_error) => recover_row_with_deleted_fields::<T>(
table_name,
value,
&original_error.to_string(),
warnings,
)
.ok_or_else(|| format!("迁移行反序列化失败,且无法通过丢弃旧字段恢复: {original_error}"))?,
};
Ok(wrapped.0)
}
/// Backfills fields that newer schema versions added, so rows exported by an
/// older module version still deserialize against the current tables.
/// Rows of tables without backfill rules (and non-object rows) pass through
/// unchanged.
fn normalize_migration_row(table_name: &str, value: &serde_json::Value) -> serde_json::Value {
    let mut next_value = value.clone();
    let zero = || serde_json::Value::from(0);
    if let Some(object) = next_value.as_object_mut() {
        match table_name {
            "user_account" => {
                // The avatar column arrived after the auth-table split; legacy
                // exports are treated as "no avatar set".
                object
                    .entry("avatar_url".to_string())
                    .or_insert(serde_json::Value::Null);
            }
            "profile_invite_code" => {
                // Invite-code metadata postdates the invite table; legacy
                // exports default to an empty JSON object.
                object
                    .entry("metadata_json".to_string())
                    .or_insert_with(|| serde_json::Value::String("{}".to_string()));
            }
            "big_fish_creation_session" => {
                // Public interaction counters did not exist in old exports;
                // imported works start from zero with no publish timestamp.
                for key in ["play_count", "remix_count", "like_count"] {
                    object.entry(key.to_string()).or_insert_with(zero);
                }
                object
                    .entry("published_at".to_string())
                    .or_insert(serde_json::Value::Null);
            }
            "custom_world_profile" | "custom_world_gallery_entry" => {
                // Public interaction counters postdate the base work tables;
                // legacy rows default to zero.
                for key in ["play_count", "remix_count", "like_count"] {
                    object.entry(key.to_string()).or_insert_with(zero);
                }
            }
            "puzzle_work_profile" => {
                // Interaction counters and point-incentive totals postdate the
                // base work table; legacy rows default to zero.
                for key in [
                    "play_count",
                    "remix_count",
                    "like_count",
                    "point_incentive_total_half_points",
                    "point_incentive_claimed_points",
                ] {
                    object.entry(key.to_string()).or_insert_with(zero);
                }
                // Multi-level support came later; legacy rows keep the field
                // empty and the read layer synthesizes the first level.
                object
                    .entry("levels_json".to_string())
                    .or_insert_with(|| serde_json::Value::from(""));
                // Work title/description were split out of the legacy level
                // name and scene summary; old rows fall back to those values.
                let fallback_title = object
                    .get("level_name")
                    .cloned()
                    .unwrap_or_else(|| serde_json::Value::from(""));
                object
                    .entry("work_title".to_string())
                    .or_insert(fallback_title);
                let fallback_description = object
                    .get("summary")
                    .cloned()
                    .unwrap_or_else(|| serde_json::Value::from(""));
                object
                    .entry("work_description".to_string())
                    .or_insert(fallback_description);
            }
            _ => {}
        }
    }
    next_value
}
/// Attempts to deserialize a row by iteratively dropping fields that serde
/// reports as unknown (i.e. fields deleted from the current schema), recording
/// a warning for each dropped field.
///
/// Returns `None` when the row is not a JSON object, the error does not name a
/// recognizable unknown field, or the named field is absent (so no progress is
/// possible). The loop terminates because each iteration either removes one
/// field or bails out via `?`.
fn recover_row_with_deleted_fields<T>(
    table_name: &str,
    value: &serde_json::Value,
    error_message: &str,
    warnings: &mut Vec<DatabaseMigrationWarning>,
) -> Option<DeserializeWrapper<T>>
where
    T: for<'de> spacetimedb::Deserialize<'de>,
{
    let mut candidate = value.as_object()?.clone();
    let mut next_error = error_message.to_string();
    loop {
        let field_name = extract_unknown_field_name(&next_error)?;
        // `?` guarantees progress: if serde keeps naming a field we have
        // already removed, `remove` yields None and we give up.
        candidate.remove(&field_name)?;
        warnings.push(build_dropped_field_warning(table_name, &field_name));
        match serde_json::from_value::<DeserializeWrapper<T>>(serde_json::Value::Object(
            candidate.clone(),
        )) {
            Ok(row) => return Some(row),
            Err(error) => next_error = error.to_string(),
        }
    }
}
/// Pulls the offending field name out of a serde "unknown field" error
/// message. Prefers a quoted name (backticks, double, or single quotes) right
/// after the marker; otherwise falls back to the first identifier-like token.
/// Returns `None` when the marker is absent or a quote is left unclosed.
fn extract_unknown_field_name(error_message: &str) -> Option<String> {
    const MARKER: &str = "unknown field";
    let start = error_message.find(MARKER)? + MARKER.len();
    let tail = error_message[start..].trim_start();
    for quote in ['`', '"', '\''] {
        let Some(rest) = tail.strip_prefix(quote) else {
            continue;
        };
        return rest.find(quote).map(|end| rest[..end].to_string());
    }
    tail.split(|character: char| !character.is_ascii_alphanumeric() && character != '_')
        .find(|piece| !piece.is_empty())
        .map(str::to_string)
}
fn insert_migration_table_rows(
ctx: &ReducerContext,
table: &MigrationTable,
import_mode: DatabaseMigrationImportMode,
warnings: &mut Vec<DatabaseMigrationWarning>,
) -> Result<(u64, u64), String> {
macro_rules! insert_table_match_arm {
($($table:ident),+ $(,)?) => {
@@ -696,7 +1250,7 @@ fn insert_migration_table_rows(
let mut skipped = 0u64;
for value in &table.rows {
let normalized_value = normalize_migration_row(stringify!($table), value);
let row = row_from_json(&normalized_value)
let row = row_from_json(stringify!($table), &normalized_value, warnings)
.map_err(|error| format!("{}: {error}", stringify!($table)))?;
let insert_result = ctx.db
.$table()

File diff suppressed because it is too large Load Diff

View File

@@ -1,5 +1,10 @@
use crate::*;
const PUBLIC_WORK_PLAY_DAY_MICROS: i64 = 86_400_000_000;
const PUBLIC_WORK_RECENT_PLAY_WINDOW_DAYS: i64 = 7;
const PROFILE_REFERRAL_INVITED_USERS_LIMIT: usize = 20;
const PROFILE_NEW_USER_REGISTRATION_LEDGER_PREFIX: &str = "new-user-registration";
#[spacetimedb::table(accessor = profile_dashboard_state)]
pub struct ProfileDashboardState {
#[primary_key]
@@ -67,6 +72,7 @@ pub struct ProfileInviteCode {
pub(crate) user_id: String,
#[unique]
pub(crate) invite_code: String,
pub(crate) metadata_json: String,
pub(crate) created_at: Timestamp,
pub(crate) updated_at: Timestamp,
}
@@ -116,6 +122,42 @@ pub struct ProfilePlayedWorld {
pub(crate) last_observed_play_time_ms: u64,
}
/// Per-day play counter for one public work, bucketed by UTC calendar day.
#[spacetimedb::table(
    accessor = public_work_play_daily_stat,
    index(
        accessor = by_public_work_play_daily_stat_work_day,
        btree(columns = [source_type, profile_id, played_day])
    )
)]
pub struct PublicWorkPlayDailyStat {
    #[primary_key]
    pub(crate) stat_id: String,
    // source_type distinguishes custom-world / puzzle / big-fish so profile_ids
    // from different game modes never collide in the same bucket.
    pub(crate) source_type: String,
    pub(crate) owner_user_id: String,
    pub(crate) profile_id: String,
    // UTC calendar-day number since the Unix epoch; enables fast aggregation of
    // plays over the trailing 7-day window.
    pub(crate) played_day: i64,
    pub(crate) play_count: u32,
    pub(crate) updated_at: Timestamp,
}
/// One like per (work, user); `like_id` is built deterministically from
/// source_type/profile_id/user_id, making like recording idempotent.
#[spacetimedb::table(
    accessor = public_work_like,
    index(accessor = by_public_work_like_work, btree(columns = [source_type, profile_id])),
    index(accessor = by_public_work_like_user, btree(columns = [user_id]))
)]
pub struct PublicWorkLike {
    #[primary_key]
    pub(crate) like_id: String,
    // source_type uses the same work-type naming as the play stats, so
    // profile_ids from different game modes never clash with each other.
    pub(crate) source_type: String,
    pub(crate) owner_user_id: String,
    pub(crate) profile_id: String,
    pub(crate) user_id: String,
    pub(crate) liked_at: Timestamp,
}
pub(crate) struct ProfilePlayedWorkUpsertInput {
pub(crate) user_id: String,
pub(crate) world_key: String,
@@ -127,6 +169,37 @@ pub(crate) struct ProfilePlayedWorkUpsertInput {
pub(crate) played_at_micros: i64,
}
/// Input for recording one public-work play event.
pub(crate) struct PublicWorkPlayRecordInput {
    pub(crate) source_type: String,
    pub(crate) owner_user_id: String,
    pub(crate) profile_id: String,
    // Microseconds since the Unix epoch; bucketed into a UTC day downstream.
    pub(crate) played_at_micros: i64,
}
/// Input for recording one public-work like by a specific user.
pub(crate) struct PublicWorkLikeRecordInput {
    pub(crate) source_type: String,
    pub(crate) owner_user_id: String,
    pub(crate) profile_id: String,
    pub(crate) user_id: String,
    // Microseconds since the Unix epoch at which the like was made.
    pub(crate) liked_at_micros: i64,
}
/// Input for upserting a profile save archive; `user_id` + `world_key`
/// together identify the archive row.
pub(crate) struct ProfileSaveArchiveUpsertInput {
    pub(crate) user_id: String,
    pub(crate) world_key: String,
    pub(crate) owner_user_id: Option<String>,
    pub(crate) profile_id: Option<String>,
    pub(crate) world_type: Option<String>,
    pub(crate) world_name: String,
    pub(crate) subtitle: String,
    pub(crate) summary_text: String,
    pub(crate) cover_image_src: Option<String>,
    pub(crate) bottom_tab: String,
    pub(crate) game_state_json: String,
    pub(crate) current_story_json: Option<String>,
    // Save timestamp in microseconds since the Unix epoch.
    pub(crate) saved_at_micros: i64,
}
#[spacetimedb::table(accessor = profile_membership)]
pub struct ProfileMembership {
#[primary_key]
@@ -282,6 +355,26 @@ pub fn list_profile_wallet_ledger(
}
}
// The new-user registration grant is invoked by the backend registration flow;
// the ledger entry id is fixed, so retries never pay out twice.
#[spacetimedb::procedure]
pub fn grant_new_user_registration_wallet_reward(
    ctx: &mut ProcedureContext,
    input: RuntimeProfileDashboardGetInput,
) -> RuntimeProfileWalletAdjustmentProcedureResult {
    ctx.try_with_tx(|tx| grant_new_user_registration_wallet_reward_tx(tx, input.clone()))
        .map_or_else(
            |message| RuntimeProfileWalletAdjustmentProcedureResult {
                ok: false,
                record: None,
                error_message: Some(message),
            },
            |record| RuntimeProfileWalletAdjustmentProcedureResult {
                ok: true,
                record: Some(record),
                error_message: None,
            },
        )
}
// 资产生成由 Axum 调用外部模型,钱包扣费必须先在 SpacetimeDB 内原子落账。
#[spacetimedb::procedure]
pub fn consume_profile_wallet_points_and_return(
@@ -420,7 +513,7 @@ pub fn get_profile_referral_invite_center(
}
}
// 填码绑定、每日邀请者奖励上限和双方叙世币发放都在同一事务内完成。
// 填码绑定、每日邀请者奖励上限和双方光点发放都在同一事务内完成。
#[spacetimedb::procedure]
pub fn redeem_profile_referral_invite_code(
ctx: &mut ProcedureContext,
@@ -498,6 +591,25 @@ pub fn admin_disable_profile_redeem_code(
}
}
/// Admin upsert of a profile invite code; the whole write happens in one
/// transaction and the outcome is folded into the admin procedure result.
#[spacetimedb::procedure]
pub fn admin_upsert_profile_invite_code(
    ctx: &mut ProcedureContext,
    input: RuntimeProfileInviteCodeAdminUpsertInput,
) -> RuntimeProfileInviteCodeAdminProcedureResult {
    ctx.try_with_tx(|tx| admin_upsert_profile_invite_code_record(tx, input.clone()))
        .map_or_else(
            |message| RuntimeProfileInviteCodeAdminProcedureResult {
                ok: false,
                record: None,
                error_message: Some(message),
            },
            |record| RuntimeProfileInviteCodeAdminProcedureResult {
                ok: true,
                record: Some(record),
                error_message: None,
            },
        )
}
pub(crate) fn list_profile_save_archive_rows(
ctx: &ReducerContext,
input: RuntimeProfileSaveArchiveListInput,
@@ -705,6 +817,172 @@ pub(crate) fn add_profile_observed_play_time(
Ok(())
}
/// Inserts or replaces the save archive keyed by `{user_id}:{world_key}`.
///
/// The original `created_at` survives repeated saves; the replace is modeled
/// as delete + insert and `updated_at` always tracks the save time.
pub(crate) fn upsert_profile_save_archive(
    ctx: &ReducerContext,
    input: ProfileSaveArchiveUpsertInput,
) -> Result<(), String> {
    let user_id = input.user_id.trim();
    let world_key = input.world_key.trim();
    if user_id.is_empty() || world_key.is_empty() {
        return Err("profile_save_archive 参数不能为空".to_string());
    }
    let saved_at = Timestamp::from_micros_since_unix_epoch(input.saved_at_micros);
    let archive_id = format!("{user_id}:{world_key}");
    // Keep the first-created timestamp stable across repeated saves.
    let created_at = match ctx.db.profile_save_archive().archive_id().find(&archive_id) {
        Some(previous) => {
            let original_created_at = previous.created_at;
            ctx.db
                .profile_save_archive()
                .archive_id()
                .delete(&previous.archive_id);
            original_created_at
        }
        None => saved_at,
    };
    ctx.db.profile_save_archive().insert(ProfileSaveArchive {
        archive_id,
        user_id: user_id.to_string(),
        world_key: world_key.to_string(),
        owner_user_id: input.owner_user_id,
        profile_id: input.profile_id,
        world_type: input.world_type,
        world_name: input.world_name,
        subtitle: input.subtitle,
        summary_text: input.summary_text,
        cover_image_src: input.cover_image_src,
        saved_at,
        bottom_tab: input.bottom_tab,
        game_state_json: input.game_state_json,
        current_story_json: input.current_story_json,
        created_at,
        updated_at: saved_at,
    });
    Ok(())
}
/// Bumps the per-day play counter for one public work. The row is replaced
/// via delete + insert with the incremented (saturating) count.
pub(crate) fn record_public_work_play(
    ctx: &ReducerContext,
    input: PublicWorkPlayRecordInput,
) -> Result<(), String> {
    let source_type = input.source_type.trim();
    let owner_user_id = input.owner_user_id.trim();
    let profile_id = input.profile_id.trim();
    if source_type.is_empty() || owner_user_id.is_empty() || profile_id.is_empty() {
        return Err("public_work_play_daily_stat 参数不能为空".to_string());
    }
    let played_day = public_work_play_day_from_micros(input.played_at_micros);
    let stat_id = build_public_work_play_daily_stat_id(source_type, profile_id, played_day);
    let updated_at = Timestamp::from_micros_since_unix_epoch(input.played_at_micros);
    let next_count = match ctx.db.public_work_play_daily_stat().stat_id().find(&stat_id) {
        Some(existing) => {
            ctx.db
                .public_work_play_daily_stat()
                .stat_id()
                .delete(&existing.stat_id);
            existing.play_count.saturating_add(1)
        }
        None => 1,
    };
    ctx.db
        .public_work_play_daily_stat()
        .insert(PublicWorkPlayDailyStat {
            stat_id,
            source_type: source_type.to_string(),
            owner_user_id: owner_user_id.to_string(),
            profile_id: profile_id.to_string(),
            played_day,
            play_count: next_count,
            updated_at,
        });
    Ok(())
}
/// Records a like for a public work. Returns `Ok(false)` when this user has
/// already liked the work — the like id is deterministic, so the call is
/// idempotent.
pub(crate) fn record_public_work_like(
    ctx: &ReducerContext,
    input: PublicWorkLikeRecordInput,
) -> Result<bool, String> {
    let source_type = input.source_type.trim();
    let owner_user_id = input.owner_user_id.trim();
    let profile_id = input.profile_id.trim();
    let user_id = input.user_id.trim();
    let any_blank = [source_type, owner_user_id, profile_id, user_id]
        .iter()
        .any(|field| field.is_empty());
    if any_blank {
        return Err("public_work_like 参数不能为空".to_string());
    }
    let like_id = build_public_work_like_id(source_type, profile_id, user_id);
    let already_liked = ctx.db.public_work_like().like_id().find(&like_id).is_some();
    if already_liked {
        return Ok(false);
    }
    ctx.db.public_work_like().insert(PublicWorkLike {
        like_id,
        source_type: source_type.to_string(),
        owner_user_id: owner_user_id.to_string(),
        profile_id: profile_id.to_string(),
        user_id: user_id.to_string(),
        liked_at: Timestamp::from_micros_since_unix_epoch(input.liked_at_micros),
    });
    Ok(true)
}
/// Sums play counts for one work over the trailing
/// `PUBLIC_WORK_RECENT_PLAY_WINDOW_DAYS`-day window, inclusive of today.
/// Returns 0 when either key is blank after trimming.
pub(crate) fn count_recent_public_work_plays(
    ctx: &ReducerContext,
    source_type: &str,
    profile_id: &str,
    now_micros: i64,
) -> u32 {
    let source_type = source_type.trim();
    let profile_id = profile_id.trim();
    if source_type.is_empty() || profile_id.is_empty() {
        return 0;
    }
    let current_day = public_work_play_day_from_micros(now_micros);
    // First day of the window; saturating_sub guards the i64::MIN edge.
    let first_day = current_day.saturating_sub(PUBLIC_WORK_RECENT_PLAY_WINDOW_DAYS - 1);
    // NOTE(review): this iterates the whole table even though the
    // by_public_work_play_daily_stat_work_day btree index covers
    // (source_type, profile_id, played_day) — confirm whether the index API
    // supports a range scan here before optimizing.
    ctx.db
        .public_work_play_daily_stat()
        .iter()
        .filter(|row| {
            row.source_type == source_type
                && row.profile_id == profile_id
                && row.played_day >= first_day
                && row.played_day <= current_day
        })
        // saturating_add keeps the u32 total from overflowing.
        .fold(0u32, |total, row| total.saturating_add(row.play_count))
}
/// Converts microseconds since the Unix epoch into a UTC calendar-day bucket.
/// `div_euclid` floors toward negative infinity, so pre-epoch timestamps land
/// in the correct (negative) day instead of rounding toward zero.
fn public_work_play_day_from_micros(value: i64) -> i64 {
    value.div_euclid(PUBLIC_WORK_PLAY_DAY_MICROS)
}
/// Deterministic primary key for one (work, day) play-count bucket.
fn build_public_work_play_daily_stat_id(
    source_type: &str,
    profile_id: &str,
    played_day: i64,
) -> String {
    [source_type, profile_id, &played_day.to_string()].join(":")
}
/// Deterministic like id: one row per (work, user), which makes repeated like
/// calls idempotent.
fn build_public_work_like_id(source_type: &str, profile_id: &str, user_id: &str) -> String {
    format!("{}:{}:{}", source_type, profile_id, user_id)
}
fn ensure_profile_dashboard_state(ctx: &ReducerContext, user_id: &str, updated_at: Timestamp) {
if ctx
.db
@@ -785,12 +1063,20 @@ fn sync_profile_dashboard_from_snapshot(
.as_ref()
.map(|row| row.total_play_time_ms)
.unwrap_or(0);
let next_wallet_balance = module_runtime::read_runtime_json_non_negative_u64(
game_state.and_then(|state| state.get("playerCurrency")),
);
let has_business_wallet_ledger = has_profile_business_wallet_ledger(ctx, &snapshot.user_id);
let synced_wallet_balance = if has_business_wallet_ledger {
None
} else {
game_state
.and_then(|state| state.get("playerCurrency"))
.map(|value| module_runtime::read_runtime_json_non_negative_u64(Some(value)))
};
let next_wallet_balance = synced_wallet_balance.unwrap_or(previous_wallet_balance);
let mut next_total_play_time_ms = previous_total_play_time_ms;
if next_wallet_balance != previous_wallet_balance {
if let Some(next_wallet_balance) = synced_wallet_balance
&& next_wallet_balance != previous_wallet_balance
{
ctx.db.profile_wallet_ledger().insert(ProfileWalletLedger {
wallet_ledger_id: build_runtime_profile_snapshot_wallet_ledger_id(
&snapshot.user_id,
@@ -965,6 +1251,174 @@ pub(crate) fn build_profile_save_archive_snapshot_from_row(
}
}
fn read_string_from_json(value: Option<&JsonValue>) -> Option<String> {
value
.and_then(JsonValue::as_str)
.map(str::trim)
.filter(|value| !value.is_empty())
.map(ToString::to_string)
}
/// Derives world-snapshot metadata (key, type, title, subtitle) from a raw
/// game-state object.
///
/// A `customWorldProfile` entry takes priority; otherwise the built-in
/// `worldType` plus the current scene preset is used. Returns `None` when
/// neither source yields usable data.
fn resolve_profile_world_snapshot_meta(
    game_state: Option<&serde_json::Map<String, JsonValue>>,
) -> Option<RuntimeProfileWorldSnapshotMeta> {
    let game_state = game_state?;
    let custom_world_profile = game_state
        .get("customWorldProfile")
        .and_then(JsonValue::as_object);
    if let Some(custom_world_profile) = custom_world_profile {
        let profile_id = read_string_from_json(custom_world_profile.get("id"));
        let world_title = read_string_from_json(custom_world_profile.get("name"))
            .or_else(|| read_string_from_json(custom_world_profile.get("title")));
        // Either an id or a title is enough to treat this as a custom world.
        if profile_id.is_some() || world_title.is_some() {
            let world_title = world_title.unwrap_or_else(|| "自定义世界".to_string());
            return Some(RuntimeProfileWorldSnapshotMeta {
                // Prefer a stable id-based key; fall back to the title.
                world_key: profile_id
                    .as_ref()
                    .map(|profile_id| format!("custom:{profile_id}"))
                    .unwrap_or_else(|| format!("custom:{world_title}")),
                owner_user_id: None,
                profile_id,
                world_type: Some("CUSTOM".to_string()),
                world_title,
                world_subtitle: read_string_from_json(custom_world_profile.get("summary"))
                    .or_else(|| read_string_from_json(custom_world_profile.get("settingText")))
                    .unwrap_or_default(),
            });
        }
    }
    // Built-in worlds require a worldType; bail out when it is absent.
    let world_type = read_string_from_json(game_state.get("worldType"))?;
    let current_scene_preset = game_state
        .get("currentScenePreset")
        .and_then(JsonValue::as_object);
    Some(RuntimeProfileWorldSnapshotMeta {
        world_key: format!("builtin:{world_type}"),
        owner_user_id: None,
        profile_id: None,
        world_type: Some(world_type.clone()),
        world_title: current_scene_preset
            .and_then(|preset| read_string_from_json(preset.get("name")))
            .unwrap_or_else(|| build_builtin_world_title(&world_type)),
        world_subtitle: current_scene_preset
            .and_then(|preset| {
                read_string_from_json(preset.get("summary"))
                    .or_else(|| read_string_from_json(preset.get("description")))
            })
            .unwrap_or_default(),
    })
}
/// Builds save-archive metadata (name, subtitle, summary, cover) for a
/// runtime snapshot, or `None` when the snapshot must not be persisted or no
/// world metadata can be derived.
///
/// Summary preference order: story-engine continue digest, then the current
/// story text, then the (non-empty) subtitle, then the default summary text.
fn resolve_profile_save_archive_meta(
    game_state: &JsonValue,
    current_story_json: Option<&str>,
) -> Option<RuntimeProfileSaveArchiveMeta> {
    if is_non_persistent_runtime_snapshot(game_state) {
        return None;
    }
    let game_state_object = game_state.as_object();
    let world_meta = resolve_profile_world_snapshot_meta(game_state_object)?;
    let story_engine_memory = game_state_object
        .and_then(|state| state.get("storyEngineMemory"))
        .and_then(JsonValue::as_object);
    let continue_game_digest = story_engine_memory
        .and_then(|memory| read_string_from_json(memory.get("continueGameDigest")));
    // Malformed current-story JSON is ignored rather than treated as an error.
    let current_story_text = parse_optional_json_str(current_story_json)
        .ok()
        .flatten()
        .and_then(|story| story.as_object().cloned())
        .and_then(|story| read_string_from_json(story.get("text")));
    let custom_world_profile = game_state_object
        .and_then(|state| state.get("customWorldProfile"))
        .and_then(JsonValue::as_object);
    // Custom worlds take their name/subtitle/cover from the profile itself,
    // falling back to the generic world meta derived above.
    if let Some(custom_world_profile) = custom_world_profile {
        let world_name = read_string_from_json(custom_world_profile.get("name"))
            .or_else(|| read_string_from_json(custom_world_profile.get("title")))
            .unwrap_or_else(|| world_meta.world_title.clone());
        let subtitle = read_string_from_json(custom_world_profile.get("summary"))
            .or_else(|| read_string_from_json(custom_world_profile.get("settingText")))
            .unwrap_or_else(|| world_meta.world_subtitle.clone());
        let summary_text = continue_game_digest
            .or(current_story_text)
            .or_else(|| {
                if subtitle.is_empty() {
                    None
                } else {
                    Some(subtitle.clone())
                }
            })
            .unwrap_or_else(|| DEFAULT_SAVE_ARCHIVE_SUMMARY_TEXT.to_string());
        return Some(RuntimeProfileSaveArchiveMeta {
            world_key: world_meta.world_key,
            owner_user_id: world_meta.owner_user_id,
            profile_id: world_meta.profile_id,
            world_type: world_meta.world_type,
            world_name,
            subtitle,
            summary_text,
            cover_image_src: read_string_from_json(custom_world_profile.get("coverImageSrc")),
        });
    }
    // Built-in worlds: same summary fallback chain, cover from the scene preset.
    let summary_text = continue_game_digest
        .or(current_story_text)
        .or_else(|| {
            if world_meta.world_subtitle.is_empty() {
                None
            } else {
                Some(world_meta.world_subtitle.clone())
            }
        })
        .unwrap_or_else(|| DEFAULT_SAVE_ARCHIVE_SUMMARY_TEXT.to_string());
    let current_scene_preset = game_state_object
        .and_then(|state| state.get("currentScenePreset"))
        .and_then(JsonValue::as_object);
    Some(RuntimeProfileSaveArchiveMeta {
        world_key: world_meta.world_key,
        owner_user_id: world_meta.owner_user_id,
        profile_id: world_meta.profile_id,
        world_type: world_meta.world_type,
        world_name: world_meta.world_title,
        subtitle: world_meta.world_subtitle.clone(),
        summary_text,
        cover_image_src: current_scene_preset
            .and_then(|preset| read_string_from_json(preset.get("imageSrc"))),
    })
}
/// Returns true when a game-state snapshot must not be archived: either the
/// explicit `runtimePersistenceDisabled` flag is set, or the snapshot was
/// taken in a transient `preview`/`test` runtime mode.
fn is_non_persistent_runtime_snapshot(game_state: &JsonValue) -> bool {
    // Non-object snapshots are treated as persistable by default.
    let Some(state) = game_state.as_object() else {
        return false;
    };
    if state
        .get("runtimePersistenceDisabled")
        .and_then(JsonValue::as_bool)
        == Some(true)
    {
        return true;
    }
    matches!(
        read_string_from_json(state.get("runtimeMode")).as_deref(),
        Some("preview" | "test")
    )
}
/// Maps a built-in world-type code to its display title; any unrecognized
/// type falls back to the generic narrative-world title.
fn build_builtin_world_title(world_type: &str) -> String {
    let title = match world_type {
        "WUXIA" => "武侠世界",
        "XIANXIA" => "仙侠世界",
        _ => "叙事世界",
    };
    title.to_string()
}
fn get_profile_dashboard_snapshot(
ctx: &ReducerContext,
input: RuntimeProfileDashboardGetInput,
@@ -1215,10 +1669,14 @@ fn redeem_profile_referral_invite_code_record(
),
bound_at,
)?;
let today_inviter_reward_count =
count_today_profile_referral_inviter_rewards(ctx, &inviter_code.user_id, bound_at);
let inviter_reward_granted =
should_grant_runtime_profile_inviter_reward(today_inviter_reward_count);
let is_admin_invite_code = is_admin_profile_invite_code_user_id(&inviter_code.user_id);
let today_inviter_reward_count = if is_admin_invite_code {
0
} else {
count_today_profile_referral_inviter_rewards(ctx, &inviter_code.user_id, bound_at)
};
let inviter_reward_granted = !is_admin_invite_code
&& module_runtime::should_grant_runtime_profile_inviter_reward(today_inviter_reward_count);
let inviter_balance_after = if inviter_reward_granted {
apply_profile_wallet_delta(
ctx,
@@ -1410,6 +1868,56 @@ fn admin_disable_profile_redeem_code_record(
Ok(build_profile_redeem_code_snapshot_from_row(&inserted))
}
/// Admin upsert of a profile invite code: validates the payload, refuses to
/// overwrite a code owned by a different synthetic admin user id, and
/// (re)inserts the row. An existing row keeps its original creation time;
/// a fresh row is stamped with the update time.
fn admin_upsert_profile_invite_code_record(
    ctx: &ReducerContext,
    input: RuntimeProfileInviteCodeAdminUpsertInput,
) -> Result<RuntimeProfileInviteCodeSnapshot, String> {
    let validated_input = build_runtime_profile_invite_code_admin_upsert_input(
        input.admin_user_id,
        input.invite_code,
        input.metadata_json,
        input.updated_at_micros,
    )
    .map_err(|error| error.to_string())?;
    let updated_at = Timestamp::from_micros_since_unix_epoch(validated_input.updated_at_micros);
    let user_id = build_admin_profile_invite_code_user_id(
        &validated_input.admin_user_id,
        &validated_input.invite_code,
    );
    let existing = ctx
        .db
        .profile_invite_code()
        .invite_code()
        .find(&validated_input.invite_code);
    let created_at = match existing {
        Some(existing) => {
            if existing.user_id != user_id {
                return Err("邀请码已被其他用户占用".to_string());
            }
            // Replace-by-delete: the table is keyed by user_id, so the old
            // row is dropped before re-inserting the updated one.
            ctx.db
                .profile_invite_code()
                .user_id()
                .delete(&existing.user_id);
            existing.created_at
        }
        None => updated_at,
    };
    let inserted = ctx.db.profile_invite_code().insert(ProfileInviteCode {
        user_id,
        invite_code: validated_input.invite_code,
        metadata_json: validated_input.metadata_json,
        created_at,
        updated_at,
    });
    Ok(build_profile_invite_code_snapshot_from_row(&inserted))
}
fn build_profile_referral_invite_center_snapshot(
ctx: &ReducerContext,
user_id: &str,
@@ -1445,6 +1953,7 @@ fn build_profile_referral_invite_center_snapshot(
today_inviter_reward_remaining: PROFILE_REFERRAL_DAILY_INVITER_REWARD_LIMIT
.saturating_sub(today_inviter_reward_count),
reward_points: PROFILE_REFERRAL_REWARD_POINTS,
invited_users: list_profile_referral_invited_users(ctx, user_id),
has_redeemed_code: bound_relation.is_some(),
bound_inviter_user_id: bound_relation
.as_ref()
@@ -1456,6 +1965,50 @@ fn build_profile_referral_invite_center_snapshot(
}
}
/// Collects the most recently bound invitees for the referral panel.
/// The panel only shows recent successful invites; full totals remain on the
/// dedicated counter fields.
fn list_profile_referral_invited_users(
    ctx: &ReducerContext,
    inviter_user_id: &str,
) -> Vec<RuntimeReferralInvitedUserSnapshot> {
    let inviter_user_id = inviter_user_id.to_string();
    let mut relations: Vec<_> = ctx
        .db
        .profile_referral_relation()
        .by_profile_referral_inviter_user_id()
        .filter(&inviter_user_id)
        .collect();
    // Newest bindings first.
    relations.sort_by_key(|relation| {
        std::cmp::Reverse(relation.bound_at.to_micros_since_unix_epoch())
    });
    let mut invited = Vec::new();
    for relation in relations
        .into_iter()
        .take(PROFILE_REFERRAL_INVITED_USERS_LIMIT)
    {
        let account = ctx
            .db
            .user_account()
            .user_id()
            .find(&relation.invitee_user_id);
        // Blank display names fall back to the generic "player" label.
        let display_name = account
            .as_ref()
            .map(|user| user.display_name.trim())
            .filter(|name| !name.is_empty())
            .unwrap_or("玩家")
            .to_string();
        invited.push(RuntimeReferralInvitedUserSnapshot {
            user_id: relation.invitee_user_id,
            display_name,
            avatar_url: account.and_then(|user| user.avatar_url),
            bound_at_micros: relation.bound_at.to_micros_since_unix_epoch(),
        });
    }
    invited
}
fn ensure_profile_invite_code(ctx: &ReducerContext, user_id: &str) -> ProfileInviteCode {
if let Some(row) = ctx
.db
@@ -1482,6 +2035,7 @@ fn ensure_profile_invite_code(ctx: &ReducerContext, user_id: &str) -> ProfileInv
ctx.db.profile_invite_code().insert(ProfileInviteCode {
user_id: user_id.to_string(),
invite_code,
metadata_json: PROFILE_INVITE_CODE_METADATA_DEFAULT_JSON.to_string(),
created_at: ctx.timestamp,
updated_at: ctx.timestamp,
})
@@ -1504,6 +2058,14 @@ fn count_today_profile_referral_inviter_rewards(
.count() as u32
}
/// Detects the synthetic `admin:` user-id namespace used for admin-managed
/// invite codes (see `build_admin_profile_invite_code_user_id`).
fn is_admin_profile_invite_code_user_id(user_id: &str) -> bool {
    user_id.strip_prefix("admin:").is_some()
}
/// Builds the synthetic owner id `admin:<admin_user_id>:<invite_code>` for an
/// admin-managed invite code row.
fn build_admin_profile_invite_code_user_id(admin_user_id: &str, invite_code: &str) -> String {
    format!("admin:{admin_user_id}:{invite_code}")
}
fn profile_wallet_balance(ctx: &ReducerContext, user_id: &str) -> u64 {
ctx.db
.profile_dashboard_state()
@@ -1513,6 +2075,42 @@ fn profile_wallet_balance(ctx: &ReducerContext, user_id: &str) -> u64 {
.unwrap_or(0)
}
/// Ledger id for the one-time registration reward: `<prefix>:<user_id>`.
/// Deterministic per user, which makes the grant idempotent.
fn build_new_user_registration_wallet_ledger_id(user_id: &str) -> String {
    format!("{}:{}", PROFILE_NEW_USER_REGISTRATION_LEDGER_PREFIX, user_id)
}
/// Grants the one-time new-user wallet reward (if not granted yet) and
/// returns the dashboard snapshot. Idempotency is enforced by the
/// deterministic per-user ledger id: the delta is only applied when no
/// ledger row with that id exists.
fn grant_new_user_registration_wallet_reward_tx(
    ctx: &ReducerContext,
    input: RuntimeProfileDashboardGetInput,
) -> Result<RuntimeProfileDashboardSnapshot, String> {
    let validated_input = build_runtime_profile_dashboard_get_input(input.user_id)
        .map_err(|error| error.to_string())?;
    let user_id = validated_input.user_id;
    let ledger_id = build_new_user_registration_wallet_ledger_id(&user_id);
    let already_granted = ctx
        .db
        .profile_wallet_ledger()
        .wallet_ledger_id()
        .find(&ledger_id)
        .is_some();
    if !already_granted {
        apply_profile_wallet_delta(
            ctx,
            &user_id,
            PROFILE_NEW_USER_INITIAL_WALLET_POINTS,
            RuntimeProfileWalletLedgerSourceType::NewUserRegistrationReward,
            &ledger_id,
            ctx.timestamp,
        )?;
    }
    get_profile_dashboard_snapshot(ctx, RuntimeProfileDashboardGetInput { user_id })
}
fn build_profile_recharge_center_snapshot(
ctx: &ReducerContext,
user_id: &str,
@@ -1645,6 +2243,24 @@ fn apply_profile_wallet_delta(
)
}
/// Crate-visible entry point for crediting wallet points to a user.
///
/// Thin delegation to the private `apply_profile_wallet_delta`, exposed so
/// sibling modules can record a credit (`amount_delta` points of
/// `source_type` under the idempotency key `ledger_id` at `created_at`)
/// without reaching the internal helper. Presumably returns the post-credit
/// wallet balance (callers of the underlying helper bind it as
/// `*_balance_after`) — confirm against `apply_profile_wallet_delta`.
pub(crate) fn grant_profile_wallet_points(
    ctx: &ReducerContext,
    user_id: &str,
    amount_delta: u64,
    source_type: RuntimeProfileWalletLedgerSourceType,
    ledger_id: &str,
    created_at: Timestamp,
) -> Result<u64, String> {
    apply_profile_wallet_delta(
        ctx,
        user_id,
        amount_delta,
        source_type,
        ledger_id,
        created_at,
    )
}
fn apply_profile_wallet_adjustment(
ctx: &ReducerContext,
input: RuntimeProfileWalletAdjustmentInput,
@@ -1762,6 +2378,13 @@ fn has_profile_points_recharged(ctx: &ReducerContext, user_id: &str) -> bool {
})
}
/// True when the user has at least one wallet ledger row from a source other
/// than snapshot sync, i.e. a row produced by real business activity.
fn has_profile_business_wallet_ledger(ctx: &ReducerContext, user_id: &str) -> bool {
    ctx.db
        .profile_wallet_ledger()
        .iter()
        .filter(|row| row.user_id == user_id)
        .any(|row| row.source_type != RuntimeProfileWalletLedgerSourceType::SnapshotSync)
}
fn latest_profile_recharge_order(
ctx: &ReducerContext,
user_id: &str,
@@ -1837,6 +2460,18 @@ fn build_profile_redeem_code_snapshot_from_row(
}
}
fn build_profile_invite_code_snapshot_from_row(
row: &ProfileInviteCode,
) -> RuntimeProfileInviteCodeSnapshot {
RuntimeProfileInviteCodeSnapshot {
user_id: row.user_id.clone(),
invite_code: row.invite_code.clone(),
metadata_json: row.metadata_json.clone(),
created_at_micros: row.created_at.to_micros_since_unix_epoch(),
updated_at_micros: row.updated_at.to_micros_since_unix_epoch(),
}
}
fn build_profile_wallet_ledger_snapshot_from_row(
row: &ProfileWalletLedger,
) -> RuntimeProfileWalletLedgerEntrySnapshot {