拼图和大鱼吃小鱼补充游玩记录
Some checks failed
CI / verify (push) Has been cancelled

This commit is contained in:
2026-04-28 11:11:01 +08:00
parent a9febe7678
commit 3cdbf36859
27 changed files with 419 additions and 48 deletions

View File

@@ -35,7 +35,7 @@ use crate::{
big_fish::{
create_big_fish_session, delete_big_fish_work, execute_big_fish_action,
get_big_fish_session, get_big_fish_works, list_big_fish_gallery, stream_big_fish_message,
submit_big_fish_message,
record_big_fish_play, submit_big_fish_message,
},
character_animation_assets::{
generate_character_animation, get_character_animation_job, get_character_workflow_cache,
@@ -83,8 +83,7 @@ use crate::{
get_puzzle_agent_session, get_puzzle_gallery_detail, get_puzzle_run,
get_puzzle_work_detail, get_puzzle_works, list_puzzle_gallery, put_puzzle_work,
start_puzzle_run, stream_puzzle_agent_message, submit_puzzle_agent_message,
submit_puzzle_leaderboard,
swap_puzzle_pieces,
submit_puzzle_leaderboard, swap_puzzle_pieces,
},
refresh_session::refresh_session,
request_context::{attach_request_context, resolve_request_id},
@@ -575,6 +574,10 @@ pub fn build_router(state: AppState) -> Router {
require_bearer_auth,
)),
)
.route(
"/api/runtime/big-fish/works/{session_id}/play",
post(record_big_fish_play),
)
.route(
"/api/runtime/puzzle/agent/sessions",
post(create_puzzle_agent_session).route_layer(middleware::from_fn_with_state(

View File

@@ -191,6 +191,32 @@ pub async fn delete_big_fish_work(
))
}
/// POST handler: records one play of a published big-fish work identified by
/// `session_id`, then returns the refreshed list of work summaries.
///
/// NOTE(review): the route registration for this handler does not appear to
/// attach the bearer-auth middleware that sibling runtime routes use —
/// confirm that anonymous play counting is intentional.
pub async fn record_big_fish_play(
    State(state): State<AppState>,
    Path(session_id): Path<String>,
    Extension(request_context): Extension<RequestContext>,
) -> Result<Json<Value>, Response> {
    // Reject blank/whitespace-only session ids before calling SpacetimeDB.
    ensure_non_empty(&request_context, &session_id, "sessionId")?;
    let items = state
        .spacetime_client()
        // The play timestamp is taken server-side, so clients cannot forge it.
        .record_big_fish_play(session_id, current_utc_micros())
        .await
        .map_err(|error| {
            big_fish_error_response(&request_context, map_big_fish_client_error(error))
        })?;
    Ok(json_success_body(
        Some(&request_context),
        BigFishWorksResponse {
            items: items
                .into_iter()
                .map(map_big_fish_work_summary_response)
                .collect(),
        },
    ))
}
pub async fn submit_big_fish_message(
State(state): State<AppState>,
Path(session_id): Path<String>,
@@ -924,6 +950,7 @@ fn map_big_fish_work_summary_response(
level_main_image_ready_count: item.level_main_image_ready_count,
level_motion_ready_count: item.level_motion_ready_count,
background_ready: item.background_ready,
play_count: item.play_count,
}
}

View File

@@ -221,6 +221,7 @@ pub struct BigFishWorkSummarySnapshot {
pub level_main_image_ready_count: u32,
pub level_motion_ready_count: u32,
pub background_ready: bool,
pub play_count: u32,
}
#[cfg_attr(feature = "spacetime-types", derive(SpacetimeType))]
@@ -316,6 +317,13 @@ pub struct BigFishPublishInput {
pub published_at_micros: i64,
}
/// Input payload for the `record_big_fish_play` procedure.
#[cfg_attr(feature = "spacetime-types", derive(SpacetimeType))]
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct BigFishPlayRecordInput {
    // Id of the published creation session whose play count is bumped.
    pub session_id: String,
    // Play timestamp, microseconds since the Unix epoch.
    pub played_at_micros: i64,
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum BigFishFieldError {
MissingSessionId,
@@ -654,6 +662,13 @@ pub fn validate_publish_input(input: &BigFishPublishInput) -> Result<(), BigFish
validate_session_owner(&input.session_id, &input.owner_user_id)
}
/// Validates a play-record input: the session id must survive required-string
/// normalization. `played_at_micros` is accepted as-is.
pub fn validate_play_record_input(input: &BigFishPlayRecordInput) -> Result<(), BigFishFieldError> {
    match normalize_required_string(&input.session_id) {
        Some(_) => Ok(()),
        None => Err(BigFishFieldError::MissingSessionId),
    }
}
pub fn serialize_anchor_pack(anchor_pack: &BigFishAnchorPack) -> Result<String, serde_json::Error> {
serde_json::to_string(anchor_pack)
}
@@ -861,5 +876,4 @@ mod tests {
);
assert!(coverage.blockers.iter().any(|item| item.contains("背景图")));
}
}

View File

@@ -1964,14 +1964,18 @@ fn with_next_board(run: &PuzzleRunSnapshot, next_board: PuzzleBoardSnapshot) ->
if current_level.status != PuzzleRuntimeLevelStatus::Cleared && is_cleared {
let cleared_at_ms = current_unix_ms();
current_level.cleared_at_ms = Some(cleared_at_ms);
current_level.elapsed_ms =
Some(cleared_at_ms.saturating_sub(current_level.started_at_ms).max(1_000));
current_level.elapsed_ms = Some(
cleared_at_ms
.saturating_sub(current_level.started_at_ms)
.max(1_000),
);
}
current_level.status = next_level_status;
}
if is_cleared && run.current_level.as_ref().map(|level| level.status)
!= Some(PuzzleRuntimeLevelStatus::Cleared)
if is_cleared
&& run.current_level.as_ref().map(|level| level.status)
!= Some(PuzzleRuntimeLevelStatus::Cleared)
{
next_run.cleared_level_count += 1;
}

View File

@@ -18,6 +18,8 @@ pub struct BigFishWorkSummaryResponse {
pub level_main_image_ready_count: u32,
pub level_motion_ready_count: u32,
pub background_ready: bool,
#[serde(default)]
pub play_count: u32,
}
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]

View File

@@ -1,6 +1,7 @@
use super::*;
use crate::mapper::*;
use crate::module_bindings::delete_big_fish_work_procedure::delete_big_fish_work;
use crate::module_bindings::record_big_fish_play_procedure::record_big_fish_play;
impl SpacetimeClient {
pub async fn create_big_fish_session(
@@ -131,6 +132,30 @@ impl SpacetimeClient {
.await
}
    /// Invokes the `record_big_fish_play` procedure and maps its result into
    /// `BigFishWorkSummaryRecord`s.
    pub async fn record_big_fish_play(
        &self,
        session_id: String,
        played_at_micros: i64,
    ) -> Result<Vec<BigFishWorkSummaryRecord>, SpacetimeClientError> {
        let procedure_input = BigFishPlayRecordInput {
            session_id,
            played_at_micros,
        };
        // Defer the call until a connection exists; the completion callback
        // maps the procedure outcome and hands it back through `send_once`.
        self.call_after_connect(move |connection, sender| {
            connection.procedures().record_big_fish_play_then(
                procedure_input,
                move |_, result| {
                    let mapped = result
                        .map_err(|error| SpacetimeClientError::Procedure(error.to_string()))
                        .and_then(map_big_fish_works_procedure_result);
                    send_once(&sender, mapped);
                },
            );
        })
        .await
    }
pub async fn submit_big_fish_message(
&self,
input: BigFishMessageSubmitRecordInput,

View File

@@ -30,10 +30,10 @@ pub use mapper::{
PuzzleBoardRecord, PuzzleCellPositionRecord, PuzzleCreatorIntentRecord,
PuzzleGeneratedImageCandidateRecord, PuzzleGeneratedImagesSaveRecordInput,
PuzzleLeaderboardEntryRecord, PuzzleLeaderboardSubmitRecordInput, PuzzleMergedGroupRecord,
PuzzlePieceStateRecord, PuzzlePublishRecordInput,
PuzzleResultDraftRecord, PuzzleResultPreviewBlockerRecord, PuzzleResultPreviewFindingRecord,
PuzzleResultPreviewRecord, PuzzleRunDragRecordInput, PuzzleRunNextLevelRecordInput,
PuzzleRunRecord, PuzzleRunStartRecordInput, PuzzleRunSwapRecordInput, PuzzleRuntimeLevelRecord,
PuzzlePieceStateRecord, PuzzlePublishRecordInput, PuzzleResultDraftRecord,
PuzzleResultPreviewBlockerRecord, PuzzleResultPreviewFindingRecord, PuzzleResultPreviewRecord,
PuzzleRunDragRecordInput, PuzzleRunNextLevelRecordInput, PuzzleRunRecord,
PuzzleRunStartRecordInput, PuzzleRunSwapRecordInput, PuzzleRuntimeLevelRecord,
PuzzleSelectCoverImageRecordInput, PuzzleWorkProfileRecord, PuzzleWorkUpsertRecordInput,
ResolveCombatActionRecord, ResolveNpcBattleInteractionInput,
};

View File

@@ -4606,6 +4606,7 @@ pub struct BigFishWorkSummaryRecord {
pub level_main_image_ready_count: u32,
pub level_motion_ready_count: u32,
pub background_ready: bool,
pub play_count: u32,
}
#[derive(Clone, Debug, PartialEq, Eq, serde::Deserialize)]

View File

@@ -20,6 +20,7 @@ pub struct BigFishCreationSession {
pub asset_coverage_json: String,
pub last_assistant_reply: Option<String>,
pub publish_ready: bool,
pub play_count: u32,
pub created_at: __sdk::Timestamp,
pub updated_at: __sdk::Timestamp,
}
@@ -43,6 +44,7 @@ pub struct BigFishCreationSessionCols {
pub asset_coverage_json: __sdk::__query_builder::Col<BigFishCreationSession, String>,
pub last_assistant_reply: __sdk::__query_builder::Col<BigFishCreationSession, Option<String>>,
pub publish_ready: __sdk::__query_builder::Col<BigFishCreationSession, bool>,
pub play_count: __sdk::__query_builder::Col<BigFishCreationSession, u32>,
pub created_at: __sdk::__query_builder::Col<BigFishCreationSession, __sdk::Timestamp>,
pub updated_at: __sdk::__query_builder::Col<BigFishCreationSession, __sdk::Timestamp>,
}
@@ -68,6 +70,7 @@ impl __sdk::__query_builder::HasCols for BigFishCreationSession {
"last_assistant_reply",
),
publish_ready: __sdk::__query_builder::Col::new(table_name, "publish_ready"),
play_count: __sdk::__query_builder::Col::new(table_name, "play_count"),
created_at: __sdk::__query_builder::Col::new(table_name, "created_at"),
updated_at: __sdk::__query_builder::Col::new(table_name, "updated_at"),
}

View File

@@ -0,0 +1,16 @@
// THIS FILE IS AUTOMATICALLY GENERATED BY SPACETIMEDB. EDITS TO THIS FILE
// WILL NOT BE SAVED. MODIFY TABLES IN YOUR MODULE SOURCE CODE INSTEAD.
#![allow(unused, clippy::all)]
use spacetimedb_sdk::__codegen::{self as __sdk, __lib, __sats, __ws};
// NOTE(review): generated binding — comments added here are lost when the
// SpacetimeDB codegen reruns; document the module-side type instead.
#[derive(__lib::ser::Serialize, __lib::de::Deserialize, Clone, PartialEq, Debug)]
#[sats(crate = __lib)]
pub struct BigFishPlayRecordInput {
    // Id of the creation session being played.
    pub session_id: String,
    // Play timestamp, microseconds since the Unix epoch.
    pub played_at_micros: i64,
}
impl __sdk::InModule for BigFishPlayRecordInput {
    type Module = super::RemoteModule;
}

View File

@@ -89,6 +89,7 @@ pub mod big_fish_game_draft_type;
pub mod big_fish_level_blueprint_type;
pub mod big_fish_message_finalize_input_type;
pub mod big_fish_message_submit_input_type;
pub mod big_fish_play_record_input_type;
pub mod big_fish_publish_input_type;
pub mod big_fish_runtime_params_type;
pub mod big_fish_session_create_input_type;
@@ -331,6 +332,7 @@ pub mod quest_objective_snapshot_type;
pub mod quest_progress_signal_type;
pub mod quest_record_input_type;
pub mod quest_record_type;
pub mod record_big_fish_play_procedure;
pub mod quest_reward_equipment_slot_type;
pub mod quest_reward_intel_type;
pub mod quest_reward_item_rarity_type;
@@ -558,6 +560,7 @@ pub use big_fish_game_draft_type::BigFishGameDraft;
pub use big_fish_level_blueprint_type::BigFishLevelBlueprint;
pub use big_fish_message_finalize_input_type::BigFishMessageFinalizeInput;
pub use big_fish_message_submit_input_type::BigFishMessageSubmitInput;
pub use big_fish_play_record_input_type::BigFishPlayRecordInput;
pub use big_fish_publish_input_type::BigFishPublishInput;
pub use big_fish_runtime_params_type::BigFishRuntimeParams;
pub use big_fish_session_create_input_type::BigFishSessionCreateInput;
@@ -800,6 +803,7 @@ pub use quest_objective_snapshot_type::QuestObjectiveSnapshot;
pub use quest_progress_signal_type::QuestProgressSignal;
pub use quest_record_input_type::QuestRecordInput;
pub use quest_record_type::QuestRecord;
pub use record_big_fish_play_procedure::record_big_fish_play;
pub use quest_reward_equipment_slot_type::QuestRewardEquipmentSlot;
pub use quest_reward_intel_type::QuestRewardIntel;
pub use quest_reward_item_rarity_type::QuestRewardItemRarity;

View File

@@ -0,0 +1,59 @@
// THIS FILE IS AUTOMATICALLY GENERATED BY SPACETIMEDB. EDITS TO THIS FILE
// WILL NOT BE SAVED. MODIFY TABLES IN YOUR MODULE SOURCE CODE INSTEAD.
#![allow(unused, clippy::all)]
use spacetimedb_sdk::__codegen::{self as __sdk, __lib, __sats, __ws};
use super::big_fish_play_record_input_type::BigFishPlayRecordInput;
use super::big_fish_works_procedure_result_type::BigFishWorksProcedureResult;
// NOTE(review): generated binding — comments added here are lost when the
// SpacetimeDB codegen reruns.
#[derive(__lib::ser::Serialize, __lib::de::Deserialize, Clone, PartialEq, Debug)]
#[sats(crate = __lib)]
struct RecordBigFishPlayArgs {
    pub input: BigFishPlayRecordInput,
}
impl __sdk::InModule for RecordBigFishPlayArgs {
    type Module = super::RemoteModule;
}
#[allow(non_camel_case_types)]
/// Extension trait for access to the procedure `record_big_fish_play`.
///
/// Implemented for [`super::RemoteProcedures`].
pub trait record_big_fish_play {
    /// Fire-and-forget invocation: the procedure result is discarded.
    fn record_big_fish_play(&self, input: BigFishPlayRecordInput) {
        self.record_big_fish_play_then(input, |_, _| {});
    }
    /// Invokes the procedure and runs `__callback` with its result.
    fn record_big_fish_play_then(
        &self,
        input: BigFishPlayRecordInput,
        __callback: impl FnOnce(
            &super::ProcedureEventContext,
            Result<BigFishWorksProcedureResult, __sdk::InternalError>,
        ) + Send
        + 'static,
    );
}
impl record_big_fish_play for super::RemoteProcedures {
    fn record_big_fish_play_then(
        &self,
        input: BigFishPlayRecordInput,
        __callback: impl FnOnce(
            &super::ProcedureEventContext,
            Result<BigFishWorksProcedureResult, __sdk::InternalError>,
        ) + Send
        + 'static,
    ) {
        // Dispatch is keyed by the module-side procedure name string.
        self.imp
            .invoke_procedure_with_callback::<_, BigFishWorksProcedureResult>(
                "record_big_fish_play",
                RecordBigFishPlayArgs { input },
                __callback,
            );
    }
}

View File

@@ -478,15 +478,14 @@ impl SpacetimeClient {
};
self.call_after_connect(move |connection, sender| {
connection.procedures().submit_puzzle_leaderboard_entry_then(
procedure_input,
move |_, result| {
connection
.procedures()
.submit_puzzle_leaderboard_entry_then(procedure_input, move |_, result| {
let mapped = result
.map_err(|error| SpacetimeClientError::Procedure(error.to_string()))
.and_then(map_puzzle_run_procedure_result);
send_once(&sender, mapped);
},
);
});
})
.await
}

View File

@@ -108,6 +108,7 @@ pub(crate) fn generate_big_fish_asset_tx(
.map_err(|error| error.to_string())?,
last_assistant_reply: Some(reply.clone()),
publish_ready: coverage.publish_ready,
play_count: session.play_count,
created_at: session.created_at,
updated_at,
};
@@ -164,6 +165,7 @@ pub(crate) fn publish_big_fish_game_tx(
.map_err(|error| error.to_string())?,
last_assistant_reply: Some("玩法已发布,可以进入测试运行态。".to_string()),
publish_ready: true,
play_count: session.play_count,
created_at: session.created_at,
updated_at: published_at,
};

View File

@@ -93,6 +93,32 @@ pub fn delete_big_fish_work(
}
}
#[spacetimedb::procedure]
pub fn record_big_fish_play(
ctx: &mut ProcedureContext,
input: BigFishPlayRecordInput,
) -> BigFishWorksProcedureResult {
match ctx.try_with_tx(|tx| record_big_fish_play_tx(tx, input.clone())) {
Ok(items) => match serde_json::to_string(&items) {
Ok(items_json) => BigFishWorksProcedureResult {
ok: true,
items_json: Some(items_json),
error_message: None,
},
Err(error) => BigFishWorksProcedureResult {
ok: false,
items_json: None,
error_message: Some(error.to_string()),
},
},
Err(message) => BigFishWorksProcedureResult {
ok: false,
items_json: None,
error_message: Some(message),
},
}
}
#[spacetimedb::procedure]
pub fn submit_big_fish_message(
ctx: &mut ProcedureContext,
@@ -194,6 +220,7 @@ pub(crate) fn create_big_fish_session_tx(
.map_err(|error| error.to_string())?,
last_assistant_reply: Some(input.welcome_message_text.clone()),
publish_ready: false,
play_count: 0,
created_at,
updated_at: created_at,
});
@@ -383,6 +410,7 @@ pub(crate) fn submit_big_fish_message_tx(
asset_coverage_json: session.asset_coverage_json.clone(),
last_assistant_reply: session.last_assistant_reply.clone(),
publish_ready: session.publish_ready,
play_count: session.play_count,
created_at: session.created_at,
updated_at: submitted_at,
};
@@ -429,6 +457,7 @@ pub(crate) fn finalize_big_fish_agent_message_turn_tx(
asset_coverage_json: session.asset_coverage_json.clone(),
last_assistant_reply: session.last_assistant_reply.clone(),
publish_ready: session.publish_ready,
play_count: session.play_count,
created_at: session.created_at,
updated_at,
};
@@ -483,6 +512,7 @@ pub(crate) fn finalize_big_fish_agent_message_turn_tx(
asset_coverage_json: session.asset_coverage_json.clone(),
last_assistant_reply: Some(assistant_reply_text),
publish_ready: session.publish_ready,
play_count: session.play_count,
created_at: session.created_at,
updated_at,
};
@@ -530,6 +560,7 @@ pub(crate) fn compile_big_fish_draft_tx(
.map_err(|error| error.to_string())?,
last_assistant_reply: Some(reply.clone()),
publish_ready: coverage.publish_ready,
play_count: session.play_count,
created_at: session.created_at,
updated_at: compiled_at,
};
@@ -657,9 +688,51 @@ pub(crate) fn build_big_fish_work_summary(
level_main_image_ready_count: coverage.level_main_image_ready_count,
level_motion_ready_count: coverage.level_motion_ready_count,
background_ready: coverage.background_ready,
play_count: row.play_count,
})
}
/// Transaction body for `record_big_fish_play`: increments the play counter
/// of a *published* creation session, then returns the full list of
/// published work summaries.
pub(crate) fn record_big_fish_play_tx(
    ctx: &ReducerContext,
    input: BigFishPlayRecordInput,
) -> Result<Vec<BigFishWorkSummarySnapshot>, String> {
    validate_play_record_input(&input).map_err(|error| error.to_string())?;
    // Only sessions that reached the Published stage accept play records;
    // any other stage is reported as "published work not found".
    let session = ctx
        .db
        .big_fish_creation_session()
        .session_id()
        .find(&input.session_id)
        .filter(|row| row.stage == BigFishCreationStage::Published)
        .ok_or_else(|| "big_fish 已发布作品不存在".to_string())?;
    let played_at = Timestamp::from_micros_since_unix_epoch(input.played_at_micros);
    // Rebuild the row field-by-field; only play_count and updated_at change.
    let next_session = BigFishCreationSession {
        session_id: session.session_id.clone(),
        owner_user_id: session.owner_user_id.clone(),
        seed_text: session.seed_text.clone(),
        current_turn: session.current_turn,
        progress_percent: session.progress_percent,
        stage: session.stage,
        anchor_pack_json: session.anchor_pack_json.clone(),
        draft_json: session.draft_json.clone(),
        asset_coverage_json: session.asset_coverage_json.clone(),
        last_assistant_reply: session.last_assistant_reply.clone(),
        publish_ready: session.publish_ready,
        // Only plays of formally published works are counted here; test runs
        // from the creation result page do not go through this procedure.
        // saturating_add guards against u32 wraparound.
        play_count: session.play_count.saturating_add(1),
        created_at: session.created_at,
        updated_at: played_at,
    };
    replace_big_fish_session(ctx, &session, next_session);
    // Empty owner id + published_only lists every published work —
    // presumably matching the public works listing; confirm against callers.
    list_big_fish_works_tx(
        ctx,
        BigFishWorksListInput {
            owner_user_id: String::new(),
            published_only: true,
        },
    )
}
pub(crate) fn replace_big_fish_session(
ctx: &ReducerContext,
current: &BigFishCreationSession,
@@ -693,6 +766,7 @@ mod tests {
asset_coverage_json: "{}".to_string(),
last_assistant_reply: Some("欢迎来到大鱼吃小鱼共创。".to_string()),
publish_ready: false,
play_count: 0,
created_at: Timestamp::from_micros_since_unix_epoch(1),
updated_at: Timestamp::from_micros_since_unix_epoch(1),
}

View File

@@ -17,6 +17,7 @@ pub struct BigFishCreationSession {
pub(crate) asset_coverage_json: String,
pub(crate) last_assistant_reply: Option<String>,
pub(crate) publish_ready: bool,
pub(crate) play_count: u32,
pub(crate) created_at: Timestamp,
pub(crate) updated_at: Timestamp,
}

View File

@@ -659,6 +659,19 @@ where
Ok(wrapped.0)
}
/// Backfills fields that older migration snapshots predate, so row
/// deserialization does not fail on missing JSON keys.
fn normalize_migration_row(table_name: &str, value: &serde_json::Value) -> serde_json::Value {
    let mut next_value = value.clone();
    // Only big_fish_creation_session rows need normalization today.
    if table_name != "big_fish_creation_session" {
        return next_value;
    }
    if let Some(object) = next_value.as_object_mut() {
        // Older migration bundles carry no public play-count field; default
        // it to 0 on import, matching a freshly created work.
        object
            .entry("play_count".to_string())
            .or_insert(serde_json::Value::from(0));
    }
    next_value
}
fn insert_migration_table_rows(
ctx: &ReducerContext,
table: &MigrationTable,
@@ -672,7 +685,8 @@ fn insert_migration_table_rows(
let mut imported = 0u64;
let mut skipped = 0u64;
for value in &table.rows {
let row = row_from_json(value)
let normalized_value = normalize_migration_row(stringify!($table), value);
let row = row_from_json(&normalized_value)
.map_err(|error| format!("{}: {error}", stringify!($table)))?;
let insert_result = ctx.db
.$table()

View File

@@ -3,10 +3,10 @@ use module_puzzle::{
PuzzleAgentMessageRole, PuzzleAgentMessageSnapshot, PuzzleAgentSessionCreateInput,
PuzzleAgentSessionGetInput, PuzzleAgentSessionProcedureResult, PuzzleAgentSessionSnapshot,
PuzzleAgentStage, PuzzleAnchorPack, PuzzleDraftCompileInput, PuzzleGeneratedImageCandidate,
PuzzleGeneratedImagesSaveInput, PuzzlePublicationStatus, PuzzlePublishInput, PuzzleResultDraft,
PuzzleLeaderboardEntry, PuzzleLeaderboardSubmitInput, PuzzleRunDragInput, PuzzleRunGetInput,
PuzzleRunNextLevelInput, PuzzleRunProcedureResult, PuzzleRunSnapshot, PuzzleRunStartInput,
PuzzleRunSwapInput, PuzzleRuntimeLevelStatus, PuzzleSelectCoverImageInput,
PuzzleGeneratedImagesSaveInput, PuzzleLeaderboardEntry, PuzzleLeaderboardSubmitInput,
PuzzlePublicationStatus, PuzzlePublishInput, PuzzleResultDraft, PuzzleRunDragInput,
PuzzleRunGetInput, PuzzleRunNextLevelInput, PuzzleRunProcedureResult, PuzzleRunSnapshot,
PuzzleRunStartInput, PuzzleRunSwapInput, PuzzleRuntimeLevelStatus, PuzzleSelectCoverImageInput,
PuzzleWorkDeleteInput, PuzzleWorkGetInput, PuzzleWorkProcedureResult, PuzzleWorkProfile,
PuzzleWorkUpsertInput, PuzzleWorksListInput, PuzzleWorksProcedureResult,
apply_publish_overrides_to_draft, apply_selected_candidate, build_result_preview,
@@ -1689,12 +1689,7 @@ fn upsert_puzzle_leaderboard_entry(
) {
let entry_id = build_puzzle_leaderboard_entry_id(user_id, profile_id, grid_size);
let updated_at = Timestamp::from_micros_since_unix_epoch(updated_at_micros);
if let Some(existing) = ctx
.db
.puzzle_leaderboard_entry()
.entry_id()
.find(&entry_id)
{
if let Some(existing) = ctx.db.puzzle_leaderboard_entry().entry_id().find(&entry_id) {
let should_replace = elapsed_ms < existing.best_elapsed_ms
|| (elapsed_ms == existing.best_elapsed_ms
&& updated_at.to_micros_since_unix_epoch()
@@ -1725,16 +1720,18 @@ fn upsert_puzzle_leaderboard_entry(
return;
}
ctx.db.puzzle_leaderboard_entry().insert(PuzzleLeaderboardEntryRow {
entry_id,
profile_id: profile_id.to_string(),
grid_size,
user_id: user_id.to_string(),
nickname: nickname.to_string(),
best_elapsed_ms: elapsed_ms,
last_run_id: run_id.to_string(),
updated_at,
});
ctx.db
.puzzle_leaderboard_entry()
.insert(PuzzleLeaderboardEntryRow {
entry_id,
profile_id: profile_id.to_string(),
grid_size,
user_id: user_id.to_string(),
nickname: nickname.to_string(),
best_elapsed_ms: elapsed_ms,
last_run_id: run_id.to_string(),
updated_at,
});
}
fn list_puzzle_leaderboard_entries(
@@ -1799,8 +1796,8 @@ fn deserialize_run(value: &str) -> Result<PuzzleRunSnapshot, String> {
mod tests {
use super::*;
use module_puzzle::{
build_generated_candidates, empty_anchor_pack, recommendation_score, tag_similarity_score,
PuzzleLeaderboardEntry,
PuzzleLeaderboardEntry, build_generated_candidates, empty_anchor_pack,
recommendation_score, tag_similarity_score,
};
#[test]