Genarrative/server-rs/crates/shared-contracts/src/llm.rs

use serde::{Deserialize, Serialize};

/// Role of a single chat message, serialized with OpenAI-compatible
/// lowercase names (`system`, `user`, `assistant`).
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum LlmChatMessageRole {
    System,
    User,
    Assistant,
}

/// A single message in a chat-completion conversation.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct LlmChatMessagePayload {
    pub role: LlmChatMessageRole,
    pub content: String,
}

/// Request body for a chat completion, kept compatible with the OpenAI
/// wire format (see the tests below).
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct LlmChatCompletionRequest {
    /// Target model; `None` when the caller does not specify one.
    #[serde(default)]
    pub model: Option<String>,
    /// Whether a streamed response is requested; defaults to `false`.
    #[serde(default)]
    pub stream: bool,
    pub messages: Vec<LlmChatMessagePayload>,
}

/// Simplified chat-completion response. Fields serialize in camelCase,
/// so `finish_reason` appears as `finishReason` on the wire.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct LlmChatCompletionResponse {
    pub id: Option<String>,
    pub model: String,
    pub content: String,
    pub finish_reason: Option<String>,
}

#[cfg(test)]
mod tests {
    use super::*;
    use serde_json::json;

    #[test]
    fn llm_chat_completion_request_keeps_openai_compatible_field_names() {
        let payload = serde_json::to_value(LlmChatCompletionRequest {
            model: Some("doubao-test".to_string()),
            stream: false,
            messages: vec![
                LlmChatMessagePayload {
                    role: LlmChatMessageRole::System,
                    content: "system prompt".to_string(),
                },
                LlmChatMessagePayload {
                    role: LlmChatMessageRole::User,
                    content: "user message".to_string(),
                },
            ],
        })
        .expect("payload should serialize");

        assert_eq!(payload["model"], json!("doubao-test"));
        assert_eq!(payload["stream"], json!(false));
        assert_eq!(payload["messages"][0]["role"], json!("system"));
    }
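
    // Sketch test exercising the `#[serde(default)]` attributes above:
    // a request that omits `model` and `stream` should still deserialize,
    // yielding `None` and `false`. Relies only on documented serde behavior.
    #[test]
    fn llm_chat_completion_request_fills_defaults_for_missing_fields() {
        let request: LlmChatCompletionRequest =
            serde_json::from_str(r#"{"messages": []}"#)
                .expect("minimal request should deserialize");

        assert_eq!(request.model, None);
        assert!(!request.stream);
        assert!(request.messages.is_empty());
    }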
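
    // Sketch test documenting the `rename_all = "camelCase"` behavior of
    // the response struct: `finish_reason` serializes as `finishReason`.
    // The sample values ("resp-1", "stop") are illustrative only.
    #[test]
    fn llm_chat_completion_response_uses_camel_case_field_names() {
        let payload = serde_json::to_value(LlmChatCompletionResponse {
            id: Some("resp-1".to_string()),
            model: "doubao-test".to_string(),
            content: "hello".to_string(),
            finish_reason: Some("stop".to_string()),
        })
        .expect("response should serialize");

        assert_eq!(payload["finishReason"], json!("stop"));
        assert_eq!(payload["id"], json!("resp-1"));
    }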
}