From 6310c2f8c8c22f58edfa8a677c09b162b0d1fca4 Mon Sep 17 00:00:00 2001
From: Gabriel Tofvesson
Date: Mon, 12 Jun 2023 16:03:07 +0200
Subject: [PATCH] Implement optional author name for ChatMessage

---
 src/chat.rs | 8 ++++++--
 src/lib.rs  | 4 ++--
 2 files changed, 8 insertions(+), 4 deletions(-)

diff --git a/src/chat.rs b/src/chat.rs
index b7f16b6..a16d441 100644
--- a/src/chat.rs
+++ b/src/chat.rs
@@ -46,13 +46,17 @@ impl<'de> Deserialize<'de> for Role {
 pub struct ChatMessage {
     pub role: Role,
     pub content: String,
+
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub name: Option<String>,
 }
 
 impl ChatMessage {
-    pub fn new(role: Role, message: impl Into<String>) -> Self {
+    pub fn new(role: Role, message: impl Into<String>, name: Option<String>) -> Self {
         Self {
             role,
-            content: message.into()
+            content: message.into(),
+            name
         }
     }
 }
diff --git a/src/lib.rs b/src/lib.rs
index 8dd97fe..c1e190c 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -76,7 +76,7 @@ mod tests {
         println!("Generating completion for prompt: {PROMPT}");
         let completion = ctx.create_chat_completion_sync(
             ChatHistoryBuilder::default()
-                .messages(vec![ChatMessage::new(Role::User, PROMPT)])
+                .messages(vec![ChatMessage::new(Role::User, PROMPT, None)])
                 .model("gpt-3.5-turbo")
         ).await;
@@ -89,7 +89,7 @@ mod tests {
         println!("Generating streamed completion for prompt: {PROMPT}");
         let completion = ctx.create_chat_completion_streamed(
             ChatHistoryBuilder::default()
-                .messages(vec![ChatMessage::new(Role::User, PROMPT)])
+                .messages(vec![ChatMessage::new(Role::User, PROMPT, None)])
                 .model("gpt-3.5-turbo")
         ).await;