diff --git a/src/chat.rs b/src/chat.rs
index b7f16b6..a16d441 100644
--- a/src/chat.rs
+++ b/src/chat.rs
@@ -46,13 +46,17 @@ impl<'de> Deserialize<'de> for Role {
 pub struct ChatMessage {
     pub role: Role,
     pub content: String,
+
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub name: Option<String>,
 }
 
 impl ChatMessage {
-    pub fn new(role: Role, message: impl Into<String>) -> Self {
+    pub fn new(role: Role, message: impl Into<String>, name: Option<String>) -> Self {
         Self {
             role,
-            content: message.into()
+            content: message.into(),
+            name,
         }
     }
 }
diff --git a/src/lib.rs b/src/lib.rs
index 8dd97fe..c1e190c 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -76,7 +76,7 @@ mod tests {
         println!("Generating completion for prompt: {PROMPT}");
         let completion = ctx.create_chat_completion_sync(
             ChatHistoryBuilder::default()
-                .messages(vec![ChatMessage::new(Role::User, PROMPT)])
+                .messages(vec![ChatMessage::new(Role::User, PROMPT, None)])
                 .model("gpt-3.5-turbo")
         ).await;
 
@@ -89,7 +89,7 @@ mod tests {
         println!("Generating streamed completion for prompt: {PROMPT}");
         let completion = ctx.create_chat_completion_streamed(
             ChatHistoryBuilder::default()
-                .messages(vec![ChatMessage::new(Role::User, PROMPT)])
+                .messages(vec![ChatMessage::new(Role::User, PROMPT, None)])
                 .model("gpt-3.5-turbo")
         ).await;