Add parameter

This commit is contained in:
Dongri Jin
2023-07-10 08:07:28 +09:00
parent 4ff7e51a29
commit cf8f890ad7
4 changed files with 62 additions and 0 deletions

View File

@@ -34,6 +34,18 @@ let req = ChatCompletionRequest {
role: chat_completion::MessageRole::user,
content: String::from("Hello OpenAI!"),
}],
functions: None,
function_call: None,
temperature: None,
top_p: None,
n: None,
stream: None,
stop: None,
max_tokens: None,
presence_penalty: None,
frequency_penalty: None,
logit_bias: None,
user: None,
};
```
@@ -62,6 +74,16 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
}],
functions: None,
function_call: None,
temperature: None,
top_p: None,
n: None,
stream: None,
stop: None,
max_tokens: None,
presence_penalty: None,
frequency_penalty: None,
logit_bias: None,
user: None,
};
let result = client.chat_completion(req).await?;
println!("{:?}", result.choices[0].message.content);

View File

@@ -15,6 +15,16 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
}],
functions: None,
function_call: None,
temperature: None,
top_p: None,
n: None,
stream: None,
stop: None,
max_tokens: None,
presence_penalty: None,
frequency_penalty: None,
logit_bias: None,
user: None,
};
let result = client.chat_completion(req).await?;
println!("{:?}", result.choices[0].message.content);

View File

@@ -48,6 +48,16 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
}),
}]),
function_call: Some("auto".to_string()),
temperature: None,
top_p: None,
n: None,
stream: None,
stop: None,
max_tokens: None,
presence_penalty: None,
frequency_penalty: None,
logit_bias: None,
user: None,
};
let result = client.chat_completion(req).await?;

View File

@@ -21,6 +21,26 @@ pub struct ChatCompletionRequest {
pub functions: Option<Vec<Function>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub function_call: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub temperature: Option<f64>,
#[serde(skip_serializing_if = "Option::is_none")]
pub top_p: Option<f64>,
#[serde(skip_serializing_if = "Option::is_none")]
pub n: Option<i64>,
#[serde(skip_serializing_if = "Option::is_none")]
pub stream: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub stop: Option<Vec<String>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub max_tokens: Option<i64>,
#[serde(skip_serializing_if = "Option::is_none")]
pub presence_penalty: Option<f64>,
#[serde(skip_serializing_if = "Option::is_none")]
pub frequency_penalty: Option<f64>,
#[serde(skip_serializing_if = "Option::is_none")]
pub logit_bias: Option<HashMap<String, i32>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub user: Option<String>,
}
#[derive(Debug, Serialize, Deserialize)]