Mirror of https://github.com/mii443/openai-api-rs.git
unofficial OpenAI API client library for Rust
Installation
Cargo.toml
[dependencies]
openai-api-rs = "0.1.5"
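The async examples below drive the client with Tokio, so the application also needs an async runtime. A typical dependency line for that (an assumption here, not part of this crate; adjust the version and features as needed) is:
tokio = { version = "1", features = ["full"] }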
Usage
The library needs to be configured with your account's secret API key, which is available on the OpenAI website. We recommend setting it as an environment variable. Here's an example of initializing the library with the API key loaded from an environment variable and creating a chat completion:
Set the OPENAI_API_KEY environment variable
$ export OPENAI_API_KEY=sk-xxxxxxx
Create client
use openai_api_rs::v1::api::Client;
use std::env;
let client = Client::new(env::var("OPENAI_API_KEY").unwrap());
Create request
use openai_api_rs::v1::chat_completion::{self, ChatCompletionRequest};
let req = ChatCompletionRequest {
model: chat_completion::GPT4.to_string(),
messages: vec![chat_completion::ChatCompletionMessage {
role: chat_completion::MessageRole::user,
content: String::from("Hello OpenAI!"),
}],
};
Send request
let result = client.chat_completion(req).await?;
println!("{:?}", result.choices[0].message.content);
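The ? operator above propagates the library's error to the caller, so it only works inside a function that returns a compatible Result. A minimal sketch of handling the outcome explicitly instead (the error is simply printed with its Debug formatting; no particular error type is assumed):
// Handle success and failure branches explicitly instead of bubbling the error up with `?`.
match client.chat_completion(req).await {
    Ok(result) => println!("{:?}", result.choices[0].message.content),
    Err(e) => eprintln!("chat completion failed: {:?}", e),
}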
Example of chat completion
use openai_api_rs::v1::api::Client;
use openai_api_rs::v1::chat_completion::{self, ChatCompletionRequest};
use std::env;
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
let client = Client::new(env::var("OPENAI_API_KEY").unwrap());
let req = ChatCompletionRequest {
model: chat_completion::GPT4.to_string(),
messages: vec![chat_completion::ChatCompletionMessage {
role: chat_completion::MessageRole::user,
content: String::from("Hello OpenAI!"),
}],
};
let result = client.chat_completion(req).await?;
println!("{:?}", result.choices[0].message.content);
Ok(())
}
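A multi-turn request is just more entries in messages. A minimal sketch reusing the structs shown above; it assumes the MessageRole enum also has a system variant in this version (only user appears in the examples here):
// Prepend a system message to steer the assistant, followed by the user message.
let req = ChatCompletionRequest {
    model: chat_completion::GPT4.to_string(),
    messages: vec![
        chat_completion::ChatCompletionMessage {
            role: chat_completion::MessageRole::system,
            content: String::from("You are a concise assistant."),
        },
        chat_completion::ChatCompletionMessage {
            role: chat_completion::MessageRole::user,
            content: String::from("Hello OpenAI!"),
        },
    ],
};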
Check out the full API documentation for examples of all the available functions.
Supported APIs
Completion
Chat