mirror of https://github.com/mii443/openai-api-rs.git
Add batch api
examples/batch.rs (Normal file, 60 lines)
@@ -0,0 +1,60 @@
use openai_api_rs::v1::api::OpenAIClient;
use openai_api_rs::v1::batch::CreateBatchRequest;
use openai_api_rs::v1::file::FileUploadRequest;
use serde_json::{from_str, to_string_pretty, Value};
use std::env;
use std::fs::File;
use std::io::Write;
use std::str;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let client = OpenAIClient::new(env::var("OPENAI_API_KEY").unwrap().to_string());

    let req = FileUploadRequest::new(
        "examples/data/batch_request.json".to_string(),
        "batch".to_string(),
    );

    let result = client.upload_file(req).await?;
    println!("File id: {:?}", result.id);

    let input_file_id = result.id;
    let req = CreateBatchRequest::new(
        input_file_id.clone(),
        "/v1/chat/completions".to_string(),
        "24h".to_string(),
    );

    let result = client.create_batch(req).await?;
    println!("Batch id: {:?}", result.id);

    let batch_id = result.id;
    let result = client.retrieve_batch(batch_id.to_string()).await?;
    println!("Batch status: {:?}", result.status);

    // sleep 30 seconds
    println!("Sleeping for 30 seconds...");
    tokio::time::sleep(tokio::time::Duration::from_secs(30)).await;

    let result = client.retrieve_batch(batch_id.to_string()).await?;

    let file_id = result.output_file_id.unwrap();
    let result = client.retrieve_file_content(file_id).await?;
    let s = match str::from_utf8(&result) {
        Ok(v) => v.to_string(),
        Err(e) => panic!("Invalid UTF-8 sequence: {}", e),
    };
    let json_value: Value = from_str(&s)?;
    let result_json = to_string_pretty(&json_value)?;

    let output_file_path = "examples/data/batch_result.json";
    let mut file = File::create(output_file_path)?;
    file.write_all(result_json.as_bytes())?;

    println!("File written to {:?}", output_file_path);

    Ok(())
}

// OPENAI_API_KEY=xxxx cargo run --package openai-api-rs --example batch
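Note: the example waits a fixed 30 seconds before fetching the result, but a batch created with a 24h completion window can take much longer. A more robust variant polls until the batch reaches a terminal state. The snippet below is only a sketch meant to replace the fixed sleep inside the same main(), using the client and batch_id from above; it assumes the response's status field is a plain String (as the {:?} print suggests) and the usual Batch API terminal states.

    // Poll instead of the fixed 30-second sleep (sketch, assumes `status` is a String).
    loop {
        let batch = client.retrieve_batch(batch_id.to_string()).await?;
        match batch.status.as_str() {
            "completed" => break,
            "failed" | "expired" | "cancelled" => {
                return Err(format!("batch ended with status {}", batch.status).into());
            }
            _ => tokio::time::sleep(tokio::time::Duration::from_secs(30)).await,
        }
    }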
examples/data/batch_request.json (Normal file, 1 line)
@@ -0,0 +1 @@
{"custom_id": "request-1", "method": "POST", "url": "/v1/chat/completions", "body": {"model": "gpt-4o-mini", "messages": [{"role": "system", "content": "You are a helpful assistant."}, {"role": "user", "content": "What is 2+2?"}]}}
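The Batch API input format is JSONL: one request object per line, each with a unique custom_id. The file above holds a single request, so it fits on one line. As a minimal sketch (the helper write_batch_input is hypothetical and not part of this commit or the crate), a multi-request input file could be generated with serde_json like this:

    use serde_json::json;
    use std::fs::File;
    use std::io::Write;

    // Hypothetical helper: write one chat-completions request per line (JSONL).
    fn write_batch_input(path: &str, questions: &[&str]) -> std::io::Result<()> {
        let mut f = File::create(path)?;
        for (i, q) in questions.iter().enumerate() {
            let line = json!({
                "custom_id": format!("request-{}", i + 1),
                "method": "POST",
                "url": "/v1/chat/completions",
                "body": {
                    "model": "gpt-4o-mini",
                    "messages": [
                        {"role": "system", "content": "You are a helpful assistant."},
                        {"role": "user", "content": q}
                    ]
                }
            });
            // serde_json::Value prints as compact single-line JSON.
            writeln!(f, "{}", line)?;
        }
        Ok(())
    }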
examples/data/batch_result.json (Normal file, 33 lines)
@@ -0,0 +1,33 @@
{
  "custom_id": "request-1",
  "error": null,
  "id": "batch_req_403hYy7nMxrxXFWXiwvoLG1q",
  "response": {
    "body": {
      "choices": [
        {
          "finish_reason": "stop",
          "index": 0,
          "logprobs": null,
          "message": {
            "content": "2 + 2 equals 4.",
            "refusal": null,
            "role": "assistant"
          }
        }
      ],
      "created": 1724858089,
      "id": "chatcmpl-A1Efhv97EZNQeHKSLPnTmZex20gf2",
      "model": "gpt-4o-mini-2024-07-18",
      "object": "chat.completion",
      "system_fingerprint": "fp_f33667828e",
      "usage": {
        "completion_tokens": 8,
        "prompt_tokens": 24,
        "total_tokens": 32
      }
    },
    "request_id": "af0bac0d82530234e09bd6b5d9fbf5cf",
    "status_code": 200
  }
}
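The output file mirrors the input format: JSONL, one result object per line. examples/batch.rs parses the downloaded content as a single JSON value, which works here only because the batch held one request. With several requests, the content would need to be parsed line by line, roughly like this sketch (reusing s and from_str from the example above):

    // Sketch: parse a multi-request batch output (JSONL), one result object per line.
    for line in s.lines().filter(|l| !l.trim().is_empty()) {
        let entry: Value = from_str(line)?;
        println!(
            "{} -> status {}",
            entry["custom_id"], entry["response"]["status_code"]
        );
    }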