Add embedding models

Dongri Jin
2024-01-26 11:13:24 +09:00
parent 91987abed7
commit 4e6fbcedd5
3 changed files with 11 additions and 8 deletions

View File

@@ -1,14 +1,12 @@
 use openai_api_rs::v1::api::Client;
+use openai_api_rs::v1::common::TEXT_EMBEDDING_3_SMALL;
 use openai_api_rs::v1::embedding::EmbeddingRequest;
 use std::env;
 
 fn main() -> Result<(), Box<dyn std::error::Error>> {
     let client = Client::new(env::var("OPENAI_API_KEY").unwrap().to_string());
 
-    let req = EmbeddingRequest::new(
-        "text-embedding-ada-002".to_string(),
-        "story time".to_string(),
-    );
+    let req = EmbeddingRequest::new(TEXT_EMBEDDING_3_SMALL.to_string(), "story time".to_string());
 
     let result = client.embedding(req)?;
     println!("{:?}", result.data);
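
The updated example only swaps the hard-coded model string for the new constant. A minimal sketch of the same program pointed at the larger model instead, assuming the EmbeddingRequest::new(model, input) signature and Client::embedding call shown in the diff above:

use openai_api_rs::v1::api::Client;
use openai_api_rs::v1::common::TEXT_EMBEDDING_3_LARGE;
use openai_api_rs::v1::embedding::EmbeddingRequest;
use std::env;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Same client setup as the example above: the API key comes from the environment.
    let client = Client::new(env::var("OPENAI_API_KEY").unwrap().to_string());

    // Identical request, but built from the new large-model constant.
    let req = EmbeddingRequest::new(TEXT_EMBEDDING_3_LARGE.to_string(), "story time".to_string());

    let result = client.embedding(req)?;
    println!("{:?}", result.data);
    Ok(())
}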

View File

@@ -158,7 +158,7 @@ pub struct ChatCompletionMessageForResponse {
     pub tool_calls: Option<Vec<ToolCall>>,
 }
 
-#[derive(Debug, Deserialize)]
+#[derive(Debug, Deserialize, Serialize)]
 pub struct ChatCompletionChoice {
     pub index: i64,
     pub message: ChatCompletionMessageForResponse,
@@ -166,7 +166,7 @@ pub struct ChatCompletionChoice {
     pub finish_details: Option<FinishDetails>,
 }
 
-#[derive(Debug, Deserialize)]
+#[derive(Debug, Deserialize, Serialize)]
 pub struct ChatCompletionResponse {
     pub id: String,
     pub object: String,
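
Adding Serialize to these response structs means a ChatCompletionResponse can be written back out as JSON, not only parsed from it. A minimal sketch of what that enables, assuming serde_json is available as a dependency and resp is a hypothetical ChatCompletionResponse already returned by the client:

    // resp: ChatCompletionResponse, e.g. from a prior chat completion call.
    // With the new Serialize derive this round-trips to JSON; before this
    // commit only Deserialize was derived, so this would not compile.
    let json = serde_json::to_string_pretty(&resp)?;
    println!("{}", json);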

View File

@@ -1,6 +1,6 @@
-use serde::Deserialize;
+use serde::{Deserialize, Serialize};
 
-#[derive(Debug, Deserialize)]
+#[derive(Debug, Deserialize, Serialize)]
 pub struct Usage {
     pub prompt_tokens: i32,
     pub completion_tokens: i32,
@@ -45,3 +45,8 @@ pub const GPT4_32K_0314: &str = "gpt-4-32k-0314";
 // https://platform.openai.com/docs/api-reference/images/object
 pub const DALL_E_2: &str = "dall-e-2";
 pub const DALL_E_3: &str = "dall-e-3";
+
+// https://platform.openai.com/docs/guides/embeddings/embedding-models
+pub const TEXT_EMBEDDING_3_SMALL: &str = "text-embedding-3-small";
+pub const TEXT_EMBEDDING_3_LARGE: &str = "text-embedding-3-large";
+pub const TEXT_EMBEDDING_ADA_002: &str = "text-embedding-ada-002";
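
The new constants are plain string model identifiers, so callers can pick a model at runtime instead of hard-coding names. A minimal sketch of a hypothetical helper built on them, assuming the EmbeddingRequest::new(model, input) signature from the example above:

use openai_api_rs::v1::common::{TEXT_EMBEDDING_3_LARGE, TEXT_EMBEDDING_3_SMALL, TEXT_EMBEDDING_ADA_002};
use openai_api_rs::v1::embedding::EmbeddingRequest;

// Hypothetical helper: map a short alias to one of the new model constants,
// falling back to the legacy ada-002 model.
fn embedding_request(alias: &str, input: &str) -> EmbeddingRequest {
    let model = match alias {
        "small" => TEXT_EMBEDDING_3_SMALL,
        "large" => TEXT_EMBEDDING_3_LARGE,
        _ => TEXT_EMBEDDING_ADA_002,
    };
    EmbeddingRequest::new(model.to_string(), input.to_string())
}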