// rust-genai/examples/c00-readme.rs

mod support; // For examples support functions (a hedged sketch of `has_env` appears at the end of this file)
use crate::support::{has_env, print_chat_stream};
use genai::chat::{ChatMessage, ChatRequest};
use genai::client::Client;
const MODEL_OPENAI: &str = "gpt-3.5-turbo";
const MODEL_ANTHROPIC: &str = "claude-3-haiku-20240307";
const MODEL_COHERE: &str = "command-light";
const MODEL_OLLAMA: &str = "mixtral";

const MODEL_AND_KEY_ENV_NAME_LIST: &[(&str, &str)] = &[
    // -- de/activate models/providers
    (MODEL_OPENAI, "OPENAI_API_KEY"),
    (MODEL_ANTHROPIC, "ANTHROPIC_API_KEY"),
    (MODEL_COHERE, "COHERE_API_KEY"),
    (MODEL_OLLAMA, ""),
];
// NOTE: Model to AdapterKind (AI Provider) type mapping rule
//  - starts_with "gpt"      -> OpenAI
//  - starts_with "claude"   -> Anthropic
//  - starts_with "command"  -> Cohere
//  - For anything else      -> Ollama
//
// Refined mapping rules will be added as provider support grows.
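
// The function below is only an illustrative sketch of the prefix rule above;
// it is not part of the genai API and is not used by this example (the actual
// model-to-adapter resolution happens inside the genai Client).
#[allow(dead_code)]
fn adapter_kind_name_for(model: &str) -> &'static str {
    if model.starts_with("gpt") {
        "OpenAI"
    } else if model.starts_with("claude") {
        "Anthropic"
    } else if model.starts_with("command") {
        "Cohere"
    } else {
        "Ollama"
    }
}
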
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let question = "Why is the sky red?";

    let chat_req = ChatRequest::new(vec![
        // -- Messages (de/activate to see the differences)
        // ChatMessage::system("Answer in one sentence"),
        ChatMessage::user(question),
    ]);

    let client = Client::default();

    for (model, env_name) in MODEL_AND_KEY_ENV_NAME_LIST {
        // Skip this provider if it requires an API key env var that is not set
        if !env_name.is_empty() && !has_env(env_name) {
            continue;
        }
        println!("\n===== MODEL: {model} =====");
        println!("\n--- Question:\n{question}");

        println!("\n--- Answer: (oneshot response)");
        let chat_res = client.exec_chat(model, chat_req.clone()).await?;
        println!("{}", chat_res.content.as_deref().unwrap_or("NO ANSWER"));

        println!("\n--- Answer: (streaming)");
        let chat_res = client.exec_chat_stream(model, chat_req.clone()).await?;
        print_chat_stream(chat_res).await?;

        println!();
    }

    Ok(())
}
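
// A minimal sketch (an assumption, not the actual examples/support module) of
// what `has_env` might look like: it just checks whether the given environment
// variable is set. `print_chat_stream` is assumed to drain the response stream
// and print chunks as they arrive; its signature depends on the genai stream
// type, so it is not sketched here.
#[allow(dead_code)]
mod support_sketch {
    pub fn has_env(name: &str) -> bool {
        std::env::var(name).is_ok()
    }
}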