mirror of
https://github.com/mii443/rust-genai.git
synced 2025-08-22 16:25:27 +00:00
Fix image structure for OpenAI API Implement base64 image support for OpenAI Image Support: Some APIs (Gemini) require mime type for URL and Base64 format Image Support: Update OpenAI and Anthropic API to support new image structure Image Support: Add Gemini 2.0 Flash Experimental support and implement Image support Image Support: Create example with Image support Image Support: Fix rebase issue Image Support: Fix example and make it runnable from cargo
36 lines
1.2 KiB
Rust
36 lines
1.2 KiB
Rust
//! This example demonstrates how to properly attach image to the conversations
|
|
|
|
use genai::chat::printer::print_chat_stream;
|
|
use genai::chat::{ChatMessage, ChatRequest, ContentPart, ImageSource};
|
|
use genai::Client;
|
|
|
|
const MODEL: &str = "gpt-4o-mini";
|
|
|
|
#[tokio::main]
|
|
async fn main() -> Result<(), Box<dyn std::error::Error>> {
|
|
let client = Client::default();
|
|
|
|
let question = "What is in this picture?";
|
|
|
|
let mut chat_req = ChatRequest::default().with_system("Answer in one sentence");
|
|
// This is similar to sending initial system chat messages (which will be cumulative with system chat messages)
|
|
chat_req = chat_req.append_message(ChatMessage::user(
|
|
vec![
|
|
ContentPart::Text(question.to_string()),
|
|
ContentPart::Image {
|
|
content: "https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg".to_string(),
|
|
content_type: "image/png".to_string(),
|
|
source: ImageSource::Url,
|
|
}
|
|
]
|
|
));
|
|
|
|
println!("\n--- Question:\n{question}");
|
|
let chat_res = client.exec_chat_stream(MODEL, chat_req.clone(), None).await?;
|
|
|
|
println!("\n--- Answer: (streaming)");
|
|
let assistant_answer = print_chat_stream(chat_res, None).await?;
|
|
|
|
Ok(())
|
|
}
|