mirror of https://github.com/mii443/rust-genai.git (synced 2025-08-22 16:25:27 +00:00)

test - add tests for image support
@@ -32,4 +32,5 @@ derive_more = { version = "1.0.0", features = ["from", "display"] }
 value-ext = "0.0.3" # JC Authored. Early release (API might change). Be cautious when using in other projects.
 
 [dev-dependencies]
 serial_test = "3.2.0"
+base64 = "0.21.0" # Check for the latest version

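For context (an editor's sketch, not part of the diff): the new `base64` dev-dependency brings in the 0.21-style `Engine` API that the test support code below relies on. A minimal illustration of that API, with a helper name invented for the example:

```rust
// Minimal sketch of the base64 0.21 Engine API (illustrative helper, not from the commit).
use base64::engine::general_purpose;
use base64::Engine; // brings `encode` into scope

fn to_b64(bytes: &[u8]) -> String {
	// STANDARD = RFC 4648 standard alphabet, with padding
	general_purpose::STANDARD.encode(bytes)
}

fn main() {
	assert_eq!(to_b64(b"duck"), "ZHVjaw==");
}
```
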
@@ -3,7 +3,7 @@ use crate::adapter::openai::OpenAIStreamer;
 use crate::adapter::{Adapter, AdapterDispatcher, AdapterKind, ServiceType, WebRequestData};
 use crate::chat::{
 	ChatOptionsSet, ChatRequest, ChatResponse, ChatResponseFormat, ChatRole, ChatStream, ChatStreamResponse,
-	MessageContent, MetaUsage, ToolCall, ContentPart, ImageSource
+	ContentPart, ImageSource, MessageContent, MetaUsage, ToolCall,
 };
 use crate::resolver::{AuthData, Endpoint};
 use crate::webc::WebResponse;

@@ -253,19 +253,28 @@ impl OpenAIAdapter {
 			let content = match msg.content {
 				MessageContent::Text(content) => json!(content),
 				MessageContent::Parts(parts) => {
-					json!(parts.iter().map(|part| match part {
-						ContentPart::Text(text) => json!({"type": "text", "text": text.clone()}),
-						ContentPart::Image{content, content_type, source} => {
-							match source {
-								ImageSource::Url => json!({"type": "image_url", "image_url": {"url": content}}),
-								ImageSource::Base64 => {
-									let image_url = format!("data:{content_type};base64,{content}");
-									json!({"type": "image_url", "image_url": {"url": image_url}})
-								},
+					json!(parts
+						.iter()
+						.map(|part| match part {
+							ContentPart::Text(text) => json!({"type": "text", "text": text.clone()}),
+							ContentPart::Image {
+								content,
+								content_type,
+								source,
+							} => {
+								match source {
+									ImageSource::Url => {
+										json!({"type": "image_url", "image_url": {"url": content}})
+									}
+									ImageSource::Base64 => {
+										let image_url = format!("data:{content_type};base64,{content}");
+										json!({"type": "image_url", "image_url": {"url": image_url}})
+									}
+								}
 							}
-						},
-					}).collect::<Vec<Value>>())
-				},
+						})
+						.collect::<Vec<Value>>())
+				}
 				// Use `match` instead of `if let`. This will allow to future-proof this
 				// implementation in case some new message content types would appear,
 				// this way library would not compile if not all methods are implemented

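For reference (an editor's sketch, not part of the commit): the mapping above emits OpenAI-style `image_url` content parts. Roughly, the two variants produce JSON of the following shapes (the URL and base64 strings here are placeholders):

```rust
use serde_json::json;

fn main() {
	// ImageSource::Url — the content string is used directly as the image URL.
	let url_part = json!({"type": "image_url", "image_url": {"url": "https://example.com/duck.jpg"}});

	// ImageSource::Base64 — the content is wrapped into a data URL with its content type.
	let (content_type, content) = ("image/jpeg", "<base64-bytes>");
	let image_url = format!("data:{content_type};base64,{content}");
	let b64_part = json!({"type": "image_url", "image_url": {"url": image_url}});

	println!("{url_part}\n{b64_part}");
}
```
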
@@ -1,7 +1,10 @@
 use crate::get_option_value;
+use crate::support::data::{get_b64_duck, IMAGE_URL_JPG_DUCK};
 use crate::support::{assert_contains, extract_stream_end, seed_chat_req_simple, seed_chat_req_tool_simple, Result};
 use genai::adapter::AdapterKind;
-use genai::chat::{ChatMessage, ChatOptions, ChatRequest, ChatResponseFormat, JsonSpec, Tool, ToolResponse};
+use genai::chat::{
+	ChatMessage, ChatOptions, ChatRequest, ChatResponseFormat, ContentPart, ImageSource, JsonSpec, Tool, ToolResponse,
+};
 use genai::resolver::{AuthData, AuthResolver, AuthResolverFn, IntoAuthResolverFn};
 use genai::{Client, ClientConfig, ModelIden};
 use serde_json::{json, Value};

@@ -318,6 +321,58 @@ pub async fn common_test_chat_stream_capture_all_ok(model: &str) -> Result<()> {
 
 // endregion: --- Chat Stream Tests
 
+// region: --- Images
+
+pub async fn common_test_chat_image_url_ok(model: &str) -> Result<()> {
+	// -- Setup
+	let client = Client::default();
+
+	// -- Build & Exec
+	let mut chat_req = ChatRequest::default().with_system("Answer in one sentence");
+	// This is similar to sending initial system chat messages (which will be cumulative with system chat messages)
+	chat_req = chat_req.append_message(ChatMessage::user(vec![
+		ContentPart::Text("What is in this picture?".to_string()),
+		ContentPart::Image {
+			content: IMAGE_URL_JPG_DUCK.to_string(),
+			content_type: "image/jpeg".to_string(),
+			source: ImageSource::Url,
+		},
+	]));
+	let chat_res = client.exec_chat(model, chat_req, None).await?;
+
+	// -- Check
+	let res = chat_res.content_text_as_str().ok_or("Should have text result")?;
+	assert_contains(res, "duck");
+
+	Ok(())
+}
+
+pub async fn common_test_chat_image_b64_ok(model: &str) -> Result<()> {
+	// -- Setup
+	let client = Client::default();
+
+	// -- Build & Exec
+	let mut chat_req = ChatRequest::default().with_system("Answer in one sentence");
+	// This is similar to sending initial system chat messages (which will be cumulative with system chat messages)
+	chat_req = chat_req.append_message(ChatMessage::user(vec![
+		ContentPart::Text("What is in this picture?".to_string()),
+		ContentPart::Image {
+			content: get_b64_duck()?,
+			content_type: "image/jpeg".to_string(),
+			source: ImageSource::Base64,
+		},
+	]));
+	let chat_res = client.exec_chat(model, chat_req, None).await?;
+
+	// -- Check
+	let res = chat_res.content_text_as_str().ok_or("Should have text result")?;
+	assert_contains(res, "duck");
+
+	Ok(())
+}
+
+// endregion: --- Images
+
 // region: --- Tools
 
 /// Just making the tool request, and checking the tool call response

tests/support/data.rs (new file, 22 lines)
@@ -0,0 +1,22 @@
+use base64::engine::general_purpose;
+use base64::Engine;
+use std::fs::File;
+use std::io::Read;
+
+pub const IMAGE_URL_JPG_DUCK: &str = "https://upload.wikimedia.org/wikipedia/commons/thumb/b/bf/Bucephala-albeola-010.jpg/440px-Bucephala-albeola-010.jpg";
+
+/// Get the base64 of the image above (but resized/lower to fit 5kb)
+pub fn get_b64_duck() -> Result<String, Box<dyn std::error::Error>> {
+	// Path to the local image file
+	let image_path = "./tests/data/duck-small.jpg";
+
+	// Open the file and read its contents into a buffer
+	let mut file = File::open(image_path)?;
+	let mut buffer = Vec::new();
+	file.read_to_end(&mut buffer)?;
+
+	// Use the general-purpose Base64 engine for encoding
+	let base64_encoded = general_purpose::STANDARD.encode(&buffer);
+
+	Ok(base64_encoded)
+}

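A small usage note (an assumption for illustration, not in the commit): `get_b64_duck` returns the raw base64 string with no `data:` prefix; it is the OpenAI adapter change shown earlier that wraps it into a data URL, along these lines:

```rust
// Hypothetical helper for illustration; assumes get_b64_duck() from tests/support/data.rs above.
fn duck_data_url() -> Result<String, Box<dyn std::error::Error>> {
	let b64 = get_b64_duck()?; // raw base64, no "data:" prefix
	Ok(format!("data:image/jpeg;base64,{b64}"))
}
```
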
@@ -6,6 +6,7 @@
 // region: --- Modules
 
 mod asserts;
+mod data;
 mod helpers;
 mod seeders;
 

@@ -66,6 +66,23 @@ async fn test_chat_stream_capture_all_ok() -> Result<()> {
 }
 // endregion: --- Chat Stream Tests
 
+// region: --- Image Tests
+
+// NOTE: For now disable these tests as they failed. Needs to be resolved.
+
+// Anthropic does not support image URL
+// #[tokio::test]
+// async fn test_chat_image_url_ok() -> Result<()> {
+// 	common_tests::common_test_chat_image_url_ok(MODEL).await
+// }
+
+#[tokio::test]
+async fn test_chat_image_b64_ok() -> Result<()> {
+	common_tests::common_test_chat_image_b64_ok(MODEL).await
+}
+
+// endregion: --- Image Test
+
 // region: --- Tool Tests
 
 #[tokio::test]

@@ -8,6 +8,9 @@ type Result<T> = core::result::Result<T, Box<dyn std::error::Error>>; // For tests.
 
 const MODEL: &str = "gemini-1.5-flash-latest";
 
+#[allow(dead_code)]
+const MODEL_FOR_IMAGE: &str = "gemini-2.0-flash-exp";
+
 // region: --- Chat
 
 #[tokio::test]

@@ -56,6 +59,20 @@ async fn test_chat_stream_capture_all_ok() -> Result<()> {
 
 // endregion: --- Chat Stream Tests
 
+// region: --- Image Tests
+
+// NOTE: Gemini does not seem to support URL
+// #[tokio::test]
+// async fn test_chat_image_url_ok() -> Result<()> {
+// 	common_tests::common_test_chat_image_url_ok(MODEL_FOR_IMAGE).await
+// }
+
+#[tokio::test]
+async fn test_chat_image_b64_ok() -> Result<()> {
+	common_tests::common_test_chat_image_b64_ok(MODEL_FOR_IMAGE).await
+}
+// endregion: --- Image Test
+
 // region: --- Resolver Tests
 
 #[tokio::test]

@@ -61,6 +61,20 @@ async fn test_chat_stream_capture_all_ok() -> Result<()> {
 
 // endregion: --- Chat Stream Tests
 
+// region: --- Image Tests
+
+#[tokio::test]
+async fn test_chat_image_url_ok() -> Result<()> {
+	common_tests::common_test_chat_image_url_ok(MODEL).await
+}
+
+#[tokio::test]
+async fn test_chat_image_b64_ok() -> Result<()> {
+	common_tests::common_test_chat_image_b64_ok(MODEL).await
+}
+
+// endregion: --- Image Test
+
 // region: --- Tool Tests
 
 #[tokio::test]