. proof-read comments (from devai)

Jeremy Chone
2025-01-06 13:01:08 -08:00
parent a0b3e871bf
commit d00e386600
36 changed files with 86 additions and 85 deletions

View File

@ -109,7 +109,7 @@ impl AdapterKind {
} else if GROQ_MODELS.contains(&model) {
return Ok(Self::Groq);
}
// for now, fallback to Ollama
// For now, fallback to Ollama
else {
Ok(Self::Ollama)
}
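
For context, a minimal standalone sketch of the resolution pattern this hunk belongs to (names simplified; not the crate's exact code): known model lists are checked first, and anything unrecognized falls through to Ollama.

```rust
// Simplified sketch of AdapterKind resolution from a model name.
#[derive(Debug, PartialEq)]
enum AdapterKind {
    Groq,
    Ollama,
}

const GROQ_MODELS: &[&str] = &["llama-3.1-8b-instant"];

fn from_model(model: &str) -> AdapterKind {
    if GROQ_MODELS.contains(&model) {
        AdapterKind::Groq
    }
    // For now, fallback to Ollama
    else {
        AdapterKind::Ollama
    }
}

fn main() {
    assert_eq!(from_model("llama-3.1-8b-instant"), AdapterKind::Groq);
    assert_eq!(from_model("my-local-model"), AdapterKind::Ollama);
}
```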

View File

@ -22,7 +22,7 @@ pub struct AnthropicAdapter;
const MAX_TOKENS_8K: u32 = 8192;
const MAX_TOKENS_4K: u32 = 4096;
const ANTRHOPIC_VERSION: &str = "2023-06-01";
const ANTHROPIC_VERSION: &str = "2023-06-01";
const MODELS: &[&str] = &[
"claude-3-5-sonnet-20241022",
"claude-3-5-haiku-20241022",
@ -74,7 +74,7 @@ impl Adapter for AnthropicAdapter {
let headers = vec![
// headers
("x-api-key".to_string(), api_key),
("anthropic-version".to_string(), ANTRHOPIC_VERSION.to_string()),
("anthropic-version".to_string(), ANTHROPIC_VERSION.to_string()),
];
let model_name = model.model_name.clone();
@ -135,17 +135,17 @@ impl Adapter for AnthropicAdapter {
let usage = body.x_take("usage").map(Self::into_usage).unwrap_or_default();
// -- Capture the content
// NOTE: Anthropic support a list of content of multitypes but not the ChatResponse
// NOTE: Anthropic supports a list of content of multiple types, but the ChatResponse does not
// So, the strategy is to:
// - List all of the content and capture the text and tool_use
// - If there is one or more tool_use, this will take precedence and MessageContent support tool_call list
// - If there is one or more tool_use, this will take precedence, and MessageContent will support a tool_call list
// - Otherwise, the text is concatenated
// NOTE: We need to see whether multiple text content items occur and why. If not, we can probably simplify this by just capturing the first one.
// Eventually, ChatResponse will have `content: Option<Vec<MessageContent>>` for the multi parts (with images and such)
let content_items: Vec<Value> = body.x_take("content")?;
let mut text_content: Vec<String> = Vec::new();
// Note: here tool_calls is probably the exception, so, not creating the vector if not needed
// Note: here tool_calls is probably the exception, so not creating the vector if not needed
let mut tool_calls: Option<Vec<ToolCall>> = None;
for mut item in content_items {
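
To make the strategy in the NOTE concrete, here is a simplified standalone sketch (assumed shapes; the `ToolCall` fields and JSON layout are illustrative, not the crate's exact code) of capturing `text` and `tool_use` items, with tool calls taking precedence:

```rust
use serde_json::Value;

#[derive(Debug)]
struct ToolCall {
    name: String,
    input: Value,
}

// Returns the concatenated text, unless one or more tool_use items were found,
// in which case the tool calls take precedence.
fn capture_content(content_items: Vec<Value>) -> (Option<String>, Option<Vec<ToolCall>>) {
    let mut text_content: Vec<String> = Vec::new();
    // tool_calls is the exception, so the vector is created only when needed
    let mut tool_calls: Option<Vec<ToolCall>> = None;

    for item in content_items {
        match item.get("type").and_then(Value::as_str) {
            Some("text") => {
                if let Some(text) = item.get("text").and_then(Value::as_str) {
                    text_content.push(text.to_string());
                }
            }
            Some("tool_use") => {
                let name = item.get("name").and_then(Value::as_str).unwrap_or_default().to_string();
                let input = item.get("input").cloned().unwrap_or(Value::Null);
                tool_calls.get_or_insert_with(Vec::new).push(ToolCall { name, input });
            }
            _ => (), // other content types are ignored in this sketch
        }
    }

    let text = (!text_content.is_empty()).then(|| text_content.join("\n"));
    (text, tool_calls)
}
```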
@ -228,12 +228,12 @@ impl AnthropicAdapter {
// -- Process the messages
for msg in chat_req.messages {
match msg.role {
// for now, system and tool messages go to system
// For now, system and tool messages go to the system
ChatRole::System => {
if let MessageContent::Text(content) = msg.content {
systems.push(content)
}
// TODO: Needs to trace/warn that other type are not supported
// TODO: Needs to trace/warn that other types are not supported
}
ChatRole::User => {
let content = match msg.content {
@ -261,7 +261,7 @@ impl AnthropicAdapter {
}
// Use `match` instead of `if let`. This will allow us to future-proof this
// implementation in case new message content types appear,
// this way library would not compile if not all methods are implemented
// this way the library would not compile if not all methods are implemented
// `continue` allows us to gracefully skip pushing an unserializable message
// TODO: Probably need to warn if it is a ToolCalls type of content
MessageContent::ToolCalls(_) => continue,
@ -311,7 +311,7 @@ impl AnthropicAdapter {
})
.collect::<Vec<Value>>();
// FIXME: MessageContent::ToolResponse should be MessageContent::ToolResponses (even if openAI does require multi Tool message)
// FIXME: MessageContent::ToolResponse should be MessageContent::ToolResponses (even if OpenAI does require multi Tool message)
messages.push(json!({
"role": "user",
"content": tool_responses
@ -337,7 +337,7 @@ impl AnthropicAdapter {
.map(|tool| {
// TODO: Need to handle the error correctly
// TODO: Needs to have a custom serializer (tool should not have to match to a provider)
// NOTE: Right now, low probability, so, we just return null if cannto to value.
// NOTE: Right now, low probability, so we just return null if we cannot convert to a value.
let mut tool_value = json!({
"name": tool.name,
"input_schema": tool.schema,

View File

@ -79,7 +79,7 @@ impl futures::Stream for AnthropicStreamer {
}
// -- END MESSAGE
"message_stop" => {
// Make sure we do not poll the EventSource anymore on the next poll.
// Ensure we do not poll the EventSource anymore on the next poll.
// NOTE: This way, the last MessageStop event is still sent,
// but then, on the next poll, it will be stopped.
self.done = true;
@ -142,7 +142,7 @@ impl AnthropicStreamer {
};
// -- Capture/Add the eventual input_tokens
// NOTE: Permissive on this one, if error, treat as nonexistent (for now)
// NOTE: Permissive on this one; if an error occurs, treat it as nonexistent (for now)
if let Ok(input_tokens) = data.x_get::<i32>(input_path) {
let val = self
.captured_data
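
A small sketch of the "permissive" capture described in the NOTE (the JSON pointer path and helper shape are assumptions; the crate uses its own `x_get` helper): a missing or mistyped `input_tokens` is simply treated as absent rather than an error.

```rust
use serde_json::Value;

fn capture_input_tokens(data: &Value, captured_usage: &mut Option<i32>) {
    // Permissive: if the path is missing or not a number, just skip (for now).
    if let Some(input_tokens) = data
        .pointer("/message/usage/input_tokens")
        .and_then(Value::as_i64)
    {
        let val = captured_usage.get_or_insert(0);
        *val += input_tokens as i32;
    }
}
```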

View File

@ -223,7 +223,7 @@ impl CohereAdapter {
};
match msg.role {
// For now, system and tool go to the system
// For now, system and tool messages go to the system
ChatRole::System => systems.push(content),
ChatRole::User => chat_history.push(json! ({"role": "USER", "content": content})),
ChatRole::Assistant => chat_history.push(json! ({"role": "CHATBOT", "content": content})),

View File

@ -71,7 +71,7 @@ impl futures::Stream for CohereStreamer {
"stream-start" => InterStreamEvent::Start,
"text-generation" => {
if let Some(content) = cohere_message.text {
// Add to the captured_content if chat options allow it
// Add to the captured content if chat options allow it
if self.options.capture_content {
match self.captured_data.content {
Some(ref mut c) => c.push_str(&content),
@ -110,7 +110,7 @@ impl futures::Stream for CohereStreamer {
InterStreamEvent::End(inter_stream_end)
}
_ => continue, // Skip the "Other" event
_ => continue, // Skip the "other" event
};
return Poll::Ready(Some(Ok(inter_event)));
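
For reference, the capture pattern used in this streamer as a standalone sketch (types simplified): each text chunk is appended to the captured content only when the options request it.

```rust
#[derive(Default)]
struct StreamerCapturedData {
    content: Option<String>,
}

fn capture_chunk(captured: &mut StreamerCapturedData, capture_content: bool, chunk: &str) {
    if capture_content {
        match captured.content {
            Some(ref mut c) => c.push_str(chunk),
            None => captured.content = Some(chunk.to_string()),
        }
    }
}
```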

View File

@ -15,7 +15,7 @@ impl DeepSeekAdapter {
pub const API_KEY_DEFAULT_ENV_NAME: &str = "DEEPSEEK_API_KEY";
}
// The Groq API adapter is modeled after the OpenAI adapter, as the Groq API is compatible with the OpenAI API.
// The DeepSeek API adapter is modeled after the OpenAI adapter, as the DeepSeek API is compatible with the OpenAI API.
impl Adapter for DeepSeekAdapter {
fn default_endpoint() -> Endpoint {
const BASE_URL: &str = "https://api.deepseek.com/v1/";

View File

@ -13,7 +13,7 @@ pub struct GeminiStreamer {
options: StreamerOptions,
// -- Set by the poll_next
/// Flag to not poll the EventSource after a MessageStop event
/// Flag to not poll the EventSource after a MessageStop event.
done: bool,
captured_data: StreamerCapturedData,
}
@ -41,7 +41,7 @@ impl futures::Stream for GeminiStreamer {
while let Poll::Ready(item) = Pin::new(&mut self.inner).poll_next(cx) {
match item {
Some(Ok(raw_message)) => {
// This is the message sent by the WebStream in PrettyJsonArray mode
// This is the message sent by the WebStream in PrettyJsonArray mode.
// - `[` document start
// - `{...}` block
// - `]` document end
@ -93,10 +93,10 @@ impl futures::Stream for GeminiStreamer {
}
}
// NOTE: Apparently in the Gemini API, all events have cumulative usage
// NOTE: Apparently in the Gemini API, all events have cumulative usage,
// meaning each message seems to include the tokens for all previous streams.
// Thus, we do not need to add it; we only need to replace captured_data.usage with the latest one.
// See https://twitter.com/jeremychone/status/1813734565967802859 for potential additional information
// See https://twitter.com/jeremychone/status/1813734565967802859 for potential additional information.
if self.options.capture_usage {
self.captured_data.usage = Some(usage);
}
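
A sketch of the cumulative-usage point in the NOTE (types simplified): since each Gemini event reports the totals so far, the captured usage is replaced with the latest value rather than summed.

```rust
#[derive(Debug, Default)]
struct MetaUsage {
    input_tokens: Option<i32>,
    output_tokens: Option<i32>,
}

fn on_usage_event(captured_usage: &mut Option<MetaUsage>, capture_usage: bool, usage: MetaUsage) {
    if capture_usage {
        // Replace, do not add: each event already carries the cumulative totals.
        *captured_usage = Some(usage);
    }
}
```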

View File

@ -26,13 +26,13 @@ impl Adapter for OllamaAdapter {
AuthData::from_single("ollama")
}
/// Note 1: For now, this adapter is the only one making a full request to the ollama server
/// Note 2: Will the OpenAI API to talk to Ollam server (https://platform.openai.com/docs/api-reference/models/list)
/// Note 1: For now, this adapter is the only one making a full request to the Ollama server
/// Note 2: Use the OpenAI API to communicate with the Ollama server (https://platform.openai.com/docs/api-reference/models/list)
///
/// TODO: This will use the default endpoint.
/// Later, we might add another function with a endpoint, so the the user can give an custom endpoint.
/// Later, we might add another function with an endpoint, so the user can provide a custom endpoint.
async fn all_model_names(adapter_kind: AdapterKind) -> Result<Vec<String>> {
// FIXME: This is harcoded to the default endpoint, should take endpoint as Argument
// FIXME: This is hardcoded to the default endpoint; it should take the endpoint as an argument.
let endpoint = Self::default_endpoint();
let base_url = endpoint.base_url();
let url = format!("{base_url}models");

View File

@ -1,5 +1,5 @@
//! OPENAI API DOC: https://platform.openai.com/docs/api-reference/chat
//! NOTE: Currently, genai uses the OpenAI compatibility layer, except for listing models.
//! NOTE: Currently, GenAI uses the OpenAI compatibility layer, except for listing models.
//! OLLAMA API DOC: https://github.com/ollama/ollama/blob/main/docs/api.md
// region: --- Modules

View File

@ -327,7 +327,7 @@ impl OpenAIAdapter {
.map(|tool| {
// TODO: Need to handle the error correctly
// TODO: Needs to have a custom serializer (tool should not have to match to a provider)
// NOTE: Right now, low probability, so, we just return null if cannto to value.
// NOTE: Right now, low probability, so we just return null if we cannot convert to a value.
json!({
"type": "function",
"function": {
@ -387,7 +387,7 @@ fn parse_tool_call(raw_tool_call: Value) -> Result<ToolCall> {
let fn_name = iterim.function.name;
// For now support Object only, and parse the eventual string as a json value.
// For now, support Object only, and parse the eventual string as a JSON value.
// Eventually, we might check pricing
let fn_arguments = match iterim.function.arguments {
Value::Object(obj) => Value::Object(obj),
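
The Object-only handling mentioned above, as a standalone sketch (the fallback-to-null behavior for other types is an assumption): objects pass through, and a string payload is parsed into a JSON value.

```rust
use serde_json::Value;

fn parse_fn_arguments(arguments: Value) -> Value {
    match arguments {
        // Already a JSON object: pass through as-is.
        Value::Object(obj) => Value::Object(obj),
        // Parse the eventual string as a JSON value.
        Value::String(s) => serde_json::from_str(&s).unwrap_or(Value::Null),
        _ => Value::Null,
    }
}
```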

View File

@ -82,7 +82,7 @@ impl futures::Stream for OpenAIStreamer {
// If finish_reason exists, it's the end of this choice.
// Since we support only a single choice, we can proceed,
// as there might be other messages, and the last one contains data: `[DONE]`
// NOTE: xAI have no `finish_reason` when not finished, so, need to just account for both null/absent
// NOTE: xAI has no `finish_reason` when not finished, so we need to account for both null/absent
if let Ok(_finish_reason) = first_choice.x_take::<String>("finish_reason") {
// NOTE: For Groq, the usage is captured when finish_reason indicates stopping, and in the `/x_groq/usage`
if self.options.capture_usage {
@ -101,7 +101,7 @@ impl futures::Stream for OpenAIStreamer {
.unwrap_or_default();
self.captured_data.usage = Some(usage)
}
_ => (), // do nothing, will be captured the OpenAi way
_ => (), // do nothing, will be captured the OpenAI way
}
}
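
The null/absent point in the NOTE, as a sketch: with xAI, `finish_reason` can be JSON `null` or missing entirely while the stream is still running, so both cases must map to "not finished".

```rust
use serde_json::Value;

fn finish_reason(choice: &Value) -> Option<String> {
    match choice.get("finish_reason") {
        Some(Value::String(s)) => Some(s.clone()),
        // `null` or absent both mean the stream is not finished yet.
        Some(Value::Null) | None => None,
        _ => None,
    }
}
```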

View File

@ -1,4 +1,4 @@
//! This support model is for common constructs and utilities for all of the adapter implementations.
//! This support module is for common constructs and utilities for all the adapter implementations.
//! It should be private to the `crate::adapter::adapters` module.
use crate::chat::{ChatOptionsSet, MetaUsage};

View File

@ -17,7 +17,7 @@ use crate::resolver::{AuthData, Endpoint};
/// A construct that allows dispatching calls to the Adapters.
///
/// Note 1: This struct does not need to implement the Adapter trait, as some of its methods take the adapter_kind as a parameter.
/// Note 1: This struct does not need to implement the Adapter trait, as some of its methods take the adapter kind as a parameter.
///
/// Note 2: This struct might be renamed to avoid confusion with the traditional Rust dispatcher pattern.
pub struct AdapterDispatcher;
@ -118,7 +118,7 @@ impl AdapterDispatcher {
AdapterKind::OpenAI => OpenAIAdapter::to_chat_stream(model_iden, reqwest_builder, options_set),
AdapterKind::Anthropic => AnthropicAdapter::to_chat_stream(model_iden, reqwest_builder, options_set),
AdapterKind::Cohere => CohereAdapter::to_chat_stream(model_iden, reqwest_builder, options_set),
AdapterKind::Ollama => OpenAIAdapter::to_chat_stream(model_iden, reqwest_builder, options_set),
AdapterKind::Ollama => OllamaAdapter::to_chat_stream(model_iden, reqwest_builder, options_set),
AdapterKind::Gemini => GeminiAdapter::to_chat_stream(model_iden, reqwest_builder, options_set),
AdapterKind::Groq => GroqAdapter::to_chat_stream(model_iden, reqwest_builder, options_set),
AdapterKind::Xai => XaiAdapter::to_chat_stream(model_iden, reqwest_builder, options_set),

View File

@ -1,4 +1,4 @@
//! Internal stream event types that serve as an intermediary between the provider event and the GenAI stream event.
//! Internal stream event types that serve as intermediaries between the provider event and the GenAI stream event.
//!
//! This allows for flexibility if we want to capture events across providers that do not need to
//! be reflected in the public ChatStream event.

View File

@ -1,4 +1,4 @@
//! The Adapter layer allows adapting client requests/responses to various AI Providers.
//! The Adapter layer allows adapting client requests/responses to various AI providers.
//! Currently, it employs a static dispatch pattern with the `Adapter` trait and `AdapterDispatcher` implementation.
//! Adapter implementations are organized by adapter type under the `adapters` submodule.
//!

View File

@ -1,6 +1,6 @@
//! ChatOptions allows customization of a chat request.
//! - It can be provided at the `client::exec_chat(..)` level as an argument,
//! - or set in the client config `client_config.with_chat_options(..)` to be used as default for all requests
//! - or set in the client config `client_config.with_chat_options(..)` to be used as the default for all requests
//!
//! Note 1: In the future, we will probably allow setting the client
//! Note 2: Extracting it from the `ChatRequest` object allows for better reusability of each component.
@ -9,7 +9,7 @@ use crate::chat::chat_req_response_format::ChatResponseFormat;
use serde::{Deserialize, Serialize};
use std::ops::Deref;
/// Chat Options that are taken into account for any `Client::exec...` calls.
/// Chat Options that are considered for any `Client::exec...` calls.
///
/// A fallback `ChatOptions` can also be set at the `Client` during the client builder phase
/// ``
@ -39,7 +39,7 @@ pub struct ChatOptions {
/// NOTE: More response formats are coming soon.
pub response_format: Option<ChatResponseFormat>,
/// Specifies sequences used as end marker when generating text
/// Specifies sequences used as end markers when generating text
pub stop_sequences: Vec<String>,
}

View File

@ -5,7 +5,7 @@ use serde_json::Value;
/// The chat response format for the ChatRequest for structured output.
/// This will be taken into consideration only if the provider supports it.
///
/// > Note: Currently, the AI Providers will not report an error if not supported. It will just be ignored.
/// > Note: Currently, the AI Providers do not report an error if not supported; it will just be ignored.
/// > This may change in the future.
#[derive(Debug, Clone, From, Serialize, Deserialize)]
pub enum ChatResponseFormat {
@ -21,10 +21,10 @@ pub enum ChatResponseFormat {
/// The JSON specification for the structured output format.
#[derive(Debug, Clone, From, Serialize, Deserialize)]
pub struct JsonSpec {
/// The name of the spec. Mostly used by OpenAI.
/// The name of the specification. Mostly used by OpenAI.
/// IMPORTANT: With OpenAI, this cannot contain any spaces or special characters besides `-` and `_`.
pub name: String,
/// The description of the JSON spec. Mostly used by OpenAI adapters (future).
/// The description of the JSON specification. Mostly used by OpenAI adapters (future).
/// NOTE: Currently ignored in the OpenAI adapter.
pub description: Option<String>,
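
A structured-output sketch based on the JsonSpec comments above (the `JsonSpec::new(name, schema)` constructor and `with_response_format` setter are assumed from the genai API; note the OpenAI name constraint of `-` and `_` only):

```rust
use genai::chat::{ChatOptions, ChatResponseFormat, JsonSpec};
use serde_json::json;

fn structured_options() -> ChatOptions {
    let schema = json!({
        "type": "object",
        "properties": {
            "city": { "type": "string" },
            "country": { "type": "string" }
        },
        "required": ["city", "country"]
    });
    // The name must not contain spaces or special characters besides `-` and `_` (OpenAI).
    ChatOptions::default()
        .with_response_format(ChatResponseFormat::JsonSpec(JsonSpec::new("place-info", schema)))
}
```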

View File

@ -8,7 +8,7 @@ use serde::{Deserialize, Serialize};
/// The Chat request when performing a direct `Client::`
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct ChatRequest {
/// The initial system of the request.
/// The initial system content of the request.
pub system: Option<String>,
/// The messages of the request.
@ -28,7 +28,7 @@ impl ChatRequest {
}
}
/// From the `.system` property content.
/// Create a ChatRequest from the `.system` property content.
pub fn from_system(content: impl Into<String>) -> Self {
Self {
system: Some(content.into()),
@ -46,7 +46,7 @@ impl ChatRequest {
}
}
/// Create a new request from messages
/// Create a new request from messages.
pub fn from_messages(messages: Vec<ChatMessage>) -> Self {
Self {
system: None,
@ -97,7 +97,7 @@ impl ChatRequest {
.chain(self.messages.iter().filter_map(|message| match message.role {
ChatRole::System => match message.content {
MessageContent::Text(ref content) => Some(content.as_str()),
// If system content is not text, then, we do not add it for now.
// If system content is not text, then we do not add it for now.
_ => None,
},
_ => None,
@ -116,12 +116,12 @@ impl ChatRequest {
for system in self.iter_systems() {
let systems_content = systems.get_or_insert_with(|| "".to_string());
// add eventual separator
// Add eventual separator
if systems_content.ends_with('\n') {
systems_content.push('\n');
} else if !systems_content.is_empty() {
systems_content.push_str("\n\n");
} // do not add any empty line if previous content is empty
} // Do not add any empty line if previous content is empty
systems_content.push_str(system);
}
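
For clarity, the separator logic above as a standalone function (a sketch mirroring the hunk, not the exact crate code):

```rust
fn combine_systems<'a>(systems: impl Iterator<Item = &'a str>) -> Option<String> {
    let mut combined: Option<String> = None;
    for system in systems {
        let content = combined.get_or_insert_with(String::new);
        // Add eventual separator
        if content.ends_with('\n') {
            content.push('\n');
        } else if !content.is_empty() {
            content.push_str("\n\n");
        } // Do not add any empty line if previous content is empty
        content.push_str(system);
    }
    combined
}
```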

View File

@ -58,14 +58,14 @@ impl Stream for ChatStream {
/// The normalized chat stream event for any provider when calling `Client::exec`.
#[derive(Debug, From, Serialize, Deserialize)]
pub enum ChatStreamEvent {
/// Represents the start of the stream. First event.
/// Represents the start of the stream. The first event.
Start,
/// Represents each chunk response. Currently only contains text content.
/// Represents each chunk response. Currently, it only contains text content.
Chunk(StreamChunk),
/// Represents the end of the stream.
/// Will have the `.captured_usage` and `.captured_content` if specified in the `ChatOptions`.
/// It will have the `.captured_usage` and `.captured_content` if specified in the `ChatOptions`.
End(StreamEnd),
}
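
A consumption sketch for these events (the stream is assumed to come from `client.exec_chat_stream(..)`; field names such as `chunk.content` and `end.captured_usage` follow the genai types but should be checked against the current API):

```rust
use futures::StreamExt;
use genai::chat::{ChatStream, ChatStreamEvent};

async fn print_events(mut chat_stream: ChatStream) {
    while let Some(Ok(event)) = chat_stream.next().await {
        match event {
            // The first event.
            ChatStreamEvent::Start => (),
            // Each chunk; currently text content only.
            ChatStreamEvent::Chunk(chunk) => print!("{}", chunk.content),
            // Captured fields are present only if requested in the ChatOptions.
            ChatStreamEvent::End(end) => println!("\nusage: {:?}", end.captured_usage),
        }
    }
}
```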

View File

@ -15,7 +15,7 @@ pub enum MessageContent {
#[from]
ToolCalls(Vec<ToolCall>),
/// Tool call Responses
/// Tool call responses
#[from]
ToolResponses(Vec<ToolResponse>),
}
@ -43,7 +43,7 @@ impl MessageContent {
/// Returns the MessageContent as &str, only if it is MessageContent::Text
/// Otherwise, it returns None.
///
/// NOTE: When multi parts content, this will return None and won't concatenate the text parts.
/// NOTE: When multi-part content is present, this will return None and won't concatenate the text parts.
pub fn text_as_str(&self) -> Option<&str> {
match self {
MessageContent::Text(content) => Some(content.as_str()),
@ -56,7 +56,7 @@ impl MessageContent {
/// Consumes the MessageContent and returns it as &str,
/// only if it is MessageContent::Text; otherwise, it returns None.
///
/// NOTE: When multi parts content, this will return None and won't concatenate the text parts.
/// NOTE: When multi-part content is present, this will return None and won't concatenate the text parts.
pub fn text_into_string(self) -> Option<String> {
match self {
MessageContent::Text(content) => Some(content),
@ -66,7 +66,7 @@ impl MessageContent {
}
}
/// Checks if the text content or the tools calls is empty.
/// Checks if the text content or the tool calls are empty.
pub fn is_empty(&self) -> bool {
match self {
MessageContent::Text(content) => content.is_empty(),
@ -150,18 +150,18 @@ impl<'a> From<&'a str> for ContentPart {
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum ImageSource {
/// For model/services that support URL as input
/// For models/services that support URL as input
/// NOTE: Few AI services support this.
Url(String),
/// The base64 string of the image
///
/// Note: Here we use an Arc<str> to avoid cloning large amounts of data when cloning a ChatRequest.
/// NOTE: Here we use an Arc<str> to avoid cloning large amounts of data when cloning a ChatRequest.
/// The overhead is minimal compared to cloning relatively large data.
/// The downside is that it will be an Arc even when used only once, but for this particular data type, the net benefit is positive.
Base64(Arc<str>),
}
// No `Local` location, this would require handling errors like "file not found" etc.
// Such file can be easily provided by user as Base64, also can implement convenient
// TryFrom<File> to Base64 version. All LLMs accepts local Images only as Base64
// No `Local` location; this would require handling errors like "file not found" etc.
// Such a file can be easily provided by the user as Base64, and we can implement a convenient
// TryFrom<File> to Base64 version. All LLMs accept local images only as Base64.
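
A sketch of the `Arc<str>` point above: cloning the `Base64` variant only bumps a reference count; the payload itself is not copied.

```rust
use std::sync::Arc;

#[allow(dead_code)]
#[derive(Clone)]
enum ImageSource {
    Url(String),
    Base64(Arc<str>),
}

fn main() {
    // A (truncated) base64 payload; real payloads can be megabytes.
    let img = ImageSource::Base64(Arc::from("iVBORw0KGgoAAAANSUhEUg..."));
    let copy = img.clone(); // cheap: refcount bump, no data copy
    let _ = (img, copy);
}
```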

View File

@ -1,13 +1,14 @@
//! The genai chat module contains all of the constructs necessary
//! to make genai requests with the `genai::Client`.
// region: --- Modules
mod chat_message;
mod chat_options;
mod chat_req_response_format;
mod chat_request;
mod chat_respose;
mod chat_response;
mod chat_stream;
mod message_content;
mod tool;
@ -17,7 +18,7 @@ pub use chat_message::*;
pub use chat_options::*;
pub use chat_req_response_format::*;
pub use chat_request::*;
pub use chat_respose::*;
pub use chat_response::*;
pub use chat_stream::*;
pub use message_content::*;
pub use tool::*;

View File

@ -112,7 +112,7 @@ async fn print_chat_stream_inner(
// making the main crate error aware of the different error types would be unnecessary.
//
// Note 2: This Printer Error is not wrapped in the main crate error because the printer
// functions are not used by any other crate function (they are more of a debug utility)
// functions are not used by any other crate functions (they are more of a debug utility)
use derive_more::From;

View File

@ -7,10 +7,10 @@ pub struct Tool {
/// e.g., `get_weather`
pub name: String,
/// The description of the tool which will be used by the LLM to understand the context/usage of this tool
/// The description of the tool that will be used by the LLM to understand the context/usage of this tool
pub description: Option<String>,
/// The json-schema for the parameters
/// The JSON schema for the parameters
/// e.g.,
/// ```json
/// json!({
@ -27,7 +27,7 @@ pub struct Tool {
/// "unit": {
/// "type": "string",
/// "enum": ["C", "F"],
/// "description": "The temperature unit of the country. C for Celsius, and F for Fahrenheit"
/// "description": "The temperature unit for the country. C for Celsius, and F for Fahrenheit"
/// }
/// },
/// "required": ["city", "country", "unit"],

View File

@ -1,7 +1,7 @@
use serde::{Deserialize, Serialize};
use serde_json::Value;
/// The tool call function name and arguments send back by the LLM.
/// The tool call function name and arguments sent back by the LLM.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ToolCall {
pub call_id: String,

View File

@ -3,11 +3,11 @@ use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ToolResponse {
pub call_id: String,
// for now, just string (would probably be serialized json)
// For now, just a string (would probably be serialized JSON)
pub content: String,
}
/// constructor
/// Constructor
impl ToolResponse {
pub fn new(tool_call_id: impl Into<String>, content: impl Into<String>) -> Self {
Self {

View File

@ -29,7 +29,7 @@ impl Client {
Ok(model_iden)
}
#[deprecated(note = "use `client.resolve_service_target(model_name)")]
#[deprecated(note = "use `client.resolve_service_target(model_name)`")]
pub fn resolve_model_iden(&self, model_name: &str) -> Result<ModelIden> {
let model = self.default_model(model_name)?;
let target = self.config().resolve_service_target(model)?;

View File

@ -4,7 +4,7 @@ use crate::ClientBuilder;
use std::sync::Arc;
/// genai Client for executing AI requests to any providers.
/// Build with:
/// Built with:
/// - `ClientBuilder::default()...build()`
/// - or `Client::builder()`, which is equivalent to `ClientBuilder::default()...build()`
#[derive(Debug, Clone)]

View File

@ -17,7 +17,7 @@ pub struct ClientConfig {
impl ClientConfig {
/// Set the AuthResolver for the ClientConfig.
/// Note: This will be called before the `service_target_resolver`, and if registered
/// the `service_target_resolver` will get this new value.
/// the `service_target_resolver` will receive this new value.
pub fn with_auth_resolver(mut self, auth_resolver: AuthResolver) -> Self {
self.auth_resolver = Some(auth_resolver);
self
@ -25,7 +25,7 @@ impl ClientConfig {
/// Set the ModelMapper for the ClientConfig.
/// Note: This will be called before the `service_target_resolver`, and if registered
/// the `service_target_resolver` will get this new value.
/// the `service_target_resolver` will receive this new value.
pub fn with_model_mapper(mut self, model_mapper: ModelMapper) -> Self {
self.model_mapper = Some(model_mapper);
self
@ -33,8 +33,8 @@ impl ClientConfig {
/// Set the ServiceTargetResolver for this client config.
///
/// A ServiceTargetResolver is the last step before execution allowing the users full
/// control of the resolved Endpoint, AuthData, and ModelIden
/// A ServiceTargetResolver is the last step before execution, allowing the users full
/// control of the resolved Endpoint, AuthData, and ModelIden.
pub fn with_service_target_resolver(mut self, service_target_resolver: ServiceTargetResolver) -> Self {
self.service_target_resolver = Some(service_target_resolver);
self
@ -91,12 +91,12 @@ impl ClientConfig {
resolver_error,
})
})
.transpose()? // return error if there is an error on auth resolver
.transpose()? // return an error if there is an error with the auth resolver
.flatten()
.unwrap_or_else(|| AdapterDispatcher::default_auth(model.adapter_kind)); // flatten the two options
// -- Get the default endpoint
// For now, just get the default endpoint, the `resolve_target` will allow to override it
// For now, just get the default endpoint; the `resolve_target` will allow overriding it.
let endpoint = AdapterDispatcher::default_endpoint(model.adapter_kind);
// -- Resolve the service_target

View File

@ -5,7 +5,7 @@ use crate::ModelName;
/// Holds the adapter kind and model name in an efficient, clonable way.
///
/// This struct is used to represent the association between an adapter kind
/// This struct represents the association between an adapter kind
/// and a model name, allowing for easy conversion and instantiation.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ModelIden {

View File

@ -22,7 +22,7 @@ impl From<ModelName> for String {
}
// NOTE: Below we avoid the `T: Into<String>` blanket implementation because
// it would prevent us from having the `From<ModelName> for String` as `ModelName`
// it would prevent us from having the `From<ModelName> for String` implementation since `ModelName`
// also implements `T: Into<String>` from its deref to `&str`
impl From<String> for ModelName {
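
To illustrate the coherence issue in the NOTE: with a blanket `impl<T: Into<String>> From<T> for ModelName`, `ModelName` itself would satisfy the bound, so the compiler would have to generate `From<ModelName> for ModelName`, which overlaps with core's reflexive `impl<T> From<T> for T`. A sketch of concrete per-type impls used instead (illustrative, not the crate's exact code):

```rust
use std::sync::Arc;

pub struct ModelName(Arc<str>);

// Concrete impls per source type, instead of a blanket `T: Into<String>`.
impl From<String> for ModelName {
    fn from(s: String) -> Self {
        ModelName(Arc::from(s))
    }
}
impl From<&str> for ModelName {
    fn from(s: &str) -> Self {
        ModelName(Arc::from(s))
    }
}
impl From<ModelName> for String {
    fn from(m: ModelName) -> Self {
        m.0.to_string()
    }
}
```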

View File

@ -2,7 +2,7 @@ use std::sync::Arc;
/// A construct to store the endpoint of a service.
/// It is designed to be efficiently clonable.
/// For now, it just supports `base_url` but later might have other URLs per "service name".
/// For now, it supports only `base_url`, but it may later have other URLs per "service name".
#[derive(Debug, Clone)]
pub struct Endpoint {
inner: EndpointInner,

View File

@ -1,5 +1,5 @@
//! Resolvers are hooks that library users can set to customize aspects of the library's default behavior.
//! A good example for now is the AuthResolver, which provides the authentication data (e.g., api_key).
//! A good example is the AuthResolver, which provides the authentication data (e.g., api_key).
//!
//! Eventually, the library will have more resolvers.

View File

@ -1,5 +1,5 @@
//! A `ServiceTargetResolver` is responsible for returning the `ServiceTarget`.
//! It allows users to customize/override the service target properties.
//! It allows users to customize or override the service target properties.
//!
//! It can take the following forms:
//! - Contains a fixed service target value,

View File

@ -4,14 +4,14 @@
mod error;
mod web_client;
// for when not `text/event-stream`
// For when not using `text/event-stream`
mod web_stream;
pub(crate) use error::Result;
pub(crate) use web_client::*;
pub(crate) use web_stream::*;
// only public for external use
// Only public for external use
pub use error::Error;
// endregion: --- Modules

View File

@ -19,7 +19,7 @@ pub struct WebStream {
reqwest_builder: Option<RequestBuilder>,
response_future: Option<Pin<Box<dyn Future<Output = Result<Response, Box<dyn Error>>> + Send>>>,
bytes_stream: Option<Pin<Box<dyn Stream<Item = Result<Bytes, Box<dyn Error>>> + Send>>>,
// If a poll was a partial message, then we kept the previous part
// If a poll was a partial message, then we keep the previous part
partial_message: Option<String>,
// If a poll retrieved multiple messages, we keep them to be sent in the next poll
remaining_messages: Option<VecDeque<String>>,
@ -206,7 +206,7 @@ fn new_with_pretty_json_array(
messages.push(array_end.to_string());
}
// -- Return the buf response
// -- Return the buffered response
let first_message = if !messages.is_empty() {
Some(messages[0].to_string())
} else {