From a32f69dfeeb64ea8add57af8d37594824e4b0d63 Mon Sep 17 00:00:00 2001 From: Jeremy Chone Date: Sat, 7 Dec 2024 15:03:33 -0800 Subject: [PATCH] . minor update and start prep for v0.1.13 --- CHANGELOG.md | 8 +++++++- README.md | 7 +++++-- src/adapter/adapters/cohere/adapter_impl.rs | 3 +++ 3 files changed, 15 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 75ba2e0..b900f52 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,10 +1,16 @@ `.` minor | `-` Fix | `+` Addition | `^` improvement | `!` Change | `*` Refactor > **IMPORTANT:** `0.1.x` will still have some breaking changes in patches. -> - Make sure to **lock** your version, e.g., `genai = "=0.1.12"`. +> - Make sure to **lock** your version, e.g., `genai = "=0.1.13"`. > - Version `0.2.x` will follow semver more strictly. > - API changes will be denoted as "`!` - **API CHANGE** ...." +## 2024-12-07 - `0.1.13` + +- `+` add `stop_sequences` support for Cohere +- `+` stop_sequences - for openai, ollama, groq, gemini, cohere +- `+` stop_sequences - for anthropic (thanks [@semtexzv](https://github.com/semtexzv)) + ## 2024-11-18 - `0.1.12` - `.` minor update on llms model names diff --git a/README.md b/README.md index 08bae01..46842e3 100644 --- a/README.md +++ b/README.md @@ -12,17 +12,19 @@ Currently supports natively: **Ollama**, **OpenAI**, **Anthropic**, **groq**, ** ```toml # cargo.toml -genai = "=0.1.12" # Version lock for `0.1.x` +genai = "=0.1.13" # Version lock for `0.1.x` ```
The goal of this library is to provide a common and ergonomic single API to many generative AI Providers, such as OpenAI, Anthropic, Cohere, Ollama. -- **IMPORTANT 1** `0.1.x` will still have some breaking changes in patches, so make sure to **lock** your version, e.g., `genai = "=0.1.12"`. In short, `0.1.x` can be considered "beta releases." Version `0.2.x` will follow semver more strictly. +- **IMPORTANT 1** `0.1.x` will still have some breaking changes in patches, so make sure to **lock** your version, e.g., `genai = "=0.1.13"`. In short, `0.1.x` can be considered "beta releases." Version `0.2.x` will follow semver more strictly. - **IMPORTANT 2** `genai` is focused on normalizing chat completion APIs across AI providers and is not intended to be a full representation of a given AI provider. For this, there are excellent libraries such as [async-openai](https://crates.io/search?q=async-openai) for OpenAI and [ollama-rs](https://crates.io/crates/ollama-rs) for Ollama. +Check out [devai.run](https://devai.run), the **Iterate to Automate** command-line application that leverages **GenAI** for multi-AI capabilities. 
+ [Examples](#examples) | [Thanks](#thanks) | [Library Focus](#library-focus) | [Changelog](CHANGELOG.md) | Provider Mapping: [ChatOptions](#chatoptions) | [MetaUsage](#metausage) ## Examples @@ -132,6 +134,7 @@ async fn main() -> Result<(), Box> { ## Thanks +- Thanks to [@semtexzv](https://github.com/semtexzv) for `stop_sequences` Anthropic support [PR #34](https://github.com/jeremychone/rust-genai/pull/34) - Thanks to [@omarshehab221](https://github.com/omarshehab221) for de/serialize on structs [PR #19](https://github.com/jeremychone/rust-genai/pull/19) - Thanks to [@tusharmath](https://github.com/tusharmath) for make webc::Error [PR #12](https://github.com/jeremychone/rust-genai/pull/12) - Thanks to [@giangndm](https://github.com/giangndm) for make stream is send [PR #10](https://github.com/jeremychone/rust-genai/pull/10) diff --git a/src/adapter/adapters/cohere/adapter_impl.rs b/src/adapter/adapters/cohere/adapter_impl.rs index 91ee3a8..c2f6f32 100644 --- a/src/adapter/adapters/cohere/adapter_impl.rs +++ b/src/adapter/adapters/cohere/adapter_impl.rs @@ -84,12 +84,15 @@ impl Adapter for CohereAdapter { if let Some(temperature) = options_set.temperature() { payload.x_insert("temperature", temperature)?; } + if !options_set.stop_sequences().is_empty() { payload.x_insert("stop_sequences", options_set.stop_sequences())?; } + if let Some(max_tokens) = options_set.max_tokens() { payload.x_insert("max_tokens", max_tokens)?; } + if let Some(top_p) = options_set.top_p() { payload.x_insert("p", top_p)?; }