Skip to content

astrum-chat/anyml

Folders and files

Name
Last commit message
Last commit date

Latest commit

 

History

38 Commits
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 

Repository files navigation

Anyml

Dyn-compatible traits which provide a unified API for asynchronously interacting with machine learning inference providers.

Crates for different providers can be found here:

Installation

anyml = { git = "https://github.com/astrum-chat/anyml", features = ["anthropic", "ollama", "openai"] }

Example

use anyhttp_reqwest::ReqwestClientWrapper;
use anyml::{AnthropicProvider, ChatOptions, ChatProvider, Message};
use futures::StreamExt;
use tokio::io::{AsyncWriteExt, stdout};

#[tokio::main]
async fn main() {
    // Build the provider configuration up front (reads the API key from the
    // environment); panicking here is acceptable for a usage example.
    let config = init_config().unwrap();

    // A single user message sent against a fixed Anthropic model.
    let messages = &[Message::user("Write me a short poem!")];
    let options = ChatOptions::new("claude-3-haiku-20240307").messages(messages);

    // `chat` yields a stream of response chunks rather than one final string.
    let mut response = config.chat_provider.chat(&options).await.unwrap();

    // Write each chunk as it arrives, flushing so the text appears
    // incrementally instead of being buffered until program exit.
    let mut out = stdout();
    while let Some(Ok(chunk)) = response.next().await {
        out.write_all(chunk.content.as_bytes()).await.unwrap();
        out.flush().await.unwrap();
    }
}

/// Application configuration assembled by [`init_config`].
struct Config {
    // Boxed trait object so any backend implementing `ChatProvider`
    // (Anthropic here, but equally Ollama/OpenAI) can be swapped in.
    chat_provider: Box<dyn ChatProvider>,
}

fn init_config() -> anyhow::Result<Config> {
    dotenvy::dotenv().ok();

    let api_key = std::env::var("ANTHROPIC_API_KEY")?;

    let client = ReqwestClientWrapper::new(reqwest::Client::new());
    let anthropic = AnthropicProvider::new(client, api_key);

    Ok(Config {
        chat_provider: Box::new(anthropic),
    })
}

About

No description, website, or topics provided.

Resources

License

Stars

Watchers

Forks

Releases

No releases published

Packages

 
 
 

Contributors

Languages