forked from graniet/llm
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path cohere_example.rs
More file actions
33 lines (28 loc) · 1.32 KB
/
cohere_example.rs
File metadata and controls
33 lines (28 loc) · 1.32 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
// Import necessary modules for Cohere backend
use llm::{
builder::{LLMBackend, LLMBuilder}, // Builder components
chat::ChatMessage, // Chat message structures
};
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Read the Cohere API key from the environment, falling back to a dummy
    // key so the example still builds a client without configuration.
    // `unwrap_or_else` defers the fallback allocation so no String is
    // constructed when the variable is set (clippy::or_fun_call).
    let api_key = std::env::var("COHERE_API_KEY").unwrap_or_else(|_| "test-key-123".into());
    // Initialize and configure the LLM client with Cohere as the backend.
    let llm = LLMBuilder::new()
        .backend(LLMBackend::Cohere) // Use Cohere as LLM provider
        .api_key(api_key) // Set API key
        .model("command-a-03-2025") // Choose a Cohere model (free-tier)
        .system("Answer like a pirate.") // System instruction (sent with 'developer' role)
        .max_tokens(200) // Limit response length
        .temperature(0.7) // Set response creativity
        .build()
        .expect("Failed to build LLM (Cohere)"); // Builder validation failure is a setup bug in this example
    // Prepare conversation history with a single user message.
    let messages = vec![ChatMessage::user().content("What is 2 + 2?").build()];
    // Send the chat request; print the model's reply, or report the error
    // without aborting (the example exits cleanly either way).
    match llm.chat(&messages).await {
        Ok(response) => println!("Cohere model response:\n{response}"),
        Err(e) => eprintln!("Error calling Cohere: {e}"),
    }
    Ok(())
}