forked from graniet/llm
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path: evaluator_parallel_example.rs
More file actions
66 lines (57 loc) · 2 KB
/
evaluator_parallel_example.rs
File metadata and controls
66 lines (57 loc) · 2 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
use llm::{
builder::{LLMBackend, LLMBuilder},
chat::{ChatMessage, ChatRole},
evaluator::ParallelEvaluator,
};
/// Example: query three LLM providers (OpenAI, Anthropic, Google) in
/// parallel, score each response with user-supplied scoring functions,
/// and report the best-scoring answer.
///
/// Reads `OPENAI_API_KEY`, `ANTHROPIC_API_KEY`, and `GOOGLE_API_KEY` from
/// the environment; placeholder keys are substituted when a variable is
/// missing (the corresponding request will then fail at the provider).
///
/// # Errors
/// Returns an error if any provider fails to build or the parallel
/// evaluation itself fails.
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // NOTE: `unwrap_or_else` defers building the placeholder String until the
    // env var is actually absent, avoiding an eager allocation on every run
    // (Clippy: `or_fun_call`). Behavior is otherwise identical.
    let openai = LLMBuilder::new()
        .backend(LLMBackend::OpenAI)
        .api_key(std::env::var("OPENAI_API_KEY").unwrap_or_else(|_| "openai-key".into()))
        .model("gpt-4o")
        .build()?;
    let anthropic = LLMBuilder::new()
        .backend(LLMBackend::Anthropic)
        .api_key(std::env::var("ANTHROPIC_API_KEY").unwrap_or_else(|_| "anthropic-key".into()))
        .model("claude-3-7-sonnet-20250219")
        .build()?;
    let google = LLMBuilder::new()
        .backend(LLMBackend::Google)
        .api_key(std::env::var("GOOGLE_API_KEY").unwrap_or_else(|_| "google-key".into()))
        .model("gemini-2.0-flash-exp")
        .build()?;

    // Evaluator scores each response with every registered scoring closure;
    // scores accumulate across closures (length-based score + keyword bonus).
    let evaluator = ParallelEvaluator::new(vec![
        ("openai".to_string(), openai),
        ("anthropic".to_string(), anthropic),
        ("google".to_string(), google),
    ])
    .scoring(|response| response.len() as f32 * 0.1)
    .scoring(|response| {
        if response.contains("important") {
            10.0
        } else {
            0.0
        }
    });

    let messages = vec![ChatMessage {
        role: ChatRole::User,
        message_type: Default::default(),
        content: "Explique-moi la théorie de la relativité d'Einstein".to_string(),
    }];

    // Fire the same chat request at all providers concurrently.
    let results = evaluator.evaluate_chat_parallel(&messages).await?;
    for result in &results {
        println!("Provider: {}", result.provider_id);
        println!("Score: {}", result.score);
        println!("Time: {}ms", result.time_ms);
        // Uncomment to dump the full response text:
        // println!("Response: {}", result.text);
        println!("---");
    }

    // `best_response` picks the highest-scoring result, if any were returned.
    if let Some(best) = evaluator.best_response(&results) {
        println!("BEST RESPONSE:");
        println!("Provider: {}", best.provider_id);
        println!("Score: {}", best.score);
        println!("Time: {}ms", best.time_ms);
        // Uncomment to dump the full response text:
        // println!("Response: {}", best.text);
    }
    Ok(())
}