Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion samples/rust/hello-foundry-local/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ cargo run

1. Creates a FoundryLocalManager instance
2. Starts the Foundry Local service if it's not already running
3. Downloads and loads the phi-3.5-mini model
3. Downloads and loads the phi-3-mini-4k model
4. Sends a prompt to the model using the OpenAI-compatible API
5. Displays the response from the model

Expand Down
23 changes: 22 additions & 1 deletion samples/rust/hello-foundry-local/src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -11,29 +11,50 @@ async fn main() -> Result<()> {
println!("Hello Foundry Local!");
println!("===================");

// For this example, we will use the "phi-3-mini-4k" model which is 2.181 GB in size.
let model_to_use: &str = "phi-3-mini-4k";

// Create a FoundryLocalManager instance using the builder pattern
println!("\nInitializing Foundry Local manager...");
let mut manager = FoundryLocalManager::builder()
// As an alternative to the checks below, you can specify the model to use directly during bootstrapping
// .alias_or_model_id(model_to_use)
.bootstrap(true) // Start the service if not running
.build()
.await?;

// List all the models in the catalog
println!("\nAvailable models in catalog:");
let models = manager.list_catalog_models().await?;
let model_in_catalog = models.iter().any(|m| m.alias == model_to_use);
for model in models {
println!("- {model}");
}
// Check if the model is in the catalog
if !model_in_catalog {
println!("Model '{model_to_use}' not found in catalog. Exiting.");
return Ok(());
}

// List available models in the local cache
println!("\nAvailable models in local cache:");
let models = manager.list_cached_models().await?;
let model_in_cache = models.iter().any(|m| m.alias == model_to_use);
for model in models {
println!("- {model}");
}

// Check if the model is already cached and download if not
if !model_in_cache {
println!("Model '{model_to_use}' not found in local cache. Downloading...");
// Download the model if not in cache
// NOTE: if you've bootstrapped with `alias_or_model_id`, you can use that directly and skip this check
manager.download_model(model_to_use, None, false).await?;
println!("Model '{model_to_use}' downloaded successfully.");
}

// Get the model information
let model_info = manager.get_model_info("phi-4-mini", true).await?;
let model_info = manager.get_model_info(model_to_use, true).await?;
println!("\nUsing model: {model_info}");

// Build the prompt
Expand Down