diff --git a/.gitignore b/.gitignore
index 8087ed4..868cd78 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,6 +2,11 @@ paper_results/
.venv/
.ruff_cache/
.mypy_cache/
+.pytest_cache/
+htmlcov/
paper_reproduction.py
+bbob_visualizations/
site/
+.coverage
CLAUDE.md
+examples/
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 6f2f487..8c91541 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,28 +1,41 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
- rev: v5.0.0
+ rev: v6.0.0
hooks:
- - id: trailing-whitespace
- - id: end-of-file-fixer
- - id: check-yaml
- - id: check-toml
- id: check-added-large-files
- - id: check-merge-conflict
- - id: debug-statements
+ args: [--maxkb=1000]
+ - id: check-json
+ - id: check-toml
+ - id: check-yaml
+ - id: detect-private-key
+ - id: end-of-file-fixer
+ - id: mixed-line-ending
+ - id: no-commit-to-branch
+ args: [--branch=main, --branch=master, --branch=production]
+ - id: trailing-whitespace
- repo: https://github.com/astral-sh/ruff-pre-commit
- rev: v0.8.2
+ rev: v0.14.10
hooks:
- - id: ruff
+ - id: ruff-check
args: [--fix]
- id: ruff-format
- - repo: https://github.com/pre-commit/mirrors-mypy
- rev: v1.13.0
+ - repo: https://github.com/astral-sh/uv-pre-commit
+ rev: 0.9.22
+ hooks:
+ - id: uv-lock
+
+ - repo: https://github.com/gitleaks/gitleaks
+ rev: v8.30.0
+ hooks:
+ - id: gitleaks
+
+ - repo: local
hooks:
- - id: mypy
- additional_dependencies:
- - numpy
- - pandas-stubs
- - types-Pillow
- args: [--ignore-missing-imports]
+ - id: ty
+ name: ty (type checker)
+ entry: uvx ty check
+ language: system
+ types: [python]
+ pass_filenames: false
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..4333dbe
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,42 @@
+.PHONY: help install test test-cov check docs docs-serve clean
+
+help:
+ @echo "Available commands:"
+ @echo " make install - Install dependencies with uv"
+ @echo " make test - Run tests with pytest"
+ @echo " make test-cov - Run tests with coverage report"
+ @echo " make check - Run all pre-commit hooks"
+ @echo " make docs - Build documentation"
+ @echo " make docs-serve - Serve documentation locally"
+ @echo " make clean - Remove build artifacts"
+
+install:
+ uv sync --all-extras
+
+test:
+ uv run python -m pytest tests/ -v
+
+test-cov:
+ uv run python -m pytest tests/ -v --cov=src/lonpy --cov-report=term-missing --cov-report=html
+
+check:
+ pre-commit run --all-files
+
+docs:
+ uv run mkdocs build
+
+docs-serve:
+ uv run mkdocs serve
+
+clean:
+ rm -rf build/
+ rm -rf dist/
+ rm -rf *.egg-info/
+ rm -rf .pytest_cache/
+ rm -rf .mypy_cache/
+ rm -rf .ruff_cache/
+ rm -rf htmlcov/
+ rm -rf .coverage
+ rm -rf site/
+ find . -type d -name "__pycache__" -exec rm -rf {} + 2>/dev/null || true
+ find . -type f -name "*.pyc" -delete 2>/dev/null || true
diff --git a/README.md b/README.md
index eb06e79..db0243a 100644
--- a/README.md
+++ b/README.md
@@ -6,13 +6,15 @@
[](https://www.python.org/downloads/)
[](https://colab.research.google.com/drive/1Ujl48ffgHg9ck1Hueh59s65OR3Q3BG99?usp=sharing)
-**Local Optima Networks for Continuous Optimization**
+**Local Optima Networks for Continuous and Discrete Optimization**
-lonpy is a Python library for constructing, analyzing, and visualizing Local Optima Networks (LONs) for continuous optimization problems. LONs provide a powerful way to understand the structure of fitness landscapes, revealing how local optima are connected and how difficult it may be to find global optima.
+lonpy is a Python library for constructing, analyzing, and visualizing Local Optima Networks (LONs) for both continuous and discrete optimization problems. LONs provide a powerful way to understand the structure of fitness landscapes, revealing how local optima are connected and how difficult it may be to find global optima.
## Features
-- **Basin-Hopping Sampling**: Efficient exploration of fitness landscapes using configurable Basin-Hopping
+- **Continuous Optimization**: Basin-Hopping sampling for continuous fitness landscapes
+- **Discrete Optimization**: Iterated Local Search (ILS) sampling for combinatorial problems
+- **Built-in Problems**: OneMax, Knapsack, Number Partitioning, and custom problem support
- **LON Construction**: Automatic construction of Local Optima Networks from sampling data
- **CMLON Support**: Compressed Monotonic LONs for cleaner landscape analysis
- **Rich Metrics**: Compute landscape metrics including funnel analysis and neutrality
@@ -34,6 +36,8 @@ pip install -e .
## Quick Start
+### Continuous Optimization
+
```python
import numpy as np
from lonpy import compute_lon, LONVisualizer
@@ -54,6 +58,7 @@ lon = compute_lon(
)
metrics = lon.compute_metrics()
+print(f"Number of optima: {metrics['n_optima']}")
print(f"Number of funnels: {metrics['n_funnels']}")
print(f"Global funnels: {metrics['n_global_funnels']}")
@@ -63,7 +68,33 @@ viz.plot_2d(lon, output_path="lon_2d.png")
viz.plot_3d(lon, output_path="lon_3d.png")
```
-### Compressed Monotonic LONs (CMLONs)
+### Discrete Optimization
+
+```python
+from lonpy import compute_discrete_lon, OneMax, Knapsack, NumberPartitioning
+
+# OneMax problem (maximize number of 1s in a bitstring)
+problem = OneMax(n=20)
+lon = compute_discrete_lon(problem, n_runs=100, seed=42)
+
+metrics = lon.compute_metrics()
+print(f"Number of optima: {metrics['n_optima']}")
+print(f"Number of funnels: {metrics['n_funnels']}")
+
+# Knapsack problem
+knapsack = Knapsack(
+ values=[60, 100, 120, 80, 90],
+ weights=[10, 20, 30, 15, 25],
+ capacity=50
+)
+lon = compute_discrete_lon(knapsack, n_runs=100, seed=42)
+
+# Number Partitioning problem
+npp = NumberPartitioning(n=15, k=0.5, seed=42)
+lon = compute_discrete_lon(npp, n_runs=100, seed=42)
+```
+
+## Compressed Monotonic LONs (CMLONs)
CMLONs are a compressed representation where nodes with equal fitness that are connected get merged. This provides a cleaner view of the landscape's funnel structure.
@@ -73,9 +104,12 @@ cmlon = lon.to_cmlon()
# Analyze CMLON-specific metrics
cmlon_metrics = cmlon.compute_metrics()
+print(f"Global funnel proportion: {cmlon_metrics['global_funnel_proportion']}")
```
-### Custom Sampling Configuration
+## Advanced Configuration
+
+### Continuous Sampling (Basin-Hopping)
```python
from lonpy import BasinHoppingSampler, BasinHoppingSamplerConfig
@@ -84,7 +118,7 @@ config = BasinHoppingSamplerConfig(
n_runs=50, # Number of independent runs
n_iterations=1000, # Iterations per run
step_size=0.05, # Perturbation size
- step_mode="per", # "per" (percentage) or "fix" (fixed)
+ step_mode="percentage", # "percentage" (of domain) or "fixed"
hash_digits=4, # Precision for identifying optima
seed=42 # For reproducibility
)
@@ -98,6 +132,24 @@ domain = [(-5.12, 5.12), (-5.12, 5.12)]
lon = sampler.sample_to_lon(rastrigin, domain)
```
+### Discrete Sampling (Iterated Local Search)
+
+```python
+from lonpy import ILSSampler, ILSSamplerConfig, OneMax
+
+config = ILSSamplerConfig(
+ n_runs=100, # Number of independent ILS runs
+ non_improvement_iterations=100, # Stop after no improvement
+ perturbation_strength=2, # Number of random moves per perturbation
+ first_improvement=True, # Use first improvement hill climbing
+ seed=42 # For reproducibility
+)
+
+sampler = ILSSampler(config)
+problem = OneMax(n=20)
+lon = sampler.sample_to_lon(problem)
+```
+
## Documentation
For full documentation, visit: [https://agh-a2s.github.io/lonpy](https://agh-a2s.github.io/lonpy)
diff --git a/docs/api/index.md b/docs/api/index.md
index e89ba48..69304dd 100644
--- a/docs/api/index.md
+++ b/docs/api/index.md
@@ -4,7 +4,7 @@ Complete API documentation for lonpy.
## Modules
-lonpy is organized into three main modules:
+lonpy is organized into modules for continuous and discrete optimization:
### [LON Module](lon.md)
@@ -15,11 +15,25 @@ Data structures for Local Optima Networks.
### [Sampling Module](sampling.md)
-Basin-Hopping sampling for LON construction.
+Sampling algorithms for LON construction.
-- [`compute_lon()`](sampling.md#lonpy.sampling.compute_lon) - High-level convenience function
-- [`BasinHoppingSampler`](sampling.md#lonpy.sampling.BasinHoppingSampler) - Sampling class
-- [`BasinHoppingSamplerConfig`](sampling.md#lonpy.sampling.BasinHoppingSamplerConfig) - Configuration
+**Continuous Optimization:**
+
+- [`compute_lon()`](sampling.md#lonpy.continuous.sampling.compute_lon) - High-level convenience function
+- [`BasinHoppingSampler`](sampling.md#lonpy.continuous.sampling.BasinHoppingSampler) - Basin-Hopping sampler
+- [`BasinHoppingSamplerConfig`](sampling.md#lonpy.continuous.sampling.BasinHoppingSamplerConfig) - Configuration
+
+**Discrete Optimization:**
+
+- [`compute_discrete_lon()`](sampling.md#lonpy.discrete.sampling.compute_discrete_lon) - High-level convenience function
+- [`ILSSampler`](sampling.md#lonpy.discrete.sampling.ILSSampler) - Iterated Local Search sampler
+- [`ILSSamplerConfig`](sampling.md#lonpy.discrete.sampling.ILSSamplerConfig) - Configuration
+
+**Built-in Problems:**
+
+- [`OneMax`](sampling.md#lonpy.problems.discrete.OneMax) - Maximize 1s in bitstring
+- [`Knapsack`](sampling.md#lonpy.problems.discrete.Knapsack) - 0/1 Knapsack problem
+- [`NumberPartitioning`](sampling.md#lonpy.problems.discrete.NumberPartitioning) - Number partitioning problem
### [Visualization Module](visualization.md)
@@ -29,12 +43,11 @@ Plotting and animation tools.
## Quick Reference
-### Creating a LON
+### Creating a LON (Continuous)
```python
from lonpy import compute_lon
-# Simple usage
lon = compute_lon(
func=objective_function,
dim=2,
@@ -45,6 +58,15 @@ lon = compute_lon(
)
```
+### Creating a LON (Discrete)
+
+```python
+from lonpy import compute_discrete_lon, OneMax
+
+problem = OneMax(n=20)
+lon = compute_discrete_lon(problem, n_runs=100, seed=42)
+```
+
### Analyzing a LON
```python
@@ -86,7 +108,7 @@ viz.visualize_all(lon, output_folder="./output")
lonpy depends on:
- `numpy` - Numerical computations
-- `scipy` - Optimization
+- `scipy` - Optimization (continuous)
- `pandas` - Data handling
- `igraph` - Graph operations
- `matplotlib` - 2D plotting
diff --git a/docs/api/sampling.md b/docs/api/sampling.md
index c4c046c..58ba534 100644
--- a/docs/api/sampling.md
+++ b/docs/api/sampling.md
@@ -1,23 +1,76 @@
# Sampling Module
-::: lonpy.sampling.compute_lon
+## Continuous Optimization
+
+### compute_lon
+
+::: lonpy.continuous.sampling.compute_lon
options:
show_root_heading: true
show_source: true
-::: lonpy.sampling.BasinHoppingSamplerConfig
+### BasinHoppingSamplerConfig
+
+::: lonpy.continuous.sampling.BasinHoppingSamplerConfig
options:
show_root_heading: true
show_source: true
-::: lonpy.sampling.BasinHoppingSampler
+### BasinHoppingSampler
+
+::: lonpy.continuous.sampling.BasinHoppingSampler
options:
show_root_heading: true
show_source: true
members:
- sample
- sample_to_lon
- - hash_solution
- - fitness_to_int
- - bounded_perturbation
- - unbounded_perturbation
+
+## Discrete Optimization
+
+### compute_discrete_lon
+
+::: lonpy.discrete.sampling.compute_discrete_lon
+ options:
+ show_root_heading: true
+ show_source: true
+
+### ILSSamplerConfig
+
+::: lonpy.discrete.sampling.ILSSamplerConfig
+ options:
+ show_root_heading: true
+ show_source: true
+
+### ILSSampler
+
+::: lonpy.discrete.sampling.ILSSampler
+ options:
+ show_root_heading: true
+ show_source: true
+ members:
+ - sample
+ - sample_to_lon
+
+## Built-in Problems
+
+### OneMax
+
+::: lonpy.problems.discrete.OneMax
+ options:
+ show_root_heading: true
+ show_source: true
+
+### Knapsack
+
+::: lonpy.problems.discrete.Knapsack
+ options:
+ show_root_heading: true
+ show_source: true
+
+### NumberPartitioning
+
+::: lonpy.problems.discrete.NumberPartitioning
+ options:
+ show_root_heading: true
+ show_source: true
diff --git a/docs/getting-started/concepts.md b/docs/getting-started/concepts.md
index 97a2250..7e72d2a 100644
--- a/docs/getting-started/concepts.md
+++ b/docs/getting-started/concepts.md
@@ -18,17 +18,29 @@ Think of it as a terrain where:
## Local Optima
-A **local optimum** is a solution that cannot be improved by small changes. In continuous optimization, this means:
+A **local optimum** is a solution that cannot be improved by small changes.
+
+### In Continuous Optimization
+
+For continuous problems, this means:
$$\nabla f(x^*) = 0 \quad \text{and} \quad \nabla^2 f(x^*) \succeq 0$$
+### In Discrete Optimization
+
+For discrete problems (like bitstrings), a local optimum means no single-bit flip (or swap, for permutations) improves the fitness.
+
Local optima are important because:
- **Gradient-based methods** get stuck at local optima
- **Multimodal functions** have many local optima
- The **global optimum** is the best local optimum
-## Basin-Hopping
+## Sampling Algorithms
+
+lonpy uses different sampling algorithms for continuous and discrete problems.
+
+### Basin-Hopping (Continuous)
**Basin-Hopping** is a global optimization algorithm that escapes local optima through:
@@ -44,6 +56,32 @@ Local Opt A → (perturb) → Local Opt B → (perturb) → Local Opt C → ...
lonpy records these transitions to build the LON.
+### Iterated Local Search (Discrete)
+
+**Iterated Local Search (ILS)** is the discrete counterpart of Basin-Hopping:
+
+1. **Hill climbing**: Find nearest local optimum using neighborhood moves
+2. **Perturbation**: Apply multiple random moves to escape the current basin
+3. **Acceptance**: Move to new optimum if it's better or equal
+
+```
+Local Opt A → (perturb) → Local Opt B → (perturb) → Local Opt C → ...
+```
+
+#### Neighborhoods
+
+For discrete problems, the neighborhood defines which solutions are "neighbors":
+
+- **Flip neighborhood** (bitstrings): Change one bit from 0→1 or 1→0
+- **Swap neighborhood** (permutations): Exchange positions of two elements
+
+#### Hill Climbing
+
+Hill climbing iteratively moves to better neighbors:
+
+- **First improvement**: Accept the first improving neighbor found
+- **Best improvement**: Evaluate all neighbors, move to the best one
+
## Local Optima Networks
A **Local Optima Network (LON)** is a directed graph where:
@@ -56,7 +94,7 @@ A **Local Optima Network (LON)** is a directed graph where:
lonpy constructs LONs by:
-1. Running multiple Basin-Hopping searches
+1. Running multiple sampling searches (Basin-Hopping or ILS)
2. Recording every transition (source optimum → target optimum)
3. Aggregating transitions into a weighted graph
@@ -67,6 +105,8 @@ lonpy constructs LONs by:
### Node Identification
+#### Continuous Problems
+
Two solutions are considered the same local optimum if their coordinates match after rounding to `hash_digits` decimal places:
```python
@@ -76,6 +116,15 @@ Two solutions are considered the same local optimum if their coordinates match a
# Same node!
```
+#### Discrete Problems
+
+For discrete problems, solutions are hashed based on their representation:
+
+```python
+# Bitstring [1, 0, 1, 1, 0] → hash based on bit pattern
+# Permutation [3, 1, 4, 2, 0] → hash based on element order
+```
+
## LON Metrics
lonpy computes several metrics to characterize fitness landscapes:
@@ -150,8 +199,43 @@ Funnels are identified as **weakly connected components** in the CMLON when cons
The ideal landscape has a single global funnel. Multiple funnels indicate potential difficulty for optimization algorithms.
+## Built-in Problems
+
+lonpy provides several discrete optimization problems:
+
+### OneMax
+
+Maximize the number of 1s in a bitstring. Simple unimodal landscape.
+
+```python
+from lonpy import OneMax
+problem = OneMax(n=20) # 20-bit bitstring
+```
+
+### Knapsack
+
+Select items to maximize value without exceeding capacity. NP-hard with complex landscape.
+
+```python
+from lonpy import Knapsack
+problem = Knapsack(
+ values=[60, 100, 120],
+ weights=[10, 20, 30],
+ capacity=50
+)
+```
+
+### Number Partitioning
+
+Partition numbers into two sets with minimal difference. NP-hard.
+
+```python
+from lonpy import NumberPartitioning
+problem = NumberPartitioning(n=20, k=0.5, seed=42)
+```
+
## Further Reading
-- [Sampling Guide](../user-guide/sampling.md) - Configure Basin-Hopping for your problem
+- [Sampling Guide](../user-guide/sampling.md) - Configure sampling for your problem
- [Analysis Guide](../user-guide/analysis.md) - Interpret LON metrics
- [Visualization Guide](../user-guide/visualization.md) - Create plots of your LON
diff --git a/docs/getting-started/quickstart.md b/docs/getting-started/quickstart.md
index d973110..030941b 100644
--- a/docs/getting-started/quickstart.md
+++ b/docs/getting-started/quickstart.md
@@ -2,7 +2,9 @@
This guide will get you up and running with lonpy in just a few minutes.
-## Your First LON
+## Continuous Optimization
+
+### Your First LON
Let's create a Local Optima Network for the classic Rastrigin function:
@@ -31,7 +33,7 @@ print(f"Local optima found: {lon.n_vertices}")
print(f"Transitions recorded: {lon.n_edges}")
```
-## Analyzing the Landscape
+### Analyzing the Landscape
lonpy computes useful metrics about your fitness landscape:
@@ -46,6 +48,52 @@ print(f"Neutrality: {metrics['neutral']:.1%}")
print(f"Strength to global: {metrics['strength']:.1%}")
```
+## Discrete Optimization
+
+### Built-in Problems
+
+lonpy provides several built-in discrete optimization problems:
+
+```python
+from lonpy import compute_discrete_lon, OneMax, Knapsack, NumberPartitioning
+
+# OneMax: maximize the number of 1s in a bitstring
+problem = OneMax(n=20)
+lon = compute_discrete_lon(problem, n_runs=100, seed=42)
+
+print(f"Local optima found: {lon.n_vertices}")
+print(f"Transitions recorded: {lon.n_edges}")
+```
+
+### Knapsack Problem
+
+```python
+# 0/1 Knapsack: maximize value within capacity
+knapsack = Knapsack(
+ values=[60, 100, 120, 80, 90],
+ weights=[10, 20, 30, 15, 25],
+ capacity=50
+)
+lon = compute_discrete_lon(knapsack, n_runs=100, seed=42)
+
+metrics = lon.compute_metrics()
+print(f"Number of optima: {metrics['n_optima']}")
+print(f"Number of funnels: {metrics['n_funnels']}")
+```
+
+### Number Partitioning
+
+```python
+# Number Partitioning: minimize partition imbalance
+npp = NumberPartitioning(n=15, k=0.5, seed=42)
+lon = compute_discrete_lon(npp, n_runs=100, seed=42)
+
+metrics = lon.compute_metrics()
+print(f"Number of optima: {metrics['n_optima']}")
+```
+
+## Understanding Metrics
+
**What do these metrics mean?**
| Metric | Description |
@@ -118,9 +166,9 @@ In CMLON visualizations:
- **Pink nodes**: In global funnel
- **Light blue nodes**: In local funnels
-## Complete Example
+## Complete Examples
-Here's a full script that generates all visualizations:
+### Continuous Optimization
```python
import numpy as np
@@ -163,8 +211,37 @@ for name, path in outputs.items():
print(f"{name}: {path}")
```
+### Discrete Optimization
+
+```python
+from lonpy import compute_discrete_lon, OneMax, LONVisualizer
+
+# Build LON for OneMax
+problem = OneMax(n=20)
+lon = compute_discrete_lon(
+ problem,
+ n_runs=100,
+ non_improvement_iterations=100,
+ seed=42
+)
+
+# Print analysis
+print("=== LON Analysis ===")
+print(f"Vertices: {lon.n_vertices}")
+print(f"Edges: {lon.n_edges}")
+
+metrics = lon.compute_metrics()
+for key, value in metrics.items():
+ print(f"{key}: {value}")
+
+# Convert to CMLON
+cmlon = lon.to_cmlon()
+cmlon_metrics = cmlon.compute_metrics()
+print(f"\nGlobal funnel proportion: {cmlon_metrics['global_funnel_proportion']:.1%}")
+```
+
## Next Steps
- [Core Concepts](concepts.md) - Understand LON theory
-- [Sampling Guide](../user-guide/sampling.md) - Configure Basin-Hopping
+- [Sampling Guide](../user-guide/sampling.md) - Configure sampling algorithms
- [API Reference](../api/index.md) - Full API documentation
diff --git a/docs/index.md b/docs/index.md
index b63195b..c2e8c6e 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -1,10 +1,10 @@
# lonpy
-**Local Optima Networks for Continuous Optimization**
+**Local Optima Networks for Continuous and Discrete Optimization**
{ width="100%" }
-lonpy is a Python library for constructing, analyzing, and visualizing Local Optima Networks (LONs) for continuous optimization problems.
+lonpy is a Python library for constructing, analyzing, and visualizing Local Optima Networks (LONs) for both continuous and discrete optimization problems.
## What are Local Optima Networks?
@@ -18,17 +18,29 @@ Local Optima Networks (LONs) are graph-based models that capture the global stru
-- **Basin-Hopping Sampling**
+- **Continuous Optimization**
---
- Efficient exploration of fitness landscapes using configurable Basin-Hopping with customizable perturbation strategies
+ Basin-Hopping sampling for continuous fitness landscapes with configurable perturbation strategies
-- **LON Construction**
+- **Discrete Optimization**
---
- Automatic construction of Local Optima Networks from sampling data with support for both LON and CMLON representations
+ Iterated Local Search (ILS) sampling for combinatorial problems like OneMax, Knapsack, and Number Partitioning
+
+- **Built-in Problems**
+
+ ---
+
+ Ready-to-use problem instances: OneMax, Knapsack, Number Partitioning, and support for custom problems
+
+- **LON & CMLON Support**
+
+ ---
+
+ Both standard LON and Compressed Monotonic LON representations for landscape analysis
- **Rich Metrics**
@@ -46,33 +58,58 @@ Local Optima Networks (LONs) are graph-based models that capture the global stru
## Quick Example
-```python
-import numpy as np
-from lonpy import compute_lon, LONVisualizer
-
-# Define the Rastrigin function
-def rastrigin(x):
- return 10 * len(x) + np.sum(x**2 - 10 * np.cos(2 * np.pi * x))
-
-# Build the LON
-lon = compute_lon(
- rastrigin,
- dim=2,
- lower_bound=-5.12,
- upper_bound=5.12,
- n_runs=20,
- seed=42
-)
-
-# Analyze
-metrics = lon.compute_metrics()
-print(f"Found {lon.n_vertices} local optima")
-print(f"Landscape has {metrics['n_funnels']} funnels")
-
-# Visualize
-viz = LONVisualizer()
-viz.plot_3d(lon, output_path="landscape.png")
-```
+=== "Continuous"
+
+ ```python
+ import numpy as np
+ from lonpy import compute_lon, LONVisualizer
+
+ # Define the Rastrigin function
+ def rastrigin(x):
+ return 10 * len(x) + np.sum(x**2 - 10 * np.cos(2 * np.pi * x))
+
+ # Build the LON
+ lon = compute_lon(
+ rastrigin,
+ dim=2,
+ lower_bound=-5.12,
+ upper_bound=5.12,
+ n_runs=20,
+ seed=42
+ )
+
+ # Analyze
+ metrics = lon.compute_metrics()
+ print(f"Found {lon.n_vertices} local optima")
+ print(f"Landscape has {metrics['n_funnels']} funnels")
+
+ # Visualize
+ viz = LONVisualizer()
+ viz.plot_3d(lon, output_path="landscape.png")
+ ```
+
+=== "Discrete"
+
+ ```python
+ from lonpy import compute_discrete_lon, OneMax, Knapsack
+
+ # OneMax problem
+ problem = OneMax(n=20)
+ lon = compute_discrete_lon(problem, n_runs=100, seed=42)
+
+ # Analyze
+ metrics = lon.compute_metrics()
+ print(f"Found {lon.n_vertices} local optima")
+ print(f"Landscape has {metrics['n_funnels']} funnels")
+
+ # Knapsack problem
+ knapsack = Knapsack(
+ values=[60, 100, 120, 80, 90],
+ weights=[10, 20, 30, 15, 25],
+ capacity=50
+ )
+ lon = compute_discrete_lon(knapsack, n_runs=100, seed=42)
+ ```
## Installation
diff --git a/docs/user-guide/examples.md b/docs/user-guide/examples.md
index 2cd0743..7f19530 100644
--- a/docs/user-guide/examples.md
+++ b/docs/user-guide/examples.md
@@ -2,7 +2,9 @@
Complete examples demonstrating lonpy's capabilities.
-## Basic LON Analysis
+## Continuous Optimization
+
+### Basic LON Analysis
```python
import numpy as np
@@ -43,7 +45,7 @@ viz.plot_2d(lon, output_path="rastrigin_lon.png", seed=42)
viz.plot_3d(lon, output_path="rastrigin_3d.png", seed=42)
```
-## Comparing Multiple Functions
+### Comparing Multiple Functions
```python
import numpy as np
@@ -108,51 +110,7 @@ print("\n=== Comparison ===")
print(df.to_string(index=False))
```
-## Detailed CMLON Analysis
-
-```python
-import numpy as np
-from lonpy import compute_lon, LONVisualizer
-
-def rastrigin(x):
- return 10 * len(x) + np.sum(x**2 - 10 * np.cos(2 * np.pi * x))
-
-# Build LON
-lon = compute_lon(
- rastrigin,
- dim=2,
- lower_bound=-5.12,
- upper_bound=5.12,
- n_runs=50,
- seed=42
-)
-
-# Convert to CMLON
-cmlon = lon.to_cmlon()
-
-print("=== LON vs CMLON ===")
-print(f"LON vertices: {lon.n_vertices}")
-print(f"CMLON vertices: {cmlon.n_vertices}")
-print(f"Compression: {1 - cmlon.n_vertices/lon.n_vertices:.1%}")
-
-print("\n=== CMLON Structure ===")
-print(f"Total sinks: {len(cmlon.get_sinks())}")
-print(f"Global sinks: {len(cmlon.get_global_sinks())}")
-print(f"Local sinks: {len(cmlon.get_local_sinks())}")
-
-# Metrics
-cmlon_metrics = cmlon.compute_metrics()
-print("\n=== CMLON Metrics ===")
-print(f"Global funnel proportion: {cmlon_metrics['global_funnel_proportion']:.1%}")
-print(f"Strength to global: {cmlon_metrics['strength']:.1%}")
-
-# Visualize
-viz = LONVisualizer()
-viz.plot_2d(cmlon, output_path="cmlon_2d.png", seed=42)
-viz.plot_3d(cmlon, output_path="cmlon_3d.png", seed=42)
-```
-
-## Custom Sampling Configuration
+### Custom Sampling Configuration
```python
import numpy as np
@@ -199,41 +157,248 @@ print(f"Funnels: {metrics['n_funnels']}")
print(f"Strength: {metrics['strength']:.1%}")
```
-## Accessing Raw Trace Data
+## Discrete Optimization
+
+### OneMax Analysis
+
+```python
+from lonpy import compute_discrete_lon, OneMax, LONVisualizer
+
+# Build LON for OneMax
+problem = OneMax(n=20)
+lon = compute_discrete_lon(
+ problem,
+ n_runs=100,
+ non_improvement_iterations=100,
+ seed=42
+)
+
+# Analyze
+print("=== OneMax LON Analysis ===")
+print(f"Local optima: {lon.n_vertices}")
+print(f"Transitions: {lon.n_edges}")
+print(f"Best fitness: {lon.best_fitness}")
+
+metrics = lon.compute_metrics()
+for key, value in metrics.items():
+ if isinstance(value, float):
+ print(f"{key}: {value:.4f}")
+ else:
+ print(f"{key}: {value}")
+
+# Convert to CMLON
+cmlon = lon.to_cmlon()
+cmlon_metrics = cmlon.compute_metrics()
+print(f"\nCMLON vertices: {cmlon.n_vertices}")
+print(f"Global funnel proportion: {cmlon_metrics['global_funnel_proportion']:.1%}")
+```
+
+### Knapsack Problem
+
+```python
+from lonpy import compute_discrete_lon, Knapsack, LONVisualizer
+
+# Define a Knapsack instance
+problem = Knapsack(
+ values=[60, 100, 120, 80, 90, 70, 110, 95, 85, 75],
+ weights=[10, 20, 30, 15, 25, 12, 28, 22, 18, 14],
+ capacity=80
+)
+
+# Build LON
+lon = compute_discrete_lon(
+ problem,
+ n_runs=200,
+ non_improvement_iterations=150,
+ perturbation_strength=2,
+ seed=42
+)
+
+# Analyze
+print("=== Knapsack LON Analysis ===")
+print(f"Number of items: {problem.n}")
+print(f"Local optima: {lon.n_vertices}")
+print(f"Transitions: {lon.n_edges}")
+print(f"Best fitness: {lon.best_fitness}")
+
+metrics = lon.compute_metrics()
+print(f"\nFunnels: {metrics['n_funnels']}")
+print(f"Global funnels: {metrics['n_global_funnels']}")
+print(f"Strength: {metrics['strength']:.1%}")
+
+# CMLON analysis
+cmlon = lon.to_cmlon()
+cmlon_metrics = cmlon.compute_metrics()
+print(f"\nCMLON vertices: {cmlon.n_vertices}")
+print(f"Global funnel proportion: {cmlon_metrics['global_funnel_proportion']:.1%}")
+```
+
+### Number Partitioning
+
+```python
+from lonpy import compute_discrete_lon, NumberPartitioning
+
+# Generate NPP instance
+problem = NumberPartitioning(n=20, k=0.5, seed=42)
+
+print(f"NPP instance with {problem.n} items")
+print(f"Item values: {problem.items[:5]}... (first 5)")
+
+# Build LON
+lon = compute_discrete_lon(
+ problem,
+ n_runs=150,
+ non_improvement_iterations=100,
+ perturbation_strength=2,
+ seed=42
+)
+
+# Analyze
+print("\n=== NPP LON Analysis ===")
+print(f"Local optima: {lon.n_vertices}")
+print(f"Best fitness: {lon.best_fitness}") # 0 = perfect partition
+
+metrics = lon.compute_metrics()
+print(f"Funnels: {metrics['n_funnels']}")
+print(f"Neutrality: {metrics['neutral']:.1%}")
+
+# Check if perfect partition exists
+if lon.best_fitness == 0:
+ print("\nPerfect partition found!")
+else:
+ print(f"\nBest partition difference: {lon.best_fitness}")
+```
+
+### Comparing Discrete Problems
```python
-import numpy as np
import pandas as pd
-from lonpy import BasinHoppingSampler, BasinHoppingSamplerConfig
+from lonpy import compute_discrete_lon, OneMax, Knapsack, NumberPartitioning
+
+# Define problems
+problems = {
+ "OneMax-20": OneMax(n=20),
+ "OneMax-30": OneMax(n=30),
+ "Knapsack-10": Knapsack(
+ values=[60, 100, 120, 80, 90, 70, 110, 95, 85, 75],
+ weights=[10, 20, 30, 15, 25, 12, 28, 22, 18, 14],
+ capacity=80
+ ),
+ "NPP-15": NumberPartitioning(n=15, k=0.5, seed=42),
+ "NPP-20": NumberPartitioning(n=20, k=0.5, seed=42),
+}
-def sphere(x):
- return np.sum(x**2)
+# Analyze each
+results = []
+for name, problem in problems.items():
+ print(f"Analyzing {name}...")
-config = BasinHoppingSamplerConfig(n_runs=5, n_iterations=100, seed=42)
-sampler = BasinHoppingSampler(config)
+ lon = compute_discrete_lon(
+ problem,
+ n_runs=100,
+ non_improvement_iterations=100,
+ seed=42
+ )
+
+ metrics = lon.compute_metrics()
+ cmlon = lon.to_cmlon()
+ cmlon_metrics = cmlon.compute_metrics()
-domain = [(-5.0, 5.0), (-5.0, 5.0)]
-trace_df, raw_records = sampler.sample(sphere, domain)
-
-# Trace data for LON construction
-print("=== Trace Data ===")
-print(trace_df.head(10))
-print(f"\nTotal transitions: {len(trace_df)}")
-
-# Raw iteration data
-print("\n=== Sample Raw Records ===")
-for i, rec in enumerate(raw_records[:5]):
- print(f"Run {rec['run']}, Iter {rec['iteration']}:")
- print(f" From: {rec['current_f']:.6f} -> To: {rec['new_f']:.6f}")
- print(f" Accepted: {rec['accepted']}")
-
-# Analyze acceptance rate
-accepted = sum(r['accepted'] for r in raw_records)
-total = len(raw_records)
-print(f"\nAcceptance rate: {accepted/total:.1%}")
+ results.append({
+ "Problem": name,
+ "n": problem.n,
+ "Optima": lon.n_vertices,
+ "Funnels": metrics['n_funnels'],
+ "Neutrality": f"{metrics['neutral']:.1%}",
+ "Global Funnel %": f"{cmlon_metrics['global_funnel_proportion']:.1%}",
+ })
+
+# Display results
+df = pd.DataFrame(results)
+print("\n=== Comparison ===")
+print(df.to_string(index=False))
```
-## Working with the Graph Directly
+### Custom ILS Configuration
+
+```python
+from lonpy import ILSSampler, ILSSamplerConfig, OneMax
+
+# Custom configuration
+config = ILSSamplerConfig(
+ n_runs=200,
+ max_iterations=0, # No limit
+ non_improvement_iterations=150, # Stop condition
+ perturbation_strength=3, # Larger perturbation
+ first_improvement=True, # Faster exploration
+ representation="bitstring",
+ neighborhood="flip",
+ seed=42
+)
+
+sampler = ILSSampler(config)
+problem = OneMax(n=30)
+
+def progress(run, total):
+ if run % 20 == 0:
+ print(f"Progress: {run}/{total}")
+
+lon = sampler.sample_to_lon(problem, progress_callback=progress)
+
+print(f"\nFound {lon.n_vertices} local optima")
+metrics = lon.compute_metrics()
+print(f"Funnels: {metrics['n_funnels']}")
+```
+
+## CMLON Analysis
+
+### Detailed CMLON Comparison
+
+```python
+import numpy as np
+from lonpy import compute_lon, LONVisualizer
+
+def rastrigin(x):
+ return 10 * len(x) + np.sum(x**2 - 10 * np.cos(2 * np.pi * x))
+
+# Build LON
+lon = compute_lon(
+ rastrigin,
+ dim=2,
+ lower_bound=-5.12,
+ upper_bound=5.12,
+ n_runs=50,
+ seed=42
+)
+
+# Convert to CMLON
+cmlon = lon.to_cmlon()
+
+print("=== LON vs CMLON ===")
+print(f"LON vertices: {lon.n_vertices}")
+print(f"CMLON vertices: {cmlon.n_vertices}")
+print(f"Compression: {1 - cmlon.n_vertices/lon.n_vertices:.1%}")
+
+print("\n=== CMLON Structure ===")
+print(f"Total sinks: {len(cmlon.get_sinks())}")
+print(f"Global sinks: {len(cmlon.get_global_sinks())}")
+print(f"Local sinks: {len(cmlon.get_local_sinks())}")
+
+# Metrics
+cmlon_metrics = cmlon.compute_metrics()
+print("\n=== CMLON Metrics ===")
+print(f"Global funnel proportion: {cmlon_metrics['global_funnel_proportion']:.1%}")
+print(f"Strength to global: {cmlon_metrics['strength']:.1%}")
+
+# Visualize
+viz = LONVisualizer()
+viz.plot_2d(cmlon, output_path="cmlon_2d.png", seed=42)
+viz.plot_3d(cmlon, output_path="cmlon_3d.png", seed=42)
+```
+
+## Working with the Graph
+
+### Accessing Graph Properties
```python
import numpy as np
@@ -270,7 +435,9 @@ if g.ecount() > 0:
print(f"Mean edge weight: {np.mean(edge_weights):.2f}")
```
-## Batch Analysis Script
+## Batch Analysis
+
+### Analyzing Multiple Instances
```python
import numpy as np
diff --git a/docs/user-guide/sampling.md b/docs/user-guide/sampling.md
index 20b8b82..f215a4c 100644
--- a/docs/user-guide/sampling.md
+++ b/docs/user-guide/sampling.md
@@ -1,10 +1,12 @@
# Sampling Guide
-This guide covers how to configure Basin-Hopping sampling for LON construction.
+This guide covers how to configure sampling algorithms for LON construction.
-## Quick Start
+## Continuous Optimization
-The simplest way to create a LON:
+### Quick Start
+
+The simplest way to create a LON for continuous problems:
```python
from lonpy import compute_lon
@@ -19,7 +21,7 @@ lon = compute_lon(
)
```
-## Configuration Options
+### Configuration Options
For more control, use `BasinHoppingSamplerConfig`:
@@ -42,9 +44,9 @@ sampler = BasinHoppingSampler(config)
lon = sampler.sample_to_lon(my_objective, domain)
```
-## Parameters Explained
+### Parameters Explained
-### Sampling Parameters
+#### Sampling Parameters
| Parameter | Default | Description |
|-----------|---------|-------------|
@@ -66,7 +68,7 @@ config = BasinHoppingSamplerConfig(n_runs=50, n_iterations=200)
config = BasinHoppingSamplerConfig(n_runs=10, n_iterations=1000)
```
-### Perturbation Settings
+#### Perturbation Settings
| Parameter | Default | Description |
|-----------|---------|-------------|
@@ -96,7 +98,7 @@ config = BasinHoppingSamplerConfig(
- Too large: Jumps randomly, misses local structure
- Good starting point: 5-10% of domain range
-### Precision Settings
+#### Precision Settings
| Parameter | Default | Description |
|-----------|---------|-------------|
@@ -113,7 +115,7 @@ config = BasinHoppingSamplerConfig(hash_digits=6)
config = BasinHoppingSamplerConfig(hash_digits=2)
```
-### Local Minimizer Settings
+#### Local Minimizer Settings
| Parameter | Default | Description |
|-----------|---------|-------------|
@@ -132,7 +134,7 @@ config = BasinHoppingSamplerConfig(
)
```
-## Domain Specification
+### Domain Specification
The domain is specified as a list of (lower, upper) tuples:
@@ -150,9 +152,186 @@ sampler = BasinHoppingSampler()
lon = sampler.sample_to_lon(func, domain)
```
+## Discrete Optimization
+
+### Quick Start
+
+The simplest way to create a LON for discrete problems:
+
+```python
+from lonpy import compute_discrete_lon, OneMax
+
+problem = OneMax(n=20)
+lon = compute_discrete_lon(problem, n_runs=100, seed=42)
+```
+
+### Configuration Options
+
+For more control, use `ILSSamplerConfig`:
+
+```python
+from lonpy import ILSSampler, ILSSamplerConfig, OneMax
+
+config = ILSSamplerConfig(
+ n_runs=100, # Number of independent ILS runs
+ max_iterations=0, # Max iterations (0 = unlimited)
+ non_improvement_iterations=100, # Stop after no improvement
+ perturbation_strength=2, # Number of random moves
+ first_improvement=True, # Hill climbing strategy
+ representation="bitstring", # "bitstring" or "permutation"
+ neighborhood="flip", # "flip" or "swap"
+ seed=42
+)
+
+sampler = ILSSampler(config)
+problem = OneMax(n=20)
+lon = sampler.sample_to_lon(problem)
+```
+
+### Parameters Explained
+
+#### Sampling Parameters
+
+| Parameter | Default | Description |
+|-----------|---------|-------------|
+| `n_runs` | 100 | Number of independent ILS runs |
+| `max_iterations` | 0 | Max iterations per run (0 = unlimited) |
+| `non_improvement_iterations` | 100 | Stop after this many non-improving iterations |
+| `seed` | None | Random seed for reproducibility |
+
+**Stopping conditions:**
+
+- If `max_iterations > 0`: Stop after that many iterations
+- If `non_improvement_iterations > 0`: Stop after that many iterations without improvement
+- Both conditions are checked; whichever is reached first stops the run
+
+```python
+# Run until no improvement for 100 iterations
+config = ILSSamplerConfig(
+ n_runs=100,
+ non_improvement_iterations=100
+)
+
+# Run exactly 500 iterations per run
+config = ILSSamplerConfig(
+ n_runs=50,
+ max_iterations=500,
+ non_improvement_iterations=500 # Effectively disabled
+)
+```
+
+#### Perturbation Settings
+
+| Parameter | Default | Description |
+|-----------|---------|-------------|
+| `perturbation_strength` | 2 | Number of random moves per perturbation |
+
+**Choosing perturbation strength:**
+
+- Too small (1): May not escape current basin
+- Too large: Jumps randomly, may miss structure
+- Good starting point: 2-3 for small problems, scale with problem size
+
+```python
+# For small problems (n < 30)
+config = ILSSamplerConfig(perturbation_strength=2)
+
+# For larger problems (n > 50)
+config = ILSSamplerConfig(perturbation_strength=3)
+```
+
+#### Hill Climbing Settings
+
+| Parameter | Default | Description |
+|-----------|---------|-------------|
+| `first_improvement` | True | Use first improvement hill climbing |
+
+**First improvement vs Best improvement:**
+
+- **First improvement** (True): Faster, more diverse exploration
+- **Best improvement** (False): More thorough, deterministic convergence
+
+```python
+# Fast exploration (recommended)
+config = ILSSamplerConfig(first_improvement=True)
+
+# Thorough convergence
+config = ILSSamplerConfig(first_improvement=False)
+```
+
+#### Representation and Neighborhood
+
+| Parameter | Default | Description |
+|-----------|---------|-------------|
+| `representation` | "bitstring" | Solution representation type |
+| `neighborhood` | "flip" | Neighborhood operator |
+
+**Bitstring representation:**
+
+- Use for binary problems (OneMax, Knapsack, Number Partitioning)
+- Use `neighborhood="flip"` (flip one bit)
+
+```python
+config = ILSSamplerConfig(
+ representation="bitstring",
+ neighborhood="flip"
+)
+```
+
+**Permutation representation:**
+
+- Use for ordering problems (TSP, scheduling)
+- Use `neighborhood="swap"` (swap two positions)
+
+```python
+config = ILSSamplerConfig(
+ representation="permutation",
+ neighborhood="swap"
+)
+```
+
+### Built-in Problems
+
+#### OneMax
+
+```python
+from lonpy import OneMax
+
+# Maximize number of 1s in bitstring
+problem = OneMax(n=20)
+lon = compute_discrete_lon(problem, n_runs=100)
+```
+
+#### Knapsack
+
+```python
+from lonpy import Knapsack
+
+# Define items and capacity
+problem = Knapsack(
+ values=[60, 100, 120, 80, 90],
+ weights=[10, 20, 30, 15, 25],
+ capacity=50
+)
+lon = compute_discrete_lon(problem, n_runs=100)
+
+# Or load from file
+problem = Knapsack.from_file("instance.txt")
+```
+
+#### Number Partitioning
+
+```python
+from lonpy import NumberPartitioning
+
+# Generate random instance
+problem = NumberPartitioning(n=20, k=0.5, seed=42)
+lon = compute_discrete_lon(problem, n_runs=100)
+```
+
## Accessing Raw Data
-For custom analysis, access the raw trace data:
+### Continuous
```python
sampler = BasinHoppingSampler(config)
@@ -169,6 +348,22 @@ for record in raw_records[:5]:
print(f" Accepted: {record['accepted']}")
```
+### Discrete
+
+```python
+sampler = ILSSampler(config)
+trace_df, raw_records = sampler.sample(problem)
+
+# trace_df columns: [run, fit1, node1, fit2, node2]
+print(trace_df.head())
+
+# raw_records contains transition data
+for record in raw_records[:5]:
+ print(f"Run {record['run']}")
+ print(f" From fitness: {record['fit1']}")
+ print(f" To fitness: {record['fit2']}")
+```
+
## Progress Monitoring
Track sampling progress with a callback:
@@ -177,13 +372,18 @@ Track sampling progress with a callback:
def progress(run, total):
print(f"Run {run}/{total}")
+# Continuous
sampler = BasinHoppingSampler(config)
lon = sampler.sample_to_lon(func, domain, progress_callback=progress)
+
+# Discrete
+sampler = ILSSampler(config)
+lon = sampler.sample_to_lon(problem, progress_callback=progress)
```
## Best Practices
-### For Standard Test Functions
+### For Standard Test Functions (Continuous)
```python
# Rastrigin, Ackley, etc. with known bounds
@@ -197,7 +397,7 @@ config = BasinHoppingSamplerConfig(
)
```
-### For Unknown Functions
+### For Unknown Functions (Continuous)
```python
# Start with wider exploration
@@ -212,7 +412,7 @@ config = BasinHoppingSamplerConfig(
# Refine based on initial results
```
-### For High-Dimensional Problems
+### For High-Dimensional Problems (Continuous)
```python
# More runs needed for coverage
@@ -224,6 +424,30 @@ config = BasinHoppingSamplerConfig(
)
```
+### For Small Discrete Problems
+
+```python
+# OneMax, small Knapsack (n < 30)
+config = ILSSamplerConfig(
+ n_runs=100,
+ non_improvement_iterations=100,
+ perturbation_strength=2,
+ first_improvement=True
+)
+```
+
+### For Large Discrete Problems
+
+```python
+# Large instances (n > 50)
+config = ILSSamplerConfig(
+ n_runs=200,
+ non_improvement_iterations=200,
+ perturbation_strength=3,
+ first_improvement=True
+)
+```
+
## Next Steps
- [Analysis Guide](analysis.md) - Interpret your LON metrics
diff --git a/mkdocs.yml b/mkdocs.yml
index 4184034..ecb5749 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -1,6 +1,6 @@
site_name: lonpy
-site_description: Local Optima Networks for Continuous Optimization
-site_author: Your Name
+site_description: Local Optima Networks
+site_author: AGH University of Kraków
site_url: https://agh-a2s.github.io/lonpy
repo_name: agh-a2s/lonpy
diff --git a/pyproject.toml b/pyproject.toml
index e6477cc..36ce28a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -27,6 +27,7 @@ dev = [
"pytest>=7.0.0",
"pytest-cov>=4.0.0",
"ruff>=0.8.0",
+ "ty>=0.0.10",
]
[build-system]
@@ -37,40 +38,38 @@ build-backend = "hatchling.build"
packages = ["src/lonpy"]
[tool.ruff]
-line-length = 100
-target-version = "py310"
+line-length = 135
+target-version = "py313"
[tool.ruff.lint]
select = [
"E", # pycodestyle errors
"W", # pycodestyle warnings
- "F", # Pyflakes
+ "F", # pyflakes
"I", # isort
"B", # flake8-bugbear
- "C4", # flake8-comprehensions
- "UP", # pyupgrade
- "ARG", # flake8-unused-arguments
+ "C", # flake8-comprehensions
+ "UP", # pyupgrade (modern Python syntax)
"SIM", # flake8-simplify
- "TCH", # flake8-type-checking
- "PTH", # flake8-use-pathlib
"RUF", # Ruff-specific rules
+ "N", # pep8-naming
+ "YTT", # flake8-2020
+ "A", # flake8-builtins
]
-ignore = [
- "E501", # line too long (handled by formatter)
- "B008", # do not perform function calls in argument defaults
- "B905", # zip without strict parameter
- "SIM108", # use ternary operator instead of if-else
- "C408", # unnecessary dict() call - often more readable in plotly
-]
+ignore = ["C408"]
+mccabe = {max-complexity = 15}
[tool.ruff.lint.isort]
-known-first-party = ["lonpy"]
+known-first-party = ["lonpy"]
+combine-as-imports = true
-[tool.ruff.lint.per-file-ignores]
-"tests/*" = ["ARG"]
+[tool.ruff.format]
+quote-style = "double"
+indent-style = "space"
+skip-magic-trailing-comma = false
+line-ending = "lf"
+docstring-code-format = true
-[tool.mypy]
-python_version = "3.10"
-warn_return_any = true
-warn_unused_configs = true
-ignore_missing_imports = true
+[tool.ty.rules]
+unresolved-import = "ignore"
+possibly-missing-attribute = "ignore"
diff --git a/src/lonpy/__init__.py b/src/lonpy/__init__.py
index 464e895..591708a 100644
--- a/src/lonpy/__init__.py
+++ b/src/lonpy/__init__.py
@@ -1,13 +1,43 @@
-from lonpy.lon import CMLON, LON
-from lonpy.sampling import BasinHoppingSampler, BasinHoppingSamplerConfig, compute_lon
+from lonpy.continuous.sampling import (
+ BasinHoppingSampler,
+ BasinHoppingSamplerConfig,
+ compute_lon,
+)
+from lonpy.discrete import (
+ FlipNeighborhood,
+ ILSSampler,
+ ILSSamplerConfig,
+ Solution,
+ SwapNeighborhood,
+ hill_climb,
+)
+from lonpy.discrete.sampling import compute_discrete_lon
+from lonpy.lon import CMLON, LON, MLON
+from lonpy.problems import (
+ Knapsack,
+ NumberPartitioning,
+ OneMax,
+ ProblemInstance,
+)
from lonpy.visualization import LONVisualizer
-__version__ = "0.1.0"
__all__ = [
"CMLON",
"LON",
+ "MLON",
"BasinHoppingSampler",
"BasinHoppingSamplerConfig",
+ "FlipNeighborhood",
+ "ILSSampler",
+ "ILSSamplerConfig",
+ "Knapsack",
"LONVisualizer",
+ "NumberPartitioning",
+ "OneMax",
+ "ProblemInstance",
+ "Solution",
+ "SwapNeighborhood",
+ "compute_discrete_lon",
"compute_lon",
+ "hill_climb",
]
diff --git a/src/lonpy/continuous/__init__.py b/src/lonpy/continuous/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/lonpy/sampling.py b/src/lonpy/continuous/sampling.py
similarity index 85%
rename from src/lonpy/sampling.py
rename to src/lonpy/continuous/sampling.py
index dc13b6f..ec1f32c 100644
--- a/src/lonpy/sampling.py
+++ b/src/lonpy/continuous/sampling.py
@@ -46,13 +46,15 @@ class BasinHoppingSamplerConfig:
class BasinHoppingSampler:
"""
- Basin-Hopping sampler for constructing Local Optima Networks.
+ Basin-Hopping sampler for constructing Local Optima Networks (continuous).
Basin-Hopping is a global optimization algorithm that combines random
perturbations with local minimization. This implementation records
transitions between local optima for LON construction.
Example:
+ >>> from lonpy import BasinHoppingSampler, BasinHoppingSamplerConfig
+ >>>
>>> config = BasinHoppingSamplerConfig(n_runs=10, n_iterations=1000)
>>> sampler = BasinHoppingSampler(config)
>>> lon = sampler.sample_to_lon(objective_func, domain)
@@ -60,21 +62,24 @@ class BasinHoppingSampler:
def __init__(self, config: BasinHoppingSamplerConfig | None = None):
self.config = config or BasinHoppingSamplerConfig()
+ self._rng: np.random.Generator = np.random.default_rng()
- def bounded_perturbation(
+ def _bounded_perturbation(
self,
x: np.ndarray,
p: np.ndarray,
domain: list[tuple[float, float]],
) -> np.ndarray:
- y = x + np.random.uniform(low=-p, high=p)
+ y = x + self._rng.uniform(low=-p, high=p)
bounds = np.array(domain)
- return np.clip(y, bounds[:, 0], bounds[:, 1])
+ result: np.ndarray = np.clip(y, bounds[:, 0], bounds[:, 1])
+ return result
- def unbounded_perturbation(self, x: np.ndarray, p: np.ndarray) -> np.ndarray:
- return x + np.random.uniform(low=-p, high=p)
+ def _unbounded_perturbation(self, x: np.ndarray, p: np.ndarray) -> np.ndarray:
+ result: np.ndarray = x + self._rng.uniform(low=-p, high=p)
+ return result
- def hash_solution(self, x: np.ndarray, fitness: float = 0.0) -> str: # noqa: ARG002
+ def _hash_solution(self, x: np.ndarray) -> str:
"""
Create hash string for a solution.
@@ -83,20 +88,16 @@ def hash_solution(self, x: np.ndarray, fitness: float = 0.0) -> str: # noqa: AR
Args:
x: Solution coordinates.
- fitness: Fitness value (unused, kept for API compatibility).
Returns:
Hash string identifying the local optimum.
"""
- if self.config.hash_digits < 0:
- rounded = x
- else:
- rounded = np.round(x, self.config.hash_digits)
+ rounded = x if self.config.hash_digits < 0 else np.round(x, self.config.hash_digits)
hash_str = "_".join(f"{v:.{max(0, self.config.hash_digits)}f}" for v in rounded)
return hash_str
- def fitness_to_int(self, fitness: float) -> int:
+ def _fitness_to_int(self, fitness: float) -> int:
"""
Convert fitness to integer representation for storage.
@@ -109,7 +110,7 @@ def fitness_to_int(self, fitness: float) -> int:
if self.config.hash_digits < 0:
return int(fitness * 1e6)
scale = 10**self.config.hash_digits
- return int(round(fitness * scale))
+ return round(fitness * scale)
def sample(
self,
@@ -128,19 +129,18 @@ def sample(
Returns:
Tuple of (trace_df, raw_records):
- trace_df: DataFrame with columns [run, fit1, node1, fit2, node2]
- ready for LON construction.
- raw_records: List of dicts with detailed iteration data.
"""
if self.config.seed is not None:
- np.random.seed(self.config.seed)
+ self._rng = np.random.default_rng(self.config.seed)
+ else:
+ self._rng = np.random.default_rng()
n_var = len(domain)
# Compute step size based on mode
if self.config.step_mode == "percentage":
- p = np.array(
- [self.config.step_size * abs(domain[i][1] - domain[i][0]) for i in range(n_var)]
- )
+ p = np.array([self.config.step_size * abs(domain[i][1] - domain[i][0]) for i in range(n_var)])
else:
p = self.config.step_size * np.ones(n_var)
@@ -153,7 +153,7 @@ def sample(
progress_callback(run, self.config.n_runs)
# Random initial point
- x0 = np.array([np.random.uniform(d[0], d[1]) for d in domain])
+ x0 = np.array([self._rng.uniform(d[0], d[1]) for d in domain])
if self.config.bounded:
res = minimize(
@@ -180,7 +180,7 @@ def sample(
for iteration in range(1, self.config.n_iterations + 1):
if self.config.bounded:
- x_perturbed = self.bounded_perturbation(current_x, p, domain)
+ x_perturbed = self._bounded_perturbation(current_x, p, domain)
res = minimize(
func,
x_perturbed,
@@ -189,7 +189,7 @@ def sample(
options=self.config.minimizer_options,
)
else:
- x_perturbed = self.unbounded_perturbation(current_x, p)
+ x_perturbed = self._unbounded_perturbation(current_x, p)
res = minimize(
func,
x_perturbed,
@@ -218,10 +218,10 @@ def sample(
# Acceptance criterion (minimization: accept if better or equal)
if new_f <= current_f:
- node1 = self.hash_solution(current_x, current_f)
- node2 = self.hash_solution(new_x, new_f)
- fit1 = self.fitness_to_int(current_f)
- fit2 = self.fitness_to_int(new_f)
+ node1 = self._hash_solution(current_x)
+ node2 = self._hash_solution(new_x)
+ fit1 = self._fitness_to_int(current_f)
+ fit2 = self._fitness_to_int(new_f)
trace_records.append(
{
@@ -236,7 +236,7 @@ def sample(
current_x = new_x.copy()
current_f = new_f
- trace_df = pd.DataFrame(trace_records, columns=["run", "fit1", "node1", "fit2", "node2"])
+ trace_df = pd.DataFrame(trace_records, columns=pd.Index(["run", "fit1", "node1", "fit2", "node2"]))
return trace_df, raw_records
def sample_to_lon(
@@ -302,7 +302,7 @@ def compute_lon(
if not isinstance(upper_bound, list):
upper_bound = [upper_bound] * dim
- domain = list(zip(lower_bound, upper_bound))
+ domain = list(zip(lower_bound, upper_bound, strict=True))
config = BasinHoppingSamplerConfig(
n_runs=n_runs,
diff --git a/src/lonpy/discrete/__init__.py b/src/lonpy/discrete/__init__.py
new file mode 100644
index 0000000..1a590f8
--- /dev/null
+++ b/src/lonpy/discrete/__init__.py
@@ -0,0 +1,19 @@
+from lonpy.discrete.local_search import hill_climb
+from lonpy.discrete.neighborhoods import (
+ FlipNeighborhood,
+ Neighborhood,
+ SwapNeighborhood,
+)
+from lonpy.discrete.sampling import ILSSampler, ILSSamplerConfig, compute_discrete_lon
+from lonpy.discrete.solution import Solution
+
+__all__ = [
+ "FlipNeighborhood",
+ "ILSSampler",
+ "ILSSamplerConfig",
+ "Neighborhood",
+ "Solution",
+ "SwapNeighborhood",
+ "compute_discrete_lon",
+ "hill_climb",
+]
diff --git a/src/lonpy/discrete/local_search.py b/src/lonpy/discrete/local_search.py
new file mode 100644
index 0000000..dc6311a
--- /dev/null
+++ b/src/lonpy/discrete/local_search.py
@@ -0,0 +1,75 @@
+import random
+
+from lonpy.discrete.neighborhoods import FlipNeighborhood, Neighborhood
+from lonpy.discrete.solution import Solution
+from lonpy.problems.base import ProblemInstance
+
+
+def hill_climb(
+ solution: Solution,
+ problem: ProblemInstance,
+ neighborhood: Neighborhood,
+ first_improvement: bool = True,
+ rng: random.Random | None = None,
+) -> Solution:
+ """
+ Perform hill climbing from a starting solution until a local optimum.
+
+ Args:
+ solution: Starting solution.
+ problem: Problem instance providing evaluation.
+ neighborhood: Neighborhood operator.
+ first_improvement: If True, accept first improving neighbor.
+ If False, evaluate all neighbors and select best (best improvement).
+ rng: Random number generator for shuffling neighbor order.
+
+ Returns:
+ Local optimum solution.
+ """
+ if rng is None:
+ rng = random.Random()
+
+ current = solution.copy()
+ if current.fitness is None:
+ current.fitness = problem.evaluate(current.data)
+
+ improved = True
+ while improved:
+ improved = False
+ best_neighbor: Solution | None = None
+ best_fitness = current.fitness
+
+ indices = neighborhood.get_neighbor_indices(current)
+ if first_improvement:
+ rng.shuffle(indices)
+
+ for idx in indices:
+ # Use delta evaluation if available (for FlipNeighborhood)
+ if isinstance(neighborhood, FlipNeighborhood) and isinstance(idx, int):
+ neighbor_fitness = neighborhood.evaluate_neighbor_with_delta(current, idx, problem)
+ neighbor = None # Lazy creation
+ else:
+ neighbor = neighborhood.apply_move(current, idx)
+ neighbor.fitness = problem.evaluate(neighbor.data)
+ neighbor_fitness = neighbor.fitness
+
+ if problem.strictly_better(neighbor_fitness, best_fitness):
+ if first_improvement:
+ if neighbor is None:
+ neighbor = neighborhood.apply_move(current, idx)
+ neighbor.fitness = neighbor_fitness
+ current = neighbor
+ improved = True
+ break
+ else:
+ if neighbor is None:
+ neighbor = neighborhood.apply_move(current, idx)
+ neighbor.fitness = neighbor_fitness
+ best_neighbor = neighbor
+ best_fitness = neighbor_fitness
+
+ if not first_improvement and best_neighbor is not None:
+ current = best_neighbor
+ improved = True
+
+ return current
diff --git a/src/lonpy/discrete/neighborhoods.py b/src/lonpy/discrete/neighborhoods.py
new file mode 100644
index 0000000..efe409c
--- /dev/null
+++ b/src/lonpy/discrete/neighborhoods.py
@@ -0,0 +1,198 @@
+import random
+from abc import ABC, abstractmethod
+from typing import Any, Protocol, cast
+
+from lonpy.discrete.solution import Solution
+from lonpy.problems.base import ProblemInstance
+
+
+class DeltaEvaluationSupport(Protocol):
+ """Protocol for problems that support delta evaluation."""
+
+ def supports_delta_evaluation(self) -> bool: ...
+ def flip_delta(self, solution: Any, index: int) -> float: ...
+
+
+class Neighborhood(ABC):
+ """
+ Abstract base class for neighborhood operators.
+
+ A neighborhood defines how to explore solutions adjacent to the current one.
+ """
+
+ @abstractmethod
+ def get_neighbor_indices(self, solution: Solution) -> list:
+ """
+ Get list of indices that define the neighborhood.
+
+ Args:
+ solution: Current solution.
+
+ Returns:
+ List of indices representing possible moves.
+ """
+
+ @abstractmethod
+ def apply_move(self, solution: Solution, index: int | tuple[int, int]) -> Solution:
+ """
+ Apply a move to create a neighbor solution.
+
+ Args:
+ solution: Current solution.
+ index: Move index from get_neighbor_indices().
+
+ Returns:
+ New neighbor solution.
+ """
+
+ @abstractmethod
+ def apply_random_perturbation(self, solution: Solution, strength: int, rng: random.Random | None = None) -> Solution:
+ """
+ Apply random perturbation of given strength.
+
+ Args:
+ solution: Current solution.
+ strength: Number of random moves to apply.
+ rng: Random number generator.
+
+ Returns:
+ Perturbed solution.
+ """
+
+
+class FlipNeighborhood(Neighborhood):
+ """
+ Flip neighborhood for bitstring representations.
+
+ Each neighbor is obtained by flipping exactly one bit.
+ The neighborhood size is n (solution length).
+ """
+
+ def get_neighbor_indices(self, solution: Solution) -> list[int]:
+ """Return indices 0 to n-1 representing each possible flip."""
+ return list(range(solution.n))
+
+ def apply_move(self, solution: Solution, index: int | tuple[int, int]) -> Solution:
+ """
+ Flip bit at given index.
+
+ Args:
+ solution: Current solution.
+ index: Bit index to flip (int for FlipNeighborhood).
+
+ Returns:
+ New solution with flipped bit.
+ """
+ if isinstance(index, tuple):
+ raise TypeError("FlipNeighborhood expects int index, not tuple")
+ neighbor = solution.copy()
+ neighbor.flip(index)
+ return neighbor
+
+ def apply_random_perturbation(self, solution: Solution, strength: int, rng: random.Random | None = None) -> Solution:
+ """
+ Flip `strength` random bits.
+
+ Args:
+ solution: Current solution.
+ strength: Number of bits to flip.
+ rng: Random number generator.
+
+ Returns:
+ Perturbed solution with `strength` flipped bits.
+ """
+ if rng is None:
+ rng = random.Random()
+
+ perturbed = solution.copy()
+ indices = rng.sample(range(solution.n), min(strength, solution.n))
+ for idx in indices:
+ perturbed.flip(idx)
+ return perturbed
+
+ def evaluate_neighbor_with_delta(
+ self,
+ solution: Solution,
+ index: int,
+ problem: ProblemInstance,
+ ) -> float:
+ """
+ Evaluate neighbor using delta evaluation if supported.
+
+ Args:
+ solution: Current solution (must have fitness set).
+ index: Bit index to flip.
+ problem: Problem instance.
+
+ Returns:
+ Fitness of neighbor after flipping bit at index.
+ """
+ if (
+ hasattr(problem, "flip_delta")
+ and hasattr(problem, "supports_delta_evaluation")
+ and cast(DeltaEvaluationSupport, problem).supports_delta_evaluation()
+ and solution.fitness is not None
+ ):
+ delta = cast(DeltaEvaluationSupport, problem).flip_delta(solution.data, index)
+ return float(solution.fitness + delta)
+
+ # Fall back to full evaluation
+ neighbor = self.apply_move(solution, index)
+ return float(problem.evaluate(neighbor.data))
+
+
+class SwapNeighborhood(Neighborhood):
+ """
+ Swap neighborhood for permutation representations.
+
+ Each neighbor is obtained by swapping two elements.
+ The neighborhood size is n*(n-1)/2.
+ """
+
+ def get_neighbor_indices(self, solution: Solution) -> list[tuple[int, int]]:
+ """Return all (i, j) pairs where i < j representing possible swaps."""
+ n = solution.n
+ return [(i, j) for i in range(n) for j in range(i + 1, n)]
+
+ def apply_move(self, solution: Solution, index: int | tuple[int, int]) -> Solution:
+ """
+ Swap elements at given indices.
+
+ Args:
+ solution: Current solution.
+ index: Tuple (i, j) of indices to swap.
+
+ Returns:
+ New solution with swapped elements.
+ """
+ if not isinstance(index, tuple):
+ raise TypeError("SwapNeighborhood expects tuple index, not int")
+ neighbor = solution.copy()
+ i, j = index
+ neighbor.swap(i, j)
+ return neighbor
+
+ def apply_random_perturbation(self, solution: Solution, strength: int, rng: random.Random | None = None) -> Solution:
+ """
+ Perform `strength` random swaps.
+
+ Args:
+ solution: Current solution.
+ strength: Number of swaps to perform.
+ rng: Random number generator.
+
+ Returns:
+ Perturbed solution with `strength` random swaps applied.
+ """
+ if rng is None:
+ rng = random.Random()
+
+ perturbed = solution.copy()
+ n = solution.n
+ for _ in range(strength):
+ i = rng.randint(0, n - 1)
+ j = rng.randint(0, n - 1)
+ while j == i:
+ j = rng.randint(0, n - 1)
+ perturbed.swap(i, j)
+ return perturbed
diff --git a/src/lonpy/discrete/sampling.py b/src/lonpy/discrete/sampling.py
new file mode 100644
index 0000000..58c9cdc
--- /dev/null
+++ b/src/lonpy/discrete/sampling.py
@@ -0,0 +1,307 @@
+import random
+from collections.abc import Callable
+from dataclasses import dataclass
+from typing import Literal
+
+import pandas as pd
+
+from lonpy.discrete.local_search import hill_climb
+from lonpy.discrete.neighborhoods import (
+ FlipNeighborhood,
+ Neighborhood,
+ SwapNeighborhood,
+)
+from lonpy.discrete.solution import Solution
+from lonpy.lon import LON
+from lonpy.problems.base import ProblemInstance
+
+RepresentationType = Literal["bitstring", "permutation"]
+NeighborhoodType = Literal["flip", "swap"]
+
+
+@dataclass
+class ILSSamplerConfig:
+ """
+ Configuration for Iterated Local Search (ILS) sampling.
+
+ ILS alternates between local search (hill climbing) and perturbation
+ to explore the fitness landscape and construct a Local Optima Network.
+
+ Attributes:
+ n_runs: Number of independent ILS runs.
+ max_iterations: Maximum iterations per run (0 = unlimited, use non_improvement_iterations).
+ non_improvement_iterations: Stop after this many iterations without improvement.
+ perturbation_strength: Number of random moves for perturbation (e.g., bits to flip).
+ first_improvement: If True, use first improvement hill climbing.
+ If False, use best improvement.
+ representation: Solution representation type ("bitstring" or "permutation").
+ neighborhood: Neighborhood type ("flip" for bitstrings, "swap" for permutations).
+ seed: Random seed for reproducibility.
+ """
+
+ n_runs: int = 100
+ max_iterations: int = 0
+ non_improvement_iterations: int = 100
+ perturbation_strength: int = 2
+ first_improvement: bool = True
+ representation: RepresentationType = "bitstring"
+ neighborhood: NeighborhoodType = "flip"
+ seed: int | None = None
+
+
+class ILSSampler:
+ """
+ Iterated Local Search (ILS) sampler for constructing Local Optima Networks (discrete).
+
+ ILS explores the fitness landscape by alternating between:
+ 1. Local search (hill climbing) to reach a local optimum
+ 2. Perturbation to escape and discover new basins
+
+ The sampler records transitions between local optima to construct
+ a Local Optima Network (LON).
+
+ Example:
+ >>> from lonpy.discrete import ILSSampler, ILSSamplerConfig
+ >>> from lonpy.problems import OneMax
+ >>>
+ >>> problem = OneMax(n=20)
+ >>> config = ILSSamplerConfig(n_runs=100)
+ >>> sampler = ILSSampler(config)
+ >>> lon = sampler.sample_to_lon(problem)
+ """
+
+ def __init__(self, config: ILSSamplerConfig | None = None):
+ """
+ Initialize sampler with configuration.
+
+ Args:
+ config: Sampler configuration. Uses defaults if None.
+ """
+ self.config = config or ILSSamplerConfig()
+ self._rng: random.Random | None = None
+
+ def _get_neighborhood(self) -> Neighborhood:
+ if self.config.neighborhood == "flip":
+ return FlipNeighborhood()
+ elif self.config.neighborhood == "swap":
+ return SwapNeighborhood()
+ else:
+ raise ValueError(f"Unknown neighborhood type: {self.config.neighborhood}")
+
+ def _create_initial_solution(self, n: int) -> Solution:
+ if self.config.representation == "bitstring":
+ return Solution.random_bitstring(n, self._rng)
+ elif self.config.representation == "permutation":
+ return Solution.random_permutation(n, self._rng)
+ else:
+ raise ValueError(f"Unknown representation: {self.config.representation}")
+
+ def _fitness_to_int(self, fitness: float, scale: float = 1e6) -> int:
+ """
+ Convert fitness to integer for storage.
+
+ Args:
+ fitness: Floating-point fitness value.
+ scale: Scaling factor.
+
+ Returns:
+ Scaled integer fitness.
+ """
+ return round(fitness * scale)
+
+ def _run_single_ils(
+ self,
+ run_number: int,
+ problem: ProblemInstance,
+ neighborhood: Neighborhood,
+ solution_size: int,
+ ) -> list[dict]:
+ """
+ Run a single ILS trajectory.
+
+ Args:
+ run_number: Run identifier.
+ problem: Problem instance.
+ neighborhood: Neighborhood operator.
+ solution_size: Size of solutions.
+
+ Returns:
+ List of transition records.
+ """
+ records = []
+
+ current = self._create_initial_solution(solution_size)
+ current.fitness = problem.evaluate(current.data)
+
+ current = hill_climb(
+ current,
+ problem,
+ neighborhood,
+ first_improvement=self.config.first_improvement,
+ rng=self._rng,
+ )
+
+ best = current.copy()
+ non_improvement_count = 0
+ iteration = 0
+
+ while True:
+ if self.config.max_iterations > 0 and iteration >= self.config.max_iterations:
+ break
+ if non_improvement_count >= self.config.non_improvement_iterations:
+ break
+
+ iteration += 1
+
+ perturbed = neighborhood.apply_random_perturbation(best, self.config.perturbation_strength, self._rng)
+ perturbed.fitness = problem.evaluate(perturbed.data)
+
+ new_optimum = hill_climb(
+ perturbed,
+ problem,
+ neighborhood,
+ first_improvement=self.config.first_improvement,
+ rng=self._rng,
+ )
+
+ # Record transition from best to new_optimum
+ # Both best and new_optimum should have fitness set after hill_climb
+ best_fit = best.fitness if best.fitness is not None else 0.0
+ new_fit = new_optimum.fitness if new_optimum.fitness is not None else 0.0
+
+ records.append(
+ {
+ "run": run_number,
+ "fit1": self._fitness_to_int(best_fit),
+ "node1": best.to_hash(),
+ "fit2": self._fitness_to_int(new_fit),
+ "node2": new_optimum.to_hash(),
+ }
+ )
+
+ # Update best if new optimum is better or equal
+ if problem.better_or_equal(new_fit, best_fit):
+ if problem.strictly_better(new_fit, best_fit):
+ non_improvement_count = 0
+ else:
+ non_improvement_count += 1
+ best = new_optimum
+ else:
+ non_improvement_count += 1
+
+ return records
+
+ def sample(
+ self,
+ problem: ProblemInstance,
+ solution_size: int | None = None,
+ progress_callback: Callable[[int, int], None] | None = None,
+ ) -> tuple[pd.DataFrame, list[dict]]:
+ """
+ Run ILS sampling to generate LON data.
+
+ Args:
+ problem: Problem instance to sample.
+ solution_size: Size of solutions. If None, tries to get from problem.n.
+ progress_callback: Optional callback(run, total_runs) for progress.
+
+ Returns:
+ Tuple of (trace_df, raw_records):
+ - trace_df: DataFrame with columns [run, fit1, node1, fit2, node2]
+ - raw_records: List of all transition dictionaries
+ """
+ if self.config.seed is not None:
+ self._rng = random.Random(self.config.seed)
+ else:
+ self._rng = random.Random()
+
+ # Determine solution size
+ if solution_size is None:
+ n = getattr(problem, "n", None)
+ if n is not None:
+ solution_size = int(n)
+ else:
+ raise ValueError("solution_size must be provided if problem doesn't have 'n' attribute")
+ assert solution_size is not None
+
+ neighborhood = self._get_neighborhood()
+ all_records: list[dict] = []
+
+ for run in range(1, self.config.n_runs + 1):
+ if progress_callback:
+ progress_callback(run, self.config.n_runs)
+
+ run_records = self._run_single_ils(run, problem, neighborhood, solution_size)
+ all_records.extend(run_records)
+
+ trace_df = pd.DataFrame(all_records, columns=pd.Index(["run", "fit1", "node1", "fit2", "node2"]))
+ return trace_df, all_records
+
+ def sample_to_lon(
+ self,
+ problem: ProblemInstance,
+ solution_size: int | None = None,
+ progress_callback: Callable[[int, int], None] | None = None,
+ ) -> LON:
+ """
+ Run ILS sampling and return LON directly.
+
+ Args:
+ problem: Problem instance to sample.
+ solution_size: Size of solutions.
+ progress_callback: Optional progress callback.
+
+ Returns:
+ LON instance constructed from sampling.
+ """
+ trace_df, _ = self.sample(problem, solution_size, progress_callback)
+
+ if trace_df.empty:
+ return LON()
+
+ return LON.from_trace_data(trace_df)
+
+
+def compute_discrete_lon(
+ problem: ProblemInstance,
+ solution_size: int | None = None,
+ n_runs: int = 100,
+ non_improvement_iterations: int = 100,
+ perturbation_strength: int = 2,
+ first_improvement: bool = True,
+ seed: int | None = None,
+) -> LON:
+ """
+ Compute a LON from a discrete optimization problem.
+
+ This is the simplest way to construct a discrete LON.
+ For more control, use ILSSampler directly.
+
+ Args:
+ problem: Problem instance (must have evaluate() method and 'n' attribute).
+ solution_size: Size of solutions (uses problem.n if None).
+ n_runs: Number of independent ILS runs.
+ non_improvement_iterations: Stop after this many non-improving iterations.
+ perturbation_strength: Number of random moves per perturbation.
+ first_improvement: Use first improvement hill climbing.
+ seed: Random seed for reproducibility.
+
+ Returns:
+ LON instance.
+
+ Example:
+ >>> from lonpy.problems import OneMax
+ >>> problem = OneMax(n=20)
+ >>> lon = compute_discrete_lon(problem, n_runs=50)
+ >>> print(f"Found {lon.n_vertices} local optima")
+ """
+ config = ILSSamplerConfig(
+ n_runs=n_runs,
+ non_improvement_iterations=non_improvement_iterations,
+ perturbation_strength=perturbation_strength,
+ first_improvement=first_improvement,
+ seed=seed,
+ )
+
+ sampler = ILSSampler(config)
+ return sampler.sample_to_lon(problem, solution_size)
diff --git a/src/lonpy/discrete/solution.py b/src/lonpy/discrete/solution.py
new file mode 100644
index 0000000..1f4ecf0
--- /dev/null
+++ b/src/lonpy/discrete/solution.py
@@ -0,0 +1,138 @@
+from __future__ import annotations
+
+import random
+from dataclasses import dataclass, field
+from typing import Literal
+
+RepresentationType = Literal["bitstring", "permutation"]
+
+
+@dataclass
+class Solution:
+ """
+ Solution representation for discrete optimization problems.
+
+ Supports bitstring and permutation representations.
+
+ Attributes:
+ data: The solution data (list of integers).
+ fitness: Current fitness value (None if not evaluated).
+ representation: Type of representation ("bitstring" or "permutation").
+
+ Example:
+ >>> sol = Solution.random_bitstring(n=10)
+        >>> sol.data  # doctest: +SKIP
+ [1, 0, 1, 1, 0, 0, 1, 0, 1, 0] # random
+ >>> sol.fitness = 5.0
+ """
+
+ data: list[int] = field(default_factory=list)
+ fitness: float | None = None
+ representation: RepresentationType = "bitstring"
+
+ @property
+ def n(self) -> int:
+ """Length of the solution."""
+ return len(self.data)
+
+ def copy(self) -> Solution:
+ """Create a deep copy of this solution."""
+ return Solution(
+ data=self.data.copy(),
+ fitness=self.fitness,
+ representation=self.representation,
+ )
+
+ def to_hash(self) -> str:
+ """
+ Create a hash string identifying this solution.
+
+ Returns:
+ String representation suitable for use as a node identifier.
+ """
+ return "_".join(str(x) for x in self.data)
+
+ def flip(self, index: int) -> None:
+ """
+ Flip bit at index (for bitstring representation).
+
+ Args:
+ index: Index of bit to flip.
+ """
+ self.data[index] = 1 - self.data[index]
+ self.fitness = None # Invalidate fitness
+
+ def swap(self, i: int, j: int) -> None:
+ """
+ Swap elements at indices i and j (for permutation representation).
+
+ Args:
+ i: First index.
+ j: Second index.
+ """
+ self.data[i], self.data[j] = self.data[j], self.data[i]
+ self.fitness = None # Invalidate fitness
+
+ @classmethod
+ def random_bitstring(cls, n: int, rng: random.Random | None = None) -> Solution:
+ """
+ Create a random bitstring solution.
+
+ Args:
+ n: Length of bitstring.
+ rng: Random number generator (uses global random if None).
+
+ Returns:
+ Solution with random 0/1 values.
+ """
+ data = [random.randint(0, 1) for _ in range(n)] if rng is None else [rng.randint(0, 1) for _ in range(n)]
+ return cls(data=data, representation="bitstring")
+
+ @classmethod
+ def random_permutation(cls, n: int, rng: random.Random | None = None) -> Solution:
+ """
+ Create a random permutation solution.
+
+ Args:
+ n: Length of permutation (values 0 to n-1).
+ rng: Random number generator (uses global random if None).
+
+ Returns:
+ Solution with random permutation of [0, 1, ..., n-1].
+ """
+ data = list(range(n))
+ if rng is None:
+ random.shuffle(data)
+ else:
+ rng.shuffle(data)
+ return cls(data=data, representation="permutation")
+
+ @classmethod
+ def from_list(cls, data: list[int], representation: RepresentationType = "bitstring") -> Solution:
+ """
+ Create a solution from a list.
+
+ Args:
+ data: Solution data.
+ representation: Type of representation.
+
+ Returns:
+ Solution instance.
+ """
+ return cls(data=data.copy(), representation=representation)
+
+ def __eq__(self, other: object) -> bool:
+ """Check equality based on data."""
+ if not isinstance(other, Solution):
+ return False
+ return self.data == other.data
+
+ def __hash__(self) -> int:
+ """Hash based on data tuple."""
+ return hash(tuple(self.data))
+
+ def __repr__(self) -> str:
+ """String representation."""
+ data_str = "".join(str(x) for x in self.data) if self.n <= 20 else f"[{self.n} elements]"
+ fit_str = f"{self.fitness:.4f}" if self.fitness is not None else "None"
+ return f"Solution({data_str}, fitness={fit_str})"
diff --git a/src/lonpy/lon.py b/src/lonpy/lon.py
index 9f2f854..271d442 100644
--- a/src/lonpy/lon.py
+++ b/src/lonpy/lon.py
@@ -167,6 +167,48 @@ def compute_metrics(self, known_best: float | None = None) -> dict[str, Any]:
"strength": strength,
}
+ def classify_edges(self) -> None:
+ """
+ Classify edges as 'improving', 'equal', or 'worsening'.
+
+ Adds 'edge_type' attribute to each edge based on fitness comparison:
+ - 'improving': target has better (lower) fitness than source
+ - 'equal': source and target have same fitness
+ - 'worsening': target has worse (higher) fitness than source
+
+ This classification is used for MLON construction and visualization.
+ """
+ if self.n_edges == 0:
+ return
+
+ fits = self.vertex_fitness
+ edge_types = []
+
+ for edge in self.graph.es:
+ src_fit = fits[edge.source]
+ tgt_fit = fits[edge.target]
+
+ if tgt_fit < src_fit:
+ edge_types.append("improving")
+ elif tgt_fit > src_fit:
+ edge_types.append("worsening")
+ else:
+ edge_types.append("equal")
+
+ self.graph.es["edge_type"] = edge_types
+
+ def to_mlon(self) -> "MLON":
+ """
+ Convert LON to Monotonic LON (MLON).
+
+ MLON keeps only non-worsening edges (improving and equal).
+ All vertices are preserved even if they become disconnected.
+
+ Returns:
+ MLON instance with only monotonic edges.
+ """
+ return MLON.from_lon(self)
+
def to_cmlon(self) -> "CMLON":
"""
Convert LON to Compressed Monotonic LON (CMLON).
@@ -177,6 +219,93 @@ def to_cmlon(self) -> "CMLON":
return CMLON.from_lon(self)
+@dataclass
+class MLON:
+ """
+ Monotonic Local Optima Network (MLON).
+
+ MLON is a variant of LON that keeps only non-worsening edges
+ (improving and equal fitness transitions). This focuses on
+ the "downhill" structure of the landscape.
+
+ Attributes:
+ graph: The underlying igraph Graph object.
+ best_fitness: The best (minimum) fitness value.
+ source_lon: Reference to the original LON.
+ """
+
+ graph: ig.Graph = field(default_factory=lambda: ig.Graph(directed=True))
+ best_fitness: float | None = None
+ source_lon: LON | None = None
+
+ @classmethod
+ def from_lon(cls, lon: LON) -> "MLON":
+ """
+ Create MLON from LON by removing worsening edges.
+
+ Args:
+ lon: Source LON instance.
+
+ Returns:
+ MLON with only improving and equal edges.
+ """
+ if lon.n_edges == 0:
+ mlon_graph = lon.graph.copy()
+ return cls(graph=mlon_graph, best_fitness=lon.best_fitness, source_lon=lon)
+
+ # Ensure edges are classified
+ if "edge_type" not in lon.graph.es.attributes():
+ lon.classify_edges()
+
+ # Keep only non-worsening edges
+ non_worsening_indices = [i for i, e in enumerate(lon.graph.es) if e["edge_type"] != "worsening"]
+
+ # Create subgraph keeping all vertices
+ mlon_graph = lon.graph.subgraph_edges(non_worsening_indices, delete_vertices=False)
+ mlon_graph = mlon_graph.simplify(multiple=False, loops=True)
+
+ return cls(graph=mlon_graph, best_fitness=lon.best_fitness, source_lon=lon)
+
+ @property
+ def n_vertices(self) -> int:
+ """Number of vertices in MLON."""
+ return int(self.graph.vcount())
+
+ @property
+ def n_edges(self) -> int:
+ """Number of edges in MLON."""
+ return int(self.graph.ecount())
+
+ @property
+ def vertex_fitness(self) -> list[float]:
+ """List of vertex fitness values."""
+ return list(self.graph.vs["Fitness"])
+
+ def get_sinks(self) -> list[int]:
+ """Get indices of sink nodes (nodes with no outgoing edges)."""
+ out_degrees = self.graph.degree(mode="out")
+ return [i for i, d in enumerate(out_degrees) if d == 0]
+
+ def get_global_optima_indices(self) -> list[int]:
+ """Get indices of global optima nodes."""
+ return [i for i, f in enumerate(self.vertex_fitness) if f == self.best_fitness]
+
+ def to_cmlon(self) -> "CMLON":
+ """
+ Convert MLON to Compressed Monotonic LON (CMLON).
+
+ Returns:
+ CMLON instance with contracted neutral nodes.
+ """
+ # Use the source LON if available for proper CMLON construction
+ if self.source_lon is not None:
+ return CMLON.from_lon(self.source_lon)
+
+ # Otherwise create a temporary LON-like structure
+ temp_lon = LON(graph=self.graph.copy(), best_fitness=self.best_fitness)
+ return CMLON.from_lon(temp_lon)
+
+
@dataclass
class CMLON:
"""
@@ -230,7 +359,7 @@ def from_lon(cls, lon: LON) -> "CMLON":
# Mark edge types and find equal-fitness edges
edge_types = []
equal_edge_indices = []
- for i, (fit1, fit2) in enumerate(zip(f1, f2)):
+ for i, (fit1, fit2) in enumerate(zip(f1, f2, strict=True)):
if fit2 < fit1:
edge_types.append("improving")
elif fit2 == fit1:
@@ -329,27 +458,16 @@ def compute_metrics(self, known_best: float | None = None) -> dict[str, Any]:
n_global_funnels = sum(1 for f in sinks_fit if f == best)
# Neutral: proportion of contracted nodes
- if self.source_lon is not None:
- neutral = round(1.0 - self.n_vertices / self.source_lon.n_vertices, 4)
- else:
- neutral = 0.0
+ neutral = round(1.0 - self.n_vertices / self.source_lon.n_vertices, 4) if self.source_lon is not None else 0.0
# Strength: ratio of incoming strength to global vs local sinks
- igs = [s for s, f in zip(sinks_id, sinks_fit) if f == best]
- ils = [s for s, f in zip(sinks_id, sinks_fit) if best is not None and f > best]
+ igs = [s for s, f in zip(sinks_id, sinks_fit, strict=True) if f == best]
+ ils = [s for s, f in zip(sinks_id, sinks_fit, strict=True) if best is not None and f > best]
if self.n_edges > 0:
edge_weights = self.graph.es["Count"] if "Count" in self.graph.es.attributes() else None
- sing = (
- sum(self.graph.strength(igs, mode="in", loops=False, weights=edge_weights))
- if igs
- else 0
- )
- sinl = (
- sum(self.graph.strength(ils, mode="in", loops=False, weights=edge_weights))
- if ils
- else 0
- )
+ sing = sum(self.graph.strength(igs, mode="in", loops=False, weights=edge_weights)) if igs else 0
+ sinl = sum(self.graph.strength(ils, mode="in", loops=False, weights=edge_weights)) if ils else 0
total = sing + sinl
strength = round(sing / total, 4) if total > 0 else 0.0
else:
diff --git a/src/lonpy/problems/__init__.py b/src/lonpy/problems/__init__.py
new file mode 100644
index 0000000..0884cf1
--- /dev/null
+++ b/src/lonpy/problems/__init__.py
@@ -0,0 +1,10 @@
+from lonpy.problems.base import ProblemInstance
+from lonpy.problems.discrete import Knapsack, NKLandscape, NumberPartitioning, OneMax
+
+__all__ = [
+ "Knapsack",
+ "NKLandscape",
+ "NumberPartitioning",
+ "OneMax",
+ "ProblemInstance",
+]
diff --git a/src/lonpy/problems/base.py b/src/lonpy/problems/base.py
new file mode 100644
index 0000000..1ccac10
--- /dev/null
+++ b/src/lonpy/problems/base.py
@@ -0,0 +1,81 @@
+from abc import ABC, abstractmethod
+from typing import Any, TypeVar
+
+SolutionType = TypeVar("SolutionType")
+
+
+class ProblemInstance(ABC):
+ """
+ Abstract base class for optimization problems.
+
+ This class defines the interface that all problem instances must implement,
+ enabling a unified approach to both discrete and continuous optimization.
+
+ Subclasses must implement:
+ - maximize(): Whether to maximize or minimize
+ - evaluate(): Compute fitness of a solution
+ """
+
+ @abstractmethod
+ def maximize(self) -> bool:
+ """
+ Return True if this is a maximization problem, False for minimization.
+
+ Returns:
+ True for maximization, False for minimization.
+ """
+
+ @abstractmethod
+ def evaluate(self, solution: Any) -> float:
+ """
+ Evaluate the fitness of a solution.
+
+ Args:
+ solution: The solution to evaluate.
+
+ Returns:
+ Fitness value (higher is better for maximization, lower for minimization).
+ """
+
+ def strictly_better(self, a: float, b: float) -> bool:
+ """
+ Check if fitness a is strictly better than fitness b.
+
+ Args:
+ a: First fitness value.
+ b: Second fitness value.
+
+ Returns:
+ True if a is strictly better than b.
+ """
+ return a > b if self.maximize() else a < b
+
+ def better_or_equal(self, a: float, b: float) -> bool:
+ """
+ Check if fitness a is better than or equal to fitness b.
+
+ Args:
+ a: First fitness value.
+ b: Second fitness value.
+
+ Returns:
+ True if a is better than or equal to b.
+ """
+ return a >= b if self.maximize() else a <= b
+
+ def compare(self, a: float, b: float) -> int:
+ """
+ Compare two fitness values.
+
+ Args:
+ a: First fitness value.
+ b: Second fitness value.
+
+ Returns:
+ 1 if a is better, -1 if b is better, 0 if equal.
+ """
+ if self.strictly_better(a, b):
+ return 1
+ elif self.strictly_better(b, a):
+ return -1
+ return 0
diff --git a/src/lonpy/problems/discrete.py b/src/lonpy/problems/discrete.py
new file mode 100644
index 0000000..b0f3e6a
--- /dev/null
+++ b/src/lonpy/problems/discrete.py
@@ -0,0 +1,276 @@
+from dataclasses import dataclass, field
+
+import numpy as np
+
+from lonpy.problems.base import ProblemInstance
+
+
+@dataclass
+class OneMax(ProblemInstance):
+ """
+ OneMax problem: maximize the number of 1s in a bitstring.
+
+ This is a simple unimodal problem often used as a benchmark.
+ The global optimum is the all-ones bitstring with fitness n.
+
+ Attributes:
+ n: Length of the bitstring.
+
+ Example:
+ >>> problem = OneMax(n=20)
+ >>> solution = [1, 0, 1, 1, 0] # partial example
+ >>> problem.evaluate(solution)
+        3.0
+ """
+
+ n: int
+
+ def maximize(self) -> bool:
+ """OneMax is a maximization problem."""
+ return True
+
+ def evaluate(self, solution: list[int]) -> float:
+ """
+ Evaluate fitness as sum of 1s.
+
+ Args:
+ solution: Bitstring as list of 0/1 integers.
+
+ Returns:
+ Number of 1s in the solution.
+ """
+ return float(sum(solution))
+
+ def flip_delta(self, solution: list[int], index: int) -> float:
+ """
+ Compute fitness change from flipping bit at index.
+
+ This is O(1) compared to O(n) for full evaluation.
+
+ Args:
+ solution: Current bitstring.
+ index: Index of bit to flip.
+
+ Returns:
+ Change in fitness (+1 if flipping 0->1, -1 if flipping 1->0).
+ """
+ return -1.0 if solution[index] == 1 else 1.0
+
+ def supports_delta_evaluation(self) -> bool:
+ """OneMax supports efficient delta evaluation."""
+ return True
+
+
+@dataclass
+class Knapsack(ProblemInstance):
+ """
+ 0/1 Knapsack problem: maximize value without exceeding capacity.
+
+ Each item has a value and weight. Select items to maximize total value
+ while keeping total weight within capacity. Infeasible solutions
+ (exceeding capacity) get fitness 0.
+
+ Attributes:
+ values: Value of each item.
+ weights: Weight of each item.
+ capacity: Maximum total weight allowed.
+
+ Example:
+ >>> problem = Knapsack(values=[60, 100, 120], weights=[10, 20, 30], capacity=50)
+ >>> solution = [1, 1, 0] # take items 0 and 1
+ >>> problem.evaluate(solution)
+ 160.0
+ """
+
+ values: list[float] = field(default_factory=list)
+ weights: list[float] = field(default_factory=list)
+ capacity: float = 0.0
+
+ @property
+ def n(self) -> int:
+ """Number of items."""
+ return len(self.values)
+
+ def maximize(self) -> bool:
+ """Knapsack is a maximization problem."""
+ return True
+
+ def evaluate(self, solution: list[int]) -> float:
+ """
+ Evaluate fitness as total value if feasible, 0 otherwise.
+
+ Args:
+ solution: Bitstring where 1 means item is selected.
+
+ Returns:
+ Total value if weight <= capacity, else 0.
+ """
+ total_weight = sum(w * s for w, s in zip(self.weights, solution, strict=True))
+ if total_weight > self.capacity:
+ return 0.0
+ return float(sum(v * s for v, s in zip(self.values, solution, strict=True)))
+
+ def supports_delta_evaluation(self) -> bool:
+ """Knapsack does not support efficient delta evaluation."""
+ return False
+
+
+@dataclass
+class NumberPartitioning(ProblemInstance):
+ """
+ Number Partitioning Problem (NPP): minimize partition imbalance.
+
+ Given n positive integers, partition them into two subsets such that
+ the difference between their sums is minimized. The global optimum
+ has fitness 0 (perfect partition).
+
+ Attributes:
+ n: Number of items.
+ k: Threshold parameter controlling item range (items in [1, 2^(n*k)]).
+ seed: Random seed for reproducible instance generation.
+ items: The generated items (computed from n, k, seed).
+
+ Example:
+ >>> problem = NumberPartitioning(n=10, k=0.5, seed=42)
+ >>> solution = [0, 1, 0, 1, 1, 0, 0, 1, 0, 1]
+ >>> fitness = problem.evaluate(solution)
+ """
+
+ n: int = 20
+ k: float = 0.5
+ seed: int = 1
+ items: list[int] = field(default_factory=list, repr=False)
+
+ def __post_init__(self) -> None:
+ """Generate items if not provided."""
+ if not self.items:
+ rng = np.random.default_rng(self.seed)
+ max_val = int(2 ** (self.n * self.k))
+ self.items = [int(rng.integers(1, max_val + 1)) for _ in range(self.n)]
+
+ def maximize(self) -> bool:
+ """NPP is a minimization problem."""
+ return False
+
+ def evaluate(self, solution: list[int]) -> float:
+ """
+ Evaluate fitness as absolute difference between partition sums.
+
+ Args:
+ solution: Bitstring where 0/1 assigns item to partition A/B.
+
+ Returns:
+ |sum(A) - sum(B)| where lower is better.
+ """
+ sum_a = sum(item for item, bit in zip(self.items, solution, strict=True) if bit == 0)
+ sum_b = sum(item for item, bit in zip(self.items, solution, strict=True) if bit == 1)
+ return float(abs(sum_a - sum_b))
+
+ def supports_delta_evaluation(self) -> bool:
+ """NPP does not support efficient delta evaluation."""
+ return False
+
+
+@dataclass
+class NKLandscape(ProblemInstance):
+ """
+ NK Landscape problem: tunable rugged fitness landscape.
+
+ The NK model is a problem-independent model for constructing multimodal
+ landscapes that can gradually be tuned from smooth to rugged:
+ - N: number of binary genes in the genotype (string length)
+ - K: number of genes that influence a particular gene (0 <= K <= N-1)
+
+ By increasing K from 0 to N-1, landscapes can be tuned from smooth to rugged.
+
+ This implementation uses the adjacent neighborhood model, where the K variables
+ forming the context of gene s_i are the K variables closest to s_i in a total
+ ordering (s_1, s_2, ..., s_n) using periodic boundaries.
+
+ The fitness is the average of N contribution functions, where each
+ contribution f_i depends on bit i and its K adjacent neighbors.
+
+ Attributes:
+ n: Length of the bitstring (number of genes).
+ k: Epistasis parameter (number of neighboring genes influencing each gene).
+ seed: Random seed for reproducible instance generation.
+ contributions: Lookup tables for each bit's contribution.
+
+ Example:
+ >>> problem = NKLandscape(n=18, k=4, seed=42)
+ >>> solution = [1, 0, 1, 1, 0, 0, 1, 0, 1, 0, 1, 1, 0, 0, 1, 0, 1, 0]
+ >>> fitness = problem.evaluate(solution)
+ """
+
+ n: int = 18
+ k: int = 4
+ seed: int = 1
+ contributions: list[dict[tuple[int, ...], float]] = field(default_factory=list, repr=False)
+
+ def __post_init__(self) -> None:
+ """Generate NK landscape instance if not already initialized."""
+ if self.k < 0 or self.k >= self.n:
+ raise ValueError(f"k must be in [0, n-1], got k={self.k}, n={self.n}")
+
+ if not self.contributions:
+ self._generate_instance()
+
+ def _generate_instance(self) -> None:
+ """Generate contribution tables for adjacent neighborhood model."""
+ rng = np.random.default_rng(self.seed)
+
+ self.contributions = []
+
+ for _ in range(self.n):
+ # Create contribution lookup table
+ # Each entry maps (bit_i, neighbor_0, ..., neighbor_k-1) -> contribution
+ n_entries = 2 ** (self.k + 1) # 2^(K+1) possible combinations
+ contribution_table: dict[tuple[int, ...], float] = {}
+
+ for entry_idx in range(n_entries):
+ # Convert entry index to bit pattern
+ bits = tuple((entry_idx >> b) & 1 for b in range(self.k + 1))
+ contribution_table[bits] = float(rng.random())
+
+ self.contributions.append(contribution_table)
+
+ def _get_adjacent_neighbors(self, i: int) -> list[int]:
+ """
+ Get K adjacent neighbors for gene i using periodic boundaries.
+
+ Args:
+ i: Index of the gene.
+
+ Returns:
+ List of K neighbor indices.
+ """
+ return [(i + j + 1) % self.n for j in range(self.k)]
+
+ def maximize(self) -> bool:
+ """NK Landscape is a maximization problem."""
+ return True
+
+ def evaluate(self, solution: list[int]) -> float:
+ """
+ Evaluate fitness as average of all contribution functions.
+
+ Args:
+ solution: Bitstring as list of 0/1 integers.
+
+ Returns:
+ Average contribution (value in [0, 1]).
+ """
+ total = 0.0
+
+ for i in range(self.n):
+ # Get the relevant bits: bit i and its K adjacent neighbors
+ neighbors = self._get_adjacent_neighbors(i)
+ key_bits = [solution[i]] + [solution[j] for j in neighbors]
+ key = tuple(key_bits)
+ total += self.contributions[i][key]
+
+ return total / self.n
+
+ def supports_delta_evaluation(self) -> bool:
+ """NK Landscape does not support efficient delta evaluation."""
+ return False
diff --git a/src/lonpy/visualization.py b/src/lonpy/visualization.py
index dc0f5e1..b129a69 100644
--- a/src/lonpy/visualization.py
+++ b/src/lonpy/visualization.py
@@ -6,7 +6,7 @@
import numpy as np
import plotly.graph_objects as go
-from lonpy.lon import CMLON, LON
+from lonpy.lon import CMLON, LON, MLON
COLORS = {
"global_optimum": "red",
@@ -16,6 +16,10 @@
"edge": "dimgray",
"lon_global": "red",
"lon_local": "pink",
+ # Edge type colors (R-style)
+ "edge_improving": "gray50",
+ "edge_equal": "royalblue",
+ "edge_worsening": "forestgreen",
}
BACKGROUND_COLOR = "rgba(255,255,255,255)"
@@ -31,13 +35,17 @@ class LONVisualizer:
"""
Visualizer for Local Optima Networks.
- Produces 2D and 3D visualizations of LON and CMLON graphs,
+ Produces 2D and 3D visualizations of LON, MLON, and CMLON graphs,
including static images and animated GIFs.
Example:
>>> viz = LONVisualizer()
>>> viz.plot_2d(lon, output_path="lon.png")
>>> viz.plot_3d(cmlon, output_path="cmlon_3d.png")
+
+ # With edge coloring by type
+ >>> viz = LONVisualizer(edge_color_by_type=True)
+ >>> viz.plot_2d(lon, output_path="lon_colored.png")
"""
def __init__(
@@ -48,6 +56,10 @@ def __init__(
max_node_size: float = 8.0,
arrow_size: float = 0.2,
alpha: int = 255,
+ edge_color_by_type: bool = False,
+ improving_color: str = COLORS["edge_improving"],
+ equal_color: str = COLORS["edge_equal"],
+ worsening_color: str = COLORS["edge_worsening"],
):
"""
Initialize visualizer.
@@ -59,6 +71,10 @@ def __init__(
max_node_size: Maximum node size.
arrow_size: Arrow size for directed edges.
alpha: Alpha value for colors (0-255).
+ edge_color_by_type: If True, color edges by type (improving/equal/worsening).
+ improving_color: Color for improving edges (fitness decreases).
+ equal_color: Color for equal fitness edges (neutral).
+ worsening_color: Color for worsening edges (fitness increases).
"""
self.min_edge_width = min_edge_width
self.max_edge_width = max_edge_width
@@ -66,6 +82,10 @@ def __init__(
self.max_node_size = max_node_size
self.arrow_size = arrow_size
self.alpha = alpha
+ self.edge_color_by_type = edge_color_by_type
+ self.improving_color = improving_color
+ self.equal_color = equal_color
+ self.worsening_color = worsening_color
def compute_edge_widths(self, graph) -> list[float]:
"""Compute edge widths based on edge weight (Count attribute)."""
@@ -101,8 +121,53 @@ def compute_node_sizes(self, graph) -> list[float]:
return sizes
- def compute_lon_colors(self, lon: LON) -> list[str]:
- """Compute node colors for LON visualization."""
+ def compute_edge_colors(self, lon_or_cmlon: LON | MLON | CMLON) -> list[str]:
+ """
+ Compute edge colors based on edge type or default.
+
+ If edge_color_by_type is True and edges have 'edge_type' attribute,
+ colors edges by type. Otherwise uses default edge color.
+
+ Args:
+ lon_or_cmlon: LON, MLON, or CMLON instance.
+
+ Returns:
+ List of color strings for each edge.
+ """
+ graph = lon_or_cmlon.graph
+
+ if graph.ecount() == 0:
+ return []
+
+ # If coloring by type is disabled, use default
+ if not self.edge_color_by_type:
+ return [COLORS["edge"]] * graph.ecount()
+
+ # Ensure edges are classified for LON
+ if isinstance(lon_or_cmlon, LON) and "edge_type" not in graph.es.attributes():
+ lon_or_cmlon.classify_edges()
+
+ # Check if edge_type attribute exists
+ if "edge_type" not in graph.es.attributes():
+ return [COLORS["edge"]] * graph.ecount()
+
+ # Color by edge type
+ colors = []
+ for edge in graph.es:
+ edge_type = edge["edge_type"]
+ if edge_type == "improving":
+ colors.append(self.improving_color)
+ elif edge_type == "equal":
+ colors.append(self.equal_color)
+ elif edge_type == "worsening":
+ colors.append(self.worsening_color)
+ else:
+ colors.append(COLORS["edge"])
+
+ return colors
+
+ def compute_lon_colors(self, lon: LON | MLON) -> list[str]:
+ """Compute node colors for LON or MLON visualization."""
colors = []
fits = lon.vertex_fitness
best = lon.best_fitness
@@ -152,17 +217,17 @@ def get_layout(self, graph, seed: int | None = None) -> np.ndarray:
def plot_2d(
self,
- lon_or_cmlon: LON | CMLON,
+ lon_or_cmlon: LON | MLON | CMLON,
output_path: str | Path | None = None,
figsize: tuple[int, int] = (8, 8),
dpi: int = 100,
seed: int | None = None,
) -> plt.Figure:
"""
- Create 2D plot of LON or CMLON.
+ Create 2D plot of LON, MLON, or CMLON.
Args:
- lon_or_cmlon: LON or CMLON instance.
+ lon_or_cmlon: LON, MLON, or CMLON instance.
output_path: Path to save PNG (optional).
figsize: Figure size in inches.
dpi: DPI for output.
@@ -175,11 +240,10 @@ def plot_2d(
edge_widths = self.compute_edge_widths(graph)
node_sizes = self.compute_node_sizes(graph)
+ edge_colors = self.compute_edge_colors(lon_or_cmlon)
node_colors = (
- self.compute_cmlon_colors(lon_or_cmlon)
- if isinstance(lon_or_cmlon, CMLON)
- else self.compute_lon_colors(lon_or_cmlon)
+ self.compute_cmlon_colors(lon_or_cmlon) if isinstance(lon_or_cmlon, CMLON) else self.compute_lon_colors(lon_or_cmlon)
)
layout = self.get_layout(graph, seed=seed)
@@ -197,6 +261,9 @@ def plot_2d(
x0, y0 = layout[src_idx]
x1, y1 = layout[tgt_idx]
+ # Get edge color
+ edge_color = edge_colors[i] if edge_colors else COLORS["edge"]
+
# Draw arrow
ax.annotate(
"",
@@ -204,7 +271,7 @@ def plot_2d(
xytext=(x0, y0),
arrowprops=dict(
arrowstyle=f"->,head_length={self.arrow_size},head_width={self.arrow_size}",
- color=COLORS["edge"],
+ color=edge_color,
lw=edge_widths[i],
shrinkA=node_sizes[src_idx] * 2,
shrinkB=node_sizes[tgt_idx] * 2,
@@ -233,7 +300,7 @@ def plot_2d(
def plot_3d(
self,
- lon_or_cmlon: LON | CMLON,
+ lon_or_cmlon: LON | MLON | CMLON,
output_path: str | Path | None = None,
width: int = 800,
height: int = 800,
@@ -243,7 +310,7 @@ def plot_3d(
Create 3D plot with fitness as Z-axis.
Args:
- lon_or_cmlon: LON or CMLON instance.
+ lon_or_cmlon: LON, MLON, or CMLON instance.
output_path: Path to save PNG (optional).
width: Image width in pixels.
height: Image height in pixels.
@@ -256,11 +323,10 @@ def plot_3d(
edge_widths = self.compute_edge_widths(graph)
node_sizes = self.compute_node_sizes(graph)
+ edge_colors = self.compute_edge_colors(lon_or_cmlon)
node_colors = (
- self.compute_cmlon_colors(lon_or_cmlon)
- if isinstance(lon_or_cmlon, CMLON)
- else self.compute_lon_colors(lon_or_cmlon)
+ self.compute_cmlon_colors(lon_or_cmlon) if isinstance(lon_or_cmlon, CMLON) else self.compute_lon_colors(lon_or_cmlon)
)
layout = self.get_layout(graph, seed=seed)
@@ -279,6 +345,9 @@ def plot_3d(
src_idx = edge.source
tgt_idx = edge.target
+ # Get edge color
+ edge_color = edge_colors[i] if edge_colors else COLORS["edge"]
+
fig.add_trace(
go.Scatter3d(
x=[x[src_idx], x[tgt_idx]],
@@ -286,7 +355,7 @@ def plot_3d(
z=[z[src_idx], z[tgt_idx]],
mode="lines",
line=dict(
- color=COLORS["edge"],
+ color=edge_color,
width=edge_widths[i] * 2,
),
hoverinfo="none",
@@ -351,7 +420,7 @@ def plot_3d(
def create_rotation_gif(
self,
- lon_or_cmlon: LON | CMLON,
+ lon_or_cmlon: LON | MLON | CMLON,
output_path: str | Path,
duration: float = 3.0,
fps: int = 10,
@@ -365,7 +434,7 @@ def create_rotation_gif(
Create rotating GIF animation of 3D plot.
Args:
- lon_or_cmlon: LON or CMLON instance.
+ lon_or_cmlon: LON, MLON, or CMLON instance.
output_path: Path to save GIF.
duration: Animation duration in seconds.
fps: Frames per second.
@@ -380,11 +449,10 @@ def create_rotation_gif(
edge_widths = self.compute_edge_widths(graph)
node_sizes = self.compute_node_sizes(graph)
+ edge_colors = self.compute_edge_colors(lon_or_cmlon)
node_colors = (
- self.compute_cmlon_colors(lon_or_cmlon)
- if isinstance(lon_or_cmlon, CMLON)
- else self.compute_lon_colors(lon_or_cmlon)
+ self.compute_cmlon_colors(lon_or_cmlon) if isinstance(lon_or_cmlon, CMLON) else self.compute_lon_colors(lon_or_cmlon)
)
layout = self.get_layout(graph, seed=seed)
@@ -409,13 +477,16 @@ def create_rotation_gif(
src_idx = edge.source
tgt_idx = edge.target
+ # Get edge color
+ edge_color = edge_colors[j] if edge_colors else COLORS["edge"]
+
fig.add_trace(
go.Scatter3d(
x=[x[src_idx], x[tgt_idx]],
y=[y[src_idx], y[tgt_idx]],
z=[z[src_idx], z[tgt_idx]],
mode="lines",
- line=dict(color=COLORS["edge"], width=edge_widths[j] * 2),
+ line=dict(color=edge_color, width=edge_widths[j] * 2),
hoverinfo="none",
showlegend=False,
)
@@ -480,7 +551,7 @@ def create_rotation_gif(
png_bytes = fig.to_image(format="png", width=width, height=height, scale=1)
frames.append(imageio.v3.imread(png_bytes, extension=".png"))
- imageio.mimsave(
+ imageio.mimsave( # type: ignore[no-matching-overload]
str(output_path),
frames,
fps=fps,
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/test_discrete_problems.py b/tests/test_discrete_problems.py
new file mode 100644
index 0000000..e6c5609
--- /dev/null
+++ b/tests/test_discrete_problems.py
@@ -0,0 +1,164 @@
+from lonpy.problems.discrete import Knapsack, NumberPartitioning, OneMax
+
+
+class TestOneMax:
+ def test_maximize_returns_true(self):
+ problem = OneMax(n=10)
+
+ assert problem.maximize() is True
+
+ def test_evaluate_all_zeros(self):
+ problem = OneMax(n=5)
+ solution = [0, 0, 0, 0, 0]
+
+ assert problem.evaluate(solution) == 0.0
+
+ def test_evaluate_mixed(self):
+ problem = OneMax(n=5)
+ solution = [1, 0, 1, 0, 1]
+
+ assert problem.evaluate(solution) == 3.0
+
+ def test_flip_delta_zero_to_one(self):
+ problem = OneMax(n=5)
+ solution = [0, 0, 0, 0, 0]
+
+ delta = problem.flip_delta(solution, 2)
+
+ assert delta == 1.0
+
+ def test_supports_delta_evaluation(self):
+ problem = OneMax(n=10)
+
+ assert problem.supports_delta_evaluation() is True
+
+ def test_strictly_better_maximization(self):
+ problem = OneMax(n=10)
+
+ assert problem.strictly_better(5.0, 3.0) is True
+ assert problem.strictly_better(3.0, 5.0) is False
+ assert problem.strictly_better(5.0, 5.0) is False
+
+ def test_better_or_equal_maximization(self):
+ problem = OneMax(n=10)
+
+ assert problem.better_or_equal(5.0, 3.0) is True
+ assert problem.better_or_equal(5.0, 5.0) is True
+ assert problem.better_or_equal(3.0, 5.0) is False
+
+ def test_compare_maximization(self):
+ problem = OneMax(n=10)
+
+ assert problem.compare(5.0, 3.0) == 1
+ assert problem.compare(3.0, 5.0) == -1
+ assert problem.compare(5.0, 5.0) == 0
+
+
+class TestKnapsack:
+ def test_maximize_returns_true(self):
+ problem = Knapsack(values=[10, 20], weights=[5, 10], capacity=15)
+
+ assert problem.maximize() is True
+
+ def test_n_property(self):
+ problem = Knapsack(values=[10, 20, 30], weights=[5, 10, 15], capacity=20)
+
+ assert problem.n == 3
+
+ def test_evaluate_feasible_solution(self):
+ problem = Knapsack(
+ values=[60.0, 100.0, 120.0],
+ weights=[10.0, 20.0, 30.0],
+ capacity=50.0,
+ )
+ solution = [1, 1, 0] # Take items 0 and 1, weight = 30
+
+ assert problem.evaluate(solution) == 160.0
+
+ def test_evaluate_infeasible_solution(self):
+ problem = Knapsack(
+ values=[60.0, 100.0, 120.0],
+ weights=[10.0, 20.0, 30.0],
+ capacity=50.0,
+ )
+ solution = [1, 1, 1] # Total weight = 60 > capacity
+
+ assert problem.evaluate(solution) == 0.0
+
+ def test_evaluate_empty_selection(self):
+ problem = Knapsack(
+ values=[60.0, 100.0, 120.0],
+ weights=[10.0, 20.0, 30.0],
+ capacity=50.0,
+ )
+ solution = [0, 0, 0]
+
+ assert problem.evaluate(solution) == 0.0
+
+ def test_supports_delta_evaluation(self):
+ problem = Knapsack(values=[10], weights=[5], capacity=10)
+
+ assert problem.supports_delta_evaluation() is False
+
+
+class TestNumberPartitioning:
+ def test_maximize_returns_false(self):
+ problem = NumberPartitioning(n=10, k=0.5, seed=42)
+
+ assert problem.maximize() is False
+
+ def test_items_count_equals_n(self):
+ problem = NumberPartitioning(n=15, k=0.5, seed=1)
+
+ assert len(problem.items) == 15
+
+ def test_items_are_positive(self):
+ problem = NumberPartitioning(n=20, k=0.5, seed=42)
+
+ assert all(item >= 1 for item in problem.items)
+ assert all(isinstance(item, int) for item in problem.items)
+
+ def test_evaluate_perfect_partition(self):
+ # Create problem with known items
+ problem = NumberPartitioning(n=4, k=0.5, seed=1)
+ problem.items = [10, 10, 5, 5] # Override items
+
+ # [0,1,0,1] -> A={10,5}, B={10,5} -> |15-15| = 0
+ solution = [0, 1, 0, 1]
+
+ assert problem.evaluate(solution) == 0.0
+
+ def test_evaluate_partial_partition(self):
+ problem = NumberPartitioning(n=4, k=0.5, seed=1)
+ problem.items = [10, 20, 5, 15] # Override items
+
+ # [1,0,1,0] -> A={20,15}=35, B={10,5}=15 -> |35-15| = 20
+ solution = [1, 0, 1, 0]
+
+ assert problem.evaluate(solution) == 20.0
+
+ def test_supports_delta_evaluation(self):
+ problem = NumberPartitioning(n=10)
+
+ assert problem.supports_delta_evaluation() is False
+
+ def test_strictly_better_minimization(self):
+ problem = NumberPartitioning(n=10)
+
+ assert problem.strictly_better(3.0, 5.0) is True
+ assert problem.strictly_better(5.0, 3.0) is False
+ assert problem.strictly_better(5.0, 5.0) is False
+
+ def test_better_or_equal_minimization(self):
+ problem = NumberPartitioning(n=10)
+
+ assert problem.better_or_equal(3.0, 5.0) is True
+ assert problem.better_or_equal(5.0, 5.0) is True
+ assert problem.better_or_equal(5.0, 3.0) is False
+
+ def test_compare_minimization(self):
+ problem = NumberPartitioning(n=10)
+
+ assert problem.compare(3.0, 5.0) == 1
+ assert problem.compare(5.0, 3.0) == -1
+ assert problem.compare(5.0, 5.0) == 0
diff --git a/tests/test_lon.py b/tests/test_lon.py
new file mode 100644
index 0000000..24a88ef
--- /dev/null
+++ b/tests/test_lon.py
@@ -0,0 +1,742 @@
+import igraph as ig
+import pandas as pd
+import pytest
+
+from lonpy.lon import CMLON, LON, MLON, _contract_vertices, _simplify_with_edge_sum
+
+COLUMNS = pd.Index(["run", "fit1", "node1", "fit2", "node2"])
+
+
+@pytest.fixture
+def simple_trace_df() -> pd.DataFrame:
+ """Simple trace with 3 nodes forming a chain: A -> B -> C (sink)."""
+ return pd.DataFrame(
+ [
+ [0, 100, "A", 50, "B"],
+ [0, 50, "B", 10, "C"],
+ ],
+ columns=COLUMNS,
+ )
+
+
+@pytest.fixture
+def multi_run_trace_df() -> pd.DataFrame:
+ """Trace with multiple runs visiting same nodes."""
+ return pd.DataFrame(
+ [
+ [0, 100, "A", 50, "B"],
+ [0, 50, "B", 10, "C"],
+ [1, 100, "A", 50, "B"],
+ [1, 50, "B", 10, "C"],
+ ],
+ columns=COLUMNS,
+ )
+
+
+@pytest.fixture
+def neutral_trace_df() -> pd.DataFrame:
+ """Trace with neutral (equal-fitness) connections."""
+ return pd.DataFrame(
+ [
+ [0, 100, "A", 50, "B"],
+ [0, 50, "B", 50, "C"], # Equal fitness
+ [0, 50, "C", 10, "D"],
+ ],
+ columns=COLUMNS,
+ )
+
+
+@pytest.fixture
+def multiple_sinks_trace_df() -> pd.DataFrame:
+ """Trace with multiple sinks (funnels)."""
+ return pd.DataFrame(
+ [
+ [0, 100, "A", 50, "B"],
+ [0, 50, "B", 10, "C"], # C is global sink
+ [1, 100, "A", 60, "D"],
+ [1, 60, "D", 30, "E"], # E is local sink
+ ],
+ columns=COLUMNS,
+ )
+
+
+@pytest.fixture
+def worsening_edge_trace_df() -> pd.DataFrame:
+ """Trace with worsening edges (escaping)."""
+ return pd.DataFrame(
+ [
+ [0, 100, "A", 50, "B"],
+ [0, 50, "B", 80, "C"], # Worsening edge
+ [0, 80, "C", 10, "D"],
+ ],
+        columns=COLUMNS,
+ )
+
+
+@pytest.fixture
+def simple_lon(simple_trace_df) -> LON:
+ return LON.from_trace_data(simple_trace_df)
+
+
+@pytest.fixture
+def neutral_lon(neutral_trace_df) -> LON:
+ return LON.from_trace_data(neutral_trace_df)
+
+
+@pytest.fixture
+def worsening_lon(worsening_edge_trace_df) -> LON:
+ return LON.from_trace_data(worsening_edge_trace_df)
+
+
+class TestLONFromTraceData:
+ def test_creates_lon_from_simple_trace(self, simple_trace_df):
+ lon = LON.from_trace_data(simple_trace_df)
+
+ assert isinstance(lon, LON)
+ assert isinstance(lon.graph, ig.Graph)
+
+ def test_creates_correct_number_of_vertices(self, simple_trace_df):
+ lon = LON.from_trace_data(simple_trace_df)
+
+ assert lon.n_vertices == 3 # A, B, C
+
+ def test_creates_correct_number_of_edges(self, simple_trace_df):
+ lon = LON.from_trace_data(simple_trace_df)
+
+ assert lon.n_edges == 2 # A->B, B->C
+
+ def test_sets_best_fitness(self, simple_trace_df):
+ lon = LON.from_trace_data(simple_trace_df)
+
+ assert lon.best_fitness == 10 # C has fitness 10
+
+ def test_removes_self_loops(self):
+ trace = pd.DataFrame(
+ [
+ [0, 50, "A", 50, "A"], # Self-loop
+ [0, 50, "A", 30, "B"],
+ ],
+ columns=COLUMNS,
+ )
+
+ lon = LON.from_trace_data(trace)
+
+ assert lon.n_edges == 1
+
+
+class TestLONProperties:
+ def test_n_vertices(self, simple_lon):
+ assert simple_lon.n_vertices == 3
+
+ def test_n_edges(self, simple_lon):
+ assert simple_lon.n_edges == 2
+
+ def test_vertex_names(self, simple_lon):
+ names = simple_lon.vertex_names
+
+ assert isinstance(names, list)
+ assert set(names) == {"A", "B", "C"}
+
+ def test_vertex_fitness(self, simple_lon):
+ fitness = simple_lon.vertex_fitness
+
+ assert isinstance(fitness, list)
+ assert len(fitness) == 3
+ assert set(fitness) == {100, 50, 10}
+
+ def test_vertex_count(self, simple_lon):
+ counts = simple_lon.vertex_count
+
+ assert isinstance(counts, list)
+ assert len(counts) == 3
+ assert all(c > 0 for c in counts)
+
+
+class TestLONGetSinks:
+ def test_single_sink(self, simple_lon):
+ sinks = simple_lon.get_sinks()
+
+ assert len(sinks) == 1
+
+ def test_multiple_sinks(self, multiple_sinks_trace_df):
+ lon = LON.from_trace_data(multiple_sinks_trace_df)
+
+ sinks = lon.get_sinks()
+
+ assert len(sinks) == 2 # C and E are sinks
+
+ def test_sink_has_zero_out_degree(self, simple_lon):
+ sinks = simple_lon.get_sinks()
+
+ for sink_idx in sinks:
+ out_degree = simple_lon.graph.degree(sink_idx, mode="out")
+ assert out_degree == 0
+
+
+class TestLONGetGlobalOptimaIndices:
+ def test_single_global_optimum(self, simple_lon):
+ global_indices = simple_lon.get_global_optima_indices()
+
+ assert len(global_indices) == 1
+ # Verify it's the node with best fitness
+ fitness = simple_lon.vertex_fitness[global_indices[0]]
+ assert fitness == simple_lon.best_fitness
+
+ def test_multiple_global_optima(self):
+ trace = pd.DataFrame(
+ [
+ [0, 100, "A", 10, "B"],
+ [1, 100, "A", 10, "C"], # Both B and C have best fitness
+ ],
+            columns=COLUMNS,
+ )
+
+ lon = LON.from_trace_data(trace)
+ global_indices = lon.get_global_optima_indices()
+
+ assert len(global_indices) == 2
+
+
+class TestLONComputeMetrics:
+ def test_returns_dict_with_required_keys(self, simple_lon):
+ metrics = simple_lon.compute_metrics()
+
+ assert "n_optima" in metrics
+ assert "n_funnels" in metrics
+ assert "n_global_funnels" in metrics
+ assert "neutral" in metrics
+ assert "strength" in metrics
+
+ def test_n_optima_metric(self, simple_lon):
+ metrics = simple_lon.compute_metrics()
+
+ assert metrics["n_optima"] == 3
+
+ def test_n_funnels_metric(self, simple_lon):
+ metrics = simple_lon.compute_metrics()
+
+ assert metrics["n_funnels"] == 1
+
+ def test_n_global_funnels_metric(self, multiple_sinks_trace_df):
+ lon = LON.from_trace_data(multiple_sinks_trace_df)
+ metrics = lon.compute_metrics()
+
+ assert metrics["n_global_funnels"] == 1 # Only C is at global best
+
+ def test_neutral_metric_with_neutral_edges(self, neutral_lon):
+ metrics = neutral_lon.compute_metrics()
+
+ assert metrics["neutral"] > 0 # B and C have neutral connection
+
+ def test_neutral_metric_without_neutral_edges(self, simple_lon):
+ metrics = simple_lon.compute_metrics()
+
+ assert metrics["neutral"] == 0.0
+
+ def test_strength_metric(self, simple_lon):
+ metrics = simple_lon.compute_metrics()
+
+        # All paths lead to the global optimum; strength is bounded to [0, 1]
+ assert metrics["strength"] >= 0.0
+ assert metrics["strength"] <= 1.0
+
+ def test_known_best_parameter(self, simple_lon):
+ # Use a different known_best than what's in the network
+ metrics = simple_lon.compute_metrics(known_best=5)
+
+ # No node has fitness 5, so n_global_funnels should be 0
+ assert metrics["n_global_funnels"] == 0
+
+
+class TestLONClassifyEdges:
+ def test_classifies_improving_edges(self, simple_lon):
+ simple_lon.classify_edges()
+
+ edge_types = simple_lon.graph.es["edge_type"]
+ assert "improving" in edge_types
+
+ def test_classifies_equal_edges(self, neutral_lon):
+ neutral_lon.classify_edges()
+
+ edge_types = neutral_lon.graph.es["edge_type"]
+ assert "equal" in edge_types
+
+ def test_classifies_worsening_edges(self, worsening_lon):
+ worsening_lon.classify_edges()
+
+ edge_types = worsening_lon.graph.es["edge_type"]
+ assert "worsening" in edge_types
+
+ def test_handles_empty_graph(self):
+ lon = LON()
+ lon.classify_edges() # Should not raise
+
+ def test_adds_edge_type_attribute(self, simple_lon):
+ simple_lon.classify_edges()
+
+ assert "edge_type" in simple_lon.graph.es.attributes()
+
+
+class TestLONToMLON:
+ def test_returns_mlon(self, simple_lon):
+ mlon = simple_lon.to_mlon()
+
+ assert isinstance(mlon, MLON)
+
+ def test_mlon_has_reference_to_source_lon(self, simple_lon):
+ mlon = simple_lon.to_mlon()
+
+ assert mlon.source_lon is simple_lon
+
+
+class TestLONToCMLON:
+ def test_returns_cmlon(self, simple_lon):
+ cmlon = simple_lon.to_cmlon()
+
+ assert isinstance(cmlon, CMLON)
+
+ def test_cmlon_has_reference_to_source_lon(self, simple_lon):
+ cmlon = simple_lon.to_cmlon()
+
+ assert cmlon.source_lon is simple_lon
+
+
+class TestMLONFromLON:
+ def test_creates_mlon_from_lon(self, simple_lon):
+ mlon = MLON.from_lon(simple_lon)
+
+ assert isinstance(mlon, MLON)
+
+ def test_preserves_all_vertices(self, worsening_lon):
+ mlon = MLON.from_lon(worsening_lon)
+
+ assert mlon.n_vertices == worsening_lon.n_vertices
+
+ def test_removes_worsening_edges(self, worsening_lon):
+ mlon = MLON.from_lon(worsening_lon)
+
+ # Original has 3 edges, one is worsening
+ assert mlon.n_edges < worsening_lon.n_edges
+
+ def test_keeps_improving_edges(self, simple_lon):
+ mlon = MLON.from_lon(simple_lon)
+
+ # All edges in simple_lon are improving
+ assert mlon.n_edges == simple_lon.n_edges
+
+ def test_keeps_equal_edges(self, neutral_lon):
+ mlon = MLON.from_lon(neutral_lon)
+
+ assert mlon.n_edges == neutral_lon.n_edges
+
+ def test_preserves_best_fitness(self, simple_lon):
+ mlon = MLON.from_lon(simple_lon)
+
+ assert mlon.best_fitness == simple_lon.best_fitness
+
+ def test_handles_empty_lon(self):
+ lon = LON()
+ mlon = MLON.from_lon(lon)
+
+ assert mlon.n_vertices == 0
+ assert mlon.n_edges == 0
+
+
+class TestMLONProperties:
+ def test_n_vertices(self, simple_lon):
+ mlon = MLON.from_lon(simple_lon)
+
+ assert mlon.n_vertices == 3
+
+ def test_n_edges(self, simple_lon):
+ mlon = MLON.from_lon(simple_lon)
+
+ assert mlon.n_edges == 2
+
+ def test_vertex_fitness(self, simple_lon):
+ mlon = MLON.from_lon(simple_lon)
+
+ assert isinstance(mlon.vertex_fitness, list)
+ assert len(mlon.vertex_fitness) == 3
+
+
+class TestMLONGetSinks:
+ def test_identifies_sinks(self, simple_lon):
+ mlon = MLON.from_lon(simple_lon)
+
+ sinks = mlon.get_sinks()
+
+ assert len(sinks) >= 1
+
+ def test_worsening_edges_create_more_sinks(self, worsening_lon):
+ mlon = MLON.from_lon(worsening_lon)
+
+ mlon_sinks = mlon.get_sinks()
+ lon_sinks = worsening_lon.get_sinks()
+
+ # After removing worsening B->C, B becomes a sink
+        assert len(mlon_sinks) > len(lon_sinks)
+
+
+class TestMLONGetGlobalOptimaIndices:
+ def test_identifies_global_optima(self, simple_lon):
+ mlon = MLON.from_lon(simple_lon)
+
+ global_indices = mlon.get_global_optima_indices()
+
+ assert len(global_indices) >= 1
+
+
+class TestMLONToCMLON:
+ def test_returns_cmlon(self, simple_lon):
+ mlon = MLON.from_lon(simple_lon)
+ cmlon = mlon.to_cmlon()
+
+ assert isinstance(cmlon, CMLON)
+
+ def test_uses_source_lon_when_available(self, simple_lon):
+ mlon = MLON.from_lon(simple_lon)
+ cmlon = mlon.to_cmlon()
+
+ assert cmlon.source_lon is simple_lon
+
+
+class TestCMLONFromLON:
+ def test_creates_cmlon_from_lon(self, simple_lon):
+ cmlon = CMLON.from_lon(simple_lon)
+
+ assert isinstance(cmlon, CMLON)
+
+ def test_contracts_neutral_nodes(self, neutral_lon):
+ cmlon = CMLON.from_lon(neutral_lon)
+
+ # B and C have equal fitness and are connected, should be contracted
+ assert cmlon.n_vertices < neutral_lon.n_vertices
+
+ def test_no_contraction_without_neutral_edges(self, simple_lon):
+ cmlon = CMLON.from_lon(simple_lon)
+
+ assert cmlon.n_vertices == simple_lon.n_vertices
+
+ def test_preserves_best_fitness(self, simple_lon):
+ cmlon = CMLON.from_lon(simple_lon)
+
+ assert cmlon.best_fitness == simple_lon.best_fitness
+
+ def test_sums_edge_counts(self, multi_run_trace_df):
+ lon = LON.from_trace_data(multi_run_trace_df)
+ cmlon = CMLON.from_lon(lon)
+
+ if cmlon.n_edges > 0 and "Count" in cmlon.graph.es.attributes():
+ edge_counts = cmlon.graph.es["Count"]
+ assert all(c >= 1 for c in edge_counts)
+
+ def test_handles_empty_lon(self):
+ lon = LON()
+ cmlon = CMLON.from_lon(lon)
+
+ assert cmlon.n_vertices == 0
+ assert cmlon.n_edges == 0
+
+
+class TestCMLONProperties:
+ def test_n_vertices(self, simple_lon):
+ cmlon = CMLON.from_lon(simple_lon)
+
+ assert cmlon.n_vertices == 3
+
+ def test_n_edges(self, simple_lon):
+ cmlon = CMLON.from_lon(simple_lon)
+
+ assert cmlon.n_edges == 2
+
+ def test_vertex_fitness(self, simple_lon):
+ cmlon = CMLON.from_lon(simple_lon)
+
+ assert isinstance(cmlon.vertex_fitness, list)
+ assert len(cmlon.vertex_fitness) == cmlon.n_vertices
+
+ def test_vertex_count(self, neutral_lon):
+ cmlon = CMLON.from_lon(neutral_lon)
+
+ # Some contracted nodes should have count > 1
+ assert isinstance(cmlon.vertex_count, list)
+ assert len(cmlon.vertex_count) == cmlon.n_vertices
+
+
+class TestCMLONGetSinks:
+ def test_identifies_sinks(self, simple_lon):
+ cmlon = CMLON.from_lon(simple_lon)
+
+ sinks = cmlon.get_sinks()
+
+ assert len(sinks) >= 1
+
+ def test_sink_has_zero_out_degree(self, simple_lon):
+ cmlon = CMLON.from_lon(simple_lon)
+
+ sinks = cmlon.get_sinks()
+ for sink_idx in sinks:
+ out_degree = cmlon.graph.degree(sink_idx, mode="out")
+ assert out_degree == 0
+
+
+class TestCMLONGetGlobalSinks:
+ def test_identifies_global_sinks(self, simple_lon):
+ cmlon = CMLON.from_lon(simple_lon)
+
+ global_sinks = cmlon.get_global_sinks()
+
+ assert len(global_sinks) >= 1
+ for sink_idx in global_sinks:
+ assert cmlon.vertex_fitness[sink_idx] == cmlon.best_fitness
+
+
+class TestCMLONGetLocalSinks:
+ def test_identifies_local_sinks(self, multiple_sinks_trace_df):
+ lon = LON.from_trace_data(multiple_sinks_trace_df)
+ cmlon = CMLON.from_lon(lon)
+
+ local_sinks = cmlon.get_local_sinks()
+
+ for sink_idx in local_sinks:
+            assert cmlon.best_fitness is not None and cmlon.vertex_fitness[sink_idx] > cmlon.best_fitness
+
+ def test_no_local_sinks_when_all_global(self, simple_lon):
+ cmlon = CMLON.from_lon(simple_lon)
+
+ local_sinks = cmlon.get_local_sinks()
+
+ # Simple LON has only one sink which is global
+ assert len(local_sinks) == 0
+
+
+class TestCMLONComputeMetrics:
+ def test_returns_dict_with_required_keys(self, simple_lon):
+ cmlon = CMLON.from_lon(simple_lon)
+ metrics = cmlon.compute_metrics()
+
+ assert "n_optima" in metrics
+ assert "n_funnels" in metrics
+ assert "n_global_funnels" in metrics
+ assert "neutral" in metrics
+ assert "strength" in metrics
+ assert "global_funnel_proportion" in metrics
+
+ def test_n_optima_metric(self, simple_lon):
+ cmlon = CMLON.from_lon(simple_lon)
+ metrics = cmlon.compute_metrics()
+
+ assert metrics["n_optima"] == cmlon.n_vertices
+
+ def test_neutral_metric_with_contraction(self, neutral_lon):
+ cmlon = CMLON.from_lon(neutral_lon)
+ metrics = cmlon.compute_metrics()
+
+ # neutral = 1 - cmlon.n_vertices / lon.n_vertices
+ expected_neutral = 1.0 - cmlon.n_vertices / neutral_lon.n_vertices
+ assert metrics["neutral"] == round(expected_neutral, 4)
+
+ def test_strength_metric(self, simple_lon):
+ cmlon = CMLON.from_lon(simple_lon)
+ metrics = cmlon.compute_metrics()
+
+ assert metrics["strength"] >= 0.0
+ assert metrics["strength"] <= 1.0
+
+ def test_global_funnel_proportion_metric(self, simple_lon):
+ cmlon = CMLON.from_lon(simple_lon)
+ metrics = cmlon.compute_metrics()
+
+ assert metrics["global_funnel_proportion"] >= 0.0
+ assert metrics["global_funnel_proportion"] <= 1.0
+
+
+class TestCMLONGlobalFunnelProportion:
+ def test_all_nodes_reach_global(self, simple_lon):
+ cmlon = CMLON.from_lon(simple_lon)
+
+ proportion = cmlon._compute_global_funnel_proportion()
+
+ # All nodes in chain lead to global optimum
+ assert proportion == 1.0
+
+ def test_partial_reach(self, multiple_sinks_trace_df):
+ lon = LON.from_trace_data(multiple_sinks_trace_df)
+ cmlon = CMLON.from_lon(lon)
+
+ proportion = cmlon._compute_global_funnel_proportion()
+
+ # D and E lead to local sink, not all nodes reach global
+ assert proportion < 1.0
+ assert proportion > 0.0
+
+
+class TestContractVertices:
+ def test_contracts_by_membership(self):
+ graph = ig.Graph(directed=True)
+ graph.add_vertices(4)
+ graph.vs["name"] = ["A", "B", "C", "D"]
+ graph.vs["Fitness"] = [10, 10, 20, 30]
+ graph.vs["Count"] = [1, 1, 1, 1]
+ graph.add_edges([(0, 1), (1, 2), (2, 3)])
+ graph.es["Count"] = [1, 1, 1]
+
+ # Contract A and B (membership 0), C and D separate
+ membership = [0, 0, 1, 2]
+
+ result = _contract_vertices(
+ graph,
+ membership,
+ vertex_attr_comb={"name": "first", "Fitness": "first", "Count": "sum"},
+ )
+
+ assert result.vcount() == 3
+
+ def test_sums_count_attribute(self):
+ graph = ig.Graph(directed=True)
+ graph.add_vertices(2)
+ graph.vs["name"] = ["A", "B"]
+ graph.vs["Fitness"] = [10, 10]
+ graph.vs["Count"] = [3, 5]
+ graph.add_edges([(0, 1)])
+ graph.es["Count"] = [1]
+
+ membership = [0, 0] # Contract both
+
+ result = _contract_vertices(
+ graph,
+ membership,
+ vertex_attr_comb={"name": "first", "Fitness": "first", "Count": "sum"},
+ )
+
+ assert result.vs[0]["Count"] == 8
+
+ def test_removes_internal_edges(self):
+ graph = ig.Graph(directed=True)
+ graph.add_vertices(3)
+ graph.vs["name"] = ["A", "B", "C"]
+ graph.vs["Fitness"] = [10, 10, 20]
+ graph.add_edges([(0, 1), (1, 2)]) # A->B (internal), B->C (external)
+ graph.es["Count"] = [1, 1]
+
+ membership = [0, 0, 1]
+
+ result = _contract_vertices(
+ graph,
+ membership,
+ vertex_attr_comb={"name": "first", "Fitness": "first"},
+ )
+
+ # Only B->C should remain (as contracted_0 -> 1)
+ assert result.ecount() == 1
+
+
+class TestSimplifyWithEdgeSum:
+ def test_removes_self_loops(self):
+ graph = ig.Graph(directed=True)
+ graph.add_vertices(2)
+ graph.vs["name"] = ["A", "B"]
+ graph.vs["Fitness"] = [10, 20]
+ graph.add_edges([(0, 0), (0, 1)]) # Self-loop and normal edge
+ graph.es["Count"] = [1, 1]
+
+ result = _simplify_with_edge_sum(graph)
+
+ assert result.ecount() == 1
+
+ def test_combines_parallel_edges(self):
+ graph = ig.Graph(directed=True)
+ graph.add_vertices(2)
+ graph.vs["name"] = ["A", "B"]
+ graph.vs["Fitness"] = [10, 20]
+ graph.add_edges([(0, 1), (0, 1)]) # Parallel edges
+ graph.es["Count"] = [3, 5]
+
+ result = _simplify_with_edge_sum(graph)
+
+ assert result.ecount() == 1
+ assert result.es[0]["Count"] == 8
+
+ def test_preserves_vertex_attributes(self):
+ graph = ig.Graph(directed=True)
+ graph.add_vertices(2)
+ graph.vs["name"] = ["A", "B"]
+ graph.vs["Fitness"] = [10, 20]
+ graph.vs["Count"] = [1, 2]
+ graph.add_edges([(0, 1)])
+ graph.es["Count"] = [1]
+
+ result = _simplify_with_edge_sum(graph)
+
+ assert result.vs["name"] == ["A", "B"]
+ assert result.vs["Fitness"] == [10, 20]
+ assert result.vs["Count"] == [1, 2]
+
+ def test_handles_empty_graph(self):
+ graph = ig.Graph(directed=True)
+ graph.add_vertices(2)
+ graph.vs["name"] = ["A", "B"]
+ graph.vs["Fitness"] = [10, 20]
+
+ result = _simplify_with_edge_sum(graph)
+
+ assert result.vcount() == 2
+ assert result.ecount() == 0
+
+
+class TestLONIntegration:
+ def test_lon_to_mlon_to_cmlon(self, neutral_lon):
+ mlon = neutral_lon.to_mlon()
+ cmlon = mlon.to_cmlon()
+
+ assert isinstance(cmlon, CMLON)
+ assert cmlon.source_lon is neutral_lon
+
+ def test_lon_direct_to_cmlon(self, neutral_lon):
+ cmlon = neutral_lon.to_cmlon()
+
+ assert isinstance(cmlon, CMLON)
+ assert cmlon.n_vertices <= neutral_lon.n_vertices
+
+ def test_metrics_consistency(self, simple_lon):
+ lon_metrics = simple_lon.compute_metrics()
+ cmlon = simple_lon.to_cmlon()
+ cmlon_metrics = cmlon.compute_metrics()
+
+ # For simple LON without neutral edges, metrics should match
+ assert lon_metrics["n_optima"] == cmlon_metrics["n_optima"]
+ assert lon_metrics["n_funnels"] == cmlon_metrics["n_funnels"]
+ assert lon_metrics["n_global_funnels"] == cmlon_metrics["n_global_funnels"]
+
+ def test_complex_landscape(self):
+ trace = pd.DataFrame(
+ [
+ # Run 0: A -> B -> C (global)
+ [0, 100, "A", 50, "B"],
+ [0, 50, "B", 10, "C"],
+ # Run 1: A -> D -> E (local sink)
+ [1, 100, "A", 70, "D"],
+ [1, 70, "D", 40, "E"],
+ # Run 2: F -> G -> H (neutral) -> C (global)
+ [2, 90, "F", 60, "G"],
+ [2, 60, "G", 60, "H"],
+ [2, 60, "H", 10, "C"],
+ ],
+            columns=COLUMNS,
+ )
+
+ lon = LON.from_trace_data(trace)
+ metrics = lon.compute_metrics()
+
+ assert metrics["n_optima"] == 8
+ assert metrics["n_funnels"] == 2 # C and E
+ assert metrics["n_global_funnels"] == 1 # Only C
+ assert metrics["neutral"] > 0 # G and H are neutral
+
+ cmlon = lon.to_cmlon()
+ cmlon_metrics = cmlon.compute_metrics()
+
+ # CMLON should have fewer vertices due to G-H contraction
+ assert cmlon.n_vertices < lon.n_vertices
+ assert cmlon_metrics["neutral"] > 0
diff --git a/tests/test_neighborhoods.py b/tests/test_neighborhoods.py
new file mode 100644
index 0000000..90c1d28
--- /dev/null
+++ b/tests/test_neighborhoods.py
@@ -0,0 +1,199 @@
+import random
+
+import pytest
+
+from lonpy.discrete.neighborhoods import FlipNeighborhood, SwapNeighborhood
+from lonpy.discrete.solution import Solution
+
+
+class TestFlipNeighborhood:
+ def test_get_neighbor_indices(self):
+ solution = Solution.from_list([0, 1, 0, 1, 1])
+ neighborhood = FlipNeighborhood()
+
+ indices = neighborhood.get_neighbor_indices(solution)
+
+ assert indices == [0, 1, 2, 3, 4]
+
+ def test_get_neighbor_indices_empty_solution(self):
+ solution = Solution.from_list([])
+ neighborhood = FlipNeighborhood()
+
+ indices = neighborhood.get_neighbor_indices(solution)
+
+ assert indices == []
+
+ def test_apply_move_flips_bit(self):
+ solution = Solution.from_list([0, 1, 0, 1, 1])
+ neighborhood = FlipNeighborhood()
+
+ neighbor = neighborhood.apply_move(solution, 0)
+
+ assert neighbor.data == [1, 1, 0, 1, 1]
+ assert solution.data == [0, 1, 0, 1, 1] # Original unchanged
+
+ def test_apply_move_flips_one_to_zero(self):
+ solution = Solution.from_list([0, 1, 0, 1, 1])
+ neighborhood = FlipNeighborhood()
+
+ neighbor = neighborhood.apply_move(solution, 1)
+
+ assert neighbor.data == [0, 0, 0, 1, 1]
+
+ def test_apply_move_invalidates_fitness(self):
+ solution = Solution.from_list([0, 1, 0])
+ solution.fitness = 5.0
+ neighborhood = FlipNeighborhood()
+
+ neighbor = neighborhood.apply_move(solution, 0)
+
+ assert neighbor.fitness is None
+ assert solution.fitness == 5.0 # Original unchanged
+
+ def test_apply_move_rejects_tuple_index(self):
+ solution = Solution.from_list([0, 1, 0])
+ neighborhood = FlipNeighborhood()
+
+ with pytest.raises(TypeError, match="expects int index"):
+ neighborhood.apply_move(solution, (0, 1))
+
+ def test_apply_random_perturbation_flips_correct_count(self):
+ solution = Solution.from_list([0, 0, 0, 0, 0])
+ neighborhood = FlipNeighborhood()
+ rng = random.Random(42)
+
+ perturbed = neighborhood.apply_random_perturbation(solution, strength=3, rng=rng)
+
+ # Count how many bits differ
+ differences = sum(a != b for a, b in zip(solution.data, perturbed.data, strict=True))
+ assert differences == 3
+
+ def test_apply_random_perturbation_caps_at_solution_length(self):
+ solution = Solution.from_list([0, 0, 0])
+ neighborhood = FlipNeighborhood()
+ rng = random.Random(42)
+
+ perturbed = neighborhood.apply_random_perturbation(solution, strength=10, rng=rng)
+
+ differences = sum(a != b for a, b in zip(solution.data, perturbed.data, strict=True))
+ assert differences == 3 # Capped at solution length
+
+ def test_apply_random_perturbation_preserves_original(self):
+ solution = Solution.from_list([0, 1, 0, 1])
+ original_data = solution.data.copy()
+ neighborhood = FlipNeighborhood()
+ rng = random.Random(42)
+
+ neighborhood.apply_random_perturbation(solution, strength=2, rng=rng)
+
+ assert solution.data == original_data
+
+ def test_apply_random_perturbation_deterministic_with_seed(self):
+ solution = Solution.from_list([0, 1, 0, 1, 0])
+ neighborhood = FlipNeighborhood()
+
+ rng1 = random.Random(123)
+ perturbed1 = neighborhood.apply_random_perturbation(solution, strength=2, rng=rng1)
+
+ rng2 = random.Random(123)
+ perturbed2 = neighborhood.apply_random_perturbation(solution, strength=2, rng=rng2)
+
+ assert perturbed1.data == perturbed2.data
+
+
+class TestSwapNeighborhood:
+ def test_get_neighbor_indices(self):
+ solution = Solution.from_list([0, 1, 2, 3], representation="permutation")
+ neighborhood = SwapNeighborhood()
+
+ indices = neighborhood.get_neighbor_indices(solution)
+
+ expected = [(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)]
+ assert indices == expected
+
+ def test_get_neighbor_indices_count(self):
+ solution = Solution.from_list([0, 1, 2, 3, 4], representation="permutation")
+ neighborhood = SwapNeighborhood()
+
+ indices = neighborhood.get_neighbor_indices(solution)
+
+ n = 5
+ expected_count = n * (n - 1) // 2
+ assert len(indices) == expected_count
+
+ def test_get_neighbor_indices_single_element(self):
+ solution = Solution.from_list([0], representation="permutation")
+ neighborhood = SwapNeighborhood()
+
+ indices = neighborhood.get_neighbor_indices(solution)
+
+ assert indices == []
+
+ def test_apply_move_swaps_elements(self):
+ solution = Solution.from_list([0, 1, 2, 3], representation="permutation")
+ neighborhood = SwapNeighborhood()
+
+ neighbor = neighborhood.apply_move(solution, (0, 2))
+
+ assert neighbor.data == [2, 1, 0, 3]
+ assert solution.data == [0, 1, 2, 3] # Original unchanged
+
+ def test_apply_move_invalidates_fitness(self):
+ solution = Solution.from_list([0, 1, 2], representation="permutation")
+ solution.fitness = 10.0
+ neighborhood = SwapNeighborhood()
+
+ neighbor = neighborhood.apply_move(solution, (0, 1))
+
+ assert neighbor.fitness is None
+ assert solution.fitness == 10.0 # Original unchanged
+
+ def test_apply_move_rejects_int_index(self):
+ solution = Solution.from_list([0, 1, 2], representation="permutation")
+ neighborhood = SwapNeighborhood()
+
+ with pytest.raises(TypeError, match="expects tuple index"):
+ neighborhood.apply_move(solution, 0)
+
+ def test_apply_random_perturbation_performs_swaps(self):
+ solution = Solution.from_list([0, 1, 2, 3, 4], representation="permutation")
+ neighborhood = SwapNeighborhood()
+ rng = random.Random(42)
+
+ perturbed = neighborhood.apply_random_perturbation(solution, strength=2, rng=rng)
+
+ # Verify it's still a valid permutation
+ assert sorted(perturbed.data) == [0, 1, 2, 3, 4]
+ # Verify something changed
+ assert perturbed.data != solution.data
+
+ def test_apply_random_perturbation_preserves_original(self):
+ solution = Solution.from_list([0, 1, 2, 3], representation="permutation")
+ original_data = solution.data.copy()
+ neighborhood = SwapNeighborhood()
+ rng = random.Random(42)
+
+ neighborhood.apply_random_perturbation(solution, strength=2, rng=rng)
+
+ assert solution.data == original_data
+
+ def test_apply_random_perturbation_deterministic_with_seed(self):
+ solution = Solution.from_list([0, 1, 2, 3, 4], representation="permutation")
+ neighborhood = SwapNeighborhood()
+
+ rng1 = random.Random(999)
+ perturbed1 = neighborhood.apply_random_perturbation(solution, strength=3, rng=rng1)
+
+ rng2 = random.Random(999)
+ perturbed2 = neighborhood.apply_random_perturbation(solution, strength=3, rng=rng2)
+
+ assert perturbed1.data == perturbed2.data
+
+ def test_apply_random_perturbation_maintains_permutation(self):
+ solution = Solution.from_list([4, 2, 0, 1, 3], representation="permutation")
+ neighborhood = SwapNeighborhood()
+ rng = random.Random(42)
+
+ for _ in range(10):
+ perturbed = neighborhood.apply_random_perturbation(solution, strength=5, rng=rng)
+ assert sorted(perturbed.data) == [0, 1, 2, 3, 4]
diff --git a/tests/test_solution.py b/tests/test_solution.py
new file mode 100644
index 0000000..df94d7e
--- /dev/null
+++ b/tests/test_solution.py
@@ -0,0 +1,352 @@
+import random
+
+from lonpy.discrete.solution import Solution
+
+
+class TestSolutionBasics:
+ def test_n_property(self):
+ solution = Solution(data=[0, 1, 0, 1, 1])
+
+ assert solution.n == 5
+
+ def test_n_property_empty(self):
+ solution = Solution(data=[])
+
+ assert solution.n == 0
+
+
+class TestSolutionCopy:
+ def test_copy_preserves_data(self):
+ original = Solution(data=[0, 1, 0], fitness=2.0, representation="permutation")
+
+ copied = original.copy()
+
+ assert copied is not original
+ assert copied.data == original.data
+ assert copied.fitness == original.fitness
+ assert copied.representation == original.representation
+
+ def test_copy_is_deep(self):
+ original = Solution(data=[0, 1, 0], fitness=2.0)
+
+ copied = original.copy()
+ copied.data[0] = 1
+ copied.fitness = 5.0
+
+ assert original.data == [0, 1, 0]
+ assert original.fitness == 2.0
+
+
+class TestSolutionToHash:
+ def test_to_hash_bitstring(self):
+ solution = Solution(data=[0, 1, 0, 1, 1])
+
+ hash_str = solution.to_hash()
+
+ assert hash_str == "0_1_0_1_1"
+
+ def test_to_hash_permutation(self):
+ solution = Solution(data=[3, 1, 4, 0, 2], representation="permutation")
+
+ hash_str = solution.to_hash()
+
+ assert hash_str == "3_1_4_0_2"
+
+
+class TestSolutionFlip:
+ def test_flip_zero_to_one(self):
+ solution = Solution(data=[0, 0, 0])
+
+ solution.flip(1)
+
+ assert solution.data == [0, 1, 0]
+
+ def test_flip_first_index(self):
+ solution = Solution(data=[0, 1, 1])
+
+ solution.flip(0)
+
+ assert solution.data[0] == 1
+ assert solution.data == [1, 1, 1]
+ assert solution.fitness is None
+
+
+class TestSolutionSwap:
+ def test_swap_elements(self):
+ solution = Solution(data=[0, 1, 2, 3], representation="permutation")
+
+ solution.swap(0, 3)
+
+ assert solution.data == [3, 1, 2, 0]
+
+ def test_swap_adjacent_elements(self):
+ solution = Solution(data=[0, 1, 2], representation="permutation")
+
+ solution.swap(0, 1)
+
+ assert solution.data == [1, 0, 2]
+
+ def test_swap_invalidates_fitness(self):
+ solution = Solution(data=[0, 1, 2], fitness=10.0, representation="permutation")
+
+ solution.swap(0, 2)
+
+ assert solution.fitness is None
+
+ def test_swap_same_index(self):
+ solution = Solution(data=[0, 1, 2], representation="permutation")
+
+ solution.swap(1, 1)
+
+ assert solution.data == [0, 1, 2]
+
+
+class TestSolutionRandomBitstring:
+ def test_random_bitstring_length(self):
+ solution = Solution.random_bitstring(n=10)
+
+ assert solution.n == 10
+
+ def test_random_bitstring_representation(self):
+ solution = Solution.random_bitstring(n=5)
+
+ assert solution.representation == "bitstring"
+
+ def test_random_bitstring_values(self):
+ solution = Solution.random_bitstring(n=100)
+
+ assert all(x in [0, 1] for x in solution.data)
+
+ def test_random_bitstring_fitness_is_none(self):
+ solution = Solution.random_bitstring(n=10)
+
+ assert solution.fitness is None
+
+ def test_random_bitstring_with_rng(self):
+ rng = random.Random(42)
+ solution = Solution.random_bitstring(n=10, rng=rng)
+
+ assert solution.n == 10
+
+ def test_random_bitstring_deterministic_with_seed(self):
+ rng1 = random.Random(123)
+ solution1 = Solution.random_bitstring(n=20, rng=rng1)
+
+ rng2 = random.Random(123)
+ solution2 = Solution.random_bitstring(n=20, rng=rng2)
+
+ assert solution1.data == solution2.data
+
+ def test_random_bitstring_empty(self):
+ solution = Solution.random_bitstring(n=0)
+
+ assert solution.data == []
+ assert solution.n == 0
+
+
+class TestSolutionRandomPermutation:
+ def test_random_permutation_length(self):
+ solution = Solution.random_permutation(n=10)
+
+ assert solution.n == 10
+
+ def test_random_permutation_representation(self):
+ solution = Solution.random_permutation(n=5)
+
+ assert solution.representation == "permutation"
+
+ def test_random_permutation_values(self):
+ solution = Solution.random_permutation(n=10)
+
+ assert sorted(solution.data) == list(range(10))
+
+ def test_random_permutation_fitness_is_none(self):
+ solution = Solution.random_permutation(n=10)
+
+ assert solution.fitness is None
+
+ def test_random_permutation_with_rng(self):
+ rng = random.Random(42)
+ solution = Solution.random_permutation(n=10, rng=rng)
+
+ assert sorted(solution.data) == list(range(10))
+
+ def test_random_permutation_deterministic_with_seed(self):
+ rng1 = random.Random(456)
+ solution1 = Solution.random_permutation(n=15, rng=rng1)
+
+ rng2 = random.Random(456)
+ solution2 = Solution.random_permutation(n=15, rng=rng2)
+
+ assert solution1.data == solution2.data
+
+ def test_random_permutation_empty(self):
+ solution = Solution.random_permutation(n=0)
+
+ assert solution.data == []
+ assert solution.n == 0
+
+
+class TestSolutionFromList:
+ def test_from_list_bitstring(self):
+ solution = Solution.from_list([0, 1, 0, 1])
+
+ assert solution.data == [0, 1, 0, 1]
+ assert solution.representation == "bitstring"
+
+ def test_from_list_permutation(self):
+ solution = Solution.from_list([3, 0, 2, 1], representation="permutation")
+
+ assert solution.data == [3, 0, 2, 1]
+ assert solution.representation == "permutation"
+
+ def test_from_list_copies_data(self):
+ original_data = [0, 1, 0]
+ solution = Solution.from_list(original_data)
+
+ original_data[0] = 1
+
+ assert solution.data == [0, 1, 0]
+
+ def test_from_list_fitness_is_none(self):
+ solution = Solution.from_list([1, 0, 1])
+
+ assert solution.fitness is None
+
+ def test_from_list_empty(self):
+ solution = Solution.from_list([])
+
+ assert solution.data == []
+ assert solution.n == 0
+
+
+class TestSolutionEquality:
+ def test_equal_solutions(self):
+ solution1 = Solution(data=[0, 1, 0])
+ solution2 = Solution(data=[0, 1, 0])
+
+ assert solution1 == solution2
+
+ def test_different_data(self):
+ solution1 = Solution(data=[0, 1, 0])
+ solution2 = Solution(data=[0, 0, 0])
+
+ assert solution1 != solution2
+
+ def test_different_fitness_still_equal(self):
+ solution1 = Solution(data=[0, 1, 0], fitness=1.0)
+ solution2 = Solution(data=[0, 1, 0], fitness=2.0)
+
+ assert solution1 == solution2
+
+ def test_different_representation_still_equal(self):
+ solution1 = Solution(data=[0, 1, 2], representation="bitstring")
+ solution2 = Solution(data=[0, 1, 2], representation="permutation")
+
+ assert solution1 == solution2
+
+ def test_not_equal_to_non_solution(self):
+ solution = Solution(data=[0, 1, 0])
+
+ assert solution != [0, 1, 0]
+ assert solution != "0_1_0"
+        assert solution != None  # noqa: E711 -- exercises __eq__ against None
+
+
+class TestSolutionHash:
+ def test_hash_equal_solutions(self):
+ solution1 = Solution(data=[0, 1, 0])
+ solution2 = Solution(data=[0, 1, 0])
+
+ assert hash(solution1) == hash(solution2)
+
+ def test_hash_usable_in_set(self):
+ solution1 = Solution(data=[0, 1, 0])
+ solution2 = Solution(data=[0, 1, 0])
+ solution3 = Solution(data=[1, 1, 1])
+
+ solution_set = {solution1, solution2, solution3}
+
+ assert len(solution_set) == 2
+
+ def test_hash_usable_as_dict_key(self):
+ solution = Solution(data=[0, 1, 0])
+
+ d = {solution: "test"}
+
+ assert d[solution] == "test"
+
+
+class TestSolutionRepr:
+ def test_repr_short_bitstring(self):
+ solution = Solution(data=[0, 1, 0, 1, 1], fitness=3.0)
+
+ repr_str = repr(solution)
+
+ assert "01011" in repr_str
+ assert "3.0000" in repr_str
+
+ def test_repr_no_fitness(self):
+ solution = Solution(data=[0, 1, 0])
+
+ repr_str = repr(solution)
+
+ assert "fitness=None" in repr_str
+
+ def test_repr_long_solution(self):
+ solution = Solution(data=list(range(25)))
+
+ repr_str = repr(solution)
+
+ assert "25 elements" in repr_str
+
+ def test_repr_exactly_20_elements(self):
+ solution = Solution(data=[0] * 20)
+
+ repr_str = repr(solution)
+
+ assert "00000000000000000000" in repr_str
+ assert "elements" not in repr_str
+
+ def test_repr_permutation(self):
+ solution = Solution(data=[3, 1, 4, 0, 2], representation="permutation")
+
+ repr_str = repr(solution)
+
+ assert "31402" in repr_str
+
+
+class TestSolutionIntegration:
+ def test_copy_then_flip(self):
+ original = Solution.from_list([0, 0, 0])
+ original.fitness = 0.0
+
+ copied = original.copy()
+ copied.flip(0)
+
+ assert original.data == [0, 0, 0]
+ assert original.fitness == 0.0
+ assert copied.data == [1, 0, 0]
+ assert copied.fitness is None
+
+ def test_copy_then_swap(self):
+ original = Solution.from_list([0, 1, 2], representation="permutation")
+ original.fitness = 10.0
+
+ copied = original.copy()
+ copied.swap(0, 2)
+
+ assert original.data == [0, 1, 2]
+ assert original.fitness == 10.0
+ assert copied.data == [2, 1, 0]
+ assert copied.fitness is None
+
+ def test_hash_consistent_after_operations(self):
+ solution = Solution.from_list([0, 1, 0])
+ initial_hash = solution.to_hash()
+
+ solution.flip(0)
+ after_flip_hash = solution.to_hash()
+
+ assert initial_hash == "0_1_0"
+ assert after_flip_hash == "1_1_0"
diff --git a/uv.lock b/uv.lock
index 4eeafaf..1fc70f4 100644
--- a/uv.lock
+++ b/uv.lock
@@ -884,6 +884,7 @@ dependencies = [
[package.optional-dependencies]
dev = [
+ { name = "ioh" },
{ name = "mkdocs" },
{ name = "mkdocs-material" },
{ name = "mkdocstrings" },
@@ -893,17 +894,14 @@ dev = [
{ name = "pytest" },
{ name = "pytest-cov" },
{ name = "ruff" },
-]
-
-[package.dev-dependencies]
-dev = [
- { name = "ioh" },
+ { name = "ty" },
]
[package.metadata]
requires-dist = [
{ name = "igraph", specifier = ">=0.11.0" },
{ name = "imageio", specifier = ">=2.31.0" },
+ { name = "ioh", marker = "extra == 'dev'", specifier = ">=0.3.22" },
{ name = "kaleido", specifier = ">=0.2.1" },
{ name = "matplotlib", specifier = ">=3.7.0" },
{ name = "mkdocs", marker = "extra == 'dev'", specifier = ">=1.6.1" },
@@ -919,12 +917,10 @@ requires-dist = [
{ name = "pytest-cov", marker = "extra == 'dev'", specifier = ">=4.0.0" },
{ name = "ruff", marker = "extra == 'dev'", specifier = ">=0.8.0" },
{ name = "scipy", specifier = ">=1.10.0" },
+ { name = "ty", marker = "extra == 'dev'", specifier = ">=0.0.10" },
]
provides-extras = ["dev"]
-[package.metadata.requires-dev]
-dev = [{ name = "ioh", specifier = ">=0.3.22" }]
-
[[package]]
name = "markdown"
version = "3.10"
@@ -2231,6 +2227,30 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/77/b8/0135fadc89e73be292b473cb820b4f5a08197779206b33191e801feeae40/tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b", size = 14408, upload-time = "2025-10-08T22:01:46.04Z" },
]
+[[package]]
+name = "ty"
+version = "0.0.10"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/b7/85/97b5276baa217e05db2fe3d5c61e4dfd35d1d3d0ec95bfca1986820114e0/ty-0.0.10.tar.gz", hash = "sha256:0a1f9f7577e56cd508a8f93d0be2a502fdf33de6a7d65a328a4c80b784f4ac5f", size = 4892892, upload-time = "2026-01-07T23:00:23.572Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/0c/7a/5a7147ce5231c3ccc55d6f945dabd7412e233e755d28093bfdec988ba595/ty-0.0.10-py3-none-linux_armv6l.whl", hash = "sha256:406a8ea4e648551f885629b75dc3f070427de6ed099af45e52051d4c68224829", size = 9835881, upload-time = "2026-01-07T22:08:17.492Z" },
+ { url = "https://files.pythonhosted.org/packages/3e/7d/89f4d2277c938332d047237b47b11b82a330dbff4fff0de8574cba992128/ty-0.0.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:d6e0a733e3d6d3bce56d6766bc61923e8b130241088dc2c05e3c549487190096", size = 9696404, upload-time = "2026-01-07T22:08:37.965Z" },
+ { url = "https://files.pythonhosted.org/packages/e8/cd/9dd49e6d40e54d4b7d563f9e2a432c4ec002c0673a81266e269c4bc194ce/ty-0.0.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:e4832f8879cb95fc725f7e7fcab4f22be0cf2550f3a50641d5f4409ee04176d4", size = 9181195, upload-time = "2026-01-07T22:59:07.187Z" },
+ { url = "https://files.pythonhosted.org/packages/d2/b8/3e7c556654ba0569ed5207138d318faf8633d87e194760fc030543817c26/ty-0.0.10-py3-none-manylinux_2_24_aarch64.whl", hash = "sha256:6b58cc78e5865bc908f053559a80bb77cab0dc168aaad2e88f2b47955694b138", size = 9665002, upload-time = "2026-01-07T22:08:30.782Z" },
+ { url = "https://files.pythonhosted.org/packages/98/96/410a483321406c932c4e3aa1581d1072b72cdcde3ae83cd0664a65c7b254/ty-0.0.10-py3-none-manylinux_2_24_armv7l.whl", hash = "sha256:83c6a514bb86f05005fa93e3b173ae3fde94d291d994bed6fe1f1d2e5c7331cf", size = 9664948, upload-time = "2026-01-07T23:04:14.655Z" },
+ { url = "https://files.pythonhosted.org/packages/1f/5d/cba2ab3e2f660763a72ad12620d0739db012e047eaa0ceaa252bf5e94ebb/ty-0.0.10-py3-none-manylinux_2_24_i686.whl", hash = "sha256:2e43f71e357f8a4f7fc75e4753b37beb2d0f297498055b1673a9306aa3e21897", size = 10125401, upload-time = "2026-01-07T22:08:28.171Z" },
+ { url = "https://files.pythonhosted.org/packages/a7/67/29536e0d97f204a2933122239298e754db4564f4ed7f34e2153012b954be/ty-0.0.10-py3-none-manylinux_2_24_ppc64le.whl", hash = "sha256:18be3c679965c23944c8e574be0635504398c64c55f3f0c46259464e10c0a1c7", size = 10714052, upload-time = "2026-01-07T22:08:20.098Z" },
+ { url = "https://files.pythonhosted.org/packages/63/c8/82ac83b79a71c940c5dcacb644f526f0c8fdf4b5e9664065ab7ee7c0e4ec/ty-0.0.10-py3-none-manylinux_2_24_s390x.whl", hash = "sha256:5477981681440a35acdf9b95c3097410c547abaa32b893f61553dbc3b0096fff", size = 10395924, upload-time = "2026-01-07T22:08:22.839Z" },
+ { url = "https://files.pythonhosted.org/packages/9e/4c/2f9ac5edbd0e67bf82f5cd04275c4e87cbbf69a78f43e5dcf90c1573d44e/ty-0.0.10-py3-none-manylinux_2_24_x86_64.whl", hash = "sha256:e206a23bd887574302138b33383ae1edfcc39d33a06a12a5a00803b3f0287a45", size = 10220096, upload-time = "2026-01-07T22:08:13.171Z" },
+ { url = "https://files.pythonhosted.org/packages/04/13/3be2b7bfd53b9952b39b6f2c2ef55edeb1a2fea3bf0285962736ee26731c/ty-0.0.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4e09ddb0d3396bd59f645b85eab20f9a72989aa8b736b34338dcb5ffecfe77b6", size = 9649120, upload-time = "2026-01-07T22:08:34.003Z" },
+ { url = "https://files.pythonhosted.org/packages/93/e3/edd58547d9fd01e4e584cec9dced4f6f283506b422cdd953e946f6a8e9f0/ty-0.0.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:139d2a741579ad86a044233b5d7e189bb81f427eebce3464202f49c3ec0eba3b", size = 9686033, upload-time = "2026-01-07T22:08:40.967Z" },
+ { url = "https://files.pythonhosted.org/packages/cc/bc/9d2f5fec925977446d577fb9b322d0e7b1b1758709f23a6cfc10231e9b84/ty-0.0.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6bae10420c0abfe4601fbbc6ce637b67d0b87a44fa520283131a26da98f2e74c", size = 9841905, upload-time = "2026-01-07T23:04:21.694Z" },
+ { url = "https://files.pythonhosted.org/packages/7c/b8/5acd3492b6a4ef255ace24fcff0d4b1471a05b7f3758d8910a681543f899/ty-0.0.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:7358bbc5d037b9c59c3a48895206058bcd583985316c4125a74dd87fd1767adb", size = 10320058, upload-time = "2026-01-07T22:08:25.645Z" },
+ { url = "https://files.pythonhosted.org/packages/35/67/5b6906fccef654c7e801d6ac8dcbe0d493e1f04c38127f82a5e6d7e0aa0e/ty-0.0.10-py3-none-win32.whl", hash = "sha256:f51b6fd485bc695d0fdf555e69e6a87d1c50f14daef6cb980c9c941e12d6bcba", size = 9271806, upload-time = "2026-01-07T22:08:10.08Z" },
+ { url = "https://files.pythonhosted.org/packages/42/36/82e66b9753a76964d26fd9bc3514ea0abce0a5ba5ad7d5f084070c6981da/ty-0.0.10-py3-none-win_amd64.whl", hash = "sha256:16deb77a72cf93b89b4d29577829613eda535fbe030513dfd9fba70fe38bc9f5", size = 10130520, upload-time = "2026-01-07T23:04:11.759Z" },
+ { url = "https://files.pythonhosted.org/packages/63/52/89da123f370e80b587d2db8551ff31562c882d87b32b0e92b59504b709ae/ty-0.0.10-py3-none-win_arm64.whl", hash = "sha256:7495288bca7afba9a4488c9906466d648ffd3ccb6902bc3578a6dbd91a8f05f0", size = 9626026, upload-time = "2026-01-07T23:04:17.91Z" },
+]
+
[[package]]
name = "typing-extensions"
version = "4.15.0"