Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
31 commits
Select commit Hold shift + click to select a range
a58811f
clean .gitignore and apply fixes from PR #2
UriKH Apr 7, 2026
d42fe7c
update shift length check
UriKH Apr 7, 2026
0514f1d
Merge branch 'RamanujanMachine:main' into main
UriKH Apr 7, 2026
0050235
organize and fix tests
UriKH Apr 7, 2026
f7185d6
organize and fix tests
UriKH Apr 7, 2026
1f226a7
Merge branch 'RamanujanMachine:main' into main
UriKH Apr 7, 2026
85aee25
Merge branch 'RamanujanMachine:main' into main
UriKH Apr 8, 2026
4a5bea8
Improve installation section formatting in README
UriKH Apr 9, 2026
9d5d26a
fast push
UriKH Apr 9, 2026
ef1634c
Logger hot fix
UriKH Apr 9, 2026
bdd80d3
Merge branch 'Dev'
UriKH Apr 9, 2026
d591f0e
Upgrade logger and general flow
UriKH Apr 10, 2026
98b6bc7
Upgrade logger and general flow
UriKH Apr 10, 2026
06ca69b
Merge branch 'Dev'
UriKH Apr 10, 2026
55c050c
sampling functionality organization
UriKH Apr 10, 2026
712e83e
Merge branch 'main' into main
UriKH Apr 10, 2026
ae458a8
typo fixing and type hints correction
UriKH Apr 10, 2026
c890bfd
upgrade tests and testing policy
UriKH Apr 10, 2026
b2432ac
Merge branch 'main' into main
UriKH Apr 10, 2026
7a918ab
Merge branch 'RamanujanMachine:main' into main
UriKH Apr 10, 2026
b2394c0
Fix logger and add Genetic search module
UriKH Apr 11, 2026
2307c54
upgrade logger, logging and output
UriKH Apr 12, 2026
40771b6
update DEFINITION_OF_DONE.md and test audit
UriKH Apr 12, 2026
ddac005
remove unnecessary test, update DEFINITION_OF_DONE.md
UriKH Apr 12, 2026
131559b
update system overview notebook
UriKH Apr 12, 2026
b4249d0
fixed base_cmf.py bad shift management and updated tests
UriKH Apr 12, 2026
44a25b6
Merge branch 'RamanujanMachine:main' into main
UriKH Apr 12, 2026
de7090e
clean unused GA arguments
UriKH Apr 12, 2026
4837a2c
optimize GA and remove execution logs
UriKH Apr 12, 2026
718ad0b
set a minimum stop generation threshold
UriKH Apr 12, 2026
8b52b32
fixed bad tests
UriKH Apr 12, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions SYSTEM_SPEC.md
Original file line number Diff line number Diff line change
Expand Up @@ -181,6 +181,7 @@ Ordered by impact. When choosing what to work on, prefer items higher on this li
- [ ] **Automated proof sketches**: generate symbolic proofs or proof obligations for discovered formulas.
- [ ] **Paper-ready output**: auto-generate LaTeX summaries of discovered formulas with full verification.


---

## 8. Code Conventions
Expand Down
19 changes: 19 additions & 0 deletions dreamer/configs/search.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,12 @@ def traj_from_dim(dim: int) -> int:
def depth_from_len(traj_len, dim) -> int:
    """
    Return a search depth that shrinks as the dimension-normalized
    trajectory length grows, capped at 1500.

    :param traj_len: Trajectory length.
    :param dim: Dimensionality of the search space.
    :return: Depth in [0, 1500].
    """
    # Normalize length by sqrt(dim); clamp at 1 so short trajectories hit the cap.
    normalized = max(traj_len / math.sqrt(dim), 1)
    return min(round(1500 / normalized), 1500)

def ga_generations(dim: int) -> int:
    """
    Number of evolutionary generations to run, linear in the dimension.

    :param dim: Dimensionality of the search space.
    :return: Generation count (e.g. 27 for 3D, 75 for 15D).
    """
    base_generations = 15
    per_dimension = 4
    return base_generations + per_dimension * dim

def ga_population(dim: int) -> int:
    """
    GA population size, growing quadratically with the dimension.

    :param dim: Dimensionality of the search space.
    :return: Population size (e.g. 38 for 3D, 470 for 15D).
    """
    # Quadratic growth keeps larger spaces adequately covered.
    return 20 + 2 * (dim * dim)


@dataclass
class SearchConfig(Configurable):
Expand All @@ -32,5 +38,18 @@ class SearchConfig(Configurable):
COMPUTE_GCD_SLOPE: bool = False
COMPUTE_LIMIT: bool = False

# ============================== Genetic search settings ==============================
# Number of evolutionary generations to run.
GA_GENERATIONS: Callable[[int], int] | int = ga_generations # for 3D: 27, for 15D: 75
# Number of individuals in each generation.
GA_POPULATION_SIZE: Callable[[int], int] | int = ga_population # for 3D: 38, for 15D: 470
GA_ELITE_FRACTION: float = 0.2 # Fraction of top individuals kept unchanged each generation.
GA_MUTATION_PROB: float = 0.3 # Probability to mutate each child.
GA_MUTATION_STEP: int = 1 # Max mutation step for coordinate updates.
GA_CROSSOVER_PROB: float = 0.5 # Probability to use crossover instead of cloning.
GA_MAX_RETRIES: int = 3 # Retry rounds for invalid/failed trajectory evaluations.
GA_REFINE_PROB: float = 0.5 # Probability of entering refine mutation mode.
GA_REFINE_COORD_PROB: float = 0.5 # Per-coordinate refine perturbation probability.


search_config: SearchConfig = SearchConfig()
14 changes: 4 additions & 10 deletions dreamer/extraction/samplers/conditioner.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,6 @@ def __init__(self, A, max_beta=10, defect_tolerance=5.0, tol=1e-9):
self.defect_tolerance = defect_tolerance
self.tol = tol

@Logger.log_exec
def process(self):
"""
Main orchestrator: returns the conditioned basis and transformed bounds.
Expand All @@ -51,7 +50,6 @@ def process(self):

return Z_reduced, B_reduced, U_transform

@Logger.log_exec
def _extract_constraints(self) -> Tuple[np.ndarray, np.ndarray]:
"""
Separates A_prime into Equality (E) and Inequality (B) matrices.
Expand All @@ -73,7 +71,6 @@ def _extract_constraints(self) -> Tuple[np.ndarray, np.ndarray]:
B = np.array(ineq_rows, dtype=np.float64) if ineq_rows else np.empty((0, self.d_orig))
return E, B

@Logger.log_exec
def _compute_integer_basis(self, E: np.ndarray) -> np.ndarray:
"""
Finds the gapless integer basis for the equality hyperplanes.
Expand All @@ -97,7 +94,6 @@ def _compute_integer_basis(self, E: np.ndarray) -> np.ndarray:
int_basis.append(np.array(vec * common_denom, dtype=np.int64).flatten())
return np.column_stack(int_basis)

@Logger.log_exec
def _calculate_defect(self, Z: np.ndarray):
"""
Calculates the Orthogonality Defect.
Expand All @@ -114,7 +110,6 @@ def _calculate_defect(self, Z: np.ndarray):
return float('inf')
return prod_norms / det_L

@Logger.log_exec
def _ratchet_lattice_reduction(self, Z: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
"""
Dynamically applies LLL and BKZ to orthogonalize the space, retaining strictly the best reduction found.
Expand All @@ -129,7 +124,7 @@ def _ratchet_lattice_reduction(self, Z: np.ndarray) -> Tuple[np.ndarray, np.ndar
Z_current = np.array([list(row) for row in M_fpylll]).T
U_current = np.array([list(row) for row in U_fpylll])
defect = self._calculate_defect(Z_current)
Logger(f" LLL applied. Orthogonality Defect: {defect:.2f}", Logger.Levels.debug).log()
Logger(f"\t\tLLL applied. Orthogonality Defect: {defect:.2f}", Logger.Levels.debug).log()

# Escalation Ratchet: BKZ
beta = 4
Expand All @@ -138,23 +133,22 @@ def _ratchet_lattice_reduction(self, Z: np.ndarray) -> Tuple[np.ndarray, np.ndar
best_defect = defect

while defect > self.defect_tolerance and beta <= self.max_beta:
Logger(f" Defect too high. Escalating to BKZ (Block Size: {beta})...", Logger.Levels.debug).log()
Logger(f"\t\tDefect too high. Escalating to BKZ (Block Size: {beta})...", Logger.Levels.debug).log()
param = BKZ.Param(block_size=beta, strategies=BKZ.DEFAULT_STRATEGY, auto_abort=True)
BKZ.reduction(M_fpylll, param, U=U_fpylll)
Z_current = np.array([list(row) for row in M_fpylll]).T
U_current = np.array([list(row) for row in U_fpylll])
defect = self._calculate_defect(Z_current)
Logger(f" BKZ-{beta} applied. New Defect: {defect:.2f}", Logger.Levels.debug).log()
Logger(f"\t\tBKZ-{beta} applied. New Defect: {defect:.2f}", Logger.Levels.debug).log()
beta += 2

if defect < best_defect:
best_defect = defect
best_Z = Z_current.copy()
best_U = U_current.copy()
Logger(f" Final defect is {best_defect}", Logger.Levels.debug).log()
Logger(f"\t\tFinal defect is {best_defect}", Logger.Levels.debug).log()
return best_Z, best_U

@Logger.log_exec
def _transform_bounds(self, B_orig: np.ndarray, Z: np.ndarray, U_transform: np.ndarray):
"""
Applies the transformation matrix U to the inequality bounds.
Expand Down
33 changes: 16 additions & 17 deletions dreamer/extraction/samplers/raycast_sampler.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,18 @@ class RaycastPipelineSampler(Sampler):
def __init__(self, A_prime):
self.A_prime = A_prime
self.d_orig: int = int(A_prime.shape[1])
self.d_flat: int = 0

Logger("Initializing Sampler: Conditioning...", Logger.Levels.debug).log()
conditioner = HyperSpaceConditioner(self.A_prime, max_beta=10, defect_tolerance=5.0)
self.Z_reduced, self.B_reduced, _ = conditioner.process()
self.d_flat = int(self.Z_reduced.shape[1])
self.fraction = float(self._estimate_cone_fraction(self.B_reduced, self.d_flat))
Logger(
f"Shard Estimated Volume: {self.fraction * 100:.6f}%",
Logger.Levels.debug
).log()

super().__init__(self.d_flat)

@staticmethod
def _estimate_cone_fraction(B: np.ndarray, d_flat: int, samples: int = 100_000) -> float:
Expand Down Expand Up @@ -133,22 +144,10 @@ def harvest(
:param exact: If true and target_func is callable, enforce exactly target_func(d_flat) rays.
:return: The samples
"""
Logger("[Pipeline] Initializing Stage 1: Conditioning...", Logger.Levels.debug).log()
conditioner = HyperSpaceConditioner(self.A_prime, max_beta=10, defect_tolerance=5.0)

try:
Z_reduced, B_reduced, _ = conditioner.process()
except ValueError as e:
raise Exception(f"[Pipeline] Stage 1 Failed: {e}")

d_flat = int(Z_reduced.shape[1])
self.d_flat = d_flat

fraction = float(self._estimate_cone_fraction(B_reduced, d_flat))
Logger(
f"[Pipeline] Cone Volume Estimate: {fraction*100:.6f}% of total sphere.",
Logger.Levels.debug
).log()
Z_reduced = self.Z_reduced
B_reduced = self.B_reduced
d_flat = self.d_flat
fraction = self.fraction

requested_rays: int
if isinstance(target_func, int):
Expand Down
8 changes: 3 additions & 5 deletions dreamer/extraction/samplers/raycaster.py
Original file line number Diff line number Diff line change
Expand Up @@ -298,7 +298,6 @@ def __init__(self, Z_reduced: np.ndarray, B_reduced: np.ndarray, d_orig: int, gu
else:
raise ValueError('Unknown guidance method')

@Logger.log_exec
def _get_chebyshev_center(self):
"""
Compute the chebyshev center point inside bounds
Expand All @@ -320,7 +319,6 @@ def _get_chebyshev_center(self):
return res.x[:-1]
return None

@Logger.log_exec
def _generate_continuous_guide_rays(self, target_rays: int, mix_steps: int = 200):
"""
Ultra-fast parallel C-kernel generation of continuous guide rays.
Expand Down Expand Up @@ -349,13 +347,13 @@ def harvest(self, target_rays: int, R_max: float, max_per_ray: int = 1) -> np.nd
if self.d_flat == 0:
return np.array([])

Logger(f"Raycaster: Generating {target_rays} Continuous Guide Rays...", Logger.Levels.debug).log()
Logger(f"\tRaycaster: Generating {target_rays} Continuous Guide Rays...", Logger.Levels.debug).log()
guide_rays = self._generate_continuous_guide_rays(target_rays)
if guide_rays is None:
Logger("XXX Closed Cone.", Logger.Levels.debug).log()
return np.array([])

Logger("Raycaster: Sweeping lattice along Guide Rays...", Logger.Levels.debug).log()
Logger("\tRaycaster: Sweeping lattice along Guide Rays...", Logger.Levels.debug).log()
start_t = time.time()

raw_buffer, counts = _raycast(
Expand All @@ -372,7 +370,7 @@ def harvest(self, target_rays: int, R_max: float, max_per_ray: int = 1) -> np.nd
unique_rays = np.unique(merged, axis=0)

Logger(
f"Raycaster Yielded {len(unique_rays)} unique, shortest trajectories in {time.time()-start_t:.3f}s",
f"\tRaycaster Yielded {len(unique_rays)} unique, shortest trajectories in {time.time()-start_t:.3f}s",
Logger.Levels.debug
).log()
return unique_rays
5 changes: 5 additions & 0 deletions dreamer/extraction/samplers/sampler.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,11 @@

class Sampler(ABC):
"""Abstract trajectory sampler bound to a searchable space."""
def __init__(self, d: int):
"""
:param d: Dimensionality of the search space.
"""
self.d = d

@abstractmethod
def harvest(self, compute_n_samples: Callable[[int], int] | int, exact: bool = False) -> np.ndarray:
Expand Down
2 changes: 1 addition & 1 deletion dreamer/extraction/samplers/sphere_sampler.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ def __init__(self, d, batch_size=100_000):
:param d: dimensions of the sphere
:param batch_size: number of points to sample per batch
"""
self.d = d
super().__init__(d)
self.batch_size = batch_size
self.rng = np.random.default_rng()

Expand Down
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
from __future__ import annotations

from typing import Callable, Set

import sympy as sp

from dreamer.extraction.shard import Shard
from dreamer.utils.caching import cached_property
from dreamer.utils.rand import np
from ramanujantools import Position

Expand All @@ -15,22 +15,28 @@

class ShardSamplingOrchestrator(SamplingOrchestrator):
"""Trajectory sampler for shards using the extraction sampling pipeline."""
def __init__(self, searchable: "Shard"):
def __init__(self, searchable: Shard):
super().__init__(searchable)
if not isinstance(self.searchable, Shard):
raise ValueError(f"{self.__class__.__name__} can only be used with {Shard.__name__} objects.")

def sample_trajectories(self, compute_n_samples: Callable[[int], int] | int, *, exact: bool = False) -> Set[Position]:
a_matrix = getattr(self.searchable, "A", None)
symbols = self.searchable.symbols
a_matrix = self.searchable.A
if a_matrix is None:
sampler = PrimitiveSphereSampler(len(symbols))
samples = sampler.harvest(compute_n_samples)
self.sampler = PrimitiveSphereSampler(len(self.searchable.symbols))
else:
self.sampler = RaycastPipelineSampler(np.asarray(a_matrix, dtype=np.float64))

def sample_trajectories(self, compute_n_samples: Callable[[int], int] | int, *, exact: bool = False) -> Set[Position]:
if isinstance(self.sampler, PrimitiveSphereSampler):
samples = self.sampler.harvest(compute_n_samples)
else:
sampler = RaycastPipelineSampler(np.asarray(a_matrix, dtype=np.float64))
samples = sampler.harvest(compute_n_samples, exact=exact)
samples = self.sampler.harvest(compute_n_samples, exact=exact)

return {
Position({sym: sp.sympify(int(v)) for v, sym in zip(p, symbols)})
Position({sym: sp.sympify(int(v)) for v, sym in zip(p, self.searchable.symbols)})
for p in samples
}

@cached_property
def search_space_dim(self):
return self.sampler.d
Loading
Loading