From 58ad0baefc5ce1ca7f54021a4e8fc22feff3ec77 Mon Sep 17 00:00:00 2001 From: Rithwik Seth Date: Sat, 30 Mar 2024 11:07:29 -0700 Subject: [PATCH 01/16] Updates before starting runs --- msp/forcefield/mdl_ff.py | 28 ++++++++++++++---------- msp/optimizer/globalopt/basin_hopping.py | 14 +++++++++++- msp/utils/objectives/objectives.py | 26 ++++++++++++---------- scripts/example.py | 18 ++++++++------- 4 files changed, 55 insertions(+), 31 deletions(-) diff --git a/msp/forcefield/mdl_ff.py b/msp/forcefield/mdl_ff.py index 1d4cc2e..0940a0b 100644 --- a/msp/forcefield/mdl_ff.py +++ b/msp/forcefield/mdl_ff.py @@ -21,8 +21,9 @@ from matdeeplearn.trainers.property_trainer import PropertyTrainer from matdeeplearn.common.data import dataset_split from msp.structure.structure_util import atoms_to_data, data_to_atoms -from sklearn.cluster import KMeans +from sklearn.cluster import MiniBatchKMeans from sklearn.metrics import silhouette_score +from torch_scatter import scatter_mean import os @@ -163,11 +164,16 @@ def process_data(self, dataset): data.structure_id = [str(i)] data.structure_id = [struc['structure_id']] data.z = torch.LongTensor(struc['atomic_numbers']) - data.forces = torch.tensor(struc['forces']) - data.stress = torch.tensor(struc['stress']) + if 'forces' in struc: + data.forces = torch.tensor(struc['forces']) + if 'stress' in struc: + data.stress = torch.tensor(struc['stress']) #optional data.u = torch.tensor(np.zeros((3))[np.newaxis, ...]).float() - data.y = torch.tensor(np.array([struc['potential_energy']])).float() + if 'potential_energy' in struc: + data.y = torch.tensor(np.array([struc['potential_energy']])).float() + if 'y' in struc: + data.y = torch.tensor([struc['y']]).float() if data.y.dim() == 1: data.y = data.y.unsqueeze(0) #if forces: @@ -220,7 +226,7 @@ def fmodel(params, buffers, x): #output is a dict return output - def get_embeddings(self, dataset, batch_size, cluster=False, num_clusters=5000): + def get_embeddings(self, dataset, batch_size, cluster=False, num_clusters=20000): data_list = self.dataset['full'] device = torch.device("cuda" if torch.cuda.is_available() else "cpu") for i in range(len(self.trainer.model)): @@ -245,17 +251,16 @@ def get_embeddings(self, dataset, batch_size, cluster=False, num_clusters=5000): self.trainer.model[i].gradient = True embeddings = torch.cat(embeddings, dim=1) if cluster: - kmeans = KMeans(n_clusters=num_clusters) res = [] - silhouette_avg = 0 for i in range(len(self.trainer.model)): + clust = MiniBatchKMeans(init="k-means++", n_clusters=5000, batch_size=2048) start_time = time.time() - cluster_labels = kmeans.fit_predict(embeddings[i].cpu().detach().numpy()) - res.append(kmeans.cluster_centers_) - silhouette_avg += silhouette_score(embeddings[i].cpu().detach().numpy(), res[i]) + cluster_labels = clust.fit_predict(embeddings[i].cpu().detach().numpy()) print('Model', i, 'clustering took', time.time() - start_time) + res.append(clust.cluster_centers_) embeddings = torch.tensor(res) - print(f"New embeddings are {embeddings.size()} with a silhouette score of {silhouette_avg/len(self.trainer.model)}") + print(embeddings.size()) + print(f"New embeddings are {embeddings.size()}") return embeddings @@ -308,6 +313,7 @@ def optimize(self, atoms, steps, objective_func, log_per, learning_rate, num_str print("device:", device) for i in range(len(loader_iter)): batch = next(loader_iter).to(device) + print(batch) if getattr(objective_func, 'normalize', False): objective_func.set_norm_offset(batch.z, batch.n_atoms) pos, cell = batch.pos, batch.cell 
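A minimal sketch of the structure dict that process_data() accepts after this change (values are illustrative, not taken from the patch); forces, stress and the energy target are now read only when the corresponding keys are present:

    struc = {
        "atomic_numbers": [22, 8, 8],                                      # required
        "positions": [[0.0, 0.0, 0.0], [1.9, 0.0, 0.0], [0.0, 1.9, 0.0]],  # required
        "cell": [[4.0, 0.0, 0.0], [0.0, 4.0, 0.0], [0.0, 0.0, 4.0]],       # required, 3x3
        "structure_id": "example-0",                                       # optional
        "potential_energy": -12.3,                                         # optional; a 'y' key is also read as the target
        # 'forces' and 'stress' are likewise optional and used only if supplied
    }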
diff --git a/msp/optimizer/globalopt/basin_hopping.py b/msp/optimizer/globalopt/basin_hopping.py index 444a9da..75911b2 100644 --- a/msp/optimizer/globalopt/basin_hopping.py +++ b/msp/optimizer/globalopt/basin_hopping.py @@ -353,13 +353,24 @@ def predict(self, structures, objective_func, cell_relax=True, topk=1, batch_siz print("\t\tSoft sphere loss: ", res[j][-1]['soft_sphere_loss']) print("\t\tComposition: ", res[j][-1]['composition']) print("\t\tperturb: ", res[j][-1]['perturb']) - print('HOP', i, 'took', end_time - start_time, 'seconds') for j in range(len(new_atoms)): rand_ind = np.random.randint(len(self.perturbs)) prev_perturb[j] = self.perturbs[rand_ind] self.perturbs[rand_ind](new_atoms[j], num_atoms_perturb=num_atoms_perturb, num_unique=num_unique) + print('Final optimization') + best_atoms, obj_loss, energy_loss, novel_loss, soft_sphere_loss = self.forcefield.optimize(best_atoms, 300, objective_func, log_per, lr, batch_size=batch_size, cell_relax=cell_relax, optim=self.optimizer) avg_loss = 0 + for j, hop in enumerate(best_hop): + if getattr(objective_func, 'normalize', False): + res[j][hop] = {'hop': i, 'objective_loss': obj_loss[j][0], 'energy_loss': energy_loss[j][0], 'novel_loss': novel_loss[j][0], 'soft_sphere_loss': soft_sphere_loss[j][0], + 'unnormalized_loss' : objective_func.norm_to_raw_loss(energy_loss[j][0], new_atoms[j].get_atomic_numbers()), + 'perturb': prev_perturb[j].__name__, 'composition': new_atoms[j].get_atomic_numbers(), + 'structure': atoms_to_dict([new_atoms[j]], obj_loss[j])[0]} + else: + res[j][hop].append({'hop': i, 'objective_loss': obj_loss[j][0], 'energy_loss': energy_loss[j][0], 'novel_loss': novel_loss[j][0], 'soft_sphere_loss': soft_sphere_loss[j][0], + 'perturb': prev_perturb[j].__name__, 'composition': new_atoms[j].get_atomic_numbers(), + 'structure': atoms_to_dict([new_atoms[j]], obj_loss[j])[0]}) for j, hop in enumerate(best_hop): print("Structure: ", j) print('\tBest hop: ', hop) @@ -371,6 +382,7 @@ def predict(self, structures, objective_func, cell_relax=True, topk=1, batch_siz print("\tSoft sphere loss: ", res[j][hop]['soft_sphere_loss']) avg_loss += best_loss[j] print('Avg Objective Loss', avg_loss / len(new_atoms)) + min_atoms = atoms_to_dict(best_atoms, min_objective_loss) return res, min_atoms, best_hop, energies, accepts, accept_rate, temps, step_sizes diff --git a/msp/utils/objectives/objectives.py b/msp/utils/objectives/objectives.py index 675fafd..2c04e7e 100644 --- a/msp/utils/objectives/objectives.py +++ b/msp/utils/objectives/objectives.py @@ -45,7 +45,7 @@ def __init__(self, normalize=True, energy_ratio=1.0, ljr_ratio=1.0, ljr_power=12 -7.41385825, -9.51466466, -11.29141001, -12.94777968125, -14.26783833, -10000, -10000, -10000, -10000, -10000, -10000] def set_norm_offset(self, z, n_atoms): - self.offset = [0]*len(n_atoms) + self.offset = torch.zeros((len(n_atoms), 1)).to(z.device) curr = 0 self.lj_rmins = torch.tensor(self.lj_rmins).to(z.device) for i in range(len(n_atoms)): @@ -66,8 +66,9 @@ def lj_repulsion(self, data, power = 12): def forward(self, model_output, batch): if self.normalize: - for i in range(len(model_output['potential_energy'])): - model_output['potential_energy'][i] = (model_output['potential_energy'][i] + self.offset[i]) / batch.n_atoms[i] + model_output['potential_energy'] = (model_output['potential_energy'] + self.offset) / batch.n_atoms.unsqueeze(1) + # for i in range(len(batch.n_atoms)): + # model_output['potential_energy'][i] = (model_output['potential_energy'][i] + self.offset[i]) / batch.n_atoms[i] 
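        # Note on the vectorized form above: set_norm_offset() builds self.offset as a
        # (num_structures, 1) tensor of summed per-element reference energies, so adding it
        # and dividing by batch.n_atoms.unsqueeze(1) broadcasts one row per structure --
        # equivalent to the commented-out per-structure loop without Python-level iteration.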
ljr = self.lj_repulsion(batch, power=self.ljr_power) return self.energy_ratio * model_output["potential_energy"] + self.ljr_ratio * ljr, model_output["potential_energy"], torch.zeros(len(model_output['potential_energy']), 1).to(ljr.device), ljr @@ -122,11 +123,11 @@ def __init__(self, normalize=True, energy_ratio=1.0, uncertainty_ratio=.5, ljr_r def set_norm_offset(self, z, n_atoms): - self.offset = [0]*len(n_atoms) + self.offset = torch.zeros((len(n_atoms), 1)).to(z.device) curr = 0 self.lj_rmins = torch.tensor(self.lj_rmins).to(z.device) for i in range(len(n_atoms)): - temp = z[curr:curr+n_atoms[i]] + temp = z[curr:curr+n_atoms[i]].long() for j in temp: self.offset[i] -= self.element_energy[j] curr += n_atoms[i] @@ -150,8 +151,9 @@ def norm_to_raw_loss(self, loss, z): def forward(self, model_output, batch): if self.normalize: - for i in range(len(batch.n_atoms)): - model_output['potential_energy'][i] = (model_output['potential_energy'][i] + self.offset[i]) / batch.n_atoms[i] + model_output['potential_energy'] = (model_output['potential_energy'] + self.offset) / batch.n_atoms.unsqueeze(1) + # for i in range(len(batch.n_atoms)): + # model_output['potential_energy'][i] = (model_output['potential_energy'][i] + self.offset[i]) / batch.n_atoms[i] ljr = self.lj_repulsion(batch, power=self.ljr_power) return self.energy_ratio * model_output["potential_energy"] - self.uncertainty_ratio * model_output["potential_energy_uncertainty"] + self.ljr_ratio * ljr, model_output["potential_energy"], -model_output["potential_energy_uncertainty"], ljr @@ -191,11 +193,12 @@ def __init__(self, embeddings, normalize=True, energy_ratio=1.0, ljr_ratio=1, lj def set_norm_offset(self, z, n_atoms): - self.offset = [0]*len(n_atoms) + self.offset = torch.zeros((len(n_atoms), 1)).to(z.device) + self.embeddings = self.embeddings.to(z.device) curr = 0 self.lj_rmins = torch.tensor(self.lj_rmins).to(z.device) for i in range(len(n_atoms)): - temp = z[curr:curr+n_atoms[i]] + temp = z[curr:curr+n_atoms[i]].long() for j in temp: self.offset[i] -= self.element_energy[j] curr += n_atoms[i] @@ -219,8 +222,9 @@ def norm_to_raw_loss(self, loss, z): def forward(self, model_output, batch): if self.normalize: - for i in range(len(batch.n_atoms)): - model_output['potential_energy'][i] = (model_output['potential_energy'][i] + self.offset[i]) / batch.n_atoms[i] + model_output['potential_energy'] = (model_output['potential_energy'] + self.offset) / batch.n_atoms.unsqueeze(1) + # for i in range(len(batch.n_atoms)): + # model_output['potential_energy'][i] = (model_output['potential_energy'][i] + self.offset[i]) / batch.n_atoms[i] ljr = self.lj_repulsion(batch, power=self.ljr_power) embedding_loss = torch.cdist(model_output['embeddings'], self.embeddings, p=2) if self.mode == 'min': diff --git a/scripts/example.py b/scripts/example.py index a7ced3a..1bb692f 100644 --- a/scripts/example.py +++ b/scripts/example.py @@ -14,6 +14,7 @@ from ase import io from pymatgen.analysis.structure_matcher import StructureMatcher from pymatgen.io.ase import AseAtomsAdaptor +import time import matplotlib.pyplot as plt @@ -23,18 +24,18 @@ #or load dataset from disk: #my_dataset = load_dataset(path ="path/to/dataset") -my_dataset = json.load(open("../data/data_subset_msp.json", "r")) +my_dataset = json.load(open("/global/cfs/projectdirs/m3641/Shared/Materials_datasets/MP_data_latest/raw/data.json", "r")) #print(my_dataset[0]) max_iterations=1 #Initialize a forcefield class, reading in from config (we use MDL_FF but it can be a force field from another library) 
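# MDL_FF wraps one or more MatDeepLearn models (treated as an ensemble) defined by the YAML config below:
# train()/update() fit or fine-tune them, get_embeddings() extracts (optionally clustered) structure
# embeddings consumed by the EmbeddingDistance objective, and optimize() performs the gradient-based
# pos/cell relaxation used by the batch basin hopping predictor.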
train_config = 'mdl_config.yml' forcefield = MDL_FF(train_config, my_dataset) -embeddings = forcefield.get_embeddings(my_dataset, batch_size=64, cluster=False) +embeddings = forcefield.get_embeddings(my_dataset, batch_size=40, cluster=True) #predictor = BasinHoppingASE(forcefield, hops=5, steps=100, optimizer="FIRE", dr=0.5) -predictor_batch = BasinHoppingBatch(forcefield, hops=20, steps=100, dr=0.6, optimizer='Adam', batch_size=30, perturbs=['pos', 'cell']) +predictor_batch = BasinHoppingBatch(forcefield, hops=5, steps=100, dr=0.6, optimizer='Adam', perturbs=['pos', 'cell']) # forcefield_mace = MACE_FF() @@ -55,7 +56,7 @@ # compositions = sample_random_composition(dataset=my_dataset, n=1) # or manually specify the list of lists: # compositions = [[22, 22, 22, 22, 22, 22, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8] for _ in range(8)] - compositions = generate_random_compositions(my_dataset, n=8, max_elements=5, max_atoms=20) + compositions = generate_random_compositions(my_dataset, n=10000, max_elements=5, max_atoms=20) for comp in compositions: print(comp) initial_structures = [init_structure(c, pyxtal=True) for c in compositions] @@ -84,14 +85,14 @@ #---Optimizing a batch of structures with batch basin hopping--- # alternatively if we dont use ASE, we can optimize in batch, and optimize over multiple objectives as well # we do this by first initializing our objective function, which is similar to the loss function class in matdeeplearn - # objective_func = UpperConfidenceBound(c=0.1) # objective_func = Energy(normalize=True, ljr_ratio=1) objective_func = EmbeddingDistance(embeddings, normalize=True, energy_ratio=1, ljr_ratio=1, ljr_scale=.7, embedding_ratio=.1) - #objective_func = EnergyAndUncertainty(normalize=True, uncertainty_ratio=.5, ljr_ratio=100) - total_list_batch, minima_list_batch, best_hop, energies, accepts, accept_rate, temps, step_sizes = predictor_batch.predict(initial_structures, objective_func, batch_size=32, log_per=5, lr=.05) + # objective_func = EnergyAndUncertainty(normalize=True, uncertainty_ratio=.5, ljr_ratio=1, ljr_scale=.7) + start_time = time.time() + total_list_batch, minima_list_batch, best_hop, energies, accepts, accept_rate, temps, step_sizes = predictor_batch.predict(initial_structures, objective_func, batch_size=8, log_per=5, lr=.05) minima_list_batch = dict_to_atoms(minima_list_batch) for j, minima in enumerate(minima_list_batch): - filename = "iteration_"+str(i)+"_structure_"+str(j)+"_mdl_batch.cif" + filename = "clustering_predicted/iteration_"+str(i)+"_structure_"+str(j)+"_mdl_batch.cif" ase.io.write(filename, minima) f = open('output.txt', 'w') for i in range(len(total_list_batch)): @@ -108,6 +109,7 @@ f.write("\t\tComposition: " +str(hop['composition'])+ '\n') f.write("\t\tperturb: " +str(hop['perturb'])+ '\n') f.close() + print('Time taken: {:.2f}'.format(time.time() - start_time)) for i, energy_list in enumerate(energies): plt.scatter(range(len(energy_list)), energy_list, label=f'Structure {i + 1}', color=['g' if a else 'r' for a in accepts[i]]) From 4bd57d12a02a841941bf7c78db96892b7d4f5074 Mon Sep 17 00:00:00 2001 From: Rithwik Seth Date: Tue, 2 Apr 2024 14:53:03 -0700 Subject: [PATCH 02/16] Updated documentation and objective organization --- msp/composition/composition.py | 10 ++ msp/forcefield/mdl_ff.py | 55 +++--- msp/optimizer/globalopt/basin_hopping.py | 84 ++++++++- msp/structure/structure_util.py | 14 +- msp/utils/objectives/__init__.py | 4 +- msp/utils/objectives/objectives.py | 218 ++++++++++------------- scripts/example.py | 8 +- 7 files 
changed, 239 insertions(+), 154 deletions(-) diff --git a/msp/composition/composition.py b/msp/composition/composition.py index d28ee8c..0dc21e6 100644 --- a/msp/composition/composition.py +++ b/msp/composition/composition.py @@ -45,6 +45,7 @@ def generate_random_compositions(dataset, n=5, max_elements=5, max_atoms=20, ele n (int): number of compositions to generate max_elements (int): maximum number of elements in composition max_atoms (int): maximum number of atoms per element + elems_to_sample (list): list of elements to sample from Returns: compositions (list): list of compositions @@ -99,6 +100,15 @@ def generate_random_compositions(dataset, n=5, max_elements=5, max_atoms=20, ele return compositions def sample_random_composition(dataset, n=5): + """ + Sample n random compositions from the dataset + Args: + dataset (dict): dictionary of dataset + n (int): number of compositions to sample + + Returns: + dataset_comps (list): list of compositions + """ dataset_comps = [] for data in dataset: data['atomic_numbers'].sort() diff --git a/msp/forcefield/mdl_ff.py b/msp/forcefield/mdl_ff.py index 0940a0b..ea71e1b 100644 --- a/msp/forcefield/mdl_ff.py +++ b/msp/forcefield/mdl_ff.py @@ -140,11 +140,11 @@ def update(self, dataset, train_ratio, val_ratio, test_ratio, max_epochs=None, l def process_data(self, dataset): """ - Process data for the force field model. + Process data for the force field model by turning it from a list of dicts to list of Data objects. Args: - dataset (dict): A dictionary of the dataset. + dataset (dict): A list of dictionaries representing structures. Returns: - dict: A dictionary of the processed dataset. + dict: A list of Data objects. """ #add tqdm new_data_list = [Data() for _ in range(len(dataset))] @@ -152,12 +152,9 @@ def process_data(self, dataset): data = new_data_list[i] data.n_atoms = len(struc['atomic_numbers']) data.pos = torch.tensor(struc['positions']) - #check cell dimensions - #data.cell = torch.tensor([struc['cell']]) data.cell = torch.tensor(np.array(struc['cell']), dtype=torch.float).view(1, 3, 3) if (np.array(data.cell) == np.array([[0.0, 0.0, 0.0],[0.0, 0.0, 0.0],[0.0, 0.0, 0.0]])).all(): data.cell = torch.zeros((3,3)).unsqueeze(0) - #structure id optional or null if 'structure_id' in struc: data.structure_id = [struc['structure_id']] else: @@ -176,10 +173,6 @@ def process_data(self, dataset): data.y = torch.tensor([struc['y']]).float() if data.y.dim() == 1: data.y = data.y.unsqueeze(0) - #if forces: - # data.forces = torch.tensor(struc['forces']) - # if 'stress' in struc: - # data.stress = torch.tensor(struc['stress']) dataset = {"full": new_data_list} return dataset @@ -189,6 +182,8 @@ def _forward(self, batch_data, embeddings=False): Calls model directly Args: batch_data (torch_geometric.data.Data): A batch of data. + embeddings (bool): Whether to return embeddings. Defaults to False. + Returns: dict: A dictionary of the model output. """ @@ -208,6 +203,15 @@ def _forward(self, batch_data, embeddings=False): return output def _batched_forward(self, batch_data, embeddings = False): + """ + Calls model in parallel using torch.vmap + Args: + batch_data (torch_geometric.data.Data): A batch of data. + embeddings (bool): Whether to return embeddings. Defaults to False. + + Returns: + dict: A dictionary of the model output. 
+ """ if embeddings: def fmodel(params, buffers, x): output = functional_call(self.base_model, (params, buffers), (x,)) @@ -226,7 +230,18 @@ def fmodel(params, buffers, x): #output is a dict return output - def get_embeddings(self, dataset, batch_size, cluster=False, num_clusters=20000): + def get_embeddings(self, dataset, batch_size, cluster=False, num_clusters=5000): + """ + Get embeddings from the model for the dataset. + Args: + dataset (dict): A dictionary of the dataset. + batch_size (int): The batch size for the model. + cluster (bool): Whether to cluster the embeddings. Defaults to False. + num_clusters (int): The number of clusters to use. Defaults to 5000. + + Returns: + torch.tensor: The embeddings from the model. + """ data_list = self.dataset['full'] device = torch.device("cuda" if torch.cuda.is_available() else "cpu") for i in range(len(self.trainer.model)): @@ -288,10 +303,13 @@ def optimize(self, atoms, steps, objective_func, log_per, learning_rate, num_str device (str): The device to use for optimization. Defaults to 'cpu'. cell_relax (bool): Whether to relax the cell. Defaults to True. optim (str): The optimizer to use. Defaults to 'Adam'. + Returns: res_atoms (list): A list of optimized ASE atoms objects. - res_energy (list): A list of the energies of the optimized structures. - old_energy (list): A list of the energies of the initial structures. + obj_loss (list): A list of objective function losses for each structure. + energy_loss (list): A list of energy losses for each structure. + novel_loss (list): A list of novelty losses for each structure + soft_sphere_loss (list): A list of soft sphere losses for each structure. """ data_list = atoms_to_data(atoms) device = torch.device("cuda" if torch.cuda.is_available() else "cpu") @@ -313,18 +331,14 @@ def optimize(self, atoms, steps, objective_func, log_per, learning_rate, num_str print("device:", device) for i in range(len(loader_iter)): batch = next(loader_iter).to(device) - print(batch) if getattr(objective_func, 'normalize', False): objective_func.set_norm_offset(batch.z, batch.n_atoms) pos, cell = batch.pos, batch.cell - # batch.z = batch.z.type(torch.float32) - # optimized_z = batch.z opt = getattr(torch.optim, optim, 'Adam')([pos, cell], lr=learning_rate) lr_scheduler = ReduceLROnPlateau(opt, 'min', factor=0.8, patience=10) pos.requires_grad_(True) - # optimized_z.requires_grad_(True) if cell_relax: cell.requires_grad_(True) @@ -341,7 +355,6 @@ def closure(step, temp_obj, temp_energy, temp_novel, temp_soft_sphere, batch): curr_time = time.time() - start_time if log_per > 0 and step[0] % log_per == 0: - #print("{} {0:4d} {1: 3.6f}".format(output, step[0], loss.mean().item())) if cell_relax: print("Structure ID: {}, Step: {}, LJR Loss: {:.6f}, Pos Gradient: {:.6f}, Cell Gradient: {:.6f}, Time: {:.6f}".format(len(batch.structure_id), step[0], objective_loss.mean().item(), pos.grad.abs().mean().item(), cell.grad.abs().mean().item(), curr_time)) @@ -361,9 +374,7 @@ def closure(step, temp_obj, temp_energy, temp_novel, temp_soft_sphere, batch): old_step = step[0] loss = opt.step(lambda: closure(step, temp_obj, temp_energy, temp_novel, temp_soft_sphere, batch)) lr_scheduler.step(loss) - # print('optimizer step time', time.time()-start_time) - # print('steps taken', step[0] - old_step) - #print("learning rate: ", opt.param_groups[0]['lr']) + res_atoms.extend(data_to_atoms(batch)) obj_loss.extend(temp_obj[0].cpu().detach().numpy()) energy_loss.extend(temp_energy[0].cpu().detach().numpy()) @@ -385,12 +396,14 @@ def 
from_config_train(self, config, dataset, max_epochs=None, lr=None, batch_siz optim scheduler dataset + Args: config (dict): A dictionary of the configuration. dataset (dict): A dictionary of the dataset. max_epochs (int): The maximum number of epochs to train the model. Defaults to value in the training configuration file. lr (float): The learning rate for the model. Defaults to value in the training configuration file. batch_size (int): The batch size for the model. Defaults to value in the training configuration file. + Returns: PropertyTrainer: A property trainer object. """ diff --git a/msp/optimizer/globalopt/basin_hopping.py b/msp/optimizer/globalopt/basin_hopping.py index 75911b2..1ac189d 100644 --- a/msp/optimizer/globalopt/basin_hopping.py +++ b/msp/optimizer/globalopt/basin_hopping.py @@ -24,6 +24,8 @@ def __init__(self, name, hops=5, steps=100, optimizer="FIRE", dr=.5, max_atom_nu optimizer (str, optional): Optimizer to use for each step. Defaults to "FIRE". dr (int, optional): rate at which to change values max_atom_num (int, optional): maximum atom number to be considered + perturbs (list, optional): list of perturbations to apply. Defaults to ['pos', 'cell', 'atomic_num', 'add', 'remove', 'swap'] + elems_to_sample (list, optional): list of elements to sample from. Defaults to None """ super().__init__(name, hops=hops, steps=steps, optimizer=optimizer, dr=dr, **kwargs) self.steps = steps @@ -47,6 +49,11 @@ def __init__(self, name, hops=5, steps=100, optimizer="FIRE", dr=.5, max_atom_nu def perturbPos(self, atoms, **kwargs): """ Perturbs the positions of the atoms in the structure + Args: + atoms (Atoms): ASE Atoms object to perturb + + Returns: + None """ if isinstance(atoms, ExpCellFilter): disp = np.random.uniform(-1., 1., (len(atoms.atoms), 3)) * self.dr @@ -58,6 +65,11 @@ def perturbPos(self, atoms, **kwargs): def perturbCell(self, atoms, **kwargs): """ Perturbs the cell of the atoms in the structure + Args: + atoms (Atoms): ASE Atoms object to perturb + + Returns: + None """ if isinstance(atoms, ExpCellFilter): disp = np.random.uniform(-1., 1., (3, 3)) * self.dr @@ -69,6 +81,13 @@ def perturbCell(self, atoms, **kwargs): def perturbAtomicNum(self, atoms, num_atoms_perturb=1, num_unique=4, **kwargs): """ Perturbs the atomic numbers of the atoms in the structure + Args: + atoms (Atoms): ASE Atoms object to perturb + num_atoms_perturb (int, optional): Number of atoms to perturb. Defaults to 1. + num_unique (int, optional): Number of unique atoms in the structure. Defaults to 4. + + Returns: + None """ if isinstance(atoms, ExpCellFilter): un = np.unique(atoms.atoms.get_atomic_numbers()) @@ -102,6 +121,12 @@ def perturbAtomicNum(self, atoms, num_atoms_perturb=1, num_unique=4, **kwargs): def addAtom(self, atoms, num_unique=4, **kwargs): """ Adds an atom to the structure + Args: + atoms (Atoms): ASE Atoms object to perturb + num_unique (int, optional): Number of unique atoms in the structure. Defaults to 4. 
+ + Returns: + None """ if isinstance(atoms, ExpCellFilter): un = np.unique(atoms.atoms.get_atomic_numbers()) @@ -131,6 +156,11 @@ def addAtom(self, atoms, num_unique=4, **kwargs): def removeAtom(self, atoms, **kwargs): """ Removes an atom from the structure + Args: + atoms (Atoms): ASE Atoms object to perturb + + Returns: + None """ if isinstance(atoms, ExpCellFilter): if len(atoms.atoms) > 2: @@ -142,6 +172,11 @@ def removeAtom(self, atoms, **kwargs): def swapAtom(self, atoms, **kwargs): """ Swaps two atoms in the structure + Args: + atoms (Atoms): ASE Atoms object to perturb + + Returns: + None """ if isinstance(atoms, ExpCellFilter): nums = atoms.atoms.get_atomic_numbers() @@ -155,6 +190,18 @@ def swapAtom(self, atoms, **kwargs): atoms.set_atomic_numbers(nums) def change_temp(self, temp, accepts, interval=10, target_ratio=0.5, rate=0.1): + """ + Changes the temperature based on the acceptance ratio + Args: + temp (float): Temperature to change + accepts (list): List of acceptance ratios + interval (int, optional): Interval to check acceptance ratio. Defaults to 10. + target_ratio (float, optional): Target acceptance ratio. Defaults to 0.5. + rate (float, optional): Rate of change. Defaults to 0.1. + + Returns: + float: New temperature + """ if len(accepts) % interval == 0 and len(accepts) != 0: if sum(accepts[-interval:]) / interval <= target_ratio: temp *= 1 + rate @@ -165,6 +212,16 @@ def change_temp(self, temp, accepts, interval=10, target_ratio=0.5, rate=0.1): return temp def change_dr(self, accepts, interval=10, target_ratio=0.5, rate=0.1): + """ + Changes the dr based on the acceptance ratio + Args: + accepts (list): List of acceptance ratios + interval (int, optional): Interval to check acceptance ratio. Defaults to 10. + target_ratio (float, optional): Target acceptance ratio. Defaults to 0.5. + rate (float, optional): Rate of change. Defaults to 0.1. + Returns: + None + """ if len(accepts) % interval == 0 and len(accepts) != 0: if sum(accepts[-interval:]) / interval <= target_ratio: self.dr /= 1 + rate @@ -173,6 +230,16 @@ def change_dr(self, accepts, interval=10, target_ratio=0.5, rate=0.1): self.dr = max(0.1, min(self.dr, 1)) def accept(self, old_energy, newEnergy, temp): + """ + Acceptance criterion for the new energy + Args: + old_energy (float): Old energy + newEnergy (float): New energy + temp (float): Temperature + + Returns: + bool: Whether to accept the new energy + """ return np.random.rand() < np.exp(-(newEnergy - old_energy) / temp) class BasinHoppingASE(BasinHoppingBase): @@ -190,6 +257,8 @@ def __init__(self, forcefield, hops=5, steps=100, optimizer="FIRE", dr=.5, max_a optimizer (str, optional): Optimizer to use for each step. Defaults to "FIRE". dr (int, optional): rate at which to change values. Defaults to .5. max_atom_num (int, optional): maximum atom number to be considered, exclusive. Defaults to 101. + perturbs (list, optional): list of perturbations to apply. Defaults to ['pos', 'cell', 'atomic_num', 'add', 'remove', 'swap'] + elems_to_sample (list, optional): list of elements to sample from. Defaults to None """ super().__init__("BasinHoppingASE", hops=hops, steps=steps, optimizer=optimizer, dr=dr, max_atom_num=max_atom_num, perturbs=perturbs, elems_to_sample=elems_to_sample, **kwargs) @@ -205,6 +274,7 @@ def predict(self, structures, cell_relax=True, topk=1, num_atoms_perturb=1, num_ cell_relax (bool, optional): whether to relax cell or not. Defaults to True. topk (int, optional): Number of best performing structures to save per composition. 
Defaults to 1. num_atoms_perturb (int, optional): number of atoms to perturb for perturbAtomicNum. Defaults to 1. + num_unique (int, optional): number of unique atoms in the structure. Defaults to 4. Returns: list: A list of ase.Atoms objects representing the predicted minima @@ -266,6 +336,8 @@ def __init__(self, forcefield, hops=5, steps=100, optimizer="Adam", dr=.5, max_a optimizer (str, optional): Optimizer to use for each step. Defaults to "Adam". dr (int, optional): rate at which to change values. Defaults to .5. max_atom_num (int, optional): maximum atom number to be considered, exclusive. Defaults to 101. + perturbs (list, optional): list of perturbations to apply. Defaults to ['pos', 'cell', 'atomic_num', 'add', 'remove', 'swap'] + elems_to_sample (list, optional): list of elements to sample from. Defaults to None """ super().__init__("BasinHopping", hops=hops, steps=steps, optimizer=optimizer, dr=dr, max_atom_num=max_atom_num, perturbs=perturbs, elems_to_sample=elems_to_sample, **kwargs) self.forcefield = forcefield @@ -284,9 +356,19 @@ def predict(self, structures, objective_func, cell_relax=True, topk=1, batch_siz log_per (int, optional): Print log messages for every log_per steps. Defaults to 0 (no logging). lr (int, optional): Learning rate for optimizer. Defaults to .5. num_atoms_perturb (int, optional): number of atoms to perturb for perturbAtomicNum + num_unique (int, optional): number of unique atoms in the structure. Defaults to 4. + dynamic_temp (bool, optional): Whether to change temperature dynamically. Defaults to False. + dynamic_dr (bool, optional): Whether to change dr dynamically. Defaults to False. Returns: - list: A list of ase.Atoms objects representing the predicted minima + res (list): A list of dictionaries containing the optimization results + min_atoms (list): A list of dictionaries containing the optimized structures + best_hop (list): A list of the best hops for each structure + energies (list): A list of the energies for each structure + accepts (list): A list of the acceptance ratios for each structure + accept_rate (list): A list of the acceptance rates for each structure + temps (list): A list of the temperatures for each structure + step_sizes (list): A list of the step sizes for each structure """ new_atoms = dict_to_atoms(structures) min_atoms = deepcopy(new_atoms) diff --git a/msp/structure/structure_util.py b/msp/structure/structure_util.py index 408a096..66d29c9 100644 --- a/msp/structure/structure_util.py +++ b/msp/structure/structure_util.py @@ -15,6 +15,8 @@ def init_structure(composition, pyxtal=False, density=.2): Args: composition (list): A list of the atomic numbers + pyxtal (bool): If True, tries to use pyxtal to generate a random symmetric structure. + If False, generates a completely random structure. Returns: dict: representing structure @@ -87,7 +89,7 @@ def atoms_to_dict(atoms, loss=None): Args: atoms (list): A list of ASE atoms objects - energy (list): A list of predicted energies for each ASE atoms object. + loss (list): A list of predicted losses for each ASE atoms object. Returns: list: Contains atoms represented as dicts @@ -168,6 +170,16 @@ def data_to_atoms(batch): def smact_validity(comp, count, use_pauling_test=True, include_alloys=True): + """ + Check if a composition is valid according to the SMACt screening rules. + Args: + comp (list): List of atomic numbers + count (list): List of counts for each atomic number + use_pauling_test (bool): If True, uses the Pauling test to check if the electronegativity is valid. 
+ include_alloys (bool): If True, allows for the composition to be an alloy. + Returns: + bool: True if the composition is valid, False otherwise. + """ elem_symbols = tuple([chemical_symbols[elem] for elem in comp]) space = smact.element_dictionary(elem_symbols) smact_elems = [e[1] for e in space.items()] diff --git a/msp/utils/objectives/__init__.py b/msp/utils/objectives/__init__.py index 79ff9ec..d1e4c29 100644 --- a/msp/utils/objectives/__init__.py +++ b/msp/utils/objectives/__init__.py @@ -1,3 +1,3 @@ -__all__ = ["UpperConfidenceBound", "Energy", "Uncertainty", "EnergyAndUncertainty", "EmbeddingDistance"] +__all__ = ["Energy", "EnergyAndUncertainty", "EmbeddingDistance"] -from .objectives import UpperConfidenceBound, Energy, Uncertainty, EnergyAndUncertainty, EmbeddingDistance \ No newline at end of file +from .objectives import Energy, EnergyAndUncertainty, EmbeddingDistance \ No newline at end of file diff --git a/msp/utils/objectives/objectives.py b/msp/utils/objectives/objectives.py index 2c04e7e..8ec7617 100644 --- a/msp/utils/objectives/objectives.py +++ b/msp/utils/objectives/objectives.py @@ -5,22 +5,19 @@ from torch_scatter import scatter_add from mendeleev.fetch import fetch_table - -class UpperConfidenceBound(torch.nn.Module): - - def __init__(self, c): - super().__init__() - """ - Initialize - """ - pass class Energy(torch.nn.Module): def __init__(self, normalize=True, energy_ratio=1.0, ljr_ratio=1.0, ljr_power=12, ljr_scale = .8): super().__init__() """ - Initialize + Initialize objective function using only energy and no novel loss + Args: + normalize (bool): Whether to normalize the energy by the number of atoms + energy_ratio (float): Weight of the energy in the loss + ljr_ratio (float): Weight of the Lennard-Jones repulsion in the loss + ljr_power (int): Power for the Lennard-Jones repulsion calculation + ljr_scale (float): Scaling factor for the Lennard-Jones repulsion """ self.normalize = normalize self.ljr_power = ljr_power @@ -45,6 +42,12 @@ def __init__(self, normalize=True, energy_ratio=1.0, ljr_ratio=1.0, ljr_power=12 -7.41385825, -9.51466466, -11.29141001, -12.94777968125, -14.26783833, -10000, -10000, -10000, -10000, -10000, -10000] def set_norm_offset(self, z, n_atoms): + """ + Set the offset for the energy normalization + Args: + z (torch.Tensor): Atomic numbers of the atoms in the batch + n_atoms (torch.Tensor): Number of atoms in each structure in the batch + """ self.offset = torch.zeros((len(n_atoms), 1)).to(z.device) curr = 0 self.lj_rmins = torch.tensor(self.lj_rmins).to(z.device) @@ -55,101 +58,84 @@ def set_norm_offset(self, z, n_atoms): curr += n_atoms[i] def lj_repulsion(self, data, power = 12): - temp = data.z.long() - rmins = self.lj_rmins[(temp[data.edge_index[0]] - 1), - (temp[data.edge_index[1]] - 1)] + """ + Calculate the Lennard-Jones repulsion + Args: + data (torch_geometric.data.Data): Data object containing the structure + power (int): Power for the Lennard-Jones repulsion calculation + Returns: + torch.Tensor: Lennard-Jones repulsion + """ + rmins = self.lj_rmins[(data.z[data.edge_index[0]] - 1), + (data.z[data.edge_index[1]] - 1)] repulsions = torch.where(rmins <= data.edge_weight, 1.0, torch.pow(rmins / data.edge_weight, power)) edge_idx_to_graph = data.batch[data.edge_index[0]] lennard_jones_out = scatter_add(repulsions - 1, index=edge_idx_to_graph, dim_size=len(data)) return lennard_jones_out.unsqueeze(1) - - def forward(self, model_output, batch): - if self.normalize: - model_output['potential_energy'] = 
(model_output['potential_energy'] + self.offset) / batch.n_atoms.unsqueeze(1) - # for i in range(len(batch.n_atoms)): - # model_output['potential_energy'][i] = (model_output['potential_energy'][i] + self.offset[i]) / batch.n_atoms[i] - ljr = self.lj_repulsion(batch, power=self.ljr_power) - return self.energy_ratio * model_output["potential_energy"] + self.ljr_ratio * ljr, model_output["potential_energy"], torch.zeros(len(model_output['potential_energy']), 1).to(ljr.device), ljr def norm_to_raw_loss(self, loss, z): + """ + Convert normalized loss to raw loss + Args: + loss (torch.Tensor): Normalized loss + z (torch.Tensor): Atomic numbers of the atoms in the batch + Returns: + torch.Tensor: Raw loss + """ offset = 0 for num in z: offset -= self.element_energy[num] loss *= len(z) loss -= offset return loss - -class Uncertainty(torch.nn.Module): - def __init__(self): - super().__init__() + def forward(self, model_output, batch): """ - Initialize + Forward pass of the objective function + Args: + model_output (dict): Output of the model + batch (torch_geometric.data.Batch): Batch of data + Returns: + torch.Tensor: Objective Loss + torch.Tensor: Potential energy + torch.Tensor: Novel loss + torch.Tensor: Lennard-Jones repulsion """ - pass - - def forward(self, model_output, **kwargs): - - return model_output["potential_energy_uncertainty"] + if self.normalize: + model_output['potential_energy'] = (model_output['potential_energy'] + self.offset) / batch.n_atoms.unsqueeze(1) + # for i in range(len(batch.n_atoms)): + # model_output['potential_energy'][i] = (model_output['potential_energy'][i] + self.offset[i]) / batch.n_atoms[i] + ljr = self.lj_repulsion(batch, power=self.ljr_power) + return self.energy_ratio * model_output["potential_energy"] + self.ljr_ratio * ljr, model_output["potential_energy"], torch.zeros(len(model_output['potential_energy']), 1).to(ljr.device), ljr -class EnergyAndUncertainty(torch.nn.Module): - def __init__(self, normalize=True, energy_ratio=1.0, uncertainty_ratio=.5, ljr_ratio=1, ljr_power=12, ljr_scale=.8): - super().__init__() +class EnergyAndUncertainty(Energy): + def __init__(self, normalize=True, energy_ratio=1.0, ljr_ratio=1, ljr_power=12, ljr_scale=.8, uncertainty_ratio=.25): """ - Initialize + Initialize objective function using energy and uncertainty as novel loss + Args: + normalize (bool): Whether to normalize the energy by the number of atoms + energy_ratio (float): Weight of the energy in the loss + ljr_ratio (float): Weight of the Lennard-Jones repulsion in the loss + ljr_power (int): Power for the Lennard-Jones repulsion calculation + ljr_scale (float): Scaling factor for the Lennard-Jones repulsion + uncertainty_ratio (float): Weight of the uncertainty in the loss """ + super().__init__(normalize, energy_ratio, ljr_ratio, ljr_power, ljr_scale) self.uncertainty_ratio = uncertainty_ratio - self.ljr_ratio = ljr_ratio - self.ljr_power = ljr_power - self.normalize = normalize - self.energy_ratio = energy_ratio - self.lj_rmins = np.load(str(Path(__file__).parent / "lj_rmins.npy")) * ljr_scale - if normalize: - self.element_energy = [-1, -3.392726045, -0.00905951, -1.9089228666666667, -3.739412865, -6.679391770833334, - -9.2286654925, -8.336494925, -4.947961005, -1.9114789675, -0.02593678, -1.3225252934482759, - -1.60028005, -3.74557583, -5.42531803, -5.413302506666667, -4.136449866875, -1.84853666, - -0.06880822, -1.110398947, -2.00559988, -6.332469105, -7.895492016666666, -9.08390607, -9.65304747, - -9.162015292068965, -8.47002121, -7.108317795, -5.78013668, 
-4.09920667, -1.25974361, -3.0280960225, - -4.623027855, -4.659118405, -3.49591147765625, -1.636946535, -0.05671467, -0.9805340725, - -1.6894934533333332, -6.466471113333333, -8.54770063, -10.10130504, -10.84565011, -10.360638945, - -9.27440254, -7.36430787, -5.17988181, -2.8325560033333335, -0.92288976, -2.75168373, -4.009571855, - -4.12900124, -3.1433058933333338, -1.524012615, -0.03617417, -0.8954023720689656, -1.91897494, - -4.936007105, -5.933089155, -4.780905755, -4.7681474325, -4.7505423225, -4.718586135, -10.2570018, - -14.07612224, -4.6343661, -4.60678684, -4.58240887, -4.56771881, -4.475835423333334, 999, -4.52095052, - -9.95718903, -11.85777763, -12.95813023, -12.444527185, -11.22736743, -8.83843418, -6.07113332, -3.273882, - -0.303680365, -2.3626431466666666, -3.71264707, -3.89003431, 999, 999, 999, 999, 999, -4.1211750075, - -7.41385825, -9.51466466, -11.29141001, -12.94777968125, -14.26783833, 999, 999, 999, 999, 999, 999] - - - def set_norm_offset(self, z, n_atoms): - self.offset = torch.zeros((len(n_atoms), 1)).to(z.device) - curr = 0 - self.lj_rmins = torch.tensor(self.lj_rmins).to(z.device) - for i in range(len(n_atoms)): - temp = z[curr:curr+n_atoms[i]].long() - for j in temp: - self.offset[i] -= self.element_energy[j] - curr += n_atoms[i] - - def lj_repulsion(self, data, power = 12): - rmins = self.lj_rmins[(data.z[data.edge_index[0]] - 1), - (data.z[data.edge_index[1]] - 1)] - repulsions = torch.where(rmins <= data.edge_weight, - 1.0, torch.pow(rmins / data.edge_weight, power)) - edge_idx_to_graph = data.batch[data.edge_index[0]] - lennard_jones_out = scatter_add(repulsions - 1, index=edge_idx_to_graph, dim_size=len(data)) - return lennard_jones_out.unsqueeze(1) - - def norm_to_raw_loss(self, loss, z): - offset = 0 - for num in z: - offset -= self.element_energy[num] - loss *= len(z) - loss -= offset - return loss def forward(self, model_output, batch): + """ + Forward pass of the objective function + Args: + model_output (dict): Output of the model + batch (torch_geometric.data.Batch): Batch of data + Returns: + torch.Tensor: Objective Loss + torch.Tensor: Potential energy + torch.Tensor: Novel loss + torch.Tensor: Lennard-Jones repulsion + """ if self.normalize: model_output['potential_energy'] = (model_output['potential_energy'] + self.offset) / batch.n_atoms.unsqueeze(1) # for i in range(len(batch.n_atoms)): @@ -160,37 +146,25 @@ def forward(self, model_output, batch): -class EmbeddingDistance(torch.nn.Module): +class EmbeddingDistance(Energy): def __init__(self, embeddings, normalize=True, energy_ratio=1.0, ljr_ratio=1, ljr_power=12, ljr_scale=.8, embedding_ratio=.1, mode="min"): - super().__init__() """ - Initialize + Initialize objective function using only energy and embedding distance as novel loss + embedding distance is aggregated euclidean distance between structure embedding and database embeddings + Args: + embeddings (torch.Tensor): Embeddings of the database structures + normalize (bool): Whether to normalize the energy by the number of atoms + energy_ratio (float): Weight of the energy in the loss + ljr_ratio (float): Weight of the Lennard-Jones repulsion in the loss + ljr_power (int): Power for the Lennard-Jones repulsion calculation + ljr_scale (float): Scaling factor for the Lennard-Jones repulsion + embedding_ratio (float): Weight of the embedding distance in the loss + mode (str): Aggregation mode for the embedding distance, either "min" or "mean" """ + super().__init__(normalize, energy_ratio, ljr_ratio, ljr_power, ljr_scale) self.embedding_ratio 
= embedding_ratio - self.ljr_ratio = ljr_ratio - self.ljr_power = ljr_power - self.normalize = normalize - self.energy_ratio = energy_ratio - self.lj_rmins = np.load(str(Path(__file__).parent / "lj_rmins.npy")) * ljr_scale self.embeddings = embeddings self.mode = mode - - if normalize: - self.element_energy = [-1, -3.392726045, -0.00905951, -1.9089228666666667, -3.739412865, -6.679391770833334, - -9.2286654925, -8.336494925, -4.947961005, -1.9114789675, -0.02593678, -1.3225252934482759, - -1.60028005, -3.74557583, -5.42531803, -5.413302506666667, -4.136449866875, -1.84853666, - -0.06880822, -1.110398947, -2.00559988, -6.332469105, -7.895492016666666, -9.08390607, -9.65304747, - -9.162015292068965, -8.47002121, -7.108317795, -5.78013668, -4.09920667, -1.25974361, -3.0280960225, - -4.623027855, -4.659118405, -3.49591147765625, -1.636946535, -0.05671467, -0.9805340725, - -1.6894934533333332, -6.466471113333333, -8.54770063, -10.10130504, -10.84565011, -10.360638945, - -9.27440254, -7.36430787, -5.17988181, -2.8325560033333335, -0.92288976, -2.75168373, -4.009571855, - -4.12900124, -3.1433058933333338, -1.524012615, -0.03617417, -0.8954023720689656, -1.91897494, - -4.936007105, -5.933089155, -4.780905755, -4.7681474325, -4.7505423225, -4.718586135, -10.2570018, - -14.07612224, -4.6343661, -4.60678684, -4.58240887, -4.56771881, -4.475835423333334, 999, -4.52095052, - -9.95718903, -11.85777763, -12.95813023, -12.444527185, -11.22736743, -8.83843418, -6.07113332, -3.273882, - -0.303680365, -2.3626431466666666, -3.71264707, -3.89003431, 999, 999, 999, 999, 999, -4.1211750075, - -7.41385825, -9.51466466, -11.29141001, -12.94777968125, -14.26783833, 999, 999, 999, 999, 999, 999] - def set_norm_offset(self, z, n_atoms): self.offset = torch.zeros((len(n_atoms), 1)).to(z.device) @@ -203,24 +177,18 @@ def set_norm_offset(self, z, n_atoms): self.offset[i] -= self.element_energy[j] curr += n_atoms[i] - def lj_repulsion(self, data, power = 12): - rmins = self.lj_rmins[(data.z[data.edge_index[0]] - 1), - (data.z[data.edge_index[1]] - 1)] - repulsions = torch.where(rmins <= data.edge_weight, - 1.0, torch.pow(rmins / data.edge_weight, power)) - edge_idx_to_graph = data.batch[data.edge_index[0]] - lennard_jones_out = scatter_add(repulsions - 1, index=edge_idx_to_graph, dim_size=len(data)) - return lennard_jones_out.unsqueeze(1) - - def norm_to_raw_loss(self, loss, z): - offset = 0 - for num in z: - offset -= self.element_energy[num] - loss *= len(z) - loss -= offset - return loss - def forward(self, model_output, batch): + """ + Forward pass of the objective function + Args: + model_output (dict): Output of the model + batch (torch_geometric.data.Batch): Batch of data + Returns: + torch.Tensor: Objective Loss + torch.Tensor: Potential energy + torch.Tensor: Novel loss + torch.Tensor: Lennard-Jones repulsion + """ if self.normalize: model_output['potential_energy'] = (model_output['potential_energy'] + self.offset) / batch.n_atoms.unsqueeze(1) # for i in range(len(batch.n_atoms)): diff --git a/scripts/example.py b/scripts/example.py index 1bb692f..0c67ecd 100644 --- a/scripts/example.py +++ b/scripts/example.py @@ -3,7 +3,7 @@ from msp.composition import generate_random_compositions, sample_random_composition from msp.forcefield import MDL_FF, MACE_FF, M3GNet_FF from msp.optimizer.globalopt.basin_hopping import BasinHoppingASE, BasinHoppingBatch -from msp.utils.objectives import EnergyAndUncertainty, Energy, Uncertainty, EmbeddingDistance +from msp.utils.objectives import EnergyAndUncertainty, Energy, 
EmbeddingDistance from msp.structure.structure_util import dict_to_atoms, init_structure, atoms_to_dict from msp.validate import read_dft_config, setup_DFT, Validate import pickle as pkl @@ -31,7 +31,7 @@ #Initialize a forcefield class, reading in from config (we use MDL_FF but it can be a force field from another library) train_config = 'mdl_config.yml' forcefield = MDL_FF(train_config, my_dataset) -embeddings = forcefield.get_embeddings(my_dataset, batch_size=40, cluster=True) +embeddings = forcefield.get_embeddings(my_dataset, batch_size=40, cluster=False) #predictor = BasinHoppingASE(forcefield, hops=5, steps=100, optimizer="FIRE", dr=0.5) @@ -87,12 +87,12 @@ # we do this by first initializing our objective function, which is similar to the loss function class in matdeeplearn # objective_func = Energy(normalize=True, ljr_ratio=1) objective_func = EmbeddingDistance(embeddings, normalize=True, energy_ratio=1, ljr_ratio=1, ljr_scale=.7, embedding_ratio=.1) - # objective_func = EnergyAndUncertainty(normalize=True, uncertainty_ratio=.5, ljr_ratio=1, ljr_scale=.7) + # objective_func = EnergyAndUncertainty(normalize=True, uncertainty_ratio=.25, ljr_ratio=1, ljr_scale=.7) start_time = time.time() total_list_batch, minima_list_batch, best_hop, energies, accepts, accept_rate, temps, step_sizes = predictor_batch.predict(initial_structures, objective_func, batch_size=8, log_per=5, lr=.05) minima_list_batch = dict_to_atoms(minima_list_batch) for j, minima in enumerate(minima_list_batch): - filename = "clustering_predicted/iteration_"+str(i)+"_structure_"+str(j)+"_mdl_batch.cif" + filename = "attempt2_no_clustering/iteration_"+str(i)+"_structure_"+str(j)+"_mdl_batch.cif" ase.io.write(filename, minima) f = open('output.txt', 'w') for i in range(len(total_list_batch)): From b5b6443daa4a40b068499c980aef328c1ab631c5 Mon Sep 17 00:00:00 2001 From: Rithwik Seth Date: Tue, 9 Apr 2024 12:36:05 -0700 Subject: [PATCH 03/16] modified elements, added mass calculations --- msp/composition/composition.py | 3 +-- msp/forcefield/mdl_ff.py | 5 ++--- msp/optimizer/globalopt/basin_hopping.py | 9 ++++----- msp/structure/structure_util.py | 21 ++++++++++++--------- msp/utils/objectives/objectives.py | 11 +++++++---- scripts/example.py | 23 ++++++++++++++--------- 6 files changed, 40 insertions(+), 32 deletions(-) diff --git a/msp/composition/composition.py b/msp/composition/composition.py index 0dc21e6..45e45c3 100644 --- a/msp/composition/composition.py +++ b/msp/composition/composition.py @@ -56,8 +56,7 @@ def generate_random_compositions(dataset, n=5, max_elements=5, max_atoms=20, ele if len(elems_to_sample) == 0: elems_to_sample = [1, 3, 4, 5, 6, 7, 8, 9, 11, 12, 13, 14, 15, 16, 17, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, - 48, 49, 50, 51, 52, 53, 55, 56, 57, 58, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, - 89, 90, 91, 92, 93, 94] + 48, 49, 50, 51, 52, 53, 55, 56, 57, 58, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83] for i in range(n): while True: comp = [] diff --git a/msp/forcefield/mdl_ff.py b/msp/forcefield/mdl_ff.py index ea71e1b..f9be57e 100644 --- a/msp/forcefield/mdl_ff.py +++ b/msp/forcefield/mdl_ff.py @@ -153,6 +153,7 @@ def process_data(self, dataset): data.n_atoms = len(struc['atomic_numbers']) data.pos = torch.tensor(struc['positions']) data.cell = torch.tensor(np.array(struc['cell']), dtype=torch.float).view(1, 3, 3) + if (np.array(data.cell) == np.array([[0.0, 0.0, 0.0],[0.0, 0.0, 0.0],[0.0, 0.0, 0.0]])).all(): data.cell = 
torch.zeros((3,3)).unsqueeze(0) if 'structure_id' in struc: @@ -274,7 +275,6 @@ def get_embeddings(self, dataset, batch_size, cluster=False, num_clusters=5000): print('Model', i, 'clustering took', time.time() - start_time) res.append(clust.cluster_centers_) embeddings = torch.tensor(res) - print(embeddings.size()) print(f"New embeddings are {embeddings.size()}") return embeddings @@ -331,8 +331,7 @@ def optimize(self, atoms, steps, objective_func, log_per, learning_rate, num_str print("device:", device) for i in range(len(loader_iter)): batch = next(loader_iter).to(device) - if getattr(objective_func, 'normalize', False): - objective_func.set_norm_offset(batch.z, batch.n_atoms) + objective_func.set_norm_offset(batch.z, batch.n_atoms) pos, cell = batch.pos, batch.cell opt = getattr(torch.optim, optim, 'Adam')([pos, cell], lr=learning_rate) diff --git a/msp/optimizer/globalopt/basin_hopping.py b/msp/optimizer/globalopt/basin_hopping.py index 1ac189d..10ed1f0 100644 --- a/msp/optimizer/globalopt/basin_hopping.py +++ b/msp/optimizer/globalopt/basin_hopping.py @@ -41,8 +41,7 @@ def __init__(self, name, hops=5, steps=100, optimizer="FIRE", dr=.5, max_atom_nu if elems_to_sample is None: self.elems = [1, 3, 4, 5, 6, 7, 8, 9, 11, 12, 13, 14, 15, 16, 17, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, - 48, 49, 50, 51, 52, 53, 55, 56, 57, 58, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, - 89, 90, 91, 92, 93, 94] + 48, 49, 50, 51, 52, 53, 55, 56, 57, 58, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83] else: self.elems = elems_to_sample @@ -450,9 +449,9 @@ def predict(self, structures, objective_func, cell_relax=True, topk=1, batch_siz 'perturb': prev_perturb[j].__name__, 'composition': new_atoms[j].get_atomic_numbers(), 'structure': atoms_to_dict([new_atoms[j]], obj_loss[j])[0]} else: - res[j][hop].append({'hop': i, 'objective_loss': obj_loss[j][0], 'energy_loss': energy_loss[j][0], 'novel_loss': novel_loss[j][0], 'soft_sphere_loss': soft_sphere_loss[j][0], + res[j][hop] = {'hop': i, 'objective_loss': obj_loss[j][0], 'energy_loss': energy_loss[j][0], 'novel_loss': novel_loss[j][0], 'soft_sphere_loss': soft_sphere_loss[j][0], 'perturb': prev_perturb[j].__name__, 'composition': new_atoms[j].get_atomic_numbers(), - 'structure': atoms_to_dict([new_atoms[j]], obj_loss[j])[0]}) + 'structure': atoms_to_dict([new_atoms[j]], obj_loss[j])[0]} for j, hop in enumerate(best_hop): print("Structure: ", j) print('\tBest hop: ', hop) @@ -462,7 +461,7 @@ def predict(self, structures, objective_func, cell_relax=True, topk=1, batch_siz print("\tUnnormalized energy loss: ", res[j][hop]['unnormalized_loss']) print("\tNovel loss: ", res[j][hop]['novel_loss']) print("\tSoft sphere loss: ", res[j][hop]['soft_sphere_loss']) - avg_loss += best_loss[j] + avg_loss += res[j][hop]['objective_loss'] print('Avg Objective Loss', avg_loss / len(new_atoms)) min_atoms = atoms_to_dict(best_atoms, min_objective_loss) diff --git a/msp/structure/structure_util.py b/msp/structure/structure_util.py index 66d29c9..0547734 100644 --- a/msp/structure/structure_util.py +++ b/msp/structure/structure_util.py @@ -2,14 +2,14 @@ from ase import Atoms import torch from torch_geometric.data import Data -from ase.data import chemical_symbols +from ase.data import chemical_symbols, atomic_masses import smact from smact.screening import pauling_test import itertools -def init_structure(composition, pyxtal=False, density=.2): +def init_structure(composition, pyxtal=False, density=4): """ 
Creates a dictionary representing a structure from a composition @@ -22,9 +22,10 @@ def init_structure(composition, pyxtal=False, density=.2): dict: representing structure """ atoms = None + mass = sum([atomic_masses[num] for num in composition]) if not pyxtal: - beta = np.random.uniform(0, 180) - gamma = np.random.uniform(0, 180) + beta = np.random.uniform(5, 174) + gamma = np.random.uniform(5, 174) minCosA = - np.sin(gamma * np.pi/180) * np.sqrt(1 - np.cos(beta* np.pi/180) ** 2) + np.cos(beta * np.pi/180) * np.cos(gamma * np.pi/180) maxCosA = np.sin(gamma * np.pi/180) * np.sqrt(1 - np.cos(beta* np.pi/180) ** 2) + np.cos(beta * np.pi/180) * np.cos(gamma * np.pi/180) alpha = np.random.uniform(minCosA, maxCosA) @@ -34,8 +35,9 @@ def init_structure(composition, pyxtal=False, density=.2): c = np.random.rand() + .000001 cell=[a, b, c, alpha, beta, gamma] atoms = Atoms(composition, cell=cell, pbc=(True, True, True)) - vol = atoms.get_cell().volume - ideal_vol = len(composition) / density + vol = atoms.get_cell().volume + # ideal_vol = len(composition) / density + ideal_vol = mass / density scale = (ideal_vol / vol) ** (1/3) cell = [scale * a, scale * b, scale * c, alpha, beta, gamma] atoms.set_cell(cell) @@ -59,8 +61,8 @@ def init_structure(composition, pyxtal=False, density=.2): continue if use_random: print('Composition ', composition, 'not compatible with pyxtal. Using random structure') - beta = np.random.uniform(0, 180) - gamma = np.random.uniform(0, 180) + beta = np.random.uniform(5, 174) + gamma = np.random.uniform(5, 174) minCosA = - np.sin(gamma * np.pi/180) * np.sqrt(1 - np.cos(beta* np.pi/180) ** 2) + np.cos(beta * np.pi/180) * np.cos(gamma * np.pi/180) maxCosA = np.sin(gamma * np.pi/180) * np.sqrt(1 - np.cos(beta* np.pi/180) ** 2) + np.cos(beta * np.pi/180) * np.cos(gamma * np.pi/180) alpha = np.random.uniform(minCosA, maxCosA) @@ -71,7 +73,8 @@ def init_structure(composition, pyxtal=False, density=.2): cell=[a, b, c, alpha, beta, gamma] atoms = Atoms(composition, cell=cell, pbc=(True, True, True)) vol = atoms.get_cell().volume - ideal_vol = len(composition) / density + # ideal_vol = len(composition) / density + ideal_vol = mass / density scale = (ideal_vol / vol) ** (1/3) cell = [scale * a, scale * b, scale * c, alpha, beta, gamma] atoms.set_cell(cell) diff --git a/msp/utils/objectives/objectives.py b/msp/utils/objectives/objectives.py index 8ec7617..0f486cb 100644 --- a/msp/utils/objectives/objectives.py +++ b/msp/utils/objectives/objectives.py @@ -3,7 +3,6 @@ import numpy as np import time as time from torch_scatter import scatter_add -from mendeleev.fetch import fetch_table class Energy(torch.nn.Module): @@ -24,9 +23,7 @@ def __init__(self, normalize=True, energy_ratio=1.0, ljr_ratio=1.0, ljr_power=12 self.lj_rmins = np.load(str(Path(__file__).parent / "lj_rmins.npy")) * ljr_scale self.ljr_ratio = ljr_ratio self.energy_ratio = energy_ratio - - if normalize: - self.element_energy = [-10000, -3.392726045, -0.00905951, -1.9089228666666667, -3.739412865, -6.679391770833334, + self.element_energy = [-10000, -3.392726045, -0.00905951, -1.9089228666666667, -3.739412865, -6.679391770833334, -9.2286654925, -8.336494925, -4.947961005, -1.9114789675, -0.02593678, -1.3225252934482759, -1.60028005, -3.74557583, -5.42531803, -5.413302506666667, -4.136449866875, -1.84853666, -0.06880822, -1.110398947, -2.00559988, -6.332469105, -7.895492016666666, -9.08390607, -9.65304747, @@ -167,6 +164,12 @@ def __init__(self, embeddings, normalize=True, energy_ratio=1.0, ljr_ratio=1, lj self.mode = mode def 
set_norm_offset(self, z, n_atoms): + """ + Set the offset for the energy normalization + Args: + z (torch.Tensor): Atomic numbers of the atoms in the batch + n_atoms (torch.Tensor): Number of atoms in each structure in the batch + """ self.offset = torch.zeros((len(n_atoms), 1)).to(z.device) self.embeddings = self.embeddings.to(z.device) curr = 0 diff --git a/scripts/example.py b/scripts/example.py index 0c67ecd..fcf421a 100644 --- a/scripts/example.py +++ b/scripts/example.py @@ -24,18 +24,19 @@ #or load dataset from disk: #my_dataset = load_dataset(path ="path/to/dataset") -my_dataset = json.load(open("/global/cfs/projectdirs/m3641/Shared/Materials_datasets/MP_data_latest/raw/data.json", "r")) +# my_dataset = json.load(open("/global/cfs/projectdirs/m3641/Shared/Materials_datasets/MP_data_latest/raw/data.json", "r")) +my_dataset = json.load(open("../data/data_subset_msp.json", "r")) #print(my_dataset[0]) max_iterations=1 #Initialize a forcefield class, reading in from config (we use MDL_FF but it can be a force field from another library) train_config = 'mdl_config.yml' forcefield = MDL_FF(train_config, my_dataset) -embeddings = forcefield.get_embeddings(my_dataset, batch_size=40, cluster=False) +# embeddings = forcefield.get_embeddings(my_dataset, batch_size=40, cluster=False) #predictor = BasinHoppingASE(forcefield, hops=5, steps=100, optimizer="FIRE", dr=0.5) -predictor_batch = BasinHoppingBatch(forcefield, hops=5, steps=100, dr=0.6, optimizer='Adam', perturbs=['pos', 'cell']) +predictor_batch = BasinHoppingBatch(forcefield, hops=10, steps=100, dr=0.6, optimizer='Adam', perturbs=['pos', 'cell']) # forcefield_mace = MACE_FF() @@ -56,11 +57,15 @@ # compositions = sample_random_composition(dataset=my_dataset, n=1) # or manually specify the list of lists: # compositions = [[22, 22, 22, 22, 22, 22, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8] for _ in range(8)] - compositions = generate_random_compositions(my_dataset, n=10000, max_elements=5, max_atoms=20) + compositions = generate_random_compositions(my_dataset, n=8, max_elements=5, max_atoms=20) for comp in compositions: print(comp) - initial_structures = [init_structure(c, pyxtal=True) for c in compositions] - read_structure = ase.io.read("init.cif") + initial_structures = [init_structure(c, pyxtal=False) for c in compositions] + for j, minima in enumerate(dict_to_atoms(initial_structures)): + filename = "initial_iteration_"+str(i)+"_structure_"+str(j)+".cif" + ase.io.write(filename, minima) + # read_structure = ase.io.read("init.cif") + # initial_structures=[atoms_to_dict([read_structure], loss=[None])] #forcefield itself is not an ase calculator, but can be used to return the MDLCalculator class @@ -85,14 +90,14 @@ #---Optimizing a batch of structures with batch basin hopping--- # alternatively if we dont use ASE, we can optimize in batch, and optimize over multiple objectives as well # we do this by first initializing our objective function, which is similar to the loss function class in matdeeplearn - # objective_func = Energy(normalize=True, ljr_ratio=1) - objective_func = EmbeddingDistance(embeddings, normalize=True, energy_ratio=1, ljr_ratio=1, ljr_scale=.7, embedding_ratio=.1) + objective_func = Energy(normalize=False, ljr_ratio=1) + # objective_func = EmbeddingDistance(embeddings, normalize=True, energy_ratio=1, ljr_ratio=1, ljr_scale=.7, embedding_ratio=.1) # objective_func = EnergyAndUncertainty(normalize=True, uncertainty_ratio=.25, ljr_ratio=1, ljr_scale=.7) start_time = time.time() total_list_batch, minima_list_batch, best_hop, 
energies, accepts, accept_rate, temps, step_sizes = predictor_batch.predict(initial_structures, objective_func, batch_size=8, log_per=5, lr=.05) minima_list_batch = dict_to_atoms(minima_list_batch) for j, minima in enumerate(minima_list_batch): - filename = "attempt2_no_clustering/iteration_"+str(i)+"_structure_"+str(j)+"_mdl_batch.cif" + filename = "iteration_"+str(i)+"_structure_"+str(j)+"_mdl_batch.cif" ase.io.write(filename, minima) f = open('output.txt', 'w') for i in range(len(total_list_batch)): From 692ee7bbcb4042761d41da80fb41905a15597e67 Mon Sep 17 00:00:00 2001 From: Rithwik Seth Date: Tue, 9 Apr 2024 12:50:44 -0700 Subject: [PATCH 04/16] Fixed default optimizer bug --- msp/forcefield/mdl_ff.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/msp/forcefield/mdl_ff.py b/msp/forcefield/mdl_ff.py index f9be57e..47514a5 100644 --- a/msp/forcefield/mdl_ff.py +++ b/msp/forcefield/mdl_ff.py @@ -334,7 +334,7 @@ def optimize(self, atoms, steps, objective_func, log_per, learning_rate, num_str objective_func.set_norm_offset(batch.z, batch.n_atoms) pos, cell = batch.pos, batch.cell - opt = getattr(torch.optim, optim, 'Adam')([pos, cell], lr=learning_rate) + opt = getattr(torch.optim, optim, torch.optim.Adam)([pos, cell], lr=learning_rate) lr_scheduler = ReduceLROnPlateau(opt, 'min', factor=0.8, patience=10) pos.requires_grad_(True) From b20d866e054c4e122a42d789c3cb88c682ab37ad Mon Sep 17 00:00:00 2001 From: Rithwik Seth Date: Mon, 15 Apr 2024 18:08:07 -0700 Subject: [PATCH 05/16] Active Learning updates --- msp/forcefield/mdl_ff.py | 113 +++++++++++++++++------ msp/optimizer/globalopt/basin_hopping.py | 18 ++-- msp/structure/structure_util.py | 18 ++-- scripts/example.py | 29 +++--- 4 files changed, 127 insertions(+), 51 deletions(-) diff --git a/msp/forcefield/mdl_ff.py b/msp/forcefield/mdl_ff.py index 47514a5..38027c8 100644 --- a/msp/forcefield/mdl_ff.py +++ b/msp/forcefield/mdl_ff.py @@ -97,7 +97,7 @@ def train(self, dataset, train_ratio, val_ratio, test_ratio, max_epochs=None, lr - def update(self, dataset, train_ratio, val_ratio, test_ratio, max_epochs=None, lr=None, batch_size=None, save_path='saved_model'): + def update(self, dataset, train_ratio, val_ratio, test_ratio, max_epochs=None, lr=None, batch_size=None, save_path='saved_model', save_model=True): """ Updates the force field model on the dataset. 
(Essentially finetunes model on new data) Args: @@ -124,17 +124,17 @@ def update(self, dataset, train_ratio, val_ratio, test_ratio, max_epochs=None, l #self.model = self.trainer.model self.trainer.train() - - os.makedirs(save_path, exist_ok=True) - for i in range(len(self.trainer.model)): - sub_path = os.path.join(save_path, f"checkpoint_{i}",) - os.makedirs(sub_path, exist_ok=True) - if str(self.trainer.rank) not in ("cpu", "cuda"): - state = {"state_dict": self.trainer.model[i].module.state_dict()} - else: - state = {"state_dict": self.trainer.model[i].state_dict()} - model_path = os.path.join(sub_path, "best_checkpoint.pt") - torch.save(state, model_path) + if save_model: + os.makedirs(save_path, exist_ok=True) + for i in range(len(self.trainer.model)): + sub_path = os.path.join(save_path, f"checkpoint_{i}",) + os.makedirs(sub_path, exist_ok=True) + if str(self.trainer.rank) not in ("cpu", "cuda"): + state = {"state_dict": self.trainer.model[i].module.state_dict()} + else: + state = {"state_dict": self.trainer.model[i].state_dict()} + model_path = os.path.join(sub_path, "best_checkpoint.pt") + torch.save(state, model_path) gc.collect() torch.cuda.empty_cache() @@ -151,8 +151,8 @@ def process_data(self, dataset): for i, struc in enumerate(dataset): data = new_data_list[i] data.n_atoms = len(struc['atomic_numbers']) - data.pos = torch.tensor(struc['positions']) - data.cell = torch.tensor(np.array(struc['cell']), dtype=torch.float).view(1, 3, 3) + data.pos = torch.tensor(struc['positions']).float() + data.cell = torch.tensor(np.array(struc['cell']), dtype=torch.float).view(1, 3, 3).float() if (np.array(data.cell) == np.array([[0.0, 0.0, 0.0],[0.0, 0.0, 0.0],[0.0, 0.0, 0.0]])).all(): data.cell = torch.zeros((3,3)).unsqueeze(0) @@ -160,11 +160,10 @@ def process_data(self, dataset): data.structure_id = [struc['structure_id']] else: data.structure_id = [str(i)] - data.structure_id = [struc['structure_id']] data.z = torch.LongTensor(struc['atomic_numbers']) - if 'forces' in struc: + if 'forces' in struc and struc['forces'] is not None: data.forces = torch.tensor(struc['forces']) - if 'stress' in struc: + if 'stress' in struc and struc['stress'] is not None: data.stress = torch.tensor(struc['stress']) #optional data.u = torch.tensor(np.zeros((3))[np.newaxis, ...]).float() @@ -178,7 +177,7 @@ def process_data(self, dataset): return dataset - def _forward(self, batch_data, embeddings=False): + def _forward(self, batch_data, embeddings=False, forces = False): """ Calls model directly Args: @@ -193,17 +192,28 @@ def _forward(self, batch_data, embeddings=False): out_list.append(self.trainer.model[i](batch_data)) out_stack = torch.stack([o["output"] for o in out_list]) + if forces: + force_stack = torch.stack([o["pos_grad"] for o in out_list]) + stress_stack = torch.stack([o["cell_grad"] for o in out_list]) + if embeddings: embed_stack = torch.stack([o["embedding"] for o in out_list]) + output = {} output["potential_energy"] = torch.mean(out_stack, dim=0) output["potential_energy_uncertainty"] = torch.std(out_stack, dim=0) + if forces: + output["forces"] = torch.mean(force_stack, dim=0) + output["stress"] = torch.mean(stress_stack, dim=0) + else: + output["forces"] = [None] * len(out_stack) + output["stress"] = [None] * len(out_stack) if embeddings: output['embeddings'] = embed_stack #output is a dict return output - def _batched_forward(self, batch_data, embeddings = False): + def _batched_forward(self, batch_data, embeddings = False, forces = False): """ Calls model in parallel using torch.vmap 
Args: @@ -213,22 +223,36 @@ def _batched_forward(self, batch_data, embeddings = False): Returns: dict: A dictionary of the model output. """ - if embeddings: + output = {} + if embeddings and forces: + def fmodel(params, buffers, x): + output = functional_call(self.base_model, (params, buffers), (x,)) + return output['output'], output['embedding'], output['pos_grad'], output['cell_grad'] + out_stack, embed_stack, force_stack, stress_stack = torch.vmap(fmodel, in_dims=(0, 0, None))(self.params, self.buffers, batch_data) + output["forces"] = torch.mean(force_stack, dim=0) + output["stress"] = torch.mean(stress_stack, dim=0) + output['embeddings'] = embed_stack + elif embeddings: def fmodel(params, buffers, x): output = functional_call(self.base_model, (params, buffers), (x,)) return output['output'], output['embedding'] out_stack, embed_stack = torch.vmap(fmodel, in_dims=(0, 0, None))(self.params, self.buffers, batch_data) + output['embeddings'] = embed_stack + elif forces: + def fmodel(params, buffers, x): + output = functional_call(self.base_model, (params, buffers), (x,)) + return output['output'], output['pos_grad'], output['cell_grad'] + out_stack, force_stack, stress_stack = torch.vmap(fmodel, in_dims=(0, 0, None))(self.params, self.buffers, batch_data) + output["forces"] = torch.mean(force_stack, dim=0) + output["stress"] = torch.mean(stress_stack, dim=0) else: def fmodel(params, buffers, x): output = functional_call(self.base_model, (params, buffers), (x,)) return output['output'] out_stack = torch.vmap(fmodel, in_dims=(0, 0, None))(self.params, self.buffers, batch_data) - output = {} output["potential_energy"] = torch.mean(out_stack, dim=0) output["potential_energy_uncertainty"] = torch.std(out_stack, dim=0) - if embeddings: - output['embeddings'] = embed_stack - #output is a dict + return output def get_embeddings(self, dataset, batch_size, cluster=False, num_clusters=5000): @@ -278,7 +302,44 @@ def get_embeddings(self, dataset, batch_size, cluster=False, num_clusters=5000): print(f"New embeddings are {embeddings.size()}") return embeddings + def get_forces_and_stress(self, atoms, batch_size): + """ + Get forces and stress from the model for the atoms. + Args: + atoms (list): A list of ASE atoms objects. + batch_size (int): The batch size for the model. + Returns: + tuple: A tuple of the forces and stress from the model. 
+ """ + data_list = atoms_to_data(atoms) + device = torch.device("cuda" if torch.cuda.is_available() else "cpu") + self.params, self.buffers = stack_module_state(self.trainer.model) + self.base_model = copy.deepcopy(self.trainer.model[0]) + self.base_model = self.base_model.to('meta') + loader = DataLoader(data_list, batch_size=batch_size) + loader_iter = iter(loader) + forces = [] + stress = [] + start_time = time.time() + temp = 1 + print('Getting forces and stress for structures') + for i in range(len(loader_iter)): + batch = next(loader_iter).to(device) + out = self._forward(batch, forces=True) + forces.append(out['forces']) + stress.append(out['stress']) + print('Structures', temp, 'to', temp + len(batch) - 1, 'took', time.time() - start_time) + temp += len(batch) + start_time = time.time() + per_atom_forces = torch.cat(forces, dim=1).cpu().detach().numpy() + per_struc_forces = [] + temp = 0 + for i in range(len(data_list)): + per_struc_forces.append(per_atom_forces[temp:temp + data_list[i].n_atoms]) + temp += data_list[i].n_atoms + stress = torch.cat(stress, dim=1).cpu().detach().numpy() + return per_struc_forces, stress def create_ase_calc(self): """ @@ -348,7 +409,7 @@ def optimize(self, atoms, steps, objective_func, log_per, learning_rate, num_str step = [0] def closure(step, temp_obj, temp_energy, temp_novel, temp_soft_sphere, batch): opt.zero_grad() - output = self._batched_forward(batch, embeddings=embed) + output = self._batched_forward(batch, embeddings=embed, forces=False) objective_loss, energy_loss, novel_loss, soft_sphere_loss = objective_func(output, batch) objective_loss.mean().backward(retain_graph=True) @@ -379,7 +440,7 @@ def closure(step, temp_obj, temp_energy, temp_novel, temp_soft_sphere, batch): energy_loss.extend(temp_energy[0].cpu().detach().numpy()) novel_loss.extend(temp_novel[0].cpu().detach().numpy()) soft_sphere_loss.extend(temp_soft_sphere[0].cpu().detach().numpy()) - batch.z = batch.z.type(torch.int64) + batch.z = batch.z.type(torch.int64) for i in range(len(self.trainer.model)): self.trainer.model[i].gradient = True diff --git a/msp/optimizer/globalopt/basin_hopping.py b/msp/optimizer/globalopt/basin_hopping.py index 10ed1f0..7a99b52 100644 --- a/msp/optimizer/globalopt/basin_hopping.py +++ b/msp/optimizer/globalopt/basin_hopping.py @@ -253,7 +253,7 @@ def __init__(self, forcefield, hops=5, steps=100, optimizer="FIRE", dr=.5, max_a forcefield: Takes a forcefield object with a create_ase_calc() function for the caclculator hops (int, optional): Number of basin hops. Defaults to 5. steps (int, optional): Number of steps per basin hop. Defaults to 100. - optimizer (str, optional): Optimizer to use for each step. Defaults to "FIRE". + optimizer (str, optional): The name of an ASE optimizer to use for each step. Defaults to "FIRE". dr (int, optional): rate at which to change values. Defaults to .5. max_atom_num (int, optional): maximum atom number to be considered, exclusive. Defaults to 101. perturbs (list, optional): list of perturbations to apply. 
Defaults to ['pos', 'cell', 'atomic_num', 'add', 'remove', 'swap'] @@ -294,7 +294,7 @@ def predict(self, structures, cell_relax=True, topk=1, num_atoms_perturb=1, num_ res.append([]) for i in range(self.hops): old_energy = atom.get_potential_energy(force_consistent=False) - optimizer = getattr(ase.optimize, self.optimizer, 'FIRE')(atom, logfile=None) + optimizer = getattr(ase.optimize, self.optimizer, ase.optimize.FIRE)(atom, logfile=None) start_time = time() optimizer.run(fmax=0.001, steps=self.steps) end_time = time() @@ -332,7 +332,7 @@ def __init__(self, forcefield, hops=5, steps=100, optimizer="Adam", dr=.5, max_a forcefield: Takes a forcefield object with a create_ase_calc() function for the caclculator hops (int, optional): Number of basin hops. Defaults to 5. steps (int, optional): Number of steps per basin hop. Defaults to 100. - optimizer (str, optional): Optimizer to use for each step. Defaults to "Adam". + optimizer (str, optional): The name of a torch.optim optimizer to use for each step. Defaults to "Adam". dr (int, optional): rate at which to change values. Defaults to .5. max_atom_num (int, optional): maximum atom number to be considered, exclusive. Defaults to 101. perturbs (list, optional): list of perturbations to apply. Defaults to ['pos', 'cell', 'atomic_num', 'add', 'remove', 'swap'] @@ -379,7 +379,7 @@ def predict(self, structures, objective_func, cell_relax=True, topk=1, batch_siz accepts = [[] for _ in range(len(new_atoms))] accept_rate = [[] for _ in range(len(new_atoms))] temps = [[] for _ in range(len(new_atoms))] - energies = [[] for _ in range(len(new_atoms))] + losses = [[] for _ in range(len(new_atoms))] step_sizes = [] temp = [0.0001 for _ in range(len(new_atoms))] @@ -411,7 +411,7 @@ def predict(self, structures, objective_func, cell_relax=True, topk=1, batch_siz best_atoms[j] = min_atoms[j].copy() best_hop[j] = i prev_step_loss[j] = obj_loss[j] - energies[j].append(obj_loss[j]) + losses[j].append(obj_loss[j]) accepts[j].append(accept) if len(accepts[j]) % 10 == 0: accept_rate[j].append(sum(accepts[j][-10:])) @@ -452,18 +452,22 @@ def predict(self, structures, objective_func, cell_relax=True, topk=1, batch_siz res[j][hop] = {'hop': i, 'objective_loss': obj_loss[j][0], 'energy_loss': energy_loss[j][0], 'novel_loss': novel_loss[j][0], 'soft_sphere_loss': soft_sphere_loss[j][0], 'perturb': prev_perturb[j].__name__, 'composition': new_atoms[j].get_atomic_numbers(), 'structure': atoms_to_dict([new_atoms[j]], obj_loss[j])[0]} + raw_energy = [0] * len(new_atoms) for j, hop in enumerate(best_hop): print("Structure: ", j) print('\tBest hop: ', hop) print("\tObjective loss: ", res[j][hop]['objective_loss']) print("\tEnergy loss: ", res[j][hop]['energy_loss']) + raw_energy[j] = res[j][hop]['energy_loss'] if getattr(objective_func, 'normalize', False): print("\tUnnormalized energy loss: ", res[j][hop]['unnormalized_loss']) + raw_energy[j] = res[j][hop]['unnormalized_loss'] print("\tNovel loss: ", res[j][hop]['novel_loss']) print("\tSoft sphere loss: ", res[j][hop]['soft_sphere_loss']) avg_loss += res[j][hop]['objective_loss'] print('Avg Objective Loss', avg_loss / len(new_atoms)) + forces, stress = self.forcefield.get_forces_and_stress(best_atoms, batch_size=batch_size) + best_atoms = atoms_to_dict(best_atoms, raw_energy, forces, stress) - min_atoms = atoms_to_dict(best_atoms, min_objective_loss) - return res, min_atoms, best_hop, energies, accepts, accept_rate, temps, step_sizes + return res, best_atoms, best_hop, losses, accepts, accept_rate, temps, step_sizes diff 
--git a/msp/structure/structure_util.py b/msp/structure/structure_util.py index 0547734..9a43194 100644 --- a/msp/structure/structure_util.py +++ b/msp/structure/structure_util.py @@ -86,7 +86,7 @@ def init_structure(composition, pyxtal=False, density=4): return atoms_to_dict([atoms], [None])[0] -def atoms_to_dict(atoms, loss=None): +def atoms_to_dict(atoms, loss=None, forces=None, stress=None): """ Creates a list of dict from a list of ASE atoms objects @@ -100,14 +100,20 @@ def atoms_to_dict(atoms, loss=None): res = [{} for _ in atoms] for i, d in enumerate(res): d['n_atoms'] = len(atoms[i].get_atomic_numbers()) - d['pos'] = atoms[i].get_positions() - d['cell'] = atoms[i].get_cell() + d['positions'] = atoms[i].get_positions() + d['cell'] = atoms[i].get_cell().array.tolist() d['z'] = atoms[i].get_atomic_numbers() d['atomic_numbers'] = atoms[i].get_atomic_numbers() - if loss is None: + if loss is not None: + d['y'] = loss[i] + else: d['loss'] = None + if forces is not None: + d['forces'] = forces[i] + d['stress'] = stress[i].reshape(1, 3, 3) else: - d['loss'] = loss[i] + d['forces'] = None + d['stress'] = None return res def dict_to_atoms(dictionaries): @@ -122,7 +128,7 @@ def dict_to_atoms(dictionaries): """ res = [] for d in dictionaries: - res.append(Atoms(d['z'], cell=d['cell'], positions=d['pos'])) + res.append(Atoms(d['z'], cell=d['cell'], positions=d['positions'])) return res def atoms_to_data(atoms): diff --git a/scripts/example.py b/scripts/example.py index fcf421a..a01a280 100644 --- a/scripts/example.py +++ b/scripts/example.py @@ -24,10 +24,11 @@ #or load dataset from disk: #my_dataset = load_dataset(path ="path/to/dataset") -# my_dataset = json.load(open("/global/cfs/projectdirs/m3641/Shared/Materials_datasets/MP_data_latest/raw/data.json", "r")) -my_dataset = json.load(open("../data/data_subset_msp.json", "r")) +my_dataset = json.load(open("/global/cfs/projectdirs/m3641/Shared/Materials_datasets/MP_data_latest/raw/data.json", "r")) +predicted_structures = [] +# my_dataset = json.load(open("../data/data_subset_msp.json", "r")) #print(my_dataset[0]) -max_iterations=1 +max_iterations=2 #Initialize a forcefield class, reading in from config (we use MDL_FF but it can be a force field from another library) train_config = 'mdl_config.yml' @@ -36,7 +37,7 @@ #predictor = BasinHoppingASE(forcefield, hops=5, steps=100, optimizer="FIRE", dr=0.5) -predictor_batch = BasinHoppingBatch(forcefield, hops=10, steps=100, dr=0.6, optimizer='Adam', perturbs=['pos', 'cell']) +predictor_batch = BasinHoppingBatch(forcefield, hops=1, steps=100, dr=0.6, optimizer='Adam', perturbs=['pos', 'cell']) # forcefield_mace = MACE_FF() @@ -57,13 +58,17 @@ # compositions = sample_random_composition(dataset=my_dataset, n=1) # or manually specify the list of lists: # compositions = [[22, 22, 22, 22, 22, 22, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8] for _ in range(8)] - compositions = generate_random_compositions(my_dataset, n=8, max_elements=5, max_atoms=20) + + if i != 0: + forcefield.update(predicted_structures, 1, 0, 0, max_epochs=30, save_model=False) + + compositions = generate_random_compositions(my_dataset, n=2, max_elements=5, max_atoms=20) for comp in compositions: print(comp) initial_structures = [init_structure(c, pyxtal=False) for c in compositions] - for j, minima in enumerate(dict_to_atoms(initial_structures)): - filename = "initial_iteration_"+str(i)+"_structure_"+str(j)+".cif" - ase.io.write(filename, minima) + # for j, minima in enumerate(dict_to_atoms(initial_structures)): + # filename = 
"initial_iteration_"+str(i)+"_structure_"+str(j)+".cif" + # ase.io.write(filename, minima) # read_structure = ase.io.read("init.cif") # initial_structures=[atoms_to_dict([read_structure], loss=[None])] @@ -90,13 +95,13 @@ #---Optimizing a batch of structures with batch basin hopping--- # alternatively if we dont use ASE, we can optimize in batch, and optimize over multiple objectives as well # we do this by first initializing our objective function, which is similar to the loss function class in matdeeplearn - objective_func = Energy(normalize=False, ljr_ratio=1) + objective_func = Energy(normalize=True, ljr_ratio=1) # objective_func = EmbeddingDistance(embeddings, normalize=True, energy_ratio=1, ljr_ratio=1, ljr_scale=.7, embedding_ratio=.1) # objective_func = EnergyAndUncertainty(normalize=True, uncertainty_ratio=.25, ljr_ratio=1, ljr_scale=.7) start_time = time.time() total_list_batch, minima_list_batch, best_hop, energies, accepts, accept_rate, temps, step_sizes = predictor_batch.predict(initial_structures, objective_func, batch_size=8, log_per=5, lr=.05) - minima_list_batch = dict_to_atoms(minima_list_batch) - for j, minima in enumerate(minima_list_batch): + minima_list_batch_ase = dict_to_atoms(minima_list_batch) + for j, minima in enumerate(minima_list_batch_ase): filename = "iteration_"+str(i)+"_structure_"+str(j)+"_mdl_batch.cif" ase.io.write(filename, minima) f = open('output.txt', 'w') @@ -214,6 +219,6 @@ #update the dataset as well - update_dataset(repo="MP", data=dft_results) + predicted_structures.extend(minima_list_batch) print("Job done") From de9dc9e2a4409c2dc3dd927936a12331347d1310 Mon Sep 17 00:00:00 2001 From: Rithwik Seth Date: Tue, 21 May 2024 21:35:07 -0700 Subject: [PATCH 06/16] Novelty updates --- msp/composition/__init__.py | 4 +- msp/composition/composition.py | 65 +++++++++++++++- msp/forcefield/mdl_ff.py | 12 +-- msp/optimizer/globalopt/basin_hopping.py | 22 +++--- msp/structure/structure_util.py | 50 ++++++------ msp/utils/objectives/objectives.py | 13 ++-- scripts/example.py | 97 +++++++++++++++++------- 7 files changed, 184 insertions(+), 79 deletions(-) diff --git a/msp/composition/__init__.py b/msp/composition/__init__.py index f96d0ac..52040bd 100644 --- a/msp/composition/__init__.py +++ b/msp/composition/__init__.py @@ -1,3 +1,3 @@ -__all__ = ["generate_random_compositions", "sample_random_composition"] +__all__ = ["generate_random_compositions", "sample_random_composition", "generate_random_lithium_compositions"] -from .composition import generate_random_compositions, sample_random_composition \ No newline at end of file +from .composition import generate_random_compositions, sample_random_composition, generate_random_lithium_compositions \ No newline at end of file diff --git a/msp/composition/composition.py b/msp/composition/composition.py index 45e45c3..65ce053 100644 --- a/msp/composition/composition.py +++ b/msp/composition/composition.py @@ -43,8 +43,8 @@ def generate_random_compositions(dataset, n=5, max_elements=5, max_atoms=20, ele Args: dataset (dict): dictionary of dataset n (int): number of compositions to generate - max_elements (int): maximum number of elements in composition - max_atoms (int): maximum number of atoms per element + max_elements (int): maximum number of unique elements in composition + max_atoms (int): maximum number of atoms in composition elems_to_sample (list): list of elements to sample from Returns: @@ -85,7 +85,7 @@ def generate_random_compositions(dataset, n=5, max_elements=5, max_atoms=20, ele print('Potential 
composition: ', comp) smact_valid = smact_validity(rand_elems, freq) print('SMACT validity: ', smact_valid) - if not smact_validity(rand_elems, freq): + if not smact_valid: print('Invalid composition') continue comp_hash = hash_structure(comp) @@ -95,9 +95,66 @@ def generate_random_compositions(dataset, n=5, max_elements=5, max_atoms=20, ele comp_hashes.append(comp_hash) break else: - print('Invalid compositon, already occurs') + print('Invalid compositon, already occurs in dataset') return compositions +def generate_random_lithium_compositions(dataset, n=5, max_elements=6, max_atoms=20, li_ratio_lower=.2, li_ratio_upper=.4, halide_ratio_lower=.2, halide_ratio_upper=.5): + """ + Generate n unique lithium compositions that do not appear in dataset randomly + Args: + dataset (dict): dictionary of dataset + n (int): number of compositions to generate + max_elements (int): maximum number of unique elements in composition + max_atoms (int): maximum number of atoms in composition + li_ratio_lower (float): lower bound for lithium ratio + li_ratio_upper (float): upper bound for lithium ratio + halide_ratio_lower (float): lower bound for halide ratio + halide_ratio_upper (float): upper bound for halide ratio + + Returns: + compositions (list): list of compositions + """ + compositions = [] + comp_hashes = [] + hashed_dataset = hash_dataset(dataset) + halides = [9, 17] + metals = [39, 13, 22, 21, 31, 49, 40, 12, 30, 32, 57, 58, 41] + for i in range(n): + while True: + comp = [] + total_atoms = np.random.randint(5, max_atoms + 1) + num_lithium = np.random.randint(max(1, round(total_atoms * li_ratio_lower)), round(total_atoms * li_ratio_upper) + 1) + comp.extend([3] * num_lithium) + num_halides = np.random.randint(round(total_atoms * halide_ratio_lower), round(total_atoms * halide_ratio_upper) + 1) + comp.extend(np.random.choice(halides, num_halides, replace=True)) + if len(comp) >= total_atoms: + print('Invalid composition, no space for metals: ', comp) + continue + temp_metals = [] + while np.unique(comp).size < max_elements and len(comp) < total_atoms: + temp_metals = np.random.choice(metals, 1, replace=True) + comp.append(temp_metals[-1]) + if len(comp) < total_atoms: + comp.extend(np.random.choice(temp_metals, total_atoms - len(comp), replace=True)) + elems = np.unique(comp) + freq = [comp.count(elem) for elem in elems] + smact_valid = smact_validity(elems, freq) + print('SMACT validity: ', smact_valid) + if not smact_valid: + print('Invalid composition') + continue + comp_hash = hash_structure(comp) + if comp_hash not in hashed_dataset and comp_hash not in comp_hashes: + print('Accepted composition', i, ':', comp) + comp.sort() + compositions.append(comp) + comp_hashes.append(comp_hash) + break + else: + print('Invalid compositon, already occurs in dataset') + return compositions + + def sample_random_composition(dataset, n=5): """ Sample n random compositions from the dataset diff --git a/msp/forcefield/mdl_ff.py b/msp/forcefield/mdl_ff.py index 38027c8..70f35c1 100644 --- a/msp/forcefield/mdl_ff.py +++ b/msp/forcefield/mdl_ff.py @@ -327,18 +327,18 @@ def get_forces_and_stress(self, atoms, batch_size): for i in range(len(loader_iter)): batch = next(loader_iter).to(device) out = self._forward(batch, forces=True) - forces.append(out['forces']) - stress.append(out['stress']) + forces.append(out['forces'].cpu().detach()) + stress.append(out['stress'].cpu().detach()) print('Structures', temp, 'to', temp + len(batch) - 1, 'took', time.time() - start_time) temp += len(batch) start_time = time.time() - 
per_atom_forces = torch.cat(forces, dim=1).cpu().detach().numpy() + per_atom_forces = torch.cat(forces, dim=-2).numpy() per_struc_forces = [] temp = 0 for i in range(len(data_list)): per_struc_forces.append(per_atom_forces[temp:temp + data_list[i].n_atoms]) temp += data_list[i].n_atoms - stress = torch.cat(stress, dim=1).cpu().detach().numpy() + stress = torch.cat(stress, dim=0).numpy() return per_struc_forces, stress def create_ase_calc(self): @@ -416,10 +416,10 @@ def closure(step, temp_obj, temp_energy, temp_novel, temp_soft_sphere, batch): curr_time = time.time() - start_time if log_per > 0 and step[0] % log_per == 0: if cell_relax: - print("Structure ID: {}, Step: {}, LJR Loss: {:.6f}, Pos Gradient: {:.6f}, Cell Gradient: {:.6f}, Time: {:.6f}".format(len(batch.structure_id), + print("Structure ID: {}, Step: {}, Objective Loss: {:.6f}, Pos Gradient: {:.6f}, Cell Gradient: {:.6f}, Time: {:.6f}".format(len(batch.structure_id), step[0], objective_loss.mean().item(), pos.grad.abs().mean().item(), cell.grad.abs().mean().item(), curr_time)) else: - print("Structure ID: {}, Step: {}, LJR Loss: {:.6f}, Pos Gradient: {:.6f}, Time: {:.6f}".format(len(batch.structure_id), + print("Structure ID: {}, Step: {}, Objective Loss: {:.6f}, Pos Gradient: {:.6f}, Time: {:.6f}".format(len(batch.structure_id), step[0], objective_loss.mean().item(), pos.grad.abs().mean().item(), curr_time)) step[0] += 1 batch.pos, batch.cell = pos, cell diff --git a/msp/optimizer/globalopt/basin_hopping.py b/msp/optimizer/globalopt/basin_hopping.py index 7a99b52..37f93bb 100644 --- a/msp/optimizer/globalopt/basin_hopping.py +++ b/msp/optimizer/globalopt/basin_hopping.py @@ -314,11 +314,11 @@ def predict(self, structures, cell_relax=True, topk=1, num_atoms_perturb=1, num_ res[-1].append( {'hop': i, 'init_loss': old_energy, 'loss': optimized_energy, 'perturb': prev_perturb.__name__, 'composition': temp.get_atomic_numbers(), - 'structure': atoms_to_dict([temp], [optimized_energy])[0]}) + 'structure': atoms_to_dict([temp], [optimized_energy], [optimized_energy])[0]}) prev_perturb = self.perturbs[np.random.randint(len(self.perturbs))] prev_perturb(atom, num_atoms_perturb=num_atoms_perturb, num_unique=num_unique) print('Structure', index, 'Min energy', min_energy[index]) - min_atoms = atoms_to_dict(min_atoms, min_energy) + min_atoms = atoms_to_dict(min_atoms, min_energy, min_energy) return res, min_atoms @@ -420,11 +420,11 @@ def predict(self, structures, objective_func, cell_relax=True, topk=1, batch_siz res[j].append({'hop': i, 'objective_loss': obj_loss[j][0], 'energy_loss': energy_loss[j][0], 'novel_loss': novel_loss[j][0], 'soft_sphere_loss': soft_sphere_loss[j][0], 'unnormalized_loss' : objective_func.norm_to_raw_loss(energy_loss[j][0], new_atoms[j].get_atomic_numbers()), 'perturb': prev_perturb[j].__name__, 'composition': new_atoms[j].get_atomic_numbers(), - 'structure': atoms_to_dict([new_atoms[j]], obj_loss[j])[0]}) + 'structure': atoms_to_dict([new_atoms[j]], obj_loss[j], objective_func.norm_to_raw_loss(energy_loss[j], new_atoms[j].get_atomic_numbers()))[0]}) else: res[j].append({'hop': i, 'objective_loss': obj_loss[j][0], 'energy_loss': energy_loss[j][0], 'novel_loss': novel_loss[j][0], 'soft_sphere_loss': soft_sphere_loss[j][0], 'perturb': prev_perturb[j].__name__, 'composition': new_atoms[j].get_atomic_numbers(), - 'structure': atoms_to_dict([new_atoms[j]], obj_loss[j])[0]}) + 'structure': atoms_to_dict([new_atoms[j]], obj_loss[j], energy_loss[j])[0]}) print("\tStructure: ", j) print("\t\tObjective loss: ", 
res[j][-1]['objective_loss']) print("\t\tEnergy loss: ", res[j][-1]['energy_loss']) @@ -434,24 +434,24 @@ def predict(self, structures, objective_func, cell_relax=True, topk=1, batch_siz print("\t\tSoft sphere loss: ", res[j][-1]['soft_sphere_loss']) print("\t\tComposition: ", res[j][-1]['composition']) print("\t\tperturb: ", res[j][-1]['perturb']) - print('HOP', i, 'took', end_time - start_time, 'seconds') - for j in range(len(new_atoms)): + new_atoms[j] = min_atoms[j].copy() rand_ind = np.random.randint(len(self.perturbs)) prev_perturb[j] = self.perturbs[rand_ind] self.perturbs[rand_ind](new_atoms[j], num_atoms_perturb=num_atoms_perturb, num_unique=num_unique) + print('HOP', i, 'took', end_time - start_time, 'seconds') print('Final optimization') - best_atoms, obj_loss, energy_loss, novel_loss, soft_sphere_loss = self.forcefield.optimize(best_atoms, 300, objective_func, log_per, lr, batch_size=batch_size, cell_relax=cell_relax, optim=self.optimizer) + best_atoms, obj_loss, energy_loss, novel_loss, soft_sphere_loss = self.forcefield.optimize(best_atoms, 1, objective_func, log_per, lr, batch_size=batch_size, cell_relax=cell_relax, optim=self.optimizer) avg_loss = 0 for j, hop in enumerate(best_hop): if getattr(objective_func, 'normalize', False): res[j][hop] = {'hop': i, 'objective_loss': obj_loss[j][0], 'energy_loss': energy_loss[j][0], 'novel_loss': novel_loss[j][0], 'soft_sphere_loss': soft_sphere_loss[j][0], 'unnormalized_loss' : objective_func.norm_to_raw_loss(energy_loss[j][0], new_atoms[j].get_atomic_numbers()), 'perturb': prev_perturb[j].__name__, 'composition': new_atoms[j].get_atomic_numbers(), - 'structure': atoms_to_dict([new_atoms[j]], obj_loss[j])[0]} + 'structure': atoms_to_dict([new_atoms[j]], obj_loss[j], objective_func.norm_to_raw_loss(energy_loss[j], new_atoms[j].get_atomic_numbers()))[0]} else: res[j][hop] = {'hop': i, 'objective_loss': obj_loss[j][0], 'energy_loss': energy_loss[j][0], 'novel_loss': novel_loss[j][0], 'soft_sphere_loss': soft_sphere_loss[j][0], 'perturb': prev_perturb[j].__name__, 'composition': new_atoms[j].get_atomic_numbers(), - 'structure': atoms_to_dict([new_atoms[j]], obj_loss[j])[0]} + 'structure': atoms_to_dict([new_atoms[j]], obj_loss[j], energy_loss[j])[0]} raw_energy = [0] * len(new_atoms) for j, hop in enumerate(best_hop): print("Structure: ", j) @@ -466,8 +466,8 @@ def predict(self, structures, objective_func, cell_relax=True, topk=1, batch_siz print("\tSoft sphere loss: ", res[j][hop]['soft_sphere_loss']) avg_loss += res[j][hop]['objective_loss'] print('Avg Objective Loss', avg_loss / len(new_atoms)) - forces, stress = self.forcefield.get_forces_and_stress(best_atoms, batch_size=batch_size) - best_atoms = atoms_to_dict(best_atoms, raw_energy, forces, stress) + forces, stress = self.forcefield.get_forces_and_stress(best_atoms, batch_size=4) + best_atoms = atoms_to_dict(best_atoms, best_loss, raw_energy, forces, stress) return res, best_atoms, best_hop, losses, accepts, accept_rate, temps, step_sizes diff --git a/msp/structure/structure_util.py b/msp/structure/structure_util.py index 9a43194..3485054 100644 --- a/msp/structure/structure_util.py +++ b/msp/structure/structure_util.py @@ -2,6 +2,7 @@ from ase import Atoms import torch from torch_geometric.data import Data +import random from ase.data import chemical_symbols, atomic_masses import smact from smact.screening import pauling_test @@ -9,7 +10,7 @@ -def init_structure(composition, pyxtal=False, density=4): +def init_structure(composition, pyxtal=False, density=2): """ Creates a 
dictionary representing a structure from a composition @@ -24,19 +25,18 @@ def init_structure(composition, pyxtal=False, density=4): atoms = None mass = sum([atomic_masses[num] for num in composition]) if not pyxtal: - beta = np.random.uniform(5, 174) - gamma = np.random.uniform(5, 174) + beta = np.random.uniform(10, 169) + gamma = np.random.uniform(10, 169) minCosA = - np.sin(gamma * np.pi/180) * np.sqrt(1 - np.cos(beta* np.pi/180) ** 2) + np.cos(beta * np.pi/180) * np.cos(gamma * np.pi/180) maxCosA = np.sin(gamma * np.pi/180) * np.sqrt(1 - np.cos(beta* np.pi/180) ** 2) + np.cos(beta * np.pi/180) * np.cos(gamma * np.pi/180) alpha = np.random.uniform(minCosA, maxCosA) alpha = np.arccos(alpha) * 180 / np.pi - a = np.random.rand() + .000001 - b = np.random.rand() + .000001 - c = np.random.rand() + .000001 + a = np.random.uniform(0.2, 1) + b = np.random.uniform(0.2, 1) + c = np.random.uniform(0.2, 1) cell=[a, b, c, alpha, beta, gamma] atoms = Atoms(composition, cell=cell, pbc=(True, True, True)) vol = atoms.get_cell().volume - # ideal_vol = len(composition) / density ideal_vol = mass / density scale = (ideal_vol / vol) ** (1/3) cell = [scale * a, scale * b, scale * c, alpha, beta, gamma] @@ -50,26 +50,28 @@ def init_structure(composition, pyxtal=False, density=4): counts = [composition.count(num) for num in unique_nums] symbols = [chemical_symbols[num] for num in unique_nums] struct_num = 0 - use_random = False - for i in range(1, 231): + space_group = list(range(2, 231)) + random.shuffle(space_group) + use_random = True + for i in space_group: try: - use_random = True struct_num = i - struc.from_random(3, i, symbols, counts) + struc.from_random(3, i, symbols, counts, factor=1.4, max_count=20) + use_random = False break - except: - continue + except Exception as e: + pass if use_random: print('Composition ', composition, 'not compatible with pyxtal. 
Using random structure') - beta = np.random.uniform(5, 174) - gamma = np.random.uniform(5, 174) + beta = np.random.uniform(10, 169) + gamma = np.random.uniform(10, 169) minCosA = - np.sin(gamma * np.pi/180) * np.sqrt(1 - np.cos(beta* np.pi/180) ** 2) + np.cos(beta * np.pi/180) * np.cos(gamma * np.pi/180) maxCosA = np.sin(gamma * np.pi/180) * np.sqrt(1 - np.cos(beta* np.pi/180) ** 2) + np.cos(beta * np.pi/180) * np.cos(gamma * np.pi/180) alpha = np.random.uniform(minCosA, maxCosA) alpha = np.arccos(alpha) * 180 / np.pi - a = np.random.rand() + .000001 - b = np.random.rand() + .000001 - c = np.random.rand() + .000001 + a = np.random.uniform(0.2, 1) + b = np.random.uniform(0.2, 1) + c = np.random.uniform(0.2, 1) cell=[a, b, c, alpha, beta, gamma] atoms = Atoms(composition, cell=cell, pbc=(True, True, True)) vol = atoms.get_cell().volume @@ -86,7 +88,7 @@ def init_structure(composition, pyxtal=False, density=4): return atoms_to_dict([atoms], [None])[0] -def atoms_to_dict(atoms, loss=None, forces=None, stress=None): +def atoms_to_dict(atoms, objective_loss=None, raw_energy=None, forces=None, stress=None): """ Creates a list of dict from a list of ASE atoms objects @@ -104,10 +106,14 @@ def atoms_to_dict(atoms, loss=None, forces=None, stress=None): d['cell'] = atoms[i].get_cell().array.tolist() d['z'] = atoms[i].get_atomic_numbers() d['atomic_numbers'] = atoms[i].get_atomic_numbers() - if loss is not None: - d['y'] = loss[i] + if raw_energy is not None: + d['y'] = raw_energy[i] else: - d['loss'] = None + d['y'] = None + if objective_loss is not None: + d['objective_loss'] = objective_loss[i] + else: + d['objective_loss'] = None if forces is not None: d['forces'] = forces[i] d['stress'] = stress[i].reshape(1, 3, 3) diff --git a/msp/utils/objectives/objectives.py b/msp/utils/objectives/objectives.py index 0f486cb..a28a46d 100644 --- a/msp/utils/objectives/objectives.py +++ b/msp/utils/objectives/objectives.py @@ -7,7 +7,7 @@ class Energy(torch.nn.Module): - def __init__(self, normalize=True, energy_ratio=1.0, ljr_ratio=1.0, ljr_power=12, ljr_scale = .8): + def __init__(self, normalize=True, energy_ratio=1.0, ljr_ratio=1.0, ljr_power=12, ljr_scale = .8, min_ljr_val=1.0): super().__init__() """ Initialize objective function using only energy and no novel loss @@ -21,6 +21,7 @@ def __init__(self, normalize=True, energy_ratio=1.0, ljr_ratio=1.0, ljr_power=12 self.normalize = normalize self.ljr_power = ljr_power self.lj_rmins = np.load(str(Path(__file__).parent / "lj_rmins.npy")) * ljr_scale + self.lj_rmins[self.lj_rmins < 1.0] = 1.0 self.ljr_ratio = ljr_ratio self.energy_ratio = energy_ratio self.element_energy = [-10000, -3.392726045, -0.00905951, -1.9089228666666667, -3.739412865, -6.679391770833334, @@ -33,7 +34,7 @@ def __init__(self, normalize=True, energy_ratio=1.0, ljr_ratio=1.0, ljr_power=12 -9.27440254, -7.36430787, -5.17988181, -2.8325560033333335, -0.92288976, -2.75168373, -4.009571855, -4.12900124, -3.1433058933333338, -1.524012615, -0.03617417, -0.8954023720689656, -1.91897494, -4.936007105, -5.933089155, -4.780905755, -4.7681474325, -4.7505423225, -4.718586135, -10.2570018, - -14.07612224, -4.6343661, -4.60678684, -4.58240887, -4.56771881, -4.475835423333334, 999, -4.52095052, + -14.07612224, -4.6343661, -4.60678684, -4.58240887, -4.56771881, -4.475835423333334, -10000, -4.52095052, -9.95718903, -11.85777763, -12.95813023, -12.444527185, -11.22736743, -8.83843418, -6.07113332, -3.273882, -0.303680365, -2.3626431466666666, -3.71264707, -3.89003431, -10000, -10000, -10000, -10000, -10000, 
-4.1211750075, -7.41385825, -9.51466466, -11.29141001, -12.94777968125, -14.26783833, -10000, -10000, -10000, -10000, -10000, -10000] @@ -107,7 +108,7 @@ def forward(self, model_output, batch): return self.energy_ratio * model_output["potential_energy"] + self.ljr_ratio * ljr, model_output["potential_energy"], torch.zeros(len(model_output['potential_energy']), 1).to(ljr.device), ljr class EnergyAndUncertainty(Energy): - def __init__(self, normalize=True, energy_ratio=1.0, ljr_ratio=1, ljr_power=12, ljr_scale=.8, uncertainty_ratio=.25): + def __init__(self, normalize=True, energy_ratio=1.0, ljr_ratio=1, ljr_power=12, ljr_scale=.8, uncertainty_ratio=.25, min_ljr_val=1.0): """ Initialize objective function using energy and uncertainty as novel loss Args: @@ -118,7 +119,7 @@ def __init__(self, normalize=True, energy_ratio=1.0, ljr_ratio=1, ljr_power=12, ljr_scale (float): Scaling factor for the Lennard-Jones repulsion uncertainty_ratio (float): Weight of the uncertainty in the loss """ - super().__init__(normalize, energy_ratio, ljr_ratio, ljr_power, ljr_scale) + super().__init__(normalize, energy_ratio, ljr_ratio, ljr_power, ljr_scale, min_ljr_val) self.uncertainty_ratio = uncertainty_ratio def forward(self, model_output, batch): @@ -144,7 +145,7 @@ def forward(self, model_output, batch): class EmbeddingDistance(Energy): - def __init__(self, embeddings, normalize=True, energy_ratio=1.0, ljr_ratio=1, ljr_power=12, ljr_scale=.8, embedding_ratio=.1, mode="min"): + def __init__(self, embeddings, normalize=True, energy_ratio=1.0, ljr_ratio=1, ljr_power=12, ljr_scale=.8, min_ljr_val=1.0, embedding_ratio=.1, mode="min"): """ Initialize objective function using only energy and embedding distance as novel loss embedding distance is aggregated euclidean distance between structure embedding and database embeddings @@ -158,7 +159,7 @@ def __init__(self, embeddings, normalize=True, energy_ratio=1.0, ljr_ratio=1, lj embedding_ratio (float): Weight of the embedding distance in the loss mode (str): Aggregation mode for the embedding distance, either "min" or "mean" """ - super().__init__(normalize, energy_ratio, ljr_ratio, ljr_power, ljr_scale) + super().__init__(normalize, energy_ratio, ljr_ratio, ljr_power, ljr_scale, min_ljr_val) self.embedding_ratio = embedding_ratio self.embeddings = embeddings self.mode = mode diff --git a/scripts/example.py b/scripts/example.py index a01a280..44cc692 100644 --- a/scripts/example.py +++ b/scripts/example.py @@ -1,6 +1,6 @@ import sys from msp.dataset import download_dataset, load_dataset, combine_dataset, update_dataset -from msp.composition import generate_random_compositions, sample_random_composition +from msp.composition import generate_random_compositions, sample_random_composition, generate_random_lithium_compositions from msp.forcefield import MDL_FF, MACE_FF, M3GNet_FF from msp.optimizer.globalopt.basin_hopping import BasinHoppingASE, BasinHoppingBatch from msp.utils.objectives import EnergyAndUncertainty, Energy, EmbeddingDistance @@ -28,16 +28,16 @@ predicted_structures = [] # my_dataset = json.load(open("../data/data_subset_msp.json", "r")) #print(my_dataset[0]) -max_iterations=2 +max_iterations=1 #Initialize a forcefield class, reading in from config (we use MDL_FF but it can be a force field from another library) train_config = 'mdl_config.yml' forcefield = MDL_FF(train_config, my_dataset) -# embeddings = forcefield.get_embeddings(my_dataset, batch_size=40, cluster=False) +embeddings = forcefield.get_embeddings(my_dataset, batch_size=40, cluster=False) 
#predictor = BasinHoppingASE(forcefield, hops=5, steps=100, optimizer="FIRE", dr=0.5) -predictor_batch = BasinHoppingBatch(forcefield, hops=1, steps=100, dr=0.6, optimizer='Adam', perturbs=['pos', 'cell']) +predictor_batch = BasinHoppingBatch(forcefield, hops=10, steps=100, dr=0.6, optimizer='Adam', perturbs=['pos', 'cell']) # forcefield_mace = MACE_FF() @@ -62,10 +62,11 @@ if i != 0: forcefield.update(predicted_structures, 1, 0, 0, max_epochs=30, save_model=False) - compositions = generate_random_compositions(my_dataset, n=2, max_elements=5, max_atoms=20) - for comp in compositions: - print(comp) - initial_structures = [init_structure(c, pyxtal=False) for c in compositions] + # compositions = generate_random_compositions(my_dataset, n=8, max_elements=5, max_atoms=20) + compositions_novelty = generate_random_lithium_compositions(my_dataset, n=16000) + initial_structures_novelty = [init_structure(c, pyxtal=True) for c in compositions_novelty] + # compositions_energy = generate_random_lithium_compositions(my_dataset, n=4000) + # initial_structures_energy = [init_structure(c, pyxtal=True) for c in compositions_energy] # for j, minima in enumerate(dict_to_atoms(initial_structures)): # filename = "initial_iteration_"+str(i)+"_structure_"+str(j)+".cif" # ase.io.write(filename, minima) @@ -95,31 +96,71 @@ #---Optimizing a batch of structures with batch basin hopping--- # alternatively if we dont use ASE, we can optimize in batch, and optimize over multiple objectives as well # we do this by first initializing our objective function, which is similar to the loss function class in matdeeplearn - objective_func = Energy(normalize=True, ljr_ratio=1) - # objective_func = EmbeddingDistance(embeddings, normalize=True, energy_ratio=1, ljr_ratio=1, ljr_scale=.7, embedding_ratio=.1) + # objective_func_energy = Energy(normalize=True, ljr_ratio=1) + objective_func_novelty = EmbeddingDistance(embeddings, normalize=True, energy_ratio=5, ljr_ratio=1, ljr_scale=.7, embedding_ratio=.1) # objective_func = EnergyAndUncertainty(normalize=True, uncertainty_ratio=.25, ljr_ratio=1, ljr_scale=.7) + # start_time = time.time() + # total_list_batch, minima_list_batch, best_hop, energies, accepts, accept_rate, temps, step_sizes = predictor_batch.predict(initial_structures_energy, objective_func_energy, batch_size=8, log_per=0, lr=.05) + # top_energy = sorted(minima_list_batch, key=lambda struc: struc['objective_loss'])[:100] + # print('---------TOP 100 ENERGY STRUCTURES---------') + # print(top_energy) + # print('---------TOP 100 ENERGY STRUCTURES---------') + # minima_list_batch_ase = dict_to_atoms(minima_list_batch) + # top_energy_ase = dict_to_atoms(top_energy) + # for j, minima in enumerate(minima_list_batch_ase): + # filename = "all_4k_energy/iteration_"+str(i)+"_structure_"+str(j)+"_mdl_batch.cif" + # ase.io.write(filename, minima) + # for j, minima in enumerate(top_energy_ase): + # filename = "top_100_energy/iteration_"+str(i)+"_structure_"+str(j)+"_mdl_batch.cif" + # ase.io.write(filename, minima) + # f = open('output.txt', 'w') + # for i in range(len(total_list_batch)): + # f.write('Structure ' + str(i) + '\n') + # f.write('\tbest_hop: ' + str(best_hop[j]) + '\n') + # for hop in total_list_batch[i]: + # f.write("\tHop: " +str(hop['hop'])+ '\n') + # f.write("\t\tObjective loss: " +str(hop['objective_loss'])+ '\n') + # f.write("\t\tEnergy loss: "+str(hop['energy_loss'])+'\n') + # if getattr(objective_func_energy, 'normalize', False): + # f.write("\t\tUnnormalized energy loss: " +str(hop['unnormalized_loss'])+ '\n') + 
# f.write("\t\tNovel loss: "+str(hop['novel_loss']) + '\n') + # f.write("\t\tSoft sphere loss: "+ str(hop['soft_sphere_loss']) + '\n') + # f.write("\t\tComposition: " +str(hop['composition'])+ '\n') + # f.write("\t\tperturb: " +str(hop['perturb'])+ '\n') + # f.close() + # print('Time taken for energy: {:.2f}'.format(time.time() - start_time)) + start_time = time.time() - total_list_batch, minima_list_batch, best_hop, energies, accepts, accept_rate, temps, step_sizes = predictor_batch.predict(initial_structures, objective_func, batch_size=8, log_per=5, lr=.05) + total_list_batch, minima_list_batch, best_hop, energies, accepts, accept_rate, temps, step_sizes = predictor_batch.predict(initial_structures_novelty, objective_func_novelty, batch_size=8, log_per=0, lr=.05) + top_novelty = sorted(minima_list_batch, key=lambda struc: struc['objective_loss'])[:400] + print('---------TOP 400 NOVELTY STRUCTURES---------') + print(top_novelty) + print('---------TOP 400 NOVELTY STRUCTURES---------') minima_list_batch_ase = dict_to_atoms(minima_list_batch) + top_novelty_ase = dict_to_atoms(top_novelty) for j, minima in enumerate(minima_list_batch_ase): - filename = "iteration_"+str(i)+"_structure_"+str(j)+"_mdl_batch.cif" + filename = "all_16k_novelty_5/iteration_"+str(i)+"_structure_"+str(j)+"_mdl_batch.cif" ase.io.write(filename, minima) - f = open('output.txt', 'w') - for i in range(len(total_list_batch)): - f.write('Structure ' + str(i) + '\n') - f.write('\tbest_hop: ' + str(best_hop[j]) + '\n') - for hop in total_list_batch[i]: - f.write("\tHop: " +str(hop['hop'])+ '\n') - f.write("\t\tObjective loss: " +str(hop['objective_loss'])+ '\n') - f.write("\t\tEnergy loss: "+str(hop['energy_loss'])+'\n') - if getattr(objective_func, 'normalize', False): - f.write("\t\tUnnormalized energy loss: " +str(hop['unnormalized_loss'])+ '\n') - f.write("\t\tNovel loss: "+str(hop['novel_loss']) + '\n') - f.write("\t\tSoft sphere loss: "+ str(hop['soft_sphere_loss']) + '\n') - f.write("\t\tComposition: " +str(hop['composition'])+ '\n') - f.write("\t\tperturb: " +str(hop['perturb'])+ '\n') - f.close() - print('Time taken: {:.2f}'.format(time.time() - start_time)) + for j, minima in enumerate(top_novelty_ase): + filename = "top_400_novelty_5/iteration_"+str(i)+"_structure_"+str(j)+"_mdl_batch.cif" + ase.io.write(filename, minima) + # f = open('output.txt', 'w') + # for i in range(len(total_list_batch)): + # f.write('Structure ' + str(i) + '\n') + # f.write('\tbest_hop: ' + str(best_hop[j]) + '\n') + # for hop in total_list_batch[i]: + # f.write("\tHop: " +str(hop['hop'])+ '\n') + # f.write("\t\tObjective loss: " +str(hop['objective_loss'])+ '\n') + # f.write("\t\tEnergy loss: "+str(hop['energy_loss'])+'\n') + # if getattr(objective_func_novelty, 'normalize', False): + # f.write("\t\tUnnormalized energy loss: " +str(hop['unnormalized_loss'])+ '\n') + # f.write("\t\tNovel loss: "+str(hop['novel_loss']) + '\n') + # f.write("\t\tSoft sphere loss: "+ str(hop['soft_sphere_loss']) + '\n') + # f.write("\t\tComposition: " +str(hop['composition'])+ '\n') + # f.write("\t\tperturb: " +str(hop['perturb'])+ '\n') + # f.close() + print('Time taken for novelty: {:.2f}'.format(time.time() - start_time)) + for i, energy_list in enumerate(energies): plt.scatter(range(len(energy_list)), energy_list, label=f'Structure {i + 1}', color=['g' if a else 'r' for a in accepts[i]]) From 618db3456007e443c942149e2345fec0f382b72e Mon Sep 17 00:00:00 2001 From: Rithwik Seth Date: Fri, 31 May 2024 14:11:35 -0700 Subject: [PATCH 07/16] Added finetuning 
script --- msp/forcefield/mdl_ff.py | 2 +- scripts/example.py | 6 +++--- scripts/finetune.py | 11 +++++++++++ 3 files changed, 15 insertions(+), 4 deletions(-) create mode 100644 scripts/finetune.py diff --git a/msp/forcefield/mdl_ff.py b/msp/forcefield/mdl_ff.py index 70f35c1..e47a62f 100644 --- a/msp/forcefield/mdl_ff.py +++ b/msp/forcefield/mdl_ff.py @@ -164,7 +164,7 @@ def process_data(self, dataset): if 'forces' in struc and struc['forces'] is not None: data.forces = torch.tensor(struc['forces']) if 'stress' in struc and struc['stress'] is not None: - data.stress = torch.tensor(struc['stress']) + data.stress = torch.tensor(struc['stress']).unsqueeze(0) #optional data.u = torch.tensor(np.zeros((3))[np.newaxis, ...]).float() if 'potential_energy' in struc: diff --git a/scripts/example.py b/scripts/example.py index 44cc692..31e8103 100644 --- a/scripts/example.py +++ b/scripts/example.py @@ -97,7 +97,7 @@ # alternatively if we dont use ASE, we can optimize in batch, and optimize over multiple objectives as well # we do this by first initializing our objective function, which is similar to the loss function class in matdeeplearn # objective_func_energy = Energy(normalize=True, ljr_ratio=1) - objective_func_novelty = EmbeddingDistance(embeddings, normalize=True, energy_ratio=5, ljr_ratio=1, ljr_scale=.7, embedding_ratio=.1) + objective_func_novelty = EmbeddingDistance(embeddings, normalize=True, energy_ratio=2, ljr_ratio=1, ljr_scale=.7, embedding_ratio=.1) # objective_func = EnergyAndUncertainty(normalize=True, uncertainty_ratio=.25, ljr_ratio=1, ljr_scale=.7) # start_time = time.time() # total_list_batch, minima_list_batch, best_hop, energies, accepts, accept_rate, temps, step_sizes = predictor_batch.predict(initial_structures_energy, objective_func_energy, batch_size=8, log_per=0, lr=.05) @@ -139,10 +139,10 @@ minima_list_batch_ase = dict_to_atoms(minima_list_batch) top_novelty_ase = dict_to_atoms(top_novelty) for j, minima in enumerate(minima_list_batch_ase): - filename = "all_16k_novelty_5/iteration_"+str(i)+"_structure_"+str(j)+"_mdl_batch.cif" + filename = "all_16k_novelty_2/iteration_"+str(i)+"_structure_"+str(j)+"_mdl_batch.cif" ase.io.write(filename, minima) for j, minima in enumerate(top_novelty_ase): - filename = "top_400_novelty_5/iteration_"+str(i)+"_structure_"+str(j)+"_mdl_batch.cif" + filename = "top_400_novelty_2/iteration_"+str(i)+"_structure_"+str(j)+"_mdl_batch.cif" ase.io.write(filename, minima) # f = open('output.txt', 'w') # for i in range(len(total_list_batch)): diff --git a/scripts/finetune.py b/scripts/finetune.py new file mode 100644 index 0000000..9847c88 --- /dev/null +++ b/scripts/finetune.py @@ -0,0 +1,11 @@ +from msp.forcefield import MDL_FF, MACE_FF, M3GNet_FF +import json + + +my_dataset = json.load(open("../data/iter_one_structures/data.json", "r")) + +train_config = 'mdl_config.yml' +forcefield = MDL_FF(train_config, my_dataset) + + +forcefield.update(my_dataset, 1, 0, 0, max_epochs=30, save_model=True, batch_size=48, save_path='fine_tuned_models') \ No newline at end of file From 5926ea43ee342e237fe0ebbd2e1376b85e602b4c Mon Sep 17 00:00:00 2001 From: Rithwik Seth Date: Mon, 11 Nov 2024 13:21:41 -0800 Subject: [PATCH 08/16] Optimizing Z support --- msp/forcefield/mdl_ff.py | 75 ++++++++++++++++++++++-------- msp/structure/structure_util.py | 4 ++ msp/utils/objectives/objectives.py | 20 +++++++- scripts/example.py | 24 +++++----- scripts/finetune.py | 25 +++++++++- scripts/mdl_config.yml | 25 ++++++---- 6 files changed, 129 insertions(+), 44 deletions(-) 
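The hunks below thread a Gaussian-smeared atomic-number feature (gauss_atom_features) through data processing, the objective, and the optimizer, so that the composition Z can be relaxed by gradient descent alongside positions and cell and then snapped back to integer species with argmax. A minimal, self-contained sketch of that idea, assuming a hypothetical gaussian_smear_z helper in place of matdeeplearn's GaussianSmearing2D (whose exact signature is not reproduced in this patch) and a toy quadratic stand-in for the model energy:

import torch

def gaussian_smear_z(z, num_bins=100, width=0.5):
    # Hypothetical stand-in for GaussianSmearing2D: a soft one-hot over atomic number.
    # Bin i is centered on Z = i, so argmax over the last dimension recovers the species.
    centers = torch.arange(num_bins, dtype=torch.float)
    return torch.exp(-0.5 * ((z.float().unsqueeze(-1) - centers) / width) ** 2)

# Toy differentiable "energy" of the soft composition; a stand-in for the model,
# chosen so the optimum is a known target composition (here Li, F, Ti).
target = gaussian_smear_z(torch.tensor([3, 9, 22]))
def toy_energy(gauss_z):
    return ((gauss_z - target) ** 2).sum()

z = torch.tensor([8, 8, 8])                          # start from three oxygen atoms
gauss_z = gaussian_smear_z(z).requires_grad_(True)   # differentiable composition
opt = torch.optim.Adam([gauss_z], lr=0.05)
for _ in range(300):
    opt.zero_grad()
    toy_energy(gauss_z).backward()
    opt.step()

print(gauss_z.argmax(dim=-1))  # prints tensor([ 3,  9, 22]): discrete Z recovered by argmax

The patch itself additionally regularizes the soft features toward an ideal smearing of the current Z and penalizes negative feature values (the new gaussian_loss term); the sketch above omits that for brevity.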
diff --git a/msp/forcefield/mdl_ff.py b/msp/forcefield/mdl_ff.py index e47a62f..1cae9d0 100644 --- a/msp/forcefield/mdl_ff.py +++ b/msp/forcefield/mdl_ff.py @@ -21,6 +21,7 @@ from matdeeplearn.trainers.property_trainer import PropertyTrainer from matdeeplearn.common.data import dataset_split from msp.structure.structure_util import atoms_to_data, data_to_atoms +from matdeeplearn.preprocessor.helpers import GaussianSmearing2D from sklearn.cluster import MiniBatchKMeans from sklearn.metrics import silhouette_score from torch_scatter import scatter_mean @@ -55,7 +56,7 @@ def __init__(self, train_config, dataset): - def train(self, dataset, train_ratio, val_ratio, test_ratio, max_epochs=None, lr=None, batch_size=None, save_path='saved_model'): + def train(self, dataset, train_ratio, val_ratio, test_ratio, max_epochs=None, lr=None, batch_size=None, save_path='saved_model', save_model=True): """ Train the force field model on the dataset. Args: @@ -82,16 +83,17 @@ def train(self, dataset, train_ratio, val_ratio, test_ratio, max_epochs=None, lr self.trainer.train() #state = {"state_dict": self.model.state_dict()} - os.makedirs(save_path, exist_ok=True) - for i in range(len(self.trainer.model)): - sub_path = os.path.join(save_path, f"checkpoint_{i}",) - os.makedirs(sub_path, exist_ok=True) - if str(self.trainer.rank) not in ("cpu", "cuda"): - state = {"state_dict": self.trainer.model[i].module.state_dict()} - else: - state = {"state_dict": self.trainer.model[i].state_dict()} - model_path = os.path.join(sub_path, "best_checkpoint.pt") - torch.save(state, model_path) + if save_model: + os.makedirs(save_path, exist_ok=True) + for i in range(len(self.trainer.model)): + sub_path = os.path.join(save_path, f"checkpoint_{i}",) + os.makedirs(sub_path, exist_ok=True) + if str(self.trainer.rank) not in ("cpu", "cuda"): + state = {"state_dict": self.trainer.model[i].module.state_dict()} + else: + state = {"state_dict": self.trainer.model[i].state_dict()} + model_path = os.path.join(sub_path, "best_checkpoint.pt") + torch.save(state, model_path) gc.collect() torch.cuda.empty_cache() @@ -137,6 +139,30 @@ def update(self, dataset, train_ratio, val_ratio, test_ratio, max_epochs=None, l torch.save(state, model_path) gc.collect() torch.cuda.empty_cache() + + + def validate(self, dataset, val_ratio=1, batch_size=None): + """ + Evaluate the force field model on the dataset. + Args: + dataset (dict): A dictionary of the dataset. + val_ratio (float): The ratio of the dataset to use for validation. + batch_size (int): The batch size for the model. Defaults to value in the training configuration file. 
+ """ + dataset = self.process_data(dataset) + dataset = dataset['full'] + _, self.dataset["val"], _ = dataset_split( + dataset, + 0, + val_ratio, + 0, + ) + self.update_trainer(self.dataset, batch_size=batch_size) + metrics = self.trainer.validate(split="val") + for i in range(len(metrics)): + print(f"Model {i} validation metrics: {metrics[i]['loss']}") + + def process_data(self, dataset): """ @@ -148,6 +174,7 @@ def process_data(self, dataset): """ #add tqdm new_data_list = [Data() for _ in range(len(dataset))] + gauss = GaussianSmearing2D(.35, .5, 100) for i, struc in enumerate(dataset): data = new_data_list[i] data.n_atoms = len(struc['atomic_numbers']) @@ -155,7 +182,7 @@ def process_data(self, dataset): data.cell = torch.tensor(np.array(struc['cell']), dtype=torch.float).view(1, 3, 3).float() if (np.array(data.cell) == np.array([[0.0, 0.0, 0.0],[0.0, 0.0, 0.0],[0.0, 0.0, 0.0]])).all(): - data.cell = torch.zeros((3,3)).unsqueeze(0) + data.cell = torch.zeros((3,3)).unsqueeze(0).unsqueeze(0) if 'structure_id' in struc: data.structure_id = [struc['structure_id']] else: @@ -167,6 +194,9 @@ def process_data(self, dataset): data.stress = torch.tensor(struc['stress']).unsqueeze(0) #optional data.u = torch.tensor(np.zeros((3))[np.newaxis, ...]).float() + + data.gauss_atom_features = gauss(data.z) + if 'potential_energy' in struc: data.y = torch.tensor(np.array([struc['potential_energy']])).float() if 'y' in struc: @@ -394,13 +424,16 @@ def optimize(self, atoms, steps, objective_func, log_per, learning_rate, num_str batch = next(loader_iter).to(device) objective_func.set_norm_offset(batch.z, batch.n_atoms) pos, cell = batch.pos, batch.cell + gauss_z = batch.gauss_atom_features + print(batch.z, "before optimization") - opt = getattr(torch.optim, optim, torch.optim.Adam)([pos, cell], lr=learning_rate) + opt = getattr(torch.optim, optim, torch.optim.Adam)([pos, cell, gauss_z], lr=learning_rate) lr_scheduler = ReduceLROnPlateau(opt, 'min', factor=0.8, patience=10) pos.requires_grad_(True) if cell_relax: cell.requires_grad_(True) + gauss_z.requires_grad_(True) temp_obj = [0] temp_energy = [0] @@ -416,14 +449,15 @@ def closure(step, temp_obj, temp_energy, temp_novel, temp_soft_sphere, batch): curr_time = time.time() - start_time if log_per > 0 and step[0] % log_per == 0: if cell_relax: - print("Structure ID: {}, Step: {}, Objective Loss: {:.6f}, Pos Gradient: {:.6f}, Cell Gradient: {:.6f}, Time: {:.6f}".format(len(batch.structure_id), - step[0], objective_loss.mean().item(), pos.grad.abs().mean().item(), cell.grad.abs().mean().item(), curr_time)) + print("Structure ID: {}, Step: {}, Objective Loss: {:.6f}, Pos Gradient: {:.6f}, Cell Gradient: {:.6f}, Z Gradient: {:.6f}, Time: {:.6f}".format(len(batch.structure_id), + step[0], objective_loss.mean().item(), pos.grad.abs().mean().item(), cell.grad.abs().mean().item(), gauss_z.grad.abs().mean().item(), curr_time)) else: - print("Structure ID: {}, Step: {}, Objective Loss: {:.6f}, Pos Gradient: {:.6f}, Time: {:.6f}".format(len(batch.structure_id), - step[0], objective_loss.mean().item(), pos.grad.abs().mean().item(), curr_time)) + print("Structure ID: {}, Step: {}, Objective Loss: {:.6f}, Pos Gradient: {:.6f}, Z Gradient: {:.6f}, Time: {:.6f}".format(len(batch.structure_id), + step[0], objective_loss.mean().item(), pos.grad.abs().mean().item(), gauss_z.grad.abs().mean().item(), curr_time)) step[0] += 1 batch.pos, batch.cell = pos, cell - # batch.z = optimized_z + batch.gauss_atom_features = gauss_z + batch.z = gauss_z.argmax(dim=-1) temp_obj[0] = 
objective_loss temp_energy[0] = energy_loss temp_novel[0] = novel_loss @@ -434,13 +468,14 @@ def closure(step, temp_obj, temp_energy, temp_novel, temp_soft_sphere, batch): old_step = step[0] loss = opt.step(lambda: closure(step, temp_obj, temp_energy, temp_novel, temp_soft_sphere, batch)) lr_scheduler.step(loss) - + batch.z = batch.gauss_atom_features.argmax(dim=-1) res_atoms.extend(data_to_atoms(batch)) obj_loss.extend(temp_obj[0].cpu().detach().numpy()) energy_loss.extend(temp_energy[0].cpu().detach().numpy()) novel_loss.extend(temp_novel[0].cpu().detach().numpy()) soft_sphere_loss.extend(temp_soft_sphere[0].cpu().detach().numpy()) - batch.z = batch.z.type(torch.int64) + print(batch.gauss_atom_features) + print(batch.z, "after optimization") for i in range(len(self.trainer.model)): self.trainer.model[i].gradient = True diff --git a/msp/structure/structure_util.py b/msp/structure/structure_util.py index 3485054..8613f63 100644 --- a/msp/structure/structure_util.py +++ b/msp/structure/structure_util.py @@ -7,6 +7,8 @@ import smact from smact.screening import pauling_test import itertools +from matdeeplearn.preprocessor.helpers import GaussianSmearing2D + @@ -148,6 +150,7 @@ def atoms_to_data(atoms): """ n_structures = len(atoms) data_list = [Data() for _ in range(n_structures)] + gauss = GaussianSmearing2D(.35, .5, 100) for i, s in enumerate(atoms): data = s @@ -163,6 +166,7 @@ def atoms_to_data(atoms): data_list[i].structure_id = [structure_id] data_list[i].z = atomic_numbers data_list[i].u = torch.Tensor(np.zeros((3))[np.newaxis, ...]) + data_list[i].gauss_atom_features = gauss(data_list[i].z) return data_list diff --git a/msp/utils/objectives/objectives.py b/msp/utils/objectives/objectives.py index a28a46d..704f536 100644 --- a/msp/utils/objectives/objectives.py +++ b/msp/utils/objectives/objectives.py @@ -3,6 +3,7 @@ import numpy as np import time as time from torch_scatter import scatter_add +from matdeeplearn.preprocessor.helpers import GaussianSmearing2D class Energy(torch.nn.Module): @@ -38,6 +39,7 @@ def __init__(self, normalize=True, energy_ratio=1.0, ljr_ratio=1.0, ljr_power=12 -9.95718903, -11.85777763, -12.95813023, -12.444527185, -11.22736743, -8.83843418, -6.07113332, -3.273882, -0.303680365, -2.3626431466666666, -3.71264707, -3.89003431, -10000, -10000, -10000, -10000, -10000, -4.1211750075, -7.41385825, -9.51466466, -11.29141001, -12.94777968125, -14.26783833, -10000, -10000, -10000, -10000, -10000, -10000] + self.gauss = GaussianSmearing2D(.35, .5, 100) def set_norm_offset(self, z, n_atoms): """ @@ -87,6 +89,21 @@ def norm_to_raw_loss(self, loss, z): loss *= len(z) loss -= offset return loss + + def gaussian_loss(self, batch): + """ + Calculate the Gaussian loss + Args: + batch (torch_geometric.data.Batch): Batch of data + Returns: + torch.Tensor: Gaussian loss + """ + ideal_gauss_features = self.gauss(batch.z) + dist = torch.cdist(batch.gauss_atom_features, ideal_gauss_features, p=2) + temp = batch.gauss_atom_features + temp = -temp[temp < 0] + return torch.mean(dist) + temp.sum() + def forward(self, model_output, batch): """ @@ -105,7 +122,8 @@ def forward(self, model_output, batch): # for i in range(len(batch.n_atoms)): # model_output['potential_energy'][i] = (model_output['potential_energy'][i] + self.offset[i]) / batch.n_atoms[i] ljr = self.lj_repulsion(batch, power=self.ljr_power) - return self.energy_ratio * model_output["potential_energy"] + self.ljr_ratio * ljr, model_output["potential_energy"], torch.zeros(len(model_output['potential_energy']), 
1).to(ljr.device), ljr + gauss_loss = self.gaussian_loss(batch) + return self.energy_ratio * model_output["potential_energy"] + self.ljr_ratio * ljr + gauss_loss, model_output["potential_energy"], torch.zeros(len(model_output['potential_energy']), 1).to(ljr.device), ljr class EnergyAndUncertainty(Energy): def __init__(self, normalize=True, energy_ratio=1.0, ljr_ratio=1, ljr_power=12, ljr_scale=.8, uncertainty_ratio=.25, min_ljr_val=1.0): diff --git a/scripts/example.py b/scripts/example.py index 31e8103..b673d14 100644 --- a/scripts/example.py +++ b/scripts/example.py @@ -31,13 +31,13 @@ max_iterations=1 #Initialize a forcefield class, reading in from config (we use MDL_FF but it can be a force field from another library) -train_config = 'mdl_config.yml' +train_config = 'optimize_z.yml' forcefield = MDL_FF(train_config, my_dataset) embeddings = forcefield.get_embeddings(my_dataset, batch_size=40, cluster=False) #predictor = BasinHoppingASE(forcefield, hops=5, steps=100, optimizer="FIRE", dr=0.5) -predictor_batch = BasinHoppingBatch(forcefield, hops=10, steps=100, dr=0.6, optimizer='Adam', perturbs=['pos', 'cell']) +predictor_batch = BasinHoppingBatch(forcefield, hops=50, steps=100, dr=0.6, optimizer='Adam', perturbs=['pos', 'cell']) # forcefield_mace = MACE_FF() @@ -63,13 +63,13 @@ forcefield.update(predicted_structures, 1, 0, 0, max_epochs=30, save_model=False) # compositions = generate_random_compositions(my_dataset, n=8, max_elements=5, max_atoms=20) - compositions_novelty = generate_random_lithium_compositions(my_dataset, n=16000) - initial_structures_novelty = [init_structure(c, pyxtal=True) for c in compositions_novelty] + compositions_novelty = generate_random_lithium_compositions(my_dataset, n=10) + initial_structures_novelty = [init_structure(c, pyxtal=False) for c in compositions_novelty] # compositions_energy = generate_random_lithium_compositions(my_dataset, n=4000) # initial_structures_energy = [init_structure(c, pyxtal=True) for c in compositions_energy] - # for j, minima in enumerate(dict_to_atoms(initial_structures)): - # filename = "initial_iteration_"+str(i)+"_structure_"+str(j)+".cif" - # ase.io.write(filename, minima) + for j, minima in enumerate(dict_to_atoms(initial_structures_novelty)): + filename = "optim_z/initial_iteration_"+str(i)+"_structure_"+str(j)+".cif" + ase.io.write(filename, minima) # read_structure = ase.io.read("init.cif") # initial_structures=[atoms_to_dict([read_structure], loss=[None])] @@ -96,8 +96,8 @@ #---Optimizing a batch of structures with batch basin hopping--- # alternatively if we dont use ASE, we can optimize in batch, and optimize over multiple objectives as well # we do this by first initializing our objective function, which is similar to the loss function class in matdeeplearn - # objective_func_energy = Energy(normalize=True, ljr_ratio=1) - objective_func_novelty = EmbeddingDistance(embeddings, normalize=True, energy_ratio=2, ljr_ratio=1, ljr_scale=.7, embedding_ratio=.1) + objective_func_energy = Energy(normalize=True, ljr_ratio=1) + # objective_func_novelty = EmbeddingDistance(embeddings, normalize=True, energy_ratio=2, ljr_ratio=1, ljr_scale=.7, embedding_ratio=.1) # objective_func = EnergyAndUncertainty(normalize=True, uncertainty_ratio=.25, ljr_ratio=1, ljr_scale=.7) # start_time = time.time() # total_list_batch, minima_list_batch, best_hop, energies, accepts, accept_rate, temps, step_sizes = predictor_batch.predict(initial_structures_energy, objective_func_energy, batch_size=8, log_per=0, lr=.05) @@ -131,7 +131,7 @@ # print('Time 
taken for energy: {:.2f}'.format(time.time() - start_time)) start_time = time.time() - total_list_batch, minima_list_batch, best_hop, energies, accepts, accept_rate, temps, step_sizes = predictor_batch.predict(initial_structures_novelty, objective_func_novelty, batch_size=8, log_per=0, lr=.05) + total_list_batch, minima_list_batch, best_hop, energies, accepts, accept_rate, temps, step_sizes = predictor_batch.predict(initial_structures_novelty, objective_func_energy, batch_size=1, log_per=1, lr=.01) top_novelty = sorted(minima_list_batch, key=lambda struc: struc['objective_loss'])[:400] print('---------TOP 400 NOVELTY STRUCTURES---------') print(top_novelty) @@ -139,10 +139,10 @@ minima_list_batch_ase = dict_to_atoms(minima_list_batch) top_novelty_ase = dict_to_atoms(top_novelty) for j, minima in enumerate(minima_list_batch_ase): - filename = "all_16k_novelty_2/iteration_"+str(i)+"_structure_"+str(j)+"_mdl_batch.cif" + filename = "optim_z/iteration_"+str(i)+"_structure_"+str(j)+"_mdl_batch.cif" ase.io.write(filename, minima) for j, minima in enumerate(top_novelty_ase): - filename = "top_400_novelty_2/iteration_"+str(i)+"_structure_"+str(j)+"_mdl_batch.cif" + filename = "optim_z/iteration_"+str(i)+"_structure_"+str(j)+"_mdl_batch.cif" ase.io.write(filename, minima) # f = open('output.txt', 'w') # for i in range(len(total_list_batch)): diff --git a/scripts/finetune.py b/scripts/finetune.py index 9847c88..da2d59f 100644 --- a/scripts/finetune.py +++ b/scripts/finetune.py @@ -1,11 +1,34 @@ from msp.forcefield import MDL_FF, MACE_FF, M3GNet_FF import json +import numpy as np +from msp.structure.structure_util import dict_to_atoms +import ase +eval_dataset = json.load(open("../data/data_subset_msp.json", "r")) +# get a random subset of eval_dataset +eval_dataset = eval_dataset my_dataset = json.load(open("../data/iter_one_structures/data.json", "r")) +for data in my_dataset: + data["stress"] = np.array(data["stress"])*0.006242*-0.1 +#my_dataset = json.load(open("../data/data_subset_msp.json", "r")) train_config = 'mdl_config.yml' + + + forcefield = MDL_FF(train_config, my_dataset) +print("Evaluating before finetuning on eval_dataset") +forcefield.validate(eval_dataset, val_ratio=1, batch_size=12) + +print("Evaluating before finetuning on finetuning_dataset") +forcefield.validate(my_dataset, val_ratio=1, batch_size=12) + +forcefield.train(my_dataset, .95, .05, 0, max_epochs=100, save_model=False, batch_size=12, save_path='fine_tuned_models') + +print("Evaluating after finetuning on finetuning_dataset") +forcefield.validate(my_dataset, val_ratio=1, batch_size=12) -forcefield.update(my_dataset, 1, 0, 0, max_epochs=30, save_model=True, batch_size=48, save_path='fine_tuned_models') \ No newline at end of file +print("Evaluating after finetuning on eval_dataset") +forcefield.validate(eval_dataset, val_ratio=1, batch_size=12) diff --git a/scripts/mdl_config.yml b/scripts/mdl_config.yml index 596f2ef..047043e 100644 --- a/scripts/mdl_config.yml +++ b/scripts/mdl_config.yml @@ -5,15 +5,17 @@ task: identifier: my_train_job parallel: False # If seed is not set, then it will be random every time - seed: 200 + seed: # Defaults to run directory if not specified save_dir: mdl_training/ # continue from a previous job - continue_job: True + continue_job: False # spefcify if the training state is loaded: epochs, learning rate, etc load_training_state: False # Path to the checkpoint.pt file - checkpoint_path: 
"../ensemble_model/torchmd-1/checkpoint_0/best_checkpoint.pt,../ensemble_model/torchmd-2/checkpoint_0/best_checkpoint.pt,../ensemble_model/torchmd-3/checkpoint_0/best_checkpoint.pt,../ensemble_model/torchmd-4/checkpoint_0/best_checkpoint.pt,../ensemble_model/torchmd-5/checkpoint_0/best_checkpoint.pt" + #checkpoint_path: "../ensemble_model/torchmd-1/checkpoint_0/best_checkpoint.pt,../ensemble_model/torchmd-2/checkpoint_0/best_checkpoint.pt,../ensemble_model/torchmd-3/checkpoint_0/best_checkpoint.pt,../ensemble_model/torchmd-4/checkpoint_0/best_checkpoint.pt,../ensemble_model/torchmd-5/checkpoint_0/best_checkpoint.pt" + #checkpoint_path: "/global/cfs/projectdirs/m3641/Rithwik/MatStructPredict/ensemble_model/torchmd-1/checkpoint_0/best_checkpoint.pt,/global/cfs/projectdirs/m3641/Rithwik/MatStructPredict/ensemble_model/torchmd-2/checkpoint_0/best_checkpoint.pt,/global/cfs/projectdirs/m3641/Rithwik/MatStructPredict/ensemble_model/torchmd-3/checkpoint_0/best_checkpoint.pt,/global/cfs/projectdirs/m3641/Rithwik/MatStructPredict/ensemble_model/torchmd-4/checkpoint_0/best_checkpoint.pt,/global/cfs/projectdirs/m3641/Rithwik/MatStructPredict/ensemble_model/torchmd-5/checkpoint_0/best_checkpoint.pt" + #checkpoint_path: "/global/cfs/projectdirs/m3641/Rithwik/MatStructPredict/scripts/fine_tuned_models/checkpoint_0/best_checkpoint.pt,/global/cfs/projectdirs/m3641/Rithwik/MatStructPredict/scripts/fine_tuned_models/checkpoint_1/best_checkpoint.pt,/global/cfs/projectdirs/m3641/Rithwik/MatStructPredict/scripts/fine_tuned_models/checkpoint_2/best_checkpoint.pt,/global/cfs/projectdirs/m3641/Rithwik/MatStructPredict/scripts/fine_tuned_models/checkpoint_3/best_checkpoint.pt,/global/cfs/projectdirs/m3641/Rithwik/MatStructPredict/scripts/fine_tuned_models/checkpoint_4/best_checkpoint.pt" #checkpoint_path: "../pretrained_models/best_checkpoint.pt" #checkpoint_path: "../ensemble_model/torchmd-1/checkpoint_0/best_checkpoint.pt" # Whether to write predictions to csv file. E.g. ["train", "val", "test"] @@ -21,7 +23,7 @@ task: # Frequency of writing to file; 0 denotes writing only at the end, 1 denotes writing every time output_frequency: 1 # Frequency of saving model .pt file; 0 denotes saving only at the end, 1 denotes saving every time, -1 denotes never saving; this controls both checkpoint and best_checkpoint - model_save_frequency: 1 + model_save_frequency: -1 # Specify if labels are provided for the predict task # labels: True # Use amp mixed precision @@ -53,16 +55,17 @@ model: model_ensemble: 5 optim: - max_epochs: 5 + max_epochs: 30 max_checkpoint_epochs: 0 - lr: 0.00001 + lr: 0.0001 # Either custom or from torch.nn.functional library. 
If from torch, loss_type is TorchLossWrapper loss: loss_type: ForceStressLoss - loss_args: {weight_energy: 0.01, weight_force: 50.0, weight_stress: 50.0} + loss_args: {weight_energy: 0.01, weight_force: 50.0, weight_stress: 0.0} + #loss_args: {weight_energy: 0, weight_force: 50.0, weight_stress: 0} # gradient clipping value clip_grad_norm: 10 - batch_size: 32 + batch_size: 12 optimizer: optimizer_type: AdamW optimizer_args: {} @@ -80,7 +83,8 @@ dataset: processed: False # Path to data files - this can either be in the form of a string denoting a single path or a dictionary of {train: train_path, val: val_path, test: test_path, predict: predict_path} #src: data/data_subset_msp.json - src: /global/cfs/projectdirs/m3641/Shared/Materials_datasets/MP_data_forces/raw/data_subset_mdl_test.json + #src: /global/cfs/projectdirs/m3641/Shared/Materials_datasets/MP_data_forces/raw/data_subset_mdl_test.json + src: /global/cfs/projectdirs/m3641/Rithwik/MatStructPredict/data/iter_one_structures/data.json # Path to target file within data_path - this can either be in the form of a string denoting a single path or a dictionary of {train: train_path, val: val_path, test: test_path} or left blank when the dataset is a single json file # Example: target_path: "data/raw_graph_scalar/targets.csv" target_path: @@ -128,7 +132,8 @@ dataset: self_loop: True # Method of obtaining atom dictionary: available: (onehot) node_representation: onehot - all_neighbors: True + all_neighbors: True + # Number of workers for dataloader, see https://pytorch.org/docs/stable/data.html num_workers: 0 # Where the dataset is loaded; either "cpu" or "cuda" From 90a4130e4eb30dbfc3f715964441246941fad4a0 Mon Sep 17 00:00:00 2001 From: Rithwik Seth Date: Mon, 11 Nov 2024 14:17:39 -0800 Subject: [PATCH 09/16] Optim Z update --- scripts/optimize_z.yml | 150 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 150 insertions(+) create mode 100644 scripts/optimize_z.yml diff --git a/scripts/optimize_z.yml b/scripts/optimize_z.yml new file mode 100644 index 0000000..3bfdfda --- /dev/null +++ b/scripts/optimize_z.yml @@ -0,0 +1,150 @@ +trainer: property + +task: + run_mode: train + identifier: my_train_job + parallel: False + # If seed is not set, then it will be random every time + seed: + # Defaults to run directory if not specified + save_dir: mdl_training/ + # continue from a previous job + continue_job: True + # spefcify if the training state is loaded: epochs, learning rate, etc + load_training_state: False + # Path to the checkpoint.pt file + #checkpoint_path: "../ensemble_model/torchmd-1/checkpoint_0/best_checkpoint.pt,../ensemble_model/torchmd-2/checkpoint_0/best_checkpoint.pt,../ensemble_model/torchmd-3/checkpoint_0/best_checkpoint.pt,../ensemble_model/torchmd-4/checkpoint_0/best_checkpoint.pt,../ensemble_model/torchmd-5/checkpoint_0/best_checkpoint.pt" + #checkpoint_path: "/global/cfs/projectdirs/m3641/Rithwik/MatStructPredict/ensemble_model/torchmd-1/checkpoint_0/best_checkpoint.pt,/global/cfs/projectdirs/m3641/Rithwik/MatStructPredict/ensemble_model/torchmd-2/checkpoint_0/best_checkpoint.pt,/global/cfs/projectdirs/m3641/Rithwik/MatStructPredict/ensemble_model/torchmd-3/checkpoint_0/best_checkpoint.pt,/global/cfs/projectdirs/m3641/Rithwik/MatStructPredict/ensemble_model/torchmd-4/checkpoint_0/best_checkpoint.pt,/global/cfs/projectdirs/m3641/Rithwik/MatStructPredict/ensemble_model/torchmd-5/checkpoint_0/best_checkpoint.pt" + #checkpoint_path: 
"/global/cfs/projectdirs/m3641/Rithwik/MatStructPredict/scripts/fine_tuned_models/checkpoint_0/best_checkpoint.pt,/global/cfs/projectdirs/m3641/Rithwik/MatStructPredict/scripts/fine_tuned_models/checkpoint_1/best_checkpoint.pt,/global/cfs/projectdirs/m3641/Rithwik/MatStructPredict/scripts/fine_tuned_models/checkpoint_2/best_checkpoint.pt,/global/cfs/projectdirs/m3641/Rithwik/MatStructPredict/scripts/fine_tuned_models/checkpoint_3/best_checkpoint.pt,/global/cfs/projectdirs/m3641/Rithwik/MatStructPredict/scripts/fine_tuned_models/checkpoint_4/best_checkpoint.pt" + #checkpoint_path: "../pretrained_models/best_checkpoint.pt" + checkpoint_path: "/global/cfs/projectdirs/m3641/Rithwik/MatDeepLearn_dev/results/coordinates/results/2024-10-21-16-11-38-017-my_train_job/checkpoint_0/best_checkpoint.pt" + # Whether to write predictions to csv file. E.g. ["train", "val", "test"] + write_output: [train, val, test] + # Frequency of writing to file; 0 denotes writing only at the end, 1 denotes writing every time + output_frequency: 1 + # Frequency of saving model .pt file; 0 denotes saving only at the end, 1 denotes saving every time, -1 denotes never saving; this controls both checkpoint and best_checkpoint + model_save_frequency: -1 + # Specify if labels are provided for the predict task + # labels: True + # Use amp mixed precision + use_amp: True + +model: + name: torchmd_et + # model attributes + hidden_channels: 128 + num_layers: 8 + num_rbf: 50 + activation: "silu" + attn_activation: "silu" + num_heads: 8 + num_post_layers: 1 + post_hidden_channels: 64 + pool: "global_add_pool" + pool_order: "late" + aggr: "add" + dropout_rate: 0.0 + use_gaussian: True + gauss_width_x: .35 + gauss_width_y: .5 + # Compute edge indices on the fly in the model forward + otf_edge_index: True + # Compute edge attributes on the fly in the model forward + otf_edge_attr: True + # Compute node attributes on the fly in the model forward + otf_node_attr: True + # compute gradients w.r.t to positions and cell, requires otf_edge_attr=True + gradient: True + model_ensemble: 1 + +optim: + max_epochs: 30 + max_checkpoint_epochs: 0 + lr: 0.0001 + # Either custom or from torch.nn.functional library. 
If from torch, loss_type is TorchLossWrapper + loss: + loss_type: ForceStressLoss + #loss_args: {weight_energy: 0.01, weight_force: 50.0, weight_stress: 0.0} + loss_args: {weight_energy: 0, weight_force: 50.0, weight_stress: 0} + # gradient clipping value + clip_grad_norm: 10 + batch_size: 12 + optimizer: + optimizer_type: AdamW + optimizer_args: {} + scheduler: + scheduler_type: ReduceLROnPlateau + scheduler_args: {mode: min, factor: 0.8, patience: 10, min_lr: 0.00001, threshold: 0.0002} + #Training print out frequency (print per n number of epochs) + verbosity: 5 + # tdqm progress bar per batch in the epoch + batch_tqdm: False + +dataset: + name: test_data + # Whether the data has already been processed and a data.pt file is present from a previous run + processed: False + # Path to data files - this can either be in the form of a string denoting a single path or a dictionary of {train: train_path, val: val_path, test: test_path, predict: predict_path} + #src: data/data_subset_msp.json + #src: /global/cfs/projectdirs/m3641/Shared/Materials_datasets/MP_data_forces/raw/data_subset_mdl_test.json + src: /global/cfs/projectdirs/m3641/Rithwik/MatStructPredict/data/iter_one_structures/data.json + # Path to target file within data_path - this can either be in the form of a string denoting a single path or a dictionary of {train: train_path, val: val_path, test: test_path} or left blank when the dataset is a single json file + # Example: target_path: "data/raw_graph_scalar/targets.csv" + target_path: + # Path to save processed data.pt file + pt_path: data/ + # Either "node" or "graph" level + prediction_level: graph + + transforms: + - name: GetY + args: + # index specifies the index of a target vector to predict, which is useful when there are multiple property labels for a single dataset + # For example, an index: 0 (default) will use the first entry in the target vector + # if all values are to be predicted simultaneously, then specify index: -1 + index: -1 + otf: True # Optional parameter, default is True + # Format of data files (limit to those supported by ASE: https://wiki.fysik.dtu.dk/ase/ase/io/io.html) + data_format: json + # specify if additional attributes to be loaded into the dataset from the .json file; e.g. 
additional_attributes: [forces, stress] + additional_attributes: [forces, stress] + # Print out processing info + verbose: True + # Index of target column in targets.csv + # graph specific settings + preprocess_params: + # one of mdl (minimum image convention), ocp (all neighbors included) + edge_calc_method: ocp + # determine if edges are computed, if false, then they need to be computed on the fly + preprocess_edges: False + # determine if edge attributes are computed during processing, if false, then they need to be computed on the fly + preprocess_edge_features: False + # determine if node attributes are computed during processing, if false, then they need to be computed on the fly + preprocess_node_features: False + # distance cutoff to determine if two atoms are connected by an edge + cutoff_radius : 8.0 + # maximum number of neighbors to consider (usually an arbitrarily high number to consider all neighbors) + n_neighbors : 250 + # number of pbc offsets to consider when determining neighbors (usually not changed) + num_offsets: 2 + # dimension of node attributes + node_dim : 100 + # dimension of edge attributes + edge_dim : 50 + # whether or not to add self-loops + self_loop: True + # Method of obtaining atom dictionary: available: (onehot) + node_representation: onehot + all_neighbors: True + gaussian_atom_features: True + gaussian_method: "coordinate" + max_z: 100 + + # Number of workers for dataloader, see https://pytorch.org/docs/stable/data.html + num_workers: 0 + # Where the dataset is loaded; either "cpu" or "cuda" + dataset_device: cpu + # Ratios for train/val/test split out of a total of less than 1 (0.8 corresponds to 80% of the data) + train_ratio: 0.9 + val_ratio: 0.05 + test_ratio: 0.05 From c3d186b177bc810b4ff329a63230e48bb2f22a5d Mon Sep 17 00:00:00 2001 From: Rithwik Seth Date: Mon, 11 Nov 2024 16:30:15 -0800 Subject: [PATCH 10/16] Code cleanup --- msp/composition/composition.py | 1 - msp/forcefield/mdl_ff.py | 39 ++- msp/optimizer/globalopt/basin_hopping.py | 10 +- msp/structure/structure_util.py | 1 + msp/utils/objectives/lj_rmins.csv | 94 +++++++ msp/utils/objectives/objectives.py | 34 ++- scripts/example.py | 338 +++++++---------------- scripts/finetune.py | 40 +-- 8 files changed, 273 insertions(+), 284 deletions(-) create mode 100644 msp/utils/objectives/lj_rmins.csv diff --git a/msp/composition/composition.py b/msp/composition/composition.py index 65ce053..bdf263f 100644 --- a/msp/composition/composition.py +++ b/msp/composition/composition.py @@ -16,7 +16,6 @@ def hash_structure(atomic_numbers): """ counts = Counter(atomic_numbers) sorted_counts = sorted(counts.items()) - # divide the counts by the gcd of the counts gcd = sorted_counts[0][1] for elem in sorted_counts: gcd = math.gcd(gcd, elem[1]) diff --git a/msp/forcefield/mdl_ff.py b/msp/forcefield/mdl_ff.py index 1cae9d0..2e2c265 100644 --- a/msp/forcefield/mdl_ff.py +++ b/msp/forcefield/mdl_ff.py @@ -82,7 +82,6 @@ def train(self, dataset, train_ratio, val_ratio, test_ratio, max_epochs=None, lr self.trainer = self.from_config_train(self.train_config, self.dataset, max_epochs, lr, batch_size) self.trainer.train() - #state = {"state_dict": self.model.state_dict()} if save_model: os.makedirs(save_path, exist_ok=True) for i in range(len(self.trainer.model)): @@ -123,7 +122,6 @@ def update(self, dataset, train_ratio, val_ratio, test_ratio, max_epochs=None, l test_ratio, ) self.update_trainer(self.dataset, max_epochs, lr, batch_size) - #self.model = self.trainer.model self.trainer.train() if save_model: @@ 
-285,7 +283,7 @@ def fmodel(params, buffers, x): return output - def get_embeddings(self, dataset, batch_size, cluster=False, num_clusters=5000): + def get_embeddings(self, dataset, batch_size, cluster=False, num_clusters=5000, cluster_batch_size=2048): """ Get embeddings from the model for the dataset. Args: @@ -293,6 +291,7 @@ def get_embeddings(self, dataset, batch_size, cluster=False, num_clusters=5000): batch_size (int): The batch size for the model. cluster (bool): Whether to cluster the embeddings. Defaults to False. num_clusters (int): The number of clusters to use. Defaults to 5000. + cluster_batch_size (int): The batch size for clustering. Defaults to 2048. Returns: torch.tensor: The embeddings from the model. @@ -323,7 +322,7 @@ def get_embeddings(self, dataset, batch_size, cluster=False, num_clusters=5000): if cluster: res = [] for i in range(len(self.trainer.model)): - clust = MiniBatchKMeans(init="k-means++", n_clusters=5000, batch_size=2048) + clust = MiniBatchKMeans(init="k-means++", n_clusters=num_clusters, batch_size=cluster_batch_size) start_time = time.time() cluster_labels = clust.fit_predict(embeddings[i].cpu().detach().numpy()) print('Model', i, 'clustering took', time.time() - start_time) @@ -380,7 +379,7 @@ def create_ase_calc(self): calculator = MDLCalculator(config=self.train_config) return calculator - def optimize(self, atoms, steps, objective_func, log_per, learning_rate, num_structures=-1, batch_size=4, device='cpu', cell_relax=True, optim='Adam'): + def optimize(self, atoms, steps, objective_func, log_per, learning_rate, num_structures=-1, batch_size=4, device='cpu', cell_relax=True, optim='Adam', optimize_z=False): """ Optimizes batches of structures using the force field model. Args: @@ -394,6 +393,7 @@ def optimize(self, atoms, steps, objective_func, log_per, learning_rate, num_str device (str): The device to use for optimization. Defaults to 'cpu'. cell_relax (bool): Whether to relax the cell. Defaults to True. optim (str): The optimizer to use. Defaults to 'Adam'. + optimize_z (bool): Whether to optimize the atomic numbers. Defaults to False. Returns: res_atoms (list): A list of optimized ASE atoms objects. 
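The `optimize_z` flag added here gates the continuous relaxation of atomic numbers: when it is enabled, the per-atom Gaussian-smeared species features are optimized alongside positions and cell, and an `argmax` over the feature channels decodes them back to integer atomic numbers. A minimal sketch of that idea, assuming a toy `gaussian_smear` helper in place of matdeeplearn's `GaussianSmearing2D` (whose exact functional form is not shown in this patch; the width and 100-channel count are assumptions loosely based on the `gauss_width_x` and `max_z` settings in `optimize_z.yml`):

```python
import torch

def gaussian_smear(z, num_channels=100, width=0.35):
    # Toy stand-in for GaussianSmearing2D: each integer species z_i becomes a
    # smooth profile over candidate atomic-number channels, peaked at z_i, so it
    # can receive gradients during optimization.
    centers = torch.arange(num_channels, dtype=torch.float32)
    return torch.exp(-((z.float().unsqueeze(-1) - centers) ** 2) / (2 * width ** 2))

z = torch.tensor([3, 8, 26])                      # Li, O, Fe
gauss_z = gaussian_smear(z).requires_grad_(True)  # continuous species variable

opt = torch.optim.Adam([gauss_z], lr=0.05)
target = gaussian_smear(torch.tensor([8, 8, 8]))  # toy objective: drive all atoms toward Z=8
for _ in range(300):
    opt.zero_grad()
    loss = (gauss_z - target).pow(2).mean()
    loss.backward()
    opt.step()

# Decode back to integer species the same way the patch does: argmax over channels.
print(gauss_z.argmax(dim=-1))                     # -> tensor([8, 8, 8]) once converged
```

In use, the flag threads from `BasinHoppingBatch.predict(..., optim_z=True)` into `MDL_FF.optimize(..., optimize_z=True)`; the default of `False` preserves the previous positions-and-cell-only behaviour.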
@@ -424,16 +424,23 @@ def optimize(self, atoms, steps, objective_func, log_per, learning_rate, num_str batch = next(loader_iter).to(device) objective_func.set_norm_offset(batch.z, batch.n_atoms) pos, cell = batch.pos, batch.cell - gauss_z = batch.gauss_atom_features - print(batch.z, "before optimization") - - opt = getattr(torch.optim, optim, torch.optim.Adam)([pos, cell, gauss_z], lr=learning_rate) + gauss_z = None + if optimize_z: + gauss_z = batch.gauss_atom_features + # print(batch.z, "before optimization") + + opt = None + if optimize_z: + opt = getattr(torch.optim, optim, torch.optim.Adam)([pos, cell, gauss_z], lr=learning_rate) + else: + opt = getattr(torch.optim, optim, torch.optim.Adam)([pos, cell], lr=learning_rate) lr_scheduler = ReduceLROnPlateau(opt, 'min', factor=0.8, patience=10) pos.requires_grad_(True) if cell_relax: cell.requires_grad_(True) - gauss_z.requires_grad_(True) + if optimize_z: + gauss_z.requires_grad_(True) temp_obj = [0] temp_energy = [0] @@ -456,8 +463,9 @@ def closure(step, temp_obj, temp_energy, temp_novel, temp_soft_sphere, batch): step[0], objective_loss.mean().item(), pos.grad.abs().mean().item(), gauss_z.grad.abs().mean().item(), curr_time)) step[0] += 1 batch.pos, batch.cell = pos, cell - batch.gauss_atom_features = gauss_z - batch.z = gauss_z.argmax(dim=-1) + if optimize_z: + batch.gauss_atom_features = gauss_z + batch.z = gauss_z.argmax(dim=-1) temp_obj[0] = objective_loss temp_energy[0] = energy_loss temp_novel[0] = novel_loss @@ -468,14 +476,15 @@ def closure(step, temp_obj, temp_energy, temp_novel, temp_soft_sphere, batch): old_step = step[0] loss = opt.step(lambda: closure(step, temp_obj, temp_energy, temp_novel, temp_soft_sphere, batch)) lr_scheduler.step(loss) - batch.z = batch.gauss_atom_features.argmax(dim=-1) + if optimize_z: + batch.z = batch.gauss_atom_features.argmax(dim=-1) res_atoms.extend(data_to_atoms(batch)) obj_loss.extend(temp_obj[0].cpu().detach().numpy()) energy_loss.extend(temp_energy[0].cpu().detach().numpy()) novel_loss.extend(temp_novel[0].cpu().detach().numpy()) soft_sphere_loss.extend(temp_soft_sphere[0].cpu().detach().numpy()) - print(batch.gauss_atom_features) - print(batch.z, "after optimization") + # print(batch.gauss_atom_features) + # print(batch.z, "after optimization") for i in range(len(self.trainer.model)): self.trainer.model[i].gradient = True diff --git a/msp/optimizer/globalopt/basin_hopping.py b/msp/optimizer/globalopt/basin_hopping.py index 37f93bb..01e76bb 100644 --- a/msp/optimizer/globalopt/basin_hopping.py +++ b/msp/optimizer/globalopt/basin_hopping.py @@ -341,7 +341,8 @@ def __init__(self, forcefield, hops=5, steps=100, optimizer="Adam", dr=.5, max_a super().__init__("BasinHopping", hops=hops, steps=steps, optimizer=optimizer, dr=dr, max_atom_num=max_atom_num, perturbs=perturbs, elems_to_sample=elems_to_sample, **kwargs) self.forcefield = forcefield - def predict(self, structures, objective_func, cell_relax=True, topk=1, batch_size=4, log_per=0, lr=.05, density=.2, num_atoms_perturb=1, num_unique=4, dynamic_temp=False, dynamic_dr=False): + def predict(self, structures, objective_func, cell_relax=True, topk=1, batch_size=4, log_per=0, lr=.05, density=.2, num_atoms_perturb=1, + num_unique=4, dynamic_temp=False, dynamic_dr=False, optim_z=False): """ Optimizes the list of compositions in batches @@ -358,6 +359,7 @@ def predict(self, structures, objective_func, cell_relax=True, topk=1, batch_siz num_unique (int, optional): number of unique atoms in the structure. Defaults to 4. 
dynamic_temp (bool, optional): Whether to change temperature dynamically. Defaults to False. dynamic_dr (bool, optional): Whether to change dr dynamically. Defaults to False. + optim_z (bool, optional): Whether to optimize the z values of the atoms. Defaults to False. Returns: res (list): A list of dictionaries containing the optimization results @@ -390,7 +392,8 @@ def predict(self, structures, objective_func, cell_relax=True, topk=1, batch_siz res.append([]) for i in range(self.hops): start_time = time() - new_atoms, obj_loss, energy_loss, novel_loss, soft_sphere_loss = self.forcefield.optimize(new_atoms, self.steps, objective_func, log_per, lr, batch_size=batch_size, cell_relax=cell_relax, optim=self.optimizer) + new_atoms, obj_loss, energy_loss, novel_loss, soft_sphere_loss = self.forcefield.optimize(new_atoms, self.steps, objective_func, log_per, lr, + batch_size=batch_size, cell_relax=cell_relax, optim=self.optimizer, optimize_z=optim_z) if dynamic_dr: self.change_dr(accepts[0], rate=0.1) end_time = time() @@ -440,7 +443,8 @@ def predict(self, structures, objective_func, cell_relax=True, topk=1, batch_siz self.perturbs[rand_ind](new_atoms[j], num_atoms_perturb=num_atoms_perturb, num_unique=num_unique) print('HOP', i, 'took', end_time - start_time, 'seconds') print('Final optimization') - best_atoms, obj_loss, energy_loss, novel_loss, soft_sphere_loss = self.forcefield.optimize(best_atoms, 1, objective_func, log_per, lr, batch_size=batch_size, cell_relax=cell_relax, optim=self.optimizer) + best_atoms, obj_loss, energy_loss, novel_loss, soft_sphere_loss = self.forcefield.optimize(best_atoms, 1, objective_func, log_per, lr, + batch_size=batch_size, cell_relax=cell_relax, optim=self.optimizer, optimize_z=optim_z) avg_loss = 0 for j, hop in enumerate(best_hop): if getattr(objective_func, 'normalize', False): diff --git a/msp/structure/structure_util.py b/msp/structure/structure_util.py index 8613f63..a4dcd15 100644 --- a/msp/structure/structure_util.py +++ b/msp/structure/structure_util.py @@ -20,6 +20,7 @@ def init_structure(composition, pyxtal=False, density=2): composition (list): A list of the atomic numbers pyxtal (bool): If True, tries to use pyxtal to generate a random symmetric structure. If False, generates a completely random structure. + density (float): The ideal density of the structure.
Returns: dict: representing structure diff --git a/msp/utils/objectives/lj_rmins.csv b/msp/utils/objectives/lj_rmins.csv new file mode 100644 index 0000000..8c3118c --- /dev/null +++ b/msp/utils/objectives/lj_rmins.csv @@ -0,0 +1,94 @@ +7.412212914801694641e-01,nan,1.749752999999999892e+00,1.427021145750864140e+00,1.182543428788806583e+00,1.085911828487999831e+00,1.007728301542444527e+00,9.631146047243041020e-01,9.488877582564807511e-01,nan,2.203411676416100473e+00,1.803838092639939328e+00,1.603239458807999895e+00,1.474403295515833490e+00,1.378290662341729034e+00,1.342155452668974069e+00,1.297297769169833082e+00,nan,2.540609102592000124e+00,2.195829436793516276e+00,2.001543509530629983e+00,1.864449717026702702e+00,1.786349560206804687e+00,1.680115101727999605e+00,1.747349001690637671e+00,1.544114417742760370e+00,1.515425309722808755e+00,1.527017349800000146e+00,1.721929188857771909e+00,1.643222981241848624e+00,1.518082531491870446e+00,1.514732013961062496e+00,2.139445663820890964e+00,2.123011047904132198e+00,1.448282797619689033e+00,nan,2.777114637488512727e+00,2.292730999999999852e+00,2.217560439227675761e+00,1.970633741240000347e+00,1.978484659832313586e+00,2.123843670796349059e+00,3.401917492662955400e+00,1.661217569393938165e+00,1.658908255107543361e+00,1.642212079553046822e+00,2.730134724903547916e+00,1.838059928147586675e+00,2.560533399766428353e+00,1.717308815506638497e+00,2.981835131051900678e+00,2.463631385216781045e+00,1.634774678556857541e+00,nan,2.895231586643593591e+00,2.532893000000000061e+00,2.375929456869745771e+00,2.333721641941336244e+00,2.404692950858444256e+00,2.359701452928519405e+00,nan,2.328289845606950959e+00,2.349228210391335114e+00,2.148751490302119205e+00,2.185729184140881465e+00,2.242622632500028690e+00,2.199215717453347985e+00,2.148780091682134685e+00,2.190393398248820844e+00,2.190056839526372201e+00,2.103189546775930285e+00,2.018644517200243627e+00,1.931581354024525909e+00,3.358236039097470815e+00,1.786512897407999567e+00,1.684266924006315058e+00,1.663525923967254405e+00,1.630624875498358328e+00,2.948324309557059486e+00,1.670391809867448396e+00,2.951004466503344581e+00,2.539764997394242663e+00,3.532358526239999463e+00,nan,nan,nan,nan,nan,2.561880525000012288e+00,2.318073677664472143e+00,nan,2.323378098046375406e+00,2.154222259108919957e+00,2.166805142276430729e+00 +nan,2.817012229266817602e+00,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan 
+1.749752999999999892e+00,nan,1.771619000000000055e+00,2.291440533545127334e+00,2.155875982307753347e+00,2.135310033904286975e+00,1.906535597467809984e+00,1.721350737635999995e+00,1.799244985757333160e+00,nan,2.804475042107999805e+00,2.474914033583777595e+00,2.431771344317196437e+00,2.466680386309020534e+00,2.435996552568003182e+00,2.345885563732174273e+00,2.312695634286510149e+00,nan,2.791318222973802943e+00,2.658700779693244698e+00,2.962619374806038319e+00,2.644377748592235289e+00,2.514519710821960352e+00,2.546412215199454820e+00,2.494449916526538136e+00,2.409360623806390933e+00,2.399337537391616149e+00,2.374778278628784989e+00,2.470571925000012659e+00,2.432396937705501472e+00,2.482356132052068620e+00,2.460910025444155824e+00,2.506547225684555347e+00,2.498827985778024896e+00,2.498352290018230892e+00,nan,2.902512138302214328e+00,2.791137807605085008e+00,2.914767930905927784e+00,2.815418887466482278e+00,2.731145838150000227e+00,2.764798920013645578e+00,nan,2.577294999999999892e+00,2.515725537390733013e+00,2.563156586169286655e+00,2.702375868557138627e+00,2.579907864407321672e+00,2.659856731608489788e+00,2.446019263010000167e+00,2.543386771544999370e+00,2.754167328141526205e+00,2.626631080471999891e+00,nan,3.060899567751833583e+00,2.942200507934218834e+00,2.809014876548467488e+00,2.780239295512055619e+00,2.736311587139208967e+00,2.918313353113918041e+00,nan,2.690536139384482883e+00,2.820951732145730251e+00,2.993750883339871383e+00,2.844007041379212453e+00,2.898197128226129848e+00,2.914849767719560614e+00,2.927346727088384171e+00,3.118337841918728870e+00,3.020368270059398341e+00,3.164091556265995031e+00,3.660424519839467816e+00,2.711758346009691678e+00,2.907277270223999288e+00,2.658613032802919829e+00,2.873434869770276734e+00,2.552025310507078260e+00,2.583530125610861816e+00,2.665397580000000488e+00,2.803432485225700876e+00,2.794704684376161374e+00,2.744525638802489009e+00,2.782608157702401996e+00,nan,nan,nan,nan,nan,nan,nan,nan,2.836826499999999918e+00,3.017173387909133808e+00,nan +1.427021145750864140e+00,nan,2.291440533545127334e+00,1.981545630496609256e+00,1.928226935609656412e+00,1.668488978772004971e+00,1.602290964920062510e+00,1.495809030359503744e+00,1.519681583560049676e+00,nan,2.928399687683658215e+00,2.536536824112556321e+00,2.473653678346314333e+00,2.132367447188174836e+00,1.986267041841800030e+00,2.108902411674266286e+00,2.029873650229983895e+00,nan,2.907773128554015152e+00,2.781776202444263824e+00,2.619181798671094352e+00,2.441488225176000171e+00,2.516041962467129522e+00,2.526144587553105669e+00,2.430894687772322538e+00,2.419175100235954279e+00,2.227203698726716752e+00,2.240407364519927391e+00,2.314425074877942912e+00,nan,nan,2.296851891840000270e+00,2.072577463807644627e+00,2.237782140000015296e+00,2.193732985787752110e+00,nan,3.337939943976000023e+00,2.865633441368077072e+00,nan,2.636758930890821251e+00,2.551678274761727039e+00,2.526955486459815603e+00,nan,2.512377538698842283e+00,2.397061582637723287e+00,2.293991332797971960e+00,2.600648576279999347e+00,nan,nan,nan,2.408121107369975533e+00,2.452114869007405940e+00,2.427818719105562462e+00,nan,3.577486219591955230e+00,3.073992427590520826e+00,2.869620653824864753e+00,nan,nan,nan,nan,nan,nan,nan,nan,nan,3.700651267131241262e+00,nan,nan,nan,nan,2.609275602922448645e+00,2.535241095890144791e+00,2.523321158651419260e+00,2.540555582290795034e+00,nan,2.488981152116728879e+00,2.473299687427969573e+00,2.491301037080801528e+00,nan,nan,3.494606056607809208e+00,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan 
+1.182543428788806583e+00,nan,2.155875982307753347e+00,1.928226935609656412e+00,1.547690114749701529e+00,1.434445151665009854e+00,1.337779509930948629e+00,1.325243884390371329e+00,1.331790759864058593e+00,nan,2.643761431367095849e+00,2.366225451714524475e+00,2.142247096859991817e+00,2.815203491160496618e+00,1.766866275452752433e+00,1.785831818832871676e+00,1.725700145758537696e+00,nan,2.882390103496441647e+00,2.661091027332460168e+00,2.326984040000000142e+00,2.148766643412680644e+00,2.162747079427012498e+00,2.038986000793617137e+00,1.987485392775346860e+00,1.957902797472000245e+00,1.972981490617278677e+00,1.990093331888208583e+00,2.205696511261014070e+00,2.195794024545224765e+00,2.856624615437593384e+00,2.679217262871850469e+00,1.871706198216412265e+00,1.957299953201376397e+00,1.901528750190014883e+00,nan,3.157664385320659406e+00,2.753118114200741040e+00,2.620913558705519009e+00,2.326430558562040396e+00,2.309960961486909881e+00,2.213358625623882858e+00,2.175427475544306155e+00,2.057236006264463235e+00,2.025396263433673738e+00,2.112479797828868300e+00,2.680057185678285325e+00,2.179585023431753754e+00,3.084054255797341604e+00,2.667513457216275530e+00,3.022270450799025010e+00,nan,2.122744118673664637e+00,nan,3.206211336683174373e+00,3.077829536796754084e+00,2.649063209060988910e+00,2.693612285546065799e+00,2.730314133292311318e+00,2.699902439230776885e+00,nan,2.668190947514313294e+00,2.719663074183234830e+00,2.636764059546144789e+00,2.612851037927133468e+00,2.591470501613688970e+00,2.575049687116531150e+00,2.580438477695663924e+00,2.579510565891860008e+00,2.627304630071595426e+00,2.516364410209747327e+00,2.418900999999999968e+00,2.299282045299664290e+00,2.217133397551355856e+00,2.166242309700671775e+00,1.947872879695335513e+00,2.055142752398614991e+00,2.062813156346450238e+00,2.742025386766106188e+00,nan,3.008648368001351603e+00,3.177718904820879597e+00,3.006816326998310362e+00,nan,nan,nan,nan,nan,nan,2.742912172524170522e+00,nan,2.572420224389731835e+00,2.686308954204622346e+00,2.580543000000000031e+00 
+1.085911828487999831e+00,nan,2.135310033904286975e+00,1.668488978772004971e+00,1.434445151665009854e+00,1.217006440700874936e+00,1.158808485509999819e+00,1.142785355337959441e+00,1.323493189833018890e+00,nan,2.302730500000000013e+00,2.165922458507652415e+00,1.873932394949999747e+00,1.752933667023000242e+00,1.757877965234137196e+00,1.545654769499450465e+00,1.615903919167999803e+00,nan,2.877372461130357717e+00,2.236420104780902030e+00,2.173175210097360033e+00,2.067706766909483296e+00,1.965402710155067734e+00,1.822326198829049781e+00,1.896681000000000061e+00,1.568180806305805142e+00,1.731211061110145000e+00,1.797195173908146204e+00,1.804416739138408943e+00,1.942864476325531387e+00,2.346850637205529555e+00,3.209772506204984666e+00,1.935742271545637605e+00,1.709220764526679481e+00,1.794175223903999861e+00,nan,3.057690693529999670e+00,2.758269343621273517e+00,2.320523549176734868e+00,2.262943077269151448e+00,2.124292568280190086e+00,2.014397459734823315e+00,2.112623259089022820e+00,1.866476714934763192e+00,1.929054132652514575e+00,1.997057432165719515e+00,2.023296699055336045e+00,2.235362966941160767e+00,2.499722173550592785e+00,2.973339420833464253e+00,3.699650765381214956e+00,nan,2.022570440537211578e+00,nan,3.308670336177650828e+00,2.978604287742152135e+00,2.298159996394635751e+00,2.267942301467168242e+00,2.280526051764281359e+00,2.457107339492865972e+00,nan,2.501079999999999970e+00,2.628655463020457717e+00,2.294932793128598991e+00,2.415151487130954955e+00,2.393553334329590943e+00,2.371878118015697368e+00,2.356628091527305902e+00,2.232644020616523672e+00,2.478520838034012463e+00,2.208004027194500285e+00,2.264749956366135297e+00,2.174964330862141360e+00,2.004406808757980052e+00,1.903379846769387429e+00,1.906989142107239354e+00,1.912781909491612975e+00,1.860562876883097516e+00,1.913313582540000279e+00,2.021058775165538890e+00,2.205901184860158182e+00,2.827409953172871138e+00,3.537970710726110468e+00,nan,nan,nan,nan,nan,nan,2.437927933655933188e+00,2.363989639244456598e+00,2.214808830486000080e+00,2.437020087688726910e+00,2.287379835890000379e+00 
+1.007728301542444527e+00,nan,1.906535597467809984e+00,1.602290964920062510e+00,1.337779509930948629e+00,1.158808485509999819e+00,1.105534372826280753e+00,1.107662566694000095e+00,1.325906087816324685e+00,nan,2.316210592902684606e+00,2.057990608674000299e+00,1.801249851764602727e+00,1.666408594615310657e+00,1.542152636879523531e+00,1.453364597783575629e+00,1.567244424366504019e+00,nan,2.638194119426966466e+00,2.254158474591009487e+00,2.180272402579024327e+00,1.831928405465369281e+00,1.644279345729648512e+00,1.717691695910184002e+00,1.720164050016611101e+00,1.661007096791998983e+00,1.793364096479999503e+00,1.710183999999999926e+00,1.825622432512896243e+00,1.826879544019621271e+00,1.847539525364115143e+00,1.796280323316989502e+00,2.765322784193694705e+00,1.745172022895999930e+00,2.311922710786904922e+00,nan,2.956671537618279721e+00,2.500773228596057329e+00,2.233180408248965687e+00,1.992085523312371542e+00,1.940398765806435222e+00,1.665716563725014199e+00,2.117041534795845337e+00,1.785083345128283927e+00,1.696307147775999891e+00,1.825443858259430785e+00,1.807563500000000101e+00,2.194036176385593784e+00,2.164038426031573703e+00,2.099291973879535700e+00,2.156044223442725283e+00,1.977026370172842773e+00,1.941182275943909019e+00,nan,3.119077788330132162e+00,2.656179259870211862e+00,2.268767940621108536e+00,2.250902967936408317e+00,2.294385495021581622e+00,2.275843880177033540e+00,nan,2.203102409510295345e+00,2.349357310783999786e+00,2.187246894483326543e+00,2.153721132422302453e+00,2.396033169999999934e+00,2.381166620000000123e+00,2.365243185000000192e+00,2.350662059999999887e+00,2.391984250635732945e+00,2.280838685633795215e+00,1.975531797816703961e+00,1.913611220479036534e+00,1.815122837717017878e+00,1.690547373171999546e+00,1.686203682402598547e+00,1.724543241491999668e+00,1.749824818872000698e+00,1.928118302593670785e+00,2.078578434809840392e+00,2.282316938610985169e+00,2.267719735066459741e+00,2.878412603748000009e+00,nan,nan,nan,nan,nan,nan,2.310759547421265214e+00,2.472946390000000161e+00,2.234958517191225180e+00,2.262061810126314576e+00,2.474235995718652603e+00 
+9.631146047243041020e-01,nan,1.721350737635999995e+00,1.495809030359503744e+00,1.325243884390371329e+00,1.142785355337959441e+00,1.107662566694000095e+00,1.178807804575341844e+00,1.415078640607793758e+00,nan,2.111313651895684096e+00,1.808861494591999630e+00,1.680329976298229555e+00,1.554009327696140197e+00,1.444899827043818918e+00,1.398874546164090127e+00,1.404667041943548300e+00,nan,2.356344163895706334e+00,2.020165098080999755e+00,1.971163968829461766e+00,1.628087532361135015e+00,1.571247006463110996e+00,1.563825054747612464e+00,1.585645125732684901e+00,1.646717402555999854e+00,1.604574122167997219e+00,1.688970905596004712e+00,1.761170946014796179e+00,1.852863691407432345e+00,1.816590924526913842e+00,1.710058917994944583e+00,1.647801144174152910e+00,1.623587444172764904e+00,1.618332158394307996e+00,nan,2.647790559548643063e+00,2.202687000000000062e+00,2.165505323037658858e+00,1.833488642699999893e+00,1.744259396830999620e+00,1.657915863955322422e+00,1.696074946197604127e+00,1.688338914803545476e+00,1.794932874899998865e+00,1.871218994057417540e+00,1.961492240651081742e+00,2.047639999999999905e+00,2.048036115281635983e+00,1.943468078831999790e+00,1.912347849455001247e+00,1.810068174524030349e+00,1.742491613143805740e+00,1.860360684941266163e+00,2.295166000000000039e+00,2.303975999999999580e+00,2.269709831857110061e+00,2.086378688615014187e+00,2.223791596672098692e+00,2.074262139388241000e+00,2.286905489186270124e+00,2.225247664196800201e+00,2.219577017564553056e+00,2.146317226475999540e+00,2.127185315785555364e+00,2.158242374322808743e+00,2.171953619043894612e+00,2.156513207888807404e+00,2.140660155611931348e+00,2.028971257203826539e+00,2.138640693861333819e+00,1.999054139742926228e+00,1.791654907073999325e+00,1.744650295080000246e+00,1.674262070696314497e+00,1.722741229081438119e+00,1.808550280068000227e+00,1.888700824731120109e+00,1.963722172228158813e+00,1.982281967809529144e+00,2.024065026519998867e+00,2.033069786692916381e+00,2.027435271096820202e+00,nan,nan,nan,nan,nan,2.464169434957696581e+00,2.300057499999999866e+00,2.190213207243686178e+00,1.762728105724848282e+00,1.758219826171361522e+00,1.767968518856438465e+00 
+9.488877582564807511e-01,nan,1.799244985757333160e+00,1.519681583560049676e+00,1.331790759864058593e+00,1.323493189833018890e+00,1.325906087816324685e+00,1.415078640607793758e+00,1.387625495071449944e+00,nan,2.178860864079999793e+00,1.912335592643996662e+00,1.733642860177007972e+00,1.566883897970025918e+00,1.533736250954883751e+00,1.545240965426961433e+00,1.587128301769129335e+00,nan,2.450761986360791678e+00,2.208041886083653260e+00,2.011565585112988330e+00,1.836162686413860490e+00,1.732572318425475544e+00,1.707983144294762834e+00,1.812889005541234333e+00,1.789646721461527035e+00,1.778250556906523006e+00,1.779895095034891295e+00,1.754656750228942963e+00,1.924272870974393479e+00,1.865584180799999992e+00,1.699448819155735757e+00,1.685402480797588476e+00,1.709363131742457798e+00,1.728362319253121671e+00,1.914573224813413299e+00,2.600827197156000192e+00,2.381623504999999863e+00,2.156832615477223491e+00,1.471359664717049043e+00,1.906251725351999715e+00,1.832806953973094899e+00,1.847669404396772963e+00,1.836950655742607630e+00,1.842947372729765387e+00,1.851171071780704391e+00,1.923223711726884000e+00,2.161564036887921869e+00,2.082620450012794500e+00,1.898749736651724529e+00,1.846356342257577277e+00,1.861393704153232465e+00,1.817616045790547252e+00,2.016057199128694855e+00,2.791186132841223078e+00,2.572318222898914097e+00,2.393717145928000889e+00,2.277596756703278658e+00,2.548308717842870941e+00,2.523973634775171782e+00,nan,2.233835494605513894e+00,2.218557887745593593e+00,2.214237180034761554e+00,2.170809442601557837e+00,2.417581705915671009e+00,2.170599828795999997e+00,2.159110915799999475e+00,2.198807209185361433e+00,2.178889214999999879e+00,2.145481103077768292e+00,1.933921049442955287e+00,1.852028355440467511e+00,1.974759095640817641e+00,1.835416651437301594e+00,1.899971931616181386e+00,1.872885652860075911e+00,1.886505996064235191e+00,1.902159248897282939e+00,2.133551084205131509e+00,2.132147149491796956e+00,1.981039526763001213e+00,1.988476341876794118e+00,nan,nan,nan,nan,nan,nan,2.273557068201608633e+00,2.119506603985097826e+00,1.971118776991742649e+00,2.379863744445293694e+00,2.357623848738237271e+00 +nan,nan,nan,nan,nan,nan,nan,nan,nan,2.966325502297936456e+00,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan 
+2.203411676416100473e+00,nan,2.804475042107999805e+00,2.928399687683658215e+00,2.643761431367095849e+00,2.302730500000000013e+00,2.316210592902684606e+00,2.111313651895684096e+00,2.178860864079999793e+00,nan,2.589385760658486468e+00,3.061228371893388811e+00,2.974999628800535589e+00,2.869153207861374177e+00,2.837056838798893033e+00,2.654366102876781319e+00,2.703469087324119169e+00,nan,2.999500200890675927e+00,3.258490189386359592e+00,3.244090589168597383e+00,2.962533940508910835e+00,3.029939204214259441e+00,2.919044826123030401e+00,2.907041960937394531e+00,2.929774094298258635e+00,2.860812265695341861e+00,2.772532854735574759e+00,2.664766013472842143e+00,2.794419039039833574e+00,2.990943169099290699e+00,2.920807144253208421e+00,2.902345756258289722e+00,2.801667280185960784e+00,2.855288086425296523e+00,nan,3.077963113769292658e+00,3.287211596287764159e+00,3.306840861393277820e+00,2.810855120454650358e+00,3.233875484509399545e+00,3.087162786563446559e+00,3.480955898084906686e+00,3.044841022413570020e+00,2.915277959438330679e+00,2.826570960000000188e+00,2.977845919437438482e+00,3.186876597775869957e+00,3.000889695037431970e+00,3.028743591285905978e+00,3.071893694824062937e+00,3.094368028561687467e+00,3.131795025758310480e+00,nan,3.207056499975514452e+00,3.372274539450068609e+00,3.275813033201431690e+00,4.255122768464108418e+00,3.247500655575072503e+00,3.225947818747616935e+00,nan,3.193601573409589722e+00,3.599190935683564518e+00,3.290381200510007709e+00,3.257709725821887670e+00,3.958049091712887435e+00,3.659116911468059286e+00,3.269159925889901785e+00,3.997923106392856862e+00,3.699867847646896024e+00,3.985832039965755413e+00,3.487120964273264789e+00,2.933593089824271249e+00,3.104784077179262081e+00,3.048906012168518753e+00,3.059509154871621561e+00,3.117307459039983186e+00,2.985869792763138797e+00,2.958599166089153076e+00,3.038867624562036074e+00,3.153725622140885143e+00,3.085602611814757257e+00,3.129258283214048308e+00,nan,nan,nan,nan,nan,nan,3.752941620557058933e+00,3.811505010095443957e+00,3.322143031985798967e+00,3.275369785532813438e+00,3.412901531233373476e+00 
+1.803838092639939328e+00,nan,2.474914033583777595e+00,2.536536824112556321e+00,2.366225451714524475e+00,2.165922458507652415e+00,2.057990608674000299e+00,1.808861494591999630e+00,1.912335592643996662e+00,nan,3.061228371893388811e+00,2.655356498390636588e+00,2.826134561785421440e+00,2.602711911623889929e+00,2.537245126978822096e+00,2.438747423663948144e+00,2.197628982851524526e+00,nan,3.330277000000000154e+00,2.987279703834260669e+00,3.114618137312044954e+00,2.920702174861230738e+00,3.054262495417295664e+00,2.803472416342326134e+00,2.751537922687540494e+00,2.719821770855426646e+00,2.660411687529421254e+00,2.561710877435786848e+00,2.619426689862945690e+00,2.729259638335999938e+00,2.717011152028382170e+00,2.565873209249013520e+00,2.613457746744361376e+00,2.563028667859714727e+00,2.616995309864857955e+00,nan,3.513407905476817916e+00,3.160705541557349285e+00,3.005996171556690300e+00,3.247446033862802839e+00,3.065395538611624726e+00,3.122334158817808181e+00,nan,2.599396216890330180e+00,2.667339685584059872e+00,2.665309184317854907e+00,2.727109666390177711e+00,3.036158149846956977e+00,2.872014072690910691e+00,2.771393875412695884e+00,2.824171039295678920e+00,2.801153935443076559e+00,2.861563960885145530e+00,nan,3.682605886690395014e+00,3.435022241568378210e+00,3.062845681292088784e+00,3.012907175427013140e+00,3.052078909899570647e+00,3.042250476350013333e+00,nan,3.091436161457445930e+00,3.174874968418603061e+00,3.015024624184127422e+00,3.034902308886990419e+00,3.149737440000010213e+00,3.267307500605727277e+00,3.240934172342468056e+00,3.212353157282980209e+00,3.231699696458567583e+00,3.217725918111309369e+00,nan,3.058724316151011813e+00,3.329858547036890659e+00,3.663340066466100886e+00,2.846792475390081290e+00,2.622881218739384401e+00,2.640484998982460585e+00,2.681897318099999872e+00,2.787938625778115576e+00,2.910364360620130153e+00,2.840222306414097453e+00,2.910767019541197875e+00,nan,nan,nan,nan,nan,nan,3.545893532245796287e+00,nan,3.551495259401718840e+00,nan,3.164482806190452191e+00 
+1.603239458807999895e+00,nan,2.431771344317196437e+00,2.473653678346314333e+00,2.142247096859991817e+00,1.873932394949999747e+00,1.801249851764602727e+00,1.680329976298229555e+00,1.733642860177007972e+00,nan,2.974999628800535589e+00,2.826134561785421440e+00,2.507441930330859048e+00,2.398702282611936631e+00,2.352173636313375837e+00,2.170355621982077654e+00,2.077043636266857618e+00,nan,3.195492234534631937e+00,2.844802980429627315e+00,2.824746849828164486e+00,2.702712957186329756e+00,2.645585038449519200e+00,2.562636143987326420e+00,2.408938779285370924e+00,2.292441789227952054e+00,2.254236143640361512e+00,2.271354128882431045e+00,2.296882718090024422e+00,2.547131625144849920e+00,2.581710806139303038e+00,2.419683837295267814e+00,2.438587985141850822e+00,2.286554296604897729e+00,2.237168731725114323e+00,nan,3.326112000904095645e+00,3.252279239985129244e+00,2.976862252550333654e+00,2.621895378101999796e+00,2.711023009977822529e+00,2.543209412866774599e+00,2.468114575008998557e+00,2.497650075994775065e+00,2.515571053460142092e+00,2.499384563279587912e+00,2.683679918931362263e+00,3.778555435150168673e+00,nan,2.726196090388934845e+00,2.613999244539580324e+00,2.571674128788551172e+00,2.480875609239356905e+00,nan,3.517767352500008826e+00,3.252669952605450554e+00,3.049947235500000353e+00,3.008673422827253141e+00,3.040488992627333964e+00,2.770018499999999939e+00,nan,2.990097014925999552e+00,3.015558741198967940e+00,2.999401475627535341e+00,2.687053500000000206e+00,2.952938851079999871e+00,2.951427852384739481e+00,2.651702499999999851e+00,2.650788499999999992e+00,2.912832894400000150e+00,2.921429283655999853e+00,2.598536068127999865e+00,2.711189870029921956e+00,2.537892744317861826e+00,2.520189009794214563e+00,2.461647003100041697e+00,2.465355743531444155e+00,2.494358442592935265e+00,2.444864983259999569e+00,3.707822000000000173e+00,3.584672157830405492e+00,2.914633904751982474e+00,3.048007256547249799e+00,nan,nan,nan,nan,nan,nan,3.058514640076598123e+00,nan,2.960420193360000063e+00,2.999165570073254017e+00,2.938891151947484293e+00 
+1.474403295515833490e+00,nan,2.466680386309020534e+00,2.132367447188174836e+00,2.815203491160496618e+00,1.752933667023000242e+00,1.666408594615310657e+00,1.554009327696140197e+00,1.566883897970025918e+00,nan,2.869153207861374177e+00,2.602711911623889929e+00,2.398702282611936631e+00,2.259176399120319534e+00,2.212269115702399258e+00,2.070803839922999323e+00,2.012521868047659090e+00,nan,3.297606095997298592e+00,2.973736345215484356e+00,2.637847609511076996e+00,2.547342038295999256e+00,2.455970753493400327e+00,2.315942118175124076e+00,2.257628113279575910e+00,2.196197671681500374e+00,2.185288817727164989e+00,2.182405915585709177e+00,2.312314451273720817e+00,2.421597795071352088e+00,2.549004613898678162e+00,2.518641845356000886e+00,2.332188752477890592e+00,2.202028881401082216e+00,2.195050953444587893e+00,nan,3.377972791943133934e+00,3.010302478797958869e+00,2.806923071465201591e+00,2.674791413108216886e+00,2.586231016746701528e+00,2.503626351321899257e+00,2.380814470482003564e+00,2.342334222586236692e+00,2.302921922015410239e+00,2.333904945778020767e+00,2.452083106504473253e+00,3.106170637794131828e+00,3.011159992893913895e+00,3.389848506312136411e+00,3.712243963983961503e+00,2.436644166829515346e+00,2.445597960070847510e+00,nan,3.500835083353407651e+00,3.211438540060587599e+00,3.043228776632683097e+00,2.891884313166707798e+00,3.011047453275526031e+00,2.997422872032912355e+00,nan,2.906047757989865143e+00,3.055987832280876226e+00,2.813330708745999775e+00,2.791837598757269667e+00,2.801874633331664199e+00,2.792606189232095470e+00,2.781525925408537603e+00,2.772901219878878720e+00,2.907051464559815557e+00,2.806907458173721892e+00,2.665159792411830875e+00,2.562393296918849206e+00,2.512591144320746483e+00,2.395664830470227269e+00,2.328028050423145423e+00,2.286028129198178771e+00,2.311763137917980160e+00,2.409914573675999705e+00,3.278769054438861019e+00,3.570315325500557524e+00,3.565035204395853974e+00,3.187013664279514824e+00,nan,nan,nan,nan,nan,nan,2.771188077608098599e+00,nan,2.714201225220000513e+00,2.763619570906768796e+00,2.760705313056169175e+00 
+1.378290662341729034e+00,nan,2.435996552568003182e+00,1.986267041841800030e+00,1.766866275452752433e+00,1.757877965234137196e+00,1.542152636879523531e+00,1.444899827043818918e+00,1.533736250954883751e+00,nan,2.837056838798893033e+00,2.537245126978822096e+00,2.352173636313375837e+00,2.212269115702399258e+00,2.126260658175009421e+00,1.890983696642245970e+00,1.948501112384586120e+00,nan,3.183043302372227323e+00,2.827877820548215571e+00,2.599001398430871301e+00,2.426421474542914236e+00,2.322914871365881151e+00,2.195043090292000620e+00,2.206283758354522284e+00,2.156192042183054713e+00,2.068103518520000073e+00,2.150040508896760461e+00,2.220401580966186117e+00,2.196979282994645288e+00,2.350615024038453349e+00,2.288275160533211228e+00,3.510234056975858774e+00,2.125371264281361139e+00,2.172200419430815543e+00,nan,3.286459479115694915e+00,3.007135456004712104e+00,2.792470237376238007e+00,2.589343723035096634e+00,2.456823242926612583e+00,2.325052102429825407e+00,2.283937664261180611e+00,2.226553169463579174e+00,2.232126883495834768e+00,2.250971998111916061e+00,2.443570433307090450e+00,2.397280580030374697e+00,2.556486480000012218e+00,2.455977915389544020e+00,3.170176451735806200e+00,2.479758275462557116e+00,2.473980824635633624e+00,nan,3.509492662248356876e+00,3.153714162331127113e+00,2.925009833987199936e+00,2.848062411933960814e+00,2.935701197289000586e+00,2.954073599232933223e+00,nan,2.885803703028605227e+00,2.873823575242816730e+00,2.869937364658869505e+00,2.797981299860069715e+00,2.789675441203785855e+00,2.785724090254922292e+00,2.780143930549330289e+00,2.787835160574164028e+00,2.761469911779932396e+00,2.761857308891114027e+00,2.553406533355451646e+00,2.478764235663004634e+00,2.359150856493688497e+00,2.323802336898455057e+00,2.326731939316208475e+00,2.239396095456413160e+00,2.206537180910153495e+00,2.335776397677552740e+00,2.392028954653005801e+00,3.383308719643971507e+00,2.872374263474326117e+00,2.899500207602446977e+00,nan,nan,nan,nan,nan,nan,2.881334476318953453e+00,2.817139991279221878e+00,2.755300591442011449e+00,2.842957611936203666e+00,2.851692369303167052e+00 
+1.342155452668974069e+00,nan,2.345885563732174273e+00,2.108902411674266286e+00,1.785831818832871676e+00,1.545654769499450465e+00,1.453364597783575629e+00,1.398874546164090127e+00,1.545240965426961433e+00,nan,2.654366102876781319e+00,2.438747423663948144e+00,2.170355621982077654e+00,2.070803839922999323e+00,1.890983696642245970e+00,1.872729858721577489e+00,1.996519150907941409e+00,nan,2.874577443601108673e+00,2.736432430154992712e+00,2.511931898650896500e+00,2.211496501568553708e+00,2.149940611134906643e+00,2.191123378703242697e+00,2.142081956899379414e+00,2.115108589143580353e+00,2.079957223782187725e+00,2.149290249306878398e+00,2.140116535998966008e+00,2.241144828738951933e+00,2.087385464940000546e+00,2.158630805901769367e+00,2.158891149809600840e+00,2.992967081389348749e+00,2.279777571687267024e+00,nan,3.183230805111595618e+00,2.874266223488544547e+00,2.659089257243896665e+00,2.442552220092286674e+00,2.283608901897795196e+00,2.221587305493134323e+00,2.285041335856895284e+00,2.317316731409143848e+00,2.288803988898571617e+00,2.294188147027598212e+00,2.383721816034898744e+00,2.459200433396921515e+00,2.398624406271999021e+00,2.380398612163793892e+00,2.340757225927727170e+00,2.356778166027258870e+00,3.054831525179714458e+00,nan,3.375811784689641826e+00,3.013756980929860330e+00,2.610373341379999879e+00,2.713796630883670957e+00,2.788812222691190890e+00,2.766716879009825547e+00,nan,2.749423763367549345e+00,2.779719320362640023e+00,2.705309608363728024e+00,2.649455026323741968e+00,2.640130765400322321e+00,2.641218767611418272e+00,2.628145061568564422e+00,2.616205645300151428e+00,2.549134737204657331e+00,2.449058732757722012e+00,2.473584809789112438e+00,2.241424767127187412e+00,2.084303828696430383e+00,2.334658378307195914e+00,2.326069276932366314e+00,2.318625232630676436e+00,2.282880383185210249e+00,2.283701273493217165e+00,2.332672843833442311e+00,2.573447810019901638e+00,2.658578929308554439e+00,2.513318714556000089e+00,nan,nan,nan,nan,nan,nan,2.759892521599344839e+00,nan,2.617937264664170982e+00,2.670275319660698976e+00,2.816975390000000079e+00 
+1.297297769169833082e+00,nan,2.312695634286510149e+00,2.029873650229983895e+00,1.725700145758537696e+00,1.615903919167999803e+00,1.567244424366504019e+00,1.404667041943548300e+00,1.587128301769129335e+00,nan,2.703469087324119169e+00,2.197628982851524526e+00,2.077043636266857618e+00,2.012521868047659090e+00,1.948501112384586120e+00,1.996519150907941409e+00,1.983634597545816947e+00,nan,3.022451753633999871e+00,2.612652154736218701e+00,2.438299920120407194e+00,2.175583640840335065e+00,2.134958848858775404e+00,2.122468023789838121e+00,2.096055874151388387e+00,2.143163511688197964e+00,2.239920354968869987e+00,2.244153209360675305e+00,2.123498388972460127e+00,2.155173032951739120e+00,2.106739442954413732e+00,2.114569334609953444e+00,2.128552357816750895e+00,2.146225465627580764e+00,2.379699243834203060e+00,nan,3.144801405918842718e+00,2.952102357492044860e+00,2.619200375753551135e+00,2.325440047375516794e+00,2.253703784767841167e+00,2.256137808144933921e+00,2.329863285374691895e+00,2.236184439602666441e+00,2.349186979053630608e+00,2.273048273187141533e+00,2.401685399359999806e+00,2.431130671493846762e+00,2.400908391133063091e+00,2.296263831139013778e+00,2.307209773441828649e+00,2.315501639007846002e+00,2.333027948754744507e+00,nan,3.340023438074685824e+00,3.078213015053651969e+00,2.757065486043117186e+00,2.625121024987919327e+00,2.855473893747102920e+00,2.830965480767346776e+00,nan,2.700408673560000139e+00,2.714427246366000190e+00,2.612338171218397065e+00,2.636711623495410883e+00,2.593238446137882125e+00,2.605090203512000269e+00,2.522024036330371199e+00,2.557769330299334953e+00,2.662165309999999785e+00,2.594953124333999384e+00,2.301907752496281656e+00,2.305211236263615238e+00,2.273767330358225092e+00,2.197098363017893519e+00,2.264563239125999772e+00,2.297536026478000259e+00,2.293516951119999980e+00,2.250607627053742288e+00,2.282014364160746656e+00,2.382729443452005214e+00,2.398154553024082425e+00,2.506939026428636197e+00,nan,nan,nan,nan,nan,3.032298355103286269e+00,2.688922956375340512e+00,2.481589902346140075e+00,2.460136685584366045e+00,2.637252450901498069e+00,2.585236945468602165e+00
+nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,3.792326140000000123e+00,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan
+2.540609102592000124e+00,nan,2.791318222973802943e+00,2.907773128554015152e+00,2.882390103496441647e+00,2.877372461130357717e+00,2.638194119426966466e+00,2.356344163895706334e+00,2.450761986360791678e+00,nan,2.999500200890675927e+00,3.330277000000000154e+00,3.195492234534631937e+00,3.297606095997298592e+00,3.183043302372227323e+00,2.874577443601108673e+00,3.022451753633999871e+00,nan,2.878684042075024596e+00,3.598600057021805743e+00,3.575626473434887043e+00,3.320157971146999287e+00,3.221341735025999409e+00,3.259924533806399793e+00,3.232996571167951139e+00,3.107629376132035492e+00,3.289174090115070559e+00,3.189595771329912122e+00,3.191400429457487586e+00,3.010119484884596464e+00,3.176871368065498302e+00,3.198681939932097595e+00,3.284650231671635723e+00,3.207326432623733847e+00,3.257525083755858919e+00,nan,3.693814965817331153e+00,3.799893439175729082e+00,3.637872122840000166e+00,3.760317907579733721e+00,3.445488195603509940e+00,3.479341728769040465e+00,3.528985492374502542e+00,3.584220169325621708e+00,3.545698138368303276e+00,3.074398909999999319e+00,3.424021866317682683e+00,3.234808818931216301e+00,3.521244549394114021e+00,3.411150316198684251e+00,3.288303297107999512e+00,3.369078384305837393e+00,3.521276075752311829e+00,nan,3.712389038875405500e+00,3.585631958641604555e+00,3.706503723345490275e+00,4.189417814999999656e+00,3.710697547595389167e+00,3.705681983183265249e+00,nan,3.694206464673034152e+00,3.755049878509846284e+00,3.707704421289899077e+00,3.805887777946879602e+00,4.256415684643828357e+00,3.829437474971632671e+00,3.660856508653930419e+00,3.786100005085020204e+00,3.738848952690843497e+00,4.300711417986438079e+00,3.755375924724534631e+00,3.459668751525181385e+00,3.507024670114291087e+00,3.449458416935617944e+00,3.591917953496173599e+00,3.315821202651180855e+00,3.270819746455339239e+00,3.265476801192098488e+00,3.529715107102691984e+00,3.573014046113993203e+00,3.361283999999999939e+00,3.556551880507572871e+00,nan,nan,nan,nan,nan,nan,3.888388697724999954e+00,3.933442170720852005e+00,3.663338376426015675e+00,4.306809453050163938e+00,3.625514610945027805e+00 
+2.195829436793516276e+00,nan,2.658700779693244698e+00,2.781776202444263824e+00,2.661091027332460168e+00,2.236420104780902030e+00,2.254158474591009487e+00,2.020165098080999755e+00,2.208041886083653260e+00,nan,3.258490189386359592e+00,2.987279703834260669e+00,2.844802980429627315e+00,2.973736345215484356e+00,2.827877820548215571e+00,2.736432430154992712e+00,2.612652154736218701e+00,nan,3.598600057021805743e+00,2.913827599999999851e+00,3.213289057809114890e+00,3.099618450052286089e+00,3.042435827808001036e+00,2.994691677136615215e+00,3.056000107037193825e+00,2.911864270834341628e+00,3.027865444694982866e+00,2.776033081538238800e+00,2.913091861291479567e+00,3.041259975472359045e+00,2.876931282869406026e+00,2.994556647674280647e+00,2.878087305000000207e+00,2.970238038499841249e+00,2.853496973637497991e+00,nan,3.857075165188939003e+00,3.391219600408382551e+00,nan,3.274928798847390077e+00,3.247348155366532652e+00,3.190545727301529855e+00,3.149652312049101788e+00,3.109707119682947773e+00,2.983800661283132705e+00,2.902307280828097635e+00,3.149450276935557014e+00,3.213336056991947576e+00,3.112155474131306221e+00,2.948497000000000146e+00,2.878702500000000164e+00,3.195854216931044434e+00,3.152715819931940278e+00,nan,3.875844378908748222e+00,3.526107329327999196e+00,3.400679882393482067e+00,nan,nan,nan,nan,3.392305041245998964e+00,nan,3.822947010129070122e+00,4.351728640929112935e+00,nan,3.326528572823093111e+00,nan,nan,nan,nan,3.706195439266121383e+00,3.282368487028433979e+00,3.194323859531984500e+00,3.270277669112988672e+00,3.104673705065901235e+00,2.985741223734405647e+00,2.880729326793999956e+00,2.948586281430879197e+00,3.105993790174739200e+00,3.202672640221043476e+00,2.976993499999999848e+00,3.225330366474189248e+00,nan,nan,nan,nan,nan,nan,5.074023834055514826e+00,nan,3.404350817102035798e+00,nan,nan 
+2.001543509530629983e+00,nan,2.962619374806038319e+00,2.619181798671094352e+00,2.326984040000000142e+00,2.173175210097360033e+00,2.180272402579024327e+00,1.971163968829461766e+00,2.011565585112988330e+00,nan,3.244090589168597383e+00,3.114618137312044954e+00,2.824746849828164486e+00,2.637847609511076996e+00,2.599001398430871301e+00,2.511931898650896500e+00,2.438299920120407194e+00,nan,3.575626473434887043e+00,3.213289057809114890e+00,2.652623687436082456e+00,3.557927189512727129e+00,3.066672585305395771e+00,2.925854371562589762e+00,2.952872255025791759e+00,2.653381274983093174e+00,2.381986624741067260e+00,2.442127456924330087e+00,2.655831591495779964e+00,2.887344882232115051e+00,2.746300279869521077e+00,2.647601240628000063e+00,2.646983133675604005e+00,2.689577172033455366e+00,2.658155986387226388e+00,nan,3.712440374521092323e+00,3.323856439806529561e+00,3.218765135896964669e+00,nan,3.584734143350733593e+00,3.815083578232363770e+00,3.068034259588846346e+00,2.706055059494451243e+00,2.668954837066636721e+00,2.748254030475682175e+00,2.841011852500045620e+00,2.953617954025937298e+00,2.911598720080597058e+00,2.811993146342110172e+00,2.941856220573235259e+00,2.867565597348202822e+00,2.873478822426509982e+00,nan,3.894038194795731211e+00,3.554561544909301762e+00,3.330429782037772846e+00,3.281833353422522137e+00,3.286721834117217522e+00,3.267126613674823155e+00,nan,3.236335518543313583e+00,3.272909566481592591e+00,3.221724824548038324e+00,3.199202778086023269e+00,3.214645919499330873e+00,3.207796436911430238e+00,nan,nan,nan,nan,nan,3.570258082050620452e+00,nan,2.941955104391178910e+00,2.678137654077623164e+00,2.342567999999999984e+00,2.696360872884491666e+00,2.802446724605512252e+00,3.029989185883616987e+00,3.213240489912057907e+00,2.918270187427291606e+00,3.012097178250680685e+00,nan,nan,nan,nan,nan,nan,nan,nan,3.851885183994283146e+00,nan,nan 
+1.864449717026702702e+00,nan,2.644377748592235289e+00,2.441488225176000171e+00,2.148766643412680644e+00,2.067706766909483296e+00,1.831928405465369281e+00,1.628087532361135015e+00,1.836162686413860490e+00,nan,2.962533940508910835e+00,2.920702174861230738e+00,2.702712957186329756e+00,2.547342038295999256e+00,2.426421474542914236e+00,2.211496501568553708e+00,2.175583640840335065e+00,nan,3.320157971146999287e+00,3.099618450052286089e+00,3.557927189512727129e+00,2.358093499999999842e+00,2.937533535504802007e+00,2.712086988207422333e+00,2.579831921469635692e+00,2.498604177213421984e+00,2.463007079938591914e+00,2.400952613338179553e+00,2.552245265377053762e+00,2.744163486516024708e+00,2.559274809162157105e+00,2.580549153795206330e+00,2.515998409242299783e+00,2.359519746345549507e+00,2.324313255405242362e+00,nan,3.570189838599998389e+00,3.275847581563238631e+00,3.118281897354445054e+00,2.969542219849382825e+00,3.159841976661825313e+00,nan,2.679891744350817007e+00,2.658325607354853570e+00,2.518025443527665441e+00,2.608658310747251630e+00,2.906882801601931465e+00,3.058844730510251075e+00,2.937139091422724668e+00,2.651858120979136757e+00,2.673661176544814388e+00,2.642320757371082962e+00,2.588768449923791959e+00,nan,3.606752991243999951e+00,3.410910675486997334e+00,3.245683226466413274e+00,3.439417641544238347e+00,3.418116690760169440e+00,3.196041783993769148e+00,nan,3.165590564483208968e+00,3.374588433217650785e+00,3.131426292924215815e+00,3.359330286966708101e+00,3.338164633783192059e+00,3.318646942081169726e+00,3.304081083673673813e+00,3.311791781682212221e+00,nan,3.260616759061754077e+00,nan,nan,nan,2.691676990436439265e+00,2.671370851120949741e+00,2.624517385568537975e+00,2.575708533205133488e+00,2.720598388361700870e+00,2.857114801930086490e+00,2.979775553084577311e+00,3.020814719533544412e+00,2.911418224753537842e+00,nan,nan,nan,nan,nan,nan,3.545429194342337276e+00,nan,3.081367676904736008e+00,nan,nan 
+1.786349560206804687e+00,nan,2.514519710821960352e+00,2.516041962467129522e+00,2.162747079427012498e+00,1.965402710155067734e+00,1.644279345729648512e+00,1.571247006463110996e+00,1.732572318425475544e+00,nan,3.029939204214259441e+00,3.054262495417295664e+00,2.645585038449519200e+00,2.455970753493400327e+00,2.322914871365881151e+00,2.149940611134906643e+00,2.134958848858775404e+00,nan,3.221341735025999409e+00,3.042435827808001036e+00,3.066672585305395771e+00,2.937533535504802007e+00,2.197444546049953207e+00,2.722743905904446038e+00,2.473168232872207550e+00,2.419922754436801160e+00,2.412624368510428496e+00,2.467509272693911626e+00,2.684942280000000014e+00,2.656289447719474062e+00,2.532376792705988144e+00,2.482823447667693362e+00,2.420925212869288856e+00,2.282402245360060977e+00,2.424155868769860334e+00,nan,3.558252534228011577e+00,3.254929372430220180e+00,3.067738691983316723e+00,2.944741782800047947e+00,2.829349315116489016e+00,3.064718306949428595e+00,2.605973081318232243e+00,2.564677286419438929e+00,2.634574122917984429e+00,2.682311577974600514e+00,2.914781999999999762e+00,2.771634208960708001e+00,3.472902620666958651e+00,2.755495049532580509e+00,2.631098626025020426e+00,2.562430261726698077e+00,2.858686317291347923e+00,nan,3.600518918103827293e+00,3.448933180190470349e+00,3.128703826550511824e+00,3.178807995362923133e+00,3.214405655945769880e+00,3.180250280120187067e+00,nan,3.137957530563208586e+00,3.200944289999999803e+00,3.102078435439519222e+00,3.029603365350000743e+00,3.079401968075109242e+00,3.068942175932522165e+00,3.056953011958100763e+00,3.046610082446905121e+00,3.031928027194829234e+00,3.082663991730448583e+00,2.897775557519291834e+00,2.919526661512657828e+00,2.963930662915379433e+00,nan,2.612429361325223631e+00,2.595269159989471941e+00,2.669917795078292855e+00,2.439544000000000157e+00,3.514103094784498449e+00,3.527470382621648692e+00,2.810467442508952196e+00,3.218901399858100643e+00,nan,nan,nan,nan,nan,nan,3.139621732158186518e+00,nan,3.018930969190059876e+00,nan,nan 
+1.680115101727999605e+00,nan,2.546412215199454820e+00,2.526144587553105669e+00,2.038986000793617137e+00,1.822326198829049781e+00,1.717691695910184002e+00,1.563825054747612464e+00,1.707983144294762834e+00,nan,2.919044826123030401e+00,2.803472416342326134e+00,2.562636143987326420e+00,2.315942118175124076e+00,2.195043090292000620e+00,2.191123378703242697e+00,2.122468023789838121e+00,nan,3.259924533806399793e+00,2.994691677136615215e+00,2.925854371562589762e+00,2.712086988207422333e+00,2.722743905904446038e+00,2.149304259805691952e+00,3.056240108731408167e+00,2.435495683606439421e+00,2.370581085217601025e+00,2.506062744252026686e+00,2.570088002353810630e+00,3.459278216739344547e+00,2.424040924003344255e+00,2.413332692163982607e+00,2.383835557218163981e+00,2.444476864095152191e+00,2.513468267877065898e+00,nan,3.598833162195797186e+00,3.141295122829764974e+00,2.994731109960814930e+00,2.891677309686015995e+00,2.724120937309716606e+00,3.657867755919260055e+00,nan,2.589720016685201465e+00,2.592569885322525192e+00,2.657886477337289222e+00,3.126658731073850284e+00,3.550006931049056735e+00,2.989244999999999930e+00,2.766157771680140876e+00,2.693866701204727487e+00,2.701999416143169963e+00,2.733267573366503278e+00,nan,3.861455095499962464e+00,3.345364835015479876e+00,3.025198288112067146e+00,3.157627240540809677e+00,3.188478756345855203e+00,3.156901453064083540e+00,nan,3.039446355151042667e+00,3.177445238360275148e+00,3.022494856094562810e+00,2.997059706273324409e+00,2.989840776961592450e+00,2.979440042587794490e+00,2.982679647873056616e+00,2.978201885970974683e+00,2.997166326977366069e+00,2.944305510070984955e+00,2.876301874436550499e+00,2.705630159815406444e+00,2.658815045303448699e+00,3.060119758247999755e+00,2.581109907347782784e+00,2.579660248723607108e+00,2.601332716785988541e+00,2.880570011672523023e+00,3.592604575098945485e+00,3.179756504658101779e+00,3.076640073112770324e+00,2.891077937375880591e+00,nan,nan,nan,nan,nan,nan,3.212866367565654802e+00,nan,2.712811670787191431e+00,3.196746323487836605e+00,3.314725721459469643e+00 
+1.747349001690637671e+00,nan,2.494449916526538136e+00,2.430894687772322538e+00,1.987485392775346860e+00,1.896681000000000061e+00,1.720164050016611101e+00,1.585645125732684901e+00,1.812889005541234333e+00,nan,2.907041960937394531e+00,2.751537922687540494e+00,2.408938779285370924e+00,2.257628113279575910e+00,2.206283758354522284e+00,2.142081956899379414e+00,2.096055874151388387e+00,nan,3.232996571167951139e+00,3.056000107037193825e+00,2.952872255025791759e+00,2.579831921469635692e+00,2.473168232872207550e+00,3.056240108731408167e+00,2.164525999999999950e+00,2.401812097500013454e+00,2.294471902703797639e+00,2.393308414970817921e+00,2.255092499999999944e+00,2.597356544242771292e+00,2.400504535912536763e+00,2.334863143382941431e+00,2.347226327702760074e+00,2.295138475304999481e+00,2.486213830694909976e+00,nan,3.361116532128344936e+00,3.092212088599222142e+00,2.797408412516976473e+00,2.911317515500541298e+00,2.804847280254437969e+00,2.660845052385965026e+00,nan,2.513567858965287449e+00,2.407764999999999933e+00,2.601551658240770060e+00,2.830969173624555690e+00,3.231757499814059020e+00,2.540755499999999945e+00,2.602553901195413300e+00,2.564717778824943561e+00,2.532022689888000500e+00,2.689450477596837086e+00,nan,3.638315531305111428e+00,3.313535951784344125e+00,3.024956716718689709e+00,3.002555974267049343e+00,3.069165649495133597e+00,2.892165097920021744e+00,nan,3.003665259812087651e+00,3.197218117087565314e+00,2.836146145765064741e+00,2.592022566851413412e+00,2.949985533573272090e+00,2.857266056523815045e+00,2.841924268449099511e+00,2.977671088414529432e+00,2.903387562824002455e+00,2.807807122538521938e+00,2.870459526831008912e+00,2.715919753195363384e+00,2.641892766657789160e+00,3.167489879215346082e+00,nan,2.595911760503735621e+00,2.653728735851311527e+00,2.686205303168029523e+00,2.884859657791129184e+00,4.050558855357209609e+00,3.207504000000000133e+00,2.793306626108974733e+00,nan,nan,nan,nan,nan,nan,3.059863990463488559e+00,nan,2.750887447683567189e+00,2.880145359757862789e+00,2.852842904483656739e+00 
+1.544114417742760370e+00,nan,2.409360623806390933e+00,2.419175100235954279e+00,1.957902797472000245e+00,1.568180806305805142e+00,1.661007096791998983e+00,1.646717402555999854e+00,1.789646721461527035e+00,nan,2.929774094298258635e+00,2.719821770855426646e+00,2.292441789227952054e+00,2.196197671681500374e+00,2.156192042183054713e+00,2.115108589143580353e+00,2.143163511688197964e+00,nan,3.107629376132035492e+00,2.911864270834341628e+00,2.653381274983093174e+00,2.498604177213421984e+00,2.419922754436801160e+00,2.435495683606439421e+00,2.401812097500013454e+00,2.208418999999999688e+00,2.417504983423821585e+00,2.145419500000000035e+00,2.547596465441404057e+00,2.537513081073596410e+00,2.332922028182672580e+00,2.248332863356729661e+00,2.285038885214981885e+00,2.251668367270816873e+00,2.291586615056621312e+00,nan,3.645115972631682411e+00,3.043011136605344724e+00,2.613915729822692047e+00,2.560461675436087603e+00,2.409507030879508349e+00,2.461585545350200910e+00,nan,2.544466356444934796e+00,2.575866208840205118e+00,2.638257827013879897e+00,2.850092583576916638e+00,3.621668754422163961e+00,2.993104000000000209e+00,2.547086727033242681e+00,2.491485578293679115e+00,2.506153350122236390e+00,2.608384746487916850e+00,nan,3.366282627514938230e+00,3.278514526456869138e+00,2.863241942272668261e+00,2.789093180971686792e+00,2.941038085938801672e+00,2.491291756554436798e+00,nan,2.914374826670163898e+00,2.568920499999999940e+00,2.733698425772869367e+00,2.618415755084577246e+00,2.818773586575832546e+00,2.576398425712632889e+00,2.558315500020499567e+00,2.809806858846969568e+00,2.836369635984477444e+00,2.783301216846045367e+00,2.557517758259387985e+00,2.450953545713382642e+00,2.608045319059231648e+00,2.614651037315225413e+00,3.883687433272565936e+00,2.584473901804077656e+00,2.385498956138069282e+00,nan,4.093216561725295577e+00,2.935036684042499289e+00,3.315335127082379874e+00,2.905032194234200116e+00,nan,nan,nan,nan,nan,nan,2.711542499748009760e+00,nan,2.183430429647243098e+00,2.836836044089204645e+00,2.812615125743081368e+00 
+1.515425309722808755e+00,nan,2.399337537391616149e+00,2.227203698726716752e+00,1.972981490617278677e+00,1.731211061110145000e+00,1.793364096479999503e+00,1.604574122167997219e+00,1.778250556906523006e+00,nan,2.860812265695341861e+00,2.660411687529421254e+00,2.254236143640361512e+00,2.185288817727164989e+00,2.068103518520000073e+00,2.079957223782187725e+00,2.239920354968869987e+00,nan,3.289174090115070559e+00,3.027865444694982866e+00,2.381986624741067260e+00,2.463007079938591914e+00,2.412624368510428496e+00,2.370581085217601025e+00,2.294471902703797639e+00,2.417504983423821585e+00,2.016612023548956412e+00,2.460781098439484449e+00,2.474425830137066296e+00,2.465344926437749606e+00,2.296801488358047649e+00,2.217699052872140086e+00,2.263729079481764828e+00,2.244777503481694581e+00,2.251012999999999931e+00,nan,3.419352225809346280e+00,3.207085461440305707e+00,2.514371315503777904e+00,2.566217402732639652e+00,2.515468944502676329e+00,2.396651947892219692e+00,nan,4.084924705812877477e+00,2.716712464417471384e+00,2.843506186423690796e+00,3.487548021541442722e+00,2.725927754172512874e+00,2.528832736171667595e+00,2.413853843620000550e+00,2.471920608462353730e+00,2.460157973262472098e+00,2.580887824450416090e+00,nan,3.517558698128232741e+00,3.211290384647422336e+00,2.411520201046612133e+00,2.194575318577716150e+00,2.630136038304786261e+00,2.661183020971134017e+00,nan,2.637614115634220102e+00,3.077564072801417527e+00,2.506083447616911819e+00,2.497408794592275605e+00,2.610506202014063248e+00,2.602388944424092632e+00,2.467278494959259660e+00,2.638532169563618801e+00,2.754369233041524367e+00,2.628565132391704751e+00,2.533041096829367511e+00,2.452542306807659944e+00,2.459522556664921922e+00,2.576233612351879643e+00,3.930044335505898268e+00,2.577164604451984697e+00,2.656080202227219633e+00,5.101591488915104833e+00,5.723797514837941414e+00,3.771636035075792925e+00,3.478379359458275477e+00,2.679519885783367528e+00,nan,nan,nan,nan,nan,nan,2.694203290304992837e+00,nan,2.559222231103206902e+00,2.946520672498533333e+00,2.640551715765792729e+00 
+1.527017349800000146e+00,nan,2.374778278628784989e+00,2.240407364519927391e+00,1.990093331888208583e+00,1.797195173908146204e+00,1.710183999999999926e+00,1.688970905596004712e+00,1.779895095034891295e+00,nan,2.772532854735574759e+00,2.561710877435786848e+00,2.271354128882431045e+00,2.182405915585709177e+00,2.150040508896760461e+00,2.149290249306878398e+00,2.244153209360675305e+00,nan,3.189595771329912122e+00,2.776033081538238800e+00,2.442127456924330087e+00,2.400952613338179553e+00,2.467509272693911626e+00,2.506062744252026686e+00,2.393308414970817921e+00,2.145419500000000035e+00,2.460781098439484449e+00,2.113482499999999931e+00,2.502526335458144846e+00,2.314942195967049976e+00,2.306017753983399476e+00,2.236348049886707212e+00,2.256484548173745441e+00,2.269158432092268640e+00,2.546493484344498182e+00,nan,3.286077947238622787e+00,2.676886435495511751e+00,2.607241797926170168e+00,2.580499998495737302e+00,2.546161382910453685e+00,2.485788651053217890e+00,nan,3.961778970306553127e+00,2.686148396841981700e+00,2.709676667498234881e+00,3.078214315408160928e+00,2.702231527597205751e+00,2.486853104769803036e+00,2.385136157600903584e+00,2.455465443195000041e+00,2.477868713992720906e+00,2.749767109560175449e+00,nan,3.332299103713703747e+00,3.176090992862974360e+00,2.694251247327252408e+00,2.728044222662796336e+00,2.711188398804034172e+00,2.692149580973064005e+00,nan,2.769337743805257990e+00,2.824312146129556389e+00,2.760246597445312755e+00,2.609818994612419729e+00,2.694600361578246339e+00,2.689579371073926861e+00,2.584363527641938330e+00,2.663274243927887586e+00,2.622845465851564750e+00,2.657806950356072839e+00,2.573244131211720021e+00,2.494289765980240325e+00,2.518390147419642400e+00,3.955942828473636652e+00,3.756977122383366563e+00,2.563752168818334898e+00,2.537783343282986603e+00,2.535546683582076799e+00,2.638883804665499433e+00,2.718793866283214289e+00,2.691003080719561957e+00,2.513955366095999899e+00,nan,nan,nan,nan,nan,nan,2.817013572500609353e+00,nan,2.746976289152993633e+00,2.772497111441578177e+00,2.722059583258524817e+00 
+1.721929188857771909e+00,nan,2.470571925000012659e+00,2.314425074877942912e+00,2.205696511261014070e+00,1.804416739138408943e+00,1.825622432512896243e+00,1.761170946014796179e+00,1.754656750228942963e+00,nan,2.664766013472842143e+00,2.619426689862945690e+00,2.296882718090024422e+00,2.312314451273720817e+00,2.220401580966186117e+00,2.140116535998966008e+00,2.123498388972460127e+00,nan,3.191400429457487586e+00,2.913091861291479567e+00,2.655831591495779964e+00,2.552245265377053762e+00,2.684942280000000014e+00,2.570088002353810630e+00,2.255092499999999944e+00,2.547596465441404057e+00,2.474425830137066296e+00,2.502526335458144846e+00,2.238212100000000149e+00,2.529273887842225399e+00,2.411890220498020376e+00,2.315222229053998770e+00,2.304869271278709775e+00,2.282721911069427012e+00,2.297221644425806897e+00,nan,3.331292962587284467e+00,2.611885048985999269e+00,2.415265320864000032e+00,2.600958705506207558e+00,2.680964238413514611e+00,2.735357000000000038e+00,nan,3.223506767585381017e+00,2.668102809394676367e+00,2.547166786863519494e+00,2.810631981429100978e+00,2.682013687575830030e+00,2.571948428474562842e+00,2.472594935240000069e+00,2.479710707528740343e+00,2.524873089141443483e+00,2.522829073393738852e+00,nan,3.490308530526389230e+00,2.661730499999999999e+00,2.878778358210448474e+00,2.840138553841585534e+00,2.933187931827890615e+00,2.907971918014151225e+00,nan,2.879288134416827738e+00,2.948478562886482468e+00,2.851668987603455552e+00,2.856032955973752951e+00,2.846620792394838784e+00,2.818420514910000119e+00,2.849311645809862181e+00,2.817037160062049850e+00,2.822013144672808505e+00,2.807282810696388964e+00,2.581680994105349569e+00,2.743278715000000201e+00,2.707004330000000181e+00,2.575062357904890575e+00,3.842181586182128683e+00,2.685182688789092609e+00,2.540123487895603827e+00,2.641317874035871593e+00,2.857114801930086934e+00,2.927380190659061299e+00,2.645499340861685145e+00,2.653433361016618974e+00,nan,nan,nan,nan,nan,nan,3.064108583237854067e+00,nan,2.836633787082249114e+00,3.266223673598196875e+00,3.196843390792900141e+00 
+1.643222981241848624e+00,nan,2.432396937705501472e+00,nan,2.195794024545224765e+00,1.942864476325531387e+00,1.826879544019621271e+00,1.852863691407432345e+00,1.924272870974393479e+00,nan,2.794419039039833574e+00,2.729259638335999938e+00,2.547131625144849920e+00,2.421597795071352088e+00,2.196979282994645288e+00,2.241144828738951933e+00,2.155173032951739120e+00,nan,3.010119484884596464e+00,3.041259975472359045e+00,2.887344882232115051e+00,2.744163486516024708e+00,2.656289447719474062e+00,3.459278216739344547e+00,2.597356544242771292e+00,2.537513081073596410e+00,2.465344926437749606e+00,2.314942195967049976e+00,2.529273887842225399e+00,2.398102243072784301e+00,2.499715762610713821e+00,2.404847074099302429e+00,2.288675174446867189e+00,2.446276498539722510e+00,2.330498843245105700e+00,nan,3.220392982734344489e+00,3.038446516115085760e+00,2.934326361691169893e+00,2.734965759407395947e+00,2.764775825964307820e+00,2.696264555798654872e+00,nan,2.596944278238381809e+00,2.555291327039311167e+00,2.569092893507910702e+00,2.721342777245615796e+00,3.658125104999999877e+00,2.655031850088069412e+00,2.561779654686697771e+00,2.607708974711017369e+00,2.642204576942603023e+00,2.613723040675126175e+00,nan,3.698072348638218099e+00,3.208616346879233916e+00,3.108668503690614759e+00,2.996650107500054894e+00,3.067310319201841384e+00,3.039852448308959065e+00,nan,2.998685230030839310e+00,3.104227356551044092e+00,2.943357150322975180e+00,2.946926639770146128e+00,2.961522634414996524e+00,2.943428207897664350e+00,2.917014904364325645e+00,2.922366879134512097e+00,2.978032140475936007e+00,2.901430589759697476e+00,2.702293818133504111e+00,2.808389685592962870e+00,3.479768336980796128e+00,3.746221604823908979e+00,nan,2.609185757084840684e+00,2.587676871146467938e+00,2.641938598345791700e+00,5.760909622742944158e+00,2.700214679014409924e+00,2.708968062671038624e+00,2.593012610105455806e+00,nan,nan,nan,nan,nan,nan,3.127380848566099214e+00,nan,2.955343277614617570e+00,nan,3.094854175122147666e+00 
+1.518082531491870446e+00,nan,2.482356132052068620e+00,nan,2.856624615437593384e+00,2.346850637205529555e+00,1.847539525364115143e+00,1.816590924526913842e+00,1.865584180799999992e+00,nan,2.990943169099290699e+00,2.717011152028382170e+00,2.581710806139303038e+00,2.549004613898678162e+00,2.350615024038453349e+00,2.087385464940000546e+00,2.106739442954413732e+00,nan,3.176871368065498302e+00,2.876931282869406026e+00,2.746300279869521077e+00,2.559274809162157105e+00,2.532376792705988144e+00,2.424040924003344255e+00,2.400504535912536763e+00,2.332922028182672580e+00,2.296801488358047649e+00,2.306017753983399476e+00,2.411890220498020376e+00,2.499715762610713821e+00,2.356746878775993537e+00,2.371322869305282577e+00,2.469825725959744478e+00,2.338154487569534368e+00,2.268906179214313035e+00,nan,3.309513666546911370e+00,2.983434536986762087e+00,2.851951582298000165e+00,2.704013138071100109e+00,2.597648665032389559e+00,2.749177586909491211e+00,nan,2.414215568759341846e+00,2.412416644813516253e+00,2.463120363428902060e+00,2.643145901006412668e+00,2.730929290343741211e+00,3.439680786661460488e+00,2.742065676196361768e+00,2.622107599456418914e+00,2.564924306941496646e+00,2.494950595107313429e+00,nan,3.530899935242295395e+00,3.434274100748420100e+00,3.068289479999999791e+00,3.019055016963671179e+00,2.768696499999999894e+00,2.751466499999999815e+00,nan,2.932710707811259532e+00,3.051640590311405443e+00,2.970214517638746887e+00,2.859520769402405183e+00,2.650380499999999806e+00,2.637223000000000095e+00,2.626570500000000141e+00,2.612731500000000207e+00,2.759723170720000063e+00,2.826000485567999654e+00,2.697999629972616109e+00,2.603253877510800063e+00,2.699951537078548292e+00,2.488104867921679286e+00,2.433444918951561231e+00,2.426423975717410197e+00,2.444071959545214057e+00,2.550750248384400454e+00,3.671266988348027560e+00,4.113935999999999815e+00,3.270814114910217896e+00,3.549749708801152703e+00,nan,nan,nan,nan,nan,nan,3.023862439700450988e+00,nan,2.917153306383063249e+00,2.998274887765047581e+00,2.921434293889218825e+00 
+1.514732013961062496e+00,nan,2.460910025444155824e+00,2.296851891840000270e+00,2.679217262871850469e+00,3.209772506204984666e+00,1.796280323316989502e+00,1.710058917994944583e+00,1.699448819155735757e+00,nan,2.920807144253208421e+00,2.565873209249013520e+00,2.419683837295267814e+00,2.518641845356000886e+00,2.288275160533211228e+00,2.158630805901769367e+00,2.114569334609953444e+00,nan,3.198681939932097595e+00,2.994556647674280647e+00,2.647601240628000063e+00,2.580549153795206330e+00,2.482823447667693362e+00,2.413332692163982607e+00,2.334863143382941431e+00,2.248332863356729661e+00,2.217699052872140086e+00,2.236348049886707212e+00,2.315222229053998770e+00,2.404847074099302429e+00,2.371322869305282577e+00,2.358331999999999873e+00,2.398077179747267973e+00,2.286264327296652965e+00,2.280831766668490967e+00,nan,3.442769372373651660e+00,3.077675620246793642e+00,2.815183603975942361e+00,2.610380197458537666e+00,2.560832181400079843e+00,2.558638968550950299e+00,2.493985647935332839e+00,2.373986026779984826e+00,2.380600098665100806e+00,2.431078934679998582e+00,2.613426146843520925e+00,2.713625988119071408e+00,2.733430485000045707e+00,2.897078118088428411e+00,3.327511740046642963e+00,2.595082841158653508e+00,2.740061844547714731e+00,nan,3.577916023536139356e+00,3.288694855412869433e+00,3.040496795000204422e+00,2.921512340517324890e+00,2.951367512202400789e+00,2.842297350201417494e+00,nan,2.826150521830632645e+00,3.037145658933913861e+00,2.931522421418621960e+00,2.717144236143999692e+00,2.716268161981999718e+00,2.709554659526000275e+00,2.703579844931999432e+00,2.700566084160000102e+00,2.678370358819389896e+00,2.696020813178999997e+00,2.647502314738999551e+00,2.529366036080153091e+00,2.491019983308832675e+00,2.519242596893539687e+00,2.372654802352641923e+00,2.345702636584201972e+00,2.349074741618019768e+00,2.441412881240047295e+00,3.237124999999999808e+00,3.622950727338283272e+00,3.109949954030343022e+00,3.680284858636512446e+00,nan,nan,nan,nan,nan,nan,3.046101881897295360e+00,nan,2.729922281954999974e+00,2.817242125693600219e+00,3.012419095211647857e+00 
+2.139445663820890964e+00,nan,2.506547225684555347e+00,2.072577463807644627e+00,1.871706198216412265e+00,1.935742271545637605e+00,2.765322784193694705e+00,1.647801144174152910e+00,1.685402480797588476e+00,nan,2.902345756258289722e+00,2.613457746744361376e+00,2.438587985141850822e+00,2.332188752477890592e+00,3.510234056975858774e+00,2.158891149809600840e+00,2.128552357816750895e+00,nan,3.284650231671635723e+00,2.878087305000000207e+00,2.646983133675604005e+00,2.515998409242299783e+00,2.420925212869288856e+00,2.383835557218163981e+00,2.347226327702760074e+00,2.285038885214981885e+00,2.263729079481764828e+00,2.256484548173745441e+00,2.304869271278709775e+00,2.288675174446867189e+00,2.469825725959744478e+00,2.398077179747267973e+00,2.388137224448558804e+00,2.306018849207678301e+00,2.355077310925331791e+00,nan,3.420587671891673676e+00,3.068310904295865615e+00,2.871410976371571167e+00,2.674993024402466890e+00,2.623536785438334018e+00,2.493727541282640292e+00,2.421563383328239283e+00,2.414047906219106032e+00,2.368010294877883837e+00,2.351498881966631593e+00,2.539899854811492386e+00,2.485343007564360551e+00,2.601824172432462223e+00,2.623477072603870308e+00,3.259894724940886945e+00,2.636475490118254328e+00,2.634011847942003381e+00,nan,3.598781523528935544e+00,3.215669781058749344e+00,2.981266326066218220e+00,3.008521184028515538e+00,2.989987024965945039e+00,3.011780087995099109e+00,nan,2.976017043752272606e+00,3.006497743688757662e+00,2.944398977589490762e+00,2.872863498984197239e+00,2.864861836756519065e+00,2.863189507639930742e+00,2.857782929734740573e+00,2.850151777699583722e+00,2.858742016031206479e+00,2.838190646792944971e+00,2.641741005200317183e+00,2.611467207975047167e+00,2.525509213592322943e+00,2.572860110887943641e+00,2.453311492359291357e+00,2.433579480711312470e+00,2.420970475087223850e+00,2.489952103446219311e+00,2.492111741240429890e+00,3.371425143099000543e+00,3.275458031931258951e+00,3.606149986400662311e+00,nan,nan,nan,nan,nan,nan,2.988209409491151991e+00,2.890600359005883302e+00,2.877869489585655138e+00,2.889111087391571075e+00,2.939844672078589127e+00 
+2.123011047904132198e+00,nan,2.498827985778024896e+00,2.237782140000015296e+00,1.957299953201376397e+00,1.709220764526679481e+00,1.745172022895999930e+00,1.623587444172764904e+00,1.709363131742457798e+00,nan,2.801667280185960784e+00,2.563028667859714727e+00,2.286554296604897729e+00,2.202028881401082216e+00,2.125371264281361139e+00,2.992967081389348749e+00,2.146225465627580764e+00,nan,3.207326432623733847e+00,2.970238038499841249e+00,2.689577172033455366e+00,2.359519746345549507e+00,2.282402245360060977e+00,2.444476864095152191e+00,2.295138475304999481e+00,2.251668367270816873e+00,2.244777503481694581e+00,2.269158432092268640e+00,2.282721911069427012e+00,2.446276498539722510e+00,2.338154487569534368e+00,2.286264327296652965e+00,2.306018849207678301e+00,2.219043190919461228e+00,2.334187755676148335e+00,nan,3.315452117343895555e+00,2.997004584258908810e+00,2.771751219466322524e+00,2.486226855167123162e+00,2.414030055783132678e+00,2.474856440340397423e+00,nan,2.450244936983521971e+00,2.426444318543630629e+00,2.413534703729976361e+00,2.527633170886061986e+00,2.605647318682443814e+00,2.492083260576321901e+00,2.474343075830956362e+00,2.488466685579301618e+00,2.655390543578113771e+00,2.789028183650272830e+00,nan,3.469877856719184805e+00,3.151548685053483734e+00,2.936611021588340531e+00,2.853310507978263022e+00,2.912683326469464262e+00,2.854094223939695585e+00,nan,2.827166761403228978e+00,2.924794384268918357e+00,2.830279372017119766e+00,2.787938315873293327e+00,2.769584434847999876e+00,2.805706530553065292e+00,2.742459683940766002e+00,2.782660636220044559e+00,2.809356328182241569e+00,2.719066579077019430e+00,2.575561489301759543e+00,2.400891061515399549e+00,2.537108280995488574e+00,2.431212585981246121e+00,2.466649132027154856e+00,2.492179453162715941e+00,2.447111488657448319e+00,2.417675095174571798e+00,2.492135508644590214e+00,2.699895686526200933e+00,2.815840424212081139e+00,2.655591579871998675e+00,nan,nan,nan,nan,nan,nan,2.897540065094701411e+00,nan,2.762520122796841004e+00,2.849362655565290225e+00,2.880496859477440275e+00 
+1.448282797619689033e+00,nan,2.498352290018230892e+00,2.193732985787752110e+00,1.901528750190014883e+00,1.794175223903999861e+00,2.311922710786904922e+00,1.618332158394307996e+00,1.728362319253121671e+00,nan,2.855288086425296523e+00,2.616995309864857955e+00,2.237168731725114323e+00,2.195050953444587893e+00,2.172200419430815543e+00,2.279777571687267024e+00,2.379699243834203060e+00,nan,3.257525083755858919e+00,2.853496973637497991e+00,2.658155986387226388e+00,2.324313255405242362e+00,2.424155868769860334e+00,2.513468267877065898e+00,2.486213830694909976e+00,2.291586615056621312e+00,2.251012999999999931e+00,2.546493484344498182e+00,2.297221644425806897e+00,2.330498843245105700e+00,2.268906179214313035e+00,2.280831766668490967e+00,2.355077310925331791e+00,2.334187755676148335e+00,2.301127614721440562e+00,nan,3.391820912416689371e+00,2.996168119015853559e+00,2.773589879002719805e+00,2.531892719884067944e+00,2.405092457329264022e+00,2.455850959593525840e+00,2.460667316483999834e+00,2.500708538943420489e+00,2.492254233214735049e+00,2.428555039648190306e+00,2.530725605203190565e+00,2.602199549609012674e+00,2.545259340717343832e+00,2.455068239650892714e+00,2.526986620407559947e+00,2.495232871881482684e+00,2.541603334073006870e+00,nan,3.540291943623226967e+00,3.140650103726827069e+00,2.931588780948000128e+00,2.995236683670206013e+00,2.893959924337924150e+00,2.915236753059655239e+00,nan,2.874742271954519257e+00,2.994961410638089294e+00,2.806269177883082389e+00,2.782980026194810996e+00,2.747567398961103802e+00,4.669985999999999748e+00,2.745899861194134761e+00,4.629781499999999994e+00,2.826197699999999813e+00,2.745742861062142381e+00,2.731863965115904236e+00,2.435056213029295424e+00,2.422937858832630909e+00,2.519370405819999270e+00,2.465675482076310043e+00,2.432702635584000017e+00,2.443767352534183424e+00,2.421878230522076958e+00,2.467455219449789627e+00,2.517686291097339879e+00,2.867912180986219806e+00,2.594534158077791641e+00,nan,nan,nan,nan,nan,3.209458688316992436e+00,2.882004814539368187e+00,2.660277917435847961e+00,2.619122252294294739e+00,2.743864693836897573e+00,2.895094184208022003e+00
+nan,nan,nan,nan,nan,nan,nan,nan,1.914573224813413299e+00,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,4.096289379999999980e+00,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan
+2.777114637488512727e+00,nan,2.902512138302214328e+00,3.337939943976000023e+00,3.157664385320659406e+00,3.057690693529999670e+00,2.956671537618279721e+00,2.647790559548643063e+00,2.600827197156000192e+00,nan,3.077963113769292658e+00,3.513407905476817916e+00,3.326112000904095645e+00,3.377972791943133934e+00,3.286459479115694915e+00,3.183230805111595618e+00,3.144801405918842718e+00,nan,3.693814965817331153e+00,3.857075165188939003e+00,3.712440374521092323e+00,3.570189838599998389e+00,3.558252534228011577e+00,3.598833162195797186e+00,3.361116532128344936e+00,3.645115972631682411e+00,3.419352225809346280e+00,3.286077947238622787e+00,3.331292962587284467e+00,3.220392982734344489e+00,3.309513666546911370e+00,3.442769372373651660e+00,3.420587671891673676e+00,3.315452117343895555e+00,3.391820912416689371e+00,nan,3.020973522709029080e+00,3.943012843680116397e+00,3.825095592506822406e+00,3.812650940522723797e+00,3.585920216545621653e+00,3.545202156162824192e+00,nan,3.531504760462000014e+00,3.682971380761754343e+00,3.158486176091121589e+00,3.591458041508190924e+00,3.322526182686712737e+00,3.585853272355458365e+00,3.556426734714708715e+00,3.648782494043504521e+00,3.524622568811547119e+00,3.634654453175855338e+00,nan,4.006543900637350220e+00,4.290978810461902881e+00,3.913941023020258481e+00,4.303067189155544092e+00,4.559481884706239718e+00,3.894890354537893007e+00,nan,3.870382658566069001e+00,3.837758547468449244e+00,3.848434204477027887e+00,3.800319724473357308e+00,3.824236203749940888e+00,3.815908976279250631e+00,3.805004276284891684e+00,3.794498344271218571e+00,3.773946824443866976e+00,3.778952826100431484e+00,3.796104995748445621e+00,3.566583188532426618e+00,3.569873595030087721e+00,4.156062563104424790e+00,4.027477993099155107e+00,3.700882789395611994e+00,3.384804719817827934e+00,3.359144934219252310e+00,3.715781217968283201e+00,3.748373176323587685e+00,3.628954624874195289e+00,3.728300166727104514e+00,nan,nan,nan,nan,nan,nan,4.389828234087056913e+00,3.985891362043500141e+00,3.748479759732550765e+00,4.560391577007757213e+00,4.486315106401223218e+00 
+2.292730999999999852e+00,nan,2.791137807605085008e+00,2.865633441368077072e+00,2.753118114200741040e+00,2.758269343621273517e+00,2.500773228596057329e+00,2.202687000000000062e+00,2.381623504999999863e+00,nan,3.287211596287764159e+00,3.160705541557349285e+00,3.252279239985129244e+00,3.010302478797958869e+00,3.007135456004712104e+00,2.874266223488544547e+00,2.952102357492044860e+00,nan,3.799893439175729082e+00,3.391219600408382551e+00,3.323856439806529561e+00,3.275847581563238631e+00,3.254929372430220180e+00,3.141295122829764974e+00,3.092212088599222142e+00,3.043011136605344724e+00,3.207085461440305707e+00,2.676886435495511751e+00,2.611885048985999269e+00,3.038446516115085760e+00,2.983434536986762087e+00,3.077675620246793642e+00,3.068310904295865615e+00,2.997004584258908810e+00,2.996168119015853559e+00,nan,3.943012843680116397e+00,3.132486023041629597e+00,3.414474170717943124e+00,3.423431904892891620e+00,3.432691799635228858e+00,3.363019716591628239e+00,3.335297151829548223e+00,3.264099976336968467e+00,3.019764187661772237e+00,3.047728292913041592e+00,3.255297526166116118e+00,3.349127676857215352e+00,3.321357353376236610e+00,3.331801977037250673e+00,3.157045043037510990e+00,3.355835275413481256e+00,3.293073234908072244e+00,nan,4.034863999999999784e+00,3.676219245924359669e+00,3.516718437543879805e+00,3.450172579709245735e+00,3.621549020211234371e+00,3.532267349747518992e+00,nan,3.459594588538520199e+00,3.582130695595999725e+00,3.395137444499488844e+00,3.421782779674818364e+00,3.439696001766179734e+00,3.416199118438198568e+00,3.414636493488185653e+00,3.426484517389891682e+00,3.377185738206697430e+00,3.420547541432594763e+00,3.419297378948378707e+00,3.417301932801381614e+00,3.364697886635370061e+00,3.238172982003725497e+00,3.301529447064212697e+00,3.083195471458116899e+00,3.088711451281833664e+00,3.167851258509621015e+00,3.317335485445370935e+00,3.245549172473999988e+00,3.361266165014341833e+00,3.192712499401225479e+00,nan,nan,nan,nan,nan,nan,5.163086206646820386e+00,nan,3.428720604818816575e+00,4.663540000000000241e+00,3.805275048482614331e+00 
+2.217560439227675761e+00,nan,2.914767930905927784e+00,nan,2.620913558705519009e+00,2.320523549176734868e+00,2.233180408248965687e+00,2.165505323037658858e+00,2.156832615477223491e+00,nan,3.306840861393277820e+00,3.005996171556690300e+00,2.976862252550333654e+00,2.806923071465201591e+00,2.792470237376238007e+00,2.659089257243896665e+00,2.619200375753551135e+00,nan,3.637872122840000166e+00,nan,3.218765135896964669e+00,3.118281897354445054e+00,3.067738691983316723e+00,2.994731109960814930e+00,2.797408412516976473e+00,2.613915729822692047e+00,2.514371315503777904e+00,2.607241797926170168e+00,2.415265320864000032e+00,2.934326361691169893e+00,2.851951582298000165e+00,2.815183603975942361e+00,2.871410976371571167e+00,2.771751219466322524e+00,2.773589879002719805e+00,nan,3.825095592506822406e+00,3.414474170717943124e+00,2.929392920000000178e+00,3.633180059391664329e+00,3.556216084831785640e+00,3.198255086101343458e+00,3.139879332308680659e+00,2.644435636864833405e+00,2.747528224754991921e+00,2.689569593099542022e+00,3.011525258735019861e+00,3.089817360998281526e+00,3.097908243208204127e+00,3.036772647798803071e+00,3.051849459232510497e+00,2.991457498245489344e+00,2.997478788961339013e+00,nan,3.984326539292681879e+00,3.445011852846955325e+00,3.509300212793087681e+00,3.980937074984195245e+00,3.973390254807119604e+00,3.953335439918396865e+00,nan,nan,nan,nan,nan,nan,nan,4.249587854615622717e+00,nan,nan,nan,nan,3.495584121060816507e+00,3.195773836546395685e+00,3.058757773263763191e+00,2.664810101452855573e+00,2.648952817858495568e+00,2.722338184679946593e+00,2.885735447826302202e+00,3.138641437192032146e+00,3.098828025799362784e+00,3.075007520142039930e+00,3.165082345781291995e+00,nan,nan,nan,nan,nan,nan,nan,nan,3.691831896544256697e+00,nan,nan
+1.970633741240000347e+00,nan,2.815418887466482278e+00,2.636758930890821251e+00,2.326430558562040396e+00,2.262943077269151448e+00,1.992085523312371542e+00,1.833488642699999893e+00,1.471359664717049043e+00,nan,2.810855120454650358e+00,3.247446033862802839e+00,2.621895378101999796e+00,2.674791413108216886e+00,2.589343723035096634e+00,2.442552220092286674e+00,2.325440047375516794e+00,nan,3.760317907579733721e+00,3.274928798847390077e+00,nan,2.969542219849382825e+00,2.944741782800047947e+00,2.891677309686015995e+00,2.911317515500541298e+00,2.560461675436087603e+00,2.566217402732639652e+00,2.580499998495737302e+00,2.600958705506207558e+00,2.734965759407395947e+00,2.704013138071100109e+00,2.610380197458537666e+00,2.674993024402466890e+00,2.486226855167123162e+00,2.531892719884067944e+00,nan,3.812650940522723797e+00,3.423431904892891620e+00,3.633180059391664329e+00,2.587580976853713466e+00,3.013152503028891260e+00,2.910276013735638490e+00,3.053905652496203160e+00,2.677353475599155264e+00,2.598047088264591853e+00,2.705801116790527594e+00,2.876289226153264433e+00,2.904548463749728970e+00,2.896844829160265533e+00,2.754976631833153444e+00,2.738368862664779080e+00,2.763569943714434984e+00,2.702726745448828183e+00,nan,3.856763496790714285e+00,3.579864190323614359e+00,4.035737096196256424e+00,4.241478325278349004e+00,nan,nan,nan,4.174374918381448474e+00,3.359014769071992923e+00,3.658304326519307370e+00,3.637567535425345611e+00,3.625121474745472749e+00,3.612912597544709836e+00,3.596410722021615047e+00,3.577149529824780405e+00,nan,3.543352111478714761e+00,nan,3.110387079646232067e+00,2.690432148155578851e+00,2.933622464838548272e+00,2.837519626169834108e+00,2.704218039344667091e+00,2.703082795083489920e+00,2.822038761212103175e+00,2.925686693401738925e+00,3.186118463618622609e+00,2.949260738289206429e+00,2.912071645893536953e+00,nan,nan,nan,nan,nan,nan,3.877880620503292075e+00,nan,3.908537983506283453e+00,3.333174285570760009e+00,nan
+1.978484659832313586e+00,nan,2.731145838150000227e+00,2.551678274761727039e+00,2.309960961486909881e+00,2.124292568280190086e+00,1.940398765806435222e+00,1.744259396830999620e+00,1.906251725351999715e+00,nan,3.233875484509399545e+00,3.065395538611624726e+00,2.711023009977822529e+00,2.586231016746701528e+00,2.456823242926612583e+00,2.283608901897795196e+00,2.253703784767841167e+00,nan,3.445488195603509940e+00,3.247348155366532652e+00,3.584734143350733593e+00,3.159841976661825313e+00,2.829349315116489016e+00,2.724120937309716606e+00,2.804847280254437969e+00,2.409507030879508349e+00,2.515468944502676329e+00,2.546161382910453685e+00,2.680964238413514611e+00,2.764775825964307820e+00,2.597648665032389559e+00,2.560832181400079843e+00,2.623536785438334018e+00,2.414030055783132678e+00,2.405092457329264022e+00,nan,3.585920216545621653e+00,3.432691799635228858e+00,3.556216084831785640e+00,3.013152503028891260e+00,2.469279219245999357e+00,2.772634981443987900e+00,nan,2.643992763832922588e+00,2.699688704030151598e+00,2.769412313212475052e+00,2.982325999999999588e+00,3.054536538845868510e+00,2.978421443622368159e+00,2.678471081637432860e+00,2.806100108183476216e+00,2.653007849108540483e+00,2.658551128744919723e+00,nan,3.880797531612869999e+00,3.464533887572652127e+00,3.298518924278586173e+00,3.642049283870833598e+00,3.555462573708483287e+00,3.457384622985930633e+00,nan,3.507630549830198508e+00,3.407946137684199073e+00,3.488524882878548894e+00,3.568995461719917994e+00,3.512610849172591809e+00,3.544425417020708569e+00,3.537090133887159649e+00,3.524736829583512421e+00,3.686604726366762730e+00,3.505188259850309684e+00,2.983731135739717821e+00,nan,3.379177733919998872e+00,2.789534042300088945e+00,2.738473216759752304e+00,2.767198543502280561e+00,2.738396771966876919e+00,2.850205204272047599e+00,3.758149584999999959e+00,2.998016526087758482e+00,3.011464238904442148e+00,3.005124242695627501e+00,nan,nan,nan,nan,nan,nan,nan,nan,3.568726925815998818e+00,nan,nan
+2.123843670796349059e+00,nan,2.764798920013645578e+00,2.526955486459815603e+00,2.213358625623882858e+00,2.014397459734823315e+00,1.665716563725014199e+00,1.657915863955322422e+00,1.832806953973094899e+00,nan,3.087162786563446559e+00,3.122334158817808181e+00,2.543209412866774599e+00,2.503626351321899257e+00,2.325052102429825407e+00,2.221587305493134323e+00,2.256137808144933921e+00,nan,3.479341728769040465e+00,3.190545727301529855e+00,3.815083578232363770e+00,nan,3.064718306949428595e+00,3.657867755919260055e+00,2.660845052385965026e+00,2.461585545350200910e+00,2.396651947892219692e+00,2.485788651053217890e+00,2.735357000000000038e+00,2.696264555798654872e+00,2.749177586909491211e+00,2.558638968550950299e+00,2.493727541282640292e+00,2.474856440340397423e+00,2.455850959593525840e+00,nan,3.545202156162824192e+00,3.363019716591628239e+00,3.198255086101343458e+00,2.910276013735638490e+00,2.772634981443987900e+00,2.427222500000000061e+00,nan,2.788254459666164919e+00,2.686075600943439667e+00,nan,2.934684999999999988e+00,3.653977261396147647e+00,3.807572407035523909e+00,2.850922960145382934e+00,2.793187778928349463e+00,2.656033988799999968e+00,3.474496786837224072e+00,nan,4.076696808860330812e+00,3.506016690424663018e+00,3.285506365369863069e+00,3.243967386061540115e+00,3.325731097979977680e+00,3.304793808801832711e+00,nan,3.231919286940431402e+00,3.401004802188936971e+00,3.199465629840079206e+00,3.228780145955306935e+00,3.218585293708555728e+00,3.145925660095149290e+00,3.131619252162831657e+00,3.160542626579115666e+00,3.231190270651985497e+00,2.855650757396243300e+00,2.918156536761384423e+00,2.770400773508731262e+00,2.650642033727131253e+00,nan,2.781828688837606922e+00,2.715737157872329277e+00,2.763290235393184080e+00,nan,3.547421565782718833e+00,3.523363973539485716e+00,2.919731580760121403e+00,2.829594348811363957e+00,nan,nan,nan,nan,nan,nan,3.119557060492943013e+00,nan,2.630808021342932346e+00,3.221309883168052668e+00,3.144850939876559615e+00
+3.401917492662955400e+00,nan,nan,nan,2.175427475544306155e+00,2.112623259089022820e+00,2.117041534795845337e+00,1.696074946197604127e+00,1.847669404396772963e+00,nan,3.480955898084906686e+00,nan,2.468114575008998557e+00,2.380814470482003564e+00,2.283937664261180611e+00,2.285041335856895284e+00,2.329863285374691895e+00,nan,3.528985492374502542e+00,3.149652312049101788e+00,3.068034259588846346e+00,2.679891744350817007e+00,2.605973081318232243e+00,nan,nan,nan,nan,nan,nan,nan,nan,2.493985647935332839e+00,2.421563383328239283e+00,nan,2.460667316483999834e+00,nan,nan,3.335297151829548223e+00,3.139879332308680659e+00,3.053905652496203160e+00,nan,nan,2.101382473003656770e+00,nan,nan,nan,3.739531488897021916e+00,3.202424828482734132e+00,nan,nan,nan,nan,2.669921766741000013e+00,nan,3.900741382389083078e+00,nan,nan,nan,nan,nan,nan,nan,nan,nan,3.140443663136315511e+00,3.132957074579982404e+00,3.126838230536749919e+00,3.120393987347435782e+00,3.114251522724260646e+00,nan,3.103456984684071163e+00,2.812302022962624104e+00,2.749589685353739377e+00,nan,nan,nan,nan,nan,nan,nan,nan,3.698425852473994802e+00,nan,nan,nan,nan,nan,nan,nan,3.083285718432057898e+00,nan,2.898028441671258726e+00,2.893927592654235603e+00,2.849897364870004779e+00
+1.661217569393938165e+00,nan,2.577294999999999892e+00,2.512377538698842283e+00,2.057236006264463235e+00,1.866476714934763192e+00,1.785083345128283927e+00,1.688338914803545476e+00,1.836950655742607630e+00,nan,3.044841022413570020e+00,2.599396216890330180e+00,2.497650075994775065e+00,2.342334222586236692e+00,2.226553169463579174e+00,2.317316731409143848e+00,2.236184439602666441e+00,nan,3.584220169325621708e+00,3.109707119682947773e+00,2.706055059494451243e+00,2.658325607354853570e+00,2.564677286419438929e+00,2.589720016685201465e+00,2.513567858965287449e+00,2.544466356444934796e+00,4.084924705812877477e+00,3.961778970306553127e+00,3.223506767585381017e+00,2.596944278238381809e+00,2.414215568759341846e+00,2.373986026779984826e+00,2.414047906219106032e+00,2.450244936983521971e+00,2.500708538943420489e+00,nan,3.531504760462000014e+00,3.264099976336968467e+00,2.644435636864833405e+00,2.677353475599155264e+00,2.643992763832922588e+00,2.788254459666164919e+00,nan,2.319118510000000022e+00,nan,nan,nan,nan,2.655084974394660513e+00,2.570586234095999956e+00,2.617396372086342904e+00,2.594107762499437531e+00,3.356057151787010806e+00,nan,3.957318950141516023e+00,3.490615642503763549e+00,2.505968689303110786e+00,2.282549912658226265e+00,2.654572020679240918e+00,2.763269543111392945e+00,nan,2.742403838568642449e+00,3.238649819328080159e+00,2.952151573819601804e+00,2.621390600965991968e+00,2.698800019072865375e+00,2.737928498116464660e+00,2.730560793183692159e+00,2.667572614790920671e+00,2.868286529638906401e+00,2.693709729014642562e+00,2.735975168448719685e+00,2.628789270285801560e+00,nan,nan,nan,nan,nan,nan,nan,nan,4.355477613519636471e+00,2.771925121346328513e+00,nan,nan,nan,nan,nan,nan,2.809787200031472310e+00,nan,2.707888740891798207e+00,3.066740328775652280e+00,2.662000771560343537e+00 
+1.658908255107543361e+00,nan,2.515725537390733013e+00,2.397061582637723287e+00,2.025396263433673738e+00,1.929054132652514575e+00,1.696307147775999891e+00,1.794932874899998865e+00,1.842947372729765387e+00,nan,2.915277959438330679e+00,2.667339685584059872e+00,2.515571053460142092e+00,2.302921922015410239e+00,2.232126883495834768e+00,2.288803988898571617e+00,2.349186979053630608e+00,nan,3.545698138368303276e+00,2.983800661283132705e+00,2.668954837066636721e+00,2.518025443527665441e+00,2.634574122917984429e+00,2.592569885322525192e+00,2.407764999999999933e+00,2.575866208840205118e+00,2.716712464417471384e+00,2.686148396841981700e+00,2.668102809394676367e+00,2.555291327039311167e+00,2.412416644813516253e+00,2.380600098665100806e+00,2.368010294877883837e+00,2.426444318543630629e+00,2.492254233214735049e+00,nan,3.682971380761754343e+00,3.019764187661772237e+00,2.747528224754991921e+00,2.598047088264591853e+00,2.699688704030151598e+00,2.686075600943439667e+00,nan,nan,2.586383142802043178e+00,nan,3.642616136003936855e+00,3.000033051414565666e+00,2.605841526603069447e+00,2.527300595049792875e+00,2.579074014947838389e+00,2.559510519947123086e+00,4.234831625569999147e+00,nan,4.002151074153372790e+00,3.139126516636981723e+00,2.678188508685906299e+00,2.763636725485833612e+00,2.914307993213683279e+00,2.893461864192735877e+00,nan,2.794698388766717834e+00,2.873904338160999217e+00,2.839495255786266714e+00,2.609644067284269120e+00,2.782861336203494851e+00,2.768527655176615365e+00,2.713134882606357490e+00,2.835479263612121770e+00,2.806961114797497103e+00,2.792658877771192483e+00,2.630684252937330658e+00,2.655180504959931742e+00,2.686170832704604994e+00,nan,nan,nan,nan,nan,2.758381378339382906e+00,2.847814926278008141e+00,2.660463588327621309e+00,2.690136739736202820e+00,nan,nan,nan,nan,nan,nan,2.722684537253676762e+00,2.850151743161279949e+00,2.751203398756630669e+00,2.785649772873745711e+00,2.776527832920250294e+00 
+1.642212079553046822e+00,nan,2.563156586169286655e+00,2.293991332797971960e+00,2.112479797828868300e+00,1.997057432165719515e+00,1.825443858259430785e+00,1.871218994057417540e+00,1.851171071780704391e+00,nan,2.826570960000000188e+00,2.665309184317854907e+00,2.499384563279587912e+00,2.333904945778020767e+00,2.250971998111916061e+00,2.294188147027598212e+00,2.273048273187141533e+00,nan,3.074398909999999319e+00,2.902307280828097635e+00,2.748254030475682175e+00,2.608658310747251630e+00,2.682311577974600514e+00,2.657886477337289222e+00,2.601551658240770060e+00,2.638257827013879897e+00,2.843506186423690796e+00,2.709676667498234881e+00,2.547166786863519494e+00,2.569092893507910702e+00,2.463120363428902060e+00,2.431078934679998582e+00,2.351498881966631593e+00,2.413534703729976361e+00,2.428555039648190306e+00,nan,3.158486176091121589e+00,3.047728292913041592e+00,2.689569593099542022e+00,2.705801116790527594e+00,2.769412313212475052e+00,nan,nan,nan,nan,2.652444234999999928e+00,2.730291150842218517e+00,2.748564427120392839e+00,2.665400280784358422e+00,2.600358838686977592e+00,2.605256621056430966e+00,2.607247489171150345e+00,2.622041998295472354e+00,nan,3.460921631512224206e+00,3.104872499999999480e+00,2.881266000611812927e+00,2.895347296491358247e+00,2.920498742107281664e+00,2.905784401357172531e+00,nan,2.863884188371229378e+00,2.945833059319719283e+00,2.863639808327883785e+00,2.842257480959506477e+00,2.834934221311152580e+00,2.828923192769203787e+00,2.817007567432720183e+00,2.810434197968636472e+00,2.742350923307339094e+00,2.786040647259235836e+00,2.736059099962952157e+00,2.694153765348004548e+00,4.068385000000000140e+00,nan,nan,2.741910708474420755e+00,3.754339185963169623e+00,4.058650293713919055e+00,2.793680837970702324e+00,2.728033338573600197e+00,2.776428242401209889e+00,2.664066309491189166e+00,nan,nan,nan,nan,nan,nan,2.836075094360149151e+00,nan,2.855256827129938557e+00,2.866835818016323589e+00,2.817834024594226605e+00 
+2.730134724903547916e+00,nan,2.702375868557138627e+00,2.600648576279999347e+00,2.680057185678285325e+00,2.023296699055336045e+00,1.807563500000000101e+00,1.961492240651081742e+00,1.923223711726884000e+00,nan,2.977845919437438482e+00,2.727109666390177711e+00,2.683679918931362263e+00,2.452083106504473253e+00,2.443570433307090450e+00,2.383721816034898744e+00,2.401685399359999806e+00,nan,3.424021866317682683e+00,3.149450276935557014e+00,2.841011852500045620e+00,2.906882801601931465e+00,2.914781999999999762e+00,3.126658731073850284e+00,2.830969173624555690e+00,2.850092583576916638e+00,3.487548021541442722e+00,3.078214315408160928e+00,2.810631981429100978e+00,2.721342777245615796e+00,2.643145901006412668e+00,2.613426146843520925e+00,2.539899854811492386e+00,2.527633170886061986e+00,2.530725605203190565e+00,nan,3.591458041508190924e+00,3.255297526166116118e+00,3.011525258735019861e+00,2.876289226153264433e+00,2.982325999999999588e+00,2.934684999999999988e+00,3.739531488897021916e+00,nan,3.642616136003936855e+00,2.730291150842218517e+00,2.346058499999999825e+00,2.877823696193322345e+00,2.831570355318609788e+00,2.739948539151868090e+00,2.704774362654263520e+00,2.704920799860784264e+00,2.764429880006155749e+00,nan,3.722093174493429135e+00,3.400618008490624788e+00,3.117617902086435855e+00,3.091677973070690033e+00,3.086525451883898707e+00,3.070001944722365650e+00,nan,3.044363020481515747e+00,3.228667933979235904e+00,3.055600163136396041e+00,2.925148744139999923e+00,2.922428709300000094e+00,2.913062442967758070e+00,2.904804294815762855e+00,2.980009359835951610e+00,3.019411366673085961e+00,2.975716379895867014e+00,2.897562366836601289e+00,2.994425999999998922e+00,2.959975000000000023e+00,3.800272398692044362e+00,nan,nan,2.802274780444005575e+00,3.143916426896134375e+00,3.026084833248442241e+00,3.002627276214115515e+00,2.838560235250913166e+00,2.826017233271697737e+00,nan,nan,nan,nan,nan,nan,3.226807378358279355e+00,nan,3.098688258188737166e+00,3.378908430114569317e+00,nan 
+1.838059928147586675e+00,nan,2.579907864407321672e+00,nan,2.179585023431753754e+00,2.235362966941160767e+00,2.194036176385593784e+00,2.047639999999999905e+00,2.161564036887921869e+00,nan,3.186876597775869957e+00,3.036158149846956977e+00,3.778555435150168673e+00,3.106170637794131828e+00,2.397280580030374697e+00,2.459200433396921515e+00,2.431130671493846762e+00,nan,3.234808818931216301e+00,3.213336056991947576e+00,2.953617954025937298e+00,3.058844730510251075e+00,2.771634208960708001e+00,3.550006931049056735e+00,3.231757499814059020e+00,3.621668754422163961e+00,2.725927754172512874e+00,2.702231527597205751e+00,2.682013687575830030e+00,3.658125104999999877e+00,2.730929290343741211e+00,2.713625988119071408e+00,2.485343007564360551e+00,2.605647318682443814e+00,2.602199549609012674e+00,nan,3.322526182686712737e+00,3.349127676857215352e+00,3.089817360998281526e+00,2.904548463749728970e+00,3.054536538845868510e+00,3.653977261396147647e+00,3.202424828482734132e+00,nan,3.000033051414565666e+00,2.748564427120392839e+00,2.877823696193322345e+00,2.613429089331587019e+00,3.179582334406798605e+00,2.878278595624959468e+00,2.824509430411179078e+00,2.808747524361161929e+00,2.749651812585342192e+00,nan,3.828602233322280668e+00,3.502333844511893446e+00,3.382223799710767143e+00,3.034929415559111998e+00,3.071045419985740210e+00,3.059118091122956073e+00,nan,3.042311190857896364e+00,3.296902195519135770e+00,3.184873638893954695e+00,3.018747419693023737e+00,3.012542578742063881e+00,3.007038679012962401e+00,3.000908158863777153e+00,2.995650293094935357e+00,2.971066023745643658e+00,3.014295814640751026e+00,3.206158655601894392e+00,4.218929003462666749e+00,3.631638169896591162e+00,4.228561000000000014e+00,nan,3.324604410400000098e+00,2.752935280216121328e+00,2.814778601914782907e+00,2.868102763233180141e+00,2.846509391537667799e+00,2.933270858058514285e+00,2.965389682151255535e+00,nan,nan,nan,nan,nan,nan,3.245984917090000366e+00,nan,3.386708095641284721e+00,nan,nan 
+2.560533399766428353e+00,nan,2.659856731608489788e+00,nan,3.084054255797341604e+00,2.499722173550592785e+00,2.164038426031573703e+00,2.048036115281635983e+00,2.082620450012794500e+00,nan,3.000889695037431970e+00,2.872014072690910691e+00,nan,3.011159992893913895e+00,2.556486480000012218e+00,2.398624406271999021e+00,2.400908391133063091e+00,nan,3.521244549394114021e+00,3.112155474131306221e+00,2.911598720080597058e+00,2.937139091422724668e+00,3.472902620666958651e+00,2.989244999999999930e+00,2.540755499999999945e+00,2.993104000000000209e+00,2.528832736171667595e+00,2.486853104769803036e+00,2.571948428474562842e+00,2.655031850088069412e+00,3.439680786661460488e+00,2.733430485000045707e+00,2.601824172432462223e+00,2.492083260576321901e+00,2.545259340717343832e+00,nan,3.585853272355458365e+00,3.321357353376236610e+00,3.097908243208204127e+00,2.896844829160265533e+00,2.978421443622368159e+00,3.807572407035523909e+00,nan,2.655084974394660513e+00,2.605841526603069447e+00,2.665400280784358422e+00,2.831570355318609788e+00,3.179582334406798605e+00,2.746098840933905549e+00,2.937120826807904095e+00,2.836749312917369981e+00,2.749408186383750508e+00,2.671231109219788902e+00,nan,3.872006154060958050e+00,3.458541297281235494e+00,3.255723791292044034e+00,2.919761499999999899e+00,2.915637999999999952e+00,3.055999499999999980e+00,nan,2.855371499999999951e+00,3.210610969739553067e+00,2.830198000000000214e+00,2.804710500000000106e+00,3.037271368701894847e+00,2.779942500000000205e+00,2.766853499999999855e+00,2.753309499999999854e+00,3.068265369634559647e+00,3.026598920069254461e+00,3.034621356889016131e+00,3.613537236657740070e+00,3.563130510119641325e+00,3.620456109133148637e+00,3.976388979728341599e+00,2.576861905563999855e+00,2.574066520131999880e+00,2.680081827079107804e+00,3.002646863820890299e+00,4.004329000000000249e+00,3.096007095206586790e+00,3.092538947101677760e+00,nan,nan,nan,nan,nan,nan,3.167818078025211648e+00,nan,3.035393925527747339e+00,3.239298591022445439e+00,3.073400026447582256e+00 
+1.717308815506638497e+00,nan,2.446019263010000167e+00,nan,2.667513457216275530e+00,2.973339420833464253e+00,2.099291973879535700e+00,1.943468078831999790e+00,1.898749736651724529e+00,nan,3.028743591285905978e+00,2.771393875412695884e+00,2.726196090388934845e+00,3.389848506312136411e+00,2.455977915389544020e+00,2.380398612163793892e+00,2.296263831139013778e+00,nan,3.411150316198684251e+00,2.948497000000000146e+00,2.811993146342110172e+00,2.651858120979136757e+00,2.755495049532580509e+00,2.766157771680140876e+00,2.602553901195413300e+00,2.547086727033242681e+00,2.413853843620000550e+00,2.385136157600903584e+00,2.472594935240000069e+00,2.561779654686697771e+00,2.742065676196361768e+00,2.897078118088428411e+00,2.623477072603870308e+00,2.474343075830956362e+00,2.455068239650892714e+00,nan,3.556426734714708715e+00,3.331801977037250673e+00,3.036772647798803071e+00,2.754976631833153444e+00,2.678471081637432860e+00,2.850922960145382934e+00,nan,2.570586234095999956e+00,2.527300595049792875e+00,2.600358838686977592e+00,2.739948539151868090e+00,2.878278595624959468e+00,2.937120826807904095e+00,2.668075999999999670e+00,2.834848032440349108e+00,2.792164947297412603e+00,2.686966616873088576e+00,nan,3.842267713708587884e+00,3.471148235289422068e+00,3.203524312455733547e+00,2.921646500000000035e+00,2.731526500000000190e+00,2.909624500000000058e+00,nan,2.868304999999999882e+00,3.285364019825319115e+00,3.124109155272550620e+00,2.815366000000000035e+00,3.023036725358840382e+00,2.789375000000000160e+00,2.776363499999999984e+00,2.761975500000000139e+00,2.957423499999999983e+00,2.983183999085219362e+00,2.730617069275505404e+00,2.872222476047352480e+00,3.050594999999999946e+00,nan,2.753423285870408854e+00,2.569840631334384007e+00,2.586742539631998739e+00,2.618471826141057335e+00,2.844965535261812395e+00,3.476185203641928734e+00,3.382037047875141145e+00,3.039170735315503524e+00,nan,nan,nan,nan,nan,nan,3.038214570267873782e+00,nan,2.893533768235434067e+00,3.153719280324754237e+00,3.184497455008375777e+00 
+2.981835131051900678e+00,nan,2.543386771544999370e+00,2.408121107369975533e+00,3.022270450799025010e+00,3.699650765381214956e+00,2.156044223442725283e+00,1.912347849455001247e+00,1.846356342257577277e+00,nan,3.071893694824062937e+00,2.824171039295678920e+00,2.613999244539580324e+00,3.712243963983961503e+00,3.170176451735806200e+00,2.340757225927727170e+00,2.307209773441828649e+00,nan,3.288303297107999512e+00,2.878702500000000164e+00,2.941856220573235259e+00,2.673661176544814388e+00,2.631098626025020426e+00,2.693866701204727487e+00,2.564717778824943561e+00,2.491485578293679115e+00,2.471920608462353730e+00,2.455465443195000041e+00,2.479710707528740343e+00,2.607708974711017369e+00,2.622107599456418914e+00,3.327511740046642963e+00,3.259894724940886945e+00,2.488466685579301618e+00,2.526986620407559947e+00,nan,3.648782494043504521e+00,3.157045043037510990e+00,3.051849459232510497e+00,2.738368862664779080e+00,2.806100108183476216e+00,2.793187778928349463e+00,nan,2.617396372086342904e+00,2.579074014947838389e+00,2.605256621056430966e+00,2.704774362654263520e+00,2.824509430411179078e+00,2.836749312917369981e+00,2.834848032440349108e+00,2.765250981666000030e+00,2.814461022549006763e+00,2.741547856826452634e+00,4.346273525093192092e+00,3.803929377923144894e+00,3.282235326740940540e+00,3.149576683342720340e+00,3.151474567027565676e+00,3.182274118757295156e+00,3.163185789591570263e+00,nan,3.115754303188468022e+00,3.211380029166195804e+00,2.921799105364150329e+00,3.058990623174045798e+00,3.055720756820926809e+00,2.914477726600557261e+00,3.015920479197032211e+00,2.898002459318961943e+00,2.967185764725684738e+00,3.039330776900507569e+00,2.809845403340724523e+00,2.794174242269098940e+00,nan,3.589716374697550449e+00,2.632752620703396218e+00,2.585316428402419930e+00,2.543209560886189102e+00,2.660332368320192487e+00,2.796455150916263843e+00,3.032914366831348740e+00,3.437036299981330423e+00,3.619764290599422285e+00,nan,nan,nan,nan,nan,nan,3.189179007949063838e+00,3.149822177868866202e+00,3.076689159559616193e+00,3.130479755811825360e+00,3.109971683843841816e+00 
+2.463631385216781045e+00,nan,2.754167328141526205e+00,2.452114869007405940e+00,nan,nan,1.977026370172842773e+00,1.810068174524030349e+00,1.861393704153232465e+00,nan,3.094368028561687467e+00,2.801153935443076559e+00,2.571674128788551172e+00,2.436644166829515346e+00,2.479758275462557116e+00,2.356778166027258870e+00,2.315501639007846002e+00,nan,3.369078384305837393e+00,3.195854216931044434e+00,2.867565597348202822e+00,2.642320757371082962e+00,2.562430261726698077e+00,2.701999416143169963e+00,2.532022689888000500e+00,2.506153350122236390e+00,2.460157973262472098e+00,2.477868713992720906e+00,2.524873089141443483e+00,2.642204576942603023e+00,2.564924306941496646e+00,2.595082841158653508e+00,2.636475490118254328e+00,2.655390543578113771e+00,2.495232871881482684e+00,nan,3.524622568811547119e+00,3.355835275413481256e+00,2.991457498245489344e+00,2.763569943714434984e+00,2.653007849108540483e+00,2.656033988799999968e+00,nan,2.594107762499437531e+00,2.559510519947123086e+00,2.607247489171150345e+00,2.704920799860784264e+00,2.808747524361161929e+00,2.749408186383750508e+00,2.792164947297412603e+00,2.814461022549006763e+00,2.673263026280232602e+00,2.739540318163267596e+00,nan,3.750438664805079814e+00,3.429660905578512775e+00,3.168683211063040073e+00,3.201951393929597600e+00,3.201885288767425841e+00,3.084459674729312262e+00,nan,3.104704921632944092e+00,3.284824134803970619e+00,3.064024764575037807e+00,3.040933760853872236e+00,3.004710560351171900e+00,3.029323763863701391e+00,2.980607117311453003e+00,3.014706643346841020e+00,3.095592711326943913e+00,3.006908529668696595e+00,2.743419502668856147e+00,2.642797279271585253e+00,2.708906932220893626e+00,2.819453120381860689e+00,2.608834239585742676e+00,2.604138767761817519e+00,2.566739981916471347e+00,2.612977309024292261e+00,2.699379096718585558e+00,2.926775453781026659e+00,3.040385978050964333e+00,2.985558667634192087e+00,nan,nan,nan,nan,nan,nan,3.137655971898623886e+00,nan,2.990156601550042481e+00,3.135550654159438899e+00,3.148704912977985604e+00 
+1.634774678556857541e+00,nan,2.626631080471999891e+00,2.427818719105562462e+00,2.122744118673664637e+00,2.022570440537211578e+00,1.941182275943909019e+00,1.742491613143805740e+00,1.817616045790547252e+00,nan,3.131795025758310480e+00,2.861563960885145530e+00,2.480875609239356905e+00,2.445597960070847510e+00,2.473980824635633624e+00,3.054831525179714458e+00,2.333027948754744507e+00,nan,3.521276075752311829e+00,3.152715819931940278e+00,2.873478822426509982e+00,2.588768449923791959e+00,2.858686317291347923e+00,2.733267573366503278e+00,2.689450477596837086e+00,2.608384746487916850e+00,2.580887824450416090e+00,2.749767109560175449e+00,2.522829073393738852e+00,2.613723040675126175e+00,2.494950595107313429e+00,2.740061844547714731e+00,2.634011847942003381e+00,2.789028183650272830e+00,2.541603334073006870e+00,nan,3.634654453175855338e+00,3.293073234908072244e+00,2.997478788961339013e+00,2.702726745448828183e+00,2.658551128744919723e+00,3.474496786837224072e+00,2.669921766741000013e+00,3.356057151787010806e+00,4.234831625569999147e+00,2.622041998295472354e+00,2.764429880006155749e+00,2.749651812585342192e+00,2.671231109219788902e+00,2.686966616873088576e+00,2.741547856826452634e+00,2.739540318163267596e+00,2.596083723196580095e+00,4.398792168278598425e+00,3.814696123876377243e+00,3.423562001799047128e+00,3.138942872025309860e+00,3.110130082489791725e+00,3.166812256608924514e+00,3.263153279286516550e+00,nan,3.231131406551964069e+00,3.261565982893820159e+00,3.077357348298955131e+00,3.017898480407091633e+00,3.041432803420921704e+00,3.017242843180704792e+00,2.986190547740434020e+00,3.028759176182248769e+00,3.030152793206261741e+00,3.317744035255313584e+00,2.693181052863613534e+00,2.663748275551748534e+00,2.711364110946903772e+00,nan,2.788085845480279268e+00,4.254923479519998608e+00,2.601544924030662287e+00,2.594818998454055947e+00,2.635192324411776088e+00,3.344421021231074764e+00,3.090943025418786849e+00,3.092409450420725570e+00,nan,nan,nan,nan,nan,nan,3.083634481599268451e+00,nan,2.878923530939234698e+00,3.138786750926899849e+00,3.125024338696277937e+00
+nan,nan,nan,nan,nan,nan,nan,1.860360684941266163e+00,2.016057199128694855e+00,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,4.346273525093192092e+00,nan,4.398792168278598425e+00,4.206681661680275575e+00,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,2.773954963721450895e+00,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan
+2.895231586643593591e+00,nan,3.060899567751833583e+00,3.577486219591955230e+00,3.206211336683174373e+00,3.308670336177650828e+00,3.119077788330132162e+00,2.295166000000000039e+00,2.791186132841223078e+00,nan,3.207056499975514452e+00,3.682605886690395014e+00,3.517767352500008826e+00,3.500835083353407651e+00,3.509492662248356876e+00,3.375811784689641826e+00,3.340023438074685824e+00,nan,3.712389038875405500e+00,3.875844378908748222e+00,3.894038194795731211e+00,3.606752991243999951e+00,3.600518918103827293e+00,3.861455095499962464e+00,3.638315531305111428e+00,3.366282627514938230e+00,3.517558698128232741e+00,3.332299103713703747e+00,3.490308530526389230e+00,3.698072348638218099e+00,3.530899935242295395e+00,3.577916023536139356e+00,3.598781523528935544e+00,3.469877856719184805e+00,3.540291943623226967e+00,nan,4.006543900637350220e+00,4.034863999999999784e+00,3.984326539292681879e+00,3.856763496790714285e+00,3.880797531612869999e+00,4.076696808860330812e+00,3.900741382389083078e+00,3.957318950141516023e+00,4.002151074153372790e+00,3.460921631512224206e+00,3.722093174493429135e+00,3.828602233322280668e+00,3.872006154060958050e+00,3.842267713708587884e+00,3.803929377923144894e+00,3.750438664805079814e+00,3.814696123876377243e+00,nan,3.409465252128550450e+00,4.947434044825280708e+00,4.760295268165235960e+00,4.392182216454685317e+00,4.025254947375103143e+00,4.050870650341003909e+00,nan,4.024790896632069348e+00,4.130841271361191502e+00,4.688569899157930543e+00,4.305230763101310920e+00,4.665644247318067706e+00,4.081602499999999800e+00,3.966460435212608715e+00,4.508800323243676189e+00,3.833169737854806414e+00,4.564760147594914663e+00,3.972050014524950878e+00,4.145189035944445877e+00,4.295540852464433179e+00,3.963739936750294568e+00,nan,3.621749101101198409e+00,3.352753013915986013e+00,3.603656123377349640e+00,3.844557172998372518e+00,3.883874665692768957e+00,3.771378957423852984e+00,3.876293100807123970e+00,nan,nan,nan,nan,nan,nan,4.693627940945416022e+00,4.289405096850125609e+00,3.891591825967568585e+00,4.266552372181984687e+00,4.610202562027180484e+00 
+2.532893000000000061e+00,nan,2.942200507934218834e+00,3.073992427590520826e+00,3.077829536796754084e+00,2.978604287742152135e+00,2.656179259870211862e+00,2.303975999999999580e+00,2.572318222898914097e+00,nan,3.372274539450068609e+00,3.435022241568378210e+00,3.252669952605450554e+00,3.211438540060587599e+00,3.153714162331127113e+00,3.013756980929860330e+00,3.078213015053651969e+00,nan,3.585631958641604555e+00,3.526107329327999196e+00,3.554561544909301762e+00,3.410910675486997334e+00,3.448933180190470349e+00,3.345364835015479876e+00,3.313535951784344125e+00,3.278514526456869138e+00,3.211290384647422336e+00,3.176090992862974360e+00,2.661730499999999999e+00,3.208616346879233916e+00,3.434274100748420100e+00,3.288694855412869433e+00,3.215669781058749344e+00,3.151548685053483734e+00,3.140650103726827069e+00,nan,4.290978810461902881e+00,3.676219245924359669e+00,3.445011852846955325e+00,3.579864190323614359e+00,3.464533887572652127e+00,3.506016690424663018e+00,nan,3.490615642503763549e+00,3.139126516636981723e+00,3.104872499999999480e+00,3.400618008490624788e+00,3.502333844511893446e+00,3.458541297281235494e+00,3.471148235289422068e+00,3.282235326740940540e+00,3.429660905578512775e+00,3.423562001799047128e+00,nan,4.947434044825280708e+00,3.298714988109369806e+00,3.692968930098736280e+00,3.686453637559409469e+00,3.678678185301554215e+00,3.650036804345345498e+00,nan,3.705454234918438949e+00,3.718512165956701310e+00,3.699338470994831152e+00,3.664977073570958144e+00,3.435104679115142723e+00,3.425154769190899007e+00,3.412623828732121556e+00,3.632151246665912936e+00,3.365749160055187872e+00,3.405715523710690196e+00,3.643490103141645786e+00,3.481035205453993697e+00,3.553564858839485030e+00,3.455368731748499567e+00,3.474116346145557088e+00,3.108868432680178984e+00,3.137343747647857395e+00,3.291613823309888165e+00,3.574565760958912986e+00,3.526000774956049355e+00,3.480581890090729491e+00,3.319847334606512490e+00,nan,nan,nan,nan,nan,nan,3.983816450329852543e+00,3.891775602416040591e+00,3.369831285834202550e+00,3.817181344290035305e+00,3.702005143907769469e+00 
+2.375929456869745771e+00,nan,2.809014876548467488e+00,2.869620653824864753e+00,2.649063209060988910e+00,2.298159996394635751e+00,2.268767940621108536e+00,2.269709831857110061e+00,2.393717145928000889e+00,nan,3.275813033201431690e+00,3.062845681292088784e+00,3.049947235500000353e+00,3.043228776632683097e+00,2.925009833987199936e+00,2.610373341379999879e+00,2.757065486043117186e+00,nan,3.706503723345490275e+00,3.400679882393482067e+00,3.330429782037772846e+00,3.245683226466413274e+00,3.128703826550511824e+00,3.025198288112067146e+00,3.024956716718689709e+00,2.863241942272668261e+00,2.411520201046612133e+00,2.694251247327252408e+00,2.878778358210448474e+00,3.108668503690614759e+00,3.068289479999999791e+00,3.040496795000204422e+00,2.981266326066218220e+00,2.936611021588340531e+00,2.931588780948000128e+00,nan,3.913941023020258481e+00,3.516718437543879805e+00,3.509300212793087681e+00,4.035737096196256424e+00,3.298518924278586173e+00,3.285506365369863069e+00,nan,2.505968689303110786e+00,2.678188508685906299e+00,2.881266000611812927e+00,3.117617902086435855e+00,3.382223799710767143e+00,3.255723791292044034e+00,3.203524312455733547e+00,3.149576683342720340e+00,3.168683211063040073e+00,3.138942872025309860e+00,nan,4.760295268165235960e+00,3.692968930098736280e+00,3.023345729999999953e+00,nan,nan,4.353606253584452190e+00,nan,4.331012325518368122e+00,nan,nan,nan,nan,3.491896095314399151e+00,3.476382186012672459e+00,3.460447456599406557e+00,3.387910435616395599e+00,3.405124959897154469e+00,4.028736749540098749e+00,3.295045682595882486e+00,3.469606953934209148e+00,3.110260900873239365e+00,2.815970213638827158e+00,2.788791678275114094e+00,2.921955738521794910e+00,3.041671960870554248e+00,3.222264767869003510e+00,3.181314933479999496e+00,3.283043529943898697e+00,3.272278998785002724e+00,nan,nan,nan,nan,nan,nan,nan,nan,3.853539851496809110e+00,nan,nan 
+2.333721641941336244e+00,nan,2.780239295512055619e+00,nan,2.693612285546065799e+00,2.267942301467168242e+00,2.250902967936408317e+00,2.086378688615014187e+00,2.277596756703278658e+00,nan,4.255122768464108418e+00,3.012907175427013140e+00,3.008673422827253141e+00,2.891884313166707798e+00,2.848062411933960814e+00,2.713796630883670957e+00,2.625121024987919327e+00,nan,4.189417814999999656e+00,nan,3.281833353422522137e+00,3.439417641544238347e+00,3.178807995362923133e+00,3.157627240540809677e+00,3.002555974267049343e+00,2.789093180971686792e+00,2.194575318577716150e+00,2.728044222662796336e+00,2.840138553841585534e+00,2.996650107500054894e+00,3.019055016963671179e+00,2.921512340517324890e+00,3.008521184028515538e+00,2.853310507978263022e+00,2.995236683670206013e+00,nan,4.303067189155544092e+00,3.450172579709245735e+00,3.980937074984195245e+00,4.241478325278349004e+00,3.642049283870833598e+00,3.243967386061540115e+00,nan,2.282549912658226265e+00,2.763636725485833612e+00,2.895347296491358247e+00,3.091677973070690033e+00,3.034929415559111998e+00,2.919761499999999899e+00,2.921646500000000035e+00,3.151474567027565676e+00,3.201951393929597600e+00,3.110130082489791725e+00,nan,4.392182216454685317e+00,3.686453637559409469e+00,nan,2.795784803455640066e+00,nan,4.463541145647004882e+00,nan,4.441654157800008385e+00,nan,nan,nan,3.888302959713692886e+00,3.974384275636384078e+00,3.965775407403197228e+00,3.958869820396079042e+00,4.023911882966872966e+00,3.943328830742504731e+00,nan,4.321180737667454252e+00,3.327398717907630754e+00,3.167023631429179975e+00,2.738597578658447773e+00,2.820691302126126843e+00,2.888766128429178348e+00,2.989985414093068616e+00,3.233056640935516590e+00,2.931176999999999921e+00,2.993097500000000188e+00,3.151326365672462604e+00,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan 
+2.404692950858444256e+00,nan,2.736311587139208967e+00,nan,2.730314133292311318e+00,2.280526051764281359e+00,2.294385495021581622e+00,2.223791596672098692e+00,2.548308717842870941e+00,nan,3.247500655575072503e+00,3.052078909899570647e+00,3.040488992627333964e+00,3.011047453275526031e+00,2.935701197289000586e+00,2.788812222691190890e+00,2.855473893747102920e+00,nan,3.710697547595389167e+00,nan,3.286721834117217522e+00,3.418116690760169440e+00,3.214405655945769880e+00,3.188478756345855203e+00,3.069165649495133597e+00,2.941038085938801672e+00,2.630136038304786261e+00,2.711188398804034172e+00,2.933187931827890615e+00,3.067310319201841384e+00,2.768696499999999894e+00,2.951367512202400789e+00,2.989987024965945039e+00,2.912683326469464262e+00,2.893959924337924150e+00,nan,4.559481884706239718e+00,3.621549020211234371e+00,3.973390254807119604e+00,nan,3.555462573708483287e+00,3.325731097979977680e+00,nan,2.654572020679240918e+00,2.914307993213683279e+00,2.920498742107281664e+00,3.086525451883898707e+00,3.071045419985740210e+00,2.915637999999999952e+00,2.731526500000000190e+00,3.182274118757295156e+00,3.201885288767425841e+00,3.166812256608924514e+00,nan,4.025254947375103143e+00,3.678678185301554215e+00,nan,nan,2.909272500000000150e+00,nan,nan,nan,nan,nan,3.860603494641611011e+00,3.856012399529944457e+00,3.963396438810915523e+00,3.956774708687231357e+00,3.950467200375316512e+00,3.749610429009732737e+00,3.369042668648182026e+00,nan,3.385709826043067139e+00,3.341727266227507265e+00,3.163051079965418122e+00,2.673204838906142466e+00,2.585628677064511827e+00,2.915038547060145202e+00,2.998117865966676288e+00,3.298795489533983272e+00,2.935900999999999872e+00,2.965006499999999878e+00,3.247547626425873357e+00,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan
+2.359701452928519405e+00,nan,2.918313353113918041e+00,nan,2.699902439230776885e+00,2.457107339492865972e+00,2.275843880177033540e+00,2.074262139388241000e+00,2.523973634775171782e+00,nan,3.225947818747616935e+00,3.042250476350013333e+00,2.770018499999999939e+00,2.997422872032912355e+00,2.954073599232933223e+00,2.766716879009825547e+00,2.830965480767346776e+00,nan,3.705681983183265249e+00,nan,3.267126613674823155e+00,3.196041783993769148e+00,3.180250280120187067e+00,3.156901453064083540e+00,2.892165097920021744e+00,2.491291756554436798e+00,2.661183020971134017e+00,2.692149580973064005e+00,2.907971918014151225e+00,3.039852448308959065e+00,2.751466499999999815e+00,2.842297350201417494e+00,3.011780087995099109e+00,2.854094223939695585e+00,2.915236753059655239e+00,nan,3.894890354537893007e+00,3.532267349747518992e+00,3.953335439918396865e+00,nan,3.457384622985930633e+00,3.304793808801832711e+00,nan,2.763269543111392945e+00,2.893461864192735877e+00,2.905784401357172531e+00,3.070001944722365650e+00,3.059118091122956073e+00,3.055999499999999980e+00,2.909624500000000058e+00,3.163185789591570263e+00,3.084459674729312262e+00,3.263153279286516550e+00,nan,4.050870650341003909e+00,3.650036804345345498e+00,4.353606253584452190e+00,4.463541145647004882e+00,nan,2.967394227521133132e+00,nan,nan,nan,nan,nan,nan,3.940912751935055525e+00,3.933363467033499816e+00,3.930470695264987491e+00,3.708214927947469874e+00,3.914881687302765201e+00,nan,3.369729332857387849e+00,3.305343874546734284e+00,3.143273861143473624e+00,2.816690733052363615e+00,2.821403177985929656e+00,2.904836155041787826e+00,2.967368838667030939e+00,3.305365896301708517e+00,2.912808500000000134e+00,2.944329999999999892e+00,3.224446621657516499e+00,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan
+nan,nan,nan,nan,nan,nan,nan,2.286905489186270124e+00,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,3.688671090105823058e+00,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan
+2.328289845606950959e+00,nan,2.690536139384482883e+00,nan,2.668190947514313294e+00,2.501079999999999970e+00,2.203102409510295345e+00,2.225247664196800201e+00,2.233835494605513894e+00,nan,3.193601573409589722e+00,3.091436161457445930e+00,2.990097014925999552e+00,2.906047757989865143e+00,2.885803703028605227e+00,2.749423763367549345e+00,2.700408673560000139e+00,nan,3.694206464673034152e+00,3.392305041245998964e+00,3.236335518543313583e+00,3.165590564483208968e+00,3.137957530563208586e+00,3.039446355151042667e+00,3.003665259812087651e+00,2.914374826670163898e+00,2.637614115634220102e+00,2.769337743805257990e+00,2.879288134416827738e+00,2.998685230030839310e+00,2.932710707811259532e+00,2.826150521830632645e+00,2.976017043752272606e+00,2.827166761403228978e+00,2.874742271954519257e+00,nan,3.870382658566069001e+00,3.459594588538520199e+00,nan,4.174374918381448474e+00,3.507630549830198508e+00,3.231919286940431402e+00,nan,2.742403838568642449e+00,2.794698388766717834e+00,2.863884188371229378e+00,3.044363020481515747e+00,3.042311190857896364e+00,2.855371499999999951e+00,2.868304999999999882e+00,3.115754303188468022e+00,3.104704921632944092e+00,3.231131406551964069e+00,nan,4.024790896632069348e+00,3.705454234918438949e+00,4.331012325518368122e+00,4.441654157800008385e+00,nan,nan,nan,2.931690670000000054e+00,nan,nan,nan,nan,nan,3.898340234959190020e+00,nan,4.266925370060276279e+00,nan,nan,3.344569837421908165e+00,nan,3.109679019317446613e+00,2.782480026910277182e+00,2.783907767662940902e+00,2.715627450682059685e+00,2.969529727761716753e+00,3.148133678504557054e+00,3.126878152703998381e+00,2.903915000000000024e+00,3.189202149557583521e+00,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan
+2.349228210391335114e+00,nan,2.820951732145730251e+00,nan,2.719663074183234830e+00,2.628655463020457717e+00,2.349357310783999786e+00,2.219577017564553056e+00,2.218557887745593593e+00,nan,3.599190935683564518e+00,3.174874968418603061e+00,3.015558741198967940e+00,3.055987832280876226e+00,2.873823575242816730e+00,2.779719320362640023e+00,2.714427246366000190e+00,nan,3.755049878509846284e+00,nan,3.272909566481592591e+00,3.374588433217650785e+00,3.200944289999999803e+00,3.177445238360275148e+00,3.197218117087565314e+00,2.568920499999999940e+00,3.077564072801417527e+00,2.824312146129556389e+00,2.948478562886482468e+00,3.104227356551044092e+00,3.051640590311405443e+00,3.037145658933913861e+00,3.006497743688757662e+00,2.924794384268918357e+00,2.994961410638089294e+00,nan,3.837758547468449244e+00,3.582130695595999725e+00,nan,3.359014769071992923e+00,3.407946137684199073e+00,3.401004802188936971e+00,nan,3.238649819328080159e+00,2.873904338160999217e+00,2.945833059319719283e+00,3.228667933979235904e+00,3.296902195519135770e+00,3.210610969739553067e+00,3.285364019825319115e+00,3.211380029166195804e+00,3.284824134803970619e+00,3.261565982893820159e+00,nan,4.130841271361191502e+00,3.718512165956701310e+00,nan,nan,nan,nan,nan,nan,2.936982299999999935e+00,nan,nan,nan,3.393973481831789041e+00,nan,4.064212601096474309e+00,nan,nan,3.782787269178224054e+00,3.399624423579948651e+00,nan,3.160202985736541237e+00,3.157025630162292984e+00,2.860743518330027158e+00,2.954883644370824847e+00,3.078957944660624779e+00,3.235748969898238236e+00,3.362001717597975503e+00,3.344334849855499225e+00,3.271410545820458982e+00,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan
+2.148751490302119205e+00,nan,2.993750883339871383e+00,nan,2.636764059546144789e+00,2.294932793128598991e+00,2.187246894483326543e+00,2.146317226475999540e+00,2.214237180034761554e+00,nan,3.290381200510007709e+00,3.015024624184127422e+00,2.999401475627535341e+00,2.813330708745999775e+00,2.869937364658869505e+00,2.705309608363728024e+00,2.612338171218397065e+00,nan,3.707704421289899077e+00,3.822947010129070122e+00,3.221724824548038324e+00,3.131426292924215815e+00,3.102078435439519222e+00,3.022494856094562810e+00,2.836146145765064741e+00,2.733698425772869367e+00,2.506083447616911819e+00,2.760246597445312755e+00,2.851668987603455552e+00,2.943357150322975180e+00,2.970214517638746887e+00,2.931522421418621960e+00,2.944398977589490762e+00,2.830279372017119766e+00,2.806269177883082389e+00,nan,3.848434204477027887e+00,3.395137444499488844e+00,nan,3.658304326519307370e+00,3.488524882878548894e+00,3.199465629840079206e+00,nan,2.952151573819601804e+00,2.839495255786266714e+00,2.863639808327883785e+00,3.055600163136396041e+00,3.184873638893954695e+00,2.830198000000000214e+00,3.124109155272550620e+00,2.921799105364150329e+00,3.064024764575037807e+00,3.077357348298955131e+00,nan,4.688569899157930543e+00,3.699338470994831152e+00,nan,nan,nan,nan,nan,nan,nan,2.932562390000000185e+00,nan,nan,nan,4.255397317126287327e+00,nan,nan,3.852594502178106062e+00,nan,3.483932851917118878e+00,3.453575268659903497e+00,3.089188808264899944e+00,2.967274477719059167e+00,2.868323850043293710e+00,2.851872808155165195e+00,2.960983616002104579e+00,3.168102563826697526e+00,3.150023764976858764e+00,2.894366499999999842e+00,3.187654468046541734e+00,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan
+2.185729184140881465e+00,nan,2.844007041379212453e+00,nan,2.612851037927133468e+00,2.415151487130954955e+00,2.153721132422302453e+00,2.127185315785555364e+00,2.170809442601557837e+00,nan,3.257709725821887670e+00,3.034902308886990419e+00,2.687053500000000206e+00,2.791837598757269667e+00,2.797981299860069715e+00,2.649455026323741968e+00,2.636711623495410883e+00,nan,3.805887777946879602e+00,4.351728640929112935e+00,3.199202778086023269e+00,3.359330286966708101e+00,3.029603365350000743e+00,2.997059706273324409e+00,2.592022566851413412e+00,2.618415755084577246e+00,2.497408794592275605e+00,2.609818994612419729e+00,2.856032955973752951e+00,2.946926639770146128e+00,2.859520769402405183e+00,2.717144236143999692e+00,2.872863498984197239e+00,2.787938315873293327e+00,2.782980026194810996e+00,nan,3.800319724473357308e+00,3.421782779674818364e+00,nan,3.637567535425345611e+00,3.568995461719917994e+00,3.228780145955306935e+00,3.140443663136315511e+00,2.621390600965991968e+00,2.609644067284269120e+00,2.842257480959506477e+00,2.925148744139999923e+00,3.018747419693023737e+00,2.804710500000000106e+00,2.815366000000000035e+00,3.058990623174045798e+00,3.040933760853872236e+00,3.017898480407091633e+00,nan,4.305230763101310920e+00,3.664977073570958144e+00,nan,nan,3.860603494641611011e+00,nan,nan,nan,nan,nan,2.928656999999999844e+00,nan,nan,nan,nan,4.726457605715734367e+00,nan,4.236967953015315480e+00,3.502640796817273916e+00,3.201231867042949819e+00,3.066927860052526711e+00,2.669296394624820046e+00,2.628675634288091700e+00,2.724648747873188448e+00,2.890045511768458475e+00,3.136917015281746046e+00,2.820246500000000101e+00,2.853339000000000070e+00,3.120765554200565006e+00,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan
+2.242622632500028690e+00,nan,2.898197128226129848e+00,nan,2.591470501613688970e+00,2.393553334329590943e+00,2.396033169999999934e+00,2.158242374322808743e+00,2.417581705915671009e+00,nan,3.958049091712887435e+00,3.149737440000010213e+00,2.952938851079999871e+00,2.801874633331664199e+00,2.789675441203785855e+00,2.640130765400322321e+00,2.593238446137882125e+00,nan,4.256415684643828357e+00,nan,3.214645919499330873e+00,3.338164633783192059e+00,3.079401968075109242e+00,2.989840776961592450e+00,2.949985533573272090e+00,2.818773586575832546e+00,2.610506202014063248e+00,2.694600361578246339e+00,2.846620792394838784e+00,2.961522634414996524e+00,2.650380499999999806e+00,2.716268161981999718e+00,2.864861836756519065e+00,2.769584434847999876e+00,2.747567398961103802e+00,nan,3.824236203749940888e+00,3.439696001766179734e+00,nan,3.625121474745472749e+00,3.512610849172591809e+00,3.218585293708555728e+00,3.132957074579982404e+00,2.698800019072865375e+00,2.782861336203494851e+00,2.834934221311152580e+00,2.922428709300000094e+00,3.012542578742063881e+00,3.037271368701894847e+00,3.023036725358840382e+00,3.055720756820926809e+00,3.004710560351171900e+00,3.041432803420921704e+00,nan,4.665644247318067706e+00,3.435104679115142723e+00,nan,3.888302959713692886e+00,3.856012399529944457e+00,nan,nan,nan,nan,nan,nan,2.928447649999999847e+00,nan,nan,nan,4.740741375569864857e+00,nan,nan,3.492452056987161324e+00,3.192863256144003348e+00,3.073458293414135678e+00,2.725119578515647234e+00,2.734740086843009177e+00,2.783250370319178213e+00,2.876553120338334324e+00,3.104209326806605329e+00,2.806626500000000135e+00,3.090600802138019798e+00,3.154707836047880409e+00,nan,nan,nan,nan,nan,nan,3.470751824213415659e+00,nan,nan,nan,nan
+2.199215717453347985e+00,nan,2.914849767719560614e+00,3.700651267131241262e+00,2.575049687116531150e+00,2.371878118015697368e+00,2.381166620000000123e+00,2.171953619043894612e+00,2.170599828795999997e+00,nan,3.659116911468059286e+00,3.267307500605727277e+00,2.951427852384739481e+00,2.792606189232095470e+00,2.785724090254922292e+00,2.641218767611418272e+00,2.605090203512000269e+00,nan,3.829437474971632671e+00,3.326528572823093111e+00,3.207796436911430238e+00,3.318646942081169726e+00,3.068942175932522165e+00,2.979440042587794490e+00,2.857266056523815045e+00,2.576398425712632889e+00,2.602388944424092632e+00,2.689579371073926861e+00,2.818420514910000119e+00,2.943428207897664350e+00,2.637223000000000095e+00,2.709554659526000275e+00,2.863189507639930742e+00,2.805706530553065292e+00,4.669985999999999748e+00,nan,3.815908976279250631e+00,3.416199118438198568e+00,nan,3.612912597544709836e+00,3.544425417020708569e+00,3.145925660095149290e+00,3.126838230536749919e+00,2.737928498116464660e+00,2.768527655176615365e+00,2.828923192769203787e+00,2.913062442967758070e+00,3.007038679012962401e+00,2.779942500000000205e+00,2.789375000000000160e+00,2.914477726600557261e+00,3.029323763863701391e+00,3.017242843180704792e+00,nan,4.081602499999999800e+00,3.425154769190899007e+00,3.491896095314399151e+00,3.974384275636384078e+00,3.963396438810915523e+00,3.940912751935055525e+00,nan,nan,3.393973481831789041e+00,nan,nan,nan,2.968855500000000092e+00,nan,nan,nan,nan,nan,3.482494268575297536e+00,3.185351251051065091e+00,3.061304208371461222e+00,2.713163685993843277e+00,2.721115481820583160e+00,2.691477981810111331e+00,2.860525171220814400e+00,3.090943317225118392e+00,2.789502500000000218e+00,3.074746655024691133e+00,3.142894758352218876e+00,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan 
+2.148780091682134685e+00,nan,2.927346727088384171e+00,nan,2.580438477695663924e+00,2.356628091527305902e+00,2.365243185000000192e+00,2.156513207888807404e+00,2.159110915799999475e+00,nan,3.269159925889901785e+00,3.240934172342468056e+00,2.651702499999999851e+00,2.781525925408537603e+00,2.780143930549330289e+00,2.628145061568564422e+00,2.522024036330371199e+00,nan,3.660856508653930419e+00,nan,nan,3.304081083673673813e+00,3.056953011958100763e+00,2.982679647873056616e+00,2.841924268449099511e+00,2.558315500020499567e+00,2.467278494959259660e+00,2.584363527641938330e+00,2.849311645809862181e+00,2.917014904364325645e+00,2.626570500000000141e+00,2.703579844931999432e+00,2.857782929734740573e+00,2.742459683940766002e+00,2.745899861194134761e+00,nan,3.805004276284891684e+00,3.414636493488185653e+00,4.249587854615622717e+00,3.596410722021615047e+00,3.537090133887159649e+00,3.131619252162831657e+00,3.120393987347435782e+00,2.730560793183692159e+00,2.713134882606357490e+00,2.817007567432720183e+00,2.904804294815762855e+00,3.000908158863777153e+00,2.766853499999999855e+00,2.776363499999999984e+00,3.015920479197032211e+00,2.980607117311453003e+00,2.986190547740434020e+00,nan,3.966460435212608715e+00,3.412623828732121556e+00,3.476382186012672459e+00,3.965775407403197228e+00,3.956774708687231357e+00,3.933363467033499816e+00,nan,3.898340234959190020e+00,nan,4.255397317126287327e+00,nan,nan,nan,2.909581687777191839e+00,nan,nan,nan,nan,3.473399475875134979e+00,3.176404641009879626e+00,3.036757099035661600e+00,2.757917503399776038e+00,2.612442059970998987e+00,2.755593629915009224e+00,2.844030441782897434e+00,3.108660231241554772e+00,2.783297999999999828e+00,2.812667499999999876e+00,3.130495742189648656e+00,nan,nan,nan,nan,nan,nan,nan,nan,4.753838071537071386e+00,nan,nan
+2.190393398248820844e+00,nan,3.118337841918728870e+00,nan,2.579510565891860008e+00,2.232644020616523672e+00,2.350662059999999887e+00,2.140660155611931348e+00,2.198807209185361433e+00,nan,3.997923106392856862e+00,3.212353157282980209e+00,2.650788499999999992e+00,2.772901219878878720e+00,2.787835160574164028e+00,2.616205645300151428e+00,2.557769330299334953e+00,nan,3.786100005085020204e+00,nan,nan,3.311791781682212221e+00,3.046610082446905121e+00,2.978201885970974683e+00,2.977671088414529432e+00,2.809806858846969568e+00,2.638532169563618801e+00,2.663274243927887586e+00,2.817037160062049850e+00,2.922366879134512097e+00,2.612731500000000207e+00,2.700566084160000102e+00,2.850151777699583722e+00,2.782660636220044559e+00,4.629781499999999994e+00,nan,3.794498344271218571e+00,3.426484517389891682e+00,nan,3.577149529824780405e+00,3.524736829583512421e+00,3.160542626579115666e+00,3.114251522724260646e+00,2.667572614790920671e+00,2.835479263612121770e+00,2.810434197968636472e+00,2.980009359835951610e+00,2.995650293094935357e+00,2.753309499999999854e+00,2.761975500000000139e+00,2.898002459318961943e+00,3.014706643346841020e+00,3.028759176182248769e+00,nan,4.508800323243676189e+00,3.632151246665912936e+00,3.460447456599406557e+00,3.958869820396079042e+00,3.950467200375316512e+00,3.930470695264987491e+00,nan,nan,4.064212601096474309e+00,nan,nan,nan,nan,nan,2.967933320000000208e+00,nan,nan,nan,3.461754692194455085e+00,3.170411822154109771e+00,3.042548704934343373e+00,2.692703974735683126e+00,2.731620088838007110e+00,2.746159732717070590e+00,2.829697832447921169e+00,3.100927578973220022e+00,2.766192499999999832e+00,3.074505453478489692e+00,3.120041705362130990e+00,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan
+2.190056839526372201e+00,nan,3.020368270059398341e+00,nan,2.627304630071595426e+00,2.478520838034012463e+00,2.391984250635732945e+00,2.028971257203826539e+00,2.178889214999999879e+00,nan,3.699867847646896024e+00,3.231699696458567583e+00,2.912832894400000150e+00,2.907051464559815557e+00,2.761469911779932396e+00,2.549134737204657331e+00,2.662165309999999785e+00,nan,3.738848952690843497e+00,nan,nan,nan,3.031928027194829234e+00,2.997166326977366069e+00,2.903387562824002455e+00,2.836369635984477444e+00,2.754369233041524367e+00,2.622845465851564750e+00,2.822013144672808505e+00,2.978032140475936007e+00,2.759723170720000063e+00,2.678370358819389896e+00,2.858742016031206479e+00,2.809356328182241569e+00,2.826197699999999813e+00,nan,3.773946824443866976e+00,3.377185738206697430e+00,nan,nan,3.686604726366762730e+00,3.231190270651985497e+00,nan,2.868286529638906401e+00,2.806961114797497103e+00,2.742350923307339094e+00,3.019411366673085961e+00,2.971066023745643658e+00,3.068265369634559647e+00,2.957423499999999983e+00,2.967185764725684738e+00,3.095592711326943913e+00,3.030152793206261741e+00,nan,3.833169737854806414e+00,3.365749160055187872e+00,3.387910435616395599e+00,4.023911882966872966e+00,3.749610429009732737e+00,3.708214927947469874e+00,nan,4.266925370060276279e+00,nan,nan,4.726457605715734367e+00,4.740741375569864857e+00,nan,nan,nan,2.722730889999999793e+00,nan,nan,3.524680441714683088e+00,3.385454825302224080e+00,3.089285375752285567e+00,2.865932604675739626e+00,2.795662290779483250e+00,2.815986831149338787e+00,2.847273907331961507e+00,3.091621823684879811e+00,2.981424500000000144e+00,3.087973780681366520e+00,3.069250322663296338e+00,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan
+2.103189546775930285e+00,nan,3.164091556265995031e+00,nan,2.516364410209747327e+00,2.208004027194500285e+00,2.280838685633795215e+00,2.138640693861333819e+00,2.145481103077768292e+00,nan,3.985832039965755413e+00,3.217725918111309369e+00,2.921429283655999853e+00,2.806907458173721892e+00,2.761857308891114027e+00,2.449058732757722012e+00,2.594953124333999384e+00,nan,4.300711417986438079e+00,nan,nan,3.260616759061754077e+00,3.082663991730448583e+00,2.944305510070984955e+00,2.807807122538521938e+00,2.783301216846045367e+00,2.628565132391704751e+00,2.657806950356072839e+00,2.807282810696388964e+00,2.901430589759697476e+00,2.826000485567999654e+00,2.696020813178999997e+00,2.838190646792944971e+00,2.719066579077019430e+00,2.745742861062142381e+00,nan,3.778952826100431484e+00,3.420547541432594763e+00,nan,3.543352111478714761e+00,3.505188259850309684e+00,2.855650757396243300e+00,3.103456984684071163e+00,2.693709729014642562e+00,2.792658877771192483e+00,2.786040647259235836e+00,2.975716379895867014e+00,3.014295814640751026e+00,3.026598920069254461e+00,2.983183999085219362e+00,3.039330776900507569e+00,3.006908529668696595e+00,3.317744035255313584e+00,nan,4.564760147594914663e+00,3.405715523710690196e+00,3.405124959897154469e+00,3.943328830742504731e+00,3.369042668648182026e+00,3.914881687302765201e+00,nan,nan,nan,3.852594502178106062e+00,nan,nan,nan,nan,nan,nan,2.967736990000000130e+00,nan,3.457370471369977771e+00,3.533717110681175111e+00,3.034618205044695038e+00,2.712109369540219639e+00,2.689014083285424128e+00,2.727365866544282813e+00,2.806411029762310605e+00,3.126377506558602359e+00,3.024940015361897050e+00,3.052128705735078995e+00,3.101966737822229980e+00,nan,nan,nan,nan,nan,nan,nan,nan,3.631311509037648211e+00,nan,nan
+2.018644517200243627e+00,nan,3.660424519839467816e+00,2.609275602922448645e+00,2.418900999999999968e+00,2.264749956366135297e+00,1.975531797816703961e+00,1.999054139742926228e+00,1.933921049442955287e+00,nan,3.487120964273264789e+00,nan,2.598536068127999865e+00,2.665159792411830875e+00,2.553406533355451646e+00,2.473584809789112438e+00,2.301907752496281656e+00,nan,3.755375924724534631e+00,3.706195439266121383e+00,nan,nan,2.897775557519291834e+00,2.876301874436550499e+00,2.870459526831008912e+00,2.557517758259387985e+00,2.533041096829367511e+00,2.573244131211720021e+00,2.581680994105349569e+00,2.702293818133504111e+00,2.697999629972616109e+00,2.647502314738999551e+00,2.641741005200317183e+00,2.575561489301759543e+00,2.731863965115904236e+00,nan,3.796104995748445621e+00,3.419297378948378707e+00,nan,nan,2.983731135739717821e+00,2.918156536761384423e+00,2.812302022962624104e+00,2.735975168448719685e+00,2.630684252937330658e+00,2.736059099962952157e+00,2.897562366836601289e+00,3.206158655601894392e+00,3.034621356889016131e+00,2.730617069275505404e+00,2.809845403340724523e+00,2.743419502668856147e+00,2.693181052863613534e+00,nan,3.972050014524950878e+00,3.643490103141645786e+00,4.028736749540098749e+00,nan,nan,nan,nan,nan,3.782787269178224054e+00,nan,4.236967953015315480e+00,nan,nan,nan,nan,nan,nan,2.598031444608215779e+00,nan,2.972206134469755234e+00,2.913801433656593165e+00,2.808923510638142496e+00,2.739784679214956498e+00,2.688820305021593970e+00,2.838222934335016046e+00,2.910223809817167329e+00,3.136129044649849096e+00,2.956401526601415419e+00,3.015791695318124521e+00,nan,nan,nan,nan,nan,nan,nan,nan,3.902107154902529018e+00,nan,nan
+1.931581354024525909e+00,nan,2.711758346009691678e+00,2.535241095890144791e+00,2.299282045299664290e+00,2.174964330862141360e+00,1.913611220479036534e+00,1.791654907073999325e+00,1.852028355440467511e+00,nan,2.933593089824271249e+00,3.058724316151011813e+00,2.711189870029921956e+00,2.562393296918849206e+00,2.478764235663004634e+00,2.241424767127187412e+00,2.305211236263615238e+00,nan,3.459668751525181385e+00,3.282368487028433979e+00,3.570258082050620452e+00,nan,2.919526661512657828e+00,2.705630159815406444e+00,2.715919753195363384e+00,2.450953545713382642e+00,2.452542306807659944e+00,2.494289765980240325e+00,2.743278715000000201e+00,2.808389685592962870e+00,2.603253877510800063e+00,2.529366036080153091e+00,2.611467207975047167e+00,2.400891061515399549e+00,2.435056213029295424e+00,nan,3.566583188532426618e+00,3.417301932801381614e+00,3.495584121060816507e+00,3.110387079646232067e+00,nan,2.770400773508731262e+00,2.749589685353739377e+00,2.628789270285801560e+00,2.655180504959931742e+00,2.694153765348004548e+00,2.994425999999998922e+00,4.218929003462666749e+00,3.613537236657740070e+00,2.872222476047352480e+00,2.794174242269098940e+00,2.642797279271585253e+00,2.663748275551748534e+00,nan,4.145189035944445877e+00,3.481035205453993697e+00,3.295045682595882486e+00,4.321180737667454252e+00,3.385709826043067139e+00,3.369729332857387849e+00,nan,3.344569837421908165e+00,3.399624423579948651e+00,3.483932851917118878e+00,3.502640796817273916e+00,3.492452056987161324e+00,3.482494268575297536e+00,3.473399475875134979e+00,3.461754692194455085e+00,3.524680441714683088e+00,3.457370471369977771e+00,nan,2.377280100000000118e+00,nan,2.756031557775581842e+00,2.737469900879455853e+00,2.758412560553885839e+00,2.716512971810433719e+00,2.915251080519706939e+00,5.227495426987597504e+00,3.877899347464901503e+00,3.555255985038488387e+00,3.180687702350000201e+00,nan,nan,nan,nan,nan,nan,3.519905694081412406e+00,nan,3.615312384683999625e+00,nan,nan
+3.358236039097470815e+00,nan,2.907277270223999288e+00,2.523321158651419260e+00,2.217133397551355856e+00,2.004406808757980052e+00,1.815122837717017878e+00,1.744650295080000246e+00,1.974759095640817641e+00,nan,3.104784077179262081e+00,3.329858547036890659e+00,2.537892744317861826e+00,2.512591144320746483e+00,2.359150856493688497e+00,2.084303828696430383e+00,2.273767330358225092e+00,nan,3.507024670114291087e+00,3.194323859531984500e+00,nan,nan,2.963930662915379433e+00,2.658815045303448699e+00,2.641892766657789160e+00,2.608045319059231648e+00,2.459522556664921922e+00,2.518390147419642400e+00,2.707004330000000181e+00,3.479768336980796128e+00,2.699951537078548292e+00,2.491019983308832675e+00,2.525509213592322943e+00,2.537108280995488574e+00,2.422937858832630909e+00,nan,3.569873595030087721e+00,3.364697886635370061e+00,3.195773836546395685e+00,2.690432148155578851e+00,3.379177733919998872e+00,2.650642033727131253e+00,nan,nan,2.686170832704604994e+00,4.068385000000000140e+00,2.959975000000000023e+00,3.631638169896591162e+00,3.563130510119641325e+00,3.050594999999999946e+00,nan,2.708906932220893626e+00,2.711364110946903772e+00,nan,4.295540852464433179e+00,3.553564858839485030e+00,3.469606953934209148e+00,3.327398717907630754e+00,3.341727266227507265e+00,3.305343874546734284e+00,nan,nan,nan,3.453575268659903497e+00,3.201231867042949819e+00,3.192863256144003348e+00,3.185351251051065091e+00,3.176404641009879626e+00,3.170411822154109771e+00,3.385454825302224080e+00,3.533717110681175111e+00,2.972206134469755234e+00,nan,2.359321045062747313e+00,2.992819556078374799e+00,2.824640323717942714e+00,2.721826753995127746e+00,2.772978965982792143e+00,nan,3.664576434260765847e+00,3.773145732201963920e+00,3.463059050449018184e+00,3.672422349585263923e+00,nan,nan,nan,nan,nan,nan,3.124152687853276600e+00,nan,2.911498816617416718e+00,nan,3.138713984043840721e+00
+1.786512897407999567e+00,nan,2.658613032802919829e+00,2.540555582290795034e+00,2.166242309700671775e+00,1.903379846769387429e+00,1.690547373171999546e+00,1.674262070696314497e+00,1.835416651437301594e+00,nan,3.048906012168518753e+00,3.663340066466100886e+00,2.520189009794214563e+00,2.395664830470227269e+00,2.323802336898455057e+00,2.334658378307195914e+00,2.197098363017893519e+00,nan,3.449458416935617944e+00,3.270277669112988672e+00,2.941955104391178910e+00,2.691676990436439265e+00,nan,3.060119758247999755e+00,3.167489879215346082e+00,2.614651037315225413e+00,2.576233612351879643e+00,3.955942828473636652e+00,2.575062357904890575e+00,3.746221604823908979e+00,2.488104867921679286e+00,2.519242596893539687e+00,2.572860110887943641e+00,2.431212585981246121e+00,2.519370405819999270e+00,nan,4.156062563104424790e+00,3.238172982003725497e+00,3.058757773263763191e+00,2.933622464838548272e+00,2.789534042300088945e+00,nan,nan,nan,nan,nan,3.800272398692044362e+00,4.228561000000000014e+00,3.620456109133148637e+00,nan,3.589716374697550449e+00,2.819453120381860689e+00,nan,nan,3.963739936750294568e+00,3.455368731748499567e+00,3.110260900873239365e+00,3.167023631429179975e+00,3.163051079965418122e+00,3.143273861143473624e+00,nan,3.109679019317446613e+00,3.160202985736541237e+00,3.089188808264899944e+00,3.066927860052526711e+00,3.073458293414135678e+00,3.061304208371461222e+00,3.036757099035661600e+00,3.042548704934343373e+00,3.089285375752285567e+00,3.034618205044695038e+00,2.913801433656593165e+00,2.756031557775581842e+00,2.992819556078374799e+00,2.254600472566672842e+00,nan,nan,nan,nan,nan,3.050509516174204183e+00,3.533717566123425691e+00,3.423382750578844558e+00,nan,nan,nan,nan,nan,nan,3.052044738318685724e+00,nan,2.909690082404738476e+00,3.012276211934153292e+00,2.920945358842927497e+00
+1.684266924006315058e+00,nan,2.873434869770276734e+00,nan,1.947872879695335513e+00,1.906989142107239354e+00,1.686203682402598547e+00,1.722741229081438119e+00,1.899971931616181386e+00,nan,3.059509154871621561e+00,2.846792475390081290e+00,2.461647003100041697e+00,2.328028050423145423e+00,2.326731939316208475e+00,2.326069276932366314e+00,2.264563239125999772e+00,nan,3.591917953496173599e+00,3.104673705065901235e+00,2.678137654077623164e+00,2.671370851120949741e+00,2.612429361325223631e+00,2.581109907347782784e+00,nan,3.883687433272565936e+00,3.930044335505898268e+00,3.756977122383366563e+00,3.842181586182128683e+00,nan,2.433444918951561231e+00,2.372654802352641923e+00,2.453311492359291357e+00,2.466649132027154856e+00,2.465675482076310043e+00,nan,4.027477993099155107e+00,3.301529447064212697e+00,2.664810101452855573e+00,2.837519626169834108e+00,2.738473216759752304e+00,2.781828688837606922e+00,nan,nan,nan,nan,nan,nan,3.976388979728341599e+00,2.753423285870408854e+00,2.632752620703396218e+00,2.608834239585742676e+00,2.788085845480279268e+00,nan,nan,3.474116346145557088e+00,2.815970213638827158e+00,2.738597578658447773e+00,2.673204838906142466e+00,2.816690733052363615e+00,nan,2.782480026910277182e+00,3.157025630162292984e+00,2.967274477719059167e+00,2.669296394624820046e+00,2.725119578515647234e+00,2.713163685993843277e+00,2.757917503399776038e+00,2.692703974735683126e+00,2.865932604675739626e+00,2.712109369540219639e+00,2.808923510638142496e+00,2.737469900879455853e+00,2.824640323717942714e+00,nan,2.423652923491041555e+00,nan,nan,nan,nan,nan,3.654613478279744143e+00,2.756193448784586941e+00,nan,nan,nan,nan,nan,nan,2.859911442147044180e+00,nan,2.949151465498168534e+00,3.101682629254441714e+00,2.713314240679133071e+00
+1.663525923967254405e+00,nan,2.552025310507078260e+00,2.488981152116728879e+00,2.055142752398614991e+00,1.912781909491612975e+00,1.724543241491999668e+00,1.808550280068000227e+00,1.872885652860075911e+00,nan,3.117307459039983186e+00,2.622881218739384401e+00,2.465355743531444155e+00,2.286028129198178771e+00,2.239396095456413160e+00,2.318625232630676436e+00,2.297536026478000259e+00,nan,3.315821202651180855e+00,2.985741223734405647e+00,2.342567999999999984e+00,2.624517385568537975e+00,2.595269159989471941e+00,2.579660248723607108e+00,2.595911760503735621e+00,2.584473901804077656e+00,2.577164604451984697e+00,2.563752168818334898e+00,2.685182688789092609e+00,2.609185757084840684e+00,2.426423975717410197e+00,2.345702636584201972e+00,2.433579480711312470e+00,2.492179453162715941e+00,2.432702635584000017e+00,nan,3.700882789395611994e+00,3.083195471458116899e+00,2.648952817858495568e+00,2.704218039344667091e+00,2.767198543502280561e+00,2.715737157872329277e+00,nan,nan,nan,2.741910708474420755e+00,nan,3.324604410400000098e+00,2.576861905563999855e+00,2.569840631334384007e+00,2.585316428402419930e+00,2.604138767761817519e+00,4.254923479519998608e+00,nan,3.621749101101198409e+00,3.108868432680178984e+00,2.788791678275114094e+00,2.820691302126126843e+00,2.585628677064511827e+00,2.821403177985929656e+00,nan,2.783907767662940902e+00,2.860743518330027158e+00,2.868323850043293710e+00,2.628675634288091700e+00,2.734740086843009177e+00,2.721115481820583160e+00,2.612442059970998987e+00,2.731620088838007110e+00,2.795662290779483250e+00,2.689014083285424128e+00,2.739784679214956498e+00,2.758412560553885839e+00,2.721826753995127746e+00,nan,nan,2.233431260000000140e+00,3.092923448770963812e+00,nan,nan,nan,2.808457418746252543e+00,2.700366904363710852e+00,nan,nan,nan,nan,nan,nan,2.884058820967400738e+00,2.877454769399556689e+00,2.649264582039128957e+00,3.018883029351144831e+00,2.893154631842444147e+00 
+1.630624875498358328e+00,nan,2.583530125610861816e+00,2.473299687427969573e+00,2.062813156346450238e+00,1.860562876883097516e+00,1.749824818872000698e+00,1.888700824731120109e+00,1.886505996064235191e+00,nan,2.985869792763138797e+00,2.640484998982460585e+00,2.494358442592935265e+00,2.311763137917980160e+00,2.206537180910153495e+00,2.282880383185210249e+00,2.293516951119999980e+00,nan,3.270819746455339239e+00,2.880729326793999956e+00,2.696360872884491666e+00,2.575708533205133488e+00,2.669917795078292855e+00,2.601332716785988541e+00,2.653728735851311527e+00,2.385498956138069282e+00,2.656080202227219633e+00,2.537783343282986603e+00,2.540123487895603827e+00,2.587676871146467938e+00,2.444071959545214057e+00,2.349074741618019768e+00,2.420970475087223850e+00,2.447111488657448319e+00,2.443767352534183424e+00,nan,3.384804719817827934e+00,3.088711451281833664e+00,2.722338184679946593e+00,2.703082795083489920e+00,2.738396771966876919e+00,2.763290235393184080e+00,nan,nan,nan,3.754339185963169623e+00,2.802274780444005575e+00,2.752935280216121328e+00,2.574066520131999880e+00,2.586742539631998739e+00,2.543209560886189102e+00,2.566739981916471347e+00,2.601544924030662287e+00,nan,3.352753013915986013e+00,3.137343747647857395e+00,2.921955738521794910e+00,2.888766128429178348e+00,2.915038547060145202e+00,2.904836155041787826e+00,nan,2.715627450682059685e+00,2.954883644370824847e+00,2.851872808155165195e+00,2.724648747873188448e+00,2.783250370319178213e+00,2.691477981810111331e+00,2.755593629915009224e+00,2.746159732717070590e+00,2.815986831149338787e+00,2.727365866544282813e+00,2.688820305021593970e+00,2.716512971810433719e+00,2.772978965982792143e+00,nan,nan,3.092923448770963812e+00,2.573818104917100502e+00,3.122955838700155695e+00,2.729691746108755535e+00,2.741311083286152250e+00,2.808428758124962510e+00,2.741479741309631013e+00,nan,nan,nan,nan,nan,nan,2.821906359293140376e+00,2.862912562668930239e+00,2.803053206758466143e+00,2.873626992496257859e+00,2.643810240293692715e+00 
+2.948324309557059486e+00,nan,2.665397580000000488e+00,2.491301037080801528e+00,2.742025386766106188e+00,1.913313582540000279e+00,1.928118302593670785e+00,1.963722172228158813e+00,1.902159248897282939e+00,nan,2.958599166089153076e+00,2.681897318099999872e+00,2.444864983259999569e+00,2.409914573675999705e+00,2.335776397677552740e+00,2.283701273493217165e+00,2.250607627053742288e+00,nan,3.265476801192098488e+00,2.948586281430879197e+00,2.802446724605512252e+00,2.720598388361700870e+00,2.439544000000000157e+00,2.880570011672523023e+00,2.686205303168029523e+00,nan,5.101591488915104833e+00,2.535546683582076799e+00,2.641317874035871593e+00,2.641938598345791700e+00,2.550750248384400454e+00,2.441412881240047295e+00,2.489952103446219311e+00,2.417675095174571798e+00,2.421878230522076958e+00,nan,3.359144934219252310e+00,3.167851258509621015e+00,2.885735447826302202e+00,2.822038761212103175e+00,2.850205204272047599e+00,nan,nan,nan,nan,4.058650293713919055e+00,3.143916426896134375e+00,2.814778601914782907e+00,2.680081827079107804e+00,2.618471826141057335e+00,2.660332368320192487e+00,2.612977309024292261e+00,2.594818998454055947e+00,2.773954963721450895e+00,3.603656123377349640e+00,3.291613823309888165e+00,3.041671960870554248e+00,2.989985414093068616e+00,2.998117865966676288e+00,2.967368838667030939e+00,nan,2.969529727761716753e+00,3.078957944660624779e+00,2.960983616002104579e+00,2.890045511768458475e+00,2.876553120338334324e+00,2.860525171220814400e+00,2.844030441782897434e+00,2.829697832447921169e+00,2.847273907331961507e+00,2.806411029762310605e+00,2.838222934335016046e+00,2.915251080519706939e+00,nan,nan,nan,nan,3.122955838700155695e+00,2.644733267550738898e+00,nan,2.757068822003706732e+00,2.725364412270254277e+00,2.750362040205846181e+00,nan,nan,nan,nan,nan,nan,3.010532710433706960e+00,nan,3.034269901271390513e+00,3.338195383675794492e+00,3.358550182274786078e+00 
+1.670391809867448396e+00,nan,2.803432485225700876e+00,nan,nan,2.021058775165538890e+00,2.078578434809840392e+00,1.982281967809529144e+00,2.133551084205131509e+00,nan,3.038867624562036074e+00,2.787938625778115576e+00,3.707822000000000173e+00,3.278769054438861019e+00,2.392028954653005801e+00,2.332672843833442311e+00,2.282014364160746656e+00,nan,3.529715107102691984e+00,3.105993790174739200e+00,3.029989185883616987e+00,2.857114801930086490e+00,3.514103094784498449e+00,3.592604575098945485e+00,2.884859657791129184e+00,4.093216561725295577e+00,5.723797514837941414e+00,2.638883804665499433e+00,2.857114801930086934e+00,5.760909622742944158e+00,3.671266988348027560e+00,3.237124999999999808e+00,2.492111741240429890e+00,2.492135508644590214e+00,2.467455219449789627e+00,nan,3.715781217968283201e+00,3.317335485445370935e+00,3.138641437192032146e+00,2.925686693401738925e+00,3.758149584999999959e+00,3.547421565782718833e+00,nan,nan,2.758381378339382906e+00,2.793680837970702324e+00,3.026084833248442241e+00,2.868102763233180141e+00,3.002646863820890299e+00,2.844965535261812395e+00,2.796455150916263843e+00,2.699379096718585558e+00,2.635192324411776088e+00,nan,3.844557172998372518e+00,3.574565760958912986e+00,3.222264767869003510e+00,3.233056640935516590e+00,3.298795489533983272e+00,3.305365896301708517e+00,nan,3.148133678504557054e+00,3.235748969898238236e+00,3.168102563826697526e+00,3.136917015281746046e+00,3.104209326806605329e+00,3.090943317225118392e+00,3.108660231241554772e+00,3.100927578973220022e+00,3.091621823684879811e+00,3.126377506558602359e+00,2.910223809817167329e+00,5.227495426987597504e+00,3.664576434260765847e+00,nan,nan,nan,2.729691746108755535e+00,nan,2.547969005791018926e+00,3.183229380068731018e+00,2.958815594814417249e+00,4.046801244208374726e+00,nan,nan,nan,nan,nan,nan,3.207660781260686544e+00,nan,3.355123409695075942e+00,nan,nan 
+2.951004466503344581e+00,nan,2.794704684376161374e+00,nan,3.008648368001351603e+00,2.205901184860158182e+00,2.282316938610985169e+00,2.024065026519998867e+00,2.132147149491796956e+00,nan,3.153725622140885143e+00,2.910364360620130153e+00,3.584672157830405492e+00,3.570315325500557524e+00,3.383308719643971507e+00,2.573447810019901638e+00,2.382729443452005214e+00,nan,3.573014046113993203e+00,3.202672640221043476e+00,3.213240489912057907e+00,2.979775553084577311e+00,3.527470382621648692e+00,3.179756504658101779e+00,4.050558855357209609e+00,2.935036684042499289e+00,3.771636035075792925e+00,2.718793866283214289e+00,2.927380190659061299e+00,2.700214679014409924e+00,4.113935999999999815e+00,3.622950727338283272e+00,3.371425143099000543e+00,2.699895686526200933e+00,2.517686291097339879e+00,nan,3.748373176323587685e+00,3.245549172473999988e+00,3.098828025799362784e+00,3.186118463618622609e+00,2.998016526087758482e+00,3.523363973539485716e+00,nan,nan,2.847814926278008141e+00,2.728033338573600197e+00,3.002627276214115515e+00,2.846509391537667799e+00,4.004329000000000249e+00,3.476185203641928734e+00,3.032914366831348740e+00,2.926775453781026659e+00,3.344421021231074764e+00,nan,3.883874665692768957e+00,3.526000774956049355e+00,3.181314933479999496e+00,2.931176999999999921e+00,2.935900999999999872e+00,2.912808500000000134e+00,nan,3.126878152703998381e+00,3.362001717597975503e+00,3.150023764976858764e+00,2.820246500000000101e+00,2.806626500000000135e+00,2.789502500000000218e+00,2.783297999999999828e+00,2.766192499999999832e+00,2.981424500000000144e+00,3.024940015361897050e+00,3.136129044649849096e+00,3.877899347464901503e+00,3.773145732201963920e+00,3.050509516174204183e+00,nan,nan,2.741311083286152250e+00,2.757068822003706732e+00,3.183229380068731018e+00,2.980758525905160106e+00,3.834014895792384525e+00,3.456921398514977817e+00,nan,nan,nan,nan,nan,nan,3.175547617662360267e+00,nan,3.368272039483746649e+00,3.286171285333739256e+00,nan 
+2.539764997394242663e+00,nan,2.744525638802489009e+00,3.494606056607809208e+00,3.177718904820879597e+00,2.827409953172871138e+00,2.267719735066459741e+00,2.033069786692916381e+00,1.981039526763001213e+00,nan,3.085602611814757257e+00,2.840222306414097453e+00,2.914633904751982474e+00,3.565035204395853974e+00,2.872374263474326117e+00,2.658578929308554439e+00,2.398154553024082425e+00,nan,3.361283999999999939e+00,2.976993499999999848e+00,2.918270187427291606e+00,3.020814719533544412e+00,2.810467442508952196e+00,3.076640073112770324e+00,3.207504000000000133e+00,3.315335127082379874e+00,3.478379359458275477e+00,2.691003080719561957e+00,2.645499340861685145e+00,2.708968062671038624e+00,3.270814114910217896e+00,3.109949954030343022e+00,3.275458031931258951e+00,2.815840424212081139e+00,2.867912180986219806e+00,nan,3.628954624874195289e+00,3.361266165014341833e+00,3.075007520142039930e+00,2.949260738289206429e+00,3.011464238904442148e+00,2.919731580760121403e+00,3.698425852473994802e+00,4.355477613519636471e+00,2.660463588327621309e+00,2.776428242401209889e+00,2.838560235250913166e+00,2.933270858058514285e+00,3.096007095206586790e+00,3.382037047875141145e+00,3.437036299981330423e+00,3.040385978050964333e+00,3.090943025418786849e+00,nan,3.771378957423852984e+00,3.480581890090729491e+00,3.283043529943898697e+00,2.993097500000000188e+00,2.965006499999999878e+00,2.944329999999999892e+00,nan,2.903915000000000024e+00,3.344334849855499225e+00,2.894366499999999842e+00,2.853339000000000070e+00,3.090600802138019798e+00,3.074746655024691133e+00,2.812667499999999876e+00,3.074505453478489692e+00,3.087973780681366520e+00,3.052128705735078995e+00,2.956401526601415419e+00,3.555255985038488387e+00,3.463059050449018184e+00,3.533717566123425691e+00,3.654613478279744143e+00,2.808457418746252543e+00,2.808428758124962510e+00,2.725364412270254277e+00,2.958815594814417249e+00,3.834014895792384525e+00,2.977708999999999495e+00,3.245355197713548190e+00,nan,nan,nan,nan,nan,nan,3.221715707439274379e+00,nan,3.236675224864243194e+00,nan,3.251421922427178313e+00 
+3.532358526239999463e+00,nan,2.782608157702401996e+00,nan,3.006816326998310362e+00,3.537970710726110468e+00,2.878412603748000009e+00,2.027435271096820202e+00,1.988476341876794118e+00,nan,3.129258283214048308e+00,2.910767019541197875e+00,3.048007256547249799e+00,3.187013664279514824e+00,2.899500207602446977e+00,2.513318714556000089e+00,2.506939026428636197e+00,nan,3.556551880507572871e+00,3.225330366474189248e+00,3.012097178250680685e+00,2.911418224753537842e+00,3.218901399858100643e+00,2.891077937375880591e+00,2.793306626108974733e+00,2.905032194234200116e+00,2.679519885783367528e+00,2.513955366095999899e+00,2.653433361016618974e+00,2.593012610105455806e+00,3.549749708801152703e+00,3.680284858636512446e+00,3.606149986400662311e+00,2.655591579871998675e+00,2.594534158077791641e+00,nan,3.728300166727104514e+00,3.192712499401225479e+00,3.165082345781291995e+00,2.912071645893536953e+00,3.005124242695627501e+00,2.829594348811363957e+00,nan,2.771925121346328513e+00,2.690136739736202820e+00,2.664066309491189166e+00,2.826017233271697737e+00,2.965389682151255535e+00,3.092538947101677760e+00,3.039170735315503524e+00,3.619764290599422285e+00,2.985558667634192087e+00,3.092409450420725570e+00,nan,3.876293100807123970e+00,3.319847334606512490e+00,3.272278998785002724e+00,3.151326365672462604e+00,3.247547626425873357e+00,3.224446621657516499e+00,nan,3.189202149557583521e+00,3.271410545820458982e+00,3.187654468046541734e+00,3.120765554200565006e+00,3.154707836047880409e+00,3.142894758352218876e+00,3.130495742189648656e+00,3.120041705362130990e+00,3.069250322663296338e+00,3.101966737822229980e+00,3.015791695318124521e+00,3.180687702350000201e+00,3.672422349585263923e+00,3.423382750578844558e+00,2.756193448784586941e+00,2.700366904363710852e+00,2.741479741309631013e+00,2.750362040205846181e+00,4.046801244208374726e+00,3.456921398514977817e+00,3.245355197713548190e+00,2.962422397630080084e+00,nan,nan,nan,nan,nan,nan,3.260065287262323253e+00,nan,3.181953000000000031e+00,3.185000000000000053e+00,3.224550770000000011e+00 +nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan +nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan +nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan +nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan 
+nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan +2.561880525000012288e+00,nan,nan,nan,nan,nan,nan,2.464169434957696581e+00,nan,nan,nan,nan,nan,nan,nan,nan,3.032298355103286269e+00,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,3.209458688316992436e+00,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,3.890396283581248227e+00,nan,nan,nan,nan,nan +2.318073677664472143e+00,nan,nan,nan,2.742912172524170522e+00,2.437927933655933188e+00,2.310759547421265214e+00,2.300057499999999866e+00,2.273557068201608633e+00,nan,3.752941620557058933e+00,3.545893532245796287e+00,3.058514640076598123e+00,2.771188077608098599e+00,2.881334476318953453e+00,2.759892521599344839e+00,2.688922956375340512e+00,nan,3.888388697724999954e+00,5.074023834055514826e+00,nan,3.545429194342337276e+00,3.139621732158186518e+00,3.212866367565654802e+00,3.059863990463488559e+00,2.711542499748009760e+00,2.694203290304992837e+00,2.817013572500609353e+00,3.064108583237854067e+00,3.127380848566099214e+00,3.023862439700450988e+00,3.046101881897295360e+00,2.988209409491151991e+00,2.897540065094701411e+00,2.882004814539368187e+00,nan,4.389828234087056913e+00,5.163086206646820386e+00,nan,3.877880620503292075e+00,nan,3.119557060492943013e+00,3.083285718432057898e+00,2.809787200031472310e+00,2.722684537253676762e+00,2.836075094360149151e+00,3.226807378358279355e+00,3.245984917090000366e+00,3.167818078025211648e+00,3.038214570267873782e+00,3.189179007949063838e+00,3.137655971898623886e+00,3.083634481599268451e+00,nan,4.693627940945416022e+00,3.983816450329852543e+00,nan,nan,nan,nan,nan,nan,nan,nan,nan,3.470751824213415659e+00,nan,nan,nan,nan,nan,nan,3.519905694081412406e+00,3.124152687853276600e+00,3.052044738318685724e+00,2.859911442147044180e+00,2.884058820967400738e+00,2.821906359293140376e+00,3.010532710433706960e+00,3.207660781260686544e+00,3.175547617662360267e+00,3.221715707439274379e+00,3.260065287262323253e+00,nan,nan,nan,nan,nan,nan,3.076132349999999960e+00,nan,nan,nan,nan +nan,nan,nan,nan,nan,2.363989639244456598e+00,2.472946390000000161e+00,2.190213207243686178e+00,2.119506603985097826e+00,nan,3.811505010095443957e+00,nan,nan,nan,2.817139991279221878e+00,nan,2.481589902346140075e+00,nan,3.933442170720852005e+00,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,2.890600359005883302e+00,nan,2.660277917435847961e+00,nan,3.985891362043500141e+00,nan,nan,nan,nan,nan,nan,nan,2.850151743161279949e+00,nan,nan,nan,nan,nan,3.149822177868866202e+00,nan,nan,nan,4.289405096850125609e+00,3.891775602416040591e+00,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,2.877454769399556689e+00,2.862912562668930239e+00,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,3.198443343793171412e+00,nan,nan,nan 
+2.323378098046375406e+00,nan,2.836826499999999918e+00,nan,2.572420224389731835e+00,2.214808830486000080e+00,2.234958517191225180e+00,1.762728105724848282e+00,1.971118776991742649e+00,nan,3.322143031985798967e+00,3.551495259401718840e+00,2.960420193360000063e+00,2.714201225220000513e+00,2.755300591442011449e+00,2.617937264664170982e+00,2.460136685584366045e+00,nan,3.663338376426015675e+00,3.404350817102035798e+00,3.851885183994283146e+00,3.081367676904736008e+00,3.018930969190059876e+00,2.712811670787191431e+00,2.750887447683567189e+00,2.183430429647243098e+00,2.559222231103206902e+00,2.746976289152993633e+00,2.836633787082249114e+00,2.955343277614617570e+00,2.917153306383063249e+00,2.729922281954999974e+00,2.877869489585655138e+00,2.762520122796841004e+00,2.619122252294294739e+00,nan,3.748479759732550765e+00,3.428720604818816575e+00,3.691831896544256697e+00,3.908537983506283453e+00,3.568726925815998818e+00,2.630808021342932346e+00,2.898028441671258726e+00,2.707888740891798207e+00,2.751203398756630669e+00,2.855256827129938557e+00,3.098688258188737166e+00,3.386708095641284721e+00,3.035393925527747339e+00,2.893533768235434067e+00,3.076689159559616193e+00,2.990156601550042481e+00,2.878923530939234698e+00,nan,3.891591825967568585e+00,3.369831285834202550e+00,3.853539851496809110e+00,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,4.753838071537071386e+00,nan,nan,3.631311509037648211e+00,3.902107154902529018e+00,3.615312384683999625e+00,2.911498816617416718e+00,2.909690082404738476e+00,2.949151465498168534e+00,2.649264582039128957e+00,2.803053206758466143e+00,3.034269901271390513e+00,3.355123409695075942e+00,3.368272039483746649e+00,3.236675224864243194e+00,3.181953000000000031e+00,nan,nan,nan,nan,nan,nan,nan,nan,2.469586726353585782e+00,nan,nan +2.154222259108919957e+00,nan,3.017173387909133808e+00,nan,2.686308954204622346e+00,2.437020087688726910e+00,2.262061810126314576e+00,1.758219826171361522e+00,2.379863744445293694e+00,nan,3.275369785532813438e+00,nan,2.999165570073254017e+00,2.763619570906768796e+00,2.842957611936203666e+00,2.670275319660698976e+00,2.637252450901498069e+00,nan,4.306809453050163938e+00,nan,nan,nan,nan,3.196746323487836605e+00,2.880145359757862789e+00,2.836836044089204645e+00,2.946520672498533333e+00,2.772497111441578177e+00,3.266223673598196875e+00,nan,2.998274887765047581e+00,2.817242125693600219e+00,2.889111087391571075e+00,2.849362655565290225e+00,2.743864693836897573e+00,nan,4.560391577007757213e+00,4.663540000000000241e+00,nan,3.333174285570760009e+00,nan,3.221309883168052668e+00,2.893927592654235603e+00,3.066740328775652280e+00,2.785649772873745711e+00,2.866835818016323589e+00,3.378908430114569317e+00,nan,3.239298591022445439e+00,3.153719280324754237e+00,3.130479755811825360e+00,3.135550654159438899e+00,3.138786750926899849e+00,nan,4.266552372181984687e+00,3.817181344290035305e+00,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,3.012276211934153292e+00,3.101682629254441714e+00,3.018883029351144831e+00,2.873626992496257859e+00,3.338195383675794492e+00,nan,3.286171285333739256e+00,nan,3.185000000000000053e+00,nan,nan,nan,nan,nan,nan,nan,nan,nan,2.457542848519921108e+00,nan 
+2.166805142276430729e+00,nan,nan,nan,2.580543000000000031e+00,2.287379835890000379e+00,2.474235995718652603e+00,1.767968518856438465e+00,2.357623848738237271e+00,nan,3.412901531233373476e+00,3.164482806190452191e+00,2.938891151947484293e+00,2.760705313056169175e+00,2.851692369303167052e+00,2.816975390000000079e+00,2.585236945468602165e+00,nan,3.625514610945027805e+00,nan,nan,nan,nan,3.314725721459469643e+00,2.852842904483656739e+00,2.812615125743081368e+00,2.640551715765792729e+00,2.722059583258524817e+00,3.196843390792900141e+00,3.094854175122147666e+00,2.921434293889218825e+00,3.012419095211647857e+00,2.939844672078589127e+00,2.880496859477440275e+00,2.895094184208022003e+00,nan,4.486315106401223218e+00,3.805275048482614331e+00,nan,nan,nan,3.144850939876559615e+00,2.849897364870004779e+00,2.662000771560343537e+00,2.776527832920250294e+00,2.817834024594226605e+00,nan,nan,3.073400026447582256e+00,3.184497455008375777e+00,3.109971683843841816e+00,3.148704912977985604e+00,3.125024338696277937e+00,nan,4.610202562027180484e+00,3.702005143907769469e+00,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,3.138713984043840721e+00,2.920945358842927497e+00,2.713314240679133071e+00,2.893154631842444147e+00,2.643810240293692715e+00,3.358550182274786078e+00,nan,nan,3.251421922427178313e+00,3.224550770000000011e+00,nan,nan,nan,nan,nan,nan,nan,nan,nan,nan,2.285446121339999692e+00 diff --git a/msp/utils/objectives/objectives.py b/msp/utils/objectives/objectives.py index 704f536..1fb24db 100644 --- a/msp/utils/objectives/objectives.py +++ b/msp/utils/objectives/objectives.py @@ -8,7 +8,7 @@ class Energy(torch.nn.Module): - def __init__(self, normalize=True, energy_ratio=1.0, ljr_ratio=1.0, ljr_power=12, ljr_scale = .8, min_ljr_val=1.0): + def __init__(self, normalize=True, energy_ratio=1.0, ljr_ratio=1.0, ljr_power=12, ljr_scale = .8, min_ljr_val=1.0, optimize_z=False, gauss_loss_ratio=0.0): super().__init__() """ Initialize objective function using only energy and no novel loss @@ -18,6 +18,9 @@ def __init__(self, normalize=True, energy_ratio=1.0, ljr_ratio=1.0, ljr_power=12 ljr_ratio (float): Weight of the Lennard-Jones repulsion in the loss ljr_power (int): Power for the Lennard-Jones repulsion calculation ljr_scale (float): Scaling factor for the Lennard-Jones repulsion + min_ljr_val (float): Minimum value for the Lennard-Jones repulsion + optimize_z (bool): Whether to optimize the atomic numbers + gauss_loss_ratio (float): Weight of the Gaussian loss in the loss """ self.normalize = normalize self.ljr_power = ljr_power @@ -25,6 +28,8 @@ def __init__(self, normalize=True, energy_ratio=1.0, ljr_ratio=1.0, ljr_power=12 self.lj_rmins[self.lj_rmins < 1.0] = 1.0 self.ljr_ratio = ljr_ratio self.energy_ratio = energy_ratio + self.optimize_z = optimize_z + self.gauss_loss_ratio = gauss_loss_ratio self.element_energy = [-10000, -3.392726045, -0.00905951, -1.9089228666666667, -3.739412865, -6.679391770833334, -9.2286654925, -8.336494925, -4.947961005, -1.9114789675, -0.02593678, -1.3225252934482759, -1.60028005, -3.74557583, -5.42531803, -5.413302506666667, -4.136449866875, -1.84853666, @@ -92,7 +97,7 @@ def norm_to_raw_loss(self, loss, z): def gaussian_loss(self, batch): """ - Calculate the Gaussian loss + Calculate the Gaussian loss (distance between atomic features and ideal Gaussian features + sum of negative atomic features) Args: batch (torch_geometric.data.Batch): Batch of data Returns: @@ -122,11 +127,13 @@ def forward(self, model_output, batch): # for i in range(len(batch.n_atoms)): # 
model_output['potential_energy'][i] = (model_output['potential_energy'][i] + self.offset[i]) / batch.n_atoms[i] ljr = self.lj_repulsion(batch, power=self.ljr_power) - gauss_loss = self.gaussian_loss(batch) + gauss_loss = 0 + if self.optimize_z: + gauss_loss = self.gauss_loss_ratio * self.gaussian_loss(batch) return self.energy_ratio * model_output["potential_energy"] + self.ljr_ratio * ljr + gauss_loss, model_output["potential_energy"], torch.zeros(len(model_output['potential_energy']), 1).to(ljr.device), ljr class EnergyAndUncertainty(Energy): - def __init__(self, normalize=True, energy_ratio=1.0, ljr_ratio=1, ljr_power=12, ljr_scale=.8, uncertainty_ratio=.25, min_ljr_val=1.0): + def __init__(self, normalize=True, energy_ratio=1.0, ljr_ratio=1, ljr_power=12, ljr_scale=.8, uncertainty_ratio=.25, min_ljr_val=1.0, optimize_z=False, gauss_loss_ratio=0.0): """ Initialize objective function using energy and uncertainty as novel loss Args: @@ -136,8 +143,9 @@ def __init__(self, normalize=True, energy_ratio=1.0, ljr_ratio=1, ljr_power=12, ljr_power (int): Power for the Lennard-Jones repulsion calculation ljr_scale (float): Scaling factor for the Lennard-Jones repulsion uncertainty_ratio (float): Weight of the uncertainty in the loss + optimize_z (bool): Whether to optimize the atomic numbers """ - super().__init__(normalize, energy_ratio, ljr_ratio, ljr_power, ljr_scale, min_ljr_val) + super().__init__(normalize, energy_ratio, ljr_ratio, ljr_power, ljr_scale, min_ljr_val, optimize_z, gauss_loss_ratio) self.uncertainty_ratio = uncertainty_ratio def forward(self, model_output, batch): @@ -157,13 +165,16 @@ def forward(self, model_output, batch): # for i in range(len(batch.n_atoms)): # model_output['potential_energy'][i] = (model_output['potential_energy'][i] + self.offset[i]) / batch.n_atoms[i] ljr = self.lj_repulsion(batch, power=self.ljr_power) - return self.energy_ratio * model_output["potential_energy"] - self.uncertainty_ratio * model_output["potential_energy_uncertainty"] + self.ljr_ratio * ljr, model_output["potential_energy"], -model_output["potential_energy_uncertainty"], ljr + gauss_loss = 0 + if self.optimize_z: + gauss_loss = self.gauss_loss_ratio * self.gaussian_loss(batch) + return self.energy_ratio * model_output["potential_energy"] - self.uncertainty_ratio * model_output["potential_energy_uncertainty"] + self.ljr_ratio * ljr + gauss_loss, model_output["potential_energy"], -model_output["potential_energy_uncertainty"], ljr class EmbeddingDistance(Energy): - def __init__(self, embeddings, normalize=True, energy_ratio=1.0, ljr_ratio=1, ljr_power=12, ljr_scale=.8, min_ljr_val=1.0, embedding_ratio=.1, mode="min"): + def __init__(self, embeddings, normalize=True, energy_ratio=1.0, ljr_ratio=1, ljr_power=12, ljr_scale=.8, min_ljr_val=1.0, embedding_ratio=.1, mode="min", optimize_z=False, gauss_loss_ratio=0.0): """ Initialize objective function using only energy and embedding distance as novel loss embedding distance is aggregated euclidean distance between structure embedding and database embeddings @@ -176,8 +187,10 @@ def __init__(self, embeddings, normalize=True, energy_ratio=1.0, ljr_ratio=1, lj ljr_scale (float): Scaling factor for the Lennard-Jones repulsion embedding_ratio (float): Weight of the embedding distance in the loss mode (str): Aggregation mode for the embedding distance, either "min" or "mean" + optimize_z (bool): Whether to optimize the atomic + gauss_loss_ratio (float): Weight of the Gaussian loss in the loss """ - super().__init__(normalize, energy_ratio, ljr_ratio, 
ljr_power, ljr_scale, min_ljr_val) + super().__init__(normalize, energy_ratio, ljr_ratio, ljr_power, ljr_scale, min_ljr_val, optimize_z, gauss_loss_ratio) self.embedding_ratio = embedding_ratio self.embeddings = embeddings self.mode = mode @@ -223,4 +236,7 @@ def forward(self, model_output, batch): else: embedding_loss = torch.mean(embedding_loss, dim=0) embedding_loss = torch.mean(embedding_loss, dim=-1, keepdim=True) - return self.energy_ratio * model_output["potential_energy"] - self.embedding_ratio * embedding_loss + self.ljr_ratio * ljr, model_output["potential_energy"], -embedding_loss, ljr \ No newline at end of file + gauss_loss = 0 + if self.optimize_z: + gauss_loss = self.gauss_loss_ratio * self.gaussian_loss(batch) + return self.energy_ratio * model_output["potential_energy"] - self.embedding_ratio * embedding_loss + self.ljr_ratio * ljr + gauss_loss, model_output["potential_energy"], -embedding_loss, ljr \ No newline at end of file diff --git a/scripts/example.py b/scripts/example.py index b673d14..d7088bc 100644 --- a/scripts/example.py +++ b/scripts/example.py @@ -1,265 +1,129 @@ import sys -from msp.dataset import download_dataset, load_dataset, combine_dataset, update_dataset from msp.composition import generate_random_compositions, sample_random_composition, generate_random_lithium_compositions from msp.forcefield import MDL_FF, MACE_FF, M3GNet_FF from msp.optimizer.globalopt.basin_hopping import BasinHoppingASE, BasinHoppingBatch from msp.utils.objectives import EnergyAndUncertainty, Energy, EmbeddingDistance from msp.structure.structure_util import dict_to_atoms, init_structure, atoms_to_dict -from msp.validate import read_dft_config, setup_DFT, Validate -import pickle as pkl import json -import numpy as np import ase -import torch from ase import io -from pymatgen.analysis.structure_matcher import StructureMatcher -from pymatgen.io.ase import AseAtomsAdaptor import time import matplotlib.pyplot as plt -#download dataset from Materials Project -#return dataset class or dict -my_dataset = download_dataset(repo="MP", save=True) -#or load dataset from disk: +if __name__ == "__main__": -#my_dataset = load_dataset(path ="path/to/dataset") -my_dataset = json.load(open("/global/cfs/projectdirs/m3641/Shared/Materials_datasets/MP_data_latest/raw/data.json", "r")) -predicted_structures = [] -# my_dataset = json.load(open("../data/data_subset_msp.json", "r")) -#print(my_dataset[0]) -max_iterations=1 + # load dataset + my_dataset = json.load(open("/global/cfs/projectdirs/m3641/Shared/Materials_datasets/MP_data_latest/raw/data.json", "r")) + + # Set number of active learning iterations + max_iterations = 1 -#Initialize a forcefield class, reading in from config (we use MDL_FF but it can be a force field from another library) -train_config = 'optimize_z.yml' -forcefield = MDL_FF(train_config, my_dataset) -embeddings = forcefield.get_embeddings(my_dataset, batch_size=40, cluster=False) + # Initiliaze the list of predicted structures + predicted_structures = [] -#predictor = BasinHoppingASE(forcefield, hops=5, steps=100, optimizer="FIRE", dr=0.5) + # Initialize a forcefield class, reading in from config (we use MDL_FF but it can be a force field from another library) + train_config = 'mdl_config.yml' + forcefield = MDL_FF(train_config, my_dataset) -predictor_batch = BasinHoppingBatch(forcefield, hops=50, steps=100, dr=0.6, optimizer='Adam', perturbs=['pos', 'cell']) + # get embeddings for the dataset if using EmbeddingDistance + embeddings = forcefield.get_embeddings(my_dataset, 
batch_size=40, cluster=False) + # initialize the predictor class, this is the BasinHopping version which uses an ASE calculator, but we can have another version for batched optimization + # predictor = BasinHoppingASE(forcefield, hops=5, steps=100, optimizer="FIRE", dr=0.5) + predictor_batch = BasinHoppingBatch(forcefield, hops=50, steps=100, dr=0.6, optimizer='Adam', perturbs=['pos', 'cell']) -# forcefield_mace = MACE_FF() -# predictor_mace = BasinHoppingASE(forcefield_mace, hops=5, steps=100, optimizer="FIRE", dr=0.5) + # train the forcefield + # forcefield.train(my_dataset, .09, .05, .05, max_epochs=1) -# forcefield_m3gnet = M3GNet_FF() -# predictor_m3gnet = BasinHoppingASE(forcefield_m3gnet, hops=5, steps=100, optimizer="FIRE", dr=0.5) -#train the forcefield (optional) -#forcefield.train(my_dataset, .09, .05, .05, max_epochs=1) -#to load saved model, use update and put the path to file in checkpoint_path in the train_config -#forcefield.update(my_dataset, .09, .05, .05, max_epochs=1) + #active learning loop + for i in range(0, max_iterations): + print("Iteration: ", i) -#active learning loop -for i in range(0, max_iterations): - # sample composition using a built in random sampler that checks for repeats in the dataset - # returns a list of compositions, could be length 1 or many - # compositions are a dictionary of {element:amount} - # compositions = sample_random_composition(dataset=my_dataset, n=1) - # or manually specify the list of lists: - # compositions = [[22, 22, 22, 22, 22, 22, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8] for _ in range(8)] + # update the forcefield with the predicted structures + if i != 0: + forcefield.update(predicted_structures, 1, 0, 0, max_epochs=30, save_model=False) - if i != 0: - forcefield.update(predicted_structures, 1, 0, 0, max_epochs=30, save_model=False) + # Generate compositions for the initial structures, random or preset + compositions = generate_random_lithium_compositions(my_dataset, n=10) - # compositions = generate_random_compositions(my_dataset, n=8, max_elements=5, max_atoms=20) - compositions_novelty = generate_random_lithium_compositions(my_dataset, n=10) - initial_structures_novelty = [init_structure(c, pyxtal=False) for c in compositions_novelty] - # compositions_energy = generate_random_lithium_compositions(my_dataset, n=4000) - # initial_structures_energy = [init_structure(c, pyxtal=True) for c in compositions_energy] - for j, minima in enumerate(dict_to_atoms(initial_structures_novelty)): - filename = "optim_z/initial_iteration_"+str(i)+"_structure_"+str(j)+".cif" - ase.io.write(filename, minima) - # read_structure = ase.io.read("init.cif") + # Generate initial structures for the compositions + initial_structures = [init_structure(c, pyxtal=False) for c in compositions] - # initial_structures=[atoms_to_dict([read_structure], loss=[None])] + # write the initial structures to file + for j, minima in enumerate(dict_to_atoms(initial_structures)): + filename = "initial_structures/iteration_"+str(i)+"_structure_"+str(j)+".cif" + ase.io.write(filename, minima) - #forcefield itself is not an ase calculator, but can be used to return the MDLCalculator class - #initialize the predictor class, this is the BasinHopping version which uses an ASE calculator, but we can have another version for batched search - # total_list, minima_list = predictor.predict(initial_structures) - # minima_list = dict_to_atoms(minima_list) - # for j, minima in enumerate(minima_list): - # filename = "iteration_"+str(i)+"_structure_"+str(j)+"_mdl.cif" - # 
ase.io.write(filename, minima) - # f = open('output.txt', 'w') - # for i in range(len(total_list)): - # f.write('Structure ' + str(i) + '\n') - # for hop in total_list[i]: - # f.write("\tHop: " +str(hop['hop'])+ '\n') - # f.write("\t\tInit loss: " +str(hop['init_loss'])+ '\n') - # f.write("\t\tFinal loss: " +str(hop['loss'])+ '\n') - # f.write("\t\tComposition: " +str(hop['composition'])+ '\n') - # f.write("\t\tperturb: " +str(hop['perturb'])+ '\n') - # f.close() + #-----Optimizing the initial structures using BasinHopping----- - #---Optimizing a batch of structures with batch basin hopping--- - # alternatively if we dont use ASE, we can optimize in batch, and optimize over multiple objectives as well - # we do this by first initializing our objective function, which is similar to the loss function class in matdeeplearn - objective_func_energy = Energy(normalize=True, ljr_ratio=1) - # objective_func_novelty = EmbeddingDistance(embeddings, normalize=True, energy_ratio=2, ljr_ratio=1, ljr_scale=.7, embedding_ratio=.1) - # objective_func = EnergyAndUncertainty(normalize=True, uncertainty_ratio=.25, ljr_ratio=1, ljr_scale=.7) - # start_time = time.time() - # total_list_batch, minima_list_batch, best_hop, energies, accepts, accept_rate, temps, step_sizes = predictor_batch.predict(initial_structures_energy, objective_func_energy, batch_size=8, log_per=0, lr=.05) - # top_energy = sorted(minima_list_batch, key=lambda struc: struc['objective_loss'])[:100] - # print('---------TOP 100 ENERGY STRUCTURES---------') - # print(top_energy) - # print('---------TOP 100 ENERGY STRUCTURES---------') - # minima_list_batch_ase = dict_to_atoms(minima_list_batch) - # top_energy_ase = dict_to_atoms(top_energy) - # for j, minima in enumerate(minima_list_batch_ase): - # filename = "all_4k_energy/iteration_"+str(i)+"_structure_"+str(j)+"_mdl_batch.cif" - # ase.io.write(filename, minima) - # for j, minima in enumerate(top_energy_ase): - # filename = "top_100_energy/iteration_"+str(i)+"_structure_"+str(j)+"_mdl_batch.cif" - # ase.io.write(filename, minima) - # f = open('output.txt', 'w') - # for i in range(len(total_list_batch)): - # f.write('Structure ' + str(i) + '\n') - # f.write('\tbest_hop: ' + str(best_hop[j]) + '\n') - # for hop in total_list_batch[i]: - # f.write("\tHop: " +str(hop['hop'])+ '\n') - # f.write("\t\tObjective loss: " +str(hop['objective_loss'])+ '\n') - # f.write("\t\tEnergy loss: "+str(hop['energy_loss'])+'\n') - # if getattr(objective_func_energy, 'normalize', False): - # f.write("\t\tUnnormalized energy loss: " +str(hop['unnormalized_loss'])+ '\n') - # f.write("\t\tNovel loss: "+str(hop['novel_loss']) + '\n') - # f.write("\t\tSoft sphere loss: "+ str(hop['soft_sphere_loss']) + '\n') - # f.write("\t\tComposition: " +str(hop['composition'])+ '\n') - # f.write("\t\tperturb: " +str(hop['perturb'])+ '\n') - # f.close() - # print('Time taken for energy: {:.2f}'.format(time.time() - start_time)) - - start_time = time.time() - total_list_batch, minima_list_batch, best_hop, energies, accepts, accept_rate, temps, step_sizes = predictor_batch.predict(initial_structures_novelty, objective_func_energy, batch_size=1, log_per=1, lr=.01) - top_novelty = sorted(minima_list_batch, key=lambda struc: struc['objective_loss'])[:400] - print('---------TOP 400 NOVELTY STRUCTURES---------') - print(top_novelty) - print('---------TOP 400 NOVELTY STRUCTURES---------') - minima_list_batch_ase = dict_to_atoms(minima_list_batch) - top_novelty_ase = dict_to_atoms(top_novelty) - for j, minima in 
enumerate(minima_list_batch_ase): - filename = "optim_z/iteration_"+str(i)+"_structure_"+str(j)+"_mdl_batch.cif" - ase.io.write(filename, minima) - for j, minima in enumerate(top_novelty_ase): - filename = "optim_z/iteration_"+str(i)+"_structure_"+str(j)+"_mdl_batch.cif" - ase.io.write(filename, minima) - # f = open('output.txt', 'w') - # for i in range(len(total_list_batch)): - # f.write('Structure ' + str(i) + '\n') - # f.write('\tbest_hop: ' + str(best_hop[j]) + '\n') - # for hop in total_list_batch[i]: - # f.write("\tHop: " +str(hop['hop'])+ '\n') - # f.write("\t\tObjective loss: " +str(hop['objective_loss'])+ '\n') - # f.write("\t\tEnergy loss: "+str(hop['energy_loss'])+'\n') - # if getattr(objective_func_novelty, 'normalize', False): - # f.write("\t\tUnnormalized energy loss: " +str(hop['unnormalized_loss'])+ '\n') - # f.write("\t\tNovel loss: "+str(hop['novel_loss']) + '\n') - # f.write("\t\tSoft sphere loss: "+ str(hop['soft_sphere_loss']) + '\n') - # f.write("\t\tComposition: " +str(hop['composition'])+ '\n') - # f.write("\t\tperturb: " +str(hop['perturb'])+ '\n') - # f.close() - print('Time taken for novelty: {:.2f}'.format(time.time() - start_time)) - - for i, energy_list in enumerate(energies): - plt.scatter(range(len(energy_list)), energy_list, label=f'Structure {i + 1}', - color=['g' if a else 'r' for a in accepts[i]]) - plt.xlabel('Steps') - plt.ylabel('Energies') - plt.legend() - plt.show() - plt.close() - - for i, accept_rate_list in enumerate(accept_rate): - plt.scatter(range(len(accept_rate_list)), accept_rate_list, label=f'Structure {i + 1}') - plt.xlabel('Steps') - plt.ylabel('Accept Rate') - plt.legend() - plt.show() - plt.close() - - for i, temps_list in enumerate(temps): - plt.scatter(range(len(temps_list)), temps_list, label=f'Structure {i + 1}') - plt.xlabel('Steps') - plt.ylabel('Temps') - plt.legend() - plt.show() - plt.close() - - plt.scatter(range(len(step_sizes)), step_sizes) - plt.xlabel('Steps') - plt.ylabel('Step Sizes') - plt.legend() - plt.show() - plt.close() - - - # minima_list_mace = predictor_mace.predict(initial_structures) - # minima_list_mace = dict_to_atoms(minima_list_mace) - # for j, minima in enumerate(minima_list_mace): - # filename = "iteration_"+str(i)+"_structure_"+str(j)+"_mace.cif" - # ase.io.write(filename, minima) + # Set an objective function, here we use Energy + objective_func = Energy(normalize=True, ljr_ratio=1, optimize_z=True) - - # minima_list_m3gnet = predictor_m3gnet.predict(initial_structures) - # minima_list_m3gnet = dict_to_atoms(minima_list_m3gnet) - # for j, minima in enumerate(minima_list_m3gnet): - # filename = "iteration_"+str(i)+"_structure_"+str(j)+"_m3gnet.cif" - # ase.io.write(filename, minima) - - #check if the true structure has been found (either yes or no) - #adaptor = AseAtomsAdaptor - #structure_matcher = StructureMatcher(ltol = 0.3, stol = 0.3, angle_tol = 5, primitive_cell = True, scale = True) - # print(structure_matcher.fit(adaptor.get_structure(read_structure), adaptor.get_structure(minima_list[0]))) - #print(structure_matcher.fit(adaptor.get_structure(read_structure), adaptor.get_structure(minima_list_batch[0]))) - # print(structure_matcher.fit(adaptor.get_structure(read_structure), adaptor.get_structure(minima_list_mace[0]))) - # print(structure_matcher.fit(adaptor.get_structure(read_structure), adaptor.get_structure(minima_list_m3gnet[0]))) - #print(structure_matcher.get_rms_dist(adaptor.get_structure(read_structure), adaptor.get_structure(minima_list[0]))) - 
#print(structure_matcher.get_rms_dist(adaptor.get_structure(read_structure), adaptor.get_structure(minima_list_batch[0]))) - #print(structure_matcher.get_rms_dist(adaptor.get_structure(read_structure), adaptor.get_structure(minima_list_mace[0]))) - #print(structure_matcher.get_rms_dist(adaptor.get_structure(read_structure), adaptor.get_structure(minima_list_m3gnet[0]))) - - #quantify structure similairy, continous from 1 to 0 - #see: https://docs.materialsproject.org/methodology/materials-methodology/related-materials - #matminer may need older version of numpy==1.23.5 - #ssf = SiteStatsFingerprint(CrystalNNFingerprint.from_preset('ops', distance_cutoffs=None, x_diff_weight=0), stats=('mean', 'std_dev', 'minimum', 'maximum')) - #target = np.array(ssf.featurize(adaptor.get_structure(read_structure))) - # mdl = np.array(ssf.featurize(adaptor.get_structure(minima_list[0]))) - #mdl_batch = np.array(ssf.featurize(adaptor.get_structure(minima_list_batch[0]))) - # mace = np.array(ssf.featurize(adaptor.get_structure(minima_list_mace[0]))) - # m3gnet = np.array(ssf.featurize(adaptor.get_structure(minima_list_m3gnet[0]))) - # print('Distance between target and mdl: {:.4f}'.format(np.linalg.norm(target - mdl))) - #print('Distance between target and mdl_batch: {:.4f}'.format(np.linalg.norm(target - mdl_batch))) - # print('Distance between target and mace: {:.4f}'.format(np.linalg.norm(target - mace))) - # print('Distance between target and m3gnet: {:.4f}'.format(np.linalg.norm(target - m3gnet))) - - #predict structures using BasinHoppingASE - #minima_list=[] - #for j in range(0, len(compositions)): - # putative_minima = predictor.predict(compositions[j], topk=1) - # minima_list.append(putative_minima[0]) - - - #validate with DFT on-demand on the putative minima - dft_path = 'path/to/dft_config.yml' - dft_config=read_dft_config(dft_path) - method = setup_DFT(dft_config) - validator = Validate(method=method, local=False) - dft_results=[] - # for j in range(0, len(minima_list)): - # dft_results.append(validator(minima_list[j])) - - - #my_dataset = combine_dataset(my_dataset, dft_results) - - #retrain the forcefield - #forcefield.train(my_dataset) - #or finetune the forcefield rather than from scratch - #forcefield.update(dft_results) - #forcefield.update(my_dataset, .009, .05, .05, max_epochs=1) - - - #update the dataset as well - predicted_structures.extend(minima_list_batch) - -print("Job done") + # Run the prediction + start_time = time.time() + total_list_batch, minima_list_batch, best_hop, energies, accepts, accept_rate, temps, step_sizes = predictor_batch.predict(initial_structures, objective_func, batch_size=8, log_per=0, lr=.05) + sorted_results = sorted(minima_list_batch, key=lambda struc: struc['objective_loss']) + # Save the structures to file + minima_list_batch_ase = dict_to_atoms(minima_list_batch) + for j, minima in enumerate(minima_list_batch_ase): + filename = "predicted_structures/iteration_"+str(i)+"_structure_"+str(j)+"_mdl_batch.cif" + ase.io.write(filename, minima) + + # Optionally save all optimization information to file + # f = open('output.txt', 'w') + # for i in range(len(total_list_batch)): + # f.write('Structure ' + str(i) + '\n') + # f.write('\tbest_hop: ' + str(best_hop[j]) + '\n') + # for hop in total_list_batch[i]: + # f.write("\tHop: " +str(hop['hop'])+ '\n') + # f.write("\t\tObjective loss: " +str(hop['objective_loss'])+ '\n') + # f.write("\t\tEnergy loss: "+str(hop['energy_loss'])+'\n') + # if getattr(objective_func_energy, 'normalize', False): + # 
f.write("\t\tUnnormalized energy loss: " +str(hop['unnormalized_loss'])+ '\n') + # f.write("\t\tNovel loss: "+str(hop['novel_loss']) + '\n') + # f.write("\t\tSoft sphere loss: "+ str(hop['soft_sphere_loss']) + '\n') + # f.write("\t\tComposition: " +str(hop['composition'])+ '\n') + # f.write("\t\tperturb: " +str(hop['perturb'])+ '\n') + # f.close() + + print('Time taken for prediction: {:.2f}'.format(time.time() - start_time)) + + + # Plotting the optimization information + for i, energy_list in enumerate(energies): + plt.scatter(range(len(energy_list)), energy_list, label=f'Structure {i + 1}', + color=['g' if a else 'r' for a in accepts[i]]) + plt.xlabel('Steps') + plt.ylabel('Energies') + plt.legend() + plt.show() + plt.close() + + for i, accept_rate_list in enumerate(accept_rate): + plt.scatter(range(len(accept_rate_list)), accept_rate_list, label=f'Structure {i + 1}') + plt.xlabel('Steps') + plt.ylabel('Accept Rate') + plt.legend() + plt.show() + plt.close() + + for i, temps_list in enumerate(temps): + plt.scatter(range(len(temps_list)), temps_list, label=f'Structure {i + 1}') + plt.xlabel('Steps') + plt.ylabel('Temps') + plt.legend() + plt.show() + plt.close() + + plt.scatter(range(len(step_sizes)), step_sizes) + plt.xlabel('Steps') + plt.ylabel('Step Sizes') + plt.legend() + plt.show() + plt.close() + + predicted_structures.extend(minima_list_batch) + + print("Job done") diff --git a/scripts/finetune.py b/scripts/finetune.py index da2d59f..96818e4 100644 --- a/scripts/finetune.py +++ b/scripts/finetune.py @@ -4,31 +4,33 @@ from msp.structure.structure_util import dict_to_atoms import ase -eval_dataset = json.load(open("../data/data_subset_msp.json", "r")) -# get a random subset of eval_dataset -eval_dataset = eval_dataset +if __name__ == "__main__": -my_dataset = json.load(open("../data/iter_one_structures/data.json", "r")) -for data in my_dataset: - data["stress"] = np.array(data["stress"])*0.006242*-0.1 -#my_dataset = json.load(open("../data/data_subset_msp.json", "r")) + # load original dataset model was trained on + eval_dataset = json.load(open("../data/data_subset_msp.json", "r")) -train_config = 'mdl_config.yml' + # load dataset to finetune on + finetune_dataset = json.load(open("../data/iter_one_structures/data.json", "r")) + # convert stress to correct units + for data in finetune_dataset: + data["stress"] = np.array(data["stress"])*0.006242*-0.1 + # Select config file for forcefield + train_config = 'mdl_config.yml' + forcefield = MDL_FF(train_config, finetune_dataset) -forcefield = MDL_FF(train_config, my_dataset) + print("Evaluating before finetuning on eval_dataset") + forcefield.validate(eval_dataset, val_ratio=1, batch_size=12) -print("Evaluating before finetuning on eval_dataset") -forcefield.validate(eval_dataset, val_ratio=1, batch_size=12) + print("Evaluating before finetuning on finetuning_dataset") + forcefield.validate(finetune_dataset, val_ratio=1, batch_size=12) -print("Evaluating before finetuning on finetuning_dataset") -forcefield.validate(my_dataset, val_ratio=1, batch_size=12) + # Finetune the model + forcefield.update(finetune_dataset, .95, .05, 0, max_epochs=100, save_model=False, batch_size=12, save_path='fine_tuned_models') -forcefield.train(my_dataset, .95, .05, 0, max_epochs=100, save_model=False, batch_size=12, save_path='fine_tuned_models') + print("Evaluating after finetuning on finetuning_dataset") + forcefield.validate(finetune_dataset, val_ratio=1, batch_size=12) -print("Evaluating after finetuning on finetuning_dataset") 
-forcefield.validate(my_dataset, val_ratio=1, batch_size=12) - -print("Evaluating after finetuning on eval_dataset") -forcefield.validate(eval_dataset, val_ratio=1, batch_size=12) + print("Evaluating after finetuning on eval_dataset") + forcefield.validate(eval_dataset, val_ratio=1, batch_size=12) From ea1714789085c6fb3b608825b15779a75f073bd5 Mon Sep 17 00:00:00 2001 From: Rithwik Seth Date: Mon, 11 Nov 2024 17:23:48 -0800 Subject: [PATCH 11/16] Fixed naming bug --- msp/optimizer/globalopt/basin_hopping.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/msp/optimizer/globalopt/basin_hopping.py b/msp/optimizer/globalopt/basin_hopping.py index 01e76bb..d828e10 100644 --- a/msp/optimizer/globalopt/basin_hopping.py +++ b/msp/optimizer/globalopt/basin_hopping.py @@ -342,7 +342,7 @@ def __init__(self, forcefield, hops=5, steps=100, optimizer="Adam", dr=.5, max_a self.forcefield = forcefield def predict(self, structures, objective_func, cell_relax=True, topk=1, batch_size=4, log_per=0, lr=.05, density=.2, num_atoms_perturb=1, - num_unique=4, dynamic_temp=False, dynamic_dr=False, optim_z=False): + num_unique=4, dynamic_temp=False, dynamic_dr=False, optimize_z=False): """ Optimizes the list of compositions in batches @@ -393,7 +393,7 @@ def predict(self, structures, objective_func, cell_relax=True, topk=1, batch_siz for i in range(self.hops): start_time = time() new_atoms, obj_loss, energy_loss, novel_loss, soft_sphere_loss = self.forcefield.optimize(new_atoms, self.steps, objective_func, log_per, lr, - batch_size=batch_size, cell_relax=cell_relax, optim=self.optimizer, optim_z=optim_z) + batch_size=batch_size, cell_relax=cell_relax, optim=self.optimizer, optimize_z=optimize_z) if dynamic_dr: self.change_dr(accepts[0], rate=0.1) end_time = time() @@ -444,7 +444,7 @@ def predict(self, structures, objective_func, cell_relax=True, topk=1, batch_siz print('HOP', i, 'took', end_time - start_time, 'seconds') print('Final optimization') best_atoms, obj_loss, energy_loss, novel_loss, soft_sphere_loss = self.forcefield.optimize(best_atoms, 1, objective_func, log_per, lr, - batch_size=batch_size, cell_relax=cell_relax, optim=self.optimizer, optim_z=optim_z) + batch_size=batch_size, cell_relax=cell_relax, optim=self.optimizer, optimize_z=optimize_z) avg_loss = 0 for j, hop in enumerate(best_hop): if getattr(objective_func, 'normalize', False): From ffddaee970c61f09181bac72b82ae466a40ad15b Mon Sep 17 00:00:00 2001 From: rithwiks <42122156+rithwiks@users.noreply.github.com> Date: Tue, 19 Nov 2024 21:32:54 -0500 Subject: [PATCH 12/16] Update README.md --- README.md | 94 ++++++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 93 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 2155626..a9689ed 100644 --- a/README.md +++ b/README.md @@ -1 +1,93 @@ -# MatStructPredict \ No newline at end of file +# MatStructPredict: An Open Source Library for GNN-Powered Structure Prediction + +MatStructPredict is a machine learning library that offers simple, flexible pipelines for structure prediction and active learning. 
+
+## Table of Contents
+- [MatStructPredict: An Open Source Library for GNN-Powered Structure Prediction](#matstructpredict-an-open-source-library-for-gnn-powered-structure-prediction)
+  - [Table of Contents](#table-of-contents)
+  - [Motivation](#motivation)
+  - [Features](#features)
+  - [Installation](#installation-todo)
+  - [Quick Start: Structure Prediction](#quick-start-structure-prediction)
+  - [Contributing](#contributing-todo)
+  - [License](#license-todo)
+  - [Citation](#citation-todo)
+
+
+## Motivation
+
+With more powerful and more accurate Graph Neural Networks (GNNs) coming into play, structure prediction using GNNs has become a fast and effective method for generating structures at scale. On multiple occasions, vast numbers of structures have already been generated with GNNs for property optimization. However, creating programs to run GNN-based structure prediction requires specialized knowledge of PyTorch and machine learning. To help people of varying levels of machine learning knowledge generate structures, we have created MatStructPredict.
+
+MatStructPredict is a library that offers simple, customizable pipelines for structure prediction. The library offers the following features:
+- Training and Evaluating ML Models
+- Composition generation
+- Global Optimization
+  - BasinHopping
+- Structure Prediction
+  - Optimize structures for multiple objectives
+  - Optimize atomic positions and the unit cell
+
+By simplifying the process of predicting structures, MatStructPredict gives researchers the ability to generate structures for their own use cases, regardless of whether or not they are familiar with machine learning.
+
+## Features
+
+- **Pre-trained Model Support**: Use multiple pre-trained models for ASE optimization:
+  - CHGNet
+  - MACE
+  - M3GNet
+
+- **MatDeepLearn Model Features**: Use all models supported by MatDeepLearn for:
+  - Training
+  - Evaluating
+  - Batch Optimization
+  - Custom Objective Structure Prediction
+
+- **Flexible Properties**: Support for various molecular and materials properties:
+  - Energy prediction
+  - Force prediction (both conservative and non-conservative)
+  - Stress tensor prediction
+
+- **Flexible Objectives**: Support for various optimization objectives:
+  - Energy
+  - Novelty
+  - Embedding Distance
+  - Uncertainty
+  - LJR Loss
+
+- **Structure Prediction**: Pipelines for Structure Prediction from start to finish:
+  - SMACT Valid Composition generation
+  - Custom compositions
+  - Random Lithium Compositions
+  - Random Generic Compositions
+  - Global Optimization with Basin Hopping
+    - Includes the following perturbations:
+      - Cell
+      - Positions
+      - Atomic Numbers
+      - Add/remove/swap atoms
+  - Saving structures
+  - Finetuning model on new structures
+
+## Installation - TODO
+
+```bash
+pip install matstructpredict
+```
+
+## Quick Start: Structure Prediction
+
+Use example.py and mdl_config.yml for a quick structure prediction run using a MatDeepLearn model. Remember to adjust the file paths to your corresponding dataset and ideal save paths.
+
+Example.ipynb provides a Jupyter Notebook that takes users through each step of the Structure Prediction process.
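+
+The snippet below is a minimal sketch of a single prediction run, condensed from scripts/example.py. The dataset path and output directory are placeholders, so replace them with your own paths; see example.py for the full active learning loop.
+
+```python
+import json
+import ase
+from ase import io
+
+from msp.composition import generate_random_lithium_compositions
+from msp.forcefield import MDL_FF
+from msp.optimizer.globalopt.basin_hopping import BasinHoppingBatch
+from msp.structure.structure_util import init_structure, dict_to_atoms
+from msp.utils.objectives import Energy
+
+# Load a training dataset (placeholder path) and build the forcefield from its config
+my_dataset = json.load(open("path/to/data.json", "r"))
+forcefield = MDL_FF("mdl_config.yml", my_dataset)
+
+# Batched basin hopping that perturbs atomic positions and the cell between hops
+predictor = BasinHoppingBatch(forcefield, hops=50, steps=100, dr=0.6,
+                              optimizer="Adam", perturbs=["pos", "cell"])
+
+# Sample compositions, build starting structures, and optimize them against the energy objective
+compositions = generate_random_lithium_compositions(my_dataset, n=10)
+initial_structures = [init_structure(c, pyxtal=False) for c in compositions]
+objective = Energy(normalize=True, ljr_ratio=1)
+results, minima, best_hop, energies, accepts, accept_rate, temps, step_sizes = \
+    predictor.predict(initial_structures, objective, batch_size=8, log_per=0, lr=.05)
+
+# Write the relaxed structures to CIF files (placeholder output directory)
+for j, atoms in enumerate(dict_to_atoms(minima)):
+    ase.io.write("predicted_structures/structure_" + str(j) + ".cif", atoms)
+```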
+ +## Contributing - TODO + +## License - TODO + +## Citation - TODO + +If you use MatStructPredict in your research, please cite: + +TODO +```bibtex +``` From cab3e2512a8f99ce147246f5e1ae70931917e4e0 Mon Sep 17 00:00:00 2001 From: rithwiks <42122156+rithwiks@users.noreply.github.com> Date: Tue, 19 Nov 2024 21:33:33 -0500 Subject: [PATCH 13/16] Update README.md --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index a9689ed..862ae20 100644 --- a/README.md +++ b/README.md @@ -7,9 +7,9 @@ MatStructPredict is a machine learning library that offers simple, flexible pipe - [Table of Contents](#table-of-contents) - [Motivation](#motivation) - [Features](#features) - - [Installation](#installation-todo) + - [Installation](#installation) - [Quick Start: Structure Prediction](#quick-start-structure-prediction) - - [Contributing](#contributing-todo) + - [Contributing](#contributing) - [License](#license-todo) - [Citation](#citation-todo) From 86f16c88110cebc95f48a4cc62c3e0e7243cfdbc Mon Sep 17 00:00:00 2001 From: rithwiks <42122156+rithwiks@users.noreply.github.com> Date: Tue, 19 Nov 2024 21:33:46 -0500 Subject: [PATCH 14/16] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 862ae20..291073c 100644 --- a/README.md +++ b/README.md @@ -7,7 +7,7 @@ MatStructPredict is a machine learning library that offers simple, flexible pipe - [Table of Contents](#table-of-contents) - [Motivation](#motivation) - [Features](#features) - - [Installation](#installation) + - [Installation](#installation-todo) - [Quick Start: Structure Prediction](#quick-start-structure-prediction) - [Contributing](#contributing) - [License](#license-todo) From 5fb474999044ebffbf36ca668a7732660973be5d Mon Sep 17 00:00:00 2001 From: rithwiks <42122156+rithwiks@users.noreply.github.com> Date: Tue, 19 Nov 2024 21:36:18 -0500 Subject: [PATCH 15/16] Update README.md --- README.md | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/README.md b/README.md index 291073c..1cca7f9 100644 --- a/README.md +++ b/README.md @@ -7,11 +7,11 @@ MatStructPredict is a machine learning library that offers simple, flexible pipe - [Table of Contents](#table-of-contents) - [Motivation](#motivation) - [Features](#features) - - [Installation](#installation-todo) + - [Installation](#installation) - [Quick Start: Structure Prediction](#quick-start-structure-prediction) - [Contributing](#contributing) - - [License](#license-todo) - - [Citation](#citation-todo) + - [License](#license) + - [Citation](#citation) ## Motivation @@ -68,8 +68,8 @@ By simplifying the process of predicting structures, MatStructPredict gives rese - Saving structures - Finetuning model on new structures -## Installation - TODO - +## Installation +TODO ```bash pip install matstructpredict ``` @@ -80,12 +80,12 @@ Use example.py and mdl_config.yml for a quick structure prediction run using a M Example.ipynb provides a Jupyter Notebook that takes users through each step of the Structure Prediction process. 
-## Contributing - TODO - -## License - TODO - -## Citation - TODO - +## Contributing + TODO +## License +TODO +## Citation +YODO If you use MatStructPredict in your research, please cite: TODO From a36f08df3f26ec5b1e56340c4ed0f15699b80e1d Mon Sep 17 00:00:00 2001 From: rithwiks <42122156+rithwiks@users.noreply.github.com> Date: Thu, 21 Nov 2024 15:37:25 -0500 Subject: [PATCH 16/16] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 1cca7f9..572c4aa 100644 --- a/README.md +++ b/README.md @@ -85,7 +85,7 @@ Example.ipynb provides a Jupyter Notebook that takes users through each step of ## License TODO ## Citation -YODO +TODO If you use MatStructPredict in your research, please cite: TODO