diff --git a/mpworks/drones/mp_vaspdrone.py b/mpworks/drones/mp_vaspdrone.py index 6660f5cc..34188f29 100644 --- a/mpworks/drones/mp_vaspdrone.py +++ b/mpworks/drones/mp_vaspdrone.py @@ -21,7 +21,7 @@ from pymatgen.entries.compatibility import MaterialsProjectCompatibility from pymatgen.entries.computed_entries import ComputedEntry from pymatgen.matproj.snl import StructureNL -from pymatgen.io.vaspio.vasp_output import Vasprun, Outcar +from pymatgen.io.vasp.outputs import Vasprun, Outcar from pymatgen.analysis.structure_analyzer import oxide_type @@ -245,7 +245,13 @@ def process_fw(self, dir_name, d): d['snlgroup_id'] = fw_dict['spec']['snlgroup_id'] d['vaspinputset_name'] = fw_dict['spec'].get('vaspinputset_name') d['task_type'] = fw_dict['spec']['task_type'] - + # Process data for deformed structures + if 'deformed' in d['task_type']: + d['deformation_matrix'] = fw_dict['spec']['deformation_matrix'] + d['original_task_id'] = fw_dict['spec']['original_task_id'] + if 'modified volume' in d['task_type']: + d['strainfactor'] = fw_dict['spec']['strainfactor'] + d['original_task_id'] = fw_dict['spec']['original_task_id'] if not self.update_duplicates: if 'optimize structure' in d['task_type'] and 'output' in d: # create a new SNL based on optimized structure diff --git a/mpworks/firetasks/eos_thermal_tasks.py b/mpworks/firetasks/eos_thermal_tasks.py new file mode 100644 index 00000000..a4179e1d --- /dev/null +++ b/mpworks/firetasks/eos_thermal_tasks.py @@ -0,0 +1,321 @@ +from monty.os.path import zpath +import os +import json +from pymongo import MongoClient +import numpy as np +from decimal import Decimal + +__author__ = 'Cormac Toher' + + +from fireworks.utilities.fw_serializers import FWSerializable +from fireworks.core.firework import FireTaskBase, FWAction +from pymatgen.io.vasp.inputs import Incar, Poscar +from pymatgen.analysis.eqn_of_state_thermal.eos_vasp_setup import ModifiedVolumeStructureSet +from pymatgen.analysis.eqn_of_state_thermal.eos_wf_run import 
eos_thermal_properties +from fireworks.core.firework import Firework, Workflow +from mpworks.firetasks.vasp_io_tasks import VaspWriterTask, VaspToDBTask +from mpworks.firetasks.custodian_task import get_custodian_task +from fireworks.utilities.fw_utilities import get_slug +from pymatgen import Composition +from pymatgen.matproj.snl import StructureNL +from mpworks.workflows import snl_to_wf +from mpworks.firetasks.snl_tasks import AddSNLTask +from mpworks.snl_utils.mpsnl import get_meta_from_structure, MPStructureNL +from pymatgen.core.structure import Structure +from mpworks.workflows.wf_settings import QA_VASP, QA_DB, QA_VASP_SMALL +from pymatgen.io.vasp.inputs import Poscar, Kpoints +from pymatgen import MPRester + +def update_spec_force_convergence(spec, user_vasp_settings=None): + fw_spec = spec + update_set = {"ENCUT": 700, "EDIFF": 0.000001, "ALGO":"N", "NPAR":2, "NSW":0} + if user_vasp_settings and user_vasp_settings.get("incar"): + update_set.update(user_vasp_settings["incar"]) + fw_spec['vasp']['incar'].update(update_set) + old_struct=Poscar.from_dict(fw_spec["vasp"]["poscar"]).structure + if user_vasp_settings and user_vasp_settings.get("kpoints"): + kpoints_density = user_vasp_settings["kpoints"]["kpoints_density"] + else: + kpoints_density = 7000 + k=Kpoints.automatic_density(old_struct, kpoints_density) + fw_spec['vasp']['kpoints'] = k.as_dict() + return fw_spec + + +class SetupFConvergenceTask(FireTaskBase, FWSerializable): + _fw_name = "Setup Force Convergence Task" + + def run_task(self, fw_spec): + incar = fw_spec['vasp']['incar'] + update_set = {"ENCUT": 600, "EDIFF": 0.00005} + incar.update(update_set) + kpoints = fw_spec['vasp']['kpoints'] + k = [int(round(2.5*k)) if int(round(2.5*k))%2 else int(round(2.5*k))+1 for k in kpoints['kpoints'][0]] + kpoints['kpoints'] = [k] + return FWAction() + +class SetupEoSThermalTask(FireTaskBase, FWSerializable): + _fw_name = "Setup EoS Thermal Task" + + def run_task(self, fw_spec): + incar = 
Incar.from_file(zpath("INCAR")) + incar.update({"NSW": 0}) + incar.update({"LCHARG": True}) + incar.update({"IBRION": -1}) + incar.write_file("INCAR") + return FWAction() + +class SetupModifiedVolumeStructTask(FireTaskBase, FWSerializable): + _fw_name = "Setup Modified Volume Struct Task" + + def run_task(self, fw_spec): + # Read structure from previous relaxation + relaxed_struct = fw_spec['output']['crystal'] + modified_struct_set = ModifiedVolumeStructureSet(relaxed_struct) + poisson_val = fw_spec['poisson_ratio'] + wf=[] + task_id_list=[] + for i, mod_struct in enumerate(modified_struct_set.modvol_structs): + fws=[] + connections={} + f = Composition(mod_struct.formula).alphabetical_formula + snl = StructureNL(mod_struct, 'Cormac Toher ',projects=["Thermal"]) + tasks = [AddSNLTask()] + snl_priority = fw_spec.get('priority', 1) + spec = {'task_type': 'Add Modified Struct to SNL database', 'snl': snl.as_dict(), + '_queueadapter': QA_DB, '_priority': snl_priority} + if 'snlgroup_id' in fw_spec and isinstance(snl, MPStructureNL): + spec['force_mpsnl'] = snl.as_dict() + spec['force_snlgroup_id'] = fw_spec['snlgroup_id'] + del spec['snl'] + fws.append(Firework(tasks, spec, name=get_slug(f + '--' + spec['task_type']), fw_id=-1000+i*10)) + connections[-1000+i*10] = [-999+i*10] + spec = snl_to_wf._snl_to_spec(snl, parameters={'exact_structure':True}) + spec = update_spec_force_convergence(spec) + spec['strainfactor'] = modified_struct_set.strainfactors[i] + spec['original_task_id']=fw_spec["task_id"] + spec['_priority'] = fw_spec['_priority']*2 + #Turn off dupefinder for modified structure + del spec['_dupefinder'] + spec['task_type'] = "Calculate static modified volume structure" + fws.append(Firework([VaspWriterTask(), SetupEoSThermalTask(), get_custodian_task(spec)], + spec, name=get_slug(f + '--' + fw_spec['task_type']), fw_id=-999+i*10)) + + priority = fw_spec['_priority']*3 +# task_id_list.append(fw_spec["task_id"]) + spec = {'task_type': 'VASP db insertion', 
'_priority': priority, + '_allow_fizzled_parents': True, '_queueadapter': QA_DB, 'poisson_ratio': poisson_val, + 'eqn_of_state_thermal':"modified_structure", 'clean_task_doc':True, + 'strainfactor':modified_struct_set.strainfactors[i], 'original_task_id':fw_spec["task_id"], 'original_task_id_list':task_id_list} + fws.append(Firework([VaspToDBTask(), AddEoSThermalDataToDBTask()], spec, name=get_slug(f + '--' + spec['task_type']), fw_id=-998+i*10)) + connections[-999+i*10] = [-998+i*10] + wf.append(Workflow(fws, connections)) +# fws=[] +# connections={} +# spec = {'task_type': 'Add EoS Thermal Data to DB Task', '_priority': priority, +# '_queueadapter': QA_DB, 'poisson_ratio': poisson_val} +# print("Calling firetask") +# fws.append(Firework([AddEoSThermalDataToDBTask()], spec, +# name=get_slug(f + '--' + spec['task_type']),fw_id=-997+i*10)) +# connections[-998+i*10] = [-997+i*10] +# wf.append(Workflow(fws, connections)) + return FWAction(additions=wf) + + +class AddEoSThermalDataToDBTask(FireTaskBase, FWSerializable): + _fw_name = "Add EoS Thermal Data to DB Task" + + def run_task(self, fw_spec): + print("Running EoS Thermal Data") + db_dir = os.environ['DB_LOC'] + db_path = os.path.join(db_dir, 'tasks_db.json') + print("fw_spec keys = ", fw_spec.keys()) + poisson_val = fw_spec['poisson_ratio'] + print("Poisson ratio = ", poisson_val) + strainfactor_val = fw_spec['strainfactor'] + print("Strain factor = ", strainfactor_val) + task_id_list = fw_spec['original_task_id_list'] + i = fw_spec['original_task_id'] + print("original_task_id = ", i) + j = fw_spec['task_id'] + print("task_id = ", j) + + with open(db_path) as f: + db_creds = json.load(f) + connection = MongoClient(db_creds['host'], db_creds['port']) + tdb = connection[db_creds['database']] + tdb.authenticate(db_creds['admin_user'], db_creds['admin_password']) + tasks = tdb[db_creds['collection']] +# print("tasks = ", tasks) +# print("tasks size = ", len(tasks)) +# print("tasks keys = ", tasks.keys) + eos_thermal 
= tdb['eos_thermal'] +# i = 'mp-1265' + ndocs = tasks.find({"original_task_id": i, + "state":"successful"}).count() +# ndocs = tasks.find({"task_id": i, +# "state":"successful"}).count() + print("ndocs = ", ndocs) +# print("tasks keys = ", tasks.keys) +# print("tasks size = ", len(tasks)) +# ndocs = tasks.find({"task_id": i, +# "state":"successful"}).count() + existing_doc = eos_thermal.find_one({"relaxation_task_id" : i}) +# existing_doc = eos_thermal.find_one({"task_id" : i}) +# print("existing_doc = ", existing_doc) + if existing_doc: + print("Updating: " + i) + else: + print("New material: " + i) + d = {"analysis": {}, "error": [], "warning": []} + d["ndocs"] = ndocs +# o_size = tasks.find_one({"task_id" : i}, {"pretty_formula" : 1, "spacegroup" : 1, "snl" : 1, "snl_final" : 1, "run_tags" : 1}).count() +# print("o_size = ", o_size) + o = tasks.find_one({"task_id" : i}, + {"pretty_formula" : 1, "spacegroup" : 1, + "snl" : 1, "snl_final" : 1, "run_tags" : 1}) + if not o: + raise ValueError("Cannot find original task id") + if o: + print("o exists") +# o_size_two = o.count() +# print("o_size_two = ", o_size_two) +# print("o value = ", o) +# testok = tasks.find_one({"task_id" : i}) +# print("testok = ", testok) + # Get energy vs. 
volume from strained structures + d["strain_tasks"] = {} + #ss_dict = {} +# print("o = ", o) + volume_values = [] + energy_values = [] + #m = MPRester("o95HBW1KgfiSA0tN") + #elasticity_data = m.query(criteria={"task_id": "mp-1265"}, properties=["elasticity"]) + #poisson_val = elasticity_data[0]["elasticity"]["poisson_ratio"] +# for k in tasks.find({"original_task_id": i}, +# print("Starting loop over k") +# for ij in task_id_list: +# print("ij = ", ij) + for k in tasks.find({"original_task_id": i}, {"strainfactor":1, "calculations.output":1, "state":1, "task_id":1}): +# defo = k['strainfactor'] +# print("In loop over k") +# print("k = ", k) +# print("k.keys = ", k.keys) + kenerg_atom = k['calculations'][0]['output']['final_energy_per_atom'] + print("Energy per atom = ", kenerg_atom) + kvol = k['calculations'][0]['output']['crystal']['lattice']['volume'] + print("Volume = ", kvol) + ksites = k['calculations'][0]['output']['crystal']['sites'] + knatoms = len(ksites) + kenerg = kenerg_atom * knatoms + energy_values.append(kenerg) + volume_values.append(kvol) +# energy_values.append(k['output.final_energy']) +# volume_values.append(k['output.crystal.lattice.volume']) + # d_ind = np.nonzero(defo - np.eye(3)) + # delta = Decimal((defo - np.eye(3))[d_ind][0]) + # Normal deformation + # if d_ind[0] == d_ind[1]: + # dtype = "_".join(["d", str(d_ind[0][0]), + # "{:.0e}".format(delta)]) + # Shear deformation + # else: + # dtype = "_".join(["s", str(d_ind[0] + d_ind[1]), + # "{:.0e}".format(delta)]) + #sm = IndependentStrain(defo) + #d["strain_tasks"][dtype] = {"state" : k["state"], + # "strainfactor" : defo, + # "strain" : sm.tolist(), + # "task_id": k["task_id"]} + #if k["state"] == "successful": + # st = Stress(k["calculations"][-1]["output"] \ + # ["ionic_steps"][-1]["stress"]) + # ss_dict[sm] = st + d["snl"] = o["snl"] + if "run_tags" in o.keys(): + d["run_tags"] = o["run_tags"] + for tag in o["run_tags"]: + if isinstance(tag, dict): + if "input_id" in tag.keys(): + 
d["input_mp_id"] = tag["input_id"] + d["snl_final"] = o["snl_final"] + d["pretty_formula"] = o["pretty_formula"] + + # Old input mp-id style + if o["snl"]["about"].get("_mp_id"): + d["material_id"] = o["snl"]["about"]["_mp_id"] + + # New style + elif "input_mp_id" in d: + d["material_id"] = d["input_mp_id"] + else: + d["material_id"] = None + d["relaxation_task_id"] = i + + calc_struct = Structure.from_dict(o["snl_final"]) + # TODO: + # JHM: This test is unnecessary at the moment, but should be redone + """ + conventional = is_conventional(calc_struct) + if conventional: + d["analysis"]["is_conventional"] = True + else: + d["analysis"]["is_conventional"] = False + """ + d["spacegroup"]=o.get("spacegroup", "Unknown") + + + if ndocs >= 21: + eos_thermal_dict = {} + # Perform thermal equation of state fitting and analysis +# eos_thermal_dict = eos_thermal_properties.eos_thermal_run(calc_struct, volume_values, energy_values, ieos=2) +# eos_thermal_dict = eos_thermal_properties.eos_thermal_run(calc_struct, volume_values, energy_values, ieos=2, idebye=0, poissonratio=poisson_val) + eos_thermal_properties_inst = eos_thermal_properties() +# eos_thermal_dict = eos_thermal_properties_inst.eos_thermal_run(calc_struct, volume_values, energy_values, ieos=0, idebye=0, poissonratio=poisson_val) + eos_thermal_dict = eos_thermal_properties_inst.eos_thermal_run(calc_struct, volume_values, energy_values, poissonratio=poisson_val) + + # Test to check if results have been calculated + print("Thermal conductivity = ", eos_thermal_dict["thermal_conductivity"]) + # Add equation of state results to dict + d["temperature"] = eos_thermal_dict["temperature"] + d["pressure"] = eos_thermal_dict["pressure"] + d["Thermal_conductivity_temp_list"] = eos_thermal_dict["thermal_conductivity"] + d["Debye_temperature_temp_list"] = eos_thermal_dict["Debye_temperature"] + d["Gruneisen_parameter_temp_list"] = eos_thermal_dict["Gruneisen_parameter"] + d["Heat_capacity_Cv_temp_list"] = 
eos_thermal_dict["Heat_capacity_Cv"] + d["Heat_capacity_Cp_temp_list"] = eos_thermal_dict["Heat_capacity_Cp"] + d["Volume_temp_list"] = eos_thermal_dict["Volume"] + d["Bulk_modulus_temp_list"] = eos_thermal_dict["Bulk_modulus"] + d["BM_coeffs"] = eos_thermal_dict["BM_coeffs"] + jtdbest = eos_thermal_dict["Best_fit_temperature"] + + # Add values at specific temperatures (Debye temperature and Gruneisen parameter best fit, values at 300K) + d["Debye_temperature"] = d["Debye_temperature_temp_list"][eos_thermal_dict["Best_fit_temperature"]] + d["Gruneisen_parameter"] = d["Gruneisen_parameter_temp_list"][eos_thermal_dict["Best_fit_temperature"]] + d["Thermal_conductivity_300K"] = d["Thermal_conductivity_temp_list"][eos_thermal_dict["300K_point"]] + d["Heat_capacity_Cv_300K"] = d["Heat_capacity_Cv_temp_list"][eos_thermal_dict["300K_point"]] + d["Heat_capacity_Cp_300K"] = d["Heat_capacity_Cp_temp_list"][eos_thermal_dict["300K_point"]] + d["Bulk_modulus_300K"] = d["Bulk_modulus_temp_list"][eos_thermal_dict["300K_point"]] + + + + else: + d['state'] = "Fewer than 21 successful tasks completed" + print("Fewer than 21 successful tasks completed") + print("ndocs = ", ndocs) + print("mp_id = ", i) + return FWAction() + + if o["snl"]["about"].get("_kpoint_density"): + d["kpoint_density"]= o["snl"]["about"].get("_kpoint_density") + + if d["error"]: + raise ValueError("Thermal equation of state analysis failed: {}".format(d["error"])) + elif d["analysis"]["filter_pass"]: + d["state"] = "successful" + else: + d["state"] = "filter_failed" + eos_thermal.update({"relaxation_task_id": d["relaxation_task_id"]}, + d, upsert=True) + return FWAction() diff --git a/mpworks/firetasks/vasp_io_tasks.py b/mpworks/firetasks/vasp_io_tasks.py index 03a59bd5..dc22729b 100644 --- a/mpworks/firetasks/vasp_io_tasks.py +++ b/mpworks/firetasks/vasp_io_tasks.py @@ -23,7 +23,7 @@ from mpworks.workflows.wf_settings import QA_VASP, QA_DB, MOVE_TO_GARDEN_PROD, MOVE_TO_GARDEN_DEV from 
mpworks.workflows.wf_utils import last_relax, get_loc, move_to_garden from pymatgen import Composition -from pymatgen.io.vaspio.vasp_input import Incar, Poscar, Potcar, Kpoints +from pymatgen.io.vasp.inputs import Incar, Poscar, Potcar, Kpoints from pymatgen.matproj.snl import StructureNL __author__ = 'Anubhav Jain' @@ -42,10 +42,10 @@ class VaspWriterTask(FireTaskBase, FWSerializable): _fw_name = "Vasp Writer Task" def run_task(self, fw_spec): - Incar.from_dict(fw_spec['vasp']['incar']).write_file('INCAR') - Poscar.from_dict(fw_spec['vasp']['poscar']).write_file('POSCAR') - Potcar.from_dict(fw_spec['vasp']['potcar']).write_file('POTCAR') - Kpoints.from_dict(fw_spec['vasp']['kpoints']).write_file('KPOINTS') + fw_spec['vasp']['incar'].write_file('INCAR') + fw_spec['vasp']['poscar'].write_file('POSCAR') + fw_spec['vasp']['potcar'].write_file('POTCAR') + fw_spec['vasp']['kpoints'].write_file('KPOINTS') class VaspCopyTask(FireTaskBase, FWSerializable): @@ -159,16 +159,14 @@ def run_task(self, fw_spec): sh = logging.StreamHandler(stream=sys.stdout) sh.setLevel(getattr(logging, 'INFO')) logger.addHandler(sh) - with open(db_path) as f: db_creds = json.load(f) - drone = MPVaspDrone( - host=db_creds['host'], port=db_creds['port'], - database=db_creds['database'], user=db_creds['admin_user'], - password=db_creds['admin_password'], - collection=db_creds['collection'], parse_dos=parse_dos, - additional_fields=self.additional_fields, - update_duplicates=self.update_duplicates) + drone = MPVaspDrone(host=db_creds['host'], port=db_creds['port'], + database=db_creds['database'], user=db_creds['admin_user'], + password=db_creds['admin_password'], + collection=db_creds['collection'], parse_dos=parse_dos, + additional_fields=self.additional_fields, + update_duplicates=self.update_duplicates) t_id, d = drone.assimilate(prev_dir, launches_coll=LaunchPad.auto_load().launches) mpsnl = d['snl_final'] if 'snl_final' in d else d['snl'] @@ -176,14 +174,20 @@ def run_task(self, fw_spec): 
update_spec.update({'mpsnl': mpsnl, 'snlgroup_id': snlgroup_id}) print 'ENTERED task id:', t_id + print('d[state] = ', d['state']) + print('d = ', d) stored_data = {'task_id': t_id} if d['state'] == 'successful': update_spec['analysis'] = d['analysis'] update_spec['output'] = d['output'] + update_spec['vasp']={'incar':d['calculations'][-1]['input']['incar'], + 'kpoints':d['calculations'][-1]['input']['kpoints']} + update_spec["task_id"]=t_id return FWAction(stored_data=stored_data, update_spec=update_spec) # not successful - first test to see if UnconvergedHandler is needed if not fizzled_parent: + print('Unsuccessful but not fizzled parent') unconverged_tag = 'unconverged_handler--{}'.format(fw_spec['prev_task_type']) output_dir = last_relax(os.path.join(prev_dir, 'vasprun.xml')) ueh = UnconvergedErrorHandler(output_filename=output_dir) @@ -198,7 +202,16 @@ def run_task(self, fw_spec): 'parameters': fw_spec.get('parameters'), '_dupefinder': DupeFinderVasp().to_dict(), '_priority': fw_spec['_priority']} - + # Pass elastic tensor spec +# if 'deformed' in fw_spec['task_type']: +# spec['deformation_matrix'] = fw_spec['deformation_matrix'] +# spec['original_task_id'] = fw_spec['original_task_id'] + if 'deformation_matrix' in fw_spec.keys(): + spec['deformation_matrix'] = fw_spec['deformation_matrix'] + spec['original_task_id'] = fw_spec['original_task_id'] + if 'strainfactor' in fw_spec.keys(): + spec['strainfactor'] = fw_spec['strainfactor'] + spec['original_task_id'] = fw_spec['original_task_id'] snl = StructureNL.from_dict(spec['mpsnl']) spec['run_tags'].append(unconverged_tag) spec['_queueadapter'] = QA_VASP @@ -218,15 +231,21 @@ def run_task(self, fw_spec): spec = {'task_type': 'VASP db insertion', '_allow_fizzled_parents': True, '_priority': fw_spec['_priority'], '_queueadapter': QA_DB, 'run_tags': list(fw_spec['run_tags'])} + if 'deformation_matrix' in fw_spec.keys(): + spec['deformation_matrix'] = fw_spec['deformation_matrix'] + spec['original_task_id'] = 
fw_spec['original_task_id'] + if 'strainfactor' in fw_spec.keys(): + spec['strainfactor'] = fw_spec['strainfactor'] + spec['original_task_id'] = fw_spec['original_task_id'] + spec['run_tags'].append(unconverged_tag) + fws.append( + Firework([VaspToDBTask()], spec, name=get_slug(f + '--' + spec['task_type']), + fw_id=-1)) + connections[-2] = -1 - + print('Unsuccessful task updated') wf = Workflow(fws, connections) return FWAction(detours=wf) # not successful and not due to convergence problem - FIZZLE - raise ValueError("DB insertion successful, but don't know how to fix this Firework! Can't continue with workflow...") \ No newline at end of file + raise ValueError("DB insertion successful, but don't know how to fix this Firework! Can't continue with workflow...") diff --git a/mpworks/workflows/snl_to_wf_eos_thermal.py b/mpworks/workflows/snl_to_wf_eos_thermal.py new file mode 100644 index 00000000..a33e961d --- /dev/null +++ b/mpworks/workflows/snl_to_wf_eos_thermal.py @@ -0,0 +1,121 @@ +from pymatgen.io.vasp.inputs import Poscar + +__author__ = 'Cormac Toher' + +from fireworks.core.firework import Firework, Workflow +from fireworks.utilities.fw_utilities import get_slug +from mpworks.firetasks.custodian_task import get_custodian_task +from mpworks.firetasks.snl_tasks import AddSNLTask +from mpworks.firetasks.vasp_io_tasks import VaspCopyTask, VaspWriterTask, \ + VaspToDBTask +from mpworks.firetasks.vasp_setup_tasks import SetupGGAUTask +from mpworks.snl_utils.mpsnl import get_meta_from_structure, MPStructureNL +from mpworks.workflows.wf_settings import QA_DB, QA_VASP, QA_CONTROL +from pymatgen import Composition +from mpworks.workflows import snl_to_wf +from mpworks.firetasks.phonon_tasks import update_spec_force_convergence +from mpworks.firetasks.eos_thermal_tasks import SetupFConvergenceTask, SetupEoSThermalTask, SetupModifiedVolumeStructTask, AddEoSThermalDataToDBTask + + +def snl_to_wf_eos_thermal(snl, parameters=None): + fws = [] + connections = {} + parameters = 
parameters if parameters else {} + print("Parameters = ", parameters) + poisson_val = parameters["poisson_ratio"] + print("snl_to_wf_eos_thermal: Poisson ratio = ", poisson_val) + + snl_priority = parameters.get('priority', 1) + priority = snl_priority * 2 # once we start a job, keep going! + + f = Composition(snl.structure.composition.reduced_formula).alphabetical_formula + + # add the SNL to the SNL DB and figure out duplicate group + tasks = [AddSNLTask()] + spec = {'task_type': 'Add to SNL database', 'snl': snl.as_dict(), + '_queueadapter': QA_DB, '_priority': snl_priority} + if 'snlgroup_id' in parameters and isinstance(snl, MPStructureNL): + spec['force_mpsnl'] = snl.as_dict() + spec['force_snlgroup_id'] = parameters['snlgroup_id'] + del spec['snl'] + fws.append(Firework(tasks, spec, name=get_slug(f + '--' + spec['task_type']), fw_id=0)) + connections[0] = [1] + + print("Running structure optimization before generating different volumes") + + parameters["exact_structure"] = True + # run GGA structure optimization for force convergence + spec = snl_to_wf._snl_to_spec(snl, parameters=parameters) + user_vasp_settings = parameters.get("user_vasp_settings") + spec = update_spec_force_convergence(spec, user_vasp_settings) + spec['run_tags'].append("origin") + spec['_priority'] = priority + spec['_queueadapter'] = QA_VASP + spec['task_type'] = "Vasp force convergence optimize structure (2x)" + tasks = [VaspWriterTask(), get_custodian_task(spec)] + fws.append(Firework(tasks, spec, name=get_slug(f + '--' + spec['task_type']), fw_id=1)) + + print("Setting up modified volumes") + + # insert into DB - GGA structure optimization + spec = {'task_type': 'VASP db insertion', '_priority': priority, + '_allow_fizzled_parents': True, '_queueadapter': QA_DB, 'clean_task_doc':True, + 'thermal_properties':"force_convergence"} + fws.append(Firework([VaspToDBTask()], spec, name=get_slug(f + '--' + spec['task_type']), fw_id=2)) + connections[1] = [2] + + spec = {'task_type': 'Setup 
Modified Volume Struct Task', '_priority': priority, + '_queueadapter': QA_CONTROL, 'poisson_ratio': poisson_val} + fws.append(Firework([SetupModifiedVolumeStructTask()], spec, + name=get_slug(f + '--' + spec['task_type']),fw_id=3)) + connections[2] = [3] + +# print("Calling EoS results to DB task") +# print("Setting up spec") + +# spec = {'task_type': 'Add EoS Thermal Data to DB Task', '_priority': priority, +# '_queueadapter': QA_CONTROL, 'poisson_ratio': poisson_val} +# print("Calling firetask") +# fws.append(Firework([AddEoSThermalDataToDBTask()], spec, +# name=get_slug(f + '--' + spec['task_type']),fw_id=99)) +# connections[3] = [99] + + wf_meta = get_meta_from_structure(snl.structure) + wf_meta['run_version'] = 'May 2013 (1)' + + if '_materialsproject' in snl.data and 'submission_id' in snl.data['_materialsproject']: + wf_meta['submission_id'] = snl.data['_materialsproject']['submission_id'] + + return Workflow(fws, connections, name=Composition( + snl.structure.composition.reduced_formula).alphabetical_formula, metadata=wf_meta) + + +def snl_to_wf_eos_thermal_DB(snl, parameters=None): + fws = [] + connections = {} + parameters = parameters if parameters else {} + print("Parameters = ", parameters) + poisson_val = parameters["poisson_ratio"] + print("snl_to_wf_eos_thermal: Poisson ratio = ", poisson_val) + original_task_id_val = parameters["original_task_id"] + + snl_priority = parameters.get('priority', 1) + priority = snl_priority * 2 # once we start a job, keep going! 
+ + f = Composition(snl.structure.composition.reduced_formula).alphabetical_formula + + spec = {'task_type': 'Add EoS Thermal Data to DB Task', '_priority': priority, + '_queueadapter': QA_CONTROL, 'poisson_ratio': poisson_val, 'original_task_id': original_task_id_val} + print("Calling firetask") + fws.append(Firework([AddEoSThermalDataToDBTask()], spec, + name=get_slug(f + '--' + spec['task_type']),fw_id=99)) +# connections[3] = [99] + + wf_meta = get_meta_from_structure(snl.structure) + wf_meta['run_version'] = 'May 2013 (1)' + + if '_materialsproject' in snl.data and 'submission_id' in snl.data['_materialsproject']: + wf_meta['submission_id'] = snl.data['_materialsproject']['submission_id'] + + return Workflow(fws, connections, name=Composition( + snl.structure.composition.reduced_formula).alphabetical_formula, metadata=wf_meta)