diff --git a/.gitignore b/.gitignore
index 5ed154bc1..60febdeb4 100644
--- a/.gitignore
+++ b/.gitignore
@@ -35,3 +35,5 @@ dpgen.egg-info
 */__pycache__
 *.swp
 .eggs
+.coverage
+dbconfig.json
diff --git a/README.md b/README.md
index efe8a0bfe..3ebd81725 100644
--- a/README.md
+++ b/README.md
@@ -75,12 +75,23 @@ and if everything works, it gives
 ```
 DeepModeling
 ------------
+Version: 0.5.1.dev53+gddbeee7.d20191020
+Date: Oct-07-2019
+Path: /home/me/miniconda3/envs/py363/lib/python3.6/site-packages/dpgen-0.5.1.dev53+gddbeee7.d20191020-py3.6.egg/dpgen
-Version: 0.2.0
-Path: /home/wanghan/.local/lib/python3.6/site-packages/dpgen-0.1.0-py3.6.egg/dpgen
-Date: Aug 13, 2019
-
-usage: dpgen [-h] {init_surf,init_bulk,run,test,db} ...
+Dependency
+------------
+ numpy 1.17.2 /home/me/miniconda3/envs/py363/lib/python3.6/site-packages/numpy
+ dpdata 0.1.10 /home/me/miniconda3/envs/py363/lib/python3.6/site-packages/dpdata-0.1.10-py3.6.egg/dpdata
+ pymatgen 2019.7.2 /home/me/miniconda3/envs/py363/lib/python3.6/site-packages/pymatgen
+ monty 2.0.4 /home/me/miniconda3/envs/py363/lib/python3.6/site-packages/monty
+ ase 3.17.0 /home/me/miniconda3/envs/py363/lib/python3.6/site-packages/ase-3.17.0-py3.6.egg/ase
+ paramiko 2.6.0 /home/me/miniconda3/envs/py363/lib/python3.6/site-packages/paramiko
+ custodian 2019.2.10 /home/me/miniconda3/envs/py363/lib/python3.6/site-packages/custodian
+
+Description
+------------
+usage: dpgen [-h] {init_surf,init_bulk,run,run/report,test,db} ...
 dpgen is a convenient script that uses DeepGenerator to prepare initial data, drive DeepMDkit and analyze results. This script works based on several sub-
@@ -88,16 +99,18 @@ commands with their own options. To see the options for the sub-commands, type "dpgen sub-command -h".
 positional arguments:
- {init_surf,init_bulk,run,test,db}
+ {init_surf,init_bulk,run,run/report,test,db}
 init_surf Generating initial data for surface systems.
 init_bulk Generating initial data for bulk systems.
- run Main process of Deep Generator.
+ run Main process of Deep Potential Generator.
+ run/report Report the systems and the thermodynamic conditions of
+ the labeled frames.
 test Auto-test for Deep Potential.
- db Collecting data from DP-GEN.
-
+ db Collecting data from Deep Generator.
 optional arguments: -h, --help show this help message and exit
+
 ```
@@ -108,8 +121,10 @@ optional arguments:
 You may prepare initial data for bulk systems with VASP by:
 ```bash
-dpgen init_bulk PARAM MACHINE
+dpgen init_bulk PARAM [MACHINE]
 ```
+The MACHINE configuration file is optional. If it is provided, the optimization
+tasks or MD tasks will be submitted automatically according to MACHINE.json.
 Basically `init_bulk` can be divided into four parts, denoted as `stages` in `PARAM`:
 1. Relax in folder `00.place_ele`
@@ -119,6 +134,8 @@ Basically `init_bulk` can be divided into four parts, denoted as `stages` in `P
 All stages must be **in order**. One doesn't need to run all stages. For example, you may run stages 1 and 2, generating supercells as the starting point of exploration in `dpgen run`.
+If MACHINE is not provided, there should be only one stage in `stages`. The corresponding tasks will be generated, but the user has to run the scripts manually.
+
 Following is an example for `PARAM`, which generates data from a typical structure hcp.
 ```json
 {
@@ -176,8 +193,10 @@ The bold notation of key (such as **Elements**) means that it's a necessary key.
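Aside on the `init_bulk` stage rules documented above: the two constraints (stages must be given in order, and a run without a MACHINE file should cover a single, manually executed stage) can be condensed into a short sketch. The helper below is hypothetical and written for illustration only — it is not part of dpgen, and the only assumption it makes about `PARAM` is the `stages` key described in this README.

```python
import json

def check_stages(param_file, have_machine):
    """Illustrative sanity check (not part of dpgen) for the rules above:
    stages must be listed in order, and without a MACHINE file only a
    single stage should be requested per invocation."""
    with open(param_file) as fp:
        stages = json.load(fp)["stages"]
    if stages != sorted(stages):
        raise ValueError("stages must be given in order, got %s" % stages)
    if not have_machine and len(stages) != 1:
        raise ValueError("run one stage at a time when no MACHINE file is given")
    return stages

# e.g. check_stages("PARAM", have_machine=False) accepts [2] but rejects [1, 2]
```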
 You may prepare initial data for surface systems with VASP by:
 ```bash
-dpgen init_surf PARAM MACHINE
+dpgen init_surf PARAM [MACHINE]
 ```
+The MACHINE configuration file is optional. If it is provided, the optimization
+tasks or MD tasks will be submitted automatically according to MACHINE.json.
 Basically `init_surf` can be divided into two parts, denoted as `stages` in `PARAM`:
 1. Build specific surface in folder `00.place_ele`
 2. Perturb and scale in folder `01.scale_pert`
 Following is an example for `PARAM`, which generates data from a typical structu
@@ -199,7 +218,7 @@ Following is an example for `PARAM`, which generates data from a typical structu
 2,
 2
 ],
- "z_min": 9,
+ "layer_numb": 3,
 "vacuum_max": 9,
 "vacuum_resol": [
 0.5,
@@ -251,7 +270,7 @@ The bold notation of key (such as **Elements**) means that it's a necessary key.
 | **Elements** | List of String | ["Mg"] | Atom types
 | **cell_type** | String | "hcp" | Specifying which typical structure to be generated. **Options** include fcc, hcp, bcc, sc, diamond.
 | **latt** | Float | 4.479 | Lattice constant for single cell.
-| **z_min** | Float | 9 | Thickness of slab (Angstrom).
+| **layer_numb** | Integer | 3 | Number of equivalent layers of the slab.
 | **vacuum_max** | Float | 9 | Maximal thickness of vacuum (Angstrom).
 | **vacuum_resol** | List of float | [0.5, 1 ] | Interval of thickness of vacuum. If size of `vacuum_resol` is 1, the interval is fixed to its value. If size of `vacuum_resol` is 2, the interval is `vacuum_resol[0]` before `mid_point`, otherwise `vacuum_resol[1]` after `mid_point`.
 | **millers** | List of list of Integer | [[1,0,0]] | Miller indices.
@@ -431,6 +450,7 @@ The bold notation of key (such as **type_map**) means that it's a necessary key
 | *#Basics*
 | **type_map** | List of string | ["H", "C"] | Atom types
 | **mass_map** | List of float | [1, 12] | Standard atom weights.
+| **use_ele_temp** | int | 0 | Currently only supports fp_style vasp. 0 (default): no electron temperature. 1: electron temperature as frame parameter. 2: electron temperature as atom parameter.
 | *#Data*
 | init_data_prefix | String | "/sharedext4/.../data/" | Prefix of initial data directories
 | ***init_data_sys*** | List of string | ["CH4.POSCAR.01x01x01/.../deepmd"] | Directories of initial data. You may use either absolute or relative path here.
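Aside on the `vacuum_resol`/`mid_point` rule in the init_surf table above, since it is the least obvious entry in that hunk: the sketch below only illustrates the interval logic as described there and is not dpgen's implementation — the function name, the starting thickness of 0 and the use of numpy are assumptions made for this example.

```python
import numpy as np

def vacuum_thicknesses(vacuum_max, vacuum_resol, mid_point=None):
    """Illustrative sketch (not dpgen code) of the vacuum grid described above:
    one entry in vacuum_resol gives a uniform interval up to vacuum_max;
    two entries use vacuum_resol[0] below mid_point and vacuum_resol[1] above it."""
    if len(vacuum_resol) == 1:
        return np.arange(0.0, vacuum_max, vacuum_resol[0])
    fine = np.arange(0.0, mid_point, vacuum_resol[0])
    coarse = np.arange(mid_point, vacuum_max, vacuum_resol[1])
    return np.concatenate([fine, coarse])

# e.g. vacuum_thicknesses(9, [0.5, 1], mid_point=4)
#      -> 0.0, 0.5, ..., 3.5, 4.0, 5.0, ..., 8.0
```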
diff --git a/dpgen/.gitignore b/dpgen/.gitignore new file mode 100644 index 000000000..cca703c23 --- /dev/null +++ b/dpgen/.gitignore @@ -0,0 +1,2 @@ +_date.py +_version.py diff --git a/dpgen/generator/tools/__init__.py b/dpgen/auto_test/__init__.py similarity index 100% rename from dpgen/generator/tools/__init__.py rename to dpgen/auto_test/__init__.py diff --git a/dpgen/auto_test/cmpt_00_equi.py b/dpgen/auto_test/cmpt_00_equi.py index cc03c2ae3..a716eb0c6 100755 --- a/dpgen/auto_test/cmpt_00_equi.py +++ b/dpgen/auto_test/cmpt_00_equi.py @@ -47,7 +47,7 @@ def comput_lmp_nev(conf_dir, task_name, write_stable = False) : return None, None, None def comput_vasp_nev(jdata, conf_dir, write_stable = False) : - kspacing = jdata['vasp_params']['kspacing'] + conf_path = re.sub('confs', global_equi_name, conf_dir) conf_path = os.path.abspath(conf_path) poscar = os.path.join(conf_path, 'POSCAR') @@ -59,6 +59,7 @@ def comput_vasp_nev(jdata, conf_dir, write_stable = False) : if 'relax_incar' in jdata.keys(): vasp_str='vasp-relax_incar' else: + kspacing = jdata['vasp_params']['kspacing'] vasp_str='vasp-k%.2f' % kspacing ener_shift = comput_e_shift(poscar, vasp_str) diff --git a/dpgen/auto_test/cmpt_01_eos.py b/dpgen/auto_test/cmpt_01_eos.py index 7c73ae057..f5670c422 100755 --- a/dpgen/auto_test/cmpt_01_eos.py +++ b/dpgen/auto_test/cmpt_01_eos.py @@ -1,22 +1,31 @@ #!/usr/bin/env python3 import os, glob, argparse, json, re +import dpgen.auto_test.lib.util as util import dpgen.auto_test.lib.lammps as lammps import dpgen.auto_test.lib.vasp as vasp global_task_name = '01.eos' -def comput_lmp_eos(conf_dir, task_name) : +def comput_lmp_eos(jdata,conf_dir, task_name) : conf_path = re.sub('confs', global_task_name, conf_dir) conf_path = os.path.abspath(conf_path) conf_path = os.path.join(conf_path, task_name) vol_paths = glob.glob(os.path.join(conf_path, 'vol-*')) vol_paths.sort() + result = os.path.join(conf_path,'result') print('Vpa(A^3)\tEpA(eV)') - for ii in vol_paths : - log_lammps = os.path.join(ii, 'log.lammps') - natoms, epa, vpa = lammps.get_nev(log_lammps) - print(vpa, epa) + with open(result,'w') as fp: + fp.write('conf_dir:%s\n VpA(A^3) EpA(eV)\n'% (conf_dir)) + for ii in vol_paths : + log_lammps = os.path.join(ii, 'log.lammps') + natoms, epa, vpa = lammps.get_nev(log_lammps) + print(vpa, epa) + fp.write('%7.3f %8.4f \n' % (vpa,epa)) + fp.close() + if 'upload_username' in jdata.keys() and task_name =='deepmd': + upload_username=jdata['upload_username'] + util.insert_data('eos','deepmd',upload_username,result) def comput_vasp_eos(jdata, conf_dir) : conf_path = re.sub('confs', global_task_name, conf_dir) @@ -30,17 +39,25 @@ def comput_vasp_eos(jdata, conf_dir) : task_path = os.path.join(conf_path, vasp_str) vol_paths = glob.glob(os.path.join(task_path, 'vol-*')) vol_paths.sort() + result = os.path.join(task_path,'result') print('Vpa(A^3)\tEpA(eV)') - for ii in vol_paths : - outcar = os.path.join(ii, 'OUTCAR') - natoms, epa, vpa = vasp.get_nev(outcar) - print(vpa, epa) + with open(result,'w') as fp: + fp.write('conf_dir:%s\n VpA(A^3) EpA(eV)\n'% (conf_dir)) + for ii in vol_paths : + outcar = os.path.join(ii, 'OUTCAR') + natoms, epa, vpa = vasp.get_nev(outcar) + print(vpa, epa) + fp.write('%7.3f %8.4f \n' % (vpa,epa)) + fp.close() + if 'upload_username' in jdata.keys(): + upload_username=jdata['upload_username'] + util.insert_data('eos','vasp',upload_username,result) def _main(): parser = argparse.ArgumentParser( description="cmpt 01.eos") parser.add_argument('TASK', type=str, - choices = ['vasp', 
'deepmd', 'meam'], + choices = ['vasp', 'deepmd', 'meam'], help='the task of generation, vasp or lammps') parser.add_argument('PARAM', type=str, help='json parameter file') @@ -52,14 +69,13 @@ def _main(): jdata = json.load (fp) if args.TASK == 'vasp': - comput_vasp_eos(jdata, args.CONF) + comput_vasp_eos(jdata, args.CONF) elif args.TASK == 'deepmd' : - comput_lmp_eos(args.CONF, args.TASK) + comput_lmp_eos(jdata,args.CONF, args.TASK) elif args.TASK == 'meam' : - comput_lmp_eos(args.CONF, args.TASK) + comput_lmp_eos(jdata,args.CONF, args.TASK) else : raise RuntimeError("unknow task ", args.TASK) if __name__ == '__main__' : _main() - diff --git a/dpgen/auto_test/cmpt_02_elastic.py b/dpgen/auto_test/cmpt_02_elastic.py index e1a93cf3f..fc3480110 100755 --- a/dpgen/auto_test/cmpt_02_elastic.py +++ b/dpgen/auto_test/cmpt_02_elastic.py @@ -5,6 +5,7 @@ import numpy as np import dpgen.auto_test.lib.vasp as vasp import dpgen.auto_test.lib.lammps as lammps +import dpgen.auto_test.lib.util as util from pymatgen.analysis.elasticity.elastic import ElasticTensor from pymatgen.analysis.elasticity.strain import Strain from pymatgen.analysis.elasticity.stress import Stress @@ -12,7 +13,25 @@ global_equi_name = '00.equi' global_task_name = '02.elastic' -def print_et (et): +def result_et(et,conf_dir,result): + with open(result,'w') as fp: + fp.write('conf_dir:%s\n'% (conf_dir)) + for ii in range(6) : + for jj in range(6) : + fp.write ("%7.2f " % (et.voigt[ii][jj] / 1e4)) + fp.write('\n') + BV = et.k_voigt / 1e4 + GV = et.g_voigt / 1e4 + EV = 9*BV*GV/(3*BV+GV) + uV = 0.5*(3*BV-2*GV)/(3*BV+GV) + fp.write("# Bulk Modulus BV = %.2f GPa\n" % (BV)) + fp.write("# Shear Modulus GV = %.2f GPa\n" % (GV)) + fp.write("# Youngs Modulus EV = %.2f GPa\n" % (EV)) + fp.write("# Poission Ratio uV = %.2f \n" % (uV)) + fp.close() + + +def print_et (et): for ii in range(6) : for jj in range(6) : sys.stdout.write ("%7.2f " % (et.voigt[ii][jj] / 1e4)) @@ -27,17 +46,15 @@ def print_et (et): print("# Poission Ratio uV = %.2f " % (uV)) def cmpt_vasp(jdata, conf_dir) : - fp_params = jdata['vasp_params'] - kspacing = fp_params['kspacing'] - kgamma = fp_params['kgamma'] - conf_path = os.path.abspath(conf_dir) conf_poscar = os.path.join(conf_path, 'POSCAR') task_path = re.sub('confs', global_task_name, conf_path) if 'relax_incar' in jdata.keys(): vasp_str='vasp-relax_incar' else: - vasp_str='vasp-k%.2f' % kspacing + fp_params = jdata['vasp_params'] + kspacing = fp_params['kspacing'] + vasp_str='vasp-k%.2f' % kspacing task_path = os.path.join(task_path, vasp_str) equi_stress = Stress(np.loadtxt(os.path.join(task_path, 'equi.stress.out'))) @@ -55,14 +72,16 @@ def cmpt_vasp(jdata, conf_dir) : et = ElasticTensor.from_independent_strains(lst_strain, lst_stress, eq_stress = equi_stress, vasp = False) # et = ElasticTensor.from_independent_strains(lst_strain, lst_stress, eq_stress = None) # bar to GPa - # et = -et / 1e4 + # et = -et / 1e4 print_et(et) + result = os.path.join(task_path,'result') + result_et(et,conf_dir,result) + if 'upload_username' in jdata.keys(): + upload_username=jdata['upload_username'] + util.insert_data('elastic','vasp',upload_username,result) -def cmpt_deepmd_lammps(jdata, conf_dir, task_name) : - deepmd_model_dir = jdata['deepmd_model_dir'] - deepmd_type_map = jdata['deepmd_type_map'] - ntypes = len(deepmd_type_map) +def cmpt_deepmd_lammps(jdata, conf_dir, task_name) : conf_path = os.path.abspath(conf_dir) conf_poscar = os.path.join(conf_path, 'POSCAR') task_path = re.sub('confs', global_task_name, conf_path) @@ -82,8 
+101,15 @@ def cmpt_deepmd_lammps(jdata, conf_dir, task_name) : et = ElasticTensor.from_independent_strains(lst_strain, lst_stress, eq_stress = equi_stress, vasp = False) # et = ElasticTensor.from_independent_strains(lst_strain, lst_stress, eq_stress = None) # bar to GPa - # et = -et / 1e4 + # et = -et / 1e4 print_et(et) + result = os.path.join(task_path,'result') + result_et(et,conf_dir,task_path) + if 'upload_username' in jdata.keys() and task_name=='deepmd': + upload_username=jdata['upload_username'] + util.insert_data('elastic','deepmd',upload_username,result) + + def _main() : parser = argparse.ArgumentParser( @@ -101,15 +127,13 @@ def _main() : print('# generate %s task with conf %s' % (args.TASK, args.CONF)) if args.TASK == 'vasp': - cmpt_vasp(jdata, args.CONF) + cmpt_vasp(jdata, args.CONF) elif args.TASK == 'deepmd' : cmpt_deepmd_lammps(jdata, args.CONF, args.TASK) elif args.TASK == 'meam' : cmpt_deepmd_lammps(jdata, args.CONF, args.TASK) else : raise RuntimeError("unknow task ", args.TASK) - + if __name__ == '__main__' : _main() - - diff --git a/dpgen/auto_test/cmpt_03_vacancy.py b/dpgen/auto_test/cmpt_03_vacancy.py index e6fc93520..0c3c201e5 100755 --- a/dpgen/auto_test/cmpt_03_vacancy.py +++ b/dpgen/auto_test/cmpt_03_vacancy.py @@ -26,8 +26,6 @@ def comput_e_shift(poscar, task_name) : return ener_shift def cmpt_vasp(jdata, conf_dir, supercell) : - fp_params = jdata['vasp_params'] - kspacing = fp_params['kspacing'] if 'relax_incar' in jdata.keys(): vasp_str='vasp-relax_incar' diff --git a/dpgen/auto_test/cmpt_04_interstitial.py b/dpgen/auto_test/cmpt_04_interstitial.py index b6a39c4af..c87ef741e 100755 --- a/dpgen/auto_test/cmpt_04_interstitial.py +++ b/dpgen/auto_test/cmpt_04_interstitial.py @@ -17,8 +17,6 @@ def cmpt_vasp(jdata, conf_dir, supercell, insert_ele) : _cmpt_vasp(jdata, conf_dir, supercell, ii) def _cmpt_vasp(jdata, conf_dir, supercell, insert_ele) : - fp_params = jdata['vasp_params'] - kspacing = fp_params['kspacing'] if 'relax_incar' in jdata.keys(): vasp_str='vasp-relax_incar' @@ -56,12 +54,10 @@ def cmpt_deepmd_reprod_traj(jdata, conf_dir, supercell, insert_ele, task_name) : _cmpt_deepmd_reprod_traj(jdata, conf_dir, supercell, ii, task_name) def _cmpt_deepmd_reprod_traj(jdata, conf_dir, supercell, insert_ele, task_name) : - fp_params = jdata['vasp_params'] - kspacing = fp_params['kspacing'] - if 'relax_incar' in jdata.keys(): vasp_str='vasp-relax_incar' else: + kspacing = jdata['vasp_params']['kspacing'] vasp_str='vasp-k%.2f' % kspacing conf_path = os.path.abspath(conf_dir) diff --git a/dpgen/auto_test/cmpt_05_surf.py b/dpgen/auto_test/cmpt_05_surf.py index 672095c45..2cb1e40f2 100755 --- a/dpgen/auto_test/cmpt_05_surf.py +++ b/dpgen/auto_test/cmpt_05_surf.py @@ -3,22 +3,19 @@ import os, re, argparse, filecmp, json, glob, sys import subprocess as sp import numpy as np +import dpgen.auto_test.lib.util as util import dpgen.auto_test.lib.vasp as vasp import dpgen.auto_test.lib.lammps as lammps -from pymatgen.analysis.elasticity.elastic import ElasticTensor -from pymatgen.analysis.elasticity.strain import Strain -from pymatgen.analysis.elasticity.stress import Stress global_equi_name = '00.equi' global_task_name = '05.surf' def cmpt_vasp(jdata, conf_dir, static = False) : - fp_params = jdata['vasp_params'] - kspacing = fp_params['kspacing'] if 'relax_incar' in jdata.keys(): vasp_str='vasp-relax_incar' - else: + else: + kspacing = jdata['vasp_params']['kspacing'] vasp_str='vasp-k%.2f' % (kspacing) equi_path = re.sub('confs', global_equi_name, conf_dir) @@ -30,6 +27,7 
@@ def cmpt_vasp(jdata, conf_dir, static = False) : if 'scf_incar' in jdata.keys(): vasp_static_str='vasp-static-scf_incar' else: + kspacing = jdata['vasp_params']['kspacing'] vasp_static_str='vasp-static-k%.2f' % (kspacing) task_path = os.path.join(task_path, vasp_static_str) else : @@ -44,18 +42,28 @@ def cmpt_vasp(jdata, conf_dir, static = False) : if len(struct_path_list) == 0: print("# cannot find results for conf %s" % (conf_dir)) sys.stdout.write ("Miller_Indices: \tSurf_E(J/m^2) EpA(eV) equi_EpA(eV)\n") - for ii in struct_path_list : - structure_dir = os.path.basename(ii) - outcar = os.path.join(ii, 'OUTCAR') - natoms, epa, vpa = vasp.get_nev(outcar) - if static : - e0 = np.array(vasp.get_energies(outcar)) / natoms - epa = e0[0] - boxes = vasp.get_boxes(outcar) - AA = np.linalg.norm(np.cross(boxes[0][0], boxes[0][1])) - Cf = 1.60217657e-16 / (1e-20 * 2) * 0.001 - evac = (epa * natoms - equi_epa * natoms) / AA * Cf - sys.stdout.write ("%s:\t %7.3f %8.3f %8.3f\n" % (structure_dir, evac, epa, equi_epa)) + + result = os.path.join(task_path,'result') + with open(result,'w') as fp: + fp.write('conf_dir:%s\n'% (conf_dir)) + fp.write("Miller_Indices: \tSurf_E(J/m^2) EpA(eV) equi_EpA(eV)\n") + for ii in struct_path_list : + structure_dir = os.path.basename(ii) + outcar = os.path.join(ii, 'OUTCAR') + natoms, epa, vpa = vasp.get_nev(outcar) + if static : + e0 = np.array(vasp.get_energies(outcar)) / natoms + epa = e0[0] + boxes = vasp.get_boxes(outcar) + AA = np.linalg.norm(np.cross(boxes[0][0], boxes[0][1])) + Cf = 1.60217657e-16 / (1e-20 * 2) * 0.001 + evac = (epa * natoms - equi_epa * natoms) / AA * Cf + sys.stdout.write ("%s:\t %7.3f %8.3f %8.3f\n" % (structure_dir, evac, epa, equi_epa)) + fp.write("%s:\t %7.3f %8.3f %8.3f\n" % (structure_dir, evac, epa, equi_epa)) + fp.close() + if 'upload_username' in jdata.keys(): + upload_username=jdata['upload_username'] + util.insert_data('surf','vasp',upload_username,result) def cmpt_deepmd_lammps(jdata, conf_dir, task_name, static = False) : equi_path = re.sub('confs', global_equi_name, conf_dir) @@ -74,14 +82,23 @@ def cmpt_deepmd_lammps(jdata, conf_dir, task_name, static = False) : if len(struct_path_list) == 0: print("# cannot find results for conf %s" % (conf_dir)) sys.stdout.write ("Miller_Indices: \tSurf_E(J/m^2) EpA(eV) equi_EpA(eV)\n") - for ii in struct_path_list : - structure_dir = os.path.basename(ii) - lmp_log = os.path.join(ii, 'log.lammps') - natoms, epa, vpa = lammps.get_nev(lmp_log) - AA = lammps.get_base_area(lmp_log) - Cf = 1.60217657e-16 / (1e-20 * 2) * 0.001 - evac = (epa * natoms - equi_epa * natoms) / AA * Cf - sys.stdout.write ("%s: \t%7.3f %8.3f %8.3f\n" % (structure_dir, evac, epa, equi_epa)) + result = os.path.join(task_path,'result') + with open(result,'w') as fp: + fp.write('conf_dir:%s\n'% (conf_dir)) + fp.write("Miller_Indices: \tSurf_E(J/m^2) EpA(eV) equi_EpA(eV)\n") + for ii in struct_path_list : + structure_dir = os.path.basename(ii) + lmp_log = os.path.join(ii, 'log.lammps') + natoms, epa, vpa = lammps.get_nev(lmp_log) + AA = lammps.get_base_area(lmp_log) + Cf = 1.60217657e-16 / (1e-20 * 2) * 0.001 + evac = (epa * natoms - equi_epa * natoms) / AA * Cf + sys.stdout.write ("%s: \t%7.3f %8.3f %8.3f\n" % (structure_dir, evac, epa, equi_epa)) + fp.write("%s:\t %7.3f %8.3f %8.3f\n" % (structure_dir, evac, epa, equi_epa)) + fp.close() + if 'upload_username' in jdata.keys() and task_name=='deepm': + upload_username=jdata['upload_username'] + util.insert_data('surf','deepmd',upload_username,result) def _main() : parser = 
argparse.ArgumentParser( @@ -99,9 +116,9 @@ def _main() : print('# generate %s task with conf %s' % (args.TASK, args.CONF)) if args.TASK == 'vasp': - cmpt_vasp(jdata, args.CONF) + cmpt_vasp(jdata, args.CONF) elif args.TASK == 'vasp-static': - cmpt_vasp(jdata, args.CONF, static = True) + cmpt_vasp(jdata, args.CONF, static = True) elif args.TASK == 'deepmd' : cmpt_deepmd_lammps(jdata, args.CONF, args.TASK) elif args.TASK == 'deepmd-static' : @@ -112,8 +129,6 @@ def _main() : cmpt_deepmd_lammps(jdata, args.CONF, args.TASK, static = True) else : raise RuntimeError("unknow task ", args.TASK) - + if __name__ == '__main__' : _main() - - diff --git a/dpgen/auto_test/cmpt_06_phonon.py b/dpgen/auto_test/cmpt_06_phonon.py index 92395bb8e..cefa615fe 100644 --- a/dpgen/auto_test/cmpt_06_phonon.py +++ b/dpgen/auto_test/cmpt_06_phonon.py @@ -74,17 +74,12 @@ def cmpt_vasp(jdata, conf_dir) : -def cmpt_deepmd_lammps(jdata, conf_dir) : - deepmd_model_dir = jdata['deepmd_model_dir'] - deepmd_type_map = jdata['deepmd_type_map'] - ntypes = len(deepmd_type_map) - deepmd_model_dir = os.path.abspath(deepmd_model_dir) - deepmd_models = glob.glob(os.path.join(deepmd_model_dir, '*pb')) +def cmpt_lammps(jdata, conf_dir, task_type) : supercell_matrix=jdata['supercell_matrix'] conf_path = os.path.abspath(conf_dir) task_path = re.sub('confs', global_task_name, conf_path) - task_path = os.path.join(task_path, 'deepmd') + task_path = os.path.join(task_path, task_type) task_poscar = os.path.join(task_path, 'POSCAR') os.chdir(task_path) @@ -112,10 +107,8 @@ def _main() : # print('generate %s task with conf %s' % (args.TASK, args.CONF)) if args.TASK == 'vasp': cmpt_vasp(jdata, args.CONF) - elif args.TASK == 'deepmd' : - cmpt_deepmd_lammps(jdata, args.CONF) - elif args.TASK == 'meam' : - cmpt_meam_lammps(jdata, args.CONF) + elif args.TASK == 'deepmd' or args.TASK =='meam' : + cmpt_lammps(jdata, args.CONF,args.TASK) else : raise RuntimeError("unknow task ", args.TASK) diff --git a/dpgen/auto_test/cmpt_08_dislocation.py b/dpgen/auto_test/cmpt_08_dislocation.py deleted file mode 100644 index 175c1fd07..000000000 --- a/dpgen/auto_test/cmpt_08_dislocation.py +++ /dev/null @@ -1,117 +0,0 @@ -#!/usr/bin/env python3 - -import os, re, argparse, filecmp, json, glob, sys -import subprocess as sp -import numpy as np -import dpgen.auto_test.lib.vasp as vasp -import dpgen.auto_test.lib.lammps as lammps - - -global_equi_name = '00.equi' -global_task_name = '08.dislocation' - -def comput_e_shift(poscar, task_name) : - a_types = vasp.get_poscar_types(poscar) - a_natoms = vasp.get_poscar_natoms(poscar) - ener_shift = 0 - if not os.path.isdir('stables') : - raise RuntimeError('no dir "stable". Stable energy and volume of components should be computed before calculating formation energy of an alloy') - for ii in range(len(a_types)) : - ref_e_file = a_types[ii] + '.' 
+ task_name + '.e' - ref_e_file = os.path.join('stables', ref_e_file) - ener = float(open(ref_e_file).read()) - ener_shift += a_natoms[ii] * ener - return ener_shift - -def cmpt_vasp(jdata, conf_dir, supercell) : - fp_params = jdata['vasp_params'] - kspacing = fp_params['kspacing'] - - equi_path = re.sub('confs', global_equi_name, conf_dir) - equi_path = os.path.join(equi_path, 'vasp-k%.2f' % kspacing) - equi_path = os.path.abspath(equi_path) - equi_outcar = os.path.join(equi_path, 'OUTCAR') - task_path = re.sub('confs', global_task_name, conf_dir) - task_path = os.path.join(task_path, 'vasp-k%.2f' % kspacing) - task_path = os.path.abspath(task_path) - print("# ", task_path) - - equi_natoms, equi_epa, equi_vpa = vasp.get_nev(equi_outcar) - - copy_str = "%sx%sx%s" % (supercell[0], supercell[1], supercell[2]) - struct_path_widecard = os.path.join(task_path, 'struct-%s-*' % (copy_str)) - struct_path_list = glob.glob(struct_path_widecard) - struct_path_list.sort() - if len(struct_path_list) == 0: - print("# cannot find results for conf %s supercell %s" % (conf_dir, supercell)) - sys.stdout.write ("Structure: \tVac_E(eV) E(eV) equi_E(eV)\n") - for ii in struct_path_list : - struct_poscar = os.path.join(ii, 'POSCAR') - energy_shift = comput_e_shift(struct_poscar, 'vasp-k%.2f' % kspacing) - structure_dir = os.path.basename(ii) - outcar = os.path.join(ii, 'OUTCAR') - natoms, epa, vpa = vasp.get_nev(outcar) - evac = epa * natoms - equi_epa * natoms - sys.stdout.write ("%s: %7.3f %7.3f %7.3f \n" % (structure_dir, evac, epa * natoms, equi_epa*natoms)) - # evac = epa * natoms - energy_shift - # sys.stdout.write ("%s: %7.3f %7.3f %7.3f \n" % (structure_dir, evac, epa * natoms, energy_shift)) - # sys.stdout.write ("%s: %7.3f \n" % (structure_dir, evac)) - -def cmpt_lammps(jdata, conf_dir, supercell, task_name) : - equi_path = re.sub('confs', global_equi_name, conf_dir) - equi_path = os.path.join(equi_path, task_name) - equi_path = os.path.abspath(equi_path) - equi_log = os.path.join(equi_path, 'log.lammps') - task_path = re.sub('confs', global_task_name, conf_dir) - task_path = os.path.join(task_path, task_name) - task_path = os.path.abspath(task_path) - print("# ", task_path) - - equi_natoms, equi_epa, equi_vpa = lammps.get_nev(equi_log) - - copy_str = "%sx%sx%s" % (supercell[0], supercell[1], supercell[2]) - struct_path_widecard = os.path.join(task_path, 'struct-%s-*' % (copy_str)) - struct_path_list = glob.glob(struct_path_widecard) - struct_path_list.sort() - if len(struct_path_list) == 0: - print("# cannot find results for conf %s supercell %s" % (conf_dir, supercell)) - sys.stdout.write ("Structure: \tVac_E(eV) E(eV) equi_E(eV)\n") - for ii in struct_path_list : - struct_poscar = os.path.join(ii, 'POSCAR') - energy_shift = comput_e_shift(struct_poscar, task_name) - structure_dir = os.path.basename(ii) - lmp_log = os.path.join(ii, 'log.lammps') - natoms, epa, vpa = lammps.get_nev(lmp_log) - evac = epa * natoms - equi_epa * natoms - sys.stdout.write ("%s: %7.3f %7.3f %7.3f \n" % (structure_dir, evac, epa * natoms, equi_epa * natoms)) - # evac = epa * natoms - energy_shift - # sys.stdout.write ("%s: %7.3f %7.3f %7.3f \n" % (structure_dir, evac, epa * natoms, energy_shift)) - # sys.stdout.write ("%s: %7.3f\n" % (structure_dir, evac)) - -def _main() : - parser = argparse.ArgumentParser( - description="cmpt 08.dislocation") - parser.add_argument('TASK', type=str, - help='the task of generation, vasp or lammps') - parser.add_argument('PARAM', type=str, - help='json parameter file') - 
parser.add_argument('CONF', type=str, - help='the path to conf') - parser.add_argument('COPY', type=int, nargs = 3, - help='the path to conf') - args = parser.parse_args() - - with open (args.PARAM, 'r') as fp : - jdata = json.load (fp) - -# print('# generate %s task with conf %s' % (args.TASK, args.CONF)) - if args.TASK == 'vasp': - cmpt_vasp(jdata, args.CONF, args.COPY) - elif args.TASK == 'deepmd' or args.TASK =='meam' : - cmpt_lammps(jdata, args.CONF, args.COPY, args.TASK) - else : - raise RuntimeError("unknow task ", args.TASK) - -if __name__ == '__main__' : - _main() - diff --git a/dpgen/auto_test/commands b/dpgen/auto_test/commands deleted file mode 100644 index 5a057a1a6..000000000 --- a/dpgen/auto_test/commands +++ /dev/null @@ -1,164 +0,0 @@ -#!/bin/bash - -############################## -# 00 -############################## -# gen 00 equi -for ii in confs/*/mp* confs/*/std*; do ./gen_00_equi.py deepmd param.json $ii; done - -# run 00 equi -cwd=`pwd`; -for ii in 00.equi/*/std*/deepmd 00.equi/*/mp*/deepmd; do cd $ii; pwd; lmp_mpi_010 -i lammps.in; cd $cwd; done - -# result of 00, print with k0.16 -out=results/00.al.k0.16.out; rm -f $out; for ii in 00.equi/Al/*; do ./cmpt_00_equi.py param-k0.16.json $ii >> $out; done -out=results/00.mg.k0.16.out; rm -f $out; for ii in 00.equi/Mg/*; do ./cmpt_00_equi.py param-k0.16.json $ii >> $out; done -out=results/00.mgal.k0.16.out; rm -f $out; for ii in 00.equi/MgAl/*; do ./cmpt_00_equi.py param-k0.16.json $ii >> $out; done - -# result of 00, print with k0.08 -out=results/00.al.k0.08.out; rm -f $out; for ii in 00.equi/Al/*; do ./cmpt_00_equi.py param.json $ii >> $out; done -out=results/00.mg.k0.08.out; rm -f $out; for ii in 00.equi/Mg/*; do ./cmpt_00_equi.py param.json $ii >> $out; done - -# record stable info for Al and Mg -test ! 
-d stables && mkdir stables -out=results/00.al.k0.08.out -grep 'std-fcc' $out | grep -v 'std-fcc1' | awk '{print $2}' > stables/Al.vasp-k0.08.e -grep 'std-fcc' $out | grep -v 'std-fcc1' | awk '{print $5}' > stables/Al.vasp-k0.08.v -grep 'std-fcc' $out | grep -v 'std-fcc1' | awk '{print $3}' > stables/Al.deepmd.e -grep 'std-fcc' $out | grep -v 'std-fcc1' | awk '{print $6}' > stables/Al.deepmd.v -grep 'std-fcc' $out | grep -v 'std-fcc1' | awk '{print $4}' > stables/Al.meam.e -grep 'std-fcc' $out | grep -v 'std-fcc1' | awk '{print $7}' > stables/Al.meam.v -out=results/00.mg.k0.08.out -grep 'std-hcp' $out | awk '{print $2}' > stables/Mg.vasp-k0.08.e -grep 'std-hcp' $out | awk '{print $5}' > stables/Mg.vasp-k0.08.v -grep 'std-hcp' $out | awk '{print $3}' > stables/Mg.deepmd.e -grep 'std-hcp' $out | awk '{print $6}' > stables/Mg.deepmd.v -grep 'std-hcp' $out | awk '{print $4}' > stables/Mg.meam.e -grep 'std-hcp' $out | awk '{print $7}' > stables/Mg.meam.v - -# result of 00, print with k0.08 -out=results/00.mgal.k0.08.out; rm -f $out; for ii in 00.equi/MgAl/*; do ./cmpt_00_equi.py param.json $ii >> $out; done - - -############################## -# 01 -############################## -# 01 lammps -# gen 01 -for ii in confs/*/std*; do ./gen_01_eos.py --fix-shape deepmd param.json $ii; done - -# run 01 -cwd=`pwd`; -for ii in 01.eos/*/std*/deepmd/vol*; do cd $ii; pwd; lmp_mpi_010 -i lammps.in; cd $cwd; done - -# 01 vasp -for ii in fcc hcp bcc diamond dhcp sc; do out=results/01.al.$ii.vasp.k0.08.out; rm -f $out; ./cmpt_01_eos.py vasp param.json 01.eos/Al/std-$ii > $out; done -for ii in fcc hcp bcc diamond dhcp sc; do out=results/01.mg.$ii.vasp.k0.08.out; rm -f $out; ./cmpt_01_eos.py vasp param.json 01.eos/Mg/std-$ii > $out; done -for ii in fcc hcp bcc diamond dhcp sc; do out=results/01.al.$ii.vasp.k0.16.out; rm -f $out; ./cmpt_01_eos.py vasp param-k0.16.json 01.eos/Al/std-$ii > $out; done -for ii in fcc hcp bcc diamond dhcp sc; do out=results/01.mg.$ii.vasp.k0.16.out; rm -f $out; ./cmpt_01_eos.py vasp param-k0.16.json 01.eos/Mg/std-$ii > $out; done -# cmpt 01 deepmd -for ii in fcc hcp bcc diamond dhcp sc; do out=results/01.al.$ii.deepmd.out; rm -f $out; ./cmpt_01_eos.py deepmd param.json 01.eos/Al/std-$ii > $out; done -for ii in fcc hcp bcc diamond dhcp sc; do out=results/01.mg.$ii.deepmd.out; rm -f $out; ./cmpt_01_eos.py deepmd param.json 01.eos/Mg/std-$ii > $out; done -# cmpt 01 meam -for ii in fcc hcp bcc diamond dhcp sc; do out=results/01.al.$ii.meam.out; rm -f $out; ./cmpt_01_eos.py meam param.json 01.eos/Al/std-$ii > $out; done -for ii in fcc hcp bcc diamond dhcp sc; do out=results/01.mg.$ii.meam.out; rm -f $out; ./cmpt_01_eos.py meam param.json 01.eos/Mg/std-$ii > $out; done - -############################## -# 02 -############################## -# gen 02 -for ii in confs/*/std* confs/*/mp*; do ./gen_02_elastic.py deepmd param.json $ii; done - -# run 02 -cwd=`pwd`; -for ii in 02.elastic/*/std*/deepmd/dfm* 02.elastic/*/mp*/deepmd/dfm*; do cd $ii; pwd; lmp_mpi_010 -i lammps.in; cd $cwd; done - -# cmpt 02 -out=results/02.mgal.k0.08.out; rm -f $out; for ii in confs/MgAl/mp*; do deepmd=`./cmpt_02_elastic.py deepmd param-k0.08.json $ii | grep -v \# | tr '\n' ' '`; vasp=`./cmpt_02_elastic.py vasp param-k0.08.json $ii | grep -v \# | tr '\n' ' '`; echo $ii $vasp $deepmd >> $out; echo $ii; done -out=results/02.mgal.k0.16.out; rm -f $out; for ii in confs/MgAl/mp*; do deepmd=`./cmpt_02_elastic.py deepmd param-k0.16.json $ii | grep -v \# | tr '\n' ' '`; vasp=`./cmpt_02_elastic.py vasp param-k0.16.json $ii | 
grep -v \# | tr '\n' ' '`; echo $ii $vasp $deepmd >> $out; echo $ii; done -out=results/02.al.out; rm -f $out; for ii in confs/Al/std* confs/Al/mp*; do deepmd=`./cmpt_02_elastic.py deepmd param.json $ii | grep -v \# | tr '\n' ' '`; vasp=`./cmpt_02_elastic.py vasp param.json $ii | grep -v \# | tr '\n' ' '`; echo $ii $vasp $deepmd >> $out; echo $ii; done -out=results/02.mg.out; rm -f $out; for ii in confs/Mg/std* confs/Mg/mp*; do deepmd=`./cmpt_02_elastic.py deepmd param.json $ii | grep -v \# | tr '\n' ' '`; vasp=`./cmpt_02_elastic.py vasp param.json $ii | grep -v \# | tr '\n' ' '`; echo $ii $vasp $deepmd >> $out; echo $ii; done - -out=results/02.mgal.meam.k0.08.out; rm -f $out; for ii in confs/MgAl/mp*; do deepmd=`./cmpt_02_elastic.py meam param-k0.08.json $ii | grep -v \# | tr '\n' ' '`; vasp=`./cmpt_02_elastic.py vasp param-k0.08.json $ii | grep -v \# | tr '\n' ' '`; echo $ii $vasp $deepmd >> $out; echo $ii; done - -out=results/02.mgal.compk.out; rm -f $out; for ii in confs/MgAl/mp*; do lammps=`./cmpt_02_elastic.py vasp param-k0.16.json $ii | grep -v \# | tr '\n' ' '`; vasp=`./cmpt_02_elastic.py vasp param-k0.08.json $ii | grep -v \# | tr '\n' ' '`; echo $ii $vasp $lammps >> $out; echo $ii; done -out=results/02.al.compk.out; rm -f $out; for ii in confs/Al/std* confs/Al/mp*; do lammps=`./cmpt_02_elastic.py vasp param-k0.16.json $ii | grep -v \# | tr '\n' ' '`; vasp=`./cmpt_02_elastic.py vasp param-k0.08.json $ii | grep -v \# | tr '\n' ' '`; echo $ii $vasp $lammps >> $out; echo $ii; done -out=results/02.mg.compk.out; rm -f $out; for ii in confs/Mg/std* confs/Mg/mp*; do lammps=`./cmpt_02_elastic.py vasp param-k0.16.json $ii | grep -v \# | tr '\n' ' '`; vasp=`./cmpt_02_elastic.py vasp param-k0.08.json $ii | grep -v \# | tr '\n' ' '`; echo $ii $vasp $lammps >> $out; echo $ii; done - -out=results/02.mgal.mods.k0.08.out; rm -f $out; for ii in confs/MgAl/mp*; do deepmd=`./cmpt_02_elastic.py deepmd param-k0.08.json $ii | grep \# | grep = | cut -d = -f 2 | tr '\n' ' ' | sed 's/GPa//g'`; vasp=`./cmpt_02_elastic.py vasp param-k0.08.json $ii | grep \# | grep = | cut -d = -f 2 | tr '\n' ' ' | sed 's/GPa//g'`; meam=`./cmpt_02_elastic.py meam param-k0.08.json $ii | grep \# | grep = | cut -d = -f 2 | tr '\n' ' ' | sed 's/GPa//g'`; echo $ii $vasp $deepmd $meam >> $out; echo $ii; done - - - -############################## -# 03 -############################## -# gen 03 -for ii in confs/*/std* confs/*/mp*; do ./gen_03_vacancy.py deepmd param.json $ii `cat $ii/supercell.dfct`; done - -# run 03 -cwd=`pwd`; -for ii in 03.vacancy/*/std*/deepmd/struct* 03.vacancy/*/mp*/deepmd/struct*; do cd $ii; pwd; lmp_mpi_010 -i lammps.in; cd $cwd; done - -# cmpt 03 -#out=results/03.mgal.vasp.k0.16.out; rm -f $out; for ii in confs/MgAl/mp*; do ./cmpt_03_vacancy.py vasp param-k0.16.json $ii `cat $ii/supercell.dfct` >> $out ; echo $ii; done -# almg -out=results/03.mgal.vasp.k0.08.out; rm -f $out; for ii in confs/MgAl/mp*; do ./cmpt_03_vacancy.py vasp param-k0.08.json $ii `cat $ii/supercell.dfct` >> $out ; echo $ii; done -out=results/03.mgal.deepmd.out; rm -f $out; for ii in confs/MgAl/mp*; do ./cmpt_03_vacancy.py deepmd param-k0.08.json $ii `cat $ii/supercell.dfct` >> $out ; echo $ii; done -out=results/03.mgal.meam.out; rm -f $out; for ii in confs/MgAl/mp*; do ./cmpt_03_vacancy.py meam param-k0.08.json $ii `cat $ii/supercell.dfct` >> $out ; echo $ii; done - -# al -out=results/03.al.vasp.k0.08.out; rm -f $out; for ii in confs/Al/std*; do ./cmpt_03_vacancy.py vasp param-k0.08.json $ii `cat $ii/supercell.dfct` >> $out ; echo $ii; 
done -out=results/03.al.deepmd.out; rm -f $out; for ii in confs/Al/std*; do ./cmpt_03_vacancy.py deepmd param-k0.08.json $ii `cat $ii/supercell.dfct` >> $out ; echo $ii; done -out=results/03.al.meam.out; rm -f $out; for ii in confs/Al/std*; do ./cmpt_03_vacancy.py meam param-k0.08.json $ii `cat $ii/supercell.dfct` >> $out ; echo $ii; done - -# mg -out=results/03.mg.vasp.k0.08.out; rm -f $out; for ii in confs/Mg/std*; do ./cmpt_03_vacancy.py vasp param-k0.08.json $ii `cat $ii/supercell.dfct` >> $out ; echo $ii; done -out=results/03.mg.deepmd.out; rm -f $out; for ii in confs/Mg/std*; do ./cmpt_03_vacancy.py deepmd param-k0.08.json $ii `cat $ii/supercell.dfct` >> $out ; echo $ii; done -out=results/03.mg.meam.out; rm -f $out; for ii in confs/Mg/std*; do ./cmpt_03_vacancy.py meam param-k0.08.json $ii `cat $ii/supercell.dfct` >> $out ; echo $ii; done - -############################## -# 04 -############################## -# gen 04 -# for ii in confs/*/std* confs/*/mp*; do ./gen_04_interstitial.py lammps param.json $ii `cat $ii/supercell.dfct` Al Mg; done -# for ii in confs/MgAl/mp*; do ./gen_04_interstitial.py reprod param-k0.08.json $ii `cat $ii/supercell.dfct` Al Mg; done - -# gen 04 reprod -for ss in `cat confs/MgAl/sel.out`; do ii=confs/MgAl/$ss; ./gen_04_interstitial.py deepmd-reprod param-k0.16.json $ii `cat $ii/supercell.dfct` Al Mg; done -for ss in `cat confs/MgAl/sel.out`; do ii=confs/MgAl/$ss; ./gen_04_interstitial.py deepmd-reprod param-k0.08.json $ii `cat $ii/supercell.dfct` Al Mg; done - -# run 04 -cwd=`pwd` -for ii in 04.interstitial/*/mp*/deepmd-reprod-k0.16/struct*/frame*; do cd $ii; pwd; lmp_mpi_010 -i lammps.in; cd $cwd; done -for ii in 04.interstitial/*/mp*/deepmd-reprod-k0.08/struct*/frame*; do cd $ii; pwd; lmp_mpi_010 -i lammps.in; cd $cwd; done - -# cmpt 04 -for ss in `cat confs/MgAl/sel.out`; do ii=confs/MgAl/$ss; ./cmpt_04_interstitial.py deepmd-reprod param-k0.16.json $ii `cat $ii/supercell.dfct` Al Mg; done -for ss in `cat confs/MgAl/sel.out`; do ii=confs/MgAl/$ss; ./cmpt_04_interstitial.py deepmd-reprod param-k0.08.json $ii `cat $ii/supercell.dfct` Al Mg; done - -# collect 04 -cat 04.interstitial/MgAl/mp-*/vasp-k0.16/struct-*/ener.vasp.out > results/04.mgal.ener.vasp.k0.16.out -cat 04.interstitial/MgAl/mp-*/deepmd-reprod-k0.16/struct-*/ener.lmp.out > results/04.mgal.ener.deepmd.k0.16.out -paste results/04.mgal.ener.vasp.out results/04.mgal.ener.deepmd.out > results/04.mgal.ener.k0.16.out - -cat 04.interstitial/MgAl/mp-*/vasp-k0.08/struct-*/ener.vasp.out > results/04.mgal.ener.vasp.k0.08.out -cat 04.interstitial/MgAl/mp-*/deepmd-reprod-k0.08/struct-*/ener.lmp.out > results/04.mgal.ener.deepmd.k0.08.out -cat 04.interstitial/MgAl/mp-*/meam-reprod-k0.08/struct-*/ener.lmp.out > results/04.mgal.ener.meam.k0.08.out -paste results/04.mgal.ener.vasp.k0.08.out results/04.mgal.ener.deepmd.k0.08.out > results/04.mgal.ener.deepmd.k0.08.cmp.out -paste results/04.mgal.ener.vasp.k0.08.out results/04.mgal.ener.meam.k0.08.out > results/04.mgal.ener.meam.k0.08.cmp.out - -############################## -# 05 -############################## -# gen 05 -for ii in confs/MgAl/mp*; do ./gen_05_surf.py vasp-static param-k0.08.json $ii 1; done -for ii in confs/MgAl/mp*; do ./gen_05_surf.py vasp-static param-k0.16.json $ii 1; done -for ii in confs/MgAl/mp*; do ./gen_05_surf.py deepmd-static param-k0.08.json $ii 1; done - -# run 05 -cwd=`pwd`; -for ii in 05.surf//*/mp*/deepmd-static/struct*; do cd $ii; pwd; lmp_mpi_010 -i lammps.in; cd $cwd; done - -out=results/05.mgal.vasp.static.k0.08.out; rm -f 
$out; for ii in confs/MgAl/mp*; do ./cmpt_05_surf.py vasp-static param-k0.08.json $ii >> $out ; echo $ii; done -out=results/05.mgal.deepmd.static.out; rm -f $out; for ii in confs/MgAl/mp*; do ./cmpt_05_surf.py deepmd-static param-k0.08.json $ii >> $out ; echo $ii; done -out=results/05.mgal.meam.static.out; rm -f $out; for ii in confs/MgAl/mp*; do ./cmpt_05_surf.py meam-static param-k0.08.json $ii >> $out ; echo $ii; done diff --git a/dpgen/auto_test/gen_01_eos.py b/dpgen/auto_test/gen_01_eos.py index 1071ec697..c129a4df3 100755 --- a/dpgen/auto_test/gen_01_eos.py +++ b/dpgen/auto_test/gen_01_eos.py @@ -66,7 +66,7 @@ def make_vasp(jdata, conf_dir) : kpar = fp_params['kpar'] kspacing = fp_params['kspacing'] kgamma = fp_params['kgamma'] - fc = vasp.make_vasp_relax_incar(ecut, ediff, is_alloy, True, True, npar, kpar, kspacing, kgamma) + fc = vasp.make_vasp_relax_incar(ecut, ediff, is_alloy, True, False, npar, kpar, kspacing, kgamma) vasp_path = os.path.join(task_path, 'vasp-k%.2f' % kspacing) os.makedirs(vasp_path, exist_ok = True) diff --git a/dpgen/auto_test/gen_05_surf.py b/dpgen/auto_test/gen_05_surf.py index ac275eb2f..776a0205f 100755 --- a/dpgen/auto_test/gen_05_surf.py +++ b/dpgen/auto_test/gen_05_surf.py @@ -11,20 +11,11 @@ global_task_name = '05.surf' def make_vasp(jdata, conf_dir, max_miller = 2, relax_box = False, static = False) : - fp_params = jdata['vasp_params'] - ecut = fp_params['ecut'] - ediff = fp_params['ediff'] - npar = fp_params['npar'] - kpar = fp_params['kpar'] - kspacing = fp_params['kspacing'] - kgamma = fp_params['kgamma'] - min_slab_size = jdata['min_slab_size'] - min_vacuum_size = jdata['min_vacuum_size'] - pert_xz = jdata['pert_xz'] if 'relax_incar' in jdata.keys(): vasp_str='vasp-relax_incar' else: + kspacing = jdata['vasp_params']['kspacing'] vasp_str='vasp-k%.2f' % (kspacing) # get conf poscar @@ -66,6 +57,16 @@ def make_vasp(jdata, conf_dir, max_miller = 2, relax_box = False, static = False scf_incar_path = os.path.abspath(scf_incar_path) fc = open(scf_incar_path).read() else : + fp_params = jdata['vasp_params'] + ecut = fp_params['ecut'] + ediff = fp_params['ediff'] + npar = fp_params['npar'] + kpar = fp_params['kpar'] + kspacing = fp_params['kspacing'] + kgamma = fp_params['kgamma'] + min_slab_size = jdata['min_slab_size'] + min_vacuum_size = jdata['min_vacuum_size'] + pert_xz = jdata['pert_xz'] fc = vasp.make_vasp_static_incar(ecut, ediff, npar=npar,kpar=kpar, kspacing = kspacing, kgamma = kgamma) else : if 'relax_incar' in jdata.keys(): @@ -74,6 +75,16 @@ def make_vasp(jdata, conf_dir, max_miller = 2, relax_box = False, static = False relax_incar_path = os.path.abspath(relax_incar_path) fc = open(relax_incar_path).read() else : + fp_params = jdata['vasp_params'] + ecut = fp_params['ecut'] + ediff = fp_params['ediff'] + npar = fp_params['npar'] + kpar = fp_params['kpar'] + kspacing = fp_params['kspacing'] + kgamma = fp_params['kgamma'] + min_slab_size = jdata['min_slab_size'] + min_vacuum_size = jdata['min_vacuum_size'] + pert_xz = jdata['pert_xz'] fc = vasp.make_vasp_relax_incar(ecut, ediff, True, relax_box, False, npar=npar,kpar=kpar, kspacing = kspacing, kgamma = kgamma) with open(os.path.join(task_path, 'INCAR'), 'w') as fp : fp.write(fc) diff --git a/dpgen/auto_test/gen_08_dislocation.py b/dpgen/auto_test/gen_08_dislocation.py deleted file mode 100644 index d28fcdd30..000000000 --- a/dpgen/auto_test/gen_08_dislocation.py +++ /dev/null @@ -1,246 +0,0 @@ -import os, re, argparse, filecmp, json, glob, math -import subprocess as sp -import numpy as np 
-import dpgen.auto_test.lib.vasp as vasp -import dpgen.auto_test.lib.lammps as lammps -from pymatgen.core.structure import Structure - - -global_equi_name = '00.equi' -global_task_name = '08.dislocation' - -task_dict={0:'edge',1:'screw'} - -def make_vasp(jdata, conf_dir, supercell = [1,1,1]) : - fp_params = jdata['vasp_params'] - ecut = fp_params['ecut'] - ediff = fp_params['ediff'] - npar = fp_params['npar'] - kpar = fp_params['kpar'] - kspacing = fp_params['kspacing'] - kgamma = fp_params['kgamma'] - - conf_path = os.path.abspath(conf_dir) - conf_poscar = os.path.join(conf_path, 'POSCAR') - # get equi poscar - equi_path = re.sub('confs', global_equi_name, conf_path) - equi_path = os.path.join(equi_path, 'vasp-k%.2f' % kspacing) - equi_contcar = os.path.join(equi_path, 'CONTCAR') - task_path = re.sub('confs', global_task_name, conf_path) - task_path = os.path.join(task_path, 'vasp-k%.2f' % kspacing) - os.makedirs(task_path, exist_ok=True) - cwd = os.getcwd() - os.chdir(task_path) - if os.path.isfile('POSCAR') : - os.remove('POSCAR') - os.symlink(os.path.relpath(equi_contcar), 'POSCAR') - os.chdir(cwd) - task_poscar = os.path.join(task_path, 'POSCAR') - # gen structure from equi poscar - edge = Structure.from_file(task_poscar) - edge.make_supercell([supercell[0],supercell[1],1]) - center=int(supercell[0]*int(supercell[1]/2)+supercell[0]/2) - s=[center+supercell[0]*ii for ii in range(int(supercell[1]/2+1))] - # gen edge dislocation - edge.remove_sites(s) - edge.make_supercell([1,1,supercell[2]]) - # gen screw dislocation - screw = Structure.from_file(task_poscar) - screw.make_supercell([supercell[0], supercell[1], supercell[2]],to_unit_cell=False) - c=[] - for jj in range(math.ceil(supercell[0]/2)): - for ii in range(supercell[2]): - c.append(ii+jj*supercell[2]) - v0 = np.asarray(screw._sites[0].coords, float) - np.asarray(screw._sites[1].coords, float) - for kk in range(math.ceil(supercell[1]/2)): - dc=[ii+kk*supercell[0]*supercell[2] for ii in c] - v=(math.ceil(supercell[1]/2)-kk)/math.ceil(supercell[1]/2)*v0 - screw.translate_sites(dc, vector=v, frac_coords=False, to_unit_cell=False) - dss = [] - dss.append(edge) - dss.append(screw) - - - # gen incar - fc = vasp.make_vasp_relax_incar(ecut, ediff, True, True, True, npar, kpar, kspacing = kspacing, kgamma = kgamma) - with open(os.path.join(task_path, 'INCAR'), 'w') as fp : - fp.write(fc) - # gen potcar - with open(task_poscar,'r') as fp : - lines = fp.read().split('\n') - ele_list = lines[5].split() - potcar_map = jdata['potcar_map'] - potcar_list = [] - for ii in ele_list : - assert(os.path.exists(potcar_map[ii])) - potcar_list.append(potcar_map[ii]) - with open(os.path.join(task_path,'POTCAR'), 'w') as outfile: - for fname in potcar_list: - with open(fname) as infile: - outfile.write(infile.read()) - # gen tasks - copy_str = "%sx%sx%s" % (supercell[0], supercell[1], supercell[2]) - cwd = os.getcwd() - for ii in range(len(dss)) : - struct_path = os.path.join(task_path, 'struct-%s-%s' % (copy_str,task_dict[ii])) - print('# generate %s' % (struct_path)) - os.makedirs(struct_path, exist_ok=True) - os.chdir(struct_path) - for jj in ['POSCAR', 'POTCAR', 'INCAR'] : - if os.path.isfile(jj): - os.remove(jj) - # make conf - dss[ii].to('POSCAR', 'POSCAR') - # link incar, potcar, kpoints - os.symlink(os.path.relpath(os.path.join(task_path, 'INCAR')), 'INCAR') - os.symlink(os.path.relpath(os.path.join(task_path, 'POTCAR')), 'POTCAR') - # save supercell - np.savetxt('supercell.out', supercell, fmt='%d') - os.chdir(cwd) - - -def make_lammps(jdata, 
conf_dir, supercell,task_type) : - - kspacing = jdata['vasp_params']['kspacing'] - fp_params = jdata['lammps_params'] - model_dir = fp_params['model_dir'] - type_map = fp_params['type_map'] - model_dir = os.path.abspath(model_dir) - model_name =fp_params['model_name'] - if not model_name : - models = glob.glob(os.path.join(model_dir, '*pb')) - model_name = [os.path.basename(ii) for ii in models] - else: - models = [os.path.join(model_dir,ii) for ii in model_name] - - model_param = {'model_name' : fp_params['model_name'], - 'param_type': fp_params['model_param_type']} - - ntypes = len(type_map) - - conf_path = os.path.abspath(conf_dir) - conf_poscar = os.path.join(conf_path, 'POSCAR') - # get equi poscar - equi_path = re.sub('confs', global_equi_name, conf_path) - equi_path = os.path.join(equi_path, 'vasp-k%.2f' % kspacing) - equi_contcar = os.path.join(equi_path, 'CONTCAR') - # equi_path = re.sub('confs', global_equi_name, conf_path) - # equi_path = os.path.join(equi_path, 'lmp') - # equi_dump = os.path.join(equi_path, 'dump.relax') - task_path = re.sub('confs', global_task_name, conf_path) - task_path = os.path.join(task_path, task_type) - os.makedirs(task_path, exist_ok=True) - # gen task poscar - task_poscar = os.path.join(task_path, 'POSCAR') - # lammps.poscar_from_last_dump(equi_dump, task_poscar, deepmd_type_map) - cwd = os.getcwd() - os.chdir(task_path) - if os.path.isfile('POSCAR') : - os.remove('POSCAR') - os.symlink(os.path.relpath(equi_contcar), 'POSCAR') - os.chdir(cwd) - # gen structure from equi poscar - edge = Structure.from_file(task_poscar) - edge.make_supercell([supercell[0],supercell[1],1]) - center=int(supercell[0]*int(supercell[1]/2)+supercell[0]/2) - s=[center+supercell[0]*ii for ii in range(int(supercell[1]/2+1))] - # gen edge dislocation - edge.remove_sites(s) - edge.make_supercell([1,1,supercell[2]]) - # gen screw dislocation - screw = Structure.from_file(task_poscar) - screw.make_supercell([supercell[0], supercell[1], supercell[2]],to_unit_cell=False) - c=[] - for jj in range(math.ceil(supercell[0]/2)): - for ii in range(supercell[2]): - c.append(ii+jj*supercell[2]) - v0 = np.asarray(screw._sites[0].coords, float) - np.asarray(screw._sites[1].coords, float) - for kk in range(math.ceil(supercell[1]/2)): - dc=[ii+kk*supercell[0]*supercell[2] for ii in c] - v=(math.ceil(supercell[1]/2)-kk)/math.ceil(supercell[1]/2)*v0 - screw.translate_sites(dc, vector=v, frac_coords=False, to_unit_cell=False) - dss = [] - dss.append(edge) - dss.append(screw) - - # gen tasks - cwd = os.getcwd() - # make lammps.in, relax at 0 bar (scale = 1) - if task_type=='deepmd': - fc = lammps.make_lammps_elastic('conf.lmp', - ntypes, - lammps.inter_deepmd, - model_name) - elif task_type =='meam': - fc = lammps.make_lammps_elastic('conf.lmp', - ntypes, - lammps.inter_meam, - model_param) - - f_lammps_in = os.path.join(task_path, 'lammps.in') - with open(f_lammps_in, 'w') as fp : - fp.write(fc) - # gen tasks - copy_str = "%sx%sx%s" % (supercell[0], supercell[1], supercell[2]) - cwd = os.getcwd() - if task_type=='deepmd': - os.chdir(task_path) - for ii in model_name : - if os.path.exists(ii) : - os.remove(ii) - for (ii,jj) in zip(models, model_name) : - os.symlink(os.path.relpath(ii), jj) - share_models = glob.glob(os.path.join(task_path, '*pb')) - else : - share_models=models - - for ii in range(len(dss)) : - struct_path = os.path.join(task_path, 'struct-%s-%s' % (copy_str,task_dict[ii])) - print('# generate %s' % (struct_path)) - os.makedirs(struct_path, exist_ok=True) - os.chdir(struct_path) - 
for jj in ['conf.lmp', 'lammps.in'] + model_name : - if os.path.isfile(jj): - os.remove(jj) - # make conf - dss[ii].to('POSCAR', 'POSCAR') - lammps.cvt_lammps_conf('POSCAR', 'conf.lmp') - ptypes = vasp.get_poscar_types('POSCAR') - lammps.apply_type_map('conf.lmp', type_map, ptypes) - # link lammps.in - os.symlink(os.path.relpath(f_lammps_in), 'lammps.in') - # link models - for (ii,jj) in zip(share_models, model_name) : - os.symlink(os.path.relpath(ii), jj) - # save supercell - np.savetxt('supercell.out', supercell, fmt='%d') - os.chdir(cwd) - -def _main() : - parser = argparse.ArgumentParser( - description="gen 08.dislocation") - parser.add_argument('TASK', type=str, - help='the task of generation, vasp or lammps') - parser.add_argument('PARAM', type=str, - help='json parameter file') - parser.add_argument('CONF', type=str, - help='the path to conf') - parser.add_argument('COPY', type=int, nargs = 3, - help='the path to conf') - args = parser.parse_args() - - with open (args.PARAM, 'r') as fp : - jdata = json.load (fp) - -# print('# generate %s task with conf %s' % (args.TASK, args.CONF)) - if args.TASK == 'vasp': - make_vasp(jdata, args.CONF, args.COPY) - elif args.TASK == 'deepmd' or args.TASK == 'meam' : - make_lammps(jdata, args.CONF, args.COPY, args.TASK) - #elif args.TASK == 'meam' : - # make_meam_lammps(jdata, args.CONF, args.COPY) - else : - raise RuntimeError("unknow task ", args.TASK) - -if __name__ == '__main__' : - _main() diff --git a/tests/generator/POTCAR.al b/dpgen/auto_test/lib/__init__.py similarity index 100% rename from tests/generator/POTCAR.al rename to dpgen/auto_test/lib/__init__.py diff --git a/dpgen/auto_test/lib/lammps.py b/dpgen/auto_test/lib/lammps.py index 1da497c99..f3788067f 100644 --- a/dpgen/auto_test/lib/lammps.py +++ b/dpgen/auto_test/lib/lammps.py @@ -52,7 +52,7 @@ def apply_type_map(conf_file, deepmd_type_map, ptypes) : raise RuntimeError("cannot find the entry 'atom types' in ", conf_file) words = lines[idx_ntypes].split() words[0] = str(ntypes) - new_lines[idx_ntypes] = " ".join(words) + new_lines[idx_ntypes] = " ".join(words) # find number of atoms idx_atom_entry = -1 for idx, ii in enumerate(lines) : @@ -71,7 +71,7 @@ def apply_type_map(conf_file, deepmd_type_map, ptypes) : ii = " ".join(words) new_lines[idx] = ii with open(conf_file, 'w') as fp: - fp.write("\n".join(new_lines)) + fp.write("\n".join(new_lines)) def _get_ntype(conf) : with open(conf, 'r') as fp: @@ -127,7 +127,7 @@ def make_lammps_eval(conf, ntypes, interaction, param) : ret += "box tilt large\n" ret += "read_data %s\n" % conf for ii in range(ntypes) : - ret += "mass %d 1\n" % (ii+1) + ret += "mass %d 1\n" % (ii+1) ret += "neigh_modify every 1 delay 0 check no\n" ret += interaction(param) ret += "compute mype all pe\n" @@ -157,9 +157,9 @@ def make_lammps_eval(conf, ntypes, interaction, param) : return ret -def make_lammps_equi(conf, ntypes, interaction, param, - etol=1e-12, ftol=1e-6, - maxiter=5000, maxeval=500000, +def make_lammps_equi(conf, ntypes, interaction, param, + etol=1e-12, ftol=1e-6, + maxiter=5000, maxeval=500000, change_box = True) : """ make lammps input for equilibritation @@ -173,7 +173,7 @@ def make_lammps_equi(conf, ntypes, interaction, param, ret += "box tilt large\n" ret += "read_data %s\n" % conf for ii in range(ntypes) : - ret += "mass %d 1\n" % (ii+1) + ret += "mass %d 1\n" % (ii+1) ret += "neigh_modify every 1 delay 0 check no\n" ret += interaction(param) ret += "compute mype all pe\n" @@ -208,8 +208,8 @@ def make_lammps_equi(conf, ntypes, interaction, 
param, ret += "print \"Final Stress (xx yy zz xy xz yz) = ${Pxx} ${Pyy} ${Pzz} ${Pxy} ${Pxz} ${Pyz}\"\n" return ret -def make_lammps_elastic(conf, ntypes, interaction, param, - etol=1e-12, ftol=1e-6, +def make_lammps_elastic(conf, ntypes, interaction, param, + etol=1e-12, ftol=1e-6, maxiter=5000, maxeval=500000) : """ make lammps input for elastic calculation @@ -223,7 +223,7 @@ def make_lammps_elastic(conf, ntypes, interaction, param, ret += "box tilt large\n" ret += "read_data %s\n" % conf for ii in range(ntypes) : - ret += "mass %d 1\n" % (ii+1) + ret += "mass %d 1\n" % (ii+1) ret += "neigh_modify every 1 delay 0 check no\n" ret += interaction(param) ret += "compute mype all pe\n" @@ -251,8 +251,8 @@ def make_lammps_elastic(conf, ntypes, interaction, param, return ret def make_lammps_press_relax(conf, ntypes, scale2equi, interaction, param, - B0 = 70, bp = 0, - etol=1e-12, ftol=1e-6, + B0 = 70, bp = 0, + etol=1e-12, ftol=1e-6, maxiter=5000, maxeval=500000) : """ make lammps input for relaxation at a certain volume @@ -274,7 +274,7 @@ def make_lammps_press_relax(conf, ntypes, scale2equi, interaction, param, ret += "box tilt large\n" ret += "read_data %s\n" % conf for ii in range(ntypes) : - ret += "mass %d 1\n" % (ii+1) + ret += "mass %d 1\n" % (ii+1) ret += "neigh_modify every 1 delay 0 check no\n" ret += interaction(param) ret += "compute mype all pe\n" @@ -305,8 +305,8 @@ def make_lammps_press_relax(conf, ntypes, scale2equi, interaction, param, ret += "print \"Final Stress (xx yy zz xy xz yz) = ${Pxx} ${Pyy} ${Pzz} ${Pxy} ${Pxz} ${Pyz}\"\n" return ret -def make_lammps_phonon(conf, masses, interaction, param, - etol=1e-12, ftol=1e-6, +def make_lammps_phonon(conf, masses, interaction, param, + etol=1e-12, ftol=1e-6, maxiter=5000, maxeval=500000): """ make lammps input for elastic calculation @@ -318,10 +318,10 @@ def make_lammps_phonon(conf, masses, interaction, param, ret += "boundary p p p\n" ret += "atom_style atomic\n" ret += "box tilt large\n" - ret += "read_data %s\n" % conf + ret += "read_data %s\n" % conf ntypes=len(masses) for ii in range(ntypes) : - ret += "mass %d %f\n" % (ii+1,masses[ii]) + ret += "mass %d %f\n" % (ii+1,masses[ii]) ret += "neigh_modify every 1 delay 0 check no\n" ret += interaction(param) return ret @@ -329,7 +329,7 @@ def make_lammps_phonon(conf, masses, interaction, param, def _get_epa (lines) : for ii in lines: if ("Final energy per atoms" in ii) and (not 'print' in ii): - return float(ii.split('=')[1].split()[0]) + return float(ii.split('=')[1].split()[0]) raise RuntimeError("cannot find key \"Final energy per atoms\" in lines, something wrong") def _get_vpa (lines) : @@ -349,7 +349,7 @@ def get_nev (log) : get natoms, energy_per_atom and volume_per_atom from lammps log """ with open(log, 'r') as fp: - lines = fp.read().split('\n') + lines = fp.read().split('\n') epa = _get_epa(lines) vpa = _get_vpa(lines) natoms = _get_natoms(lines) @@ -360,7 +360,7 @@ def get_base_area (log) : get base area """ with open(log, 'r') as fp: - lines = fp.read().split('\n') + lines = fp.read().split('\n') for ii in lines: if ("Final Base area" in ii) and (not 'print' in ii): return float(ii.split('=')[1].split()[0]) @@ -388,7 +388,7 @@ def poscar_from_last_dump(dump, poscar_out, deepmd_type_map) : if 'ITEM: TIMESTEP' in ii : step_idx = idx if step_idx == -1 : - raise RuntimeError("cannot find timestep in lammps dump, something wrong") + raise RuntimeError("cannot find timestep in lammps dump, something wrong") with open('tmp_dump', 'w') as fp: 
fp.write("\n".join(lines[step_idx:])) cvt_lammps_conf('tmp_dump', poscar_out, ofmt='vasp') @@ -401,6 +401,15 @@ def poscar_from_last_dump(dump, poscar_out, deepmd_type_map) : lines = fp.write("\n".join(lines)) +def check_finished_new(fname,keyword): + with open(fname, 'r') as fp : + lines = fp.read().split('\n') + flag=False + for jj in lines: + if (keyword in jj) and (not 'print' in jj): + flag=True + return flag - - +def check_finished(fname): + with open(fname, 'r') as fp: + return 'Total wall time:' in fp.read() diff --git a/dpgen/auto_test/lib/util.py b/dpgen/auto_test/lib/util.py index 9daf226dd..bbb8d438b 100644 --- a/dpgen/auto_test/lib/util.py +++ b/dpgen/auto_test/lib/util.py @@ -1,4 +1,12 @@ import numpy as np +import requests +import os,re +from dpgen.remote.RemoteJob import SSHSession +from dpgen.auto_test.lib import vasp +from dpgen.auto_test.lib import lammps +from dpgen.auto_test.lib.utils import cmd_append_log + +lammps_task_type=['deepmd','meam','eam'] def voigt_to_stress(inpt) : ret = np.zeros((3,3)) @@ -12,3 +20,90 @@ def voigt_to_stress(inpt) : ret[1][0] = ret[0][1] ret[2][1] = ret[1][2] return ret + +def insert_data(task,task_type,username,file_name): + assert task in ['eos','elastic','surf'] + assert task_type in ['vasp','deepmd'] + url='http://115.27.161.2:5000/insert_test_data?username=%s&expr_type=%s&data_type=%s' % (username,task_type,task) + res = requests.post(url, data=open(file_name).read()) + print('Successful upload!') + + +def make_work_path(jdata,task,reprod_opt,static,user): + + task_type=jdata['task_type'] + conf_dir=jdata['conf_dir'] + conf_path = os.path.abspath(conf_dir) + task_path = re.sub('confs', task, conf_path) + + if task_type=="vasp": + if user: + work_path=os.path.join(task_path, 'vasp-user_incar') + assert(os.path.isdir(work_path)) + return work_path + if static: + if 'scf_incar' in jdata.keys(): + task_type=task_type+'-static-scf_incar' + else: + kspacing = jdata['vasp_params']['kspacing'] + task_type=task_type+'-static-k%.2f' % (kspacing) + else: + if 'relax_incar' in jdata.keys(): + task_type=task_type+'-relax_incar' + else: + kspacing = jdata['vasp_params']['kspacing'] + task_type=task_type+'-k%.2f' % (kspacing) + elif task_type in lammps_task_type: + if static: + task_type=task_type+'-static' + elif reprod_opt : + if 'relax_incar' in jdata.keys(): + task_type=task_type+'-reprod-relax_incar' + else: + task_type=task_type+'-reprod-k%.2f'% (kspacing) + + work_path=os.path.join(task_path, task_type) + assert(os.path.isdir(work_path)) + return work_path + + +def get_machine_info(mdata,task_type): + if task_type=="vasp": + vasp_exec=mdata['fp_command'] + group_size = mdata['fp_group_size'] + resources = mdata['fp_resources'] + machine=mdata['fp_machine'] + machine_type = mdata['fp_machine']['machine_type'] + command = vasp_exec + command = cmd_append_log(command, "log") + elif task_type in lammps_task_type: + lmp_exec = mdata['lmp_command'] + group_size = mdata['model_devi_group_size'] + resources = mdata['model_devi_resources'] + machine=mdata['model_devi_machine'] + machine_type = mdata['model_devi_machine']['machine_type'] + command = lmp_exec + " -i lammps.in" + command = cmd_append_log(command, "model_devi.log") + ssh_sess = SSHSession(machine) + return machine, machine_type,ssh_sess,resources, command, group_size + +def collect_task(all_task,task_type): + + if task_type == 'vasp': + output_file ='OUTCAR' + check_finished = vasp.check_finished + elif task_type in lammps_task_type: + output_file = 'log.lammps' + check_finished = 
lammps.check_finished + + run_tasks_ = [] + for ii in all_task: + fres = os.path.join(ii, output_file) + if os.path.isfile(fres) : + if not check_finished(fres): + run_tasks_.append(ii) + else : + run_tasks_.append(ii) + + run_tasks = [os.path.basename(ii) for ii in run_tasks_] + return run_tasks diff --git a/dpgen/auto_test/lib/vasp.py b/dpgen/auto_test/lib/vasp.py index b54cd47f2..5022e4b74 100644 --- a/dpgen/auto_test/lib/vasp.py +++ b/dpgen/auto_test/lib/vasp.py @@ -1,4 +1,4 @@ -#!/usr/bin/python3 +#!/usr/bin/python3 import warnings import numpy as np @@ -43,7 +43,7 @@ def regulate_poscar(poscar_in, poscar_out) : for ii in posis : ele_name = ii.split()[-1] if ele_name == ele : - ele_lines.append(ii) + ele_lines.append(ii) all_lines += ele_lines all_lines.append('') ret = lines[0:5] @@ -70,7 +70,7 @@ def sort_poscar(poscar_in, poscar_out, new_names) : for ii in posis : ele_name = ii.split()[-1] if ele_name == ele : - ele_lines.append(ii) + ele_lines.append(ii) all_lines += ele_lines all_lines.append('') ret = lines[0:5] @@ -79,7 +79,7 @@ def sort_poscar(poscar_in, poscar_out, new_names) : ret.append("Direct") ret += all_lines with open(poscar_out, 'w') as fp: - fp.write("\n".join(ret)) + fp.write("\n".join(ret)) def perturb_xz (poscar_in, poscar_out, pert = 0.01) : with open(poscar_in, 'r') as fp: @@ -119,7 +119,7 @@ def get_energies (fname) : if not check_finished(fname): warnings.warn("incomplete outcar: "+fname) with open(fname, 'r') as fp: - lines = fp.read().split('\n') + lines = fp.read().split('\n') try : ener = _get_energies(lines) return ener @@ -130,7 +130,7 @@ def get_boxes (fname) : if not check_finished(fname): warnings.warn("incomplete outcar: "+fname) with open(fname, 'r') as fp: - lines = fp.read().split('\n') + lines = fp.read().split('\n') try : ener = _get_boxes(lines) return ener @@ -141,7 +141,7 @@ def get_nev(fname) : if not check_finished(fname): warnings.warn("incomplete outcar: "+fname) with open(fname, 'r') as fp: - lines = fp.read().split('\n') + lines = fp.read().split('\n') try: natoms = _get_natoms(lines) vol = _get_volumes(lines)[-1] @@ -155,7 +155,7 @@ def get_stress(fname) : if not check_finished(fname): warnings.warn("incomplete outcar: "+fname) with open(fname, 'r') as fp: - lines = fp.read().split('\n') + lines = fp.read().split('\n') try: stress = _get_stress(lines)[-1] return stress @@ -163,7 +163,7 @@ def get_stress(fname) : return None def check_finished(fname) : - with open(fname, 'r') as fp: + with open(fname, 'r') as fp: return 'Elapsed time (sec):' in fp.read() def _get_natoms(lines) : @@ -217,9 +217,9 @@ def _get_stress(lines) : items.append(util.voigt_to_stress(sv)) if len(items) == 0: raise OutcarItemError("cannot find item 'in kB'") - return items + return items -def _compute_isif (relax_ions, +def _compute_isif (relax_ions, relax_shape, relax_volume) : if (relax_ions) and (not relax_shape) and (not relax_volume) : @@ -321,8 +321,8 @@ def make_vasp_relax_incar (ecut, ediff, return ret def make_vasp_phonon_incar (ecut, ediff, - npar, kpar, - kspacing = 0.5, kgamma = True, + npar, kpar, + kspacing = 0.5, kgamma = True, ismear = 1, sigma = 0.2) : isif = 2 ret = '' @@ -399,7 +399,7 @@ def _poscar_scale_cartesian (str_in, scale) : cv = [float(ii) for ii in cl] cv = np.array(cv) * scale lines[ii] = "%.16e %.16e %.16e\n" % (cv[0], cv[1], cv[2]) - return lines + return lines def poscar_natoms(poscar_in) : with open(poscar_in, 'r') as fin : @@ -409,9 +409,9 @@ def poscar_natoms(poscar_in) : def poscar_scale (poscar_in, poscar_out, scale) : with 
open(poscar_in, 'r') as fin : lines = list(fin) - if 'D' == lines[7][0] or 'd' == lines[7][0] : + if 'D' == lines[7][0] or 'd' == lines[7][0] : lines = _poscar_scale_direct(lines, scale) - elif 'C' == lines[7][0] or 'c' == lines[7][0] : + elif 'C' == lines[7][0] or 'c' == lines[7][0] : lines = _poscar_scale_cartesian(lines, scale) else : raise RuntimeError("Unknow poscar coord style at line 7: %s" % lines[7]) @@ -451,7 +451,8 @@ def _make_vasp_kp_mp(kpoints): def make_vasp_kpoints (kpoints, kgamma = False) : if kgamma : - ret = _make_vasp_kp_gamma(kpoints) + ret = _make_vasp_kp_gamma(kpoints) else : ret = _make_vasp_kp_mp(kpoints) return ret + diff --git a/dpgen/auto_test/run.py b/dpgen/auto_test/run.py index 589a57568..ce76ca69d 100644 --- a/dpgen/auto_test/run.py +++ b/dpgen/auto_test/run.py @@ -15,6 +15,7 @@ import sys import os, re, argparse, filecmp, json, glob +import dpgen.auto_test.lib.util as util import dpgen.auto_test.lib.vasp as vasp import dpgen.auto_test.lib.lammps as lammps import random @@ -28,7 +29,7 @@ from dpgen.auto_test.lib.utils import create_path from dpgen.auto_test.lib.utils import copy_file_list from dpgen.auto_test.lib.utils import replace -from dpgen.auto_test.lib.utils import cmd_append_log + from dpgen.auto_test.lib.utils import log_iter from dpgen.auto_test.lib.utils import record_iter from dpgen.auto_test.lib.utils import log_iter @@ -74,7 +75,7 @@ def _run(machine, common_files, forward_files, backward_files) - elif machine_type == 'slurm' : + elif machine_type == 'slurm' : print("The second situation!") group_slurm_jobs(ssh_sess, resources, @@ -86,7 +87,7 @@ def _run(machine, forward_files, backward_files, forward_task_deference =False) - elif machine_type == 'pbs' : + elif machine_type == 'pbs' : group_slurm_jobs(ssh_sess, resources, command, @@ -98,7 +99,7 @@ def _run(machine, backward_files, remote_job = PBSJob, forward_task_deference =False) - elif machine_type == 'local' : + elif machine_type == 'local' : group_local_jobs(ssh_sess, resources, command, @@ -111,40 +112,7 @@ def _run(machine, else : raise RuntimeError("unknow machine type") -def make_work_path(jdata,task,reprod_opt,static,user): - kspacing = jdata['vasp_params']['kspacing'] - task_type=jdata['task_type'] - conf_dir=jdata['conf_dir'] - conf_path = os.path.abspath(conf_dir) - task_path = re.sub('confs', task, conf_path) - if task_type=="vasp": - if user: - work_path=os.path.join(task_path, 'vasp-user_incar') - assert(os.path.isdir(work_path)) - return work_path - if static: - if 'scf_incar' in jdata.keys(): - task_type=task_type+'-static-scf_incar' - else: - task_type=task_type+'-static-k%.2f' % (kspacing) - else: - if 'relax_incar' in jdata.keys(): - task_type=task_type+'-relax_incar' - else: - task_type=task_type+'-k%.2f' % (kspacing) - elif task_type in lammps_task_type: - if static: - task_type=task_type+'-static' - elif reprod_opt : - if 'relax_incar' in jdata.keys(): - task_type=task_type+'-reprod-relax_incar' - else: - task_type=task_type+'-reprod-k%.2f'% (kspacing) - - work_path=os.path.join(task_path, task_type) - assert(os.path.isdir(work_path)) - return work_path def gen_equi(task_type,jdata,mdata): conf_dir=jdata['conf_dir'] @@ -158,32 +126,16 @@ def gen_equi(task_type,jdata,mdata): else : raise RuntimeError ("unknow task %s, something wrong" % task_type) os.chdir(cwd) - -def run_equi(task_type,jdata,mdata,ssh_sess): + +def run_equi(task_type,jdata,mdata): #rmprint("This module has been run !") - work_path=make_work_path(jdata,'00.equi',False,False,False) + 
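The per-stage machine setup and unfinished-task scanning that used to be repeated inside every `run_*` function of `dpgen/auto_test/run.py` are consolidated into `util.make_work_path`, `util.get_machine_info` and `util.collect_task`. A minimal sketch of the shared pattern the refactored functions follow; the wrapper name `prepare_run` and the example arguments are illustrative, not part of the diff:

```python
import glob
import os

import dpgen.auto_test.lib.util as util


def prepare_run(jdata, mdata, task_type, task, pattern):
    """Gather what a refactored run_* function needs before calling _run().

    e.g. task='01.eos', pattern='vol-*' for the EOS stage.
    """
    work_path = util.make_work_path(jdata, task, False, False, False)
    all_task = sorted(glob.glob(os.path.join(work_path, pattern)))
    # collect_task keeps only tasks whose OUTCAR / log.lammps does not report completion
    run_tasks = util.collect_task(all_task, task_type)
    machine, machine_type, ssh_sess, resources, command, group_size = \
        util.get_machine_info(mdata, task_type)
    return work_path, run_tasks, machine, machine_type, ssh_sess, resources, command, group_size
```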
work_path=util.make_work_path(jdata,'00.equi',False,False,False) all_task = glob.glob(os.path.join(work_path,'.')) - + #vasp if task_type=="vasp": mdata=decide_fp_machine(mdata) - vasp_exec=mdata['fp_command'] - group_size = mdata['fp_group_size'] - resources = mdata['fp_resources'] - machine=mdata['fp_machine'] - machine_type = mdata['fp_machine']['machine_type'] - command = vasp_exec - command = cmd_append_log(command, "log") - - run_tasks_ = [] - for ii in all_task: - fres = os.path.join(ii, 'OUTCAR') - if os.path.isfile(fres) : - if not vasp.check_finished(fres): - run_tasks_.append(ii) - else : - run_tasks_.append(ii) forward_files = ['INCAR', 'POTCAR'] backward_files = ['OUTCAR','CONTCAR','OSZICAR'] @@ -192,30 +144,7 @@ def run_equi(task_type,jdata,mdata,ssh_sess): #lammps elif task_type in lammps_task_type: mdata = decide_model_devi_machine(mdata) - lmp_exec = mdata['lmp_command'] - group_size = mdata['model_devi_group_size'] - resources = mdata['model_devi_resources'] - machine=mdata['model_devi_machine'] - machine_type = mdata['model_devi_machine']['machine_type'] - - command = lmp_exec + " -i lammps.in" - command = cmd_append_log(command, "model_devi.log") - run_tasks_ = [] - for ii in all_task: - fres = os.path.join(ii, 'log.lammps') - if os.path.isfile(fres) : - with open(fres, 'r') as fp : - lines = fp.read().split('\n') - flag=False - for jj in lines: - if ("Final energy per atoms" in jj) and (not 'print' in jj): - flag=True - if not flag: - run_tasks_.append(ii) - else : - run_tasks_.append(ii) - forward_files = ['conf.lmp', 'lammps.in'] backward_files = ['dump.relax','log.lammps', 'model_devi.log'] @@ -229,14 +158,17 @@ def run_equi(task_type,jdata,mdata,ssh_sess): else: models = [os.path.join(model_dir,ii) for ii in model_name] common_files = model_name - + if len(model_name)>1 and task_type == 'deepmd': backward_files = backward_files + ['model_devi.out'] else: raise RuntimeError ("unknow task %s, something wrong" % task_type) - - run_tasks = [os.path.basename(ii) for ii in run_tasks_] + + run_tasks = util.collect_task(all_task,task_type) + + machine,machine_type,ssh_sess,resources,command,group_size=util.get_machine_info(mdata,task_type) + _run(machine, machine_type, ssh_sess, @@ -265,48 +197,30 @@ def cmpt_equi(task_type,jdata,mdata): def gen_eos(task_type,jdata,mdata): conf_dir=jdata['conf_dir'] - #fix_shape=jdata['fix_shape'] fix_shape = True cwd=os.getcwd() #vasp if task_type == "vasp": - gen_01_eos.make_vasp(jdata, conf_dir) - #lammps + gen_01_eos.make_vasp(jdata, conf_dir) + #lammps elif task_type in lammps_task_type: if fix_shape : gen_01_eos.make_lammps_fixv(jdata, conf_dir,task_type) else : - gen_01_eos.make_lammps(jdata, conf_dir,task_type) + gen_01_eos.make_lammps(jdata, conf_dir,task_type) else : raise RuntimeError("unknow task ", task_type) os.chdir(cwd) -def run_eos(task_type,jdata,mdata,ssh_sess): - work_path=make_work_path(jdata,'01.eos',False,False,False) - print(work_path) +def run_eos(task_type,jdata,mdata): + work_path=util.make_work_path(jdata,'01.eos',False,False,False) all_task = glob.glob(os.path.join(work_path, "vol-*")) all_task.sort() - + #vasp if task_type=="vasp": mdata=decide_fp_machine(mdata) - vasp_exec=mdata['fp_command'] - group_size = mdata['fp_group_size'] - resources = mdata['fp_resources'] - machine=mdata['fp_machine'] - machine_type = mdata['fp_machine']['machine_type'] - command = vasp_exec - command = cmd_append_log(command, "log") - - run_tasks_ = [] - for ii in all_task: - fres = os.path.join(ii, 'OUTCAR') - if 
os.path.isfile(fres) : - if not vasp.check_finished(fres): - run_tasks_.append(ii) - else : - run_tasks_.append(ii) forward_files = ['INCAR', 'POSCAR','POTCAR'] backward_files = ['OUTCAR','OSZICAR'] @@ -315,28 +229,6 @@ def run_eos(task_type,jdata,mdata,ssh_sess): #lammps elif task_type in lammps_task_type: mdata = decide_model_devi_machine(mdata) - lmp_exec = mdata['lmp_command'] - group_size = mdata['model_devi_group_size'] - resources = mdata['model_devi_resources'] - machine=mdata['model_devi_machine'] - machine_type = mdata['model_devi_machine']['machine_type'] - command = lmp_exec + " -i lammps.in" - command = cmd_append_log(command, "model_devi.log") - - run_tasks_ = [] - for ii in all_task: - fres = os.path.join(ii, 'log.lammps') - if os.path.isfile(fres) : - with open(fres, 'r') as fp : - lines = fp.read().split('\n') - flag=False - for jj in lines: - if ("Final energy per atoms" in jj) and (not 'print' in jj): - flag=True - if not flag: - run_tasks_.append(ii) - else : - run_tasks_.append(ii) fp_params = jdata['lammps_params'] model_dir = fp_params['model_dir'] @@ -350,14 +242,16 @@ def run_eos(task_type,jdata,mdata,ssh_sess): forward_files = ['conf.lmp', 'lammps.in']+model_name backward_files = ['log.lammps', 'model_devi.log'] common_files=['lammps.in']+model_name - + if len(model_name)>1 and task_type == 'deepmd': backward_files = backward_files + ['model_devi.out'] else: raise RuntimeError ("unknow task %s, something wrong" % task_type) - run_tasks = [os.path.basename(ii) for ii in run_tasks_] + run_tasks = util.collect_task(all_task,task_type) + + machine,machine_type,ssh_sess,resources,command,group_size=util.get_machine_info(mdata,task_type) _run(machine, machine_type, ssh_sess, @@ -374,10 +268,10 @@ def cmpt_eos(task_type,jdata,mdata): conf_dir=jdata['conf_dir'] #vasp if task_type == "vasp": - cmpt_01_eos.comput_vasp_eos(jdata, conf_dir) - #lammps + cmpt_01_eos.comput_vasp_eos(jdata, conf_dir) + #lammps elif task_type in lammps_task_type: - cmpt_01_eos.comput_lmp_eos(conf_dir, task_type) + cmpt_01_eos.comput_lmp_eos(jdata, conf_dir, task_type) else : raise RuntimeError("unknow task ", task_type) @@ -394,32 +288,15 @@ def gen_elastic(task_type,jdata,mdata): raise RuntimeError ("unknow task %s, something wrong" % task_type) os.chdir(cwd) -def run_elastic(task_type,jdata,mdata,ssh_sess): - work_path=make_work_path(jdata,'02.elastic',False,False,False) - print(work_path) - +def run_elastic(task_type,jdata,mdata): + work_path=util.make_work_path(jdata,'02.elastic',False,False,False) + all_task = glob.glob(os.path.join(work_path, "dfm-*")) all_task.sort() - + #vasp if task_type == "vasp": mdata=decide_fp_machine(mdata) - vasp_exec=mdata['fp_command'] - group_size = mdata['fp_group_size'] - resources = mdata['fp_resources'] - machine=mdata['fp_machine'] - machine_type = mdata['fp_machine']['machine_type'] - command = vasp_exec - command = cmd_append_log(command, "log") - - run_tasks_ = [] - for ii in all_task: - fres = os.path.join(ii, 'OUTCAR') - if os.path.isfile(fres) : - if not vasp.check_finished(fres): - run_tasks_.append(ii) - else : - run_tasks_.append(ii) forward_files = ['INCAR', 'POSCAR','POTCAR','KPOINTS'] backward_files = ['OUTCAR','CONTCAR','OSZICAR'] @@ -428,28 +305,6 @@ def run_elastic(task_type,jdata,mdata,ssh_sess): #lammps elif task_type in lammps_task_type: mdata = decide_model_devi_machine(mdata) - lmp_exec = mdata['lmp_command'] - group_size = mdata['model_devi_group_size'] - resources = mdata['model_devi_resources'] - machine=mdata['model_devi_machine'] - 
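The inline OUTCAR and `log.lammps` completion scans being removed from these `run_*` functions are covered by the `check_finished_new`/`check_finished` helpers added to `dpgen/auto_test/lib/lammps.py` earlier in this diff and by `vasp.check_finished`. A sketch of the calls; the file paths are hypothetical:

```python
from dpgen.auto_test.lib import lammps
from dpgen.auto_test.lib import vasp

log = "struct-000/log.lammps"                                        # hypothetical task output
relaxed = lammps.check_finished_new(log, "Final energy per atoms")   # equi/eos/vacancy-style check
stressed = lammps.check_finished_new(log, "Final Stress")            # elastic-style check
done = lammps.check_finished(log)                                    # generic 'Total wall time:' check
vasp_done = vasp.check_finished("struct-000/OUTCAR")                 # VASP: 'Elapsed time (sec):'
```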
machine_type = mdata['model_devi_machine']['machine_type'] - command = lmp_exec + " -i lammps.in" - command = cmd_append_log(command, "model_devi.log") - - run_tasks_ = [] - for ii in all_task: - fres = os.path.join(ii, 'log.lammps') - if os.path.isfile(fres) : - with open(fres, 'r') as fp : - lines = fp.read().split('\n') - flag=False - for jj in lines: - if ('Final Stress' in jj) and (not 'print' in jj): - flag=True - if not flag: - run_tasks_.append(ii) - else : - run_tasks_.append(ii) fp_params = jdata['lammps_params'] model_dir = fp_params['model_dir'] @@ -463,14 +318,15 @@ def run_elastic(task_type,jdata,mdata,ssh_sess): forward_files = ['conf.lmp', 'lammps.in','strain.out']+model_name backward_files = ['log.lammps', 'model_devi.log'] common_files=['lammps.in']+model_name - + if len(model_name)>1 and task_type == 'deepmd': backward_files = backward_files + ['model_devi.out'] else: raise RuntimeError ("unknow task %s, something wrong" % task_type) - - run_tasks = [os.path.basename(ii) for ii in run_tasks_] + + run_tasks = util.collect_task(all_task,task_type) + machine,machine_type,ssh_sess,resources,command,group_size=util.get_machine_info(mdata,task_type) _run(machine, machine_type, ssh_sess, @@ -482,16 +338,16 @@ def run_elastic(task_type,jdata,mdata,ssh_sess): common_files, forward_files, backward_files) - + def cmpt_elastic(task_type,jdata,mdata): conf_dir=jdata['conf_dir'] if task_type == "vasp": - cmpt_02_elastic.cmpt_vasp(jdata, conf_dir) + cmpt_02_elastic.cmpt_vasp(jdata, conf_dir) elif task_type in lammps_task_type: cmpt_02_elastic.cmpt_deepmd_lammps(jdata, conf_dir, task_type) else : raise RuntimeError ("unknow task %s, something wrong" % task_type) - + def gen_vacancy(task_type,jdata,mdata): conf_dir=jdata['conf_dir'] supercell=jdata['supercell'] @@ -506,31 +362,15 @@ def gen_vacancy(task_type,jdata,mdata): raise RuntimeError("unknow task ", task_type) os.chdir(cwd) -def run_vacancy(task_type,jdata,mdata,ssh_sess): +def run_vacancy(task_type,jdata,mdata): - work_path=make_work_path(jdata,'03.vacancy',False,False,False) + work_path=util.make_work_path(jdata,'03.vacancy',False,False,False) all_task = glob.glob(os.path.join(work_path,'struct-*')) - + #vasp if task_type == "vasp": mdata=decide_fp_machine(mdata) - vasp_exec=mdata['fp_command'] - group_size = mdata['fp_group_size'] - resources = mdata['fp_resources'] - machine=mdata['fp_machine'] - machine_type = mdata['fp_machine']['machine_type'] - command = vasp_exec - command = cmd_append_log(command, "log") - run_tasks_ = [] - for ii in all_task: - fres = os.path.join(ii, 'OUTCAR') - if os.path.isfile(fres) : - if not vasp.check_finished(fres): - run_tasks_.append(ii) - else : - run_tasks_.append(ii) - forward_files = ['INCAR', 'POSCAR','POTCAR'] backward_files = ['OUTCAR','OSZICAR'] common_files=['INCAR','POTCAR'] @@ -538,29 +378,7 @@ def run_vacancy(task_type,jdata,mdata,ssh_sess): #lammps elif task_type in lammps_task_type: mdata = decide_model_devi_machine(mdata) - lmp_exec = mdata['lmp_command'] - group_size = mdata['model_devi_group_size'] - resources = mdata['model_devi_resources'] - machine=mdata['model_devi_machine'] - machine_type = mdata['model_devi_machine']['machine_type'] - command = lmp_exec + " -i lammps.in" - command = cmd_append_log(command, "model_devi.log") - run_tasks_ = [] - for ii in all_task: - fres = os.path.join(ii, 'log.lammps') - if os.path.isfile(fres) : - with open(fres, 'r') as fp : - lines = fp.read().split('\n') - flag=False - for jj in lines: - if ("Final energy per atoms" in jj) and (not 
'print' in jj): - flag=True - if not flag: - run_tasks_.append(ii) - else : - run_tasks_.append(ii) - fp_params = jdata['lammps_params'] model_dir = fp_params['model_dir'] model_dir = os.path.abspath(model_dir) @@ -580,8 +398,9 @@ def run_vacancy(task_type,jdata,mdata,ssh_sess): else: raise RuntimeError ("unknow task %s, something wrong" % task_type) - - run_tasks = [os.path.basename(ii) for ii in run_tasks_] + + run_tasks = util.collect_task(all_task,task_type) + machine,machine_type,ssh_sess,resources,command,group_size=util.get_machine_info(mdata,task_type) _run(machine, machine_type, ssh_sess, @@ -599,7 +418,7 @@ def cmpt_vacancy(task_type,jdata,mdata): supercell=jdata['supercell'] #vasp if task_type == "vasp": - cmpt_03_vacancy.cmpt_vasp(jdata, conf_dir, supercell) + cmpt_03_vacancy.cmpt_vasp(jdata, conf_dir, supercell) #lammps elif task_type in lammps_task_type: cmpt_03_vacancy.cmpt_deepmd_lammps(jdata, conf_dir, supercell, task_type) @@ -625,32 +444,16 @@ def gen_interstitial(task_type,jdata,mdata): raise RuntimeError("unknow task ", task_type) os.chdir(cwd) -def run_interstitial(task_type,jdata,mdata,ssh_sess): +def run_interstitial(task_type,jdata,mdata): reprod_opt=jdata['reprod-opt'] - work_path=make_work_path(jdata,'04.interstitial',reprod_opt,False,False) + work_path=util.make_work_path(jdata,'04.interstitial',reprod_opt,False,False) all_task = glob.glob(os.path.join(work_path,'struct-*')) - + #vasp if task_type == "vasp": mdata=decide_fp_machine(mdata) - vasp_exec=mdata['fp_command'] - group_size = mdata['fp_group_size'] - resources = mdata['fp_resources'] - machine=mdata['fp_machine'] - machine_type = mdata['fp_machine']['machine_type'] - command = vasp_exec - command = cmd_append_log(command, "log") - run_tasks_ = [] - for ii in all_task: - fres = os.path.join(ii, 'OUTCAR') - if os.path.isfile(fres) : - if not vasp.check_finished(fres): - run_tasks_.append(ii) - else : - run_tasks_.append(ii) - forward_files = ['INCAR', 'POSCAR','POTCAR'] backward_files = ['OUTCAR','XDATCAR','OSZICAR'] common_files=['INCAR'] @@ -658,14 +461,7 @@ def run_interstitial(task_type,jdata,mdata,ssh_sess): #lammps elif task_type in lammps_task_type: mdata = decide_model_devi_machine(mdata) - lmp_exec = mdata['lmp_command'] - group_size = mdata['model_devi_group_size'] - resources = mdata['model_devi_resources'] - machine=mdata['model_devi_machine'] - machine_type = mdata['model_devi_machine']['machine_type'] - command = lmp_exec + " -i lammps.in" - command = cmd_append_log(command, "model_devi.log") - + if reprod_opt: all_frame=[] for ii in all_task: @@ -677,13 +473,7 @@ def run_interstitial(task_type,jdata,mdata,ssh_sess): for ii in all_task: fres = os.path.join(ii, 'log.lammps') if os.path.isfile(fres) : - with open(fres, 'r') as fp : - lines = fp.read().split('\n') - flag=False - for jj in lines: - if ("Final energy per atoms" in jj) and (not 'print' in jj): - flag=True - if not flag: + if not lammps.check_finished(fres): run_tasks_.append(ii) else : run_tasks_.append(ii) @@ -700,13 +490,15 @@ def run_interstitial(task_type,jdata,mdata,ssh_sess): forward_files = ['conf.lmp', 'lammps.in']+model_name backward_files = ['log.lammps', 'model_devi.log'] common_files=['lammps.in']+model_name - + if len(model_name)>1 and task_type == 'deepmd': backward_files = backward_files + ['model_devi.out'] else: raise RuntimeError ("unknow task %s, something wrong" % task_type) + machine,machine_type,ssh_sess,resources,command,group_size=util.get_machine_info(mdata,task_type) + if reprod_opt: for ii in work_path: 
run_tasks=[] @@ -716,7 +508,7 @@ def run_interstitial(task_type,jdata,mdata,ssh_sess): _run(machine, machine_type, ssh_sess, - resources, + resources, command, ii, run_tasks, @@ -725,7 +517,7 @@ def run_interstitial(task_type,jdata,mdata,ssh_sess): forward_files, backward_files) else: - run_tasks = [os.path.basename(ii) for ii in run_tasks_] + run_tasks = util.collect_task(all_task,task_type) _run(machine, machine_type, ssh_sess, @@ -774,32 +566,15 @@ def gen_surf(task_type,jdata,mdata): raise RuntimeError("unknow task ", task_type) os.chdir(cwd) -def run_surf(task_type,jdata,mdata,ssh_sess): +def run_surf(task_type,jdata,mdata): static=jdata['static-opt'] - work_path=make_work_path(jdata,'05.surf',False,static,False) - + work_path=util.make_work_path(jdata,'05.surf',False,static,False) + all_task = glob.glob(os.path.join(work_path,'struct-*')) - + #vasp if task_type == "vasp": mdata=decide_fp_machine(mdata) - vasp_exec=mdata['fp_command'] - group_size = mdata['fp_group_size'] - resources = mdata['fp_resources'] - machine=mdata['fp_machine'] - machine_type = mdata['fp_machine']['machine_type'] - command = vasp_exec - command = cmd_append_log(command, "log") - - run_tasks_ = [] - for ii in all_task: - fres = os.path.join(ii, 'OUTCAR') - if os.path.isfile(fres) : - if not vasp.check_finished(fres): - run_tasks_.append(ii) - else : - run_tasks_.append(ii) - forward_files = ['INCAR', 'POSCAR','POTCAR'] backward_files = ['OUTCAR','OSZICAR'] @@ -808,28 +583,6 @@ def run_surf(task_type,jdata,mdata,ssh_sess): #lammps elif task_type in lammps_task_type: mdata = decide_model_devi_machine(mdata) - lmp_exec = mdata['lmp_command'] - group_size = mdata['model_devi_group_size'] - resources = mdata['model_devi_resources'] - machine=mdata['model_devi_machine'] - machine_type = mdata['model_devi_machine']['machine_type'] - command = lmp_exec + " -i lammps.in" - command = cmd_append_log(command, "model_devi.log") - - run_tasks_ = [] - for ii in all_task: - fres = os.path.join(ii, 'log.lammps') - if os.path.isfile(fres) : - with open(fres, 'r') as fp : - lines = fp.read().split('\n') - flag=False - for jj in lines: - if ("Final energy per atoms" in jj) and (not 'print' in jj): - flag=True - if not flag: - run_tasks_.append(ii) - else : - run_tasks_.append(ii) fp_params = jdata['lammps_params'] model_dir = fp_params['model_dir'] @@ -843,14 +596,15 @@ def run_surf(task_type,jdata,mdata,ssh_sess): forward_files = ['conf.lmp', 'lammps.in']+model_name backward_files = ['log.lammps','model_devi.log'] common_files=['lammps.in']+model_name - + if len(model_name)>1 and task_type == 'deepmd': backward_files = backward_files + ['model_devi.out'] else: raise RuntimeError ("unknow task %s, something wrong" % task_type) - - run_tasks = [os.path.basename(ii) for ii in run_tasks_] + + run_tasks = util.collect_task(all_task,task_type) + machine,machine_type,ssh_sess,resources,command,group_size=util.get_machine_info(mdata,task_type) _run(machine, machine_type, ssh_sess, @@ -869,8 +623,8 @@ def cmpt_surf(task_type,jdata,mdata): cwd=os.getcwd() #vasp if task_type == "vasp": - cmpt_05_surf.cmpt_vasp(jdata, conf_dir, static = static_opt) - #lammps + cmpt_05_surf.cmpt_vasp(jdata, conf_dir, static = static_opt) + #lammps elif task_type in lammps_task_type : if static_opt: task_name =task_type+'-static' @@ -894,33 +648,18 @@ def gen_phonon(task_type,jdata,mdata): raise RuntimeError("unknow task ", task_type) os.chdir(cwd) -def run_phonon(task_type,jdata,mdata,ssh_sess): +def run_phonon(task_type,jdata,mdata): user= ('user_incar' in 
jdata.keys()) - work_path=make_work_path(jdata,'06.phonon',False,False,user) - + work_path=util.make_work_path(jdata,'06.phonon',False,False,user) + all_task = glob.glob(os.path.join(work_path,'.')) - + #vasp if task_type == "vasp": mdata=decide_fp_machine(mdata) - vasp_exec=mdata['fp_command'] - group_size = mdata['fp_group_size'] - resources = mdata['fp_resources'] - machine=mdata['fp_machine'] - machine_type = mdata['fp_machine']['machine_type'] - command = vasp_exec - command = cmd_append_log(command, "log") + machine,machine_type,ssh_sess,resources,command,group_size=util.get_machine_info(mdata,task_type) - run_tasks_ = [] - for ii in all_task: - fres = os.path.join(ii, 'OUTCAR') - if os.path.isfile(fres) : - if not vasp.check_finished(fres): - run_tasks_.append(ii) - else : - run_tasks_.append(ii) - - run_tasks = [os.path.basename(ii) for ii in run_tasks_] + run_tasks = util.collect_task(all_task,task_type) forward_files = ['INCAR', 'POTCAR','KPOINTS'] backward_files = ['OUTCAR','OSZICAR','vasprun.xml'] common_files=['POSCAR'] @@ -947,13 +686,13 @@ def cmpt_phonon(task_type,jdata,mdata): cwd=os.getcwd() #vasp if task_type == "vasp": - cmpt_06_phonon.cmpt_vasp(jdata, conf_dir) - #lammps + cmpt_06_phonon.cmpt_vasp(jdata, conf_dir) + #lammps elif task_type in lammps_task_type : cmpt_06_phonon.cmpt_lammps(jdata,conf_dir, task_type) else : raise RuntimeError("unknow task ", task_type) - os.chdir(cwd) + os.chdir(cwd) def run_task (json_file, machine_file) : with open (json_file, 'r') as fp : @@ -963,22 +702,6 @@ def run_task (json_file, machine_file) : record = "record.auto_test" - model_devi_mdata = decide_model_devi_machine(mdata) - model_devi_machine = model_devi_mdata['model_devi_machine'] - if ('machine_type' in model_devi_machine) and \ - (model_devi_machine['machine_type'] == 'ucloud'): - model_devi_ssh_sess = None - else : - model_devi_ssh_sess = SSHSession(model_devi_machine) - - fp_mdata=decide_fp_machine(mdata) - fp_machine = fp_mdata['fp_machine'] - if ('machine_type' in fp_machine) and \ - (fp_machine['machine_type'] == 'ucloud'): - fp_ssh_sess = None - else : - fp_ssh_sess = SSHSession(fp_machine) - confs = jdata['conf_dir'] ele_list=[key for key in jdata['potcar_map'].keys()] key_id = jdata['key_id'] @@ -1001,52 +724,52 @@ def run_task (json_file, machine_file) : gen_confs.gen_alloy(ele_list,key_id) #default task log_iter ("gen_equi", ii, "equi") - gen_equi (ii, jdata, mdata) + gen_equi (ii, jdata, mdata) log_iter ("run_equi", ii, "equi") - run_equi (ii, jdata, mdata,model_devi_ssh_sess) + run_equi (ii, jdata, mdata) log_iter ("cmpt_equi", ii,"equi") cmpt_equi (ii, jdata, mdata) if jj == "eos" or jj=="all": log_iter ("gen_eos", ii, "eos") - gen_eos (ii, jdata, mdata) + gen_eos (ii, jdata, mdata) log_iter ("run_eos", ii, "eos") - run_eos (ii, jdata, mdata,model_devi_ssh_sess) + run_eos (ii, jdata, mdata) log_iter ("cmpt_eos", ii, "eos") cmpt_eos (ii, jdata, mdata) if jj=="elastic" or jj=="all": log_iter ("gen_elastic", ii, "elastic") - gen_elastic (ii, jdata, mdata) + gen_elastic (ii, jdata, mdata) log_iter ("run_elastic", ii, "elastic") - run_elastic (ii, jdata, mdata,model_devi_ssh_sess) + run_elastic (ii, jdata, mdata) log_iter ("cmpt_elastic", ii, "elastic") cmpt_elastic (ii, jdata, mdata) if jj=="vacancy" or jj=="all": log_iter ("gen_vacancy", ii, "vacancy") - gen_vacancy (ii, jdata, mdata) + gen_vacancy (ii, jdata, mdata) log_iter ("run_vacancy", ii, "vacancy") - run_vacancy (ii, jdata, mdata,model_devi_ssh_sess) + run_vacancy (ii, jdata, mdata) log_iter 
("cmpt_vacancy", ii, "vacancy") cmpt_vacancy (ii, jdata, mdata) if jj=="interstitial" or jj=="all": log_iter ("gen_interstitial", ii, "interstitial") - gen_interstitial (ii, jdata, mdata) + gen_interstitial (ii, jdata, mdata) log_iter ("run_interstitial", ii, "interstitial") - run_interstitial (ii, jdata, mdata,model_devi_ssh_sess) + run_interstitial (ii, jdata, mdata) log_iter ("cmpt_interstitial", ii, "interstitial") cmpt_interstitial (ii, jdata, mdata) if jj=="surf" or jj=="all": log_iter ("gen_surf", ii, "surf") - gen_surf (ii, jdata, mdata) + gen_surf (ii, jdata, mdata) log_iter ("run_surf", ii, "surf") - run_surf (ii, jdata, mdata,model_devi_ssh_sess) + run_surf (ii, jdata, mdata) log_iter ("cmpt_surf", ii, "surf") cmpt_surf (ii, jdata, mdata) ''' if jj=="phonon": log_iter ("gen_phonon", ii, "phonon") - gen_phonon (ii, jdata, mdata) + gen_phonon (ii, jdata, mdata) log_iter ("run_phonon", ii, "phonon") - run_phonon (ii, jdata, mdata,model_devi_ssh_sess) + run_phonon (ii, jdata, mdata) log_iter ("cmpt_phonon", ii, "phonon") cmpt_phonon (ii, jdata, mdata) ''' @@ -1060,9 +783,9 @@ def gen_test(args): def _main () : parser = argparse.ArgumentParser() - parser.add_argument("PARAM", type=str, + parser.add_argument("PARAM", type=str, help="The parameters of the generator") - parser.add_argument("MACHINE", type=str, + parser.add_argument("MACHINE", type=str, help="The settings of the machine running the generator") args = parser.parse_args() diff --git a/dpgen/data/gen.py b/dpgen/data/gen.py index 37cc9090f..c8e98ff54 100644 --- a/dpgen/data/gen.py +++ b/dpgen/data/gen.py @@ -1,6 +1,7 @@ #!/usr/bin/env python3 import os +import re import sys import argparse import glob @@ -13,8 +14,6 @@ import dpdata import numpy as np from dpgen import dlog -import os,json,shutil,re,glob,argparse,dpdata -import numpy as np import subprocess as sp import dpgen.data.tools.hcp as hcp import dpgen.data.tools.fcc as fcc @@ -22,9 +21,9 @@ import dpgen.data.tools.diamond as diamond import dpgen.data.tools.sc as sc from pymatgen import Structure -from dpgen.remote.decide_machine import decide_train_machine, decide_fp_machine, decide_model_devi_machine -from dpgen.remote.RemoteJob import SSHSession, JobStatus, SlurmJob, PBSJob, CloudMachineJob +from dpgen.remote.decide_machine import decide_fp_machine from dpgen import ROOT_PATH +from dpgen.dispatcher.Dispatcher import Dispatcher, make_dispatcher @@ -289,19 +288,7 @@ def place_element (jdata) : def make_vasp_relax (jdata, mdata) : out_dir = jdata['out_dir'] potcars = jdata['potcars'] - #encut = jdata['encut'] - #kspacing = jdata['kspacing_relax'] - #kgamma = jdata['kgamma'] - #ismear = 1 - #if 'ismear' in jdata : - # ismear = jdata['ismear'] - #sigma = 0.2 - #if 'sigma' in jdata : - # sigma = jdata['sigma'] cwd = os.getcwd() - - #vasp_dir = os.path.join(cwd, 'vasp.in') - work_dir = os.path.join(out_dir, global_dirname_02) assert (os.path.isdir(work_dir)) work_dir = os.path.abspath(work_dir) @@ -325,15 +312,6 @@ def make_vasp_relax (jdata, mdata) : outfile.write(infile.read()) os.chdir(work_dir) - #replace('INCAR', 'ENCUT=.*', 'ENCUT=%f' % encut) - #replace('INCAR', 'ISIF=.*', 'ISIF=3') - #replace('INCAR', 'KSPACING=.*from dpgen.remote.decide_machine import decide_train_machine, decide_fp_machine, decide_model_devi_machine', 'KSPACING=%f' % kspacing) - #if kgamma : - # replace('INCAR', 'KGAMMA=.*', 'KGAMMA=T') - #else : - # replace('INCAR', 'KGAMMA=.*', 'KGAMMA=F') - #replace('INCAR', 'ISMEAR=.*', 'ISMEAR=%d' % ismear) - #replace('INCAR', 'SIGMA=.*', 'SIGMA=%f' % 
sigma) sys_list = glob.glob('sys-*') for ss in sys_list: @@ -437,8 +415,6 @@ def make_vasp_md(jdata) : md_nstep = jdata['md_nstep'] cwd = os.getcwd() - #vasp_dir = os.path.join(cwd, 'vasp.in') - #vasp_dir = os.path.join(cwd, vasp_dir) path_ps = os.path.join(out_dir, global_dirname_03) path_ps = os.path.abspath(path_ps) assert(os.path.isdir(path_ps)) @@ -457,18 +433,6 @@ def make_vasp_md(jdata) : with open(fname) as infile: outfile.write(infile.read()) os.chdir(path_md) - #replace('INCAR', 'ENCUT=.*', 'ENCUT=%f' % encut) - #replace('INCAR', 'ISIF=.*', 'ISIF=2') - #replace('INCAR', 'KSPACING=.*', 'KSPACING=%f' % kspacing) - #if kgamma : - # replace('INCAR', 'KGAMMA=.*', 'KGAMMA=T') - #else : - # replace('INCAR', 'KGAMMA=.*', 'KGAMMA=F') - #replace('INCAR', 'NSW=.*', 'NSW=%d' % md_nstep) - #replace('INCAR', 'TEBEG=.*', 'TEBEG=%d' % md_temp) - #replace('INCAR', 'TEEND=.*', 'TEEND=%d' % md_temp) - #replace('INCAR', 'ISMEAR=.*', 'ISMEAR=%d' % ismear) - #replace('INCAR', 'SIGMA=.*', 'SIGMA=%f' % sigma) os.chdir(cwd) for ii in sys_ps : @@ -567,46 +531,12 @@ def _vasp_check_fin (ii) : else : return False return True -def _group_slurm_jobs(ssh_sess, - resources, - command, - work_path, - tasks, - group_size, - forward_common_files, - forward_task_files, - backward_task_files, - remote_job = SlurmJob) : - task_chunks = [ - [j for j in tasks[i:i + group_size]] \ - for i in range(0, len(tasks), group_size) - ] - job_list = [] - for chunk in task_chunks : - rjob = remote_job(ssh_sess, work_path) - rjob.upload('.', forward_common_files) - rjob.upload(chunk, forward_task_files) - rjob.submit(chunk, command, resources = resources) - job_list.append(rjob) - - job_fin = [False for ii in job_list] - while not all(job_fin) : - for idx,rjob in enumerate(job_list) : - if not job_fin[idx] : - status = rjob.check_status() - if status == JobStatus.terminated : - raise RuntimeError("find unsuccessfully terminated job in %s" % rjob.get_job_root()) - elif status == JobStatus.finished : - rjob.download(task_chunks[idx], backward_task_files) - rjob.clean() - job_fin[idx] = True - time.sleep(10) - -def run_vasp_relax(jdata, mdata, ssh_sess): + +def run_vasp_relax(jdata, mdata, dispatcher): fp_command = mdata['fp_command'] fp_group_size = mdata['fp_group_size'] fp_resources = mdata['fp_resources'] - machine_type = mdata['fp_machine']['machine_type'] + #machine_type = mdata['fp_machine']['machine_type'] work_dir = os.path.join(jdata['out_dir'], global_dirname_02) forward_files = ["POSCAR", "INCAR", "POTCAR"] @@ -622,29 +552,27 @@ def run_vasp_relax(jdata, mdata, ssh_sess): if len(relax_tasks) == 0: return - relax_run_tasks = [] + relax_run_tasks = relax_tasks for ii in relax_tasks : if not _vasp_check_fin(ii): relax_run_tasks.append(ii) run_tasks = [os.path.basename(ii) for ii in relax_run_tasks] #dlog.info(run_tasks) - assert (machine_type == "slurm" or machine_type =="Slurm"), "Currently only support for Slurm!" 
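The hand-rolled `_group_slurm_jobs` helper removed above, together with the Slurm-only assertion, is replaced by the generic dispatcher: `run_vasp_relax`/`run_vasp_md` now receive a `Dispatcher` and call its `run_jobs` method. A minimal sketch of the new call path; the wrapper name is hypothetical and the file lists mirror the relax stage for illustration only:

```python
from dpgen.dispatcher.Dispatcher import make_dispatcher


def submit_relax(mdata, work_dir, run_tasks):
    # mdata is the parsed MACHINE file after decide_fp_machine()
    dispatcher = make_dispatcher(mdata["fp_machine"])
    dispatcher.run_jobs(mdata["fp_resources"],
                        [mdata["fp_command"]],             # commands are now passed as a list
                        work_dir,
                        run_tasks,
                        mdata["fp_group_size"],
                        [],                                # forward_common_files
                        ["POSCAR", "INCAR", "POTCAR"],     # forward_files
                        ["OUTCAR", "CONTCAR", "OSZICAR"])  # backward_files
```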
- _group_slurm_jobs(ssh_sess, - fp_resources, - fp_command, - work_dir, - run_tasks, - fp_group_size, - forward_common_files, - forward_files, - backward_files) - -def run_vasp_md(jdata, mdata, ssh_sess): + dispatcher.run_jobs(fp_resources, + [fp_command], + work_dir, + run_tasks, + fp_group_size, + forward_common_files, + forward_files, + backward_files) + +def run_vasp_md(jdata, mdata, dispatcher): fp_command = mdata['fp_command'] fp_group_size = mdata['fp_group_size'] fp_resources = mdata['fp_resources'] - machine_type = mdata['fp_machine']['machine_type'] + #machine_type = mdata['fp_machine']['machine_type'] work_dir = os.path.join(jdata['out_dir'], global_dirname_04) scale = jdata['scale'] pert_numb = jdata['pert_numb'] @@ -667,31 +595,23 @@ def run_vasp_md(jdata, mdata, ssh_sess): if len(md_tasks) == 0: return - md_run_tasks = [] - for ii in md_tasks : - if not _vasp_check_fin(ii): - md_run_tasks.append(ii) - + md_run_tasks = md_tasks + #for ii in md_tasks : + # if not _vasp_check_fin(ii): + # md_run_tasks.append(ii) run_tasks = [ii.replace(work_dir+"/", "") for ii in md_run_tasks] #dlog.info("md_work_dir", work_dir) #dlog.info("run_tasks",run_tasks) - assert (machine_type == "slurm" or machine_type =="Slurm"), "Currently only support for Slurm!" - _group_slurm_jobs(ssh_sess, - fp_resources, - fp_command, - work_dir, - run_tasks, - fp_group_size, - forward_common_files, - forward_files, - backward_files) - - - - - + dispatcher.run_jobs(fp_resources, + [fp_command], + work_dir, + run_tasks, + fp_group_size, + forward_common_files, + forward_files, + backward_files) def gen_init_bulk(args) : try: @@ -711,8 +631,8 @@ def gen_init_bulk(args) : if args.MACHINE is not None: # Selecting a proper machine mdata = decide_fp_machine(mdata) - fp_machine = mdata['fp_machine'] - fp_ssh_sess = SSHSession(fp_machine) + disp = make_dispatcher(mdata["fp_machine"]) + # Decide work path out_dir = out_dir_name(jdata) jdata['out_dir'] = out_dir @@ -768,7 +688,7 @@ def gen_init_bulk(args) : place_element(jdata) if args.MACHINE is not None: make_vasp_relax(jdata, mdata) - run_vasp_relax(jdata, mdata, fp_ssh_sess) + run_vasp_relax(jdata, mdata, disp) else: make_vasp_relax(jdata, {"fp_resources":{}}) elif stage == 2 : @@ -779,113 +699,19 @@ def gen_init_bulk(args) : dlog.info("Current stage is 3, run a short md") make_vasp_md(jdata) if args.MACHINE is not None: - run_vasp_md(jdata, mdata, fp_ssh_sess) + run_vasp_md(jdata, mdata, disp) elif stage == 4 : dlog.info("Current stage is 4, collect data") coll_vasp_md(jdata) else : raise RuntimeError("unknown stage %d" % stage) -def _main() : +if __name__ == "__main__": parser = argparse.ArgumentParser( - description="gen init confs") - parser.add_argument('PARAM', type=str, + description="Generating initial data for bulk systems.") + parser.add_argument('PARAM', type=str, help="parameter file, json/yaml format") - parser.add_argument("MACHINE", type=str, default=None,nargs="?", - help="The settings of the machine running the generator") + parser.add_argument('MACHINE', type=str,default=None,nargs="?", + help="machine file, json/yaml format") args = parser.parse_args() - - try: - import ruamel - from monty.serialization import loadfn,dumpfn - warnings.simplefilter('ignore', ruamel.yaml.error.MantissaNoDotYAML1_1Warning) - jdata=loadfn(args.PARAM) - if args.MACHINE is not None: - mdata=loadfn(args.MACHINE) - except: - with open (args.PARAM, 'r') as fp : - jdata = json.load (fp) - if args.MACHINE is not None: - with open (args.MACHINE, "r") as fp: - mdata = 
json.load(fp) - if args.MACHINE is not None: - # Selecting a proper machine - mdata = decide_fp_machine(mdata) - fp_machine = mdata['fp_machine'] - fp_ssh_sess = SSHSession(fp_machine) - # Decide work path - out_dir = out_dir_name(jdata) - jdata['out_dir'] = out_dir - dlog.info ("# working dir %s" % out_dir) - # Decide whether to use a given poscar - from_poscar = False - if 'from_poscar' in jdata : - from_poscar = jdata['from_poscar'] - # Verify md_nstep - md_nstep_jdata = jdata["md_nstep"] - try: - md_incar = jdata['md_incar'] - if os.path.isfile(md_incar): - with open(md_incar , "r") as fr: - md_incar_lines = fr.readlines() - nsw_flag = False - for incar_line in md_incar_lines: - line = incar_line.split() - if "NSW" in line: - nsw_flag = True - nsw_steps = int(incar_line.split()[-1]) - break - #dlog.info("nsw_steps is", nsw_steps) - #dlog.info("md_nstep_jdata is", md_nstep_jdata) - if nsw_flag: - if (nsw_steps != md_nstep_jdata): - dlog.info("WARNING: your set-up for MD steps in PARAM and md_incar are not consistent!") - dlog.info("MD steps in PARAM is %d"%(md_nstep_jdata)) - dlog.info("MD steps in md_incar is %d"(nsw_steps)) - dlog.info("DP-GEN will use settings in md_incar!") - jdata['md_nstep'] = nsw_steps - except: - pass - ## correct element name - temp_elements = [] - for ele in jdata['elements']: - temp_elements.append(ele[0].upper() + ele[1:]) - jdata['elements'] = temp_elements - dlog.info("Elements are %s"%(' '.join(jdata['elements']))) - - ## Iteration - stage_list = [int(i) for i in jdata['stages']] - for stage in stage_list: - if stage == 1 : - dlog.info("Current stage is 1, relax") - create_path(out_dir) - shutil.copy2(args.PARAM, os.path.join(out_dir, 'param.json')) - if from_poscar : - make_super_cell_poscar(jdata) - else : - make_unit_cell(jdata) - make_super_cell(jdata) - place_element(jdata) - - if args.MACHINE is not None: - make_vasp_relax(jdata, mdata) - run_vasp_relax(jdata, mdata, fp_ssh_sess) - else: - make_vasp_relax(jdata, {"fp_resources":{}}) - elif stage == 2 : - dlog.info("Current stage is 2, perturb and scale") - make_scale(jdata) - pert_scaled(jdata) - elif stage == 3 : - dlog.info("Current stage is 3, run a short md") - make_vasp_md(jdata) - if args.MACHINE is not None: - run_vasp_md(jdata, mdata, fp_ssh_sess) - elif stage == 4 : - dlog.info("Current stage is 4, collect data") - coll_vasp_md(jdata) - else : - raise RuntimeError("unknown stage %d" % stage) - -if __name__ == "__main__": - _main() + gen_init_bulk(args) diff --git a/dpgen/data/param.json b/dpgen/data/jsons/param.json similarity index 100% rename from dpgen/data/param.json rename to dpgen/data/jsons/param.json diff --git a/dpgen/data/surf.py b/dpgen/data/surf.py old mode 100755 new mode 100644 index 641511a9b..f28f6e3ba --- a/dpgen/data/surf.py +++ b/dpgen/data/surf.py @@ -11,10 +11,14 @@ from dpgen import dlog import time from dpgen import ROOT_PATH -from dpgen.remote.decide_machine import decide_train_machine, decide_fp_machine, decide_model_devi_machine -from dpgen.remote.RemoteJob import SSHSession, JobStatus, SlurmJob, PBSJob, CloudMachineJob +from dpgen.remote.decide_machine import decide_fp_machine from pymatgen.core.surface import SlabGenerator,generate_all_slabs, Structure from pymatgen.io.vasp import Poscar +from dpgen.dispatcher.Dispatcher import Dispatcher, make_dispatcher +#-----ASE------- +from pymatgen.io.ase import AseAtomsAdaptor +from ase.io import read +from ase.build import general_surface def create_path (path) : path += '/' @@ -195,10 +199,16 @@ def 
make_super_cell_pymatgen (jdata) : from_path = path_uc from_file = os.path.join(from_path, 'POSCAR.unit') ss = Structure.from_file(from_file) + # ase only support X type element + for i in range(len(ss)): + ss[i]='X' + ss=AseAtomsAdaptor.get_atoms(ss) + all_millers = jdata['millers'] path_sc = os.path.join(out_dir, global_dirname_02) - z_min = jdata['z_min'] + #z_min = jdata['z_min'] + layer_numb = jdata['layer_numb'] super_cell = jdata['super_cell'] cwd = os.getcwd() @@ -211,11 +221,13 @@ def make_super_cell_pymatgen (jdata) : miller_str += str(ii) path_cur_surf = create_path('surf-'+miller_str) os.chdir(path_cur_surf) - slabgen = SlabGenerator(ss, miller, z_min, 1e-3) - all_slabs = slabgen.get_slabs() + #slabgen = SlabGenerator(ss, miller, z_min, 1e-3) + slab=general_surface.surface(ss,indices=miller,vacuum=1e-3,layers=layer_numb) + #all_slabs = slabgen.get_slabs() dlog.info(os.getcwd()) - dlog.info("Miller %s: The slab has %s termination, use the first one" %(str(miller), len(all_slabs))) - all_slabs[0].to('POSCAR', 'POSCAR') + #dlog.info("Miller %s: The slab has %s termination, use the first one" %(str(miller), len(all_slabs))) + #all_slabs[0].to('POSCAR', 'POSCAR') + slab.write('POSCAR',vasp5=True) if super_cell[0] > 1 or super_cell[1] > 1 : st=Structure.from_file('POSCAR') st.make_supercell([super_cell[0], super_cell[1], 1]) @@ -279,11 +291,7 @@ def place_element (jdata) : def make_vasp_relax (jdata) : out_dir = jdata['out_dir'] potcars = jdata['potcars'] - #encut = jdata['encut'] - #kspacing = jdata['kspacing_relax'] - #kgamma = jdata['kgamma'] cwd = os.getcwd() - #vasp_dir = os.path.join(cwd, 'vasp.in') work_dir = os.path.join(out_dir, global_dirname_02) assert (os.path.isdir(work_dir)) @@ -301,13 +309,6 @@ def make_vasp_relax (jdata) : outfile.write(infile.read()) os.chdir(work_dir) - #replace('INCAR', 'ENCUT=.*', 'ENCUT=%f' % encut) - #replace('INCAR', 'ISIF=.*', 'ISIF=3') - #replace('INCAR', 'KSPACING=.*', 'KSPACING=%f' % kspacing) - #if kgamma : - # replace('INCAR', 'KGAMMA=.*', 'KGAMMA=T') - #else : - # replace('INCAR', 'KGAMMA=.*', 'KGAMMA=F') sys_list = glob.glob(os.path.join('surf-*', 'sys-*')) for ss in sys_list: @@ -408,7 +409,7 @@ def pert_scaled(jdata) : tail_elongs = np.arange(mid_point, vacuum_max, vacuum_resol[1]).tolist() elongs = np.unique(head_elongs+tail_elongs).tolist() else: - raise RuntimeError("the length of vacuum_resol must equal 2") + raise RuntimeError("the length of vacuum_resol must equal 1 or 2") else: vacuum_num = jdata['vacuum_numb'] @@ -472,41 +473,8 @@ def _vasp_check_fin (ii) : else : return False return True -def _group_slurm_jobs(ssh_sess, - resources, - command, - work_path, - tasks, - group_size, - forward_common_files, - forward_task_files, - backward_task_files, - remote_job = SlurmJob) : - task_chunks = [ - [j for j in tasks[i:i + group_size]] \ - for i in range(0, len(tasks), group_size) - ] - job_list = [] - for chunk in task_chunks : - rjob = remote_job(ssh_sess, work_path) - rjob.upload('.', forward_common_files) - rjob.upload(chunk, forward_task_files) - rjob.submit(chunk, command, resources = resources) - job_list.append(rjob) - - job_fin = [False for ii in job_list] - while not all(job_fin) : - for idx,rjob in enumerate(job_list) : - if not job_fin[idx] : - status = rjob.check_status() - if status == JobStatus.terminated : - raise RuntimeError("find unsuccessfully terminated job in %s" % rjob.get_job_root()) - elif status == JobStatus.finished : - rjob.download(task_chunks[idx], backward_task_files) - rjob.clean() - job_fin[idx] = 
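`make_super_cell_pymatgen` now builds slabs by layer count with ASE instead of slicing by thickness with pymatgen's `SlabGenerator`, which is what the new `layer_numb` parameter controls. A standalone sketch of the same steps, assuming the ASE version used by this diff; the file name, Miller index and layer count are hypothetical:

```python
from pymatgen import Structure
from pymatgen.io.ase import AseAtomsAdaptor
from ase.build import general_surface

ss = Structure.from_file("POSCAR.unit")
for i in range(len(ss)):
    ss[i] = "X"                       # the ASE slab builder is fed a single dummy species
atoms = AseAtomsAdaptor.get_atoms(ss)
slab = general_surface.surface(atoms, indices=(0, 0, 1), layers=3, vacuum=1e-3)
slab.write("POSCAR", vasp5=True)      # the vacuum layer is elongated later in pert_scaled()
```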
True - time.sleep(10) -def run_vasp_relax(jdata, mdata, ssh_sess): + +def run_vasp_relax(jdata, mdata, dispatcher): fp_command = mdata['fp_command'] fp_group_size = mdata['fp_group_size'] fp_resources = mdata['fp_resources'] @@ -533,16 +501,15 @@ def run_vasp_relax(jdata, mdata, ssh_sess): run_tasks = [ii.replace(work_dir+"/", "") for ii in relax_run_tasks] #dlog.info(run_tasks) - assert (machine_type == "slurm" or machine_type =="Slurm"), "Currently only support for Slurm!" - _group_slurm_jobs(ssh_sess, - fp_resources, - fp_command, - work_dir, - run_tasks, - fp_group_size, - forward_common_files, - forward_files, - backward_files) + dispatcher.run_jobs(fp_resources, + [fp_command], + work_dir, + run_tasks, + fp_group_size, + forward_common_files, + forward_files, + backward_files) + def gen_init_surf(args): try: import ruamel @@ -565,8 +532,8 @@ def gen_init_surf(args): if args.MACHINE is not None: # Decide a proper machine mdata = decide_fp_machine(mdata) - fp_machine = mdata['fp_machine'] - fp_ssh_sess = SSHSession(fp_machine) + disp = make_dispatcher(mdata["fp_machine"]) + #stage = args.STAGE stage_list = [int(i) for i in jdata['stages']] for stage in stage_list: @@ -576,67 +543,19 @@ def gen_init_surf(args): place_element(jdata) make_vasp_relax(jdata) if args.MACHINE is not None: - run_vasp_relax(jdata, mdata, fp_ssh_sess) - # elif stage == 0 : - # # create_path(out_dir) - # # make_super_cell(jdata) - # # place_element(jdata) - # # make_vasp_relax(jdata) - # # make_scale(jdata) - # # pert_scaled(jdata) - # # poscar_elong('POSCAR', 'POSCAR.out', 3) - # pert_scaled(jdata) + run_vasp_relax(jdata, mdata, disp) elif stage == 2 : make_scale(jdata) pert_scaled(jdata) else : raise RuntimeError("unknown stage %d" % stage) -def _main() : - parser = argparse.ArgumentParser( - description="gen init confs") - parser.add_argument('PARAM', type=str, - help="parameter file, json format") - parser.add_argument('STAGE', type=int, - help="the stage of init, can be 1 or 2 " - "1: Setup vasp jobs for relaxation. " - "2: Collect vasp relaxed confs (if relax is not skiped). Perturb system. 
" - ) - args = parser.parse_args() - try: - import ruamel - from monty.serialization import loadfn,dumpfn - warnings.simplefilter('ignore', ruamel.yaml.error.MantissaNoDotYAML1_1Warning) - jdata=loadfn(args.PARAM) - except: - with open (args.PARAM, 'r') as fp : - jdata = json.load (fp) - - out_dir = out_dir_name(jdata) - jdata['out_dir'] = out_dir - dlog.info ("# working dir %s" % out_dir) - - stage = args.STAGE - - if stage == 1 : - create_path(out_dir) - make_super_cell_pymatgen(jdata) - place_element(jdata) - make_vasp_relax(jdata) - # elif stage == 0 : - # # create_path(out_dir) - # # make_super_cell(jdata) - # # place_element(jdata) - # # make_vasp_relax(jdata) - # # make_scale(jdata) - # # pert_scaled(jdata) - # # poscar_elong('POSCAR', 'POSCAR.out', 3) - # pert_scaled(jdata) - elif stage == 2 : - make_scale(jdata) - pert_scaled(jdata) - else : - raise RuntimeError("unknow stage %d" % stage) - if __name__ == "__main__": - _main() + parser = argparse.ArgumentParser( + description="Generating initial data for surface systems.") + parser.add_argument('PARAM', type=str, + help="parameter file, json/yaml format") + parser.add_argument('MACHINE', type=str,default=None,nargs="?", + help="machine file, json/yaml format") + args = parser.parse_args() + gen_init_surf(args) diff --git a/tests/generator/POTCAR.mg b/dpgen/data/tools/__init__.py similarity index 100% rename from tests/generator/POTCAR.mg rename to dpgen/data/tools/__init__.py diff --git a/dpgen/data/tools/bcc.py b/dpgen/data/tools/bcc.py index 02b7d5d3a..e02483b32 100644 --- a/dpgen/data/tools/bcc.py +++ b/dpgen/data/tools/bcc.py @@ -9,7 +9,7 @@ def gen_box () : def poscar_unit (latt) : box = gen_box() ret = "" - ret += "FCC : a = %f \n" % latt + ret += "BCC : a = %f \n" % latt ret += "%.16f\n" % (latt) ret += "%.16f %.16f %.16f\n" % (box[0][0], box[0][1], box[0][2]) ret += "%.16f %.16f %.16f\n" % (box[1][0], box[1][1], box[1][2]) diff --git a/dpgen/database/run.py b/dpgen/database/run.py index 5416bcfb9..7a524a459 100644 --- a/dpgen/database/run.py +++ b/dpgen/database/run.py @@ -4,6 +4,7 @@ import os import time +import json from uuid import uuid4 from threading import Thread from glob import glob @@ -13,31 +14,43 @@ from dpgen.database.vasp import VaspInput from dpdata import System,LabeledSystem from monty.serialization import loadfn,dumpfn +import numpy as np +import traceback OUTPUT=SHORT_CMD+'_db.json' -SUPPORTED_CACULATOR=['vasp','pwscf','siesta','gaussian'] +SUPPORTED_CACULATOR=['vasp','pwscf','gaussian'] ITERS_PAT="iter.*/02.fp/task*" INIT_PAT="init/*/02.md/sys-*/scale-*/*" def db_run(args): dlog.info ("collecting data") - print(args.ID_PREFIX) - _main(args.PATH, args.CALCULATOR, args.OUTPUT,args.ID_PREFIX) + #print(args.ID_PREFIX) + _main(args.PARAM) dlog.info ("finished") -def _main(path,calculator,output,id_prefix): +def _main(param): + with open(param, "r") as fp: + jdata = json.load(fp) + calculator = jdata["calculator"] + path = jdata["path"] + calulator = jdata["calculator"] + output = jdata["output"] + config_info_dict = jdata["config_info_dict"] + id_prefix = jdata["id_prefix"] + skip_init = False + if "skip_init" in jdata: + skip_init = jdata["skip_init"] + ## The mapping from sys_info to sys_configs assert calculator.lower() in SUPPORTED_CACULATOR dlog.info('data collection from: %s'%path) if calculator == "vasp": - parsing_vasp(path,output,id_prefix) + parsing_vasp(path,config_info_dict,skip_init, output,id_prefix) elif calculator == 'gaussian': parsing_gaussian(path,output) - elif calculator == "siesta": - 
parsing_siesta(path, output) else: parsing_pwscf(path,output) -def parsing_vasp(path,output=OUTPUT,id_prefix=None): +def parsing_vasp(path,config_info_dict, skip_init, output=OUTPUT,id_prefix=None): fp_iters=os.path.join(path,ITERS_PAT) dlog.debug(fp_iters) @@ -46,54 +59,103 @@ def parsing_vasp(path,output=OUTPUT,id_prefix=None): fp_init=os.path.join(path,INIT_PAT) dlog.debug(fp_init) f_fp_init=glob(fp_init) - dlog.info("len initialization data: %s"%len(f_fp_init)) - entries=_parsing_vasp(f_fp_init,id_prefix,iters=False) - entries.extend(_parsing_vasp(f_fp_iters,id_prefix)) - dlog.info("len collected data: %s"%len(entries)) - + if skip_init: + entries = _parsing_vasp(f_fp_iters,config_info_dict, id_prefix) + dlog.info("len collected data: %s"%len(entries)) + else: + dlog.info("len initialization data: %s"%len(f_fp_init)) + entries=_parsing_vasp(f_fp_init,config_info_dict, id_prefix,iters=False) + entries.extend(_parsing_vasp(f_fp_iters,config_info_dict, id_prefix)) + dlog.info("len collected data: %s"%len(entries)) + #print(output) + #print(entries) dumpfn(entries,output,indent=4) -def _parsing_vasp(paths,id_prefix,iters=True): +def _parsing_vasp(paths,config_info_dict, id_prefix,iters=True): entries=[] icount=0 + if iters: + iter_record = [] + iter_record_new = [] + try: + with open ("record.database", "r") as f_record: + iter_record = [i.split()[0] for i in f_record.readlines()] + iter_record.sort() + dlog.info("iter_record") + dlog.info(iter_record) + except: + pass for path in paths: + try: f_outcar = os.path.join(path,'OUTCAR') f_job = os.path.join(path,'job.json') - - try: - vi = VaspInput.from_directory(path) - if os.path.isfile(f_job): - attrib=loadfn(f_job) - else: - attrib={} + tmp_iter = path.split('/')[-3] + if (tmp_iter in iter_record) and (tmp_iter != iter_record[-1]): + continue + if tmp_iter not in iter_record_new: + iter_record_new.append(tmp_iter) + vi = VaspInput.from_directory(path) + if os.path.isfile(f_job): + attrib=loadfn(f_job) + else: + attrib={} - if iters and attrib: - tmp_=path.split('/')[-1] - iter_info=tmp_.split('.')[1] - task_info=tmp_.split('.')[-1] - attrib['iter_info']=iter_info - attrib['task_info']=task_info - else: - pass - comp=vi['POSCAR'].structure.composition - ls = LabeledSystem(f_outcar) - lss=ls.to_list() - for ls in lss: - if id_prefix: - eid=id_prefix+"_"+str(icount) - else: - eid = str(uuid4()) - entry=Entry(comp,'vasp',vi.as_dict(),ls.as_dict(),attribute=attrib,entry_id=eid) - entries.append(entry) - icount+=1 - except: - dlog.info("failed here : %s"%path) + if iters and attrib: + # generator/Cu/iter.000031/02.fp/task.007.000000 + tmp_=path.split('/')[-1] + #config_info=tmp_.split('.')[1] + task_info=tmp_.split('.')[-1] + tmp_iter = path.split('/')[-3] + iter_info = tmp_iter.split('.')[-1] + sys_info = path.split('/')[-4] + config_info_int = int(tmp_.split('.')[1]) + for (key, value) in config_info_dict.items(): + if config_info_int in value: + config_info = key + attrib['config_info']=config_info + attrib['task_info']=task_info + attrib['iter_info']=iter_info + attrib['sys_info']=sys_info + with open(f_outcar , "r") as fin_outcar: + infile_outcar = fin_outcar.readlines() + for line in infile_outcar: + if "running on" in line: + attrib["core"] = int(line.split()[2]) + if "Elapse" in line: + attrib["wall_time"] = float(line.split()[-1]) + if "executed on" in line: + attrib["date"] = line.split()[-2] + attrib["clocktime"] = line.split()[-1] + dlog.info("Attrib") + dlog.info(attrib) + comp=vi['POSCAR'].structure.composition + ls = 
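`db_run` no longer takes individual command-line options; it reads a single PARAM file whose keys are consumed in `_main` and `_parsing_vasp` above. A hypothetical parameter set, written as the dict that `json.load` would return; all values are examples, not defaults:

```python
# Hypothetical contents of the PARAM file passed to `dpgen db PARAM`.
param = {
    "calculator": "vasp",               # one of SUPPORTED_CACULATOR
    "path": "./",                       # root holding iter.*/02.fp/task* and init/* data
    "output": "dpgen_db.json",
    "id_prefix": "mg-example",
    "skip_init": False,                 # True: collect iteration data only
    # maps a human-readable label to the sys_configs indices it covers
    "config_info_dict": {"bulk": [0, 1], "surface": [2, 3]},
}
```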
LabeledSystem(f_outcar) + lss=ls.to_list() + for ls in lss: + if id_prefix: + eid=id_prefix+"_"+str(icount) + else: + eid = str(uuid4()) + entry=Entry(comp,'vasp',vi.as_dict(),ls.as_dict(),attribute=attrib,entry_id=eid) + entries.append(entry) + icount+=1 + except Exception: + #dlog.info(str(Exception)) + dlog.info("failed for %s"%(path)) + #pass + if iters: + iter_record.sort() + iter_record_new.sort() + with open("record.database" , "w") as fw: + for line in iter_record: + fw.write(line + "\n") + for line in iter_record_new: + fw.write(line + "\n") return entries def parsing_pwscf(path,output=OUTPUT): pass -def parsing_siesta(path,output=OUTPUT): - pass + def parsing_gaussian(path,output=OUTPUT): pass diff --git a/dpgen/dispatcher/AWS.py b/dpgen/dispatcher/AWS.py new file mode 100644 index 000000000..ddb91dad6 --- /dev/null +++ b/dpgen/dispatcher/AWS.py @@ -0,0 +1,144 @@ +import os,getpass,time +from datetime import datetime +from itertools import zip_longest +from dpgen.dispatcher.Batch import Batch +from dpgen.dispatcher.JobStatus import JobStatus +from dpgen import dlog + + +class AWS(Batch): + try: + import boto3 + except ModuleNotFoundError: + pass + else: + batch_client = boto3.client('batch') + _query_max_results = 1000 + _query_time_interval = 30 + _job_id_map_status = {} + _jobQueue = "" + _query_next_allow_time = datetime.now().timestamp() + + @staticmethod + def map_aws_status_to_dpgen_status(aws_status): + map_dict = {'SUBMITTED': JobStatus.waiting, + 'PENDING': JobStatus.waiting, + 'RUNNABLE': JobStatus.waiting, + 'STARTING': JobStatus.waiting, + 'RUNNING': JobStatus.running, + 'SUCCEEDED': JobStatus.finished, + 'FAILED': JobStatus.terminated, + 'UNKNOWN': JobStatus.unknown} + return map_dict.get(aws_status, JobStatus.unknown) + + @classmethod + def AWS_check_status(cls, job_id=""): + """ + to aviod query jobStatus too often, set a time interval + query_dict example: + {job_id: JobStatus} + + {'40fb24b2-d0ca-4443-8e3a-c0906ea03622': , + '41bda50c-0a23-4372-806c-87d16a680d85': } + + """ + query_dict ={} + if datetime.now().timestamp() > cls._query_next_allow_time: + cls.batch_client = boto3.client('batch') + cls._query_next_allow_time=datetime.now().timestamp()+cls._query_time_interval + for status in ['SUBMITTED', 'PENDING', 'RUNNABLE', 'STARTING', 'RUNNING','SUCCEEDED', 'FAILED']: + status_response = cls.batch_client.list_jobs(jobQueue=cls._jobQueue, jobStatus=status, maxResults=cls._query_max_results) + status_list=status_response.get('jobSummaryList', []) + for job_dict in status_list: + cls._job_id_map_status.update({job_dict['jobId']: cls.map_aws_status_to_dpgen_status(job_dict['status'])}) + # for job in cls._job_id_map_status: + # cls._job_id_map_status[job]=query_dict.get(job, JobStatus.unknown) + dlog.debug('20000:_map: %s' %(cls._job_id_map_status)) + dlog.debug('62000:job_id:%s, _query: %s, _map: %s' %(job_id, query_dict, cls._job_id_map_status)) + if job_id: + return cls._job_id_map_status.get(job_id, JobStatus.unknown) + + return cls._job_id_map_status + + @property + def job_id(self): + try: + self._job_id + except AttributeError: + if self.context.check_file_exists(self.job_id_name): + self._job_id = self.context.read_file(self.job_id_name) + response_list = self.__class__.batch_client.describe_jobs(jobs=[self._job_id]).get('jobs') + try: + response = response_list[0] + jobQueue = response['jobQueue'] + except IndexError: + pass + else: + self.job_id = (response, jobQueue) + return self._job_id + dlog.debug("50000, 
self._job_id:%s,_Queue:%s,_map:%s,"%(self._job_id, self.__class__._jobQueue, self.__class__._job_id_map_status )) + return "" + return self._job_id + + @job_id.setter + def job_id(self, values): + response, jobQueue = values + self._job_id = response['jobId'] + self._job_name = response['jobName'] + self.__class__._jobQueue = jobQueue + self.__class__._job_id_map_status[self._job_id] = self.map_aws_status_to_dpgen_status(response.get('status', 'SUBMITTED')) + self.context.write_file(self.job_id_name, self._job_id) + dlog.debug("15000, _job_id:%s, _job_name:%s, _map:%s, _Queue:%s" % (self._job_id, self._job_name, self.__class__._job_id_map_status, self.__class__._jobQueue)) + + def check_status(self): + return self.__class__.AWS_check_status(job_id=self.job_id) + + def sub_script(self, job_dirs, cmd, args, res, outlog, errlog): + if args is None: + args=[] + multi_command = "" + for job_dir in job_dirs: + for idx,t in enumerate(zip_longest(cmd, args, fillvalue='')): + c_str = f"cd {self.context.remote_root}/{job_dir} && ( test -f tag_{idx}_finished || ( ({t[0]} {t[1]} && touch tag_{idx}_finished 2>>{errlog} || exit 52 ) | tee -a {outlog}) ) || exit 51;" + multi_command += c_str + multi_command +="exit 0;" + dlog.debug("10000, %s" % multi_command) + return multi_command + + def default_resources(self, res): + if res == None: + res = {} + else: + # res.setdefault(jobDefinition) + res.setdefault('cpu_num', 32) + res.setdefault('memory_size', 120000) + res.setdefault('jobQueue', 'deepmd_m5_v1_7') + return res + + def do_submit(self, + job_dirs, + cmd, + args = None, + res = None, + outlog = 'log', + errlog = 'err'): + + res = self.default_resources(res) + dlog.debug("2000, params=(%s, %s, %s, %s, %s, %s, )" % (job_dirs, cmd, args, res, outlog, errlog )) + dlog.debug('2200, self.context.remote_root: %s , self.context.local_root: %s' % (self.context.remote_root, self.context.local_root)) + # concreate_command = + script_str = self.sub_script(job_dirs, cmd, args=args, res=res, outlog=outlog, errlog=errlog) + dlog.debug('2300, script_str: %s, self.sub_script_name: %s' % (script_str, self.sub_script_name)) + """ + jobName example: + home-ec2-user-Ag_init-run_gen-iter_000000-01_model_devi-task_000_000048 + """ + jobName = os.path.join(self.context.remote_root,job_dirs.pop())[1:].replace('/','-').replace('.','_') + jobName += ("_" + str(self.context.job_uuid)) + response = self.__class__.batch_client.submit_job(jobName=jobName, + jobQueue=res['jobQueue'], + jobDefinition=res['jobDefinition'], + parameters={'task_command':script_str}, + containerOverrides={'vcpus':res['cpu_num'], 'memory':res['memory_size']}) + dlog.debug('4000, response:%s' % response) + self.job_id = (response, res['jobQueue']) diff --git a/dpgen/dispatcher/Dispatcher.py b/dpgen/dispatcher/Dispatcher.py index 19aed2668..39a988543 100644 --- a/dpgen/dispatcher/Dispatcher.py +++ b/dpgen/dispatcher/Dispatcher.py @@ -9,6 +9,7 @@ from dpgen.dispatcher.LSF import LSF from dpgen.dispatcher.PBS import PBS from dpgen.dispatcher.Shell import Shell +from dpgen.dispatcher.AWS import AWS from dpgen.dispatcher.JobStatus import JobStatus from dpgen import dlog from hashlib import sha1 @@ -58,6 +59,8 @@ def __init__ (self, self.batch = PBS elif batch_type == 'shell': self.batch = Shell + elif batch_type == 'aws': + self.batch = AWS else : raise RuntimeError('unknown batch ' + batch_type) @@ -93,7 +96,8 @@ def run_jobs(self, chunk_sha1 = sha1(task_chunks_[ii].encode('utf-8')).hexdigest() # if hash in map, recover job, else start a new job if 
chunk_sha1 in path_map: - job_uuid = path_map[chunk_sha1][1].split('/')[-1] + # job_uuid = path_map[chunk_sha1][1].split('/')[-1] + job_uuid = path_map[chunk_sha1][2] dlog.debug("load uuid %s for chunk %s" % (job_uuid, task_chunks_[ii])) else: job_uuid = None @@ -113,14 +117,15 @@ def run_jobs(self, # submit new or recover old submission if job_uuid is None: rjob['batch'].submit(chunk, command, res = resources, outlog=outlog, errlog=errlog) - dlog.debug('assigned uudi %s for %s ' % (rjob['context'].job_uuid, task_chunks_[ii])) - dlog.info('new submission of %s' % rjob['context'].job_uuid) + job_uuid = rjob['context'].job_uuid + dlog.debug('assigned uudi %s for %s ' % (job_uuid, task_chunks_[ii])) + dlog.info('new submission of %s' % job_uuid) else: rjob['batch'].submit(chunk, command, res = resources, outlog=outlog, errlog=errlog, restart = True) dlog.info('restart from old submission %s ' % job_uuid) # record job and its hash job_list.append(rjob) - path_map[chunk_sha1] = [context.local_root,context.remote_root] + path_map[chunk_sha1] = [context.local_root, context.remote_root, job_uuid] else : # finished job, append a None to list job_list.append(None) @@ -200,3 +205,24 @@ def delete(self): os.remove(f_path_map) except: pass + +def make_dispatcher(mdata): + try: + hostname = mdata['hostname'] + context_type = 'ssh' + except: + context_type = 'local' + try: + batch_type = mdata['batch'] + except: + dlog.info('cannot find key "batch" in machine file, try to use deprecated key "machine_type"') + batch_type = mdata['machine_type'] + try: + lazy_local = mdata['lazy_local'] + except: + lazy_local = False + if lazy_local and context_type == 'local': + dlog.info('Dispatcher switches to the lazy local mode') + context_type = 'lazy-local' + disp = Dispatcher(mdata, context_type=context_type, batch_type=batch_type) + return disp diff --git a/dpgen/dispatcher/__init__.py b/dpgen/dispatcher/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/dpgen/generator/lib/ele_temp.py b/dpgen/generator/lib/ele_temp.py new file mode 100644 index 000000000..90372e1a1 --- /dev/null +++ b/dpgen/generator/lib/ele_temp.py @@ -0,0 +1,92 @@ +import os,dpdata,json +import numpy as np +import scipy.constants as pc +from pymatgen.io.vasp.inputs import Incar + + +class NBandsEsti(object): + def __init__ (self, + test_list): + if type(test_list) is list: + ele_t = [] + vol = [] + d_nbd = [] + nbd = [] + for ii in test_list: + res = NBandsEsti._get_res(ii) + ele_t.append(res['ele_temp']) + vol.append(res['vol']) + d_nbd.append(NBandsEsti._get_default_nbands(res)) + nbd.append(res['nbands']) + ele_t = np.array(ele_t) + vol = np.array(vol) + d_nbd = np.array(d_nbd) + nbd = np.array(nbd) + alpha = (nbd - d_nbd) / vol / ele_t**1.5 + self.err = np.std(alpha) + self.pref = np.average(alpha) + # print(np.average(alpha), np.std(alpha), self.err/self.pref) + # print((ele_t), vol, d_nbd, nbd, alpha) + elif type(test_list) is str: + with open(test_list) as fp: + self.pref = float(fp.readline()) + self.err = float(fp.readline()) + else: + raise RuntimeError('unknown input type ' + type(test_list)) + + def save(self, fname): + with open(fname, 'w') as fp: + fp.write(str(self.pref) + '\n') + fp.write(str(self.err) + '\n') + + def predict(self, + target_dir, + tolerance = 0.5): + res = NBandsEsti._get_res(target_dir) + ele_t=(res['ele_temp']) + vol=(res['vol']) + d_nbd=(NBandsEsti._get_default_nbands(res)) + nbd=(res['nbands']) + esti = (self.pref + tolerance*self.err) * ele_t**1.5 * vol + d_nbd + return int(esti)+1 + + 
@classmethod + def _get_res(self, res_dir): + res = {} + sys = dpdata.System(os.path.join(res_dir, 'POSCAR')) + res['natoms'] = (sys['atom_numbs']) + res['vol'] = np.linalg.det(sys['cells'][0]) + res['nvalence'] = (self._get_potcar_nvalence(os.path.join(res_dir, 'POTCAR'))) + res['ele_temp'] = self._get_incar_ele_temp(os.path.join(res_dir, 'INCAR')) * pc.electron_volt / pc.Boltzmann + res['nbands'] = self._get_incar_nbands(os.path.join(res_dir, 'INCAR')) + return res + + @classmethod + def _get_default_nbands(self, res): + ret = 0 + for ii,jj in zip(res['natoms'], res['nvalence']): + ret += ii * jj // 2 + ii // 2 + 2 + return ret + + @classmethod + def _get_potcar_nvalence(self, fname): + with open(fname) as fp: + pot_str = fp.read().split('\n') + head_idx = [] + for idx,ii in enumerate(pot_str): + if ('PAW_' in ii) and ('TITEL' not in ii): + head_idx.append(idx) + res = [] + for ii in head_idx: + res.append(float(pot_str[ii+1])) + return res + + @classmethod + def _get_incar_ele_temp(self, fname): + incar = Incar.from_file(fname) + return incar['SIGMA'] + + @classmethod + def _get_incar_nbands(self, fname): + incar = Incar.from_file(fname) + return incar.get('NBANDS') diff --git a/dpgen/generator/lib/gaussian.py b/dpgen/generator/lib/gaussian.py index 9ac89610e..2669e5f56 100644 --- a/dpgen/generator/lib/gaussian.py +++ b/dpgen/generator/lib/gaussian.py @@ -9,9 +9,12 @@ from scipy.sparse.csgraph import connected_components from scipy.spatial import cKDTree try: - import openbabel + from openbabel import openbabel except ImportError: - pass + try: + import openbabel + except ImportError: + pass try: from ase import Atoms, Atom from ase.data import atomic_numbers @@ -21,8 +24,8 @@ def _crd2frag(symbols, crds, pbc=False, cell=None, return_bonds=False): atomnumber = len(symbols) + all_atoms = Atoms(symbols = symbols, positions = crds, pbc=pbc, cell=cell) if pbc: - all_atoms = Atoms(symbols = symbols, positions = crds, pbc=True, cell=cell) repeated_atoms = all_atoms.repeat(2)[atomnumber:] tree = cKDTree(crds) d = tree.query(repeated_atoms.get_positions(), k=1)[0] diff --git a/dpgen/generator/lib/lammps.py b/dpgen/generator/lib/lammps.py index 30d78b78d..367817a89 100644 --- a/dpgen/generator/lib/lammps.py +++ b/dpgen/generator/lib/lammps.py @@ -28,12 +28,22 @@ def make_lammps_input(ensemble, pres = None, tau_p = 0.5, pka_e = None, + ele_temp_f = None, + ele_temp_a = None, max_seed = 1000000, deepmd_version = '0.1') : + if (ele_temp_f is not None or ele_temp_a is not None) and LooseVersion(deepmd_version) < LooseVersion('1'): + raise RuntimeError('the electron temperature is only supported by deepmd-kit >= 1.0.0, please upgrade your deepmd-kit') + if ele_temp_f is not None and ele_temp_a is not None: + raise RuntimeError('the frame style ele_temp and atom style ele_temp should not be set at the same time') ret = "variable NSTEPS equal %d\n" % nsteps ret+= "variable THERMO_FREQ equal %d\n" % trj_freq ret+= "variable DUMP_FREQ equal %d\n" % trj_freq ret+= "variable TEMP equal %f\n" % temp + if ele_temp_f is not None: + ret+= "variable ELE_TEMP equal %f\n" % ele_temp_f + if ele_temp_a is not None: + ret+= "variable ELE_TEMP equal %f\n" % ele_temp_a ret+= "variable PRES equal %f\n" % pres ret+= "variable TAU_T equal %f\n" % tau_t ret+= "variable TAU_P equal %f\n" % tau_p @@ -65,6 +75,10 @@ def make_lammps_input(ensemble, if jdata.get('use_relative', False): eps = jdata.get('eps', 0.) 
keywords += "relative %s " % jdata['epsilon'] + if ele_temp_f is not None: + keywords += "fparam ${ELE_TEMP}" + if ele_temp_a is not None: + keywords += "aparam ${ELE_TEMP}" ret+= "pair_style deepmd %s out_freq ${THERMO_FREQ} out_file model_devi.out %s\n" % (graph_list, keywords) ret+= "pair_coeff \n" ret+= "\n" diff --git a/dpgen/generator/lib/vasp.py b/dpgen/generator/lib/vasp.py index 7c22cf4e3..b6846d19e 100644 --- a/dpgen/generator/lib/vasp.py +++ b/dpgen/generator/lib/vasp.py @@ -2,76 +2,7 @@ import os import numpy as np - -def system_from_poscar(poscar) : - lines = open(poscar, 'r').read().split('\n') - system = {} - system['atom_names'] = [str(ii) for ii in lines[5].split()] - system['atom_numbs'] = [int(ii) for ii in lines[6].split()] - scale = float(lines[1]) - cell = [] - for ii in range(2,5) : - boxv = [float(jj) for jj in lines[ii].split()] - boxv = np.array(boxv) * scale - cell.append(boxv) - system['cell'] = np.array(cell) - natoms = sum(system['atom_numbs']) - coord = [] - for ii in range(8, 8+natoms) : - tmpv = [float(jj) for jj in lines[ii].split()] - tmpv = np.array(tmpv) * scale - coord.append(tmpv) - system['coordinates'] = np.array(coord) - return system - -def make_vasp_kpoints (kpoints) : - ret = "" - ret += "Automatic mesh\n" - ret += "0\n" - ret += "Gamma\n" - ret += "%d %d %d\n" % (kpoints[0], kpoints[1], kpoints[2]) - ret += "0 0 0\n" - return ret - -def _make_vasp_incar (ecut, ediff, npar, kpar, - kspacing = 0.5, kgamma = True, - smearing = None, sigma = None, - metagga = None) : - ret = '' - ret += 'PREC=A\n' - ret += 'ENCUT=%d\n' % ecut - ret += 'ISYM=0\n' - ret += 'ALGO=fast\n' - ret += 'EDIFF=%e\n' % ediff - ret += 'LREAL=A\n' - ret += 'NPAR=%d\n' % npar - ret += 'KPAR=%d\n' % kpar - ret += "\n" - ret += 'NELMIN=4\n' - ret += 'ISIF=2\n' - if smearing is not None : - ret += 'ISMEAR=%d\n' % smearing - if sigma is not None : - ret += 'SIGMA=%f\n' % sigma - ret += 'IBRION=-1\n' - ret += "\n" - ret += 'NSW=0\n' - ret += "\n" - ret += 'LWAVE=F\n' - ret += 'LCHARG=F\n' - ret += 'PSTRESS=0\n' - ret += "\n" - ret += 'KSPACING=%f\n' % kspacing - if kgamma: - ret += 'KGAMMA=.TRUE.\n' - else : - ret += 'KGAMMA=.FALSE.\n' - if metagga is not None : - ret += '\n' - ret += 'LASPH=T\n' - ret += 'METAGGA=%s\n' % metagga - return ret - +from pymatgen.io.vasp import Incar def _make_vasp_incar_dict (ecut, ediff, npar, kpar, kspacing = 0.5, kgamma = True, @@ -107,7 +38,6 @@ def _make_vasp_incar_dict (ecut, ediff, npar, kpar, incar_dict['METAGGA'] = metagga return incar_dict - def _update_incar_dict(incar_dict_, user_dict) : if user_dict is None: return incar_dict_ @@ -162,21 +92,6 @@ def _make_metagga(fp_params) : elif metagga not in [None,'SCAN', 'TPSS', 'RTPSS', 'M06L', 'MBJ'] : raise RuntimeError ("unknown metagga method " + metagga) return metagga - -def make_vasp_incar(fp_params) : - ecut = fp_params['ecut'] - ediff = fp_params['ediff'] - npar = fp_params['npar'] - kpar = fp_params['kpar'] - kspacing = fp_params['kspacing'] - smearing, sigma = _make_smearing(fp_params) - metagga = _make_metagga(fp_params) - incar = _make_vasp_incar(ecut, ediff, npar, kpar, - kspacing = kspacing, kgamma = False, - smearing = smearing, sigma = sigma, - metagga = metagga - ) - return incar def make_vasp_incar_user_dict(fp_params) : ecut = fp_params['ecut'] @@ -199,26 +114,8 @@ def make_vasp_incar_user_dict(fp_params) : incar = write_incar_dict(incar_dict) return incar -def make_vasp_kpoints_gamma (kpoints) : - ret = '' - ret += 'Automatic mesh\n' - ret += '0\n' - ret += 'Gamma\n' - ret += '%d 
%d %d\n' % (kpoints[0], kpoints[1], kpoints[2]) - ret += '0 0 0\n' - return ret - -def make_vasp_kpoints (kpoints) : - return make_vasp_kpoints_gamma(kpoints) - - -if __name__ == '__main__' : - import json - jdata = json.load(open('param.json')) - incar_1 = make_vasp_incar(jdata['fp_params']) - incar_2 = make_vasp_incar_user_dict(jdata['fp_params']) - with open('tmp1.out', 'w') as fp: - fp.write(incar_1) - with open('tmp2.out', 'w') as fp: - fp.write(incar_2) - +def incar_upper(dincar): + standard_incar={} + for key,val in dincar.items(): + standard_incar[key.upper()]=val + return Incar(standard_incar) diff --git a/dpgen/generator/run.py b/dpgen/generator/run.py index 1baab1385..7fa034773 100644 --- a/dpgen/generator/run.py +++ b/dpgen/generator/run.py @@ -16,12 +16,15 @@ import json import random import logging +import logging.handlers +import queue import warnings import shutil import time import dpdata import numpy as np import subprocess as sp +import scipy.constants as pc from collections import Counter from distutils.version import LooseVersion from dpgen import dlog @@ -36,17 +39,19 @@ from dpgen.generator.lib.lammps import make_lammps_input from dpgen.generator.lib.vasp import write_incar_dict from dpgen.generator.lib.vasp import make_vasp_incar_user_dict +from dpgen.generator.lib.vasp import incar_upper from dpgen.generator.lib.pwscf import make_pwscf_input #from dpgen.generator.lib.pwscf import cvt_1frame from dpgen.generator.lib.siesta import make_siesta_input from dpgen.generator.lib.gaussian import make_gaussian_input, take_cluster from dpgen.generator.lib.cp2k import make_cp2k_input, make_cp2k_xyz +from dpgen.generator.lib.ele_temp import NBandsEsti from dpgen.remote.RemoteJob import SSHSession, JobStatus, SlurmJob, PBSJob, LSFJob, CloudMachineJob, awsMachineJob from dpgen.remote.group_jobs import ucloud_submit_jobs, aws_submit_jobs from dpgen.remote.group_jobs import group_slurm_jobs from dpgen.remote.group_jobs import group_local_jobs from dpgen.remote.decide_machine import decide_train_machine, decide_fp_machine, decide_model_devi_machine -from dpgen.dispatcher.Dispatcher import Dispatcher +from dpgen.dispatcher.Dispatcher import Dispatcher, make_dispatcher from dpgen.util import sepline from dpgen import ROOT_PATH from pymatgen.io.vasp import Incar,Kpoints,Potcar @@ -182,6 +187,7 @@ def make_train (iter_index, init_data_sys_ = jdata['init_data_sys'] fp_task_min = jdata['fp_task_min'] model_devi_jobs = jdata['model_devi_jobs'] + use_ele_temp = jdata.get('use_ele_temp', 0) if iter_index > 0 and _check_empty_iter(iter_index-1, fp_task_min) : log_task('prev data is empty, copy prev model') @@ -268,10 +274,23 @@ def make_train (iter_index, # 0.x jinput['systems'] = init_data_sys jinput['batch_size'] = init_batch_size + if use_ele_temp: + raise RuntimeError('the electron temperature is only supported by deepmd-kit >= 1.0.0, please upgrade your deepmd-kit') else: # 1.x jinput['training']['systems'] = init_data_sys jinput['training']['batch_size'] = init_batch_size + # electron temperature + if use_ele_temp == 0: + pass + elif use_ele_temp == 1: + jinput['model']['fitting_net']['numb_fparam'] = 1 + jinput['model']['fitting_net'].pop('numb_aparam', None) + elif use_ele_temp == 2: + jinput['model']['fitting_net']['numb_aparam'] = 1 + jinput['model']['fitting_net'].pop('numb_fparam', None) + else: + raise RuntimeError('invalid setting for use_ele_temp ' + str(use_ele_temp)) for ii in range(numb_models) : task_path = os.path.join(work_path, train_task_fmt % ii) create_path(task_path) 
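Editor's note (not part of the patch): the `use_ele_temp` branch added to `make_train` above only touches the deepmd-kit 1.x input dict. A minimal sketch of that mapping, with the hypothetical helper name `set_ele_temp_fitting` introduced purely for illustration:

```python
# Hedged sketch, mirroring the logic the patch adds to make_train:
# use_ele_temp == 1 -> one frame parameter, use_ele_temp == 2 -> one atom parameter.
def set_ele_temp_fitting(jinput, use_ele_temp):
    """Adjust jinput['model']['fitting_net'] in place (deepmd-kit >= 1.0 input)."""
    fitting = jinput['model']['fitting_net']
    if use_ele_temp == 0:
        pass                                   # no electron temperature
    elif use_ele_temp == 1:
        fitting['numb_fparam'] = 1             # electron temperature as frame parameter
        fitting.pop('numb_aparam', None)
    elif use_ele_temp == 2:
        fitting['numb_aparam'] = 1             # electron temperature as atom parameter
        fitting.pop('numb_fparam', None)
    else:
        raise RuntimeError('invalid setting for use_ele_temp ' + str(use_ele_temp))
```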
@@ -478,6 +497,7 @@ def parse_cur_job(cur_job) : def make_model_devi (iter_index, jdata, mdata) : + use_ele_temp = jdata.get('use_ele_temp', 0) model_devi_dt = jdata['model_devi_dt'] model_devi_neidelay = None if 'model_devi_neidelay' in jdata : @@ -571,7 +591,29 @@ def make_model_devi (iter_index, conf_counter = 0 task_counter = 0 for cc in ss : - for tt in temps: + for tt_ in temps: + if use_ele_temp: + if type(tt_) == list: + tt = tt_[0] + if use_ele_temp == 1: + te_f = tt_[1] + te_a = None + else: + te_f = None + te_a = tt_[1] + else: + assert(type(tt_) == float or type(tt_) == int) + tt = float(tt_) + if use_ele_temp == 1: + te_f = tt + te_a = None + else: + te_f = None + te_a = tt + else : + tt = tt_ + te_f = None + te_a = None for pp in press: task_name = make_model_devi_task_name(sys_idx[sys_counter], task_counter) conf_name = make_model_devi_conf_name(sys_idx[sys_counter], conf_counter) + '.lmp' @@ -603,11 +645,17 @@ def make_model_devi (iter_index, pres = pp, tau_p = model_devi_taup, pka_e = pka_e, + ele_temp_f = te_f, + ele_temp_a = te_a, deepmd_version = deepmd_version) job = {} job["ensemble"] = ensemble job["press"] = pp job["temps"] = tt + if te_f is not None: + job["ele_temp"] = te_f + if te_a is not None: + job["ele_temp"] = te_a job["model_devi_dt"] = model_devi_dt with open('job.json', 'w') as _outfile: json.dump(job, _outfile, indent = 4) @@ -762,7 +810,7 @@ def _make_fp_vasp_inner (modd_path, # print a report fp_sum = sum(counter.values()) for cc_key, cc_value in counter.items(): - dlog.info("{}: {} {}".format(cc_key, cc_value, cc_value/fp_sum)) + dlog.info("system {0:s} {1:9s} : {2:6d} in {3:6d} {4:6.2f} %".format(ss, cc_key, cc_value, fp_sum, cc_value/fp_sum*100)) random.shuffle(fp_candidate) if detailed_report_make_fp: random.shuffle(fp_rest_failed) @@ -817,8 +865,8 @@ def _make_fp_vasp_inner (modd_path, dump_to_poscar('conf.dump', 'POSCAR', type_map) os.chdir(cwd) return fp_tasks - -def make_fp_vasp_incar(jdata, filename): + +def make_vasp_incar(jdata, filename): if 'fp_incar' in jdata.keys() : fp_incar_path = jdata['fp_incar'] assert(os.path.exists(fp_incar_path)) @@ -834,13 +882,25 @@ def make_fp_vasp_incar(jdata, filename): fp.write(incar) return incar -def _link_fp_vasp_incar (iter_index, +def make_vasp_incar_ele_temp(jdata, filename, ele_temp, nbands_esti = None): + with open(filename) as fp: + incar = fp.read() + incar = incar_upper(Incar.from_string(incar)) + incar['ISMEAR'] = -1 + incar['SIGMA'] = ele_temp * pc.Boltzmann / pc.electron_volt + incar.write_file('INCAR') + if nbands_esti is not None: + nbands = nbands_esti.predict('.') + with open(filename) as fp: + incar = Incar.from_string(fp.read()) + incar['NBANDS'] = nbands + incar.write_file('INCAR') + +def _make_fp_vasp_incar (iter_index, jdata, - incar = 'INCAR') : + nbands_esti = None) : iter_name = make_iter_name(iter_index) work_path = os.path.join(iter_name, fp_name) - incar_file = os.path.join(work_path, incar) - incar_file = os.path.abspath(incar_file) fp_tasks = glob.glob(os.path.join(work_path, 'task.*')) fp_tasks.sort() if len(fp_tasks) == 0 : @@ -848,30 +908,17 @@ def _link_fp_vasp_incar (iter_index, cwd = os.getcwd() for ii in fp_tasks: os.chdir(ii) - os.symlink(os.path.relpath(incar_file), incar) + make_vasp_incar(jdata, 'INCAR') + if os.path.exists('job.json'): + with open('job.json') as fp: + job_data = json.load(fp) + if 'ele_temp' in job_data: + make_vasp_incar_ele_temp(jdata, 'INCAR', + job_data['ele_temp'], + nbands_esti = nbands_esti) os.chdir(cwd) -def _make_fp_vasp_kp 
(iter_index,jdata, incar): - dincar=Incar.from_string(incar) - standard_incar={} - for key,val in dincar.items(): - standard_incar[key.upper()]=val - try: - kspacing = standard_incar['KSPACING'] - except: - raise RuntimeError ("KSPACING must be given in INCAR") - try: - gamma = standard_incar['KGAMMA'] - if isinstance(gamma,bool): - pass - else: - if gamma[0].upper()=="T": - gamma=True - else: - gamma=False - except: - raise RuntimeError ("KGAMMA must be given in INCAR") - +def _make_fp_vasp_kp (iter_index,jdata): iter_name = make_iter_name(iter_index) work_path = os.path.join(iter_name, fp_name) @@ -882,12 +929,35 @@ def _make_fp_vasp_kp (iter_index,jdata, incar): cwd = os.getcwd() for ii in fp_tasks: os.chdir(ii) + # get kspacing and kgamma from incar + assert(os.path.exists('INCAR')) + with open('INCAR') as fp: + incar = fp.read() + standard_incar = incar_upper(Incar.from_string(incar)) + try: + kspacing = standard_incar['KSPACING'] + except: + raise RuntimeError ("KSPACING must be given in INCAR") + try: + gamma = standard_incar['KGAMMA'] + if isinstance(gamma,bool): + pass + else: + if gamma[0].upper()=="T": + gamma=True + else: + gamma=False + except: + raise RuntimeError ("KGAMMA must be given in INCAR") + # check poscar assert(os.path.exists('POSCAR')) + # make kpoints ret=make_kspacing_kpoints('POSCAR', kspacing, gamma) kp=Kpoints.from_string(ret) kp.write_file("KPOINTS") os.chdir(cwd) + def _link_fp_vasp_pp (iter_index, jdata) : fp_pp_path = jdata['fp_pp_path'] @@ -985,23 +1055,27 @@ def _make_fp_vasp_configs(iter_index, jdata) return fp_tasks - def make_fp_vasp (iter_index, jdata) : # make config fp_tasks = _make_fp_vasp_configs(iter_index, jdata) if len(fp_tasks) == 0 : return - # all tasks share the same incar - work_path = os.path.join(make_iter_name(iter_index), fp_name) - incar_file = os.path.abspath(os.path.join(work_path, 'INCAR')) - incar_str = make_fp_vasp_incar(jdata, incar_file) - # link incar to each task folder - _link_fp_vasp_incar(iter_index, jdata) - # create potcar + # abs path for fp_incar if it exists + if 'fp_incar' in jdata: + jdata['fp_incar'] = os.path.abspath(jdata['fp_incar']) + # get nbands esti if it exists + if 'fp_nbands_esti_data' in jdata: + nbe = NBandsEsti(jdata['fp_nbands_esti_data']) + else: + nbe = None + # order is critical! 
+ # 1, create potcar sys_link_fp_vasp_pp(iter_index, jdata) - # create kpoints - _make_fp_vasp_kp(iter_index, jdata, incar_str) + # 2, create incar + _make_fp_vasp_incar(iter_index, jdata, nbands_esti = nbe) + # 3, create kpoints + _make_fp_vasp_kp(iter_index, jdata) def make_fp_pwscf(iter_index, @@ -1257,7 +1331,7 @@ def run_fp (iter_index, elif fp_style == "siesta": forward_files = ['input'] + fp_pp_files backward_files = ['output'] - run_fp_inner(iter_index, jdata, mdata, ssh_sess, forward_files, backward_files, _siesta_check_fin, log_file='output') + run_fp_inner(iter_index, jdata, mdata, dispatcher, forward_files, backward_files, _siesta_check_fin, log_file='output') elif fp_style == "gaussian": forward_files = ['input'] backward_files = ['output'] @@ -1277,6 +1351,7 @@ def post_fp_vasp (iter_index, ratio_failed = rfailed if rfailed else jdata.get('ratio_failed',0.05) model_devi_jobs = jdata['model_devi_jobs'] assert (iter_index < len(model_devi_jobs)) + use_ele_temp = jdata.get('use_ele_temp', 0) iter_name = make_iter_name(iter_index) work_path = os.path.join(iter_name, fp_name) @@ -1300,8 +1375,9 @@ def post_fp_vasp (iter_index, for ss in system_index : sys_outcars = glob.glob(os.path.join(work_path, "task.%s.*/OUTCAR"%ss)) sys_outcars.sort() - flag=True - tcount+=len(sys_outcars) + tcount += len(sys_outcars) + all_sys = None + all_te = [] for oo in sys_outcars : try: _sys = dpdata.LabeledSystem(oo, type_map = jdata['type_map']) @@ -1312,23 +1388,40 @@ def post_fp_vasp (iter_index, except: _sys = dpdata.LabeledSystem() dlog.info('Failed fp path: %s'%oo.replace('OUTCAR','')) - if len(_sys) == 1: - if flag: - all_sys = _sys - flag = False - else: - all_sys.append(_sys) + if all_sys is None: + all_sys = _sys + else: + all_sys.append(_sys) + # save ele_temp, if any + with open(oo.replace('OUTCAR', 'job.json')) as fp: + job_data = json.load(fp) + if 'ele_temp' in job_data: + assert(use_ele_temp) + ele_temp = job_data['ele_temp'] + all_te.append(ele_temp) else: - icount+=1 - - try: - # limitation --> all_sys not defined + icount+=1 + all_te = np.array(all_te) + if all_sys is not None: sys_data_path = os.path.join(work_path, 'data.%s'%ss) all_sys.to_deepmd_raw(sys_data_path) all_sys.to_deepmd_npy(sys_data_path, set_size = len(sys_outcars)) - except: - pass + if all_te.size > 0: + assert(len(all_sys) == all_sys.get_nframes()) + assert(len(all_sys) == all_te.size) + all_te = np.reshape(all_te, [-1,1]) + if use_ele_temp == 0: + raise RuntimeError('should not get ele temp at setting: use_ele_temp == 0') + elif use_ele_temp == 1: + np.savetxt(os.path.join(sys_data_path, 'fparam.raw'), all_te) + np.save(os.path.join(sys_data_path, 'set.000', 'fparam.npy'), all_te) + elif use_ele_temp == 2: + tile_te = np.tile(all_te, [1, all_sys.get_natoms()]) + np.savetxt(os.path.join(sys_data_path, 'aparam.raw'), tile_te) + np.save(os.path.join(sys_data_path, 'set.000', 'aparam.npy'), tile_te) + else: + raise RuntimeError('invalid setting of use_ele_temp ' + str(use_ele_temp)) dlog.info("failed frame number: %s "%icount) dlog.info("total frame number: %s "%tcount) @@ -1542,34 +1635,21 @@ def set_version(mdata): deepmd_version = '0.1' elif 'python_path' in mdata: deepmd_version = '1' + elif 'train' in mdata: + if 'deepmd_path' in mdata['train'][0]: + deepmd_version = '0.1' + elif 'python_path' in mdata['train'][0]: + deepmd_version = '1' + else: + deepmd_version = '0.1' else: - # default deepmd_version = '0.1' # set mdata['deepmd_version'] = deepmd_version return mdata -def make_dispatcher(mdata): - try: - 
hostname = mdata['hostname'] - context_type = 'ssh' - except: - context_type = 'local' - try: - batch_type = mdata['batch'] - except: - dlog.info('cannot find key "batch" in machine file, try to use deprecated key "machine_type"') - batch_type = mdata['machine_type'] - try: - lazy_local = mdata['lazy_local'] - except: - lazy_local = False - if lazy_local and context_type == 'local': - dlog.info('Dispatcher switches to the lazy local mode') - context_type = 'lazy-local' - disp = Dispatcher(mdata, context_type=context_type, batch_type=batch_type) - return disp + def run_iter (param_file, machine_file) : @@ -1592,6 +1672,15 @@ def run_iter (param_file, machine_file) : fmachine=SHORT_CMD+'_'+machine_file.split('.')[0]+'.'+jdata.get('pretty_format','json') dumpfn(mdata,fmachine,indent=4) + if mdata.get('handlers', None): + if mdata['handlers'].get('smtp', None): + que = queue.Queue(-1) + queue_handler = logging.handlers.QueueHandler(que) + smtp_handler = logging.handlers.SMTPHandler(**mdata['handlers']['smtp']) + listener = logging.handlers.QueueListener(que, smtp_handler) + dlog.addHandler(queue_handler) + listener.start() + max_tasks = 10000 numb_task = 9 record = "record.dpgen" diff --git a/dpgen/main.py b/dpgen/main.py index 370763a68..c3a37260f 100644 --- a/dpgen/main.py +++ b/dpgen/main.py @@ -11,6 +11,7 @@ from dpgen.data.surf import gen_init_surf from dpgen.auto_test.run import gen_test from dpgen.database.run import db_run +from dpgen.tools.run_report import run_report from dpgen import info, __version__, __date__ @@ -69,7 +70,7 @@ def main(): # run parser_run = subparsers.add_parser( "run", - help="Main process of Deep Generator.") + help="Main process of Deep Potential Generator.") parser_run.add_argument('PARAM', type=str, help="parameter file, json/yaml format") parser_run.add_argument('MACHINE', type=str, @@ -78,27 +79,39 @@ def main(): help="log debug info") parser_run.set_defaults(func=gen_run) + # run/report + parser_rr = subparsers.add_parser( + "run/report", + help="Report the systems and the thermodynamic conditions of the labeled frames.") + parser_rr.add_argument("JOB_DIR", type=str, + help="the directory of the DP-GEN job,") + parser_rr.add_argument('-s',"--stat-sys", action = 'store_true', + help="count the labeled frames for each system") + parser_rr.add_argument('-i', "--stat-iter", action= 'store_true', + help="print the iteration candidate,failed,accurate count and fp calculation,success and fail count") + parser_rr.add_argument('-t', "--stat-time", action= 'store_true', + help="print the iteration time, warning!! 
assume model_devi parallel cores == 1") + parser_rr.add_argument('-p',"--param", type=str, default = 'param.json', + help="the json file provides DP-GEN paramters, should be located in JOB_DIR") + parser_rr.add_argument('-v',"--verbose", action = 'store_true', + help="being loud") + parser_rr.set_defaults(func=run_report) + # test parser_test = subparsers.add_parser("test", help="Auto-test for Deep Potential.") parser_test.add_argument('PARAM', type=str, help="parameter file, json/yaml format") parser_test.add_argument('MACHINE', type=str, help="machine file, json/yaml format") - parser_test.set_defaults(func=gen_test) + parser_test.set_defaults(func=gen_test) # db parser_db = subparsers.add_parser( "db", - help="Collecting data from Deep Generator.") - parser_db.add_argument('PATH', type=str, - help="root path for dpgen modeling") - parser_db.add_argument('CALCULATOR', type=str, - help="calculator used for labeling: vasp/pwscf/gaussian") - parser_db.add_argument('OUTPUT', type=str, - help="output filename : file.json/file.yaml") - parser_db.add_argument("ID_PREFIX", type=str, default=None, - nargs="?", - help="prefix of an entry id") + help="Collecting data from DP-GEN.") + + parser_db.add_argument('PARAM', type=str, + help="parameter file, json format") parser_db.set_defaults(func=db_run) diff --git a/dpgen/remote/decide_machine.py b/dpgen/remote/decide_machine.py index 465207db6..3a0e3ecf3 100644 --- a/dpgen/remote/decide_machine.py +++ b/dpgen/remote/decide_machine.py @@ -21,18 +21,28 @@ def decide_train_machine(mdata): if profile['purpose'] == 'train': mdata['train_machine'] = profile['machine'] mdata['train_resources'] = profile['resources'] - mdata['deepmd_path'] = profile['deepmd_path'] + if 'deepmd_path' in profile: + mdata['deepmd_path'] = profile['deepmd_path'] + elif 'python_path' in profile: + mdata['python_path'] = profile['python_path'] if "group_size" in profile: mdata["train_group_size"] = profile["group_size"] + if 'deepmd_version' in profile: + mdata["deepmd_version"] = profile['deepmd_version'] continue_flag = True except: pass if "hostname" not in mdata["train"][0]["machine"]: mdata["train_machine"] = mdata["train"][0]["machine"] mdata["train_resources"] = mdata["train"][0]["resources"] - mdata["deepmd_path"] = mdata["train"][0]["deepmd_path"] + if 'deepmd_path' in mdata["train"][0]: + mdata["deepmd_path"] = mdata["train"][0]["deepmd_path"] + elif 'python_path' in mdata["train"][0]: + mdata["python_path"] = mdata["train"][0]["python_path"] if "group_size" in mdata["train"][0]: mdata["train_group_size"] = mdata["train"][0]["group_size"] + if 'deepmd_version' in mdata["train"][0]: + mdata["deepmd_version"] = mdata["train"][0]["deepmd_version"] continue_flag = True pd_flag = False @@ -61,7 +71,15 @@ def decide_train_machine(mdata): if pd_count ==1: mdata['train_machine'] = temp_machine mdata['train_resources'] = temp_resources - mdata['deepmd_path'] = mdata['train'][machine_idx]['deepmd_path'] + if 'deepmd_path' in mdata['train'][machine_idx]: + mdata['deepmd_path'] = mdata['train'][machine_idx]['deepmd_path'] + elif 'python_path' in mdata['train'][machine_idx]: + mdata['python_path'] = mdata['train'][machine_idx]['python_path'] + if 'group_size' in mdata['train'][machine_idx]: + mdata['train_group_size'] = mdata['train'][machine_idx]['group_size'] + if 'deepmd_version' in mdata['train'][machine_idx]: + mdata['deepmd_version'] = mdata['train'][machine_idx]['deepmd_version'] + ## No need to wait pd_flag = True break @@ -73,9 +91,14 @@ def decide_train_machine(mdata): 
min_machine_idx = np.argsort(pd_count_list)[0] mdata['train_machine'] = mdata['train'][min_machine_idx]['machine'] mdata['train_resources'] = mdata['train'][min_machine_idx]['resources'] - mdata['deepmd_path'] = mdata['train'][min_machine_idx]['deepmd_path'] + if 'deepmd_path' in mdata['train'][min_machine_idx]: + mdata['deepmd_path'] = mdata['train'][min_machine_idx]['deepmd_path'] + elif 'python_path' in mdata['train'][min_machine_idx]: + mdata['python_path'] = mdata['train'][min_machine_idx]['python_path'] if "group_size" in mdata['train'][min_machine_idx]: mdata["train_group_size"] = mdata['train'][min_machine_idx]["group_size"] + if 'deepmd_version' in mdata['train'][min_machine_idx]: + mdata['deepmd_version'] = mdata['train'][min_machine_idx]["deepmd_version"] ## Record which machine is selected with open("record.machine","w") as _outfile: @@ -83,9 +106,14 @@ def decide_train_machine(mdata): profile['purpose'] = 'train' profile['machine'] = mdata['train_machine'] profile['resources'] = mdata['train_resources'] - profile['deepmd_path'] = mdata['deepmd_path'] + if 'deepmd_path' in mdata: + profile['deepmd_path'] = mdata['deepmd_path'] + elif 'python_path' in mdata: + profile['python_path'] = mdata['python_path'] if "train_group_size" in mdata: profile["group_size"] = mdata["train_group_size"] + if 'deepmd_version' in mdata: + profile['deepmd_version'] = mdata['deepmd_version'] json.dump(profile, _outfile, indent = 4) return mdata diff --git a/dpgen/tools/__init__.py b/dpgen/tools/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/dpgen/tools/auto_gen_param.py b/dpgen/tools/auto_gen_param.py new file mode 100755 index 000000000..75faa7337 --- /dev/null +++ b/dpgen/tools/auto_gen_param.py @@ -0,0 +1,286 @@ +#%% +import os +import argparse +import json +from collections import defaultdict +from itertools import tee + +class System(object): + current_num_of_system = 0 + current_num_of_sub_systems = 0 + + @property + def index_system(self): + return self._index_system + + @index_system.setter + def index_system(self,value): + self._index_system = value + + @classmethod + def register_system(cls): + cls.current_num_of_system+=1 + return cls.current_num_of_system-1 + + @classmethod + def register_sub_system(cls): + cls.current_num_of_sub_systems+=1 + return cls.current_num_of_sub_systems-1 + + def __init__(self, system_prefix=""): + # print(files_list) + # if sum(map_relations)>len(files_list): + # raise RuntimeError( + # "files_list not enough;sum(map_relations):%s>len(files_list):%s, %s" + # % (sum(map_relations),len(files_list),files_list,)) + self.index_system = self.register_system() + self.sub_system_list = [] + self.system_prefix = system_prefix + self.current_idx2 = 0 + + def add_sub_system(self,idx2, files_list): + idx1 = self.register_sub_system() + idx2 = self.current_idx2 + self.sub_system_list.append((idx1, self.index_system, idx2, files_list)) + self.current_idx2 += 1 + + def get_sub_system(self): + return self.sub_system_list + + +class Iteration(object): + current_num_of_itearation = 0 + current_num_of_sub_itearation = 0 + + @property + def index_iteration(self): + return self._index_iteration # pylint: disable=no-member + + @index_iteration.setter + def index_iteration(self, value): + self._index_sub_iteration = value + + @classmethod + def register_iteration(cls): + cls.current_num_of_itearation+=1 + return cls.current_num_of_itearation-1 + + @classmethod + def register_sub_iteartion(cls): + cls.current_num_of_sub_itearation +=1 + return 
cls.current_num_of_sub_itearation-1 + + def __init__(self, + temps, + nsteps_list=[500, 500, 1000, 1000, 3000, 3000, 6000, 6000], + sub_iteration_num=8, + ensemble='npt', + press=[1.0, 10.0, 100.0, 1000.0, 5000.0, 10000.0, 20000.0, 50000.0], + trj_freq=10): + if len(nsteps_list) != sub_iteration_num: + raise RuntimeError(f'{nsteps_list}, {sub_iteration_num}; length does not match') + self.temps = temps + self.index_iteration = self.register_iteration() + self.nsteps_list=nsteps_list + self.sub_iteration_num=sub_iteration_num + self.ensemble=ensemble + self.press = press + self.trj_freq = trj_freq + + def gen_sub_iter(self, system_list): + sub_iter_list = [] + for idx2 in range(self.sub_iteration_num): + iter_dict = {} + iter_dict['_idx'] = self.register_sub_iteartion() + iter_dict['ensemble'] = self.ensemble + iter_dict['nsteps'] = self.nsteps_list[idx2] + iter_dict['press'] = self.press + iter_dict['sys_idx'] = [ii[0] for ii in system_list if ii[2]==idx2] + iter_dict['temps'] = self.temps + iter_dict['trj_freq'] = self.trj_freq + sub_iter_list.append(iter_dict) + return sub_iter_list + +def default_map_generator(map_list=[1,1,2,2,2,4,4,4], data_list=None): + num = 0 + # if len(data_list) < sum(map_list): + # raise RuntimeError(f'{data_list} < {map_list};not enough structure to expore, data_list_too_short!') + if (data_list is None) and ( all(el%10==0 for el in map_list) ): + for ii in map_list: + yield [f"{jj:0<5}?" for jj in range(num, num+ii//10)] + num+=(ii//10) + elif data_list: + for ii in map_list: + yield [data_list[jj] for jj in range(num, num+ii)] + num += ii + raise RuntimeError(f"{map_list} length is not enough") + # while True: + # yield [data_list[jj] for jj in range(num, num+ii)] + # num += ii + +def get_system_list(system_dict, + map_list=[1,1,2,2,2,4,4,4], + meta_iter_num=4, + sub_iteration_num=8, + map_iterator=None, + file_name="POSCAR"): + """ + :type map_iterator: Iterable use to generate sys_configs + :Exmaple [['000000', '000001',], ['00000[2-9]',], ['00001?', '000020',],] + """ + if sub_iteration_num != len(map_list): + raise RuntimeError(f"{sub_iteration_num},{map_list};sub_iteration_num does not match the length of map_list") + + system_list = [] + for system_prefix,data_list in system_dict.items(): + if map_iterator is None: + print('12', data_list) + new_map_iterator = default_map_generator(map_list=map_list, data_list=data_list) + else: + origin_one, new_map_iterator = tee(map_iterator) # pylint: disable=unused-variable + # tee means copy;new_map_generator will become a copy of map_iterator + system = System(system_prefix) + for idx2 in range(sub_iteration_num): + files_list = [os.path.join(system_prefix, jj) for jj in next(new_map_iterator)] + system.add_sub_system(idx2=idx2, files_list=files_list) + system_list.extend(system.get_sub_system()) + return system_list + +def scan_files(scan_dir="./" ,file_name="POSCAR", min_allow_files_num=20): + # will return + # files_list=[] + system_dict = defaultdict(list) + for ii in os.walk(scan_dir): + if file_name in ii[2]: + system_prefix = os.path.dirname(ii[0]) + system_suffix = os.path.basename(ii[0]) + system_dict[system_prefix].append(os.path.join(system_suffix, file_name)) + for k,v in list(system_dict.items()): + if len(v) < min_allow_files_num: + del system_dict[k] + return system_dict + +# def gen_ + +def default_temps_generator(melt_point, temps_intervel=0.1, num_temps=5): + temps = [50, ] + last_temp = 0 + for ii in range(num_temps-1): # pylint: disable=unused-variable + last_temp = last_temp + 
temps_intervel*melt_point + temps.append(last_temp) + yield temps + while True: + temps = [] + for ii in range(num_temps): + last_temp = last_temp + temps_intervel*melt_point + temps.append(last_temp) + yield temps + +def get_model_devi_jobs(melt_point, + system_list, + nsteps_list=[500, 500, 1000, 1000, 3000, 3000, 6000, 6000], + press=[1.0, 10.0, 100.0, 1000.0, 5000.0, 10000.0, 20000.0, 50000.0], + meta_iter_num=4, + sub_iteration_num=8, + temps_iterator=None, + ensemble="npt", + trj_freq=10, + temps_intervel=0.1, + num_temps=5): + + if temps_iterator is None: + temps_iterator = default_temps_generator(melt_point=melt_point, + temps_intervel=temps_intervel, num_temps=num_temps) + + if len(nsteps_list) != sub_iteration_num: + raise RuntimeError(f"{nsteps_list}, {sub_iteration_num};length do not match!") + model_devi_jobs =[] + for ii in range(meta_iter_num): # pylint: disable=unused-variable + temps = next(temps_iterator) + meta_iter = Iteration(temps=temps, + nsteps_list=nsteps_list, + sub_iteration_num=sub_iteration_num, + ensemble=ensemble, + press=press, + trj_freq=trj_freq) + model_devi_jobs.extend(meta_iter.gen_sub_iter(system_list)) + return model_devi_jobs + +def get_sys_configs(system_list): + sys_configs=[[] for ii in system_list] + for t in system_list: + sys_configs[t[0]]=t[3] + return sys_configs + +def get_init_data_sys(scan_dir='./', init_file_name='type.raw'): + + init_data_sys = [] + for t in os.walk(scan_dir): + if init_file_name in t[2]: + init_data_sys.append(t[0]) + else: + pass + return init_data_sys + + +def get_basic_param_json(melt_point, + out_param_filename='param.json', + scan_dir="./", + file_name='POSCAR', + init_file_name='type.raw', + min_allow_files_num=16, + map_list=[1,1,2,2,2,4,4,4], + meta_iter_num=4, + sub_iteration_num=8, + map_iterator=None, + nsteps_list=[500, 500, 1000, 1000, 3000, 3000, 6000, 6000], + press=[1.0, 10.0, 100.0, 1000.0, 5000.0, 10000.0, 20000.0, 50000.0], + temps_iterator=None, + ensemble="npt", + trj_freq=10, + temps_intervel=0.1, + num_temps=5,): + + init_data_sys = get_init_data_sys(scan_dir=scan_dir, init_file_name=init_file_name) + print(f"length of init_data_sys: {len(init_data_sys)} {init_data_sys}") + system_dict = scan_files(scan_dir, file_name, min_allow_files_num) + print(f"num of different systems: {len(system_dict)}") + system_list =get_system_list(system_dict, + map_list=map_list, + meta_iter_num=meta_iter_num, + sub_iteration_num=sub_iteration_num, + map_iterator=map_iterator, + file_name=file_name) + + sys_configs = get_sys_configs(system_list) + print(f"length of sys_configs: {len(sys_configs)}") + model_devi_jobs = get_model_devi_jobs(melt_point=melt_point, + system_list=system_list, + nsteps_list=nsteps_list, + press=press, + meta_iter_num=meta_iter_num, + sub_iteration_num=sub_iteration_num, + temps_iterator=temps_iterator, + ensemble=ensemble, + trj_freq=trj_freq, + temps_intervel=temps_intervel, + num_temps=num_temps) + param_dict={ + 'init_data_sys': init_data_sys, + 'sys_configs':sys_configs, + 'model_devi_jobs':model_devi_jobs + } + with open(out_param_filename, 'w') as p: + json.dump(param_dict, p, indent=4) + +def _main(): + parser = argparse.ArgumentParser(description='Collect data from inputs and generate basic param.json') + parser.add_argument("melt_point", type=float, help="melt_point") + # parser.addparser.add_argument("JOB_DIR", type=str, help="the directory of the DP-GEN job") + args = parser.parse_args() + get_basic_param_json(melt_point=args.melt_point) + +if __name__=='__main__': + _main() + 
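Editor's note (not part of the patch): a hedged usage sketch of the new `dpgen/tools/auto_gen_param.py`; the melting point value below is illustrative only.

```python
# Assumes the current directory holds the initial data (type.raw) and the
# exploration structures (POSCAR files); writes a basic param.json.
from dpgen.tools.auto_gen_param import get_basic_param_json

get_basic_param_json(melt_point=933.0,            # illustrative value, in K
                     scan_dir='./',
                     out_param_filename='param.json')
```

Running `python dpgen/tools/auto_gen_param.py 933.0` invokes `_main()` with the same defaults.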
+#%% diff --git a/dpgen/generator/tools/collect_data.py b/dpgen/tools/collect_data.py similarity index 100% rename from dpgen/generator/tools/collect_data.py rename to dpgen/tools/collect_data.py diff --git a/dpgen/generator/tools/relabel.py b/dpgen/tools/relabel.py similarity index 94% rename from dpgen/generator/tools/relabel.py rename to dpgen/tools/relabel.py index fd8fdd192..3cc3627f6 100755 --- a/dpgen/generator/tools/relabel.py +++ b/dpgen/tools/relabel.py @@ -4,14 +4,14 @@ import numpy as np import subprocess as sp sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(__file__)), '..')) -from lib.pwscf import make_pwscf_input -from lib.siesta import make_siesta_input -from lib.vasp import make_vasp_incar -from lib.vasp import system_from_poscar +from dpgen.generator.lib.pwscf import make_pwscf_input +from dpgen.generator.lib.siesta import make_siesta_input +from dpgen.generator.run import make_vasp_incar import dpdata def get_lmp_info(input_file) : - lines = [line.rstrip('\n') for line in open(input_file)] + with open(input_file) as fp: + lines = [line.rstrip('\n') for line in fp] for ii in lines : words = ii.split() if len(words) >= 4 and words[0] == 'variable' : @@ -75,7 +75,7 @@ def make_pwscf(tdir, fp_params, mass_map, fp_pp_path, fp_pp_files, user_input) : def make_siesta(tdir, fp_params, fp_pp_path, fp_pp_files) : cwd = os.getcwd() os.chdir(tdir) - sys_data = system_from_poscar('POSCAR') + sys_data = dpdata.System('POSCAR').data ret = make_siesta_input(sys_data, fp_pp_files, fp_params) open('input', 'w').write(ret) os.chdir(cwd) @@ -200,16 +200,7 @@ def create_tasks(target_folder, param_file, output, fp_json, verbose = True, num os.makedirs(output, exist_ok = True) if fp_style == 'vasp': copy_pp_files(output, fp_pp_path, fp_pp_files) - try : - fp_params = fp_jdata['fp_params'] - make_vasp(output, fp_params) - except: - fp_incar = fp_jdata['fp_incar'] - cwd_ = os.getcwd() - os.chdir(target_folder) - fp_incar = os.path.abspath(fp_incar) - os.chdir(cwd_) - make_vasp_incar(output, fp_incar) + make_vasp_incar(fp_params, output) if fp_style == 'pwscf' : copy_pp_files(output, fp_pp_path, fp_pp_files) if fp_style == 'siesta' : diff --git a/dpgen/tools/run_report.py b/dpgen/tools/run_report.py new file mode 100755 index 000000000..ec0ef6dbd --- /dev/null +++ b/dpgen/tools/run_report.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python3 + +import os,sys,json,glob,argparse,shutil +import numpy as np +import subprocess as sp +from dpgen.tools.stat_sys import stat_sys +from dpgen.tools.stat_iter import stat_iter, stat_time + + +def run_report(args): + report_count = 0 + if args.stat_sys: + stat_sys(args.JOB_DIR, args.param, args.verbose) + report_count += 1 + # other stats added in the following + if args.stat_iter: + stat_iter(args.JOB_DIR, args.param, args.verbose) + report_count += 1 + if args.stat_time: + stat_time(args.JOB_DIR, args.param, args.verbose) + report_count += 1 + if report_count == 0: + print('nothing to report, rerun with -h for help') + + return report_count diff --git a/dpgen/tools/stat_iter.py b/dpgen/tools/stat_iter.py new file mode 100644 index 000000000..228b051d4 --- /dev/null +++ b/dpgen/tools/stat_iter.py @@ -0,0 +1,77 @@ +#!/usr/bin/env python3 + +import os,sys,json +import subprocess +from collections import defaultdict + +import dpdata + +def stat_iter(target_folder, + param_file = 'param.json', + verbose = True, + mute = False): + jdata={} + with open(f"{target_folder}/{param_file}") as param_file: + jdata = json.load(param_file) + iter_dict = 
defaultdict(lambda: defaultdict(int)) + output = subprocess.run([f"wc -l {target_folder}/iter.??????/02.fp/*out", ], + shell=True,stdout=subprocess.PIPE).stdout + data = output.decode() # split(b'\n') + for line in data.split('\n'): + if 'out' in line: + num, relative_path_doc = line.strip().split(' ') + path_doc = os.path.abspath(relative_path_doc) + num = int(num) + prefix, iter_dirname, stage, out_filename = path_doc.rsplit('/',3) + pk_id, out_filename = path_doc.rsplit('/', 1) + iter = int(iter_dirname.split('.')[-1]) + out_id = int(out_filename.strip().split('.')[-2]) + out_type = out_filename.strip().split('.')[0] + iter_dict[pk_id][out_type] += num + # for ii in + output2 = subprocess.run([f"ls -d -1 {target_folder}/iter.??????/02.fp/task.*/OUTCAR", ], + shell=True,stdout=subprocess.PIPE).stdout + data2 = output2.decode() + if verbose: + # print('find OUTCAR', data2) + print("use param_jsonfile jdata['type_map']", jdata['type_map']) + for line in data2.split('\n'): + if line: + # [/home/felix/workplace/SiC/iter.000002/02.fp/task.018.000040/OUTCAR] + path_doc = os.path.abspath(line) + pk_id, task_dirname, OUTCAR_filename=path_doc.rsplit('/', 2) + try: + _sys = dpdata.LabeledSystem(path_doc, type_map = jdata['type_map'] ) + except: + try: + _sys = dpdata.LabeledSystem(path_doc.replace('OUTCAR','vasprun.xml'), type_map = jdata['type_map']) + except: + _sys = dpdata.LabeledSystem() + if len(_sys) == 1: + pass + else: + if verbose: + print('OUTCAR not label by dpdata, not convergence or unfinshed', path_doc) + iter_dict[pk_id]['OUTCAR_not_convergence'] +=1 + iter_dict[pk_id]['OUTCAR_total_count'] +=1 + for pk_id in {**iter_dict}: + if iter_dict[pk_id]['OUTCAR_total_count']: + iter_dict[pk_id]['reff']=round(iter_dict[pk_id]['OUTCAR_not_convergence']/iter_dict[pk_id]['OUTCAR_total_count'],5) + for pk_id, value in iter_dict.items(): + print(f"{pk_id}:candidate:{value['candidate']}" + f":rest_failed:{value['rest_failed']}" + f":rest_accurate:{value['rest_accurate']}" + f":OUTCAR_total_count:{value['OUTCAR_total_count']}" + f":OUTCAR_not_convergence:{value['OUTCAR_not_convergence']}" + f":reff:{value['reff']}") + +def stat_time(target_folder, + param_file = 'param.json', + verbose = True, + mute = False): + script = os.path.join(os.path.dirname(__file__), 'update_time.sh') + output = subprocess.run([f'bash {script} {target_folder}'], + shell=True,stdout=subprocess.PIPE).stdout + data = output.decode() + print(data) + diff --git a/dpgen/generator/tools/stat_data.py b/dpgen/tools/stat_sys.py old mode 100755 new mode 100644 similarity index 65% rename from dpgen/generator/tools/stat_data.py rename to dpgen/tools/stat_sys.py index 43904fdb0..83ddf75ad --- a/dpgen/generator/tools/stat_data.py +++ b/dpgen/tools/stat_sys.py @@ -4,9 +4,7 @@ import numpy as np import subprocess as sp sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..')) -from lib.vasp import make_vasp_incar -from lib.vasp import system_from_poscar -from relabel import get_lmp_info +from dpgen.tools.relabel import get_lmp_info def ascii_hist(count) : np = (count-1) // 5 + 1 @@ -15,10 +13,13 @@ def ascii_hist(count) : ret += '=' return ret -def stat_tasks(target_folder, param_file, verbose = True) : +def stat_sys(target_folder, + param_file = 'param.json', + verbose = True, + mute = False) : target_folder = os.path.abspath(target_folder) - tool_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'template') - jdata = json.load(open(os.path.join(target_folder, param_file))) + with 
open(os.path.join(target_folder, param_file)) as fp: + jdata = json.load(fp) # goto input cwd = os.getcwd() os.chdir(target_folder) @@ -39,7 +40,7 @@ def stat_tasks(target_folder, param_file, verbose = True) : for jj in iter_tasks : sys_idx = int(os.path.basename(jj).split('.')[-2]) sys_tasks_count[sys_idx] += 1 - linked_file = os.path.realpath(os.path.join(jj, 'conf.lmp')) + linked_file = os.path.realpath(os.path.join(jj, 'conf.dump')) linked_keys = linked_file.split('/') task_record = linked_keys[-5] + '.' + linked_keys[-3] + '.' + linked_keys[-1].split('.')[0] task_record_keys = task_record.split('.') @@ -67,27 +68,19 @@ def stat_tasks(target_folder, param_file, verbose = True) : str_blk += " " trait_fmt = str_blk + 'ens: %s T: %10.2f P: %12.2f count: %6d' for ii in range(numb_sys): - print(sys_fmt % (str(sys[ii]), sys_tasks_count[ii])) + if not mute: + print(sys_fmt % (str(sys[ii]), sys_tasks_count[ii])) for jj in range(len(sys_tasks_all[ii])): hist_str = ascii_hist(sys_tasks_all[ii][jj][3]) - print((trait_fmt + hist_str) % (sys_tasks_all[ii][jj][0], - sys_tasks_all[ii][jj][1], - sys_tasks_all[ii][jj][2], - sys_tasks_all[ii][jj][3])) - + if not mute: + print((trait_fmt + hist_str) % (sys_tasks_all[ii][jj][0], + sys_tasks_all[ii][jj][1], + sys_tasks_all[ii][jj][2], + sys_tasks_all[ii][jj][3])) + os.chdir(cwd) + return sys, sys_tasks_count, sys_tasks_all -def _main() : - parser = argparse.ArgumentParser(description='Some data statistics of DP-GEN iterations') - parser.add_argument("JOB_DIR", type=str, - help="the directory of the DP-GEN job") - parser.add_argument('-p',"--parameter", type=str, default = 'param.json', - help="the json file provides DP-GEN paramters, should be located in JOB_DIR") - parser.add_argument('-v',"--verbose", action = 'store_true', - help="being loud") - args = parser.parse_args() - - stat_tasks(args.JOB_DIR, args.parameter, args.verbose) +def run_report(args): + stat_tasks(args.JOB_DIR, args.param, args.verbose) -if __name__ == '__main__': - _main() diff --git a/dpgen/tools/update_time.sh b/dpgen/tools/update_time.sh new file mode 100755 index 000000000..29a08e311 --- /dev/null +++ b/dpgen/tools/update_time.sh @@ -0,0 +1,88 @@ +#!/bin/bash + +model_devi_paral_cores=1 + +if [[ -n $1 ]] +then + target_dir=$1 +else + target_dir="./" +fi + +if [[ -a time.log ]] +then + rm time.log +fi +for train_dir in `ls -d -1 $target_dir/iter.??????/00.train/`;do +sec=0 +tothour=0 +upload_task_dir_num=0 +recycle_task_file_num=0 +# echo $train_dir +upload_task_dir_num=$(ls -1 -d $train_dir/??? 
|wc -l) +if [[ -a train_time.log ]] +then + rm train_time.log +fi +grep -H --text 'wall time' $train_dir/???/train.log > train_time.log +recycle_task_file_num=$(wc -l < train_time.log) + while read line; do +mysec=$(echo "$line" |cut -d: -f4 |sed 's/s\| //g') +sec=$(echo "$mysec + $sec" | bc) + done < train_time.log +# echo $hour:$min:$sec +tothour=$(echo "scale=3; $sec/3600"|bc) +echo "00.train:$(realpath $train_dir):paral_cores:GPUV100:upload_task_dir_num:$upload_task_dir_num:recycle_task_file_num:$recycle_task_file_num:total core hour:$tothour" | tee -a time.log +done + +for model_devi_dir in `ls -d -1 $target_dir/iter.??????/01.model_devi/`;do +sec=0 +min=0 +hour=0 +tothour=0 +upload_task_dir_num=0 +recycle_task_file_num=0 +# echo $model_devi_dir +upload_task_dir_num=$(ls -1 -d $model_devi_dir/task.* |wc -l) +if [[ -a model_devi_time.log ]] +then + rm model_devi_time.log +fi +grep -H --text 'wall' $model_devi_dir/task.*/log.lammps > model_devi_time.log +recycle_task_file_num=$(wc -l < model_devi_time.log) + while read line; do +mysec=$(echo "$line" |cut -d: -f5) +sec=$(echo "$mysec + $sec" | bc) +mymin=$(echo "$line" |cut -d: -f4) +min=$(echo "$mymin + $min" | bc) +myhour=$(echo "$line" |cut -d: -f3) +hour=$(echo "$myhour + $hour" | bc) + done < model_devi_time.log +# echo $hour:$min:$sec +tothour=$(echo "scale=3; ($hour*3600+$min*60+$sec)*$model_devi_paral_cores/3600"|bc) +echo "01.model_devi:$(realpath $model_devi_dir):paral_cores:$model_devi_paral_cores:upload_task_dir_num:$upload_task_dir_num:recycle_task_file_num:$recycle_task_file_num:total core hour:$tothour" | tee -a time.log +done + +for fp_dir in `ls -d -1 $target_dir/iter.??????/02.fp/`;do +core_sec=0 +tothour=0 +upload_task_dir_num=0 +recycle_task_file_num=0 +# echo $fp_dir +upload_task_dir_num=$(ls -1 -d $fp_dir/task.* |wc -l) +if [[ -a fp_time.log ]] +then + rm fp_time.log +fi +grep -H --text 'CPU time' $fp_dir/task.*/OUTCAR > fp_time.log +recycle_task_file_num=$(wc -l < fp_time.log) + while read line;do +mysec=$(echo "$line" |cut -d: -f3 |sed 's| ||g') +file_name=$(echo "$line" | cut -d: -f1) +fp_paral_cores=$(grep 'total cores' $file_name |grep -o '[0-9]*') +core_sec=$(echo "$mysec * $fp_paral_cores + $core_sec" | bc) + done < fp_time.log +tothour=$(echo "scale=3; $core_sec/3600"|bc) +echo "02.fp:$(realpath $fp_dir):paral_cores:$fp_paral_cores:upload_task_dir_num:$upload_task_dir_num:recycle_task_file_num:$recycle_task_file_num:total core hour:$tothour" | tee -a time.log +done +wc -l $target_dir/iter.??????/02.fp/*out> candi_fail_accu.log diff --git a/examples/.DS_Store b/examples/.DS_Store new file mode 100644 index 000000000..c3ea4212b Binary files /dev/null and b/examples/.DS_Store differ diff --git a/examples/database/param_Ti.json b/examples/database/param_Ti.json new file mode 100644 index 000000000..5b222f30a --- /dev/null +++ b/examples/database/param_Ti.json @@ -0,0 +1,19 @@ +{ + "path" : "/path/to/Ti", + "calculator" : "vasp", + "_comment" : "Current only support VASP", + "output" : "./db_Ti.json", + "id_prefix" : "", + "config_info_dict" : { + "fcc-bulk" : [0,1,2,3,4,5,6,7], + "hcp-bulk" : [8,9,10,11,12,13,14,15], + "bcc-bulk" : [16,17,18,19,20,21,22,23], + "fcc-surf-100" : [24,25,26,27,28,29,30,31], + "fcc-surf-111" : [32,33,34,35,36,37,38,39], + "fcc-surf-110" : [40,41,42,43,44,45,46,47], + "hcp-surf-001" : [48,49,50,51,52,53,54,55], + "hcp-surf-100" : [56,57,58,59,60,61,62,63], + "hcp-surf-110" : [64,65,66,67,68,69,70,71] + }, + "skip_init" : true +} diff --git a/examples/machine/machine-lsf.json 
b/examples/machine/machine-lsf.json new file mode 100644 index 000000000..f10ed82fb --- /dev/null +++ b/examples/machine/machine-lsf.json @@ -0,0 +1,93 @@ +{ + "train": [ + { + "machine": { + "machine_type": "lsf", + "hostname": "localhost", + "port": 22, + "username": "ypliu", + "work_path": "/data/home/ypliu/test/deepmd-tutorial/cp2k_dpgen/dpmd" + }, + "resources": { + "_comment": "this part should be modified if GPU resources could be called directly by LSF", + "node_cpu": 4, + "numb_node": 1, + "task_per_node": 4, + "partition": "gpu", + "exclude_list": [], + "mem_limit": 11, + "source_list": [ + "/data/home/ypliu/test/deepmd-tutorial/cp2k_dpgen/source_env.sh", + "/data/home/ypliu/test/deepmd-tutorial/cp2k_dpgen/test_gpu_sub.sh" + ], + "module_list": [ + "vasp/5.4.4", + "cuda" + ], + "time_limit": "23:0:0" + }, + "deepmd_path": "/data/home/ypliu/deepmd/deepmd_root" + } + ], + "model_devi": [ + { + "machine": { + "machine_type": "lsf", + "hostname": "localhost", + "port": 22, + "username": "ypliu", + "work_path": "/data/home/ypliu/test/deepmd-tutorial/cp2k_dpgen/lammps" + }, + "resources": { + "_comment": "this part should be modified if GPU resources could be called directly by LSF", + "node_cpu": 4, + "numb_node": 1, + "task_per_node": 4, + "partition": "gpu", + "exclude_list": [], + "mem_limit": 11, + "source_list": [ + "/data/home/ypliu/test/deepmd-tutorial/cp2k_dpgen/source_env.sh", + "/data/home/ypliu/test/deepmd-tutorial/cp2k_dpgen/test_gpu_sub.sh" + ], + "module_list": [ + "vasp/5.4.4", + "cuda", + "gcc/4.9.4" + ], + "time_limit": "23:0:0" + }, + "command": "/data/home/ypliu/lammps/lammps-7Aug19/src/lmp_mpi", + "group_size": 10 + } + ], + "fp": [ + { + "machine": { + "machine_type": "lsf", + "hostname": "localhost", + "port": 22, + "username": "ypliu", + "work_path": "/data/home/ypliu/test/deepmd-tutorial/cp2k_dpgen/cp2k" + }, + "resources": { + "cvasp": false, + "task_per_node": 28, + "node_cpu": 28, + "exclude_list": [], + "mem_limit": 128, + "with_mpi": true, + "source_list": [], + "module_list": [ + "intel/17.0.1", + "mpi/intel/2017.1.132" + ], + "time_limit": "96:0:0", + "partition": "q2680v4m128", + "_comment": "that's Bel" + }, + "command": "/share/apps/cp2k-5.0/Linux-x86-64-intel-host/cp2k.popt -i input.inp", + "group_size": 5 + } + ] +} \ No newline at end of file diff --git a/examples/run/dp-lammps-vasp-et/param_elet.json b/examples/run/dp-lammps-vasp-et/param_elet.json new file mode 100644 index 000000000..b0f08c7cd --- /dev/null +++ b/examples/run/dp-lammps-vasp-et/param_elet.json @@ -0,0 +1,104 @@ +{ + "type_map": ["Al", "Mg"], + "mass_map": [27, 24], + "use_ele_temp": 2, + + "init_data_prefix": "/home/wanghan/study/deep.md/data/almgop.20/init//", + + "init_data_sys": [ + "al.fcc.01x01x01/02.md/sys-0004/deepmd", + "mg.fcc.01x01x01/02.md/sys-0004/deepmd" + ], + "init_batch_size": [1,1], + "sys_configs": [ + ["/home/wanghan/study/deep.md/data/almgop.20/init/al.fcc.02x02x02/01.scale_pert/sys-0032/scale-1.000/00000[0-4]/POSCAR"], + ["/home/wanghan/study/deep.md/data/almgop.20/init/mg.fcc.02x02x02/01.scale_pert/sys-0032/scale-1.000/00000[0-4]/POSCAR"] + ], + "sys_batch_size": [1,1], + + "_comment": " 00.train ", + "numb_models": 4, + "default_training_param" : { + "_comment": " model parameters", + "model" : { + "descriptor": { + "type": "se_a", + "sel": [90, 90], + "rcut_smth": 1.80, + "rcut": 6.00, + "neuron": [10, 20, 40], + "resnet_dt": false, + "axis_neuron": 4, + "seed": 1 + }, + "fitting_net" : { + "neuron": [120, 120, 120], + "resnet_dt": true, + "numb_fparam": 
10, + "seed": 1 + } + }, + + "loss" : { + "start_pref_e": 0.02, + "limit_pref_e": 1, + "start_pref_f": 1000, + "limit_pref_f": 1, + "start_pref_v": 0, + "limit_pref_v": 0 + }, + + "learning_rate" : { + "start_lr": 0.001, + "decay_steps": 5000, + "decay_rate": 0.95 + }, + + "_comment": " traing controls", + "training" : { + "systems": [], + "set_prefix": "set", + "stop_batch": 1000, + "batch_size": 1, + + "seed": 1, + + "_comment": " display and restart", + "_comment": " frequencies counted in batch", + "disp_file": "lcurve.out", + "disp_freq": 1000, + "numb_test": 1, + "save_freq": 1000, + "save_ckpt": "model.ckpt", + "load_ckpt": "model.ckpt", + "disp_training":true, + "time_training":true, + "profiling": false, + "profiling_file": "timeline.json" + }, + "_comment": "that's all" + }, + + "_comment": " 01.model_devi ", + "_comment": "model_devi_skip: the first x of the recorded frames", + "model_devi_dt": 0.002, + "model_devi_skip": 0, + "model_devi_f_trust_lo": 0.05, + "model_devi_f_trust_hi": 0.20, + "model_devi_e_trust_lo": 1e10, + "model_devi_e_trust_hi": 1e10, + "model_devi_clean_traj": false, + "model_devi_jobs": [ + { "_idx": 0, "ensemble": "npt", "nsteps": 50, "press": [1.0,2.0], "sys_idx": [0, 1], "temps": [50,100], "trj_freq": 10 } + ], + "_comment": " 02.fp ", + "fp_style": "vasp", + "shuffle_poscar": false, + "fp_task_max": 8, + "fp_task_min": 2, + "fp_pp_path": "/home/wanghan/study/deep.md/dpgen/almg/vasp", + "fp_pp_files": ["POTCAR.Al", "POTCAR.Mg"], + "fp_incar": "/home/wanghan/study/deep.md/dpgen/almg/vasp/INCAR", + "_comment": " that's all " +} + diff --git a/examples/run/dp_lammps_gaussian/dodecane/dodecane.json b/examples/run/dp_lammps_gaussian/dodecane/dodecane.json new file mode 100644 index 000000000..12b15f7b7 --- /dev/null +++ b/examples/run/dp_lammps_gaussian/dodecane/dodecane.json @@ -0,0 +1,92 @@ +{ + "type_map": ["C", "H"], + "mass_map": [12.011, 1.008], + "init_data_prefix": "/home/jzzeng/0719dodecane/gen/", + "init_data_sys": ["init_data"], + "init_multi_systems": true, + "init_batch_size": ["auto"], + "sys_configs": [ + ["/home/jzzeng/0719dodecane/gen/data.dodecane.atomic"] + ], + "sys_batch_size": ["auto"], + "sys_format":"lammps/lmp", + "numb_models": 4, + "train_param": "input.json", + "default_training_param" : { + "model":{ + "type_map": ["C","H"], + "descriptor":{ + "type":"se_a", + "sel": [40, 80], + "rcut_smth": 1.00, + "rcut": 6.00, + "neuron": [25, 50, 100], + "resnet_dt": false, + "axis_neuron": 12 + }, + "fitting_net":{ + "neuron": [240, 240, 240], + "resnet_dt": true + } + }, + "learning_rate":{ + "type": "exp", + "start_lr": 0.001, + "decay_steps": 400, + "decay_rate": 0.99 + }, + "loss":{ + "start_pref_e": 0.02, + "limit_pref_e": 1, + "start_pref_f": 1000, + "limit_pref_f": 1, + "start_pref_v": 0, + "limit_pref_v": 0, + "start_pref_pf": 0, + "limit_pref_pf": 0 + }, + "training":{ + "set_prefix": "set", + "stop_batch": 400000, + "disp_file": "lcurve.out", + "disp_freq": 1000, + "numb_test": 1, + "save_freq": 1000, + "save_ckpt": "model.ckpt", + "load_ckpt": "model.ckpt", + "disp_training": true, + "time_training": true, + "profiling": false, + "profiling_file": "timeline.json" + } + }, + "use_clusters": true, + "cluster_cutoff": 3.5, + "cluster_minify": true, + "use_relative": true, + "epsilon": 1.0, + "model_devi_dt": 0.0001, + "model_devi_skip": 100, + "model_devi_f_trust_lo": 0.20, + "model_devi_f_trust_hi": 0.45, + "model_devi_e_trust_lo": 1e10, + "model_devi_e_trust_hi": 1e10, + "model_devi_clean_traj": false, + "model_devi_jobs": [ + 
{"sys_idx": [0], "temps": [ 3000], "trj_freq": 10, "nsteps": 2000, "ensemble": "nvt", "_idx": "00"}, + {"sys_idx": [0], "temps": [ 3000], "trj_freq": 10, "nsteps": 4000, "ensemble": "nvt", "_idx": "01"}, + {"sys_idx": [0], "temps": [ 3000], "trj_freq": 10, "nsteps": 8000, "ensemble": "nvt", "_idx": "02"}, + {"sys_idx": [0], "temps": [ 3000], "trj_freq": 10, "nsteps": 16000, "ensemble": "nvt", "_idx": "03"} + ], + "fp_style": "gaussian", + "shuffle_poscar": false, + "fp_task_max": 1000, + "fp_task_min": 10, + "fp_pp_path": "/home/jzzeng/", + "fp_pp_files": [], + "fp_params": { + "keywords": "force mn15/6-31g**", + "nproc": 4, + "multiplicity": "auto" + } +} diff --git a/examples/test/param.json b/examples/test/deepmd_param.json similarity index 80% rename from examples/test/param.json rename to examples/test/deepmd_param.json index 2ca1c5b98..294455623 100644 --- a/examples/test/param.json +++ b/examples/test/deepmd_param.json @@ -23,21 +23,6 @@ "model_name":false, "model_param_type":false }, - "deepmd_model_dir": "somewhere/example/Al_model", - "deepmd_type_map": [ - "Al" - ], - "meam_potfile_dir": "meam", - "meam_type_map": [ - "Al", "Si", "Mg", "Cu", "Fe" - ], - "meam_potfile": [ - "library.meam", - "AlSiMgCuFe.meam" - ], - "meam_param_type": [ - "AlS", "SiS", "MgS", "CuS", "FeS" - ], "_comment":"00.equi", "store_stable":true, diff --git a/examples/test/machine.json b/examples/test/machine.json deleted file mode 100644 index d879f1e59..000000000 --- a/examples/test/machine.json +++ /dev/null @@ -1,75 +0,0 @@ -{ - "deepmd_path": "the folder of deepmd", - "train_machine": { - "machine_type": "slurm", - "hostname" : "localhost", - "port" : 22, - "username": "username", - "password": "password", - "work_path" : "the path of workplace", - "_comment" : "that's all" - }, - "train_resources": { - "numb_node": 1, - "numb_gpu": 1, - "task_per_node":7, - "source_list": [ "the path of deepmd source" ], - "module_list": [ ], - "time_limit": "23:0:0", - "mem_limit": 32, - "_comment": "that's all" - }, - - "lmp_command": "the command of lammps", - "model_devi_group_size": 10, - "_comment": "model_devi on localhost", - "model_devi_machine": { - "machine_type": "slurm", - "hostname" : "localhost", - "port" : 22, - "username": "username", - "password": "password", - "work_path" : "the path of workplace", - "_comment" : "that's all" - }, - "_comment": " if use GPU, numb_nodes(nn) should always be 1 ", - "_comment": " if numb_nodes(nn) = 1 multi-threading rather than mpi is assumed", - "model_devi_resources": { - "numb_node": 1, - "numb_gpu": 0, - "task_per_node":8, - "partition" : "partition", - "source_list": ["the path of lammps source" ], - "module_list": [ ], - "time_limit": "19:0:0", - "mem_limit": 32, - "_comment": "that's all" - }, - - "_comment": "fp on localhost ", - "fp_command": "the command of vasp", - "fp_group_size": 1, - "fp_machine": { - "machine_type": "slurm", - "hostname" : "localhost", - "port" : 22, - "username": "username", - "password": "password", - "work_path" : "the path of workplace", - "_comment" : "that's all" - }, - "fp_resources": { - "task_per_node":16, - "numb_gpu": 0, - "source_list": ["the path of source" ], - "module_list": [], - "with_mpi" : 1, - "partition" : "CPU-Node", - "time_limit": "20:0:0", - "mem_limit": 64, - "_comment": "that's all" - }, - - - "_comment": " that's all " -} diff --git a/examples/test/machine.yaml b/examples/test/machine.yaml deleted file mode 100644 index 15a43841a..000000000 --- a/examples/test/machine.yaml +++ /dev/null @@ -1,64 +0,0 @@ ---- 
-deepmd_path: the folder of deepmd -train_machine: - machine_type: slurm - hostname: localhost - port: 22 - username: username - password: password - work_path: the path of workplace - _comment: that's all -train_resources: - numb_node: 1 - numb_gpu: 1 - task_per_node: 7 - source_list: - - the path of deepmd source - module_list: [] - time_limit: '23:0:0' - mem_limit: 32 - _comment: that's all -lmp_command: the command of lammps -model_devi_group_size: 10 -_comment: " that's all " -model_devi_machine: - machine_type: slurm - hostname: localhost - port: 22 - username: username - password: password - work_path: the path of workplace - _comment: that's all -model_devi_resources: - numb_node: 1 - numb_gpu: 0 - task_per_node: 8 - partition: partition - source_list: - - the path of lammps source - module_list: [] - time_limit: '19:0:0' - mem_limit: 32 - _comment: that's all -fp_command: the command of vasp -fp_group_size: 1 -fp_machine: - machine_type: slurm - hostname: localhost - port: 22 - username: username - password: password - work_path: the path of workplace - _comment: that's all -fp_resources: - task_per_node: 16 - numb_gpu: 0 - source_list: - - the path of source - module_list: [] - with_mpi: 1 - partition: CPU-Node - time_limit: '20:0:0' - mem_limit: 64 - _comment: that's all - diff --git a/examples/test/meam_param.json b/examples/test/meam_param.json new file mode 100644 index 000000000..5beff5324 --- /dev/null +++ b/examples/test/meam_param.json @@ -0,0 +1,60 @@ +{ + "_comment": "models", + "potcar_map" : { + "Al" : "/somewhere/example/POTCAR" + }, + "conf_dir":"confs/Al/std-fcc", + "key_id":"key id of Material project", + "task_type":"meam", + "task":"all", + + "vasp_params": { + "ecut": 650, + "ediff": 1e-6, + "kspacing": 0.1, + "kgamma": false, + "npar": 1, + "kpar": 1, + "_comment": " that's all " + }, + "lammps_params": { + "model_dir":"somewhere/example/meam", + "type_map":["Al","Si","Mg","Cu","Fe"], + "model_name":["meam.AlSiMgCuFe","meam.library"], + "model_param_type":["AlS", "SiS", "MgS", "CuS", "FeS"] + }, + + "_comment":"00.equi", + "store_stable":true, + + "_comment": "01.eos", + "vol_start": 12, + "vol_end": 22, + "vol_step": 0.5, + + "_comment": "02.elastic", + "norm_deform": 2e-2, + "shear_deform": 5e-2, + + "_comment":"03.vacancy", + "supercell":[3,3,3], + + "_comment":"04.interstitial", + "insert_ele":["Al"], + "reprod-opt":false, + + "_comment": "05.surface", + "min_slab_size": 10, + "min_vacuum_size": 11, + "_comment": "pert xz to work around vasp bug...", + "pert_xz": 0.01, + "max_miller": 2, + "static-opt":false, + "relax_box":false, + + "_comment":"06.phonon", + "supercell_matrix":[2,2,2], + "band":"0 1 0 0.5 1 0.5 0.375 0.75 0.375 0 0 0 0.5 0.5 0.5", + + "_comment": "that's all" +} diff --git a/examples/test/param.yaml b/examples/test/param.yaml deleted file mode 100644 index b28017565..000000000 --- a/examples/test/param.yaml +++ /dev/null @@ -1,66 +0,0 @@ ---- -_comment: that's all -potcar_map: - Al: "/somewhere/example/POTCAR" -conf_dir: confs/Al/std-fcc -key_id: key id of Material project -task_type: deepmd -task: all -vasp_params: - ecut: 650 - ediff: 1.0e-06 - kspacing: 0.1 - kgamma: false - npar: 1 - kpar: 1 - _comment: " that's all " -lammps_params: - model_dir: somewhere/example/Al_model - type_map: - - Al - model_name: false - model_param_type: false -deepmd_model_dir: somewhere/example/Al_model -deepmd_type_map: -- Al -meam_potfile_dir: meam -meam_type_map: -- Al -- Si -- Mg -- Cu -- Fe -meam_potfile: -- library.meam -- AlSiMgCuFe.meam 
-meam_param_type: -- AlS -- SiS -- MgS -- CuS -- FeS -store_stable: true -vol_start: 12 -vol_end: 22 -vol_step: 0.5 -norm_deform: 0.02 -shear_deform: 0.05 -supercell: -- 3 -- 3 -- 3 -insert_ele: -- Al -reprod-opt: false -min_slab_size: 10 -min_vacuum_size: 11 -pert_xz: 0.01 -max_miller: 2 -static-opt: false -relax_box: false -supercell_matrix: -- 2 -- 2 -- 2 -band: 0 1 0 0.5 1 0.5 0.375 0.75 0.375 0 0 0 0.5 0.5 0.5 - diff --git a/examples/test/vasp_param.json b/examples/test/vasp_param.json new file mode 100644 index 000000000..2ec2ed9df --- /dev/null +++ b/examples/test/vasp_param.json @@ -0,0 +1,54 @@ +{ + "_comment": "models", + "potcar_map" : { + "Al" : "/somewhere/example/POTCAR" + }, + "conf_dir":"confs/Al/std-fcc", + "key_id":"key id of Material project", + "task_type":"vasp", + "task":"all", + + "vasp_params": { + "ecut": 650, + "ediff": 1e-6, + "kspacing": 0.1, + "kgamma": false, + "npar": 1, + "kpar": 1, + "_comment": " that's all " + }, + + "_comment":"00.equi", + "store_stable":true, + + "_comment": "01.eos", + "vol_start": 12, + "vol_end": 22, + "vol_step": 0.5, + + "_comment": "02.elastic", + "norm_deform": 2e-2, + "shear_deform": 5e-2, + + "_comment":"03.vacancy", + "supercell":[3,3,3], + + "_comment":"04.interstitial", + "insert_ele":["Al"], + "reprod-opt":false, + + "_comment": "05.surface", + "min_slab_size": 10, + "min_vacuum_size": 11, + "_comment": "pert xz to work around vasp bug...", + "pert_xz": 0.01, + "max_miller": 2, + "static-opt":false, + "relax_box":false, + + "_comment":"06.phonon", + "supercell_matrix":[2,2,2], + "band":"0 1 0 0.5 1 0.5 0.375 0.75 0.375 0 0 0 0.5 0.5 0.5", + + "_comment": "that's all" +} diff --git a/examples/test/vasp_poscar_param.json b/examples/test/vasp_poscar_param.json new file mode 100644 index 000000000..eeca53d7d --- /dev/null +++ b/examples/test/vasp_poscar_param.json @@ -0,0 +1,47 @@ +{ + "_comment": "models", + "potcar_map" : { + "Al" : "/somewhere/example/POTCAR" + }, + "conf_dir":"confs/Al/std-fcc", + "key_id":"key id of Material project", + "task_type":"vasp", + "task":"all", + + "relax_incar":"somewhere/relax_incar", + "scf_incar":"somewhere/scf_incar", + + "_comment":"00.equi", + "store_stable":true, + + "_comment": "01.eos", + "vol_start": 12, + "vol_end": 22, + "vol_step": 0.5, + + "_comment": "02.elastic", + "norm_deform": 2e-2, + "shear_deform": 5e-2, + + "_comment":"03.vacancy", + "supercell":[3,3,3], + + "_comment":"04.interstitial", + "insert_ele":["Al"], + "reprod-opt":false, + + "_comment": "05.surface", + "min_slab_size": 10, + "min_vacuum_size": 11, + "_comment": "pert xz to work around vasp bug...", + "pert_xz": 0.01, + "max_miller": 2, + "static-opt":false, + "relax_box":false, + + "_comment":"06.phonon", + "supercell_matrix":[2,2,2], + "band":"0 1 0 0.5 1 0.5 0.375 0.75 0.375 0 0 0 0.5 0.5 0.5", + + "_comment": "that's all" +} diff --git a/setup.py b/setup.py index 20496d7f5..c2451552d 100755 --- a/setup.py +++ b/setup.py @@ -39,8 +39,10 @@ 'dpgen/data/tools', 'dpgen/remote', 'dpgen/dispatcher', - 'dpgen/database' + 'dpgen/database', + 'dpgen/tools' ], + data_files = [('dpgen/tools/', ['dpgen/tools/update_time.sh', ])], # package_data={'example':['*.json']}, classifiers=[ "Programming Language :: Python :: 3.6", diff --git a/tests/data/surf-100.POSCAR b/tests/data/surf-100.POSCAR index f9ee253b3..22073df90 100644 --- a/tests/data/surf-100.POSCAR +++ b/tests/data/surf-100.POSCAR @@ -1,14 +1,20 @@ -Type6 -1.0 -2.899138 0.000000 0.000000 -0.000000 2.899138 0.000000 -0.000000 0.000000 16.400000 + X + 
1.0000000000000000 + 4.0999999999999996 0.0000000000000000 0.0000000000000000 + 0.0000000000000000 4.0999999999999996 0.0000000000000000 + 0.0000000000000000 0.0000000000000000 10.2520000000000007 Al -6 -direct -0.000000 0.000000 0.812500 Type0+ -0.000000 0.000000 0.312500 Type0+ -0.500000 0.500000 0.937500 Type0+ -0.000000 0.000000 0.562500 Type0+ -0.500000 0.500000 0.687500 Type0+ -0.500000 0.500000 0.437500 Type0+ +12 +Cartesian + 2.0499999999999998 2.0499999999999998 4.1010000000000000 + 0.0000000000000000 2.0499999999999998 6.1509999999999998 + 0.0000000000000000 0.0000000000000000 4.1010000000000000 + 0.0000000000000000 0.0000000000000000 8.2010000000000005 + 2.0499999999999998 2.0499999999999998 8.2010000000000005 + 2.0499999999999998 0.0000000000000000 10.2510000000000012 + 2.0499999999999998 0.0000000000000000 2.0510000000000002 + 0.0000000000000000 2.0499999999999998 10.2510000000000012 + 2.0499999999999998 0.0000000000000000 6.1509999999999998 + 2.0499999999999998 2.0499999999999998 0.0010000000000003 + 0.0000000000000000 2.0499999999999998 2.0510000000000002 + 0.0000000000000000 0.0000000000000000 0.0010000000000003 diff --git a/tests/data/surf.json b/tests/data/surf.json index 1f22f2bab..63c7bc418 100644 --- a/tests/data/surf.json +++ b/tests/data/surf.json @@ -10,7 +10,7 @@ 1, 1 ], - "z_min": 9, + "layer_numb": 3, "vacuum_max": 9, "vacuum_resol": [ 0.5, diff --git a/tests/database/data.tar.gz b/tests/database/data.tar.gz index 2f733dd0f..ad7e54d6b 100644 Binary files a/tests/database/data.tar.gz and b/tests/database/data.tar.gz differ diff --git a/tests/database/param_Al.json b/tests/database/param_Al.json new file mode 100644 index 000000000..3295d8ce8 --- /dev/null +++ b/tests/database/param_Al.json @@ -0,0 +1,19 @@ +{ + "path" : "./", + "calculator" : "vasp", + "_comment" : "vasp/pwscf/gaussian", + "output" : "dpgen_db.json", + "id_prefix" : "", + "config_info_dict" : { + "fcc-bulk" : [0,1,2,3,4,5,6,7], + "hcp-bulk" : [8,9,10,11,12,13,14,15], + "bcc-bulk" : [16,17,18,19,20,21,22,23], + "fcc-surf-100" : [24,25,26,27,28,29,30,31], + "fcc-surf-111" : [32,33,34,35,36,37,38,39], + "fcc-surf-110" : [40,41,42,43,44,45,46,47], + "hcp-surf-001" : [48,49,50,51,52,53,54,55], + "hcp-surf-100" : [56,57,58,59,60,61,62,63], + "hcp-surf-110" : [64,65,66,67,68,69,70,71] + }, + "skip_init" : true +} diff --git a/tests/database/test_db_vasp.py b/tests/database/test_db_vasp.py index 3023ab5d0..577d2f7da 100644 --- a/tests/database/test_db_vasp.py +++ b/tests/database/test_db_vasp.py @@ -1,5 +1,6 @@ import os,sys,shutil import unittest +import json import numpy as np import tarfile from glob import glob @@ -42,6 +43,11 @@ def setUp(self): self.ref_entries=loadfn(os.path.join(self.cwd,'data/entries.json')) self.init_path=sorted(glob(os.path.join(self.r_init_path,init_pat))) self.iter_path=sorted(glob(os.path.join(self.r_iter_path,iter_pat))) + with open("param_Al.json", "r") as fr: + jdata = json.load(fr) + self.config_info_dict = jdata["config_info_dict"] + self.skip_init = jdata["skip_init"] + self.output = jdata["output"] def testDPPotcar(self): @@ -111,12 +117,15 @@ def testEntry(self): self.assertEqual(ret0.entry_id,'pku-0') def testParsingVasp(self): - parsing_vasp(self.cwd,id_prefix=dpgen.SHORT_CMD) - try: - Potcar(['Al']) - ref=os.path.join(self.cwd,'data/all_data_pp.json') - except: - ref=os.path.join(self.cwd,'data/all_data.json') + parsing_vasp(self.cwd, self.config_info_dict, self.skip_init,self.output, id_prefix=dpgen.SHORT_CMD ) + #try: + # Potcar(['Al']) + # 
ref=os.path.join(self.cwd,'data/all_data_pp.json') + #except: + # ref=os.path.join(self.cwd,'data/all_data.json') + #Potcar(['Al']) + ref=os.path.join(self.cwd,'data/all_data_pp.json') + ret=os.path.join(self.cwd,'dpgen_db.json') retd=loadfn(ret) @@ -134,10 +143,14 @@ def testParsingVasp(self): self.assertEqual(len(i.composition),len(j.composition)) self.assertEqual(len(i.attribute),len(j.attribute)) os.remove(os.path.join(self.cwd,'dpgen_db.json')) - + def tearDown(self): for path in [self.r_init_path, self.r_iter_path, self.data]: if os.path.isdir(path) : shutil.rmtree(path) + if os.path.isfile("dpgen.log"): + os.remove("dpgen.log") + if os.path.isfile("record.database"): + os.remove("record.database") diff --git a/tests/generator/context.py b/tests/generator/context.py index 9137dbdfe..75baecb0d 100644 --- a/tests/generator/context.py +++ b/tests/generator/context.py @@ -3,8 +3,11 @@ sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))) from dpgen.generator.run import * from dpgen.generator.lib.gaussian import detect_multiplicity +from dpgen.generator.lib.ele_temp import NBandsEsti param_file = 'param-mg-vasp.json' +param_file_v1 = 'param-mg-vasp-v1.json' +param_file_v1_et = 'param-mg-vasp-v1-et.json' param_old_file = 'param-mg-vasp-old.json' param_pwscf_file = 'param-pyridine-pwscf.json' param_pwscf_old_file = 'param-pyridine-pwscf-old.json' @@ -12,6 +15,7 @@ param_siesta_file = 'param-pyridine-siesta.json' param_cp2k_file = 'param-pyridine-cp2k.json' machine_file = 'machine-local.json' +machine_file_v1 = 'machine-local-v1.json' param_diy_file = 'param-mg-vasp-diy.json' def my_file_cmp(test, f0, f1): diff --git a/tests/generator/machine-local-v1.json b/tests/generator/machine-local-v1.json new file mode 100644 index 000000000..7079678e8 --- /dev/null +++ b/tests/generator/machine-local-v1.json @@ -0,0 +1,49 @@ +{ + "train_machine": { + "machine_type": "shell", + "lazy_local": true + }, + "train_resources": { + "numb_node": 1, + "numb_gpu": 0, + "task_per_node": 4, + "exclude_list": [], + "source_list": ["/home/wanghan/study/deep.md/venvs/py3.6-tf1.8/bin/active"], + "envs" : { + }, + "_comment": "that's All" + }, + "python_path": "/home/wanghan/study/deep.md/venvs/py3.6-tf1.8/bin/python", + + "model_devi_machine": { + "machine_type": "shell", + "work_path": "/home/wanghan/study/deep.md/dpgen/almg/tmp" + }, + "model_devi_resources": { + "numb_node": 1, + "task_per_node": 4, + "with_mpi": true, + "module_list" : ["mpi"], + "exclude_list": [], + "source_list": [], + "_comment": "that's All" + }, + "lmp_command": "/home/wanghan/local/bin/lmp_mpi_1_1_0", + "model_devi_group_size": 10, + + "fp_machine": { + "machine_type": "shell", + "work_path": "/home/wanghan/study/deep.md/dpgen/almg/tmp" + }, + "fp_resources": { + "cvasp": false, + "task_per_node": 4, + "exclude_list": [], + "with_mpi": true, + "module_list" : ["mpi"], + "source_list": [], + "_comment": "that's All" + }, + "fp_command": "/home/wanghan/local/bin/vasp_std", + "fp_group_size": 5 +} diff --git a/tests/generator/out_data_nbands_esti/POSCAR b/tests/generator/out_data_nbands_esti/POSCAR new file mode 100644 index 000000000..7222f7ac2 --- /dev/null +++ b/tests/generator/out_data_nbands_esti/POSCAR @@ -0,0 +1,40 @@ +POSCAR file written by OVITO +0.64 + 8.0852460528260064 0.0000000700556454 0.0000000165920926 + 0.0000000700459398 8.0852460812690730 -0.0000000878331783 + 0.0000000165994710 -0.0000000878293815 8.0852460822851544 + Al + 32 +Direct + 0.2500000074218809 -0.0000000006822529 
0.2500000011219505 + 0.0000000017515228 0.4999999968593779 0.5000000121259237 + 0.0000000043319078 0.7499999907276940 0.7500000023288846 + 0.5000000030383994 0.5000000063795264 -0.0000000001750512 + 0.5000000069922398 0.2500000092556521 0.7499999925172053 + 0.7500000002384054 0.5000000058460430 0.2499999970004410 + 0.5000000100332991 0.2500000001020252 0.2500000061404412 + 0.7499999967294295 0.4999999938307768 0.7500000077731870 + -0.0000000102302524 0.2500000011729165 0.2500000007828680 + 0.2500000021034525 0.0000000026582450 0.7499999979035360 + 0.2500000064022705 0.7499999919020213 0.0000000019913002 + 0.5000000083231070 -0.0000000011197416 0.0000000014389411 + 0.2500000097291175 0.5000000129780301 0.2499999975347162 + 0.5000000055222689 0.0000000095334217 0.4999999965645718 + 0.5000000034529869 0.7499999969981677 0.2499999981901819 + 0.7499999985377536 0.7499999945443449 0.0000000074372283 + -0.0000000004748536 0.0000000005310409 0.0000000063786655 + 0.2499999999857682 0.5000000028441646 0.7500000018075699 + 0.0000000000448991 0.0000000048734801 0.5000000037789456 + 0.2500000115215270 0.2500000006209973 0.5000000113478330 + 0.7500000009516035 0.2500000011452493 0.5000000093672294 + 0.2500000006602736 0.2500000097862060 -0.0000000026740404 + -0.0000000040407868 0.7499999975356930 0.2499999972984601 + 0.7499999995103841 0.0000000012827216 0.2499999977659152 + -0.0000000041345229 0.2500000119032792 0.7499999956587747 + 0.2500000030261135 0.7499999944555688 0.5000000026313640 + 0.7499999954414514 0.2500000084865056 -0.0000000006412232 + 0.5000000035346804 0.7499999971754691 0.7500000063111742 + 0.7499999910768409 0.0000000040350684 0.7499999999698314 + -0.0000000013440370 0.5000000065441171 -0.0000000011351674 + 0.7499999970723630 0.7499999983979078 0.4999999968129619 + 0.5000000022954397 0.4999999989012168 0.5000000041503138 diff --git a/tests/generator/out_data_nbands_esti/POTCAR b/tests/generator/out_data_nbands_esti/POTCAR new file mode 100644 index 000000000..dfc902686 --- /dev/null +++ b/tests/generator/out_data_nbands_esti/POTCAR @@ -0,0 +1,5 @@ + PAW_PBE Al 04Jan2001 + 3.00000000000000 + parameters from PSCTR are: + TITEL = PAW_PBE Al 04Jan2001 + End of Dataset diff --git a/tests/generator/out_data_nbands_esti/POTCAR.dbl b/tests/generator/out_data_nbands_esti/POTCAR.dbl new file mode 100644 index 000000000..23b637d85 --- /dev/null +++ b/tests/generator/out_data_nbands_esti/POTCAR.dbl @@ -0,0 +1,8 @@ + PAW_PBE Mg_sv 12Apr2007 + 10.0000000000000 + TITEL = PAW_PBE Mg_sv 12Apr2007 + End of Dataset + PAW_PBE Al 04Jan2001 + 3.00000000000000 + TITEL = PAW_PBE Al 04Jan2001 + End of Dataset diff --git a/tests/generator/out_data_nbands_esti/md.000300K/INCAR b/tests/generator/out_data_nbands_esti/md.000300K/INCAR new file mode 100644 index 000000000..e22978528 --- /dev/null +++ b/tests/generator/out_data_nbands_esti/md.000300K/INCAR @@ -0,0 +1,33 @@ +PREC=A +ENCUT=600 +ISYM=0 +SYMPREC=1e-10 +ALGO=fast +EDIFF=1E-4 +LREAL=A +NPAR=2 +KPAR=2 + +ISTART=0 +ICHARG=2 +ISIF=2 +ISMEAR=-1 +SIGMA=0.025851991011651636 +IBRION=0 +MAXMIX=50 +NBLOCK=1 +KBLOCK=100 + +SMASS=0 +POTIM=2 +TEBEG=300 +TEEND=300 + +NSW=2000 + +LWAVE=F +LCHARG=F +PSTRESS=0 + +KSPACING=0.32 +KGAMMA=F diff --git a/tests/generator/out_data_nbands_esti/md.000300K/POSCAR b/tests/generator/out_data_nbands_esti/md.000300K/POSCAR new file mode 120000 index 000000000..fbebe8cb9 --- /dev/null +++ b/tests/generator/out_data_nbands_esti/md.000300K/POSCAR @@ -0,0 +1 @@ +../POSCAR \ No newline at end of file diff --git 
a/tests/generator/out_data_nbands_esti/md.001000K/INCAR b/tests/generator/out_data_nbands_esti/md.001000K/INCAR new file mode 100644 index 000000000..855659e1c --- /dev/null +++ b/tests/generator/out_data_nbands_esti/md.001000K/INCAR @@ -0,0 +1,33 @@ +PREC=A +ENCUT=600 +ISYM=0 +SYMPREC=1e-10 +ALGO=fast +EDIFF=1E-4 +LREAL=A +NPAR=2 +KPAR=2 + +ISTART=0 +ICHARG=2 +ISIF=2 +ISMEAR=-1 +SIGMA=0.08617330337217212 +IBRION=0 +MAXMIX=50 +NBLOCK=1 +KBLOCK=100 + +SMASS=0 +POTIM=2 +TEBEG=1000 +TEEND=1000 + +NSW=2000 + +LWAVE=F +LCHARG=F +PSTRESS=0 + +KSPACING=0.32 +KGAMMA=F diff --git a/tests/generator/out_data_nbands_esti/md.001000K/POSCAR b/tests/generator/out_data_nbands_esti/md.001000K/POSCAR new file mode 120000 index 000000000..fbebe8cb9 --- /dev/null +++ b/tests/generator/out_data_nbands_esti/md.001000K/POSCAR @@ -0,0 +1 @@ +../POSCAR \ No newline at end of file diff --git a/tests/generator/out_data_nbands_esti/md.005000K/INCAR b/tests/generator/out_data_nbands_esti/md.005000K/INCAR new file mode 100644 index 000000000..b56714e4f --- /dev/null +++ b/tests/generator/out_data_nbands_esti/md.005000K/INCAR @@ -0,0 +1,33 @@ +PREC=A +ENCUT=600 +ISYM=0 +SYMPREC=1e-10 +ALGO=fast +EDIFF=1E-4 +LREAL=A +NPAR=2 +KPAR=2 + +ISTART=0 +ICHARG=2 +ISIF=2 +ISMEAR=-1 +SIGMA=0.43086651686086064 +IBRION=0 +MAXMIX=50 +NBLOCK=1 +KBLOCK=100 + +SMASS=0 +POTIM=2 +TEBEG=5000 +TEEND=5000 + +NSW=2000 + +LWAVE=F +LCHARG=F +PSTRESS=0 + +KSPACING=0.32 +KGAMMA=F diff --git a/tests/generator/out_data_nbands_esti/md.005000K/POSCAR b/tests/generator/out_data_nbands_esti/md.005000K/POSCAR new file mode 120000 index 000000000..fbebe8cb9 --- /dev/null +++ b/tests/generator/out_data_nbands_esti/md.005000K/POSCAR @@ -0,0 +1 @@ +../POSCAR \ No newline at end of file diff --git a/tests/generator/out_data_nbands_esti/md.010000K/INCAR b/tests/generator/out_data_nbands_esti/md.010000K/INCAR new file mode 100644 index 000000000..814230bd0 --- /dev/null +++ b/tests/generator/out_data_nbands_esti/md.010000K/INCAR @@ -0,0 +1,35 @@ +PREC=A +ENCUT=600 +ISYM=0 +SYMPREC=1e-10 +ALGO=fast +EDIFF=1E-4 +LREAL=A +NPAR=2 +KPAR=2 + +ISTART=0 +ICHARG=2 +ISIF=2 +ISMEAR=-1 +SIGMA=0.8617330337217213 +IBRION=0 +MAXMIX=50 +NBLOCK=1 +KBLOCK=100 + +SMASS=0 +POTIM=1 +TEBEG=10000 +TEEND=10000 + +NSW=4000 + +LWAVE=F +LCHARG=F +PSTRESS=0 + +KSPACING=0.32 +KGAMMA=F + +NBANDS=69 diff --git a/tests/generator/out_data_nbands_esti/md.010000K/POSCAR b/tests/generator/out_data_nbands_esti/md.010000K/POSCAR new file mode 120000 index 000000000..fbebe8cb9 --- /dev/null +++ b/tests/generator/out_data_nbands_esti/md.010000K/POSCAR @@ -0,0 +1 @@ +../POSCAR \ No newline at end of file diff --git a/tests/generator/out_data_nbands_esti/md.010000K/POTCAR b/tests/generator/out_data_nbands_esti/md.010000K/POTCAR new file mode 120000 index 000000000..1ff10fc0a --- /dev/null +++ b/tests/generator/out_data_nbands_esti/md.010000K/POTCAR @@ -0,0 +1 @@ +../POTCAR \ No newline at end of file diff --git a/tests/generator/out_data_nbands_esti/md.020000K/INCAR b/tests/generator/out_data_nbands_esti/md.020000K/INCAR new file mode 100644 index 000000000..e6ae8f3e1 --- /dev/null +++ b/tests/generator/out_data_nbands_esti/md.020000K/INCAR @@ -0,0 +1,35 @@ +PREC=A +ENCUT=600 +ISYM=0 +SYMPREC=1e-10 +ALGO=fast +EDIFF=1E-4 +LREAL=A +NPAR=2 +KPAR=2 + +ISTART=0 +ICHARG=2 +ISIF=2 +ISMEAR=-1 +SIGMA=1.7234660674434426 +IBRION=0 +MAXMIX=50 +NBLOCK=1 +KBLOCK=100 + +SMASS=0 +POTIM=1 +TEBEG=20000 +TEEND=20000 + +NSW=4000 + +LWAVE=F +LCHARG=F +PSTRESS=0 + +KSPACING=0.32 +KGAMMA=F + +NBANDS=81 diff --git 
a/tests/generator/out_data_nbands_esti/md.020000K/POSCAR b/tests/generator/out_data_nbands_esti/md.020000K/POSCAR new file mode 120000 index 000000000..fbebe8cb9 --- /dev/null +++ b/tests/generator/out_data_nbands_esti/md.020000K/POSCAR @@ -0,0 +1 @@ +../POSCAR \ No newline at end of file diff --git a/tests/generator/out_data_nbands_esti/md.020000K/POTCAR b/tests/generator/out_data_nbands_esti/md.020000K/POTCAR new file mode 120000 index 000000000..1ff10fc0a --- /dev/null +++ b/tests/generator/out_data_nbands_esti/md.020000K/POTCAR @@ -0,0 +1 @@ +../POTCAR \ No newline at end of file diff --git a/tests/generator/out_data_nbands_esti/md.040000K/INCAR b/tests/generator/out_data_nbands_esti/md.040000K/INCAR new file mode 100644 index 000000000..d9eaaeee5 --- /dev/null +++ b/tests/generator/out_data_nbands_esti/md.040000K/INCAR @@ -0,0 +1,36 @@ +PREC=A +ENCUT=600 +ISYM=0 +SYMPREC=1e-10 +ALGO=fast +EDIFF=1E-4 +LREAL=A +NPAR=2 +KPAR=2 + +ISTART=0 +ICHARG=2 +ISIF=2 +ISMEAR=-1 +SIGMA=3.446932134886885 +IBRION=0 +MAXMIX=50 +NBLOCK=1 +KBLOCK=100 + +SMASS=0 +POTIM=0.5 +TEBEG=40000 +TEEND=40000 + +NSW=8000 + +LWAVE=F +LCHARG=F +PSTRESS=0 + +KSPACING=0.32 +KGAMMA=F + +NBANDS=120 + diff --git a/tests/generator/out_data_nbands_esti/md.040000K/POSCAR b/tests/generator/out_data_nbands_esti/md.040000K/POSCAR new file mode 120000 index 000000000..fbebe8cb9 --- /dev/null +++ b/tests/generator/out_data_nbands_esti/md.040000K/POSCAR @@ -0,0 +1 @@ +../POSCAR \ No newline at end of file diff --git a/tests/generator/out_data_nbands_esti/md.040000K/POTCAR b/tests/generator/out_data_nbands_esti/md.040000K/POTCAR new file mode 120000 index 000000000..1ff10fc0a --- /dev/null +++ b/tests/generator/out_data_nbands_esti/md.040000K/POTCAR @@ -0,0 +1 @@ +../POTCAR \ No newline at end of file diff --git a/tests/generator/out_data_nbands_esti/md.080000K/INCAR b/tests/generator/out_data_nbands_esti/md.080000K/INCAR new file mode 100644 index 000000000..e2fefc924 --- /dev/null +++ b/tests/generator/out_data_nbands_esti/md.080000K/INCAR @@ -0,0 +1,36 @@ +PREC=A +ENCUT=600 +ISYM=0 +SYMPREC=1e-10 +ALGO=fast +EDIFF=1E-4 +LREAL=A +NPAR=2 +KPAR=2 + +ISTART=0 +ICHARG=2 +ISIF=2 +ISMEAR=-1 +SIGMA=6.89386426977377 +IBRION=0 +MAXMIX=50 +NBLOCK=1 +KBLOCK=100 + +SMASS=0 +POTIM=0.5 +TEBEG=80000 +TEEND=80000 + +NSW=8000 + +LWAVE=F +LCHARG=F +PSTRESS=0 + +KSPACING=0.32 +KGAMMA=F + +NBANDS=180 + diff --git a/tests/generator/out_data_nbands_esti/md.080000K/POSCAR b/tests/generator/out_data_nbands_esti/md.080000K/POSCAR new file mode 120000 index 000000000..fbebe8cb9 --- /dev/null +++ b/tests/generator/out_data_nbands_esti/md.080000K/POSCAR @@ -0,0 +1 @@ +../POSCAR \ No newline at end of file diff --git a/tests/generator/out_data_nbands_esti/md.080000K/POTCAR b/tests/generator/out_data_nbands_esti/md.080000K/POTCAR new file mode 120000 index 000000000..1ff10fc0a --- /dev/null +++ b/tests/generator/out_data_nbands_esti/md.080000K/POTCAR @@ -0,0 +1 @@ +../POTCAR \ No newline at end of file diff --git a/tests/generator/out_data_nbands_esti/md.160000K/INCAR b/tests/generator/out_data_nbands_esti/md.160000K/INCAR new file mode 100644 index 000000000..bcf89cd9c --- /dev/null +++ b/tests/generator/out_data_nbands_esti/md.160000K/INCAR @@ -0,0 +1,36 @@ +PREC=A +ENCUT=600 +ISYM=0 +SYMPREC=1e-10 +ALGO=fast +EDIFF=1E-4 +LREAL=A +NPAR=4 +KPAR=2 + +ISTART=0 +ICHARG=2 +ISIF=2 +ISMEAR=-1 +SIGMA=13.78772853954754 +IBRION=0 +MAXMIX=50 +NBLOCK=1 +KBLOCK=100 + +SMASS=0 +POTIM=0.25 +TEBEG=160000 +TEEND=160000 + +NSW=8000 + +LWAVE=F +LCHARG=F +PSTRESS=0 + 
+KSPACING=0.64 +KGAMMA=F + +NBANDS=400 + diff --git a/tests/generator/out_data_nbands_esti/md.160000K/POSCAR b/tests/generator/out_data_nbands_esti/md.160000K/POSCAR new file mode 120000 index 000000000..fbebe8cb9 --- /dev/null +++ b/tests/generator/out_data_nbands_esti/md.160000K/POSCAR @@ -0,0 +1 @@ +../POSCAR \ No newline at end of file diff --git a/tests/generator/out_data_nbands_esti/md.160000K/POTCAR b/tests/generator/out_data_nbands_esti/md.160000K/POTCAR new file mode 120000 index 000000000..1ff10fc0a --- /dev/null +++ b/tests/generator/out_data_nbands_esti/md.160000K/POTCAR @@ -0,0 +1 @@ +../POTCAR \ No newline at end of file diff --git a/tests/generator/out_data_nbands_esti/md.240000K/INCAR b/tests/generator/out_data_nbands_esti/md.240000K/INCAR new file mode 100644 index 000000000..bb9f1c9f2 --- /dev/null +++ b/tests/generator/out_data_nbands_esti/md.240000K/INCAR @@ -0,0 +1,36 @@ +PREC=A +ENCUT=600 +ISYM=0 +SYMPREC=1e-10 +ALGO=fast +EDIFF=1E-4 +LREAL=A +NPAR=4 +KPAR=2 + +ISTART=0 +ICHARG=2 +ISIF=2 +ISMEAR=-1 +SIGMA=20.68159280932131 +IBRION=0 +MAXMIX=50 +NBLOCK=1 +KBLOCK=100 + +SMASS=0 +POTIM=0.25 +TEBEG=240000 +TEEND=240000 + +NSW=8000 + +LWAVE=F +LCHARG=F +PSTRESS=0 + +KSPACING=0.64 +KGAMMA=F + +NBANDS=764 + diff --git a/tests/generator/out_data_nbands_esti/md.240000K/POSCAR b/tests/generator/out_data_nbands_esti/md.240000K/POSCAR new file mode 120000 index 000000000..fbebe8cb9 --- /dev/null +++ b/tests/generator/out_data_nbands_esti/md.240000K/POSCAR @@ -0,0 +1 @@ +../POSCAR \ No newline at end of file diff --git a/tests/generator/out_data_nbands_esti/md.240000K/POTCAR b/tests/generator/out_data_nbands_esti/md.240000K/POTCAR new file mode 120000 index 000000000..1ff10fc0a --- /dev/null +++ b/tests/generator/out_data_nbands_esti/md.240000K/POTCAR @@ -0,0 +1 @@ +../POTCAR \ No newline at end of file diff --git a/tests/generator/out_data_nbands_esti/mgal/INCAR b/tests/generator/out_data_nbands_esti/mgal/INCAR new file mode 100644 index 000000000..e4c9592d5 --- /dev/null +++ b/tests/generator/out_data_nbands_esti/mgal/INCAR @@ -0,0 +1,33 @@ +PREC=A +ENCUT=600 +ISYM=0 +SYMPREC=1e-10 +ALGO=fast +EDIFF=1E-4 +LREAL=A +NPAR=2 +KPAR=2 + +ISTART=0 +ICHARG=2 +ISIF=2 +ISMEAR=-1 +SIGMA=0.025851991011651636 +IBRION=0 +MAXMIX=50 +NBLOCK=1 +KBLOCK=100 + +SMASS=0 +POTIM=2 +TEBEG=300 +TEEND=300 + +NSW=1 + +LWAVE=F +LCHARG=F +PSTRESS=0 + +KSPACING=32 +KGAMMA=F diff --git a/tests/generator/out_data_nbands_esti/mgal/POSCAR b/tests/generator/out_data_nbands_esti/mgal/POSCAR new file mode 100644 index 000000000..49d340e8d --- /dev/null +++ b/tests/generator/out_data_nbands_esti/mgal/POSCAR @@ -0,0 +1,40 @@ +POSCAR file written by OVITO +0.9 + 8.0852460528260064 0.0000000700556454 0.0000000165920926 + 0.0000000700459398 8.0852460812690730 -0.0000000878331783 + 0.0000000165994710 -0.0000000878293815 8.0852460822851544 + Mg Al + 16 16 +Direct + 0.2500000074218809 -0.0000000006822529 0.2500000011219505 + 0.0000000017515228 0.4999999968593779 0.5000000121259237 + 0.0000000043319078 0.7499999907276940 0.7500000023288846 + 0.5000000030383994 0.5000000063795264 -0.0000000001750512 + 0.5000000069922398 0.2500000092556521 0.7499999925172053 + 0.7500000002384054 0.5000000058460430 0.2499999970004410 + 0.5000000100332991 0.2500000001020252 0.2500000061404412 + 0.7499999967294295 0.4999999938307768 0.7500000077731870 + -0.0000000102302524 0.2500000011729165 0.2500000007828680 + 0.2500000021034525 0.0000000026582450 0.7499999979035360 + 0.2500000064022705 0.7499999919020213 0.0000000019913002 + 
0.5000000083231070 -0.0000000011197416 0.0000000014389411 + 0.2500000097291175 0.5000000129780301 0.2499999975347162 + 0.5000000055222689 0.0000000095334217 0.4999999965645718 + 0.5000000034529869 0.7499999969981677 0.2499999981901819 + 0.7499999985377536 0.7499999945443449 0.0000000074372283 + -0.0000000004748536 0.0000000005310409 0.0000000063786655 + 0.2499999999857682 0.5000000028441646 0.7500000018075699 + 0.0000000000448991 0.0000000048734801 0.5000000037789456 + 0.2500000115215270 0.2500000006209973 0.5000000113478330 + 0.7500000009516035 0.2500000011452493 0.5000000093672294 + 0.2500000006602736 0.2500000097862060 -0.0000000026740404 + -0.0000000040407868 0.7499999975356930 0.2499999972984601 + 0.7499999995103841 0.0000000012827216 0.2499999977659152 + -0.0000000041345229 0.2500000119032792 0.7499999956587747 + 0.2500000030261135 0.7499999944555688 0.5000000026313640 + 0.7499999954414514 0.2500000084865056 -0.0000000006412232 + 0.5000000035346804 0.7499999971754691 0.7500000063111742 + 0.7499999910768409 0.0000000040350684 0.7499999999698314 + -0.0000000013440370 0.5000000065441171 -0.0000000011351674 + 0.7499999970723630 0.7499999983979078 0.4999999968129619 + 0.5000000022954397 0.4999999989012168 0.5000000041503138 diff --git a/tests/generator/out_data_nbands_esti/mgal/POTCAR b/tests/generator/out_data_nbands_esti/mgal/POTCAR new file mode 120000 index 000000000..743ba1bb9 --- /dev/null +++ b/tests/generator/out_data_nbands_esti/mgal/POTCAR @@ -0,0 +1 @@ +../POTCAR.dbl \ No newline at end of file diff --git a/tests/generator/out_data_post_fp_vasp/02.fp/task.000.000000/job.json b/tests/generator/out_data_post_fp_vasp/02.fp/task.000.000000/job.json new file mode 100644 index 000000000..e2f74b65b --- /dev/null +++ b/tests/generator/out_data_post_fp_vasp/02.fp/task.000.000000/job.json @@ -0,0 +1 @@ +{ "ele_temp": 0 } diff --git a/tests/generator/out_data_post_fp_vasp/02.fp/task.000.000001/job.json b/tests/generator/out_data_post_fp_vasp/02.fp/task.000.000001/job.json new file mode 100644 index 000000000..ec49f8a06 --- /dev/null +++ b/tests/generator/out_data_post_fp_vasp/02.fp/task.000.000001/job.json @@ -0,0 +1 @@ +{ "ele_temp": 1 } diff --git a/tests/generator/out_data_post_fp_vasp/02.fp/task.001.000000/job.json b/tests/generator/out_data_post_fp_vasp/02.fp/task.001.000000/job.json new file mode 100644 index 000000000..c4924f0bd --- /dev/null +++ b/tests/generator/out_data_post_fp_vasp/02.fp/task.001.000000/job.json @@ -0,0 +1 @@ +{ "ele_temp": 100000 } diff --git a/tests/generator/out_data_post_fp_vasp/02.fp/task.001.000001/job.json b/tests/generator/out_data_post_fp_vasp/02.fp/task.001.000001/job.json new file mode 100644 index 000000000..a393fb733 --- /dev/null +++ b/tests/generator/out_data_post_fp_vasp/02.fp/task.001.000001/job.json @@ -0,0 +1 @@ +{ "ele_temp": 110000 } diff --git a/tests/generator/param-mg-vasp-diy.json b/tests/generator/param-mg-vasp-diy.json index b40f255b0..b7fffc41b 100644 --- a/tests/generator/param-mg-vasp-diy.json +++ b/tests/generator/param-mg-vasp-diy.json @@ -84,8 +84,9 @@ "fp_task_max": 100, "fp_task_min": 10, "fp_pp_path": ".", - "fp_pp_files": ["POTCAR.mg", "POTCAR.al"], + "fp_pp_files": ["vasp/potcars/POTCAR.mg", "vasp/potcars/POTCAR.al"], "_comment": " user provided vasp script ", - "fp_incar" : "INCAR.diy", + "fp_incar" : "vasp/INCAR.diy", + "fp_nbands_esti_data": "vasp/nbands_esti.out", "_comment": " that's all " } diff --git a/tests/generator/param-mg-vasp-old.json b/tests/generator/param-mg-vasp-old.json index e79af4fb3..cd521bbc3 100644 
--- a/tests/generator/param-mg-vasp-old.json +++ b/tests/generator/param-mg-vasp-old.json @@ -84,7 +84,7 @@ "fp_task_max": 100, "fp_task_min": 10, "fp_pp_path": ".", - "fp_pp_files": ["POTCAR.mg", "POTCAR.al"], + "fp_pp_files": ["vasp/potcars/POTCAR.mg", "vasp/potcars/POTCAR.al"], "fp_params": { "_comment": "given in unit depending on the fp method", "ecut": 600, diff --git a/tests/generator/param-mg-vasp-v1-et.json b/tests/generator/param-mg-vasp-v1-et.json new file mode 100644 index 000000000..7397eac67 --- /dev/null +++ b/tests/generator/param-mg-vasp-v1-et.json @@ -0,0 +1,98 @@ +{ + "type_map": ["Al", "Mg"], + "mass_map": [27, 24], + "use_ele_temp": 2, + + "init_data_prefix": "data", + "init_data_sys": ["deepmd" + ], + "init_batch_size": [16], + "sys_configs": [ + ["data/mg.fcc.02x02x02/01.scale_pert/sys-0032/scale*/000010/POSCAR"], + ["data/mg.fcc.02x02x02/01.scale_pert/sys-0032/scale*/00000[8-9]/POSCAR"] + ], + "sys_batch_size": [1,1], + + "_comment": " 00.train ", + "numb_models": 4, + "default_training_param" : { + "model" : { + "descriptor": { + "type": "se_a", + "sel": [90, 90], + "rcut_smth": 1.80, + "rcut": 6.00, + "neuron": [10, 20, 40], + "resnet_dt": false, + "axis_neuron": 4, + "seed": 1 + }, + "fitting_net" : { + "neuron": [120, 120, 120], + "resnet_dt": true, + "numb_fparam": 10, + "seed": 1 + } + }, + + "loss" : { + "start_pref_e": 0.02, + "limit_pref_e": 1, + "start_pref_f": 1000, + "limit_pref_f": 1, + "start_pref_v": 0, + "limit_pref_v": 0 + }, + + "learning_rate" : { + "start_lr": 0.001, + "decay_steps": 5000, + "decay_rate": 0.95 + }, + + "training" : { + "systems": [], + "set_prefix": "set", + "stop_batch": 1000, + "batch_size": 1, + + "seed": 1, + + "_comment": " display and restart", + "_comment": " frequencies counted in batch", + "disp_file": "lcurve.out", + "disp_freq": 1000, + "numb_test": 1, + "save_freq": 1000, + "save_ckpt": "model.ckpt", + "load_ckpt": "model.ckpt", + "disp_training":true, + "time_training":true, + "profiling": false, + "profiling_file": "timeline.json" + } + }, + + "_comment": " 01.model_devi ", + "_comment": "model_devi_skip: the first x of the recorded frames", + "model_devi_dt": 0.002, + "model_devi_skip": 0, + "model_devi_f_trust_lo": 0.05, + "model_devi_f_trust_hi": 0.20, + "model_devi_e_trust_lo": 1e10, + "model_devi_e_trust_hi": 1e10, + "model_devi_clean_traj": false, + "model_devi_jobs": [ + { "_idx": 0, "ensemble": "npt", "nsteps": 50, "press": [1.0,2.0], "sys_idx": [0, 1], "temps": [50,100], "trj_freq": 10 } + ], + "_comment": " 02.fp ", + "fp_style": "vasp", + "shuffle_poscar": false, + "fp_task_max": 8, + "fp_task_min": 2, + "fp_pp_path": "/home/wanghan/study/deep.md/dpgen/almg/vasp", + "fp_pp_files": ["POTCAR.Al", "POTCAR.Mg"], + "fp_incar": "/home/wanghan/study/deep.md/dpgen/almg/vasp/INCAR", + "_comment": " that's all " +} + diff --git a/tests/generator/param-mg-vasp-v1.json b/tests/generator/param-mg-vasp-v1.json new file mode 100644 index 000000000..59c953e9e --- /dev/null +++ b/tests/generator/param-mg-vasp-v1.json @@ -0,0 +1,97 @@ +{ + "type_map": ["Al", "Mg"], + "mass_map": [27, 24], + "use_ele_temp": 0, + + "init_data_prefix": "data", + "init_data_sys": ["deepmd" + ], + "init_batch_size": [16], + "sys_configs": [ + ["data/mg.fcc.02x02x02/01.scale_pert/sys-0032/scale*/000010/POSCAR"], + ["data/mg.fcc.02x02x02/01.scale_pert/sys-0032/scale*/00000[8-9]/POSCAR"] + ], + "sys_batch_size": [1,1], + + "_comment": " 00.train ", + "numb_models": 4, + "default_training_param" : { + "model" : { + "descriptor": { + "type": "se_a", 
+ "sel": [90, 90], + "rcut_smth": 1.80, + "rcut": 6.00, + "neuron": [10, 20, 40], + "resnet_dt": false, + "axis_neuron": 4, + "seed": 1 + }, + "fitting_net" : { + "neuron": [120, 120, 120], + "resnet_dt": true, + "seed": 1 + } + }, + + "loss" : { + "start_pref_e": 0.02, + "limit_pref_e": 1, + "start_pref_f": 1000, + "limit_pref_f": 1, + "start_pref_v": 0, + "limit_pref_v": 0 + }, + + "learning_rate" : { + "start_lr": 0.001, + "decay_steps": 5000, + "decay_rate": 0.95 + }, + + "training" : { + "systems": [], + "set_prefix": "set", + "stop_batch": 1000, + "batch_size": 1, + + "seed": 1, + + "_comment": " display and restart", + "_comment": " frequencies counted in batch", + "disp_file": "lcurve.out", + "disp_freq": 1000, + "numb_test": 1, + "save_freq": 1000, + "save_ckpt": "model.ckpt", + "load_ckpt": "model.ckpt", + "disp_training":true, + "time_training":true, + "profiling": false, + "profiling_file": "timeline.json" + } + }, + + "_comment": " 01.model_devi ", + "_comment": "model_devi_skip: the first x of the recorded frames", + "model_devi_dt": 0.002, + "model_devi_skip": 0, + "model_devi_f_trust_lo": 0.05, + "model_devi_f_trust_hi": 0.20, + "model_devi_e_trust_lo": 1e10, + "model_devi_e_trust_hi": 1e10, + "model_devi_clean_traj": false, + "model_devi_jobs": [ + { "_idx": 0, "ensemble": "npt", "nsteps": 50, "press": [1.0,2.0], "sys_idx": [0, 1], "temps": [50,100], "trj_freq": 10 } + ], + "_comment": " 02.fp ", + "fp_style": "vasp", + "shuffle_poscar": false, + "fp_task_max": 8, + "fp_task_min": 2, + "fp_pp_path": "/home/wanghan/study/deep.md/dpgen/almg/vasp", + "fp_pp_files": ["POTCAR.Al", "POTCAR.Mg"], + "fp_incar": "/home/wanghan/study/deep.md/dpgen/almg/vasp/INCAR", + "_comment": " that's all " +} + diff --git a/tests/generator/param-mg-vasp.json b/tests/generator/param-mg-vasp.json index eba99eabe..908be9afb 100644 --- a/tests/generator/param-mg-vasp.json +++ b/tests/generator/param-mg-vasp.json @@ -84,7 +84,7 @@ "fp_task_max": 100, "fp_task_min": 10, "fp_pp_path": ".", - "fp_pp_files": ["POTCAR.mg", "POTCAR.al"], + "fp_pp_files": ["vasp/potcars/POTCAR.mg", "vasp/potcars/POTCAR.al"], "_comment": " user provided vasp script ", "user_fp_params": { "PREC": "A", diff --git a/tests/generator/test_make_fp.py b/tests/generator/test_make_fp.py index 2af722650..20fdc51dc 100644 --- a/tests/generator/test_make_fp.py +++ b/tests/generator/test_make_fp.py @@ -25,6 +25,7 @@ from .comp_sys import test_coord from .comp_sys import test_cell from pymatgen.io.vasp import Kpoints,Incar +import scipy.constants as pc vasp_incar_ref = "PREC=A\n\ ENCUT=600\n\ @@ -46,6 +47,26 @@ KSPACING=0.16\n\ KGAMMA=F\n"; +vasp_incar_ele_temp_ref = "PREC=A\n\ +ENCUT=600\n\ +ISYM=0\n\ +ALGO=fast\n\ +EDIFF=1e-05\n\ +LREAL=A\n\ +NPAR=1\n\ +KPAR=1\n\ +NELMIN=4\n\ +ISIF=2\n\ +ISMEAR=-1\n\ +SIGMA=%.10f\n\ +IBRION=-1\n\ +NSW=0\n\ +LWAVE=F\n\ +LCHARG=F\n\ +PSTRESS=0\n\ +KSPACING=0.16\n\ +KGAMMA=F\n"; + pwscf_input_ref="&control\n\ calculation='scf',\n\ restart_mode='from_scratch',\n\ @@ -224,10 +245,12 @@ def _write_lammps_dump(sys, dump_file, f_idx = 0) : fp.write('%d %d %f %f %f\n' % (ii+1, atype[ii]+1, coord[ii][0], coord[ii][1], coord[ii][2])) -def _make_fake_md(idx, md_descript, atom_types, type_map) : +def _make_fake_md(idx, md_descript, atom_types, type_map, ele_temp = None) : """ md_descript: list of dimension [n_sys][n_MD][n_frame] + ele_temp: list of dimension + [n_sys][n_MD] """ natoms = len(atom_types) ntypes = len(type_map) @@ -257,6 +280,9 @@ def _make_fake_md(idx, md_descript, atom_types, type_map) : md_out[:,0] 
= np.arange(nframes) md_out[:,4] = mm np.savetxt(os.path.join(task_dir, 'model_devi.out'), md_out) + if ele_temp is not None: + with open(os.path.join(task_dir, 'job.json'), 'w') as fp: + json.dump({"ele_temp": ele_temp[sidx][midx]}, fp) def _check_poscars(testCase, idx, fp_task_max, type_map) : @@ -312,7 +338,7 @@ def _check_kpoints(testCase, idx) : def _check_incar_exists(testCase, idx) : fp_path = os.path.join('iter.%06d' % idx, '02.fp') - testCase.assertTrue(os.path.isfile(os.path.join(fp_path, 'INCAR'))) + # testCase.assertTrue(os.path.isfile(os.path.join(fp_path, 'INCAR'))) tasks = glob.glob(os.path.join(fp_path, 'task.*')) for ii in tasks : my_file_cmp(testCase, @@ -356,10 +382,35 @@ def _check_sel(testCase, idx, fp_task_max, flo, fhi): def _check_incar(testCase, idx): fp_path = os.path.join('iter.%06d' % idx, '02.fp') - with open(os.path.join(fp_path, 'INCAR')) as fp: - incar = fp.read() - testCase.assertEqual(incar.strip(), vasp_incar_ref.strip()) - + tasks = glob.glob(os.path.join(fp_path, 'task.*')) + cwd = os.getcwd() + for ii in tasks : + os.chdir(ii) + with open('INCAR') as fp: + incar = fp.read() + testCase.assertEqual(incar.strip(), vasp_incar_ref.strip()) + os.chdir(cwd) + +def _check_incar_ele_temp(testCase, idx, ele_temp): + fp_path = os.path.join('iter.%06d' % idx, '02.fp') + tasks = glob.glob(os.path.join(fp_path, 'task.*')) + cwd = os.getcwd() + for ii in tasks : + os.chdir(ii) + bname = os.path.basename(ii) + sidx = int(bname.split('.')[1]) + tidx = int(bname.split('.')[2]) + with open('INCAR') as fp: + incar = fp.read() + incar0 = Incar.from_string(incar) + # make_fake_md: the frames in a system shares the same ele_temp + incar1 = Incar.from_string(vasp_incar_ele_temp_ref%(ele_temp[sidx][0] * pc.Boltzmann / pc.electron_volt)) + for ii in incar0.keys(): + # skip checking nbands... 
+ if ii == 'NBANDS': + continue + testCase.assertAlmostEqual(incar0[ii], incar1[ii], msg = 'key %s differ' % (ii), places = 5) + os.chdir(cwd) def _check_pwscf_input_head(testCase, idx) : fp_path = os.path.join('iter.%06d' % idx, '02.fp') @@ -525,7 +576,7 @@ def test_make_fp_vasp(self): make_fp(0, jdata, {}) _check_sel(self, 0, jdata['fp_task_max'], jdata['model_devi_f_trust_lo'], jdata['model_devi_f_trust_hi']) _check_poscars(self, 0, jdata['fp_task_max'], jdata['type_map']) - _check_incar_exists(self, 0) + # _check_incar_exists(self, 0) _check_incar(self, 0) _check_kpoints_exists(self, 0) _check_kpoints(self,0) @@ -555,7 +606,7 @@ def test_make_fp_vasp_old(self): make_fp(0, jdata, {}) _check_sel(self, 0, jdata['fp_task_max'], jdata['model_devi_f_trust_lo'], jdata['model_devi_f_trust_hi']) _check_poscars(self, 0, jdata['fp_task_max'], jdata['type_map']) - _check_incar_exists(self, 0) + # _check_incar_exists(self, 0) _check_incar(self, 0) _check_kpoints_exists(self, 0) _check_kpoints(self,0) @@ -585,7 +636,7 @@ def test_make_fp_vasp_less_sel(self): make_fp(0, jdata, {}) _check_sel(self, 0, jdata['fp_task_max'], jdata['model_devi_f_trust_lo'], jdata['model_devi_f_trust_hi']) _check_poscars(self, 0, jdata['fp_task_max'], jdata['type_map']) - _check_incar_exists(self, 0) + # _check_incar_exists(self, 0) _check_incar(self, 0) _check_kpoints_exists(self, 0) _check_kpoints(self,0) @@ -619,7 +670,7 @@ def test_make_fp_vasp_from_incar(self): make_fp(0, jdata, {}) _check_sel(self, 0, jdata['fp_task_max'], jdata['model_devi_f_trust_lo'], jdata['model_devi_f_trust_hi']) _check_poscars(self, 0, jdata['fp_task_max'], jdata['type_map']) - _check_incar_exists(self, 0) + # _check_incar_exists(self, 0) _check_incar(self, 0) _check_kpoints_exists(self, 0) _check_kpoints(self,0) @@ -627,6 +678,40 @@ def test_make_fp_vasp_from_incar(self): # _check_potcar(self, 0, jdata['fp_pp_path'], jdata['fp_pp_files']) shutil.rmtree('iter.000000') + def test_make_fp_vasp_ele_temp(self): + ## Verify if user chooses to diy VASP INCAR totally. 
+ if os.path.isdir('iter.000000') : + shutil.rmtree('iter.000000') + with open (param_diy_file, 'r') as fp : + jdata = json.load (fp) + fp.close() + with open (machine_file, 'r') as fp: + mdata = json.load (fp) + fp.close() + md_descript = [] + ele_temp = [] + nsys = 2 + nmd = 3 + n_frame = 10 + for ii in range(nsys) : + tmp = [] + for jj in range(nmd) : + tmp.append(np.arange(0, 0.29, 0.29/10)) + md_descript.append(tmp) + ele_temp.append([np.random.random() * 100000] * nmd) + atom_types = [0, 1, 0, 1] + type_map = jdata['type_map'] + _make_fake_md(0, md_descript, atom_types, type_map, ele_temp = ele_temp) + make_fp(0, jdata, {}) + _check_sel(self, 0, jdata['fp_task_max'], jdata['model_devi_f_trust_lo'], jdata['model_devi_f_trust_hi']) + _check_poscars(self, 0, jdata['fp_task_max'], jdata['type_map']) + _check_incar_ele_temp(self, 0, ele_temp) + _check_kpoints_exists(self, 0) + _check_kpoints(self,0) + # checked elsewhere + # _check_potcar(self, 0, jdata['fp_pp_path'], jdata['fp_pp_files']) + shutil.rmtree('iter.000000') + class TestMakeFPGaussian(unittest.TestCase): def test_make_fp_gaussian(self): diff --git a/tests/generator/test_make_train.py b/tests/generator/test_make_train.py index c402e3e00..c64487f55 100644 --- a/tests/generator/test_make_train.py +++ b/tests/generator/test_make_train.py @@ -8,7 +8,10 @@ __package__ = 'generator' from .context import make_train from .context import param_file +from .context import param_file_v1 +from .context import param_file_v1_et from .context import machine_file +from .context import machine_file_v1 from .context import setUpModule def _comp_sys_files (sys0, sys1) : @@ -48,7 +51,7 @@ def _check_numb_models(testCase, iter_idx, numb_models) : def _check_model_inputs(testCase, iter_idx, jdata) : - train_param = jdata['train_param'] + train_param = jdata.get('train_param', 'input.json') numb_models = jdata['numb_models'] default_training_param = jdata['default_training_param'] init_data_sys = [os.path.join('..', 'data.init', ii) for ii in jdata['init_data_sys']] @@ -79,6 +82,59 @@ def _check_model_inputs(testCase, iter_idx, jdata) : else : testCase.assertEqual(jdata0[ii], default_training_param[ii]) +def _check_model_input_dict(testCase, input_dict, init_data_sys, init_batch_size, default_training_param): + for ii in input_dict.keys() : + if ii == 'systems' : + for jj,kk in zip(input_dict[ii], init_data_sys): + testCase.assertEqual(jj, kk) + elif ii == 'batch_size' : + for jj, kk in zip(input_dict[ii], init_batch_size) : + testCase.assertEqual(jj, kk) + elif ii == 'seed': + # can be anything + pass + elif ii == 'numb_fparam': + testCase.assertEqual(input_dict[ii], 1) + elif ii == 'numb_aparam': + testCase.assertEqual(input_dict[ii], 1) + else : + testCase.assertEqual(input_dict[ii], default_training_param[ii]) + + +def _check_model_inputs_v1(testCase, iter_idx, jdata) : + train_param = jdata.get('train_param', 'input.json') + numb_models = jdata['numb_models'] + use_ele_temp = jdata.get('use_ele_temp', 0) + default_training_param = jdata['default_training_param'] + init_data_sys = [os.path.join('..', 'data.init', ii) for ii in jdata['init_data_sys']] + init_batch_size = jdata['init_batch_size'] + sys_batch_size = jdata['sys_batch_size'] + if iter_idx > 0 : + systems = glob.glob(os.path.join('iter.*', '02.fp', 'data.*')) + for ii in systems : + init_data_sys.append(os.path.join('..', 'data.iters', ii)) + sys_idx = int(os.path.basename(ii).split('.')[1]) + init_batch_size.append(sys_batch_size[sys_idx]) + for kk in range(numb_models) : + with 
open(os.path.join('iter.%06d' % iter_idx, + '00.train', + '%03d' % kk, + train_param)) as fp : + jdata0 = json.load(fp) + # keys except 'systems', 'batch_size', 'seed' should be identical + if use_ele_temp == 1: + testCase.assertTrue('numb_fparam' in jdata0['model']['fitting_net']) + testCase.assertFalse('numb_aparam' in jdata0['model']['fitting_net']) + if use_ele_temp == 2: + testCase.assertTrue('numb_aparam' in jdata0['model']['fitting_net']) + testCase.assertFalse('numb_fparam' in jdata0['model']['fitting_net']) + _check_model_input_dict(testCase, jdata0['model']['descriptor'], init_data_sys, init_batch_size, default_training_param['model']['descriptor']) + _check_model_input_dict(testCase, jdata0['model']['fitting_net'], init_data_sys, init_batch_size, default_training_param['model']['fitting_net']) + _check_model_input_dict(testCase, jdata0['loss'], init_data_sys, init_batch_size, default_training_param['loss']) + _check_model_input_dict(testCase, jdata0['learning_rate'], init_data_sys, init_batch_size, default_training_param['learning_rate']) + _check_model_input_dict(testCase, jdata0['training'], init_data_sys, init_batch_size, default_training_param['training']) + + def _make_fake_fp(iter_idx, sys_idx, nframes): for ii in range(nframes) : dirname = os.path.join('iter.%06d' % iter_idx, @@ -159,6 +215,44 @@ def test_1_skip(self): # remove testing dirs shutil.rmtree('iter.000001') shutil.rmtree('iter.000000') + + + def test_1_data_v1(self) : + with open (param_file_v1, 'r') as fp : + jdata = json.load (fp) + jdata.pop('use_ele_temp', None) + with open (machine_file_v1, 'r') as fp: + mdata = json.load (fp) + make_train(0, jdata, mdata) + # make fake fp results #data == fp_task_min + _make_fake_fp(0, 0, jdata['fp_task_min']) + # make iter1 train + make_train(1, jdata, mdata) + # check data is linked + self.assertTrue(os.path.isdir(os.path.join('iter.000001', '00.train', 'data.iters', 'iter.000000', '02.fp'))) + # check models inputs + _check_model_inputs_v1(self, 1, jdata) + # remove testing dirs + shutil.rmtree('iter.000001') + shutil.rmtree('iter.000000') + + def test_1_data_v1_eletron_temp(self) : + with open (param_file_v1_et, 'r') as fp : + jdata = json.load (fp) + with open (machine_file_v1, 'r') as fp: + mdata = json.load (fp) + make_train(0, jdata, mdata) + # make fake fp results #data == fp_task_min + _make_fake_fp(0, 0, jdata['fp_task_min']) + # make iter1 train + make_train(1, jdata, mdata) + # check data is linked + self.assertTrue(os.path.isdir(os.path.join('iter.000001', '00.train', 'data.iters', 'iter.000000', '02.fp'))) + # check models inputs + _check_model_inputs_v1(self, 1, jdata) + # remove testing dirs + shutil.rmtree('iter.000001') + shutil.rmtree('iter.000000') if __name__ == '__main__': diff --git a/tests/generator/test_nbands_esti.py b/tests/generator/test_nbands_esti.py new file mode 100644 index 000000000..3e1c39756 --- /dev/null +++ b/tests/generator/test_nbands_esti.py @@ -0,0 +1,75 @@ +import os,sys +import dpdata +import numpy as np +import unittest +import importlib + +sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))) +__package__ = 'generator' +from .context import NBandsEsti + +class TestNBandsEsti(unittest.TestCase): + def test_predict(self): + self.nbe = NBandsEsti(['out_data_nbands_esti/md.010000K', + 'out_data_nbands_esti/md.020000K', + 'out_data_nbands_esti/md.040000K', + 'out_data_nbands_esti/md.080000K', + 'out_data_nbands_esti/md.160000K', + ]) + 
self.assertEqual(self.nbe.predict('out_data_nbands_esti/md.010000K'), 72) + self.assertEqual(self.nbe.predict('out_data_nbands_esti/md.020000K'), 83) + self.assertEqual(self.nbe.predict('out_data_nbands_esti/md.040000K'), 112) + self.assertEqual(self.nbe.predict('out_data_nbands_esti/md.080000K'), 195) + self.assertEqual(self.nbe.predict('out_data_nbands_esti/md.160000K'), 429) + self.assertEqual(self.nbe.predict('out_data_nbands_esti/md.240000K'), 732) + + def test_save_load(self): + self.nbe2 = NBandsEsti(['out_data_nbands_esti/md.010000K', + 'out_data_nbands_esti/md.020000K', + 'out_data_nbands_esti/md.040000K', + 'out_data_nbands_esti/md.080000K', + 'out_data_nbands_esti/md.160000K', + ]) + self.nbe2.save('tmp.log') + self.nbe = NBandsEsti('tmp.log') + self.assertEqual(self.nbe.predict('out_data_nbands_esti/md.010000K'), 72) + self.assertEqual(self.nbe.predict('out_data_nbands_esti/md.020000K'), 83) + self.assertEqual(self.nbe.predict('out_data_nbands_esti/md.040000K'), 112) + self.assertEqual(self.nbe.predict('out_data_nbands_esti/md.080000K'), 195) + self.assertEqual(self.nbe.predict('out_data_nbands_esti/md.160000K'), 429) + self.assertEqual(self.nbe.predict('out_data_nbands_esti/md.240000K'), 732) + os.remove('tmp.log') + + def test_get_default_nbands(self): + res = NBandsEsti._get_res('out_data_nbands_esti/md.020000K/') + nb = NBandsEsti._get_default_nbands(res) + self.assertEqual(nb, 66) + + def test_get_default_nbands_mgal(self): + res = NBandsEsti._get_res('out_data_nbands_esti/mgal/') + nb = NBandsEsti._get_default_nbands(res) + self.assertEqual(nb, 124) + + def test_potcar_nvalence (self) : + res = NBandsEsti._get_potcar_nvalence('out_data_nbands_esti/POTCAR.dbl') + self.assertEqual(res, [10., 3.]) + + def test_incar_ele_temp (self) : + res = NBandsEsti._get_incar_ele_temp('out_data_nbands_esti/md.000300K/INCAR') + self.assertAlmostEqual(res, 0.025851991011651636) + + def test_incar_nbands (self) : + res = NBandsEsti._get_incar_nbands('out_data_nbands_esti/md.020000K/INCAR') + self.assertEqual(res, 81) + + def test_get_res(self): + res = NBandsEsti._get_res('out_data_nbands_esti/md.020000K/') + ref = { + 'natoms': [32], + 'vol': 138.55418502346618, + 'nvalence': [3.], + 'ele_temp': 20000.0, + 'nbands': 81 + } + self.assertEqual(res, ref) + diff --git a/tests/generator/test_post_fp.py b/tests/generator/test_post_fp.py index fe256e119..652f627ab 100644 --- a/tests/generator/test_post_fp.py +++ b/tests/generator/test_post_fp.py @@ -63,6 +63,7 @@ def test_post_fp_vasp_0(self): with open (param_file, 'r') as fp : jdata = json.load (fp) + jdata['use_ele_temp'] = 2 post_fp_vasp(0, jdata, rfailed=0.3) sys = dpdata.LabeledSystem('iter.000000/02.fp/data.000/', fmt = 'deepmd/raw') @@ -98,11 +99,19 @@ def test_post_fp_vasp_0(self): self.assertAlmostEqual(ref_cell[ff][ii][jj], sys.data['cells'][ff][ii][jj]) + self.assertTrue(os.path.isfile('iter.000000/02.fp/data.000/set.000/aparam.npy')) + aparam = np.load('iter.000000/02.fp/data.000/set.000/aparam.npy') + natoms = sys.get_natoms() + self.assertEqual(natoms, 2) + self.assertEqual(list(list(aparam)[0]), [0,0]) + self.assertEqual(list(list(aparam)[1]), [1,1]) + def test_post_fp_vasp_1(self): with open (param_file, 'r') as fp : jdata = json.load (fp) + jdata['use_ele_temp'] = 1 post_fp_vasp(0, jdata, rfailed=0.3) sys = dpdata.LabeledSystem('iter.000000/02.fp/data.001/', fmt = 'deepmd/raw') @@ -138,9 +147,15 @@ def test_post_fp_vasp_1(self): self.assertAlmostEqual(ref_cell[ff][ii][jj], sys.data['cells'][ff][ii][jj]) + fparam = 
np.load('iter.000000/02.fp/data.001/set.000/fparam.npy') + self.assertEqual(fparam.shape[0], 1) + self.assertEqual(list(fparam), [100000]) + + def test_post_fp_vasp_2(self): with open (param_file, 'r') as fp : jdata = json.load (fp) + jdata['use_ele_temp'] = 1 with self.assertRaises(RuntimeError): post_fp_vasp(0, jdata) diff --git a/tests/generator/INCAR.diy b/tests/generator/vasp/INCAR.diy similarity index 100% rename from tests/generator/INCAR.diy rename to tests/generator/vasp/INCAR.diy diff --git a/tests/generator/vasp/nbands_esti.out b/tests/generator/vasp/nbands_esti.out new file mode 100644 index 000000000..58babc4e0 --- /dev/null +++ b/tests/generator/vasp/nbands_esti.out @@ -0,0 +1,2 @@ +3.6534712640497446e-08 +8.651576676279664e-09 diff --git a/tests/generator/vasp/potcars/POTCAR.al b/tests/generator/vasp/potcars/POTCAR.al new file mode 100644 index 000000000..f3b7295da --- /dev/null +++ b/tests/generator/vasp/potcars/POTCAR.al @@ -0,0 +1,4 @@ + PAW_PBE Al 04Jan2001 + 3.00000000000000 + TITEL = PAW_PBE Al 04Jan2001 + End of Dataset diff --git a/tests/generator/vasp/potcars/POTCAR.mg b/tests/generator/vasp/potcars/POTCAR.mg new file mode 100644 index 000000000..4762c5028 --- /dev/null +++ b/tests/generator/vasp/potcars/POTCAR.mg @@ -0,0 +1,4 @@ + PAW_PBE Mg_sv 12Apr2007 + 10.0000000000000 + TITEL = PAW_PBE Mg_sv 12Apr2007 + End of Dataset diff --git a/tests/tools/__init__.py b/tests/tools/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/tools/context.py b/tests/tools/context.py new file mode 100644 index 000000000..d4e70a8c5 --- /dev/null +++ b/tests/tools/context.py @@ -0,0 +1,10 @@ +import sys,os + +sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))) +from dpgen.tools.run_report import * + +def my_file_cmp(test, f0, f1): + with open(f0) as fp0 : + with open(f1) as fp1: + test.assertTrue(fp0.read() == fp1.read()) + diff --git a/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000000/input.lammps b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000000/input.lammps new file mode 100644 index 000000000..467435d8a --- /dev/null +++ b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000000/input.lammps @@ -0,0 +1,31 @@ +variable NSTEPS equal 50 +variable THERMO_FREQ equal 10 +variable DUMP_FREQ equal 10 +variable TEMP equal 50.000000 +variable PRES equal 1.000000 +variable TAU_T equal 0.100000 +variable TAU_P equal 0.500000 + +units metal +boundary p p p +atom_style atomic + +neighbor 1.0 bin + +box tilt large +read_data conf.lmp +change_box all triclinic +mass 1 27.000000 +mass 2 24.000000 +pair_style deepmd ../graph.003.pb ../graph.001.pb ../graph.002.pb ../graph.000.pb ${THERMO_FREQ} model_devi.out +pair_coeff + +thermo_style custom step temp pe ke etotal press vol lx ly lz xy xz yz +thermo ${THERMO_FREQ} +dump 1 all custom ${DUMP_FREQ} traj/*.lammpstrj id type x y z + +velocity all create ${TEMP} 40913 +fix 1 all npt temp ${TEMP} ${TEMP} ${TAU_T} iso ${PRES} ${PRES} ${TAU_P} + +timestep 0.002000 +run ${NSTEPS} diff --git a/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000001/input.lammps b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000001/input.lammps new file mode 100644 index 000000000..33a064d41 --- /dev/null +++ b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000001/input.lammps @@ -0,0 +1,31 @@ +variable NSTEPS equal 50 +variable THERMO_FREQ equal 10 +variable DUMP_FREQ 
equal 10 +variable TEMP equal 50.000000 +variable PRES equal 2.000000 +variable TAU_T equal 0.100000 +variable TAU_P equal 0.500000 + +units metal +boundary p p p +atom_style atomic + +neighbor 1.0 bin + +box tilt large +read_data conf.lmp +change_box all triclinic +mass 1 27.000000 +mass 2 24.000000 +pair_style deepmd ../graph.003.pb ../graph.001.pb ../graph.002.pb ../graph.000.pb ${THERMO_FREQ} model_devi.out +pair_coeff + +thermo_style custom step temp pe ke etotal press vol lx ly lz xy xz yz +thermo ${THERMO_FREQ} +dump 1 all custom ${DUMP_FREQ} traj/*.lammpstrj id type x y z + +velocity all create ${TEMP} 848424 +fix 1 all npt temp ${TEMP} ${TEMP} ${TAU_T} iso ${PRES} ${PRES} ${TAU_P} + +timestep 0.002000 +run ${NSTEPS} diff --git a/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000002/input.lammps b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000002/input.lammps new file mode 100644 index 000000000..69b683f85 --- /dev/null +++ b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000002/input.lammps @@ -0,0 +1,31 @@ +variable NSTEPS equal 50 +variable THERMO_FREQ equal 10 +variable DUMP_FREQ equal 10 +variable TEMP equal 100.000000 +variable PRES equal 1.000000 +variable TAU_T equal 0.100000 +variable TAU_P equal 0.500000 + +units metal +boundary p p p +atom_style atomic + +neighbor 1.0 bin + +box tilt large +read_data conf.lmp +change_box all triclinic +mass 1 27.000000 +mass 2 24.000000 +pair_style deepmd ../graph.003.pb ../graph.001.pb ../graph.002.pb ../graph.000.pb ${THERMO_FREQ} model_devi.out +pair_coeff + +thermo_style custom step temp pe ke etotal press vol lx ly lz xy xz yz +thermo ${THERMO_FREQ} +dump 1 all custom ${DUMP_FREQ} traj/*.lammpstrj id type x y z + +velocity all create ${TEMP} 357198 +fix 1 all npt temp ${TEMP} ${TEMP} ${TAU_T} iso ${PRES} ${PRES} ${TAU_P} + +timestep 0.002000 +run ${NSTEPS} diff --git a/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000003/input.lammps b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000003/input.lammps new file mode 100644 index 000000000..4531a98fa --- /dev/null +++ b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000003/input.lammps @@ -0,0 +1,31 @@ +variable NSTEPS equal 50 +variable THERMO_FREQ equal 10 +variable DUMP_FREQ equal 10 +variable TEMP equal 100.000000 +variable PRES equal 2.000000 +variable TAU_T equal 0.100000 +variable TAU_P equal 0.500000 + +units metal +boundary p p p +atom_style atomic + +neighbor 1.0 bin + +box tilt large +read_data conf.lmp +change_box all triclinic +mass 1 27.000000 +mass 2 24.000000 +pair_style deepmd ../graph.003.pb ../graph.001.pb ../graph.002.pb ../graph.000.pb ${THERMO_FREQ} model_devi.out +pair_coeff + +thermo_style custom step temp pe ke etotal press vol lx ly lz xy xz yz +thermo ${THERMO_FREQ} +dump 1 all custom ${DUMP_FREQ} traj/*.lammpstrj id type x y z + +velocity all create ${TEMP} 747362 +fix 1 all npt temp ${TEMP} ${TEMP} ${TAU_T} iso ${PRES} ${PRES} ${TAU_P} + +timestep 0.002000 +run ${NSTEPS} diff --git a/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000004/input.lammps b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000004/input.lammps new file mode 100644 index 000000000..61c3be01b --- /dev/null +++ b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000004/input.lammps @@ -0,0 +1,31 @@ +variable NSTEPS equal 50 +variable THERMO_FREQ equal 10 +variable DUMP_FREQ equal 
10 +variable TEMP equal 50.000000 +variable PRES equal 1.000000 +variable TAU_T equal 0.100000 +variable TAU_P equal 0.500000 + +units metal +boundary p p p +atom_style atomic + +neighbor 1.0 bin + +box tilt large +read_data conf.lmp +change_box all triclinic +mass 1 27.000000 +mass 2 24.000000 +pair_style deepmd ../graph.003.pb ../graph.001.pb ../graph.002.pb ../graph.000.pb ${THERMO_FREQ} model_devi.out +pair_coeff + +thermo_style custom step temp pe ke etotal press vol lx ly lz xy xz yz +thermo ${THERMO_FREQ} +dump 1 all custom ${DUMP_FREQ} traj/*.lammpstrj id type x y z + +velocity all create ${TEMP} 499638 +fix 1 all npt temp ${TEMP} ${TEMP} ${TAU_T} iso ${PRES} ${PRES} ${TAU_P} + +timestep 0.002000 +run ${NSTEPS} diff --git a/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000005/input.lammps b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000005/input.lammps new file mode 100644 index 000000000..76626314c --- /dev/null +++ b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000005/input.lammps @@ -0,0 +1,31 @@ +variable NSTEPS equal 50 +variable THERMO_FREQ equal 10 +variable DUMP_FREQ equal 10 +variable TEMP equal 50.000000 +variable PRES equal 2.000000 +variable TAU_T equal 0.100000 +variable TAU_P equal 0.500000 + +units metal +boundary p p p +atom_style atomic + +neighbor 1.0 bin + +box tilt large +read_data conf.lmp +change_box all triclinic +mass 1 27.000000 +mass 2 24.000000 +pair_style deepmd ../graph.003.pb ../graph.001.pb ../graph.002.pb ../graph.000.pb ${THERMO_FREQ} model_devi.out +pair_coeff + +thermo_style custom step temp pe ke etotal press vol lx ly lz xy xz yz +thermo ${THERMO_FREQ} +dump 1 all custom ${DUMP_FREQ} traj/*.lammpstrj id type x y z + +velocity all create ${TEMP} 869864 +fix 1 all npt temp ${TEMP} ${TEMP} ${TAU_T} iso ${PRES} ${PRES} ${TAU_P} + +timestep 0.002000 +run ${NSTEPS} diff --git a/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000006/input.lammps b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000006/input.lammps new file mode 100644 index 000000000..59ca011f0 --- /dev/null +++ b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000006/input.lammps @@ -0,0 +1,31 @@ +variable NSTEPS equal 50 +variable THERMO_FREQ equal 10 +variable DUMP_FREQ equal 10 +variable TEMP equal 100.000000 +variable PRES equal 1.000000 +variable TAU_T equal 0.100000 +variable TAU_P equal 0.500000 + +units metal +boundary p p p +atom_style atomic + +neighbor 1.0 bin + +box tilt large +read_data conf.lmp +change_box all triclinic +mass 1 27.000000 +mass 2 24.000000 +pair_style deepmd ../graph.003.pb ../graph.001.pb ../graph.002.pb ../graph.000.pb ${THERMO_FREQ} model_devi.out +pair_coeff + +thermo_style custom step temp pe ke etotal press vol lx ly lz xy xz yz +thermo ${THERMO_FREQ} +dump 1 all custom ${DUMP_FREQ} traj/*.lammpstrj id type x y z + +velocity all create ${TEMP} 701845 +fix 1 all npt temp ${TEMP} ${TEMP} ${TAU_T} iso ${PRES} ${PRES} ${TAU_P} + +timestep 0.002000 +run ${NSTEPS} diff --git a/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000007/input.lammps b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000007/input.lammps new file mode 100644 index 000000000..f8b7a3826 --- /dev/null +++ b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000007/input.lammps @@ -0,0 +1,31 @@ +variable NSTEPS equal 50 +variable THERMO_FREQ equal 10 +variable DUMP_FREQ equal 10 
+variable TEMP equal 100.000000 +variable PRES equal 2.000000 +variable TAU_T equal 0.100000 +variable TAU_P equal 0.500000 + +units metal +boundary p p p +atom_style atomic + +neighbor 1.0 bin + +box tilt large +read_data conf.lmp +change_box all triclinic +mass 1 27.000000 +mass 2 24.000000 +pair_style deepmd ../graph.003.pb ../graph.001.pb ../graph.002.pb ../graph.000.pb ${THERMO_FREQ} model_devi.out +pair_coeff + +thermo_style custom step temp pe ke etotal press vol lx ly lz xy xz yz +thermo ${THERMO_FREQ} +dump 1 all custom ${DUMP_FREQ} traj/*.lammpstrj id type x y z + +velocity all create ${TEMP} 76887 +fix 1 all npt temp ${TEMP} ${TEMP} ${TAU_T} iso ${PRES} ${PRES} ${TAU_P} + +timestep 0.002000 +run ${NSTEPS} diff --git a/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000008/input.lammps b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000008/input.lammps new file mode 100644 index 000000000..42ecd5b0f --- /dev/null +++ b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000008/input.lammps @@ -0,0 +1,31 @@ +variable NSTEPS equal 50 +variable THERMO_FREQ equal 10 +variable DUMP_FREQ equal 10 +variable TEMP equal 50.000000 +variable PRES equal 1.000000 +variable TAU_T equal 0.100000 +variable TAU_P equal 0.500000 + +units metal +boundary p p p +atom_style atomic + +neighbor 1.0 bin + +box tilt large +read_data conf.lmp +change_box all triclinic +mass 1 27.000000 +mass 2 24.000000 +pair_style deepmd ../graph.003.pb ../graph.001.pb ../graph.002.pb ../graph.000.pb ${THERMO_FREQ} model_devi.out +pair_coeff + +thermo_style custom step temp pe ke etotal press vol lx ly lz xy xz yz +thermo ${THERMO_FREQ} +dump 1 all custom ${DUMP_FREQ} traj/*.lammpstrj id type x y z + +velocity all create ${TEMP} 70061 +fix 1 all npt temp ${TEMP} ${TEMP} ${TAU_T} iso ${PRES} ${PRES} ${TAU_P} + +timestep 0.002000 +run ${NSTEPS} diff --git a/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000009/input.lammps b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000009/input.lammps new file mode 100644 index 000000000..637e62791 --- /dev/null +++ b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000009/input.lammps @@ -0,0 +1,31 @@ +variable NSTEPS equal 50 +variable THERMO_FREQ equal 10 +variable DUMP_FREQ equal 10 +variable TEMP equal 50.000000 +variable PRES equal 2.000000 +variable TAU_T equal 0.100000 +variable TAU_P equal 0.500000 + +units metal +boundary p p p +atom_style atomic + +neighbor 1.0 bin + +box tilt large +read_data conf.lmp +change_box all triclinic +mass 1 27.000000 +mass 2 24.000000 +pair_style deepmd ../graph.003.pb ../graph.001.pb ../graph.002.pb ../graph.000.pb ${THERMO_FREQ} model_devi.out +pair_coeff + +thermo_style custom step temp pe ke etotal press vol lx ly lz xy xz yz +thermo ${THERMO_FREQ} +dump 1 all custom ${DUMP_FREQ} traj/*.lammpstrj id type x y z + +velocity all create ${TEMP} 96680 +fix 1 all npt temp ${TEMP} ${TEMP} ${TAU_T} iso ${PRES} ${PRES} ${TAU_P} + +timestep 0.002000 +run ${NSTEPS} diff --git a/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000010/input.lammps b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000010/input.lammps new file mode 100644 index 000000000..d4c54614e --- /dev/null +++ b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000010/input.lammps @@ -0,0 +1,31 @@ +variable NSTEPS equal 50 +variable THERMO_FREQ equal 10 +variable DUMP_FREQ equal 10 +variable 
TEMP equal 100.000000 +variable PRES equal 1.000000 +variable TAU_T equal 0.100000 +variable TAU_P equal 0.500000 + +units metal +boundary p p p +atom_style atomic + +neighbor 1.0 bin + +box tilt large +read_data conf.lmp +change_box all triclinic +mass 1 27.000000 +mass 2 24.000000 +pair_style deepmd ../graph.003.pb ../graph.001.pb ../graph.002.pb ../graph.000.pb ${THERMO_FREQ} model_devi.out +pair_coeff + +thermo_style custom step temp pe ke etotal press vol lx ly lz xy xz yz +thermo ${THERMO_FREQ} +dump 1 all custom ${DUMP_FREQ} traj/*.lammpstrj id type x y z + +velocity all create ${TEMP} 96737 +fix 1 all npt temp ${TEMP} ${TEMP} ${TAU_T} iso ${PRES} ${PRES} ${TAU_P} + +timestep 0.002000 +run ${NSTEPS} diff --git a/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000011/input.lammps b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000011/input.lammps new file mode 100644 index 000000000..33ca90a00 --- /dev/null +++ b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000011/input.lammps @@ -0,0 +1,31 @@ +variable NSTEPS equal 50 +variable THERMO_FREQ equal 10 +variable DUMP_FREQ equal 10 +variable TEMP equal 100.000000 +variable PRES equal 2.000000 +variable TAU_T equal 0.100000 +variable TAU_P equal 0.500000 + +units metal +boundary p p p +atom_style atomic + +neighbor 1.0 bin + +box tilt large +read_data conf.lmp +change_box all triclinic +mass 1 27.000000 +mass 2 24.000000 +pair_style deepmd ../graph.003.pb ../graph.001.pb ../graph.002.pb ../graph.000.pb ${THERMO_FREQ} model_devi.out +pair_coeff + +thermo_style custom step temp pe ke etotal press vol lx ly lz xy xz yz +thermo ${THERMO_FREQ} +dump 1 all custom ${DUMP_FREQ} traj/*.lammpstrj id type x y z + +velocity all create ${TEMP} 641044 +fix 1 all npt temp ${TEMP} ${TEMP} ${TAU_T} iso ${PRES} ${PRES} ${TAU_P} + +timestep 0.002000 +run ${NSTEPS} diff --git a/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000012/input.lammps b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000012/input.lammps new file mode 100644 index 000000000..68530feda --- /dev/null +++ b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000012/input.lammps @@ -0,0 +1,31 @@ +variable NSTEPS equal 50 +variable THERMO_FREQ equal 10 +variable DUMP_FREQ equal 10 +variable TEMP equal 50.000000 +variable PRES equal 1.000000 +variable TAU_T equal 0.100000 +variable TAU_P equal 0.500000 + +units metal +boundary p p p +atom_style atomic + +neighbor 1.0 bin + +box tilt large +read_data conf.lmp +change_box all triclinic +mass 1 27.000000 +mass 2 24.000000 +pair_style deepmd ../graph.003.pb ../graph.001.pb ../graph.002.pb ../graph.000.pb ${THERMO_FREQ} model_devi.out +pair_coeff + +thermo_style custom step temp pe ke etotal press vol lx ly lz xy xz yz +thermo ${THERMO_FREQ} +dump 1 all custom ${DUMP_FREQ} traj/*.lammpstrj id type x y z + +velocity all create ${TEMP} 224550 +fix 1 all npt temp ${TEMP} ${TEMP} ${TAU_T} iso ${PRES} ${PRES} ${TAU_P} + +timestep 0.002000 +run ${NSTEPS} diff --git a/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000013/input.lammps b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000013/input.lammps new file mode 100644 index 000000000..94b1a9896 --- /dev/null +++ b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000013/input.lammps @@ -0,0 +1,31 @@ +variable NSTEPS equal 50 +variable THERMO_FREQ equal 10 +variable DUMP_FREQ equal 10 +variable TEMP 
equal 50.000000 +variable PRES equal 2.000000 +variable TAU_T equal 0.100000 +variable TAU_P equal 0.500000 + +units metal +boundary p p p +atom_style atomic + +neighbor 1.0 bin + +box tilt large +read_data conf.lmp +change_box all triclinic +mass 1 27.000000 +mass 2 24.000000 +pair_style deepmd ../graph.003.pb ../graph.001.pb ../graph.002.pb ../graph.000.pb ${THERMO_FREQ} model_devi.out +pair_coeff + +thermo_style custom step temp pe ke etotal press vol lx ly lz xy xz yz +thermo ${THERMO_FREQ} +dump 1 all custom ${DUMP_FREQ} traj/*.lammpstrj id type x y z + +velocity all create ${TEMP} 566339 +fix 1 all npt temp ${TEMP} ${TEMP} ${TAU_T} iso ${PRES} ${PRES} ${TAU_P} + +timestep 0.002000 +run ${NSTEPS} diff --git a/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000014/input.lammps b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000014/input.lammps new file mode 100644 index 000000000..c0d503a5f --- /dev/null +++ b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000014/input.lammps @@ -0,0 +1,31 @@ +variable NSTEPS equal 50 +variable THERMO_FREQ equal 10 +variable DUMP_FREQ equal 10 +variable TEMP equal 100.000000 +variable PRES equal 1.000000 +variable TAU_T equal 0.100000 +variable TAU_P equal 0.500000 + +units metal +boundary p p p +atom_style atomic + +neighbor 1.0 bin + +box tilt large +read_data conf.lmp +change_box all triclinic +mass 1 27.000000 +mass 2 24.000000 +pair_style deepmd ../graph.003.pb ../graph.001.pb ../graph.002.pb ../graph.000.pb ${THERMO_FREQ} model_devi.out +pair_coeff + +thermo_style custom step temp pe ke etotal press vol lx ly lz xy xz yz +thermo ${THERMO_FREQ} +dump 1 all custom ${DUMP_FREQ} traj/*.lammpstrj id type x y z + +velocity all create ${TEMP} 48117 +fix 1 all npt temp ${TEMP} ${TEMP} ${TAU_T} iso ${PRES} ${PRES} ${TAU_P} + +timestep 0.002000 +run ${NSTEPS} diff --git a/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000015/input.lammps b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000015/input.lammps new file mode 100644 index 000000000..687e96b5a --- /dev/null +++ b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000015/input.lammps @@ -0,0 +1,31 @@ +variable NSTEPS equal 50 +variable THERMO_FREQ equal 10 +variable DUMP_FREQ equal 10 +variable TEMP equal 100.000000 +variable PRES equal 2.000000 +variable TAU_T equal 0.100000 +variable TAU_P equal 0.500000 + +units metal +boundary p p p +atom_style atomic + +neighbor 1.0 bin + +box tilt large +read_data conf.lmp +change_box all triclinic +mass 1 27.000000 +mass 2 24.000000 +pair_style deepmd ../graph.003.pb ../graph.001.pb ../graph.002.pb ../graph.000.pb ${THERMO_FREQ} model_devi.out +pair_coeff + +thermo_style custom step temp pe ke etotal press vol lx ly lz xy xz yz +thermo ${THERMO_FREQ} +dump 1 all custom ${DUMP_FREQ} traj/*.lammpstrj id type x y z + +velocity all create ${TEMP} 824400 +fix 1 all npt temp ${TEMP} ${TEMP} ${TAU_T} iso ${PRES} ${PRES} ${TAU_P} + +timestep 0.002000 +run ${NSTEPS} diff --git a/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000016/input.lammps b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000016/input.lammps new file mode 100644 index 000000000..2f07135ca --- /dev/null +++ b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000016/input.lammps @@ -0,0 +1,31 @@ +variable NSTEPS equal 50 +variable THERMO_FREQ equal 10 +variable DUMP_FREQ equal 10 +variable TEMP equal 
50.000000 +variable PRES equal 1.000000 +variable TAU_T equal 0.100000 +variable TAU_P equal 0.500000 + +units metal +boundary p p p +atom_style atomic + +neighbor 1.0 bin + +box tilt large +read_data conf.lmp +change_box all triclinic +mass 1 27.000000 +mass 2 24.000000 +pair_style deepmd ../graph.003.pb ../graph.001.pb ../graph.002.pb ../graph.000.pb ${THERMO_FREQ} model_devi.out +pair_coeff + +thermo_style custom step temp pe ke etotal press vol lx ly lz xy xz yz +thermo ${THERMO_FREQ} +dump 1 all custom ${DUMP_FREQ} traj/*.lammpstrj id type x y z + +velocity all create ${TEMP} 96860 +fix 1 all npt temp ${TEMP} ${TEMP} ${TAU_T} iso ${PRES} ${PRES} ${TAU_P} + +timestep 0.002000 +run ${NSTEPS} diff --git a/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000017/input.lammps b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000017/input.lammps new file mode 100644 index 000000000..f03904b8c --- /dev/null +++ b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000017/input.lammps @@ -0,0 +1,31 @@ +variable NSTEPS equal 50 +variable THERMO_FREQ equal 10 +variable DUMP_FREQ equal 10 +variable TEMP equal 50.000000 +variable PRES equal 2.000000 +variable TAU_T equal 0.100000 +variable TAU_P equal 0.500000 + +units metal +boundary p p p +atom_style atomic + +neighbor 1.0 bin + +box tilt large +read_data conf.lmp +change_box all triclinic +mass 1 27.000000 +mass 2 24.000000 +pair_style deepmd ../graph.003.pb ../graph.001.pb ../graph.002.pb ../graph.000.pb ${THERMO_FREQ} model_devi.out +pair_coeff + +thermo_style custom step temp pe ke etotal press vol lx ly lz xy xz yz +thermo ${THERMO_FREQ} +dump 1 all custom ${DUMP_FREQ} traj/*.lammpstrj id type x y z + +velocity all create ${TEMP} 523691 +fix 1 all npt temp ${TEMP} ${TEMP} ${TAU_T} iso ${PRES} ${PRES} ${TAU_P} + +timestep 0.002000 +run ${NSTEPS} diff --git a/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000018/input.lammps b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000018/input.lammps new file mode 100644 index 000000000..213735fc6 --- /dev/null +++ b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000018/input.lammps @@ -0,0 +1,31 @@ +variable NSTEPS equal 50 +variable THERMO_FREQ equal 10 +variable DUMP_FREQ equal 10 +variable TEMP equal 100.000000 +variable PRES equal 1.000000 +variable TAU_T equal 0.100000 +variable TAU_P equal 0.500000 + +units metal +boundary p p p +atom_style atomic + +neighbor 1.0 bin + +box tilt large +read_data conf.lmp +change_box all triclinic +mass 1 27.000000 +mass 2 24.000000 +pair_style deepmd ../graph.003.pb ../graph.001.pb ../graph.002.pb ../graph.000.pb ${THERMO_FREQ} model_devi.out +pair_coeff + +thermo_style custom step temp pe ke etotal press vol lx ly lz xy xz yz +thermo ${THERMO_FREQ} +dump 1 all custom ${DUMP_FREQ} traj/*.lammpstrj id type x y z + +velocity all create ${TEMP} 305757 +fix 1 all npt temp ${TEMP} ${TEMP} ${TAU_T} iso ${PRES} ${PRES} ${TAU_P} + +timestep 0.002000 +run ${NSTEPS} diff --git a/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000019/input.lammps b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000019/input.lammps new file mode 100644 index 000000000..d3dabf58f --- /dev/null +++ b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.000.000019/input.lammps @@ -0,0 +1,31 @@ +variable NSTEPS equal 50 +variable THERMO_FREQ equal 10 +variable DUMP_FREQ equal 10 +variable TEMP equal 
100.000000 +variable PRES equal 2.000000 +variable TAU_T equal 0.100000 +variable TAU_P equal 0.500000 + +units metal +boundary p p p +atom_style atomic + +neighbor 1.0 bin + +box tilt large +read_data conf.lmp +change_box all triclinic +mass 1 27.000000 +mass 2 24.000000 +pair_style deepmd ../graph.003.pb ../graph.001.pb ../graph.002.pb ../graph.000.pb ${THERMO_FREQ} model_devi.out +pair_coeff + +thermo_style custom step temp pe ke etotal press vol lx ly lz xy xz yz +thermo ${THERMO_FREQ} +dump 1 all custom ${DUMP_FREQ} traj/*.lammpstrj id type x y z + +velocity all create ${TEMP} 860842 +fix 1 all npt temp ${TEMP} ${TEMP} ${TAU_T} iso ${PRES} ${PRES} ${TAU_P} + +timestep 0.002000 +run ${NSTEPS} diff --git a/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000000/input.lammps b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000000/input.lammps new file mode 100644 index 000000000..0216e3be8 --- /dev/null +++ b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000000/input.lammps @@ -0,0 +1,31 @@ +variable NSTEPS equal 50 +variable THERMO_FREQ equal 10 +variable DUMP_FREQ equal 10 +variable TEMP equal 50.000000 +variable PRES equal 1.000000 +variable TAU_T equal 0.100000 +variable TAU_P equal 0.500000 + +units metal +boundary p p p +atom_style atomic + +neighbor 1.0 bin + +box tilt large +read_data conf.lmp +change_box all triclinic +mass 1 27.000000 +mass 2 24.000000 +pair_style deepmd ../graph.003.pb ../graph.001.pb ../graph.002.pb ../graph.000.pb ${THERMO_FREQ} model_devi.out +pair_coeff + +thermo_style custom step temp pe ke etotal press vol lx ly lz xy xz yz +thermo ${THERMO_FREQ} +dump 1 all custom ${DUMP_FREQ} traj/*.lammpstrj id type x y z + +velocity all create ${TEMP} 295802 +fix 1 all npt temp ${TEMP} ${TEMP} ${TAU_T} iso ${PRES} ${PRES} ${TAU_P} + +timestep 0.002000 +run ${NSTEPS} diff --git a/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000001/input.lammps b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000001/input.lammps new file mode 100644 index 000000000..077465731 --- /dev/null +++ b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000001/input.lammps @@ -0,0 +1,31 @@ +variable NSTEPS equal 50 +variable THERMO_FREQ equal 10 +variable DUMP_FREQ equal 10 +variable TEMP equal 50.000000 +variable PRES equal 2.000000 +variable TAU_T equal 0.100000 +variable TAU_P equal 0.500000 + +units metal +boundary p p p +atom_style atomic + +neighbor 1.0 bin + +box tilt large +read_data conf.lmp +change_box all triclinic +mass 1 27.000000 +mass 2 24.000000 +pair_style deepmd ../graph.003.pb ../graph.001.pb ../graph.002.pb ../graph.000.pb ${THERMO_FREQ} model_devi.out +pair_coeff + +thermo_style custom step temp pe ke etotal press vol lx ly lz xy xz yz +thermo ${THERMO_FREQ} +dump 1 all custom ${DUMP_FREQ} traj/*.lammpstrj id type x y z + +velocity all create ${TEMP} 681000 +fix 1 all npt temp ${TEMP} ${TEMP} ${TAU_T} iso ${PRES} ${PRES} ${TAU_P} + +timestep 0.002000 +run ${NSTEPS} diff --git a/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000002/input.lammps b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000002/input.lammps new file mode 100644 index 000000000..cc4e86638 --- /dev/null +++ b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000002/input.lammps @@ -0,0 +1,31 @@ +variable NSTEPS equal 50 +variable THERMO_FREQ equal 10 +variable DUMP_FREQ equal 10 +variable TEMP equal 
100.000000 +variable PRES equal 1.000000 +variable TAU_T equal 0.100000 +variable TAU_P equal 0.500000 + +units metal +boundary p p p +atom_style atomic + +neighbor 1.0 bin + +box tilt large +read_data conf.lmp +change_box all triclinic +mass 1 27.000000 +mass 2 24.000000 +pair_style deepmd ../graph.003.pb ../graph.001.pb ../graph.002.pb ../graph.000.pb ${THERMO_FREQ} model_devi.out +pair_coeff + +thermo_style custom step temp pe ke etotal press vol lx ly lz xy xz yz +thermo ${THERMO_FREQ} +dump 1 all custom ${DUMP_FREQ} traj/*.lammpstrj id type x y z + +velocity all create ${TEMP} 618462 +fix 1 all npt temp ${TEMP} ${TEMP} ${TAU_T} iso ${PRES} ${PRES} ${TAU_P} + +timestep 0.002000 +run ${NSTEPS} diff --git a/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000003/input.lammps b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000003/input.lammps new file mode 100644 index 000000000..49c0480c8 --- /dev/null +++ b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000003/input.lammps @@ -0,0 +1,31 @@ +variable NSTEPS equal 50 +variable THERMO_FREQ equal 10 +variable DUMP_FREQ equal 10 +variable TEMP equal 100.000000 +variable PRES equal 2.000000 +variable TAU_T equal 0.100000 +variable TAU_P equal 0.500000 + +units metal +boundary p p p +atom_style atomic + +neighbor 1.0 bin + +box tilt large +read_data conf.lmp +change_box all triclinic +mass 1 27.000000 +mass 2 24.000000 +pair_style deepmd ../graph.003.pb ../graph.001.pb ../graph.002.pb ../graph.000.pb ${THERMO_FREQ} model_devi.out +pair_coeff + +thermo_style custom step temp pe ke etotal press vol lx ly lz xy xz yz +thermo ${THERMO_FREQ} +dump 1 all custom ${DUMP_FREQ} traj/*.lammpstrj id type x y z + +velocity all create ${TEMP} 126966 +fix 1 all npt temp ${TEMP} ${TEMP} ${TAU_T} iso ${PRES} ${PRES} ${TAU_P} + +timestep 0.002000 +run ${NSTEPS} diff --git a/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000004/input.lammps b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000004/input.lammps new file mode 100644 index 000000000..3feb07f3a --- /dev/null +++ b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000004/input.lammps @@ -0,0 +1,31 @@ +variable NSTEPS equal 50 +variable THERMO_FREQ equal 10 +variable DUMP_FREQ equal 10 +variable TEMP equal 50.000000 +variable PRES equal 1.000000 +variable TAU_T equal 0.100000 +variable TAU_P equal 0.500000 + +units metal +boundary p p p +atom_style atomic + +neighbor 1.0 bin + +box tilt large +read_data conf.lmp +change_box all triclinic +mass 1 27.000000 +mass 2 24.000000 +pair_style deepmd ../graph.003.pb ../graph.001.pb ../graph.002.pb ../graph.000.pb ${THERMO_FREQ} model_devi.out +pair_coeff + +thermo_style custom step temp pe ke etotal press vol lx ly lz xy xz yz +thermo ${THERMO_FREQ} +dump 1 all custom ${DUMP_FREQ} traj/*.lammpstrj id type x y z + +velocity all create ${TEMP} 13923 +fix 1 all npt temp ${TEMP} ${TEMP} ${TAU_T} iso ${PRES} ${PRES} ${TAU_P} + +timestep 0.002000 +run ${NSTEPS} diff --git a/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000005/input.lammps b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000005/input.lammps new file mode 100644 index 000000000..69b1941b8 --- /dev/null +++ b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000005/input.lammps @@ -0,0 +1,31 @@ +variable NSTEPS equal 50 +variable THERMO_FREQ equal 10 +variable DUMP_FREQ equal 10 +variable TEMP equal 
50.000000 +variable PRES equal 2.000000 +variable TAU_T equal 0.100000 +variable TAU_P equal 0.500000 + +units metal +boundary p p p +atom_style atomic + +neighbor 1.0 bin + +box tilt large +read_data conf.lmp +change_box all triclinic +mass 1 27.000000 +mass 2 24.000000 +pair_style deepmd ../graph.003.pb ../graph.001.pb ../graph.002.pb ../graph.000.pb ${THERMO_FREQ} model_devi.out +pair_coeff + +thermo_style custom step temp pe ke etotal press vol lx ly lz xy xz yz +thermo ${THERMO_FREQ} +dump 1 all custom ${DUMP_FREQ} traj/*.lammpstrj id type x y z + +velocity all create ${TEMP} 210218 +fix 1 all npt temp ${TEMP} ${TEMP} ${TAU_T} iso ${PRES} ${PRES} ${TAU_P} + +timestep 0.002000 +run ${NSTEPS} diff --git a/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000006/input.lammps b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000006/input.lammps new file mode 100644 index 000000000..113e45a9c --- /dev/null +++ b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000006/input.lammps @@ -0,0 +1,31 @@ +variable NSTEPS equal 50 +variable THERMO_FREQ equal 10 +variable DUMP_FREQ equal 10 +variable TEMP equal 100.000000 +variable PRES equal 1.000000 +variable TAU_T equal 0.100000 +variable TAU_P equal 0.500000 + +units metal +boundary p p p +atom_style atomic + +neighbor 1.0 bin + +box tilt large +read_data conf.lmp +change_box all triclinic +mass 1 27.000000 +mass 2 24.000000 +pair_style deepmd ../graph.003.pb ../graph.001.pb ../graph.002.pb ../graph.000.pb ${THERMO_FREQ} model_devi.out +pair_coeff + +thermo_style custom step temp pe ke etotal press vol lx ly lz xy xz yz +thermo ${THERMO_FREQ} +dump 1 all custom ${DUMP_FREQ} traj/*.lammpstrj id type x y z + +velocity all create ${TEMP} 424518 +fix 1 all npt temp ${TEMP} ${TEMP} ${TAU_T} iso ${PRES} ${PRES} ${TAU_P} + +timestep 0.002000 +run ${NSTEPS} diff --git a/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000007/input.lammps b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000007/input.lammps new file mode 100644 index 000000000..bd32fb144 --- /dev/null +++ b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000007/input.lammps @@ -0,0 +1,31 @@ +variable NSTEPS equal 50 +variable THERMO_FREQ equal 10 +variable DUMP_FREQ equal 10 +variable TEMP equal 100.000000 +variable PRES equal 2.000000 +variable TAU_T equal 0.100000 +variable TAU_P equal 0.500000 + +units metal +boundary p p p +atom_style atomic + +neighbor 1.0 bin + +box tilt large +read_data conf.lmp +change_box all triclinic +mass 1 27.000000 +mass 2 24.000000 +pair_style deepmd ../graph.003.pb ../graph.001.pb ../graph.002.pb ../graph.000.pb ${THERMO_FREQ} model_devi.out +pair_coeff + +thermo_style custom step temp pe ke etotal press vol lx ly lz xy xz yz +thermo ${THERMO_FREQ} +dump 1 all custom ${DUMP_FREQ} traj/*.lammpstrj id type x y z + +velocity all create ${TEMP} 136621 +fix 1 all npt temp ${TEMP} ${TEMP} ${TAU_T} iso ${PRES} ${PRES} ${TAU_P} + +timestep 0.002000 +run ${NSTEPS} diff --git a/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000008/input.lammps b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000008/input.lammps new file mode 100644 index 000000000..27462a413 --- /dev/null +++ b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000008/input.lammps @@ -0,0 +1,31 @@ +variable NSTEPS equal 50 +variable THERMO_FREQ equal 10 +variable DUMP_FREQ equal 10 +variable TEMP equal 
50.000000 +variable PRES equal 1.000000 +variable TAU_T equal 0.100000 +variable TAU_P equal 0.500000 + +units metal +boundary p p p +atom_style atomic + +neighbor 1.0 bin + +box tilt large +read_data conf.lmp +change_box all triclinic +mass 1 27.000000 +mass 2 24.000000 +pair_style deepmd ../graph.003.pb ../graph.001.pb ../graph.002.pb ../graph.000.pb ${THERMO_FREQ} model_devi.out +pair_coeff + +thermo_style custom step temp pe ke etotal press vol lx ly lz xy xz yz +thermo ${THERMO_FREQ} +dump 1 all custom ${DUMP_FREQ} traj/*.lammpstrj id type x y z + +velocity all create ${TEMP} 312495 +fix 1 all npt temp ${TEMP} ${TEMP} ${TAU_T} iso ${PRES} ${PRES} ${TAU_P} + +timestep 0.002000 +run ${NSTEPS} diff --git a/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000009/input.lammps b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000009/input.lammps new file mode 100644 index 000000000..44467a3d8 --- /dev/null +++ b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000009/input.lammps @@ -0,0 +1,31 @@ +variable NSTEPS equal 50 +variable THERMO_FREQ equal 10 +variable DUMP_FREQ equal 10 +variable TEMP equal 50.000000 +variable PRES equal 2.000000 +variable TAU_T equal 0.100000 +variable TAU_P equal 0.500000 + +units metal +boundary p p p +atom_style atomic + +neighbor 1.0 bin + +box tilt large +read_data conf.lmp +change_box all triclinic +mass 1 27.000000 +mass 2 24.000000 +pair_style deepmd ../graph.003.pb ../graph.001.pb ../graph.002.pb ../graph.000.pb ${THERMO_FREQ} model_devi.out +pair_coeff + +thermo_style custom step temp pe ke etotal press vol lx ly lz xy xz yz +thermo ${THERMO_FREQ} +dump 1 all custom ${DUMP_FREQ} traj/*.lammpstrj id type x y z + +velocity all create ${TEMP} 590507 +fix 1 all npt temp ${TEMP} ${TEMP} ${TAU_T} iso ${PRES} ${PRES} ${TAU_P} + +timestep 0.002000 +run ${NSTEPS} diff --git a/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000010/input.lammps b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000010/input.lammps new file mode 100644 index 000000000..28e996e94 --- /dev/null +++ b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000010/input.lammps @@ -0,0 +1,31 @@ +variable NSTEPS equal 50 +variable THERMO_FREQ equal 10 +variable DUMP_FREQ equal 10 +variable TEMP equal 100.000000 +variable PRES equal 1.000000 +variable TAU_T equal 0.100000 +variable TAU_P equal 0.500000 + +units metal +boundary p p p +atom_style atomic + +neighbor 1.0 bin + +box tilt large +read_data conf.lmp +change_box all triclinic +mass 1 27.000000 +mass 2 24.000000 +pair_style deepmd ../graph.003.pb ../graph.001.pb ../graph.002.pb ../graph.000.pb ${THERMO_FREQ} model_devi.out +pair_coeff + +thermo_style custom step temp pe ke etotal press vol lx ly lz xy xz yz +thermo ${THERMO_FREQ} +dump 1 all custom ${DUMP_FREQ} traj/*.lammpstrj id type x y z + +velocity all create ${TEMP} 664121 +fix 1 all npt temp ${TEMP} ${TEMP} ${TAU_T} iso ${PRES} ${PRES} ${TAU_P} + +timestep 0.002000 +run ${NSTEPS} diff --git a/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000011/input.lammps b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000011/input.lammps new file mode 100644 index 000000000..3cb5397c8 --- /dev/null +++ b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000011/input.lammps @@ -0,0 +1,31 @@ +variable NSTEPS equal 50 +variable THERMO_FREQ equal 10 +variable DUMP_FREQ equal 10 +variable TEMP equal 
100.000000 +variable PRES equal 2.000000 +variable TAU_T equal 0.100000 +variable TAU_P equal 0.500000 + +units metal +boundary p p p +atom_style atomic + +neighbor 1.0 bin + +box tilt large +read_data conf.lmp +change_box all triclinic +mass 1 27.000000 +mass 2 24.000000 +pair_style deepmd ../graph.003.pb ../graph.001.pb ../graph.002.pb ../graph.000.pb ${THERMO_FREQ} model_devi.out +pair_coeff + +thermo_style custom step temp pe ke etotal press vol lx ly lz xy xz yz +thermo ${THERMO_FREQ} +dump 1 all custom ${DUMP_FREQ} traj/*.lammpstrj id type x y z + +velocity all create ${TEMP} 810736 +fix 1 all npt temp ${TEMP} ${TEMP} ${TAU_T} iso ${PRES} ${PRES} ${TAU_P} + +timestep 0.002000 +run ${NSTEPS} diff --git a/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000012/input.lammps b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000012/input.lammps new file mode 100644 index 000000000..68d16e6e1 --- /dev/null +++ b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000012/input.lammps @@ -0,0 +1,31 @@ +variable NSTEPS equal 50 +variable THERMO_FREQ equal 10 +variable DUMP_FREQ equal 10 +variable TEMP equal 50.000000 +variable PRES equal 1.000000 +variable TAU_T equal 0.100000 +variable TAU_P equal 0.500000 + +units metal +boundary p p p +atom_style atomic + +neighbor 1.0 bin + +box tilt large +read_data conf.lmp +change_box all triclinic +mass 1 27.000000 +mass 2 24.000000 +pair_style deepmd ../graph.003.pb ../graph.001.pb ../graph.002.pb ../graph.000.pb ${THERMO_FREQ} model_devi.out +pair_coeff + +thermo_style custom step temp pe ke etotal press vol lx ly lz xy xz yz +thermo ${THERMO_FREQ} +dump 1 all custom ${DUMP_FREQ} traj/*.lammpstrj id type x y z + +velocity all create ${TEMP} 382423 +fix 1 all npt temp ${TEMP} ${TEMP} ${TAU_T} iso ${PRES} ${PRES} ${TAU_P} + +timestep 0.002000 +run ${NSTEPS} diff --git a/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000013/input.lammps b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000013/input.lammps new file mode 100644 index 000000000..f58d3b132 --- /dev/null +++ b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000013/input.lammps @@ -0,0 +1,31 @@ +variable NSTEPS equal 50 +variable THERMO_FREQ equal 10 +variable DUMP_FREQ equal 10 +variable TEMP equal 50.000000 +variable PRES equal 2.000000 +variable TAU_T equal 0.100000 +variable TAU_P equal 0.500000 + +units metal +boundary p p p +atom_style atomic + +neighbor 1.0 bin + +box tilt large +read_data conf.lmp +change_box all triclinic +mass 1 27.000000 +mass 2 24.000000 +pair_style deepmd ../graph.003.pb ../graph.001.pb ../graph.002.pb ../graph.000.pb ${THERMO_FREQ} model_devi.out +pair_coeff + +thermo_style custom step temp pe ke etotal press vol lx ly lz xy xz yz +thermo ${THERMO_FREQ} +dump 1 all custom ${DUMP_FREQ} traj/*.lammpstrj id type x y z + +velocity all create ${TEMP} 216872 +fix 1 all npt temp ${TEMP} ${TEMP} ${TAU_T} iso ${PRES} ${PRES} ${TAU_P} + +timestep 0.002000 +run ${NSTEPS} diff --git a/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000014/input.lammps b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000014/input.lammps new file mode 100644 index 000000000..138fc70d0 --- /dev/null +++ b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000014/input.lammps @@ -0,0 +1,31 @@ +variable NSTEPS equal 50 +variable THERMO_FREQ equal 10 +variable DUMP_FREQ equal 10 +variable TEMP equal 
100.000000 +variable PRES equal 1.000000 +variable TAU_T equal 0.100000 +variable TAU_P equal 0.500000 + +units metal +boundary p p p +atom_style atomic + +neighbor 1.0 bin + +box tilt large +read_data conf.lmp +change_box all triclinic +mass 1 27.000000 +mass 2 24.000000 +pair_style deepmd ../graph.003.pb ../graph.001.pb ../graph.002.pb ../graph.000.pb ${THERMO_FREQ} model_devi.out +pair_coeff + +thermo_style custom step temp pe ke etotal press vol lx ly lz xy xz yz +thermo ${THERMO_FREQ} +dump 1 all custom ${DUMP_FREQ} traj/*.lammpstrj id type x y z + +velocity all create ${TEMP} 216100 +fix 1 all npt temp ${TEMP} ${TEMP} ${TAU_T} iso ${PRES} ${PRES} ${TAU_P} + +timestep 0.002000 +run ${NSTEPS} diff --git a/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000015/input.lammps b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000015/input.lammps new file mode 100644 index 000000000..a9d01bc15 --- /dev/null +++ b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000015/input.lammps @@ -0,0 +1,31 @@ +variable NSTEPS equal 50 +variable THERMO_FREQ equal 10 +variable DUMP_FREQ equal 10 +variable TEMP equal 100.000000 +variable PRES equal 2.000000 +variable TAU_T equal 0.100000 +variable TAU_P equal 0.500000 + +units metal +boundary p p p +atom_style atomic + +neighbor 1.0 bin + +box tilt large +read_data conf.lmp +change_box all triclinic +mass 1 27.000000 +mass 2 24.000000 +pair_style deepmd ../graph.003.pb ../graph.001.pb ../graph.002.pb ../graph.000.pb ${THERMO_FREQ} model_devi.out +pair_coeff + +thermo_style custom step temp pe ke etotal press vol lx ly lz xy xz yz +thermo ${THERMO_FREQ} +dump 1 all custom ${DUMP_FREQ} traj/*.lammpstrj id type x y z + +velocity all create ${TEMP} 379255 +fix 1 all npt temp ${TEMP} ${TEMP} ${TAU_T} iso ${PRES} ${PRES} ${TAU_P} + +timestep 0.002000 +run ${NSTEPS} diff --git a/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000016/input.lammps b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000016/input.lammps new file mode 100644 index 000000000..f179a813e --- /dev/null +++ b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000016/input.lammps @@ -0,0 +1,31 @@ +variable NSTEPS equal 50 +variable THERMO_FREQ equal 10 +variable DUMP_FREQ equal 10 +variable TEMP equal 50.000000 +variable PRES equal 1.000000 +variable TAU_T equal 0.100000 +variable TAU_P equal 0.500000 + +units metal +boundary p p p +atom_style atomic + +neighbor 1.0 bin + +box tilt large +read_data conf.lmp +change_box all triclinic +mass 1 27.000000 +mass 2 24.000000 +pair_style deepmd ../graph.003.pb ../graph.001.pb ../graph.002.pb ../graph.000.pb ${THERMO_FREQ} model_devi.out +pair_coeff + +thermo_style custom step temp pe ke etotal press vol lx ly lz xy xz yz +thermo ${THERMO_FREQ} +dump 1 all custom ${DUMP_FREQ} traj/*.lammpstrj id type x y z + +velocity all create ${TEMP} 13909 +fix 1 all npt temp ${TEMP} ${TEMP} ${TAU_T} iso ${PRES} ${PRES} ${TAU_P} + +timestep 0.002000 +run ${NSTEPS} diff --git a/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000017/input.lammps b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000017/input.lammps new file mode 100644 index 000000000..3da736377 --- /dev/null +++ b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000017/input.lammps @@ -0,0 +1,31 @@ +variable NSTEPS equal 50 +variable THERMO_FREQ equal 10 +variable DUMP_FREQ equal 10 +variable TEMP equal 
50.000000
+variable PRES equal 2.000000
+variable TAU_T equal 0.100000
+variable TAU_P equal 0.500000
+
+units metal
+boundary p p p
+atom_style atomic
+
+neighbor 1.0 bin
+
+box tilt large
+read_data conf.lmp
+change_box all triclinic
+mass 1 27.000000
+mass 2 24.000000
+pair_style deepmd ../graph.003.pb ../graph.001.pb ../graph.002.pb ../graph.000.pb ${THERMO_FREQ} model_devi.out
+pair_coeff
+
+thermo_style custom step temp pe ke etotal press vol lx ly lz xy xz yz
+thermo ${THERMO_FREQ}
+dump 1 all custom ${DUMP_FREQ} traj/*.lammpstrj id type x y z
+
+velocity all create ${TEMP} 207854
+fix 1 all npt temp ${TEMP} ${TEMP} ${TAU_T} iso ${PRES} ${PRES} ${TAU_P}
+
+timestep 0.002000
+run ${NSTEPS}
diff --git a/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000018/input.lammps b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000018/input.lammps
new file mode 100644
index 000000000..ef139ef13
--- /dev/null
+++ b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000018/input.lammps
@@ -0,0 +1,31 @@
+variable NSTEPS equal 50
+variable THERMO_FREQ equal 10
+variable DUMP_FREQ equal 10
+variable TEMP equal 100.000000
+variable PRES equal 1.000000
+variable TAU_T equal 0.100000
+variable TAU_P equal 0.500000
+
+units metal
+boundary p p p
+atom_style atomic
+
+neighbor 1.0 bin
+
+box tilt large
+read_data conf.lmp
+change_box all triclinic
+mass 1 27.000000
+mass 2 24.000000
+pair_style deepmd ../graph.003.pb ../graph.001.pb ../graph.002.pb ../graph.000.pb ${THERMO_FREQ} model_devi.out
+pair_coeff
+
+thermo_style custom step temp pe ke etotal press vol lx ly lz xy xz yz
+thermo ${THERMO_FREQ}
+dump 1 all custom ${DUMP_FREQ} traj/*.lammpstrj id type x y z
+
+velocity all create ${TEMP} 163957
+fix 1 all npt temp ${TEMP} ${TEMP} ${TAU_T} iso ${PRES} ${PRES} ${TAU_P}
+
+timestep 0.002000
+run ${NSTEPS}
diff --git a/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000019/input.lammps b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000019/input.lammps
new file mode 100644
index 000000000..d79b17d4e
--- /dev/null
+++ b/tests/tools/run_report_test_output/iter.000000/01.model_devi/task.001.000019/input.lammps
@@ -0,0 +1,31 @@
+variable NSTEPS equal 50
+variable THERMO_FREQ equal 10
+variable DUMP_FREQ equal 10
+variable TEMP equal 100.000000
+variable PRES equal 2.000000
+variable TAU_T equal 0.100000
+variable TAU_P equal 0.500000
+
+units metal
+boundary p p p
+atom_style atomic
+
+neighbor 1.0 bin
+
+box tilt large
+read_data conf.lmp
+change_box all triclinic
+mass 1 27.000000
+mass 2 24.000000
+pair_style deepmd ../graph.003.pb ../graph.001.pb ../graph.002.pb ../graph.000.pb ${THERMO_FREQ} model_devi.out
+pair_coeff
+
+thermo_style custom step temp pe ke etotal press vol lx ly lz xy xz yz
+thermo ${THERMO_FREQ}
+dump 1 all custom ${DUMP_FREQ} traj/*.lammpstrj id type x y z
+
+velocity all create ${TEMP} 435064
+fix 1 all npt temp ${TEMP} ${TEMP} ${TAU_T} iso ${PRES} ${PRES} ${TAU_P}
+
+timestep 0.002000
+run ${NSTEPS}
diff --git a/tests/tools/run_report_test_output/iter.000000/02.fp/task.000.000000/conf.dump b/tests/tools/run_report_test_output/iter.000000/02.fp/task.000.000000/conf.dump
new file mode 120000
index 000000000..cd2517412
--- /dev/null
+++ b/tests/tools/run_report_test_output/iter.000000/02.fp/task.000.000000/conf.dump
@@ -0,0 +1 @@
+../../01.model_devi/task.000.000017/traj/20.lammpstrj
\ No newline at end of file
diff --git a/tests/tools/run_report_test_output/iter.000000/02.fp/task.000.000001/conf.dump b/tests/tools/run_report_test_output/iter.000000/02.fp/task.000.000001/conf.dump
new file mode 120000
index 000000000..d7bfc06f9
--- /dev/null
+++ b/tests/tools/run_report_test_output/iter.000000/02.fp/task.000.000001/conf.dump
@@ -0,0 +1 @@
+../../01.model_devi/task.000.000004/traj/40.lammpstrj
\ No newline at end of file
diff --git a/tests/tools/run_report_test_output/iter.000000/02.fp/task.000.000002/conf.dump b/tests/tools/run_report_test_output/iter.000000/02.fp/task.000.000002/conf.dump
new file mode 120000
index 000000000..c1ace91e3
--- /dev/null
+++ b/tests/tools/run_report_test_output/iter.000000/02.fp/task.000.000002/conf.dump
@@ -0,0 +1 @@
+../../01.model_devi/task.000.000016/traj/50.lammpstrj
\ No newline at end of file
diff --git a/tests/tools/run_report_test_output/iter.000000/02.fp/task.000.000003/conf.dump b/tests/tools/run_report_test_output/iter.000000/02.fp/task.000.000003/conf.dump
new file mode 120000
index 000000000..827ae12bd
--- /dev/null
+++ b/tests/tools/run_report_test_output/iter.000000/02.fp/task.000.000003/conf.dump
@@ -0,0 +1 @@
+../../01.model_devi/task.000.000006/traj/30.lammpstrj
\ No newline at end of file
diff --git a/tests/tools/run_report_test_output/iter.000000/02.fp/task.000.000004/conf.dump b/tests/tools/run_report_test_output/iter.000000/02.fp/task.000.000004/conf.dump
new file mode 120000
index 000000000..873f93196
--- /dev/null
+++ b/tests/tools/run_report_test_output/iter.000000/02.fp/task.000.000004/conf.dump
@@ -0,0 +1 @@
+../../01.model_devi/task.000.000012/traj/10.lammpstrj
\ No newline at end of file
diff --git a/tests/tools/run_report_test_output/iter.000000/02.fp/task.000.000005/conf.dump b/tests/tools/run_report_test_output/iter.000000/02.fp/task.000.000005/conf.dump
new file mode 120000
index 000000000..da0b0c8a4
--- /dev/null
+++ b/tests/tools/run_report_test_output/iter.000000/02.fp/task.000.000005/conf.dump
@@ -0,0 +1 @@
+../../01.model_devi/task.000.000019/traj/40.lammpstrj
\ No newline at end of file
diff --git a/tests/tools/run_report_test_output/iter.000000/02.fp/task.000.000006/conf.dump b/tests/tools/run_report_test_output/iter.000000/02.fp/task.000.000006/conf.dump
new file mode 120000
index 000000000..2c17c22e3
--- /dev/null
+++ b/tests/tools/run_report_test_output/iter.000000/02.fp/task.000.000006/conf.dump
@@ -0,0 +1 @@
+../../01.model_devi/task.000.000012/traj/30.lammpstrj
\ No newline at end of file
diff --git a/tests/tools/run_report_test_output/iter.000000/02.fp/task.000.000007/conf.dump b/tests/tools/run_report_test_output/iter.000000/02.fp/task.000.000007/conf.dump
new file mode 120000
index 000000000..bce18ca1a
--- /dev/null
+++ b/tests/tools/run_report_test_output/iter.000000/02.fp/task.000.000007/conf.dump
@@ -0,0 +1 @@
+../../01.model_devi/task.000.000014/traj/20.lammpstrj
\ No newline at end of file
diff --git a/tests/tools/run_report_test_output/iter.000000/02.fp/task.001.000000/conf.dump b/tests/tools/run_report_test_output/iter.000000/02.fp/task.001.000000/conf.dump
new file mode 120000
index 000000000..a201d8f23
--- /dev/null
+++ b/tests/tools/run_report_test_output/iter.000000/02.fp/task.001.000000/conf.dump
@@ -0,0 +1 @@
+../../01.model_devi/task.001.000005/traj/20.lammpstrj
\ No newline at end of file
diff --git a/tests/tools/run_report_test_output/iter.000000/02.fp/task.001.000001/conf.dump b/tests/tools/run_report_test_output/iter.000000/02.fp/task.001.000001/conf.dump
new file mode 120000
index 000000000..c68381e29
--- /dev/null
+++ b/tests/tools/run_report_test_output/iter.000000/02.fp/task.001.000001/conf.dump
@@ -0,0 +1 @@
+../../01.model_devi/task.001.000001/traj/50.lammpstrj
\ No newline at end of file
diff --git a/tests/tools/run_report_test_output/iter.000000/02.fp/task.001.000002/conf.dump b/tests/tools/run_report_test_output/iter.000000/02.fp/task.001.000002/conf.dump
new file mode 120000
index 000000000..b6a98bf8c
--- /dev/null
+++ b/tests/tools/run_report_test_output/iter.000000/02.fp/task.001.000002/conf.dump
@@ -0,0 +1 @@
+../../01.model_devi/task.001.000017/traj/10.lammpstrj
\ No newline at end of file
diff --git a/tests/tools/run_report_test_output/iter.000000/02.fp/task.001.000003/conf.dump b/tests/tools/run_report_test_output/iter.000000/02.fp/task.001.000003/conf.dump
new file mode 120000
index 000000000..10d8ccd66
--- /dev/null
+++ b/tests/tools/run_report_test_output/iter.000000/02.fp/task.001.000003/conf.dump
@@ -0,0 +1 @@
+../../01.model_devi/task.001.000010/traj/20.lammpstrj
\ No newline at end of file
diff --git a/tests/tools/run_report_test_output/iter.000000/02.fp/task.001.000004/conf.dump b/tests/tools/run_report_test_output/iter.000000/02.fp/task.001.000004/conf.dump
new file mode 120000
index 000000000..c916269b3
--- /dev/null
+++ b/tests/tools/run_report_test_output/iter.000000/02.fp/task.001.000004/conf.dump
@@ -0,0 +1 @@
+../../01.model_devi/task.001.000001/traj/40.lammpstrj
\ No newline at end of file
diff --git a/tests/tools/run_report_test_output/iter.000000/02.fp/task.001.000005/conf.dump b/tests/tools/run_report_test_output/iter.000000/02.fp/task.001.000005/conf.dump
new file mode 120000
index 000000000..32cd32d07
--- /dev/null
+++ b/tests/tools/run_report_test_output/iter.000000/02.fp/task.001.000005/conf.dump
@@ -0,0 +1 @@
+../../01.model_devi/task.001.000000/traj/30.lammpstrj
\ No newline at end of file
diff --git a/tests/tools/run_report_test_output/iter.000000/02.fp/task.001.000006/conf.dump b/tests/tools/run_report_test_output/iter.000000/02.fp/task.001.000006/conf.dump
new file mode 120000
index 000000000..d3bf7c777
--- /dev/null
+++ b/tests/tools/run_report_test_output/iter.000000/02.fp/task.001.000006/conf.dump
@@ -0,0 +1 @@
+../../01.model_devi/task.001.000012/traj/50.lammpstrj
\ No newline at end of file
diff --git a/tests/tools/run_report_test_output/iter.000000/02.fp/task.001.000007/conf.dump b/tests/tools/run_report_test_output/iter.000000/02.fp/task.001.000007/conf.dump
new file mode 120000
index 000000000..71b3526f0
--- /dev/null
+++ b/tests/tools/run_report_test_output/iter.000000/02.fp/task.001.000007/conf.dump
@@ -0,0 +1 @@
+../../01.model_devi/task.001.000010/traj/10.lammpstrj
\ No newline at end of file
diff --git a/tests/tools/run_report_test_output/param.json b/tests/tools/run_report_test_output/param.json
new file mode 100644
index 000000000..202232382
--- /dev/null
+++ b/tests/tools/run_report_test_output/param.json
@@ -0,0 +1,89 @@
+{
+    "type_map": ["Al", "Mg"],
+    "mass_map": [27, 24],
+
+    "init_data_prefix": "/home/wanghan/study/deep.md/data/almgop.20/init//",
+
+    "init_data_sys": [
+        "al.fcc.01x01x01/02.md/sys-0004/deepmd",
+        "mg.fcc.01x01x01/02.md/sys-0004/deepmd"
+    ],
+    "init_batch_size": [1,1],
+    "sys_configs": [
+        ["/home/wanghan/study/deep.md/data/almgop.20/init/al.fcc.02x02x02/01.scale_pert/sys-0032/scale-1.000/00000[0-4]/POSCAR"],
+        ["/home/wanghan/study/deep.md/data/almgop.20/init/mg.fcc.02x02x02/01.scale_pert/sys-0032/scale-1.000/00000[0-4]/POSCAR"]
+    ],
+    "sys_batch_size": [1,1],
+
+    "_comment": " 00.train ",
+    "numb_models": 4,
+    "default_training_param" : {
+        "_comment": " model parameters",
+        "use_smooth": true,
+        "sel_a": [90, 90],
+        "rcut_smth": 2.00,
+        "rcut": 6.00,
+        "filter_neuron": [10, 20, 40],
+        "filter_resnet_dt": false,
+        "n_axis_neuron": 4,
+        "n_neuron": [120, 120, 120],
+        "resnet_dt": true,
+        "coord_norm": true,
+        "type_fitting_net": false,
+
+        "_comment": " traing controls",
+        "systems": [],
+        "set_prefix": "set",
+        "stop_batch": 1000,
+        "batch_size": 1,
+        "start_lr": 0.001,
+        "decay_steps": 2000,
+        "decay_rate": 0.95,
+        "seed": 0,
+
+        "start_pref_e": 0.02,
+        "limit_pref_e": 2,
+        "start_pref_f": 1000,
+        "limit_pref_f": 1,
+        "start_pref_v": 0.0,
+        "limit_pref_v": 0.0,
+
+        "_comment": " display and restart",
+        "_comment": " frequencies counted in batch",
+        "disp_file": "lcurve.out",
+        "disp_freq": 1000,
+        "numb_test": 1,
+        "save_freq": 1000,
+        "save_ckpt": "model.ckpt",
+        "load_ckpt": "model.ckpt",
+        "disp_training": true,
+        "time_training": true,
+        "profiling": false,
+        "profiling_file": "timeline.json",
+
+        "_comment": "that's all"
+    },
+
+    "_comment": " 01.model_devi ",
+    "_comment": "model_devi_skip: the first x of the recorded frames",
+    "model_devi_dt": 0.002,
+    "model_devi_skip": 0,
+    "model_devi_f_trust_lo": 0.05,
+    "model_devi_f_trust_hi": 0.20,
+    "model_devi_e_trust_lo": 1e10,
+    "model_devi_e_trust_hi": 1e10,
+    "model_devi_clean_traj": false,
+    "model_devi_jobs": [
+        { "_idx": 0, "ensemble": "npt", "nsteps": 50, "press": [1.0,2.0], "sys_idx": [0, 1], "temps": [50,100], "trj_freq": 10 }
+    ],
+    "_comment": " 02.fp ",
+    "fp_style": "vasp",
+    "shuffle_poscar": false,
+    "fp_task_max": 8,
+    "fp_task_min": 2,
+    "fp_pp_path": "/home/wanghan/study/deep.md/dpgen/almg/vasp",
+    "fp_pp_files": ["POTCAR.Al", "POTCAR.Mg"],
+    "fp_incar": "/home/wanghan/study/deep.md/dpgen/almg/vasp/INCAR",
+    "_comment": " that's all "
+}
+
diff --git a/tests/tools/test_run_report.py b/tests/tools/test_run_report.py
new file mode 100644
index 000000000..e31ac3a56
--- /dev/null
+++ b/tests/tools/test_run_report.py
@@ -0,0 +1,18 @@
+import os,sys,json
+import unittest
+
+test_dir = os.path.abspath(os.path.join(os.path.dirname(__file__)))
+sys.path.insert(0, os.path.join(test_dir, '..'))
+__package__ = 'tools'
+from .context import stat_sys
+
+class TestRunReport(unittest.TestCase):
+    def test_stat_sys (self):
+        folder = 'run_report_test_output'
+        sys, sys_count, sys_all = stat_sys(os.path.join(test_dir, folder), verbose = False, mute = True)
+        with open(os.path.join(test_dir, folder, 'param.json')) as fp:
+            jdata = json.load(fp)
+        self.assertEqual(sys, jdata['sys_configs'])
+        self.assertEqual(sys_count, [jdata['fp_task_max'], jdata['fp_task_max']])
+        ref_all = [[['npt', 50.0, 1.0, 4], ['npt', 50.0, 2.0, 1], ['npt', 100.0, 1.0, 2], ['npt', 100.0, 2.0, 1]], [['npt', 50.0, 1.0, 2], ['npt', 50.0, 2.0, 4], ['npt', 100.0, 1.0, 2]]]
+        self.assertEqual(sys_all, ref_all)