def get_kpoints_object(self, step, structure):
    """Return a pymatgen ``Kpoints`` object for ``step`` built from ``self.kpoints``.

    The k-point spec for the step selects one of the ``Kpoints`` factory
    methods.  Returns ``None`` when no spec exists for ``step``; prints a
    message and exits the process on an unrecognised generation type
    (behaviour preserved from the original).
    """
    try:
        tags = self.kpoints[step]
    except KeyError:
        # no k-point specification registered for this step
        return None

    scheme = tags['Type']
    if scheme == 'automatic_density':
        return Kpoints.automatic_density(structure,
                                         tags['Grid Density'],
                                         tags['Force Gamma'])
    if scheme == 'automatic_density_by_vol':
        return Kpoints.automatic_density_by_vol(
            structure,
            tags['Grid Density per A^(-3) of Reciprocal Cell'],
            tags['Force Gamma'])
    if scheme == 'automatic_gamma_density':
        return Kpoints.automatic_gamma_density(structure,
                                               tags['Grid Density'])
    if scheme == 'gamma_automatic':
        return Kpoints.gamma_automatic(tags["KPTS"], tags["Shift"])
    if scheme == 'monkhorst_automatic':
        return Kpoints.monkhorst_automatic(tags["KPTS"], tags["Shift"])

    print('Invalid kpoints generation type %s; fatal error' % scheme)
    sys.exit(1)
Example #2
0
 def dielectric(self, uc_type='prim', output_dir='./'):
     """Write VASP inputs for a DFPT dielectric run (IBRION=8, LEPSILON).

     Args:
         uc_type: 'prim' (primitive) or 'conv' (conventional) unit cell.
         output_dir: parent directory; files go to <output_dir>/<self.name>.

     Raises:
         ValueError: if ``uc_type`` is neither 'prim' nor 'conv' (the
             original would fail later with UnboundLocalError).
     """
     if uc_type == 'prim':
         uc_type = 'primitive'
         stru = self.stru_prim.copy()
     elif uc_type == 'conv':
         uc_type = 'conventional'
         stru = self.stru_conv.copy()
     else:
         raise ValueError("uc_type must be 'prim' or 'conv', got %r" % uc_type)
     path = os.path.join(output_dir, self.name)
     inputs = MPStaticSet(stru).all_input
     transf = {'history': [{'source': self.mpid, 'unit_cell': uc_type}],
               'defect_type': 'dielectric'}
     incar = inputs['INCAR']
     kpoints = Kpoints.automatic_gamma_density(stru, 2000)
     incar['ISPIN'] = 2 if self.is_spin_polarized else 1
     # DFPT dielectric-response settings
     incar['IBRION'] = 8
     incar['LEPSILON'] = True
     incar['LPEAD'] = True
     incar['EDIFF'] = 0.000001
     incar['LWAVE'] = False
     incar['LCHARG'] = False
     incar['ISMEAR'] = 0
     incar['ALGO'] = "Normal"
     incar['SIGMA'] = 0.01
     # tags from the static set that conflict with a DFPT run
     del incar['NSW'], incar['LVHAR'], incar['LAECHG']
     os.mkdir(path)
     # BUG FIX: the transformations.json handle was opened and never closed
     with open(os.path.join(path, "transformations.json"), 'w') as f:
         f.write(json.dumps(jsanitize(transf)))
     inputs['POTCAR'].write_file(os.path.join(path, "POTCAR"))
     incar.write_file(os.path.join(path, "INCAR"))
     kpoints.write_file(os.path.join(path, "KPOINTS"))
     inputs['POSCAR'].write_file(os.path.join(path, "POSCAR"))
Example #3
0
 def get_kpoints(self, structure):
     """
     Build the KPOINTS using the automated gamma-grid method.
     VASP crashes GW calculations on non-gamma-centred meshes.
     """
     if self.sort_structure:
         structure = structure.get_sorted_structure()
     density = int(self.kpoints_settings['grid_density'])
     if density == 1:
         return Kpoints.gamma_automatic()
     return Kpoints.automatic_gamma_density(structure, density)
 def get_kpoints(self, structure):
     """
     Return a gamma-centred KPOINTS object (automated gamma-grid method);
     GW runs in VASP crash on meshes that are not gamma-centred.
     """
     target = structure.get_sorted_structure() if self.sort_structure else structure
     grid_density = int(self.kpoints_settings['grid_density'])
     return (Kpoints.gamma_automatic() if grid_density == 1
             else Kpoints.automatic_gamma_density(target, grid_density))
Example #5
0
def convergence_k_test(folder, start_k_density):
    """
    given a folder of INCAR, POSCAR, POTCAR
    generate the several folders for different k mesh densities

    Args:
        folder: the working folder under which has the input files
        start_k_density: the start k density we use to generate KPOINTS files

    Writes:
        several folders with same vasp inputs except for the KPOINTS
    """
    # wipe any leftovers from a previous run
    for stale in glob(folder + '/kpoints-*'):
        rmtree(stale)

    # a CONTCAR may have been supplied instead of a POSCAR
    if os.path.exists(folder + '/CONTCAR'):
        move(folder + '/CONTCAR', folder + '/POSCAR')

    # collect five distinct k-meshes, stepping the density by 150 each try
    structure = Poscar.from_file(folder + '/POSCAR').structure
    seen_meshes = set()
    attempt = 0
    while len(seen_meshes) < 5:
        kppa = start_k_density + attempt * 150
        attempt += 1
        kpts = Kpoints.automatic_gamma_density(structure, kppa)
        mesh = kpts.kpts[0][0]
        if mesh in seen_meshes:
            continue
        seen_meshes.add(mesh)
        subfolder = folder + '/kpoints-' + str(mesh)
        os.mkdir(subfolder)
        print(subfolder)
        for fname in ('INCAR', 'POSCAR', 'POTCAR'):
            copyfile(folder + '/' + fname, subfolder + '/' + fname)
        kpts.write_file(subfolder + '/KPOINTS')
    def __init__(self, structure, **kwargs):
        """Build the force-constants input set for ``structure``.

        Resolves ISPIN (via ``magnetic_check`` when the caller did not set
        it), reconciles MAGMOM between the user settings and the template
        CONFIG, and picks the k-mesh: a gamma-centred mesh at the caller's
        ``grid_density`` (with PREC=High) or a fixed 3x3x3 mesh otherwise.
        """
        self.kwargs = copy.deepcopy(kwargs)
        uis = copy.deepcopy(self.kwargs.get('user_incar_settings', {}))
        new_config = copy.deepcopy(ForceConstantsSet.CONFIG)
        # default ISPIN from the magnetism check when the user did not choose
        if 'ISPIN' not in uis:
            if magnetic_check(structure):
                uis.update({'ISPIN': 2})
            else:
                uis.update({'ISPIN': 1})

        if 'magmom' in uis:
            # per-site magmoms supplied; drop the template-level MAGMOM
            if 'MAGMOM' in new_config['INCAR']:
                new_config['INCAR'].pop('MAGMOM')
        elif uis['ISPIN'] == 1:
            # BUG FIX: the original tested the misspelled key 'MAGMON' and
            # subscripted dict.pop (``pop['MAGMOM']`` is a TypeError).
            # Non-spin-polarized runs should carry no MAGMOM at all.
            uis.pop('MAGMOM', None)
            if 'MAGMOM' in new_config['INCAR']:
                new_config['INCAR'].pop('MAGMOM')

        new_config['INCAR'].update(uis)
        from pymatgen.io.vasp.inputs import Kpoints
        user_kpoints_settings = kwargs.get('user_kpoints_settings', {})
        grid_density = user_kpoints_settings.get('grid_density') or None
        if grid_density is not None:
            # user-driven density: denser gamma mesh plus high precision
            new_config['INCAR'].update({'PREC': 'High'})
            new_config['KPOINTS'] = Kpoints.automatic_gamma_density(
                structure, grid_density)
        else:
            new_config['KPOINTS'] = Kpoints(kpts=[
                [3, 3, 3],
            ])
        pot = self.kwargs.get('user_potcar_functional', None)
        if pot:
            new_config['POTCAR_FUNCTIONAL'] = pot
        super(ForceConstantsSet, self).__init__(structure,
                                                new_config,
                                                sort_structure=False,
                                                **self.kwargs)
Example #7
0
    def __init__(self, structure, **kwargs):
        """Build the VASP input set for an elastic-constants calculation.

        Strips legacy StaticSet kwargs, resolves ISPIN/MAGMOM from the
        user INCAR settings, merges a whitelisted subset of user keys into
        the class CONFIG, and fixes a gamma-centred k-mesh whose density
        depends on whether the structure is metallic.
        """
        # pop the old kwargs, backwards compatibility from the complex StaticSet
        uis = copy.deepcopy(kwargs.get('user_incar_settings', {}))
        old_kwargs = [
            'prev_incar', 'prev_kpoints', 'grid_density', 'lepsilon',
            'lcalcpol'
        ]
        for k in old_kwargs:
            kwargs.pop(k, None)
        self.kwargs = kwargs

        # default ISPIN from the magnetism check when the user did not choose
        if 'ISPIN' not in uis:
            if magnetic_check(structure):
                uis.update({'ISPIN': 2})
            else:
                uis.update({'ISPIN': 1})
        if uis['ISPIN'] == 1:
            # BUG FIX: the original tested the misspelled key 'MAGMON' and
            # subscripted dict.pop (``pop['MAGMOM']`` is a TypeError).
            uis.pop('MAGMOM', None)

        # NOTE(review): this mutates the class-level CONFIG dict, so settings
        # leak across instances — preserved because callers may rely on it.
        for key in uis.keys():
            if key not in ElasticSet.CONFIG['INCAR']:
                # convergence-control keys are deliberately not forwarded
                if key in {'NELM', 'EDIFF', 'NEDOS', 'KPOINT_BSE'}:
                    continue
                ElasticSet.CONFIG['INCAR'][key] = uis[key]
            elif key in ('ISPIN', 'ISMEAR', 'SIGMA'):
                # the only existing keys the user may override
                ElasticSet.CONFIG['INCAR'][key] = uis[key]

        if ElasticSet.CONFIG['INCAR'].get('ISPIN') == 1:
            ElasticSet.CONFIG['INCAR'].pop('MAGMOM', None)

        # SIGMA is meaningless with the tetrahedron method (ISMEAR=-5)
        if 'SIGMA' in ElasticSet.CONFIG[
                'INCAR'] and 'ISMEAR' in ElasticSet.CONFIG['INCAR']:
            if ElasticSet.CONFIG['INCAR']['ISMEAR'] == -5:
                ElasticSet.CONFIG['INCAR'].pop('SIGMA')

        from pymatgen.io.vasp.inputs import Kpoints
        # metals get a denser k-mesh
        grid_density = 15625 if metal_check(structure) else 8000
        ElasticSet.CONFIG['KPOINTS'] = Kpoints.automatic_gamma_density(
            structure, grid_density)
        pot = kwargs.get('user_potcar_functional', None)
        if pot:
            ElasticSet.CONFIG['POTCAR_FUNCTIONAL'] = pot
        kwargs.update(
            {'user_potcar_functional': ElasticSet.CONFIG['POTCAR_FUNCTIONAL']})
        kwargs.update({'user_incar_settings': ElasticSet.CONFIG['INCAR']})
        super(ElasticSet, self).__init__(structure,
                                         ElasticSet.CONFIG,
                                         sort_structure=False,
                                         **kwargs)
Example #8
0
def make_vasp_defect_files(dictio,
                           path_base,
                           task_id,
                           compo,
                           hse=False,
                           encut_redo=False):
    """
    simple static method creating VASP files ready for defect computations
    Args:
        dictio:
            the defects data as a dictionnary
        path_base:
            where do we write the files
        task_id:
            some id of the bulk computed data
        compo:
            Composition of the bulk computed data
        hse:
            hse run or not
        encut_redo:
            when True, reset ENCUT to 1.3x the largest POTCAR ENMAX
    """
    for site in dictio:
        #### bulk ####
        if site['short_name'] == 'bulk':
            bulk_unitcell = site['bulk_unitcell'].as_dict()
            uc_type = site['uc_type']
            sc_type = site['sc_type']
            for s in site['supercells']:
                defect_dist = round(closest_defect_distance(s['structure']), 2)
                bulk_info = '%s_%s_%s' % (uc_type, sc_type, str(defect_dist))
                dict_transf = {
                    'history': [{
                        'source': task_id,
                        'unitcell_type': site['uc_type']
                    }],
                    'defect_type': 'bulk',
                    'supercell': s['size']
                }
                structs = {'bulk_unitcell': bulk_unitcell}
                dict_params = MPStaticSet(s['structure']).all_input
                incar = IncarSetup(dict_params['INCAR'], 'bulk', hse)
                if encut_redo:
                    # 1.3x the largest ENMAX across the POTCARs
                    enmax = round(
                        max([i.PSCTR['ENMAX']
                             for i in dict_params['POTCAR']]) * 1.3)
                    incar['ENCUT'] = int(enmax)
                kpoint = (Kpoints.gamma_automatic()
                          if hse else Kpoints.monkhorst_automatic())
                path = path_base + "/" + str(
                    task_id
                ) + '_' + compo.reduced_formula + '/bulk/' + bulk_info
                os.makedirs(path)
                # BUG FIX: these handles were opened and never closed
                with open(path + "/transformations.json", 'w') as f:
                    f.write(json.dumps(jsanitize(dict_transf)))
                with open(path + "/structures.json", 'w') as g:
                    g.write(json.dumps(jsanitize(structs)))
                dict_params['POTCAR'].write_file(path + "/POTCAR")
                incar.write_file(path + "/INCAR")
                kpoint.write_file(path + "/KPOINTS")
                dict_params['POSCAR'].write_file(path + "/POSCAR")
            continue

        #### dielectric constants ####
        if site['short_name'] == 'dielectric':
            dict_transf = {
                'history': [{
                    'source': task_id,
                    'unit_cell': site['uc_type']
                }],
                'defect_type': 'dielectric'
            }
            dict_params = MPStaticSet(site['structure']).all_input
            incar = dict_params['INCAR']
            kpoints = Kpoints.automatic_gamma_density(site['structure'], 2000)
            try:
                bs = m.get_bandstructure_by_material_id(task_id)
                incar['ISPIN'] = 2 if bs.is_spin_polarized else 1
            except Exception:  # narrowed from a bare except; default to ISPIN=1
                incar['ISPIN'] = 1
            incar = IncarSetup(incar, 'dielectric', hse)
            if encut_redo:
                enmax = round(
                    max([i.PSCTR['ENMAX']
                         for i in dict_params['POTCAR']]) * 1.3)
                incar['ENCUT'] = int(enmax)
            path = path_base + "/" + str(
                task_id
            ) + '_' + compo.reduced_formula + "/" + 'dielectric/' + site[
                'uc_type']
            os.makedirs(path)
            # BUG FIX: handle was opened and never closed
            with open(path + "/transformations.json", 'w') as f:
                f.write(json.dumps(jsanitize(dict_transf)))
            dict_params['POTCAR'].write_file(path + "/POTCAR")
            incar.write_file(path + "/INCAR")
            kpoints.write_file(path + "/KPOINTS")
            dict_params['POSCAR'].write_file(path + "/POSCAR")
            continue

        #### defects ####
        uc_type = site['uc_type']
        sc_type = site['sc_type']
        for charge in site['charges']:
            uc = site['bulk_unitcell'].copy()
            bulk_unitcell = uc.as_dict()
            for s in site['supercells']:
                defect_dist = round(closest_defect_distance(s['structure']), 2)
                defect_info = '%s_%s_%s' % (uc_type, sc_type, str(defect_dist))
                # NOTE(review): uc is scaled in place, so with several
                # supercells the sizes compound — preserved from the original.
                uc.make_supercell(s['size'], to_unit_cell=True)
                bulk_supercell = uc.as_dict()
                dict_transf = {
                    'history': [{
                        'source': task_id,
                        'unit_cell': site['uc_type']
                    }],
                    'compo': compo.as_dict(),
                    'defect_type': site['short_name'],
                    'defect_site': site['unique_sites'].as_dict(),
                    'charge': charge,
                    'supercell': s['size']
                }
                dict_params = MPRelaxSet(s['structure']).all_input
                try:
                    defect_no_relaxation = s['struct_no_move'].as_dict()
                except Exception:  # narrowed; fall back to the relaxed structure
                    defect_no_relaxation = s['structure'].as_dict()
                structs = {
                    'bulk_unitcell': bulk_unitcell,
                    'bulk_supercell': bulk_supercell,
                    'defect_no_relaxation': defect_no_relaxation
                }
                incar = IncarSetup(dict_params['INCAR'], 'defect', hse)
                if encut_redo:
                    enmax = round(
                        max([i.PSCTR['ENMAX']
                             for i in dict_params['POTCAR']]) * 1.3)
                    incar['ENCUT'] = int(enmax)
                kpoint = (Kpoints.gamma_automatic()
                          if hse else Kpoints.monkhorst_automatic())
                path=path_base+"/"+str(task_id)+'_'+compo.reduced_formula+ \
                    '/'+str(site['short_name'])+"/"+"charge"+str(charge)+'/'+defect_info
                os.makedirs(path)
                # BUG FIX: f/g were only closed for the last supercell
                with open(path + "/transformations.json", 'w') as f:
                    f.write(json.dumps(jsanitize(dict_transf)))
                with open(path + "/structures.json", 'w') as g:
                    g.write(json.dumps(jsanitize(structs)))
                # electron count: one POTCAR entry per distinct element
                comp = s['structure'].composition
                sum_elec = 0
                elts = set()
                for p in dict_params['POTCAR']:
                    if p.element not in elts:
                        sum_elec += comp.as_dict()[p.element] * p.nelectrons
                        elts.add(p.element)
                if charge != 0:
                    incar['NELECT'] = sum_elec - charge
                dict_params['POTCAR'].write_file(path + "/POTCAR")
                incar.write_file(path + "/INCAR")
                kpoint.write_file(path + "/KPOINTS")
                dict_params['POSCAR'].write_file(path + "/POSCAR")
Example #9
0
def process_input(args):
    """
    Dispatch the project-management CLI.

    Supported commands: start_project, continue_project, check_project,
    rerun_project, analyze_project, archive_project, load_settings,
    qcheck_project, cancel_project.  ``args`` is the parsed argparse
    namespace; ``args.i`` carries the project/input yaml for every command.

    NOTE(review): yaml.load without an explicit Loader is unsafe on
    untrusted input and raises in PyYAML >= 6; consider yaml.safe_load
    (kept as-is here to preserve behaviour for tagged documents).
    """
    if args.command == 'start_project':
        if args.i:
            # BUG FIX: the file was opened twice (f plus a second bare open)
            with open(args.i) as f:
                my_project = yaml.load(f)  ## this will be the only CLI input
            NAME = my_project['NAME']

            INCAR_GENERAL = my_project['Incar_General']
            with open(my_project['Potcar_Spec']) as spec_file:
                POTCAR_SPEC = yaml.load(spec_file)

            MATERIALS_LIST = my_project['Insilico_Fab']['Material_List']
            struct_list = [Poscar.from_file(poscar) for poscar in glob('StructsDir/*.vasp') \
                  if 'StructsDir' in MATERIALS_LIST] + \
                 [Poscar(get_struct_from_mp(p)) for p in MATERIALS_LIST \
                  if 'StructsDir' not in p]
            WORKFLOWS = my_project['Workflow']
            project_log = get_logger(NAME + "_InSilico_Materials")

            error_handler = [VaspErrorHandler()]
            Order_WfNames = list(np.sort(list(WORKFLOWS['Steps'].keys())))
            steps_map = {'StepVASP0': StepVASP0, 'StepVASP1': StepVASP1}
            steps_dict = {
                k: WORKFLOWS['Steps'][k]['TYPE']
                for k in Order_WfNames
            }
            # launch the first workflow step
            steps_map[steps_dict[list(steps_dict.keys())[0]]](my_project,
                                                              struct_list)

            project_abs_dir = os.path.abspath(os.path.curdir)
            my_project['Project_Dir'] = project_abs_dir
            my_project['Running_Wflow'] = [int(Order_WfNames[0])]

            with open(args.i, 'w') as projfile:
                yaml.dump(my_project, projfile, default_flow_style=False)
            if os.path.exists('custodian.json'):
                os.remove('custodian.json')

    if args.command == 'continue_project':
        if args.i:
            with open(args.i) as f:
                my_project = yaml.load(f)  ## this will be the only CLI input
            NAME = my_project['NAME']

            WORKFLOWS = my_project['Workflow']

            Order_WfNames = list(np.sort(list(WORKFLOWS['Steps'].keys())))
            steps_map = {'StepVASP0': StepVASP0, 'StepVASP1': StepVASP1}
            steps_dict = {
                k: WORKFLOWS['Steps'][k]['TYPE']
                for k in Order_WfNames
            }
            # run the first step that has not been started yet
            for k in Order_WfNames:
                if k not in my_project['Running_Wflow']:
                    steps_map[steps_dict[list(steps_dict.keys())[k]]](
                        my_project, k)
                    orig_done = my_project['Running_Wflow']
                    orig_done.append(k)
                    my_project['Running_Wflow'] = [int(o) for o in orig_done]
                    with open(args.i, 'w') as projfile:
                        yaml.dump(my_project,
                                  projfile,
                                  default_flow_style=False)
                    if os.path.exists('custodian.json'):
                        os.remove('custodian.json')
                    break

    if args.command == 'check_project':
        # check if any input spec for the project
        if args.i:
            with open(args.i) as f:
                project_spec = yaml.load(f)
            if args.c:
                workflow_chkpts = [args.c]
            else:
                workflow_chkpts = glob('{}*.json'.format(project_spec['NAME']))
            proj_dir = project_spec['Project_Dir']
            os.chdir(proj_dir)
            CustodianChecks=\
              {chk:check_errors(chkfile=chk,logfile_name=\
                   'Custodian_'+project_spec['NAME']) for chk in workflow_chkpts}
            with open('{}_CustodianReport.yaml'.format(project_spec['NAME']),
                      'w') as report:
                yaml.dump(CustodianChecks, report, default_flow_style=False)

    elif args.command == 'rerun_project':
        # Custodian yamls are input
        print(args.i, len(args.i))
        if args.i:
            with open(args.i) as f:
                rerun_spec = yaml.load(f)
            rerun_logs = get_logger('{}_reruns'.format(
                args.i.replace('.yaml', '')))
            proj_dir = os.path.abspath(os.path.curdir)
            if args.c:
                rerun_chkpts = [args.c]
            else:
                rerun_chkpts = list(rerun_spec.keys())
            print(rerun_chkpts)
            for k in rerun_chkpts:
                for case in rerun_spec[k]:
                    print('Rerunning {}'.format(case['ErrorDir'][0]))
                    rerun_logs.info('Rerunning {}'.format(case['ErrorDir'][0]))
                    if args.s:
                        rerun_logs.info(
                            'Using new submit_file {} for all rerun'.format(
                                args.s))
                        os.system('cp {0} {1}'.format(args.s,
                                                      case['ErrorDir'][0]))
                        submit_cmd = ['sbatch', args.s]
                    else:
                        if case['Error'] == ['Memory Error']:
                            if args.m:
                                rerun_logs.info(
                                    'Error Memory adding {}'.format(args.m))
                                add_mem_submit_file(
                                    case['ErrorDir'][0] + '/submit_script',
                                    args.m)
                            else:
                                rerun_logs.info('Error Memory adding 3000')
                                add_mem_submit_file(
                                    case['ErrorDir'][0] + '/submit_script',
                                    3000)
                        elif 'TIME LIMIT' in case['Error'][0]:
                            if args.w:
                                rerun_logs.info(
                                    'Error TIME LIMIT adding {}'.format(
                                        args.w))
                                add_walltime(
                                    case['ErrorDir'][0] + '/submit_script',
                                    args.w)
                            else:
                                rerun_logs.info(
                                    'Error TIME LIMIT adding 20:00:00')
                                add_walltime(
                                    case['ErrorDir'][0] + '/submit_script',
                                    '20:00:00')
                        submit_cmd = ['sbatch', 'submit_script']

                    os.chdir(case['ErrorDir'][0])

                    # optional INCAR edits before resubmitting
                    if args.inc:
                        incar = Incar.from_file('INCAR')
                        user_dict = ast.literal_eval(args.inc)
                        incar.update(user_dict)
                        incar.write_file('INCAR')
                    if args.dinc:
                        incar = Incar.from_file('INCAR')
                        user_exp = ast.literal_eval(args.dinc)
                        for d in user_exp:
                            if d in list(incar.keys()):
                                del incar[d]
                        incar.write_file('INCAR')
                    if args.kpt:
                        user_exp = ast.literal_eval(args.kpt)
                        if isinstance(user_exp, tuple):
                            kpoints = Kpoints.gamma_automatic(user_exp)
                        else:
                            struct = Structure.from_file('POSCAR')
                            kpoints = Kpoints.automatic_gamma_density(
                                struct, user_exp)
                        kpoints.write_file('KPOINTS')

                    # BUG FIX: the computed submit_cmd was previously ignored
                    # in favour of a hardcoded ['sbatch', 'submit_script']
                    p = subprocess.Popen(submit_cmd,
                                         stdout=subprocess.PIPE,
                                         stderr=subprocess.PIPE)
                    stdout, stderr = p.communicate()
                    rerun_job_id = str(stdout).rstrip(
                        '\n').split()[-1].replace("\\n'", '')
                    rerun_logs.info('running job {0} in {1}'.format(
                        rerun_job_id, case['ErrorDir'][0]))
                    os.chdir(proj_dir)

            rerun_logs.info('Finished submitting reruns')
            print('Finished submitting reruns')

    elif args.command == 'analyze_project':
        # check for yaml analysis input for project
        if args.i:
            with open(args.i) as f:
                proj_spec = yaml.load(f)
            proj_wflow_st = proj_spec['Workflow']['Steps']
            for step in proj_wflow_st:
                print(step)
                if 'Analysis' in list(proj_wflow_st[step].keys()):
                    analyze_script = proj_wflow_st[step]['Analysis']['Script']
                    analyze_input = proj_wflow_st[step]['Analysis']['Input']
                    analyze_output = proj_wflow_st[step]['Analysis']['Output']
                    if '.py' in analyze_script:
                        print(analyze_script, analyze_input, analyze_output)
                        p = subprocess.Popen(['python',analyze_script, '-i', analyze_input,\
                            '-o', analyze_output], stdout=subprocess.PIPE, \
                            stderr=subprocess.PIPE)
                        stdout, stderr = p.communicate()
                    # NOTE(review): stdout is unbound (NameError) when the
                    # script is not a .py file — preserved from the original.
                    print(stdout)
            print(
                'Analyzed the project according to specified post processing script'
            )

    elif args.command == 'archive_project':
        # check for workflow.yaml input file
        if args.i:
            print(
                'tar.gz the project json files and csv and vasprun.xml files from the project directory'
            )
            with open(args.i) as f:
                proj_spec = yaml.load(f)
            name_spec = proj_spec['NAME']
            proj_dir = proj_spec['Project_Dir']
            os.chdir(proj_dir)
            # first the json checkpoints
            os.system('tar cvzf {}.tar.gz {}*.json '.format(
                name_spec, name_spec))
            # then add all vaspruns to tar archive
            # BUG FIX: find's bare {} placeholder must be escaped as {{}} or
            # str.format raises ("cannot switch from manual to automatic")
            os.system(
                'find . -iname "*.xml" -exec tar -rvf {0}.tar {{}} \;'.format(
                    name_spec + '_vaspruns_csvs'))
            # then add csvs
            os.system(
                'find . -iname "*.csv" -exec tar -rvf {0}.tar {{}} \;'.format(
                    name_spec + '_vaspruns_csvs'))
            # compress the archive
            # BUG FIX: two fields with a single argument raised IndexError
            os.system('tar cvzf {0}.tar.gz {0}.tar'.format(name_spec +
                                                           '_vaspruns_csvs'))
            # finally delete WAVECARS and CHG, CHGCARS
            os.system('find . -iname "WAVECAR" -exec rm {} \;')
            os.system('find . -iname "CHG*" -exec rm {} \;')

    elif args.command == 'load_settings':
        if args.i:
            user_dict = ast.literal_eval(args.i)
            if not os.path.exists(SETTINGS_FILE):
                user_configs = {key:None for key in ['username','bulk_binary','twod_binary',\
                     'ncl_binary', 'sol_binary', 'custom_binary',\
                     'vdw_kernel','potentials','MAPI_KEY', 'queue_system', 'queue_template']}
                with open(os.path.join(os.path.expanduser('~'),'.mpint_config.yaml'),'w') \
                  as config_file:
                    yaml.dump(user_configs,
                              config_file,
                              default_flow_style=False)
            with open(SETTINGS_FILE) as settings_file:
                config_data = yaml.load(settings_file)
            config_data.update(user_dict)
            load_config_vars(config_data)

    elif args.command == 'qcheck_project':
        states = []
        if args.i:
            with open(args.i) as f:
                project_spec = yaml.load(f)
            workflow_logs = [
                fi for fi in glob('{}*.log'.format(project_spec['NAME']))
                if 'InSilico' not in fi
            ]
            for l in workflow_logs:
                states = []
                print('Qcheck on {}'.format(l))
                tot, job_id, job_dir, job_name = decode_log_file(l)
                for n, j in enumerate(job_id):
                    state, oszi, nsw = decode_q(j, job_dir[n])
                    print(state, job_dir[n], j)
                    if state == 'R' and isinstance(oszi, Oszicar):
                        try:
                            print('Ionic Steps', len(oszi.ionic_steps), nsw,
                                  oszi.ionic_steps[-1])
                        except Exception:  # narrowed from a bare except
                            print('First Ionic Step', oszi.electronic_steps)
                    states.append(state)
                running_states = [s for s in states if s == 'R']
                print('{0} of {1} total jobs running'.format(
                    len(running_states), len(job_id)))

    elif args.command == 'cancel_project':
        states = []
        if args.i:
            with open(args.i) as f:
                project_spec = yaml.load(f)
            workflow_logs = [
                fi for fi in glob('{}*.log'.format(project_spec['NAME']))
                if 'InSilico' not in fi
            ]
            for l in workflow_logs:
                states = []
                print('Qcheck on {}'.format(l))
                tot, job_id, job_dir, job_name = decode_log_file(l)
                # BUG FIX: the original cancelled str(job_id) — the whole
                # list — on every iteration instead of the individual job
                for n, j in enumerate(job_id):
                    os.system('scancel {}'.format(j))
Example #10
0
    def set_kpoints(self, kpoint=None, poscar=None, ibzkpth=None):
        """
        Set ``self.kpoints`` according to ``self.Grid_type``.

        Args:
            kpoint: grid-type-specific value — mesh subdivisions, a kppa/kppvol
                density, or line-mode divisions, depending on Grid_type.
            poscar: Poscar whose structure the mesh is built for; defaults
                to ``self.poscar``.
            ibzkpth: path to an IBZKPT file, used only by the
                'hse_bands_2D' database grid type.
        """
        # useful to check if a poscar is supplied from setup_poscar_jobs (most often the case)
        # or this is a single poscar use case
        if not poscar:
            poscar = self.poscar

        # splitting into two if elif branches means fewer if statements to check on
        # a run

        # Most general method of setting the k-points for
        # different grid types
        # NOTE: requires that at least one k-points value be passed
        # as a turn - knobs list value
        # this is not true for values that may be calculated out of
        # a database
        # use this part only if this is a non-database run for example
        # for k-points calibration

        if not self.database:

            if self.Grid_type == 'M':
                # Monkhorst-Pack mesh from explicit subdivisions
                self.kpoints = Kpoints.monkhorst_automatic(kpts=kpoint)
            elif self.Grid_type == 'A':
                # fully automatic grid from a single length parameter
                self.kpoints = Kpoints.automatic(subdivisions=kpoint)
            elif self.Grid_type == 'G':
                # gamma-centred mesh from explicit subdivisions
                self.kpoints = Kpoints.gamma_automatic(kpts=kpoint)
            elif self.Grid_type == '3D_vol':
                # density per A^-3 of reciprocal cell
                self.kpoints = Kpoints.automatic_density_by_vol(structure=poscar.structure,
                                                                kppvol=kpoint)
            elif self.Grid_type == 'bulk_bands_pbe':
                # line mode along the high-symmetry path for band structures
                self.kpoints = Kpoints.automatic_linemode(divisions=kpoint,
                                                          ibz=HighSymmKpath(
                                                              poscar.structure))

            elif self.Grid_type == 'D':
                # mesh from a kppa (k-points per atom) density
                self.kpoints = Kpoints.automatic_density(structure=poscar.structure,kppa=kpoint)

            elif self.Grid_type == 'Finer_G_Mesh':
                # kpoint is the scaling factor and self.kpoints is the old kpoint mesh
                self.logger.info('Setting Finer G Mesh for {0} by scale {1}'.format(kpoint, self.finer_kpoint))
                self.kpoints = Kpoints.gamma_automatic(kpts = \
                   [i * self.finer_kpoint for i in kpoint])
                self.logger.info('Finished scaling operation of k-mesh')

        # applicable for database runs
        # future constructs or settings can be activated via a yaml file
        # database yaml file or better still the input deck from its specification
        # decides what combination of input calibrate constructor settings to use
        # one of them being the grid_type tag

        elif self.database == 'twod':

            # set of kpoints settings according to the 2D database profile
            # the actual settings of k-points density
            # will in future come from any database input file set

            if self.Grid_type == 'hse_bands_2D_prep':
                kpoint_dict = Kpoints.automatic_gamma_density(poscar.structure,
                                                              200).as_dict()
                kpoint_dict['kpoints'][0][2] = 1  # remove z kpoints
                self.kpoints = Kpoints.from_dict(kpoint_dict)

            elif self.Grid_type == 'hse_bands_2D':
                # can at most return the path to the correct kpoints file
                # needs kpoints to be written out in instrument in a different way
                # not using the Kpoints object
                self.kpoints = get_2D_hse_kpoints(poscar.structure, ibzkpth)

            elif self.Grid_type == 'bands_2D':
                kpoint_dict = Kpoints.automatic_linemode(divisions=20,
                                                         ibz=HighSymmKpath(poscar.structure)).as_dict()
                self.kpoints = Kpoints.from_dict(kpoint_dict)

            elif self.Grid_type == 'relax_2D':
                # general relaxation settings for 2D
                kpoint_dict = Kpoints.automatic_gamma_density(poscar.structure,
                                                              1000).as_dict()
                kpoint_dict['kpoints'][0][2] = 1
                self.kpoints = Kpoints.from_dict(kpoint_dict)

            elif self.Grid_type == 'relax_3D':
                # general relaxation settings for 3D
                # NOTE(review): unlike the branches above, .as_dict() is not
                # called here, so from_dict receives a Kpoints object — this
                # looks like a bug; confirm and add .as_dict() if so.
                kpoint_dict = Kpoints.automatic_gamma_density(
                    poscar.structure, 1000)
                self.kpoints = Kpoints.from_dict(kpoint_dict)