Example #1
 def print_results(self, structure, file_name='convergence_results'):
     """
     print a summary of the convergence results to file
     """
     data = GWConvergenceData(spec=self, structure=structure)
     nodata = False
     if data.read_conv_res_from_file(
             os.path.join(
                 s_name(structure) + '.res',
                 s_name(structure) + '.conv_res')):
         s = '%s %s %s ' % (s_name(structure),
                            str(data.conv_res['values']['ecuteps']),
                            str(data.conv_res['values']['nscf_nbands']))
     else:
         s = '%s 0.0 0.0 ' % s_name(structure)
         nodata = True
     con_dat = self.code_interface.read_convergence_data(
         s_name(structure) + '.res')
     if con_dat is not None:
         s += '%s ' % con_dat['gwgap']
     else:
         s += '0.0 '
     s += '\n'
     with open(file_name, 'a') as f:
         f.write(s)
     if nodata:
         raise RuntimeError('no convergence data found')
Example #2
 def test_VaspSingle(self):
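     """
     Create the VASP input files and job scripts for a single GW 'prep' work
     and check that the expected files end up in the structure's directory.
     """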
     spec = GWSpecs()
     spec.data['code'] = 'VASP'
     work = SingleVaspGWWork(structure=structure, spec=spec, job='prep')
     work.create_input()
     print(os.listdir('.'))
     for f in ['INCAR', 'POTCAR', 'POSCAR', 'KPOINTS', 'INCAR.DIAG']:
         self.assertIn(f, os.listdir(path=s_name(structure)))
     work.create_job_script(add_to_collection=False)
     self.assertIn('job', os.listdir(path=s_name(structure)))
     work.create_job_script(mode='slurm', add_to_collection=False)
     self.assertIn('job', os.listdir(path=s_name(structure)))
Example #3
 def set_status(self, structure):
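     """
     Read the grid level and the all_done flag for this structure from its
     '.full_res' file and set the corresponding work directory; fall back to
     grid 0 and not done if the file cannot be read.
     """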
     self._grid = 0
     self._all_done = False
     self._workdir = None
     self._converged = is_converged(False, structure)
     try:
         self._grid = read_grid_from_file(s_name(structure) +
                                          ".full_res")['grid']
         self._all_done = read_grid_from_file(
             s_name(structure) + ".full_res")['all_done']
         self._workdir = os.path.join(s_name(structure),
                                      'work_' + str(self.grid))
     except (IOError, OSError):
         pass
Example #4
 def get_work_dir(self):
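     """
     Return the work directory for this job: the structure name, extended with
     the test parameter and its value as long as the convergence study is not
     finished.
     """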
     name = s_name(self.structure)
     if not self.all_converged:
         return str(name) + '_' + str(self.option['test']) + '_' + str(
             self.option['value'])
     else:
         return str(name)
Example #5
 def __init__(self, structure, spec, option=None):
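     """
     Set up a single GW work for 'structure' according to 'spec'. An optional
     'option' dict with converged parameters is translated ('nbands' becomes
     'nscf_nbands'); when it covers all convergence parameters the work is
     marked as fully converged and '.conv' is appended to the work directory.
     The pseudopotential table is built from the ABINIT_PS and ABINIT_PS_EXT
     environment variables.
     """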
     self.structure = structure
     self.spec = spec
     if option is not None:
         option.pop('gap', None)
         option['nscf_nbands'] = option['nbands']
         option.pop('nbands', None)
     self.option = option
     print('option:', option)
     self.bands_fac = 1
     self.tests = self.__class__.get_defaults_tests()
     self.convs = self.__class__.get_defaults_convs()
     self.response_models = self.__class__.get_response_models()
     if self.option is None:
         self.all_converged = False
     elif len(self.option) == len(self.convs):
         self.all_converged = True
     else:
         self.all_converged = False
     path_add = '.conv' if self.all_converged else ''
     self.work_dir = s_name(self.structure) + path_add
     try:
         abi_pseudo = os.environ['ABINIT_PS_EXT']
         abi_pseudo_dir = os.environ['ABINIT_PS']
     except KeyError:
         abi_pseudo = None
         abi_pseudo_dir = None
     pseudos = []
     for element in self.structure.composition.element_composition:
         pseudo = os.path.join(abi_pseudo_dir, str(element) + abi_pseudo)
         pseudos.append(pseudo)
     self.pseudo_table = PseudoTable(pseudos)
Example #6
 def __init__(self, structure, spec):
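     """
     Container for the convergence data of a single structure: the raw data
     points, the convergence results, the full-result status and the code
     interface used to read them.
     """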
     self.structure = structure
     self.spec = spec
     self.data = {}
     self.code_interface = get_code_interface(spec['code'])
     self.conv_res = {'control': {}, 'values': {}, 'derivatives': {}}
     self.full_res = {'all_done': False, 'grid': 0}
     self.final_values = ()
     if structure is not None:
         self.name = s_name(structure)
     else:
         self.name = 'notknown'
     self.type = {
         'parm_scr': False,
         'full': False,
         'single': False,
         'test': False
     }
Example #7
    def test_SiC_conv(self):
        """
        Testing a full convergence calculation cycle on SiC using precomputed data.
        """

        # the current version uses reference data from a run with the production version on zenobe
        # once everything checks out, the run should be redone with the input generated by this version
        # to replace the reference data

        wdir = tempfile.mkdtemp()
        os.chdir(wdir)

        temp_ABINIT_PS_EXT = os.environ.get('ABINIT_PS_EXT', None)
        temp_ABINIT_PS = os.environ.get('ABINIT_PS', None)

        os.environ['ABINIT_PS_EXT'] = '.psp8'
        os.environ['ABINIT_PS'] = wdir

        reference_dir = os.path.join(__reference_dir__, 'SiC_test_case')
        if not os.path.isdir(reference_dir):
            raise RuntimeError('py.test needs to be started in the HTGW root, '
                               '%s does not exist' % __reference_dir__)

        # copy input
        print(wdir)
        self.assertTrue(os.path.isdir(reference_dir))
        src_files = os.listdir(reference_dir)
        for file_name in src_files:
            full_file_name = os.path.join(reference_dir, file_name)
            if os.path.isfile(full_file_name):
                shutil.copy(full_file_name, wdir)
        self.assertEqual(len(os.listdir(wdir)), 6)

        print(os.listdir(wdir))
        structure = Structure.from_file('SiC.cif')
        structure.item = 'SiC.cif'

        print(' ==== generate flow ===  ')
        gwsetup(update=False)
        self.assertTrue(os.path.isdir(os.path.join(wdir, 'SiC_SiC.cif')))
        print(os.listdir(os.path.join(wdir)))
        print(os.listdir(os.path.join(wdir, 'SiC_SiC.cif')))
        self.assertTrue(os.path.isfile(os.path.join(wdir, 'SiC_SiC.cif', '__AbinitFlow__.pickle')))
        self.assertEqual(len(os.listdir(os.path.join(wdir, 'SiC_SiC.cif', 'w0'))), 48)

        print(' ==== copy reference results from first calculation ===  ')
        shutil.rmtree(os.path.join(wdir, 'SiC_SiC.cif'))
        shutil.copytree(os.path.join(reference_dir, 'ref_res', 'SiC_SiC.cif'), os.path.join(wdir, 'SiC_SiC.cif'))
        self.assertTrue(os.path.isdir(os.path.join(wdir, 'SiC_SiC.cif')))
        self.assertEqual(len(os.listdir(os.path.join(wdir, 'SiC_SiC.cif', 'w0'))), 68)

        print(' ==== process output ===  ')
        gwoutput()
        print(os.listdir('.'))
        self.assertTrue(os.path.isfile('plot-fits'))
        self.assertTrue(os.path.isfile('plots'))
        self.assertEqual(is_converged(hartree_parameters=True, structure=structure, return_values=True),
                         {u'ecut': 44.0, u'ecuteps': 4.0, u'gap': 6.816130591466406, u'nbands': 60})
        self.assertTrue(os.path.isfile('SiC_SiC.cif.full_res'))

        print(' ==== generate next flow ===  ')
        print('      version with bandstructure and dos  ')
        gwsetup(update=False)
        self.assertTrue(os.path.isdir('SiC_SiC.cif.conv'))
        print(os.listdir(os.path.join(wdir, 'SiC_SiC.cif.conv', 'w0')))
        self.assertEqual(len(os.listdir(os.path.join(wdir, 'SiC_SiC.cif.conv', 'w0'))), 15)

        print(' ==== copy reference from second flow ===  ')
        time.sleep(1)  # the .conv directory should be newer than the first one
        shutil.rmtree(os.path.join(wdir, 'SiC_SiC.cif.conv'))
        shutil.copytree(os.path.join(reference_dir, 'ref_res', 'SiC_SiC.cif.conv'),
                        os.path.join(wdir, 'SiC_SiC.cif.conv'))
        self.assertTrue(os.path.isdir(os.path.join(wdir, 'SiC_SiC.cif.conv')))
        self.assertEqual(len(os.listdir(os.path.join(wdir, 'SiC_SiC.cif.conv', 'w0'))), 13)

        print(' ==== process output ===  ')
        backup = sys.stdout
        sys.stdout = StringIO()  # capture output
        gwoutput()
        out = sys.stdout.getvalue()  # release output
        sys.stdout.close()  # close the stream
        sys.stdout = backup  # restore original stdout

        print('=== *** ====\n'+out+'=== *** ====\n')
        gap = 0
        for l in out.split('\n'):
            if 'values' in l:
                gap = float(l.split(' ')[6])
        self.assertEqual(gap, 7.114950664158926)

        print(os.listdir('.'))
        print('processed')
        self.assertTrue(os.path.isfile('SiC_SiC.cif.full_res'))
        full_res = read_grid_from_file(s_name(structure)+'.full_res')
        self.assertEqual(full_res, {u'all_done': True, u'grid': 0})
        self.assertTrue(os.path.isdir(os.path.join(wdir, 'SiC_SiC.cif.res')))
        self.assertEqual(len(os.listdir(os.path.join(wdir, 'SiC_SiC.cif.res'))), 5)
        print(os.listdir(os.path.join(wdir, 'SiC_SiC.cif.res')))

        msrf = MySigResFile(os.path.join(wdir, 'SiC_SiC.cif.res', 'out_SIGRES.nc'))
        self.assertEqual(msrf.homo, 6.6843830378711786)
        self.assertEqual(msrf.lumo, 8.0650328308487982)
        self.assertEqual(msrf.homo_gw, 6.2325949743130034)
        self.assertEqual(msrf.lumo_gw, 8.2504215095164763)
        self.assertEqual(msrf.fundamental_gap('ks'), msrf.lumo - msrf.homo)
        self.assertEqual(msrf.fundamental_gap('gw'), msrf.lumo_gw - msrf.homo_gw)
        self.assertAlmostEqual(msrf.fundamental_gap('gamma'), gap, places=3)

        # since we now have a mysigresfile object we test the functionality

        msrf.get_scissor()
        # return self.qplist_spin[0].build_scissors(domains=[[-200, mid], [mid, 200]], k=1, plot=False)

        res = msrf.get_scissor_residues()
        self.assertEqual(res, [0.05322754684319431, 0.34320373172956475])
        # return sc.residues

        #msrf.plot_scissor(title='')

        #msrf.plot_qpe(title='')

        # to be continued

        if temp_ABINIT_PS is not None:
            os.environ['ABINIT_PS_EXT'] = temp_ABINIT_PS_EXT
            os.environ['ABINIT_PS'] = temp_ABINIT_PS
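
The stdout-capture pattern used above (swap sys.stdout for a StringIO, call gwoutput(), then restore the original stream) can also be written with contextlib.redirect_stdout from the standard library. A minimal, self-contained sketch of that technique, not part of the test itself:

    import io
    from contextlib import redirect_stdout

    buf = io.StringIO()
    with redirect_stdout(buf):
        print('captured line')  # stands in for the gwoutput() call above
    out = buf.getvalue()
    assert 'captured line' in out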
Example #8
    def create(self):
        """
        create single abinit G0W0 flow
        """
        # manager = 'slurm' if 'ceci' in self.spec['mode'] else 'shell'
        # an AbiStructure object has an overridden version of get_sorted_structure that sorts according to Z
        # this could also be pulled into the constructor of Abistructure
        # abi_structure = self.structure.get_sorted_structure()
        from abipy import abilab
        item = copy.copy(self.structure.item)
        self.structure.__class__ = abilab.Structure
        self.structure = self.structure.get_sorted_structure_z()
        self.structure.item = item
        abi_structure = self.structure
        manager = TaskManager.from_user_config()
        # Initialize the flow.
        flow = Flow(self.work_dir, manager, pickle_protocol=0)
        # kpoint grid defined over density 40 > ~ 3 3 3
        if self.spec['converge'] and not self.all_converged:
            # (2x2x2) gamma centered mesh for the convergence test on nbands and ecuteps
            # if kp_in is present in the specs a kp_in X kp_in x kp_in mesh is used for the convergence study
            print('== here ===')
            print(self.spec.__class__)
            json.dumps(self.spec.data, indent=2)
            if 'kp_in' in self.spec.data.keys():
                if self.spec['kp_in'] > 9:
                    print(
                        'WARNING:\nkp_in should be < 13 to generate an n x n x n mesh\nfor larger values a grid with '
                        'density kp_in will be generated')
                kppa = self.spec['kp_in']
            else:
                kppa = 2
        else:
            # use the specified density for the final calculation with the converged nbands and ecuteps, or for
            # other stand-alone calculations
            kppa = self.spec['kp_grid_dens']
        gamma = True

        # 'standard' parameters for stand alone calculation
        scf_nband = self.get_bands(self.structure) + 20
        # additional bands to accommodate nbdbuf, plus a bit extra
        nscf_nband = [10 * self.get_bands(self.structure)]

        nksmall = None
        ecuteps = [8]

        extra_abivars = dict()

        # read user-defined extra abivars from the file 'extra_abivars'; it should contain a dictionary
        extra_abivars.update(read_extra_abivars())
        # self.bands_fac = 0.5 if 'gwcomp' in extra_abivars.keys() else 1
        # self.convs['nscf_nbands']['test_range'] =
        # tuple([self.bands_fac*x for x in self.convs['nscf_nbands']['test_range']])

        ecut = extra_abivars.pop('ecut', 44)
        ecutsigx = extra_abivars.pop('ecutsigx', 44)

        # if npfft is too large or if npfft changes between the nscf calculation and the screening / sigma
        # calculations strange things can happen
        if 'npfft' not in extra_abivars:
            extra_abivars['npfft'] = 3

        if ecutsigx > ecut:
            raise RuntimeError('ecutsigx cannot be larger than ecut')
        if ecutsigx < max(ecuteps):
            raise RuntimeError('ecutsigx < ecuteps, this is not realistic')

        response_models = ['godby']
        if 'ppmodel' in extra_abivars.keys():
            response_models = [extra_abivars.pop('ppmodel')]

        if self.option is not None:
            for k in self.option.keys():
                if k == 'ecut':
                    ecut = self.option[k]
                if k in ['ecuteps', 'nscf_nbands']:
                    pass
                else:
                    extra_abivars.update({k: self.option[k]})

        try:
            grid = read_grid_from_file(s_name(self.structure) +
                                       ".full_res")['grid']
            all_done = read_grid_from_file(
                s_name(self.structure) + ".full_res")['all_done']
            workdir = os.path.join(s_name(self.structure), 'w' + str(grid))
        except (IOError, OSError):
            grid = 0
            all_done = False
            workdir = None

        if not all_done:
            if (self.spec['test']
                    or self.spec['converge']) and not self.all_converged:
                if self.spec['test']:
                    print('| setting test calculation')
                    tests = SingleAbinitGWWork(self.structure, self.spec).tests
                    response_models = []
                else:
                    if grid == 0:
                        print('| setting convergence calculations for grid 0')
                        # tests = SingleAbinitGWWorkFlow(self.structure, self.spec).convs
                        tests = self.convs
                    else:
                        print('| extending grid')
                        # tests = expand(SingleAbinitGWWorkFlow(self.structure, self.spec).convs, grid)
                        tests = expand(self.convs, grid)
                ecuteps = []
                nscf_nband = []
                for test in tests:
                    if tests[test]['level'] == 'scf':
                        if self.option is None:
                            extra_abivars.update(
                                {test + '_s': tests[test]['test_range']})
                        elif test in self.option:
                            extra_abivars.update({test: self.option[test]})
                        else:
                            extra_abivars.update(
                                {test + '_s': tests[test]['test_range']})
                    else:
                        for value in tests[test]['test_range']:
                            if test == 'nscf_nbands':
                                nscf_nband.append(
                                    value * self.get_bands(self.structure))
                                # scr_nband takes nscf_nbands if not specified
                                # sigma_nband takes scr_nbands if not specified
                            if test == 'ecuteps':
                                ecuteps.append(value)
                            if test == 'response_model':
                                response_models.append(value)
            elif self.all_converged:
                print(
                    '| setting up for testing the converged values at the high kp grid '
                )
                # add a bandstructure and dos calculation
                if os.path.isfile('bands'):
                    nksmall = -30
                    # negative value > only bandstructure
                else:
                    nksmall = 30
                # in this case a convergence study has already been performed.
                # The resulting parameters are passed as option
                ecuteps = [
                    self.option['ecuteps'], self.option['ecuteps'] +
                    self.convs['ecuteps']['test_range'][1] -
                    self.convs['ecuteps']['test_range'][0]
                ]
                nscf_nband = [
                    self.option['nscf_nbands'], self.option['nscf_nbands'] +
                    self.convs['nscf_nbands']['test_range'][1] -
                    self.convs['nscf_nbands']['test_range'][0]
                ]
                # for option in self.option:
                #    if option not in ['ecuteps', 'nscf_nband']:
                #        extra_abivars.update({option + '_s': self.option[option]})
        else:
            print('| all is done for this material')
            return

        logger.info('ecuteps : %s ' % str(ecuteps))
        logger.info('extra   : %s ' % str(extra_abivars))
        logger.info('nscf_nb : %s ' % str(nscf_nband))
        inputs = g0w0_convergence_inputs(abi_structure,
                                         self.pseudo_table,
                                         kppa,
                                         nscf_nband,
                                         ecuteps,
                                         ecutsigx,
                                         scf_nband,
                                         ecut,
                                         accuracy="normal",
                                         spin_mode="unpolarized",
                                         smearing=None,
                                         response_models=response_models,
                                         charge=0.0,
                                         gw_qprange=2,
                                         gamma=gamma,
                                         nksmall=nksmall,
                                         extra_abivars=extra_abivars)

        work = G0W0Work(scf_inputs=inputs[0],
                        nscf_inputs=inputs[1],
                        scr_inputs=inputs[2],
                        sigma_inputs=inputs[3])

        # work = g0w0_extended_work(abi_structure, self.pseudo_table, kppa, nscf_nband, ecuteps, ecutsigx, scf_nband,
        # accuracy="normal", spin_mode="unpolarized", smearing=None, response_models=response_models,
        # charge=0.0, sigma_nband=None, scr_nband=None, gamma=gamma, nksmall=nksmall, **extra_abivars)

        print(workdir)
        flow.register_work(work, workdir=workdir)
        return flow.allocate()
Example #9
    def execute_flow(self, structure, spec_data):
        """
        execute the spec: prepare input/job files or submit to fireworks for a given structure
        for VASP the different jobs are created as a flow
        todo: this should actually create and execute a VaspGWWorkFlow(GWWorkflow)
        """
        # general part for the base class
        grid = 0
        all_done = False
        converged = is_converged(False, structure)
        try:
            grid = read_grid_from_file(s_name(structure) + ".full_res")['grid']
            all_done = read_grid_from_file(s_name(structure) +
                                           ".full_res")['all_done']
        except (IOError, OSError):
            pass

        if all_done:
            print('| all is done for this material')
            return

        # specific part

        if spec_data['mode'] == 'fw':
            fw_work_flow = VaspGWFWWorkFlow()
        else:
            fw_work_flow = []
        if spec_data['test'] or spec_data['converge']:
            if spec_data['test']:
                tests_prep = GWscDFTPrepVaspInputSet(structure,
                                                     spec_data).tests
                tests_prep.update(
                    GWDFTDiagVaspInputSet(structure, spec_data).tests)
            elif spec_data['converge'] and converged:
                tests_prep = self.get_conv_res_test(spec_data,
                                                    structure)['tests_prep']
            else:
                tests_prep = GWscDFTPrepVaspInputSet(structure,
                                                     spec_data).convs
                tests_prep.update(
                    GWDFTDiagVaspInputSet(structure, spec_data).convs)
                if grid > 0:
                    tests_prep = expand(tests=tests_prep, level=grid)
                print(tests_prep)
            for test_prep in tests_prep:
                print('setting up test for: ' + test_prep)
                for value_prep in tests_prep[test_prep]['test_range']:
                    print("**" + str(value_prep) + "**")
                    option = {'test_prep': test_prep, 'value_prep': value_prep}
                    self.create_job(spec_data, structure, 'prep', fw_work_flow,
                                    converged, option)
                    for job in spec_data['jobs'][1:]:
                        if job == 'G0W0':
                            if spec_data['test']:
                                tests = GWG0W0VaspInputSet(
                                    structure, spec_data).tests
                            elif spec_data['converge'] and converged:
                                tests = self.get_conv_res_test(
                                    spec_data, structure)['tests']
                            else:
                                tests = GWG0W0VaspInputSet(
                                    structure, spec_data).convs
                                if grid > 0:
                                    tests = expand(tests=tests, level=grid)
                                print(tests)
                        if job in ['GW0', 'scGW0']:
                            input_set = GWG0W0VaspInputSet(
                                structure, spec_data)
                            input_set.gw0_on()
                            if spec_data['test']:
                                tests = input_set.tests
                            else:
                                tests = input_set.tests
                        for test in tests:
                            print('    setting up test for: ' + test)
                            for value in tests[test]['test_range']:
                                print("    **" + str(value) + "**")
                                option.update({'test': test, 'value': value})
                                self.create_job(spec_data, structure, job,
                                                fw_work_flow, converged,
                                                option)
Example #10
    def insert_in_database(self,
                           structure,
                           clean_on_ok=False,
                           db_name='GW_results',
                           collection='general'):
        """
        insert the convergence data and the 'sigres' file into a database
        """
        data = GWConvergenceData(spec=self, structure=structure)
        success = data.read_conv_res_from_file(
            os.path.join(
                s_name(structure) + '.res',
                s_name(structure) + '.conv_res'))
        con_dat = self.code_interface.read_convergence_data(
            s_name(structure) + '.res')
        try:
            with open('extra_abivars', mode='r') as f:
                extra = json.load(fp=f)
        except (OSError, IOError):
            extra = None
        ps = self.code_interface.read_ps_dir()
        results_file = os.path.join(
            s_name(structure) + '.res', self.code_interface.gw_data_file)
        ksbands_file = os.path.join(
            s_name(structure) + '.res', self.code_interface.ks_bands_file)
        data_file = os.path.join(
            s_name(structure) + '.res',
            s_name(structure) + '.data')
        if success and con_dat is not None:
            query = {
                'system': s_name(structure),
                'item': structure.item,
                'spec_hash': hash(self),
                'extra_vars_hash': hash(None) if extra is None else hash(
                    frozenset(extra.items())),
                'ps': ps
            }
            print('query:', query)
            entry = copy.deepcopy(query)
            entry.update({
                'conv_res': data.conv_res,
                'spec': self.to_dict(),
                'extra_vars': extra,
                'structure': structure.as_dict(),
                'gw_results': con_dat,
                'results_file': results_file,
                'ksbands_file': ksbands_file,
                'data_file': data_file
            })

            # generic section that should go into the base class like
            #   insert_in_database(query, entry, db_name, collection, server="marilyn.pcpm.ucl.ac.be")
            local_serv = pymongo.Connection("marilyn.pcpm.ucl.ac.be")
            try:
                user = os.environ['MAR_USER']
            except KeyError:
                user = input('DataBase user name: ')
            try:
                pwd = os.environ['MAR_PAS']
            except KeyError:
                pwd = input('DataBase pwd: ')
            db = local_serv[db_name]
            db.authenticate(user, pwd)
            col = db[collection]
            print(col)
            gfs = gridfs.GridFS(db)
            count = col.find(query).count()
            if count == 0:
                try:
                    with open(entry['results_file'], 'r') as f:
                        entry['results_file'] = gfs.put(f.read())
                except IOError:
                    print(entry['results_file'], 'not found')
                try:
                    with open(entry['ksbands_file'], 'r') as f:
                        entry['ksbands_file'] = gfs.put(f.read())
                except IOError:
                    print(entry['ksbands_file'], 'not found')
                try:
                    with open(entry['data_file'], 'r') as f:
                        entry['data_file'] = gfs.put(f.read())
                except IOError:
                    print(entry['data_file'], 'not found')
                col.insert(entry)
                print('inserted', s_name(structure))
            elif count == 1:
                new_entry = col.find_one(query)
                try:
                    print('removing file ', new_entry['results_file'],
                          'from db')
                    gfs.remove(new_entry['results_file'])
                except Exception:
                    print('remove failed')
                try:
                    print('removing file ', new_entry['ksbands_file'],
                          'from db')
                    gfs.remove(new_entry['ksbands_file'])
                except Exception:
                    print('remove failed')
                try:
                    print('removing file ', new_entry['data_file'], 'from db')
                    gfs.remove(new_entry['data_file'])
                except Exception:
                    print('remove failed')
                new_entry.update(entry)
                print('adding', new_entry['results_file'],
                      new_entry['data_file'])
                try:
                    with open(new_entry['results_file'], 'r') as f:
                        new_entry['results_file'] = gfs.put(f)
                except IOError:
                    print(new_entry['results_file'], 'not found')
                try:
                    with open(new_entry['ksbands_file'], 'r') as f:
                        new_entry['ksbands_file'] = gfs.put(f)
                except IOError:
                    print(new_entry['ksbands_file'], 'not found')
                try:
                    with open(new_entry['data_file'], 'r') as f:
                        new_entry['data_file'] = gfs.put(f)
                except IOError:
                    print(new_entry['data_file'], 'not found')
                print('as ', new_entry['results_file'], new_entry['data_file'])
                col.save(new_entry)
                print('updated', s_name(structure))
            else:
                print('duplicate entry ... ')
            local_serv.disconnect()
        else:
            raise RuntimeError('no data found to insert in db')
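
The database part above stores the result files in GridFS and keeps their ids in the entry. A minimal sketch of that pattern with the current pymongo API (pymongo.Connection was removed in pymongo 3; MongoClient is its replacement), assuming a reachable MongoDB server and a hypothetical results file:

    import gridfs
    import pymongo

    client = pymongo.MongoClient('localhost', 27017)
    db = client['GW_results']
    fs = gridfs.GridFS(db)
    with open('out_SIGRES.nc', 'rb') as f:  # hypothetical results file
        file_id = fs.put(f.read())
    db['general'].insert_one({'system': 'SiC_SiC.cif', 'results_file': file_id})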
Example #11
    def process_data(self, structure):
        """
        Process the data of a set of GW calculations:
        for 'single' and 'test' calculations the data is read and outputted
        for the parameter scanning part of a convergence calculation the data is read and parameters that provide
        converged results are determined
        for the 'full' part of a convergence calculation the data is read and it is tested if the slopes are in
        agreement with the scanning part
        """
        data = GWConvergenceData(spec=self, structure=structure)
        if self.data['converge']:
            done = False
            try:
                data.read_full_res_from_file()
                if data.full_res['all_done']:
                    done = True
                    print('| no action needed, all is done already')
            except (IOError, OSError, SyntaxError):
                pass

            data.set_type()

            while not done:
                if data.type['parm_scr']:
                    data.read()

                    if len(data.data) == 0:
                        print('| parm_scr type calculation but no data found.')
                        break

                    if len(data.data) < 9:  # todo this should be calculated
                        print(
                            '| parm_scr type calculation but no complete data found,'
                            ' check if all calculations are done.')
                        break

                    if data.find_conv_pars_scf('ecut', 'full_width',
                                               self['tol'])[0]:
                        print(
                            '| parm_scr type calculation, converged scf values found'
                        )
                        print(data.conv_res)
                    else:
                        print(
                            '| parm_scr type calculation, no converged scf values found'
                        )
                        data.full_res.update({
                            'remark':
                            'No converged SCF parameter found. Continue anyway.'
                        })
                        data.conv_res['values'].update(
                            {'ecut':
                             44 / eV_to_Ha})  # internally we work in eV
                        data.conv_res['control'].update({'ecut': True})
                    # if ecut is provided in extra_abivars overwrite in any case .. this is done at input generation
                    # if 'ecut' in read_extra_abivars().keys():
                    #    data.conv_res['values'].update({'ecut': read_extra_abivars()['ecut']}) # should be in eV

                    # if converged, fine; if not, increase the grid parameter for the next set of calculations
                    extrapolated = data.find_conv_pars(self['tol'])
                    if data.conv_res['control']['nbands']:
                        print(
                            '| parm_scr type calculation, converged values found, extrapolated value: %s'
                            % extrapolated[4])
                    else:
                        print(
                            '| parm_scr type calculation, no converged values found, increasing grid'
                        )
                        data.full_res['grid'] += 1

                    data.print_full_res()
                    data.print_conv_res()

                    # plot data:
                    print_gnuplot_header('plots',
                                         s_name(structure) + ' tol = ' +
                                         str(self['tol']),
                                         filetype=None)
                    data.print_gnuplot_line('plots')
                    data.print_plot_data()
                    done = True

                elif data.type['full']:
                    if not data.read_conv_res_from_file(
                            s_name(structure) + '.conv_res'):
                        print(
                            '| Full type calculation but the conv_res file is not available, trying to reconstruct'
                        )
                        data.read()
                        data.find_conv_pars(self['tol'])
                        data.print_conv_res()
                    data.read(subset='.conv')
                    if len(data.data) == 0:
                        print('| Full type calculation but no data found.')
                        break

                    if len(data.data) < 4:
                        print(
                            '| Full type calculation but no complete data found.'
                        )
                        for item in data.data:
                            print(item)
                        break

                    if data.test_full_kp_results(tol_rel=1, tol_abs=0.0015):
                        print(
                            '| Full type calculation and the full results agree with the parm_scr.'
                            ' All_done for this compound.')
                        data.full_res.update({'all_done': True})
                        data.print_full_res()
                        done = True
                        # data.print_plot_data()
                        self.code_interface.store_results(
                            name=s_name(structure))
                    else:
                        print(
                            '| Full type calculation but the full results do not agree with the parm_scr.'
                        )
                        print(
                            '|   Increase the tol to find better converged parameters and test the full grid again.'
                        )
                        print('|   TODO')
                        data.full_res.update({
                            'remark': 'no agreement at high dens kp mesh,',
                            'all_done': True
                        })

                        # read the system specific tol for System.conv_res
                        # if it's not there create it from the global tol
                        # reduce tol
                        # set data.type to convergence
                        # loop
                        done = True
                else:
                    done = True

        elif self.data['test']:
            data.read()
            data.set_type()
            data.print_plot_data()
        else:
            data.read()
            data.set_type()
            data.print_plot_data()
Example #12
    def loop_structures(self, mode='i'):
        """
        reading the structures specified in spec, add special points, and excecute the specs
        mode:
        i: loop structures for input generation
        o: loop structures for output parsing
        w: print all results
        """
        ok, not_ok = 0, 0
        errors = {}
        print('loop structures mode ', mode)
        try:
            mp_key = os.environ['MP_KEY']
        except KeyError:
            mp_key = None

        mp_list_vasp = [
            'mp-149', 'mp-2534', 'mp-8062', 'mp-2469', 'mp-1550', 'mp-830',
            'mp-1986', 'mp-10695', 'mp-66', 'mp-1639', 'mp-1265', 'mp-1138',
            'mp-23155', 'mp-111'
        ]

        if self.data['source'] == 'mp-vasp':
            items_list = mp_list_vasp
        elif self.data['source'] in ['poscar', 'cif']:
            files = [
                f for f in os.listdir('.')
                if os.path.isfile(os.path.join('.', f))
            ]
            items_list = [f for f in files if f[-3:] == 'cif']
        elif self.data['source'] == 'mar_exp':
            items_list = []
            local_serv = pymongo.Connection("marilyn.pcpm.ucl.ac.be")
            local_db_gaps = local_serv.band_gaps
            pwd = os.environ['MAR_PAS']
            local_db_gaps.authenticate("setten", pwd)
            for c in local_db_gaps.exp.find():
                name = Structure.from_dict(c['icsd_data']['structure']).composition.reduced_formula, c['icsd_id'],\
                    c['MP_id']
                print(name)
                # Structure.from_dict(c['icsd_data']['structure']).to(fmt='cif',filename=name)
                items_list.append({
                    'name': 'mp-' + c['MP_id'],
                    'icsd': c['icsd_id'],
                    'mp': c['MP_id']
                })
        else:
            items_list = [line.strip() for line in open(self.data['source'])]

        for item in items_list:
            print('\n')
            # special case, this should be encapsulated
            if self.data['source'] == 'mar_exp':
                print('structure from marilyn', item['name'], item['icsd'],
                      item['mp'])
                exp = local_db_gaps.exp.find({'MP_id': item['mp']})[0]
                structure = Structure.from_dict(exp['icsd_data']['structure'])
                structure = refine_structure(structure)
                structure.to(fmt='cif', filename=item['name'])
                try:
                    kpts = local_db_gaps.GGA_BS.find({'transformations.history.0.id': item['icsd']})[0]\
                        ['calculations'][-1]['band_structure']['kpoints']
                except (IndexError, KeyError):
                    kpts = [[0.0, 0.0, 0.0], [0.0, 0.0, 0.0]]
                structure.kpts = kpts
                print('kpoints:', structure.kpts[0], structure.kpts[1])
                structure.item = item['name']
            else:
                if item.startswith('POSCAR_'):
                    structure = Structure.from_file(item)
                    comment = Poscar.from_file(item).comment
                    # print comment
                    if comment.startswith("gap"):
                        structure.vbm_l = comment.split(" ")[1]
                        structure.vbm = (comment.split(" ")[2],
                                         comment.split(" ")[3],
                                         comment.split(" ")[4])
                        structure.cbm_l = comment.split(" ")[5]
                        structure.cbm = (comment.split(" ")[6],
                                         comment.split(" ")[7],
                                         comment.split(" ")[8])
                    else:
                        # print "no bandstructure information available, adding GG as 'gap'"
                        structure = add_gg_gap(structure)
                elif 'cif' in item:
                    structure = Structure.from_file(item)
                    structure = add_gg_gap(structure)
                elif item.startswith('mp-'):
                    with MPRester(mp_key) as mp_database:
                        print('structure from mp database', item)
                        structure = mp_database.get_structure_by_material_id(
                            item, final=True)
                        try:
                            bandstructure = mp_database.get_bandstructure_by_material_id(
                                item)
                            vbm_kpt = bandstructure.kpoints[
                                bandstructure.get_vbm()['kpoint_index'][0]]
                            cbm_kpt = bandstructure.kpoints[
                                bandstructure.get_cbm()['kpoint_index'][0]]
                            structure.vbm_l = vbm_kpt.label
                            structure.cbm_l = cbm_kpt.label
                            structure.vbm = tuple(vbm_kpt.frac_coords)
                            structure.cbm = tuple(cbm_kpt.frac_coords)
                        except (MPRestError, IndexError, KeyError) as err:
                            print(err)
                            structure = add_gg_gap(structure)
                else:
                    continue
                structure.kpts = [list(structure.cbm), list(structure.vbm)]
                structure.item = item

            print(item, s_name(structure))

            modes = {
                'i': {
                    'action': self.execute_flow,
                    'fail_msg': 'input generation failed'
                },
                'w': {
                    'action': self.print_results,
                    'fail_msg': 'writing output failed'
                },
                's': {
                    'action': self.insert_in_database,
                    'fail_msg': 'database insertion failed'
                },
                'o': {
                    'action': self.process_data,
                    'fail_msg': 'output parsing failed'
                }
            }


            try:
                action = modes[mode]['action']
                action(structure)
                ok += 1
            except Exception as exc:
                print('%s\n exception: %s %s' %
                      (modes[mode]['fail_msg'], exc.__class__, exc))
                not_ok += 1
                errors[s_name(structure)] = exc.args[0]
            """
            if mode == 'i':
                try:
                    self.execute_flow(structure)
                except Exception as exc:
                    print('input generation failed')
                    print(exc)
            elif mode == 'w':
                try:
                    self.print_results(structure)
                except Exception as exc:
                    print('writing output failed')
                    print(exc)
            elif mode == 's':
                try:
                    self.insert_in_database(structure)
                except Exception as exc:
                    print('database insertion failed')
                    print(exc)
            elif mode == 'o':
                try:
                    self.process_data(structure)
                except Exception as exc:
                    print('output parsing failed')
                    print(exc)
            """

        if 'ceci' in self.data['mode'] and mode == 'i':
            os.chmod("job_collection", stat.S_IRWXU)

        return ok, not_ok, errors
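
A hypothetical driver for this method, assuming `spec` is the surrounding GWSpecs-like object with its data already loaded:

    ok, not_ok, errors = spec.loop_structures(mode='i')  # generate inputs
    print('%s structures ok, %s failed' % (ok, not_ok))
    for name, msg in errors.items():
        print(name, msg)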
Example #13
 def create_job_script(self, add_to_collection=True, mode='pbspro'):
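     """
     Write a 'job' submission script for this work in its run directory,
     either for Slurm on the CECI clusters or for PBSpro on zenobe, and
     optionally append its submission to the 'job_collection' file.
     """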
     if mode == 'slurm':
         """
         Create job script for ceci.
         """
         npar = GWscDFTPrepVaspInputSet(
             self.structure, self.spec,
             functional=self.spec['functional']).get_npar(self.structure)
         if self.option is not None:
             option_prep_name = str(self.option['test_prep']) + str(
                 self.option['value_prep'])
             option_name = ''
             if 'test' in self.option.keys():
                 option_name = str('.') + str(self.option['test']) + str(
                     self.option['value'])
         else:
             option_prep_name = option_name = ''
         # npar = int(os.environ['NPARGWCALC'])
         header = ("#!/bin/bash \n"
                   "## standard header for Ceci clusters ## \n"
                   "#SBATCH [email protected] \n"
                   "#SBATCH --mail-type=ALL\n"
                   "#SBATCH --time=2-24:0:0 \n"
                   "#SBATCH --cpus-per-task=1 \n"
                   "#SBATCH --mem-per-cpu=4000 \n")
         path_add = ''
         if self.spec['converge'] and self.converged:
             path_add = '.conv'
         if self.job == 'prep':
             path = os.path.join(
                 s_name(self.structure) + path_add, option_prep_name)
             # create this job
             job_file = open(os.path.join(path, 'job'), mode='w')
             job_file.write(header)
             job_file.write('#SBATCH --job-name=' + s_name(self.structure) +
                            self.job + '\n')
             job_file.write('#SBATCH --ntasks=' + str(npar) + '\n')
             job_file.write('module load vasp \n')
             job_file.write('mpirun vasp \n')
             job_file.write('cp OUTCAR OUTCAR.sc \n')
             job_file.write('cp INCAR.DIAG INCAR \n')
             job_file.write('mpirun vasp \n')
             job_file.write('cp OUTCAR OUTCAR.diag \n')
             job_file.close()
             os.chmod(os.path.join(path, 'job'), stat.S_IRWXU)
             if add_to_collection:
                 job_file = open("job_collection", mode='a')
                 job_file.write('cd ' + path + ' \n')
                 job_file.write('sbatch job \n')
                 job_file.write('cd .. \n')
                 job_file.close()
                 os.chmod("job_collection", stat.S_IRWXU)
         if self.job in ['G0W0', 'GW0', 'scGW0']:
             path = os.path.join(
                 s_name(self.structure) + path_add, option_prep_name,
                 self.job + option_name)
             # create this job
             job_file = open(os.path.join(path, 'job'), mode='w')
             job_file.write(header)
             job_file.write('#SBATCH --job-name=' + s_name(self.structure) +
                            self.job + '\n')
             job_file.write('#SBATCH --ntasks=' + str(npar) + '\n')
             job_file.write('module load vasp/5.2_par_wannier90 \n')
             job_file.write('cp ../CHGCAR ../WAVECAR ../WAVEDER . \n')
             job_file.write('mpirun vasp \n')
             job_file.write('rm W* \n')
             #job_file.write('workon pymatgen-GW; get_gap > gap; deactivate')
             #job_file.write('echo '+path+'`get_gap` >> ../../gaps.dat')
             job_file.close()
             os.chmod(path + '/job', stat.S_IRWXU)
             path = os.path.join(
                 s_name(self.structure) + path_add, option_prep_name)
              # append submission of this job script to that of prep for this structure
             if add_to_collection:
                 job_file = open(os.path.join(path, 'job'), mode='a')
                 job_file.write('cd ' + self.job + option_name + ' \n')
                 job_file.write('sbatch job \n')
                 job_file.write('cd .. \n')
                 job_file.close()
     elif mode == 'pbspro':
         """
         Create job script for pbse pro Zenobe.
         """
         npar = GWscDFTPrepVaspInputSet(
             self.structure, self.spec,
             functional=self.spec['functional']).get_npar(self.structure)
         #npar = 96
          if self.option is not None:
              option_prep_name = str(self.option['test_prep']) + str(
                  self.option['value_prep'])
              option_name = ''
              if 'test' in self.option.keys():
                  option_name = str('.') + str(self.option['test']) + str(
                      self.option['value'])
          else:
              option_prep_name = option_name = ''
         # npar = int(os.environ['NPARGWCALC'])
         header = str("#!/bin/bash \n" +
                      "## standard header for zenobe ## \n" +
                      "#!/bin/bash \n" + "#PBS -q main\n" +
                      "#PBS -l walltime=24:0:00\n" + "#PBS -r y \n" +
                      "#PBS -m abe\n" +
                      "#PBS -M [email protected]\n" +
                      "#PBS -W group_list=naps\n" +
                      "#PBS -l pvmem=1900mb\n")
         path_add = ''
         if self.spec['converge'] and self.converged:
             path_add = '.conv'
         if self.job == 'prep':
             path = os.path.join(
                 s_name(self.structure) + path_add, option_prep_name)
             abs_path = os.path.abspath(path)
             # create this job
             print(os.path.join(path, 'job'))
             job_file = open(os.path.join(path, 'job'), mode='w')
             job_file.write(header)
             job_file.write(
                 "#PBS -l select=%s:ncpus=1:vmem=1900mb:mpiprocs=1:ompthreads=1\n"
                 % str(npar))
             job_file.write(
                 '#PBS -o %s/queue.qout\n#PBS -e %s/queue.qerr\ncd %s\n' %
                 (abs_path, abs_path, abs_path))
             job_file.write('mpirun -n %s vasp \n' % str(npar))
             job_file.write('cp OUTCAR OUTCAR.sc \n')
             job_file.write('cp INCAR.DIAG INCAR \n')
             job_file.write('mpirun -n %s vasp \n' % str(npar))
             job_file.write('cp OUTCAR OUTCAR.diag \n')
             job_file.close()
             os.chmod(os.path.join(path, 'job'), stat.S_IRWXU)
             if add_to_collection:
                 job_file = open("job_collection", mode='a')
                 job_file.write('cd ' + path + ' \n')
                 job_file.write('qsub job \n')
                 job_file.write('cd ../.. \n')
                 job_file.close()
                 os.chmod("job_collection", stat.S_IRWXU)
         if self.job in ['G0W0', 'GW0', 'scGW0']:
             path = os.path.join(
                 s_name(self.structure) + path_add, option_prep_name,
                 self.job + option_name)
             abs_path = os.path.abspath(path)
             # create this job
             job_file = open(os.path.join(path, 'job'), mode='w')
             job_file.write(header)
             job_file.write(
                 "#PBS -l select=%s:ncpus=1:vmem=1000mb:mpiprocs=1:ompthreads=1\n"
                 % str(npar))
             job_file.write(
                 '#PBS -o %s/queue.qout\n#PBS -e %s/queue.qerr\ncd %s\n' %
                 (abs_path, abs_path, abs_path))
             job_file.write('cp ../CHGCAR ../WAVECAR ../WAVEDER . \n')
             job_file.write('mpirun -n %s vasp \n' % str(npar))
             job_file.write('rm W* \n')
             #job_file.write('workon pymatgen-GW; get_gap > gap; deactivate')
             #job_file.write('echo '+path+'`get_gap` >> ../../gaps.dat')
             job_file.close()
             os.chmod(os.path.join(path, 'job'), stat.S_IRWXU)
             path = os.path.join(
                 s_name(self.structure) + path_add, option_prep_name)
              # append submission of this job script to that of prep for this structure
             if add_to_collection:
                 job_file = open(os.path.join(path, 'job'), mode='a')
                 job_file.write('cd ' + self.job + option_name + ' \n')
                 job_file.write('qsub job \n')
                 job_file.write('cd .. \n')
                 job_file.close()
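
The open/write/close/chmod sequence repeated above could equally be factored into a small helper using a context manager. A minimal sketch under that assumption, with a hypothetical write_job helper that is not part of the original class:

    import os
    import stat

    def write_job(path, header, lines):
        """Write a submission script at <path>/job and make it executable."""
        job_path = os.path.join(path, 'job')
        with open(job_path, 'w') as job_file:
            job_file.write(header)
            job_file.writelines(line + '\n' for line in lines)
        os.chmod(job_path, stat.S_IRWXU)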
Example #14
    def create_input(self):
        """
        create vasp input
        """
        option_name = ''
        path_add = ''
        if self.spec['converge'] and self.converged:
            path_add = '.conv'
        if self.option is None:
            path = s_name(self.structure)
        else:
            path = os.path.join(
                s_name(self.structure) + path_add,
                str(self.option['test_prep']) + str(self.option['value_prep']))
            if 'test' in self.option.keys():
                option_name = '.' + str(self.option['test']) + str(
                    self.option['value'])
        if self.job == 'prep':

            inpset = GWscDFTPrepVaspInputSet(
                self.structure, self.spec, functional=self.spec['functional'])
            if self.spec['converge'] and not self.converged:
                spec_tmp = self.spec.copy()
                spec_tmp.update({'kp_grid_dens': 2})
                inpset = GWscDFTPrepVaspInputSet(
                    self.structure,
                    spec_tmp,
                    functional=self.spec['functional'])
                inpset.user_incar_settings.update({"ENCUT": 800})
            if self.spec['test'] or self.spec['converge']:
                if (self.option['test_prep'] in
                        GWscDFTPrepVaspInputSet.get_defaults_convs().keys() or
                        self.option['test_prep'] in
                        GWscDFTPrepVaspInputSet.get_defaults_tests().keys()):
                    inpset.set_test(self.option['test_prep'],
                                    self.option['value_prep'])
            if self.spec["prec"] == "h":
                inpset.set_prec_high()
            inpset.write_input(path)

            inpset = GWDFTDiagVaspInputSet(self.structure,
                                           self.spec,
                                           functional=self.spec['functional'])
            if self.spec["prec"] == "h":
                inpset.set_prec_high()
            if self.spec['converge'] and not self.converged:
                spec_tmp = self.spec.copy()
                spec_tmp.update({'kp_grid_dens': 2})
                inpset = GWDFTDiagVaspInputSet(
                    self.structure,
                    spec_tmp,
                    functional=self.spec['functional'])
                inpset.user_incar_settings.update({"ENCUT": 800})
            if self.spec['test'] or self.spec['converge']:
                inpset.set_test(self.option['test_prep'],
                                self.option['value_prep'])
            inpset.incar.write_file(os.path.join(path, 'INCAR.DIAG'))

        if self.job == 'G0W0':

            inpset = GWG0W0VaspInputSet(self.structure,
                                        self.spec,
                                        functional=self.spec['functional'])
            if self.spec['converge'] and not self.converged:
                spec_tmp = self.spec.copy()
                spec_tmp.update({'kp_grid_dens': 2})
                inpset = GWG0W0VaspInputSet(self.structure,
                                            spec_tmp,
                                            functional=self.spec['functional'])
                inpset.user_incar_settings.update({"ENCUT": 800})
            if self.spec['test'] or self.spec['converge']:
                inpset.set_test(self.option['test_prep'],
                                self.option['value_prep'])
                inpset.set_test(self.option['test'], self.option['value'])
            if self.spec["prec"] == "h":
                inpset.set_prec_high()
            if self.spec['kp_grid_dens'] > 20:
                #inpset.wannier_on()
                inpset.write_input(self.structure,
                                   os.path.join(path, 'G0W0' + option_name))
                #w_inpset = Wannier90InputSet(self.spec)
                #w_inpset.write_file(self.structure, os.path.join(path, 'G0W0'+option_name))
            else:
                inpset.write_input(self.structure,
                                   os.path.join(path, 'G0W0' + option_name))

        if self.job == 'GW0':

            inpset = GWG0W0VaspInputSet(self.structure,
                                        self.spec,
                                        functional=self.spec['functional'])
            if self.spec['converge'] and not self.converged:
                spec_tmp = self.spec.copy()
                spec_tmp.update({'kp_grid_dens': 2})
                inpset = GWG0W0VaspInputSet(self.structure,
                                            spec_tmp,
                                            functional=self.spec['functional'])
                inpset.user_incar_settings.update({"ENCUT": 800})
            if self.spec['test'] or self.spec['converge']:
                inpset.set_test(self.option['test_prep'],
                                self.option['value_prep'])
                inpset.set_test(self.option['test'], self.option['value'])
            if self.spec["prec"] == "h":
                inpset.set_prec_high()
            inpset.gw0_on()
            if self.spec['kp_grid_dens'] > 20:
                #inpset.wannier_on()
                inpset.write_input(self.structure,
                                   os.path.join(path, 'GW0' + option_name))
                #w_inpset = Wannier90InputSet(self.spec)
                #w_inpset.write_file(self.structure, os.path.join(path, 'GW0'+option_name))
            else:
                inpset.write_input(self.structure,
                                   os.path.join(path, 'GW0' + option_name))

        if self.job == 'scGW0':

            inpset = GWG0W0VaspInputSet(self.structure,
                                        self.spec,
                                        functional=self.spec['functional'])
            if self.spec['converge'] and not self.converged:
                spec_tmp = self.spec.copy()
                spec_tmp.update({'kp_grid_dens': 2})
                inpset = GWG0W0VaspInputSet(self.structure,
                                            spec_tmp,
                                            functional=self.spec['functional'])
                inpset.user_incar_settings.update({"ENCUT": 800})
            if self.spec['test'] or self.spec['converge']:
                inpset.set_test(self.option['test_prep'],
                                self.option['value_prep'])
                inpset.set_test(self.option['test'], self.option['value'])
            if self.spec["prec"] == "h":
                inpset.set_prec_high()
            inpset.gw0_on(qpsc=True)
            if self.spec['kp_grid_dens'] > 20:
                inpset.wannier_on()
                inpset.write_input(self.structure,
                                   os.path.join(path, 'scGW0' + option_name))
                w_inpset = Wannier90InputSet(self.spec)
                w_inpset.write_file(self.structure,
                                    os.path.join(path, 'scGW0' + option_name))
            else:
                inpset.write_input(self.structure,
                                   os.path.join(path, 'scGW0' + option_name))