Example #1
    def post_processing(self, timedbg, logname, command):
        """
        Check the existence and the log file and return an instance logfile.

        Returns:
           A `BigDFT.Logfile` class instance associated to the run which has been just performed.
           If the run failed for some reasons, the logfile seem not existing or it cannot be parsed it returns `None`.

        """
        # verify that no debug file has been created
        if self._get_debugfile_date() > timedbg:
            verbose = self.run_options['verbose']
            if verbose:
                safe_print(
                    "ERROR: some problem occurred during the execution of the command, check the 'debug/' directory and the logfile"
                )
                # the debug file is sane, we may print out the error message
                self._dump_debugfile_info()
            try:
                return Lf.Logfile(logname)
            except Exception:
                return None
        if os.path.exists(logname):
            from futile.Utils import file_time
            from time import time
            inputname = self._get_inputfilename()
            if file_time(logname) < file_time(
                    inputname) and not self.run_options['skip']:
                safe_print("ERROR: The logfile (", logname,
                           ") is older than the inputfile (", inputname, ").")
                return None
            else:
                return Lf.Logfile(logname)
        else:
            raise ValueError("The logfile (%s) does not exist." % logname)
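
For context, a sketch of where this hook sits (mirroring the `run` method of Example #3; the exact sequence inside the real class, and the `calc`, `command`, and `logname` names, are assumptions):

# hypothetical call site, following the pattern of the run() method in Example #3
# (calc is an instance of the class defining post_processing above):
timedbg = calc._get_debugfile_date()   # debug/ timestamp before launching
os.system(command)                     # execute the bigdft command line
log = calc.post_processing(timedbg, logname, command)
if log is None:
    print('The run failed; inspect the debug/ directory')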
Example #2
    def logfile(self, path):
        """
        Set the logfile

        :raise ValueError:
        """
        self.bigdftlogfile = Logfiles.Logfile(path)
        # aiida attribute dicts cannot store datetime objects, so serialize the timestamp
        if len(self.bigdftlogfile) > 0:
            logs = []
            for log in self.bigdftlogfile:
                try:
                    log.log['Timestamp of this run'] = \
                        log.log['Timestamp of this run'].strftime("%Y-%m-%d %H:%M:%S.%f")
                except KeyError:
                    pass
                logs.append(log.log)
            self.set_attribute('logfile', logs)
        else:
            self.bigdftlogfile.log['Timestamp of this run'] = \
                self.bigdftlogfile.log['Timestamp of this run'].strftime("%Y-%m-%d %H:%M:%S.%f")
            self.set_attribute('logfile', self.bigdftlogfile.log)
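
A hypothetical usage sketch, assuming the method above is exposed as a property setter on an aiida Data-like node (the `BigDFTLogfile` class name is an assumption; `get_attribute` follows the usual aiida node API):

# hypothetical: BigDFTLogfile is the aiida Data plugin defining the setter above
node = BigDFTLogfile()
node.logfile = 'testfiles/log-HBDMI.yaml'   # parses the log and stores it
print(node.get_attribute('logfile'))        # the serialized dict(s)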
Example #3
    def run(self, name='', outdir='', run_name='', input={}, posinp=None, skip=False):
        """
        Run a calculation building the input file from a dictionary.

        :param str name: naming scheme of the run, i.e. <name>.yaml is the input file and log-<name>.yaml the output one.
           Data will then be written in the directory `data-<name>`, unless the "radical" keyword is specified in the input dictionary.
        :param str outdir: specify the output directory
        :param str run_name: file containing the list of the run ids which have to be launched independently
                             (list in yaml format). The runs-file option is not compatible with the name option.
        :param input: give the input parameters
        :type input: dict or yaml filename
        :param bool skip: avoid rerunning the calculation if it has already been performed.
        :param posinp: indicate the posinp file (atomic position file).
           It may be specified only when the input is given as a dictionary; otherwise it is ignored, as the position file should be consistent with the input file naming scheme.
        :type posinp: filename
        :return: a Logfile instance; `None` is returned if an error occurred.
        :rtype: Logfile

        .. todo::
           
           Set the return value of run in the case of a run_file. It should be a list of Logfile instances.

        """
        # Set the number of omp threads
        os.environ['OMP_NUM_THREADS'] = self.omp
        # Creating the yaml input file from a dictionary or another file
        if len(name) > 0:
            input_file = "%s.yaml" % name
            logname = "log-%s.yaml" % name
        else:
            input_file = "input.yaml"  # default name
            logname = "log.yaml"
        # check whether the debug file will be updated (case of an erroneous run)
        timedbg = get_debugfile_date()
        if isinstance(input, str):
            # check that the input file does exist
            assert os.path.isfile(input)
            if input != input_file:
                shutil.copyfile(input, input_file)
                safe_print('Copying from "%s" the yaml input file "%s"' % (input, input_file))
        else:
            import copy
            local_input = copy.deepcopy(input)
            # copying the posinp input file if needed
            if posinp is not None:
                # check that the file does exist
                assert os.path.isfile(posinp)
                # add a posinp key into the dictionary
                local_input['posinp'] = posinp
            # creating the yaml input file
            from futile import Yaml as Y
            Y.dump(local_input, filename=input_file)
            safe_print('Creating from a dictionary the yaml input file "%s"' % input_file)
        # Adjust the command line with options
        command = self.command
        if len(name) > 0:
            command += ' -n ' + name
        if len(run_name) > 0:
            command += ' -r ' + run_name
        if len(outdir) > 0:
            command += ' -d ' + outdir
        if skip:
            command += ' -s Yes'
        safe_print('Executing command: ', command)
        os.system(command)
        # verify that no debug file has been created
        if get_debugfile_date() > timedbg:
            safe_print("ERROR: some problem occurred during the execution of the command, check the 'debug/' directory and the logfile")
            return None
        # check the existence of the log file and return a logfile instance
        # valid only without run_name
        if os.path.exists(logname):
            return Lf.Logfile(logname)
        else:
            return None
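
A minimal usage sketch for this `run` method, matching the signature above (the `calc` variable, the input keys, and the file names are illustrative; `log.energy` assumes the usual Logfile attribute):

# illustrative: 'calc' is an instance of the class defining run() above
inp = {'dft': {'hgrids': 0.45, 'rmult': [5.0, 8.0]}}  # hypothetical input dict
log = calc.run(name='H2O', input=inp, posinp='H2O.xyz', skip=True)
if log is not None:
    print('final energy:', log.energy)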
Example #4

import os
# refer to the BigDFT python modules in the sources, only for testing
# (same setup as Example #6 below)
BIGDFT_PYTHONDIR = os.path.abspath(
    os.path.join(os.pardir, os.pardir, 'src', 'python'))
# then update the path
import sys
if BIGDFT_PYTHONDIR not in sys.path:
    sys.path += [BIGDFT_PYTHONDIR]


# Now we can load the Logfiles module:

# In[2]:

from BigDFT import Logfiles as lf 


# Let us now load a file into an instance of the Logfile class. Imagine that our logfile corresponds to a single-point run and is stored in a file named "log-HBDMI.yaml":
#

# In[3]:

HBDMI = lf.Logfile('testfiles/log-HBDMI.yaml')


# The run is now loaded. To inspect its behaviour we can simply print it, which displays the usual information:

# In[4]:

print(HBDMI)
HBDMI.geopt_plot()


# The above information can also be accessed separately, by having a look at the attributes of the HBDMI object:

# In[5]:

HBDMI.__dict__.keys()  # you may also type: dir(HBDMI)
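
# For instance, single quantities can be read directly from those attributes
# (a minimal sketch; the exact attribute set depends on the run, `energy`
# being the most common one):

print(HBDMI.energy)  # total energy of the run
for att in sorted(HBDMI.__dict__.keys()):
    print(att)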
Example #5

def QM_snapshot(filename, fragment_size):
    FullQM = lf.Logfile(filename)
    # extract <name> from a path like '.../log-<name>.yaml'; slice up to the
    # extension, since str.rstrip('.yaml') strips characters rather than a
    # suffix and would also eat trailing 'y', 'a', 'm' or 'l' letters
    s = filename
    name = s[s.find('log-') + 4:s.rfind('.yaml')]
    return name, fragmentation(FullQM, fragment_size)
Example #6
import os
BIGDFT_PYTHONDIR = os.path.abspath(
    os.path.join(os.pardir, os.pardir, 'src',
                 'python'))  # refer to the sources, only for testing
#then update the path
import sys
if BIGDFT_PYTHONDIR not in sys.path:
    sys.path += [BIGDFT_PYTHONDIR]

# Next we load a logfile which is suitable for the calculation, for example an evergreen (this log has been shortened for practical purposes):

# In[2]:

import BigDFT.Logfiles as lf
#get_ipython().magic(u'matplotlib inline')
Graph = lf.Logfile('testfiles/log-Graphene.yaml')

# In[3]:

#inform
print(Graph)

# In[4]:

#How many k-points?
Graph.nkpt

# To start the analysis, it might be interesting to plot the Density of States (also see the DoS example notebook):

# In[5]:
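
# The plotting cell itself is not part of this excerpt; a minimal sketch of
# what it might contain, assuming the `get_dos` helper of the Logfile class:

dos = Graph.get_dos()
dos.plot()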
Example #7
def single_study_workflow(alpha_conv=1.0e-2,
                          wf_convergence=1.0e-6,
                          hgrids=0.3,
                          rmult_fine=9.0,
                          term_verb=True,
                          **kwargs):
    """
    Perform the complete workflow to compute the statical polarizability
    of a specific study(molecule+xc+psp)

    Args:
        kwargs['molecule']  : the molecule type
        kwargs['xc']        : the xc functional
        kwargs['psp']       : the pseudopotential
    """
    molecule = kwargs['molecule']
    xc = kwargs['xc']
    psp = kwargs['psp']

    study = {}

    if not os.path.isdir(molecule):
        os.mkdir(molecule)
    os.chdir(molecule)
    path = xc + '-' + psp
    if not os.path.isdir(path):
        os.mkdir(path)

    print()
    print('Compute alpha for:', molecule, xc, psp)

    posinp = Molecules.Molecule(molecule)

    gs_rtol = 10 * wf_convergence  # relative tolerance for the gs convergence (using the total energy as control quantity)

    inp = I.Inputfile()
    inp.set_hgrid(hgrids)
    inp.set_xc(xc.upper())
    inp.set_wavefunction_convergence(wf_convergence)

    #gs convergence
    rmult_coarse = [1.0 * i for i in range(3, 12)]
    data = []
    code = C.SystemCalculator(skip=True, verbose=False)
    for r in rmult_coarse:
        gs_study = D.Dataset(label=molecule + '_GS',
                             run_dir=path,
                             posinp=posinp)
        gs_study.set_postprocessing_function(SP.get_energy)
        inp.set_rmult(coarse=r, fine=rmult_fine)
        idd = {'rmult': r}
        gs_study.append_run(id=idd, runner=code, input=inp)
        data.append(gs_study)
    if term_verb: print('Seek for gs convergence')
    study['gs_conv'] = SP.seek_convergence(rt=gs_rtol,
                                           term_verb=term_verb,
                                           label='rmult',
                                           values=rmult_coarse,
                                           data=data)

    if term_verb: print('Seek for alpha convergence')
    # alpha field intensity convergence
    conv_val = study['gs_conv']['converged_value']
    gslog = 'log-' + data[rmult_coarse.index(conv_val)].names[0] + '.yaml'
    gs = lf.Logfile(path + os.sep + gslog)
    inp.set_rmult(gs.log['dft']['rmult'])
    if term_verb: print('Convergence on the field intensity')
    study['field_conv'] = SP.perform_field_convergence(term_verb=term_verb,
                                                       rt=alpha_conv,
                                                       run_dir=path,
                                                       input=inp,
                                                       runner=code,
                                                       posinp=posinp,
                                                       ppf=SP.eval_alpha)
    f = study['field_conv']['converged_value']
    # alpha rmult convergence
    rmult_list = SP.build_rmult_list(gs)
    if term_verb: print('Convergence on rmult')
    study['rmult_conv'] = SP.perform_rmult_convergence(term_verb=term_verb,
                                                       rt=alpha_conv,
                                                       run_dir=path,
                                                       intensity=f,
                                                       rmult=rmult_list,
                                                       input=inp,
                                                       runner=code,
                                                       posinp=posinp,
                                                       ppf=SP.eval_alpha)

    os.chdir('../')
    return study
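
A hypothetical invocation of the workflow above (the molecule, xc, and psp values are illustrative; the keys read from the returned dictionary follow the code above):

study = single_study_workflow(alpha_conv=1.0e-2,
                              wf_convergence=1.0e-6,
                              molecule='H2O', xc='lda', psp='hgh-k')
print(study['gs_conv']['converged_value'])     # converged coarse rmult
print(study['field_conv']['converged_value'])  # converged field intensity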
Example #8
        data = Time.dump_timing_level(bt.routines[0])  #dict_routines)
        plt = Time.polar_axis(data)
        plt.show()
    else:
        pylab.show()

if args.data is None:
    print("No input file given, exiting...")
    exit(0)

if args.analyze is not None and args.data:
    from BigDFT import Logfiles as lf
    from futile import Yaml
    instructions = Yaml.load(args.analyze)  #lf.get_logs([args.analyze])
    print('#', args.data, argcl)
    lf.process_logfiles(argcl, instructions)
    exit(0)

if args.data:
    with open(argcl[0], "r") as fp:
        logfile_lines = fp.readlines()

#output file
file_out = open(args.output, "w")
#to_remove list
if args.remove is not None:
    to_remove = yaml.load(open(args.remove, "r").read(), Loader=yaml.CLoader)
else:
    #standard list which removes long items from the logfile
    to_remove = [
        "Atomic positions within the cell (Atomic and Grid Units)",