def append_inputfile(qm, mm, name, tar=None):
    """Write the position and input files for one QM/MM run.

    Dumps the QM system positions into ``<name>.xyz`` and a yaml input
    file ``<name>.yaml`` whose ``dft.external_potential`` carries the MM
    contribution and whose ``posinp`` points at the xyz file.

    :param qm: QM system; must provide an ``xyz(filename)`` writer
    :param mm: external potential data from the MM part
    :param name: radical used to build both filenames
    :param tar: optional archive handle forwarded to ``Yaml.dump``
    :returns: the radical (``str(name)``) used for the filenames
    """
    radical = str(name)
    positions_file = radical + '.xyz'
    input_file = radical + '.yaml'
    # Write the atomic positions first, then the matching yaml input
    qm.xyz(positions_file)
    run_input = {'dft': {'external_potential': mm}, 'posinp': positions_file}
    Yaml.dump(run_input, input_file, raw=True, tar=tar)
    return radical
# --- Example 2 ---
def get_fragment_PI(filename):
    """Return the per-atom purity indicators from a fragment logfile.

    Loads *filename* with ``futile.Yaml`` and maps every atom ID of each
    fragment to that fragment's purity indicator.

    :param filename: path to the yaml logfile containing
        ``Orbital occupation -> Fragment multipoles``
    :returns: list of purity indicators ordered by atom ID; assumes the
        atom IDs are the contiguous range 1..N (a missing ID raises
        ``KeyError``)
    """
    from futile import Yaml
    log = Yaml.load(filename)
    fragments = log['Orbital occupation'][0]['Fragment multipoles']
    purity_by_atom = {}
    for frag in fragments:
        for atom_id in frag['Atom IDs']:
            purity_by_atom[atom_id] = frag['Purity indicator']
    # len(d) instead of the redundant len(d.keys()); IDs are 1-based
    return [purity_by_atom[i + 1] for i in range(len(purity_by_atom))]
# --- Example 3 ---
def get_logs(files):  # ,select_document=None):
    """
    Return a list of loaded logfiles from files, which is a list
    of paths leading to logfiles.

    :param files: List of filenames indicating the logfiles
    :returns: List of Logfile instances associated to filename
    """
    from futile import Yaml
    loaded = []
    for path in files:
        # Each file may contain several yaml documents; accumulate them all
        loaded.extend(Yaml.load(path, doc_lists=True, safe_mode=True))
    return loaded
# --- Example 4 ---
def get_fragment_chg(filename):
    """Return the per-atom net charge from a fragment logfile.

    Each atom of a fragment is assigned the fragment's net charge
    (ionic charge plus the first electronic multipole ``q0``).

    :param filename: path to the yaml logfile containing
        ``Orbital occupation -> Fragment multipoles``
    :returns: list of net charges ordered by atom ID; assumes the atom
        IDs are the contiguous range 1..N
    """
    from futile import Yaml
    log = Yaml.load(filename)
    fragments = log['Orbital occupation'][0]['Fragment multipoles']
    charge_by_atom = {}
    total_charge = 0.0
    for frag in fragments:
        ionic = frag['Neutral fragment charge']
        electronic = frag['q0'][0]
        total_charge += ionic + electronic
        for atom_id in frag['Atom IDs']:
            charge_by_atom[atom_id] = ionic + electronic
    # fix: Python-2-only `print` statement was a SyntaxError under Python 3
    print('total charge', filename, total_charge)
    return [charge_by_atom[i + 1] for i in range(len(charge_by_atom))]
# --- Example 5 ---
    def run(self, name='', outdir='', run_name='', input=None, posinp=None, skip=False):
        """
        Run a calculation building the input file from a dictionary.

        :param str name: naming scheme of the run i.e. <name>.yaml is the input file and log-<name>.yaml the output one.
           Data will then be written in the directory `data-<name>.yaml`, unless the "radical" keyword is specified in the input dictionary.
        :param str outdir: specify the output directory
        :param str run_name: File containing the list of the run ids which have to be launched independently
                             (list in yaml format). The option runs-file is not compatible with the name option.
        :param input: give the input parameters (defaults to an empty dictionary)
        :type input: dict or yaml filename
        :param bool skip: avoid to rerun the calculation, in case it has been already performed.
        :param posinp: indicate the posinp file (atomic position file).
           It may be specified only when the input is given as a dictionary, otherwise it is ignored: the position file should be consistent with the inputfile naming scheme.
        :type posinp: filename
        :return: a Logfile instance is returned. It returns None if an error occurred
        :rtype: Logfile

        .. todo::

           Set the return value of run in the case of a run_file. It should be a list of Logfile classes

        """
        # fix: the previous mutable default argument `input={}` shared one
        # dict object across all calls; normalize the None sentinel instead.
        if input is None:
            input = {}
        # Set the number of omp threads for the launched command
        os.environ['OMP_NUM_THREADS'] = self.omp
        # Derive input-file and logfile names from the run name
        if len(name) > 0:
            input_file = "%s.yaml" % name
            logname = "log-%s.yaml" % name
        else:
            input_file = "input.yaml"  # default names
            logname = "log.yaml"
        # Timestamp of the debug file, compared after the run to detect errors
        timedbg = get_debugfile_date()
        if isinstance(input, str):
            # The input is an already-written yaml file: copy it in place
            assert os.path.isfile(input)
            if input != input_file:
                shutil.copyfile(input, input_file)
                safe_print('Copying from "%s" the yaml input file "%s"' % (input, input_file))
        else:
            import copy
            # Deep-copy so the caller's dictionary is never mutated
            local_input = copy.deepcopy(input)
            # Copying the posinp input file if needed
            if posinp is not None:
                # Check if the file does exist
                assert os.path.isfile(posinp)
                # Add into the dictionary a posinp key
                local_input['posinp'] = posinp
            # Creating the yaml input file
            from futile import Yaml as Y
            Y.dump(local_input, filename=input_file)
            safe_print('Creating from a dictionary the yaml input file "%s"' % input_file)
        # Adjust the command line with options
        command = self.command
        if len(name) > 0:
            command += ' -n ' + name
        if len(run_name) > 0:
            command += ' -r ' + run_name
        if len(outdir) > 0:
            command += ' -d ' + outdir
        if skip:
            command += ' -s Yes'
        safe_print('Executing command: ', command)
        os.system(command)
        # Verify that no debug file has been created (signals a failed run)
        if get_debugfile_date() > timedbg:
            safe_print("ERROR: some problem occured during the execution of the command, check the 'debug/' directory and the logfile")
            return None
        # Check the existence of the log file and return a Logfile instance;
        # valid only without run_name
        if os.path.exists(logname):
            return Lf.Logfile(logname)
        else:
            return None
# --- Example 6 ---
 def __init__(self,*args,**kwargs):
     """
     Initialize the instance from one of three mutually exclusive sources,
     checked in this order:

     1. ``archive`` (kwarg): a tar archive whose members are yaml logfiles;
        ``member`` optionally restricts loading to a single member.
     2. ``dictionary`` (kwarg): an already-loaded dictionary, a list of
        dictionaries, or a generator yielding them.
     3. positional ``args``: a list of logfile paths, loaded via get_logs.

     ``label`` (kwarg) overrides the automatically chosen label.
     Raises ValueError if no documents could be gathered.
     """
     import os
     dicts = []
     #Read the dictionary kwargs
     arch = kwargs.get("archive")
     member = kwargs.get("member")
     label = kwargs.get("label")
     dictionary=kwargs.get("dictionary")
     if arch:
         #An archive is detected
         import tarfile
         from futile import Yaml
         tar = tarfile.open(arch)
         # With an explicit member only that entry is read, otherwise all
         members = [ tar.getmember(member) ] if member else tar.getmembers()
         #print members
         for memb in members:
             f = tar.extractfile(memb)
             dicts.append(Yaml.load(stream=f.read()))
             #dicts[-1]['label'] = memb.name #Add the label (name of the file)
         srcdir=os.path.dirname(arch)
         label = label if label is not None else arch
     elif dictionary:
         #Read the dictionary or a list of dictionaries or from a generator
         dicts = dictionary if isinstance(dictionary,list) else [d for d in dictionary]
         srcdir=''
         label = label if label is not None else 'dict'
     elif args:
         #Read the list of files (member replaces load_only...)
         dicts=get_logs(args)#,select_document=member)
         label = label if label is not None else args[0]
         srcdir=os.path.dirname(args[0])
     #: Label of the Logfile instance
     self.label=label
     #: Absolute path of the directory of logfile
     self.srcdir=os.path.abspath('.' if srcdir == '' else srcdir)
     if not dicts:
         raise ValueError("No log information provided.")
     #Initialize the logfile with the first document
     self._initialize_class(dicts[0])
     if len(dicts)>1:
         # Multi-document logfile: build one instance per document, then
         # re-initialize self from the energetically best one below.
         #first initialize the instances with the previous logfile such as to provide the
         #correct information
         #(we should however decide what to do if some run did not converged)
         self._instances=[]
         for i,d in enumerate(dicts):
             #label=d.get('label','log'+str(i))
             label = 'log'+str(i)
             # Seed each instance with the first document, then overwrite it
             # with its own data so missing keys fall back to the first run.
             dtmp=dicts[0]
             instance=Logfile(dictionary=dtmp,label=label)
             #now update the instance with the other value
             instance._initialize_class(d)
             self._instances.append(instance)
         #then we should find the best values for the dictionary
         print('Found',len(self._instances),'different runs')
         import numpy
         #Initialize the class with the dictionary corresponding to the lower value of the energy
         # Runs without an energy attribute get a huge sentinel so they lose
         ens=[(l.energy if hasattr(l,'energy') else 1.e100) for l in self._instances]
         #: Position in the logfile items of the run associated to lower energy
         self.reference_log=numpy.argmin(ens)
         #print 'Energies',ens
         self._initialize_class(dicts[self.reference_log])
        'Fragment extraction from homogeneous QM/MM creation')
    args.option('-s',
                '--size',
                help='fragment size for the QM region',
                default=3)
    args.option('-f',
                '--files',
                remainder=True,
                help='files to be used for the snapshots',
                default='test')
    args.option(
        '-t',
        '--tarfile',
        default=None,
        help='archive on which to write the data, will be extended by .tar.bz2'
    )
    arg = args.args()
    #open the tarfile archive if required to do so
    if arg.tarfile:
        import tarfile
        tar = tarfile.open(arg.tarfile + '.tar.bz2', mode='w:bz2')
    else:
        tar = None
    list_posinp = [
    ]  #store the names of the different runs to be treated in the list
    for f in arg.files:
        list_posinp += log_to_QMMM(f, int(arg.size), tar=tar)
        print 'File:"', f, '" treated, tot snapshots', len(list_posinp)
    if tar: tar.close()
    Yaml.dump(list_posinp, 'list_posinp')
    if bt.routines[0] is not None and False:
        data = Time.dump_timing_level(bt.routines[0])  #dict_routines)
        plt = Time.polar_axis(data)
        plt.show()
    else:
        pylab.show()

# Bail out early when no input file was given on the command line.
# fix: Python-2-only `print` statement was a SyntaxError under Python 3.
if args.data is None:
    print("No input file given, exiting...")
    exit(0)

# Analyze mode: load the instruction file and process the given logfiles,
# then terminate without running the normal pipeline.
# fix: Python-2-only `print` statement was a SyntaxError under Python 3.
if args.analyze is not None and args.data:
    from BigDFT import Logfiles as lf
    from futile import Yaml
    instructions = Yaml.load(args.analyze)  #lf.get_logs([args.analyze])
    print('#', args.data, argcl)
    lf.process_logfiles(argcl, instructions)
    exit(0)

# Read the raw logfile text; only the first positional argument is used here.
# NOTE(review): presumably argcl holds the remaining command-line file
# arguments — confirm against the argument-parsing code above.
if args.data:
    with open(argcl[0], "r") as fp:
        logfile_lines = fp.readlines()

#output file
# NOTE(review): file_out is not closed in this view; presumably it is closed
# (or flushed at interpreter exit) further down — confirm.
file_out = open(args.output, "w")
#to_remove list
if args.remove is not None:
    to_remove = yaml.load(open(args.remove, "r").read(), Loader=yaml.CLoader)
else:
    #standard list which removes long items from the logfile