def GetProperties(self, flist, proppath='/parameters',
                  respath='/simulation/results', verbose=False):
    fs = self.GetFileNames(flist)
    resultfiles = []
    for f in fs:
        try:
            self.h5f = h5.archive(f, 'r')
            self.h5fname = f
            if verbose:
                log("Loading from file " + f)
            rfile = ResultFile(f)
            rfile.props = self.ReadParameters(proppath)
            try:
                obs = self.GetObservableList(respath)
                rfile.props["ObservableList"] = [pt.hdf5_name_decode(x) for x in obs]
            except:
                pass
            resultfiles.append(rfile)
        except Exception as e:
            log(e)
            log(traceback.format_exc())
    return resultfiles
def loadTimeEvolution(flist, globalproppath='/parameters', resroot='/timesteps/',
                      localpropsuffix='/parameters', measurements=None):
    ll = Hdf5Loader()
    data = []
    # loop over files
    for f in flist:
        try:
            # open the file and the results root group
            h5file = h5.archive(f, 'r')
            # enumerate the subgroups
            L = h5file.list_children(resroot)
            # create an iterator over the numbered subgroups
            stepper = [i + 1 for i in range(len(L))]
            # read in the global props
            globalprops = ll.GetProperties([f], globalproppath)
            for d in stepper:
                # get the measurements from the numbered subgroups
                locdata = ll.ReadMeasurementFromFile(
                    [f],
                    proppath=resroot + str(d) + localpropsuffix,
                    respath=resroot + str(d) + '/results',
                    measurements=measurements)
                # append the global props to the local props
                for i in range(len(locdata[0])):
                    locdata[0][i].props.update(globalprops[0].props)
                # extend the total dataset with this data
                data.extend(locdata)
        except Exception as e:
            log(e)
            log(traceback.format_exc())
    return data
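# Usage sketch for loadTimeEvolution above. The file name 'evolve.out.h5' and
# the observable name are placeholders, not names from the source; any output
# file with numbered groups under '/timesteps/' should work the same way.
data = loadTimeEvolution(['evolve.out.h5'], measurements=['Local Magnetization'])
for locdata in data:        # one list of DataSets per time step
    for d in locdata:
        print(d.props['observable'], d.y)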
def ReadDMFTIterations(self, flist, observable='G_tau', measurements='0',
                       proppath='/parameters', respath='/simulation/iteration',
                       verbose=False):
    fs = self.GetFileNames(flist)
    fileset = []
    for f in fs:
        try:
            self.h5f = h5.archive(f, 'r')
            self.h5fname = f
            if verbose:
                log("Loading from file " + f)
            list_ = self.GetObservableList(respath + '/1/results/' + observable + '/')
            #grp = self.h5f.require_group(respath)
            params = self.ReadParameters(proppath)
            obslist = [pt.hdf5_name_encode(obs) for obs in measurements
                       if pt.hdf5_name_encode(obs) in list_]
            iterationset = []
            for it in self.h5f.list_children(respath):
                obsset = []
                for m in obslist:
                    try:
                        if verbose:
                            log("Loading " + m)
                        d = DataSet()
                        size = 0
                        path = it + '/results/' + observable + '/' + m
                        if "mean" in self.h5f.list_children(respath + '/' + path):
                            if self.h5f.is_scalar(respath + '/' + path + '/mean/value'):
                                size = 1
                                obs = self.h5f[respath + '/' + path + '/mean/value']
                                d.y = np.array([obs])
                            else:
                                obs = self.h5f[respath + '/' + path + '/mean/value']
                                size = len(obs)
                                d.y = obs
                            d.x = np.arange(0, size)
                        d.props['hdf5_path'] = respath + "/" + path
                        d.props['observable'] = pt.hdf5_name_decode(m)
                        d.props['iteration'] = it
                        d.props.update(params)
                    except AttributeError:
                        log("Could not create DataSet")
                    obsset.append(d)
                iterationset.append(obsset)
            fileset.append(iterationset)
        except Exception as e:
            log(e)
            log(traceback.format_exc())
    return fileset
def ReadSpectrumFromFile(self, flist, proppath='/parameters',
                         respath='/spectrum', verbose=False):
    fs = self.GetFileNames(flist)
    sets = []
    for f in fs:
        try:
            fileset = []
            self.h5f = h5.archive(f, 'r')
            self.h5fname = f
            if verbose:
                log("Loading from file " + f)
            params = self.ReadParameters(proppath)
            if 'energies' in self.h5f.list_children(respath):
                try:
                    d = DataSet()
                    d.props['hdf5_path'] = respath
                    d.props['observable'] = 'spectrum'
                    d.y = self.h5f[respath + '/energies']
                    d.x = range(len(d.y))
                    d.props.update(params)
                    try:
                        d.props.update(self.ReadParameters('quantumnumbers'))
                    except:
                        if verbose:
                            log("no quantumnumbers stored")
                    fileset.append(d)
                except AttributeError:
                    pass
            if 'sectors' in self.h5f.list_children(respath):
                for secnum in self.h5f.list_children(respath + '/sectors'):
                    try:
                        d = DataSet()
                        secpath = respath + '/sectors/' + secnum
                        d.props['hdf5_path'] = secpath
                        d.props['observable'] = 'spectrum'
                        d.y = self.h5f[secpath + '/energies']
                        d.x = range(len(d.y))
                        d.props.update(params)
                        try:
                            d.props.update(self.ReadParameters(secpath + '/quantumnumbers'))
                        except:
                            if verbose:
                                log("no quantumnumbers stored")
                        fileset.append(d)
                    except AttributeError:
                        log("Could not create DataSet")
            sets.append(fileset)
        except Exception as e:
            log(e)
            log(traceback.format_exc())
    return sets
def compute(self):
    globalproppath = self.getInputFromPort('GlobalPropertyPath') \
        if self.hasInputFromPort('GlobalPropertyPath') else "/parameters"
    localpropsuffix = self.getInputFromPort('LocalPropertySuffix') \
        if self.hasInputFromPort('LocalPropertySuffix') else "/parameters"
    resroot = self.getInputFromPort('ResultPath') \
        if self.hasInputFromPort('ResultPath') else "/timesteps/"
    loader = Hdf5Loader()
    self.getInputFromPort('ResultFiles')
    if self.hasInputFromPort('ResultFiles'):
        files = [f.props["filename"] for f in self.getInputFromPort('ResultFiles')]
    datasets = []
    if self.hasInputFromPort('Measurements'):
        # loop over files
        for f in files:
            try:
                # open the file and the results root group
                h5file = h5.archive(f, 'r')
                # enumerate the subgroups
                L = h5file.list_children(resroot)
                # create an iterator over the numbered subgroups
                stepper = [i + 1 for i in range(len(L))]
                # read in the global props
                globalprops = loader.GetProperties([f], globalproppath)
                for d in stepper:
                    # get the measurements from the numbered subgroups
                    locdata = loader.ReadMeasurementFromFile(
                        [f],
                        proppath=resroot + str(d) + localpropsuffix,
                        respath=resroot + str(d) + '/results',
                        measurements=self.getInputFromPort('Measurements'))
                    # append the global props to the local props
                    for i in range(len(locdata[0])):
                        locdata[0][i].props.update(globalprops[0].props)
                    # extend the total dataset with this data
                    datasets.extend(locdata)
            except Exception as e:
                debug.log(traceback.format_exc())
import pyalps as alps
import pyalps.hdf5 as h5
import pyalps.alea as alea

iar = h5.archive('loadobs.h5', 'r')
for name in iar.list_children('/simulation/results'):
    if iar.is_scalar('/simulation/results/' + alps.hdf5_name_encode(name) + '/mean/value'):
        obs = alea.MCScalarData()
    else:
        obs = alea.MCVectorData()
    obs.load('loadobs.h5', '/simulation/results/' + alps.hdf5_name_encode(name))
    print(name + ": " + str(obs))
def loadProperties(fname):
    ar = h5.archive(fname)
    return ar['/parameters']
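# Usage sketch for loadProperties. The file name 'sim.out.h5' is a placeholder
# for any ALPS result file that carries a '/parameters' group:
params = loadProperties('sim.out.h5')
for name in params:
    print(name, '=', params[name])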
                    # get the measurements from the numbered subgroups
                    locdata = loader.ReadMeasurementFromFile(
                        [f],
                        proppath=resroot + str(d) + localpropsuffix,
                        respath=resroot + str(d) + '/results',
                        measurements=self.getInputFromPort('Measurements'))
                    # append the global props to the local props
                    for i in range(len(locdata[0])):
                        locdata[0][i].props.update(globalprops[0].props)
                    # extend the total dataset with this data
                    datasets.extend(locdata)
            except Exception as e:
                debug.log(traceback.format_exc())
    else:
        # loop over files
        for f in files:
            try:
                # open the file and the results root group
                h5file = h5.archive(f, 'r')
                # enumerate the subgroups
                L = h5file.list_children(resroot)
                # create an iterator over the numbered subgroups
                stepper = [i + 1 for i in range(len(L))]
                # read in the global props
                globalprops = loader.GetProperties([f], globalproppath)
                for d in stepper:
                    # get the measurements from the numbered subgroups
                    locdata = loader.ReadMeasurementFromFile(
                        [f],
                        proppath=resroot + str(d) + localpropsuffix,
                        respath=resroot + str(d) + '/results',
                        measurements=None)
                    # append the global props to the local props
                    for i in range(len(locdata[0])):
                        locdata[0][i].props.update(globalprops[0].props)
                    # extend the total dataset with this data
                    datasets.extend(locdata)
def ReadMeasurementFromFile(self, flist, proppath='/parameters',
                            respath='/simulation/results', measurements=None,
                            verbose=False):
    fs = self.GetFileNames(flist)
    sets = []
    for f in fs:
        try:
            fileset = []
            self.h5f = h5.archive(f, 'r')
            self.h5fname = f
            if verbose:
                log("Loading from file " + f)
            list_ = self.GetObservableList(respath)
            params = self.ReadParameters(proppath)
            if measurements is None:
                obslist = list_
            else:
                obslist = [pt.hdf5_name_encode(obs) for obs in measurements
                           if pt.hdf5_name_encode(obs) in list_]
            for m in obslist:
                if verbose:
                    log("Loading " + m)
                size = 0
                xmin = 0
                xstep = 1
                x = None
                if "histogram" in self.h5f.list_children(respath + '/' + m):
                    obs = self.h5f[respath + '/' + m + '/histogram']
                    xmin = self.h5f[respath + '/' + m + '/@min']
                    xstep = self.h5f[respath + '/' + m + '/@stepsize']
                    size = len(obs)
                    x = np.arange(xmin, xmin + xstep * size, xstep)
                elif "error" in self.h5f.list_children(respath + '/' + m + '/mean'):
                    if self.h5f.is_scalar(respath + '/' + m + '/mean/value'):
                        obs = pa.MCScalarData()
                        obs.load(self.h5fname, respath + '/' + m)
                        obs = np.array([obs])
                        size = 1
                        if obs[0].count == 0:
                            obs = None
                    else:
                        obs = None
                        if not self.h5f.is_group(respath + '/' + m + '/timeseries'):
                            # check for simple binning
                            obs = np.array(self.h5f[respath + '/' + m + '/mean/value'])
                            if 'L' in params:  # ugly fix... really ugly
                                L = int(params['L'])
                                if L == obs.size:
                                    params['origin'] = [(L - 1.) / 2.]
                                if L**2 == obs.size:  # dimension 2
                                    obs = obs.reshape([L, L])
                                    params['origin'] = [(L - 1.) / 2., (L - 1.) / 2.]
                                elif L**3 == obs.size:  # dimension 3
                                    obs = obs.reshape([L, L, L])
                                    params['origin'] = [(L - 1.) / 2., (L - 1.) / 2., (L - 1.) / 2.]
                            size = obs.size
                        else:
                            obs = pa.MCVectorData()
                            obs.load(self.h5fname, respath + '/' + m)
                            size = len(obs.mean)
                            if obs.count == 0:
                                obs = None
                else:
                    if self.h5f.is_scalar(respath + '/' + m + '/mean/value'):
                        obs = self.h5f[respath + '/' + m + '/mean/value']
                        obs = np.array([obs])
                        size = 1
                    else:
                        obs = self.h5f[respath + '/' + m + '/mean/value']
                        size = len(obs)
                if "labels" in self.h5f.list_children(respath + '/' + m) and x is None:
                    x = parse_labels(self.h5f[respath + '/' + m + '/labels'])
                elif x is None:
                    x = np.arange(xmin, xmin + xstep * size, xstep)
                try:
                    if obs is not None:
                        d = DataSet()
                        d.y = obs
                        d.x = x
                        d.props['hdf5_path'] = respath + "/" + m
                        d.props['observable'] = pt.hdf5_name_decode(m)
                        d.props.update(params)
                        fileset.append(d)
                except AttributeError:
                    log("Could not create DataSet")
            sets.append(fileset)
        except Exception as e:
            log(e)
            log(traceback.format_exc())
    return sets
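# Usage sketch for ReadMeasurementFromFile. File and observable names are
# placeholders. Each input file yields one list of DataSets, one DataSet per
# observable found under the results path:
loader = Hdf5Loader()
for fileset in loader.ReadMeasurementFromFile(['sim.out.h5'], measurements=['Energy']):
    for d in fileset:
        print(d.props['observable'], d.x, d.y)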
def impl_calculation(name, save_path, calculate):
    usage = "Usage: %prog [options] FILE [FILE [...]]"
    parser = OptionParser(usage=usage)
    parser.add_option("-v", "--verbose", action="store_true", dest="verbose",
                      help="print detailed information")
    parser.add_option("-w", "--write", action="store_true", dest="write",
                      help="write the result(s) back into the file(s)")
    parser.add_option("-n", "--name", action="append", metavar="VAR", dest="variables",
                      help="variable name, can be specified multiple times "
                           "[default: all variables]")
    parser.add_option("-p", "--path", action="store", metavar="HDF5-PATH", dest="path",
                      help="hdf5-path where the data is stored "
                           "[default: \"/simulation/results\"]")
    parser.set_defaults(verbose=False, write=False, variables=[],
                        path="/simulation/results")
    (options, args) = parser.parse_args()
    if len(args) == 0:
        parser.print_help()
        exit()
    variables = options.variables
    for filestring in args:
        ar = h5.archive(filestring, 1)
        if len(options.variables) == 0:
            variables = ar.list_children(options.path)
            if options.verbose:
                print("Variables in file " + filestring + ": " + " ".join(variables))
        for variablestring in variables:
            if ar.dimensions(options.path + "/" + pyalps.hdf5_name_encode(variablestring)
                             + "/timeseries/data") == 1:
                obs = alea.MCScalarData()
                #_save = mcanalyze.write_dim_0
                #vector_save = mcanalyze.write_dim_1
            else:
                obs = alea.MCVectorData()
                #scalar_save = mcanalyze.write_dim_1
                #vector_save = mcanalyze.write_dim_2
            obs.load(filestring,
                     options.path + "/" + pyalps.hdf5_name_encode(variablestring))
            result = calculate(obs)
            if options.verbose:
                print("The " + name + " of variable " + variablestring + " in file "
                      + filestring + " is: " + str(result))
            if options.write:
                ar.write(options.path + "/" + pyalps.hdf5_name_encode(variablestring)
                         + "/" + save_path, result)
    print("Done")
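# Usage sketch for impl_calculation: it wires a command-line tool around a
# callback. The tool name 'mean' and save path 'mean/value' below are
# illustrative, not taken from the source; the callback receives an alea
# MCScalarData or MCVectorData, both of which expose a .mean attribute:
impl_calculation('mean', 'mean/value', lambda obs: obs.mean)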
if len(childs) != 7:
    raise Exception('invalid length of \'/\'')
if len(ar.extent("/int")) != 1 or ar.extent("/int")[0] != 1 \
        or len(ar.extent("/cplx")) != 1 or ar.extent("/cplx")[0] != 1:
    raise Exception('invalid scalar extent')
if len(ar.extent("/np/int")) != 1 or ar.extent("/cplx")[0] != 1 \
        or len(ar.extent("/np/cplx")) != 2 or ar.extent("/np/cplx")[0] != 2 \
        or ar.extent("/np/cplx")[1] != 2:
    raise Exception('invalid array extent')
if type(i) != int or type(d) != float or type(c) != complex or type(s) != str:
    raise Exception('invalid type')
if i != 9 or d - 9.123 > 0.001 or s != "test" or np.any(n != np.array([1, 2, 3])):
    raise Exception('invalid scalar value')
if np.any(x[0] != np.array([1 + 1j, 2 + 2j])) or np.any(x[1] != np.array([3 + 3j, 4 + 4j])):
    raise Exception('invalid array value')

try:
    oar = h5.archive("py.h5", 'w')
    write(oar)
    del oar

    iar = h5.archive("py.h5", 'r')
    if iar.is_complex("/int") or not iar.is_complex("/cplx") or not iar.extent("/np/cplx"):
        raise Exception('invalid complex detection')
    read(iar)
    del iar

    ar = h5.archive("py.h5", 'w')
    write(ar)
    read(ar)
    del ar

    print("SUCCESS")
def ReadDiagDataFromFile(self, flist, proppath='/parameters', respath='/spectrum',
                         measurements=None, index=None, loadIterations=False,
                         verbose=False):
    fs = self.GetFileNames(flist)
    sets = []
    for f in fs:
        try:
            fileset = []
            self.h5f = h5.archive(f, 'r')
            self.h5fname = f
            if verbose:
                log("Loading from file " + f)
            params = self.ReadParameters(proppath)
            if 'results' in self.h5f.list_children(respath):
                list_ = self.GetObservableList(respath + '/results')
                if measurements is None:
                    obslist = list_
                else:
                    obslist = [pt.hdf5_name_encode(obs) for obs in measurements
                               if pt.hdf5_name_encode(obs) in list_]
                if loadIterations:
                    if "iteration" in self.h5f.list_children(respath + '/results'):
                        fileset.append(self.GetIterations(respath + '/results', params,
                                                          measurements, index, verbose))
                else:
                    for m in obslist:
                        if "mean" in self.h5f.list_children(respath + '/results/' + m):
                            try:
                                if verbose:
                                    log("Loading " + m)
                                d = DataSet()
                                secresultspath = respath + '/results/' + m
                                d.props['hdf5_path'] = secresultspath
                                d.props['observable'] = pt.hdf5_name_decode(m)
                                if index is None:
                                    d.y = self.h5f[secresultspath + '/mean/value']
                                    d.x = np.arange(0, len(d.y))
                                else:
                                    try:
                                        d.y = self.h5f[secresultspath + '/mean/value'][index]
                                    except:
                                        pass
                                if "labels" in self.h5f.list_children(secresultspath):
                                    d.x = parse_labels(self.h5f[secresultspath + '/labels'])
                                else:
                                    d.x = np.arange(0, len(d.y))
                                d.props.update(params)
                                fileset.append(d)
                            except AttributeError:
                                log("Could not create DataSet")
            if loadIterations:
                if "iteration" in self.h5f.list_children(respath):
                    fileset.append(self.GetIterations(respath, params, measurements,
                                                      index, verbose))
            if 'sectors' in self.h5f.list_children(respath):
                list_ = self.GetObservableList(respath + '/sectors/0/results')
                if measurements is None:
                    obslist = list_
                else:
                    obslist = [pt.hdf5_name_encode(obs) for obs in measurements
                               if pt.hdf5_name_encode(obs) in list_]
                for secnum in self.h5f.list_children(respath + '/sectors'):
                    sector_sets = []
                    for m in obslist:
                        if "mean" in self.h5f.list_children(respath + '/sectors/'
                                                            + secnum + '/results/' + m):
                            try:
                                if verbose:
                                    log("Loading " + m)
                                d = DataSet()
                                secpath = respath + '/sectors/' + secnum
                                secresultspath = respath + '/sectors/' + secnum + '/results/' + m
                                d.props['hdf5_path'] = secresultspath
                                d.props['observable'] = pt.hdf5_name_decode(m)
                                if index is None:
                                    d.y = self.h5f[secresultspath + '/mean/value']
                                    d.x = np.arange(0, len(d.y))
                                else:
                                    try:
                                        d.y = self.h5f[secresultspath + '/mean/value'][index]
                                    except:
                                        pass
                                if "labels" in self.h5f.list_children(secresultspath):
                                    d.x = parse_labels(self.h5f[secresultspath + '/labels'])
                                else:
                                    d.x = np.arange(0, len(d.y))
                                d.props.update(params)
                                try:
                                    d.props.update(self.ReadParameters(secpath + '/quantumnumbers'))
                                except:
                                    if verbose:
                                        log("no quantumnumbers stored")
                                sector_sets.append(d)
                            except AttributeError:
                                log("Could not create DataSet")
                    fileset.append(sector_sets)
            sets.append(fileset)
        except RuntimeError:
            raise
        except Exception as e:
            log(e)
            log(traceback.format_exc())
    return sets
import pyalps.hdf5 as hdf5
import pyalps.ngs as ngs
import sys

p = ngs.params({'val1': 42, 'val2': '42', 'a': 1, 'x': 2, 'b': 3})
print(type(p["val1"]), type(p["val2"]), type(p["undefined"]))

oar = hdf5.archive('parms1.h5', 'w')
p.save(oar)  # does not use path '/parameters'
del oar

oar = hdf5.archive('parms2.h5', 'w')
print(p.keys(), p.values())
for key in p.keys():
    print(key)
    oar['parameters/' + key] = p[key]
del oar

iar = hdf5.archive('parms2.h5', 'r')
p.load(iar)
import numpy as np
import pyalps.hdf5 as hdf5

## Python 3 does not have the `long` type anymore
import sys
if sys.version_info > (3,):
    long = int

ar = hdf5.archive('pyngs.h5', 'w')

a = np.array([1, 2, 3])
b = np.array([1.1, 2.0, 3.5])
c = np.array([1.1 + 1j, 2.0j, 3.5])
d = {"a": a, 2 + 3j: "foo"}

ar["/list"] = [1, 2, 3]
ar["/list2"] = [[[1, 2], [3, 4]], [[1, 2], [3, 4]], [[1, 2], [3, 4]], [[1, 2], [3, 4]]]
ar["/tuple"] = (1, 2, 3)
ar["/dict"] = {"scalar": 1, "numpy": a, "numpycpx": c, "list": [1, 2, 3],
               "string": "str", 1: 1, 4: d}
ar["/numpy"] = a
ar["/numpy2"] = b
ar["/numpy3"] = c
ar["/numpyel"] = a[0]
ar["/numpyel2"] = b[0]
ar["/numpyel3"] = c[0]
def load(self, filename):
    with hdf5.archive(filename, 'r') as ar:
        # note: assigning to 'self' rebinds the local name only; the caller's
        # object is not modified by this statement
        self = ar['/']
def save(self, filename):
    with hdf5.archive(filename, 'w') as ar:
        ar['/'] = self
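# Usage sketch for the save/load pair above. It assumes a class exposing both
# methods and an instance `sim`; the file name is a placeholder:
sim.save('sim.clone0.h5')   # serializes the whole object under '/'
sim.load('sim.clone0.h5')   # see the caveat in load(): the object is not mutated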
    for j in range(0, FLAVORS, 2):
        if (j != i):
            udata[i * FLAVORS + j] = Up - J              # Hubbard repulsion interband same spin
            udata[(i + 1) * FLAVORS + (j + 1)] = Up - J  # Hubbard repulsion interband same spin
            udata[i * FLAVORS + (j + 1)] = Up            # Hubbard repulsion interband opposite spin
            udata[(i + 1) * FLAVORS + j] = Up            # Hubbard repulsion interband opposite spin
    return udata.reshape(FLAVORS, FLAVORS)

ar = archive('hyb.param.out.h5', 'rw')
# read the parameters from the output file
parms = ar['/parameters']  # returns a python dict
n_orb = parms['N_ORBITALS']
a = array(zeros(n_orb**2)).reshape(n_orb, n_orb)  # density-density correlation matrix
for i in range(1, n_orb):  # i>j
    for j in range(i):
        a[i][j] = ar['/simulation/results/nn_%i_%i/mean/value' % (i, j)]
for i in range(n_orb):  # i==j
    a[i][i] = ar['/simulation/results/density_%i/mean/value' % i]
for j in range(1, n_orb):  # the matrix is symmetric
    for i in range(j):  # i<j
def save(self, filename):
    with hdf5.archive(filename, 'w') as ar:
        ar['/simulation/realizations/0/clones/0'] = self
def load(self, filename):
    with hdf5.archive(filename, 'r') as ar:
        self = ar['/simulation/realizations/0/clones/0']  # note: rebinds the local name only
import numpy as np
import pyalps.hdf5 as hdf5
import os

ar = hdf5.archive('foo%d.h5', 'al')
s = 2**10
while s < 2**29:
    print(s)
    a = np.empty(s)
    ar[str(s)] = a
    s *= 2

i = 0
while os.path.isfile('foo%d.h5' % i):
    os.remove('foo%d.h5' % i)
    i += 1
def ReadBinningAnalysis(self, flist, measurements=None, proppath='/parameters',
                        respath=None, verbose=False):
    fs = self.GetFileNames(flist)
    sets = []
    for f in fs:
        try:
            fileset = []
            if verbose:
                log('loading from file ' + f)
            self.h5f = h5.archive(f, 'r')
            self.h5fname = f
            if respath is None:
                respath = "/simulation/results"
            list_ = self.GetObservableList(respath)
            # this is exception-safe in the sense that it's also required in the line above
            #grp = self.h5f.require_group(respath)
            params = self.ReadParameters(proppath)
            if measurements is None:
                obslist = list_
            else:
                obslist = [pt.hdf5_name_encode(obs) for obs in measurements
                           if pt.hdf5_name_encode(obs) in list_]
            for m in obslist:
                try:
                    d = DataSet()
                    if "timeseries" in self.h5f.list_children(respath + '/' + m):
                        k = self.h5f.list_children(respath + '/' + m + '/timeseries')
                        if "logbinning" in k and "logbinning2" in k and "logbinning_counts" in k:
                            if verbose:
                                log("Loading " + m)
                            bins = self.h5f[respath + '/' + m + '/timeseries/logbinning'][0:-7]
                            bins2 = self.h5f[respath + '/' + m + '/timeseries/logbinning2'][0:-7]
                            counts = self.h5f[respath + '/' + m + '/timeseries/logbinning_counts'][0:-7]
                            scale = 1
                            for i in range(len(counts)):
                                mean = bins[i] / (counts[i] * scale)
                                mean2 = bins2[i] / counts[i]
                                bins2[i] = np.sqrt((mean2 - mean * mean) / counts[i])
                                scale *= 2
                            d.y = bins2
                            d.x = np.arange(0, len(d.y))
                            d.props['hdf5_path'] = respath + '/' + m
                            d.props['observable'] = 'binning analysis of ' + pt.hdf5_name_decode(m)
                            d.props.update(params)
                            if verbose:
                                log('loaded binning analysis for ' + m)
                            fileset.append(d)
                except AttributeError:
                    log("Could not create DataSet")
            sets.append(fileset)
        except Exception as e:
            log(e)
            log(traceback.format_exc())
    return sets
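# Usage sketch for ReadBinningAnalysis. File and observable names are
# placeholders. Each returned DataSet holds the error estimate at each
# logarithmic binning level in d.y; a plateau indicates converged error bars:
loader = Hdf5Loader()
for fileset in loader.ReadBinningAnalysis(['mc.out.h5'], measurements=['Energy']):
    for d in fileset:
        print(d.props['observable'])
        print(d.y)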
for n in range(parms['N_MATSUBARA']):
    w = (2 * n + 1) * pi / parms['BETA']
    # noninteracting Green's function on the Bethe lattice
    g.append(2.0 / (I * w + mu + I * sqrt(4 * parms['t']**2 - (I * w + mu)**2)))

delta = []
for i in range(parms['N_TAU'] + 1):
    tau = i * parms['BETA'] / parms['N_TAU']
    g0tau = 0.0
    for n in range(parms['N_MATSUBARA']):
        iw = complex(0.0, (2 * n + 1) * pi / parms['BETA'])
        g0tau += ((g[n] - 1.0 / iw) * exp(-iw * tau)).real  # Fourier transform with tail subtracted
    g0tau *= 2.0 / parms['BETA']
    g0tau += -1.0 / 2.0  # add back the contribution of the tail
    delta.append(parms['t']**2 * g0tau)  # delta = t**2 g

# write hybridization function to hdf5 archive (solver input)
ar = archive(parms['DELTA'], 'w')
for m in range(parms['N_ORBITALS']):
    ar['/Delta_%i' % m] = delta
del ar

print("generating retarded interaction...")
l = parms['lambda']
w0 = parms['w0']
beta = parms['BETA']
K = lambda tau: -(l**2) * (cosh(w0 * (beta / 2.0 - tau)) / sinh(w0 * beta / 2.0)
                           - cosh(w0 * beta / 2.0) / sinh(w0 * beta / 2.0)) / (w0 * w0)
Kp = lambda tau: +(l**2) * (sinh(w0 * (beta / 2.0 - tau)) / sinh(w0 * beta / 2.0)) / w0
print("U_screened =", parms['U'] - 2 * l**2 / w0)
k_tau = []
    'N_HISTOGRAM_ORDERS': 50,              # maximum order for the perturbation order histogram
    'N_TAU': N_TAU,                        # number of imaginary time points (tau_0=0, tau_N_TAU=BETA)
    'N_MATSUBARA': int(N_TAU / (2 * pi)),  # number of Matsubara frequencies
    'N_W': 1,                              # number of bosonic Matsubara frequencies for the local susceptibility
    # additional parameters (used outside the solver only)
    't': 1,                                # hopping
})

for parms in parameters:
    if mpi.rank == 0:
        ar = archive(parms['BASENAME'] + '.out.h5', 'a')
        ar['/parameters'] = parms
        del ar

        print("creating initial hybridization...")
        g = []
        I = complex(0., 1.)
        mu = 0.0
        for n in range(parms['N_MATSUBARA']):
            w = (2 * n + 1) * pi / parms['BETA']
            # use GF with semielliptical DOS
            g.append(2.0 / (I * w + mu + I * sqrt(4 * parms['t']**2 - (I * w + mu)**2)))
        delta = []
        for i in range(parms['N_TAU'] + 1):
            tau = i * parms['BETA'] / parms['N_TAU']
            g0tau = 0.0
import numpy as np
import pyalps.hdf5 as hdf5

ar = hdf5.archive('pyngs.h5', 'w')

a = np.array([1, 2, 3])
b = np.array([1.1, 2.0, 3.5])
c = np.array([1.1 + 1j, 2.0j, 3.5])
d = {"a": a, 2 + 3j: "foo"}

ar["/list"] = [1, 2, 3]
ar["/list2"] = [[[1, 2], [3, 4]], [[1, 2], [3, 4]], [[1, 2], [3, 4]], [[1, 2], [3, 4]]]
ar["/dict"] = {"scalar": 1, "numpy": a, "numpycpx": c, "list": [1, 2, 3],
               "string": "str", 1: 1, 4: d}
ar["/numpy"] = a
ar["/numpy2"] = b
ar["/numpy3"] = c
ar["/numpyel"] = a[0]
ar["/numpyel2"] = b[0]
ar["/numpyel3"] = c[0]
ar["/int"] = int(1)
# assumed: the fragment uses orig_dict without defining it; it must match the
# dict the params are created from below
orig_dict = {'val1': 42, 'val2': '42', 'a': 1, 'x': 2, 'b': 3}

def assert_type(p, k):
    assert type(p[k]) == type(orig_dict[k])

## create params
p = ngs.params({'val1': 42, 'val2': '42', 'a': 1, 'x': 2, 'b': 3})

## check content
for k in sorted(orig_dict.keys()):
    assert p[k] == orig_dict[k]
    assert_type(p, k)
    print(k, 'ok!')

## check NoneType
assert type(p["undefined"]) == type(None)

## write to hdf5
with hdf5.archive('parms1.h5', 'w') as oar:
    p.save(oar)  # does not use path '/parameters'

with hdf5.archive('parms2.h5', 'w') as oar:
    for key in sorted(p.keys()):
        print(key)
        oar['parameters/' + key] = p[key]

## load from hdf5
with hdf5.archive('parms2.h5', 'r') as iar:
    p.load(iar)

for k in sorted(orig_dict.keys()):
    assert p[k] == orig_dict[k]
    assert_type(p, k)
    print(k, 'ok!')
    resume = True if 'c' in args else False
    outfile = positional[0]
except (IndexError, getopt.GetoptError):
    print('usage: [-T timelimit] [-c] outputfile')
    exit()

sim = ising.sim(ngs.params({
    'L': 100,
    'THERMALIZATION': 1000,
    'SWEEPS': 10000,
    'T': 2
}))

if resume:
    try:
        with hdf5.archive(outfile[0:outfile.rfind('.h5')] + '.clone0.h5', 'r') as ar:
            sim.load(ar['/'])
    except ArchiveNotFound:
        pass

if limit == 0:
    sim.run(lambda: False)
else:
    start = time.time()
    sim.run(lambda: time.time() > start + float(limit))

with hdf5.archive(outfile[0:outfile.rfind('.h5')] + '.clone0.h5', 'w') as ar:
    ar['/'] = sim

results = sim.collectResults()  # TODO: how should we do that?
print(results)
""" import pyalps.hdf5 as hdf5 import sys, time, traceback, getopt import ising if __name__ == '__main__': sim = ising.sim({'L': 100, 'THERMALIZATION': 100, 'SWEEPS': 1000, 'T': 2}) outfile = 'test' limit = 1000 if limit == 0: sim.run(lambda: False) else: start = time.time() sim.run(lambda: time.time() > start + float(limit)) with hdf5.archive(outfile[0:outfile.rfind('.h5')] + '.clone0.h5', 'w') as ar: ar['/'] = sim results = sim.collectResults() # TODO: how should we do that? for key, value in results.iteritems(): print "{}: {}".format(key, value) with hdf5.archive(outfile, 'w') as ar: ar['/parameters'] = sim.parameters ar['/simulation/results'] = results
import pyalps.hdf5 as h5

o = h5.archive('blubb', 'w')
o['/a'] = 0
del o

i = h5.archive('blubb', 'r')
o = h5.archive('blubb', 'w')
o['/a'] = 0
del o
del i

o = h5.archive('blubb', 'w')
o['/a'] = 0
del o