def GetProperties(self, flist, proppath='/parameters', respath='/simulation/results', verbose=False):
    """Read simulation parameters (and, best-effort, the observable list) from each file.

    Parameters
    ----------
    flist : list of file names / patterns, expanded via self.GetFileNames.
    proppath : HDF5 path of the parameter group.
    respath : HDF5 path of the results group used to enumerate observables.
    verbose : if True, log each file as it is opened.

    Returns
    -------
    list of ResultFile objects, one per readable file. Files that fail to
    open or parse are logged (with traceback) and skipped.
    """
    fs = self.GetFileNames(flist)
    resultfiles = []
    for f in fs:
        try:
            self.h5f = h5.archive(f, 'r')
            self.h5fname = f
            if verbose:
                log("Loading from file " + f)  # fix: separator space was missing
            rfile = ResultFile(f)
            rfile.props = self.ReadParameters(proppath)
            # Best-effort: not every file stores results, so failing to
            # enumerate observables must not discard the parameters.
            try:
                obs = self.GetObservableList(respath)
                rfile.props["ObservableList"] = [pt.hdf5_name_decode(x) for x in obs]
            except Exception:  # was a bare except; keep best-effort semantics
                pass
            resultfiles.append(rfile)
        except Exception as e:
            log(e)
            log(traceback.format_exc())
    return resultfiles
def ReadDMFTIterations(self, flist, observable='G_tau', measurements='0', proppath='/parameters', respath='/simulation/iteration', verbose=False):
    """Read per-iteration DMFT measurements of one observable from each file.

    Parameters
    ----------
    flist : list of file names / patterns, expanded via self.GetFileNames.
    observable : name of the observable group (e.g. 'G_tau').
    measurements : iterable of measurement names; only those present in the
        first iteration's result list are loaded.
    proppath : HDF5 path of the parameter group.
    respath : HDF5 path of the iteration group.
    verbose : if True, log each file and measurement as it is loaded.

    Returns
    -------
    Nested list: files -> iterations -> DataSet per measurement.
    Fix over the previous version: a DataSet is appended only when it was
    fully built; before, the append sat outside the try/except, so a failed
    load re-appended the previous iteration's DataSet (or raised NameError).
    """
    fs = self.GetFileNames(flist)
    fileset = []
    for f in fs:
        try:
            self.h5f = h5.archive(f, 'r')
            self.h5fname = f
            if verbose:
                log("Loading from file " + f)
            list_ = self.GetObservableList(respath + '/1/results/' + observable + '/')
            params = self.ReadParameters(proppath)
            obslist = [pt.hdf5_name_encode(obs) for obs in measurements
                       if pt.hdf5_name_encode(obs) in list_]
            iterationset = []
            for it in self.h5f.list_children(respath):
                obsset = []
                for m in obslist:
                    try:
                        if verbose:
                            log("Loading " + m)
                        d = DataSet()
                        size = 0
                        path = it + '/results/' + observable + '/' + m
                        if "mean" in self.h5f.list_children(respath + '/' + path):
                            if self.h5f.is_scalar(respath + '/' + path + '/mean/value'):
                                size = 1
                                obs = self.h5f[respath + '/' + path + '/mean/value']
                                d.y = np.array([obs])
                            else:
                                obs = self.h5f[respath + '/' + path + '/mean/value']
                                size = len(obs)
                                d.y = obs
                            d.x = np.arange(0, size)
                        d.props['hdf5_path'] = respath + "/" + path
                        d.props['observable'] = pt.hdf5_name_decode(m)
                        d.props['iteration'] = it
                        d.props.update(params)
                        # fix: append only on success (was outside the except)
                        obsset.append(d)
                    except AttributeError:
                        log("Could not create DataSet")
                iterationset.append(obsset)
            fileset.append(iterationset)
        except Exception as e:
            log(e)
            log(traceback.format_exc())
    return fileset
def ReadMeasurementFromFile(self, flist, proppath='/parameters', respath='/simulation/results', measurements=None, verbose=False):
    """Read Monte Carlo measurements (with errors where available) from each file.

    For each requested measurement the method distinguishes three layouts:
    a histogram dataset, a mean with an error (loaded as MCScalarData /
    MCVectorData, or directly when no timeseries group exists), or a plain
    mean value. Returns a list (one entry per file) of lists of DataSet.
    Files that fail are logged with a traceback and skipped.
    """
    fs = self.GetFileNames(flist)
    sets = []
    for f in fs:
        try:
            fileset = []
            self.h5f = h5.archive(f, 'r')
            self.h5fname = f
            if verbose:
                log("Loading from file " + f)
            list_ = self.GetObservableList(respath)
            params = self.ReadParameters(proppath)
            obslist = []
            if measurements == None:
                obslist = list_
            else:
                # keep only requested measurements that actually exist
                obslist = [pt.hdf5_name_encode(obs) for obs in measurements
                           if pt.hdf5_name_encode(obs) in list_]
            for m in obslist:
                if verbose:
                    log("Loading " + m)
                size = 0
                xmin = 0
                xstep = 1
                x = None
                if "histogram" in self.h5f.list_children(respath + '/' + m):
                    # histogram layout: y = counts, x built from @min/@stepsize
                    obs = self.h5f[respath + '/' + m + '/histogram']
                    xmin = self.h5f[respath + '/' + m + '/@min']
                    xstep = self.h5f[respath + '/' + m + '/@stepsize']
                    size = len(obs)
                    x = np.arange(xmin, xmin + xstep * size, xstep)
                elif "error" in self.h5f.list_children(respath + '/' + m + '/mean'):
                    # mean-with-error layout
                    if self.h5f.is_scalar(respath + '/' + m + '/mean/value'):
                        obs = pa.MCScalarData()
                        obs.load(self.h5fname, respath + '/' + m)
                        obs = np.array([obs])
                        size = 1
                        if obs[0].count == 0:
                            # empty observable: drop it
                            obs = None
                    else:
                        obs = None
                        if not self.h5f.is_group(respath + '/' + m + '/timeseries'):
                            # check for simple binning
                            obs = np.array(self.h5f[respath + '/' + m + '/mean/value'])
                            if 'L' in params:
                                # ugly fix... really ugly: reshape flat data to the
                                # lattice dimensions when the sizes happen to match
                                L = int(params['L'])
                                if L == obs.size:
                                    params['origin'] = [(L - 1.) / 2.]
                                if L**2 == obs.size:  # dimension 2
                                    obs = obs.reshape([L, L])
                                    params['origin'] = [(L - 1.) / 2., (L - 1.) / 2.]
                                elif L**3 == obs.size:  # dimension 3
                                    obs = obs.reshape([L, L, L])
                                    params['origin'] = [(L - 1.) / 2., (L - 1.) / 2., (L - 1.) / 2.]
                            size = obs.size
                        else:
                            obs = pa.MCVectorData()
                            obs.load(self.h5fname, respath + '/' + m)
                            size = len(obs.mean)
                            if obs.count == 0:
                                # empty observable: drop it
                                obs = None
                else:
                    # plain mean value, no error information stored
                    if self.h5f.is_scalar(respath + '/' + m + '/mean/value'):
                        obs = self.h5f[respath + '/' + m + '/mean/value']
                        obs = np.array([obs])
                        size = 1
                    else:
                        obs = self.h5f[respath + '/' + m + '/mean/value']
                        size = len(obs)
                # x axis: stored labels win; otherwise a uniform grid
                if "labels" in self.h5f.list_children(respath + '/' + m) and x is None:
                    x = parse_labels(self.h5f[respath + '/' + m + '/labels'])
                elif x is None:
                    x = np.arange(xmin, xmin + xstep * size, xstep)
                try:
                    if obs is not None:
                        d = DataSet()
                        d.y = obs
                        d.x = x
                        d.props['hdf5_path'] = respath + "/" + m
                        d.props['observable'] = pt.hdf5_name_decode(m)
                        d.props.update(params)
                        fileset.append(d)
                except AttributeError:
                    log("Could not create DataSet")
            sets.append(fileset)
        except Exception as e:
            log(e)
            log(traceback.format_exc())
    return sets
def ReadBinningAnalysis(self, flist, measurements=None, proppath='/parameters', respath=None, verbose=False):
    """Read the logarithmic binning analysis of the selected measurements.

    For every observable that stores logbinning/logbinning2/logbinning_counts
    timeseries, builds a DataSet whose y values are the error estimate per
    binning level. Returns a list (one per file) of lists of DataSet.

    Fixes over the previous version: 'hdf5_path' now includes the '/'
    separator between respath and the observable name (consistent with the
    sibling readers), the "Loading"/"binning" log messages are spelled
    correctly, and `is None` comparisons replace `== None`.
    """
    fs = self.GetFileNames(flist)
    sets = []
    for f in fs:
        try:
            fileset = []
            if verbose:
                log('loading from file ' + f)
            self.h5f = h5.archive(f, 'r')
            self.h5fname = f
            if respath is None:
                respath = "/simulation/results"
            # exception-safe: GetObservableList already requires this path
            list_ = self.GetObservableList(respath)
            params = self.ReadParameters(proppath)
            if measurements is None:
                obslist = list_
            else:
                obslist = [pt.hdf5_name_encode(obs) for obs in measurements
                           if pt.hdf5_name_encode(obs) in list_]
            for m in obslist:
                try:
                    d = DataSet()
                    if "timeseries" in self.h5f.list_children(respath + '/' + m):
                        k = self.h5f.list_children(respath + '/' + m + '/timeseries')
                        if "logbinning" in k and "logbinning2" in k and "logbinning_counts" in k:
                            if verbose:
                                log("Loading " + m)
                            # drop the last 7 (coarsest, noisiest) binning levels
                            bins = self.h5f[respath + '/' + m + '/timeseries/logbinning'][0:-7]
                            bins2 = self.h5f[respath + '/' + m + '/timeseries/logbinning2'][0:-7]
                            counts = self.h5f[respath + '/' + m + '/timeseries/logbinning_counts'][0:-7]
                            scale = 1
                            for i in range(len(counts)):
                                # bin width doubles at each level
                                mean = bins[i] / (counts[i] * scale)
                                mean2 = bins2[i] / counts[i]
                                bins2[i] = np.sqrt((mean2 - mean * mean) / counts[i])
                                scale *= 2
                            d.y = bins2
                            d.x = np.arange(0, len(d.y))
                            # fix: '/' separator was missing (respath + m)
                            d.props['hdf5_path'] = respath + '/' + m
                            d.props['observable'] = 'binning analysis of ' + pt.hdf5_name_decode(m)
                            d.props.update(params)
                            if verbose:
                                log(' loaded binning analysis for ' + m)
                            fileset.append(d)
                except AttributeError:
                    log("Could not create DataSet")
            sets.append(fileset)
        except Exception as e:
            log(e)
            log(traceback.format_exc())
    return sets
def ReadDiagDataFromFile(self, flist, proppath='/parameters', respath='/spectrum', measurements=None, index=None, loadIterations=False, verbose=False):
    """Read diagonalization results (per-spectrum and per-sector) from each file.

    Loads mean values of the selected measurements from respath+'/results'
    and, when present, from every sector under respath+'/sectors'. When
    loadIterations is True, iteration data is collected via GetIterations
    instead of (or in addition to) the flat results. When index is given,
    only that element of each mean value is loaded.

    Returns a list (one per file) mixing DataSet entries and, for sectors
    or iterations, nested lists of DataSet. RuntimeError is re-raised;
    other exceptions are logged with a traceback and the file is skipped.
    """
    fs = self.GetFileNames(flist)
    sets = []
    for f in fs:
        try:
            fileset = []
            self.h5f = h5.archive(f, 'r')
            self.h5fname = f
            if verbose:
                log("Loading from file" + f)
            params = self.ReadParameters(proppath)
            if 'results' in self.h5f.list_children(respath):
                list_ = self.GetObservableList(respath + '/results')
                if measurements == None:
                    obslist = list_
                else:
                    obslist = [pt.hdf5_name_encode(obs) for obs in measurements
                               if pt.hdf5_name_encode(obs) in list_]
                if loadIterations == True:
                    # iteration data replaces the flat per-observable loop
                    if "iteration" in self.h5f.list_children(respath + '/results'):
                        fileset.append(self.GetIterations(respath + '/results', params, measurements, index, verbose))
                else:
                    for m in obslist:
                        if "mean" in self.h5f.list_children(respath + '/results/' + m):
                            try:
                                if verbose:
                                    log("Loading" + m)
                                d = DataSet()
                                secresultspath = respath + '/results/' + m
                                d.props['hdf5_path'] = secresultspath
                                d.props['observable'] = pt.hdf5_name_decode(m)
                                if index == None:
                                    d.y = self.h5f[secresultspath + '/mean/value']
                                    d.x = np.arange(0, len(d.y))
                                else:
                                    # best-effort indexed read; d.y may stay unset
                                    try:
                                        d.y = self.h5f[secresultspath + '/mean/value'][index]
                                    except:
                                        pass
                                    if "labels" in self.h5f.list_children(secresultspath):
                                        d.x = parse_labels(self.h5f[secresultspath + '/labels'])
                                    else:
                                        d.x = np.arange(0, len(d.y))
                                d.props.update(params)
                                fileset.append(d)
                            except AttributeError:
                                log("Could not create DataSet")
            if loadIterations == True:
                # iterations may also live directly under respath
                if "iteration" in self.h5f.list_children(respath):
                    fileset.append(self.GetIterations(respath, params, measurements, index, verbose))
            if 'sectors' in self.h5f.list_children(respath):
                # NOTE(review): observable list is taken from sector 0 and
                # assumed identical for all sectors — confirm with the writer.
                list_ = self.GetObservableList(respath + '/sectors/0/results')
                if measurements == None:
                    obslist = list_
                else:
                    obslist = [pt.hdf5_name_encode(obs) for obs in measurements
                               if pt.hdf5_name_encode(obs) in list_]
                for secnum in self.h5f.list_children(respath + '/sectors'):
                    sector_sets = []
                    for m in obslist:
                        if "mean" in self.h5f.list_children(respath + '/sectors/' + secnum + '/results/' + m):
                            try:
                                if verbose:
                                    log("Loading" + m)
                                d = DataSet()
                                secpath = respath + '/sectors/' + secnum
                                secresultspath = respath + '/sectors/' + secnum + '/results/' + m
                                d.props['hdf5_path'] = secresultspath
                                d.props['observable'] = pt.hdf5_name_decode(m)
                                if index == None:
                                    d.y = self.h5f[secresultspath + '/mean/value']
                                    d.x = np.arange(0, len(d.y))
                                else:
                                    # best-effort indexed read; d.y may stay unset
                                    try:
                                        d.y = self.h5f[secresultspath + '/mean/value'][index]
                                    except:
                                        pass
                                    if "labels" in self.h5f.list_children(secresultspath):
                                        d.x = parse_labels(self.h5f[secresultspath + '/labels'])
                                    else:
                                        d.x = np.arange(0, len(d.y))
                                d.props.update(params)
                                # quantum numbers are optional per sector
                                try:
                                    d.props.update(self.ReadParameters(secpath + '/quantumnumbers'))
                                except:
                                    if verbose:
                                        log("no quantumnumbers stored ")
                                    pass
                                sector_sets.append(d)
                            except AttributeError:
                                log("Could not create DataSet")
                                pass
                    fileset.append(sector_sets)
            sets.append(fileset)
        except RuntimeError:
            # deliberate: runtime errors abort the whole read
            raise
        except Exception as e:
            log(e)
            log(traceback.format_exc())
    return sets
def GetIterations(self, current_path, params=None, measurements=None, index=None, verbose=False):
    """Collect per-iteration DataSets below current_path+'/iteration'.

    Parameters
    ----------
    current_path : HDF5 path whose 'iteration' child holds numbered iterations.
    params : extra properties merged into every DataSet (default: none).
    measurements : iterable of measurement names; None means all available.
    index : when given, load only that element of each mean value.
    verbose : if True, log each measurement as it is loaded.

    Returns
    -------
    List (one per iteration) of lists of DataSet.

    Fixes over the previous version: the mutable default argument
    `params={}` is replaced by the None sentinel, a DataSet is appended
    only when it was fully built (the append used to sit after the
    try/except, re-appending a stale `d` on failure), and the bare
    `except:` around the indexed read is narrowed to `except Exception:`.
    """
    if params is None:
        params = {}
    iterationset = []
    for it in self.h5f.list_children(current_path + '/iteration'):
        obsset = []
        iteration_props = {}
        # per-iteration parameters are optional
        if 'parameters' in self.h5f.list_children(current_path + '/iteration/' + it):
            iteration_props = self.ReadParameters(current_path + '/iteration/' + it + '/parameters')
        iteration_props['iteration'] = it
        respath = current_path + '/iteration/' + it + '/results'
        list_ = self.GetObservableList(respath)
        if measurements is None:
            obslist = list_
        else:
            obslist = [pt.hdf5_name_encode(obs) for obs in measurements
                       if pt.hdf5_name_encode(obs) in list_]
        for m in obslist:
            if m in self.h5f.list_children(respath):
                if "mean" in self.h5f.list_children(respath + '/' + m):
                    try:
                        d = DataSet()
                        itresultspath = respath + '/' + m
                        if verbose:
                            log("Loading " + m)
                        measurements_props = {}
                        measurements_props['hdf5_path'] = itresultspath
                        measurements_props['observable'] = pt.hdf5_name_decode(m)
                        if index is None:
                            d.y = self.h5f[itresultspath + '/mean/value']
                            d.x = np.arange(0, len(d.y))
                        else:
                            # best-effort indexed read; d.y may stay unset
                            try:
                                d.y = self.h5f[itresultspath + '/mean/value'][index]
                            except Exception:
                                pass
                            if "labels" in self.h5f.list_children(itresultspath):
                                d.x = parse_labels(self.h5f[itresultspath + '/labels'])
                            else:
                                d.x = np.arange(0, len(d.y))
                        d.props.update(params)
                        d.props.update(iteration_props)
                        d.props.update(measurements_props)
                        # fix: append only on success (was outside the except)
                        obsset.append(d)
                    except AttributeError:
                        log("Could not create DataSet")
        iterationset.append(obsset)
    return iterationset