def galdos(scell, erange=10, enum=None, estep=None, count=None, tol=None,
           smooth=10, limit=100, kernel='jackson', rcstr_method='std',
           stateclass=None, spr=1, omp=False, num_threads=0, until=None,
           verbose=False, ainit=None, ginit=None):
    """Wrapper for :func:`kpm.galdos` that uses :class:`cofunc.coFunc`
    objects for input and output."""
    #init_aenerg = ainit.x if ainit else None
    init_adens = ainit.y if ainit else None
    init_avar = ainit.attrs.var if ainit else None
    init_acount = ainit.attrs.count if ainit else None
    #init_generg = ginit.x if ginit else None
    init_gdens = ginit.y if ginit else None
    init_gvar = ginit.attrs.var if ginit else None
    init_gcount = ginit.attrs.count if ginit else None

    aenerg, adens, avar, acount, aacc, generg, gdens, gvar, gcount, gacc \
        = kpm.galdos(scell, erange=erange, enum=enum, estep=estep,
                     count=count, tol=tol, smooth=smooth, limit=limit,
                     kernel=kernel, rcstr_method=rcstr_method,
                     stateclass=stateclass, spr=spr, omp=omp,
                     num_threads=num_threads, until=until, verbose=verbose,
                     init_adens=init_adens,  # init_aenerg=init_aenerg
                     init_acount=init_acount, init_avar=init_avar,
                     init_gdens=init_gdens,  # init_generg=init_generg
                     init_gcount=init_gcount, init_gvar=init_gvar)

    if aenerg is None and ainit:
        aenerg = ainit.x
    if generg is None and ginit:
        generg = ginit.x

    aldos = cofunc.coFunc(aenerg, adens)
    aldos.attrs.update(erange=erange, enum=enum, estep=estep, tol=tol,
                       smooth=smooth, limit=limit, kernel=kernel,
                       rcstr_method=rcstr_method, stateclass=stateclass,
                       spr=spr, omp=omp, num_threads=num_threads,
                       until=until, verbose=verbose,
                       var=avar, count=acount, acc=aacc)

    gldos = cofunc.coFunc(generg, gdens)
    gldos.attrs.update(erange=erange, enum=enum, estep=estep, tol=tol,
                       smooth=smooth, limit=limit, kernel=kernel,
                       rcstr_method=rcstr_method, stateclass=stateclass,
                       spr=spr, omp=omp, num_threads=num_threads,
                       until=until, verbose=verbose,
                       var=gvar, count=gcount, acc=gacc)

    return aldos, gldos
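
# Usage sketch (illustrative, not part of the original module): "scell" is
# assumed to be a supercell/Hamiltonian object accepted by kpm.galdos. A
# previous result pair can be passed back through ainit/ginit to continue
# accumulating states, since var and count are carried along in the attrs.
#
#     aldos, gldos = galdos(scell, erange=10, enum=200, count=32)
#     aldos, gldos = galdos(scell, erange=10, enum=200, count=32,
#                           ainit=aldos, ginit=gldos)  # refine the averages
#     print aldos.attrs.count, aldos.attrs.acc
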
def aldos(scell, erange=10, enum=None, estep=None, count=None, tol=None,
          smooth=10, limit=100, kernel='jackson', rcstr_method='std',
          stateclass=None, spr=1, omp=False, num_threads=0, until=None,
          verbose=False, init=None):
    """Wrapper for :func:`kpm.aldos` that uses :class:`cofunc.coFunc`
    objects for input and output."""
    #init_energ = init.x if init else None
    init_dens = init.y if init else None
    init_var = init.attrs.var if init else None
    init_count = init.attrs.count if init else None

    energ, dens, var, count, acc \
        = kpm.aldos(scell, erange=erange, enum=enum, estep=estep,
                    count=count, tol=tol, smooth=smooth, limit=limit,
                    kernel=kernel, rcstr_method=rcstr_method,
                    stateclass=stateclass, spr=spr, omp=omp,
                    num_threads=num_threads, until=until, verbose=verbose,
                    init_dens=init_dens,  # init_energ=init_energ
                    init_count=init_count, init_var=init_var)

    if energ is None and init:
        energ = init.x

    aldos = cofunc.coFunc(energ, dens)
    aldos.attrs.update(erange=erange, enum=enum, estep=estep, tol=tol,
                       smooth=smooth, limit=limit, kernel=kernel,
                       rcstr_method=rcstr_method, stateclass=stateclass,
                       spr=spr, omp=omp, num_threads=num_threads,
                       until=until, verbose=verbose,
                       var=var, count=count, acc=acc)

    return aldos
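
# Usage sketch (illustrative): as with galdos above, a previous result can be
# fed back through "init" to keep averaging over additional states. The
# standard error of the average follows from the stored attrs in the same way
# h5load computes it below.
#
#     a = aldos(scell, erange=10, enum=200, count=16)
#     a = aldos(scell, erange=10, enum=200, count=16, init=a)
#     stderr = numpy.sqrt(a.attrs.var / a.attrs.count)
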
def ldos(mat, state=0, limit=100, erange=10, enum=None, estep=None,
         kernel='jackson', rcstr_method='std', omp=False, num_threads=None,
         rescaled=False, stateclass=''):
    """Wrapper for :func:`kpm.ldos` that returns a :class:`cofunc.coFunc`
    object."""
    energ, dens \
        = kpm.ldos(mat, state=state, limit=limit, erange=erange, enum=enum,
                   estep=estep, kernel=kernel, rcstr_method=rcstr_method,
                   omp=omp, num_threads=num_threads, rescaled=rescaled,
                   stateclass=stateclass)

    ldos = cofunc.coFunc(energ, dens)
    ldos.attrs.update(state=state, limit=limit, erange=erange, enum=enum,
                      estep=estep, kernel=kernel, rcstr_method=rcstr_method,
                      omp=omp, num_threads=num_threads, rescaled=rescaled,
                      stateclass=stateclass)

    return ldos
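
# Usage sketch (illustrative): "mat" is assumed to be the (sparse) Hamiltonian
# matrix of a single sample; the local density of states at index "state"
# comes back as a coFunc whose x/y arrays hold energies and densities.
#
#     l = ldos(mat, state=0, limit=200, erange=10, enum=400)
#     energies, densities = l.x, l.y
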
def h5load(fdpath, dtype=False, dlen=False, x=False, y=False, attrs=False,
           item=None, call=None, dmax=False, dmin=False, var=False,
           stderr=False):
    """Load a dataset from an HDF5 file."""
    # 2013-07-08 - 2014-03-21
    filename, dsetname = h5split(fdpath)
    if not os.path.isfile(filename):
        print >>sys.stderr, 'h5load: cannot load "%s": ' % fdpath + \
                            'no such file or directory'
        sys.exit(1)

    with h5obj.File(filename, 'r') as f:
        found = dsetname in f
        if found:
            data = f[dsetname]
    if not found:
        print >>sys.stderr, 'h5load: cannot load "%s": no such dataset' \
                            % fdpath
        sys.exit(1)

    if dtype:
        data = type(data)
    if x:
        data = data.x
    if y:
        data = data.y
    if attrs:
        data = data.attrs
    if var:
        data = data.a2cf('var')
    elif stderr:
        var = data.attrs.var
        count = data.attrs.count
        stderr = numpy.sqrt(var/count)
        data = cofunc.coFunc(data.x, stderr)
    if item is not None:
        data = data[item]
    if call is not None:
        data = data(call)
    if dmax:
        data = max(data)
    if dmin:
        data = min(data)
    if dlen:
        data = len(data)
    return data
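
# Usage sketch (illustrative): "fdpath" combines file name and dataset name in
# whatever form h5split understands, assumed here to be "file.h5/dataset".
# The keyword flags post-process the loaded object before it is returned.
#
#     a = h5load('results.h5/aldos')               # hypothetical path
#     e = h5load('results.h5/aldos', stderr=True)  # coFunc of sqrt(var/count)
#     n = h5load('results.h5/aldos', dlen=True)    # number of data points
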
def dos(mat, rcount=None, rtol=None, rsmooth=1, limit=100, erange=10,
        enum=None, estep=None, kernel='jackson', rcstr_method='std',
        omp=False, num_threads=None, rescaled=False, until=None,
        verbose=False):
    """Wrapper for :func:`kpm.dos` that uses :class:`cofunc.coFunc` objects
    for input and output."""
    # 2014-01-13
    # var, count, acc?
    energ, dens \
        = kpm.dos(mat, rcount=rcount, rtol=rtol, rsmooth=rsmooth,
                  limit=limit, erange=erange, enum=enum, estep=estep,
                  kernel=kernel, rcstr_method=rcstr_method, omp=omp,
                  num_threads=num_threads, rescaled=rescaled, until=until,
                  verbose=verbose)

    dos = cofunc.coFunc(energ, dens)
    dos.attrs.update(erange=erange, enum=enum, estep=estep, rcount=rcount,
                     rtol=rtol, rsmooth=rsmooth, limit=limit, kernel=kernel,
                     rcstr_method=rcstr_method, omp=omp,
                     num_threads=num_threads, until=until,
                     verbose=verbose)  # var=var, acc=acc

    return dos
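
# Usage sketch (illustrative): compute the total density of states of a single
# matrix from "rcount" random starting vectors; the parameters used are stored
# in the attrs of the returned coFunc.
#
#     d = dos(mat, rcount=50, limit=200, erange=10, enum=400)
#     print d.attrs.rcount, d.attrs.limit
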
def hdf2obj(f, *names):
    """Read Python objects that are stored in an HDF5 file, referenced by the
    h5py.File object identifier f."""
    __created__ = '2011-11-15'
    __modified__ = '2012-11-05'
    # former h5obj._get_obj from 2011-10-25
    # former tb.savehdf from 2011-02-05 until 2011-03-30

    # initialize value array
    values = []
    for name in names:
        if name not in f:
            raise KeyError, \
                'dataset or group "%s" not found in HDF5 file object' % name

        # get object
        obj = f[name]

        # load attributes
        dtype = obj.attrs.get('__DTYPE__', None)
        ntype = obj.attrs.get('__NTYPE__', None)
        module = obj.attrs.get('__MODULE__', None)
        classname = obj.attrs.get('__CLASS__', None)
        pickled = obj.attrs.get('__PICKLED__', False)

        # distinguish data types
        if pickled:
            # then it is easy: just unpickle the object
            try:
                value = cPickle.loads(obj.value)
            except ValueError:
                filename = getattr(f, 'filename', f.file.filename)
                raise ValueError, 'cannot unpickle data "%s" from file "%s"' \
                    % (name, filename)
        elif dtype == 'NoneType':
            value = None
        elif dtype == 'bool':
            value = obj.value
        elif dtype == 'int':
            value = int(obj.value)
        elif dtype == 'long':
            value = long(obj.value)
        elif dtype == 'float':
            value = float(obj.value)
        elif dtype == 'complex':
            value = complex(obj.value)
        elif dtype == 'str':
            value = str(obj.value)
        elif dtype == 'list':
            if type(obj).__name__ == 'Group':
                keys = obj.keys()
                keys.sort()
                value = []
                for key in keys:
                    value.append(hdf2obj(obj, key))
            elif isinstance(obj.value, basestring) \
                    and obj.value == '__EMPTY__':
                value = []
            else:
                # plain lists that were saved as a normal 1D array dataset
                value = list(obj.value)
        elif dtype == 'tuple':
            if type(obj).__name__ == 'Group':
                keys = obj.keys()
                keys.sort()
                value = []
                for key in keys:
                    value.append(hdf2obj(obj, key))
                value = tuple(value)
            elif isinstance(obj.value, basestring) \
                    and obj.value == '__EMPTY__':
                value = ()
            else:
                # plain tuples that were saved as a normal 1D array dataset
                value = tuple(obj.value)
        elif dtype == 'set':
            if type(obj).__name__ == 'Group':
                keys = obj.keys()
                value = []
                for key in keys:
                    value.append(hdf2obj(obj, key))
                value = set(value)
            elif isinstance(obj.value, basestring) \
                    and obj.value == '__EMPTY__':
                # empty sets
                value = set()
            else:
                # sets that were saved as a 1D ndarray
                value = set(obj.value)
        elif dtype == 'frozenset':
            if type(obj).__name__ == 'Group':
                keys = obj.keys()
                value = []
                for key in keys:
                    value.append(hdf2obj(obj, key))
                value = frozenset(value)
            elif isinstance(obj.value, basestring) \
                    and obj.value == '__EMPTY__':
                # empty frozensets
                value = frozenset()
            else:
                # frozensets that were saved as a 1D ndarray
                value = frozenset(obj.value)
        elif dtype in ['struct', 'Bundle']:
            value = bundle.Bundle()
            for key in obj.iterkeys():
                value[key] = hdf2obj(obj, key)
        elif dtype == 'dict':
            if isinstance(obj, h5py.Group):
                value = {}
                for key in obj.iterkeys():
                    value[key] = hdf2obj(obj, key)
            else:
                value = cPickle.loads(obj.value)
            #value = {}
            #for key in obj.iterkeys():
                #ntype = obj[key].attrs.get('__NTYPE__', 'str')
                #if ntype == 'str':
                    #value[key] = obj.get_obj(key)
                #else:
                    #value[eval(key)] = obj.get_obj(key)
        elif dtype == 'ndarray':
            if isinstance(obj.value, basestring) and obj.value == '__EMPTY__':
                value = scipy.array([], dtype=scipy.float64)
            else:
                value = scipy.array(obj.value)
        elif dtype == 'matrix':
            if isinstance(obj.value, basestring) and obj.value == '__EMPTY__':
                value = scipy.matrix([], dtype=scipy.float64)
            else:
                value = scipy.matrix(obj.value)
        elif dtype in ('csc_matrix', 'csr_matrix', 'bsr_matrix', 'lil_matrix',
                       'dok_matrix', 'coo_matrix', 'dia_matrix'):
            # sparse matrices are always saved in CSR format,
            # convert back to the original format
            shape = hdf2obj(obj, 'shape')
            data = hdf2obj(obj, 'data')
            indices = hdf2obj(obj, 'indices')
            indptr = hdf2obj(obj, 'indptr')
            value = scipy.sparse.csr_matrix((data, indices, indptr),
                                            shape=shape).asformat(dtype[:3])
        elif dtype == 'coFunc':
            # load continuous function object
            x = hdf2obj(obj, 'x')
            y = hdf2obj(obj, 'y')
            attrs = hdf2obj(obj, 'attrs')
            value = cofunc.coFunc(x=x, y=y, attrs=attrs)
        elif dtype == 'coFunc2d':
            # load 2D continuous function object
            x = hdf2obj(obj, 'x')
            y = hdf2obj(obj, 'y')
            z = hdf2obj(obj, 'z')
            attrs = hdf2obj(obj, 'attrs')
            value = cofunc.coFunc2d(x=x, y=y, z=z, attrs=attrs)
        else:
            # then assume it is just a normal dataset representing whatever it
            # contains. If obj is an HDF5 group, return the contents in the
            # form of a struct
            if type(obj).__name__ == 'Group':
                value = bundle.Bundle()
                for key in obj.keys():
                    value[key] = hdf2obj(obj, key)
            else:
                value = obj.value

        # collect values
        values.append(value)

    # return values
    if len(values) == 1:
        return values[0]
    else:
        return values
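
# Usage sketch (illustrative): read one or more named objects back from an
# open h5py.File handle; when several names are given, a list of values is
# returned. 'results.h5' and the dataset names are placeholders.
#
#     with h5py.File('results.h5', 'r') as f:
#         aldos = hdf2obj(f, 'aldos')
#         aldos, gldos = hdf2obj(f, 'aldos', 'gldos')
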