def hash(self, funct, *args, **kwargs):
    """Create a hash value."""
    pg.tic()
    functInfo = self.functInfo(funct)
    funcHash = strHash(functInfo)
    versionHash = strHash(pg.versionStr())
    codeHash = strHash(inspect.getsource(funct))

    argHash = 0
    for a in args:
        if isinstance(a, str):
            argHash = argHash ^ strHash(a)
        elif isinstance(a, list):
            for item in a:
                if isinstance(item, str):
                    argHash = argHash ^ strHash(item)
                else:
                    argHash = argHash ^ hash(item)
        else:
            argHash = argHash ^ hash(a)

    for k, v in kwargs.items():
        if isinstance(v, str):
            argHash = argHash ^ strHash(v)
        else:
            argHash = argHash ^ hash(v)

    pg.debug("Hashing took:", pg.dur(), "s")
    return funcHash ^ versionHash ^ codeHash ^ argHash
def syscallTetgen(filename, quality=1.2, area=0, preserveBoundary=False,
                  verbose=False):
    """Create a mesh with :term:`Tetgen` from file.

    Create a :term:`Tetgen` :cite:`Si2004` mesh from a PLC.
    Forwards to a system call to tetgen, which must be known to your system.

    Parameters
    ----------
    filename: str
    quality: float [1.2]
        Refines mesh (to improve mesh quality). [1.1 ... ]
    area: float [0.0]
        Maximum cell size (m³)
    preserveBoundary: bool [False]
        Preserve PLC boundary mesh
    verbose: bool [False]
        be verbose

    Returns
    -------
    mesh : :gimliapi:`GIMLI::Mesh`
    """
    filebody = filename.replace('.poly', '')

    syscal = 'tetgen -pzAC'
    if area > 0:
        syscal += 'a' + str(area)
    else:
        syscal += 'a'
    syscal += 'q' + str(quality)

    if not verbose:
        syscal += 'Q'
    else:
        syscal += 'V'

    if preserveBoundary:
        syscal += 'Y'

    syscal += ' ' + filebody + '.poly'

    pg.debug(syscal)
    system(syscal)
    system('meshconvert -it -BD -o ' + filebody + ' ' + filebody + '.1')
    try:
        os.remove(filebody + '.1.node')
        os.remove(filebody + '.1.ele')
        os.remove(filebody + '.1.face')
    except BaseException as e:
        print(e)
    mesh = pg.Mesh(filebody)
    return mesh
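# A minimal usage sketch for syscallTetgen, not part of the original source.
# It assumes the tetgen and meshconvert binaries are installed and on the
# PATH, and that pygimli.meshtools provides createCube and exportPLC as in
# current pyGIMLi.
def _exampleSyscallTetgen():
    import pygimli.meshtools as mt
    cube = mt.createCube(size=[1.0, 1.0, 1.0])   # simple closed PLC
    mt.exportPLC(cube, 'cube.poly')              # write tetgen input file
    mesh = syscallTetgen('cube.poly', quality=1.2, verbose=True)
    print(mesh)                                  # tetrahedral mesh read back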
def restore(self):
    """Read data from json infos."""
    if os.path.exists(self._name + '.json'):

        # Fricking mpl kills the locale setting to system default .. this
        # went horribly wrong for german 'decimal_point': ','
        pg.checkAndFixLocaleDecimal_point(verbose=False)

        try:
            with open(self._name + '.json') as file:
                self.info = json.load(file)

            # if len(self.info['type']) != 1:
            #     pg.error('only single return caches supported for now.')

            # pg._y(pg.pf(self.info))

            if self.info['type'] == 'DataContainerERT':
                self._value = pg.DataContainerERT(self.info['file'],
                                                  removeInvalid=False)
                # print(self._value)
            elif self.info['type'] == 'RVector':
                self._value = pg.Vector()
                self._value.load(self.info['file'], format=pg.core.Binary)
            elif self.info['type'] == 'Mesh':
                pg.tic()
                self._value = pg.Mesh()
                self._value.loadBinaryV2(self.info['file'] + '.bms')
                pg.debug("Restoring cache took:", pg.dur(), "s")
            elif self.info['type'] == 'ndarray':
                self._value = np.load(self.info['file'] + '.npy',
                                      allow_pickle=True)
            elif self.info['type'] == 'Cm05Matrix':
                self._value = pg.matrix.Cm05Matrix(self.info['file'])
            elif self.info['type'] == 'GeostatisticConstraintsMatrix':
                self._value = pg.matrix.GeostatisticConstraintsMatrix(
                    self.info['file'])
            else:
                # default: try numpy
                self._value = np.load(self.info['file'] + '.npy',
                                      allow_pickle=True)

            if self.value is not None:
                self.info['restored'] = self.info['restored'] + 1
                self.updateCacheInfo()
                pg.info('Cache {3} restored ({1}s x {0}): {2}'.format(
                    self.info['restored'], round(self.info['dur'], 1),
                    self._name, self.info['codeinfo']))
            else:
                pg.warn('Could not restore cache of type {0}.'.format(
                    self.info['type']))

            pg.debug("Restoring cache took:", pg.dur(), "s")
        except Exception as e:
            import traceback
            traceback.print_exc(file=sys.stdout)
            print(self.info)
            pg.error('Cache restoring failed.')
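# Illustrative sketch of the cache-info JSON that restore() consumes, inferred
# from the keys used above ('type', 'file', 'codeinfo', 'restored', 'dur');
# the concrete values here are hypothetical.
#
# {
#     "type": "Mesh",
#     "file": "/path/to/cache/1234567890",
#     "codeinfo": "createMesh",
#     "restored": 3,
#     "dur": 12.7
# }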
def setCbarLevels(cbar, cMin=None, cMax=None, nLevs=5, levels=None):
    """Set colorbar levels given a number of levels and min/max values."""
    if cMin is None:
        if hasattr(cbar, 'mappable'):
            cMin = cbar.mappable.get_clim()[0]
        else:
            pg.error('no cbar mappable. Cannot find cmin')
    if cMax is None:
        if hasattr(cbar, 'mappable'):
            cMax = cbar.mappable.get_clim()[1]
        else:
            pg.error('no cbar mappable. Cannot find cmax')

    if cMin == cMax:
        cMin *= 0.999
        cMax *= 1.001

    norm = None
    if hasattr(cbar, 'mappable'):
        norm = cbar.mappable.norm
    elif hasattr(cbar, 'norm'):
        norm = cbar.norm
    # norm.clip = True

    if levels is not None:
        cbarLevels = levels
    else:
        if isinstance(norm, mpl.colors.LogNorm):
            cbarLevels = np.logspace(np.log10(cMin), np.log10(cMax), nLevs)
        else:
            # if cMax < cMin:
            cbarLevels = np.linspace(cMin, cMax, nLevs)

    # FIXME: [10.1, 10.2, 10.3] mapped to [10 10 10]
    cbarLevelsString = []
    if np.all(np.array(cbarLevels) < 1e-2):
        pg.debug("All values smaller than 1e-2, avoiding additional rounding.")
        roundValue = False
    else:
        roundValue = True

    for i in cbarLevels:
        cbarLevelsString.append(prettyFloat(i, roundValue))

    if hasattr(cbar, 'mappable'):
        # cbar.set_clim(cMin, cMax)
        cbar.mappable.set_clim(vmin=cMin, vmax=cMax)

    cbar.set_ticks(cbarLevels)
    cbar.set_ticklabels(cbarLevelsString)
    cbar.draw_all()  # necessary since mpl 3.0
    cbar.ax.minorticks_off()
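# A minimal usage sketch for setCbarLevels, not from the original source; it
# assumes prettyFloat is importable from the surrounding module and a
# matplotlib version where Colorbar.draw_all (used above) is still available.
def _exampleSetCbarLevels():
    import numpy as np
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    im = ax.imshow(np.random.rand(10, 10) * 100.0)
    cbar = fig.colorbar(im, ax=ax)
    setCbarLevels(cbar, cMin=0.0, cMax=100.0, nLevs=5)  # ticks 0, 25, ..., 100
    plt.show()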
def VESManagerApp():
    """Call VESManager as console app."""
    parser = VESManager.createArgParser(dataSuffix='ves')
    options = parser.parse_args()

    verbose = not options.quiet
    if verbose:
        print("VES Manager console application.")
        print(options._get_kwargs())

    mgr = VESManager(verbose=verbose, debug=pg.debug())

    ab2, mn2, ra, err = mgr.loadData(options.dataFileName)

    mgr.showData(ra, err)
    mgr.invert(ra, err, ab2, mn2,
               maxIter=options.maxIter,
               lam=options.lam)
    mgr.showResultAndFit()
    pg.wait()
def hash(self, funct, *args, **kwargs):
    """Create a hash value."""
    pg.tic()
    functInfo = self.functInfo(funct)
    funcHash = strHash(functInfo)
    versionHash = strHash(pg.versionStr())
    codeHash = strHash(inspect.getsource(funct))

    argHash = 0
    for a in args:
        argHash = argHash ^ valHash(a)
    for k, v in kwargs.items():
        argHash = argHash ^ valHash(k) ^ valHash(v)

    pg.debug("Hashing took:", pg.dur(), "s")
    return funcHash ^ versionHash ^ codeHash ^ argHash
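# valHash is used above but not shown in this excerpt. A minimal sketch,
# assuming it mirrors the per-type dispatch of the older hash() version
# further up (strings via strHash, lists element-wise, everything else via
# the builtin hash); the real implementation may differ.
def valHash(a):
    if isinstance(a, str):
        return strHash(a)
    if isinstance(a, list):
        h = 0
        for item in a:
            h = h ^ valHash(item)   # combine element hashes with xor
        return h
    return hash(a)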
def setCbarLevels(cbar, cMin=None, cMax=None, nLevs=5):
    """Set colorbar levels given a number of levels and min/max values."""
    if cMin is None:
        cMin = cbar.get_clim()[0]
    if cMax is None:
        cMax = cbar.get_clim()[1]

    if cMin == cMax:
        cMin *= 0.999
        cMax *= 1.001

    norm = None
    if hasattr(cbar, 'mappable'):
        norm = cbar.mappable.norm
    elif hasattr(cbar, 'norm'):
        norm = cbar.norm

    if isinstance(norm, mpl.colors.LogNorm):
        cbarLevels = np.logspace(np.log10(cMin), np.log10(cMax), nLevs)
    else:
        cbarLevels = np.linspace(cMin, cMax, nLevs)

    # FIXME: [10.1, 10.2, 10.3] mapped to [10 10 10]
    cbarLevelsString = []
    if np.all(np.array(cbarLevels) < 1e-2):
        pg.debug("All values smaller than 1e-2, avoiding additional rounding.")
        roundValue = False
    else:
        roundValue = True

    for i in cbarLevels:
        cbarLevelsString.append(prettyFloat(i, roundValue))
        # print(i, prettyFloat(i))

    if hasattr(cbar, 'mappable'):
        cbar.mappable.set_clim(vmin=cMin, vmax=cMax)
        cbar.set_clim(cMin, cMax)

    cbar.set_ticks(cbarLevels)
    cbar.set_ticklabels(cbarLevelsString)
    cbar.draw_all()  # necessary since mpl 3.0
    cbar.ax.minorticks_off()
def _initFunction(self, funct):
    """Init any function and interpret possible args and kwargs."""
    self.function = funct
    # the first varname is supposed to be f or freqs
    self.dataSpaceName = funct.__code__.co_varnames[0]
    pg.debug('data space:', self.dataSpaceName)

    args = funct.__code__.co_varnames[1:funct.__code__.co_argcount]
    for varname in args:
        if varname != 'verbose':
            pg.debug('add parameter:', varname)
            self._params[varname] = 0.0

    nPara = len(self._params.keys())
    for i, [k, p] in enumerate(self._params.items()):
        self.addParameter(k, id=i,
                          cType=0, single=True,
                          trans=self.defaultModelTrans,
                          startModel=1)
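# Illustration of the introspection above, not from the original source: for
# a hypothetical forward function, co_varnames[0] becomes the data space name
# and the remaining named arguments become parameters.
def _exampleColeColeLike(f, rho, tau):   # hypothetical signature
    return rho * f * tau

# _initFunction(_exampleColeColeLike) would set
#   dataSpaceName = 'f'   and   _params = {'rho': 0.0, 'tau': 0.0}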
def run(self, dataVals, errorVals, mesh=None, zWeight=None, **kwargs):
    """Run the inversion for given data and error values, optionally a mesh."""
    if mesh is not None:
        self.fop.setMesh(mesh)  # maybe move this to the fop

    if zWeight is None:
        zWeight = self._zWeight
    self.fop.setRegionProperties('*', zWeight=zWeight)  # maybe move this to the fop

    pg.debug('run with: ', self.fop.regionProperties())

    #### more mesh related inversion attributes to set?

    # ensure the mesh is generated
    self.fop.mesh()
    self.model = super(MeshInversion, self).run(dataVals, errorVals, **kwargs)
    return self.model
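# Minimal usage sketch for MeshInversion.run, not from the original source;
# the forward operator, mesh, and data/error vectors are hypothetical
# placeholders for whatever the surrounding manager provides.
#
#   inv = MeshInversion()
#   inv.fop = myForwardOperator    # some pg.Modelling instance
#   model = inv.run(dataVals, errorVals, mesh=myMesh, zWeight=0.5, lam=20)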
def main():
    """Main"""
    parser = MethodManager.createArgParser(dataSuffix='sgt')
    options = parser.parse_args()

    ra = Refraction(verbose=not options.quiet, debug=pg.debug())

    ra.loadData(options.dataFileName)
    ra.showData()
    ra.showVA()
    ra.createMesh(depth=options.depth)
    ra.showMesh()
    ra.invert(lam=options.lam, max_iter=options.maxIter,
              robustData=options.robustData, blockyModel=options.blockyModel)
    ra.showResult()
def importAsciiColumns(filename, verbose=False, return_header=False):
    """Import any ERT data file organized in columns with column header.

    Input can be:

    * Terrameter LS or SAS Ascii Export format, e.g.

      Time MeasID DPID Channel A(x) A(y) A(z) B(x) B(y) B(z) M(x) M(y) M(z) \
      N(x) N(y) N(z) F(x) F(y) F(z) Note I(mA) Uout(V) U(V) SP(V) R(O) \
      Var(%) Rhoa Cycles Pint Pext(V) T(°C) Lat Long
      2016-09-14 07:01:56 73 7 1 8 1 1 20 1 1 12 1 1 \
      16 1 1 14 1 2.076 99.8757 107.892 0.0920761 0 0.921907 \
      0.196302 23.17 1 12.1679 12.425 42.1962 0 0

    * Resecs Output format
    """
    data = pg.DataContainerERT()
    header = {}
    with open(filename, 'r', encoding='iso-8859-15') as fi:
        content = fi.readlines()
        d = readAsDictionary(content, sep='\t')
        if len(d) < 2:
            d = readAsDictionary(content)

        nData = len(next(iter(d.values())))
        data.resize(nData)
        if 'Spa.1' in d:  # Syscal Pro
            abmn = ['Spa.1', 'Spa.2', 'Spa.3', 'Spa.4']
            if verbose:
                pg.debug("detected Syscal file format")
        elif 'A(x)' in d:  # ABEM Terrameter
            abmn = ['A', 'B', 'M', 'N']
            if verbose:
                pg.debug("detected ABEM file format")
        elif 'xA' in d:  # Workbench TX2 processed data
            abmn = ['xA', 'xB', 'xM', 'xN']
            if verbose:
                pg.debug("detected Workbench file format")
        elif 'C1(x)' in d or 'C1(xm)' in d:  # Resecs
            abmn = ['C1', 'C2', 'P1', 'P2']
            if verbose:
                pg.debug("detected RESECS file format")
        else:
            pg.debug("no electrode positions found!")
            pg.debug("Keys are:", d.keys())
            raise Exception("No electrode positions found!")

        for i in range(nData):
            if abmn[0] + '(z)' in d:
                eID = [data.createSensor([d[se + '(x)'][i], d[se + '(y)'][i],
                                          d[se + '(z)'][i]]) for se in abmn]
            elif abmn[0] + '(zm)' in d:
                eID = [data.createSensor([d[se + '(xm)'][i], d[se + '(ym)'][i],
                                          d[se + '(zm)'][i]]) for se in abmn]
            elif abmn[0] + '(y)' in d:
                eID = [data.createSensor([d[se + '(x)'][i], d[se + '(y)'][i],
                                          0.]) for se in abmn]
            elif abmn[0] + '(ym)' in d:
                eID = [data.createSensor([d[se + '(xm)'][i], d[se + '(ym)'][i],
                                          0.]) for se in abmn]
            elif abmn[0] + '(x)' in d:
                eID = [data.createSensor([d[se + '(x)'][i], 0., 0.])
                       for se in abmn]
            elif abmn[0] + '(xm)' in d:
                eID = [data.createSensor([d[se + '(xm)'][i], 0., 0.])
                       for se in abmn]
            else:
                eID = [data.createSensor([d[se][i], 0., 0.]) for se in abmn]

            data.createFourPointData(i, *eID)

        # data.save('tmp.shm', 'a b m n')
        tokenmap = {'I(mA)': 'i', 'I': 'i', 'In': 'i', 'Vp': 'u',
                    'VoltageV': 'u', 'U': 'u', 'U(V)': 'u', 'UV': 'u',
                    'R(Ohm)': 'r', 'RO': 'r', 'R(O)': 'r', 'Res': 'r',
                    'Rho': 'rhoa', 'AppROhmm': 'rhoa', 'Rho-a(Ohm-m)': 'rhoa',
                    'Rho-a(Om)': 'rhoa', 'Var(%)': 'err', 'D': 'err',
                    'Dev.': 'err', 'Dev': 'err', 'M': 'ma', 'P': 'ip',
                    'IP sum window': 'ip', 'Time': 't'}

        # Unit conversions (mA, mV, %), partly automatically assumed
        unitmap = {'I(mA)': 1e-3, 'Var(%)': 0.01,  # ABEM
                   'U': 1e-3, 'I': 1e-3, 'D': 0.01,  # Resecs
                   'Dev.': 0.01, 'In': 1e-3, 'Vp': 1e-3}  # Syscal

        abmn = ['a', 'b', 'm', 'n']
        if 'Cycles' in d:
            d['stacks'] = d['Cycles']

        for key in d.keys():
            vals = np.asarray(d[key])
            if key.startswith('IP sum window'):  # there is a trailing number
                key = 'IP sum window'  # apparently not working
            if np.issubdtype(vals.dtype, np.floating) or \
                    np.issubdtype(vals.dtype, np.signedinteger):  # 'float'/'int'
                if key in tokenmap:  # use the standard (i, u, rhoa) key
                    if key not in abmn:
                        if verbose:
                            pg.debug("Setting", tokenmap[key], "from", key)
                        data.set(tokenmap[key],
                                 vals * unitmap.get(key, 1.0))
                else:  # use the original key if not XX(x) etc.
                    if not re.search('([x-z])', key) and key not in abmn:
                        data.set(key.replace(' ', '_'), d[key])

        r = data('u') / data('i')
        if 'R(O)' in d:
            if np.linalg.norm(r - d['R(O)']) < 1e4:  # no idea what's that for
                data.set('r', r)
            else:
                pg.debug("Warning! File inconsistent")

    data.sortSensorsX()
    if return_header:
        return data, header
    else:
        return data
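# Minimal usage sketch for importAsciiColumns, not from the original source;
# 'terrameter_export.txt' is a hypothetical Terrameter LS ASCII export file.
def _exampleImportAsciiColumns():
    data = importAsciiColumns('terrameter_export.txt', verbose=True)
    print(data)        # pg.DataContainerERT with a, b, m, n and i, u, ...
    print(data('r'))   # resistances, set if U and I were consistent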
#)
pg.version()

# test pygimli log
pg.info("Start numeric log test." + str(pg.log(pg.RVector(1, 1.))))
pg.warn("Start warning test.")

def testTraceback1():
    def testTraceback2():
        pg.error("Start error test.: int", 1, " vec", pg.RVector(2))
    testTraceback2()

testTraceback1()

# pg.critical("Start critical test.")

pg.debug("debug 0")
pg.setDebug(1)
pg.debug("debug ON")
pg.setThreadCount(2)

# should not be printed out
pg.setDebug(0)
pg.debug("debug OFF")
pg.setThreadCount(2)

# test core log (should not be used outside the core)
pg.log(pg.Info, "core log ")
pg.log(pg.Warning, "core log ")
pg.log(pg.Error, "core log ")
pg.log(pg.Critical, "core log ")
# log = logging.getLogger('pyGIMLi')

# logging.basicConfig(level=logging.DEBUG,
#                     format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
#                     datefmt='%m/%d/%Y %H:%M:%S',
#                     # filename='example.log'
#                     )
pg.version()

# test pygimli log
pg.info("Start numeric log test." + str(pg.log(pg.RVector(1, 1.))))
pg.warn("Start warning test.")
pg.error("Start error test.: int", 1, " vec", pg.RVector(2))
# pg.critical("Start critical test.")

pg.debug("debug 0")
pg.setDebug(1)
pg.debug("debug ON")
pg.setThreadCount(2)

# should not be printed out
pg.setDebug(0)
pg.debug("debug OFF")
pg.setThreadCount(2)

# test core log (should not be used outside the core)
pg.log(pg.Info, "core log ")
pg.log(pg.Warning, "core log ")
pg.log(pg.Error, "core log ")
pg.log(pg.Critical, "core log ")
testVerboseDecorator2()
pg.setVerbose(True)
testVerboseDecorator2()

@pg.d
def testDebugDecorator():
    pg.debug('testDebugDecorator should always be seen, '
             'even if debug is set to false')

testDebugDecorator()
pg.d(testDebugDecorator())

pg.debug("debug 0")
pg.setDebug(1)
pg.debug("debug ON")
pg.setThreadCount(2)
pg.debug("debug with a trace", withTrace=True)

# should not be printed out
pg.setDebug(0)
pg.debug("debug OFF")
pg.setThreadCount(2)

# test core log (should not be used outside the core)
pg.log(pg.core.Info, "core log ")
pg.log(pg.core.Warning, "core log ")
PyQt5 = pg.optImport('PyQt5', requiredFor="use pyGIMLi's 3D viewer")
pyvista = pg.optImport('pyvista', requiredFor="properly visualize 3D data")

if pyvista is None:
    view3Dcallback = 'showMesh3DFallback'
    pg.rc['view3D'] = 'fallback'
else:
    view3Dcallback = 'showMesh3DVista'
    vers_users = pyvista.__version__
    # version string to comparable float, e.g. '0.23.2' -> 0.232
    vers_userf = float(pyvista.__version__[::-1].replace('.', '', 1)[::-1])
    vers_needs = '0.23.2'
    vers_needf = 0.232

    if vers_userf < vers_needf:
        pg.warn("Please consider updating PyVista to at least {}".format(
            vers_needs))

    pg.debug("Using pyvista: {}".format(vers_users))
    from pygimli.viewer.pv import drawModel

# True for Jupyter notebooks and sphinx-builds
_inlineBackend_ = not pg.viewer.isInteractive()

if PyQt5 is None or _inlineBackend_:
    _inlineBackend_ = True
else:
    from .pv.show3d import Show3D
    from PyQt5 import Qt
    _inlineBackend_ = False


def showMesh3D(mesh, data, **kwargs):
    """
def createCylinder(radius, height, nSegments=8, area=0.0, pos=None, marker=1):
    """Create plc of a cylinder.

    Out of core wrapper for dcfemlib::polytools.

    Note, there is a bug in the old polytools which ignores the area settings
    for marker == 0.

    Parameters
    ----------
    radius : float
        Radius of the cylinder.
    height : float
        Height of the cylinder.
    nSegments : int [8]
        Number of segments of the cylinder.
    area : float [0.0]
        Largest size for the resulting tetrahedrons.
    pos : pg.Pos [None]
        The center position, default is at the origin.
    marker : int [1]
        Cell marker for the resulting tetrahedrons.

    Returns
    -------
    poly : :gimliapi:`GIMLI::Mesh`
        The resulting polygon is a :gimliapi:`GIMLI::Mesh`.
    """
    tmp = pg.optImport('tempfile')
    _, namePLC = tmp.mkstemp(suffix='.poly')
    pg.debug(namePLC)

    syscal = 'polyCreateCube -Z ' \
        + ' -s ' + str(nSegments) \
        + ' -m ' + str(marker) \
        + ' -a ' + str(area)

    syscal = syscal + ' ' + namePLC

    pg.debug(syscal)
    os.system(syscal)

    poly = readPLC(namePLC)
    poly.scale([radius, radius, height])

    if pos is not None:
        poly.translate(pos)

    try:
        os.remove(namePLC)
    except OSError:
        print("can't remove:", namePLC)

    return poly
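# Minimal usage sketch for createCylinder, not from the original source; it
# assumes the dcfemlib polyCreateCube binary is installed and on the PATH.
def _exampleCreateCylinder():
    cyl = createCylinder(radius=1.0, height=2.0, nSegments=16,
                         area=0.1, marker=2)
    print(cyl)  # PLC as GIMLI::Mesh, scaled to r=1, h=2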
def streamlineDir(mesh, field, startCoord, dLengthSteps, dataMesh=None,
                  maxSteps=150, down=True, verbose=False, coords=(0, 1)):
    """down = -1, up = 1, both = 0"""
    xd = []
    yd = []
    vd = []

    pot = None
    vx = None
    vy = None
    isVectorData = False

    if isinstance(field, pg.core.R3Vector):
        field = field.array()

    if hasattr(field[0], '__len__'):
        if abs(max(field[:, 0])) == 0 and abs(max(field[:, 1])) == 0:
            raise Exception("No data range streamline: min/max == ",
                            min(field[:, 0]))

        vx = pg.Vector(field[:, 0])
        vy = pg.Vector(field[:, 1])

        isVectorData = True
    else:
        if min(field) == max(field):
            raise Exception("No scalar data range for any gradients "
                            " to draw a streamline: min/max == ", min(field))

        if dataMesh is not None:
            if len(field) == dataMesh.nodeCount():
                pot = pg.Vector(field)
            elif len(field) == dataMesh.cellCount():
                pot = pg.core.cellDataToPointData(dataMesh, field)
            else:
                print(len(field), dataMesh)
                raise Exception(
                    "Data length (%i) for streamline is "
                    "neither nodeCount (%i) nor cellCount (%i)" %
                    (len(field), dataMesh.nodeCount(), dataMesh.cellCount()))
        else:
            if len(field) == mesh.nodeCount():
                pot = pg.Vector(field)
            elif len(field) == mesh.cellCount():
                pot = pg.core.cellDataToPointData(mesh, field)
            else:
                print(len(field), dataMesh)
                raise Exception(
                    "Data length (%i) for streamline is "
                    "neither nodeCount (%i) nor cellCount (%i)" %
                    (len(field), mesh.nodeCount(), mesh.cellCount()))

    direction = 1
    if down:
        direction = -1  # search downward

    pos = pg.RVector3(startCoord)
    c = mesh.findCell(startCoord)

    # stream line starting point
    if c is not None:
        dLength = c.center().dist(c.node(0).pos()) / dLengthSteps
        xd.append(pos[coords[0]])
        yd.append(pos[coords[1]])
        vd.append(-1)

    lastC = c
    lastU = -direction * 1e99

    d = None
    while c is not None and len(xd) < maxSteps:
        # valid .. temporary check if there is already a stream within the cell
        if not c.valid():
            break

        if isVectorData:
            u = 0.
            if len(vx) == mesh.cellCount():
                d = pg.RVector3(vx[c.id()], vy[c.id()])
            elif len(vx) == mesh.nodeCount():
                d = pg.RVector3(c.pot(pos, vx), c.pot(pos, vy))
            elif dataMesh:
                cd = dataMesh.findCell(pos)
                if cd is None:
                    raise Exception("Cannot find " + str(pos) + " dataMesh")

                if len(vx) == dataMesh.cellCount():
                    d = pg.RVector3(vx[cd.id()], vy[cd.id()])
                elif len(vx) == dataMesh.nodeCount() and \
                        len(vy) == dataMesh.nodeCount():
                    d = pg.RVector3(cd.pot(pos, vx), cd.pot(pos, vy))
                else:
                    print(dataMesh, len(vx), len(vy))
                    raise Exception("data size wrong")
            else:
                print("mesh:", mesh, len(vx), len(vy))
                raise Exception("Data length is neither node size "
                                "nor cell size.")
        else:
            if dataMesh:
                cd = dataMesh.findCell(pos)
                if not cd:
                    break

                d = cd.grad(pos, pot)
                u = cd.pot(pos, pot)
            else:
                d = c.grad(pos, pot)
                u = c.pot(pos, pot)

        # always go u down
        dAbs = d.length()
        # print("cell:", c.id(), u, d, dAbs)

        if dAbs == 0.0:
            # print(d, "check this in streamlineDir(",)
            break

        if down:
            if u > lastU:
                break
        else:
            if u < lastU:
                break

        # * min(1.0, ((startCoord - pos).length()))
        pos += direction * d / dAbs * dLength
        c = mesh.findCell(pos, False)

        # Change cell here .. set old cell to be processed
        if c is not None:
            xd.append(pos[coords[0]])
            yd.append(pos[coords[1]])

            # set the starting value here
            if vd[0] == -1:
                vd[0] = dAbs
            vd.append(dAbs)

            # check for degenerating stream
            if len(xd) > 2:
                pos0 = pg.Pos(xd[-3], yd[-3])
                pos1 = pg.Pos(xd[-2], yd[-2])
                pos2 = pg.Pos(xd[-1], yd[-1])
                if pos0.dist(pos2) < pos0.dist(pos1):
                    pg.debug('degenerating stream aborted')
                    break

            # If the new cell is different from the current one, we move into
            # the new cell and make the last invalid ..
            # the last active contains a stream element
            if c.id() != lastC.id():
                lastC.setValid(False)
                lastC = c
                dLength = c.center().dist(c.node(0).pos()) / dLengthSteps
        else:
            # There is no new cell .. the last active contains a stream element
            lastC.setValid(False)

        lastU = u
        if verbose:
            print(pos, u)

    # Stream line has stopped and the current cell (if there is one) ..
    # .. contains a stream element
    if c is not None:
        c.setValid(False)

    if down:
        xd.reverse(), yd.reverse(), vd.reverse()

    return xd, yd, vd
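# Minimal usage sketch for streamlineDir, not from the original source; it
# traces one streamline downward through a synthetic scalar field on a small
# 2D grid, assuming pygimli.meshtools.createGrid as in current pyGIMLi.
def _exampleStreamlineDir():
    import pygimli.meshtools as mt
    grid = mt.createGrid(x=range(11), y=range(11))
    pot = [n.pos()[1] for n in grid.nodes()]   # potential rising with y
    xd, yd, vd = streamlineDir(grid, pot, startCoord=[5.0, 5.0],
                               dLengthSteps=5, down=True)
    print(len(xd), "points along the streamline")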