def test_utils_contaminate_same_seed():
    "utils.contaminate uses same noise using same random seed"
    size = 1000
    data = numpy.linspace(-1000, 1000, size)
    noise = 10
    for seed in numpy.random.randint(low=0, high=10000, size=20):
        d1 = utils.contaminate(data, noise, seed=seed)
        d2 = utils.contaminate(data, noise, seed=seed)
        assert numpy.all(d1 == d2)
def test_utils_contaminate_diff():
    "utils.contaminate uses diff noise"
    size = 1235
    data = numpy.linspace(-100., 12255., size)
    noise = 244.4
    for i in range(20):
        d1 = utils.contaminate(data, noise)
        d2 = utils.contaminate(data, noise)
        assert numpy.all(d1 != d2)
def test_utils_contaminate_seed_noseed():
    "utils.contaminate uses diff noise after using random seed"
    size = 1000
    data = numpy.linspace(-1000, 1000, size)
    noise = 10
    seed = 45212
    d1 = utils.contaminate(data, noise, seed=seed)
    d2 = utils.contaminate(data, noise, seed=seed)
    assert numpy.all(d1 == d2)
    d3 = utils.contaminate(data, noise)
    assert numpy.all(d1 != d3)
def test_utils_contaminate():
    "utils.contaminate generates noise with 0 mean and right stddev"
    size = 10**6
    data = numpy.zeros(size)
    std = 4.213
    for i in range(20):
        noise = utils.contaminate(data, std)
        assert abs(noise.mean()) < 10**-10, 'mean:%g' % (noise.mean())
        assert abs(noise.std() - std) / std < 0.01, 'std:%g' % (noise.std())
def test_utils_contaminate_seed():
    "utils.contaminate noise with 0 mean and right stddev using random seed"
    size = 10**6
    data = numpy.zeros(size)
    std = 4400.213
    for i in xrange(20):
        noise = utils.contaminate(data, std, seed=i)
        assert abs(noise.mean()) < 10**-10, 's:%d mean:%g' % (i, noise.mean())
        assert abs(noise.std() - std) / std < 0.01, 's:%d std:%g' % (i, noise.std())
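# The tests above pin down the contract exercised here: utils.contaminate adds
# zero-mean Gaussian noise with the requested standard deviation, and is
# reproducible when a seed is given. A minimal sketch of a function with that
# contract follows; it is an illustration only, NOT the fatiando
# implementation. The name contaminate_sketch is made up, and the handling of
# percent/return_stddev (stddev taken as a fraction of the largest absolute
# data value) is an assumption based on how the examples below call it.
import numpy


def contaminate_sketch(data, stddev, percent=False, return_stddev=False,
                       seed=None):
    "Add zero-mean pseudo-random Gaussian noise to an array (sketch only)."
    data = numpy.asarray(data)
    if percent:
        # Assumed convention: stddev given as a fraction of max(|data|)
        stddev = stddev * abs(data).max()
    rng = numpy.random.RandomState(seed)
    noisy = data + rng.normal(loc=0, scale=stddev, size=data.shape)
    if return_stddev:
        return noisy, stddev
    return noisy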
def update(self):
    self.predtts = utils.contaminate(
        fatiando.seismic.profile.vertical(self.thickness, self.velocity,
                                          self.zp),
        self.error, percent=True)
    self.predplot.set_data(self.predtts, self.zp)
    if self.tts is not None:
        xmin = min(self.predtts.min(), self.tts.min())
        xmax = max(self.predtts.max(), self.tts.max())
    else:
        xmin = self.predtts.min()
        xmax = self.predtts.max()
    if xmin != xmax:
        self.dcanvas.set_xlim(xmin, xmax)
def update(self):
    if self.polygons:
        polys = []
        for p, d in zip(self.polygons, self.densities):
            polys.append(Polygon(1000. * numpy.array(p), {'density': d}))
        self.predgz = utils.contaminate(
            talwani.gz(self.xp, self.zp, polys), self.error)
    else:
        self.predgz = numpy.zeros_like(self.xp)
    self.predplot.set_data(self.xp * 0.001, self.predgz)
    if self.gz is not None:
        ymin = min(self.predgz.min(), self.gz.min())
        ymax = max(self.predgz.max(), self.gz.max())
    else:
        ymin = self.predgz.min()
        ymax = self.predgz.max()
    if ymin != ymax:
        self.dcanvas.set_ylim(ymin, ymax)
    self.draw()
from matplotlib import pyplot
import numpy

from fatiando.heat import climatesignal
from fatiando.inversion.gradient import levmarq
from fatiando import vis, utils

params = __import__('exercicio3e4_entrada')
zp = numpy.arange(0, 100, 1)
temp, error = utils.contaminate(
    climatesignal.linear(params.amplitude, params.idade, zp),
    params.ruido, percent=True, return_stddev=True)
solver = levmarq(initial=params.inicial)
p, residuals = climatesignal.invert_linear(temp, zp, solver)
est_amp, est_age = p

pyplot.figure(figsize=(12, 5))
pyplot.subplot(1, 2, 1)
pyplot.title("Sinal climatico")
pyplot.plot(temp, zp, 'ok', label='Observado')
pyplot.plot(temp - residuals, zp, '--r', linewidth=3, label='Predito')
pyplot.legend(loc='lower right', numpoints=1)
pyplot.xlabel("Temperatura (C)")
pyplot.ylabel("Z")
pyplot.ylim(100, 0)
ax = pyplot.subplot(1, 2, 2)
ax2 = pyplot.twinx()
pyplot.title("Idade e amplitude")
width = 0.3
ax.bar([1 - width], [params.idade], width, color='b', label="Verdadeiro")
(more complex model + noisy data)
"""
from fatiando import gridder, mesher, utils
from fatiando.gravmag import prism, imaging
from fatiando.vis import mpl, myv

# Make some synthetic gravity data from a simple prism model
model = [
    mesher.Prism(-4000, 0, -4000, -2000, 2000, 5000, {'density': 1200}),
    mesher.Prism(-1000, 1000, -1000, 1000, 1000, 7000, {'density': -800}),
    mesher.Prism(2000, 4000, 3000, 4000, 0, 2000, {'density': 600})
]
# Calculate on a scatter of points to show that migration doesn't need gridded
# data
xp, yp, zp = gridder.scatter((-6000, 6000, -6000, 6000), 1000, z=-10)
gz = utils.contaminate(prism.gz(xp, yp, zp, model), 0.1)

# Plot the data
shape = (50, 50)
mpl.figure()
mpl.axis('scaled')
mpl.contourf(yp, xp, gz, shape, 30, interp=True)
mpl.colorbar()
mpl.plot(yp, xp, '.k')
mpl.xlabel('East (km)')
mpl.ylabel('North (km)')
mpl.m2km()
mpl.show()

mesh = imaging.migrate(xp, yp, zp, gz, 0, 10000, (30, 30, 30), power=0.8)
bounds = [0, 5000, 0, 5000, 0, 1000]
model = [Prism3D(600, 1200, 200, 4200, 400, 900, {'density': 1000}),
         Prism3D(1500, 4500, 2500, 3000, 300, 800, {'density': -1000}),
         Prism3D(3000, 4000, 1000, 2000, 200, 800, {'density': 700}),
         Prism3D(2700, 3200, 3700, 4200, 0, 900, {'density': 900})]
with open('model.pickle', 'w') as f:
    pickle.dump(model, f)
shape = (26, 26)
area = bounds[0:4]
noise = 5
x, y, z = gridder.regular(area, shape, z=-150)
tensor = (potential.prism.gxx(x, y, z, model),
          potential.prism.gxy(x, y, z, model),
          potential.prism.gxz(x, y, z, model),
          potential.prism.gyy(x, y, z, model),
          potential.prism.gyz(x, y, z, model),
          potential.prism.gzz(x, y, z, model))
tensor_noisy = [utils.contaminate(d, noise) for d in tensor]
data = [x, y, z]
data.extend(tensor_noisy)
with open('data.txt', 'w') as f:
    f.write("# Noise corrupted tensor components:\n")
    f.write("# noise = %g Eotvos\n" % (noise))
    f.write("# x y z gxx gxy gxz gyy gyz gzz\n")
    numpy.savetxt(f, numpy.array(data).T)
from fatiando import mesher, gridder, utils
from fatiando.gravmag import prism, transform
from fatiando.vis import mpl

# Direction of the Geomagnetic field
inc, dec = -60, 0
# Make a model with only induced magnetization
model = [
    mesher.Prism(-100, 100, -100, 100, 0, 2000,
                 {'magnetization': utils.ang2vec(10, inc, dec)})
]
area = (-5000, 5000, -5000, 5000)
shape = (100, 100)
z0 = -500
x, y, z = gridder.regular(area, shape, z=z0)
tf = utils.contaminate(prism.tf(x, y, z, model, inc, dec), 1, seed=0)
# Reduce to the pole using FFT. Since there is only induced magnetization, the
# magnetization direction (sinc and sdec) is the same as the geomagnetic field
pole = transform.reduce_to_pole(x, y, tf, shape, inc, dec, sinc=inc, sdec=dec)
# Calculate the true value at the pole for comparison
true = prism.tf(x, y, z, model, 90, 0, pmag=utils.ang2vec(10, 90, 0))

fig, axes = mpl.subplots(1, 3, figsize=(14, 4))
for ax in axes:
    ax.set_aspect('equal')
mpl.sca(axes[0])
mpl.title("Original total field anomaly")
mpl.contourf(y, x, tf, shape, 30, cmap=mpl.cm.RdBu_r)
mpl.colorbar(pad=0).set_label('nT')
mpl.m2km()
mpl.sca(axes[1])
from fatiando import gravmag as gm
from fatiando.mesher import Prism, PrismMesh, vremove
from fatiando.vis import mpl, myv

log = logger.get()
log.info(logger.header())

# Create a synthetic model
model = [Prism(250, 750, 250, 750, 200, 700, {'density': 1000})]
# and generate synthetic data from it
shape = (25, 25)
bounds = [0, 1000, 0, 1000, 0, 1000]
area = bounds[0:4]
xp, yp, zp = gridder.regular(area, shape, z=-1)
noise = 0.1  # 0.1 mGal noise
gz = utils.contaminate(gm.prism.gz(xp, yp, zp, model), noise)
# plot the data
mpl.figure()
mpl.title("Synthetic gravity anomaly (mGal)")
mpl.axis('scaled')
levels = mpl.contourf(yp, xp, gz, shape, 12)
mpl.colorbar()
mpl.xlabel('Horizontal coordinate y (km)')
mpl.ylabel('Horizontal coordinate x (km)')
mpl.m2km()
mpl.show()
# Inversion setup
# Create a mesh
mesh = PrismMesh(bounds, (25, 25, 25))
# Wrap the data so that harvester can use it
from matplotlib import pyplot
import numpy

from fatiando.seismic import profile
from fatiando import vis, utils, ui

area = (0, 10000, 0, 600)
vmin, vmax, zmin, zmax = area
figure = pyplot.figure()
pyplot.xlabel("Velocidade (m/s)")
pyplot.ylabel("Profundidade (m)")
thickness, velocity = ui.picker.draw_layers(area, figure.gca())
zp = numpy.arange(zmin + 1, zmax, 1)
tts, error = utils.contaminate(profile.vertical(thickness, velocity, zp),
                               0.0, percent=True, return_stddev=True)

pyplot.figure(figsize=(12, 5))
pyplot.subplot(1, 2, 1)
pyplot.grid()
pyplot.title("Perfilagem sismica vertical")
pyplot.plot(tts, zp, '.k')
pyplot.xlabel("Tempo de chegada (s)")
pyplot.ylabel("Profundidade (m)")
pyplot.ylim(sum(thickness), 0)
pyplot.subplot(1, 2, 2)
pyplot.grid()
pyplot.title("Perfil de velocidades")
vis.map.layers(thickness, velocity, '--b', linewidth=2)
pyplot.ylim(zmax, zmin)
pyplot.xlim(vmin, vmax)
pyplot.xlabel("Velocidade (m/s)")
""" GravMag: Generate noise-corrupted gravity gradient tensor data """ from fatiando import mesher, gridder, gravmag, utils from fatiando.vis import mpl prisms = [mesher.Prism(-1000,1000,-1000,1000,0,2000,{'density':1000})] shape = (100,100) xp, yp, zp = gridder.regular((-5000, 5000, -5000, 5000), shape, z=-200) components = [gravmag.prism.gxx, gravmag.prism.gxy, gravmag.prism.gxz, gravmag.prism.gyy, gravmag.prism.gyz, gravmag.prism.gzz] print "Calculate the tensor components and contaminate with 5 Eotvos noise" ftg = [utils.contaminate(comp(xp, yp, zp, prisms), 5.0) for comp in components] print "Plotting..." mpl.figure(figsize=(14,6)) mpl.suptitle("Contaminated FTG data") names = ['gxx', 'gxy', 'gxz', 'gyy', 'gyz', 'gzz'] for i, data in enumerate(ftg): mpl.subplot(2,3,i+1) mpl.title(names[i]) mpl.axis('scaled') levels = mpl.contourf(xp*0.001, yp*0.001, data, (100,100), 12) mpl.colorbar() mpl.contour(xp*0.001, yp*0.001, data, shape, levels, clabel=False) mpl.show()
GravMag: Simple gravity inversion for the relief of a 2D trapezoidal basin
"""
import numpy
from fatiando import logger, utils, mesher, gravmag, inversion
from fatiando.vis import mpl

log = logger.get()
log.info(logger.header())
log.info(__doc__)

log.info("Generating synthetic data")
verts = [(10000, 1.), (90000, 1.), (90000, 7000), (10000, 3330)]
model = mesher.Polygon(verts, {'density': -100})
xp = numpy.arange(0., 100000., 1000.)
zp = numpy.zeros_like(xp)
gz = utils.contaminate(gravmag.talwani.gz(xp, zp, [model]), 0.5)

log.info("Preparing for the inversion")
solver = inversion.gradient.levmarq(initial=(9000, 500))
estimate, residuals = gravmag.basin2d.trapezoidal(xp, zp, gz, verts[0:2],
                                                  -100, solver)

mpl.figure()
mpl.subplot(2, 1, 1)
mpl.title("Gravity anomaly")
mpl.plot(xp, gz, 'ok', label='Observed')
mpl.plot(xp, gz - residuals, '-r', linewidth=2, label='Predicted')
mpl.legend(loc='lower left', numpoints=1)
mpl.ylabel("mGal")
mpl.xlim(0, 100000)
mpl.subplot(2, 1, 2)
""" Geothermal: Forward and inverse modeling of a linear change in temperature measured in a well """ import numpy from fatiando import utils from fatiando.geothermal.climsig import linear, SingleChange from fatiando.vis import mpl # Generating synthetic data amp = 5.43 age = 78.2 # along a well at these depths zp = numpy.arange(0, 100, 1) temp, error = utils.contaminate(linear(amp, age, zp), 0.02, percent=True, return_stddev=True) # Preparing for the inversion data = SingleChange(temp, zp, mode='linear').config('levmarq', initial=[1, 1]) amp_, age_ = data.fit().estimate_ print "Linear change in temperature" print " true: amp=%.3f age=%.3f" % (amp, age) print " estimated: amp=%.3f age=%.3f" % (amp_, age_) mpl.figure(figsize=(4, 5)) mpl.title("Residual well temperature") mpl.plot(temp, zp, 'ok', label='Observed') mpl.plot(data.predicted(), zp, '--r', linewidth=3, label='Predicted') mpl.legend(loc='lower right', numpoints=1)
""" GravMag: Simple gravity inversion for the relief of a 2D triangular basin """ import numpy from fatiando import utils, mesher, gravmag, inversion from fatiando.vis import mpl verts = [(10000, 1.), (90000, 1.), (80000, 5000)] model = mesher.Polygon(verts, {'density': -100}) xp = numpy.arange(0., 100000., 1000.) zp = numpy.zeros_like(xp) gz = utils.contaminate(gravmag.talwani.gz(xp, zp, [model]), 1) solver = inversion.gradient.levmarq(initial=(10000, 1000)) estimate, residuals = gravmag.basin2d.triangular(xp, zp, gz, verts[0:2], -100, solver) mpl.figure() mpl.subplot(2, 1, 1) mpl.title("Gravity anomaly") mpl.plot(xp, gz, 'ok', label='Observed') mpl.plot(xp, gz - residuals, '-r', linewidth=2, label='Predicted') mpl.legend(loc='lower left') mpl.ylabel("mGal") mpl.xlim(0, 100000) mpl.subplot(2, 1, 2) mpl.polygon(estimate, 'o-r', linewidth=2, fill='r', alpha=0.3,
Seismic: Invert vertical seismic profile (VSP) traveltimes using smoothness
regularization and unknown layer thicknesses
"""
import numpy

from fatiando import utils
from fatiando.seismic.profile import layered_straight_ray, LayeredStraight
from fatiando.inversion.regularization import Smoothness1D
from fatiando.vis import mpl

# Make a layered model
thickness = [10, 20, 10, 30, 40, 60]
velocity = [2000, 1000, 5000, 1000, 3000, 6000]
zp = numpy.arange(1, sum(thickness), 1, dtype='f')
# Produce some noise-corrupted synthetic data
tts, error = utils.contaminate(
    layered_straight_ray(thickness, velocity, zp), 0.02, percent=True,
    return_stddev=True)
# Assume that the thicknesses are unknown. In this case, use a mesh of many
# thin layers and invert for each slowness
thick = 10.
mesh = [thick] * int(sum(thickness) / thick)
solver = (LayeredStraight(tts, zp, mesh) + 5 * Smoothness1D(len(mesh))).fit()
velocity_ = solver.estimate_

mpl.figure(figsize=(12, 5))
mpl.subplot(1, 2, 1)
mpl.grid()
mpl.title("Vertical seismic profile")
mpl.plot(tts, zp, 'ok', label='Observed')
mpl.plot(solver.predicted(), zp, '-r', linewidth=3, label='Predicted')
from matplotlib import pyplot
import numpy

from fatiando.seismic import profile
from fatiando import vis, utils, ui
import cPickle as pickle

area = (0, 10000, 0, 600)
vmin, vmax, zmin, zmax = area
figure = pyplot.figure()
pyplot.xlabel("Velocidade (m/s)")
pyplot.ylabel("Profundidade (m)")
thickness, velocity = ui.picker.draw_layers(area, figure.gca())
zp = numpy.arange(zmin + 1, zmax, 1)
tts, error = utils.contaminate(profile.vertical(thickness, velocity, zp),
                               0.01, percent=True, return_stddev=True)
with open('exercicio5.pickle', 'w') as f:
    data = {'thickness': thickness, 'velocity': velocity, 'tts': tts,
            'zp': zp}
    pickle.dump(data, f)

pyplot.figure(figsize=(12, 5))
pyplot.subplot(1, 2, 1)
pyplot.grid()
pyplot.title("Perfilagem sismica vertical")
pyplot.plot(tts, zp, '.k')
pyplot.xlabel("Tempo de chegada (s)")
pyplot.ylabel("Profundidade (m)")
pyplot.ylim(sum(thickness), 0)
pyplot.subplot(1, 2, 2)
pylab.subplots_adjust(wspace=0.4, hspace=0.3)
for i, field in enumerate(['gxx', 'gxy', 'gxz', 'gyy', 'gyz', 'gzz']):
    data = synthetic.from_prisms(prisms, x1=0, x2=5000, y1=0, y2=5000,
                                 nx=50, ny=50, height=150, field=field)
    data['value'], error = utils.contaminate(data['value'], stddev=error,
                                             percent=False,
                                             return_stddev=True)
    data['error'] = error * numpy.ones(len(data['value']))
    io.dump('%s_data.txt' % (field), data)
    pylab.subplot(2, 3, i + 1)
    pylab.axis('scaled')
    pylab.title(field)
    vis.contourf(data, 10)
    vis.contourf(data, 10)
    cb = pylab.colorbar(shrink=0.9)
    cb.set_label(r'$E\"otv\"os$', fontsize=14)
    pylab.xlabel('X [m]')
    pylab.ylabel('Y [m]')
""" GravMag: Calculate the analytic signal of a total field anomaly using FFT """ from fatiando import mesher, gridder, utils from fatiando.gravmag import prism, transform from fatiando.vis import mpl model = [mesher.Prism(-100, 100, -100, 100, 0, 2000, {'magnetization': 10})] area = (-5000, 5000, -5000, 5000) shape = (100, 100) z0 = -500 x, y, z = gridder.regular(area, shape, z=z0) inc, dec = -30, 0 tf = utils.contaminate(prism.tf(x, y, z, model, inc, dec), 0.001, percent=True) # Need to convert gz to SI units so that the result is also in SI total_grad_amp = transform.tga(x, y, utils.nt2si(tf), shape) mpl.figure() mpl.subplot(1, 2, 1) mpl.title("Original total field anomaly") mpl.axis('scaled') mpl.contourf(y, x, tf, shape, 30, cmap=mpl.cm.RdBu_r) mpl.colorbar(orientation='horizontal').set_label('nT') mpl.m2km() mpl.subplot(1, 2, 2) mpl.title("Total Gradient Amplitude") mpl.axis('scaled') mpl.contourf(y, x, total_grad_amp, shape, 30, cmap=mpl.cm.RdBu_r) mpl.colorbar(orientation='horizontal').set_label('nT/m')
from fatiando.vis import mpl, myv

# Generate some synthetic gravity data
bounds = [0, 10000, 0, 10000, 0, 5000]
props = {'density': 500}
props2 = {'density': 1000}
model = [
    mesher.Prism(4000, 6000, 4000, 6000, 500, 2500, props),
    mesher.Prism(2000, 2500, 2000, 2500, 500, 1000, props2),
    mesher.Prism(7500, 8000, 5500, 6500, 500, 1000, props2),
    mesher.Prism(1500, 2000, 4000, 5000, 500, 1000, props2)
]
area = bounds[:4]
shape = (50, 50)
x, y, z = gridder.regular(area, shape, z=-1)
gz = utils.contaminate(gravmag.prism.gz(x, y, z, model), 0.1)
mesh = mesher.PrismMesh(bounds, (20, 40, 40))
seeds = gravmag.harvester.sow([[5000, 5000, 1000, props]], mesh)

# Run the inversion without using weights
data = [gravmag.harvester.Gz(x, y, z, gz)]
estimate, predicted = gravmag.harvester.harvest(data, seeds, mesh,
                                                compactness=1.5,
                                                threshold=0.001)
mesh.addprop('density', estimate['density'])
bodies = mesher.vremove(0, 'density', mesh)

mpl.figure()
mpl.axis('scaled')
mpl.title('No weights: Observed (color) vs Predicted (black)')
from fatiando import mesher, utils, seismic, vis, inversion

area = (0, 100000, 0, 100000)
shape = (100, 100)
model = mesher.SquareMesh(area, shape)
# Fetch the image from the online docs
urllib.urlretrieve(
    'http://fatiando.readthedocs.org/en/latest/_static/logo.png', 'logo.png')
model.img2prop('logo.png', 4000, 10000, 'vp')

# Make some travel time data and add noise
src_loc = utils.random_points(area, 200)
rec_loc = utils.circular_points(area, 80, random=True)
srcs, recs = utils.connect_points(src_loc, rec_loc)
ttimes = seismic.ttime2d.straight(model, 'vp', srcs, recs, par=True)
ttimes, error = utils.contaminate(ttimes, 0.01, percent=True,
                                  return_stddev=True)
# Make the mesh
mesh = mesher.SquareMesh(area, shape)
# Since the matrices are big, use the Steepest Descent solver to avoid dealing
# with Hessian matrices. It needs a starting guess, so start with 1000
inversion.gradient.use_sparse()
solver = inversion.gradient.steepest(1000 * numpy.ones(mesh.size))
# and run the inversion
estimate, residuals = seismic.srtomo.run(ttimes, srcs, recs, mesh, sparse=True,
                                         solver=solver, smooth=0.01)
# Convert the slowness estimate to velocities and add it to the mesh
mesh.addprop('vp', seismic.srtomo.slowness2vel(estimate))
# Calculate and print the standard deviation of the residuals.
# It should be close to the data error if the inversion was able to fit the
# data
print "Assumed error: %f" % (error)
""" Geothermal: Forward and inverse modeling of an abrupt change in temperature measured in a well """ import numpy from fatiando import utils from fatiando.geothermal import climsig from fatiando.vis import mpl # Generating synthetic data amp = 3 age = 54 zp = numpy.arange(0, 100, 1) temp, error = utils.contaminate(climsig.abrupt(amp, age, zp), 0.02, percent=True, return_stddev=True) # Preparing for the inversion p, residuals = climsig.iabrupt(temp, zp) est_amp, est_age = p mpl.figure(figsize=(12, 5)) mpl.subplot(1, 2, 1) mpl.title("Climate signal (abrupt)") mpl.plot(temp, zp, 'ok', label='Observed') mpl.plot(temp - residuals, zp, '--r', linewidth=3, label='Predicted') mpl.legend(loc='lower right', numpoints=1) mpl.xlabel("Temperature (C)") mpl.ylabel("Z") mpl.ylim(100, 0)
""" GravMag: Center of mass estimation using the first eigenvector of the gravity gradient tensor (2 sources with expanding windows) """ from fatiando import mesher, gridder, utils, gravmag from fatiando.vis import mpl, myv # Generate some synthetic data prisms = [mesher.Prism(-2500,-500,-1000,1000,500,2500,{'density':1000}), mesher.Prism(500,2500,-1000,1000,500,2500,{'density':1000})] shape = (100, 100) area = (-5000, 5000, -5000, 5000) xp, yp, zp = gridder.regular(area, shape, z=-150) noise = 2 tensor = [utils.contaminate(gravmag.prism.gxx(xp, yp, zp, prisms), noise), utils.contaminate(gravmag.prism.gxy(xp, yp, zp, prisms), noise), utils.contaminate(gravmag.prism.gxz(xp, yp, zp, prisms), noise), utils.contaminate(gravmag.prism.gyy(xp, yp, zp, prisms), noise), utils.contaminate(gravmag.prism.gyz(xp, yp, zp, prisms), noise), utils.contaminate(gravmag.prism.gzz(xp, yp, zp, prisms), noise)] # Get the eigenvectors from the tensor data eigenvals, eigenvecs = gravmag.tensor.eigen(tensor) # Plot the data titles = ['gxx', 'gxy', 'gxz', 'gyy', 'gyz', 'gzz'] mpl.figure() for i, title in enumerate(titles): mpl.subplot(3, 2, i + 1) mpl.title(title) mpl.axis('scaled') levels = mpl.contourf(yp, xp, tensor[i], shape, 10) mpl.contour(yp, xp, tensor[i], shape, levels)
rec_points = vis.mpl.pick_points(area, ax, marker='^', color='r')

vis.mpl.figure()
ax = vis.mpl.subplot(1, 1, 1)
vis.mpl.axis('scaled')
vis.mpl.suptitle("Choose the location of the source")
vis.mpl.points(rec_points, '^r')
src = vis.mpl.pick_points(area, ax, marker='*', color='y')
if len(src) > 1:
    print "Don't be greedy! Pick only one point as the source"
    sys.exit()

srcs, recs = utils.connect_points(src, rec_points)
ptime = seismic.ttime2d.straight(model, 'vp', srcs, recs)
stime = seismic.ttime2d.straight(model, 'vs', srcs, recs)
ttresiduals, error = utils.contaminate(stime - ptime, 0.10, percent=True,
                                       return_stddev=True)

solver = inversion.gradient.levmarq(initial=(0, 0), maxit=1000, tol=10**(-3))
result = seismic.epic2d.homogeneous(ttresiduals, recs, vp, vs, solver)
estimate, residuals = result
predicted = ttresiduals - residuals

shape = (100, 100)
xs, ys = gridder.regular(area, shape)
goals = seismic.epic2d.mapgoal(xs, ys, ttresiduals, recs, vp, vs)

vis.mpl.figure(figsize=(10, 4))
vis.mpl.subplot(1, 2, 1)
vis.mpl.title('Epicenter + %d recording stations' % (len(recs)))
vis.mpl.axis('scaled')
vis.mpl.contourf(xs, ys, goals, shape, 50)
""" GravMag: Use the polynomial equivalent layer to upward continue gravity data """ from fatiando.gravmag import prism, sphere from fatiando.gravmag.eqlayer import PELGravity, PELSmoothness from fatiando import gridder, utils, mesher from fatiando.vis import mpl # Make synthetic data props = {'density':1000} model = [mesher.Prism(-500, 500, -1000, 1000, 500, 4000, props)] shape = (50, 50) x, y, z = gridder.regular([-5000, 5000, -5000, 5000], shape, z=0) gz = utils.contaminate(prism.gz(x, y, z, model), 0.1) # Setup the layer layer = mesher.PointGrid([-5000, 5000, -5000, 5000], 200, (100, 100)) # Estimate the density using the PEL (it is faster and more memory efficient # than the traditional equivalent layer). windows = (20, 20) degree = 1 solver = (PELGravity(x, y, z, gz, layer, windows, degree) + 10**-21*PELSmoothness(layer, windows, degree)).fit() layer.addprop('density', solver.estimate_) residuals = solver.residuals() print "Residuals:" print "mean:", residuals.mean() print "stddev:", residuals.std() # Plot the layer and the fit mpl.figure(figsize=(14, 4)) mpl.subplot(1, 3, 1)
""" Seismic: Invert vertical seismic profile (VSP) traveltimes using smoothness regularization """ import numpy from fatiando import utils, seismic, vis thickness = [10, 20, 10, 30, 40, 60] velocity = [2000, 1000, 5000, 1000, 2500, 6000] zp = numpy.arange(1., sum(thickness), 1., dtype='f') tts, error = utils.contaminate( seismic.profile.vertical(thickness, velocity, zp), 0.02, percent=True, return_stddev=True) thick = 10. mesh = [thick]*int(sum(thickness)/thick) smooth = 50. estimates = [] for i in xrange(30): p, r = seismic.profile.ivertical(utils.contaminate(tts, error), zp, mesh, smooth=smooth) estimates.append(1./p) estimate = utils.vecmean(estimates) predicted = seismic.profile.vertical(mesh, estimate, zp) vis.mpl.figure(figsize=(12,5)) vis.mpl.subplot(1, 2, 1) vis.mpl.grid() vis.mpl.title("Vertical seismic profile") vis.mpl.plot(tts, zp, 'ok', label='Observed') vis.mpl.plot(predicted, zp, '-r', linewidth=3, label='Predicted')
import matplotlib.pyplot as plt

from fatiando.gravmag import prism, sphere
from fatiando.gravmag.eqlayer import EQLGravity
from fatiando.inversion import Damping
from fatiando import gridder, utils, mesher

# First thing to do is make some synthetic data to test the method. We'll use
# a single prism to keep it simple
props = {'density': 500}
model = [mesher.Prism(-5000, 5000, -200, 200, 100, 4000, props)]
# The synthetic data will be generated on a random scatter of points
area = [-8000, 8000, -5000, 5000]
x, y, z = gridder.scatter(area, 300, z=0, seed=42)
# Generate some noisy data from our model
gz = utils.contaminate(prism.gz(x, y, z, model), 0.2, seed=0)

# Now for the equivalent layer. We must setup a layer of point masses where
# we'll estimate a density distribution that fits our synthetic data
layer = mesher.PointGrid(area, 500, (20, 20))
# Estimate the density using enough damping so that it won't try to fit the
# error
eql = EQLGravity(x, y, z, gz, layer) + 1e-22 * Damping(layer.size)
eql.fit()
# Now we add the estimated densities to our layer
layer.addprop('density', eql.estimate_)
# and print some statistics of how well the estimated layer fits the data
residuals = eql[0].residuals()
print("Residuals:")
print(" mean:", residuals.mean(), 'mGal')
print(" stddev:", residuals.std(), 'mGal')
""" GravMag: Use the polynomial equivalent layer to upward continue gravity data """ from fatiando.gravmag import prism, sphere from fatiando.gravmag.eqlayer import PELGravity, PELSmoothness from fatiando import gridder, utils, mesher from fatiando.vis import mpl # Make synthetic data props = {'density': 1000} model = [mesher.Prism(-500, 500, -1000, 1000, 500, 4000, props)] shape = (50, 50) x, y, z = gridder.regular([-5000, 5000, -5000, 5000], shape, z=0) gz = utils.contaminate(prism.gz(x, y, z, model), 0.1, seed=0) # Setup the layer layer = mesher.PointGrid([-5000, 5000, -5000, 5000], 200, (100, 100)) # Estimate the density using the PEL (it is faster and more memory efficient # than the traditional equivalent layer). windows = (20, 20) degree = 1 misfit = PELGravity(x, y, z, gz, layer, windows, degree) # Apply a smoothness constraint to the borders of the equivalent layer windows # to avoid gaps in the physical property distribution solver = misfit + 1e-18 * PELSmoothness(layer, windows, degree) solver.fit() # Add the estimated density distribution to the layer object for plotting and # forward modeling layer.addprop('density', solver.estimate_) residuals = solver[0].residuals() print("Residuals:") print("mean:", residuals.mean())
from fatiando.seismic.profile import layered_straight_ray, LayeredStraight
from fatiando.inversion.regularization import Damping
from fatiando.vis import mpl

# The limits in velocity and depths, respectively
area = (0, 10000, 0, 100)
vmin, vmax, zmin, zmax = area
# Use the interactive functions of mpl to draw a layered model
figure = mpl.figure()
mpl.xlabel("Velocity (m/s)")
mpl.ylabel("Depth (m)")
thickness, velocity = mpl.draw_layers(area, figure.gca())
# Make some synthetic noise-corrupted travel-time data
zp = numpy.arange(zmin + 0.5, zmax, 0.5)
tts, error = utils.contaminate(layered_straight_ray(thickness, velocity, zp),
                               0.02, percent=True, return_stddev=True)
# Make the solver and run the inversion using damping regularization
# (assumes known thicknesses of the layers)
solver = (LayeredStraight(tts, zp, thickness) +
          0.1 * Damping(len(thickness))).fit()
velocity_ = solver.estimate_

# Plot the results
mpl.figure(figsize=(12, 5))
mpl.subplot(1, 2, 1)
mpl.grid()
mpl.title("Vertical seismic profile")
mpl.plot(tts, zp, 'ok', label='Observed')
mpl.plot(solver[0].predicted(), zp, '-r', linewidth=3, label='Predicted')
mpl.legend(loc='upper right', numpoints=1)
area = (0, 500000, 0, 500000)
shape = (30, 30)
model = mesher.SquareMesh(area, shape)
# Fetch the image from the online docs
urllib.urlretrieve(
    "http://fatiando.readthedocs.org/en/latest/_static/logo.png", "logo.png")
model.img2prop("logo.png", 4000, 10000, "vp")

# Make some travel time data and add noise
log.info("Generating synthetic travel-time data")
src_loc = utils.random_points(area, 80)
rec_loc = utils.circular_points(area, 30, random=True)
srcs, recs = utils.connect_points(src_loc, rec_loc)
start = time.time()
tts = seismic.ttime2d.straight(model, "vp", srcs, recs, par=True)
log.info(" time: %s" % (utils.sec2hms(time.time() - start)))
tts, error = utils.contaminate(tts, 0.01, percent=True, return_stddev=True)
# Make the mesh
mesh = mesher.SquareMesh(area, shape)
# and run the inversion
estimate, residuals = seismic.srtomo.run(tts, srcs, recs, mesh,
                                         damping=10 ** 9)
# Convert the slowness estimate to velocities and add it to the mesh
mesh.addprop("vp", seismic.srtomo.slowness2vel(estimate))
# Calculate and print the standard deviation of the residuals.
# It should be close to the data error if the inversion was able to fit the
# data
log.info("Assumed error: %g" % (error))
log.info("Standard deviation of residuals: %g" % (numpy.std(residuals)))

vis.mpl.figure(figsize=(14, 5))
vis.mpl.subplot(1, 2, 1)
vis.mpl.axis("scaled")
                 {'magnetization': utils.ang2vec(mag_m, inc_s, dec_s)}),
    mesher.Prism(x1, x2, y1m[35], y2m[35], zo_t[35], zo_b[35],
                 {'magnetization': utils.ang2vec(mag_m, inc_s, dec_s)}),
    mesher.Prism(x1, x2, y1m[36], y2m[36], zo_t[36], zo_b[36],
                 {'magnetization': utils.ang2vec(mag_m, inc_s, dec_s)}),
    mesher.Prism(x1, x2, y1m[37], y2m[37], zo_t[37], zo_b[37],
                 {'magnetization': utils.ang2vec(mag_m, inc_s, dec_s)}),
    mesher.Prism(x1, x2, y1m[38], y2m[38], zo_t[38], zo_b[38],
                 {'magnetization': utils.ang2vec(mag_m, inc_s, dec_s)}),
    mesher.Prism(x1, x2, y1m[39], y2m[39], zo_t[39], zo_b[39],
                 {'magnetization': utils.ang2vec(mag_m, inc_s, dec_s)})
]

# total field from Fatiando a Terra
tf, stdv = utils.contaminate(prism.tf(xi, yi, zi, model_mag, inc_o, dec_o),
                             1, percent=False, return_stddev=True)
print stdv

# save for the plot
out = np.array([yi, xi, zi, tf])
out = out.T
np.savetxt('input_mag.dat', out, delimiter=' ', fmt='%1.8f')
out = None

tf_noise_free = prism.tf(xi, yi, zi, model_mag, inc_o, dec_o)
out = np.array([yi, xi, zi, tf_noise_free])
out = out.T
np.savetxt('mag_noise_free.dat', out, delimiter=' ', fmt='%1.8f')
out = None
x1, x2 = 0, 3000
y1, y2 = 0, 3000
z1, z2 = 0, 3000
extent = [x1, x2, y1, y2, -z2, -z1]

# Now calculate all the components of the gradient tensor and contaminate the
# data with gaussian noise
error = 0.2
fields = ['gzz', 'gyz']
data = {}
for i, field in enumerate(fields):
    data[field] = synthetic.from_prisms(model, x1=0, x2=3000, y1=0, y2=3000,
                                        nx=50, ny=50, height=150, field=field)
    data[field]['value'] = utils.contaminate(data[field]['value'],
                                             stddev=error, percent=False)
    data[field]['error'] = error * numpy.ones(len(data[field]['value']))

# PERFORM THE INVERSION
################################################################################
# Generate a prism mesh
mesh = fatiando.mesh.prism_mesh(x1=x1, x2=x2, y1=y1, y2=y2, z1=z1, z2=z2,
                                nx=30, ny=30, nz=30)

# Set the seeds and save them for later use
log.info("Setting seeds in mesh:")
seeds = []
seeds.append(gplant.get_seed((1501, 1501, 1501), 1000, mesh))

# Make a mesh for the seeds to plot them
""" GravMag: Fit an equivalent layer to gravity and gravity gradient data """ import numpy as np from fatiando.gravmag import prism, sphere from fatiando.gravmag.eqlayer import EQLGravity from fatiando.inversion.regularization import Smoothness2D, LCurve from fatiando import gridder, utils, mesher from fatiando.vis import mpl # Make synthetic data props = {'density': 1000} model = [mesher.Prism(-500, 500, -1000, 1000, 500, 4000, props)] area = [-5000, 5000, -5000, 5000] x1, y1, z1 = gridder.scatter(area, 80, z=0, seed=0) gz = utils.contaminate(prism.gz(x1, y1, z1, model), 0.1, seed=0) x2, y2, z2 = gridder.regular(area, (10, 50), z=-200) gzz = utils.contaminate(prism.gzz(x2, y2, z2, model), 5, seed=0) # Setup the layer layer = mesher.PointGrid([-6000, 6000, -6000, 6000], 500, (50, 50)) # and the inversion # Apply a scaling factor to make both portions of the misfit the same order of # magnitude scale = np.linalg.norm(gz)**2 / np.linalg.norm(gzz)**2 misfit = (EQLGravity(x1, y1, z1, gz, layer) + scale * EQLGravity(x2, y2, z2, gzz, layer, field='gzz')) regul = Smoothness2D(layer.shape) # Use an L-curve analysis to find the best regularization parameter solver = LCurve(misfit, regul, [10**i for i in range(-30, -20)]).fit() layer.addprop('density', solver.estimate_)
vel[5:25, 5:25] = 10000
model.addprop('vp', vel.ravel())

# Make some noisy travel time data using straight-rays
# Set the random seed so that points are the same every time we run this
# script
seed = 0
src_loc_x, src_loc_y = gridder.scatter(area, 80, seed=seed)
src_loc = np.transpose([src_loc_x, src_loc_y])
rec_loc_x, rec_loc_y = gridder.circular_scatter(area, 30, random=True,
                                                seed=seed)
rec_loc = np.transpose([rec_loc_x, rec_loc_y])
srcs = [src for src in src_loc for _ in rec_loc]
recs = [rec for _ in src_loc for rec in rec_loc]
tts = ttime2d.straight(model, 'vp', srcs, recs)
# Use 2% random noise to corrupt the data
tts = utils.contaminate(tts, 0.02, percent=True, seed=seed)

# Make a mesh for the inversion. The inversion will estimate the velocity in
# each square of the mesh. To make things simpler, we'll use a mesh that is
# the same as our original model.
mesh = SquareMesh(area, shape)

# Create solvers for each type of regularization and fit the synthetic data to
# obtain an estimated velocity model
solver = srtomo.SRTomo(tts, srcs, recs, mesh)
smooth = solver + 1e8 * Smoothness2D(mesh.shape)
smooth.fit()
damped = solver + 1e8 * Damping(mesh.size)
damped.fit()
ax = vis.mpl.subplot(1, 1, 1)
vis.mpl.axis('scaled')
vis.mpl.suptitle("Choose the location of the source")
vis.mpl.points(rec_points, '^r')
src = vis.mpl.pick_points(area, ax, marker='*', color='y')
if len(src) > 1:
    log.error("Don't be greedy! Pick only one point as the source")
    sys.exit()

log.info("Generating synthetic travel-time data")
srcs, recs = utils.connect_points(src, rec_points)
ptime = seismic.ttime2d.straight(model, 'vp', srcs, recs)
stime = seismic.ttime2d.straight(model, 'vs', srcs, recs)
error_level = 0.1
ttr_true = stime - ptime
ttr, error = utils.contaminate(ttr_true, error_level, percent=True,
                               return_stddev=True)

log.info("Choose the initial estimate for the gradient solvers")
vis.mpl.figure()
ax = vis.mpl.subplot(1, 1, 1)
vis.mpl.axis('scaled')
vis.mpl.suptitle("Choose the initial estimate for the gradient solvers")
vis.mpl.points(rec_points, '^r')
vis.mpl.points(src, '*y')
initial = vis.mpl.pick_points(area, ax, marker='*', color='k')
if len(initial) > 1:
    log.error("Don't be greedy! Pick only one initial estimate")
    sys.exit()
initial = initial[0]

log.info("Will solve the inverse problem using Newton's method")
from fatiando.mesher import PolygonalPrism, PrismMesh, vremove
from fatiando.vis import mpl, myv

# Create a synthetic model
bounds = [-10000, 10000, -10000, 10000, 0, 10000]
vertices = [[-4948.97959184, -6714.64019851],
            [-2448.97959184, -3141.43920596],
            [2448.97959184, 312.65508685],
            [6938.7755102, 5394.54094293],
            [4846.93877551, 6228.28784119],
            [2653.06122449, 3409.4292804],
            [-3520.40816327, -1434.24317618],
            [-6632.65306122, -6079.4044665]]
model = [PolygonalPrism(vertices, 1000, 4000, {'density': 1000})]
# and generate synthetic data from it
shape = (20, 20)
area = bounds[0:4]
xp, yp, zp = gridder.regular(area, shape, z=-1)
noise = 0.1  # 0.1 mGal noise
gz = utils.contaminate(gm.polyprism.gz(xp, yp, zp, model), noise)
# Create a mesh
mesh = PrismMesh(bounds, (25, 50, 50))
# Wrap the data so that harvester can read it
data = [gm.harvester.Gz(xp, yp, zp, gz)]
# Plot the data and pick the location of the seeds
mpl.figure()
mpl.suptitle("Pick the seeds (polygon is the true source)")
mpl.axis('scaled')
levels = mpl.contourf(yp, xp, gz, shape, 12)
mpl.colorbar()
mpl.polygon(model[0], xy2ne=True)
mpl.xlabel('Horizontal coordinate y (km)')
mpl.ylabel('Horizontal coordinate x (km)')
seedx, seedy = mpl.pick_points(area, mpl.gca(), xy2ne=True).T
from fatiando import utils

area = (0, 500000, 0, 500000)
shape = (30, 30)
model = SquareMesh(area, shape)
vel = 4000 * np.ones(shape)
vel[5:25, 5:25] = 10000
model.addprop('vp', vel.ravel())

# Make some travel time data and add noise
seed = 0  # Set the random seed so that points are the same every time
src_loc = utils.random_points(area, 80, seed=seed)
rec_loc = utils.circular_points(area, 30, random=True, seed=seed)
srcs, recs = utils.connect_points(src_loc, rec_loc)
tts = ttime2d.straight(model, 'vp', srcs, recs)
tts, error = utils.contaminate(tts, 0.02, percent=True, return_stddev=True,
                               seed=seed)
# Make the mesh
mesh = SquareMesh(area, shape)
# and run the inversion
misfit = srtomo.SRTomo(tts, srcs, recs, mesh)
regularization = Smoothness2D(mesh.shape)
# Will use the l-curve criterion to find the best regularization parameter
tomo = LCurve(misfit, regularization, [10 ** i for i in np.arange(0, 10, 1)],
              jobs=8).fit()
mesh.addprop('vp', tomo.estimate_)
# Plot the L-curve and print the estimated regularization parameter
mpl.figure()
mpl.title('L-curve: triangle marks the best solution')
tomo.plot_lcurve()
print "Estimated regularization parameter: %g" % (tomo.regul_param_)
import numpy
from fatiando import mesher, seismic, utils, gridder, vis, inversion

area = (0, 10, 0, 10)
vp, vs = 2, 1
model = [mesher.Square(area, props={'vp': vp, 'vs': vs})]

src = (8, 7)
stations = 10
srcs, recs = utils.connect_points([src], [(4, 6), (5, 5.9), (6, 6)])
ptime = seismic.ttime2d.straight(model, 'vp', srcs, recs)
stime = seismic.ttime2d.straight(model, 'vs', srcs, recs)
error_level = 0.05
ttr_true = stime - ptime
ttr, error = utils.contaminate(ttr_true, error_level, percent=True,
                               return_stddev=True)

vis.mpl.figure()
ax = vis.mpl.subplot(1, 1, 1)
vis.mpl.axis('scaled')
vis.mpl.suptitle("Choose the initial estimate for the gradient solvers")
vis.mpl.points(recs, '^r')
vis.mpl.points(srcs, '*y')
initial = vis.mpl.pick_points(area, ax, marker='*', color='k')
if len(initial) > 1:
    print "Don't be greedy! Pick only one initial estimate"
    sys.exit()
initial = initial[0]

ref = {'y': 7}
from fatiando import gravmag as gm
from fatiando.mesher import Prism, PrismMesh, vremove
from fatiando.vis import mpl, myv

# Create a synthetic model
props = {'density': 1000}
model = [Prism(400, 600, 300, 500, 200, 400, props),
         Prism(400, 600, 400, 600, 400, 600, props),
         Prism(400, 600, 500, 700, 600, 800, props)]
# and generate synthetic data from it
shape = (51, 51)
bounds = [0, 1000, 0, 1000, 0, 1000]
area = bounds[0:4]
xp, yp, zp = gridder.regular(area, shape, z=-150)
noise = 0.5
gxx = utils.contaminate(gm.prism.gxx(xp, yp, zp, model), noise)
gxy = utils.contaminate(gm.prism.gxy(xp, yp, zp, model), noise)
gxz = utils.contaminate(gm.prism.gxz(xp, yp, zp, model), noise)
gyy = utils.contaminate(gm.prism.gyy(xp, yp, zp, model), noise)
gyz = utils.contaminate(gm.prism.gyz(xp, yp, zp, model), noise)
gzz = utils.contaminate(gm.prism.gzz(xp, yp, zp, model), noise)
tensor = [gxx, gxy, gxz, gyy, gyz, gzz]
titles = ['gxx', 'gxy', 'gxz', 'gyy', 'gyz', 'gzz']
# plot the data
mpl.figure()
for i in xrange(len(tensor)):
    mpl.subplot(2, 3, i + 1)
    mpl.title(titles[i])
    mpl.axis('scaled')
    levels = mpl.contourf(yp, xp, tensor[i], shape, 30)
    mpl.colorbar()
import numpy

from fatiando import potential, logger, gridder, utils
from fatiando.mesher.volume import Prism3D

log = logger.tofile('datagen-%s.log' % (sys.argv[1].split('.')[0]))
log.info(logger.header())

modelfile = __import__(sys.argv[1].split('.')[0])
model = modelfile.model
shape = (51, 51)
bounds = [0, 1000, 0, 1000, 0, 1000]
area = bounds[0:4]
noise = 0.5
x, y, z = gridder.regular(area, shape, z=-150)
tensor = (potential.prism.gxx(x, y, z, model),
          potential.prism.gxy(x, y, z, model),
          potential.prism.gxz(x, y, z, model),
          potential.prism.gyy(x, y, z, model),
          potential.prism.gyz(x, y, z, model),
          potential.prism.gzz(x, y, z, model))
tensor_noisy = [utils.contaminate(d, noise) for d in tensor]
data = [x, y, z]
data.extend(tensor_noisy)
with open(modelfile.datafile, 'w') as f:
    f.write("# Noise corrupted tensor components:\n")
    f.write("# noise = %g Eotvos\n" % (noise))
    f.write("# x y z gxx gxy gxz gyy gyz gzz\n")
    numpy.savetxt(f, numpy.array(data).T)
""" GravMag: Center of mass estimation using the first eigenvector of the gravity gradient tensor (elongated model) """ from fatiando.vis import mpl, myv from fatiando import mesher, gridder, utils, gravmag # Generate some synthetic data prisms = [mesher.Prism(-4000, 4000, -500, 500, 500, 1000, {'density': 1000})] shape = (100, 100) xp, yp, zp = gridder.regular((-5000, 5000, -5000, 5000), shape, z=-150) noise = 2 tensor = [ utils.contaminate(gravmag.prism.gxx(xp, yp, zp, prisms), noise), utils.contaminate(gravmag.prism.gxy(xp, yp, zp, prisms), noise), utils.contaminate(gravmag.prism.gxz(xp, yp, zp, prisms), noise), utils.contaminate(gravmag.prism.gyy(xp, yp, zp, prisms), noise), utils.contaminate(gravmag.prism.gyz(xp, yp, zp, prisms), noise), utils.contaminate(gravmag.prism.gzz(xp, yp, zp, prisms), noise) ] # Plot the data titles = ['gxx', 'gxy', 'gxz', 'gyy', 'gyz', 'gzz'] mpl.figure() for i, title in enumerate(titles): mpl.subplot(3, 2, i + 1) mpl.title(title) mpl.axis('scaled') levels = mpl.contourf(yp, xp, tensor[i], shape, 10) mpl.contour(yp, xp, tensor[i], shape, levels) mpl.m2km() mpl.show()
GravMag: Use an equivalent layer to reduce a magnetic total field anomaly to
the pole
"""
from fatiando.gravmag import prism, sphere
from fatiando.gravmag.eqlayer import EQLTotalField
from fatiando.inversion.regularization import Damping, LCurve
from fatiando import gridder, utils, mesher
from fatiando.vis import mpl

# Make synthetic data
inc, dec = -60, 23
props = {'magnetization': 10}
model = [mesher.Prism(-500, 500, -1000, 1000, 500, 4000, props)]
shape = (25, 25)
x, y, z = gridder.regular([-5000, 5000, -5000, 5000], shape, z=0)
tf = utils.contaminate(prism.tf(x, y, z, model, inc, dec), 5, seed=0)
# Setup the layer
layer = mesher.PointGrid([-7000, 7000, -7000, 7000], 700, (50, 50))
# Estimate the magnetization intensity
# Need to apply regularization so that it won't try to fit the error as well
misfit = EQLTotalField(x, y, z, tf, inc, dec, layer)
regul = Damping(layer.size)
# Use an L-curve analysis to find the best regularization parameter
solver = LCurve(misfit, regul, [10 ** i for i in range(-30, -15)]).fit()
residuals = solver.residuals()
layer.addprop('magnetization', solver.estimate_)
print "Residuals:"
print "mean:", residuals.mean()
print "stddev:", residuals.std()
# Now I can forward model the layer at the south pole and check against the
from fatiando import mesher, gridder
from fatiando.utils import ang2vec, vec2ang, contaminate
from fatiando.gravmag import sphere
from fatiando.vis import mpl
from fatiando.gravmag.magdir import DipoleMagDir
from fatiando.constants import CM

# Make noise-corrupted synthetic data
inc, dec = -10.0, -15.0  # inclination and declination of the Geomagnetic Field
model = [mesher.Sphere(3000, 3000, 1000, 1000,
                       {'magnetization': ang2vec(6.0, -20.0, -10.0)}),
         mesher.Sphere(7000, 7000, 1000, 1000,
                       {'magnetization': ang2vec(10.0, 3.0, -67.0)})]
area = (0, 10000, 0, 10000)
x, y, z = gridder.scatter(area, 1000, z=-150, seed=0)
tf = contaminate(sphere.tf(x, y, z, model, inc, dec), 5.0, seed=0)

# Give the centers of the dipoles
centers = [[3000, 3000, 1000], [7000, 7000, 1000]]
# Estimate the magnetization vectors
solver = DipoleMagDir(x, y, z, tf, inc, dec, centers).fit()
# Print the estimated and true dipole moments, inclinations and declinations
print 'Estimated magnetization (intensity, inclination, declination)'
for e in solver.estimate_:
    print e

# Plot the fit and the normalized histogram of the residuals
mpl.figure(figsize=(14, 5))
mpl.subplot(1, 2, 1)
rec_points = mpl.pick_points(area, mpl.gca(), marker='^', color='r')
# and the source
mpl.figure()
mpl.axis('scaled')
mpl.suptitle("Choose the location of the source")
mpl.points(rec_points, '^r')
src = mpl.pick_points(area, mpl.gca(), marker='*', color='y')
if len(src) > 1:
    print "Don't be greedy! Pick only one point as the source"
    sys.exit()

# Calculate the P and S wave traveltimes
srcs, recs = utils.connect_points(src, rec_points)
ptime = ttime2d.straight(model, 'vp', srcs, recs)
stime = ttime2d.straight(model, 'vs', srcs, recs)
# Calculate the residual time (S - P) with added noise
traveltime, error = utils.contaminate(stime - ptime, 0.05, percent=True,
                                      return_stddev=True)
solver = Homogeneous(traveltime, recs, vp, vs)

# Pick the initial estimate and fit
mpl.figure()
mpl.axis('scaled')
mpl.suptitle("Choose the initial estimate")
mpl.points(rec_points, '^r')
mpl.points(src, '*y')
initial = mpl.pick_points(area, mpl.gca(), marker='*', color='b')
if len(initial) > 1:
    print "Don't be greedy! Pick only one point"
    sys.exit()
estimate = solver.config('levmarq', initial=initial[0]).fit().estimate_

mpl.figure(figsize=(10, 4))
mpl.subplot(1, 2, 1)
myv.wall_bottom(bounds)
myv.wall_north(bounds)
myv.show()

# Generate the data grid
shape = (25, 25)
area = bounds[0:4]
x, y = gridder.regular(area, shape)
# Generate synthetic topography
height = (300 * utils.gaussian2d(x, y, 1000, 3000, x0=500, y0=1000, angle=-60)
          + 1000 * utils.gaussian2d(x, y, 500, 2000, x0=3000, y0=3000))
# Calculate the data
noise = 1
noisegz = 0.1
z = -height - 150
data = [x, y, z, height,
        utils.contaminate(gravmag.prism.gz(x, y, z, model), noisegz),
        utils.contaminate(gravmag.prism.gxx(x, y, z, model), noise),
        utils.contaminate(gravmag.prism.gxy(x, y, z, model), noise),
        utils.contaminate(gravmag.prism.gxz(x, y, z, model), noise),
        utils.contaminate(gravmag.prism.gyy(x, y, z, model), noise),
        utils.contaminate(gravmag.prism.gyz(x, y, z, model), noise),
        utils.contaminate(gravmag.prism.gzz(x, y, z, model), noise)]
with open('data.txt', 'w') as f:
    f.write(logger.header(comment='#'))
    f.write("# Noise corrupted gz and tensor components:\n")
    f.write("# noise = %g Eotvos\n" % (noise))
    f.write("# noise = %g mGal\n" % (noisegz))
    f.write("# coordinates are in meters\n")
    f.write("# gz in mGal and tensor in Eotvos\n")
    f.write("# x y z height gz gxx gxy gxz gyy gyz gzz\n")
    numpy.savetxt(f, numpy.transpose(data))
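# A minimal sketch of how the columns written above could be read back with
# plain numpy. The column order follows the header written by this script;
# the unpacking below is an illustration only, not part of the original
# example.
import numpy
x, y, z, height, gz, gxx, gxy, gxz, gyy, gyz, gzz = numpy.loadtxt(
    'data.txt', unpack=True)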