# Trim (residual) statics workflow: align NMO-corrected gathers against a
# filtered model stack, apply the resulting statics, and write the
# trim-statics stack.
import toolbox
import pylab
import numpy as np

# pilot traces: the band-passed model stack produced by an earlier step
model, mparams = toolbox.initialise("model_filtered.su")
# NMO-corrected gathers to be aligned against the model
gathers, params = toolbox.initialise("fk_nmo_gathers.su")
params['model'] = model
params['gate'] = (.4, 1.0)  #seconds - presumably the correlation window; confirm in toolbox.trim
params['maxshift'] = 4  #samples - maximum static shift searched

# estimate trim statics, apply them, then stack the corrected gathers
toolbox.trim(gathers, None, **params)
toolbox.apply_statics(gathers, None, **params)
stack = toolbox.stack(gathers, None, **params)

# gamma = -1: negative exponent - presumably reverses an earlier TAR gain; verify
params['gamma'] = -1
toolbox.tar(stack, None, **params)
stack.tofile("trim_stack2.su")
# Velocity-analysis picks for the dynamite line: initialise the dataset,
# apply true-amplitude recovery, then record per-CDP (time s, velocity) picks.
import toolbox
import numpy as np
import pylab

#--------------------------------------------------
# useful functions
#-------------------------------------------------
None  # placeholder - no helper functions defined yet

if __name__ == "__main__":

    #initialise dataset
    print "initialising dataset"
    workspace, params = toolbox.initialise('al_dynamite.su')

    #apply tar (true amplitude recovery)
    params['gamma'] = 5
    toolbox.tar(workspace, None, **params)

    #copy vels from previous exercise
    # keys are CDP numbers; values are tuples of (time, velocity) picks
    vels = {}
    vels[225] = (0.06, 1537.38) , (0.28, 2876.21) , (0.87, 4608.10)
    vels[270] = (0.05, 1525.09) , (0.18, 2483.16) , (0.36, 3171.00) , (0.66, 4079.93) , (0.98, 4816.90)
    vels[315] = (0.04, 1365.42) , (0.14, 2728.82) , (0.22, 3134.15) , (0.57, 4116.78) , (0.74, 4571.25) , (0.97, 5013.43)
    vels[360] = (0.04, 1697.05) , (0.10, 2520.01) , (0.21, 2937.62) , (0.43, 3244.70) , (0.64, 3981.67) , (0.98, 4239.61)
    vels[405] = (0.06, 1439.11) , (0.27, 2753.38) , (0.49, 3957.10) , (0.97, 5381.92)
    # NOTE(review): the picks below are not monotonic in time (0.52 is
    # followed by 0.02) - looks like two pick lists accidentally merged;
    # verify these values against the velocity analysis
    vels[450] = (0.06, 1340.85) , (0.41, 2741.10) , (0.52, 3625.47) , (0.02, 1144.32) , (0.29, 3060.45) , (0.54, 3711.45) , (0.97, 4313.31)
    vels[495] = (0.04, 1611.07) , (0.11, 3072.74) , (0.23, 3318.39) , (0.35, 3772.86) , (0.48, 3981.67) , (0.94, 5099.41)
# FK-filter test on a single shot record (fldr 221) from the preprocessed line.
import toolbox
import numpy as np
import pylab

#extract shot record
data, params = toolbox.initialise("prepro.su")
mask = data['fldr'] == 221
shot = data[mask].copy()

#agc, then display the raw shot as a wiggle plot ordered by fldr/tracf
toolbox.agc(shot, None, **params)
params['primary'] = 'fldr'
params['secondary'] = 'tracf'
params['wiggle'] = True
toolbox.display(shot, **params)

#fk plot
params['dx'] = 33.5 #m - trace spacing used by the FK transform
#~ toolbox.fk_view(shot, **params)

#~ #fk filter design - fkVelocity/fkSmooth control the filter shape; see toolbox.fk_design
params['fkVelocity'] = 2000
params['fkSmooth'] = 20
params['fkFilter'] = toolbox.fk_design(shot, **params)
shot = toolbox.fk_filter(shot, None, **params)
# display the filtered shot for comparison with the raw display above
toolbox.display(shot, **params)

##############end of testing
#~ data, nparams = toolbox.initialise("prepro.su")
# NOTE(review): this chunk is the tail of a wiggle-plot routine plus its
# driver; `panel`, `scalar`, `trace_centers`, `y`, `offsets`, `collections`
# and `func` are defined outside this view, and the original indentation
# was lost in extraction - confirm structure against the full file.

# trace amplitudes shifted onto their per-trace plotting x-positions
x = ((panel["trace"] * scalar) + trace_centers).ravel()
xlines = np.split(x, 284)  # 284 - presumably the trace count per panel; verify
ylines = np.split(y, 284)
lines = [zip(xlines[a], ylines[a]) for a in range(len(xlines))]
fig, ax = pylab.subplots()
# variable-area fill: blacken where the excursion exceeds the trace baseline
ax.fill_betweenx(y, offsets, x, where=(x > offsets), color="k")
col1 = collections.LineCollection(lines)
col1.set_color("k")
ax.add_collection(col1, autolim=True)
pylab.xlim([0, 284])
pylab.ylim([0, 1500])
ax.set_ylim(ax.get_ylim()[::-1])  # reversed y-axis so time/samples increase downwards
pylab.tight_layout()
pylab.show()

file = "/home/sfletcher/Downloads/2d_land_data/2D_Land_data_2ms/Line_001.su"
data, params = toolbox.initialise(file)
# memory-map the SU file directly; typeSU(1501) - dtype for 1501-sample traces
dmap = np.memmap(file, dtype=toolbox.typeSU(1501), mode="r")
eps = np.unique(dmap["ep"])
for ep in eps[:1]:  # first shot only while testing
    params["window"] = 500
    panel = dmap[dmap["ep"] == ep].copy()
    panel = toolbox.agc(panel, None, **params)
    func(panel)

# timing scaffold kept for reference:
# t = Timer("""func(panel)""", setup="from __main__ import func; from __main__ import panel")
# print t.timeit(100)
# 777.01802206
#processing a real dataset #step 1 - import dataset and check the gathers import toolbox import numpy as np import pylab if __name__ == "__main__": #import dataset print "initialising dataset" workspace, params = toolbox.initialise('al_dynamite.su') #set gather order to shot gather params['primary'] = 'sx' params['secondary'] = 'gx' #display toolbox.display(workspace, None, **params) pylab.show()
displays multiple stacks ''' import toolbox import pylab import numpy as np filelist = [ "1st_vels_stack.su", #"1st_vels_stack_elev.su", "fk_stack.su", #"model.su", "model_filtered.su", #"trim_stack.su", "trim_stack2.su", ] dataset, params = toolbox.initialise(filelist[0]) dataset['fldr'] = 0 for index, file in enumerate(filelist[1:]): data, junk = toolbox.initialise(file) data['fldr'] = index + 1 dataset = np.column_stack([dataset, data]) params['window'] = 1000 toolbox.agc(dataset, None, **params) params['primary'] = 'fldr' params['secondary'] = 'cdp' params['clip'] = 0.02 toolbox.display(dataset, **params) pylab.show()
# Initial stack of the field line: load preprocessed data, read the
# interpolated velocity field, NMO-correct, AGC, stack, and write it out.
import toolbox
import numpy as np
import pylab

data, params = toolbox.initialise('prepro.su')
cdps = np.unique(data['cdp'])

#recreate original velocity field (picks kept for reference)
#~ vels = {}
#~ vels[753]= (2456.0, 0.153), (2772.1, 0.413), (3003.2, 0.612), (3076.1, 0.704), (3270.7, 1.056), (3367.9, 1.668), (3538.2, 2.204), (3671.9, 3.566), (3915.1, 5.908),
#~ vels[3056]=(2456.0, 0.153), (2772.1, 0.413), (3003.2, 0.612), (3076.1, 0.704), (3270.7, 1.056), (3367.9, 1.668), (3538.2, 2.204), (3671.9, 3.566), (3915.1, 5.908),
#~ params['cdp'] = cdps
#~ params['vels'] = toolbox.build_vels(vels, **params)
#~ np.array(params['vels']).tofile('vels_initial.bin')

# velocity field cached to disk by the commented block above; one row per CDP
params['vels'] = np.fromfile('vels_initial.bin').reshape(-1, params['ns'])
#~ pylab.imshow(params['vels'].T, aspect='auto')
#~ pylab.colorbar()
#~ pylab.show()

#agc and stack
toolbox.agc(data, None, **params)
params['smute'] = 200  # stretch-mute threshold - confirm units in toolbox.nmo
toolbox.nmo(data, None, **params)
stack = toolbox.stack(data, None, **params)
# negative gamma - presumably reverses an earlier TAR gain; verify
params['gamma'] = -1
toolbox.tar(stack, None, **params)
stack.tofile('field_stack.su')

#display
# Preprocessing: trim the geometry dataset, apply TAR, remove bad shot
# records, and write the dataset used by the rest of the flow.
import toolbox
import numpy as np
import pylab

#initialise dataset (one-off trim, kept for reference)
#~ data, params = toolbox.initialise("geometries.su")

#trim data
#~ params['ns'] = 1500
#~ data = toolbox.slice(data, None, **params)
#~ data.tofile("geom_short.su")

#initialise dataset
data, params = toolbox.initialise("geom_short.su")

#agc
#~ toolbox.agc(data, None, **params)
params['gamma'] = 1.5
toolbox.tar(data, None, **params)

# shot records picked by inspection for removal
kills = [270, 300, 374, 614] #fldr
# NOTE(review): assumes build_mask returns False at the killed fldr values
# so that data[mask] drops them - confirm in toolbox
mask = toolbox.build_mask(data['fldr'], kills)
data = data[mask]
data.tofile("prepro.su")

#display
#~ params['primary'] = 'fldr'
#~ params['secondary'] = 'tracf'
# First velocity stack: NMO with the full velocity field, AGC, stack,
# then apply statics and write the result.
import toolbox
import numpy as np
import pylab

# earlier parameter sweep kept for reference
#~ for mute in range(110,200, 10):

#initialise
data, params = toolbox.initialise('prepro.su')

#load vels - one interpolated velocity trace per CDP
params['vels'] = np.fromfile('vels_full.bin').reshape(-1, params['ns'])
params['smute'] = 150 #mute - value chosen after the 110-200 sweep above

#normal moveout correction
toolbox.nmo(data, None, **params)

#AGC
toolbox.agc(data, None, **params)

#stack
stack = toolbox.stack(data, None, **params)
# negative gamma - presumably reverses the earlier TAR gain; verify
params['gamma'] = -1
toolbox.tar(stack, None, **params)

# NOTE(review): this halves tstat on the pre-stack `data` array, but the
# statics are then applied to `stack`; unless stack shares headers with
# data, this line has no effect on the output - was `stack['tstat'] /= 2`
# intended? Confirm against toolbox.apply_statics.
data['tstat'] /= 2
toolbox.apply_statics(stack, None, **params)

#~ stack.tofile("smute_%d.su" %mute)
stack.tofile("1st_vels_stack_elev.su")

# i think I like 150
# Post-stack cleanup: FK filter plus bandpass on the stack, producing the
# model used later for trim statics.
import toolbox
import numpy as np
import pylab

stack, params = toolbox.initialise("fk_stack.su")
stack['fldr'] = 1

# half of the 33.5 m spacing used pre-stack - presumably the CDP interval; verify
params['dx'] = 33.5/2.0 #m
params['fkVelocity'] = 6000
params['fkSmooth'] = 20
params['fkFilter'] = toolbox.fk_design(stack, **params)
stack = toolbox.fk_filter(stack, None, **params)

#bandpass 10-100 Hz
params['lowcut'] = 10.0
params['highcut'] = 100.0
toolbox.bandpass(stack, None, **params)

stack.tofile("model_filtered.su")

#display
#~ params['primary'] = None
#~ params['secondary'] = 'cdp'
#~ toolbox.display(stack, **params)
#~ pylab.show()
#spectral analysis, bandpass filters #test a few filters to find the best import toolbox import numpy as np import pylab #-------------------------------------------------- # useful functions #------------------------------------------------- None if __name__ == "__main__": #initialise dataset print "initialising dataset" workspace, params = toolbox.initialise('stack100.su') params['primary'] = None #basic spectral analysis #~ toolbox.fx(workspace, None, **params) params['highcut'] = 100 params['lowcut'] = 30 toolbox.bandpass(workspace, None, **params) toolbox.display(workspace, None, **params) pylab.show()
#----------------------------------------------------------------------- # useful functions #----------------------------------------------------------------------- None #----------------------------------------------------------------------- # main functions #----------------------------------------------------------------------- if __name__ == "__main__": #intialise workspace and parameter dictionary print 'initialising' workspace, params = toolbox.initialise('../../../../foybrook.su') cdp400 = workspace[workspace['cdp'] ==480] toolbox.agc(cdp400, None, **params) params['velocities'] = np.arange(2000,7000,50) toolbox.semb(cdp400, **params) #set our TAR #~ print "applying tar" #~ params['gamma'] = 3 #~ tar(workspace, None, **params) #apply LMO #~ print "applying lmo" #~ params['lmo'] =1000.0
# Quick QC: FX (frequency-space) display of the initial field stack.
import toolbox
import numpy as np
import pylab

stack, params = toolbox.initialise('field_stack.su')

# draw the FX spectrum via the toolbox helper, then show the figure
toolbox.fx(stack, None, **params)
pylab.show()
#shift from amplitudes to plotting coordinates x_shift, y = y[order].__divmod__(ns) ax.plot(x[order] * scalar + x_shift + 1, y, 'k') x[x < 0] = np.nan x = x[order] * scalar + x_shift + 1 ax.fill(x, y, 'k', aa=True) ax.set_xlim([0, nt]) ax.set_ylim([ns, 0]) pylab.tight_layout() pylab.show() if __name__ == "__main__": file = "/home/stewart/su/2d_land_data/2D_Land_data_2ms/su/Line_001.su" #file = "/home/sfletcher/Downloads/2d_land_data/2D_Land_data_2ms/Line_001.su" data, params = toolbox.initialise(file) eps = np.unique(data['ep']) for ep in eps[:1]: frame = data[data['ep'] == ep] params['window'] = 500 toolbox.agc(frame, None, **params) wiggle(frame, scale=2) #~ trace_centers = np.linspace(1,284, panel.size).reshape(-1,1) #~ scalar = 284/(panel.size*0.1) #~ panel['trace'][:,-1] = np.nan #~ x = panel['trace'].ravel() #~ y = np.arange(x.size) #~ dx = np.signbit(x)
displays multiple stacks ''' import toolbox import pylab import numpy as np filelist = ["1st_vels_stack.su", #"1st_vels_stack_elev.su", "fk_stack.su", #"model.su", "model_filtered.su", #"trim_stack.su", "trim_stack2.su", ] dataset, params = toolbox.initialise(filelist[0]) dataset['fldr'] = 0 for index, file in enumerate(filelist[1:]): data, junk = toolbox.initialise(file) data['fldr'] = index + 1 dataset = np.column_stack([dataset, data]) params['window'] = 1000 toolbox.agc(dataset, None, **params) params['primary'] = 'fldr' params['secondary'] = 'cdp' params['clip'] = 0.02 toolbox.display(dataset, **params) pylab.show()
#this time we want to do a few more locations import toolbox import numpy as np import pylab #-------------------------------------------------- # useful functions #------------------------------------------------- None if __name__ == "__main__": #initialise dataset print "initialising dataset" workspace, params = toolbox.initialise('foybrook.su') #apply TAR print "applying true amplitude recovery" params['gamma'] = 3 toolbox.tar(workspace, None, **params) #lets see how many cdps there are print np.unique(workspace['cdp'])[25::45].tolist() params['smoother'] = 5 #copy your list of cdps here... it will make it easier later cdps = [219, 264, 309, 354, 399, 444, 489, 534, 579] #~ params['velocities'] = np.arange(2000,6000,50)