# Shared imports assumed by the snippets below (toolbox is the local SU-format helper module):
import numpy as np
import pylab
from matplotlib.widgets import Slider
import toolbox


def display(input, agc=1):
    '''displays a gather using imshow, with interactive vmin/vmax sliders'''

    def update(val):
        # slider callback: rescale the image colour limits
        vmax = smax.val
        vmin = smin.val
        im.set_clim(vmax=vmax, vmin=vmin)
        fig.canvas.draw_idle()

    fig = pylab.figure()
    dataset = toolbox.read(input)
    # colour limits come from the raw amplitudes, even if AGC is applied below
    vmax = np.amax(dataset['trace'])
    vmin = np.amin(dataset['trace'])
    if agc:
        dataset = toolbox.agc(dataset)
    im = pylab.imshow(dataset['trace'].T,
                      aspect='auto',
                      cmap='spectral',  # renamed 'nipy_spectral' in newer matplotlib
                      vmax=vmax,
                      vmin=vmin)
    pylab.colorbar()
    axcolor = 'lightgoldenrodyellow'
    # rect = [left, bottom, width, height] in normalized (0, 1) units
    axmax = pylab.axes([0.08, 0.06, 0.65, 0.01], axisbg=axcolor)
    smax = Slider(axmax, 'vmax', vmin, vmax, valinit=vmax)
    smax.on_changed(update)
    axmin = pylab.axes([0.08, 0.03, 0.65, 0.01], axisbg=axcolor)
    smin = Slider(axmin, 'vmin', vmin, vmax, valinit=vmin)
    smin.on_changed(update)

    pylab.show()
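# Usage sketch ('gather.su' is a placeholder for any SU file that toolbox.read can open):
#     display('gather.su', agc=1)   # AGC'd image with interactive vmin/vmax sliders
#     display('gather.su', agc=0)   # raw amplitudes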
Example #2
def initialise(file, memmap=False, scan=False):
    #initialise an empty parameter dictionary
    #kwargs stands for keyword arguments
    kwargs = {}
    #load file
    if memmap:
        ns = np.fromfile(file, dtype=toolbox.su_header_dtype, count=1)['ns']
        sutype = toolbox.typeSU(ns)
        dataset = np.memmap(file, dtype=sutype)
    else:
        dataset = toolbox.read(file)

    #allocate stuff
    ns = kwargs['ns'] = dataset['ns'][0]
    dt = kwargs['dt'] = dataset['dt'][0] / 1e6  #dt header is in microseconds; convert to seconds

    #also add the time vector - it's useful later
    kwargs['times'] = np.arange(0, dt * ns, dt)

    dataset['trace'] /= np.amax(dataset['trace'])
    dataset['tracr'] = np.arange(dataset.size)

    kwargs['primary'] = 'cdp'
    kwargs['secondary'] = 'offset'
    kwargs['cdp'] = np.sort(np.unique(dataset['cdp']))
    kwargs['step'] = 1

    if scan:
        toolbox.scan(dataset)
    return dataset, kwargs
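# Usage sketch ('line_001.su' is a placeholder filename); memmap=True maps the file on
# disk instead of reading it all into memory, and scan=True also runs toolbox.scan on it:
#     dataset, params = initialise('line_001.su', memmap=True, scan=True)
#     print(params['ns'], params['dt'])   # samples per trace, sample interval in seconds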
Example #3
def initialise(file, memmap=False, scan=False):
    #initialise an empty parameter dictionary
    #kwargs stands for keyword arguments
    kwargs = {}
    #load file (memory-map it rather than reading it all when memmap is True)
    if memmap:
        ns = np.fromfile(file, dtype=toolbox.su_header_dtype, count=1)['ns']
        sutype = toolbox.typeSU(ns)
        dataset = np.memmap(file, dtype=sutype)
    else:
        dataset = toolbox.read(file)

    #allocate stuff
    ns = kwargs['ns'] = dataset['ns'][0]
    dt = kwargs['dt'] = dataset['dt'][0] / 1e6

    #also add the time vector - it's useful later
    kwargs['times'] = np.arange(0, dt * ns, dt)

    dataset['trace'] /= np.amax(dataset['trace'])
    dataset['tracr'] = np.arange(dataset.size)

    kwargs['primary'] = 'cdp'
    kwargs['secondary'] = 'offset'
    kwargs['cdp'] = np.sort(np.unique(dataset['cdp']))
    kwargs['step'] = 1

    if scan:
        toolbox.scan(dataset)
    return dataset, kwargs
def initialise(file):
    #initialise an empty parameter dictionary
    #kwargs stands for keyword arguments
    kwargs = {}
    #load file
    dataset = toolbox.read(file)

    #allocate stuff
    ns = kwargs['ns'] = dataset['ns'][0]
    dt = kwargs['dt'] = dataset['dt'][0] / 1e6

    #also add the time vector - it's useful later
    kwargs['times'] = np.arange(0, dt * ns, dt)

    dataset['trace'] /= np.amax(dataset['trace'])
    dataset['tracr'] = np.arange(dataset.size)

    kwargs['primary'] = 'cdp'
    kwargs['secondary'] = 'offset'
    kwargs['cdp'] = np.sort(np.unique(dataset['cdp']))
    kwargs['step'] = 1

    toolbox.scan(dataset)
    return dataset, kwargs
Example #5
def initialise(file):
    #initialise an empty parameter dictionary
    #kwargs stands for keyword arguments
    kwargs = {}
    #load file
    dataset = toolbox.read(file)

    #allocate stuff
    ns = kwargs['ns'] = dataset['ns'][0]
    dt = kwargs['dt'] = dataset['dt'][0] / 1e6

    #also add the time vector - it's useful later
    kwargs['times'] = np.arange(0, dt * ns, dt)

    dataset['trace'] /= np.amax(dataset['trace'])
    dataset['tracr'] = np.arange(dataset.size)

    kwargs['primary'] = 'cdp'
    kwargs['secondary'] = 'offset'
    kwargs['cdp'] = np.sort(np.unique(dataset['cdp']))
    kwargs['step'] = 1

    toolbox.scan(dataset)
    return dataset, kwargs
def display(input, agc=1):
    '''displays a gather using imshow, with interactive vmin/vmax sliders'''

    def update(val):
        # slider callback: rescale the image colour limits
        vmax = smax.val
        vmin = smin.val
        im.set_clim(vmax=vmax, vmin=vmin)
        fig.canvas.draw_idle()

    fig = pylab.figure()
    dataset = toolbox.read(input)
    # colour limits come from the raw amplitudes, even if AGC is applied below
    vmax = np.amax(dataset['trace'])
    vmin = np.amin(dataset['trace'])
    if agc:
        dataset = toolbox.agc(dataset)
    im = pylab.imshow(dataset['trace'].T, aspect='auto', cmap='spectral', vmax=vmax, vmin=vmin)
    pylab.colorbar()
    axcolor = 'lightgoldenrodyellow'
    # rect = [left, bottom, width, height] in normalized (0, 1) units
    axmax = pylab.axes([0.08, 0.06, 0.65, 0.01], axisbg=axcolor)
    smax = Slider(axmax, 'vmax', vmin, vmax, valinit=vmax)
    smax.on_changed(update)
    axmin = pylab.axes([0.08, 0.03, 0.65, 0.01], axisbg=axcolor)
    smin = Slider(axmin, 'vmin', vmin, vmax, valinit=vmin)
    smin.on_changed(update)

    pylab.show()
def slice(input, output, key, values):
    dataset = toolbox.read(input)
    #start with an all-False mask (header values are finite integers, so ~isfinite is all False)
    mask = ~np.isfinite(dataset['tracl'])
    for value in values:
        mask = (dataset[key] == value) | mask
    toolbox.write(dataset[mask], output)
def slice(input, output, key, values):
    dataset = toolbox.read(input)
    #start with an all-False mask (header values are finite integers, so ~isfinite is all False)
    mask = ~np.isfinite(dataset['tracl'])
    for value in values:
        mask = (dataset[key] == value) | mask
    toolbox.write(dataset[mask], output)
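# Usage sketch (placeholder filenames): keep only the traces whose 'cdp' header matches
# one of the listed values and write them to a new SU file:
#     slice('line.su', 'cdps_200_210.su', 'cdp', [200, 205, 210])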
def stack(input, output):
	dataset = toolbox.read(input)
	cdps = np.unique(dataset['cdp'])
	sutype = np.result_type(dataset)
	holder = np.zeros(cdps.size, dtype=sutype)
	for index, cdp in enumerate(cdps):
		gather = dataset[dataset['cdp'] == cdp]
		trace = _stack_gather(gather)
		holder[index] = trace
	toolbox.write(holder, output)
def stack(input, output):
    dataset = toolbox.read(input)
    cdps = np.unique(dataset['cdp'])
    sutype = np.result_type(dataset)
    holder = np.zeros(cdps.size, dtype=sutype)
    for index, cdp in enumerate(cdps):
        gather = dataset[dataset['cdp'] == cdp]
        trace = _stack_gather(gather)
        holder[index] = trace
    toolbox.write(holder, output)
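# Usage sketch (placeholder filenames): reduce every CDP gather in the input to a single
# stacked trace per CDP; the input is assumed to have been NMO-corrected beforehand:
#     stack('nmo_gathers.su', 'stacked_section.su')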
def semb(input, vels):
	dataset = toolbox.read(input)
	nvels = vels.size
	ns = dataset['ns'][0]
	result = np.zeros((nvels,ns),'f')
	for v in range(nvels):
		gather = nmo(input, None, [vels[v]], [0],  20)
		gather = toolbox.agc(gather)
		result[v,:] += np.abs(_stack_gather(gather)['trace'])
		
		
	pylab.imshow(result.T, aspect='auto', extent=(800.,4400.,1.,0.), cmap='spectral', vmin=0, vmax=0.6)
	pylab.colorbar()
	pylab.show()
def nmo(input, output, vels, times, smute):
	dataset = toolbox.read(input)
	nt = dataset.size
	holder = dataset.copy()
	holder['trace'].fill(0)
	
	vector = build_vel_trace(times, vels)

	for i in range(nt):
		holder[i]['trace'] += _nmo_trace(dataset[i], vector, smute)['trace']
	if output:
		toolbox.write(holder, output)
	else:
		return holder
def nmo(input, output, vels, times, smute):
    dataset = toolbox.read(input)
    nt = dataset.size
    holder = dataset.copy()
    holder['trace'].fill(0)

    vector = build_vel_trace(times, vels)

    for i in range(nt):
        holder[i]['trace'] += _nmo_trace(dataset[i], vector, smute)['trace']
    if output:
        toolbox.write(holder, output)
    else:
        return holder
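# Usage sketch (placeholder filename and velocity): apply a constant-velocity NMO
# correction; with output=None the corrected gather is returned instead of written.
# The smute value of 20 matches the call made inside semb:
#     corrected = nmo('cdp_gather.su', None, [2500.0], [0], 20)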
def semb(input, vels):
    dataset = toolbox.read(input)
    nvels = vels.size
    ns = dataset['ns'][0]
    result = np.zeros((nvels, ns), 'f')
    for v in range(nvels):
        gather = nmo(input, None, [vels[v]], [0], 20)
        gather = toolbox.agc(gather)
        result[v, :] += np.abs(_stack_gather(gather)['trace'])

    pylab.imshow(result.T,
                 aspect='auto',
                 extent=(800., 4400., 1., 0.),
                 cmap='spectral',
                 vmin=0,
                 vmax=0.6)
    pylab.colorbar()
    pylab.show()
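# Usage sketch (placeholder filename): scan a range of trial stacking velocities;
# 800-4400 m/s matches the hard-coded extent of the semblance plot above:
#     semb('cdp_gather.su', np.arange(800.0, 4400.0, 100.0))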
Example #15
def initialise(file):
    #initialise an empty parameter dictionary
    #kwargs stands for keyword arguments
    kwargs = {}
    #load file
    dataset = toolbox.read(file)

    #allocate stuff
    ns = kwargs['ns'] = dataset['ns'][0]
    dt = kwargs['dt'] = dataset['dt'][0] / 1e6

    #also add the time vector - it's useful later (np.linspace needs an integer sample count)
    kwargs['times'] = np.linspace(dt, ns * dt, int(ns * dt * 1000))

    dataset['trace'] /= np.amax(dataset['trace'])
    kwargs['primary'] = 'sx'
    kwargs['secondary'] = 'gx'
    kwargs['step'] = 1

    toolbox.scan(dataset)
    return dataset, kwargs
def agc(input, output, window=100):
	dataset = toolbox.read(input)
	dataset = toolbox.agc(dataset, window=window)
	toolbox.write(dataset, output)
def scan_headers(input):
    dataset = toolbox.read(input)
    print(dataset.shape)
    for key, t in toolbox.su_header_dtype.descr:
        print(key, np.amin(dataset[key]), np.amax(dataset[key]))
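# Usage sketch (placeholder filename): print the min/max of every SU header word,
# which is handy for checking sort keys before windowing or stacking:
#     scan_headers('line.su')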
def agc(input, output, window=100):
    dataset = toolbox.read(input)
    dataset = toolbox.agc(dataset, window=window)
    toolbox.write(dataset, output)
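# Usage sketch (placeholder filenames): apply toolbox.agc with the default window
# length of 100 and write the gained data to a new SU file:
#     agc('raw_gathers.su', 'agc_gathers.su', window=100)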
Example #19
def window_rms(arr=None, window_size=100):
    #moving RMS amplitude: mean of the squared samples over a sliding window, then sqrt
    a2 = np.power(arr, 2)
    window = np.ones(window_size) / float(window_size)
    return np.sqrt(np.convolve(a2, window, 'valid'))
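# Minimal self-contained check (pure NumPy, no toolbox needed): a unit-amplitude
# 25 Hz sine has an RMS of roughly 0.707, and 'valid' mode trims the window edges:
#     t = np.linspace(0.0, 1.0, 1000)
#     trace = np.sin(2 * np.pi * 25.0 * t)
#     envelope = window_rms(trace, window_size=100)   # length 1000 - 100 + 1 = 901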


@_2d
def scale(dataset):
    dataset['trace'] /= np.amax(np.abs(dataset['trace']))


if __name__ == "__main__":

    file = "/home/sfletcher/Downloads/2d_land_data/2D_Land_data_2ms/Line_001.su"

    data = toolbox.read(file)

    data = data[data['tracf'] > 0]

    toolbox.scan(data)

    ffids = np.unique(data['fldr'])

    parms = {}

    parms['dt'] = data['dt'][0]

    for shot in ffids:
        panel = data[data['fldr'] == shot]
        #print shot
def scan_headers(input):
    dataset = toolbox.read(input)
    print(dataset.shape)
    for key, t in toolbox.su_header_dtype.descr:
        print(key, np.amin(dataset[key]), np.amax(dataset[key]))