Example #1
 def test_weekday(self):
     """
     Tests weekday method.
     """
     dt = UTCDateTime(2008, 10, 1, 12, 30, 35, 45020)
     self.assertEquals(dt.weekday, 2)
     self.assertEquals(dt._getWeekday(), 2)
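Monday is 0 in this convention, so the expected value 2 means 2008-10-01 fell on a Wednesday; the standard library confirms it:

import datetime
print(datetime.date(2008, 10, 1).weekday())  # 2 -> Wednesday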
Example #2
def datetimefmt(origin):    # e.g. 1999-01-31, 00:12:24.123
    """Split an origin time into date and millisecond-precision time strings."""
    o = UTCDateTime(origin)
    o = UTCDateTime(round(o.timestamp, 3))  # round to milliseconds
    date = o.strftime("%Y-%m-%d")
    time = o.strftime("%H:%M:%S.%f")[0:12]  # truncate microseconds to three digits

    return [date, time]
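A minimal usage sketch (the input value is hypothetical):

print(datetimefmt("1999-01-31T00:12:24.1234"))
# ['1999-01-31', '00:12:24.123']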
Example #3
 def test_toordinal(self):
     """
     Short test if toordinal() is working.
     Matplotlib's date2num() function depends on this which is used a lot in
     plotting.
     """
     dt = UTCDateTime("2012-03-04T11:05:09.123456Z")
     self.assertEquals(dt.toordinal(), 734566)
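The expected ordinal can be cross-checked with the standard library alone:

import datetime
print(datetime.date(2012, 3, 4).toordinal())  # 734566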
Example #4
def DateTime2String(dt, compact=False):
    """
    Generates a valid SEED time string from a UTCDateTime object.
    """
    if isinstance(dt, UTCDateTime):
        return dt.formatSEED(compact)
    elif isinstance(dt, basestring):
        dt = dt.strip()
    if not dt:
        return ""
    try:
        dt = UTCDateTime(dt)
        return dt.formatSEED(compact)
    except Exception:
        raise Exception("Invalid datetime %s: %s" % (type(dt), str(dt)))
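The compact SEED strings this helper produces are the same ones asserted in Example #21 below; a minimal usage sketch:

print(DateTime2String(UTCDateTime("2010-01-01"), compact=True))    # 2010,001
print(DateTime2String("2010-01-01T12:34:56", compact=True))        # 2010,001,12:34:56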
Example #5
def printOutput(output):
	# 'output' is the (status, text) tuple returned by commands.getstatusoutput()
	output = output[1].split('\n')
	fob = open(logFilepath, 'a')
	for line in output:
		if 'cal found' in line:
			fob.write(line + '\n')
	fob.write(str(UTCDateTime.now()))
	fob.close()
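A hypothetical call site, mirroring how addNewCals.py is invoked in Example #8 (the network/station values are placeholders, and the module-level logFilepath must already be set):

import commands
printOutput(commands.getstatusoutput('python ./addNewCals.py -n IU -s ANMO -d 2014,001'))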
Example #6
 def test_toPythonDateTimeObjects(self):
     """
     Tests getDate, getTime, getTimestamp and getDateTime methods.
     """
     dt = UTCDateTime(1970, 1, 1, 12, 23, 34, 456789)
     # as function
     self.assertEquals(dt._getDate(), datetime.date(1970, 1, 1))
     self.assertEquals(dt._getTime(), datetime.time(12, 23, 34, 456789))
     self.assertEquals(dt._getDateTime(),
                       datetime.datetime(1970, 1, 1, 12, 23, 34, 456789))
     self.assertAlmostEquals(dt._getTimeStamp(), 44614.456789)
     # as property
     self.assertEquals(dt.date, datetime.date(1970, 1, 1))
     self.assertEquals(dt.time, datetime.time(12, 23, 34, 456789))
     self.assertEquals(dt.datetime,
                       datetime.datetime(1970, 1, 1, 12, 23, 34, 456789))
     self.assertAlmostEquals(dt.timestamp, 44614.456789)
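Since the date is the epoch day itself (1970-01-01), the expected timestamp is just the wall-clock time expressed in seconds:

print(12 * 3600 + 23 * 60 + 34.456789)  # 44614.456789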
Example #7
def main():
    stations = 'PB01 PB02 PB03 PB04 PB05'

    component = 'Z'
    t1 = UTC('2009-06-01')
    t2 = UTC('2009-07-01')

    data = IPOC('test', use_local_LVC=False)
    data.setXLogger('_test')
    period = 24 * 3600
    ax = None
    plt.ion()
    for station in stations.split():
        pxxs = []
        freqs_old = None
        i = 0
        for t in timegen(t1, t2, period):
            st = data.getRawStreamFromClient(t, t + period, station, component)
            st.merge(method=1, interpolation_samples=10, fill_value='interpolate')
            print st
            pxx, freqs = st.plotPSD(just_calculate=True)
            assert freqs_old is None or np.all(freqs == freqs_old)
            freqs_old = freqs
            if max(pxx[4:]) > 1e7:
                print 'discard'
                i += 1
                continue
            pxxs.append(pxx)
        pxx = sum(pxxs) / len(pxxs)
        del pxxs
        tr = Trace(data=pxx,
                   header=dict(is_fft=True, sampling_rate=2 * max(freqs),
                               freq_min=min(freqs), freq_max=max(freqs)))
        ax = tr.plotPSD(ax=ax, label='%s-%d' % (st[0].stats.station, i), figtitle=None)
        plt.draw()
        # embed()
    ax.legend()
    fig = ax.get_figure()
    fig.suptitle('%s  %s  %s to %s' % (stations, component, t1.strftime('%Y-%m-%d'),
                                       t2.strftime('%Y-%m-%d')))
    plt.ioff()
    plt.show()
Example #8
def checkNetsta(netsta):
	global network, station
	network, station = netsta.split('/')[-1].split('_')
	#sets up the commands for running addNewCals for given years and stations
	for year in xrange(int(byear), int(eyear) + 1):
		#if the year is the beginning year and not the ending year
		if year == int(byear) and year != int(eyear):
			print str(UTCDateTime.now()).split('.')[0].replace('T',' '), 'Checking', network, station.ljust(4), year, bjday, '- 366', 'for calibrations'
			#starting on the beginning day and ending at the end of the current year
			for day in xrange(int(bjday), 366 + 1):
				output = commands.getstatusoutput('python ./addNewCals.py -n ' + network + ' -s ' + station + ' -d ' + str(year) + ',' + str(day).zfill(3))[1]
				#if there is a calibration, proceed
				if output != '':
					print output
		#if the year is in between the beginning year and ending year
		elif int(byear) < year < int(eyear):
			print str(UTCDateTime.now()).split('.')[0].replace('T',' '), 'Checking', network, station.ljust(4), year, '001 - 366', 'for calibrations'
			#proceed from day 001 to day 366
			for day in xrange(1, 366 + 1):
				output = commands.getstatusoutput('python ./addNewCals.py -n ' + network + ' -s ' + station + ' -d ' + str(year) + ',' + str(day).zfill(3))[1]
				#if there is a calibration, proceed
				if output != '':
					print output
		#if the year is the ending year and not the beginning year
		elif year == int(eyear) and year != int(byear):
			print str(UTCDateTime.now()).split('.')[0].replace('T',' '), 'Checking', network, station.ljust(4), year, '001 -', ejday, 'for calibrations'
			#proceed form day 001 to the ending day
			for day in xrange(1, int(ejday) + 1):
				output = commands.getstatusoutput('python ./addNewCals.py -n ' + network + ' -s ' + station + ' -d ' + str(year) + ',' + str(day).zfill(3))[1]
				#if there is a calibration, proceed
				if output != '':
					print output
		#if the year is the beginning year and ending year
		elif year == int(byear) == int(eyear):
			print str(UTCDateTime.now()).split('.')[0].replace('T',' '), 'Checking', network, station.ljust(4), year, bjday, '-', ejday, 'for calibrations'
			#proceed from the beginning day to the ending day
			for day in xrange(int(bjday), int(ejday) + 1):
				output = commands.getstatusoutput('python ./addNewCals.py -n ' + network + ' -s ' + station + ' -d ' + str(year) + ',' + str(day).zfill(3))[1]
				#if there is a calibration, proceed
				if output != '':
					print output
Example #9
def get_calibrations(file_name):
	calibrations = []

	#Read the first file and get the record length from blockette 1000
	fh = open(file_name, 'rb')
	record = fh.read(256)
	index = struct.unpack('>H', record[46:48])[0]
	file_stats = os.stat(file_name)
	record_length = 2 ** struct.unpack('>B', record[index+6:index+7])[0]
	
	#Get the total number of records
	total_records = file_stats[stat.ST_SIZE] / record_length
	
	#Now loop through the records and look for calibration blockettes
	for rec_idx in xrange(0,total_records):
		fh.seek(rec_idx * record_length,0)
		record = fh.read(record_length)
		next_blockette = struct.unpack('>H', record[46:48])[0]
		while next_blockette != 0:
			index = next_blockette
			blockette_type, next_blockette = struct.unpack('>HH', record[index:index+4])	
			if blockette_type in (300, 310, 320, 390):
				if debug:
					print 'We have a calibration blockette'
				year,jday,hour,minute,sec,_,tmsec,_,cal_flags,duration = tuple(struct.unpack('>HHBBBBHBBL', record[index+4:index+20]))
				stime = UTCDateTime(year=year,julday=jday,hour=hour,minute=minute,second=sec)
				if debug:
					print(stime.ctime())
				if blockette_type == 300:
					step_count,_,_,ntrvl_duration,amplitude,cal_input = struct.unpack('>BBLLf3s', record[index+14:index+31])
					calibrations.append({'type':'step','amplitude': amplitude,'number':step_count,'start_time':stime,'duration':duration/10000.0,'interval_duration':ntrvl_duration})
				if blockette_type == 310:
					signal_period,amplitude,cal_input = struct.unpack('>ff3s', record[index+20:index+31])
					calibrations.append({'type':'sine','amplitude': amplitude, 'period': signal_period,'start_time':stime,'duration':duration/10000.0})
				if blockette_type in (320, 390):
					amplitude,cal_input = struct.unpack('>f3s', record[index+20:index+27])
					calibrations.append({'type':'random','amplitude': amplitude,'start_time':stime,'duration':duration/10000.0})
	fh.close()
	return calibrations
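A minimal usage sketch (the file name is a placeholder for any miniSEED file):

for cal in get_calibrations('station_day.mseed'):
    print('%s at %s, amplitude %s' % (cal['type'], cal['start_time'], cal['amplitude']))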
Example #10
    def plot_graph(self):
        now = UTCDateTime()
        if self.drum_plot:
            self.stop_time = UTCDateTime(
                now.year, now.month, now.day, now.hour, 0, 0) + 3600
            self.start_time = self.stop_time - self.args.backtrace_time
        else:
            self.start_time = now - self.backtrace
            self.stop_time = now

        with self.lock:
            # leave some data left of our start for possible processing
            self.stream.trim(
                starttime=self.start_time - 120, nearest_sample=False)
            stream = self.stream.copy()

        try:
            logging.info(str(stream.split()))
            if not stream:
                raise Exception("Empty stream for plotting")

            if self.drum_plot or OBSPY_VERSION < [0, 10]:
                stream.merge()
                stream.trim(starttime=self.start_time, endtime=self.stop_time,
                            pad=True, nearest_sample=False)
            else:
                stream.merge(-1)
                stream.trim(starttime=self.start_time, endtime=self.stop_time)
            if self.drum_plot:
                self.plot_drum(stream)
            else:
                self.plot_lines(stream)
        except Exception as e:
            logging.error(e)
        self.after(int(self.args.update_time * 1000), self.plot_graph)
Example #11
fault_name = 'nepal.fault'  #Fault geometry
station_file = 'nepal.sta'  #Station distribution
GF_list = 'gps.gflist'  #What GFs are to be computed for each station
G_from_file = False
G_name = 'nepal'
NFFT = 512
dt = 0.2  #Time parameters
dk = 0.2
pmin = 0
pmax = 1
kmax = 10  #fk integration parameters
custom_stf = None
################################################################################

############                 Synthetics parameters               ###############
time_epi = UTCDateTime('2015-04-25T06:11:26')
epicenter = array([84.708, 28.147, 15])
resample = None  #Resample synthetics to this rate (in Hz)
integrate = 0  #=0 produces velocities, =1 makes displacements
beta = 0  #Rake offset, usually a good idea to keep at zero
num_windows = 1
rupture_speed = 3.0  #Only necessary if onset times are not identified in rupt file
stf_type = 'dreger'
################################################################################

#Initialize project folders
if init == 1:
    runslip.init(home, project_name)

# Run Green's functions
if make_green == 1:
Example #12
 def test_getEventsList(self):
     """
     Testing event request method.
     """
     client = Client()
     # 1
     results = client.getEvents(format="list",
                                min_depth=-700,
                                max_datetime="2005-01-01")
     expected = [{
         'author': u'EMSC',
         'event_id': u'20040312_0000026',
         'origin_id': 1347097,
         'longitude': 57.143,
         'datetime': UTCDateTime('2004-03-12T22:48:05Z'),
         'depth': -700.0,
         'magnitude': 4.4,
         'magnitude_type': u'mb',
         'latitude': 26.303,
         'flynn_region': u'SOUTHERN IRAN'
     }]
     self.assertEquals(results, expected)
     # 2
     results = client.getEvents(format="list",
                                min_latitude=-95,
                                max_latitude=-1,
                                min_longitude=20,
                                max_longitude=90,
                                max_datetime="2005-01-01")
     expected = [{
         'author': u'NEIR',
         'event_id': u'20041016_0000009',
         'origin_id': 120690,
         'longitude': 33.682,
         'datetime': UTCDateTime('2004-10-16T01:29:14Z'),
         'depth': -10.0,
         'magnitude': 5.0,
         'magnitude_type': u'm ',
         'latitude': -46.394,
         'flynn_region': u'PRINCE EDWARD ISLANDS REGION'
     }]
     self.assertEquals(results, expected)
     # 3
     results = client.getEvents(format="list",
                                min_depth=-11,
                                max_depth=-22.33,
                                min_magnitude=6.6,
                                max_magnitude=7,
                                max_datetime="2005-01-01")
     expected = [{
         'author': u'EMSC',
         'event_id': u'20001206_0000014',
         'origin_id': 1441886,
         'longitude': 54.843,
         'datetime': UTCDateTime('2000-12-06T17:11:05Z'),
         'depth': -11.4,
         'magnitude': 6.7,
         'magnitude_type': u'mb',
         'latitude': 39.604
     }, {
         'author': u'EMSC',
         'event_id': u'20010210_0000010',
         'origin_id': 1438991,
         'longitude': 43.784,
         'datetime': UTCDateTime('2001-02-10T18:21:57Z'),
         'depth': -17.0,
         'magnitude': 6.6,
         'magnitude_type': u'mb',
         'latitude': 12.045,
         'flynn_region': u'NEAR THE COAST OF YEMEN'
     }]
     self.assertEquals(results, expected)
     # 4
     results = client.getEvents(format="list",
                                author="EMSC",
                                max_results=3,
                                magnitude_type="mw",
                                min_magnitude=4,
                                max_datetime="2005-01-01")
     expected = [{
         'author': u'EMSC',
         'event_id': u'19980110_0000006',
         'origin_id': 1500183,
         'longitude': 20.816,
         'datetime': UTCDateTime('1998-01-10T19:21:55Z'),
         'depth': -10.0,
         'magnitude': 5.5,
         'magnitude_type': u'mw',
         'latitude': 37.243,
         'flynn_region': u'IONIAN SEA'
     }, {
         'author': u'EMSC',
         'event_id': u'19980128_0000006',
         'origin_id': 1500249,
         'longitude': 32.204,
         'datetime': UTCDateTime('1998-01-28T22:38:57Z'),
         'depth': -41.6,
         'magnitude': 4.3,
         'magnitude_type': u'mw',
         'latitude': 34.429
     }, {
         'author': u'EMSC',
         'event_id': u'19980213_0000004',
         'origin_id': 1500135,
         'longitude': 28.459,
         'datetime': UTCDateTime('1998-02-13T07:18:50Z'),
         'depth': -69.2,
         'magnitude': 4.8,
         'magnitude_type': u'mw',
         'latitude': 36.284
     }]
     self.assertEquals(results, expected)
Example #13
#%% User-defined parameters

from waveform_collection import gather_waveforms
from obspy.core import UTCDateTime
import numpy as np

# Data collection
SOURCE = 'IRIS'
NETWORK = 'IM'
STATION = 'I53H?'
LOCATION = '*'
CHANNEL = '*'
START = UTCDateTime('2018-12-19T01:45:00')
END = START + 20*60

# Filtering
FMIN = 0.1  # [Hz]
FMAX = 1    # [Hz]

# Array processing
WINLEN = 50  # [s]
WINOVER = 0.5

#%% Grab and filter waveforms

st = gather_waveforms(SOURCE, NETWORK, STATION, LOCATION, CHANNEL, START, END,
                      remove_response=True)

st.filter('bandpass', freqmin=FMIN, freqmax=FMAX, corners=2, zerophase=True)
st.taper(max_percentage=0.01)
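WINLEN and WINOVER are defined above but not used in this excerpt; a sketch of the sliding-window bookkeeping they imply, using UTCDateTime arithmetic (the actual array-processing call is omitted):

t = START
while t + WINLEN <= END:
    win = st.slice(t, t + WINLEN)  # one WINLEN-second window
    # ... run the array-processing step on 'win' here ...
    t += WINLEN * (1 - WINOVER)    # advance with 50 % overlap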
Example #14
from obspy.core import UTCDateTime
from obspy.signal import PPSD
from obspy.clients.fdsn.client import Client
import glob
import matplotlib.pyplot as plt
from obspy.signal.spectral_estimation import get_nlnm, get_nhnm
import matplotlib as mpl
mpl.rc('font', family='serif')
mpl.rc('font', serif='Times')
mpl.rc('text', usetex=True)
mpl.rc('font', size=18)

debug = True
sta = 'ALQ'

net = "DW"
stime = UTCDateTime('1981-001T00:00:00.0')
etime = UTCDateTime('1982-365T00:00:00.0')

#client=Client()
#inv = client.get_stations(network=net, station=sta, starttime=stime, endtime=etime, channel="*", level='response')

#for chan in ['LHZ', 'LHN', 'LHE', 'SHZ']:
#files = glob.glob("/msd/DW_ALQ/*/*/_" + chan + '*')

#for curfile in files:
#st = read(curfile)
#if 'ppsd' not in vars():
#ppsd = PPSD(st[0].stats, inv, period_smoothing_width_octaves=0.5)
#if debug:
#print(curfile)
Example #15
#!/usr/bin/env python

import argparse
import commands
import glob

from multiprocessing import Pool
from obspy.core import UTCDateTime

#initializes the variables
now = UTCDateTime.now()
net = ''
network = ''
sta = ''
station = ''
bdate = ''
byear = ''
bjday = ''
edate = ''
eyear = ''
ejday = ''

def main():
	#main sequence of logic
	arguments = getArguments()
	setArguments(arguments)
	checkDates()
	print 'DONE'

def getArguments():
	#This function parses the command line arguments
Example #16
network     = getParam(args,'net',msgLib,None)
station     = getParam(args,'sta',msgLib,None)
location    = staLib.getLocation(getParam(args,'loc',msgLib,None))
channel     = getParam(args,'chan',msgLib,None)
if len(channel) < 3:
    channel += "*"

#
# PSD files are all HOURLY files with 50% overlap computed as part of the polarization product
# date parameter of the hourly PSDs to start, it starts at hour 00:00:00
#  - HOURLY files YYYY-MM-DD
#
dataDirectory    = param.dataDirectory 
startDateTime    = getParam(args,'start',msgLib,None).split("T")[0] # always start from the beginning of the day; discard user-supplied hours, if any
startDateTime   += "T00:00:00"
tStart           = UTCDateTime(startDateTime)
startYear        = tStart.strftime("%Y")
startMonth       = tStart.strftime("%m")
startDay         = tStart.strftime("%d")
startDOY         = tStart.strftime("%j")
endDateTime      = getParam(args,'end',msgLib,None).split("T")[0] # likewise discard user-supplied hours from the end date
endDateTime     += "T00:00:00" # endDateTime is inclusive
tEnd             = UTCDateTime(endDateTime)+86400
endYear          = tEnd.strftime("%Y")
endMonth         = tEnd.strftime("%m")
endDay           = tEnd.strftime("%d")
endDOY           = tEnd.strftime("%j")
duration         = tEnd - tStart

dEnd             = date(int(endYear),int(endMonth),int(endDay))
dStart           = date(int(startYear),int(startMonth),int(startDay))
Example #17
class WaveformPlotting(object):
    """
    Class that provides several solutions for plotting large and small waveform
    data sets.

    .. warning::
        This class should NOT be used directly, instead use the
        :meth:`~obspy.core.stream.Stream.plot` method of the
        ObsPy :class:`~obspy.core.stream.Stream` or
        :class:`~obspy.core.trace.Trace` objects.

    It uses matplotlib to plot the waveforms.
    """

    def __init__(self, **kwargs):
        """
        Checks some variables and maps the kwargs to class variables.
        """
        self.stream = kwargs.get('stream')
        # Check if it is a Stream or a Trace object.
        if isinstance(self.stream, Trace):
            self.stream = Stream([self.stream])
        elif not isinstance(self.stream, Stream):
            msg = 'Plotting is only supported for Stream or Trace objects.'
            raise TypeError(msg)
        # Stream object should contain at least one Trace
        if len(self.stream) < 1:
            msg = "Empty object"
            raise IndexError(msg)
        # Type of the plot.
        self.type = kwargs.get('type', 'normal')
        # Start- and endtimes of the plots.
        self.starttime = kwargs.get('starttime', None)
        self.endtime = kwargs.get('endtime', None)
        self.fig_obj = kwargs.get('fig', None)
        # If no times are given take the min/max values from the stream object.
        if not self.starttime:
            self.starttime = min([trace.stats.starttime for \
                             trace in self.stream])
        if not self.endtime:
            self.endtime = max([trace.stats.endtime for \
                           trace in self.stream])
        # Map stream object and slice just in case.
        self.stream = self.stream.slice(self.starttime, self.endtime)
        # normalize times
        if self.type == 'relative':
            dt = self.starttime
            # fix plotting boundaries
            self.endtime = UTCDateTime(self.endtime - self.starttime)
            self.starttime = UTCDateTime(0)
            # fix stream times
            for tr in self.stream:
                tr.stats.starttime = UTCDateTime(tr.stats.starttime - dt)
        # Whether to use straight plotting or the fast minmax method.
        self.plotting_method = kwargs.get('method', 'fast')
        # Below that value the data points will be plotted normally. Above it
        # the data will be plotted using a different approach (details see
        # below). Can be overwritten by the above self.plotting_method kwarg.
        self.max_npts = 400000
        # If automerge is enabled. Merge traces with the same id for the plot.
        self.automerge = kwargs.get('automerge', True)
        # Set default values.
        # The default value for the size is determined dynamically because
        # there might be more than one channel to plot.
        self.size = kwargs.get('size', None)
        # Values that will be used to calculate the size of the plot.
        self.default_width = 800
        self.default_height_per_channel = 250
        if not self.size:
            self.width = 800
            # Check the kind of plot.
            if self.type == 'dayplot':
                self.height = 600
            else:
                # One plot for each trace.
                if self.automerge:
                    count = []
                    for tr in self.stream:
                        if hasattr(tr.stats, 'preview') and tr.stats.preview:
                            tr_id = tr.id + 'preview'
                        else:
                            tr_id = tr.id
                        if not tr_id in count:
                            count.append(tr_id)
                    count = len(count)
                else:
                    count = len(self.stream)
                self.height = count * 250
        else:
            self.width, self.height = self.size
        # Interval length in minutes for dayplot.
        self.interval = 60 * kwargs.get('interval', 15)
        # Scaling.
        self.vertical_scaling_range = kwargs.get('vertical_scaling_range',
                                                 None)
        # Dots per inch of the plot. Might be useful for printing plots.
        self.dpi = kwargs.get('dpi', 100)
        # Color of the graph.
        if self.type == 'dayplot':
            self.color = kwargs.get('color', ('#000000','#B2000F', '#004C12',
                                              '#0E01FF'))
            if isinstance(self.color, basestring):
                self.color = (self.color,)
            self.number_of_ticks = kwargs.get('number_of_ticks', None)
        else:
            self.color = kwargs.get('color', 'k')
            self.number_of_ticks = kwargs.get('number_of_ticks', 5)
        # Background and face color.
        self.background_color = kwargs.get('bgcolor', 'w')
        self.face_color = kwargs.get('face_color', 'w')
        # Transparency. Overwrites background and facecolor settings.
        self.transparent = kwargs.get('transparent', False)
        if self.transparent:
            self.background_color = None
        # Ticks.
        self.tick_format = kwargs.get('tick_format', '%H:%M:%S')
        self.tick_rotation = kwargs.get('tick_rotation', 0)
        # Whether or not to save a file.
        self.outfile = kwargs.get('outfile')
        self.handle = kwargs.get('handle')
        # File format of the resulting file. Usually defaults to PNG but might
        # be dependent on your matplotlib backend.
        self.format = kwargs.get('format')

    def plotWaveform(self, *args, **kwargs):
        """
        Creates a graph of any given ObsPy Stream object. It either saves the
        image directly to the file system or returns a binary image string.

        For all color values you can use legitimate HTML names, HTML hex
        strings (e.g. '#eeefff') or an (R, G, B) tuple, where each of R, G, B
        is in the range [0, 1]. You can also use single letters for basic
        built-in colors ('b' = blue, 'g' = green, 'r' = red, 'c' = cyan,
        'm' = magenta, 'y' = yellow, 'k' = black, 'w' = white), and gray
        shades can be given as a string encoding a float in the 0-1 range.
        """
        # Setup the figure if not passed explicitly.
        if not self.fig_obj:
            self.__setupFigure()
        else:
            self.fig = self.fig_obj
        # Determine kind of plot and do the actual plotting.
        if self.type == 'dayplot':
            self.plotDay(*args, **kwargs)
        else:
            self.plot(*args, **kwargs)
        # Adjust the subplot so there is always a margin of 80 px on every
        # side except for plots with just a single trace.
        if self.type != 'dayplot':
            if self.height >= 400:
                fract_y = 80.0 / self.height
            else:
                fract_y = 25.0 / self.height
            fract_x = 80.0 / self.width
            self.fig.subplots_adjust(top=1.0 - fract_y, bottom=fract_y,
                                     left=fract_x, right=1 - fract_x)
        self.fig.canvas.draw()
        # The following just serves as a unified way of saving and displaying
        # the plots.
        if not self.transparent:
            extra_args = {'dpi': self.dpi,
                          'facecolor': self.face_color,
                          'edgecolor': self.face_color}
        else:
            extra_args = {'dpi': self.dpi,
                          'transparent': self.transparent}
        if self.outfile:
            # If format is set use it.
            if self.format:
                self.fig.savefig(self.outfile, format=self.format,
                                 **extra_args)
            # Otherwise use format from self.outfile or default to PNG.
            else:
                self.fig.savefig(self.outfile, **extra_args)
        else:
            # Return a binary image string if not self.outfile but self.format.
            if self.format:
                imgdata = StringIO.StringIO()
                self.fig.savefig(imgdata, format=self.format,
                                 **extra_args)
                imgdata.seek(0)
                return imgdata.read()
            elif self.handle:
                return self.fig
            else:
                if not self.fig_obj:
                    plt.show()

    def plot(self, *args, **kwargs):
        """
        Plot the Traces showing one graph per Trace.

        Plots the whole time series for self.max_npts points and less. For more
        points it plots minmax values.
        """
        stream_new = []
        # Just remove empty traces.
        if not self.automerge:
            for tr in self.stream:
                stream_new.append([])
                if len(tr.data):
                    stream_new[-1].append(tr)
        else:
            # Generate sorted list of traces (no copy)
            # Sort order, id, starttime, endtime
            ids = []
            for tr in self.stream:
                if hasattr(tr.stats, 'preview') and tr.stats.preview:
                    id = tr.id + 'preview'
                else:
                    id = tr.id
                if not id in ids:
                    ids.append(id)
            for id in ids:
                stream_new.append([])
                for tr in self.stream:
                    if hasattr(tr.stats, 'preview') and tr.stats.preview:
                        tr_id = tr.id + 'preview'
                    else:
                        tr_id = tr.id
                    if tr_id == id:
                        # does not copy the elements of the data array
                        tr_ref = copy(tr)
                        # Trim does nothing if times are outside
                        if self.starttime >= tr_ref.stats.endtime or \
                                self.endtime <= tr_ref.stats.starttime:
                            continue
                        if tr_ref.data.size:
                            stream_new[-1].append(tr_ref)
                # delete if empty list
                if not len(stream_new[-1]):
                    stream_new.pop()
                    continue
                stream_new[-1].sort(key=lambda x: x.stats.endtime)
                stream_new[-1].sort(key=lambda x: x.stats.starttime)
        # If everything is lost in the process raise an Exception.
        if not len(stream_new):
            raise Exception("Nothing to plot")
        # Create helper variable to track ids and min/max/mean values.
        self.stats = []
        # Loop over each Trace and call the appropriate plotting method.
        self.axis = []
        for _i, tr in enumerate(stream_new):
            # Each trace needs to have the same sampling rate.
            sampling_rates = set([_tr.stats.sampling_rate for _tr in tr])
            if len(sampling_rates) > 1:
                msg = "All traces with the same id need to have the same " + \
                      "sampling rate."
                raise Exception(msg)
            sampling_rate = sampling_rates.pop()
            if self.background_color:
                ax = self.fig.add_subplot(len(stream_new), 1, _i + 1,
                                          axisbg=self.background_color)
            else:
                ax = self.fig.add_subplot(len(stream_new), 1, _i + 1)
            self.axis.append(ax)
            # XXX: Also enable the minmax plotting for previews.
            if self.plotting_method != 'full' and \
                ((self.endtime - self.starttime) * sampling_rate > \
                 self.max_npts):
                self.__plotMinMax(stream_new[_i], ax, *args, **kwargs)
            else:
                self.__plotStraight(stream_new[_i], ax, *args, **kwargs)
        # Set ticks.
        self.__plotSetXTicks()
        self.__plotSetYTicks()

    def plotDay(self, *args, **kwargs):
        """
        Draws a one-day plot ("dayplot") of the seismogram.
        """
        # Create a copy of the stream because it might be operated on.
        self.stream = self.stream.copy()
        # Merge and trim to pad.
        self.stream.merge()
        if len(self.stream) != 1:
            msg = "All traces need to be of the same id for a dayplot"
            raise ValueError(msg)
        self.stream.trim(self.starttime, self.endtime, pad=True)
        # Get minmax array.
        self.__dayplotGetMinMaxValues(*args, **kwargs)
        # Normalize array
        self.__dayplotNormalizeValues(*args, **kwargs)
        # Get timezone information. If none is given, use local time.
        self.time_offset = kwargs.get('time_offset',
                           round((UTCDateTime(datetime.now()) - \
                           UTCDateTime()) / 3600.0, 2))
        self.timezone = kwargs.get('timezone', 'local time')
        # Try to guess how many steps are needed to advance one full time unit.
        self.repeat = None
        intervals = self.extreme_values.shape[0]
        if self.interval < 60 and 60 % self.interval == 0:
            self.repeat = 60 / self.interval
        elif self.interval < 1800 and 3600 % self.interval == 0:
            self.repeat = 3600 / self.interval
        # Otherwise use a maximum value of 10.
        else:
            if intervals >= 10:
                self.repeat = 10
            else:
                self.repeat = intervals
        # Create axis to plot on.
        if self.background_color:
            ax = self.fig.add_subplot(1, 1, 1, axisbg=self.background_color)
        else:
            ax = self.fig.add_subplot(1, 1, 1)
        # Adjust the subplots to be symmetrical. Also make some more room
        # at the top.
        self.fig.subplots_adjust(left=0.12, right=0.88, top=0.88)
        # Create x_value_array.
        aranged_array = np.arange(self.width)
        x_values = np.empty(2 * self.width)
        x_values[0::2] = aranged_array
        x_values[1::2] = aranged_array
        intervals = self.extreme_values.shape[0]
        # Loop over each step.
        for _i in xrange(intervals):
            # Create offset array.
            y_values = np.ma.empty(self.width * 2)
            y_values.fill(intervals - (_i + 1))
            # Add min and max values.
            y_values[0::2] += self.extreme_values[_i, :, 0]
            y_values[1::2] += self.extreme_values[_i, :, 1]
            # Plot the values.
            ax.plot(x_values, y_values,
                    color=self.color[_i % len(self.color)])
        # Set ranges.
        ax.set_xlim(0, self.width - 1)
        ax.set_ylim(-0.3, intervals + 0.3)
        self.axis = [ax]
        # Set ticks.
        self.__dayplotSetYTicks()
        self.__dayplotSetXTicks()
        # Choose to show grid but only on the x axis.
        self.fig.axes[0].grid()
        self.fig.axes[0].yaxis.grid(False)
        # Set the title of the plot.
        #suptitle = '%s %s'%(self.stream[0].id,self.starttime.strftime('%Y-%m-%d'))
        #self.fig.suptitle(suptitle, fontsize='small')

    def __plotStraight(self, trace, ax, *args, **kwargs):  # @UnusedVariable
        """
        Just plots the data samples in self.stream. Useful for smaller
        datasets, up to around 1,000,000 samples (depending on the machine
        it is being run on).

        Slow and high memory consumption for large datasets.
        """
        # Copy to avoid any changes to original data.
        trace = deepcopy(trace)
        if len(trace) > 1:
            stream = Stream(traces=trace)
            # Merge with 'interpolation'. In case of overlaps this method will
            # always use the longest available trace.
            if hasattr(trace[0].stats, 'preview') and trace[0].stats.preview:
                stream = Stream(traces=stream)
                stream = mergePreviews(stream)
            else:
                stream.merge(method=1)
            trace = stream[0]
        else:
            trace = trace[0]
        # Check if it is a preview file and adjust accordingly.
        # XXX: Will look weird if the preview file is too small.
        if hasattr(trace.stats, 'preview') and trace.stats.preview:
            # Mask the gaps.
            trace.data = np.ma.masked_array(trace.data)
            trace.data[trace.data == -1] = np.ma.masked
            # Recreate the min_max scene.
            dtype = trace.data.dtype
            old_time_range = trace.stats.endtime - trace.stats.starttime
            data = np.empty(2 * trace.stats.npts, dtype=dtype)
            data[0::2] = trace.data / 2.0
            data[1::2] = -trace.data / 2.0
            trace.data = data
            # The times are not supposed to change.
            trace.stats.delta = old_time_range / float(trace.stats.npts - 1)
        # Write to self.stats.
        calib = trace.stats.calib
        max = trace.data.max()
        min = trace.data.min()
        if hasattr(trace.stats, 'preview') and trace.stats.preview:
            tr_id = trace.id + ' [preview]'
        else:
            tr_id = trace.id
        self.stats.append([tr_id, calib * trace.data.mean(),
                           calib * min, calib * max])
        # Pad the beginning and the end with masked values if necessary. Might
        # seem like overkill but it works really fast and is a clean solution
        # to gaps at the beginning/end.
        concat = [trace]
        if self.starttime != trace.stats.starttime:
            samples = (trace.stats.starttime - self.starttime) * \
                trace.stats.sampling_rate
            temp = [np.ma.masked_all(int(samples))]
            temp.extend(concat)
            concat = temp
        if self.endtime != trace.stats.endtime:
            samples = (self.endtime - trace.stats.endtime) * \
                      trace.stats.sampling_rate
            concat.append(np.ma.masked_all(int(samples)))
        if len(concat) > 1:
            # Use the masked array concatenate, otherwise it will result in a
            # not masked array.
            trace.data = np.ma.concatenate(concat)
            # set starttime and calculate endtime
            trace.stats.starttime = self.starttime
        trace.data *= calib
        ax.plot(trace.data, color=self.color)
        # Set the x limit for the graph to also show the masked values at the
        # beginning/end.
        ax.set_xlim(0, len(trace.data) - 1)

    def __plotMinMax(self, trace, ax, *args, **kwargs):  # @UnusedVariable
        """
        Plots the data using a min/max approach that calculates the minimum
        and maximum values of each "pixel" and then plots only these values.
        Works much faster with large data sets.
        """
        # Some variables to help calculate the values.
        starttime = self.starttime.timestamp
        endtime = self.endtime.timestamp
        # The same trace will always have the same sampling_rate.
        sampling_rate = trace[0].stats.sampling_rate
        # The samples per resulting pixel.
        pixel_length = int((endtime - starttime) / self.width *
                           sampling_rate)
        # Loop over all the traces. Do not merge them as there are many samples
        # and therefore merging would be slow.
        for _i, _t in enumerate(trace):
            # Get the start of the next pixel in case the starttime of the
            # trace does not match the starttime of the plot.
            ts = _t.stats.starttime
            if ts > self.starttime:
                start = int(ceil(((ts - self.starttime) * \
                        sampling_rate) / pixel_length))
                # Samples before start.
                prestart = int(((self.starttime + start * pixel_length /
                           sampling_rate) - ts) * sampling_rate)
            else:
                start = 0
                prestart = 0
            # Figure out the number of pixels in the current trace.
            length = len(_t.data) - prestart
            pixel_count = int(length // pixel_length)
            rest = int(length % pixel_length)
            # Reference to new data array which does not copy data but is
            # reshapeable.
            data = _t.data[prestart: prestart + pixel_count * pixel_length]
            data = data.reshape(pixel_count, pixel_length)
            # Calculate extreme_values and put them into new array.
            extreme_values = np.ma.masked_all((self.width, 2), dtype=np.float)
            min = data.min(axis=1) * _t.stats.calib
            max = data.max(axis=1) * _t.stats.calib
            extreme_values[start: start + pixel_count, 0] = min
            extreme_values[start: start + pixel_count, 1] = max
            # The first and the last pixel need separate treatment.
            if start and prestart:
                extreme_values[start - 1, 0] = \
                    _t.data[:prestart].min() * _t.stats.calib
                extreme_values[start - 1, 1] = \
                    _t.data[:prestart].max() * _t.stats.calib
            if rest:
                if start + pixel_count == self.width:
                    index = self.width - 1
                else:
                    index = start + pixel_count
                extreme_values[index, 0] = \
                    _t.data[-rest:].min() * _t.stats.calib
                extreme_values[index, 1] = \
                    _t.data[-rest:].max() * _t.stats.calib
            # Use the first array as a reference and merge all following
            # extreme_values into it.
            if _i == 0:
                minmax = extreme_values
            else:
                # Merge minmax and extreme_values.
                min = np.ma.empty((self.width, 2))
                max = np.ma.empty((self.width, 2))
                # Fill both with the values.
                min[:, 0] = minmax[:, 0]
                min[:, 1] = extreme_values[:, 0]
                max[:, 0] = minmax[:, 1]
                max[:, 1] = extreme_values[:, 1]
                # Find the minimum and maximum values.
                min = min.min(axis=1)
                max = max.max(axis=1)
                # Write again to minmax.
                minmax[:, 0] = min
                minmax[:, 1] = max
        # Write to self.stats.
        self.stats.append([trace[0].id, minmax.mean(),
                           minmax[:, 0].min(),
                           minmax[:, 1].max()])
        # Finally plot the data.
        x_values = np.empty(2 * self.width)
        aranged = np.arange(self.width)
        x_values[0::2] = aranged
        x_values[1::2] = aranged
        # Initialize completely masked array. This version is a little bit
        # slower than first creating an empty array and then setting the mask
        # to True. But on NumPy 1.1 this results in a 0-D array which can not
        # be indexed.
        y_values = np.ma.masked_all(2 * self.width)
        y_values[0::2] = minmax[:, 0]
        y_values[1::2] = minmax[:, 1]
        ax.plot(x_values, y_values, color=self.color)
        # Set the x-limit to avoid clipping of masked values.
        ax.set_xlim(0, self.width - 1)

    def __plotSetXTicks(self, *args, **kwargs):  # @UnusedVariable
        """
        Goes through all axes in pyplot and sets time ticks on the x axis.
        """
        # Loop over all axes.
        for ax in self.axis:
            # Get the xlimits.
            start, end = ax.get_xlim()
            # Set the location of the ticks.
            ax.set_xticks(np.linspace(start, end, self.number_of_ticks))
            # Figure out times.
            interval = float(self.endtime - self.starttime) / \
                       (self.number_of_ticks - 1)
            # Set the actual labels.
            if self.type == 'relative':
                labels = ['%.2f' % (self.starttime + _i * interval).timestamp
                          for _i in range(self.number_of_ticks)]
            else:
                labels = [(self.starttime + _i * \
                          interval).strftime(self.tick_format) for _i in \
                          range(self.number_of_ticks)]

            ax.set_xticklabels(labels, fontsize='small',
                               rotation=self.tick_rotation)

    def __plotSetYTicks(self, *args, **kwargs):  # @UnusedVariable
        """
        Goes through all axes in pyplot, reads self.stats and sets all ticks on
        the y axis.

        This method also adjusts the y limits so that the mean value is always
        in the middle of the graph and all graphs are equally scaled.
        """
        # Figure out the maximum distance from the mean value to either end.
        # Add 10 percent for better looking graphs.
        max_distance = max([max(trace[1] - trace[2], trace[3] - trace[1])
                            for trace in self.stats]) * 1.1
        # Loop over all axes.
        for _i, ax in enumerate(self.axis):
            mean = self.stats[_i][1]
            # Set the ylimit.
            min_range = mean - max_distance
            max_range = mean + max_distance
            # Set the location of the ticks.
            ticks = [mean - 0.75 * max_distance,
                     mean - 0.5 * max_distance,
                     mean - 0.25 * max_distance,
                     mean,
                     mean + 0.25 * max_distance,
                     mean + 0.5 * max_distance,
                     mean + 0.75 * max_distance]
            ax.set_yticks(ticks)
            # Setup format of the major ticks
            if max(ticks) - min(ticks) > 10:
                fmt = '%d'
            else:
                fmt = '%.2g'
            ax.set_yticklabels([fmt % t for t in ax.get_yticks()],
                               fontsize='small')
            # Set the title of each plot.
            ax.set_title(self.stats[_i][0], horizontalalignment='left',
                      fontsize='small', verticalalignment='center')
            ax.set_ylim(min_range, max_range)

    def __dayplotGetMinMaxValues(self, *args, **kwargs):  # @UnusedVariable
        """
        Takes a Stream object and calculates the min and max values for each
        pixel in the dayplot.

        Writes a three-dimensional array. The first axis is the step, i.e.
        the trace number; the second is the pixel within that step; and the
        third contains the minimum and maximum value of the pixel.
        """
        # Helper variables for easier access.
        trace = self.stream[0]
        trace_length = len(trace.data)

        # Samples per interval.
        spi = int(self.interval * trace.stats.sampling_rate)
        # Check the approximate number of samples per pixel and raise an
        # error if there are too few.
        spp = float(spi) / self.width
        if spp < 1.0:
            msg = """
            Too few samples to use dayplot with the given arguments.
            Adjust your arguments or use a different plotting method.
            """
            msg = " ".join(msg.strip().split())
            raise ValueError(msg)
        # Number of intervals plotted.
        noi = float(trace_length) / spi
        inoi = int(round(noi))
        # Plot an extra interval if at least 2 percent of the last interval
        # will actually contain data. Do it this way to lessen floating point
        # inaccuracies.
        if abs(noi - inoi) > 2E-2:
            noi = inoi + 1
        else:
            noi = inoi

        # Adjust data. Fill with masked values in case it is necessary.
        number_of_samples = noi * spi
        delta = number_of_samples - trace_length
        if delta < 0:
            trace.data = trace.data[:number_of_samples]
        elif delta > 0:
            trace.data = np.ma.concatenate([trace.data,
                            createEmptyDataChunk(delta, trace.data.dtype)])

        # Create array for min/max values. Use masked arrays to handle gaps.
        extreme_values = np.ma.empty((noi, self.width, 2))
        trace.data.shape = (noi, spi)

        ispp = int(spp)
        fspp = spp % 1.0
        if fspp == 0.0:
            delta = None
        else:
            delta = spi - ispp * self.width

        # Loop over each interval to avoid larger errors towards the end.
        for _i in range(noi):
            if delta:
                cur_interval = trace.data[_i][:-delta]
                rest = trace.data[_i][-delta:]
            else:
                cur_interval = trace.data[_i]
            cur_interval.shape = (self.width, ispp)
            extreme_values[_i, :, 0] = cur_interval.min(axis=1)
            extreme_values[_i, :, 1] = cur_interval.max(axis=1)
            # Add the rest.
            if delta:
                extreme_values[_i, -1, 0] = min(extreme_values[_i, -1, 0],
                                                rest.min())
                extreme_values[_i, -1, 1] = max(extreme_values[_i, -1, 1],
                                                rest.max())
        # Set class variable.
        self.extreme_values = extreme_values

    def __dayplotNormalizeValues(self, *args, **kwargs):  # @UnusedVariable
        """
        Normalizes all values in the 3 dimensional array, so that the minimum
        value will be 0 and the maximum value will be 1.

        It will also convert all values to floats.
        """
        # Convert to native floats.
        self.extreme_values = self.extreme_values.astype(np.float) * \
                              self.stream[0].stats.calib
        # Make sure that the mean value is at 0
        self.extreme_values -= self.extreme_values.mean()

        # Scale so that 99.5 % of the data will fit the given range.
        if self.vertical_scaling_range is None:
            percentile_delta = 0.005
            max_values = self.extreme_values[:, :, 1].compressed()
            min_values = self.extreme_values[:, :, 0].compressed()
            # Remove masked values.
            max_values.sort()
            min_values.sort()
            length = len(max_values)
            index = int((1.0 - percentile_delta) * length)
            max_val = max_values[index]
            index = int(percentile_delta * length)
            min_val = min_values[index]
        # Exact fit.
        elif float(self.vertical_scaling_range) == 0.0:
            max_val = self.extreme_values[:, :, 1].max()
            min_val = self.extreme_values[:, :, 0].min()
        # Fit with custom range.
        else:
            max_val = min_val = abs(self.vertical_scaling_range) / 2.0

        # Scale from 0 to 1.
        self.extreme_values = self.extreme_values / (max(abs(max_val),
                                                         abs(min_val)) * 2)
        self.extreme_values += 0.5

    def __dayplotSetXTicks(self, *args, **kwargs):  # @UnusedVariable
        """
        Sets the xticks for the dayplot.
        """
        max_value = self.width - 1
        # Check whether the interval is in seconds, minutes or hours and
        # convert to a common unit.
        if self.interval < 240:
            time_type = 'seconds'
            time_value = self.interval
        elif self.interval < 24000:
            time_type = 'minutes'
            time_value = self.interval / 60
        else:
            time_type = 'hours'
            time_value = self.interval / 3600
        count = None
        # Hardcode some common values. The plus one is intentional. It has
        # hardly any performance impact and enhances readability.
        if self.interval == 15 * 60:
            count = 15 + 1
        elif self.interval == 20 * 60:
            count = 4 + 1
        elif self.interval == 30 * 60:
            count = 6 + 1
        elif self.interval == 60 * 60:
            count = 4 + 1
        elif self.interval == 90 * 60:
            count = 6 + 1
        elif self.interval == 120 * 60:
            count = 4 + 1
        elif self.interval == 180 * 60:
            count = 6 + 1
        elif self.interval == 240 * 60:
            count = 6 + 1
        elif self.interval == 300 * 60:
            count = 6 + 1
        elif self.interval == 360 * 60:
            count = 12 + 1
        elif self.interval == 720 * 60:
            count = 12 + 1
        # Otherwise run some kind of autodetection routine.
        if not count:
            # Up to 15 time units, and if it is a whole number, show every unit.
            if time_value <= 15 and time_value % 1 == 0:
                count = time_value
            # Otherwise check divisibility by the numbers 15 down to 2; if
            # none divides evenly, just show 10 units.
            else:
                count = 10
                for _i in xrange(15, 1, -1):
                    if time_value % _i == 0:
                        count = _i
                        break
            # Show at least 5 ticks.
            if count < 5:
                count = 5
        # Everything can be overwritten by user specified number of ticks.
        if self.number_of_ticks:
            count = self.number_of_ticks
        # Calculate and set ticks.
        ticks = np.linspace(0.0, max_value, count)
        ticklabels = ['%i' % _i for _i in np.linspace(0.0,
                                    time_value, count)]
        self.axis[0].set_xticks(ticks)
        self.axis[0].set_xticklabels(ticklabels, rotation=self.tick_rotation)
        self.axis[0].set_xlabel('time in %s' % time_type)

    def __dayplotSetYTicks(self, *args, **kwargs):  # @UnusedVariable
        """
        Sets the yticks for the dayplot.
        """
        intervals = self.extreme_values.shape[0]
        # Display every single tick only if there are five or fewer steps.
        if intervals <= 5:
            tick_steps = range(0, intervals)
            ticks = np.arange(intervals, 0, -1, dtype=np.float)
            ticks -= 0.5
        else:
            tick_steps = range(0, intervals, self.repeat)
            ticks = np.arange(intervals, 0, -1 * self.repeat, dtype=np.float)
            ticks -= 0.5
        ticklabels = [(self.starttime + (_i + 1) * self.interval + \
                      self.time_offset * 3600).strftime('%H:%M') \
                      for _i in tick_steps]

        self.axis[0].set_yticks(ticks)
        self.axis[0].set_yticklabels(ticklabels)
        # self.axis[0].set_ylabel('UTC')
        # Save range.
        yrange = self.axis[0].get_ylim()
        # Create twin axis.
        #XXX
        self.twin = self.axis[0].twinx()
        self.twin.set_ylim(yrange)
        self.twin.set_yticks(ticks)
        ticklabels = [(self.starttime + _i * self.interval).strftime('%H:%M') \
                      for _i in tick_steps]
        self.twin.set_yticklabels(ticklabels)
        # Complicated way to calculate the label of the y-Axis showing the
        # second time zone.
        sign = '%+i' % self.time_offset
        sign = sign[0]
        time_label = self.timezone.strip() + ' (UTC%s%02i:%02i)' % \
                     (sign, abs(self.time_offset), (self.time_offset % 1 * 60))
        self.axis[0].set_ylabel(time_label)
        self.twin.set_ylabel('UTC')

    def __setupFigure(self):
        """
        The design and look of the whole plot to be produced.
        """
        # Setup figure and axes
        self.fig = plt.figure(num=None, dpi=self.dpi,
                              figsize=(float(self.width) / self.dpi,
                                       float(self.height) / self.dpi))
        # XXX: Figure out why this is needed sometimes.
        # Set size and dpi.
        self.fig.set_dpi(self.dpi)
        self.fig.set_figwidth(float(self.width) / self.dpi)
        self.fig.set_figheight(float(self.height) / self.dpi)
        # hide time information if set as option
        if self.type == 'relative':
            return
        if self.type == 'dayplot':
            suptitle = '%s %s' % (self.stream[0].id,
                                  self.starttime.strftime('%Y-%m-%d'))
            self.fig.suptitle(suptitle, y=0.94, fontsize='small')
        else:
            pattern = '%Y-%m-%dT%H:%M:%SZ'
            suptitle = '%s  -  %s' % (self.starttime.strftime(pattern),
                                      self.endtime.strftime(pattern))
            self.fig.suptitle(suptitle, x=0.02, y=0.96, fontsize='small',
                              horizontalalignment='left')
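As the class docstring insists, this machinery is meant to be driven through Stream.plot() rather than instantiated directly; a minimal sketch using ObsPy's bundled example data:

from obspy import read

st = read()              # bundled example stream
st.plot()                # the 'normal' one-axis-per-trace path
st.plot(type='dayplot')  # the dayplot path implemented above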
Example #18
from obspy.arclink import Client as C
from obspy.core import UTCDateTime as dt
from datetime import timedelta

c = C(host='seisrequest.iag.usp.br', port=18001, user='******')

d = dt.now() - timedelta(days=1)  # UTCDateTime supports datetime.timedelta arithmetic
print d - 3600  # plain numbers are interpreted as seconds

s = c.getWaveform('BL', 'BB19B', '', "HH*", d - 3600, d)

s.plot()
Exemplo n.º 19
0
 def __init__(self, **kwargs):
     """
     Checks some variables and maps the kwargs to class variables.
     """
     self.stream = kwargs.get('stream')
     # Check if it is a Stream or a Trace object.
     if isinstance(self.stream, Trace):
         self.stream = Stream([self.stream])
     elif not isinstance(self.stream, Stream):
         msg = 'Plotting is only supported for Stream or Trace objects.'
         raise TypeError(msg)
     # Stream object should contain at least one Trace
     if len(self.stream) < 1:
         msg = "Empty object"
         raise IndexError(msg)
     # Type of the plot.
     self.type = kwargs.get('type', 'normal')
     # Start- and endtimes of the plots.
     self.starttime = kwargs.get('starttime', None)
     self.endtime = kwargs.get('endtime', None)
     self.fig_obj = kwargs.get('fig', None)
     # If no times are given take the min/max values from the stream object.
     if not self.starttime:
         self.starttime = min([trace.stats.starttime for \
                          trace in self.stream])
     if not self.endtime:
         self.endtime = max([trace.stats.endtime for \
                        trace in self.stream])
     # Map stream object and slice just in case.
     self.stream = self.stream.slice(self.starttime, self.endtime)
     # normalize times
     if self.type == 'relative':
         dt = self.starttime
         # fix plotting boundaries
         self.endtime = UTCDateTime(self.endtime - self.starttime)
         self.starttime = UTCDateTime(0)
         # fix stream times
         for tr in self.stream:
             tr.stats.starttime = UTCDateTime(tr.stats.starttime - dt)
     # Whether to use straight plotting or the fast minmax method.
     self.plotting_method = kwargs.get('method', 'fast')
     # Below that value the data points will be plotted normally. Above it
     # the data will be plotted using a different approach (details see
     # below). Can be overwritten by the above self.plotting_method kwarg.
     self.max_npts = 400000
     # If automerge is enabled. Merge traces with the same id for the plot.
     self.automerge = kwargs.get('automerge', True)
     # Set default values.
     # The default value for the size is determined dynamically because
     # there might be more than one channel to plot.
     self.size = kwargs.get('size', None)
     # Values that will be used to calculate the size of the plot.
     self.default_width = 800
     self.default_height_per_channel = 250
     if not self.size:
         self.width = 800
         # Check the kind of plot.
         if self.type == 'dayplot':
             self.height = 600
         else:
             # One plot for each trace.
             if self.automerge:
                 count = []
                 for tr in self.stream:
                     if hasattr(tr.stats, 'preview') and tr.stats.preview:
                         tr_id = tr.id + 'preview'
                     else:
                         tr_id = tr.id
                     if tr_id not in count:
                         count.append(tr_id)
                 count = len(count)
             else:
                 count = len(self.stream)
             self.height = count * 250
     else:
         self.width, self.height = self.size
     # Interval length for the dayplot, given in minutes but stored in
     # seconds.
     self.interval = 60 * kwargs.get('interval', 15)
     # Scaling.
     self.vertical_scaling_range = kwargs.get('vertical_scaling_range',
                                              None)
     # Dots per inch of the plot. Might be useful for printing plots.
     self.dpi = kwargs.get('dpi', 100)
     # Color of the graph.
     if self.type == 'dayplot':
         self.color = kwargs.get('color', ('#000000', '#B2000F', '#004C12',
                                           '#0E01FF'))
         if isinstance(self.color, basestring):
             self.color = (self.color,)
         self.number_of_ticks = kwargs.get('number_of_ticks', None)
     else:
         self.color = kwargs.get('color', 'k')
         self.number_of_ticks = kwargs.get('number_of_ticks', 5)
     # Background and face color.
     self.background_color = kwargs.get('bgcolor', 'w')
     self.face_color = kwargs.get('face_color', 'w')
     # Transparency. Overwrites background and facecolor settings.
     self.transparent = kwargs.get('transparent', False)
     if self.transparent:
         self.background_color = None
     # Ticks.
     self.tick_format = kwargs.get('tick_format', '%H:%M:%S')
     self.tick_rotation = kwargs.get('tick_rotation', 0)
     # Whether or not to save a file.
     self.outfile = kwargs.get('outfile')
     self.handle = kwargs.get('handle')
     # File format of the resulting file. Usually defaults to PNG but might
     # be dependent on your matplotlib backend.
     self.format = kwargs.get('format')
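
# A hedged usage sketch (not part of the original snippet): these kwargs are
# normally passed through Stream.plot(), which forwards them to this class.
from obspy.core import read

st = read()  # with no argument, read() returns ObsPy's bundled example stream
# one axis per trace id, explicit size in pixels, dpi and trace color
st.plot(size=(800, 250), dpi=100, color='k')
# dayplot with 60 minute rows, written straight to a file
st.plot(type='dayplot', interval=60, outfile='dayplot.png')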
Exemplo n.º 20
0
 def roundOrigin(self):      # 1970-02-09T00:12:34.999990Z
     origin = UTCDateTime(self.date+'T'+self.time)
     origin = UTCDateTime(round(origin))
     self.date = origin.strftime("%Y-%m-%d")
     self.time = origin.strftime("%H:%M:%S")
     self.o = origin
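
# A short illustration of the rounding above (round() works because
# UTCDateTime converts to a float POSIX timestamp):
from obspy.core import UTCDateTime

origin = UTCDateTime('1970-02-09T00:12:34.999990Z')
print UTCDateTime(round(origin))  # -> 1970-02-09T00:12:35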
Exemplo n.º 21
0
 def test_formatSEED(self):
     """
     Tests formatSEED method
     """
     #1
     dt = UTCDateTime("2010-01-01")
     self.assertEquals(dt.formatSEED(compact=True), "2010,001")
     #2
     dt = UTCDateTime("2010-01-01T00:00:00.000000")
     self.assertEquals(dt.formatSEED(compact=True), "2010,001")
     #3
     dt = UTCDateTime("2010-01-01T12:00:00")
     self.assertEquals(dt.formatSEED(compact=True), "2010,001,12")
     #4
     dt = UTCDateTime("2010-01-01T12:34:00")
     self.assertEquals(dt.formatSEED(compact=True), "2010,001,12:34")
     #5
     dt = UTCDateTime("2010-01-01T12:34:56")
     self.assertEquals(dt.formatSEED(compact=True), "2010,001,12:34:56")
     #6
     dt = UTCDateTime("2010-01-01T12:34:56.123456")
     self.assertEquals(dt.formatSEED(compact=True),
                       "2010,001,12:34:56.1234")
     #7 - explicitly disabling the compact flag still results in a compact
     # date if no time information is given
     dt = UTCDateTime("2010-01-01")
     self.assertEquals(dt.formatSEED(compact=False), "2010,001")
Exemplo n.º 22
0
def read_input_command(parser):
    
    """
    Create the input object (dictionary) based on command-line options.
    The default values are given in the "input" dictionary below.
    """
    
    global input
    
    input = {   'address': 'psdata',
                'min_date': UTCDateTime.utcnow() - 60 * 60 * 24 * 10 * 1,
                'max_date': UTCDateTime.utcnow(),
                'min_mag': 3.0, 'max_mag': 9.9,
                'min_depth': +10.0, 'max_depth': -6000.0,
                'evlatmin': -90.0, 'evlatmax': +90.0, 
                'evlonmin': -180.0, 'evlonmax': +180.0,
                'identity': '*.*.*.*',
                'file': 'Pdiff',
                'model': 'iasp91',
                'phase': 'Pdiff',
                'freq': None,
            }
    
    # feed input dictionary of defaults into parser object
    parser.set_defaults(**input)
    
    # parse command line options
    (options, args) = parser.parse_args()
    # command line options can now be accessed via options.varname.
    
    # parse address (check if given absolute or relative)
    if options.address:
        if not os.path.isabs(options.address):
            options.address = os.path.join(os.getcwd(), options.address)
    
    # extract min. and max. longitude and latitude if the user has given the
    # coordinates with -r (GMT syntax)
    if options.event_rect:
        try:
            options.event_rect = options.event_rect.split('/')
            if len(options.event_rect) != 4:
                print "Erroneous rectangle given."
                sys.exit(2)
            options.evlatmin = float(options.event_rect[0])
            options.evlonmin = float(options.event_rect[1])
            options.evlatmax = float(options.event_rect[2])
            options.evlonmax = float(options.event_rect[3])           
        except:
            print "Erroneous rectangle given."
            sys.exit(2)
    
    
    input['address'] = options.address
    input['min_date'] = UTCDateTime(options.min_date)
    input['max_date'] = UTCDateTime(options.max_date)
    input['min_mag'] = float(options.min_mag)
    input['max_mag'] = float(options.max_mag)
    input['min_depth'] = float(options.min_depth)
    input['max_depth'] = float(options.max_depth)
    
    if options.event_rect:
        input['evlonmin'] = options.evlonmin
        input['evlonmax'] = options.evlonmax
        input['evlatmin'] = options.evlatmin
        input['evlatmax'] = options.evlatmax
    
    input['all_sta'] = options.all_sta
    input['identity'] = options.identity
    input['file'] = options.file
    input['model'] = options.model
    input['phase'] = options.phase.split('-')
    input['freq'] = options.freq
    if input['freq'] is not None:
        input['freq'] = float(options.freq)
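
# A hypothetical sketch of the optparse parser this function expects; the
# real option definitions are not part of this snippet, so the option names
# below are assumptions derived from the attributes accessed above.
from optparse import OptionParser

parser = OptionParser()
for opt in ('--address', '--min_date', '--max_date', '--min_mag', '--max_mag',
            '--min_depth', '--max_depth', '--event_rect', '--all_sta',
            '--identity', '--file', '--model', '--phase', '--freq'):
    parser.add_option(opt)
read_input_command(parser)  # fills the global 'input' dictionary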
Exemplo n.º 23
0

#Lets set the search for the following magnitude scale
searchParameter = '&minmagnitude=' + str(parserval.minMag) + \
	'&maxmagnitude=' + str(parserval.maxMag) + \
	'&mindepth=' + str(parserval.minDep) + \
	'&eventtype=earthquake'

#Lets setup the time for the search
if parserval.time:
	try:
		if debug:
			print 'Here is the time in: ' + \
				parserval.time.split(',')[0]

		stime = UTCDateTime(parserval.time.split(',')[0] + "-" + \
			parserval.time.split(',')[1] + "T00:00:00.0")
	except:
		print 'Problem reading epoch'
		sys.exit(0)

	if debug:
		print 'Here is the epoch time of interest:' + str(stime)

	etime = stime + parserval.number*24*60*60

#Lets format for the USGS webservices
stimeString = 'starttime=' + (stime.format_iris_web_service()).replace('T','%20')
etimeString = 'endtime=' + (etime.format_iris_web_service()).replace('T','%20')

if debug:
	print 'Start time string: ' + stimeString
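
# Hedged sketch of how these pieces are typically joined into a single fdsnws
# event query; the base URL here is an assumption, it is not shown above.
queryURL = 'https://earthquake.usgs.gov/fdsnws/event/1/query?' + \
	stimeString + '&' + etimeString + searchParameter
if debug:
	print 'Query URL: ' + queryURL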
Exemplo n.º 24
0
 def originfmt(self):
     o = UTCDateTime(self.date+"T"+self.time)
     o = UTCDateTime(round(o,3))
     self.date = o.strftime("%Y-%m-%d")
     self.time = o.strftime("%H:%M:%S.%f")[0:12]
Exemplo n.º 25
0
#!/usr/bin/env python

import commands
import glob
import multical
from obspy.core import UTCDateTime

today = UTCDateTime.now()
jday = today.julday
yearCur = today.year
logFilepath = '/home/ambaker/calanalyzer/logs/' + today.strftime('%Y%j %H%M') + '.log'

#get first year
years = []
paths = glob.glob('/xs[01]/seed/*/*')
for path in paths:
	if path.split('/')[-1] not in years:
		if path.split('/')[-1].isdigit():
			years.append(path.split('/')[-1])
years.sort()
yearFirst = years[0]

def printOutput(output):
	output = output[1].split('\n')
	fob = open(logFilepath, 'a')
	for line in output:
		if 'cal found' in line:
			fob.write(line + '\n')
	fob.write(str(UTCDateTime.now()))
	fob.close()
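
# Hedged usage sketch: printOutput expects the (status, output) tuple that
# Python 2's commands.getstatusoutput returns, e.g. when driving multical:
printOutput(commands.getstatusoutput('echo "cal found example"'))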
Exemplo n.º 26
0
__author__ = 'bruno'

from obspy.core import read, UTCDateTime
from os import walk, sep, remove, path, makedirs
from shutil import move
import smtplib
import logging


now = str(UTCDateTime.now())[:19].replace(':', '-')

## Filling vars
sds = "/SDS"
sdsNRT = "/SDS-NRT"
logFile = '/home/suporte/nrtSync_logs/nrtSync.' + now + '.log'

logging.basicConfig(filename=logFile, level=logging.DEBUG)

nJoinedFiles = 0
nMovedFiles = 0
jdays = []


## Control Vars
noErrors = True


for root, dirs, files in walk(sdsNRT): ## walking NRT path
    for nrtFile in files:
        try:
            st = read(root+sep+nrtFile) ## read a NRT file and get info from header
Exemplo n.º 27
0
    #handle = plt.axes(projection=ccrs.AlbersEqualArea(central_lon, central_lat))
    handle.set_extent(extent)

    handle.add_feature(cfeature.LAND)
    handle.add_feature(cfeature.OCEAN)
    handle.add_feature(cfeature.COASTLINE)
    #handle.add_feature(cfeature.BORDERS, linestyle=':')
    handle.add_feature(cfeature.LAKES)
    handle.add_feature(cfeature.RIVERS)
    #handle.add_feature(cfeature.STATES, edgecolor='gray')
    return handle


net = 'NE,IW'

stime = UTCDateTime('2009-204T00:00:00')
etime = UTCDateTime('2019-204T00:00:00')

fig = plt.figure(figsize=(12, 6))

inv = client.get_stations(network=net,
                          station="*",
                          channel='*HZ',
                          level="response",
                          starttime=stime,
                          endtime=etime)

lats, lons, cols = [], [], []
for cnet in inv:
    for sta in cnet:
        for chan in sta:
Exemplo n.º 28
0
from obspy.core import UTCDateTime

t_now = UTCDateTime()
t_bd = UTCDateTime(2011, 7, 9)
days = t_bd.julday - t_now.julday
print days, "days to my birthday"

bday = t_bd.isoweekday()
print "my birthday is day of week no.", bday
# we could also map this to a string representation using a dictionary
weekdays = {1: "monday", 2: "tuesday", 3: "wednesday", 4: "thursday",
            5: "friday", 6: "saturday", 7: "sunday"}
bday = weekdays[bday]
print "my birthday is a", bday

party = t_bd + (24 * 60 * 60)
while party.isoweekday() != 6:
    party += (24 * 60 * 60)
print "the party is going to be on", party.date
Exemplo n.º 29
0
# Required Python and Obspy modules will be imported in this part.
import matplotlib.pyplot as plt
import os
from obspy.core import UTCDateTime, read
try:
    from obspy.signal import seisSim
except ImportError:
    from obspy.signal.invsim import seisSim

from obspyDMT.utils.event_handler import get_Events
from obspyDMT.utils.fdsn_handler import FDSN_network
from obspyDMT.utils.input_handler import command_parse, read_input_command
from obspyDMT.utils.instrument_handler import FDSN_ARC_IC
from obspyDMT.utils.update_handler import FDSN_update

dir_name = int(UTCDateTime.now().timestamp)

# ##################### test_FDSN_network ##################################


def test_FDSN_network():
    (options, args, parser) = command_parse()
    input_dics = read_input_command(parser)
    # Changing the input_dics values for testing
    input_dics['min_date'] = '2011-03-01'
    input_dics['max_date'] = '2011-03-20'
    input_dics['min_mag'] = 8.9
    input_dics['datapath'] = 'test_%s' % dir_name
    input_dics['net'] = 'TA'
    input_dics['sta'] = 'Z3*'
    input_dics['cha'] = 'BHZ'
Exemplo n.º 30
0
def fetch(datemin=UTCDateTime(2011, 1, 1),
          datemax=UTCDateTime(),
          latmin=-90,
          latmax=90,
          lonmin=-180,
          lonmax=180,
          magmin=0,
          nmax=999999,
          datadir='mt-geofon'):
    """
    Downloads all moment tensor solutions from GEOFON Global Seismic Monitor.

    :param datemin: string, optional
        Start date - MT catalog starts 01/2011 - so dates before will be
        truncated to 2011-01-01, defaults to '2011-01-01'.
    :param datemax: string, optional
        End date, defaults to today.
    :param latmin: float, optional
        Latitude minimum, defaults to -90.
    :param latmax: float, optional
        Latitude maximum, defaults to 90.
    :param lonmin: float, optional
        Longitude minimum, defaults to -180.
    :param lonmax: float, optional
        Longitude maximum, defaults to 180.
    :param magmin: float, optional
        Magnitude minimum, defaults to 0.
    :param nmax: int, optional
        Max entries, defaults to 999999.
    :param datadir: string, optional
        Directory where moment tensors are saved into, defaults to 'mt-geofon'.
    """
    # process input parameters
    datemin = UTCDateTime(datemin)
    if datemin < UTCDateTime(2011, 1, 1):
        datemin = UTCDateTime(2011, 1, 1)
    datemin = datemin.date
    datemax = UTCDateTime(datemax).date
    params = urllib.urlencode({
        'fmt': 'html',
        'datemin': datemin,
        'datemax': datemax,
        'latmin': latmin,
        'latmax': latmax,
        'lonmin': lonmin,
        'lonmax': lonmax,
        'magmin': magmin,
        'nmax': nmax
    })
    # fetch catalog
    data = urllib.urlopen(CATALOG_URL % params).read()
    # filter for moment tensors
    urls = re.findall(PATTERN, data)
    print "Found %d moment tensors" % len(urls)
    # save moment tensor files into given directory
    if not os.path.exists(datadir):
        os.mkdir(datadir)
    for url in urls:
        print "Fetching %s ..." % url
        data = urllib.urlopen(url).read()
        dt = data.splitlines()[1].replace(' ', '_').replace('/', '-')
        filename = os.path.join(datadir, '20' + dt + '.txt')
        fh = open(filename, 'wt')
        fh.write(data)
        fh.close()
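
# Example call following the documented defaults: fetch every solution with
# magnitude >= 6.0 since the start of the catalog into './mt-geofon'.
fetch(datemin='2011-01-01', magmin=6.0)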
Exemplo n.º 31
0
 def yearScale(self, top=True):
     """
     Creates the subdivisions of the year scale.
     """
     color = self.color1
     # Shortcut to window geometry.
     starttime = self.env.starttime
     endtime = self.env.endtime
     time_range = float(endtime - starttime)
     # Pixel counts.
     start_x = 0
     end_x = self.width - start_x
     x_range = end_x - start_x
     start_y = 0
     end_y = self.subscale_height
     y_range = self.subscale_height
     # Get the number of years.
     year_count = endtime.year - starttime.year
     years = range(starttime.year, endtime.year + 1)
     # Loop over all years.
     for _i, year in enumerate(years):
         # Some variables.
         start_of_year = UTCDateTime(year, 1, 1)
         end_of_year = UTCDateTime(year + 1, 1, 1)
         # Calculate boundaries.
         start_frac = (start_of_year - starttime) / time_range
         if start_frac < 0:
             start_frac = 0
         start = start_frac * x_range
         if start < 0:
             start = 0
         start += start_x
         end_frac = (endtime - end_of_year) / time_range
         end_frac = 1.0 - end_frac
         if end_frac > 1.0:
             end_frac = 1.0
         end = end_frac * x_range
         end = x_range - end
         if end > x_range:
             end = x_range
         end += start_x
         graph_width = (end_frac - start_frac) * x_range
         # Only draw every second box.
         if _i % 2:
             year_box = QtGui.QGraphicsRectItem(start, start_y, graph_width,
                                                y_range)
             year_box.start_frac = start_frac
             year_box.end_frac = end_frac
             year_box.setBrush(color)
             year_box.setZValue(-198)
             self.addToGroup(year_box)
             # Add to list for easier tracking.
             self.year_boxes.append(year_box)
         # If too narrow, do not add a name.
         if graph_width < 30:
             continue
         # Add name.
         year_name = QtGui.QGraphicsSimpleTextItem(str(year))
         rect = year_name.boundingRect()
         # XXX: The +2 at the end is just trial and error. I cannot figure
         # out a way to calculate it. The height of the rectangle is 16
         # while the year subscale height is only 15. But the text is still
         # positioned too high without the +2.
         year_name.moveBy(((end_frac - start_frac) / 2 + start_frac) * self.width - \
                         rect.width() / 2, start_y + 2)
         year_name.start_frac = start_frac
         year_name.end_frac = end_frac
         self.year_labels.append(year_name)
         self.addToGroup(year_name)
Exemplo n.º 32
0
from obspy.signal.spectral_estimation import get_nlnm, get_nhnm
from obspy.clients.fdsn import Client
from obspy.core import UTCDateTime, Stream
import matplotlib.pyplot as plt
import numpy as np
import matplotlib as mpl
from scipy import signal
import sys
mpl.rc('font', family='serif')
mpl.rc('font', serif='Times')
mpl.rc('text', usetex=True)
mpl.rc('font', size=18)

#client = Client("http://vmdevwb.cr.usgs.gov/metadatairis" , timeout=20)
client = Client()
stime = UTCDateTime('2019-191T20:07:00')
etime = UTCDateTime('2019-191T20:17:00')
#stime = UTCDateTime('2019-193T20:07:00')
#etime = UTCDateTime('2019-193T20:27:00')
st = client.get_waveforms("GS",
                          "CA01",
                          "00",
                          "LH*",
                          stime,
                          etime,
                          attach_response=True)

#st.normalize()
st.detrend('constant')
#st.detrend('linear')
#st.filter('lowpass', freq=1./20.)
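
# Hedged sketch of a typical next step with the imports above: compare a
# Welch PSD of one trace against the NLNM/NHNM reference curves (nperseg is
# an arbitrary choice; for an absolute comparison the instrument response
# would have to be removed first).
per_nlnm, db_nlnm = get_nlnm()
per_nhnm, db_nhnm = get_nhnm()
f, Pxx = signal.welch(st[0].data, fs=st[0].stats.sampling_rate, nperseg=1024)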
Exemplo n.º 33
0
    tr.stats.distance = dist
    tr.stats.eve_coord = eve_coordinate
    tr.stats.eve_ot = eve_ot
    tr.stats["coordinates"] = {}
    tr.stats["coordinates"]["latitude"] = sta_coordinate[0]
    tr.stats["coordinates"]["longitude"] = sta_coordinate[1]
    return tr


e = np.array([9, -25.9665, 131.9755, 1.1, 2013, 6, 9, 14, 22, 12, 5.8, 5.43])
data_dir = './output/'
eve_id = 'eve_' + str(int(e[0]))
station_file = data_dir + 'stations_eve_' + str(int(e[0])) + '.txt'
waveform_file = data_dir + 'waveforms_eve_' + str(int(e[0])) + '.pkl'
eve_coordinate = (float(e[1]), float(e[2]))
eve_ot = UTCDateTime(int(e[4]), int(e[5]), int(e[6]), int(e[7]), int(e[8]),
                     float(e[9]))

st = read(waveform_file)
st_all = Stream()
freq4 = [0.01, 0.03, 15, 20.]
ratio1 = 20.
ratio2 = 8.0
line = 'WC4 -19.9619 134.3397 6.38647400151 0 0 1 0 0 1'

sta = line.split(' ')
sta_code = sta[0]
sta_coordinate = (float(sta[1]), float(sta[2]))
tr_bhn = create_trace(st, sta_code, sta_coordinate, 'BHN', eve_coordinate,
                      eve_ot, freq4, ratio1, ratio2)
tr_bhe = create_trace(st, sta_code, sta_coordinate, 'BHE', eve_coordinate,
                      eve_ot, freq4, ratio1, ratio2)
Exemplo n.º 34
0
    sp = i_Sarr - i_Parr
    i_start = i_Parr - timedelta(seconds=td)
    i_end = i_Sarr + timedelta(seconds=90)
    i_network = str(i_network)
    i_station = str(i_station)
    ########################################################################
    # clear stream objects
    raw_stn = Stream()
    raw_ste = Stream()
    # data download and plot for the north component
    try:
        raw_stn += client.get_waveforms(i_network,
                                        i_station,
                                        "*",
                                        'HHN',
                                        UTCDateTime(i_start),
                                        UTCDateTime(i_end),
                                        attach_response=True)
    except:
        nummissed += .5
        print('oops')
        continue
    i_stn = raw_stn.copy()
    j_stn = raw_stn.copy()

    samprate = i_stn[0].stats['sampling_rate']
    ## Make the prefilt for the instrument response - AT.SIT is @ 50 Hz, so
    ## 25 Hz is the Nyquist frequency
    prefilt1 = (0.005, 0.006, (samprate / 2) - 5, samprate / 2)  # 20 to 25 Hz at the top end
    i_stn[0].remove_response(
        output='VEL',
Exemplo n.º 35
0
def wavesdownloader(args):



    #################################################################
    # ---- Check if internet connection is active 
    #
    # check servers
    args.server=list2servers(args.server,'N')
    # Do not apply if only LOCAL is active
    if (args.server[0] == 1 or args.server[1] == 1 or args.server[3] == 1 or args.server[4] == 1):
       try:
          urllib2.urlopen("http://google.com", timeout=2)
       except urllib2.URLError:
          print "No internet connection"
          sys.exit()
    
     
    #################################################################
    # ----  C. Initialize variables ----                            #
    dataStreamEida = Stream()
    dataStreamIris = Stream()
    dataStreamLoca = Stream()
    dataStreamWbDc = Stream()
    dataStream     = Stream()
    center=args.center.split(' ')
    grradius=args.radius.split(' ')
    kmradius=args.radius.split(' ')
    ev_lat=eval(center[0])
    ev_lon=eval(center[1])
    kmradius[0]=eval(kmradius[0])*100
    kmradius[1]=eval(kmradius[1])*100
    bbox=getBBox(args.supCor,args.infCor) #minlat minlon maxlat maxlon
    aFormat="fseed"
    aFORMAT="FSEED"
    irisClient = IClient()
    ID = "0"

    # check consistency of arguments
    checkConsistency(args)

    # define t1 and t2
    if args.end=="None":
       t1 = UTCDateTime(args.beg)
       t1.getTimeStamp()
       t2 = t1+int(args.len)
       t2= str(t2)[:19] #this must be a string for
       args.end=t2
    else:
       t1 = UTCDateTime(args.beg)
       t2 = UTCDateTime(args.end)
       if t1 >= t2:
          print "Wrong begin and end time entries"
          sys.exit()


    # make outdir if not exist (from myUsefullFuncs)
    if args.outdir != ".":
       mkdir(args.outdir)
    # if filtering require data writing
    if args.wfiltr == "Y":
       mkdir(args.outdir + os.sep + "_f")

    # write  summary
    makeSummary(1,dataStream,args)
    

    # check for local fseed archive
    if args.server[2] == 1 and args.fsfile == "None":
       print "local fseed file non spcified. Check --fsfile option"
       sys.exit(0)
    
    #################################################################
    # ----  D. Download waveforms (fseed and stream)
    # if eida is on
    if args.server[0] == 1:

      # Convert some args for eida syntax 
      args.net=reformatNetStaList(args.net,"eida")
      args.sta=reformatNetStaList(args.sta,"eida")
      args.cha=reformatChaStaList(args.cha,"eida")
    
      # initialize eida requestor 
      user=INGV_requestor(args.usr,args.pas)
    
      # download
      print "\n \nDownloading from eida"
      if args.mode == "circular":
        (downloadPath,ID) = user.run_circular_query( \
                            args.net,args.sta,args.cha, \
                            center[0],center[1],kmradius[0],kmradius[1], \
                            args.beg,args.end,aFORMAT)
      else: # i.e.: rectangular
        (downloadPath,ID) = user.run_rectangular_query( \
                            args.net,args.sta,args.cha, \
                            bbox[0],bbox[1],bbox[2],bbox[3], \
                            args.beg,args.end,aFORMAT)
    
      #set name and location of the fseed 
      archiveFile = downloadPath + os.sep + ID + "_data.tgz"
      (root,seed)=grabIngvEidaArchive(archiveFile,aFormat)
    
      # get stream
      try: 
        dataStreamEida = read(seed)
      except:
        print "no data found or no response from Eida server"

    
    # if iris is on
    if args.server[1] == 1:
    
       # Convert some args for iris syntax
       args.net=reformatNetStaList(args.net,"iris")
       args.sta=reformatNetStaList(args.sta,"iris")
       args.cha=reformatNetStaList(args.cha,"iris")
    
       # initialize downloader
       print "\n \nDownloading from iris"
       if args.mode == "circular":
          response = irisClient.availability( \
                     network=args.net, station=args.sta, channel=args.cha, \
                     location=args.loc,starttime=t1, endtime=t2, \
                     lat=center[0], lon=center[1], minradius=grradius[0], maxradius=grradius[1])

       else: # i.e.: rectangular
          IrisBox = setIrisRectBox(bbox)
          response = irisClient.availability( \
                     network=args.net, station=args.sta, channel=args.cha, \
                     location=args.loc,starttime=t1, endtime=t2,\
                     minlat=IrisBox[0], minlon=IrisBox[1], maxlat=IrisBox[2], maxlon=IrisBox[3])
    
       # Download
       try:
           dataStreamIris = irisClient.bulkdataselect(response)
           # here save mseed file format
           dataStreamIris.write(args.outdir + os.sep + 'iris.' + ID + '_data.mseed',format='MSEED', encoding='STEIM2')
       except:
           print "IRIS bulkdataselect returns False. No data from IRIS\n\n"
    
    
    #
    # if local is on
    if args.server[2] == 1:
       print "\n \nExtracting from local fseed"
       dataStreamLoca=read(args.fsfile)


    # if ARClink - WEBDC is on
    if args.server[3] == 1:
    
       print "\n \nDownloading from WEBDC via ArcLink"
       # Convert some args for iris syntax
       args.net=reformatNetStaList(args.net,"iris")
       args.sta=reformatNetStaList(args.sta,"iris")
       args.cha=reformatNetStaList(args.cha,"iris")

       # get station list available
       Inventory=getInventoryViaArcLink(t1,t2,center,grradius,bbox,args)
       # download data
       dataStreamWbDc=getDataViaArcLink(t1,t2,Inventory,args)
       if len(dataStreamWbDc) > 0:
          dataStreamWbDc.write(args.outdir + os.sep + 'webdc.' + ID + '_data.mseed',format='MSEED', encoding='STEIM2')
       
    ############################################################
    # Check if only empty data
    if(len(dataStreamIris) + len(dataStreamEida) + len(dataStreamLoca) + len(dataStreamWbDc)<=0):
      print "\nNo data found! Exit!"
      sys.exit()

    ############################################################
    # ----  E. data to sac (or other formats)
    
    # trim data
    if args.beg!="None" or  args.end!="None":
       Tb = UTCDateTime(args.beg)
       Te = UTCDateTime(args.end)
       dataStreamIris.trim(starttime=Tb, endtime=Te)
       dataStreamEida.trim(starttime=Tb, endtime=Te)
       dataStreamLoca.trim(starttime=Tb, endtime=Te)
       dataStreamWbDc.trim(starttime=Tb, endtime=Te)
    
    # find gaps and remove those traces
    if args.rmgaps=="Y":
       dataStreamIris = removeGaps(dataStreamIris, args.mingap, args.maxgap, verbose="true")
       dataStreamEida = removeGaps(dataStreamEida, args.mingap, args.maxgap, verbose="true")
       dataStreamLoca = removeGaps(dataStreamLoca, args.mingap, args.maxgap, verbose="true")
       dataStreamWbDc = removeGaps(dataStreamWbDc, args.mingap, args.maxgap, verbose="true")
    
    # Remove traces shorter than expected tolerance
    args.reject=eval(args.reject)
    if args.reject <= 100:
       dataStreamIris=removeShortTraces(dataStreamIris,args.reject,Tb,Te)
       dataStreamEida=removeShortTraces(dataStreamEida,args.reject,Tb,Te)
       dataStreamLoca=removeShortTraces(dataStreamLoca,args.reject,Tb,Te)
       dataStreamWbDc=removeShortTraces(dataStreamWbDc,args.reject,Tb,Te)
    
    # Remove mean and trend if required
    if args.demean == "Y":
       dataStreamIris=removeMeanTrend(dataStreamIris)
       dataStreamEida=removeMeanTrend(dataStreamEida)
       dataStreamLoca=removeMeanTrend(dataStreamLoca)
       dataStreamWbDc=removeMeanTrend(dataStreamWbDc)

    # Decimation if required
    if args.deci != "None":
       dataStreamIris=decimateStream(dataStreamIris,args.deci)
       dataStreamEida=decimateStream(dataStreamEida,args.deci)
       dataStreamLoca=decimateStream(dataStreamLoca,args.deci)
       dataStreamWbDc=decimateStream(dataStreamWbDc,args.deci)
    
    
    ############################################################
    # ----  F. metadata, response, and paz files and update header
    #          for arclink data PAZ already exist. RESP files can't
    #          be downloaded right now (12.10.2012)
    
    # eida - very easy
    if args.server[0] == 1:
       #if args.res != "0": # and args.reqFileFormat != "MSEED":

       # first test if the fseed file exists
       if not os.path.isfile(seed):
          print "No data found in fseed file from EIDA"
       else:
          (respFiles, pazFiles) = extractResponse(seed,args.res,args.outdir)
          if args.res == "2" or  args.res == "3":
            dataStreamEida = addPazStats(dataStreamEida,args.outdir,pazFiles)
          dataStreamEida = updateStats(dataStreamEida,args.outdir,"rdseed.stations",ev_lat,ev_lon)
          try:
            shutil.move(seed,args.outdir + os.sep + 'eida.' + ID + '_data.fseed')
          except:
            pass
          try:
            shutil.move(args.outdir + os.sep + 'rdseed.stations',args.outdir + os.sep + 'eida_rdseed.stations')
          except:
            pass


    # local, same as eida - very easy
    if args.server[2] == 1:
       #if args.res != "0":
       (respFiles, pazFiles) = extractResponse(args.fsfile,args.res,args.outdir)
       if args.res == "2" or args.res == "3":
          dataStreamLoca = addPazStats(dataStreamLoca,args.outdir,pazFiles)
       dataStreamLoca = updateStats(dataStreamLoca,args.outdir,"rdseed.stations",ev_lat,ev_lon)
       dataStreamLoca = purgeListStation(dataStreamLoca,args,'d')
       shutil.move(args.outdir + os.sep + 'rdseed.stations',args.outdir + os.sep + 'local_rdseed.stations')   
    
    # iris - longer
    if args.server[1] == 1:
       pazFiles = []
       temp_iris = open(args.outdir + os.sep + "tmp_iris.station","w")   
       # here store lat lon station locId for later update of the iris stream
    
       for i in range(len(dataStreamIris)):
         n = dataStreamIris[i].stats['network']
         s = dataStreamIris[i].stats['station']
         l = dataStreamIris[i].stats['location']
         c = dataStreamIris[i].stats['channel']
         nameresp = "RESP." + n + "." + s + "." + l + "." + c 
         namepaz  = "SAC.PZs." + n + "." + s + "." + c
    
         # get metadata station latitude and longitude
         meta = irisClient.station(network=n, station=s, location=l, channel=c, starttime=t1, endtime=t2)
         (lat,lon) = getXmlTagData(meta)
         # update temp_iris
         temp_iris.write(s + " " + n + " " + lat + " " + lon + "0.0\n")
    
         # write resp files
         if args.res == "1" or args.res == "3":
             irisClient.saveResponse(filename=args.outdir + os.sep + nameresp ,network=n, station=s, location=args.loc, channel=c,\
                 starttime=t1, endtime=t2, format='RESP')
    
         if args.res == "2" or args.res == "3":
             sacpz = irisClient.sacpz(network=n, station=s, location=args.loc, channel=c, starttime=t1, endtime=t2)
             sacpz = sac4sac(sacpz)
             f = open(args.outdir + os.sep + namepaz, 'w')
             f.write(sacpz)
             f.close()     # close close close!!!! damn, I always forget!!!
             pazFiles.append(namepaz)
    
       # close temp_iris
       temp_iris.close()
       
       # update stream
       dataStreamIris = updateStats(dataStreamIris,args.outdir,"tmp_iris.station",ev_lat,ev_lon)
       if args.res == '2' or  args.res == '3':
          dataStreamIris = addPazStats(dataStreamIris,args.outdir,pazFiles)

    if args.server[3] == 1:

       # Write webdc.stations file and extract PZ file
       #   and update lat lon and event loc into stats for consistency
       dataStreamWbDc=writeWbDcStation(dataStreamWbDc,args)

       if args.res == "2" or args.res == "3":
           PzFileFromStat(dataStreamWbDc,args)
           print "\nWarning, RESP files can not be downloaded from WebDC vie ArcLink\n"
       if args.res == "1":
           print "\nWarning, RESP files can not be downloaded from WebDC vie ArcLink\n"

    #############################################################
    ## ---- G. Join all traces and write mseed files with original data
    for i in range(len(dataStreamEida)):
           dataStream.append(dataStreamEida[i])
    for i in range(len(dataStreamIris)):
           dataStream.append(dataStreamIris[i])
    for i in range(len(dataStreamLoca)):
           dataStream.append(dataStreamLoca[i])
    for i in range(len(dataStreamWbDc)):
           dataStream.append(dataStreamWbDc[i])
    # Stop and exit if no data was downloaded
    if len(dataStream) == 0:
       print "\n\nNo data available!\n"
       sys.exit()

    #############################################################
    ## ---- join stations files
    files = []
    if (args.server[1] == 1):
       if os.path.isfile(args.outdir + os.sep + "tmp_iris.station"):
          files.append(args.outdir + os.sep + "tmp_iris.station")
    if (args.server[0] == 1):
       if os.path.isfile(args.outdir + os.sep + "eida_rdseed.stations"):
          files.append(args.outdir + os.sep + "eida_rdseed.stations")
    content = ''
    for f in files:
       content = content + '\n' + open(f).read()
    open(args.outdir + os.sep + 'list.stations','w').write(content)
    
 
    #############################################################
    ## ---- H. Make deconvolution of instrument
    if args.deco == "Y":
       dataStream = removeInstrument(dataStream,args)

    #############################################################
    ## ----  I. Rotate orizontal to GCP
    if args.rot == "Y":
       rotatDataStream = rotateToGCP(dataStream)
       # join horizontal with rotated
       dataStream=join_NERT(dataStream,rotatDataStream)

    #############################################################
    # Write fseed to sac
    if(args.format!="None"):
      RsacFilesEida = fromFseed2sac(dataStream, args.format, args.outdir, args.format)
      for i in range(len(RsacFilesEida)):
         lat=dataStream[i].stats['stla']
         lon=dataStream[i].stats['stlo']
         ev_lat=dataStream[i].stats['evla']
         ev_lon=dataStream[i].stats['evlo']
         ok = updateSacHeader(RsacFilesEida[i],args.outdir,lat,lon,ev_lat,ev_lon)

    #############################################################
    ## ---- L. Write mseed to store semi-processed data.
    #  ---- and other output files
    # If you need to repeat the analysis with different parameters on these
    # data, just add the option --redo "Y" to your data request line.
    # --server and --res will then be disabled automatically and the mseed
    # file below will be loaded. Do not change the name of this file if you
    # want to use --redo "Y".
    # The analysis includes only --filter --cFreq --Sta/Lta --deco
    #dataStream.write(args.outdir + os.sep + 'downloadedData.mseed', format='MSEED',encoding='FLOAT64')
    # and make summary level 2 (stats elements are not writable on mseed)
    makeSummary(2,dataStream,args)


    ############################################################
    # ----  Write output files
    # kml file station for google
    createKML(dataStream,args.outdir)
     


    ############################################################
    # ---- START DATA ANALYSIS:

    ############################################################
    # ----  M: filter data if required (default=none)
    if args.bandpass == "0" and args.lowpass == "0" and args.highpass == "0":
       pass
    else:
       dataStream=FilterData(dataStream, args.bandpass, args.highpass, args.lowpass)

    
    ############################################################# 
    ## ---- N. Write processed data with filter. This also includes rotated filtered data
    if args.wfiltr != "N":
       outP = args.outdir + os.sep + args.wfiltr
       mkdir(outP)
       FsacFiles = fromFseed2sac(dataStream, args.format, outP, args.format)
    

    #############################################################
    ## ----  O. compute central frequency, Sta/Lta
    # Initialize stats for this section
    dataStream=initStats(dataStream)

    # central frequency
    """
    The period is determined as the period of the maximum value of the
    Fourier amplitude spectrum.
    """
    if args.cfreq=="Y":
       dataStream=cFreqStream(dataStream) 
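
    # Hedged illustration of the rule quoted above (cFreqStream itself is
    # not part of this excerpt), e.g. for a single trace tr:
    #   spec = abs(np.fft.rfft(tr.data))
    #   freqs = np.fft.rfftfreq(tr.stats.npts, d=tr.stats.delta)
    #   central_freq = freqs[spec[1:].argmax() + 1]   # skip the DC bin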

    # pick using sta/lta
    staLtaStream=Stream()
    if args.slta!="None":
       staLtaStream=StaLta(dataStream,args.slta)
       staLtaStream=trig(staLtaStream,args.slta)
       staLtaStream.write(args.outdir + os.sep + 'picks.mseed', format='MSEED',encoding='FLOAT64')
       dataStream=syncStat(staLtaStream,dataStream)
       # if required to write 
       if args.wcf == "Y":
          SL = fromFseed2sac(staLtaStream,"SAC",args.outdir,'slt')

    # get PGMs:
    if args.pgm=="Y":
       dataStream=get_PGMs(dataStream,args)
       if(args.shake == 'Y'):
         (xmlHeader,shakeLines)=export4ShakeMap(dataStream) 
         writeShake(xmlHeader,shakeLines,args)


    # write summary level 3: data analysis
    makeSummary(3,dataStream,args)


    ############################################################
    # ----  P. Plot 
    args.pltmode = int(args.pltmode)
    if args.pltmode!=0:   
       plotWaves(dataStream,args.pltmode, \
                 kmradius, args.mode,args.pltchan, \
                 args.pltNERT,args.pltazi, \
                 staLtaStream,args.slta, args.outdir, args.rot)
Exemplo n.º 36
0
def main():

    print()
    print("#########################################")
    print("#        __                 _     _     #")
    print("#  _ __ / _|_ __  _   _    | |__ | | __ #")
    print("# | '__| |_| '_ \| | | |   | '_ \| |/ / #")
    print("# | |  |  _| |_) | |_| |   | | | |   <  #")
    print("# |_|  |_| | .__/ \__, |___|_| |_|_|\_\ #")
    print("#          |_|    |___/_____|           #")
    print("#                                       #")
    print("#########################################")
    print()

    # Run Input Parser
    args = arguments.get_hk_arguments()

    # Load Database
    db = stdb.io.load_db(fname=args.indb)

    # Construct station key loop
    allkeys = sorted(db.keys())

    # Extract key subset
    if len(args.stkeys) > 0:
        stkeys = []
        for skey in args.stkeys:
            stkeys.extend([s for s in allkeys if skey in s])
    else:
        stkeys = sorted(db.keys())

    # Loop over station keys
    for stkey in list(stkeys):

        # Extract station information from dictionary
        sta = db[stkey]

        # Define path to see if it exists
        if args.phase in ['P', 'PP', 'allP']:
            datapath = Path('P_DATA') / stkey
        elif args.phase in ['S', 'SKS', 'allS']:
            datapath = Path('S_DATA') / stkey
        if not datapath.is_dir():
            print('Path to ' + str(datapath) + " doesn't exist - continuing")
            continue

        # Define save path
        if args.save:
            savepath = Path('HK_DATA') / stkey
            if not savepath.is_dir():
                print('Path to ' + str(savepath) +
                      " doesn't exist - creating it")
                savepath.mkdir(parents=True)

        # Get search start time
        if args.startT is None:
            tstart = sta.startdate
        else:
            tstart = args.startT

        # Get search end time
        if args.endT is None:
            tend = sta.enddate
        else:
            tend = args.endT

        if tstart > sta.enddate or tend < sta.startdate:
            continue

        # Temporary print locations
        tlocs = sta.location
        if len(tlocs) == 0:
            tlocs = ['']
        for il in range(0, len(tlocs)):
            if len(tlocs[il]) == 0:
                tlocs[il] = "--"
        sta.location = tlocs

        # Update Display
        print(" ")
        print(" ")
        print("|===============================================|")
        print("|===============================================|")
        print("|                   {0:>8s}                    |".format(
            sta.station))
        print("|===============================================|")
        print("|===============================================|")
        print("|  Station: {0:>2s}.{1:5s}                            |".format(
            sta.network, sta.station))
        print("|      Channel: {0:2s}; Locations: {1:15s}  |".format(
            sta.channel, ",".join(tlocs)))
        print("|      Lon: {0:7.2f}; Lat: {1:6.2f}                |".format(
            sta.longitude, sta.latitude))
        print("|      Start time: {0:19s}          |".format(
            sta.startdate.strftime("%Y-%m-%d %H:%M:%S")))
        print("|      End time:   {0:19s}          |".format(
            sta.enddate.strftime("%Y-%m-%d %H:%M:%S")))
        print("|-----------------------------------------------|")

        rfRstream = Stream()

        datafiles = [x for x in datapath.iterdir() if x.is_dir()]
        for folder in datafiles:

            # Skip hidden folders
            if folder.name.startswith('.'):
                continue

            date = folder.name.split('_')[0]
            year = date[0:4]
            month = date[4:6]
            day = date[6:8]
            dateUTC = UTCDateTime(year + '-' + month + '-' + day)

            if dateUTC > tstart and dateUTC < tend:

                # Load meta data
                metafile = folder / "Meta_Data.pkl"
                if not metafile.is_file():
                    continue
                meta = pickle.load(open(metafile, 'rb'))

                # Skip data not in list of phases
                if meta.phase not in args.listphase:
                    continue

                # QC Thresholding
                if meta.snrh < args.snrh:
                    continue
                if meta.snr < args.snr:
                    continue
                if meta.cc < args.cc:
                    continue

                # # Check bounds on data
                # if meta.slow < args.slowbound[0] and meta.slow > args.slowbound[1]:
                #     continue
                # if meta.baz < args.bazbound[0] and meta.baz > args.bazbound[1]:
                #     continue

                # If everything passed, load the RF data
                filename = folder / "RF_Data.pkl"
                if filename.is_file():
                    file = open(filename, "rb")
                    rfdata = pickle.load(file)
                    rfRstream.append(rfdata[1])
                    file.close()
                    if rfdata[0].stats.npts != 1451:
                        print(folder)

        if len(rfRstream) == 0:
            continue

        if args.no_outl:
            t1 = 0.
            t2 = 30.

            varR = []
            for i in range(len(rfRstream)):
                taxis = rfRstream[i].stats.taxis
                tselect = (taxis > t1) & (taxis < t2)
                varR.append(np.var(rfRstream[i].data[tselect]))
            varR = np.array(varR)

            # Remove outliers wrt variance within time range
            medvarR = np.median(varR)
            madvarR = 1.4826 * np.median(np.abs(varR - medvarR))
            robustR = np.abs((varR - medvarR) / madvarR)
            outliersR = np.arange(len(rfRstream))[robustR > 2.5]
            for i in outliersR[::-1]:
                rfRstream.remove(rfRstream[i])

        print('')
        print("Number of radial RF data: " + str(len(rfRstream)))
        print('')

        # Try binning if specified
        if args.calc_dip:
            rf_tmp = binning.bin_baz_slow(rfRstream,
                                          nbaz=args.nbaz + 1,
                                          nslow=args.nslow + 1,
                                          pws=args.pws)
            rfRstream = rf_tmp[0]
        else:
            rf_tmp = binning.bin(rfRstream,
                                 typ='slow',
                                 nbin=args.nslow + 1,
                                 pws=args.pws)
            rfRstream = rf_tmp[0]

        # Get a copy of the radial component and filter
        if args.copy:
            rfRstream_copy = rfRstream.copy()
            rfRstream_copy.filter('bandpass',
                                  freqmin=args.bp_copy[0],
                                  freqmax=args.bp_copy[1],
                                  corners=2,
                                  zerophase=True)

        # Check bin counts:
        for tr in rfRstream:
            if (tr.stats.nbin < args.binlim):
                rfRstream.remove(tr)

        # Continue if stream is too short
        if len(rfRstream) < 5:
            continue

        if args.save_plot and not Path('HK_PLOTS').is_dir():
            Path('HK_PLOTS').mkdir(parents=True)

        print('')
        print("Number of radial RF bins: " + str(len(rfRstream)))
        print('')

        # Filter original stream
        rfRstream.filter('bandpass',
                         freqmin=args.bp[0],
                         freqmax=args.bp[1],
                         corners=2,
                         zerophase=True)

        # Initialize the HkStack object, passing the filtered copy only if
        # one was made above
        if args.copy:
            hkstack = HkStack(rfRstream,
                              rfV2=rfRstream_copy,
                              strike=args.strike,
                              dip=args.dip,
                              vp=args.vp)
        else:
            hkstack = HkStack(rfRstream,
                              strike=args.strike,
                              dip=args.dip,
                              vp=args.vp)

        # Update attributes
        hkstack.hbound = args.hbound
        hkstack.kbound = args.kbound
        hkstack.dh = args.dh
        hkstack.dk = args.dk
        hkstack.weights = args.weights

        # Stack with or without dip
        if args.calc_dip:
            hkstack.stack_dip()
        else:
            hkstack.stack()

        # Average stacks
        hkstack.average(typ=args.typ)

        if args.plot:
            hkstack.plot(args.save_plot, args.title, args.form)

        if args.save:
            filename = savepath / (hkstack.rfV1[0].stats.station +
                                   ".hkstack." + args.typ + ".pkl")

            hkstack.save(file=filename)
Exemplo n.º 37
0
 def test_getInventory(self):
     """
     Testing inventory requests.
     """
     client = Client(user='******')
     dt1 = UTCDateTime("1974-01-01T00:00:00")
     dt2 = UTCDateTime("2011-01-01T00:00:00")
     # 1 - XML w/ instruments
     result = client.getInventory('GE',
                                  'SNAA',
                                  '',
                                  'BHZ',
                                  dt1,
                                  dt2,
                                  format='XML')
     self.assertTrue(result.startswith('<?xml'))
     self.assertTrue('code="GE"' in result)
     # 2 - SUDS object w/o instruments
     result = client.getInventory('GE',
                                  'SNAA',
                                  '',
                                  'BHZ',
                                  dt1,
                                  dt2,
                                  instruments=False)
     self.assertTrue(isinstance(result, object))
     self.assertEqual(result.ArclinkInventory.inventory.network._code, 'GE')
     # 3 - SUDS object w/ instruments
     result = client.getInventory('GE',
                                  'SNAA',
                                  '',
                                  'BHZ',
                                  dt1,
                                  dt2,
                                  instruments=True)
     self.assertTrue(isinstance(result, object))
     self.assertEqual(result.ArclinkInventory.inventory.network._code, 'GE')
     self.assertTrue('sensor' in result.ArclinkInventory.inventory)
     self.assertTrue('responsePAZ' in result.ArclinkInventory.inventory)
     # 4 - SUDS object with spatial filters
     client = Client(user='******')
     result = client.getInventory('GE',
                                  'SNAA',
                                  '',
                                  'BHZ',
                                  dt1,
                                  dt2,
                                  min_latitude=-72.0,
                                  max_latitude=-71.0,
                                  min_longitude=-3,
                                  max_longitude=-2)
     self.assertTrue(isinstance(result, object))
     self.assertEqual(result.ArclinkInventory.inventory.network._code, 'GE')
     # 5 - SUDS object with spatial filters with incorrect coordinates
     client = Client(user='******')
     result = client.getInventory('GE',
                                  'SNAA',
                                  '',
                                  'BHZ',
                                  dt1,
                                  dt2,
                                  min_latitude=-71.0,
                                  max_latitude=-72.0,
                                  min_longitude=-2,
                                  max_longitude=-3)
     self.assertTrue(isinstance(result, object))
     self.assertEqual(result.ArclinkInventory.inventory.network._code, 'GE')
Exemplo n.º 38
0
    def pick_ident(self):
        """
        Clean false picks and make picks.
        """

        scnl = SCNL([
            self.stats.station, self.stats.channel, self.stats.network,
            self.stats.location
        ])
        dt = self.stats.delta
        npts_Tma = int(round(self.picker.t_ma / dt, 0))
        LEN = self.stats.npts

        # trigger the earthquakes
        trigger_ptnl_index = np.where(
            self.summary[npts_Tma:LEN] > self.thres[npts_Tma:LEN])
        trigger_ptnl_index = trigger_ptnl_index + np.array(npts_Tma)
        t = np.arange(0, self.stats.npts / self.stats.sampling_rate, dt)
        trigger_ptnl = t[trigger_ptnl_index][0]

        # clean close picks
        window_t_up = int(round(self.picker.t_up / dt, 0))
        trigger_remove1_index = []
        for i in range(0, len(trigger_ptnl) - 1):  # second from last
            if (trigger_ptnl[i + 1] - trigger_ptnl[i]
                ) <= window_t_up * dt:  # avoid consecutive picking
                trigger_remove1_index.append(i + 1)
        # delete close picks
        trigger_ptnl = np.delete(trigger_ptnl, trigger_remove1_index)

        # clean_filtering
        trigger_remove2_index = []
        N = self.picker.nr_coeff
        filter_length = self.picker.nr_len
        for i in range(len(trigger_ptnl)):
            # determine filter_length for each pick:
            r, R = self.winlen(i, trigger_ptnl, filter_length, t, dt)
            M = min(r, R)
            if N * np.std(
                    self.tr.data[int(round(trigger_ptnl[i] / dt, 0)) -
                                 M:int(round(trigger_ptnl[i] / dt, 0))]
            ) >= np.std(
                    self.tr[int(round(trigger_ptnl[i] / dt, 0)
                                ):int(round(trigger_ptnl[i] / dt, 0)) + M]):
                trigger_remove2_index.append(i)
                # delete fake picks
        trigger_ptnl = np.delete(trigger_ptnl, trigger_remove2_index)

        # assign potential picks to trigger
        trigger = trigger_ptnl

        # NB: be careful when copying a list or array - a plain assignment
        # like A = B only aliases B, so changing an element of B changes A
        # as well; hence copy.deepcopy below.
        # roll backward for picking
        picks = []
        for i in range(len(trigger)):
            index = int(round(trigger[i] / dt, 0))
            while True:
                if self.summary[index] > self.summary[index - 1]:
                    index -= 1
                else:
                    break
            picks.append(
                UTCDateTime(self.tr.stats.starttime + round(t[index], 3)))

        # roll forward for maximum signal values
        maxes = copy.deepcopy(trigger)
        for i in range(len(trigger)):
            index = int(round(trigger[i] / dt, 0))
            while True:
                if self.summary[index] < self.summary[index + 1]:
                    index += 1
                else:
                    break
            maxes[i] = round(self.summary[index], 3)

        # signal-to-noise ratio (SNR)
        SNR = copy.deepcopy(trigger)

        for i in range(len(picks)):
            index = int(round(trigger[i] / dt, 0))
            noise = rms(self.summary[index - npts_Tma:index])
            SNR[i] = round(maxes[i] / noise, 1)

        return scnl, picks, trigger, SNR
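
# Note: the rms helper used above is not defined in this excerpt; a minimal
# assumed implementation is:
import numpy as np

def rms(x):
    return np.sqrt(np.mean(np.square(x)))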
Exemplo n.º 39
0
    # # Convert to NumPy character array
    # data = np.fromstring(sl_data, dtype='|S1')
    # print(data)

    # Fill header attributes
    stats = {
        'network': 'BW',
        'station': 'RJOB',
        'location': 'ASD',
        'channel': 'SHZ',
        'npts': len(data),
        'sampling_rate': 100,
        'mseed': {
            'dataquality': 'D'
        },
        'starttime': UTCDateTime(str(sl_header_start_time))
    }
    # set current time
    #stats['starttime'] = UTCDateTime()
    st = Stream([Trace(data=data, header=stats)])
    # write as ASCII file (encoding=0)
    #outfile_name += ".mseed"
    #st.write(outfile_name, format='MSEED', encoding=0, reclen=256)
    st.write(outfile_name,
             format='MSEED',
             encoding=11,
             reclen=256,
             byteorder='>')

    # Show that it worked, convert NumPy character array back to string
    st1 = read(outfile_name)
Exemplo n.º 40
0
from obspy.core import UTCDateTime
import matplotlib.pyplot as plt
import numpy as np
f = open('ALQ_Results.csv','r')

times = []
ws = []
ls = []
wg = []
lg = []
sig2 = []
resi = []

for line in f:
    line = line.split(', ')
    time = UTCDateTime('19' + line[3] + '-' + line[1] + '-' + line[2] + 'T00:00:00')
    times.append(time.year + float(time.julday)/365.25)
    ws.append(float(line[5]))
    ls.append(float(line[6]))
    wg.append(float(line[7]))
    lg.append(float(line[8]))
    sig2.append(float(line[9]))
    resi.append(float(line[11]))
    
    
    
f.close()

import matplotlib as mpl
# Importing and applying font
mpl.rc('font', family = 'serif')
Exemplo n.º 41
0
#!/usr/bin/python

import numpy as np
import scipy.signal as signal
from mpi4py import MPI
from copy import deepcopy

from obspy.core import UTCDateTime, stream, trace
import obspy.signal as osignal

from miic.core.corr_fun import combine_stats

import matplotlib.pyplot as plt

zerotime = UTCDateTime(1971, 1, 1)


def pxcorr(comm, A, **kwargs):
    """ A is an array with time along going the first dimension.
    """
    global zerotime

    t0 = MPI.Wtime()
    msize = A.shape
    ntrc = msize[1]

    psize = comm.Get_size()
    rank = comm.Get_rank()
    # time domain processing
    # map of traces onto processes
    pmap = (np.arange(ntrc) * psize) / ntrc
Exemplo n.º 42
0
from obspy.core import UTCDateTime

years = range(2011, 2061)
for year in years:
    t_bd = UTCDateTime(year, 7, 9)
    party = t_bd + (24 * 60 * 60)
    while party.isoweekday() != 5:
        party += 24 * 60 * 60
    print year, "the party is on", party.date
    
t = UTCDateTime()
count = 0
while t.year < 2013:
    t += 24 * 60 * 60
    if t.weekday() == 4 and t.day == 13:
        count += 1
        print t.date, "is a friday 13th"
print count, "days to take off"
Exemplo n.º 43
0
def preproc_data(filename='out_from_parse_dbselect',
                 to_file=False,
                 num_lines=2,
                 **ev_id):
    #df = pd.read_pickle(filename)
    if ev_id:
        df = filename[filename['id'] == ev_id['ev_id']]
    else:
        df = filename.iloc[:num_lines]

    df = df.reset_index(drop=True)
    pdf = None
    count = 0
    duration = 5 * 60
    for i in range(len(df)):
        data = {
            'x': [],
            'y': [],
            'chan': [],
            'net': [],
            'station': [],
            'datetime': [],
            'depth': [],
            'dsize': [],
            'id': [],
            'impulsive': [],
            'evlo': [],
            'evla': [],
            'mag': [],
            'type': [],
            'weight': [],
            'stlo': [],
            'stla': [],
            'lbl': [],
            'dist': []
        }

        chan = df['chan'][i]
        evlo = df['lon'][i]
        evla = df['lat'][i]
        #chan = str(chan[0]+'H?')
        #print(count)
        try:
            tr = client.get_waveforms(
                station=df['station'][i],
                network=df['net'][i],
                channel='*',
                location='*',
                starttime=UTCDateTime(df['datetime'][i]) - 10,
                endtime=UTCDateTime(df['datetime'][i]) + duration,
                attach_response=True)
            inv = client.get_stations(
                starttime=UTCDateTime(df['datetime'][i]) - 10,
                endtime=UTCDateTime(df['datetime'][i]) + duration,
                station=df.station[i],
                network=df.net[i],
                location='*',
                channel='*',
                level='response')
            tr = tr.select(channel='[EHB]H?')
            if len(tr) > 3:
                tr = tr.select(channel='H??')
            tr.merge(fill_value=0)
            #tr = tr.remove_sensitivity()
            tr.remove_response(inventory=inv, pre_filt=pre_filt)
            tr.resample(100)
            tr.detrend()
            tr.taper(.01)

            if len(tr) == 1:
                stla, stlo, dist = rotater_single(tr, inv, evlo, evla)
                tr.filter('highpass', freq=1.0)
                f, t, S0 = spectrogram(tr[0].data, tr[0].stats.sampling_rate)
                S1 = np.zeros_like(S0)
                data['x'].append(
                    np.array([
                        S0[3:51, 1:41] / np.max(S0[3:51, 1:41]),
                        S1[3:51, 1:41], S1[3:51, 1:41]
                    ]))
                data['dsize'] = 'single'
            elif len(tr) == 3:
                trn, stla, stlo, dist = rotater(tr, inv, evlo, evla)
                trn.filter('highpass', freq=1.0)
                f, t, S0 = spectrogram(trn[0].data, trn[0].stats.sampling_rate)
                f, t, S1 = spectrogram(trn[1].data, trn[1].stats.sampling_rate)
                f, t, S2 = spectrogram(trn[2].data, trn[2].stats.sampling_rate)
                data['x'].append(
                    np.array([
                        S0[3:51, 1:41] / np.max(S0[3:51, 1:41]),
                        S1[3:51, 1:41] / np.max(S1[3:51, 1:41]),
                        S2[3:51, 1:41] / np.max(S2[3:51, 1:41])
                    ]))
                data['dsize'] = 'tri'
            tmp = df.loc[i]
            tmp.stla = stla
            tmp.stlo = stlo
            tmp.dist = dist
            data['station'].append(tmp.station)
            data['chan'].append(tmp.chan)
            data['net'].append(tmp.net)
            data['evlo'].append(tmp.lon)
            data['evla'].append(tmp.lat)
            data['datetime'].append(tmp.datetime)
            data['depth'].append(tmp.depth)
            data['mag'].append(tmp.mag)
            data['id'].append(tmp.id)
            data['type'].append(tmp.type)
            data['weight'].append(tmp.weight)
            data['impulsive'].append(tmp.impulsive)
            data['stla'].append(tmp.stla)
            data['stlo'].append(tmp.stlo)
            data['dist'].append(tmp.dist)
            if tmp.type == 'le':
                data['y'].append([1, 0])
                data['lbl'].append(0)
            elif tmp.type == 'qb':
                data['y'].append([0, 1])
                data['lbl'].append(1)
            if pdf is None:
                pdf = pd.DataFrame(data)
            else:
                # DataFrame.append is deprecated; concatenate instead
                pdf = pd.concat([pdf, pd.DataFrame(data)])

            count += 1
        except Exception:  # was a bare except; skip records that fail to process
            print(i)

    pdf = pdf.reset_index(drop=True)
    predictions1 = model.predict(
        np.array([scale_transform(x) for x in pdf['x']]))
    pred1 = np.argmax(predictions1, axis=1)
    pdf['predictions'] = [np.array(x) for x in predictions1]
    pdf['preds'] = pred1
    num_events = len(set(pdf['id']))
    #pdf[pdf['pred'] != pdf['lbl']]
    print('successfully processed ' + str(count) + ' examples from ' +
          str(num_events) + ' events')
    if to_file:
        stg = 'proc_' + filename  # note: assumes `filename` is a path string
        pdf.to_pickle(stg)
        pdf = None
    else:
        return pdf
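
# Hypothetical usage sketch: `preproc_data` relies on module-level `client`,
# `model`, `pre_filt` and helpers (`rotater`, `scale_transform`) defined
# elsewhere in the source.
# pdf = preproc_data(filename=events_df, num_lines=10)
# pdf = preproc_data(filename=events_df, ev_id='some_event_id')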
Exemplo n.º 44
0
class SeedlinkPlotter(Tkinter.Tk):

    """
    This module plots realtime seismic data from a Seedlink server
    """

    def __init__(self, stream=None, events=None, myargs=None, lock=None,
                 drum_plot=True, trace_ids=None, *args, **kwargs):
        Tkinter.Tk.__init__(self, *args, **kwargs)
        favicon = Tkinter.PhotoImage(
            file=os.path.join(os.path.dirname(os.path.abspath(__file__)),
                              "favicon.gif"))
        self.tk.call('wm', 'iconphoto', self._w, favicon)
        self.wm_title("seedlink-plotter {}".format(myargs.seedlink_server))
        self.focus_set()
        self._bind_keys()
        args = myargs
        self.lock = lock
        ### size and position
        self.geometry(str(args.x_size) + 'x' + str(args.y_size) + '+' + str(
            args.x_position) + '+' + str(args.y_position))
        w, h, pad = self.winfo_screenwidth(), self.winfo_screenheight(), 3
        self._geometry = ("%ix%i+0+0" % (w - pad, h - pad))
        # hide the window decoration
        if args.without_decoration:
            self.wm_overrideredirect(True)
        if args.fullscreen:
            self._toggle_fullscreen(None)

        # main figure
        self.figure = Figure()
        canvas = FigureCanvasTkAgg(self.figure, master=self)

        canvas.show()
        canvas.get_tk_widget().pack(fill=Tkinter.BOTH, expand=1)

        self.backtrace = args.backtrace_time
        self.canvas = canvas
        self.scale = args.scale
        self.args = args
        self.stream = stream
        self.events = events
        self.drum_plot = drum_plot
        self.ids = trace_ids

        # Colors
        if args.rainbow:
            # Rainbow colors !
            self.color = self.rainbow_color_generator(
                int(args.nb_rainbow_colors))
        else:
            # Regular colors: Black, Red, Blue, Green
            self.color = ('#000000', '#e50000', '#0000e5', '#448630')

        self.plot_graph()

    def _quit(self, event):
        event.widget.quit()

    def _bind_keys(self):
        self.bind('<Escape>', self._quit)
        self.bind('q', self._quit)
        self.bind('f', self._toggle_fullscreen)

    def _toggle_fullscreen(self, event):
        g = self.geometry()
        self.geometry(self._geometry)
        self._geometry = g

    def plot_graph(self):
        now = UTCDateTime()
        if self.drum_plot:
            self.stop_time = UTCDateTime(
                now.year, now.month, now.day, now.hour, 0, 0) + 3600
            self.start_time = self.stop_time - self.args.backtrace_time
        else:
            self.start_time = now - self.backtrace
            self.stop_time = now

        with self.lock:
            # leave some data left of our start for possible processing
            self.stream.trim(
                starttime=self.start_time - 120, nearest_sample=False)
            stream = self.stream.copy()

        try:
            logging.info(str(stream.split()))
            if not stream:
                raise Exception("Empty stream for plotting")

            if self.drum_plot or OBSPY_VERSION < [0, 10]:
                stream.merge()
                stream.trim(starttime=self.start_time, endtime=self.stop_time,
                            pad=True, nearest_sample=False)
            else:
                stream.merge(-1)
                stream.trim(starttime=self.start_time, endtime=self.stop_time)
            if self.drum_plot:
                self.plot_drum(stream)
            else:
                self.plot_lines(stream)
        except Exception as e:
            logging.error(e)
            pass
        self.after(int(self.args.update_time * 1000), self.plot_graph)

    def plot_drum(self, stream):
        title = stream[0].id
        if self.scale:
            title += ' - scale: ' + str(self.scale) + ' -'
        else:
            title += ' - autoscale -'
        title += " without filtering"
        self.figure.clear()
        stream.plot(
            fig=self.figure, type='dayplot', interval=self.args.x_scale,
            number_of_ticks=self.args.time_tick_nb, tick_format=self.args.tick_format,
            size=(self.args.x_size, self.args.y_size),
            x_labels_size=8, y_labels_size=8,
            title=title, title_size=14,
            linewidth=0.5, right_vertical_labels=False,
            vertical_scaling_range=self.args.scale,
            subplots_adjust_left=0.04, subplots_adjust_right=0.99,
            subplots_adjust_top=0.95, subplots_adjust_bottom=0.05,
            one_tick_per_line=True,
            color=self.color,
            show_y_UTC_label=False,
            events=self.events)

    def plot_lines(self, stream):
        for id_ in self.ids:
            if not any([tr.id == id_ for tr in stream]):
                net, sta, loc, cha = id_.split(".")
                header = {'network': net, 'station': sta, 'location': loc,
                          'channel': cha, 'starttime': self.start_time}
                data = np.zeros(2)
                stream.append(Trace(data=data, header=header))
        stream.sort()
        self.figure.clear()
        fig = self.figure
        # clear the differing trace.processing attributes, which would
        # otherwise prevent traces sharing one id from being plotted together
        for tr in stream:
            tr.stats.processing = []
        stream.plot(fig=fig, method="fast", draw=False, equal_scale=False,
                    size=(self.args.x_size, self.args.y_size), title="",
                    color='Blue', tick_format=self.args.tick_format,
                    number_of_ticks=self.args.time_tick_nb)
        fig.subplots_adjust(left=0, right=1, top=1, bottom=0)
        bbox = dict(boxstyle="round", fc="w", alpha=0.8)
        path_effects = [withStroke(linewidth=4, foreground="w")]
        pad = 10
        for ax in fig.axes[::2]:
            ax.set_axis_bgcolor("0.8")
        for id_, ax in zip(self.ids, fig.axes):
            ax.set_title("")
            if OBSPY_VERSION < [0, 10]:
                ax.text(0.1, 0.9, id_, va="top", ha="left",
                        transform=ax.transAxes, bbox=bbox,
                        size=self.args.title_size)
            xlabels = ax.get_xticklabels()
            ylabels = ax.get_yticklabels()
            plt.setp(ylabels, ha="left", path_effects=path_effects)
            ax.yaxis.set_tick_params(pad=-pad)
            # treatment for bottom axes:
            if ax is fig.axes[-1]:
                plt.setp(
                    xlabels, va="bottom", size=self.args.time_legend_size, bbox=bbox)
                if OBSPY_VERSION < [0, 10]:
                    plt.setp(xlabels[:1], ha="left")
                    plt.setp(xlabels[-1:], ha="right")
                ax.xaxis.set_tick_params(pad=-pad)
            # all other axes
            else:
                plt.setp(xlabels, visible=False)
            locator = MaxNLocator(nbins=4, prune="both")
            ax.yaxis.set_major_locator(locator)
            ax.yaxis.grid(False)
            ax.grid(True, axis="x")
            if len(ax.lines) == 1:
                ydata = ax.lines[0].get_ydata()
                # if station has no data we add a dummy trace and we end up in
                # a line with either 2 or 4 zeros (2 if dummy line is cut off
                # at left edge of time axis)
                if len(ydata) in [4, 2] and not ydata.any():
                    plt.setp(ylabels, visible=False)
                    ax.set_axis_bgcolor("#ff6666")
        if OBSPY_VERSION >= [0, 10]:
            fig.axes[0].set_xlim(right=date2num(self.stop_time.datetime))
            fig.axes[0].set_xlim(left=date2num(self.start_time.datetime))
        if len(fig.axes) > 5:
            bbox["alpha"] = 0.6
        fig.text(0.99, 0.97, self.stop_time.strftime("%Y-%m-%d %H:%M:%S UTC"),
                 ha="right", va="top", bbox=bbox, fontsize="medium")
        fig.canvas.draw()

    def rgb_to_hex(self, red_value, green_value, blue_value):
        """
            converter for the colors gradient
        """
        return '#%02X%02X%02X' % (red_value, green_value, blue_value)
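        # Added example: rgb_to_hex(255, 0, 0) -> '#FF0000'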

    def rainbow_color_generator(self, max_color):
        """
            Rainbow color generator
        """
        color_list = []
        frequency = 0.3
        for compteur_lignes in xrange(max_color):

            # cast to int so the '%02X' formatting in rgb_to_hex accepts them
            red = int(sin(frequency * compteur_lignes * 2 + 0) * 127 + 128)
            green = int(sin(frequency * compteur_lignes * 2 + 2) * 127 + 128)
            blue = int(sin(frequency * compteur_lignes * 2 + 4) * 127 + 128)

            color_list.append(
                self.rgb_to_hex(red_value=red, green_value=green, blue_value=blue))

        return tuple(color_list)
Exemplo n.º 45
0
from obspy.core import UTCDateTime
from obspy.arclink import Client

client = Client()
t = UTCDateTime("2008-04-17T16:00:00Z")
st = client.getWaveform("BW", "HROE", "", "EH*", t, t + 10 * 60)
print st
st.plot()

from obspy.neries import Client

client = Client()
events = client.getEvents(min_datetime="2008-10-10T08:05:00Z",
                          max_datetime="2008-10-10T08:15:00Z")
print events

event = events[0]
print "origin time is", event['datetime']
print "magnitude is", event['magnitude']
print "longitude is", event['longitude']
print "latitude is", event['latitude']
print "depth is", event['depth']
Exemplo n.º 46
0
from obspy import Stream,Trace
from numpy import genfromtxt,r_,array
from glob import glob
from obspy.core import UTCDateTime
from obspy import read
from mudpy.forward import highpass
from scipy.integrate import cumtrapz
from matplotlib import pyplot as plt
from datetime import datetime
from string import replace

stafile='/Users/dmelgar/Amatrice2016/M6.6/strong_motion/stations/latest.sta'
fsta=open(stafile,'w')
rootpath='/Users/dmelgar/Amatrice2016/M6.6/strong_motion/'
fcorner=[1./20,0.4]
time_epi=UTCDateTime('2016-10-30T06:40:19')
tcut=15
tprevious=5
v_or_d='v'

makesac=True
signalprocess=False
makeplots_raw=False
makemudpy=False
makeplots_proc=False

def bandpass_filter(data,fcorner,fsample,order):
    '''
    Make a zero-phase bandpass filter
    '''
    from numpy import size,array
Exemplo n.º 47
0
    def extract_data(self, index_rows):
        """
        Perform the data extraction.

        :param index_rows: requested data, as produced by `HTTPServer_RequestHandler.fetch_index_rows`
        :yields: sequence of `ExtractedDataSegment`s
        """

        # Pre-scan the index rows:
        # 1) Build processed list for extraction
        # 2) Check if the request is small enough to satisfy
        # Note: accumulated estimate of output bytes will be equal to or higher than actual output
        total_bytes = 0
        request_rows = []
        Request = namedtuple('Request', [
            'srcname', 'filename', 'starttime', 'endtime', 'triminfo', 'bytes',
            'samplerate'
        ])
        try:
            for NRow in index_rows:
                srcname = "_".join(NRow[:4])
                filename = NRow.filename

                logger.debug("EXTRACT: src=%s, file=%s, bytes=%s, rate:%s" %
                             (srcname, filename, NRow.bytes, NRow.samplerate))

                starttime = UTCDateTime(NRow.requeststart)
                endtime = UTCDateTime(NRow.requestend)
                triminfo = self.handle_trimming(starttime, endtime, NRow)
                total_bytes += triminfo[1][1] - triminfo[0][1]
                if self.request_limit > 0 and total_bytes > self.request_limit:
                    raise RequestLimitExceededError(
                        "Result exceeds limit of %d bytes" %
                        self.request_limit)
                if self.dp_replace_re:
                    filename = self.dp_replace_re.sub(self.dp_replace_sub,
                                                      filename)
                if not os.path.exists(filename):
                    raise Exception("Data file does not exist: %s" % filename)
                request_rows.append(
                    Request(srcname=srcname,
                            filename=filename,
                            starttime=starttime,
                            endtime=endtime,
                            triminfo=triminfo,
                            bytes=NRow.bytes,
                            samplerate=NRow.samplerate))
                logger.debug("EXTRACT: src=%s, file=%s, bytes=%s, rate:%s" %
                             (srcname, filename, NRow.bytes, NRow.samplerate))
        except Exception as err:
            import traceback
            traceback.print_exc()
            raise Exception("Error accessing data index: %s" % str(err))

        # Error if request matches no data
        if total_bytes == 0:
            raise NoDataError()

        # Get & return the actual data
        for NRow in request_rows:
            logger.debug(
                "Extracting %s (%s - %s) from %s" %
                (NRow.srcname, NRow.starttime, NRow.endtime, NRow.filename))

            # Iterate through records in section if only part of the section is needed
            if NRow.triminfo[0][2] or NRow.triminfo[1][2]:

                for msri in MSR_iterator(filename=NRow.filename,
                                         startoffset=NRow.triminfo[0][1],
                                         dataflag=False):
                    offset = msri.get_offset()

                    # Done if we are beyond end offset
                    if offset >= NRow.triminfo[1][1]:
                        break

                    yield MSRIDataSegment(msri, NRow.samplerate,
                                          NRow.starttime, NRow.endtime,
                                          NRow.srcname)

                    # Check for passing end offset
                    if (offset +
                            msri.msr.contents.reclen) >= NRow.triminfo[1][1]:
                        break

            # Otherwise, return the entire section
            else:
                yield FileDataSegment(NRow.filename, NRow.triminfo[0][1],
                                      NRow.bytes, NRow.srcname)
Exemplo n.º 48
0
    # state file base (Observatory code and channel will be appended to this)
    state_dir = 'PriorState/'
    # state_file = 'SvSqDistState'
    state_file = None

    # The following define an allowed minimum age for requested data, and a set
    # of discrete times (within a day) at which endtime may fall. Actual endtime
    # and starttime values will be calculated in the code.
    min_obs_age = 600  # in seconds
    every_nth_sec = 300  # in seconds

    # if a custom interval is required, modify the following lines to override
    # the realtime interval calculated from min_obs_age and every_nth_sec,
    # otherwise set starttime and endtime equal to None
    starttime = UTCDateTime(2020, 4, 20, 9, 0, 0)
    endtime = UTCDateTime(2020, 4, 20, 12, 0, 0)

    #starttime = None
    #endtime = None
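
    # Added sketch (an assumption about the downstream code, which is not
    # shown): the realtime interval snaps `now - min_obs_age` down to the
    # most recent every_nth_sec boundary, e.g.:
    # now = UTCDateTime()
    # endtime = UTCDateTime(int((now.timestamp - min_obs_age)
    #                           // every_nth_sec) * every_nth_sec)
    # starttime would then be endtime minus the desired interval length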

    #
    #
    # No more configuration parameters below this point
    #
    #

    # create folders for downloads and output if they don't exist already
    # (function catches race condition if path is created after check)
    def mkdirp(path):
        import os
Exemplo n.º 49
0
from matplotlib import pyplot as plt
from obspy import read
from numpy import genfromtxt
from obspy.core import UTCDateTime

#Plot acceleration
stations = genfromtxt(
    '/Users/dmelgar/Slip_inv/wenc_2008/data/station_info/wenc.gflist',
    usecols=0,
    dtype='S')
stations = stations[5:]
path = '/Users/dmelgar/Slip_inv/wenc_2008/'
time_epi = UTCDateTime('2008-05-12T06:28:04')

for k in range(len(stations)):
    sta = stations[k]
    print sta
    n = read(path + 'data/waveforms/' + sta + '.vel.n')
    e = read(path + 'data/waveforms/' + sta + '.vel.e')
    u = read(path + 'data/waveforms/' + sta + '.vel.u')
    n.trim(starttime=time_epi)
    e.trim(starttime=time_epi)
    u.trim(starttime=time_epi)

    plt.figure()
    plt.subplot(311)
    plt.title(sta)
    plt.plot(n[0].times(), n[0].data)
    plt.ylabel('North (m/s)')
    plt.subplot(312)
    plt.plot(e[0].times(), e[0].data)
Exemplo n.º 50
0
Sspeed = 1000  #S-wave mask
tmax = timedelta(seconds=2. * 60)
minpgd = 0.03  #in m
maxdist = 500  #in km
minsta = 2
Nboot = 1000
hypo = [20.6002, 38.6655, 10.7]

#Get PGD as a function of time for all sites
if get_pgd:
    #initialize
    event = 'lefkada'
    print event
    #Get hypocenter time
    hypo_time = UTCDateTime(
        genfromtxt(path + 'event_info/' + event + '.hypo', dtype='S')[0])
    #Read station data
    stanames = genfromtxt(path + 'station_info/lefkada.sta',
                          usecols=0,
                          dtype='S')
    station_coords = genfromtxt(path + 'station_info/lefkada.sta',
                                usecols=[1, 2])
    #Now loop over station data at each epoch
    for ksta in range(len(stanames)):
        #print '... fetching PGD for '+stanames[ksta]
        try:
            n = read(data_path + '/' + stanames[ksta] + '.LXN.sac')
            e = read(data_path + '/' + stanames[ksta] + '.LXE.sac')
            u = read(data_path + '/' + stanames[ksta] + '.LXZ.sac')
            if n[0].stats.npts > 0:
                #Trim to times of interest
Exemplo n.º 51
0
def test_default_inputs():
    input_dics = test_read_input_command()

    assert os.path.basename(input_dics['datapath']) == 'obspydmt-data'
    assert input_dics['event_based'] is True
    assert input_dics['data_source'] == ['IRIS']
    assert input_dics['waveform'] is True
    assert input_dics['response'] is True
    assert input_dics['dir_select'] is False
    assert input_dics['list_stas'] is False
    assert input_dics['min_epi'] is False
    assert input_dics['max_epi'] is False
    assert input_dics['min_azi'] is False
    assert input_dics['max_azi'] is False
    assert input_dics['test'] is False
    assert (UTCDateTime(input_dics['max_date']) -
            UTCDateTime(input_dics['min_date']) > (60 * 60 * 24 * 365 * 45))
    assert input_dics['preset'] == 0.0
    assert input_dics['offset'] == 1800.0
    assert input_dics['waveform_format'] is False
    assert input_dics['resample_method'] == 'lanczos'
    assert input_dics['sampling_rate'] is False
    assert input_dics['net'] == '*'
    assert input_dics['sta'] == '*'
    assert input_dics['loc'] == '*'
    assert input_dics['cha'] == '*'
    assert input_dics['lat_cba'] is None
    assert input_dics['lon_cba'] is None
    assert input_dics['mr_cba'] is None
    assert input_dics['Mr_cba'] is None
    assert input_dics['mlat_rbb'] is None
    assert input_dics['Mlat_rbb'] is None
    assert input_dics['mlon_rbb'] is None
    assert input_dics['Mlon_rbb'] is None
    assert input_dics['req_np'] == 4
    assert input_dics['process_np'] == 4
    assert input_dics['username'] is None
    assert input_dics['password'] is None
    assert input_dics['event_catalog'] == 'LOCAL'
    assert input_dics['min_depth'] == -10.0
    assert input_dics['max_depth'] == +6000.0
    assert input_dics['min_mag'] == 3.0
    assert input_dics['max_mag'] == 10.
    assert input_dics['mag_type'] is None
    assert input_dics['evlatmin'] is None
    assert input_dics['evlatmax'] is None
    assert input_dics['evlonmin'] is None
    assert input_dics['evlonmax'] is None
    assert input_dics['evlat'] is None
    assert input_dics['evlon'] is None
    assert input_dics['evradmin'] is None
    assert input_dics['evradmax'] is None
    assert input_dics['interval'] == 3600 * 24
    assert input_dics['pre_process'] == 'process_unit'
    assert input_dics['select_data'] is False
    assert input_dics['corr_unit'] == 'DIS'
    assert input_dics['pre_filt'] == '(0.008, 0.012, 3.0, 4.0)'
    assert input_dics['water_level'] == 600.0
    assert input_dics['plot_dir_name'] == 'raw'
    assert input_dics['plot_save'] is False
    assert input_dics['plot_format'] is False
    assert input_dics['show_no_plot'] is None
    assert input_dics['plot_lon0'] == 180
    assert input_dics['plot_style'] == 'simple'
    assert input_dics['plotxml_date'] is False
    assert input_dics['plotxml_start_stage'] == 1
    assert input_dics['plotxml_end_stage'] == 100
    assert input_dics['plotxml_min_freq'] == 0.01
    assert input_dics['plotxml_percentage'] == 80
    assert input_dics['plotxml_phase_threshold'] == 10.
    assert input_dics['plotxml_output'] == 'VEL'
    assert input_dics['email'] is False
    assert input_dics['arc_avai_timeout'] == 40
    assert input_dics['arc_wave_timeout'] == 2
Exemplo n.º 52
0
def main():

    print()
    print(
        "################################################################################"
    )
    print(
        "#        __                 _                                      _           #"
    )
    print(
        "#  _ __ / _|_ __  _   _    | |__   __ _ _ __ _ __ ___   ___  _ __ (_) ___ ___  #"
    )
    print(
        "# | '__| |_| '_ \| | | |   | '_ \ / _` | '__| '_ ` _ \ / _ \| '_ \| |/ __/ __| #"
    )
    print(
        "# | |  |  _| |_) | |_| |   | | | | (_| | |  | | | | | | (_) | | | | | (__\__ \ #"
    )
    print(
        "# |_|  |_| | .__/ \__, |___|_| |_|\__,_|_|  |_| |_| |_|\___/|_| |_|_|\___|___/ #"
    )
    print(
        "#          |_|    |___/_____|                                                  #"
    )
    print(
        "#                                                                              #"
    )
    print(
        "################################################################################"
    )
    print()

    # Run Input Parser
    args = arguments.get_harmonics_arguments()

    # Load Database
    db = stdb.io.load_db(fname=args.indb)

    # Construct station key loop
    allkeys = sorted(db.keys())

    # Extract key subset
    if len(args.stkeys) > 0:
        stkeys = []
        for skey in args.stkeys:
            stkeys.extend([s for s in allkeys if skey in s])
    else:
        stkeys = sorted(db.keys())

    # Loop over station keys
    for stkey in list(stkeys):

        # Extract station information from dictionary
        sta = db[stkey]

        # Define path to see if it exists
        datapath = Path('DATA') / stkey
        if not datapath.is_dir():
            raise Exception('Path to ' + str(datapath) +
                            " doesn't exist - aborting")

        # Get search start time
        if args.startT is None:
            tstart = sta.startdate
        else:
            tstart = args.startT

        # Get search end time
        if args.endT is None:
            tend = sta.enddate
        else:
            tend = args.endT

        if tstart > sta.enddate or tend < sta.startdate:
            continue

        # Temporary print locations
        tlocs = sta.location
        if len(tlocs) == 0:
            tlocs = ['']
        for il in range(0, len(tlocs)):
            if len(tlocs[il]) == 0:
                tlocs[il] = "--"
        sta.location = tlocs

        # Update Display
        print(" ")
        print(" ")
        print("|===============================================|")
        print("|===============================================|")
        print("|                   {0:>8s}                    |".format(
            sta.station))
        print("|===============================================|")
        print("|===============================================|")
        print("|  Station: {0:>2s}.{1:5s}                            |".format(
            sta.network, sta.station))
        print("|      Channel: {0:2s}; Locations: {1:15s}  |".format(
            sta.channel, ",".join(tlocs)))
        print("|      Lon: {0:7.2f}; Lat: {1:6.2f}                |".format(
            sta.longitude, sta.latitude))
        print("|      Start time: {0:19s}          |".format(
            sta.startdate.strftime("%Y-%m-%d %H:%M:%S")))
        print("|      End time:   {0:19s}          |".format(
            sta.enddate.strftime("%Y-%m-%d %H:%M:%S")))
        print("|-----------------------------------------------|")

        rfRstream = Stream()
        rfTstream = Stream()

        datafiles = [x for x in datapath.iterdir() if x.is_dir()]
        for folder in datafiles:

            # Skip hidden folders
            if folder.name.startswith('.'):
                continue

            date = folder.name.split('_')[0]
            year = date[0:4]
            month = date[4:6]
            day = date[6:8]
            dateUTC = UTCDateTime(year + '-' + month + '-' + day)

            if dateUTC > tstart and dateUTC < tend:

                filename = folder / "RF_Data.pkl"
                if filename.is_file():
                    file = open(filename, "rb")
                    rfdata = pickle.load(file)
                    # the `> args.snr` comparison below is an assumed
                    # reconstruction; the threshold is truncated in the source
                    if (rfdata[0].stats.snrh > args.snrh and
                            rfdata[0].stats.snr > args.snr and
                            rfdata[0].stats.cc > args.cc):

                        rfRstream.append(rfdata[1])
                        rfTstream.append(rfdata[2])

                    file.close()

            else:
                continue

        if args.no_outl:
            # Remove outliers wrt variance
            varR = np.array([np.var(tr.data) for tr in rfRstream])

            # Calculate outliers
            medvarR = np.median(varR)
            madvarR = 1.4826 * np.median(np.abs(varR - medvarR))
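            # Added note: 1.4826 * MAD is a consistent estimator of the
            # standard deviation for normal data, so robustR below is a robust
            # z-score; traces with a score above 2 are treated as outliers.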
            robustR = np.abs((varR - medvarR) / madvarR)
            outliersR = np.arange(len(rfRstream))[robustR > 2.]
            for i in outliersR[::-1]:
                rfRstream.remove(rfRstream[i])
                rfTstream.remove(rfTstream[i])

            # Do the same for transverse
            varT = np.array([np.var(tr.data) for tr in rfTstream])
            medvarT = np.median(varT)
            madvarT = 1.4826 * np.median(np.abs(varT - medvarT))
            robustT = np.abs((varT - medvarT) / madvarT)
            outliersT = np.arange(len(rfTstream))[robustT > 2.]
            for i in outliersT[::-1]:
                rfRstream.remove(rfRstream[i])
                rfTstream.remove(rfTstream[i])

        # Try binning if specified
        if args.nbin is not None:
            rf_tmp = binning.bin(rfRstream,
                                 rfTstream,
                                 typ='baz',
                                 nbin=args.nbin + 1)
            rfRstream = rf_tmp[0]
            rfTstream = rf_tmp[1]

        # Filter original streams
        rfRstream.filter('bandpass',
                         freqmin=args.bp[0],
                         freqmax=args.bp[1],
                         corners=2,
                         zerophase=True)
        rfTstream.filter('bandpass',
                         freqmin=args.bp[0],
                         freqmax=args.bp[1],
                         corners=2,
                         zerophase=True)

        # Initialize the HkStack object
        harmonics = Harmonics(rfRstream, rfTstream)

        # Stack with or without dip
        if args.find_azim:
            harmonics.dcomp_find_azim(xmin=args.trange[0], xmax=args.trange[1])
            print("Optimal azimuth for trange between " + str(args.trange[0]) +
                  " and " + str(args.trange[1]) + "is: " + str(harmonics.azim))
        else:
            harmonics.dcomp_fix_azim(azim=args.azim)

        if args.plot:
            harmonics.plot(args.ymax, args.scale, args.save_plot, args.title,
                           args.form)

        if args.save:
            # note: the source referenced an undefined `hkstack` here; the
            # radial stream's station code is an assumed substitute
            filename = datapath / (rfRstream[0].stats.station +
                                   ".harmonics.pkl")
            harmonics.save()
Exemplo n.º 53
0
# decide if it is high-frequency or not
HF = True

chans = ['HHZ', 'HHE', 'HHN']
fig = plt.figure(1, figsize=(12, 12))
plt.subplots_adjust(hspace=0.001)
for idx, chan in enumerate(chans):
    st = Stream()
    for s in range(1, 9):

        if debug:
            print('On ' + str(s) + ' ' + chan)
        st += read(string + str(s) + '/' + chan + '.D/XX*')

    stime = UTCDateTime('2017-05-03T01:10:00.0')
    etime = UTCDateTime('2017-05-03T02:10:00.0')
    st.trim(starttime=stime, endtime=etime)

    nsHF, psHF, fHF = selfnoise(st)
    st.decimate(5)

    st.decimate(2)

    st.decimate(5)
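    # Added note: the three decimate calls reduce the sampling rate by a
    # combined factor of 50 (e.g. 100 Hz HH? channels end up at 2 Hz).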

    print(st)
    ns, ps, f = selfnoise(st)

    #st.plot()
    if chan == 'HHE':
Exemplo n.º 54
0
def preprocessing_function_asdf(dir_obs, eventname, time_increment, end_time,
                                min_period, max_period):
    processdir = eventname + 'preprocessed'
    processdata = 'preprocessed_' + str(min_period) + 's_to_' + str(max_period) + 's.h5'
    tag_name = 'preprocessed_' + str(min_period) + 's_to_' + str(max_period) + 's'
    if os.path.exists(processdir + '/' + processdata):
        os.system('rm -rf ' + processdir + '/' + processdata)
        # os.makedirs(processdir)
        os.system('cp ' + dir_obs + ' ' + processdir + '/' + processdata)
    def zerophase_chebychev_lowpass_filter(trace, freqmax):
        """
        Custom Chebychev type two zerophase lowpass filter useful for
        decimation filtering.

        This filter is stable up to a reduction in frequency with a factor of
        10. If more reduction is desired, simply decimate in steps.

        Partly based on a filter in ObsPy.

        :param trace: The trace to be filtered.
        :param freqmax: The desired lowpass frequency.

        Will be replaced once ObsPy has a proper decimation filter.
        """
        # rp - maximum ripple of passband, rs - attenuation of stopband
        rp, rs, order = 1, 96, 1e99
        ws = freqmax / (trace.stats.sampling_rate * 0.5)  # stop band frequency
        wp = ws  # pass band frequency

        while True:
            if order <= 12:
                break
            wp *= 0.99
            order, wn = signal.cheb2ord(wp, ws, rp, rs, analog=0)

        b, a = signal.cheby2(order, rs, wn, btype="low", analog=0, output="ba")
        print(trace)
        # Apply twice to get rid of the phase distortion.
        trace.data = signal.filtfilt(b, a, trace.data)

    # =========================================================================
    # Read ASDF file
    # =========================================================================

    ds = pyasdf.ASDFDataSet(processdir + '/' + processdata)
    print(ds)
    # avoid shadowing the builtin `list`
    station_list = ds.waveforms.list()
    event = ds.events[0]
    dt = time_increment
    sampling_rate = 1.0 / dt
    start_time = -time_increment
    npts = int(round((end_time - start_time) / time_increment) + 1)
    origin = event.preferred_origin() or event.origins[0]
    print(origin.time, start_time)
    start = UTCDateTime(origin.time)
    print(start + start_time)
    starttime = start_time + float(start)
    print(starttime)
    endtime = end_time + starttime
    duration = end_time - start_time

    f2 = 0.9 / max_period
    f3 = 1.1 / min_period
    # Recommendations from the SAC manual.
    f1 = 0.5 * f2
    f4 = 2.0 * f3
    pre_filt = (f1, f2, f3, f4)
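    # Added example: for a 40-100 s passband this evaluates to
    # pre_filt = (0.0045, 0.009, 0.0275, 0.055) Hz.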

    for _i, stid in enumerate(station_list):
        #print(stid)
        stla, stlo, evz = ds.waveforms[stid].coordinates.values()
        st = ds.waveforms[stid].raw_recording
        for tr in st:
            print(tr)
            # Trim to reduce processing costs
            #tr.trim(starttime - 0.2 * duration, endtime + 0.2 * duration)
            print(tr)
            while True:
                decimation_factor = int(dt / tr.stats.delta)
                # Decimate in steps for large sample rate reductions.
                if decimation_factor > 8:
                    decimation_factor = 8
                if decimation_factor > 1:
                    new_nyquist = tr.stats.sampling_rate / 2.0 / float(
                        decimation_factor)
                    #print(new_nyquist)
                    zerophase_chebychev_lowpass_filter(tr, new_nyquist)
                    print(tr)
                    tr.decimate(factor=decimation_factor, no_filter=True)
                else:
                    break
        inv=ds.waveforms[stid].StationXML
        # Detrend and taper
        #print('start')
        st.detrend("linear")
        st.detrend("demean")
        st.taper(max_percentage=0.05, type="hann")
        # Instrument correction
        try:
            st.attach_response(inv)
            st.remove_response(output="DISP", pre_filt=pre_filt,
                               zero_mean=False, taper=False)
        except Exception as e:
            net = inv.get_contents()['channels'][0].split('.', 2)[0]
            sta = inv.get_contents()['channels'][0].split('.', 2)[1]

            # `processing_info` is undefined in this snippet; the local output
            # file name is an assumed substitute
            msg = ("Station: %s.%s could not be corrected with the help of"
                   " asdf file: '%s'. Due to: '%s'. Will be skipped.") \
                % (net, sta, processdir + '/' + processdata, repr(e))
            raise LASIFError(msg)

        # Bandpass filtering
        st.detrend("linear")
        st.detrend("demean")
        st.taper(0.05, type="cosine")
        st.filter("bandpass", freqmin=1.0 / max_period,
                  freqmax=1.0 / min_period, corners=3, zerophase=False)

        st.detrend("linear")
        st.detrend("demean")
        st.taper(0.05, type="cosine")
        st.filter("bandpass", freqmin=1.0 / max_period,
                  freqmax=1.0 / min_period, corners=3, zerophase=False)

        # Sinc interpolation
        for tr in st:
            tr.data = np.require(tr.data, requirements="C")
        st.interpolate(sampling_rate=sampling_rate, method="lanczos",
                       starttime=starttime, window="blackman", a=12, npts=npts)
        # Convert to single precision to save space.
        for tr in st:
            tr.data = np.require(tr.data, dtype="float32", requirements="C")

        ds.add_waveforms(st,tag=tag_name)
        del ds.waveforms[stid].raw_recording
        del ds.waveforms[stid].preprocess
Exemplo n.º 55
0
from obspy.core import UTCDateTime
from pytz import timezone



try:
    in_path = sys.argv[1]
    station = sys.argv[2]
except:
    print __doc__
    raise

try:
    tme = sys.argv[3]
    today = UTCDateTime(tme)
    t = UTCDateTime(tme)
except:
    t = datetime.datetime.today()
    today = UTCDateTime()

tu = t.hour
tl = today.hour
td = tu - tl


time_label = "hr 00:00:00 UTC (+%d hr local time)" % (int(td))
in_file = "%s/%s.WSX.D.%02d%02d%02d.0000"%(in_path,station,today.day,today.month,today.year-2000)
outfile = "%s/%s_%02d%02d%02d_wet.png"%(in_path,station,today.day,today.month,today.year-2000)

def moving_average(x, n, type='simple'):
    """
Exemplo n.º 56
0
        array = args.array

    if args.fdresults:
        fdresultsT = args.fdresults
    else:
        print("default table FD_RESULTS")
        fdresultsT = 'FD_RESULTS'

    if args.pfkid:
        pfkid = args.pfkid

    if args.pfdid:
        pfdid = args.pfdid

    if args.tS:
        t_S = UTCDateTime(args.tS)
        print('setting initial time:', t_S)
    else:
        t_S = None

    if args.tE:
        t_E = UTCDateTime(args.tE)
        print('setting end time:', t_E)
    else:
        t_E = None

    if args.fval:
        fval = args.fval
        print('setting fval:', fval)
    else:
        fval = 0
Exemplo n.º 57
0
def savecorrs(correlation, phaseweight, n_stack, id1, id2, geoinf,
              corrname, corrtype, outdir, params=None, timestring='',
              startday=None, endday=None):

#==============================================================================
    #- Write metadata info to SAC header
    #- Store results
#==============================================================================

    tr = Trace(data=correlation)
    tr.stats.sac = {}

    if startday is None:
        startday = UTCDateTime(inp.startdate)
    if endday is None:
        endday = UTCDateTime(inp.enddate)
        
    (lat1, lon1, lat2, lon2, dist, az, baz)=geoinf
    
    
    # Add a preprocessing string:
    prepstring = get_prepstring()
    

    tr.stats.sampling_rate = inp.Fs[-1]
    tr.stats.starttime = UTCDateTime(2000, 1, 1) - inp.max_lag * inp.Fs[-1]
    tr.stats.network=id1.split('.')[0]
    tr.stats.station=id1.split('.')[1]
    tr.stats.location=id1.split('.')[2]
    tr.stats.channel=id1.split('.')[3]
    
    tr.stats.sac['kt2']=prepstring
    tr.stats.sac['kt8']=corrtype
    tr.stats.sac['user0']=n_stack
    tr.stats.sac['user1']=inp.winlen
    tr.stats.sac['user2']=inp.olap
    tr.stats.sac['b']=-inp.max_lag
    tr.stats.sac['e']=inp.max_lag
    tr.stats.sac['kt0']=startday.strftime('%Y%j')
    tr.stats.sac['kt1']=endday.strftime('%Y%j')
    tr.stats.sac['iftype']=1
    tr.stats.sac['stla']=lat1
    tr.stats.sac['stlo']=lon1
    tr.stats.sac['kevnm']=id2.split('.')[1]
    tr.stats.sac['evla']=lat2
    tr.stats.sac['evlo']=lon2
    tr.stats.sac['dist']=dist
    tr.stats.sac['az']=az
    tr.stats.sac['baz']=baz
    tr.stats.sac['kuser0']=id2.split('.')[0]
    tr.stats.sac['kuser1']=id2.split('.')[2]
    tr.stats.sac['kuser2']=id2.split('.')[3]
    
    if params is not None:
        tr.stats.sac['user3']=params[0]
        tr.stats.sac['user4']=params[1]
        tr.stats.sac['user5']=params[2]
        tr.stats.sac['user6']=params[3]
        tr.stats.sac['user7']=params[4]
        tr.stats.sac['user8']=params[5]
    
    
    #- open file and write correlation function
    fileid=outdir+id1+'.'+id2+'.'+corrtype+'.'+\
    corrname+timestring+'.SAC'
    tr.write(fileid,format='SAC')
    
    if phaseweight is not None:
        
        fileid_cwt=outdir+id1+'.'+id2+'.'+corrtype+\
        '.'+corrname+timestring+'.npy'
        np.save(fileid_cwt,phaseweight)
Exemplo n.º 58
0
 def monthScale(self, top=True):
     """
     Creates the subdivisions of the month scale.
     """
     color = self.color2
     # Shortcut to window geometry.
     starttime = self.env.starttime
     endtime = self.env.endtime
     time_range = float(endtime - starttime)
     # Pixel counts.
     start_x = 0
     end_x = self.width
     x_range = end_x - start_x
     start_y = 15
     end_y = start_y + self.subscale_height
     y_range = self.subscale_height
     # Get the number of months.
     if endtime.year == starttime.year:
         months = (endtime.month - starttime.month) + 1
     else:
         months = 0
         years = endtime.year - starttime.year
         # If more than one year add twelve months per year.
         if years > 1:
             months += 12 * (years - 1)
         # Get boundaries.
         months += (12 - starttime.month) + 1
         months += endtime.month
     months_count = range(months)
     # Loop over every month.
     for month in months_count:
         # Get the year and month of the currently treated month.
         cur_month = starttime.month + month
         if cur_month > 12:
             cur_year = starttime.year + ((cur_month - 1) // 12)
         else:
             cur_year = starttime.year
         cur_month = cur_month % 12
         # Account for weird modulo operation.
         if cur_month == 0:
             cur_month = 12
         # Some variables.
         start_of_month = UTCDateTime(cur_year, cur_month, 1)
         if cur_month + 1 > 12:
             cur_month = 1
             cur_year += 1
         else:
             cur_month += 1
         end_of_month = UTCDateTime(cur_year, cur_month, 1)
         # Calculate boundaries.
         start_frac = (start_of_month - starttime) / time_range
         if start_frac < 0:
             start_frac = 0
         start = start_frac * x_range
         if start < 0:
             start = 0
         start += start_x
         end_frac = (endtime - end_of_month) / time_range
         end_frac = 1.0 - end_frac
         if end_frac > 1.0:
             end_frac = 1.0
         end = end_frac * x_range
         end = x_range - end
         if end > x_range:
             end = x_range
         end += start_x
         graph_width = (end_frac - start_frac) * x_range
         # Only draw every second box.
         if not month % 2:
             month_box = QtGui.QGraphicsRectItem(start, start_y,
                                                 graph_width, y_range)
             month_box.start_frac = start_frac
             month_box.end_frac = end_frac
             month_box.setBrush(color)
             month_box.setZValue(-198)
             self.addToGroup(month_box)
             # Add to list for easier tracking.
             self.month_boxes.append(month_box)
         # If too narrow do not add a name. This has to be set once and stay
         # valid for all following months, otherwise only the names of the
         # long months might appear.
         # XXX: This might result in only the larger months' labels being
         # drawn.
         if graph_width < 30:
             continue
         # Add name.
         month = start_of_month.month
         name = MONTHS[month]
         month_name = QtGui.QGraphicsSimpleTextItem(name)
         rect = month_name.boundingRect()
         # XXX: The +2 at the end is just trial and error. I cannot figure
         # out a way to calculate it. The height of the rectangle is 16
         # while the year subscale height is only 15. But the text is still
         # positioned too high without the +2.
         month_name.moveBy(((end_frac - start_frac) / 2 + start_frac) * self.width - \
                         rect.width() / 2, start_y + 2)
         month_name.start_frac = start_frac
         month_name.end_frac = end_frac
         self.month_labels.append(month_name)
         self.addToGroup(month_name)
Exemplo n.º 59
0
# 2010-09 Tobi

import matplotlib
matplotlib.use("AGG")

import subprocess
from obspy.core import UTCDateTime, Stream, AttribDict
from obspy.signal import coincidenceTrigger, cosTaper
from obspy.seishub import Client


NET = "BW"
STATIONS = ("KW1", "KW2", "KW3")
CHANNEL = "EHZ"
# search from 2h before now to 1h before now (plus 30 s overlap)
T1 = UTCDateTime() - (60 * 60 * 2)
T2 = T1 + (60 * 60 * 1) + 30
PAR = dict(LOW=10.0, # bandpass low corner
           HIGH=20.0, # bandpass high corner
           STA=0.5, # length of sta in seconds
           LTA=10, # length of lta in seconds
           ON=3.5, # trigger on threshold
           OFF=1, # trigger off threshold
           ALLOWANCE=1.2, # time in seconds to extend trigger-off time
           MAXLEN=10, # maximum trigger length in seconds
           MIN_STATIONS=3) # minimum of coinciding stations for alert
PAR = AttribDict(PAR)
SUMMARY = "/scratch/kw_trigger/kw_trigger.txt"
PLOTDIR = "/scratch/kw_trigger/"
MAILTO = ["megies"]
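
# Added sketch (assumed usage of the 0.x-era API imported above): feeding the
# parameters into the coincidence trigger after fetching T1-T2 data.
# st = Stream()  # ... fetch, bandpass PAR.LOW-PAR.HIGH, taper ...
# triggers = coincidenceTrigger("recstalta", PAR.ON, PAR.OFF, st,
#                               PAR.MIN_STATIONS, sta=PAR.STA, lta=PAR.LTA,
#                               max_trigger_length=PAR.MAXLEN,
#                               trigger_off_extension=PAR.ALLOWANCE)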
Exemplo n.º 60
0
 def dayScale(self, top=True):
     """
     Creates the subdivisions of the month scale.
     """
     color = self.color1
     starttime = self.env.starttime
     endtime = self.env.endtime
     time_range = float(endtime - starttime)
     # Pixel counts.
     start_x = 0
     end_x = self.width
     x_range = end_x - start_x
     # Top or bottom day scale.
     start_y = 30
     end_y = 45
     y_range = self.subscale_height
     # Get the number of days.
     starttime_day = UTCDateTime(starttime.year, starttime.month,
                                 starttime.day)
     endtime_day = UTCDateTime(endtime.year, endtime.month, endtime.day)
     days = int((endtime_day - starttime_day) / 86400) + 1
     days_count = range(days)
     # Use the middle of the starting date to later calculate the current
     # day and account for leap seconds.
     noon_of_start = UTCDateTime(starttime.year, starttime.month,
                                 starttime.day, 12, 0, 0)
     # Only draw if there are less or equal than 150 days.
     if days > 150:
         return
     # Loop over every day.
     for day in days_count:
         today = noon_of_start + 86400 * day
         # Some variables.
         start_of_day = UTCDateTime(today.year, today.month, today.day, 0,
                                    0, 0)
         end_of_day = start_of_day + 86400
         # Calculate boundaries.
         start_frac = (start_of_day - starttime) / time_range
         if start_frac < 0:
             start_frac = 0
         start = start_frac * x_range
         if start < 0:
             start = 0
         start += start_x
         end_frac = (endtime - end_of_day) / time_range
         end_frac = 1.0 - end_frac
         if end_frac > 1.0:
             end_frac = 1.0
         end = end_frac * x_range
         end = x_range - end
         if end > x_range:
             end = x_range
         end += start_x
         graph_width = (end_frac - start_frac) * x_range
         # Only draw every second box.
         if not day % 2:
             day_box = QtGui.QGraphicsRectItem(start, start_y, graph_width,
                                               y_range)
             day_box.start_frac = start_frac
             day_box.end_frac = end_frac
             day_box.setBrush(color)
             day_box.setZValue(-198)
             self.addToGroup(day_box)
             # Add to list for easier tracking.
             self.day_boxes.append(day_box)
         # If too narrow do not add a name.
         if graph_width < 20:
             continue
         # Add name.
         name = str(today.day)
         day_name = QtGui.QGraphicsSimpleTextItem(name)
         rect = day_name.boundingRect()
         # XXX: The +2 at the end is just trial and error. I cannot figure
         # out a way to calculate it. The height of the rectangle is 16
         # while the year subscale height is only 15. But the text is still
         # positioned too high without the +2.
         day_name.moveBy(((end_frac - start_frac) / 2 + start_frac) * self.width - \
                         rect.width() / 2, start_y + 2)
         self.addToGroup(day_name)
         day_name.start_frac = start_frac
         day_name.end_frac = end_frac
         self.day_labels.append(day_name)