Example #1
 def __init__(self,
              trace,
              time,
              name='',
              comments='',
              method=method_other,
              aic=None,
              n0_aic=None,
              *args, **kwargs):
     self.trace = trace
     if time < 0 or time >= len(self.trace.signal):
         raise ValueError("Event position must be a value between 0 and %d"
                          % len(self.trace.signal))
     self.stime = time
     self.name = name
     self.method = method
     self.aic = aic
     self.n0_aic = n0_aic
     super(ApasvoEvent, self).__init__(time=self.stime,
                                       method_id=ResourceIdentifier(method),
                                       creation_info=CreationInfo(
                                           author=kwargs.get('author', ''),
                                           agency_id=kwargs.get('agency', ''),
                                           creation_time=UTCDateTime.now(),
                                       ),
                                       waveform_id=WaveformStreamID(
                                           network_code=self.trace.stats.get('network', ''),
                                           station_code=self.trace.stats.get('station', ''),
                                           location_code=self.trace.stats.get('location', ''),
                                           channel_code=self.trace.stats.get('channel', ''),
                                       ),
                                       *args,
                                       **kwargs)
     self.comments = comments
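The constructor above forwards its metadata straight into ObsPy's event model. A minimal sketch of the same wiring with plain ObsPy objects (the identifiers and author below are illustrative, not from ApasvoEvent):

from obspy import UTCDateTime
from obspy.core.event import (CreationInfo, Pick, ResourceIdentifier,
                              WaveformStreamID)

# Hypothetical pick carrying the same kind of metadata as ApasvoEvent.
pick = Pick(time=UTCDateTime('2012-01-01T00:00:00'),
            method_id=ResourceIdentifier('smi:example.org/picker/stalta'),
            creation_info=CreationInfo(author='someone',
                                       creation_time=UTCDateTime.now()),
            waveform_id=WaveformStreamID(network_code='BW',
                                         station_code='FUR'))
print(pick.time)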
Example #2
    def __init__(self, net, station, selectors, seqnum, timestamp):
        """
        Creates a new instance of SLNetStation.

        :param net: network code.
        :param station: station code.
        :param selectors: selectors for this net/station, null if none.
        :param seqnum: SeedLink sequence number of last packet received,
            -1 to start at the next data.
        :param timestamp: SeedLink time stamp in a UTCDateTime format for
            last packet received, null for none.
        """
        self.net = str(net)
        self.station = str(station)
        # print("DEBUG: selectors:", selectors)
        if selectors is not None:
            self.selectors = selectors
        else:
            self.selectors = []
        self.seqnum = seqnum
        if timestamp is not None:
            self.btime = UTCDateTime(timestamp)
        else:
            self.btime = None
Example #3
def shakeview(request):
	DEFAULTSTATION = 'NE.ORNO.00.HHZ'
	try:
		if request.GET['sta'] in ('AM.R6A3B.00.EHZ', 'AM.R35E7.00.SHZ',
		                          'AM.RCB43.00.SHZ', 'AM.R4989.00.EHZ',
		                          'NE.ORNO.00.HHZ'):
			DEFAULTSTATION = request.GET['sta']
	except MultiValueDictKeyError:
		pass
	JULIANDATE = UTCDateTime.now().strftime('%Y.%j')
	AVAIL = '/media/shakedown/avail/%s.png' % JULIANDATE
	HELI = '/media/shakedown/cronplots/%s.%s-heli.png' % (DEFAULTSTATION, JULIANDATE)
	SPEC = '/media/shakedown/cronplots/%s.%s-spec.png' % (DEFAULTSTATION, JULIANDATE)
	SPECBP = '/media/shakedown/cronplots/%s.%s-spec-band.png' % (DEFAULTSTATION, JULIANDATE)
	HELIBP = '/media/shakedown/cronplots/%s.%s-heli-band.png' % (DEFAULTSTATION, JULIANDATE)


	try:
		lmtime = os.path.getmtime('/var/www/nezsite/nezsite/nezsite/media/shakedown/cronplots/%s.%s-heli.png' % (DEFAULTSTATION, JULIANDATE))
	except OSError:
		lmtime = 0
	mtime = datetime.utcfromtimestamp(lmtime)

	context = {
		'sta': DEFAULTSTATION,
		'heli': HELI,
		'spec': SPEC,
		'helibp': HELIBP,
		'specbp': SPECBP,
		'avail': AVAIL,
		'mtime': mtime,
		'time': timezone.now(),
		'page': 'Seismic Monitor',
	}
	return render(request, 'shake/shake.html', context)
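The view keys all plot filenames to the current Julian date. A short sketch of that naming scheme (station code and directory layout taken from the view above):

from obspy import UTCDateTime

station = 'NE.ORNO.00.HHZ'
julian = UTCDateTime.now().strftime('%Y.%j')  # e.g. '2015.185'
heli = '/media/shakedown/cronplots/%s.%s-heli.png' % (station, julian)
print(heli)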
Example #4
def testmaxabs(_data):
    mseed = _data['mseed']
    trace = mseed[0]

    t, g = maxabs(trace)

    assert np.max(np.abs(trace.data)) == g
    idx = np.argmax(np.abs(trace.data))

    assert timeof(trace, idx) == t

    # assert that slices before/after the max have their own, different maxima:
    td = 2 * trace.stats.delta
    assert maxabs(trace, None, t - td)[0] < t < maxabs(trace, t + td, None)[0]

    data = trace.data
    npts = trace.stats.npts
    t, g = maxabs(trace, None, trace.stats.starttime - td)
    assert t == UTCDateTime(0) and np.isnan(g)
    # check that data has not been changed by trace.slice (called by maxabs)
    # this is for safety:
    assert data is trace.data
    assert len(trace.data) == npts  # further safety check
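maxabs and timeof are the module's own helpers; the quantities they are asserted to return can be recomputed with plain NumPy on an ObsPy Trace. A sketch under that assumption:

import numpy as np

def max_abs(trace):
    # (time, value) of the absolute maximum of a Trace -- a sketch of
    # what maxabs is asserted to return in the test above.
    idx = int(np.argmax(np.abs(trace.data)))
    value = float(np.abs(trace.data).max())
    time = trace.stats.starttime + idx * trace.stats.delta
    return time, value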
Example #5
 def test_save_waveform_no_compression(self):
     """
     Explicitly disable compression during waveform request and save it
     directly to disk.
     """
     # initialize client
     client = Client(user='******')
     start = UTCDateTime(2010, 1, 1, 0, 0)
     end = start + 1
     # MiniSEED
     with NamedTemporaryFile(suffix='.bz2') as tf:
         mseedfile = tf.name
         client.save_waveforms(mseedfile, 'GE', 'APE', '', 'BHZ', start,
                               end, compressed=False)
         st = read(mseedfile)
         # MiniSEED may not start with Volume Index Control Headers (V)
         with open(mseedfile, 'rb') as fp:
             self.assertNotEqual(fp.read(8)[6:7], b"V")
         # ArcLink cuts on record base
         self.assertEqual(st[0].stats.network, 'GE')
         self.assertEqual(st[0].stats.station, 'APE')
         self.assertEqual(st[0].stats.location, '')
         self.assertEqual(st[0].stats.channel, 'BHZ')
     # Full SEED
     with NamedTemporaryFile(suffix='.bz2') as tf:
         fseedfile = tf.name
         client.save_waveforms(fseedfile, 'GE', 'APE', '', 'BHZ', start,
                               end, format='FSEED')
         st = read(fseedfile)
         # Full SEED
         client.save_waveforms(fseedfile, 'BW', 'MANZ', '', 'EHZ', start,
                               end, format='FSEED')
         # ArcLink cuts on record base
         self.assertEqual(st[0].stats.network, 'GE')
         self.assertEqual(st[0].stats.station, 'APE')
         self.assertEqual(st[0].stats.location, '')
         self.assertEqual(st[0].stats.channel, 'BHZ')
Example #6
    def scan(self):
        ''' Scan the data directory for reftek raw data files.

        '''
        if not os.path.isdir(self.archive):
            self.logger.error("The reftek archive directory %s doesn't exist.", self.archive)
            return

        self.logger.info("Scanning the archive directory %s.", self.archive)

        re_raw = re.compile(r".*\w{9}_\w{8}$")

        for root, dirs, files in os.walk(self.archive):
            dirs.sort()
            for cur_file in files:
                if re_raw.match(cur_file):
                    self.logger.debug('Adding file %s.', os.path.join(root, cur_file))
                    self.add_raw_file(os.path.join(root, cur_file))

        self.last_scan = UTCDateTime()

        self.sort_raw_files()

        # Save the scan results in the archive directory.
        result_file = os.path.join(self.archive, 'psysmon_archive_scan.json')
        with open(result_file, mode='w') as fp:
            json.dump(self, fp=fp, cls=ArchiveScanEncoder)
        self.logger.info("Saved the scan result in the file %s.", result_file)

        result_file = os.path.join(self.archive, 'psysmon_archive_scan_summary.json')
        with open(result_file, mode='w') as fp:
            json.dump(self.summary, fp=fp)
        self.logger.info("Saved the scan result summary in the file %s.", result_file)
Example #7
def readWaveformsCross(station, tw, EventPath, Origin):

    time = Origin.time
    ts = time.split('T')

    datet = ts[0]
    datet = datet.split('-')
    year = datet[0].strip()
    month = datet[1]
    day = datet[2]
    #timep = ts[1][:-1]

    print(time, ts, year, month, day)
    julday = UTCDateTime(int(year), int(month), int(day)).julday
    julday = "%03d" % julday
    sdspath = os.path.join(EventPath, 'data', year)

    #Wdict = {}

    streamData = station.getName() + '.D.' + str(year) + '.' + str(julday)
    entry = os.path.join(sdspath, station.net, station.sta,
                         station.comp + '.D', streamData)
    print(entry)
    st = read(entry,
              format="MSEED",
              starttime=tw['start'],
              endtime=tw['end'],
              nearest_sample=True)
    print(st)

    if len(st.getGaps()) > 0:
        st.merge(method=0, fill_value='interpolate', interpolation_samples=0)

    #Wdict[i.getName()] = st
    stream = st

    return stream
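The path assembled above follows the SDS layout NET/STA/CHA.D/NET.STA.LOC.CHA.D.YEAR.JULDAY; the only non-obvious piece is the zero-padded Julian day. A sketch:

from obspy import UTCDateTime

julday = UTCDateTime(2014, 3, 1).julday  # day of year as int
print("%03d" % julday)  # zero-padded, e.g. '060'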
Example #8
 def test_pick(self):
     """
     Tests Pick object.
     """
     filename = os.path.join(self.path, 'quakeml_1.2_pick.xml')
     catalog = _read_quakeml(filename)
     self.assertEqual(len(catalog), 1)
     self.assertEqual(len(catalog[0].picks), 2)
     pick = catalog[0].picks[0]
     self.assertEqual(
         pick.resource_id,
         ResourceIdentifier('smi:ch.ethz.sed/pick/117634'))
     self.assertEqual(pick.time, UTCDateTime('2005-09-18T22:04:35Z'))
     self.assertEqual(pick.time_errors.uncertainty, 0.012)
     self.assertEqual(
         pick.waveform_id,
         WaveformStreamID(network_code='BW', station_code='FUR',
                          resource_uri='smi:ch.ethz.sed/waveform/201754'))
     self.assertEqual(
         pick.filter_id,
         ResourceIdentifier('smi:ch.ethz.sed/filter/lowpass/standard'))
     self.assertEqual(
         pick.method_id,
         ResourceIdentifier('smi:ch.ethz.sed/picker/autopicker/6.0.2'))
     self.assertEqual(pick.backazimuth, 44.0)
     self.assertEqual(pick.onset, 'impulsive')
     self.assertEqual(pick.phase_hint, 'Pn')
     self.assertEqual(pick.polarity, 'positive')
     self.assertEqual(pick.evaluation_mode, "manual")
     self.assertEqual(pick.evaluation_status, "confirmed")
     self.assertEqual(len(pick.comments), 2)
     self.assertEqual(pick.creation_info.author, "Erika Mustermann")
     # exporting back to XML should result in the same document
     with open(filename, "rt") as fp:
         original = fp.read()
     processed = Pickler().dumps(catalog)
     compare_xml_strings(original, processed)
Example #9
def readitaly(datafile):
    f = open(datafile, 'rt')
    #header needs: station,channel,location,npts,starttime,sampling_rate,delta,calib,lat,lon,height,duration,endtime,maxacc,network
    data = []
    hdrdict = {}
    for line in f.readlines():
        if not len(line.strip()):
            continue
        if ':' not in line:
            data.append(float(line.strip()))
            continue

        key, value = line.split(':')
        key = key.strip()
        value = value.strip()
        if key not in HEADERS:
            continue
        hdrkey = HEADERS[key]
        if hdrkey == 'starttime':
            value = UTCDateTime(datetime.datetime.strptime(value, TIMEFMT))
        elif hdrkey not in ['station', 'channel', 'location', 'network']:
            value = float(value)
        hdrdict[hdrkey] = value
    f.close()
    hdrdict['sampling_rate'] = 1 / hdrdict['delta']
    hdrdict['endtime'] = hdrdict['starttime'] + hdrdict['duration']
    hdrdict['npts'] = int(hdrdict['npts'])
    hdrdict['calib'] = 1.0
    hdrdict['units'] = 'acc'
    data = np.array(data)
    header = hdrdict.copy()
    stats = Stats(hdrdict)
    trace = Trace(data, header=stats)
    #apply the calibration and convert from cm/s^2 to m/s^2
    trace.data = trace.data * trace.stats['calib'] * 0.01  #convert to m/s^2
    return trace
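A minimal sketch of the Stats/Trace assembly done at the end of readitaly, with a hypothetical header (field values are illustrative):

import numpy as np
from obspy import Trace, UTCDateTime
from obspy.core.trace import Stats

hdr = {'station': 'ABC', 'channel': 'HNZ', 'npts': 3,
       'sampling_rate': 100.0,
       'starttime': UTCDateTime('2012-01-01T00:00:00')}
tr = Trace(np.array([1.0, 2.0, 3.0]), header=Stats(hdr))
print(tr)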
Example #10
 def get_endtimes(self, network=None, station=None, location=None,
                  channel=None):
     """
     Generates a list of last end times for each channel.
     """
     # build up query
     session = self.session()
     query = session.query(
         WaveformChannel.network, WaveformChannel.station,
         WaveformChannel.location, WaveformChannel.channel,
         func.max(WaveformChannel.endtime).label('latency')
     )
     query = query.group_by(
         WaveformChannel.network, WaveformChannel.station,
         WaveformChannel.location, WaveformChannel.channel
     )
     # process arguments
     kwargs = {'network': network, 'station': station,
               'location': location, 'channel': channel}
     for key, value in kwargs.items():
         if value is None:
             continue
         col = getattr(WaveformChannel, key)
         if '*' in value or '?' in value:
             value = value.replace('?', '_')
             value = value.replace('*', '%')
             query = query.filter(col.like(value))
         else:
             query = query.filter(col == value)
     results = query.all()
     session.close()
     adict = {}
     for result in results:
         key = '%s.%s.%s.%s' % (result[0], result[1], result[2], result[3])
         adict[key] = UTCDateTime(result[4])
     return adict
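The method translates SEED-style wildcards into SQL LIKE patterns ('?' becomes '_', '*' becomes '%'). The mapping in isolation, as a sketch:

def seed_wildcard_to_like(pattern):
    # Translate SEED wildcards to an SQL LIKE pattern (sketch).
    return pattern.replace('?', '_').replace('*', '%')

print(seed_wildcard_to_like('BH*'))  # -> 'BH%'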
Example #11
def test_trace():
    data = np.random.rand(1000)
    header = {
        'sampling_rate': 1,
        'npts': len(data),
        'network': 'US',
        'location': '11',
        'station': 'ABCD',
        'channel': 'HN1',
        'starttime': UTCDateTime(2010, 1, 1, 0, 0, 0)
    }
    inventory = get_inventory()
    invtrace = StationTrace(data=data, header=header, inventory=inventory)
    invtrace.setProvenance('detrend', {'detrending_method': 'demean'})
    invtrace.setParameter('failed', True)
    invtrace.setParameter('corner_frequencies', [1, 2, 3])
    invtrace.setParameter('metadata', {'name': 'Fred'})

    assert invtrace.getProvenance('detrend')[0] == {
        'detrending_method': 'demean'
    }
    assert invtrace.getParameter('failed')
    assert invtrace.getParameter('corner_frequencies') == [1, 2, 3]
    assert invtrace.getParameter('metadata') == {'name': 'Fred'}
Example #12
 def test_getEvents(self):
     """
     Tests getEvents method.
     """
     client = Client()
     dt = UTCDateTime("2012-03-13T04:49:38")
     # 1
     cat = client.getEvents(mindepth=34.9,
                            maxdepth=35.1,
                            magtype="MB",
                            catalog="NEIC PDE",
                            lat=-56.1,
                            lon=-26.7,
                            maxradius=2,
                            starttime=dt,
                            endtime=dt + 10)
     self.assertEqual(len(cat), 1)
     ev = cat[0]
     self.assertEqual(len(ev.origins), 1)
     self.assertEqual(len(ev.magnitudes), 1)
     self.assertEqual(ev.origins[0].depth, 35.0)
     self.assertEqual(ev.origins[0].latitude, -55.404)
     self.assertEqual(ev.origins[0].longitude, -27.895)
     self.assertEqual(ev.magnitudes[0].magnitude_type, 'MB')
Example #13
    def test_json_waveclient_deserialization(self):
        '''
        Tests JSON encoding and decoding of a project containing a waveclient.
        '''
        import psysmon.core.waveclient

        packages_path = os.path.dirname(os.path.abspath(__file__))
        packages_path = os.path.join(packages_path, 'waveclient_packages')
        psybase = test_util.create_psybase(package_directory = [packages_path, ])
        project = test_util.create_dbtest_project(psybase)
        project.createDatabaseStructure(psybase.packageMgr.packages)

        # Set the maxDiff attribute to None to enable long output of 
        # non-equal strings tested with assertMultiLineEqual.
        self.maxDiff = None

        # Set the createTime of the project to a known value.
        project.createTime = UTCDateTime('2013-01-01T00:00:00')

        # Add a waveclient to the project.
        waveclient = psysmon.core.waveclient.PsysmonDbWaveClient('db client', project)
        project.addWaveClient(waveclient)
        project.defaultWaveclient = 'db client'


        encoder = util.ProjectFileEncoder()
        decoder = util.ProjectFileDecoder()
        json_project = encoder.encode(project)
        project_obj = decoder.decode(json_project)

        # TODO: Test the project_obj for validity.
        print(project_obj.waveclient['db client'].mode)

        psybase.stop_project_server()
        base_dir = project.base_dir
        test_util.drop_project_database_tables(project)
        shutil.rmtree(base_dir)
Example #14
def get_event_dict(eventid):
    """Get event dictionary from ComCat using event ID.

    Args:
        eventid (str): Event ID that can be found in ComCat.

    Returns:
        dict: Dictionary containing fields:
            - id: String event ID.
            - time: UTCDateTime of event origin time.
            - lat: Origin latitude.
            - lon: Origin longitude.
            - depth: Origin depth.
            - magnitude: Origin magnitude.
    """
    dict_or_id = get_event_by_id(eventid)
    event_dict = {'id': dict_or_id.id,
                  'time': UTCDateTime(dict_or_id.time),
                  'lat': dict_or_id.latitude,
                  'lon': dict_or_id.longitude,
                  'depth': dict_or_id.depth,
                  'magnitude': dict_or_id.magnitude,
                  }
    return event_dict
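A hypothetical call (the ComCat event ID below is illustrative):

event = get_event_dict('us1000abcd')  # hypothetical event ID
print(event['time'], event['magnitude'])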
Example #15
    def signoise(self, Waveform, ttime, path):

        st = str(self.Origin.time)[:-1]

        ponset = UTCDateTime(st) + ttime

        winnoise_start = Waveform.stats.starttime
        winnoise_end = ponset - 10
        winsig_start = ponset - 2
        winsig_end = ponset + 5

        try:
            winnoise = read(path,
                            format="MSEED",
                            starttime=winnoise_start,
                            endtime=winnoise_end,
                            nearest_sample=True)
            winsig = read(path,
                          format="MSEED",
                          starttime=winsig_start,
                          endtime=winsig_end,
                          nearest_sample=True)
        except Exception as e:
            print(e)
Example #16
    def signoise(self, Waveform, ttime, path):

        st = str(self.Origin.time)[:-1]
        ponset = UTCDateTime(st) + ttime

        winnoise_start = Waveform.stats.starttime + 20
        winnoise_end = ponset - 10
        winsig_start = ponset - 2
        winsig_end = ponset + 10

        try:
            winnoise = read(path,
                            format="MSEED",
                            starttime=winnoise_start,
                            endtime=winnoise_end,
                            nearest_sample=True)
            #winnoise.write (('%s.mseed')%(path),format='MSEED')
            winsig = read(path,
                          format="MSEED",
                          starttime=winsig_start,
                          endtime=winsig_end,
                          nearest_sample=True)
            #winsig.write(('s.mseed')%(path),format='MSEED')

        except Exception:
            Logfile.exception('signoise')
            Logfile.exception('signoise')

        psignal = abs(winsig.max()[0])
        pnoise = abs(winnoise.max()[0])

        signoise = float(psignal) / float(pnoise)
        #       print psignal, pnoise, signoise

        return signoise
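Both signoise variants reduce to the ratio of peak amplitudes in a signal window and a noise window. A self-contained sketch with NumPy (the window contents are illustrative):

import numpy as np

def snr(signal, noise):
    # Peak-amplitude signal-to-noise ratio of two sample windows (sketch).
    return float(np.abs(signal).max()) / float(np.abs(noise).max())

print(snr(np.array([0.0, 4.0, -6.0]), np.array([1.0, -2.0, 1.5])))  # 3.0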
Example #17
 def test_srl(self):
     """
     Tests if example in ObsPy paper submitted to the Electronic
     Seismologist section of SRL is still working. The test shouldn't be
     changed, because otherwise the reference would no longer be valid.
     """
     paz = {'gain': 60077000.0,
            'poles': [(-0.037004000000000002 + 0.037016j),
                      (-0.037004000000000002 - 0.037016j),
                      (-251.33000000000001 + 0j),
                      (-131.03999999999999 - 467.29000000000002j),
                      (-131.03999999999999 + 467.29000000000002j)],
            'sensitivity': 2516800000.0,
            'zeros': [0j, 0j]}
     dat1 = np.array([288, 300, 292, 285, 265, 287, 279, 250, 278, 278])
     dat2 = np.array([445, 432, 425, 400, 397, 471, 426, 390, 450, 442])
     # Retrieve data via ArcLink
     client = Client('*****@*****.**', host='webdc.eu', port=18001)
     t = UTCDateTime("2009-08-24 00:20:03")
     st = client.get_waveforms("BW", "RJOB", "", "EHZ", t, t + 30)
     # original but deprecated call
     # poles_zeros = list(client.get_paz("BW", "RJOB", "", "EHZ",
     #                                 t, t+30).values())[0]
     poles_zeros = client.get_paz("BW", "RJOB", "", "EHZ", t)
     self.assertEqual(paz['gain'], poles_zeros['gain'])
     self.assertEqual(paz['poles'], poles_zeros['poles'])
     self.assertEqual(paz['sensitivity'], poles_zeros['sensitivity'])
     self.assertEqual(paz['zeros'], poles_zeros['zeros'])
     self.assertEqual('BW', st[0].stats['network'])
     self.assertEqual('RJOB', st[0].stats['station'])
     self.assertEqual(200.0, st[0].stats['sampling_rate'])
     self.assertEqual(6001, st[0].stats['npts'])
     self.assertEqual(
         '2009-08-24T00:20:03.000000Z', str(st[0].stats['starttime']))
     np.testing.assert_array_equal(dat1, st[0].data[:10])
     np.testing.assert_array_equal(dat2, st[0].data[-10:])
Example #18
 def import_txt(self, input_txtfile_path):
     # Imports a summary containing only event locations and origin times from a tab-delimited txt file.
     print("[preprocessing metadata] Reading metadata file "+input_txtfile_path)
     with open(input_txtfile_path) as csv_file:
         csv_reader = csv.reader(csv_file, delimiter="\t", skipinitialspace=True)
         line_count = 0
         for row in csv_reader:
             if line_count == 0:
                 line_count += 1
             else:
                 line_count += 1
                 year = int(row[0].strip())
                 month = int(row[1])
                 day = int(row[2])
                 hour = int(row[3])
                 minute = int(row[4])
                 sec = int(float(row[5]))
                 lat = float(row[6])
                 lon = float(row[7])
                 depth = float(row[8])
                 print(','.join(str(v) for v in (year, month, day, hour, minute, sec, lat, lon, depth)))
                 eventOriginTime = UTCDateTime(year=year, month=month, day=day, hour=hour, minute=minute, second=sec)
                 e = Event(eventOriginTime, lat, lon, depth)
                 self.events.append(e)
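A minimal sketch of building the origin time from the parsed integer fields (the values are illustrative):

from obspy import UTCDateTime

t = UTCDateTime(year=2015, month=7, day=4, hour=15, minute=42, second=18)
print(t)  # 2015-07-04T15:42:18.000000Z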
Example #19
    def signoise(self, Waveform, ttime, path):

        st = str(self.Origin.time)[:-1]
        ponset = UTCDateTime(st) + ttime

        winnoise_start = Waveform.stats.starttime + 20
        winnoise_end = ponset - 10
        winsig_start = ponset - 2
        winsig_end = ponset + 10

        try:
            winnoise = read(path, format="MSEED", starttime=winnoise_start,
                            endtime=winnoise_end, nearest_sample=True)
            winsig = read(path, format="MSEED", starttime=winsig_start,
                          endtime=winsig_end, nearest_sample=True)
        except Exception:
            Logfile.exception('signoise')

        psignal = abs(winsig.max()[0])
        pnoise = abs(winnoise.max()[0])

        signoise = float(psignal) / float(pnoise)

        return signoise
Example #20
 def test_getWaveformWithDCIDKeyFile(self):
     """
     Requests using a DCID key file should match those from the public server.
     """
     with NamedTemporaryFile() as tf:
         dcidfile = tf.name
         with open(dcidfile, 'wt') as fh:
             fh.write('TEST=XYZ\r\nBIA=OfH9ekhi\r\n')
         # test server for encryption
         client1 = Client(host="webdc.eu",
                          port=36000,
                          user="******",
                          dcid_key_file=dcidfile)
         # public server
         client2 = Client(host="webdc.eu",
                          port=18001,
                          user="******")
     # request data
     start = UTCDateTime(2010, 1, 1, 10, 0, 0)
     end = start + 100
     stream1 = client1.getWaveform('GE', 'APE', '', 'BHZ', start, end)
     stream2 = client2.getWaveform('GE', 'APE', '', 'BHZ', start, end)
     # compare results
     np.testing.assert_array_equal(stream1[0].data, stream2[0].data)
     self.assertEqual(stream1[0].stats, stream2[0].stats)
Example #21
 def test_multipleOrigins(self):
     """
     Parameters of multiple origins should not interfere with each other.
     """
     origin = Origin()
     origin.resource_id = 'smi:ch.ethz.sed/origin/37465'
     origin.time = UTCDateTime(0)
     origin.latitude = 12
     origin.latitude_errors.confidence_level = 95
     origin.longitude = 42
     origin.depth_type = 'from location'
     self.assertEqual(origin.resource_id,
                      ResourceIdentifier(id='smi:ch.ethz.sed/origin/37465'))
     self.assertEqual(origin.latitude, 12)
     self.assertEqual(origin.latitude_errors.confidence_level, 95)
     self.assertEqual(origin.latitude_errors.uncertainty, None)
     self.assertEqual(origin.longitude, 42)
     origin2 = Origin(force_resource_id=False)
     origin2.latitude = 13.4
     self.assertEqual(origin2.depth_type, None)
     self.assertEqual(origin2.resource_id, None)
     self.assertEqual(origin2.latitude, 13.4)
     self.assertEqual(origin2.latitude_errors.confidence_level, None)
     self.assertEqual(origin2.longitude, None)
Example #22
    def test_availability(self):
        """
        Tests availability of waveform data at the DMC.

        Examples are inspired by http://www.iris.edu/ws/availability/.
        """
        client = Client()
        # 1
        t1 = UTCDateTime("2010-02-27T06:30:00.000")
        t2 = UTCDateTime("2010-02-27T10:30:00.000")
        result = client.availability('IU', channel='B*', starttime=t1,
                                     endtime=t2)
        self.assertTrue(isinstance(result, str))
        self.assertTrue('IU YSS 00 BHZ' in result)
        # 2
        dt = UTCDateTime("2011-11-13T07:00:00")
        result = client.availability(network='GE', starttime=dt,
                                     endtime=dt + 10)
        self.assertTrue(isinstance(result, str))
        self.assertTrue('GE DAG -- BHE' in result)
        # 3 - unknown network results in empty string
        dt = UTCDateTime(2011, 11, 16)
        result = client.availability(network='XX', starttime=dt,
                                     endtime=dt + 10)
        self.assertEqual(result, '')
        # 4 - location=None
        t1 = UTCDateTime("2010-02-27T06:30:00")
        t2 = UTCDateTime("2010-02-27T06:40:00")
        result = client.availability("IU", "K*", starttime=t1, endtime=t2)
        self.assertTrue(isinstance(result, str))
        self.assertTrue('IU KBL -- BHZ' in result)
        self.assertTrue('IU KBS 00 BHE' in result)
        # 5 - empty location
        result = client.availability("IU", "K*", "", starttime=t1, endtime=t2)
        self.assertTrue(isinstance(result, str))
        self.assertTrue('IU KBL -- BHZ' in result)
        self.assertFalse('IU KBS 00 BHE' in result)
        # 6 - empty location code via '--'
        result = client.availability("IU", "K*", "--", starttime=t1,
                                     endtime=t2)
        self.assertTrue(isinstance(result, str))
        self.assertTrue('IU KBL -- BHZ' in result)
        self.assertFalse('IU KBS 00 BHE' in result)
Example #23
 def changeTime(*args, **kwargs):
     """
     Change the times of the plot.
     """
     timedict = {'-1 h': -60 * 60,
                 '-10 min': -10 * 60,
                 'Current': 'NOW',
                 '+10 min': 10 * 60,
                 '+1 h': 60 * 60}
     timechange = timedict[args[0].widget.cget("text")]
     if isinstance(timechange, int):
         start = UTCDateTime(NV.starttime.get()) + timechange
         end = UTCDateTime(NV.endtime.get()) + timechange
     elif timechange == 'NOW':
         end = UTCDateTime()
         start = UTCDateTime() - 10 * 60
     else:
         import pdb;pdb.set_trace()
     NV.starttime.set(start.strftime('%Y-%m-%dT%H:%M:%S'))
     NV.endtime.set(end.strftime('%Y-%m-%dT%H:%M:%S'))
     getWaveform()
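UTCDateTime arithmetic makes the window shifting above a one-liner; a sketch:

from obspy import UTCDateTime

start = UTCDateTime('2012-01-01T00:00:00')
end = start + 10 * 60                    # ten-minute window
start, end = start + 3600, end + 3600    # shift the window by +1 h
print(start, end)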
Example #24
 def test_origin(self):
     """
     Tests Origin object.
     """
     filename = os.path.join(self.path, 'quakeml_1.2_origin.xml')
     catalog = _read_quakeml(filename)
     self.assertEqual(len(catalog), 1)
     self.assertEqual(len(catalog[0].origins), 1)
     origin = catalog[0].origins[0]
     self.assertEqual(
         origin.resource_id,
         ResourceIdentifier(
             'smi:www.iris.edu/ws/event/query?originId=7680412'))
     self.assertEqual(origin.time, UTCDateTime("2011-03-11T05:46:24.1200"))
     self.assertEqual(origin.latitude, 38.297)
     self.assertEqual(origin.latitude_errors.lower_uncertainty, None)
     self.assertEqual(origin.longitude, 142.373)
     self.assertEqual(origin.longitude_errors.uncertainty, None)
     self.assertEqual(origin.depth, 29.0)
     self.assertEqual(origin.depth_errors.confidence_level, 50.0)
     self.assertEqual(origin.depth_type, "from location")
     self.assertEqual(
         origin.method_id,
         ResourceIdentifier(id="smi:some/method/NA"))
     self.assertEqual(origin.time_fixed, None)
     self.assertEqual(origin.epicenter_fixed, False)
     self.assertEqual(
         origin.reference_system_id,
         ResourceIdentifier(id="smi:some/reference/muh"))
     self.assertEqual(
         origin.earth_model_id,
         ResourceIdentifier(id="smi:same/model/maeh"))
     self.assertEqual(origin.region, 'Kalamazoo')
     self.assertEqual(origin.evaluation_mode, "manual")
     self.assertEqual(origin.evaluation_status, "preliminary")
     self.assertEqual(origin.origin_type, "hypocenter")
     # composite times
     self.assertEqual(len(origin.composite_times), 2)
     c = origin.composite_times
     self.assertEqual(c[0].year, 2029)
     self.assertEqual(c[0].month, None)
     self.assertEqual(c[0].day, None)
     self.assertEqual(c[0].hour, 12)
     self.assertEqual(c[0].minute, None)
     self.assertEqual(c[0].second, None)
     self.assertEqual(c[1].year, None)
     self.assertEqual(c[1].month, None)
     self.assertEqual(c[1].day, None)
     self.assertEqual(c[1].hour, 1)
     self.assertEqual(c[1].minute, None)
     self.assertEqual(c[1].second, 29.124234)
     # quality
     self.assertEqual(origin.quality.used_station_count, 16)
     self.assertEqual(origin.quality.standard_error, 0)
     self.assertEqual(origin.quality.azimuthal_gap, 231)
     self.assertEqual(origin.quality.maximum_distance, 53.03)
     self.assertEqual(origin.quality.minimum_distance, 2.45)
     self.assertEqual(origin.quality.associated_phase_count, None)
     self.assertEqual(origin.quality.associated_station_count, None)
     self.assertEqual(origin.quality.depth_phase_count, None)
     self.assertEqual(origin.quality.secondary_azimuthal_gap, None)
     self.assertEqual(origin.quality.ground_truth_level, None)
     self.assertEqual(origin.quality.median_distance, None)
     # comments
     self.assertEqual(len(origin.comments), 2)
     c = origin.comments
     self.assertEqual(c[0].text, 'Some comment')
     self.assertEqual(
         c[0].resource_id,
         ResourceIdentifier(id="smi:some/comment/reference"))
     self.assertEqual(c[0].creation_info.author, 'EMSC')
     self.assertEqual(c[1].resource_id, None)
     self.assertEqual(c[1].creation_info, None)
     self.assertEqual(c[1].text, 'Another comment')
     # creation info
     self.assertEqual(origin.creation_info.author, "NEIC")
     self.assertEqual(origin.creation_info.agency_id, None)
     self.assertEqual(origin.creation_info.author_uri, None)
     self.assertEqual(origin.creation_info.agency_uri, None)
     self.assertEqual(origin.creation_info.creation_time, None)
     self.assertEqual(origin.creation_info.version, None)
     # origin uncertainty
     u = origin.origin_uncertainty
     self.assertEqual(u.preferred_description, "uncertainty ellipse")
     self.assertEqual(u.horizontal_uncertainty, 9000)
     self.assertEqual(u.min_horizontal_uncertainty, 6000)
     self.assertEqual(u.max_horizontal_uncertainty, 10000)
     self.assertEqual(u.azimuth_max_horizontal_uncertainty, 80.0)
     # confidence ellipsoid
     c = u.confidence_ellipsoid
     self.assertEqual(c.semi_intermediate_axis_length, 2.123)
     self.assertEqual(c.major_axis_rotation, 5.123)
     self.assertEqual(c.major_axis_plunge, 3.123)
     self.assertEqual(c.semi_minor_axis_length, 1.123)
     self.assertEqual(c.semi_major_axis_length, 0.123)
     self.assertEqual(c.major_axis_azimuth, 4.123)
     # exporting back to XML should result in the same document
     with open(filename, "rt") as fp:
         original = fp.read()
     processed = Pickler().dumps(catalog)
     compare_xml_strings(original, processed)
Example #25
    def filter(self, *args, **kwargs):
        """
        Returns a new Catalog object only containing Events which match the
        specified filter rules.

        Valid filter keys are:

        * magnitude;
        * longitude;
        * latitude;
        * depth;
        * time;
        * standard_error;
        * azimuthal_gap;
        * used_station_count;
        * used_phase_count.

        Use ``inverse=True`` to return the Events that *do not* match the
        specified filter rules.

        :rtype: :class:`Catalog`
        :return: Filtered catalog. A new Catalog object with filtered
            Events as references to the original Events.

        .. rubric:: Example

        >>> from obspy.core.event import read_events
        >>> cat = read_events()
        >>> print(cat)
        3 Event(s) in Catalog:
        2012-04-04T14:21:42.300000Z | +41.818,  +79.689 | 4.4 mb | manual
        2012-04-04T14:18:37.000000Z | +39.342,  +41.044 | 4.3 ML | manual
        2012-04-04T14:08:46.000000Z | +38.017,  +37.736 | 3.0 ML | manual
        >>> cat2 = cat.filter("magnitude >= 4.0", "latitude < 40.0")
        >>> print(cat2)
        1 Event(s) in Catalog:
        2012-04-04T14:18:37.000000Z | +39.342,  +41.044 | 4.3 ML | manual
        >>> cat3 = cat.filter("time > 2012-04-04T14:10",
        ...                   "time < 2012-04-04T14:20")
        >>> print(cat3)
        1 Event(s) in Catalog:
        2012-04-04T14:18:37.000000Z | +39.342,  +41.044 | 4.3 ML | manual
        >>> cat4 = cat.filter("time > 2012-04-04T14:10",
        ...                   "time < 2012-04-04T14:20",
        ...                   inverse=True)
        >>> print(cat4)
        2 Event(s) in Catalog:
        2012-04-04T14:21:42.300000Z | +41.818,  +79.689 | 4.4 mb | manual
        2012-04-04T14:08:46.000000Z | +38.017,  +37.736 | 3.0 ML | manual
        """

        # Helper functions. Only the first argument may be None. The None
        # check comes first to avoid comparisons between unorderable types;
        # note that the "greater" variants shortcut to False on None
        # (confusing but correct).
        def _is_smaller(value_1, value_2):
            if value_1 is None or value_1 < value_2:
                return True
            return False

        def _is_smaller_or_equal(value_1, value_2):
            if value_1 is None or value_1 <= value_2:
                return True
            return False

        def _is_greater(value_1, value_2):
            if value_1 is None or value_1 <= value_2:
                return False
            return True

        def _is_greater_or_equal(value_1, value_2):
            if value_1 is None or value_1 < value_2:
                return False
            return True

        # Map the function to the operators.
        operator_map = {
            "<": _is_smaller,
            "<=": _is_smaller_or_equal,
            ">": _is_greater,
            ">=": _is_greater_or_equal
        }

        try:
            inverse = kwargs["inverse"]
        except KeyError:
            inverse = False

        events = list(self.events)
        for arg in args:
            try:
                key, operator, value = arg.split(" ", 2)
            except ValueError:
                msg = "%s is not a valid filter rule." % arg
                raise ValueError(msg)
            if key == "magnitude":
                temp_events = []
                for event in events:
                    if (event.magnitudes and event.magnitudes[0].mag
                            and operator_map[operator](event.magnitudes[0].mag,
                                                       float(value))):
                        temp_events.append(event)
                events = temp_events
            elif key in ("longitude", "latitude", "depth", "time"):
                temp_events = []
                for event in events:
                    if (event.origins and key in event.origins[0]
                            and operator_map[operator](
                                event.origins[0].get(key), UTCDateTime(value)
                                if key == 'time' else float(value))):
                        temp_events.append(event)
                events = temp_events
            elif key in ('standard_error', 'azimuthal_gap',
                         'used_station_count', 'used_phase_count'):
                temp_events = []
                for event in events:
                    if (event.origins and event.origins[0].quality
                            and key in event.origins[0].quality
                            and operator_map[operator](
                                event.origins[0].quality.get(key),
                                float(value))):
                        temp_events.append(event)
                events = temp_events
            else:
                msg = "%s is not a valid filter key" % key
                raise ValueError(msg)
        if inverse:
            events = [ev for ev in self.events if ev not in events]
        return Catalog(events=events)
Example #26
    def refTrigger(self, RefWaveform, phase, cfg_yaml):
        Config = self.Config
        cfg = ConfigObj(dict=Config)
        name = ('%s.%s.%s.%s') % (RefWaveform[0].stats.network,
                                  RefWaveform[0].stats.station,
                                  RefWaveform[0].stats.location,
                                  RefWaveform[0].stats.channel)

        i = self.searchMeta(name, self.StationMeta)
        de = loc2degrees(self.Origin, i)

        ptime = 0

        Phase = cake.PhaseDef(phase)
        model = cake.load_model()
        if cfg_yaml.config_data.colesseo_input is True:
            arrivals = model.arrivals([de, de], phases=Phase,
                                      zstart=self.Origin.depth, zstop=0.)
        else:
            arrivals = model.arrivals([de, de], phases=Phase,
                                      zstart=self.Origin.depth*km, zstop=0.)
        try:
            ptime = arrivals[0].t
        except Exception:
            arrivals = model.arrivals([de, de], phases=Phase,
                                      zstart=self.Origin.depth*km-0.1)
            ptime = arrivals[0].t

        if ptime == 0:
            raise Exception("\033[31mILLEGAL: phase definition\033[0m")

        tw = self.calculateTimeWindows(ptime)

        if cfg_yaml.config_data.pyrocko_download is True:
            stP = self.readWaveformsPicker_pyrocko(i, tw, self.Origin, ptime,
                                                   cfg_yaml)
        elif cfg_yaml.config_data.colesseo_input is True:
            stP = self.readWaveformsPicker_colos(i, tw, self.Origin, ptime,
                                                 cfg_yaml)
        else:
            stP = self.readWaveformsPicker(i, tw, self.Origin, ptime, cfg_yaml)

        refuntouchname = os.path.basename(self.AF)+'-refstation-raw.mseed'
        stP.write(os.path.join(self.EventPath, refuntouchname), format='MSEED',
                                                                byteorder='>')
        stP.filter("bandpass",
                   freqmin=float(cfg_yaml.config_xcorr.refstationfreqmin),
                   freqmax=float(cfg_yaml.config_xcorr.refstationfreqmax))

        stP.trim(tw['xcorrstart'], tw['xcorrend'])
        trP = stP[0]

        trP.stats.starttime = UTCDateTime(3600)
        refname = os.path.basename(self.AF)+'-refstation-filtered.mseed'
        trP.write(os.path.join(self.EventPath, refname), format='MSEED',
                                                         byteorder='>')

        sta = float(cfg_yaml.config_xcorr.refsta)
        lta = float(cfg_yaml.config_xcorr.reflta)
        cft = recSTALTA(trP.data, int(sta * trP.stats.sampling_rate),
                        int(lta * trP.stats.sampling_rate))

        t = triggerOnset(cft, lta, sta)

        try:
            onset = t[0][0] / trP.stats.sampling_rate

        except Exception:
            onset = self.mintforerun

        trigger = trP.stats.starttime+onset

        tdiff = (trP.stats.starttime + onset)-(UTCDateTime(3600)
                                               + self.mintforerun)

        refp = UTCDateTime(self.Origin.time)+ptime
        reftriggeronset = refp+onset-self.mintforerun

        if cfg_yaml.config_xcorr.autoxcorrcorrectur is True:
            refmarkername = os.path.join(
                self.EventPath, ('%s-marker') % (os.path.basename(self.AF)))
            fobjrefmarkername = open(refmarkername, 'w')
            fobjrefmarkername.write('# Snuffler Markers File Version 0.2\n')
            fobjrefmarkername.write(('phase: %s 0 %s    None           None         None         XWStart        None False\n') % (tw['xcorrstart'].strftime('%Y-%m-%d %H:%M:%S.%f'), name))
            fobjrefmarkername.write(('phase: %s 0 %s    None           None         None         XWEnd        None False\n') % (tw['xcorrend'].strftime('%Y-%m-%d %H:%M:%S.%f'), name))
            fobjrefmarkername.write(('phase: %s 1 %s    None           None         None         TheoP        None False\n') % (refp.strftime('%Y-%m-%d %H:%M:%S.%f'), name))
            fobjrefmarkername.write(('phase: %s 3 %s    None           None         None         XTrig        None False') % (reftriggeronset.strftime('%Y-%m-%d %H:%M:%S.%f'), name))
            fobjrefmarkername.close()

            cmd = 'snuffler %s --markers=%s&' % (
                os.path.join(self.EventPath, refuntouchname), refmarkername)
            os.system(cmd)

            thrOn = float(self.Config['reflta'])
            thrOff = float(self.Config['refsta'])
            plotTrigger(trP, cft, thrOn, thrOff)

            selection = float(input('Enter self picked phase in seconds: '))
            tdiff = selection - self.mintforerun
            refname = os.path.basename(self.AF) + '-shift.mseed'
            trP.stats.starttime = trP.stats.starttime - selection
            trP.write(os.path.join(self.EventPath, refname), format='MSEED')

        '''
        tdiff = 0
        trigger = trP.stats.starttime
        '''
        To = Trigger(name, trigger, os.path.basename(self.AF), tdiff)

        return tdiff, To
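recSTALTA and triggerOnset above are older ObsPy names; a minimal sketch of the same STA/LTA trigger with the current API (window lengths and thresholds are illustrative):

from obspy import read
from obspy.signal.trigger import recursive_sta_lta, trigger_onset

tr = read()[0]  # ObsPy's bundled example trace
df = tr.stats.sampling_rate
cft = recursive_sta_lta(tr.data, int(0.5 * df), int(10 * df))
onsets = trigger_onset(cft, 1.2, 0.5)  # on/off thresholds are illustrative
print(onsets)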
Example #27
    def test_write_with_extra_tags_and_read(self):
        """
        Tests that a QuakeML file with additional custom "extra" tags gets
        written correctly and that when reading it again the extra tags are
        parsed correctly.
        """
        filename = os.path.join(self.path, "quakeml_1.2_origin.xml")

        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            cat = _read_quakeml(filename)
            self.assertEqual(len(w), 0)

        # add some custom tags to first event:
        #  - tag with explicit namespace but no explicit ns abbreviation
        #  - tag without explicit namespace (gets obspy default ns)
        #  - tag with explicit namespace and namespace abbreviation
        my_extra = AttribDict(
            {'public': {'value': False,
                        'namespace': 'http://some-page.de/xmlns/1.0',
                        'attrib': {'some_attrib': 'some_value',
                                   'another_attrib': 'another_value'}},
             'custom': {'value': 'True',
                        'namespace': 'http://test.org/xmlns/0.1'},
             'new_tag': {'value': 1234,
                         'namespace': 'http://test.org/xmlns/0.1'},
             'tX': {'value': UTCDateTime('2013-01-02T13:12:14.600000Z'),
                    'namespace': 'http://test.org/xmlns/0.1'},
             'dataid': {'namespace': 'http://anss.org/xmlns/catalog/0.1',
                        'type': 'attribute', 'value': '00999999'},
             # some nested tags :
             'quantity': {'namespace': 'http://some-page.de/xmlns/1.0',
                          'attrib': {'attrib1': 'attrib_value1',
                                     'attrib2': 'attrib_value2'},
                          'value': {
                              'my_nested_tag1': {
                                  'namespace': 'http://some-page.de/xmlns/1.0',
                                  'value': 1.23E10},
                              'my_nested_tag2': {
                                  'namespace': 'http://some-page.de/xmlns/1.0',
                                  'value': False}}}})
        nsmap = {'ns0': 'http://test.org/xmlns/0.1',
                 'catalog': 'http://anss.org/xmlns/catalog/0.1'}
        cat[0].extra = my_extra.copy()
        # insert a pick with an extra field
        p = Pick()
        p.extra = {'weight': {'value': 2,
                              'namespace': 'http://test.org/xmlns/0.1'}}
        cat[0].picks.append(p)

        with NamedTemporaryFile() as tf:
            tmpfile = tf.name
            # write file
            cat.write(tmpfile, format='QUAKEML', nsmap=nsmap)
            # check contents
            with open(tmpfile, 'rb') as fh:
                # enforce reproducible attribute orders through write_c14n
                obj = etree.fromstring(fh.read()).getroottree()
                buf = io.BytesIO()
                obj.write_c14n(buf)
                buf.seek(0, 0)
                content = buf.read()
            # check namespace definitions in root element
            expected = [b'<q:quakeml',
                        b'xmlns:catalog="http://anss.org/xmlns/catalog/0.1"',
                        b'xmlns:ns0="http://test.org/xmlns/0.1"',
                        b'xmlns:ns1="http://some-page.de/xmlns/1.0"',
                        b'xmlns:q="http://quakeml.org/xmlns/quakeml/1.2"',
                        b'xmlns="http://quakeml.org/xmlns/bed/1.2"']
            for line in expected:
                self.assertIn(line, content)
            # check additional tags
            expected = [
                b'<ns0:custom>True</ns0:custom>',
                b'<ns0:new_tag>1234</ns0:new_tag>',
                b'<ns0:tX>2013-01-02T13:12:14.600000Z</ns0:tX>',
                b'<ns1:public '
                b'another_attrib="another_value" '
                b'some_attrib="some_value">false</ns1:public>'
            ]
            for line in expected:
                self.assertIn(line, content)
            # now, read again to test if it's parsed correctly..
            cat = _read_quakeml(tmpfile)
        # when reading..
        #  - namespace abbreviations should be disregarded
        #  - we always end up with a namespace definition, even if it was
        #    omitted when originally setting the custom tag
        #  - custom namespace abbreviations should attached to Catalog
        self.assertTrue(hasattr(cat[0], 'extra'))

        def _tostr(x):
            if isinstance(x, bool):
                if x:
                    return str('true')
                else:
                    return str('false')
            elif isinstance(x, AttribDict):
                for key, value in x.items():
                    x[key].value = _tostr(value['value'])
                return x
            else:
                return str(x)

        for key, value in my_extra.items():
            my_extra[key]['value'] = _tostr(value['value'])
        self.assertEqual(cat[0].extra, my_extra)
        self.assertTrue(hasattr(cat[0].picks[0], 'extra'))
        self.assertEqual(
            cat[0].picks[0].extra,
            {'weight': {'value': '2',
                        'namespace': 'http://test.org/xmlns/0.1'}})
        self.assertTrue(hasattr(cat, 'nsmap'))
        self.assertEqual(getattr(cat, 'nsmap')['ns0'], nsmap['ns0'])
Example #28
def get_inventory():
    # We'll first create all the various objects. These strongly follow the
    # hierarchy of StationXML files.
    inv = Inventory(
        # We'll add networks later.
        networks=[],
        # The source should be the ID of whoever created the file.
        source="ObsPy-Tutorial")

    net = Network(
        # This is the network code according to the SEED standard.
        code="US",
        # A list of stations. We'll add one later.
        stations=[],
        description="A test station.",
        # Start-and end dates are optional.
        start_date=UTCDateTime(2016, 1, 2))

    sta = Station(
        # This is the station code according to the SEED standard.
        code="ABCD",
        latitude=1.0,
        longitude=2.0,
        elevation=345.0,
        creation_date=UTCDateTime(2016, 1, 2),
        site=Site(name="First station"))

    cha1 = Channel(
        # This is the channel code according to the SEED standard.
        code="HN1",
        # This is the location code according to the SEED standard.
        location_code="11",
        # Note that these coordinates can differ from the station coordinates.
        latitude=1.0,
        longitude=2.0,
        elevation=345.0,
        depth=10.0,
        azimuth=0.0,
        dip=-90.0,
        sample_rate=1)
    cha2 = Channel(
        # This is the channel code according to the SEED standard.
        code="HN2",
        # This is the location code according to the SEED standard.
        location_code="11",
        # Note that these coordinates can differ from the station coordinates.
        latitude=1.0,
        longitude=2.0,
        elevation=345.0,
        depth=10.0,
        azimuth=90.0,
        dip=-90.0,
        sample_rate=1)
    cha3 = Channel(
        # This is the channel code according to the SEED standard.
        code="HNZ",
        # This is the location code according to the SEED standard.
        location_code="11",
        # Note that these coordinates can differ from the station coordinates.
        latitude=1.0,
        longitude=2.0,
        elevation=345.0,
        depth=10.0,
        azimuth=0.0,
        dip=-90.0,
        sample_rate=1)

    # Now tie it all together.
    sta.channels.append(cha1)
    sta.channels.append(cha2)
    sta.channels.append(cha3)
    net.stations.append(sta)
    inv.networks.append(net)

    return inv
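A quick usage sketch, writing the assembled inventory out as StationXML (the filename is illustrative):

inv = get_inventory()
print(inv)
inv.write('test_inventory.xml', format='STATIONXML')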
Example #29
start_t = '2015-07-04T15:42:18.000Z'

## Directories to be used in pulling data ##
# Need a list of earthquakes and stations
stndata = pd.read_csv(
    '/Users/aklimase/Documents/Cascadia_subduction/validation/PNSN_Network_noclip.txt',
    delim_whitespace=True,
    header=0)
net = np.array(stndata["net"])
stnm = np.array(stndata["#stnm"])
chan = np.array(stndata["chan"])
loc = np.array(['*'] * len(net))
lat = np.array(stndata["lat"])
lon = np.array(stndata["lon"])

t1 = UTCDateTime(start_t)
t2 = t1 + 45  # delta in seconds

fdsn_client = Client('IRIS')
# Fetch waveform from IRIS FDSN web service into a ObsPy stream object
# and automatically attach correct response
for i in range(0, len(net)):
    st = fdsn_client.get_waveforms(network=net[i],
                                   station=stnm[i],
                                   location='*',
                                   channel=chan[i],
                                   starttime=t1,
                                   endtime=t2,
                                   attach_response=True)

    tr = st[0]
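Because the responses were attached at request time, each stream can be corrected right away; a sketch (the output unit is an illustrative choice):

# inside the loop above, after fetching st:
st.remove_response(output='VEL')  # 'VEL' -> velocity in m/s
print(st[0].stats.station, abs(st[0].data).max())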
Example #30
class SLNetStation(object):
    """
    Class to hold a SeedLink stream selectors for a network/station.

    :var MAX_SELECTOR_SIZE: Maximum selector size.
    :type MAX_SELECTOR_SIZE: int
    :var net: The network code.
    :type net: str
    :var station: The station code.
    :type station: str
    :var selectors: SeedLink style selectors for this station.
    :type selectors: str
    :var seqnum: SeedLink sequence number of last packet received.
    :type seqnum: int
    :var btime: Time stamp of last packet received.
    :type btime: :class:`~obspy.core.utcdatetime.UTCDateTime`
    """
    MAX_SELECTOR_SIZE = 8

    def __init__(self, net, station, selectors, seqnum, timestamp):
        """
        Creates a new instance of SLNetStation.

        :param net: network code.
        :param station: station code.
        :param selectors: selectors for this net/station, null if none.
        :param seqnum: SeedLink sequence number of last packet received,
            -1 to start at the next data.
        :param timestamp: SeedLink time stamp in a UTCDateTime format for
            last packet received, null for none.
        """
        self.net = str(net)
        self.station = str(station)
        # print "DEBUG: selectors:", selectors
        if selectors is not None:
            self.selectors = selectors
        else:
            self.selectors = []
        self.seqnum = seqnum
        if timestamp is not None:
            self.btime = UTCDateTime(timestamp)
        else:
            self.btime = None

    def append_selectors(self, new_selectors):
        """
        Appends a selectors String to the current selectors for this
        SLNetStation.

        :return: 0 if selectors added successfully, 1 otherwise
        """
        self.selectors.append(new_selectors)
        return 1

    def get_selectors(self):
        """
        Returns the selectors as an array of Strings

        :return: array of selector Strings
        """
        return self.selectors

    def get_sl_time_stamp(self):
        """
        Returns the time stamp in SeedLink string format:
        "year,month,day,hour,minute,second"

        :return: SeedLink time
        """
        return self.btime.format_seedlink()
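A hypothetical instantiation (codes, selector, and timestamp are illustrative):

from obspy import UTCDateTime

sta = SLNetStation('GE', 'APE', ['BHZ.D'], -1,
                   UTCDateTime('2010-01-01T00:00:00'))
print(sta.get_selectors())      # ['BHZ.D']
print(sta.get_sl_time_stamp())  # 'year,month,day,hour,minute,second'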
Example #31
 def getWaveform(*args, **kwargs):
     """
     Retrieves the waveforms and normalizes the graphs
     """
     # Check the two dates.
     try:
         st = UTCDateTime(NV.starttime.get())
     except Exception:
         status_bar.configure(text='Please enter a valid start time.', foreground='red')
         status_bar.update_idletasks()
         return
     try:
         ed = UTCDateTime(NV.endtime.get())
     except Exception:
         status_bar.configure(text='Please enter a valid end time.', foreground='red')
         status_bar.update_idletasks()
         return
     if ed - st <= 0:
         status_bar.configure(text='Start time must be earlier than end time.', foreground='red')
         status_bar.update_idletasks()
         return
     now = UTCDateTime()
     if now < st:
         status_bar.configure(text='You cannot plot the future...', foreground='red')
         status_bar.update_idletasks()
         return
     if ed - st > MAX_SPAN:
         status_bar.configure(text='Timeframe too large. Maximal %s seconds allowed.' % MAX_SPAN, foreground='red')
         status_bar.update_idletasks()
         return
     stream_list = []
     if len(NV.selected_list) == 0:
         NV.stream = None
         create_graph()
         return
     status_bar.configure(text='Retrieving data...', foreground='black')
     status_bar.update_idletasks()
     for channel in NV.selected_list:
         # Read the waveform
         start = UTCDateTime(NV.starttime.get())
         end = UTCDateTime(NV.endtime.get())
         splitted = channel.split('.')
         network = splitted[0]
         station = splitted[1]
         location = splitted[2]
         channel = splitted[3]
         try:
             st = SH.client.waveform.getWaveform(network, station, location,
                                             channel, start, end)
         except Exception:
             trace = Trace(header={'network' : network,
                 'station' : station, 'location' : location,
                 'channel' : channel, 'starttime': start,
                 'endtime' : end, 'npts' : 0, 'sampling_rate' : 1.0})
             st = Stream(traces=[trace])
         st.merge()
         st.trim(start, end)
         stream_list.append(st)
     st = stream_list[0]
     for _i in range(1, len(stream_list)):
         st += stream_list[_i]
     # Merge the Stream and replace all masked values with NaNs.
     st.merge()
     st.sort()
     # Normalize all traces and throw out traces with no data.
     try:
         max_diff = max(trace.data.max() - trace.data.min()
                        for trace in st if len(trace.data) > 0)
     except ValueError:
         # No trace contains data; use a neutral default so the
         # normalization below never sees an undefined max_diff.
         max_diff = 1.0
     for trace in st:
         # Throw out traces that are empty or completely masked.
         if (np.ma.is_masked(trace.data) and trace.data.mask.all()) \
                 or len(trace.data) == 0:
             trace.data = np.array([])
         else:
             trace.data = trace.data - trace.data.mean()
             trace.data = trace.data / (max_diff / 2)
     NV.stream = st
     # Bound all traces by the requested start and end times.
     starttime = UTCDateTime(NV.starttime.get())
     endtime = UTCDateTime(NV.endtime.get())
     for trace in NV.stream:
         if np.ma.is_masked(trace.data):
             # Fill masked samples with NaNs so they plot as gaps.
             trace.data = trace.data.filled(np.NaN)
     # Loop over all traces again and fill with NaNs.
     for trace in NV.stream:
         startgaps = int(round((trace.stats.starttime - starttime) * \
                             trace.stats.sampling_rate))
         endgaps = int(round((endtime - trace.stats.endtime) * \
                             trace.stats.sampling_rate))
         if startgaps or endgaps:
             if startgaps > 0:
                 start = np.empty(startgaps)
                 start[:] = np.NaN
             else:
                 start = []
             if endgaps > 0:
                 end = np.empty(endgaps)
                 end[:] = np.NaN
             else:
                 end = []
             trace.data = np.concatenate([start, trace.data, end])
             trace.stats.npts = trace.data.size
             trace.stats.starttime = UTCDateTime(NV.starttime.get())
             #trace.stats.endtime = UTCDateTime(NV.endtime.get())
     status_bar.configure(text='')
     status_bar.update_idletasks()
     create_graph()
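
# --- Standalone sketch of the NaN padding above (assumed values) ---
# Pads a trace covering 2-8 s at 1 Hz out to a requested 0-10 s window,
# mirroring the startgaps/endgaps arithmetic in getWaveform().
import numpy as np

sampling_rate = 1.0
window_start, window_end = 0.0, 10.0      # requested time window [s]
trace_start, trace_end = 2.0, 8.0         # actual data coverage [s]
data = np.ones(7)                         # samples at t = 2, 3, ... 8 s

startgaps = int(round((trace_start - window_start) * sampling_rate))
endgaps = int(round((window_end - trace_end) * sampling_rate))
padded = np.concatenate([np.full(max(startgaps, 0), np.nan), data,
                         np.full(max(endgaps, 0), np.nan)])
assert padded.size == 11                  # 2 NaNs + 7 samples + 2 NaNs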
Example No. 32
def shakedown(request):
	DEFAULTSTATION = 'NE.ORNO.00.HHZ'
	JULIANDATE = UTCDateTime.now().strftime('%Y.%j')
	AVAIL = '/media/shakedown/avail/%s.png' % JULIANDATE
	HELI = '/media/shakedown/cronplots/%s.%s-heli.png' % (DEFAULTSTATION, JULIANDATE)
	SPEC = '/media/shakedown/cronplots/%s.%s-spec.png' % (DEFAULTSTATION, JULIANDATE)
	SPECBP = '/media/shakedown/cronplots/%s.%s-spec-band.png' % (DEFAULTSTATION, JULIANDATE)
	HELIBP = '/media/shakedown/cronplots/%s.%s-heli-band.png' % (DEFAULTSTATION, JULIANDATE)
	
	initialtime = (UTCDateTime.now() - 90).strftime('%Y%m%d%H%M%S')
	initialdur = 30
	initialdbscale = ''
	r35e7, r4989, r6a3b, rcb43, orno = '', '', '', '', ''
	sta, start, dur, end, dbscale, logscale, status = False, False, False, False, False, False, False
	userstart, userdur = '', ''
	filtmin, filtmax = 0.7, 2
	procimg = {
		'heli': HELI,
		'avail': AVAIL,
		'spec': SPEC,
		'helibp': HELIBP,
		'specbp': SPECBP,
		'filtmin': filtmin,
		'filtmax': filtmax,
	}
	try:
		userstart = int(request.GET['start'])
		start = datetime.strptime(str(userstart), '%Y%m%d%H%M%S')
		initialtime = start.strftime('%Y%m%d%H%M%S')
		try:
			userdur = int(request.GET['dur'])
		except (ValueError, MultiValueDictKeyError) as e:
			status = e
		try:
			dbscale = request.GET['dbscale']
			if dbscale == 'on':
				dbscale = True
				initialdbscale = 'checked'
			else:
				dbscale = False
		except (ValueError, MultiValueDictKeyError) as e:
			pass
		try:
			logscale = request.GET['logscale']
			if logscale == 'on':
				logscale = True
			else:
				logscale = False
		except (ValueError, MultiValueDictKeyError) as e:
			pass
		try:
			sta = request.GET['sta']
			if 'AM.R6A3B.00.EHZ' == sta:
				r6a3b = ' selected'
			if 'AM.RCB43.00.SHZ' == sta:
				rcb43 = ' selected'
			if 'AM.R35E7.00.SHZ' == sta:
				r35e7 = ' selected'
			if 'AM.R4989.00.EHZ' == sta:
				r4989 = ' selected'
			if 'NE.ORNO.00.HHZ' == sta:
				orno = ' selected'
		except (ValueError, MultiValueDictKeyError) as e:
			sta = False
		try:
			filtmin = float(request.GET['filtmin'])
			filtmax = float(request.GET['filtmax'])
			if filtmax < filtmin:
				filtmin, filtmax = filtmax, filtmin
			if filtmax >= 25:
				filtmax = 25
			if filtmax < 0.1:
				filtmax = 0.1
			if filtmin <= 0:
				filtmin = 0
			if filtmin > 24.9:
				filtmin = 24.9
		except (ValueError, MultiValueDictKeyError) as e:
			filtmin = 0.7
			filtmax = 2
	except ValueError as e:
		status = e
	except MultiValueDictKeyError as e:
		status = e

	if userstart and userdur:
		initialdur = userdur
		start = UTCDateTime(start)
		try:
			dur = timedelta(0, userdur)
			end = start + dur
			procimg = processor(sta, start, end, dbscale, filtmin, filtmax)
		except ValueError as e:
			pass
	elif userstart:
		try:
			end = start + timedelta(0, 60)
			procimg = processor(sta, start, end, dbscale, filtmin, filtmax)
		except Exception:
			pass
	else:
		pass

	if procimg['spec']:
		SPEC = procimg['spec']
		AVAIL = procimg['avail']
		HELI = procimg['heli']
		SPECBP = procimg['specbp']
		HELIBP = procimg['helibp']
		filtmin = procimg['filtmin']
		filtmax = procimg['filtmax']
	elif procimg['heli']:
		HELI = procimg['heli']

	context = {
		'sta': sta,
		'r4989': r4989,
		'r6a3b': r6a3b,
		'rcb43': rcb43,
		'r35e7': r35e7,
		'orno': orno,
		'filtmin': filtmin,
		'filtmax': filtmax,
		'status': status,
		'form': ShakeForm,
		'start': start,
		'dur': dur,
		'end': end,
		'avail': AVAIL,
		'heliimg': HELI,
		'specimg': SPEC,
		'helibpimg': HELIBP,
		'specbpimg': SPECBP,
		'initialtime': initialtime,
		'initialdur': initialdur,
		'initialdbscale': initialdbscale,
		'time': timezone.now(),
		'page': 'Shakedown - The Seismic Data Archive Browser',
	}
	return render(request, 'shake/shakedown.html', context)
Example No. 33
 def test_focalmechanism(self):
     """
     Tests FocalMechanism object.
     """
     filename = os.path.join(self.path, 'quakeml_1.2_focalmechanism.xml')
     catalog = _read_quakeml(filename)
     self.assertEqual(len(catalog), 1)
     self.assertEqual(len(catalog[0].focal_mechanisms), 2)
     fm = catalog[0].focal_mechanisms[0]
     # general
     self.assertEqual(
         fm.resource_id,
         ResourceIdentifier('smi:ISC/fmid=292309'))
     self.assertEqual(len(fm.waveform_id), 2)
     self.assertEqual(fm.waveform_id[0].network_code, 'BW')
     self.assertEqual(fm.waveform_id[0].station_code, 'FUR')
     self.assertEqual(
         fm.waveform_id[0].resource_uri,
         ResourceIdentifier(id="smi:ch.ethz.sed/waveform/201754"))
     self.assertTrue(isinstance(fm.waveform_id[0], WaveformStreamID))
     self.assertEqual(
         fm.triggering_origin_id,
         ResourceIdentifier('smi:local/originId=7680412'))
     self.assertAlmostEqual(fm.azimuthal_gap, 0.123)
     self.assertEqual(fm.station_polarity_count, 987)
     self.assertAlmostEqual(fm.misfit, 1.234)
     self.assertAlmostEqual(fm.station_distribution_ratio, 2.345)
     self.assertEqual(
         fm.method_id,
         ResourceIdentifier('smi:ISC/methodID=Best_double_couple'))
     # comments
     self.assertEqual(len(fm.comments), 2)
     c = fm.comments
     self.assertEqual(c[0].text, 'Relocated after re-evaluation')
     self.assertEqual(c[0].resource_id, None)
     self.assertEqual(c[0].creation_info.agency_id, 'MUH')
     self.assertEqual(c[1].text, 'Another MUH')
     self.assertEqual(
         c[1].resource_id,
         ResourceIdentifier(id="smi:some/comment/id/number_3"))
     self.assertEqual(c[1].creation_info, None)
     # creation info
     self.assertEqual(fm.creation_info.author, "Erika Mustermann")
     self.assertEqual(fm.creation_info.agency_id, "MUH")
     self.assertEqual(
         fm.creation_info.author_uri,
         ResourceIdentifier("smi:smi-registry/organization/MUH"))
     self.assertEqual(
         fm.creation_info.agency_uri,
         ResourceIdentifier("smi:smi-registry/organization/MUH"))
     self.assertEqual(
         fm.creation_info.creation_time,
         UTCDateTime("2012-04-04T16:40:50+00:00"))
     self.assertEqual(fm.creation_info.version, "1.0.1")
     # nodalPlanes
     self.assertAlmostEqual(fm.nodal_planes.nodal_plane_1.strike, 346.0)
     self.assertAlmostEqual(fm.nodal_planes.nodal_plane_1.dip, 57.0)
     self.assertAlmostEqual(fm.nodal_planes.nodal_plane_1.rake, 75.0)
     self.assertAlmostEqual(fm.nodal_planes.nodal_plane_2.strike, 193.0)
     self.assertAlmostEqual(fm.nodal_planes.nodal_plane_2.dip, 36.0)
     self.assertAlmostEqual(fm.nodal_planes.nodal_plane_2.rake, 112.0)
     self.assertEqual(fm.nodal_planes.preferred_plane, 2)
     # principalAxes
     self.assertAlmostEqual(fm.principal_axes.t_axis.azimuth, 216.0)
     self.assertAlmostEqual(fm.principal_axes.t_axis.plunge, 73.0)
     self.assertAlmostEqual(fm.principal_axes.t_axis.length, 1.050e+18)
     self.assertAlmostEqual(fm.principal_axes.p_axis.azimuth, 86.0)
     self.assertAlmostEqual(fm.principal_axes.p_axis.plunge, 10.0)
     self.assertAlmostEqual(fm.principal_axes.p_axis.length, -1.180e+18)
     self.assertEqual(fm.principal_axes.n_axis.azimuth, None)
     self.assertEqual(fm.principal_axes.n_axis.plunge, None)
     self.assertEqual(fm.principal_axes.n_axis.length, None)
     # momentTensor
     mt = fm.moment_tensor
     self.assertEqual(
         mt.resource_id,
         ResourceIdentifier('smi:ISC/mtid=123321'))
     self.assertEqual(
         mt.derived_origin_id,
         ResourceIdentifier('smi:ISC/origid=13145006'))
     self.assertAlmostEqual(mt.scalar_moment, 1.100e+18)
     self.assertAlmostEqual(mt.tensor.m_rr, 9.300e+17)
     self.assertAlmostEqual(mt.tensor.m_tt, 1.700e+17)
     self.assertAlmostEqual(mt.tensor.m_pp, -1.100e+18)
     self.assertAlmostEqual(mt.tensor.m_rt, -2.200e+17)
     self.assertAlmostEqual(mt.tensor.m_rp, 4.000e+17)
     self.assertAlmostEqual(mt.tensor.m_tp, 3.000e+16)
     self.assertAlmostEqual(mt.clvd, 0.22)
     # exporting back to XML should result in the same document
     with open(filename, "rb") as fp:
         original = fp.read()
     processed = Pickler().dumps(catalog)
     compare_xml_strings(original, processed)
Example No. 34
 def test_origin(self):
     """
     Tests Origin object.
     """
     self.assertEqual(len(self.catalog[0].origins), 4)
     origin = self.catalog[0].origins[0]
     self.assertEqual(
         origin.resource_id,
         ResourceIdentifier(
             id='quakeml:us.anss.org/origin/20120101052755.98'))
     self.assertEqual(origin.origin_type, 'hypocenter')
     self.assertEqual(
         origin.time,
         UTCDateTime(2012, 1, 1, 5, 27, 55, 980000))
     self.assertEqual(origin.latitude, 31.456)
     self.assertAlmostEqual(
         origin.latitude_errors.uncertainty, 0.0155, places=3)
     self.assertEqual(origin.longitude, 138.072)
     self.assertAlmostEqual(
         origin.longitude_errors.uncertainty, 0.0173, places=3)
     self.assertEqual(origin.depth, 365300.0)
     self.assertEqual(origin.depth_errors.uncertainty, 2700.0)
     self.assertEqual(origin.depth_type, 'from location')
     self.assertEqual(origin.method_id, None)
     self.assertEqual(origin.time_fixed, None)
     self.assertEqual(origin.epicenter_fixed, None)
     self.assertEqual(
         origin.earth_model_id,
         ResourceIdentifier(
             id='quakeml:us.anss.org/earthmodel/ak135'))
     self.assertEqual(origin.evaluation_mode, None)
     self.assertEqual(origin.evaluation_status, None)
     self.assertEqual(origin.origin_type, 'hypocenter')
     # composite times
     self.assertEqual(len(origin.composite_times), 0)
     # quality
     self.assertEqual(origin.quality.used_station_count, 628)
     self.assertEqual(origin.quality.standard_error, 0.84)
     self.assertEqual(origin.quality.azimuthal_gap, 10.8)
     self.assertEqual(origin.quality.maximum_distance, 29.1)
     self.assertEqual(origin.quality.minimum_distance, 2.22)
     self.assertEqual(origin.quality.associated_phase_count, 52)
     self.assertEqual(origin.quality.associated_station_count, 628)
     self.assertEqual(origin.quality.depth_phase_count, 0)
     self.assertEqual(origin.quality.secondary_azimuthal_gap, None)
     self.assertEqual(origin.quality.ground_truth_level, None)
     self.assertEqual(origin.quality.median_distance, None)
     # comments
     self.assertEqual(len(origin.comments), 0)
     # creation info
     self.assertEqual(origin.creation_info.author, None)
     self.assertEqual(origin.creation_info.agency_id, 'USGS-NEIC')
     self.assertEqual(origin.creation_info.author_uri, None)
     self.assertEqual(origin.creation_info.agency_uri, None)
     self.assertEqual(origin.creation_info.creation_time, None)
     self.assertEqual(origin.creation_info.version, None)
     # origin uncertainty
     u = origin.origin_uncertainty
     self.assertEqual(u.preferred_description, 'confidence ellipsoid')
     self.assertEqual(u.horizontal_uncertainty, None)
     self.assertEqual(u.min_horizontal_uncertainty, None)
     self.assertEqual(u.max_horizontal_uncertainty, None)
     self.assertEqual(u.azimuth_max_horizontal_uncertainty, None)
     # confidence ellipsoid
     c = u.confidence_ellipsoid
     self.assertEqual(c.semi_intermediate_axis_length, 2750.0)
     # c.major_axis_rotation is computed during file reading:
     self.assertAlmostEqual(c.major_axis_rotation, 170.5, places=3)
     self.assertEqual(c.major_axis_plunge, 76.06)
     self.assertEqual(c.semi_minor_axis_length, 2210.0)
     self.assertEqual(c.semi_major_axis_length, 4220.0)
     self.assertEqual(c.major_axis_azimuth, 292.79)
Example No. 35
    def test_creating_minimal_quakeml_with_mt(self):
        """
        Tests the creation of a minimal QuakeML containing origin, magnitude
        and moment tensor.
        """
        # Define the source parameters (location, time, moment tensor).
        lat, lon, depth, org_time = 10.0, -20.0, 12000, UTCDateTime(2012, 1, 1)
        mrr, mtt, mpp, mtr, mpr, mtp = 1E18, 2E18, 3E18, 3E18, 2E18, 1E18
        scalar_moment = math.sqrt(
            mrr ** 2 + mtt ** 2 + mpp ** 2 + mtr ** 2 + mpr ** 2 + mtp ** 2)
        moment_magnitude = 0.667 * (math.log10(scalar_moment) - 9.1)
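        # With the values above, the sum of squared components is 28e36,
        # so scalar_moment = sqrt(28e36) ~ 5.29e18 N*m and
        # moment_magnitude = 0.667 * (log10(5.29e18) - 9.1) ~ 6.42.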

        # Initialise event
        ev = Event(event_type="earthquake")

        ev_origin = Origin(time=org_time, latitude=lat, longitude=lon,
                           depth=depth, resource_id=ResourceIdentifier())
        ev.origins.append(ev_origin)

        # populate event moment tensor
        ev_tensor = Tensor(m_rr=mrr, m_tt=mtt, m_pp=mpp, m_rt=mtr, m_rp=mpr,
                           m_tp=mtp)

        ev_momenttensor = MomentTensor(tensor=ev_tensor)
        ev_momenttensor.scalar_moment = scalar_moment
        ev_momenttensor.derived_origin_id = ev_origin.resource_id

        ev_focalmechanism = FocalMechanism(moment_tensor=ev_momenttensor)
        ev.focal_mechanisms.append(ev_focalmechanism)

        # populate event magnitude
        ev_magnitude = Magnitude()
        ev_magnitude.mag = moment_magnitude
        ev_magnitude.magnitude_type = 'Mw'
        ev_magnitude.evaluation_mode = 'automatic'
        ev.magnitudes.append(ev_magnitude)

        # write QuakeML file
        cat = Catalog(events=[ev])
        memfile = io.BytesIO()
        cat.write(memfile, format="quakeml", validate=IS_RECENT_LXML)

        memfile.seek(0, 0)
        new_cat = _read_quakeml(memfile)
        self.assertEqual(len(new_cat), 1)
        event = new_cat[0]
        self.assertEqual(len(event.origins), 1)
        self.assertEqual(len(event.magnitudes), 1)
        self.assertEqual(len(event.focal_mechanisms), 1)
        org = event.origins[0]
        mag = event.magnitudes[0]
        fm = event.focal_mechanisms[0]
        self.assertEqual(org.latitude, lat)
        self.assertEqual(org.longitude, lon)
        self.assertEqual(org.depth, depth)
        self.assertEqual(org.time, org_time)
        # Moment tensor.
        mt = fm.moment_tensor.tensor
        self.assertTrue(abs(fm.moment_tensor.scalar_moment - scalar_moment) /
                        scalar_moment < 1E-10)
        self.assertEqual(mt.m_rr, mrr)
        self.assertEqual(mt.m_pp, mpp)
        self.assertEqual(mt.m_tt, mtt)
        self.assertEqual(mt.m_rt, mtr)
        self.assertEqual(mt.m_rp, mpr)
        self.assertEqual(mt.m_tp, mtp)
        # Mag
        self.assertAlmostEqual(mag.mag, moment_magnitude)
        self.assertEqual(mag.magnitude_type, "Mw")
        self.assertEqual(mag.evaluation_mode, "automatic")
Example No. 36
def createPreview(trace, delta=60):
    """
    Creates a preview trace.

    A preview trace consists of maximum minus minimum of all samples within
    ``delta`` seconds. The parameter ``delta`` must be a multiple of the
    sampling rate of the ``trace`` object.

    :type delta: int, optional
    :param delta: Difference between two preview points. Defaults to ``60``.
    :rtype: :class:`~obspy.core.trace.Trace`
    :return: New Trace object.

    This method will modify the original Trace object. Create a copy of the
    Trace object if you want to continue using the original data.
    """
    if not isinstance(delta, int) or delta < 1:
        msg = 'The delta value needs to be an integer of at least 1.'
        raise TypeError(msg)
    data = trace.data
    start_time = trace.stats.starttime.timestamp
    # number of samples for a single slice of delta seconds
    samples_per_slice = delta * int(trace.stats.sampling_rate)
    if samples_per_slice < 1:
        raise ValueError('samples_per_slice is less than 1 - skipping')
    # minimum and maximum of samples before a static time marker
    start = int((delta - start_time % delta) * int(trace.stats.sampling_rate))
    start_time = start_time - start_time % delta
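    # e.g. with delta=60 and a start 30 s past the minute: 'start' counts
    # the samples up to the next minute mark, while start_time snaps back
    # to the previous one.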
    if start > (delta / 2) and data[0:start].size:
        first_diff = [data[0:start].max() - data[0:start].min()]
    else:
        # skip starting samples
        first_diff = []
        start_time += delta
    # number of complete slices of data
    number_of_slices = int((len(data) - start) / samples_per_slice)
    # minimum and maximum of remaining samples
    end = samples_per_slice * number_of_slices + start
    if end > (delta / 2) and data[end:].size:
        last_diff = [data[end:].max() - data[end:].min()]
    else:
        # skip tailing samples
        last_diff = []
    # Replace a NaN value with -1 (meaning missing data).
    if len(last_diff) and np.isnan(last_diff[0]):
        last_diff = [-1]
    # reshape matrix
    data = trace.data[start:end].reshape([number_of_slices, samples_per_slice])
    # get minimum and maximum for each row
    diff = data.ptp(axis=1)
    # fill masked values with -1 -> means missing data
    if isinstance(diff, np.ma.masked_array):
        diff = np.ma.filled(diff, -1)
    data = np.concatenate([first_diff, diff, last_diff])
    data = np.require(data, dtype=np.float32)
    tr = Trace(data=data, header=trace.stats)
    tr.stats.delta = delta
    tr.stats.npts = len(data)
    tr.stats.starttime = UTCDateTime(start_time)
    tr.stats.preview = True
    return tr
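
# --- Usage sketch for createPreview (synthetic data, assumed setup) ---
# Ten minutes of 10 Hz noise collapse to one peak-to-peak value per
# minute; tr.copy() is used because createPreview reuses the input stats.
import numpy as np
from obspy import Trace, UTCDateTime

tr = Trace(data=np.random.randn(6000).astype(np.float32))
tr.stats.sampling_rate = 10.0
tr.stats.starttime = UTCDateTime(2024, 1, 1)

preview = createPreview(tr.copy(), delta=60)
print(preview.stats.delta)  # -> 60 (one preview point per minute)
print(preview.stats.npts)   # -> 10 (one max-min value per minute)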
Example No. 37
    def getMatchingEvents(self, solve=True):
        """Return a list of dictionaries matching input parameters.

        Args:
            solve (bool):
                If set to True, then this method
                should return a list with a maximum of one event.

        Returns:
            list: List of event dictionaries, with fields:
                  - time Event time (UTC)
                  - lat Event latitude
                  - lon Event longitude
                  - depth Event depth
                  - mag Event magnitude
        """
        jpyear = str(self.jptime.year)
        jpquarter = str(QUARTERS[self.jptime.month])
        if len(jpquarter) == 1:
            jpquarter = '0' + jpquarter
        url = SEARCH_URL.replace('YEAR', jpyear)
        url = url.replace('QUARTER', jpquarter)
        req = requests.get(url)
        data = req.text
        soup = BeautifulSoup(data, features="lxml")
        select = soup.find('select')
        options = select.find_all('option')
        times = []
        lats = []
        lons = []
        depths = []
        mags = []
        values = []
        for option in options:
            if 'Data not found' in option.text:
                break
            eventstr = option.contents[0]
            timestr = re.search(TIMEPAT, eventstr).group()
            latstr = re.search(LATPAT, eventstr).group()
            lonstr = re.search(LONPAT, eventstr).group()
            depstr = re.search(DEPPAT, eventstr).group()
            magstr = re.search(MAGPAT, eventstr).group()
            lat = float(latstr.replace('N', ''))
            lon = float(lonstr.replace('E', ''))
            depth = float(depstr.replace('km', ''))
            mag = float(magstr.replace('M', ''))
            etime = datetime.strptime(timestr, TIMEFMT)
            times.append(np.datetime64(etime))
            lats.append(lat)
            lons.append(lon)
            depths.append(depth)
            mags.append(mag)
            values.append(option.get('value'))

        events = []
        if not len(times):
            return events

        times = np.array(times)
        lats = np.array(lats)
        lons = np.array(lons)
        depths = np.array(depths)
        mags = np.array(mags)
        values = np.array(values)
        distances = geodetic_distance(self.lon, self.lat, lons, lats)
        didx = distances <= self.radius
        jptime = np.datetime64(self.jptime)
        # dtimes is in microseconds
        dtimes = np.abs(jptime - times)
        tidx = dtimes <= np.timedelta64(int(self.dt), 's')
        etimes = times[didx & tidx]
        elats = lats[didx & tidx]
        elons = lons[didx & tidx]
        edepths = depths[didx & tidx]
        emags = mags[didx & tidx]
        evalues = values[didx & tidx]

        for etime, elat, elon, edep, emag, evalue in zip(etimes, elats,
                                                         elons, edepths,
                                                         emags, evalues):
            jtime = UTCDateTime(str(etime))
            utime = jtime - JST_OFFSET
            edict = {'time': utime,
                     'lat': elat,
                     'lon': elon,
                     'depth': edep,
                     'mag': emag,
                     'cgi_value': evalue}
            events.append(edict)

        if solve and len(events) > 1:
            event = self.solveEvents(events)
            events = [event]

        return events
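
# --- Stripped-down sketch of the distance/time matching above ---
# Catalog values below are invented; the point is the boolean-mask join
# that keeps only events passing both the distance and the time test.
import numpy as np

distances = np.array([12.0, 250.0, 80.0])          # km from target
dtimes = np.array([30, 400, 45], dtype='timedelta64[s]')
mags = np.array([4.1, 5.0, 3.2])

didx = distances <= 100.0                          # spatial window
tidx = dtimes <= np.timedelta64(60, 's')           # temporal window
print(mags[didx & tidx])                           # -> [4.1 3.2]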
Example No. 38
def processor(sta, start, end, dbscale, filtmin, filtmax, inpath='/var/www/nezsite/nezsite/nezsite/media/seismic', OUTPATH='/var/www/nezsite/nezsite/nezsite/media/shakedown'):
	day = start.strftime('%Y.%j')
	yday = (start - timedelta(days=1)).strftime('%Y.%j')
	daystart = UTCDateTime(start.year, start.month, start.day)
	dayend = daystart + timedelta(days=1)
	if dayend > datetime.now():
		now = UTCDateTime.now()
		mins = 0
		hourdelta = timedelta(hours=0)
		if 14 >= now.minute >= 0:
			mins = 15
		elif 29 >= now.minute >= 15:
			mins = 30
		elif 44 >= now.minute >= 30:
			mins = 45
		else:
			mins = 0
			hourdelta = timedelta(hours=1)
		now += hourdelta
		dayend = UTCDateTime(now.year, now.month, now.day, now.hour, mins)
		daystart = dayend - timedelta(days=1)
	avail = day + '.png'
	avail = os.path.join(AVAILPATH, avail)

	if sta:
		stn = sta
		#sta = sta + '.D.'
	else:
		stn = str(STA_DEF[0:-2])
		#sta = STA_DEF
		sta = stn

	stc = stn.split('.')
	net = stc[0]
	sta = stc[1]
	loc = stc[2]
	ch = stc[3]

	fn = '%s.%s.%s.%s.%s' % (sta, net, loc, ch, day)
	yfn = '%s.%s.%s.%s.%s' % (sta, net, loc, ch, yday)

	inpath = os.path.join(inpath, stc[0], stc[1])
	if os.path.isdir(os.path.join(inpath, 'proc')):
		pass
	else:
		os.mkdir(os.path.join(inpath, 'proc'))

	shutil.copy2(os.path.join(inpath, fn), os.path.join(inpath, 'proc'))
	shutil.copy2(os.path.join(inpath, yfn), os.path.join(inpath, 'proc'))
	ypath = inpath
	inpath = os.path.join(inpath, 'proc')


	tz = int(datetime.now(pytz.timezone('America/New_York')).strftime('%z'))/100
	fmin = 0.1
	fmax = 25
	fminbp = filtmin
	fmaxbp = filtmax
	if 'ORNO' in sta:
		fminbp = 0.03 # 33.3 seconds
		fmaxbp = 0.1  # 10 seconds

	heli = os.path.join(OUTPATH, stn + '.' + day + '-heli.png')
	helibp = os.path.join(OUTPATH, stn + '.' + day + '-heli-band.png')
	dur, spec = '', ''

	st = read().clear()
	yst = st.copy()
	try:
		yst = read(os.path.join(ypath, yfn))
	except Exception:
		print("Error reading yesterday's miniSEED file; there may be further errors...")

	try:
		st = read(os.path.join(inpath, fn))
		os.remove(os.path.join(inpath, fn))
	except Exception:
		print("Error reading today's miniSEED file; there may be further errors...")

	net = str(st[0].stats.network)
	sta = str(st[0].stats.station)
	loc = str(st[0].stats.location)
	ch = str(st[0].stats.channel)
	startt = str(st[0].stats.starttime)
	sr = str(st[0].stats.sampling_rate)

	st = yst + st
	#st.merge()
	st = st.slice(starttime=daystart, endtime=dayend)

	sbp = st.copy()
	sbp = sbp.filter('bandpass', freqmin=fminbp, freqmax=fmaxbp, zerophase=True)
	spu = st.slice(starttime=start, endtime=end)
	sps = sbp.slice(starttime=start, endtime=end) # slice for bandpass spectrogram

	cat = Catalog()
	try:
		cat.extend(read_events(pathname_or_url='/var/www/nezsite/nezsite/nezsite/media/seismic/events/evtmajor30days.xml', format='QUAKEML'))
	except Exception:
		pass
	try:
		cat.extend(read_events(pathname_or_url='/var/www/nezsite/nezsite/nezsite/media/seismic/events/evtlocal30days.xml', format='QUAKEML'))
	except Exception:
		pass

	'''
	# get events
	client = Client("USGS")
	cat = Catalog()
	try:
		cat += client.get_events(starttime=daystart, endtime=dayend, latitude=44.036114, longitude=-70.439856, maxradius=10)
	except FDSNException:
		pass
	try:
		cat += client.get_events(starttime=daystart, endtime=dayend, latitude=44.036114, longitude=-70.439856,
									minradius=10, maxradius=15, minmagnitude=2.5)
	except FDSNException:
		pass
	try:
		cat += client.get_events(starttime=daystart, endtime=dayend, minmagnitude=6.5)
	except FDSNException:
		pass
	'''

	title = net + '.' + sta + '.' + loc + '.' + ch + ' - ' + startt + ' - rate: ' + sr

	st.plot(type="dayplot", size=(1600, 1200), title=title + 'Hz - band: 0-25Hz', vertical_scaling_range=2000,
		tick_format='%H:%M', outfile=heli, color=['k', 'r', 'b', 'g'], linewidth=0.3, time_offset=tz, events=cat)
	sbp.plot(type="dayplot", size=(1600, 1200), title=title + 'Hz - band: '+ str(fminbp) + '-' + str(fmaxbp) + 'Hz', vertical_scaling_range=200,
		tick_format='%H:%M', outfile=helibp, color=['k', 'r', 'b', 'g'], linewidth=0.3, time_offset=tz, events=cat)

	#st.plot(type="dayplot", title=net + '.' + sta + '.' + loc + '.' + ch + ' - ' + startt + ' - rate: ' + sr + 'Hz - band: 0-25Hz', vertical_scaling_range=8e3, outfile=heli, color=['k', 'r', 'b', 'g'], time_offset=tz, events={'min_magnitude': 6.5})
	#sbp.plot(type="dayplot", title=net + '.' + sta + '.' + loc + '.' + ch + ' - ' + startt + ' - rate: ' + sr + 'Hz - band: '+ str(fminbp) + '-' + str(fmaxbp) + 'Hz', vertical_scaling_range=7e2, outfile=helibp, color=['k', 'r', 'b', 'g'], time_offset=tz, events={'min_magnitude': 6.5})

	heli = WEBPATH + os.path.split(heli)[1]
	helibp = WEBPATH + os.path.split(helibp)[1]

	if end:
		dur = end - start


	sp = spu.detrend(type='constant')
	ss = sps.detrend(type='constant')

	startt = str(sp[0].stats.starttime)


	## ------------------------- ##
	# make spectrogram figure 1
	fig = plt.figure(figsize=(16,6), dpi=100)
	ax1 = fig.add_axes([0.068, 0.75, 0.85, 0.2]) #[left bottom width height]
	ax2 = fig.add_axes([0.068, 0.1, 0.85, 0.6], sharex=ax1)
	ax3 = fig.add_axes([0.931, 0.1, 0.03, 0.6])

	# labels
	fig.suptitle(net + '.' + sta + '.' + loc + '.' + ch + ' - ' + startt + ' - samplerate: ' + sr + 'Hz - frequency band: 0-25 Hz')
	ax1.set_ylabel('Traces')
	ax2.set_xlabel('Time [s]')
	ax2.set_ylabel('Frequency [Hz]')
	ax3.set_ylabel('Energy density [dimensionless]') # doesn't work

	# make time vector
	t = np.arange(sp[0].stats.npts) / sp[0].stats.sampling_rate

	# plot waveform (top subfigure)
	ax1.plot(t, sp[0].data, 'k', linewidth=0.5)

	# plot spectrogram (bottom subfigure)
	fig = sp[0].spectrogram(show=False, axes=ax2, log=False, dbscale=dbscale, cmap='viridis')
	mappable = ax2.images[0]
	plt.colorbar(mappable=mappable, cax=ax3)

	ax2.set_ylim(fmin, fmax)

	if 'cronplots' in OUTPATH:
		spec = os.path.join(OUTPATH, stn + '.' + start.strftime('%Y.%j') + "-spec.png")
	else:
		spec = os.path.join(OUTPATH, stn + '.' + start.strftime('%Y.%j.%H%M%S-') + str(dur) + "-spec.png")
	plt.savefig(spec)
	spec = WEBPATH + os.path.split(spec)[1]


	## ------------------------- ##
	# make spectrogram figure 2
	sfig2 = plt.figure(figsize=(16,4), dpi=100)
	ax1 = sfig2.add_axes([0.068, 0.600, 0.85, 0.3]) #[left bottom width height]
	ax2 = sfig2.add_axes([0.068, 0.115, 0.85, 0.4], sharex=ax1)
	ax3 = sfig2.add_axes([0.932, 0.115, 0.03, 0.4])

	# labels
	sfig2.suptitle(net + '.' + sta + '.' + loc + '.' + ch + ' - ' + startt + ' - samplerate: ' + sr + 'Hz - bandpass: ' + str(fminbp) + '-' + str(fmaxbp) + ' Hz')
	ax1.set_ylabel('Counts')
	ax2.set_xlabel('Time [s]')
	ax2.set_ylabel('Frequency [Hz]')
	ax3.set_ylabel('Energy density [dimensionless]') # doesn't work

	# make time vector
	t = np.arange(ss[0].stats.npts) / ss[0].stats.sampling_rate

	# plot waveform (top subfigure)
	ax1.plot(t, ss[0].data, 'k', linewidth=0.5)

	# plot spectrogram (bottom subfigure)
	sfig2 = ss[0].spectrogram(show=False, axes=ax2, log=False, dbscale=dbscale, cmap='viridis')
	mappable = ax2.images[0]
	plt.colorbar(mappable=mappable, cax=ax3)

	ax2.set_ylim(fminbp, fmaxbp)


	if 'cronplots' in OUTPATH:
		specbp = os.path.join(OUTPATH, stn + '.' + start.strftime('%Y.%j') + "-spec-band.png")
	else:
		specbp = os.path.join(OUTPATH, stn + '.' + start.strftime('%Y.%j.%H%M%S-') + str(dur) + "-spec-band.png")
	plt.savefig(specbp)
	specbp = WEBPATH + os.path.split(specbp)[1]



	imgpaths = {
		'avail': avail,
		'filtmin': fminbp,
		'filtmax': fmaxbp,
		'heli': heli,
		'helibp': helibp,
		'spec': spec,
		'specbp': specbp,
	}
	return imgpaths
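
# --- Hedged sketch of the quarter-hour rounding used in processor() ---
# Rounds "now" up to the next :00/:15/:30/:45 mark so the plot window
# always ends on a clean boundary; this mirrors the minute-bucket logic
# near the top of processor() and is not part of the original module.
from obspy import UTCDateTime

def next_quarter_hour(now):
    if now.minute <= 14:
        mins = 15
    elif now.minute <= 29:
        mins = 30
    elif now.minute <= 44:
        mins = 45
    else:
        mins = 0
        now += 3600  # roll over into the next hour
    return UTCDateTime(now.year, now.month, now.day, now.hour, mins)

print(next_quarter_hour(UTCDateTime(2024, 1, 1, 23, 50)))
# -> 2024-01-02T00:00:00.000000Z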