Example #1
def getpaths(database_loc, starttime, endtime):
    """
    Gets a list of paths to mseed files that have data between the given
    start and end times, using an initialised SQL timeline database.
    @type starttime: L{UTCDateTime} or L{datetime} or L{date}
    @type endtime: L{UTCDateTime} or L{datetime} or L{date}

    @rtype: list of (file_path,) tuples
    """

    starttime, endtime = utc(starttime), utc(endtime)

    import_start = starttime.timestamp
    import_end = endtime.timestamp

    # connect to the SQL database
    if not os.path.exists(database_loc):
        raise Exception("Database doesn't exist: {}".format(database_loc))

    conn = lite.connect(database_loc)
    c = conn.cursor()

    extrema = []
    for row in c.execute('''SELECT * FROM
                         file_extrema ORDER BY station'''):
        extrema.append(row)

    # select every file whose [starttime, endtime] span overlaps the
    # requested time window
    file_paths = c.execute(
        '''SELECT file_path FROM
           file_extrema WHERE starttime <= ?
           AND endtime >= ?''', (import_end, import_start))

    output_path = []
    for file_path in file_paths:
        output_path.append(file_path)

    # close database
    conn.close()
    return output_path
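
# A minimal usage sketch, assuming a timeline database already populated by
# this module; the database path and time window below are hypothetical and
# for illustration only.
example_db = '/path/to/timeline.db'
example_rows = getpaths(example_db,
                        utc("2014-03-01T00:00:00"),
                        utc("2014-03-02T00:00:00"))
for (example_mseed_path,) in example_rows:
    print example_mseed_path
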
            times = []
            for tr in st:

                start_time = tr.stats.starttime
                # reduce the first day to the beginning time i.e. midnight.
                start_time = start_time - (start_time.hour * 3600) - (
                    start_time.minute * 60) - start_time.second
                start_time = start_time.timestamp
                end_time = tr.stats.endtime.timestamp  #tr.stats.endtime
                times.append(start_time)
                times.append(end_time)

            days = int((end_time - start_time) / 86400) + 1

            time_intervals = [
                utc(i) for i in np.linspace(min(times), max(times), days)
            ]

            for i in range(1, len(time_intervals)):
                starttime = time_intervals[i - 1]
                endtime = time_intervals[i]

                st_partial = read(path, starttime=starttime, endtime=endtime)
                net = st_partial[0].stats.network
                stat = st_partial[0].stats.station
                loc = st_partial[0].stats.location
                channel = st_partial[0].stats.channel
                year = starttime.year
                jul_day = starttime.julday
                write_string = '{}_{}_{}_{}_{}_{}.msd'.format(
                    net, stat, channel, loc, year, jul_day)
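
# For illustration only: the naming scheme above yields per-day file names
# such as the following (hypothetical network/station/channel values, empty
# location code).
example_name = '{}_{}_{}_{}_{}_{}.msd'.format('XX', 'STA01', 'HHZ', '', 2014, 152)
print example_name  # -> XX_STA01_HHZ__2014_152.msd
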
Example #5
    t_start = timestamp + event_plus1
    t_end = timestamp + event_plus2

    # get only the miniseed paths that have data between t_start and t_end
    event_paths = getpaths(database_loc, t_start, t_end)

    event_traces = []
    SNRs = []

    # for each matching file: cut the event window out, merge, fill masked
    # gaps and rewrite the file
    for event_path in event_paths:
        event_path = str(event_path[0])

        try:
            print "processing event path: {}".format(event_path)
            print utc(t_start), utc(t_end)
            st = read(event_path)

            end = utc(t_start) + 3600

            st = st.cutout(starttime=utc(t_start), endtime=utc(t_end))
            st.merge()
            for tr in st:
                tr.data = np.ma.filled(tr.data)
            st.write(event_path, format='MSEED')

        except Exception as error:
            print error

    counter += 1
Example #7
DATALESS_DIR = CONFIG.DATALESS_DIR
STATIONXML_DIR = CONFIG.STATIONXML_DIR
CROSSCORR_DIR = CONFIG.CROSSCORR_DIR
USE_DATALESSPAZ = CONFIG.USE_DATALESSPAZ
USE_STATIONXML = CONFIG.USE_STATIONXML
CROSSCORR_STATIONS_SUBSET = CONFIG.CROSSCORR_STATIONS_SUBSET
CROSSCORR_SKIPLOCS = CONFIG.CROSSCORR_SKIPLOCS
FIRSTDAY = CONFIG.FIRSTDAY
LASTDAY = CONFIG.LASTDAY

TIMELINE_DB = os.path.join(DATABASE_DIR, 'timeline.db')

PSD_OUTPUT = os.path.join(CROSSCORR_DIR, 'PSD')
if not os.path.exists(PSD_OUTPUT): os.mkdir(PSD_OUTPUT)
    
t_start, t_end = utc(FIRSTDAY), utc(LASTDAY)

def read_ref(path):
    ref_head, ref_data = rdreftek(path)
    st = reftek2stream(ref_head, ref_data)
    return st
    
#------------------------------------------------------------------------------
# IMPORT PATHS TO MSEED FILES
#------------------------------------------------------------------------------

def getpaths(database_loc, starttime, endtime):
        """
        Gets list of paths to mseed files between certain time series intervals
        using initialised SQL timeline database.
        @type starttime: L{UTCDateTime} or L{datetime} or L{date}
        try:
            st = read(path, headonly=True)
            times = []
            for tr in st:

                start_time = tr.stats.starttime
                # reduce the first day to the beginning time i.e. midnight.
                start_time = start_time - (start_time.hour * 3600) - (
                    start_time.minute * 60) - start_time.second
                start_time = start_time.timestamp
                end_time = tr.stats.endtime.timestamp  #tr.stats.endtime
                times.append(start_time)
                times.append(end_time)

            days = int((end_time - start_time) / 86400) + 1

            time_intervals = [
                utc(i) for i in np.linspace(min(times), max(times), days)
            ]

            for i in range(1, len(time_intervals)):
                starttime = time_intervals[i - 1]
                endtime = time_intervals[i]

                st_partial = read(path, starttime=starttime, endtime=endtime)
                net = st_partial[0].stats.network
                stat = st_partial[0].stats.station
                loc = st_partial[0].stats.location
                channel = st_partial[0].stats.channel
                year = starttime.year
                jul_day = starttime.julday
                write_string = '{}_{}_{}_{}_{}_{}.msd'.format(
                    net, stat, channel, loc, year, jul_day)
                print write_string
                mseed_write = os.path.join(os.path.dirname(path), write_string)
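
# The getpaths queries above reference a 'file_extrema' table, but none of
# these snippets show its schema. A minimal sketch of a schema consistent
# with those SELECT statements (column names inferred from the queries; this
# is an assumption, not the author's original table definition):
def create_file_extrema_table(database_loc):
    # create the timeline table queried by getpaths, if it does not exist yet
    conn = lite.connect(database_loc)
    cur = conn.cursor()
    cur.execute('''CREATE TABLE IF NOT EXISTS file_extrema
                   (station TEXT, starttime REAL,
                    endtime REAL, file_path TEXT)''')
    conn.commit()
    conn.close()
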
Example #10
"""
Created on Thu Jan 21 13:09:17 2016
@author: Benjamin Boland
"""

from obspy.fdsn import Client
from obspy.core import UTCDateTime as utc
from obspy.core.event import Catalog
import sqlite3 as lite
import numpy as np
import os

client_list = ['IRIS', 'USGS', 'USP', 'NIEP', 'ETH', 'GFZ', 'INGV', 'ORFEUS']

# Operation dates for IESE network
t1 = utc("2014-01-01T00:00:00")
t2 = utc("2015-01-01T00:00:00")

reset_catalogue = True
timelimit = utc("2005-01-01T00:00:00").timestamp
# if reset_catalogue is True, all events before this time limit will be
# erased from the catalogue

database_name = '/storage/MASTERS/CONFIGURATIONS/S_NETWORK/INPUT/DATABASES/timeline.db'

# enter the information for catalogue 1 search
minlat, maxlat, minlon, maxlon = (-40.0, -12.5, 113.0, 154.0)

event_list = []

for c in client_list:
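    # The original snippet is cut off here. A minimal, assumed completion
    # (not the author's code): ask each FDSN client for events inside the
    # search box and time span, and keep whatever the client returns.
    try:
        client = Client(c)
        cat = client.get_events(starttime=t1, endtime=t2,
                                minlatitude=minlat, maxlatitude=maxlat,
                                minlongitude=minlon, maxlongitude=maxlon)
        event_list.append(cat)
    except Exception as error:
        print error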