Example #1
def ARC_waveform(input_dics, events, Sta_req, i, req_type):
    """
    Gets Waveforms, Response files and meta-data from ArcLink
    """
    t_wave_1 = datetime.now()

    add_event = []
    if req_type == 'save':
        period = '{0:s}_{1:s}_{2:s}_{3:s}'.\
            format(input_dics['min_date'].split('T')[0],
                   input_dics['max_date'].split('T')[0],
                   str(input_dics['min_mag']),
                   str(input_dics['max_mag']))
        eventpath = os.path.join(input_dics['datapath'], period)
        for k in range(len(events)):
            add_event.append(os.path.join(eventpath, events[k]['event_id']))
            if not os.path.isfile(os.path.join(add_event[k],
                                               'info', 'event.pkl')):
                events_fio = open(os.path.join(add_event[k],
                                               'info', 'event.pkl'), 'w')
                pickle.dump(events[k], events_fio)
                events_fio.close()
    elif req_type == 'update':
        events, add_event = \
            quake_info(input_dics['arc_update'], target='info')

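    # test mode replaces the number of station requests with input_dics['test_num']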
    if input_dics['test'] == 'Y':
        len_req_arc = input_dics['test_num']
    else:
        len_req_arc = len(Sta_req)

    ARC_serial_parallel(i, events, add_event, Sta_req, input_dics,
                        len_req_arc)

    if input_dics['resample_raw']:
        print '\nResample RAW traces to %sHz...' % input_dics['resample_raw'],
        resample_all(i=i, address_events=add_event,
                     des_sr=input_dics['resample_raw'],
                     resample_method=input_dics['resample_method'])
        print 'DONE'
    if input_dics['SAC'] == 'Y':
        print '\nConverting the MSEED files to SAC...',
        writesac_all(i=i, address_events=add_event)
        print 'DONE'

    try:
        len_sta_ev_open = open(os.path.join(add_event[i], 'info',
                                            'station_event'), 'r')
        len_sta_ev = len(len_sta_ev_open.readlines())
        len_sta_ev_open.close()
    except IOError:
        len_sta_ev = 'Can not open station_event file: %s' \
                     % (os.path.join(add_event[i], 'info', 'station_event'))

    ARC_reporter(i, add_event, events, input_dics, Sta_req, len_sta_ev,
                 req_type, t_wave_1)
Example #2
def ARC_waveform(input_dics, events, Sta_req, i, req_type):
    """
    Gets Waveforms, Response files and meta-data from ArcLink
    """
    t_wave_1 = datetime.now()

    add_event = []
    if req_type == 'save':
        period = '{0:s}_{1:s}'.\
            format(input_dics['min_date'].split('T')[0],
                   input_dics['max_date'].split('T')[0])
        eventpath = os.path.join(input_dics['datapath'], period)
        for k in range(len(events)):
            add_event.append(os.path.join(eventpath, events[k]['event_id']))
            if not os.path.isfile(
                    os.path.join(add_event[k], 'info', 'event.pkl')):
                events_fio = open(
                    os.path.join(add_event[k], 'info', 'event.pkl'), 'w')
                pickle.dump(events[k], events_fio)
                events_fio.close()
    elif req_type == 'update':
        events, add_event = \
            quake_info(input_dics['arc_update'], target='info')

    if input_dics['test'] == 'Y':
        len_req_arc = input_dics['test_num']
    else:
        len_req_arc = len(Sta_req)

    ARC_serial_parallel(i, events, add_event, Sta_req, input_dics, len_req_arc)

    if input_dics['resample_raw']:
        print '\nResample RAW traces to %sHz...' % input_dics['resample_raw'],
        resample_all(i=i,
                     address_events=add_event,
                     des_sr=input_dics['resample_raw'],
                     resample_method=input_dics['resample_method'])
        print 'DONE'
    if input_dics['SAC'] == 'Y':
        print '\nConverting the MSEED files to SAC...',
        writesac_all(i=i, address_events=add_event)
        print 'DONE'

    try:
        len_sta_ev_open = open(
            os.path.join(add_event[i], 'info', 'station_event'), 'r')
        len_sta_ev = len(len_sta_ev_open.readlines())
        len_sta_ev_open.close()
    except IOError:
        len_sta_ev = 'Can not open station_event file: %s' \
                     % (os.path.join(add_event[i], 'info', 'station_event'))

    ARC_reporter(i, add_event, events, input_dics, Sta_req, len_sta_ev,
                 req_type, t_wave_1)
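For reference, below is a minimal sketch of the input_dics keys that ARC_waveform reads in the two examples above. It is an assumption reconstructed from the code, not the toolbox's canonical configuration; all values are placeholders.

# Hypothetical input_dics sketch: keys taken from the ARC_waveform examples
# above; values are placeholders only.
input_dics = {
    'min_date': '2011-01-01T00:00:00',  # split on 'T' to build the period folder name
    'max_date': '2011-02-01T00:00:00',
    'min_mag': 5.5,                     # magnitude bounds appear only in Example #1's folder name
    'max_mag': 9.9,
    'datapath': './arc_data',
    'arc_update': './arc_data/2011-01-01_2011-02-01',  # placeholder; used when req_type == 'update'
    'test': 'N',                        # 'Y' replaces the request count with test_num
    'test_num': 10,
    'resample_raw': False,              # or a target sampling rate in Hz
    'resample_method': 'decimate',      # placeholder value
    'SAC': 'Y',                         # 'Y' converts retrieved MSEED files to SAC
}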
Example #3
def FDSN_waveform(input_dics, events, Sta_req, i, req_type):
    """
    Gets Waveforms, StationXML files and meta-data from FDSN
    :param input_dics:
    :param events:
    :param Sta_req:
    :param i:
    :param req_type:
    :return:
    """
    t_wave_1 = datetime.now()

    add_event = []
    if req_type == 'save':
        period = '{0:s}_{1:s}'.\
            format(input_dics['min_date'].split('T')[0],
                   input_dics['max_date'].split('T')[0])
        eventpath = os.path.join(input_dics['datapath'], period)
        for k in range(len(events)):
            add_event.append(os.path.join(eventpath, events[k]['event_id']))
            events_fio = open(os.path.join(add_event[k],
                                           'info', 'event.pkl'), 'w')
            pickle.dump(events[k], events_fio)
            events_fio.close()
    elif req_type == 'update':
        events, add_event = \
            quake_info(input_dics['fdsn_update'], target='info')

    if input_dics['test'] == 'Y':
        len_req_fdsn = input_dics['test_num']
    else:
        len_req_fdsn = len(Sta_req)

    if input_dics['fdsn_bulk'] == 'Y':
        t11 = datetime.now()
        try:
            FDSN_bulk_request(i, add_event, input_dics)
        except Exception as e:
            print 'WARNING: %s' % e
        print 'DONE'

        # The following parameter is set to 'N' to avoid retrieving the
        # waveforms twice: with bulk requests, the waveforms themselves are
        # retrieved in bulk, but response/StationXML files and metadata
        # are not.
        input_dics['waveform'] = 'N'
        t22 = datetime.now()
        print '\nbulkdataselect request is done for event: %s/%s in %s' \
              % (i+1, len(events), t22-t11)

    FDSN_serial_parallel(i, events, add_event, Sta_req, input_dics,
                         len_req_fdsn)

    if input_dics['resample_raw']:
        print '\nResample RAW traces to %sHz...' % input_dics['resample_raw'],
        resample_all(i=i, address_events=add_event,
                     des_sr=input_dics['resample_raw'],
                     resample_method=input_dics['resample_method'])
        print 'DONE'
    if input_dics['SAC'] == 'Y':
        print '\nConverting the MSEED files to SAC...',
        writesac_all(i=i, address_events=add_event)
        print 'DONE'

    try:
        len_sta_ev_open = open(os.path.join(add_event[i], 'info',
                                            'station_event'), 'r')
        len_sta_ev = len(len_sta_ev_open.readlines())
        len_sta_ev_open.close()
    except IOError:
        len_sta_ev = 'Can not open station_event file: %s' \
                     % (os.path.join(add_event[i], 'info', 'station_event'))

    FDSN_reporter(i, add_event, events, input_dics, Sta_req, len_sta_ev,
                  req_type, t_wave_1)
Example #4
def FDSN_waveform(input_dics, events, Sta_req, i, req_type):
    """
    Gets Waveforms, StationXML files and meta-data from FDSN
    :param input_dics:
    :param events:
    :param Sta_req:
    :param i:
    :param req_type:
    :return:
    """
    t_wave_1 = datetime.now()

    add_event = []
    if req_type == 'save':
        period = '{0:s}_{1:s}'.\
            format(input_dics['min_date'].split('T')[0],
                   input_dics['max_date'].split('T')[0])
        eventpath = os.path.join(input_dics['datapath'], period)
        for k in range(len(events)):
            add_event.append(os.path.join(eventpath, events[k]['event_id']))
            events_fio = open(os.path.join(add_event[k], 'info', 'event.pkl'),
                              'w')
            pickle.dump(events[k], events_fio)
            events_fio.close()
    elif req_type == 'update':
        events, add_event = \
            quake_info(input_dics['fdsn_update'], target='info')

    if input_dics['test'] == 'Y':
        len_req_fdsn = input_dics['test_num']
    else:
        len_req_fdsn = len(Sta_req)

    if input_dics['fdsn_bulk'] == 'Y':
        t11 = datetime.now()
        try:
            FDSN_bulk_request(i, add_event, input_dics)
        except Exception as e:
            print 'WARNING: %s' % e
        print 'DONE'

        # The following parameter is set to 'N' to avoid retrieving the
        # waveforms twice: with bulk requests, the waveforms themselves are
        # retrieved in bulk, but response/StationXML files and metadata
        # are not.
        input_dics['waveform'] = 'N'
        t22 = datetime.now()
        print '\nbulkdataselect request is done for event: %s/%s in %s' \
              % (i+1, len(events), t22-t11)

    FDSN_serial_parallel(i, events, add_event, Sta_req, input_dics,
                         len_req_fdsn)

    if input_dics['resample_raw']:
        print '\nResample RAW traces to %sHz...' % input_dics['resample_raw'],
        resample_all(i=i,
                     address_events=add_event,
                     des_sr=input_dics['resample_raw'],
                     resample_method=input_dics['resample_method'])
        print 'DONE'
    if input_dics['SAC'] == 'Y':
        print '\nConverting the MSEED files to SAC...',
        writesac_all(i=i, address_events=add_event)
        print 'DONE'

    try:
        len_sta_ev_open = open(
            os.path.join(add_event[i], 'info', 'station_event'), 'r')
        len_sta_ev = len(len_sta_ev_open.readlines())
        len_sta_ev_open.close()
    except IOError:
        len_sta_ev = 'Can not open station_event file: %s' \
                     % (os.path.join(add_event[i], 'info', 'station_event'))

    FDSN_reporter(i, add_event, events, input_dics, Sta_req, len_sta_ev,
                  req_type, t_wave_1)
Example #5
def FDSN_waveform(input_dics, events, Sta_req, i, req_type):
    """
    Gets Waveforms, StationXML files and meta-data from FDSN
    :param input_dics:
    :param events:
    :param Sta_req:
    :param i:
    :param req_type:
    :return:
    """
    t_wave_1 = datetime.now()

    add_event = []
    if req_type == "save":
        period = "{0:s}_{1:s}".format(input_dics["min_date"].split("T")[0], input_dics["max_date"].split("T")[0])
        eventpath = os.path.join(input_dics["datapath"], period)
        for k in range(len(events)):
            add_event.append(os.path.join(eventpath, events[k]["event_id"]))
            events_fio = open(os.path.join(add_event[k], "info", "event.pkl"), "w")
            pickle.dump(events[k], events_fio)
            events_fio.close()
    elif req_type == "update":
        events, add_event = quake_info(input_dics["fdsn_update"], target="info")

    if input_dics["test"] == "Y":
        len_req_fdsn = input_dics["test_num"]
    else:
        len_req_fdsn = len(Sta_req)

    if input_dics["fdsn_bulk"] == "Y":
        t11 = datetime.now()
        try:
            FDSN_bulk_request(i, add_event, input_dics)
        except Exception as e:
            print "WARNING: %s" % e
        print "DONE"

        # The following parameter is set to 'N' to avoid retrieving the
        # waveforms twice: with bulk requests, the waveforms themselves are
        # retrieved in bulk, but response/StationXML files and metadata
        # are not.
        input_dics["waveform"] = "N"
        t22 = datetime.now()
        print "\nbulkdataselect request is done for event: %s/%s in %s" % (i + 1, len(events), t22 - t11)

    FDSN_serial_parallel(i, events, add_event, Sta_req, input_dics, len_req_fdsn)

    if input_dics["resample_raw"]:
        print "\nResample RAW traces to %sHz..." % input_dics["resample_raw"],
        resample_all(
            i=i,
            address_events=add_event,
            des_sr=input_dics["resample_raw"],
            resample_method=input_dics["resample_method"],
        )
        print "DONE"
    if input_dics["SAC"] == "Y":
        print "\nConverting the MSEED files to SAC...",
        writesac_all(i=i, address_events=add_event)
        print "DONE"

    try:
        len_sta_ev_open = open(os.path.join(add_event[i], "info", "station_event"), "r")
        len_sta_ev = len(len_sta_ev_open.readlines())
        len_sta_ev_open.close()
    except IOError:
        len_sta_ev = "Can not open station_event file: %s" % (os.path.join(add_event[i], "info", "station_event"))

    FDSN_reporter(i, add_event, events, input_dics, Sta_req, len_sta_ev, req_type, t_wave_1)
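As a self-contained follow-up, here is one way the event.pkl files written by the 'save' branches above could be read back. load_event_pkl is a hypothetical helper, not part of the toolbox; the path layout (datapath/period/event_id/info/event.pkl) is taken directly from the examples.

import os
import pickle


def load_event_pkl(datapath, period, event_id):
    """Read back one event dictionary dumped by the waveform examples above."""
    pkl_path = os.path.join(datapath, period, event_id, 'info', 'event.pkl')
    # text mode matches the 'w' mode used with pickle.dump in the examples
    with open(pkl_path, 'r') as events_fio:
        return pickle.load(events_fio)

# usage (hypothetical paths):
# event = load_event_pkl('./arc_data', '2011-01-01_2011-02-01', 'event_1')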