Example #1
def fdsn_bulk_request(target_path, req_cli, input_dics):
    """
    send bulk request to FDSN
    :param target_path:
    :param req_cli:
    :param input_dics:
    :return:
    """
    print('\n[INFO] sending bulk request to: %s' % req_cli)

    client_fdsn = Client_fdsn(base_url=req_cli,
                              user=input_dics['username'],
                              password=input_dics['password'])

    bulk_list_fio = open(
        os.path.join(target_path, 'info', 'bulkdata_list_%s' % req_cli), 'rb')
    bulk_list = pickle.load(bulk_list_fio)
    bulk_smgrs = client_fdsn.get_waveforms_bulk(bulk_list)
    print('[INFO] saving the retrieved waveforms from %s...' % req_cli)
    for bulk_st in bulk_smgrs:
        bulk_st.write(
            os.path.join(
                target_path, 'raw', '%s.%s.%s.%s' %
                (bulk_st.stats['network'], bulk_st.stats['station'],
                 bulk_st.stats['location'], bulk_st.stats['channel'])),
            'MSEED')
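
For reference, a minimal, self-contained sketch of the same idea using ObsPy's FDSN client directly: build a small bulk request list, fetch all waveforms in one call with get_waveforms_bulk, and write each trace to MiniSEED. The data center ("IRIS"), the network/station/location/channel codes, the time window, and the output directory are placeholder values, not taken from the example above.

import os

from obspy import UTCDateTime
from obspy.clients.fdsn import Client

# placeholder data center and time window
client = Client("IRIS")
t1 = UTCDateTime("2014-01-01T00:00:00")
t2 = t1 + 600

# each bulk entry: (network, station, location, channel, starttime, endtime)
bulk = [("IU", "ANMO", "00", "BHZ", t1, t2),
        ("IU", "CCM", "00", "BHZ", t1, t2)]

st = client.get_waveforms_bulk(bulk)

out_dir = "raw"
os.makedirs(out_dir, exist_ok=True)
for tr in st:
    # one MiniSEED file per trace, named net.sta.loc.cha as in the example above
    fname = "%s.%s.%s.%s" % (tr.stats.network, tr.stats.station,
                             tr.stats.location, tr.stats.channel)
    tr.write(os.path.join(out_dir, fname), format="MSEED")
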
Example #2
def FDSN_bulk_request(i, add_event, input_dics):
    """
    Send bulk request to FDSN
    """
    print '\nSending bulk request to FDSN: %s' % input_dics['fdsn_base_url']
    client_fdsn = Client_fdsn(base_url=input_dics['fdsn_base_url'],
                              user=input_dics['fdsn_user'],
                              password=input_dics['fdsn_pass'])
    bulk_list_fio = open(os.path.join(add_event[i], 'info', 'bulkdata_list'))
    bulk_list = pickle.load(bulk_list_fio)
    bulk_smgrs = client_fdsn.get_waveforms_bulk(bulk_list)
    print 'Saving the retrieved waveforms...',
    for bulk_st in bulk_smgrs:
        bulk_st.write(
            os.path.join(
                add_event[i], 'BH_RAW', '%s.%s.%s.%s' %
                (bulk_st.stats['network'], bulk_st.stats['station'],
                 bulk_st.stats['location'], bulk_st.stats['channel'])),
            'MSEED')
Example #3
def fdsn_serial_parallel(stas_avail, event, input_dics, target_path, req_cli,
                         info_event):
    """
    retrieving data from FDSN
    :param stas_avail:
    :param event:
    :param input_dics:
    :param target_path:
    :param req_cli:
    :param info_event:
    :return:
    """
    print('%s -- event: %s' % (req_cli, target_path))

    client_fdsn = Client_fdsn(base_url=req_cli,
                              user=input_dics['username'],
                              password=input_dics['password'])
    #debug=True)
    client_syngine = Client_syngine()

    if input_dics['req_parallel']:
        if input_dics['password']:
            print("[INFO] Restricted data from %s" % req_cli)
            print("[WARNING] parallel retrieving is now possible!")
            print("[WARNING] serial retrieving is activated!")
            # num_req_np = 1
            num_req_np = input_dics['req_np']
        else:
            num_req_np = input_dics['req_np']
        par_jobs = []
        st_counter = 0
        for st_avail in stas_avail:
            st_counter += 1
            info_station = '[%s-%s/%s]' % (info_event, st_counter,
                                           len(stas_avail))
            p = multiprocessing.Process(
                target=fdsn_download_core,
                args=(st_avail, event, input_dics, target_path, client_fdsn,
                      client_syngine, req_cli, info_station))
            par_jobs.append(p)

        sub_par_jobs = []
        for l in range(len(par_jobs)):
            counter = num_req_np
            while counter >= num_req_np:
                counter = 0
                for ll in range(len(sub_par_jobs)):
                    if par_jobs[sub_par_jobs[ll]].is_alive():
                        counter += 1
            par_jobs[l].start()
            sub_par_jobs.append(l)

        counter = num_req_np
        while counter > 0:
            counter = 0
            for ll in range(len(par_jobs)):
                if par_jobs[ll].is_alive():
                    counter += 1
    else:
        st_counter = 0
        for st_avail in stas_avail:
            st_counter += 1
            info_station = '[%s-%s/%s]' % (info_event, st_counter,
                                           len(stas_avail))
            fdsn_download_core(st_avail, event, input_dics, target_path,
                               client_fdsn, client_syngine, req_cli,
                               info_station)

    update_sta_ev_file(target_path, event)

    if input_dics['bulk']:
        input_dics['waveform'] = True
        sta_saved_path = glob.glob(os.path.join(target_path, 'raw', '*.*.*.*'))
        print('\n[INFO] adjusting the station_event file for bulk request...',
              end='')

        sta_saved_list = []
        for sta_num in range(len(sta_saved_path)):
            sta_saved_list.append(os.path.basename(sta_saved_path[sta_num]))

        sta_ev_new = []
        for line in fileinput.FileInput(
                os.path.join(target_path, 'info', 'station_event')):
            line_split = line.split(',')
            if not '%s.%s.%s.%s' \
                    % (line_split[0], line_split[1], line_split[2],
                       line_split[3]) in sta_saved_list:
                pass
            else:
                sta_ev_new.append(line)

        file_staev_open = open(
            os.path.join(target_path, 'info', 'station_event'), 'wt')
        file_staev_open.writelines(sta_ev_new)
        file_staev_open.close()
        print('DONE')
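
The parallel branch above throttles the number of simultaneously running download processes with a busy-wait loop: each new job is started only once fewer than num_req_np of the already-started jobs are still alive, and a final loop waits for the last batch to finish. A condensed, self-contained sketch of that pattern with a dummy worker (the worker function, the job count, and the short sleeps are illustrative additions, not part of obspyDMT):

import multiprocessing
import time


def dummy_download(job_id):
    # stand-in for fdsn_download_core
    time.sleep(1)
    print("finished job %s" % job_id)


if __name__ == "__main__":
    num_req_np = 4      # maximum number of simultaneously alive processes
    par_jobs = [multiprocessing.Process(target=dummy_download, args=(j,))
                for j in range(10)]

    sub_par_jobs = []
    for idx in range(len(par_jobs)):
        counter = num_req_np
        while counter >= num_req_np:
            # count how many of the already-started jobs are still alive
            counter = sum(par_jobs[k].is_alive() for k in sub_par_jobs)
            time.sleep(0.01)    # keep the busy-wait from pinning a core
        par_jobs[idx].start()
        sub_par_jobs.append(idx)

    # wait until every started job has finished
    while any(p.is_alive() for p in par_jobs):
        time.sleep(0.1)
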
Example #4
def event_info(input_dics):
    """
    get event(s) info for event_based request
    :param input_dics:
    :return:
    """
    try:
        evlatmin = input_dics['evlatmin']
        evlatmax = input_dics['evlatmax']
        evlonmin = input_dics['evlonmin']
        evlonmax = input_dics['evlonmax']

        evlat = input_dics['evlat']
        evlon = input_dics['evlon']
        evradmin = input_dics['evradmin']
        evradmax = input_dics['evradmax']

        event_switch = 'fdsn'
        event_url = input_dics['event_catalog']
        if input_dics['read_catalog']:
            event_switch = 'local'
        event_fdsn_cat = None

        if event_url.lower() == 'gcmt_combo':
            event_switch = 'gcmt_combo'
        if event_url.lower() == 'neic_usgs':
            event_switch = 'neic_usgs'
        if event_url.lower() == 'isc':
            event_switch = 'isc_cat'

        print('\nEvent(s) are based on:\t%s' % input_dics['event_catalog'])

        if event_switch == 'fdsn':
            client_fdsn = Client_fdsn(base_url=event_url)
            events_QML = client_fdsn.get_events(
                minlatitude=evlatmin,
                maxlatitude=evlatmax,
                minlongitude=evlonmin,
                maxlongitude=evlonmax,
                latitude=evlat,
                longitude=evlon,
                minradius=evradmin,
                maxradius=evradmax,
                mindepth=input_dics['min_depth'],
                maxdepth=input_dics['max_depth'],
                starttime=input_dics['min_date'],
                endtime=input_dics['max_date'],
                minmagnitude=input_dics['min_mag'],
                maxmagnitude=input_dics['max_mag'],
                orderby='time-asc',
                catalog=event_fdsn_cat,
                magnitudetype=input_dics['mag_type'],
                includeallorigins=None,
                includeallmagnitudes=None,
                includearrivals=None,
                eventid=None,
                limit=None,
                offset=None,
                contributor=None,
                updatedafter=None)

        elif event_switch == 'gcmt_combo':
            events_QML = \
                gcmt_catalog(input_dics['min_date'],
                             input_dics['max_date'],
                             evlatmin, evlatmax, evlonmin, evlonmax,
                             evlat, evlon, evradmin, evradmax,
                             input_dics['min_depth'],
                             input_dics['max_depth'],
                             input_dics['min_mag'],
                             input_dics['max_mag'])

        elif event_switch == 'neic_usgs':
            events_QML = \
                neic_catalog_urllib(input_dics['min_date'],
                                    input_dics['max_date'],
                                    evlatmin, evlatmax, evlonmin, evlonmax,
                                    evlat, evlon, evradmin, evradmax,
                                    input_dics['min_depth'],
                                    input_dics['max_depth'],
                                    input_dics['min_mag'],
                                    input_dics['max_mag'])

        elif event_switch == 'isc_cat':
            events_QML = \
                isc_catalog(bot_lat=evlatmin, top_lat=evlatmax,
                            left_lon=evlonmin, right_lon=evlonmax,
                            ctr_lat=evlat, ctr_lon=evlon,
                            radius=evradmax,
                            start_time=input_dics['min_date'],
                            end_time=input_dics['max_date'],
                            min_dep=input_dics['min_depth'],
                            max_dep=input_dics['max_depth'],
                            min_mag=input_dics['min_mag'],
                            max_mag=input_dics['max_mag'],
                            mag_type=input_dics['mag_type'],
                            req_mag_agcy='Any',
                            rev_comp=input_dics['isc_rev_comp'])

        elif event_switch == 'local':
            events_QML = readEvents(input_dics['read_catalog'])

        else:
            sys.exit('[ERROR] %s is not supported' %
                     input_dics['event_catalog'])

        for i in range(len(events_QML)):
            if not hasattr(events_QML.events[i], 'preferred_mag'):
                events_QML.events[i].preferred_mag = \
                    events_QML.events[i].magnitudes[0].mag
                events_QML.events[i].preferred_mag_type = \
                    events_QML.events[i].magnitudes[0].magnitude_type
                events_QML.events[i].preferred_author = 'None'
            else:
                if not hasattr(events_QML.events[i], 'preferred_author'):
                    if events_QML.events[i].preferred_magnitude(
                    ).creation_info:
                        events_QML.events[i].preferred_author = \
                            events_QML.events[i].preferred_magnitude().creation_info.author
                    elif events_QML.events[i].magnitudes[0].creation_info:
                        events_QML.events[i].preferred_author = \
                            events_QML.events[i].magnitudes[0].creation_info.author
        # no matter if list was passed or requested, sort catalogue,
        # plot events and proceed
        events_QML = sort_catalogue(events_QML)
        events = qml_to_event_list(events_QML)

    except Exception as error:
        print(60 * '-')
        print('[WARNING] %s' % error)
        print(60 * '-')
        events = []
        events_QML = []

    for i in range(len(events)):
        events[i]['t1'] = events[i]['datetime'] - input_dics['preset']
        events[i]['t2'] = events[i]['datetime'] + input_dics['offset']

    return events, events_QML
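
Stripped of the alternative catalogues and the preferred-magnitude bookkeeping, the FDSN branch of event_info is a get_events query followed by attaching a time window around each origin time (t1 = origin - preset, t2 = origin + offset). A minimal sketch with ObsPy; the data center ("IRIS"), the search parameters, and the preset/offset values are placeholders:

from obspy import UTCDateTime
from obspy.clients.fdsn import Client

client = Client("IRIS")                  # placeholder event catalogue
catalog = client.get_events(
    starttime=UTCDateTime("2014-01-01"),
    endtime=UTCDateTime("2014-02-01"),
    minmagnitude=6.5,
    orderby="time-asc")

preset, offset = 0.0, 1800.0             # seconds before/after the origin time
events = []
for event in catalog:
    origin = event.preferred_origin() or event.origins[0]
    mag = event.preferred_magnitude() or event.magnitudes[0]
    events.append({"datetime": origin.time,
                   "magnitude": mag.mag,
                   "t1": origin.time - preset,
                   "t2": origin.time + offset})

print("%s event(s) found" % len(events))
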
Example #5
def fdsn_available(input_dics, cl, event, target_path):
    """
    check the availablity of FDSN stations
    :param input_dics:
    :param cl:
    :param event:
    :param target_path:
    :return:
    """
    print("check the availability: %s" % input_dics['data_source'][cl])

    if input_dics['username_fdsn']:
        include_restricted = True
    else:
        include_restricted = None

    sta_fdsn = []
    try:
        client_fdsn = Client_fdsn(
            base_url=input_dics['data_source'][cl].upper(),
            user=input_dics['username_fdsn'],
            password=input_dics['password_fdsn'])

        available = client_fdsn.get_stations(
            network=input_dics['net'],
            station=input_dics['sta'],
            location=input_dics['loc'],
            channel=input_dics['cha'],
            starttime=event['t1'],
            endtime=event['t2'],
            latitude=input_dics['lat_cba'],
            longitude=input_dics['lon_cba'],
            minradius=input_dics['mr_cba'],
            maxradius=input_dics['Mr_cba'],
            minlatitude=input_dics['mlat_rbb'],
            maxlatitude=input_dics['Mlat_rbb'],
            minlongitude=input_dics['mlon_rbb'],
            maxlongitude=input_dics['Mlon_rbb'],
            includerestricted=include_restricted,
            level='channel')

        for network in available.networks:
            for station in network:
                for channel in station:
                    st_id = '%s_%s_%s_%s' % (network.code, station.code,
                                             channel.location_code,
                                             channel.code)
                    sta_fdsn.append([
                        network.code, station.code, channel.location_code,
                        channel.code, channel.latitude, channel.longitude,
                        channel.elevation, channel.depth,
                        input_dics['data_source'][cl], st_id, channel.azimuth,
                        channel.dip
                    ])

        if input_dics['bulk']:
            print('creating a list for bulk request...')
            bulk_list = []
            for bulk_sta in sta_fdsn:
                if input_dics['cut_time_phase']:
                    t_start, t_end = calculate_time_phase(event, bulk_sta)
                else:
                    t_start = event['t1']
                    t_end = event['t2']
                bulk_list.append((bulk_sta[0], bulk_sta[1], bulk_sta[2],
                                  bulk_sta[3], t_start, t_end))

            bulk_list_fio = open(
                os.path.join(
                    target_path, 'info',
                    'bulkdata_list_%s' % input_dics['data_source'][cl]), 'ab+')
            pickle.dump(bulk_list, bulk_list_fio, protocol=2)
            bulk_list_fio.close()

    except Exception as error:
        exc_file = open(os.path.join(target_path, 'info', 'exception'), 'at+')
        ee = 'availability -- %s -- %s\n' % (input_dics['data_source'][cl],
                                             error)
        exc_file.writelines(ee)
        exc_file.close()
        print('ERROR: %s' % ee)
        return []

    if len(sta_fdsn) == 0:
        sta_fdsn = []
    sta_fdsn.sort()
    return sta_fdsn
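
The availability check reduces to a get_stations call at channel level, flattening the returned inventory into one row per channel, and, in bulk mode, pickling the matching (net, sta, loc, cha, t1, t2) tuples for the later get_waveforms_bulk call. A self-contained sketch; the data center, the station/channel selection, the time window, and the output file name are placeholder values:

import pickle

from obspy import UTCDateTime
from obspy.clients.fdsn import Client

client = Client("IRIS")                  # placeholder data center
t1 = UTCDateTime("2014-01-01T00:00:00")
t2 = t1 + 1800

inventory = client.get_stations(network="IU", station="ANMO,CCM",
                                location="00", channel="BHZ",
                                starttime=t1, endtime=t2, level="channel")

sta_rows, bulk_list = [], []
for network in inventory.networks:
    for station in network:
        for channel in station:
            sta_rows.append([network.code, station.code,
                             channel.location_code, channel.code,
                             channel.latitude, channel.longitude,
                             channel.elevation, channel.depth])
            bulk_list.append((network.code, station.code,
                              channel.location_code, channel.code, t1, t2))

# persist the bulk request list, as the example above does per data source
with open("bulkdata_list_IRIS", "ab+") as bulk_list_fio:
    pickle.dump(bulk_list, bulk_list_fio, protocol=2)

print("%s channel(s) available" % len(sta_rows))
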
Example #6
def FDSN_serial_parallel(i, events, add_event, Sta_req, input_dics,
                         len_req_fdsn):
    """
    FDSN serial/parallel request
    """
    dic = {}
    print '\nFDSN-Event: %s/%s -- %s' % (i + 1, len(events), add_event[i])

    client_fdsn = Client_fdsn(base_url=input_dics['fdsn_base_url'],
                              user=input_dics['fdsn_user'],
                              password=input_dics['fdsn_pass'])
    if input_dics['req_parallel'] == 'Y':
        print "Parallel request with %s processes.\n" % input_dics['req_np']
        par_jobs = []
        for j in range(len_req_fdsn):
            p = multiprocessing.Process(target=FDSN_download_core,
                                        args=(
                                            i,
                                            j,
                                            dic,
                                            len(events),
                                            events,
                                            add_event,
                                            Sta_req,
                                            input_dics,
                                            client_fdsn,
                                        ))
            par_jobs.append(p)
        sub_par_jobs = []
        for l in range(len(par_jobs)):
            counter = input_dics['req_np']
            while counter >= input_dics['req_np']:
                counter = 0
                for ll in range(len(sub_par_jobs)):
                    if par_jobs[sub_par_jobs[ll]].is_alive():
                        counter += 1
            par_jobs[l].start()
            sub_par_jobs.append(l)

        counter = input_dics['req_np']
        while counter > 0:
            counter = 0
            for ll in range(len(par_jobs)):
                if par_jobs[ll].is_alive():
                    counter += 1
    else:
        for j in range(len_req_fdsn):
            FDSN_download_core(i=i,
                               j=j,
                               dic=dic,
                               len_events=len(events),
                               events=events,
                               add_event=add_event,
                               Sta_req=Sta_req,
                               input_dics=input_dics,
                               client_fdsn=client_fdsn)

    if input_dics['fdsn_bulk'] == 'Y':
        input_dics['waveform'] = 'Y'
        sta_saved_path = glob.glob(
            os.path.join(add_event[i], 'BH_RAW', '*.*.*.*'))
        print '\nAdjusting the station_event file...',

        sta_saved_list = []
        for sta_num in range(len(sta_saved_path)):
            sta_saved_list.append(os.path.basename(sta_saved_path[sta_num]))

        sta_ev_new = []
        for line in fileinput.FileInput(
                os.path.join(add_event[i], 'info', 'station_event')):
            line_split = line.split(',')
            if not '%s.%s.%s.%s' \
                    % (line_split[0], line_split[1], line_split[2],
                       line_split[3]) in sta_saved_list:
                pass
            else:
                sta_ev_new.append(line)

        file_staev_open = open(
            os.path.join(add_event[i], 'info', 'station_event'), 'w')
        file_staev_open.writelines(sta_ev_new)
        file_staev_open.close()
        print 'DONE'
Example #7
def FDSN_available(input_dics, event, target_path, event_number):
    """
    Check the availablity of FDSN stations
    :param input_dics:
    :param event:
    :param target_path:
    :param event_number:
    :return:
    """
    print "Check the availablity of FDSN stations: %s" \
          % input_dics['fdsn_base_url']
    client_fdsn = Client_fdsn(base_url=input_dics['fdsn_base_url'],
                              user=input_dics['fdsn_user'],
                              password=input_dics['fdsn_pass'])
    Sta_fdsn = []
    try:
        if input_dics['fdsn_base_url'].lower() in ['resif']:
            # start_time = None
            # end_time = None
            start_time = event['t1']
            end_time = event['t2']
        else:
            start_time = event['t1']
            end_time = event['t2']
        available = client_fdsn.get_stations(
            network=input_dics['net'],
            station=input_dics['sta'],
            location=input_dics['loc'],
            channel=input_dics['cha'],
            starttime=start_time,
            endtime=end_time,
            latitude=input_dics['lat_cba'],
            longitude=input_dics['lon_cba'],
            minradius=input_dics['mr_cba'],
            maxradius=input_dics['Mr_cba'],
            minlatitude=input_dics['mlat_rbb'],
            maxlatitude=input_dics['Mlat_rbb'],
            minlongitude=input_dics['mlon_rbb'],
            maxlongitude=input_dics['Mlon_rbb'],
            level='channel')

        for network in available.networks:
            for station in network:
                for channel in station:
                    Sta_fdsn.append([
                        network.code, station.code, channel.location_code,
                        channel.code, channel.latitude, channel.longitude,
                        channel.elevation, channel.depth
                    ])
        if input_dics['fdsn_bulk'] == 'Y':
            if input_dics['fdsn_update'] != 'N':
                if os.path.exists(
                        os.path.join(target_path, 'info', 'bulkdata.txt')):
                    os.remove(os.path.join(target_path, 'info',
                                           'bulkdata.txt'))
            if os.path.exists(os.path.join(target_path, 'info',
                                           'bulkdata.txt')):
                print 'bulkdata.txt exists in the directory!'
            else:
                print 'Start creating a list for bulk request'
                bulk_list = []
                for bulk_sta in Sta_fdsn:
                    if input_dics['cut_time_phase']:
                        t_start, t_end = calculate_time_phase(event, bulk_sta)
                    else:
                        t_start = event['t1']
                        t_end = event['t2']
                    bulk_list.append((bulk_sta[0], bulk_sta[1], bulk_sta[2],
                                      bulk_sta[3], t_start, t_end))

                bulk_list_fio = open(
                    os.path.join(target_path, 'info', 'bulkdata_list'), 'a+')
                pickle.dump(bulk_list, bulk_list_fio)
                bulk_list_fio.close()
    except Exception as e:
        exc_file = open(os.path.join(target_path, 'info', 'exception'), 'a+')
        ee = 'fdsn -- Event: %s --- %s\n' % (str(event_number + 1), e)
        exc_file.writelines(ee)
        exc_file.close()
        print 'ERROR: %s' % ee

    if len(Sta_fdsn) == 0:
        Sta_fdsn.append([])
    Sta_fdsn.sort()
    return Sta_fdsn
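
One detail worth keeping in mind: the bulk list file is opened in append mode and pickle.dump is called once per availability run, so the file can end up holding several pickled lists, while a single pickle.load (as in Examples #1 and #2) returns only the first of them. A small sketch, assuming the same file layout, for reading every pickled list back:

import pickle


def load_all_bulk_lists(path):
    # read back every list that was pickle.dump-ed into one append-mode file
    bulk_lists = []
    with open(path, "rb") as fio:
        while True:
            try:
                bulk_lists.append(pickle.load(fio))
            except EOFError:
                break
    return bulk_lists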