def print_syngine_models():
    """
    Print the catalogue of available syngine models, then exit.

    Fetches the model list from the IRIS syngine service via obspy's
    syngine client, prints a short numbered index followed by a detailed
    per-model listing, and terminates the process with ``sys.exit()``.
    The model names printed here are valid values for the
    --syngine_bg_model option.

    :return: does not return; exits the process
    """
    print("\n------------------------")
    print("Available syngine models")
    print("------------------------\n")
    from obspy.clients.syngine import Client as Client_syngine
    avail_syngine_models = Client_syngine().get_available_models()
    model_names = list(avail_syngine_models.keys())
    # Short numbered index first.
    for num, model_name in enumerate(model_names, start=1):
        print('%s. %s' % (num, model_name))
    print('\n')
    # Then the full per-model detail listing.
    for num, model_name in enumerate(model_names, start=1):
        print("------------------------")
        print('%s. %s' % (num, model_name))
        for info_key, info_val in avail_syngine_models[model_name].items():
            print("%s: %s" % (info_key, info_val))
    print("\n============================================================")
    print("This is the list of all available syngine models that can be")
    print("used for --syngine_bg_model option.")
    print("============================================================")
    sys.exit()
def arc_serial_parallel(stas_avail, event, input_dics, target_path,
                        req_cli, info_event):
    """
    Retrieve waveform data from ArcLink, serially or in parallel.

    In parallel mode, one ``multiprocessing.Process`` is created per
    available station and at most ``input_dics['req_np']`` processes run
    concurrently; the function returns once all of them have finished.
    In serial mode the stations are downloaded one after another.

    :param stas_avail: iterable of available stations to request
    :param event: event for which data is retrieved
    :param input_dics: dictionary of user inputs/options
    :param target_path: directory in which retrieved data is stored
    :param req_cli: name of the requested client (used in log messages)
    :param info_event: event counter string used in progress messages
    :return: None
    """
    import time
    print('%s -- event: %s' % (req_cli, target_path))
    client_arclink = Client_arclink(user=input_dics['username_arclink'],
                                    host=input_dics['host_arclink'],
                                    port=input_dics['port_arclink'],
                                    password=input_dics['password_arclink'],
                                    timeout=input_dics['arc_wave_timeout'])
    client_syngine = Client_syngine()
    if input_dics['req_parallel']:
        par_jobs = []
        for st_counter, st_avail in enumerate(stas_avail, start=1):
            info_station = '[%s-%s/%s]' % (info_event, st_counter,
                                           len(stas_avail))
            par_jobs.append(multiprocessing.Process(
                target=arc_download_core,
                args=(st_avail, event, input_dics, target_path,
                      client_arclink, client_syngine,
                      req_cli, info_station)))
        # Throttle: start each job only when fewer than req_np of the
        # already-started jobs are still alive.
        started = []
        for job_idx in range(len(par_jobs)):
            while True:
                n_alive = sum(1 for idx in started
                              if par_jobs[idx].is_alive())
                if n_alive < input_dics['req_np']:
                    break
                # Short sleep instead of a 100%-CPU busy-wait while
                # polling for a free slot.
                time.sleep(0.1)
            par_jobs[job_idx].start()
            started.append(job_idx)
        # Block until every started process has finished (replaces the
        # previous is_alive() polling loop).
        for job in par_jobs:
            job.join()
    else:
        for st_counter, st_avail in enumerate(stas_avail, start=1):
            info_station = '[%s-%s/%s]' % (info_event, st_counter,
                                           len(stas_avail))
            arc_download_core(st_avail, event, input_dics, target_path,
                              client_arclink, client_syngine,
                              req_cli, info_station)
def fdsn_serial_parallel(stas_avail, event, input_dics, target_path,
                         req_cli, info_event):
    """
    Retrieve waveform data from an FDSN datacenter, serially or in parallel.

    In parallel mode, one ``multiprocessing.Process`` is created per
    available station and at most ``req_np`` processes run concurrently.
    After retrieval the station_event info file is updated and, for bulk
    requests, filtered down to the stations actually saved under
    ``<target_path>/raw``.

    :param stas_avail: iterable of available stations to request
    :param event: event for which data is retrieved
    :param input_dics: dictionary of user inputs/options (mutated: sets
                       ``input_dics['waveform'] = True`` for bulk requests)
    :param target_path: directory in which retrieved data is stored
    :param req_cli: base URL/name of the requested FDSN client
    :param info_event: event counter string used in progress messages
    :return: None
    """
    import time
    print('%s -- event: %s' % (req_cli, target_path))
    client_fdsn = Client_fdsn(base_url=req_cli,
                              user=input_dics['username'],
                              password=input_dics['password'])
    # debug=True
    client_syngine = Client_syngine()
    if input_dics['req_parallel']:
        if input_dics['password']:
            print("[INFO] Restricted data from %s" % req_cli)
            # NOTE(review): the two warnings below contradict each other;
            # parallel mode stays enabled (the forced num_req_np = 1 is
            # commented out), so the "serial retrieving" message looks
            # stale -- confirm intent before changing the output.
            print("[WARNING] parallel retrieving is now possible!")
            print("[WARNING] serial retrieving is activated!")
            # num_req_np = 1
            num_req_np = input_dics['req_np']
        else:
            num_req_np = input_dics['req_np']
        par_jobs = []
        for st_counter, st_avail in enumerate(stas_avail, start=1):
            info_station = '[%s-%s/%s]' % (info_event, st_counter,
                                           len(stas_avail))
            par_jobs.append(multiprocessing.Process(
                target=fdsn_download_core,
                args=(st_avail, event, input_dics, target_path,
                      client_fdsn, client_syngine,
                      req_cli, info_station)))
        # Throttle: start each job only when fewer than num_req_np of the
        # already-started jobs are still alive.
        started = []
        for job_idx in range(len(par_jobs)):
            while True:
                n_alive = sum(1 for idx in started
                              if par_jobs[idx].is_alive())
                if n_alive < num_req_np:
                    break
                # Short sleep instead of a 100%-CPU busy-wait while
                # polling for a free slot.
                time.sleep(0.1)
            par_jobs[job_idx].start()
            started.append(job_idx)
        # Block until every started process has finished (replaces the
        # previous is_alive() polling loop).
        for job in par_jobs:
            job.join()
    else:
        for st_counter, st_avail in enumerate(stas_avail, start=1):
            info_station = '[%s-%s/%s]' % (info_event, st_counter,
                                           len(stas_avail))
            fdsn_download_core(st_avail, event, input_dics, target_path,
                               client_fdsn, client_syngine,
                               req_cli, info_station)
    update_sta_ev_file(target_path, event)
    if input_dics['bulk']:
        input_dics['waveform'] = True
        sta_saved_path = glob.glob(
            os.path.join(target_path, 'raw', '*.*.*.*'))
        print('\n[INFO] adjusting the station_event file for bulk request...',
              end='')
        # Use a set for O(1) membership tests (was a list scanned per line).
        sta_saved = set(os.path.basename(path) for path in sta_saved_path)
        staev_path = os.path.join(target_path, 'info', 'station_event')
        sta_ev_new = []
        staev_input = fileinput.FileInput(staev_path)
        try:
            for line in staev_input:
                net, sta, loc, cha = line.split(',')[:4]
                # Keep only lines whose net.sta.loc.cha was actually saved.
                if '%s.%s.%s.%s' % (net, sta, loc, cha) in sta_saved:
                    sta_ev_new.append(line)
        finally:
            staev_input.close()
        # Context manager guarantees the rewritten file is closed.
        with open(staev_path, 'wt') as file_staev_open:
            file_staev_open.writelines(sta_ev_new)
        print('DONE')