# NOTE: module-level imports used by the functions below. The standard-library
# and third-party imports are unambiguous from the code; the remaining names
# (cfg, rxml, m, ish, ipsv, ipsv_alt, vg, kpsv, ksh) are project-local modules
# of this package and are assumed to be importable here.
import os
from glob import glob

import numpy as np
import matplotlib.pyplot as plt
from mpi4py import MPI
from obspy import UTCDateTime


def download_resp(xmlinput):
    """
    Tool for the download of response information, to be stored in RESP files.
    The download is based on the IRIS DMC FetchData script and takes as input
    an xml file that specifies the download parameters.
    """
    datadir = cfg.datadir

    #- read input file ========================================================
    datainput = rxml.read_xml(xmlinput)
    dat1 = datainput[1]

    # Verbose?
    if dat1['verbose'] == '1':
        v = True
        vfetchdata = '-v '
    else:
        vfetchdata = ''

    # Directory where executable is located
    exdir = dat1['exdir']

    # network, channel, location and station list
    stafile = dat1['ids']

    # geographical region
    lat_min = dat1['region']['lat_min']
    lat_max = dat1['region']['lat_max']
    lon_min = dat1['region']['lon_min']
    lon_max = dat1['region']['lon_max']

    respfileloc = datadir + 'resp/'
    if not os.path.isdir(respfileloc):
        cmd = 'mkdir ' + respfileloc
        os.system(cmd)

    fh = open(stafile, 'r')
    ids = fh.read().split('\n')

    for id in ids:
        if id == '':
            continue

        network = id.split('.')[0]
        station = id.split('.')[1]
        channel = id.split('.')[3]

        print('\n Downloading response information from: ' + id + '\n')

        reqstring = exdir + '/FetchData ' + vfetchdata + ' -N ' + network + \
            ' -S ' + station + ' -C ' + channel + \
            ' --lat ' + lat_min + ':' + lat_max + \
            ' --lon ' + lon_min + ':' + lon_max + ' -rd ' + respfileloc
        os.system(reqstring)

    return
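# --------------------------------------------------------------------------
# Usage sketch for download_resp (illustrative only). The element names below
# are inferred from the keys read above (verbose, exdir, ids, region/...);
# the file name 'input_resp.xml' and the concrete values are assumptions.
#
#   <verbose>1</verbose>
#   <exdir>/path/to/fetch_scripts</exdir>       directory containing FetchData
#   <ids>downloadlist.txt</ids>                 one NET.STA.LOC.CHA id per line
#   <region>
#     <lat_min>-90</lat_min> <lat_max>90</lat_max>
#     <lon_min>-180</lon_min> <lon_max>180</lon_max>
#   </region>
#
# download_resp('input_resp.xml')
# --------------------------------------------------------------------------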
def par_download():
    """
    Parallel download from IRIS DMC
    """
    #==========================================================================
    # preliminaries
    #==========================================================================
    comm = MPI.COMM_WORLD
    rank = comm.Get_rank()
    size = comm.Get_size()

    #==========================================================================
    #- MASTER process:
    #- reads in xmlinput
    #- creates output directory
    #- creates a list of input files
    #==========================================================================
    if rank == 0:
        datadir = cfg.datadir
        dat = rxml.read_xml(os.path.join(cfg.inpdir, 'input_download.xml'))[1]

        # network, channel, location and station list
        stalist = os.path.join(cfg.inpdir, 'downloadlist.txt')
        fh = open(stalist, 'r')
        ids = fh.read().split('\n')

    #==========================================================================
    #- All processes:
    #- receive the input and the list of files
    #- read variables from the broadcast input
    #==========================================================================
    else:
        ids = list()
        dat = list()

    ids = comm.bcast(ids, root=0)
    dat = comm.bcast(dat, root=0)

    datadir = cfg.datadir
    targetloc = datadir + 'raw/latest/rank' + str(rank) + '/'
    if not os.path.isdir(targetloc):
        cmd = 'mkdir ' + targetloc
        os.system(cmd)

    # Directory where executable is located
    exdir = dat['exdir']

    # Verbose?
    if dat['verbose'] == '1':
        v = True
        vfetchdata = '-v '
    else:
        vfetchdata = ''

    # Quality?
    quality = dat['quality']

    # time interval of request
    t1 = dat['time']['starttime']
    t1str = UTCDateTime(t1).strftime('%Y.%j.%H.%M.%S')
    t2 = dat['time']['endtime']
    t2str = UTCDateTime(t2).strftime('%Y.%j.%H.%M.%S')

    # data segment length
    if dat['time']['len'] is None:
        winlen = UTCDateTime(t2) - UTCDateTime(t1)
    else:
        winlen = int(dat['time']['len'])

    # minimum length
    minlen = dat['time']['minlen']

    # geographical region
    lat_min = dat['region']['lat_min']
    lat_max = dat['region']['lat_max']
    lon_min = dat['region']['lon_min']
    lon_max = dat['region']['lon_max']

    #==========================================================================
    #- Assign each rank its own chunk of input
    #==========================================================================
    clen = int(float(len(ids)) / float(size))
    chunk = (rank * clen, (rank + 1) * clen)
    myids = ids[chunk[0]:chunk[1]]
    if rank == size - 1:
        myids = ids[chunk[0]:]

    #==========================================================================
    # Input files loop
    #==========================================================================
    for id in myids:
        if id == '':
            continue

        t = UTCDateTime(t1)
        while t < UTCDateTime(t2):
            tstart = UTCDateTime(t).strftime('%Y-%m-%d,%H:%M:%S')
            tstartstr = UTCDateTime(t).strftime('%Y.%j.%H.%M.%S')
            tstep = min((UTCDateTime(t) + winlen), UTCDateTime(t2)).\
                strftime('%Y-%m-%d,%H:%M:%S')
            tstepstr = min((UTCDateTime(t) + winlen), UTCDateTime(t2)).\
                strftime('%Y.%j.%H.%M.%S')

            #- Formulate a polite request
            filename = targetloc + id + '.' + tstartstr + '.' + tstepstr + '.mseed'
            if not os.path.exists(filename):
                network = id.split('.')[0]
                station = id.split('.')[1]
                channel = id.split('.')[3]
                #print network, station, location, channel

                print('\n Rank ' + str(rank) + '\n')
                print('\n Attempting to download data from: ' + id + '\n')
                print(filename)

                reqstring = exdir + '/FetchData ' + vfetchdata + ' -N ' + network + \
                    ' -S ' + station + ' -C ' + channel + ' -s ' + tstart + ' -e ' + tstep + \
                    ' -msl ' + minlen + ' --lat ' + lat_min + ':' + lat_max + \
                    ' --lon ' + lon_min + ':' + lon_max + ' -o ' + filename + ' -Q ' + quality
                os.system(reqstring)
            t += winlen

    # Clean up (some files come back with 0 data)
    stafile = dat['ids']
    t1s = t1str.split('.')[0] + '.' + t1str.split('.')[1]
    t2s = t2str.split('.')[0] + '.' + t2str.split('.')[1]

    cmd = './UTIL/cleandir.sh ' + targetloc
    os.system(cmd)
    os.system('mv ' + targetloc + '* ' + targetloc + '/..')
    os.system('rmdir ' + targetloc)

    # Download resp files for all epochs!
    respfileloc = datadir + 'resp/'
    if not os.path.isdir(respfileloc):
        cmd = 'mkdir ' + respfileloc
        os.system(cmd)

    for id in myids:
        if id == '':
            continue

        network = id.split('.')[0]
        station = id.split('.')[1]
        channel = id.split('.')[3]

        print('\n Downloading response information from: ' + id + '\n')

        reqstring = exdir + '/FetchData ' + vfetchdata + ' -N ' + network + \
            ' -S ' + station + ' -C ' + channel + \
            ' --lat ' + lat_min + ':' + lat_max + \
            ' --lon ' + lon_min + ':' + lon_max + ' -rd ' + respfileloc
        os.system(reqstring)

    comm.Barrier()

    if rank == 0:
        outfile = os.path.join(cfg.datadir, 'raw/latest/download_report.txt')
        outf = open(outfile, 'w')

        print('Attempting to download data from stations: \n', file=outf)
        print('****************************************** \n', file=outf)
        for id in ids:
            print(id, file=outf)
        print('****************************************** \n', file=outf)

        stalist = os.path.join(cfg.inpdir, 'downloadlist.txt')
        fh = open(stalist, 'r')
        ids = fh.read().split('\n')

        noreturn = []
        for id in ids:
            if id == '':
                continue
            fls = glob(os.path.join(cfg.datadir, 'raw/latest', id + '*'))
            if fls != []:
                print('Files downloaded for id: ' + id, file=outf)
                print('First file: ' + fls[0], file=outf)
                print('Last file: ' + fls[-1], file=outf)
                print('****************************************** \n', file=outf)
            else:
                noreturn.append(id)

        if noreturn != []:
            print('NO files downloaded for: \n', file=outf)
            print(noreturn, file=outf)

        print('****************************************** \n', file=outf)
        print('Download parameters were: \n', file=outf)
        print('****************************************** \n', file=outf)
        outf.close()

        os.system('cat ' + cfg.inpdir + '/input_download.xml >> ' + outfile)
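# --------------------------------------------------------------------------
# Minimal sketch of the rank/chunk assignment used in par_download, pulled out
# so it can be checked without MPI. The station ids are split into `size`
# equal chunks and the last rank also picks up the remainder. The helper name
# and the example id list are illustrative, not part of the package.

def _chunk_ids(ids, rank, size):
    clen = int(float(len(ids)) / float(size))
    chunk = (rank * clen, (rank + 1) * clen)
    myids = ids[chunk[0]:chunk[1]]
    if rank == size - 1:
        myids = ids[chunk[0]:]
    return myids

# Example: 7 ids on 3 ranks -> ranks 0 and 1 get 2 ids each, rank 2 gets 3.
# for rank in range(3):
#     print(rank, _chunk_ids(['a', 'b', 'c', 'd', 'e', 'f', 'g'], rank, 3))
#
# par_download itself is meant to be run with one process per rank, e.g.
# (assumed invocation, module name not shown in this file):
#     mpirun -np 4 python -c "import download; download.par_download()"
# --------------------------------------------------------------------------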
def dispersion_psv(xml_input):
    """
    Compute dispersion curves and displacement functions for PSV propagation.

    periods, phase_velocities = dispersion_psv(xml_input)

    Dispersion curves and displacement functions are written to files.
    """

    #- read input -------------------------------------------------------------
    inp = rxml.read_xml(xml_input)
    inp = inp[1]

    verbose = int(inp["verbose"])
    plot_dispersion = int(inp["plot_dispersion"])
    plot_amplitudes = int(inp["plot_amplitudes"])

    model = inp["model"]

    write_output = inp["output"]["write_output"]
    output_directory = inp["output"]["directory"]
    tag = inp["output"]["tag"]

    r_min = float(inp["integration"]["starting_radius"])
    dr = float(inp["integration"]["radius_sampling"])

    T_min = float(inp["T_c_sampling"]["T_min"])
    T_max = float(inp["T_c_sampling"]["T_max"])
    dT = float(inp["T_c_sampling"]["dT"])

    c_min = float(inp["T_c_sampling"]["c_min"])
    c_max = float(inp["T_c_sampling"]["c_max"])
    dc = float(inp["T_c_sampling"]["dc"])

    #- initialisations --------------------------------------------------------
    T = np.arange(T_min, T_max + dT, dT, dtype=float)
    c = np.arange(c_min, c_max + dc, dc, dtype=float)
    omega = 2 * np.pi / T

    mode = []
    periods = []
    phase_velocities = []
    group_velocities = []

    r = np.arange(r_min, 6371000.0 + dr, dr, dtype=float)
    rho = np.zeros(len(r))
    A = np.zeros(len(r))
    C = np.zeros(len(r))
    F = np.zeros(len(r))
    L = np.zeros(len(r))
    N = np.zeros(len(r))

    for n in np.arange(len(r)):
        rho[n], A[n], C[n], F[n], L[n], N[n] = m.models(r[n], model)

    #- root-finding algorithm -------------------------------------------------
    #- loop over angular frequencies
    for _omega in omega:
        mode_count = 0.0
        k = _omega / c

        #- loop over trial wavenumbers
        r_left = 0.0
        for n in np.arange(len(k)):

            #- compute vertical wave functions using the alternative system
            r1, r2, r3, r4, r5, r = ipsv_alt.integrate_psv_alt(
                r_min, dr, _omega, k[n], model)
            r_right = r2[len(r2) - 1]

            #- check if there is a zero ---------------------------------------
            if r_left * r_right < 0.0:
                mode_count += 1.0
                mode.append(mode_count)

                #- start bisection algorithm
                rr_left = r_left
                rr_right = r_right
                k_left = k[n - 1]
                k_right = k[n]

                for i in np.arange(5):
                    k_new = (k_left * np.abs(rr_right) + k_right * np.abs(rr_left)) / \
                        (np.abs(rr_left) + np.abs(rr_right))
                    r1, r2, r3, r4, r5, r = ipsv_alt.integrate_psv_alt(
                        r_min, dr, _omega, k_new, model)
                    rr = r2[len(r2) - 1]
                    if rr * rr_left < 0.0:
                        k_right = k_new
                        rr_right = rr
                    elif rr * rr_right < 0.0:
                        k_left = k_new
                        rr_left = rr
                    else:
                        continue

                #==============================================================
                #- compute final vertical wave functions and corresponding
                #- velocities and kernels
                #==============================================================

                #- compute final vertical wave functions using the original
                #- first-order system

                #- two independent solutions
                r11, r21, r31, r41, r = ipsv.integrate_psv(
                    r_min, dr, _omega, k_new, model, 1)
                r12, r22, r32, r42, r = ipsv.integrate_psv(
                    r_min, dr, _omega, k_new, model, 2)

                #- determine their weights via boundary condition
                #- (weight q1 is set to 1)
                nr = len(r) - 1
                q2 = -r21[nr] / r22[nr]

                #- final solution with boundary condition
                r1 = r11 + q2 * r12
                r2 = r21 + q2 * r22
                r3 = r31 + q2 * r32
                r4 = r41 + q2 * r42

                #- normalise to vertical displacement at the surface
                mm = r1[nr]
                r1 = r1 / mm
                r2 = r2 / mm
                r3 = r3 / mm
                r4 = r4 / mm

                #- phase velocity
                periods.append(2 * np.pi / _omega)
                phase_velocities.append(_omega / k_new)

                #- group velocity
                U, I1, I3 = vg.group_velocity_psv(
                    r1, r2, r3, r4, r, k_new, _omega / k_new,
                    rho, A, C, F, L, N)
                group_velocities.append(U)

                #- kernels
                kpsv.kernels_psv(r, r1, r2, r3, r4, _omega, k_new, I3,
                                 rho, A, C, F, L, N,
                                 write_output, output_directory, tag)

                #==============================================================
                #- screen output and displacement function files
                #==============================================================

                #- plot and print to screen
                if verbose:
                    print("T=" + str(2 * np.pi / _omega) + " s, c=" +
                          str(_omega / k_new) + " m/s, U=" + str(U) + " m/s")

                if plot_amplitudes:
                    f = plt.figure()
                    f.text(0.5, 0.95,
                           "T=" + str(2 * np.pi / _omega) + " s, c=" +
                           str(_omega / k_new) + " m/s",
                           horizontalalignment="center",
                           verticalalignment="top")
                    plt.subplot(2, 2, 1)
                    plt.plot(r, r1)
                    plt.xlabel("radius [m]")
                    plt.title("vertical displacement")
                    plt.subplot(2, 2, 2)
                    plt.plot(r, r2)
                    plt.xlabel("radius [m]")
                    plt.title("vertical stress")
                    plt.subplot(2, 2, 3)
                    plt.plot(r, r3)
                    plt.xlabel("radius [m]")
                    plt.title("horizontal displacement")
                    plt.subplot(2, 2, 4)
                    plt.plot(r, r4)
                    plt.xlabel("radius [m]")
                    plt.title("horizontal stress")
                    plt.show()

                #- write output
                if write_output:
                    identifier = "T=" + str(2 * np.pi / _omega) + \
                        ".c=" + str(_omega / k_new)
                    fid = open(output_directory + "displacement_psv." +
                               tag + "." + identifier, "w")
                    fid.write("number of vertical sampling points\n")
                    fid.write(str(len(r)) + "\n")
                    fid.write("radius, vertical displacement, horizontal displacement\n")
                    for idx in np.arange(len(r)):
                        fid.write(str(r[idx]) + " " + str(r1[idx]) + " " +
                                  str(r3[idx]) + "\n")
                    fid.close()

            r_left = r_right

    #==========================================================================
    #- output
    #==========================================================================

    #- dispersion curve -------------------------------------------------------
    if write_output:
        #- write dispersion curve
        fid = open(output_directory + "dispersion_psv." + tag, "w")
        for k in np.arange(len(periods)):
            fid.write(str(periods[k]) + " " + str(phase_velocities[k]) + " " +
                      str(group_velocities[k]) + "\n")
        fid.close()

    #- plot -------------------------------------------------------------------
    if plot_dispersion:
        for n in np.arange(len(periods)):
            plt.plot(periods[n], phase_velocities[n], 'ko')
            #if mode[n]==1.0:
            plt.plot(periods[n], group_velocities[n], 'ro')
        plt.margins(0.2)
        plt.xlabel("period [s]")
        plt.ylabel("phase velocity (black), group velocity (red) [m/s]")
        plt.title("PSV dispersion")
        plt.show()

    #- return -----------------------------------------------------------------
    return periods, phase_velocities
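# --------------------------------------------------------------------------
# The "bisection" loops above actually use a weighted update (false position /
# regula falsi) on the secular function evaluated at the surface. A minimal,
# self-contained sketch of that same update on an ordinary function; the
# helper name and the cosine example are illustrative only:

def _false_position_zero(f, x_left, x_right, n_iter=5):
    # Assumes f(x_left) and f(x_right) have opposite signs.
    f_left = f(x_left)
    f_right = f(x_right)
    x_new = x_left
    for _ in np.arange(n_iter):
        x_new = (x_left * np.abs(f_right) + x_right * np.abs(f_left)) / \
                (np.abs(f_left) + np.abs(f_right))
        f_new = f(x_new)
        if f_new * f_left < 0.0:
            x_right, f_right = x_new, f_new
        elif f_new * f_right < 0.0:
            x_left, f_left = x_new, f_new
        else:
            # exact zero hit; the loops above simply keep iterating here
            break
    return x_new

# Example: the zero of cos between 1 and 2 is pi/2.
# print(_false_position_zero(np.cos, 1.0, 2.0))   # ~1.5707963
# --------------------------------------------------------------------------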
def dispersion_sh(xml_input):
    """
    Compute dispersion curves, displacement functions and kernels for SH propagation.

    periods, phase_velocities = dispersion_sh(xml_input)

    Dispersion curves and displacement functions are written to files.
    """

    #- read input -------------------------------------------------------------
    inp = rxml.read_xml(xml_input)
    inp = inp[1]

    verbose = int(inp["verbose"])
    plot_dispersion = int(inp["plot_dispersion"])
    plot_amplitudes = int(inp["plot_amplitudes"])

    model = inp["model"]

    write_output = inp["output"]["write_output"]
    output_directory = inp["output"]["directory"]
    tag = inp["output"]["tag"]

    r_min = float(inp["integration"]["starting_radius"])
    dr = float(inp["integration"]["radius_sampling"])

    T_min = float(inp["T_c_sampling"]["T_min"])
    T_max = float(inp["T_c_sampling"]["T_max"])
    dT = float(inp["T_c_sampling"]["dT"])

    c_min = float(inp["T_c_sampling"]["c_min"])
    c_max = float(inp["T_c_sampling"]["c_max"])
    dc = float(inp["T_c_sampling"]["dc"])

    #- initialisations --------------------------------------------------------
    T = np.arange(T_min, T_max + dT, dT, dtype=float)
    c = np.arange(c_min, c_max + dc, dc, dtype=float)
    omega = 2 * np.pi / T

    mode = []
    periods = []
    phase_velocities = []
    group_velocities = []

    r = np.arange(r_min, 6371000.0 + dr, dr, dtype=float)
    rho = np.zeros(len(r))
    A = np.zeros(len(r))
    C = np.zeros(len(r))
    F = np.zeros(len(r))
    L = np.zeros(len(r))
    N = np.zeros(len(r))

    for n in np.arange(len(r)):
        rho[n], A[n], C[n], F[n], L[n], N[n] = m.models(r[n], model)

    #- root-finding algorithm -------------------------------------------------
    #- loop over angular frequencies
    for _omega in omega:
        k = _omega / c
        mode_count = 0.0

        #- loop over trial wavenumbers
        l_left = 0.0
        for n in np.arange(len(k)):

            #- compute vertical wave functions
            l1, l2, r = ish.integrate_sh(r_min, dr, _omega, k[n], model)
            l_right = l2[len(l2) - 1]

            #- check if there is a zero ---------------------------------------
            if l_left * l_right < 0.0:
                mode_count += 1.0
                mode.append(mode_count)

                #- start bisection algorithm
                ll_left = l_left
                ll_right = l_right
                k_left = k[n - 1]
                k_right = k[n]

                for i in np.arange(5):
                    k_new = (k_left * np.abs(ll_right) + k_right * np.abs(ll_left)) / \
                        (np.abs(ll_left) + np.abs(ll_right))
                    l1, l2, r = ish.integrate_sh(r_min, dr, _omega, k_new, model)
                    ll = l2[len(l2) - 1]
                    if ll * ll_left < 0.0:
                        k_right = k_new
                        ll_right = ll
                    elif ll * ll_right < 0.0:
                        k_left = k_new
                        ll_left = ll
                    else:
                        continue

                #==============================================================
                #- compute final vertical wave functions and corresponding
                #- velocities and kernels
                #==============================================================

                #- stress and displacement functions
                l1, l2, r = ish.integrate_sh(r_min, dr, _omega, k_new, model)

                #- phase velocity
                periods.append(2 * np.pi / _omega)
                phase_velocities.append(_omega / k_new)

                #- group velocity
                U, I1, I3 = vg.group_velocity_sh(l1, l2, r, _omega / k_new, rho, N)
                group_velocities.append(U)

                #- kernels
                ksh.kernels_sh(r, l1, l2, _omega, k_new, I3,
                               rho, A, C, F, L, N,
                               write_output, output_directory, tag)

                #==============================================================
                #- screen output and displacement function files
                #==============================================================

                #- plot and print to screen
                if verbose:
                    print("T=" + str(2 * np.pi / _omega) + " s, c=" +
                          str(_omega / k_new) + " m/s, U=" + str(U) + " m/s")

                if plot_amplitudes:
                    plt.plot(r, l1)
                    plt.xlabel("radius [m]")
                    plt.ylabel("stress-normalised displacement amplitude")
                    plt.title("T=" + str(2 * np.pi / _omega) + " s, c=" +
                              str(_omega / k_new) + " m/s")
                    plt.show()

                #- write output
                if write_output:
                    identifier = "T=" + str(2 * np.pi / _omega) + \
                        ".c=" + str(_omega / k_new)
                    fid = open(output_directory + "displacement_sh." +
                               tag + "." + identifier, "w")
                    fid.write("number of vertical sampling points\n")
                    fid.write(str(len(r)) + "\n")
                    fid.write("radius displacement stress\n")
                    for idx in np.arange(len(r)):
                        fid.write(str(r[idx]) + " " + str(l1[idx]) + " " +
                                  str(l2[idx]) + "\n")
                    fid.close()

            l_left = l_right

    #==========================================================================
    #- output
    #==========================================================================

    if write_output:
        #- write dispersion curve
        fid = open(output_directory + "dispersion_sh." + tag, "w")
        for k in np.arange(len(periods)):
            fid.write(str(periods[k]) + " " + str(phase_velocities[k]) + " " +
                      str(group_velocities[k]) + "\n")
        fid.close()

    #- plot -------------------------------------------------------------------
    print(mode)
    if plot_dispersion:
        for n in np.arange(len(periods)):
            plt.plot(periods[n], phase_velocities[n], 'ko')
            if mode[n] == 1.0:
                plt.plot(periods[n], group_velocities[n], 'ro')
        plt.margins(0.2)
        plt.xlabel("period [s]")
        plt.ylabel("phase velocity (black), group velocity (red) [m/s]")
        plt.title("SH dispersion")
        plt.show()

    #- return -----------------------------------------------------------------
    return periods, phase_velocities
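# --------------------------------------------------------------------------
# Usage sketch for dispersion_psv / dispersion_sh (illustrative only). The
# element names follow the keys read in both functions; the file name, the
# nesting as XML elements, and the concrete values are assumptions.
#
#   <verbose>1</verbose>
#   <plot_dispersion>1</plot_dispersion>
#   <plot_amplitudes>0</plot_amplitudes>
#   <model>prem</model>                              model name passed to m.models
#   <output>
#     <write_output>1</write_output>
#     <directory>OUTPUT/</directory>
#     <tag>test</tag>
#   </output>
#   <integration>
#     <starting_radius>6171000.0</starting_radius>   [m]
#     <radius_sampling>1000.0</radius_sampling>      [m]
#   </integration>
#   <T_c_sampling>
#     <T_min>10.0</T_min> <T_max>100.0</T_max> <dT>5.0</dT>        period range [s]
#     <c_min>2000.0</c_min> <c_max>8000.0</c_max> <dc>10.0</dc>    trial phase velocities [m/s]
#   </T_c_sampling>
#
# periods, phase_velocities = dispersion_sh('input_dispersion.xml')
# --------------------------------------------------------------------------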