def queryData(self):
    """Pull waveforms from IRIS in a single bulk request.

    Builds one bulk request from self.network/station/location/channel
    and self.startTime/endTime, rewrites each trace's data quality from
    M to D, records which stations/locations/channels actually came
    back when wildcards were requested, and finally hands the stream to
    self.storeMSEED().  Exits the process if the request fails.
    """
    # code from IRIS client: here we pull the data
    client = Client("IRIS")
    dup_stations = []
    dup_locations = []
    dup_channels = []
    self.STAWILD = False
    self.LOCWILD = False
    self.CHANWILD = False
    try:
        requestArray = [(self.network, self.station, self.location,
                         self.channel, self.startTime, self.endTime)]
        if self.debug:
            print(requestArray)
        self.st = client.get_waveforms_bulk(requestArray)
        for self.tr in self.st:
            # Here we remove the M data quality and go with D.
            self.tr.stats.mseed['dataquality'] = 'D'
            # When a wildcard was requested, remember what actually
            # came back so the concrete values can be looped over later.
            if self.station == '*':
                self.STAWILD = True
                dup_stations.append(self.tr.stats.station)
            else:
                self.STAWILD = False
            if self.location == '*':
                self.LOCWILD = True
                dup_locations.append(self.tr.stats.location)
            else:
                self.LOCWILD = False
            if self.channel == '*':
                self.CHANWILD = True
                dup_channels.append(self.tr.stats.channel)
            else:
                self.CHANWILD = False
    except Exception as e:
        # Was a bare except: which also swallowed SystemExit and
        # KeyboardInterrupt and exited with status 0 (success).
        print('Trouble getting data: ' + str(e))
        sys.exit(1)
    # Takes duplicate stations out of the lists.
    self.stations = list(set(dup_stations))
    self.locations = list(set(dup_locations))
    self.channels = list(set(dup_channels))
    print(self.stations)
    print(self.locations)
    print(self.channels)
    # Now call code to store streams in mseed files.
    self.storeMSEED()
def _download_bulk_waveforms(args):
    """Download all waveforms for one event as a single bulk request.

    ``args`` is the tuple (event, networks, stations, start_times,
    end_times, download_path, event_xml_directory, recording_time,
    padding_time).  It stays a single tuple so existing callers (e.g. a
    multiprocessing ``map``) are unchanged; the original used the
    Python 2-only tuple-parameter syntax ``def f((a, b, ...))``, which
    was removed in Python 3 (PEP 3113).
    """
    (event, networks, stations, start_times, end_times, download_path,
     event_xml_directory, recording_time, padding_time) = args
    time.sleep(1)
    c = Client("IRIS")

    # Make directory.
    utils.mkdir_p(os.path.join(download_path, event))
    filename = os.path.join(download_path, event, 'data.mseed')

    # Don't re-download files.
    if os.path.exists(filename):
        return

    # Find start time and end time from the event's QuakeML file.
    # NOTE: the original stored these on the *time module* itself
    # (time.start / time.end) — mutable global state shared across
    # calls; plain locals are used instead.
    event_info = os.path.join(event_xml_directory, event + '.xml')
    tree = ET.parse(event_info)
    root = tree.getroot()
    win_start = None
    win_end = None
    for tag in root.iter():
        if tag.tag == XML_STRING + 'time':
            win_start = obspy.UTCDateTime(
                tag.findall(XML_STRING + 'value')[0].text) - padding_time
            win_end = win_start + recording_time + 2 * padding_time
            break
    if win_start is None:
        # Previously this path fell through and failed later with
        # whatever stale attributes happened to be on the time module.
        raise ValueError('No origin time found in %s' % event_info)

    # Set up request: only stations whose operating window covers the
    # requested start time.
    bulk = []
    for x, y, s_time, e_time in zip(networks, stations,
                                    start_times, end_times):
        if win_start < s_time or win_start > e_time:
            continue
        bulk.append((x, y, '*', '*H*', win_start, win_end))

    utils.print_ylw('Downloading %s...' % (event))
    c.get_waveforms_bulk(bulk, filename=filename, quality='B')
def FDSN_bulk_request(i, add_event, input_dics):
    """
    Send bulk request to FDSN and write each returned trace to
    ``<event>/BH_RAW/<net>.<sta>.<loc>.<cha>`` in MSEED format.
    """
    print('\nSending bulk request to FDSN: %s'
          % input_dics['fdsn_base_url'])
    client_fdsn = Client_fdsn(base_url=input_dics['fdsn_base_url'],
                              user=input_dics['fdsn_user'],
                              password=input_dics['fdsn_pass'])
    # Load the pickled bulk-request list.  A context manager closes the
    # handle (the original leaked it), and binary mode is required for
    # pickle.load on Python 3 (harmless on Python 2).
    bulk_list_path = os.path.join(add_event[i], 'info', 'bulkdata_list')
    with open(bulk_list_path, 'rb') as bulk_list_fio:
        bulk_list = pickle.load(bulk_list_fio)
    bulk_smgrs = client_fdsn.get_waveforms_bulk(bulk_list)
    print('Saving the retrieved waveforms...')
    # get_waveforms_bulk returns a Stream; iterating yields Trace
    # objects whose stats supply the SNCL used in the file name.
    for bulk_st in bulk_smgrs:
        bulk_st.write(os.path.join(add_event[i], 'BH_RAW',
                                   '%s.%s.%s.%s'
                                   % (bulk_st.stats['network'],
                                      bulk_st.stats['station'],
                                      bulk_st.stats['location'],
                                      bulk_st.stats['channel'])),
                      'MSEED')
def queryData(self):
    """Pull waveforms from IRIS one hour at a time.

    Large requests can fail/time out, so the window
    [self.startTime, self.startTime + self.hours hours) is fetched in
    one-hour bulk requests accumulated into self.st.  Each trace's data
    quality is rewritten from M to D; when wildcards were requested the
    station/location/channel values that actually came back are
    collected for later looping, then self.storeMSEED() is called.
    Exits the process if a request fails.
    """
    # code from IRIS client: here we pull the data
    client = Client("IRIS")
    dup_stations = []
    dup_locations = []
    dup_channels = []
    self.st = Stream()
    self.STAWILD = False
    self.LOCWILD = False
    self.CHANWILD = False
    try:
        # Generous socket timeout; bulky queries are slow.
        timeout = 300
        socket.setdefaulttimeout(timeout)
        # Query one hour at a time -- the service can't handle the
        # whole span in a single request when the data is too bulky.
        for hourIndex in range(0, self.hours):
            self.startTime1 = self.startTime + hourIndex * 60 * 60
            self.endTime1 = self.startTime + (hourIndex + 1) * 60 * 60
            requestArray = [(self.network, self.station, self.location,
                             self.channel, self.startTime1,
                             self.endTime1)]
            self.st1 = client.get_waveforms_bulk(requestArray)
            self.st += self.st1
            print(self.st)
            print('')
        for self.tr in self.st:
            # Here we remove the M data quality and go with D.
            self.tr.stats.mseed['dataquality'] = 'D'
            # When a wildcard was requested, remember what actually
            # came back so the concrete values can be looped over later.
            if self.station == '*':
                self.STAWILD = True
                dup_stations.append(self.tr.stats.station)
            else:
                self.STAWILD = False
            if self.location == '*':
                self.LOCWILD = True
                dup_locations.append(self.tr.stats.location)
            else:
                self.LOCWILD = False
            if self.channel == '*':
                self.CHANWILD = True
                dup_channels.append(self.tr.stats.channel)
            else:
                self.CHANWILD = False
    except Exception as e:
        # Was a bare except: which also swallowed SystemExit and
        # KeyboardInterrupt and exited with status 0 (success).
        print('Trouble getting data: ' + str(e))
        sys.exit(1)
    # Takes duplicate stations out of list and makes station, location,
    # and channel into arrays for looping.  When a concrete
    # (non-wildcard) value was requested, nothing was collected above,
    # so append the requested value itself.
    self.stations = list(set(dup_stations))
    if self.station != '*':
        self.stations.append(self.station)
    self.locations = list(set(dup_locations))
    if self.location != '*':
        self.locations.append(self.location)
    self.channels = list(set(dup_channels))
    if self.channel != '*':
        self.channels.append(self.channel)
    print('')
    print('Station(s) being pulled: ' + str(self.stations))
    print('Location(s) being pulled: ' + str(self.locations))
    print('Channel(s) being pulled: ' + str(self.channels))
    # Now call code to store streams in mseed files.
    self.storeMSEED()
# Build one bulk-request entry per station in the first network of the
# inventory, with a window of +/- wl_10deg around the predicted P
# arrival, and collect a "code lat lon dist" summary line per station.
bulk_req = []
S = []
for station in inv[0].stations:
    deg_dist = locations2degrees(eve_lat, eve_lon,
                                 station.latitude, station.longitude)
    arrivals = model.get_travel_times(source_depth_in_km=eve_depth,
                                      distance_in_degree=deg_dist,
                                      phase_list=['P'])
    p_time = eve_ot + arrivals[0].time
    bulk_req.append(('AU', station.code, '*', 'BH*',
                     p_time - wl_10deg, p_time + wl_10deg))
    S.append(' '.join([station.code, str(station.latitude),
                       str(station.longitude), str(deg_dist)]))

event_tag = "eve_" + str(int(line[0]))
print("Downloading waveforms for " + event_tag)
st = client.get_waveforms_bulk(bulk_req, attach_response=True)
print("completed for " + event_tag)
# write stations.txt (see plan-phase1.txt for format details)
## stations = []
## for s in S:
##     sta_code = s.split(' ')[0]
##     tr_bhn = st.select(station=sta_code,channel ='BHN')
##     tr_bhe = st.select(station=sta_code,channel ='BHE')
##     tr_bhz = st.select(station=sta_code,channel ='BHZ')
##     data_bhn = 1 if (len(tr_bhn)==1) else 0
##     data_bhe = 1 if (len(tr_bhe)==1) else 0
##     data_bhz = 1 if (len(tr_bhz)==1) else 0
##     if (data_bhn==1):
##         meta_data_bhn = 1 if (hasattr(tr_bhn[0].stats,'response')) else 0
##     else:
##         meta_data_bhn = 0
def queryData(self):
    """Pull waveforms from IRIS one hour at a time.

    Large requests can fail/time out, so the window
    [self.startTime, self.startTime + self.hours hours) is fetched in
    one-hour bulk requests accumulated into self.st.  Each trace's data
    quality is rewritten from M to D; when wildcards were requested the
    station/location/channel values that actually came back are
    collected for later looping, then self.storeMSEED() is called.
    Exits the process if a request fails.
    """
    # code from IRIS client: here we pull the data
    client = Client("IRIS")
    dup_stations = []
    dup_locations = []
    dup_channels = []
    self.st = Stream()
    self.STAWILD = False
    self.LOCWILD = False
    self.CHANWILD = False
    try:
        # Generous socket timeout; bulky queries are slow.
        timeout = 300
        socket.setdefaulttimeout(timeout)
        # Query one hour at a time -- the service can't handle the
        # whole span in a single request when the data is too bulky.
        for hourIndex in range(0, self.hours):
            self.startTime1 = self.startTime + hourIndex * 60 * 60
            self.endTime1 = self.startTime + (hourIndex + 1) * 60 * 60
            requestArray = [(self.network, self.station, self.location,
                             self.channel, self.startTime1,
                             self.endTime1)]
            self.st1 = client.get_waveforms_bulk(requestArray)
            self.st += self.st1
            print(self.st)
            print('')
        for self.tr in self.st:
            # Here we remove the M data quality and go with D.
            self.tr.stats.mseed['dataquality'] = 'D'
            # When a wildcard was requested, remember what actually
            # came back so the concrete values can be looped over later.
            if self.station == '*':
                self.STAWILD = True
                dup_stations.append(self.tr.stats.station)
            else:
                self.STAWILD = False
            if self.location == '*':
                self.LOCWILD = True
                dup_locations.append(self.tr.stats.location)
            else:
                self.LOCWILD = False
            if self.channel == '*':
                self.CHANWILD = True
                dup_channels.append(self.tr.stats.channel)
            else:
                self.CHANWILD = False
    except Exception as e:
        # Was a bare except: which also swallowed SystemExit and
        # KeyboardInterrupt and exited with status 0 (success).
        print('Trouble getting data: ' + str(e))
        sys.exit(1)
    # Takes duplicate stations out of list and makes station, location,
    # and channel into arrays for looping.  When a concrete
    # (non-wildcard) value was requested, nothing was collected above,
    # so append the requested value itself.
    self.stations = list(set(dup_stations))
    if self.station != '*':
        self.stations.append(self.station)
    self.locations = list(set(dup_locations))
    if self.location != '*':
        self.locations.append(self.location)
    self.channels = list(set(dup_channels))
    if self.channel != '*':
        self.channels.append(self.channel)
    print('')
    print('Station(s) being pulled: ' + str(self.stations))
    print('Location(s) being pulled: ' + str(self.locations))
    print('Channel(s) being pulled: ' + str(self.channels))
    # Now call code to store streams in mseed files.
    self.storeMSEED()