def create_config(conf='conf.json', tutorial=False, less_data=False):
    """Create JSON config file and download tutorial data if requested

    :param conf: file name of the JSON configuration file to create
    :param tutorial: additionally download and install the tutorial
        example data next to the configuration file
    :param less_data: download/install only a reduced subset of the
        tutorial data (shorter time spans, fewer stations)
    """
    shutil.copyfile(resource_filename('yam', 'conf_example.json'), conf)
    # Example data is cached in the system temp directory so repeated
    # calls do not re-download everything.
    temp_dir = os.path.join(tempfile.gettempdir(), 'yam_example_data')
    template = os.path.join(temp_dir, 'example_data')
    station_template = os.path.join(temp_dir, 'example_inventory')
    try:
        # len(os.listdir(...)) directly -- no need for an intermediate
        # list comprehension.
        num_files = (len(os.listdir(template)),
                     len(os.listdir(station_template)))
    except FileNotFoundError:
        num_files = (0, 0)
    # Only hit the data center if the cache looks incomplete
    # (expected counts: 9/54 waveform files, 3 inventory files).
    if tutorial and (num_files[0] < (9 if less_data else 54) or
                     num_files[1] < 3):
        print('Download example data from Geofon')
        from obspy import UTCDateTime as UTC
        from obspy.clients.fdsn.mass_downloader import (
            GlobalDomain, Restrictions, MassDownloader)
        domain = GlobalDomain()
        restrictions = Restrictions(
            starttime=UTC('2010-02-04' if less_data else '2010-02-01'),
            endtime=UTC('2010-02-06' if less_data else '2010-02-15'),
            network='CX', station='PATCX', location=None,
            channel_priorities=["BH[ZN]"], chunklength_in_sec=86400,
            reject_channels_with_gaps=False, minimum_length=0.5)
        mdl = MassDownloader(providers=['GFZ'])
        kw = dict(threads_per_client=1, download_chunk_size_in_mb=200)
        mdl.download(domain, restrictions, template, station_template, **kw)
        # Second station: PB06 (shorter span in the full data set).
        restrictions.station = 'PB06'
        if not less_data:
            restrictions.endtime = UTC('2010-02-12')
        mdl.download(domain, restrictions, template, station_template, **kw)
        # Third station: PB01, vertical component only, short window.
        restrictions.station = 'PB01'
        restrictions.endtime = UTC('2010-02-04 08:00:00')
        restrictions.channel_priorities = ["BHZ"]
        mdl.download(domain, restrictions, template, station_template, **kw)
        if not less_data:
            # Extra PB01 window for the full data set.
            # (channel_priorities is already ["BHZ"] from above.)
            restrictions.starttime = UTC('2010-02-08 00:00:00')
            restrictions.endtime = UTC('2010-02-09 23:55:00')
            mdl.download(domain, restrictions, template, station_template,
                         **kw)
    if tutorial:
        # Install the cached example data next to the config file.
        dest_dir = os.path.dirname(conf)
        dest_dir_data = os.path.join(dest_dir, 'example_data')
        dest_dir_inv = os.path.join(dest_dir, 'example_inventory')
        if not os.path.exists(dest_dir_data):
            if less_data:
                # Skip files outside the reduced 2010-02-04/05 window.
                ignore = shutil.ignore_patterns(
                    '*2010020[123]T000000Z__*',
                    '*2010020[6-9]T000000Z__*',
                    '*2010021?T000000Z__*')
            else:
                ignore = None
            shutil.copytree(template, dest_dir_data, ignore=ignore)
        if not os.path.exists(dest_dir_inv):
            shutil.copytree(station_template, dest_dir_inv)
def download_data(self, event, providers=None):
    """
    Download waveform and StationXML data for one event.

    :param event: Name of the event; resolved via the project's events
        component.
    :param providers: Optional list of FDSN providers to query; all
        known providers are used when ``None``.
    """
    event = self.comm.events.get(event)
    from obspy.clients.fdsn.mass_downloader import MassDownloader, \
        Restrictions, GlobalDomain
    proj = self.comm.project
    if isinstance(proj.domain, lasif.domain.GlobalDomain):
        domain = GlobalDomain()
    else:
        domain = self._get_spherical_section_domain(proj.domain)
    event_time = event["origin_time"]
    ds = proj.config["download_settings"]
    starttime = event_time - ds["seconds_before_event"]
    endtime = event_time + ds["seconds_after_event"]
    mseed_storage = os.path.join(proj.paths["data"], event["event_name"],
                                 "raw")
    # Attempt to get StationXML data for a very long time span. This has
    # the nice side effect that StationXML files will mostly be shared
    # between events.
    restrictions = Restrictions(
        starttime=starttime,
        endtime=endtime,
        # Go back 10 years.
        station_starttime=starttime - 86400 * 365.25 * 10,
        # Advance 10 years.
        station_endtime=endtime + 86400 * 365.25 * 10,
        network=None, station=None, location=None, channel=None,
        minimum_interstation_distance_in_m=ds[
            "interstation_distance_in_m"],
        reject_channels_with_gaps=True,
        minimum_length=0.95,
        location_priorities=ds["location_priorities"],
        channel_priorities=ds["channel_priorities"])
    stationxml_storage = self._get_stationxml_storage_fct(starttime,
                                                          endtime)
    # Also log to file for reasons of provenance and debugging.
    logger = logging.getLogger("obspy.clients.fdsn.mass_downloader")
    fh = logging.FileHandler(
        self.comm.project.get_log_file("DOWNLOADS", event["event_name"]))
    fh.setLevel(logging.INFO)
    FORMAT = "[%(asctime)s] - %(name)s - %(levelname)s: %(message)s"
    fh.setFormatter(logging.Formatter(FORMAT))
    logger.addHandler(fh)
    try:
        dlh = MassDownloader(providers=providers)
        dlh.download(domain=domain, restrictions=restrictions,
                     mseed_storage=mseed_storage,
                     stationxml_storage=stationxml_storage)
    finally:
        # BUG FIX: the file handler was previously left attached to the
        # (module-global) obspy logger, so every call added another
        # handler -- duplicated log lines and leaked file descriptors.
        # Detach and close it once the download finishes or fails.
        logger.removeHandler(fh)
        fh.close()
def download(eqname, t0, min_length=600):
    """Download waveforms and station metadata for one earthquake.

    :param eqname: event name; used to build the ``data/<eqname>/...``
        output directories
    :param t0: event origin time; the requested window is
        ``[t0, t0 + min_length]``
    :param min_length: window length in seconds
    """
    domain = GlobalDomain()
    restrictions = Restrictions(
        starttime=t0,
        endtime=t0 + min_length,
        # BUG FIX: was 84600 -- a digit transposition; one day is
        # 86400 seconds.
        chunklength_in_sec=86400,
        network="*", station="*", location="", channel="BH*",
        reject_channels_with_gaps=True,
        minimum_length=0.0,
        minimum_interstation_distance_in_m=100.0)
    waveform_dir = "data/{}/waveforms".format(eqname)
    stationxml_dir = "data/{}/stations".format(eqname)
    makedir(waveform_dir)
    makedir(stationxml_dir)
    mdl = MassDownloader(providers=["http://eida.koeri.boun.edu.tr:8080"])
    # The Kandilli FDSN service returns an empty station list when
    # matchtimeseries=True, so drop that capability from the client.
    # NOTE(review): this reaches into a private MassDownloader attribute
    # and may break with future obspy versions.
    mdl._initialized_clients[
        "http://eida.koeri.boun.edu.tr:8080"].services["station"].pop(
            "matchtimeseries")
    mdl.download(domain, restrictions, mseed_storage=waveform_dir,
                 stationxml_storage=stationxml_dir)
def download_global_data(starttime, endtime, waveform_dir, stationxml_dir,
                         stations=None, networks=None, channels=None,
                         location_priorities=None, channel_priorities=None,
                         minimum_length=0.95, reject_channels_with_gaps=True,
                         providers=None):
    """Download waveforms and station metadata over the whole globe.

    Station/network/channel lists are joined into comma-separated
    selector strings, a global-domain restriction set is built, and the
    mass downloader writes miniSEED into *waveform_dir* and StationXML
    into *stationxml_dir*.
    """
    domain = GlobalDomain()
    # Convert optional lists into the comma-separated strings that
    # Restrictions expects.
    station = list2str(stations)
    network = list2str(networks)
    channel = list2str(channels)
    print("network {}: | station: {} | channel: {} ".format(
        network, station, channel))
    # Short pause so the selection summary is visible before the
    # downloader starts writing its own output.
    time.sleep(2.0)
    restrictions = Restrictions(
        starttime=starttime,
        endtime=endtime,
        reject_channels_with_gaps=reject_channels_with_gaps,
        minimum_length=minimum_length,
        station=station,
        network=network,
        channel=channel,
        location_priorities=location_priorities,
        channel_priorities=channel_priorities)
    # A missing provider list (None, or the literal string "None" from a
    # config file) means "query every known provider".
    no_providers = providers is None or providers == "None"
    mdl = MassDownloader() if no_providers else MassDownloader(
        providers=providers)
    mdl.download(domain, restrictions,
                 mseed_storage=waveform_dir,
                 stationxml_storage=stationxml_dir)
def download_event(event: str):
    """Fetch waveforms and station XML for a single catalog event.

    Creates per-event ``mseed/`` and ``xml/`` directories and fills them
    via the obspy mass downloader, using a window from 10 minutes before
    to 130 minutes after the event's preferred origin time.
    """
    mseed_dir = f'mseed/{event}'
    xml_dir = f'xml/{event}'
    d.mkdir(mseed_dir)
    d.mkdir(xml_dir)
    # Preferred origin of the first event in the file fixes the window.
    origin_time = read_events(f'events/{event}')[0].preferred_origin().time
    rules = Restrictions(
        starttime=origin_time - 600,
        endtime=origin_time + 7800,
        reject_channels_with_gaps=True,
        minimum_length=0.95,
        channel_priorities=['BH[ZNE12]', 'HH[ZNE12]'],
        location_priorities=['', '00', '10'])
    downloader = MassDownloader()
    downloader.download(GlobalDomain(), rules,
                        mseed_storage=d.abs(mseed_dir),
                        stationxml_storage=d.abs(xml_dir))
""" Spyder Editor This is a temporary script file. """ import obspy from obspy.clients.fdsn import Client from obspy.clients.fdsn.mass_downloader import RectangularDomain, \ Restrictions, MassDownloader, GlobalDomain ## Parameters: network_code = "OO" center = "IRIS" domain = GlobalDomain() #stations_list=["AXAS1","AXAS2","AXBA1","AXCC1","AXEC1","AXEC2","AXEC3","AXID1"] stations_list = "AX*" client = Client(center) ### Restrictions restrictions = Restrictions( starttime=obspy.UTCDateTime(2015, 1, 1), endtime=obspy.UTCDateTime(2016, 1, 1), chunklength_in_sec=86400, network=network_code, station=stations_list, # The typical use case for such a data set are noise correlations where # gaps are dealt with at a later stage.