def test_extend(self):
    """Verify Catalog.extend() with a list, another Catalog, and bad input."""
    # Start from an empty catalog and grow it with a plain list of events.
    catalog = Catalog()
    first, second = Event(), Event()
    assert len(catalog) == 0
    catalog.extend([first, second])
    assert len(catalog) == 2
    assert catalog.events == [first, second]
    # A Catalog instance is also a valid argument for extend().
    third, fourth = Event(), Event()
    other = Catalog([third, fourth])
    assert len(catalog) == 2
    catalog.extend(other)
    assert len(catalog) == 4
    assert catalog.events == [first, second, third, fourth]
    # Anything that is neither a list nor a Catalog must raise TypeError.
    with pytest.raises(TypeError):
        catalog.extend(str)
    with pytest.raises(TypeError):
        catalog.extend(first)
    with pytest.raises(TypeError):
        catalog.extend((first, second))
def test_extend(self):
    """Tests the extend method of the Catalog object."""
    # An empty catalog is extended first with a plain list of events.
    catalog = Catalog()
    ev_a = Event()
    ev_b = Event()
    self.assertEqual(len(catalog), 0)
    catalog.extend([ev_a, ev_b])
    self.assertEqual(len(catalog), 2)
    self.assertEqual(catalog.events, [ev_a, ev_b])
    # A second Catalog is an equally acceptable argument for extend().
    ev_c = Event()
    ev_d = Event()
    second_catalog = Catalog([ev_c, ev_d])
    self.assertEqual(len(catalog), 2)
    catalog.extend(second_catalog)
    self.assertEqual(len(catalog), 4)
    self.assertEqual(catalog.events, [ev_a, ev_b, ev_c, ev_d])
    # extend() only accepts a list or a Catalog; everything else fails.
    for invalid in (str, ev_a, (ev_a, ev_b)):
        self.assertRaises(TypeError, catalog.extend, invalid)
def gcmt_catalog(
        t_start, t_end, min_latitude, max_latitude, min_longitude,
        max_longitude, latitude, longitude, radius_min, radius_max,
        d_min, d_max, mag_min, mag_max,
        link_gcmt='http://www.ldeo.columbia.edu/~gcmt/projects/CMT/catalog'):
    """
    Function for downloading data from GCMT

    Monthly NDK files are cached locally (next to the obspyDMT package if it
    is installed, otherwise in the current directory) and only downloaded
    when missing.

    :param t_start: start of the time window (UTCDateTime-parsable)
    :param t_end: end of the time window (UTCDateTime-parsable)
    :param min_latitude: southern edge of a rectangular search (or None)
    :param max_latitude: northern edge of a rectangular search (or None)
    :param min_longitude: western edge of a rectangular search (or None)
    :param max_longitude: eastern edge of a rectangular search (or None)
    :param latitude: center latitude of a radial search (or None)
    :param longitude: center longitude of a radial search (or None)
    :param radius_min: minimum distance from center, degrees (or None)
    :param radius_max: maximum distance from center, degrees (or None)
    :param d_min: minimum depth in km
    :param d_max: maximum depth in km
    :param mag_min: minimum magnitude
    :param mag_max: maximum magnitude
    :param link_gcmt: base URL of the GCMT catalog
    :return: obspy Catalog with the events that pass all filters
    """
    # for the time record
    tic = datetime.now()
    # Cache directory lives next to obspyDMT when available, else cwd.
    try:
        import obspyDMT
        dmt_path = obspyDMT.__path__[0]
    except Exception as error:
        print("WARNING: %s" % error)
        dmt_path = '.'
    gcmt_cat_path = os.path.join(dmt_path, 'gcmt_catalog')
    if not os.path.exists(gcmt_cat_path):
        os.mkdir(gcmt_cat_path)
        os.mkdir(os.path.join(gcmt_cat_path, 'NEW_MONTHLY'))
        os.mkdir(os.path.join(gcmt_cat_path, 'COMBO'))
    # creating a time list
    # BUG FIX: use floor division so delta_t stays an int under Python 3;
    # true division would make range(delta_t + 1) raise TypeError.
    delta_t = int(UTCDateTime(t_end) - UTCDateTime(t_start) + 1) // 86400
    t_list = []
    yymm = []
    for i in range(delta_t + 1):
        t_list.append(
            (UTCDateTime(t_start) + i * 60 * 60 * 24).strftime('%Y/%m/%d'))
        yy_tmp, mm_tmp, dd_tmp = t_list[i].split('/')
        yymm.append('%s%s' % (yy_tmp, mm_tmp))
    yymmset = set(yymm)
    yymmls = list(yymmset)
    yymmls.sort()
    # starting to search for all events in the time window given by the user:
    cat = Catalog()
    yy_ret = []
    mm_ret = []
    remotefile_add = False
    for i in range(len(yymmls)):
        try:
            yy = yymmls[i][0:4]
            mm = yymmls[i][4:6]
            # GCMT changed its month-name abbreviations from 2006 onwards.
            if int(yy) < 2006:
                month_year = ['jan', 'feb', 'mar', 'apr', 'may', 'june',
                              'july', 'aug', 'sept', 'oct', 'nov', 'dec']
            else:
                month_year = ['jan', 'feb', 'mar', 'apr', 'may', 'jun',
                              'jul', 'aug', 'sep', 'oct', 'nov', 'dec']
            if int(yy) >= 2005:
                new_monthly = 'NEW_MONTHLY'
                file_to_open = os.path.join(
                    gcmt_cat_path, new_monthly,
                    '%s%s.ndk' % (month_year[int(mm) - 1], yy[-2:]))
                remotefile_add = '%s/%s/%s/%s%s.ndk' \
                    % (link_gcmt, new_monthly, yy,
                       month_year[int(mm) - 1], yy[-2:])
            else:
                # Pre-2005 events come from one combined file per year.
                new_monthly = 'COMBO'
                if yy in yy_ret:
                    continue
                file_to_open = os.path.join(gcmt_cat_path, new_monthly,
                                            '%s.qml' % yy)
            if not os.path.exists(file_to_open) and not new_monthly == 'COMBO':
                print('Reading the data from GCMT webpage: %s' % yymmls[i])
                remotefile = urlopen(remotefile_add)
                remotefile_read = remotefile.readlines()
                # BUG FIX: urlopen() yields bytes under Python 3, so the
                # cache file must be written in binary mode; the with-block
                # also guarantees the file handle is closed on error.
                with open(file_to_open, 'wb') as search_fio:
                    search_fio.writelines(remotefile_read)
            print('Reading the data from local gcmt_catalog: %s' % yymmls[i])
            cat.extend(readEvents(file_to_open))
            yy_ret.append(yy)
            mm_ret.append(mm)
        except Exception as error:
            print("ERROR: %s" % error)
    print('Done reading the data from GCMT webpage.')
    toc = datetime.now()
    print('%s sec to retrieve the event info form GCMT.' % (toc - tic))
    # Narrow the catalog down to the requested time window and magnitudes.
    filt1 = 'time >= %s' % t_start
    filt2 = 'time <= %s' % t_end
    cat = cat.filter(filt1, filt2)
    filt1 = 'magnitude >= %s' % mag_min
    filt2 = 'magnitude <= %s' % mag_max
    cat = cat.filter(filt1, filt2)
    # d_min/d_max are in km; Catalog depths are stored in metres.
    filt1 = 'depth >= %s' % (float(d_min) * 1000.)
    filt2 = 'depth <= %s' % (float(d_max) * 1000.)
    cat = cat.filter(filt1, filt2)
    if None not in [min_latitude, max_latitude, min_longitude, max_longitude]:
        filt1 = 'latitude >= %s' % min_latitude
        filt2 = 'latitude <= %s' % max_latitude
        cat = cat.filter(filt1, filt2)
        filt1 = 'longitude >= %s' % min_longitude
        filt2 = 'longitude <= %s' % max_longitude
        cat = cat.filter(filt1, filt2)
    # final filtering for the remaining requests
    if None not in [latitude, longitude, radius_min, radius_max]:
        index_rm = []
        for i in range(len(cat)):
            e_lat = cat.events[i].preferred_origin().latitude or \
                cat.events[i].origins[0].latitude
            e_lon = cat.events[i].preferred_origin().longitude or \
                cat.events[i].origins[0].longitude
            dist = locations2degrees(latitude, longitude, e_lat, e_lon)
            if not radius_min <= dist <= radius_max:
                index_rm.append(i)
        # Delete from the back so earlier indices remain valid.
        for idx in sorted(index_rm, reverse=True):
            del cat[idx]
    return cat
def neic_catalog_urllib(
        t_start, t_end, min_latitude, max_latitude, min_longitude,
        max_longitude, latitude, longitude, radius_min, radius_max,
        d_min, d_max, mag_min, mag_max,
        link_neic="http://earthquake.usgs.gov/fdsnws/event/1/query.quakeml?"):
    """
    Function for downloading data from NEIC

    The request window is split into 30-day slices; each slice is fetched
    separately, stored as a temporary QuakeML file, and the files are then
    assembled into one Catalog.

    :param t_start: start of the time window (UTCDateTime-parsable)
    :param t_end: end of the time window (UTCDateTime-parsable)
    :param min_latitude: southern edge of a rectangular search (or None)
    :param max_latitude: northern edge of a rectangular search (or None)
    :param min_longitude: western edge of a rectangular search (or None)
    :param max_longitude: eastern edge of a rectangular search (or None)
    :param latitude: center latitude of a radial search (or None)
    :param longitude: center longitude of a radial search (or None)
    :param radius_min: minimum radius (or None; not forwarded to NEIC)
    :param radius_max: maximum radius, converted to km for the query
    :param d_min: minimum depth
    :param d_max: maximum depth
    :param mag_min: minimum magnitude
    :param mag_max: maximum magnitude
    :param link_neic: base URL of the NEIC fdsnws event service
    :return: obspy Catalog with all retrieved events
    """
    # BUG FIX: time.clock() was removed in Python 3.8; time.time() gives
    # the elapsed wall-clock duration this report is after.
    tic = time.time()
    dir_name = '%s_temp_xml_files' % int(UTCDateTime.now().timestamp)
    os.mkdir(dir_name)
    getVars = {
        'minmagnitude': str(mag_min),
        'maxmagnitude': str(mag_max),
        'mindepth': str(d_min),
        'maxdepth': str(d_max),
    }
    # Rectangular constraints apply only when no radial search is requested.
    if None in [latitude, longitude, radius_min, radius_max]:
        if None not in [min_latitude, max_latitude,
                        min_longitude, max_longitude]:
            getVars['minlongitude'] = str(min_longitude)
            getVars['maxlongitude'] = str(max_longitude)
            getVars['minlatitude'] = str(min_latitude)
            getVars['maxlatitude'] = str(max_latitude)
    else:
        getVars['latitude'] = str(latitude)
        getVars['longitude'] = str(longitude)
        # NOTE(review): radius_max looks like degrees converted to km
        # (1 deg ~ 111.194 km) — confirm against callers.
        getVars['maxradiuskm'] = str(float(radius_max) * 111.194)
    getVars['includeallorigins'] = 'true'
    getVars['includeallmagnitudes'] = 'true'
    getVars['producttype'] = 'moment-tensor'
    m_date = UTCDateTime(t_start)
    M_date = UTCDateTime(t_end)
    dur_event = M_date - m_date
    interval = 30. * 24. * 60. * 60.
    num_div = int(dur_event / interval)
    print('#Divisions: %s' % num_div)

    def _fetch_window(idx, win_start, win_end):
        """Download one time slice; keep the response only if it is QuakeML."""
        getVars['starttime'] = str(win_start)
        getVars['endtime'] = str(win_end)
        remotefile = link_neic + urllib.parse.urlencode(getVars)
        try:
            page = urlopen(remotefile)
            page_content = page.read()
            # BUG FIX: urlopen() returns bytes under Python 3, so both the
            # membership test and the cache file are byte-oriented here.
            if b'quakeml' in page_content:
                xml_path = os.path.join(dir_name,
                                        'temp_neic_xml_%05i.xml' % idx)
                with open(xml_path, 'wb') as fid:
                    fid.write(page_content)
            # Closed on every path (original skipped close on non-QuakeML).
            page.close()
        except Exception as error:
            print("\nWARNING: %s -- %s\n" % (error, remotefile))

    if not num_div < 1:
        # One request per full 30-day slice.
        for i in range(1, num_div + 1):
            print(i, end=',')
            sys.stdout.flush()
            _fetch_window(i, m_date + (i - 1) * interval,
                          m_date + i * interval)
    elif num_div == 0:
        # The whole window fits into a single request.
        _fetch_window(0, m_date, M_date)
    # Fetch whatever remains between the last full slice and t_end.
    final_time = m_date + num_div * interval
    if (not M_date == final_time) and (not int(dur_event / interval) == 0):
        _fetch_window(num_div + 1, final_time, M_date)
    xml_add = glob.glob(os.path.join(dir_name, 'temp_neic_xml_*.xml'))
    xml_add.sort()
    cat = Catalog()
    print('\nAssembling %s xml files...' % len(xml_add))
    for counter, x_add in enumerate(xml_add, start=1):
        print(counter, end=',')
        sys.stdout.flush()
        try:
            cat.extend(readEvents(x_add, format='QuakeML'))
        except Exception as error:
            print('[WARNING] %s' % error)
        finally:
            # Remove the temporary file whether or not parsing succeeded.
            os.remove(x_add)
    print("\ncleaning up the temporary folder.")
    os.rmdir(dir_name)
    toc = time.time()
    print('\n%s sec to retrieve the event info form NEIC.' % (toc - tic))
    return cat