def _extract_index_values_seed(filename):
    """
    Read a (dataless) SEED file and extract index keys per channel.

    :param filename: Path to the SEED file to parse.
    :return: List of per-channel records of the form
        ``[channel_id, start timestamp (int),
        end timestamp (int or None), latitude, longitude,
        elevation_in_m, local_depth_in_m]``.
    :raises StationCacheError: If the file cannot be parsed as SEED.
    """
    try:
        p = Parser(filename)
    # Catch Exception instead of a bare except so SystemExit and
    # KeyboardInterrupt still propagate; chain the original cause so the
    # real parse error is visible in the traceback.
    except Exception as e:
        msg = "Not a valid SEED file?"
        raise StationCacheError(msg) from e
    channels = p.getInventory()["channels"]
    return [
        [
            _i["channel_id"],
            int(_i["start_date"].timestamp),
            # Open-ended epochs have no end date; keep them as None.
            int(_i["end_date"].timestamp) if _i["end_date"] else None,
            _i["latitude"],
            _i["longitude"],
            _i["elevation_in_m"],
            _i["local_depth_in_m"],
        ]
        for _i in channels
    ]
def stats_from_dataless(self, metadata_path=None):
    """
    Return a (1, N) shaped numpy array of station names read from a
    dataless SEED file.

    :param metadata_path: Path to the dataless SEED file. Falls back to
        ``self.metadata_path`` when not given.
    """
    path = metadata_path if metadata_path is not None \
        else self.metadata_path
    inventory = Parser(path).getInventory()
    return np.asarray(
        [station['station_id'] for station in inventory['stations']])
def stats_from_dataless(self, metadata_path=None):
    """
    Extract every station name from a dataless SEED file and return
    them as a (1, N) shaped numpy array.

    :param metadata_path: Optional path to the dataless SEED file;
        defaults to ``self.metadata_path``.
    """
    if metadata_path is None:
        metadata_path = self.metadata_path
    parser = Parser(metadata_path)
    inventory = parser.getInventory()
    station_ids = []
    for entry in inventory['stations']:
        station_ids.append(entry['station_id'])
    return np.asarray(station_ids)
def _extract_index_values_seed(self, filename):
    """
    Read a SEED file and extract index keys per channel.

    :param filename: Path to the SEED file to parse.
    :return: List of per-channel records of the form
        ``[channel_id, start timestamp (int),
        end timestamp (int or None), latitude, longitude,
        elevation_in_m, local_depth_in_m]``.
    :raises ValueError: If the file cannot be parsed as SEED.
    """
    try:
        p = Parser(filename)
    # Narrow the bare except to Exception so SystemExit and
    # KeyboardInterrupt still propagate; chain the cause so the original
    # parse error shows up in the traceback.
    except Exception as e:
        msg = "Could not read SEED file '%s'." % filename
        raise ValueError(msg) from e
    channels = p.getInventory()["channels"]
    return [[
        _i["channel_id"],
        int(_i["start_date"].timestamp),
        # Open-ended epochs have no end date; keep them as None.
        int(_i["end_date"].timestamp) if _i["end_date"] else None,
        _i["latitude"],
        _i["longitude"],
        _i["elevation_in_m"],
        _i["local_depth_in_m"]
    ] for _i in channels]
def locs_from_dataless(self, metadata_path=None):
    """
    Return a numpy array with one row per channel and columns
    (longitude, latitude, elevation in metres), read from a dataless
    SEED file. Coordinates are decimal degrees.

    :param metadata_path: Path to the dataless SEED file; defaults to
        ``self.metadata_path``.
    """
    path = self.metadata_path if metadata_path is None else metadata_path
    channels = Parser(path).getInventory()['channels']
    lats = np.asarray([float(c['latitude']) for c in channels])
    lons = np.asarray([float(c['longitude']) for c in channels])
    elevations = np.asarray(
        [float(c['elevation_in_m']) for c in channels])
    return np.column_stack((lons, lats, elevations))
def _read_SEED(string_io):
    """
    Attempt to read the file as a SEED file.

    :param string_io: File-like object containing the candidate SEED
        data.
    :return: List of per-channel dictionaries (network, station,
        location, channel, coordinates, format, ...) or ``False`` if the
        input is not a valid SEED file.
    :raises InvalidObjectError: If a channel's coordinates cannot be
        resolved (e.g. overlapping time periods in the SEED file).
    """
    try:
        parser = Parser(string_io)
    # Narrowed from a bare except: only parse failures should mean
    # "not a SEED file"; SystemExit/KeyboardInterrupt must propagate.
    except Exception:
        return False
    if len(str(parser)) == 0:
        return False
    channels = parser.getInventory()["channels"]
    for channel in channels:
        channel_id = channel.pop("channel_id")
        del channel["sampling_rate"]
        net, sta, loc, cha = channel_id.split(".")
        channel["network"] = net
        channel["station"] = sta
        channel["location"] = loc
        channel["channel"] = cha
        # Pick a representative instant inside the channel's epoch: the
        # midpoint, or two days past the start for open-ended epochs.
        if not channel["end_date"]:
            time = channel["start_date"] + 2 * 86400
        else:
            time = channel["start_date"] + \
                0.5 * (channel["end_date"] - channel["start_date"])
        try:
            location = parser.getCoordinates(channel_id, time)
            channel["latitude"] = location["latitude"]
            channel["longitude"] = location["longitude"]
            channel["elevation"] = location["elevation"]
            channel["local_depth"] = location["local_depth"]
        # Narrowed from a bare except; chain the cause so the original
        # lookup failure remains visible.
        except Exception as e:
            msg = ("Cannot retrieve location for channel. This happens when "
                   "overlapping time periods are defined in the SEED file. "
                   "Please fix the file. Contents of the file:\n")
            msg += str(parser)
            raise InvalidObjectError(msg) from e
        channel["format"] = parser._format
    return channels
def locs_from_dataless(self, metadata_path=None):
    """
    Read a dataless SEED file and return a numpy array whose rows are
    (longitude, latitude, elevation in metres) per channel, in decimal
    degrees.

    :param metadata_path: Optional path to the dataless SEED file;
        defaults to ``self.metadata_path``.
    """
    if metadata_path is None:
        metadata_path = self.metadata_path
    sp = Parser(metadata_path)
    metadata = sp.getInventory()
    channels = metadata['channels']
    return np.column_stack((
        [float(ch['longitude']) for ch in channels],
        [float(ch['latitude']) for ch in channels],
        [float(ch['elevation_in_m']) for ch in channels],
    ))
# Finalize and persist the previous plot.
plt.grid()
plt.savefig("/Users/lion/Desktop/SourceRadius.pdf")


# NOTE(review): indentation was lost in this chunk; the statements below
# are assumed to live inside the __main__ guard — confirm against the
# original file. The final for-loop's body is outside this view.
if __name__ == "__main__":
    # Read all instrument responses.
    widgets = ['Parsing instrument responses...', progressbar.Percentage(),
               ' ', progressbar.Bar()]
    pbar = progressbar.ProgressBar(widgets=widgets,
                                   maxval=len(STATION_FILES)).start()
    parsers = {}
    # Read all waveform files.
    for _i, xseed in enumerate(STATION_FILES):
        pbar.update(_i)
        parser = Parser(xseed)
        channels = [c['channel_id']
                    for c in parser.getInventory()['channels']]
        # Map every channel id to the parser that provides its response.
        parsers_ = dict.fromkeys(channels, parser)
        # Warn (but continue) when two metadata files define the same
        # channel — the later file wins in the update below.
        if any([k in parsers for k in parsers_.keys()]):
            msg = "Channel(s) defined in more than one metadata file."
            warnings.warn(msg)
        parsers.update(parsers_)
    pbar.finish()
    # Parse all waveform files.
    widgets = ['Indexing waveform files... ', progressbar.Percentage(),
               ' ', progressbar.Bar()]
    pbar = progressbar.ProgressBar(widgets=widgets,
                                   maxval=len(WAVEFORM_FILES)).start()
    waveform_index = {}
    # Read all waveform files.
    for _i, waveform in enumerate(WAVEFORM_FILES):
# Geographic bounding box (decimal degrees) — presumably the map extent;
# a matching minlatitude is not visible in this chunk, verify upstream.
minlongitude = 110.0
maxlatitude = 0.0
maxlongitude = 160.0

plt.figure(1)
# One colour per network so stations can be told apart when plotted.
colours = ["black", "blue", "green", "yellow", "purple", "orange",
           "white", "red", "brown"]

dataless_files = glob.glob(
    '/home/boland/Dropbox/University/UniMelb/AGOS/PROGRAMS/dataless/*.dataless')

for index, files in enumerate(dataless_files):
    # Network code is the first dot-separated component of the filename.
    network = os.path.basename(files).split('.')[0]
    sp = Parser(files)
    info = sp.getInventory()
    # Keep only channels whose code ends in "BHZ" (vertical broadband).
    coordinates = [(i['longitude'], i['latitude'], i['channel_id'])
                   for i in info['channels'][:]
                   if i['channel_id'][-3:] == "BHZ"]

#dataless_inventories = []
#if USE_DATALESSPAZ:
#    with warnings.catch_warnings():
#        warnings.simplefilter('ignore')
#        dataless_inventories = psstation.get_dataless_inventories(DATALESS_DIR,
#                                                                  verbose=True)
#    info = dataless_inventories.getInventory()
#    coordinates = [(i['longitude'], i['latitude'], i['channel_id'])
plt.figure(1)
# One colour per network so stations can be told apart when plotted.
colours = [
    "black", "blue", "green", "yellow", "purple", "orange", "white",
    "red", "brown"
]
dataless_files = glob.glob(
    '/home/boland/Dropbox/University/UniMelb/AGOS/PROGRAMS/dataless/*.dataless'
)

for index, files in enumerate(dataless_files):
    # Network code is the first dot-separated component of the filename.
    network = os.path.basename(files).split('.')[0]
    sp = Parser(files)
    info = sp.getInventory()
    # Keep only channels whose code ends in "BHZ" (vertical broadband).
    coordinates = [(i['longitude'], i['latitude'], i['channel_id'])
                   for i in info['channels'][:]
                   if i['channel_id'][-3:] == "BHZ"]

#dataless_inventories = []
#if USE_DATALESSPAZ:
#    with warnings.catch_warnings():
#        warnings.simplefilter('ignore')
#        dataless_inventories = psstation.get_dataless_inventories(DATALESS_DIR,
#                                                                  verbose=True)
#    info = dataless_inventories.getInventory()
#    coordinates = [(i['longitude'], i['latitude'], i['channel_id'])
def get_station_details(self, request, network, station):
    """
    Collect all details about a single station, including per-channel
    metadata, and return them formatted for the response.

    :param request: Current request object, forwarded to
        ``formatResults``.
    :param network: Network code of the station to look up.
    :param station: Station code of the station to look up.
    :raises NotFoundError: If no station with the given codes exists.
    """
    session = self.env.db.session(bind=self.env.db.engine)
    try:
        query = session.query(StationObject)\
            .filter(StationObject.network == network)\
            .filter(StationObject.station == station).one()
    except sqlalchemy.orm.exc.NoResultFound:
        session.close()
        msg = "Station %s.%s could not be found." % (network, station)
        raise NotFoundError(msg)
    # Make sure the session is released even if metadata parsing below
    # raises (previously it leaked on any error past this point).
    try:
        result = {
            "network_code": query.network,
            "network_name": "",
            "station_code": query.station,
            "station_name": "",
            "latitude": query.latitude,
            "longitude": query.longitude,
            "elevation_in_m": query.elevation_in_m,
            "local_depth_in_m": query.local_depth_in_m,
            "channels": []}
        # Also parse information about all channels.
        for channel in query.channel:
            md = channel.channel_metadata
            if md:
                md = md[0]
            info = {
                "channel_code": channel.channel,
                "location_code": channel.location,
                "start_date": str(md.starttime) if md else None,
                # BUG FIX: end_date was previously populated from
                # md.starttime. Assumes the metadata row exposes
                # `endtime` alongside `starttime` — confirm against the
                # model definition.
                "end_date": str(md.endtime) if md else None,
                "instrument": "",
                "sampling_rate": "",
                "format": md.format if md else None,
                "channel_filepath_id": md.filepath_id if md else None}
            # Attempt to get long descriptions for the station and
            # network codes. This is only possible for SEED and XSEED
            # files.
            if info["format"] and \
                    info["format"].lower() in ["seed", "xseed"]:
                parser = Parser(md.filepath.filepath)
                inv = parser.getInventory()
                # Distinct loop variable names below avoid clobbering
                # the outer `network`/`station`/`channel` bindings.
                if not result["network_name"] and inv["networks"]:
                    for net_info in inv["networks"]:
                        if net_info["network_code"] != \
                                result["network_code"]:
                            continue
                        result["network_name"] = \
                            net_info["network_name"]
                        break
                if not result["station_name"] and inv["stations"]:
                    for sta_info in inv["stations"]:
                        station_code = \
                            sta_info["station_id"].split(".")[1]
                        if station_code != result["station_code"]:
                            continue
                        result["station_name"] = \
                            sta_info["station_name"]
                for cha_info in inv["channels"]:
                    location_code, channel_code = \
                        cha_info["channel_id"].split(".")[2:]
                    if location_code == info["location_code"] and \
                            channel_code == info["channel_code"]:
                        info["start_date"] = str(cha_info["start_date"])
                        info["end_date"] = str(cha_info["end_date"])
                        info["instrument"] = cha_info["instrument"]
                        info["sampling_rate"] = \
                            cha_info["sampling_rate"]
            result["channels"].append({"channel": info})
        return formatResults(request, [result])
    finally:
        session.close()