def add_stations(self, stations):
    """
    Add the desired output stations to the input file generator.

    Can currently deal with SEED/XML-SEED files, StationXML files, SAC
    files, URLs, open file-like objects, JSON strings, and dictionaries
    of the following form:

        {"latitude": 123.4,
         "longitude": 123.4,
         "elevation_in_m": 123.4,
         "local_depth_in_m": 123.4,
         "id": "network_code.station_code"}

    `local_depth_in_m` is optional and will be assumed to be zero if not
    present. It denotes the burial of the sensor beneath the surface.

    If it is a SEED/XML-SEED file, all stations in it will be added.

    :type stations: List of filenames, list of dictionaries or a single
        filename, single dictionary.
    :param stations: The stations for which output files should be
        generated.
    :raises ValueError: If a station dictionary misses a required key or
        an item cannot be read by any of the supported parsers.
    """
    # Try to interpret it as JSON. A plain string is also a valid JSON
    # document, so only accept the result if it decodes to a list or a
    # dictionary.
    try:
        decoded = json.loads(stations)
    except Exception:
        pass
    else:
        if isinstance(decoded, (list, dict)):
            stations = decoded

    # Thin wrapper to enable single element treatment. Dictionaries,
    # non-iterables, and file-like objects (which are iterable but must
    # be handled as a single source) are wrapped in a list.
    if isinstance(stations, dict) or not hasattr(stations, "__iter__") or \
            (hasattr(stations, "read") and
             hasattr(stations.read, "__call__")):
        stations = [stations]

    all_stations = {}

    for station_item in stations:
        # Store the original pointer position of file-like objects so it
        # can be restored after each probing attempt below.
        original_position = None
        try:
            original_position = station_item.tell()
            station_item.seek(original_position, 0)
        except Exception:
            pass

        # Download it if it is some kind of URL.
        if isinstance(station_item, basestring) and "://" in station_item:
            station_item = io.BytesIO(urllib2.urlopen(station_item).read())

        # If it is a dict, validate the required keys and add it.
        if isinstance(station_item, dict):
            if "latitude" not in station_item or \
                    "longitude" not in station_item or \
                    "elevation_in_m" not in station_item or \
                    "id" not in station_item:
                msg = (
                    "Each station dictionary needs to at least have "
                    "'latitude', 'longitude', 'elevation_in_m', and 'id' "
                    "keys.")
                raise ValueError(msg)
            # Create new dict to not carry around any additional keys.
            stat = {
                "latitude": float(station_item["latitude"]),
                "longitude": float(station_item["longitude"]),
                "elevation_in_m": float(station_item["elevation_in_m"]),
                "id": str(station_item["id"])}
            # The local depth is optional; keep it only when present and
            # convertible to a float.
            if "local_depth_in_m" in station_item:
                try:
                    stat["local_depth_in_m"] = \
                        float(station_item["local_depth_in_m"])
                except (TypeError, ValueError):
                    pass
            all_stations[stat["id"]] = stat
            continue

        # Also accept SAC files.
        if isSAC(station_item):
            st = read(station_item)
            for tr in st:
                stat = {}
                stat["id"] = "%s.%s" % (tr.stats.network, tr.stats.station)
                stat["latitude"] = float(tr.stats.sac.stla)
                stat["longitude"] = float(tr.stats.sac.stlo)
                stat["elevation_in_m"] = float(tr.stats.sac.stel)
                stat["local_depth_in_m"] = float(tr.stats.sac.stdp)
                # -12345.0 is SAC's sentinel for an undefined header
                # value. lat/lng/elevation must be given.
                if stat["latitude"] == -12345.0 or \
                        stat["longitude"] == -12345.0 or \
                        stat["elevation_in_m"] == -12345.0:
                    warnings.warn("No coordinates for channel '%s'." %
                                  str(tr))
                    continue
                # The local depth may be neglected.
                if stat["local_depth_in_m"] == -12345.0:
                    del stat["local_depth_in_m"]
                all_stations[stat["id"]] = stat
            continue

        # Reset pointer before probing with the SEED parser.
        if original_position is not None:
            station_item.seek(original_position, 0)

        # SEED / XML-SEED.
        try:
            Parser(station_item)
            is_seed = True
        except Exception:
            is_seed = False
        # Reset again; Parser() consumed the file-like object.
        if original_position is not None:
            station_item.seek(original_position, 0)
        if is_seed is True:
            self._parse_seed(station_item, all_stations)
            continue

        # StationXML. Use a fresh name so the iterated `stations`
        # parameter is not shadowed.
        try:
            xml_stations = \
                extract_coordinates_from_StationXML(station_item)
        except Exception:
            pass
        else:
            for station in xml_stations:
                all_stations[station["id"]] = station
            continue

        msg = "Could not read %s." % station_item
        raise ValueError(msg)

    self.__add_stations(all_stations.values())
def add_stations(self, stations):
    """
    Register output stations with the input file generator.

    Accepts SEED/XML-SEED files, StationXML files, SAC files, URLs, open
    file-like objects, JSON strings, and dictionaries of the form:

        {"latitude": 123.4,
         "longitude": 123.4,
         "elevation_in_m": 123.4,
         "local_depth_in_m": 123.4,
         "id": "network_code.station_code"}

    ``local_depth_in_m`` is optional and assumed to be zero when absent;
    it denotes the burial of the sensor beneath the surface. All
    stations contained in a SEED/XML-SEED file will be added.

    :type stations: List of filenames, list of dictionaries or a single
        filename, single dictionary.
    :param stations: The stations for which output files should be
        generated.
    """
    # A JSON document that decodes to a list or a dict is used in place
    # of the raw input; anything else (including decode failures) leaves
    # the input untouched, since a bare string is itself valid JSON.
    try:
        parsed = json.loads(stations)
    except:
        pass
    else:
        if isinstance(parsed, (list, dict)):
            stations = parsed

    # Normalize single sources (dicts, scalars, file-like objects) into
    # a one-element list so the loop below handles every case.
    is_single = isinstance(stations, dict) or \
        not hasattr(stations, "__iter__") or \
        (hasattr(stations, "read") and hasattr(stations.read, "__call__"))
    if is_single:
        stations = [stations]

    collected = {}

    for item in stations:
        # Remember the stream position of file-like objects so it can be
        # restored after each probing attempt.
        pos = None
        try:
            pos = item.tell()
            item.seek(pos, 0)
        except:
            pass

        # Fetch URL inputs into an in-memory buffer.
        if isinstance(item, basestring) and "://" in item:
            item = io.BytesIO(urllib2.urlopen(item).read())

        # Plain dictionaries: validate and copy the known keys.
        if isinstance(item, dict):
            if any(key not in item for key in
                   ("latitude", "longitude", "elevation_in_m", "id")):
                msg = (
                    "Each station dictionary needs to at least have "
                    "'latitude', 'longitude', 'elevation_in_m', and 'id' "
                    "keys.")
                raise ValueError(msg)
            # Build a fresh dict so extra keys are not carried along.
            entry = {
                "latitude": float(item["latitude"]),
                "longitude": float(item["longitude"]),
                "elevation_in_m": float(item["elevation_in_m"]),
                "id": str(item["id"])}
            # Optional burial depth; silently skipped when unusable.
            try:
                entry["local_depth_in_m"] = \
                    float(item["local_depth_in_m"])
            except:
                pass
            collected[entry["id"]] = entry
            continue

        # SAC waveform files carry station coordinates in their header.
        if isSAC(item):
            for trace in read(item):
                entry = {}
                entry["id"] = "%s.%s" % (trace.stats.network,
                                         trace.stats.station)
                entry["latitude"] = float(trace.stats.sac.stla)
                entry["longitude"] = float(trace.stats.sac.stlo)
                entry["elevation_in_m"] = float(trace.stats.sac.stel)
                entry["local_depth_in_m"] = float(trace.stats.sac.stdp)
                # Coordinates are mandatory (-12345.0 means undefined).
                if entry["latitude"] == -12345.0 or \
                        entry["longitude"] == -12345.0 or \
                        entry["elevation_in_m"] == -12345.0:
                    warnings.warn("No coordinates for channel '%s'." %
                                  str(trace))
                    continue
                # The local depth is optional.
                if entry["local_depth_in_m"] == -12345.0:
                    del entry["local_depth_in_m"]
                collected[entry["id"]] = entry
            continue

        # Rewind before probing with the SEED parser.
        if pos is not None:
            item.seek(pos, 0)
        # SEED / XML-SEED.
        try:
            Parser(item)
        except:
            seed_ok = False
        else:
            seed_ok = True
        # Rewind once more; the parser consumed the stream.
        if pos is not None:
            item.seek(pos, 0)
        if seed_ok:
            self._parse_seed(item, collected)
            continue

        # StationXML.
        try:
            xml_result = extract_coordinates_from_StationXML(item)
        except:
            pass
        else:
            for station in xml_result:
                collected[station["id"]] = station
            continue

        raise ValueError("Could not read %s." % item)

    self.__add_stations(collected.values())
def add_stations(self, stations):
    """
    Add the desired output stations to the input file generator.

    Can currently deal with SEED/XML-SEED files, SAC files, and
    dictionaries of the following form:

        {"latitude": 123.4,
         "longitude": 123.4,
         "elevation_in_m": 123.4,
         "local_depth_in_m": 123.4,
         "id": "network_code.station_code"}

    `local_depth_in_m` is optional and will be assumed to be zero if not
    present. It denotes the burial of the sensor beneath the surface.

    If it is a SEED/XML-SEED file, all stations in it will be added.

    :type stations: List of filenames, list of dictionaries or a single
        filename, single dictionary.
    :param stations: The stations for which output files should be
        generated.
    :raises ValueError: If a station dictionary misses a required key.
    """
    all_stations = {}

    # Thin wrapper to enable single element treatment.
    if isinstance(stations, dict) or not hasattr(stations, "__iter__"):
        stations = [stations]

    for station_item in stations:
        # Dictionaries are validated and copied directly.
        if isinstance(station_item, dict):
            if "latitude" not in station_item or \
                    "longitude" not in station_item or \
                    "elevation_in_m" not in station_item or \
                    "id" not in station_item:
                msg = ("Each station dictionary needs to at least have "
                       "'latitude', 'longitude', 'elevation_in_m', and 'id' "
                       "keys.")
                raise ValueError(msg)
            # Create new dict to not carry around any additional keys.
            stat = {
                "latitude": float(station_item["latitude"]),
                "longitude": float(station_item["longitude"]),
                "elevation_in_m": float(station_item["elevation_in_m"]),
                "id": str(station_item["id"])}
            # The local depth is optional and defaults to zero when it
            # is missing or not convertible to a float.
            try:
                stat["local_depth_in_m"] = \
                    float(station_item["local_depth_in_m"])
            except (KeyError, TypeError, ValueError):
                stat["local_depth_in_m"] = 0.0
            # Merge into any previously seen entry for the same id.
            if stat["id"] in all_stations:
                all_stations[stat["id"]].update(stat)
            else:
                all_stations[stat["id"]] = stat
            continue

        # Check if the file is a SAC file; its header carries the
        # station coordinates.
        if isSAC(station_item):
            st = read(station_item)
            for tr in st:
                stat = {}
                stat["id"] = "%s.%s" % (tr.stats.network,
                                        tr.stats.station)
                stat["latitude"] = float(tr.stats.sac.stla)
                stat["longitude"] = float(tr.stats.sac.stlo)
                stat["elevation_in_m"] = float(tr.stats.sac.stel)
                stat["local_depth_in_m"] = float(tr.stats.sac.stdp)
                if stat["id"] in all_stations:
                    all_stations[stat["id"]].update(stat)
                else:
                    all_stations[stat["id"]] = stat
            continue

        # Check if the file is readable by the SEED/XML-SEED parser.
        try:
            Parser(station_item)
            is_seed = True
        except Exception:
            is_seed = False
        if is_seed:
            self._parse_seed(station_item, all_stations)
            continue

        # Unreadable input: keep the best-effort behavior and only warn.
        msg = "Warning: Could not read %s." % station_item
        print(msg)

    self._stations.extend(list(all_stations.values()))
    self._stations = unique_list(self._stations)