def download_satellite_image(self, metaimage, x=None, y=None, zoom=None, palette=None):
    """
    Downloads the satellite image described by the provided metadata. In case the satellite image is a tile, then
    tile coordinates and zoom must be provided. An optional palette ID can be provided, if supported by the
    downloaded preset (currently only NDVI is supported)

    :param metaimage: the satellite image's metadata, in the form of a `MetaImage` subtype instance
    :type metaimage: a `pyowm.agroapi10.imagery.MetaImage` subtype
    :param x: x tile coordinate (only needed in case you are downloading a tile image)
    :type x: int or `None`
    :param y: y tile coordinate (only needed in case you are downloading a tile image)
    :type y: int or `None`
    :param zoom: zoom level (only needed in case you are downloading a tile image)
    :type zoom: int or `None`
    :param palette: ID of the color palette of the downloaded images. Values are provided by
        `pyowm.agroapi10.enums.PaletteEnum`
    :type palette: str or `None`
    :return: a `pyowm.agroapi10.imagery.SatelliteImage` instance containing both the image's metadata and data
    """
    if palette is not None:
        assert isinstance(palette, str)
        params = dict(paletteid=palette)
    else:
        palette = PaletteEnum.GREEN
        params = dict()
    # polygon PNG
    if isinstance(metaimage, MetaPNGImage):
        prepared_url = metaimage.url
        status, data = self.http_client.get_png(
            prepared_url, params=params)
        img = Image(data, metaimage.image_type)
        return SatelliteImage(metaimage, img,
                              downloaded_on=timeutils.now(timeformat='unix'),
                              palette=palette)
    # GeoTIFF
    elif isinstance(metaimage, MetaGeoTiffImage):
        prepared_url = metaimage.url
        status, data = self.http_client.get_geotiff(
            prepared_url, params=params)
        img = Image(data, metaimage.image_type)
        return SatelliteImage(metaimage, img,
                              downloaded_on=timeutils.now(timeformat='unix'),
                              palette=palette)
    # tile PNG
    elif isinstance(metaimage, MetaTile):
        assert x is not None
        assert y is not None
        assert zoom is not None
        prepared_url = self._fill_url(metaimage.url, x, y, zoom)
        status, data = self.http_client.get_png(
            prepared_url, params=params)
        img = Image(data, metaimage.image_type)
        tile = Tile(x, y, zoom, None, img)
        return SatelliteImage(metaimage, tile,
                              downloaded_on=timeutils.now(timeformat='unix'),
                              palette=palette)
    else:
        raise ValueError("Cannot download: unsupported MetaImage subtype")
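# --- Illustrative usage (not part of the library) --------------------------
# A minimal sketch of how the method above could be called for a tile image.
# It assumes `mgr` is an instance of the class exposing download_satellite_image
# and that `metatile` is a `MetaTile` obtained from a previous imagery search;
# the tile coordinates and zoom level are arbitrary example values.
def example_download_tile(mgr, metatile):
    # tile downloads require x, y and zoom; the palette falls back to green
    # when omitted, as shown in the method body above
    return mgr.download_satellite_image(metatile, x=3, y=5, zoom=7,
                                        palette=PaletteEnum.GREEN)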
def set(self, request_url, response_json):
    """
    Checks if the maximum size of the cache has been reached and in case
    discards the least recently used item from 'usage_recency' and 'table';
    then adds the response_json to be cached to the 'table' dict using as
    a lookup key the request_url of the request that generated the value;
    finally adds it at the front of 'usage_recency'

    :param request_url: the request URL that uniquely identifies the request
        whose response is to be cached
    :type request_url: str
    :param response_json: the response JSON to be cached
    :type response_json: str
    """
    if self.size() == self._max_size:
        # cache is full: evict the least recently used item
        popped = self._usage_recency.pop()
        del self._table[popped]
    current_time = timeutils.now('unix')
    if request_url not in self._table:
        self._table[request_url] = {
            'data': response_json,
            'insertion_time': current_time
        }
        self._usage_recency.add(request_url)
    else:
        self._table[request_url]['insertion_time'] = current_time
        self._promote(request_url)
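# --- Illustrative usage (not part of the library) --------------------------
# A sketch of the eviction behaviour implemented by set() above. The cache
# constructor shown here (maximum size plus item lifetime in seconds) is an
# assumption made only for illustration.
def example_cache_eviction(cache_cls):
    cache = cache_cls(max_size=2, item_lifetime=600)   # assumed constructor
    cache.set('http://api.example/a', '{"a": 1}')
    cache.set('http://api.example/b', '{"b": 2}')
    # the cache is now full: storing a third item first evicts the least
    # recently used entry ('a'), then caches the new one
    cache.set('http://api.example/c', '{"c": 3}')
    return cache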
def is_forecast(self):
    """
    Tells if the current CO observation refers to the future with respect
    to the current date

    :return: bool
    """
    return timeutils.now(timeformat='unix') < \
        self.get_reference_time(timeformat='unix')
def is_forecast(self):
    """
    Tells if the current O3 observation refers to the future with respect
    to the current date

    :return: bool
    """
    return timeutils.now(timeformat='unix') < \
        self.get_reference_time(timeformat='unix')
def create_trigger(self, start, end, conditions, area, alert_channels=None):
    """
    Create a new trigger on the Alert API with the given parameters

    :param start: time object representing the time when the trigger begins to be checked
    :type start: int, ``datetime.datetime`` or ISO8601-formatted string
    :param end: time object representing the time when the trigger ends to be checked
    :type end: int, ``datetime.datetime`` or ISO8601-formatted string
    :param conditions: the `Condition` objects representing the set of checks to be done on weather variables
    :type conditions: list of `pyowm.utils.alertapi30.Condition` instances
    :param area: the geographic area over which conditions are checked: it can be composed by multiple geoJSON types
    :type area: list of geoJSON types
    :param alert_channels: the alert channels through which alerts originating from this `Trigger` can be consumed.
        Defaults to OWM API polling
    :type alert_channels: list of `pyowm.utils.alertapi30.AlertChannel` instances
    :returns: a *Trigger* instance
    :raises: *ValueError* when start or end epochs are `None`, when end precedes start or when conditions or area
        are empty collections
    """
    assert start is not None
    assert end is not None

    # prepare time period: the API expects millisecond offsets from "now"
    unix_start = timeformatutils.to_UNIXtime(start)
    unix_end = timeformatutils.to_UNIXtime(end)
    unix_current = timeutils.now(timeformat='unix')
    if unix_start >= unix_end:
        raise ValueError("The start timestamp must precede the end timestamp")
    delta_millis_start = timeutils.millis_offset_between_epochs(unix_current, unix_start)
    delta_millis_end = timeutils.millis_offset_between_epochs(unix_current, unix_end)
    the_time_period = {
        "start": {
            "expression": "after",
            "amount": delta_millis_start
        },
        "end": {
            "expression": "after",
            "amount": delta_millis_end
        }
    }

    assert conditions is not None
    if len(conditions) == 0:
        raise ValueError('A trigger must contain at least one condition: you provided none')
    the_conditions = [dict(name=c.weather_param, expression=c.operator, amount=c.amount) for c in conditions]

    assert area is not None
    if len(area) == 0:
        raise ValueError('The area for a trigger must contain at least one geoJSON type: you provided none')
    the_area = [a.as_dict() for a in area]

    # >>> for the moment, no specific handling for alert channels

    status, payload = self.http_client.post(
        TRIGGERS_URI,
        params={'appid': self.API_key},
        data=dict(time_period=the_time_period, conditions=the_conditions, area=the_area),
        headers={'Content-Type': 'application/json'})
    return self.trigger_parser.parse_dict(payload)
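# --- Illustrative usage (not part of the library) --------------------------
# A sketch of how create_trigger() above could be invoked. The condition and
# area objects are assumed to be instances of the support classes named in the
# docstring; the one-hour/one-day window is an example value only.
def example_create_trigger(trigger_mgr, condition, geojson_area):
    start = timeutils.now(timeformat='unix') + 3600    # starts in one hour
    end = start + 86400                                # checked for one day
    return trigger_mgr.create_trigger(start, end,
                                      conditions=[condition],
                                      area=[geojson_area])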
def actualize(self):
    """
    Removes from this forecast all the *Weather* objects having a reference
    timestamp in the past with respect to the current timestamp
    """
    current_time = timeutils.now(timeformat='unix')
    # rebuild the list instead of removing items while iterating over it,
    # which would skip the element following each removed one
    self._weathers = [w for w in self._weathers
                      if w.get_reference_time(timeformat='unix') >= current_time]
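# --- Why actualize() rebuilds the list (plain-Python illustration) ----------
# Removing items from a list while iterating over it skips the element that
# follows each removed one, which is why the method above builds a new list
# instead. A generic sketch of the pitfall:
def example_remove_while_iterating_pitfall():
    values = [1, 1, 2, 3]
    for v in values:
        if v == 1:
            values.remove(v)    # shifts the remaining items one slot left
    # values is now [1, 2, 3]: the second 1 was skipped, not removed
    return values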
def test_now(self):
    expected = datetime.now()
    result = timeutils.now()
    self.assertEqual(result.year, expected.year)
    self.assertEqual(result.month, expected.month)
    self.assertEqual(result.day, expected.day)
    self.assertEqual(result.hour, expected.hour)
    self.assertEqual(result.minute, expected.minute)
    self.assertEqual(result.second, expected.second)
def parse_JSON(self, JSON_string):
    """
    Parses an *Ozone* instance out of raw JSON data. Only certain
    properties of the data are used: if these properties are not found or
    cannot be parsed, an error is issued.

    :param JSON_string: a raw JSON string
    :type JSON_string: str
    :returns: an *Ozone* instance or ``None`` if no data is available
    :raises: *ParseResponseError* if it is impossible to find or parse the
        data needed to build the result, *APIResponseError* if the JSON
        string embeds an HTTP status error (this is an OWM web API 2.5 bug)
    """
    if JSON_string is None:
        raise parse_response_error.ParseResponseError('JSON data is None')
    d = json.loads(JSON_string)
    try:
        # -- reference time (strip away Z and T on ISO8601 format)
        ref_t = d['time'].replace('Z', '+00').replace('T', ' ')
        reference_time = timeformatutils._ISO8601_to_UNIXtime(ref_t)

        # -- reception time (now)
        reception_time = timeutils.now('unix')

        # -- location
        lon = float(d['location']['longitude'])
        lat = float(d['location']['latitude'])
        place = location.Location(None, lon, lat, None)

        # -- ozone Dobson Units value
        du = d['data']
        if du is not None:
            du_value = float(du)
        else:
            raise ValueError('No information about Ozone Dobson Units')
    except KeyError:
        raise parse_response_error.ParseResponseError(''.join(
            [__name__, ': impossible to parse Ozone']))

    return ozone.Ozone(reference_time, place, None, du_value, reception_time)
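# --- Illustrative payload (not an official API sample) ---------------------
# A minimal JSON document containing just the fields the Ozone parser above
# reads: an ISO8601 'time', a nested 'location' with coordinates, and a 'data'
# value holding the Dobson Units figure. All numbers are made up.
EXAMPLE_OZONE_JSON = '''
{
    "time": "2016-10-01T13:07:01Z",
    "location": {"longitude": 9.19, "latitude": 45.46},
    "data": 277.4
}
'''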
def parse_JSON(self, JSON_string):
    """
    Parses an *NO2Index* instance out of raw JSON data. Only certain
    properties of the data are used: if these properties are not found or
    cannot be parsed, an error is issued.

    :param JSON_string: a raw JSON string
    :type JSON_string: str
    :returns: an *NO2Index* instance or ``None`` if no data is available
    :raises: *ParseResponseError* if it is impossible to find or parse the
        data needed to build the result, *APIResponseError* if the JSON
        string embeds an HTTP status error (this is an OWM web API 2.5 bug)
    """
    if JSON_string is None:
        raise parse_response_error.ParseResponseError('JSON data is None')
    d = json.loads(JSON_string)
    try:
        # -- reference time (strip away Z and T on ISO8601 format)
        t = d['time'].replace('Z', '+00').replace('T', ' ')
        reference_time = timeformatutils._ISO8601_to_UNIXtime(t)

        # -- reception time (now)
        reception_time = timeutils.now('unix')

        # -- location
        lon = float(d['location']['longitude'])
        lat = float(d['location']['latitude'])
        place = location.Location(None, lon, lat, None)

        # -- NO2 samples
        no2_samples = [dict(label=key,
                            precision=d['data'][key]['precision'],
                            value=d['data'][key]['value'])
                       for key in d['data']]
    except KeyError:
        raise parse_response_error.ParseResponseError(''.join(
            [__name__, ': impossible to parse NO2Index']))

    return no2index.NO2Index(reference_time, place, None, no2_samples,
                             reception_time)
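# --- Illustrative payload (not an official API sample) ---------------------
# A minimal JSON document with the shape the NO2 parser above expects: every
# key under 'data' maps to a dict carrying 'precision' and 'value', and each
# becomes one labelled sample. Labels and numbers are made up.
EXAMPLE_NO2_JSON = '''
{
    "time": "2016-10-01T13:07:01Z",
    "location": {"longitude": 9.19, "latitude": 45.46},
    "data": {
        "no2": {"precision": 1.2e+15, "value": 2.5e+15},
        "no2_strat": {"precision": 1.0e+15, "value": 1.3e+15}
    }
}
'''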
def parse_JSON(self, JSON_string):
    """
    Parses a *UVIndex* instance out of raw JSON data. Only certain
    properties of the data are used: if these properties are not found or
    cannot be parsed, an error is issued.

    :param JSON_string: a raw JSON string
    :type JSON_string: str
    :returns: a *UVIndex* instance or ``None`` if no data is available
    :raises: *ParseResponseError* if it is impossible to find or parse the
        data needed to build the result, *APIResponseError* if the JSON
        string embeds an HTTP status error (this is an OWM web API 2.5 bug)
    """
    if JSON_string is None:
        raise parse_response_error.ParseResponseError('JSON data is None')
    d = json.loads(JSON_string)
    try:
        # -- reference time
        reference_time = d['date']

        # -- reception time (now)
        reception_time = timeutils.now('unix')

        # -- location
        lon = float(d['lon'])
        lat = float(d['lat'])
        place = location.Location(None, lon, lat, None)

        # -- UV intensity
        uv_intensity = float(d['value'])
    except KeyError:
        raise parse_response_error.ParseResponseError(''.join(
            [__name__, ': impossible to parse UV Index']))

    return uvindex.UVIndex(reference_time, place, uv_intensity,
                           reception_time)
def get(self, request_url):
    """
    In case of a hit, returns the JSON string which represents the OWM web
    API response to the request being identified by a specific string URL
    and updates the recency of this request.

    :param request_url: a URL that uniquely identifies the request whose
        response is to be looked up
    :type request_url: str
    :returns: a JSON str in case of cache hit or ``None`` otherwise
    """
    try:
        cached_item = self._table[request_url]
        cur_time = timeutils.now('unix')
        if cur_time - cached_item['insertion_time'] > self._item_lifetime:
            # cache item has expired
            self._clean_item(request_url)
            return None
        cached_item['insertion_time'] = cur_time  # update insertion time
        self._promote(request_url)
        return cached_item['data']
    except KeyError:
        return None
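# --- Illustrative usage (not part of the library) --------------------------
# A sketch of the hit/miss contract of get() above: a hit refreshes the item's
# recency and insertion time, while an absent or expired key yields None. The
# constructor signature is assumed, as in the earlier cache example.
def example_cache_lookup(cache_cls):
    cache = cache_cls(max_size=10, item_lifetime=600)   # assumed constructor
    cache.set('http://api.example/weather', '{"temp": 294.2}')
    hit = cache.get('http://api.example/weather')       # '{"temp": 294.2}'
    miss = cache.get('http://api.example/unknown')      # None
    return hit, miss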
def search_satellite_imagery(self, polygon_id, acquired_from, acquired_to, img_type=None, preset=None,
                             min_resolution=None, max_resolution=None, acquired_by=None,
                             min_cloud_coverage=None, max_cloud_coverage=None,
                             min_valid_data_coverage=None, max_valid_data_coverage=None):
    """
    Searches on the Agro API the metadata for all available satellite images that contain the specified polygon and
    were acquired during the specified time interval, optionally matching the specified set of filters:
    - image type (eg. GeoTIFF)
    - image preset (eg. false color, NDVI, ...)
    - min/max acquisition resolution
    - acquiring satellite
    - min/max cloud coverage on acquired scene
    - min/max valid data coverage on acquired scene

    :param polygon_id: the ID of the reference polygon
    :type polygon_id: str
    :param acquired_from: lower edge of acquisition interval, UNIX timestamp
    :type acquired_from: int
    :param acquired_to: upper edge of acquisition interval, UNIX timestamp
    :type acquired_to: int
    :param img_type: the desired file format type of the images. Allowed values are given by
        `pyowm.commons.enums.ImageTypeEnum`
    :type img_type: `pyowm.commons.databoxes.ImageType`
    :param preset: the desired preset of the images. Allowed values are given by `pyowm.agroapi10.enums.PresetEnum`
    :type preset: str
    :param min_resolution: minimum resolution for images, px/meters
    :type min_resolution: int
    :param max_resolution: maximum resolution for images, px/meters
    :type max_resolution: int
    :param acquired_by: short symbol of the satellite that acquired the image (eg. "l8")
    :type acquired_by: str
    :param min_cloud_coverage: minimum cloud coverage percentage on acquired images
    :type min_cloud_coverage: int
    :param max_cloud_coverage: maximum cloud coverage percentage on acquired images
    :type max_cloud_coverage: int
    :param min_valid_data_coverage: minimum valid data coverage percentage on acquired images
    :type min_valid_data_coverage: int
    :param max_valid_data_coverage: maximum valid data coverage percentage on acquired images
    :type max_valid_data_coverage: int
    :return: a list of `pyowm.agroapi10.imagery.MetaImage` subtype instances
    """
    assert polygon_id is not None
    assert acquired_from is not None
    assert acquired_to is not None
    assert acquired_from <= acquired_to, 'Start timestamp of acquisition window must come before its end'
    if min_resolution is not None:
        assert min_resolution > 0, 'Minimum resolution must be positive'
    if max_resolution is not None:
        assert max_resolution > 0, 'Maximum resolution must be positive'
    if min_resolution is not None and max_resolution is not None:
        assert min_resolution <= max_resolution, 'Minimum resolution must be lower than maximum resolution'
    if min_cloud_coverage is not None:
        assert min_cloud_coverage >= 0, 'Minimum cloud coverage must be non negative'
    if max_cloud_coverage is not None:
        assert max_cloud_coverage >= 0, 'Maximum cloud coverage must be non negative'
    if min_cloud_coverage is not None and max_cloud_coverage is not None:
        assert min_cloud_coverage <= max_cloud_coverage, 'Minimum cloud coverage must be lower than maximum cloud coverage'
    if min_valid_data_coverage is not None:
        assert min_valid_data_coverage >= 0, 'Minimum valid data coverage must be non negative'
    if max_valid_data_coverage is not None:
        assert max_valid_data_coverage >= 0, 'Maximum valid data coverage must be non negative'
    if min_valid_data_coverage is not None and max_valid_data_coverage is not None:
        assert min_valid_data_coverage <= max_valid_data_coverage, 'Minimum valid data coverage must be lower than maximum valid data coverage'

    # prepare params
    params = dict(appid=self.API_key, polyid=polygon_id, start=acquired_from, end=acquired_to)
    if min_resolution is not None:
        params['resolution_min'] = min_resolution
    if max_resolution is not None:
        params['resolution_max'] = max_resolution
    if acquired_by is not None:
        params['type'] = acquired_by
    if min_cloud_coverage is not None:
        params['clouds_min'] = min_cloud_coverage
    if max_cloud_coverage is not None:
        params['clouds_max'] = max_cloud_coverage
    if min_valid_data_coverage is not None:
        params['coverage_min'] = min_valid_data_coverage
    if max_valid_data_coverage is not None:
        params['coverage_max'] = max_valid_data_coverage

    # call API
    status, data = self.http_client.get_json(SATELLITE_IMAGERY_SEARCH_URI, params=params)
    result_set = SatelliteImagerySearchResultSet(polygon_id, data, timeutils.now(timeformat='unix'))

    # further filter by img_type and/or preset (if specified)
    if img_type is not None and preset is not None:
        return result_set.with_img_type_and_preset(img_type, preset)
    elif img_type is not None:
        return result_set.with_img_type(img_type)
    elif preset is not None:
        return result_set.with_preset(preset)
    else:
        return result_set.all()
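# --- Illustrative usage (not part of the library) --------------------------
# A sketch of a search using the method above: all "l8" acquisitions over a
# polygon within a fixed window and with at most 50% cloud coverage. The
# manager instance, polygon ID and timestamps are placeholder example values.
def example_search_imagery(mgr, polygon_id):
    acquired_from = 1500336000    # example UNIX timestamps
    acquired_to = 1502928000
    return mgr.search_satellite_imagery(polygon_id, acquired_from, acquired_to,
                                        acquired_by='l8',
                                        max_cloud_coverage=50)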
def __init__(self, station_id):
    assert station_id is not None
    self.station_id = station_id
    self.created_at = timeutils.now(timeformat='unix')
    self.measurements = list()
def test_now(self):
    expected = datetime.now()
    result = timeutils.now()
    self.assertTrue((result - expected) < timedelta(microseconds=100))