def _values_retrieved(self, vals_dict):
    """
    A values response has been received. Create and notify
    corresponding event to platform agent.
    """

    # maximum of the latest timestamps in the returned values;
    # used to prepare for the next request:
    max_ntp_ts = None
    for attr_id, attr_vals in vals_dict.iteritems():
        assert attr_vals, "Must be a non-empty array of values per _retrieve_attribute_values"

        _, ntp_ts = attr_vals[-1]
        if max_ntp_ts is None:
            max_ntp_ts = ntp_ts
        else:
            max_ntp_ts = max(max_ntp_ts, ntp_ts)

    # update _last_ts based on max_ntp_ts: note that timestamps are reported
    # in NTP so we need to convert it to ION system time for a subsequent request:
    self._last_ts = ntp_2_ion_ts(max_ntp_ts)

    # finally, notify the values event:
    driver_event = AttributeValueDriverEvent(self._platform_id,
                                             _STREAM_NAME,
                                             vals_dict)
    self._notify_driver_event(driver_event)
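# For reference, a minimal sketch of the conversion ntp_2_ion_ts is assumed to
# perform (the real helper lives elsewhere in the code base). The 2208988800 s
# offset between the NTP epoch (1900) and the Unix epoch (1970) is standard;
# that ION system time is milliseconds since the Unix epoch, returned as a
# string, is an assumption consistent with the _last_ts_millis usage below.

NTP_UNIX_EPOCH_DELTA_SECS = 2208988800  # seconds between 1900-01-01 and 1970-01-01


def ntp_2_ion_ts_sketch(ntp_ts):
    """Hypothetical sketch: NTP timestamp (seconds since 1900) -> ION system time (ms since 1970, as str)."""
    unix_secs = float(ntp_ts) - NTP_UNIX_EPOCH_DELTA_SECS
    return str(int(unix_secs * 1000))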
def _values_retrieved(self, vals_dict):
    """
    A values response has been received. Create and notify
    corresponding event to platform agent.
    """

    # update _last_ts_millis for each retrieved attribute:
    for attr_id, attr_vals in vals_dict.iteritems():
        _, ntp_ts = attr_vals[-1]

        # update _last_ts_millis based on ntp_ts: note that timestamps are reported
        # in NTP so we need to convert it to ION system time for a subsequent request:
        self._last_ts_millis[attr_id] = ntp_2_ion_ts(ntp_ts)

    # finally, notify the values event:
    driver_event = AttributeValueDriverEvent(self._platform_id,
                                             _STREAM_NAME,
                                             vals_dict)
    self._notify_driver_event(driver_event)
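# Illustrative only (hypothetical attribute names and values): the vals_dict
# consumed by both versions of _values_retrieved maps each attribute id to a
# non-empty list of (value, ntp_timestamp) pairs, with the latest sample last,
# so attr_vals[-1] picks the most recent pair whose timestamp seeds the next
# retrieval request.

example_vals_dict = {
    'input_voltage':     [(12.34, 3619467821.0), (12.35, 3619467824.0)],
    'input_bus_current': [(0.25,  3619467824.0)],
}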
def _dispatch_publication(self):
    """
    Inspects the collected data in the buffers to create and notify an
    aggregated AttributeValueDriverEvent.

    Keeps all samples for each attribute, reporting all associated timestamps
    and filling with None values for missing values at particular timestamps,
    but an attribute is included *only* if it has at least an actual value.

    @note The platform agent will translate any None entries to
          corresponding fill_values.
    """
    # step 1:
    # - collect all actual values in a dict indexed by timestamp
    # - keep track of the attributes having actual values
    by_ts = {}  # { ts0 : { attr_n : val_n, ... }, ... }
    attrs_with_actual_values = set()
    for attr_id, attr_vals in self._buffers.iteritems():
        for v, ts in attr_vals:
            if not ts in by_ts:
                by_ts[ts] = {}
            by_ts[ts][attr_id] = v

            attrs_with_actual_values.add(attr_id)

        # re-init buffer for this attribute:
        self._buffers[attr_id] = []

    if not attrs_with_actual_values:
        # No new data collected at all; nothing to publish, just return:
        log.debug("%r: _dispatch_publication: no new data collected.",
                  self._platform_id)
        return

    """
    # step 2:
    # - put None's for any missing attribute value per timestamp:
    for ts in by_ts:
        # only do this for attrs_with_actual_values:
        # (note: these attributes do have actual values, but not necessarily
        # at every reported timestamp in this cycle):
        for attr_id in attrs_with_actual_values:
            if not attr_id in by_ts[ts]:
                by_ts[ts][attr_id] = None
    """
    # step 2:
    # - put None's for any missing attribute value per timestamp:
    # EH. Here I used all attributes instead of only the measured ones
    # so the agent can properly populate rdts and construct granules.
    for ts in by_ts:
        # (note: these attributes may have actual values, but not necessarily
        # at every reported timestamp in this cycle):
        for attr_id in self._buffers.keys():
            if not attr_id in by_ts[ts]:
                by_ts[ts][attr_id] = None

    """
    # step 3:
    # - construct vals_dict for the event:
    vals_dict = {}
    for attr_id in attrs_with_actual_values:
        vals_dict[attr_id] = []
        for ts in sorted(by_ts.iterkeys()):
            val = by_ts[ts][attr_id]
            vals_dict[attr_id].append((val, ts))
    """
    # step 3:
    # - construct vals_dict for the event:
    # EH. Here I used all attributes instead of only the measured ones
    # so the agent can properly populate rdts and construct granules.
    vals_dict = {}
    for attr_id in self._buffers.keys():
        vals_dict[attr_id] = []
        for ts in sorted(by_ts.iterkeys()):
            val = by_ts[ts][attr_id]
            vals_dict[attr_id].append((val, ts))

    # finally, create and notify event:
    driver_event = AttributeValueDriverEvent(self._platform_id,
                                             _STREAM_NAME,
                                             vals_dict)

    log.debug("%r: _dispatch_publication: notifying event: %s",
              self._platform_id, driver_event)

    if log.isEnabledFor(logging.TRACE):  # pragma: no cover
        log.trace("%r: vals_dict:\n%s",
                  self._platform_id, self._pp.pformat(driver_event.vals_dict))

    self._notify_driver_event(driver_event)
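# Standalone toy reproduction of steps 1-3 above, using hypothetical attribute
# ids and integer timestamps; it mirrors the buffers -> by_ts -> vals_dict
# transformation so the None-filling behavior is easy to see. The platform
# agent later replaces the None entries with the configured fill_values.

buffers = {'attr_a': [(1.0, 100), (2.0, 200)],
           'attr_b': [(7.5, 200)]}

by_ts = {}
for attr_id, attr_vals in buffers.items():
    for v, ts in attr_vals:
        by_ts.setdefault(ts, {})[attr_id] = v

# fill None for attributes missing at a given timestamp (all buffered attrs):
for ts in by_ts:
    for attr_id in buffers:
        by_ts[ts].setdefault(attr_id, None)

vals_dict = {attr_id: [(by_ts[ts][attr_id], ts) for ts in sorted(by_ts)]
             for attr_id in buffers}

# vals_dict == {'attr_a': [(1.0, 100), (2.0, 200)],
#               'attr_b': [(None, 100), (7.5, 200)]}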
def _dispatch_publication(self):
    """
    Inspects the collected data in the buffers to create and notify an
    aggregated AttributeValueDriverEvent.

    Keeps all samples for each attribute, reporting all associated timestamps
    and filling with None values for missing values at particular timestamps,
    but an attribute is included *only* if it has at least an actual value.

    @note The platform agent will translate any None entries to
          corresponding fill_values.
    """
    log.debug("%r: _dispatch_publication: %s", self._platform_id, self._buffers)

    # step 1:
    # - collect all actual values in a dict indexed by timestamp
    # - keep track of the attributes having actual values
    by_ts = {}  # { ts0 : { attr_n : val_n, ... }, ... }
    attrs_with_actual_values = set()
    for attr_id, attr_vals in self._buffers.iteritems():
        for v, ts in attr_vals:
            if not ts in by_ts:
                by_ts[ts] = {}
            by_ts[ts][attr_id] = v

            attrs_with_actual_values.add(attr_id)

        # re-init buffer for this attribute:
        self._buffers[attr_id] = []

    if not attrs_with_actual_values:
        # No new data collected at all; nothing to publish, just return:
        log.debug("%r: _dispatch_publication: no new data collected.",
                  self._platform_id)
        return

    """
    # step 2:
    # - put None's for any missing attribute value per timestamp:
    for ts in by_ts:
        # only do this for attrs_with_actual_values:
        # (note: these attributes do have actual values, but not necessarily
        # at every reported timestamp in this cycle):
        for attr_id in attrs_with_actual_values:
            if not attr_id in by_ts[ts]:
                by_ts[ts][attr_id] = None
    """
    # step 2:
    # - put None's for any missing attribute value per timestamp:
    # EH. Here I used all attributes instead of only the measured ones
    # so the agent can properly populate rdts and construct granules.
    for ts in by_ts:
        # (note: these attributes may have actual values, but not necessarily
        # at every reported timestamp in this cycle):
        for attr_id in self._buffers.keys():
            if not attr_id in by_ts[ts]:
                by_ts[ts][attr_id] = None

    """
    # step 3:
    # - construct vals_dict for the event:
    vals_dict = {}
    for attr_id in attrs_with_actual_values:
        vals_dict[attr_id] = []
        for ts in sorted(by_ts.iterkeys()):
            val = by_ts[ts][attr_id]
            vals_dict[attr_id].append((val, ts))
    """
    # step 3:
    # - construct vals_dict for the event:
    # EH. Here I used all attributes instead of only the measured ones
    # so the agent can properly populate rdts and construct granules.
    vals_dict = {}
    for attr_id in self._buffers.keys():
        vals_dict[attr_id] = []
        for ts in sorted(by_ts.iterkeys()):
            val = by_ts[ts][attr_id]
            vals_dict[attr_id].append((val, ts))

    """
    new Step 4: MikeH
    - The buffers have data from all possible streams from this node
    - So go through and put them into their own set of buffers
      before publishing each stream
    """
    for stream_name, stream_config in self._stream_info.iteritems():
        if 'stream_def_dict' not in stream_config:
            msg = "_dispatch_publication: validate_configuration: " \
                  "'stream_def_dict' key not in configuration for stream %r" % stream_name
            log.error(msg)
            return
        else:
            # log.trace("%r: _dispatch_publication: stream name %s stream dict %s",
            #           self._platform_id, stream_name, stream_config['stream_def_dict'])
            log.trace("%r: _dispatch_publication: stream name %s stream ",
                      self._platform_id, stream_name)

        stream_vals_dict = {}
        for attr_name in stream_config['stream_def_dict']['parameter_dictionary'].iterkeys():
            log.trace("%r: _dispatch_publication: stream name %s attr %s",
                      self._platform_id, stream_name, attr_name)
            for attr_id in vals_dict:
                if attr_id == attr_name:
                    stream_vals_dict[attr_id] = vals_dict[attr_id]
                    log.trace("%r: _dispatch_publication: stream name %s attr %s copied to stream_vals_dict",
                              self._platform_id, stream_name, attr_name)

        # finally, create and notify event:
        # remove any time-slices that do not have any attribute values for this
        # stream before publishing
        compact_stream_vals_dict = self._remove_empty_ts(stream_vals_dict)
        if len(compact_stream_vals_dict) > 0:
            driver_event = AttributeValueDriverEvent(self._platform_id,
                                                     stream_name,
                                                     compact_stream_vals_dict)
            log.debug("%r: _dispatch_publication: notifying event: %s",
                      self._platform_id, driver_event)

            if log.isEnabledFor(logging.TRACE):  # pragma: no cover
                log.trace("%r: vals_dict:\n%s",
                          self._platform_id, self._pp.pformat(driver_event.vals_dict))

            self._notify_driver_event(driver_event)
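# _remove_empty_ts is referenced above but not shown here. Below is a minimal
# sketch of the behavior the caller appears to expect: drop the time-slices at
# which no attribute of this stream has an actual value, and return an empty
# dict if nothing remains. This is an assumption, not the real implementation.

def _remove_empty_ts_sketch(stream_vals_dict):
    """Hypothetical sketch: prune (value, ts) pairs at all-None timestamps."""
    # collect timestamps at which at least one attribute has an actual value
    non_empty_ts = set()
    for attr_vals in stream_vals_dict.itervalues():
        for val, ts in attr_vals:
            if val is not None:
                non_empty_ts.add(ts)

    if not non_empty_ts:
        return {}

    # keep only the (value, ts) pairs at those timestamps
    return dict(
        (attr_id, [(val, ts) for val, ts in attr_vals if ts in non_empty_ts])
        for attr_id, attr_vals in stream_vals_dict.iteritems())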