async def get_samples_by_timestamp(
    port: core_ports.BasePort,
    timestamps: List[int]
) -> Iterable[GenericJSONDict]:
    port_filter = {
        'pid': port.get_id(),
    }

    now_ms = int(time.time() * 1000)
    samples_cache = _samples_cache.setdefault(port.get_id(), {})
    INEXISTENT = {}

    query_tasks = []
    for timestamp in timestamps:
        # Look it up in cache
        sample = samples_cache.get(timestamp, INEXISTENT)
        if sample is INEXISTENT:
            filt = dict(port_filter, ts={'le': timestamp})
            task = persist.query(PERSIST_COLLECTION, filt=filt, sort='-ts', limit=1)
        else:
            task = asyncio.Future()
            task.set_result([sample])

        query_tasks.append(task)

    task_results = await asyncio.gather(*query_tasks)

    samples = []
    for i, task_result in enumerate(task_results):
        timestamp = timestamps[i]
        query_results = list(task_result)
        if query_results:
            sample = query_results[0]
            samples.append(sample)
        else:
            samples.append(None)

        # Add sample to cache if it's old enough
        if now_ms - timestamp > _CACHE_TIMESTAMP_MIN_AGE:
            samples_cache[timestamp] = samples[-1]

    return (
        {'value': r['val'], 'timestamp': r['ts']} if r is not None else None
        for r in samples
    )
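# Hypothetical, self-contained sketch of the pattern used above: cache hits are
# wrapped in already-resolved futures so they can be awaited together with real
# persist.query() calls via asyncio.gather(). The cache contents, fake_query() and
# the timestamps below are illustrative only and not part of this module.

import asyncio


async def _gather_cached_and_queried_demo() -> list:
    cache = {100: {'val': 21.5, 'ts': 100}}

    async def fake_query(ts: int) -> list:
        # Stands in for persist.query(..., limit=1): at most one record
        return [{'val': 20.0, 'ts': ts}]

    tasks = []
    for ts in (100, 200):
        cached = cache.get(ts)
        if cached is None:
            tasks.append(fake_query(ts))
        else:
            future = asyncio.get_running_loop().create_future()
            future.set_result([cached])
            tasks.append(future)

    return await asyncio.gather(*tasks)


# asyncio.run(_gather_cached_and_queried_demo())
# -> [[{'val': 21.5, 'ts': 100}], [{'val': 20.0, 'ts': 200}]]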
async def get_samples_slice(
    port: core_ports.BasePort,
    from_timestamp: Optional[int] = None,
    to_timestamp: Optional[int] = None,
    limit: Optional[int] = None,
    sort_desc: bool = False
) -> Iterable[GenericJSONDict]:
    filt = {
        'pid': port.get_id(),
    }

    if from_timestamp is not None:
        filt.setdefault('ts', {})['ge'] = from_timestamp

    if to_timestamp is not None:
        filt.setdefault('ts', {})['lt'] = to_timestamp

    sort = 'ts'
    if sort_desc:
        sort = f'-{sort}'

    results = await persist.query(PERSIST_COLLECTION, filt=filt, sort=sort, limit=limit)

    return ({'value': r['val'], 'timestamp': r['ts']} for r in results)
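# Minimal, hypothetical illustration of the filter semantics assumed above: an
# operator dict such as {'ge': ..., 'lt': ...} constrains the 'ts' field ('le' is
# used in get_samples_by_timestamp), while a plain value like 'pid' means equality.
# This in-memory matcher is not the persist driver; it only shows how such a filter
# could be interpreted.

def _matches_filter_demo(record: dict, filt: dict) -> bool:
    ops = {
        'ge': lambda value, ref: value >= ref,
        'le': lambda value, ref: value <= ref,
        'lt': lambda value, ref: value < ref,
    }

    for field, condition in filt.items():
        if isinstance(condition, dict):
            # Operator form, e.g. {'ge': 1000, 'lt': 2000}
            if not all(ops[op](record.get(field), ref) for op, ref in condition.items()):
                return False
        elif record.get(field) != condition:
            # Plain value means equality
            return False

    return True


# _matches_filter_demo({'pid': 'p1', 'ts': 1500}, {'pid': 'p1', 'ts': {'ge': 1000, 'lt': 2000}}) -> True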
async def on_value_change(
    self,
    event: core_events.Event,
    port: core_ports.BasePort,
    old_value: NullablePortValue,
    new_value: NullablePortValue,
    attrs: Attributes
) -> None:
    # When period is specified, periodic_send_values() will take care of sending values
    if self._period is not None:
        return

    # Look up port id -> field number mapping; if not present, values for this port are not configured for sending
    field_no = self._fields.get(port.get_id())
    if field_no is None:
        return

    self._values_cache[field_no] = new_value

    # Don't send samples more often than min_period
    now = time.time()
    if now - self._last_send_time < self._min_period:
        return

    self._last_send_time = now
    created_at = datetime.datetime.fromtimestamp(event.get_timestamp(), tz=pytz.UTC)

    try:
        await self.send_values(self._values_cache, created_at)
    except Exception as e:
        self.error('sending values failed: %s', e, exc_info=True)

    self._values_cache = {}
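# Hypothetical, stand-alone sketch of the min_period gate applied above: calls that
# arrive closer together than min_period seconds are dropped. The RateGate name is
# illustrative only and not part of this module.

import time


class RateGate:
    def __init__(self, min_period: float) -> None:
        self._min_period = min_period
        self._last_time = 0.0

    def allow(self) -> bool:
        # Return True (and record the time) only if at least min_period has elapsed
        now = time.time()
        if now - self._last_time < self._min_period:
            return False

        self._last_time = now
        return True


# gate = RateGate(min_period=5)
# gate.allow() -> True; an immediate second call -> False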
async def save_sample(port: core_ports.BasePort, timestamp: int) -> None:
    value = port.get_last_read_value()
    if value is None:
        logger.debug('skipping null sample of %s (timestamp = %s)', port, timestamp)
        return

    logger.debug(
        'saving sample of %s (value = %s, timestamp = %s)',
        port, json_utils.dumps(value), timestamp
    )

    record = {'pid': port.get_id(), 'val': value, 'ts': timestamp}
    await persist.insert(PERSIST_COLLECTION, record)
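# Hypothetical helper showing the record shape used in this module: save_sample()
# stores {'pid', 'val', 'ts'}, while the query helpers above expose each record as
# {'value', 'timestamp'}. This converter simply mirrors their generator expressions
# and is illustrative only.

from typing import Optional


def _record_to_sample_demo(record: Optional[dict]) -> Optional[dict]:
    # None stays None, matching the behavior of get_samples_by_timestamp()
    if record is None:
        return None

    return {'value': record['val'], 'timestamp': record['ts']}


# _record_to_sample_demo({'pid': 'temperature', 'val': 21.5, 'ts': 1700000000000})
# -> {'value': 21.5, 'timestamp': 1700000000000}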
async def check_loops(port: core_ports.BasePort, expression: Expression) -> None:
    seen_ports = {port}

    async def check_loops_rec(level: int, e: Expression) -> int:
        if isinstance(e, PortValue):
            p = e.get_port()
            if not p:
                return 0

            # A loop is detected when we stumble upon the initial port at a level deeper than 1
            if port is p and level > 1:
                return level

            # Avoid visiting the same port twice
            if p in seen_ports:
                return 0

            seen_ports.add(p)

            expr = await p.get_expression()
            if expr:
                lv = await check_loops_rec(level + 1, expr)
                if lv:
                    return lv

            return 0

        elif isinstance(e, Function):
            for arg in e.args:
                lv = await check_loops_rec(level, arg)
                if lv:
                    return lv

        return 0

    if await check_loops_rec(1, expression) > 1:
        raise CircularDependency(port.get_id())
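# Hypothetical, self-contained sketch of the cycle check above, reduced to a plain
# dependency mapping (port id -> ids referenced by its expression). The names below
# are illustrative; the real check walks Expression/PortValue/Function objects.

from typing import Dict, List, Set


def _has_loop_demo(start: str, deps: Dict[str, List[str]]) -> bool:
    seen: Set[str] = {start}

    def rec(level: int, node: str) -> int:
        for ref in deps.get(node, []):
            # Reaching the start node at a level deeper than 1 means a loop;
            # a direct self-reference at level 1 is allowed, as in check_loops_rec()
            if ref == start and level > 1:
                return level

            # Avoid visiting the same node twice
            if ref in seen:
                continue

            seen.add(ref)
            found = rec(level + 1, ref)
            if found:
                return found

        return 0

    return rec(1, start) > 1


# _has_loop_demo('a', {'a': ['b'], 'b': ['a']}) -> True   (a -> b -> a)
# _has_loop_demo('a', {'a': ['a']}) -> False              (self-reference is allowed)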