async def read_device(device_id: str) -> List[Dict[str, Any]]:
    """Generate the readings response data for the specified device.

    Args:
        device_id: The ID of the device to get readings for.

    Returns:
        A list of dictionary representations of device reading response(s).

    Raises:
        errors.NotFound: No plugin manages the specified device.
        errors.ServerError: The gRPC read request failed.
    """
    logger.info('issuing command', command='READ DEVICE', device_id=device_id)

    owning_plugin = await cache.get_plugin(device_id)
    if owning_plugin is None:
        raise errors.NotFound(f'plugin not found for device {device_id}', )

    try:
        with owning_plugin as client:
            readings = [
                reading_to_dict(r) for r in client.read(device_id=device_id)
            ]
    except Exception as e:
        raise errors.ServerError(
            'error while issuing gRPC request: read device',
        ) from e

    logger.debug('got readings', count=len(readings), command='READ DEVICE')
    return readings
async def transaction(transaction_id: str) -> Dict[str, Any]:
    """Generate the transaction response data.

    Args:
        transaction_id: The ID of the transaction to get the status of.

    Returns:
        A dictionary representation of the transaction status response.

    Raises:
        errors.NotFound: The transaction is not cached, or the plugin it
            references is not registered.
        errors.ServerError: The cached transaction is malformed, or the
            gRPC status request failed.
    """
    logger.info('issuing command', command='TRANSACTION')

    cached = await cache.get_transaction(transaction_id)
    if not cached:
        raise errors.NotFound(f'transaction not found: {transaction_id}', )

    plugin_id = cached.get('plugin')
    device = cached.get('device')
    if not plugin_id:
        raise errors.ServerError(
            f'malformed cached transaction ({transaction_id}): "plugin" not defined'
        )

    source = plugin.manager.get(plugin_id)
    if not source:
        raise errors.NotFound(
            f'plugin not found for transaction: {plugin_id}',
        )

    try:
        logger.debug(
            'getting transaction info',
            command='TRANSACTION',
            device=device,
            plugin=plugin_id,
            txn_id=transaction_id,
        )
        with source as client:
            response = client.transaction(transaction_id)
    except Exception as e:
        raise errors.ServerError(
            'error while issuing gRPC request: transaction',
        ) from e

    status = synse_grpc.utils.to_dict(response)
    status['device'] = device
    utils.normalize_write_ctx(status)
    return status
def test_make_response(self):
    """A ServerError renders the expected HTTP error response dict."""
    err = errors.ServerError('context error message')

    actual = err.make_response()

    expected = {
        'http_code': 500,
        'description': 'error processing the request',
        'timestamp': '2019-04-22T13:30:00Z',
        'context': 'context error message',
    }
    assert isinstance(actual, dict)
    assert actual == expected
def run(self) -> None:
    """Run the thread.

    Opens the plugin's gRPC read stream for the configured device IDs
    (``self.ids``) and tag groups (``self.tag_groups``) and puts each
    reading onto the shared queue (``self.q``) as a dictionary.

    The loop terminates when the stream ends, when ``self.event`` is set
    (cooperative cancellation), or when an error is raised.

    Raises:
        errors.ServerError: Any failure while opening or consuming the
            stream is wrapped and re-raised as a server error.
    """
    logger.info('running Stream thread', plugin=self.plugin.id)
    try:
        with self.plugin as client:
            for reading in client.read_stream(devices=self.ids, tag_groups=self.tag_groups):
                self.q.put(reading_to_dict(reading))

                # Important: we need to check if the thread event is set -- this
                # allows the thread to be cancellable.
                #
                # NOTE(review): the event is only checked after a reading is
                # received, so cancellation is deferred until the stream next
                # yields a reading -- confirm this latency is acceptable.
                if self.event.is_set():
                    logger.info('stream thread cancelled', plugin=self.plugin.id)
                    break
    except Exception as e:
        raise errors.ServerError(
            'error while issuing gRPC request: read stream',
        ) from e
async def plugin(plugin_id: str) -> Dict[str, Any]:
    """Generate the plugin response data.

    Args:
        plugin_id: The ID of the plugin to get information for.

    Returns:
        A dictionary representation of the plugin response.

    Raises:
        errors.NotFound: No plugin with the given ID is registered.
        errors.ServerError: The gRPC health request failed.
    """
    logger.info('issuing command', command='PLUGIN', plugin_id=plugin_id)

    # If there are no plugins registered, re-registering to ensure
    # the most up-to-date plugin state.
    if not manager.has_plugins():
        manager.refresh()

    target = manager.get(plugin_id)
    if target is None:
        raise errors.NotFound(f'plugin not found: {plugin_id}')

    try:
        with target as client:
            health = client.health()
    except Exception as e:
        raise errors.ServerError(
            'error while issuing gRPC request: plugin health'
        ) from e

    # Start from the plugin metadata and layer on the runtime details.
    response = dict(target.metadata)
    response.update({
        'active': target.active,
        'network': {
            'address': target.address,
            'protocol': target.protocol,
        },
        'version': target.version,
        'health': utils.to_dict(health),
    })
    return response
async def read_cache(start: str = None, end: str = None) -> AsyncIterable:
    """Generate the readings response data for the cached readings.

    Args:
        start: An RFC3339 formatted timestamp defining the starting bound on
            the cache data to return. An empty string or None designates no
            starting bound. (default: None)
        end: An RFC3339 formatted timestamp defining the ending bound on
            the cache data to return. An empty string or None designates no
            ending bound. (default: None)

    Yields:
        A dictionary representation of a device reading response.

    Raises:
        errors.ServerError: A gRPC cached-read request failed.
    """
    logger.info('issuing command', command='READ CACHE', start=start, end=end)

    # FIXME: this could benefit from being async. this would require the plugin
    # api client to provide async behaviors as well.
    for member in plugin.manager:
        # Skip plugins that are not currently active.
        if not member.active:
            logger.debug(
                'plugin not active, will not read its devices',
                plugin=member.tag, plugin_id=member.id,
            )
            continue

        logger.debug('getting cached readings for plugin', plugin=member.tag, command='READ CACHE')
        try:
            with member as client:
                for cached_reading in client.read_cache(start=start, end=end):
                    yield reading_to_dict(cached_reading)
        except Exception as e:
            raise errors.ServerError(
                'error while issuing gRPC request: read cache',
            ) from e
async def write_sync(device_id: str, payload: Union[Dict, List[Dict]]) -> List[Dict[str, Any]]:
    """Generate the synchronous write response data.

    Args:
        device_id: The ID of the device to write to.
        payload: The data to write to the device.

    Returns:
        A list of dictionary representations of synchronous write response(s).

    Raises:
        errors.NotFound: No plugin manages the specified device.
        errors.ServerError: The gRPC write request failed.
    """
    logger.info(
        'issuing command', command='WRITE SYNC',
        device_id=device_id, payload=payload,
    )

    # Named 'p' (not 'plugin') so the local does not shadow the 'plugin'
    # module, which sibling handlers in this module reference directly.
    p = await cache.get_plugin(device_id)
    if p is None:
        raise errors.NotFound(f'plugin not found for device {device_id}', )

    response = []
    try:
        with p as client:
            for status in client.write_sync(device_id=device_id, data=payload):
                # Cache the transaction so its status can be looked up later.
                await cache.add_transaction(status.id, device_id, p.id)

                s = grpc_utils.to_dict(status)
                s['device'] = device_id
                utils.normalize_write_ctx(s)
                response.append(s)
    except Exception as e:
        raise errors.ServerError(
            'error while issuing gRPC request: sync write',
        ) from e
    return response
def _read_plugins(plugin_id: Optional[str]):
    """Yield the plugins eligible for a read, honoring the optional plugin ID filter.

    Inactive plugins and plugins excluded by the filter are skipped (with a
    debug log entry for each skip).
    """
    for p in plugin.manager:
        if plugin_id and p.id != plugin_id:
            logger.debug(
                'skipping plugin for read - plugin filter set',
                filter=plugin_id, skipped=p.id,
            )
            continue
        if not p.active:
            logger.debug(
                'plugin not active, will not read its devices',
                plugin=p.tag, plugin_id=p.id,
            )
            continue
        yield p


async def read(
        ns: str,
        tag_groups: Union[List[str], List[List[str]]],
        plugin_id: Optional[str] = None,
) -> List[Dict[str, Any]]:
    """Generate the readings response data.

    Args:
        ns: The default namespace to use for tags which do no specify one.
            If all tags specify a namespace, or no tags are defined, this
            is ignored.
        tag_groups: The tags groups used to filter devices. If no tag groups
            are given (and thus no tags), no filtering is done.
        plugin_id: The ID of the plugin to get device readings from. If not
            specified, all plugins are considered valid for reading.

    Returns:
        A list of dictionary representations of device reading response(s).

    Raises:
        errors.ServerError: A gRPC read request failed.
    """
    logger.info('issuing command', command='READ', ns=ns, tag_groups=tag_groups)

    # If there are no tags specified, read with no tag filter.
    if len(tag_groups) == 0:
        logger.debug('no tags specified, reading with no tag filter', command='READ')
        readings = []
        for p in _read_plugins(plugin_id):
            try:
                with p as client:
                    for reading in client.read():
                        readings.append(reading_to_dict(reading))
            except Exception as e:
                raise errors.ServerError(
                    'error while issuing gRPC request: read') from e
        logger.debug('got readings', count=len(readings), command='READ')
        return readings

    # Otherwise, there is at least one tag group. We need to issue a read
    # request for each group and collect the results of each group. The
    # provided tag groups may take the form of a List[str] in the case of a
    # single tag group, or a List[List[str]] in the case of multiple groups.
    if all(isinstance(x, str) for x in tag_groups):
        tag_groups = [tag_groups]

    results = {}
    for group in tag_groups:
        logger.debug('parsing tag groups', command='READ', group=group)

        # Apply the default namespace to the tags in the group which do not
        # have any namespace defined. Build a new list rather than assigning
        # into `group` so the caller's tag_groups are not mutated as a side
        # effect of the read.
        tags = [t if '/' in t else f'{ns}/{t}' for t in group]

        for p in _read_plugins(plugin_id):
            try:
                with p as client:
                    for r in client.read(tags=tags):
                        # Key on id+type+timestamp so readings matched by
                        # multiple tag groups are de-duplicated.
                        results[f'{r.id}{r.type}{r.timestamp}'] = reading_to_dict(r)
            except Exception as e:
                raise errors.ServerError(
                    'error while issuing gRPC request: read') from e

    readings = list(results.values())
    logger.debug('got readings', count=len(readings), command='READ')
    return readings
async def scan(
        ns: str,
        tag_groups: List[List[str]],
        sort: str,
        force: bool = False,
) -> List[Dict[str, Any]]:
    """Generate the scan response data.

    Args:
        ns: The default namespace to use for tags which do not specify one.
            If all tags specify a namespace, or no tags are defined, this
            is ignored.
        tag_groups: The tags groups used to filter devices. If no tag groups
            are given (and thus no tags), no filtering is done.
        sort: The fields to sort by, as a comma-separated string of device
            attribute names in descending sort priority.
        force: Option to force rebuild the internal device cache.
            (default: False)

    Returns:
        A list of dictionary representations of device summary response(s).

    Raises:
        errors.ServerError: The device cache could not be rebuilt or queried.
        errors.InvalidUsage: A sort key does not name a device attribute.
    """
    logger.info(
        'issuing command', command='SCAN',
        ns=ns, tag_groups=tag_groups, sort=sort, force=force,
    )

    # If the force flag is set, rebuild the internal device cache. This
    # will ensure everything is up to date, but will ultimately make the
    # request take longer to fulfill.
    if force:
        logger.debug('forced scan: rebuilding device cache', command='SCAN')
        try:
            await cache.update_device_cache()
        except Exception as e:
            raise errors.ServerError('failed to rebuild device cache') from e

    # If no tags are specified, get devices with no tag filter.
    if len(tag_groups) == 0:
        logger.debug('getting devices with no tag filter', command='SCAN')
        try:
            devices = await cache.get_devices()
        except Exception as e:
            logger.exception(e)
            raise errors.ServerError(
                'failed to get all devices from cache') from e
    else:
        # Otherwise, there is at least one tag group. We need to get the
        # devices for each tag group and collect the results of each group.
        results = {}
        logger.debug('parsing tag groups', command='SCAN')
        for group in tag_groups:
            # Apply the default namespace to the tags in the group which do
            # not have any namespace defined. Build a new list rather than
            # assigning into `group` so the caller's tag_groups are not
            # mutated as a side effect of the scan.
            tags = [t if '/' in t else f'{ns}/{t}' for t in group]

            try:
                device_group = await cache.get_devices(*tags)
            except Exception as e:
                logger.exception(e)
                raise errors.ServerError(
                    'failed to get devices from cache') from e

            # Key on device ID so devices matched by multiple tag groups
            # are de-duplicated.
            for device in device_group:
                results[device.id] = device
        devices = list(results.values())

    # Sort the devices based on the sort string. There may be multiple
    # components in the sort string separated by commas. The order in which
    # they are listed is equivalent to the order of their sort priority.
    sort_keys = sort.split(',')
    try:
        logger.debug('sorting devices', command='SCAN')
        sorted_devices = sorted(
            devices,
            key=lambda dev: tuple(getattr(dev, key) for key in sort_keys),
        )
    except AttributeError as e:
        raise errors.InvalidUsage('invalid sort key(s) provided') from e

    response = []
    for device in sorted_devices:
        response.append({
            'id': device.id,
            'alias': device.alias,
            'info': device.info,
            'type': device.type,
            'plugin': device.plugin,
            'tags': [utils.tag_string(tag) for tag in device.tags],
            'metadata': dict(device.metadata),
        })

    logger.debug('got devices', count=len(response), command='SCAN')
    return response