def test_getting_and_setting():
    """Test setting and getting variables with correct types."""

    man = ConfigManager()
    man.add_variable('test:var', 'bool', 'test variable', 'false')

    val = man.get('test:var')
    assert val is False

    man.set('test:var', 'true')
    assert man.get('test:var') is True

    # Make sure we can get and set without a default value
    man.add_variable('test:var2', 'bool', 'test variable')
    with pytest.raises(ArgumentError):
        man.get('test:var2')

    man.set('test:var2', 'True')
    assert man.get('test:var2') is True

    man.set('test:var2', 'False')
    assert man.get('test:var2') is False

    # Make sure removing a variable works
    man.remove('test:var2')
    with pytest.raises(ArgumentError):
        man.get('test:var2')

    man.remove('test:var')
    assert man.get('test:var') is False
def refresh_token(self):
    """Attempt to refresh our cloud token with iotile.cloud."""

    if self.token_type != 'jwt':
        raise DataError("Attempting to refresh a token that does not need to be refreshed",
                        token_type=self.token_type)

    conf = ConfigManager()
    domain = conf.get('cloud:server')

    url = '{}/api/v1/auth/api-jwt-refresh/'.format(domain)

    resp = self.api.session.post(url, json={'token': self.token})
    if resp.status_code != 200:
        raise ExternalError("Could not refresh token", error_code=resp.status_code)

    data = resp.json()

    # Save the token that we just refreshed to the registry and update our own token
    self.token = data['token']

    reg = ComponentRegistry()
    reg.set_config('arch:cloud_token', self.token)
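# Usage sketch (not part of the original source): refreshing a stored JWT from a
# long-running session. `cloud` is assumed to be an IOTileCloud instance like the
# one defined later in this file; non-jwt tokens raise DataError as shown above.
cloud = IOTileCloud()
if cloud.token_type == 'jwt':
    cloud.refresh_token()  # POSTs to /api/v1/auth/api-jwt-refresh/ and re-saves the token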
def __init__(self, domain=None, username=None, **kwargs):
    reg = ComponentRegistry()
    conf = ConfigManager()

    if domain is None:
        domain = conf.get('cloud:server')

    self.api = Api(domain=domain, **kwargs)
    self._domain = self.api.domain

    try:
        token = reg.get_config('arch:cloud_token')
        token_type = reg.get_config('arch:cloud_token_type', default='jwt')
        self.api.set_token(token, token_type=token_type)
    except ArgumentError:
        # If we are interactive, try to get the user to login for a single
        # session rather than making them call link_cloud to store a cloud token
        if type_system.interactive:
            username, password = self._prompt_user_pass(username, domain)
            ok_resp = self.api.login(email=username, password=password)
            if not ok_resp:
                raise ExternalError("Could not login to %s as user %s" % (domain, username))
        else:
            raise ExternalError("No stored iotile cloud authentication information",
                                suggestion='Call iotile config link_cloud with your username and password')

    self.token = self.api.token
    self.token_type = self.api.token_type
def __init__(self, port, on_scan=None, on_disconnect=None, passive=None, **kwargs):
    super(BLED112Adapter, self).__init__()

    # Get optional configuration flags
    stop_check_interval = kwargs.get('stop_check_interval', 0.1)

    # Make sure that if someone tries to connect to a device immediately after creating the adapter
    # we tell them we need time to accumulate device advertising packets first
    self.set_config('minimum_scan_time', 2.0)

    if on_scan is not None:
        self.add_callback('on_scan', on_scan)

    if on_disconnect is not None:
        self.add_callback('on_disconnect', on_disconnect)

    if port is None or port == '<auto>':
        devices = self.find_bled112_devices()
        if len(devices) > 0:
            port = devices[0]
        else:
            raise ValueError("Could not find any BLED112 adapters connected to this computer")

    self.scanning = False
    self.stopped = False

    if passive is not None:
        self._active_scan = not passive
    else:
        config = ConfigManager()
        self._active_scan = config.get('bled112:active-scan')

    # Prepare internal state of scannable and in progress devices.
    # Do this before spinning off the BLED112CommandProcessor
    # in case a scanned device is seen immediately.
    self.partial_scan_responses = {}
    self._connections = {}
    self.count_lock = threading.Lock()
    self.connecting_count = 0
    self.maximum_connections = 0

    self._logger = logging.getLogger(__name__)
    self._logger.addHandler(logging.NullHandler())

    self._serial_port = serial.Serial(port, 256000, timeout=0.01, rtscts=True)
    self._stream = AsyncPacketBuffer(self._serial_port, header_length=4, length_function=packet_length)
    self._commands = Queue()
    self._command_task = BLED112CommandProcessor(self._stream, self._commands,
                                                 stop_check_interval=stop_check_interval)
    self._command_task.event_handler = self._handle_event
    self._command_task.start()

    try:
        self.initialize_system_sync()
        self.start_scan(self._active_scan)
    except:
        self.stop_sync()
        raise
def setup_environment(chip):
    """Setup the SCons environment for compiling arm cortex code."""

    config = ConfigManager()

    # Make sure we never get MSVC settings for windows since that has the wrong command line flags for gcc
    if platform.system() == 'Windows':
        env = Environment(tools=['mingw'], ENV=os.environ)
    else:
        env = Environment(tools=['default'], ENV=os.environ)

    env['INCPREFIX'] = '-I"'
    env['INCSUFFIX'] = '"'
    env['CPPPATH'] = chip.includes()
    env['ARCH'] = chip

    # Setup Cross Compiler
    env['CC'] = 'arm-none-eabi-gcc'
    env['AS'] = 'arm-none-eabi-gcc'
    env['LINK'] = 'arm-none-eabi-gcc'
    env['AR'] = 'arm-none-eabi-ar'
    env['RANLIB'] = 'arm-none-eabi-ranlib'

    # The AS command line is by default set up to call as directly, so we need
    # to modify it to call via *-gcc to allow for preprocessing
    env['ASCOM'] = "$AS $ASFLAGS -o $TARGET -c $SOURCES"

    # Setup nice display strings unless we're asked to show raw commands
    if not config.get('build:show-commands'):
        env['CCCOMSTR'] = "Compiling $TARGET"
        env['ARCOMSTR'] = "Building static library $TARGET"
        env['RANLIBCOMSTR'] = "Indexing static library $TARGET"
        env['LINKCOMSTR'] = "Linking $TARGET"

    # Setup Compiler Flags
    env['CCFLAGS'] = chip.combined_properties('cflags')
    env['LINKFLAGS'] = chip.combined_properties('ldflags')
    env['ARFLAGS'].append(chip.combined_properties('arflags'))  # There are default ARFLAGS that are necessary to keep
    env['ASFLAGS'].append(chip.combined_properties('asflags'))

    # Add in compile time definitions
    defines = utilities.build_defines(chip.property('defines', {}))
    env['CCFLAGS'].append(defines)

    # Setup Target Architecture
    env['CCFLAGS'].append('-mcpu=%s' % chip.property('cpu'))
    env['ASFLAGS'].append('-mcpu=%s' % chip.property('cpu'))
    env['LINKFLAGS'].append('-mcpu=%s' % chip.property('cpu'))

    # Initialize library paths (all libraries are added via dependencies)
    env['LIBPATH'] = []
    env['LIBS'] = []

    return env
def __init__(self, port=None, record=None, adapter=None):
    if port is None and adapter is None:
        try:
            conf = ConfigManager()
            port = conf.get('core:default-port')
        except ArgumentError:
            raise ArgumentError("No port given and no core:default-port config variable set",
                                suggestion="Specify the port to use to connect to the IOTile devices")
    elif port is None:
        port = ""

    transport, _, arg = port.partition(':')

    self.transport = transport
    self.port = None
    if arg != "":
        self.port = arg

    self._record = record

    self.stream = self._create_stream(adapter)
    self._stream_queue = None
    self._trace_queue = None
    self._broadcast_queue = None
    self._trace_data = bytearray()

    self._proxies = {'TileBusProxyObject': TileBusProxyObject}
    self._name_map = {TileBusProxyObject.ModuleName(): [TileBusProxyObject]}

    self._known_apps = {}
    self._named_apps = {}

    self._setup_proxies()
    self._setup_apps()
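# Sketch (not part of the original source): the port string is a transport name
# optionally followed by ':' and a transport-specific argument, exactly as split
# by the partition(':') call above. The example values are placeholders.
transport, _, arg = 'bled112:/dev/ttyACM0'.partition(':')
assert transport == 'bled112' and arg == '/dev/ttyACM0'

transport, _, arg = 'virtual:simple'.partition(':')
assert transport == 'virtual' and arg == 'simple'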
def __init__(self, port, on_scan=None, on_disconnect=None, active_scan=None, **kwargs):
    super(NativeBLEDeviceAdapter, self).__init__()

    # Create logger
    self._logger = logging.getLogger(__name__)
    self._logger.addHandler(logging.NullHandler())

    # Register configuration
    self.set_config('minimum_scan_time', 2.0)  # Time to accumulate device advertising packets first
    self.set_config('default_timeout', 10.0)  # Time before timing out an operation
    self.set_config('expiration_time', 60.0)  # Time before a scanned device expires
    self.set_config('maximum_connections', 3)  # Maximum number of simultaneous connections per controller

    # Create the baBLE interface to interact with BLE controllers
    self.bable = bable_interface.BaBLEInterface()

    # Get the list of BLE controllers
    self.bable.start(on_error=self._on_ble_error)
    controllers = self._find_ble_controllers()
    self.bable.stop()

    if len(controllers) == 0:
        raise ExternalError("Could not find any BLE controller connected to this computer")

    # Parse port and check if it exists
    if port is None or port == '<auto>':
        self.controller_id = controllers[0].id
    else:
        self.controller_id = int(port)
        if not any(controller.id == self.controller_id for controller in controllers):
            raise ExternalError("Could not find a BLE controller with the given ID, "
                                "controller_id={}".format(self.controller_id))

    # Restart baBLE with the selected controller id to prevent conflicts if there are multiple controllers
    self.bable.start(on_error=self._on_ble_error, exit_on_sigint=False, controller_id=self.controller_id)

    # Register callbacks
    if on_scan is not None:
        self.add_callback('on_scan', on_scan)

    if on_disconnect is not None:
        self.add_callback('on_disconnect', on_disconnect)

    self.scanning = False
    self.stopped = False

    if active_scan is not None:
        self._active_scan = active_scan
    else:
        config = ConfigManager()
        self._active_scan = config.get('ble:active-scan')

    # To register advertising packets waiting for a scan response (only if active scan)
    self.partial_scan_responses = {}

    # To manage multiple connections
    self.connections = ConnectionManager(self.id)
    self.connections.start()

    # Notification callbacks
    self.notification_callbacks_lock = threading.Lock()
    self.notification_callbacks = {}

    try:
        self._initialize_system_sync()
        self.start_scan(active=self._active_scan)
    except Exception:
        self.stop_sync()
        raise
def setup_environment(chip, args_file=None):
    """Setup the SCons environment for compiling arm cortex code.

    This will return an env that has all of the correct settings and create a
    command line arguments file for GCC that contains all of the required
    flags.  The use of a command line argument file passed with @./file_path
    is important since there can be many flags that exceed the maximum allowed
    length of a command line on Windows.
    """

    config = ConfigManager()

    # Make sure we never get MSVC settings for windows since that has the wrong command line flags for gcc
    if platform.system() == 'Windows':
        env = Environment(tools=['mingw'], ENV=os.environ)
    else:
        env = Environment(tools=['default'], ENV=os.environ)

    env['INCPREFIX'] = '-I"'
    env['INCSUFFIX'] = '"'
    env['CPPDEFPREFIX'] = ''
    env['CPPDEFSUFFIX'] = ''
    env['CPPPATH'] = chip.includes()
    env['ARCH'] = chip

    # Setup Cross Compiler
    env['CC'] = 'arm-none-eabi-gcc'
    env['AS'] = 'arm-none-eabi-gcc'
    env['LINK'] = 'arm-none-eabi-gcc'
    env['AR'] = 'arm-none-eabi-ar'
    env['RANLIB'] = 'arm-none-eabi-ranlib'

    # The AS command line is by default set up to call as directly, so we need
    # to modify it to call via *-gcc to allow for preprocessing
    env['ASCOM'] = "$AS $ASFLAGS -o $TARGET -c $SOURCES"

    # Setup nice display strings unless we're asked to show raw commands
    if not config.get('build:show-commands'):
        env['CCCOMSTR'] = "Compiling $TARGET"
        env['ARCOMSTR'] = "Building static library $TARGET"
        env['RANLIBCOMSTR'] = "Indexing static library $TARGET"
        env['LINKCOMSTR'] = "Linking $TARGET"

    # Setup Compiler Flags
    env['CCFLAGS'] = chip.combined_properties('cflags')
    env['LINKFLAGS'] = chip.combined_properties('ldflags')
    env['ARFLAGS'].append(chip.combined_properties('arflags'))  # There are default ARFLAGS that are necessary to keep
    env['ASFLAGS'].append(chip.combined_properties('asflags'))

    # Add in compile time definitions
    defines = utilities.build_defines(chip.property('defines', {}))
    env['CPPDEFINES'] = defines

    if args_file is not None:
        env['CCCOM'] = "$CC $CCFLAGS $CPPFLAGS @{} -c -o $TARGET $SOURCES".format(args_file)

    # Setup Target Architecture
    env['CCFLAGS'].append('-mcpu=%s' % chip.property('cpu'))
    env['ASFLAGS'].append('-mcpu=%s' % chip.property('cpu'))
    env['LINKFLAGS'].append('-mcpu=%s' % chip.property('cpu'))

    # Initialize library paths (all libraries are added via dependencies)
    env['LIBPATH'] = []
    env['LIBS'] = []

    return env
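# Usage sketch (assumed names and paths, not from the original source): how the
# args_file variant might be wired up from an SConstruct. The flags written to
# the file are illustrative only; in practice they are whatever would otherwise
# overflow the Windows command-line length limit.
args_path = os.path.join('build', 'gcc_args.txt')
with open(args_path, 'w') as argfile:
    argfile.write('-ffunction-sections -fdata-sections\n')  # placeholder flags

env = setup_environment(chip, args_file=args_path)
env.StaticLibrary('firmware', env.Glob('src/*.c'))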
def watch_broadcasts(self, whitelist=None, blacklist=None):
    """Spawn an interactive terminal UI to watch broadcast data from devices.

    Devices are allowed to post a broadcast report containing stream data.
    This function will create a list in your console window with the latest
    broadcast value from each device in range.

    Args:
        whitelist (list(integer)): Only include devices with these listed ids.
        blacklist (list(integer)): Include every device **except** those with
            these specific ids.  If combined with whitelist, whitelist wins
            and this parameter has no effect.
    """

    title = "Watching Broadcast Reports (Ctrl-C to Stop)"
    subtitle = ""

    if self.transport == 'bled112':
        reg = ConfigManager()
        if not reg.get('bled112:active-scan'):
            subtitle = "Active Scanning not enabled, you won't see v1 broadcasts"

    if whitelist is not None:
        whitelist = set(whitelist)

    if blacklist is not None:
        blacklist = set(blacklist)

    def _title(_items):
        return [title, subtitle]

    def _poll():
        results = [x for x in self.iter_broadcast_reports(blocking=False)]
        return results

    def _text(item):
        fmt_uuid = "%08X" % item.origin
        fmt_uuid = fmt_uuid[:4] + '-' + fmt_uuid[4:]

        reading = item.visible_readings[0]
        return "{0: <15} stream: {1: 04X} value: {2: <8}".format(fmt_uuid, reading.stream, reading.value)

    def _sort_order(item):
        return item.origin

    def _hash(item):
        uuid = item.origin
        stream_id = item.visible_readings[0].stream

        if whitelist is not None and uuid not in whitelist:
            return None

        if blacklist is not None and whitelist is None and uuid in blacklist:
            return None

        item_id = str(uuid) + "," + str(stream_id)
        return item_id

    line_ui = LinebufferUI(_poll, _hash, _text, sortkey_func=_sort_order, title=_title)
    line_ui.run()
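# Usage sketch (assumed, not from the original source): `hw` is whatever object
# exposes watch_broadcasts, typically an interactive hardware manager session;
# the device ids are placeholders.
hw.watch_broadcasts(whitelist=[0x1234, 0x5678])   # show only these two devices
hw.watch_broadcasts(blacklist=[0x9ABC])           # show everything except this device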
def __init__(self, port, on_scan=None, on_disconnect=None, passive=None, **kwargs):
    super(BLED112Adapter, self).__init__()

    # Get optional configuration flags
    stop_check_interval = kwargs.get('stop_check_interval', 0.1)

    # Make sure that if someone tries to connect to a device immediately after creating the adapter
    # we tell them we need time to accumulate device advertising packets first
    self.set_config('minimum_scan_time', 2.0)

    if on_scan is not None:
        self.add_callback('on_scan', on_scan)

    if on_disconnect is not None:
        self.add_callback('on_disconnect', on_disconnect)

    self.scanning = False
    self.stopped = False

    config = ConfigManager()

    if passive is not None:
        self._active_scan = not passive
    else:
        self._active_scan = config.get('bled112:active-scan')

    self._throttle_broadcast = config.get('bled112:throttle-broadcast')
    self._throttle_scans = config.get('bled112:throttle-scan')
    self._throttle_timeout = config.get('bled112:throttle-timeout')

    # Prepare internal state of scannable and in progress devices.
    # Do this before spinning off the BLED112CommandProcessor
    # in case a scanned device is seen immediately.
    self.partial_scan_responses = {}
    self._broadcast_state = {}
    self._connections = {}
    self.count_lock = threading.Lock()
    self.connecting_count = 0
    self.maximum_connections = 0

    self._scan_event_count = 0
    self._v1_scan_count = 0
    self._v1_scan_response_count = 0
    self._v2_scan_count = 0
    self._device_scan_counts = {}
    self._last_reset_time = time.monotonic()

    self._logger = logging.getLogger(__name__)
    self._logger.addHandler(logging.NullHandler())

    self._serial_port = open_bled112(port, self._logger)
    self._stream = AsyncPacketBuffer(self._serial_port, header_length=4, length_function=packet_length)
    self._commands = Queue()
    self._command_task = BLED112CommandProcessor(self._stream, self._commands,
                                                 stop_check_interval=stop_check_interval)
    self._command_task.event_handler = self._handle_event
    self._command_task.start()

    try:
        self.initialize_system_sync()
        self.start_scan(self._active_scan)
    except:
        self.stop_sync()
        raise
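# Configuration sketch (assumed values, not from the original source): the scan
# and throttle behaviour above is read from ConfigManager variables, so it can
# be adjusted without code changes; set() takes string values as in the test at
# the top of this file.
conf = ConfigManager()
conf.set('bled112:active-scan', 'true')
conf.set('bled112:throttle-broadcast', 'true')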
class IOTileCloud:
    """High level routines for interacting with IOTile cloud.

    Normally, you can create one of these objects with no arguments and the
    iotile.cloud server and authentication details will be pulled from the
    ComponentRegistry.  However, you can force a specific domain by passing
    the optional domain argument.

    If there are no stored credentials in ComponentRegistry, the user will be
    prompted for a password on the command line IF the session is interactive,
    otherwise __init__ will fail.

    Args:
        domain (str): Optional server domain.  If not specified, the default
            will be whatever is stored in the registry.
        username (str): Optional username to force the user to use if they
            don't have stored credentials.
    """

    DEVICE_TOKEN_TYPE = 'a-jwt'

    def __init__(self, domain=None, username=None, **kwargs):
        reg = ComponentRegistry()
        self._conf = ConfigManager()

        if domain is None:
            domain = self._conf.get('cloud:server')

        if not self.verify_server:
            urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

        _verify = self.verify_server

        self.api = Api(domain=domain, verify=_verify, **kwargs)
        self._domain = self.api.domain

        try:
            token = reg.get_config('arch:cloud_token')
            token_type = reg.get_config('arch:cloud_token_type', default='jwt')
            self.api.set_token(token, token_type=token_type)
        except ArgumentError:
            # If we are interactive, try to get the user to login for a single
            # session rather than making them call link_cloud to store a cloud token
            if type_system.interactive:
                username, password = self._prompt_user_pass(username, domain)
                ok_resp = self.api.login(email=username, password=password)
                if not ok_resp:
                    raise ExternalError("Could not login to %s as user %s" % (domain, username))
            else:
                raise ExternalError("No stored iotile cloud authentication information",
                                    suggestion='Call iotile config link_cloud with your username and password')

        self.token = self.api.token
        self.token_type = self.api.token_type

    @property
    def verify_server(self) -> bool:
        return self._conf.get('cloud:verify-server')

    def _prompt_user_pass(self, username, domain):
        if username is None:
            prompt_str = "Please enter your IOTile.cloud email: "
            username = input(prompt_str)

        prompt_str = "Please enter your IOTile.cloud password: "
        # NOTE: the source text masks everything from here through the header of
        # _build_streamer_slug below; the password read and the method signature
        # are reconstructed from context and later usage.
        password = getpass.getpass(prompt_str)

        return username, password

    def _build_streamer_slug(self, device_id, streamer_id):
        idhex = "{:04x}".format(device_id)
        streamer_hex = "{:04x}".format(streamer_id)

        return "t--0000-0000-0000-{}--{}".format(idhex, streamer_hex)

    @param("device_id", "integer", desc="ID of the device that we want information about")
    @return_type("basic_dict")
    def device_info(self, device_id):
        """Query information about a device by its device id."""

        slug = device_id_to_slug(device_id)

        try:
            dev = self.api.device(slug).get()
        except HttpNotFoundError:
            raise ArgumentError("Device does not exist in cloud database",
                                device_id=device_id, slug=slug)

        return dev

    @param("fleet_id", "integer", desc="Id of the fleet we want to retrieve")
    @return_type("basic_dict")
    def get_fleet(self, fleet_id):
        """Return the devices in the given fleet."""

        api = self.api
        slug = fleet_id_to_slug(fleet_id)

        try:
            results = api.fleet(slug).devices.get()
            entries = results.get('results', [])
            return {entry.pop('device'): entry for entry in entries}
        except HttpNotFoundError:
            raise ArgumentError("Fleet does not exist in cloud database",
                                fleet_id=fleet_id, slug=slug)

    @param("device_id", "integer", desc="Id of the device whose fleet we want to retrieve")
    @return_type("basic_dict")
    def get_whitelist(self, device_id):
        """Return the whitelist associated with the given device_id, if any."""

        api = self.api
        slug = device_id_to_slug(device_id)

        try:
            fleets = api.fleet.get(device=slug)['results']
        except HttpNotFoundError:
            raise ExternalError("Could not find the right URL. Are fleets enabled?")

        if not fleets:
            # This is to be expected for devices set to take data from all projects, or any device.
            raise ExternalError("The device isn't in any network!")

        networks = [
            self.get_fleet(fleet['id']) for fleet in fleets
            if fleet.get('is_network', False) is True
        ]

        networks_to_manage = [
            x for x in networks
            if x.get(slug, {}).get('is_access_point', False) is True
        ]

        out = {}
        for network in networks_to_manage:
            out.update(network)

        # Remove ourselves from the whitelist that we are supposed to manage
        if slug in out:
            del out[slug]

        if not out:
            raise ExternalError("No device to manage in these fleets!")

        return out

    @param("max_slop", "integer", desc="Optional max time difference value")
    @return_type("bool")
    def check_time(self, max_slop=300):
        """Check if the current system time is consistent with iotile.cloud time."""

        cloud_time = self.api.session.get('{}/api/v1/server/'.format(self._domain)).json().get('now', None)
        if cloud_time is None:
            raise DataError("No date header returned from iotile cloud", domain=self._domain)

        curtime = datetime.datetime.now(tzutc())
        delta = dateutil.parser.parse(cloud_time) - curtime
        delta_secs = delta.total_seconds()

        return abs(delta_secs) < max_slop

    @param("device_id", "integer", desc="ID of the device that we want information about")
    @param("new_sg", "string", desc="The new sensor graph id that we want to load")
    @param("app_tag", "integer",
           desc="Optional arg to check if the sensorgraph on the cloud matches the app_tag")
    def set_sensorgraph(self, device_id, new_sg, app_tag=None):
        """Set the cloud's sensor graph id that informs what kind of device this is.

        If app_tag is passed, verify that the sensorgraph explicitly matches
        the expected app_tag by making another API call.

        Args:
            device_id (int): The id of the device that we want to change the sensorgraph for.
            new_sg (string): Name of a valid sensorgraph that you wish to set the device to.
            app_tag (int): Optional. The intended app_tag of the sensorgraph that will be set.
                If the app_tag passed into this function does not match the app_tag of the
                sensorgraph in iotile.cloud, raise an error.
        """

        try:
            sg = self.api.sg(new_sg).get()
        except RestHttpBaseException as exc:
            raise ExternalError("Error calling method on iotile.cloud",
                                exception=exc, response=exc.response.status_code)

        if app_tag is not None:
            if sg.get('app_tag', None) != app_tag:
                raise ArgumentError("Cloud sensorgraph record does not match app tag",
                                    value=new_sg, cloud_sg_app_tag=sg.get('app_tag', None),
                                    app_tag_set=app_tag)

        slug = device_id_to_slug(device_id)
        patch = {'sg': new_sg}

        try:
            self.api.device(slug).patch(patch)
        except RestHttpBaseException as exc:
            if exc.response.status_code == 400:
                raise ArgumentError("Error setting sensor graph, invalid value",
                                    value=new_sg, error_code=exc.response.status_code)
            else:
                raise ExternalError("Error calling method on iotile.cloud",
                                    exception=exc, response=exc.response.status_code)

    @param("device_id", "integer", desc="ID of the device that we want information about")
    @param("new_template", "string", desc="The new device template that we want to set")
    @param("os_tag", "integer",
           desc="Optional arg to check if the device template on the cloud matches the os_tag")
    def set_device_template(self, device_id, new_template, os_tag=None):
        """Set the device template for the given device in iotile.cloud.

        If os_tag is passed, verify that the device template explicitly
        matches the expected os_tag by making another API call.

        Args:
            device_id (int): The id of the device that we want to change the device template for.
            new_template (string): Name of a valid device template that you wish to set the device to.
            os_tag (int): Optional. If the os_tag passed into this function does not match the
                os_tag of the device_template in iotile.cloud, raise an error.
        """

        try:
            dt = self.api.dt(new_template).get()
        except RestHttpBaseException as exc:
            raise ExternalError("Error calling method on iotile.cloud",
                                exception=exc, response=exc.response.status_code)

        if os_tag is not None:
            if dt.get('os_tag', None) != os_tag:
                raise ArgumentError("Cloud device template record does not match os tag",
                                    value=new_template, cloud_sg_os_tag=dt.get('os_tag', None),
                                    os_tag_set=os_tag)

        slug = device_id_to_slug(device_id)
        patch = {'template': new_template}

        try:
            self.api.device(slug).patch(patch, staff=1)
        except RestHttpBaseException as exc:
            if exc.response.status_code == 400:
                raise ArgumentError("Error setting device template, invalid value",
                                    value=new_template, error_code=exc.response.status_code)
            else:
                raise ExternalError("Error calling method on iotile.cloud",
                                    exception=exc, response=exc.response.status_code)

    @param("project_id", "string", desc="Optional ID of the project to download a list of devices from")
    @return_type("list(integer)")
    def device_list(self, project_id=None):
        """Download a list of all device IDs or device IDs that are members of a specific project."""

        if project_id:
            devices = self.api.device.get(project=project_id)
        else:
            devices = self.api.device.get()

        ids = [device['id'] for device in devices['results']]
        return ids

    @param("device_id", "integer", desc="ID of the device that we want to get a permanent token for")
    def impersonate_device(self, device_id):
        """Convert our token to a permanent device token.

        This function is most useful for creating virtual IOTile devices whose
        access to iotile.cloud is based on their device id, not any particular
        user's account.

        There are a few differences between device tokens and user tokens:
        - Device tokens never expire and don't need to be refreshed
        - Device tokens are more restricted in what they can access in
          IOTile.cloud than user tokens

        Args:
            device_id (int): The id of the device that we want to get a token for.
        """

        slug = device_id_to_slug(device_id)
        token_type = IOTileCloud.DEVICE_TOKEN_TYPE

        try:
            resp = self.api.device(slug).key.get(type=IOTileCloud.DEVICE_TOKEN_TYPE)
            token = resp['key']
        except RestHttpBaseException as exc:
            raise ExternalError("Error calling method on iotile.cloud",
                                exception=exc, response=exc.response.status_code)

        self.api.set_token(token, token_type=token_type)
        self.token = token
        self.token_type = token_type

        reg = ComponentRegistry()
        reg.set_config('arch:cloud_token', self.token)
        reg.set_config('arch:cloud_token_type', self.token_type)
        reg.set_config('arch:cloud_device', slug)

    @param("device_id", "integer", desc="ID of the device that we want information about")
    @param("clean", "bool", desc="Also clean old stream data for this device")
    def unclaim(self, device_id, clean=True):
        """Unclaim a device that may have previously been claimed."""

        slug = device_id_to_slug(device_id)

        payload = {'clean_streams': clean}

        try:
            self.api.device(slug).unclaim.post(payload)
        except RestHttpBaseException as exc:
            raise ExternalError("Error calling method on iotile.cloud",
                                exception=exc, response=exc.response.status_code)

    def upload_report(self, report):
        """Upload an IOTile report to the cloud.

        This function currently supports uploading the following kinds of
        reports:
            SignedListReport
            FlexibleDictionaryReport

        If you pass an instance of IndividualReadingReport, an exception will
        be thrown because IOTile.cloud does not support receiving individual
        readings.  Those are only for local use.

        The filename of the uploaded report will have an extension set based
        on the type of report that you are uploading.

        Args:
            report (IOTileReport): The report that you want to upload.  This
                should not be an IndividualReadingReport.

        Returns:
            int: The number of new readings that were accepted by the cloud as novel.
        """

        if isinstance(report, IndividualReadingReport):
            raise ArgumentError("You cannot upload IndividualReadingReport objects to iotile.cloud",
                                report=report)

        if isinstance(report, SignedListReport):
            file_ext = ".bin"
        elif isinstance(report, FlexibleDictionaryReport):
            file_ext = ".mp"
        else:
            raise ArgumentError("Unknown report format passed to upload_report",
                                classname=report.__class__.__name__, report=report)

        timestamp = '{}'.format(report.received_time.isoformat())
        payload = {'file': ("report" + file_ext, BytesIO(report.encode()))}

        resource = self.api.streamer.report

        headers = {}
        authorization_str = '{0} {1}'.format(self.token_type, self.token)
        headers['Authorization'] = authorization_str

        resp = self.api.session.post(resource.url(), files=payload, headers=headers,
                                     params={'timestamp': timestamp})

        count = resource._process_response(resp)['count']
        return count

    def highest_acknowledged(self, device_id, streamer):
        """Get the highest acknowledged reading for a given streamer.

        Args:
            device_id (int): The device whose streamer we are querying.
            streamer (int): The streamer on the device that we want info about.

        Returns:
            int: The highest reading id that has been acknowledged by the cloud.
        """

        slug = self._build_streamer_slug(device_id, streamer)

        try:
            data = self.api.streamer(slug).get()
        except RestHttpBaseException as exc:
            raise ArgumentError("Could not get information for streamer",
                                device_id=device_id, streamer_id=streamer,
                                slug=slug, err=str(exc))

        if 'last_id' not in data:
            raise ExternalError("Response from the cloud did not have last_id set", response=data)

        return data['last_id']

    def device_acknowledgements(self, device_id):
        """Get all streamer acknowledgements for a device by its id.

        Args:
            device_id (int): The device we are querying.

        Returns:
            list of namedtuples: A list of all acknowledgement values received
                from the cloud.  The namedtuples have index, ack and selector
                fields pulled from the corresponding record in the cloud.
        """

        slug = device_id_to_slug(device_id)

        try:
            data = self.api.streamer().get(device=slug)
        except RestHttpBaseException as exc:
            raise ArgumentError("Could not get information for streamer",
                                device_id=device_id, slug=slug, err=str(exc))

        results = data.get('results', [])

        acknowledgements = []
        for result in results:
            acknowledgement = Acknowledgement(result.get("index"), result.get("last_id"),
                                              result.get("selector"))
            acknowledgements.append(acknowledgement)

        return acknowledgements

    @annotated
    def refresh_token(self):
        """Attempt to refresh our cloud token with iotile.cloud."""

        if self.token_type != 'jwt':
            raise DataError("Attempting to refresh a token that does not need to be refreshed",
                            token_type=self.token_type)

        conf = ConfigManager()
        domain = conf.get('cloud:server')

        url = '{}/api/v1/auth/api-jwt-refresh/'.format(domain)

        resp = self.api.session.post(url, json={'token': self.token})
        if resp.status_code != 200:
            raise ExternalError("Could not refresh token", error_code=resp.status_code)

        data = resp.json()

        # Save the token that we just refreshed to the registry and update our own token
        self.token = data['token']

        reg = ComponentRegistry()
        reg.set_config('arch:cloud_token', self.token)
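# Usage sketch (not from the original source; the device id and report object
# are placeholders): a typical interactive session against iotile.cloud using
# the IOTileCloud methods defined above.
cloud = IOTileCloud()                       # pulls server and credentials from the registry, or prompts if interactive

if not cloud.check_time():
    print("System clock differs from iotile.cloud by more than the allowed slop")

info = cloud.device_info(0x1234)            # basic_dict describing the device
last_acked = cloud.highest_acknowledged(0x1234, streamer=0)
accepted = cloud.upload_report(signed_report)   # a SignedListReport or FlexibleDictionaryReport instance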