def _validate_information(self):
    """Validate that all information has been filled in.

    Checks that the required variables are present, that ModuleName fits
    in 6 characters and that the version strings are well formed.  On
    success the version strings are converted via the _convert_* helpers,
    ModuleName is padded to exactly 6 characters and self.valid is set.

    Raises:
        DataError: if a required variable is missing or invalid.
        ValueError: if a version value is not a string.
    """

    needed_variables = ["ModuleName", "ModuleVersion", "APIVersion"]

    for var in needed_variables:
        if var not in self.variables:
            raise DataError("Needed variable was not defined in mib file.", variable=var)

    # Make sure ModuleName is <= 6 characters
    if len(self.variables["ModuleName"]) > 6:
        raise DataError("ModuleName too long, must be 6 or fewer characters.",
                        module_name=self.variables["ModuleName"])

    # `basestring` does not exist on Python 3 (this file uses async/await,
    # so it is Python 3 only); a text version string is exactly a str.
    if not isinstance(self.variables["ModuleVersion"], str):
        raise ValueError("ModuleVersion ('%s') must be a string of the form X.Y.Z"
                         % str(self.variables['ModuleVersion']))

    if not isinstance(self.variables["APIVersion"], str):
        raise ValueError("APIVersion ('%s') must be a string of the form X.Y"
                         % str(self.variables['APIVersion']))

    self.variables['ModuleVersion'] = self._convert_module_version(self.variables["ModuleVersion"])
    self.variables['APIVersion'] = self._convert_api_version(self.variables["APIVersion"])

    # Pad the name out to exactly 6 characters as required by the mib format.
    self.variables["ModuleName"] = self.variables["ModuleName"].ljust(6)

    self.valid = True
def load_metascenario(self, scenario_list):
    """Load one or more scenarios from a list.

    Each entry in scenario_list should be a dict containing at least a
    name key and an optional tile key and args key.  If tile is present
    and its value is not None, the scenario specified will be loaded
    into the given tile only.  Otherwise it will be loaded into the
    entire device.

    If the args key is specified is will be passed as keyword arguments
    to load_scenario.

    Args:
        scenario_list (list): A list of dicts for each scenario that
            should be loaded.
    """

    for entry in scenario_list:
        scenario_name = entry.get('name')
        if scenario_name is None:
            raise DataError("Scenario in scenario list is missing a name parameter", scenario=entry)

        address = entry.get('tile')
        scenario_args = entry.get('args', {})

        # Default to loading into the whole device unless a tile address was given.
        if address is None:
            target = self
        else:
            target = self._tiles.get(address)
            if target is None:
                raise DataError("Attempted to load a scenario into a tile address that does not exist",
                                address=address, valid_addresses=list(self._tiles))

        target.load_scenario(scenario_name, **scenario_args)
def FromString(cls, indata):
    """Load a CommandFile from a string.

    The string should be produced from a previous call to encode.

    Args:
        indata (str): The encoded input data.

    Returns:
        CommandFile: The decoded CommandFile object.
    """

    # Drop comment lines (unstripped '#' prefix) and blank lines.
    lines = []
    for raw in indata.split("\n"):
        stripped = raw.strip()
        if raw.startswith('#') or stripped == "":
            continue

        lines.append(stripped)

    if len(lines) < 3:
        raise DataError("Invalid CommandFile string that did not contain 3 header lines", lines=lines)

    fmt_line, version_line, ascii_line = lines[0], lines[1], lines[2]

    if not version_line.startswith("Format: "):
        raise DataError("Invalid format version that did not start with 'Format: '", line=version_line)

    version = version_line[len("Format: "):]

    if ascii_line != "Type: ASCII":
        raise DataError("Unknown file type line (expected Type: ASCII)", line=ascii_line)

    commands = [cls.decode(line) for line in lines[3:]]
    return CommandFile(fmt_line, version, commands)
def ParsePrerelease(cls, prerelease):
    """Parse a prerelease string into a type, number tuple.

    Args:
        prerelease (string): a prerelease string in the format specified
            for SemanticVersion

    Returns:
        tuple: (release_type, number)
    """

    # Find where the leading alphabetic release type ends.
    split_at = 0
    for char in prerelease:
        if not char.isalpha():
            break

        split_at += 1

    release_type = prerelease[:split_at]
    release_number = prerelease[split_at:]

    if release_type not in SemanticVersion.prerelease_order or release_type == 'release':
        raise DataError("Invalid Prerelease specifier in semantic version", prerelease_type=release_type)

    try:
        release_number = int(release_number)
    except ValueError:
        raise DataError("Invalid Prerelease number in semantic version", prerelease_number=release_number)

    return (release_type, release_number)
def _find_v1_settings(self, settings):
    """Parse a v1 module_settings.json file.

    V1 is the older file format that requires a modules dictionary with
    a module_name and modules key that could in theory hold information
    on multiple modules in a single directory.

    Args:
        settings (dict): The parsed contents of module_settings.json.

    Returns:
        TileInfo: the module name, settings, architectures, targets and
            release info extracted from the settings dictionary.

    Raises:
        DataError: if no module (or more than one) is defined, or the
            declared module name has no entry in the modules dict.
    """

    if 'module_name' in settings:
        modname = settings['module_name']
    if 'modules' not in settings or len(settings['modules']) == 0:
        raise DataError("No modules defined in module_settings.json file")
    elif len(settings['modules']) > 1:
        raise DataError("Multiple modules defined in module_settings.json file",
                        modules=[x for x in settings['modules']])
    else:
        # NOTE(review): this branch runs whenever exactly one module is
        # defined, so it overwrites any modname taken from 'module_name'
        # above — looks like the explicit module_name is effectively
        # ignored; TODO confirm this is intended.
        modname = list(settings['modules'])[0]

    if modname not in settings['modules']:
        raise DataError("Module name does not correspond with an entry in the modules directory",
                        name=modname, modules=[x for x in settings['modules']])

    release_info = self._load_release_info(settings)
    modsettings = settings['modules'][modname]
    architectures = settings.get('architectures', {})

    target_defs = settings.get('module_targets', {})
    targets = target_defs.get(modname, [])

    return TileInfo(modname, modsettings, architectures, targets, release_info)
def latch(self):
    """Convert the current value inside this config descriptor to a python object.

    The conversion proceeds by mapping the given type name to a native
    python class and performing the conversion.  You can override what
    python object is used as the destination class by passing a
    python_type parameter to __init__.

    The default mapping is:
    - char (u)int8_t, (u)int16_t, (u)int32_t: int
    - char[] (u)int8_t[], (u)int16_t[]0, u(int32_t): list of int

    If you want to parse a char[] or uint8_t[] as a python string, it
    needs to be null terminated and you should pass python_type='string'.

    If you are declaring a scalar integer type and wish it to be decoded
    as a bool, you can pass python_type='bool' to the constructor.

    All integers are decoded as little-endian.

    Returns:
        object: The corresponding python object.  This will either be an
            int, list of int or string based on the type_name specified
            and the optional python_type keyword argument to the
            constructor.

    Raises:
        DataError: if the object cannot be converted to the desired type.
        ArgumentError: if an invalid python_type was specified during construction.
    """

    if len(self.current_value) == 0:
        raise DataError("There was no data in a config variable during latching", name=self.name)

    # Make sure the data ends on a unit boundary. This would have happened automatically
    # in an actual device by the C runtime 0 padding out the storage area.
    remaining = len(self.current_value) % self.unit_size
    if remaining > 0:
        # BUGFIX: pad with (unit_size - remaining) zero bytes to reach the
        # next unit boundary.  The old code appended `remaining` bytes,
        # which only lands on a boundary when remaining == unit_size / 2,
        # making struct.unpack below fail for e.g. 5 bytes of uint32 data.
        self.current_value += bytearray(self.unit_size - remaining)

    if self.special_type == 'string':
        if self.current_value[-1] != 0:
            raise DataError("String type was specified but data did not end with a null byte",
                            data=self.current_value, name=self.name)

        return bytes(self.current_value[:-1]).decode('utf-8')

    fmt_code = "<" + (self.base_type * (len(self.current_value) // self.unit_size))
    data = struct.unpack(fmt_code, self.current_value)

    if self.variable:
        data = list(data)
    else:
        data = data[0]

    if self.special_type == 'bool':
        data = bool(data)

    return data
def _parse_configvar(self, match):
    """Register a parsed config variable declaration in self.configs.

    Args:
        match (dict): The parsed declaration containing 'configvar',
            'type' and 'confignum' keys plus optional 'length' and
            'value' keys.

    Raises:
        DataError: if more than 64 variables are declared or the same
            config id is declared twice.
    """

    array = 'length' in match
    quantity = match['length'] if array else 1

    if 'value' in match:
        default_value = match['value']
        default_length = self._value_length(default_value, match['type'])
        required = False

        # Special case, if this is an array and the initializer = {0} then that means an
        # empty array, not a single value of 0, per standard C idiom.
        if array and match['type'] != 'string' and len(default_value) == 1 and default_value[0] == 0:
            default_length = 0
    else:
        default_value = None
        default_length = 0
        required = True

    varname = match['configvar']
    vartype = match['type']
    varnum = match['confignum']
    varsize = quantity * type_lengths[vartype]

    # The low bits of flags hold the declaration index; bit 6 marks required.
    flags = len(self.configs)
    if flags >= 64:
        raise DataError("Too many configuration variables. The maximum number of supported variables is 64")

    if required:
        flags |= (1 << 6)

    config = {
        'name': varname,
        'flags': flags,
        'type': vartype,
        'array': array,
        'total_size': varsize,
        'count': quantity,
        'required': required,
        'default_value': default_value,
        'default_size': default_length
    }

    if varnum in self.configs:
        raise DataError("Attempted to add the same config variable twice", variable_name=varname,
                        id_number=varnum, defined_variables=self.configs.keys())

    self.configs[varnum] = config
def find_proxy_plugin(component, plugin_name):
    """Attempt to find a proxy plugin provided by a specific component.

    Args:
        component (string): The name of the component that provides the plugin
        plugin_name (string): The name of the plugin to load

    Returns:
        TileBusProxyPlugin: The plugin, if found, otherwise raises DataError
    """

    registry = ComponentRegistry()

    candidates = registry.load_extensions('iotile.proxy_plugin', comp_filter=component,
                                          class_filter=TileBusProxyPlugin,
                                          product_name='proxy_plugin')

    # Return the first registered plugin whose class name matches.
    found = next((plugin for _name, plugin in candidates if plugin.__name__ == plugin_name), None)
    if found is not None:
        return found

    raise DataError("Could not find proxy plugin module in registered components or installed distributions",
                    component=component, name=plugin_name)
def add_direct(self, target, var_id, var_type, data):
    """Directly add a config variable.

    This method is meant to be called from emulation scenarios that want
    to directly set config database entries from python.

    Args:
        target (SlotIdentifer): The target slot for this config variable.
        var_id (int): The config variable ID
        var_type (str): The config variable type
        data (bytes or int or str): The data that will be encoded
            according to var_type.

    Raises:
        DataError: if there is not enough space left in the config database.
    """

    # Entries are stored as a little-endian uint16 variable id followed by the payload.
    data = struct.pack("<H", var_id) + _convert_to_bytes(var_type, data)

    free_space = self.data_size - self.data_index
    if free_space < len(data):
        # Fixed typo in error message ("conig" -> "config").
        raise DataError("Not enough space for data in new config entry",
                        needed_space=len(data), actual_space=free_space)

    new_entry = ConfigEntry(target, var_id, data)

    # A new entry for the same (target, var_id) supersedes any previous one.
    for entry in self.entries:
        if entry.target == new_entry.target and entry.var_id == new_entry.var_id:
            entry.valid = False

    self.entries.append(new_entry)
    self.data_index += new_entry.data_space()
def decode(self):
    """Decode this report from a msgpack encoded binary blob."""

    payload = msgpack.unpackb(self.raw_report, raw=False)

    events = [IOTileEvent.FromDict(entry) for entry in payload.get('events', [])]
    readings = [IOTileReading.FromDict(entry) for entry in payload.get('data', [])]

    if 'device' not in payload:
        raise DataError("Invalid encoded FlexibleDictionaryReport that did not "
                        "have a device key set with the device uuid")

    # Copy the report metadata onto this object; missing optional keys
    # fall back to their documented defaults.
    self.origin = payload['device']
    self.report_id = payload.get("incremental_id", IOTileReading.InvalidReadingID)
    self.sent_timestamp = payload.get("device_sent_timestamp", 0)
    self.origin_streamer = payload.get("streamer_index")
    self.streamer_selector = payload.get("streamer_selector")
    self.lowest_id = payload.get('lowest_id')
    self.highest_id = payload.get('highest_id')

    return readings, events
def refresh_token(self):
    """Attempt to refresh our cloud token with iotile.cloud.

    Raises:
        DataError: if the stored token is not a refreshable jwt token.
        ExternalError: if the cloud rejects the refresh request.
    """

    if self.token_type != 'jwt':
        raise DataError("Attempting to refresh a token that does not need to be refreshed",
                        token_type=self.token_type)

    config = ConfigManager()
    server = config.get('cloud:server')
    refresh_url = '{}/api/v1/auth/api-jwt-refresh/'.format(server)

    response = self.api.session.post(refresh_url, json={'token': self.token})
    if response.status_code != 200:
        raise ExternalError("Could not refresh token", error_code=response.status_code)

    # Save token that we just refreshed to the registry and update our own token
    self.token = response.json()['token']

    registry = ComponentRegistry()
    registry.set_config('arch:cloud_token', self.token)
def _convert_api_version(self, version):
    """Parse an API version string of the form X.Y into a list of ints.

    Raises:
        DataError: if the string does not have exactly two components.
    """

    parsed = self._convert_version(version)
    if len(parsed) != 2:
        raise DataError("Invalid API Version, should be X.Y", version_string=version)

    return parsed
def _convert_default_value(self, default): """Convert the passed default value to binary. The default value (if passed) may be specified as either a `bytes` object or a python int or list of ints. If an int or list of ints is passed, it is converted to binary. Otherwise, the raw binary data is used. If you pass a bytes object with python_type as True, do not null terminate it, an additional null termination will be added. Passing a unicode string is only allowed if as_string is True and it will be encoded as utf-8 and null terminated for use as a default value. """ if default is None: return None if isinstance(default, basestring) and not isinstance(default, bytes): if self.special_type == 'string': return default.encode('utf-8') + b'\0' raise DataError("You can only pass a unicode string if you are declaring a string type config variable", default=default) if isinstance(default, (bytes, bytearray)): if self.special_type == 'string' and isinstance(default, bytes): default += b'\0' return default if isinstance(default, int): default = [default] format_string = "<" + (self.base_type*len(default)) return struct.pack(format_string, *default)
def decode(cls, command_str):
    """Decode a string encoded command back into a Command object.

    Args:
        command_str (str): The encoded command string output from a
            previous call to encode.

    Returns:
        Command: The decoded Command object.
    """

    name, _, arg_blob = command_str.partition(" ")

    raw_args = []
    if len(arg_blob) > 0:
        if arg_blob[0] != '{' or arg_blob[-1] != '}':
            raise DataError("Invalid command, argument is not contained in { and }",
                            arg=arg_blob, cmd=name)

        raw_args = arg_blob[1:-1].split(",")

    # Expand any hex: escaped arguments back into their utf-8 text form.
    decoded_args = []
    for item in raw_args:
        if item.startswith("hex:"):
            item = unhexlify(item[4:]).decode('utf-8')

        decoded_args.append(item)

    return Command(name, decoded_args)
def _convert_module_version(self, version):
    """Parse a module version string of the form X.Y.Z into a list of ints.

    Raises:
        DataError: if the string does not have exactly three components.
    """

    parsed = self._convert_version(version)
    if len(parsed) != 3:
        raise DataError("Invalid Module Version, should be X.Y.Z", version_string=version)

    return parsed
def __init__(self, folder):
    """Load an IOTile component from a folder containing module_settings.json.

    Args:
        folder (str): Path to the component directory.

    Raises:
        ExternalError: if module_settings.json cannot be read.
        DataError: if the settings file declares an unknown file format.
    """

    self.folder = folder
    self.filter_prods = False

    modfile = os.path.join(self.folder, 'module_settings.json')

    try:
        with open(modfile, "r") as infile:
            settings = json.load(infile)
    except IOError:
        raise ExternalError("Could not load module_settings.json file, make sure this directory is an IOTile component",
                            path=self.folder)

    # Dispatch to the parser matching the declared file format.
    loaders = {
        IOTile.V1_FORMAT: self._find_v1_settings,
        IOTile.V2_FORMAT: self._find_v2_settings
    }

    file_format = settings.get('file_format', IOTile.V1_FORMAT)
    loader = loaders.get(file_format)
    if loader is None:
        raise DataError("Unknown file format in module_settings.json", format=file_format, path=modfile)

    self._load_settings(loader(settings))
def resource_path(relative_path=None, expect=None):
    """Return the absolute path to a resource in iotile-build.

    This method finds the path to the `config` folder inside
    iotile-build, appends `relative_path` to it and then checks to make
    sure the desired file or directory exists.

    You can specify expect=(None, 'file', or 'folder') for what you
    expect to find at the given path.

    Args:
        relative_path (str): The relative_path from the config folder to
            the resource in question.  This path can be specified using
            / characters on all operating systems since it will be
            normalized before usage.  If None is passed, the based
            config folder will be returned.
        expect (str): What the path should resolve to, which is checked
            before returning, raising a DataError if the check fails.
            You can pass None for no checking, file for checking
            `os.path.isfile`, or folder for checking `os.path.isdir`.
            Default: None

    Returns:
        str: The normalized absolute path to the resource.
    """

    if expect not in (None, 'file', 'folder'):
        raise ArgumentError("Invalid expect parameter, must be None, 'file' or 'folder'", expect=expect)

    # The config folder ships one level above this module.
    result = os.path.join(os.path.dirname(__file__), '..', 'config')

    if relative_path is not None:
        result = os.path.join(result, os.path.normpath(relative_path))

    if expect == 'file' and not os.path.isfile(result):
        raise DataError("Expected resource %s to be a file and it wasn't" % result)
    elif expect == 'folder' and not os.path.isdir(result):
        raise DataError("Expected resource %s to be a folder and it wasn't" % result)

    return os.path.abspath(result)
def _parse_number(self, number): if isinstance(number, int): return number if number in self.variables: return self.variables[number] raise DataError("Reference to undefined variable %s" % number)
def _dump_list(obj): if obj is None: return None if not isinstance(obj, list): raise DataError( "Property %s marked as list was not a list: %s" % (name, repr(obj))) return [x.dump() for x in obj]
def _convert_version(self, version_string): vals = [int(x) for x in version_string.split(".")] invalid = [x for x in vals if x < 0 or x > 255] if len(invalid) > 0: raise DataError("Invalid version number larger than 1 byte", number=invalid[0], version_string=version_string) return vals
def _add_cmd(self, num, symbol, num_ints, has_buffer):
    """Register a TBHandler for command number num.

    Raises:
        DataError: if num is already registered.
    """

    new_handler = TBHandler(symbol=symbol)

    if num in self.commands:
        raise DataError("Attempted to add the same command number twice", number=num,
                        old_handler=self.commands[num], new_handler=new_handler)

    self.commands[num] = new_handler
def _dump_map(obj): if obj is None: return None if not isinstance(obj, dict): raise DataError( "Property %s marked as list was not a dict: %s" % (name, repr(obj))) return {key: val.dump() for key, val in obj.items()}
def _load_release_info(cls, settings): if settings.get('release', False) is False: return None if 'release_date' not in settings: raise DataError("Release mode IOTile component did not include a release date") import dateutil.parser release_date = dateutil.parser.parse(settings['release_date']) dependency_versions = {x: SemanticVersion.FromString(y) for x, y in viewitems(settings.get('dependency_versions', {}))} return ReleaseInfo(release_date, dependency_versions)
def _parse_cmd(self, match):
    """Validate a parsed command declaration and register its handler.

    Raises:
        DataError: if the command number is out of the uint16 range.
    """

    symbol = match['symbol']
    cmd_num = self._parse_number(match['cmd_number'])

    if not 0 <= cmd_num < 2**16:
        raise DataError("Invalid command identifier, must be a number between 0 and 2^16 - 1.",
                        command_id=cmd_num)

    self._add_cmd(cmd_num, symbol, num_ints=match['num_ints'], has_buffer=match['has_buffer'])
def check_time(self, max_slop=300):
    """Check if current system time is consistent with iotile.cloud time.

    Args:
        max_slop (int): Maximum allowed difference in seconds.

    Returns:
        bool: True when local time is within max_slop seconds of cloud time.

    Raises:
        DataError: if the cloud does not report its current time.
    """

    server_info = requests.get('https://iotile.cloud/api/v1/server/').json()

    cloud_time = server_info.get('now', None)
    if cloud_time is None:
        raise DataError("No date header returned from iotile.cloud")

    local_now = datetime.datetime.now(tzutc())
    drift = dateutil.parser.parse(cloud_time) - local_now

    return abs(drift.total_seconds()) < max_slop
def restore_state(self, state):
    """Restore the current state of this emulated object.

    Args:
        state (dict): A previously dumped state produced by dump_state.

    Raises:
        DataError: if the state format or version is not supported.
    """

    state_format = state.get('state_format')
    state_version = state.get('state_version')

    if state_format != 'basic_test_emulated_device':
        raise DataError("Unsupported state format", found=state_format,
                        expected='basic_test_emulated_device')

    if state_version != '1.0.0':
        # Fixed copy/paste bug: report the offending version, not the format.
        raise DataError("Unsupported state version", found=state_version, expected="1.0.0")

    self.tracked_counter = state.get('tracked_counter', 0)
    self.manual_counter = state.get('manual_counter', 0)
def ReportLength(cls, header):
    """Given a header of HeaderLength bytes, calculate the size of this report.

    Returns:
        int: The total length of the report including the header that we
            are passed.

    Raises:
        DataError: if the header declares an unknown auth block type.
    """

    parsed = cls._parse_header(header)

    # Unknown auth types have no entry in the length table.
    auth_size = cls._AUTH_BLOCK_LENGTHS.get(parsed.auth_type)
    if auth_size is None:
        raise DataError("Unknown auth block size in BroadcastReport")

    return cls._HEADER_LENGTH + parsed.reading_length + auth_size
def check_time(self, max_slop=300):
    """Check if current system time is consistent with iotile.cloud time.

    Args:
        max_slop (int): Maximum allowed difference in seconds.

    Returns:
        bool: True when local time is within max_slop seconds of cloud time.

    Raises:
        DataError: if the cloud does not report its current time.
    """

    url = '{}/api/v1/server/'.format(self._domain)

    cloud_time = self.api.session.get(url).json().get('now', None)
    if cloud_time is None:
        raise DataError("No date header returned from iotile cloud", domain=self._domain)

    local_now = datetime.datetime.now(tzutc())
    drift = dateutil.parser.parse(cloud_time) - local_now

    return abs(drift.total_seconds()) < max_slop
def _ensure_product_string(cls, product): """Ensure that all product locations are strings. Older components specify paths as lists of path components. Join those paths into a normal path string. """ if isinstance(product, str): return product if isinstance(product, list): return os.path.join(*product) raise DataError("Unknown object (not str or list) specified as a component product", product=product)
def start(self, channel=None):
    """Start this emulated device.

    This triggers the controller to call start on all peripheral tiles in the device to make sure
    they start after the controller does and then it waits on each one to make sure they have
    finished initializing before returning.

    Args:
        channel (IOTilePushChannel): the channel with a stream and trace
            routine for streaming and tracing data through a VirtualInterface
    """

    super(ReferenceDevice, self).start(channel)

    try:
        # Start the controller first so peripheral tiles come up after it.
        self.controller.start(channel)

        # Guarantee an initialization order so that our trace files are deterministic
        for address, tile in sorted(self._tiles.items()):
            # Address 8 appears to be the controller's slot (started above),
            # so it is skipped here — TODO confirm.
            if address == 8:
                continue

            if not isinstance(tile, EmulatedPeripheralTile):
                raise DataError("An emulated ReferenceDevice can only have a single controller and all other tiles must inherit from EmulatedPeripheralTile",
                                address=address)

            tile.start(channel)

        async def _launch_tiles():
            # Reset the controller and wait (up to 2 seconds) for it to finish
            # initializing before waiting on the peripheral tiles.
            await self.controller.reset()
            await asyncio.wait_for(self.controller.initialized.wait(), 2.0)

            # Note that we do not explicitly reset the tiles.
            # The controller resets all tiles in its reset method.
            for address, tile in sorted(self._tiles.items()):
                if address == 8:
                    continue

                await asyncio.wait_for(tile.initialized.wait(), 2.0)

        # Run the async launch sequence on the emulation loop and block on it.
        self.emulator.run_task_external(_launch_tiles())

        if self._simulating_time:
            self.emulator.add_task(None, self._time_ticker())
    except:
        # Bare except is deliberate here: undo the partial start on any
        # failure (including KeyboardInterrupt) and re-raise.
        self.stop()
        raise