def get_data_particle_values_as_dict(self, data_particle):
    """
    Return all of the data particle values as a dictionary with the value id
    as the key and the value as the value.  This method will decimate the
    data, in the any characteristics other than value id and value.  i.e. binary.
    @param data_particle data particle to inspect
    @return return a dictionary with keys and values { value-id: value }
    @throws IDKException when missing values dictionary
    """
    sample_dict = self.convert_data_particle_to_dict(data_particle)

    values = sample_dict.get('values')
    if not values:
        raise IDKException("Data particle missing values")
    if not isinstance(values, list):
        raise IDKException("Data particle values not a list")

    result = {}
    for param in values:
        if not isinstance(param, dict):
            raise IDKException("must be a dict")

        key = param.get('value_id')
        # Identity check: '== None' can misbehave on objects overriding
        # __eq__; 'is None' is the correct idiom.
        if key is None:
            raise IDKException("value_id not defined")

        # Membership test directly on the dict; no need to build keys().
        if key in result:
            raise IDKException("duplicate value detected for %s" % key)

        result[key] = param.get('value')

    return result
def create_data_dir(self):
    """
    Verify the test data directory is created and exists.  Return the path
    to the directory.
    @return: path to data directory
    @raise: IDKConfigMissing no harvester config
    @raise: IDKException if data_dir exists, but not a directory
    """
    config = self._driver_config().get('startup_config')
    if not config:
        raise IDKConfigMissing("Driver config missing 'startup_config'")

    harvester = config.get('harvester')
    if not harvester:
        raise IDKConfigMissing("Startup config missing 'harvester' config")

    directory = harvester.get("directory")
    if not directory:
        raise IDKConfigMissing("Harvester config missing 'directory'")

    if os.path.exists(directory):
        # The path already exists; it must actually be a directory.
        if not os.path.isdir(directory):
            raise IDKException("'data_dir' is not a directory")
    else:
        log.debug("Creating data dir: %s", directory)
        os.makedirs(directory)

    return directory
def _build_stream_config(self):
    """
    Build self.stream_config: for each stream named in the driver's packet
    config, look up its parameter dictionary (optionally falling back to
    DEFAULT_STREAM_NAME), create a stream definition and a stream, and
    record the routing information needed to publish to it.
    @raise IDKException if a stream has no parameter dictionary and no
           default fallback was usable
    """
    if not self.packet_config:
        return

    streams = self.packet_config
    log.debug("Streams: %s", streams)

    # Create a pubsub client to create streams.
    pubsub_client = PubsubManagementServiceClient(node=self.container.node)
    dataset_management = DatasetManagementServiceClient()

    # Create streams and subscriptions for each stream named in driver.
    self.stream_config = {}

    for stream_name in streams:
        pd_id = None
        # Narrowed from a bare 'except:', which would also have swallowed
        # KeyboardInterrupt and SystemExit.
        try:
            pd_id = dataset_management.read_parameter_dictionary_by_name(
                stream_name, id_only=True)
        except Exception:
            log.error("No pd_id found for param_dict '%s'" % stream_name)
            if self.use_default_stream:
                log.error("using default pd '%s'" % DEFAULT_STREAM_NAME)
                pd_id = dataset_management.read_parameter_dictionary_by_name(
                    DEFAULT_STREAM_NAME, id_only=True)

        if not pd_id:
            raise IDKException(
                "Missing parameter dictionary for stream '%s'" % stream_name)

        log.debug("parameter dictionary id: %s" % pd_id)

        stream_def_id = pubsub_client.create_stream_definition(
            name=stream_name, parameter_dictionary_id=pd_id)

        pd = pubsub_client.read_stream_definition(
            stream_def_id).parameter_dictionary

        try:
            stream_id, stream_route = pubsub_client.create_stream(
                name=stream_name,
                exchange_point='science_data',
                stream_definition_id=stream_def_id)

            stream_config = dict(
                stream_route=stream_route,
                routing_key=stream_route.routing_key,
                exchange_point=stream_route.exchange_point,
                stream_id=stream_id,
                stream_definition_ref=stream_def_id,
                parameter_dictionary=pd)

            self.stream_config[stream_name] = stream_config
        except Exception as e:
            # Best-effort per stream: log and continue with the others.
            log.error("stream publisher exception: %s", e)
def _get_source_data_file(self, filename):
    """
    Search for a sample data file, first check the driver resource directory
    then just use the filename as a path.  If the file doesn't exists raise
    an exception
    @param filename name or path of the file to search for
    @return full path to the found data file
    @raise IDKException if the file isn't found
    """
    resource_dir = Metadata().resource_dir()
    source_path = os.path.join(resource_dir, filename)

    log.debug("Search for resource file (%s) in %s", filename, resource_dir)
    if os.path.isfile(source_path):
        log.debug("Found %s in resource directory", filename)
        return source_path

    log.debug("Search for resource file (%s) in current directory", filename)
    if os.path.isfile(filename):
        log.debug("Found %s in the current directory", filename)
        return filename

    # Interpolate the filename before raising: the original passed it as a
    # second constructor argument, so "%s" was never formatted.
    raise IDKException("Data file %s does not exist" % filename)
def _inspect_driver_module(self, test_module):
    '''
    Search the driver module for class definitions which are UNIT, INT,
    and QUAL tests.  We will import the module do a little introspection
    and set member variables for the three test types.
    @raises: ImportError - we can't load the test module
    @raises: IDKException - if all test types aren't found
    '''
    self._unit_test_class = None
    self._int_test_class = None
    self._qual_test_class = None

    log.debug("Loading test module: %s", test_module)
    __import__(test_module)
    module = sys.modules.get(test_module)

    for name, clsobj in inspect.getmembers(module, inspect.isclass):
        # Only consider classes defined directly in the test module;
        # skip anything merely imported into it.
        if str(clsobj) != "<class '%s.%s'>" % (test_module, name):
            continue

        if issubclass(clsobj, DSATestClasses.UNIT):
            self._unit_test_class = name
        if issubclass(clsobj, DSATestClasses.INT):
            self._int_test_class = name
        if issubclass(clsobj, DSATestClasses.QUAL):
            self._qual_test_class = name

    if self._int_test_class is None:
        raise IDKException("integration test class not found")
    if self._qual_test_class is None:
        raise IDKException("qualification test class not found")

    # Record whether unit tests exist; a missing unit test class is not
    # an error, we just won't run that suite.
    self.has_unit = self._unit_test_class is not None
    if not self.has_unit:
        self._log("No unit test class found")
def _inspect_driver_module(self, test_module):
    '''
    Search the driver module for class definitions which are UNIT, INT,
    and QUAL tests.  We will import the module do a little introspection
    and set member variables for the three test types.
    @raises: ImportError - we can't load the test module
    @raises: IDKException - if all three test types aren't found
    '''
    self._unit_test_class = None
    self._int_test_class = None
    self._qual_test_class = None
    self._pub_test_class = None

    __import__(test_module)
    module = sys.modules.get(test_module)

    for name, clsobj in inspect.getmembers(module, inspect.isclass):
        # Skip classes that were imported into the module rather than
        # defined in it.
        if "<class '%s.%s'>" % (test_module, name) != str(clsobj):
            continue

        if issubclass(clsobj, IDKTestClasses.UNIT):
            self._unit_test_class = name
        if issubclass(clsobj, IDKTestClasses.INT):
            self._int_test_class = name
        if issubclass(clsobj, IDKTestClasses.QUAL):
            self._qual_test_class = name
        if issubclass(clsobj, IDKTestClasses.PUB):
            self._pub_test_class = name

    # PUB tests are optional; the other three are required.
    if self._unit_test_class is None:
        raise IDKException("unit test class not found")
    if self._qual_test_class is None:
        raise IDKException("qual test class not found")
    if self._int_test_class is None:
        raise IDKException("int test class not found")
def assert_particle_not_published(self, driver, sample_data,
                                  particle_assert_method, verify_values=False):
    """
    Verify that feeding sample_data to the driver does NOT publish a
    particle: the positive assertion helper must fail with the
    "0 != 1" (no particle seen) assertion error.
    """
    try:
        self.assert_particle_published(driver, sample_data,
                                       particle_assert_method, verify_values)
    except AssertionError as e:
        # Only the "no particles published" failure counts as success;
        # any other assertion failure is a real problem.
        if str(e) != "0 != 1":
            raise
    else:
        raise IDKException(
            "assert_particle_not_published: particle was published")
def convert_data_particle_to_dict(self, data_particle):
    """
    Convert a data particle object to a dict.  This will work for data
    particles as DataParticle object, dictionaries or a string
    @param data_particle data particle
    @return dictionary representation of a data particle
    @raise IDKException if the particle is of an unsupported type
    """
    if isinstance(data_particle, DataParticle):
        sample_dict = data_particle.generate_dict()
    elif isinstance(data_particle, str):
        sample_dict = json.loads(data_particle)
    elif isinstance(data_particle, dict):
        sample_dict = data_particle
    else:
        # Interpolate the type into the message; passing it as a second
        # constructor argument (as before) left the "%s" unformatted.
        raise IDKException("invalid data particle type: %s" %
                           type(data_particle))

    return sample_dict
def init_instrument_simulator(self):
    """
    Startup a TCP server that we can use as an instrument simulator
    """
    self._instrument_simulator = TCPSimulatorServer()
    self.addCleanup(self._instrument_simulator.close)

    # Give the simulator up to ten seconds to bind to a port, polling
    # once per second.
    deadline = time.time() + 10
    while time.time() < deadline:
        if self._instrument_simulator.port > 0:
            log.debug("Instrument simulator initialized on port %s" %
                      self._instrument_simulator.port)
            return
        log.debug("waiting for simulator to bind. sleeping")
        time.sleep(1)

    raise IDKException("Timeout waiting for simulator to bind")
def run_publication(self):
    """
    @brief Run publication test for a driver
    @return result of the nose run over the publication test module
    @raise IDKException if the module defines no publication test class
    """
    self._log("*** Starting Publication Tests ***")
    self._log(" ==> module: " + self._data_test_module())

    # Identity check ('is None') instead of '== None'.
    if self._pub_test_class is None:
        raise IDKException(
            "Test module does not contain publication tests")

    self._log(" ==> class: " + self._pub_test_module_param())

    args = [sys.argv[0]]
    args += [self._pub_test_module_param()]

    # _data_test_module() is already a string (concatenated above), so no
    # "%s" re-formatting is needed.
    module = self._data_test_module()

    return self._run_nose(module, args)
def _generate_setup_file(self):
    """
    Render the setup.py template into the build directory, creating the
    directory first if needed.
    @raise IDKException if the build directory could not be created
    """
    if not os.path.exists(self._build_dir()):
        os.makedirs(self._build_dir())

    if not os.path.exists(self._build_dir()):
        raise IDKException("failed to create build dir: %s" %
                           self._build_dir())

    setup_file = self._setup_path()
    setup_template = self._get_template(self._setup_template_path())

    log.debug("Create setup.py file: %s", setup_file)
    log.debug("setup.py template file: %s", self._setup_template_path())
    log.debug("setup.py template date: %s", self._setup_template_data())
    log.debug("setup.py template: %s", setup_template)

    # Render before opening so a substitute() failure cannot leak an
    # open handle; 'with' guarantees the file is closed either way
    # (the original opened the file manually and leaked it on error).
    code = setup_template.substitute(self._setup_template_data())
    log.debug("CODE: %s", code)

    with open(setup_file, 'w') as ofile:
        ofile.write(code)