def validate(self):
    """Check optional dependencies and notebook-related configuration.

    Raises ResultProcessorError when a required Python package (IPython,
    jinja2) is unavailable, and ConfigError when the notebook settings
    are inconsistent. "show_*" flags imply the corresponding "convert_*"
    flag, which is switched on automatically with a debug message.
    """
    if ipython.import_error_str:
        raise ResultProcessorError(ipython.import_error_str)
    if not jinja2:
        raise ResultProcessorError(
            '{} requires python-jinja2 package to be installed'.format(self.name))
    if self.show_notebook and not self.notebook_directory:
        raise ConfigError(
            'Requested "show_notebook" but no notebook_directory was specified')
    if self.notebook_directory and not os.path.isdir(self.notebook_directory):
        raise ConfigError(
            'notebook_directory {} does not exist'.format(self.notebook_directory))
    # Showing an output format implies converting to it first.
    if self.show_html and not self.convert_to_html:  # pylint: disable=E0203
        self.convert_to_html = True
        self.logger.debug('Assuming "convert_to_html" as "show_html" is set')
    if self.show_pdf and not self.convert_to_pdf:  # pylint: disable=E0203
        self.convert_to_pdf = True
        self.logger.debug('Assuming "convert_to_pdf" as "show_pdf" is set')
def initialize(self, context):
    """Verify desktop notifications are usable and set up notify2.

    Raises ResultProcessorError if not running on Linux or if the
    notify2 package is not installed.
    """
    # sys.platform may be 'linux2', 'linux3' (some Python 2 builds on
    # 3.x kernels), or 'linux' (Python 3); the original exact comparison
    # against 'linux2' wrongly rejected the other variants, so match the
    # prefix instead.
    if not sys.platform.startswith('linux'):
        raise ResultProcessorError('Notifications are only supported in linux')
    if not notify2:
        raise ResultProcessorError(
            'notify2 not installed. Please install the notify2 package')
    notify2.init("Workload Automation")
def initialize(self, context):
    """Connect to the MongoDB server configured via host/port/extra_params.

    Raises ResultProcessorError if pymongo is not installed or the
    connection attempt fails.
    """
    if pymongo is None:
        raise ResultProcessorError(
            'mongodb result processor requires pymongo package to be installed.')
    try:
        self.client = pymongo.MongoClient(self.host, self.port,
                                          **self.extra_params)
    except pymongo.errors.PyMongoError as e:
        # The original called the non-existent str.fromat(), which would
        # have raised AttributeError instead of reporting the real error.
        raise ResultProcessorError('Error connecting to mongod: {}'.format(e))
def initialize(self, context):  # pylint: disable=R0912
    """Gather per-cluster core topology, idle states and minimum frequencies.

    Populates self.corename_of_clusters, self.idlestate_description,
    self.numberofcores_in_cluster and self.minimum_frequency_cluster from
    the device description, and sets self.multiply_factor (2 for IKS
    scheduling, where each reported core stands for two physical cores).

    Raises ConfigError if the device lacks cpuidle, and
    ResultProcessorError if core types or the scheduler type are unknown.
    """
    self.device = context.device
    if not self.device.has('cpuidle'):
        raise ConfigError('Device does not appear to have cpuidle capability; is the right module installed?')
    if not self.device.core_names:
        message = 'Device does not specify its core types (core_names/core_clusters not set in device_config).'
        raise ResultProcessorError(message)
    number_of_clusters = max(self.device.core_clusters) + 1
    # In IKS devices, the actual number of cores is double what we get
    # from device.number_of_cores.
    if self.device.scheduler == 'iks':
        self.multiply_factor = 2
    elif self.device.scheduler == 'unknown':
        # Device doesn't specify its scheduler type. It could be IKS, in
        # which case reported values would be wrong, so error out.
        message = ('The device does not specify its scheduler type. If you are '
                   'using a generic device interface, please make sure to set the '
                   '"scheduler" parameter in the device config.')
        raise ResultProcessorError(message)
    else:
        self.multiply_factor = 1
    # Separate out the cores in each cluster; this is a list (indexed by
    # cluster) of lists of cluster-membership entries, so each sub-list's
    # length is the number of cores in that cluster.
    listof_cores_clusters = []
    for cluster in range(number_of_clusters):
        listof_cores_clusters.append([core for core in self.device.core_clusters
                                      if core == cluster])
    # Extract the minimum frequency of each cluster and
    # the idle power state with its descriptive name.
    total_cores = 0
    current_cores = 0
    for cluster, cores_list in enumerate(listof_cores_clusters):
        # total_cores is the index of the first core of this cluster.
        self.corename_of_clusters.append(self.device.core_names[total_cores])
        if self.device.scheduler != 'iks':
            self.idlestate_description.update({s.id: s.desc for s in
                                               self.device.get_cpuidle_states(total_cores)})
        else:
            self.idlestate_description.update({s.id: s.desc for s in
                                               self.device.get_cpuidle_states()})
        total_cores += len(cores_list)
        self.numberofcores_in_cluster.append(len(cores_list))
        # Use the first online core of the cluster to read the minimum
        # frequency; if all cores are offline, nothing is appended here
        # (backfilled below).
        for i in range(current_cores, total_cores):
            if i in self.device.online_cpus:
                self.minimum_frequency_cluster.append(
                    int(self.device.get_cpu_min_frequency("cpu{}".format(i))))
                break
        current_cores = total_cores
    # Backfill clusters that had no online core: -1 marks an offline
    # cluster, except on IKS where the switch frequency is used.
    length_frequency_cluster = len(self.minimum_frequency_cluster)
    if length_frequency_cluster != number_of_clusters:
        diff = number_of_clusters - length_frequency_cluster
        offline_value = -1
        for i in range(diff):
            if self.device.scheduler != 'iks':
                self.minimum_frequency_cluster.append(offline_value)
            else:
                self.minimum_frequency_cluster.append(self.device.iks_switch_frequency)
def _validate_schema_version(self):
    """Check that the results database schema matches SCHEMA_VERSION.

    Raises ResultProcessorError if the database has no __meta table
    (i.e. is not a WA results database) or if the stored schema version
    differs from the current one.
    """
    # NOTE(review): the helper name '_open_connecton' is misspelled at its
    # definition elsewhere in this file; the call must match it.
    with self._open_connecton() as conn:
        try:
            cursor = conn.execute('SELECT schema_version FROM __meta')
            found_version = cursor.fetchone()[0]
        except sqlite3.OperationalError:
            # Missing table/column: this is not a WA results database.
            raise ResultProcessorError(
                '{} does not appear to be a valid WA results database.'.format(
                    self.database))
        if found_version != SCHEMA_VERSION:
            raise ResultProcessorError(
                'Schema version in {} ({}) does not match current version ({}).'.format(
                    self.database, found_version, SCHEMA_VERSION))
def initialize(self, context):
    """Ensure a new-enough pandas is available before processing results.

    Raises ResultProcessorError when pandas is missing or older than
    0.13.1.
    """
    minimum_version = LooseVersion('0.13.1')
    if pd and LooseVersion(pd.__version__) >= minimum_version:
        return
    raise ResultProcessorError(
        'uxperf result processor requires pandas Python package '
        '(version 0.13.1 or higher) to be installed.\n'
        'You can install it with pip, e.g. "sudo pip install pandas"')
def initialize(self, context):
    """Validate processor prerequisites: pandas version and fps instrument.

    Raises ResultProcessorError when pandas is missing or older than
    0.13.1, and ConfigError when add_frames is requested without the fps
    instrument enabled.
    """
    minimum_version = LooseVersion('0.13.1')
    pandas_usable = pd and LooseVersion(pd.__version__) >= minimum_version
    if not pandas_usable:
        raise ResultProcessorError(
            'uxperf result processor requires pandas Python package '
            '(version 0.13.1 or higher) to be installed.\n'
            'You can install it with pip, e.g. "sudo pip install pandas"')
    # Frame data comes from the fps instrument, so it must be active.
    if self.add_frames and not instrument_is_enabled('fps'):
        raise ConfigError(
            'fps instrument must be enabled in order to add frames.')
class MongodbUploader(ResultProcessor):

    name = 'mongodb'
    description = """
    Uploads run results to a MongoDB instance.

    MongoDB is a popular document-based data store (NoSQL database).

    """

    parameters = [
        Parameter('uri', kind=str, default=None,
                  description="""Connection URI. If specified, this will be used for connecting
                                 to the backend, and host/port parameters will be ignored."""),
        Parameter('host', kind=str, default='localhost', mandatory=True,
                  description='IP address/name of the machine hosting the MongoDB server.'),
        Parameter('port', kind=int, default=27017, mandatory=True,
                  description='Port on which the MongoDB server is listening.'),
        Parameter('db', kind=str, default='wa', mandatory=True,
                  description='Database on the server used to store WA results.'),
        Parameter('extra_params', kind=dict, default={},
                  description='''Additional connection parameters may be specified using this
                                 (see pymongo documentation).'''),
        Parameter('authentication', kind=dict, default={},
                  description='''If specified, this will be passed to db.authenticate() upon
                                 connection; see pymongo documentation authentication examples
                                 for details.'''),
    ]

    def initialize(self, context):
        """Connect to MongoDB and create the document describing this run.

        Sets up the client, GridFS handle, and a unique GridFS directory
        for the run's artifacts, then inserts the initial run document.

        Raises ResultProcessorError if pymongo is missing, the server is
        unreachable, or authentication fails.
        """
        if pymongo is None:
            raise ResultProcessorError(
                'mongodb result processor requires pymongo package to be installed.')
        try:
            self.client = pymongo.MongoClient(self.host, self.port,
                                              **self.extra_params)
        except pymongo.errors.PyMongoError as e:
            # The original called the non-existent str.fromat(), which
            # would have raised AttributeError and hidden the real error.
            raise ResultProcessorError('Error connecting to mongod: {}'.format(e))
        self.dbc = self.client[self.db]
        self.fs = GridFS(self.dbc)
        if self.authentication:
            if not self.dbc.authenticate(**self.authentication):
                raise ResultProcessorError(
                    'Authentication to database {} failed.'.format(self.db))
        self.run_result_dbid = ObjectId()

        # Reshape the run info for storage: move device properties (with
        # mongo-safe keys) under 'device' and record the WA adapter name.
        run_doc = context.run_info.to_dict()
        wa_adapter = run_doc['device']
        devprops = dict((k.translate(KEY_TRANS_TABLE), v)
                        for k, v in run_doc['device_properties'].iteritems())
        run_doc['device'] = devprops
        run_doc['device']['wa_adapter'] = wa_adapter
        del run_doc['device_properties']

        run_doc['output_directory'] = os.path.abspath(context.output_directory)
        run_doc['artifacts'] = []
        run_doc['workloads'] = context.config.to_dict()['workload_specs']
        for workload in run_doc['workloads']:
            workload['name'] = workload['workload_name']
            del workload['workload_name']
            workload['results'] = []
        self.run_dbid = self.dbc.runs.insert(run_doc)

        # Pick a unique GridFS directory for this run by appending a
        # '-<i>' suffix and bumping i until the name is free.
        prefix = context.run_info.project if context.run_info.project else '[NOPROJECT]'
        run_part = context.run_info.run_name or context.run_info.uuid.hex
        self.gridfs_dir = os.path.join(prefix, run_part)
        i = 0
        while self.gridfs_directory_exists(self.gridfs_dir):
            suffix = '-{}'.format(i)
            if self.gridfs_dir.endswith(suffix):
                # Strip the previous suffix by its actual length; the
                # original sliced a fixed two characters ([:-2]), which
                # corrupts the name once i reaches double digits.
                self.gridfs_dir = self.gridfs_dir[:-len(suffix)]
            i += 1
            self.gridfs_dir += '-{}'.format(i)

        # Keep track of all generated artifacts, so that we know what to
        # include in the tarball. The tarball will contain raw artifacts
        # (other kinds would have been uploaded directly or do not contain
        # new data) and all files in the results dir that have not been
        # marked as artifacts.
        self.artifacts = []