def run(self, args):
    """
    Run test modules or simple tests.

    :param args: Command line args received from the run subparser.
    :returns: the exit code produced by the job run.
    """
    log = logging.getLogger("avocado.app")
    if args.unique_job_id is not None:
        try:
            # a valid job ID is a 40 character hex string (sha1-sized)
            int(args.unique_job_id, 16)
            if len(args.unique_job_id) != 40:
                raise ValueError
        except ValueError:
            log.error('Unique Job ID needs to be a 40 digit hex number')
            sys.exit(exit_codes.AVOCADO_FAIL)
    try:
        # normalize human readable values such as "2h" into seconds
        args.job_timeout = time_to_seconds(args.job_timeout)
    except ValueError as e:
        # fix: exceptions have no ``message`` attribute on Python 3
        # (it was removed after Python 2.6); ``str(e)`` renders the
        # same error text portably
        log.error(str(e))
        sys.exit(exit_codes.AVOCADO_FAIL)
    job_instance = job.Job(args)
    job_run = job_instance.run()
    result_dispatcher = ResultDispatcher()
    if result_dispatcher.extensions:
        result_dispatcher.map_method('render',
                                     job_instance.result,
                                     job_instance)
    return job_run
def fetch_assets(test_file, klass=None, method=None, logger=None):
    """Fetch every asset referenced by the FetchAssetHandler calls.

    :param test_file: File name of instrumented test to be evaluated
    :type test_file: str
    :param klass: optional class name to restrict the evaluation to
    :param method: optional method name to restrict the evaluation to
    :param logger: optional logger used to report fetch activity
    :returns: list of names that were successfully fetched and list of fails.
    """
    cache_dirs = settings.as_dict().get('datadir.paths.cache_dirs')
    timeout = settings.as_dict().get('assets.fetch.timeout')
    fetched = []
    errors = []
    for params in FetchAssetHandler(test_file, klass, method).calls:
        # expiration is given in human readable form and must be converted
        expire = params.pop('expire', None)
        if expire is not None:
            expire = data_structures.time_to_seconds(str(expire))
        try:
            asset_obj = Asset(**params, cache_dirs=cache_dirs, expire=expire)
            if logger is not None:
                logger.info('Fetching asset from %s:%s.%s',
                            test_file, klass, method)
            asset_obj.fetch(timeout)
            fetched.append(params['name'])
        except (OSError, ValueError) as details:
            errors.append(details)
    return fetched, errors
def run(self, args):
    """
    Run test modules or simple tests.

    :param args: Command line args received from the run subparser.
    :returns: the exit code produced by the job run.
    """
    log = logging.getLogger("avocado.app")
    if args.unique_job_id is not None:
        try:
            # a valid job ID is a 40 character hex string (sha1-sized)
            int(args.unique_job_id, 16)
            if len(args.unique_job_id) != 40:
                raise ValueError
        except ValueError:
            log.error('Unique Job ID needs to be a 40 digit hex number')
            sys.exit(exit_codes.AVOCADO_FAIL)
    try:
        # normalize human readable values such as "2h" into seconds
        args.job_timeout = time_to_seconds(args.job_timeout)
    except ValueError as e:
        # fix: exceptions have no ``message`` attribute on Python 3
        # (it was removed after Python 2.6); ``str(e)`` renders the
        # same error text portably
        log.error(str(e))
        sys.exit(exit_codes.AVOCADO_FAIL)
    job_instance = job.Job(args)
    job_run = job_instance.run()
    result_dispatcher = ResultDispatcher()
    if result_dispatcher.extensions:
        result_dispatcher.map_method('render',
                                     job_instance.result,
                                     job_instance)
    return job_run
def run(self, args):
    """
    Run test modules or simple tests.

    :param args: Command line args received from the run subparser.
    :returns: the exit code produced by the job run.
    """
    log = logging.getLogger("avocado.app")
    if args.unique_job_id is not None:
        try:
            # a valid job ID is a 40 character hex string (sha1-sized)
            int(args.unique_job_id, 16)
            if len(args.unique_job_id) != 40:
                raise ValueError
        except ValueError:
            log.error('Unique Job ID needs to be a 40 digit hex number')
            sys.exit(exit_codes.AVOCADO_FAIL)
    try:
        # normalize human readable values such as "2h" into seconds
        args.job_timeout = time_to_seconds(args.job_timeout)
    except ValueError as e:
        # fix: exceptions have no ``message`` attribute on Python 3
        # (it was removed after Python 2.6); ``str(e)`` renders the
        # same error text portably
        log.error(str(e))
        sys.exit(exit_codes.AVOCADO_FAIL)
    job_instance = job.Job(args)
    job_run = job_instance.run()
    result_dispatcher = ResultDispatcher()
    if result_dispatcher.extensions:
        # At this point job_instance doesn't have a single results
        # attribute which is the end goal.  For now, we pick any of the
        # plugin classes added to the result proxy.
        if job_instance.result_proxy.output_plugins:
            result = job_instance.result_proxy.output_plugins[0]
            result_dispatcher.map_method('render', result, job_instance)
    return job_run
def fetch_assets(test_file, klass=None, method=None, logger=None):
    """
    Fetches the assets based on keywords listed on FetchAssetHandler.calls.

    :param test_file: File name of instrumented test to be evaluated
    :type test_file: str
    :param klass: optional class name to restrict the evaluation to
    :param method: optional method name to restrict the evaluation to
    :param logger: optional logger used to report fetch activity
    :returns: list of names that were successfully fetched and list of fails.
    """
    cache_dirs = data_dir.get_cache_dirs()
    fetched = []
    errors = []
    handler = FetchAssetHandler(test_file, klass, method)
    for params in handler.calls:
        # expiration is given in human readable form and must be converted
        expire = params.pop('expire', None)
        if expire is not None:
            expire = data_structures.time_to_seconds(str(expire))
        # Python 3.4 does not support mixing ** unpacking with extra
        # keyword arguments (Asset(**params, cache_dirs=..., expire=...)),
        # so fold the extra parameters into the dictionary instead.
        params['cache_dirs'] = cache_dirs
        params['expire'] = expire
        try:
            asset_obj = Asset(**params)
            if logger is not None:
                logger.info('Fetching asset from %s:%s.%s',
                            test_file, klass, method)
            asset_obj.fetch()
            fetched.append(params['name'])
        except (OSError, ValueError) as details:
            errors.append(details)
    return fetched, errors
def run(self, args):
    """
    Run test modules or simple tests.

    :param args: Command line args received from the run subparser.
    :returns: the exit code produced by the job run.
    """
    log = logging.getLogger("avocado.app")
    if args.unique_job_id is not None:
        try:
            # a valid job ID is a 40 character hex string (sha1-sized)
            int(args.unique_job_id, 16)
            if len(args.unique_job_id) != 40:
                raise ValueError
        except ValueError:
            log.error('Unique Job ID needs to be a 40 digit hex number')
            sys.exit(exit_codes.AVOCADO_FAIL)
    try:
        # normalize human readable values such as "2h" into seconds
        args.job_timeout = time_to_seconds(args.job_timeout)
    except ValueError as e:
        # fix: exceptions have no ``message`` attribute on Python 3
        # (it was removed after Python 2.6); ``str(e)`` renders the
        # same error text portably
        log.error(str(e))
        sys.exit(exit_codes.AVOCADO_FAIL)
    job_instance = job.Job(args)
    job_run = job_instance.run()
    result_dispatcher = ResultDispatcher()
    if result_dispatcher.extensions:
        # At this point job_instance doesn't have a single results
        # attribute which is the end goal.  For now, we pick any of the
        # plugin classes added to the result proxy.
        if job_instance.result_proxy.output_plugins:
            result = job_instance.result_proxy.output_plugins[0]
            result_dispatcher.map_method('render', result, job_instance)
    return job_run
def run(self, runnable):  # pylint: disable=W0201
    """Fetch the asset described by ``runnable`` and yield status messages.

    Generator used as a runner entry point: it yields "started",
    periodic "running" updates while the fetch is in progress, the
    captured stdout/stderr, and a final "finished" status carrying
    the overall result ("pass"/"error" style string produced by
    ``self._fetch_asset`` — exact values set by that helper).
    """
    self.runnable = runnable
    yield self.prepare_status("started")
    name = self.runnable.kwargs.get("name")
    # if name was passed correctly, run the Avocado Asset utility
    if name is not None:
        asset_hash = self.runnable.kwargs.get("asset_hash")
        algorithm = self.runnable.kwargs.get("algorithm")
        locations = self.runnable.kwargs.get("locations")
        expire = self.runnable.kwargs.get("expire")
        if expire is not None:
            # convert human readable forms such as "2h" into seconds
            expire = data_structures.time_to_seconds(str(expire))
        cache_dirs = self.runnable.config.get("datadir.paths.cache_dirs")
        if cache_dirs is None:
            # fall back to the global settings when the runnable's own
            # config does not carry the cache directories
            cache_dirs = settings.as_dict().get("datadir.paths.cache_dirs")
        # let's spawn it to another process to be able to update the
        # status messages and avoid the Asset to lock this process
        queue = SimpleQueue()
        process = Process(
            target=self._fetch_asset,
            args=(
                name,
                asset_hash,
                algorithm,
                locations,
                cache_dirs,
                expire,
                queue,
            ),
        )
        process.start()
        # poll until the worker posts its result, emitting heartbeat
        # "running" statuses so the caller knows we are alive
        while queue.empty():
            time.sleep(RUNNER_RUN_STATUS_INTERVAL)
            yield self.prepare_status("running")
        output = queue.get()
        result = output["result"]
        stdout = output["stdout"]
        stderr = output["stderr"]
    else:
        # Otherwise, log the missing package name
        result = "error"
        stdout = ""
        stderr = 'At least name should be passed as kwargs using name="uri".'
    yield self.prepare_status("running", {
        "type": "stdout",
        "log": stdout.encode()
    })
    yield self.prepare_status("running", {
        "type": "stderr",
        "log": stderr.encode()
    })
    yield self.prepare_status("finished", {"result": result})
def fetch_assets(test_file, klass=None, method=None, logger=None):
    """
    Fetches the assets based on keywords listed on FetchAssetHandler.calls.

    :param test_file: File name of instrumented test to be evaluated
    :type test_file: str
    :param klass: optional class name to restrict the evaluation to
    :param method: optional method name to restrict the evaluation to
    :param logger: optional logger used to report fetch activity
    :returns: list of names that were successfully fetched and list of fails.
    """
    def _supported(params):
        """Return True when the call carries enough data to build an Asset.

        :param params: parameters for the Asset object.
        :type params: dict
        """
        name = params.get('name', None)
        locations = params.get('locations', None)
        # probably, parameter name was defined as a class attribute
        if name is None:
            return False
        # probably, parameter locations was defined as a class attribute:
        # a bare name (no URL scheme) needs at least one location
        if not urllib.parse.urlparse(name).scheme and locations is None:
            return False
        return True

    cache_dirs = data_dir.get_cache_dirs()
    fetched = []
    errors = []
    for params in FetchAssetHandler(test_file, klass, method).calls:
        # skip calls whose parameters cannot produce a usable Asset
        if not _supported(params):
            continue
        expire = params.pop('expire', None)
        if expire is not None:
            expire = data_structures.time_to_seconds(str(expire))
        try:
            # make dictionary unpacking compatible with python 3.4 as it
            # does not support constructions like:
            # Asset(**params, cache_dirs=cache_dirs, expire=expire)
            params['cache_dirs'] = cache_dirs
            params['expire'] = expire
            asset_obj = Asset(**params)
            if logger is not None:
                logger.info('Fetching asset from %s:%s.%s',
                            test_file, klass, method)
            asset_obj.fetch()
            fetched.append(params['name'])
        except (OSError, ValueError) as details:
            errors.append(details)
    return fetched, errors
def run(self, runnable):  # pylint: disable=W0201
    """Fetch the asset described by ``runnable`` and yield status messages.

    Generator used as a runner entry point: it yields "started",
    periodic "running" updates while the fetch happens in a child
    process, the captured stdout/stderr, and a final "finished"
    status carrying the overall result string produced by
    ``self._fetch_asset``.
    """
    self.runnable = runnable
    yield self.prepare_status('started')
    name = self.runnable.kwargs.get('name')
    # if name was passed correctly, run the Avocado Asset utility
    if name is not None:
        asset_hash = self.runnable.kwargs.get('asset_hash')
        algorithm = self.runnable.kwargs.get('algorithm')
        locations = self.runnable.kwargs.get('locations')
        expire = self.runnable.kwargs.get('expire')
        if expire is not None:
            # convert human readable forms such as "2h" into seconds
            expire = data_structures.time_to_seconds(str(expire))
        cache_dirs = self.runnable.config.get('datadir.paths.cache_dirs')
        if cache_dirs is None:
            # fall back to the global settings when the runnable's own
            # config does not carry the cache directories
            cache_dirs = settings.as_dict().get('datadir.paths.cache_dirs')
        # let's spawn it to another process to be able to update the
        # status messages and avoid the Asset to lock this process
        queue = SimpleQueue()
        process = Process(target=self._fetch_asset,
                          args=(name, asset_hash, algorithm, locations,
                                cache_dirs, expire, queue))
        process.start()
        # poll until the worker posts its result, emitting heartbeat
        # "running" statuses so the caller knows we are alive
        while queue.empty():
            time.sleep(RUNNER_RUN_STATUS_INTERVAL)
            yield self.prepare_status('running')
        output = queue.get()
        result = output['result']
        stdout = output['stdout']
        stderr = output['stderr']
    else:
        # Otherwise, log the missing package name
        result = 'error'
        stdout = ''
        stderr = ('At least name should be passed as kwargs using'
                  ' name="uri".')
    yield self.prepare_status('running',
                              {'type': 'stdout',
                               'log': stdout.encode()})
    yield self.prepare_status('running',
                              {'type': 'stderr',
                               'log': stderr.encode()})
    yield self.prepare_status('finished', {'result': result})
def run(self, config):
    """
    Run test modules or simple tests.

    :param config: Configuration received from command line parser and
                   possibly other sources.
    :type config: dict
    :returns: the exit code produced by the job run.
    """
    if 'output_check_record' in config:
        process.OUTPUT_CHECK_RECORD_MODE = config.get('output_check_record',
                                                      None)

    warnings.warn(
        "The following arguments will be changed to boolean soon: "
        "sysinfo, output-check, failfast, keep-tmp, "
        "ignore-missing-references, sysinfo and output-check",
        FutureWarning)

    unique_id = config.get('unique_job_id')
    if unique_id is not None:
        try:
            # a valid job ID is a 40 character hex string (sha1-sized)
            int(unique_id, 16)
            if len(unique_id) != 40:
                raise ValueError
        except ValueError:
            LOG_UI.error('Unique Job ID needs to be a 40 digit hex number')
            sys.exit(exit_codes.AVOCADO_FAIL)

    try:
        # normalize human readable values such as "2h" into seconds
        config['job_timeout'] = time_to_seconds(config.get('job_timeout'))
    except ValueError as detail:
        LOG_UI.error(detail.args[0])
        sys.exit(exit_codes.AVOCADO_FAIL)

    with job.Job(config) as job_instance:
        pre_post_dispatcher = JobPrePostDispatcher()
        try:
            # Run JobPre plugins
            output.log_plugin_failures(pre_post_dispatcher.load_failures)
            pre_post_dispatcher.map_method('pre', job_instance)
            job_run = job_instance.run()
        finally:
            # Run JobPost plugins
            pre_post_dispatcher.map_method('post', job_instance)

    result_dispatcher = ResultDispatcher()
    if result_dispatcher.extensions:
        result_dispatcher.map_method('render',
                                     job_instance.result,
                                     job_instance)
    return job_run
def run(self, args):
    """
    Run test modules or simple tests.

    :param args: Command line args received from the run subparser.
    :returns: the exit code produced by the job run.
    """
    if 'output_check_record' in args:
        process.OUTPUT_CHECK_RECORD_MODE = getattr(args,
                                                   'output_check_record',
                                                   None)

    job_id = args.unique_job_id
    if job_id is not None:
        try:
            # a valid job ID is a 40 character hex string (sha1-sized)
            int(job_id, 16)
            if len(job_id) != 40:
                raise ValueError
        except ValueError:
            LOG_UI.error('Unique Job ID needs to be a 40 digit hex number')
            sys.exit(exit_codes.AVOCADO_FAIL)

    try:
        # normalize human readable values such as "2h" into seconds
        args.job_timeout = time_to_seconds(args.job_timeout)
    except ValueError as detail:
        LOG_UI.error(detail.args[0])
        sys.exit(exit_codes.AVOCADO_FAIL)

    with job.Job(args) as job_instance:
        pre_post_dispatcher = JobPrePostDispatcher()
        try:
            # Run JobPre plugins
            output.log_plugin_failures(pre_post_dispatcher.load_failures)
            pre_post_dispatcher.map_method('pre', job_instance)
            job_run = job_instance.run()
        finally:
            # Run JobPost plugins
            pre_post_dispatcher.map_method('post', job_instance)

    result_dispatcher = ResultDispatcher()
    if result_dispatcher.extensions:
        result_dispatcher.map_method('render',
                                     job_instance.result,
                                     job_instance)
    return job_run
def run(self, args):
    """
    Run test modules or simple tests.

    :param args: Command line args received from the run subparser.
    :returns: the exit code produced by the job run.
    """
    def _abort(message):
        # log the failure reason and leave with the generic failure code
        LOG_UI.error(message)
        sys.exit(exit_codes.AVOCADO_FAIL)

    if 'output_check_record' in args:
        process.OUTPUT_CHECK_RECORD_MODE = getattr(args,
                                                   'output_check_record',
                                                   None)

    if args.unique_job_id is not None:
        try:
            # a valid job ID is a 40 character hex string (sha1-sized)
            int(args.unique_job_id, 16)
            if len(args.unique_job_id) != 40:
                raise ValueError
        except ValueError:
            _abort('Unique Job ID needs to be a 40 digit hex number')

    try:
        # normalize human readable values such as "2h" into seconds
        args.job_timeout = time_to_seconds(args.job_timeout)
    except ValueError as detail:
        _abort(detail.args[0])

    with job.Job(args) as job_instance:
        dispatcher = JobPrePostDispatcher()
        try:
            # Run JobPre plugins
            output.log_plugin_failures(dispatcher.load_failures)
            dispatcher.map_method('pre', job_instance)
            job_run = job_instance.run()
        finally:
            # Run JobPost plugins
            dispatcher.map_method('post', job_instance)

    renderers = ResultDispatcher()
    if renderers.extensions:
        renderers.map_method('render', job_instance.result, job_instance)
    return job_run
def fetch_asset(self, name, asset_hash=None, algorithm=None,
                locations=None, expire=None, find_only=False,
                cancel_on_missing=False):
    """
    Method to call utils.asset in order to fetch an asset file
    supporting hash check, caching and multiple locations.

    :param name: the asset filename or URL
    :param asset_hash: asset hash (optional)
    :param algorithm: hash algorithm (optional, defaults to
                      :data:`avocado.utils.asset.DEFAULT_HASH_ALGORITHM`)
    :param locations: list of URLs from where the asset can be
                      fetched (optional)
    :param expire: time for the asset to expire
    :param find_only: When `True`, `fetch_asset` only looks for the
                      asset in the cache, avoiding the download/move
                      action. Defaults to `False`.
    :param cancel_on_missing: whether the test should be canceled if the
                              asset was not found in the cache or if
                              `fetch` could not add the asset to the
                              cache. Defaults to `False`.
    :raises OSError: when it fails to fetch the asset or file is not in
                     the cache and `cancel_on_missing` is `False`.
    :returns: asset file local path.
    """
    if expire is not None:
        # convert human readable forms such as "2h" into seconds
        expire = data_structures.time_to_seconds(str(expire))

    # If name has no protocol or network locations, attempt to find
    # the asset "by name" first. This is valid use case when the
    # asset has been previously put into any of the cache
    # directories, either manually or by the caching process
    # itself.
    parsed_name = asset.Asset.parse_name(name)
    if not (parsed_name.scheme or locations):
        try:
            return asset.Asset.get_asset_by_name(name,
                                                 self.cache_dirs,
                                                 expire,
                                                 asset_hash)
        except OSError as e:
            if cancel_on_missing:
                self.cancel(f"Missing asset {name}")
            raise e

    asset_obj = asset.Asset(name, asset_hash, algorithm, locations,
                            self.cache_dirs, expire)
    try:
        # return the path to the asset when it was found or fetched
        if find_only:
            return asset_obj.find_asset_file()
        else:
            return asset_obj.fetch()
    except OSError as e:
        # if asset is not in the cache or there was a problem fetching
        # the asset
        if cancel_on_missing:
            # cancel when requested
            self.cancel(f"Missing asset {name}")
        # otherwise re-throw OSError
        raise e
def test_time_to_seconds(self):
    """time_to_seconds handles None, plain numbers and the day suffix."""
    for raw, seconds in ((None, 0), ("31", 31), ('10d', 864000)):
        self.assertEqual(data_structures.time_to_seconds(raw), seconds)
    # an unrecognized suffix must be rejected
    self.assertRaises(ValueError, data_structures.time_to_seconds, "10days")
def test_time_to_seconds(self):
    """time_to_seconds converts strings with an optional suffix to seconds."""
    self.assertEqual(data_structures.time_to_seconds(None), 0)
    self.assertEqual(data_structures.time_to_seconds("31"), 31)
    # 10 days == 10 * 24 * 3600 == 864000 seconds
    self.assertEqual(data_structures.time_to_seconds('10d'), 10 * 24 * 3600)
    # an unrecognized suffix must be rejected
    with self.assertRaises(ValueError):
        data_structures.time_to_seconds("10days")