def _setup_job_results(self): logdir = getattr(self.args, 'logdir', None) if self.standalone: if logdir is not None: logdir = os.path.abspath(logdir) self.logdir = data_dir.create_job_logs_dir(logdir=logdir, unique_id=self.unique_id) else: self.logdir = tempfile.mkdtemp(prefix='avocado-') else: if logdir is None: self.logdir = data_dir.create_job_logs_dir(unique_id=self.unique_id) else: logdir = os.path.abspath(logdir) self.logdir = data_dir.create_job_logs_dir(logdir=logdir, unique_id=self.unique_id) self.logfile = os.path.join(self.logdir, "job.log") self.idfile = os.path.join(self.logdir, "id") with open(self.idfile, 'w') as id_file_obj: id_file_obj.write("%s\n" % self.unique_id)
def _setup_job_results(self):
    """
    Prepares a job result directory, also known as logdir, for this job
    """
    results_dir = self.config.get('run.results_dir')
    if results_dir is not None:
        self.logdir = data_dir.create_job_logs_dir(
            base_dir=os.path.abspath(results_dir),
            unique_id=self.unique_id)
    else:
        self.logdir = data_dir.create_job_logs_dir(unique_id=self.unique_id)
    # On a dry-run nothing should be written outside the logdir itself
    if not self.config.get('run.dry_run.enabled'):
        self._update_latest_link()
    self.logfile = os.path.join(self.logdir, "job.log")
    id_path = os.path.join(self.logdir, "id")
    with open(id_path, 'w', encoding='utf-8') as id_file_obj:
        id_file_obj.write(f"{self.unique_id}\n")
        # Make sure the id actually reaches the disk before proceeding
        id_file_obj.flush()
        os.fsync(id_file_obj)
def testUniqueLogDir(self):
    """
    Tests that multiple queries for a logdir at the same time provides
    unique results.
    """
    from avocado.core import data_dir
    # Pin the timestamp so the generated directory names are predictable
    flexmock(data_dir.time).should_receive('strftime').and_return("date")
    logdir = os.path.join(self.mapping['base_dir'], "foor", "bar", "baz")
    path_prefix = os.path.join(logdir, "job-date-")
    uid = "1234567890" * 4
    # Each successive call must extend the shortened id by one character.
    # (`xrange` was Python 2 only and is a NameError on Python 3; `range`
    # iterates identically.)
    for i in range(7, 40):
        path = data_dir.create_job_logs_dir(logdir, uid)
        self.assertEqual(path, path_prefix + uid[:i])
        self.assertTrue(os.path.exists(path))
    # Once the full id is exhausted, a numeric ".N" suffix is appended
    path = data_dir.create_job_logs_dir(logdir, uid)
    self.assertEqual(path, path_prefix + uid + ".0")
    self.assertTrue(os.path.exists(path))
    path = data_dir.create_job_logs_dir(logdir, uid)
    self.assertEqual(path, path_prefix + uid + ".1")
    self.assertTrue(os.path.exists(path))
def test_unique_log_dir(self):
    """
    Tests that multiple queries for a logdir at the same time provides
    unique results.
    """
    from avocado.core import data_dir
    # Pin the timestamp so the generated directory names are predictable
    flexmock(data_dir.time).should_receive('strftime').and_return("date")
    logdir = os.path.join(self.mapping['base_dir'], "foor", "bar", "baz")
    path_prefix = os.path.join(logdir, "job-date-")
    uid = "1234567890" * 4
    # Each successive call must extend the shortened id by one character.
    # (`xrange` was Python 2 only and is a NameError on Python 3; `range`
    # iterates identically.)
    for i in range(7, 40):
        path = data_dir.create_job_logs_dir(logdir, uid)
        self.assertEqual(path, path_prefix + uid[:i])
        self.assertTrue(os.path.exists(path))
    # Once the full id is exhausted, a numeric ".N" suffix is appended
    path = data_dir.create_job_logs_dir(logdir, uid)
    self.assertEqual(path, path_prefix + uid + ".0")
    self.assertTrue(os.path.exists(path))
    path = data_dir.create_job_logs_dir(logdir, uid)
    self.assertEqual(path, path_prefix + uid + ".1")
    self.assertTrue(os.path.exists(path))
def test_unique_log_dir(self):
    """
    Tests that multiple queries for a logdir at the same time provides
    unique results.
    """
    from avocado.core import data_dir
    with unittest.mock.patch('avocado.core.data_dir.time.strftime',
                             return_value="date_would_go_here"):
        base = os.path.join(self.mapping['base_dir'], "foor", "bar", "baz")
        prefix = os.path.join(base, "job-date_would_go_here-")
        uid = "1234567890"*4
        # While a shortened id is ambiguous, each call extends it by one char
        for length in range(7, 40):
            created = data_dir.create_job_logs_dir(base, uid)
            self.assertEqual(created, prefix + uid[:length])
            self.assertTrue(os.path.exists(created))
        # With the full id exhausted, numeric ordinal suffixes are appended
        for suffix in (".0", ".1"):
            created = data_dir.create_job_logs_dir(base, uid)
            self.assertEqual(created, prefix + uid + suffix)
            self.assertTrue(os.path.exists(created))
def test_unique_log_dir(self):
    """
    Tests that multiple queries for a logdir at the same time provides
    unique results.
    """
    from avocado.core import data_dir
    with unittest.mock.patch('avocado.core.data_dir.time.strftime',
                             return_value="date_would_go_here"):
        base = os.path.join(self.mapping['base_dir'], "foor", "bar", "baz")
        prefix = os.path.join(base, "job-date_would_go_here-")
        uid = "1234567890"*4
        # While a shortened id is ambiguous, each call extends it by one char
        for length in range(7, 40):
            created = data_dir.create_job_logs_dir(base, uid)
            self.assertEqual(created, prefix + uid[:length])
            self.assertTrue(os.path.exists(created))
        # With the full id exhausted, numeric ordinal suffixes are appended
        for suffix in (".0", ".1"):
            created = data_dir.create_job_logs_dir(base, uid)
            self.assertEqual(created, prefix + uid + suffix)
            self.assertTrue(os.path.exists(created))
def test_get_job_results_dir(self):
    """
    Exercises every way ``data_dir.get_job_results_dir`` can resolve a
    reference (path, id file, full/partial id, 'latest') into a job
    results directory.
    """
    from avocado.core import data_dir, job_id

    # First, build a real job results directory to resolve against
    logs_dir = self.mapping.get('logs_dir')
    self.assertNotEqual(None, logs_dir)
    unique_id = job_id.create_unique_job_id()
    # Expected job results dir
    expected_jrd = data_dir.create_job_logs_dir(logs_dir, unique_id)

    # Without an "id" file the directory is not recognized as a results dir
    self.assertEqual(None,
                     data_dir.get_job_results_dir(expected_jrd, logs_dir),
                     ("If passing a directory reference, it expects the id"
                      "file"))
    # Create the id file.
    id_file_path = os.path.join(expected_jrd, 'id')
    with open(id_file_path, 'w') as id_file:
        id_file.write("%s\n" % unique_id)
        id_file.flush()
        os.fsync(id_file)

    # Resolution by absolute path to the directory
    self.assertEqual(expected_jrd,
                     data_dir.get_job_results_dir(expected_jrd, logs_dir),
                     "It should get from the path to the directory")

    # The bare directory name is only resolvable relative to the cwd:
    # it must fail from an unrelated cwd and succeed from inside logs_dir
    results_dirname = os.path.basename(expected_jrd)
    self.assertEqual(None,
                     data_dir.get_job_results_dir(results_dirname, logs_dir),
                     "It should not get from a valid path to the directory")
    pwd = os.getcwd()
    os.chdir(logs_dir)
    self.assertEqual(expected_jrd,
                     data_dir.get_job_results_dir(results_dirname, logs_dir),
                     "It should get from relative path to the directory")
    os.chdir(pwd)

    # Resolution by path to the id file and by the job id itself
    self.assertEqual(expected_jrd,
                     data_dir.get_job_results_dir(id_file_path, logs_dir),
                     "It should get from the path to the id file")
    self.assertEqual(expected_jrd,
                     data_dir.get_job_results_dir(unique_id, logs_dir),
                     "It should get from the id")

    # An id for which no job exists must not resolve
    another_id = job_id.create_unique_job_id()
    self.assertNotEqual(unique_id, another_id)
    self.assertEqual(None,
                     data_dir.get_job_results_dir(another_id, logs_dir),
                     "It should not get from unexisting job")

    # Partial ids (prefixes) of any length should resolve
    self.assertEqual(expected_jrd,
                     data_dir.get_job_results_dir(unique_id[:7], logs_dir),
                     "It should get from partial id equals to 7 digits")
    self.assertEqual(expected_jrd,
                     data_dir.get_job_results_dir(unique_id[:4], logs_dir),
                     "It should get from partial id less than 7 digits")

    # An id sharing only the first 7 characters is a different id and
    # must not resolve to this job
    almost_id = unique_id[:7] + ('a' * (len(unique_id) - 7))
    self.assertNotEqual(unique_id, almost_id)
    self.assertEqual(None,
                     data_dir.get_job_results_dir(almost_id, logs_dir),
                     ("It should not get if the id is equal on only"
                      "the first 7 characters"))

    # The special 'latest' reference follows the latest symlink
    os.symlink(expected_jrd, os.path.join(logs_dir, 'latest'))
    self.assertEqual(expected_jrd,
                     data_dir.get_job_results_dir('latest', logs_dir),
                     "It should get from the 'latest' id")

    # With no explicit logs_dir argument, the configured default base
    # logs directory should be used (patched-in settings instance)
    stg = settings.Settings()
    with unittest.mock.patch('avocado.core.stgs', stg):
        import avocado.core
        avocado.core.register_core_options()
        stg.process_config_path(self.config_file_path)
        stg.merge_with_configs()
    with unittest.mock.patch('avocado.core.data_dir.settings', stg):
        self.assertEqual(expected_jrd,
                         data_dir.get_job_results_dir(unique_id),
                         "It should use the default base logs directory")
def __init__(self, methodName='runTest', name=None, params=None,
             base_logdir=None, tag=None, job=None, runner_queue=None):
    """
    Initializes the test.

    :param methodName: Name of the main method to run. For the sake of
                       compatibility with the original unittest class,
                       you should not set this.
    :param name: Pretty name of the test name. For normal tests, written
                 with the avocado API, this should not be set, this is
                 reserved for running random executables as tests.
    :param params: Test parameters: a dict (merged into default_params),
                   a list, or a (params, mux_entry) tuple.
    :param base_logdir: Directory where test logs should go. If None
                        provided, it'll use
                        :func:`avocado.core.data_dir.create_job_logs_dir`.
    :param tag: Tag that differentiates 2 executions of the same test name.
                Example: 'long', 'short', so we can differentiate
                'sleeptest.long' and 'sleeptest.short'.
    :param job: The job that this test is part of.
    :param runner_queue: Queue used to communicate with the test runner.
    """
    def record_and_warn(*args, **kwargs):
        """ Record call to this function and log warning """
        if not self.__log_warn_used:
            self.__log_warn_used = True
        return original_log_warn(*args, **kwargs)

    # Fall back to the class name when no pretty name was supplied
    if name is not None:
        self.name = name
    else:
        self.name = self.__class__.__name__

    self.tag = tag or None
    self.job = job
    basename = os.path.basename(self.name)
    tmpdir = data_dir.get_tmp_dir()
    # rstrip('co') maps a compiled module path (.pyc/.pyo) back to .py;
    # NOTE(review): rstrip removes any trailing run of 'c'/'o' chars —
    # confirm no source paths end in those letters before the extension
    self.filename = inspect.getfile(self.__class__).rstrip('co')
    self.basedir = os.path.dirname(self.filename)
    self.datadir = self.filename + '.data'
    self.expected_stdout_file = os.path.join(self.datadir,
                                             'stdout.expected')
    self.expected_stderr_file = os.path.join(self.datadir,
                                             'stderr.expected')
    self.workdir = utils_path.init_dir(tmpdir, basename)
    self.srcdir = utils_path.init_dir(self.workdir, 'src')
    if base_logdir is None:
        base_logdir = data_dir.create_job_logs_dir()
    base_logdir = os.path.join(base_logdir, 'test-results')
    self.tagged_name = self.get_tagged_name(base_logdir)

    # Let's avoid trouble at logdir init time, since we're interested
    # in a relative directory here
    tagged_name = self.tagged_name
    if tagged_name.startswith('/'):
        tagged_name = tagged_name[1:]
    self.logdir = utils_path.init_dir(base_logdir, tagged_name)
    genio.set_log_file_dir(self.logdir)
    self.logfile = os.path.join(self.logdir, 'debug.log')
    self.stdout_file = os.path.join(self.logdir, 'stdout')
    self.stderr_file = os.path.join(self.logdir, 'stderr')
    self.outputdir = utils_path.init_dir(self.logdir, 'data')
    self.sysinfodir = utils_path.init_dir(self.logdir, 'sysinfo')
    self.sysinfo_logger = sysinfo.SysInfo(basedir=self.sysinfodir)

    # Wrap the shared logger's warning method so the test can tell
    # whether any warning was emitted during its run
    self.log = logging.getLogger("avocado.test")
    original_log_warn = self.log.warning
    self.__log_warn_used = False
    self.log.warn = self.log.warning = record_and_warn
    self.stdout_log = logging.getLogger("avocado.test.stdout")
    self.stderr_log = logging.getLogger("avocado.test.stderr")

    # Normalize the three accepted `params` forms into a list plus a
    # multiplex entry before building AvocadoParams
    mux_entry = ['/test/*']
    if isinstance(params, dict):
        self.default_params = self.default_params.copy()
        self.default_params.update(params)
        params = []
    elif params is None:
        params = []
    elif isinstance(params, tuple):
        params, mux_entry = params[0], params[1]
    self.params = multiplexer.AvocadoParams(params, self.name, self.tag,
                                            mux_entry,
                                            self.default_params)
    self.log.info('START %s', self.tagged_name)
    self.log.debug('')

    # Runtime state, filled in as the test executes
    self.debugdir = None
    self.resultsdir = None
    self.status = None
    self.fail_reason = None
    self.fail_class = None
    self.traceback = None
    self.text_output = None
    self.whiteboard = ''
    self.running = False
    self.time_start = None
    self.time_end = None
    self.paused = False
    self.paused_msg = ''
    self.runner_queue = runner_queue
    self.time_elapsed = None
    unittest.TestCase.__init__(self, methodName=methodName)
def __init__(self, methodName='runTest', name=None, params=None,
             base_logdir=None, tag=None, job=None, runner_queue=None):
    """
    Initializes the test.

    :param methodName: Name of the main method to run. For the sake of
                       compatibility with the original unittest class,
                       you should not set this.
    :param name: Pretty name of the test name. For normal tests, written
                 with the avocado API, this should not be set, this is
                 reserved for running random executables as tests.
    :param params: Test parameters: a dict (merged into default_params),
                   a list, or a (params, mux_entry) tuple.
    :param base_logdir: Directory where test logs should go. If None
                        provided, it'll use
                        :func:`avocado.core.data_dir.create_job_logs_dir`.
    :param tag: Tag that differentiates 2 executions of the same test name.
                Example: 'long', 'short', so we can differentiate
                'sleeptest.long' and 'sleeptest.short'.
    :param job: The job that this test is part of.
    :param runner_queue: Queue used to communicate with the test runner.
    """
    def record_and_warn(*args, **kwargs):
        """ Record call to this function and log warning """
        if not self.__log_warn_used:
            self.__log_warn_used = True
        return original_log_warn(*args, **kwargs)

    # Fall back to the class name when no pretty name was supplied
    if name is not None:
        self.name = name
    else:
        self.name = self.__class__.__name__

    self.tag = tag or None
    self.job = job
    basename = os.path.basename(self.name)
    tmpdir = data_dir.get_tmp_dir()
    # rstrip('co') maps a compiled module path (.pyc/.pyo) back to .py;
    # NOTE(review): rstrip removes any trailing run of 'c'/'o' chars —
    # confirm no source paths end in those letters before the extension
    self.filename = inspect.getfile(self.__class__).rstrip('co')
    self.basedir = os.path.dirname(self.filename)
    self.datadir = self.filename + '.data'
    self.expected_stdout_file = os.path.join(self.datadir,
                                             'stdout.expected')
    self.expected_stderr_file = os.path.join(self.datadir,
                                             'stderr.expected')
    self.workdir = utils_path.init_dir(tmpdir, basename)
    self.srcdir = utils_path.init_dir(self.workdir, 'src')
    if base_logdir is None:
        base_logdir = data_dir.create_job_logs_dir()
    base_logdir = os.path.join(base_logdir, 'test-results')
    self.tagged_name = self.get_tagged_name(base_logdir)

    # Let's avoid trouble at logdir init time, since we're interested
    # in a relative directory here
    tagged_name = self.tagged_name
    if tagged_name.startswith('/'):
        tagged_name = tagged_name[1:]
    self.logdir = utils_path.init_dir(base_logdir, tagged_name)
    genio.set_log_file_dir(self.logdir)
    self.logfile = os.path.join(self.logdir, 'debug.log')
    self.stdout_file = os.path.join(self.logdir, 'stdout')
    self.stderr_file = os.path.join(self.logdir, 'stderr')
    self.outputdir = utils_path.init_dir(self.logdir, 'data')
    self.sysinfodir = utils_path.init_dir(self.logdir, 'sysinfo')
    self.sysinfo_logger = sysinfo.SysInfo(basedir=self.sysinfodir)

    # Wrap the shared logger's warning method so the test can tell
    # whether any warning was emitted during its run
    self.log = logging.getLogger("avocado.test")
    original_log_warn = self.log.warning
    self.__log_warn_used = False
    self.log.warn = self.log.warning = record_and_warn
    self.stdout_log = logging.getLogger("avocado.test.stdout")
    self.stderr_log = logging.getLogger("avocado.test.stderr")

    # Normalize the three accepted `params` forms into a list plus a
    # multiplex entry before building AvocadoParams
    mux_entry = ['/test/*']
    if isinstance(params, dict):
        self.default_params = self.default_params.copy()
        self.default_params.update(params)
        params = []
    elif params is None:
        params = []
    elif isinstance(params, tuple):
        params, mux_entry = params[0], params[1]
    self.params = multiplexer.AvocadoParams(params, self.name, self.tag,
                                            mux_entry,
                                            self.default_params)
    self.log.info('START %s', self.tagged_name)
    self.log.debug('')

    # Runtime state, filled in as the test executes
    self.debugdir = None
    self.resultsdir = None
    self.status = None
    self.fail_reason = None
    self.fail_class = None
    self.traceback = None
    self.text_output = None
    self.whiteboard = ''
    self.running = False
    self.time_start = None
    self.time_end = None
    self.paused = False
    self.paused_msg = ''
    self.runner_queue = runner_queue
    self.time_elapsed = None
    unittest.TestCase.__init__(self, methodName=methodName)
def __init__(self, methodName='runTest', name=None, params=None,
             base_logdir=None, tag=None, job=None, runner_queue=None):
    """
    Initializes the test.

    :param methodName: Name of the main method to run. For the sake of
                       compatibility with the original unittest class,
                       you should not set this.
    :param name: Pretty name of the test name. For normal tests, written
                 with the avocado API, this should not be set, this is
                 reserved for running random executables as tests.
    :param params: Dict of test parameters (wrapped in a Params object).
    :param base_logdir: Directory where test logs should go. If None
                        provided, it'll use
                        :func:`avocado.core.data_dir.create_job_logs_dir`.
    :param tag: Tag that differentiates 2 executions of the same test name.
                Example: 'long', 'short', so we can differentiate
                'sleeptest.long' and 'sleeptest.short'.
    :param job: The job that this test is part of.
    :param runner_queue: Queue used to communicate with the test runner.
    """
    def record_and_warn(*args, **kwargs):
        """ Record call to this function and log warning """
        if not self.__log_warn_used:
            self.__log_warn_used = True
        return original_log_warn(*args, **kwargs)

    # Fall back to the class name when no pretty name was supplied
    if name is not None:
        self.name = name
    else:
        self.name = self.__class__.__name__

    if params is None:
        params = {}
    self.params = Params(params)
    # Keep the raw dict around as well (e.g. for re-creating the test)
    self._raw_params = params
    self.tag = tag or self.params.get('tag')
    self.job = job
    basename = os.path.basename(self.name)
    tmpdir = data_dir.get_tmp_dir()
    self.basedir = os.path.dirname(inspect.getfile(self.__class__))
    self.datadir = os.path.join(self.basedir, '%s.data' % basename)
    self.expected_stdout_file = os.path.join(self.datadir,
                                             'stdout.expected')
    self.expected_stderr_file = os.path.join(self.datadir,
                                             'stderr.expected')
    self.workdir = utils_path.init_dir(tmpdir, basename)
    self.srcdir = utils_path.init_dir(self.workdir, 'src')
    if base_logdir is None:
        base_logdir = data_dir.create_job_logs_dir()
    base_logdir = os.path.join(base_logdir, 'test-results')
    self.tagged_name = self.get_tagged_name(base_logdir)

    # Let's avoid trouble at logdir init time, since we're interested
    # in a relative directory here
    tagged_name = self.tagged_name
    if tagged_name.startswith('/'):
        tagged_name = tagged_name[1:]
    self.logdir = utils_path.init_dir(base_logdir, tagged_name)
    genio.set_log_file_dir(self.logdir)
    self.logfile = os.path.join(self.logdir, 'debug.log')
    self.stdout_file = os.path.join(self.logdir, 'stdout')
    self.stderr_file = os.path.join(self.logdir, 'stderr')
    self.outputdir = utils_path.init_dir(self.logdir, 'data')
    self.sysinfodir = utils_path.init_dir(self.logdir, 'sysinfo')
    self.sysinfo_logger = sysinfo.SysInfo(basedir=self.sysinfodir)

    # Wrap the shared logger's warning method so the test can tell
    # whether any warning was emitted during its run
    self.log = logging.getLogger("avocado.test")
    original_log_warn = self.log.warning
    self.__log_warn_used = False
    self.log.warn = self.log.warning = record_and_warn
    self.stdout_log = logging.getLogger("avocado.test.stdout")
    self.stderr_log = logging.getLogger("avocado.test.stderr")
    self.log.info('START %s', self.tagged_name)
    self.log.debug('')
    self.log.debug('Test instance parameters:')

    # Set the helper set_default to the params object
    setattr(self.params, 'set_default', self._set_default)

    # Apply what comes from the params dict
    for key in sorted(self.params.keys()):
        self.log.debug(' %s = %s', key, self.params.get(key))
    self.log.debug('')

    # Apply what comes from the default_params dict
    self.log.debug('Default parameters:')
    for key in sorted(self.default_params.keys()):
        self.log.debug(' %s = %s', key, self.default_params.get(key))
        self.params.set_default(key, self.default_params[key])
    self.log.debug('')
    self.log.debug('Test instance params override defaults whenever available')
    self.log.debug('')

    # If there's a timeout set, log a timeout reminder
    if self.params.timeout:
        self.log.info('Test timeout set. Will wait %.2f s for '
                      'PID %s to end',
                      float(self.params.timeout), os.getpid())
    self.log.info('')

    # Runtime state, filled in as the test executes
    self.debugdir = None
    self.resultsdir = None
    self.status = None
    self.fail_reason = None
    self.fail_class = None
    self.traceback = None
    self.text_output = None
    self.whiteboard = ''
    self.running = False
    self.time_start = None
    self.time_end = None
    self.paused = False
    self.paused_msg = ''
    self.runner_queue = runner_queue
    self.time_elapsed = None
    unittest.TestCase.__init__(self)
def __init__(self, methodName='runTest', name=None, params=None,
             base_logdir=None, tag=None, job=None, runner_queue=None):
    """
    Initializes the test.

    :param methodName: Name of the main method to run. For the sake of
                       compatibility with the original unittest class,
                       you should not set this.
    :param name: Pretty name of the test name. For normal tests, written
                 with the avocado API, this should not be set, this is
                 reserved for running random executables as tests.
    :param params: Dict of test parameters (wrapped in a Params object).
    :param base_logdir: Directory where test logs should go. If None
                        provided, it'll use
                        :func:`avocado.core.data_dir.create_job_logs_dir`.
    :param tag: Tag that differentiates 2 executions of the same test name.
                Example: 'long', 'short', so we can differentiate
                'sleeptest.long' and 'sleeptest.short'.
    :param job: The job that this test is part of.
    :param runner_queue: Queue used to communicate with the test runner.
    """
    def record_and_warn(*args, **kwargs):
        """ Record call to this function and log warning """
        if not self.__log_warn_used:
            self.__log_warn_used = True
        return original_log_warn(*args, **kwargs)

    # Fall back to the class name when no pretty name was supplied
    if name is not None:
        self.name = name
    else:
        self.name = self.__class__.__name__

    if params is None:
        params = {}
    self.params = Params(params)
    # Keep the raw dict around as well (e.g. for re-creating the test)
    self._raw_params = params
    self.tag = tag or self.params.get('tag')
    self.job = job
    basename = os.path.basename(self.name)
    tmpdir = data_dir.get_tmp_dir()
    self.basedir = os.path.dirname(inspect.getfile(self.__class__))
    self.datadir = os.path.join(self.basedir, '%s.data' % basename)
    self.expected_stdout_file = os.path.join(self.datadir,
                                             'stdout.expected')
    self.expected_stderr_file = os.path.join(self.datadir,
                                             'stderr.expected')
    self.workdir = utils_path.init_dir(tmpdir, basename)
    self.srcdir = utils_path.init_dir(self.workdir, 'src')
    if base_logdir is None:
        base_logdir = data_dir.create_job_logs_dir()
    base_logdir = os.path.join(base_logdir, 'test-results')
    self.tagged_name = self.get_tagged_name(base_logdir)

    # Let's avoid trouble at logdir init time, since we're interested
    # in a relative directory here
    tagged_name = self.tagged_name
    if tagged_name.startswith('/'):
        tagged_name = tagged_name[1:]
    self.logdir = utils_path.init_dir(base_logdir, tagged_name)
    genio.set_log_file_dir(self.logdir)
    self.logfile = os.path.join(self.logdir, 'debug.log')
    self.stdout_file = os.path.join(self.logdir, 'stdout')
    self.stderr_file = os.path.join(self.logdir, 'stderr')
    self.outputdir = utils_path.init_dir(self.logdir, 'data')
    self.sysinfodir = utils_path.init_dir(self.logdir, 'sysinfo')
    self.sysinfo_logger = sysinfo.SysInfo(basedir=self.sysinfodir)

    # Wrap the shared logger's warning method so the test can tell
    # whether any warning was emitted during its run
    self.log = logging.getLogger("avocado.test")
    original_log_warn = self.log.warning
    self.__log_warn_used = False
    self.log.warn = self.log.warning = record_and_warn
    self.stdout_log = logging.getLogger("avocado.test.stdout")
    self.stderr_log = logging.getLogger("avocado.test.stderr")
    self.log.info('START %s', self.tagged_name)
    self.log.debug('')
    self.log.debug('Test instance parameters:')

    # Set the helper set_default to the params object
    setattr(self.params, 'set_default', self._set_default)

    # Apply what comes from the params dict
    for key in sorted(self.params.keys()):
        self.log.debug(' %s = %s', key, self.params.get(key))
    self.log.debug('')

    # Apply what comes from the default_params dict
    self.log.debug('Default parameters:')
    for key in sorted(self.default_params.keys()):
        self.log.debug(' %s = %s', key, self.default_params.get(key))
        self.params.set_default(key, self.default_params[key])
    self.log.debug('')
    self.log.debug(
        'Test instance params override defaults whenever available')
    self.log.debug('')

    # If there's a timeout set, log a timeout reminder
    if self.params.timeout:
        self.log.info(
            'Test timeout set. Will wait %.2f s for '
            'PID %s to end',
            float(self.params.timeout), os.getpid())
    self.log.info('')

    # Runtime state, filled in as the test executes
    self.debugdir = None
    self.resultsdir = None
    self.status = None
    self.fail_reason = None
    self.fail_class = None
    self.traceback = None
    self.text_output = None
    self.whiteboard = ''
    self.running = False
    self.time_start = None
    self.time_end = None
    self.paused = False
    self.paused_msg = ''
    self.runner_queue = runner_queue
    self.time_elapsed = None
    unittest.TestCase.__init__(self)