def mark_recipe_tasks_finished(recipe, result=TaskResult.pass_,
                               task_status=TaskStatus.completed,
                               finish_time=None, only=False,
                               server_log=False, num_tasks=None, **kwargs):
    """
    Mark the tasks of *recipe* (all of them, or just the first *num_tasks*)
    as finished with the given status and result.

    result=None means: don't add any results to the recipetasks.
    """
    assert result is None or result in TaskResult
    finish_time = finish_time or datetime.datetime.utcnow()
    if not only:
        mark_recipe_running(recipe, **kwargs)
        mark_recipe_installation_finished(recipe)
    # Need to make sure recipe.watchdog has been persisted, since we delete it
    # below when the recipe completes and sqlalchemy will barf on deleting an
    # instance that hasn't been persisted.
    session.flush()

    # Register dummy log rows, either pointing at an archive server or at the
    # recipe's own lab controller, depending on server_log.
    if server_log:
        archive = u'http://dummy-archive-server/beaker/'
        recipe.log_server = u'dummy-archive-server'
        recipe.logs = [
            LogRecipe(server=archive, path=u'recipe_path',
                      filename=u'dummy.txt')
        ]

        def rt_log():
            return LogRecipeTask(server=archive, path=u'tasks',
                                 filename=u'dummy.txt')

        def rtr_log():
            return LogRecipeTaskResult(server=archive, path=u'/',
                                       filename=u'result.txt')
    else:
        recipe.log_server = recipe.recipeset.lab_controller.fqdn
        recipe.logs = [LogRecipe(path=u'recipe_path', filename=u'dummy.txt')]

        def rt_log():
            return LogRecipeTask(path=u'tasks', filename=u'dummy.txt')

        def rtr_log():
            return LogRecipeTaskResult(path=u'/', filename=u'result.txt')

    for recipe_task in recipe.tasks[:num_tasks]:
        if result is not None:
            rtr = RecipeTaskResult(recipetask=recipe_task, result=result)
            rtr.logs = [rtr_log()]
            recipe_task.results.append(rtr)
        recipe_task.logs = [rt_log()]
        recipe_task.finish_time = finish_time
        recipe_task._change_status(task_status)
    log.debug('Marked %s tasks in %s as %s with result %s',
              num_tasks or 'all', recipe.t_id, task_status, result)
def test_duplicate_logs_are_filtered_out(self):
    # Even if the db contains multiple rows referencing the same filename
    # (which it shouldn't) we want recipe.files() to filter those out
    # before returning them, to avoid breaking beaker-transfer.
    with session.begin():
        job = data_setup.create_running_job()
        recipe = job.recipesets[0].recipes[0]
        for _ in range(2):
            recipe.logs.append(LogRecipe(path=u'/', filename=u'imadupe.log'))
    logs = self.server.recipes.files(recipe.id)
    self.assertEqual(len(logs), 1)
    self.assertEqual(logs[0]['filename'], u'imadupe.log')
def _create_jobs():
    # Create one job more than the purge limit, each carrying a dummy log
    # row, and flag them all as deleted so they are eligible for purging.
    with session.begin():
        for _ in range(limit + 1):
            purgeable = data_setup.create_completed_job()
            purgeable.recipesets[0].recipes[0].logs.append(
                    LogRecipe(filename=u'test.log'))
            purgeable.deleted = datetime.datetime.utcnow()
def _create_jobs():
    # Create one job more than the limit, each old enough (finished more
    # than 30 days ago) to have expired, with a dummy log row attached.
    with session.begin():
        for _ in range(limit + 1):
            expired = data_setup.create_completed_job(
                    start_time=datetime.datetime.utcnow()
                        - datetime.timedelta(days=60),
                    finish_time=datetime.datetime.utcnow()
                        - datetime.timedelta(days=31))
            expired.recipesets[0].recipes[0].logs.append(
                    LogRecipe(filename=u'test.log'))
def test_does_not_load_RecipeTaskResults(self):
    # In large jobs with many RecipeTasks and RecipeTaskResults,
    # beaker-log-delete would previously take a long time and a lot of
    # memory, because it was traversing the entire object graph down to
    # RecipeTaskResult and loading them all into memory.
    # This test is asserting that no RecipeTask or RecipeTaskResult
    # instances are loaded when beaker-log-delete runs.
    with session.begin():
        job = data_setup.create_completed_job()
        job.deleted = datetime.datetime.utcnow()
        recipe = job.recipesets[0].recipes[0]
        server = self.log_server_url + '/recipe/'
        # Use 'with' so the dummy files are closed (and flushed to disk)
        # before log_delete runs, instead of leaking the handles until GC.
        with open(os.path.join(self.recipe_logs_dir, 'recipe.log'), 'w') as f:
            f.write('dummy')
        recipe.logs[:] = [LogRecipe(server=server, filename=u'recipe.log')]
        with open(os.path.join(self.recipe_logs_dir, 'task.log'), 'w') as f:
            f.write('dummy')
        recipe.tasks[0].logs[:] = [
            LogRecipeTask(server=server, filename=u'task.log')]
        with open(os.path.join(self.recipe_logs_dir, 'result.log'), 'w') as f:
            f.write('dummy')
        recipe.tasks[0].results[0].logs[:] = [
            LogRecipeTaskResult(server=server, filename=u'result.log')]
    # RecipeTasks/RecipeTaskResults are already loaded from the data_setup
    # calls above, expunge the session so that log_delete starts from
    # a clean slate.
    session.expunge_all()
    # Any attempt to construct (i.e. load) a RecipeTask/RecipeTaskResult
    # during the log_delete run blows up immediately.
    with mock.patch.object(RecipeTask, '__new__', side_effect=AssertionError):
        with mock.patch.object(RecipeTaskResult, '__new__',
                side_effect=AssertionError):
            self.assertEqual(log_delete.log_delete(), 0)  # exit status
    # Check that we really deleted something, if not the test setup was faulty.
    with session.begin():
        job = Job.by_id(job.id)
        self.assertIsNotNone(job.purged)
def test_log_not_delete(self):
    # Job that is not within its expiry time: its logs (in the db and on
    # disk) must survive a beaker-log-delete run.
    with session.begin():
        job_not_delete = data_setup.create_completed_job(
                start_time=datetime.datetime.utcnow()
                    - datetime.timedelta(days=60),
                finish_time=datetime.datetime.utcnow()
                    - datetime.timedelta(days=29))
        job_not_delete.recipesets[0].recipes[0].logs.append(
                LogRecipe(filename=u'test.log'))
    r_not_delete = job_not_delete.recipesets[0].recipes[0]
    dir_not_delete = os.path.join(r_not_delete.logspath,
            r_not_delete.filepath)
    self.make_dir(dir_not_delete)
    # Touch an empty dummy log file on disk; 'with' guarantees the handle
    # is closed before the external process runs.
    with open(os.path.join(dir_not_delete, 'test.log'), 'w'):
        pass
    session.flush()
    run_command('log_delete.py', 'beaker-log-delete')
    # The expired fixture job *was* purged from the db...
    self.assertRaises(AssertionError, self._assert_logs_not_in_db,
            self.job_to_delete)
    # ...but the unexpired job's log directory must still exist.
    try:
        self.check_dir_not_there(dir_not_delete)
        # Fixed typo in the failure message: 'shold' -> 'should'.
        raise Exception('%s was deleted when it should not have been'
                % dir_not_delete)
    except AssertionError:
        pass
def setUp(self):
    # set up a directory for our dummy job logs, with an HTTP server
    self.logs_dir = tempfile.mkdtemp(prefix='beaker-client-test-job-logs')
    self.addCleanup(shutil.rmtree, self.logs_dir, ignore_errors=True)
    self.archive_server = Process('http_server.py',
            args=[sys.executable,
                  pkg_resources.resource_filename('bkr.inttest',
                        'http_server.py'),
                  '--base', self.logs_dir],
            listen_port=19998)
    self.archive_server.start()
    self.addCleanup(self.archive_server.stop)
    self.log_server_url = u'http://localhost:19998/'
    # job for testing
    with session.begin():
        self.job = data_setup.create_completed_job()
        self.recipe = self.job.recipesets[0].recipes[0]
        # Use 'with' so each dummy file is closed (and flushed) before the
        # archive server is asked to serve it, instead of leaking handles.
        os.mkdir(os.path.join(self.logs_dir, 'R'))
        with open(os.path.join(self.logs_dir, 'R', 'dummy.txt'), 'w') as f:
            f.write('recipe\n')
        self.recipe.logs[:] = [LogRecipe(server=self.log_server_url,
                path=u'R', filename=u'dummy.txt')]
        os.mkdir(os.path.join(self.logs_dir, 'T'))
        with open(os.path.join(self.logs_dir, 'T', 'dummy.txt'), 'w') as f:
            f.write('task\n')
        self.recipe.tasks[0].logs[:] = [LogRecipeTask(
                server=self.log_server_url, path=u'T', filename=u'dummy.txt')]
        os.mkdir(os.path.join(self.logs_dir, 'TR'))
        with open(os.path.join(self.logs_dir, 'TR', 'dummy.txt'), 'w') as f:
            f.write('result\n')
        self.recipe.tasks[0].results[0].logs[:] = [LogRecipeTaskResult(
                server=self.log_server_url, path=u'TR',
                filename=u'dummy.txt')]
def test_gets_logs(self):
    # A recipe with one registered log row must be reported back by the
    # recipes.files() XML-RPC call.
    with session.begin():
        system = data_setup.create_system(lab_controller=self.lc)
        recipe = data_setup.create_recipe()
        recipe.logs.append(LogRecipe(filename=u'test.log'))
        data_setup.create_job_for_recipes([recipe])
    log_list = self.server.recipes.files(recipe.id)
    self.assertEqual(len(log_list), 1)
    self.assertEqual(log_list[0]['filename'], u'test.log')
def setUp(self):
    # Build a running recipe whose recipe, task, and result each carry two
    # registered log rows (one at the top level, one in a subdirectory).
    with session.begin():
        self.recipe = data_setup.create_recipe()
        data_setup.create_job_for_recipes([self.recipe])
        data_setup.mark_recipe_running(self.recipe)
        self.recipe.logs[:] = [
            LogRecipe(path=u'/', filename=u'console.log'),
            LogRecipe(path=u'some-dir', filename=u'some-file.txt'),
        ]
        self.task = self.recipe.tasks[0]
        self.task.logs[:] = [
            LogRecipeTask(path=u'/', filename=u'TESTOUT.log'),
            LogRecipeTask(path=u'debug', filename=u'.task_beah_raw'),
        ]
        self.task.pass_(u'', 0, u'Pass')
        self.result = self.recipe.tasks[0].results[0]
        self.result.logs[:] = [
            LogRecipeTaskResult(path=u'/', filename=u'test.log'),
            LogRecipeTaskResult(path=u'some-dir', filename=u'some-file.txt'),
        ]
def test_purge_deleted(self):
    # Flag the fixture job as deleted and give it a dummy log row plus a
    # matching file on disk; beaker-log-delete must purge both.
    with session.begin():
        self.job_to_delete.deleted = datetime.datetime.utcnow()
        self.job_to_delete.recipesets[0].recipes[0].logs.append(
                LogRecipe(filename=u'test.log'))
    recipe = self.job_to_delete.recipesets[0].recipes[0]
    # renamed local: 'dir' shadowed the builtin
    log_dir = os.path.join(recipe.logspath, recipe.filepath)
    self.make_dir(log_dir)
    open(os.path.join(log_dir, 'test.log'), 'w').close()
    run_command('log_delete.py', 'beaker-log-delete')
    self._assert_logs_not_in_db(Job.by_id(self.job_to_delete.id))
    self.check_dir_not_there(log_dir)
def create_deleted_job_with_log(self, path, filename):
    """Create a completed job flagged for deletion whose recipe has exactly
    one log, served from <log_server_url>/<path>/<filename>."""
    with session.begin():
        job = data_setup.create_completed_job()
        job.to_delete = datetime.datetime.utcnow()
        session.flush()
        recipe = job.recipesets[0].recipes[0]
        recipe.log_server = self.log_server
        recipe.logs[:] = [
            LogRecipe(server='%s/%s' % (self.log_server_url, path),
                      filename=filename)
        ]
        # Drop the tasks' logs so only the recipe-level log is registered.
        for task in recipe.tasks:
            task.logs[:] = []
def test_delete_and_purge_expired(self):
    # An expired job (finished over 30 days ago) must have its log rows
    # purged from the db and its log directory removed from disk.
    with session.begin():
        job_to_delete = data_setup.create_completed_job(
                start_time=datetime.datetime.utcnow()
                    - datetime.timedelta(days=60),
                finish_time=datetime.datetime.utcnow()
                    - datetime.timedelta(days=31))
        # NOTE(review): this assigns owner on self.job_to_delete (the
        # fixture job), not on the local job_to_delete created above --
        # confirm this is intentional and not a copy-paste slip.
        self.job_to_delete.owner = self.user
        job_to_delete.recipesets[0].recipes[0].logs.append(
                LogRecipe(filename=u'test.log'))
    expired_recipe = job_to_delete.recipesets[0].recipes[0]
    dir_delete = os.path.join(expired_recipe.logspath,
            expired_recipe.filepath)
    self.make_dir(dir_delete)
    open(os.path.join(dir_delete, 'test.log'), 'w').close()
    run_command('log_delete.py', 'beaker-log-delete')
    self._assert_logs_not_in_db(job_to_delete)
    self.check_dir_not_there(dir_delete)