def test_start_on_unfinished_deployment(self, mock_update_globals_file,
                                        mock_exists):
    deployment_id = self.deployment_uuid
    deployment_name = self.deployment_name
    exc = exceptions.DeploymentNotFinishedStatus(
        name=deployment_name,
        uuid=deployment_id,
        status=consts.DeployStatus.DEPLOY_INIT)
    self.fake_api.verification.start.side_effect = exc
    self.assertEqual(
        1, self.verify.start(self.fake_api, self.deployment_uuid,
                             deployment_id))
def test_start_on_unfinished_deployment(self, mock__load_and_validate_task,
                                        mock_detailed):
    deployment_id = "e0617de9-77d1-4875-9b49-9d5789e29f20"
    deployment_name = "xxx_name"
    task_path = "path_to_config.json"
    fake_task = fakes.FakeTask(uuid="some_new_uuid", tag="tag")
    self.fake_api.task.create.return_value = fake_task
    exc = exceptions.DeploymentNotFinishedStatus(
        name=deployment_name,
        uuid=deployment_id,
        status=consts.DeployStatus.DEPLOY_INIT)
    self.fake_api.task.create.side_effect = exc
    self.assertEqual(1, self.task.start(self.fake_api, task_path,
                                        deployment="any", tag="some_tag"))
    self.assertFalse(mock_detailed.called)
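# Both tests above assert that the CLI-level ``start`` commands translate a
# DeploymentNotFinishedStatus raised by the API into the exit code 1.  A
# minimal sketch of that error handling, assuming the command delegates to
# api.task.create(); the function body and message wording are illustrative
# assumptions, not the original command code.
from rally import exceptions


def start(api, task_file, deployment=None, tag=None):
    """Hypothetical CLI handler: create a task and report failures as rc=1."""
    try:
        # api.task.create() raises DeploymentNotFinishedStatus when the
        # target deployment is not in the 'deploy->finished' state yet.
        api.task.create(deployment=deployment, tag=tag)
    except exceptions.DeploymentNotFinishedStatus as e:
        print("Cannot start the task: %s" % e)
        return 1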
def create(cls, deployment, tag):
    """Create a task without starting it.

    A task is a list of benchmarks that will be called one by one; the
    results of execution will be stored in the DB.

    :param deployment: UUID or name of the deployment
    :param tag: tag for this task
    :returns: Task object
    """
    deployment = objects.Deployment.get(deployment)
    if deployment["status"] != consts.DeployStatus.DEPLOY_FINISHED:
        raise exceptions.DeploymentNotFinishedStatus(
            name=deployment["name"],
            uuid=deployment["uuid"],
            status=deployment["status"])

    return objects.Task(deployment_uuid=deployment["uuid"], tag=tag)
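# A minimal usage sketch for create() above.  The helper name, the
# ``task_api`` parameter and the deployment name are assumptions for
# illustration; what the source confirms is that create() returns a Task
# object for a finished deployment and raises DeploymentNotFinishedStatus
# for any other deployment status.
from rally import exceptions


def create_smoke_task(task_api, deployment="my-deployment"):
    """task_api is whatever object exposes the create() shown above."""
    try:
        # Only records the task; nothing is executed yet.
        return task_api.create(deployment, tag="smoke")
    except exceptions.DeploymentNotFinishedStatus as e:
        # The exception carries the deployment name, UUID and current status.
        print("Deployment is not ready for tasks: %s" % e)
        return None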
def start(cls, verifier_id, deployment_id, tags=None, **run_args):
    """Start a verification.

    :param verifier_id: Verifier name or UUID
    :param deployment_id: Deployment name or UUID
    :param tags: List of tags to assign to the verification
    :param run_args: Dictionary with run arguments for the verification
    """
    # TODO(ylobankov): Add an ability to skip tests by specifying only test
    #                  names (without test IDs). Also, it would be nice to
    #                  skip whole test suites, e.g. all tests in a class or
    #                  module.
    deployment = objects.Deployment.get(deployment_id)
    if deployment["status"] != consts.DeployStatus.DEPLOY_FINISHED:
        raise exceptions.DeploymentNotFinishedStatus(
            name=deployment["name"],
            uuid=deployment["uuid"],
            status=deployment["status"])

    verifier = _Verifier.get(verifier_id)
    if verifier.status != consts.VerifierStatus.INSTALLED:
        raise exceptions.RallyException(
            "Failed to start verification because verifier %s is in '%s' "
            "status, but should be in '%s'."
            % (verifier, verifier.status, consts.VerifierStatus.INSTALLED))

    verifier.set_deployment(deployment_id)
    if not verifier.manager.is_configured():
        _Verifier.configure(verifier, deployment_id)

    # TODO(andreykurilin): save validation results to db
    verifier.manager.validate(run_args)

    verification = objects.Verification.create(verifier_id=verifier_id,
                                               deployment_id=deployment_id,
                                               tags=tags,
                                               run_args=run_args)
    LOG.info("Starting verification (UUID=%s) for deployment '%s' "
             "(UUID=%s) by verifier %s.",
             verification.uuid, verifier.deployment["name"],
             verifier.deployment["uuid"], verifier)
    verification.update_status(consts.VerificationStatus.RUNNING)

    context = {"config": verifier.manager._meta_get("context"),
               "run_args": run_args,
               "verification": verification,
               "verifier": verifier}
    try:
        with vcontext.ContextManager(context):
            results = verifier.manager.run(context)
    except Exception as e:
        verification.set_error(e)
        raise

    # TODO(ylobankov): Check that the verification exists in the database,
    #                  because users may delete the verification before the
    #                  tests finish.
    verification.finish(results.totals, results.tests)

    LOG.info("Verification (UUID=%s) has been successfully finished for "
             "deployment '%s' (UUID=%s)!",
             verification.uuid, verifier.deployment["name"],
             verifier.deployment["uuid"])

    return verification, results
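# A minimal usage sketch for start() above, covering both guard clauses: an
# unfinished deployment raises DeploymentNotFinishedStatus, and a verifier
# that is not in the INSTALLED status raises RallyException.  The
# ``verification_api`` parameter, the verifier/deployment names and the
# ``concurrency`` run argument are illustrative assumptions, not part of the
# original code.
from rally import exceptions


def run_verification(verification_api, verifier="tempest", deployment="prod"):
    """verification_api is whatever object exposes the start() shown above."""
    try:
        verification, results = verification_api.start(
            verifier, deployment, tags=["smoke"], concurrency=2)
    except exceptions.DeploymentNotFinishedStatus as e:
        print("Deployment is not ready: %s" % e)
        return None
    except exceptions.RallyException as e:
        print("Verifier is not ready: %s" % e)
        return None
    # start() returns the Verification object together with the run results.
    return verification, results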