def start(self, task, deploy_id=None, tag=None, do_use=False):
    """Start benchmark task.

    :param task: a file with yaml/json configuration
    :param deploy_id: a UUID of a deployment
    :param tag: optional tag for this task
    :param do_use: if True, switch the CLI to the started task afterwards
    """
    task = os.path.expanduser(task)
    with open(task, 'rb') as task_file:
        config_dict = yaml.safe_load(task_file.read())
    try:
        task = api.create_task(deploy_id, tag)
        print("=" * 80)
        print(_("Task %(tag)s %(uuid)s is started")
              % {"uuid": task["uuid"], "tag": task["tag"]})
        print("-" * 80)
        api.start_task(deploy_id, config_dict, task=task)
        self.detailed(task_id=task['uuid'])
        if do_use:
            use.UseCommands().task(task['uuid'])
    except exceptions.InvalidConfigException:
        # Validation errors are already reported by the API layer;
        # return a non-zero code so the shell sees a failure.
        return 1
    except KeyboardInterrupt:
        # Ctrl-C: abort the running task server-side before propagating.
        api.abort_task(task['uuid'])
        raise
def test_start_task_invalid_task_ignored(self, mock_engine,
                                         mock_deployment_get, mock_task):
    """An InvalidTaskException raised by the engine must not propagate."""
    exc = exceptions.InvalidTaskException()
    mock_engine().run.side_effect = exc
    # The call itself is the assertion: the test fails if the
    # exception escapes start_task.
    api.start_task("deploy_uuid", "config")
def start(self, task, deploy_id=None, tag=None, do_use=False):
    """Run a benchmark task.

    :param task: a file with yaml/json configuration
    :param deploy_id: a UUID of a deployment
    :param tag: optional tag for this task
    :param do_use: if True, switch the CLI to the started task afterwards
    """
    task = os.path.expanduser(task)
    with open(task, 'rb') as task_file:
        config_dict = yaml.safe_load(task_file.read())
    try:
        task = api.create_task(deploy_id, tag)
        print("=" * 80)
        print(_("Task %(tag)s %(uuid)s is started")
              % {"uuid": task["uuid"], "tag": task["tag"]})
        print("-" * 80)
        api.start_task(deploy_id, config_dict, task=task)
        self.detailed(task_id=task['uuid'])
        if do_use:
            use.UseCommands().task(task['uuid'])
    except exceptions.InvalidConfigException:
        # Non-zero return signals failure to the command dispatcher.
        return 1
    except KeyboardInterrupt:
        # Ctrl-C: abort the running task server-side before propagating.
        api.abort_task(task['uuid'])
        raise
def start(self, task):
    """Run Benchmark task.

    :param task: a file with json configuration
    """
    # Context manager guarantees the config file is closed.
    with open(task) as task_file:
        config_dict = json.load(task_file)
    api.start_task(config_dict)
def test_start_task_invalid_task_ignored(self, mock_engine,
                                         mock_deployment_get, mock_task):
    """start_task must swallow an InvalidTaskException from the engine."""
    mock_engine().run.side_effect = exceptions.InvalidTaskException()
    # No explicit assertion needed: an escaping exception fails the test.
    api.start_task("deploy_uuid", "config")
def test_start_task(
    self,
    mock_task_create,
    mock_task_update,
    mock_deploy_update,
    mock_deploy_create,
    mock_deploy_delete,
    mock_task_result_create,
    mock_utils_verifier,
    mock_utils_runner,
):
    """Happy-path run of api.start_task with every collaborator mocked.

    Exercises the full legacy flow: deployment create/update,
    verification, benchmarking, result storage and the trailing
    deployment delete.
    """
    # DB-layer stubs return the fixtures prepared elsewhere on self.
    mock_task_create.return_value = self.task
    mock_task_update.return_value = self.task
    mock_deploy_create.return_value = self.deployment
    mock_deploy_update.return_value = self.deployment
    # Verifier stub: a single fake test that "passes" (status 0).
    mock_utils_verifier.return_value = mock_verifier = mock.Mock()
    mock_utils_verifier.list_verification_tests.return_value = {
        "fake_test": mock.Mock()}
    mock_verifier.run_all.return_value = [{"status": 0}]
    # Runner stub: yields one fake benchmark result.
    mock_utils_runner.return_value = mock_runner = mock.Mock()
    mock_runner.run.return_value = ["fake_result"]

    api.start_task(self.full_config)

    # Deployment lifecycle: created once, then status/endpoint updates
    # in this exact order.
    mock_deploy_create.assert_called_once_with(
        {"config": self.deploy_config})
    mock_deploy_update.assert_has_calls([
        mock.call(self.deploy_uuid, {"status": "deploy->started"}),
        mock.call(self.deploy_uuid, {"status": "deploy->finished"}),
        mock.call(self.deploy_uuid, {"endpoint": self.endpoint}),
    ])
    # Task lifecycle: created for the deployment, then driven through
    # verification and benchmarking statuses.
    mock_task_create.assert_called_once_with(
        {"deployment_uuid": self.deploy_uuid})
    mock_task_update.assert_has_calls([
        mock.call(self.task_uuid,
                  {"status": "test_tool->verify_openstack"}),
        mock.call(self.task_uuid,
                  {"verification_log": '[{"status": 0}]'}),
        mock.call(self.task_uuid,
                  {"status": "test_tool->benchmarking"}),
    ])
    # NOTE(akscram): It looks really awful, but checks degradation.
    mock_task_result_create.assert_called_once_with(
        self.task_uuid,
        {
            "kw": {
                "args": {},
                "execution": "continuous",
                "config": {"timeout": 10000, "times": 1,
                           "active_users": 1, "tenants": 1,
                           "users_per_tenant": 1},
            },
            "name": "FakeScenario.fake",
            "pos": 0,
        },
        {"raw": ["fake_result"]},
    )
    # TODO(akscram): It's just to follow legacy logic.
    mock_deploy_delete.assert_called_once_with(self.deploy_uuid)
def start(self, config):
    """Run Benchmark task.

    :param config: a file with json configuration
    """
    try:
        # Use a context manager so the config file is always closed;
        # the previous json.load(open(config)) leaked the file handle.
        with open(config) as config_file:
            api.start_task(json.load(config_file))
    except Exception as e:
        # Deliberate best-effort behavior: report the failure to the
        # user instead of crashing the CLI.
        print(_("Something went wrong %s") % e)
def start(self, deploy_id, task):
    """Run a benchmark task.

    :param deploy_id: a UUID of a deployment
    :param task: a file with json configuration
    """
    with open(task) as task_file:
        config_dict = json.load(task_file)
    api.start_task(deploy_id, config_dict)
def test_start_task(self, mock_task_create, mock_task_update,
                    mock_task_result_create, mock_deploy_get,
                    mock_utils_runner, mock_osclients,
                    mock_validate_names, mock_validate_syntax,
                    mock_validate_semantic):
    """Happy-path api.start_task: VERIFYING -> RUNNING -> FINISHED.

    Results are drained from the runner's ``result_queue`` and stored
    together with the duration returned by ``run()``.
    """
    # DB-layer stubs return the fixtures prepared on self.
    mock_task_create.return_value = self.task
    mock_task_update.return_value = self.task
    mock_deploy_get.return_value = self.deployment
    # Runner stub: one queued fake result; run() reports duration 42.
    mock_utils_runner.return_value = mock_runner = mock.Mock()
    mock_runner.result_queue = collections.deque(['fake_result'])
    mock_runner.run.return_value = 42
    mock_osclients.Clients.return_value = fakes.FakeClients()

    api.start_task(self.deploy_uuid, self.task_config)

    mock_deploy_get.assert_called_once_with(self.deploy_uuid)
    mock_task_create.assert_called_once_with({
        'deployment_uuid': self.deploy_uuid,
    })
    # Status transitions recorded on the task, in order.
    mock_task_update.assert_has_calls([
        mock.call(self.task_uuid,
                  {'status': consts.TaskStatus.VERIFYING}),
        mock.call(self.task_uuid,
                  {'status': consts.TaskStatus.RUNNING}),
        mock.call(self.task_uuid,
                  {'status': consts.TaskStatus.FINISHED})
    ])
    # NOTE(akscram): It looks really awful, but checks degradation.
    mock_task_result_create.assert_called_once_with(
        self.task_uuid,
        {
            'kw': {
                'args': {},
                'runner': {
                    'type': 'constant',
                    'timeout': 10000,
                    'times': 3,
                    'concurrency': 2,
                },
                'context': {
                    'users': {
                        'tenants': 5,
                        'users_per_tenant': 6,
                    }
                }
            },
            'name': 'FakeScenario.fake',
            'pos': 0,
        },
        {
            'raw': ['fake_result'],
            'scenario_duration': 42
        }
    )
def test_start_task(self, mock_engine, mock_deployment_get, mock_task):
    """The engine is built, bound, validated and run — in that order."""
    api.start_task(self.deploy_uuid, "config")

    expected_engine_calls = [
        mock.call("config", mock_task.return_value),
        mock.call().bind(admin=mock_deployment_get.return_value["admin"],
                         users=[]),
        mock.call().validate(),
        mock.call().run(),
    ]
    mock_engine.assert_has_calls(expected_engine_calls)
    mock_task.assert_called_once_with(deployment_uuid=self.deploy_uuid)
    mock_deployment_get.assert_called_once_with(self.deploy_uuid)
def test_start_task(self, mock_task_create, mock_task_update,
                    mock_task_result_create, mock_deploy_get,
                    mock_utils_runner, mock_osclients,
                    mock_validate_names, mock_validate_syntax,
                    mock_validate_semantic):
    """Happy-path api.start_task: VERIFYING -> RUNNING -> FINISHED."""
    # DB-layer stubs return the fixtures prepared on self.
    mock_task_create.return_value = self.task
    mock_task_update.return_value = self.task
    mock_deploy_get.return_value = self.deployment
    # Runner stub: run() returns a single fake result.
    mock_utils_runner.return_value = mock_runner = mock.Mock()
    mock_runner.run.return_value = ['fake_result']
    mock_osclients.Clients.return_value = fakes.FakeClients()

    api.start_task(self.deploy_uuid, self.task_config)

    mock_deploy_get.assert_called_once_with(self.deploy_uuid)
    mock_task_create.assert_called_once_with({
        'deployment_uuid': self.deploy_uuid,
    })
    # Status transitions recorded on the task, in order.
    mock_task_update.assert_has_calls([
        mock.call(self.task_uuid,
                  {'status': consts.TaskStatus.VERIFYING}),
        mock.call(self.task_uuid,
                  {'status': consts.TaskStatus.RUNNING}),
        mock.call(self.task_uuid,
                  {'status': consts.TaskStatus.FINISHED})
    ])
    # NOTE(akscram): It looks really awful, but checks degradation.
    mock_task_result_create.assert_called_once_with(
        self.task_uuid,
        {
            'kw': {
                'args': {},
                'runner': {
                    'type': 'constant',
                    'timeout': 10000,
                    'times': 3,
                    'concurrency': 2,
                },
                'context': {
                    'users': {
                        'tenants': 5,
                        'users_per_tenant': 6,
                    }
                }
            },
            'name': 'FakeScenario.fake',
            'pos': 0,
        },
        {
            'raw': ['fake_result']
        }
    )
def start(self, task, deploy_id=None, tag=None):
    """Run a benchmark task.

    :param task: a file with yaml/json configuration
    :param deploy_id: a UUID of a deployment
    :param tag: optional tag for this task
    """
    with open(task, 'rb') as task_file:
        config_dict = yaml.safe_load(task_file.read())
    try:
        task = api.create_task(deploy_id, tag)
        self.list(task_list=[task])
        api.start_task(deploy_id, config_dict, task=task)
        self.detailed(task_id=task['uuid'])
    except exceptions.InvalidArgumentsException as e:
        # 'as e' binds the active exception directly — clearer than
        # digging it out of sys.exc_info()[1].
        print(_("Reason: %s") % e)
def start(self, task, deploy_id=None, tag=None):
    """Run a benchmark task.

    :param task: a file with yaml/json configuration
    :param deploy_id: a UUID of a deployment
    :param tag: optional tag for this task
    """
    with open(task, "rb") as task_file:
        config_dict = yaml.safe_load(task_file.read())
    try:
        task = api.create_task(deploy_id, tag)
        print("=" * 80)
        print(_("Task %(tag)s %(uuid)s is started")
              % {"uuid": task["uuid"], "tag": task["tag"]})
        print("-" * 80)
        api.start_task(deploy_id, config_dict, task=task)
        self.detailed(task_id=task["uuid"])
    except exceptions.InvalidConfigException:
        # Exit with a non-zero code so the shell sees the failure.
        sys.exit(1)
def test_start_task(self, mock_task_create, mock_task_update,
                    mock_task_result_create, mock_deploy_get,
                    mock_utils_runner):
    """Legacy happy-path api.start_task with runner and DB layer mocked."""
    # DB-layer stubs return the fixtures prepared on self.
    mock_task_create.return_value = self.task
    mock_task_update.return_value = self.task
    mock_deploy_get.return_value = self.deployment
    # Runner stub producing a single fake benchmark result.
    mock_utils_runner.return_value = mock_runner = mock.Mock()
    mock_runner.run.return_value = ['fake_result']

    api.start_task(self.deploy_uuid, self.task_config)

    mock_deploy_get.assert_called_once_with(self.deploy_uuid)
    mock_task_create.assert_called_once_with({
        'deployment_uuid': self.deploy_uuid,
    })
    # Only the legacy benchmarking status is expected here.
    mock_task_update.assert_has_calls([
        mock.call(self.task_uuid, {'status': 'test_tool->benchmarking'})
    ])
    # NOTE(akscram): It looks really awful, but checks degradation.
    mock_task_result_create.assert_called_once_with(
        self.task_uuid,
        {
            'kw': {
                'args': {},
                'execution': 'continuous',
                'config': {
                    'timeout': 10000,
                    'times': 1,
                    'active_users': 1,
                    'tenants': 1,
                    'users_per_tenant': 1,
                }
            },
            'name': 'FakeScenario.fake',
            'pos': 0,
        },
        {
            'raw': ['fake_result'],
        },
    )
def test_start_task(self):
    """Smoke test: start_task accepts a minimal config without raising."""
    # TODO(boris-42): Improve these tests to check that required
    # methods are called.
    minimal_config = {'deploy': {'name': 'test'}, 'tests': {}}
    api.start_task(minimal_config)