def test_executions_to_time_filter(self):
    """The 'executions' function must honor the 'to_time' filter."""
    with db_api.transaction(read_only=True):
        older_ex = db_api.create_workflow_execution(WF_EXECS[0])
        newer_ex = db_api.create_workflow_execution(WF_EXECS[1])

        ctx = {'__execution': {'id': 'some'}}

        # A far-future boundary returns both executions.
        result = self._evaluator.evaluate(
            '_|executions(to_time="2020-01-01")',
            ctx
        )

        self.assertEqual([older_ex, newer_ex], result)

        # A boundary between the two creation times returns only the older.
        result = self._evaluator.evaluate(
            '_|executions(to_time="2016-12-01 15:01:00")',
            ctx
        )

        self.assertEqual([older_ex], result)

        # Combining with a non-matching id filter returns nothing.
        result = self._evaluator.evaluate(
            '_|executions(id="two", to_time="2016-12-01 15:01:00")',
            ctx
        )

        self.assertEqual([], result)
def _get_all_function():
    # Fetch DB models and convert them to REST resources inside one
    # DB transaction so lazy-loaded relationships remain accessible.
    # NOTE(review): this is a closure; 'limit', 'marker_obj', 'sort_keys',
    # 'sort_dirs', 'insecure', 'filters', 'resource_function', 'cls' and
    # 'rest_resources' come from the enclosing scope.
    with db_api.transaction():
        db_models = get_all_function(limit=limit,
                                     marker=marker_obj,
                                     sort_keys=sort_keys,
                                     sort_dirs=sort_dirs,
                                     insecure=insecure,
                                     **filters)

        for db_model in db_models:
            try:
                # Prefer the caller-provided conversion function if any.
                if resource_function:
                    rest_resource = resource_function(db_model)
                else:
                    rest_resource = cls.from_db_model(db_model)

                rest_resources.append(rest_resource)
            except sa_exc.ObjectDeletedError:
                # If the persistent object has been removed in a parallel
                # transaction then it just won't be included into the
                # result set and the warning will be printed into the log.
                LOG.warning(
                    'The object must have been deleted while being fetched'
                    ' with a list request [model_class=%s, id=%s]',
                    type(db_model),
                    db_model.id,
                    exc_info=True
                )
def test_executions(self):
    """An unfiltered 'executions' call returns every workflow execution."""
    with db_api.transaction(read_only=True):
        first_ex = db_api.create_workflow_execution(WF_EXECS[0])
        second_ex = db_api.create_workflow_execution(WF_EXECS[1])

        evaluation_ctx = {'__execution': {'id': 'some'}}

        result = self._evaluator.evaluate('_|executions()', evaluation_ctx)

        self.assertEqual([first_ex, second_ex], result)
def test_update_workflow_execution_env_wrong_state(self):
    # Verify that updating the environment of a workflow execution is
    # rejected for every state in which such an update is not permitted,
    # and that neither params nor context are modified in that case.
    wf_exec_template = {
        'spec': {},
        'start_params': {
            'task': 'my_task1'
        },
        'state': 'PAUSED',
        'state_info': None,
        'params': {
            'env': {
                'k1': 'abc'
            }
        },
        'created_at': None,
        'updated_at': None,
        'context': {
            '__env': {
                'k1': 'fee fi fo fum'
            }
        },
        'task_id': None,
        'trust_id': None,
        'description': None,
        'output': None
    }

    # States in which an env update must raise NotAllowedException.
    states_not_permitted = [
        states.RUNNING,
        states.RUNNING_DELAYED,
        states.SUCCESS,
        states.WAITING
    ]

    update_env = {'k1': 'foobar'}

    for state in states_not_permitted:
        # Deep copy so each iteration starts from a pristine template.
        wf_exec = copy.deepcopy(wf_exec_template)
        wf_exec['state'] = state

        with db_api.transaction():
            created = db_api.create_workflow_execution(wf_exec)

            self.assertIsNone(created.updated_at)

            self.assertRaises(exc.NotAllowedException,
                              wf_service.update_workflow_execution_env,
                              created, update_env)

            # The stored execution must be unchanged.
            fetched = db_api.get_workflow_execution(created.id)

            self.assertDictEqual(wf_exec['params']['env'],
                                 fetched.params['env'])
            self.assertDictEqual(wf_exec['context']['__env'],
                                 fetched.context['__env'])
def test_task_executions(self):
    # Verify that task executions can be added to and removed from a
    # workflow execution's collection, and that the polymorphic loading
    # of executions works correctly.

    # Add an associated object into collection.
    with db_api.transaction():
        wf_ex = db_api.create_workflow_execution(WF_EXECS[0])

        self.assertEqual(0, len(wf_ex.task_executions))

        wf_ex.task_executions.append(
            db_models.TaskExecution(**TASK_EXECS[0])
        )

    # Make sure task execution has been saved.
    with db_api.transaction():
        wf_ex = db_api.get_workflow_execution(wf_ex.id)

        self.assertIsNotNone(wf_ex)
        self.assertEqual(1, len(wf_ex.task_executions))

        task_ex = wf_ex.task_executions[0]

        self.assertEqual(TASK_EXECS[0]['name'], task_ex.name)

        # Make sure that polymorphic load works correctly.
        self.assertEqual(2, len(db_api.get_executions()))
        self.assertEqual(1, len(db_api.get_workflow_executions()))
        self.assertEqual(1, len(db_api.get_task_executions()))

    # Remove task execution from collection.
    with db_api.transaction():
        wf_ex = db_api.get_workflow_execution(wf_ex.id)

        del wf_ex.task_executions[:]

    # Make sure task execution has been removed.
    with db_api.transaction():
        wf_ex = db_api.get_workflow_execution(wf_ex.id)

        self.assertEqual(0, len(wf_ex.task_executions))
        self.assertIsNone(db_api.load_task_execution(task_ex.id))
def test_update_workflow_execution_env_wrong_state(self):
    """Env updates must be rejected while the execution is active."""
    base_exec = {
        'spec': {},
        'start_params': {'task': 'my_task1'},
        'state': 'PAUSED',
        'state_info': None,
        'params': {'env': {'k1': 'abc'}},
        'created_at': None,
        'updated_at': None,
        'context': {'__env': {'k1': 'fee fi fo fum'}},
        'task_id': None,
        'trust_id': None,
        'description': None,
        'output': None
    }

    forbidden_states = (
        states.RUNNING,
        states.RUNNING_DELAYED,
        states.SUCCESS,
        states.WAITING
    )

    new_env = {'k1': 'foobar'}

    for wf_state in forbidden_states:
        # Start each iteration from an untouched copy of the template.
        wf_exec = copy.deepcopy(base_exec)
        wf_exec['state'] = wf_state

        with db_api.transaction():
            created = db_api.create_workflow_execution(wf_exec)

            self.assertIsNone(created.updated_at)

            # Updating the environment in this state must be refused.
            self.assertRaises(
                exc.NotAllowedException,
                wf_service.update_workflow_execution_env,
                created,
                new_env
            )

            # Neither params nor context may have changed.
            fetched = db_api.get_workflow_execution(created.id)

            self.assertDictEqual(
                wf_exec['params']['env'],
                fetched.params['env']
            )
            self.assertDictEqual(
                wf_exec['context']['__env'],
                fetched.context['__env']
            )
def _run_tx1():
    # First half of the two-transaction interplay: create a workflow
    # execution, let TX2 run, and keep this transaction open until TX2
    # releases us.
    # NOTE(review): closure - 'sem1', 'sem2' and 'WF_EXEC' come from the
    # enclosing scope.
    with db_api.transaction():
        wf_ex = db_api.create_workflow_execution(WF_EXEC)

        # Release TX2 so it can read data.
        sem2.release()

        print("Created: %s" % wf_ex)
        print("Holding TX1...")

        # Block inside the transaction until TX2 signals completion.
        sem1.acquire()

    # Reached only after the transaction has committed.
    print("TX1 completed.")
def test_action_executions(self):
    """Check adding/removing action executions on a task execution."""
    # Store one task with two invocations.
    with db_api.transaction():
        wf_ex = db_api.create_workflow_execution(WF_EXECS[0])

        # Deep-copy the shared fixture so this test can never leak
        # mutations into TASK_EXECS (and to stay consistent with the
        # sibling test that prepares task values the same way).
        values = copy.deepcopy(TASK_EXECS[0])
        values.update({'workflow_execution_id': wf_ex.id})

        task = db_api.create_task_execution(values)

        self.assertEqual(0, len(task.executions))

        a_ex1 = db_models.ActionExecution()
        a_ex2 = db_models.ActionExecution()

        task.executions.append(a_ex1)
        task.executions.append(a_ex2)

        self.assertEqual(2, len(task.executions))

    # Make sure associated objects were saved.
    with db_api.transaction():
        task = db_api.get_task_execution(task.id)

        self.assertEqual(2, len(task.executions))

        # The back-reference must be a scalar, not a collection.
        self.assertNotIsInstance(task.executions[0].task_execution, list)

    # Remove associated objects from collection.
    with db_api.transaction():
        task = db_api.get_task_execution(task.id)

        del task.executions[:]

    # Make sure associated objects were deleted.
    with db_api.transaction():
        task = db_api.get_task_execution(task.id)

        self.assertEqual(0, len(task.executions))
def test_task_executions(self):
    """Verify add/remove of task executions on a workflow execution."""
    # Attach one task execution to a freshly created workflow execution.
    with db_api.transaction():
        wf_exec = db_api.create_workflow_execution(WF_EXECS[0])

        self.assertEqual(0, len(wf_exec.task_executions))

        wf_exec.task_executions.append(
            db_models.TaskExecution(**TASK_EXECS[0])
        )

    # The task execution must have been persisted with its parent.
    with db_api.transaction():
        wf_exec = db_api.get_workflow_execution(wf_exec.id)

        self.assertIsNotNone(wf_exec)
        self.assertEqual(1, len(wf_exec.task_executions))

        t_ex = wf_exec.task_executions[0]

        self.assertEqual(TASK_EXECS[0]['name'], t_ex.name)

        # Polymorphic load must account for both execution types.
        self.assertEqual(2, len(db_api.get_executions()))
        self.assertEqual(1, len(db_api.get_workflow_executions()))
        self.assertEqual(1, len(db_api.get_task_executions()))

    # Emptying the collection must delete the task execution.
    with db_api.transaction():
        wf_exec = db_api.get_workflow_execution(wf_exec.id)

        del wf_exec.task_executions[:]

    # The task execution must be gone now.
    with db_api.transaction():
        wf_exec = db_api.get_workflow_execution(wf_exec.id)

        self.assertEqual(0, len(wf_exec.task_executions))
        self.assertIsNone(db_api.load_task_execution(t_ex.id))
def test_action_executions(self):
    # Verify that action executions can be attached to and removed from
    # a task execution's collection.

    # Store one task with two invocations.
    with db_api.transaction():
        wf_ex = db_api.create_workflow_execution(WF_EXECS[0])

        # Deep copy so the shared TASK_EXECS fixture is never mutated.
        values = copy.deepcopy(TASK_EXECS[0])
        values.update({'workflow_execution_id': wf_ex.id})

        task = db_api.create_task_execution(values)

        self.assertEqual(0, len(task.executions))

        a_ex1 = db_models.ActionExecution()
        a_ex2 = db_models.ActionExecution()

        task.executions.append(a_ex1)
        task.executions.append(a_ex2)

        self.assertEqual(2, len(task.executions))

    # Make sure associated objects were saved.
    with db_api.transaction():
        task = db_api.get_task_execution(task.id)

        self.assertEqual(2, len(task.executions))

        # The back-reference must be a scalar object, not a collection.
        self.assertNotIsInstance(task.executions[0].task_execution, list)

    # Remove associated objects from collection.
    with db_api.transaction():
        task = db_api.get_task_execution(task.id)

        del task.executions[:]

    # Make sure associated objects were deleted.
    with db_api.transaction():
        task = db_api.get_task_execution(task.id)

        self.assertEqual(0, len(task.executions))
def _run_correct_locking(self, wf_ex):
    # Increment the execution's name (used here as a counter) under a
    # row lock so that concurrent threads serialize their updates.
    self._random_sleep()

    with db_api.transaction():
        # Lock workflow execution and get the most up-to-date object.
        wf_ex = db_api.acquire_lock(db_models.WorkflowExecution, wf_ex.id)

        # Refresh the object.
        # NOTE: the return value is intentionally discarded; the call only
        # reloads the locked object's state into the session.
        db_api.get_workflow_execution(wf_ex.id)

        wf_ex.name = str(int(wf_ex.name) + 1)

        return wf_ex.name
def test_commit_transaction(self):
    """A committed transaction closes the session and persists data."""
    with db_api.transaction():
        wb = db_api.create_workbook(WORKBOOKS[0])
        loaded = db_api.get_workbook(wb.name)

        # Inside the transaction the object is visible and the session
        # is open.
        self.assertEqual(wb, loaded)
        self.assertTrue(self.is_db_session_open())

    # Leaving the context manager commits and closes the session.
    self.assertFalse(self.is_db_session_open())

    # The workbook survives the commit and can be fetched again,
    # without re-opening a long-lived session.
    loaded = db_api.get_workbook(wb.name)

    self.assertEqual(wb, loaded)
    self.assertFalse(self.is_db_session_open())
def test_executions(self):
    """The 'executions' function with no filters returns all executions."""
    with db_api.transaction(read_only=True):
        wf_ex_one = db_api.create_workflow_execution(WF_EXECS[0])
        wf_ex_two = db_api.create_workflow_execution(WF_EXECS[1])

        ctx = {
            '__execution': {
                'id': 'some'
            }
        }

        self.assertEqual(
            [wf_ex_one, wf_ex_two],
            self._evaluator.evaluate('_|executions()', ctx)
        )
def test_update_workflow_execution_env(self):
    # Verify that the environment of a workflow execution can be updated
    # in every state where such an update is permitted, and that both
    # params and context reflect the new environment afterwards.
    wf_exec_template = {
        'spec': {},
        'start_params': {
            'task': 'my_task1'
        },
        'state': 'PAUSED',
        'state_info': None,
        'params': {
            'env': {
                'k1': 'abc'
            }
        },
        'created_at': None,
        'updated_at': None,
        'context': {
            '__env': {
                'k1': 'fee fi fo fum'
            }
        },
        'task_id': None,
        'trust_id': None,
        'description': None,
        'output': None
    }

    # States in which the env update must succeed.
    states_permitted = [states.IDLE, states.PAUSED, states.ERROR]

    update_env = {'k1': 'foobar'}

    for state in states_permitted:
        # Deep copy so each iteration starts from a pristine template.
        wf_exec = copy.deepcopy(wf_exec_template)
        wf_exec['state'] = state

        with db_api.transaction():
            created = db_api.create_workflow_execution(wf_exec)

            self.assertIsNone(created.updated_at)

            updated = wf_service.update_workflow_execution_env(
                created, update_env)

            self.assertDictEqual(update_env, updated.params['env'])
            self.assertDictEqual(update_env, updated.context['__env'])

            # The update must be persisted and stamp 'updated_at'.
            fetched = db_api.get_workflow_execution(created.id)

            self.assertEqual(updated, fetched)
            self.assertIsNotNone(fetched.updated_at)
def _run_correct_locking(self, wf_ex):
    # Thread body: increment the execution's name (used as a counter)
    # under a row lock so concurrent updates serialize correctly.

    # Set context info for the thread.
    auth_context.set_ctx(test_base.get_context())

    self._random_sleep()

    with db_api.transaction():
        # Lock workflow execution and get the most up-to-date object.
        wf_ex = db_api.acquire_lock(db_models.WorkflowExecution, wf_ex.id)

        # Refresh the object.
        # NOTE: the return value is intentionally discarded; the call only
        # reloads the locked object's state into the session.
        db_api.get_workflow_execution(wf_ex.id)

        wf_ex.name = str(int(wf_ex.name) + 1)

        return wf_ex.name
def _run_tx2():
    # Second half of the two-transaction interplay: wait for TX1 to
    # create data, read it, then let TX1 finish.
    # NOTE(review): closure - 'sem1', 'sem2' and 'self' come from the
    # enclosing scope.
    with db_api.transaction():
        print("Holding TX2...")

        # Wait until TX1 has created the workflow execution.
        sem2.acquire()

        wf_execs = db_api.get_workflow_executions()

        print("Read: %s" % wf_execs)

        # TX2 must see the execution created by TX1.
        self.assertEqual(1, len(wf_execs))

        # Release TX1 so it can complete.
        sem1.release()

    # Reached only after this transaction has committed.
    print("TX2 completed.")
def test_executions_state_filter(self):
    """The 'executions' function must honor the 'state' filter."""
    with db_api.transaction(read_only=True):
        db_api.create_workflow_execution(WF_EXECS[0])

        running_ex = db_api.create_workflow_execution(WF_EXECS[1])

        ctx = {
            '__execution': {
                'id': 'some'
            }
        }

        # Only the RUNNING execution matches the state filter.
        self.assertEqual(
            [running_ex],
            self._evaluator.evaluate('_|executions(state="RUNNING")', ctx)
        )

        # Adding a non-matching id filter yields an empty result.
        self.assertEqual(
            [],
            self._evaluator.evaluate(
                '_|executions(id="one", state="RUNNING")', ctx)
        )
def _get_all_function():
    # Fetch DB models and convert them to REST resources inside one
    # DB transaction so lazy-loaded relationships remain accessible.
    # NOTE(review): this is a closure; 'limit', 'marker_obj', 'sort_keys',
    # 'sort_dirs', 'insecure', 'filters', 'resource_function', 'cls' and
    # 'rest_resources' come from the enclosing scope.
    # NOTE(review): unlike the sibling version of this helper, this one
    # does not guard against ObjectDeletedError raised when a model is
    # deleted by a parallel transaction mid-iteration — confirm whether
    # that handling is needed here.
    with db_api.transaction():
        db_models = get_all_function(limit=limit,
                                     marker=marker_obj,
                                     sort_keys=sort_keys,
                                     sort_dirs=sort_dirs,
                                     insecure=insecure,
                                     **filters)

        for db_model in db_models:
            # Prefer the caller-provided conversion function if any.
            if resource_function:
                rest_resource = resource_function(db_model)
            else:
                rest_resource = cls.from_db_model(db_model)

            rest_resources.append(rest_resource)
def _run_invalid_locking(self, wf_ex):
    # Negative-case thread body: demonstrates that locking AFTER the
    # object has been loaded does not serialize concurrent updates.
    self._random_sleep()

    with db_api.transaction():
        # Load object into the session (transaction).
        wf_ex = db_api.get_workflow_execution(wf_ex.id)

        # It's too late to lock the object here because it's already
        # been loaded into the session so there should be multiple
        # threads that read the same object state so they write the
        # same value into DB. As a result we won't get a result
        # (object name) equal to a number of transactions.
        db_api.acquire_lock(db_models.WorkflowExecution, wf_ex.id)

        wf_ex.name = str(int(wf_ex.name) + 1)

        return wf_ex.name
def test_rollback_transaction(self):
    # Creating the same workbook twice must hit a unique constraint and
    # roll the entire transaction back, so nothing gets persisted.
    try:
        with db_api.transaction():
            created = db_api.create_workbook(WORKBOOKS[0])
            fetched = db_api.get_workbook(created.name)

            self.assertEqual(created, fetched)
            self.assertTrue(self.is_db_session_open())

            # Duplicate insert triggers the rollback.
            db_api.create_workbook(WORKBOOKS[0])
    except exc.DBDuplicateEntry:
        pass

    # The session must be closed after the rollback.
    self.assertFalse(self.is_db_session_open())

    # And the first insert must have been rolled back as well.
    # NOTE: 'created' deliberately leaks out of the try block above.
    self.assertRaises(exc.NotFoundException,
                      db_api.get_workbook, created.name)
def test_update_workflow_execution_env(self):
    """Env updates must succeed for executions in inactive states."""
    base_exec = {
        'spec': {},
        'start_params': {'task': 'my_task1'},
        'state': 'PAUSED',
        'state_info': None,
        'params': {'env': {'k1': 'abc'}},
        'created_at': None,
        'updated_at': None,
        'context': {'__env': {'k1': 'fee fi fo fum'}},
        'task_id': None,
        'trust_id': None,
        'description': None,
        'output': None
    }

    allowed_states = (states.IDLE, states.PAUSED, states.ERROR)

    new_env = {'k1': 'foobar'}

    for wf_state in allowed_states:
        # Start each iteration from an untouched copy of the template.
        wf_exec = copy.deepcopy(base_exec)
        wf_exec['state'] = wf_state

        with db_api.transaction():
            created = db_api.create_workflow_execution(wf_exec)

            self.assertIsNone(created.updated_at)

            updated = wf_service.update_workflow_execution_env(
                created,
                new_env
            )

            # Both params and context must carry the new environment.
            self.assertDictEqual(new_env, updated.params['env'])
            self.assertDictEqual(new_env, updated.context['__env'])

            # The change must be persisted and stamp 'updated_at'.
            fetched = db_api.get_workflow_execution(created.id)

            self.assertEqual(updated, fetched)
            self.assertIsNotNone(fetched.updated_at)
def _run_correct_locking(self, wf_ex):
    # Thread body: increment the execution's name (used as a counter)
    # with the lock taken BEFORE the object is loaded, which serializes
    # concurrent updates correctly.
    self._random_sleep()

    with db_api.transaction():
        # Here we lock the object before it gets loaded into the
        # session and prevent reading the same object state by
        # multiple transactions. Hence the rest of the transaction
        # body works atomically (in a serialized manner) and the
        # result (object name) must be equal to a number of
        # transactions.
        db_api.acquire_lock(db_models.WorkflowExecution, wf_ex.id)

        # Refresh the object.
        wf_ex = db_api.get_workflow_execution(wf_ex.id)

        wf_ex.name = str(int(wf_ex.name) + 1)

        return wf_ex.name
def test_rollback_transaction(self):
    # Creating the same workbook twice must hit a unique constraint and
    # roll the entire transaction back, so nothing gets persisted.
    try:
        with db_api.transaction():
            created = db_api.create_workbook(WORKBOOKS[0])
            fetched = db_api.get_workbook(created.name)

            self.assertEqual(created, fetched)
            self.assertTrue(self.is_db_session_open())

            # Duplicate insert triggers the rollback.
            db_api.create_workbook(WORKBOOKS[0])
    except exc.DBDuplicateEntryException:
        pass

    # The session must be closed after the rollback.
    self.assertFalse(self.is_db_session_open())

    # And the first insert must have been rolled back as well.
    # NOTE: 'created' deliberately leaks out of the try block above.
    self.assertRaises(
        exc.NotFoundException,
        db_api.get_workbook,
        created.name
    )
def test_executions_state_filter(self):
    # Verify that the 'executions' YAQL function honors the 'state'
    # filter, alone and combined with a non-matching 'id' filter.
    with db_api.transaction(read_only=True):
        db_api.create_workflow_execution(WF_EXECS[0])

        created1 = db_api.create_workflow_execution(WF_EXECS[1])

        ctx = {
            '__execution': {
                'id': 'some'
            }
        }

        # Only the second execution is in the RUNNING state.
        result = self._evaluator.evaluate(
            '_|executions(state="RUNNING")',
            ctx
        )

        self.assertEqual([created1], result)

        # A non-matching id filter combined with the state filter
        # yields no results.
        result = self._evaluator.evaluate(
            '_|executions(id="one", state="RUNNING")',
            ctx
        )

        self.assertEqual([], result)
def get_all(list_cls, cls, get_all_function, get_function,
            resource_function=None, marker=None, limit=None,
            sort_keys='created_at', sort_dirs='asc', fields='',
            all_projects=False, **filters):
    """Return a list of cls.

    :param list_cls: REST Resource collection class (e.g.: Actions,
        Workflows, ...)
    :param cls: REST Resource class (e.g.: Action, Workflow, ...)
    :param get_all_function: Request function to get all elements with
        filtering (limit, marker, sort_keys, sort_dirs, fields)
    :param get_function: Function used to fetch the marker
    :param resource_function: Optional, function used to fetch additional
        data
    :param marker: Optional. Pagination marker for large data sets.
    :param limit: Optional. Maximum number of resources to return in a
        single result. Default value is None for backward compatibility.
    :param sort_keys: Optional. Columns to sort results by.
        Default: created_at.
    :param sort_dirs: Optional. Directions to sort corresponding to
        sort_keys, "asc" or "desc" can be chosen. Default: asc.
    :param fields: Optional. A specified list of fields of the resource
        to be returned. 'id' will be included automatically in fields
        if it's provided, since it will be used when constructing
        'next' link.
    :param filters: Optional. A specified dictionary of filters to match.
    :param all_projects: Optional. Get resources of all projects.
    """
    # 'id' is needed later to build the 'next' pagination link.
    if fields and 'id' not in fields:
        fields.insert(0, 'id')

    validate_query_params(limit, sort_keys, sort_dirs)
    validate_fields(fields, cls.get_fields())

    # Admin user can get all tenants resources, no matter they are private or
    # public.
    insecure = False

    if (all_projects or
            (auth_ctx.ctx().is_admin and filters.get('project_id', ''))):
        insecure = True

    marker_obj = None

    if marker:
        marker_obj = get_function(marker)

    rest_resources = []

    # If only certain fields are requested then we ignore "resource_function"
    # parameter because it doesn't make sense anymore.
    if fields:
        db_list = get_all_function(
            limit=limit,
            marker=marker_obj,
            sort_keys=sort_keys,
            sort_dirs=sort_dirs,
            fields=fields,
            insecure=insecure,
            **filters
        )

        for obj_values in db_list:
            # Note: in case if only certain fields have been requested
            # "db_list" contains tuples with values of db objects.
            rest_resources.append(
                cls.from_tuples(zip(fields, obj_values))
            )
    else:
        # Convert inside one transaction so lazy-loaded relationships of
        # the fetched models remain accessible during conversion.
        with db_api.transaction():
            db_models = get_all_function(
                limit=limit,
                marker=marker_obj,
                sort_keys=sort_keys,
                sort_dirs=sort_dirs,
                insecure=insecure,
                **filters
            )

            for db_model in db_models:
                if resource_function:
                    rest_resource = resource_function(db_model)
                else:
                    rest_resource = cls.from_db_model(db_model)

                rest_resources.append(rest_resource)

    return list_cls.convert_with_links(
        rest_resources,
        limit,
        pecan.request.host_url,
        sort_keys=','.join(sort_keys),
        sort_dirs=','.join(sort_dirs),
        fields=','.join(fields) if fields else '',
        **filters
    )
def get_all(list_cls, cls, get_all_function, get_function,
            resource_function=None, marker=None, limit=None,
            sort_keys='created_at', sort_dirs='asc', fields='',
            all_projects=False, **filters):
    """Return a list of cls.

    :param list_cls: Collection class (e.g.: Actions, Workflows, ...).
    :param cls: Class (e.g.: Action, Workflow, ...).
    :param get_all_function: Request function to get all elements with
        filtering (limit, marker, sort_keys, sort_dirs, fields)
    :param get_function: Function used to fetch the marker
    :param resource_function: Optional, function used to fetch additional
        data
    :param marker: Optional. Pagination marker for large data sets.
    :param limit: Optional. Maximum number of resources to return in a
        single result. Default value is None for backward compatibility.
    :param sort_keys: Optional. Columns to sort results by.
        Default: created_at.
    :param sort_dirs: Optional. Directions to sort corresponding to
        sort_keys, "asc" or "desc" can be chosen. Default: asc.
    :param fields: Optional. A specified list of fields of the resource
        to be returned. 'id' will be included automatically in fields
        if it's provided, since it will be used when constructing
        'next' link.
    :param filters: Optional. A specified dictionary of filters to match.
    :param all_projects: Optional. Get resources of all projects.
    """
    # 'id' is needed later to build the 'next' pagination link.
    if fields and 'id' not in fields:
        fields.insert(0, 'id')

    validate_query_params(limit, sort_keys, sort_dirs)
    validate_fields(fields, cls.get_fields())

    # Admin user can get all tenants resources, no matter they are private or
    # public.
    insecure = False

    if (all_projects or
            (auth_ctx.ctx().is_admin and filters.get('project_id', ''))):
        insecure = True

    marker_obj = None

    if marker:
        marker_obj = get_function(marker)

    list_to_return = []

    if resource_function:
        # Convert inside one transaction so lazy-loaded relationships of
        # the fetched models remain accessible during conversion.
        with db_api.transaction():
            # do not filter fields yet, resource_function needs the ORM
            # object
            db_list = get_all_function(
                limit=limit,
                marker=marker_obj,
                sort_keys=sort_keys,
                sort_dirs=sort_dirs,
                insecure=insecure,
                **filters
            )

            for data in db_list:
                obj = resource_function(data)

                # filter fields using a loop instead of the ORM
                if fields:
                    data = []

                    for f in fields:
                        if hasattr(obj, f):
                            data.append(getattr(obj, f))

                    dict_data = dict(zip(fields, data))
                else:
                    dict_data = obj.to_dict()

                list_to_return.append(cls.from_dict(dict_data))
    else:
        db_list = get_all_function(
            limit=limit,
            marker=marker_obj,
            sort_keys=sort_keys,
            sort_dirs=sort_dirs,
            fields=fields,
            insecure=insecure,
            **filters
        )

        for data in db_list:
            # When specific fields were requested the DB layer returns
            # tuples of values rather than model objects.
            dict_data = (dict(zip(fields, data))
                         if fields else data.to_dict())

            list_to_return.append(cls.from_dict(dict_data))

    return list_cls.convert_with_links(
        list_to_return,
        limit,
        pecan.request.host_url,
        sort_keys=','.join(sort_keys),
        sort_dirs=','.join(sort_dirs),
        fields=','.join(fields) if fields else '',
        **filters
    )