def test_workflow_commit(self):
    """End-to-end check of the scheduler's commit stages for a workflow.

    Drives one workflow (id=20) through three participant-committing
    transitions — creating (NEW -> READY), running (READY -> RUNNING)
    and stopping (RUNNING -> STOPPED) — waking the scheduler after each
    and polling with self._wait_until until the target state is reached.
    Also checks the derived dict state (COMPLETED / FAILED) computed
    from the owned jobs' states.
    """
    # --- committing stage for workflow creating (NEW -> READY) ---
    workflow_def = make_workflow_template()
    workflow = Workflow(
        id=20,
        name='job_test1',
        comment='这是一个测试工作流',
        config=workflow_def.SerializeToString(),
        project_id=1,
        forkable=True,
        state=WorkflowState.NEW,
        target_state=WorkflowState.READY,
        # PARTICIPANT_COMMITTING makes the scheduler drive the
        # state transition on wakeup
        transaction_state=TransactionState.PARTICIPANT_COMMITTING)
    db.session.add(workflow)
    db.session.commit()
    scheduler.wakeup(20)
    self._wait_until(
        lambda: Workflow.query.get(20).state == WorkflowState.READY)
    workflow = Workflow.query.get(20)
    # committing a NEW workflow materializes its jobs in state NEW
    self.assertEqual(len(workflow.get_jobs()), 2)
    self.assertEqual(workflow.get_jobs()[0].state, JobState.NEW)
    self.assertEqual(workflow.get_jobs()[1].state, JobState.NEW)

    # --- committing stage for workflow running (READY -> RUNNING) ---
    workflow.target_state = WorkflowState.RUNNING
    workflow.transaction_state = TransactionState.PARTICIPANT_COMMITTING
    db.session.commit()
    scheduler.wakeup(20)
    self._wait_until(
        lambda: Workflow.query.get(20).state == WorkflowState.RUNNING)
    workflow = Workflow.query.get(20)
    # job[0] starts first; job[1] presumably waits on a dependency —
    # TODO confirm against the template produced by
    # make_workflow_template()
    self._wait_until(
        lambda: workflow.get_jobs()[0].state == JobState.STARTED)
    self.assertEqual(workflow.get_jobs()[1].state, JobState.WAITING)
    workflow = Workflow.query.get(20)
    # derived workflow state: all jobs COMPLETED -> 'COMPLETED',
    # any job FAILED -> 'FAILED'
    for job in workflow.owned_jobs:
        job.state = JobState.COMPLETED
    self.assertEqual(workflow.to_dict()['state'], 'COMPLETED')
    workflow.get_jobs()[0].state = JobState.FAILED
    self.assertEqual(workflow.to_dict()['state'], 'FAILED')

    # --- committing stage for workflow stopping (RUNNING -> STOPPED) ---
    workflow.target_state = WorkflowState.STOPPED
    workflow.transaction_state = TransactionState.PARTICIPANT_COMMITTING
    # reset jobs to STARTED so the stop transition has something to stop
    for job in workflow.owned_jobs:
        job.state = JobState.STARTED
    db.session.commit()
    scheduler.wakeup(20)
    self._wait_until(
        lambda: Workflow.query.get(20).state == WorkflowState.STOPPED)
    workflow = Workflow.query.get(20)
    self._wait_until(
        lambda: workflow.get_jobs()[0].state == JobState.STOPPED)
    self.assertEqual(workflow.get_jobs()[1].state, JobState.STOPPED)
def post(self):
    """Create a workflow; builds it as a fork when `forked_from` is set.

    Validates the request body, rejects duplicate workflow names,
    converts the config dict into a workflow-definition proto, persists
    the new row, and wakes the scheduler to commit it.
    """
    parser = reqparse.RequestParser()
    add = parser.add_argument
    add('name', required=True, help='name is empty')
    add('project_id', type=int, required=True, help='project_id is empty')
    # TODO: should verify if the config is compatible with
    # workflow template
    add('config', type=dict, required=True, help='config is empty')
    add('forkable', type=bool, required=True, help='forkable is empty')
    add('forked_from', type=int, required=False,
        help='fork from base workflow')
    add('reuse_job_names', type=list, required=False, location='json',
        help='fork and inherit jobs')
    add('peer_reuse_job_names', type=list, required=False, location='json',
        help='peer fork and inherit jobs')
    add('fork_proposal_config', type=dict, required=False,
        help='fork and edit peer config')
    add('comment')
    args = parser.parse_args()

    workflow_name = args['name']
    duplicate = Workflow.query.filter_by(name=workflow_name).first()
    if duplicate is not None:
        raise ResourceConflictException(
            'Workflow {} already exists.'.format(workflow_name))

    # form to proto buffer
    config_proto = dict_to_workflow_definition(args['config'])
    workflow = Workflow(name=workflow_name,
                        comment=args['comment'],
                        project_id=args['project_id'],
                        forkable=args['forkable'],
                        forked_from=args['forked_from'],
                        state=WorkflowState.NEW,
                        target_state=WorkflowState.READY,
                        transaction_state=TransactionState.READY)
    if workflow.forked_from is not None:
        proposal_proto = dict_to_workflow_definition(
            args['fork_proposal_config'])
        # TODO: more validations
        if len(proposal_proto.job_definitions) != \
                len(config_proto.job_definitions):
            raise InvalidArgumentException(
                'Forked workflow\'s template does not match base workflow')
        workflow.set_fork_proposal_config(proposal_proto)
        workflow.set_reuse_job_names(args['reuse_job_names'])
        workflow.set_peer_reuse_job_names(args['peer_reuse_job_names'])
    workflow.set_config(config_proto)
    db.session.add(workflow)
    db.session.commit()
    logging.info('Inserted a workflow to db')
    scheduler.wakeup(workflow.id)
    return {'data': workflow.to_dict()}, HTTPStatus.CREATED
def post(self):
    """Create a new workflow and wake the scheduler to commit it.

    Expects a JSON body with `name`, `project_id`, `config` (a workflow
    template dict), `forkable`, and optionally `forked_from` and
    `comment`.

    Returns:
        ({'data': workflow_dict}, HTTPStatus.CREATED) on success.

    Raises:
        ResourceConflictException: a workflow with the same name exists.
    """
    parser = reqparse.RequestParser()
    parser.add_argument('name', required=True, help='name is empty')
    parser.add_argument('project_id', type=int, required=True,
                        help='project_id is empty')
    # TODO: should verify if the config is compatible with
    # workflow template
    parser.add_argument('config', type=dict, required=True,
                        help='config is empty')
    parser.add_argument('forkable', type=bool, required=True,
                        help='forkable is empty')
    # Fixed: help text was copy-pasted from `forkable`
    # ('forkable is empty'); it should describe `forked_from`,
    # matching the sibling handlers.
    parser.add_argument('forked_from', type=int, required=False,
                        help='fork from base workflow')
    parser.add_argument('comment')
    data = parser.parse_args()
    name = data['name']
    if Workflow.query.filter_by(name=name).first() is not None:
        raise ResourceConflictException(
            'Workflow {} already exists.'.format(name))
    # form to proto buffer
    template_proto = dict_to_workflow_definition(data['config'])
    workflow = Workflow(name=name,
                        comment=data['comment'],
                        project_id=data['project_id'],
                        forkable=data['forkable'],
                        forked_from=data['forked_from'],
                        state=WorkflowState.NEW,
                        target_state=WorkflowState.READY,
                        transaction_state=TransactionState.READY)
    workflow.set_config(template_proto)
    db.session.add(workflow)
    db.session.commit()
    logging.info('Inserted a workflow to db')
    # hand the new workflow to the scheduler for the commit stage
    scheduler.wakeup(workflow.id)
    return {'data': workflow.to_dict()}, HTTPStatus.CREATED
def post(self):
    """Create a workflow, optionally as a fork of an existing one.

    Parses the request, rejects duplicate names, converts the config
    dict to a workflow-definition proto, stores the row with a fresh
    DNS-safe uuid, and wakes the scheduler to commit it.
    """
    parser = reqparse.RequestParser()
    # Declarative (field, add_argument-kwargs) table keeps the request
    # schema scannable in one place.
    request_schema = (
        ('name', {'required': True, 'help': 'name is empty'}),
        ('project_id', {'type': int, 'required': True,
                        'help': 'project_id is empty'}),
        # TODO: should verify if the config is compatible with
        # workflow template
        ('config', {'type': dict, 'required': True,
                    'help': 'config is empty'}),
        ('forkable', {'type': bool, 'required': True,
                      'help': 'forkable is empty'}),
        ('forked_from', {'type': int, 'required': False,
                         'help': 'fork from base workflow'}),
        ('create_job_flags', {'type': list, 'required': False,
                              'location': 'json',
                              'help': 'flags in common.CreateJobFlag'}),
        ('peer_create_job_flags', {'type': list, 'required': False,
                                   'location': 'json',
                                   'help':
                                       'peer flags in common.CreateJobFlag'}),
        ('fork_proposal_config', {'type': dict, 'required': False,
                                  'help': 'fork and edit peer config'}),
        ('comment', {}),
    )
    for field, options in request_schema:
        parser.add_argument(field, **options)
    data = parser.parse_args()

    name = data['name']
    if Workflow.query.filter_by(name=name).first() is not None:
        raise ResourceConflictException(
            'Workflow {} already exists.'.format(name))

    # form to proto buffer
    template_proto = dict_to_workflow_definition(data['config'])
    workflow = Workflow(
        name=name,
        # 20 bytes; a DNS-1035 label must start with an alphabetic
        # character, hence the 'u' prefix. substring uuid[:19] has no
        # collision in 10 million draws.
        uuid=f'u{uuid4().hex[:19]}',
        comment=data['comment'],
        project_id=data['project_id'],
        forkable=data['forkable'],
        forked_from=data['forked_from'],
        state=WorkflowState.NEW,
        target_state=WorkflowState.READY,
        transaction_state=TransactionState.READY)
    workflow.set_create_job_flags(data['create_job_flags'])

    if workflow.forked_from is not None:
        fork_config = dict_to_workflow_definition(
            data['fork_proposal_config'])
        # TODO: more validations
        if len(fork_config.job_definitions) != \
                len(template_proto.job_definitions):
            raise InvalidArgumentException(
                'Forked workflow\'s template does not match base workflow')
        workflow.set_fork_proposal_config(fork_config)
        # TODO: check that federated jobs have
        # same reuse policy on both sides
        workflow.set_peer_create_job_flags(data['peer_create_job_flags'])

    workflow.set_config(template_proto)
    db.session.add(workflow)
    db.session.commit()
    logging.info('Inserted a workflow to db')
    scheduler.wakeup(workflow.id)
    return {'data': workflow.to_dict()}, HTTPStatus.CREATED