Example #1 (score: 0)
    def test_is_peer_job_inheritance_matched(self, mock_get_workflow):
        """Checks is_peer_job_inheritance_matched() against a forked workflow.

        The peer exposes two jobs, only 'train-job' federated; the local
        fork declares matching (then mismatching) create_job_flags and the
        helper must report True (then False).
        """
        # Peer side: two jobs, only 'train-job' is federated.
        peer_job_0 = JobDefinition(name='raw-data-job')
        peer_job_1 = JobDefinition(name='train-job', is_federated=True)
        peer_config = WorkflowDefinition()
        peer_config.job_definitions.extend([peer_job_0, peer_job_1])
        resp = GetWorkflowResponse(config=peer_config)
        mock_get_workflow.return_value = resp

        # Local side: a single federated job with the same name as the
        # peer's federated one.
        job_0 = JobDefinition(name='train-job', is_federated=True)
        config = WorkflowDefinition(job_definitions=[job_0])

        project = Project()
        participant = project_pb2.Participant()
        project.set_config(project_pb2.Project(participants=[participant]))
        workflow0 = Workflow(project=project)
        workflow0.set_config(config)
        db.session.add(workflow0)
        # commit() flushes pending state itself; the original extra
        # flush() after commit was a no-op and has been removed.
        db.session.commit()
        workflow1 = Workflow(project=project, forked_from=workflow0.id)
        workflow1.set_config(config)
        workflow1.set_create_job_flags([CreateJobFlag.REUSE])
        # Peer flags appear to be positional over the peer's job list:
        # the federated 'train-job' is the peer's second job, so index 1
        # (REUSE) is what must match — TODO confirm against the helper.
        workflow1.set_peer_create_job_flags(
            [CreateJobFlag.NEW, CreateJobFlag.REUSE])

        self.assertTrue(is_peer_job_inheritance_matched(workflow1))

        # Local NEW vs peer REUSE on the federated job -> mismatch.
        workflow1.set_create_job_flags([CreateJobFlag.NEW])
        self.assertFalse(is_peer_job_inheritance_matched(workflow1))
Example #2 (score: 0)
    def post(self):
        """Creates a workflow, optionally forked from an existing one.

        Returns the created workflow as a dict with HTTP 201; raises
        ResourceConflictException on a duplicate name and
        InvalidArgumentException when a fork proposal's job count does not
        match the base template.
        """
        req = reqparse.RequestParser()
        req.add_argument('name', required=True, help='name is empty')
        req.add_argument('project_id', type=int, required=True,
                         help='project_id is empty')
        # TODO: should verify if the config is compatible with
        # workflow template
        req.add_argument('config', type=dict, required=True,
                         help='config is empty')
        req.add_argument('forkable', type=bool, required=True,
                         help='forkable is empty')
        req.add_argument('forked_from', type=int, required=False,
                         help='fork from base workflow')
        req.add_argument('reuse_job_names', type=list, required=False,
                         location='json', help='fork and inherit jobs')
        req.add_argument('peer_reuse_job_names', type=list, required=False,
                         location='json',
                         help='peer fork and inherit jobs')
        req.add_argument('fork_proposal_config', type=dict, required=False,
                         help='fork and edit peer config')
        req.add_argument('comment')
        args = req.parse_args()

        name = args['name']
        # Workflow names are unique; reject duplicates up front.
        if Workflow.query.filter_by(name=name).first() is not None:
            raise ResourceConflictException(
                'Workflow {} already exists.'.format(name))

        # form to proto buffer
        config_proto = dict_to_workflow_definition(args['config'])
        workflow = Workflow(name=name,
                            comment=args['comment'],
                            project_id=args['project_id'],
                            forkable=args['forkable'],
                            forked_from=args['forked_from'],
                            state=WorkflowState.NEW,
                            target_state=WorkflowState.READY,
                            transaction_state=TransactionState.READY)

        if workflow.forked_from is not None:
            proposal = dict_to_workflow_definition(
                args['fork_proposal_config'])
            # TODO: more validations
            # The fork proposal must mirror the base template job-for-job.
            if len(proposal.job_definitions) != \
                    len(config_proto.job_definitions):
                raise InvalidArgumentException(
                    'Forked workflow\'s template does not match base workflow')
            workflow.set_fork_proposal_config(proposal)
            workflow.set_reuse_job_names(args['reuse_job_names'])
            workflow.set_peer_reuse_job_names(args['peer_reuse_job_names'])

        workflow.set_config(config_proto)
        db.session.add(workflow)
        db.session.commit()
        logging.info('Inserted a workflow to db')
        # Nudge the scheduler so the new workflow is picked up immediately.
        scheduler.wakeup(workflow.id)
        return {'data': workflow.to_dict()}, HTTPStatus.CREATED
Example #3 (score: 0)
    def post(self):
        """Creates a new workflow from a posted config.

        Returns the created workflow as a dict with HTTP 201; raises
        ResourceConflictException when a workflow with the same name
        already exists.
        """
        parser = reqparse.RequestParser()
        parser.add_argument('name', required=True, help='name is empty')
        parser.add_argument('project_id', type=int, required=True,
                            help='project_id is empty')
        # TODO: should verify if the config is compatible with
        # workflow template
        parser.add_argument('config', type=dict, required=True,
                            help='config is empty')
        parser.add_argument('forkable', type=bool, required=True,
                            help='forkable is empty')
        # Fixed copy-pasted help text ('forkable is empty') to describe
        # this argument, matching the sibling handler's wording.
        parser.add_argument('forked_from', type=int, required=False,
                            help='fork from base workflow')
        parser.add_argument('comment')
        data = parser.parse_args()

        name = data['name']
        # Workflow names are unique; reject duplicates up front.
        if Workflow.query.filter_by(name=name).first() is not None:
            raise ResourceConflictException(
                'Workflow {} already exists.'.format(name))

        # form to proto buffer
        template_proto = dict_to_workflow_definition(data['config'])
        workflow = Workflow(name=name, comment=data['comment'],
                            project_id=data['project_id'],
                            forkable=data['forkable'],
                            forked_from=data['forked_from'],
                            state=WorkflowState.NEW,
                            target_state=WorkflowState.READY,
                            transaction_state=TransactionState.READY)
        workflow.set_config(template_proto)
        db.session.add(workflow)
        db.session.commit()
        logging.info('Inserted a workflow to db')
        # Nudge the scheduler so the new workflow is picked up immediately.
        scheduler.wakeup(workflow.id)
        return {'data': workflow.to_dict()}, HTTPStatus.CREATED
Example #4 (score: 0)
    def post(self):
        """Creates a workflow (optionally forked) with create-job flags.

        Assigns a short uuid suitable as a DNS-1035 label, returns the
        created workflow as a dict with HTTP 201, and raises
        ResourceConflictException / InvalidArgumentException on a duplicate
        name or a mismatched fork proposal.
        """
        req = reqparse.RequestParser()
        req.add_argument('name', required=True, help='name is empty')
        req.add_argument('project_id',
                         type=int,
                         required=True,
                         help='project_id is empty')
        # TODO: should verify if the config is compatible with
        # workflow template
        req.add_argument('config',
                         type=dict,
                         required=True,
                         help='config is empty')
        req.add_argument('forkable',
                         type=bool,
                         required=True,
                         help='forkable is empty')
        req.add_argument('forked_from',
                         type=int,
                         required=False,
                         help='fork from base workflow')
        req.add_argument('create_job_flags',
                         type=list,
                         required=False,
                         location='json',
                         help='flags in common.CreateJobFlag')
        req.add_argument('peer_create_job_flags',
                         type=list,
                         required=False,
                         location='json',
                         help='peer flags in common.CreateJobFlag')
        req.add_argument('fork_proposal_config',
                         type=dict,
                         required=False,
                         help='fork and edit peer config')
        req.add_argument('comment')
        args = req.parse_args()
        name = args['name']
        # Workflow names are unique; reject duplicates up front.
        if Workflow.query.filter_by(name=name).first() is not None:
            raise ResourceConflictException(
                'Workflow {} already exists.'.format(name))

        # form to proto buffer
        config_proto = dict_to_workflow_definition(args['config'])
        workflow = Workflow(
            name=name,
            # 20 bytes
            # a DNS-1035 label must start with an
            # alphabetic character. substring uuid[:19] has
            # no collision in 10 million draws
            uuid=f'u{uuid4().hex[:19]}',
            comment=args['comment'],
            project_id=args['project_id'],
            forkable=args['forkable'],
            forked_from=args['forked_from'],
            state=WorkflowState.NEW,
            target_state=WorkflowState.READY,
            transaction_state=TransactionState.READY)
        workflow.set_create_job_flags(args['create_job_flags'])

        if workflow.forked_from is not None:
            proposal = dict_to_workflow_definition(
                args['fork_proposal_config'])
            # TODO: more validations
            # The fork proposal must mirror the base template job-for-job.
            if len(proposal.job_definitions) != \
                    len(config_proto.job_definitions):
                raise InvalidArgumentException(
                    'Forked workflow\'s template does not match base workflow')
            workflow.set_fork_proposal_config(proposal)
            # TODO: check that federated jobs have
            #       same reuse policy on both sides
            workflow.set_peer_create_job_flags(args['peer_create_job_flags'])

        workflow.set_config(config_proto)
        db.session.add(workflow)
        db.session.commit()
        logging.info('Inserted a workflow to db')
        # Nudge the scheduler so the new workflow is picked up immediately.
        scheduler.wakeup(workflow.id)
        return {'data': workflow.to_dict()}, HTTPStatus.CREATED