Example #1
 def test_patch_create_job_flags(self):
     wd = WorkflowDefinition()
     jd = wd.job_definitions.add()
     workflow = Workflow(
         name='test-workflow',
         project_id=123,
         config=wd.SerializeToString(),
         forkable=False,
         state=WorkflowState.READY,
     )
     db.session.add(workflow)
     db.session.flush()
     job = Job(name='test_job',
               job_type=JobType(1),
               config=jd.SerializeToString(),
               workflow_id=workflow.id,
               project_id=123,
               state=JobState.STOPPED,
               is_disabled=False)
     db.session.add(job)
     db.session.flush()
     workflow.job_ids = str(job.id)
     db.session.commit()
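     # Setting the only job's create_job_flag to 3 should disable it.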
     response = self.patch_helper(f'/api/v2/workflows/{workflow.id}',
                                  data={'create_job_flags': [3]})
     self.assertEqual(response.status_code, HTTPStatus.OK)
     patched_job = Job.query.get(job.id)
     self.assertEqual(patched_job.is_disabled, True)
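     # Setting the flag back to 1 should re-enable the job.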
     response = self.patch_helper(f'/api/v2/workflows/{workflow.id}',
                                  data={'create_job_flags': [1]})
     self.assertEqual(response.status_code, HTTPStatus.OK)
     patched_job = Job.query.get(job.id)
     self.assertEqual(patched_job.is_disabled, False)
Example #2
 def is_model_related_job(job):
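     """Returns True if the job trains or evaluates an NN or tree model.

     Accepts job.job_type as either a JobType member or its integer value.
     """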
     job_type = job.job_type
     if isinstance(job_type, int):
         job_type = JobType(job.job_type)
     return job_type in [
         JobType.NN_MODEL_TRANINING, JobType.NN_MODEL_EVALUATION,
         JobType.TREE_MODEL_TRAINING, JobType.TREE_MODEL_EVALUATION
     ]
Example #3
    def _setup_jobs(self):
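        """Creates a Job for each job definition, honoring create_job_flags,
        and links the dependencies between them."""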
        if self.forked_from is not None:
            trunk = Workflow.query.get(self.forked_from)
            assert trunk is not None, \
                'Source workflow %d not found' % self.forked_from
            trunk_job_defs = trunk.get_config().job_definitions
            trunk_name2index = {
                job.name: i
                for i, job in enumerate(trunk_job_defs)
            }

        job_defs = self.get_config().job_definitions
        flags = self.get_create_job_flags()
        assert len(job_defs) == len(flags), \
            'Number of job defs does not match number of create_job_flags ' \
            '%d vs %d' % (len(job_defs), len(flags))
        jobs = []
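        # A REUSE flag points at the matching job of the trunk (forked-from)
        # workflow; any other flag creates a fresh Job row for this workflow.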
        for i, (job_def, flag) in enumerate(zip(job_defs, flags)):
            if flag == common_pb2.CreateJobFlag.REUSE:
                assert job_def.name in trunk_name2index, \
                    f'Job {job_def.name} not found in base workflow'
                j = trunk.get_job_ids()[trunk_name2index[job_def.name]]
                job = Job.query.get(j)
                assert job is not None, \
                    'Job %d not found' % j
                # TODO: check that forked jobs do not depend on non-forked jobs
            else:
                job = Job(
                    name=f'{self.uuid}-{job_def.name}',
                    job_type=JobType(job_def.job_type),
                    config=job_def.SerializeToString(),
                    workflow_id=self.id,
                    project_id=self.project_id,
                    state=JobState.NEW,
                    is_disabled=(flag == common_pb2.CreateJobFlag.DISABLED))
                db.session.add(job)
            jobs.append(job)
        db.session.flush()
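        # Wire up dependencies between the newly created jobs; reused jobs are skipped.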
        name2index = {job.name: i for i, job in enumerate(job_defs)}
        for i, (job, flag) in enumerate(zip(jobs, flags)):
            if flag == common_pb2.CreateJobFlag.REUSE:
                continue
            for j, dep_def in enumerate(job.get_config().dependencies):
                dep = JobDependency(
                    src_job_id=jobs[name2index[dep_def.source]].id,
                    dst_job_id=job.id,
                    dep_index=j)
                db.session.add(dep)

        self.set_job_ids([job.id for job in jobs])
        if Features.FEATURE_MODEL_WORKFLOW_HOOK:
            for job in jobs:
                ModelService(db.session).workflow_hook(job)
Example #4
    def _setup_jobs(self):
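        """Creates a Job for each job definition, reusing those named in
        reuse_job_names, and links the dependencies between them."""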
        if self.forked_from is not None:
            trunk = Workflow.query.get(self.forked_from)
            assert trunk is not None, \
                'Source workflow %d not found' % self.forked_from
            trunk_job_defs = trunk.get_config().job_definitions
            trunk_name2index = {
                job.name: i
                for i, job in enumerate(trunk_job_defs)
            }
        else:
            assert not self.get_reuse_job_names()

        job_defs = self.get_config().job_definitions
        jobs = []
        reuse_jobs = set(self.get_reuse_job_names())
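        # Job definitions listed in reuse_job_names are resolved to existing
        # jobs of the trunk workflow; all others get a brand-new Job row.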
        for i, job_def in enumerate(job_defs):
            if job_def.name in reuse_jobs:
                assert job_def.name in trunk_name2index, \
                    "Job %s not found in base workflow"%job_def.name
                j = trunk.get_job_ids()[trunk_name2index[job_def.name]]
                job = Job.query.get(j)
                assert job is not None, \
                    'Job %d not found' % j
                # TODO: check that forked jobs do not depend on non-forked jobs
            else:
                job = Job(name=f'{self.name}-{job_def.name}',
                          job_type=JobType(job_def.type),
                          config=job_def.SerializeToString(),
                          workflow_id=self.id,
                          project_id=self.project_id,
                          state=JobState.STOPPED)
                job.set_yaml_template(job_def.yaml_template)
                db.session.add(job)
            jobs.append(job)
        db.session.commit()

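        # Register dependencies between the newly created jobs; reused jobs are skipped.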
        name2index = {job.name: i for i, job in enumerate(job_defs)}
        for i, job in enumerate(jobs):
            if job.name in reuse_jobs:
                continue
            for j, dep_def in enumerate(job.get_config().dependencies):
                dep = JobDependency(
                    src_job_id=jobs[name2index[dep_def.source]].id,
                    dst_job_id=job.id,
                    dep_index=j)
                db.session.add(dep)

        self.set_job_ids([job.id for job in jobs])

        db.session.commit()
Example #5
def add_fake_workflow(session):
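    """Inserts a READY workflow with a single STOPPED job for tests and
    returns (workflow, job)."""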
    wd = WorkflowDefinition()
    jd = wd.job_definitions.add()
    workflow = Workflow(
        name='test-workflow',
        project_id=123,
        config=wd.SerializeToString(),
        forkable=False,
        state=WorkflowState.READY,
    )
    session.add(workflow)
    session.flush()
    job = Job(name='test_job',
              job_type=JobType(1),
              config=jd.SerializeToString(),
              workflow_id=workflow.id,
              project_id=123,
              state=JobState.STOPPED,
              is_disabled=False)
    session.add(job)
    session.flush()
    workflow.job_ids = str(job.id)
    session.commit()
    return workflow, job