Beispiel #1
0
def jobs_stop(self,
              project_name,
              project_uuid,
              job_name,
              job_uuid,
              specification,
              update_status=True):
    """Stop a scheduled job, retrying the task if deletion did not go through.

    When the scheduler reports the job was not deleted and fewer than two
    retries have been attempted, the task re-enqueues itself. Otherwise the
    job's status is optionally updated to STOPPED.
    """
    spec = JobSpecification.read(specification)
    was_deleted = job_scheduler.stop_job(
        project_name=project_name,
        project_uuid=project_uuid,
        job_name=job_name,
        job_uuid=job_uuid,
        specification=spec)

    if not was_deleted and self.request.retries < 2:
        # Deletion did not succeed yet; schedule another attempt.
        _logger.info('Trying again to delete job `%s`.', job_name)
        self.retry(countdown=Intervals.EXPERIMENTS_SCHEDULER)
        return

    if not update_status:
        return

    job = get_valid_job(job_uuid=job_uuid)
    if not job:
        return None

    # Reflect the stop in the persisted job status.
    job.set_status(status=JobLifeCycle.STOPPED, message='Job was stopped')
Beispiel #2
0
def validate_job_spec_config(config, raise_for_rest: bool = False):
    """Parse ``config`` into a job specification, translating parse errors.

    Raises a REST-framework ``ValidationError`` when ``raise_for_rest`` is
    true, otherwise a ``DjangoValidationError``, in both cases wrapping the
    underlying parsing error message.
    """
    try:
        return JobSpecification.read(config)
    except (MarshmallowValidationError,
            PolyaxonfileError,
            PolyaxonConfigurationError) as e:
        message_error = 'Received non valid job specification config. %s' % e
        # Pick the exception type expected by the caller's context.
        error_cls = ValidationError if raise_for_rest else DjangoValidationError
        raise error_cls(message_error)
Beispiel #3
0
    def test_get_with_environment(self):
        """GET a job whose spec declares a full environment section.

        The spec exercises node_selector, tolerations, affinity and two
        distinct resource kinds (gpu and cpu).
        """
        # NOTE: the resources mapping previously declared `gpu` twice — a
        # duplicate YAML key where the second entry silently overrides the
        # first. The second entry is now `cpu` so both survive parsing.
        spec_content = """---
            version: 1

            kind: job

            environment:
              node_selector: 
                foo: bar
              tolerations:
                - key: "key"
                  operator: "Equal"
                  value: "value"
                  effect: "NoSchedule"
              affinity:
                foo: bar
              resources:
                gpu:
                  requests: 1
                  limits: 1
                cpu:
                  requests: 1
                  limits: 1

            build:
              image: my_image

            run:
              cmd: do_something
        """
        spec_parsed_content = JobSpecification.read(spec_content)

        project = ProjectFactory(user=self.auth_client.user)
        exp = self.factory_class(project=project, config=spec_parsed_content.parsed_data)
        url = '/{}/{}/{}/jobs/{}/'.format(API_V1,
                                          project.user.username,
                                          project.name,
                                          exp.id)

        resp = self.auth_client.get(url)
        assert resp.status_code == status.HTTP_200_OK
        exp.refresh_from_db()
        # The serialized DB state must match the API payload exactly.
        assert resp.data == self.serializer_class(exp).data
Beispiel #4
0
    def test_create_job_with_resources_spec(self, spawner_mock):
        """Creating a job from a resources spec drives it to SCHEDULED."""
        config = JobSpecification.read(job_spec_resources_content)
        spawner = spawner_mock.return_value
        spawner.start_job.return_value = {'pod': 'pod_content'}
        spawner.spec = config

        # Skip the dockerizer: pretend the image already exists.
        with patch('scheduler.dockerizer_scheduler.start_dockerizer') as mock_start, \
                patch('scheduler.dockerizer_scheduler.check_image') as mock_check:
            mock_start.return_value = False
            mock_check.return_value = True
            job = JobFactory(config=config.parsed_data)

        statuses = JobStatus.objects.filter(job=job)
        assert statuses.count() == 2
        expected = [JobLifeCycle.CREATED, JobLifeCycle.SCHEDULED]
        assert list(statuses.values_list('status', flat=True)) == expected

        job.refresh_from_db()
        assert job.last_status == JobLifeCycle.SCHEDULED
Beispiel #5
0
    def test_create_job_with_valid_spec(self, spawner_mock):
        """A job backed by a succeeded build reaches the SCHEDULED state."""
        config = JobSpecification.read(job_spec_content)

        spawner = spawner_mock.return_value
        spawner.start_job.return_value = {'pod': 'pod_content'}
        spawner.spec = config

        # Provide a pre-built, succeeded build job so scheduling proceeds.
        with patch('scheduler.dockerizer_scheduler.create_build_job'
                   ) as mock_start:
            build = BuildJobFactory()
            BuildJobStatus.objects.create(status=JobLifeCycle.SUCCEEDED,
                                          job=build)
            mock_start.return_value = build, True, True
            job = JobFactory(config=config.parsed_data)

        statuses = JobStatus.objects.filter(job=job)
        assert statuses.count() == 2
        recorded = list(statuses.values_list('status', flat=True))
        assert recorded == [JobLifeCycle.CREATED, JobLifeCycle.SCHEDULED]

        job.refresh_from_db()
        assert job.last_status == JobLifeCycle.SCHEDULED
Beispiel #6
0
def jobs_stop(project_name,
              project_uuid,
              job_name,
              job_uuid,
              specification,
              update_status=True):
    """Stop a scheduled job and optionally record the STOPPED status.

    Unlike the retrying task variant, this function fires the stop request
    once and does not inspect the scheduler's result.
    """
    spec = JobSpecification.read(specification)
    job_scheduler.stop_job(project_name=project_name,
                           project_uuid=project_uuid,
                           job_name=job_name,
                           job_uuid=job_uuid,
                           specification=spec)

    if not update_status:
        return

    job = get_valid_job(job_uuid=job_uuid)
    if not job:
        return None

    # Persist the stop so the UI shows the job as stopped.
    job.set_status(status=JobLifeCycle.STOPPED, message='Job was stopped')
Beispiel #7
0
# Minimal job spec fixture: a bare build + run with no environment section.
job_spec_content = """---
    version: 1

    kind: job
    
    tags: [fixtures]

    build:
      image: my_image
    
    run:
      cmd: test
"""

# Pre-parsed form of the fixture for tests that need a specification object.
job_spec_parsed_content = JobSpecification.read(job_spec_content)

job_spec_resources_content = """---
    version: 1

    kind: job
    
    tags: [fixtures]
    
    environment:
      resources:
        cpu:
          requests: 1
          limits: 1    
        memory:
          requests: 100
Beispiel #8
0
 def create_job(self, config):
     """Build a job in this fixture's project from a raw spec config."""
     parsed = JobSpecification.read(config)
     return JobFactory(config=parsed.parsed_data, project=self.project)