コード例 #1
0
    def test_put_replace_config(self, httpsconnection_mock):
        """Replacing an existing config issues a GET (for the sha) then a PUT."""
        def _fake_getresponse():
            # First call: report the existing file and its sha.
            # Second call: report successful creation.
            self.assertLessEqual(self._call_count, 1)
            response = mock.Mock()
            if self._call_count == 0:
                response.status = httplib.OK
                response.read.return_value = '{"type":"file", "sha":"abc"}'
            else:
                response.status = httplib.CREATED
            self._call_count += 1
            return response

        connection = mock.Mock()
        httpsconnection_mock.return_value = connection
        connection.getresponse = _fake_getresponse

        GithubRepository()._put_config('/some_path', 'some_content')

        get_args = connection.request.call_args_list[0][0]
        self.assertEqual('GET', get_args[0])
        self.assertEqual('/api/v3/repos/data/configs/contents/some_path',
                         get_args[1])
        self.assertIsNone(get_args[2])

        put_args = connection.request.call_args_list[1][0]
        self.assertEqual('PUT', put_args[0])
        self.assertEqual('/api/v3/repos/data/configs/contents/some_path',
                         put_args[1])
        encoded_content = base64.b64encode('some_content')
        self.assertEqual('{"content": "%s", "committer": {"name": "Pinball", '
                         '"email": "*****@*****.**"}, "message": '
                         '"updating config", "sha": "abc"}' % encoded_content,
                         put_args[2])
コード例 #2
0
    def test_put_replace_config(self, httpsconnection_mock):
        """An existing config is replaced: GET fetches the sha, PUT writes."""
        def _respond():
            self.assertLessEqual(self._call_count, 1)
            fake_response = mock.Mock()
            if self._call_count == 0:
                # The file already exists; expose its sha for the PUT body.
                fake_response.status = httplib.OK
                fake_response.read.return_value = (
                    '{"type":"file", "sha":"abc"}')
            else:
                fake_response.status = httplib.CREATED
            self._call_count += 1
            return fake_response

        conn = mock.Mock()
        httpsconnection_mock.return_value = conn
        conn.getresponse = _respond

        repository = GithubRepository()
        repository._put_config('/some_path', 'some_content')

        first_call = conn.request.call_args_list[0][0]
        self.assertEqual('GET', first_call[0])
        self.assertEqual('/api/v3/repos/data/configs/contents/some_path',
                         first_call[1])
        self.assertIsNone(first_call[2])

        second_call = conn.request.call_args_list[1][0]
        self.assertEqual('PUT', second_call[0])
        self.assertEqual('/api/v3/repos/data/configs/contents/some_path',
                         second_call[1])
        encoded_content = base64.b64encode('some_content')
        self.assertEqual(
            '{"content": "%s", "committer": {"name": "Pinball", '
            '"email": "*****@*****.**"}, "message": '
            '"updating config", "sha": "abc"}' % encoded_content,
            second_call[2])
コード例 #3
0
    def test_get_config(self, httpsconnection_mock):
        """_get_config GETs the path and base64-decodes the 'content' field."""
        connection = mock.Mock()
        httpsconnection_mock.return_value = connection
        fake_response = mock.Mock()
        connection.getresponse.return_value = fake_response
        fake_response.status = httplib.OK
        payload = base64.b64encode('some_content')
        fake_response.read.return_value = (
            '{{"content": "{0!s}"}}'.format(payload))

        self.assertEqual('some_content',
                         GithubRepository()._get_config('/some_path'))

        request_args = connection.request.call_args[0]
        self.assertEqual('GET', request_args[0])
        self.assertEqual('/api/v3/repos/data/configs/contents/some_path',
                         request_args[1])
        self.assertIsNone(request_args[2])
コード例 #4
0
    def test_list_directory(self, httpsconnection_mock):
        """Directory entries get a trailing '/' in the listing; files do not."""
        connection = mock.Mock()
        httpsconnection_mock.return_value = connection
        listing_response = mock.Mock()
        connection.getresponse.return_value = listing_response
        listing_response.status = httplib.OK
        listing_response.read.return_value = (
            '[{"type":"dir", "name":"some_dir"},'
            ' {"type":"file", "name":"some_file"}]')

        paths = GithubRepository()._list_directory('/some_path/', False)
        self.assertEqual(['some_dir/', 'some_file'], paths)

        request_args = connection.request.call_args[0]
        self.assertEqual('GET', request_args[0])
        self.assertEqual('/api/v3/repos/data/configs/contents/some_path',
                         request_args[1])
        self.assertIsNone(request_args[2])
コード例 #5
0
    def test_get_config(self, httpsconnection_mock):
        """A config is fetched with a GET and base64-decoded from 'content'."""
        conn = mock.Mock()
        httpsconnection_mock.return_value = conn
        response = mock.Mock()
        response.status = httplib.OK
        response.read.return_value = (
            '{"content": "%s"}' % base64.b64encode('some_content'))
        conn.getresponse.return_value = response

        repository = GithubRepository()
        self.assertEqual('some_content',
                         repository._get_config('/some_path'))

        call_args = conn.request.call_args[0]
        self.assertEqual('GET', call_args[0])
        self.assertEqual('/api/v3/repos/data/configs/contents/some_path',
                         call_args[1])
        self.assertIsNone(call_args[2])
コード例 #6
0
    def test_list_directory(self, httpsconnection_mock):
        """_list_directory marks directories with a trailing slash."""
        conn = mock.Mock()
        httpsconnection_mock.return_value = conn
        response = mock.Mock()
        response.status = httplib.OK
        response.read.return_value = (
            '[{"type":"dir", "name":"some_dir"},'
            ' {"type":"file", "name":"some_file"}]')
        conn.getresponse.return_value = response

        repository = GithubRepository()
        self.assertEqual(['some_dir/', 'some_file'],
                         repository._list_directory('/some_path/', False))

        call_args = conn.request.call_args[0]
        self.assertEqual('GET', call_args[0])
        self.assertEqual('/api/v3/repos/data/configs/contents/some_path',
                         call_args[1])
        self.assertIsNone(call_args[2])
コード例 #7
0
 def __init__(self, params=None):
     """Initialize the parser with a Github-backed config repository.

     Args:
         params: Unused here; accepted for interface compatibility.
     """
     self._repository = GithubRepository()
コード例 #8
0
class RepositoryConfigParser(ConfigParser):
    """Builds Pinball tokens out of workflow configs kept in a repository.

    Job and schedule configs are read from a Github-backed repository and
    converted to schedule tokens, job tokens, and triggering-event tokens.
    """

    def __init__(self, params=None):
        # params is accepted for interface compatibility and not used here.
        self._repository = GithubRepository()

    @staticmethod
    def _job_config_to_job(job_config, outputs):
        """Create a job from a job config.

        Args:
            job_config: The job config.
            outputs: The list of job output names.
        Returns:
            Pinball job constructed from the config.
        """
        # Resolve the template class named by the config, instantiate it,
        # and let it build the concrete Pinball job.
        template_class = load_path(job_config.template)
        template = template_class(job_config.job,
                                  max_attempts=job_config.max_attempts,
                                  emails=job_config.emails,
                                  priority=job_config.priority)
        return template.get_pinball_job(job_config.parents,
                                        outputs,
                                        job_config.template_params)

    @staticmethod
    def _condition_config_to_condition(condition_config, outputs):
        """Create a condition from a condition config.

        Args:
            condition_config: The condition config.
            outputs: The list of condition output names.
        Returns:
            Pinball condition constructed from the config.
        """
        template_class = load_path(condition_config.template)
        template = template_class(
            condition_config.job,
            max_attempts=condition_config.max_attempts,
            retry_delay_sec=condition_config.retry_delay_sec,
            emails=condition_config.emails,
            priority=condition_config.priority)
        return template.get_pinball_condition(
            outputs, params=condition_config.template_params)

    @staticmethod
    def _job_config_to_token(workflow, instance, job_config, job_outputs):
        """Create a job token from a job config.

        Args:
            workflow: The workflow name.
            instance: The workflow instance.
            job_config: The job config to create token from.
            job_outputs: The names of the job outputs.
        Returns:
            Job token constructed from the job config.
        """
        # Conditions and regular jobs are built by different factories.
        if job_config.is_condition:
            make = RepositoryConfigParser._condition_config_to_condition
        else:
            make = RepositoryConfigParser._job_config_to_job
        job = make(job_config, job_outputs)
        waiting_name = Name(workflow=workflow,
                            instance=instance,
                            job_state=Name.WAITING_STATE,
                            job=job_config.job)
        return Token(name=waiting_name.get_job_token_name(),
                     data=pickle.dumps(job))

    def get_schedule_token(self, workflow):
        """Build the schedule token for the given workflow's config."""
        config = self._repository.get_schedule(workflow)
        next_run = schedule_to_timestamp(config.time, config.start_date)
        schedule = WorkflowSchedule(
            next_run_time=next_run,
            recurrence_seconds=recurrence_str_to_sec(config.recurrence),
            overrun_policy=OverrunPolicy.from_string(config.overrun_policy),
            workflow=config.workflow,
            emails=config.emails,
            # TODO(mao): make this flexible so users can specify it
            # through the UI.
            max_running_instances=(
                PinballConfig.DEFAULT_MAX_WORKFLOW_RUNNING_INSTANCES))
        schedule.advance_next_run_time()
        name = Name(workflow=config.workflow)
        return Token(name=name.get_workflow_schedule_token_name(),
                     owner='parser',
                     expirationTime=schedule.next_run_time,
                     data=pickle.dumps(schedule))

    def get_workflow_tokens(self, workflow):
        """Create job tokens plus start events for a new workflow instance."""
        # TODO(pawel): add workflow connectivity check.
        configs = {}
        top_level = []
        for job_name in self._repository.get_job_names(workflow):
            config = self._repository.get_job(workflow, job_name)
            configs[job_name] = config
            if not config.parents:
                # Parentless jobs hang off the synthetic workflow start input.
                top_level.append(job_name)
                config.parents = [Name.WORKFLOW_START_INPUT]

        # Invert the parent relation: parent name -> list of child job names.
        outputs_by_parent = collections.defaultdict(list)
        for config in configs.values():
            for parent in config.parents:
                outputs_by_parent[parent].append(config.job)

        instance = get_unique_workflow_instance()

        # One job token per config.
        tokens = [
            RepositoryConfigParser._job_config_to_token(
                workflow, instance, config, outputs_by_parent[config.job])
            for config in configs.values()]

        # Each top-level job is kicked off by an explicit start event.
        for job_name in top_level:
            event = Event(creator='repository_config_parser')
            event_name = Name(workflow=workflow,
                              instance=instance,
                              job=job_name,
                              input_name=Name.WORKFLOW_START_INPUT,
                              event='workflow_start_event')
            tokens.append(Token(name=event_name.get_event_token_name(),
                                data=pickle.dumps(event)))

        return tokens

    def get_workflow_names(self):
        """Return the names of all workflows known to the repository."""
        return self._repository.get_workflow_names()
コード例 #9
0
 def __init__(self, params=None):
     """Initialize the parser with a Github-backed config repository.

     Args:
         params: Unused here; accepted for interface compatibility.
     """
     self._repository = GithubRepository()
コード例 #10
0
class RepositoryConfigParser(ConfigParser):
    """Turns workflow configs stored in a repository into Pinball tokens."""

    def __init__(self, params=None):
        # params is unused here; kept to match the parser interface.
        self._repository = GithubRepository()

    @staticmethod
    def _job_config_to_job(job_config, outputs):
        """Create a job from a job config.

        Args:
            job_config: The job config.
            outputs: The list of job output names.
        Returns:
            Pinball job constructed from the config.
        """
        # The config names a template; instantiate it and delegate job
        # construction to the template.
        job_template = load_path(job_config.template)(
            job_config.job,
            max_attempts=job_config.max_attempts,
            emails=job_config.emails,
            priority=job_config.priority)
        return job_template.get_pinball_job(
            job_config.parents, outputs, job_config.template_params)

    @staticmethod
    def _condition_config_to_condition(condition_config, outputs):
        """Create a condition from a condition config.

        Args:
            condition_config: The condition config.
            outputs: The list of condition output names.
        Returns:
            Pinball condition constructed from the config.
        """
        condition_template = load_path(condition_config.template)(
            condition_config.job,
            max_attempts=condition_config.max_attempts,
            retry_delay_sec=condition_config.retry_delay_sec,
            emails=condition_config.emails,
            priority=condition_config.priority)
        return condition_template.get_pinball_condition(
            outputs, params=condition_config.template_params)

    @staticmethod
    def _job_config_to_token(workflow, instance, job_config, job_outputs):
        """Create a job token from a job config.

        Args:
            workflow: The workflow name.
            instance: The workflow instance.
            job_config: The job config to create token from.
            job_outputs: The names of the job outputs.
        Returns:
            Job token constructed from the job config.
        """
        # Pick the factory matching the config kind (condition vs. job).
        to_job = (RepositoryConfigParser._condition_config_to_condition
                  if job_config.is_condition
                  else RepositoryConfigParser._job_config_to_job)
        job = to_job(job_config, job_outputs)
        token_name = Name(workflow=workflow,
                          instance=instance,
                          job_state=Name.WAITING_STATE,
                          job=job_config.job)
        return Token(name=token_name.get_job_token_name(),
                     data=pickle.dumps(job))

    def get_schedule_token(self, workflow):
        """Build the schedule token for the given workflow."""
        schedule_config = self._repository.get_schedule(workflow)
        schedule = WorkflowSchedule(
            next_run_time=schedule_to_timestamp(schedule_config.time,
                                                schedule_config.start_date),
            recurrence_seconds=recurrence_str_to_sec(
                schedule_config.recurrence),
            overrun_policy=OverrunPolicy.from_string(
                schedule_config.overrun_policy),
            workflow=schedule_config.workflow,
            emails=schedule_config.emails,
            # TODO(mao): make this flexible so users can specify it
            # through the UI.
            max_running_instances=(
                PinballConfig.DEFAULT_MAX_WORKFLOW_RUNNING_INSTANCES))
        schedule.advance_next_run_time()
        token_name = Name(
            workflow=schedule_config.workflow
        ).get_workflow_schedule_token_name()
        return Token(name=token_name,
                     owner='parser',
                     expirationTime=schedule.next_run_time,
                     data=pickle.dumps(schedule))

    def get_workflow_tokens(self, workflow):
        """Create the job and triggering-event tokens for one instance."""
        # TODO(pawel): add workflow connectivity check.
        job_configs = {}
        top_level_job_names = []
        for job_name in self._repository.get_job_names(workflow):
            job_config = self._repository.get_job(workflow, job_name)
            job_configs[job_name] = job_config
            if not job_config.parents:
                # Roots are attached to the synthetic workflow start input.
                top_level_job_names.append(job_name)
                job_config.parents = [Name.WORKFLOW_START_INPUT]

        # Map each parent name to the jobs that consume its output.
        job_outputs = collections.defaultdict(list)
        for job_config in job_configs.values():
            for parent_job_name in job_config.parents:
                job_outputs[parent_job_name].append(job_config.job)

        instance = get_unique_workflow_instance()
        result = []

        # Convert job configs to job tokens.
        for job_config in job_configs.values():
            result.append(RepositoryConfigParser._job_config_to_token(
                workflow, instance, job_config,
                job_outputs[job_config.job]))

        # Create triggering events for top-level jobs.
        for job_name in top_level_job_names:
            event = Event(creator='repository_config_parser')
            event_name = Name(workflow=workflow,
                              instance=instance,
                              job=job_name,
                              input_name=Name.WORKFLOW_START_INPUT,
                              event='workflow_start_event')
            result.append(Token(name=event_name.get_event_token_name(),
                                data=pickle.dumps(event)))

        return result

    def get_workflow_names(self):
        """Return all workflow names available in the repository."""
        return self._repository.get_workflow_names()