Example #1
 def _list_directory(self, directory, allow_not_found):
     location = 'repos/%s/configs/contents%s' % (PinballConfig.USER, directory)
     # Remove the trailing slash.
     assert directory[-1] == '/'
     location = location[:-1]
     # Check if path exists.
     expected_status = [httplib.OK]
     if allow_not_found:
         expected_status.append(httplib.NOT_FOUND)
     response_json = self._make_request('GET', location, None,
                                        expected_status)
     response = json.loads(response_json)
     file_type = GithubRepository._get_file_type(response)
     if not file_type:
         assert allow_not_found
         return []
     if file_type != 'dir':
         raise PinballException('path %s is not a dir but a %s' %
                                (directory, file_type))
     assert type(response) == list
     result = []
     for entry in response:
         file_type = GithubRepository._get_file_type(entry)
         if file_type == 'file':
             result.append(entry['name'])
         elif file_type == 'dir':
             result.append('%s/' % entry['name'])
         else:
             raise PinballException('found content %s of unsupported type '
                                    '%s' % (entry['path'], file_type))
     return result
Example #2
    @classmethod
    def from_dict(cls, query_dict):
        """Construct configuration object from a QueryDict.

        Args:
            query_dict: The QueryDict describing the configuration.
        Returns:
            Configuration object described by the query_dict.
        """
        for attribute in cls._REQUIRED_ATTRIBUTES:
            if attribute not in query_dict:
                raise PinballException('attribute %s not found in query_dict '
                                       '%s' % (attribute, query_dict))
        for attribute in query_dict.keys():
            if ((attribute not in cls._REQUIRED_ATTRIBUTES)
                    and (attribute not in cls._OPTIONAL_ATTRIBUTES)):
                raise PinballException('unrecognized attribute %s found in '
                                       'query_dict %s' %
                                       (attribute, query_dict))
        result = cls()
        for key, value in query_dict.items():
            setattr(result, key, value)
        for attribute in cls._OPTIONAL_ATTRIBUTES:
            if attribute not in query_dict:
                setattr(result, attribute, None)
        result._validate()
        return result
Example #3
    def _put_config(self, path, content):
        location = 'repos/%s/configs/contents%s' % (PinballConfig.USER, path)
        encoded_content = base64.b64encode(content)
        body = {'message': 'updating config',
                'committer': {'name': GithubRepository._COMMITTER_NAME,
                              'email': GithubRepository._COMMITTER_EMAIL},
                'content': encoded_content}

        # Check if path exists.
        response = self._make_request('GET', location, None,
                                      [httplib.OK, httplib.NOT_FOUND])
        response_json = json.loads(response)
        file_type = GithubRepository._get_file_type(response_json)
        if file_type:
            # Path exists.
            if file_type != 'file':
                raise PinballException('path %s is not a file but a %s' %
                                       (path, file_type))
            body['sha'] = response_json['sha']

        # Work around a bug in Github.  See
        # http://stackoverflow.com/questions/19576601/\
        # github-api-issue-with-file-upload
        time.sleep(0.5)

        # Create or update the file.
        self._make_request('PUT', location, json.dumps(body),
                           [httplib.CREATED, httplib.OK])
Example #4
    def _make_request(self, method, location, body, expected_status):
        """Make a request through the Github API.

        Args:
            method: The request method.
            location: The request path.
            body: The body of the request message.
            expected_status: List of http status codes expected on a successful
                request.
        Returns:
            The API response.
        """
        conn = httplib.HTTPSConnection(PinballConfig.GITHUB_SERVER,
                                       timeout=PinballConfig.GITHUB_HTTP_TIMEOUT_SEC)
        location = urlparse.urljoin(PinballConfig.GITHUB_API_URI, location)
        authorization = base64.b64encode('%s:x-oauth-basic' %
                                         PinballConfig.GITHUB_OAUTH_TOKEN)
        headers = {'Authorization': 'Basic %s' % authorization}
        conn.request(method, location, body, headers)
        response = conn.getresponse()
        if response.status not in expected_status:
            raise PinballException('failed request to %s method %s location '
                                   '%s status %s reason %s content %s' %
                                   (PinballConfig.GITHUB_API_URI, method, location,
                                    response.status, response.reason,
                                    response.read()))
        return response.read()
Example #5
    def get_token_paths(self, path):
        """Get token paths data from the store.

        Args:
            path: The path is the name prefix of the parent whose direct
                children should be returned.
        Returns:
            List of direct path descendants of the parent.
        """
        if not path.startswith(Name.DELIMITER):
            raise PinballException('incorrectly formatted path %s' % path)
        # TODO(pawel): this is a bit inefficient as it may load names of quite
        # a few tokens into the memory.
        token_names = self._store.read_token_names(name_prefix=path)
        counts = collections.defaultdict(int)
        path_len = len(path)
        for token_name in token_names:
            index = token_name.find(Name.DELIMITER, path_len)
            if index == -1:
                index = len(token_name)
            else:
                index += 1
            group = token_name[:index]
            counts[group] += 1
        result = []
        for path, count in counts.items():
            result.append(TokenPathData(path, count))
        return result
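To make the prefix grouping above concrete, here is a small standalone sketch that runs the same counting logic on a few made-up token names; the names and the '/' delimiter are assumptions chosen for illustration, not data from Pinball.

import collections

path = '/workflow/daily/'           # hypothetical parent path
token_names = [                     # hypothetical token names under it
    '/workflow/daily/job/extract',
    '/workflow/daily/job/load',
    '/workflow/daily/input/ready',
]

# Count tokens per direct child of the parent path, as in get_token_paths.
counts = collections.defaultdict(int)
path_len = len(path)
for token_name in token_names:
    index = token_name.find('/', path_len)
    if index == -1:
        index = len(token_name)
    else:
        index += 1
    counts[token_name[:index]] += 1

print(dict(counts))
# {'/workflow/daily/job/': 2, '/workflow/daily/input/': 1}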
Example #6
    def format(self):
        """Extract configuration options describing the object.

        Returns:
            Dictionary with key-values describing the object.
        """
        for attribute in self._REQUIRED_ATTRIBUTES:
            if not hasattr(self, attribute):
                raise PinballException('attribute %s not found in config %s' %
                                       (attribute, self))
        return copy.copy(self.__dict__)
Example #7
    @classmethod
    def from_json(cls, json_config):
        """Construct configuration object from a json string.

        Args:
            json_config: The json string describing the config.
        Returns:
            Configuration object described by the json string.
        """
        config = json.loads(json_config)
        if type(config) is not dict:
            raise PinballException('json %s is not a dictionary' % json_config)
        return cls.from_dict(config)
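The configuration helpers shown in Examples #2, #6, and #7 (from_dict, format, from_json) are easiest to follow end to end. The sketch below wires abridged versions of them into a minimal stand-in base class and a hypothetical JobConfig subclass; the class names and attribute lists are assumptions for this illustration, not Pinball's actual classes.

import copy
import json


class PinballException(Exception):
    pass


class Config(object):
    """Minimal stand-in reproducing the methods shown above (abridged)."""
    _REQUIRED_ATTRIBUTES = []
    _OPTIONAL_ATTRIBUTES = []

    def _validate(self):
        pass  # a real config would verify attribute values here

    @classmethod
    def from_dict(cls, query_dict):
        for attribute in cls._REQUIRED_ATTRIBUTES:
            if attribute not in query_dict:
                raise PinballException('attribute %s not found in query_dict '
                                       '%s' % (attribute, query_dict))
        result = cls()
        for key, value in query_dict.items():
            setattr(result, key, value)
        for attribute in cls._OPTIONAL_ATTRIBUTES:
            if attribute not in query_dict:
                setattr(result, attribute, None)
        result._validate()
        return result

    @classmethod
    def from_json(cls, json_config):
        config = json.loads(json_config)
        if type(config) is not dict:
            raise PinballException('json %s is not a dictionary' % json_config)
        return cls.from_dict(config)

    def format(self):
        return copy.copy(self.__dict__)


class JobConfig(Config):
    # Hypothetical attribute names chosen for this sketch.
    _REQUIRED_ATTRIBUTES = ['workflow', 'job']
    _OPTIONAL_ATTRIBUTES = ['emails']


config = JobConfig.from_json('{"workflow": "daily", "job": "cleanup"}')
print(config.format())
# {'workflow': 'daily', 'job': 'cleanup', 'emails': None}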
Example #8
    def _run_or_reschedule(self):
        """Run the schedule represented by the owned schedule token.

        If the time is right and the overrun policy permits it, run the owned
        schedule token.  Otherwise, reschedule it until a later time.
        """
        assert self._owned_schedule_token
        schedule = pickle.loads(self._owned_schedule_token.data)
        if schedule.next_run_time > time.time():
            LOG.info("not the time to run token: %s",
                     self._owned_schedule_token.name)

            # It's not time to run it yet.  Although we should claim only
            # tokens which are ready to run, clock skew between different
            # machines may result in claiming a token too soon.
            assert self._owned_schedule_token.expirationTime >= schedule.next_run_time, \
                ('%d < %d in token %s' % (self._owned_schedule_token.expirationTime,
                                          schedule.next_run_time,
                                          token_to_str(self._owned_schedule_token)))
        elif (schedule.overrun_policy == OverrunPolicy.START_NEW
              or schedule.overrun_policy == OverrunPolicy.ABORT_RUNNING or
              # Ordering of the checks in the "and" condition below is
              # important to avoid a race condition when a workflow gets
              # retried and changes the state from failed to running.
              ((schedule.overrun_policy != OverrunPolicy.DELAY_UNTIL_SUCCESS
                or not schedule.is_failed(self._store))
               and not schedule.is_running(self._store))):
            LOG.info("run token: %s", self._owned_schedule_token.name)

            if schedule.overrun_policy == OverrunPolicy.ABORT_RUNNING:
                if not self._abort_workflow(schedule):
                    return
            self._request = schedule.run(self._emailer, self._store)
            if self._request:
                self._advance_schedule(schedule)
        elif schedule.overrun_policy == OverrunPolicy.SKIP:
            LOG.info("skip schedule due to overrun policy for token: %s",
                     self._owned_schedule_token.name)

            self._advance_schedule(schedule)
        elif (schedule.overrun_policy == OverrunPolicy.DELAY
              or schedule.overrun_policy == OverrunPolicy.DELAY_UNTIL_SUCCESS):
            LOG.info("delay schedule due to overrun policy for token: %s",
                     self._owned_schedule_token.name)

            self._owned_schedule_token.expirationTime = int(
                time.time() + Scheduler._DELAY_TIME_SEC)
        else:
            raise PinballException(
                'unknown schedule policy %d in token %s' %
                (schedule.overrun_policy, self._owned_schedule_token))
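Because the branching in _run_or_reschedule is dense, the standalone sketch below paraphrases the decision made for each overrun policy once the scheduled time has arrived. The policy constants and boolean flags are stand-ins defined here for illustration; the real method additionally aborts running workflows, advances the schedule, and pushes back the token expiration time.

# Stand-in policy constants; the real values live in OverrunPolicy.
START_NEW, ABORT_RUNNING, SKIP, DELAY, DELAY_UNTIL_SUCCESS = range(5)


def decide(policy, is_running, is_failed, is_due):
    """Return what the scheduler would do with the owned schedule token."""
    if not is_due:
        return 'wait'                 # not yet time to run; keep the token
    if policy in (START_NEW, ABORT_RUNNING):
        return 'run'                  # run regardless of the previous run
    if policy == DELAY_UNTIL_SUCCESS:
        if not is_failed and not is_running:
            return 'run'
        return 'delay'                # previous run failed or is still active
    if not is_running:
        return 'run'
    if policy == SKIP:
        return 'skip'                 # advance the schedule without running
    if policy == DELAY:
        return 'delay'                # push the token expiration time back
    raise ValueError('unknown schedule policy %d' % policy)


assert decide(SKIP, is_running=True, is_failed=False, is_due=True) == 'skip'
assert decide(DELAY_UNTIL_SUCCESS, is_running=False, is_failed=True,
              is_due=True) == 'delay'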
Example #9
    def get_workflow_names(self):
        """Retrieve names of all workflows.

        Returns:
            List of workflow names.
        """
        result = []
        workflow_dirs = self._list_directory(Path.WORKFLOW_PREFIX, True)
        for workflow_dir in workflow_dirs:
            if workflow_dir[-1] != Path.DELIMITER:
                raise PinballException('found unexpected file in workflows '
                                       'directory %s' % Path.WORKFLOW_PREFIX)
            result.append(workflow_dir[:-1])
        return result
Example #10
    def _delete_config(self, path):
        location = 'repos/%s/configs/contents%s' % (PinballConfig.USER, path)
        body = {'message': 'updating config',
                'committer': {'name': GithubRepository._COMMITTER_NAME,
                              'email': GithubRepository._COMMITTER_EMAIL}}

        # Get sha of the content.
        response = self._make_request('GET', location, None, [httplib.OK])
        response_json = json.loads(response)
        file_type = GithubRepository._get_file_type(response_json)
        if not file_type or file_type != 'file':
            raise PinballException('path %s is not a file but a %s' %
                                   (path, file_type))
        body['sha'] = response_json['sha']

        # Delete the file.
        self._make_request('DELETE', location, json.dumps(body),
                           [httplib.CREATED, httplib.OK])
Example #11
    def get_job_names(self, workflow):
        """Retrieve names of all jobs in a given workflow.

        Args:
            workflow: The name of the workflow whose jobs should be listed.
        Returns:
            List of job names.
        """
        result = []
        path = Path(workflow=workflow)
        workflow_prefix = path.get_workflow_prefix()
        # It happens that a user creates a workflow, runs it for a few days,
        # and then deletes it.  To accommodate this case, we allow the
        # workflow directory to be missing even though tokens for it still
        # exist in PAL.
        self._list_directory(workflow_prefix, True)
        job_prefix = path.get_job_prefix()
        jobs = self._list_directory(job_prefix, True)
        for job in jobs:
            if job[-1] == Path.DELIMITER:
                raise PinballException('found unexpected dir in jobs '
                                       'directory %s' % job_prefix)
            result.append(job)
        return result
Example #12
    def get_token(self, name):
        """Get token data from the store.

        Args:
            name: The name of the token to retrieve.
        Returns:
            Data of the token with the given name.  A PinballException is
            raised if no token with that name exists.
        """
        tokens = self._store.read_tokens(name)
        token = None
        for token in tokens:
            if token.name == name:
                break
        if not token or token.name != name:
            raise PinballException("didn't find any tokens with name %s" %
                                   name)
        return TokenData(name=token.name,
                         version=token.version,
                         owner=token.owner,
                         expiration_time=token.expirationTime,
                         priority=token.priority,
                         data=token.data)
Example #13
 def from_string(policy_name):
     for policy, name in OverrunPolicy._POLICY_NAMES.items():
         if name[0] == policy_name:
             return policy
     raise PinballException('Unknown policy %s' % policy_name)
Example #14
 def from_string(status_name):
     for status, name in Status._STATUS_NAMES.items():
         if name == status_name:
             return status
     raise PinballException('Unknown status %s' % status_name)