Ejemplo n.º 1
0
def get(http_request, path, root=METADATA_ROOT, recursive=None):
    """Fetch a resource from the metadata server.

    Args:
        http_request: A callable that matches the method
            signature of httplib2.Http.request. Used to make the request to the
            metadata server.
        path: A string indicating the resource to retrieve. For example,
            'instance/service-accounts/default'
        root: A string indicating the full path to the metadata server root.
        recursive: A boolean indicating whether to do a recursive query of
            metadata. See
            https://cloud.google.com/compute/docs/metadata#aggcontents

    Returns:
        A dictionary if the metadata server returns JSON, otherwise a string.

    Raises:
        httplib2.HttpLib2Error if an error occurred while retrieving metadata.
    """
    url = urlparse.urljoin(root, path)
    url = util._add_query_parameter(url, 'recursive', recursive)

    response, content = http_request(url, headers=METADATA_HEADERS)

    if response.status == http_client.OK:
        decoded = _helpers._from_bytes(content)
        # The metadata server labels JSON payloads via Content-Type; anything
        # else is returned to the caller as plain text.
        if response['content-type'] == 'application/json':
            return json.loads(decoded)
        else:
            return decoded
    else:
        # NOTE: trailing space inside the first literal is required — the two
        # adjacent literals are concatenated (was "Enginemetadata").
        raise httplib2.HttpLib2Error(
            'Failed to retrieve {0} from the Google Compute Engine '
            'metadata service. Response:\n{1}'.format(url, response))
Ejemplo n.º 2
0
def FetchBuilderJsonFromMilo(master,
                             builder,
                             limit=100,
                             service_account_file=None):  # pragma: no cover
    """Fetch recent build data for master/builder from the Milo JSON endpoint.

    Returns a dict mapping build number -> decoded build dict.
    """
    LOGGER.debug('Fetching buildbot json for %s/%s from milo', master, builder)
    request_headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json',
    }
    payload = json.dumps({'master': master, 'builder': builder, 'limit': limit})

    http = httplib2.Http(timeout=300)
    if service_account_file:
        infra_libs.get_signed_jwt_assertion_credentials(
            service_account_file, scope=OAUTH_SCOPES).authorize(http)

    resp, content = http.request(MILO_JSON_ENDPOINT,
                                 method='POST',
                                 headers=request_headers,
                                 body=payload)
    if resp.status != 200:
        raise httplib2.HttpLib2Error('Invalid response status: %s\n%s' %
                                     (resp.status, content))
    # The response carries a 4-byte jsonp/anti-XSSI prefix; drop it first.
    data = json.loads(content[4:])
    result = {}
    for entry in data['builds']:
        build = json.loads(base64.b64decode(entry['data']))
        result[build['number']] = build
    return result
Ejemplo n.º 3
0
    def GetRefs(self, refs_regex=None, filter_regex=None):
        """Get a dict of refs from the given git repo, like ls-remote.

        Args:
          refs_regex: regex list for which refs to monitor.
          filter_regex: List of regex substitutions for matching filter refs (if
            any) to corresponding monitored refs (used to filter unwanted commits
            from monitoring).

        Returns:
          A dict of "ref:commit" and a dict of "ref:filter_ref" mappings.
        """
        refs = {}
        filters = {}
        refs_url = '%s/+refs?format=TEXT' % self._api_url
        self.logger.debug('Refs request: %s', refs_url)
        # Initialize resp so the except handler can log it even when
        # http.request itself raised (it was previously an unbound local in
        # that path, turning the log call into a NameError).
        resp = None
        try:
            resp, content = self.http.request(refs_url, headers=self.headers)
            if resp.status >= 400:
                raise httplib2.HttpLib2Error('Invalid response status %d' %
                                             resp.status)
        except httplib2.HttpLib2Error as e:
            self.logger.exception('Failed refs request: %s (%s). Response: %r',
                                  refs_url, e, resp)
            return refs, filters

        splitter = re.compile(r'(?P<commit>[0-9a-fA-F]+)\s+(?P<ref>[^\s]+)$')
        # Default: monitor every ref, with no filter substitution.
        ref_res = [(re.compile('.*'), None)]
        if refs_regex:
            ref_res = [[re.compile(ref_reg + '$'), None]
                       for ref_reg in refs_regex]
            for idx, filter_reg in enumerate(filter_regex or []):
                ref_res[idx][1] = filter_reg
        all_refs = []
        for line in content.splitlines():
            m = splitter.match(line)
            if m:
                ref = m.group('ref')
                all_refs.append(ref)
                for ref_re in ref_res:
                    if ref_re[0].match(ref):
                        refs[ref] = m.group('commit')
                        # Try to calculate the corresponding filter ref (if any) based on
                        # the name of this monitored ref and the defined substitution, e.g.
                        # refs/heads/* => refs/upstream/heads/*).
                        if ref_re[1]:
                            filters[ref] = re.sub(ref_re[0], ref_re[1], ref)
            else:
                self.logger.debug('Unable to split line:\n%s', line)
        # Remove any bogus filter refs, otherwise using them to specify a commit
        # range will generate an error and not find any commits.
        for key, val in filters.items():
            if val not in all_refs:
                self.logger.debug('Filter ref "%s" not found.' % val)
                filters[key] = None
        return refs, filters
 def testGetCredentialsFromEnvLocal(self):
     # get_credentials_from_env() should return None when refreshing GCE
     # service-account credentials fails with an httplib2 error (i.e. we are
     # not actually running on GCE).
     self.mox.StubOutWithMock(gce, 'AppAssertionCredentials')
     credentials = self.mox.CreateMockAnything()
     # Expect credentials to be built for the connection scope and authorized
     # against an httplib2.Http instance...
     gce.AppAssertionCredentials(connection.SCOPE).AndReturn(credentials)
     credentials.authorize(mox.IsA(httplib2.Http))
     # ...and the refresh attempt to raise, simulating the failure case.
     credentials.refresh(mox.IsA(httplib2.Http)).AndRaise(
         httplib2.HttpLib2Error())
     self.mox.ReplayAll()
     self.assertIs(None, helper.get_credentials_from_env())
     self.mox.VerifyAll()
 def testGetDatastoreFromEnvNone(self):
     # get_dataset_from_env() should return None when the metadata-server
     # request for the project id fails with an httplib2 error.
     self.mox.StubOutWithMock(httplib2, 'Http')
     http = self.mox.CreateMockAnything()
     httplib2.Http().AndReturn(http)
     http.request('http://metadata/computeMetadata/v1/project/project-id',
                  headers={
                      'X-Google-Metadata-Request': 'True'
                  }).AndRaise(httplib2.HttpLib2Error())
     self.mox.ReplayAll()
     # assertEqual: assertEquals is a deprecated alias (removed in Py 3.12).
     self.assertEqual(None, helper.get_dataset_from_env())
     self.mox.VerifyAll()
Ejemplo n.º 6
0
    def test_get_ip_info_vmi_conn_error(self):
        # When the Contrail VMI request fails with an httplib2 error, the
        # instances should still be returned — just without any IP attributes
        # attached to them.
        self.ipinfo.nova_client.servers.list(search_opts=mox.IgnoreArg()
                                            ).AndReturn(test_data.instances)
        url = urljoin(self.ipinfo.contrail_url, self.ipinfo.contrail_vmi_path)
        httplib2.Http().AndReturn(self.mock_http)
        # Simulate a connection failure talking to the Contrail API.
        self.mock_http.request(url, 'GET').AndRaise(httplib2.HttpLib2Error())

        self.m.ReplayAll()
        instances = self.ipinfo.get_ip_info()
        self.assertEqual(2, len(instances))
        for instance in instances:
            self.assertFalse(hasattr(instance, 'fixed_ips'))
            self.assertFalse(hasattr(instance, 'floating_ips'))
        self.m.VerifyAll()
        self.m.UnsetStubs()
Ejemplo n.º 7
0
    def GetCommitDiff(self, commit):
        """Get the text diff introduced by the argument commit."""
        # Gitiles serves the patch base64-encoded when format=TEXT is used.
        diff_url = '%s/+/%s%%5E%%21/?format=TEXT' % (self._api_url, commit)
        self.logger.debug('Diff request: %s', diff_url)
        try:
            response, payload = self.http.request(diff_url,
                                                  headers=self.headers)
            if response.status < 400:
                return b64decode(payload)
            raise httplib2.HttpLib2Error('Invalid response status %d' %
                                         response.status)
        except httplib2.HttpLib2Error as err:
            self.logger.exception('Failed Diff request %s: %s', diff_url,
                                  str(err))
            return None
Ejemplo n.º 8
0
 def GetLogDict(log_range, limit):
     # Query the gitiles +log endpoint for `log_range`, capped at `limit`
     # entries; returns the decoded JSON dict, or None on failure.
     # NOTE(review): `self` is a free variable from the enclosing scope
     # (this is a closure), so this must stay nested where it is defined.
     url = '%s/+log/%s?format=JSON&n=%d' % (self._api_url, log_range,
                                            limit)
     self.logger.debug('Log request: %s', url)
     try:
         response, payload = self.http.request(url, headers=self.headers)
         if response.status >= 400:
             raise httplib2.HttpLib2Error('Invalid response status %d' %
                                          response.status)
         # Drop the 5-byte JSON anti-XSSI prefix before parsing.
         return json.loads(payload[5:])
     except httplib2.HttpLib2Error as err:
         self.logger.exception('Failed log request %s: %s', url, str(err))
         return None
Ejemplo n.º 9
0
def crop_image(url, width=0, height=0, img_type='jpg', quality=100, **kwargs):
    """Fetch a source image over HTTP (cached) for cropping.

    :param url: source image url
    :param width: cropped width
    :param height: cropped height
    :param img_type: cropped image type
    :param quality: cropped image quality
    """
    # NOTE(review): face_detect is read here but not used in the visible
    # code; presumably consumed further down in the original function.
    face_detect = kwargs.get('face_detect')
    if not os.path.exists(_data_path):
        os.mkdir(_data_path)
    h = httplib2.Http('.cache')
    try:
        (resp, content) = h.request(url, 'GET')
        # Some servers return an HTML error page with a 200 status; sniff the
        # start of the body for that case and treat it as a failure.
        if content[0:20].find('The requested URL') != -1:
            raise httplib2.HttpLib2Error('URL cannot find in server')
    except httplib2.HttpLib2Error as err:
        # `except ... as` replaces the Python-2-only comma form, matching the
        # syntax used by every other handler in this file.
        raise Exception('%s request error: %s' % (url, str(err)))
Ejemplo n.º 10
0
def _FetchFromBuildbucketImpl(project,
                              bucket_name,
                              builder,
                              service_account_file=None):  # pragma: no cover
    """Search Buildbucket (pRPC v2) for ended builds of the given builder.

    Returns the decoded SearchBuildsResponse protobuf.
    """
    search_req = rpc_pb2.SearchBuildsRequest()
    search_req.predicate.builder.project = project
    search_req.predicate.builder.bucket = bucket_name
    search_req.predicate.builder.builder = builder
    search_req.predicate.status = common_pb2.ENDED_MASK
    # Request only the fields we actually consume, to keep responses small.
    search_req.fields.paths.extend([
        'builds.*.number',
        'builds.*.status',
        'builds.*.input.gitiles_commit.id',
    ])

    prpc_headers = {
        'Accept': 'application/prpc; encoding=binary',
        'Content-Type': 'application/prpc; encoding=binary',
    }

    http = httplib2.Http(timeout=300)
    credentials = None
    if service_account_file:
        credentials = infra_libs.get_signed_jwt_assertion_credentials(
            service_account_file, scope=OAUTH_SCOPES)
    elif luci_auth.available():
        credentials = luci_auth.LUCICredentials(scopes=OAUTH_SCOPES)
    if credentials:
        credentials.authorize(http)

    endpoint = _BUILDBUCKET_SEARCH_ENDPOINT_V2.format(
        buildbucket_instance=_DEFAULT_BUILDBUCKET_INSTANCE)
    resp, content = http.request(endpoint,
                                 method='POST',
                                 headers=prpc_headers,
                                 body=search_req.SerializeToString())
    # pRPC reports the application-level status in this header; '0' means OK.
    grpc_code = resp.get('x-prpc-grpc-code')
    if grpc_code != '0':
        raise httplib2.HttpLib2Error('Invalid GRPC exit code: %s\n%s' %
                                     (grpc_code, content))
    search_resp = rpc_pb2.SearchBuildsResponse()
    search_resp.ParseFromString(content)

    return search_resp
Ejemplo n.º 11
0
    def GetMergedChanges(self,
                         since=None,
                         limit=None,
                         start=None,
                         fields=None):
        """Query gerrit for merged changes (i.e. commits).

        API docs:
        https://gerrit-review.googlesource.com/Documentation/rest-api-changes.html#list-changes
        https://gerrit-review.googlesource.com/Documentation/user-search.html

        Args:
          since: Only get commits after this datetime() (UTC assumed).
          limit: Maximum number of results to return.
          start: Offset in the result set to start at.
          fields: A list of additional output fields (cause more database
            lookups and slows down query response time).

        Returns:
          The decoded JSON result list, or {} on request failure.
        """
        url = '%s/changes/?q=status:merged' % self._api_url
        if since:
            url += '+since:{%s}' % urllib.quote(
                '%s' % self.GenerateTimeStamp(since))
        if start:
            url += '&S=%d' % start
        if limit:
            url += '&n=%d' % limit
        if fields:
            url += ''.join('&o=%s' % field for field in fields)
        self.logger.debug('Gerrit commits request: %s', url)
        try:
            response, payload = self.http.request(url, headers=self.headers)
            if response.status >= 400:
                raise httplib2.HttpLib2Error('Invalid response status %d' %
                                             response.status)
        except httplib2.HttpLib2Error as err:
            self.logger.exception('Failed gerrit request %s: %s', url,
                                  str(err))
            return {}
        # Skip the first 5 bytes of the response (JSON anti-XSSI prefix).
        return json.loads(payload[5:])
Ejemplo n.º 12
0
    def request(self, api_url, query=None, credentials=None, app_access=False, method="GET"):
        """Perform an authenticated request against api_url.

        Appends access_token (app-level or user credentials) and any extra
        query parameters, and returns (head, content) on HTTP 200; raises
        AccessTokenCredentialsError otherwise.
        """
        # `query=None` avoids the shared-mutable-default-argument pitfall
        # (previously `query={}`); None means "no extra parameters".
        if query is None:
            query = {}
        if credentials: self.credentials = credentials
        if not app_access and self.credentials.access_token_expired:
            raise httplib2.HttpLib2Error('Access token expired')

        h = httplib2.Http()
        if app_access:
            request_url = api_url + "?access_token=" + self.access_token
        else:
            request_url = api_url + "?access_token=" + self.credentials.access_token

        # .items() (not Python-2-only .iteritems()) so this also runs on
        # Python 3; works identically on Python 2.
        for key, value in query.items():
            request_url += '&' + str(key) + '=' + str(value)

        head, content = h.request(request_url, method=method)

        if head.get('status') == '200':
            return (head, content)
        else:
            logger.error("AccessTokenCredentialsError: %s", content)
            raise AccessTokenCredentialsError(content)
Ejemplo n.º 13
0
 def test_retries_transient_errors(self):
     # net.request should retry after a connection error, a 408 and a 500,
     # and then succeed on the fourth attempt (max_attempts=4).
     url = 'http://localhost/123'
     outcomes = [
         httplib2.HttpLib2Error(),
         (httplib2.Response({'status': 408, 'reason': 'client timeout'}),
          'client timeout'),
         (httplib2.Response({'status': 500, 'reason': 'server error'}),
          'server error'),
         (httplib2.Response({}), 'response body'),
     ]
     self.mock_httplib2([({'uri': url}, outcome) for outcome in outcomes])
     response = net.request(url, max_attempts=4)
     self.assertEqual('response body', response)
Ejemplo n.º 14
0
    def GetCommitDetails(self, ref):
        """Fetch the gitiles details of the given commit ref.

        Args:
          ref: The commit ref to get the details for.

        Returns:
          Dict of gitiles commit details:
            commit, tree, parents, author, committer, message, tree_diff
        """
        commit_url = '%s/+/%s?format=JSON' % (self._api_url, ref)
        self.logger.debug('Commit request: %s', commit_url)
        try:
            response, payload = self.http.request(commit_url,
                                                  headers=self.headers)
            if response.status >= 400:
                raise httplib2.HttpLib2Error('Invalid response status %d' %
                                             response.status)
        except httplib2.HttpLib2Error as err:
            self.logger.exception('Failed Commit request %s: %s', commit_url,
                                  str(err))
            return {}
        # Skip the first 5 bytes of the response (JSON anti-XSSI prefix).
        return json.loads(payload[5:])
Ejemplo n.º 15
0
            cookies = response.get('set-cookie')
            jsessionid = re.findall(r'(JSESSIONID=[0-9A-F]+);', cookies)
            if len(jsessionid) > 0:
                self._jsession = jsessionid[0]

        if response is not None and response.get('status') == '404':
            r, _ = self._http.request(self._server)

            if self._server.rstrip('/') != r.get('content-location',
                                                 self._server).rstrip('/'):

                old_server = self._server
                self._server = r.get('content-location').rstrip('/')
                return self._exec(uri.replace(old_server, ''), method, body)
            else:
                raise httplib2.HttpLib2Error(
                    '%s %s %s' % (uri, response.status, response.reason))

        if is_xnat_error(content):
            catch_error(content)

        return content

    def _get_json(self, uri):
        """ Specific Interface._exec method to retrieve data.
            It forces the data format to csv and then puts it back to a 
            json-like format.
            
            Parameters
            ----------
            uri: string
                URI of the resource to be accessed. e.g. /REST/projects
Ejemplo n.º 16
0
 def test_fail_exception(self, _sleep):
   # Every attempt raises, so after exhausting its retries (5 calls total)
   # http.request should re-raise the underlying httplib2 error.
   self.http._http.request.side_effect = httplib2.HttpLib2Error()
   self.assertRaises(httplib2.HttpLib2Error, self.http.request, 'http://foo/')
   self.http._http.request.assert_has_calls([ self._MOCK_REQUEST ] * 5)
Ejemplo n.º 17
0
    def _exec(self, uri, method='GET', body=None, headers=None):
        """ A wrapper around a simple httplib2.request call that:
                - avoids repeating the server url in the request
                - deals with custom caching mechanisms
                - manages a user session with cookies
                - catches and broadcast specific XNAT errors

            Parameters
            ----------
            uri: string
                URI of the resource to be accessed. e.g. /REST/projects
            method: GET | PUT | POST | DELETE
                HTTP method.
            body: string
                HTTP message body
            headers: dict
                Additional headers for the HTTP request.
        """

        if headers is None:
            headers = {}

        self._get_entry_point()

        uri = join_uri(self._server, uri)

        if DEBUG:
            print(uri)
        # using session authentication
        headers['cookie'] = self._jsession
        headers['connection'] = 'keep-alive'

        # reset the memcache when client changes something on the server
        if method in ['PUT', 'DELETE']:
            self._memcache = {}

        # Initialize these to default values.
        response = None
        info = None
        content = None

        if self._mode == 'online' and method == 'GET':

            # Serve from the in-memory cache while the entry is fresher than
            # _memtimeout; otherwise hit the server and refresh the timestamp.
            if time.time() - self._memcache.get(uri, 0) < self._memtimeout:
                if DEBUG:
                    print('send: GET CACHE %s' % uri)

                info, content = self._http.cache.get(uri).split('\r\n\r\n', 1)

                self._memcache[uri] = time.time()
            else:
                response, content = self._http.request(uri, method, body,
                                                       headers)
                self._memcache[uri] = time.time()

        elif self._mode == 'offline' and method == 'GET':

            cached_value = self._http.cache.get(uri)

            if cached_value is not None:
                if DEBUG:
                    print('send: GET CACHE %s' % uri)
                info, content = cached_value.split('\r\n\r\n', 1)
            else:
                try:
                    # Offline mode: allow a short (10s) network attempt before
                    # giving up, then restore the unlimited timeout.
                    self._http.timeout = 10

                    response, content = self._http.request(
                        uri, method, body, headers)

                    self._http.timeout = None
                    self._memcache[uri] = time.time()
                except Exception as e:
                    catch_error(e)
        else:
            response, content = self._http.request(uri, method, body, headers)

        if DEBUG:
            if response is None:
                response = httplib2.Response(email.message_from_string(info))
                # Bug fix: the format operands must be inside print(); the
                # previous `print('...') % (...)` applied % to print's None
                # return value and raised TypeError whenever this ran.
                print('reply: %s %s from cache' % (response.status,
                                                   response.reason))
                for key in response.keys():
                    print('header: %s: %s' % (key.title(), response.get(key)))

        if response is not None and 'set-cookie' in response:
            cookies = response.get('set-cookie')
            jsessionid = re.findall(r'(JSESSIONID=[0-9A-F]+);', cookies)
            if len(jsessionid) > 0:
                self._jsession = jsessionid[0]

        if response is not None and response.get('status') == '404':
            r, _ = self._http.request(self._server)

            if self._server.rstrip('/') != r.get('content-location',
                                                 self._server).rstrip('/'):

                # The server redirected us; remember the new location and
                # retry the request against it.
                # NOTE(review): the retry does not forward `headers` — confirm
                # whether that is intentional.
                old_server = self._server
                self._server = r.get('content-location').rstrip('/')
                return self._exec(uri.replace(old_server, ''), method, body)
            else:
                raise httplib2.HttpLib2Error(
                    '%s %s %s' % (uri, response.status, response.reason))

        if is_xnat_error(content):
            # Removed stray debug prints of response.keys()/status here: they
            # crashed with AttributeError when content came from the cache
            # (response is None), and the other copy of this method in this
            # file does not have them.
            catch_error(content)

        return content
Ejemplo n.º 18
0
 def _raise_http_err(master, builder, **kwargs):
     if builder == 'builder1':
         return {'build1': 'success'}
     raise httplib2.HttpLib2Error()
Ejemplo n.º 19
0
 def testPollHTTPError(self):
     # A transient httplib2 failure during a mid-stream poll is swallowed.
     self.SetNextException(httplib2.HttpLib2Error('bad stuff'))
     log_tailer = cloudbuild_logs.LogTailer('bucket', 'log-build-id.txt')
     log_tailer.Poll()  # no error raised
Ejemplo n.º 20
0
 def testPollHTTPError_IsLast(self):
     # On the final poll the tailer surfaces the failure as a
     # CommunicationError instead of swallowing it.
     self.SetNextException(httplib2.HttpLib2Error('bad stuff'))
     log_tailer = cloudbuild_logs.LogTailer('bucket', 'log-build-id.txt')
     with self.assertRaisesRegex(api_exceptions.CommunicationError,
                                 'bad stuff'):
         log_tailer.Poll(is_last=True)  # the last poll will raise the error