Example #1
 def test_reload_unexpected_url(self):
     """
     If the error URL is not the root, the error is propagated.
     """
     error = HTTPError(URL, 503, "Service Unavailable", {}, None)
     error.url = "/foo"
     self.fakes.jenkins.responses[urljoin(URL, "/reload")] = error
     self.assertRaises(HTTPError, self.api.reload)
Example #2
 def test_reload(self):
     """
     The reload method POSTs a request to the '/reload' URL, expecting
     a 503 on the homepage (which happens after redirection).
     """
     error = HTTPError(URL, 503, "Service Unavailable", {}, None)
     error.url = URL
     self.fakes.jenkins.responses[urljoin(URL, "/reload")] = error
     self.api.reload()
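
A minimal sketch (not the library's actual implementation) of a reload() method that both tests above would exercise, assuming a hypothetical self.client helper that POSTs and raises urllib's HTTPError on failure: a 503 whose URL is the Jenkins root is the expected post-reload redirect and is swallowed, any other HTTPError propagates.

from urllib.error import HTTPError
from urllib.parse import urljoin


class Api:
    def __init__(self, url, client):
        self.url = url        # Jenkins root URL
        self.client = client  # assumed helper exposing post(url) and raising HTTPError

    def reload(self):
        try:
            self.client.post(urljoin(self.url, "/reload"))
        except HTTPError as error:
            # A 503 answered by the homepage is expected while Jenkins restarts;
            # anything else (including a 503 from another URL) is a real failure.
            if error.code != 503 or error.url != self.url:
                raise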
Example #3
 def _except(self, kwargs):
     #    rh_logger.logger.report_event(kwargs['msg'])
     raise HTTPError(self.request.uri, 400, kwargs['msg'], [], None)
Example #4
def test_segmented_only_catches_404(cipher_signature):
    stream = cipher_signature.streams.filter(adaptive=True)[0]
    with mock.patch('pytube.request.head') as mock_head:
        mock_head.side_effect = HTTPError('', 403, 'Forbidden', '', '')
        with pytest.raises(HTTPError):
            stream.download()
Example #5
def test_head_url_500(check_docker):
    expected_exception = HTTPError(code=500, fp=None, url='url', msg='msg', hdrs=[])
    with patch('check_docker.check_docker.HTTPSHandler.https_open', side_effect=expected_exception), \
         pytest.raises(HTTPError):
        check_docker.head_url(url='https://example.com/test')
Example #6
 def redirect_response(self, req, fp, code, msg, headers, newurl):
     if code in (301, 302, 303, 307):
         raise HTTPError(req.get_full_url(), code, msg, headers, fp)
Example #7
 def _load_data():
     raise HTTPError(url=None,
                     code=412,
                     msg='Simulated mock error',
                     hdrs=None,
                     fp=None)
Example #8
 def mock_urlopen_raising_HTTPError(self, url, data, timeout):
     """Mock implementation of urllib2.urlopen raising HTTPError"""
     raise HTTPError(None, 404, "Failure", None, None)
Example #9
 def _raise_http_error(url, req):
     """ Raises HTTP error for 'url' and 'req' """
     # Indent req.text to pretty print it later
     indented_lines = ['\t' + l for l in req.text.splitlines(True)]
     msg = '\n' + ''.join(indented_lines)
     raise HTTPError(url, req.status_code, msg, req.headers, None)
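
An illustrative call site for _raise_http_error() (the URL is made up), using a requests response whose status indicates failure:

import requests

req = requests.get("https://example.com/api/resource")
if not req.ok:
    _raise_http_error("https://example.com/api/resource", req)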
Example #10
    def make_request(self, path, data=None, ajax=False, debug=True, force_login=False):
        url = path if path.startswith("http") else self.url + path
        logger.error(f" make_request ----------> url: {url}")

        if ajax:
            url += f"{('&' if '?' in url else '?')}force_ajax=true"
            self._session.headers['X_REQUESTED_WITH'] = "XMLHttpRequest"

        cookie_value = self._session.cookies.get(settings.SESSION_COOKIE_NAME)
        if force_login and cookie_value:
            self.response_cookies += f"; {settings.SESSION_COOKIE_NAME}={cookie_value}"

        if self.csrf_token:
            self._session.headers['X-CSRFToken'] = self.csrf_token

        if self.response_cookies:
            self._session.headers['cookie'] = self.response_cookies

        if data:
            for name, value in data.items():
                if isinstance(value, IOBase):
                    data[name] = (os.path.basename(value.name), value)

            encoder = MultipartEncoder(fields=data)
            self._session.headers['Content-Type'] = encoder.content_type
            self._session.mount(f"{urlsplit(url).scheme}://", self._adapter)
            self._session.verify = False
            self._action = getattr(self._session, 'post', None)

            _retry = 0
            _not_done = True
            while _not_done and _retry < 3:
                try:
                    response = self._action(
                        url=url,
                        data=encoder,
                        headers=self._session.headers,
                        timeout=10,
                        stream=False)
                    _not_done = False
                except (ProtocolError, ConnectionError, ConnectionResetError):
                    time.sleep(1.0)
                    _not_done = True
                finally:
                    _retry += 1
        else:
            self._session.mount(f"{urlsplit(url).scheme}://", self._adapter)
            self._session.verify = False
            self._action = getattr(self._session, 'get', None)

            _retry = 0
            _not_done = True
            while _not_done and _retry < 3:
                try:
                    response = self._action(
                        url=url,
                        data=None,
                        headers=self._session.headers,
                        timeout=10,
                        stream=False)
                    _not_done = False
                except (ProtocolError, ConnectionError, ConnectionResetError):
                    time.sleep(1.0)
                    _not_done = True
                finally:
                    _retry += 1

        try:
            response.raise_for_status()
        except requests.exceptions.HTTPError as ex:
            message = ''
            if hasattr(ex, 'message'):
                if debug:
                    logger.error(f'error in request to {path}')
                    logger.error(ex.message)
                message = ex.message[ex.message.index(':') + 2:]
            else:
                message = str(ex)
            raise HTTPError(url, response.status_code, message, response.headers, None)

        logger.error(f" make_request ----------> response: {response}")
        return response
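
An illustrative call (endpoint and file name assumed), where client is an instance of the class above: uploads go through the multipart branch, and non-2xx responses surface as a urllib HTTPError built from the final URL, status, message and headers.

from urllib.error import HTTPError

try:
    response = client.make_request(
        "/api/v2/uploads",
        data={"base_file": open("raster.tif", "rb")},
        ajax=True,
    )
except HTTPError as error:
    print(error.code, error.msg)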
Example #11
def test_download_file_response_code(mocker, test_image_url, code, reason):
    side_effect = HTTPError(test_image_url, code, reason, {}, None)
    mocker.patch(make_mock_target("misc", "urlopen"), side_effect=side_effect)

    with pytest.raises(RuntimeError):
        misc.download_file(test_image_url)
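
A hedged sketch of the behaviour this test relies on (the real misc.download_file is not shown): HTTP failures from urlopen are re-raised as RuntimeError.

from urllib.error import HTTPError
from urllib.request import urlopen


def download_file(url):
    try:
        with urlopen(url) as response:
            return response.read()
    except HTTPError as error:
        raise RuntimeError(
            f"Downloading {url} failed with {error.code} {error.reason}"
        ) from error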
Example #12
 def test_build_request_status_http_error(self):
     status = self.do_collect_views_error(
         1, "foo", HTTPError("", 500, "kaputt", None, None))
     self.assertEqual(status[("ci.sbb.ch", "foo")].request_status,
                      RequestStatus.ERROR)
Example #13
 def test_collect_views_http_error(self):
     self.do_collect_views_error(1,
                                 view_name=self.view_name_1,
                                 error=HTTPError(self.url, 500, None, None,
                                                 None))
Example #14
 def test_build_request_status_not_found(self):
     status = self.do_collect_jobs_error(
         "foo", HTTPError(self.url, 404, None, None, None))
     self.assertEqual(RequestStatus.NOT_FOUND,
                      status[("ci.sbb.ch", "foo")].request_status)
Example #15
 def test_build_request_status_error(self):
     status = self.do_collect_jobs_error(
         "foo", HTTPError(self.url, 500, None, None, None))
     self.assertEqual(RequestStatus.ERROR,
                      status[("ci.sbb.ch", "foo")].request_status)
Example #16
    def _perform(self,
                 url: str,
                 curl_obj: pycurl.Curl = None,
                 headers: dict = None,
                 postfields: dict = None,
                 skip_auth=False) -> bytes:
        if not skip_auth:
            self._wait_authenticated()

        if not curl_obj:
            curl_obj = pycurl.Curl()

        if postfields:
            postfields_dict = urlencode(postfields)
            _set_postfields(curl_obj, postfields_dict)

        logger.debug("url={url}, headers={headers}", url=url, headers=headers)
        if not headers:
            headers = self.BASE_HEADERS.copy()
        headers_list = self._headers_to_list(headers)

        logger.debug("prepared headers={h}", h=headers_list)

        buffer = BytesIO()

        curl_obj.setopt(pycurl.WRITEFUNCTION, buffer.write)
        curl_obj.setopt(pycurl.HEADERFUNCTION, self._header_function)
        curl_obj.setopt(pycurl.BUFFERSIZE, 102400)
        curl_obj.setopt(pycurl.URL, url)
        curl_obj.setopt(pycurl.HTTPHEADER, headers_list)
        curl_obj.setopt(pycurl.USERAGENT, CURL_USERAGENT)
        curl_obj.setopt(pycurl.MAXREDIRS, 50)
        curl_obj.setopt(pycurl.ACCEPT_ENCODING, "")
        curl_obj.setopt(pycurl.TCP_KEEPALIVE, 1)
        curl_obj.setopt(pycurl.FOLLOWLOCATION, True)
        curl_obj.setopt(pycurl.ENCODING, "gzip, deflate")

        try:
            curl_obj.perform()
        except pycurl.error as e:
            logger.debug(e, exc_info=True)
            raise

        status = curl_obj.getinfo(pycurl.HTTP_CODE)
        logger.debug("HTTP status: {s}", s=status)
        curl_obj.close()

        if status != HTTPStatus.OK:
            hdrs = {}
            try:
                hdrs = {k: v[-1] for k, v in self._headers.items()}
            except (IndexError, KeyError):
                pass
            phrase = "error"
            try:
                phrase = http.client.responses[status]
                logger.error("HTTP status error: {s}", s=status)
            except KeyError:
                pass
            raise HTTPError(url=url,
                            msg=phrase,
                            code=status,
                            hdrs=hdrs,
                            fp=None)

        # Server changing maps will trigger sessionid change,
        # keep track of latest sessionid in response headers.
        sessionid = self._find_sessionid()
        if sessionid and self._auth_data:
            self._auth_data.sessionid = sessionid

        return buffer.getvalue()
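
The _set_postfields() helper called above is not shown; a plausible minimal version simply hands the urlencoded string to libcurl (setting POSTFIELDS also switches the request to POST).

import pycurl


def _set_postfields(curl_obj, postfields):
    # postfields is expected to be an already urlencoded string
    curl_obj.setopt(pycurl.POSTFIELDS, postfields)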
Example #17
def urlopen(request):
    raise HTTPError(request.url, 418, 'Not implemented yet', {}, None)
Example #18
 def redirect_request(self, req: URLRequest, fp: IO[str], code: int,
                      msg: str, headers: Mapping[str, str],
                      newurl: str) -> Optional[URLRequest]:
     raise HTTPError(newurl, code, msg, headers, fp)
Example #19
 def _post(url, form: Dict[str, Any], headers: Dict[str, Any], timeout=5) -> str:
     response = requests.post(url, json=form, headers=headers, timeout=timeout)
     if response.status_code != 200:
         raise HTTPError(url, response.status_code, response.text, response.headers, None)
     return response.text
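
Example use of _post() (endpoint and payload are illustrative): callers see a single urllib HTTPError regardless of how requests reported the failure.

from urllib.error import HTTPError

try:
    body = _post(
        "https://example.com/api/v1/items",
        form={"name": "widget"},
        headers={"Authorization": "Bearer TOKEN"},
    )
except HTTPError as error:
    print(f"POST failed: {error.code} {error.msg}")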
Example #20
 def raise_http_error(self, url, *args, **kw):
     raise HTTPError(url, 500, 'kaboom: %s %s' % (pp(args), pp(kw)), dict(),
                     None)
Example #21
 def autoretry_backoff_jitter_task(self, url):
     self.iterations += 1
     if "error" in url:
         fp = tempfile.TemporaryFile()
         raise HTTPError(url, '500', 'Error', '', fp)
     return url
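
A task like this is normally registered with Celery's documented autoretry options; a sketch (not necessarily the test suite's exact configuration):

from urllib.error import HTTPError

from celery import Celery

app = Celery("tasks")


@app.task(bind=True, autoretry_for=(HTTPError,),
          retry_backoff=True, retry_jitter=True, max_retries=3)
def fetch(self, url):
    ...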
Example #22
 def test_is_new_contributor_nonexistent_user(self):
     self.mocks['api_req'].side_effect = HTTPError(None, 422, None, None,
                                                   None)
     assert self.is_new_contributor()
     self.assert_api_req_call()
Example #23
                {
                    "path": "src/main.c",
                    "status": "modified",
                    "additions": 1,
                    "deletions": 1
                }
            ]
        }
    }"""),
    ),  # valid response
    ("CVE-1970-1000", 200, b"", None, io.BytesIO(b"")),  # empty response
    (
        "CVE-1970-1000",
        404,
        b"vcs not found",
        HTTPError("/", 404, "vcs not found", {}, io.StringIO("vcs not found")),
        None,
    ),  # not found
    (
        "CVE-1970-1000",
        400,
        b"something wrong",
        URLError("something wrong"),
        None,
    ),  # url error
    ("CVE-1970-1500", 404, b"not found", None, "None"),  # no commit
    ("CVE-1970-9000", 404, b"not found", None, None),  # cve not found
    ("9999", 404, b"not found", None, None),  # id not found
    ("Invalid ID", 404, b"not found", None, None),  # invalid id
]
Example #24
 def test_is_new_contributor_error(self):
     self.mocks['api_req'].side_effect = HTTPError(None, 403, None, None,
                                                   None)
     with pytest.raises(HTTPError):
         self.is_new_contributor()
     self.assert_api_req_call()
Example #25
def main():
    hostnames = args.hostname.split('.')

    if len(hostnames) < 3:
        msg = 'Hostname "{}" is not a fully-qualified host name of form "HOST.DOMAIN.TOP".'.format(
            args.hostname)
        raise Exception(msg)

    if not args.ip:
        try:
            with urlopen("http://ipv4.icanhazip.com/") as f:
                resp = f.read()
            if sys.version_info > (3, ): resp = resp.decode('utf-8')
            args.ip = resp.strip()
        except URLError:
            msg = 'Unable to automatically obtain IP address from http://ipv4.icanhazip.com/.'
            raise Exception(msg)

    ips = args.ip.split('.')
    if len(ips)!=4 or \
      not ips[0].isdigit() or not ips[1].isdigit() or not ips[2].isdigit() or not ips[3].isdigit() or \
      int(ips[0])>255 or int(ips[1])>255 or int(ips[2])>255 or int(ips[3])>255:
        msg = '"{}" is not a valid IP address.'.format(args.ip)
        raise Exception(msg)

    url = 'https://api.godaddy.com/v1/domains/{}/records/A/{}'.format(
        '.'.join(hostnames[1:]), hostnames[0])
    data = json.dumps([{
        "data": args.ip,
        "ttl": args.ttl,
        "name": hostnames[0],
        "type": "A"
    }])
    if sys.version_info > (3, ): data = data.encode('utf-8')
    req = Request(url, method='PUT', data=data)

    req.add_header("Content-Type", "application/json")
    req.add_header("Accept", "application/json")
    if args.key and args.secret:
        req.add_header("Authorization",
                       "sso-key {}:{}".format(args.key, args.secret))

    try:
        with urlopen(req) as f:
            resp = f.read()
        if sys.version_info > (3, ): resp = resp.decode('utf-8')
        resp = json.loads(resp)
    except HTTPError as e:
        if e.code == 400:
            msg = 'Unable to set IP address: GoDaddy API URL ({}) was malformed.'.format(
                req.full_url)
        elif e.code == 401:
            if args.key and args.secret:
                msg = '''Unable to set IP address: --key or --secret option incorrect.
Correct values can be obtained from https://developer.godaddy.com/keys/ and are ideally placed in a % file.'''
            else:
                msg = '''Unable to set IP address: --key or --secret option missing.
Correct values can be obtained from https://developer.godaddy.com/keys/ and are ideally placed in a % file.'''
        elif e.code == 403:
            msg = '''Unable to set IP address: customer identified by --key and --secret options denied permission.
Correct values can be obtained from https://developer.godaddy.com/keys/ and are ideally placed in a % file.'''
        elif e.code == 404:
            msg = 'Unable to set IP address: {} not found at GoDaddy.'.format(
                args.hostname)
        elif e.code == 422:
            msg = 'Unable to set IP address: "{}" has invalid domain or lacks A record.'.format(
                args.hostname)
        elif e.code == 429:
            msg = 'Unable to set IP address: too many requests to GoDaddy within a brief period.'
        else:
            msg = 'Unable to set IP address: GoDaddy API failure because "{}".'.format(
                e.reason)
        raise Exception(msg)
    except URLError as e:
        msg = 'Unable to set IP address: GoDaddy API failure because "{}".'.format(
            e.reason)
        raise Exception(msg)

    print('IP address for {} set to {}.'.format(args.hostname, args.ip))
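
A worked example of the URL construction above: for hostname "www.example.com" the record path splits into the domain "example.com" and the record name "www".

hostnames = "www.example.com".split('.')
url = 'https://api.godaddy.com/v1/domains/{}/records/A/{}'.format(
    '.'.join(hostnames[1:]), hostnames[0])
# -> https://api.godaddy.com/v1/domains/example.com/records/A/www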
Example #26
def import_source_locations(
    vial_http: urllib3.connectionpool.ConnectionPool,
    import_run_id: str,
    import_locations: Iterable[load.ImportSourceLocation],
    import_batch_size: int = IMPORT_BATCH_SIZE,
) -> ImportSourceLocationsResult:
    """Import source locations"""
    created = set()
    updated = set()

    path_and_query = f"/api/importSourceLocations?import_run_id={import_run_id}"
    logger.info("Contacting VIAL: POST %s", path_and_query)

    batches = 0
    for import_locations_batch in misc.batch(import_locations, import_batch_size):
        encoded_locs = []

        for loc in import_locations_batch:
            if loc.match and loc.match.action == "new":
                created.add(loc.source_uid)
            else:
                updated.add(loc.source_uid)

            loc_json = orjson.dumps(loc.dict(exclude_none=True))
            encoded_locs.append(loc_json)

        encoded_ndjson = b"\n".join(encoded_locs)

        try:
            rsp = vial_http.request(
                "POST",
                path_and_query,
                headers={**vial_http.headers, "Content-Type": "application/x-ndjson"},
                body=encoded_ndjson,
            )
        except Exception as e:
            logger.error(
                "Error while importing locations: %s (...) %s: %s",
                encoded_ndjson[:100],
                encoded_ndjson[-100:],
                e,
            )
            raise

        if rsp.status != 200:
            raise HTTPError(
                f"/api/importSourceLocations?import_run_id={import_run_id}",
                rsp.status,
                rsp.data[:100],
                dict(rsp.headers),
                None,
            )

        batches += 1
        if batches % 5 == 0:
            logger.info(
                "Submitted %d batches of up to %d records to VIAL.",
                batches,
                import_batch_size,
            )

    logger.info("Submitted %d total batches to VIAL.", batches)

    return ImportSourceLocationsResult(created=created, updated=updated)
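
The misc.batch() helper used above is not shown; a minimal sketch, assuming it yields lists of at most batch_size items from any iterable:

import itertools


def batch(iterable, batch_size):
    iterator = iter(iterable)
    while chunk := list(itertools.islice(iterator, batch_size)):
        yield chunk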
Example #27
def test_segmented_stream_on_404(cipher_signature):
    stream = cipher_signature.streams.filter(adaptive=True)[0]
    with mock.patch('pytube.request.head') as mock_head:
        with mock.patch('pytube.request.urlopen') as mock_url_open:
            # Mock the responses to YouTube
            mock_url_open_object = mock.Mock()

            # These are our 4 "segments" of a dash stream
            # The first explains how many pieces there are, and
            # the rest are those pieces
            responses = [
                b'Raw_data\r\nSegment-Count: 3',
                b'a',
                b'b',
                b'c',
            ]
            joined_responses = b''.join(responses)

            # We create response headers to match the segments
            response_headers = [{
                'content-length':
                len(r),
                'Content-Range':
                '0-%s/%s' % (str(len(r)), str(len(r)))
            } for r in responses]

            # Request order for stream:
            # Filesize:
            #   1. head(url) -> 404
            #   2. get(url&sn=0)
            #   3. head(url&sn=[1,2,3])
            # Download:
            #   4. info(url) -> 404
            #   5. get(url&sn=0)
            #   6. get(url&sn=[1,2,3])

            # Handle filesize requests
            mock_head.side_effect = [
                HTTPError('', 404, 'Not Found', '', ''),
                *response_headers[1:],
            ]

            # Each response must be followed by None, to break iteration
            #  in the stream() function
            mock_url_open_object.read.side_effect = [
                responses[0],
                None,
                responses[0],
                None,
                responses[1],
                None,
                responses[2],
                None,
                responses[3],
                None,
            ]

            # This handles the HEAD requests to get content-length
            mock_url_open_object.info.side_effect = [
                HTTPError('', 404, 'Not Found', '', ''), *response_headers
            ]

            mock_url_open.return_value = mock_url_open_object

            with mock.patch('builtins.open',
                            new_callable=mock.mock_open) as mock_open:
                file_handle = mock_open.return_value.__enter__.return_value
                fp = stream.download()
                full_content = b''
                for call in file_handle.write.call_args_list:
                    args, kwargs = call
                    full_content += b''.join(args)

                assert full_content == joined_responses
                mock_open.assert_called_once_with(fp, 'wb')
Example #28
 def fail(*ignore):
     raise HTTPError('url', 403, 'Forbidden', 'Forbidden', mock.Mock())
Example #29
 def http_error_302(self, req, fp, code, msg, headers):
     raise HTTPError(req.full_url, code, msg, headers, fp)
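
A handler like this is typically wired in with build_opener; a sketch (illustrative URL) showing a 302 surfacing as HTTPError instead of being followed:

import urllib.request
from urllib.error import HTTPError


class NoRedirect(urllib.request.HTTPRedirectHandler):
    def http_error_302(self, req, fp, code, msg, headers):
        raise HTTPError(req.full_url, code, msg, headers, fp)


opener = urllib.request.build_opener(NoRedirect())
try:
    opener.open("http://example.com/redirects")
except HTTPError as error:
    print("redirect blocked:", error.code)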
Example #30
 def raise_url_error(url, filename, md5sum=None):
     raise HTTPError('message', None, None, None, None)
Example #31
 def get_mask(self):
     # TODO: implement this
     raise HTTPError(self.request.uri, 501,
                     "The server does not yet support /api/mask requests",
                     [], None)
Example #32
    def process_entries(self,
                        omimids,
                        transform,
                        included_fields=None,
                        graph=None,
                        limit=None):
        """
        Given a list of omim ids,
        this will use the omim API to fetch the entries, according to the
        ```included_fields``` passed as a parameter.
        If a transformation function is supplied,
        this will iterate over each entry,
        and either add the results to the supplied ```graph```
        or will return a set of processed entries that the calling function
        can further iterate.

        If no ```included_fields``` are provided, this will simply fetch
        the basic entry from omim, that is ALL fields,
        which includes an entry's:  prefix, mimNumber, status, and titles.

        :param omimids: the set of omim entry ids to fetch using their API
        :param transform: Function to transform each omim entry when looping
        :param included_fields: A set of what fields are required to retrieve
         from the API
        :param graph: the graph to add the transformed data into
        """

        omimparams = {}
        reponse_batches = []

        # add the included_fields as parameters
        if included_fields is not None and included_fields:
            omimparams['include'] = ','.join(included_fields)

        # not expecting any, but keeping just in case
        cleanomimids = [o.split(':')[-1] for o in omimids]
        diff = set(omimids) - set(cleanomimids)
        if diff:
            LOG.warning('OMIM has %i dirty bits see"\n %s', len(diff),
                        str(diff))
            omimids = cleanomimids
        cleanomimids = []

        # WIP: check if we can use a cached copy of the json records
        # maybe if exists raw/omim/_<iso-date>.json use that

        # in the meanwhile, to bypass (in case of emergencies)
        # with open('raw/omim/_2019-05-01.json', 'r') as cachefile:
        #     reponse_batches = json.load(cachefile)
        if True:  # False:

            acc = 0  # for counting

            # note that you can only do request batches of 20
            # see info about "Limits" at http://omim.org/help/api
            # TODO 2017 May seems a majority of many groups of 20
            # are producing python None for RDF triple Objects

            groupsize = 20
            if not self.test_mode and limit is not None:
                # just in case the limit is larger than the number of records,
                maxit = limit
                if limit > len(omimids):
                    maxit = len(omimids)
            else:
                maxit = len(omimids)

            while acc < maxit:
                end = min((maxit, acc + groupsize))
                # iterate through the omim ids list,
                # and fetch from the OMIM api in batches of 20

                if self.test_mode:
                    intersect = list(
                        set([str(i)
                             for i in self.test_ids]) & set(omimids[acc:end]))
                    # some of the test ids are in the omimids
                    if intersect:
                        LOG.info("found test ids: %s", intersect)
                        omimparams.update({'mimNumber': ','.join(intersect)})
                    else:
                        acc += groupsize
                        continue
                else:
                    omimparams.update(
                        {'mimNumber': ','.join(omimids[acc:end])})

                url = OMIMAPI + urllib.parse.urlencode(omimparams)

                try:
                    req = urllib.request.urlopen(url)
                except HTTPError as err:  # URLError?
                    LOG.warning('fetching: %s', url)
                    error_msg = err.read()
                    if re.search(r'The API key: .* is invalid',
                                 str(error_msg)):
                        msg = "API Key not valid"
                        raise HTTPError(url, err.code, msg, err.hdrs, err.fp)
                    LOG.error("Failed with: %s", str(error_msg))
                    break

                resp = req.read().decode()
                acc += groupsize
                # gather all batches
                reponse_batches.append(json.loads(resp))

            # snag a copy of all the batches

            with open('./raw/omim/_' + date.today().isoformat() + '.json',
                      'w') as writer:
                json.dump(reponse_batches, writer)

        LOG.info("begin transforming the %i blocks of (20) records",
                 len(reponse_batches))
        for myjson in reponse_batches:
            for entery in myjson['omim']['entryList']:
                # apply the data transformation, and save it to the graph
                transform(entery, graph)