コード例 #1
0
ファイル: flask_unsign.py プロジェクト: znatz/Flask-Unsign
    def test_server(self, requests):
        """Ensure it's possible to fetch cookies from a server and errors are handled properly.

        ``requests`` is a mock standing in for the requests module; it is wired
        so the module, ``requests.get(...)`` and its return value are all the
        same mock object.
        """
        requests.return_value = requests
        requests.get.return_value = requests

        requests.cookies = {'session': self.encoded}
        stdout, stderr = self.call('--decode', '--server',
                                   'http://*****:*****@localhost:8080')

        self.assertEqual(stdout.read().strip(), str(self.decoded))

        # for/else: the AssertionError fires only when no get() call carried
        # a "proxies" keyword argument.
        for call in requests.mock_calls:
            if call[INDEX_FN_NAME] == 'get':
                if 'proxies' in call[INDEX_KWARGS]:
                    break

        else:
            raise AssertionError(
                'Didn\'t find "proxies" argument in call args.')

        error_reason = ProxyError()
        error_reason.args = ('Cannot connect to proxy',
                             OSError('Tunnel connection failed'))
        # BUG FIX: the original snippet had an unbalanced parenthesis here
        # (SyntaxError); the closing ')' is restored.
        # NOTE(review): the wiring that makes the second call hit this error
        # appears to have been lost when credentials were redacted from the
        # snippet -- the side_effect assignment and second self.call() below
        # are reconstructed; confirm against the upstream test.
        error = ProxyError(
            MaxRetryError(reason=error_reason,
                          pool=MagicMock(),
                          url='http://*****:*****@localhost:8080'))
        requests.get.side_effect = error
        stdout, stderr = self.call('--decode', '--server',
                                   'http://*****:*****@localhost:8080')

        self.assertIn('Tunnel connection failed', stderr.read().strip())
コード例 #2
0
def test_call_url_with_proxy_error(mocker, git_crw):
    """When every proxy attempt raises ProxyError, the crawler must give up
    with its own "No proxy is available" ProxyError."""
    def raise_(e):
        raise e
    # Every requests.get call, regardless of arguments, raises ProxyError.
    mocker.patch('requests.get', new=lambda x, **args: raise_(ProxyError()))
    # BUG FIX: removed `git_crw.proxies.copy()` -- its result was discarded,
    # making the statement a no-op left over from an earlier edit.
    with pytest.raises(ProxyError, match="No proxy is available"):
        git_crw._call_url_with_proxy("https://myurl")
コード例 #3
0
ファイル: Registration.py プロジェクト: miha4759/registration
    def try_register_phone(self):
        """POST the phone number to Grab's OTP endpoint to trigger an SMS code.

        Returns the response object from the POST.

        :raises ProxyError: on request timeout (timeouts are treated as a
            proxy problem by this client).
        """
        request = None
        try:
            # Mount hyper's HTTP/2 adapter for the Grab API host.
            self.session.mount('https://api.grab.com', HTTP20Adapter())
            request = self.session.post(
                'https://api.grab.com/grabid/v1/phone/otp',
                timeout=20,
                headers={
                    "User-Agent": "Grab/5.95.1 (Android 9; Build 15313556)",
                    "Content-Type": "application/x-www-form-urlencoded",
                    # None is requests' convention for suppressing a default
                    # header on this request.
                    "Accept": None,
                    "Connection": None,
                    "Accept-Encoding": "gzip",
                    "Accept-Language": "en-US;q=1.0, en;q=0.9",
                },
                data={
                    'method': 'SMS',
                    'countryCode': 'RU',
                    'phoneNumber': self.phone,
                    'templateId': 'pax_android_production',
                    'numDigits': 6,
                })
        except Timeout:
            raise ProxyError()
        except json.JSONDecodeError:
            # NOTE(review): session.post() does not raise JSONDecodeError, so
            # this handler looks unreachable; if it ever fired, `request`
            # could still be None here -- confirm against upstream.
            raise ValueError('unexpected answer: %d' % request.status_code)

        return request
コード例 #4
0
ファイル: core.py プロジェクト: vlasenkoalexey/gcsfs
def validate_response(r, path):
    """
    Check the requests object r, raise error if it's not ok.

    Parameters
    ----------
    r: requests response object
    path: associated URL path, for error messages

    Raises
    ------
    FileNotFoundError, IOError, ProxyError, ValueError, HttpError or
    RuntimeError depending on status code and body content.
    """
    if not r.ok:
        m = str(r.content)
        error = None
        # BUG FIX: the bare `except:` (flagged by its own TODO) is narrowed
        # to the exceptions r.json()/key access can actually raise.
        try:
            error = r.json()["error"]
            msg = error["message"]
        except (ValueError, KeyError, TypeError):
            # Body is not JSON, or lacks the expected error structure.
            msg = str(r.content)

        if r.status_code == 404:
            # Include the path so the error is actionable.
            raise FileNotFoundError(path)
        elif r.status_code == 403:
            raise IOError("Forbidden: %s\n%s" % (path, msg))
        elif r.status_code == 502:
            raise ProxyError()
        elif "invalid" in m:
            raise ValueError("Bad Request: %s\n%s" % (path, msg))
        elif error:
            raise HttpError(error)
        elif r.status_code:
            raise HttpError({"code": r.status_code})
        else:
            raise RuntimeError(m)
コード例 #5
0
def validate_response(r, path):
    """
    Check the requests object r, raise error if it's not ok.

    Parameters
    ----------
    r: requests response object
    path: associated URL path, for error messages

    Raises
    ------
    FileNotFoundError, IOError, RateLimitException, ProxyError, ValueError,
    HttpError or RuntimeError depending on status code and body content.
    """
    if not r.ok:
        m = str(r.content)
        error = None
        # BUG FIX: narrowed the bare `except:` to the exceptions that
        # r.json()/key access can actually raise, so programming errors and
        # KeyboardInterrupt are no longer swallowed.
        try:
            error = r.json()['error']
            msg = error['message']
        except (ValueError, KeyError, TypeError):
            # Body is not JSON, or lacks the expected error structure.
            msg = str(r.content)

        if r.status_code == 404:
            # Include the path so the error is actionable.
            raise FileNotFoundError(path)
        elif r.status_code == 403:
            raise IOError("Forbidden: %s\n%s" % (path, msg))
        elif r.status_code == 429:
            raise RateLimitException(error)
        elif r.status_code == 502:
            raise ProxyError()
        elif "invalid" in m:
            raise ValueError("Bad Request: %s\n%s" % (path, msg))
        elif error:
            raise HttpError(error)
        else:
            raise RuntimeError(m)
コード例 #6
0
ファイル: shitter.py プロジェクト: rabid-lady/fetside-bot
 def get_ip_by_wtfismyip(self):
     """Return this session's externally visible IP via the wtfismyip JSON API."""
     fetch = lambda: self.session.get(self.WTFIP_URL)
     resp = retry(fetch, attempts=self.max_retries)
     # Only the media type matters; drop any ";charset=..." parameters.
     media_type = resp.headers.get('Content-Type', '').split(';')[0]
     if media_type == 'application/json':
         return resp.json().get('YourFuckingIPAddress')
     print('failed! wrong content type: %s' % media_type)
     raise ProxyError('Wrong response format')
コード例 #7
0
ファイル: scrape.py プロジェクト: danielHava/travel
 def is_valid(self, proxy_host, timeout=Timeout(connect=5, read=10)):
     """Verify that *proxy_host* actually proxies traffic.

     Fetches an IP-echo service through the proxy; any request failure or
     an echoed IP that differs from the proxy's own raises ProxyError.
     On success, the proxy is added to ``self.checked``.
     """
     proxy_map = {"http": proxy_host, "https": proxy_host}
     try:
         response = requests.get("https://canihazip.com/s",
                                 proxies=proxy_map,
                                 timeout=timeout)
     except Exception as exc:
         raise ProxyError(exc)
     # The echo service returns the caller's IP: it must match the proxy.
     expected_ip = proxy_host.replace("http://", "").split(":")[0]
     if response.text != expected_ip:
         raise ProxyError(
             "Proxy check failed: {} not used while requesting".format(
                 proxy_host))
     self.checked.update([proxy_host])
コード例 #8
0
ファイル: parser.py プロジェクト: wbglaeser/flatshare-scraper
 def define_request(self, url, proxy):
     """Fetch *url* through *proxy* with a randomized User-Agent.

     Returns the response body text; raises ProxyError on a non-OK status.
     """
     res = requests.get(
         url,
         proxies={'http': proxy, 'https': proxy},
         headers={'User-Agent': ua.random},
     )
     if not res.ok:
         raise ProxyError('The request did not get through: ', res)
     return res.text
コード例 #9
0
ファイル: crawlutils.py プロジェクト: szuprefix/py-xyz-util
def http_request(url,
                 data=None,
                 mobile_mode=True,
                 cookies='',
                 referer=None,
                 extra_headers=None,
                 timeout=(20, 20),
                 proxy=True):
    """GET (or POST, when *data* is given) *url*, optionally through proxies.

    *proxy* may be True (use the module-level PROXY), a single address, a
    callable returning one, a generator function yielding several to try in
    turn, or a falsy value for a direct connection.

    Returns the ``requests`` response on success. When proxies were in use
    and every one failed, raises ProxyError; with a falsy *proxy* and a
    failed direct request, returns None.
    """
    # BUG FIX: extra_headers previously used a mutable default ({}), which
    # is shared across calls; None plus a fallback avoids cross-call
    # contamination.
    headers = {
        "User-Agent": UA_MOBILE if mobile_mode else UA_PC,
        "Accept-Encoding": "gzip"
    }
    headers.update(extra_headers or {})
    if referer:
        headers['Referer'] = referer
    if proxy is True:
        proxy = PROXY
    from inspect import isgeneratorfunction
    if isgeneratorfunction(proxy):
        ps = proxy()
    else:
        ps = [proxy]
    log.info('http_get: %s', url)
    for p in ps:
        btime = datetime.now()
        try:
            if callable(p):
                p = p()
            # requests expects scheme-keyed proxy URLs; both schemes go
            # through the same HTTP proxy here.
            proxies = {
                'http': 'http://' + p,
                'https': 'http://' + p
            } if p else None
            if data:
                r = requests.post(url,
                                  data,
                                  headers=headers,
                                  timeout=timeout,
                                  proxies=proxies,
                                  cookies=cookies)
            else:
                r = requests.get(url,
                                 headers=headers,
                                 timeout=timeout,
                                 proxies=proxies,
                                 cookies=cookies)
            # Default to UTF-8 when the server does not declare a charset.
            if 'charset' not in r.headers.get('Content-Type', ''):
                r.encoding = 'utf8'
            if p:
                log.info('proxy %s visit %s spent %s seconds', p, url,
                         (datetime.now() - btime).seconds)
            return r
        except (ProxyError, ConnectionError):
            import traceback
            # log.warn is deprecated; warning() is the supported spelling.
            log.warning('proxy %s error: %s', p, traceback.format_exc())
    if proxy:
        raise ProxyError('all proxies failed')
コード例 #10
0
ファイル: shitter.py プロジェクト: rabid-lady/fetside-bot
 def check_proxy(self, proxy_set, host):
     """Assert that requests through *proxy_set* appear to come from *host*.

     Applies the proxies to the session, asks an external echo service for
     the visible IP, and raises ProxyError when it differs from the proxy
     host (i.e. the proxy is not actually used / not anonymous).
     """
     print('Checking proxy cloaking for %s... ' % proxy_set['https'], end='', flush=True)
     self.session.proxies = proxy_set
     real_ip = self.get_ip_by_ifconfig()
     print('reported IP is %s. ' % real_ip, end='')
     ok = real_ip == host
     if not ok:
         print('failed! (expected=%s, received=%s)' % (host, real_ip))
         # BUG FIX: the original 'Proxy isn''t anonymous' used SQL-style
         # quote doubling; Python concatenates the two adjacent strings
         # into "Proxy isnt anonymous", silently dropping the apostrophe.
         raise ProxyError("Proxy isn't anonymous")
     print('success!')
コード例 #11
0
def monkeypatch_test_client_proxy_exception(
    self,
    method=None,
    url=None,
    headers=None,
    params=None,
    stream=False,
    proxies=None,
    timeout=None,
):
    """Drop-in replacement for a client's request method (same signature)
    that unconditionally simulates a proxy failure, for tests."""
    raise ProxyError("Test Proxy Error")
コード例 #12
0
    def _handle_error_response(self, response):
        if response.status_code == 502:
            from requests.exceptions import ProxyError
            raise ProxyError("The proxy returned an error, this could be due to a timeout.")
        else:
            message = None
            if response.headers['Content-Type'] == 'application/json':
                message = response.json().get('message', None)
            if message:
                message = response.text

            raise ConnectionAbortedError(message)
コード例 #13
0
 def test_pac_no_failover_available_exc_case(self):
     """Special case where proxy fails but there's no DIRECT fallback. Error should bubble up,
     and all applicable proxies should be tried again in the next request. Proxy failure from exception."""
     # The PAC file yields two proxies and no DIRECT keyword, so when both
     # fail the error must surface instead of falling back.
     sess = PACSession(pac=PACFile(proxy_pac_js_tpl % 'PROXY a:80; PROXY b:80'))
     for _ in range(2):
         # Every underlying request raises ProxyError, so the session should
         # try proxy a, then proxy b, then re-raise to the caller.
         with _patch_request_base(side_effect=ProxyError()) as request, \
                 pytest.raises(ProxyError):
             sess.get(arbitrary_url)
         # Both proxies attempted, in PAC order, on each iteration.
         request.assert_has_calls([
             get_call(arbitrary_url, 'http://a:80'),
             get_call(arbitrary_url, 'http://b:80'),
         ])
コード例 #14
0
ファイル: test_api.py プロジェクト: SeyfSV/pypac
 def test_pac_failover_to_direct_also_fails(self):
     """Proxy fails. Next in line is DIRECT keyword, but direct connection also fails. Error should bubble up.
     Subsequent requests go straight to DIRECT, despite DIRECT failing."""
     sess = PACSession(pac=PACFile(proxy_pac_js))
     # Every attempt -- proxied or DIRECT -- raises ProxyError.
     with _patch_request_base(side_effect=ProxyError()) as request:
         for _ in range(2):
             with pytest.raises(ProxyError):
                 sess.get(arbitrary_url)
     # First request: proxy, then DIRECT fallback. Second request: the
     # failed proxy is remembered, so only DIRECT is attempted.
     request.assert_has_calls([
         get_call(arbitrary_url, fake_proxy_url),
         get_call(arbitrary_url, 'DIRECT'),
         get_call(arbitrary_url, 'DIRECT'),
     ])
コード例 #15
0
ファイル: test_realtime.py プロジェクト: TCCinTaiwan/twstock
 def test_proxy_raises_proxy_error(self):
     """A ProxyError raised mid-fetch should not escape realtime.get()."""
     realtime.proxies_list = [
         'http://0.0.0.0:1234',
         'http://0.0.0.0:3128',
     ]
     # First mock: the warm-up request to the TWSE index page succeeds.
     responses.add(responses.GET,
                   'http://mis.twse.com.tw/stock/index.jsp',
                   status=200)
     # Second mock: the quote fetch raises ProxyError as its body.
     responses.add(responses.GET,
                   self.FETCH_URL['2330'],
                   body=ProxyError('Unittest Mock ProxyError!!'),
                   status=200)
     # NOTE(review): the result is never asserted -- presumably only
     # "does not raise" is under test; confirm against the realtime module.
     stock = realtime.get('2330')
コード例 #16
0
 def parse_json_response(self, response: "requests.Response"):
     """
     Parses json response, if an error occurs it raises an Exception.

     (The annotation is quoted so the module still imports when ``requests``
     is unavailable; it is evaluated lazily.)

     :param response: Response of a RESTful request
     :return: response: JSON Response
     :raises ProxyError: when the gateway answers 502 (proxy failure).
     :raises ConnectionAbortedError: for any other non-200 status.
     """
     if response.status_code == 200:
         return response.json()
     elif response.status_code == 502:
         from requests.exceptions import ProxyError
         # BUG FIX: the exception was previously *returned* instead of
         # raised, so callers received a ProxyError instance as if it were
         # parsed JSON. Raising matches the docstring's stated contract.
         raise ProxyError(
             "The proxy returned an error, this could be due to a timeout.")
     else:
         raise ConnectionAbortedError(response.text)
コード例 #17
0
ファイル: main.py プロジェクト: nurettinabaci/UnissuScraper
def worker():
    """Queue worker: pull (endpoint, proxy) items and scrape until poisoned.

    A None item is the poison pill that stops the loop. Failures are
    re-raised with their cause chained so diagnostics survive.

    NOTE(review): a raise skips q.task_done(), so a q.join() elsewhere
    would block forever after a failure -- confirm this is acceptable for
    the surrounding pipeline.
    """
    while True:
        item = q.get()
        if item is None:
            break  # poison pill: shut this worker down
        ep = item[0]
        proxy = item[1]
        try:
            do_work(ep, proxy)
        except ProxyError as exc:
            # BUG FIX: chain the original error so proxy failure details
            # are not lost when re-raising.
            raise ProxyError("Proxy related problem") from exc
        except Exception as exc:
            raise Exception("Can't scrape for link: ", ep, proxy) from exc
        q.task_done()
コード例 #18
0
ファイル: main.py プロジェクト: nurettinabaci/UnissuScraper
def worker_company():
    '''Worker function to scrape details of each company.

    A None item is the poison pill that stops the loop. Failures are
    re-raised with their cause chained so diagnostics survive.

    NOTE(review): a raise skips q.task_done(), so a q.join() elsewhere
    would block forever after a failure -- confirm this is acceptable.
    '''
    while True:
        item = q.get()
        if item is None:
            break  # poison pill: shut this worker down
        ep = item[0]
        proxy = item[1]
        try:
            do_work_company(ep, proxy)
        except ProxyError as exc:
            # BUG FIX: chain the original error so proxy failure details
            # are not lost when re-raising.
            raise ProxyError("Proxy related problem") from exc
        except Exception as exc:
            raise Exception("Can't scrape for link: ", ep, proxy) from exc
        q.task_done()
コード例 #19
0
def get_data(markets, bittrex, session, proxies, proxy_indexes, logger=None):
    """Fetch market history for each market concurrently, one proxy per call.

    Returns a dict mapping market name -> history result. Markets that the
    API reports as INVALID_MARKET are removed from *markets* in place;
    failed/timed-out calls are silently skipped.
    """

    futures = []
    response_dict = {}

    # Fire off one background request per market, each through its own proxy.
    for index in range(len(markets)):
        market = markets[index]
        request_input = bittrex.get_market_history(market)

        proxy = configure_ip(proxies[proxy_indexes[index]])
        url = request_input.get('url')
        headers = {"apisign": request_input.get('apisign')}

        # NOTE(review): `session` presumably is a requests-futures session
        # (background_callback kwarg), so `response` is a future -- confirm.
        response = session.get(url,
                               background_callback=process_response,
                               headers=headers,
                               timeout=3,
                               proxies=proxy)

        # Add attributes to response
        response.market = market
        response.url = request_input.get('url')
        response.headers = headers

        futures.append(response)

    for future in as_completed(futures):

        try:
            response_data = future.result().data

            if not response_data.get('success'):
                if response_data.get('message') == "INVALID_MARKET":
                    # NOTE(review): mutates the caller's `markets` list, and
                    # logger.debug raises AttributeError when logger is None
                    # (the default) -- confirm both are intended.
                    markets.remove(future.market)
                    logger.debug('Removed {}: invalid market ...'.format(future.market))
                continue

            response_dict[future.market] = response_data.get('result')
            if not response_dict[future.market]:
                if response_data.get('message') == "NO_API_RESPONSE":
                    # Treated as a proxy failure so it is skipped below.
                    raise ProxyError('NO API RESPONSE')

        except (ProxyError, ConnectTimeout, ConnectionError, ReadTimeout):

            # logger.info('Failed API call for {}, skipping.'.format(future.market))
            pass

    return response_dict
コード例 #20
0
 def wrapper(*args, **kwargs):
     """Invoke the wrapped method, logging and normalizing network errors.

     Each requests-level exception is logged with the call arguments and
     re-raised as the same exception type carrying a fixed, user-facing
     message.
     """
     try:
         return method(*args, **kwargs)
     except ProxyError:
         _LOGGER.exception('ProxyError when try to get %s.', args)
         raise ProxyError('A proxy error occurred.')
     except ConnectionException:
         _LOGGER.exception('ConnectionError when try to get %s.', args)
         raise ConnectionException('DNS failure, refused connection, etc.')
     except Timeout:
         _LOGGER.exception('Timeout when try to get %s', args)
         raise Timeout('The request timed out.')
     except RequestException:
         _LOGGER.exception('RequestException when try to get %s.', args)
         raise RequestException('Please check out your network.')
コード例 #21
0
ファイル: main.py プロジェクト: GullinBustin/red_points_test
 def _call_url_with_proxy(self, url):
     """GET *url* via randomly ordered proxies, returning the first success.

     Pops proxies off a shuffled copy of ``self.proxies``; proxy failures
     and timeouts move on to the next candidate.

     :raises ProxyError: when every proxy has been tried and failed.
     """
     # NOTE: shuffles the shared self.proxies list in place, then works on
     # a copy so candidates are only consumed for this call.
     random.shuffle(self.proxies)
     proxy_list = self.proxies.copy()
     while True:
         try:
             rand_proxy = proxy_list.pop()  # IndexError once exhausted
             proxies = {"http": rand_proxy, "https": rand_proxy}
             r = requests.get(url, proxies=proxies, timeout=5)
             # Non-2xx responses raise HTTPError, which is NOT caught below
             # and therefore propagates to the caller immediately.
             r.raise_for_status()
         except (ProxyError, Timeout) as e:
             logging.warning("Proxy %s is not available, error: %s",
                             rand_proxy, e)
             continue
         except IndexError:
             # pop() on an empty list: every proxy failed.
             raise ProxyError("No proxy is available")
         break
     return r
コード例 #22
0
 async def _setup_proxy(self, conn, proxy, **ssl_params):
     """Prepare *conn* for use through *proxy*.

     For plain HTTP (no ssl_context given) the connection is returned
     untouched and requests are simply forwarded. For HTTPS, an HTTP
     CONNECT tunnel is established first (with Proxy-Authorization when
     the proxy URL carries credentials) and the socket is then wrapped in
     TLS.

     :raises ProxyError: if the proxy answers CONNECT with a non-200 status.
     """
     if not ssl_params.get('ssl_context'):
         logger.debug(f'Forward HTTP request to {proxy}')
         return conn
     headers = {}
     if proxy.raw_user:
         # Basic-auth credentials authenticate against the proxy itself.
         auth = _basic_auth_str(proxy.raw_user, proxy.password)
         headers['Proxy-Authorization'] = auth
     # The CONNECT target is the origin host:port, not a URL path.
     path = f'{conn.host}:{conn.port}'
     logger.debug(f'Setup HTTP tunnel {proxy}')
     request = RequestSerializer(path, method='CONNECT', headers=headers)
     async for chunk in request:
         await conn.sock.sendall(chunk)
     response = await ResponseParser(conn.sock).parse()
     if response.status != 200:
         raise ProxyError(response)
     # Tunnel is up: negotiate TLS with the origin through it.
     conn.sock = await ssl_wrap_socket(conn.sock, **ssl_params)
     return conn
コード例 #23
0
ファイル: api_sms.py プロジェクト: Podstolniy/API_for_SMS
def try_register_phone(phone, session: requests.Session):
    """Request an SMS registration code for *phone*.

    :raises ProxyError: when the connection times out.
    :raises LogicError: on an unexpected (non-JSON) answer.
    """
    try:
        # NOTE(review): requests.post()'s first positional argument is the
        # URL, but a Session object is passed here -- this will fail at
        # runtime. The intended call was almost certainly
        # session.post('<registration endpoint URL>', ...); the URL appears
        # to have been lost from this snippet. Confirm against upstream.
        r=requests.post(session,
        timeout=20,
        headers={'user-agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:47.0) Gecko/20100101 Firefox/47.0',
        'Accept': 'text/html, application/xhtml+xml, application/xml;q=0.9, */*;q=0.8',
        'Accept-Language': 'ru-RU, ru;q=0.9, en-US;q=0.8, en;q=0.7, fr;q=0.6',},

        data={'client_id': "4da4649307cc4bfaa16b08d03432535e",
        'country_code': "RU",
        'method': "SMS",
        'num_digits': 6,
        'phone_number': "{}".format(phone),}
        )
    except ConnectTimeout:
        raise ProxyError()
    except json.JSONDecodeError:
        # NOTE(review): if this handler ever fires before the assignment
        # completes, `r` is unbound here -- confirm against upstream.
        raise LogicError('unexpected answer: %d' % r.status_code)
コード例 #24
0
    def test_record_error(self):
        """Record a fixed sequence of errors and verify the per-type counts
        and the running total after every step."""
        error_map = ErrorCounter.error_map

        def compute_length() -> int:
            # Total number of errors recorded so far, across all types.
            return sum(error_map.values())

        assert compute_length() == 0
        expected = {ArithmeticError: 0, TypeError: 0, ProxyError: 0}
        sequence = (ArithmeticError, TypeError, ArithmeticError,
                    TypeError, ProxyError, ArithmeticError)
        for total, exc_type in enumerate(sequence, start=1):
            ErrorCounter.record_error(exc_type())
            expected[exc_type] += 1
            assert compute_length() == total
            # Same three reads, in the same order, as the unrolled original.
            assert error_map[ArithmeticError] == expected[ArithmeticError]
            assert error_map[TypeError] == expected[TypeError]
            assert error_map[ProxyError] == expected[ProxyError]
コード例 #25
0
ファイル: test_utils.py プロジェクト: vkd-fa/gcsfs
def test_retriable_exception():
    """is_retriable() should flag transient failures (timeouts, 5xx, 429,
    proxy errors) and reject everything else."""
    assert is_retriable(requests.exceptions.Timeout())

    # A bare class (not an instance) is never retriable.
    assert not is_retriable(ValueError)

    # 5xx server errors are retriable whether the code is int or str.
    assert is_retriable(HttpError({"message": "", "code": 500}))
    assert is_retriable(HttpError({"message": "", "code": "500"}))

    # 4xx client errors are not retriable...
    assert not is_retriable(HttpError({"message": "", "code": 400}))

    # ...except 429 (rate limiting).
    assert is_retriable(HttpError({"code": "429"}))

    assert is_retriable(ProxyError())
コード例 #26
0
 def test_proxy_raises_proxy_error(self):
     """Fetching through proxies should absorb a ProxyError and leave the
     stock with empty data rather than crashing."""
     proxies_list = [
         'http://0.0.0.0:1234',
         'http://0.0.0.0:3128',
     ]
     self.stk = stock.Stock('6223', initial_fetch=False, proxies_list=proxies_list)
     # The mocked endpoint raises ProxyError instead of returning a body.
     responses.add(
         responses.GET,
         self.FETCH_URL,
         body=ProxyError('Unittest Mock ProxyError!!'),
         status=200
     )
     self.stk.fetch(2015, 5)
     # No data on failure, and proxy rotation stayed within list bounds.
     self.assertEqual(self.stk.data, [])
     self.assertEqual(self.stk.fetcher.PROXIES_LIST, proxies_list)
     self.assertLess(self.stk.fetcher.proxy_counter, len(proxies_list))
     self.assertGreaterEqual(self.stk.fetcher.proxy_counter, 0)
     for data in self.stk.raw_data:
         self.assertEqual(data, {'aaData': [], 'data': []})
     self.assertEqual(self.stk.sid, '6223')
コード例 #27
0
def _request(method, url, sendTimes=3, **kwargs):
    """Send an HTTP request via the module session, retrying up to
    *sendTimes* times on failure.

    Error handling:
      * Timeout        -> retry.
      * ProxyError     -> raise ProxyError with a descriptive message.
      * SSLError       -> retry with certificate verification disabled.
      * anything else  -> retry.
    Once the retry budget is exhausted the original exception propagates.
    Returns the response on HTTP 200; prints a message and returns None
    for other status codes.
    """
    try:
        # Clamp a missing or over-long timeout to 20 seconds.
        # (BUG FIX: removed a dead local `timeout = 20` that was never read.)
        if "timeout" not in kwargs or kwargs["timeout"] > 30:
            kwargs['timeout'] = 20

        response = _session.request(method=method, url=url, **kwargs)
        if response.status_code == 200:
            return response
        # Non-200: report and fall through to an implicit None return.
        # NOTE(review): HTTP_Status_Code[status_code] raises KeyError for
        # unknown codes, which the except clause below then treats as a
        # retriable error -- confirm that is intended.
        status_code = response.status_code
        message = 'Response {}:{}'.format(status_code,
                                          HTTP_Status_Code[status_code])
        print(message)

    except Exception as err:
        sendTimes -= 1
        if sendTimes < 1:
            raise  # budget exhausted: surface the original error
        time.sleep(random.uniform(0, 1))

        if isinstance(err, Timeout):
            return _request(method, url, sendTimes=sendTimes, **kwargs)

        if isinstance(err, ProxyError):
            # Proxy failures are not retried: the proxy is likely banned or dead.
            message = "出现代理异常,IP被禁也可能是代理IP失效了"
            raise ProxyError(message)

        if isinstance(err, SSLError):
            # Retry with TLS verification off (e.g. self-signed endpoint).
            kwargs['verify'] = False

        # ConnectionError and all remaining errors: plain retry. (The
        # original had two branches here that performed the identical call.)
        return _request(method, url, sendTimes=sendTimes, **kwargs)
コード例 #28
0
def change_proxy():
    """Rotate the global PROXY to the next candidate from the PROXIES deque.

    Candidates already recorded in bad_proxies.txt are skipped; the proxy
    being replaced is appended to that file. When USE_BOT is set, the
    browser bot is recreated with the new proxy.

    :raises ProxyError: when every proxy has been consumed (the message is
        Russian for "All proxies have been used.").
    """
    # proxies must be formatted: 182.52.238.111:30098,103.105.77.22:8181,
    # bad proxies will be updated and skipped next time
    proxies: deque = get_global('PROXIES')
    bad_proxies: str = load_data('bad_proxies.txt')
    while proxies:
        proxy: str = proxies.popleft()
        if proxy in bad_proxies:
            continue
        #log(f'checking proxy {proxy}')
        try:
            # NOTE(review): the outgoing proxy is blacklisted *before* the
            # replacement is verified (the verification code below is
            # commented out) -- confirm this ordering is intended.
            old_proxy: str = get_global('PROXY')
            save_data('bad_proxies.txt', old_proxy, end=',')
            set_global('PROXY', proxy)
            set_global('PROXY_ERROR', False)
            # set_global('SLEEP_TIME', USER_SLEEP_TIME)
            if USE_BOT:
                log('Reloading bot')
                get_global('BOT').close()
                set_global(
                    'BOT',
                    Browser(headless=HEADLESS,
                            proxy=proxy,
                            driverpath=WEBDRIVERPATH))
            return
            #acceptable = check_connection()
            #if acceptable:
            #    log(f'using proxy {proxy}')
            #    set_global('PROXIES', proxies)
            #    return
            #else:
            #    save_data('bad_proxies.txt', proxy, end=',')
            #    set_global('PROXY', old_proxy)
            #    set_global('PROXY_ERROR', True)
        except Exception as e:
            # Any failure (e.g. bot restart) logs and recurses over the
            # remaining proxies.
            log(e)
            change_proxy()
    raise ProxyError('Все прокси использованы.')
コード例 #29
0
    def build_response(self, req, resp):
        """
        Delegate to :class:`HTTPAdapter`'s response building, then map
        Privoxy's HTML 500 error pages onto specific exceptions.

        :param PreparedRequest req: The :class:`PreparedRequest` used to generate the response.
        :param HTTPResponse resp: The urllib3 :class:`HTTPResponse` object.
        :return: :class:`Response <requests.Response>` object
        :rtype: :class:`Response <requests.Response>`
        """
        response = super(PrivoxyAdapter, self).build_response(req, resp)
        if response.status_code != 500:
            return response
        # Only inspect the body for actual 500s; .text is accessed lazily.
        body = response.text
        if body is None or '500 Internal Privoxy Error' not in body:
            return response
        # Most-specific markers are checked first.
        if '<code>forwarding-failed</code>' in body:
            raise ForwardingFailedError
        if '<code>no-server-data</code>' in body:  # pragma: no cover
            raise NoServerDataError
        if '<code>connection-timeout</code>' in body:  # pragma: no cover
            raise ConnectionTimeoutError
        if '<title>500 Internal Privoxy Error</title>' in body:  # pragma: no cover
            raise PrivoxyError(str(body))
        raise ProxyError(body)  # pragma: no cover
コード例 #30
0
    def request(self, method, url,
                params=None,
                data=None,
                headers=None,
                cookies=None,
                files=None,
                auth=None,
                timeout=None,
                allow_redirects=True,
                proxies=None,
                hooks=None,
                stream=None,
                verify=None,
                cert=None,
                json=None):
        """Issue an HTTP request through the browser driver, with a
        requests.Session-compatible signature.

        Per-call proxies and cross-domain cookies are applied to the driver
        for the duration of the call and restored afterwards. Headers,
        params and cookies are merged with session-level values, the
        request is prepared via requests' Request machinery, and the driver
        executes it.

        :raises ProxyError: driver reported errorCode 1 while per-call
            proxies were supplied.
        :raises ConnectionError: any other driver-reported error.

        NOTE(review): several parameters (files, timeout, allow_redirects,
        stream, verify, cert) are accepted for signature compatibility but
        are not visibly forwarded to the driver -- confirm which are honored.
        """

        # Set Proxies (temporarily, for this call only; reverted below)
        if proxies is not None:
            req_proxies = Proxies(self.driver, self.proxies)
            req_proxies.update(proxies)

        # Session headers first; per-call headers override them.
        prep_headers = CaseInsensitiveDict(self.headers)
        prep_headers.update(headers or {})

        cookies_to_restore = []
        url_parsed = utils.urlparse(url)
        # Start from session cookies scoped to the target domain.
        prep_cookies = self.cookies.get_dict(domain=url_parsed.netloc)
        prep_cookies.update(cookies or {})
        if cookies is not None:
            # Work Around To Set Cookies From Other Domain
            current_url_parsed = utils.urlparse(self.driver.current_url)
            if not current_url_parsed.netloc.endswith(url_parsed.netloc):
                self.driver.get('about:blank')

            # Remember the driver's current cookies so they can be restored.
            for name, value in cookies.items():
                cookies_to_restore.append(self.driver.get_cookie(name))
                self.driver.execute_phantomjs("this.addCookie({});".format(dumps(
                    {'name': name, 'value': value, 'domain': url_parsed.netloc}
                )))

        # NOTE(review): `params or {}` aliases the caller's dict (which is
        # then mutated), and session params override per-call params here --
        # the usual convention is the reverse; confirm intended.
        prep_params = params or {}
        prep_params.update(self.params)

        req = Request(method, url, prep_headers, files, data, prep_params,
                      self.auth or auth, prep_cookies, hooks, json)
        prep = req.prepare()

        self.driver.request(prep.url, prep.method, prep.body, prep.headers)
        error, res = self._extract_response(self.driver.get_page())

        # Clean: revert temporary proxies and cookies set above.
        if proxies is not None:
            self.proxies.update()

        if cookies is not None:
            for name, _ in cookies.items():
                self.driver.delete_cookie(name)
            for cookie in cookies_to_restore:
                if not cookie:
                    continue
                self.driver.execute_phantomjs("this.addCookie({})".format(dumps(cookie)))

        if error:
            # errorCode 1 with per-call proxies is treated as a proxy fault.
            if proxies and error['errorCode'] == 1:
                raise ProxyError(error['errorString'])
            raise ConnectionError(error['errorString'])

        return res