Example #1
 def __init__(self, custom_endpoint=None, http_client=None):
     self._custom_endpoint = custom_endpoint
     self._http_client = http_client or urllib3.PoolManager(
         retries=urllib3.Retry(
             total=5,
             backoff_factor=0.2,
             status_forcelist=[500, 502, 503, 504],
         ),
     )
     self._token_file = os.environ.get("AWS_WEB_IDENTITY_TOKEN_FILE")
     self._aws_region = os.environ.get("AWS_REGION")
     self._role_arn = os.environ.get("AWS_ROLE_ARN")
     self._role_session_name = os.environ.get("AWS_ROLE_SESSION_NAME")
     self._relative_uri = os.environ.get("AWS_CONTAINER_CREDENTIALS_RELATIVE_URI")
     if self._relative_uri and not self._relative_uri.startswith("/"):
         self._relative_uri = "/" + self._relative_uri
     self._full_uri = os.environ.get("AWS_CONTAINER_CREDENTIALS_FULL_URI")
     self._credentials = None
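
For reference, a minimal standalone sketch of the same retry configuration (the URL is a placeholder, not from the original):

    import urllib3

    retry = urllib3.Retry(total=5, backoff_factor=0.2,
                          status_forcelist=[500, 502, 503, 504])
    http = urllib3.PoolManager(retries=retry)
    # backoff_factor=0.2 makes the sleep between retries grow exponentially.
    response = http.request("GET", "https://example.com/")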
Example #2
 def download_file(self, filepath, access_key, secret_key):
     secure = self.schema == 'https'
     ca_certs = os.environ.get('SSL_CERT_FILE') or certifi.where()
     http_client = urllib3.PoolManager(timeout=3,
                                       maxsize=10,
                                       cert_reqs='CERT_REQUIRED',
                                       ca_certs=ca_certs,
                                       retries=urllib3.Retry(total=1,
                                                             backoff_factor=0.2,
                                                             status_forcelist=[500, 502, 503, 504]))
     client = minio.Minio(self.host, access_key, secret_key, secure=secure, http_client=http_client)
     try:
         return client.fget_object(self.bucket, self.object_key, filepath)
     except Exception as e:
         raise exceptions.PluginError(message=_('failed to download file[%(filepath)s] from s3: %(reason)s') % {
             'filepath': self.object_key,
             'reason': str(e)
         })
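
A hedged usage sketch of the same MinIO download pattern; the endpoint, credentials, bucket, and paths below are placeholders, not from the original:

    import certifi
    import minio
    import urllib3

    http_client = urllib3.PoolManager(
        timeout=3,
        cert_reqs='CERT_REQUIRED',
        ca_certs=certifi.where(),
        retries=urllib3.Retry(total=1, backoff_factor=0.2,
                              status_forcelist=[500, 502, 503, 504]))
    # All values here are illustrative only.
    client = minio.Minio('play.min.io', 'ACCESS_KEY', 'SECRET_KEY',
                         secure=True, http_client=http_client)
    client.fget_object('my-bucket', 'my-object', '/tmp/my-object')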
Example #3
    def parse_url_status(self):
        http = urllib3.PoolManager()

        urls_status = []
        for url in self._urls:
            status = 0
            try:
                r = http.request(
                    'GET',
                    url,
                    retries=urllib3.Retry(raise_on_redirect=False),
                    timeout=3)
                status = r.status
            except urllib3.exceptions.HTTPError:
                status = 'error'
            finally:
                urls_status.append(status)

        self._urls_status = urls_status
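
The raise_on_redirect=False flag above means that once redirect retries are exhausted, the 3xx response itself is returned instead of raising MaxRetryError. A small sketch (the URL is a placeholder):

    import urllib3

    http = urllib3.PoolManager()
    # With redirect=0 and raise_on_redirect=False, a redirect response is
    # returned as-is with its 3xx status instead of raising.
    r = http.request('GET', 'https://example.com/old-path',
                     retries=urllib3.Retry(redirect=0, raise_on_redirect=False),
                     timeout=3)
    print(r.status)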
Example #4
 def __init__(self, tor_controller=None):
     if not self.__socket_is_patched():
         gevent.monkey.patch_socket()
     self.tor_controller = tor_controller
     if not self.tor_controller:
         retries = urllib3.Retry(35)
     user_agent = {
         'user-agent': ('Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 '
                        '(KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36')
     }
         self.session = urllib3.PoolManager(maxsize=35,
                                            cert_reqs='CERT_REQUIRED',
                                            ca_certs=certifi.where(),
                                            headers=user_agent,
                                            retries=retries)
     else:
         self.session = self.tor_controller.get_tor_session()
     self.__tor_status__()
     self.languages = self._get_all_languages()
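
Headers given to PoolManager become default headers for every request made through the pool. A minimal sketch (the user-agent string is a placeholder):

    import certifi
    import urllib3

    # These headers are attached to every request made through the pool.
    pool = urllib3.PoolManager(maxsize=35,
                               cert_reqs='CERT_REQUIRED',
                               ca_certs=certifi.where(),
                               headers={'user-agent': 'example-agent/1.0'},
                               retries=urllib3.Retry(35))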
Example #5
    def __init__(self,
                 host,
                 version=None,
                 api_token=None,
                 ca_certs_file_path=None):
        self._api_client = ApiClient(host='https://{}/api'.format(host))
        self.request_timeout = urllib3.Timeout(
            connect=PurityFb.DEFAULT_CONN_TIMEOUT,
            read=PurityFb.DEFAULT_READ_TIMEOUT)
        self.retries = urllib3.Retry(total=PurityFb.DEFAULT_RETRIES)

        if not ca_certs_file_path:
            self.disable_verify_ssl()
        else:
            self.configure_ca_certificate_file(ca_certs_file_path)
            self.enable_verify_ssl()

        self._api_version = VersionApi(api_client=self._api_client)
        if version:
            self._version = self._check_rest_version(version)
        else:
            self._version = self._choose_rest_version()
        self._auth = AuthenticationApi(api_client=self._api_client)

        self._file_systems = globals()[self._class_name(
            FILE_SYSTEMS, self._version)](api_client=self._api_client)
        self._file_system_snapshots = globals()[self._class_name(
            FILE_SYSTEM_SNAPSHOTS, self._version)](api_client=self._api_client)

        if self._version >= LooseVersion('1.2'):
            self._admins = AdminsApi(api_client=self._api_client)
            self._alerts = AlertsApi(api_client=self._api_client)
            self._alert_watchers = AlertWatchersApi(
                api_client=self._api_client)
            self._arrays = ArraysApi(api_client=self._api_client)
            self._blade = BladeApi(api_client=self._api_client)
            self._certificates = CertificatesApi(api_client=self._api_client)
            self._dns = DnsApi(api_client=self._api_client)
            self._hardware = HardwareApi(api_client=self._api_client)

        if api_token:
            self.login(api_token)
        self.enable_verify_ssl()
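
The timeout and retry objects built in this constructor are plain urllib3 primitives; a standalone sketch with assumed values (the real defaults live on the PurityFb class):

    import urllib3

    # Assumed values for illustration; PurityFb defines its own defaults.
    request_timeout = urllib3.Timeout(connect=5.0, read=30.0)
    retries = urllib3.Retry(total=5)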
Example #6
    def _url_to_soup(cls, url, parser='html.parser'):
        '''
        Return the BeautifulSoup object corresponding to the supplied url.
        The result is not guaranteed to be an up-to-date version of the page.

        Input:
            string url
        Successful output:
            BeautifulSoup soup
                The BeautifulSoup object containing the contents of the target website
        Special output:
            NoneType
                Not found
        '''

        if parser != 'html.parser':
            raise NotImplementedError(
                'netgrep: only html.parser is currently supported')

        # TODO: Should be moved as class variable later to avoid multiple allocation
        # TODO: Cache retrieved webpages and only refresh every some intervals
        http_pool = urllib3.PoolManager(timeout=urllib3.Timeout(connect=1.0,
                                                                read=2.0),
                                        retries=urllib3.Retry(2, redirect=2))
        try:
            page = http_pool.request('GET', url)
            soup = BeautifulSoup(page.data, parser)
            return soup
        except (urllib3.exceptions.BodyNotHttplibCompatible,
                urllib3.exceptions.ConnectionError,
                urllib3.exceptions.NewConnectionError,
                urllib3.exceptions.TimeoutError,
                urllib3.exceptions.MaxRetryError):
            # TODO: raise more meaningful message
            return None
        except Exception as e:
            print("FATAL: unexpected error. ", e)
            raise
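
All of the urllib3 exceptions caught above subclass urllib3.exceptions.HTTPError, so an equivalent, more compact handler is possible (reusing http_pool and url from the example):

    try:
        page = http_pool.request('GET', url)
    except urllib3.exceptions.HTTPError:
        # Covers BodyNotHttplibCompatible, ConnectionError, NewConnectionError,
        # TimeoutError, and MaxRetryError, all of which derive from HTTPError.
        return None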
Example #7
def get_TEI_XML(url, isBetaCode):
    http = urllib3.PoolManager()
    max_tries = 5
    if isBetaCode:
        hdrs = {
            "Cookie":
            "disp.prefs=\"greek.display=PerseusBetaCode\""  #|default.scheme=book:card|default.type=book
        }
    else:
        hdrs = {}
    response = http.request('GET',
                            url,
                            headers=hdrs,
                            retries=urllib3.Retry(max_tries, redirect=2))
    xml = response.data.decode('utf-8')
    if not xml.startswith("<?xml"):
        print(url)
        # print(xml)
        raise Exception("Got back HTML for %s." % (url))
    return xml
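
In urllib3.Retry(max_tries, redirect=2), the first positional argument is total, so the request allows at most five retries overall and at most two of them may be redirects:

    import urllib3

    # total=5 caps all retries; redirect=2 additionally caps redirects.
    retries = urllib3.Retry(5, redirect=2)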
Example #8
 def _create_session(self):
     session = requests.Session()
     retry = urllib3.Retry(
         total=5,
         read=5,
         connect=5,
         backoff_factor=0.3,
         # retry on any request method (renamed to allowed_methods in urllib3 1.26+)
         method_whitelist=False,
         # force retry on those status responses
         status_forcelist=(501, 502, 503, 504, 505, 506, 507, 508, 510, 511),
         raise_on_status=False
     )
     adapter = requests.adapters.HTTPAdapter(
         max_retries=retry, pool_maxsize=16, pool_connections=16
     )
     session.mount('https://', adapter)
     session.mount('http://', adapter)
     session.headers = self._default_headers
     return session
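
method_whitelist was deprecated in urllib3 1.26 and removed in 2.0; the equivalent spelling on newer urllib3 uses allowed_methods, where None means retry on any method:

    import urllib3

    # urllib3 >= 1.26 spelling of the retry policy above.
    retry = urllib3.Retry(
        total=5, read=5, connect=5,
        backoff_factor=0.3,
        allowed_methods=None,
        status_forcelist=(501, 502, 503, 504, 505, 506, 507, 508, 510, 511),
        raise_on_status=False,
    )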
Example #9
def get_url_data(url, cc):
    http = urllib3.PoolManager(cert_reqs='CERT_REQUIRED',
                               ca_certs=certifi.where())
    try:
        r = http.request('GET',
                         url.format(cc.upper()),
                         retries=urllib3.Retry(total=4,
                                               connect=2.0,
                                               read=6.0,
                                               redirect=0,
                                               raise_on_status=True,
                                               raise_on_redirect=True,
                                               status_forcelist=[
                                                   500, 501, 502, 503, 504,
                                                   400, 401, 403, 404
                                               ]))
        response = r.data.decode('utf-8')
    except urllib3.exceptions.HTTPError as e:
        log.error("Error getting: %r\n%r" % (url, e))
        response = ''
    return response
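
Note that connect and read on urllib3.Retry are retry counts, not timeouts; the floats above read like seconds, which belong on urllib3.Timeout instead. A sketch of that separation (reusing http and url from the example):

    # Counts go on Retry; seconds go on Timeout.
    retry = urllib3.Retry(total=4, connect=2, read=6, redirect=0,
                          raise_on_status=True)
    timeout = urllib3.Timeout(connect=2.0, read=6.0)
    r = http.request('GET', url, retries=retry, timeout=timeout)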
Example #10
File: s3.py Project: xmonader/js-ng
    def __init__(self):
        super().__init__()
        # Create the http client to be able to set timeout
        http_client = urllib3.PoolManager(
            timeout=5,
            cert_reqs="CERT_REQUIRED",
            ca_certs=certifi.where(),
            retries=urllib3.Retry(total=3,
                                  backoff_factor=0.2,
                                  status_forcelist=[500, 502, 503, 504]),
        )
        # Create Minio client
        self.client = Minio(
            "{}:{}".format(self.address, self.port),
            access_key=self.access_key,
            secret_key=self.secret_key,
            secure=False,
            http_client=http_client,
        )

        if self.bucket_create:
            self._bucket_create(self.bucket)
Example #11
    def connect(self):
        # Create the http client to be able to set timeout
        http_client = urllib3.PoolManager(
            timeout=5,
            cert_reqs="CERT_REQUIRED",
            ca_certs=certifi.where(),
            retries=urllib3.Retry(total=3,
                                  backoff_factor=0.2,
                                  status_forcelist=[500, 502, 503, 504]),
        )
        # Create Minio client
        self._log_info("open connection to minio:%s" % self)
        self.client = Minio(
            "%s:%s" % (self.address, self.port),
            access_key=self.accesskey_,
            secret_key=self.secretkey_,
            secure=False,
            http_client=http_client,
        )

        if not self.bucket_ok:
            self._bucket_create(self.bucket)
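
A bare float timeout on PoolManager, as in the two MinIO examples above, is shorthand for applying that value to both the connect and the read phases; the explicit equivalent is:

    import urllib3

    # timeout=5 above is equivalent to this Timeout object.
    timeout = urllib3.Timeout(connect=5, read=5)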
Example #12
def generate_session(
    pool_connections,
    pool_maxsize,
    pool_block,
    proxies=None,
    retry_count=None,
    backoff_factor=None,
):
    """
    Utility method to generate request sessions.
    :param pool_connections: The number of urllib3 connection pools to
        cache.
    :param pool_maxsize: The maximum number of connections to save in the
        pool.
    :param pool_block: Whether the connection pool should block for
        connections.
    :param proxies: Proxies dictionary.
    :param retry_count: Number of retries to attempt
    :param backoff_factor: Backoff factor for retries
    :return: requests.Session object.
    """
    session = RequestSession()

    # Retry if no response from server, this applies to failed DNS lookups,
    # socket connections and connection timeouts
    retry_count = retry_count or RequestParameters.DEFAULT_RETRY_COUNT
    backoff_factor = backoff_factor or RequestParameters.DEFAULT_BACKOFF_FACTOR
    retries = urllib3.Retry(total=retry_count, backoff_factor=backoff_factor)

    # noinspection PyUnresolvedReferences
    adapter = requests.adapters.HTTPAdapter(pool_connections=pool_connections,
                                            pool_maxsize=pool_maxsize,
                                            pool_block=pool_block,
                                            max_retries=retries)
    session.mount('http://', adapter)
    session.mount('https://', adapter)
    session.proxies = proxies
    return session
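
A hedged usage sketch; the argument values are placeholders, and RequestSession/RequestParameters come from the surrounding project:

    session = generate_session(pool_connections=10, pool_maxsize=10,
                               pool_block=False, retry_count=3,
                               backoff_factor=0.5)
    response = session.get('https://example.com/')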
Example #13
    def _get_raw_data_with_status(self, url=None, manager=None, retries=1, redirect=True, **kwargs):
        """
        Get status and response body content from http request. Does not catch exceptions
        :return: int, str
        """
        url = url or self.url
        manager = manager or self._manager
        retry = urllib3.Retry(retries)
        if hasattr(retry, 'respect_retry_after_header'):
            retry.respect_retry_after_header = bool(self.respect_retry_after_header)

        response = manager.request(
            method=self.method,
            url=url,
            timeout=self.request_timeout,
            retries=retry,
            headers=manager.headers,
            redirect=redirect,
            **kwargs
        )
        if isinstance(response.data, str):
            return response.status, response.data
        return response.status, response.data.decode(errors='ignore')
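
The hasattr guard exists because respect_retry_after_header is only available in newer urllib3 (it is a Retry constructor parameter from 1.25 on); on such versions it can be set directly:

    import urllib3

    # urllib3 >= 1.25: pass the flag straight to the constructor.
    retry = urllib3.Retry(1, respect_retry_after_header=False)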
Example #14
 def json_reader(self):
     url = self.sparqlURL()
     http = urllib3.PoolManager()
     resp = http.request(
         'GET',
         url,
         retries=urllib3.Retry(5, redirect=2),
         headers={
             "Accept":
             "application/sparql-results+json, applicaton/json;q=0.1"
         })
     #with self.session.urlOpener.open(url) as jsonFile:
     try:
         jsonDoc = json.loads(resp.data.decode('utf-8'))
         bindings = jsonDoc['results']['bindings']
         for binding in bindings:
             n = self.binding_as_doc(binding)
             if n is not None:
                 #                    print(n)
                 yield n
     except Exception:
         print("Data error: " + resp.data.decode(encoding='UTF-8'))
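
Headers passed to request() apply to that single request and override any defaults set on the PoolManager; a minimal sketch (reusing http and url from the example):

    resp = http.request('GET', url,
                        headers={'Accept': 'application/sparql-results+json'})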
Example #15
def get_azure_imds(path, format='text'):
    """Get metadata from Azure Instance Metadata Service.

    Args:
        path (str): URL path for Azure Instance Metadata Service.
        format (str, optional): Response format, text or json. Defaults to 'text'.

    Returns:
        str: Metadata in response. Defaults to '' if timeout or error occurs.
    """
    http = urllib3.PoolManager(
        headers={'Metadata': 'true'},
        timeout=urllib3.Timeout(connect=1.0, read=1.0),
        retries=urllib3.Retry(total=3, connect=0, backoff_factor=1.0),
    )
    try:
        r = http.request(
            'GET',
            f'http://169.254.169.254/metadata/{path}?api-version=2020-06-01&format={format}'
        )
        return r.data.decode('ascii')
    except Exception:
        return ''
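
A hedged usage sketch, assuming the code runs on an Azure VM where the IMDS endpoint is reachable:

    # 'instance/compute/vmSize' is a standard IMDS path; adjust as needed.
    vm_size = get_azure_imds('instance/compute/vmSize')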