def _get_pool_manager(verify, cert_file, key_file):
    """Return a urllib3 manager configured for the requested SSL options.

    When no per-request SSL customization is present (no ``verify``, no
    ``cert_file``, no ``DX_CA_CERT`` env var), the shared module-level
    manager is lazily created under ``_pool_mutex`` and reused. Otherwise a
    one-off (uncached) manager is built for this call. An ``HTTPS_PROXY``
    environment variable switches either path to a ProxyManager.
    """
    global _pool_manager
    base_args = {
        'maxsize': 32,
        'cert_reqs': ssl.CERT_REQUIRED,
        'ca_certs': _default_certs,
        'headers': _default_headers,
        'timeout': _default_timeout,
    }
    use_shared = (cert_file is None and verify is None
                  and 'DX_CA_CERT' not in os.environ)
    if use_shared:
        # Common case: build the cached manager once, under the lock.
        with _pool_mutex:
            if _pool_manager is None:
                if 'HTTPS_PROXY' in os.environ:
                    base_args.update(_get_proxy_info(os.environ['HTTPS_PROXY']))
                    _pool_manager = urllib3.ProxyManager(**base_args)
                else:
                    _pool_manager = urllib3.PoolManager(**base_args)
            return _pool_manager
    # Uncommon case: caller supplied SSL overrides, so skip the cache.
    pool_args = dict(base_args,
                     cert_file=cert_file,
                     key_file=key_file,
                     ca_certs=(verify
                               or os.environ.get('DX_CA_CERT')
                               or requests.certs.where()))
    if verify is False or os.environ.get('DX_CA_CERT') == 'NOVERIFY':
        # Verification explicitly disabled: drop cert checks and silence
        # urllib3's insecure-request warnings.
        pool_args.update(cert_reqs=ssl.CERT_NONE, ca_certs=None)
        urllib3.disable_warnings()
    if 'HTTPS_PROXY' in os.environ:
        pool_args.update(_get_proxy_info(os.environ['HTTPS_PROXY']))
        return urllib3.ProxyManager(**pool_args)
    return urllib3.PoolManager(**pool_args)
def __init__(self, max_reusable_connections=8, mock_urlopen=None):
    """
    Parameters
        max_reusable_connections
            max connections to keep alive in the pool
        mock_urlopen
            an optional alternate urlopen function for testing

    This class uses ``urllib3`` to maintain a pool of connections. We
    attempt to grab an existing idle connection from the pool, otherwise we
    spin up a new connection. Once a connection is closed, it is reinserted
    into the pool (unless the pool is full).

    SSL settings:
    - Certificates validated using Dropbox-approved trusted root certs
    - TLS v1.0 (newer TLS versions are not supported by urllib3)
    - Default ciphersuites. Choosing ciphersuites is not supported by urllib3
    - Hostname verification is provided by urllib3
    """
    self.mock_urlopen = mock_urlopen
    pool_settings = dict(
        # only a handful of hosts: api.dropbox.com, api-content.dropbox.com
        num_pools=4,
        maxsize=max_reusable_connections,
        block=False,
        # long enough so datastores await doesn't get interrupted
        timeout=60.0,
        cert_reqs=ssl.CERT_REQUIRED,
        ca_certs=TRUSTED_CERT_FILE,
        ssl_version=ssl.PROTOCOL_TLSv1,
    )
    self.pool_manager = urllib3.PoolManager(**pool_settings)
def __init__(self, pools_size=4):
    """Build an HTTPS pool manager from the global client Configuration.

    Parameters:
        pools_size: number of connection pools urllib3 keeps (roughly one
            per target host).
    """
    # urllib3.PoolManager will pass all kw parameters to connectionpool
    # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75
    # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680
    # ca_certs vs cert_file vs key_file
    # http://stackoverflow.com/a/23957365/2985775
    # Fix: the original instantiated Configuration() five times; read it once.
    config = Configuration()
    # cert_reqs: enforce server-certificate verification unless disabled.
    if config.verify_ssl:
        cert_reqs = ssl.CERT_REQUIRED
    else:
        cert_reqs = ssl.CERT_NONE
    # ca_certs: if no certificate file is configured, fall back to
    # Mozilla's root certificates (certifi).
    if config.ssl_ca_cert:
        ca_certs = config.ssl_ca_cert
    else:
        ca_certs = certifi.where()
    # https pool manager; cert_file/key_file are the client-side credentials.
    self.pool_manager = urllib3.PoolManager(num_pools=pools_size,
                                            cert_reqs=cert_reqs,
                                            ca_certs=ca_certs,
                                            cert_file=config.cert_file,
                                            key_file=config.key_file)
def open(self, col, row):
    # Ask the device at self.ip to open position (col, row) via a GET
    # request, then mark this record occupied and persist it.
    from requests.packages import urllib3
    pool = urllib3.PoolManager()
    url = 'http://{0}/?V={1}{2}'.format(self.ip, col, row)
    pool.request('GET', url)
    print(url)
    self.occupied = True
    self.save()
def _get_pool_manager(verify, cert_file, key_file):
    # Returns the shared, cached urllib3 manager when the caller supplies no
    # SSL overrides; otherwise builds a fresh (uncached) manager for this call.
    global _pool_manager
    default_pool_args = dict(maxsize=32,
                             cert_reqs=ssl.CERT_REQUIRED,
                             headers=_default_headers,
                             timeout=_default_timeout)
    # Don't define a default CA bundle on Windows platforms - urllib3 and
    # ssl.py take care of loading the default CA list provided by the OS.
    # Plus, explicitly setting a CA list in the pool manager on Windows
    # can sometimes cause verification failures; see DEVEX-875.
    # But non-Windows platforms require setting a ca_certs bundle here:
    if platform.system() != 'Windows':
        default_pool_args.update(ca_certs=_default_certs)
    if cert_file is None and verify is None and 'DX_CA_CERT' not in os.environ:
        # Common case: lazily create the shared manager under the lock and
        # reuse it for every subsequent request.
        with _pool_mutex:
            if _pool_manager is None:
                if _get_env_var_proxy():
                    # Route through the proxy found in the environment.
                    proxy_params = _get_proxy_info(
                        _get_env_var_proxy(print_proxy=True))
                    default_pool_args.update(proxy_params)
                    _pool_manager = urllib3.ProxyManager(**default_pool_args)
                else:
                    _pool_manager = urllib3.PoolManager(**default_pool_args)
            return _pool_manager
    else:
        # This is the uncommon case, normally, we want to cache the pool
        # manager.
        pool_args = dict(default_pool_args,
                         cert_file=cert_file,
                         key_file=key_file,
                         ca_certs=verify or os.environ.get('DX_CA_CERT') or requests.certs.where())
        if verify is False or os.environ.get('DX_CA_CERT') == 'NOVERIFY':
            # Verification explicitly disabled: drop cert checks and silence
            # urllib3's insecure-request warnings.
            pool_args.update(cert_reqs=ssl.CERT_NONE, ca_certs=None)
            urllib3.disable_warnings()
        if _get_env_var_proxy():
            proxy_params = _get_proxy_info(
                _get_env_var_proxy(print_proxy=True))
            pool_args.update(proxy_params)
            return urllib3.ProxyManager(**pool_args)
        else:
            return urllib3.PoolManager(**pool_args)
def _request_with_retry(url, max_retries=10):
    """GET *url* with aggressive retries; return the response, or None if
    the request ultimately fails for any reason (best-effort fetch)."""
    retry_policy = Retry(
        max_retries,
        redirect=max_retries,
        status=max_retries,
        status_forcelist=[502, 404],
        backoff_factor=0.2,
    )
    pool = urllib3.PoolManager(retries=retry_policy)
    try:
        response = pool.request("GET", url)
    except Exception:  # pylint: disable=W0703
        return None
    return response
def _get_pool_manager(verify, cert_file, key_file):
    # Returns the shared, cached urllib3 manager when the caller supplies no
    # SSL overrides; otherwise builds a fresh (uncached) manager for this call.
    global _pool_manager
    default_pool_args = dict(maxsize=32,
                             cert_reqs=ssl.CERT_REQUIRED,
                             headers=_default_headers,
                             timeout=_default_timeout)
    # Don't use the default CA bundle if the user has set the env variable
    # DX_USE_OS_CA_BUNDLE. Enabling that var will make us attempt to load
    # the default CA certs provided by the OS; see DEVEX-875.
    if 'DX_USE_OS_CA_BUNDLE' not in os.environ:
        default_pool_args.update(ca_certs=_default_certs)
    if cert_file is None and verify is None and 'DX_CA_CERT' not in os.environ:
        # Common case: lazily create the shared manager under the lock and
        # reuse it for every subsequent request.
        with _pool_mutex:
            if _pool_manager is None:
                if _get_env_var_proxy():
                    # Route through the proxy found in the environment.
                    proxy_params = _get_proxy_info(
                        _get_env_var_proxy(print_proxy=True))
                    default_pool_args.update(proxy_params)
                    _pool_manager = urllib3.ProxyManager(**default_pool_args)
                else:
                    _pool_manager = urllib3.PoolManager(**default_pool_args)
            return _pool_manager
    else:
        # This is the uncommon case, normally, we want to cache the pool
        # manager.
        pool_args = dict(default_pool_args,
                         cert_file=cert_file,
                         key_file=key_file,
                         ca_certs=verify or os.environ.get('DX_CA_CERT') or requests.certs.where())
        if verify is False or os.environ.get('DX_CA_CERT') == 'NOVERIFY':
            # Verification explicitly disabled: drop cert checks and silence
            # urllib3's insecure-request warnings.
            pool_args.update(cert_reqs=ssl.CERT_NONE, ca_certs=None)
            urllib3.disable_warnings()
        if _get_env_var_proxy():
            proxy_params = _get_proxy_info(
                _get_env_var_proxy(print_proxy=True))
            pool_args.update(proxy_params)
            return urllib3.ProxyManager(**pool_args)
        else:
            return urllib3.PoolManager(**pool_args)
def _get_pool_manager(request_kwargs):
    """Return a pool manager for this request: the shared module-level one
    unless SSL verification was customized via ``request_kwargs['verify']``
    or the ``DX_CA_CERT`` environment variable."""
    if 'verify' not in request_kwargs and 'DX_CA_CERT' not in os.environ:
        return _pool_manager
    cert_reqs = ssl.CERT_REQUIRED
    ca_certs = request_kwargs.get('verify', os.environ.get('DX_CA_CERT'))
    verification_off = (request_kwargs.get('verify') is False
                        or os.environ.get('DX_CA_CERT') == 'NOVERIFY')
    if verification_off:
        # Disable cert checks entirely and mute the insecure warnings.
        cert_reqs, ca_certs = ssl.CERT_NONE, None
        urllib3.disable_warnings()
    return urllib3.PoolManager(cert_reqs=cert_reqs,
                               ca_certs=ca_certs,
                               headers=_default_headers,
                               timeout=_default_timeout)
def lambda_handler(event, context):
    """AWS Lambda entry point: download the COVID county CSV from baseURL,
    stage it in /tmp under a timestamped name, and upload it to S3.

    Returns a dict with statusCode 200 and a JSON body naming the file.
    """
    s3 = boto3.client('s3')
    datetimestamp = datetime.datetime.today().strftime('%Y%m%dT%H%M%S')
    filename = datetimestamp + "_covid_county.csv"
    local_file = "/tmp/" + filename
    key = s3folder + '/' + filename
    # Fix: removed an unused urllib3.PoolManager() — the download actually
    # goes through `requests` below, so the pool was dead code.
    response = requests.get(baseURL, stream=True)
    with open(local_file, 'wb') as fout:
        fout.write(response.content)
    s3.upload_file(local_file, bucket, key)
    # TODO implement
    return {'statusCode': 200, 'body': json.dumps('file uploaded' + filename)}
def download_if_needed(url):
    """Download from URL to filename unless filename already exists"""
    filename = url.split('/')[-1]
    if os.path.exists(filename):
        print(filename, 'already exists')
        return
    print('downloading', filename)
    pool = urllib3.PoolManager()
    # preload_content=False keeps the response as a file-like stream so it
    # can be copied to disk without buffering the whole body in memory.
    resp = pool.request('GET', url, preload_content=False)
    with resp, open(filename, 'wb') as out_file:
        shutil.copyfileobj(resp, out_file)
    resp.release_conn()
def download_torrent(url, filename):
    '''
    Download a torrent from *url* and save it to *filename*.

    @param url URL of the torrent
    @param filename File to write the torrent into
    @return True if processing succeeded, otherwise False
    '''
    Log.info("AntiPirat: Stahujem torrent z URL {}".format(url))
    request = urllib3.PoolManager()
    response = request.urlopen('GET', url)
    try:
        # Fix: open in binary mode ('wb', was 'w') — response.data is bytes
        # and text mode raises TypeError on Python 3. The context manager
        # also guarantees the handle is closed on error.
        with open(filename, 'wb') as f:
            f.write(response.data)
        Log.info(
            "AntiPirat: Stiahnuty torrent bol ulozeny do suboru {}".format(
                filename))
        return True
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed.
        Log.error("AntiPirat: Chyba pri zapisovani torrent suboru {}".format(
            filename))
        return False
def loginpage(): flag = 0 error = ' ' try: if request.method == "POST": print 'Testing' temp = request.form['inputName'] temp2 = request.form['inputPassword'] print temp print temp2 encoded_body=json.dumps({"username":temp,"password":temp2,"requestCode":"LOGIN"}).encode('utf-8') http=urllib3.PoolManager() r=http.urlopen('POST', 'http:localhost:5001/todo/tasks/',headers={'Content-Type':'application/json'},body=encoded_body) json.loads(r.data.decode('utf-8'))['json'] print temp print temp2 print r.status return redirect("create_tasks") else: return render_template("login.html", error=error) except Exception as e: return render_template("login.html", error=error)
def _clear_index_cache(dbm):
    # Clears Elasticsearch's field-data cache (used for sorting and
    # faceting) for the index backing the given database manager.
    pool = urllib3.PoolManager()
    endpoint = '%s_cache/clear?field_data=true&index=%s' % (
        ELASTIC_SEARCH_URL, dbm.database_name)
    pool.request('POST', endpoint)
urllib3.disable_warnings() dotenv_path = join(dirname(__file__), '.env') load_dotenv(dotenv_path) SLACK_TOKEN = os.getenv('SLACK_BOT_TOKEN', None) BOT_WEBHOOK_SECRET = os.getenv('SLACK_WEBHOOK_SECRET', None) BOT_NAME = os.getenv('BOT_NAME', None) BOT_IMAGE_URL = os.getenv('BOT_IMAGE_URL', None) BOT_DEBUG = os.getenv('BOT_DEBUG', False) BOT_USERNAME = os.getenv('BOT_USERNAME', 'captainSlackHook') HIPCHAT_API_TOKEN = os.getenv('HIPCHAT_API_TOKEN', None) bot = Flask(__name__) slack_client = SlackClient(SLACK_TOKEN) http = urllib3.PoolManager() def messageBuilder(data): if 'event' in data: message = [] for event_data in data['event']: fields = [] event_message = { "pretext": "Incomming Notification", "author_name": BOT_NAME, "fallback": "Event Triggered", } for k, v in event_data.iteritems(): if k == 'fields': for _field in v:
DEFAULT_TIMEOUT = 600 DEFAULT_RETRY_AFTER_503_INTERVAL = 60 _DEBUG = 0 # debug verbosity level _UPGRADE_NOTIFY = True USER_AGENT = "{name}/{version} ({platform})".format( name=__name__, version=TOOLKIT_VERSION, platform=platform.platform()) _default_headers = requests.utils.default_headers() _default_headers['DNAnexus-API'] = API_VERSION _default_headers['User-Agent'] = USER_AGENT _default_timeout = urllib3.util.timeout.Timeout(connect=DEFAULT_TIMEOUT, read=DEFAULT_TIMEOUT) _pool_manager = urllib3.PoolManager(maxsize=32, cert_reqs=ssl.CERT_REQUIRED, ca_certs=requests.certs.where(), headers=_default_headers, timeout=_default_timeout) _RequestForAuth = namedtuple('_RequestForAuth', 'method url headers') _expected_exceptions = exceptions.network_exceptions + ( exceptions.DXAPIError, ) def _get_pool_manager(request_kwargs): if 'verify' in request_kwargs or 'DX_CA_CERT' in os.environ: cert_reqs = ssl.CERT_REQUIRED ca_certs = request_kwargs.get('verify', os.environ.get('DX_CA_CERT')) if request_kwargs.get('verify') is False or os.environ.get( 'DX_CA_CERT') == 'NOVERIFY': cert_reqs, ca_certs = ssl.CERT_NONE, None urllib3.disable_warnings()