def get_page(source):
    protocol = urlparse(source)[0] + "://"
    source = protocol + quote(source.replace(protocol, ""))
    if settings.USING_PROXY:
        http = SOCKSProxyManager(
            settings.proxy_type + "://" + settings.proxy_host + ":" + settings.proxy_port,
            cert_reqs="CERT_REQUIRED",  # Force certificate check
            ca_certs=certifi.where(),   # Path to the Certifi bundle
        )
    else:
        http = urllib3.PoolManager(
            cert_reqs="CERT_REQUIRED",  # Force certificate check
            ca_certs=certifi.where(),   # Path to the Certifi bundle
        )
    try:
        page = http.urlopen(
            "GET",
            source,
            preload_content=False,
            # timeout=urllib3.Timeout(connect=5.0, read=10.0),
            headers={'User-Agent': 'Mozilla'}
        )
    except urllib3.exceptions.MaxRetryError as error:
        print("Connection error:", error)
        sys.exit(1)
    else:
        return page
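# Usage sketch (added, not part of the original snippet): because the request is
# made with preload_content=False, the caller owns the streamed response and
# should release the connection when done. The URL below is a placeholder.
page = get_page("https://example.com/some path")
try:
    body = page.read()       # read the full body as bytes
finally:
    page.release_conn()      # return the connection to the pool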
def get_file(url, output_path=None, headers=None, session=None):
    try:
        if not session:
            session = get_session(url)
        output_dir = os.path.dirname(os.path.abspath(output_path))
        if not os.path.exists(output_dir):
            os.makedirs(output_dir)
        if not headers:
            headers = HEADERS
        else:
            headers.update(HEADERS)
        r = session.get(url, headers=headers, stream=True, verify=certifi.where())
        if r.status_code == 401:
            session = get_session(url)
            r = session.get(url, headers=headers, stream=True, verify=certifi.where())
        if r.status_code != 200:
            logger.error('HTTP GET Failed for url: %s' % url)
            logger.error("Host %s responded:\n\n%s" % (urlsplit(url).netloc, r.text))
            logger.warn('File [%s] transfer failed. ' % output_path)
        else:
            logger.debug("Transferring file %s to %s" % (url, output_path))
            with open(output_path, 'wb') as data_file:
                for chunk in r.iter_content(CHUNK_SIZE):
                    data_file.write(chunk)
                    data_file.flush()
            logger.info('File [%s] transfer successful.' % output_path)
            return True
    except requests.exceptions.RequestException as e:
        logger.error('HTTP Request Exception: %s %s' % (e.errno, e.message))
        return False
def request(params, session, dest_url, verb='POST', headers=xreq_header, data=""):
    # sleep for rate limiting
    time.sleep(3)
    if debug:
        print "HTTP Verb: " + verb
        print "URL: " + qualys_api_url + dest_url
        print "Params: " + str(params)
    try:
        if verb.upper() == 'GET':
            s = session.get(qualys_api_url + dest_url, params=params, headers=headers,
                            verify=certifi.where())
        elif verb.upper() == 'POST':
            s = session.post(qualys_api_url + dest_url, params=params, headers=headers,
                             data=data, verify=certifi.where())
        else:
            print "Unsupported HTTP verb: " + verb
            sys.exit(2)
        if debug:
            print "status_code: " + str(s.status_code)
    except Exception as e:
        print e
        print "Retrying..."
        try:
            s = session.post(qualys_api_url + dest_url, params=params, headers=headers, data=data)
        except Exception as e:
            print e
            sys.exit(2)
    return s
def googleAuthCallback(request):
    try:
        state = request.GET.get('state')
        localprint(request, state)
        code = str(request.GET.get('code'))
        exchangeToken_url = 'https://accounts.google.com/o/oauth2/token'
        data = {'code': code,
                'client_id': GOOGLE_CLIENT_ID,
                'client_secret': GOOGLE_CLIENT_SECRET,
                'redirect_uri': GOOGLE_CALLBACK_URL,
                'grant_type': 'authorization_code'}
        headers = {'Content-Type': 'application/x-www-form-urlencoded'}
        h = httplib2.Http(ca_certs=certifi.where())
        resp, content = h.request(exchangeToken_url, 'POST', headers=headers, body=urlencode(data))
        exchange_token_obj = simplejson.loads(content)
        localprint(request, exchange_token_obj)
        response = None
        if exchange_token_obj.has_key('access_token'):
            access_token = exchange_token_obj['access_token']
            request.session['google_access_token'] = access_token
            profile_url = 'https://www.googleapis.com/oauth2/v1/userinfo?access_token=' + access_token
            h2 = httplib2.Http(ca_certs=certifi.where())
            user_info_resp, user_info_content = h2.request(profile_url, 'GET')
            user_info_obj = simplejson.loads(user_info_content)
            localprint(request, user_info_obj)
            if user_info_resp.status == 200:
                if state == 'userpage':
                    # access token has to be changed to the right access_token
                    response = genericSocialActivate('googleplus', user_info_obj['id'],
                                                     request.COOKIES['access_token'], request, state)
                elif state == 'homepage' or state == 'signin':
                    response = genericSocialLogin('googleplus', user_info_obj['id'],
                                                  user_info_obj['email'], request, access_token, state)
            else:
                response = signinError(request, 'password', u"Sorry, the Google sign-in failed.")
            return response
        else:
            return HttpResponse("<script>window.close();</script>")
    except:
        logger = logging.getLogger(__name__)
        return errorscreen(request, logger, sys)
def init():
    global __HTTP
    proxy_url = os.getenv("http_proxy")
    if proxy_url and len(proxy_url) > 0:
        logger.info("Rally connects via proxy URL [%s] to the Internet (picked up from the "
                    "environment variable [http_proxy])." % proxy_url)
        __HTTP = urllib3.ProxyManager(proxy_url, cert_reqs='CERT_REQUIRED', ca_certs=certifi.where())
    else:
        logger.info("Rally connects directly to the Internet (no proxy support).")
        __HTTP = urllib3.PoolManager(cert_reqs='CERT_REQUIRED', ca_certs=certifi.where())
def send_request_to_path(self, base_url, auth, params=None):
    """ Construct an API request, send it to the API, and parse the response. """
    from intercom import __version__
    req_params = {}

    # full URL
    url = base_url + self.path

    headers = {
        'User-Agent': 'python-intercom/' + __version__,
        'AcceptEncoding': 'gzip, deflate',
        'Accept': 'application/json'
    }
    if self.http_method in ('POST', 'PUT', 'DELETE'):
        headers['content-type'] = 'application/json'
        req_params['data'] = json.dumps(params, cls=ResourceEncoder)
    elif self.http_method == 'GET':
        req_params['params'] = params
    req_params['headers'] = headers

    # request logging
    if logger.isEnabledFor(logging.DEBUG):
        logger.debug("Sending %s request to: %s", self.http_method, url)
        logger.debug("  headers: %s", headers)
        if self.http_method == 'GET':
            logger.debug("  params: %s", req_params['params'])
        else:
            logger.debug("  params: %s", req_params['data'])

    if self.http_session is None:
        resp = requests.request(
            self.http_method, url, timeout=self.timeout,
            auth=auth, verify=certifi.where(), **req_params)
    else:
        resp = self.http_session.request(
            self.http_method, url, timeout=self.timeout,
            auth=auth, verify=certifi.where(), **req_params)

    # response logging
    if logger.isEnabledFor(logging.DEBUG):
        logger.debug("Response received from %s", url)
        logger.debug("  encoding=%s status:%s", resp.encoding, resp.status_code)
        logger.debug("  content:\n%s", resp.content)

    parsed_body = self.parse_body(resp)
    self.raise_errors_on_failure(resp)
    self.set_rate_limit_details(resp)
    return parsed_body
def login():
    """
    Login to sf.net, get auth details...

    Taken from https://sourceforge.net/p/forge/documentation/Allura%20API/
    """
    consumer = oauth.Consumer(CONSUMER_KEY, CONSUMER_SECRET)
    client = oauth.Client(consumer)
    client.ca_certs = certifi.where()

    # Step 1: Get a request token. This is a temporary token that is used for
    # having the user authorize an access token and to sign the request to obtain
    # said access token.
    resp, content = client.request(REQUEST_TOKEN_URL, 'GET')
    if resp['status'] != '200':
        raise Exception("Invalid response %s." % resp['status'])

    request_token = dict(urlparse.parse_qsl(content))

    # these are intermediate tokens and not needed later
    #print "Request Token:"
    #print "    - oauth_token        = %s" % request_token['oauth_token']
    #print "    - oauth_token_secret = %s" % request_token['oauth_token_secret']
    #print

    # Step 2: Redirect to the provider. Since this is a CLI script we do not
    # redirect. In a web application you would redirect the user to the URL
    # below, specifying the additional parameter oauth_callback=<your callback URL>.
    webbrowser.open("%s?oauth_token=%s" % (AUTHORIZE_URL, request_token['oauth_token']))

    # Since we didn't specify a callback, the user must now enter the PIN displayed in
    # their browser. If you had specified a callback URL, it would have been called with
    # oauth_token and oauth_verifier parameters, used below in obtaining an access token.
    oauth_verifier = raw_input('What is the PIN? ')

    # Step 3: Once the consumer has redirected the user back to the oauth_callback
    # URL you can request the access token the user has approved. You use the
    # request token to sign this request. After this is done you throw away the
    # request token and use the access token returned. You should store this
    # access token somewhere safe, like a database, for future use.
    token = oauth.Token(request_token['oauth_token'], request_token['oauth_token_secret'])
    token.set_verifier(oauth_verifier)
    client = oauth.Client(consumer, token)
    client.ca_certs = certifi.where()

    resp, content = client.request(ACCESS_TOKEN_URL, "GET")
    access_token = dict(urlparse.parse_qsl(content))

    print access_token
    return access_token
def _create_elasticsearch_client(self, url, client_options):
    aws_options = client_options.pop('aws', None)
    if aws_options is None:
        LOG.info("Connecting without AWS auth")
        elastic = Elasticsearch(hosts=[url], ca_certs=certifi.where(), **client_options)
        return elastic

    LOG.info("Connecting with AWS auth")
    aws_auth = self.create_aws_auth(aws_options)
    elastic = Elasticsearch(
        hosts=[url],
        http_auth=aws_auth,
        ca_certs=certifi.where(),
        connection_class=RequestsHttpConnection,
        **client_options)
    return elastic
def main():
    os.chdir(os.path.dirname(os.path.realpath(__file__)))
    os.system("pip install certifi")
    print "Copying certifi's cacert.pem"
    import certifi
    shutil.copy2(certifi.where(), 'agkyra/resources/cacert.pem')
def twitterCallback(request):
    twitterConsumerKey = getattr(settings, 'TWITTER_CONSUMER_KEY')
    twitterConsumerSecret = getattr(settings, 'TWITTER_CONSUMER_SECRET')
    consumer = oauth.Consumer(twitterConsumerKey, twitterConsumerSecret)

    # cancel handle
    if 'oauth_verifier' not in request.GET:
        return HttpResponseRedirect('/myCard/')
    oauthVerifier = request.GET['oauth_verifier']

    token = oauth.Token(request.session['twitterRequestToken']['oauth_token'],
                        request.session['twitterRequestToken']['oauth_token_secret'])
    token.set_verifier(oauthVerifier)
    client = oauth.Client(consumer, token)
    client.ca_certs = certifi.where()
    resp, content = client.request(twitterAccessTokenURL, "POST")
    if resp['status'] != '200':
        raise Exception("Invalid response from Twitter.")
    accessToken = dict(urlparse.parse_qsl(content))

    # save account
    card = Card.objects.filter(user=request.user)
    if len(card) <= 0:
        raise Exception("Can not find card")
    card = card[0]
    card.twitter = 'https://twitter.com/' + accessToken['screen_name']
    card.save()

    # send notification to friends
    sendUpdateNotification(request, 'twitter')

    return HttpResponseRedirect('/myCard/')
def process_updates():
    """
    Decides which type the update is and routes it to the appropriate
    route_updates method and launches a thread for the run_extensions method.
    """
    signal.signal(signal.SIGINT, signal.SIG_IGN)
    plugin_http = urllib3.PoolManager(cert_reqs='CERT_REQUIRED', ca_certs=certifi.where())
    plugin_http.timeout = urllib3.Timeout(connect=1.0)
    plugin_http.retries = 3
    update_router = RouteMessage(PLUGINS, plugin_http, GET_ME, CONFIG)
    while RUNNING.value:
        try:
            update = MESSAGE_QUEUE.get_nowait()
        except queue.Empty:
            time.sleep(SLEEP_TIME)
            continue
        extension_thread = ThreadProcess(target=run_extensions, args=(update, ))
        extension_thread.start()
        if 'message' in update:
            update_router.route_update(update['message'])
        elif 'edited_message' in update:
            update_router.route_update(update['edited_message'])
        elif 'callback_query' in update:
            route_callback_query(PLUGINS, GET_ME, CONFIG, plugin_http, update['callback_query'])
        elif 'inline_query' in update:
            route_inline_query(PLUGINS, GET_ME, CONFIG, plugin_http, update['inline_query'])
        extension_thread.join()
def __init__(self, env, id_correios, password, cert=False, log_config=None, timeout=None):
    '''
    Webservice initialization. Depending on the env get a different wsdl definition.
    New Correios SIGEP uses HTTPAuth to do requests.

    Args:
        env (str): Environment used to get the wsdl
        id_correios (str): IdCorreios given by correios website
        password (str): password vinculated to the IdCorreios
        log_config (dict): Dictionary configurations of logging
    '''
    ''' Untrusted ssl certificate for homolog envs, see more at:
        https://www.ssllabs.com/ssltest/analyze.html?d=apphom.correios.com.br
    '''
    if cert is False:
        verify = False
    else:
        verify = certifi.where()

    self.timeout = timeout or 300

    if log_config is not None and isinstance(log_config, dict):
        """ Example config from zeep documentation:
        {
            'version': 1,
            'formatters': {
                'verbose': {
                    'format': '%(name)s: %(message)s'
                }
            },
            'handlers': {
                'console': {
                    'level': 'DEBUG',
                    'class': 'logging.StreamHandler',
                    'formatter': 'verbose',
                },
            },
            'loggers': {
                'zeep.transports': {
                    'level': 'DEBUG',
                    'propagate': True,
                    'handlers': ['console'],
                },
            }
        }
        """
        logging.config.dictConfig(log_config)

    session = Session()
    session.timeout = self.timeout
    session.verify = verify
    session.auth = (id_correios, password)
    t = Transport(session=session)
    self.client = Client(wsdl=self.get_env(env), transport=t)
def __init__(self, pools_size=4):
    # urllib3.PoolManager will pass all kw parameters to connectionpool
    # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75
    # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680
    # ca_certs vs cert_file vs key_file
    # http://stackoverflow.com/a/23957365/2985775

    # cert_reqs
    if Configuration().verify_ssl:
        cert_reqs = ssl.CERT_REQUIRED
    else:
        cert_reqs = ssl.CERT_NONE

    # ca_certs
    if Configuration().ssl_ca_cert:
        ca_certs = Configuration().ssl_ca_cert
    else:
        # if not set certificate file, use Mozilla's root certificates.
        ca_certs = certifi.where()

    # cert_file
    cert_file = Configuration().cert_file

    # key file
    key_file = Configuration().key_file

    # https pool manager
    self.pool_manager = urllib3.PoolManager(
        num_pools=pools_size,
        cert_reqs=cert_reqs,
        ca_certs=ca_certs,
        cert_file=cert_file,
        key_file=key_file
    )
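# A minimal standalone sketch of the same pattern (added for illustration); the
# verify_ssl/ssl_ca_cert parameters mirror the configuration fields used above
# and are otherwise hypothetical.
import ssl
import certifi
import urllib3

def build_pool(verify_ssl=True, ssl_ca_cert=None):
    # Require verification unless explicitly disabled, and fall back to
    # Mozilla's CA bundle when no custom CA file is configured.
    cert_reqs = ssl.CERT_REQUIRED if verify_ssl else ssl.CERT_NONE
    return urllib3.PoolManager(cert_reqs=cert_reqs,
                               ca_certs=ssl_ca_cert or certifi.where())

pool = build_pool()
resp = pool.request("GET", "https://example.com")
print(resp.status)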
def get_client(**kwargs):
    """Return an Elasticsearch client using the provided parameters"""
    kwargs['master_only'] = False if not 'master_only' in kwargs else kwargs['master_only']
    kwargs['use_ssl'] = False if not 'use_ssl' in kwargs else kwargs['use_ssl']
    logger.debug("kwargs = {0}".format(kwargs))
    master_only = kwargs.pop('master_only')
    if kwargs['use_ssl']:
        try:
            logger.info('Attempting to verify SSL certificate.')
            import certifi
            kwargs['verify_certs'] = True
            kwargs['ca_certs'] = certifi.where()
        except ImportError:
            logger.warn('Unable to verify SSL certificate.')
    try:
        client = elasticsearch.Elasticsearch(**kwargs)
        # Verify the version is acceptable.
        check_version(client)
        # Verify "master_only" status, if applicable
        check_master(client, master_only=master_only)
        return client
    except Exception:
        click.echo(click.style('ERROR: Connection failure.', fg='red', bold=True))
        sys.exit(1)
def __init__(self, okta_url, okta_token, username, password, client_ipaddr,
             allow_insecure_auth=False, assert_pinset=okta_pinset):
    passcode_len = 6
    self.okta_url = None
    self.okta_token = okta_token
    self.username = username
    self.password = password
    self.client_ipaddr = client_ipaddr
    self.passcode = None

    self.okta_urlparse = urlparse.urlparse(okta_url)
    url_new = (self.okta_urlparse.scheme, self.okta_urlparse.netloc, '', '', '', '')
    self.okta_url = urlparse.urlunparse(url_new)

    if password and len(password) > passcode_len:
        last = password[-passcode_len:]
        if last.isdigit():
            self.passcode = last
            self.password = password[:-passcode_len]

    self.pool = PublicKeyPinsetConnectionPool(
        self.okta_urlparse.hostname,
        self.okta_urlparse.port,
        assert_pinset=assert_pinset,
        cert_reqs='CERT_REQUIRED',
        ca_certs=certifi.where(),
    )
def test_handshake_fail(self):
    self.server_start_tls(_server_ssl_options())
    client_future = self.client_start_tls(
        dict(cert_reqs=ssl.CERT_REQUIRED, ca_certs=certifi.where()))
    with ExpectLog(gen_log, "SSL Error"):
        with self.assertRaises(ssl.SSLError):
            yield client_future
def fetcher(url):
    if self.method == 'LOCAL':
        return "No case names fetched during tests."
    else:
        r = requests.get(
            url,
            allow_redirects=True,
            headers={'User-Agent': 'Juriscraper'},
            verify=certifi.where(),
        )
        r.raise_for_status()
        html_tree = html.fromstring(r.text)
        html_tree.make_links_absolute(self.url)
        plaintiff = ''
        defendant = ''
        try:
            plaintiff = html_tree.xpath(
                "//text()[contains(., 'Style:')]/ancestor::div[@class='span2']/following-sibling::div/text()"
            )[0]
            defendant = html_tree.xpath(
                "//text()[contains(., 'v.:')]/ancestor::div[@class='span2']/following-sibling::div/text()"
            )[0]
        except IndexError:
            logger.warn("No title or defendant found for {}".format(url))
        if defendant.strip():
            # If there's a defendant
            return titlecase('%s v. %s' % (plaintiff, defendant))
        else:
            return titlecase(plaintiff)
def __init__(self, configuration, pools_size=4, maxsize=4):
    # maxsize is the number of requests to host that are allowed in parallel

    # ca_certs vs cert_file vs key_file
    # http://stackoverflow.com/a/23957365/2985775

    # ca_certs
    if configuration.ssl_ca_cert:
        ca_certs = configuration.ssl_ca_cert
    else:
        # if not set certificate file, use Mozilla's root certificates.
        ca_certs = certifi.where()

    self.ssl_context = ssl_context = ssl.SSLContext()
    if configuration.cert_file:
        ssl_context.load_cert_chain(
            configuration.cert_file, keyfile=configuration.key_file
        )

    self.proxy_port = self.proxy_host = None

    # https pool manager
    if configuration.proxy:
        self.proxy_port = 80
        self.proxy_host = configuration.proxy

    self.pool_manager = AsyncHTTPClient()
def searchDocument(id):
    es = Elasticsearch(
        ['https://cdr-es.istresearch.com:9200/memex-qpr-cp4-2'],
        http_auth=('cdr-memex', '5OaYUNBhjO68O7Pn'),
        port=9200,
        use_ssl=True,
        verify_certs=True,
        ca_certs=certifi.where(),
    )
    query_body = {
        "query": {
            "bool": {
                "must": {
                    "match": {
                        "_id": id
                    }
                }
            }
        }
    }
    response = es.search(body=query_body, request_timeout=60)
    document = response["hits"]["hits"]
    if document:
        return document[0]
    else:
        return document
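# Usage sketch (added, not from the original source): a non-empty result from
# the helper above is a standard Elasticsearch hit dict, so the document body
# lives under "_source". The id below is a placeholder.
doc = searchDocument("some-document-id")
if doc:
    print(doc["_id"])
    print(doc["_source"])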
def _download(self, request_dict={}):
    if self.method == 'LOCAL':
        return super(Site, self)._download(request_dict=request_dict)
    else:
        html_l = super(Site, self)._download(request_dict)
        s = requests.session()
        html_trees = []
        for url in html_l.xpath("//*[@class='cen']/a/@href"):
            logger.info("Getting sub-url: {url}".format(url=url))
            r = s.get(
                url,
                headers={'User-Agent': 'Juriscraper'},
                verify=certifi.where(),
                **request_dict
            )
            r.raise_for_status()

            # If the encoding is iso-8859-1, switch it to cp1252 (a superset)
            if r.encoding == 'ISO-8859-1':
                r.encoding = 'cp1252'

            # Grab the content
            text = self._clean_text(r.text)
            html_tree = html.fromstring(text)
            html_tree.make_links_absolute(url)

            remove_anchors = lambda url: url.split('#')[0]
            html_tree.rewrite_links(remove_anchors)
            html_trees.append(html_tree)
        return html_trees
def _stream_trace(streaming_url, user_name, password):
    import sys
    import certifi
    import urllib3
    try:
        import urllib3.contrib.pyopenssl
        urllib3.contrib.pyopenssl.inject_into_urllib3()
    except ImportError:
        pass

    std_encoding = sys.stdout.encoding
    http = urllib3.PoolManager(cert_reqs='CERT_REQUIRED', ca_certs=certifi.where())
    headers = urllib3.util.make_headers(basic_auth='{0}:{1}'.format(user_name, password))
    r = http.request(
        'GET',
        streaming_url,
        headers=headers,
        preload_content=False
    )
    for chunk in r.stream():
        if chunk:
            # Extra encode() and decode for stdout which does not support 'utf-8'
            print(chunk.decode(encoding='utf-8', errors='replace')
                  .encode(std_encoding, errors='replace')
                  .decode(std_encoding, errors='replace'),
                  end='')  # each line of log has CRLF.
    r.release_conn()
def test_official():
    if not sys.version_info >= (3, 5) or platform.python_implementation() != 'CPython':
        warnings.warn('Not running "official" tests, since follow_wrapped is not supported '
                      'on this platform (cpython version >= 3.5 required)')
        return

    http = urllib3.PoolManager(
        cert_reqs='CERT_REQUIRED',
        ca_certs=certifi.where())
    request = http.request('GET', 'https://core.telegram.org/bots/api')
    soup = BeautifulSoup(request.data.decode('utf-8'), 'html.parser')

    for thing in soup.select('h4 > a.anchor'):
        # Methods and types don't have spaces in them, luckily all other sections of the docs do
        # TODO: don't depend on that
        if '-' not in thing['name']:
            h4 = thing.parent
            name = h4.text
            test = None
            # Is it a method
            if h4.text[0].lower() == h4.text[0]:
                test = check_method
            else:  # Or a type/object
                if name not in IGNORED_OBJECTS:
                    test = check_object
            if test:
                def fn():
                    return test(h4)
                fn.description = '{}({}) ({})'.format(test.__name__, h4.text, __name__)
                yield fn
def get_file(base_url, fname):
    """Download web-based file using pycurl with SSL security via certifi"""
    print('Downloading %s' % fname)
    print('from %s' % base_url)
    c = pycurl.Curl()
    f = open(fname, 'wb')
    if https:
        c.setopt(pycurl.CAINFO, certifi.where())
        print('- Using http-secure transfer protocol')
    else:
        c.setopt(pycurl.SSL_VERIFYPEER, 0)
        c.setopt(pycurl.SSL_VERIFYHOST, 0)
        print('- Using unsecure http transfer protocol')
    c.setopt(c.URL, base_url + fname)
    c.setopt(c.WRITEDATA, f)
    c.perform()
    responsecode = c.getinfo(c.RESPONSE_CODE)
    if responsecode == 200:
        print('- Status: OK')
        print('- Elapsed time: %f sec' % c.getinfo(c.TOTAL_TIME))
    else:
        print('- Status: ERROR, response code %d' % responsecode)
    c.close()
    print(' ')
    return
def _download(self, request_dict={}):
    if self.method == "LOCAL":
        # Note that this is returning a list of HTML trees.
        html_trees = [super(Site, self)._download(request_dict=request_dict)]
    else:
        html_l = OpinionSite._download(self)
        s = requests.session()
        html_trees = []
        for url in html_l.xpath(
            "//td[@width='49%']//tr[contains(., ', {year}')]/td[5]/a/@href".format(year=self.year)
        ):
            r = s.get(url, headers={"User-Agent": "Juriscraper"}, verify=certifi.where(), **request_dict)
            r.raise_for_status()

            # If the encoding is iso-8859-1, switch it to cp1252 (a superset)
            if r.encoding == "ISO-8859-1":
                r.encoding = "cp1252"

            # Grab the content
            text = self._clean_text(r.text)
            html_tree = html.fromstring(text)
            html_tree.make_links_absolute(self.url)

            remove_anchors = lambda url: url.split("#")[0]
            html_tree.rewrite_links(remove_anchors)
            html_trees.append(html_tree)
    return html_trees
def where():
    """Return the preferred certificate bundle."""
    if platform.linux_distribution()[0]:
        return _LINUX_LOCATION
    else:
        import certifi
        return certifi.where()
def fetch_or_load(spec_path):
    """
    Fetch a new specification or use the cache if it's current.

    Arguments:

        spec_path: the path to a cached specification
    """
    headers = {}

    try:
        modified = datetime.utcfromtimestamp(os.path.getmtime(spec_path))
        date = modified.strftime("%a, %d %b %Y %I:%M:%S UTC")
        headers["If-Modified-Since"] = date
    except OSError as error:
        if error.errno != errno.ENOENT:
            raise

    request = urllib.Request(VALIDATION_SPEC, headers=headers)
    response = urllib.urlopen(request, cafile=certifi.where())

    if response.code == 200:
        with open(spec_path, "w+b") as spec:
            spec.writelines(response)
            spec.seek(0)
            return html.parse(spec)

    with open(spec_path) as spec:
        return html.parse(spec)
def __init__(self):
    self._auth = None
    self._sj_client = HTTPClient.from_url(
        "https://{0}{1}".format(SJ_DOMAIN, SJ_URL),
        concurrency=20,
        network_timeout=15,
        ssl_options={
            "ca_certs": certifi.where(),
        })
    self._pl_client = HTTPClient.from_url(
        "https://{0}{1}".format(SJ_DOMAIN, SJ_URL),
        concurrency=1,
        network_timeout=120,
        ssl_options={
            "ca_certs": certifi.where(),
        })
def test_loads_certifi(self):
    """
    Loading certifi returns a list of Certificates.
    """
    cas = pem.parse_file(certifi.where())
    assert isinstance(cas, list)
    assert all(isinstance(ca, pem.Certificate) for ca in cas)
def _download(self, request_dict={}):
    html_l = OpinionSite._download(self)
    s = requests.session()
    html_trees = []
    for url in html_l.xpath("//td[contains(./text(),'Opinion') or contains(./text(), 'PER CURIAM')]"
                            "/preceding-sibling::td[1]//@href")[:2]:
        r = s.get(
            url,
            headers={'User-Agent': 'Juriscraper'},
            verify=certifi.where(),
            **request_dict
        )
        r.raise_for_status()

        # If the encoding is iso-8859-1, switch it to cp1252 (a superset)
        if r.encoding == 'ISO-8859-1':
            r.encoding = 'cp1252'

        # Grab the content
        text = self._clean_text(r.text)
        html_tree = html.fromstring(text)
        html_tree.make_links_absolute(self.url)

        remove_anchors = lambda url: url.split('#')[0]
        html_tree.rewrite_links(remove_anchors)
        html_trees.append(html_tree)
    return html_trees
def _make_default_http():
    if certifi is not None:
        return urllib3.PoolManager(
            cert_reqs='CERT_REQUIRED',
            ca_certs=certifi.where())
    else:
        return urllib3.PoolManager()
def __init__(self, service_url,
             authentication=None,
             operation_timeout_seconds=30,
             io_threads=1,
             message_listener_threads=1,
             concurrent_lookup_requests=50000,
             log_conf_file_path=None,
             use_tls=False,
             tls_trust_certs_file_path=None,
             tls_allow_insecure_connection=False
             ):
    """
    Create a new Pulsar client instance.

    **Args**

    * `service_url`: The Pulsar service url eg: pulsar://my-broker.com:6650/

    **Options**

    * `authentication`: Set the authentication provider to be used with the broker.
      For example: `AuthenticationTls` or `AuthenticationAthenz`
    * `operation_timeout_seconds`: Set timeout on client operations
      (subscribe, create producer, close, unsubscribe).
    * `io_threads`: Set the number of IO threads to be used by the Pulsar client.
    * `message_listener_threads`: Set the number of threads to be used by the Pulsar
      client when delivering messages through message listener. The default is 1 thread
      per Pulsar client. If using more than 1 thread, messages for distinct
      `message_listener`s will be delivered in different threads, however a single
      `MessageListener` will always be assigned to the same thread.
    * `concurrent_lookup_requests`: Number of concurrent lookup-requests allowed on
      each broker connection to prevent overload on the broker.
    * `log_conf_file_path`: Initialize log4cxx from a configuration file.
    * `use_tls`: Configure whether to use TLS encryption on the connection. This setting
      is deprecated. TLS will be automatically enabled if the `serviceUrl` is set to
      `pulsar+ssl://` or `https://`
    * `tls_trust_certs_file_path`: Set the path to the trusted TLS certificate file.
      If empty defaults to certifi.
    * `tls_allow_insecure_connection`: Configure whether the Pulsar client accepts
      untrusted TLS certificates from the broker.
    """
    _check_type(str, service_url, 'service_url')
    _check_type_or_none(Authentication, authentication, 'authentication')
    _check_type(int, operation_timeout_seconds, 'operation_timeout_seconds')
    _check_type(int, io_threads, 'io_threads')
    _check_type(int, message_listener_threads, 'message_listener_threads')
    _check_type(int, concurrent_lookup_requests, 'concurrent_lookup_requests')
    _check_type_or_none(str, log_conf_file_path, 'log_conf_file_path')
    _check_type(bool, use_tls, 'use_tls')
    _check_type_or_none(str, tls_trust_certs_file_path, 'tls_trust_certs_file_path')
    _check_type(bool, tls_allow_insecure_connection, 'tls_allow_insecure_connection')

    conf = _pulsar.ClientConfiguration()
    if authentication:
        conf.authentication(authentication.auth)
    conf.operation_timeout_seconds(operation_timeout_seconds)
    conf.io_threads(io_threads)
    conf.message_listener_threads(message_listener_threads)
    conf.concurrent_lookup_requests(concurrent_lookup_requests)
    if log_conf_file_path:
        conf.log_conf_file_path(log_conf_file_path)
    if use_tls or service_url.startswith('pulsar+ssl://') or service_url.startswith('https://'):
        conf.use_tls(True)
    if tls_trust_certs_file_path:
        conf.tls_trust_certs_file_path(tls_trust_certs_file_path)
    else:
        conf.tls_trust_certs_file_path(certifi.where())
    conf.tls_allow_insecure_connection(tls_allow_insecure_connection)
    self._client = _pulsar.Client(service_url, conf)
    self._consumers = []
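# Usage sketch (added), based only on the docstring above; the broker URL is a
# placeholder. Passing certifi.where() simply makes the default trust store
# explicit, since TLS is already inferred from the pulsar+ssl:// scheme.
import certifi
import pulsar

client = pulsar.Client(
    'pulsar+ssl://my-broker.example.com:6651/',
    tls_trust_certs_file_path=certifi.where(),
)
producer = client.create_producer('my-topic')
producer.send(b'hello')
client.close()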
async def sticker(context):
    """ Fetches images/stickers and add them to your pack. """
    user = await bot.get_me()
    if not user.username:
        user.username = user.first_name
    message = await context.get_reply_message()
    custom_emoji = False
    animated = False
    emoji = ""
    await context.edit("收集图像/贴纸中 . . .")
    if message and message.media:
        if isinstance(message.media, MessageMediaPhoto):
            photo = BytesIO()
            photo = await bot.download_media(message.photo, photo)
        elif "image" in message.media.document.mime_type.split('/'):
            photo = BytesIO()
            await context.edit("下载图片中 . . .")
            await bot.download_file(message.media.document, photo)
            if (DocumentAttributeFilename(file_name='sticker.webp') in
                    message.media.document.attributes):
                emoji = message.media.document.attributes[1].alt
                custom_emoji = True
        elif (DocumentAttributeFilename(file_name='AnimatedSticker.tgs') in
              message.media.document.attributes):
            photo = BytesIO()
            await bot.download_file(message.media.document, "AnimatedSticker.tgs")
            for index in range(len(message.media.document.attributes)):
                try:
                    emoji = message.media.document.attributes[index].alt
                    break
                except:
                    pass
            custom_emoji = True
            animated = True
            photo = 1
        else:
            await context.edit("`出错了呜呜呜 ~ 不支持此文件类型。`")
            return
    else:
        await context.edit("`出错了呜呜呜 ~ 请回复带有图片/贴纸的消息。`")
        return

    if photo:
        split_strings = context.text.split()
        if not custom_emoji:
            emoji = "👀"
        pack = 1
        sticker_already = False
        if len(split_strings) == 3:
            pack = split_strings[2]
            emoji = split_strings[1]
        elif len(split_strings) == 2:
            if split_strings[1].isnumeric():
                pack = int(split_strings[1])
            else:
                emoji = split_strings[1]

        pack_name = f"{user.username}_{pack}"
        pack_title = f"@{user.username} 的私藏 ({pack})"
        command = '/newpack'
        file = BytesIO()

        if not animated:
            await context.edit("调整图像大小中 . . .")
            image = await resize_image(photo)
            file.name = "sticker.png"
            image.save(file, "PNG")
        else:
            pack_name += "_animated"
            pack_title += " (animated)"
            command = '/newanimated'

        response = request.urlopen(
            request.Request(f'http://t.me/addstickers/{pack_name}'),
            context=ssl.create_default_context(cafile=certifi.where()))
        if not response.status == 200:
            await context.edit("连接到 Telegram 服务器失败 . . .")
            return
        http_response = response.read().decode("utf8").split('\n')

        if " A <strong>Telegram</strong> user has created the <strong>Sticker Set</strong>." not in \
                http_response:
            for _ in range(20):  # retry up to 20 times
                try:
                    async with bot.conversation('Stickers') as conversation:
                        await conversation.send_message('/addsticker')
                        await conversation.get_response()
                        await bot.send_read_acknowledge(conversation.chat_id)
                        await conversation.send_message(pack_name)
                        chat_response = await conversation.get_response()
                        while chat_response.text == "Whoa! That's probably enough stickers for one pack, give it a break. \
A pack can't have more than 120 stickers at the moment.":
                            pack += 1
                            pack_name = f"{user.username}_{pack}"
                            pack_title = f"@{user.username} 的私藏 ({pack})"
                            await context.edit("切换到私藏 " + str(pack) + " 上一个贴纸包已满 . . .")
                            await conversation.send_message(pack_name)
                            chat_response = await conversation.get_response()
                            if chat_response.text == "Invalid pack selected.":
                                await add_sticker(conversation, command, pack_title, pack_name,
                                                  animated, message, context, file, emoji)
                                await context.edit(
                                    f"这张图片/贴纸已经被添加到 [这个](t.me/addstickers/{pack_name}) 贴纸包。",
                                    parse_mode='md')
                                return
                        await upload_sticker(animated, message, context, file, conversation)
                        await conversation.get_response()
                        await conversation.send_message(emoji)
                        await bot.send_read_acknowledge(conversation.chat_id)
                        await conversation.get_response()
                        await conversation.send_message('/done')
                        await conversation.get_response()
                        await bot.send_read_acknowledge(conversation.chat_id)
                    break
                except AlreadyInConversationError:
                    if not sticker_already:
                        await context.edit("另一个命令正在添加贴纸, 重新尝试中")
                        sticker_already = True
                    else:
                        pass
                    await sleep(.5)
                except Exception:
                    raise
        else:
            await context.edit("贴纸包不存在,正在创建 . . .")
            async with bot.conversation('Stickers') as conversation:
                await add_sticker(conversation, command, pack_title, pack_name,
                                  animated, message, context, file, emoji)

        notification = await context.edit(
            f"这张图片/贴纸已经被添加到 [这个](t.me/addstickers/{pack_name}) 贴纸包。",
            parse_mode='md')
        await sleep(5)
        try:
            await notification.delete()
        except:
            pass
from subliminal.cache import region
from cfscrape import CloudflareScraper

try:
    from urlparse import urlparse
except ImportError:
    from urllib.parse import urlparse

from subzero.lib.io import get_viable_encoding

logger = logging.getLogger(__name__)
pem_file = os.path.normpath(
    os.path.join(
        os.path.dirname(os.path.realpath(unicode(__file__, get_viable_encoding()))),
        "..", certifi.where()))
try:
    default_ssl_context = ssl.create_default_context(cafile=pem_file)
except AttributeError:
    # < Python 2.7.9
    default_ssl_context = None


class TimeoutSession(requests.Session):
    timeout = 10

    def __init__(self, timeout=None):
        super(TimeoutSession, self).__init__()
        self.timeout = timeout or self.timeout

    def request(self, method, url, *args, **kwargs):
    IDENTIFIED BY 'datajoint';
    """)
conn_root.query(
    "GRANT SELECT ON `djtest%%`.* TO 'djview'@'%%' IDENTIFIED BY 'djview';"
)
conn_root.query("""
    GRANT SELECT ON `djtest%%`.* TO 'djssl'@'%%'
    IDENTIFIED BY 'djssl'
    REQUIRE SSL;
    """)

# Initialize httpClient with relevant timeout.
httpClient = urllib3.PoolManager(
    timeout=30,
    cert_reqs='CERT_REQUIRED',
    ca_certs=certifi.where(),
    retries=urllib3.Retry(
        total=3,
        backoff_factor=0.2,
        status_forcelist=[500, 502, 503, 504]
    )
)

# Initialize minioClient with an endpoint and access/secret keys.
minioClient = minio.Minio(
    S3_CONN_INFO['endpoint'],
    access_key=S3_CONN_INFO['access_key'],
    secret_key=S3_CONN_INFO['secret_key'],
    secure=False,
    http_client=httpClient)
def get_client(**kwargs):
    """
    Return an :class:`elasticsearch.Elasticsearch` client object using the
    provided parameters. Any of the keyword arguments the
    :class:`elasticsearch.Elasticsearch` client object can receive are valid,
    such as:

    :arg hosts: A list of one or more Elasticsearch client hostnames or IP
        addresses to connect to. Can send a single host.
    :type hosts: list
    :arg port: The Elasticsearch client port to connect to.
    :type port: int
    :arg url_prefix: `Optional` url prefix, if needed to reach the
        Elasticsearch API (i.e., it's not at the root level)
    :type url_prefix: str
    :arg use_ssl: Whether to connect to the client via SSL/TLS
    :type use_ssl: bool
    :arg certificate: Path to SSL/TLS certificate
    :arg client_cert: Path to SSL/TLS client certificate (public key)
    :arg client_key: Path to SSL/TLS private key
    :arg aws_key: AWS IAM Access Key (Only used if the
        :mod:`requests-aws4auth` python module is installed)
    :arg aws_secret_key: AWS IAM Secret Access Key (Only used if the
        :mod:`requests-aws4auth` python module is installed)
    :arg aws_region: AWS Region (Only used if the :mod:`requests-aws4auth`
        python module is installed)
    :arg ssl_no_validate: If `True`, do not validate the certificate chain.
        This is an insecure option and you will see warnings in the log output.
    :type ssl_no_validate: bool
    :arg http_auth: Authentication credentials in `user:pass` format.
    :type http_auth: str
    :arg timeout: Number of seconds before the client will timeout.
    :type timeout: int
    :arg master_only: If `True`, the client will `only` connect if the
        endpoint is the elected master node of the cluster. **This option does
        not work if `hosts` has more than one value.** It will raise an
        Exception in that case.
    :type master_only: bool
    :rtype: :class:`elasticsearch.Elasticsearch`
    """
    if 'url_prefix' in kwargs:
        if (type(kwargs['url_prefix']) == type(None) or
                kwargs['url_prefix'] == "None"):
            kwargs['url_prefix'] = ''
    kwargs['hosts'] = '127.0.0.1' if not 'hosts' in kwargs else kwargs['hosts']
    kwargs['master_only'] = False if not 'master_only' in kwargs \
        else kwargs['master_only']
    kwargs['use_ssl'] = False if not 'use_ssl' in kwargs else kwargs['use_ssl']
    kwargs['ssl_no_validate'] = False if not 'ssl_no_validate' in kwargs \
        else kwargs['ssl_no_validate']
    kwargs['certificate'] = False if not 'certificate' in kwargs \
        else kwargs['certificate']
    kwargs['client_cert'] = False if not 'client_cert' in kwargs \
        else kwargs['client_cert']
    kwargs['client_key'] = False if not 'client_key' in kwargs \
        else kwargs['client_key']
    kwargs['hosts'] = ensure_list(kwargs['hosts'])
    logger.debug("kwargs = {0}".format(kwargs))
    master_only = kwargs.pop('master_only')

    if kwargs['use_ssl']:
        if kwargs['ssl_no_validate']:
            kwargs['verify_certs'] = False  # Not needed, but explicitly defined
        else:
            logger.info('Attempting to verify SSL certificate.')
            # If user provides a certificate:
            if kwargs['certificate']:
                kwargs['verify_certs'] = True
                kwargs['ca_certs'] = kwargs['certificate']
            else:
                # Try to use certifi certificates:
                try:
                    import certifi
                    kwargs['verify_certs'] = True
                    kwargs['ca_certs'] = certifi.where()
                except ImportError:
                    logger.warn('Unable to verify SSL certificate.')

    try:
        from requests_aws4auth import AWS4Auth
        kwargs['aws_key'] = False if not 'aws_key' in kwargs \
            else kwargs['aws_key']
        kwargs['aws_secret_key'] = False if not 'aws_secret_key' in kwargs \
            else kwargs['aws_secret_key']
        kwargs['region'] = False if not 'region' in kwargs \
            else kwargs['region']
        if kwargs['aws_key'] or kwargs['aws_secret_key'] or kwargs['region']:
            if not kwargs['aws_key'] and kwargs['aws_secret_key'] \
                    and kwargs['region']:
                raise MissingArgument(
                    'Missing one or more of "aws_key", "aws_secret_key", '
                    'or "region".')
            # Override these kwargs
            kwargs['use_ssl'] = True
            kwargs['verify_certs'] = True
            kwargs['connection_class'] = elasticsearch.RequestsHttpConnection
            kwargs['http_auth'] = (
                AWS4Auth(kwargs['aws_key'], kwargs['aws_secret_key'],
                         kwargs['region'], 'es'))
        else:
            logger.debug('"requests_aws4auth" module present, but not used.')
    except ImportError:
        logger.debug('Not using "requests_aws4auth" python module to connect.')

    if master_only:
        if len(kwargs['hosts']) > 1:
            raise ConfigurationError(
                '"master_only" cannot be True if more than one host is '
                'specified. Hosts = {0}'.format(kwargs['hosts']))
    try:
        client = elasticsearch.Elasticsearch(**kwargs)
        # Verify the version is acceptable.
        check_version(client)
        # Verify "master_only" status, if applicable
        check_master(client, master_only=master_only)
        return client
    except Exception as e:
        raise elasticsearch.ElasticsearchException(
            'Unable to create client connection to Elasticsearch. '
            'Error: {0}'.format(e))
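# Usage sketch (added), based only on the docstring above; host, port, and
# credentials are placeholders. With use_ssl=True and no explicit certificate,
# the helper falls back to certifi's CA bundle for verification.
client = get_client(
    hosts=['es.example.com'],
    port=9200,
    use_ssl=True,
    http_auth='elastic:changeme',
    timeout=30,
)
print(client.info())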
    Limits on the number of connections in a connection pool.
    """

    def __init__(
        self,
        *,
        soft_limit: int = None,
        hard_limit: int = None,
    ):
        self.soft_limit = soft_limit
        self.hard_limit = hard_limit

    def __eq__(self, other: typing.Any) -> bool:
        return (
            isinstance(other, self.__class__)
            and self.soft_limit == other.soft_limit
            and self.hard_limit == other.hard_limit
        )

    def __repr__(self) -> str:
        class_name = self.__class__.__name__
        return (
            f"{class_name}(soft_limit={self.soft_limit}, hard_limit={self.hard_limit})"
        )


TimeoutConfig = Timeout  # Synonym for backwards compat

DEFAULT_SSL_CONFIG = SSLConfig(cert=None, verify=True)
DEFAULT_TIMEOUT_CONFIG = Timeout(timeout=5.0)
DEFAULT_POOL_LIMITS = PoolLimits(soft_limit=10, hard_limit=100)
DEFAULT_CA_BUNDLE_PATH = Path(certifi.where())
DEFAULT_MAX_REDIRECTS = 20
def __init__(
    self,
    host,
    port,
    user=None,
    password=None,
    indexPrefix="",
    useSSL=True,
    useCRT=False,
    ca_certs=None,
    client_key=None,
    client_cert=None,
):
    """c'tor

    :param self: self reference
    :param str host: name of the database for example: MonitoringDB
    :param str port: The full name of the database for example: 'Monitoring/MonitoringDB'
    :param str user: user name to access the db
    :param str password: if the db is password protected we need to provide a password
    :param str indexPrefix: it is the indexPrefix used to get all indexes
    :param bool useSSL: We can disable using secure connection. By default we use secure connection.
    :param bool useCRT: Use certificates.
    :param str ca_certs: CA certificates bundle.
    :param str client_key: Client key.
    :param str client_cert: Client certificate.
    """

    self.__indexPrefix = indexPrefix
    self._connected = False
    if user and password:
        sLog.debug("Specified username and password")
        if port:
            self.__url = "https://%s:%s@%s:%d" % (user, password, host, port)
        else:
            self.__url = "https://%s:%s@%s" % (user, password, host)
    else:
        sLog.debug("Username and password not specified")
        if port:
            self.__url = "http://%s:%d" % (host, port)
        else:
            self.__url = "http://%s" % host

    if port:
        sLog.verbose("Connecting to %s:%s, useSSL = %s" % (host, port, useSSL))
    else:
        sLog.verbose("Connecting to %s, useSSL = %s" % (host, useSSL))

    if useSSL:
        if ca_certs:
            casFile = ca_certs
        else:
            bd = BundleDeliveryClient()
            retVal = bd.getCAs()
            casFile = None
            if not retVal["OK"]:
                sLog.error("CAs file does not exists:", retVal["Message"])
                casFile = certifi.where()
            else:
                casFile = retVal["Value"]

        self.client = Elasticsearch(
            self.__url, timeout=self.__timeout, use_ssl=True, verify_certs=True, ca_certs=casFile
        )
    elif useCRT:
        self.client = Elasticsearch(
            self.__url,
            timeout=self.__timeout,
            use_ssl=True,
            verify_certs=True,
            ca_certs=ca_certs,
            client_cert=client_cert,
            client_key=client_key,
        )
    else:
        self.client = Elasticsearch(self.__url, timeout=self.__timeout)

    # Before we use the database we try to connect
    # and retrieve the cluster name
    try:
        if self.client.ping():
            # Returns True if the cluster is running, False otherwise
            result = self.client.info()
            self.clusterName = result.get("cluster_name", " ")  # pylint: disable=no-member
            sLog.info("Database info\n", json.dumps(result, indent=4))
            self._connected = True
        else:
            sLog.error("Cannot ping ElasticsearchDB!")
    except ConnectionError as e:
        sLog.error(repr(e))
def _defaultcacerts(ui):
    """return path to default CA certificates or None.

    It is assumed this function is called when the returned certificates
    file will actually be used to validate connections. Therefore this
    function may print warnings or debug messages assuming this usage.

    We don't print a message when the Python is able to load default
    CA certs because this scenario is detected at socket connect time.
    """
    # The "certifi" Python package provides certificates. If it is installed
    # and usable, assume the user intends it to be used and use it.
    try:
        import certifi
        certs = certifi.where()
        if os.path.exists(certs):
            ui.debug('using ca certificates from certifi\n')
            return certs
    except (ImportError, AttributeError):
        pass

    # On Windows, only the modern ssl module is capable of loading the system
    # CA certificates. If we're not capable of doing that, emit a warning
    # because we'll get a certificate verification error later and the lack
    # of loaded CA certificates will be the reason why.
    #
    # Assertion: this code is only called if certificates are being verified.
    if pycompat.iswindows:
        if not _canloaddefaultcerts:
            ui.warn(_('(unable to load Windows CA certificates; see '
                      'https://mercurial-scm.org/wiki/SecureConnections for '
                      'how to configure Mercurial to avoid this message)\n'))
        return None

    # Apple's OpenSSL has patches that allow a specially constructed certificate
    # to load the system CA store. If we're running on Apple Python, use this
    # trick.
    if _plainapplepython():
        dummycert = os.path.join(
            os.path.dirname(pycompat.fsencode(__file__)), 'dummycert.pem')
        if os.path.exists(dummycert):
            return dummycert

    # The Apple OpenSSL trick isn't available to us. If Python isn't able to
    # load system certs, we're out of luck.
    if pycompat.isdarwin:
        # FUTURE Consider looking for Homebrew or MacPorts installed certs
        # files. Also consider exporting the keychain certs to a file during
        # Mercurial install.
        if not _canloaddefaultcerts:
            ui.warn(_('(unable to load CA certificates; see '
                      'https://mercurial-scm.org/wiki/SecureConnections for '
                      'how to configure Mercurial to avoid this message)\n'))
        return None

    # / is writable on Windows. Out of an abundance of caution make sure
    # we're not on Windows because paths from _systemcacerts could be installed
    # by non-admin users.
    assert not pycompat.iswindows

    # Try to find CA certificates in well-known locations. We print a warning
    # when using a found file because we don't want too much silent magic
    # for security settings. The expectation is that proper Mercurial
    # installs will have the CA certs path defined at install time and the
    # installer/packager will make an appropriate decision on the user's
    # behalf. We only get here and perform this setting as a feature of
    # last resort.
    if not _canloaddefaultcerts:
        for path in _systemcacertpaths:
            if os.path.isfile(path):
                ui.warn(_('(using CA certificates from %s; if you see this '
                          'message, your Mercurial install is not properly '
                          'configured; see '
                          'https://mercurial-scm.org/wiki/SecureConnections '
                          'for how to configure Mercurial to avoid this '
                          'message)\n') % path)
                return path

        ui.warn(_('(unable to load CA certificates; see '
                  'https://mercurial-scm.org/wiki/SecureConnections for '
                  'how to configure Mercurial to avoid this message)\n'))
    return None
#!/usr/bin/env python
# Author: Ronald Miller
# Contact: [email protected]

import logging
import asyncio
import aiohttp
import argparse
import certifi
import ssl
import sys
from datetime import datetime

WAIT_TIME = 2  # time in seconds between calls
SSL_CONTEXT = ssl.create_default_context(cafile=certifi.where())
API_BASE_URL = "https://haveibeenpwned.com/api/v3/breachedaccount/"

# logger message format
FORMAT = '%(asctime)s %(message)s'


async def pwn_email(key, email: str):
    async with aiohttp.ClientSession() as session:
        async with session.get(API_BASE_URL + email,
                               headers={"hibp-api-key": key},
                               ssl=SSL_CONTEXT) as resp:
            try:
                assert resp.status == 200
            except AssertionError:
                logging.error(
                    f"Unexpected API Response {resp.status} - could not fetch breaches for {email}"
def geocode_get_lat_lon(self, address):
    with open('gps_apikey.txt', 'r') as f:
        gps_apikey_id = f.read()
    address = parse.quote(address)
    url = ("https://geocoder.ls.hereapi.com/6.2/geocode.json?apiKey="
           + gps_apikey_id + "&searchtext=" + address)
    UrlRequest(url, on_success=self.success, on_failure=self.failure,
               on_error=self.error, ca_file=certifi.where())
import certifi
import ssl
import geopy
import math
from geopy.geocoders import Nominatim
from geopy.distance import geodesic
import numpy as np

ctx = ssl.create_default_context(cafile=certifi.where())
geopy.geocoders.options.default_ssl_context = ctx


def generateData(reqData):
    # initialize
    data = {"demands": [], "location_ids": [], "depot": 0}
    geolocator = Nominatim(user_agent="my-droptimize")

    # get geopy obj
    points = []
    totalQty = 0
    points.append(geolocator.geocode(reqData['depot']['address']))
    data["demands"].append(0)
    data["location_ids"].append(reqData['depot']['location_id'])
    for delivery in reqData['deliveries']:
        points.append(geolocator.geocode(delivery['address']))
        data["demands"].append(delivery['quantity'])
        data["location_ids"].append(delivery['location_id'])
        totalQty += delivery['quantity']

    # debug
    for point in points:
        print(point.address)
def _create_ssl_context(self,
                        method=SSL_DEFAULT_METHOD,
                        options=SSL_DEFAULT_OPTIONS,
                        verify_options=SSL.VERIFY_NONE,
                        ca_path=None,
                        ca_pemfile=None,
                        cipher_list=None,
                        alpn_protos=None,
                        alpn_select=None,
                        alpn_select_callback=None,
                        ):
    """
    Creates an SSL Context.

    :param method: One of SSLv2_METHOD, SSLv3_METHOD, SSLv23_METHOD, TLSv1_METHOD, TLSv1_1_METHOD, or TLSv1_2_METHOD
    :param options: A bit field consisting of OpenSSL.SSL.OP_* values
    :param verify_options: A bit field consisting of OpenSSL.SSL.VERIFY_* values
    :param ca_path: Path to a directory of trusted CA certificates prepared using the c_rehash tool
    :param ca_pemfile: Path to a PEM formatted trusted CA certificate
    :param cipher_list: A textual OpenSSL cipher list, see https://www.openssl.org/docs/apps/ciphers.html
    :rtype : SSL.Context
    """
    context = SSL.Context(method)

    # Options (NO_SSLv2/3)
    if options is not None:
        context.set_options(options)

    # Verify Options (NONE/PEER and trusted CAs)
    if verify_options is not None:
        def verify_cert(conn, x509, errno, err_depth, is_cert_verified):
            if not is_cert_verified:
                self.ssl_verification_error = dict(errno=errno, depth=err_depth)
            return is_cert_verified

        context.set_verify(verify_options, verify_cert)
        if ca_path is None and ca_pemfile is None:
            ca_pemfile = certifi.where()
        context.load_verify_locations(ca_pemfile, ca_path)

    # Workaround for
    # https://github.com/pyca/pyopenssl/issues/190
    # https://github.com/mitmproxy/mitmproxy/issues/472
    # Options already set before are not cleared.
    context.set_mode(SSL._lib.SSL_MODE_AUTO_RETRY)

    # Cipher List
    if cipher_list:
        try:
            context.set_cipher_list(cipher_list)
            # TODO: maybe change this to with newer pyOpenSSL APIs
            context.set_tmp_ecdh(OpenSSL.crypto.get_elliptic_curve('prime256v1'))
        except SSL.Error as v:
            raise exceptions.TlsException("SSL cipher specification error: %s" % str(v))

    # SSLKEYLOGFILE
    if log_ssl_key:
        context.set_info_callback(log_ssl_key)

    if HAS_ALPN:
        if alpn_protos is not None:
            # advertise application layer protocols
            context.set_alpn_protos(alpn_protos)
        elif alpn_select is not None and alpn_select_callback is None:
            # select application layer protocol
            def alpn_select_callback(conn_, options):
                if alpn_select in options:
                    return bytes(alpn_select)
                else:  # pragma no cover
                    return options[0]

            context.set_alpn_select_callback(alpn_select_callback)
        elif alpn_select_callback is not None and alpn_select is None:
            context.set_alpn_select_callback(alpn_select_callback)
        elif alpn_select_callback is not None and alpn_select is not None:
            raise exceptions.TlsException(
                "ALPN error: only define alpn_select (string) OR alpn_select_callback (method).")

    return context
from . import x509
from . import pem
from . import version
from . import blockchain
from .blockchain import Blockchain, HEADER_SIZE
from . import bitcoin
from . import constants
from .i18n import _
from .logging import Logger
from .transaction import Transaction

if TYPE_CHECKING:
    from .network import Network
    from .simple_config import SimpleConfig


ca_path = certifi.where()

BUCKET_NAME_OF_ONION_SERVERS = 'onion'

MAX_INCOMING_MSG_SIZE = 20_000_000  # in bytes

_KNOWN_NETWORK_PROTOCOLS = {'t', 's'}
PREFERRED_NETWORK_PROTOCOL = 's'
assert PREFERRED_NETWORK_PROTOCOL in _KNOWN_NETWORK_PROTOCOLS


class NetworkTimeout:
    # seconds
    class Generic:
        NORMAL = 30
        RELAXED = 45
def get_external_ip():
    _request = urllib.request.urlopen(
        "https://checkip.amazonaws.com",
        context=ssl.create_default_context(cafile=certifi.where()))
    return _request.read().decode("utf-8")
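# Standalone sketch of the same helper (added for illustration); the service
# returns the caller's public IP followed by a newline, hence the strip().
import ssl
import urllib.request
import certifi

context = ssl.create_default_context(cafile=certifi.where())
with urllib.request.urlopen("https://checkip.amazonaws.com", context=context) as resp:
    print(resp.read().decode("utf-8").strip())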
from flask import Flask, request, Response, render_template, session, redirect, json, url_for
from twilio import twiml
from twilio.rest import TwilioRestClient
import imp
import urllib3
import certifi
import urllib3.contrib.pyopenssl
import googlemaps
from datetime import datetime
import phonenumbers as ph

urllib3.contrib.pyopenssl.inject_into_urllib3()  # To allow python 2.7.6 to send safe http requests
http = urllib3.PoolManager(
    cert_reqs='CERT_REQUIRED',  # Force certificate check.
    ca_certs=certifi.where(),   # Path to the Certifi bundle.
)

# loads account_sid and auth_token from private space
config = imp.load_source('config', '../sensitive_data/config.py')  # for heroku. need to improve security
TWILIO_ACCOUNT_SID = config.TWILIO_ACCOUNT_SID
TWILIO_AUTH_TOKEN = config.TWILIO_AUTH_TOKEN

app = Flask(__name__, template_folder='templates')
app.secret_key = config.SECRET_KEY
gmaps = googlemaps.Client(key=config.GMAPS_KEY)
client = TwilioRestClient(TWILIO_ACCOUNT_SID, TWILIO_AUTH_TOKEN)

# search radius for app
SEARCH_RADIUS = 1000.0
import os
import sys
import json
import time
import pathlib
import urllib
import requests
import subprocess

import urllib3
import certifi
from urllib3.exceptions import InsecureRequestWarning

urllib3.disable_warnings(category=InsecureRequestWarning)
http = urllib3.PoolManager(cert_reqs='CERT_REQUIRED', ca_certs=certifi.where())


class IBClient():

    def __init__(self, username=None, password=None, account=None):
        '''
        Initializes a new IBClient object with the username and password of the
        account holder.
        '''
        self.ACCOUNT = account
        self.USERNAME = username
        self.PASSWORD = password
        self.CLIENT_PORTAL_FOLDER = pathlib.Path.cwd().joinpath('clientportal.beta.gw').resolve()
        self.API_VERSION = 'v1/'
        self.TESTING_FLAG = False
        self._operating_system = sys.platform
def get_statuses(namespace: str) -> object:
    log = app.logger
    log.info("Get status for %s" % namespace)

    services = get_services_by_ns(namespace)
    routes = get_routes_by_ns(namespace)

    response = []
    for service in services:
        url = build_url(service)
        status = "UP"
        reason = ""
        actual_host = None
        host = None
        for route in routes:
            if route['service']['id'] == service['id'] and 'hosts' in route:
                actual_host = route['hosts'][0]
                host = clean_host(actual_host)
        try:
            addr = socket.gethostbyname(service['host'])
            log.info("Address = %s" % addr)
        except:
            status = "DOWN"
            reason = "DNS"
        if status == "UP":
            try:
                headers = {}
                if host is None or service['host'].endswith('.svc'):
                    r = requests.get(url, headers=headers, timeout=3.0)
                    status_code = r.status_code
                else:
                    u = urlparse(url)
                    headers['Host'] = host
                    log.info("GET %-30s %s" % ("%s://%s" % (u.scheme, u.netloc), headers))
                    urllib3.disable_warnings()
                    if u.scheme == "https":
                        pool = urllib3.HTTPSConnectionPool(
                            "%s" % (u.netloc),
                            assert_hostname=host,
                            server_hostname=host,
                            cert_reqs='CERT_NONE',
                            ca_certs=certifi.where())
                    else:
                        pool = urllib3.HTTPConnectionPool("%s" % (u.netloc))
                    req = pool.urlopen("GET", u.path,
                                       headers={"Host": host},
                                       assert_same_host=False,
                                       timeout=1.0,
                                       retries=False)
                    status_code = req.status
                    log.info("Result received!! %d" % status_code)

                if status_code < 400:
                    status = "UP"
                    reason = "%d Response" % status_code
                elif status_code == 401 or status_code == 403:
                    status = "UP"
                    reason = "AUTH %d" % status_code
                else:
                    status = "DOWN"
                    reason = "%d Response" % status_code
            except requests.exceptions.Timeout as ex:
                status = "DOWN"
                reason = "TIMEOUT"
            except urllib3.exceptions.ConnectTimeoutError as ex:
                status = "DOWN"
                reason = "TIMEOUT"
            except requests.exceptions.ConnectionError as ex:
                log.error("ConnError %s" % ex)
                status = "DOWN"
                reason = "CONNECTION"
            except requests.exceptions.SSLError as ex:
                status = "DOWN"
                reason = "SSL"
            except urllib3.exceptions.NewConnectionError as ex:
                log.error("NewConnError %s" % ex)
                status = "DOWN"
                reason = "CON_ERR"
            except urllib3.exceptions.SSLError as ex:
                log.error(ex)
                status = "DOWN"
                reason = "SSL_URLLIB3"
            except Exception as ex:
                log.error(ex)
                traceback.print_exc(file=sys.stdout)
                status = "DOWN"
                reason = "UNKNOWN"

        log.info("GET %-30s %s" % (url, reason))

        response.append({
            "name": service['name'],
            "upstream": url,
            "status": status,
            "reason": reason,
            "host": host,
            "env_host": actual_host
        })
    return make_response(jsonify(response))
def process_message(self, connector, host, secret_key, resource, parameters):
    # Process the file and upload the results
    if os.getenv('STREAM', '').lower() == 'pycurl':
        url = '%sapi/files/%s/blob?key=%s' % (host, resource['id'], secret_key)
        (inputfile, inputfilename) = tempfile.mkstemp(suffix="")
        try:
            with os.fdopen(inputfile, "wb") as outputfile:
                c = pycurl.Curl()
                if (connector and not connector.ssl_verify) or \
                        (os.getenv("SSL_IGNORE", "").lower() == "true"):
                    c.setopt(pycurl.SSL_VERIFYPEER, 0)
                    c.setopt(pycurl.SSL_VERIFYHOST, 0)
                c.setopt(c.URL, url)
                c.setopt(c.WRITEDATA, outputfile)
                c.setopt(c.CAINFO, certifi.where())
                c.perform()
                c.close()
        except Exception:
            os.remove(inputfilename)
            raise
    else:
        inputfilename = resource["local_paths"][0]
    file_id = resource['id']

    try:
        # create thumbnail image
        if 'image_thumbnail' in parameters:
            args = parameters['image_thumbnail']
        else:
            args = self.args.image_thumbnail_command
        self.execute_command(connector, host, secret_key, inputfilename, file_id, resource,
                             False, self.args.image_binary, args, self.args.image_type)

        # create preview image
        if 'image_preview' in parameters:
            args = parameters['image_preview']
        else:
            args = self.args.image_preview_command
        self.execute_command(connector, host, secret_key, inputfilename, file_id, resource,
                             True, self.args.image_binary, args, self.args.image_type)

        # create extractor specific preview
        if 'preview' in parameters:
            args = parameters['preview']
        else:
            args = self.args.preview_command
        self.execute_command(connector, host, secret_key, inputfilename, file_id, resource,
                             True, self.args.preview_binary, args, self.args.preview_type)
    finally:
        if os.getenv('STREAM', '').lower() == 'pycurl':
            os.remove(inputfilename)
def get_http_pool():
    return urllib3.PoolManager(cert_reqs=str("CERT_REQUIRED"), ca_certs=certifi.where())
import certifi
import requests

try:
    print('Checking connection to Discord...')
    test = requests.get(url='https://discordapp.com', verify=True)
    print('Connection to Discord OK.')
except requests.exceptions.SSLError as err:
    print('SSL Error. Adding custom certs to Certifi store...')
    cafile = certifi.where()
    with open('ssl764977.cloudflaressl.com.pem', 'rb') as infile:
        customca = infile.read()
    with open(cafile, 'ab') as outfile:
        outfile.write(customca)
    print('That might have worked.')
print 'SmartMesh SDK {0}\n'.format('.'.join([str(i) for i in sdk_version.VERSION]))

mgrhost = raw_input('Enter the IP address of the manager (e.g. {0} ): '.format(DFLT_VMGR_HOST))
if mgrhost == "":
    mgrhost = DFLT_VMGR_HOST

# log in as user "dust"
config = Configuration()
config.username = '******'
config.password = '******'
config.verify_ssl = False
if os.path.isfile(certifi.where()):
    config.ssl_ca_cert = certifi.where()
else:
    config.ssl_ca_cert = os.path.join(os.path.dirname(sys.executable), "cacert.pem")

# initialize the VManager Python library
voyager = VManagerApi(host=mgrhost)

# read and display network configuration
netConfig = voyager.networkApi.get_network_config()
print '\n==== Display current network Configuration'
print netConfig

# start listening for data notifications
print '\n==== Subscribe to data notifications and display Average pkt/sec'
import asyncio
import datetime
from asyncio import AbstractEventLoop

import aiohttp
import colorama
import pandas as pd
import ssl
import certifi

ssl_context = ssl.create_default_context()
ssl_context.load_verify_locations(certifi.where())


def main(date='2020-08-24', value=400, usa_only=False):
    # Turn the date string passed to the function into a date object
    start = datetime.datetime.strptime(date, '%Y-%m-%d').date()
    # Number of days between today and the start date (a timedelta)
    end = datetime.date.today() - start

    # Walk back from yesterday and collect each date in that range
    dates = []
    for x in range(int(end.days)):
        yesterday = datetime.date.today() - datetime.timedelta(days=1)
        search_date = yesterday - datetime.timedelta(days=x)
        dates.append(search_date.strftime('%Y-%m-%d'))
    # print(dates)

    # Create the asyncio loop
    loop: AbstractEventLoop = asyncio.new_event_loop()
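# The script above builds ssl_context from the certifi bundle but stops before
# the aiohttp calls. A minimal sketch of how such a context is typically handed
# to aiohttp follows; the URL is a placeholder, not one used by the script.
import asyncio
import ssl

import aiohttp
import certifi


async def fetch(url: str) -> str:
    # Trust store taken from certifi, as in the script above
    ssl_context = ssl.create_default_context(cafile=certifi.where())
    connector = aiohttp.TCPConnector(ssl=ssl_context)
    async with aiohttp.ClientSession(connector=connector) as session:
        async with session.get(url) as resp:
            return await resp.text()


# e.g. asyncio.run(fetch("https://example.com/"))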
def _make_default_http():
    if certifi is not None:
        return urllib3.PoolManager(cert_reqs="CERT_REQUIRED",
                                   ca_certs=certifi.where())
    else:
        return urllib3.PoolManager()
# File/operating system related imports
import os
import sys
import os.path
from os import path

# Network request related imports
import certifi
import requests
import urllib3
import urllib

http = urllib3.PoolManager(
    cert_reqs='CERT_REQUIRED',
    ca_certs=certifi.where())  # To securely crawl SSL sites

skips = []       # images that couldn't be searched
sites = []       # sites available to search
domain = ""
stamp = ""
site_count = 0   # number of sites available
image_index = 0
skip_count = 0


# Loads the configuration info from the selected site
def load_config():
    global domain
    global stamp
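# For completeness, a small sketch of how a pool configured this way is
# typically used for a crawl request; example_url is a placeholder rather than
# one of the sites the script actually searches.
import certifi
import urllib3

pool = urllib3.PoolManager(
    cert_reqs='CERT_REQUIRED',
    ca_certs=certifi.where())

example_url = 'https://example.com/robots.txt'  # placeholder target
resp = pool.request('GET', example_url, headers={'User-Agent': 'Mozilla'})
print(resp.status)
print(resp.data[:200])  # first bytes of the body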
from __future__ import unicode_literals
import sys, os, time, shutil, re, io, certifi, plura_dl
from plura_dl import PluraDL
from plura_dl.utils import ExtractorError, DownloadError
from plura_dl.scrapeutils import extract_user_credentials, Logger

if sys.version_info[0] < 3:
    raise Exception("Must be using Python 3")

certpath = os.path.abspath(certifi.where())
os.environ["SSL_CERT_FILE"] = certpath

# IMPORTANT SETTINGS TO PREVENT SPAM BLOCKING OF YOUR ACCOUNT/IP AT PLURALSIGHT #
# Change these at your own risk.                                                #
SLEEP_INTERVAL = 100   # minimum sleep time (s)
SLEEP_OFFSET = 150     # set random sleep time (s) up to
SLEEP_PLAYLIST = 100   # sleep time (s) between playlist requests
RATE_LIMIT = 500       # download rate (kb/s)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #

# Global defaults
DLPATH, USERNAME, PASSWORD = "", "", ""
INPROGRESSPATH, FINISHPATH, FAILPATH, INTERRUPTPATH = "", "", "", ""
PDL_OPTS = {}
SUBTITLE_OFF = False
FILENAME_TEMPLATE = r"%(playlist_index)s-%(chapter_number)s-%(title)s-%(resolution)s.%(ext)s"
PLURAURL = r"https://app.pluralsight.com/library/courses/"
SCRIPTPATH = os.path.dirname(os.path.abspath(sys.argv[0]))
COOKIEPATH = os.path.join(SCRIPTPATH, 'cookies')
COOKIEFILE = os.path.join(COOKIEPATH, 'cookies.txt')
RATE_LIMIT = RATE_LIMIT * 10**3

if not os.path.exists(COOKIEPATH):
    os.mkdir(COOKIEPATH)
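# Setting SSL_CERT_FILE works because OpenSSL-backed clients consult that
# variable when no CA file is given explicitly. A quick sanity check, sketched
# here as an assumption about how to verify the setting rather than as part of
# plura_dl itself:
import os
import ssl

import certifi

os.environ["SSL_CERT_FILE"] = os.path.abspath(certifi.where())

# Default contexts created after this point should pick up the certifi bundle.
paths = ssl.get_default_verify_paths()
print(paths.cafile)  # expected to echo the certifi path
print(ssl.create_default_context().cert_store_stats())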
    # Post the updated message in Slack
    updated_message = web_client.chat_update(**message)

    # Update the timestamp saved on the onboarding tutorial object
    onboarding_tutorial.timestamp = updated_message["ts"]


# ============== Message Events ============= #
# When a user sends a DM, the event type will be 'message'.
# Here we'll link the update_share callback to the 'message' event.
@slack.RTMClient.run_on(event="message")
def message(**payload):
    """Display the onboarding welcome message after receiving a message
    that contains "start".
    """
    data = payload["data"]
    web_client = payload["web_client"]
    channel_id = data.get("channel")
    user_id = data.get("user")
    text = data.get("text")

    if text and text.lower() == "start":
        return start_onboarding(web_client, user_id, channel_id)


if __name__ == "__main__":
    ssl_context = ssl_lib.create_default_context(cafile=certifi.where())
    slack_token = os.environ["SLACK_BOT_TOKEN"]
    rtm_client = slack.RTMClient(token=slack_token, ssl=ssl_context)
    rtm_client.start()
def _create_ssl_context(
    method: int = DEFAULT_METHOD,
    options: int = DEFAULT_OPTIONS,
    ca_path: str = None,
    ca_pemfile: str = None,
    cipher_list: str = None,
    alpn_protos: typing.Iterable[bytes] = None,
    alpn_select=None,
    alpn_select_callback: typing.Callable[[typing.Any, typing.Any], bytes] = None,
    verify: int = SSL.VERIFY_PEER,
    verify_callback: typing.Optional[
        typing.Callable[[SSL.Connection, SSL.X509, int, int, bool], bool]
    ] = None,
) -> SSL.Context:
    """
    Creates an SSL Context.

    :param method: One of SSLv2_METHOD, SSLv3_METHOD, SSLv23_METHOD, TLSv1_METHOD,
                   TLSv1_1_METHOD, or TLSv1_2_METHOD
    :param options: A bit field consisting of OpenSSL.SSL.OP_* values
    :param verify: A bit field consisting of OpenSSL.SSL.VERIFY_* values
    :param ca_path: Path to a directory of trusted CA certificates prepared using
                    the c_rehash tool
    :param ca_pemfile: Path to a PEM formatted trusted CA certificate
    :param cipher_list: A textual OpenSSL cipher list, see
                        https://www.openssl.org/docs/apps/ciphers.html
    :rtype : SSL.Context
    """
    try:
        context = SSL.Context(method)
    except ValueError:
        method_name = METHOD_NAMES.get(method, "unknown")
        raise exceptions.TlsException(
            "SSL method \"%s\" is most likely not supported "
            "or disabled (for security reasons) in your libssl. "
            "Please refer to https://github.com/mitmproxy/mitmproxy/issues/1101 "
            "for more details." % method_name)

    # Options (NO_SSLv2/3)
    if options is not None:
        context.set_options(options)

    # Verify Options (NONE/PEER and trusted CAs)
    if verify is not None:
        context.set_verify(verify, verify_callback)
        if ca_path is None and ca_pemfile is None:
            ca_pemfile = certifi.where()
        try:
            context.load_verify_locations(ca_pemfile, ca_path)
        except SSL.Error:
            raise exceptions.TlsException(
                "Cannot load trusted certificates ({}, {}).".format(
                    ca_pemfile, ca_path))

    # Workaround for
    # https://github.com/pyca/pyopenssl/issues/190
    # https://github.com/mitmproxy/mitmproxy/issues/472
    # Options already set before are not cleared.
    context.set_mode(SSL._lib.SSL_MODE_AUTO_RETRY)

    # Cipher List
    if cipher_list:
        try:
            context.set_cipher_list(cipher_list.encode())
        except SSL.Error as v:
            raise exceptions.TlsException(
                "SSL cipher specification error: %s" % str(v))

    # SSLKEYLOGFILE
    if log_master_secret:
        context.set_info_callback(log_master_secret)

    if alpn_protos is not None:
        # advertise application layer protocols
        context.set_alpn_protos(alpn_protos)
    elif alpn_select is not None and alpn_select_callback is None:
        # select application layer protocol
        def alpn_select_callback(conn_, options):
            if alpn_select in options:
                return bytes(alpn_select)
            else:  # pragma: no cover
                return options[0]

        context.set_alpn_select_callback(alpn_select_callback)
    elif alpn_select_callback is not None and alpn_select is None:
        if not callable(alpn_select_callback):
            raise exceptions.TlsException(
                "ALPN error: alpn_select_callback must be a function.")
        context.set_alpn_select_callback(alpn_select_callback)
    elif alpn_select_callback is not None and alpn_select is not None:
        raise exceptions.TlsException(
            "ALPN error: only define alpn_select (string) OR alpn_select_callback (function).")

    return context
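# The certifi fallback above only matters in the verify branch. A self-contained
# sketch of that branch in isolation, using plain pyOpenSSL names rather than the
# module's own DEFAULT_METHOD/exceptions wrappers, might look like this:
import certifi
from OpenSSL import SSL


def _accept_valid_chains(conn, x509, errno, depth, ok):
    # OpenSSL has already evaluated the chain; `ok` is truthy when it passed.
    return bool(ok)


context = SSL.Context(SSL.SSLv23_METHOD)
context.set_options(SSL.OP_NO_SSLv2 | SSL.OP_NO_SSLv3)
context.set_verify(SSL.VERIFY_PEER, _accept_valid_chains)
# No explicit CA file or directory supplied, so fall back to the certifi bundle.
context.load_verify_locations(certifi.where(), None)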
        'options': {
            'py2app': {
                'argv_emulation': True
            }
        },
        'setup_requires': ['py2app'],
    }

elif platform.system() in ['Windows']:
    # imports for Windows
    # use distutils because it's easier to specify what's included
    from distutils.core import setup
    import py2exe
    import certifi

    ssl_ca_cert.append(certifi.where())

    platform_setup_options = {
        # py2exe parameters
        'console': [
            # embedded manager apps
            {'script': os.path.join('app', 'AclCommissioning', 'AclCommissioning.py'),},
            {'script': os.path.join('app', 'BlinkPacketSend', 'BlinkPacketSend.py'),},
            {'script': os.path.join('app', 'BroadcastLeds', 'BroadcastLeds.py'),},
            {'script': os.path.join('app', 'FindManagers', 'FindManagers.py'),},
            {'script': os.path.join('app', 'InstallTest', 'InstallTest.py'),},
            {'script': os.path.join('app', 'JsonServer', 'JsonServer.py'),},
            {'script': os.path.join('app', 'MgrBlinkData', 'MgrBlinkData.py'),},
            {'script': os.path.join('app', 'NetworkHealth', 'NetworkHealth.py'),},
            {'script': os.path.join('app', 'OapClient', 'OapClient.py'),},
            {'script': os.path.join('app', 'OTAPCommunicator', 'OTAPCommunicator.py'),},
db = SQLAlchemy(app)
assets = Environment(app)
celery = Celery('nomenklatura', broker=app.config['CELERY_BROKER_URL'])

oauth = OAuth()
github = oauth.remote_app(
    'github',
    base_url='https://github.com/login/oauth/',
    authorize_url='https://github.com/login/oauth/authorize',
    request_token_url=None,
    access_token_url='https://github.com/login/oauth/access_token',
    consumer_key=app.config.get('GITHUB_CLIENT_ID'),
    consumer_secret=app.config.get('GITHUB_CLIENT_SECRET'))
github._client.ca_certs = certifi.where()

########NEW FILE########
__FILENAME__ = default_settings
DEBUG = True
SECRET_KEY = 'no'
SQLALCHEMY_DATABASE_URI = 'sqlite:///master.sqlite3'
CELERY_BROKER_URL = 'amqp://*****:*****@localhost:5672//'

GITHUB_CLIENT_ID = 'da79a6b5868e690ab984'
GITHUB_CLIENT_SECRET = '1701d3bd20bbb29012592fd3a9c64b827e0682d6'

ALLOWED_EXTENSIONS = set(['csv', 'tsv', 'ods', 'xls', 'xlsx', 'txt'])

########NEW FILE########
__FILENAME__ = exc
# This file exists because I'm not sure we'll need to subclass them later.
    res.append('boto3>=1.7.24')
    res.append('requests_aws4auth>=0.9')
    res.append('click>=6.7,<7.0')
    res.append('pyyaml>=3.10')
    res.append('voluptuous>=0.9.3')
    res.append('certifi>=2018.4.16')
    res.append('six>=1.11.0')
    return res


try:
    ### cx_Freeze ###
    from cx_Freeze import setup, Executable

    try:
        import certifi
        cert_file = certifi.where()
    except ImportError:
        cert_file = ''

    # Dependencies are automatically detected, but it might need fine tuning.
    base = 'Console'

    icon = None
    if os.path.exists('Elastic.ico'):
        icon = 'Elastic.ico'

    curator_exe = Executable(
        "run_curator.py",
        base=base,
        targetName="curator",
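# When freezing with cx_Freeze, certifi.where() resolves to a path inside the
# unfrozen site-packages, so the bundle usually has to be shipped alongside the
# executable. One plausible way to do that (an assumption about this setup, not
# a quote from it) is the build_exe option include_files:
import certifi
from cx_Freeze import setup, Executable

cert_file = certifi.where()

setup(
    name="curator",
    version="0.0.0",  # placeholder version
    options={
        "build_exe": {
            # copy the CA bundle into the build directory as cacert.pem
            "include_files": [(cert_file, "cacert.pem")],
        }
    },
    executables=[Executable("run_curator.py", base="Console")],
)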