def __init__(self): super(MovieCrawlerCP, self).__init__(cadena=u"Cineplanet", tag="CP") # subtitle indicators self.suffix_subtitles['doblada'] = [ u'2D Doblada', u'3D Doblada', u'Doblada', ] self.suffix_subtitles['subtitluada'] = [ u'Subtitulada', u'2D Subtitulada', u'3D Subtitulada', ] # resolution indicators self.suffix_resolutions['HD'] = [ u'Digital', u'Digital Hd', u'HD', u'Hd', ] self.suffix_resolutions['3D'] = [ u'3D', ] self.suffix_discard = [ ] self.url = r"""https://cineplanet.com.pe""" self.encoding = 'utf-8' self.conn = urllib3.connectionpool.connection_from_url( self.url, timeout=self.timeout, headers=urllib3.make_headers(user_agent=wanderer()) )
def __init__(self, host='localhost', port=9200, http_auth=None, use_ssl=False, verify_certs=False, ca_certs=None, client_cert=None, ssl_version=None, ssl_assert_hostname=None, ssl_assert_fingerprint=None, maxsize=10, **kwargs): super(Urllib3HttpConnection, self).__init__(host=host, port=port, **kwargs) self.headers = urllib3.make_headers(keep_alive=True) if http_auth is not None: if isinstance(http_auth, (tuple, list)): http_auth = ':'.join(http_auth) self.headers.update(urllib3.make_headers(basic_auth=http_auth)) pool_class = urllib3.HTTPConnectionPool kw = {} if use_ssl: pool_class = urllib3.HTTPSConnectionPool kw.update({ 'ssl_version': ssl_version, 'assert_hostname': ssl_assert_hostname, 'assert_fingerprint': ssl_assert_fingerprint, }) if verify_certs: kw.update({ 'cert_reqs': 'CERT_REQUIRED', 'ca_certs': ca_certs, 'cert_file': client_cert, }) elif ca_certs: raise ImproperlyConfigured("You cannot pass CA certificates when verify SSL is off.") else: warnings.warn( 'Connecting to %s using SSL with verify_certs=False is insecure.' % host) self.pool = pool_class(host, port=port, timeout=self.timeout, maxsize=maxsize, **kw)
def __init__(self, con_pool_size=1, proxy_url=None, urllib3_proxy_kwargs=None): if urllib3_proxy_kwargs is None: urllib3_proxy_kwargs = dict() kwargs = dict( maxsize=con_pool_size, cert_reqs="CERT_REQUIRED", ca_certs=certifi.where(), socket_options=HTTPConnection.default_socket_options + [(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)], ) # Set a proxy according to the following order: # * proxy defined in proxy_url (+ urllib3_proxy_kwargs) # * proxy set in `HTTPS_PROXY` env. var. # * proxy set in `https_proxy` env. var. # * None (if no proxy is configured) if not proxy_url: proxy_url = os.environ.get("HTTPS_PROXY") or os.environ.get("https_proxy") if not proxy_url: mgr = urllib3.PoolManager(**kwargs) else: kwargs.update(urllib3_proxy_kwargs) mgr = urllib3.proxy_from_url(proxy_url, **kwargs) if mgr.proxy.auth: # TODO: what about other auth types? auth_hdrs = urllib3.make_headers(proxy_basic_auth=mgr.proxy.auth) mgr.proxy_headers.update(auth_hdrs) self._con_pool = mgr
def __init__(self, host='localhost', port=9200, http_auth=None, use_ssl=False, verify_certs=False, ca_certs=None, client_cert=None, maxsize=10, **kwargs): super(Urllib3HttpConnection, self).__init__(host=host, port=port, **kwargs) self.headers = {} if http_auth is not None: if isinstance(http_auth, (tuple, list)): http_auth = ':'.join(http_auth) self.headers = urllib3.make_headers(basic_auth=http_auth) pool_class = urllib3.HTTPConnectionPool kw = {} if use_ssl: pool_class = urllib3.HTTPSConnectionPool if verify_certs: kw['cert_reqs'] = 'CERT_REQUIRED' kw['ca_certs'] = ca_certs kw['cert_file'] = client_cert elif ca_certs: raise ImproperlyConfigured("You cannot pass CA certificates when verify SSL is off.") else: warnings.warn( 'Connecting to %s using SSL with verify_certs=False is insecure.' % host) self.pool = pool_class(host, port=port, timeout=self.timeout, maxsize=maxsize, **kw)
def upload(): upload_url = "http://127.0.0.1:8080/upload" url = urllib3.util.parse_url(upload_url) cb_url = url.request_uri if url.port is not None: server = "%s:%d"%(url.host, url.port) else: server = url.host conn = urllib3.connection_from_url(server) headers = urllib3.make_headers(keep_alive=True) content = "hello world" response = conn.urlopen("POST", cb_url, body=content, headers=headers) if response.status != 200: print "upload failed:", response.status sys.exit(1) else: print response.getheaders() print response.read() print response.data fileid = json.loads(response.data)["fileid"] path = "/download?fileid=%d"%fileid print "download path:", path response = conn.urlopen("GET", path, headers=headers) if response.status != 200: print "download failed:", response.status sys.exit(1) else: print response.data
def _request(self, method, path, params=None): url = self._compose_url(path, params) self.rate_limit_lock and self.rate_limit_lock.acquire() auth_string = self.USER + ":" + self.PW headers = urllib3.make_headers(basic_auth=auth_string,accept_encoding=True) r = self.connection_pool.urlopen(method.upper(), url, headers=headers) return self._handle_response(r)
def _create_headers(self, content_type): """ Creates the headers for the request. """ headers = urllib3.make_headers(keep_alive=True) headers['content-type'] = content_type return headers
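# A minimal illustrative sketch (not part of the snippet above; the content type is only an
# example): urllib3.make_headers returns a plain dict, so per-request headers such as a
# content type can be merged in with ordinary dict assignment.
import urllib3

headers = urllib3.make_headers(keep_alive=True)  # -> {'connection': 'keep-alive'}
headers['content-type'] = 'application/json'     # hypothetical content type for illustration
print(headers)  # {'connection': 'keep-alive', 'content-type': 'application/json'}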
def getHTML(path): try: headers = urllib3.make_headers(keep_alive=True, user_agent="Microsoft-Windows/6.3 UPnP/1.0") http = urllib3.PoolManager(timeout=3.0) connection = http.request('GET', path, headers=headers) return connection except Exception: return None
def connect_web(url): try: http = urllib3.PoolManager() http.headers = urllib3.make_headers(user_agent=None) html = http.urlopen('GET', url) return html except ValueError: print("{}... does not exist..".format(url))
def __init__(self, host='localhost', port=9200, http_auth=None, use_ssl=False, verify_certs=True, ca_certs=None, client_cert=None, client_key=None, ssl_version=None, ssl_assert_hostname=None, ssl_assert_fingerprint=None, maxsize=10, headers=None, **kwargs): super(Urllib3HttpConnection, self).__init__(host=host, port=port, use_ssl=use_ssl, **kwargs) self.headers = urllib3.make_headers(keep_alive=True) if http_auth is not None: if isinstance(http_auth, (tuple, list)): http_auth = ':'.join(http_auth) self.headers.update(urllib3.make_headers(basic_auth=http_auth)) # update headers in lowercase to allow overriding of auth headers if headers: for k in headers: self.headers[k.lower()] = headers[k] self.headers.setdefault('content-type', 'application/json') ca_certs = CA_CERTS if ca_certs is None else ca_certs pool_class = urllib3.HTTPConnectionPool kw = {} if use_ssl: pool_class = urllib3.HTTPSConnectionPool kw.update({ 'ssl_version': ssl_version, 'assert_hostname': ssl_assert_hostname, 'assert_fingerprint': ssl_assert_fingerprint, }) if verify_certs: if not ca_certs: raise ImproperlyConfigured("Root certificates are missing for certificate " "validation. Either pass them in using the ca_certs parameter or " "install certifi to use it automatically.") kw.update({ 'cert_reqs': 'CERT_REQUIRED', 'ca_certs': ca_certs, 'cert_file': client_cert, 'key_file': client_key, }) else: warnings.warn( 'Connecting to %s using SSL with verify_certs=False is insecure.' % host) self.pool = pool_class(host, port=port, timeout=self.timeout, maxsize=maxsize, **kw)
def get_programacion_cine(self, idCine=0, url=None): retries = 3 while retries > 0: try: r = self.conn.request( 'GET', url, headers = urllib3.make_headers(user_agent=wanderer()) ) break except TimeoutError: retries = retries - 1 if retries > 0: if r.status == 200: html = r.data.decode(self.encoding, errors='replace') soup = BeautifulSoup(html) m_titles = [m.a.string.strip() for m in soup.find_all( 'div', class_='titcarte') if m.a] m_showtimes = [] for m in soup.find_all('div', class_='horasprof'): if m.string: m_showtimes.append(m.string.strip()) else: m_showtimes.append(None) movies = [] for i in range(0, len(m_titles)): # Handle the case where no showtimes are available for the movie if m_showtimes[i]: movie = Movie( name = self.purify_movie_name(m_titles[i]), showtimes = self.grab_horarios(m_showtimes[i]), # The Cinerama website does not provide any more detail isSubtitled = True, isTranslated = False, isHD = True, is3D = False, isDbox = False, ) movies.append(movie) return movies else: return [] else: return []
def _init_http_proxy(self, http_proxy, **kwargs): pool_options = dict(kwargs) p = urlparse.urlparse(http_proxy) scheme = p.scheme netloc = p.netloc if "@" in netloc: auth, netloc = netloc.rsplit("@", 1) pool_options["proxy_headers"] = urllib3.make_headers(proxy_basic_auth=auth) return urllib3.ProxyManager("%s://%s" % (scheme, netloc), **pool_options)
def __make_headers(self, **header_kw): user = header_kw.get('user') or self.user password = header_kw.get('pass') or self.password proxy_user = header_kw.get('proxy_user') or self.proxy_user proxy_password = header_kw.get('proxy_pass') or self.proxy_password header_params = dict(keep_alive=True) proxy_header_params = dict() if user and password: header_params['basic_auth'] = '{user}:{password}'.format(user=user, password=password) if proxy_user and proxy_password: proxy_header_params['proxy_basic_auth'] = '{user}:{password}'.format(user=proxy_user, password=proxy_password) try: return urllib3.make_headers(**header_params), urllib3.make_headers(**proxy_header_params) except TypeError as error: self.error('build_header() error: {error}'.format(error=error)) return None, None
def _call_api(method, uri, params=None, body=None, headers=None, **options): prefix = options.pop("upload_prefix", cloudinary.config().upload_prefix) or "https://api.cloudinary.com" cloud_name = options.pop("cloud_name", cloudinary.config().cloud_name) if not cloud_name: raise Exception("Must supply cloud_name") api_key = options.pop("api_key", cloudinary.config().api_key) if not api_key: raise Exception("Must supply api_key") api_secret = options.pop("api_secret", cloudinary.config().api_secret) if not api_secret: raise Exception("Must supply api_secret") api_url = "/".join([prefix, "v1_1", cloud_name] + uri) processed_params = None if isinstance(params, dict): processed_params = {} for key, value in params.items(): if isinstance(value, list) or isinstance(value, tuple): value_list = {"{}[{}]".format(key, i): i_value for i, i_value in enumerate(value)} processed_params.update(value_list) elif value: processed_params[key] = value # Add authentication req_headers = urllib3.make_headers( basic_auth="{0}:{1}".format(api_key, api_secret), user_agent=cloudinary.get_user_agent() ) if headers is not None: req_headers.update(headers) kw = {} if 'timeout' in options: kw['timeout'] = options['timeout'] if body is not None: kw['body'] = body try: response = _http.request(method.upper(), api_url, processed_params, req_headers, **kw) body = response.data except HTTPError as e: raise GeneralError("Unexpected error {0}", e.message) except socket.error as e: raise GeneralError("Socket Error: %s" % (str(e))) try: result = json.loads(body.decode('utf-8')) except Exception as e: # Error parsing JSON response raise GeneralError("Error parsing server response (%d) - %s. Got - %s" % (response.status, body, e)) if "error" in result: exception_class = EXCEPTION_CODES.get(response.status) or Exception raise exception_class("Error {0} - {1}".format(response.status, result["error"]["message"])) return Response(result, response)
def getFile(self, url): headers = urllib3.make_headers( keep_alive=True, user_agent='Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_3_2 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8H7 Safari/653.18.5', accept_encoding=True) r = self.pm.request('GET', url, headers=headers) if r.status != 200: self.error = 'Error downloading: %s, %s' % (r.status, url) print self.error return r.data
def response(self): if not self._response: # TODO: implement caching layer headers = urllib3.make_headers(accept_encoding=True) http = urllib3.PoolManager() self._response = http.request('GET', self.url, headers=headers) for line in str(self._response.data, encoding="utf-8").split('\n'): line = line.strip() if line: yield line
def getFile(self, url): headers = urllib3.make_headers( keep_alive=True, user_agent='Mozilla/5.0 (X11; Ubuntu; Linux i686; rv:17.0) Gecko/20100101 Firefox/17.0', accept_encoding=True) r = self.pm.request('GET', url, headers=headers) if r.status != 200: print 'Error downloading', r.status, url # sys.exit(1) return r.data
def checkWeb(): for chat_id, v in links.items(): for link in v: print('Checking ' + link + '...') pool = urllib3.PoolManager() header = urllib3.make_headers(user_agent='Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.6 Safari/537.36') r = pool.request('GET', link, headers=header) html = r.data.decode('shift-jis').encode('utf-8').decode('utf-8') #pprint(html) if search_str not in html: bot.sendMessage(chat_id, 'ALERT! Room available: ' + link)
def __init__(self, pools_size=4, maxsize=4): # urllib3.PoolManager will pass all kw parameters to connectionpool # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75 # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680 # maxsize is the number of requests to host that are allowed in parallel # ca_certs vs cert_file vs key_file # http://stackoverflow.com/a/23957365/2985775 # cert_reqs if Configuration().verify_ssl: cert_reqs = ssl.CERT_REQUIRED else: cert_reqs = ssl.CERT_NONE # ca_certs if Configuration().ssl_ca_cert: ca_certs = Configuration().ssl_ca_cert else: # if not set certificate file, use Mozilla's root certificates. ca_certs = certifi.where() # cert_file cert_file = Configuration().cert_file # key file key_file = Configuration().key_file if Configuration().proxy is not None: proxy = Configuration().proxy proxy_url = urllib3.util.parse_url(proxy) proxy_hostport = '{}://{}:{}'.format(proxy_url.scheme, proxy_url.host, proxy_url.port) if proxy_url.port else '{}://{}'.format(proxy_url.scheme, proxy_url.host) proxy_auth = urllib3.make_headers(proxy_basic_auth=proxy_url.auth) if proxy_url.auth else None self.pool_manager = urllib3.ProxyManager( num_pools=pools_size, maxsize=maxsize, cert_reqs=cert_reqs, ca_certs=ca_certs, cert_file=cert_file, key_file=key_file, proxy_url=proxy_hostport, proxy_headers=proxy_auth ) else: self.pool_manager = urllib3.PoolManager( num_pools=pools_size, maxsize=maxsize, cert_reqs=cert_reqs, ca_certs=ca_certs, cert_file=cert_file, key_file=key_file )
def __init__(self, host='localhost', port=9200, http_auth=None, use_ssl=False, **kwargs): super(Urllib3HttpConnection, self).__init__(host=host, port=port, **kwargs) headers = {} if http_auth is not None: if isinstance(http_auth, (tuple, list)): http_auth = ':'.join(http_auth) headers = urllib3.make_headers(basic_auth=http_auth) pool_class = urllib3.HTTPConnectionPool if use_ssl: pool_class = urllib3.HTTPSConnectionPool self.pool = pool_class(host, port=port, timeout=kwargs.get('timeout', None), headers=headers)
def __init__(self, servers=None, retry_time=60, max_retries=3, timeout=None, basic_auth=None): if servers is None: servers = [DEFAULT_SERVER] self._active_servers = [server.geturl() for server in servers] self._inactive_servers = [] self._retry_time = retry_time self._max_retries = max_retries self._timeout = timeout if basic_auth: self._headers = urllib3.make_headers(basic_auth="%(username)s:%(password)s" % basic_auth) else: self._headers = {} self._lock = threading.RLock() self._local = threading.local()
def set_proxy(url, basic_auth=None): """ Access Bot API through a proxy. :param url: proxy URL :param basic_auth: 2-tuple ``('username', 'password')`` """ global _pools, _onetime_pool_spec if not url: _pools['default'] = urllib3.PoolManager(**_default_pool_params) _onetime_pool_spec = (urllib3.PoolManager, _onetime_pool_params) elif basic_auth: h = urllib3.make_headers(proxy_basic_auth=':'.join(basic_auth)) _pools['default'] = urllib3.ProxyManager(url, proxy_headers=h, **_default_pool_params) _onetime_pool_spec = (urllib3.ProxyManager, dict(proxy_url=url, proxy_headers=h, **_onetime_pool_params)) else: _pools['default'] = urllib3.ProxyManager(url, **_default_pool_params) _onetime_pool_spec = (urllib3.ProxyManager, dict(proxy_url=url, **_onetime_pool_params))
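# A hedged usage sketch for set_proxy above; the proxy URL and credentials are placeholders.
# With a basic_auth tuple, the proxy credentials end up as a Proxy-Authorization header built
# by urllib3.make_headers(proxy_basic_auth='user:pass').
set_proxy('http://proxy.example.com:3128', basic_auth=('proxyuser', 'proxypass'))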
def request(self): req = self._request if req.proxy: if req.proxy_userpwd: headers = make_headers(proxy_basic_auth=req.proxy_userpwd) else: headers = None proxy_url = '%s://%s' % (req.proxy_type, req.proxy) try: pool = ProxyManager(proxy_url, proxy_headers=headers) except ProxySchemeUnknown: raise GrabMisuseError('Urllib3 transport does ' 'not support %s proxies' % req.proxy_type) else: pool = self.pool try: retry = Retry(redirect=False, connect=False, read=False) timeout = Timeout(connect=req.connect_timeout, read=req.timeout) #req_headers = dict((make_unicode(x), make_unicode(y)) # for (x, y) in req.headers.items()) if six.PY3: req_url = make_unicode(req.url) req_method = make_unicode(req.method) else: req_url = make_str(req.url) req_method = req.method res = pool.urlopen(req_method, req_url, body=req.data, timeout=timeout, retries=retry, headers=req.headers, preload_content=False) except exceptions.ConnectTimeoutError as ex: raise error.GrabConnectionError('Could not create connection') except exceptions.ProtocolError as ex: raise error.GrabConnectionError(ex.args[1][0], ex.args[1][1]) # WTF? self.request_head = '' self.request_body = '' self.request_log = '' self._response = res
def execute_request(http_connector, method, params, headers, auth, api_url, **options): # authentication key = auth.get("key") secret = auth.get("secret") req_headers = urllib3.make_headers( basic_auth="{0}:{1}".format(key, secret), user_agent=cloudinary.get_user_agent() ) if headers is not None: req_headers.update(headers) kw = {} if "timeout" in options: kw["timeout"] = options["timeout"] if "body" in options: kw["body"] = options["body"] processed_params = process_params(params) api_url = smart_escape(unquote(api_url)) try: response = http_connector.request(method.upper(), api_url, processed_params, req_headers, **kw) body = response.data except HTTPError as e: raise GeneralError("Unexpected error {0}", e.message) except socket.error as e: raise GeneralError("Socket Error: %s" % (str(e))) try: result = json.loads(body.decode('utf-8')) except Exception as e: # Error parsing JSON response raise GeneralError("Error parsing server response (%d) - %s. Got - %s" % (response.status, body, e)) if "error" in result: exception_class = EXCEPTION_CODES.get(response.status) or Exception raise exception_class("Error {0} - {1}".format(response.status, result["error"]["message"])) return Response(result, response)
def __init__(self, utility, is_no_update): # Read config.ini. self.utility = utility config = configparser.ConfigParser() self.file_name = os.path.basename(__file__) self.full_path = os.path.dirname(os.path.abspath(__file__)) self.root_path = os.path.join(self.full_path, '../') config.read(os.path.join(self.root_path, 'config.ini')) try: self.con_timeout = float(config['CveExplorerNVD']['con_timeout']) self.max_cve_count = int(config['CveExplorerNVD']['max_cve_count']) self.vuln_db_dir = config['CveExplorerNVD']['vuln_db_dir'] self.nvd_name = config['CveExplorerNVD']['nvd_name'] self.nvd_db_header = str(config['CveExplorerNVD']['nvd_db_header']).split('@') self.nvd_year_name = config['CveExplorerNVD']['nvd_year_name'] self.nvd_db_dir = os.path.join(self.full_path, self.vuln_db_dir) self.nvd_path = os.path.join(self.full_path, os.path.join(self.vuln_db_dir, self.nvd_name)) self.nvd_year_path = os.path.join(self.full_path, os.path.join(self.vuln_db_dir, self.nvd_year_name)) self.cve_year_list = config['CveExplorerNVD']['cve_years'].split('@') self.nvd_meta_url = config['CveExplorerNVD']['nvd_meta_url'] self.nvd_zip_url = config['CveExplorerNVD']['nvd_zip_url'] self.nvd_chk_date_regex = config['CveExplorerNVD']['nvd_chk_date_regex'] self.nvd_chk_hash_regex = config['CveExplorerNVD']['nvd_chk_hash_regex'] self.nvd_date_format = config['CveExplorerNVD']['nvd_date_format'] self.headers = urllib3.make_headers(proxy_basic_auth=self.utility.proxy_user + ':' + self.utility.proxy_pass) self.db_colmns = {} except Exception as e: self.utility.print_message(FAIL, 'Reading config.ini failed: {}'.format(e)) self.utility.write_log(40, 'Reading config.ini failed: {}'.format(e)) sys.exit(1) # Create/get the vulnerability database. for idx, col_name in enumerate(self.nvd_db_header): self.db_colmns[idx] = col_name if is_no_update is True and os.path.exists(self.nvd_path): self.utility.print_message(WARNING, 'Skip updating vulnerability DB.') self.utility.print_message(WARNING, 'Load existing "{}".'.format(self.nvd_path)) self.df_vuln_db = pd.read_csv(self.nvd_path, sep=',', encoding='utf-8') else: self.df_vuln_db = self.initialize_vuln_db()
def clear(self): ''' couchbase-cli To clear all data in bucket. use command couchbase-cli on Couchbase Server, /opt/couchbase/bin/couchbase-cli bucket-flush -u admin -p password -c 127.0.0.1:8091 -b bucket --force Couchbase Command Line tools folder /opt/couchbase/lib/python could be copied to other host or get from GitHub > git clone https://github.com/couchbase/couchbase-cli.git execute: > python couchbase-cli bucket-flush -u admin -p password -c 192.168.12.13:8091 -b bucket --force import os,os.path #if self._couchbase_cli == '': if True: log.error( 'CouchbaseError: please use couchbase-cli instead\n' 'couchbase-cli on GitHub\n' 'https://github.com/couchbase/couchbase-cli.git' ) return False else: # Always 'Permission denied' ? return os.system( 'python %s bucket-flush -u %s -p %s -c %s -b %s --force'% ( path.normpath( self._couchbase_cli + '/couchbase-cli' ), self._options.get( 'admin','' ), self._options.get( 'admin-pwd', '' ), self._server[0], self._bucket ) )== 0 ''' endpoint = '/pools/default/buckets/%s/controller/doFlush' % self._options.get('bucket', 'default') requestHeaders = urllib3.make_headers(basic_auth=self._options.get('admin:pwd', '')) conn = urllib3.connection_from_url(self._server[0], block=True, maxsize=100) res = conn.urlopen(url=endpoint, method='POST', headers=requestHeaders) if len(res.data) == 0: return True else: msg = res.data # '{"_":"Flush is disabled for the bucket"}' try: msg = json.loads(res.data).get('_') except Exception as e: pass log.error( "CouchbaseError: clear failed: %s" % ( msg ) ) return False
def _init_con_pool(): global _CON_POOL kwargs = dict(maxsize=CON_POOL_SIZE, cert_reqs='CERT_REQUIRED', ca_certs=certifi.where(), socket_options=HTTPConnection.default_socket_options + [ (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1), ]) proxy_url = _get_con_pool_proxy() if not proxy_url: mgr = urllib3.PoolManager(**kwargs) else: if _CON_POOL_PROXY_KWARGS: kwargs.update(_CON_POOL_PROXY_KWARGS) mgr = urllib3.proxy_from_url(proxy_url, **kwargs) if mgr.proxy.auth: # TODO: what about other auth types? auth_hdrs = urllib3.make_headers(proxy_basic_auth=mgr.proxy.auth) mgr.proxy_headers.update(auth_hdrs) _CON_POOL = mgr
def create_series(dublincore, acl): ''' Creates a series with given metadata and access control rules in the target Opencast. :param dublincore: Dublin Core metadata XML :type dublincore: str :param acl: Access control list as XML :type acl: str ''' url_series = f'{TARGET_HOST}/series/' headers = urllib3.make_headers(basic_auth=f'{TARGET_USER}:{TARGET_PASS}') request = http.request('POST', url_series, headers=headers, fields={ 'series': dublincore, 'acl': acl }) print('Create series response:', request.status)
def connect(): if args.proxy: proto, _ = args.proxy.split("://") if proto == "http": default_headers = urllib3.make_headers( proxy_basic_auth='%s:%s' % (args.username, args.password), ) http_proxy = urllib3.ProxyManager( args.proxy, headers=default_headers ) http = http_proxy.connection_from_url(args.uri["uri"]) elif proto in ["socks4", "socks5"]: http = SOCKSProxyManager(args.proxy) else: logging.error("Invalid proxy protocol. It must start with 'http://' or " "'socks[45]://'.") exit(1) else: http = urllib3.connection_from_url(args.uri["uri"]) return http
def init(): logger = logging.getLogger(__name__) global __HTTP proxy_url = os.getenv("http_proxy") if proxy_url and len(proxy_url) > 0: parsed_url = urllib3.util.parse_url(proxy_url) logger.info( "Connecting via proxy URL [%s] to the Internet (picked up from the env variable [http_proxy]).", proxy_url) __HTTP = urllib3.ProxyManager( proxy_url, cert_reqs="CERT_REQUIRED", ca_certs=certifi.where(), # appropriate headers will only be set if there is auth info proxy_headers=urllib3.make_headers( proxy_basic_auth=parsed_url.auth), ) else: logger.info("Connecting directly to the Internet (no proxy support).") __HTTP = urllib3.PoolManager(cert_reqs="CERT_REQUIRED", ca_certs=certifi.where())
def __init__(self, host='localhost', port=9200, http_auth=None, use_ssl=False, verify_certs=False, ca_certs=None, client_cert=None, maxsize=10, block=False, **kwargs): super(Urllib3HttpConnection, self).__init__(host=host, port=port, **kwargs) self.headers = {} if http_auth is not None: if isinstance(http_auth, (tuple, list)): http_auth = ':'.join(http_auth) self.headers = urllib3.make_headers(basic_auth=http_auth) pool_class = urllib3.HTTPConnectionPool kw = {} if use_ssl: pool_class = urllib3.HTTPSConnectionPool if verify_certs: kw['cert_reqs'] = 'CERT_REQUIRED' kw['ca_certs'] = ca_certs kw['cert_file'] = client_cert elif ca_certs: raise ImproperlyConfigured( "You cannot pass CA certificates when verify SSL is off.") self.pool = pool_class(host, port=port, timeout=self.timeout, maxsize=maxsize, block=block, **kw)
def __init__(self, currdir, logger=None, mode=None): Thread.__init__(self) self.mode = mode self.logger = logger or logging.getLogger(__name__) self.threadname = "[ th_{0} ]".format( os.path.basename(currdir)).upper() self.currentdir = os.path.join(BaseConfig.VAI_DIR, currdir) self.logger.info("{0} Select current directory '{1}'".format( self.threadname, self.currentdir)) if Distribdiag.DIAGS_TO_S3 == "yes": self.conn = boto.connect_s3(Distribdiag.AWS_ACCESS_KEY, Distribdiag.AWS_SECRET_KEY) self.vaibucket = self.conn.get_bucket('vai-znb') elif Distribdiag.DIAGS_TO_REBOUND == "yes": self.url = "{0}:{1}".format(Distribdiag.CONFIG.get('server'), Distribdiag.CONFIG.get('port')) self.logger.info("{0} Start dedicated thread on {1}".format( self.threadname, self.currentdir)) self.logger.info("{0} Opening connection to: {1}".format( self.threadname, self.url)) self.conn = urllib3.connection_from_url(self.url) self.post_url = "{0}/{1}".format(self.url, Distribdiag.SERVER_FILENAME) self.headers = urllib3.make_headers( basic_auth='%s:%s' % (Distribdiag.CONFIG.get('login'), Distribdiag.CONFIG.get('password')), keep_alive=True, user_agent=gethostname()) # disable warning from InsecureRequestWarning urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) self.logger.info("{0} OK connection established to {1}".format( self.threadname, self.url)) else: self.logger.error( 'Invalid parameter for S3 or Rebound transfer mode; set enable="yes" to activate.' ) raise ValueError('Invalid parameter for S3 or Rebound transfer mode')
def lambda_handler(event, context): print(event) bucket_name = event["Records"][0]["s3"]["bucket"]["name"] object_name = event["Records"][0]["s3"]["object"]["key"] print(bucket_name, object_name) rekognition = boto3.client("rekognition", "us-east-1") img = {"S3Object": {"Bucket": bucket_name, "Name": object_name}} response = rekognition.detect_labels(Image=img, MaxLabels=10, MinConfidence=90) labels = [label['Name'] for label in response['Labels']] print(labels) es_array = json.dumps({ 'objectKey': object_name, 'bucket': bucket_name, 'createdTimestamp': datetime.now().strftime("%Y-%m-%d %H:%M:%S"), 'labels': labels }).encode('utf-8') http = urllib3.PoolManager() es_search = 'https://search-photos-aghaksxl3t6gsnxniv6snl34oy.us-east-1.es.amazonaws.com/photos/photo' headers = urllib3.make_headers(basic_auth='esmaster:Es888888!') headers.update({"Content-Type": "application/json"}) r = http.request('POST', es_search, headers=headers, body=es_array) print(es_array) # TODO implement return {'statusCode': 200, 'body': json.dumps('Hello from Lambda!')}
def setUp(self) -> None: self.tunnel_opts = TunnelOptions( host="104.248.43.30", port=1337, auth_login="******", auth_password="******", secure=True, ) self.adapter = TunneledHTTPAdapter(tunnel_opts=self.tunnel_opts, ) self.session = requests.Session() self.session.headers.update( urllib3.make_headers( keep_alive=True, disable_cache=True, accept_encoding=True, user_agent= 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/85.0.4183.102 Safari/537.36' )) # connect adapter for requests.Session instance self.session.mount("http://", self.adapter) self.session.mount("https://", self.adapter)
def _post_message(self, dat): headers = urllib3.make_headers(keep_alive=True, user_agent='sslf-hec/3.14', accept_encoding=True) headers.update({ 'Authorization': 'Splunk ' + self.token, 'Content-Type': 'application/json' }) fake_headers = headers.copy() fake_data = str(dat) if len(fake_data) > 100: fake_data = fake_data[0:98] + ' … ' fake_headers[ 'Authorization'] = 'xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxx' + headers[ 'Authorization'][-4:] log.debug( "HEC.pool_manager.request('POST', url=%s + path=%s, body=%s, headers=%s)", self.url, self.path, fake_data, fake_headers) return self.pool_manager.request('POST', self.urlpath, body=dat, headers=headers)
def call_api(method, uri, params, **options): prefix = options.pop("upload_prefix", cloudinary.config().upload_prefix) or "https://api.cloudinary.com" cloud_name = options.pop("cloud_name", cloudinary.config().cloud_name) if not cloud_name: raise Exception("Must supply cloud_name") api_key = options.pop("api_key", cloudinary.config().api_key) if not api_key: raise Exception("Must supply api_key") api_secret = options.pop("api_secret", cloudinary.config().api_secret) if not api_secret: raise Exception("Must supply api_secret") api_url = "/".join([prefix, "v1_1", cloud_name] + uri) # Add authentication headers = urllib3.make_headers( basic_auth="{0}:{1}".format(api_key, api_secret), user_agent=cloudinary.get_user_agent() ) kw = {} if 'timeout' in options: kw['timeout'] = options['timeout'] try: response = _http.request(method.upper(), api_url, params, headers, **kw) body = response.data except HTTPError as e: raise GeneralError("Unexpected error {0}", e.message) except socket.error as e: raise GeneralError("Socket Error: %s" % (str(e))) try: result = json.loads(body.decode('utf-8')) except Exception as e: # Error parsing JSON response raise GeneralError("Error parsing server response (%d) - %s. Got - %s" % (response.status, body, e)) if "error" in result: exception_class = EXCEPTION_CODES.get(response.status) or Exception raise exception_class("Error {0} - {1}".format(response.status, result["error"]["message"])) return Response(result, response)
def main(): REQUEST_KWARGS = { 'proxy_url': PROXY_URL, 'urllib3_proxy_kwargs': { 'headers': make_headers(proxy_basic_auth=f'{PROXY_LOGIN}:{PROXY_PASS}') } } updater = Updater(TOKEN, request_kwargs=REQUEST_KWARGS) conv_handler = ConversationHandler( entry_points=[CommandHandler('start', start)], states={ ACTION: [ RegexHandler('^(Check meetings)$', list_of_meetings), RegexHandler('^(Add meeting)$', add_meeting), RegexHandler('^(Add workspace)$', add_workspace), RegexHandler('^(Add location)$', add_location), RegexHandler('^(Cancel meeting)$', cancel_meeting), RegexHandler( '^((?!(Check meetings)|(Add meeting)|(Add workspace)|(Add location)|(Cancel meeting)).)*$', start) ] }, fallbacks=[ CommandHandler('cancel', cancel), CommandHandler('help', help) ]) conv_handler.states.update(list_of_meetings_states) conv_handler.states.update(workspace_states) conv_handler.states.update(cancel_states) conv_handler.states.update(meeting_states) conv_handler.states.update(location_states) updater.dispatcher.add_handler(conv_handler) updater.dispatcher.add_error_handler(error) updater.start_polling() updater.idle()
def get_cines_cadena(self): url = r'http://www.cinerama.com.pe/cines.php' retries = 3 while retries > 0: try: r = self.conn.request( 'GET', url, headers = urllib3.make_headers(user_agent=wanderer()) ) break except TimeoutError: retries = retries - 1 if retries > 0: if r.status == 200: html = r.data.decode(self.encoding, errors='replace') soup = BeautifulSoup(html) theaters = [ ( c.next.text.strip(), os.path.join(self.url, c.next['href']) ) for c in soup.find_all( 'p', class_='titulo-tienda' )] return theaters else: return []
def send_request(bucket, photo, head_object, labels): host = 'https://search-photos1-qt6drdhhpna5jdwct6mp5u6jfu.us-east-1.es.amazonaws.com' index = 'photos1' url = host + '/' + index + '/_doc' headers = urllib3.make_headers(basic_auth='user:Password123.') headers["Content-Type"] = "application/json" http = urllib3.PoolManager() body = { "objectKey": photo, "bucket": bucket, "createdTimestamp": head_object["LastModified"].strftime("%Y-%m-%dT%H:%M:%S"), "labels": labels } logger.debug(f"SENDING REQUEST") r = http.request('POST', url, headers=headers, body=json.dumps(body)) logger.debug(f"RESPONSE: {r.data}") resp = json.loads(r.data.decode('utf-8')) logger.debug(f"RESPONSE2: {resp}")
def get_labels_from_domain(self, tag): headers = { "Content-Type": "application/json"} headers.update(urllib3.make_headers(basic_auth=self.basic_auth)) query = { "size": 25, "query": { "query_string": { "default_field": tag, "query": "images" } } } response = self.http.request( 'GET', self.search_uri, body = json.dumps(query), headers=headers, retries=False ) return json.loads(response.data.decode("utf-8"))
def _download(self, url, path, auth=None, on_progress=None): if auth: headers = make_headers(basic_auth=":".join(auth)) else: headers = None r = self.http.request("GET", url, headers=headers, preload_content=False) if r.status != 200: raise RuntimeError("Failed with status code %r" % r.status) try: content_length = int(r.getheader("Content-Length")) except (ValueError, TypeError): content_length = -1 content_downloaded = 0 with open(path, "wb") as out: while True: data = r.read(65536) if not data: break out.write(data) content_downloaded += len(data) if callable(on_progress): on_progress(content_downloaded, content_length) r.release_conn()
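# A hedged usage sketch for the _download helper above. The instance name, URL, path and
# credentials are placeholders; on_progress receives (bytes_downloaded, content_length),
# where content_length is -1 when the server sends no Content-Length header.
def report_progress(done, total):
    if total > 0:
        print('%d/%d bytes (%.0f%%)' % (done, total, 100.0 * done / total))
    else:
        print('%d bytes downloaded' % done)

downloader._download('https://example.com/archive.tar.gz', '/tmp/archive.tar.gz',
                     auth=('user', 'secret'), on_progress=report_progress)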
def get_published_media(): ''' Generator, requesting all published media packages from the search service of the source Opencast. The media packages hold links to all published media and can be passed on as-is to the target Opencast. :return: Media package XML ''' url_search = f'{SOURCE_HOST_PRESENTATION}/search/episode.json' headers = urllib3.make_headers(basic_auth=f'{SOURCE_USER}:{SOURCE_PASS}') offset = 0 total = 1 limit = 2 # get published media packages from source opencast while total > offset: request = http.request('GET', url_search, headers=headers, fields={ 'limit': limit, 'offset': offset }) data = request.data.decode('utf-8') data = json.loads(data).get('search-results') offset = data.get('offset') + limit total = data.get('total') results = data.get('result', []) if type(results) is not list: results = [results] for result in results: print('Importing ' + result.get('id')) yield result.get('ocMediapackage')
def wrapper(callback, path, params=None, data='', headers=None): # python-consul doesn't allow to specify ttl smaller then 10 seconds # because session_ttl_min defaults to 10s, so we have to do this ugly dirty hack... if method == 'put' and path == '/v1/session/create': ttl = '"ttl": "{0}s"'.format(self._ttl) if not data or data == '{}': data = '{' + ttl + '}' else: data = data[:-1] + ', ' + ttl + '}' if isinstance( params, list ): # starting from v1.1.0 python-consul switched from `dict` to `list` for params params = {k: v for k, v in params} kwargs = {'retries': 0, 'preload_content': False, 'body': data} if method == 'get' and isinstance(params, dict) and 'index' in params: timeout = float( params['wait'][:-1]) if 'wait' in params else 300 # According to the documentation a small random amount of additional wait time is added to the # supplied maximum wait time to spread out the wake up time of any concurrent requests. This adds # up to wait / 16 additional time to the maximum duration. Since our goal is actually getting a # response rather read timeout we will add to the timeout a slightly bigger value. kwargs['timeout'] = timeout + max(timeout / 15.0, 1) else: kwargs['timeout'] = self._read_timeout kwargs['headers'] = (headers or {}).copy() kwargs['headers'].update( urllib3.make_headers(user_agent=USER_AGENT)) token = params.pop('token', self.token) if isinstance( params, dict) else self.token if token: kwargs['headers']['X-Consul-Token'] = token return callback( self.response( self.http.request(method.upper(), self.uri(path, params), **kwargs)))
def sendmail(http, data): headers = urllib3.make_headers(basic_auth='api:{}'.format(data.get('api_key'))) request = http.request( 'POST', data.get('endpoint'), headers=headers, fields={ 'from': data.get('from'), 'to': data.get('to'), 'subject': data.get('subject'), 'template': data.get('template') or 'blank-testing', 'h:X-Mailgun-Variables': json.dumps(data.get('params')) } ) response = json.loads( request.data.decode('utf-8') ) if request.status == 200: print(response) return response
def __init__(self, con_pool_size=1, proxy_url=None, urllib3_proxy_kwargs=None): if urllib3_proxy_kwargs is None: urllib3_proxy_kwargs = dict() kwargs = dict(maxsize=con_pool_size, cert_reqs='CERT_REQUIRED', ca_certs=certifi.where(), socket_options=HTTPConnection.default_socket_options + [ (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1), ]) # Set a proxy according to the following order: # * proxy defined in proxy_url (+ urllib3_proxy_kwargs) # * proxy set in `HTTPS_PROXY` env. var. # * proxy set in `https_proxy` env. var. # * None (if no proxy is configured) if not proxy_url: proxy_url = os.environ.get('HTTPS_PROXY') or os.environ.get( 'https_proxy') if not proxy_url: mgr = urllib3.PoolManager(**kwargs) else: kwargs.update(urllib3_proxy_kwargs) mgr = urllib3.proxy_from_url(proxy_url, **kwargs) if mgr.proxy.auth: # TODO: what about other auth types? auth_hdrs = urllib3.make_headers( proxy_basic_auth=mgr.proxy.auth) mgr.proxy_headers.update(auth_hdrs) self._con_pool = mgr
def _call_api(method, uri, params=None, body=None, headers=None, **options): prefix = options.pop( "upload_prefix", cloudinary.config().upload_prefix) or "https://api.cloudinary.com" cloud_name = options.pop("cloud_name", cloudinary.config().cloud_name) if not cloud_name: raise Exception("Must supply cloud_name") api_key = options.pop("api_key", cloudinary.config().api_key) if not api_key: raise Exception("Must supply api_key") api_secret = options.pop("api_secret", cloudinary.config().api_secret) if not api_secret: raise Exception("Must supply api_secret") api_url = "/".join([prefix, "v1_1", cloud_name] + uri) processed_params = None if isinstance(params, dict): processed_params = {} for key, value in params.items(): if isinstance(value, list) or isinstance(value, tuple): value_list = { "{}[{}]".format(key, i): i_value for i, i_value in enumerate(value) } processed_params.update(value_list) elif value: processed_params[key] = value # Add authentication req_headers = urllib3.make_headers(basic_auth="{0}:{1}".format( api_key, api_secret), user_agent=cloudinary.get_user_agent()) if headers is not None: req_headers.update(headers) kw = {} if 'timeout' in options: kw['timeout'] = options['timeout'] if body is not None: kw['body'] = body try: response = _http.request(method.upper(), api_url, processed_params, req_headers, **kw) body = response.data except HTTPError as e: raise GeneralError("Unexpected error {0}", e.message) except socket.error as e: raise GeneralError("Socket Error: %s" % (str(e))) try: result = json.loads(body.decode('utf-8')) except Exception as e: # Error parsing JSON response raise GeneralError( "Error parsing server response (%d) - %s. Got - %s" % (response.status, body, e)) if "error" in result: exception_class = EXCEPTION_CODES.get(response.status) or Exception raise exception_class("Error {0} - {1}".format( response.status, result["error"]["message"])) return Response(result, response)
def send_request(self, method, target_url, preload_content=True, query_param=None, body_param=None, enc='utf-8'): res_header = '' res_body = '' server_header = '-' res = None content_type_value = '' # Initialize empty parameter set. if query_param is None: query_param = {} if body_param is None: body_param = {} # Set proxy server. http = None if self.proxy != '': self.print_message(WARNING, 'Set proxy server: {}'.format(self.proxy)) if self.proxy_user != '': headers = urllib3.make_headers( proxy_basic_auth=self.proxy_user + ':' + self.proxy_pass) http = urllib3.ProxyManager(timeout=self.con_timeout, headers=self.http_req_header, proxy_url=self.proxy, proxy_headers=headers) else: http = urllib3.ProxyManager(timeout=self.con_timeout, headers=self.http_req_header, proxy_url=self.proxy) else: http = urllib3.PoolManager(timeout=self.con_timeout, headers=self.http_req_header) try: if method.lower() == 'get': res = http.request('GET', target_url, fields=query_param, preload_content=preload_content, redirect=self.redirect) else: encoded_args = urlencode(body_param, encoding=enc) res = http.request('POST', target_url + '?' + encoded_args, preload_content=preload_content, redirect=self.redirect) for header in res.headers.items(): res_header += header[0] + ': ' + header[1] + '\r\n' if header[0].lower() == 'server': server_header = header[0] + ': ' + header[1] if header[0].lower() == 'content-type': content_type_value = header[1] # Detect encoding. encoding = self.detect_encoding(res.data, content_type_value) # Get response body. res_body = res.data.decode(encoding) except Exception as e: self.print_message(WARNING, 'Use default charset: {}'.format(self.encoding)) encoding = self.encoding self.print_exception(e, 'Access failed: {}'.format(target_url)) self.write_log(30, 'Accessing failed: {}'.format(target_url)) return res, server_header, res_header, res_body, encoding
def get_programacion_cine(self, idCine=0, url=None): # UVK uses URLs to identify its theaters, not ids # TODO: handle errors retries = 3 while retries > 0: try: r = self.conn.request( 'GET', url, headers = urllib3.make_headers(user_agent=wanderer()) ) break except TimeoutError: retries = retries - 1 if retries > 0: if r.status == 200: # Page read OK html = r.data.decode(self.encoding, errors='replace') # html = r.text.encode(r.encoding, errors='replace') soup = BeautifulSoup(html, 'html5lib') p1 = soup.find('div', class_='highslide-body') if p1: peliculas = p1.find_all('td', class_='bg_infotabla1') else: return [] result = [] for i in range(0, len(peliculas)-1): # We identify the start of the movie listing because those are the # <td> elements with class bg_infotabla1 and an <a href=...> element if peliculas[i].a is not None: # The first <td> has the movie name. # The second <td> has the showtimes movie = Movie( name = self.purify_movie_name(peliculas[i].string), showtimes = self.grab_horarios(peliculas[i+1].string), isSubtitled = self.is_movie_subtitled(peliculas[i].string), isTranslated = self.is_movie_translated(peliculas[i].string), isHD = self.is_movie_HD(peliculas[i].string), is3D = self.is_movie_3D(peliculas[i].string), isDbox = self.is_movie_dbox(peliculas[i].string), ) result.append(movie) i = i +2 else: i = i+1 # Sort movies result.sort(key=lambda x: x.name) return result else: return [] else: return []
def get_movies(url): retries = 3 result = [] while retries > 0: try: r = self.conn.request( 'GET', url, headers = urllib3.make_headers(user_agent=wanderer()) ) break except TimeoutError: retries = retries - 1 if retries > 0: if r.status == 200: # Page read OK html = r.data.decode(self.encoding, errors='replace') soup = BeautifulSoup(html) theater_names = [t.string for t in soup.find_all( 'span', class_='TitulosBlanco' )] theater_codes = [ t['id'][4:] for t in soup.find_all( 'a', id=re.compile('^Cine*') )] for i in range(0, len(theater_names)): theater = Theater(name=theater_names[i]) movies = [] tmp = soup.find_all('a', id=re.compile( '^idPelCine.+' + theater_codes[i] )) for p in tmp: movie_name = p.parent.find('a', class_='peliculaCartelera').string movie_showtimes = [t.string for t in p.parent.parent.find_all( 'span', class_=re.compile('^horariosCartelera*') )] movie_showtimes = self.grab_horarios(" ".join(movie_showtimes)) movie = Movie( name = self.purify_movie_name(movie_name), showtimes = movie_showtimes, isSubtitled = self.is_movie_subtitled(movie_name), isTranslated = self.is_movie_translated(movie_name), isHD = self.is_movie_HD(movie_name), is3D = self.is_movie_3D(movie_name), isDbox = self.is_movie_dbox(movie_name), ) movies.append(movie) # movies.sort(key=lambda x: x.name) theater.movies = movies result.append(theater) else: return [] else: return [] return result
def _get_headers(self): if self.username and self.password: credentials = ':'.join((self.username, self.password)) return urllib3.make_headers(basic_auth=credentials) return {}
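# A small illustration (assumption-level, not from the snippet above) of what
# urllib3.make_headers(basic_auth=...) returns: a dict carrying a ready-made,
# base64-encoded Authorization header.
import urllib3

print(urllib3.make_headers(basic_auth='alice:secret'))
# {'authorization': 'Basic YWxpY2U6c2VjcmV0'}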
def request(self): req = self._request if req.proxy: if req.proxy_userpwd: headers = make_headers(proxy_basic_auth=req.proxy_userpwd) else: headers = None proxy_url = '%s://%s' % (req.proxy_type, req.proxy) if req.proxy_type == 'socks5': pool = SOCKSProxyManager( proxy_url, cert_reqs='CERT_REQUIRED', ca_certs=certifi.where()) # , proxy_headers=headers) else: pool = ProxyManager(proxy_url, proxy_headers=headers, cert_reqs='CERT_REQUIRED', ca_certs=certifi.where()) else: pool = self.pool with self.wrap_transport_error(): # Retries can be disabled by passing False: # http://urllib3.readthedocs.io/en/latest/reference/urllib3.util.html#module-urllib3.util.retry # Do not use False because of warning: # Converted retries value: False -> Retry(total=False, # connect=None, read=None, redirect=0, status=None) retry = Retry( total=False, connect=False, read=False, redirect=0, status=None, ) # The read timeout is not total response time timeout # It is the timeout on read of next data chunk from the server # Total response timeout is handled by Grab timeout = Timeout(connect=req.connect_timeout, read=req.timeout) #req_headers = dict((make_unicode(x), make_unicode(y)) # for (x, y) in req.headers.items()) if six.PY3: req_url = make_unicode(req.url) req_method = make_unicode(req.method) else: req_url = make_str(req.url) req_method = req.method req.op_started = time.time() try: res = pool.urlopen(req_method, req_url, body=req.data, timeout=timeout, retries=retry, headers=req.headers, preload_content=False) except UnicodeError as ex: raise error.GrabConnectionError('GrabInvalidUrl', ex) #except exceptions.ReadTimeoutError as ex: # raise error.GrabTimeoutError('ReadTimeoutError', ex) #except exceptions.ConnectTimeoutError as ex: # raise error.GrabConnectionError('ConnectTimeoutError', ex) #except exceptions.ProtocolError as ex: # # TODO: # # the code # # raise error.GrabConnectionError(ex.args[1][0], ex.args[1][1]) # # fails # # with error TypeError: 'OSError' object is not subscriptable # raise error.GrabConnectionError('ProtocolError', ex) #except exceptions.SSLError as ex: # raise error.GrabConnectionError('SSLError', ex) # WTF? self.request_head = b'' self.request_body = b'' self.request_log = b'' self._response = res
def endpoint_api(object, endpoint, headers={}, data={}, json_data={}, method='GET', admin=False, compatibility=False): logger = logging.getLogger('endpoint_api') endpoint_url = f"{object.url}{endpoint}" logger.debug(f"[{get_linenumber()}] Endpoint URL: {endpoint_url}") logger.debug(f"[{get_linenumber()}] Method: {method}") logger.debug(f"[{get_linenumber()}] Headers: {headers}") if len(headers.keys()) == 0: logger.debug( f"[{get_linenumber()}] Empty header detected, using default") default_header = True else: default_header = False logger.debug( f"[{get_linenumber()}] Headers: {json.dumps(headers, indent=2)}") if data is None: logger.debug(f"[{get_linenumber()}] Empty data detected") else: logger.debug(f"[{get_linenumber()}] Data: {data}") if json_data is None: logger.debug(f"[{get_linenumber()}] Empty json detected") else: logger.debug( f"[{get_linenumber()}] Data: {json.dumps(json_data, indent=2)}") # Set credentials if type(object) is Server: creds = (object.username, object.password) elif type(object) is Database: creds = (object.server.username, object.server.password) elif type(object) is Node: creds = (object.server.username, object.server.password) elif type(object) is Document: creds = (object.database.server.username, object.database.server.password) logger.debug(f"[{get_linenumber()}] Checking which module to use") if requests_module: ## When the requests module is found try: if admin: logger.debug(f"[{get_linenumber()}] Using admin mode") if type(object) is Server: headers["Host"] = object.admin_host headers["Referer"] = f"http://{object.admin_host}" headers["Referer"] = object.admin_url elif type(object) is Database: headers["Host"] = object.server.admin_host headers["Referer"] = f"http://{object.server.admin_host}" headers["Referer"] = object.server.admin_url elif type(object) is Document: headers["Host"] = object.database.server.admin_host headers[ "Referer"] = f"http://{object.database.server.admin_host}" headers["Referer"] = object.database.server.admin_url else: if type(object) is Server: headers["Host"] = object.couchdb_host headers["Referer"] = f"http://{object.couchdb_host}" headers["Referer"] = object.url elif type(object) is Database: headers["Host"] = object.server.couchdb_host headers["Referer"] = f"http://{object.server.couchdb_host}" headers["Referer"] = object.server.url elif type(object) is Document: headers["Host"] = object.database.server.couchdb_host headers[ "Referer"] = f"http://{object.database.server.couchdb_host}" # headers["Referer"] = object.database.server.url if default_header: headers["accept"] = "application/json" logger.debug(f"[{get_linenumber()}] Final header:\n{headers}") if method.upper() == 'GET': response = requests.get(url=endpoint_url, headers=headers, auth=creds, json=json_data, data=data) elif method.upper() == 'PUT': response = requests.put(url=endpoint_url, headers=headers, auth=creds, json=json_data, data=data) elif method.upper() == 'POST': response = requests.post(url=endpoint_url, headers=headers, auth=creds, json=json_data, data=data) elif method.upper() == 'DELETE': response = requests.delete(url=endpoint_url, headers=headers, auth=creds, json=json_data, data=data) else: response = requests.request(method=method.upper(), url=endpoint_url, headers=headers, auth=creds, json=json_data, data=data) except requests.HTTPError as he: logger.warning('HTTPError while trying the connection') logger.debug(he) response = {'status': 'error', 'headers': {}} except Exception as e: logger.critical(e.__str__()) response = {'status': 'error', 
'fullerror': e.__str__()} finally: if requests_module: logger.debug(f"Crude response >>> \n{response}") if "json" in dir(response): return response.json() else: return response.raw.read().decode() else: return response else: ## Falback to http.client module # Split Endpoint URL into base + path host_end = endpoint_url.find("/", 7) url_base = endpoint_url[:host_end].split("/")[2] url_path = endpoint_url[host_end:] logger.debug( f"[{get_linenumber()}] URL Base: {url_base}\nURL Path: {url_path}") try: logger.debug( f"[{get_linenumber()}] Preparing request using http.client") http_conn = http.client.HTTPConnection(host=url_base) # http_conn.set_debuglevel(100) headers_auth = make_headers(basic_auth=f"{creds[0]}:{creds[1]}") if admin: logger.debug(f"[{get_linenumber()}] Using admin mode") if type(object) is Server: headers["Host"] = object.admin_host headers["Referer"] = f"http://{object.admin_host}" elif type(object) is Database: headers["Host"] = object.server.admin_host headers["Referer"] = f"http://{object.server.admin_host}" elif type(object) is Document: headers["Host"] = object.database.server.admin_host headers[ "Referer"] = f"http://{object.database.server.admin_host}" else: if type(object) is Server: headers["Host"] = object.couchdb_host headers["Referer"] = f"http://{object.couchdb_host}" elif type(object) is Database: headers["Host"] = object.server.couchdb_host headers["Referer"] = f"http://{object.server.couchdb_host}" elif type(object) is Document: headers["Host"] = object.database.server.couchdb_host headers[ "Referer"] = f"http://{object.database.server.couchdb_host}" if default_header: headers["accept"] = "application/json" headers["authorization"] = headers_auth["authorization"] logger.debug(f"[{get_linenumber()}] Final header:\n{headers}") logger.debug( f"[{get_linenumber()}] Connection attempt: {http_conn.connect()}" ) if data: http_conn.request(method=method.upper(), url=url_path, body=data, headers=headers) elif json_data: http_conn.request(method=method.upper(), url=url_path, body=bytes(json.dumps(json_data), 'utf-8'), headers=headers) else: http_conn.request(method=method.upper(), url=url_path, headers=headers) response = json.loads(http_conn.getresponse().read().decode()) except http.client.HTTPException as he: logger.error('HTTPException while trying the connection') response = { "status": f"{response.status}", "reason": f"{response.reason}", "content": str(he) } except Exception as e: logger.critical(e.__str__()) response = {'status': 'error', 'fullerror': e.__str__()} finally: logger.debug(response) if requests_module: if response.json(): return response.json() else: return response.raw.read().decode() else: return response
class Client(object): """HTTP Client for interacting with New Relic APIs This class is used to send data to the New Relic APIs over HTTP. This class will automatically handle retries as needed. :param insert_key: Insights insert key :type insert_key: str :param host: (optional) Override the host for the client. :type host: str :param compression_threshold: (optional) Compress if number of bytes in payload is above this threshold. (Default: 64K) :type compression_threshold: int Usage:: >>> import os >>> insert_key = os.environ.get("NEW_RELIC_INSERT_KEY", "") >>> client = Client(insert_key, host="metric-api.newrelic.com") >>> response = client.send({}) """ POOL_CLS = HTTPSConnectionPool PAYLOAD_TYPE = "" HOST = "" URL = "/" GZIP_HEADER = {"Content-Encoding": "gzip"} HEADERS = urllib3.make_headers( keep_alive=True, accept_encoding=True, user_agent=USER_AGENT ) def __init__(self, insert_key, host=None, compression_threshold=64 * 1024): host = host or self.HOST self.compression_threshold = compression_threshold headers = self.HEADERS.copy() headers.update( { "Api-Key": insert_key, "Content-Encoding": "identity", "Content-Type": "application/json", } ) self._pool = pool = self.POOL_CLS( host=host, port=443, retries=False, headers=headers, strict=True ) self._gzip_headers = gzip_headers = pool.headers.copy() gzip_headers.update(self.GZIP_HEADER) @staticmethod def _compress_payload(payload): level = zlib.Z_DEFAULT_COMPRESSION compressor = zlib.compressobj(level, zlib.DEFLATED, 31) payload = compressor.compress(payload) payload += compressor.flush() return payload def send(self, item): """Send a single item :param item: The object to send :type item: dict :rtype: HTTPResponse """ return self.send_batch((item,)) def send_batch(self, items, common=None): """Send a batch of spans :param items: An iterable of items to send to New Relic. :type items: list or tuple :rtype: HTTPResponse """ payload = {self.PAYLOAD_TYPE: items} if common: payload["common"] = common payload = json.dumps([payload]) if not isinstance(payload, bytes): payload = payload.encode("utf-8") headers = None if len(payload) > self.compression_threshold: payload = self._compress_payload(payload) headers = self._gzip_headers return self._pool.urlopen("POST", self.URL, body=payload, headers=headers)
TOKENS_FILE = TOKENS_DIR + "/tokens" APP_CLIENTID = "rbowlcj4sc7u96dfxprgd26bhqwt5nlz" APP_SECRET = "Huiq0x7vxFgKjpAlp9k0WAcLxQ1Efmjh" APP_ACCESS_TOKEN = "" LOGFILE = '/tmp/fs.log' UID = os.geteuid() GID = os.getgid() #http_pool = urllib3.HTTPConnectionPool(host='api.box.com', port=443, maxsize=20, # cert_reqs='CERT_REQUIRED', # ca_certs=certifi.where(), # assert_same_host=False) http_pool_headers = urllib3.make_headers( keep_alive=True, user_agent= "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36" ) #http_pool_api = urllib3.connection_from_url('https://api.box.com', # cert_reqs='CERT_REQUIRED', # ca_certs=certifi.where(), # headers=http_pool_headers, # maxsize=10, # block=False) #http_pool_dl = urllib3.connection_from_url('https://dl.boxcloud.com', # cert_reqs='CERT_REQUIRED', # ca_certs=certifi.where(), # headers=http_pool_headers, # maxsize=10, # block=False)
def __init__(self, host="localhost", port=None, http_auth=None, use_ssl=False, verify_certs=VERIFY_CERTS_DEFAULT, ssl_show_warn=SSL_SHOW_WARN_DEFAULT, ca_certs=None, client_cert=None, client_key=None, ssl_version=None, ssl_assert_hostname=None, ssl_assert_fingerprint=None, maxsize=10, headers=None, ssl_context=None, http_compress=None, cloud_id=None, api_key=None, opaque_id=None, **kwargs): # Initialize headers before calling super().__init__(). self.headers = urllib3.make_headers(keep_alive=True) super(Urllib3HttpConnection, self).__init__(host=host, port=port, use_ssl=use_ssl, headers=headers, http_compress=http_compress, cloud_id=cloud_id, api_key=api_key, opaque_id=opaque_id, **kwargs) if http_auth is not None: if isinstance(http_auth, (tuple, list)): http_auth = ":".join(http_auth) self.headers.update(urllib3.make_headers(basic_auth=http_auth)) pool_class = urllib3.HTTPConnectionPool kw = {} # if providing an SSL context, raise error if any other SSL related flag is used if ssl_context and ((verify_certs is not VERIFY_CERTS_DEFAULT) or (ssl_show_warn is not SSL_SHOW_WARN_DEFAULT) or ca_certs or client_cert or client_key or ssl_version): warnings.warn( "When using `ssl_context`, all other SSL related kwargs are ignored" ) # if ssl_context provided use SSL by default if ssl_context and self.use_ssl: pool_class = urllib3.HTTPSConnectionPool kw.update({ "assert_fingerprint": ssl_assert_fingerprint, "ssl_context": ssl_context, }) elif self.use_ssl: pool_class = urllib3.HTTPSConnectionPool kw.update({ "ssl_version": ssl_version, "assert_hostname": ssl_assert_hostname, "assert_fingerprint": ssl_assert_fingerprint, }) # Convert all sentinel values to their actual default # values if not using an SSLContext. if verify_certs is VERIFY_CERTS_DEFAULT: verify_certs = True if ssl_show_warn is SSL_SHOW_WARN_DEFAULT: ssl_show_warn = True ca_certs = CA_CERTS if ca_certs is None else ca_certs if verify_certs: if not ca_certs: raise ImproperlyConfigured( "Root certificates are missing for certificate " "validation. Either pass them in using the ca_certs parameter or " "install certifi to use it automatically.") kw.update({ "cert_reqs": "CERT_REQUIRED", "ca_certs": ca_certs, "cert_file": client_cert, "key_file": client_key, }) else: kw["cert_reqs"] = "CERT_NONE" if ssl_show_warn: warnings.warn( "Connecting to %s using SSL with verify_certs=False is insecure." % self.host) if not ssl_show_warn: urllib3.disable_warnings() self.pool = pool_class(self.hostname, port=self.port, timeout=self.timeout, maxsize=maxsize, **kw)
def create_vuln_yearly_db(self, cve_year, last_modified_date): # Get cve list from NVD. self.utility.write_log( 20, '[In] Create yearly vulnerability database [{}]'.format( self.file_name)) target_url = self.nvd_zip_url.replace('*', cve_year) tmp_file = os.path.join(self.nvd_db_dir, 'temp_' + cve_year + '.zip') # Download the zip file (including the CVE list) and uncompress it. target_json_name = '' self.utility.write_log(20, 'Accessing : {}'.format(target_url)) self.utility.print_message( OK, 'Get {} CVE list from {}'.format(cve_year, target_url)) http = None if self.utility.proxy != '': self.utility.print_message( WARNING, 'Set proxy server: {}'.format(self.utility.proxy)) if self.utility.proxy_user != '': headers = urllib3.make_headers( proxy_basic_auth=self.utility.proxy_user + ':' + self.utility.proxy_pass) http = urllib3.ProxyManager(timeout=self.con_timeout, headers=self.utility.ua, proxy_url=self.utility.proxy, proxy_headers=headers) else: http = urllib3.ProxyManager(timeout=self.con_timeout, headers=self.utility.ua, proxy_url=self.utility.proxy) else: http = urllib3.PoolManager(timeout=self.con_timeout, headers=self.utility.ua) try: with http.request('GET', target_url, preload_content=False) as res, open( tmp_file, 'wb') as fout: shutil.copyfileobj(res, fout) except Exception as e: self.utility.print_exception( e, 'Access failed: {}'.format(target_url)) self.utility.write_log( 30, 'Accessing failed: {}'.format(target_url)) with zipfile.ZipFile(tmp_file, 'r') as downloaded_zip: target_json_name = downloaded_zip.namelist()[0] downloaded_zip.extractall(self.nvd_db_dir) os.remove(tmp_file) # Create the CVE list from the CVE file. yearly_cve_list = [] with codecs.open(os.path.join(self.nvd_db_dir, target_json_name), 'r', encoding='utf-8') as fin: self.extract_vuln_info(json.loads(fin.read().replace('\0', '')), cve_year, last_modified_date) self.utility.write_log( 20, '[Out] Create yearly vulnerability database [{}]'.format( self.file_name)) return yearly_cve_list