def __init__(self, proto, host, user=None, passwd=None, token=None, debug=False, disable_certificate_verification=False):
    """Set up the agent's base URL, credentials and URL opener.

    proto/host form the base URL.  user/passwd or token are stored for
    later authentication.  When disable_certificate_verification is true
    and this Python build exposes ssl._create_unverified_context, HTTPS
    certificate checks are disabled.
    """
    self.url = "%s://%s" % (proto, host)
    self.user = user
    self.passwd = passwd
    self.token = token  # authentication token
    self.debug = debug

    ssl_context = None
    if disable_certificate_verification:
        import ssl
        if hasattr(ssl, "_create_unverified_context"):
            ssl_context = ssl._create_unverified_context()

    self.opener = urllib_request.OpenerDirector()
    self.opener.add_handler(urllib_request.HTTPHandler())
    try:
        self.opener.add_handler(urllib_request.HTTPSHandler(context=ssl_context))
    except TypeError:
        # Python < 2.7.9: HTTPSHandler() takes no 'context' argument
        self.opener.add_handler(urllib_request.HTTPSHandler())

    # Remember the most recently built agent as the class-wide default.
    Agent.defaultInst = self
def twitterreq(self, url, parameters, method="GET"):
    """Issue an OAuth-signed request to *url* and return the open response.

    Retries up to 10 times on transient URLError, returning None if every
    attempt failed.

    BUG FIX: the original code re-opened the URL a second time after the
    retry loop had already succeeded (`if response != None: response =
    opener.open(...)`), so every successful call hit the server twice.
    The response obtained inside the loop is now returned directly.
    """
    req = oauth.Request.from_consumer_and_token(
        self.oauth_consumer, token=self.oauth_token,
        http_method=method, http_url=url, parameters=parameters)
    req.sign_request(self.signature_method_hmac_sha1,
                     self.oauth_consumer, self.oauth_token)

    if method == "POST":
        # to_postdata() returns str; the opener needs bytes for a POST body.
        encoded_post_data = req.to_postdata().encode('utf-8')
    else:
        encoded_post_data = None
    url = req.to_url()

    opener = urllib.OpenerDirector()
    opener.add_handler(self.http_handler)
    opener.add_handler(self.https_handler)

    # Retry loop: up to 10 attempts, swallowing transient URL errors.
    response = None
    attempts = 0
    while response is None and attempts < 10:
        try:
            attempts += 1
            response = opener.open(url, encoded_post_data)
        except urlliberr.URLError:
            pass  # transient failure; try again
    return response
def twitterreq(self, url, parameters, method=None):
    """Perform an OAuth-signed Twitter API request and return the open response."""
    # Default to the instance-wide HTTP verb when the caller gives none.
    if method is None:
        method = self._http_method

    # Build and sign the OAuth request for the given data.
    signed = oauth.Request.from_consumer_and_token(
        self._oauth_consumer,
        token=self._oauth_token,
        http_method=method,
        http_url=url,
        parameters=parameters)
    signed.sign_request(self._signature_method_hmac_sha1,
                        self._oauth_consumer, self._oauth_token)
    headers = signed.to_header()

    encoded_post_data = signed.to_postdata() if method == "POST" else None
    url = signed.to_url()

    opener = urllib.OpenerDirector()
    opener.add_handler(self._http_handler)
    opener.add_handler(self._https_handler)

    # Open the (streaming) connection and hand it back to the caller.
    return opener.open(url, encoded_post_data)
def go_to_endpoint(context, endpoint):
    """GET <context.apiUrl>/<endpoint> and stash response + decoded body on *context*."""
    target_url = "{}/{}".format(context.apiUrl, parse.quote(endpoint))

    opener = request.OpenerDirector()
    opener.add_handler(request.HTTPHandler())

    context.endpointResponse = opener.open(target_url)
    context.responseBodyString = context.endpointResponse.read().decode("utf8")
def open_url(url, proxy, resp_code, sslContext):
    """Open *url* (a plain string or a request.Request) and return the response.

    proxy: proxy URL for both http and https, or falsy to disable proxying
           (including environment-variable proxies).
    resp_code: the HTTP status code the caller expects; anything else raises.

    Raises FileRetrieveException on HTTP errors, timeouts, socket errors,
    404s, or an unexpected response code.

    BUG FIX: the URLError branch formatted `url.full_url` unconditionally,
    which raises AttributeError when *url* is a plain string (a case this
    function explicitly supports).  A display string is now computed once
    up front and used for both logging and error messages.
    """
    # Accept both plain URL strings and Request objects.
    if isinstance(url, request.Request):
        url_display = url.full_url
    else:
        url_display = url
    logging.info('opener: opening "{0}"'.format(url_display))

    opener = request.OpenerDirector()
    if proxy:
        # not doing 'is not None', so empty strings don't try and proxy
        # have a proxy option to take it from the envrionment vars
        opener.add_handler(request.ProxyHandler({'http': proxy, 'https': proxy}))
    else:
        opener.add_handler(request.ProxyHandler({}))

    opener.add_handler(request.HTTPHandler())
    opener.add_handler(PackratHandler())
    if hasattr(http.client, 'HTTPSConnection'):
        opener.add_handler(request.HTTPSHandler())  # context=sslContext
        opener.add_handler(PackratsHandler())  # context=sslContext
    opener.add_handler(request.FileHandler())
    opener.add_handler(request.FTPHandler())
    opener.add_handler(request.UnknownHandler())

    try:
        resp = opener.open(url, timeout=WEB_HANDLE_TIMEOUT)
    except request.HTTPError as e:  # must precede URLError (subclass)
        raise FileRetrieveException('HTTPError "{0}"'.format(e))
    except request.URLError as e:
        if isinstance(e.reason, socket.timeout):
            raise FileRetrieveException(
                'Request Timeout after {0} seconds'.format(WEB_HANDLE_TIMEOUT))
        raise FileRetrieveException(
            'URLError "{0}" for "{1}" via "{2}"'.format(e, url_display, proxy))
    except socket.timeout:
        raise FileRetrieveException(
            'Request Timeout after {0} seconds'.format(WEB_HANDLE_TIMEOUT))
    except socket.error as e:
        raise FileRetrieveException('Socket Error "{0}"'.format(e))

    if resp.code is not None:  # FileHandler, FTPHandler do not have a response code
        if resp.code == 404:
            raise FileRetrieveException('File "{0}" not Found'.format(url))
        if resp.code != resp_code:
            raise FileRetrieveException('Invalid Response code "{0}"'.format(resp.code))

    return resp
def __init__(self, host, root_path, proxy=None, verify_ssl=True, retry_event=None):
    """Build a CInP client for *host* rooted at *root_path*.

    host: 'http(s)://name[:port]' with no trailing slash.
    proxy: proxy URL, or falsy to disable proxying (including env vars).
    verify_ssl: when False, HTTPS certificate verification is disabled.
    retry_event: optional Event used to cancel retry loops; when set, a
    retry loop re-raises the most recent exception it had ignored.
    """
    super().__init__()
    self.retry_event = retry_event

    # Validate the host URL shape up front.
    if not host.startswith(('http:', 'https:')):
        raise ValueError('hostname must start with http(s):')
    if host.endswith('/'):
        raise ValueError('hostname must not end with "/"')

    self.proxy = proxy
    self.host = host
    logging.debug(
        'cinp: new client host: "{0}", root_path: "{1}", via: "{2}"'.
        format(self.host, root_path, self.proxy))
    self.uri = URI(root_path)

    self.opener = request.OpenerDirector()
    # Truthiness test (not 'is not None') so an empty proxy string does not
    # proxy; the empty ProxyHandler also blocks environment-variable proxies.
    if self.proxy:
        self.opener.add_handler(
            request.ProxyHandler({'http': self.proxy, 'https': self.proxy}))
    else:
        self.opener.add_handler(request.ProxyHandler({}))

    self.opener.add_handler(request.HTTPHandler())
    if hasattr(http.client, 'HTTPSConnection'):
        if verify_ssl:
            self.opener.add_handler(request.HTTPSHandler())
        else:
            self.opener.add_handler(
                request.HTTPSHandler(context=ssl._create_unverified_context()))
    self.opener.add_handler(request.UnknownHandler())

    self.opener.addheaders = [
        ('User-Agent', 'python CInP client {0}'.format(__CLIENT_VERSION__)),
        ('Accepts', 'application/json'),
        ('Accept-Charset', 'utf-8'),
        ('CInP-Version', __CINP_VERSION__),
    ]
def twitterreq(url, method, parameters):
    """Issue an OAuth-signed request to *url* using HTTP verb *method*.

    Returns the open urllib response object.

    BUG FIX: the original accepted a *method* argument but ignored it,
    signing and dispatching with the module-level `http_method` global
    instead — callers could never actually choose the verb.  The parameter
    is now honored for both signing and the POST-body decision.
    """
    req = oauth.Request.from_consumer_and_token(oauth_consumer,
                                                token=oauth_token,
                                                http_method=method,
                                                http_url=url,
                                                parameters=parameters)
    req.sign_request(signature_method_hmac_sha1, oauth_consumer, oauth_token)
    headers = req.to_header()

    if method == "POST":
        encoded_post_data = req.to_postdata()
    else:
        encoded_post_data = None
    url = req.to_url()

    opener = urllib.OpenerDirector()
    opener.add_handler(http_handler)
    opener.add_handler(https_handler)

    response = opener.open(url, encoded_post_data)
    return response
def setUp(self):
    """Prepare an HTTP test case: test-mode registry, a fresh session that
    will be rolled back, and a urllib opener carrying that session's cookie."""
    super(HttpCase, self).setUp()

    if self.registry_test_mode:
        self.registry.enter_test_mode()
        self.addCleanup(self.registry.leave_test_mode)

    # Create a magic session_id that will be rollbacked.
    session = odoo.http.root.session_store.new()
    self.session = session
    self.session_id = session.sid
    session.db = get_db_name()
    odoo.http.root.session_store.save(session)

    # Build the URL-opener helper; handler order mirrors urllib defaults.
    opener = urllib2.OpenerDirector()
    for handler in (urllib2.UnknownHandler(),
                    urllib2.HTTPHandler(),
                    urllib2.HTTPSHandler(),
                    urllib2.HTTPCookieProcessor(),
                    RedirectHandler()):
        opener.add_handler(handler)
    opener.addheaders.append(('Cookie', 'session_id=%s' % self.session_id))
    self.opener = opener
def add_parent(self, parent):
    # TODO: Until the proxy stuff is figured out, make sure to add PackratHandler after HTTP(s) and Proxy Handlers
    """Attach to *parent* and build a private opener that mirrors the
    parent's proxy and HTTPS configuration."""
    super().add_parent(parent)

    parent_handler_names = [type(h).__name__ for h in self.parent.handlers]

    self.opener = request.OpenerDirector()
    # Mirror the parent's proxy settings when it carries a ProxyHandler.
    if 'ProxyHandler' in parent_handler_names:
        parent_proxy = self.parent.handlers[parent_handler_names.index('ProxyHandler')]
        self.opener.add_handler(request.ProxyHandler(parent_proxy.proxies))
    self.opener.add_handler(request.HTTPHandler())
    if 'HTTPSHandler' in parent_handler_names:
        self.opener.add_handler(request.HTTPSHandler())
    self.opener.addheaders = [('User-agent', 'subcontractor_plugin')]
def send_to_endpoint(context, method, dataFile, endpoint):
    """Send the JSON payload stored in *dataFile* to *endpoint* with HTTP
    verb *method*, stashing the response and decoded body on *context*.

    The payload file is parsed and re-serialized, which also validates it
    as JSON before the request is built.

    FIX: dropped the explicit jsonFile.close() that sat inside the `with`
    block — the context manager already closes the file, so the call was
    redundant and misleading.
    """
    postDataFilePath = os.path.join(context.postDataDir, dataFile)
    requestUrl = "{}/{}".format(context.apiUrl, parse.quote(endpoint))

    # Load (and thereby validate) the JSON payload.
    with open(postDataFilePath, "r") as jsonFile:
        postDataJson = json.load(jsonFile)
    postDataJsonStr = json.dumps(postDataJson).encode("utf8")

    requestObj = request.Request(url=requestUrl,
                                 data=postDataJsonStr,
                                 method=method,
                                 headers={"Content-Type": "application/json"})
    opener = request.OpenerDirector()
    opener.add_handler(request.HTTPHandler())

    response = opener.open(requestObj)
    context.endpointResponse = response
    context.responseBodyString = context.endpointResponse.read().decode("utf8")
class MyRedirectHandler(urllib2.HTTPRedirectHandler):
    """Throw an exception on redirects instead of continuing. The redirect
    will be handled in the main thread so it can work with connection
    pooling."""

    def redirect_request(self, req, fp, code, msg, headers, newurl):
        """@type code: int
        @type msg: str
        @type newurl: str"""
        # Let the base class build the follow-up request, then surface it
        # as a Redirect exception instead of following it in this thread.
        new_req = urllib2.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl)
        if new_req:
            raise Redirect(new_req)


# Module-level opener used by the download code below.
# Our handler differs from the Python default in that:
# - we don't support file:// URLs
# - we don't follow HTTP redirects
_my_urlopen = urllib2.OpenerDirector()
for klass in [urllib2.ProxyHandler, urllib2.UnknownHandler, urllib2.HTTPHandler,
              urllib2.HTTPDefaultErrorHandler, MyRedirectHandler,
              urllib2.FTPHandler, urllib2.HTTPErrorProcessor, MyHTTPSHandler]:
    _my_urlopen.add_handler(klass())


#Modified function signature to add support for safe_download
def download_in_thread(url, target_file, if_modified_since, notify_done, expected_size=None):
    """@type url: str
    @type target_file: file"""
    ''' This is the function inside which a resource with a specified url is downloaded '''
    # NOTE(review): this function is truncated in the visible chunk — the
    # body of the try block continues beyond this view.
    src = None
    try:
        #print ("Download url", url)
#! /usr/bin/env python
# -*- coding:utf-8 -*-
# __author__ = "NYA"
import urllib.request as urllib
import time
import datetime

# Timed download experiment: probe the URL with a tiny timeout, then fetch it.
#
# BUG FIX: the original created a bare OpenerDirector with NO handlers, so
# opener.open() could never succeed for any URL scheme — every call raised,
# and the second opener.open(url) in the except block then crashed the
# script uncaught.  HTTP and HTTPS handlers are now registered.
opener = urllib.OpenerDirector()
opener.add_handler(urllib.HTTPHandler())
opener.add_handler(urllib.HTTPSHandler())

print('%.20f' % time.time())
url = 'https://ci.phncdn.com/pics/albums/013/889/491/173801711/(m=e-yaaGqaa)(mh=keF16B9jroWqJNLf)original_173801711.jpg'
try:
    # Probe with an aggressively small timeout to measure responsiveness.
    opener.open(url, timeout=0.01)
    print('23242')
    print('%.20f' % time.time())
    urllib.urlretrieve(url, 'E:\\row_data\\' + str(1) + '.jpg')
except Exception as e:
    # Probe timed out or failed: retry without the timeout, then download.
    print(e)
    opener.open(url)
    urllib.urlretrieve(url, 'E:\\row_data\\' + str(1) + '.jpg')
print('%.20f' % time.time())
def _downloadFiles(self, wrk_dir, lease, host, header_map, proxy):
    """Download every device file exposed by an export *lease* into *wrk_dir*.

    Each device URL has its '*' placeholder replaced with *host*; devices
    without a targetId are skipped.  Progress is logged at most once per
    PROGRESS_INTERVAL seconds.

    Returns a list of (vim.OvfManager.OvfFile, sha256 hexdigest) tuples,
    one per downloaded device.

    BUG FIX: the output file is now opened with a context manager so the
    handle is closed even when a read/write raises mid-download (the
    original leaked it on error).  Also fixed the '<unknwon>' typo in the
    logged content-length placeholder.
    """
    ovf_files = []

    opener = request.OpenerDirector()
    if proxy:
        # not doing 'is not None', so empty strings don't try and proxy
        # have a proxy option to take it from the envrionment vars
        opener.add_handler(request.ProxyHandler({'http': proxy, 'https': proxy}))
    else:
        opener.add_handler(request.ProxyHandler({}))
    opener.add_handler(request.HTTPHandler())
    if hasattr(http.client, 'HTTPSConnection'):
        opener.add_handler(request.HTTPSHandler())  # context=self.sslContext
    opener.add_handler(request.UnknownHandler())

    logging.debug('OVAExportHandler: Starting file downloads(s)...')
    for device in lease.info.deviceUrl:
        url = device.url.replace('*', host)
        if not device.targetId:
            logging.debug('ExportLease: No targetId for "{0}", skipping...'.format(url))
            continue

        logging.debug('OVAExportHandler: Downloading "{0}"...'.format(device.url))
        req = request.Request(url, headers=header_map, method='GET')
        resp = opener.open(req, timeout=DOWNLOAD_FILE_TIMEOUT)
        try:
            content_length = int(resp.headers['content-length'])
        except TypeError:
            # ESX doesn't always supply content-length; headers[...] is None
            content_length = '<unknown>'

        file_hash = hashlib.sha256()
        with open(os.path.join(wrk_dir, device.targetId), 'wb') as local_file:
            buff = resp.read(4096 * 1024)
            cp = datetime.utcnow()
            while buff:
                if datetime.utcnow() > cp:
                    # Throttle progress logging to one line per interval.
                    cp = datetime.utcnow() + timedelta(seconds=PROGRESS_INTERVAL)
                    logging.debug('OVAExportHandler: download at {0} of {1}'.format(
                        local_file.tell(), content_length))
                local_file.write(buff)
                file_hash.update(buff)
                buff = resp.read(4096 * 1024)

            ovf_file = vim.OvfManager.OvfFile()
            ovf_file.deviceId = device.key
            ovf_file.path = device.targetId
            ovf_file.size = local_file.tell()
            ovf_files.append((ovf_file, file_hash.hexdigest()))

    return ovf_files