def __init__(self, con_pool_size=1, proxy_url=None, urllib3_proxy_kwargs=None,
             connect_timeout=5., read_timeout=5.):
    """Build the urllib3 connection pool used for all outgoing requests.

    Proxy selection order:
      1. explicit ``proxy_url`` (plus ``urllib3_proxy_kwargs``)
      2. the ``HTTPS_PROXY`` environment variable
      3. the ``https_proxy`` environment variable
      4. no proxy at all

    :param con_pool_size: max connections kept alive in the pool
    :param proxy_url: optional proxy URL; ``socks…`` URLs require PySocks
    :param urllib3_proxy_kwargs: extra kwargs passed to the proxy manager
    :param connect_timeout: connect timeout, in seconds
    :param read_timeout: read timeout, in seconds
    :raises RuntimeError: when a SOCKS proxy is requested but PySocks is
        not installed
    """
    if urllib3_proxy_kwargs is None:
        urllib3_proxy_kwargs = {}
    self._connect_timeout = connect_timeout

    # TCP keepalive so idle pooled connections are not dropped silently.
    socket_opts = HTTPConnection.default_socket_options + [
        (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)]
    # TODO: Support other platforms like mac and windows.
    if 'linux' in sys.platform:
        socket_opts.extend([
            (socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 120),  # pylint: disable=no-member
            (socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 30),  # pylint: disable=no-member
            (socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 8),  # pylint: disable=no-member
        ])

    self._con_pool_size = con_pool_size
    pool_kwargs = {
        'maxsize': con_pool_size,
        'cert_reqs': 'CERT_REQUIRED',
        'ca_certs': certifi.where(),
        'socket_options': socket_opts,
        'timeout': urllib3.Timeout(
            connect=self._connect_timeout, read=read_timeout, total=None),
    }

    # Fall back to a proxy configured through the environment.
    proxy_url = (proxy_url
                 or os.environ.get('HTTPS_PROXY')
                 or os.environ.get('https_proxy'))

    if not proxy_url:
        if appengine.is_appengine_sandbox():
            # Use URLFetch service if running in App Engine
            manager = appengine.AppEngineManager()
        else:
            manager = urllib3.PoolManager(**pool_kwargs)
    else:
        pool_kwargs.update(urllib3_proxy_kwargs)
        if proxy_url.startswith('socks'):
            try:
                from urllib3.contrib.socks import SOCKSProxyManager
            except ImportError:
                raise RuntimeError('PySocks is missing')
            manager = SOCKSProxyManager(proxy_url, **pool_kwargs)
        else:
            manager = urllib3.proxy_from_url(proxy_url, **pool_kwargs)
            if manager.proxy.auth:
                # TODO: what about other auth types?
                auth_headers = urllib3.make_headers(
                    proxy_basic_auth=manager.proxy.auth)
                manager.proxy_headers.update(auth_headers)

    self._con_pool = manager
def request(self, method, url, query_params=None, headers=None,
            body=None, post_params=None, _preload_content=True,
            _request_timeout=None):
    """Perform requests.

    :param method: http request method
    :param url: http request url
    :param query_params: query parameters in the url
    :param headers: http request headers
    :param body: request json body, for `application/json`
    :param post_params: request post parameters,
                        `application/x-www-form-urlencoded`
                        and `multipart/form-data`
    :param _preload_content: if False, the urllib3.HTTPResponse object
                             will be returned without reading/decoding
                             response data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    """
    method = method.upper()
    assert method in ['GET', 'HEAD', 'DELETE', 'POST', 'PUT',
                      'PATCH', 'OPTIONS']

    if post_params and body:
        raise ApiValueError(
            "body parameter cannot be used with post_params parameter.")

    post_params = post_params or {}
    headers = headers or {}

    timeout = None
    if _request_timeout:
        # Fix: also accept float timeouts (e.g. 2.5). Previously only int
        # was recognized, so a float fell through both branches and the
        # timeout was silently ignored.
        if isinstance(_request_timeout,
                      (int, float) if six.PY3 else (int, long, float)):  # noqa: E501,F821
            timeout = urllib3.Timeout(total=_request_timeout)
        elif (isinstance(_request_timeout, tuple) and
              len(_request_timeout) == 2):
            timeout = urllib3.Timeout(connect=_request_timeout[0],
                                      read=_request_timeout[1])

    if 'Content-Type' not in headers:
        headers['Content-Type'] = 'application/json'

    try:
        # For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE`
        if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']:
            if query_params:
                url += '?' + urlencode(query_params)
            if re.search('json', headers['Content-Type'], re.IGNORECASE):
                request_body = None
                if body is not None:
                    request_body = json.dumps(body)
                r = self.pool_manager.request(
                    method, url,
                    body=request_body,
                    preload_content=_preload_content,
                    timeout=timeout,
                    headers=headers)
            elif headers[
                    'Content-Type'] == 'application/x-www-form-urlencoded':  # noqa: E501
                r = self.pool_manager.request(
                    method, url,
                    fields=post_params,
                    encode_multipart=False,
                    preload_content=_preload_content,
                    timeout=timeout,
                    headers=headers)
            elif headers['Content-Type'] == 'multipart/form-data':
                # must del headers['Content-Type'], or the correct
                # Content-Type which generated by urllib3 will be
                # overwritten.
                del headers['Content-Type']
                r = self.pool_manager.request(
                    method, url,
                    fields=post_params,
                    encode_multipart=True,
                    preload_content=_preload_content,
                    timeout=timeout,
                    headers=headers)
            # Pass a `string` parameter directly in the body to support
            # other content types than Json when `body` argument is
            # provided in serialized form
            elif isinstance(body, (str, bytes)):
                request_body = body
                r = self.pool_manager.request(
                    method, url,
                    body=request_body,
                    preload_content=_preload_content,
                    timeout=timeout,
                    headers=headers)
            else:
                # Cannot generate the request from given parameters
                msg = """Cannot prepare a request message for provided arguments. Please check that your arguments match declared content type."""
                raise ApiException(status=0, reason=msg)
        # For `GET`, `HEAD`
        else:
            r = self.pool_manager.request(method, url,
                                          fields=query_params,
                                          preload_content=_preload_content,
                                          timeout=timeout,
                                          headers=headers)
    except urllib3.exceptions.SSLError as e:
        msg = "{0}\n{1}".format(type(e).__name__, str(e))
        raise ApiException(status=0, reason=msg)

    if _preload_content:
        r = RESTResponse(r)
        # log response body
        logger.debug("response body: %s", r.data)

    if not 200 <= r.status <= 299:
        raise ApiException(http_resp=r)

    return r
def _cycle_name(num):
    '''return the zero-padded cycle directory name for cycle number `num`,
    e.g. 7 -> "cycle007". Replaces the duplicated digit-counting blocks,
    which silently kept a stale name for cycle numbers >= 1000.'''
    return 'cycle{:03d}'.format(num)


def jasonNewFileCheck(satName, ogdr_cycles_page_URL):
    '''function to check for new jason 2/3 file
    it will keep downloading and processing files from the last downloaded
    file until the latest file'''
    print("***************** CHECKING FOR NEW FILE OF", satName, "********************")
    print("URL of cycles web page :", ogdr_cycles_page_URL)
    print()

    # getting information about last downloaded file
    print("opening last loaded file :", satName + '_last.txt ....')
    with open(op.join(app_dir, 'data', 'last', satName + '_last.txt'), 'r') as last:
        lcycle = last.readline()[:-1]  # last accessed cycle
        lfile = last.readline()[:-1]   # last downloaded file name
    print("last downloaded file loaded")
    print("last cycle =", lcycle)
    print("last file =", lfile)
    print()

    # creating PoolManager
    print("creating PoolManager (connection timeout = 15.0s)")
    http = ulib.PoolManager(timeout=ulib.Timeout(connect=15.0))

    # downloading the page with the list of cycles
    print("downloading webpage with list of cycles ....")
    ogdr_cycles_page = http.request('GET', ogdr_cycles_page_URL)
    # saving the html in a file
    with open(op.join(app_dir, 'data', 'temp', 'ogdr_cycles_page.html'), 'wb') as f:
        f.write(ogdr_cycles_page.data)
    print("download complete.")
    print()

    # opening the cycles file for reading
    print("opening HTML file for reading")
    with open(op.join(app_dir, 'data', 'temp', 'ogdr_cycles_page.html'), 'r') as html:
        cflag = False  # True once any cycle link has been seen
        for line in html:
            flag = True  # False only on the line matching the wanted cycle
            if 'cycle' in line:
                cflag = True
                if lcycle not in line:
                    continue
                flag = False
                # last accessed cycle link found: download the
                # corresponding cycle web page and save it
                print("cycle found :", lcycle)
                print("downloading webpage...")
                lcycle_page = http.request('GET', ogdr_cycles_page_URL + lcycle)
                with open(op.join(app_dir, 'data', 'temp', 'lcycle.html'), 'wb') as f:
                    f.write(lcycle_page.data)
                print("download complete.")
                print("opening HTML file and passing to checkData")
                with open(op.join(app_dir, 'data', 'temp', 'lcycle.html'), 'r') as f:
                    lfile = checkData(satName, lcycle,
                                      ogdr_cycles_page_URL + lcycle, f, lfile)
                print("checking for new files in ", lcycle, "done.")
                print("latest downloaded file :", lfile)
                print()
                # advance lcycle to the next cycle to look for
                lcycle = _cycle_name(int(lcycle[5:]) + 1)
            if cflag and flag and 'cycle' not in line:
                # walked past the cycle list without finding lcycle:
                # step back to the last cycle that actually exists
                lcycle = _cycle_name(int(lcycle[5:]) - 1)
                print("no more new cycles available")
                print("last downloaded cycle :", lcycle)
                print()
                break
            if cflag:
                print("next cycle to check :", lcycle)
                c = input("want to download new cycle ? (y/n) > ")
                if c == 'y' or c == 'Y':
                    continue
                else:
                    break

    # writing updates to JAx_last.txt
    print("writing last downloads info to", satName + '_last.txt')
    print('lcycle={} lfile={}'.format(lcycle, lfile))
    with open(op.join(app_dir, 'data', 'last', satName + '_last.txt'), "w") as f:
        f.write(lcycle + '\n')
        f.write(lfile + '\n')
    print('write complete')
    print()
    print("*************** NEW FILE CHECK COMPLETED FOR", satName, "******************")
    print()
    return