def authenticate(self):
    """Authenticate and get oauth2 token for the session.

    Returns an OAuth2 client-credentials session that retries throttled
    (429) and transient server errors (5xx) on both schemes, honouring
    any Retry-After header, with exponential backoff.
    """
    session = OAuth2ClientCredentialsSession(
        url=self.ms_graph_token_url,
        client_id=self.client_id,
        client_secret=self.client_secret,
        scope=DEFAULT_SCOPE,
        proxies=self.proxies)
    # Build the retry policy once instead of duplicating the identical
    # configuration for the http and https mounts (DRY fix; Retry is
    # never mutated by adapters, so sharing one instance is safe).
    retry = Retry(
        total=self.max_retries_total,
        status_forcelist=[429, 500, 502, 503],
        backoff_factor=self.max_retries_backoff_factor,
        respect_retry_after_header=True)
    for scheme in ('https://', 'http://'):
        session.mount(scheme, HTTPAdapter(max_retries=retry))
    return session
def __init__(self, headers=None, cookies=None, max_retries=1):
    """Initialize the wrapped requests.Session object.

    :param headers: optional mapping of default headers for the session
    :param cookies: optional cookie jar/mapping to install on the session
    :param max_retries: per-request retry count; coerced to int
    :raises ValueError: if ``max_retries`` cannot be converted to int
    """
    self.session = requests.Session()
    logger.info('创建Session对象')
    if headers:
        self.session.headers.update(headers)
        logger.info('Session headers using: %s' % headers)
    try:
        max_retries = int(max_retries)
    except (TypeError, ValueError) as err:
        # BUG FIX: also catch TypeError so int(None) etc. surfaces as
        # the documented ValueError instead of an unrelated TypeError.
        raise ValueError("Error converting max_retries parameter: %s" % err)
    if max_retries > 0:
        # One shared retry policy/adapter for both schemes.
        # NOTE(review): ``backoff_factor`` appears to be a module-level
        # constant — confirm it is defined at file scope.
        adapter = requests.adapters.HTTPAdapter(max_retries=Retry(
            total=max_retries, backoff_factor=backoff_factor))
        logger.info('Session max_retries using: %s' % max_retries)
        self.session.mount('http://', adapter)
        self.session.mount('https://', adapter)
    # BUG FIX: the original assigned ``cookies`` unconditionally,
    # replacing the session's cookie jar with None when no cookies
    # were supplied and breaking all later cookie handling.
    if cookies is not None:
        self.session.cookies = cookies
def get_request_session(max_retries=3):
    """Return a requests.Session retrying on HTTP 500 responses.

    BUG FIX: the retry total was hard-coded to 5, silently ignoring the
    ``max_retries`` parameter; it is now honoured (default unchanged
    in spirit: callers passing nothing still get retries).

    :param max_retries: total retries per request for both schemes
    """
    from requests.packages.urllib3.util import Retry
    session = requests.Session()
    adapter = requests.adapters.HTTPAdapter(
        max_retries=Retry(total=max_retries, status_forcelist=[500]))
    for scheme in ("http://", "https://"):
        session.mount(scheme, adapter)
    return session
def _get_requests_session():
    """Return the module-level Session, creating it on first use.

    The lazily built session retries up to five times on 502/503
    responses, with exponential backoff between attempts.
    """
    global _session
    if _session is not None:
        return _session
    _session = Session()
    # configure 5 retries for any requests that return 502 or 503 errors
    # exponential backoff increases the delay for each retry iteration
    retry_policy = Retry(total=5, backoff_factor=0.1,
                         status_forcelist=[502, 503])
    _session.mount('https://', HTTPAdapter(max_retries=retry_policy))
    _session.mount('http://', HTTPAdapter(max_retries=retry_policy))
    return _session
def __init__(self, timeout=None, retries=None):
    """Session with a default timeout and a mounted retry policy.

    :param timeout: default timeout stored on the session
    :param retries: None for DEFAULT_RETRY_ARGS, an int retry total,
        a dict of Retry kwargs, or a ready-made Retry instance
    :raises TypeError: for any other ``retries`` type (BUG FIX: the
        original fell through with ``retry`` unbound and crashed with
        NameError at the first mount)
    """
    super(Session, self).__init__()
    self.timeout = timeout
    if retries is None:
        retry = Retry(**DEFAULT_RETRY_ARGS)
    elif isinstance(retries, Retry):
        # Generalization: accept a pre-built Retry object as-is.
        retry = retries
    elif isinstance(retries, int):
        args = DEFAULT_RETRY_ARGS.copy()
        args.pop('total', None)
        retry = Retry(total=retries, **args)
    elif isinstance(retries, dict):
        retry = Retry(**retries)
    else:
        raise TypeError(
            'retries must be None, an int, a dict or a Retry instance, '
            'got %r' % type(retries))
    self.mount('http://', HTTPAdapter(max_retries=retry))
    self.mount('https://', HTTPAdapter(max_retries=retry))
def wrapper(self, url, *args, **kwargs):
    """Validate *url*, ensure a retrying session in kwargs, call *func*.

    :raises ValueError: if the URL is None (when not allowed) or does
        not match ``url_re``
    """
    if url is None and not allowed_none:
        # BUG FIX: concatenating None to a str raised TypeError here
        # instead of the intended ValueError.
        raise ValueError('Invalid Url: ' + str(url))
    if url is not None:
        if url_re.match(url) is None:
            raise ValueError('Invalid URL: ' + url)
        # Normalise to a trailing slash when the decorator asks for it.
        if not url.endswith('/') and trailing_slash:
            url += '/'
    if kwargs.get('session') is None:
        kwargs['session'] = Session()
        # BUG FIX: Retry objects were mounted directly as transport
        # adapters; Session.mount requires an HTTPAdapter carrying
        # the retry policy, otherwise every request would fail.
        retry = Retry(5)
        kwargs['session'].mount('https://', HTTPAdapter(max_retries=retry))
        kwargs['session'].mount('http://', HTTPAdapter(max_retries=retry))
    self.soup = None
    return func(self, url, *args, **kwargs)
def __init__(self, submit):
    """Create the API client: retrying session, daily log file, and
    (for submission runs) the group token.

    :param submit: True for a submission run against REMOTE_URL,
        False for a local run against LOCAL_URL.
    """
    # Whether or not this is a submission run.
    self.submit = submit
    if submit:
        self.base_url = REMOTE_URL
    else:
        self.base_url = LOCAL_URL
    self.session = Session()
    # Retry GET/POST up to 30 times on throttling (429) and transient
    # server errors, backing off exponentially between attempts.
    self.session.mount(
        'http://',
        HTTPAdapter(max_retries=Retry(total=30,
                                      status_forcelist=[429, 500, 503],
                                      backoff_factor=1,
                                      method_whitelist=frozenset(
                                          ['GET', 'POST']))))
    # Timestamp of the last request, used for client-side pacing.
    self.last_request = time.perf_counter()
    # One append-mode log file per day under logs/ (created if missing).
    log_filename = datetime.now().strftime('logs/log_%y%m%d.txt')
    if not os.path.exists('logs'):
        os.makedirs('logs')
    self.log = open(log_filename, 'a+')
    if self.submit:
        # NOTE(review): group token is read from a local 'group_token'
        # file — presumably provisioned out of band; confirm.
        with open('group_token', 'r') as f:
            self.group_token = f.readline().strip()
    self.__print__('Client initialized! ' +
                   ('Using group token {}.'.format(self.group_token)
                    if self.submit else ''))
def __init__(self, config_d):
    """Read PuppetDB inventory and connection settings from *config_d*
    and prepare a retrying JSON session for the query API."""
    super().__init__(config_d)
    # Inventory settings.
    self.puppetdb_business_unit_fact_name = config_d.get(
        'business_unit_fact_name', None)
    self.puppetdb_group_fact_names = config_d.get('group_fact_names', [])
    self.puppetdb_facts = config_d.get('facts', [])
    self.puppetdb_save_full_inventory = config_d.get('full_inventory', False)
    self.puppetdb_puppetboard_enable = config_d.get('puppetboard_enable',
                                                    False)
    self.puppetdb_puppetboard_url = config_d.get('puppetboard_url', None)
    # Connection settings ('puppetdb_host' is the only required key).
    self.puppetdb_host = config_d['puppetdb_host']
    self.puppetdb_port = config_d.get('puppetdb_port', 8081)
    self.puppetdb_ssl_ca = config_d.get('puppetdb_ssl_ca', None)
    self.puppetdb_ssl_key = config_d.get('puppetdb_ssl_key', None)
    self.puppetdb_ssl_cert = config_d.get('puppetdb_ssl_cert', None)
    self.puppetdb_timeout = config_d.get('puppetdb_timeout', 10)
    self.puppetdb_protocol = config_d.get('puppetdb_protocol', 'https')
    self.base_url = (f'{self.puppetdb_protocol}://'
                     f'{self.puppetdb_host}:{self.puppetdb_port}')
    self.api_base_url = f'{self.base_url}/pdb/query/v4'
    # JSON session, with retries mounted only under the query API prefix.
    self.session = requests.Session()
    self.session.headers.update({
        'user-agent': 'zentral/0.0.1',
        'content-type': 'application/json',
        'accept': 'application/json',
        'accept-charset': 'utf-8'
    })
    retry_policy = Retry(total=3, backoff_factor=1,
                         status_forcelist=[500, 502, 503, 504])
    self.session.mount(
        self.api_base_url,
        requests.adapters.HTTPAdapter(max_retries=retry_policy))
def __init__(
        self,
        device_uid=None,
        url="https://broker.staging.skylark.swiftnav.com",
        retries=DEFAULT_RETRIES,
        timeout=DEFAULT_TIMEOUT,
):
    """Set up the broker client and a pooled, retrying read session.

    :param device_uid: optional device identifier
    :param url: broker endpoint
    :param retries: (connect_retries, read_retries) pair
    :param timeout: request timeout stored for later calls
    """
    # BUG FIX: the retry policy previously read the DEFAULT_RETRIES
    # constant directly, silently ignoring the ``retries`` parameter.
    self._retry = Retry(connect=retries[0],
                        read=retries[1],
                        redirect=MAX_REDIRECTS,
                        status_forcelist=[500],
                        backoff_factor=DEFAULT_BACKOFF_FACTOR)
    self.url = url
    self.read_session = requests.Session()
    # Same pooled, retrying adapter configuration for both schemes.
    for scheme in ("http://", "https://"):
        self.read_session.mount(
            scheme,
            HTTPAdapter(pool_connections=DEFAULT_POOLSIZE,
                        pool_maxsize=DEFAULT_POOLSIZE,
                        pool_block=DEFAULT_POOLBLOCK,
                        max_retries=self._retry))
    # Write session is created lazily elsewhere.
    self.write_session = None
    self.device_uid = device_uid
    self.timeout = timeout
    self.read_response = None
    self.write_response = None
    self.source = None
def _get_session(self):
    """Return the cached requests session, building it on first call.

    The session retries on 400–420 and 500–504 statuses, pools
    connections sized to the configured concurrency, honours proxies,
    and is optionally backed by an on-disk sqlite response cache.
    """
    s = Settings.Instance()
    if self.session:
        return self.session
    retryable_statuses = list(range(400, 421)) + list(range(500, 505))
    retry_policy = Retry(total=s.TOTAL_RETRIES,
                         backoff_factor=s.BACKOFF_FACTOR,
                         status_forcelist=retryable_statuses)
    size = s.CONCURRENT_SIZE
    adapter = requests.adapters.HTTPAdapter(pool_connections=size,
                                            pool_maxsize=size,
                                            pool_block=True,
                                            max_retries=retry_policy)
    if s.CACHING:
        cache_path = os.path.join(os.path.expanduser('~'), s.CACHE_NAME)
        self.session = requests_cache.CachedSession(
            cache_path,
            backend='sqlite',
            expire_after=s.CACHE_EXPIRE,
            fast_save=s.FAST_SAVE,
            allowable_methods=('GET', 'POST'))
    else:
        self.session = requests.Session()
    if s.PROXIES:
        self.session.proxies = s.PROXIES
    self.session.mount('http://', adapter)
    self.session.mount('https://', adapter)
    return self.session
def get_host_init_business():
    """Fetch the host initialization business types from the API.

    :return: ``(success, msg)`` tuple; ``msg`` is the payload on
        success or the error text on failure.
    """
    try:
        url = 'https://119.29.79.89/api/install/Init_Type/'
        # SECURITY NOTE: hard-coded API token and disabled certificate
        # verification (verify=False) — consider moving the token to
        # configuration and trusting the server certificate.
        token = 'bda6d0ff9803476bc0763c4f1912a2c5ba7145bc'
        headers = {
            'Accept': 'application/json',
            'Authorization': 'Token {}'.format(token)
        }
        s = Session()
        # Retry up to 5 times on request timeouts (HTTP 408).
        s.mount(
            'https://',
            HTTPAdapter(max_retries=Retry(total=5, status_forcelist=[408])))
        r = s.post(url, headers=headers, json={}, timeout=30, verify=False)
        if r.status_code != 200:
            raise Exception(str(r))
        res = r.json()
        if not res['Accepted']:
            raise Exception(res['data'])
        return True, res['data']
    except Exception as e:
        # BUG FIX: the original returned from a ``finally`` block, which
        # also swallowed non-Exception errors such as KeyboardInterrupt
        # raised during the request.
        return False, str(e)
def __init__(self, token, knowru_url='https://www.knowru.com',
             retry_total=3, backoff_factor=1,
             status_forcelist=(502, 503, 504),
             method_whitelist=('GET', 'POST')):
    """Create a Knowru API client.

    Strips a single trailing slash from ``knowru_url``, stores the
    retry configuration, and mounts a retrying adapter on the API host.
    """
    normalized_url = (knowru_url[:-1] if knowru_url.endswith('/')
                      else knowru_url)
    self.knowru_url = normalized_url
    self._token = token
    self.headers = {
        'Authorization': 'Token {}'.format(token),
        'Content-Type': 'application/json',
        'Accept': 'application/json'
    }
    self._retry_total = retry_total
    self._backoff_factor = backoff_factor
    self._status_forcelist = status_forcelist
    self._method_whitelist = method_whitelist
    retry_policy = Retry(total=retry_total,
                         backoff_factor=backoff_factor,
                         status_forcelist=status_forcelist,
                         method_whitelist=method_whitelist)
    self._adapter = HTTPAdapter(max_retries=retry_policy)
    self.session = requests.Session()
    self.session.mount(normalized_url, self._adapter)
def __init__(self):
    """Set up the OTX client: auth headers, proxies, and a retrying
    HTTPS session; all request-state attributes start as None."""
    self.key = sw_context.inputs['otx_api_key']
    self.headers = {
        'X-OTX-API-KEY': self.key,
        'User-Agent': 'OTX Swimlane Python SDK/1.0',
        'Content-Type': 'application/json'
    }
    self.ioc = None
    self.iocType = None
    self.project = 'SDK'
    self.proxies = {
        'http': sw_context.inputs['proxy'],
        'https': sw_context.inputs['proxy']
    }
    self.response = None
    self.request_session = requests.Session()
    # Retry throttling (429) and transient server errors with a long
    # (5x) backoff before giving up.
    retry_policy = Retry(
        total=5,
        status_forcelist=[429, 500, 502, 503],
        backoff_factor=5,
    )
    self.request_session.mount('https://',
                               HTTPAdapter(max_retries=retry_policy))
    self.section = None
    self.server = sw_context.inputs['otx_api_server']
    self.table = None
    self.uri = None
def delete_user_for_openvpn(username):
    """Ask the 192.168.40.11 API to delete an OpenVPN account.

    :param username: account name to delete
    :return: decoded JSON response on success, otherwise a dict with
        ``msg`` (error text) and ``result`` set to False.
    """
    url = 'https://192.168.40.11/api/delvpnuser/'
    token = 'd11205fc792d2d2def44ca55e5252dcbdcea6961'
    headers = {
        'Accept': 'application/json',
        'Authorization': "Token " + token,
        'Connection': 'keep-alive',
    }
    payload = {
        'username': username,
    }
    try:
        session = Session()
        # Retry up to 3 times on request timeouts (HTTP 408).
        session.mount(
            'https://',
            HTTPAdapter(max_retries=Retry(total=3, status_forcelist=[408])))
        response = session.post(url, headers=headers, json=payload,
                                verify=False, timeout=30)
        if response.status_code != 200:
            raise Exception('发送到40.11失败' + str(response))
        return response.json()
    except Exception as e:
        return {'msg': str(e), 'result': False}
def modify_password_for_openvpn(username, passwd):
    """Change an OpenVPN account password via the 192.168.40.11 API.

    :param username: account to update
    :param passwd: new password
    :return: decoded JSON response on success, otherwise a dict with
        ``msg`` (error text) and ``result`` set to False.
    """
    log = OpenVPNLog()
    url = 'https://192.168.40.11/api/modvpnuser/'
    token = 'd11205fc792d2d2def44ca55e5252dcbdcea6961'
    headers = {
        'Accept': 'application/json',
        'Authorization': "Token " + token,
        'Connection': 'keep-alive',
    }
    payload = {'username': username, 'passwd': passwd}
    try:
        session = Session()
        # Retry up to 3 times on request timeouts (HTTP 408).
        session.mount(
            'https://',
            HTTPAdapter(max_retries=Retry(total=3, status_forcelist=[408])))
        response = session.post(url, headers=headers, json=payload,
                                verify=False, timeout=30)
        if response.status_code != 200:
            log.logger.info(str(response))
            raise Exception('发送到40.11失败' + str(response))
        result = response.json()
        log.logger.info(result)
        return result
    except Exception as e:
        log.logger.info(str(e))
        return {'msg': str(e), 'result': False}
def set_max_retries(
    self,
    base_url: Optional[str] = None,
    connect: Optional[int] = None,
    read: Optional[int] = None,
    redirect: Optional[int] = None,
) -> None:
    """Retry configuration.

    Mounts an HTTPAdapter with the given retry counts on either the
    single ``base_url`` or on every configured base URL.

    :param base_url: base url for the HTTPAdapter
    :param connect: how many connection-related errors to retry on
    :param read: how many times to retry on read errors
    :param redirect: how many redirects to perform. Limit this to
        avoid infinite redirect loops.
    """
    if base_url is not None:
        targets = [base_url]
    else:
        targets = [str(b) for b in self.base_urls if b is not None]
    retry_policy = Retry(connect=connect, read=read, redirect=redirect)
    for target in targets:
        self.session.mount(
            target,
            requests.adapters.HTTPAdapter(max_retries=retry_policy))
def __init__(self, key: str, token: str):
    """Initialize the Trello API client with base URL and credentials.

    (Fixed docstring: the original said "DigitalShadowsAPI", but this
    client targets https://api.trello.com.)

    Creates a requests session, mounts a retrying adapter for the
    Trello base URL, installs OAuth headers and default query params.

    Args:
        key: Trello API OAuth key
        token: Trello API OAuth token
    """
    self.key = key
    self.token = token
    self.base_url = 'https://api.trello.com'
    self.session = session = requests.session()
    # Retry up to 3 connection failures with backoff; the adapter only
    # applies to URLs under base_url because it is mounted on it.
    session.mount(
        self.base_url,
        HTTPAdapter(max_retries=Retry(connect=3, backoff_factor=1)))
    session.headers.update({
        'Authorization':
        f'OAuth oauth_consumer_key="{self.key}", oauth_token="{self.token}"'
    })
    # Default query parameters applied to every request on the session.
    session.params.update({
        'cards_limit': 1000,
        'card_members': 'true',
        'card_attachments': 'true',
        'members_limit': 100,
        'boards_limit': 1000,
        'board': 'true',
        'modelTypes': ['cards', 'boards'],
    })
def send(self, request):
    # type: (Request) -> Response
    """Send *request* synchronously and wrap the outcome in a Response.

    Timeout and network failures are converted into error Response
    objects rather than raised.
    """
    if self._session is None:
        self._session = requests.Session()
        # Ask urllib not to make retries on its own.
        self._session.mount(
            'https://',
            HTTPAdapter(max_retries=Retry(connect=0))
        )
    prepared = requests.Request(
        method=request.verb,
        url=request.url,
        headers=request.headers,
        data=request.data_as_string,
    ).prepare()  # type: ignore
    try:
        response = self._session.send(  # type: ignore
            prepared,
            timeout=(request.connect_timeout, request.timeout),
        )
    except Timeout as e:
        return Response(error_message=str(e), is_timed_out_error=True)
    except RequestException as e:
        return Response(error_message=str(e), is_network_error=True)
    return Response(
        response.status_code, response.json(), response.reason
    )
def __init__(self, auth=None):
    """Build the scraper group and a shared retrying HTTP session, then
    create one MetricTypeCollection per UVE type."""
    super().__init__()
    self.scrapers = Group()
    self.scrape_pool = Pool(size=Config().scraper.pool_size)
    self.session = Session(auth=auth)
    retry_policy = Retry(
        total=Config().scraper.max_retry,
        connect=Config().scraper.max_retry,
        read=Config().scraper.max_retry,
        backoff_factor=0.3,
    )
    self.session.mount(
        "http://",
        HTTPAdapter(max_retries=retry_policy, pool_connections=10),
    )
    # Group the metric configs by UVE type so each type gets exactly
    # one collection.
    metric_types = {}
    for metric_config in Config().metrics:
        metric_types.setdefault(metric_config.uve_type,
                                []).append(metric_config)
    for uve_type, metric_configs in metric_types.items():
        self.append(
            MetricTypeCollection(
                self.session,
                uve_type,
                metric_configs,
                self.scrapers,
                self.scrape_pool,
            ))
def make_adapter_with_retry():
    """Build an HTTPAdapter that retries POSTs once on 5xx gateway errors."""
    retry_policy = Retry(
        total=1,
        method_whitelist=['POST'],
        status_forcelist=[500, 502, 503, 504],
    )
    return HTTPAdapter(max_retries=retry_policy)
def __init__(self, config_d):
    """Configure the PuppetDB inventory client from *config_d*:
    fact keys, TLS material, and a retrying JSON session."""
    self.business_unit_fact_key = config_d.get('business_unit_fact_key')
    self.group_fact_keys = config_d.get('group_fact_keys') or []
    self.extra_fact_keys = config_d.get('extra_fact_keys') or []
    self.puppetboard_url = config_d.get('puppetboard_url')
    # connection settings ('puppetdb_url' is required)
    self.puppetdb_url = config_d["puppetdb_url"]
    self.api_base_url = f'{self.puppetdb_url}/pdb/query/v4'
    self.puppetdb_timeout = config_d.get('puppetdb_timeout', 10)
    self.session = requests.Session()
    # JSON headers for every request
    self.session.headers.update({
        'user-agent': 'zentral/0.0.1',
        'content-type': 'application/json',
        'accept': 'application/json',
        'accept-charset': 'utf-8'
    })
    # optional server CA bundle
    puppetdb_ca = config_d.get("puppetdb_ca")
    if puppetdb_ca:
        self.session.verify = puppetdb_ca
    # client certificate: cert and key must be given together
    cert = config_d.get("puppetdb_cert")
    key = config_d.get("puppetdb_key")
    if cert and key:
        self.session.cert = (cert, key)
    elif cert or key:
        raise PuppetDBError("Incomplete puppetDB configuration")
    # retries mounted only under the query API prefix
    retry_policy = Retry(total=3, backoff_factor=1,
                         status_forcelist=[500, 502, 503, 504])
    self.session.mount(
        self.api_base_url,
        requests.adapters.HTTPAdapter(max_retries=retry_policy))
def get_host_install_soft():
    """Fetch the list of software to install on the host.

    :return: ``(success, data)`` tuple; ``data`` is the payload when the
        server accepts the request (empty string otherwise) or the error
        text on failure.
    """
    data = ''
    try:
        url = 'https://119.29.79.89/api/install/soft/'
        # SECURITY NOTE: hard-coded API token and disabled certificate
        # verification (verify=False) — consider moving the token to
        # configuration and trusting the server certificate.
        token = 'bda6d0ff9803476bc0763c4f1912a2c5ba7145bc'
        headers = {
            'Accept': 'application/json',
            'Authorization': 'Token {}'.format(token)
        }
        s = Session()
        # Retry up to 5 times on request timeouts (HTTP 408).
        s.mount(
            'https://',
            HTTPAdapter(max_retries=Retry(total=5, status_forcelist=[408])))
        r = s.post(url, headers=headers, json={}, timeout=30, verify=False)
        if r.status_code != 200:
            raise Exception(str(r))
        res = r.json()
        # A rejected-but-200 response keeps success True with empty data,
        # matching the original behavior.
        if res['Accepted']:
            data = res['data']
        return True, data
    except Exception as e:
        # BUG FIX: the original returned from a ``finally`` block, which
        # also swallowed non-Exception errors such as KeyboardInterrupt
        # raised during the request.
        return False, str(e)
def __init__(self, host, port, path, user, password, secret,
             business_unit=None, **kwargs):
    """Set up the API client: base URLs, a basic-auth JSON session with
    retries on the API prefix, and empty group caches."""
    self.host = host
    self.path = path
    self.port = port
    self.secret = secret
    self.business_unit = business_unit
    self.base_url = "https://{}:{}".format(host, port)
    self.api_base_url = "{}{}".format(self.base_url, path)
    # requests session setup
    session = requests.Session()
    session.headers.update({
        'user-agent': 'zentral/0.0.1',
        'accept': 'application/json'
    })
    session.auth = (user, password)
    retry_policy = Retry(total=3, backoff_factor=1,
                         status_forcelist=[500, 502, 503, 504])
    session.mount(self.api_base_url,
                  requests.adapters.HTTPAdapter(max_retries=retry_policy))
    self.session = session
    # lazily populated group caches
    self.mobile_device_groups = {}
    self.reverse_computer_groups = {}
def download_data(url, verbose):
    """Download the binaries from a URL and return the destination filename

    Retry downloading if either server or connection errors occur on a SSL
    connection
    """
    session = requests.Session()
    session.mount('https://',
                  HTTPAdapter(max_retries=Retry(
                      total=3, backoff_factor=0.5,
                      status_forcelist=[500, 503, 504])))
    response = session.get(url, stream=True)
    # Server names the file via the Content-Disposition header.
    _, content = cgi.parse_header(response.headers['Content-Disposition'])
    filename = content['filename']
    tmp_path = os.path.join(tempfile.mkdtemp(), filename)
    sct.printv('\nDownloading %s...' % filename, verbose)
    with open(tmp_path, 'wb') as tmp_file:
        total = int(response.headers.get('content-length', 1))
        progress = tqdm(total=total, unit='B', unit_scale=True,
                        desc="Status", ascii=True)
        for chunk in response.iter_content(chunk_size=8192):
            if not chunk:
                continue
            tmp_file.write(chunk)
            if verbose > 0:
                progress.update(len(chunk))
        progress.close()
    sct.printv('\nDownload complete', verbose=verbose)
    return tmp_path
def __init__(self, api_key=None, return_type='snake', retry_total=6,
             api_version="1.0", resources="base"):
    """Create an authenticated API session and attach the generated
    endpoint classes as attributes.

    :raises ValueError: for an unknown ``return_type``
    """
    if return_type not in ('snake', 'raw', 'pandas'):
        raise ValueError("Return type must be one of 'snake', 'raw', "
                         "'pandas'")
    session_auth_key = _get_api_key(api_key)
    self._session = session = requests.session()
    session.auth = (session_auth_key, '')
    # Prefix the existing agent string with the client version.
    user_agent = "civis-python/{} {}".format(
        civis.__version__, session.headers.get('User-Agent', ''))
    session.headers.update({"User-Agent": user_agent.strip()})
    retry_policy = Retry(retry_total, backoff_factor=.75,
                         status_forcelist=RETRY_CODES)
    session.mount("https://", HTTPAdapter(max_retries=retry_policy))
    generated = generate_classes(api_key=session_auth_key,
                                 user_agent=user_agent,
                                 api_version=api_version,
                                 resources=resources)
    for class_name, cls in generated.items():
        setattr(self, class_name, cls(session, return_type))
def set_max_retries(self, base_url=None, connect=None, read=None,
                    redirect=None):
    """Retry configuration.

    Mounts an HTTPAdapter with the given retry counts on either the
    single ``base_url`` or on every configured base URL.

    :param base_url: base url for the HTTPAdapter
    :type base_url: str
    :param connect: how many connection-related errors to retry on
    :type connect: int | None
    :param read: how many times to retry on read errors
    :type read: int | None
    :param redirect: how many redirects to perform. Limit this to avoid
        infinite redirect loops.
    :type redirect: int | None
    """
    targets = self.base_urls if base_url is None else [base_url]
    retry_policy = Retry(connect=connect, read=read, redirect=redirect)
    for target in targets:
        self.session.mount(
            str(target),
            requests.adapters.HTTPAdapter(max_retries=retry_policy))
def _make_requests_session_with_retries(max_retries):
    """Build a requests.Session that retries transient HTTP failures.

    :param max_retries: total retries per request on both schemes
    """
    from requests.adapters import HTTPAdapter
    from requests.packages.urllib3.util import Retry
    # Statuses treated as retryable: throttling, server errors, gateway
    # failures and origin timeouts.
    retry_policy = Retry(
        total=max_retries,
        status_forcelist=[
            429,  # Too many requests (i.e: back off)
            500,  # Generic internal server error
            502,  # Bad Gateway - i.e: upstream failure
            503,  # Unavailable, temporarily
            504,  # Gateway timeout
            522   # Origin connection timed out
        ],
        # back off for 0s, 1s, 3s, 7s, etc... after each successive
        # failure. (factor*(2^N-1))
        backoff_factor=1.0,
    )
    adapter = HTTPAdapter(max_retries=retry_policy)
    session = requests.Session()
    # use retry for both HTTP and HTTPS connections.
    session.mount('http://', adapter)
    session.mount('https://', adapter)
    return session
def download_data(urls, verbose): """Download the binaries from a URL and return the destination filename Retry downloading if either server or connection errors occur on a SSL connection urls: list of several urls (mirror servers) or single url (string) """ # if urls is not a list, make it one if not isinstance(urls, (list, tuple)): urls = [urls] # loop through URLs for url in urls: try: sct.printv('\nTrying URL: %s' % url, verbose) retry = Retry(total=3, backoff_factor=0.5, status_forcelist=[500, 503, 504]) session = requests.Session() session.mount('https://', HTTPAdapter(max_retries=retry)) response = session.get(url, stream=True) if "Content-Disposition" in response.headers: _, content = cgi.parse_header( response.headers['Content-Disposition']) filename = content["filename"] else: sct.printv("Unexpected: link doesn't provide a filename", type="warning") continue tmp_path = os.path.join(tempfile.mkdtemp(), filename) sct.printv('Downloading %s...' % filename, verbose) with open(tmp_path, 'wb') as tmp_file: total = int(response.headers.get('content-length', 1)) tqdm_bar = tqdm(total=total, unit='B', unit_scale=True, desc="Status", ascii=True) for chunk in response.iter_content(chunk_size=8192): if chunk: tmp_file.write(chunk) if verbose > 0: dl_chunk = len(chunk) tqdm_bar.update(dl_chunk) tqdm_bar.close() return tmp_path except Exception as e: sct.printv( "Link download error, trying next mirror (error was: %s)" % e, type='warning') else: sct.printv('\nDownload error', type='error')
def download_data(urls):
    """
    Function used to download the data form github or other mirrors
    Args:
        urls (list): List of urls to try.

    Returns:
        downloaded folder path
    """
    if isinstance(urls, str):
        urls = [urls]
    # Collect per-mirror failures so they can all be reported at the end.
    exceptions = []
    for url in urls:
        try:
            logger.info('Trying URL: %s' % url)
            # Retry transient server errors up to 3 times with backoff.
            retry = Retry(total=3, backoff_factor=0.5,
                          status_forcelist=[500, 503, 504])
            session = requests.Session()
            session.mount('https://', HTTPAdapter(max_retries=retry))
            response = session.get(url, stream=True)
            response.raise_for_status()
            # Default filename from the URL path; may be overridden by
            # the Content-Disposition header below.
            filename = os.path.basename(urllib.parse.urlparse(url).path)
            if "Content-Disposition" in response.headers:
                _, content = cgi.parse_header(
                    response.headers['Content-Disposition'])
                filename = content["filename"]
            # protect against directory traversal
            filename = os.path.basename(filename)
            if not filename:
                # this handles cases where you're loading something like an index page
                # instead of a specific file. e.g. https://osf.io/ugscu/?action=view.
                raise ValueError(
                    "Unable to determine target filename for URL: %s"
                    % (url, ))
            tmp_path = os.path.join(tempfile.mkdtemp(), filename)
            logger.info('Downloading: %s' % filename)
            with open(tmp_path, 'wb') as tmp_file:
                # NOTE(review): ``total`` is computed but unused here —
                # presumably a leftover from a progress bar.
                total = int(response.headers.get('content-length', 1))
                for chunk in response.iter_content(chunk_size=8192):
                    if chunk:
                        tmp_file.write(chunk)
            return tmp_path
        except Exception as e:
            logger.warning(
                "Link download error, trying next mirror (error was: %s)"
                % e)
            exceptions.append(e)
    else:
        # for/else: every mirror failed; surface all collected errors.
        raise Exception('Download error', exceptions)
def __init__(self, token, base_url=BASE_URL):
    """Create a bearer-token JSON API client with aggressive retries
    (up to 100 attempts on 429/500/502/504) mounted on the base URL."""
    self.token = token
    self.base_url = base_url
    retry_policy = Retry(total=100,
                         status_forcelist=(429, 500, 502, 504),
                         backoff_factor=0.1)
    adapter = requests.adapters.HTTPAdapter(max_retries=retry_policy)
    self.session = session = requests.session()
    session.mount(self.base_url, adapter)
    session.headers.update({
        'Content-Type': 'application/json',
        'Authorization': 'Bearer {}'.format(self.token),
    })