def get_session(k8s_api_verify_tls):
    """Return the cached module-level Session for the k8s API, building it on first use."""
    global session
    if session is not None:
        return session

    # The pod's service-account token authenticates the controller to the API server.
    with open('/var/run/secrets/kubernetes.io/serviceaccount/token') as token_file:
        token = token_file.read()

    session = requests.Session()
    session.headers = {
        'Authorization': 'Bearer ' + token,
        'Content-Type': 'application/json',
        'User-Agent': user_agent('Deis Controller', deis_version)
    }
    # Verify against the in-cluster CA bundle, unless TLS verification is disabled.
    if k8s_api_verify_tls:
        session.verify = '/var/run/secrets/kubernetes.io/serviceaccount/ca.crt'
    else:
        session.verify = False

    # Retry transient connection/read failures and common gateway errors.
    retry = Retry(
        total=3,
        read=3,
        connect=3,
        backoff_factor=0.3,
        status_forcelist=(500, 502, 504),
    )
    adapter = HTTPAdapter(max_retries=retry)
    session.mount('http://', adapter)
    session.mount('https://', adapter)
    return session
def __init__(self, token, host='https://api2.panopta.com', version='2',
             log_level=LOG_INFO, log_path="."):
    """Create an authenticated Panopta API client and configure file logging.

    :param token: API token used for request authentication.
    :param host: Panopta API host.
    :param version: API version segment appended to the host.
    :param log_level: level applied to the root logger.
    :param log_path: directory where the rotating log file is written.
    """
    self.base_url = urljoin(host, 'v' + version)

    sess = requests.Session()
    sess.auth = PanoptaAuth(token)
    sess.headers.update({
        'Accept': 'application/json',
        'User-Agent': user_agent(__package__, __version__)
    })
    self.session = sess

    # Daily-rotating log file, keeping two weeks of history.
    root_logger = logging.getLogger()
    handler = logging.handlers.TimedRotatingFileHandler(
        join(log_path, __package__ + '.log'), when='d', interval=1, backupCount=14)
    handler.setFormatter(logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
    root_logger.addHandler(handler)
    root_logger.setLevel(log_level)
    root_logger.getChild(__package__).info(self.__class__.__name__ + ' initialized')
def gen_user_agent(version):
    """Generate the User-Agent string which will be used for most requests.

    ``platform.system`` and ``platform.release`` are temporarily monkey
    patched to raise, which prevents requests-toolbelt from disclosing the
    host OS name and version in the generated string.

    :param version: picuplib version string to embed in the User-Agent.
    :return: the generated User-Agent string.
    """
    def monkey_patch():
        """Replacement that makes platform lookups fail on purpose."""
        raise IOError

    # saving original functions
    orig_system = platform.system
    orig_release = platform.release
    # applying patch
    platform.system = monkey_patch
    platform.release = monkey_patch
    # BUGFIX: revert inside finally so the process-wide platform module is
    # never left broken if user_agent() raises.
    try:
        user_agent = requests_toolbelt.user_agent('picuplib', version)
    finally:
        # reverting patch
        platform.system = orig_system
        platform.release = orig_release
    return user_agent
class Client(
    Batches,
    Accounts,
    Broadcasts,
    Campaigns,
    Conversions,
    CustomFields,
    Events,
    Forms,
    Shoppers,
    Subscribers,
    Tags,
    Users,
    Webhooks,
    Workflows,
):
    """Drip API client: one mixin per API resource area, plus the shared HTTP session."""

    session: 'Session'
    account_id: int
    drip_py_ua: str = user_agent("drip-python", __version__, extras=[('requests', __requests_version__), ])
    api_domain: str = "https://api.getdrip.com"
    api_version: str = 'v2'

    def __init__(self, api_token: str, account_id: int) -> None:
        """Authenticate against the Drip API for a single account."""
        self.account_id = account_id
        # Rather than assigning directly to `self`, this is the recommended
        # idiom so atexit.register behaves nicely with GC.
        http = BaseUrlSession(base_url=f'{self.api_domain}/{self.api_version}/{account_id}/')
        http.auth = (api_token, '')
        http.headers.update({"User-Agent": self.drip_py_ua, "Content-Type": 'application/json'})
        register(http.close)
        self.session = http
def __init__(self, base_url, username=None, password=None):
    """Create (or reuse) an API connection to the Fedora repository at *base_url*.

    :param base_url: repository base url; a trailing slash is added if missing.
    :param username: optional basic-auth username, passed per-request.
    :param password: optional basic-auth password, passed per-request.
    """
    # standardize url format; ensure we have a trailing slash,
    # adding one if necessary
    if not base_url.endswith('/'):
        base_url = base_url + '/'

    # TODO: can we re-use sessions safely across instances?
    global _sessions
    # check for an existing session for this fedora
    if base_url in _sessions:
        self.session = _sessions[base_url]
    else:
        # create a new session and add to global sessions
        self.session = requests.Session()
        # Set headers to be passed with every request
        # NOTE: only headers that will be common for *all* requests
        # to this fedora should be set in the session
        # (i.e., do NOT include auth information here)
        self.session.headers = {
            'User-Agent': user_agent('eulfedora', eulfedora_version),
        }
        # BUGFIX: 'verify' is a TLS option on the Session object, not an
        # HTTP header; previously it sat inside session.headers and was
        # sent to the server as a bogus "verify: True" request header.
        # requests verifies SSL certs by default; set explicitly for clarity.
        self.session.verify = True
        _sessions[base_url] = self.session

    self.base_url = base_url
    self.username = username
    self.password = password
    self.request_options = {}
    if self.username is not None:
        # store basic auth option to pass when making requests
        self.request_options['auth'] = (self.username, self.password)
def do_github_request(req) -> Response:
    """Attach GitHub User-Agent (and token auth, when available) to *req*, then dispatch it."""
    extra = {'User-Agent': requests_toolbelt.user_agent('bitbucket_issues_to_github', '1.0.0')}
    token = get_github_access_token()
    if token is not None:
        extra['Authorization'] = 'token ' + token
    req.headers.update(extra)
    return do_request(req)
def __init__(self, url: str, secret: str, initial_fetch: bool = True,
             timeout: Union[int, tuple, None] = None):
    """Create a client bound to one OpenVidu Server instance.

    :param url: Base url of the OpenVidu Server, typically something like https://localhost:4443/
    :param secret: Secret configured for your OpenVidu Server.
    :param initial_fetch: When `True` (default) server state is fetched on object
        creation; when `False`, a `fetc()` must be called before doing anything
        with the object. In most scenarios you won't need to change this.
    :param timeout: Timeout applied to all requests sent to the OpenVidu server;
        `None` (default) means no timeout. See
        https://2.python-requests.org/en/latest/user/advanced/#timeouts for possible values.
    """
    sess = BaseUrlSession(base_url=url)
    sess.auth = HTTPBasicAuth('OPENVIDUAPP', secret)
    sess.headers.update({'User-Agent': user_agent('PyOpenVidu', __version__)})
    # Bake the timeout into every request issued through this session.
    sess.request = partial(sess.request, timeout=timeout)
    self._session = sess

    self._openvidu_sessions = {}  # id:object
    # Used only to calculate the return value of the fetch() call
    self._last_fetch_result = {}

    if initial_fetch:
        self.fetch()  # initial fetch
def gen_request(self, req_type, path, data=None, json_req=None, files=None, stream=False,
                headers=None, log_error=True):
    """
    Generic request from platform

    :param req_type: HTTP method name, one of GET/DELETE/POST/PUT/PATCH (any case)
    :param path: path appended to self.environment to form the request url
    :param data: body payload forwarded to requests
    :param json_req: json payload forwarded to requests
    :param files: files payload forwarded to requests
    :param stream: stream the response body
    :param headers: optional dict of extra headers merged over the defaults
    :param log_error: log non-ok responses (suppressed when running as CLI)
    :return: (success flag, response)
    """
    req_type = req_type.upper()
    valid_request_type = ['GET', 'DELETE', 'POST', 'PUT', 'PATCH']
    assert req_type in valid_request_type, '[ERROR] type: %s NOT in valid requests' % req_type

    # prepare request
    # BUGFIX: copy self.auth instead of aliasing it. Previously the
    # User-Agent and any caller-supplied headers were written straight into
    # the shared auth dict, so per-call headers leaked into every subsequent
    # request made through this client.
    headers_req = dict(self.auth)
    headers_req['User-Agent'] = requests_toolbelt.user_agent('dtlpy', __version__.version)
    if headers is not None:
        if not isinstance(headers, dict):
            raise exceptions.PlatformException(error=400, message="Input 'headers' must be a dictionary")
        headers_req.update(headers)
    req = requests.Request(method=req_type,
                           url=self.environment + path,
                           json=json_req,
                           files=files,
                           data=data,
                           headers=headers_req)
    # prepare to send
    prepared = req.prepare()
    # save curl for debug
    command = "curl -X {method} -H {headers} -d '{data}' '{uri}'"
    method = prepared.method
    uri = prepared.url
    data = prepared.body
    headers = ['"{0}: {1}"'.format(k, v) for k, v in prepared.headers.items()]
    headers = " -H ".join(headers)
    self.last_curl = command.format(method=method, headers=headers, data=data, uri=uri)
    self.last_request = prepared
    # send request
    resp = self.send_session(prepared=prepared, stream=stream)
    self.last_response = resp
    # handle output
    if not resp.ok:
        self.print_bad_response(resp, log_error=log_error and not self.is_cli)
        return_type = False
    else:
        try:
            # print only what is printable (dont print get steam etc..)
            if not stream:
                self.print_response(resp)
        except ValueError:
            # no JSON returned
            pass
        return_type = True
    return return_type, resp
def __init__(self, args, options):
    """Constructs a Driver instance that manages the crawling process.

    Args:
        args (:class:`argparse.Namespace`): A namespace with all the parsed CLI arguments.
        options (:class:`nyawc.Options`): The options to use for the current crawling runtime.
    """
    self.stopping = False
    self.__args = args
    self.__options = options
    self.__vulnerable_items = []

    # Wire every crawler callback to the corresponding handler on this driver.
    callbacks = self.__options.callbacks
    callbacks.crawler_before_start = self.cb_crawler_before_start
    callbacks.crawler_after_finish = self.cb_crawler_after_finish
    callbacks.request_before_start = self.cb_request_before_start
    callbacks.request_after_finish = self.cb_request_after_finish
    callbacks.request_in_thread_after_finish = self.cb_request_in_thread_after_finish
    callbacks.request_on_error = self.cb_request_on_error

    # Identify this crawler in every outgoing request.
    self.__options.identity.headers.update({
        "User-Agent": user_agent(PackageHelper.get_alias(), PackageHelper.get_version())
    })
def __init__(self, base_url, username=None, password=None, retries=None):
    """Create an API connection to the Fedora repository at *base_url*.

    :param base_url: repository base url; a trailing slash is added if missing.
    :param username: optional basic-auth username, passed per-request.
    :param password: optional basic-auth password, passed per-request.
    :param retries: optional retry count for the HTTP adapter.
    """
    # normalize: the base url always carries a trailing slash
    if not base_url.endswith('/'):
        base_url += '/'

    self.session = requests.Session()
    # Only headers common to *all* requests to this fedora belong on the
    # session; auth information is passed per-request instead (see
    # request_options below). SSL verification stays at the requests
    # default (on).
    self.session.headers = {
        # use requests-toolbelt user agent
        'User-Agent': user_agent('eulfedora', eulfedora_version),
    }

    # requests performs no retries by default, so only customize
    # when a retry count was requested
    if retries is not None:
        adapter = requests.adapters.HTTPAdapter(max_retries=retries)
        for scheme in ('http://', 'https://'):
            self.session.mount(scheme, adapter)

    self.base_url = base_url
    self.username = username
    self.password = password
    self.request_options = {}
    if self.username is not None:
        # store basic auth option to pass when making requests
        self.request_options['auth'] = (self.username, self.password)
def __init__(self):
    """Constructs an OptionsIdentity instance with default, anonymous settings."""
    self.auth = None
    self.proxies = None
    self.cookies = requests.cookies.RequestsCookieJar()
    # Start from the requests defaults, overriding only the User-Agent.
    default_headers = requests.utils.default_headers()
    default_headers.update({"User-Agent": user_agent(PackageHelper.get_alias(), PackageHelper.get_version())})
    self.headers = default_headers
def get_session(access_token: str) -> requests.Session:
    """Build a requests Session pre-configured for the Drycc manager API."""
    session = requests.Session()
    headers = {}
    headers['Content-Type'] = 'application/json'
    headers['User-Agent'] = user_agent('Drycc Manager ', drycc_version)
    headers['Authorization'] = 'token ' + access_token
    session.headers = headers
    return session
def __init__(self):
    """Build default headers for the workflow manager, authenticating with a
    base85-encoded access-key:secret-key token."""
    access = settings.WORKFLOW_MANAGER_ACCESS_KEY.encode("utf8")
    secret = settings.WORKFLOW_MANAGER_SECRET_KEY.encode("utf8")
    token = base64.b85encode(b"%s:%s" % (access, secret)).decode("utf8")
    self.headers = {
        'Content-Type': 'application/json',
        'Authorization': 'token %s' % token,
        'User-Agent': user_agent('Drycc Controller ', drycc_version)
    }
def get_session():
    """Return the cached module-level Session for the in-cluster k8s API."""
    global session
    if session is not None:
        return session
    # The pod's service-account token authenticates us to the API server.
    with open('/var/run/secrets/kubernetes.io/serviceaccount/token') as token_file:
        token = token_file.read()
    session = requests.Session()
    session.headers = {
        'Authorization': 'Bearer ' + token,
        'Content-Type': 'application/json',
        'User-Agent': user_agent('Deis Controller', deis_version)
    }
    # TLS-verify server certs against the in-cluster CA bundle.
    session.verify = '/var/run/secrets/kubernetes.io/serviceaccount/ca.crt'
    return session
def get_session():
    """Return the cached module-level Session, creating it on first use."""
    global session
    if session is None:
        new_session = requests.Session()
        new_session.headers = {
            # https://toolbelt.readthedocs.org/en/latest/user-agent.html#user-agent-constructor
            'User-Agent': user_agent('Deis Controller', deis_version),
        }
        # `mount` a custom adapter that retries failed connections for HTTP and HTTPS requests.
        # http://docs.python-requests.org/en/latest/api/#requests.adapters.HTTPAdapter
        for scheme in ('http://', 'https://'):
            new_session.mount(scheme, requests.adapters.HTTPAdapter(max_retries=10))
        session = new_session
    return session
def session(self) -> Session:
    """
    Create a custom session object.

    A request session provides cookie persistence, connection-pooling,
    and further configuration options.
    """
    sess = requests.Session()
    sess.mount("https://", HTTPAdapter(max_retries=self.retry_strategy))

    # Raise on any non-2xx response as soon as it arrives.
    def _raise_for_status(response, *args, **kwargs):
        response.raise_for_status()

    sess.hooks['response'] = [_raise_for_status]
    ua = self.user_agent or user_agent(package_name, __version__)
    sess.headers.update({'User-Agent': ua})
    return sess
def set_auth(self, config_location):
    """Load the SDK config file, build the default request headers, and verify
    the credentials by fetching a single project from the API.

    :param config_location: path to a JSON config file containing at least a
        "token" key (optionally "authtype" and "main_endpoint").
    :raises SABaseException: if the config file is malformed, the credentials
        are rejected, or the service is unreachable.
    """
    # NOTE(review): the file handle returned by open() is never closed;
    # consider a with-block.
    self._api_config = json.load(open(config_location))
    try:
        self._token = self._api_config["token"]
    except KeyError:
        logger.error(
            "'token' key is not present in the config file %s.", config_location
        )
        raise SABaseException(0, "Incorrect config file.")
    try:
        # the token is expected to embed the team id after an '=' sign
        self.team_id = int(self._token.split("=")[1])
    except Exception:
        logger.error(
            "token key is not valid in the config file %s.", config_location
        )
        raise SABaseException(0, "Incorrect config file.")
    # default headers sent with every API request
    self._default_headers = {'Authorization': self._token}
    self._default_headers["authtype"] = "sdk"
    if "authtype" in self._api_config:
        # config file may override the auth type
        self._default_headers["authtype"] = self._api_config["authtype"]
    self._default_headers['User-Agent'] = requests_toolbelt.user_agent(
        'superannotate', Version
    )
    self._main_endpoint = "https://api.annotate.online"
    if "main_endpoint" in self._api_config:
        # config file may point at a non-default endpoint
        self._main_endpoint = self._api_config["main_endpoint"]
    self._verify = True
    self._session = None
    # smoke-test the credentials with a minimal one-project listing
    response = self.send_request(
        req_type='GET',
        path='/projects',
        params={
            'team_id': str(self.team_id),
            'offset': 0,
            'limit': 1
        }
    )
    if not response.ok:
        self._session = None
        if "Not authorized" in response.text:
            raise SABaseException(0, "Couldn't authorize")
        raise SABaseException(0, "Couldn't reach superannotate")
async def __upload_file_async(self, to_upload, item_type, item_size, remote_url, uploaded_filename,
                              remote_path=None, callback=None, mode='skip', item_metadata=None):
    """Upload a single item to the platform asynchronously via multipart form.

    :param to_upload: buffer with the file content to stream.
    :param item_type: mimetype-like type string sent in the form.
    :param item_size: size in bytes; used to decide whether to show a progress bar.
    :param remote_url: upload endpoint path appended to self.environment.
    :param uploaded_filename: filename to store remotely.
    :param remote_path: remote directory the file is stored under.
    :param callback: optional progress callback receiving bytes read; a tqdm-based
        one is created for items larger than 10MB when omitted.
    :param mode: upload mode query parameter (e.g. 'skip').
    :param item_metadata: optional metadata dict serialized into the form.
    :return: AsyncResponse on completion, AsyncResponseError on failure.
    """
    # BUGFIX: copy self.auth instead of aliasing it - previously the
    # User-Agent header was written into the shared auth dict.
    headers = dict(self.auth)
    headers['User-Agent'] = requests_toolbelt.user_agent('dtlpy', __version__.version)
    if callback is None:
        if item_size > 10e6:
            # size larger than 10MB: report progress through a tqdm bar
            pbar = tqdm.tqdm(total=item_size,
                             unit="B",
                             unit_scale=True,
                             unit_divisor=1024,
                             position=1,
                             disable=self.verbose.disable_progress_bar)

            def callback(bytes_read):
                pbar.update(bytes_read)
        else:
            def callback(bytes_read):
                pass

    timeout = aiohttp.ClientTimeout(total=0)  # 0 = no total timeout
    async with aiohttp.ClientSession(headers=headers, timeout=timeout) as session:
        try:
            form = aiohttp.FormData({})
            form.add_field('type', item_type)
            # NOTE(review): assumes remote_path is not None here - os.path.join
            # raises if it is; confirm callers always pass a remote path.
            form.add_field('path', os.path.join(remote_path, uploaded_filename).replace('\\', '/'))
            if item_metadata is not None:
                form.add_field('metadata', json.dumps(item_metadata))
            form.add_field('file', AsyncUploadStream(buffer=to_upload, callback=callback))
            url = '{}?mode={}'.format(self.environment + remote_url, mode)
            async with session.post(url, data=form, verify_ssl=self.verify) as resp:
                text = await resp.text()
                try:
                    _json = await resp.json()
                except Exception:
                    # body is not valid JSON; fall back to an empty dict
                    _json = dict()
                response = AsyncResponse(text=text, _json=_json, async_resp=resp)
        except Exception as err:
            response = AsyncResponseError(error=err, trace=traceback.format_exc())
    return response
async def gen_async_request(self,
                            req_type,
                            path,
                            data=None,
                            json_req=None,
                            files=None,
                            stream=None,
                            headers=None,
                            log_error=True,
                            filepath=None,
                            chunk_size=8192,
                            pbar=None,
                            is_dataloop=True):
    """Asynchronous generic request, mirroring gen_request.

    :param req_type: HTTP method name, one of GET/DELETE/POST/PUT/PATCH (any case).
    :param path: request path (appended to self.environment when is_dataloop).
    :param data: body payload forwarded to the request.
    :param json_req: json payload forwarded to the request.
    :param files: files payload (used only to build the debug curl/prepared request).
    :param stream: stream the response; enables chunked download and progress bar.
    :param headers: optional dict of extra headers merged over the defaults.
    :param log_error: log non-ok responses (suppressed when running as CLI).
    :param filepath: str path or io.BytesIO to write a streamed body into.
    :param chunk_size: read size used while streaming the body.
    :param pbar: optional pre-built progress bar.
    :param is_dataloop: request targets the dataloop platform (adds auth headers).
    :return: (success flag, response)
    """
    req_type = req_type.upper()
    valid_request_type = ['GET', 'DELETE', 'POST', 'PUT', 'PATCH']
    assert req_type in valid_request_type, '[ERROR] type: %s NOT in valid requests' % req_type
    # prepare request
    if is_dataloop:
        full_url = self.environment + path
        # NOTE(review): headers_req aliases self.auth, so the User-Agent and
        # any caller headers below are written into the shared auth dict;
        # consider copying (dict(self.auth)) instead.
        headers_req = self.auth
        headers_req['User-Agent'] = requests_toolbelt.user_agent(
            'dtlpy', __version__.version)
    else:
        full_url = path
        headers = dict()
        headers_req = headers
    if headers is not None:
        if not isinstance(headers, dict):
            raise exceptions.PlatformException(
                error='400',
                message="Input 'headers' must be a dictionary")
        for k, v in headers.items():
            headers_req[k] = v
    req = requests.Request(method=req_type,
                           url=full_url,
                           json=json_req,
                           files=files,
                           data=data,
                           headers=headers_req)
    # prepare to send
    prepared = req.prepare()
    # save curl for debug
    command = "curl -X {method} -H {headers} -d '{data}' '{uri}'"
    headers = [
        '"{0}: {1}"'.format(k, v) for k, v in prepared.headers.items()
    ]
    headers = " -H ".join(headers)
    curl = command.format(method=prepared.method,
                          headers=headers,
                          data=prepared.body,
                          uri=prepared.url)
    self.last_curl = curl
    self.last_request = prepared
    # send request
    try:
        # total=0 disables the overall client timeout
        timeout = aiohttp.ClientTimeout(total=0)
        async with RetryClient(headers=headers_req, timeout=timeout) as session:
            try:
                # NOTE(review): the actual request uses self.environment + path,
                # not full_url - for is_dataloop=False calls this differs from
                # the prepared/curl url above; confirm intent.
                async with session._request(
                        request=session._client.request,
                        url=self.environment + path,
                        method=req_type,
                        json=json_req,
                        data=data,
                        headers=headers_req,
                        chunked=stream,
                        retry_attempts=5,
                        retry_exceptions={
                            aiohttp.client_exceptions.ClientOSError,
                            aiohttp.client_exceptions.ServerDisconnectedError,
                            aiohttp.client_exceptions.ClientPayloadError
                        },
                        raise_for_status=False) as request:
                    if stream:
                        pbar = self.__get_pbar(
                            pbar=pbar,
                            total_length=request.headers.get("content-length"))
                    if filepath is not None:
                        # stream the body into a file path or BytesIO buffer
                        to_close = False
                        if isinstance(filepath, str):
                            to_close = True
                            buffer = open(filepath, 'wb')
                        elif isinstance(filepath, io.BytesIO):
                            pass
                        else:
                            raise ValueError(
                                'unknown data type to write file: {}'.
                                format(type(filepath)))
                        try:
                            while True:
                                chunk = await request.content.read(chunk_size)
                                if not chunk:
                                    break
                                buffer.write(chunk)
                                if pbar is not None:
                                    pbar.update(len(chunk))
                        finally:
                            # only close handles this function opened itself
                            if to_close:
                                buffer.close()
                    if pbar is not None:
                        pbar.close()
                    text = await request.text()
                    try:
                        _json = await request.json()
                    except:
                        # body is not valid JSON
                        _json = dict()
                    response = AsyncResponse(text=text,
                                             _json=_json,
                                             async_resp=request)
            except Exception as err:
                response = AsyncResponseError(error=err,
                                              trace=traceback.format_exc())
            finally:
                # count every call, success or failure
                with threadLock:
                    self.calls_counter.add()
    except Exception:
        logger.error(self.print_request(req=prepared, to_return=True))
        raise
    self.last_response = response
    # handle output
    if not response.ok:
        self.print_bad_response(response, log_error=log_error and not self.is_cli)
        return_type = False
    else:
        try:
            # print only what is printable (dont print get steam etc..)
            if not stream:
                self.print_response(response)
        except ValueError:
            # no JSON returned
            pass
        return_type = True
    return return_type, response
def inject_user_agent(application_scope):
    """Build the User-Agent string for web requests from an application scope's name and version."""
    scope = application_scope
    return user_agent(scope.app_name, scope.version)
def verify_application_health(self, **kwargs): """ Verify an application is healthy via the router. This is only used in conjunction with the kubernetes health check system and should only run after kubernetes has reported all pods as healthy """ # Bail out early if the application is not routable if not kwargs.get('routable', False): return app_type = kwargs.get('app_type') self.log( 'Waiting for router to be ready to serve traffic to process type {}' .format(app_type), level=logging.DEBUG) # Get the router host and append healthcheck path url = 'http://{}:{}'.format(settings.ROUTER_HOST, settings.ROUTER_PORT) # if a httpGet probe is available then 200 is the only acceptable status code if 'livenessProbe' in kwargs.get( 'healthcheck', {}) and 'httpGet' in kwargs.get( 'healthcheck').get('livenessProbe'): # noqa allowed = [200] handler = kwargs['healthcheck']['livenessProbe']['httpGet'] url = urljoin(url, handler.get('path', '/')) req_timeout = handler.get('timeoutSeconds', 1) else: allowed = set(range(200, 599)) allowed.remove(404) req_timeout = 3 session = requests.Session() session.headers = { # https://toolbelt.readthedocs.org/en/latest/user-agent.html#user-agent-constructor 'User-Agent': user_agent('Deis Controller', deis_version), # set the Host header for the application being checked - not used for actual routing 'Host': '{}.{}.nip.io'.format(self.id, settings.ROUTER_HOST) } # `mount` a custom adapter that retries failed connections for HTTP and HTTPS requests. 
# http://docs.python-requests.org/en/latest/api/#requests.adapters.HTTPAdapter session.mount('http://', requests.adapters.HTTPAdapter(max_retries=10)) session.mount('https://', requests.adapters.HTTPAdapter(max_retries=10)) # Give the router max of 10 tries or max 30 seconds to become healthy # Uses time module to account for the timout value of 3 seconds start = time.time() failed = False for _ in range(10): try: # http://docs.python-requests.org/en/master/user/advanced/#timeouts response = session.get(url, timeout=req_timeout) failed = False except requests.exceptions.RequestException: # In case of a failure where response object is not available failed = True # We are fine with timeouts and request problems, lets keep trying time.sleep(1) # just a bit of a buffer continue # 30 second timeout (timout per request * 10) if (time.time() - start) > (req_timeout * 10): break # check response against the allowed pool if response.status_code in allowed: break # a small sleep since router usually resolve within 10 seconds time.sleep(1) # Endpoint did not report healthy in time if ('response' in locals() and response.status_code == 404) or failed: delta = time.time() - start self.log( 'Router was not ready to serve traffic to process type {} in time, waited {} seconds' .format(app_type, delta), # noqa level=logging.WARNING) return self.log('Router is ready to serve traffic to process type {}'.format( app_type), level=logging.DEBUG)
def user_agent(self):
    """Return the User-Agent string identifying this Poetry version."""
    agent = user_agent('poetry', __version__)
    return agent
def init(self, config_location=None):
    """Initialize the SDK from a config file and authenticate against the API.

    :param config_location: optional path to the JSON config file; when omitted,
        the default ~/.superannotate/config.json is used and failures are
        swallowed (authentication state is just reset).
    :raises SABaseException: only when an explicit config_location was given
        and the file is missing/malformed or authentication fails.
    """
    if config_location is None:
        # default location; remember that so failures below stay silent
        config_location = Path.home() / ".superannotate" / "config.json"
        from_none = True
    else:
        config_location = Path(config_location)
        from_none = False
    try:
        if not config_location.is_file():
            raise SABaseException(
                0, "SuperAnnotate config file " + str(config_location) +
                " not found. Please provide correct config file location to sa.init(<path>) or use CLI's superannotate init to generate default location config file."
            )
        # NOTE(review): the file handle returned by open() is never closed;
        # consider a with-block.
        self._api_config = json.load(open(config_location))
        try:
            self._token = self._api_config["token"]
        except KeyError:
            raise SABaseException(
                0,
                "Incorrect config file: 'token' key is not present in the config file "
                + str(config_location))
        try:
            # the token is expected to embed the team id after an '=' sign
            self.team_id = int(self._token.split("=")[1])
        except Exception:
            raise SABaseException(
                0,
                "Incorrect config file: 'token' key is not valid in the config file "
                + str(config_location))
        # default headers sent with every API request
        self._default_headers = {'Authorization': self._token}
        self._default_headers["authtype"] = "sdk"
        if "authtype" in self._api_config:
            # config file may override the auth type
            self._default_headers["authtype"] = self._api_config[
                "authtype"]
        self._default_headers['User-Agent'] = requests_toolbelt.user_agent(
            'superannotate', __version__)
        self._main_endpoint = "https://api.annotate.online"
        if "main_endpoint" in self._api_config:
            # config file may point at a non-default endpoint
            self._main_endpoint = self._api_config["main_endpoint"]
        self._verify = True
        self._session = None
        self._authenticated = True
        # smoke-test the credentials with a minimal one-project listing
        response = self.send_request(req_type='GET',
                                     path='/projects',
                                     params={
                                         'team_id': str(self.team_id),
                                         'offset': 0,
                                         'limit': 1
                                     })
        if not response.ok:
            self._authenticated = False
            self._session = None
            if "Not authorized" in response.text:
                raise SABaseException(
                    0, "Couldn't authorize " + response.text)
            raise SABaseException(
                0, "Couldn't reach superannotate " + response.text)
    except SABaseException:
        # reset auth state; re-raise only if the caller supplied the path
        self._authenticated = False
        self._session = None
        self.team_id = None
        if not from_none:
            raise
def test_user_agent_provides_package_version(self):
    """The generated User-Agent string contains the given package version."""
    built = user_agent("my-package", "0.0.1")
    assert "0.0.1" in built
def test_user_agent_provides_package_name(self):
    """The generated User-Agent string contains the given package name."""
    built = user_agent("my-package", "0.0.1")
    assert "my-package" in built
# Interactive script: builds a requests Session with a user-supplied
# User-Agent and defines helpers to pull display names out of a reddit-style
# JSON listing.
import time
import pandas as pd
import re
from pprint import pprint
from requests_toolbelt import user_agent
from requests import Session

# Prompts the user for User-Agent info to create custom header
user_agent_name = str(input("What is your User-Agent name?: "))
script_name = str(input("What is your script called?: "))
script_vers = str(
    input("What's the version number of this script (e.g, 0.0.1)?: "))
my_script = "{}/{}".format(script_name, script_vers)
s = Session()
s.headers = {'User-Agent': user_agent(user_agent_name, my_script)}

# Define functions to parse json
header = ['display_name']


def parse_response(User):
    # NOTE(review): `User` is never used, and `data` is not defined anywhere
    # in this file - calling this raises NameError; presumably `data` should
    # be a parameter holding the fetched JSON. Confirm intent.
    pprint(display_name())
    return data


def display_name():
    # NOTE(review): relies on globals `data` and `n` that are not defined in
    # this file - verify they are set before this is called.
    result = data['data']['children'][n]['data']['display_name']
    return result
def user_agent(self):
    """Return the User-Agent string identifying this Poetry version."""
    ua = user_agent("poetry", __version__)
    return ua
"""User agent and robots cache.""" import resource from functools import lru_cache import redis import requests import requests_toolbelt import reppy.robots import tsa from tsa.extensions import redis_pool soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE) user_agent = requests_toolbelt.user_agent('tsa', tsa.__version__, extras=[('requests', requests.__version__)]) session = requests.Session() session.headers.update({'User-Agent': user_agent}) a = requests.adapters.HTTPAdapter(pool_connections=1000, pool_maxsize=(soft - 10), max_retries=3, pool_block=True) session.mount('http://', a) session.mount('https://', a) def allowed(iri): robots_iri = reppy.robots.Robots.robots_url(iri) text = fetch_robots(robots_iri) if text is None:
from trading_bots.core.storage import Store, get_store

from .errors import *
from .models import *
from .utils import parse_money

__all__ = [
    "USER_AGENT",
    "ClientWrapper",
    "BaseClient",
    "MarketClient",
    "WalletClient",
    "TradingClient",
]

# shared User-Agent for every client derived from this module
USER_AGENT = user_agent("trading-bots", version("trading_bots"))


class ClientWrapper(abc.ABC):
    """Base wrapper that loads per-client credentials and settings by name."""

    # NOTE(review): annotated str but defaults to None; subclasses are
    # expected to set a real name (enforced by the assert below).
    name: str = None

    def __init__(self, client_params: Dict = None, name: str = None):
        # NOTE(review): this assert runs before the `name` parameter is
        # applied below, so passing name= cannot satisfy it - the class
        # attribute must already be set. Confirm intent.
        assert self.name, "A name must be defined for the client!"
        # credentials are looked up by client name in the global settings
        credentials = getattr(settings, "credentials", {})
        self.credentials: Dict = credentials.get(self.name, {})
        self.timeout: int = settings.timeout
        self.client_params: Dict = self._build_client_params(
            **client_params or {})
        if name is not None:
            self.name = name
def user_agent(self) -> str:
    """Return the User-Agent string identifying this Poetry version."""
    return user_agent("poetry", __version__)