def do_shorten(self, context):
    session = Session()
    params = {"url": unicode(context["url"])}
    d = session.get(self.base_url, params=params)
    d.addCallbacks(
        self.shorten_success, self.shorten_error
    )
    return d
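# A minimal sketch of the callback pair that do_shorten() wires up. The names
# shorten_success and shorten_error come from the snippet above, but these
# bodies are assumptions for illustration: txrequests fires the Deferred with
# a plain requests.Response, so a shortener that returns the short URL as the
# response body could be handled like this.
def shorten_success(self, response):
    # The shortened URL is assumed to be the response body.
    return response.text.strip()

def shorten_error(self, failure):
    # Log, then pass the failure along so downstream errbacks still see it.
    self.logger.error("URL shortening failed: {0}".format(failure))
    return failure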
def __init__(self, url, fqdn=False, localname=None, facility=None,
             session=None):
    logging.Handler.__init__(self)
    self.url = url
    self.fqdn = fqdn
    self.localname = localname
    self.facility = facility
    self.session = session if session is not None else Session()
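# A hedged sketch of the emit() such a handler typically pairs with: the log
# record is serialized and posted through the (possibly shared) txrequests
# session, keeping logging non-blocking under Twisted. The payload shape here
# is an assumption, not the original wire format.
def emit(self, record):
    payload = {
        "message": self.format(record),
        "facility": self.facility,
    }
    # Session.post() returns a Deferred; fire-and-forget is acceptable for
    # logging, but an errback avoids unhandled-error noise.
    d = self.session.post(self.url, json=payload)
    d.addErrback(lambda failure: None)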
def do_request():
    proxies = {}
    if proxy_url:
        proxies['http'] = proxies['https'] = proxy_url
    elif proxy_host:
        proxies['http'] = proxies['https'] = '{}:{}'.format(
            proxy_host, proxy_port)

    headers = kwargs.get('headers')
    body = kwargs.get('body')
    disable_tls_verification = kwargs.get('disable_tls_verification', False)
    allow_redirects = kwargs.get('allow_redirects', False)
    params = kwargs.get('params')
    cookies = kwargs.get('cookies')
    auth = kwargs.get('auth')
    digest_auth = kwargs.get('digest_auth')

    args = {
        'method': method,
        'url': url,
        'verify': not disable_tls_verification,
        'timeout': timeout,
        'allow_redirects': allow_redirects,
    }

    if headers:
        args['headers'] = headers
    if body:
        args['data'] = body
    if proxies:
        args['proxies'] = proxies
    if params:
        args['params'] = params
    if cookies:
        args['cookies'] = cookies
    if auth:
        args['auth'] = auth
    if digest_auth:
        args['auth'] = HTTPDigestAuth(digest_auth)
    if disable_tls_verification:
        disable_warnings()

    with Session() as session:
        request = session.request(**args)
        response = yield request

    if response.status_code != expected_code:
        raise RuntimeError("Unexpected response code: {}".format(
            response.status_code))
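# How a generator like do_request() is typically driven: session.request()
# returns a Deferred, so the generator only works once wrapped with
# inlineCallbacks, at which point each yield resolves to a requests.Response.
# A minimal standalone sketch, with method/url/timeout/expected_code passed as
# parameters rather than closed over (these names are illustrative):
from twisted.internet import defer
from txrequests import Session

@defer.inlineCallbacks
def fetch(method, url, timeout=30, expected_code=200):
    with Session() as session:
        response = yield session.request(method=method, url=url,
                                         timeout=timeout)
    if response.status_code != expected_code:
        raise RuntimeError("Unexpected response code: {}".format(
            response.status_code))
    defer.returnValue(response)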
@classmethod
def new_instance(cls, enabled=None):
    """Initialize an instance using values from the configuration"""
    # txrequests' Session is a Twisted-friendly asynchronous version of the
    # requests library's Session.
    session = Session()
    if enabled is None:
        # Whether to share usage statistics and diagnostics with LBRY.
        enabled = conf.settings['share_usage_data']
    return cls(
        session,
        # The two settings below hold the API credentials for
        # https://segment.com/, a service that site owners upload user data
        # to; it offers 200+ tools for analysing that data.
        conf.settings['ANALYTICS_ENDPOINT'],
        utils.deobfuscate(conf.settings['ANALYTICS_TOKEN']),
        enabled,
    )
def __init__(self, pool=None, minthreads=1, maxthreads=4, req_args=None,
             req_kwargs=None, session_kwargs=None):
    if not req_args:
        req_args = []
    if not req_kwargs:
        req_kwargs = {}
    if not session_kwargs:
        session_kwargs = {}

    self._args = req_args
    self._kwargs = req_kwargs
    self._session = Session(pool=pool, minthreads=minthreads,
                            maxthreads=maxthreads, **session_kwargs)
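# This initializer matches the LazyRequest(req_args=[...]) call seen in the
# message handler further down: the request arguments are stored until the
# request is actually issued, while the pool/thread settings configure the
# underlying txrequests Session immediately. A hedged usage sketch (the
# keyword values here are illustrative):
lazy = LazyRequest(
    maxthreads=2,
    req_args=["http://example.com/"],
    req_kwargs={"timeout": 30},
)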
def reload(self):
    self.teardown()

    self.group_sessions = {}
    self.resolver = AddressResolver()

    proxy = self.plugin.get_proxy()

    if not proxy:
        self.global_session = Session()
    else:
        self.global_session = ProxySession(proxy)

    try:
        self.global_session.cookies = self.get_cookie_jar("/global.txt")
        self.global_session.session_type = "global"
        self.global_session.cookies.set_mode(
            self.plugin.config.get("sessions", {})
            .get("cookies", {})
            .get("global", "discard")
        )
    except ValueError as e:
        self.urls_plugin.logger.error(
            "Failed to create global cookie jar: {0}".format(e)
        )
def test_session(self):
    # basic deferred get
    with Session() as sess:
        d = sess.get(httpbin('get'))
        self.assertIsInstance(d, defer.Deferred)
        resp = yield d
        self.assertIsInstance(resp, Response)
        self.assertEqual(200, resp.status_code)

        # non-200, 404
        d = sess.get(httpbin('status/404'))
        resp = yield d
        self.assertEqual(404, resp.status_code)

        def cb(s, r):
            self.assertIsInstance(s, Session)
            self.assertIsInstance(r, Response)
            # add the parsed json data to the response
            r.data = r.json()
            return r

        d = sess.get(httpbin('get'), background_callback=cb)
        # this should block until complete
        resp = yield d
        self.assertEqual(200, resp.status_code)
        # make sure the callback was invoked
        self.assertTrue(hasattr(resp, 'data'))

        def raising_cb(s, r):
            raise Exception('boom')

        d = sess.get(httpbin('get'), background_callback=raising_cb)
        raised = False
        try:
            resp = yield d
        except Exception as e:
            self.assertEqual('boom', e.args[0])
            raised = True
        self.assertTrue(raised)
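# Outside the test harness, the same background_callback pattern looks like
# this: the callback runs on txrequests' worker thread, so CPU-bound work such
# as JSON parsing stays off the reactor thread. A minimal sketch (the httpbin
# URL is illustrative):
from twisted.internet import defer
from txrequests import Session

@defer.inlineCallbacks
def fetch_json():
    def parse(session, response):
        response.data = response.json()  # runs in the worker thread
        return response

    with Session() as sess:
        resp = yield sess.get("http://httpbin.org/get",
                              background_callback=parse)
    defer.returnValue(resp.data)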
import logging
import os
import random
import sys
import time

import boto
import psycopg2
import requests
from faker import Faker
from twisted.internet import defer
from twisted.internet.task import react
from txrequests import Session

import push_helper as ph

session = Session(maxthreads=10)
responses = []

# Instantiate logger
logger = logging.getLogger(__name__)
logger.setLevel(logging.ERROR)

# Create a file handler
handler = logging.FileHandler('/home/ubuntu/push_engine/push_engine.log')
handler.setLevel(logging.ERROR)

# Create a logging format
formatter = logging.Formatter(
    '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)
def get_session(self, url, context):
    sessions = context.get("config", {}).get("sessions", {})

    if not sessions.get("enable", False):
        self.urls_plugin.logger.debug("Sessions are disabled.")
        proxy = self.urls_plugin.get_proxy(url)

        if not proxy:
            s = Session()
        else:
            s = ProxySession(proxy)

        s.session_type = None
        return s

    for entry in sessions["never"]:
        if re.match(entry, url.domain, flags=str_to_regex_flags("ui")):
            self.urls_plugin.logger.debug(
                "Domain {0} is blacklisted for sessions.".format(
                    url.domain
                )
            )

            proxy = self.urls_plugin.get_proxy(url)

            if not proxy:
                s = Session()
            else:
                s = ProxySession(proxy)

            s.session_type = None
            return s

    for group, entries in sessions["group"].iteritems():
        for entry in entries:
            try:
                if re.match(
                    entry, url.domain, flags=str_to_regex_flags("ui")
                ):
                    self.urls_plugin.logger.debug(
                        "Domain {0} uses the '{1}' group sessions.".format(
                            url.domain, group
                        )
                    )

                    if group not in self.group_sessions:
                        proxy = self.urls_plugin.get_proxy(group=group)

                        if not proxy:
                            s = Session()
                        else:
                            s = ProxySession(proxy)

                        s.cookies = self.get_cookie_jar(
                            "/groups/{0}.txt".format(group)
                        )
                        s.session_type = "group"
                        s.cookies.set_mode(
                            context.get("config")
                            .get("sessions")
                            .get("cookies")
                            .get("group")
                        )

                        self.group_sessions[group] = s
                    return self.group_sessions[group]
            except ValueError as e:
                self.urls_plugin.logger.error(
                    "Failed to create cookie jar: {0}".format(e)
                )
                continue

    self.urls_plugin.logger.debug(
        "Domain {0} uses the global session storage.".format(
            url.domain
        )
    )

    proxy = self.urls_plugin.get_proxy(url)

    if not proxy:
        return self.global_session
    else:
        s = ProxySession(proxy)
        s.cookies = self.get_cookie_jar("/global.txt")
        s.session_type = "global"
        s.cookies.set_mode(
            self.plugin.config.get("sessions", {})
            .get("cookies", {})
            .get("global", "discard")
        )
        return s
def reload(self):
    self.teardown()
    self.session = Session()
@classmethod
def new_instance(cls, session=None):
    """Initialize an instance using values from the configuration"""
    if not session:
        session = Session()
    return cls(session,
               conf.settings['ANALYTICS_ENDPOINT'],
               utils.deobfuscate(conf.settings['ANALYTICS_TOKEN']))
def __init__(self, api_key=None, client_id=None):
    self.api_key = api_key
    self.client_id = client_id
    self._session = Session()
def message_handler(self, event):
    """
    Event handler for general messages
    """
    protocol = event.caller
    source = event.source
    target = event.target
    message = event.message

    if protocol.TYPE == "irc":
        message = protocol.utils.strip_formatting(message)

    allowed = self.commands.perm_handler.check("urls.trigger", source,
                                               target, protocol)
    if not allowed:
        return

    if isinstance(target, Channel):
        self.ensure_channel(protocol.name, target.name)

        status = self.channels.get(protocol.name, {})\
            .get(target.name, {})\
            .get("status", True)

        if not status or status == "off":
            return

    matches = extract_urls(message)

    for match in matches:
        self.logger.trace("match: {0}", match)
        _url = self.match_to_url(match)

        if _url is None:
            continue

        # Check redirects, following as necessary
        redirects = 0
        max_redirects = self.config.get("redirects", {}).get("max", 15)
        domains = self.config.get("redirects", {}).get("domains", [])

        self.logger.debug("Checking redirects...")

        while _url.domain in domains and redirects < max_redirects:
            redirects += 1
            session = Session()

            #: :type: requests.Response
            r = yield session.get(unicode(_url), allow_redirects=False)

            if r.is_redirect:
                # This only ever happens when we have a well-formed
                # redirect that could have been handled automatically
                redirect_url = r.headers["location"]

                self.logger.debug("Redirect [{0:03d}] {1}".format(
                    redirects, redirect_url))

                _url = self.match_to_url(extract_urls(redirect_url)[0])
            else:
                break

        if redirects >= max_redirects:
            self.logger.debug("URL has exceeded the redirects limit")
            return

        lazy_request = LazyRequest(req_args=[unicode(_url)])

        if isinstance(target, Channel):
            with self.channels:
                self.channels[protocol.name][target.name]["last"] = (
                    unicode(_url))

        yield self.run_handlers(
            _url, {
                "event": event,
                "config": self.config,
                "get_request": lazy_request,
                "redirects": redirects,
                "max_redirects": max_redirects
            })
def __init__(self, max_concurrent_requests):
    self.session = Session(maxthreads=max_concurrent_requests)
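# Because each Session.get() returns a Deferred immediately, a pool sized by
# max_concurrent_requests lets callers fan requests out and gather the
# results. A hedged sketch of that fan-out (fetch_all is illustrative, not
# part of the original):
from twisted.internet import defer

def fetch_all(session, urls):
    deferreds = [session.get(u) for u in urls]
    return defer.DeferredList(deferreds, consumeErrors=True)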
# coding=utf-8
from txrequests import Session

from utils.html import unescape_html_entities

__author__ = "Gareth Coles"

url = "http://ajax.googleapis.com/ajax/services/search/web"
session = Session()


def get_results(query, page=0, limit=None):
    if limit is None:
        limit = 4

    start = int(page * limit)  # In case some fool passes a float

    if start > 0:
        start -= 1

    return session.get(url, params={"v": "1.0", "start": start, "q": query})


def parse_results(json, limit=None):
    if limit is None:
        limit = 4

    result = {}
    i = 1
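# parse_results() is truncated above; a hedged usage sketch for get_results()
# follows. The Deferred fires with a requests.Response, so the JSON body is
# typically decoded before being handed to parse_results() (this chaining is
# an assumption, not the original call site):
def search(query):
    d = get_results(query, page=0, limit=4)
    d.addCallback(lambda response: parse_results(response.json()))
    return d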