Example 1
    def __init__(self, auth_url, username, api_key, pool=None, proxy=None,
                 extra_headers=None, verbose=False):
        self.auth_url = auth_url
        self.username = username
        self.api_key = api_key
        self.storage_url = None
        self.auth_token = None
        self.pool = pool

        if proxy:
            if ":" in proxy:
                addr, port = proxy.rsplit(":", 1)
                port = int(port)
            else:
                addr, port = proxy, 8000

            endpoint = TCP4ClientEndpoint(reactor, addr, port)
            self.agent = ProxyAgent(endpoint, pool=self.pool)
        else:
            contextFactory = WebClientContextFactory()
            contextFactory.noisy = False
            self.agent = Agent(reactor, contextFactory, pool=self.pool)

        self.extra_headers = extra_headers
        self.verbose = verbose
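The constructor above accepts an optional proxy given either as "host" (the port then defaults to 8000) or as "host:port", and routes requests through Twisted's ProxyAgent in that case. A minimal usage sketch, assuming the enclosing class is swftp's SwiftConnection (the class name is not shown in the snippet) and an illustrative proxy address:

from twisted.internet import reactor
from twisted.web.client import HTTPConnectionPool

# Hypothetical instantiation; SwiftConnection is assumed to be the class
# that owns the __init__ shown above.
pool = HTTPConnectionPool(reactor, persistent=True)
conn = SwiftConnection(
    'https://auth.example.org/v1.0',   # auth_url
    'account:user',                    # username
    'secret-api-key',                  # api_key
    pool=pool,
    proxy='proxy.example.org:3128',    # "host" alone would default to port 8000
    verbose=True)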
Example 2
 def __init__(self, auth_url, username, api_key, pool=None,
              extra_headers=None, verbose=False):
     self.auth_url = auth_url
     self.username = username
     self.api_key = api_key
     self.storage_url = None
     self.auth_token = None
     contextFactory = WebClientContextFactory()
     contextFactory.noisy = False
     self.pool = pool
     self.agent = Agent(reactor, contextFactory, pool=self.pool)
     self.extra_headers = extra_headers
     self.verbose = verbose
Example 3
    def _make_request(self, method, url, headers=None, data=None, files=None,
                      timeout=None, context_factory=None,
                      data_limit=None):
        context_factory = (context_factory if context_factory is not None
                           else WebClientContextFactory())

        if headers is not None:
            headers = dict((k.encode("utf-8"), [x.encode("utf-8") for x in v])
                           for k, v in headers.items())

        if data is not None:
            data = data.encode("utf-8")

        if files is not None:
            files = dict([
                (key,
                    (value['file_name'],
                     value['content_type'],
                     StringIO(base64.b64decode(value['data']))))
                for key, value in files.iteritems()])

        agent = self.agent_class(reactor, contextFactory=context_factory)
        http_client = self.http_client_class(agent)

        d = http_client.request(method, url, headers=headers, data=data,
                                files=files, timeout=timeout)

        d.addCallback(self._ensure_data_limit, data_limit)
        return d
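For reference, _make_request expects headers as a mapping of header name to a list of values, and files as a mapping of field name to a dict carrying file_name, content_type and base64-encoded data. A hedged sketch of those argument shapes, written as if called from another method of the same class (all values are illustrative):

import base64

headers = {'Content-Type': ['application/json']}      # name -> list of values
files = {
    'attachment': {
        'file_name': 'report.csv',
        'content_type': 'text/csv',
        'data': base64.b64encode('a,b,c\n1,2,3\n'),    # body is base64-encoded
    },
}
d = self._make_request('POST', 'http://example.org/upload',
                       headers=headers, files=files, timeout=30)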
Example 4
    def test_init(self):
        url = 'https://example.org/abcdef'
        version = '2.0'

        proxy = Proxy(url, version, contextFactory=WebClientContextFactory())
        self.assertEquals(proxy.url, url)
        self.assertEquals(proxy.version, version)
Example 5
File: swift.py Project: joonp/swftp
 def __init__(self,
              auth_url,
              username,
              api_key,
              pool=None,
              extra_headers=None,
              verbose=False):
     self.auth_url = auth_url
     self.username = username
     self.api_key = api_key
     self.storage_url = None
     self.auth_token = None
     contextFactory = WebClientContextFactory()
     contextFactory.noisy = False
     self.pool = pool
     self.agent = Agent(reactor, contextFactory, pool=self.pool)
     self.extra_headers = extra_headers
     self.verbose = verbose
Example 6
    def __init__(self,
                 reactor,
                 contextFactory=WebClientContextFactory(),
                 connectTimeout=None,
                 bindAddress=None):
        Agent.__init__(self, reactor, contextFactory)

        self._connectTimeout = connectTimeout
        self._bindAddress = bindAddress
Example 7
class Repeater(object):

    USER_AGENT = 'STXNext Intranet 2 Cron task'
    contextFactory = WebClientContextFactory()
    client = Agent(reactor, contextFactory)

    def __init__(self, *actions):
        self.headers = {
            'User-Agent': [self.USER_AGENT],
            'X-Intranet-Cron': [config['CRON_SECRET_KEY']]
        }
        self.actions = {}
        for action in actions:
            self.actions[action.name] = action.url_producer
        self.file_path = config['REPEATER_FILE']

    def pending(self):
        """ returns a list of pending entries """
        result = []
        yesterday = previous_day(datetime.date.today())
        cron_url = config['CRON_URL']
        with PickleStore(self.file_path) as store:
            for date, action_name in store.get_pending(yesterday,
                                                       self.actions.keys()):
                result.append(
                    ('%s%s' % (cron_url, self.actions[action_name](date)),
                     partial(self.update, date, action_name)))
        return result

    def update(self, date, action_name, done):
        with PickleStore(self.file_path) as store:
            store.update(date, action_name, done)

    def on_success(self, url, callback, resp):
        LOG(u"Repeater %s succeeded with status %s" % (url, resp.code))
        callback(resp.code == 200)

    def on_failure(self, url, callback, err):
        EXCEPTION(u"Repeater %s failed %s" % (url, err))
        callback(False)

    def __call__(self):
        DEBUG(u"Repeater starting")
        i = 0
        try:
            for url, callback in self.pending():
                DEBUG(u"Will call action %s" % (url, ))
                deferred = self.client.request('GET', url,
                                               Headers(self.headers))
                deferred.addCallbacks(partial(self.on_success, url, callback),
                                      partial(self.on_failure, url, callback))
                i += 1
        except:
            EXCEPTION(u"Repeater could not start")
        DEBUG(u"Repeater started %s jobs" % (i, ))
Example 8
class URLCronTask(object):

    USER_AGENT = 'STXNext Intranet 2 Cron task'
    contextFactory = WebClientContextFactory()
    client = Agent(reactor, contextFactory)

    def __init__(self, task_name, url, repeats=None):
        self.busy = False
        self.task_name = task_name
        self.url = '%s%s' % (config['CRON_URL'], url)
        self.repeats = repeats
        self.repeated = 0

    def get_headers(self):
        """ Generate request headers (as a dictionary) """
        return {
            'User-Agent': [self.USER_AGENT],
            'X-Intranet-Cron': [config['CRON_SECRET_KEY']]
        }

    def request(self, url, headers, method='GET'):
        LOG(u'Will request URL %s with headers %s, method %s' %
            (url, pformat(headers), method))
        deferred = self.client.request(method, url, Headers(headers))
        deferred.addCallbacks(self.on_success, self.on_failure)

    def on_failure(self, err):
        self.busy = False
        EXCEPTION(u"Cron task [%s] failed: %s" % (self.task_name, err))

    def on_success(self, resp):
        self.busy = False
        LOG(u'Cron function [%s] finished (status code %s)' %
            (self.task_name, resp.code))

    def __call__(self):
        LOG(u'Cron function [%s] starting (%s)' %
            (self.task_name, self.repeated))
        if not self.busy:
            self.busy = True
            self.repeated = 0
            self.request(self.url, self.get_headers())
        elif (self.repeats is not None) and self.repeated >= self.repeats:
            WARN(u"Overriding busy action [%s] on %s/%s time" %
                 (self.task_name, self.repeated, self.repeats))
            self.repeated = 0
            self.request(self.url, self.get_headers())
        else:
            self.repeated += 1
            WARN(u'Action [%s] is busy (%s/%s)' %
                 (self.task_name, self.repeated, self.repeats))
Example 9
    def test_http_request_with_custom_context_factory(self):
        self.set_render(lambda r: "Yay")

        ctxt = WebClientContextFactory()

        class FakeAgent(Agent):
            def __init__(slf, reactor, contextFactory=None):
                self.assertEqual(contextFactory, ctxt)
                super(FakeAgent, slf).__init__(reactor, contextFactory)

        request = yield http_request_full(self.url,
                                          '',
                                          context_factory=ctxt,
                                          agent_class=FakeAgent)
        self.assertEqual(request.delivered_body, "Yay")
        self.assertEqual(request.code, http.OK)
Example 10
    def getContext(self, hostname, port):
        ctx = WebClientContextFactory.getContext(self, hostname, port)

        if self.cfile:
            try:
                x509Obj = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, self.cfile)
                ctx.use_certificate(x509Obj)
            except:
                ctx.use_certificate_file(self.cfile)
        if self.kfile:
            try:
                pkey = OpenSSL.crypto.load_privatekey(OpenSSL.crypto.FILETYPE_PEM, self.kfile)
                ctx.use_privatekey(pkey)
            except:
                ctx.use_privatekey_file(self.kfile)
        return ctx
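The method above refers to self.cfile and self.kfile without showing where they are set. A hedged reconstruction of the kind of context-factory subclass it presumably belongs to (the class name and constructor are assumptions): each attribute may hold either PEM data or a file path, which is why getContext() falls back from load_certificate() to use_certificate_file().

class ClientCertContextFactory(WebClientContextFactory):
    """Assumed enclosing class (hypothetical name) for the getContext() above."""

    def __init__(self, cfile=None, kfile=None):
        self.cfile = cfile   # client certificate: PEM string or path to a PEM file
        self.kfile = kfile   # private key: PEM string or path to a PEM file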
Example 11
def http_request_full(url,
                      data=None,
                      headers={},
                      method='POST',
                      timeout=None,
                      data_limit=None,
                      context_factory=None,
                      agent_class=None,
                      reactor=None):
    if reactor is None:
        # The import replaces the local variable.
        from twisted.internet import reactor
    if agent_class is None:
        agent_class = Agent
    context_factory = context_factory or WebClientContextFactory()
    agent = agent_class(reactor, contextFactory=context_factory)
    d = agent.request(method, url, mkheaders(headers),
                      StringProducer(data) if data else None)

    def handle_response(response):
        return SimplishReceiver(response, data_limit).deferred

    d.addCallback(handle_response)

    if timeout is not None:
        cancelling_on_timeout = [False]

        def raise_timeout(reason):
            if not cancelling_on_timeout[0] or reason.check(HttpTimeoutError):
                return reason
            return Failure(HttpTimeoutError("Timeout while connecting"))

        def cancel_on_timeout():
            cancelling_on_timeout[0] = True
            d.cancel()

        def cancel_timeout(r, delayed_call):
            if delayed_call.active():
                delayed_call.cancel()
            return r

        d.addErrback(raise_timeout)
        delayed_call = reactor.callLater(timeout, cancel_on_timeout)
        d.addCallback(cancel_timeout, delayed_call)

    return d
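A hedged usage sketch for http_request_full, combining a custom context factory with the timeout handling shown above (URL and payload are illustrative); the returned response exposes code and delivered_body, as the tests elsewhere in this listing rely on.

def on_response(response):
    print "HTTP %s: %r" % (response.code, response.delivered_body)

def on_timeout(failure):
    failure.trap(HttpTimeoutError)
    print "request timed out"

d = http_request_full('http://example.org/api', data='{"ping": 1}',
                      headers={'Content-Type': ['application/json']},
                      method='POST', timeout=10,
                      context_factory=WebClientContextFactory())
d.addCallbacks(on_response, on_timeout)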
Example 12
    def test_http_request_with_custom_context_factory(self):
        self.set_render(lambda r: "Yay")
        agents = []

        ctxt = WebClientContextFactory()

        def stashing_factory(reactor, contextFactory=None, pool=None):
            agent = self.fake_http.get_agent(
                reactor, contextFactory=contextFactory, pool=pool)
            agents.append(agent)
            return agent

        request = yield http_request_full(
            self.url, '', context_factory=ctxt, agent_class=stashing_factory)
        self.assertEqual(request.delivered_body, "Yay")
        self.assertEqual(request.code, http.OK)
        [agent] = agents
        self.assertEqual(agent.contextFactory, ctxt)
Example 13
    def test_callRemote(self):
        """
        The test itself passes, but trial raises "Reactor was unclean" after
        tearDown. Might be related to
        http://twistedmatrix.com/trac/ticket/5118
        """
        data = 'some random string'

        addr = 'https://localhost:%s' % self.portNumber
        proxy = Proxy(addr,
                      jsonrpc.VERSION_1,
                      contextFactory=WebClientContextFactory())
        d = proxy.callRemote('echo', data)

        def finished(result):
            self.assertEquals(result, data)

        d.addCallback(finished)
        return d
Example 14
    def __init__(self,
                 url,
                 version=jsonrpc.VERSION_1,
                 connectTimeout=None,
                 credentials=Anonymous(),
                 contextFactory=WebClientContextFactory()):
        """
        @type url: str
        @param url: URL of the RPC server. Supports HTTP and HTTPS for now,
        more might come in the future.

        @type version: int
        @param version: The JSON-RPC version to use. The default is 1.0.

        @type connectTimeout: float
        @param connectTimeout: Connection timeout. Note that we don't connect
            when creating this object, but in callRemote, so the timeout
            will apply to callRemote.

        @type credentials: twisted.cred.credentials.ICredentials
        @param credentials: Credentials for basic HTTP authentication.
            Supported are Anonymous and UsernamePassword classes.

        @type contextFactory: twisted.internet.ssl.ClientContextFactory
        @param contextFactory: A context factory for SSL clients.
        """

        self.url = url
        self.version = version

        if not isinstance(credentials, (Anonymous, UsernamePassword)):
            raise NotImplementedError("'%s' credentials are not supported" %
                                      type(credentials))

        self.agent = Agent(reactor,
                           connectTimeout=connectTimeout,
                           contextFactory=contextFactory)
        self.credentials = credentials
        self.auth_headers = None
Example 15
    def __init__(self, reactor, api_key, api_secret):
        # Bitcoin.de API URI
        apihost = 'https://api.bitcoin.de'
        apiversion = 'v1'
        orderuri = apihost + '/' + apiversion + '/' + 'orders'
        tradeuri = apihost + '/' + apiversion + '/' + 'trades'
        accounturi = apihost + '/' + apiversion + '/' + 'account'
        # set initial nonce
        self.nonce = int(time.time())

        self.reactor = reactor
        # Reusing one persistent connection keeps the API credit accounting correct
        pool = HTTPConnectionPool(reactor)
        pool.maxPersistentPerHost = 1
        self.contextFactory = WebClientContextFactory()
        self.agent = Agent(self.reactor, self.contextFactory, pool=pool)

        self.api_key = api_key
        self.api_secret = api_secret

        self.calls = {}
        # Each entry: method, uri, required params with allowed values, credits,
        # field to return (after credits/pages are stripped)
        # Orders
        self.calls['showOrderbook'] = ['GET', orderuri, {'type': ['sell', 'buy']}, 2]
        self.calls['showOrderbookCompact'] = ['GET', orderuri + '/compact', {}, 3]
        self.calls['createOrder'] = ['POST', orderuri, {'type': ['sell', 'buy'], 'max_amount': [], 'price': []}, 1]
        self.calls['deleteOrder'] = ['DELETE', orderuri, {'order_id': []}, 2]
        self.calls['showMyOrders'] = ['GET', orderuri + '/my_own', {}, 2]  # Fix: all arguments are optional
        self.calls['showMyOrderDetails'] = ['GET', orderuri, {'order_id': []}, 2]
        # Trades
        self.calls['executeTrade'] = ['POST', tradeuri, {'order_id': [], 'amount': []}, 1]
        self.calls['showMyTradeDetails'] = ['GET', tradeuri, {'trade_id': []}, 3]
        self.calls['showMyTrades'] = ['GET', tradeuri, {}, 3]
        self.calls['showPublicTradeHistory'] = ['GET', tradeuri + '/history', {'since_tid': []}, 3]
        # Account
        self.calls['showAccountInfo'] = ['GET', accounturi, {}, 2]
        self.calls['showAccountLedger'] = ['GET', accounturi + '/ledger', {}, 3]
        # Other
        self.calls['showRates'] = ['GET', apihost + '/' + apiversion + '/rates', {}, 3]
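Each self.calls entry maps a call name to [HTTP method, URI, required parameters with their allowed values, credit cost]. A hedged sketch of how such an entry might be unpacked and validated before a request is issued; this helper is not part of the original class and the name is hypothetical:

    def _lookup_call(self, name, params):
        # Illustrative helper (assumed, not in the original source): unpack a
        # self.calls entry and check caller-supplied params against it.
        method, uri, required, credits = self.calls[name][:4]
        for key, allowed in required.items():
            if key not in params:
                raise ValueError("missing required parameter %r" % key)
            if allowed and params[key] not in allowed:
                raise ValueError("%r must be one of %r" % (key, allowed))
        return method, uri, credits

For example, _lookup_call('showOrderbook', {'type': 'buy'}) would return ('GET', orderuri, 2).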
Example 16
class BaseFetcher(object):
    
    USER_AGENT = 'Intranet Bug Fetcher'
    contextFactory = WebClientContextFactory()
    client = Agent(reactor, contextFactory)
    SLEEP_PERIOD = 0.1
    CACHE_TIMEOUT = 3 * 60  # 3 minutes
    redirect_support = False
    
    def __init__(self, tracker, credentials, login_mapping):
        self.tracker = tracker
        self.login = credentials.login
        self.password = credentials.password
        self.user = credentials.user
        self.login_mapping = login_mapping
        self.bugs = {}
        self.done = False
        self.error = None
        self.cache_key = None
        self.dependson_and_blocked_status = {}
        
    def run(self):
        """ start fetching tickets """
        try:
            # start asynchronous ticket fetching
            self.fetch()
        except:
            # failed to start ticket fetching
            self.done = True
            raise
        
    def request(self, url, headers, on_success, on_failure=None, method='GET', body=None):
        LOG(u'Will request URL %s' % (url, ))
        if on_failure is None:
            on_failure = self.failed
        deferred = self.client.request(
            method,
            url,
            Headers(headers),
            None if body is None else StringProducer(body)
        )
        def redirecting_on_success(resp):
            if resp.code == 302:
                LOG(u"Redirect (302) found in response")
                location = resp.headers.getRawHeaders('location')[0]
                self.request(location, headers, on_success, on_failure, 'GET', None)
            else:
                on_success(resp)
        deferred.addCallbacks(redirecting_on_success if self.redirect_support else on_success, on_failure)

    def failed(self, err):
        self.fail(err)
        EXCEPTION(u"Fetcher for tracker %s failed: %s" % (self.tracker.name, err))
        
    def success(self):
        self.done = True
        
    def get_headers(self):
        """ Generate request headers (as a dictionary) """
        return {
            'User-Agent': [self.USER_AGENT]
        }
    
    def fetch_user_tickets(self):
        """ Start fetching tickets for current user """
        raise NotImplementedError()
    
    def fetch_all_tickets(self):
        """ Start fetching tickets for all users in mapping """
        raise NotImplementedError()
    
    def fetch_user_resolved_bugs(self):
        """ Start fetching fixable tickets for current user """
        raise NotImplementedError()
    
    def fetch_all_resolved_bugs(self):
        """ Start fetching fixable tickets for all users """
        raise NotImplementedError()
    
    def fetch_bugs_for_query(self, ticket_ids, project_selector, component_selector, version):
        """ Start fetching all bugs matching given criteria """
        raise NotImplementedError()
    
    def fetch_resolved_bugs_for_query(self, ticket_id, project_selector, component_selector, version):
        """ Start fetching resolved bugs matching given criteria """
        raise NotImplementedError()
    
    def fetch_bug_titles_and_depends_on(self, ticket_ids):
        """ Start fetching bug titles and bug depends_on for bugs with given ids """
        # TODO other implementations
        self.success()
    
    def fetch_dependons_for_ticket_ids(self, ticket_ids):
        """ Start recursively fetching dependons for ticket ids """
        raise NotImplementedError()

    def fetch_scrum(self, sprint_name, project_id):
        raise NotImplementedError()

    def isReady(self):
        """ Check if this fetcher is done """
        return self.done
    
    def resolve_user(self, orig_login):
        login = orig_login.lower()
        if login in self.login_mapping:
            return self.login_mapping[login]
        else:
            return User(name=orig_login, email=orig_login)
    
    def resolve(self, bug):
        bug.owner = self.resolve_user(bug.owner)
        bug.reporter = self.resolve_user(bug.reporter)
        bug.project_id = SelectorMapping(self.tracker).match(
            bug.id, bug.project_name, bug.component_name, bug.version,
        )

    def __iter__(self):
        """ iterate over fetched tickets """
        if self.cache_key and self.error is None:  # cache bugs if a key was designated and no error occurred
            memcache.set(self.cache_key, self.bugs, timeout=self.CACHE_TIMEOUT)
            DEBUG(u"Cached %s bugs for key %s" % (len(self.bugs), self.cache_key))
        for bug in self.bugs.itervalues():
            self.resolve(bug)
            yield bug
            
    def responded(self, resp, on_success=None):
        """ Called when server returns response headers """
        if resp.code == 200:
            on_success = on_success or self.received
            resp.deliverBody(SimpleProtocol(on_success, self.failed))
        else:
            self.fail(FetchException(u'Received response %s' % (resp.code, )))

    def received(self, data):
        """ Called when server returns whole response body """
        try:
            for bug in self.parse(data):
                self.bugs[bug.id] = bug  
        except BaseException, e:
            EXCEPTION(u"Could not parse tracker response")
            self.fail(e)
        else:
            # assumed completion: the original snippet is truncated here; mark the fetch as done
            self.success()
Example 17
 def test_init_agent(self):
     proxy = Proxy('', '', contextFactory=WebClientContextFactory())
     self.assertTrue(isinstance(proxy.agent, Agent))
     self.assertTrue(
         isinstance(proxy.agent._contextFactory, ssl.ClientContextFactory))
Example 18
def printValue(value):
    print "Result: %s" % str(value)


def printError(error):
    print 'error', error.value


def shutDown(data):
    print "Shutting down reactor..."
    reactor.stop()


address = 'https://localhost:8999'

proxy = Proxy(address, contextFactory=WebClientContextFactory())
ds = []

d = proxy.callRemote('echo', ['ajajaj', 'bjbjbj'])
d.addCallbacks(printValue, printError)
ds.append(d)

d = proxy.callRemote('add', 14, 15)
d.addCallbacks(printValue, printError)
ds.append(d)

d = proxy.callRemote('mysql_first_user')
d.addCallbacks(printValue, printError)
ds.append(d)

d = proxy.callRemote('none')