예제 #1
0
파일: sina.py 프로젝트: Big-Data/ec2
def client_factory(app, client):
    """Build a tw_api client for the (app, client) pair.

    Looks up the stored OAuth2 record in redis keyed by '<app>-<client>';
    returns None when the record is missing or carries no access token.
    """
    key = '%s-%s' % (app, client)
    record = redis_api.db().select_from('wb_oauth2', key)
    if record and record.get('access_token'):
        return tw_api(record['access_token'])
    return None
예제 #2
0
파일: expired.py 프로젝트: bright-pan/ec2
def handler(ctrl, message):
    """Process one expired-cron event: consume a retry and re-queue the job.

    Returns -1 when the record is gone or its retry budget is exhausted,
    -2 when the record lacks a required field (and deletes it), and None
    after the job has been pushed back onto its worker queue.
    """
    db_name = message.get('db', 'default')
    ns = API.ns_of(message['table'], message['pid'])

    src = API.db(db_name)
    record = src.select_from(message['table'], message['pid'])
    if not record:
        return -1

    # A usable cron record needs all four of these; anything else is
    # malformed, so remove it to stop it from firing again.
    required = ('queue', 'timeout', 'cron_handler', 'retry')
    if any(not record.get(field) for field in required):
        src.delete_from(message['table'], message['pid'])
        return -2

    # Atomically decrement the retry budget; delete once it runs out.
    remaining = src.redis().hincrby(ns, 'retry', -1)
    if remaining < 1:
        src.delete_from(message['table'], message['pid'])
        return -1

    # Re-arm the expiry timer and hand the job to its worker queue.
    API.expire(message, record['timeout'])
    API.queue_push(record['queue'], {
        'pid':     message['pid'],
        'table':   message['table'],
        'handler': record['cron_handler'],
    })
예제 #3
0
def client_factory(app, client):
    """Return a tw_api wrapper for the stored access token, or None."""
    record = redis_api.db().select_from('wb_oauth2', '%s-%s' % (app, client))
    token = record.get('access_token', None) if record else None
    if not token:
        return
    return tw_api(token)
예제 #4
0
파일: ws_srv.py 프로젝트: Big-Data/ec2
def callback(wsock, chnl=None):
    """Track a websocket in Chnls for the lifetime of its Ctrlet loop."""
    Chnls.add(wsock)
    logging.debug('>>> new|%s|%d' % (wsock, len(Chnls)))

    # Blocks until the controller loop for channel 'www:<chnl>' finishes.
    controller = Ctrlet(API.db(), 'www:%s' % chnl, wsock)
    controller.start()

    Chnls.remove(wsock)
    logging.debug('>>> del|%s|%d' % (wsock, len(Chnls)))
예제 #5
0
파일: ws_srv.py 프로젝트: bright-pan/ec2
def callback(wsock, chnl=None):
    """Register wsock, run its channel controller to completion, deregister."""
    Chnls.add(wsock)
    logging.debug('>>> new|%s|%d' % (wsock, len(Chnls)))
    worker = Ctrlet(API.db(), 'www:%s' % chnl, wsock)
    worker.start()  # returns only when the channel loop ends
    Chnls.remove(wsock)
    logging.debug('>>> del|%s|%d' % (wsock, len(Chnls)))
예제 #6
0
import logging, json, time
logging.basicConfig(level=logging.DEBUG)

from ec2.redis import API
from ec2.redis.worker import Ctrlet
from ec2.redis.puller import ChannelsPuller

ctrl = Ctrlet(ChannelsPuller(API.db(), 'queue:cron'), )

if __name__ == '__main__':
    try:
        ctrl.start()
    except KeyboardInterrupt:
        ctrl.stop()
        print '\nbye'
예제 #7
0
파일: crawler.py 프로젝트: bright-pan/ec2
from ec2.conf import init_scrapy
init_scrapy()

import time
from scrapy import log

from ec2.redis import API
from ec2.scrapy.puller import ChannelsPuller
from ec2.scrapy.worker import Ctrl
from ec2.conf.enabled import scrapy_conf

# Scrapy's legacy logging must be running before the controller starts.
log.start()

# Feed crawl jobs arriving on the 'request' channel into scrapy.
puller = ChannelsPuller(API.db(), 'request')
ctrl = Ctrl(scrapy_conf, puller)

if __name__ == '__main__':
    # Imported lazily: the reactor starts running on import-sensitive setups.
    from twisted.internet import reactor
    ctrl.start()
    reactor.run()
예제 #8
0
 def setUp(self):
     """Open the default redis handle and a size-1 'myzset' zset cache."""
     self._cache = cache.ZsetCache('default', 'myzset', 1)
     self.db = API.db('default')
예제 #9
0
 def setUp(self):
     """Bind the default redis database handle used by the tests."""
     conn = API.db('default')
     self.db = conn
예제 #10
0
import logging, json, time

logging.basicConfig(level=logging.DEBUG)

from ec2.redis import API
from ec2.redis.worker import Ctrlet, ExpiredPoolWorker
from ec2.redis.puller import ExpiredPoolPuller
from ec2.conf.enabled import redis_conf

ctrl = Ctrlet(puller=ExpiredPoolPuller(API.db()),
              worker=ExpiredPoolWorker(redis_conf))

if __name__ == '__main__':
    try:
        ctrl.start()
    except KeyboardInterrupt:
        ctrl.stop()
        print '\nbye'
예제 #11
0
 def setUp(self):
     """Open the default redis handle and a size-1 'myhash' hash cache."""
     self._cache = cache.HashCache('default', 'myhash', 1)
     self.db = API.db('default')
예제 #12
0
import logging, json, time

logging.basicConfig(level=logging.DEBUG)

from ec2.redis import API
from ec2.scrapy.tw import Ctrlet
from ec2.scrapy.puller import ChannelsPuller

ctrl = Ctrlet(ChannelsPuller(API.db(), 'queue:soap'), )

if __name__ == '__main__':
    try:
        ctrl.start()
    except KeyboardInterrupt:
        ctrl.stop()
        print '\nbye'
예제 #13
0
import time
from scrapy import log

from ec2.redis import API
from ec2.scrapy.puller import ChannelsPuller
from ec2.conf.enabled import ws_conf

from ec2.scrapy.worker import Ctrl
from ec2.utils import event
from ec2.conf.enabled import scrapy_conf

# Scrapy's legacy logging must be running before the controller starts.
log.start()

# Controller that crawls whatever lands on the 'xxx' test channel.
puller = ChannelsPuller(API.db(), 'xxx')
ctrl = Ctrl(scrapy_conf, puller)

# Template for the slow-echo test endpoint; the escaped %%s placeholders
# are filled in per request when test jobs are generated.
url = 'http://%s/test/slow_echo/%%s_%%s' % ws_conf['client']

def test_data():
    for i in xrange(2):
        db.queue_push('xxx', [
            {
                'url': url % (
                    i,
                    int(time.time()),
                ),
                'meta': {},
                'dont_filter': True,
예제 #14
0
파일: openapi3.py 프로젝트: bright-pan/ec2
 def client(self, app, nice):
     """Return an OAuth client for (app, nice) backed by the default db."""
     db = API.db()
     return client_factory(app, nice, db)
예제 #15
0
import logging, json , time
logging.basicConfig(level=logging.DEBUG)

from ec2.redis  import API

db = API.db()

def init():
    db.update_table('conf:cron', 'test', {
        'pid':      'test',
        'table':    'conf:cron',
        'queue':    'queue:cron',
        #'handler':  'expired.handler',     #no need
        'cron_handler': 'cron.dump',
        'retry':    20,
        'timeout':  5,
    })
    

    API.expire({
        'pid':      'test',
        'table':    'conf:cron',
    },5)    

    



if __name__=='__main__':
    init()
    print '---- over ---'