def __init__(self, dbname, name, timeout):
    """Bind a redis handle and a 'data' namespace for *name* in *dbname*.

    timeout -- default expiry (seconds) used by this cache's operations.
    """
    self._timeout = timeout
    self._dbname = dbname
    self._keyname = name
    self._redis = API.redis(self._dbname)
    self._ns = {
        'data': API.ns_of(self._keyname),
    }
def _init_proxy(k, conf):
    """Register proxy-list cron entry *k*, merging *conf* over the defaults,
    then arm its first expiry (5s)."""
    row = misc.apply({
        'pid': k,
        'table': 'conf:cron:proxylist',
        'retry': 3,  # 24*6*30*12
    }, conf)
    db.update_table('cron:proxylist', k, row)
    API.expire({
        'pid': k,
        'table': 'cron:proxylist',
    }, 5)
def _init_check():
    """Seed the 'checkurl' cron row for the proxy list and arm its
    first expiry (5s)."""
    db.update_table('cron:proxylist', 'checkurl', {
        'pid': 'checkurl',
        'table': 'conf:cron:proxylist',
        'retry': 3,  # 24*6*30*12
        'timeout': 10,
        'queue': 'queue:cron',
        'cron_handler': 'proxylist.on_checkurls',
    })
    API.expire({
        'pid': 'checkurl',
        'table': 'cron:proxylist',
    }, 5)
def init():
    """Seed the 'test' cron row in conf:cron and arm its first expiry (5s)."""
    db.update_table('conf:cron', 'test', {
        'pid': 'test',
        'table': 'conf:cron',
        'queue': 'queue:cron',
        # 'handler': 'expired.handler',  # no need
        'cron_handler': 'cron.dump',
        'retry': 20,
        'timeout': 5,
    })
    API.expire({
        'pid': 'test',
        'table': 'conf:cron',
    }, 5)
def client_factory(app, client):
    """Return a tw_api bound to the stored oauth token for app/client,
    or None when no usable token row exists."""
    key = '%s-%s' % (app, client)
    row = redis_api.db().select_from('wb_oauth2', key)
    if not row or not row.get('access_token', None):
        return
    return tw_api(row['access_token'])
def recv(self):
    """Block on the channel queue; return (chnl, msg) or None on timeout."""
    timeout = redis_conf.getint('channel_timeout', 60)
    chnl, msg = API.queue_bpop(self.chnls, timeout=timeout)
    if chnl and msg:
        return (chnl, msg)
    return
def client_factory(app, client):
    """Look up the oauth row for app/client; build a tw_api from its
    access token, or return None when absent."""
    record = redis_api.db().select_from('wb_oauth2', '%s-%s' % (app, client))
    if not record or not record.get('access_token', None):
        return
    return tw_api(record['access_token'])
def __init__(self, settings):
    """Set up the proxy-pool caches, default config, and a redis handle."""
    self._dbname = settings.get('xpool_db', 'cache')
    # NOTE(review): the three caches below hard-code db 'cache' rather than
    # using self._dbname — confirm this asymmetry is intentional.
    self._holding = cache.SetCache('cache', 'proxy_holding', 60 * 60)
    self._caching = cache.ZsetCache('cache', 'proxy_caches', 4 * 60 * 60)
    self._xfwd = cache.HashCache('cache', 'proxy_xfwd', 24 * 60 * 60)
    self._conf = self._DefaultConf.copy()
    self._redis = API.redis(self._dbname)
def __init__(self, settings):
    """Initialise proxy-pool state: holding set, score zset, x-forwarded
    hash, working config, and the redis connection."""
    self._dbname = settings.get('xpool_db', 'cache')
    hour = 60 * 60
    # NOTE(review): caches use the literal 'cache' db, not self._dbname —
    # verify against the deployment config.
    self._holding = cache.SetCache('cache', 'proxy_holding', hour)
    self._caching = cache.ZsetCache('cache', 'proxy_caches', 4 * hour)
    self._xfwd = cache.HashCache('cache', 'proxy_xfwd', 24 * hour)
    self._conf = self._DefaultConf.copy()
    self._redis = API.redis(self._dbname)
def callback(wsock, chnl=None):
    """Track *wsock* in Chnls for the lifetime of its Ctrlet run."""
    Chnls.add(wsock)
    logging.debug('>>> new|%s|%d' % (wsock, len(Chnls)))
    ctrl = Ctrlet(API.db(), 'www:%s' % chnl, wsock)
    ctrl.start()
    Chnls.remove(wsock)
    logging.debug('>>> del|%s|%d' % (wsock, len(Chnls)))
def callback(wsock, chnl=None):
    """Register *wsock*, run a Ctrlet on channel 'www:<chnl>' to
    completion, then unregister it."""
    Chnls.add(wsock)
    logging.debug('>>> new|%s|%d' % (wsock, len(Chnls)))
    controller = Ctrlet(API.db(), 'www:%s' % chnl, wsock)
    controller.start()
    Chnls.remove(wsock)
    logging.debug('>>> del|%s|%d' % (wsock, len(Chnls)))
def handler(ctrl, message):
    """Re-arm an expired cron entry.

    Validates the stored row, decrements its retry budget, and — while
    budget remains — re-arms the expiry and pushes a work item onto the
    row's queue.  Returns -1 for a missing/exhausted row, -2 for a
    malformed row (which is deleted), None on success.
    """
    _name = message.get('db', 'default')
    _ns = API.ns_of(message['table'], message['pid'])
    src = API.db(_name)
    rs = src.select_from(message['table'], message['pid'])
    if not rs:
        return -1
    # A usable cron row needs all four fields; drop malformed rows outright.
    if not rs.get('queue', None) or \
       not rs.get('timeout', None) or \
       not rs.get('cron_handler', None) or \
       not rs.get('retry', None):
        src.delete_from(message['table'], message['pid'])
        return -2
    _retry = src.redis().hincrby(_ns, 'retry', -1)
    if _retry < 1:
        src.delete_from(message['table'], message['pid'])
        return -1
    API.expire(message, rs['timeout'])
    API.queue_push(rs['queue'], {
        'pid': message['pid'],
        'table': message['table'],
        'handler': rs['cron_handler'],
    })
def _recv(self):
    """Pull (chnl, msg) pairs from the channel queue until stopped.

    On a redis connection loss, fires the ERROR signal and stops the
    loop; empty pops (timeout) are skipped silently.
    """
    self.running = True
    log.msg('recv begin: %s' % self.chnls, log.DEBUG)
    while self.running:
        try:
            chnl, msg = API.queue_bpop(self.chnls, timeout=5)
        # Fix: was the Python-2-only `except ..., e:` with `e` never used;
        # the binding is dropped (the error is reported via the signal).
        except redis.exceptions.ConnectionError:
            send_catch_log_deferred(signal=signals.ERROR, sender=self)
            self.stop()
        else:
            if not chnl or not msg:
                continue
            send_catch_log_deferred(signal=signals.RECV, sender=self,
                                    message=(chnl, msg))
def _recv(self):
    """Receive loop: pop (chnl, msg) pairs and forward them as RECV
    signals; on redis connection failure emit ERROR and stop.
    """
    self.running = True
    log.msg('recv begin: %s' % self.chnls, log.DEBUG)
    while self.running:
        try:
            chnl, msg = API.queue_bpop(self.chnls, timeout=5)
        # Fix: replaced Python-2-only `except ..., e:` (unused binding `e`).
        except redis.exceptions.ConnectionError:
            send_catch_log_deferred(
                signal=signals.ERROR,
                sender=self,
            )
            self.stop()
        else:
            if not chnl or not msg:
                continue
            send_catch_log_deferred(
                signal=signals.RECV,
                sender=self,
                message=(chnl, msg),
            )
import logging, json , time logging.basicConfig(level=logging.DEBUG) from ec2.redis import API from ec2.scrapy.tw import Ctrlet from ec2.scrapy.puller import ChannelsPuller ctrl = Ctrlet( ChannelsPuller(API.db(), 'queue:soap'), ) if __name__=='__main__': try: ctrl.start() except KeyboardInterrupt: ctrl.stop() print '\nbye'
def setUp(self):
    """Fixture: default db handle plus a 1-second 'myzset' zset cache."""
    self.db = API.db('default')
    self._cache = cache.ZsetCache('default', 'myzset', 1)
def testNs(self):
    """ns_of joins its parts under the configured 'test' prefix."""
    tools.eq_('test:a', API.ns_of('a'))
    tools.eq_('test:a:b', API.ns_of('a', 'b'))
def testNs(self):
    """Single and multi-part keys both get the 'test' namespace prefix."""
    single = API.ns_of('a')
    tools.eq_('test:a', single)
    double = API.ns_of('a', 'b')
    tools.eq_('test:a:b', double)
def _hgetall(self):
    """Fetch the raw 'myhash' mapping straight from redis."""
    key = API.ns_of('myhash')
    return self.db.redis().hgetall(key)
def setUp(self):
    """Fixture: raw redis connection for the 'default' database."""
    self.redis = API.redis('default')
def _send(self, msg):
    """Push *msg* onto 'queue:proxy' in the 'cache' database."""
    API.queue_push('queue:proxy', msg, 'cache')
import logging, json, time logging.basicConfig(level=logging.DEBUG) from ec2.redis import API from ec2.redis.worker import Ctrlet, ExpiredPoolWorker from ec2.redis.puller import ExpiredPoolPuller from ec2.conf.enabled import redis_conf ctrl = Ctrlet(puller=ExpiredPoolPuller(API.db()), worker=ExpiredPoolWorker(redis_conf)) if __name__ == "__main__": try: ctrl.start() except KeyboardInterrupt: ctrl.stop() print "\nbye"
def client(self, app, nice):
    """Build a client for *app*/*nice* over the default db handle."""
    return client_factory(app, nice, API.db())
import time from scrapy import log #from scrapy.conf import settings from ec2.redis import API from ec2.scrapy.puller import ChannelsPuller from ec2.conf.enabled import ws_conf from ec2.scrapy.worker import Ctrl from ec2.utils import event from ec2.conf.enabled import scrapy_conf log.start() puller = ChannelsPuller(API.db(), 'xxx') ctrl = Ctrl(scrapy_conf, puller) url = 'http://%s/test/slow_echo/%%s_%%s' % ws_conf['client'] def test_data(): for i in xrange(2): db.queue_push('xxx', [ { 'url': url % ( i, int(time.time()), ), 'meta': {}, 'dont_filter': True,
def cache_zset(ctrl, message):
    """Expiry handler: remove message['value'] from the named zset."""
    _name = message.get('db', 'default')
    key = API.ns_of(message['zset'])
    API.redis(_name).zrem(key, message['value'])
def cache_hash(ctrl, message):
    """Expiry handler: delete message['field'] from the named hash."""
    _name = message.get('db', 'default')
    key = API.ns_of(message['hash'])
    API.redis(_name).hdel(key, message['field'])
import logging, json , time logging.basicConfig(level=logging.DEBUG) from ec2.redis import API from ec2.redis.worker import Ctrlet from ec2.redis.puller import ChannelsPuller ctrl = Ctrlet( ChannelsPuller(API.db(), 'queue:cron'), ) if __name__=='__main__': try: ctrl.start() except KeyboardInterrupt: ctrl.stop() print '\nbye'
import logging, json, time logging.basicConfig(level=logging.DEBUG) from ec2.redis import API from ec2.redis.worker import Ctrlet from ec2.redis.puller import ChannelsPuller ctrl = Ctrlet(ChannelsPuller(API.db(), 'queue:cron'), ) if __name__ == '__main__': try: ctrl.start() except KeyboardInterrupt: ctrl.stop() print '\nbye'
from scrapy import log #from scrapy.conf import settings from ec2.redis import API from ec2.scrapy.puller import ChannelsPuller from ec2.conf.enabled import ws_conf from ec2.scrapy.worker import Ctrl from ec2.utils import event from ec2.conf.enabled import scrapy_conf log.start() puller = ChannelsPuller(API.db(), 'xxx') ctrl = Ctrl( scrapy_conf, puller ) url = 'http://%s/test/slow_echo/%%s_%%s'%ws_conf['client'] def test_data(): for i in xrange(2): db.queue_push('xxx',[{ 'url': url%( i, int(time.time()), ) , 'meta': {}, 'dont_filter': True, },]) if __name__=='__main__':
import logging, json , time logging.basicConfig(level=logging.DEBUG) from ec2.redis import API db = API.db() def init(): db.update_table('conf:cron', 'test', { 'pid': 'test', 'table': 'conf:cron', 'queue': 'queue:cron', #'handler': 'expired.handler', #no need 'cron_handler': 'cron.dump', 'retry': 20, 'timeout': 5, }) API.expire({ 'pid': 'test', 'table': 'conf:cron', },5) if __name__=='__main__': init() print '---- over ---'
def _expire_rem(self, v):
    """Cancel pending expiry for every item in *v* (scalar or list)."""
    items = misc.makelist(v)
    API.expire_rem(map(self._cache_info, items))
def setUp(self):
    """Fixture: default db handle plus a 1-second 'myhash' hash cache."""
    self.db = API.db('default')
    self._cache = cache.HashCache('default', 'myhash', 1)
import logging, json, time logging.basicConfig(level=logging.DEBUG) from ec2.redis import API from ec2.scrapy.tw import Ctrlet from ec2.scrapy.puller import ChannelsPuller ctrl = Ctrlet(ChannelsPuller(API.db(), 'queue:soap'), ) if __name__ == '__main__': try: ctrl.start() except KeyboardInterrupt: ctrl.stop() print '\nbye'
import logging, json, time logging.basicConfig(level=logging.DEBUG) from ec2.redis import API from ec2.redis.worker import Ctrlet, ExpiredPoolWorker from ec2.redis.puller import ExpiredPoolPuller from ec2.conf.enabled import redis_conf ctrl = Ctrlet(puller=ExpiredPoolPuller(API.db()), worker=ExpiredPoolWorker(redis_conf)) if __name__ == '__main__': try: ctrl.start() except KeyboardInterrupt: ctrl.stop() print '\nbye'
def recv(self):
    """Blocking pop from the channel queue.

    Returns (chnl, msg) on success, None when the pop times out.
    """
    chnl, msg = API.queue_bpop(
        self.chnls,
        timeout=redis_conf.getint('channel_timeout', 60),
    )
    if not chnl or not msg:
        return
    return (chnl, msg)
def setUp(self):
    """Fixture: handle to the 'default' database."""
    self.db = API.db('default')
def _expire(self, v, timeout=None):
    """Schedule expiry for every item in *v* (scalar or list).

    timeout -- seconds; falls back to self._timeout when falsy/absent.
    """
    items = misc.makelist(v)
    ttl = timeout or self._timeout
    API.expire(map(self._cache_info, items), timeout=ttl)
# Entry point: scrapy worker pulling from the 'request' channel under the
# twisted reactor.  init_scrapy() must run before the ec2.scrapy imports.
from ec2.conf import init_scrapy
init_scrapy()

import time

from scrapy import log

from ec2.redis import API
from ec2.scrapy.puller import ChannelsPuller
from ec2.scrapy.worker import Ctrl
from ec2.conf.enabled import scrapy_conf

log.start()

puller = ChannelsPuller(API.db(), 'request')
ctrl = Ctrl(scrapy_conf, puller)

if __name__ == '__main__':
    from twisted.internet import reactor
    ctrl.start()
    reactor.run()
def _send(self, msg):
    """Enqueue *msg* on the proxy work queue (database 'cache')."""
    API.queue_push("queue:proxy", msg, "cache")
def client(self, app, nice):
    """Delegate to client_factory with the default db handle."""
    handle = API.db()
    return client_factory(app, nice, handle)
def _expire(self, v, timeout=None):
    """Arm expiry on each item of *v*; default TTL is self._timeout."""
    effective = timeout or self._timeout
    API.expire(map(self._cache_info, misc.makelist(v)),
               timeout=effective)
#coding=utf-8 import json, time import logging from ec2.redis import API #from ec2.utils.decorator import * from ec2.utils import misc, decorator db = API.db() _default = { 'url': None, 'page_begin': None, 'page_end': None, 'timeout': 4*3600, 'queue': 'queue:cron', 'cron_handler': 'proxylist.on_weblist', 'response_handler': None, } _Conf = { 'lonmen': misc.apply({'url': 'http://www.loamen.com/ws/proxyservice.php', 'queue': 'queue:soap', 'cron_handler': 'proxylist.on_soaplist', },_default), 'ct0592': misc.apply({'url': 'http://www.ct0592.com/loadproxy.htm', 'response_handler': 'proxylist.on_resp_s0', }, _default),