def __init__(self, servers, session_class=Session):
    """Session store backed by memcached.

    Prefers the C extension client (cmemcache) and falls back to the
    pure-Python memcache module when the extension is not installed.
    """
    SessionStore.__init__(self, session_class)
    try:
        import cmemcache as memcache_mod
    except ImportError:
        import memcache as memcache_mod
    # Both modules expose a compatible Client(servers) constructor.
    self.client = memcache_mod.Client(servers)
def __init__(self, servers, format=None, timeout=600):
    """Create a new memcached-backed cache.

    `servers` is a list of memcached locations: "host:port" for TCP, an
    absolute socket path for UNIX sockets, or (location, weight) tuples
    to skew load distribution (weight defaults to 1). A single location
    string is also accepted.

    `format`, if given, is a %-interpolation template applied to every
    key before it is sent to memcached (e.g. "lastfm_%s"); defaults to
    "%s", i.e. keys pass through unchanged.

    `timeout` is the time-to-live for cached items, in seconds.
    """
    self._format = format if format else '%s'
    self._timeout = timeout
    # Normalize the single-server shorthand to a list.
    if isinstance(servers, basestring):
        servers = [servers]
    self._client = _memcache.Client(servers, debug=0)
def __init__(self, root, mode="r+"):
    """Load the TSDB located at ``root``.

    ``mode`` controls the mode used by open().
    """
    TSDBBase.__init__(self)
    self.path = "/"
    self.mode = mode
    self.fs = get_fs(root, [])
    self.load_metadata()
    self.chunk_prefixes = self.metadata.get('CHUNK_PREFIXES', [])
    if self.chunk_prefixes:
        # the root is listed as the first prefix, don't add it again
        self.fs = get_fs(root, self.chunk_prefixes[1:])

    # NOTE(review): `self.memcache` does double duty here — first a
    # boolean flag, then (if an import succeeds) the client object.
    if self.metadata.has_key('MEMCACHED_URI'):
        self.memcache = True
        try:
            # Prefer the C extension client when available.
            import cmemcache as memcache
        except ImportError:
            try:
                import memcache
            except:
                # No memcache module of any kind — disable caching.
                # NOTE(review): bare except also swallows non-import
                # errors; presumably only ImportError is expected here.
                self.memcache = False
        if self.memcache:
            self.memcache = memcache.Client([self.metadata['MEMCACHED_URI']])
def stats(name, config, opts):
    """Continuously print per-queue statistics for the configured
    persist queues, refreshing every 5 seconds.

    Runs forever (``while True``); intended to be interrupted by the
    operator. ``name`` and ``opts`` are accepted but unused here —
    presumably part of a common command signature; verify against callers.
    """
    stats = {}
    mc = memcache.Client(['127.0.0.1:11211'])
    # One QueueStats per worker: single-worker queues use the bare queue
    # name, multi-worker queues get a "<qname>_<i>" entry per worker.
    for qname, qinfo in config.persist_queues.iteritems():
        (qclass, nworkers) = qinfo
        if nworkers == 1:
            stats[qname] = QueueStats(mc, qname)
            stats[qname].update_stats()
        else:
            for i in range(1, nworkers + 1):
                k = "%s_%d" % (qname, i)
                stats[k] = QueueStats(mc, k)
                stats[k].update_stats()
    keys = stats.keys()
    keys.sort()
    while True:
        # Accumulator for the pending/new/done/delta columns ("max" is
        # deliberately excluded from the total via vals[1:-1] below).
        total = [0,0,0,0]
        print "%20s %8s %8s %8s %8s %14s" % (
            "queue", "pending", "new", "done", "delta", "max")
        for k in keys:
            stats[k].update_stats()
            vals = stats[k].get_stats()
            print "%20s % 8d % 8d % 8d % 8d % 14d" % vals
            # vals[0] is the queue name; vals[-1] is "max".
            total = map(sum, zip(total, vals[1:-1]))
        total.insert(0, "TOTAL")
        print "%20s % 8d % 8d % 8d % 8d" % tuple(total)
        print ""
        time.sleep(5)
def __get_cache_connection(self):
    """Return the existing cache client, or build a new local one.

    Returns None when the cmemcache extension cannot be imported.
    NOTE(review): a freshly built client is returned but not stored
    back into self.__cache — confirm whether the caller caches it.
    """
    if self.__cache is not None:
        return self.__cache
    try:
        import cmemcache
        return cmemcache.Client(["127.0.0.1:11211"], debug=0)
    except ImportError:
        return None
def scrape(request):
    """BitTorrent scrape endpoint.

    Returns seed/leech/download counts for the torrent identified by
    ``info_hash``, either as a small XML document (when ``xhr`` is set)
    or as a bencoded dictionary (standard tracker scrape response).
    """
    request.encoding = 'latin-1'
    xhr = request.GET.get('xhr')
    info_hash = request.GET.get('info_hash', '')
    # A raw info_hash is a 20-byte SHA-1 digest.
    if len(info_hash) < 20:
        return _fail("invalid request", xhr=xhr)
    try:
        # Re-encode the latin-1 request bytes, then hex-encode for storage
        # lookups (Python 2 str.encode('hex')).
        info_hash = info_hash.encode('iso-8859-1').encode('hex')
    except:
        return _fail("invalid request", xhr=xhr)
    t = Torrent.objects.filter(info_hash=info_hash).values('id')
    if not t:
        return _fail("no such torrent", xhr=xhr)
    # The whole swarm state lives under one memcached key: 'peers'.
    mc = memcache.Client([MEMCACHE], debug=0)
    peers = mc.get('peers')
    if not peers:
        peers = []
    # calculate scrape interval: scale with swarm size and recent announce
    # activity, floored at MIN_ANNOUNCE_INTERVAL.
    # NOTE(review): Python 2 integer division here — the product is
    # truncated at each step; presumably intentional, confirm.
    now = datetime.datetime.now()
    num_peers = len([p for p in peers if p['expire_time']>now])
    announce_rate = len([p for p in peers if p['update_time']>now-datetime.timedelta(minutes=1)])
    scrape_interval = max(num_peers * announce_rate / MAX_ANNOUNCE_RATE**2 * 60,
                          MIN_ANNOUNCE_INTERVAL) * SCRAPE_FACTOR
    result = {info_hash: {'complete': 0, 'incomplete': 0, 'downloaded': 0}}
    for p in peers:
        if p['info_hash'] == info_hash:
            if p['left'] == 0 and p['expire_time']>now:
                # active seeder
                result[info_hash]['complete'] += 1
            elif p['left'] > 0 and p['expire_time']>now:
                # active leecher
                result[info_hash]['incomplete'] += 1
            elif p['left'] == 0:
                # expired seeder: counted as a completed download
                result[info_hash]['downloaded'] += 1
    if xhr:
        r = HttpResponse(mimetype="text/xml")
        r.write("<?xml version=\"1.0\" encoding=\"UTF-8\"?>")
        r.write("""<scraper><msg>success</msg>
<leechers>%s</leechers>
<seeds>%s</seeds>
<downloaded>%s</downloaded>
</scraper>""" % (
            result[info_hash].get('incomplete', 0),
            result[info_hash].get('complete', 0),
            result[info_hash].get('downloaded', 0),
        ))
        return r
    return HttpResponse(bencode({
        'files': result,
        'flags': {'min_request_interval': int(scrape_interval)},
    }), mimetype = 'text/plain')
def get_peers(the_id):
    """Return (seeders, leechers) for the torrent with id ``the_id``.

    Counts only peers whose entry has not expired; a peer with
    ``left == 0`` is a seeder, one with ``left > 0`` a leecher.
    Returns (0, 0) when no peer list is cached.
    """
    import cmemcache as memcache
    import datetime
    client = memcache.Client([MEMCACHE], debug=0)
    swarm = client.get('peers')
    if not swarm:
        return (0, 0)
    now = datetime.datetime.now()
    seeders = 0
    leechers = 0
    for peer in swarm:
        # Skip peers for other torrents and stale entries.
        if peer['torrent_id'] != the_id or not peer['expire_time'] > now:
            continue
        if peer['left'] == 0:
            seeders += 1
        elif peer['left'] > 0:
            leechers += 1
    return (seeders, leechers)
def setUp(self):
    """Build the blacklist cache, a raw memcached client, and a
    SafeBrowsing manager from /etc/whitetrash.conf, then pull updates.
    """
    config = ConfigObj("/etc/whitetrash.conf")["DEFAULT"]
    self.cache = BlacklistCache(config)
    self.raw_cache = cmemcache.Client(config["memcache_servers"].split(","))
    proxy = None
    # The proxy setting is optional; announce it when configured.
    if "safebrowsing_proxy" in config:
        proxy = config["safebrowsing_proxy"]
        print("Using proxy: %s for testing" % proxy)
    self.mgr = SafeBrowsingManager(config["safebrowsing_api_key"],
                                   proxy=proxy)
    self.mgr.do_updates()
def __init__(self, servers=None, key_prefix=None, default_timeout=300):
    """Session store backed by memcached.

    ``servers`` may be a list/tuple of server locations (a client is
    created from the first importable module among cmemcache, memcache,
    pylibmc) or an already-constructed client object, which is used
    as-is. ``key_prefix`` is prepended to keys; ``default_timeout`` is
    the item TTL in seconds.

    Raises RuntimeError when a server list is given but no memcache
    client module can be imported.
    """
    SessionStore.__init__(self)
    if isinstance(servers, (list, tuple)):
        try:
            import cmemcache as memcache
            is_cmemcache = True
        except ImportError:
            try:
                import memcache
                is_cmemcache = False
                is_pylibmc = False
            # BUG FIX: was `except ImprotError:` — a NameError at
            # runtime whenever the memcache import failed.
            except ImportError:
                try:
                    import pylibmc as memcache
                    is_cmemcache = False
                    is_pylibmc = True
                except ImportError:
                    # BUG FIX: was `raise RuntimeErorr(...)` (typo);
                    # message now matches the cache backend's wording.
                    raise RuntimeError('no memcache module found')
        if is_cmemcache:
            # cmemcache lacks a debuglog attribute on the client; stub
            # it out so pickle failures don't surface as AttributeError.
            client = memcache.Client(map(str, servers))
            try:
                client.debuglog = lambda *a: None
            except Exception:
                pass
        else:
            if is_pylibmc:
                client = memcache.Client(servers, False)
            else:
                client = memcache.Client(servers, False, HIGHEST_PROTOCOL)
    else:
        # Assume the caller passed a ready-made client.
        client = servers
    self._memcache_client = client
    self._memcache_key_prefix = key_prefix
    self._memcache_timeout = default_timeout
def __init__(self, namespace, url, data_dir=None, lock_dir=None, **params):
    """Namespace manager backed by memcached.

    A lock directory is required: either ``lock_dir`` directly, or a
    ``data_dir`` under which a "container_mcd_lock" directory is used.
    Clients are pooled per ``url`` (semicolon-separated server list).
    """
    NamespaceManager.__init__(self, namespace, **params)
    if lock_dir is None:
        if data_dir is None:
            raise MissingCacheParameter("data_dir or lock_dir is required")
        lock_dir = data_dir + "/container_mcd_lock"
    self.lock_dir = lock_dir
    verify_directory(self.lock_dir)
    # Reuse one client per server-list URL across managers.
    self.mc = MemcachedNamespaceManager.clients.get(
        url, lambda: memcache.Client(url.split(';'), debug=0))
def __init__(self, servers, default_timeout=300, key_prefix=None):
    """Cache backend over memcached.

    ``servers`` is either a list/tuple of server locations — a client
    is built from the first importable module among cmemcache,
    memcache, pylibmc — or an already-constructed client object used
    as-is. Raises RuntimeError when a server list is given but no
    memcache module can be imported.
    """
    BaseCache.__init__(self, default_timeout)
    if isinstance(servers, (list, tuple)):
        # Import preference: cmemcache (C extension), then memcache
        # (pure Python), then pylibmc.
        try:
            import cmemcache as memcache
            is_cmemcache = True
        except ImportError:
            try:
                import memcache
                is_cmemcache = False
                is_pylibmc = False
            except ImportError:
                try:
                    import pylibmc as memcache
                    is_cmemcache = False
                    is_pylibmc = True
                except ImportError:
                    raise RuntimeError('no memcache module found')

        # cmemcache has a bug that debuglog is not defined for the
        # client. Whenever pickle fails you get a weird AttributeError.
        if is_cmemcache:
            client = memcache.Client(map(str, servers))
            try:
                client.debuglog = lambda *a: None
            except Exception:
                pass
        else:
            if is_pylibmc:
                client = memcache.Client(servers, False)
            else:
                client = memcache.Client(servers, False, HIGHEST_PROTOCOL)
    else:
        # Assume the caller passed a ready-made client object.
        client = servers

    self._client = client
    self.key_prefix = key_prefix
def main(): import optparse, random from time import sleep parser = optparse.OptionParser(__doc__.strip()) parser.add_option('-n', '--number', action='store', type='int', default=100, help="Number of get/set.") parser.add_option('-v', '--verbose', action='count', default=0, help="Verbose level.") opts, args = parser.parse_args() servers = ["127.0.0.1:11211", "127.0.0.1:11222"] servers = ["127.0.0.1:11211"] mc = memcache.Client(servers) cmc = cmemcache.Client(servers) k = 'bla' v = 'bli' for i in xrange(opts.number): if mc.get(k) == None: print 'mc.get() failed' else: print 'mc.get() succesful' if mc.set(k, v) == 0: print 'mc.set() failed' else: print 'mc.set() succesful' if cmc.get(k) == None: print 'cmc.get() failed' else: print 'cmc.get() succesful' if cmc.set(k, v) == 0: print 'cmc.set() failed' else: print 'cmc.set() success' # print 'recreate Client' # cmc = cmemcache.Client(servers) s = random.random() sleep(s)
def test_memcache(self):
    """Run the shared test battery against the bundled memcache module
    and (if built) the cmemcache extension, starting a throwaway
    memcached process when none is listening.
    """
    # quick check if memcached is running
    ip, port = self.servers[0].split(":")
    print("ip", ip, "port", port)
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    memcached = None
    try:
        s.connect((ip, int(port)))
    except socket.error as e:
        # not running, start one
        memcached = subprocess.Popen("memcached -m 10", shell=True)
        print("memcached not running, starting one (pid %d)" %
              (memcached.pid, ))
        # give it some time to start
        import time
        time.sleep(0.5)
    s.close()
    # use memcache as the reference
    try:
        from Products.MemcachedManager.tests import memcache
    except ImportError:
        pass
    else:
        self._test_memcache(memcache)
        self._test_base(memcache, memcache.Client(self.servers), ok=1)
        self._test_client(memcache, ok=1)
    # test extension; probing for StringClient distinguishes cmemcache
    # from other modules of the same name.
    try:
        from cmemcache import StringClient
        # Only in cmemcache
        del StringClient
        import cmemcache
    except ImportError:
        pass
    else:
        self._test_cmemcache(cmemcache)
        self._test_base(cmemcache, cmemcache.StringClient(self.servers), ok=0)
        self._test_base(cmemcache, cmemcache.Client(self.servers), ok=0)
        self._test_client(cmemcache, ok=0)
    # if we created memcached for our test, then shut it down
    if memcached:
        os.kill(memcached.pid, signal.SIGINT)
def __init__(self, qname, memcached_uri):
    """Persistent queue stored in memcached.

    Initializes the last-added / last-read counters to 0 in memcached
    if they are not already present.
    """
    super(MemcachedPersistQueue, self).__init__(qname)
    self.log = get_logger("MemcachedPersistQueue_%s" % self.qname)
    self.mc = memcache.Client([memcached_uri])
    # Counter keys are namespaced by class prefix and queue name.
    self.last_added = '%s_%s_last_added' % (self.PREFIX, self.qname)
    if not self.mc.get(self.last_added):
        self.mc.set(self.last_added, 0)
    self.last_read = '%s_%s_last_read' % (self.PREFIX, self.qname)
    if not self.mc.get(self.last_read):
        self.mc.set(self.last_read, 0)
def main():
    """Expire stale whitelist entries.

    Entries not accessed within ``timeout_in_days`` are either deleted
    or disabled (per ``delete_old_domains``), both in the database and
    — when ``use_memcached`` is on — in memcached.
    """
    try:
        config = ConfigObj("/etc/whitetrash.conf")["DEFAULT"]
        logging.config.fileConfig("/etc/whitetrash.conf")
        log = logging.getLogger("whitetrashCleanup")
        log.info("Running whitetrash_cleanup")
        dbh = MySQLdb.Connect(user=config['DATABASE_CLEANUP_USER'],
                              passwd=config['DATABASE_CLEANUP_PASSWORD'],
                              db=config['DATABASE_NAME'],
                              unix_socket=config['DATABASE_UNIX_SOCKET'],
                              use_unicode=False)
        cursor = dbh.cursor()

        if config["use_memcached"].upper() == "TRUE":
            import cmemcache
            servers = config["memcache_servers"].split(",")
            cache = cmemcache.Client(servers)
            # Select the stale rows so their cache keys can be purged
            # or flagged disabled before touching the database.
            result = cursor.execute(
                "select whitelist_id,protocol,domain from whitelist_whitelist where (DATEDIFF(NOW(),last_accessed) > %s)",
                config["timeout_in_days"])
            for (id, proto, dom) in cursor.fetchall():
                # Cache key format: "<domain>|<protocol>".
                key = "|".join((dom, str(proto)))
                if config["delete_old_domains"].upper() == "TRUE":
                    cache.delete(key)
                else:
                    # Keep the entry but mark it disabled (enabled=False).
                    cache.set(key, (id, False))
            log.info("Deleted/disabled %s entries in memcache" % result)

        if config["delete_old_domains"].upper() == "TRUE":
            result = cursor.execute(
                "delete from whitelist_whitelist where (DATEDIFF(NOW(),last_accessed) > %s)",
                config["timeout_in_days"])
            log.info("Whitetrash cleanup successful. Deleted %s domains(s)" % result)
        else:
            result = cursor.execute(
                "update whitelist_whitelist set enabled=0 where (DATEDIFF(NOW(),last_accessed) > %s)",
                config["timeout_in_days"])
            log.info("Whitetrash cleanup successful. Disabled %s domain(s)" % result)
    except Exception, e:
        # NOTE(review): if the failure happens before `log` is bound
        # (e.g. bad config path), this handler itself raises NameError.
        log.error("whitetrash_cleanup.py error:%s" % e)
def _sort_by(o, q):
    """Order the topic queryset ``q`` by sort key ``o``.

    Keys: 'd' date, 'L'/'l' leechers desc/asc, 'S'/'s' seeds desc/asc,
    'B'/'b' size desc/asc; anything else falls back to date ordering.
    Seed/leech orderings are computed from the cached peer list via
    sort_result(); size and date orderings come from the database.
    """
    import cmemcache as memcache
    mc = memcache.Client([MEMCACHE], debug=0)
    peers = mc.get('peers')
    if not peers:
        peers = []
    if o == 'd':  # date
        return q.order_by('-created')
    elif o == 'L':  # Leechers Desc
        ids = [t['torrent__id']
               for t in q.order_by('created').values('torrent__id')]
        return sort_result(ids, peers, by_seeds=False)
    elif o == 'l':  # Leechers Asc
        ids = [t['torrent__id']
               for t in q.order_by('-created').values('torrent__id')]
        return sort_result(ids, peers, asc=True, by_seeds=False)
    elif o == 'S':  # Seeds Desc
        ids = [t['torrent__id']
               for t in q.order_by('created').values('torrent__id')]
        return sort_result(ids, peers)
    elif o == 's':  # Seeds Asc
        ids = [t['torrent__id']
               for t in q.order_by('-created').values('torrent__id')]
        return sort_result(ids, peers, asc=True)
    elif o == 'B':  # Size Desc
        return q.order_by('torrent__bytes')
    elif o == 'b':  # Size Asc
        return q.order_by('-torrent__bytes')
    else:  # date (default)
        # BUG FIX: originally `return result.order_by('-created')`,
        # but no `result` name exists in this scope — every unknown
        # sort key raised NameError. Fall back to the same ordering
        # as the 'd' case.
        return q.order_by('-created')
def summary(context):
    """Populate ``context`` with the ten most relevant topics per section.

    Without cached peer data the newest ten approved topics per section
    are used; with peer data the topics are ranked via sort_result().

    NOTE(review): the no-peers early path returns the bare render_dict
    instead of the updated context — confirm callers tolerate both shapes.
    """
    mc = memcache.Client([MEMCACHE], debug=0)
    peers = mc.get('peers')
    render_dict = {}
    if not peers:
        for s in SECTIONS:
            render_dict.update({
                s[0]: Topic.objects.filter(section=s[0], approved=True)
                                   .order_by('-created')[:10]
            })
        return render_dict
    # NOTE(review): `now` is computed but never used below.
    now = datetime.datetime.now()
    # NOTE(review): the cursor is opened but unused; presumably it
    # forces a DB connection before the ORM queries — confirm.
    cursor = connection.cursor()
    for s in SECTIONS:
        ids = [
            t['torrent__id']
            for t in Topic.objects.filter(section=s[0], approved=True).
            order_by('-created').values('torrent__id')
        ]
        render_dict.update({s[0]: sort_result(ids, peers, limit=10)})
    connection.close()
    context.update(render_dict)
    return context
def __init__(self, server, params):
    """Memcached cache backend.

    ``server`` is a semicolon-separated list of memcached locations.
    """
    BaseCache.__init__(self, params)
    server_list = server.split(';')
    self._cache = memcache.Client(server_list)
class TestCmemcache(unittest.TestCase):
    """Cross-checks the cmemcache extension against the pure-Python
    memcache module using a shared battery of set/get/replace/add,
    counter, multi-get, stats, and server-list tests.
    """

    # Default local server, a port nothing should listen on, and a
    # weighted-server variant of the default.
    servers = ["127.0.0.1:11211"]
    servers_unknown = ["127.0.0.1:52345"]
    servers_weighted = [("127.0.0.1:11211", 2)]

    def _test_cmemcache(self, mcm):
        """
        Test cmemcache specifics.
        """
        mc = mcm.StringClient(self.servers)
        mc.set('blo', 'blu', 0, 12)
        self.failUnlessEqual(mc.get('blo'), 'blu')
        self.failUnlessEqual(mc.getflags('blo'), ('blu', 12))
        # incr/decr on missing keys return None in cmemcache.
        self.failUnlessEqual(mc.incr('nonexistantnumber'), None)
        self.failUnlessEqual(mc.decr('nonexistantnumber'), None)
        # try weird server formats
        # number is not a server
        self.failUnlessRaises(TypeError, lambda: mc.set_servers([12]))
        # forget port
        self.failUnlessRaises(TypeError, lambda: mc.set_servers(['12']))

    def _test_memcache(self, mcm):
        """
        Test memcache specifics.
        """
        mc = mcm.Client(self.servers)
        mc.set('blo', 'blu')
        self.failUnlessEqual(mc.get('blo'), 'blu')
        # incr/decr on missing keys raise in memcache (vs None above).
        self.failUnlessRaises(ValueError, lambda: mc.decr('nonexistantnumber'))
        self.failUnlessRaises(ValueError, lambda: mc.incr('nonexistantnumber'))
        # keys containing spaces are rejected
        self.failUnlessRaises(mc.MemcachedKeyCharacterError,
                              lambda: mc.set("a a", "b b"))

    def _test_sgra(self, mc, val, repval, norepval):
        """
        Test set, get, replace, add api.
        """
        self.failUnlessEqual(mc.set('blo', val), 1)
        self.failUnlessEqual(mc.get('blo'), val)
        # replace overwrites an existing key ...
        mc.replace('blo', repval)
        self.failUnlessEqual(mc.get('blo'), repval)
        # ... while add must NOT touch an existing key.
        mc.add('blo', norepval)
        self.failUnlessEqual(mc.get('blo'), repval)
        mc.delete('blo')
        self.failUnlessEqual(mc.get('blo'), None)
        # replace on a missing key is a no-op ...
        mc.replace('blo', norepval)
        self.failUnlessEqual(mc.get('blo'), None)
        # ... and add on a missing key stores the value.
        mc.add('blo', repval)
        self.failUnlessEqual(mc.get('blo'), repval)

    def _test_base(self, mcm, mc):
        """
        The base test, uses string values only.

        The return codes are not compatible between memcache and
        cmemcache: memcache returns 1 for any reply from memcached,
        while cmemcache returns the return code from memcached itself
        (and the libmemcache codes for replace/add do not seem logical
        either). So ignore them and test through get() whether the
        appropriate action was done.
        """
        print 'testing', mc, 'version', mcm.__version__, '\n\tfrom', mcm
        self._test_sgra(mc, 'blu', 'replace', 'will not be set')
        mc.delete('blo')
        self.failUnlessEqual(mc.get('blo'), None)
        # incr/decr operate on numeric strings.
        mc.set('number', '5')
        self.failUnlessEqual(mc.get('number'), '5')
        self.failUnlessEqual(mc.incr('number', 3), 8)
        self.failUnlessEqual(mc.decr('number', 2), 6)
        self.failUnlessEqual(mc.get('number'), '6')
        self.failUnlessEqual(mc.incr('number'), 7)
        self.failUnlessEqual(mc.decr('number'), 6)
        bli = 'bli'
        # try with maxint
        exptime = sys.maxint
        mc.set('blo', bli, exptime)
        self.failUnlessEqual(mc.get('blo'), bli)
        # get_multi drops keys that don't exist.
        d = mc.get_multi(['blo', 'number', 'doesnotexist'])
        self.failUnlessEqual(d, {'blo': bli, 'number': '6'})
        # make sure zero delimitation characters are ignored in values.
        test_setget(mc, 'blabla', 'bli\000bli', self.failUnlessEqual)
        # check utf str
        test_setget(mc, 'blabla', 'blü', self.failUnlessEqual)
        # get stats
        stats = mc.get_stats()
        self.failUnlessEqual(len(stats), 1)
        self.assert_(self.servers[0] in stats[0][0])
        self.assert_('total_items' in stats[0][1])
        self.assert_('bytes_read' in stats[0][1])
        self.assert_('bytes_written' in stats[0][1])
        # set_servers to none
        mc.set_servers([])
        try:
            # memcache does not support the 0 server case
            mc.set('bli', 'bla')
        except ZeroDivisionError:
            pass
        else:
            self.failUnlessEqual(mc.get('bli'), None)
        # set unknown server
        # mc.set_servers(self.servers_unknown)
        # test_setget(mc, 'bla', 'bli', self.failIfEqual)
        # set servers with weight syntax
        mc.set_servers(self.servers_weighted)
        test_setget(mc, 'bla', 'bli', self.failUnlessEqual)
        test_setget(mc, 'blo', 'blu', self.failUnlessEqual)
        # set servers again
        mc.set_servers(self.servers)
        test_setget(mc, 'bla', 'bli', self.failUnlessEqual)
        test_setget(mc, 'blo', 'blu', self.failUnlessEqual)
        # test unicode
        test_setget(mc, 'blo', '© 2006', self.failUnlessEqual)
        # flush_all
        # fixme: how to test this?
        # fixme: after doing flush_all() one can not start new Client(),
        # do not know why
        # since I know no good way to test it we ignore it for now
        # mc.flush_all()
        mc.disconnect_all()

    def _test_client(self, mcm):
        """
        Test Client: only need to test set, get, add, replace with
        pickled (non-string) values; the rest is covered by
        test_memcache().
        """
        mc = mcm.Client(self.servers, debug=True)
        mc.debuglog("This should be in the output (test.py)")
        self._test_sgra(mc, 'blu', 'replace', 'will not be set')
        # Test unicode string, not supported by StringClient
        test_setget(mc, 'blabla', u'blü', self.failUnlessEqual)
        # Pickled dict values.
        val = {'bla': 'bli', 'blo': 12}
        repval = {'bla': 'blo', 'blo': 12}
        norepval = {'blo': 12}
        self._test_sgra(mc, val, repval, norepval)
        mc.set('number', 124567)
        self.failUnlessEqual(mc.get('number'), 124567)
        mc.set('longnumber', 123456789L)
        self.failUnlessEqual(mc.get('longnumber'), 123456789L)
        bli = ['bli']
        mc.set('blo', bli)
        self.failUnlessEqual(mc.get('blo'), bli)
        d = mc.get_multi(['blo', 'number', 'doesnotexist', 'longnumber'])
        self.failUnlessEqual(d, {
            'blo': bli,
            'number': 124567,
            'longnumber': 123456789L
        })
        # some quick timing.
        t0 = time.time()
        n = 10000
        for i in xrange(n):
            d = mc.get_multi(['blo', 'number', 'doesnotexist', 'longnumber'])
            self.failUnlessEqual(d, {
                'blo': bli,
                'number': 124567,
                'longnumber': 123456789L
            })
        t1 = time.time()
        print 'time elapsed', t1 - t0, 'for', n, 'get_multi'

    def _test_create_leak(self, mcm):
        """
        Dan Helfman reported a memory leak on Client create/dealloc.
        But I can't seem to get any memory usage information: guppy.hpy
        does not seem to report memory usage of C types.
        """
        try:
            from guppy import hpy
            h = hpy()
            print 'check memleak'
            print h.heap()
            for i in xrange(1000000):
                mc = mcm.Client(self.servers)
            print 'checked memleak'
            print h.heap()
        except ImportError:
            pass

    def _test_no_memcached(self, mc):
        """
        Test mc when there is no memcached running (anymore).
        """
        # memcached not running, so get should return no value
        self.failUnlessEqual(mc.get('bla'), None)
        self.failUnlessEqual(mc.set('bla', 'bli'), 0)

    def test_memcache(self):
        """Entry point: run the full battery against memcache (if
        importable) and the locally built cmemcache extension, spawning
        a temporary memcached when none is listening.
        """
        # quick check if memcached is running
        ip, port = self.servers[0].split(':')
        print 'ip', ip, 'port', port
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        memcached = None
        try:
            s.connect((ip, int(port)))
        except socket.error, e:
            # not running, start one
            memcached = subprocess.Popen("memcached -m 10", shell=True)
            print 'memcached not running, starting one (pid %d)' % (
                memcached.pid, )
            # give it some time to start
            import time
            time.sleep(0.5)
        s.close()
        # Apply tests to memcache as the reference
        mc = None
        try:
            import memcache
        except ImportError:
            pass
        else:
            self._test_memcache(memcache)
            mc = memcache.Client(self.servers)
            self._test_base(memcache, mc)
            self._test_client(memcache)
        # print out extension just to make sure we got the local one
        # (and not some installed version somewhere)
        import _cmemcache
        print _cmemcache
        # test extension
        import cmemcache
        self._test_cmemcache(cmemcache)
        self._test_base(cmemcache, cmemcache.StringClient(self.servers))
        cmc = cmemcache.Client(self.servers)
        self._test_base(cmemcache, cmc)
        self._test_client(cmemcache)
        self._test_create_leak(cmemcache)
        # if we created memcached for our test, then shut it down
        if memcached:
            os.kill(memcached.pid, signal.SIGINT)
        # test get() with memcached not running anymore
        if mc:
            self._test_no_memcached(mc)
            self._test_no_memcached(cmc)
def __init__(self, server, params):
    """Memcached-backed storage.

    ``server`` is a semicolon-separated list of memcached locations.
    """
    BaseStorage.__init__(self, params)
    server_list = server.split(';')
    self._db = memcache.Client(server_list)
def __init__(self):
    """Connect to the memcached instance on localhost (debug off)."""
    local_servers = ['127.0.0.1:11211']
    self.client = memcache.Client(local_servers, debug=0)
def __init__(self, default_timeout=300, key_prefix=None):
    """Cache backed by Google App Engine's memcache service.

    Delegates everything to MemcachedCache, using the App Engine
    client instead of a socket-based one.
    """
    from google.appengine.api import memcache
    gae_client = memcache.Client()
    MemcachedCache.__init__(self, gae_client, default_timeout, key_prefix)
def __init__(self, config):
    """Blacklist cache over memcached.

    Version markers start at -1, i.e. no list has been loaded yet.
    """
    servers = config["memcache_servers"].split(",")
    self.cache = cmemcache.Client(servers)
    self.malware_version = -1
    self.phishing_version = -1
def __init__(self):
    """Connect to the memcached instance named in the global CONFIG."""
    uri = CONFIG.espersistd_uri
    self.memcache = memcache.Client([uri])
def __init__(self, name, server, params):
    """Named memcached cache backend.

    ``server`` is a semicolon-separated list of memcached locations.
    """
    super(BackEnd, self).__init__(name, params)
    server_list = server.split(';')
    self._cache = memcache.Client(server_list)
def __init__(self, config):
    """Squid redirector variant that caches whitelist lookups in
    memcached; ``memcache_servers`` is a comma-separated server list.
    """
    WTSquidRedirector.__init__(self, config)
    self.servers = config["memcache_servers"].split(",")
    self.cache = cmemcache.Client(self.servers)
def announce(request):
    """BitTorrent announce endpoint.

    Validates the request (passkey when the tracker is closed, resolvable
    IP, required integer arguments, known event, allowed protocol,
    info_hash/peer_id), updates the swarm state kept under the single
    memcached key 'peers', and returns a bencoded peer list in the format
    the client asked for (compact / no_peer_id / full).
    """
    request.encoding = 'latin-1'
    if not OPEN_TRACKER:
        # Closed tracker: a valid per-user passkey is mandatory.
        if not request.GET.get('passkey'):
            return _fail("you need to provide passkey")
        if len(request.GET['passkey']) < 40:
            return _fail("you need to provide valid passkey")
        u = User.objects.filter(passkey=request.GET['passkey'])
        if not u:
            return _fail("user with this passkey wasn't found")
        else:
            u = u[0]
    args = {}
    # Client-supplied IP wins; otherwise use the connection's address.
    args['ip'] = request.GET.get('ip') or request.META.get('REMOTE_ADDR')
    try:
        gethostbyname(args['ip'])
    except:
        return _fail("unable to resolve host name %s"%args['ip'])
    # All four announce counters are required and must be integers.
    for key in ['uploaded', 'downloaded', 'port', 'left']:
        if request.GET.has_key(key):
            try:
                args[key] = int(request.GET[key])
            except ValueError:
                return _fail("argument '%s' specified incorrectly."%key)
        else:
            return _fail("argument '%s' not specified."%key)
    event = request.GET.get('event', '')
    if event not in ['completed','stopped','started'] and len(event.strip())>0:
        return _fail("invalid request")
    # is the announce method allowed ?
    if REQUIRE_ANNOUNCE_PROTOCOL == 'no_peer_id':
        if not request.GET.get('compact') and not request.GET.get('no_peer_id'):
            return _fail("standard announces not allowed; use no_peer_id or compact option")
    elif REQUIRE_ANNOUNCE_PROTOCOL == 'compact':
        if not request.GET.get('compact'):
            return _fail("tracker requires use of compact option")
    info_hash = request.GET.get('info_hash', '')
    # A raw info_hash is a 20-byte SHA-1 digest.
    if len(info_hash) < 20 or not request.GET.get('peer_id'):
        return _fail("invalid request")
    try:
        # Hex-encode for storage lookups (Python 2 str.encode('hex')).
        info_hash = info_hash.encode('iso-8859-1').encode('hex')
    except:
        return _fail("invalid request")
    args['peer_id'] = request.GET['peer_id']
    torrent_id = Torrent.objects.filter(info_hash=info_hash).values('id')
    if not torrent_id:
        return _fail("no such torrent")
    else:
        torrent_id = torrent_id[0]['id']
    # calculate announce interval
    now = datetime.datetime.now()
    mc = memcache.Client([MEMCACHE], debug=0)
    peers = mc.get('peers')
    if not peers:
        peers = []
    if not OPEN_TRACKER:
        # Enforce the per-user simultaneous-download limit (0 = unlimited).
        dwns = len([p for p in peers if p['user_id'] == u.id and
                    p['expire_time']>now])
        cur_dwns = u.attrs.get('max_sim_dwn', 2)
        if dwns >= cur_dwns and cur_dwns != 0:
            return _fail("maximum number of simultaneous downloads reached: %s"% dwns)
    # Interval scales with swarm size and the last minute's announce
    # activity, floored at MIN_ANNOUNCE_INTERVAL.
    # NOTE(review): Python 2 integer division truncates at each step —
    # presumably intentional, confirm.
    num_peers = len([p for p in peers if p['expire_time']>now])
    announce_rate = len([p for p in peers if p['update_time']>now-datetime.timedelta(minutes=1)])
    announce_interval = max(num_peers * announce_rate / (MAX_ANNOUNCE_RATE**2) * 60,
                            MIN_ANNOUNCE_INTERVAL)
    # calculate expiration time offset
    if event == 'stopped':
        expire_time = 0
    else:
        expire_time = announce_interval * EXPIRE_FACTOR
    # Drop any previous entry for this peer before re-adding it.
    # NOTE(review): removing from `peers` while iterating it can skip
    # elements; with at most one entry per peer_id this works, but it
    # is fragile — confirm the single-entry invariant.
    for p in peers:
        if p['peer_id'] == args['peer_id']:
            peers.remove(p)
    if event == 'completed':
        # Bump the topic's completed-download counter.
        topic = Topic.objects.filter(torrent__pk=torrent_id)
        if len(topic)>0:
            topic[0].attrs['downloaded'] = topic[0].attrs.get('downloaded', 0)+1
            topic[0].save()
    if event != 'stopped':
        peer_dict = {
            'info_hash': info_hash,
            'peer_id': args['peer_id'],
            'ip': args['ip'],
            'port': args['port'],
            'uploaded': args['uploaded'],
            'downloaded': args['downloaded'],
            'left': args['left'],
            'expire_time': now+datetime.timedelta(seconds=int(expire_time)),
            'update_time': now,
            'torrent_id': torrent_id,
        }
        if not OPEN_TRACKER:
            peer_dict['user_id'] = u.id
        peers.append(peer_dict)
    mc.set('peers', peers)
    # How many peers the client wants back (default 50).
    numwant = request.GET.get('numwant', 50)
    try:
        numwant = int(numwant)
    except ValueError:
        numwant = 50
    result = [p for p in peers if p['torrent_id'] == torrent_id and
              p['expire_time']>now and p['info_hash']==info_hash]
    #this may be optimized
    shuffle(result)
    result = result[:numwant]
    # Serialize the peer list in the requested representation.
    if request.GET.get('compact'):
        # 6 bytes per peer: packed IPv4 address + big-endian port.
        peers = ""
        for peer in result:
            peers += pack('>4sH', inet_aton(peer['ip']), peer['port'])
    elif request.GET.get('no_peer_id'):
        peers = []
        for peer in result:
            peers.append({'ip': peer['ip'], 'port': peer['port']})
    else:
        peers = []
        for peer in result:
            peers.append({'ip': peer['ip'], 'port': peer['port'],
                          'peer id': peer['peer_id']})
    return HttpResponse(bencode({
        'interval': int(announce_interval),
        'peers': peers,
    }), mimetype = 'text/plain')
def __init__(self, config):
    """SafeBrowsing blacklist cache over memcached, with logging
    configured from /etc/whitetrash.conf.

    Version markers start at -1, i.e. no list has been loaded yet.
    """
    servers = config["memcache_servers"].split(",")
    self.cache = cmemcache.Client(servers)
    self.malware_version = -1
    self.phishing_version = -1
    logging.config.fileConfig("/etc/whitetrash.conf")
    self.log = logging.getLogger("whitetrashSafeBrowsing")