def _parseConfigfileCache(cache_dict, dirpath):
    """ Used by parseConfigfile() to parse just the cache parts of a config.

        cache_dict: dict from the config's "cache" section. Must contain
            either a well-known cache 'name' (resolved via
            Caches.getCacheByName) or a dotted 'class' path plus optional
            'kwargs'; raises Exception otherwise.
        dirpath: base directory used to resolve the Disk cache's local path.

        Returns an instantiated cache object.
    """
    if 'name' in cache_dict:
        # NOTE: dict.has_key() was removed in Python 3; use the `in` operator.
        _class = Caches.getCacheByName(cache_dict['name'])
        kwargs = {}

        if _class is Caches.Test:
            # Test cache only cares about an optional verbose flag.
            if cache_dict.get('verbose', False):
                kwargs['logfunc'] = lambda msg: stderr.write(msg + '\n')

        elif _class is Caches.Disk:
            kwargs['path'] = enforcedLocalPath(cache_dict['path'], dirpath, 'Disk cache path')

            if 'umask' in cache_dict:
                # Umask is written as an octal string in the config.
                kwargs['umask'] = int(cache_dict['umask'], 8)

            for key in ('dirs', 'gzip'):
                if key in cache_dict:
                    kwargs[key] = cache_dict[key]

        else:
            raise Exception('Unknown cache: %s' % cache_dict['name'])

    elif 'class' in cache_dict:
        _class = loadClassPath(cache_dict['class'])
        kwargs = cache_dict.get('kwargs', {})
        # Keyword-argument names must be plain str, not unicode/other keys.
        kwargs = {str(k): v for (k, v) in kwargs.items()}

    else:
        raise Exception('Missing required cache name or class: %s' % json_dumps(cache_dict))

    cache = _class(**kwargs)

    return cache
def _parseConfigfileCache(cache_dict, dirpath):
    """ Used by parseConfigfile() to parse just the cache parts of a config.

        cache_dict: dict from the config's "cache" section. Must contain
            either a well-known cache 'name' (resolved via
            Caches.getCacheByName) or a dotted 'class' path plus optional
            'kwargs'; raises Exception otherwise.
        dirpath: base directory used to resolve the Disk cache's local path.

        Returns an instantiated cache object.
    """
    if 'name' in cache_dict:
        _class = Caches.getCacheByName(cache_dict['name'])
        kwargs = {}

        def add_kwargs(*keys):
            """ Populate named keys in kwargs from cache_dict. """
            for key in keys:
                if key in cache_dict:
                    kwargs[key] = cache_dict[key]

        if _class is Caches.Test:
            # Test cache only cares about an optional verbose flag.
            if cache_dict.get('verbose', False):
                kwargs['logfunc'] = lambda msg: stderr.write(msg + '\n')

        elif _class is Caches.Disk:
            kwargs['path'] = enforcedLocalPath(cache_dict['path'], dirpath, 'Disk cache path')

            if 'umask' in cache_dict:
                # Umask is written as an octal string in the config.
                kwargs['umask'] = int(cache_dict['umask'], 8)

            add_kwargs('dirs', 'gzip')

        elif _class is Caches.Multi:
            # Recursively build a cache instance for each configured tier.
            kwargs['tiers'] = [_parseConfigfileCache(tier_dict, dirpath)
                               for tier_dict in cache_dict['tiers']]

        elif _class is Caches.Memcache.Cache:
            if 'key prefix' in cache_dict:
                kwargs['key_prefix'] = cache_dict['key prefix']

            add_kwargs('servers', 'lifespan', 'revision')

        elif _class is Caches.Redis.Cache:
            if 'key prefix' in cache_dict:
                kwargs['key_prefix'] = cache_dict['key prefix']

            add_kwargs('host', 'port', 'db')

        elif _class is Caches.S3.Cache:
            # 'policy' added for parity with the S3 cache's supported kwargs.
            add_kwargs('bucket', 'access', 'secret', 'use_locks', 'path', 'reduced_redundancy', 'policy')

        else:
            raise Exception('Unknown cache: %s' % cache_dict['name'])

    elif 'class' in cache_dict:
        _class = loadClassPath(cache_dict['class'])
        kwargs = cache_dict.get('kwargs', {})
        # Keyword-argument names must be plain str keys.
        kwargs = dict( [(str(k), v) for (k, v) in kwargs.items()] )

    else:
        raise Exception('Missing required cache name or class: %s' % json_dumps(cache_dict))

    cache = _class(**kwargs)

    return cache
def _parseConfigfileCache(cache_dict, dirpath):
    """ Used by parseConfigfile() to parse just the cache parts of a config.

        Dispatches on either a well-known cache 'name' (resolved through
        Caches.getCacheByName) or a dotted 'class' path with optional
        'kwargs'; raises Exception when neither is present or the named
        cache is unrecognized. Returns an instantiated cache object.
    """
    if 'name' in cache_dict:
        cache_class = Caches.getCacheByName(cache_dict['name'])
        cache_args = {}

        def copy_keys(*names):
            """ Copy each named key from cache_dict into cache_args when present. """
            for name in names:
                if name in cache_dict:
                    cache_args[name] = cache_dict[name]

        if cache_class is Caches.Test:
            # Test cache takes only an optional logging function.
            if cache_dict.get('verbose', False):
                cache_args['logfunc'] = lambda msg: stderr.write(msg + '\n')

        elif cache_class is Caches.Disk:
            cache_args['path'] = enforcedLocalPath(cache_dict['path'], dirpath, 'Disk cache path')

            if 'umask' in cache_dict:
                # Umask appears in the config as an octal string.
                cache_args['umask'] = int(cache_dict['umask'], 8)

            copy_keys('dirs', 'gzip')

        elif cache_class is Caches.Multi:
            # Each tier is itself a full cache configuration; recurse.
            cache_args['tiers'] = [_parseConfigfileCache(tier_dict, dirpath)
                                   for tier_dict in cache_dict['tiers']]

        elif cache_class is Caches.Memcache.Cache:
            if 'key prefix' in cache_dict:
                cache_args['key_prefix'] = cache_dict['key prefix']

            copy_keys('servers', 'lifespan', 'revision')

        elif cache_class is Caches.Redis.Cache:
            if 'key prefix' in cache_dict:
                cache_args['key_prefix'] = cache_dict['key prefix']

            copy_keys('host', 'port', 'db')

        elif cache_class is Caches.S3.Cache:
            copy_keys('bucket', 'access', 'secret', 'use_locks', 'path', 'reduced_redundancy', 'policy')

        else:
            raise Exception('Unknown cache: %s' % cache_dict['name'])

    elif 'class' in cache_dict:
        cache_class = loadClassPath(cache_dict['class'])
        raw_args = cache_dict.get('kwargs', {})
        # Keyword-argument names must be plain str keys.
        cache_args = {str(key): value for (key, value) in raw_args.items()}

    else:
        raise Exception('Missing required cache name or class: %s' % json_dumps(cache_dict))

    return cache_class(**cache_args)
def _parseConfigfileCache(cache_dict, dirpath):
    """ Used by parseConfigfile() to parse just the cache parts of a config.

        cache_dict: dict from the config's "cache" section. Must contain
            either a well-known cache "name" (resolved via
            Caches.getCacheByName) or a dotted "class" path plus optional
            "kwargs"; raises Exception otherwise.
        dirpath: base directory used to resolve the Disk cache's local path.

        Returns an instantiated cache object.
    """
    if "name" in cache_dict:
        _class = Caches.getCacheByName(cache_dict["name"])
        kwargs = {}

        def add_kwargs(*keys):
            """ Populate named keys in kwargs from cache_dict. """
            for key in keys:
                if key in cache_dict:
                    kwargs[key] = cache_dict[key]

        if _class is Caches.Test:
            # Test cache only cares about an optional verbose flag.
            if cache_dict.get("verbose", False):
                kwargs["logfunc"] = lambda msg: stderr.write(msg + "\n")

        elif _class is Caches.Disk:
            kwargs["path"] = enforcedLocalPath(cache_dict["path"], dirpath, "Disk cache path")

            if "umask" in cache_dict:
                # Umask is written as an octal string in the config.
                kwargs["umask"] = int(cache_dict["umask"], 8)

            add_kwargs("dirs", "gzip")

        elif _class is Caches.Multi:
            # Recursively build a cache instance for each configured tier.
            kwargs["tiers"] = [_parseConfigfileCache(tier_dict, dirpath)
                               for tier_dict in cache_dict["tiers"]]

        elif _class is Caches.Memcache.Cache:
            if "key prefix" in cache_dict:
                kwargs["key_prefix"] = cache_dict["key prefix"]

            add_kwargs("servers", "lifespan", "revision")

        elif _class is Caches.Redis.Cache:
            if "key prefix" in cache_dict:
                kwargs["key_prefix"] = cache_dict["key prefix"]

            add_kwargs("host", "port", "db")

        elif _class is Caches.S3.Cache:
            # "policy" added for parity with the S3 cache's supported kwargs.
            add_kwargs("bucket", "access", "secret", "use_locks", "path", "reduced_redundancy", "policy")

        else:
            raise Exception("Unknown cache: %s" % cache_dict["name"])

    elif "class" in cache_dict:
        _class = loadClassPath(cache_dict["class"])
        kwargs = cache_dict.get("kwargs", {})
        # Keyword-argument names must be plain str keys.
        kwargs = dict([(str(k), v) for (k, v) in kwargs.items()])

    else:
        raise Exception("Missing required cache name or class: %s" % json_dumps(cache_dict))

    cache = _class(**kwargs)

    return cache
def __init__(self):
    """ Set up peer identity, caches, networking helpers, and the three
        receiver threads (multicast, unicast, file transfer).

        Bug fix: the original assigned ``threading.Thread(...).start()`` to
        self.t1/t2/t3 — ``Thread.start()`` returns None, so the Thread
        objects were lost and could never be joined or inspected. Create
        the threads first, keep the references, then start them.
    """
    self.peerID = self.genID()
    self.caches = Caches.Caches(self.peerID)
    self.sender = Sender.Sender()
    # presumably 5000 is the peer's listening port on all interfaces — TODO confirm
    self.receiver = Receiver.Receiver(self.peerID, ("0.0.0.0", 5000), self.caches)
    self.handler = Handler.Handler(self.peerID, self.caches)
    # Keep the Thread objects so callers can join() or check is_alive().
    self.t1 = threading.Thread(target=self.receiver.multicast)
    self.t2 = threading.Thread(target=self.receiver.unicast)
    self.t3 = threading.Thread(target=self.receiver.receive_files)
    self.t1.start()
    self.t2.start()
    self.t3.start()
    self.lock = Lock()