def has_local_ip(idurl):
    """
    Check whether a known local IP address is stored for the given user.
    """
    global _LocalIPs
    return id_url.to_original(idurl) in _LocalIPs
def start_one(idurl, timeout=10, try_other_sources=True):
    """
    An alias for `immediatelyCaching()` method.
    """
    original_idurl = id_url.to_original(idurl)
    return immediatelyCaching(original_idurl, timeout=timeout, try_other_sources=try_other_sources)
def get_local_ip(idurl):
    """
    Look up the known local IP of the given user in the index, or None.
    """
    global _LocalIPs
    return _LocalIPs.get(id_url.to_original(idurl), None)
def idremove(idurl):
    """
    Remove identity from cache, also update indexes.

    Not remove local file.
    Returns the removed identity object, or None if it was not cached.
    """
    global _IdentityCache
    global _IdentityCacheIDs
    global _IdentityCacheModifiedTime
    global _Contact2IDURL
    global _IDURL2Contacts
    global _IPPort2IDURL
    # normalize to the "original" IDURL form used as the key in all indexes
    idurl = id_url.to_original(idurl)
    idobj = _IdentityCache.pop(idurl, None)
    identid = _IdentityCacheIDs.pop(idurl, None)
    _IdentityCacheModifiedTime.pop(idurl, None)
    _IDURL2Contacts.pop(idurl, None)
    if idobj is not None:
        # drop the reverse-lookup entries built from the identity's contacts
        for contact in idobj.getContacts():
            _Contact2IDURL.pop(contact, None)
            try:
                proto, host, port, fname = nameurl.UrlParse(contact)
                ipport = (host, int(port))
                _IPPort2IDURL.pop(ipport, None)
            except:
                # contact may not contain a parsable host:port pair - skip it
                pass
    # notify subscribers that this cache slot is now empty
    fire_cache_updated_callbacks(single_item=(identid, None, None))
    return idobj
def idcontacts(idurl):
    """
    A fast way to get identity contacts.
    """
    global _IDURL2Contacts
    contacts_set = _IDURL2Contacts.get(id_url.to_original(idurl), set())
    return list(contacts_set)
def idget(idurl):
    """
    Get identity from cache, or None if it is not cached.
    """
    global _IdentityCache
    return _IdentityCache.get(id_url.to_original(idurl), None)
def _success(src, idurl):
    # Callback fired when the identity XML was successfully downloaded.
    # NOTE(review): uses `caching()` defer-lists - this looks like a
    # module-level copy of the helper nested in `immediatelyCaching()`.
    global _LastTimeCached
    idurl = id_url.to_original(idurl)
    # collect all pending Deferreds waiting for this idurl
    defer_results = caching().pop(idurl, [])
    if _Debug:
        lg.args(_DebugLevel, src=type(src), idurl=idurl, defer_results=len(defer_results))
    if UpdateAfterChecking(idurl, src):
        # identity accepted: fire every waiting Deferred with the raw XML
        for result in defer_results:
            if result and not result.called:
                reactor.callLater(0, result.callback, src)  # @UndefinedVariable
        if _Debug:
            lg.out(_DebugLevel, '[cached] %s' % idurl)
        p2p_stats.count_identity_cache(idurl, len(src))
        _LastTimeCached[idurl] = time.time()
    else:
        # identity rejected: propagate the failure to every waiting Deferred
        for result in defer_results:
            if result and not result.called:
                reactor.callLater(0, result.errback, Exception(src))  # @UndefinedVariable
        lg.warn('[cache error] %s is not valid' % idurl)
        p2p_stats.count_identity_cache(idurl, 0)
        _LastTimeCached.pop(idurl, None)
    defer_results = []
    del defer_results
    return src
def _next_source(resp, idurl, sources, pos):
    # Try downloading the identity from the next alternative source URL.
    # NOTE(review): `timeout` is not defined in this scope - this appears to
    # be a copy of a helper nested inside `immediatelyCaching()`, which
    # provides `timeout` via closure. Confirm before using standalone.
    global _LastTimeCached
    if _Debug:
        lg.args(_DebugLevel, resp=resp, idurl=idurl, pos=pos, sources=len(sources))
    if pos >= len(sources):
        # all alternative sources exhausted: fail every pending Deferred
        lg.warn('[cache failed] %r and also %d other sources' % (idurl, len(sources)))
        defer_results = caching().pop(idurl, [])
        for result in defer_results:
            if result and not result.called:
                reactor.callLater(0, result.errback, Exception('cache failed from multiple sources'))  # @UndefinedVariable
        p2p_stats.count_identity_cache(idurl, 0)
        _LastTimeCached.pop(idurl, None)
        defer_results = []
        del defer_results
        return None
    next_idurl = sources[pos]
    next_idurl = id_url.to_original(next_idurl)
    if _Debug:
        lg.args(_DebugLevel, next_idurl=next_idurl)
    # fetch from the alternative URL, but report success against the
    # original idurl so its pending Deferreds get fired
    d = net_misc.getPageTwisted(url=next_idurl, timeout=timeout)
    d.addCallback(_success, idurl)
    d.addErrback(_next_source, idurl, sources, pos + 1)
    return None
def _next_source(err, sources, pos, ret):
    # Older variant: try the next alternative source, chaining the result
    # into `ret` (the original caching task's Deferred).
    # NOTE(review): `idurl`, `timeout`, `_CachingTasks` are not defined in
    # this scope - appears to be a copy of a helper nested inside
    # `immediatelyCaching()`, which provides them via closure.
    if pos >= len(sources):
        # no more sources to try - fail the original task
        lg.warn('[cache failed] %r from %d sources' % (idurl, len(sources)))
        if ret:
            ret.errback(Exception('cache failed from %d sources' % len(sources)))
        return None
    next_idurl = sources[pos]
    next_idurl = id_url.to_original(next_idurl)
    if _Debug:
        lg.out(_DebugLevel, 'identitycache.immediatelyCaching._next_source %r from %r : %r' % (pos, sources, next_idurl, ))
    if next_idurl in _CachingTasks:
        # reuse the Deferred of an already-running task for that source
        if _Debug:
            lg.out(_DebugLevel, 'identitycache.immediatelyCaching already have next task for %r' % next_idurl)
        d = _CachingTasks[next_idurl]
    else:
        # register a new task and start the download for the alternative URL
        if _Debug:
            lg.out(_DebugLevel, 'identitycache.immediatelyCaching will try another source of %r : %r' % (idurl, next_idurl))
        _CachingTasks[next_idurl] = Deferred()
        _CachingTasks[next_idurl].addErrback(on_caching_task_failed, next_idurl)
        d = net_misc.getPageTwisted(next_idurl, timeout)
    d.addCallback(_success, next_idurl)
    if ret:
        # forward the successful result into the original task's Deferred
        d.addCallback(ret.callback)
    d.addErrback(_next_source, sources, pos+1, ret)
    return None
def get_filename(idurl):
    """
    Build the path of the local cache file for the given identity URL.

    Returns None when the IDURL can not be converted to a file name.
    """
    idurl = id_url.to_original(idurl)
    try:
        cache_fname = nameurl.UrlFilename(idurl)
    except:
        lg.err("idurl %r is not correct" % idurl)
        return None
    return os.path.join(settings.IdentityCacheDir(), cache_fname)
def add_callback(idurl, ignore_errors=False):
    """
    Register and return a new Deferred that will fire when caching of
    the given identity URL completes.
    """
    idurl = id_url.to_original(idurl)
    defer_obj = Deferred()
    defer_obj.addErrback(on_caching_task_failed, idurl, ignore_errors)
    caching().setdefault(idurl, []).append(defer_obj)
    return defer_obj
def update(idurl, xml_src):
    """
    This is a correct method to update an identity in the local cache.

    PREPRO need to check that date or version is after old one so not
    vulnerable to replay attacks.
    Returns True when the cache (and possibly the local file) was updated.
    """
    idurl = id_url.to_original(idurl)
    try:
        newid = identity.identity(xmlsrc=xml_src)
    except:
        lg.exc()
        return False
    if not newid.isCorrect():
        lg.err("incorrect identity : %r" % idurl)
        return False
    try:
        # Valid() verifies the identity's own signature
        if not newid.Valid():
            lg.err("identity not valid : %r" % idurl)
            return False
    except:
        lg.exc()
        return False
    filename = os.path.join(settings.IdentityCacheDir(), nameurl.UrlFilename(idurl))
    if os.path.exists(filename):
        # compare against the previously stored copy before overwriting
        oldidentityxml = bpio.ReadTextFile(filename)
        oldidentity = identity.identity(xmlsrc=oldidentityxml)
        if oldidentity.publickey != newid.publickey:
            # a different public key for the same IDURL is an impersonation attempt
            # TODO: SECURITY add some kind of black list to be able to block certain IP's if the DDoS me
            lg.err("new public key does not match with old, SECURITY VIOLATION : %r" % idurl)
            return False
        if oldidentity.signature != newid.signature:
            if _Debug:
                lg.out(_DebugLevel, 'identitydb.update have new data for %r' % nameurl.GetName(idurl))
        else:
            # same signature means identical content: refresh in-memory
            # indexes only, no need to rewrite the file
            idset(idurl, newid)
            return True
    # publickeys match so we can update it
    bpio.WriteTextFile(filename, xml_src)
    idset(idurl, newid)
    return True
def print_id(idurl):
    """
    For debug purposes.
    """
    idurl = id_url.to_original(idurl)
    if not has_idurl(idurl):
        return
    idForKey = get_ident(idurl)
    if _Debug:
        lg.out(_DebugLevel, str(idForKey.getSources(as_originals=True)))
        lg.out(_DebugLevel, str(idForKey.contacts))
        lg.out(_DebugLevel, str(idForKey.publickey))
        lg.out(_DebugLevel, str(idForKey.signature))
def fetch(list_ids, refresh_cache=False):
    """
    Request a list of identity files.
    """
    if _Debug:
        lg.out(_DebugLevel, "propagate.fetch %d identities" % len(list_ids))
    dl = [
        identitycache.immediatelyCaching(id_url.to_original(url))
        for url in list_ids
        if url and not (identitycache.FromCache(url) and not refresh_cache)
    ]
    return DeferredList(dl, consumeErrors=True)
def GetLatest(idurl):
    """
    Returns latest copy from cache or fire `immediatelyCaching`,
    result is a `Deferred` object.
    """
    idurl = id_url.to_original(idurl)
    result = Deferred()
    known = FromCache(idurl)
    if known:
        # already cached - fire immediately with the known copy
        result.callback(known)
        return result
    d = immediatelyCaching(idurl)
    d.addCallback(lambda _: result.callback(FromCache(idurl)))
    d.addErrback(lambda err: result.errback(err) and None)
    return result
def remove(idurl):
    """
    Top method to remove identity from cache - also removes the local file.
    """
    idurl = id_url.to_original(idurl)
    cache_path = os.path.join(settings.IdentityCacheDir(), nameurl.UrlFilename(idurl))
    if os.path.isfile(cache_path):
        if _Debug:
            lg.out(_DebugLevel, "identitydb.remove file %r" % cache_path)
        try:
            os.remove(cache_path)
        except:
            lg.exc()
    idremove(idurl)
    return True
def _fail(err, idurl):
    """
    Errback for a failed identity download: fail the pending caching task,
    or retry the download from the identity's alternative sources.

    NOTE(review): relies on `try_other_sources` and `_next_source` from an
    enclosing scope - appears to be a helper of `immediatelyCaching()`.
    """
    global _CachingTasks
    idurl = id_url.to_original(idurl)
    # BUGFIX: pop with a default - the task may have been removed already,
    # and the branches below explicitly handle `result` being None
    result = _CachingTasks.pop(idurl, None)
    if not try_other_sources:
        if result:
            result.errback(err)
        else:
            lg.warn('caching task for %s was not found' % idurl)
        p2p_stats.count_identity_cache(idurl, 0)
        lg.warn('[cache failed] %s : %s' % (idurl, err.getErrorMessage(), ))
        return None
    # look for other known sources of the same identity to retry from
    latest_idurl, latest_rev = id_url.get_latest_revision(idurl)
    latest_ident = None
    sources = []
    if latest_idurl:
        latest_ident = identitydb.get_ident(latest_idurl)
    if latest_ident:
        sources = latest_ident.getSources(as_fields=False)
    if sources:
        if idurl in sources:
            # BUGFIX: list.remove() returns None - the original assigned its
            # result back to `sources`, wiping out all remaining sources and
            # silently disabling the retry logic
            sources.remove(idurl)
    if sources:
        lg.warn('[cache failed] %s : %s but will try %d more sources' % (idurl, err.getErrorMessage(), len(sources), ))
        _next_source(err, sources, 0, result)
        return result
    if result:
        result.errback(err)
    else:
        lg.warn('caching task for %s was not found' % idurl)
    p2p_stats.count_identity_cache(idurl, 0)
    lg.warn('[cache failed] and also no other sources found %s : %s' % (idurl, err.getErrorMessage(), ))
    return None
def idset(idurl, id_obj):
    """
    Important method - need to call that to update indexes.

    Stores `id_obj` in the in-memory cache and refreshes all reverse
    lookup indexes (contact -> idurl, idurl -> contacts, (ip, port) -> idurl).
    """
    global _Contact2IDURL
    global _IDURL2Contacts
    global _IPPort2IDURL
    global _IdentityCache
    global _IdentityCacheIDs
    global _IdentityCacheCounter
    global _IdentityCacheModifiedTime
    idurl = id_url.to_original(idurl)
    if not has_idurl(idurl):
        if _Debug:
            lg.out(_DebugLevel, 'identitydb.idset new identity: %r' % idurl)
    _IdentityCache[idurl] = id_obj
    _IdentityCacheModifiedTime[idurl] = time.time()
    # assign a stable numeric id to this idurl on first insertion
    identid = _IdentityCacheIDs.get(idurl, None)
    if identid is None:
        identid = _IdentityCacheCounter
        _IdentityCacheCounter += 1
        _IdentityCacheIDs[idurl] = identid
    for contact in id_obj.getContacts():
        if contact not in _Contact2IDURL:
            _Contact2IDURL[contact] = set()
        # else:
        #     if len(_Contact2IDURL[contact]) >= 1 and idurl not in _Contact2IDURL[contact]:
        #         lg.warn('another user have same contact: ' + str(list(_Contact2IDURL[contact])))
        _Contact2IDURL[contact].add(idurl)
        if idurl not in _IDURL2Contacts:
            _IDURL2Contacts[idurl] = set()
        _IDURL2Contacts[idurl].add(contact)
        try:
            proto, host, port, fname = nameurl.UrlParse(contact)
            ipport = (host, int(port))
            _IPPort2IDURL[ipport] = idurl
        except:
            # contact without a parsable host:port - skip the ip/port index
            pass
    # TODO: when identity contacts changed - need to remove old items from _Contact2IDURL
    fire_cache_updated_callbacks(single_item=(identid, idurl, id_obj))
    if _Debug:
        lg.out(_DebugLevel, 'identitydb.idset %r' % idurl)
    # now make sure we properly handle changes in the sources of that identity
    try:
        id_url.identity_cached(id_obj)
    except:
        lg.exc()
def get_ident(idurl):
    """
    A smart way to get identity from cache.

    If not cached in memory but found locally - read it from disk.
    Returns the identity object, or None when it can not be loaded.
    """
    idurl = id_url.to_original(idurl)
    if has_idurl(idurl):
        # fast path: already present in the in-memory cache
        return idget(idurl)
    try:
        partfilename = nameurl.UrlFilename(idurl)
    except:
        if _Debug:
            lg.out(_DebugLevel, "identitydb.get_ident ERROR %r is incorrect" % idurl)
        return None
    if not partfilename:
        if _Debug:
            lg.out(_DebugLevel, "identitydb.get_ident ERROR %r is empty" % idurl)
        return None
    filename = os.path.join(settings.IdentityCacheDir(), partfilename)
    if not os.path.exists(filename):
        if _Debug:
            lg.out(_DebugLevel, "identitydb.get_ident file %r not exist" % os.path.basename(filename))
        return None
    idxml = bpio.ReadTextFile(filename)
    if not idxml:
        if _Debug:
            lg.out(_DebugLevel, "identitydb.get_ident %s not found" % nameurl.GetName(idurl))
        return None
    idobj = identity.identity(xmlsrc=idxml)
    idurl_orig = idobj.getIDURL()
    # only accept the file if it really belongs to the requested idurl,
    # then populate the in-memory cache with it
    if idurl == idurl_orig.original():
        idset(idurl, idobj)
        return idobj
    lg.err("not found identity object idurl=%r idurl_orig=%r" % (idurl, idurl_orig))
    return None
def _success(src, idurl):
    # Older variant of the download-success callback working with the
    # single-Deferred `_CachingTasks` registry.
    # NOTE(review): appears to be a copy of a helper nested inside the
    # `_CachingTasks`-based `immediatelyCaching()`.
    global _CachingTasks
    global _LastTimeCached
    idurl = id_url.to_original(idurl)
    result = _CachingTasks.pop(idurl, None)
    if not result:
        lg.warn('caching task for %s was not found' % idurl)
    if UpdateAfterChecking(idurl, src):
        # identity accepted: fire the pending task with the raw XML
        if result:
            result.callback(src)
        lg.out(_DebugLevel, '[cached] %s' % idurl)
        p2p_stats.count_identity_cache(idurl, len(src))
        _LastTimeCached[idurl] = time.time()
    else:
        # identity rejected: fail the pending task
        if result:
            result.errback(Exception(src))
        lg.warn('[cache error] %s is not valid' % idurl)
        p2p_stats.count_identity_cache(idurl, 0)
    return src
def _fail(err, idurl):
    # Download-failure errback: either report the error to all pending
    # Deferreds, or retry from the identity's alternative sources.
    # NOTE(review): `try_other_sources` and `_next_source` are not defined
    # in this scope - appears to be a helper nested in `immediatelyCaching()`.
    global _LastTimeCached
    idurl = id_url.to_original(idurl)
    if _Debug:
        lg.args(_DebugLevel, err=err, idurl=idurl)
    if not try_other_sources:
        # no retries allowed: fail every pending Deferred right away
        p2p_stats.count_identity_cache(idurl, 0)
        _LastTimeCached.pop(idurl, None)
        lg.warn('[cache failed] %s : %s' % (idurl, err.getErrorMessage(), ))
        defer_results = caching().pop(idurl, [])
        for result in defer_results:
            if result and not result.called:
                reactor.callLater(0, result.errback, err)  # @UndefinedVariable
        defer_results = []
        del defer_results
        return None
    # collect alternative source URLs: first from the cached identity of
    # the latest known revision, then from the known public-key registry
    sources = []
    latest_ident = None
    latest_idurl, _ = id_url.get_latest_revision(idurl)
    if not latest_idurl:
        latest_idurl = idurl
    if latest_idurl:
        latest_ident = identitydb.get_ident(latest_idurl)
    if latest_ident:
        sources.extend(list(latest_ident.getSources(as_originals=True)))
    if not sources:
        pub_key = id_url.known().get(latest_idurl)
        if pub_key:
            known_sources = id_url.sources(pub_key)
            for another_idurl in reversed(known_sources):
                if another_idurl != latest_idurl and another_idurl != idurl:
                    if another_idurl not in sources:
                        sources.append(another_idurl)
    # never retry the URL that just failed
    if idurl in sources:
        sources.remove(idurl)
    if _Debug:
        lg.args(_DebugLevel, idurl=idurl, latest_idurl=latest_idurl, latest_ident=latest_ident, sources=sources)
    if sources:
        lg.warn('[cache failed] %s : %s but will try %d more sources' % (idurl, err.getErrorMessage(), len(sources), ))
        _next_source(None, idurl, sources, 0)
        return None
    # nothing left to try: report the failure to every pending Deferred
    p2p_stats.count_identity_cache(idurl, 0)
    _LastTimeCached.pop(idurl, None)
    lg.warn('[cache failed] and also no other sources found %s : %s' % (idurl, err.getErrorMessage(), ))
    defer_results = caching().pop(idurl, [])
    if _Debug:
        lg.args(_DebugLevel, known=len(id_url.known().keys()), defer_results=len(defer_results))
    for result in defer_results:
        if result and not result.called:
            reactor.callLater(0, result.errback, err)  # @UndefinedVariable
    defer_results = []
    del defer_results
    return None
def has_idurl(idurl):
    """
    Return True if that IDURL already cached.
    """
    global _IdentityCache
    key = id_url.to_original(idurl)
    return key in _IdentityCache
def immediatelyCaching(idurl, timeout=10, try_other_sources=True, ignore_errors=False):
    """
    A smart method to cache some identity and get results in callbacks.

    Starts (or joins) a download task for `idurl` and returns a Deferred
    that fires with the identity XML on success. When `try_other_sources`
    is True, a failed download is retried from alternative source URLs.
    """
    idurl = id_url.to_original(idurl)
    if not idurl:
        raise Exception('can not cache, idurl is empty')

    def _success(src, idurl):
        # download succeeded: validate and store the identity, then fire
        # every Deferred waiting on this idurl
        global _LastTimeCached
        idurl = id_url.to_original(idurl)
        defer_results = caching().pop(idurl, [])
        if _Debug:
            lg.args(_DebugLevel, src=type(src), idurl=idurl, defer_results=len(defer_results))
        if UpdateAfterChecking(idurl, src):
            for result in defer_results:
                if result and not result.called:
                    reactor.callLater(0, result.callback, src)  # @UndefinedVariable
            if _Debug:
                lg.out(_DebugLevel, '[cached] %s' % idurl)
            p2p_stats.count_identity_cache(idurl, len(src))
            _LastTimeCached[idurl] = time.time()
        else:
            # identity rejected: propagate the failure to the waiters
            for result in defer_results:
                if result and not result.called:
                    reactor.callLater(0, result.errback, Exception(src))  # @UndefinedVariable
            lg.warn('[cache error] %s is not valid' % idurl)
            p2p_stats.count_identity_cache(idurl, 0)
            _LastTimeCached.pop(idurl, None)
        defer_results = []
        del defer_results
        return src

    def _next_source(resp, idurl, sources, pos):
        # try to download from the alternative source at position `pos`;
        # chains itself as errback to walk the whole sources list
        global _LastTimeCached
        if _Debug:
            lg.args(_DebugLevel, resp=resp, idurl=idurl, pos=pos, sources=len(sources))
        if pos >= len(sources):
            # all sources exhausted: fail every pending Deferred
            lg.warn('[cache failed] %r and also %d other sources' % (idurl, len(sources)))
            defer_results = caching().pop(idurl, [])
            for result in defer_results:
                if result and not result.called:
                    reactor.callLater(0, result.errback, Exception('cache failed from multiple sources'))  # @UndefinedVariable
            p2p_stats.count_identity_cache(idurl, 0)
            _LastTimeCached.pop(idurl, None)
            defer_results = []
            del defer_results
            return None
        next_idurl = sources[pos]
        next_idurl = id_url.to_original(next_idurl)
        if _Debug:
            lg.args(_DebugLevel, next_idurl=next_idurl)
        # fetch from the alternative URL but report success on the
        # original idurl, so its pending Deferreds get fired
        d = net_misc.getPageTwisted(url=next_idurl, timeout=timeout)
        d.addCallback(_success, idurl)
        d.addErrback(_next_source, idurl, sources, pos + 1)
        return None

    def _fail(err, idurl):
        # download failed: either report the error or retry from
        # alternative sources of the same identity
        global _LastTimeCached
        idurl = id_url.to_original(idurl)
        if _Debug:
            lg.args(_DebugLevel, err=err, idurl=idurl)
        if not try_other_sources:
            p2p_stats.count_identity_cache(idurl, 0)
            _LastTimeCached.pop(idurl, None)
            lg.warn('[cache failed] %s : %s' % (idurl, err.getErrorMessage(), ))
            defer_results = caching().pop(idurl, [])
            for result in defer_results:
                if result and not result.called:
                    reactor.callLater(0, result.errback, err)  # @UndefinedVariable
            defer_results = []
            del defer_results
            return None
        # collect alternative sources: from the cached identity of the
        # latest known revision, or from the known public-key registry
        sources = []
        latest_ident = None
        latest_idurl, _ = id_url.get_latest_revision(idurl)
        if not latest_idurl:
            latest_idurl = idurl
        if latest_idurl:
            latest_ident = identitydb.get_ident(latest_idurl)
        if latest_ident:
            sources.extend(list(latest_ident.getSources(as_originals=True)))
        if not sources:
            pub_key = id_url.known().get(latest_idurl)
            if pub_key:
                known_sources = id_url.sources(pub_key)
                for another_idurl in reversed(known_sources):
                    if another_idurl != latest_idurl and another_idurl != idurl:
                        if another_idurl not in sources:
                            sources.append(another_idurl)
        # never retry the URL that just failed
        if idurl in sources:
            sources.remove(idurl)
        if _Debug:
            lg.args(_DebugLevel, idurl=idurl, latest_idurl=latest_idurl, latest_ident=latest_ident, sources=sources)
        if sources:
            lg.warn('[cache failed] %s : %s but will try %d more sources' % (idurl, err.getErrorMessage(), len(sources), ))
            _next_source(None, idurl, sources, 0)
            return None
        # nothing left to try: fail every pending Deferred
        p2p_stats.count_identity_cache(idurl, 0)
        _LastTimeCached.pop(idurl, None)
        lg.warn('[cache failed] and also no other sources found %s : %s' % (idurl, err.getErrorMessage(), ))
        defer_results = caching().pop(idurl, [])
        if _Debug:
            lg.args(_DebugLevel, known=len(id_url.known().keys()), defer_results=len(defer_results))
        for result in defer_results:
            if result and not result.called:
                reactor.callLater(0, result.errback, err)  # @UndefinedVariable
        defer_results = []
        del defer_results
        return None

    def _start_one(idurl, ignore_errors):
        # register a new waiter Deferred and kick off the actual download
        if _Debug:
            lg.args(_DebugLevel, idurl=idurl)
        defer_obj = add_callback(idurl, ignore_errors=ignore_errors)
        d = net_misc.getPageTwisted(idurl, timeout)
        d.addCallback(_success, idurl)
        d.addErrback(_fail, idurl)
        return defer_obj

    if idurl in caching():
        # a download is already in flight: just join its waiters list
        if _Debug:
            lg.out(_DebugLevel, 'identitycache.immediatelyCaching already has tasks for %r' % idurl)
        defer_obj = add_callback(idurl, ignore_errors=ignore_errors)
        return defer_obj
    if _Debug:
        lg.out(_DebugLevel, 'identitycache.immediatelyCaching started new task for %r' % idurl)
    return _start_one(idurl, ignore_errors=ignore_errors)
def last_time_cached(idurl):
    """
    Return the timestamp of the last successful caching of this identity,
    or None if it was never cached or the IDURL is empty.
    """
    global _LastTimeCached
    key = id_url.to_original(idurl)
    return _LastTimeCached.get(key, None) if key else None
def get_one(idurl):
    """
    An alias for `FromCache()` method.
    """
    original_idurl = id_url.to_original(idurl)
    return FromCache(original_idurl)
def get_last_modified_time(idurl):
    """
    Return the time when the cached identity was last modified, or None.
    """
    global _IdentityCacheModifiedTime
    return _IdentityCacheModifiedTime.get(id_url.to_original(idurl), None)
def immediatelyCaching(idurl, timeout=10, try_other_sources=True):
    """
    A smart method to cache some identity and get results in callbacks.

    Starts (or reuses) a download task for `idurl`, registered in the
    global `_CachingTasks` map, and returns its Deferred. When
    `try_other_sources` is True a failed download is retried from the
    identity's alternative source URLs.
    """
    global _CachingTasks
    global _LastTimeCached
    idurl = id_url.to_original(idurl)
    if not idurl:
        raise Exception('can not cache, idurl is empty')
    if idurl in _CachingTasks and not _CachingTasks[idurl].called:
        # a download is already in flight - reuse its Deferred
        if _Debug:
            lg.out(_DebugLevel, 'identitycache.immediatelyCaching already have a task for %r' % idurl)
        return _CachingTasks[idurl]
    if _Debug:
        lg.out(_DebugLevel, 'identitycache.immediatelyCaching started new task for %r' % idurl)

    def _success(src, idurl):
        # download succeeded: validate and store the identity, then fire
        # the pending task's Deferred
        global _CachingTasks
        global _LastTimeCached
        idurl = id_url.to_original(idurl)
        result = _CachingTasks.pop(idurl, None)
        if not result:
            lg.warn('caching task for %s was not found' % idurl)
        if UpdateAfterChecking(idurl, src):
            if result:
                result.callback(src)
            lg.out(_DebugLevel, '[cached] %s' % idurl)
            p2p_stats.count_identity_cache(idurl, len(src))
            _LastTimeCached[idurl] = time.time()
        else:
            # identity rejected: fail the pending task
            if result:
                result.errback(Exception(src))
            lg.warn('[cache error] %s is not valid' % idurl)
            p2p_stats.count_identity_cache(idurl, 0)
        return src

    def _next_source(err, sources, pos, ret):
        # try the alternative source at position `pos`; chains itself as
        # errback to walk the whole sources list
        if pos >= len(sources):
            lg.warn('[cache failed] %r from %d sources' % (idurl, len(sources)))
            if ret:
                ret.errback(Exception('cache failed from %d sources' % len(sources)))
            return None
        next_idurl = sources[pos]
        next_idurl = id_url.to_original(next_idurl)
        if _Debug:
            lg.out(_DebugLevel, 'identitycache.immediatelyCaching._next_source %r from %r : %r' % (pos, sources, next_idurl, ))
        if next_idurl in _CachingTasks:
            # reuse the Deferred of an already-running task for that source
            if _Debug:
                lg.out(_DebugLevel, 'identitycache.immediatelyCaching already have next task for %r' % next_idurl)
            d = _CachingTasks[next_idurl]
        else:
            # register a new task and start the download for that source
            if _Debug:
                lg.out(_DebugLevel, 'identitycache.immediatelyCaching will try another source of %r : %r' % (idurl, next_idurl))
            _CachingTasks[next_idurl] = Deferred()
            _CachingTasks[next_idurl].addErrback(on_caching_task_failed, next_idurl)
            d = net_misc.getPageTwisted(next_idurl, timeout)
        d.addCallback(_success, next_idurl)
        if ret:
            # forward the successful result into the original task's Deferred
            d.addCallback(ret.callback)
        d.addErrback(_next_source, sources, pos + 1, ret)
        return None

    def _fail(err, idurl):
        # download failed: fail the pending task, or retry from other sources
        global _CachingTasks
        idurl = id_url.to_original(idurl)
        # BUGFIX: pop with a default - the task may already be gone, and
        # the branches below explicitly handle `result` being None
        result = _CachingTasks.pop(idurl, None)
        if not try_other_sources:
            if result:
                result.errback(err)
            else:
                lg.warn('caching task for %s was not found' % idurl)
            p2p_stats.count_identity_cache(idurl, 0)
            lg.warn('[cache failed] %s : %s' % (idurl, err.getErrorMessage(), ))
            return None
        # look for other known sources of the same identity
        latest_idurl, latest_rev = id_url.get_latest_revision(idurl)
        latest_ident = None
        sources = []
        if latest_idurl:
            latest_ident = identitydb.get_ident(latest_idurl)
        if latest_ident:
            sources = latest_ident.getSources(as_fields=False)
        if sources:
            if idurl in sources:
                # BUGFIX: list.remove() returns None - the original assigned
                # its result back to `sources`, wiping out all remaining
                # sources and silently disabling the retry logic
                sources.remove(idurl)
        if sources:
            lg.warn('[cache failed] %s : %s but will try %d more sources' % (idurl, err.getErrorMessage(), len(sources), ))
            _next_source(err, sources, 0, result)
            return result
        if result:
            result.errback(err)
        else:
            lg.warn('caching task for %s was not found' % idurl)
        p2p_stats.count_identity_cache(idurl, 0)
        lg.warn('[cache failed] and also no other sources found %s : %s' % (idurl, err.getErrorMessage(), ))
        return None

    idurl = id_url.to_original(idurl)
    _CachingTasks[idurl] = Deferred()
    _CachingTasks[idurl].addErrback(on_caching_task_failed, idurl)
    d = net_misc.getPageTwisted(idurl, timeout)
    d.addCallback(_success, idurl)
    d.addErrback(_fail, idurl)
    return _CachingTasks[idurl]