def cache_suppliers(path=None):
    """
    Make sure identities of all suppliers we know are cached.

    Walks per-customer folders under ``path`` (defaults to
    ``settings.SuppliersDir()``), reads each ``supplierids`` file and
    schedules identity caching for every IDURL not cached yet.

    Returns a ``DeferredList`` that fires when all caching attempts finished.
    """
    dl = []
    # BUGFIX: honor the `path` argument - the original overwrote it inside
    # the loop, so passing a custom directory had no effect
    base_dir = path or settings.SuppliersDir()
    list_local_customers = list(os.listdir(base_dir))
    for customer_id in list_local_customers:
        if not global_id.IsValidGlobalUser(customer_id):
            lg.warn('invalid customer record %s found in %s' % (customer_id, base_dir))
            continue
        try:
            one_customer_idurl = global_id.GlobalUserToIDURL(customer_id)
        except Exception as exc:
            lg.err('idurl caching failed: %r' % exc)
            continue
        # cache the customer identity itself, if needed
        if not id_url.is_cached(one_customer_idurl):
            dl.append(identitycache.immediatelyCaching(one_customer_idurl))
        supplier_ids_path = os.path.join(base_dir, customer_id, 'supplierids')
        lst = bpio._read_list(supplier_ids_path)
        if lst is None:
            lg.warn('did not found suppliers ids at %s' % supplier_ids_path)
            continue
        for one_supplier_idurl in lst:
            if one_supplier_idurl:
                if not id_url.is_cached(one_supplier_idurl):
                    dl.append(identitycache.immediatelyCaching(one_supplier_idurl))
    if _Debug:
        lg.out(_DebugLevel, 'contactsdb.cache_suppliers prepared %d idurls to be cached' % len(dl))
    return DeferredList(dl, consumeErrors=True)
def doInit(self, *args, **kwargs): """ Action method. """ # TODO : put in a seprate state in the state machine self.result_defer = kwargs.get('result_defer', None) identitycache.immediatelyCaching(self.customer_idurl)
def ReadIndex(text_data, encoding='utf-8'):
    """
    Read index data base, ``input`` is a ``StringIO.StringIO`` object which
    keeps the data.

    This is a simple text format, see ``p2p.backup_fs.Serialize()``
    method. The first line keeps revision number.

    Returns True on success, False when another load is already in
    progress or when unserialization failed.
    """
    global _LoadingFlag
    if _LoadingFlag:
        # another load is already running - do not re-enter
        return False
    _LoadingFlag = True
    backup_fs.Clear()
    count = 0
    # BUGFIX: always release _LoadingFlag - the original returned False on
    # unserialization errors with the flag still set, which permanently
    # blocked every subsequent ReadIndex() call
    try:
        try:
            json_data = jsn.loads(
                text_data,
                encoding=encoding,
            )
        except:
            lg.exc()
            # fall back to treating the input as already-parsed data
            json_data = text_data
        if _Debug:
            lg.args(_DebugLevel, json_data=json_data)
        for customer_id in json_data.keys():
            if customer_id == 'items':
                try:
                    count = backup_fs.Unserialize(json_data, from_json=True, decoding=encoding)
                except:
                    lg.exc()
                    return False
            else:
                customer_idurl = global_id.GlobalUserToIDURL(customer_id)
                if not id_url.is_cached(customer_idurl):
                    # start caching in background and skip this customer for now
                    lg.warn('identity %r is not yet cached, skip reading related catalog items' % customer_idurl)
                    identitycache.immediatelyCaching(customer_idurl, try_other_sources=False, ignore_errors=True)
                    continue
                try:
                    count = backup_fs.Unserialize(
                        json_data[customer_id],
                        iter=backup_fs.fs(customer_idurl),
                        iterID=backup_fs.fsID(customer_idurl),
                        from_json=True,
                        decoding=encoding,
                    )
                except:
                    lg.exc()
                    return False
    finally:
        _LoadingFlag = False
    if _Debug:
        lg.out(_DebugLevel, 'backup_control.ReadIndex %d items loaded' % count)
    # local_site.update_backup_fs(backup_fs.ListAllBackupIDsSQL())
    # commit(new_revision)
    return True
def process(newpacket, info):
    """
    Main entry point where all incoming signed packets are coming from remote
    peers.

    The main aspect here is to "authenticate" remote node - need to know it identity.
    """
    from p2p import p2p_service
    from userid import my_id
    # drop everything while the p2p service is not running
    if not driver.is_on('service_p2p_hookups'):
        if _Debug:
            lg.out(_DebugLevel, 'packet_in.process SKIP incoming packet, service_p2p_hookups is not started')
        return None
    if _Debug:
        lg.out(_DebugLevel, 'packet_in.process [%s/%s/%s]:%s(%s) from %s://%s is "%s"' % (
            nameurl.GetName(newpacket.OwnerID),
            nameurl.GetName(newpacket.CreatorID),
            nameurl.GetName(newpacket.RemoteID),
            newpacket.Command,
            newpacket.PacketID,
            info.proto,
            info.host,
            info.status,
        ))
    # only fully received packets are processed
    if info.status != 'finished':
        if _Debug:
            lg.out(_DebugLevel, ' skip, packet status is : [%s]' % info.status)
        return None
    # if _PacketLogFileEnabled:
    #     lg.out(0, ' \033[0;49;92mIN %s(%s) with %d bytes from %s to %s TID:%s\033[0m' % (
    #         newpacket.Command, newpacket.PacketID, info.bytes_received,
    #         global_id.UrlToGlobalID(info.sender_idurl), global_id.UrlToGlobalID(newpacket.RemoteID),
    #         info.transfer_id), log_name='packet', showtime=True)
    # we must know recipient identity; cache it first and re-enter process()
    if not id_url.is_cached(newpacket.RemoteID):
        d = identitycache.immediatelyCaching(newpacket.RemoteID)
        d.addCallback(lambda _: process(newpacket, info))
        # `and None` keeps the errback returning None after logging
        d.addErrback(lambda err: lg.err('incoming remote ID is unknown, failed caching remote %s identity: %s' % (newpacket.RemoteID, str(err))) and None)
        return d
    if newpacket.Command == commands.Identity():
        if newpacket.RemoteID != my_id.getIDURL():
            # identity packet addressed to someone else - just validate and route
            if _Debug:
                lg.out(_DebugLevel, ' incoming Identity is routed to another user')
            if not p2p_service.Identity(newpacket, send_ack=False):
                lg.warn('received identity was not processed')
                return None
            # remote peer sending a valid identity to another peer routed via my machine
            # need to handle that packet - it should be processed by proxy_server
            return handle(newpacket, info)
        # contact sending us current identity we might not have
        # so we handle it before check that packet is valid
        # because we might not have his identity on hands and so can not verify the packet
        # so we check that his Identity is valid and save it into cache
        # than we check the packet to be valid too.
        if not p2p_service.Identity(newpacket):
            lg.warn('received identity was not processed')
            return None
    # make sure the creator identity is cached before handling the packet
    if not identitycache.HasKey(newpacket.CreatorID):
        if _Debug:
            lg.out(_DebugLevel, ' will cache remote identity %s before processing incoming packet %s' % (newpacket.CreatorID, newpacket))
        d = identitycache.immediatelyCaching(newpacket.CreatorID)
        d.addCallback(lambda _: handle(newpacket, info))
        d.addErrback(lambda err: lg.err('failed caching remote %s identity: %s' % (newpacket.CreatorID, str(err))) and None)
        return d
    return handle(newpacket, info)
def SendMessage(remote_idurl, messagebody, packet_id=None):
    """
    Send command.Message() packet to remote peer.

    If the remote identity is not cached yet, caches it first and retries;
    in that case a Deferred is returned, otherwise the result of
    ``gateway.outbox()``.
    """
    global _OutgoingMessageCallback
    if not packet_id:
        packet_id = packetid.UniqueID()
    remote_identity = identitycache.FromCache(remote_idurl)
    if remote_identity is None:
        # identity unknown yet: cache it (20 sec timeout) and retry the send
        d = identitycache.immediatelyCaching(remote_idurl, 20)
        d.addCallback(lambda src: SendMessage(remote_idurl, messagebody, packet_id))
        # BUGFIX: use %-formatting instead of `+` concatenation - the idurl
        # may be bytes and string concatenation would raise TypeError
        d.addErrback(lambda err: lg.warn('failed to retrieve %s' % remote_idurl))
        return d
    Amessage = MessageClass(remote_identity, messagebody)
    Payload = misc.ObjectToString(Amessage)
    lg.out(6, "message.SendMessage to %s with %d bytes" % (remote_idurl, len(Payload)))
    outpacket = signed.Packet(
        commands.Message(),
        my_id.getLocalID(),
        my_id.getLocalID(),
        packet_id,
        Payload,
        remote_idurl,
    )
    result = gateway.outbox(outpacket, wide=True)
    if _OutgoingMessageCallback:
        _OutgoingMessageCallback(result, messagebody, remote_identity, packet_id)
    return result
def doCacheRemoteIdentity(self, *args, **kwargs):
    """
    Action method.

    Starts caching the sender identity and routes the outcome back into
    the state machine.
    """
    caching = identitycache.immediatelyCaching(self.sender_idurl)
    caching.addCallback(self._remote_identity_cached, *args, **kwargs)
    caching.addErrback(lambda err: self.automat('failed', *args, **kwargs))
def doCacheRemoteIdentity(self, arg):
    """
    Action method.

    Kicks off identity caching for the remote node and remembers the
    in-flight Deferred on the instance.
    """
    d = identitycache.immediatelyCaching(self.remote_idurl)
    d.addCallback(self._remote_identity_cached)
    d.addErrback(lambda err: self.automat("failed"))
    self.caching_deferred = d
def _do_retry_one_time(self, fail_info):
    # identify the exact outgoing packet that failed
    to_idurl = id_url.field(fail_info['to']).to_bin()
    from_idurl = id_url.field(fail_info['from']).to_bin()
    _key = (fail_info['command'], fail_info['packet_id'], from_idurl, to_idurl)
    current_retries = self.packets_retries.get(_key, 0)
    if _Debug:
        lg.args(_DebugLevel, key=_key, retries=current_retries)
    # only a closed route is considered retryable; every other error is final
    if fail_info.get('error') != 'route already closed':
        if _Debug:
            lg.dbg(_DebugLevel, 'failed sending routed packet : %r' % fail_info)
        self._do_clean_sent_packet(fail_info)
        self._do_cancel_outbox_packets(fail_info)
        self.packets_retries.pop(_key, None)
        return
    # allow at most one retry per packet key: give up after the second failure
    if current_retries >= 1:
        if _Debug:
            lg.dbg(_DebugLevel, 'failed sending routed packet after few attempts : %r' % fail_info)
        self.automat('retry-failed', fail_info)
        self._do_clean_sent_packet(fail_info)
        self._do_cancel_outbox_packets(fail_info)
        self.packets_retries.pop(_key, None)
        return
    # first failure: bump the retry counter, re-cache the destination
    # identity and retry once when the cache refresh completes
    self.packets_retries[_key] = current_retries + 1
    d = identitycache.immediatelyCaching(fail_info['to'])
    d.addCallback(self._on_cache_retry_success, fail_info)
    d.addErrback(self._on_cache_retry_failed, fail_info)
def doCacheRemoteIdentity(self, arg):
    """
    Action method.

    Begins caching of the remote identity and stores the pending
    Deferred so it can be cancelled/inspected later.
    """
    d = identitycache.immediatelyCaching(self.remote_idurl)
    d.addCallback(self._on_remote_identity_cached)
    d.addErrback(self._on_remote_identity_cache_failed)
    self.caching_deferred = d
def doConnectCustomerSuppliers(self, *args, **kwargs):
    """
    Action method.

    Reads the customer's suppliers list from the incoming record and
    starts connecting to each supplier, caching unknown identities first.
    """
    try:
        self.known_suppliers_list = [s for s in args[0]['suppliers'] if s]
    except:
        lg.exc()
        return
    self.outgoing_list_files_packets_ids = []
    self.known_ecc_map = args[0].get('ecc_map')
    if _Debug:
        lg.args(_DebugLevel, known_ecc_map=self.known_ecc_map, known_suppliers_list=self.known_suppliers_list)
    for supplier_idurl in self.known_suppliers_list:
        if id_url.is_cached(supplier_idurl):
            self._do_connect_with_supplier(supplier_idurl)
        else:
            d = identitycache.immediatelyCaching(supplier_idurl)
            # BUGFIX: bind the loop variable as a default argument - the
            # original lambdas captured `supplier_idurl` late, so by the
            # time a deferred fired every callback saw the LAST supplier
            d.addCallback(lambda *a, idurl=supplier_idurl: self._do_connect_with_supplier(idurl))
            d.addErrback(lambda err, idurl=supplier_idurl: lg.warn('failed caching supplier %r identity: %r' % (idurl, str(err))) and None)
def doCacheRemoteIdentity(self, *args, **kwargs):
    """
    Action method.

    Requests caching of the sender identity; success and failure are
    forwarded to the corresponding handlers with the original arguments.
    """
    caching = identitycache.immediatelyCaching(self.sender_idurl)
    caching.addCallback(self._on_remote_identity_cached, *args, **kwargs)
    caching.addErrback(self._on_remote_identity_cache_failed, *args, **kwargs)
def _do_id_server_health_check(self):
    # my own IDURL as originally registered (as_original ignores rotation)
    my_idurl = my_id.getLocalIdentity().getIDURL(as_original=True)
    if _Debug:
        lg.args(_DebugLevel, my_idurl=my_idurl)

    def _verify(xmlsrc=None):
        # receives the freshly fetched identity XML, or None/empty when
        # the fetch failed - the falsy case marks the server unhealthy
        if not xmlsrc:
            lg.err('my current identity server not healthy')
            self.NeedPropagate = True
            self.automat('check-synchronize')
            return
        remote_ident = identity.identity(xmlsrc=xmlsrc)
        if not remote_ident.isCorrect() or not remote_ident.Valid():
            lg.warn('my current identity server responded with bad identity file')
            self.NeedPropagate = True
            self.automat('check-synchronize')
            return
        # the server must return MY identity, not someone else's
        if remote_ident.getIDURL(as_original=True) != my_idurl:
            lg.warn('my current identity server responded with unknown identity')
            self.NeedPropagate = True
            self.automat('check-synchronize')
            return
        if _Debug:
            lg.dbg(_DebugLevel, 'my current identity server is healthy')

    # skip the check when my identity was cached recently enough
    last_time = identitycache.last_time_cached(my_idurl)
    if last_time and time.time() - last_time < config.conf().getInt('services/identity-propagate/health-check-interval-seconds'):
        if _Debug:
            lg.dbg(_DebugLevel, 'skip health check of my current identity server, last time cached %f seconds ago' % (time.time() - last_time))
        return
    d = identitycache.immediatelyCaching(my_idurl, try_other_sources=False)
    d.addCallback(_verify)
    # on fetch failure call _verify() with no argument, which triggers the
    # "not healthy" branch; `and None` keeps the errback returning None
    d.addErrback(lambda _: _verify() and None)
def doCacheRemoteIdentity(self, arg):
    """
    Action method.

    Caches the sender identity; on success the handler receives the
    original event argument, on failure the machine gets a "failed" event.
    """
    caching = identitycache.immediatelyCaching(self.sender_idurl)
    caching.addCallback(self._remote_identity_cached, arg)
    caching.addErrback(lambda err: self.automat("failed", arg))
def doCacheRemoteIdentity(self, *args, **kwargs):
    """
    Action method.

    Starts caching the remote identity, keeping the pending Deferred on
    the instance; a failure drives the machine with a 'fail' event.
    """
    d = identitycache.immediatelyCaching(self.remote_idurl)
    d.addCallback(self._on_remote_identity_cached)
    d.addErrback(lambda err: self.automat('fail', err))
    self.caching_deferred = d
def process(newpacket, info):
    """
    Entry point for incoming signed packets from remote peers: validates
    Identity packets, makes sure the creator identity is cached, then
    dispatches the packet to ``handle()``.
    """
    from p2p import p2p_service
    from userid import my_id
    # drop everything while the p2p service is not running
    if not driver.is_on('service_p2p_hookups'):
        if _Debug:
            lg.out(
                _DebugLevel,
                'packet_in.process SKIP incoming packet, service_p2p_hookups is not started'
            )
        return None
    if _Debug:
        lg.out(
            _DebugLevel,
            'packet_in.process [%s/%s/%s]:%s(%s) from %s://%s is "%s"' % (
                nameurl.GetName(newpacket.OwnerID),
                nameurl.GetName(newpacket.CreatorID),
                nameurl.GetName(newpacket.RemoteID),
                newpacket.Command,
                newpacket.PacketID,
                info.proto,
                info.host,
                info.status,
            ))
    # only fully received packets are processed
    if info.status != 'finished':
        if _Debug:
            lg.out(_DebugLevel, ' skip, packet status is : [%s]' % info.status)
        return None
    if newpacket.Command == commands.Identity():
        if newpacket.RemoteID != my_id.getLocalIDURL():
            # identity packet addressed to someone else - validate and route
            if _Debug:
                lg.out(_DebugLevel, ' incoming Identity is routed to another user')
            if not p2p_service.Identity(newpacket, send_ack=False):
                lg.warn('non-valid identity received')
                return None
            # remote peer sending a valid identity to another peer routed via my machine
            # need to handle that packet - it should be processed by proxy_server
            return handle(newpacket, info)
        # contact sending us current identity we might not have
        # so we handle it before check that packet is valid
        # because we might not have his identity on hands and so can not verify the packet
        # so we check that his Identity is valid and save it into cache
        # than we check the packet to be valid too.
        if not p2p_service.Identity(newpacket):
            lg.warn('non-valid identity received')
            return None
    # make sure the creator identity is cached before handling the packet
    if not identitycache.HasKey(newpacket.CreatorID):
        if _Debug:
            lg.out(
                _DebugLevel,
                ' will cache remote identity %s before processing incoming packet %s' % (newpacket.CreatorID, newpacket))
        d = identitycache.immediatelyCaching(newpacket.CreatorID)
        d.addCallback(lambda _: handle(newpacket, info))
        # NOTE(review): on caching failure only an error is logged and the
        # packet is dropped - confirm no retry is expected here
        d.addErrback(lambda err: lg.err('failed caching remote %s identity: %s' % (newpacket.CreatorID, str(err))))
        return d
    return handle(newpacket, info)
def doCacheRemoteIDURL(self, *args, **kwargs):
    """
    Action method.

    Counts this caching attempt, then requests the remote identity with
    the configured timeout; the result drives the state machine.
    """
    self.cache_attempts += 1
    target_idurl = strng.to_text(self.remote_idurl)
    d = identitycache.immediatelyCaching(idurl=target_idurl, timeout=self.cache_timeout)
    d.addCallback(lambda src: self.automat('remote-identity-cached', src))
    d.addErrback(lambda err: self.automat('remote-identity-failed', err) and None)
def process_idurl(self, idurl, node):
    """
    Cache the identity behind ``idurl``; the returned Deferred fires with
    the idurl itself on success (not the raw identity source).
    """
    from twisted.internet.defer import Deferred
    from contacts import identitycache
    result = Deferred()
    caching_defer = identitycache.immediatelyCaching(idurl)
    caching_defer.addCallback(lambda src: result.callback(idurl))
    caching_defer.addErrback(result.errback)
    return result
def _process_idurl(self, idurl, node):
    """
    Cache the identity behind ``idurl``; the returned Deferred fires with
    the idurl itself on success (not the raw identity source).
    """
    from twisted.internet.defer import Deferred
    from contacts import identitycache
    from logs import lg
    lg.out(12, 'service_nodes_lookup._process_idurl %s' % idurl)
    result = Deferred()
    caching_defer = identitycache.immediatelyCaching(idurl)
    caching_defer.addCallback(lambda src: result.callback(idurl))
    caching_defer.addErrback(result.errback)
    return result
def _do_identity_cache(ret):
    # NOTE: closure - relies on `result` and `_do_save_customer_suppliers`
    # from the enclosing scope
    all_stories = []
    # schedule caching of every supplier identity that is missing either
    # from the in-memory cache or from the local identity files
    for _supplier_idurl in ret['suppliers']:
        if _supplier_idurl:
            _supplier_idurl = id_url.to_bin(_supplier_idurl)
            if not id_url.is_cached(
                    _supplier_idurl) or not identitycache.HasFile(
                        _supplier_idurl):
                one_supplier_story = identitycache.immediatelyCaching(
                    _supplier_idurl)
                if _Debug:
                    one_supplier_story.addErrback(
                        lg.errback,
                        debug=_Debug,
                        debug_level=_DebugLevel,
                        method='read_customer_suppliers._do_identity_cache'
                    )
                all_stories.append(one_supplier_story)
    # same for the customer identity itself
    _customer_idurl = id_url.to_bin(ret['customer_idurl'])
    if _customer_idurl and (not id_url.is_cached(_customer_idurl)
                            or not identitycache.HasFile(_customer_idurl)):
        one_customer_story = identitycache.immediatelyCaching(
            _customer_idurl)
        if _Debug:
            one_customer_story.addErrback(
                lg.errback,
                debug=_Debug,
                debug_level=_DebugLevel,
                method='read_customer_suppliers._do_identity_cache')
        all_stories.append(one_customer_story)
    if _Debug:
        lg.args(_DebugLevel, all_stories=len(all_stories), ret=ret)
    # wait for all caching attempts; consumeErrors so one failure does not
    # abort the whole DeferredList
    id_cache_story = DeferredList(all_stories, consumeErrors=True)
    id_cache_story.addCallback(_do_save_customer_suppliers, ret)
    if _Debug:
        id_cache_story.addErrback(
            lg.errback,
            debug=_Debug,
            debug_level=_DebugLevel,
            method='read_customer_suppliers._do_identity_cache')
    id_cache_story.addErrback(result.errback)
    return id_cache_story
def process_idurl(idurl, node):
    """
    Cache the identity behind ``idurl`` found on ``node``; the returned
    Deferred reports the outcome via ``on_identity_cached`` or an errback.
    """
    if _Debug:
        lg.out(_DebugLevel, 'lookup.process_idurl %r from %r' % (idurl, node, ))
    result = Deferred()
    # guard clause: an empty idurl fails the result immediately
    if not idurl:
        result.errback(Exception(idurl))
        return result
    caching_defer = identitycache.immediatelyCaching(idurl)
    caching_defer.addCallback(on_identity_cached, idurl, result)
    caching_defer.addErrback(result.errback)
    return result
def fetch(list_ids, refresh_cache=False):
    """
    Request a list of identity files.

    Skips empty entries and, unless ``refresh_cache`` is set, entries
    already present in the cache.
    """
    if _Debug:
        lg.out(_DebugLevel, "propagate.fetch %d identities" % len(list_ids))
    dl = [
        identitycache.immediatelyCaching(id_url.to_original(url))
        for url in list_ids
        if url and not (identitycache.FromCache(url) and not refresh_cache)
    ]
    return DeferredList(dl, consumeErrors=True)
def _got_remote_idurl(self, response):
    """
    Handle a lookup response: extract the idurl, cache the identity and
    drive the state machine accordingly. Always returns the response
    unchanged so it keeps flowing down the callback chain.
    """
    if _Debug:
        lg.out(_DebugLevel, 'proxy_receiver._got_remote_idurl response=%s' % str(response))
    try:
        idurl = response['idurl']
    except:
        idurl = None
    # no usable idurl in the response - report lookup failure
    if not idurl or idurl == 'None':
        self.automat('nodes-not-found')
        return response
    cache_defer = identitycache.immediatelyCaching(idurl)
    cache_defer.addCallback(lambda src: self.automat('found-one-node', idurl))
    cache_defer.addErrback(lambda x: self.automat('nodes-not-found'))
    return response
def push_message(group_key_id, data):
    """
    Start the message producer for ``group_key_id``, caching the group
    creator identity first when needed.

    Returns a Deferred that fires via ``do_start_message_producer`` or
    errbacks when identity caching failed.
    """
    creator_idurl = my_keys.get_creator_idurl(group_key_id, as_field=False)
    if _Debug:
        lg.args(_DebugLevel, group_key_id=group_key_id, creator_idurl=creator_idurl)
    ret = Deferred()
    if not id_url.is_cached(creator_idurl):
        d = identitycache.immediatelyCaching(creator_idurl)
        # BUGFIX: attach the callback BEFORE the errback - in the original
        # order the errback (ret.errback) consumed the Failure and returned
        # None, so the chain continued into the callback and the producer
        # was started even though caching had failed
        d.addCallback(
            lambda *args: do_start_message_producer(group_key_id, data, ret))
        d.addErrback(ret.errback)
        return ret
    do_start_message_producer(group_key_id, data, ret)
    return ret
def cache_customers(path=None):
    """
    Make sure identities of all customers we know are cached.
    """
    if path is None:
        path = settings.CustomerIDsFilename()
    known_idurls = bpio._read_list(path) or []
    dl = [
        identitycache.immediatelyCaching(one_customer_idurl)
        for one_customer_idurl in known_idurls
        if one_customer_idurl and not id_url.is_cached(one_customer_idurl)
    ]
    if _Debug:
        lg.out(_DebugLevel, 'contactsdb.cache_customers prepared %d idurls to be cached' % len(dl))
    return DeferredList(dl, consumeErrors=True)
def cache_correspondents(path=None):
    """
    Make sure identities of all correspondents we know are cached.

    Each line of the correspondents file holds an IDURL, optionally
    followed by a nickname after a space; only the IDURL part is used.
    """
    dl = []
    if path is None:
        path = settings.CorrespondentIDsFilename()
    lst = bpio._read_list(path) or []
    # iterate the lines directly instead of indexing via range(len(...))
    for one_line in lst:
        try:
            one_correspondent_idurl = one_line.strip().split(' ', 1)[0]
        except:
            lg.exc()
            continue
        if one_correspondent_idurl:
            if not id_url.is_cached(one_correspondent_idurl):
                dl.append(identitycache.immediatelyCaching(one_correspondent_idurl))
    if _Debug:
        lg.out(_DebugLevel, 'contactsdb.cache_correspondents prepared %d idurls to be cached' % len(dl))
    return DeferredList(dl, consumeErrors=True)
def send_message(message_body, recipient_global_id, packet_id=None):
    """
    Send command.Message() packet to remote peer.

    Returns Deferred (if remote_idurl was not cached yet) or outbox
    packet object.
    """
    global _OutgoingMessageCallbacks
    if not packet_id:
        packet_id = packetid.UniqueID()
    remote_idurl = global_id.GlobalUserToIDURL(recipient_global_id)
    remote_identity = identitycache.FromCache(remote_idurl)
    # make sure we have remote identity cached
    if remote_identity is None:
        d = identitycache.immediatelyCaching(remote_idurl, timeout=10)
        d.addCallback(lambda src: send_message(
            message_body, recipient_global_id, packet_id))
        # BUGFIX: the original lambda was missing the `%` operator
        # ("'...' (remote_idurl, err)") and raised
        # "'str' object is not callable" instead of logging the warning
        d.addErrback(lambda err: lg.warn('failed to retrieve %s : %s' % (remote_idurl, err)))
        return d
    lg.out(6, "message.send_message to %s with %d bytes message" % (recipient_global_id, len(message_body)))
    try:
        private_message_object = PrivateMessage(recipient_global_id=recipient_global_id)
        private_message_object.encrypt(message_body)
    except Exception as exc:
        # report encryption problems to the caller as a failed Deferred
        return fail(exc)
    Payload = private_message_object.serialize()
    lg.out(6, "message.send_message payload is %d bytes, remote idurl is %s" % (len(Payload), remote_idurl))
    outpacket = signed.Packet(
        commands.Message(),
        my_id.getLocalID(),
        my_id.getLocalID(),
        packet_id,
        Payload,
        remote_idurl,
    )
    result = gateway.outbox(outpacket, wide=True)
    try:
        for cp in _OutgoingMessageCallbacks:
            cp(message_body, private_message_object, remote_identity, outpacket, result)
    except:
        lg.exc()
    return result
def _find_random_node(self, attempts):
    # routers explicitly configured by the user take precedence over DHT lookup
    preferred_routers = []
    preferred_routers_raw = config.conf().getData('services/proxy-transport/preferred-routers').strip()
    if preferred_routers_raw:
        # the setting may be separated by newlines, commas, semicolons or spaces
        preferred_routers_list = re.split('\n|,|;| ', preferred_routers_raw)
        preferred_routers.extend(preferred_routers_list)
    if preferred_routers:
        # pick one preferred router at random and cache its identity
        self.possible_router_idurl = id_url.field(random.choice(preferred_routers))
        if _Debug:
            lg.out(_DebugLevel, 'proxy_receiver._find_random_node selected random item from preferred_routers: %r' % self.possible_router_idurl)
        idcache_defer = identitycache.immediatelyCaching(self.possible_router_idurl)
        idcache_defer.addCallback(lambda *args: self.automat('found-one-node', self.possible_router_idurl))
        # `and None` keeps the errback returning None after driving the machine
        idcache_defer.addErrback(lambda err: self.automat('nodes-not-found') and None)
        return
    # no preferred routers configured: fall back to a random DHT lookup
    if _Debug:
        lg.out(_DebugLevel, 'proxy_receiver._find_random_node will start DHT lookup')
    tsk = lookup.random_proxy_router()
    if tsk:
        tsk.result_defer.addCallback(self._on_nodes_lookup_finished, attempts=attempts)
        tsk.result_defer.addErrback(lambda err: self.automat('nodes-not-found'))
    else:
        self.automat('nodes-not-found')
def verify_contacts(self, id_obj):
    """
    Check if router is ready and his contacts exists in that identity.
    """
    from transport.proxy import proxy_receiver
    if not proxy_receiver.A() or not proxy_receiver.GetRouterIDURL() or not proxy_receiver.GetRouterIdentity():
        # if not yet found any node to route your traffic - do nothing
        if _Debug:
            lg.out(4, "proxy_interface.verify_contacts returning True : router not yet found")
        return True
    if not proxy_receiver.ReadMyOriginalIdentitySource():
        if _Debug:
            lg.out(4, "proxy_interface.verify_contacts returning False : my original identity is empty")
        return False
    result = Deferred()

    def _finish_verification(res):
        # compares the freshly cached router identity against the known
        # router identity and the contacts of `id_obj`; fires `res` with
        # the verdict
        if _Debug:
            lg.out(4, "proxy_interface._finish_verification")
        try:
            cached_id = identitycache.FromCache(proxy_receiver.GetRouterIDURL())
            if not cached_id:
                if _Debug:
                    lg.out(4, " returning False: router identity is not cached")
                res.callback(False)
                return False
            if not proxy_receiver.GetRouterIdentity():
                if _Debug:
                    lg.out(4, " returning False : router identity is None or router is not ready yet")
                # NOTE(review): this branch returns WITHOUT firing `res`,
                # so the outer Deferred never completes - confirm intended
                return True
            if cached_id.serialize() != proxy_receiver.GetRouterIdentity().serialize():
                if _Debug:
                    lg.out(4, "proxy_interface.verify_contacts return False: cached copy is different")
                    lg.out(20, "\n%s\n" % cached_id.serialize())
                    lg.out(20, "\n%s\n" % proxy_receiver.GetRouterIdentity().serialize())
                res.callback(False)
                return
            router_contacts = proxy_receiver.GetRouterIdentity().getContactsByProto()
            # contact sets must match in size and per-protocol values
            if len(router_contacts) != id_obj.getContactsNumber():
                if _Debug:
                    lg.out(4, " returning False: router contacts is different")
                res.callback(False)
                return False
            for proto, contact in id_obj.getContactsByProto().items():
                if proto not in router_contacts.keys():
                    if _Debug:
                        lg.out(4, " returning False: [%s] is not present in router contacts" % proto)
                    res.callback(False)
                    return False
                if router_contacts[proto] != contact:
                    if _Debug:
                        lg.out(4, " returning False: [%s] contact is different in router id" % proto)
                    res.callback(False)
                    return False
            if _Debug:
                lg.out(4, " returning True : my contacts and router contacts is same")
            res.callback(True)
            return True
        except:
            # on unexpected errors report success (best-effort verification)
            lg.exc()
            res.callback(True)
            return True

    # refresh the cached router identity first, then verify
    d = identitycache.immediatelyCaching(proxy_receiver.GetRouterIDURL())
    d.addCallback(lambda src: _finish_verification(result))
    d.addErrback(lambda err: result.callback(False))
    return result
def doInit(self, *args, **kwargs): """ Action method. """ # TODO : put in a seprate state in the state machine identitycache.immediatelyCaching(self.customer_idurl)
def verify_contacts(self, id_obj):
    """
    Check if router is ready and his contacts exists in that identity.
    """
    from transport.proxy import proxy_receiver
    if not proxy_receiver.A() or not proxy_receiver.GetRouterIDURL(
    ) or not proxy_receiver.GetRouterIdentity():
        # if not yet found any node to route your traffic - do nothing
        if _Debug:
            lg.out(
                4,
                'proxy_interface.verify_contacts returning True : router not yet found'
            )
        return True
    if not proxy_receiver.ReadMyOriginalIdentitySource():
        if _Debug:
            lg.out(
                4,
                'proxy_interface.verify_contacts returning False : my original identity is empty'
            )
        return False
    result = Deferred()

    def _finish_verification(res):
        # compares the freshly cached router identity against the known
        # router identity and the contacts of `id_obj`; fires `res` with
        # the verdict
        if _Debug:
            lg.out(4, 'proxy_interface._finish_verification')
        try:
            cached_id = identitycache.FromCache(
                proxy_receiver.GetRouterIDURL())
            if not cached_id:
                if _Debug:
                    lg.out(
                        4,
                        ' returning False: router identity is not cached'
                    )
                res.callback(False)
                return False
            if not proxy_receiver.GetRouterIdentity():
                if _Debug:
                    lg.out(
                        4,
                        ' returning False : router identity is None or router is not ready yet'
                    )
                # NOTE(review): this branch returns WITHOUT firing `res`,
                # so the outer Deferred never completes - confirm intended
                return True
            if cached_id.serialize() != proxy_receiver.GetRouterIdentity(
            ).serialize():
                if _Debug:
                    lg.out(
                        4,
                        'proxy_interface.verify_contacts return False: cached copy is different'
                    )
                    lg.out(20, '\n%s\n' % cached_id.serialize())
                    lg.out(
                        20,
                        '\n%s\n' % proxy_receiver.GetRouterIdentity().serialize())
                res.callback(False)
                return
            router_contacts = proxy_receiver.GetRouterIdentity(
            ).getContactsByProto()
            # contact sets must match in size and per-protocol values
            if len(router_contacts) != id_obj.getContactsNumber():
                if _Debug:
                    lg.out(
                        4,
                        ' returning False: router contacts is different'
                    )
                res.callback(False)
                return False
            for proto, contact in id_obj.getContactsByProto().items():
                if proto not in router_contacts.keys():
                    if _Debug:
                        lg.out(
                            4,
                            ' returning False: [%s] is not present in router contacts' % proto)
                    res.callback(False)
                    return False
                if router_contacts[proto] != contact:
                    if _Debug:
                        lg.out(
                            4,
                            ' returning False: [%s] contact is different in router id' % proto)
                    res.callback(False)
                    return False
            if _Debug:
                lg.out(
                    4,
                    ' returning True : my contacts and router contacts is same'
                )
            res.callback(True)
            return True
        except:
            # on unexpected errors report success (best-effort verification)
            lg.exc()
            res.callback(True)
            return True

    # refresh the cached router identity first, then verify
    d = identitycache.immediatelyCaching(proxy_receiver.GetRouterIDURL())
    d.addCallback(lambda src: _finish_verification(result))
    d.addErrback(lambda err: result.callback(False))
    return result