def read_state(customer_id, broker_id):
    service_dir = settings.ServiceDir('service_message_broker')
    keepers_dir = os.path.join(service_dir, 'keepers')
    broker_dir = os.path.join(keepers_dir, broker_id)
    keeper_state_file_path = os.path.join(broker_dir, customer_id)
    json_value = None
    if os.path.isfile(keeper_state_file_path):
        try:
            json_value = jsn.loads_text(local_fs.ReadTextFile(keeper_state_file_path))
        except:
            lg.exc()
            return None
        if _Debug:
            lg.args(_DebugLevel, customer_id=customer_id, broker_id=broker_id, json_value=json_value)
        return json_value
    broker_idurl = global_id.glob2idurl(broker_id)
    if id_url.is_cached(broker_idurl):
        for one_broker_id in os.listdir(keepers_dir):
            one_broker_idurl = global_id.glob2idurl(one_broker_id)
            if id_url.is_cached(one_broker_idurl):
                if one_broker_idurl == broker_idurl:
                    broker_dir = os.path.join(keepers_dir, one_broker_id)
                    keeper_state_file_path = os.path.join(broker_dir, customer_id)
                    json_value = None
                    if os.path.isfile(keeper_state_file_path):
                        try:
                            json_value = jsn.loads_text(local_fs.ReadTextFile(keeper_state_file_path))
                        except:
                            lg.exc()
                            return None
                    if _Debug:
                        lg.args(_DebugLevel, customer_id=customer_id, broker_id=one_broker_id, json_value=json_value)
                    return json_value
    return None

def cache_suppliers(path=None):
    """
    Make sure identities of all suppliers we know are cached.
    """
    dl = []
    list_local_customers = list(os.listdir(settings.SuppliersDir()))
    for customer_id in list_local_customers:
        if not global_id.IsValidGlobalUser(customer_id):
            lg.warn('invalid customer record %s found in %s' % (customer_id, settings.SuppliersDir()))
            continue
        try:
            one_customer_idurl = global_id.GlobalUserToIDURL(customer_id)
        except Exception as exc:
            lg.err('idurl caching failed: %r' % exc)
            continue
        if not id_url.is_cached(one_customer_idurl):
            dl.append(identitycache.immediatelyCaching(one_customer_idurl))
        path = os.path.join(settings.SuppliersDir(), customer_id, 'supplierids')
        lst = bpio._read_list(path)
        if lst is None:
            lg.warn('did not find supplier ids at %s' % path)
            continue
        for one_supplier_idurl in lst:
            if one_supplier_idurl:
                if not id_url.is_cached(one_supplier_idurl):
                    dl.append(identitycache.immediatelyCaching(one_supplier_idurl))
    if _Debug:
        lg.out(_DebugLevel, 'contactsdb.cache_suppliers prepared %d idurls to be cached' % len(dl))
    return DeferredList(dl, consumeErrors=True)

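# Usage sketch (illustration only, not part of the original module): cache_suppliers()
# returns a twisted DeferredList, so a caller would normally wait for it to fire before
# relying on the cached identities. The callback below is a hypothetical example.
#
#     d = cache_suppliers()
#     # with consumeErrors=True the DeferredList fires with a list of (success, result) tuples
#     d.addCallback(lambda results: lg.info('finished caching %d supplier identities' % len(results)))
#     d.addErrback(lambda err: lg.warn('supplier identities caching failed: %r' % err))
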
def get_supplier_meta_info(supplier_idurl, customer_idurl=None):
    """
    Return stored meta info for the given supplier of that customer.
    """
    global _SuppliersMetaInfo
    if not customer_idurl:
        customer_idurl = my_id.getIDURL()
    if not id_url.is_cached(customer_idurl) or not id_url.is_cached(supplier_idurl):
        return {}
    customer_idurl = id_url.field(customer_idurl)
    supplier_idurl = id_url.field(supplier_idurl)
    return jsn.dict_keys_to_text(jsn.dict_values_to_text(
        _SuppliersMetaInfo.get(customer_idurl, {}).get(supplier_idurl, {})))

def test_identity_cached(self):
    self.assertFalse(id_url.is_cached(alice_bin))
    self.assertFalse(id_url.is_cached(alice_text))
    alice_identity = self._cache_identity('alice')
    self.assertTrue(id_url.is_cached(alice_bin))
    self.assertTrue(id_url.is_cached(alice_text))
    self.assertEqual(alice_identity.getIDURL().to_bin(), alice_bin)
    self.assertEqual(alice_identity.getIDURL().to_text(), alice_text)
    self.assertEqual(id_url.field(alice_bin).to_public_key(), alice_identity.getPublicKey())
    self.assertEqual(id_url.field(alice_text), id_url.field(alice_bin))
    self._cache_identity('bob')
    self.assertTrue(id_url.field(alice_text) != id_url.field(bob))

def load_suppliers(path=None, customer_idurl=None, all_customers=False):
    """
    Load suppliers list from disk.
    """
    if all_customers:
        list_local_customers = list(os.listdir(settings.SuppliersDir()))
        if _Debug:
            lg.out(_DebugLevel, 'contactsdb.load_suppliers %d known customers' % len(list_local_customers))
        for customer_id in list_local_customers:
            if not global_id.IsValidGlobalUser(customer_id):
                lg.warn('invalid customer record %s found in %s' % (customer_id, settings.SuppliersDir()))
                continue
            path = os.path.join(settings.SuppliersDir(), customer_id, 'supplierids')
            lst = bpio._read_list(path)
            if lst is None:
                lg.warn('did not find supplier ids at %s' % path)
                continue
            one_customer_idurl = global_id.GlobalUserToIDURL(customer_id)
            if not id_url.is_cached(one_customer_idurl):
                lg.warn('customer identity %r not cached yet' % one_customer_idurl)
                continue
            if not one_customer_idurl.is_latest():
                latest_customer_path = os.path.join(settings.SuppliersDir(), one_customer_idurl.to_id())
                old_customer_path = os.path.join(settings.SuppliersDir(), customer_id)
                if not os.path.exists(latest_customer_path):
                    os.rename(old_customer_path, latest_customer_path)
                    lg.info('detected and processed idurl rotate when loading suppliers for customer: %r -> %r' % (customer_id, one_customer_idurl.to_id()))
                else:
                    bpio._dir_remove(old_customer_path)
                    lg.warn('found old customer dir %r and removed' % old_customer_path)
                    continue
            lst = list(map(lambda i: i if id_url.is_cached(i) else b'', lst))
            set_suppliers(lst, customer_idurl=one_customer_idurl)
            if _Debug:
                lg.out(_DebugLevel, '    loaded %d known suppliers for customer %r' % (len(lst), one_customer_idurl))
        return True
    if not customer_idurl:
        customer_idurl = my_id.getLocalID()
    customer_idurl = id_url.field(customer_idurl)
    if path is None:
        path = os.path.join(settings.SuppliersDir(), global_id.UrlToGlobalID(customer_idurl), 'supplierids')
    lst = bpio._read_list(path)
    if lst is None:
        lst = list()
    lst = list(map(lambda i: i if id_url.is_cached(i) else b'', lst))
    set_suppliers(lst, customer_idurl=customer_idurl)
    if _Debug:
        lg.out(_DebugLevel, 'contactsdb.load_suppliers %d items from %s' % (len(lst), path))
    return True

def ping(idurl, channel=None, ack_timeout=15, ping_retries=0, keep_alive=False):
    """
    Perform a handshake with the remote node, but only if it is not currently connected.
    Returns a Deferred object.
    """
    idurl = strng.to_bin(idurl)
    if _Debug:
        lg.args(_DebugLevel, idurl=idurl, keep_alive=keep_alive, channel=channel)
    result = Deferred()
    result.addErrback(on_ping_failed, idurl=idurl, channel=channel)
    if id_url.is_empty(idurl):
        result.errback(Exception('empty idurl provided'))
        return result
    if not id_url.is_cached(idurl):
        if _Debug:
            lg.dbg(_DebugLevel, 'user identity %r not cached yet, executing clean handshake' % idurl)
        return handshaker.ping(
            idurl=idurl,
            ack_timeout=ack_timeout,
            ping_retries=ping_retries,
            channel=channel or 'clean_ping',
            keep_alive=keep_alive,
        )
    idurl = id_url.field(idurl)
    if not isKnown(idurl):
        if not check_create(idurl, keep_alive=keep_alive):
            raise Exception('can not create instance')
    A(idurl, 'ping-now', result, channel=channel, ack_timeout=ack_timeout, ping_retries=ping_retries, original_idurl=idurl.to_original())
    return result

def handshake(idurl, channel=None, ack_timeout=15, ping_retries=2, keep_alive=False):
    """
    Immediately perform a handshake with the remote node: fetch the remote identity file,
    then send my own Identity() to the remote peer and wait for an Ack() packet.
    Returns a Deferred object.
    """
    idurl = strng.to_bin(idurl)
    if _Debug:
        lg.args(_DebugLevel, idurl=idurl, keep_alive=keep_alive, channel=channel, ack_timeout=ack_timeout, ping_retries=ping_retries)
    result = Deferred()
    result.addErrback(on_ping_failed, idurl=idurl, channel=channel)
    if id_url.is_empty(idurl):
        result.errback(Exception('empty idurl provided'))
        return result
    if not id_url.is_cached(idurl):
        if _Debug:
            lg.dbg(_DebugLevel, 'user identity %r not cached yet, executing clean handshake' % idurl)
        return handshaker.ping(
            idurl=idurl,
            ack_timeout=ack_timeout,
            ping_retries=ping_retries,
            channel=channel or 'clean_handshake',
            keep_alive=keep_alive,
        )
    idurl = id_url.field(idurl)
    if not isKnown(idurl):
        if not check_create(idurl, keep_alive=keep_alive):
            raise Exception('can not create instance')
    A(idurl, 'handshake', result, channel=channel, ack_timeout=ack_timeout, ping_retries=ping_retries, original_idurl=idurl.to_original())
    return result

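# Usage sketch (illustration only, not part of the original module): both ping() and
# handshake() return a Deferred, so callers attach callbacks instead of expecting a
# synchronous result. The idurl value and channel name below are placeholders.
#
#     d = handshake(b'http://some-id-host.net/alice.xml', channel='example', keep_alive=True)
#     d.addCallback(lambda ok: lg.info('handshake done: %r' % ok))
#     d.addErrback(lambda err: lg.warn('handshake failed: %r' % err))
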
def doConnectCustomerSuppliers(self, *args, **kwargs):
    """
    Action method.
    """
    try:
        self.known_suppliers_list = [s for s in args[0]['suppliers'] if s]
    except:
        lg.exc()
        return
    self.outgoing_list_files_packets_ids = []
    self.known_ecc_map = args[0].get('ecc_map')
    if _Debug:
        lg.args(_DebugLevel, known_ecc_map=self.known_ecc_map, known_suppliers_list=self.known_suppliers_list)
    for supplier_idurl in self.known_suppliers_list:
        if id_url.is_cached(supplier_idurl):
            self._do_connect_with_supplier(supplier_idurl)
        else:
            d = identitycache.immediatelyCaching(supplier_idurl)
            # bind the current supplier_idurl as a default argument, otherwise every
            # callback would see only the last value of the loop variable
            d.addCallback(lambda *a, s=supplier_idurl: self._do_connect_with_supplier(s))
            d.addErrback(lambda err, s=supplier_idurl: lg.warn('failed caching supplier %r identity: %r' % (s, str(err))) and None)

def ReadIndex(text_data, encoding='utf-8'):
    """
    Read the index database, ``text_data`` holds the serialized catalog data.
    See the ``p2p.backup_fs.Serialize()`` method for the format details.
    """
    global _LoadingFlag
    if _LoadingFlag:
        return False
    _LoadingFlag = True
    backup_fs.Clear()
    count = 0
    try:
        json_data = jsn.loads(text_data, encoding=encoding)
    except:
        lg.exc()
        json_data = text_data
    if _Debug:
        lg.args(_DebugLevel, json_data=json_data)
    for customer_id in json_data.keys():
        if customer_id == 'items':
            try:
                count = backup_fs.Unserialize(json_data, from_json=True, decoding=encoding)
            except:
                lg.exc()
                return False
        else:
            customer_idurl = global_id.GlobalUserToIDURL(customer_id)
            if not id_url.is_cached(customer_idurl):
                lg.warn('identity %r is not yet cached, skip reading related catalog items' % customer_idurl)
                identitycache.immediatelyCaching(customer_idurl, try_other_sources=False, ignore_errors=True)
                continue
            try:
                count = backup_fs.Unserialize(
                    json_data[customer_id],
                    iter=backup_fs.fs(customer_idurl),
                    iterID=backup_fs.fsID(customer_idurl),
                    from_json=True,
                    decoding=encoding,
                )
            except:
                lg.exc()
                return False
    if _Debug:
        lg.out(_DebugLevel, 'backup_control.ReadIndex %d items loaded' % count)
    # local_site.update_backup_fs(backup_fs.ListAllBackupIDsSQL())
    # commit(new_revision)
    _LoadingFlag = False
    return True

def _do_save_customer_suppliers(id_cached_result, ret):
    if my_id.getIDURL() != id_url.field(ret['customer_idurl']):
        contactsdb.set_suppliers(ret['suppliers'], customer_idurl=ret['customer_idurl'])
        contactsdb.save_suppliers(customer_idurl=ret['customer_idurl'])
        if ret.get('ecc_map'):
            for supplier_idurl in ret['suppliers']:
                if supplier_idurl and id_url.is_cached(supplier_idurl):
                    contactsdb.add_supplier_meta_info(
                        supplier_idurl=supplier_idurl,
                        info={'ecc_map': ret['ecc_map'], },
                        customer_idurl=ret['customer_idurl'],
                    )
    else:
        if _Debug:
            lg.out(_DebugLevel, 'dht_relations._do_save_customer_suppliers SKIP processing my own suppliers')
    if _Debug:
        lg.out(_DebugLevel, 'dht_relations._do_save_customer_suppliers OK for %r returned %d suppliers' % (
            ret['customer_idurl'], len(ret['suppliers']), ))
    result.callback(ret)
    return ret

def is_supplier(supplier_idurl, customer_idurl=None):
    """
    Return True if a supplier_connector() instance is registered for the given supplier of that customer.
    """
    global _SuppliersConnectors
    if customer_idurl is None:
        customer_idurl = my_id.getLocalID()
    if not id_url.is_cached(customer_idurl):
        return False
    if not id_url.is_cached(supplier_idurl):
        return False
    customer_idurl = id_url.field(customer_idurl)
    supplier_idurl = id_url.field(supplier_idurl)
    if customer_idurl not in _SuppliersConnectors:
        return False
    if supplier_idurl not in _SuppliersConnectors[customer_idurl]:
        return False
    return True

def process(newpacket, info):
    """
    Main entry point where all incoming signed packets are coming from remote peers.
    The key step here is to "authenticate" the remote node - we need to know its identity.
    """
    from p2p import p2p_service
    from userid import my_id
    if not driver.is_on('service_p2p_hookups'):
        if _Debug:
            lg.out(_DebugLevel, 'packet_in.process SKIP incoming packet, service_p2p_hookups is not started')
        return None
    if _Debug:
        lg.out(_DebugLevel, 'packet_in.process [%s/%s/%s]:%s(%s) from %s://%s is "%s"' % (
            nameurl.GetName(newpacket.OwnerID), nameurl.GetName(newpacket.CreatorID),
            nameurl.GetName(newpacket.RemoteID), newpacket.Command, newpacket.PacketID,
            info.proto, info.host, info.status, ))
    if info.status != 'finished':
        if _Debug:
            lg.out(_DebugLevel, '    skip, packet status is: [%s]' % info.status)
        return None
    # if _PacketLogFileEnabled:
    #     lg.out(0, '        \033[0;49;92mIN %s(%s) with %d bytes from %s to %s TID:%s\033[0m' % (
    #         newpacket.Command, newpacket.PacketID, info.bytes_received,
    #         global_id.UrlToGlobalID(info.sender_idurl), global_id.UrlToGlobalID(newpacket.RemoteID),
    #         info.transfer_id), log_name='packet', showtime=True)
    # we must know recipient identity
    if not id_url.is_cached(newpacket.RemoteID):
        d = identitycache.immediatelyCaching(newpacket.RemoteID)
        d.addCallback(lambda _: process(newpacket, info))
        d.addErrback(lambda err: lg.err('incoming remote ID is unknown, failed caching remote %s identity: %s' % (newpacket.RemoteID, str(err))) and None)
        return d
    if newpacket.Command == commands.Identity():
        if newpacket.RemoteID != my_id.getIDURL():
            if _Debug:
                lg.out(_DebugLevel, '    incoming Identity is routed to another user')
            if not p2p_service.Identity(newpacket, send_ack=False):
                lg.warn('received identity was not processed')
                return None
            # remote peer is sending a valid identity to another peer routed via my machine,
            # need to handle that packet - it should be processed by proxy_server
            return handle(newpacket, info)
        # contact is sending us a current identity we might not have,
        # so we handle it before checking that the packet is valid,
        # because we might not have his identity on hands and so can not verify the packet;
        # we check that his Identity is valid and save it into the cache,
        # then we check the packet to be valid too
        if not p2p_service.Identity(newpacket):
            lg.warn('received identity was not processed')
            return None
    if not identitycache.HasKey(newpacket.CreatorID):
        if _Debug:
            lg.out(_DebugLevel, '    will cache remote identity %s before processing incoming packet %s' % (newpacket.CreatorID, newpacket))
        d = identitycache.immediatelyCaching(newpacket.CreatorID)
        d.addCallback(lambda _: handle(newpacket, info))
        d.addErrback(lambda err: lg.err('failed caching remote %s identity: %s' % (newpacket.CreatorID, str(err))) and None)
        return d
    return handle(newpacket, info)

def update_correspondents(idslist):
    """
    Set correspondents ID's list, input items are tuples: (idurl, nickname, ).
    """
    global _CorrespondentsChangedCallback
    oldcorrespondents = list(correspondents())
    idslist = list(filter(lambda i: id_url.is_cached(i[0]), idslist))
    set_correspondents(idslist)
    if _CorrespondentsChangedCallback is not None:
        _CorrespondentsChangedCallback(oldcorrespondents, correspondents())

def _do_identity_cache(ret):
    all_stories = []
    for _supplier_idurl in ret['suppliers']:
        if _supplier_idurl:
            _supplier_idurl = id_url.to_bin(_supplier_idurl)
            if not id_url.is_cached(_supplier_idurl) or not identitycache.HasFile(_supplier_idurl):
                one_supplier_story = identitycache.immediatelyCaching(_supplier_idurl)
                if _Debug:
                    one_supplier_story.addErrback(lg.errback, debug=_Debug, debug_level=_DebugLevel, method='read_customer_suppliers._do_identity_cache')
                all_stories.append(one_supplier_story)
    _customer_idurl = id_url.to_bin(ret['customer_idurl'])
    if _customer_idurl and (not id_url.is_cached(_customer_idurl) or not identitycache.HasFile(_customer_idurl)):
        one_customer_story = identitycache.immediatelyCaching(_customer_idurl)
        if _Debug:
            one_customer_story.addErrback(lg.errback, debug=_Debug, debug_level=_DebugLevel, method='read_customer_suppliers._do_identity_cache')
        all_stories.append(one_customer_story)
    if _Debug:
        lg.args(_DebugLevel, all_stories=len(all_stories), ret=ret)
    id_cache_story = DeferredList(all_stories, consumeErrors=True)
    id_cache_story.addCallback(_do_save_customer_suppliers, ret)
    if _Debug:
        id_cache_story.addErrback(lg.errback, debug=_Debug, debug_level=_DebugLevel, method='read_customer_suppliers._do_identity_cache')
    id_cache_story.addErrback(result.errback)
    return id_cache_story

def Inbox(newpacket, info, status, message):
    """
    This is called when some ``packet`` was received from a remote peer - that user seems to be ONLINE.
    """
    global _ShutdownFlag
    if _ShutdownFlag:
        return False
    if id_url.is_cached(newpacket.OwnerID):
        if newpacket.OwnerID == my_id.getIDURL():
            return False
    else:
        if newpacket.OwnerID.to_bin() == my_id.getIDURL().to_bin():
            return False
    if not id_url.is_cached(newpacket.OwnerID):
        return False
    if newpacket.RemoteID != my_id.getIDURL():
        return False
    check_create(newpacket.OwnerID)
    A(newpacket.OwnerID, 'inbox-packet', (newpacket, info, status, message))
    return False

def doCleanPrevUser(self, *args, **kwargs):
    """
    Action method.
    """
    from customer import supplier_connector
    if id_url.is_cached(self.target_idurl):
        sc = supplier_connector.by_idurl(self.target_idurl)
        if sc:
            sc.remove_callback('supplier_finder', self._supplier_connector_state)
    self.target_idurl = None

def existing(customer_idurl):
    """
    Return True if an instance of `queue_keeper()` already exists for the given customer.
    """
    global _QueueKeepers
    customer_idurl = id_url.to_bin(customer_idurl)
    if id_url.is_empty(customer_idurl):
        return None
    if not id_url.is_cached(customer_idurl):
        lg.warn('customer idurl is not cached yet, can not start QueueKeeper()')
        return None
    customer_idurl = id_url.field(customer_idurl)
    return customer_idurl in _QueueKeepers

def getInstance(idurl, autocreate=True):
    """
    Return the online_status() state machine instance for the given user,
    creating a new one when ``autocreate`` is True.
    """
    if _ShutdownFlag:
        return None
    if id_url.is_empty(idurl):
        return None
    if not id_url.is_cached(idurl):
        return None
    idurl = id_url.field(idurl)
    if not isKnown(idurl) and not autocreate:
        return None
    check_create(idurl)
    return A(idurl)

def customer_position(idurl):
    """
    Return position of the customer with given ID, or -1 if not found.
    """
    if not idurl:
        return -1
    if not id_url.is_cached(idurl):
        return -1
    idurl = id_url.field(idurl)
    try:
        index = id_url.to_bin_list(customers()).index(idurl.to_bin())
    except:
        index = -1
    return index

def cache_customers(path=None):
    """
    Make sure identities of all customers we know are cached.
    """
    dl = []
    if path is None:
        path = settings.CustomerIDsFilename()
    lst = bpio._read_list(path) or []
    for one_customer_idurl in lst:
        if one_customer_idurl:
            if not id_url.is_cached(one_customer_idurl):
                dl.append(identitycache.immediatelyCaching(one_customer_idurl))
    if _Debug:
        lg.out(_DebugLevel, 'contactsdb.cache_customers prepared %d idurls to be cached' % len(dl))
    return DeferredList(dl, consumeErrors=True)

def getStatusLabel(idurl):
    """
    Return a short text label describing the current state of that user.
    """
    global _ShutdownFlag
    if _ShutdownFlag:
        return '?'
    if id_url.is_empty(idurl):
        return '?'
    if not id_url.is_cached(idurl):
        return '?'
    idurl = id_url.field(idurl)
    if not isKnown(idurl):
        return '?'
    return stateToLabel(A(idurl).state)

def getCurrentState(idurl):
    """
    Return the current state of that user or `None` if that contact is unknown.
    """
    global _ShutdownFlag
    if _ShutdownFlag:
        return None
    if id_url.is_empty(idurl):
        return None
    if not id_url.is_cached(idurl):
        return None
    idurl = id_url.field(idurl)
    if not isKnown(idurl):
        return None
    return A(idurl).state

def isCheckingNow(idurl):
    """
    Return True if the state machine of the given contact is currently in the PING state.
    """
    global _ShutdownFlag
    if _ShutdownFlag:
        return False
    if id_url.is_empty(idurl):
        return False
    if not id_url.is_cached(idurl):
        return False
    idurl = id_url.field(idurl)
    if not isKnown(idurl):
        return False
    return A(idurl).state == 'PING'

def isOffline(idurl):
    """
    Return True if given contact's state is OFFLINE.
    """
    global _ShutdownFlag
    if _ShutdownFlag:
        return True
    if id_url.is_empty(idurl):
        return True
    if not id_url.is_cached(idurl):
        return True
    idurl = id_url.field(idurl)
    if not isKnown(idurl):
        return True
    return A(idurl).state == 'OFFLINE'

def isOnline(idurl):
    """
    Return True if given contact is currently connected: its state machine is in the
    CONNECTED or PING? state.
    """
    global _ShutdownFlag
    if _ShutdownFlag:
        return False
    if id_url.is_empty(idurl):
        return False
    if not id_url.is_cached(idurl):
        return False
    idurl = id_url.field(idurl)
    if not isKnown(idurl):
        return False
    return A(idurl).state in ['CONNECTED', 'PING?', ]

def isKnown(idurl):
    """
    Return `True` if state machine online_status() already exists for this user.
    """
    global _OnlineStatusDict
    global _ShutdownFlag
    if _ShutdownFlag:
        return False
    if id_url.is_empty(idurl):
        return False
    if not id_url.is_cached(idurl):
        return False
    idurl = id_url.field(idurl)
    return idurl in list(_OnlineStatusDict.keys())

def check_create(idurl, keep_alive=True):
    """
    Creates a new instance of the online_status() state machine and sends an "init" event to it.
    """
    idurl = strng.to_bin(idurl)
    if id_url.is_empty(idurl):
        return False
    if not id_url.is_cached(idurl):
        return False
    idurl = id_url.field(idurl)
    if idurl not in list(_OnlineStatusDict.keys()):
        A(idurl, 'init', keep_alive=keep_alive)
        if _Debug:
            lg.out(_DebugLevel, 'online_status.check_create instance for %r was not found, made a new one with state OFFLINE' % idurl)
    return True

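# Usage sketch (illustration only, not part of the original module): a typical caller of the
# online_status helpers above first makes sure the state machine exists and only then queries
# or refreshes the status. The idurl value and channel name below are placeholders.
#
#     if check_create(some_idurl):
#         if not isOnline(some_idurl):
#             ping(some_idurl, channel='example')
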
def push_message(group_key_id, data):
    creator_idurl = my_keys.get_creator_idurl(group_key_id, as_field=False)
    if _Debug:
        lg.args(_DebugLevel, group_key_id=group_key_id, creator_idurl=creator_idurl)
    ret = Deferred()
    if not id_url.is_cached(creator_idurl):
        d = identitycache.immediatelyCaching(creator_idurl)
        d.addErrback(ret.errback)
        d.addCallback(lambda *args: do_start_message_producer(group_key_id, data, ret))
        return ret
    do_start_message_producer(group_key_id, data, ret)
    return ret

def close(customer_idurl):
    """
    Closes instance of queue_keeper() state machine related to given customer.
    """
    customer_idurl = strng.to_bin(customer_idurl)
    if id_url.is_empty(customer_idurl):
        return False
    if not id_url.is_cached(customer_idurl):
        lg.warn('customer idurl is not cached yet, can not stop QueueKeeper()')
        return False
    customer_idurl = id_url.field(customer_idurl)
    if customer_idurl not in queue_keepers().keys():
        lg.warn('instance of queue_keeper() not found for given customer')
        return False
    A(customer_idurl, 'shutdown')
    return True

def update_suppliers(idlist, customer_idurl=None):
    """
    High-level method to set suppliers ID's list.
    Executes required callbacks.
    """
    global _SuppliersChangedCallback
    global _ContactsChangedCallbacks
    oldsuppliers = list(suppliers(customer_idurl=customer_idurl))
    oldcontacts = list(contacts())
    idlist = list(map(lambda i: i if id_url.is_cached(i) else b'', idlist))
    set_suppliers(idlist, customer_idurl=customer_idurl)
    if _SuppliersChangedCallback is not None:
        _SuppliersChangedCallback(oldsuppliers, suppliers(customer_idurl=customer_idurl))
    if id_url.to_original_list(oldcontacts) != id_url.to_original_list(contacts()):
        for cb in _ContactsChangedCallbacks:
            cb(id_url.to_original_list(oldcontacts), id_url.to_original_list(contacts()))