def QueueRequestFile(self, callOnReceived, creatorID, packetID, ownerID, remoteID):
    """
    Ask supplier `remoteID` for the file `packetID` via a per-supplier queue.

    If the file already exists locally the request is skipped and
    `callOnReceived` (if given) is scheduled with status 'exist'.
    Returns the result of SupplierRequestFile(), or False when skipped.
    """
    remoteID = id_url.field(remoteID)
    ownerID = id_url.field(ownerID)
    creatorID = id_url.field(creatorID)
    # make sure that we don't actually already have the file
    # if packetID != settings.BackupInfoFileName():
    if packetID not in [
        settings.BackupInfoFileName(),
        settings.BackupInfoFileNameOld(),
        settings.BackupInfoEncryptedFileName(),
    ]:
        customer, pathID = packetid.SplitPacketID(packetID)
        filename = os.path.join(settings.getLocalBackupsDir(), customer, pathID)
        if os.path.exists(filename):
            lg.warn("%s already exist " % filename)
            if callOnReceived:
                reactor.callLater(0, callOnReceived, packetID, 'exist')  # @UndefinedVariable
            return False
    if remoteID not in list(self.supplierQueues.keys()):
        # made a new queue for this man
        self.supplierQueues[remoteID] = SupplierQueue(remoteID, self.creatorID)
        lg.info("made a new receiving queue for %s" % nameurl.GetName(remoteID))
    # lg.out(10, "io_throttle.QueueRequestFile asking for %s from %s" % (packetID, nameurl.GetName(remoteID)))
    return self.supplierQueues[remoteID].SupplierRequestFile(
        callOnReceived, creatorID, packetID, ownerID)
def get_correspondent_nickname(correspondent_idurl):
    """
    Find the nickname recorded for `correspondent_idurl`.

    Returns None when that correspondent is not present in the list.
    """
    return next(
        (nick for known_idurl, nick in correspondents()
         if id_url.field(known_idurl).to_bin() == id_url.field(correspondent_idurl).to_bin()),
        None,
    )
def erase_supplier(idurl=None, position=None, customer_idurl=None):
    """
    Remove one supplier from the in-memory suppliers list of the given
    customer (my own identity by default), selected either by `idurl`
    or by `position`.

    The slot is not deleted but overwritten with an empty id_url field so
    positions of the remaining suppliers stay unchanged.  Returns True on
    success, False when the customer or the supplier was not found.
    """
    global _SuppliersList
    if not customer_idurl:
        customer_idurl = my_id.getIDURL()
    customer_idurl = id_url.field(customer_idurl)
    if customer_idurl not in _SuppliersList:
        return False
    current_suppliers = _SuppliersList[customer_idurl]
    if _Debug:
        lg.args(_DebugLevel, idurl=idurl, position=position, customer_idurl=customer_idurl)
    if idurl:
        idurl = id_url.field(idurl)
        if idurl not in current_suppliers:
            return False
        # keep the list length: replace the found supplier with an empty field
        current_suppliers[current_suppliers.index(idurl)] = id_url.field(b'')
    elif position is not None:
        if position >= len(current_suppliers):
            return False
        current_suppliers[position] = id_url.field(b'')
    else:
        # neither idurl nor position was provided - nothing to erase
        return False
    update_suppliers(idlist=current_suppliers, customer_idurl=customer_idurl)
    return True
def add_supplier(idurl, position=None, customer_idurl=None):
    """
    Add supplier in my list of suppliers or to the list stored for another
    customer.

    If parameter `position` is provided, supplier will be inserted instead of
    added. If position is greater than current list - empty strings will be
    filled in between.

    Returns the position the supplier ended up at.
    """
    global _SuppliersList
    if not customer_idurl:
        customer_idurl = my_id.getIDURL()
    customer_idurl = id_url.field(customer_idurl)
    if customer_idurl not in _SuppliersList:
        _SuppliersList[customer_idurl] = []
        lg.info('created new suppliers list in memory for customer %r' % customer_idurl)
    idurl = id_url.field(idurl)
    if _Debug:
        lg.args(_DebugLevel, idurl=idurl, position=position, customer_idurl=customer_idurl)
    if position is None or position == -1:
        # no explicit position: append to the end of the list
        lg.warn('position unknown, added supplier "%s" to the end of the list for customer %s' % (idurl, customer_idurl, ))
        _SuppliersList[customer_idurl].append(idurl)
        return len(_SuppliersList[customer_idurl]) - 1
    current_suppliers = _SuppliersList[customer_idurl]
    if position >= len(current_suppliers):
        # pad the list with empty records up to the requested position
        empty_suppliers = [id_url.field(b''), ] * (1 + position - len(current_suppliers))
        current_suppliers.extend(empty_suppliers)
        if _Debug:
            lg.out(_DebugLevel, 'contactsdb.add_supplier %d empty suppliers added for customer %r' % (len(empty_suppliers), customer_idurl))
    if current_suppliers[position] and current_suppliers[position] != idurl:
        lg.info('replacing known supplier "%s" by "%s" at position %d for customer %s' % (
            current_suppliers[position], idurl, position, customer_idurl, ))
    else:
        lg.info('added supplier "%s" at position %d for customer %s' % (idurl, position, customer_idurl, ))
    current_suppliers[position] = idurl
    update_suppliers(idlist=current_suppliers, customer_idurl=customer_idurl)
    return position
def QueueSendFile(self, fileName, packetID, remoteID, ownerID, callOnAck=None, callOnFail=None):
    """
    Put the outgoing file `fileName` into the sending queue of supplier
    `remoteID`, creating a new queue if needed.

    When the file does not exist locally `callOnFail` (if given) is
    scheduled with status 'not exist' and False is returned.
    """
    #out(10, "io_throttle.QueueSendFile %s to %s" % (packetID, nameurl.GetName(remoteID)))
    remoteID = id_url.field(remoteID)
    ownerID = id_url.field(ownerID)
    if not os.path.exists(fileName):
        lg.err("%s not exist" % fileName)
        if callOnFail is not None:
            reactor.callLater(.01, callOnFail, remoteID, packetID, 'not exist')  # @UndefinedVariable
        return False
    if remoteID not in list(self.supplierQueues.keys()):
        # create a dedicated sending queue for this supplier
        self.supplierQueues[remoteID] = SupplierQueue(remoteID, self.creatorID)
        lg.info("made a new sending queue for %s" % nameurl.GetName(remoteID))
    return self.supplierQueues[remoteID].SupplierSendFile(
        fileName, packetID, ownerID, callOnAck, callOnFail,
    )
def by_idurl(supplier_idurl, customer_idurl=None):
    """
    Return the known connector instance for the given supplier of the given
    customer (my own identity by default), or None when none exists.
    """
    if customer_idurl is None:
        customer_idurl = my_id.getLocalID()
    customer_key = id_url.field(customer_idurl)
    supplier_key = id_url.field(supplier_idurl)
    return connectors(customer_key).get(supplier_key, None)
def doRequestCurBroker(self, event, *args, **kwargs):
    """
    Action method.

    Connect to the message broker currently occupying the target position and
    request the 'service_message_broker' service from it, passing the known
    cooperation map; success/failure is routed back into this automat.
    """
    target_pos = self.desired_position
    known_brokers = {}
    known_brokers.update(self.cooperated_brokers or {})
    if event in [ 'record-busy', ]:
        # there is no cooperation done yet but current record in DHT on that position belongs to another broker
        target_pos = self.desired_position
        broker_idurl = id_url.field(self.dht_brokers[target_pos])
        known_brokers[self.desired_position] = self.my_broker_idurl
    elif event in [ 'prev-record-busy', ]:
        # there is no cooperation done yet but found another broker on the previous position in DHT
        target_pos = self.desired_position - 1
        broker_idurl = id_url.field(self.dht_brokers[target_pos])
        known_brokers[self.desired_position] = self.my_broker_idurl
    elif event in [ 'my-record-busy', 'my-record-empty', 'my-record-own', ]:
        # me and two other brokers already made a cooperation, connecting again with already known previous broker
        target_pos = self.my_position - 1
        broker_idurl = id_url.field(self.cooperated_brokers[target_pos])
        known_brokers[self.my_position] = self.my_broker_idurl
    # NOTE(review): for any other event value `broker_idurl` stays unbound and
    # the code below would raise NameError - presumably this action is only
    # fired for the events handled above; confirm against the transition table
    if _Debug:
        lg.args(_DebugLevel, e=event, my=self.my_position, desired=self.desired_position, target=target_pos, broker=broker_idurl, known=known_brokers)
    result = p2p_service_seeker.connect_known_node(
        remote_idurl=broker_idurl,
        service_name='service_message_broker',
        # request parameters are built lazily once the remote node is reachable
        service_params=lambda idurl: self._do_prepare_service_request_params(idurl, target_pos, known_brokers, event),
        request_service_timeout=self.broker_negotiate_ack_timeout * (target_pos + 1),
        force_handshake=True,
        attempts=1,
    )
    result.addCallback(self._on_cur_broker_connected, target_pos, self.my_position, self.desired_position, event)
    if _Debug:
        result.addErrback(lg.errback, debug=_Debug, debug_level=_DebugLevel, method='broker_negotiator.doRequestCurBroker')
    result.addErrback(self._on_cur_broker_connect_failed, target_pos, event)
def _do_verify(dht_value, position, broker_result):
    """
    Validate one message-broker DHT record and fire `broker_result` with a
    normalized dict; on a valid record the broker identity gets cached first.

    NOTE(review): this is a closure - `customer_idurl`, `as_fields` and
    `_do_broker_identity_cache` come from the enclosing scope not visible here.
    """
    if _Debug:
        lg.args(_DebugLevel, dht_value=dht_value, position=position, broker_result=broker_result)
    # default (empty) result structure returned for broken/missing records
    ret = {
        'timestamp': None,
        'revision': 0,
        'customer_idurl': customer_idurl,
        'broker_idurl': None,
        'position': position,
        'archive_folder_path': None,
    }
    if not dht_value or not isinstance(dht_value, dict):
        broker_result.callback(ret)
        return ret
    try:
        if as_fields:
            _customer_idurl = id_url.field(dht_value['customer_idurl'])
            _broker_idurl = id_url.field(dht_value['broker_idurl'])
        else:
            _customer_idurl = id_url.to_bin(dht_value['customer_idurl'])
            _broker_idurl = id_url.to_bin(dht_value['broker_idurl'])
        _position = int(dht_value['position'])
        _archive_folder_path = strng.to_text(dht_value['archive_folder_path'])
        _revision = int(dht_value.get('revision'))
        _timestamp = int(dht_value.get('timestamp'))
    except:
        lg.exc()
        broker_result.callback(ret)
        return ret
    if as_fields:
        # sanity checks: the record must match the customer and position asked for
        if _customer_idurl != customer_idurl:
            lg.err('wrong customer idurl %r in message broker DHT record for %r at position %d' % (_customer_idurl, customer_idurl, position))
            broker_result.callback(ret)
            return ret
        if position != _position:
            lg.err('wrong position value %d in message broker DHT record for %r at position %d' % (_position, customer_idurl, position))
            broker_result.callback(ret)
            return ret
    ret.update({
        'customer_idurl': _customer_idurl,
        'broker_idurl': _broker_idurl,
        'position': _position,
        'archive_folder_path': _archive_folder_path,
        'revision': _revision,
        'timestamp': _timestamp,
    })
    # record looks valid: cache the broker identity before firing the result
    _do_broker_identity_cache(ret, position, broker_result)
    return None
def get_supplier_meta_info(supplier_idurl, customer_idurl=None):
    """
    Read the stored meta info dict for the given supplier of the given
    customer (my own identity by default).

    Returns an empty dict when nothing is stored or when one of the
    identities is not cached yet.
    """
    global _SuppliersMetaInfo
    if not customer_idurl:
        customer_idurl = my_id.getLocalID()
    # id_url operations on a not-yet-cached identity raise KeyError
    # (see the id_url unit tests); guard the same way the newer variant
    # of this function does and return an empty dict instead of raising
    if not id_url.is_cached(customer_idurl) or not id_url.is_cached(supplier_idurl):
        return {}
    customer_idurl = id_url.field(customer_idurl)
    supplier_idurl = id_url.field(supplier_idurl)
    return jsn.dict_keys_to_text(jsn.dict_values_to_text(
        _SuppliersMetaInfo.get(customer_idurl, {}).get(supplier_idurl, {})))
def supplier(index, customer_idurl=None):
    """
    Return supplier ID on given position or empty string.

    `index` is converted to int; an out-of-range or negative position
    yields an empty id_url field.
    """
    if not customer_idurl:
        customer_idurl = my_id.getLocalID()
    customer_idurl = id_url.field(customer_idurl)
    num = int(index)
    # fetch the list once instead of twice (the original called suppliers()
    # both for the bounds check and for the lookup)
    known_suppliers = suppliers(customer_idurl=customer_idurl)
    if 0 <= num < len(known_suppliers):
        return known_suppliers[num]
    return id_url.field(b'')
def test_identity_not_cached(self):
    """
    Comparing id_url fields must raise KeyError while the corresponding
    identity file is not cached yet, and start working right after caching.
    """
    self._cache_identity('alice')
    # ethan is not cached: comparison against a cached field must fail
    with self.assertRaises(KeyError):
        (id_url.field(ethan_text) != id_url.field(alice_bin))
    # constructing a field object for a not-cached identity is allowed ...
    l = [
        id_url.field(frank_2),
    ]
    # ... but a membership test (which compares) still raises while not cached
    with self.assertRaises(KeyError):
        (id_url.field(frank_1) not in l)
    self._cache_identity('frank')
    self.assertIn(id_url.field(frank_1), l)
def _do_cancel_outbox_packets(self, fail_info):
    """
    Cancel every pending outgoing packet matching the failed relay info:
    same packet id, same command, same remote and creator/owner identities.
    """
    to_idurl = id_url.field(fail_info['to'])
    from_idurl = id_url.field(fail_info['from'])
    for p in packet_out.search_by_packet_id(fail_info['packet_id']):
        if p.outpacket.Command != fail_info['command']:
            continue
        # compare binary forms of the IDURLs, consistent with the other
        # variant of this method in the project - direct field comparison
        # can miss packets when one of the identities was rotated
        if id_url.to_bin(to_idurl) != p.outpacket.RemoteID.to_bin():
            continue
        if p.outpacket.CreatorID.to_bin() == id_url.to_bin(from_idurl) or p.outpacket.OwnerID.to_bin() == id_url.to_bin(from_idurl):
            lg.warn('about to cancel %r because sending via proxy transport failed' % p)
            p.automat('cancel')
def get_supplier_meta_info(supplier_idurl, customer_idurl=None):
    """
    Read the stored meta info for the given supplier of the given customer
    (my own identity by default).

    An empty dict is returned when one of the identities is not cached yet
    or when nothing is stored.
    """
    global _SuppliersMetaInfo
    if not customer_idurl:
        customer_idurl = my_id.getIDURL()
    if not (id_url.is_cached(customer_idurl) and id_url.is_cached(supplier_idurl)):
        return {}
    customer_key = id_url.field(customer_idurl)
    supplier_key = id_url.field(supplier_idurl)
    stored = _SuppliersMetaInfo.get(customer_key, {}).get(supplier_key, {})
    return jsn.dict_keys_to_text(jsn.dict_values_to_text(stored))
def _do_verify(dht_value, customer_idurl_bin):
    """
    Validate the customer-suppliers DHT record; on an empty or broken record
    retry the read using the next known rotated IDURL of the same customer.

    NOTE(review): this is a closure - `rotated_idurls`, `customer_idurl`,
    `as_fields`, `result`, `_do_identity_cache` and `_on_error` come from the
    enclosing scope not visible here.
    """
    if customer_idurl_bin in rotated_idurls:
        rotated_idurls.remove(customer_idurl_bin)
    # default (empty) result structure
    ret = {
        'suppliers': [],
        'ecc_map': None,
        'customer_idurl': customer_idurl,
        'revision': 0,
        'publisher_idurl': None,
        'timestamp': None,
    }
    if not dht_value or not isinstance(dht_value, dict):
        if not rotated_idurls:
            result.callback(ret)
            return ret
        # record is empty: re-try the DHT read with a known rotated idurl
        another_customer_idurl_bin = rotated_idurls.pop(0)
        lg.warn('found another rotated idurl %r and re-try reading customer suppliers' % another_customer_idurl_bin)
        d = dht_records.get_suppliers(another_customer_idurl_bin, return_details=True, use_cache=False)
        d.addCallback(_do_verify, another_customer_idurl_bin)
        d.addErrback(_on_error)
        return ret
    try:
        _ecc_map = strng.to_text(dht_value['ecc_map'])
        if as_fields:
            _customer_idurl = id_url.field(dht_value['customer_idurl'])
            _publisher_idurl = id_url.field(dht_value.get('publisher_idurl'))
            _suppliers_list = id_url.fields_list(dht_value['suppliers'])
        else:
            _customer_idurl = id_url.to_bin(dht_value['customer_idurl'])
            _publisher_idurl = id_url.to_bin(dht_value.get('publisher_idurl'))
            _suppliers_list = id_url.to_bin_list(dht_value['suppliers'])
        _revision = int(dht_value.get('revision'))
        _timestamp = int(dht_value.get('timestamp'))
    except:
        lg.exc()
        result.callback(ret)
        return ret
    ret.update({
        'suppliers': _suppliers_list,
        'ecc_map': _ecc_map,
        'customer_idurl': _customer_idurl,
        'revision': _revision,
        'publisher_idurl': _publisher_idurl,
        'timestamp': _timestamp,
    })
    # record looks valid: make sure all related identities are cached
    return _do_identity_cache(ret)
def test_identity_cached(self):
    """
    After an identity file is cached its IDURL must be recognized in both
    binary and text forms, and field objects must compare as expected.
    """
    self.assertFalse(id_url.is_cached(alice_bin))
    self.assertFalse(id_url.is_cached(alice_text))
    alice_identity = self._cache_identity('alice')
    self.assertTrue(id_url.is_cached(alice_bin))
    self.assertTrue(id_url.is_cached(alice_text))
    self.assertEqual(alice_identity.getIDURL().to_bin(), alice_bin)
    self.assertEqual(alice_identity.getIDURL().to_text(), alice_text)
    self.assertEqual(id_url.field(alice_bin).to_public_key(), alice_identity.getPublicKey())
    # text and binary forms of the same IDURL must compare equal
    self.assertEqual(id_url.field(alice_text), id_url.field(alice_bin))
    self._cache_identity('bob')
    # different users never compare equal
    self.assertTrue(id_url.field(alice_text) != id_url.field(bob))
def write_customer_message_broker(customer_idurl, broker_idurl, position=0, archive_folder_path=None, revision=None):
    """
    Store a message-broker record for the given customer into DHT.
    """
    return dht_records.set_message_broker(
        customer_idurl=id_url.field(customer_idurl),
        broker_idurl=id_url.field(broker_idurl),
        position=position,
        archive_folder_path=archive_folder_path,
        revision=revision,
    )
def _do_cancel_outbox_packets(self, fail_info):
    """
    Cancel every pending outgoing packet matching the failed relay info:
    same packet id, same command, same remote and creator/owner identities.
    """
    to_idurl = id_url.field(fail_info['to'])
    from_idurl = id_url.field(fail_info['from'])
    for p in packet_out.search_by_packet_id(fail_info['packet_id']):
        if p.outpacket.Command == fail_info['command']:
            # NOTE(review): binary-form comparison - presumably to stay
            # correct when one of the identities was rotated; confirm
            if id_url.to_bin(to_idurl) == p.outpacket.RemoteID.to_bin():
                if p.outpacket.CreatorID.to_bin() == id_url.to_bin(from_idurl) or p.outpacket.OwnerID.to_bin() == id_url.to_bin(from_idurl):
                    if _Debug:
                        lg.dbg(_DebugLevel, 'about to cancel %r because sending via proxy transport is failed' % p)
                    p.automat('cancel')
def doInit(self, *args, **kwargs):
    """
    Action method.

    Remembers the trusted remote IDURL, the group key id and an optional
    Deferred to be fired with the final result.
    """
    self.result_defer = kwargs.get('result_defer', None)
    self.group_key_id = strng.to_text(kwargs['group_key_id'])
    self.remote_idurl = id_url.field(kwargs['trusted_idurl'])
def NormalizeGlobalID(inp, detect_version=False, as_field=True):
    """
    Input `inp` is a string or glob_path_id dict.

    This will fill out missed/empty fields from existing data. Such an order:

    1. if no idurl : use my local identity,
    2. if no customer : use idurl
    3. if no user : use customer
    4. if no key alias : use "master"
    5. if no idhost : use idurl
    """
    from userid import my_id
    if isinstance(inp, dict):
        g = inp
    else:
        g = ParseGlobalID(inp, detect_version=detect_version)
    if not g['idurl']:
        g['idurl'] = my_id.getIDURL().to_bin()
    if as_field:
        from userid import id_url
        g['idurl'] = id_url.field(g['idurl'])
    if not g['customer']:
        g['customer'] = UrlToGlobalID(g['idurl'])
    if not g['user']:
        # user name is the part of the global customer ID before the "@"
        g['user'] = g['customer'].split('@')[0]
    if not g['key_alias']:
        g['key_alias'] = 'master'
    if not g['idhost']:
        from lib import nameurl
        g['idhost'] = nameurl.GetHost(g['idurl'])
    if not g['key_id']:
        g['key_id'] = MakeGlobalKeyID(g['key_alias'], g['customer'])
    return g
def isExistSomeSuppliers(self, *args, **kwargs):
    """
    Condition method.

    True when at least one supplier is known and not every position in the
    list is an empty record.
    """
    sup_records = contactsdb.suppliers()
    total = contactsdb.num_suppliers()
    if total <= 0:
        return False
    return sup_records.count(id_url.field(b'')) < total
def _on_supplier_connector_state_changed(self, idurl, newstate, **kwargs):
    """
    Callback fired when one of the supplier_connector() state machines
    changed its state; while connecting or dismissing suppliers this is
    translated into a 'supplier-state-changed' event of this automat.
    """
    from customer import supplier_connector
    idurl = id_url.field(idurl)
    if _Debug:
        lg.out(_DebugLevel, 'fire_hire._on_supplier_connector_state_changed %s to %s, own state is %s ' % (idurl, newstate, self.state))
    if supplier_connector.by_idurl(idurl):
        # one-shot subscription: stop listening to that connector
        supplier_connector.by_idurl(idurl).remove_callback('fire_hire', self._on_supplier_connector_state_changed)
    if self.state == 'SUPPLIERS?':
        if idurl in self.connect_list:
            self.connect_list.remove(idurl)
        else:
            lg.warn('did not found %r in connect_list' % idurl)
    elif self.state == 'FIRE_MANY':
        if idurl not in self.dismiss_results:
            self.dismiss_results.append(idurl)
        else:
            lg.warn('did not found %r in dismiss_results' % idurl)
    else:
        # supplier connector updates are not expected in other states
        return
    self.automat('supplier-state-changed', (idurl, newstate, ))
def StopOverridingIdentity(idurl):
    """
    Remove the overridden identity source stored for the given IDURL and
    return it, or None when that identity was not overridden.
    """
    global _OverriddenIdentities
    idurl = id_url.field(idurl)
    if not idurl.is_latest():
        # the identity was rotated: move the override stored under the
        # original IDURL so it is keyed by the latest known IDURL
        if idurl.original() in _OverriddenIdentities:
            if idurl.to_bin() not in _OverriddenIdentities:
                _OverriddenIdentities[idurl.to_bin()] = _OverriddenIdentities.pop(idurl.original())
                lg.info('detected and processed idurl rotate for overridden identity : %r -> %r' % (idurl.original(), idurl.to_bin()))
    idurl = id_url.to_bin(idurl)
    result = _OverriddenIdentities.pop(idurl, None)
    if _Debug:
        lg.out(_DebugLevel, 'identitycache.StopOverridingIdentity removed overridden source for %s' % idurl)
        if result:
            lg.out(_DebugLevel, ' previous overridden identity was %d bytes' % len(result))
        lg.out(_DebugLevel, ' total number of overrides is %d' % len(_OverriddenIdentities))
    return result
def isStillNeeded(self, *args, **kwargs):
    """
    Condition method.

    Decides whether we would still be short of suppliers after dismissing
    the planned ones, given the candidate supplier passed in `args[0]`.
    """
    supplier_idurl = args[0]
    current_suppliers = contactsdb.suppliers()
    if supplier_idurl in current_suppliers:
        # this guy is already a supplier, we still need more then
        return True
    desired_number = settings.getSuppliersNumberDesired()
    needed_suppliers = current_suppliers[:desired_number]
    empty_suppliers = needed_suppliers.count(id_url.field(b''))
    # if '' in needed_suppliers:
    #     lg.warn('found empty suppliers!!!')
    #     return True
    # count unique non-dismissed suppliers, including the candidate
    s = set(id_url.to_bin_list(needed_suppliers))
    s.add(id_url.to_bin(supplier_idurl))
    s.difference_update(set(id_url.to_bin_list(self.dismiss_list)))
    result = len(s) - empty_suppliers < settings.getSuppliersNumberDesired()
    # if _Debug:
    #     lg.out(_DebugLevel, 'fire_hire.isStillNeeded %d %d %d %d %d, result=%s' % (
    #         contactsdb.num_suppliers(), len(needed_suppliers), len(self.dismiss_list),
    #         len(s), settings.getSuppliersNumberDesired(), result))
    return result
def doRememberUser(self, *args, **kwargs):
    """
    Action method.

    Stores the IDURL of the user we are going to communicate with.
    """
    target = id_url.field(args[0])
    self.target_idurl = target
    if _Debug:
        lg.args(_DebugLevel, target_idurl=target)
def _on_rotate_broker_connected(self, response_info, broker_pos, event, *args, **kwargs):
    """
    Result of the service request sent while rotating brokers: parse the
    cooperated-brokers map from the response payload and check whether my
    own broker was accepted at the requested position.
    """
    try:
        # skip leading "accepted:" marker
        cooperated_brokers = jsn.loads(strng.to_text(response_info[0].Payload)[9:])
        cooperated_brokers.pop('archive_folder_path', None)
        cooperated_brokers = {int(k): id_url.field(v) for k, v in cooperated_brokers.items()}
    except:
        lg.exc()
        self.automat('broker-rotate-failed')
        return
    if _Debug:
        lg.args(_DebugLevel, cooperated=cooperated_brokers, pos=broker_pos, e=event)
    if id_url.is_the_same(cooperated_brokers.get(broker_pos), self.my_broker_idurl):
        self.automat('broker-rotate-accepted', cooperated_brokers=cooperated_brokers)
        return
    self.automat('broker-rotate-rejected', cooperated_brokers=cooperated_brokers)
def _do_save_customer_suppliers(id_cached_result, ret):
    """
    Store the suppliers list read from DHT into contactsdb - but only for
    other customers; my own suppliers list is managed elsewhere.

    NOTE(review): this is a closure - fires the `result` Deferred from the
    enclosing scope not visible here.
    """
    if my_id.getIDURL() != id_url.field(ret['customer_idurl']):
        contactsdb.set_suppliers(ret['suppliers'], customer_idurl=ret['customer_idurl'])
        contactsdb.save_suppliers(customer_idurl=ret['customer_idurl'])
        if ret.get('ecc_map'):
            # remember the ecc_map for every cached supplier of that customer
            for supplier_idurl in ret['suppliers']:
                if supplier_idurl and id_url.is_cached(supplier_idurl):
                    contactsdb.add_supplier_meta_info(
                        supplier_idurl=supplier_idurl,
                        info={'ecc_map': ret['ecc_map'], },
                        customer_idurl=ret['customer_idurl'],
                    )
    else:
        if _Debug:
            lg.out(_DebugLevel, 'dht_relations._do_save_customer_suppliers SKIP processing my own suppliers')
    if _Debug:
        lg.out(_DebugLevel, 'dht_relations._do_save_customer_suppliers OK for %r returned %d suppliers' % (ret['customer_idurl'], len(ret['suppliers']), ))
    result.callback(ret)
    return ret
def ping(idurl, channel=None, ack_timeout=15, ping_retries=0, keep_alive=False):
    """
    Doing handshake with remote node only if it is currently not connected.

    Returns Deferred object.
    """
    idurl = strng.to_bin(idurl)
    if _Debug:
        lg.args(_DebugLevel, idurl=idurl, keep_alive=keep_alive, channel=channel)
    result = Deferred()
    # errback is attached first so the failure below is reported through it
    result.addErrback(on_ping_failed, idurl=idurl, channel=channel)
    if id_url.is_empty(idurl):
        result.errback(Exception('empty idurl provided'))
        return result
    if not id_url.is_cached(idurl):
        # remote identity not known yet: fall back to a clean handshake
        if _Debug:
            lg.dbg(_DebugLevel, 'user identity %r not cached yet, executing clean handshake' % idurl)
        return handshaker.ping(
            idurl=idurl,
            ack_timeout=ack_timeout,
            ping_retries=ping_retries,
            channel=channel or 'clean_ping',
            keep_alive=keep_alive,
        )
    idurl = id_url.field(idurl)
    if not isKnown(idurl):
        if not check_create(idurl, keep_alive=keep_alive):
            raise Exception('can not create instance')
    A(idurl, 'ping-now', result, channel=channel, ack_timeout=ack_timeout, ping_retries=ping_retries, original_idurl=idurl.to_original())
    return result
def write_customers_quotas(new_space_dict, free_space):
    """
    Persist per-customer space quotas (keyed by textual IDURL) together with
    the remaining free space into the customers space file.
    """
    quotas = {}
    for raw_idurl, quota in new_space_dict.items():
        quotas[id_url.field(raw_idurl).to_text()] = quota
    quotas['free'] = free_space
    return bpio._write_dict(settings.CustomersSpaceFile(), quotas)
def update_customers_usage(new_space_usage_dict):
    """
    Persist per-customer used-space values (keyed by binary IDURL) into the
    customers used-space file.
    """
    usage = {}
    for raw_idurl, used in new_space_usage_dict.items():
        usage[id_url.field(raw_idurl).to_bin()] = used
    return bpio._write_dict(settings.CustomersUsedSpaceFile(), jsn.dict_keys_to_text(usage))
def handshake(idurl, channel=None, ack_timeout=15, ping_retries=2, keep_alive=False):
    """
    Immediately doing handshake with remote node by fetching remote identity
    file and then sending my own Identity() to remote peer and wait for an
    Ack() packet.

    Returns Deferred object.
    """
    idurl = strng.to_bin(idurl)
    if _Debug:
        lg.args(_DebugLevel, idurl=idurl, keep_alive=keep_alive, channel=channel, ack_timeout=ack_timeout, ping_retries=ping_retries)
    result = Deferred()
    # errback is attached first so the failure below is reported through it
    result.addErrback(on_ping_failed, idurl=idurl, channel=channel)
    if id_url.is_empty(idurl):
        result.errback(Exception('empty idurl provided'))
        return result
    if not id_url.is_cached(idurl):
        # remote identity not known yet: fall back to a clean handshake
        if _Debug:
            lg.dbg(_DebugLevel, 'user identity %r not cached yet, executing clean handshake' % idurl)
        return handshaker.ping(
            idurl=idurl,
            ack_timeout=ack_timeout,
            ping_retries=ping_retries,
            channel=channel or 'clean_handshake',
            keep_alive=keep_alive,
        )
    idurl = id_url.field(idurl)
    if not isKnown(idurl):
        if not check_create(idurl, keep_alive=keep_alive):
            raise Exception('can not create instance')
    A(idurl, 'handshake', result, channel=channel, ack_timeout=ack_timeout, ping_retries=ping_retries, original_idurl=idurl.to_original())
    return result