def _do_create_revision_from_another_supplier(self, another_revision, another_suppliers, another_ecc_map):
    """
    Build and store a new family-info revision based on data received from
    another supplier of the same customer, then return it as a dict.

    Side effect: updates local customer meta info in contactsdb with the
    received ecc_map / position / family snapshot.
    """
    local_customer_meta_info = contactsdb.get_customer_meta_info(
        self.customer_idurl)
    # NOTE(review): `or -1` turns a stored position of 0 into -1, so slot 0
    # is never claimed here. Other methods in this file also treat position 0
    # specially (`> 0` checks), so this looks intentional - confirm.
    possible_position = local_customer_meta_info.get('position', -1) or -1
    if possible_position >= 0:
        try:
            # claim my own slot in the snapshot received from the other supplier
            another_suppliers[possible_position] = my_id.getLocalIDURL()
        except:
            lg.exc()
    contactsdb.add_customer_meta_info(
        self.customer_idurl, {
            'ecc_map': another_ecc_map,
            'position': possible_position,
            'family_snapshot': another_suppliers,
        })
    return {
        'revision': int(another_revision),
        'publisher_idurl': my_id.getLocalIDURL(),  # I will be a publisher of that revision
        'suppliers': another_suppliers,
        'ecc_map': another_ecc_map,
        'customer_idurl': self.customer_idurl,
    }
def start(self):
    """
    Start the service: hook into the inbox transport callback, spin up a
    family_member() state machine per known customer and subscribe to
    customer lifecycle events.

    Returns True (service started).
    """
    from logs import lg
    from main import events
    from contacts import contactsdb
    from supplier import family_member
    from transport import callback
    # TODO: check all imports.! my_id must be loaded latest as possible!
    from userid import my_id
    callback.append_inbox_callback(self._on_inbox_packet_received)
    for customer_idurl in contactsdb.customers():
        if not customer_idurl:
            continue
        if customer_idurl == my_id.getLocalIDURL():
            # a node must not be its own customer here
            lg.warn('skipping my own identity')
            continue
        fm = family_member.by_customer_idurl(customer_idurl)
        if not fm:
            # create and initialize a new state machine for that family
            fm = family_member.create_family(customer_idurl)
            fm.automat('init')
        local_customer_meta_info = contactsdb.get_customer_meta_info(customer_idurl)
        fm.automat('family-join', {
            'supplier_idurl': my_id.getLocalIDURL(),
            'ecc_map': local_customer_meta_info.get('ecc_map'),
            'position': local_customer_meta_info.get('position', -1),
            'family_snapshot': local_customer_meta_info.get('family_snapshot'),
        })
    events.add_subscriber(self._on_existing_customer_accepted, 'existing-customer-accepted')
    events.add_subscriber(self._on_new_customer_accepted, 'new-customer-accepted')
    events.add_subscriber(self._on_existing_customer_terminated, 'existing-customer-terminated')
    return True
def isMyPositionOK(self, *args, **kwargs):
    """
    Condition method.

    Returns True when my own position in the family record published in DHT
    matches the position stored in my local info.
    """
    dht_info_valid = self._do_validate_dht_info(args[0])
    if not dht_info_valid:
        return False
    if self.current_request and self.current_request['command'] == 'family-leave':
        # while leaving the family, "OK" means my IDURL is already absent from DHT
        if my_id.getLocalIDURL() not in dht_info_valid['suppliers']:
            return True
    my_info_valid = self._do_validate_my_info(self.my_info)
    if not my_info_valid:
        return False
    latest_revision = self._do_detect_latest_revision(
        dht_info_valid, my_info_valid)
    if latest_revision == 0:
        return False
    try:
        my_position = my_info_valid['suppliers'].index(
            my_id.getLocalIDURL())
    except:
        my_position = -1
    if my_position < 0:
        return False
    try:
        existing_position = dht_info_valid['suppliers'].index(
            my_id.getLocalIDURL())
    except:
        existing_position = -1
    # NOTE(review): strict `> 0` means position 0 can never be "OK" -
    # consistent with other `> 0` checks in this file, but confirm whether
    # slot 0 is actually reserved (otherwise this should be `>= 0`)
    return existing_position > 0 and my_position > 0 and existing_position == my_position
def _on_incoming_suppliers_list(self, inp):
    # this packet came from another supplier who belongs to that family also
    """
    Validate a suppliers-list transaction proposed by another supplier.

    Acks when the proposal is acceptable (or when it is simply fresher than
    my info); replies Fail with my own serialized info when the proposal
    would remove or move my IDURL.
    """
    incoming_packet = inp['packet']
    if _Debug:
        lg.out(
            _DebugLevel,
            'family_member._on_incoming_suppliers_list with %s' % incoming_packet)
    if not self.my_info:
        if _Debug:
            lg.out(_DebugLevel, ' current DHT info is not yet known, skip')
        return p2p_service.SendAck(incoming_packet)
    try:
        another_ecc_map = inp['customer_ecc_map']
        another_suppliers_list = inp['suppliers_list']
        another_revision = int(inp['transaction_revision'])
    except:
        lg.exc()
        return p2p_service.SendFail(incoming_packet,
                                    response=serialization.DictToBytes(self.my_info))
    if _Debug:
        lg.out(
            _DebugLevel,
            ' another_revision=%d another_ecc_map=%s another_suppliers_list=%r' % (
                another_revision, another_ecc_map, another_suppliers_list))
    if another_revision >= int(self.my_info['revision']):
        # the other supplier knows a newer revision - adopt it and re-sync
        self.my_info = self._do_create_revision_from_another_supplier(
            another_revision, another_suppliers_list, another_ecc_map)
        lg.info(
            'another supplier have more fresh revision, update my info and raise "family-refresh" event'
        )
        self.automat('family-refresh')
        return p2p_service.SendAck(incoming_packet)
    if my_id.getLocalIDURL() not in another_suppliers_list:
        lg.warn(
            'another supplier is trying to remove my IDURL from the family of customer %s'
            % self.customer_idurl)
        return p2p_service.SendFail(incoming_packet,
                                    response=serialization.DictToBytes(self.my_info))
    my_position_in_transaction = another_suppliers_list.index(
        my_id.getLocalIDURL())
    # NOTE(review): this .index() raises ValueError when my IDURL is not in
    # self.my_info['suppliers'] - presumably guaranteed by earlier state
    # transitions, but worth confirming
    my_known_position = self.my_info['suppliers'].index(
        my_id.getLocalIDURL())
    if my_position_in_transaction != my_known_position:
        lg.warn(
            'another supplier is trying to put my IDURL on another position in the family of customer %s'
            % self.customer_idurl)
        return p2p_service.SendFail(incoming_packet,
                                    response=serialization.DictToBytes(self.my_info))
    return p2p_service.SendAck(incoming_packet)
def search_by_response_packet(newpacket, proto=None, host=None):
    """
    Find pending outbox packets that the given incoming packet looks like a
    response to. Matches by PacketID (case-insensitive), by expected reply
    command, and by the sender/recipient IDURLs.

    Returns a list of matching packet_out instances (possibly empty).
    """
    result = []
    incoming_owner_idurl = newpacket.OwnerID
    incoming_creator_idurl = newpacket.CreatorID
    incoming_remote_idurl = newpacket.RemoteID
    if _Debug:
        lg.out(_DebugLevel, 'packet_out.search_by_response_packet for incoming [%s/%s/%s]:%s(%s) from [%s://%s]' % (
            nameurl.GetName(incoming_owner_idurl), nameurl.GetName(incoming_creator_idurl),
            nameurl.GetName(incoming_remote_idurl), newpacket.Command, newpacket.PacketID, proto, host, ))
        lg.out(_DebugLevel, ' [%s]' % (','.join([str(p.outpacket) for p in queue()])))
    for p in queue():  # TODO: investigate
        if p.outpacket.PacketID.lower() != newpacket.PacketID.lower():
            # PacketID of incoming packet not matching with that outgoing packet
            continue
        if p.outpacket.PacketID != newpacket.PacketID:
            # same ID ignoring case, but differs in case - log it, still a candidate
            lg.warn('packet ID in queue "almost" matching with incoming: %s ~ %s' % (
                p.outpacket.PacketID, newpacket.PacketID, ))
        if not commands.IsCommandAck(p.outpacket.Command, newpacket.Command):
            # this command must not be in the reply
            continue
        expected_recipient = [p.outpacket.RemoteID, ]
        if p.outpacket.RemoteID != p.remote_idurl:
            # outgoing packet was addressed to another node, so that means we need to expect response from another node also
            expected_recipient.append(p.remote_idurl)
        matched = False
        if incoming_owner_idurl in expected_recipient and my_id.getLocalIDURL() == incoming_remote_idurl:
            if _Debug:
                lg.out(_DebugLevel, ' matched with incoming owner: %s' % expected_recipient)
            matched = True
        if incoming_creator_idurl in expected_recipient and my_id.getLocalIDURL() == incoming_remote_idurl:
            if _Debug:
                lg.out(_DebugLevel, ' matched with incoming creator: %s' % expected_recipient)
            matched = True
        if incoming_remote_idurl in expected_recipient and my_id.getLocalIDURL() == incoming_owner_idurl and commands.Data() == newpacket.Command:
            # my own stored Data() coming back to me
            if _Debug:
                lg.out(_DebugLevel, ' matched my own incoming Data with incoming remote: %s' % expected_recipient)
            matched = True
        if matched:
            result.append(p)
            if _Debug:
                lg.out(_DebugLevel, ' found pending outbox [%s/%s/%s]:%s(%s) cb:%s' % (
                    nameurl.GetName(p.outpacket.OwnerID), nameurl.GetName(p.outpacket.CreatorID),
                    nameurl.GetName(p.outpacket.RemoteID), p.outpacket.Command, p.outpacket.PacketID,
                    list(p.callbacks.keys())))
    if len(result) == 0:
        if _Debug:
            lg.out(_DebugLevel, ' NOT FOUND pending packets in outbox queue matching incoming %s' % newpacket)
        if newpacket.Command == commands.Ack() and newpacket.PacketID not in [commands.Identity(), commands.Identity().lower()]:
            # an Ack we were not waiting for (and not an Identity handshake)
            lg.warn('received %s was not a "good reply" from %s://%s' % (newpacket, proto, host, ))
    return result
def _on_existing_customer_terminated(self, evt):
    """
    Event handler: tell the family_member() machine of a terminated
    customer that this supplier is leaving the family.
    """
    from logs import lg
    from supplier import family_member
    from userid import my_id
    terminated_customer = evt.data['idurl']
    if terminated_customer == my_id.getLocalIDURL():
        lg.warn('skipping my own identity')
        return
    existing_fm = family_member.by_customer_idurl(terminated_customer)
    if not existing_fm:
        lg.err('family_member() instance not found for existing customer %s' % terminated_customer)
        return
    existing_fm.automat('family-leave', {
        'supplier_idurl': my_id.getLocalIDURL(),
    })
def test_encrypted_block(self):
    """
    Round-trip an encrypted.Block: a serialized block must unserialize to a
    valid block carrying the same payload, and re-serialize byte-identically.
    """
    from crypt import key
    from crypt import encrypted
    from userid import my_id
    key.InitMyKey()
    original_payload = os.urandom(1024)
    source_block = encrypted.Block(
        CreatorID=my_id.getLocalIDURL(),
        BackupID='BackupABC',
        BlockNumber=123,
        SessionKey=key.NewSessionKey(),
        SessionKeyType=key.SessionKeyType(),
        LastBlock=True,
        Data=original_payload,
    )
    self.assertTrue(source_block.Valid())
    serialized_once = source_block.Serialize()
    restored_block = encrypted.Unserialize(serialized_once)
    self.assertTrue(restored_block.Valid())
    serialized_twice = restored_block.Serialize()
    self.assertEqual(original_payload, restored_block.Data())
    self.assertEqual(serialized_once, serialized_twice)
def _on_inbox_packet_received(self, newpacket, info, status, error_message):
    """
    Inbox callback: process Files() packets carrying list-files responses
    from my suppliers. Returns True when the packet was consumed here,
    False to let other handlers try it.
    """
    from logs import lg
    from contacts import contactsdb
    from userid import my_id
    from userid import global_id
    from storage import backup_control
    from p2p import commands
    from p2p import p2p_service
    if newpacket.Command == commands.Files():
        list_files_global_id = global_id.ParseGlobalID(newpacket.PacketID)
        if not list_files_global_id['idurl']:
            lg.warn('invalid PacketID: %s' % newpacket.PacketID)
            return False
        if list_files_global_id['idurl'] != my_id.getLocalIDURL():
            # lg.warn('skip %s which is from another customer' % newpacket)
            return False
        if not contactsdb.is_supplier(newpacket.OwnerID):
            lg.warn('%s came, but %s is not my supplier' % (newpacket, newpacket.OwnerID, ))
            # skip Files() if this is not my supplier
            return False
        lg.out(self.debug_level, "service_backups._on_inbox_packet_received: %r for us from %s at %s" % (
            newpacket, newpacket.CreatorID, info))
        if backup_control.IncomingSupplierListFiles(newpacket, list_files_global_id):
            # send ack packet back
            p2p_service.SendAck(newpacket)
        else:
            p2p_service.SendFail(newpacket)
        return True
    return False
def doRequestService(self, arg):
    """
    Action method.

    Send a 'service_supplier' request to the supplier, attaching my
    storage needs, customer id/key and (for my own family) the ecc map.
    """
    payload = {
        'needed_bytes': self.needed_bytes,
        'customer_id': global_id.UrlToGlobalID(self.customer_idurl),
    }
    customer_key_id = my_id.getGlobalID(key_alias='customer')
    if my_keys.is_key_registered(customer_key_id):
        # share my public "customer" key so the supplier can verify me
        payload['customer_public_key'] = my_keys.get_key_info(
            key_id=customer_key_id,
            include_private=False,
        )
    if self.key_id:
        payload['key_id'] = self.key_id
    if my_id.getLocalIDURL() == self.customer_idurl:
        payload['ecc_map'] = eccmap.Current().name
    outgoing = p2p_service.SendRequestService(
        remote_idurl=self.supplier_idurl,
        service_name='service_supplier',
        json_payload=payload,
        callbacks={
            commands.Ack(): self._supplier_acked,
            commands.Fail(): self._supplier_failed,
        },
    )
    self.request_packet_id = outgoing.PacketID
def process(newpacket, info):
    """
    Entry point for a fully received inbound packet: validate Identity
    packets, make sure the sender's identity is cached, then dispatch the
    packet to handle(). Returns None when the packet is dropped, otherwise
    the result of handle() (possibly wrapped in a Deferred while caching).
    """
    from p2p import p2p_service
    from userid import my_id
    if not driver.is_on('service_p2p_hookups'):
        if _Debug:
            lg.out(
                _DebugLevel,
                'packet_in.process SKIP incoming packet, service_p2p_hookups is not started'
            )
        return None
    if _Debug:
        lg.out(
            _DebugLevel,
            'packet_in.process [%s/%s/%s]:%s(%s) from %s://%s is "%s"' % (
                nameurl.GetName(newpacket.OwnerID),
                nameurl.GetName(newpacket.CreatorID),
                nameurl.GetName(newpacket.RemoteID),
                newpacket.Command, newpacket.PacketID,
                info.proto, info.host, info.status, ))
    if info.status != 'finished':
        if _Debug:
            lg.out(_DebugLevel, ' skip, packet status is : [%s]' % info.status)
        return None
    if newpacket.Command == commands.Identity():
        if newpacket.RemoteID != my_id.getLocalIDURL():
            if _Debug:
                lg.out(_DebugLevel, ' incoming Identity is routed to another user')
            if not p2p_service.Identity(newpacket, send_ack=False):
                lg.warn('non-valid identity received')
                return None
            # remote peer sending a valid identity to another peer routed via my machine
            # need to handle that packet - it should be processed by proxy_server
            return handle(newpacket, info)
        # contact sending us current identity we might not have
        # so we handle it before check that packet is valid
        # because we might not have his identity on hands and so can not verify the packet
        # so we check that his Identity is valid and save it into cache
        # than we check the packet to be valid too.
        if not p2p_service.Identity(newpacket):
            lg.warn('non-valid identity received')
            return None
    if not identitycache.HasKey(newpacket.CreatorID):
        # defer handling until the sender's identity is cached,
        # otherwise the packet signature can not be verified
        if _Debug:
            lg.out(
                _DebugLevel,
                ' will cache remote identity %s before processing incoming packet %s'
                % (newpacket.CreatorID, newpacket))
        d = identitycache.immediatelyCaching(newpacket.CreatorID)
        d.addCallback(lambda _: handle(newpacket, info))
        d.addErrback(lambda err: lg.err('failed caching remote %s identity: %s' % (newpacket.CreatorID, str(err))))
        return d
    return handle(newpacket, info)
def health_check(self):
    """
    Return True when my customer service looks healthy.
    """
    from customer import supplier_connector
    from userid import my_id
    # at least one supplier must be online to consider my customer service to be healthy
    return any(
        sc.state == 'CONNECTED'
        for sc in supplier_connector.connectors(my_id.getLocalIDURL()).values()
    )
def _do_create_possible_revision(self, latest_revision):
    """
    Compose a "possible" family-info revision from locally stored customer
    meta info, claiming my own slot in the snapshot when it is known.

    Returns a dict with the same shape as other revision builders in this
    class.
    """
    local_customer_meta_info = contactsdb.get_customer_meta_info(
        self.customer_idurl)
    possible_position = local_customer_meta_info.get('position', -1)
    # normalize missing snapshot to an empty list - otherwise the membership
    # test below raises TypeError on None (same normalization as in
    # _do_create_first_revision)
    possible_suppliers = local_customer_meta_info.get('family_snapshot') or []
    if possible_position > 0 and my_id.getLocalIDURL() not in possible_suppliers:
        if len(possible_suppliers) > possible_position:
            possible_suppliers[possible_position] = my_id.getLocalIDURL()
    return {
        'revision': latest_revision,
        'publisher_idurl': my_id.getLocalIDURL(),  # I will be a publisher of that revision
        'suppliers': possible_suppliers,
        'ecc_map': local_customer_meta_info.get('ecc_map'),
        'customer_idurl': self.customer_idurl,
    }
def __init__(self, supplier_idurl, customer_idurl, needed_bytes, key_id=None, queue_subscribe=True):
    """
    Build a supplier-connector state machine for given supplier/customer
    pair. When `needed_bytes` is None it is derived from configured total
    storage need divided across the expected number of suppliers.
    """
    self.supplier_idurl = supplier_idurl
    self.customer_idurl = customer_idurl
    self.needed_bytes = needed_bytes
    self.key_id = key_id
    self.queue_subscribe = queue_subscribe
    if self.needed_bytes is None:
        total_bytes_needed = diskspace.GetBytesFromString(
            settings.getNeededString(), 0)
        num_suppliers = -1
        if self.customer_idurl == my_id.getLocalIDURL():
            # my own family: use the configured desired suppliers number
            num_suppliers = settings.getSuppliersNumberDesired()
        else:
            # another customer's family: derive from the known ecc map
            known_ecc_map = contactsdb.get_customer_meta_info(
                customer_idurl).get('ecc_map')
            if known_ecc_map:
                num_suppliers = eccmap.GetEccMapSuppliersNumber(
                    known_ecc_map)
        if num_suppliers > 0:
            # factor 2.0 doubles the per-supplier quota (redundancy headroom)
            self.needed_bytes = int(
                math.ceil(2.0 * total_bytes_needed / float(num_suppliers)))
        else:
            raise Exception(
                'not possible to determine needed_bytes value to be requested from that supplier'
            )
        # self.needed_bytes = int(math.ceil(2.0 * settings.MinimumNeededBytes() / float(settings.DefaultDesiredSuppliers())))
    name = 'supplier_%s_%s' % (
        nameurl.GetName(self.supplier_idurl),
        diskspace.MakeStringFromBytes(self.needed_bytes).replace(' ', ''),
    )
    self.request_packet_id = None
    self.callbacks = {}
    try:
        # restore last known connection state from disk
        st = bpio.ReadTextFile(
            settings.SupplierServiceFilename(
                idurl=self.supplier_idurl,
                customer_idurl=self.customer_idurl,
            )).strip()
    except:
        st = 'DISCONNECTED'
    automat.Automat.__init__(
        self,
        name,
        state=st,
        debug_level=_DebugLevel,
        log_events=_Debug,
        log_transitions=_Debug,
    )
    # NOTE(review): self.callbacks was just reset to {} above, so this loop
    # never executes - looks like dead code, confirm intent
    for cb in self.callbacks.values():
        cb(self.supplier_idurl, self.state, self.state)
def _do_create_first_revision(self, request):
    """
    Build revision number 0 of the family info from a "family-join" request.
    """
    suppliers_from_request = request.get('family_snapshot') or []
    return {
        'revision': 0,
        # I will be a publisher of the first revision
        'publisher_idurl': my_id.getLocalIDURL(),
        'suppliers': suppliers_from_request,
        'ecc_map': request.get('ecc_map'),
        'customer_idurl': self.customer_idurl,
    }
def _on_existing_customer_accepted(self, evt):
    """
    Event handler: forward a 'family-join' to the family_member() machine
    of an already known customer, once my position in the family is known.
    """
    from logs import lg
    from supplier import family_member
    from userid import my_id
    accepted_customer = evt.data['idurl']
    if accepted_customer == my_id.getLocalIDURL():
        lg.warn('skipping my own identity')
        return
    if evt.data.get('position') is None:
        lg.warn('position of supplier in the family is still unclear')
        return
    existing_fm = family_member.by_customer_idurl(accepted_customer)
    if not existing_fm:
        lg.err('family_member() instance was not found for existing customer %s' % accepted_customer)
        return
    existing_fm.automat('family-join', {
        'supplier_idurl': my_id.getLocalIDURL(),
        'ecc_map': evt.data.get('ecc_map'),
        'position': evt.data.get('position'),
        'family_snapshot': evt.data.get('family_snapshot'),
    })
def doSuppliersRequestIndexFile(self, arg):
    """
    Action method.

    Ask every online supplier for my backup index file and remember how
    many requests were actually sent.
    """
    if _Debug:
        lg.out(_DebugLevel, 'index_synchronizer.doSuppliersRequestIndexFile')
    if driver.is_on('service_backups'):
        from storage import backup_control
        self.current_local_revision = backup_control.revision()
    else:
        self.current_local_revision = -1
    self.latest_supplier_revision = -1
    self.requesting_suppliers.clear()
    self.requested_suppliers_number = 0
    packetID = global_id.MakeGlobalID(
        customer=my_id.getGlobalID(key_alias='master'),
        path=settings.BackupIndexFileName(),
    )
    # packetID = settings.BackupIndexFileName()
    localID = my_id.getLocalIDURL()
    for supplierId in contactsdb.suppliers():
        if not supplierId:
            continue
        if not contact_status.isOnline(supplierId):
            # only poll suppliers currently reachable
            continue
        pkt_out = p2p_service.SendRetreive(
            localID, localID, packetID, supplierId,
            callbacks={
                commands.Data(): self._on_supplier_response,
                commands.Fail(): self._on_supplier_response,
            })
        # newpacket = signed.Packet(
        #     commands.Retrieve(),
        #     localID,
        #     localID,
        #     packetid.RemotePath(packetID),
        #     '',
        #     supplierId)
        # pkt_out = gateway.outbox(newpacket, callbacks={
        #     commands.Data(): self._on_supplier_response,
        #     commands.Fail(): self._on_supplier_response, })
        if pkt_out:
            self.requesting_suppliers.add(supplierId)
            self.requested_suppliers_number += 1
        if _Debug:
            lg.out(
                _DebugLevel,
                ' %s sending to %s' % (pkt_out, nameurl.GetName(supplierId)))
def doSuppliersSendIndexFile(self, arg):
    """
    Action method.

    Encrypt my backup index file into a single block and push it to every
    online supplier, tracking how many sends started.
    """
    if _Debug:
        lg.out(_DebugLevel, 'index_synchronizer.doSuppliersSendIndexFile')
    packetID = global_id.MakeGlobalID(
        customer=my_id.getGlobalID(key_alias='master'),
        path=settings.BackupIndexFileName(),
    )
    self.sending_suppliers.clear()
    self.sent_suppliers_number = 0
    localID = my_id.getLocalIDURL()
    # the whole index file travels as one encrypted block
    b = encrypted.Block(
        CreatorID=localID,
        BackupID=packetID,
        BlockNumber=0,
        SessionKey=key.NewSessionKey(),
        SessionKeyType=key.SessionKeyType(),
        LastBlock=True,
        Data=bpio.ReadBinaryFile(settings.BackupIndexFilePath()),
    )
    Payload = b.Serialize()
    for supplierId in contactsdb.suppliers():
        if not supplierId:
            continue
        if not contact_status.isOnline(supplierId):
            # only push to suppliers currently reachable
            continue
        newpacket, pkt_out = p2p_service.SendData(
            raw_data=Payload,
            ownerID=localID,
            creatorID=localID,
            remoteID=supplierId,
            packetID=packetID,
            callbacks={
                commands.Ack(): self._on_supplier_acked,
                commands.Fail(): self._on_supplier_acked,
            },
        )
        # newpacket = signed.Packet(
        #     commands.Data(), localID, localID, packetID,
        #     Payload, supplierId)
        # pkt_out = gateway.outbox(newpacket, callbacks={
        #     commands.Ack(): self._on_supplier_acked,
        #     commands.Fail(): self._on_supplier_acked, })
        if pkt_out:
            self.sending_suppliers.add(supplierId)
            self.sent_suppliers_number += 1
        if _Debug:
            lg.out(
                _DebugLevel,
                ' %s sending to %s' % (newpacket, nameurl.GetName(supplierId)))
def IncomingSupplierListFiles(newpacket, list_files_global_id):
    """
    Called by ``p2p.p2p_service`` when command "Files" were received from one
    of our suppliers.

    This is an answer from given supplier (after our request) to get a
    list of our files stored on his machine. Decrypts the payload, feeds it
    into the backup matrix and triggers cleanup/rebuild requests as needed.

    Returns True when the list was processed, False on any failure.
    """
    from p2p import p2p_service
    supplier_idurl = newpacket.OwnerID
    # incoming_key_id = newpacket.PacketID.strip().split(':')[0]
    customer_idurl = list_files_global_id['idurl']
    num = contactsdb.supplier_position(supplier_idurl, customer_idurl=customer_idurl)
    # supplier_position() reports -1 for an unknown supplier; the previous
    # check `num < -1` could never be True, so unknown senders slipped
    # through - reject any negative position instead
    if num < 0:
        lg.warn('unknown supplier: %s' % supplier_idurl)
        return False
    from supplier import list_files
    from customer import list_files_orator
    try:
        block = encrypted.Unserialize(
            newpacket.Payload,
            decrypt_key=my_keys.make_key_id(alias='customer', creator_idurl=my_id.getLocalIDURL(), ),
        )
        input_data = block.Data()
    except:
        lg.out(2, 'backup_control.IncomingSupplierListFiles ERROR decrypting data from %s' % newpacket)
        return False
    src = list_files.UnpackListFiles(input_data, settings.ListFilesFormat())
    backups2remove, paths2remove, missed_backups = backup_matrix.ReadRawListFiles(num, src)
    list_files_orator.IncomingListFiles(newpacket)
    backup_matrix.SaveLatestRawListFiles(supplier_idurl, src)
    if _Debug:
        lg.out(_DebugLevel, 'backup_control.IncomingSupplierListFiles from [%s]: paths2remove=%d, backups2remove=%d missed_backups=%d' % (
            nameurl.GetName(supplier_idurl), len(paths2remove), len(backups2remove), len(missed_backups)))
    if len(backups2remove) > 0:
        # ask suppliers to drop backups that are no longer needed
        p2p_service.RequestDeleteListBackups(backups2remove)
        if _Debug:
            lg.out(_DebugLevel, ' also sent requests to remove %d backups' % len(backups2remove))
    if len(paths2remove) > 0:
        p2p_service.RequestDeleteListPaths(paths2remove)
        if _Debug:
            lg.out(_DebugLevel, ' also sent requests to remove %d paths' % len(paths2remove))
    if len(missed_backups) > 0:
        # schedule rebuilding of backups this supplier is missing
        from storage import backup_rebuilder
        backup_rebuilder.AddBackupsToWork(missed_backups)
        backup_rebuilder.A('start')
        if _Debug:
            lg.out(_DebugLevel, ' also triggered service_rebuilding with %d missed backups' % len(missed_backups))
    del backups2remove
    del paths2remove
    del missed_backups
    return True
def _do_increment_revision(self, possible_transaction):
    """
    Bump the transaction revision and stamp myself as publisher.

    Returns None (skip) when the transaction carries no changes compared
    to the currently known DHT record.
    """
    if self.dht_info:
        same_suppliers = self.dht_info['suppliers'] == possible_transaction['suppliers']
        same_ecc_map = self.dht_info['ecc_map'] == possible_transaction['ecc_map']
        if same_suppliers and same_ecc_map:
            if _Debug:
                lg.out(
                    _DebugLevel,
                    'family_member._do_increment_revision did not found any changes, skip transaction'
                )
            return None
    possible_transaction['revision'] += 1
    possible_transaction['publisher_idurl'] = my_id.getLocalIDURL()
    return possible_transaction
def __init__(self, customer_idurl, debug_level=_DebugLevel, log_events=_Debug, log_transitions=_Debug, publish_events=False, **kwargs):
    """
    Builds `family_member()` state machine.
    """
    self.customer_idurl = customer_idurl
    self.supplier_idurl = my_id.getLocalIDURL()
    machine_name = "family_member_%s_%s" % (
        nameurl.GetName(self.customer_idurl),
        nameurl.GetName(my_id.getLocalIDURL()),
    )
    super(FamilyMember, self).__init__(
        name=machine_name,
        state="AT_STARTUP",
        debug_level=debug_level,
        log_events=log_events,
        log_transitions=log_transitions,
        publish_events=publish_events,
        **kwargs
    )
def do_calculate_needed_bytes(self):
    """
    Derive `self.needed_bytes` from configured total storage need divided
    across the expected number of suppliers. No-op when already set.
    """
    if self.needed_bytes is not None:
        return
    total_bytes_needed = diskspace.GetBytesFromString(settings.getNeededString(), 0)
    if self.customer_idurl == my_id.getLocalIDURL():
        # my own family: use my configured desired suppliers number
        num_suppliers = settings.getSuppliersNumberDesired()
    else:
        # another customer's family: derive count from the known ecc map
        num_suppliers = -1
        known_ecc_map = contactsdb.get_customer_meta_info(self.customer_idurl).get('ecc_map')
        if known_ecc_map:
            num_suppliers = eccmap.GetEccMapSuppliersNumber(known_ecc_map)
    if num_suppliers <= 0:
        raise Exception('not possible to determine needed_bytes value to be requested from that supplier')
    self.needed_bytes = int(math.ceil(2.0 * total_bytes_needed / float(num_suppliers)))
def _do_notify_supplier_position(self, supplier_idurl, supplier_position):
    """
    Send a Contacts packet informing given supplier about its assigned
    position in my family.
    """
    from p2p import p2p_service
    from raid import eccmap
    from userid import my_id
    notification_payload = {
        'space': 'family_member',
        'type': 'supplier_position',
        'customer_idurl': my_id.getLocalIDURL(),
        'customer_ecc_map': eccmap.Current().name,
        'supplier_idurl': supplier_idurl,
        'supplier_position': supplier_position,
    }
    p2p_service.SendContacts(
        remote_idurl=supplier_idurl,
        json_payload=notification_payload,
    )
def _on_new_customer_accepted(self, evt):
    """
    Event handler: start (or reuse) a family_member() machine for a newly
    accepted customer and pass it a 'family-join' event.
    """
    from logs import lg
    from userid import my_id
    from supplier import family_member
    accepted_customer = evt.data['idurl']
    fm = family_member.by_customer_idurl(accepted_customer)
    if fm:
        lg.warn('family_member() instance already exists, but new customer just accepted %s' % accepted_customer)
    else:
        fm = family_member.create_family(accepted_customer)
        fm.automat('init')
    fm.automat('family-join', {
        'supplier_idurl': my_id.getLocalIDURL(),
        'ecc_map': evt.data.get('ecc_map'),
        'position': evt.data.get('position', -1),
        'family_snapshot': evt.data.get('family_snapshot'),
    })
def _on_supplier_modified(self, evt):
    """
    Event handler: when a supplier was replaced, hand over my own 'share_*'
    public keys to the newly assigned supplier.
    """
    from access import key_ring
    from crypt import my_keys
    from userid import global_id
    from userid import my_id
    new_supplier_idurl = evt.data['new_idurl']
    if not new_supplier_idurl:
        return
    local_idurl = my_id.getLocalIDURL()
    # only my own sharing keys need to be re-published
    keys_to_republish = [
        key_id for key_id in my_keys.known_keys()
        if key_id.startswith('share_') and global_id.ParseGlobalID(key_id)['idurl'] == local_idurl
    ]
    for key_id in keys_to_republish:
        key_ring.transfer_key(key_id, trusted_idurl=new_supplier_idurl, include_private=False)
def write_customer_suppliers(
        customer_idurl,
        suppliers_list,
        ecc_map=None,
        revision=None,
        publisher_idurl=None,
):
    """
    Persist the suppliers list of given customer locally (unless it is my
    own family) and publish it to DHT. Returns the DHT write Deferred.
    """
    if customer_idurl == my_id.getLocalIDURL():
        lg.warn(
            'skip writing my own suppliers list which suppose to be written to DHT'
        )
    else:
        # remember that customer's suppliers locally before publishing
        contactsdb.set_suppliers(suppliers_list, customer_idurl=customer_idurl)
        contactsdb.save_suppliers(customer_idurl=customer_idurl)
    return dht_records.set_suppliers(
        customer_idurl=customer_idurl,
        suppliers_list=suppliers_list,
        ecc_map=ecc_map,
        revision=revision,
        publisher_idurl=publisher_idurl,
    )
def _do_verify(dht_value):
    """
    Inner callback: validate the raw suppliers record read from DHT and
    fire the outer `result` Deferred with a normalized dict (or None on a
    malformed record). Closes over `result` and `customer_idurl` from the
    enclosing scope.
    """
    try:
        _ecc_map = dht_value['ecc_map']
        _customer_idurl = strng.to_bin(dht_value['customer_idurl'])
        _publisher_idurl = dht_value.get('publisher_idurl')
        _suppliers_list = list(map(strng.to_bin, dht_value['suppliers']))
        _revision = dht_value.get('revision')
        _timestamp = dht_value.get('timestamp')
    except:
        lg.exc()
        result.callback(None)
        return None
    ret = {
        'suppliers': _suppliers_list,
        'ecc_map': _ecc_map,
        'customer_idurl': _customer_idurl,
        'revision': _revision,
        'publisher_idurl': _publisher_idurl,
        'timestamp': _timestamp,
    }
    if customer_idurl == my_id.getLocalIDURL():
        # my own suppliers list is authoritative locally - do not overwrite it from DHT
        if _Debug:
            lg.out(
                _DebugLevel,
                'dht_relations.read_customer_suppliers skip caching my own suppliers list received from DHT: %s' % ret)
    else:
        contactsdb.set_suppliers(_suppliers_list, customer_idurl=customer_idurl)
        contactsdb.save_suppliers(customer_idurl=customer_idurl)
        if _Debug:
            lg.out(
                _DebugLevel,
                'dht_relations.read_customer_suppliers %r returned %r' % (
                    customer_idurl, ret, ))
    result.callback(ret)
    return None
def _on_incoming_supplier_position(self, inp):
    # this packet came from the customer, a godfather of the family ;)))
    """
    Store the position the customer assigned to me in his family, then Ack.
    Fails the packet when it is not addressed to my IDURL.
    """
    incoming_packet = inp['packet']
    try:
        ecc_map = inp['customer_ecc_map']
        supplier_idurl = inp['supplier_idurl']
        supplier_position = inp['supplier_position']
        family_snapshot = inp.get('family_snapshot') or []
    except:
        lg.exc()
        return None
    if supplier_idurl != my_id.getLocalIDURL():
        return p2p_service.SendFail(
            incoming_packet,
            'contacts packet with supplier position not addressed to me')
    # NOTE(review): _existing_position is computed but never used below -
    # looks like a leftover, confirm whether it can be dropped
    try:
        _existing_position = self.my_info['suppliers'].index(
            supplier_idurl)
    except:
        _existing_position = -1
    contactsdb.add_customer_meta_info(
        self.customer_idurl, {
            'ecc_map': ecc_map,
            'position': supplier_position,
            'family_snapshot': family_snapshot,
        })
    if _Debug:
        lg.out(
            _DebugLevel,
            'family_member._on_incoming_supplier_position stored new meta info for customer %s:\n' % self.customer_idurl)
        lg.out(
            _DebugLevel, ' ecc_map=%s position=%s family_snapshot=%s' % (
                ecc_map, supplier_position, family_snapshot, ))
    return p2p_service.SendAck(incoming_packet)
def doRequestSuppliersReview(self, *args, **kwargs):
    """
    Action method.

    Send the pending transaction to every other supplier of the family for
    review; fire 'all-suppliers-agree' immediately when there is nobody to
    ask.
    """
    if not self.transaction:
        self.automat('all-suppliers-agree')
        return
    self.suppliers_requests = []
    local_idurl = my_id.getLocalIDURL()
    for supplier_idurl in self.transaction['suppliers']:
        # skip empty slots and my own entry
        if not supplier_idurl or supplier_idurl == local_idurl:
            continue
        review_request = p2p_service.SendContacts(
            remote_idurl=supplier_idurl,
            json_payload={
                'space': 'family_member',
                'type': 'suppliers_list',
                'customer_idurl': self.customer_idurl,
                'customer_ecc_map': self.transaction['ecc_map'],
                'transaction_revision': self.transaction['revision'],
                'suppliers_list': self.transaction['suppliers'],
            },
            callbacks={
                commands.Ack(): self._on_supplier_ack,
                commands.Fail(): self._on_supplier_fail,
            },
        )
        self.suppliers_requests.append(review_request.PacketID)
    if not self.suppliers_requests:
        self.automat('all-suppliers-agree')
        return
    if _Debug:
        lg.out(
            _DebugLevel,
            'family_member.doRequestSuppliersReview sent to transaction for review to %d suppliers' % len(self.suppliers_requests))
def _do_process_family_refresh_request(self, merged_info):
    """
    Reconcile `merged_info` (DHT + my info) during a "family-refresh"
    request: verify my own position, normalize the suppliers list length to
    the expected ecc-map size and make sure my IDURL occupies my slot.

    Returns the updated merged_info dict, or None when my info is not valid
    enough to proceed, or a copy of my info when my revision is newer.
    """
    if not self.my_info:
        self.my_info = self._do_create_possible_revision(
            int(merged_info['revision']))
        lg.warn(
            '"family-refresh" request will use "possible" customer meta info: %r'
            % self.my_info)
    if int(self.my_info['revision']) > int(merged_info['revision']):
        lg.info(
            '"family-refresh" request will overwrite DHT record with my info because my revision is higher than record in DHT'
        )
        return self.my_info.copy()
    try:
        my_position = self.my_info['suppliers'].index(
            my_id.getLocalIDURL())
    except:
        my_position = -1
    if my_position < 0:
        lg.warn(
            '"family-refresh" request failed because my info not exist or not valid, my own position in the family is unknown'
        )
        return None
    my_expected_suppliers_count = None
    if self.my_info['ecc_map']:
        my_expected_suppliers_count = eccmap.GetEccMapSuppliersNumber(
            self.my_info['ecc_map'])
    if my_expected_suppliers_count and my_position >= my_expected_suppliers_count:
        lg.warn(
            '"family-refresh" request failed because my info is not valid, supplier position greater than expected suppliers count'
        )
        return None
    # only resize the merged suppliers list when the expected count is known;
    # previously len(...) was compared against None here, which raised
    # TypeError on Python 3 whenever ecc_map was missing
    if my_expected_suppliers_count and len(merged_info['suppliers']) != my_expected_suppliers_count:
        lg.warn(
            'number of suppliers not expected during processing of "family-refresh" request'
        )
        if len(merged_info['suppliers']) < my_expected_suppliers_count:
            # pad with empty slots up to the expected family size
            merged_info['suppliers'] += [
                b'',
            ] * (my_expected_suppliers_count - len(merged_info['suppliers']))
        else:
            merged_info['suppliers'] = merged_info['suppliers'][:my_expected_suppliers_count]
    try:
        existing_position = merged_info['suppliers'].index(
            my_id.getLocalIDURL())
    except ValueError:
        existing_position = -1
    if existing_position < 0:
        if merged_info['suppliers'][my_position] not in [b'', '', None]:
            # TODO: SECURITY need to implement a signature verification and
            # also build solution to validate that change was approved by customer
            lg.warn(
                'overwriting another supplier %s with my IDURL at position %d in family of customer %s' % (
                    merged_info['suppliers'][my_position], my_position, self.customer_idurl, ))
        merged_info['suppliers'][my_position] = my_id.getLocalIDURL()
        if _Debug:
            lg.out(
                _DebugLevel,
                ' placed supplier %s at known position %d in the family of customer %s'
                % (my_id.getLocalIDURL(), my_position, self.customer_idurl))
        existing_position = my_position
    if existing_position != my_position:
        # my IDURL sits on a wrong slot - move it to my known position
        merged_info['suppliers'][existing_position] = b''
        merged_info['suppliers'][my_position] = my_id.getLocalIDURL()
        if _Debug:
            lg.out(
                _DebugLevel,
                ' found my IDURL on %d position and will move it on %d position in the family of customer %s'
                % (existing_position, my_position, self.customer_idurl))
    return merged_info
def _on_retreive(self, newpacket):
    """
    Handle an incoming Retrieve request: locate the stored packet for the
    requested PacketID on local disk, validate it and send it back to the
    requester wrapped in a fresh Data() packet.

    Returns True when a reply was sent, False when the request was rejected
    (a Fail packet is sent back in that case).
    """
    import os
    from logs import lg
    from system import bpio
    from userid import my_id
    from userid import global_id
    from crypt import signed
    from contacts import contactsdb
    from transport import gateway
    from p2p import p2p_service
    from p2p import commands
    # external customer must be able to request
    # TODO: add validation of public key
    # if not contactsdb.is_customer(newpacket.OwnerID):
    #     lg.err("had unknown customer %s" % newpacket.OwnerID)
    #     p2p_service.SendFail(newpacket, 'not a customer')
    #     return False
    glob_path = global_id.ParseGlobalID(newpacket.PacketID)
    if not glob_path['path']:
        # backward compatible check
        glob_path = global_id.ParseGlobalID(
            my_id.getGlobalID('master') + ':' + newpacket.PacketID)
    if not glob_path['path']:
        lg.err("got incorrect PacketID")
        p2p_service.SendFail(newpacket, 'incorrect path')
        return False
    if not glob_path['idurl']:
        lg.warn('no customer global id found in PacketID: %s' % newpacket.PacketID)
        p2p_service.SendFail(newpacket, 'incorrect retreive request')
        return False
    if newpacket.CreatorID != glob_path['idurl']:
        lg.warn(
            'one of customers requesting a Data from another customer!')
    else:
        pass  # same customer, based on CreatorID : OK!
    recipient_idurl = newpacket.OwnerID
    # TODO: process requests from another customer : glob_path['idurl']
    filename = self._do_make_valid_filename(newpacket.OwnerID, glob_path)
    if not filename:
        if True:  # TODO: settings.getCustomersDataSharingEnabled() and
            # SECURITY
            # TODO: add more validations for receiver idurl
            # recipient_idurl = glob_path['idurl']
            # second attempt: look the file up under the customer id from the PacketID
            filename = self._do_make_valid_filename(
                glob_path['idurl'], glob_path)
    if not filename:
        lg.warn("had empty filename")
        p2p_service.SendFail(newpacket, 'empty filename')
        return False
    if not os.path.exists(filename):
        lg.warn("did not find requested file locally : %s" % filename)
        p2p_service.SendFail(newpacket, 'did not find requested file locally')
        return False
    if not os.access(filename, os.R_OK):
        lg.warn("no read access to requested packet %s" % filename)
        p2p_service.SendFail(newpacket, 'no read access to requested packet')
        return False
    data = bpio.ReadBinaryFile(filename)
    if not data:
        lg.warn("empty data on disk %s" % filename)
        p2p_service.SendFail(newpacket, 'empty data on disk')
        return False
    stored_packet = signed.Unserialize(data)
    del data
    if stored_packet is None:
        lg.warn("Unserialize failed, not Valid packet %s" % filename)
        p2p_service.SendFail(newpacket, 'unserialize failed')
        return False
    if not stored_packet.Valid():
        lg.warn("Stored packet is not Valid %s" % filename)
        p2p_service.SendFail(newpacket, 'stored packet is not valid')
        return False
    if stored_packet.Command != commands.Data():
        lg.warn('sending back packet which is not a Data')
    # here Data() packet is sent back as it is...
    # that means outpacket.RemoteID=my_id.getLocalID() - it was addressed to that node and stored as it is
    # need to take that in account every time you receive Data() packet
    # it can be not a new Data(), but the old data returning back as a response to Retreive() packet
    # let's create a new Data() packet which will be addressed directly to recipient and "wrap" stored data inside it
    routed_packet = signed.Packet(
        Command=commands.Data(),
        OwnerID=stored_packet.OwnerID,
        CreatorID=my_id.getLocalIDURL(),
        PacketID=stored_packet.PacketID,
        Payload=stored_packet.Serialize(),
        RemoteID=recipient_idurl,
    )
    if recipient_idurl == stored_packet.OwnerID:
        lg.out(
            self.debug_level,
            'service_supplier._on_retreive from request %r : sending %r back to owner: %s'
            % (newpacket, stored_packet, recipient_idurl))
        gateway.outbox(routed_packet)  # , target=recipient_idurl)
        return True
    lg.out(
        self.debug_level,
        'service_supplier._on_retreive from request %r : returning data owned by %s to %s'
        % (newpacket, stored_packet.OwnerID, recipient_idurl))
    gateway.outbox(routed_packet)
    return True