def _do_send_packet_to_router(self, outpacket, callbacks, wide, response_timeout, keep_alive, is_retry=False):
    router_idurl = proxy_receiver.GetRouterIDURL()
    router_identity_obj = proxy_receiver.GetRouterIdentity()
    router_proto_host = proxy_receiver.GetRouterProtoHost()
    router_proto, router_host = router_proto_host
    publickey = router_identity_obj.publickey
    my_original_identity_src = proxy_receiver.ReadMyOriginalIdentitySource()
    if not router_idurl or not router_identity_obj or not router_proto_host or not my_original_identity_src:
        if _Debug:
            lg.out(_DebugLevel, 'proxy_sender._do_send_packet_to_router SKIP because router not ready yet')
        return self._do_add_pending_packet(outpacket, callbacks, wide, response_timeout, keep_alive)
    if outpacket.RemoteID.to_bin() == router_idurl.to_bin():
        if _Debug:
            lg.out(_DebugLevel, 'proxy_sender._do_send_packet_to_router SKIP, packet addressed to router and must be sent in a usual way')
        return None
    try:
        raw_data = outpacket.Serialize()
    except:
        lg.exc('failed to Serialize %s' % outpacket)
        return None
    # see proxy_router.ProxyRouter : doForwardOutboxPacket() for receiving part
    json_payload = {
        'f': my_id.getIDURL().to_bin(),    # from
        't': outpacket.RemoteID.to_bin(),  # to
        'p': raw_data,                     # payload
        'w': wide,                         # wide
        'i': response_timeout,
        'a': keep_alive,
        'r': is_retry,
    }
    if not json_payload['t']:
        raise ValueError('receiver idurl was not set')
    raw_bytes = serialization.DictToBytes(json_payload)
    block = encrypted.Block(
        CreatorID=my_id.getIDURL(),
        BackupID='routed outgoing data',
        BlockNumber=0,
        SessionKey=key.NewSessionKey(session_key_type=key.SessionKeyType()),
        SessionKeyType=key.SessionKeyType(),
        LastBlock=True,
        Data=raw_bytes,
        EncryptKey=lambda inp: key.EncryptOpenSSHPublicKey(publickey, inp),
    )
    block_encrypted = block.Serialize()
    newpacket = signed.Packet(
        Command=commands.RelayOut(),
        OwnerID=outpacket.OwnerID,
        CreatorID=my_id.getIDURL(),
        PacketID=outpacket.PacketID,
        Payload=block_encrypted,
        RemoteID=router_idurl,
    )
    if response_timeout is not None:
        # must give some extra time for the proxy re-routing
        response_timeout += 10.0
    routed_packet = packet_out.create(
        outpacket=outpacket,
        wide=False,
        callbacks={},
        route={
            'packet': newpacket,  # pointing "newpacket" to router node
            'proto': router_proto,
            'host': router_host,
            'remoteid': router_idurl,
            'description': 'RelayOut_%s[%s]_%s' % (outpacket.Command, outpacket.PacketID, nameurl.GetName(router_idurl)),
        },
        response_timeout=response_timeout,
        keep_alive=True,
    )
    for command, cb_list in callbacks.items():
        if isinstance(cb_list, list):
            for cb in cb_list:
                routed_packet.set_callback(command, cb)
        else:
            routed_packet.set_callback(command, cb_list)
    if not is_retry:
        _key = (outpacket.Command, outpacket.PacketID, outpacket.RemoteID.to_bin(), )
        self.sent_packets[_key] = (routed_packet, outpacket, )
    self.event('relay-out', (outpacket, newpacket, routed_packet))
    if _Debug:
        lg.out(_DebugLevel, '>>>Relay-OUT %s sent to %s://%s with %d bytes, timeout=%r' % (
            str(outpacket), router_proto, router_host, len(block_encrypted), response_timeout, ))
    if _PacketLogFileEnabled:
        lg.out(0, '\033[0;49;36mRELAY OUT %s(%s) with %s bytes from %s to %s via %s\033[0m' % (
            outpacket.Command,
            outpacket.PacketID,
            len(raw_bytes),
            global_id.UrlToGlobalID(outpacket.CreatorID),
            global_id.UrlToGlobalID(outpacket.RemoteID),
            global_id.UrlToGlobalID(router_idurl),
        ), log_name='packet', showtime=True, )
    del raw_bytes
    del block
    del newpacket
    del outpacket
    del router_identity_obj
    del router_idurl
    del router_proto_host
    return routed_packet

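# Hedged usage sketch, not part of the original module: the `callbacks` argument
# consumed above maps a command to either a single callable or a list of
# callables, and each one is re-attached to the routed packet. Handler names
# below are hypothetical placeholders.

def _example_on_ack(response_packet, info):
    print('acknowledged:', response_packet)


def _example_on_fail(response_packet, info):
    print('failed:', response_packet)


example_callbacks = {
    commands.Ack(): _example_on_ack,         # a single callable ...
    commands.Fail(): [_example_on_fail, ],   # ... or a list of callables
}
# self._do_send_packet_to_router(outpacket, example_callbacks, wide=False,
#                                response_timeout=30, keep_alive=True)
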
def _on_first_outbox_packet(self, outpacket, wide, callbacks, target=None, route=None, response_timeout=None, keep_alive=True):
    """
    Will be called first for every outgoing packet.

    Must return `None` if that packet should be sent in a normal way.
    Otherwise will create another "routed" packet instead and return it.
    """
    if not driver.is_on('service_proxy_transport'):
        if _Debug:
            lg.out(_DebugLevel, 'proxy_sender._on_first_outbox_packet SKIP sending %r because service_proxy_transport is not started yet' % outpacket)
        return None
    if not proxy_receiver.A():
        if _Debug:
            lg.out(_DebugLevel, 'proxy_sender._on_first_outbox_packet SKIP sending %r because proxy_receiver() does not exist' % outpacket)
        return None
    if outpacket.Command == commands.Identity() and outpacket.CreatorID == my_id.getIDURL():
        if proxy_receiver.GetPossibleRouterIDURL() and proxy_receiver.GetPossibleRouterIDURL().to_bin() == outpacket.RemoteID.to_bin():
            if network_connector.A().state == 'DISCONNECTED':
                if _Debug:
                    lg.out(_DebugLevel, 'proxy_sender._on_first_outbox_packet SKIP sending %r because network_connector() is DISCONNECTED' % outpacket)
                return None
            if network_connector.A().state == 'CONNECTED':
                lg.warn('sending %r to "possible" proxy router %r' % (outpacket, proxy_receiver.GetPossibleRouterIDURL()))
                pkt_out = packet_out.create(outpacket, wide, callbacks, target, route, response_timeout, keep_alive)
                return pkt_out
            if _Debug:
                lg.out(_DebugLevel, 'proxy_sender._on_first_outbox_packet SKIP sending %r, network_connector() is in a transition state' % outpacket)
            return None
    if proxy_receiver.A().state != 'LISTEN':
        if _Debug:
            lg.out(_DebugLevel, 'proxy_sender._on_first_outbox_packet DELAYED %r because proxy_receiver state is not LISTEN yet' % outpacket)
        return self._do_add_pending_packet(outpacket, callbacks, wide, response_timeout, keep_alive)
    return self._do_send_packet_to_router(
        outpacket=outpacket,
        callbacks=callbacks,
        wide=wide,
        keep_alive=keep_alive,
        response_timeout=response_timeout,
    )

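# Hedged summary, not part of the original module: per the docstring above, the
# return value of _on_first_outbox_packet() drives the routing decision:
#   * None                                    - the caller sends outpacket over a regular transport
#   * result of packet_out.create()           - the packet goes directly to the "possible" router
#   * result of _do_add_pending_packet()      - the packet is parked until proxy_receiver reaches LISTEN
#   * result of _do_send_packet_to_router()   - the packet is wrapped into RelayOut() and routed
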
def doScanAndQueue(self, *args, **kwargs):
    """
    Action method.
    """
    global _ShutdownFlag
    if _ShutdownFlag:
        if _Debug:
            lg.out(_DebugLevel, 'data_sender.doScanAndQueue _ShutdownFlag is True\n')
        self.automat('scan-done', 0)
        return
    from storage import backup_matrix
    from storage import backup_fs
    backup_matrix.ReadLocalFiles()
    progress = 0
    # if _Debug:
    #     lg.out(_DebugLevel, 'data_sender.doScanAndQueue with %d known customers' % len(contactsdb.known_customers()))
    for customer_idurl in contactsdb.known_customers():
        if customer_idurl != my_id.getIDURL():
            # TODO: check that later
            if _Debug:
                lg.out(_DebugLevel + 2, 'data_sender.doScanAndQueue skip sending to another customer: %r' % customer_idurl)
            continue
        known_suppliers = contactsdb.suppliers(customer_idurl)
        if not known_suppliers or id_url.is_some_empty(known_suppliers):
            if _Debug:
                lg.out(_DebugLevel, 'data_sender.doScanAndQueue found empty supplier(s) for customer %r, SKIP' % customer_idurl)
            continue
        known_backups = misc.sorted_backup_ids(list(backup_matrix.local_files().keys()), True)
        if _Debug:
            lg.out(_DebugLevel, 'data_sender.doScanAndQueue found %d known suppliers for customer %r with %d backups' % (
                len(known_suppliers), customer_idurl, len(known_backups)))
        for backupID in known_backups:
            this_customer_idurl = packetid.CustomerIDURL(backupID)
            if this_customer_idurl != customer_idurl:
                continue
            customerGlobalID, pathID, _ = packetid.SplitBackupID(backupID, normalize_key_alias=True)
            keyAlias = packetid.KeyAlias(customerGlobalID)
            item = backup_fs.GetByID(pathID, iterID=backup_fs.fsID(customer_idurl, keyAlias))
            if not item:
                if _Debug:
                    lg.out(_DebugLevel, 'data_sender.doScanAndQueue skip sending backup %r path not exist in catalog' % backupID)
                continue
            if item.key_id and customerGlobalID and customerGlobalID != item.key_id:
                if _Debug:
                    lg.out(_DebugLevel, 'data_sender.doScanAndQueue skip sending backup %r key is different in the catalog: %r ~ %r' % (
                        backupID, customerGlobalID, item.key_id, ))
                continue
            packetsBySupplier = backup_matrix.ScanBlocksToSend(backupID, limit_per_supplier=None)
            total_for_customer = sum([len(v) for v in packetsBySupplier.values()])
            if total_for_customer:
                if _Debug:
                    lg.out(_DebugLevel, 'data_sender.doScanAndQueue sending %r for customer %r with %d pieces' % (
                        item.name(), customer_idurl, total_for_customer))
                for supplierNum in packetsBySupplier.keys():
                    # supplier_idurl = contactsdb.supplier(supplierNum, customer_idurl=customer_idurl)
                    if supplierNum >= 0 and supplierNum < len(known_suppliers):
                        supplier_idurl = known_suppliers[supplierNum]
                    else:
                        supplier_idurl = None
                    if not supplier_idurl:
                        lg.warn('skip sending, unknown supplier_idurl supplierNum=%s for %s, customer_idurl=%r' % (
                            supplierNum, backupID, customer_idurl))
                        continue
                    for packetID in packetsBySupplier[supplierNum]:
                        backupID_, _, supplierNum_, _ = packetid.BidBnSnDp(packetID)
                        if backupID_ != backupID:
                            lg.warn('skip sending, unexpected backupID supplierNum=%s for %s, customer_idurl=%r' % (
                                packetID, backupID, customer_idurl))
                            continue
                        if supplierNum_ != supplierNum:
                            lg.warn('skip sending, unexpected supplierNum %s for %s, customer_idurl=%r' % (
                                packetID, backupID, customer_idurl))
                            continue
                        if io_throttle.HasPacketInSendQueue(supplier_idurl, packetID):
                            if _Debug:
                                lg.out(_DebugLevel, 'data_sender.doScanAndQueue %s already in sending queue for %r' % (packetID, supplier_idurl))
                            continue
                        latest_progress = self.statistic.get(supplier_idurl, {}).get('latest', '')
                        if len(latest_progress) >= 3 and latest_progress.endswith('---'):
                            if _Debug:
                                lg.out(_DebugLevel + 2, 'data_sender.doScanAndQueue skip sending to supplier %r because multiple packets already failed' % supplier_idurl)
                            continue
                        if not io_throttle.OkToSend(supplier_idurl):
                            if _Debug:
                                lg.out(_DebugLevel + 2, 'data_sender.doScanAndQueue skip sending, queue is busy for %r' % supplier_idurl)
                            continue
                        customerGlobalID, pathID = packetid.SplitPacketID(packetID)
                        filename = os.path.join(settings.getLocalBackupsDir(), customerGlobalID, pathID, )
                        if not os.path.isfile(filename):
                            if _Debug:
                                lg.out(_DebugLevel, 'data_sender.doScanAndQueue %s is not a file' % filename)
                            continue
                        itemInfo = item.to_json()
                        if io_throttle.QueueSendFile(
                            filename,
                            packetID,
                            supplier_idurl,
                            my_id.getIDURL(),
                            lambda packet, ownerID, packetID: self._packetAcked(packet, ownerID, packetID, itemInfo),
                            lambda remoteID, packetID, why: self._packetFailed(remoteID, packetID, why, itemInfo),
                        ):
                            progress += 1
                            if _Debug:
                                lg.out(_DebugLevel, 'data_sender.doScanAndQueue for %r put %s in the queue progress=%d' % (
                                    item.name(), packetID, progress, ))
                        else:
                            if _Debug:
                                lg.out(_DebugLevel, 'data_sender.doScanAndQueue io_throttle.QueueSendFile FAILED %s' % packetID)
    if _Debug:
        lg.out(_DebugLevel, 'data_sender.doScanAndQueue progress=%s' % progress)
    self.automat('scan-done', progress)

def _do_send_packets(self, backup_id, block_num):
    customer_id, path_id, version_name = packetid.SplitBackupID(backup_id)
    archive_snapshot_dir = os.path.join(settings.getLocalBackupsDir(), customer_id, path_id, version_name)
    if _Debug:
        lg.args(_DebugLevel, backup_id=backup_id, block_num=block_num, archive_snapshot_dir=archive_snapshot_dir)
    if not os.path.isdir(archive_snapshot_dir):
        self.block_failed = True
        lg.err('archive snapshot folder was not found in %r' % archive_snapshot_dir)
        return None
    failed_suppliers = 0
    for supplier_num in range(len(self.suppliers_list)):
        supplier_idurl = self.suppliers_list[supplier_num]
        if not supplier_idurl:
            failed_suppliers += 1
            lg.warn('unknown supplier supplier_num=%d' % supplier_num)
            continue
        for dataORparity in ('Data', 'Parity', ):
            packet_id = packetid.MakePacketID(backup_id, block_num, supplier_num, dataORparity)
            packet_filename = os.path.join(archive_snapshot_dir, '%d-%d-%s' % (block_num, supplier_num, dataORparity, ))
            if not os.path.isfile(packet_filename):
                lg.err('%s is not a file' % packet_filename)
                continue
            packet_payload = bpio.ReadBinaryFile(packet_filename)
            if not packet_payload:
                lg.err('file %r reading error' % packet_filename)
                continue
            if block_num not in self.packets_out:
                self.packets_out[block_num] = {}
            self.packets_out[block_num][packet_id] = None
            p2p_service.SendData(
                raw_data=packet_payload,
                ownerID=self.queue_owner_idurl,
                creatorID=my_id.getIDURL(),
                remoteID=supplier_idurl,
                packetID=packet_id,
                callbacks={
                    commands.Ack(): lambda newpacket, _: self.automat('ack', newpacket=newpacket),
                    commands.Fail(): lambda newpacket, _: self.automat('fail', newpacket=newpacket),
                },
            )
    if failed_suppliers > self.correctable_errors:
        self.block_failed = True
        lg.err('too many failed suppliers %d in block %d' % (failed_suppliers, block_num, ))

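# Hedged illustration, not part of the original module: given the path and file
# naming logic above, the local archive snapshot for block 0 of a backup stored
# with two suppliers is expected to look like this on disk (paths are examples):
#
#   <getLocalBackupsDir()>/<customer_id>/<path_id>/<version_name>/
#       0-0-Data    0-0-Parity
#       0-1-Data    0-1-Parity
#
# i.e. every piece is named '%d-%d-%s' % (block_num, supplier_num, 'Data' or 'Parity')
# and sent to the supplier at that position via p2p_service.SendData().
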
def contacts_remote(include_all=False, include_enabled=True):
    """
    Return a list of IDs of all known peers.
    """
    l = id_url.to_bin_list(contacts(include_all=include_all, include_enabled=include_enabled))
    return [i for i in id_url.fields_list(l) if not id_url.is_the_same(i, my_id.getIDURL())]

def SendToIDs(idlist, wide=False, ack_handler=None, timeout_handler=None, response_timeout=20, wait_packets=False):
    """
    Same, but send to many IDs and also check previous packets to avoid re-sending.
    """
    global _PropagateCounter
    if _Debug:
        lg.out(_DebugLevel, "propagate.SendToIDs to %d users, wide=%s" % (len(idlist), wide))
    if ack_handler is None:
        ack_handler = HandleAck
    if timeout_handler is None:
        timeout_handler = HandleTimeOut
    LocalIdentity = my_id.getLocalIdentity()
    Payload = strng.to_bin(LocalIdentity.serialize())
    alreadysent = set()
    totalsent = 0
    inqueue = {}
    found_previous_packets = 0
    for pkt_out in packet_out.queue():
        if id_url.is_in(pkt_out.remote_idurl, idlist, as_field=False):
            if pkt_out.description.count('Identity'):
                if pkt_out.remote_idurl not in inqueue:
                    inqueue[pkt_out.remote_idurl] = 0
                inqueue[pkt_out.remote_idurl] += 1
                found_previous_packets += 1
    wait_list = []
    for contact in idlist:
        if not contact:
            continue
        if contact in alreadysent:
            # just want to send once, even if the node is both customer and supplier
            continue
        if contact in inqueue and inqueue[contact] > 2:
            # currently only 2 protocols are working: tcp and udp
            if _Debug:
                lg.out(_DebugLevel, '    skip sending [Identity] to %s, packet already in the queue' % contact)
            continue
        p = signed.Packet(
            Command=commands.Identity(),
            OwnerID=my_id.getIDURL(),
            CreatorID=my_id.getIDURL(),
            PacketID=('propagate:%d:%s' % (_PropagateCounter, packetid.UniqueID())),
            Payload=Payload,
            RemoteID=contact,
        )
        _PropagateCounter += 1
        if _Debug:
            lg.out(_DebugLevel, "    sending %r to %s" % (p, nameurl.GetName(contact), ))
        res = gateway.outbox(p, wide, response_timeout=response_timeout, callbacks={
            commands.Ack(): ack_handler,
            commands.Fail(): ack_handler,
            None: timeout_handler,
        })
        if not res:
            lg.warn('my Identity() was not sent to %r' % contact)
            continue
        if wide:
            # this is a ping packet - need to clear old info
            p2p_stats.ErasePeerProtosStates(contact)
            p2p_stats.EraseMyProtosStates(contact)
        alreadysent.add(contact)
        totalsent += 1
        if wait_packets and res:
            if isinstance(res, Deferred):
                wait_list.append(res)
            elif res.finished_deferred and isinstance(res.finished_deferred, Deferred):
                wait_list.append(res.finished_deferred)
    del alreadysent
    if not wait_packets:
        return totalsent
    return DeferredList(wait_list, consumeErrors=True)

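# Hedged usage sketch, not part of the original module: with wait_packets=True
# SendToIDs() returns a twisted DeferredList, so the caller can react once every
# outgoing Identity() packet was acknowledged, failed or timed out. The idurl
# value and helper names below are hypothetical.
from twisted.internet.defer import DeferredList


def _example_propagate_and_wait():
    def _all_finished(results):
        # results is the standard DeferredList payload: a list of (success, value) pairs
        print('propagate finished:', results)

    res = SendToIDs(
        idlist=[b'http://some-id-server.net/alice.xml'],
        wide=True,
        response_timeout=20,
        wait_packets=True,
    )
    if isinstance(res, DeferredList):
        res.addCallback(_all_finished)
    return res
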
def isMyOwnKey(self, *args, **kwargs):
    """
    Condition method.
    """
    return id_url.to_bin(args[0]) == my_id.getIDURL().to_bin()

def on_retrieve(newpacket):
    # external customer must be able to request
    # TODO: add validation of public key
    # if not contactsdb.is_customer(newpacket.OwnerID):
    #     lg.err("had unknown customer %s" % newpacket.OwnerID)
    #     p2p_service.SendFail(newpacket, 'not a customer')
    #     return False
    glob_path = global_id.ParseGlobalID(newpacket.PacketID)
    if not glob_path['path']:
        # backward compatible check
        glob_path = global_id.ParseGlobalID(my_id.getGlobalID('master') + ':' + newpacket.PacketID)
    if not glob_path['path']:
        lg.err("got incorrect PacketID")
        p2p_service.SendFail(newpacket, 'incorrect path')
        return False
    if not glob_path['idurl']:
        lg.warn('no customer global id found in PacketID: %s' % newpacket.PacketID)
        p2p_service.SendFail(newpacket, 'incorrect retrieve request')
        return False
    if newpacket.CreatorID != glob_path['idurl']:
        lg.warn('one of the customers is requesting a Data from another customer!')
    else:
        pass  # same customer, based on CreatorID : OK!
    recipient_idurl = newpacket.OwnerID
    # TODO: process requests from another customer : glob_path['idurl']
    filename = make_valid_filename(newpacket.OwnerID, glob_path)
    if not filename:
        if True:
            # TODO: settings.getCustomersDataSharingEnabled() and
            # SECURITY
            # TODO: add more validations for receiver idurl
            # recipient_idurl = glob_path['idurl']
            filename = make_valid_filename(glob_path['idurl'], glob_path)
    if not filename:
        lg.warn("had empty filename")
        p2p_service.SendFail(newpacket, 'empty filename')
        return False
    if not os.path.exists(filename):
        lg.warn("did not find requested file locally : %s" % filename)
        p2p_service.SendFail(newpacket, 'did not find requested file locally')
        return False
    if not os.access(filename, os.R_OK):
        lg.warn("no read access to requested packet %s" % filename)
        p2p_service.SendFail(newpacket, 'no read access to requested packet')
        return False
    data = bpio.ReadBinaryFile(filename)
    if not data:
        lg.warn("empty data on disk %s" % filename)
        p2p_service.SendFail(newpacket, 'empty data on disk')
        return False
    stored_packet = signed.Unserialize(data)
    del data
    if stored_packet is None:
        lg.warn("Unserialize failed, not a valid packet %s" % filename)
        p2p_service.SendFail(newpacket, 'unserialize failed')
        return False
    if not stored_packet.Valid():
        lg.warn("stored packet is not valid %s" % filename)
        p2p_service.SendFail(newpacket, 'stored packet is not valid')
        return False
    if stored_packet.Command != commands.Data():
        lg.warn('sending back packet which is not a Data')
    # here the Data() packet is sent back as it is...
    # that means outpacket.RemoteID=my_id.getIDURL() - it was addressed to that node and stored as it is
    # need to take that into account every time you receive a Data() packet
    # it can be not a new Data(), but the old data returning back as a response to a Retrieve() packet
    # let's create a new Data() packet which will be addressed directly to recipient and "wrap" stored data inside it
    routed_packet = signed.Packet(
        Command=commands.Data(),
        OwnerID=stored_packet.OwnerID,
        CreatorID=my_id.getIDURL(),
        PacketID=stored_packet.PacketID,
        Payload=stored_packet.Serialize(),
        RemoteID=recipient_idurl,
    )
    if recipient_idurl == stored_packet.OwnerID:
        lg.info('from request %r : sending %r back to owner: %s' % (newpacket, stored_packet, recipient_idurl))
        gateway.outbox(routed_packet)  # , target=recipient_idurl)
        return True
    lg.info('from request %r : returning data owned by %s to %s' % (newpacket, stored_packet.OwnerID, recipient_idurl))
    gateway.outbox(routed_packet)
    return True

def doSendMyIdentity(self, *args, **kwargs):
    """
    Action method.
    """
    global _KnownChannels
    self.ping_attempts += 1
    if self.fake_identity:
        identity_object = self.fake_identity
    else:
        identity_object = my_id.getLocalIdentity()
    if not identity_object.Valid():
        raise Exception('can not use invalid identity for ping')
    if self.channel_counter:
        packet_id = '%s:%d:%d:%s' % (self.channel, _KnownChannels[self.channel], self.ping_attempts, packetid.UniqueID())
    else:
        packet_id = '%s:%d:%s' % (self.channel, self.ping_attempts, packetid.UniqueID())
    ping_packet = signed.Packet(
        Command=commands.Identity(),
        OwnerID=my_id.getIDURL(),
        CreatorID=my_id.getIDURL(),
        PacketID=packet_id,
        Payload=strng.to_bin(identity_object.serialize()),
        RemoteID=self.remote_idurl,
    )
    if self.skip_outbox:
        packet_out.create(
            outpacket=ping_packet,
            wide=True,
            response_timeout=self.ack_timeout,
            callbacks={
                commands.Ack(): lambda response, info: self.automat('ack-received', response=response, info=info),
                commands.Fail(): lambda response, info: self.automat('fail-received', response=response, info=info),
                None: lambda pkt_out: self.automat('ack-timeout', pkt_out),
            },
            keep_alive=self.keep_alive,
        )
    else:
        gateway.outbox(
            outpacket=ping_packet,
            wide=True,
            response_timeout=self.ack_timeout,
            callbacks={
                commands.Ack(): lambda response, info: self.automat('ack-received', response=response, info=info),
                commands.Fail(): lambda response, info: self.automat('fail-received', response=response, info=info),
                None: lambda pkt_out: self.automat('ack-timeout', pkt_out),
            },
            keep_alive=self.keep_alive,
        )
    if _Debug:
        lg.args(_DebugLevel, packet_id=packet_id, remote_idurl=self.remote_idurl, ping_attempts=self.ping_attempts)

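# Hedged illustration, not part of the original module: the PacketID built above
# encodes the ping channel, the per-channel counter (only when channel_counter is
# set), the attempt number and a unique suffix. Example values are made up:
#
#   'ping:4:2:170799012345'    # channel_counter enabled
#   'ping:2:170799012345'      # channel_counter disabled
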
def _request_files(self):
    from storage import backup_matrix
    from stream import io_throttle
    from stream import data_sender
    self.missingPackets = 0
    # here we want to request some packets before we start working to
    # rebuild the missed blocks
    availableSuppliers = backup_matrix.GetActiveArray(customer_idurl=self.currentCustomerIDURL)
    # remember how many requests we did on this iteration
    total_requests_count = 0
    # at the moment I do download everything I have available and needed
    if id_url.is_some_empty(contactsdb.suppliers(customer_idurl=self.currentCustomerIDURL)):
        if _Debug:
            lg.out(_DebugLevel, 'backup_rebuilder._request_files SKIP - empty supplier')
        self.automat('no-requests')
        return
    for supplierNum in range(contactsdb.num_suppliers(customer_idurl=self.currentCustomerIDURL)):
        supplierID = contactsdb.supplier(supplierNum, customer_idurl=self.currentCustomerIDURL)
        if not supplierID:
            continue
        requests_count = 0
        # we do requests in reverse order because we start rebuilding from the last block
        for blockIndex in range(len(self.workingBlocksQueue) - 1, -1, -1):
            blockNum = self.workingBlocksQueue[blockIndex]
            # do not keep too many requests in the queue
            if io_throttle.GetRequestQueueLength(supplierID) >= 16:
                break
            # also don't do too many requests at once
            if requests_count > 16:
                break
            remoteData = backup_matrix.GetRemoteDataArray(self.currentBackupID, blockNum)
            remoteParity = backup_matrix.GetRemoteParityArray(self.currentBackupID, blockNum)
            localData = backup_matrix.GetLocalDataArray(self.currentBackupID, blockNum)
            localParity = backup_matrix.GetLocalParityArray(self.currentBackupID, blockNum)
            if supplierNum >= len(remoteData) or supplierNum >= len(remoteParity):
                break
            if supplierNum >= len(localData) or supplierNum >= len(localParity):
                break
            # if remote Data exists and is available because supplier is on-line,
            # but we do not have it on hand - do request
            if localData[supplierNum] == 0:
                PacketID = packetid.MakePacketID(self.currentBackupID, blockNum, supplierNum, 'Data')
                if remoteData[supplierNum] == 1:
                    if availableSuppliers[supplierNum]:
                        # if supplier is not alive - we can't request from him
                        if not io_throttle.HasPacketInRequestQueue(supplierID, PacketID):
                            customer, remotePath = packetid.SplitPacketID(PacketID)
                            filename = os.path.join(settings.getLocalBackupsDir(), customer, remotePath, )
                            if not os.path.exists(filename):
                                if io_throttle.QueueRequestFile(self._file_received, my_id.getIDURL(), PacketID, my_id.getIDURL(), supplierID):
                                    requests_count += 1
                else:
                    # count this packet as missing
                    self.missingPackets += 1
                    # also mark this guy as one who doesn't have any data - neither local nor remote
            else:
                # but if local Data already exists, but was not sent - do it now
                if remoteData[supplierNum] != 1:
                    data_sender.A('new-data')
            # same for Parity
            if localParity[supplierNum] == 0:
                PacketID = packetid.MakePacketID(self.currentBackupID, blockNum, supplierNum, 'Parity')
                if remoteParity[supplierNum] == 1:
                    if availableSuppliers[supplierNum]:
                        if not io_throttle.HasPacketInRequestQueue(supplierID, PacketID):
                            customer, remotePath = packetid.SplitPacketID(PacketID)
                            filename = os.path.join(settings.getLocalBackupsDir(), customer, remotePath, )
                            if not os.path.exists(filename):
                                if io_throttle.QueueRequestFile(
                                    self._file_received,
                                    my_id.getIDURL(),
                                    PacketID,
                                    my_id.getIDURL(),
                                    supplierID,
                                ):
                                    requests_count += 1
                else:
                    self.missingPackets += 1
            else:
                # but if local Parity already exists, but was not sent - do it now
                if remoteParity[supplierNum] != 1:
                    data_sender.A('new-data')
        total_requests_count += requests_count
    if total_requests_count > 0:
        if _Debug:
            lg.out(_DebugLevel, 'backup_rebuilder._request_files : %d chunks requested' % total_requests_count)
        self.automat('requests-sent', total_requests_count)
    else:
        if self.missingPackets:
            if _Debug:
                lg.out(_DebugLevel, 'backup_rebuilder._request_files : found %d missing packets' % self.missingPackets)
            self.automat('found-missing')
        else:
            if _Debug:
                lg.out(_DebugLevel, 'backup_rebuilder._request_files : nothing was requested')
            self.automat('no-requests')

def _test():
    from coins import mine
    print(prepare_broadcast_message(my_id.getIDURL(), {'test': 'okidoki'}))

def inbox(newpacket, info, status, error_message):
    """
    """
    if newpacket.CreatorID != my_id.getIDURL() and newpacket.RemoteID != my_id.getIDURL():
        # packet is NOT for us, skip
        return False
    if newpacket.Command == commands.Ack():
        # a response from a remote node, typically handled in other places
        Ack(newpacket, info)
    elif newpacket.Command == commands.Fail():
        # some operation failed on the other side
        Fail(newpacket)
    elif newpacket.Command == commands.Retrieve():
        # retrieve some packet the customer stored with us
        # handled by service_supplier()
        Retrieve(newpacket)
    elif newpacket.Command == commands.RequestService():
        # other node sent us a request to get some service
        # handled by service_p2p_hookups()
        RequestService(newpacket, info)
    elif newpacket.Command == commands.CancelService():
        # other node wants to stop the service we gave him
        # handled by service_p2p_hookups()
        CancelService(newpacket, info)
    elif newpacket.Command == commands.Data():
        # new packet to store for customer, or data coming back from supplier
        # handled by service_backups() and service_supplier()
        Data(newpacket)
    elif newpacket.Command == commands.ListFiles():
        # customer wants a list of their files
        # handled by service_supplier()
        ListFiles(newpacket, info)
    elif newpacket.Command == commands.Files():
        # supplier sent us a list of files
        # handled by service_backups()
        Files(newpacket, info)
    elif newpacket.Command == commands.DeleteFile():
        # handled by service_supplier()
        DeleteFile(newpacket)
    elif newpacket.Command == commands.DeleteBackup():
        # handled by service_supplier()
        DeleteBackup(newpacket)
    elif newpacket.Command == commands.Correspondent():
        # TODO: contact asking for our current identity, not implemented yet
        Correspondent(newpacket)
    elif newpacket.Command == commands.Broadcast():
        # handled by service_broadcasting()
        Broadcast(newpacket, info)
    elif newpacket.Command == commands.Coin():
        # handled by service_accountant()
        Coin(newpacket, info)
    elif newpacket.Command == commands.RetrieveCoin():
        # handled by service_accountant()
        RetrieveCoin(newpacket, info)
    elif newpacket.Command == commands.Key():
        # handled by service_keys_registry()
        Key(newpacket, info)
    elif newpacket.Command == commands.Event():
        # handled by service_p2p_hookups()
        Event(newpacket, info)
    elif newpacket.Command == commands.Message():
        # handled by service_private_messages()
        Message(newpacket, info)
    elif newpacket.Command == commands.Contacts():
        # handled by service_customer_family()
        Contacts(newpacket, info)
    return False

def SendListFiles(target_supplier, customer_idurl=None, key_id=None, query_items=[], wide=False, callbacks={}, timeout=None):
    """
    This is used as a request method to your supplier : if you send him a ListFiles()
    packet, he will reply with a list of stored files inside a Files() packet.
    """
    MyID = my_id.getIDURL()
    if not customer_idurl:
        customer_idurl = MyID
    if not str(target_supplier).isdigit():
        RemoteID = target_supplier
    else:
        RemoteID = contactsdb.supplier(target_supplier, customer_idurl=customer_idurl)
    if not RemoteID:
        lg.warn("RemoteID is empty target_supplier=%s" % str(target_supplier))
        return None
    if not key_id:
        # key_id = global_id.MakeGlobalID(idurl=customer_idurl, key_alias='customer')
        # TODO: due to an issue with "customer" key backup/restore, decided to always use my "master" key
        # to retrieve my list files info from the supplier
        # expect the remote user to always possess my master public key from my identity.
        # probably requires more work to build a more reliable solution without using my master key at all
        # when my identity rotated, the supplier first needs to receive my new identity and then send ListFiles()
        key_id = my_id.getGlobalID(key_alias='master')
    else:
        key_id = my_keys.latest_key_id(key_id)
    if not my_keys.is_key_registered(key_id) or not my_keys.is_key_private(key_id):
        lg.warn('key %r does not exist or is public, my "master" key to be used with ListFiles() packet' % key_id)
        key_id = my_id.getGlobalID(key_alias='master')
    PacketID = "%s:%s" % (key_id, packetid.UniqueID(), )
    if not query_items:
        query_items = ['*', ]
    Payload = serialization.DictToBytes({'items': query_items, })
    if _Debug:
        lg.out(_DebugLevel, "p2p_service.SendListFiles %r to %r of customer %r with query : %r" % (
            PacketID, nameurl.GetName(RemoteID), nameurl.GetName(customer_idurl), query_items, ))
    result = signed.Packet(
        Command=commands.ListFiles(),
        OwnerID=MyID,
        CreatorID=MyID,
        PacketID=PacketID,
        Payload=Payload,
        RemoteID=RemoteID,
    )
    gateway.outbox(result, wide=wide, callbacks=callbacks, response_timeout=timeout)
    return result

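# Hedged usage sketch, not part of the original module: ask the supplier at
# position 0 for the list of my stored files and attach simple Ack()/Fail()
# handlers to the outgoing packet. Handler names are hypothetical; the Files()
# reply itself is processed elsewhere (see on_files_received() below).

def _example_request_list_files():
    def _on_ack(response_packet, info):
        print('ListFiles() delivered:', response_packet.PacketID)

    def _on_fail(response_packet, info):
        print('ListFiles() rejected:', response_packet.PacketID)

    return SendListFiles(
        target_supplier=0,        # supplier position; an idurl value also works
        query_items=['*'],        # the default query, spelled out for clarity
        callbacks={
            commands.Ack(): _on_ack,
            commands.Fail(): _on_fail,
        },
        timeout=30,
    )
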
def build_json_conversation(**record):
    conv = {
        'key_id': '',
        'label': '',
        'state': 'OFFLINE',
        'index': None,
        'id': None,
        'name': None,
        'repr': None,
        'events': None,
    }
    conv.update(record)
    if conv['type'] == 'private_message':
        local_key_id1, _, local_key_id2 = conv['conversation_id'].partition('&')
        try:
            local_key_id1 = int(local_key_id1)
            local_key_id2 = int(local_key_id2)
        except:
            lg.exc()
            return None
        usr1 = my_keys.get_local_key(local_key_id1)
        usr2 = my_keys.get_local_key(local_key_id2)
        if not usr1 or not usr2:
            # lg.warn('%r %r : not found sender or recipient key_id for %r' % (usr1, usr2, conv, ))
            return None
        usr1 = usr1.replace('master$', '')
        usr2 = usr2.replace('master$', '')
        idurl1 = global_id.glob2idurl(usr1, as_field=True)
        idurl2 = global_id.glob2idurl(usr2, as_field=True)
        conv_key_id = None
        conv_label = None
        user_idurl = None
        if (id_url.is_cached(idurl1) and idurl1 == my_id.getIDURL()) or usr1.split('@')[0] == my_id.getIDName():
            user_idurl = idurl2
            conv_key_id = global_id.UrlToGlobalID(idurl2, include_key=True)
            conv_label = conv_key_id.replace('master$', '').split('@')[0]
        if (id_url.is_cached(idurl2) and idurl2 == my_id.getIDURL()) or usr2.split('@')[0] == my_id.getIDName():
            user_idurl = idurl1
            conv_key_id = global_id.UrlToGlobalID(idurl1, include_key=True)
            conv_label = conv_key_id.replace('master$', '').split('@')[0]
        if conv_key_id:
            conv['key_id'] = conv_key_id
        if conv_label:
            conv['label'] = conv_label
        else:
            conv['label'] = conv_key_id
        if user_idurl:
            on_st = online_status.getInstance(user_idurl, autocreate=False)
            if on_st:
                conv.update(on_st.to_json())
    elif conv['type'] == 'group_message' or conv['type'] == 'personal_message':
        local_key_id, _, _ = conv['conversation_id'].partition('&')
        try:
            local_key_id = int(local_key_id)
        except:
            lg.exc()
            return None
        key_id = my_keys.get_local_key(local_key_id)
        if not key_id:
            # lg.warn('key_id was not found for %r' % conv)
            return None
        conv['key_id'] = key_id
        conv['label'] = my_keys.get_label(key_id) or key_id
        gm = group_member.get_active_group_member(key_id)
        if gm:
            conv.update(gm.to_json())
    return conv

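# Hedged illustration, not part of the original module: the conversation_id
# parsed above is expected to hold numeric local key ids joined by '&', e.g.
# '17&42' for a private_message conversation (sender and recipient keys), while
# group_message/personal_message conversations only use the part before '&'.
# The numbers here are hypothetical examples.
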
def doScanAndQueue(self, *args, **kwargs):
    """
    Action method.
    """
    global _ShutdownFlag
    if _ShutdownFlag:
        if _Debug:
            lg.out(_DebugLevel, 'data_sender.doScanAndQueue _ShutdownFlag is True\n')
        self.automat('scan-done', 0)
        return
    from storage import backup_matrix
    from storage import backup_fs
    backup_matrix.ReadLocalFiles()
    progress = 0
    if _Debug:
        lg.out(_DebugLevel, 'data_sender.doScanAndQueue with %d known customers' % len(contactsdb.known_customers()))
    for customer_idurl in contactsdb.known_customers():
        if customer_idurl != my_id.getLocalID():
            # TODO: check that later
            if _Debug:
                lg.out(_DebugLevel + 6, 'data_sender.doScanAndQueue skip sending to another customer: %r' % customer_idurl)
            continue
        known_suppliers = contactsdb.suppliers(customer_idurl)
        if not known_suppliers or id_url.is_some_empty(known_suppliers):
            if _Debug:
                lg.out(_DebugLevel, 'data_sender.doScanAndQueue found empty supplier(s) for customer %r, SKIP' % customer_idurl)
            continue
        known_backups = misc.sorted_backup_ids(list(backup_matrix.local_files().keys()), True)
        if _Debug:
            lg.out(_DebugLevel, 'data_sender.doScanAndQueue found %d known suppliers for customer %r with %d backups' % (
                len(known_suppliers), customer_idurl, len(known_backups)))
        for backupID in known_backups:
            this_customer_idurl = packetid.CustomerIDURL(backupID)
            if this_customer_idurl != customer_idurl:
                continue
            customerGlobalID, pathID, _ = packetid.SplitBackupID(backupID, normalize_key_alias=True)
            item = backup_fs.GetByID(pathID, iterID=backup_fs.fsID(customer_idurl=customer_idurl))
            if not item:
                if _Debug:
                    lg.out(_DebugLevel, 'data_sender.doScanAndQueue skip sending backup %r path not exist in catalog' % backupID)
                continue
            if item.key_id and customerGlobalID and customerGlobalID != item.key_id:
                if _Debug:
                    lg.out(_DebugLevel, 'data_sender.doScanAndQueue skip sending backup %r key is different in the catalog' % backupID)
                continue
            packetsBySupplier = backup_matrix.ScanBlocksToSend(backupID, limit_per_supplier=None)
            total_for_customer = sum([len(v) for v in packetsBySupplier.values()])
            if _Debug:
                lg.out(_DebugLevel, 'data_sender.doScanAndQueue to be delivered for customer %r : %d' % (customer_idurl, total_for_customer))
            for supplierNum in packetsBySupplier.keys():
                # supplier_idurl = contactsdb.supplier(supplierNum, customer_idurl=customer_idurl)
                if supplierNum >= 0 and supplierNum < len(known_suppliers):
                    supplier_idurl = known_suppliers[supplierNum]
                else:
                    supplier_idurl = None
                if not supplier_idurl:
                    lg.warn('skip sending, unknown supplier_idurl supplierNum=%s for %s, customer_idurl=%r' % (
                        supplierNum, backupID, customer_idurl))
                    continue
                for packetID in packetsBySupplier[supplierNum]:
                    backupID_, _, supplierNum_, _ = packetid.BidBnSnDp(packetID)
                    if backupID_ != backupID:
                        lg.warn('skip sending, unexpected backupID supplierNum=%s for %s, customer_idurl=%r' % (
                            packetID, backupID, customer_idurl))
                        continue
                    if supplierNum_ != supplierNum:
                        lg.warn('skip sending, unexpected supplierNum %s for %s, customer_idurl=%r' % (
                            packetID, backupID, customer_idurl))
                        continue
                    if io_throttle.HasPacketInSendQueue(supplier_idurl, packetID):
                        if _Debug:
                            lg.out(_DebugLevel, 'data_sender.doScanAndQueue %s already in sending queue for %r' % (packetID, supplier_idurl))
                        continue
                    if not io_throttle.OkToSend(supplier_idurl):
                        if _Debug:
                            lg.out(_DebugLevel + 6, 'data_sender.doScanAndQueue skip sending, queue is busy for %r\n' % supplier_idurl)
                        continue
                    # customerGlobalID, pathID = packetid.SplitPacketID(packetID)
                    # tranByID = gate.transfers_out_by_idurl().get(supplier_idurl, [])
                    # if len(tranByID) > 3:
                    #     log.write(u'transfers by %s: %d\n' % (supplier_idurl, len(tranByID)))
                    #     continue
                    customerGlobalID, pathID = packetid.SplitPacketID(packetID)
                    filename = os.path.join(settings.getLocalBackupsDir(), customerGlobalID, pathID, )
                    if not os.path.isfile(filename):
                        if _Debug:
                            lg.out(_DebugLevel, 'data_sender.doScanAndQueue %s is not a file\n' % filename)
                        continue
                    if io_throttle.QueueSendFile(
                        filename,
                        packetID,
                        supplier_idurl,
                        my_id.getIDURL(),
                        self._packetAcked,
                        self._packetFailed,
                    ):
                        progress += 1
                        if _Debug:
                            lg.out(_DebugLevel, 'data_sender.doScanAndQueue put %s in the queue progress=%d' % (packetID, progress, ))
                    else:
                        if _Debug:
                            lg.out(_DebugLevel, 'data_sender.doScanAndQueue io_throttle.QueueSendFile FAILED %s' % packetID)
    if _Debug:
        lg.out(_DebugLevel, 'data_sender.doScanAndQueue progress=%s' % progress)
    self.automat('scan-done', progress)

def on_data(newpacket):
    if id_url.to_bin(newpacket.OwnerID) == my_id.getIDURL().to_bin():
        # this Data belongs to us, SKIP
        return False
    # if not contactsdb.is_customer(newpacket.OwnerID):
    #     # SECURITY
    #     # TODO: process files from another customer : glob_path['idurl']
    #     lg.warn("skip, %s not a customer, packetID=%s" % (newpacket.OwnerID, newpacket.PacketID))
    #     # p2p_service.SendFail(newpacket, 'not a customer')
    #     return False
    glob_path = global_id.ParseGlobalID(newpacket.PacketID)
    if not glob_path['path']:
        # backward compatible check
        glob_path = global_id.ParseGlobalID(my_id.getGlobalID('master') + ':' + newpacket.PacketID)
    if not glob_path['path']:
        lg.err("got incorrect PacketID")
        # p2p_service.SendFail(newpacket, 'incorrect path')
        return False
    authorized_idurl = verify_packet_ownership(newpacket)
    if authorized_idurl is None:
        lg.err("ownership verification failed for %r" % newpacket)
        # p2p_service.SendFail(newpacket, 'ownership verification failed')
        return False
    filename = make_valid_filename(newpacket.OwnerID, glob_path)
    if not filename:
        lg.warn("got empty filename, bad customer or wrong packetID?")
        # p2p_service.SendFail(newpacket, 'empty filename')
        return False
    dirname = os.path.dirname(filename)
    if not os.path.exists(dirname):
        try:
            bpio._dirs_make(dirname)
        except:
            lg.err("can not create sub dir %s" % dirname)
            p2p_service.SendFail(newpacket, 'write error', remote_idurl=authorized_idurl)
            return False
    data = newpacket.Serialize()
    donated_bytes = settings.getDonatedBytes()
    accounting.check_create_customers_quotas(donated_bytes)
    space_dict, _ = accounting.read_customers_quotas()
    if newpacket.OwnerID.to_bin() not in list(space_dict.keys()):
        lg.err("customer space is broken, no info about donated space for %s" % newpacket.OwnerID)
        p2p_service.SendFail(newpacket, 'customer space is broken, no info about donated space', remote_idurl=authorized_idurl)
        return False
    used_space_dict = accounting.read_customers_usage()
    if newpacket.OwnerID.to_bin() in list(used_space_dict.keys()):
        try:
            bytes_used_by_customer = int(used_space_dict[newpacket.OwnerID.to_bin()])
            bytes_donated_to_customer = int(space_dict[newpacket.OwnerID.to_bin()])
            if bytes_donated_to_customer - bytes_used_by_customer < len(data):
                lg.warn("no free space left for customer data: %s" % newpacket.OwnerID)
                p2p_service.SendFail(newpacket, 'no free space left for customer data', remote_idurl=authorized_idurl)
                return False
        except:
            lg.exc()
    if not bpio.WriteBinaryFile(filename, data):
        lg.err("can not write to %s" % str(filename))
        p2p_service.SendFail(newpacket, 'write error', remote_idurl=authorized_idurl)
        return False
    # here the Data() packet was stored as-is on the supplier node (current machine)
    del data
    p2p_service.SendAck(newpacket, response=strng.to_text(len(newpacket.Payload)), remote_idurl=authorized_idurl)
    reactor.callLater(0, local_tester.TestSpaceTime)  # @UndefinedVariable
    # if self.publish_event_supplier_file_modified:
    #     # TODO: must remove that actually
    #     from main import events
    #     events.send('supplier-file-modified', data=dict(
    #         action='write',
    #         glob_path=glob_path['path'],
    #         owner_id=newpacket.OwnerID,
    #     ))
    return True

def on_files_received(newpacket, info):
    list_files_global_id = global_id.ParseGlobalID(newpacket.PacketID)
    if not list_files_global_id['idurl']:
        lg.warn('invalid PacketID: %s' % newpacket.PacketID)
        return False
    trusted_customer_idurl = list_files_global_id['idurl']
    incoming_key_id = list_files_global_id['key_id']
    if trusted_customer_idurl == my_id.getIDURL():
        if _Debug:
            lg.dbg(_DebugLevel, 'ignore %s packet which seems to come from my own supplier' % newpacket)
        # only process list Files() from other customers who granted me access to their files
        return False
    if not my_keys.is_valid_key_id(incoming_key_id):
        lg.warn('ignore, invalid key id in packet %s' % newpacket)
        return False
    if not my_keys.is_key_private(incoming_key_id):
        lg.warn('private key is not registered : %s' % incoming_key_id)
        p2p_service.SendFail(newpacket, 'private key is not registered')
        return False
    try:
        block = encrypted.Unserialize(
            newpacket.Payload,
            decrypt_key=incoming_key_id,
        )
    except:
        lg.exc(newpacket.Payload)
        return False
    if block is None:
        lg.warn('failed reading data from %s' % newpacket.RemoteID)
        return False
    try:
        raw_files = block.Data()
    except:
        lg.exc()
        return False
    if block.CreatorID == trusted_customer_idurl:
        # this is a trusted guy sending some shared files to me
        try:
            json_data = serialization.BytesToDict(raw_files, keys_to_text=True, encoding='utf-8')
            json_data['items']
        except:
            lg.exc()
            return False
        count = backup_fs.Unserialize(
            raw_data=json_data,
            customer_idurl=trusted_customer_idurl,
            from_json=True,
        )
        p2p_service.SendAck(newpacket)
        if count == 0:
            lg.warn('no files were imported during file sharing')
        else:
            backup_control.Save()
            lg.info('imported %d shared files from %s, key_id=%s' % (
                count, trusted_customer_idurl, incoming_key_id, ))
            events.send('shared-list-files-received', data=dict(
                customer_idurl=trusted_customer_idurl,
                new_items=count,
            ))
        return True
    # otherwise this must be an external supplier sending us files he stores for the trusted customer
    external_supplier_idurl = block.CreatorID
    try:
        supplier_raw_list_files = list_files.UnpackListFiles(raw_files, settings.ListFilesFormat())
    except:
        lg.exc()
        return False
    # need to detect supplier position from the list of packets
    # and place that supplier on the correct position in contactsdb
    supplier_pos = backup_matrix.DetectSupplierPosition(supplier_raw_list_files)
    known_supplier_pos = contactsdb.supplier_position(external_supplier_idurl, trusted_customer_idurl)
    if _Debug:
        lg.args(_DebugLevel, supplier_pos=supplier_pos, known_supplier_pos=known_supplier_pos,
                external_supplier=external_supplier_idurl, trusted_customer=trusted_customer_idurl, key_id=incoming_key_id)
    if supplier_pos >= 0:
        if known_supplier_pos >= 0 and known_supplier_pos != supplier_pos:
            lg.err('known external supplier %r position %d is not matching to received list files position %d for customer %s' % (
                external_supplier_idurl, known_supplier_pos, supplier_pos, trusted_customer_idurl))
    else:
        lg.warn('not possible to detect external supplier position for customer %s from received list files, known position is %s' % (
            trusted_customer_idurl, known_supplier_pos))
        supplier_pos = known_supplier_pos
    remote_files_changed, _, _, _ = backup_matrix.process_raw_list_files(
        supplier_num=supplier_pos,
        list_files_text_body=supplier_raw_list_files,
        customer_idurl=trusted_customer_idurl,
        is_in_sync=True,
        auto_create=True,
    )
    if remote_files_changed:
        backup_matrix.SaveLatestRawListFiles(
            supplier_idurl=external_supplier_idurl,
            raw_data=supplier_raw_list_files,
            customer_idurl=trusted_customer_idurl,
        )
    # finally sending Ack() packet back
    p2p_service.SendAck(newpacket)
    if remote_files_changed:
        lg.info('received updated list of files from external supplier %s for customer %s' % (external_supplier_idurl, trusted_customer_idurl))
    return True

def send(customer_idurl, packet_id, format_type, key_id, remote_idurl, query_items=[]):
    if not query_items:
        query_items = ['*', ]
    key_id = my_keys.latest_key_id(key_id)
    parts = global_id.ParseGlobalID(key_id)
    if parts['key_alias'] == 'master' and parts['idurl'] != my_id.getIDURL():
        # lg.warn('incoming ListFiles() request with customer "master" key: %r' % key_id)
        if not my_keys.is_key_registered(key_id) and identitycache.HasKey(parts['idurl']):
            lg.info('customer public key %r to be registered locally for the first time' % key_id)
            known_ident = identitycache.FromCache(parts['idurl'])
            if not my_keys.register_key(key_id, known_ident.getPublicKey()):
                lg.err('failed to register known public key of the customer: %r' % key_id)
        if not my_keys.is_key_registered(key_id):
            lg.warn('not able to return Files() for customer %s, key %s not registered' % (customer_idurl, key_id, ))
            return p2p_service.SendFailNoRequest(customer_idurl, packet_id, response='key not registered')
    if _Debug:
        lg.out(_DebugLevel, "list_files.send to %s, customer_idurl=%s, key_id=%s, query_items=%r" % (
            remote_idurl, customer_idurl, key_id, query_items, ))
    ownerdir = settings.getCustomerFilesDir(customer_idurl)
    plaintext = ''
    if os.path.isdir(ownerdir):
        try:
            for query_path in query_items:
                plaintext += process_query_item(query_path, parts['key_alias'], ownerdir)
        except:
            lg.exc()
            return p2p_service.SendFailNoRequest(customer_idurl, packet_id, response='list files query processing error')
    else:
        lg.warn('did not find the customer folder: %s' % ownerdir)
    if _Debug:
        lg.out(_DebugLevel, '\n%s' % plaintext)
    raw_list_files = PackListFiles(plaintext, format_type)
    block = encrypted.Block(
        CreatorID=my_id.getIDURL(),
        BackupID=key_id,
        Data=raw_list_files,
        SessionKey=key.NewSessionKey(session_key_type=key.SessionKeyType()),
        SessionKeyType=key.SessionKeyType(),
        EncryptKey=key_id,
    )
    encrypted_list_files = block.Serialize()
    newpacket = p2p_service.SendFiles(
        idurl=remote_idurl,
        raw_list_files_info=encrypted_list_files,
        packet_id=packet_id,
        callbacks={
            commands.Ack(): on_acked,
            commands.Fail(): on_failed,
            None: on_timeout,
        },
    )
    return newpacket

def __init__(self):
    self.creatorID = my_id.getIDURL()
    self.supplierQueues = {}
    self.paintFunc = None