def OnFileSendingFinished(self, pkt_out, item, status, size, error_message):
    """
    Callback fired when an outgoing packet of this supplier queue has finished
    sending (successfully or not); forwards the outcome as an event to the
    matching downloader/uploader state machine.
    """
    if self.shutdown:
        lg.warn('skip, supplier queue is shutting down')
        return
    outpacket = pkt_out.outpacket
    if not outpacket:
        lg.warn('skip, outpacket is already None')
        return
    packetID = global_id.CanonicalID(outpacket.PacketID)
    command = outpacket.Command
    sent_ok = (status == 'finished')
    if command == commands.Retrieve() and packetID in self.fileRequestQueue:
        f_down = self.fileRequestDict[packetID]
        if sent_ok:
            if _Debug:
                lg.args(_DebugLevel, obj=f_down, status=status, packetID=packetID, event='retrieve-sent')
            f_down.event('retrieve-sent', outpacket)
        else:
            if _Debug:
                lg.dbg(_DebugLevel, 'packet %r is %r during downloading from %s' % (packetID, status, self.remoteID))
            f_down.event('request-failed')
    elif command == commands.Data() and packetID in self.fileSendQueue:
        f_up = self.fileSendDict[packetID]
        if sent_ok:
            if _Debug:
                lg.args(_DebugLevel, obj=f_up, status=status, packetID=packetID, event='data-sent')
            f_up.event('data-sent', outpacket)
        else:
            if _Debug:
                lg.dbg(_DebugLevel, 'packet %r is %r during uploading to %s' % (packetID, status, self.remoteID))
            f_up.event('sending-failed')
def OutboxStatus(self, pkt_out, status, error):
    """
    Callback for the final delivery status of an outgoing packet: report
    success, timeout or failure to the uploader state machine.  Always
    returns False so the status keeps propagating to other handlers.
    """
    if self.shutdown:
        lg.warn('supplier queue is shutting down')
        return False
    packetID = global_id.CanonicalID(pkt_out.outpacket.PacketID)
    if pkt_out.outpacket.Command != commands.Data():
        return False
    if packetID not in self.fileSendQueue:
        return False
    f_up = self.fileSendDict[packetID]
    if status == 'finished':
        if _Debug:
            lg.args(_DebugLevel, obj=f_up, status=status, packetID=packetID, event='data-sent')
        # 'unanswered' means the remote side never replied in time
        if error == 'unanswered':
            f_up.event('timeout', pkt_out.outpacket)
        else:
            f_up.event('data-sent', pkt_out.outpacket)
    else:
        lg.warn('packet %r is %r during uploading to %s' % (packetID, status, self.remoteID))
        f_up.event('sending-failed')
    return False
def OnDataReceived(self, newpacket, result):
    """
    Handle a Data() or Fail() packet that arrived in response to one of our
    Retrieve() requests: fire the registered callbacks, drop the request from
    the queue/dict and kick off the next pending request.
    """
    # if result == 'timeout':
    #     packetID = global_id.CanonicalID(newpacket)
    #     if packetID in self.fileRequestDict:
    #         self.fileRequestDict[packetID].fileReceivedTime = time.time()
    #         self.fileRequestDict[packetID].result = 'timeout'
    #         for callBack in self.fileRequestDict[packetID].callOnReceived:
    #             callBack(None, 'timeout')
    #     return
    # we requested some data from a supplier, just received it
    packetID = global_id.CanonicalID(newpacket.PacketID)
    if self.shutdown:
        # queue is closing down (supplier replaced) - notify callbacks and bail out
        if packetID in self.fileRequestDict:
            for callBack in self.fileRequestDict[packetID].callOnReceived:
                callBack(newpacket, 'shutdown')
        if packetID in self.fileRequestDict:
            del self.fileRequestDict[packetID]
        lg.warn('supplier queue is shutting down')
        return
    if _Debug:
        lg.out(
            _DebugLevel,
            "io_throttle.OnDataReceived %s with result=[%s]" % (
                newpacket,
                result,
            ))
    if packetID in self.fileRequestQueue:
        self.fileRequestQueue.remove(packetID)
        if _Debug:
            lg.out(
                _DebugLevel,
                " removed %s from %s receiving queue, %d more items" %
                (packetID, self.remoteName, len(self.fileRequestQueue)))
    if newpacket.Command == commands.Data():
        # Data() payload is a wrapped signed packet - verify before accepting
        wrapped_packet = signed.Unserialize(newpacket.Payload)
        if not wrapped_packet or not wrapped_packet.Valid():
            # NOTE(review): this early return skips the fileRequestDict cleanup
            # and DoRequest() below - confirm that is intentional
            lg.err('incoming Data() is not valid')
            return
        if packetID in self.fileRequestDict:
            self.fileRequestDict[packetID].fileReceivedTime = time.time()
            self.fileRequestDict[packetID].result = 'received'
            for callBack in self.fileRequestDict[packetID].callOnReceived:
                callBack(wrapped_packet, 'received')
    elif newpacket.Command == commands.Fail():
        if packetID in self.fileRequestDict:
            self.fileRequestDict[packetID].fileReceivedTime = time.time()
            self.fileRequestDict[packetID].result = 'failed'
            for callBack in self.fileRequestDict[packetID].callOnReceived:
                callBack(newpacket, 'failed')
    else:
        lg.err('incorrect response command')
    if packetID in self.fileRequestDict:
        del self.fileRequestDict[packetID]
    if _Debug:
        lg.out(
            _DebugLevel,
            "io_throttle.OnDataReceived %s from %s, queue=%d" %
            (newpacket, self.remoteName, len(self.fileRequestQueue)))
    # continue with the next pending request in the queue
    self.DoRequest()
def OnDataReceived(self, newpacket, result):
    """
    A Data() or Fail() packet arrived in response to a Retrieve() we sent:
    route it to the per-file downloader state machine, correcting for an
    outdated packet ID (rotated identity) when necessary.
    """
    # we requested some data from a supplier, and just received it
    if self.shutdown:
        lg.warn('skip, supplier queue is shutting down')
        self.StopAllRequests()
        return
    if _Debug:
        lg.args(_DebugLevel, newpacket=newpacket, result=result, queue=len(self.fileRequestQueue), remoteName=self.remoteName)
    packetID = global_id.CanonicalID(newpacket.PacketID)

    def _known(pid):
        # a request is only considered tracked when present in both structures
        return pid in self.fileRequestQueue and pid in self.fileRequestDict

    if not _known(packetID):
        # sender may have rotated its identity: retry with the latest IDURL
        latest_idurl = global_id.NormalizeGlobalID(packetID, as_field=True)['idurl'].latest
        another_packetID = global_id.SubstitutePacketID(packetID, idurl=latest_idurl)
        if _known(another_packetID):
            packetID = another_packetID
            lg.warn('found incoming %r with outdated packet id, corrected: %r' % (newpacket, another_packetID, ))
    if not _known(packetID):
        lg.err('unexpected %r received which is not in the downloading queue' % newpacket)
        return
    f_down = self.fileRequestDict[packetID]
    if newpacket.Command == commands.Data():
        wrapped_packet = signed.Unserialize(newpacket.Payload)
        if not wrapped_packet or not wrapped_packet.Valid():
            lg.err('incoming Data() packet is not valid')
            f_down.event('fail-received', newpacket)
            return
        f_down.event('valid-data-received', wrapped_packet)
    elif newpacket.Command == commands.Fail():
        f_down.event('fail-received', newpacket)
    else:
        lg.err('incorrect response command: %r' % newpacket)
def _do_retrieve(self, x=None):
    """
    Ask every connected, online supplier for our backup index file and
    remember which suppliers were queried.
    """
    packetID = global_id.MakeGlobalID(
        customer=my_id.getGlobalID(key_alias='master'),
        path=settings.BackupIndexFileName(),
    )
    localID = my_id.getIDURL()
    for supplier_idurl in filter(None, contactsdb.suppliers()):
        sc = supplier_connector.by_idurl(supplier_idurl)
        if sc is None or sc.state != 'CONNECTED':
            # only talk to suppliers with an established connection
            continue
        if online_status.isOffline(supplier_idurl):
            continue
        pkt_out = p2p_service.SendRetreive(
            ownerID=localID,
            creatorID=localID,
            packetID=packetID,
            remoteID=supplier_idurl,
            response_timeout=60 * 2,
            callbacks={
                commands.Data(): self._on_supplier_response,
                commands.Fail(): self._on_supplier_fail,
            },
        )
        if not pkt_out:
            continue
        self.requesting_suppliers.add(supplier_idurl)
        self.requested_suppliers_number += 1
        self.requests_packets_sent.append((packetID, supplier_idurl))
        if _Debug:
            lg.out(_DebugLevel, ' %s sending to %s' % (pkt_out, nameurl.GetName(supplier_idurl)))
def _s():
    """
    Debug helper: sign the file given in args[1] into a Data() packet
    addressed per args[0] and push it to the outbox (wide send).
    """
    p = signed.Packet(
        commands.Data(),
        my_id.getLocalID(),
        my_id.getLocalID(),
        my_id.getLocalID(),
        bpio.ReadBinaryFile(args[1]),
        args[0],
    )
    outbox(p, wide=True)
    lg.out(2, 'OUTBOX %d : %r' % (globals()['num_out'], p))
    globals()['num_out'] += 1
def DataReceived(self, newpacket, info):
    """
    Process a Data() or Fail() answer for a file we requested earlier: run
    the registered callbacks, clean up bookkeeping and continue with the
    next request in the queue.

    Raises on any other response command.
    """
    # we requested some data from a supplier, just received it
    if self.shutdown:
        # queue is closing down (supplier replaced) - ignore anything new
        return
    packetID = global_id.CanonicalID(newpacket.PacketID)
    if packetID in self.fileRequestQueue:
        self.fileRequestQueue.remove(packetID)
    if newpacket.Command == commands.Data():
        outcome = 'received'
    elif newpacket.Command == commands.Fail():
        outcome = 'failed'
    else:
        raise Exception('incorrect response command')
    request = self.fileRequestDict.get(packetID)
    if request is not None:
        request.fileReceivedTime = time.time()
        request.result = outcome
        for callBack in request.callOnReceived:
            callBack(newpacket, outcome)
        del self.fileRequestDict[packetID]
    lg.out(
        10, "io_throttle.DataReceived %s from %s, queue=%d" %
        (newpacket, self.remoteName, len(self.fileRequestQueue)))
    self.DoRequest()
def OnDataReceived(self, newpacket, result):
    """
    Dispatch a Data() or Fail() response for a pending download request to
    its downloader state machine.
    """
    # we requested some data from a supplier, and just received it
    if self.shutdown:
        lg.warn('supplier queue is shutting down')
        self.StopAllRequests()
        return
    if _Debug:
        lg.args(_DebugLevel, newpacket=newpacket, result=result, queue=len(self.fileRequestQueue), remoteName=self.remoteName)
    packetID = global_id.CanonicalID(newpacket.PacketID)
    if packetID not in self.fileRequestQueue or packetID not in self.fileRequestDict:
        lg.err(
            'unexpected %r received which is not in the downloading queue' % newpacket)
        return
    f_down = self.fileRequestDict[packetID]
    command = newpacket.Command
    if command == commands.Fail():
        f_down.event('fail-received', newpacket)
        return
    if command != commands.Data():
        lg.err('incorrect response command: %r' % newpacket)
        return
    # Data() payload wraps another signed packet - validate before accepting
    wrapped_packet = signed.Unserialize(newpacket.Payload)
    if not wrapped_packet or not wrapped_packet.Valid():
        lg.err('incoming Data() packet is not valid')
        f_down.event('fail-received', newpacket)
        return
    f_down.event('valid-data-received', wrapped_packet)
def doCancelSendings(self, *args, **kwargs):
    """
    Action method.

    Cancel every pending outgoing Data() packet tracked by this automat.
    """
    for packet_id in self.outgoing_packets_ids:
        for pkt_out in packet_out.search_by_packet_id(packet_id):
            if pkt_out.outpacket.Command == commands.Data():
                pkt_out.automat('cancel')
def _send(c):
    """
    Debug helper: sign the file from sys.argv[1] into a Data() packet and
    send it to every IDURL in sys.argv[2:], then re-schedule itself until
    the counter runs out.
    """
    from transport.udp import udp_stream
    for idurl in sys.argv[2:]:
        print('_send', list(udp_stream.streams().keys()))
        p = signed.Packet(
            commands.Data(),
            my_id.getLocalID(),
            my_id.getLocalID(),
            'packet%d' % c,
            bpio.ReadBinaryFile(sys.argv[1]),
            idurl,
        )
        gateway.outbox(p)
    if c > 1:
        reactor.callLater(0.01, _send, c - 1)
def _on_inbox_packet_received(self, newpacket, info, status, error_message):
    """
    Route supported inbox commands to their handlers; return False for
    anything this service does not serve.
    """
    from p2p import commands
    dispatch = {
        commands.DeleteFile(): self._on_delete_file,
        commands.Retrieve(): self._on_retreive,
        commands.Data(): self._on_data,
        commands.ListFiles(): self._on_list_files,
    }
    handler = dispatch.get(newpacket.Command)
    if handler is None:
        return False
    return handler(newpacket)
def correct_packet_destination(outpacket):
    """
    Decide the actual destination IDURL for ``outpacket``.

    Packets we created go to their RemoteID.  A Data() packet we did not
    create belongs to a remote customer and is stored locally, so it must
    travel back to its CreatorID (RemoteID points at this device).
    """
    made_by_us = outpacket.CreatorID == my_id.getLocalID()
    if not made_by_us:
        if outpacket.Command == commands.Data():
            # stored customer data returns to its owner: RemoteID points at us
            return outpacket.CreatorID
        lg.warn('sending a packet we did not make, and that is not Data packet')
    # our own packets (and foreign non-Data packets) go to their RemoteID
    return outpacket.RemoteID
def doSuppliersRequestIndexFile(self, arg):
    """
    Action method.

    Send a Retrieve() for the backup index file to every online supplier
    and track how many requests went out.
    """
    if _Debug:
        lg.out(_DebugLevel, 'index_synchronizer.doSuppliersRequestIndexFile')
    if driver.is_on('service_backups'):
        from storage import backup_control
        self.current_local_revision = backup_control.revision()
    else:
        # backups service is off: local revision is unknown
        self.current_local_revision = -1
    self.latest_supplier_revision = -1
    self.requesting_suppliers.clear()
    self.requested_suppliers_number = 0
    packetID = global_id.MakeGlobalID(
        customer=my_id.getGlobalID(key_alias='master'),
        path=settings.BackupIndexFileName(),
    )
    localID = my_id.getLocalID()
    for supplierId in contactsdb.suppliers():
        if not supplierId:
            continue
        if not contact_status.isOnline(supplierId):
            continue
        pkt_out = p2p_service.SendRetreive(
            localID,
            localID,
            packetID,
            supplierId,
            callbacks={
                commands.Data(): self._on_supplier_response,
                commands.Fail(): self._on_supplier_response,
            },
        )
        if not pkt_out:
            continue
        self.requesting_suppliers.add(supplierId)
        self.requested_suppliers_number += 1
        if _Debug:
            lg.out(_DebugLevel, ' %s sending to %s' % (pkt_out, nameurl.GetName(supplierId)))
def doCancelPackets(self, *args, **kwargs):
    """
    Action method.

    Cancel outgoing Data() packets matching self.packetID because the
    download was cancelled.
    """
    for pkt_out in packet_out.search_by_packet_id(self.packetID):
        if pkt_out.outpacket.Command != commands.Data():
            continue
        lg.warn(
            'sending "cancel" to %s addressed to %s because downloading cancelled' % (
                pkt_out,
                pkt_out.remote_idurl,
            ))
        pkt_out.automat('cancel')
def _on_inbox_packet_received(self, newpacket, info, status, error_message):
    """
    Forward supported inbox commands to customer_space handlers; return
    False when the command is not served by this service.
    """
    from p2p import commands
    from supplier import customer_space
    dispatch = {
        commands.DeleteFile(): customer_space.on_delete_file,
        commands.DeleteBackup(): customer_space.on_delete_backup,
        commands.Retrieve(): customer_space.on_retrieve,
        commands.Data(): customer_space.on_data,
        commands.ListFiles(): customer_space.on_list_files,
    }
    handler = dispatch.get(newpacket.Command)
    if handler is None:
        return False
    return handler(newpacket)
def OnFileSendAckReceived(self, newpacket, info):
    """
    Handle the Ack()/Fail() (or relayed Data()) confirmation for a file we
    sent to this supplier: record the result, fire the registered callbacks,
    notify the supplier_connector state machine and continue the send queue.

    Raises when the supplier_connector receives an unexpected command.
    """
    if self.shutdown:
        if _Debug:
            lg.out(
                _DebugLevel,
                "io_throttle.OnFileSendAckReceived finishing to %s, shutdown is True" % self.remoteName)
        return
    if not newpacket:
        # BUGFIX: the original guard was `not newpacket and not info`, which let
        # a None `newpacket` with a non-empty `info` fall through and crash on
        # `newpacket.PacketID` below; any missing packet means no usable answer.
        lg.warn('packet timed out during responding')
        return
    self.ackedCount += 1
    packetID = global_id.CanonicalID(newpacket.PacketID)
    if packetID not in self.fileSendQueue:
        lg.warn("packet %s not in sending queue for %s" % (newpacket.PacketID, self.remoteName))
        return
    if packetID not in self.fileSendDict.keys():
        lg.warn("packet %s not in sending dict for %s" % (newpacket.PacketID, self.remoteName))
        return
    self.fileSendDict[packetID].ackTime = time.time()
    if newpacket.Command == commands.Ack():
        self.fileSendDict[packetID].result = 'acked'
        if self.fileSendDict[packetID].callOnAck:
            # fire callback on the next reactor iteration
            reactor.callLater(0, self.fileSendDict[packetID].callOnAck, newpacket, newpacket.OwnerID, packetID)
    elif newpacket.Command == commands.Fail():
        self.fileSendDict[packetID].result = 'failed'
        if self.fileSendDict[packetID].callOnFail:
            reactor.callLater(0, self.fileSendDict[packetID].callOnFail, newpacket.CreatorID, packetID, 'failed')
    from customer import supplier_connector
    sc = supplier_connector.by_idurl(newpacket.OwnerID)
    if sc:
        if newpacket.Command == commands.Ack():
            sc.automat('ack', newpacket)
        elif newpacket.Command == commands.Fail():
            sc.automat('fail', newpacket)
        elif newpacket.Command == commands.Data():
            sc.automat('data', newpacket)
        else:
            raise Exception('incorrect packet type received')
    self.DoSend()
    # self.RunSend()
    if _Debug:
        lg.out(
            _DebugLevel,
            "io_throttle.OnFileSendAckReceived %s from %s, queue=%d" %
            (str(newpacket), self.remoteName, len(self.fileSendQueue)))
def doCancelSendings(self, *args, **kwargs):
    """
    Action method.

    Cancel every pending outgoing Data() packet this automat is tracking,
    logging each cancellation.
    """
    for packet_id in self.outgoing_packets_ids:
        for pkt_out in packet_out.search_by_packet_id(packet_id):
            if pkt_out.outpacket.Command != commands.Data():
                continue
            lg.warn(
                'sending "cancel" to %s addressed to %s from index_synchronizer' % (
                    pkt_out,
                    pkt_out.remote_idurl,
                ))
            pkt_out.automat('cancel')
def INfile(newpacket, pkt_in, status, error_message):
    """
    Count incoming file from ``proto``://``host``, ``newpacket`` is already
    Unserialized.  Traffic is attributed to the packet owner, except when it
    is our own Data() coming back - then it counts against the sender.
    """
    if status != 'finished':
        return False
    own_data_returned = (
        newpacket.OwnerID == my_id.getLocalID() and newpacket.Command == commands.Data()
    )
    # someone giving our data back - attribute traffic to the remote node
    packet_from = newpacket.RemoteID if own_data_returned else newpacket.OwnerID
    if pkt_in.size:
        IN(packet_from, pkt_in.size)
    # IN(packet_from, len(newpacket.Payload))
    return False
def SendData(raw_data, ownerID, creatorID, remoteID, packetID, callbacks=None):
    """
    Build a signed Data() packet from ``raw_data`` and push it to the outbox.

    ``callbacks`` maps response commands to handler functions; defaults to none.
    Returns a tuple of (the new signed packet, the outbox result).
    """
    # BUGFIX: `callbacks={}` was a shared mutable default argument; use a
    # None sentinel instead (backward-compatible for all existing callers)
    if callbacks is None:
        callbacks = {}
    newpacket = signed.Packet(
        Command=commands.Data(),
        OwnerID=ownerID,
        CreatorID=creatorID,
        PacketID=packetID,
        Payload=raw_data,
        RemoteID=remoteID,
    )
    result = gateway.outbox(newpacket, callbacks=callbacks)
    if _Debug:
        lg.out(_DebugLevel, 'p2p_service.SendData %d bytes in packetID=%s' % (
            len(raw_data), packetID))
        lg.out(_DebugLevel, ' to remoteID=%s ownerID=%s creatorID=%s' % (remoteID, ownerID, creatorID))
    return newpacket, result
def doSuppliersSendIndexFile(self, arg):
    """
    Action method.

    Encrypt the local backup index file into a single block and send it as
    a Data() packet to every online supplier.
    """
    if _Debug:
        lg.out(_DebugLevel, 'index_synchronizer.doSuppliersSendIndexFile')
    packetID = global_id.MakeGlobalID(
        customer=my_id.getGlobalID(key_alias='master'),
        path=settings.BackupIndexFileName(),
    )
    self.sending_suppliers.clear()
    self.sent_suppliers_number = 0
    src = bpio.ReadBinaryFile(settings.BackupIndexFilePath())
    localID = my_id.getLocalID()
    # wrap the raw index file into an encrypted block under a fresh session key
    b = encrypted.Block(
        localID,
        packetID,
        0,
        key.NewSessionKey(),
        key.SessionKeyType(),
        True,
        src,
    )
    Payload = b.Serialize()
    for supplierId in contactsdb.suppliers():
        if not supplierId:
            continue
        if not contact_status.isOnline(supplierId):
            continue
        newpacket = signed.Packet(commands.Data(), localID, localID, packetID, Payload, supplierId)
        pkt_out = gateway.outbox(newpacket, callbacks={
            commands.Ack(): self._on_supplier_acked,
            commands.Fail(): self._on_supplier_acked,
        })
        if not pkt_out:
            continue
        self.sending_suppliers.add(supplierId)
        self.sent_suppliers_number += 1
        if _Debug:
            lg.out(_DebugLevel, ' %s sending to %s' % (newpacket, nameurl.GetName(supplierId)))
def doSendRetreive(self, *args, **kwargs):
    """
    Action method.

    Send a Retrieve() request for self.packetID to the remote node and
    remember when the request was issued.
    """
    if _Debug:
        lg.args(_DebugLevel, packetID=self.packetID, remoteID=self.remoteID)
    response_callbacks = {
        commands.Data(): self.parent.OnDataReceived,
        commands.Fail(): self.parent.OnDataReceived,
    }
    p2p_service.SendRetreive(
        self.ownerID,
        self.parent.creatorID,
        self.packetID,
        self.remoteID,
        callbacks=response_callbacks,
    )
    self.requestTime = time.time()
def SendData(raw_data, ownerID, creatorID, remoteID, packetID, callbacks=None):
    """
    Build a signed Data() packet from ``raw_data`` and push it to the outbox.

    ``callbacks`` maps response commands to handler functions; defaults to none.
    Returns the outbox result.
    """
    # BUGFIX: `callbacks={}` was a shared mutable default argument; use a
    # None sentinel instead (backward-compatible for all existing callers)
    if callbacks is None:
        callbacks = {}
    newpacket = signed.Packet(
        commands.Data(),
        ownerID,
        creatorID,
        packetID,
        raw_data,
        remoteID,
    )
    result = gateway.outbox(newpacket, callbacks=callbacks)
    if _Debug:
        lg.out(
            _DebugLevel,
            'p2p_service.SendData %d bytes in [%s] to %s, by %s | %s' %
            (len(raw_data), packetID, remoteID, ownerID, creatorID))
    return result
def _on_inbox_packet_received(self, newpacket, info, status, error_message):
    """
    Accept my own backup index Data() coming back from a supplier and
    Files() listings sent by my suppliers; ignore everything else.
    """
    from logs import lg
    from main import settings
    from contacts import contactsdb
    from userid import my_id
    from userid import global_id
    from storage import backup_control
    from p2p import commands
    command = newpacket.Command
    if command == commands.Data():
        if newpacket.OwnerID != my_id.getLocalID():
            # only catch data belongs to me
            return False
        lg.out(
            self.debug_level,
            "service_backups._on_inbox_packet_received: %r for us from %s" % (
                newpacket,
                newpacket.RemoteID,
            ))
        index_packet_id = global_id.MakeGlobalID(
            idurl=my_id.getLocalID(),
            path=settings.BackupIndexFileName(),
        )
        if newpacket.PacketID == index_packet_id:
            # TODO: move to service_backup_db
            backup_control.IncomingSupplierBackupIndex(newpacket)
            return True
    if command == commands.Files():
        if not newpacket.PacketID.startswith(my_id.getGlobalID() + ':'):
            # skip Files() which are from another customer
            return False
        if not contactsdb.is_supplier(newpacket.OwnerID):
            # skip Files() if this is not my supplier
            return False
        lg.out(
            self.debug_level,
            "service_backups._on_inbox_packet_received: %r for us from %s" % (
                newpacket,
                newpacket.RemoteID,
            ))
        return backup_control.IncomingSupplierListFiles(newpacket)
    return False
def _on_supplier_response(self, newpacket, pkt_out):
    """
    Count a supplier's Data()/Fail() response to our index request and fire
    'all-responded' once every queried supplier has answered.

    Raises on any other response command.
    """
    if newpacket.Command == commands.Data():
        self.requesting_suppliers.discard(newpacket.RemoteID)
    elif newpacket.Command == commands.Fail():
        self.requesting_suppliers.discard(newpacket.OwnerID)
    else:
        raise Exception('wrong type of response')
    if _Debug:
        lg.out(
            _DebugLevel,
            'index_synchronizer._on_supplier_response %s, pending: %d, total: %d' %
            (newpacket, len(self.requesting_suppliers), self.requested_suppliers_number))
    if not self.requesting_suppliers:
        self.automat('all-responded')
def _on_supplier_response(self, newpacket, pkt_out):
    """
    Handle a supplier's answer to our backup index request: feed a valid
    Data() payload into backup_control, track which suppliers responded and
    fire 'all-responded' once nobody is pending anymore.

    Raises on any other response command.
    """
    if newpacket.Command == commands.Data():
        # payload wraps another signed packet - validate before accepting
        wrapped_packet = signed.Unserialize(newpacket.Payload)
        if not wrapped_packet or not wrapped_packet.Valid():
            lg.err('incoming Data() is not valid')
            return
        from storage import backup_control
        backup_control.IncomingSupplierBackupIndex(wrapped_packet)
        # p2p_service.SendAck(newpacket)
        self.requesting_suppliers.discard(wrapped_packet.RemoteID)
    elif newpacket.Command == commands.Fail():
        self.requesting_suppliers.discard(newpacket.OwnerID)
    else:
        raise Exception('wrong type of response')
    if _Debug:
        lg.out(
            _DebugLevel,
            'index_synchronizer._on_supplier_response %s, pending: %d, total: %d' %
            (newpacket, len(self.requesting_suppliers), self.requested_suppliers_number))
    if not self.requesting_suppliers:
        self.automat('all-responded')
def _on_inbox_packet_received(self, newpacket, info, status, error_message):
    """
    Decide whether an incoming packet must be routed on behalf of a node
    behind this proxy router.

    Returns True when the packet was consumed here (queued for routing, or
    handled as a known/unknown identity), False when it should continue
    through normal inbox processing.
    """
    if _Debug:
        lg.out(
            _DebugLevel,
            'proxy_router._on_inbox_packet_received %s from %s for %s' %
            (newpacket, newpacket.CreatorID, newpacket.RemoteID))
    if newpacket.RemoteID == my_id.getLocalID():
        # this packet was addressed directly to me ...
        if newpacket.Command == commands.Relay():
            # but this is a routed packet addressed to someone else
            if newpacket.CreatorID in self.routes.keys():
                # sent by proxy_sender() from node A : a man behind proxy_router()
                # addressed to some third node B in outside world - need to route
                self.automat('routed-outbox-packet-received', (newpacket, info))
                return True
            lg.warn(
                'packet %s from %s SKIPPED, because no routes with %s' %
                (newpacket, newpacket.CreatorID, newpacket.CreatorID))
            return False
        # and this is not a Relay packet
        if newpacket.Command == commands.Identity() and newpacket.CreatorID == newpacket.OwnerID:
            if newpacket.CreatorID in self.routes.keys():
                # this is a "propagate" packet from node A addressed to this proxy
                # mark that packet as handled and send Ack
                # otherwise it will be wrongly handled in p2p_service
                self.automat('known-identity-received', newpacket)
                return True
            else:
                # this node is not yet in routers list,
                # but seems like it tries to contact me
                # mark this packet as handled and try to process it
                self.automat('unknown-identity-received', newpacket)
                return True
        # so this packet may be of any kind, but addressed to me
        # for example if I am a supplier for node A he will send me packets in usual way
        # need to skip this packet here and process it as a normal inbox packet
        if _Debug:
            lg.out(
                _DebugLevel,
                'proxy_router._on_inbox_packet_received %s from %s SKIPEED, addressed to me' %
                (newpacket, newpacket.CreatorID))
        return False
    # this packet was addressed to someone else
    receiver_idurl = None
    if newpacket.Command == commands.Data():
        # Data packets may have two cases: a new Data or response with existing Data
        if info.sender_idurl == newpacket.CreatorID:
            # incoming new Data created by node B addressed to node A
            if newpacket.RemoteID in self.routes.keys():
                receiver_idurl = newpacket.RemoteID
        elif info.sender_idurl == newpacket.RemoteID:
            # response from node B addressed to node A, by request from A who own this Data
            if newpacket.CreatorID in self.routes.keys():
                # a Data packet sent by node B : a man from outside world
                # addressed to a man behind this proxy_router() - need to route to node A
                receiver_idurl = newpacket.CreatorID
        else:
            # some wired packet received
            lg.warn('unidentified Data packet received: %s from %s' % (newpacket, info.sender_idurl))
            return False
    else:
        # other packets (not Data) always should be routed to node A by RemoteID
        if newpacket.RemoteID in self.routes.keys():
            # sent by node B : a man from outside world
            # addressed to a man behind this proxy - need to route to node A
            receiver_idurl = newpacket.RemoteID
    if receiver_idurl is not None:
        self.automat('routed-inbox-packet-received', (receiver_idurl, newpacket, info))
        return True
    if _Debug:
        lg.out(
            _DebugLevel,
            'proxy_router._on_inbox_packet_received SKIPPED %s' % newpacket)
    return False
def isDataExpected(self, *args, **kwargs):
    """
    Condition method.

    Return True when a Data() response callback is registered for this packet.
    """
    # membership test works directly on the dict - no need to build a list
    return commands.Data() in self.callbacks
def search_by_response_packet(newpacket, proto=None, host=None):
    """
    Find pending outbox packets that ``newpacket`` could be a response to.

    Matching is done by (case-insensitive) PacketID, by whether the incoming
    command is an acceptable reply to the queued command, and by whether the
    incoming sender matches one of the recipients the outgoing packet was
    addressed to.  Returns a list of matching packet_out instances (possibly
    empty).
    """
    result = []
    incoming_owner_idurl = newpacket.OwnerID
    incoming_creator_idurl = newpacket.CreatorID
    incoming_remote_idurl = newpacket.RemoteID
    if _Debug:
        lg.out(
            _DebugLevel,
            'packet_out.search_by_response_packet for incoming [%s/%s/%s]:%s(%s) from [%s://%s]' % (
                nameurl.GetName(incoming_owner_idurl),
                nameurl.GetName(incoming_creator_idurl),
                nameurl.GetName(incoming_remote_idurl),
                newpacket.Command,
                newpacket.PacketID,
                proto,
                host,
            ))
        lg.out(_DebugLevel, ' [%s]' % (','.join([str(p.outpacket) for p in queue()])))
    for p in queue():
        # TODO: investigate
        if p.outpacket.PacketID.lower() != newpacket.PacketID.lower():
            # PacketID of incoming packet not matching with that outgoing packet
            continue
        if p.outpacket.PacketID != newpacket.PacketID:
            # IDs are equal only when case is ignored - log it, but still accept
            lg.warn(
                'packet ID in queue "almost" matching with incoming: %s ~ %s' % (
                    p.outpacket.PacketID,
                    newpacket.PacketID,
                ))
        if not commands.IsCommandAck(p.outpacket.Command, newpacket.Command):
            # this command must not be in the reply
            continue
        expected_recipient = [
            p.outpacket.RemoteID,
        ]
        if p.outpacket.RemoteID != p.remote_idurl:
            # outgoing packet was addressed to another node, so that means we need to expect response from another node also
            expected_recipient.append(p.remote_idurl)
        matched = False
        if incoming_owner_idurl in expected_recipient and my_id.getLocalIDURL() == incoming_remote_idurl:
            if _Debug:
                lg.out(
                    _DebugLevel,
                    ' matched with incoming owner: %s' % expected_recipient)
            matched = True
        if incoming_creator_idurl in expected_recipient and my_id.getLocalIDURL() == incoming_remote_idurl:
            if _Debug:
                lg.out(
                    _DebugLevel,
                    ' matched with incoming creator: %s' % expected_recipient)
            matched = True
        if incoming_remote_idurl in expected_recipient and my_id.getLocalIDURL() == incoming_owner_idurl and commands.Data() == newpacket.Command:
            # our own Data() coming back from a node we sent the request to
            if _Debug:
                lg.out(
                    _DebugLevel,
                    ' matched my own incoming Data with incoming remote: %s' % expected_recipient)
            matched = True
        if matched:
            result.append(p)
            if _Debug:
                lg.out(
                    _DebugLevel,
                    ' found pending outbox [%s/%s/%s]:%s(%s) cb:%s' %
                    (nameurl.GetName(p.outpacket.OwnerID), nameurl.GetName(p.outpacket.CreatorID), nameurl.GetName(
                        p.outpacket.RemoteID), p.outpacket.Command, p.outpacket.PacketID, list(p.callbacks.keys())))
    if len(result) == 0:
        if _Debug:
            lg.out(
                _DebugLevel,
                ' NOT FOUND pending packets in outbox queue matching incoming %s' % newpacket)
        if newpacket.Command == commands.Ack() and newpacket.PacketID not in [
                commands.Identity(), commands.Identity().lower()]:
            # an Ack we never asked for (Identity acks are expected unsolicited)
            lg.warn('received %s was not a "good reply" from %s://%s' % (
                newpacket,
                proto,
                host,
            ))
    return result
def RunRequest(self):
    """
    Walk the head of the request queue and issue a Retrieve() for every
    packet that was not requested yet and is not already present on local
    disk; also drop requests that previously failed to send.

    Returns the number of requests removed from the queue on this pass.
    """
    #out(6, 'io_throttle.RunRequest')
    packetsToRemove = {}
    for i in range(0, min(self.fileRequestMaxLength, len(self.fileRequestQueue))):
        packetID = self.fileRequestQueue[i]
        # we got notify that this packet was failed to send
        if packetID in self.requestFailedPacketIDs:
            self.requestFailedPacketIDs.remove(packetID)
            packetsToRemove[packetID] = 'failed'
            continue
        # request timeouts are disabled for now
        # currentTime = time.time()
        # if self.fileRequestDict[packetID].requestTime is not None:
        #     # the packet was requested
        #     if self.fileRequestDict[packetID].fileReceivedTime is None:
        #         # but no answer yet ...
        #         if currentTime - self.fileRequestDict[packetID].requestTime > self.fileRequestDict[packetID].requestTimeout:
        #             # and time is out!!!
        #             self.fileRequestDict[packetID].report = 'timeout'
        #             packetsToRemove[packetID] = 'timeout'
        #     else:
        #         # the packet were received (why it is not removed from the queue yet ???)
        #         self.fileRequestDict[packetID].result = 'received'
        #         packetsToRemove[packetID] = 'received'
        # the packet was not requested yet
        if self.fileRequestDict[packetID].requestTime is None:
            customer, pathID = packetid.SplitPacketID(packetID)
            if not os.path.exists(os.path.join(settings.getLocalBackupsDir(), customer, pathID)):
                fileRequest = self.fileRequestDict[packetID]
                if _Debug:
                    lg.out(_DebugLevel, "io_throttle.RunRequest for packetID " + fileRequest.packetID)
                # transport_control.RegisterInterest(self.DataReceived,fileRequest.creatorID,fileRequest.packetID)
                # callback.register_interest(self.DataReceived, fileRequest.creatorID, fileRequest.packetID)
                p2p_service.SendRetreive(
                    fileRequest.ownerID,
                    fileRequest.creatorID,
                    fileRequest.packetID,
                    fileRequest.remoteID,
                    callbacks={
                        commands.Data(): self.OnDataReceived,
                        commands.Fail(): self.OnDataReceived,
                        # None: lambda pkt_out: self.OnDataReceived(fileRequest.packetID, 'timeout'),  # timeout
                    },
                    # response_timeout=10,
                )
                # newpacket = signed.Packet(
                #     commands.Retrieve(),
                #     fileRequest.ownerID,
                #     fileRequest.creatorID,
                #     packetid.RemotePath(fileRequest.packetID),
                #     "",
                #     fileRequest.remoteID)
                # gateway.outbox(newpacket, callbacks={
                #     commands.Data(): self.DataReceived,
                #     commands.Fail(): self.DataReceived})
                fileRequest.requestTime = time.time()
            else:
                # we have the data file, no need to request it
                self.fileRequestDict[packetID].result = 'exist'
                packetsToRemove[packetID] = 'exist'
    # if request queue is empty - remove all records about packets failed to request
    if len(self.fileRequestQueue) == 0:
        del self.requestFailedPacketIDs[:]
    # remember requests results
    result = len(packetsToRemove)
    # remove finished requests
    for packetID, why in packetsToRemove.items():
        # NOTE(review): direct removal from fileRequestQueue is commented out
        # here, so the entry is presumably dropped inside OnDataRequestFailed()
        # instead - confirm, otherwise 'exist'/'failed' items would linger
        # self.fileRequestQueue.remove(packetID)
        if _Debug:
            lg.out(_DebugLevel, "io_throttle.RunRequest removed %s from %s receiving queue, %d more items" % (
                packetID, self.remoteName, len(self.fileRequestQueue)))
        self.OnDataRequestFailed(packetID, why)
    del packetsToRemove
    return result
def inbox(newpacket, info, status, error_message):
    """
    Entry point for every incoming signed packet addressed to this node.

    The handlers invoked here only pre-process the packet; the actual
    servicing is performed later by the interested network services, so the
    packet is never marked as handled and this function always returns False.
    """
    if newpacket.CreatorID != my_id.getLocalID() and newpacket.RemoteID != my_id.getLocalID():
        # packet is NOT for us, skip
        return False
    command = newpacket.Command
    # handlers that take only the packet itself
    single_arg = {
        commands.Fail(): Fail,
        commands.Retrieve(): Retrieve,
        commands.Data(): Data,
        commands.DeleteFile(): DeleteFile,
        commands.DeleteBackup(): DeleteBackup,
        commands.Correspondent(): Correspondent,
    }
    # handlers that also need the transport info
    double_arg = {
        commands.Ack(): Ack,
        commands.RequestService(): RequestService,
        commands.CancelService(): CancelService,
        commands.ListFiles(): ListFiles,
        commands.Files(): Files,
        commands.Broadcast(): Broadcast,
        commands.Coin(): Coin,
        commands.RetrieveCoin(): RetrieveCoin,
        commands.Key(): Key,
        commands.Event(): Event,
        commands.Message(): Message,
    }
    if command in single_arg:
        single_arg[command](newpacket)
    elif command in double_arg:
        double_arg[command](newpacket, info)
    # every known command above leaves the packet "not handled" so that the
    # responsible services can pick it up afterwards
    return False