def create(outpacket, wide, callbacks, target=None, route=None, response_timeout=None, keep_alive=True):
    """
    Creates a new `PacketOut()` state machine for the given signed packet, appends it to the global outbox queue and starts it.
    """
    if _Debug:
        lg.out(_DebugLevel, 'packet_out.create [%s/%s/%s]:%s(%s) target=%s route=%s' % (
            nameurl.GetName(outpacket.OwnerID),
            nameurl.GetName(outpacket.CreatorID),
            nameurl.GetName(outpacket.RemoteID),
            outpacket.Command,
            outpacket.PacketID,
            target,
            route,
        ))
    p = PacketOut(outpacket, wide, callbacks, target, route, response_timeout, keep_alive)
    queue().append(p)
    p.automat('run')
    return p
def doUpdateSuppliers(self, *args, **kwargs):
    """
    Action method.
    """
    from customer import io_throttle
    # supplierList = contactsdb.suppliers()
    # take a list of supplier positions that were changed
    changedSupplierNums = backup_matrix.SuppliersChangedNumbers(self.current_suppliers)
    # notify io_throttle that we do not need those suppliers anymore
    for supplierNum in changedSupplierNums:
        lg.out(2, "backup_monitor.doUpdateSuppliers supplier %d changed: [%s]->[%s]" % (
            supplierNum,
            nameurl.GetName(self.current_suppliers[supplierNum]),
            nameurl.GetName(contactsdb.suppliers()[supplierNum]),
        ))
        supplier_idurl = self.current_suppliers[supplierNum]
        io_throttle.DeleteSuppliers([supplier_idurl, ])
        # erase (set to 0) remote info for that supplier
        backup_matrix.ClearSupplierRemoteInfo(supplierNum)
def outbox(outpacket, wide=False, callbacks={}, target=None, route=None):
    """
    Sends `outpacket` to the network.

        :param outpacket: an instance of ``signed.Packet``
        :param wide: set to True if you need to send the packet to all contacts of the Remote Identity
        :param callbacks: provides callback methods to receive the response,
            you need to register a callback for a given command,
            callback arguments are: (response_packet, info)
        :param target: use it if your recipient is not equal to outpacket.RemoteID
        :param route: dict with parameters to manage how this packet is processed:
                'packet': <another packet to be sent>,
                'proto': <receiver proto>,
                'host': <receiver host>,
                'remoteid': <receiver idurl>,
                'description': <description of the packet>,

        Returns:
            `None` if data was not sent and no filter was applied
            `Deferred` object if a filter was applied but sending was delayed
            `packet_out.PacketOut` object if the packet was sent
    """
    if _Debug:
        lg.out(_DebugLevel - 8, "gateway.outbox [%s] signed by %s|%s to %s, wide=%s" % (
            outpacket.Command,
            nameurl.GetName(outpacket.OwnerID),
            nameurl.GetName(outpacket.CreatorID),
            nameurl.GetName(outpacket.RemoteID),
            wide,
        ))
    return callback.run_outbox_filter_callbacks(outpacket, wide, callbacks)
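# Usage sketch (not from the project sources): how a caller might hand a signed packet to
# gateway.outbox() and attach a response callback, as described by the docstring above.
# The import paths, the IDURL value and the _on_ack handler are assumptions for illustration.
from p2p import commands
from crypt import signed
from transport import gateway
from userid import my_id

def _on_ack(response_packet, info):
    # callback arguments are (response_packet, info), as documented above
    print('got %s from %s://%s' % (response_packet.Command, info.proto, info.host))

ping = signed.Packet(
    Command=commands.Identity(),
    OwnerID=my_id.getLocalID(),
    CreatorID=my_id.getLocalID(),
    PacketID='identity:example',                     # illustrative packet ID
    Payload=my_id.getLocalIdentity().serialize(),
    RemoteID='http://some-id-server.net/alice.xml',  # assumed example IDURL
)
result = gateway.outbox(ping, wide=False, callbacks={commands.Ack(): _on_ack})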
def process(newpacket, info):
    from p2p import p2p_service
    from userid import my_id
    if not driver.is_on('service_p2p_hookups'):
        if _Debug:
            lg.out(_DebugLevel, 'packet_in.process SKIP incoming packet, service_p2p_hookups is not started')
        return None
    if _Debug:
        lg.out(_DebugLevel, 'packet_in.process [%s/%s/%s]:%s(%s) from %s://%s is "%s"' % (
            nameurl.GetName(newpacket.OwnerID),
            nameurl.GetName(newpacket.CreatorID),
            nameurl.GetName(newpacket.RemoteID),
            newpacket.Command,
            newpacket.PacketID,
            info.proto,
            info.host,
            info.status,
        ))
    if info.status != 'finished':
        if _Debug:
            lg.out(_DebugLevel, '    skip, packet status is: [%s]' % info.status)
        return None
    if newpacket.Command == commands.Identity():
        if newpacket.RemoteID != my_id.getLocalIDURL():
            if _Debug:
                lg.out(_DebugLevel, '    incoming Identity is routed to another user')
            if not p2p_service.Identity(newpacket, send_ack=False):
                lg.warn('non-valid identity received')
                return None
            # remote peer is sending a valid identity to another peer, routed via my machine
            # need to handle that packet - it should be processed by proxy_server
            return handle(newpacket, info)
        # the contact is sending us his current identity, which we might not have yet,
        # so we handle it before validating the packet:
        # without his identity on hand we can not verify the packet signature,
        # so first we check that his Identity is valid and save it into the cache,
        # and only then we check the packet itself to be valid.
        if not p2p_service.Identity(newpacket):
            lg.warn('non-valid identity received')
            return None
    if not identitycache.HasKey(newpacket.CreatorID):
        if _Debug:
            lg.out(_DebugLevel, '    will cache remote identity %s before processing incoming packet %s' % (
                newpacket.CreatorID, newpacket))
        d = identitycache.immediatelyCaching(newpacket.CreatorID)
        d.addCallback(lambda _: handle(newpacket, info))
        d.addErrback(lambda err: lg.err('failed caching remote %s identity: %s' % (newpacket.CreatorID, str(err))))
        return d
    return handle(newpacket, info)
def __repr__(self):
    args = '%s(%s)' % (str(self.Command), str(self.PacketID))
    if _Debug:
        if lg.is_debug(_DebugLevel):
            args += ' %s|%s for %s' % (
                nameurl.GetName(self.OwnerID),
                nameurl.GetName(self.CreatorID),
                nameurl.GetName(self.RemoteID),
            )
    return 'signed{ %s }' % args
def IncomingSupplierListFiles(newpacket, list_files_global_id):
    """
    Called when a "Files" command was received from one of my suppliers.

    This is an answer from the given supplier (after my request) with the list of our
    files stored on his machine.
    """
    supplier_idurl = newpacket.OwnerID
    # incoming_key_id = newpacket.PacketID.strip().split(':')[0]
    customer_idurl = list_files_global_id['idurl']
    num = contactsdb.supplier_position(supplier_idurl, customer_idurl=customer_idurl)
    if num < 0:
        lg.warn('unknown supplier: %s' % supplier_idurl)
        return False
    from supplier import list_files
    from customer import list_files_orator
    target_key_id = my_keys.latest_key_id(list_files_global_id['key_id'])
    if not my_keys.is_key_private(target_key_id):
        lg.warn('key %r is not registered, not possible to decrypt ListFiles() packet from %r' % (
            target_key_id, supplier_idurl, ))
        return False
    try:
        block = encrypted.Unserialize(newpacket.Payload, decrypt_key=target_key_id, )
        input_data = block.Data()
    except:
        lg.err('failed decrypting data from packet %r received from %r' % (newpacket, supplier_idurl))
        return False
    list_files_raw = list_files.UnpackListFiles(input_data, settings.ListFilesFormat())
    remote_files_changed, backups2remove, paths2remove, missed_backups = backup_matrix.process_raw_list_files(
        supplier_num=num,
        list_files_text_body=list_files_raw,
        customer_idurl=None,
        is_in_sync=None,
        auto_create=False,
    )
    list_files_orator.IncomingListFiles(newpacket)
    if remote_files_changed:
        backup_matrix.SaveLatestRawListFiles(supplier_idurl, list_files_raw)
    if _Debug:
        lg.args(_DebugLevel,
                supplier=nameurl.GetName(supplier_idurl),
                customer=nameurl.GetName(customer_idurl),
                backups2remove=len(backups2remove),
                paths2remove=len(paths2remove),
                files_changed=remote_files_changed,
                missed_backups=len(missed_backups), )
    if len(backups2remove) > 0:
        p2p_service.RequestDeleteListBackups(backups2remove)
        if _Debug:
            lg.out(_DebugLevel, '    also sent requests to remove %d backups' % len(backups2remove))
    if len(paths2remove) > 0:
        p2p_service.RequestDeleteListPaths(paths2remove)
        if _Debug:
            lg.out(_DebugLevel, '    also sent requests to remove %d paths' % len(paths2remove))
    if len(missed_backups) > 0:
        from storage import backup_rebuilder
        backup_rebuilder.AddBackupsToWork(missed_backups)
        backup_rebuilder.A('start')
        if _Debug:
            lg.out(_DebugLevel, '    also triggered service_rebuilding with %d missed backups' % len(missed_backups))
    del backups2remove
    del paths2remove
    del missed_backups
    return True
def DeleteFile(request):
    """
    Delete one or multiple files (that belong to another user) or folders on my machine.
    """
    if _Debug:
        lg.out(_DebugLevel, 'p2p_service.DeleteFile [%s] by %s | %s' % (
            request.PacketID, request.OwnerID, request.CreatorID))
    if not driver.is_on('service_supplier'):
        return SendFail(request, 'supplier service is off')
    if request.Payload == '':
        ids = [request.PacketID]
    else:
        ids = request.Payload.split('\n')
    filescount = 0
    dirscount = 0
    for pcktID in ids:
        glob_path = global_id.ParseGlobalID(pcktID)
        if not glob_path['path']:
            # backward compatible check
            glob_path = global_id.ParseGlobalID(my_id.getGlobalID() + ':' + request.PacketID)
        if not glob_path['path']:
            lg.warn("got incorrect PacketID")
            SendFail(request, 'incorrect PacketID')
            return
        # TODO: add validation of customerGlobID
        # TODO: process requests from another customer
        filename = makeFilename(request.OwnerID, glob_path['path'])
        if filename == "":
            filename = constructFilename(request.OwnerID, glob_path['path'])
            if not os.path.exists(filename):
                lg.warn("unknown customer: %s, or pathID is not correct or does not exist: %s" % (
                    nameurl.GetName(request.OwnerID), glob_path['path']))
                return SendFail(request, 'not a customer, or file not found')
        if os.path.isfile(filename):
            try:
                os.remove(filename)
                filescount += 1
            except:
                lg.exc()
        elif os.path.isdir(filename):
            try:
                bpio._dir_remove(filename)
                dirscount += 1
            except:
                lg.exc()
        else:
            lg.warn("path not found %s" % filename)
    if _Debug:
        lg.out(_DebugLevel, "p2p_service.DeleteFile from [%s] with %d IDs, %d files and %d folders were removed" % (
            nameurl.GetName(request.OwnerID), len(ids), filescount, dirscount))
    SendAck(request)
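# Illustrative sketch (not from the project sources): DeleteFile() above accepts either a single
# ID in request.PacketID or several IDs packed into the payload, one per line. The global path
# IDs below are hypothetical; the point is the '\n'-joined packing mirrored by the split above.
ids_to_delete = [
    'alice@id-server.net:0/1/2/F20240101010101AM',
    'alice@id-server.net:0/1/3/F20240101020202AM',
]
payload = '\n'.join(ids_to_delete)
# on the receiving side the handler restores the same list:
assert payload.split('\n') == ids_to_delete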
def process(newpacket, info):
    """
    Main entry point where all incoming signed packets are coming from remote peers.

    The main aspect here is to "authenticate" the remote node - we need to know its identity.
    """
    from p2p import p2p_service
    from userid import my_id
    if not driver.is_on('service_p2p_hookups'):
        if _Debug:
            lg.out(_DebugLevel, 'packet_in.process SKIP incoming packet, service_p2p_hookups is not started')
        return None
    if _Debug:
        lg.out(_DebugLevel, 'packet_in.process [%s/%s/%s]:%s(%s) from %s://%s is "%s"' % (
            nameurl.GetName(newpacket.OwnerID), nameurl.GetName(newpacket.CreatorID), nameurl.GetName(newpacket.RemoteID),
            newpacket.Command, newpacket.PacketID, info.proto, info.host, info.status, ))
    if info.status != 'finished':
        if _Debug:
            lg.out(_DebugLevel, '    skip, packet status is: [%s]' % info.status)
        return None
    # if _PacketLogFileEnabled:
    #     lg.out(0, '        \033[0;49;92mIN %s(%s) with %d bytes from %s to %s TID:%s\033[0m' % (
    #         newpacket.Command, newpacket.PacketID, info.bytes_received,
    #         global_id.UrlToGlobalID(info.sender_idurl), global_id.UrlToGlobalID(newpacket.RemoteID),
    #         info.transfer_id), log_name='packet', showtime=True)
    # we must know the recipient identity
    if not id_url.is_cached(newpacket.RemoteID):
        d = identitycache.immediatelyCaching(newpacket.RemoteID)
        d.addCallback(lambda _: process(newpacket, info))
        d.addErrback(lambda err: lg.err('incoming remote ID is unknown, failed caching remote %s identity: %s' % (
            newpacket.RemoteID, str(err))) and None)
        return d
    if newpacket.Command == commands.Identity():
        if newpacket.RemoteID != my_id.getIDURL():
            if _Debug:
                lg.out(_DebugLevel, '    incoming Identity is routed to another user')
            if not p2p_service.Identity(newpacket, send_ack=False):
                lg.warn('received identity was not processed')
                return None
            # remote peer is sending a valid identity to another peer, routed via my machine
            # need to handle that packet - it should be processed by proxy_server
            return handle(newpacket, info)
        # the contact is sending us his current identity, which we might not have yet,
        # so we handle it before validating the packet:
        # without his identity on hand we can not verify the packet signature,
        # so first we check that his Identity is valid and save it into the cache,
        # and only then we check the packet itself to be valid.
        if not p2p_service.Identity(newpacket):
            lg.warn('received identity was not processed')
            return None
    if not identitycache.HasKey(newpacket.CreatorID):
        if _Debug:
            lg.out(_DebugLevel, '    will cache remote identity %s before processing incoming packet %s' % (
                newpacket.CreatorID, newpacket))
        d = identitycache.immediatelyCaching(newpacket.CreatorID)
        d.addCallback(lambda _: handle(newpacket, info))
        d.addErrback(lambda err: lg.err('failed caching remote %s identity: %s' % (newpacket.CreatorID, str(err))) and None)
        return d
    return handle(newpacket, info)
def Identity(newpacket, send_ack=True):
    """
    A normal node or an Identity server is sending us a new copy of an identity for a contact of ours.

    Checks that the identity is signed correctly. Sends requests to cache all sources
    (other identity servers) holding that identity.
    """
    # TODO: move to service_gateway
    newxml = newpacket.Payload
    newidentity = identity.identity(xmlsrc=newxml)
    # SECURITY
    #   check that the identity is signed correctly
    #   and that the old public key matches the new one
    #   this is done in `UpdateAfterChecking()`
    idurl = newidentity.getIDURL()
    if not identitycache.HasKey(idurl):
        lg.info('received new identity: %s' % idurl)
    if not identitycache.UpdateAfterChecking(idurl, newxml):
        lg.warn("ERROR: received identity is not valid")
        return False
    # Now that we have the ID we can check the packet
    if not newpacket.Valid():
        # If not valid do nothing
        lg.warn("not Valid packet from %s" % idurl)
        return False
    # TODO: after receiving the full list of identity sources we can call ALL OF THEM, or those which are not cached yet.
    # this way we can be sure that even if the first source (server holding your public key) is not responding,
    # other sources still can give you the required user info: public key, contacts, etc..
    # TODO: we can also consolidate several "idurl" sources for every public key - basically identify the user by public key
    # something like:
    # for source in identitycache.FromCache(idurl).getSources():
    #     if source not in identitycache.FromCache(idurl):
    #         d = identitycache.immediatelyCaching(source)
    #         d.addCallback(lambda xml_src: identitycache.UpdateAfterChecking(idurl, xml_src))
    #         d.addErrback(lambda err: lg.warn('caching failed: %s' % err))
    if newpacket.OwnerID == idurl:
        # TODO: this needs to be moved to a service
        # wide=True : a small trick to respond to all contacts if we receive pings
        if _Debug:
            lg.out(_DebugLevel, "p2p_service.Identity idurl=%s ... also sent WIDE Acks" % nameurl.GetName(idurl))
    else:
        if _Debug:
            lg.out(_DebugLevel, "p2p_service.Identity idurl=%s, but packet ownerID=%s ... also sent WIDE Acks" % (
                nameurl.GetName(idurl), newpacket.OwnerID, ))
    if not send_ack:
        return True
    reactor.callLater(0, SendAck, newpacket, wide=True)  # @UndefinedVariable
    return True
def search_by_response_packet(newpacket, proto=None, host=None):
    result = []
    incoming_owner_idurl = newpacket.OwnerID
    incoming_creator_idurl = newpacket.CreatorID
    incoming_remote_idurl = newpacket.RemoteID
    if _Debug:
        lg.out(_DebugLevel, 'packet_out.search_by_response_packet for incoming [%s/%s/%s]:%s(%s) from [%s://%s]' % (
            nameurl.GetName(incoming_owner_idurl), nameurl.GetName(incoming_creator_idurl), nameurl.GetName(incoming_remote_idurl),
            newpacket.Command, newpacket.PacketID, proto, host, ))
        lg.out(_DebugLevel, '        [%s]' % (','.join([str(p.outpacket) for p in queue()])))
    for p in queue():
        # TODO: investigate
        if p.outpacket.PacketID.lower() != newpacket.PacketID.lower():
            # PacketID of the incoming packet does not match this outgoing packet
            continue
        if p.outpacket.PacketID != newpacket.PacketID:
            # case-insensitive match only
            lg.warn('packet ID in queue is only "almost" matching with incoming: %s ~ %s' % (
                p.outpacket.PacketID, newpacket.PacketID, ))
        if not commands.IsCommandAck(p.outpacket.Command, newpacket.Command):
            # this command must not be in the reply
            continue
        expected_recipient = [p.outpacket.RemoteID, ]
        if p.outpacket.RemoteID != p.remote_idurl:
            # the outgoing packet was addressed to another node, so we also need to expect a response from that node
            expected_recipient.append(p.remote_idurl)
        matched = False
        if incoming_owner_idurl in expected_recipient and my_id.getLocalIDURL() == incoming_remote_idurl:
            if _Debug:
                lg.out(_DebugLevel, '        matched with incoming owner: %s' % expected_recipient)
            matched = True
        if incoming_creator_idurl in expected_recipient and my_id.getLocalIDURL() == incoming_remote_idurl:
            if _Debug:
                lg.out(_DebugLevel, '        matched with incoming creator: %s' % expected_recipient)
            matched = True
        if incoming_remote_idurl in expected_recipient and my_id.getLocalIDURL() == incoming_owner_idurl and commands.Data() == newpacket.Command:
            if _Debug:
                lg.out(_DebugLevel, '        matched my own incoming Data with incoming remote: %s' % expected_recipient)
            matched = True
        if matched:
            result.append(p)
            if _Debug:
                lg.out(_DebugLevel, '        found pending outbox [%s/%s/%s]:%s(%s) cb:%s' % (
                    nameurl.GetName(p.outpacket.OwnerID), nameurl.GetName(p.outpacket.CreatorID), nameurl.GetName(p.outpacket.RemoteID),
                    p.outpacket.Command, p.outpacket.PacketID, list(p.callbacks.keys())))
    if len(result) == 0:
        if _Debug:
            lg.out(_DebugLevel, '        NOT FOUND pending packets in outbox queue matching incoming %s' % newpacket)
        if newpacket.Command == commands.Ack() and newpacket.PacketID not in [commands.Identity(), commands.Identity().lower()]:
            lg.warn('received %s was not a "good reply" from %s://%s' % (newpacket, proto, host, ))
    return result
def Files(request, info):
    """
    A directory list came in from some supplier or another customer.
    """
    if _Debug:
        lg.out(_DebugLevel, 'p2p_service.Files %d bytes in [%s] from %s by %s|%s' % (
            len(request.Payload), request.PacketID, nameurl.GetName(request.RemoteID),
            nameurl.GetName(request.OwnerID), nameurl.GetName(request.CreatorID), ))
def on_inbox_message(self, sender, message):
    name = nameurl.GetName(sender)
    if sender not in self.users:
        self.users.append(sender)
        self.history.append({
            'text': 'user %s has joined' % name,
            'name': '',
            'time': time.time(),
        })
    self.history.append({
        'text': message,
        'name': nameurl.GetName(sender),
        'sender': sender,
        'time': time.time(),
    })
def doStart(self, *args, **kwargs):
    """
    Action method.
    """
    options = {'idurl': my_id.getLocalID(), }
    id_contact = ''
    default_host = ''
    if self.proto == 'tcp':
        if not id_contact:
            default_host = strng.to_bin(misc.readExternalIP()) + b':' + strng.to_bin(str(settings.getTCPPort()))
        options['host'] = id_contact or default_host
        options['tcp_port'] = settings.getTCPPort()
    elif self.proto == 'udp':
        if not id_contact:
            default_host = strng.to_bin(nameurl.GetName(my_id.getLocalID())) + b'@' + strng.to_bin(platform.node())
        options['host'] = id_contact or default_host
        options['dht_port'] = settings.getDHTPort()
        options['udp_port'] = settings.getUDPPort()
    elif self.proto == 'proxy':
        pass
    elif self.proto == 'http':
        if not id_contact:
            default_host = strng.to_bin(misc.readExternalIP()) + b':' + strng.to_bin(str(settings.getHTTPPort()))
        options['host'] = id_contact or default_host
        options['http_port'] = settings.getHTTPPort()
    if _Debug:
        lg.out(8, 'network_transport.doStart connecting %s transport : %s' % (self.proto.upper(), options))
    self.interface.connect(options)
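# Reference sketch (illustrative values only): the options dict that doStart() above would hand to
# self.interface.connect() for a TCP transport. In the real method the host comes from
# misc.readExternalIP() and the ports from settings; the values below are assumptions.
options = {
    'idurl': 'http://some-id-server.net/alice.xml',  # assumed example IDURL
    'host': b'203.0.113.10:7771',                    # external IP + TCP port, joined as bytes
    'tcp_port': 7771,
}
print(options)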
def Ack(newpacket, info):
    if _Debug:
        lg.out(_DebugLevel, "p2p_service.Ack %s from [%s] at %s://%s with %d bytes payload" % (
            newpacket.PacketID, nameurl.GetName(newpacket.CreatorID),
            info.proto, info.host, len(newpacket.Payload)))
def OnFileSendFailReceived(self, RemoteID, PacketID, why):
    if self.shutdown:
        if _Debug:
            lg.out(_DebugLevel, "io_throttle.OnFileSendFailReceived finishing to %s, shutdown is True" % self.remoteName)
        return
    self.failedCount += 1
    if PacketID not in list(self.fileSendDict.keys()):
        lg.warn("packet %s not in send dict" % PacketID)
        return
    self.fileSendDict[PacketID].result = why
    fileToSend = self.fileSendDict[PacketID]
    assert fileToSend.remoteID == RemoteID
    # transport_control.RemoveSupplierRequestFromSendQueue(fileToSend.packetID, fileToSend.remoteID, commands.Data())
    # queue.remove_supplier_request(fileToSend.packetID, fileToSend.remoteID, commands.Data())
    # transport_control.RemoveInterest(fileToSend.remoteID, fileToSend.packetID)
    # callback.remove_interest(fileToSend.remoteID, fileToSend.packetID)
    if why == 'timeout':
        contact_status.PacketSendingTimeout(RemoteID, PacketID)
    if fileToSend.callOnFail:
        reactor.callLater(0, fileToSend.callOnFail, RemoteID, PacketID, why)
    self.DoSend()
    # self.RunSend()
    if _Debug:
        lg.out(_DebugLevel, "io_throttle.OnFileSendFailReceived %s to [%s] because %s" % (
            PacketID, nameurl.GetName(fileToSend.remoteID), why))
def __init__(self, parent, callOnReceived, creatorID, packetID, ownerID, remoteID,
             debug_level=_DebugLevel, log_events=_Debug, log_transitions=_Debug, publish_events=False, **kwargs):
    """
    Builds `file_down()` state machine.
    """
    self.parent = parent
    self.callOnReceived = []
    self.callOnReceived.append(callOnReceived)
    self.creatorID = creatorID
    self.packetID = global_id.CanonicalID(packetID)
    parts = global_id.ParseGlobalID(packetID)
    self.customerID = parts['customer']
    self.remotePath = parts['path']
    self.customerIDURL = parts['idurl']
    customerGlobalID, remotePath, versionName, fileName = packetid.SplitVersionFilename(packetID)
    self.backupID = packetid.MakeBackupID(customerGlobalID, remotePath, versionName)
    self.fileName = fileName
    self.ownerID = ownerID
    self.remoteID = remoteID
    self.requestTime = None
    self.fileReceivedTime = None
    self.requestTimeout = max(30, 2 * int(settings.getBackupBlockSize() / settings.SendingSpeedLimit()))
    self.result = ''
    self.created = utime.get_sec1970()
    super(FileDown, self).__init__(
        name="file_down_%s_%s/%s/%s" % (nameurl.GetName(self.remoteID), remotePath, versionName, fileName),
        state="AT_STARTUP",
        debug_level=debug_level,
        log_events=log_events,
        log_transitions=log_transitions,
        publish_events=publish_events,
        **kwargs
    )
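# Standalone sketch of the requestTimeout arithmetic used above: twice the time needed to
# transfer one backup block at the configured speed limit, with a 30 second floor.
# The block size and speed limit below are assumptions; the real values come from settings.
block_size_bytes = 4 * 1024 * 1024    # assumed 4 MB backup block
sending_speed_limit = 100 * 1024      # assumed 100 KB/s sending limit
request_timeout = max(30, 2 * int(block_size_bytes / sending_speed_limit))
print(request_timeout)                # -> 80 seconds with these sample values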
def incoming_message(request, priv_message_object, message_text):
    lg.out(6, 'dbwrite.incoming_message of %d bytes, type=%s' % (len(message_text), type(message_text)))
    from django.shortcuts import get_object_or_404
    from django.utils.html import escape
    from django.contrib.auth.models import User
    from web.jqchatapp.models import Room, Message, RoomMember
    from contacts import contactsdb
    from userid import my_id
    from lib import nameurl
    idurl = request.OwnerID
    try:
        ThisUser = User.objects.get(username=my_id.getIDName())
    except:
        lg.out(16, '    SKIP, seems the user is not logged in')
        # lg.exc()
        return
    try:
        ThisRoom = get_object_or_404(Room, idurl=idurl)
    except:
        nik = contactsdb.get_correspondent_nickname(idurl)
        ThisRoom = Room(idurl=idurl, name=(nik or nameurl.GetName(idurl)))
        ThisRoom.save()
    message_text = escape(unicode(message_text))
    Message.objects.create_message(idurl, ThisRoom, message_text)
    if not RoomMember.objects.find_member(idurl=idurl):
        RoomMember.objects.create_member(idurl=idurl,
                                         # name=nameurl.GetName(idurl),
                                         room=ThisRoom)
def _do_retrieve(self, x=None):
    packetID = global_id.MakeGlobalID(
        customer=my_id.getGlobalID(key_alias='master'),
        path=settings.BackupIndexFileName(),
    )
    localID = my_id.getIDURL()
    for supplier_idurl in contactsdb.suppliers():
        if not supplier_idurl:
            continue
        sc = supplier_connector.by_idurl(supplier_idurl)
        if sc is None or sc.state != 'CONNECTED':
            continue
        if online_status.isOffline(supplier_idurl):
            continue
        pkt_out = p2p_service.SendRetreive(
            ownerID=localID,
            creatorID=localID,
            packetID=packetID,
            remoteID=supplier_idurl,
            response_timeout=60 * 2,
            callbacks={
                commands.Data(): self._on_supplier_response,
                commands.Fail(): self._on_supplier_fail,
            },
        )
        if pkt_out:
            self.requesting_suppliers.add(supplier_idurl)
            self.requested_suppliers_number += 1
            self.requests_packets_sent.append((packetID, supplier_idurl))
        if _Debug:
            lg.out(_DebugLevel, '    %s sending to %s' % (pkt_out, nameurl.GetName(supplier_idurl)))
def SendContacts(remote_idurl, json_payload={}, wide=False, callbacks={}):
    """
    Sends a Contacts() packet with the given JSON payload to the remote node.
    """
    MyID = my_id.getLocalID()
    if _Debug:
        lg.out(_DebugLevel, "p2p_service.SendContacts to %s" % nameurl.GetName(remote_idurl))
    PacketID = packetid.UniqueID()
    try:
        json_payload['type']
        json_payload['space']
    except:
        lg.err('missing expected "type" or "space" field in the json payload')
        return None
    Payload = serialization.DictToBytes(json_payload)
    result = signed.Packet(
        Command=commands.Contacts(),
        OwnerID=MyID,
        CreatorID=MyID,
        PacketID=PacketID,
        Payload=Payload,
        RemoteID=remote_idurl,
    )
    gateway.outbox(result, wide=wide, callbacks=callbacks)
    return result
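# Hedged usage sketch (not from the project sources): the json payload handed to SendContacts()
# must carry at least the 'type' and 'space' fields checked above. The import path, IDURL,
# field values and the _on_contacts_ack handler are illustrative assumptions.
from p2p import commands, p2p_service

def _on_contacts_ack(response_packet, info):
    print('Contacts() delivered, reply command: %s' % response_packet.Command)

outgoing = p2p_service.SendContacts(
    remote_idurl='http://some-id-server.net/bob.xml',            # assumed example IDURL
    json_payload={'type': 'suppliers_list', 'space': 'family'},  # illustrative required fields
    callbacks={commands.Ack(): _on_contacts_ack},
)
if outgoing is None:
    print('payload was rejected before sending')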
def QueueSendFile(self, fileName, packetID, remoteID, ownerID, callOnAck=None, callOnFail=None):
    # out(10, "io_throttle.QueueSendFile %s to %s" % (packetID, nameurl.GetName(remoteID)))
    remoteID = id_url.field(remoteID)
    ownerID = id_url.field(ownerID)
    if not os.path.exists(fileName):
        lg.err("%s does not exist" % fileName)
        if callOnFail is not None:
            reactor.callLater(.01, callOnFail, remoteID, packetID, 'not exist')  # @UndefinedVariable
        return False
    if remoteID not in list(self.supplierQueues.keys()):
        self.supplierQueues[remoteID] = SupplierQueue(remoteID, self.creatorID)
        lg.info("made a new sending queue for %s" % nameurl.GetName(remoteID))
    return self.supplierQueues[remoteID].SupplierSendFile(
        fileName, packetID, ownerID, callOnAck, callOnFail, )
def QueueRequestFile(self, callOnReceived, creatorID, packetID, ownerID, remoteID):
    # make sure that we don't actually already have the file
    # if packetID != settings.BackupInfoFileName():
    remoteID = id_url.field(remoteID)
    ownerID = id_url.field(ownerID)
    creatorID = id_url.field(creatorID)
    if packetID not in [
        settings.BackupInfoFileName(),
        settings.BackupInfoFileNameOld(),
        settings.BackupInfoEncryptedFileName(),
    ]:
        customer, pathID = packetid.SplitPacketID(packetID)
        filename = os.path.join(settings.getLocalBackupsDir(), customer, pathID)
        if os.path.exists(filename):
            lg.warn("%s already exists" % filename)
            if callOnReceived:
                reactor.callLater(0, callOnReceived, packetID, 'exist')  # @UndefinedVariable
            return False
    if remoteID not in list(self.supplierQueues.keys()):
        # make a new queue for this supplier
        self.supplierQueues[remoteID] = SupplierQueue(remoteID, self.creatorID)
        lg.info("made a new receiving queue for %s" % nameurl.GetName(remoteID))
    # lg.out(10, "io_throttle.QueueRequestFile asking for %s from %s" % (packetID, nameurl.GetName(remoteID)))
    return self.supplierQueues[remoteID].SupplierRequestFile(
        callOnReceived, creatorID, packetID, ownerID)
def SendListFiles(target_supplier, customer_idurl=None, key_id=None, wide=False, callbacks={}):
    """
    This is used as a request method towards your supplier: if you send him a ListFiles()
    packet he will reply with the list of your stored files inside a Files() packet.
    """
    MyID = my_id.getLocalID()
    if not customer_idurl:
        customer_idurl = MyID
    if not str(target_supplier).isdigit():
        RemoteID = target_supplier
    else:
        RemoteID = contactsdb.supplier(target_supplier, customer_idurl=customer_idurl)
    if not RemoteID:
        lg.warn("RemoteID is empty, target_supplier=%s" % str(target_supplier))
        return None
    if _Debug:
        lg.out(_DebugLevel, "p2p_service.SendListFiles to %s" % nameurl.GetName(RemoteID))
    if not key_id:
        key_id = global_id.MakeGlobalID(idurl=customer_idurl, key_alias='customer')
    PacketID = "%s:%s" % (key_id, packetid.UniqueID(), )
    Payload = settings.ListFilesFormat()
    result = signed.Packet(
        Command=commands.ListFiles(),
        OwnerID=MyID,
        CreatorID=MyID,
        PacketID=PacketID,
        Payload=Payload,
        RemoteID=RemoteID,
    )
    gateway.outbox(result, wide=wide, callbacks=callbacks)
    return result
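# Hedged sketch of the request/response flow from the docstring above: ask the supplier at
# position 0 for the list of stored files and pick up the Files() reply via the callbacks dict.
# The import paths and both handlers are assumptions for illustration.
from p2p import commands, p2p_service

def _on_files_received(response_packet, info):
    # the supplier answers a ListFiles() request with a Files() packet
    print('received %d bytes of file list' % len(response_packet.Payload))

def _on_files_failed(response_packet, info):
    print('supplier refused the ListFiles() request')

outgoing = p2p_service.SendListFiles(
    target_supplier=0,  # position of the supplier in my suppliers list
    callbacks={
        commands.Files(): _on_files_received,
        commands.Fail(): _on_files_failed,
    },
)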
def rate_all_users():
    from p2p import online_status
    from p2p import p2p_connector
    if not p2p_connector.A():
        lg.warn('ratings update skipped, p2p_connector() is not running')
        return
    if p2p_connector.A().state != 'CONNECTED':
        lg.warn('ratings update skipped, p2p_connector() is not CONNECTED')
        return
    if _Debug:
        lg.out(_DebugLevel, 'ratings.rate_all_users')
    monthStr = time.strftime('%B')
    for idurl in contactsdb.contacts_remote(include_all=True):
        if not idurl:
            continue
        isalive = online_status.isOnline(idurl)
        mall, malive, tall, talive = increase_rating(idurl, isalive)
        month_percent = 100.0 * float(malive) / float(mall)
        total_percent = 100.0 * float(talive) / float(tall)
        if _Debug:
            lg.out(_DebugLevel, '[%6.2f%%: %s/%s] in %s and [%6.2f%%: %s/%s] total - %s' % (
                month_percent, malive, mall, monthStr,
                total_percent, talive, tall,
                nameurl.GetName(idurl), ))
    read_index()
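# Standalone sketch of the rating arithmetic above: both percentages are plain ratios of
# "alive" checks to all checks. The counter values are illustrative; in rate_all_users()
# they come from increase_rating().
mall, malive = 30, 27      # checks this month: total / alive
tall, talive = 400, 350    # checks overall: total / alive
month_percent = 100.0 * float(malive) / float(mall)   # -> 90.00
total_percent = 100.0 * float(talive) / float(tall)   # -> 87.50
print('[%6.2f%%: %s/%s] month, [%6.2f%%: %s/%s] total' % (
    month_percent, malive, mall, total_percent, talive, tall))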
def send(customer_idurl, packet_id, format_type):
    customer_name = nameurl.GetName(customer_idurl)
    MyID = my_id.getLocalID()
    RemoteID = customer_idurl
    PacketID = packet_id
    if _Debug:
        lg.out(_DebugLevel, "list_files.send to %s, format is '%s'" % (customer_name, format_type))
    custdir = settings.getCustomersFilesDir()
    ownerdir = os.path.join(custdir, nameurl.UrlFilename(customer_idurl))
    if not os.path.isdir(ownerdir):
        if _Debug:
            lg.out(_DebugLevel, "list_files.send did not find customer dir: " + ownerdir)
        src = PackListFiles('', format_type)
        result = signed.Packet(commands.Files(), MyID, MyID, PacketID, src, RemoteID)
        gateway.outbox(result)
        return result
    plaintext = TreeSummary(ownerdir)
    if _Debug:
        lg.out(_DebugLevel + 8, '\n%s' % (plaintext))
    src = PackListFiles(plaintext, format_type)
    result = signed.Packet(commands.Files(), MyID, MyID, PacketID, src, RemoteID)
    gateway.outbox(result)
    return result
def __init__(self, supplier_idurl, customer_idurl, needed_bytes, key_id=None, queue_subscribe=True):
    """
    Builds the supplier connection state machine and restores its last known state from disk.
    """
    self.supplier_idurl = supplier_idurl
    self.customer_idurl = customer_idurl
    self.needed_bytes = needed_bytes
    self.key_id = key_id
    self.queue_subscribe = queue_subscribe
    self.do_calculate_needed_bytes()
    name = 'supplier_%s_%s' % (
        nameurl.GetName(self.supplier_idurl),
        diskspace.MakeStringFromBytes(self.needed_bytes).replace(' ', ''),
    )
    self.request_packet_id = None
    self.callbacks = {}
    try:
        st = bpio.ReadTextFile(settings.SupplierServiceFilename(
            idurl=self.supplier_idurl,
            customer_idurl=self.customer_idurl,
        )).strip()
    except:
        st = 'DISCONNECTED'
    automat.Automat.__init__(
        self,
        name,
        state=st,
        debug_level=_DebugLevel,
        log_events=_Debug,
        log_transitions=_Debug,
    )
    for cb in self.callbacks.values():
        cb(self.supplier_idurl, self.state, self.state)
def __init__(self, outpacket, wide, callbacks={}, target=None, route=None, response_timeout=None, keep_alive=True):
    self.outpacket = outpacket
    self.wide = wide
    self.callbacks = {}
    self.caching_deferred = None
    self.description = self.outpacket.Command + '[' + self.outpacket.PacketID + ']'
    self.remote_idurl = target
    self.route = route
    self.response_timeout = response_timeout
    if self.route:
        self.description = self.route['description']
        self.remote_idurl = self.route['remoteid']
    if not self.remote_idurl:
        self.remote_idurl = self.outpacket.RemoteID  # correct_packet_destination(self.outpacket)
    self.remote_name = nameurl.GetName(self.remote_idurl)
    self.label = 'out_%d_%s' % (get_packets_counter(), self.remote_name)
    self.keep_alive = keep_alive
    automat.Automat.__init__(
        self,
        self.label,
        'AT_STARTUP',
        debug_level=_DebugLevel,
        log_events=_Debug,
        publish_events=False,
    )
    increment_packets_counter()
    for command, cb in callbacks.items():
        self.set_callback(command, cb)
def SendIdentity(remote_idurl, wide=False, timeout=10, callbacks={}):
    """
    Sends my current Identity file to the remote node inside an Identity() packet.
    """
    packet_id = 'identity:%s' % packetid.UniqueID()
    if _Debug:
        lg.out(_DebugLevel, "p2p_service.SendIdentity to %s wide=%s packet_id=%r" % (
            nameurl.GetName(remote_idurl), wide, packet_id, ))
    result = signed.Packet(
        Command=commands.Identity(),
        OwnerID=my_id.getLocalID(),
        CreatorID=my_id.getLocalID(),
        PacketID=packet_id,
        Payload=my_id.getLocalIdentity().serialize(),
        RemoteID=remote_idurl,
    )
    gateway.outbox(result, wide=wide, callbacks=callbacks, response_timeout=timeout)
    return result
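# Hedged sketch (not from the project sources): ping a remote node with SendIdentity() and watch
# for the Ack or Fail reply within the timeout. Import paths, the IDURL and the handler are
# assumptions for illustration.
from p2p import commands, p2p_service

def _on_ping_reply(response_packet, info):
    print('node answered with %s via %s://%s' % (response_packet.Command, info.proto, info.host))

outgoing = p2p_service.SendIdentity(
    remote_idurl='http://some-id-server.net/carol.xml',  # assumed example IDURL
    wide=True,      # also deliver the ping to all contacts of the remote identity
    timeout=10,
    callbacks={
        commands.Ack(): _on_ping_reply,
        commands.Fail(): _on_ping_reply,
    },
)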
def SendToID(idurl, ack_handler=None, Payload=None, NeedAck=False, wide=False):
    """
    Creates a ``packet`` with my Identity file and calls ``transport.gateway.outbox()`` to send it.
    """
    lg.out(8, "propagate.SendToID [%s] wide=%s" % (nameurl.GetName(idurl), str(wide)))
    if ack_handler is None:
        ack_handler = HandleAck
    thePayload = Payload
    if thePayload is None:
        thePayload = my_id.getLocalIdentity().serialize()
    p = signed.Packet(
        commands.Identity(),
        my_id.getLocalID(),  # MyID,
        my_id.getLocalID(),  # MyID,
        'Identity',  # my_id.getLocalID(), # PacketID,
        thePayload,
        idurl,
    )
    # callback.register_interest(AckHandler, p.RemoteID, p.PacketID)
    gateway.outbox(p, wide, callbacks={
        commands.Ack(): ack_handler,
        commands.Fail(): ack_handler,
    })
    if wide:
        # this is a ping packet - need to clear old info
        stats.ErasePeerProtosStates(idurl)
        stats.EraseMyProtosStates(idurl)
def get(url):
    """
    A smart way to get an identity from the cache.

    If not cached in memory but found on disk - it will be cached from disk.
    """
    if has_idurl(url):
        return idget(url)
    else:
        try:
            partfilename = nameurl.UrlFilename(url)
        except:
            lg.out(1, "identitydb.get ERROR %s is incorrect" % str(url))
            return None
        filename = os.path.join(settings.IdentityCacheDir(), partfilename)
        if not os.path.exists(filename):
            lg.out(6, "identitydb.get file %s does not exist" % os.path.basename(filename))
            return None
        idxml = bpio.ReadTextFile(filename)
        if idxml:
            idobj = identity.identity(xmlsrc=idxml)
            url2 = idobj.getIDURL()
            if url == url2:
                idset(url, idobj)
                return idobj
            else:
                lg.out(1, "identitydb.get ERROR url=%s url2=%s" % (url, url2))
                return None
        lg.out(6, "identitydb.get %s not found" % nameurl.GetName(url))
        return None
def SendToID(idurl, Payload=None, wide=False, ack_handler=None, timeout_handler=None, response_timeout=20):
    """
    Creates a ``packet`` with my Identity file and calls ``transport.gateway.outbox()`` to send it.
    """
    global _PropagateCounter
    if _Debug:
        lg.out(_DebugLevel, "propagate.SendToID [%s] wide=%s" % (nameurl.GetName(idurl), str(wide)))
    if ack_handler is None:
        ack_handler = HandleAck
    if timeout_handler is None:
        timeout_handler = HandleTimeOut
    thePayload = Payload
    if thePayload is None:
        thePayload = strng.to_bin(my_id.getLocalIdentity().serialize())
    p = signed.Packet(
        Command=commands.Identity(),
        OwnerID=my_id.getIDURL(),
        CreatorID=my_id.getIDURL(),
        PacketID=('propagate:%d:%s' % (_PropagateCounter, packetid.UniqueID())),
        Payload=thePayload,
        RemoteID=idurl,
    )
    _PropagateCounter += 1
    result = gateway.outbox(p, wide, response_timeout=response_timeout, callbacks={
        commands.Ack(): ack_handler,
        commands.Fail(): ack_handler,
        None: timeout_handler,
    })
    if wide:
        # this is a ping packet - need to clear old info
        p2p_stats.ErasePeerProtosStates(idurl)
        p2p_stats.EraseMyProtosStates(idurl)
    return result