def cb(path, subpath, name):
    """
    Validate a single stored packet file and delete it when it is useless.

    A file is removed when it is empty, cannot be unserialized into a signed
    packet, or fails the signature check.  Returns ``True`` only for paths
    that are not regular files (so a directory walk continues into them),
    ``False`` in every other case.
    """
    if not os.path.isfile(path):
        return True
    packetsrc = bpio.ReadBinaryFile(path)
    if not packetsrc:
        try:
            os.remove(path)  # if it is no good it is of no use to anyone
            printlog('Validate %r removed (empty file)' % path)
        except:
            printlog('Validate ERROR removing %r' % path)
        return False
    p = signed.Unserialize(packetsrc)
    if p is None:
        try:
            os.remove(path)  # if it is no good it is of no use to anyone
            printlog('Validate %r removed (unserialize error)' % path)
        except:
            # BUG FIX: this format string was missing its '% path' argument,
            # so the error line printed a literal '%r' instead of the path
            printlog('Validate ERROR removing %r' % path)
        return False
    result = p.Valid()
    packetsrc = ''  # drop the raw bytes before the throttling sleep below
    del p
    if not result:
        try:
            os.remove(path)  # if it is no good it is of no use to anyone
            printlog('Validate %r removed (invalid packet)' % path)
        except:
            printlog('Validate ERROR removing %r' % path)
        return False
    time.sleep(0.1)  # throttle the scan to keep disk/CPU load low
    return False
def OnDataReceived(self, newpacket, result):
    """
    Handle the response for a file we previously requested from this supplier.
    """
    if self.shutdown:
        # the queue is being torn down - abandon everything still pending
        lg.warn('supplier queue is shutting down')
        self.StopAllRequests()
        return
    if _Debug:
        lg.args(_DebugLevel, newpacket=newpacket, result=result, queue=len(self.fileRequestQueue), remoteName=self.remoteName)
    packetID = global_id.CanonicalID(newpacket.PacketID)
    is_known = (packetID in self.fileRequestQueue) and (packetID in self.fileRequestDict)
    if not is_known:
        lg.err('unexpected %r received which is not in the downloading queue' % newpacket)
        return
    f_down = self.fileRequestDict[packetID]
    if newpacket.Command == commands.Fail():
        f_down.event('fail-received', newpacket)
        return
    if newpacket.Command != commands.Data():
        lg.err('incorrect response command: %r' % newpacket)
        return
    # verify the signed packet wrapped inside the Data() payload
    wrapped_packet = signed.Unserialize(newpacket.Payload)
    if not wrapped_packet or not wrapped_packet.Valid():
        lg.err('incoming Data() packet is not valid')
        f_down.event('fail-received', newpacket)
        return
    f_down.event('valid-data-received', wrapped_packet)
def _on_supplier_response(self, newpacket, info):
    """
    Handle one supplier's reply carrying our signed backup index file.

    Unserializes the signed packet wrapped inside ``newpacket.Payload``,
    hands it to ``backup_control.IncomingSupplierBackupIndex()`` and fires
    automat events: ``index-file-received`` per supplier revision, and
    ``all-responded`` once no suppliers remain pending.
    """
    wrapped_packet = signed.Unserialize(newpacket.Payload)
    if _Debug:
        lg.args(_DebugLevel, newpacket=newpacket, wrapped_packet=wrapped_packet)
    if not wrapped_packet or not wrapped_packet.Valid():
        # drop anything that fails to unserialize or fails the signature check
        lg.err('incoming Data() is not valid')
        return
    supplier_idurl = wrapped_packet.RemoteID
    from storage import backup_control
    supplier_revision = backup_control.IncomingSupplierBackupIndex(wrapped_packet)
    # this supplier has answered - stop waiting for it
    self.requesting_suppliers.discard(supplier_idurl)
    if supplier_revision is not None:
        # schedule on the reactor instead of calling the automat synchronously
        reactor.callLater(0, self.automat, 'index-file-received', (newpacket, supplier_revision, ))  # @UndefinedVariable
    if _Debug:
        lg.out(_DebugLevel, 'index_synchronizer._on_supplier_response %s from %r, pending: %d, total: %d' % (
            newpacket, supplier_idurl, len(self.requesting_suppliers), self.requested_suppliers_number))
    if len(self.requesting_suppliers) == 0:
        # every supplier we asked has now responded
        reactor.callLater(0, self.automat, 'all-responded')  # @UndefinedVariable
def cb(path, subpath, name):
    """
    Validate one locally stored packet file; delete files that are empty,
    fail to unserialize, or fail the signature check.

    Returns ``True`` only for non-files so a directory walk keeps descending;
    returns ``False`` for every regular file.
    """
    def _discard(reason):
        # best-effort removal: the file is useless, but deletion may still fail
        try:
            os.remove(path)
            printlog('Validate ' + path + ' removed (' + reason + ')')
        except:
            printlog('Validate ERROR removing ' + path)

    if not os.path.isfile(path):
        return True
    raw = bpio.ReadBinaryFile(path)
    if not raw:
        _discard('empty file')
        return False
    pkt = signed.Unserialize(raw)
    if pkt is None:
        _discard('unserialize error')
        return False
    ok = pkt.Valid()
    raw = ''
    del pkt
    if not ok:
        _discard('invalid packet')
        return False
    time.sleep(0.1)  # slow the scan down a little
    return False
def OnDataReceived(self, newpacket, result):
    """
    Handle the response for a file we previously requested from this supplier.

    Looks the packet up in the request queue; if not found, retries the lookup
    with the packet ID rewritten to the latest known IDURL (the sender may be
    using an outdated identity in the packet ID).  Valid Data() responses fire
    'valid-data-received' on the downloader state machine, everything else
    fires 'fail-received'.
    """
    # we requested some data from a supplier, and just received it
    if self.shutdown:
        lg.warn('skip, supplier queue is shutting down')
        self.StopAllRequests()
        return
    if _Debug:
        lg.args(_DebugLevel, newpacket=newpacket, result=result, queue=len(self.fileRequestQueue), remoteName=self.remoteName)
    packetID = global_id.CanonicalID(newpacket.PacketID)
    if (packetID not in self.fileRequestQueue) or (packetID not in self.fileRequestDict):
        # the packet ID may embed an outdated IDURL - substitute the latest
        # known IDURL and try the queue lookup again
        latest_idurl = global_id.NormalizeGlobalID(packetID, as_field=True)['idurl'].latest
        another_packetID = global_id.SubstitutePacketID(packetID, idurl=latest_idurl)
        if (another_packetID in self.fileRequestQueue) and (another_packetID in self.fileRequestDict):
            packetID = another_packetID
            lg.warn('found incoming %r with outdated packet id, corrected: %r' % (newpacket, another_packetID, ))
    if (packetID not in self.fileRequestQueue) or (packetID not in self.fileRequestDict):
        lg.err('unexpected %r received which is not in the downloading queue' % newpacket)
    else:
        f_down = self.fileRequestDict[packetID]
        if newpacket.Command == commands.Data():
            # verify the signed packet wrapped inside the Data() payload
            wrapped_packet = signed.Unserialize(newpacket.Payload)
            if not wrapped_packet or not wrapped_packet.Valid():
                lg.err('incoming Data() packet is not valid')
                f_down.event('fail-received', newpacket)
                return
            f_down.event('valid-data-received', wrapped_packet)
        elif newpacket.Command == commands.Fail():
            f_down.event('fail-received', newpacket)
        else:
            lg.err('incorrect response command: %r' % newpacket)
def OnDataReceived(self, newpacket, result):
    """
    Handle a response for data we requested from this supplier (callback style).

    Removes the packet from the receiving queue, invokes every registered
    ``callOnReceived`` callback with the unwrapped packet and a result string
    ('received', 'failed' or 'shutdown'), drops the request record, and asks
    for the next item via ``self.DoRequest()``.
    """
    # if result == 'timeout':
    #     packetID = global_id.CanonicalID(newpacket)
    #     if packetID in self.fileRequestDict:
    #         self.fileRequestDict[packetID].fileReceivedTime = time.time()
    #         self.fileRequestDict[packetID].result = 'timeout'
    #         for callBack in self.fileRequestDict[packetID].callOnReceived:
    #             callBack(None, 'timeout')
    #         return
    # we requested some data from a supplier, just received it
    packetID = global_id.CanonicalID(newpacket.PacketID)
    if self.shutdown:
        # queue is closing down (e.g. supplier replaced) - notify callbacks
        # with 'shutdown' and do not accept anything new
        if packetID in self.fileRequestDict:
            for callBack in self.fileRequestDict[packetID].callOnReceived:
                callBack(newpacket, 'shutdown')
        if packetID in self.fileRequestDict:
            del self.fileRequestDict[packetID]
        lg.warn('supplier queue is shutting down')
        return
    if _Debug:
        lg.out(_DebugLevel, "io_throttle.OnDataReceived %s with result=[%s]" % (newpacket, result, ))
    if packetID in self.fileRequestQueue:
        self.fileRequestQueue.remove(packetID)
        if _Debug:
            lg.out(_DebugLevel, " removed %s from %s receiving queue, %d more items" % (packetID, self.remoteName, len(self.fileRequestQueue)))
    if newpacket.Command == commands.Data():
        # verify the signed packet wrapped inside the Data() payload
        wrapped_packet = signed.Unserialize(newpacket.Payload)
        if not wrapped_packet or not wrapped_packet.Valid():
            # NOTE: returning here keeps the request record in
            # fileRequestDict and skips DoRequest() below
            lg.err('incoming Data() is not valid')
            return
        if packetID in self.fileRequestDict:
            self.fileRequestDict[packetID].fileReceivedTime = time.time()
            self.fileRequestDict[packetID].result = 'received'
            for callBack in self.fileRequestDict[packetID].callOnReceived:
                callBack(wrapped_packet, 'received')
    elif newpacket.Command == commands.Fail():
        if packetID in self.fileRequestDict:
            self.fileRequestDict[packetID].fileReceivedTime = time.time()
            self.fileRequestDict[packetID].result = 'failed'
            for callBack in self.fileRequestDict[packetID].callOnReceived:
                callBack(newpacket, 'failed')
    else:
        lg.err('incorrect response command')
    if packetID in self.fileRequestDict:
        del self.fileRequestDict[packetID]
    if _Debug:
        lg.out(_DebugLevel, "io_throttle.OnDataReceived %s from %s, queue=%d" % (newpacket, self.remoteName, len(self.fileRequestQueue)))
    self.DoRequest()
def doProcessInboxPacket(self, arg):
    """
    Action method: decrypt a relayed inbox packet and hand it to `packet_in`.

    Decrypts the session key with the local private key, uses it to decrypt
    the payload (truncated to ``block.Length``), unserializes the routed
    signed packet inside and forwards it to ``packet_in.process()``.
    Note: uses Python-2 ``cStringIO`` for the in-memory buffer.
    """
    newpacket, info, _, _ = arg
    block = encrypted.Unserialize(newpacket.Payload)
    if block is None:
        lg.out(2, 'proxy_receiver.doProcessInboxPacket ERROR reading data from %s' % newpacket.CreatorID)
        return
    try:
        session_key = key.DecryptLocalPrivateKey(block.EncryptedSessionKey)
        padded_data = key.DecryptWithSessionKey(session_key, block.EncryptedData)
        # block.Length trims the padding added during encryption
        inpt = cStringIO.StringIO(padded_data[:int(block.Length)])
        data = inpt.read()
    except:
        lg.out(2, 'proxy_receiver.doProcessInboxPacket ERROR reading data from %s' % newpacket.CreatorID)
        lg.exc()
        try:
            # inpt may not exist if decryption raised before it was created
            inpt.close()
        except:
            pass
        return
    inpt.close()
    routed_packet = signed.Unserialize(data)
    if not routed_packet:
        lg.out(2, 'proxy_receiver.doProcessInboxPacket ERROR unserialize packet from %s' % newpacket.CreatorID)
        return
    if _Debug:
        lg.out(_DebugLevel, '<<<Relay-IN %s from %s://%s' % (str(routed_packet), info.proto, info.host, ))
    packet_in.process(routed_packet, info)
    # release the large intermediate buffers explicitly
    del block
    del data
    del padded_data
    del inpt
    del session_key
    del routed_packet
def doProcessInboxPacket(self, arg):
    """
    Action method: decrypt a relayed inbox packet, cache any identity it
    carries, validate it and hand it to `packet_in`.

    Same decrypt/unserialize flow as the basic variant, plus: Identity()
    packets update the identity cache before validation (the signature may
    be made with a newer key), the packet's signature is verified with
    ``Valid()``, and received byte counts accumulate in ``self.traffic_in``.
    """
    newpacket, info, _, _ = arg
    block = encrypted.Unserialize(newpacket.Payload)
    if block is None:
        lg.out(2, 'proxy_receiver.doProcessInboxPacket ERROR reading data from %s' % newpacket.CreatorID)
        return
    try:
        session_key = key.DecryptLocalPrivateKey(block.EncryptedSessionKey)
        padded_data = key.DecryptWithSessionKey(session_key, block.EncryptedData)
        # block.Length trims the padding added during encryption
        inpt = cStringIO.StringIO(padded_data[:int(block.Length)])
        data = inpt.read()
    except:
        lg.out(2, 'proxy_receiver.doProcessInboxPacket ERROR reading data from %s' % newpacket.CreatorID)
        lg.exc()
        try:
            # inpt may not exist if decryption raised before it was created
            inpt.close()
        except:
            pass
        return
    inpt.close()
    routed_packet = signed.Unserialize(data)
    if not routed_packet:
        lg.out(2, 'proxy_receiver.doProcessInboxPacket ERROR unserialize packet failed from %s' % newpacket.CreatorID)
        return
    if routed_packet.Command == commands.Identity():
        # cache unknown identities first, so the Valid() check below can
        # verify the signature against the freshly published key
        newidentity = identity.identity(xmlsrc=routed_packet.Payload)
        idurl = newidentity.getIDURL()
        if not identitycache.HasKey(idurl):
            lg.warn('received new identity: %s' % idurl)
        if not identitycache.UpdateAfterChecking(idurl, routed_packet.Payload):
            lg.warn("ERROR has non-Valid identity")
            return
    if not routed_packet.Valid():
        lg.out(2, 'proxy_receiver.doProcessInboxPacket ERROR invalid packet from %s' % newpacket.CreatorID)
        return
    self.traffic_in += len(data)
    if _Debug:
        lg.out(_DebugLevel, '<<<Relay-IN %s from %s://%s with %d bytes' % (
            str(routed_packet), info.proto, info.host, len(data)))
    packet_in.process(routed_packet, info)
    # release the large intermediate buffers explicitly
    del block
    del data
    del padded_data
    del inpt
    del session_key
    del routed_packet
def pop_signed_message(queue_id, message_id):
    """
    Take a message off the queue, verify its signature and decode its
    JSON payload in place.

    Raises ``Exception`` when the payload cannot be unserialized, is empty,
    fails validation, or does not decode as JSON.
    """
    msg = pop_message(queue_id, message_id)
    if not msg:
        return msg
    try:
        verified = signed.Unserialize(msg.payload)
    except:
        raise Exception('unserialize message fails')
    if not verified:
        raise Exception('unserialized message is empty')
    if not verified.Valid():
        raise Exception('unserialized message is not valid')
    try:
        decoded = json.loads(verified.Payload)
    except:
        raise Exception('failed reading message json data')
    msg.payload = decoded
    return msg
def test_signed_packet(self):
    """A signed packet must stay valid after a serialize/unserialize round-trip, repeatedly."""
    key.InitMyKey()
    for _ in range(10):
        payload = os.urandom(1024)
        original = signed.Packet(
            'Data',
            my_id.getIDURL(),
            my_id.getIDURL(),
            'SomeID',
            payload,
            self.bob_ident.getIDURL(),
        )
        self.assertTrue(original.Valid())
        restored = signed.Unserialize(original.Serialize())
        self.assertTrue(restored.Valid())
def pop_signed_message(queue_id, message_id):
    """
    Pull a message from the queue, verify its signature and decode its
    payload from serialized bytes into a dict, in place.

    Raises ``Exception`` when the payload cannot be unserialized, is empty,
    fails validation, or cannot be decoded.
    """
    # TODO: to be continued
    msg = pull_message(queue_id, message_id)
    if not msg:
        return msg
    try:
        verified = signed.Unserialize(msg.payload)
    except:
        raise Exception('unserialize message fails')
    if not verified:
        raise Exception('unserialized message is empty')
    if not verified.Valid():
        raise Exception('unserialized message is not valid')
    try:
        decoded = serialization.BytesToDict(verified.Payload, keys_to_text=True)
    except:
        raise Exception('failed reading message json data')
    msg.payload = decoded
    return msg
def backup_done(bid, result):
    """
    Test/demo continuation after a backup finished: wrap every produced piece
    into a signed Data() packet, simulate the round-trip through suppliers,
    then start a restore from the received copies.

    Pieces from ``<bid>`` are serialized into ``<bid>.out``; then each
    ``.out`` file is unserialized (simulating receiving it back) and its
    payload written to ``<bid>.inp``, which is finally restored into a
    temporary ``.tar.gz`` file via ``RestoreWorker``.
    """
    from crypt import signed
    try:
        os.mkdir(os.path.join(settings.getLocalBackupsDir(), bid + '.out'))
    except:
        # directory may already exist - best effort
        pass
    for filename in os.listdir(os.path.join(settings.getLocalBackupsDir(), bid)):
        filepath = os.path.join(settings.getLocalBackupsDir(), bid, filename)
        payld = str(bpio.ReadBinaryFile(filepath))
        # wrap each piece into a signed Data() packet, as if sending to a supplier
        outpacket = signed.Packet('Data', my_id.getLocalID(), my_id.getLocalID(), filename, payld, 'http://megafaq.ru/cvps1010.xml')
        newfilepath = os.path.join(settings.getLocalBackupsDir(), bid + '.out', filename)
        bpio.WriteBinaryFile(newfilepath, outpacket.Serialize())
    # Assume we delivered all pieces from ".out" to suppliers and lost original data
    # Then we requested the data back and got it into ".inp"
    try:
        os.mkdir(os.path.join(settings.getLocalBackupsDir(), bid + '.inp'))
    except:
        pass
    for filename in os.listdir(os.path.join(settings.getLocalBackupsDir(), bid + '.out')):
        filepath = os.path.join(settings.getLocalBackupsDir(), bid + '.out', filename)
        data = bpio.ReadBinaryFile(filepath)
        inppacket = signed.Unserialize(data)
        assert inppacket
        assert inppacket.Valid()
        newfilepath = os.path.join(settings.getLocalBackupsDir(), bid + '.inp', filename)
        bpio.WriteBinaryFile(newfilepath, inppacket.Payload)
    # Now do restore from input data
    backupID = bid + '.inp'
    outfd, tarfilename = tmpfile.make(
        'restore',
        extension='.tar.gz',
        prefix=backupID.replace('/', '_') + '_',
    )
    r = restore_worker.RestoreWorker(backupID, outfd)
    r.MyDeferred.addBoth(restore_done, tarfilename)
    reactor.callLater(1, r.automat, 'init')
def test_signed_packet(self):
    """A signed packet must survive serialize/unserialize with payload and raw bytes unchanged."""
    key.InitMyKey()
    payload = os.urandom(1024)
    original = signed.Packet(
        'Data',
        my_id.getIDURL(),
        my_id.getIDURL(),
        'SomeID',
        payload,
        'RemoteID:abc',
    )
    self.assertTrue(original.Valid())
    serialized = original.Serialize()
    restored = signed.Unserialize(serialized)
    self.assertTrue(restored.Valid())
    self.assertEqual(payload, restored.Payload)
    self.assertEqual(serialized, restored.Serialize())
def _on_supplier_response(self, newpacket, pkt_out):
    """
    Handle one supplier's reply to our backup-index request.

    A valid Data() response is unwrapped and passed to
    ``backup_control.IncomingSupplierBackupIndex()``; a Fail() response just
    removes the supplier from the pending set.  When the pending set becomes
    empty, the 'all-responded' automat event is fired synchronously.
    """
    if newpacket.Command == commands.Data():
        # verify the signed packet wrapped inside the Data() payload
        wrapped_packet = signed.Unserialize(newpacket.Payload)
        if not wrapped_packet or not wrapped_packet.Valid():
            lg.err('incoming Data() is not valid')
            return
        from storage import backup_control
        backup_control.IncomingSupplierBackupIndex(wrapped_packet)
        # p2p_service.SendAck(newpacket)
        self.requesting_suppliers.discard(wrapped_packet.RemoteID)
    elif newpacket.Command == commands.Fail():
        self.requesting_suppliers.discard(newpacket.OwnerID)
    else:
        raise Exception('wrong type of response')
    if _Debug:
        lg.out(_DebugLevel, 'index_synchronizer._on_supplier_response %s, pending: %d, total: %d' % (
            newpacket, len(self.requesting_suppliers), self.requested_suppliers_number))
    if len(self.requesting_suppliers) == 0:
        # every supplier we asked has now responded
        self.automat('all-responded')
def test_signed_packet(self):
    """Round-trip a signed packet and verify payload and raw bytes are unchanged."""
    from crypt import key
    from crypt import signed
    from userid import my_id
    key.InitMyKey()
    payload = os.urandom(1024)
    original = signed.Packet(
        'Data',
        my_id.getLocalID(),
        my_id.getLocalID(),
        'SomeID',
        payload,
        'RemoteID:abc',
    )
    self.assertTrue(original.Valid())
    serialized = original.Serialize()
    restored = signed.Unserialize(serialized)
    self.assertTrue(restored.Valid())
    self.assertEqual(payload, restored.Payload)
    self.assertEqual(serialized, restored.Serialize())
def inbox(info):
    """
    Entry point for every packet received by a transport.

    1) The protocol modules write to temporary files and give us that filename
    2) We unserialize the file contents into a signed packet
    3) We check that it is for us
    4) We check that it is from one of our contacts
    5) We use signed.validate() to check signature and that number fields are numbers
    6) Any other sanity checks we can do; if anything is funny we toss out the packet
    7) Then change the filename to the PackedID that it should be
       and call the right function(s) for this new packet
       (encryptedblock, scrubber, remotetester, customerservice, ...)
       to dispatch it to the right place(s)
    8) We have to keep track of bandwidth to/from everyone, and make a report
       every 24 hours which we send to BitDust sometime in the 24 hours after that

    Returns the unserialized packet, or None when it was dropped.
    """
    global _LastInboxPacketTime
    # if _DoingShutdown:
    #     if _Debug:
    #         lg.out(_DebugLevel - 4, "gateway.inbox ignoring input since _DoingShutdown ")
    #     return None
    if info.filename == "" or not os.path.exists(info.filename):
        lg.err("bad filename=" + info.filename)
        return None
    try:
        data = bpio.ReadBinaryFile(info.filename)
    except:
        lg.err("gateway.inbox ERROR reading file " + info.filename)
        return None
    if len(data) == 0:
        lg.err("gateway.inbox ERROR zero byte file from %s://%s" % (info.proto, info.host))
        return None
    # registered receive-filters get the first chance to consume the data
    if callback.run_finish_file_receiving_callbacks(info, data):
        lg.warn('incoming data of %d bytes was filtered out in file receiving callbacks' % len(data))
        return None
    try:
        newpacket = signed.Unserialize(data)
    except:
        lg.err("gateway.inbox ERROR during Unserialize data from %s://%s" % (info.proto, info.host))
        lg.exc()
        return None
    if newpacket is None:
        lg.warn("newpacket from %s://%s is None" % (info.proto, info.host))
        return None
    # newpacket.Valid() will be called later in the flow in packet_in.handle() method
    try:
        # touch every field once so a malformed packet is rejected here
        Command = newpacket.Command
        OwnerID = newpacket.OwnerID
        CreatorID = newpacket.CreatorID
        PacketID = newpacket.PacketID
        Date = newpacket.Date
        Payload = newpacket.Payload
        RemoteID = newpacket.RemoteID
        Signature = newpacket.Signature
        packet_sz = len(data)
    except:
        lg.err("gateway.inbox ERROR during Unserialize data from %s://%s" % (info.proto, info.host))
        lg.err("data length=" + str(len(data)))
        lg.exc()
        # fd, filename = tmpfile.make('other', '.bad')
        # os.write(fd, data)
        # os.close(fd)
        return None
    _LastInboxPacketTime = time.time()
    if _Debug:
        lg.out(_DebugLevel - 8, "gateway.inbox [%s] signed by %s|%s (for %s) from %s://%s" % (
            Command, nameurl.GetName(OwnerID), nameurl.GetName(CreatorID),
            nameurl.GetName(RemoteID), info.proto, info.host))
    if _Debug and lg.is_debug(_DebugLevel):
        monitoring()
    # control.request_update([('packet', newpacket.PacketID)])
    return newpacket
from userid import my_id
# Manual test script (Python 2 syntax): build or load a signed packet,
# dump every field, then round-trip it through Serialize/Unserialize.
bpio.init()
lg.set_debug_level(18)
settings.init()
key.InitMyKey()
if len(sys.argv) > 1:
    # a filename was given: sign its contents into a fresh Data() packet
    print 'reading'
    data1 = bpio.ReadBinaryFile(sys.argv[1])
    print '%d bytes long, hash: %s' % (len(data1), misc.BinaryToAscii(key.Hash(data1)).strip())
    p1 = signed.Packet('Data', my_id.getLocalID(), my_id.getLocalID(), 'SomeID', data1, 'RemoteID:abc')
else:
    # no argument: load a previously serialized packet from the file "input"
    print 'unserialize from "input"'
    p1 = signed.Unserialize(bpio.ReadBinaryFile('input'))
    data1 = p1.Payload
# dump every field with its runtime type for inspection
print 'serialize', p1
print ' Command:', p1.Command, type(p1.Command)
print ' OwnerID:', p1.OwnerID, type(p1.OwnerID)
print ' CreatorID:', p1.CreatorID, type(p1.CreatorID)
print ' PacketID:', p1.PacketID, type(p1.PacketID)
print ' Date:', p1.Date, type(p1.Date)
print ' Payload:', len(p1.Payload), misc.BinaryToAscii(key.Hash(p1.Payload)).strip(), type(p1.Payload)
print ' RemoteID:', p1.RemoteID, type(p1.RemoteID)
print ' Signature:', p1.Signature, type(p1.Signature)
src1 = p1.Serialize()
print len(src1), 'bytes long'
# print len(p1.Payload), misc.BinaryToAscii(key.HashMD5(p1.Payload))
p2 = signed.Unserialize(src1)
def Retrieve(request):
    """
    Customer is asking us for data he previously stored with us.

    We send with ``outboxNoAck()`` method because he will ask again if he
    does not get it.  On every failure path a Fail() is sent back with a
    short reason string.
    """
    # TODO: move to storage folder
    # TODO: rename to RetrieveData()
    if _Debug:
        lg.out(_DebugLevel, 'p2p_service.Retrieve [%s] by %s | %s' % (request.PacketID, request.OwnerID, request.CreatorID))
    if not driver.is_on('service_supplier'):
        return SendFail(request, 'supplier service is off')
    if not contactsdb.is_customer(request.OwnerID):
        lg.warn("had unknown customer " + request.OwnerID)
        SendFail(request, 'not a customer')
        return
    glob_path = global_id.ParseGlobalID(request.PacketID)
    if not glob_path['path']:
        # backward compatible check: old-style PacketIDs carry no global id,
        # so prepend our own before parsing again
        glob_path = global_id.ParseGlobalID(my_id.getGlobalID() + ':' + request.PacketID)
    if not glob_path['path']:
        lg.warn("got incorrect PacketID")
        SendFail(request, 'incorrect PacketID')
        return
    if glob_path['idurl']:
        if request.CreatorID == glob_path['idurl']:
            if _Debug:
                lg.out(_DebugLevel, '        same customer CreatorID')
        else:
            lg.warn('one of customers requesting a Data from another customer!')
    else:
        lg.warn('no customer global id found in PacketID: %s' % request.PacketID)
    # TODO: process requests from another customer : glob_path['idurl']
    filename = makeFilename(request.OwnerID, glob_path['path'])
    if filename == '':
        if True:
            # TODO: settings.getCustomersDataSharingEnabled() and
            # driver.services()['service_supplier'].has_permissions(request.CreatorID, )
            # fall back to resolving the file under the id embedded in the path
            filename = makeFilename(glob_path['idurl'], glob_path['path'])
    if filename == '':
        lg.warn("had empty filename")
        SendFail(request, 'empty filename')
        return
    if not os.path.exists(filename):
        lg.warn("did not find requested file locally " + filename)
        SendFail(request, 'did not find requested file locally')
        return
    if not os.access(filename, os.R_OK):
        lg.warn("no read access to requested packet " + filename)
        SendFail(request, 'no read access to requested packet')
        return
    data = bpio.ReadBinaryFile(filename)
    if not data:
        lg.warn("empty data on disk " + filename)
        SendFail(request, 'empty data on disk')
        return
    # the stored bytes are themselves a serialized signed packet - verify
    # it before sending it back
    outpacket = signed.Unserialize(data)
    del data
    if outpacket is None:
        lg.warn("Unserialize fails, not Valid packet " + filename)
        SendFail(request, 'unserialize fails')
        return
    if not outpacket.Valid():
        lg.warn("unserialized packet is not Valid " + filename)
        SendFail(request, 'unserialized packet is not Valid')
        return
    if _Debug:
        lg.out(_DebugLevel, "p2p_service.Retrieve sending %r back to %s" % (outpacket, nameurl.GetName(outpacket.CreatorID)))
    gateway.outbox(outpacket, target=outpacket.CreatorID)
def _do_process_inbox_packet(self, *args, **kwargs):
    """
    Decrypt a relayed inbox packet and dispatch it.

    After decrypting with the session key, the payload is interpreted as:
    a RelayAck/RelayFail control message (forwarded to proxy_sender's
    automat), or a routed signed packet which - after optional identity
    caching and a special-case for closed routes - is handed to
    ``packet_in.process()``.
    """
    newpacket, info, _, _ = args[0]
    block = encrypted.Unserialize(newpacket.Payload)
    if block is None:
        lg.err('reading data from %s' % newpacket.CreatorID)
        return
    try:
        session_key = key.DecryptLocalPrivateKey(block.EncryptedSessionKey)
        padded_data = key.DecryptWithSessionKey(session_key, block.EncryptedData, session_key_type=block.SessionKeyType)
        # block.Length trims the padding added during encryption
        inpt = BytesIO(padded_data[:int(block.Length)])
        data = inpt.read()
    except:
        lg.err('reading data from %s' % newpacket.CreatorID)
        lg.exc()
        try:
            # inpt may not exist if decryption raised before it was created
            inpt.close()
        except:
            pass
        return
    inpt.close()
    if newpacket.Command == commands.RelayAck():
        # acknowledge for a packet we relayed earlier - route to proxy_sender
        try:
            ack_info = serialization.BytesToDict(data, keys_to_text=True, values_to_text=True)
        except:
            lg.exc()
            return
        if _Debug:
            lg.out(_DebugLevel, '<<<Relay-ACK %s:%s from %s://%s with %d bytes %s' % (
                ack_info['command'], ack_info['packet_id'], info.proto, info.host, len(data), ack_info['error'], ))
        if _PacketLogFileEnabled:
            lg.out(0, ' \033[0;49;33mRELAY ACK %s(%s) with %d bytes from %s to %s TID:%s\033[0m' % (
                ack_info['command'], ack_info['packet_id'], info.bytes_received,
                global_id.UrlToGlobalID(ack_info['from']), global_id.UrlToGlobalID(ack_info['to']),
                info.transfer_id), log_name='packet', showtime=True)
        from transport.proxy import proxy_sender
        if proxy_sender.A():
            proxy_sender.A('relay-ack', ack_info, info)
        return True
    if newpacket.Command == commands.RelayFail():
        # relay failure report - route to proxy_sender as well
        try:
            fail_info = serialization.BytesToDict(data, keys_to_text=True, values_to_text=True)
        except:
            lg.exc()
            return
        if _Debug:
            lg.out(_DebugLevel, '<<<Relay-FAIL %s:%s from %s://%s with %d bytes %s' % (
                fail_info['command'], fail_info['packet_id'], info.proto, info.host, len(data), fail_info['error'], ))
        if _PacketLogFileEnabled:
            lg.out(0, ' \033[0;49;33mRELAY FAIL %s(%s) with %d bytes from %s to %s TID:%s\033[0m' % (
                fail_info['command'], fail_info['packet_id'], info.bytes_received,
                global_id.UrlToGlobalID(fail_info['from']), global_id.UrlToGlobalID(fail_info['to']),
                info.transfer_id), log_name='packet', showtime=True)
        from transport.proxy import proxy_sender
        if proxy_sender.A():
            proxy_sender.A('relay-failed', fail_info, info)
        return True
    routed_packet = signed.Unserialize(data)
    if not routed_packet:
        lg.err('unserialize packet failed from %s' % newpacket.CreatorID)
        return
    if _Debug:
        lg.out(_DebugLevel, '<<<Relay-IN %s from %s://%s with %d bytes' % (
            str(routed_packet), info.proto, info.host, len(data)))
    if _PacketLogFileEnabled:
        lg.out(0, ' \033[0;49;33mRELAY IN %s(%s) with %d bytes from %s to %s TID:%s\033[0m' % (
            routed_packet.Command, routed_packet.PacketID, info.bytes_received,
            global_id.UrlToGlobalID(info.sender_idurl), global_id.UrlToGlobalID(routed_packet.RemoteID),
            info.transfer_id), log_name='packet', showtime=True)
    if routed_packet.Command == commands.Identity():
        # cache unknown identities before further processing
        if _Debug:
            lg.out(_DebugLevel, ' found identity in relay packet %s' % routed_packet)
        newidentity = identity.identity(xmlsrc=routed_packet.Payload)
        idurl = newidentity.getIDURL()
        if not identitycache.HasKey(idurl):
            lg.info('received new identity %s rev %r' % (idurl.original(), newidentity.getRevisionValue(), ))
        if not identitycache.UpdateAfterChecking(idurl, routed_packet.Payload):
            lg.warn("ERROR has non-Valid identity")
            return
    if routed_packet.Command in [commands.Relay(), commands.RelayIn(), ] and routed_packet.PacketID.lower().startswith('identity:'):
        # an identity may also arrive wrapped one level deeper inside a
        # Relay()/RelayIn() packet - unwrap and cache it too
        if _Debug:
            lg.out(_DebugLevel, ' found routed identity in relay packet %s' % routed_packet)
        try:
            routed_identity = signed.Unserialize(routed_packet.Payload)
            newidentity = identity.identity(xmlsrc=routed_identity.Payload)
            idurl = newidentity.getIDURL()
            if not identitycache.HasKey(idurl):
                lg.warn('received new "routed" identity: %s' % idurl)
            if not identitycache.UpdateAfterChecking(idurl, routed_identity.Payload):
                lg.warn("ERROR has non-Valid identity")
                return
        except:
            lg.exc()
    if newpacket.Command == commands.RelayIn() and routed_packet.Command == commands.Fail():
        # the router reports our route is gone - fail matching outgoing packets
        if routed_packet.Payload == b'route not exist' or routed_packet.Payload == b'route already closed':
            for pout in packet_out.search_by_packet_id(routed_packet.PacketID):
                lg.warn('received %r from %r, outgoing packet is failed: %r' % (routed_packet.Payload, newpacket.CreatorID, pout, ))
                pout.automat('request-failed')
            return
    self.traffic_in += len(data)
    packet_in.process(routed_packet, info)
    # release the large intermediate buffers explicitly
    del block
    del data
    del padded_data
    del inpt
    del session_key
    del routed_packet
def inbox(info):
    """
    Entry point for every packet received by a transport.

    1) The protocol modules write to temporary files and give us that filename
    2) We unserialize the file contents into a signed packet
    3) We check that it is for us
    4) We check that it is from one of our contacts
    5) We use signed.validate() to check signature and that number fields are numbers
    6) Any other sanity checks we can do; if anything is funny we toss out the packet
    7) Then change the filename to the PackedID that it should be
       and call the right function(s) for this new packet:
       (encryptedblock, scrubber, remotetester, customerservice, ...)
       to dispatch it to the right place(s)

    Returns the unserialized packet, or None when it was dropped.
    """
    global _LastInboxPacketTime
    # if _DoingShutdown:
    #     if _Debug:
    #         lg.out(_DebugLevel, "gateway.inbox ignoring input since _DoingShutdown ")
    #     return None
    if _Debug:
        lg.out(_DebugLevel, "gateway.inbox [%s]" % info.filename)
    if info.filename == "" or not os.path.exists(info.filename):
        lg.err("bad filename=" + info.filename)
        return None
    try:
        data = bpio.ReadBinaryFile(info.filename)
    except:
        lg.err("gateway.inbox ERROR reading file " + info.filename)
        return None
    if len(data) == 0:
        lg.err("gateway.inbox ERROR zero byte file from %s://%s" % (info.proto, info.host))
        return None
    # registered receive-filters get the first chance to consume the data
    if callback.run_finish_file_receiving_callbacks(info, data):
        lg.warn('incoming data of %d bytes was filtered out in file receiving callbacks' % len(data))
        return None
    try:
        newpacket = signed.Unserialize(data)
    except:
        lg.err("gateway.inbox ERROR during Unserialize data from %s://%s" % (info.proto, info.host))
        lg.exc()
        return None
    if newpacket is None:
        lg.warn("newpacket from %s://%s is None" % (info.proto, info.host))
        return None
    # newpacket.Valid() will be called later in the flow in packet_in.handle() method
    try:
        # touch every field once so a malformed packet is rejected here
        Command = newpacket.Command
        OwnerID = newpacket.OwnerID
        CreatorID = newpacket.CreatorID
        PacketID = newpacket.PacketID
        Date = newpacket.Date
        Payload = newpacket.Payload
        RemoteID = newpacket.RemoteID
        Signature = newpacket.Signature
        packet_sz = len(data)
    except:
        lg.err("gateway.inbox ERROR during Unserialize data from %s://%s" % (info.proto, info.host))
        lg.err("data length=" + str(len(data)))
        lg.exc()
        return None
    _LastInboxPacketTime = time.time()
    if _Debug:
        lg.out(_DebugLevel - 2, "gateway.inbox [%s] signed by %s|%s (for %s) from %s://%s" % (
            Command, nameurl.GetName(OwnerID), nameurl.GetName(CreatorID),
            nameurl.GetName(RemoteID), info.proto, info.host))
    if _PacketLogFileEnabled:
        lg.out(0, ' \033[1;49;92mINBOX %s(%s) %s %s for %s\033[0m' % (
            newpacket.Command, newpacket.PacketID,
            global_id.UrlToGlobalID(newpacket.OwnerID),
            global_id.UrlToGlobalID(newpacket.CreatorID),
            global_id.UrlToGlobalID(newpacket.RemoteID),
        ), log_name='packet', showtime=True)
    return newpacket
def on_retrieve(newpacket):
    """
    Handle a Retrieve() request: locate the stored signed packet on disk,
    verify it, wrap it into a fresh Data() packet addressed to the requester
    and send it out.

    Returns True on success, False after sending a Fail() with a reason.
    """
    # external customer must be able to request
    # TODO: add validation of public key
    # if not contactsdb.is_customer(newpacket.OwnerID):
    #     lg.err("had unknown customer %s" % newpacket.OwnerID)
    #     p2p_service.SendFail(newpacket, 'not a customer')
    #     return False
    glob_path = global_id.ParseGlobalID(newpacket.PacketID)
    if not glob_path['path']:
        # backward compatible check: old-style PacketIDs carry no global id,
        # so prepend our own before parsing again
        glob_path = global_id.ParseGlobalID(my_id.getGlobalID('master') + ':' + newpacket.PacketID)
    if not glob_path['path']:
        lg.err("got incorrect PacketID")
        p2p_service.SendFail(newpacket, 'incorrect path')
        return False
    if not glob_path['idurl']:
        lg.warn('no customer global id found in PacketID: %s' % newpacket.PacketID)
        p2p_service.SendFail(newpacket, 'incorrect retrieve request')
        return False
    if newpacket.CreatorID != glob_path['idurl']:
        lg.warn('one of customers requesting a Data from another customer!')
    else:
        pass  # same customer, based on CreatorID : OK!
    recipient_idurl = newpacket.OwnerID
    # TODO: process requests from another customer : glob_path['idurl']
    filename = make_valid_filename(newpacket.OwnerID, glob_path)
    if not filename:
        if True:
            # TODO: settings.getCustomersDataSharingEnabled() and
            # SECURITY
            # TODO: add more validations for receiver idurl
            # recipient_idurl = glob_path['idurl']
            filename = make_valid_filename(glob_path['idurl'], glob_path)
    if not filename:
        lg.warn("had empty filename")
        p2p_service.SendFail(newpacket, 'empty filename')
        return False
    if not os.path.exists(filename):
        lg.warn("did not found requested file locally : %s" % filename)
        p2p_service.SendFail(newpacket, 'did not found requested file locally')
        return False
    if not os.access(filename, os.R_OK):
        lg.warn("no read access to requested packet %s" % filename)
        p2p_service.SendFail(newpacket, 'no read access to requested packet')
        return False
    data = bpio.ReadBinaryFile(filename)
    if not data:
        lg.warn("empty data on disk %s" % filename)
        p2p_service.SendFail(newpacket, 'empty data on disk')
        return False
    # the stored bytes are themselves a serialized signed packet - verify it
    stored_packet = signed.Unserialize(data)
    del data
    if stored_packet is None:
        lg.warn("Unserialize failed, not Valid packet %s" % filename)
        p2p_service.SendFail(newpacket, 'unserialize failed')
        return False
    if not stored_packet.Valid():
        lg.warn("Stored packet is not Valid %s" % filename)
        p2p_service.SendFail(newpacket, 'stored packet is not valid')
        return False
    if stored_packet.Command != commands.Data():
        lg.warn('sending back packet which is not a Data')
    # here Data() packet is sent back as it is...
    # that means outpacket.RemoteID=my_id.getIDURL() - it was addressed to that node and stored as it is
    # need to take that in account every time you receive Data() packet
    # it can be not a new Data(), but the old data returning back as a response to Retreive() packet
    # let's create a new Data() packet which will be addressed directly to recipient and "wrap" stored data inside it
    routed_packet = signed.Packet(
        Command=commands.Data(),
        OwnerID=stored_packet.OwnerID,
        CreatorID=my_id.getIDURL(),
        PacketID=stored_packet.PacketID,
        Payload=stored_packet.Serialize(),
        RemoteID=recipient_idurl,
    )
    if recipient_idurl == stored_packet.OwnerID:
        lg.info('from request %r : sending %r back to owner: %s' % (newpacket, stored_packet, recipient_idurl))
        gateway.outbox(routed_packet)  # , target=recipient_idurl)
        return True
    lg.info('from request %r : returning data owned by %s to %s' % (newpacket, stored_packet.OwnerID, recipient_idurl))
    gateway.outbox(routed_packet)
    return True
def _on_retreive(self, newpacket):
    """
    Serve a Retrieve() request from a known customer: read the stored packet
    from local disk, check it, and send it back to its creator unchanged.

    Returns True on success; on any rejection a Fail() is sent back and
    False is returned.
    """
    import os
    from logs import lg
    from system import bpio
    from userid import my_id
    from userid import global_id
    from crypt import signed
    from contacts import contactsdb
    from transport import gateway
    from p2p import p2p_service
    from p2p import commands
    # only registered customers may retrieve data from this node
    if not contactsdb.is_customer(newpacket.OwnerID):
        lg.err("had unknown customer %s" % newpacket.OwnerID)
        p2p_service.SendFail(newpacket, 'not a customer')
        return False
    gpath = global_id.ParseGlobalID(newpacket.PacketID)
    if not gpath['path']:
        # backward compatible check
        gpath = global_id.ParseGlobalID(my_id.getGlobalID('master') + ':' + newpacket.PacketID)
    if not gpath['path']:
        lg.err("got incorrect PacketID")
        p2p_service.SendFail(newpacket, 'incorrect path')
        return False
    if not gpath['idurl']:
        lg.warn('no customer global id found in PacketID: %s' % newpacket.PacketID)
    elif newpacket.CreatorID != gpath['idurl']:
        # same customer, based on CreatorID : OK! — otherwise warn
        lg.warn('one of customers requesting a Data from another customer!')
    # TODO: process requests from another customer : gpath['idurl']
    target_filename = self._do_make_valid_filename(newpacket.OwnerID, gpath)
    if not target_filename:
        if True:  # TODO: settings.getCustomersDataSharingEnabled() and
            # SECURITY
            target_filename = self._do_make_valid_filename(gpath['idurl'], gpath)
    if not target_filename:
        lg.warn("had empty filename")
        p2p_service.SendFail(newpacket, 'empty filename')
        return False
    if not os.path.exists(target_filename):
        lg.warn("did not find requested file locally : %s" % target_filename)
        p2p_service.SendFail(newpacket, 'did not find requested file locally')
        return False
    if not os.access(target_filename, os.R_OK):
        lg.warn("no read access to requested packet %s" % target_filename)
        p2p_service.SendFail(newpacket, 'no read access to requested packet')
        return False
    raw_bytes = bpio.ReadBinaryFile(target_filename)
    if not raw_bytes:
        lg.warn("empty data on disk %s" % target_filename)
        p2p_service.SendFail(newpacket, 'empty data on disk')
        return False
    restored = signed.Unserialize(raw_bytes)
    del raw_bytes
    if restored is None:
        lg.warn("Unserialize fails, not Valid packet %s" % target_filename)
        p2p_service.SendFail(newpacket, 'unserialize fails')
        return False
    if not restored.Valid():
        lg.warn("unserialized packet is not Valid %s" % target_filename)
        p2p_service.SendFail(newpacket, 'unserialized packet is not Valid')
        return False
    if restored.Command != commands.Data():
        lg.warn('sending back packet which is not a Data')
    # here Data() packet is sent back as it is...
    # that means outpacket.RemoteID=my_id.getLocalID() - it was addressed to that node and stored as it is
    # need to take that in account every time you receive Data() packet
    # it can be not a new Data(), but the old data returning back as a response to Retreive() packet
    lg.warn('from request %r : sending %r back to %s' % (newpacket, restored, restored.CreatorID))
    gateway.outbox(restored, target=restored.CreatorID)
    return True
def _do_forward_outbox_packet(self, outpacket_info_tuple):
    """
    This packet addressed to me but contain routed data to be transferred to
    another node. I will decrypt with my private key and send to outside
    world further.

    Takes a ``(newpacket, info)`` tuple; returns nothing — every failure
    path just logs (and in the no-route case sends a Fail()) and returns.
    """
    newpacket, info = outpacket_info_tuple
    # the payload is an encrypted block; if it can not even be unserialized, drop it
    block = encrypted.Unserialize(newpacket.Payload)
    if block is None:
        lg.out(2, 'proxy_router.doForwardOutboxPacket ERROR reading data from %s' % newpacket.RemoteID)
        return
    try:
        # decrypt the session key with my private key, then the body with the session key
        session_key = key.DecryptLocalPrivateKey(block.EncryptedSessionKey)
        padded_data = key.DecryptWithSessionKey(session_key, block.EncryptedData)
        # block.Length strips the padding added during encryption
        inpt = BytesIO(padded_data[:int(block.Length)])
        # see proxy_sender.ProxySender : _on_first_outbox_packet() for sending part
        json_payload = serialization.BytesToDict(inpt.read(), keys_to_text=True)
        inpt.close()
        sender_idurl = json_payload['f']  # from
        receiver_idurl = json_payload['t']  # to
        wide = json_payload['w']  # wide
        routed_data = json_payload['p']  # payload
    except:
        # broad except is deliberate: any decrypt/parse failure is logged and the packet dropped
        lg.out(2, 'proxy_router.doForwardOutboxPacket ERROR reading data from %s' % newpacket.RemoteID)
        lg.exc()
        try:
            # inpt may not exist yet if decryption itself failed
            inpt.close()
        except:
            pass
        return
    # only forward for senders we actually keep a route for
    route = self.routes.get(sender_idurl, None)
    if not route:
        inpt.close()  # NOTE(review): inpt was already closed above — harmless on BytesIO
        lg.warn('route with %s not found' % (sender_idurl))
        p2p_service.SendFail(newpacket, 'route not exist', remote_idurl=sender_idurl)
        return
    routed_packet = signed.Unserialize(routed_data)
    if not routed_packet or not routed_packet.Valid():
        lg.out(2, 'proxy_router.doForwardOutboxPacket ERROR unserialize packet from %s' % newpacket.RemoteID)
        return
    # send the packet directly to target user_id
    # we pass not callbacks because all response packets from this call will be also re-routed
    pout = packet_out.create(
        routed_packet,
        wide=wide,
        callbacks={},
        target=receiver_idurl,
    )
    if _Debug:
        lg.out(_DebugLevel, '>>>Relay-IN-OUT %d bytes from %s at %s://%s :' % (
            len(routed_data), nameurl.GetName(sender_idurl), info.proto, info.host,))
        lg.out(_DebugLevel, ' routed to %s : %s' % (nameurl.GetName(receiver_idurl), pout))
    # drop references to the decrypted material right away
    del block
    del routed_data
    del padded_data
    del route
    del inpt
    del session_key
    del routed_packet
def _do_process_inbox_packet(self, *args, **kwargs):
    """
    Decrypt a relayed inbound packet addressed to me, refresh the identity
    cache when it carries an Identity(), then hand the inner packet to
    ``packet_in.process()``.

    ``args[0]`` is a ``(newpacket, info, _, _)`` tuple; returns nothing.
    """
    newpacket, info, _, _ = args[0]
    block = encrypted.Unserialize(newpacket.Payload)
    if block is None:
        lg.err('reading data from %s' % newpacket.CreatorID)
        return
    try:
        # decrypt session key with my private key, then the padded body;
        # block.Length cuts the padding off
        session_key = key.DecryptLocalPrivateKey(block.EncryptedSessionKey)
        padded_data = key.DecryptWithSessionKey(session_key, block.EncryptedData)
        inpt = BytesIO(padded_data[:int(block.Length)])
        data = inpt.read()
    except:
        # broad except is deliberate: malformed/hostile input must not crash the router
        lg.err('reading data from %s' % newpacket.CreatorID)
        lg.exc()
        try:
            inpt.close()  # inpt may not exist if decryption failed
        except:
            pass
        return
    inpt.close()
    routed_packet = signed.Unserialize(data)
    if not routed_packet:
        lg.err('unserialize packet failed from %s' % newpacket.CreatorID)
        return
    if _Debug:
        lg.out(_DebugLevel, '<<<Relay-IN %s from %s://%s with %d bytes' % (
            str(routed_packet), info.proto, info.host, len(data)))
    if routed_packet.Command == commands.Identity():
        # inner packet is an identity: cache/update it before processing
        if _Debug:
            lg.out(_DebugLevel, ' found identity in relay packet %s' % routed_packet)
        newidentity = identity.identity(xmlsrc=routed_packet.Payload)
        idurl = newidentity.getIDURL()
        if not identitycache.HasKey(idurl):
            lg.info('received new identity: %s' % idurl)
        if not identitycache.UpdateAfterChecking(idurl, routed_packet.Payload):
            # invalid identity: stop processing this packet entirely
            lg.warn("ERROR has non-Valid identity")
            return
    if routed_packet.Command == commands.Relay() and routed_packet.PacketID.lower() == 'identity':
        # a doubly-wrapped ("routed") identity inside a Relay() packet
        if _Debug:
            lg.out(_DebugLevel, ' found routed identity in relay packet %s' % routed_packet)
        try:
            routed_identity = signed.Unserialize(routed_packet.Payload)
            newidentity = identity.identity(xmlsrc=routed_identity.Payload)
            idurl = newidentity.getIDURL()
            if not identitycache.HasKey(idurl):
                lg.warn('received new "routed" identity: %s' % idurl)
            if not identitycache.UpdateAfterChecking(idurl, routed_identity.Payload):
                lg.warn("ERROR has non-Valid identity")
                return
        except:
            lg.exc()
    # NOTE(review): signature validation of routed_packet is intentionally disabled here
    # if not routed_packet.Valid():
    #     lg.err('invalid packet %s from %s' % (
    #         routed_packet, newpacket.CreatorID, ))
    #     return
    self.traffic_in += len(data)
    packet_in.process(routed_packet, info)
    # drop references to the decrypted material right away
    del block
    del data
    del padded_data
    del inpt
    del session_key
    del routed_packet
def doForwardOutboxPacket(self, arg): """ Action method. """ # decrypt with my key and send to outside world newpacket, info = arg block = encrypted.Unserialize(newpacket.Payload) if block is None: lg.out( 2, 'proxy_router.doForwardOutboxPacket ERROR reading data from %s' % newpacket.RemoteID) return try: session_key = key.DecryptLocalPrivateKey(block.EncryptedSessionKey) padded_data = key.DecryptWithSessionKey(session_key, block.EncryptedData) inpt = cStringIO.StringIO(padded_data[:int(block.Length)]) sender_idurl = inpt.readline().rstrip('\n') receiver_idurl = inpt.readline().rstrip('\n') wide = inpt.readline().rstrip('\n') wide = wide == 'wide' except: lg.out( 2, 'proxy_router.doForwardOutboxPacket ERROR reading data from %s' % newpacket.RemoteID) lg.exc() try: inpt.close() except: pass return route = self.routes.get(sender_idurl, None) if not route: inpt.close() lg.warn('route with %s not found' % (sender_idurl)) p2p_service.SendFail(newpacket, 'route not exist', remote_idurl=sender_idurl) return data = inpt.read() inpt.close() routed_packet = signed.Unserialize(data) if not routed_packet: lg.out( 2, 'proxy_router.doForwardOutboxPacket ERROR unserialize packet from %s' % newpacket.RemoteID) return # send the packet directly to target user_id # we pass not callbacks because all response packets from this call will be also re-routed pout = packet_out.create( routed_packet, wide=wide, callbacks={}, target=receiver_idurl, ) # gateway.outbox(routed_packet, wide=wide) if _Debug: lg.out( _DebugLevel, '>>>Relay-OUT %d bytes from %s at %s://%s :' % ( len(data), nameurl.GetName(sender_idurl), info.proto, info.host, )) lg.out( _DebugLevel, ' routed to %s : %s' % (nameurl.GetName(receiver_idurl), pout)) del block del data del padded_data del route del inpt del session_key del routed_packet
def _do_process_inbox_packet(self, *args, **kwargs):
    """
    Decrypt a relayed inbound packet addressed to me, refresh the identity
    cache when it carries an Identity(), then hand the inner packet to
    ``packet_in.process()``.

    Newer variant: passes ``session_key_type`` to the session-key decrypt,
    writes to the packet log file, and matches routed identities by the
    ``identity:`` PacketID prefix.

    ``args[0]`` is a ``(newpacket, info, _, _)`` tuple; returns nothing.
    """
    newpacket, info, _, _ = args[0]
    block = encrypted.Unserialize(newpacket.Payload)
    if block is None:
        lg.err('reading data from %s' % newpacket.CreatorID)
        return
    try:
        # decrypt session key with my private key, then the padded body;
        # block.Length cuts the padding off
        session_key = key.DecryptLocalPrivateKey(block.EncryptedSessionKey)
        padded_data = key.DecryptWithSessionKey(
            session_key, block.EncryptedData, session_key_type=block.SessionKeyType)
        inpt = BytesIO(padded_data[:int(block.Length)])
        data = inpt.read()
    except:
        # broad except is deliberate: malformed/hostile input must not crash the router
        lg.err('reading data from %s' % newpacket.CreatorID)
        lg.exc()
        try:
            inpt.close()  # inpt may not exist if decryption failed
        except:
            pass
        return
    inpt.close()
    routed_packet = signed.Unserialize(data)
    if not routed_packet:
        lg.err('unserialize packet failed from %s' % newpacket.CreatorID)
        return
    if _Debug:
        lg.out(
            _DebugLevel, '<<<Relay-IN %s from %s://%s with %d bytes' %
            (str(routed_packet), info.proto, info.host, len(data)))
    if _PacketLogFileEnabled:
        # ANSI-colored entry in the dedicated packet log
        lg.out(
            0, ' \033[0;49;33mRELAY IN %s(%s) with %d bytes from %s to %s TID:%s\033[0m' %
            (routed_packet.Command, routed_packet.PacketID, info.bytes_received,
             global_id.UrlToGlobalID(info.sender_idurl), global_id.UrlToGlobalID(
                 routed_packet.RemoteID), info.transfer_id),
            log_name='packet', showtime=True)
    if routed_packet.Command == commands.Identity():
        # inner packet is an identity: cache/update it before processing
        if _Debug:
            lg.out(_DebugLevel, ' found identity in relay packet %s' % routed_packet)
        newidentity = identity.identity(xmlsrc=routed_packet.Payload)
        idurl = newidentity.getIDURL()
        if not identitycache.HasKey(idurl):
            lg.info('received new identity %s rev %r' % (
                idurl.original(), newidentity.getRevisionValue(),))
        if not identitycache.UpdateAfterChecking(idurl, routed_packet.Payload):
            # invalid identity: stop processing this packet entirely
            lg.warn("ERROR has non-Valid identity")
            return
    if routed_packet.Command == commands.Relay() and routed_packet.PacketID.lower().startswith('identity:'):
        # a doubly-wrapped ("routed") identity inside a Relay() packet
        if _Debug:
            lg.out(_DebugLevel, ' found routed identity in relay packet %s' % routed_packet)
        try:
            routed_identity = signed.Unserialize(routed_packet.Payload)
            newidentity = identity.identity(xmlsrc=routed_identity.Payload)
            idurl = newidentity.getIDURL()
            if not identitycache.HasKey(idurl):
                lg.warn('received new "routed" identity: %s' % idurl)
            if not identitycache.UpdateAfterChecking(
                    idurl, routed_identity.Payload):
                lg.warn("ERROR has non-Valid identity")
                return
        except:
            lg.exc()
    # NOTE(review): signature validation of routed_packet is intentionally disabled here
    # if not routed_packet.Valid():
    #     lg.err('invalid packet %s from %s' % (
    #         routed_packet, newpacket.CreatorID, ))
    #     return
    self.traffic_in += len(data)
    packet_in.process(routed_packet, info)
    # drop references to the decrypted material right away
    del block
    del data
    del padded_data
    del inpt
    del session_key
    del routed_packet