def doReadAndUnserialize(self, arg):
    """
    Action method.
    """
    self.status, self.bytes_received, self.error_message = arg
    # DO UNSERIALIZE HERE , no exceptions
    newpacket = gateway.inbox(self)
    if newpacket is None:
        if _Debug:
            lg.out(
                _DebugLevel,
                "<<< IN <<< !!!NONE!!! [%s] %s from %s %s" % (
                    self.proto.upper().ljust(5),
                    self.status.ljust(8),
                    self.host,
                    os.path.basename(self.filename)),
            )
        # net_misc.ConnectionFailed(None, proto, 'receiveStatusReport %s' % host)
        try:
            fd, _ = tmpfile.make("other", ".inbox.error")
            data = bpio.ReadBinaryFile(self.filename)
            os.write(fd, "from %s:%s %s\n" % (self.proto, self.host, self.status))
            os.write(fd, str(data))
            os.close(fd)
        except:
            lg.exc()
        try:
            os.remove(self.filename)
        except:
            lg.exc()
        self.automat("unserialize-failed", None)
        return
    self.label += "_%s[%s]" % (newpacket.Command, newpacket.PacketID)
    self.automat("valid-inbox-packet", newpacket)
def doSerializeAndWrite(self, arg):
    """
    Action method.
    """
    # serialize and write packet on disk
    a_packet = self.outpacket
    if self.route:
        a_packet = self.route["packet"]
    try:
        fileno, self.filename = tmpfile.make("outbox")
        self.packetdata = a_packet.Serialize()
        os.write(fileno, self.packetdata)
        os.close(fileno)
        self.filesize = len(self.packetdata)
        if self.filesize < 1024 * 10:
            self.timeout = 10
        elif self.filesize > 1024 * 1024:
            self.timeout = int(self.filesize / float(settings.SendingSpeedLimit()))
        else:
            self.timeout = 300
        # self.timeout = min(
        #     settings.SendTimeOut() * 3,
        #     max(int(self.filesize/(settings.SendingSpeedLimit()/len(queue()))),
        #         settings.SendTimeOut()))
    except:
        lg.exc()
        self.packetdata = None
        self.automat("write-error")
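# Worked example of the timeout rule used in doSerializeAndWrite() above. This is an
# illustrative sketch only (not part of the state machine); the speed limit value is a
# hypothetical number, not the real settings.SendingSpeedLimit() default.
def estimate_timeout(filesize, sending_speed_limit):
    # packets under 10 KB get a short fixed timeout
    if filesize < 1024 * 10:
        return 10
    # packets over 1 MB scale with the configured sending speed limit
    if filesize > 1024 * 1024:
        return int(filesize / float(sending_speed_limit))
    # everything in between gets a generous fixed timeout
    return 300

assert estimate_timeout(5 * 1024, 30000) == 10           # small packet
assert estimate_timeout(100 * 1024, 30000) == 300        # mid-size packet
assert estimate_timeout(5 * 1024 * 1024, 30000) == 174   # large packet, limited by speed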
def doBlockPushAndRaid(self, arg):
    """
    Action method.
    """
    newblock = arg
    if self.terminating:
        self.automat('block-raid-done', (newblock.BlockNumber, None))
        lg.out(_DebugLevel, 'backup.doBlockPushAndRaid SKIP, terminating=True')
        return
    fileno, filename = tmpfile.make('raid')
    serializedblock = newblock.Serialize()
    blocklen = len(serializedblock)
    os.write(fileno, str(blocklen) + ":" + serializedblock)
    os.close(fileno)
    self.workBlocks[newblock.BlockNumber] = filename
    dt = time.time()
    outputpath = os.path.join(settings.getLocalBackupsDir(), self.backupID)
    task_params = (filename, self.eccmap.name, self.backupID, newblock.BlockNumber, outputpath)
    raid_worker.add_task('make', task_params,
                         lambda cmd, params, result: self._raidmakeCallback(params, result, dt),)
    self.automat('block-raid-started', newblock)
    del serializedblock
    if _Debug:
        lg.out(_DebugLevel, 'backup.doBlockPushAndRaid %s : start process data from %s to %s, %d' % (
            newblock.BlockNumber, filename, outputpath, id(self.terminating)))
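# A minimal reader sketch (an assumption, not code from the original module) for the
# temporary raid block file written above: the serialized block is stored as
# "<decimal length>:<block bytes>", so the reader splits on the first ':' and checks
# that the declared length matches the body.
def read_raid_block_file(path):
    with open(path, 'rb') as fin:
        raw = fin.read()
    head, _, body = raw.partition(b':')
    if int(head.decode()) != len(body):
        raise ValueError('corrupted raid block file: %r' % path)
    return body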
def download_and_replace_starter(output_func=None):
    repo, locationURL = misc.ReadRepoLocation()
    url = locationURL + settings.WindowsStarterFileName()
    lg.out(6, 'os_windows_update.download_and_replace_starter ' + str(url))
    result = Deferred()

    def _done(x, filename):
        try:
            fin = open(filename, 'rb')
            src = fin.read()
            fin.close()
        except:
            if output_func:
                output_func('error opening downloaded starter file')
            result.errback(Exception('error opening downloaded starter file'))
            return
        local_filename = os.path.join(GetLocalDir(), settings.WindowsStarterFileName())
        bpio.backup_and_remove(local_filename)
        try:
            os.rename(filename, local_filename)
            lg.out(4, 'os_windows_update.download_and_replace_starter file %s was updated' % local_filename)
        except:
            lg.out(1, 'os_windows_update.download_and_replace_starter ERROR can not rename %s to %s ' % (filename, local_filename))
            lg.exc()
            result.errback(Exception('can not rename the file ' + filename))
            return
        python27dll_path = os.path.join(GetLocalDir(), 'python27.dll')
        if not os.path.exists(python27dll_path):
            lg.out(4, 'os_windows_update.download_and_replace_starter file "python27.dll" not found download from "%s" repo' % repo)
            url = settings.DefaultRepoURL(repo) + 'python27.dll'
            d = net_misc.downloadHTTP(url, python27dll_path)
            d.addCallback(_done_python27_dll, filename)
            d.addErrback(_fail, filename)
            return
        result.callback(1)

    def _done_python27_dll(x, filename):
        lg.out(4, 'os_windows_update.download_and_replace_starter file %s was updated' % filename)
        result.callback(1)

    def _fail(x, filename):
        lg.out(1, 'os_windows_update.download_and_replace_starter FAILED')
        if output_func:
            try:
                output_func(x.getErrorMessage())
            except:
                output_func('error downloading starter')
        try:
            os.remove(filename)
        except:
            lg.out(1, 'os_windows_update.download_and_replace_starter ERROR can not remove ' + filename)
        result.errback(Exception('error downloading starter'))

    fileno, filename = tmpfile.make('other', '.starter')
    os.close(fileno)
    d = net_misc.downloadHTTP(url, filename)
    d.addCallback(_done, filename)
    d.addErrback(_fail, filename)
    return result
def __init__(self, stream, file_id, file_size):
    self.transfer_id = None
    self.registration = None
    self.stream = stream
    self.file_id = file_id
    self.size = file_size
    self.fin, self.filename = tmpfile.make("tcp-in")
    self.bytes_received = 0
    self.started = time.time()
    self.last_block_time = time.time()
    self.timeout = max(int(self.size / settings.SendingSpeedLimit()), 3)
    if _Debug:
        lg.out(_DebugLevel, '<<<TCP-IN %s with %d bytes write to %s' % (
            self.file_id, self.size, self.filename))
def Start(backupID, outputLocation, callback=None):
    lg.out(8, 'restore_monitor.Start %s to %s' % (backupID, outputLocation))
    global _WorkingBackupIDs
    global _WorkingRestoreProgress
    if backupID in _WorkingBackupIDs.keys():
        return None
    outfd, outfilename = tmpfile.make('restore', '.tar.gz', backupID.replace('/', '_') + '_')
    r = restore.restore(backupID, outfd)
    r.MyDeferred.addCallback(restore_done, outfilename, outputLocation, callback)
    r.MyDeferred.addErrback(restore_failed, outfilename, callback)
    r.set_block_restored_callback(block_restored_callback)
    r.set_packet_in_callback(packet_in_callback)
    _WorkingBackupIDs[backupID] = r
    _WorkingRestoreProgress[backupID] = {}
    r.automat('init')
    return r
def stringReceived(self, data):
    try:
        version = data[0]
        command = data[1]
        payload = data[2:]
    except:
        self.disconnect()
        # self.transport.loseConnection()
        lg.exc()
        lg.warn('incorrect data from %s\n' % str(self.transport.getPeer()))
        return
    if command == 'h':
        # lg.out(6, 'id_server.stringReceived HELLO received from %s' % payload)
        self.sendString('%swid-server:%s' % (version, A().hostname))
        return
    if command != 'd':
        self.disconnect()
        # self.transport.loseConnection()
        lg.warn('not a "data" packet from %s' % str(self.transport.getPeer()))
        return
    inp = cStringIO.StringIO(payload)
    try:
        file_id = struct.unpack('i', inp.read(4))[0]
        file_size = struct.unpack('i', inp.read(4))[0]
    except:
        inp.close()
        self.disconnect()
        # self.transport.loseConnection()
        lg.exc()
        lg.warn('wrong data from %s' % str(self.transport.getPeer()))
        return
    if self.fin is None:
        self.fin, self.fpath = tmpfile.make('idsrv', '.xml')
    inp_data = inp.read()
    inp.close()
    os.write(self.fin, inp_data)
    self.received += len(inp_data)
    # self.transport.loseConnection()
    self.sendString('%so%s' % (version, struct.pack('i', file_id)))
    # lg.out(6, 'id_server.stringReceived %d bytes received from %s' % (len(data), str(self.transport.getPeer())))
    if self.received == file_size:
        os.close(self.fin)
        A('incoming-identity-file', self.fpath)
        self.fin = None
        self.fpath = None
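# A minimal sender-side sketch of the 'd' message body that stringReceived() above parses.
# This is an assumption for illustration, not code from the original client: one version
# character, the command character 'd', then two native-order 4-byte integers (file_id,
# file_size) followed by a chunk of the identity file. It mirrors the Python 2 string
# handling used in the handler above.
import struct

def build_data_message(version, file_id, file_size, chunk):
    # struct.pack('i', ...) mirrors the struct.unpack('i', ...) calls in stringReceived()
    header = struct.pack('i', file_id) + struct.pack('i', file_size)
    return version + 'd' + header + chunk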
def SendServers():
    """
    My identity file can be stored in different locations, see the "sources" field.
    So I can use several identity servers to make storage more secure and reliable.
    This method sends my identity file to all of my identity servers via transport_tcp.
    """
    from transport.tcp import tcp_node
    _, sendfilename = tmpfile.make("propagate", close_fd=True)
    LocalIdentity = my_id.getLocalIdentity()
    bpio.WriteTextFile(sendfilename, LocalIdentity.serialize(as_text=True))
    dlist = []
    for idurl in LocalIdentity.getSources(as_originals=True):
        # sources for our identity are servers we need to send to
        protocol, host, port, filename = nameurl.UrlParse(idurl)
        # TODO: rebuild identity-server logic to be able to send my identity via HTTP POST instead of TCP and
        # get rid of second TCP port at all
        webport, tcpport = known_servers.by_host().get(
            host,
            (
                # by default use "expected" port numbers
                settings.IdentityWebPort(),
                settings.IdentityServerPort()))
        normalized_address = net_misc.normalize_address((
            host,
            int(tcpport),
        ))
        dlist.append(
            tcp_node.send(
                sendfilename,
                normalized_address,
                'Identity',
                keep_alive=False,
            ))
        if _Debug:
            lg.args(_DebugLevel, normalized_address=normalized_address, filename=filename)
    dl = DeferredList(dlist, consumeErrors=True)
    return dl
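# Illustrative sketch only: known_servers.by_host() maps an identity server host name to a
# (web_port, tcp_port) pair, and unknown hosts fall back to the "expected" defaults. The
# host names and port numbers below are hypothetical placeholders, not the project's real
# configuration.
servers_by_host = {
    'id-a.example.com': (8084, 6661),
    'id-b.example.com': (80, 6661),
}
host = 'id-c.example.com'  # not in the map, so the fallback pair is used
webport, tcpport = servers_by_host.get(host, (8084, 6661))
assert (webport, tcpport) == (8084, 6661)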
def Start(backupID, outputLocation, callback=None, keyID=None):
    lg.out(8, 'restore_monitor.Start %s to %s' % (backupID, outputLocation))
    global _WorkingBackupIDs
    global _WorkingRestoreProgress
    if backupID in _WorkingBackupIDs.keys():
        return _WorkingBackupIDs[backupID]
    outfd, outfilename = tmpfile.make(
        'restore', '.tar.gz',
        backupID.replace('@', '_').replace('.', '_').replace('/', '_').replace(':', '_') + '_')
    r = restore.restore(backupID, outfd, KeyID=keyID)
    r.MyDeferred.addCallback(restore_done, backupID, outfilename, outputLocation, callback)
    # r.MyDeferred.addErrback(restore_failed, outfilename, callback)
    r.set_block_restored_callback(block_restored_callback)
    r.set_packet_in_callback(packet_in_callback)
    _WorkingBackupIDs[backupID] = r
    _WorkingRestoreProgress[backupID] = {}
    r.automat('init')
    return r
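# Illustration only (not part of restore_monitor): the prefix passed to tmpfile.make()
# is just the backupID with characters that are awkward in file names replaced by
# underscores. The backupID value below is a hypothetical example.
backup_id = 'alice@id-host.org:0/F123/1/0'
prefix = backup_id.replace('@', '_').replace('.', '_').replace('/', '_').replace(':', '_') + '_'
assert prefix == 'alice_id-host_org_0_F123_1_0_'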
def doReadAndUnserialize(self, *args, **kwargs):
    """
    Action method.
    """
    from transport import gateway
    self.status, self.bytes_received, self.error_message = args[0]
    # DO UNSERIALIZE HERE , no exceptions
    newpacket = gateway.inbox(self)
    if newpacket is None:
        if _Debug:
            lg.out(_DebugLevel, '<<< IN <<< !!!NONE!!! [%s] %s from %s %s' % (
                self.proto.upper().ljust(5), self.status.ljust(8), self.host, os.path.basename(self.filename),))
        # net_misc.ConnectionFailed(None, proto, 'receiveStatusReport %s' % host)
        try:
            fd, _ = tmpfile.make('error', extension='.inbox')
            data = bpio.ReadBinaryFile(self.filename)
            os.write(fd, strng.to_bin('from %s:%s %s\n' % (self.proto, self.host, self.status)))
            os.write(fd, data)
            os.close(fd)
        except:
            lg.exc()
        try:
            os.remove(self.filename)
        except:
            lg.exc()
        self.automat('unserialize-failed', None)
        return
    self.label = '[%s(%s)]' % (newpacket.Command, newpacket.PacketID[:10])
    if _Debug:
        lg.out(_DebugLevel + 2, 'packet_in.doReadAndUnserialize: %s' % newpacket)
    self.automat('valid-inbox-packet', newpacket)
    events.send('inbox-packet-recevied', data=dict(
        packet_id=newpacket.PacketID,
        command=newpacket.Command,
        creator_id=newpacket.CreatorID,
        date=newpacket.Date,
        size=len(newpacket.Payload),
        remote_id=newpacket.RemoteID,
    ))
def doSerializeAndWrite(self, arg):
    """
    Action method.
    """
    # serialize and write packet on disk
    a_packet = self.outpacket
    if self.route:
        a_packet = self.route['packet']
    try:
        fileno, self.filename = tmpfile.make('outbox')
        self.packetdata = a_packet.Serialize()
        os.write(fileno, self.packetdata)
        os.close(fileno)
        self.filesize = len(self.packetdata)
        # self.timeout = min(
        #     settings.SendTimeOut() * 3,
        #     max(int(self.filesize/(settings.SendingSpeedLimit()/len(queue()))),
        #         settings.SendTimeOut()))
    except:
        lg.exc()
        self.packetdata = None
        self.automat('write-error')
def backup_done(bid, result):
    from crypt import signed
    try:
        os.mkdir(os.path.join(settings.getLocalBackupsDir(), bid + '.out'))
    except:
        pass
    for filename in os.listdir(os.path.join(settings.getLocalBackupsDir(), bid)):
        filepath = os.path.join(settings.getLocalBackupsDir(), bid, filename)
        payld = str(bpio.ReadBinaryFile(filepath))
        outpacket = signed.Packet(
            'Data', my_id.getLocalID(), my_id.getLocalID(), filename,
            payld, 'http://megafaq.ru/cvps1010.xml')
        newfilepath = os.path.join(settings.getLocalBackupsDir(), bid + '.out', filename)
        bpio.AtomicWriteFile(newfilepath, outpacket.Serialize())
    # Assume we delivered all pieces from ".out" to suppliers and lost original data
    # Then we requested the data back and got it into ".inp"
    try:
        os.mkdir(os.path.join(settings.getLocalBackupsDir(), bid + '.inp'))
    except:
        pass
    for filename in os.listdir(os.path.join(settings.getLocalBackupsDir(), bid + '.out')):
        filepath = os.path.join(settings.getLocalBackupsDir(), bid + '.out', filename)
        data = bpio.ReadBinaryFile(filepath)
        inppacket = signed.Unserialize(data)
        assert inppacket.Valid()
        newfilepath = os.path.join(settings.getLocalBackupsDir(), bid + '.inp', filename)
        bpio.AtomicWriteFile(newfilepath, inppacket.Payload)
    # Now do restore from input data
    backupID = bid + '.inp'
    outfd, tarfilename = tmpfile.make('restore', '.tar.gz', backupID.replace('/', '_') + '_')
    r = restore.restore(backupID, outfd)
    r.MyDeferred.addBoth(restore_done, tarfilename)
    reactor.callLater(1, r.automat, 'init')
def backup_done(bid, result):
    from crypt import signed
    try:
        os.mkdir(os.path.join(settings.getLocalBackupsDir(), bid + '.out'))
    except:
        pass
    for filename in os.listdir(os.path.join(settings.getLocalBackupsDir(), bid)):
        filepath = os.path.join(settings.getLocalBackupsDir(), bid, filename)
        payld = str(bpio.ReadBinaryFile(filepath))
        outpacket = signed.Packet('Data', my_id.getLocalID(), my_id.getLocalID(), filename,
                                  payld, 'http://megafaq.ru/cvps1010.xml')
        newfilepath = os.path.join(settings.getLocalBackupsDir(), bid + '.out', filename)
        bpio.AtomicWriteFile(newfilepath, outpacket.Serialize())
    # Assume we delivered all pieces from ".out" to suppliers and lost original data
    # Then we requested the data back and got it into ".inp"
    try:
        os.mkdir(os.path.join(settings.getLocalBackupsDir(), bid + '.inp'))
    except:
        pass
    for filename in os.listdir(os.path.join(settings.getLocalBackupsDir(), bid + '.out')):
        filepath = os.path.join(settings.getLocalBackupsDir(), bid + '.out', filename)
        data = bpio.ReadBinaryFile(filepath)
        inppacket = signed.Unserialize(data)
        assert inppacket.Valid()
        newfilepath = os.path.join(settings.getLocalBackupsDir(), bid + '.inp', filename)
        bpio.AtomicWriteFile(newfilepath, inppacket.Payload)
    # Now do restore from input data
    backupID = bid + '.inp'
    outfd, tarfilename = tmpfile.make('restore', '.tar.gz', backupID.replace('/', '_') + '_')
    r = restore.restore(backupID, outfd)
    r.MyDeferred.addBoth(restore_done, tarfilename)
    reactor.callLater(1, r.automat, 'init')
def doReadAndUnserialize(self, arg):
    """
    Action method.
    """
    self.status, self.bytes_received, self.error_message = arg
    # DO UNSERIALIZE HERE , no exceptions
    newpacket = gateway.inbox(self)
    if newpacket is None:
        if _Debug:
            lg.out(
                _DebugLevel,
                '<<< IN <<< !!!NONE!!! [%s] %s from %s %s' % (
                    self.proto.upper().ljust(5),
                    self.status.ljust(8),
                    self.host,
                    os.path.basename(self.filename),
                ))
        # net_misc.ConnectionFailed(None, proto, 'receiveStatusReport %s' % host)
        try:
            fd, _ = tmpfile.make('error', '.inbox')
            data = bpio.ReadBinaryFile(self.filename)
            os.write(fd, 'from %s:%s %s\n' % (self.proto, self.host, self.status))
            os.write(fd, str(data))
            os.close(fd)
        except:
            lg.exc()
        try:
            os.remove(self.filename)
        except:
            lg.exc()
        self.automat('unserialize-failed', None)
        return
    self.label += '_%s[%s]' % (newpacket.Command, newpacket.PacketID)
    if _Debug:
        lg.out(_DebugLevel + 2, 'packet_in.doReadAndUnserialize: %s' % newpacket)
    self.automat('valid-inbox-packet', newpacket)
def __init__(self, queue, stream_id, size):
    """
    """
    self.transfer_id = None
    self.registration = None
    self.queue = queue
    self.stream_callback = None
    self.stream_id = stream_id
    self.fd, self.filename = tmpfile.make("udp-in")
    self.size = size
    self.bytes_received = 0
    self.started = time.time()
    self.cancelled = False
    self.timeout = False
    self.status = None
    self.error_message = ''
    if _Debug:
        lg.out(18, 'udp_file_queue.InboxFile.__init__ {%s} [%d] from %s with %d bytes' % (
            os.path.basename(self.filename), self.stream_id,
            str(self.queue.session.peer_address), self.size))
def __init__(self, queue, stream_id, size):
    """
    """
    self.typ = 'udp-in'
    self.transfer_id = None
    self.registration = None
    self.queue = queue
    self.stream_callback = None
    self.stream_id = stream_id
    self.fd, self.filename = tmpfile.make("udp-in")
    self.size = size
    self.bytes_received = 0
    self.started = time.time()
    self.cancelled = False
    self.timeout = False
    self.status = None
    self.error_message = ''
    if _Debug:
        lg.out(18, 'udp_file_queue.InboxFile.__init__ {%s} [%d] from %s with %d bytes' % (
            os.path.basename(self.filename), self.stream_id,
            str(self.queue.session.peer_address), self.size))
def SendServers():
    """
    My identity file can be stored in different locations, see the "sources" field.
    So I can use several identity servers to make storage more secure.
    This method sends my identity file to all of my identity servers via transport_tcp.
    """
    from transport.tcp import tcp_node
    sendfile, sendfilename = tmpfile.make("propagate")
    os.close(sendfile)
    LocalIdentity = my_id.getLocalIdentity()
    bpio.WriteTextFile(sendfilename, LocalIdentity.serialize())
    dlist = []
    for idurl in LocalIdentity.sources:
        # sources for our identity are servers we need to send to
        protocol, host, port, filename = nameurl.UrlParse(idurl)
        # if host == settings.IdentityServerName():
        #     host = '67.207.147.183'
        webport, tcpport = known_servers.by_host().get(
            host, (settings.IdentityWebPort(), settings.IdentityServerPort()))
        # srvhost = '%s:%d' % (host, int(tcpport))
        dlist.append(
            tcp_node.send(
                sendfilename,
                net_misc.normalize_address((
                    host,
                    int(tcpport),
                )),
                'Identity',
                keep_alive=False,
            ))
        # dlist.append(gateway.send_file_single('tcp', srvhost, sendfilename, 'Identity'))
    dl = DeferredList(dlist, consumeErrors=True)
    return dl
def Start(backupID, outputLocation, callback=None, keyID=None):
    if _Debug:
        lg.out(_DebugLevel, 'restore_monitor.Start %s to %s' % (backupID, outputLocation))
    global _WorkingBackupIDs
    global _WorkingRestoreProgress
    if backupID in list(_WorkingBackupIDs.keys()):
        return _WorkingBackupIDs[backupID]
    outfd, outfilename = tmpfile.make(
        'restore',
        extension='.tar.gz',
        prefix=backupID.replace('@', '_').replace('.', '_').replace('/', '_').replace(':', '_') + '_',
    )
    from storage import restore_worker
    r = restore_worker.RestoreWorker(backupID, outfd, KeyID=keyID)
    r.MyDeferred.addCallback(restore_done, backupID, outfd, outfilename, outputLocation, callback)
    r.set_block_restored_callback(block_restored_callback)
    r.set_packet_in_callback(packet_in_callback)
    _WorkingBackupIDs[backupID] = r
    _WorkingRestoreProgress[backupID] = {}
    r.automat('init')
    return r
def doReadAndUnserialize(self, *args, **kwargs):
    """
    Action method.
    """
    from transport import gateway
    self.status, self.bytes_received, self.error_message = args[0]
    if _PacketLogFileEnabled:
        lg.out(0, ' \033[2;49;32mRECEIVED %d bytes from %s://%s TID:%s\033[0m' % (
            self.bytes_received, self.proto, self.host, self.transfer_id),
            log_name='packet', showtime=True)
    # DO UNSERIALIZE HERE , no exceptions
    newpacket = gateway.inbox(self)
    if newpacket is None:
        if _Debug:
            lg.out(_DebugLevel, '<<< IN <<< !!!NONE!!! [%s] %s from %s %s' % (
                self.proto.upper().ljust(5), self.status.ljust(8), self.host, os.path.basename(self.filename),))
        # net_misc.ConnectionFailed(None, proto, 'receiveStatusReport %s' % host)
        try:
            fd, _ = tmpfile.make('error', extension='.inbox')
            data = bpio.ReadBinaryFile(self.filename)
            os.write(fd, strng.to_bin('from %s:%s %s\n' % (self.proto, self.host, self.status)))
            os.write(fd, data)
            os.close(fd)
        except:
            lg.exc()
        if os.path.isfile(self.filename):
            try:
                os.remove(self.filename)
            except:
                lg.exc()
        self.automat('unserialize-failed', None)
        return
    self.label = '[%s@%s]' % (newpacket.Command, newpacket.PacketID)
    if _Debug:
        lg.out(_DebugLevel, 'packet_in.doReadAndUnserialize: %s' % newpacket)
    self.automat('valid-inbox-packet', newpacket)
def doBlockPushAndRaid(self, arg):
    """
    Action method.
    """
    newblock = arg
    if newblock is None:
        self.abort()
        self.automat('fail')
        lg.out(_DebugLevel, 'backup.doBlockPushAndRaid ERROR newblock is empty, terminating=%s' % self.terminating)
        lg.warn('failed to encrypt block, ABORTING')
        return
    if self.terminating:
        self.automat('block-raid-done', (newblock.BlockNumber, None))
        lg.out(_DebugLevel, 'backup.doBlockPushAndRaid SKIP, terminating=True')
        return
    fileno, filename = tmpfile.make('raid', extension='.raid')
    serializedblock = newblock.Serialize()
    blocklen = len(serializedblock)
    os.write(fileno, str(blocklen) + ":" + serializedblock)
    os.close(fileno)
    self.workBlocks[newblock.BlockNumber] = filename
    # key_alias = 'master'
    # if self.keyID:
    #     key_alias = packetid.KeyAlias(self.keyID)
    dt = time.time()
    customer_dir = self.customerGlobalID
    # global_id.MakeGlobalID(customer=self.customerGlobalID, key_alias=key_alias)
    outputpath = os.path.join(
        settings.getLocalBackupsDir(), customer_dir, self.pathID, self.version)
    task_params = (filename, self.eccmap.name, self.version, newblock.BlockNumber, outputpath)
    raid_worker.add_task('make', task_params,
                         lambda cmd, params, result: self._raidmakeCallback(params, result, dt),)
    self.automat('block-raid-started', newblock)
    del serializedblock
    if _Debug:
        lg.out(_DebugLevel, 'backup.doBlockPushAndRaid %s : start process data from %s to %s, %d' % (
            newblock.BlockNumber, filename, outputpath, id(self.terminating)))
def download_and_replace_starter(output_func=None):
    repo, locationURL = misc.ReadRepoLocation()
    url = locationURL + settings.WindowsStarterFileName()
    lg.out(6, 'os_windows_update.download_and_replace_starter ' + str(url))
    result = Deferred()

    def _done(x, filename):
        try:
            fin = open(filename, 'rb')
            src = strng.to_text(fin.read())
            fin.close()
        except:
            if output_func:
                output_func('error opening downloaded starter file')
            result.errback(Exception('error opening downloaded starter file'))
            return
        local_filename = os.path.join(GetLocalDir(), settings.WindowsStarterFileName())
        bpio.backup_and_remove(local_filename)
        try:
            os.rename(filename, local_filename)
            lg.out(4, 'os_windows_update.download_and_replace_starter file %s was updated' % local_filename)
        except:
            lg.out(1, 'os_windows_update.download_and_replace_starter ERROR can not rename %s to %s ' % (filename, local_filename))
            lg.exc()
            result.errback(Exception('can not rename the file ' + filename))
            return
        python27dll_path = os.path.join(GetLocalDir(), 'python27.dll')
        if not os.path.exists(python27dll_path):
            lg.out(4, 'os_windows_update.download_and_replace_starter file "python27.dll" not found download from "%s" repo' % repo)
            url = settings.DefaultRepoURL(repo) + 'python27.dll'
            d = net_misc.downloadHTTP(url, python27dll_path)
            d.addCallback(_done_python27_dll, filename)
            d.addErrback(_fail, filename)
            return
        result.callback(1)

    def _done_python27_dll(x, filename):
        lg.out(4, 'os_windows_update.download_and_replace_starter file %s was updated' % filename)
        result.callback(1)

    def _fail(x, filename):
        lg.out(1, 'os_windows_update.download_and_replace_starter FAILED')
        if output_func:
            try:
                output_func(x.getErrorMessage())
            except:
                output_func('error downloading starter')
        try:
            os.remove(filename)
        except:
            lg.out(1, 'os_windows_update.download_and_replace_starter ERROR can not remove ' + filename)
        result.errback(Exception('error downloading starter'))

    fileno, filename = tmpfile.make('all', extension='.starter')
    os.close(fileno)
    d = net_misc.downloadHTTP(url, filename)
    d.addCallback(_done, filename)
    d.addErrback(_fail, filename)
    return result
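# Hypothetical usage sketch (not part of os_windows_update): the function above returns a
# Twisted Deferred, so a caller attaches callbacks to learn whether the starter was replaced.
# It assumes the same module context (lg available, reactor running).
def _on_starter_updated(res):
    lg.out(4, 'starter update finished: %r' % res)

def _on_starter_failed(err):
    lg.out(1, 'starter update failed: %s' % err.getErrorMessage())

d = download_and_replace_starter(output_func=lambda msg: lg.out(6, msg))
d.addCallback(_on_starter_updated)
d.addErrback(_on_starter_failed)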
def inbox(info):
    """
    1) The protocol modules write to temporary files and give us that filename.
    2) We unserialize.
    3) We check that it is for us.
    4) We check that it is from one of our contacts.
    5) We use signed.validate() to check the signature and that number fields are numbers.
    6) Any other sanity checks we can do, and if anything looks funny we toss out the packet.
    7) Then change the filename to the PacketID that it should be, and call the right
       function(s) for this new packet (encryptedblock, scrubber, remotetester,
       customerservice, ...) to dispatch it to the right place(s).
    8) We have to keep track of bandwidth to/from everyone, and make a report every
       24 hours which we send to BitDust sometime in the 24 hours after that.
    """
    global _DoingShutdown
    global _LastInboxPacketTime
    if _DoingShutdown:
        if _Debug:
            lg.out(_DebugLevel - 4, "gateway.inbox ignoring input since _DoingShutdown")
        return None
    if info.filename == "" or not os.path.exists(info.filename):
        lg.err("bad filename=" + info.filename)
        return None
    try:
        data = bpio.ReadBinaryFile(info.filename)
    except:
        lg.err("gateway.inbox ERROR reading file " + info.filename)
        return None
    if len(data) == 0:
        lg.err("gateway.inbox ERROR zero byte file from %s://%s" % (info.proto, info.host))
        return None
    try:
        newpacket = signed.Unserialize(data)
    except:
        lg.err("gateway.inbox ERROR during Unserialize data from %s://%s" % (info.proto, info.host))
        lg.exc()
        return None
    if newpacket is None:
        lg.warn("newpacket from %s://%s is None" % (info.proto, info.host))
        return None
    try:
        Command = newpacket.Command
        OwnerID = newpacket.OwnerID
        CreatorID = newpacket.CreatorID
        PacketID = newpacket.PacketID
        Date = newpacket.Date
        Payload = newpacket.Payload
        RemoteID = newpacket.RemoteID
        Signature = newpacket.Signature
        packet_sz = len(data)
    except:
        lg.err("gateway.inbox ERROR during Unserialize data from %s://%s" % (info.proto, info.host))
        lg.err("data length=" + str(len(data)))
        lg.exc()
        fd, filename = tmpfile.make("other", ".bad")
        os.write(fd, data)
        os.close(fd)
        return None
    _LastInboxPacketTime = time.time()
    if _Debug:
        lg.out(
            _DebugLevel - 8,
            "gateway.inbox [%s] signed by %s|%s (for %s) from %s://%s" % (
                Command,
                nameurl.GetName(OwnerID),
                nameurl.GetName(CreatorID),
                nameurl.GetName(RemoteID),
                info.proto,
                info.host,
            ),
        )
    if _Debug and lg.is_debug(_DebugLevel):
        monitoring()
    control.request_update([("packet", newpacket.PacketID)])
    return newpacket