def _extract_done(retcode, backupID, source_filename, output_location):
    """
    Test callback fired when the restore/extract step finished:
    verifies the restored file is byte-identical to the original,
    then schedules raid_worker shutdown and completes the test deferred.
    """
    # the extract step must have reported success
    assert retcode is True
    print('file size is: %d bytes' % len(bpio.ReadBinaryFile('/tmp/random_file')))
    # restored copy must match the source file exactly
    assert bpio.ReadBinaryFile(
        '/tmp/random_file') == bpio.ReadBinaryFile(
        '/tmp/_some_folder/random_file')
    reactor.callLater(0, raid_worker.A, 'shutdown')  # @UndefinedVariable
    reactor.callLater(0.5, test_done.callback, True)  # @UndefinedVariable
def _check_install(self):
    """
    Return True if Private Key and local identity files exists and both is
    valid.

    Walks through a chain of checks: files on disk exist and are
    non-empty, the private key loads, the local identity parses and
    validates. Logs the failing step and returns False on any failure.
    """
    lg.out(2, 'initializer._check_install')
    from userid import identity
    from crypt import key
    keyfilename = settings.KeyFileName()
    keyfilenamelocation = settings.KeyFileNameLocation()
    # the "location" file may redirect to an alternative key file path;
    # fall back to the default when the redirected path does not exist
    if os.path.exists(keyfilenamelocation):
        keyfilename = bpio.ReadTextFile(keyfilenamelocation)
        if not os.path.exists(keyfilename):
            keyfilename = settings.KeyFileName()
    idfilename = settings.LocalIdentityFilename()
    if not os.path.exists(keyfilename) or not os.path.exists(idfilename):
        lg.out(
            2, 'initializer._check_install local key or local id not exists')
        return False
    current_key = bpio.ReadBinaryFile(keyfilename)
    current_id = bpio.ReadBinaryFile(idfilename)
    # NOTE(review): ReadBinaryFile likely returns bytes on Python 3, so
    # comparison with '' may never be true there - verify
    if current_id == '':
        lg.out(2, 'initializer._check_install local identity is empty ')
        return False
    if current_key == '':
        lg.out(2, 'initializer._check_install private key is empty ')
        return False
    try:
        key.InitMyKey()
    except:
        lg.out(2, 'initializer._check_install fail loading private key ')
        return False
    try:
        ident = identity.identity(xmlsrc=current_id)
    except:
        lg.out(2, 'initializer._check_install fail init local identity ')
        return False
    try:
        res = ident.Valid() and ident.isCorrect()
    except:
        lg.out(
            2, 'initializer._check_install wrong data in local identity ')
        return False
    if not res:
        lg.out(2, 'initializer._check_install local identity is not valid ')
        return False
    lg.out(2, 'initializer._check_install done')
    return True
def doRestoreBlock(self, arg):
    """
    Action method.

    Read the raw block file at ``arg``, parse the "<length>:<data>"
    framing, decrypt/unserialize the payload and fire 'block-restored'
    with the resulting block object, or 'block-failed' on any error.
    """
    filename = arg
    blockbits = bpio.ReadBinaryFile(filename)
    if not blockbits:
        self.automat('block-failed')
        return
    # BUG FIX: index() and slicing were outside the try block, so a file
    # without a ':' separator raised an uncaught ValueError instead of
    # firing 'block-failed' (the newer variant of this method already
    # guards the whole parse).
    try:
        splitindex = blockbits.index(":")
        lengthstring = blockbits[0:splitindex]
        datalength = int(lengthstring)  # real length before raidmake/ECC
        blockdata = blockbits[
            splitindex + 1:splitindex + 1 + datalength]  # remove padding from raidmake/ECC
        newblock = encrypted.Unserialize(
            blockdata, decrypt_key=self.KeyID)  # convert to object
    except:
        lg.exc()
        self.automat('block-failed')
        return
    # Unserialize may return None on bad input - treat that as a failure too
    if not newblock:
        self.automat('block-failed')
        return
    self.automat('block-restored', (
        newblock,
        filename,
    ))
def render_POST(self, request):
    """
    Twisted web resource handler: deliver every queued outgoing file
    for the node identified by the "idurl" request header, base64
    encoded and newline separated, then clear that node's outbox queue.
    Returns an empty string when the header is missing or no queue
    exists for that idurl.
    """
    global _Outbox
    idurl = request.getHeader('idurl')
    if idurl is None:
        return ''
    lg.out(14, 'http_node.SenderServer.render connection from ' + idurl)
    if idurl not in list(_Outbox.keys()):
        return ''
    r = ''
    for filename in _Outbox[idurl]:
        # skip queue entries that disappeared or are not readable
        if not os.path.isfile(filename):
            continue
        if not os.access(filename, os.R_OK):
            continue
        src = bpio.ReadBinaryFile(filename)
        # NOTE(review): on Python 3 ReadBinaryFile would return bytes, so
        # the '' comparison and the str concatenation below look
        # Python 2 only - verify before porting
        if src == '':
            continue
        src64 = base64.b64encode(src)
        r += src64 + '\n'
        lg.out(
            12, 'http_node.SenderServer.render sent %s to %s' % (filename, idurl))
        #TODO request.getPeer()
        # transport_control.sendStatusReport(
        #     request.getClient(),
        #     filename,
        #     'finished',
        #     'http',)
    _Outbox.pop(idurl, None)
    return r
def doReadAndUnserialize(self, *args, **kwargs):
    """
    Action method.

    Unpack the (status, bytes_received, error_message) tuple from the
    transport, unserialize the received file via gateway.inbox() and
    fire 'valid-inbox-packet' on success. On failure, dump the raw
    input into an '.inbox' error file for debugging, remove the
    original temp file and fire 'unserialize-failed'.
    """
    from transport import gateway
    self.status, self.bytes_received, self.error_message = args[0]
    if _PacketLogFileEnabled:
        lg.out(
            0, ' \033[2;49;32mRECEIVED %d bytes from %s://%s TID:%s\033[0m' %
            (self.bytes_received, self.proto, self.host, self.transfer_id),
            log_name='packet', showtime=True)
    # DO UNSERIALIZE HERE , no exceptions
    newpacket = gateway.inbox(self)
    if newpacket is None:
        if _Debug:
            lg.out(
                _DebugLevel,
                '<<< IN <<< !!!NONE!!! [%s] %s from %s %s' % (
                    self.proto.upper().ljust(5),
                    self.status.ljust(8),
                    self.host,
                    os.path.basename(self.filename),
                ))
        # net_misc.ConnectionFailed(None, proto, 'receiveStatusReport %s' % host)
        # keep the bad input around in a tmp '.inbox' file for later analysis
        try:
            fd, _ = tmpfile.make('error', extension='.inbox')
            data = bpio.ReadBinaryFile(self.filename)
            os.write(
                fd,
                strng.to_bin('from %s:%s %s\n' %
                             (self.proto, self.host, self.status)))
            os.write(fd, data)
            os.close(fd)
        except:
            lg.exc()
        if os.path.isfile(self.filename):
            try:
                os.remove(self.filename)
            except:
                lg.exc()
        self.automat('unserialize-failed', None)
        return
    self.label = '[%s(%s)]' % (newpacket.Command, newpacket.PacketID)
    if _Debug:
        lg.out(_DebugLevel, 'packet_in.doReadAndUnserialize: %s' % newpacket)
    self.automat('valid-inbox-packet', newpacket)
    # disabled event notification, kept for reference
    if False:
        events.send('inbox-packet-recevied', data=dict(
            packet_id=newpacket.PacketID,
            command=newpacket.Command,
            creator_id=newpacket.CreatorID,
            date=newpacket.Date,
            size=len(newpacket.Payload),
            remote_id=newpacket.RemoteID,
        ))
def doRestoreBlock(self, *args, **kwargs):
    """
    Action method.

    Read the raw block file named in args[0], parse the
    "<length>:<data>" framing, decrypt/unserialize the payload and
    fire 'block-restored' with the block object, or 'block-failed' on
    empty input, parse error or a falsy unserialize result.
    """
    filename = args[0]
    blockbits = bpio.ReadBinaryFile(filename)
    if not blockbits:
        self.automat('block-failed')
        return
    try:
        splitindex = blockbits.index(b":")
        lengthstring = blockbits[0:splitindex]
        datalength = int(lengthstring)  # real length before raidmake/ECC
        blockdata = blockbits[
            splitindex + 1:splitindex + 1 + datalength]  # remove padding from raidmake/ECC
        newblock = encrypted.Unserialize(
            blockdata, decrypt_key=self.key_id)  # convert to object
    except:
        self.automat('block-failed')
        # only dump the whole raw block into the log when debugging
        if _Debug:
            lg.exc('bad block: %r' % blockbits)
        else:
            lg.exc()
        return
    if not newblock:
        self.automat('block-failed')
        return
    self.automat('block-restored', (
        newblock,
        filename,
    ))
def step1(version_digest):
    """
    First step of the Windows auto-update flow: compare the freshly
    received version digest with the locally stored checksum. Stops
    early (optionally notifying the UI) when they match or when the GUI
    is running and the update was not user-initiated; otherwise starts
    downloading the update info and chains into step2.
    """
    lg.out(4, 'os_windows_update.step1')
    global _UpdatingInProgress
    global _CurrentVersionDigest
    global _NewVersionNotifyFunc
    global _UpdatingByUser
    _CurrentVersionDigest = str(version_digest).strip()
    local_checksum = bpio.ReadBinaryFile(settings.CheckSumFile()).strip()
    if local_checksum == _CurrentVersionDigest:
        lg.out(
            6, 'os_windows_update.step1 no need to update, checksums are equal')
        _UpdatingInProgress = False
        if _NewVersionNotifyFunc is not None:
            _NewVersionNotifyFunc(_CurrentVersionDigest)
        return
    # do not update behind the user's back while the GUI process is running
    appList = bpio.find_process([
        'bpgui.',
    ])
    if len(appList) > 0:
        if not _UpdatingByUser:
            lg.out(
                6,
                'os_windows_update.step1 bpgui is running, ask user to update.'
            )
            _UpdatingInProgress = False
            if _NewVersionNotifyFunc is not None:
                _NewVersionNotifyFunc(_CurrentVersionDigest)
            return
    d = download_info()
    d.addCallback(step2, _CurrentVersionDigest)
    d.addErrback(fail)
def _s():
    """
    Test helper: build one signed Data packet from the file given in
    args[1], send it wide via outbox() and bump the global counter.
    """
    p = signed.Packet(commands.Data(), my_id.getLocalID(),
                      my_id.getLocalID(), my_id.getLocalID(),
                      bpio.ReadBinaryFile(args[1]), args[0])
    outbox(p, wide=True)
    lg.out(2, 'OUTBOX %d : %r' % (globals()['num_out'], p))
    globals()['num_out'] += 1
def cb(path, subpath, name):
    """
    Directory-walk visitor that validates one stored packet file and
    deletes it when it is empty, cannot be unserialized, or carries an
    invalid signature.

    Returns True only for non-files (directories) so the walk descends
    into them; returns False for every regular file.
    """
    if not os.path.isfile(path):
        return True
    packetsrc = bpio.ReadBinaryFile(path)
    if not packetsrc:
        try:
            os.remove(
                path)  # if is is no good it is of no use to anyone
            printlog('Validate %r removed (empty file)' % path)
        except:
            printlog('Validate ERROR removing %r' % path)
        return False
    p = signed.Unserialize(packetsrc)
    if p is None:
        try:
            os.remove(
                path)  # if is is no good it is of no use to anyone
            printlog('Validate %r removed (unserialize error)' % path)
        except:
            # BUG FIX: original called printlog('Validate ERROR removing %r')
            # without interpolating the path, printing a literal '%r'
            printlog('Validate ERROR removing %r' % path)
        return False
    result = p.Valid()
    packetsrc = ''
    del p
    if not result:
        try:
            os.remove(
                path)  # if is is no good it is of no use to anyone
            printlog('Validate %r removed (invalid packet)' % path)
        except:
            printlog('Validate ERROR removing %r' % path)
        return False
    # throttle the scan to keep disk/CPU load low
    time.sleep(0.1)
    return False
def cb(path, subpath, name):
    """
    Tree-walk callback validating one stored packet file.

    Files that are empty, unparsable or fail signature validation are
    removed from disk. Returns True only for directories, so that the
    walk continues into them; regular files always return False.
    """
    # if not os.access(path, os.R_OK | os.W_OK):
    #     return False
    if not os.path.isfile(path):
        return True
    # if name in [settings.BackupIndexFileName(),]:
    #     return False
    raw = bpio.ReadBinaryFile(path)
    if not raw:
        try:
            os.remove(path)  # if is is no good it is of no use to anyone
            printlog('Validate ' + path + ' removed (empty file)')
        except:
            printlog('Validate ERROR removing ' + path)
        return False
    pkt = signed.Unserialize(raw)
    if pkt is None:
        try:
            os.remove(path)  # if is is no good it is of no use to anyone
            printlog('Validate ' + path + ' removed (unserialize error)')
        except:
            printlog('Validate ERROR removing ' + path)
        return False
    is_valid = pkt.Valid()
    raw = ''
    del pkt
    if not is_valid:
        try:
            os.remove(path)  # if is is no good it is of no use to anyone
            printlog('Validate ' + path + ' removed (invalid packet)')
        except:
            printlog('Validate ERROR removing ' + path)
        return False
    # throttle the scan a little to keep disk and CPU load low
    time.sleep(0.1)
    return False
def file_hash(path):
    """
    Return the hash of the file content at ``path``, or None when the
    file is missing, unreadable or empty.
    """
    content = bpio.ReadBinaryFile(path)
    return get_hash(content) if content else None
def _success(x):
    """
    Callback fired with the downloaded version digest: remember it in
    the global, log it next to the locally stored checksum and notify
    the registered listener. Returns ``x`` unchanged so it can be
    chained in a Deferred callback chain.
    """
    global _CurrentVersionDigest
    global _NewVersionNotifyFunc
    _CurrentVersionDigest = str(x)
    local_version = bpio.ReadBinaryFile(settings.CheckSumFile())
    lg.out(6, 'os_windows_update.check._success local=%s current=%s' %
           (local_version, _CurrentVersionDigest))
    if _NewVersionNotifyFunc is not None:
        _NewVersionNotifyFunc(_CurrentVersionDigest)
    return x
def _send(c):
    """
    Test helper: send one signed Data packet (payload read from the
    file named in sys.argv[1]) to every idurl listed in sys.argv[2:],
    then schedule itself again until the countdown ``c`` reaches 1.
    """
    from transport.udp import udp_stream
    for idurl in sys.argv[2:]:
        print('_send', list(udp_stream.streams().keys()))
        p = signed.Packet(commands.Data(), my_id.getLocalID(),
                          my_id.getLocalID(), 'packet%d' % c,
                          bpio.ReadBinaryFile(sys.argv[1]), idurl)
        gateway.outbox(p)
    if c > 1:
        reactor.callLater(0.01, _send, c - 1)
def backup_done(bid, result):
    """
    Test-flow callback: after backup ``bid`` finished, sign every piece
    into Data packets stored under "<bid>.out", unserialize them back
    into "<bid>.inp" (simulating a round-trip to suppliers), and start
    a RestoreWorker on the recovered pieces.
    """
    from crypt import signed
    try:
        os.mkdir(os.path.join(settings.getLocalBackupsDir(), bid + '.out'))
    except:
        pass
    # wrap each produced piece into a signed Data packet
    for filename in os.listdir(os.path.join(settings.getLocalBackupsDir(), bid)):
        filepath = os.path.join(settings.getLocalBackupsDir(), bid, filename)
        payld = str(bpio.ReadBinaryFile(filepath))
        outpacket = signed.Packet('Data', my_id.getLocalID(),
                                  my_id.getLocalID(), filename, payld,
                                  'http://megafaq.ru/cvps1010.xml')
        newfilepath = os.path.join(settings.getLocalBackupsDir(),
                                   bid + '.out', filename)
        bpio.WriteBinaryFile(newfilepath, outpacket.Serialize())
    # Assume we delivered all pieces from ".out" to suppliers and lost original data
    # Then we requested the data back and got it into ".inp"
    try:
        os.mkdir(os.path.join(settings.getLocalBackupsDir(), bid + '.inp'))
    except:
        pass
    for filename in os.listdir(
            os.path.join(settings.getLocalBackupsDir(), bid + '.out')):
        filepath = os.path.join(settings.getLocalBackupsDir(), bid + '.out',
                                filename)
        data = bpio.ReadBinaryFile(filepath)
        inppacket = signed.Unserialize(data)
        assert inppacket
        assert inppacket.Valid()
        newfilepath = os.path.join(settings.getLocalBackupsDir(),
                                   bid + '.inp', filename)
        bpio.WriteBinaryFile(newfilepath, inppacket.Payload)
    # Now do restore from input data
    backupID = bid + '.inp'
    outfd, tarfilename = tmpfile.make(
        'restore',
        extension='.tar.gz',
        prefix=backupID.replace('/', '_') + '_',
    )
    r = restore_worker.RestoreWorker(backupID, outfd)
    r.MyDeferred.addBoth(restore_done, tarfilename)
    reactor.callLater(1, r.automat, 'init')
def identity_recover_v1(self, request):
    """
    REST handler: recover the identity from a private key that the
    caller supplied either inline ('private_key_source') or as a path
    to a local file ('private_key_local_file').
    """
    data = _request_data(request)
    key_src = data.get('private_key_source')
    if not key_src:
        local_file = data.get('private_key_local_file')
        if local_file:
            from system import bpio
            # read the key material from the given local file path
            key_src = bpio.ReadBinaryFile(bpio.portablePath(local_file))
    return api.identity_recover(
        private_key_source=key_src,
        known_idurl=data.get('known_idurl'),
    )
def doSuppliersSendIndexFile(self, arg):
    """
    Action method.

    Read the local backup index file, seal it into an encrypted Block
    and send it as a Data packet to every currently online supplier,
    tracking which suppliers a send was started for.
    """
    if _Debug:
        lg.out(_DebugLevel, 'index_synchronizer.doSuppliersSendIndexFile')
    packetID = global_id.MakeGlobalID(
        customer=my_id.getGlobalID(key_alias='master'),
        path=settings.BackupIndexFileName(),
    )
    self.sending_suppliers.clear()
    self.sent_suppliers_number = 0
    src = bpio.ReadBinaryFile(settings.BackupIndexFilePath())
    localID = my_id.getLocalID()
    b = encrypted.Block(
        localID,
        packetID,
        0,
        key.NewSessionKey(),
        key.SessionKeyType(),
        True,
        src,
    )
    Payload = b.Serialize()
    for supplierId in contactsdb.suppliers():
        if not supplierId:
            continue
        # skip suppliers currently known to be offline
        if not contact_status.isOnline(supplierId):
            continue
        newpacket, pkt_out = p2p_service.SendData(
            raw_data=Payload,
            ownerID=localID,
            creatorID=localID,
            remoteID=supplierId,
            packetID=packetID,
            callbacks={
                commands.Ack(): self._on_supplier_acked,
                commands.Fail(): self._on_supplier_acked,
            },
        )
        # newpacket = signed.Packet(
        #     commands.Data(), localID, localID, packetID,
        #     Payload, supplierId)
        # pkt_out = gateway.outbox(newpacket, callbacks={
        #     commands.Ack(): self._on_supplier_acked,
        #     commands.Fail(): self._on_supplier_acked, })
        if pkt_out:
            self.sending_suppliers.add(supplierId)
            self.sent_suppliers_number += 1
        if _Debug:
            lg.out(
                _DebugLevel, ' %s sending to %s' %
                (newpacket, nameurl.GetName(supplierId)))
def doSuppliersSendIndexFile(self, *args, **kwargs):
    """
    Action method.

    Read the local backup index, seal it into an encrypted Block and
    send it as a Data packet to every supplier whose connector is in
    the 'CONNECTED' state and who is not reported offline; remembers
    the started sends and outgoing packet IDs for later bookkeeping.
    """
    if _Debug:
        lg.out(_DebugLevel, 'index_synchronizer.doSuppliersSendIndexFile')
    packetID = global_id.MakeGlobalID(
        customer=my_id.getGlobalID(key_alias='master'),
        path=settings.BackupIndexFileName(),
    )
    self.sending_suppliers.clear()
    self.outgoing_packets_ids = []
    self.sent_suppliers_number = 0
    localID = my_id.getIDURL()
    b = encrypted.Block(
        CreatorID=localID,
        BackupID=packetID,
        BlockNumber=0,
        SessionKey=key.NewSessionKey(
            session_key_type=key.SessionKeyType()),
        SessionKeyType=key.SessionKeyType(),
        LastBlock=True,
        Data=bpio.ReadBinaryFile(settings.BackupIndexFilePath()),
    )
    Payload = b.Serialize()
    for supplier_idurl in contactsdb.suppliers():
        if not supplier_idurl:
            continue
        # only send to suppliers with an established connection
        sc = supplier_connector.by_idurl(supplier_idurl)
        if sc is None or sc.state != 'CONNECTED':
            continue
        if online_status.isOffline(supplier_idurl):
            continue
        newpacket, pkt_out = p2p_service.SendData(
            raw_data=Payload,
            ownerID=localID,
            creatorID=localID,
            remoteID=supplier_idurl,
            packetID=packetID,
            callbacks={
                commands.Ack(): self._on_supplier_acked,
                commands.Fail(): self._on_supplier_acked,
            },
        )
        if pkt_out:
            self.sending_suppliers.add(supplier_idurl)
            self.sent_suppliers_number += 1
            self.outgoing_packets_ids.append(packetID)
        if _Debug:
            lg.out(
                _DebugLevel, ' %s sending to %s' %
                (newpacket, nameurl.GetName(supplier_idurl)))
def step4(version_digest):
    """
    Final step of the Windows auto-update flow: compare the downloaded
    digest against the local checksum and, depending on the configured
    updates mode and whether the user initiated the update, either
    restart the application to apply the update or just notify the UI.
    """
    lg.out(4, 'os_windows_update.step4')
    global _UpdatingInProgress
    global _CurrentVersionDigest
    global _NewVersionNotifyFunc
    global _UpdatingByUser
    _CurrentVersionDigest = str(version_digest)
    local_version = bpio.ReadBinaryFile(settings.CheckSumFile())
    if local_version == _CurrentVersionDigest:
        lg.out(6, 'os_windows_update.step4 no need to update')
        _UpdatingInProgress = False
        return
    lg.out(
        6, 'os_windows_update.step4 local=%s current=%s ' %
        (local_version, _CurrentVersionDigest))
    # mode values: [0] - install automatically, [2] - only notify; a
    # scheduled run must not install when mode says notify-only
    if settings.getUpdatesMode() == settings.getUpdatesModeValues(
    )[2] and not _UpdatingByUser:
        lg.out(
            6,
            'os_windows_update.step4 run scheduled, but mode is %s, skip now'
            % settings.getUpdatesMode())
        return
    if _UpdatingByUser or settings.getUpdatesMode(
    ) == settings.getUpdatesModeValues()[0]:
        # info_file_path = os.path.join(bpio.getExecutableDir(), settings.FilesDigestsFilename())
        info_file_path = settings.InfoFile()
        if os.path.isfile(info_file_path):
            try:
                os.remove(info_file_path)
            except:
                lg.out(
                    1, 'os_windows_update.step4 ERROR can no remove ' +
                    info_file_path)
                lg.exc()
        param = ''
        if _UpdatingByUser:
            param = 'show'
        from main import shutdowner
        # restart the app; re-open the GUI when the user started the update
        if param == 'show':
            shutdowner.A('stop', 'restartnshow')
        else:
            shutdowner.A('stop', 'restart')
    else:
        if _NewVersionNotifyFunc is not None:
            _NewVersionNotifyFunc(_CurrentVersionDigest)
def doReadAndUnserialize(self, arg):
    """
    Action method.

    Unpack the (status, bytes_received, error_message) tuple from the
    transport, unserialize the received file via gateway.inbox() and
    fire 'valid-inbox-packet' on success. On failure, dump the raw
    input into an '.inbox' error file, remove the temp file and fire
    'unserialize-failed'.
    """
    from transport import gateway
    self.status, self.bytes_received, self.error_message = arg
    # DO UNSERIALIZE HERE , no exceptions
    newpacket = gateway.inbox(self)
    if newpacket is None:
        if _Debug:
            lg.out(
                _DebugLevel,
                '<<< IN <<< !!!NONE!!! [%s] %s from %s %s' % (
                    self.proto.upper().ljust(5),
                    self.status.ljust(8),
                    self.host,
                    os.path.basename(self.filename),
                ))
        # net_misc.ConnectionFailed(None, proto, 'receiveStatusReport %s' % host)
        # keep the bad input around in a tmp '.inbox' file for analysis
        try:
            fd, _ = tmpfile.make('error', extension='.inbox')
            data = bpio.ReadBinaryFile(self.filename)
            # NOTE(review): os.write() requires bytes on Python 3, this
            # passes a str - looks Python 2 era; the newer variant wraps
            # it with strng.to_bin() - verify
            os.write(
                fd,
                'from %s:%s %s\n' % (self.proto, self.host, self.status))
            os.write(fd, data)
            os.close(fd)
        except:
            lg.exc()
        try:
            os.remove(self.filename)
        except:
            lg.exc()
        self.automat('unserialize-failed', None)
        return
    self.label += '_%s[%s]' % (newpacket.Command, newpacket.PacketID)
    if _Debug:
        lg.out(_DebugLevel + 2,
               'packet_in.doReadAndUnserialize: %s' % newpacket)
    self.automat('valid-inbox-packet', newpacket)
    events.send('inbox-packet-recevied', data=dict(
        packet_id=newpacket.PacketID,
        command=newpacket.Command,
        creator_id=newpacket.CreatorID,
        date=newpacket.Date,
        size=len(newpacket.Payload),
        remote_id=newpacket.RemoteID,
    ))
def _bk_done(bid, result):
    """
    Test callback: once backup ``bid`` completed, sign every produced
    piece into a Data packet stored under "<remotePath>.out" for that
    customer, then stop the reactor.
    """
    from crypt import signed
    customer, remotePath = packetid.SplitPacketID(bid)
    try:
        os.mkdir(os.path.join(settings.getLocalBackupsDir(), customer,
                              remotePath + '.out'))
    except:
        pass
    for filename in os.listdir(os.path.join(settings.getLocalBackupsDir(),
                                            customer, remotePath)):
        filepath = os.path.join(settings.getLocalBackupsDir(), customer,
                                remotePath, filename)
        payld = bpio.ReadBinaryFile(filepath)
        newpacket = signed.Packet(
            'Data', my_id.getLocalID(), my_id.getLocalID(), filename,
            payld, 'http://megafaq.ru/cvps1010.xml')
        newfilepath = os.path.join(settings.getLocalBackupsDir(), customer,
                                   remotePath + '.out', filename)
        bpio.WriteBinaryFile(newfilepath, newpacket.Serialize())
    reactor.stop()
def doSendData(self, *args, **kwargs):
    """
    Action method.

    Read the outgoing file from disk and hand its content to
    p2p_service for delivery; fires the 'error' event when the file
    can not be read or is empty.
    """
    file_bytes = bpio.ReadBinaryFile(self.fileName)
    if not file_bytes:
        self.event('error', Exception('file %r reading error' % self.fileName))
        return
    # both Ack and Fail responses are routed to the same parent handler
    p2p_service.SendData(
        raw_data=file_bytes,
        ownerID=self.ownerID,
        creatorID=self.parent.creatorID,
        remoteID=self.remoteID,
        packetID=self.packetID,
        callbacks={
            commands.Ack(): self.parent.OnFileSendAckReceived,
            commands.Fail(): self.parent.OnFileSendAckReceived,
        },
    )
    self.sendTime = time.time()
def show():
    """
    Open the default web browser pointing at the locally running web
    interface: use the in-memory _WSGIPort when known, otherwise read
    the port number from the file on disk.
    """
    global _WSGIPort
    port = _WSGIPort
    if port is None:
        # fall back to the port number persisted on disk
        try:
            port = int(
                bpio.ReadBinaryFile(settings.LocalWSGIPortFilename()))
        except:
            port = None
        if not port:
            if _Debug:
                lg.out(
                    _DebugLevel,
                    'control.show SKIP, LocalWebPort is None, %s is empty'
                    % settings.LocalWSGIPortFilename())
            return
    if _Debug:
        lg.out(_DebugLevel, 'control.show on port %d' % port)
    webbrowser.open('http://localhost:%d' % port)
def show():
    """
    Open the default web browser pointing at the local web interface:
    use the in-memory _WSGIPort when known, otherwise read the port
    number stored on disk.
    """
    global _WSGIPort
    if _WSGIPort is not None:
        lg.out(4, 'control.show on port %d' % _WSGIPort)
        webbrowser.open('http://localhost:%d' % _WSGIPort)
        # webbrowser.open_new('http://127.0.0.1:%d' % _WSGIPort)
        # webbrowser.open_new('http://localhost/:%d' % _WSGIPort)
        # webbrowser.open_new('http://localhost:8080')
        # webbrowser.open('http://localhost:8080')
    else:
        # fall back to the port number persisted on disk
        try:
            local_port = int(
                bpio.ReadBinaryFile(
                    settings.LocalWSGIPortFilename()))
        except:
            local_port = None
        if not local_port:
            lg.out(
                4, 'control.show SKIP, LocalWebPort is None, %s is empty' %
                settings.LocalWSGIPortFilename())
        else:
            lg.out(4, 'control.show on port %d' % local_port)
            webbrowser.open('http://localhost:%d' % local_port)
def rewrite_indexes(db_instance, source_db_instance):
    """
    Replace all CodernityDB index definitions in ``db_instance`` with
    the ones from ``source_db_instance``: first remove every existing
    secondary index (and its '_buck'/'_stor' companion files), then
    copy over each source index definition together with its bucket
    and storage files. The primary '00id.py' index is never touched.
    """
    if _Debug:
        lg.out(_DebugLevel, 'coins_db.rewrite_indexes')
    source_location = os.path.join(source_db_instance.path, '_indexes')
    source_indexes = os.listdir(source_location)
    existing_location = os.path.join(db_instance.path, '_indexes')
    existing_indexes = os.listdir(existing_location)
    # drop all existing secondary indexes and their data files
    for existing_index_file in existing_indexes:
        if existing_index_file != '00id.py':
            index_name = existing_index_file[2:existing_index_file.index('.')]
            existing_index_path = os.path.join(existing_location,
                                               existing_index_file)
            os.remove(existing_index_path)
            if _Debug:
                lg.out(_DebugLevel,
                       ' removed index at %s' % existing_index_path)
            buck_path = os.path.join(db_instance.path, index_name + '_buck')
            if os.path.isfile(buck_path):
                os.remove(buck_path)
                if _Debug:
                    lg.out(_DebugLevel, ' also bucket at %s' % buck_path)
            stor_path = os.path.join(db_instance.path, index_name + '_stor')
            if os.path.isfile(stor_path):
                os.remove(stor_path)
                if _Debug:
                    lg.out(_DebugLevel, ' also storage at %s' % stor_path)
    # copy every secondary index from the source database
    for source_index_file in source_indexes:
        if source_index_file != '00id.py':
            index_name = source_index_file[2:source_index_file.index('.')]
            destination_index_path = os.path.join(existing_location,
                                                  source_index_file)
            source_index_path = os.path.join(source_location,
                                             source_index_file)
            if not bpio.WriteTextFile(destination_index_path,
                                      bpio.ReadTextFile(source_index_path)):
                lg.warn('failed writing index to %s' % destination_index_path)
                continue
            destination_buck_path = os.path.join(db_instance.path,
                                                 index_name + '_buck')
            source_buck_path = os.path.join(source_db_instance.path,
                                            index_name + '_buck')
            if not bpio.WriteBinaryFile(
                    destination_buck_path,
                    bpio.ReadBinaryFile(source_buck_path)):
                lg.warn('failed writing index bucket to %s' %
                        destination_buck_path)
                continue
            destination_stor_path = os.path.join(db_instance.path,
                                                 index_name + '_stor')
            source_stor_path = os.path.join(source_db_instance.path,
                                            index_name + '_stor')
            if not bpio.WriteBinaryFile(
                    destination_stor_path,
                    bpio.ReadBinaryFile(source_stor_path)):
                lg.warn('failed writing index storage to %s' %
                        destination_stor_path)
                continue
            if _Debug:
                lg.out(
                    _DebugLevel, ' wrote index %s from %s' %
                    (index_name, source_index_path))
def RunSend(self):
    """
    Walk the outgoing file queue once: send pending packets, collect
    timed-out/failed entries and purge finished ones.

    Guarded by ``self._runSend`` so it never re-enters itself.
    Returns the number of items processed (max of removed and sent),
    or None when a previous run is still in progress.
    """
    if self._runSend:
        return
    self._runSend = True
    #out(6, 'io_throttle.RunSend')
    packetsFialed = {}
    packetsToRemove = set()
    packetsSent = 0
    # let's check all packets in the queue
    for i in range(len(self.fileSendQueue)):
        try:
            packetID = self.fileSendQueue[i]
        except:
            lg.warn("item at position %d not exist in send queue" % i)
            continue
        fileToSend = self.fileSendDict[packetID]
        # we got notify that this packet was failed to send
        if packetID in self.sendFailedPacketIDs:
            self.sendFailedPacketIDs.remove(packetID)
            packetsFialed[packetID] = 'failed'
            continue
        # we already sent the file
        if fileToSend.sendTime is not None:
            packetsSent += 1
            # and we got ack
            if fileToSend.ackTime is not None:
                # deltaTime = fileToSend.ackTime - fileToSend.sendTime
                # so remove it from queue
                packetsToRemove.add(packetID)
            # if we do not get an ack ...
            else:
                # ... we do not want to wait to long
                if time.time() - fileToSend.sendTime > fileToSend.sendTimeout:
                    # so this packet is failed because no response on it
                    packetsFialed[packetID] = 'timeout'
            # we sent this packet already - check next one
            continue
        # the data file to send no longer exists - it is failed situation
        if not os.path.exists(fileToSend.fileName):
            lg.warn("file %s not exist" % (fileToSend.fileName))
            packetsFialed[packetID] = 'not exist'
            continue
        # do not send too many packets, need to wait for ack
        # hold other packets in the queue and may be send next time
        if packetsSent > self.fileSendMaxLength:
            # if we sending big file - we want to wait
            # other packets must go without waiting in the queue
            # 10K seems fine, because we need to filter only Data and Parity packets here
            try:
                if os.path.getsize(fileToSend.fileName) > 1024 * 10:
                    continue
            except:
                lg.exc()
                continue
        # prepare the packet
        # dt = time.time()
        Payload = bpio.ReadBinaryFile(fileToSend.fileName)
        # newpacket = signed.Packet(
        #     commands.Data(),
        #     fileToSend.ownerID,
        #     self.creatorID,
        #     fileToSend.packetID,
        #     Payload,
        #     fileToSend.remoteID,
        # )
        p2p_service.SendData(
            raw_data=Payload,
            ownerID=fileToSend.ownerID,
            creatorID=self.creatorID,
            remoteID=fileToSend.remoteID,
            packetID=fileToSend.packetID,
            callbacks={
                commands.Ack(): self.OnFileSendAckReceived,
                commands.Fail(): self.OnFileSendAckReceived,
            },
        )
        # outbox will not resend, because no ACK, just data,
        # need to handle resends on own
        # transport_control.outboxNoAck(newpacket)
        # gateway.outbox(newpacket, callbacks={
        #     commands.Ack(): self.OnFileSendAckReceived,
        #     commands.Fail(): self.OnFileSendAckReceived,
        # })
        # str(bpio.ReadBinaryFile(fileToSend.fileName))
        # {commands.Ack(): self.OnFileSendAckReceived,
        #  commands.Fail(): self.OnFileSendAckReceived}
        # transport_control.RegisterInterest(
        #     self.OnFileSendAckReceived,
        #     fileToSend.remoteID,
        #     fileToSend.packetID)
        # callback.register_interest(self.OnFileSendAckReceived, fileToSend.remoteID, fileToSend.packetID)
        # lg.out(12, 'io_throttle.RunSend %s to %s, dt=%s' % (
        #     str(newpacket), nameurl.GetName(fileToSend.remoteID), str(time.time()-dt)))
        # mark file as been sent
        fileToSend.sendTime = time.time()
        packetsSent += 1
    # process failed packets
    for packetID, why in packetsFialed.items():
        self.OnFileSendFailReceived(self.fileSendDict[packetID].remoteID,
                                    packetID, why)
        packetsToRemove.add(packetID)
    # remove finished packets
    for packetID in packetsToRemove:
        self.fileSendQueue.remove(packetID)
        del self.fileSendDict[packetID]
        if _Debug:
            lg.out(_DebugLevel, "io_throttle.RunSend removed %s from %s sending queue, %d more items" % (
                packetID, self.remoteName, len(self.fileSendQueue)))
    # if sending queue is empty - remove all records about packets failed to send
    if len(self.fileSendQueue) == 0:
        del self.sendFailedPacketIDs[:]
    # remember results
    result = max(len(packetsToRemove), packetsSent)
    # erase temp lists
    del packetsFialed
    del packetsToRemove
    self._runSend = False
    return result
    # tail of MakePacket(): build the signed packet and return it
    result = Packet(Command, OwnerID, CreatorID, PacketID, Payload, RemoteID)
    return result


def MakePacketInThread(CallBackFunc, Command, OwnerID, CreatorID, PacketID,
                       Payload, RemoteID):
    """
    Signing packets is not atomic operation, so can be moved out from the
    main thread.

    The resulting packet is delivered asynchronously via ``CallBackFunc``.
    """
    d = threads.deferToThread(MakePacket, Command, OwnerID, CreatorID,
                              PacketID, Payload, RemoteID)
    d.addCallback(CallBackFunc)


def MakePacketDeferred(Command, OwnerID, CreatorID, PacketID, Payload,
                       RemoteID):
    """
    Another nice way to create a signed packet .

    Returns a Deferred that fires with the packet once signing finished
    in a worker thread.
    """
    return threads.deferToThread(MakePacket, Command, OwnerID, CreatorID,
                                 PacketID, Payload, RemoteID)

#------------------------------------------------------------------------------

if __name__ == '__main__':
    # manual test: unserialize and print a packet stored in a file
    bpio.init()
    lg.set_debug_level(18)
    from main import settings
    settings.init()
    key.InitMyKey()
    p = Unserialize(bpio.ReadBinaryFile(sys.argv[1]))
    print(p)
def inbox(info):
    """
    1) The protocol modules write to temporary files and gives us that filename
    2) We unserialize
    3) We check that it is for us
    4) We check that it is from one of our contacts.
    5) We use signed.validate() to check signature and that number fields are numbers
    6) Any other sanity checks we can do and if anything funny we toss out the packet .
    7) Then change the filename to the PackedID that it should be. and call the right
       function(s) for this new packet (encryptedblock, scrubber, remotetester,
       customerservice, ...) to dispatch it to right place(s).
    8) We have to keep track of bandwidth to/from everyone, and make a report every
       24 hours which we send to BitDust sometime in the 24 hours after that.
    """
    global _LastInboxPacketTime
    # if _DoingShutdown:
    #     if _Debug:
    #         lg.out(_DebugLevel - 4, "gateway.inbox ignoring input since _DoingShutdown ")
    #     return None
    if info.filename == "" or not os.path.exists(info.filename):
        lg.err("bad filename=" + info.filename)
        return None
    try:
        data = bpio.ReadBinaryFile(info.filename)
    except:
        lg.err("gateway.inbox ERROR reading file " + info.filename)
        return None
    if len(data) == 0:
        lg.err("gateway.inbox ERROR zero byte file from %s://%s" %
               (info.proto, info.host))
        return None
    # registered receiving callbacks may consume/filter the data entirely
    if callback.run_finish_file_receiving_callbacks(info, data):
        lg.warn(
            'incoming data of %d bytes was filtered out in file receiving callbacks'
            % len(data))
        return None
    try:
        newpacket = signed.Unserialize(data)
    except:
        lg.err("gateway.inbox ERROR during Unserialize data from %s://%s" %
               (info.proto, info.host))
        lg.exc()
        return None
    if newpacket is None:
        lg.warn("newpacket from %s://%s is None" % (info.proto, info.host))
        return None
    # newpacket.Valid() will be called later in the flow in packet_in.handle() method
    # touch every field once so a malformed packet fails here, not later
    try:
        Command = newpacket.Command
        OwnerID = newpacket.OwnerID
        CreatorID = newpacket.CreatorID
        PacketID = newpacket.PacketID
        Date = newpacket.Date
        Payload = newpacket.Payload
        RemoteID = newpacket.RemoteID
        Signature = newpacket.Signature
        packet_sz = len(data)
    except:
        lg.err("gateway.inbox ERROR during Unserialize data from %s://%s" %
               (info.proto, info.host))
        lg.err("data length=" + str(len(data)))
        lg.exc()
        # fd, filename = tmpfile.make('other', '.bad')
        # os.write(fd, data)
        # os.close(fd)
        return None
    _LastInboxPacketTime = time.time()
    if _Debug:
        lg.out(
            _DebugLevel - 8,
            "gateway.inbox [%s] signed by %s|%s (for %s) from %s://%s" %
            (Command, nameurl.GetName(OwnerID), nameurl.GetName(CreatorID),
             nameurl.GetName(RemoteID), info.proto, info.host))
    if _Debug and lg.is_debug(_DebugLevel):
        monitoring()
    # control.request_update([('packet', newpacket.PacketID)])
    return newpacket
_p.abspath(_p.join(_p.dirname(_p.abspath(sys.argv[0])), '..'))) from logs import lg from system import bpio from crypt import key from crypt import signed from main import settings from lib import misc from userid import my_id bpio.init() lg.set_debug_level(18) settings.init() key.InitMyKey() if len(sys.argv) > 1: print 'reading' data1 = bpio.ReadBinaryFile(sys.argv[1]) print '%d bytes long, hash: %s' % ( len(data1), misc.BinaryToAscii(key.Hash(data1)).strip()) p1 = signed.Packet('Data', my_id.getLocalID(), my_id.getLocalID(), 'SomeID', data1, 'RemoteID:abc') else: print 'unserialize from "input"' p1 = signed.Unserialize(bpio.ReadBinaryFile('input')) data1 = p1.Payload print 'serialize', p1 print ' Command:', p1.Command, type(p1.Command) print ' OwnerID:', p1.OwnerID, type(p1.OwnerID) print ' CreatorID:', p1.CreatorID, type(p1.CreatorID) print ' PacketID:', p1.PacketID, type(p1.PacketID) print ' Date:', p1.Date, type(p1.Date) print ' Payload:', len(p1.Payload), misc.BinaryToAscii(key.Hash(
def on_retrieve(newpacket):
    """
    Handle an incoming Retrieve request: locate the stored Data packet
    for the requested PacketID on local disk, validate it, wrap it into
    a fresh Data packet addressed to the requester and send it back.

    Returns True when a response was sent, False when a Fail reply was
    sent instead.
    """
    # external customer must be able to request
    # TODO: add validation of public key
    # if not contactsdb.is_customer(newpacket.OwnerID):
    #     lg.err("had unknown customer %s" % newpacket.OwnerID)
    #     p2p_service.SendFail(newpacket, 'not a customer')
    #     return False
    glob_path = global_id.ParseGlobalID(newpacket.PacketID)
    if not glob_path['path']:
        # backward compatible check
        glob_path = global_id.ParseGlobalID(
            my_id.getGlobalID('master') + ':' + newpacket.PacketID)
    if not glob_path['path']:
        lg.err("got incorrect PacketID")
        p2p_service.SendFail(newpacket, 'incorrect path')
        return False
    if not glob_path['idurl']:
        lg.warn('no customer global id found in PacketID: %s' %
                newpacket.PacketID)
        p2p_service.SendFail(newpacket, 'incorrect retrieve request')
        return False
    if newpacket.CreatorID != glob_path['idurl']:
        lg.warn('one of customers requesting a Data from another customer!')
    else:
        pass  # same customer, based on CreatorID : OK!
    recipient_idurl = newpacket.OwnerID
    # TODO: process requests from another customer : glob_path['idurl']
    filename = make_valid_filename(newpacket.OwnerID, glob_path)
    if not filename:
        if True:  # TODO: settings.getCustomersDataSharingEnabled() and
            # SECURITY
            # TODO: add more validations for receiver idurl
            # recipient_idurl = glob_path['idurl']
            filename = make_valid_filename(glob_path['idurl'], glob_path)
    if not filename:
        lg.warn("had empty filename")
        p2p_service.SendFail(newpacket, 'empty filename')
        return False
    if not os.path.exists(filename):
        lg.warn("did not found requested file locally : %s" % filename)
        p2p_service.SendFail(newpacket,
                             'did not found requested file locally')
        return False
    if not os.access(filename, os.R_OK):
        lg.warn("no read access to requested packet %s" % filename)
        p2p_service.SendFail(newpacket, 'no read access to requested packet')
        return False
    data = bpio.ReadBinaryFile(filename)
    if not data:
        lg.warn("empty data on disk %s" % filename)
        p2p_service.SendFail(newpacket, 'empty data on disk')
        return False
    stored_packet = signed.Unserialize(data)
    del data
    if stored_packet is None:
        lg.warn("Unserialize failed, not Valid packet %s" % filename)
        p2p_service.SendFail(newpacket, 'unserialize failed')
        return False
    if not stored_packet.Valid():
        lg.warn("Stored packet is not Valid %s" % filename)
        p2p_service.SendFail(newpacket, 'stored packet is not valid')
        return False
    if stored_packet.Command != commands.Data():
        lg.warn('sending back packet which is not a Data')
    # here Data() packet is sent back as it is...
    # that means outpacket.RemoteID=my_id.getIDURL() - it was addressed to that node and stored as it is
    # need to take that in account every time you receive Data() packet
    # it can be not a new Data(), but the old data returning back as a response to Retreive() packet
    # let's create a new Data() packet which will be addressed directly to recipient and "wrap" stored data inside it
    routed_packet = signed.Packet(
        Command=commands.Data(),
        OwnerID=stored_packet.OwnerID,
        CreatorID=my_id.getIDURL(),
        PacketID=stored_packet.PacketID,
        Payload=stored_packet.Serialize(),
        RemoteID=recipient_idurl,
    )
    if recipient_idurl == stored_packet.OwnerID:
        lg.info('from request %r : sending %r back to owner: %s' %
                (newpacket, stored_packet, recipient_idurl))
        gateway.outbox(routed_packet)  # , target=recipient_idurl)
        return True
    lg.info('from request %r : returning data owned by %s to %s' %
            (newpacket, stored_packet.OwnerID, recipient_idurl))
    gateway.outbox(routed_packet)
    return True
def _do_send_packets(self, backup_id, block_num):
    """
    Send every Data/Parity piece of archive block ``block_num`` of
    ``backup_id`` from the local archive snapshot folder to the
    matching supplier. Marks the whole block as failed when the
    snapshot folder is missing or too many suppliers are unknown.
    """
    customer_id, path_id, version_name = packetid.SplitBackupID(backup_id)
    archive_snapshot_dir = os.path.join(settings.getLocalBackupsDir(),
                                        customer_id, path_id, version_name)
    if _Debug:
        lg.args(_DebugLevel, backup_id=backup_id, block_num=block_num,
                archive_snapshot_dir=archive_snapshot_dir)
    if not os.path.isdir(archive_snapshot_dir):
        self.block_failed = True
        lg.err('archive snapshot folder was not found in %r' %
               archive_snapshot_dir)
        return None
    failed_supliers = 0
    for supplier_num in range(len(self.suppliers_list)):
        supplier_idurl = self.suppliers_list[supplier_num]
        if not supplier_idurl:
            failed_supliers += 1
            lg.warn('unknown supplier supplier_num=%d' % supplier_num)
            continue
        # each supplier gets both the Data and the Parity piece of the block
        for dataORparity in (
            'Data',
            'Parity',
        ):
            packet_id = packetid.MakePacketID(backup_id, block_num,
                                              supplier_num, dataORparity)
            packet_filename = os.path.join(
                archive_snapshot_dir, '%d-%d-%s' % (
                    block_num,
                    supplier_num,
                    dataORparity,
                ))
            if not os.path.isfile(packet_filename):
                lg.err('%s is not a file' % packet_filename)
                continue
            packet_payload = bpio.ReadBinaryFile(packet_filename)
            if not packet_payload:
                lg.err('file %r reading error' % packet_filename)
                continue
            # remember the outgoing packet so responses can be matched later
            if block_num not in self.packets_out:
                self.packets_out[block_num] = {}
            self.packets_out[block_num][packet_id] = None
            p2p_service.SendData(
                raw_data=packet_payload,
                ownerID=self.queue_owner_idurl,
                creatorID=my_id.getIDURL(),
                remoteID=supplier_idurl,
                packetID=packet_id,
                callbacks={
                    commands.Ack(): lambda newpacket, _: self.automat('ack', newpacket=newpacket),
                    commands.Fail(): lambda newpacket, _: self.automat('fail', newpacket=newpacket),
                },
            )
    if failed_supliers > self.correctable_errors:
        self.block_failed = True
        lg.err('too many failed suppliers %d in block %d' % (
            failed_supliers,
            block_num,
        ))