def doInit(self, *args, **kwargs):
    """
    Action method.

    Resolves ``self.EccMap`` for the customer's supplier set, trying in order:
    the locally configured map (own suppliers), a majority vote over suppliers'
    meta info, the active share's known map, and finally a guess from the
    number of known suppliers.  Also derives ``self.max_errors`` and hooks
    the ``data_receiver`` state-change callback.
    """
    # NOTE(review): presumably (re)starts block rebuilding machinery — confirm
    self._do_block_rebuilding()
    # keep only non-empty supplier IDURLs registered for this customer
    self.known_suppliers = [
        _f for _f in contactsdb.suppliers(customer_idurl=self.customer_idurl) if _f
    ]
    if not self.EccMap:
        # my own suppliers: trust the locally configured ECC map
        if self.customer_idurl == my_id.getIDURL():
            self.EccMap = eccmap.Current()
            lg.info('ECC map %r set from local for my own suppliers' % self.EccMap)
    if not self.EccMap:
        # tally the ECC map names the suppliers themselves reported
        known_eccmap_dict = {}
        for supplier_idurl in self.known_suppliers:
            known_ecc_map = contactsdb.get_supplier_meta_info(
                supplier_idurl=supplier_idurl,
                customer_idurl=self.customer_idurl,
            ).get('ecc_map', None)
            if known_ecc_map:
                if known_ecc_map not in known_eccmap_dict:
                    known_eccmap_dict[known_ecc_map] = 0
                known_eccmap_dict[known_ecc_map] += 1
        if known_eccmap_dict:
            # the most frequently reported map name wins
            all_known_eccmaps = list(known_eccmap_dict.items())
            all_known_eccmaps.sort(key=lambda i: i[1], reverse=True)
            self.EccMap = eccmap.eccmap(all_known_eccmaps[0][0])
            lg.info('ECC map %r recognized from suppliers meta info' % self.EccMap)
        else:
            # no meta info at all: try the active share, if shared data service is running
            known_ecc_map = None
            if driver.is_on('service_shared_data'):
                from access import shared_access_coordinator
                active_share = shared_access_coordinator.get_active_share(self.key_id)
                if active_share:
                    known_ecc_map = active_share.known_ecc_map
            if known_ecc_map:
                self.EccMap = eccmap.eccmap(known_ecc_map)
                lg.info('ECC map %r recognized from active share %r' % (
                    self.EccMap,
                    active_share,
                ))
            else:
                # last resort: guess from the supplier count, falling back to
                # the configured default when the count is not a supported size
                num_suppliers = len(self.known_suppliers)
                if num_suppliers not in eccmap.GetPossibleSuppliersCount():
                    num_suppliers = settings.DefaultDesiredSuppliers()
                self.EccMap = eccmap.eccmap(eccmap.GetEccMapName(num_suppliers))
                lg.warn('no meta info found, guessed ECC map %r from %d known suppliers' % (
                    self.EccMap, len(self.known_suppliers)))
    # how many missing/broken segments this ECC layout can still recover from
    self.max_errors = eccmap.GetCorrectableErrors(self.EccMap.NumSuppliers())
    if data_receiver.A():
        data_receiver.A().addStateChangedCallback(self._on_data_receiver_state_changed)
def doInit(self, arg):
    """
    Action method.

    Resolves ``self.EccMap`` by majority vote over the ECC map names stored
    in the suppliers' meta info, or guesses it from the number of known
    suppliers, and subscribes to ``data_receiver`` state changes.
    """
    if data_receiver.A():
        data_receiver.A().addStateChangedCallback(self._on_data_receiver_state_changed)
    # BUGFIX: on Python 3 filter() returns a lazy iterator, so the later
    # len(self.known_suppliers) raised TypeError — materialize it as a list
    self.known_suppliers = list(filter(
        None, contactsdb.suppliers(customer_idurl=self.customer_idurl)))
    # tally the ECC map names the suppliers themselves reported
    known_eccmap_dict = {}
    for supplier_idurl in self.known_suppliers:
        known_ecc_map = contactsdb.get_supplier_meta_info(
            supplier_idurl=supplier_idurl,
            customer_idurl=self.customer_idurl,
        ).get('ecc_map', None)
        if known_ecc_map:
            if known_ecc_map not in known_eccmap_dict:
                known_eccmap_dict[known_ecc_map] = 0
            known_eccmap_dict[known_ecc_map] += 1
    if known_eccmap_dict:
        # BUGFIX: dict.items() is a view on Python 3 and has no .sort() —
        # copy into a list first; most frequently reported map name wins
        all_known_eccmaps = list(known_eccmap_dict.items())
        all_known_eccmaps.sort(key=lambda i: i[1], reverse=True)
        self.EccMap = eccmap.eccmap(all_known_eccmaps[0][0])
        lg.info('eccmap %s recognized from suppliers meta info' % self.EccMap)
    else:
        # no meta info found: guess from the supplier count, falling back to
        # the configured default when no suppliers are known yet
        num_suppliers = len(self.known_suppliers)
        if num_suppliers == 0:
            num_suppliers = settings.DefaultDesiredSuppliers()
        self.EccMap = eccmap.eccmap(eccmap.GetEccMapName(num_suppliers))
        lg.warn(
            'no meta info found, guessed eccmap %s from %d known suppliers'
            % (self.EccMap, len(self.known_suppliers)))
def _restore():
    # Turn the backup ID into a filesystem-safe temp-file prefix:
    # every '@', '.', '/', ':' becomes '_' (same mapping as chained replace()).
    safe_prefix = backupID.translate(str.maketrans('@./:', '____')) + '_'
    fd, restored_filename = tmpfile.make(
        'restore',
        extension='.tar.gz',
        prefix=safe_prefix,
    )
    # run the restore state machine; hand the results to _restore_done()
    worker = restore_worker.RestoreWorker(backupID, fd, KeyID=None, ecc_map=eccmap.eccmap(test_ecc_map))
    worker.MyDeferred.addCallback(_restore_done, backupID, fd, restored_filename, outputLocation)
    worker.automat('init')
def test_backup_restore(self):
    """
    End-to-end round trip: back up a folder through the raid worker, then
    restore it and verify the restored file is byte-identical to the source.
    Returns a Deferred that fires when the whole chain completes.
    """
    test_ecc_map = 'ecc/2x2'
    test_done = Deferred()
    backupID = '[email protected]_8084:1/F1234'
    outputLocation = '/tmp/'
    # create a small random source file to be backed up
    with open('/tmp/_some_folder/random_file', 'wb') as fout:
        fout.write(os.urandom(10))
        # fout.write(os.urandom(100*1024))
    backupPipe = backup_tar.backuptardir_thread('/tmp/_some_folder/')

    def _extract_done(retcode, backupID, source_filename, output_location):
        # final step: restored bytes must match the original source file
        assert retcode is True
        print('file size is: %d bytes' % len(bpio.ReadBinaryFile('/tmp/random_file')))
        assert bpio.ReadBinaryFile('/tmp/random_file') == bpio.ReadBinaryFile('/tmp/_some_folder/random_file')
        reactor.callLater(0, raid_worker.A, 'shutdown')  # @UndefinedVariable
        reactor.callLater(0.5, test_done.callback, True)  # @UndefinedVariable

    def _restore_done(result, backupID, outfd, tarfilename, outputlocation):
        # restore produced a tarball — unpack it and compare contents
        assert result == 'done'
        d = backup_tar.extracttar_thread(tarfilename, outputlocation)
        d.addCallback(_extract_done, backupID, tarfilename, outputlocation)
        return d

    def _restore():
        # build a filesystem-safe temp-file prefix from the backup ID
        outfd, outfilename = tmpfile.make(
            'restore',
            extension='.tar.gz',
            prefix=backupID.replace('@', '_').replace('.', '_').replace('/', '_').replace(':', '_') + '_',
        )
        r = restore_worker.RestoreWorker(backupID, outfd, KeyID=None, ecc_map=eccmap.eccmap(test_ecc_map))
        r.MyDeferred.addCallback(_restore_done, backupID, outfd, outfilename, outputLocation)
        r.automat('init')

    def _bk_done(bid, result):
        assert result == 'done'

    def _bk_closed(job):
        # NOTE(review): disabled branch — presumably for simulating a lost
        # data/parity segment so the restore exercises rebuilding; confirm
        if False:
            os.remove('/tmp/.bitdust_tmp/backups/[email protected]_8084/1/F1234/0-1-Data')
            os.remove('/tmp/.bitdust_tmp/backups/[email protected]_8084/1/F1234/0-1-Parity')
        # backup finished — kick off the restore half of the round trip
        reactor.callLater(0.5, _restore)  # @UndefinedVariable

    reactor.callWhenRunning(raid_worker.A, 'init')  # @UndefinedVariable
    job = backup.backup(backupID, backupPipe, blockSize=1024*1024, ecc_map=eccmap.eccmap(test_ecc_map))
    job.finishCallback = _bk_done
    job.addStateChangedCallback(lambda *a, **k: _bk_closed(job), oldstate=None, newstate='DONE')
    reactor.callLater(0.5, job.automat, 'start')  # @UndefinedVariable
    return test_done
def doInit(self, *args, **kwargs):
    """
    Action method.

    Picks ``self.EccMap`` from the suppliers' stored meta info (most
    frequently reported map name wins), or guesses it from the supplier
    count, then subscribes to ``data_receiver`` state changes.
    """
    # keep only non-empty supplier IDURLs of this customer
    self.known_suppliers = list(filter(None, contactsdb.suppliers(customer_idurl=self.customer_idurl)))
    # count how many suppliers reported each ECC map name
    votes = {}
    for supplier_idurl in self.known_suppliers:
        meta_info = contactsdb.get_supplier_meta_info(
            supplier_idurl=supplier_idurl,
            customer_idurl=self.customer_idurl,
        )
        reported_name = meta_info.get('ecc_map', None)
        if reported_name:
            votes[reported_name] = votes.get(reported_name, 0) + 1
    if votes:
        # the map name with the most votes wins (earliest seen breaks ties)
        winner, _count = max(votes.items(), key=lambda item: item[1])
        self.EccMap = eccmap.eccmap(winner)
        lg.info('eccmap %s recognized from suppliers meta info' % self.EccMap)
    else:
        # no meta info available: derive the layout from the supplier count,
        # falling back to the configured default when none are known yet
        num_suppliers = len(self.known_suppliers) or settings.DefaultDesiredSuppliers()
        self.EccMap = eccmap.eccmap(eccmap.GetEccMapName(num_suppliers))
        lg.warn(
            'no meta info found, guessed eccmap %s from %d known suppliers'
            % (self.EccMap, len(self.known_suppliers)))
    if data_receiver.A():
        data_receiver.A().addStateChangedCallback(self._on_data_receiver_state_changed)
def raidread(OutputFileName, eccmapname, version, blockNumber, data_parity_dir):
    """
    Reassemble one block from its on-disk data/parity segment files.

    Repeatedly scans the parity segments and, whenever exactly one data
    segment backing a parity is missing, rebuilds it via ``RebuildOne()``;
    loops until no further progress is made.  Then concatenates all existing
    data segments into ``OutputFileName``.

    Returns the number of good data segments written, or ``None`` on error
    (any exception is logged and swallowed, matching the project style).
    """
    try:
        myeccmap = eccmap.eccmap(eccmapname)
        # preallocated scratch list of candidate file names handed to RebuildOne()
        GoodFiles = list(range(0, 200))
        MakingProgress = 1
        while MakingProgress == 1:
            MakingProgress = 0
            for PSegNum in range(myeccmap.paritysegments):
                PFileName = os.path.join(
                    data_parity_dir, version,
                    str(blockNumber) + '-' + str(PSegNum) + '-Parity')
                if os.path.exists(PFileName):
                    # data segment numbers covered by this parity segment
                    Map = myeccmap.ParityToData[PSegNum]
                    TotalDSegs = 0
                    GoodDSegs = 0
                    for DSegNum in Map:
                        TotalDSegs += 1
                        FileName = os.path.join(
                            data_parity_dir, version,
                            str(blockNumber) + '-' + str(DSegNum) + '-Data')
                        if os.path.exists(FileName):
                            GoodFiles[GoodDSegs] = FileName
                            GoodDSegs += 1
                        else:
                            # remember the (single) missing segment; only read
                            # below when exactly one is missing, so it is
                            # always assigned before use
                            BadName = FileName
                    if GoodDSegs == TotalDSegs - 1:
                        # exactly one data segment missing — recoverable
                        MakingProgress = 1
                        GoodFiles[GoodDSegs] = PFileName
                        GoodDSegs += 1
                        RebuildOne(GoodFiles, GoodDSegs, BadName)
        # Count up the good segments and combine them into the output file.
        # BUGFIX: use context managers — the original leaked the output handle
        # on exception and never closed the per-segment input handles at all.
        GoodDSegs = 0
        with open(OutputFileName, 'wb') as output:
            for DSegNum in range(myeccmap.datasegments):
                FileName = os.path.join(
                    data_parity_dir, version,
                    str(blockNumber) + '-' + str(DSegNum) + '-Data')
                if os.path.exists(FileName):
                    GoodDSegs += 1
                    with open(FileName, 'rb') as segment_file:
                        output.write(segment_file.read())
        return GoodDSegs
    except:
        # project-wide convention: log the traceback, signal failure with None
        lg.exc()
        return None
def _do_start_archive_backup(self):
    """
    Start a backup job that uploads the data of the latest sequence in this
    queue into the archive folder on the suppliers' side.

    Builds a backup ID from the queue owner / archive path / key alias and a
    fresh version ID, tars the local data file in a background thread, and
    runs a ``backup.backup()`` state machine with block/finish callbacks.
    """
    # ask the owner for the local file that holds data up to this sequence id
    local_path = self.local_data_callback(self.queue_id, self.latest_sequence_id)
    supplier_path_id = os.path.join(self.archive_folder_path, strng.to_text(self.latest_sequence_id))
    dataID = misc.NewBackupID()
    backup_id = packetid.MakeBackupID(
        customer=self.queue_owner_id,
        path_id=supplier_path_id,
        key_alias=self.queue_alias,
        version=dataID,
    )
    backup_fs.MakeLocalDir(settings.getLocalBackupsDir(), backup_id)
    # NOTE(review): compression disabled on Android — presumably a
    # performance/battery concern on mobile devices; confirm
    if bpio.Android():
        compress_mode = 'none'
    else:
        compress_mode = 'bz2'
    arcname = os.path.basename(local_path)
    # tar (and compress) the source file in a separate thread, feeding a pipe
    backupPipe = backup_tar.backuptarfile_thread(local_path, arcname=arcname, compress=compress_mode)
    self.backup_job = backup.backup(
        backupID=backup_id,
        pipe=backupPipe,
        blockResultCallback=self._on_archive_backup_block_result,
        finishCallback=self._on_archive_backup_done,
        blockSize=1024 * 1024 * 10,  # 10 MB per ECC block
        sourcePath=local_path,
        keyID=self.group_key_id,
        ecc_map=eccmap.eccmap(self.ecc_map),
        creatorIDURL=self.queue_owner_idurl,
    )
    self.backup_job.automat('start')
    if _Debug:
        lg.args(_DebugLevel, job=self.backup_job, backup_id=backup_id, local_path=local_path, group_key_id=self.group_key_id)
def do_in_memory(filename, eccmapname, version, blockNumber, targetDir):
    """
    Split ``filename`` into data segments and build the matching parity
    segments, writing each as ``<block>-<seg>-Data`` / ``<block>-<seg>-Parity``
    files under ``targetDir``.

    Returns ``(dataNum, parityNum)`` — the number of data and parity segments
    written — or ``(-1, -1)`` on any error (exception is logged and swallowed).
    """
    try:
        INTSIZE = 4  # bytes per array element
        myeccmap = eccmap.eccmap(eccmapname)
        # pad the file so it splits evenly into whole 4-byte words per segment
        # (any padding at end and block.Length fixes)
        RoundupFile(filename, myeccmap.datasegments * INTSIZE)
        wholefile = ReadBinaryFileAsArray(filename)
        length = len(wholefile)
        length = length * 4  # total size in bytes
        # BUGFIX: ceil-divide with integer math — the original used `/`,
        # which is true division on Python 3 and made seglength a float
        # (precision loss on very large files, implicit int() downstream)
        seglength = (length + myeccmap.datasegments - 1) // myeccmap.datasegments
        #: dict of data segments
        sds = {}
        for seg_num, chunk in enumerate(raidutils.chunks(wholefile, seglength // INTSIZE)):
            FileName = targetDir + '/' + str(blockNumber) + '-' + str(seg_num) + '-Data'
            with open(FileName, 'wb') as f:
                # write a byteswapped copy but keep the original word order
                # for parity building — NOTE(review): presumably fixes the
                # on-disk byte order across host endianness; confirm
                chunk_to_write = copy.copy(chunk)
                chunk_to_write.byteswap()
                sds[seg_num] = iter(chunk)
                f.write(chunk_to_write)
        psds_list = raidutils.build_parity(
            sds, seglength // INTSIZE, myeccmap.datasegments, myeccmap, myeccmap.paritysegments)
        dataNum = len(sds)
        parityNum = len(psds_list)
        for PSegNum in psds_list:
            FileName = targetDir + '/' + str(blockNumber) + '-' + str(PSegNum) + '-Parity'
            with open(FileName, 'wb') as f:
                f.write(psds_list[PSegNum])
        return dataNum, parityNum
    except:
        # project-wide convention: log the traceback, signal failure in-band
        lg.exc()
        return -1, -1
def geteccmap(name):
    """
    Return the ``eccmap.eccmap`` instance for ``name`` from the module-level
    cache, constructing and caching it on first use.
    """
    global _ECCMAP
    try:
        return _ECCMAP[name]
    except KeyError:
        # first request for this map name — build it once and cache
        instance = eccmap.eccmap(name)
        _ECCMAP[name] = instance
        return instance