def __resume_transaction(self, transaction, progress_info):
    """Resume an interrupted transaction from saved progress information.

    transaction: Transaction object restored from the transactions log.
    progress_info: mapping of seek -> (size, local_name, foreign_name)
        describing data blocks that were already processed.
    """
    file_path = transaction.get_file_path()
    transaction_id = transaction.get_id()
    if transaction.is_downloading():
        file_md = self.__find_file(file_path)
        if (not file_md) or (not file_md.is_file()):
            logger.error('No file found at %s' % file_path)
        else:
            for chunk in file_md.chunks:
                # FIX: original called transaction_id() (it is a value, not a
                # callable) and referenced an undefined name `seek`.
                db_name = '%s.%s' % (transaction_id, chunk.seek)
                if chunk.seek in progress_info:
                    # FIX: original passed the misspelled name `itansaction_id`.
                    transaction.append_data_block(chunk.seek, chunk.size, \
                            self.new_data_block(transaction_id, chunk.seek, chunk.size), chunk.key)
                else:
                    # Missing chunk progress: drop the cached block and fail
                    # the transaction.
                    self.__db_cache.remove_data_block(db_name)
                    # FIX: update_transaction_state() takes the transaction id
                    # first (see start_download_transaction).
                    self.update_transaction_state(transaction_id, Transaction.TS_FAILED)
    else:
        # Upload transaction: re-attach finished blocks, re-transfer the rest.
        for seek, (size, local_name, foreign_name) in progress_info.items():
            if foreign_name and foreign_name != 'None':
                # Block already stored remotely: attach without transfer.
                db = self.new_data_block(transaction_id, seek, size)
                transaction.append_data_block(seek, size, db, foreign_name, no_transfer=True)
                self.__tr_log_update(transaction_id, seek, size, db.get_name(), foreign_name)
            else:
                self.transfer_data_block(transaction_id, seek, size, \
                        self.new_data_block(transaction_id, seek, size))
def __load_md_db(self, md_file_path):
    """Open (or create) the metadata DBM database and sync it with the journal.

    If the stored journal key does not match the current journal's key, the
    database is recreated from scratch.  If replaying journal records fails,
    the database is recreated and a full replay from record 0 is attempted.
    """
    self.db = anydbm.open(md_file_path, 'c')
    # Persisted counters; default to 0 on a fresh database.
    self.__last_item_id = long(self.__get_db_val('last_item_id', 0))
    self.__last_journal_rec_id = long(
        self.__get_db_val('last_journal_rec_id', 0))
    if self.__journal:
        j_key = self.__get_db_val('journal_key', None)
        if j_key != self.__journal.get_journal_key():
            # Key mismatch: the cached metadata belongs to another journal.
            logger.info(
                'Invalid journal key in metadata database! Recreating it...'
            )
            self.db.close()
            self.__remove_md_file(md_file_path)
            self.db = anydbm.open(md_file_path, 'c')
            self.db['journal_key'] = self.__journal.get_journal_key()
            self.__last_item_id = 0
            self.__last_journal_rec_id = 0
        try:
            # Replay journal records newer than what the DB already contains.
            self.__init_from_journal(self.__last_journal_rec_id)
        except NimbusException, err:
            logger.error(
                'Metadata was not restored from journal! Details: %s' % err)
            logger.info('Trying restoring full journal records...')
            # Incremental replay failed: rebuild from an empty database and
            # replay the journal from the very first record.
            self.db.close()
            self.__remove_md_file(md_file_path)
            self.db = anydbm.open(md_file_path, 'c')
            self.__init_from_journal(0)
def __resume_transaction(self, transaction, progress_info):
    """Resume an interrupted transaction from saved progress information.

    transaction: Transaction object restored from the transactions log.
    progress_info: mapping of seek -> (size, local_name, foreign_name)
        describing data blocks that were already processed.
    """
    file_path = transaction.get_file_path()
    transaction_id = transaction.get_id()
    if transaction.is_downloading():
        file_md = self.__find_file(file_path)
        if (not file_md) or (not file_md.is_file()):
            logger.error('No file found at %s'%file_path)
        else:
            for chunk in file_md.chunks:
                # FIX: original called transaction_id() (it is a value, not a
                # callable) and referenced an undefined name `seek`.
                db_name = '%s.%s'%(transaction_id, chunk.seek)
                if chunk.seek in progress_info:
                    # FIX: original passed the misspelled name `itansaction_id`.
                    transaction.append_data_block(chunk.seek, chunk.size, \
                            self.new_data_block(transaction_id, chunk.seek, chunk.size), chunk.key)
                else:
                    # Missing chunk progress: drop the cached block and fail
                    # the transaction.
                    self.__db_cache.remove_data_block(db_name)
                    # FIX: update_transaction_state() takes the transaction id
                    # first (see start_download_transaction).
                    self.update_transaction_state(transaction_id, Transaction.TS_FAILED)
    else:
        # Upload transaction: re-attach finished blocks, re-transfer the rest.
        for seek, (size, local_name, foreign_name) in progress_info.items():
            if foreign_name and foreign_name != 'None':
                # Block already stored remotely: attach without transfer.
                db = self.new_data_block(transaction_id, seek, size)
                transaction.append_data_block(seek, size, db, foreign_name, no_transfer=True)
                self.__tr_log_update(transaction_id, seek, size, db.get_name(), foreign_name)
            else:
                self.transfer_data_block(transaction_id, seek, size, \
                        self.new_data_block(transaction_id, seek, size))
def __load_md_db(self, md_file_path):
    """Open (or create) the metadata DBM database and sync it with the journal.

    If the stored journal key does not match the current journal's key, the
    database is recreated from scratch.  If replaying journal records fails,
    the database is recreated and a full replay from record 0 is attempted.
    """
    self.db = anydbm.open(md_file_path, 'c')
    # Persisted counters; default to 0 on a fresh database.
    self.__last_item_id = long(self.__get_db_val('last_item_id', 0))
    self.__last_journal_rec_id = long(self.__get_db_val('last_journal_rec_id', 0))
    if self.__journal:
        j_key = self.__get_db_val('journal_key', None)
        if j_key != self.__journal.get_journal_key():
            # Key mismatch: the cached metadata belongs to another journal.
            logger.info('Invalid journal key in metadata database! Recreating it...')
            self.db.close()
            self.__remove_md_file(md_file_path)
            self.db = anydbm.open(md_file_path, 'c')
            self.db['journal_key'] = self.__journal.get_journal_key()
            self.__last_item_id = 0
            self.__last_journal_rec_id = 0
        try:
            # Replay journal records newer than what the DB already contains.
            self.__init_from_journal(self.__last_journal_rec_id)
        except NimbusException, err:
            logger.error('Metadata was not restored from journal! Details: %s'%err)
            logger.info('Trying restoring full journal records...')
            # Incremental replay failed: rebuild from an empty database and
            # replay the journal from the very first record.
            self.db.close()
            self.__remove_md_file(md_file_path)
            self.db = anydbm.open(md_file_path, 'c')
            self.__init_from_journal(0)
def remove(self, key, replica_count=DEFAULT_REPLICA_COUNT):
    """Delete the data block stored under *key* from fabnet.

    Returns True on success, False when the service reports an error.
    """
    request = FabnetPacketRequest(method='ClientDeleteData',
            parameters={'key': key, 'replica_count': replica_count},
            sync=True)
    response = self.fri_client.call_sync(self.fabnet_hostname, request,
            FRI_CLIENT_TIMEOUT)
    if response.ret_code == 0:
        return True
    logger.error('ClientDeleteData error: %s' % response.ret_message)
    return False
def remove(self, key, replica_count=DEFAULT_REPLICA_COUNT):
    """Ask fabnet to delete the block stored under *key*.

    Returns False (after logging) when the call fails, True otherwise.
    """
    delete_params = {'key': key, 'replica_count': replica_count}
    response = self.fri_client.call_sync(
            self.fabnet_hostname,
            FabnetPacketRequest(method='ClientDeleteData',
                    parameters=delete_params, sync=True),
            FRI_CLIENT_TIMEOUT)
    failed = response.ret_code != 0
    if failed:
        logger.error('ClientDeleteData error: %s' % response.ret_message)
        return False
    return True
def put(self, data_block, key=None, replica_count=DEFAULT_REPLICA_COUNT, wait_writes_count=2, allow_rewrite=True):
    """Store *data_block* in fabnet and verify it by checksum.

    First asks the gateway node for a destination key/node (PutKeysInfo),
    then streams the block to that node (ClientPutData) and compares the
    checksum returned by the node with the locally computed one.  On any
    failure the key is removed when allow_rewrite is False, and the error
    is re-raised.
    """
    # Resolve the destination key and node address.
    packet = FabnetPacketRequest(method='PutKeysInfo', parameters={'key': key}, sync=True)
    resp = self.fri_client.call_sync(self.fabnet_hostname, packet, FRI_CLIENT_TIMEOUT)
    if resp.ret_code != 0:
        raise Exception('Key info error: %s' % resp.ret_message)
    if not resp.ret_parameters.has_key('key_info'):
        raise Exception(
            'Invalid PutKeysInfo response! key_info is expected')
    key_info = resp.ret_parameters['key_info']
    key, node_addr = key_info
    params = {'key':key, 'replica_count':replica_count, \
            'wait_writes_count': wait_writes_count}
    # Stream the block to the destination node in FILE_ITER_BLOCK_SIZE chunks.
    packet = FabnetPacketRequest(method='ClientPutData', parameters=params, \
            binary_data=ChunkedBinaryData.prepare(data_block, FILE_ITER_BLOCK_SIZE), sync=True)
    resp = self.fri_client.call_sync(node_addr, packet, FRI_CLIENT_TIMEOUT)
    try:
        if resp.ret_code != 0:
            raise Exception('ClientPutData error: %s' % resp.ret_message)
        if not resp.ret_parameters.has_key('key'):
            raise Exception(
                'put data block error: no data key found in response message "%s"' % resp)
        primary_key = resp.ret_parameters['key']
        checksum = resp.ret_parameters['checksum']
        # Integrity check: compare server-side checksum with the local one.
        if isinstance(data_block, DataBlock):
            db_checksum = data_block.checksum()
        else:
            db_checksum = hashlib.sha1(data_block).hexdigest()
        if checksum != db_checksum:
            raise Exception('Invalid data block checksum!')
    except Exception, err:
        logger.error('[put] %s' % err)
        logger.traceback_debug()
        if not allow_rewrite:
            # Don't leave a half-written block behind when rewriting is
            # forbidden.
            self.remove(key, replica_count)
        raise err
    # NOTE(review): primary_key is assigned but not returned in this view;
    # presumably the method returns it (or key) to the caller -- confirm.
def run(self):
    """Worker loop: pop (db_key, replica_count) jobs from the queue and
    delete them via the fabnet gateway until QUIT_JOB is received or the
    stop flag is set."""
    while True:
        job = self.queue.get()
        try:
            if job == QUIT_JOB or self.stop_flag.is_set():
                break
            db_key, replica_count = job
            self.fabnet_gateway.remove(db_key, replica_count)
        except Exception, err:
            # Errors are logged and the loop keeps serving the next job.
            logger.error('DeleteWorker error: %s'%err)
            logger.traceback_debug()
        finally:
            # NOTE(review): the finally body is truncated in this source view;
            # presumably it acknowledges the job (e.g. queue.task_done()) --
            # confirm against the full file.
def run(self):
    """Worker loop: pop (db_key, replica_count) jobs from the queue and
    delete them via the fabnet gateway until QUIT_JOB is received or the
    stop flag is set."""
    while True:
        job = self.queue.get()
        try:
            if job == QUIT_JOB or self.stop_flag.is_set():
                break
            db_key, replica_count = job
            self.fabnet_gateway.remove(db_key, replica_count)
        except Exception, err:
            # Errors are logged and the loop keeps serving the next job.
            logger.error('DeleteWorker error: %s' % err)
            logger.traceback_debug()
        finally:
            # NOTE(review): the finally body is truncated in this source view;
            # presumably it acknowledges the job (e.g. queue.task_done()) --
            # confirm against the full file.
def exec_openssl(cls, command, stdin=None, cwd=None):
    '''Run openssl command. PKI_OPENSSL_BIN doesn't need to be specified'''
    # Binary resolved from the OPENSSL_EXEC env var, defaulting to `openssl`
    # on PATH.
    c = [os.environ.get('OPENSSL_EXEC', 'openssl')]
    c.extend(command)
    # NOTE(review): the `cwd` parameter is accepted but never forwarded to
    # Subprocess -- confirm whether it should be.
    try:
        proc = Subprocess(c, with_input=True)
        stdout_value, stderr_value = proc.communicate(stdin)
        out = stdout_value
        if stderr_value:
            # Append stderr so the warning below shows the full output.
            out += '\n%s'%stderr_value
        if proc.returncode != 0:
            logger.warning('OpenSSL error: %s'%out)
    except Exception, err:
        logger.error('openssl call error! "%s" failed: %s'%(' '.join(c), err))
        raise err
    # NOTE(review): `out` is computed but not returned in this view;
    # presumably the method returns (proc.returncode, out) or similar --
    # confirm against the full file.
def run(self): while True: out_streem = data = None job = self.queue.get() data_block = None transaction = None seek = None try: if job == QUIT_JOB or self.stop_flag.is_set(): break transaction, seek = job data_block, _, foreign_name = transaction.get_data_block( seek, noclone=False) if not foreign_name: raise Exception('foreign name does not found for seek=%s' % seek) if transaction.is_failed(): logger.debug( 'Transaction {%s} is failed! Skipping data block downloading...' % transaction.get_id()) data_block.remove() continue self.fabnet_gateway.get(foreign_name, transaction.get_replica_count(), data_block) data_block.close() self.transactions_manager.update_transaction(transaction.get_id(), seek, \ is_failed=False, foreign_name=data_block.get_name()) except Exception, err: events_provider.error('GetWorker', '%s failed: %s' % (transaction, err)) logger.traceback_debug() try: if transaction and data_block: self.transactions_manager.update_transaction(transaction.get_id(), seek, \ is_failed=True, foreign_name=data_block.get_name()) data_block.remove() except Exception, err: logger.error('[GetWorker.__on_error] %s' % err) logger.traceback_debug()
def start_download_transaction(self, file_path):
    """Create and register a download transaction for *file_path*.

    Chunks that are already complete in the local DB cache are attached to
    the transaction with no_transfer=True; the remaining chunks are
    scheduled for download.  Raises PathException when the path is not a
    file and NoLocalFileFound when a local-only file misses a cached chunk.
    """
    file_md, item_id = self.__find_file_from_inprogress(file_path)
    if (not file_md) or (not file_md.is_file()):
        raise PathException('No file found at %s' % file_path)
    transaction_id = item_id
    transaction = Transaction(Transaction.TT_DOWNLOAD, file_path, file_md.replica_count, transaction_id)
    # Becomes True once the transaction has been registered and logged.
    stored_transaction = False
    try:
        for chunk in file_md.chunks:
            db_path = self.__db_cache.get_cache_path('%s.%s' % (item_id, chunk.seek))
            if os.path.exists(db_path):
                data_block = self.new_data_block(item_id, chunk.seek, chunk.size)
                if data_block.full():
                    # Complete cached chunk: no network transfer needed.
                    transaction.append_data_block(chunk.seek, \
                            chunk.size, data_block, chunk.key, no_transfer=True)
                    continue
                else:
                    logger.error('Removing corrupted data block: %s' % data_block.get_name())
                    data_block.remove()
            if file_md.is_local:
                # Local-only files must have every chunk in the cache.
                raise NoLocalFileFound('No local chunk found for file %s (%s.%s)'%\
                        (file_md.name, item_id, chunk.seek))
            if not stored_transaction:
                # Register and log the transaction lazily, only once at
                # least one chunk actually needs downloading.
                self.__transactions[transaction_id] = transaction
                self.__tr_log_start_transaction(transaction)
                stored_transaction = True
            data_block = self.new_data_block(item_id, chunk.seek, chunk.size)
            self.transfer_data_block(transaction_id, chunk.seek, chunk.size, data_block, chunk.key)
    except Exception, err:
        logger.traceback_debug()
        if stored_transaction:
            self.update_transaction_state(transaction_id, Transaction.TS_FAILED)
        raise err
def exec_openssl(cls, command, stdin=None, cwd=None):
    '''Run openssl command. PKI_OPENSSL_BIN doesn't need to be specified'''
    # Binary resolved from the OPENSSL_EXEC env var, defaulting to `openssl`
    # on PATH.
    c = [os.environ.get('OPENSSL_EXEC', 'openssl')]
    c.extend(command)
    # NOTE(review): the `cwd` parameter is accepted but never forwarded to
    # Subprocess -- confirm whether it should be.
    try:
        proc = Subprocess(c, with_input=True)
        stdout_value, stderr_value = proc.communicate(stdin)
        out = stdout_value
        if stderr_value:
            # Append stderr so the warning below shows the full output.
            out += '\n%s' % stderr_value
        if proc.returncode != 0:
            logger.warning('OpenSSL error: %s' % out)
    except Exception, err:
        logger.error('openssl call error! "%s" failed: %s' % (' '.join(c), err))
        raise err
    # NOTE(review): `out` is computed but not returned in this view;
    # presumably the method returns (proc.returncode, out) or similar --
    # confirm against the full file.
def put(self, data_block, key=None, replica_count=DEFAULT_REPLICA_COUNT, wait_writes_count=2, allow_rewrite=True):
    """Store *data_block* in fabnet and verify it by checksum.

    First asks the gateway node for a destination key/node (PutKeysInfo),
    then streams the block to that node (ClientPutData) and compares the
    checksum returned by the node with the locally computed one.  On any
    failure the key is removed when allow_rewrite is False, and the error
    is re-raised.
    """
    # Resolve the destination key and node address.
    packet = FabnetPacketRequest(method='PutKeysInfo', parameters={'key': key}, sync=True)
    resp = self.fri_client.call_sync(self.fabnet_hostname, packet, FRI_CLIENT_TIMEOUT)
    if resp.ret_code != 0:
        raise Exception('Key info error: %s'%resp.ret_message)
    if not resp.ret_parameters.has_key('key_info'):
        raise Exception('Invalid PutKeysInfo response! key_info is expected')
    key_info = resp.ret_parameters['key_info']
    key, node_addr = key_info
    params = {'key':key, 'replica_count':replica_count, \
            'wait_writes_count': wait_writes_count}
    # Stream the block to the destination node in FILE_ITER_BLOCK_SIZE chunks.
    packet = FabnetPacketRequest(method='ClientPutData', parameters=params, \
            binary_data=ChunkedBinaryData.prepare(data_block, FILE_ITER_BLOCK_SIZE), sync=True)
    resp = self.fri_client.call_sync(node_addr, packet, FRI_CLIENT_TIMEOUT)
    try:
        if resp.ret_code != 0:
            raise Exception('ClientPutData error: %s'%resp.ret_message)
        if not resp.ret_parameters.has_key('key'):
            raise Exception('put data block error: no data key found in response message "%s"'%resp)
        primary_key = resp.ret_parameters['key']
        checksum = resp.ret_parameters['checksum']
        # Integrity check: compare server-side checksum with the local one.
        if isinstance(data_block, DataBlock):
            db_checksum = data_block.checksum()
        else:
            db_checksum = hashlib.sha1(data_block).hexdigest()
        if checksum != db_checksum:
            raise Exception('Invalid data block checksum!')
    except Exception, err:
        logger.error('[put] %s'%err)
        logger.traceback_debug()
        if not allow_rewrite:
            # Don't leave a half-written block behind when rewriting is
            # forbidden.
            self.remove(key, replica_count)
        raise err
    # NOTE(review): primary_key is assigned but not returned in this view;
    # presumably the method returns it (or key) to the caller -- confirm.
def get(self, primary_key, replica_count, data_block):
    """Fetch the block stored under *primary_key* from fabnet into *data_block*.

    Every key/node candidate returned by GetKeysInfo is tried in turn until
    one yields data whose checksum matches.  Returns *data_block* on
    success, None when every candidate failed.
    """
    info_request = FabnetPacketRequest(method='GetKeysInfo',
            parameters={'key': primary_key, 'replica_count': replica_count},
            sync=True)
    info_resp = self.fri_client.call_sync(self.fabnet_hostname, info_request,
            FRI_CLIENT_TIMEOUT)
    if info_resp.ret_code != 0:
        raise Exception('Get keys info error: %s'%info_resp.ret_message)

    for key, is_replica, node_addr in info_resp.ret_parameters['keys_info']:
        block_request = FabnetPacketRequest(method='GetDataBlock',
                parameters={'key': key, 'is_replica': is_replica}, sync=True)
        resp = self.fri_client.call_sync(node_addr, block_request, FRI_CLIENT_TIMEOUT)
        if resp.ret_code == RC_NO_DATA:
            logger.error('No data found for key %s on node %s'%(key, node_addr))
            continue
        if resp.ret_code != 0:
            logger.error('Get data block error for key %s from node %s: %s'%(key, node_addr, resp.ret_message))
            continue
        # Successful response: copy every chunk into the local block.
        exp_checksum = resp.ret_parameters['checksum']
        while True:
            chunk = resp.binary_data.get_next_chunk()
            if not chunk:
                break
            data_block.write(chunk, encrypt=False)
        if exp_checksum != data_block.checksum():
            logger.error('Currupted data block for key %s from node %s'%(primary_key, node_addr))
            continue
        return data_block
    return None
def run(self): while True: out_streem = data = None job = self.queue.get() data_block = None transaction = None seek = None try: if job == QUIT_JOB or self.stop_flag.is_set(): break transaction, seek = job data_block,_,foreign_name = transaction.get_data_block(seek, noclone=False) if not foreign_name: raise Exception('foreign name does not found for seek=%s'%seek) if transaction.is_failed(): logger.debug('Transaction {%s} is failed! Skipping data block downloading...'%transaction.get_id()) data_block.remove() continue self.fabnet_gateway.get(foreign_name, transaction.get_replica_count(), data_block) data_block.close() self.transactions_manager.update_transaction(transaction.get_id(), seek, \ is_failed=False, foreign_name=data_block.get_name()) except Exception, err: events_provider.error('GetWorker','%s failed: %s'%(transaction, err)) logger.traceback_debug() try: if transaction and data_block: self.transactions_manager.update_transaction(transaction.get_id(), seek, \ is_failed=True, foreign_name=data_block.get_name()) data_block.remove() except Exception, err: logger.error('[GetWorker.__on_error] %s'%err) logger.traceback_debug()
def start_download_transaction(self, file_path):
    """Create and register a download transaction for *file_path*.

    Chunks that are already complete in the local DB cache are attached to
    the transaction with no_transfer=True; the remaining chunks are
    scheduled for download.  Raises PathException when the path is not a
    file and NoLocalFileFound when a local-only file misses a cached chunk.
    """
    file_md, item_id = self.__find_file_from_inprogress(file_path)
    if (not file_md) or (not file_md.is_file()):
        raise PathException('No file found at %s'%file_path)
    transaction_id = item_id
    transaction = Transaction(Transaction.TT_DOWNLOAD, file_path, file_md.replica_count, transaction_id)
    # Becomes True once the transaction has been registered and logged.
    stored_transaction = False
    try:
        for chunk in file_md.chunks:
            db_path = self.__db_cache.get_cache_path('%s.%s'%(item_id, chunk.seek))
            if os.path.exists(db_path):
                data_block = self.new_data_block(item_id, chunk.seek, chunk.size)
                if data_block.full():
                    # Complete cached chunk: no network transfer needed.
                    transaction.append_data_block(chunk.seek, \
                            chunk.size, data_block, chunk.key, no_transfer=True)
                    continue
                else:
                    logger.error('Removing corrupted data block: %s'% data_block.get_name())
                    data_block.remove()
            if file_md.is_local:
                # Local-only files must have every chunk in the cache.
                raise NoLocalFileFound('No local chunk found for file %s (%s.%s)'%\
                        (file_md.name, item_id, chunk.seek))
            if not stored_transaction:
                # Register and log the transaction lazily, only once at
                # least one chunk actually needs downloading.
                self.__transactions[transaction_id] = transaction
                self.__tr_log_start_transaction(transaction)
                stored_transaction = True
            data_block = self.new_data_block(item_id, chunk.seek, chunk.size)
            self.transfer_data_block(transaction_id, chunk.seek, chunk.size, data_block, chunk.key)
    except Exception, err:
        logger.traceback_debug()
        if stored_transaction:
            self.update_transaction_state(transaction_id, Transaction.TS_FAILED)
        raise err
def run(self):
    """Worker loop: upload data blocks for (transaction, seek) jobs.

    Jobs are popped from the queue until QUIT_JOB is received or the stop
    flag is set.  A failed put is retried indefinitely with
    FG_ERROR_TIMEOUT pauses (the job is re-queued); any other failure
    marks the transaction as failed.
    """
    while True:
        job = self.queue.get()
        transaction = None
        data_block = None
        key = None
        try:
            if job == QUIT_JOB or self.stop_flag.is_set():
                break
            transaction, seek = job
            data_block, _, _ = transaction.get_data_block(seek)
            if not data_block.exists():
                raise Exception(
                    'Data block %s does not found at local cache!' % data_block.get_name())
            try:
                key = self.fabnet_gateway.put(data_block, replica_count=transaction.get_replica_count(), \
                        allow_rewrite=False)
            except Exception, err:
                # Transient put failure: wait, reopen the block and re-queue
                # the same job for another attempt.
                logger.error('Put data block error: %s' % err)
                logger.error('Cant put data block from file %s. Wait %s seconds and try again...'%\
                        (transaction.get_file_path(), FG_ERROR_TIMEOUT))
                time.sleep(FG_ERROR_TIMEOUT)
                data_block.reopen()
                self.queue.put(job)
                continue
            data_block.close()
            self.transactions_manager.update_transaction(
                transaction.get_id(), seek, is_failed=False, foreign_name=key)
        except Exception, err:
            events_provider.critical('PutWorker', '%s failed: %s' % (transaction, err))
            logger.traceback_debug()
            try:
                # Best effort: mark the transaction failed.
                if transaction:
                    self.transactions_manager.update_transaction(transaction.get_id(), seek, \
                            is_failed=True)
            except Exception, err:
                logger.error('[PutWorker.__on_error] %s' % err)
                logger.traceback_debug()
def get(self, primary_key, replica_count, data_block):
    """Fetch the block stored under *primary_key* from fabnet into *data_block*.

    Every key/node candidate returned by GetKeysInfo is tried in turn until
    one yields data whose checksum matches.  Returns *data_block* on
    success, None when every candidate failed.
    """
    info_request = FabnetPacketRequest(method='GetKeysInfo',
            parameters={'key': primary_key, 'replica_count': replica_count},
            sync=True)
    info_resp = self.fri_client.call_sync(self.fabnet_hostname, info_request,
            FRI_CLIENT_TIMEOUT)
    if info_resp.ret_code != 0:
        raise Exception('Get keys info error: %s' % info_resp.ret_message)

    for key, is_replica, node_addr in info_resp.ret_parameters['keys_info']:
        block_request = FabnetPacketRequest(method='GetDataBlock',
                parameters={'key': key, 'is_replica': is_replica}, sync=True)
        resp = self.fri_client.call_sync(node_addr, block_request, FRI_CLIENT_TIMEOUT)
        if resp.ret_code == RC_NO_DATA:
            logger.error('No data found for key %s on node %s' % (key, node_addr))
            continue
        if resp.ret_code != 0:
            logger.error(
                'Get data block error for key %s from node %s: %s' %
                (key, node_addr, resp.ret_message))
            continue
        # Successful response: copy every chunk into the local block.
        exp_checksum = resp.ret_parameters['checksum']
        while True:
            chunk = resp.binary_data.get_next_chunk()
            if not chunk:
                break
            data_block.write(chunk, encrypt=False)
        if exp_checksum != data_block.checksum():
            logger.error(
                'Currupted data block for key %s from node %s' %
                (primary_key, node_addr))
            continue
        return data_block
    return None
def run(self):
    """Worker loop: upload data blocks for (transaction, seek) jobs.

    Jobs are popped from the queue until QUIT_JOB is received or the stop
    flag is set.  A failed put is retried indefinitely with
    FG_ERROR_TIMEOUT pauses (the job is re-queued); any other failure
    marks the transaction as failed.
    """
    while True:
        job = self.queue.get()
        transaction = None
        data_block = None
        key = None
        try:
            if job == QUIT_JOB or self.stop_flag.is_set():
                break
            transaction, seek = job
            data_block,_,_ = transaction.get_data_block(seek)
            if not data_block.exists():
                raise Exception('Data block %s does not found at local cache!'%data_block.get_name())
            try:
                key = self.fabnet_gateway.put(data_block, replica_count=transaction.get_replica_count(), \
                        allow_rewrite=False)
            except Exception, err:
                # Transient put failure: wait, reopen the block and re-queue
                # the same job for another attempt.
                logger.error('Put data block error: %s'%err)
                logger.error('Cant put data block from file %s. Wait %s seconds and try again...'%\
                        (transaction.get_file_path(), FG_ERROR_TIMEOUT))
                time.sleep(FG_ERROR_TIMEOUT)
                data_block.reopen()
                self.queue.put(job)
                continue
            data_block.close()
            self.transactions_manager.update_transaction(transaction.get_id(), seek, is_failed=False, foreign_name=key)
        except Exception, err:
            events_provider.critical('PutWorker', '%s failed: %s'%(transaction, err))
            logger.traceback_debug()
            try:
                # Best effort: mark the transaction failed.
                if transaction:
                    self.transactions_manager.update_transaction(transaction.get_id(), seek, \
                            is_failed=True)
            except Exception, err:
                logger.error('[PutWorker.__on_error] %s'%err)
                logger.traceback_debug()
def error(self, event_provider, message):
    """Log *message* at error level and broadcast it as an ET_ERROR event."""
    event_type = Event.ET_ERROR
    logger.error(message)
    self.emit(event_type, event_provider, message)