def cleanup(state):
    '''Cleanup state'''
    # Dump DBr
    # dbr.dump(state.states.dbr)

    # Disconnect DBr
    dbr.cleanup(state.states.dbr)

    # Get DB file size for statistics
    db_size = fmtutil.byte_size(dbr.get_db_file_size(state.states.dbr))
    state.set_stats_category('Tracking DB', 'Size', db_size)

    # Store DB (local --> storage)
    err, msg = storager.store(
        state.states.storager,
        state.states.dbr.model.filepath,
        "{}/{}".format(state.model.dirstorage, '_aeroback'),
        None)
    if err:
        _D.ERROR(
            __name__,
            "Error storing File Versioned Tracking DB",
            'file', state.states.dbr.model.filename,
            'msg', msg)
def _store_archive(state):
    '''Store archive file, record its version in DB and prune old versions'''
    # Upload archive to storage
    err, msg = storager.store(
        state.states.storager,
        state.archive_filepath,
        state.model.dirstorage,
        state.model.atype)
    if err:
        return 1, "Error storing archive: {}".format(msg)

    state.total_stored_files += 1
    size = os.stat(state.archive_filepath).st_size
    state.set_stats('Uploaded', fmtutil.byte_size(size))

    # Add file to DB
    dbr.add_version(
        state.states.dbr,
        state.archive_filename,
        size)

    # Get list of older versions to remove
    filenames = dbr.remove_versions_older_than(
        state.states.dbr,
        state.model.history_size)

    # Remove files from storage
    for filename in filenames:
        storager.unstore(
            state.states.storager,
            state.model.dirstorage,
            filename)

    return 0, None
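# Illustrative sketch only (not part of the module): callers are expected
# to consume the (err, msg) convention returned above, e.g.:
#
#   err, msg = _store_archive(state)
#   if err:
#       _D.ERROR(__name__, "Archive store failed", 'msg', msg)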
def cleanup(state):
    """Cleanup state"""
    # Dump DBr
    # dbr.dump(state.states.dbr)

    # Disconnect DBr
    dbr.cleanup(state.states.dbr)

    # Get DB file size for statistics
    db_size = fmtutil.byte_size(dbr.get_db_file_size(state.states.dbr))
    state.set_stats_category("Tracking DB", "Size", db_size)

    # Store DB (local --> storage)
    err, msg = storager.store(
        state.states.storager,
        state.states.dbr.model.filepath,
        state.model.dirstorage,
        None)
    if err:
        _D.ERROR(
            __name__,
            "Error storing DB",
            "file", state.states.dbr.model.filename,
            "msg", msg)
def cleanup(state):
    '''Cleanup state'''
    # Clear DB list of local files and uploaded files to minimize DB size
    dbr.clear_locals_uploads(state.states.dbr)

    # Dump DB
    dbr.dump_params(state.states.dbr)
    #dbr.dump_files_storage(state.states.dbr)

    # Get statistics of what's in storage now
    storfiles, storsize = dbr.stats_storage(state.states.dbr)
    print '#### STOR FILES', storfiles
    print '#### STOR SIZE', storsize

    # Guard against an empty local file set to avoid division by zero
    if state.total_local_size:
        progress = int(float(storsize) / float(state.total_local_size) * 100.0)
    else:
        progress = 0
    storsize = fmtutil.byte_size(storsize)
    state.set_stats_category('Storage Total', 'Progress', '{}%'.format(progress))
    state.set_stats_category('Storage Total', 'Files count', storfiles)
    state.set_stats_category('Storage Total', 'Files size', storsize)

    # Disconnect DBr
    dbr.cleanup(state.states.dbr)

    # Get DB file size for statistics
    db_size = fmtutil.byte_size(dbr.get_db_file_size(state.states.dbr))
    state.set_stats_category('Tracking DB', 'DB size', db_size)

    # Store DB (local --> storage)
    err, msg = storager.store(
        state.states.storager,
        state.states.dbr.model.filepath,
        "{}/{}".format(state.model.dirstorage, '_aeroback'),
        None)
    if err:
        _D.ERROR(
            __name__,
            "Error storing File Incremental Tracking DB",
            'file', state.states.dbr.model.filename,
            'msg', msg)
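# Worked example of the progress figure computed above (illustrative numbers):
# with storsize = 750000000 bytes already in storage and
# state.total_local_size = 1000000000 bytes,
# progress = int(750000000.0 / 1000000000.0 * 100.0) = 75, reported as '75%'.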
def _store(state):
    """
    Store each file.
    Only allows max_fails failures before stopping and returning error.
    """
    max_fails = 5
    fails = 0
    i = 0
    total_size = 0

    # Get files to store
    rows = dbr.get_files_upload(state.states.dbr)
    directory = state.model.directory
    dirstorage = state.model.dirstorage

    # Store each file
    for row in rows:
        filepath = row['filepath']
        modified = row['modified']
        size = row['size']

        # Extract path that is in between directory and filename
        filedir, filename = fsutil.path_to_body_tail(filepath)

        err, msg = storager.store(
            state.states.storager,
            os.path.join(directory, filepath),
            "{}/{}".format(dirstorage, filedir),
            state.model.atype)
        if err:
            # Log error, stop once max_fails is reached
            _D.ERROR(
                __name__,
                "Error storing file",
                'file', filepath)
            fails += 1
            if fails == max_fails:
                break
        else:
            # Update DB on file store success
            print "\t+ ", filepath
            dbr.add_update_storage_file(state.states.dbr, filepath, modified, size)
            i += 1
            total_size += size

    # Commit all added storage files, if any stores happened
    if i:
        dbr.finish_adding_storage_files(state.states.dbr)
        dbr.add_stats(state.states.dbr, state.model.date_str, total_size)
        # Dump stats
        #dbr.dump_stats(state.states.dbr)

    state.total_stored_files = i
    state.total_stored_size = total_size
    state.set_stats_category('Session Uploaded', 'Files count', i)
    state.set_stats_category('Session Uploaded', 'Files size', fmtutil.byte_size(total_size))

    if fails:
        return 1, "Error storing files, {} file(s) failed".format(fails)

    return 0, None
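# Illustrative sketch only: _store assumes fsutil.path_to_body_tail splits
# a relative path into its directory part and file name, similar to
# os.path.split, e.g. (assumed behavior):
#
#   filedir, filename = fsutil.path_to_body_tail('photos/2014/img.jpg')
#   # filedir == 'photos/2014', filename == 'img.jpg'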