def _archive_dirs(state):
    """Archive state.model.dirs into a single tar.gz in state.model.dir_temp.

    Side effects: sets state.archive_filename and state.archive_filepath.
    Returns (0, None) on success, or (1, message) if tar fails.
    """
    # Build a filesystem-friendly archive name from the model description,
    # capped at 100 characters.
    name = '-'.join(state.model.description.lower().split())
    if len(name) > 100:
        # Fix: was name[:99], which cut one char short of the 100-char cap.
        name = name[:100]

    state.archive_filename = "{}_{}.tar.gz".format(
        name, state.model.date_str)
    state.archive_filepath = os.path.join(
        state.model.dir_temp, state.archive_filename
    )

    # Read TAR help on -C option:
    # http://www.gnu.org/software/tar/manual/html_section/one.html#SEC117
    invocation = ['tar', '-czvf', state.archive_filepath]

    # Append each dir as a (-C parent, folder) pair so entries are stored
    # relative to each dir's parent.
    for d in state.model.dirs:
        parent, folder = fsutil.path_to_body_tail(d)
        # If either is empty then we don't support this one
        if not parent or not folder:
            _D.WARNING(
                __name__,
                "Directory path cannot be split for archiving, skipping it",
                'dir', d, 'parent', parent, 'folder', folder
            )
            continue
        invocation.extend(['-C', parent, folder])

    res, err, msg = cmdutil.call_cmd(invocation)
    if err:
        return 1, "Error archiving dirs: {}".format(msg)
    # Removed dead trailing `pass` and redundant else-branch.
    return 0, None
def _store(state): """ Store each file. Only allows max_fails failures before stopping and returning error. """ max_fails = 5 fails = 0 i = 0 total_size = 0 # Get files to store rows = dbr.get_files_upload(state.states.dbr) directory = state.model.directory dirstorage = state.model.dirstorage #dir_str_len = len(state.model.directory) # Store each file for row in rows: #filepath = row[0] #modified = row[1] #size = row[2] filepath = row['filepath'] modified = row['modified'] size = row['size'] # Extract path that is in between directory and filename filedir, filename = fsutil.path_to_body_tail(filepath) ''' if len(filedir) > dir_str_len: # Case of file in subdirectory of directory dirstorage = os.path.join( state.model.dirstorage, filedir[dir_str_len + 1:]) else: # Case of file located in directory itself dirstorage = state.model.dirstorage ''' err, msg = storager.store( state.states.storager, os.path.join(directory, filepath), "{}/{}".format(dirstorage, filedir), state.model.atype) if err: # Log error _D.ERROR( __name__, "Error storing file", 'file', filepath ) fails += 1 if fails == max_fails: break else: # Update DB on file store success print "\t+ ", filepath dbr.add_update_storage_file(state.states.dbr, filepath, modified, size) i += 1 total_size += size # Commit all added storage files, if any stores happened if i: dbr.finish_adding_storage_files(state.states.dbr) dbr.add_stats(state.states.dbr, state.model.date_str, total_size) # Dump stats #dbr.dump_stats(state.states.dbr) state.total_stored_files = i state.total_stored_size = total_size state.set_stats_category('Session Uploaded', 'Files count', i) state.set_stats_category('Session Uploaded', 'Files size', fmtutil.byte_size(total_size)) if fails: return 1, "Error storing files, aborted after {} failures".format(max_fails) return 0, None