def encode_file(self, file_path):
    """Encode a single file into FEC share files under the target tree.

    The share files are written to the directory inside
    ``self.target_root_path`` that mirrors the file's position relative to
    ``self.root_source_path``, using the file's base name as the share
    prefix and ``self.k`` / ``self.n`` / ``self.suffix`` as FEC parameters.
    """
    # Share files are named after the source file itself.
    share_prefix = os.path.basename(file_path)
    # Mirror the file's location relative to the source root...
    subdir = os.path.dirname(os.path.relpath(file_path, self.root_source_path))
    # ...under the target root, creating the directory if needed.
    destination = os.path.join(self.target_root_path, subdir)
    utility.make_sure_directory_exists(destination)
    size_in_bytes = os.stat(file_path).st_size
    with open(file_path, 'rb') as source:
        filefec.encode_to_files(source, size_in_bytes, destination,
                                share_prefix, self.k, self.n, self.suffix,
                                False, False)
def code(filestr, prefix, k, n, dirname=''):
    """Make a directory for the specified file portion to code and store
    encoded packets into that directory.

    Creates ``<filestr>.dir`` inside *dirname* and writes the k-of-n FEC
    share files (``.chunk`` suffix) for *filestr* into it.

    :param filestr: name of the file to encode, relative to *dirname*
    :param prefix: prefix used for the generated share file names
    :param k: number of shares required to reconstruct the file
    :param n: total number of shares to generate
    :param dirname: directory containing *filestr* (default: current dir)
    """
    # %-formatting keeps the same output as the old Py2 print statement
    # while remaining valid on both Python 2 and 3.
    print("filestr %s prefix %s dirname %s" % (filestr, prefix, dirname))
    current_dir = os.getcwd()
    os.chdir(dirname)
    try:
        os.mkdir(filestr + '.dir')
        # os.path.getsize replaces the seek-to-end / tell / seek-to-start
        # dance the original used to measure the file.
        file_size = os.path.getsize(filestr)
        # 'with' guarantees the handle is closed even if encoding fails.
        with open(filestr, 'rb') as f:
            # Call filefec's encode_to_files
            # parameters : File / File Size / Target Directory / File Name
            #              / k / n / File Extension
            filefec.encode_to_files(f, file_size, filestr + '.dir', prefix,
                                    k, n, '.chunk')
    finally:
        # Always restore the working directory, even on error -- the
        # original left the process stranded in *dirname* on exceptions.
        os.chdir(current_dir)
def main():
    """Command-line entry point for the zfec encoder.

    Parses arguments, validates the K/M share parameters, measures the
    input size (buffering stdin in memory when it is not seekable) and
    encodes the input into share files via ``filefec.encode_to_files``.
    """
    import io  # local import: used to re-wrap buffered stdin data

    if '-V' in sys.argv or '--version' in sys.argv:
        print("zfec library version: ", libversion)
        print("zfec command-line tool version: ", __version__)
        sys.exit(0)
    parser = argparse.ArgumentParser(
        description=
        "Encode a file into a set of share files, a subset of which can later be used to recover the original file."
    )
    parser.add_argument('inputfile',
                        help='file to encode or "-" for stdin',
                        type=argparse.FileType('rb'),
                        metavar='INF')
    parser.add_argument(
        '-d', '--output-dir',
        help=
        'directory in which share file names will be created (default ".")',
        default='.',
        metavar='D')
    parser.add_argument(
        '-p', '--prefix',
        help=
        'prefix for share file names; If omitted, the name of the input file will be used.',
        metavar='P')
    parser.add_argument('-s', '--suffix',
                        help='suffix for share file names (default ".fec")',
                        default='.fec',
                        metavar='S')
    parser.add_argument(
        '-m', '--totalshares',
        help='the total number of share files created (default %d)' % DEFAULT_M,
        default=DEFAULT_M,
        type=int,
        metavar='M')
    parser.add_argument(
        '-k', '--requiredshares',
        help='the number of share files required to reconstruct (default %d)' % DEFAULT_K,
        default=DEFAULT_K,
        type=int,
        metavar='K')
    parser.add_argument(
        '-f', '--force',
        help=
        'overwrite any file which already in place an output file (share file)',
        action='store_true')
    parser.add_argument('-v', '--verbose',
                        help='print out messages about progress',
                        action='store_true')
    parser.add_argument(
        '-q', '--quiet',
        help=
        'quiet progress indications and warnings about silly choices of K and M',
        action='store_true')
    parser.add_argument('-V', '--version',
                        help='print out version number and exit',
                        action='store_true')
    args = parser.parse_args()

    is_infile_stdin = False
    if args.prefix is None:
        args.prefix = args.inputfile.name
    if args.prefix == "<stdin>":
        args.prefix = ""
        is_infile_stdin = True

    if args.verbose and args.quiet:
        print("Please choose only one of --verbose and --quiet.")
        sys.exit(1)

    if args.totalshares > 256 or args.totalshares < 1:
        print(
            "Invalid parameters, totalshares is required to be <= 256 and >= 1\nPlease see the accompanying documentation."
        )
        sys.exit(1)
    if args.requiredshares > args.totalshares or args.requiredshares < 1:
        print(
            "Invalid parameters, requiredshares is required to be <= totalshares and >= 1\nPlease see the accompanying documentation."
        )
        sys.exit(1)

    if not args.quiet:
        if args.requiredshares == 1:
            print(
                "warning: silly parameters: requiredshares == 1, which means that every share will be a complete copy of the file. You could use \"cp\" for the same effect. But proceeding to do it anyway..."
            )
        if args.requiredshares == args.totalshares:
            print(
                "warning: silly parameters: requiredshares == totalshares, which means that all shares will be required in order to reconstruct the file. You could use \"split\" for the same effect. But proceeding to do it anyway..."
            )

    in_file = args.inputfile
    try:
        # Seekable files: measure size via seek-to-end, then rewind.
        args.inputfile.seek(0, 2)
        fsize = args.inputfile.tell()
        args.inputfile.seek(0, 0)
    except IOError:
        if is_infile_stdin:
            # stdin is not seekable: buffer it fully in memory.  BUGFIX:
            # the original measured len(contents) but handed the already-
            # drained stdin handle to the encoder, yielding empty shares;
            # re-wrap the bytes so the encoder actually sees the data.
            contents = args.inputfile.read()
            fsize = len(contents)
            in_file = io.BytesIO(contents)
        else:
            raise Exception("zfec - needs a real (Seekable) file handle to"
                            " measure file size upfront.")
    try:
        return filefec.encode_to_files(in_file, fsize, args.output_dir,
                                       args.prefix, args.requiredshares,
                                       args.totalshares, args.suffix,
                                       args.force, args.verbose)
    finally:
        # Close the original handle regardless of encode outcome.
        args.inputfile.close()
def zfec_encode(inf, fsize, dirname, prefix, k, m, suffix='.fec',
                overwrite=False, verbose=False):
    """Encode the file at path *inf* into k-of-m FEC share files.

    :param inf: path to the input file (opened here in binary mode)
    :param fsize: size of the input in bytes, as expected by filefec
    :param dirname: directory to write share files into
    :param prefix: prefix for share file names
    :param k: number of shares required to reconstruct
    :param m: total number of shares to create
    :param suffix: share file name suffix (default '.fec')
    :param overwrite: overwrite existing share files if True
    :param verbose: print progress messages if True
    """
    with open(inf, 'rb') as f:
        # BUGFIX: the original hardcoded verbose=False, silently ignoring
        # the caller's *verbose* argument; pass it through instead.
        filefec.encode_to_files(f, fsize, dirname, prefix, k, m, suffix,
                                overwrite, verbose)
def _fec_encode(ns):
    """Encode every file under ns.input into FEC shares and distribute them.

    Pass 1 walks ns.input, mirrors its directory tree inside a fresh temp
    dir under the cwd, and writes ns.shares-of-len(ns.outputs) '.fec'
    chunk files there.  Pass 2 walks ns.input again, mirrors the tree into
    each output dir and copies chunk *idx* of each file to ns.outputs[idx].
    Exits via sys.exit on chunk-count mismatch or (without ns.force) on a
    chunk name collision; cleans up the temp dir in those paths and at end.
    """
    logging.info('FEC pass started')
    tmpd = tempfile.mkdtemp(dir=os.getcwd())
    logging.debug('created temp dir at %s' % tmpd)
    # total shares: one chunk per output directory
    tshares = len(ns.outputs)
    for root, dirs, files in os.walk(ns.input):
        unrooted = os.path.relpath(root, ns.input)
        # output dir, name mapping (tmpd lives under the cwd, so its
        # basename is a valid relative path while the cwd is unchanged)
        od = root.replace(ns.input, os.path.basename(tmpd))
        # recreate tree structure in temp dir.  BUGFIX: join through the
        # walk-relative path -- joining tmpd + dname directly flattened
        # any directory nested more than one level deep.
        for dname in dirs:
            osubdir = os.path.join(tmpd, unrooted, dname)
            os.mkdir(osubdir)
            logging.debug('created %s' % osubdir)
        for f in files:
            fpath = os.path.join(root, f)
            logging.debug('processing file: %s' % fpath)
            # BUGFIX: open in binary mode -- zfec shares are bytes; text
            # mode breaks on Python 3 and corrupts data on Windows.
            with open(fpath, 'rb') as fd:
                fsize = os.path.getsize(fpath)
                logging.debug('FEC %s (%d bytes)' % (fpath, fsize))
                filefec.encode_to_files(fd, fsize, od, f, ns.shares,
                                        tshares, '.fec', ns.force, False)
    logging.info('FEC pass completed')
    logging.info('Distribution pass started')
    for root, dirs, files in os.walk(ns.input):
        unrooted = os.path.relpath(root, ns.input)
        logging.debug('unrooted path: %s' % unrooted)
        # map dir tree structure unto output directories (same
        # relative-path fix as the FEC pass above)
        for outdir in ns.outputs:
            for dname in dirs:
                osubdir = os.path.join(outdir, unrooted, dname)
                try:
                    os.mkdir(osubdir)
                    logging.debug('created %s' % osubdir)
                except OSError:
                    logging.debug('exists: %s' % osubdir)
        for f in files:
            # glob on FEC output files to build list of things to distribute
            gexpr = common.fec_glob(f)
            gpath = os.path.join(tmpd, unrooted, gexpr)
            logging.debug('glob path for %s: %s' % (f, gpath))
            # sort: glob order is filesystem-dependent; sorting makes the
            # chunk -> output-directory mapping deterministic across runs
            fecs = [os.path.basename(fec) for fec in sorted(glob.glob(gpath))]
            logging.debug('FEC chunks for %s: %s' % (f, fecs))
            if len(fecs) != tshares:
                logging.debug('len(fecs)=%d;shares=%d' % (len(fecs), tshares))
                sys.stdout.write('Chunks and output dir counts mismatch\n')
                common.cleanup(tmpd)
                sys.exit(ERR['CHUNK_COUNT_MISMATCH'])
            # spread chunks over output dirs
            for idx, fec in enumerate(fecs):
                ofec = os.path.join(ns.outputs[idx], unrooted, fec)
                if not ns.force and os.path.exists(ofec):
                    logging.debug('chunk collision: %s' % ofec)
                    sys.stderr.write('Some chunks with the same name exist\n')
                    common.cleanup(tmpd)
                    sys.exit(ERR['NO_OVERWRITE'])
                ifec = os.path.join(tmpd, unrooted, fec)
                logging.debug('input FEC for %s: %s' % (f, ifec))
                shutil.copyfile(ifec, ofec)
                logging.debug('wrote %s' % ofec)
    logging.info('Distribution pass completed')
    common.cleanup(tmpd)
class pi_radio_file_manager():
    """File lifecycle manager for the pi-radio transmit/receive queues.

    Moves payload files between waiting/doing/done directories, compresses
    them with gzip, and produces zfec forward-error-correction share files.
    Directory arguments are concatenated directly with file names, so they
    are presumably path prefixes ending in a separator -- TODO confirm
    against the callers.
    """

    def __init__(self,
                 tx_directory_doing=_def_tx_directory_doing,
                 tx_directory_done=_def_tx_directory_done,
                 tx_directory_waiting=_def_tx_directory_waiting,
                 rx_directory_doing=_def_rx_directory_doing,
                 rx_directory_done=_def_rx_directory_done,
                 rx_directory_recovering=_def_rx_directory_recovering,
                 rx_directory_waiting=None):
        self._tx_directory_doing = tx_directory_doing
        self._tx_directory_done = tx_directory_done
        self._tx_directory_waiting = tx_directory_waiting
        self._rx_directory_doing = rx_directory_doing
        self._rx_directory_done = rx_directory_done
        self._rx_directory_recovering = rx_directory_recovering
        # BUGFIX: recover_payload() reads this attribute, but the original
        # __init__ never set it, so every call raised AttributeError.  A
        # backward-compatible keyword parameter now supplies it; callers
        # that never use recover_payload() are unaffected.
        self._rx_directory_waiting = rx_directory_waiting
        #TODO: Check folders exists and are writeable if not try to create or raise an exception

    def check_file_is_not_used(self, file_name):
        """Return True if the file in the tx waiting dir is not locked.

        Renames the file onto itself as a lock probe -- presumably this
        targets Windows semantics, where the rename fails while another
        process holds the file open; on POSIX it always succeeds (TODO
        confirm the intended platform).

        :raises Exception: if the file does not exist at all.
        """
        if os.path.exists(self._tx_directory_waiting + file_name):
            try:
                os.rename(self._tx_directory_waiting + file_name,
                          self._tx_directory_waiting + file_name)
                return True
            except OSError as e:
                logging.error("Access-error on file " + file_name + ": " +
                              str(e))
                return False
        else:
            raise Exception(
                "Cannot check for file access on a non-existing file: " +
                file_name)

    def recover_payload(self, file_name):
        """Move *file_name* from the rx waiting dir into the rx working dir.

        :returns: the new path in the rx working directory, or None if the
            file was not present in the waiting directory.
        :raises Exception: if the waiting directory was never configured,
            or the move fails.
        """
        if self._rx_directory_waiting is None:
            # Explicit error instead of the AttributeError the original
            # code produced for this unset attribute.
            raise Exception(
                "rx_directory_waiting is not configured; pass it to __init__")
        #Move file to working dir
        if os.path.exists(self._rx_directory_waiting + file_name):
            try:
                os.rename(self._rx_directory_waiting + file_name,
                          self._rx_directory_doing + file_name)
                return self._rx_directory_doing + file_name
            except OSError as e:
                raise Exception("Cannot move file to working directory " +
                                file_name + ": " + str(e))

    def create_payload(self, file_name):
        """Prepare *file_name* for transmission: move, gzip, and FEC-encode.

        Moves the file from the tx waiting dir to the tx working dir,
        writes a gzip-compressed copy next to it, then encodes that copy
        into the configured number of '.fec' share files.

        :raises Exception: wrapping the underlying error at each stage.
        """
        #Move file to working dir
        if os.path.exists(self._tx_directory_waiting + file_name):
            try:
                os.rename(self._tx_directory_waiting + file_name,
                          self._tx_directory_doing + file_name)
                logging.debug("File moved to working directory")
            except OSError as e:
                raise Exception("Cannot move file to working directory " +
                                file_name + ": " + str(e))
        #Compress file
        try:
            compressed_file_name = file_name + ".gz"
            # Context managers close both handles even if the copy fails;
            # the original leaked them on exception.  ('except X as e'
            # also replaces the Python-2-only 'except X, e' syntax.)
            with open(self._tx_directory_doing + file_name,
                      'rb') as source_file:
                with gzip.open(
                        self._tx_directory_doing + compressed_file_name,
                        'wb', _def_gzip_compress_level) as target_file:
                    shutil.copyfileobj(source_file, target_file)
            logging.debug("File compressed: %s" % compressed_file_name)
        except Exception as e:
            raise Exception("Cannot compress file " + file_name + ": " +
                            str(e))
        #Create FEC files
        try:
            compressed_path = self._tx_directory_doing + compressed_file_name
            source_file_size = os.path.getsize(compressed_path)
            with open(compressed_path, 'rb') as source_file:
                filefec.encode_to_files(source_file, source_file_size,
                                        self._tx_directory_doing, file_name,
                                        _def_zfec_required_parts,
                                        _def_zfec_total_parts,
                                        suffix=".fec",
                                        overwrite=True,
                                        verbose=False)
            # Reconstruct the share names filefec generates:
            # <prefix>.NN_MM.fec for NN in [0, total parts)
            fec_files = []
            for fec_part in range(_def_zfec_total_parts):
                fec_file_name = (self._tx_directory_doing + file_name +
                                 ".%02d" % fec_part +
                                 "_%02d" % _def_zfec_total_parts + ".fec")
                fec_files.append(fec_file_name)
            logging.debug("Error correction files created: %s" %
                          ' + '.join(sorted(fec_files)))
        except Exception as e:
            raise Exception("Cannot create error correcting files " +
                            file_name + ": " + str(e))
def main():
    """Command-line entry point for the zfec encoder.

    Parses arguments, validates the K/M share parameters, measures the
    input size (buffering stdin in memory when it is not seekable) and
    encodes the input into share files via ``filefec.encode_to_files``.
    """
    import io  # local import: used to re-wrap buffered stdin data

    if '-V' in sys.argv or '--version' in sys.argv:
        print("zfec library version: ", libversion)
        print("zfec command-line tool version: ", __version__)
        sys.exit(0)
    parser = argparse.ArgumentParser(description="Encode a file into a set of share files, a subset of which can later be used to recover the original file.")
    parser.add_argument('inputfile', help='file to encode or "-" for stdin', type=argparse.FileType('rb'), metavar='INF')
    parser.add_argument('-d', '--output-dir', help='directory in which share file names will be created (default ".")', default='.', metavar='D')
    parser.add_argument('-p', '--prefix', help='prefix for share file names; If omitted, the name of the input file will be used.', metavar='P')
    parser.add_argument('-s', '--suffix', help='suffix for share file names (default ".fec")', default='.fec', metavar='S')
    parser.add_argument('-m', '--totalshares', help='the total number of share files created (default %d)' % DEFAULT_M, default=DEFAULT_M, type=int, metavar='M')
    parser.add_argument('-k', '--requiredshares', help='the number of share files required to reconstruct (default %d)' % DEFAULT_K, default=DEFAULT_K, type=int, metavar='K')
    parser.add_argument('-f', '--force', help='overwrite any file which already in place an output file (share file)', action='store_true')
    parser.add_argument('-v', '--verbose', help='print out messages about progress', action='store_true')
    parser.add_argument('-q', '--quiet', help='quiet progress indications and warnings about silly choices of K and M', action='store_true')
    parser.add_argument('-V', '--version', help='print out version number and exit', action='store_true')
    args = parser.parse_args()

    is_infile_stdin = False
    if args.prefix is None:
        args.prefix = args.inputfile.name
    if args.prefix == "<stdin>":
        args.prefix = ""
        is_infile_stdin = True

    if args.verbose and args.quiet:
        print("Please choose only one of --verbose and --quiet.")
        sys.exit(1)

    if args.totalshares > 256 or args.totalshares < 1:
        print("Invalid parameters, totalshares is required to be <= 256 and >= 1\nPlease see the accompanying documentation.")
        sys.exit(1)
    if args.requiredshares > args.totalshares or args.requiredshares < 1:
        print("Invalid parameters, requiredshares is required to be <= totalshares and >= 1\nPlease see the accompanying documentation.")
        sys.exit(1)

    if not args.quiet:
        if args.requiredshares == 1:
            print("warning: silly parameters: requiredshares == 1, which means that every share will be a complete copy of the file. You could use \"cp\" for the same effect. But proceeding to do it anyway...")
        if args.requiredshares == args.totalshares:
            print("warning: silly parameters: requiredshares == totalshares, which means that all shares will be required in order to reconstruct the file. You could use \"split\" for the same effect. But proceeding to do it anyway...")

    in_file = args.inputfile
    try:
        # Seekable files: measure size via seek-to-end, then rewind.
        args.inputfile.seek(0, 2)
        fsize = args.inputfile.tell()
        args.inputfile.seek(0, 0)
    except IOError:
        if is_infile_stdin:
            # stdin is not seekable: buffer it fully in memory.  BUGFIX:
            # the original measured len(contents) but handed the already-
            # drained stdin handle to the encoder, yielding empty shares;
            # re-wrap the bytes so the encoder actually sees the data.
            contents = args.inputfile.read()
            fsize = len(contents)
            in_file = io.BytesIO(contents)
        else:
            raise Exception("zfec - needs a real (Seekable) file handle to"
                            " measure file size upfront.")
    try:
        return filefec.encode_to_files(in_file, fsize, args.output_dir,
                                       args.prefix, args.requiredshares,
                                       args.totalshares, args.suffix,
                                       args.force, args.verbose)
    finally:
        # Close the original handle regardless of encode outcome.
        args.inputfile.close()