def showfiles(args):
    """Writes out the input and output files.

    Works both for a pack file and for an extracted directory.
    """
    pack = Path(args.pack[0])
    if not pack.exists():
        logging.critical("Pack or directory %s does not exist", pack)
        sys.exit(1)

    if pack.is_dir():
        # Reads info from an unpacked directory
        runs, packages, other_files = load_config_file(pack / 'config.yml',
                                                       canonical=True)

        # The '.reprounzip' file is a pickled dictionary, it contains the name
        # of the files that replaced each input file (if upload was used)
        with pack.open('rb', '.reprounzip') as fp:
            unpacked_info = pickle.load(fp)
        input_files = unpacked_info.get('input_files', {})

        _print_run_inputs(runs, input_files)
        _print_run_outputs(runs)
    else:  # pack.is_file()
        # Reads info from a pack file; nothing has been unpacked yet, so no
        # assignment information exists
        runs, packages, other_files = load_config(pack)

        _print_run_inputs(runs, None)
        _print_run_outputs(runs)


def _print_run_inputs(runs, input_files):
    """Prints every run's input files.

    :param input_files: mapping of input names to the file that replaced them
    (set by upload), or None when that information isn't available (pack
    file). When a mapping is given, each input is followed by its assignment,
    or "(original)" if it was never replaced.
    """
    print("Input files:")
    for i, run in enumerate(runs):
        if len(runs) > 1:
            print(" Run %d:" % i)
        for input_name, path in iteritems(run['input_files']):
            print(" %s (%s)" % (input_name, path))
            if input_files is None:
                continue
            if input_files.get(input_name) is not None:
                assigned = PosixPath(input_files[input_name])
            else:
                assigned = "(original)"
            print(" %s" % assigned)


def _print_run_outputs(runs):
    """Prints every run's output files."""
    print("Output files:")
    for i, run in enumerate(runs):
        if len(runs) > 1:
            print(" Run %d:" % i)
        for output_name, path in iteritems(run['output_files']):
            print(" %s (%s)" % (output_name, path))
def __init__(self, path):
    """Builds the record, attaching a human-readable comment to the path.

    The comment describes what the path is: a link target, a directory, or
    a file with its human-readable size.
    """
    resolved = Path(path)
    file_size = None
    if resolved.exists():
        if resolved.is_link():
            self.comment = "Link to %s" % resolved.read_link(absolute=True)
        elif resolved.is_dir():
            self.comment = "Directory"
        else:
            # Only regular files get a recorded size
            file_size = resolved.size()
            self.comment = hsize(file_size)
    File.__init__(self, resolved, file_size)
def showfiles(args):
    """Writes out the input and output files.

    Works both for a pack file and for an extracted directory.
    """
    pack = Path(args.pack[0])
    if not pack.exists():
        logging.critical("Pack or directory %s does not exist", pack)
        sys.exit(1)

    if pack.is_dir():
        # Reads info from an unpacked directory
        config = load_config_file(pack / 'config.yml', canonical=True)

        # The '.reprounzip' file is a pickled dictionary, it contains the name
        # of the files that replaced each input file (if upload was used)
        with pack.open('rb', '.reprounzip') as fp:
            unpacked_info = pickle.load(fp)
        assigned_input_files = unpacked_info.get('input_files', {})

        _show_config_inputs(config, assigned_input_files)
        _show_config_outputs(config)
    else:  # pack.is_file()
        # Reads info from a pack file; nothing has been uploaded yet so there
        # are no assignments to show
        config = load_config(pack)

        _show_config_inputs(config, None)
        _show_config_outputs(config)


def _show_config_inputs(config, assigned_input_files):
    """Prints the config's input files, with their assignment when known.

    :param assigned_input_files: mapping of input names to the file uploaded
    over them, or None when that information isn't available (pack file).
    When a mapping is given, each input is followed by its assignment, or
    "(original)" if it was never replaced.
    """
    print("Input files:")
    for input_name, f in iteritems(config.inputs_outputs):
        if not f.read_runs:
            continue
        print(" %s (%s)" % (input_name, f.path))
        if assigned_input_files is None:
            continue
        if assigned_input_files.get(input_name) is not None:
            assigned = assigned_input_files[input_name]
        else:
            assigned = "(original)"
        print(" %s" % assigned)


def _show_config_outputs(config):
    """Prints the config's output files."""
    print("Output files:")
    for output_name, f in iteritems(config.inputs_outputs):
        if f.write_runs:
            print(" %s (%s)" % (output_name, f.path))
def setup_logging(tag, verbosity):
    """Sets up the logging module.
    """
    verbosity_to_level = [logging.CRITICAL, logging.WARNING, logging.INFO,
                          logging.DEBUG]
    term_level = verbosity_to_level[min(verbosity, 3)]
    logfile_level = logging.INFO

    # Same format as the C extension, so log output stays uniform
    formatter = LoggingDateFormatter(
        "[%s] %%(asctime)s %%(levelname)s: %%(message)s" % tag)

    # Console handler at the verbosity-selected level
    console = logging.StreamHandler()
    console.setLevel(term_level)
    console.setFormatter(formatter)

    # Root logger must be at the lowest of the two handler levels
    root = logging.root
    root.setLevel(min(term_level, logfile_level))
    root.addHandler(console)

    # File logger, unless disabled through the environment
    disabled = os.environ.get('REPROZIP_NO_LOGFILE', '').lower()
    if disabled in ('', 'false', '0', 'off'):
        dotrpz = Path('~/.reprozip').expand_user()
        try:
            if not dotrpz.is_dir():
                dotrpz.mkdir()
            filehandler = logging.handlers.RotatingFileHandler(
                str(dotrpz / 'log'), mode='a',
                delay=False, maxBytes=400000, backupCount=5)
        except (IOError, OSError):
            logger.warning("Couldn't create log file %s", dotrpz / 'log')
        else:
            filehandler.setFormatter(formatter)
            filehandler.setLevel(logfile_level)
            root.addHandler(filehandler)

            # Write a session marker straight to the file handler
            filehandler.emit(logging.root.makeRecord(
                __name__.split('.', 1)[0],
                logging.INFO,
                "(log start)", 0,
                "Log opened %s %s",
                (datetime.now().strftime("%Y-%m-%d"), sys.argv),
                None))

    logging.getLogger('urllib3').setLevel(logging.INFO)
def setup_logging(tag, verbosity):
    """Sets up the logging module.
    """
    level_table = [logging.CRITICAL, logging.WARNING, logging.INFO,
                   logging.DEBUG]
    term_level = level_table[min(verbosity, 3)]
    logfile_level = logging.INFO

    # Same format as the C extension, so the log stays uniform
    formatter = LoggingDateFormatter(
        "[%s] %%(asctime)s %%(levelname)s: %%(message)s" % tag)

    # Console handler
    console = logging.StreamHandler()
    console.setLevel(term_level)
    console.setFormatter(formatter)

    # Root logger must pass records for the most verbose handler
    root = logging.root
    root.setLevel(min(term_level, logfile_level))
    root.addHandler(console)

    # Rotating file handler under ~/.reprozip
    dotrpz = Path('~/.reprozip').expand_user()
    try:
        if not dotrpz.is_dir():
            dotrpz.mkdir()
        filehandler = logging.handlers.RotatingFileHandler(
            str(dotrpz / 'log'), mode='a',
            delay=False, maxBytes=400000, backupCount=5)
    except (IOError, OSError):
        logging.warning("Couldn't create log file %s", dotrpz / 'log')
    else:
        filehandler.setFormatter(formatter)
        filehandler.setLevel(logfile_level)
        root.addHandler(filehandler)

        # Write a session marker straight to the file handler
        filehandler.emit(logging.root.makeRecord(
            __name__.split('.', 1)[0],
            logging.INFO,
            "(log start)", 0,
            "Log opened %s %s",
            (datetime.now().strftime("%Y-%m-%d"), sys.argv),
            None))
def main():
    """Command-line entry point: adds __future__ imports to Python files."""
    parser = argparse.ArgumentParser(
        description="Adds __future__ imports to Python files")
    parser.add_argument('-v', '--verbose', action='count', dest='verbosity',
                        default=1)
    parser.add_argument('-e', '--enable', action='append',
                        help="Future import to enable")
    parser.add_argument('file', nargs=argparse.ONE_OR_MORE,
                        help="File or directory in which to replace")
    args = parser.parse_args()
    levels = [logging.CRITICAL, logging.WARNING, logging.INFO, logging.DEBUG]
    # Clamp verbosity so extra -v flags don't raise IndexError
    logging.basicConfig(level=levels[min(args.verbosity, 3)])

    if not args.enable:
        logging.critical("Nothing to do")
        sys.exit(1)
    enable = set(to_bytes(feature) for feature in args.enable)
    unrecognized = enable - FUTURES
    if unrecognized:
        # The elements are bytes (from to_bytes): joining them with a str
        # separator raises TypeError on Python 3, so decode for display
        logging.critical("Error: unknown futures %s",
                         ', '.join(f.decode('ascii', 'replace')
                                   for f in sorted(unrecognized)))
        sys.exit(1)

    for target in args.file:
        target = Path(target)
        if target.is_file():
            if not target.name.endswith('.py'):
                logging.warning("File %s doesn't end with .py, processing "
                                "anyway...", target)
            process_file(target, enable)
        elif target.is_dir():
            logging.info("Processing %s recursively...", target)
            for filename in target.recursedir('*.py'):
                process_file(filename, enable)
        else:
            logging.warning("Skipping %s...", target)
def setup_logging(tag, verbosity):
    """Sets up the logging module.
    """
    verbosity_levels = [logging.CRITICAL, logging.WARNING, logging.INFO,
                        logging.DEBUG]
    stderr_level = verbosity_levels[min(verbosity, 3)]
    disk_level = logging.INFO

    # Create formatter, with same format as C extension
    formatter = LoggingDateFormatter(
        "[%s] %%(asctime)s %%(levelname)s: %%(message)s" % tag)

    # Console logger
    console_handler = logging.StreamHandler()
    console_handler.setLevel(stderr_level)
    console_handler.setFormatter(formatter)

    # Root logger, set low enough for both handlers
    root = logging.root
    root.setLevel(min(stderr_level, disk_level))
    root.addHandler(console_handler)

    # File logger under ~/.reprozip; failure to create it is non-fatal
    dotrpz = Path('~/.reprozip').expand_user()
    try:
        if not dotrpz.is_dir():
            dotrpz.mkdir()
        file_handler = logging.handlers.RotatingFileHandler(
            str(dotrpz / 'log'), mode='a',
            delay=False, maxBytes=400000, backupCount=5)
    except (IOError, OSError):
        logging.warning("Couldn't create log file %s", dotrpz / 'log')
    else:
        file_handler.setFormatter(formatter)
        file_handler.setLevel(disk_level)
        root.addHandler(file_handler)
def combine(args):
    """combine subcommand.

    Reads in multiple trace databases and combines them into one.

    The runs from the original traces are appended ('run_id' field gets
    translated to avoid conflicts).
    """
    def resolve(spec):
        # '-' designates the trace already in the target directory
        if spec == '-':
            return Path(args.dir) / 'trace.sqlite3'
        located = Path(spec)
        if located.is_dir():
            # A directory stands for the trace database inside it
            located = located / 'trace.sqlite3'
        return located

    traces = [resolve(t) for t in args.traces]
    reprozip.traceutils.combine_traces(traces, Path(args.dir))
    reprozip.tracer.trace.write_configuration(
        Path(args.dir), args.identify_packages, args.find_inputs_outputs,
        overwrite=True)
def showfiles(args):
    """Writes out the input and output files.

    Works both for a pack file and for an extracted directory. Honors
    args.input/args.output (restrict to one side), args.run (restrict to
    one run) and args.verbosity (>= 2 also prints paths).
    """
    def parse_run(runs, s):
        # Accepts either a run id (matched against run['id']) or a numeric
        # index into `runs`
        for i, run in enumerate(runs):
            if run['id'] == s:
                return i
        try:
            r = int(s)
        except ValueError:
            logger.critical("Error: Unknown run %s", s)
            raise UsageError
        if r < 0 or r >= len(runs):
            logger.critical("Error: Expected 0 <= run <= %d, got %d",
                            len(runs) - 1, r)
            sys.exit(1)
        return r

    # With neither flag given, show both sides
    show_inputs = args.input or not args.output
    show_outputs = args.output or not args.input

    def file_filter(fio):
        # Predicate selecting the files to display; file_filter.run (set
        # below, possibly by parse_run) optionally restricts the selection
        # to files read/written by one specific run
        if file_filter.run is None:
            return ((show_inputs and fio.read_runs) or
                    (show_outputs and fio.write_runs))
        else:
            return ((show_inputs and file_filter.run in fio.read_runs) or
                    (show_outputs and file_filter.run in fio.write_runs))

    file_filter.run = None

    pack = Path(args.pack[0])

    if not pack.exists():
        logger.critical("Pack or directory %s does not exist", pack)
        sys.exit(1)

    if pack.is_dir():
        # Reads info from an unpacked directory
        config = load_config_file(pack / 'config.yml', canonical=True)

        # Filter files by run
        if args.run is not None:
            file_filter.run = parse_run(config.runs, args.run)

        # The '.reprounzip' file is a pickled dictionary, it contains the name
        # of the files that replaced each input file (if upload was used)
        unpacked_info = metadata_read(pack, None)
        assigned_input_files = unpacked_info.get('input_files', {})

        if show_inputs:
            shown = False
            for input_name, f in sorted(config.inputs_outputs.items()):
                if f.read_runs and file_filter(f):
                    # Header is only printed once something matches
                    if not shown:
                        print("Input files:")
                        shown = True
                    if args.verbosity >= 2:
                        print(" %s (%s)" % (input_name, f.path))
                    else:
                        print(" %s" % input_name)
                    # assigned is either a replacement file name (str/bytes)
                    # or a marker value; None/False/True are rendered below —
                    # see the metadata writer for their exact semantics
                    assigned = assigned_input_files.get(input_name)
                    if assigned is None:
                        assigned = "(original)"
                    elif assigned is False:
                        assigned = "(not created)"
                    elif assigned is True:
                        assigned = "(generated)"
                    else:
                        assert isinstance(assigned, (bytes, str))
                    print(" %s" % assigned)
            if not shown:
                print("Input files: none")

        if show_outputs:
            shown = False
            for output_name, f in sorted(config.inputs_outputs.items()):
                if f.write_runs and file_filter(f):
                    if not shown:
                        print("Output files:")
                        shown = True
                    if args.verbosity >= 2:
                        print(" %s (%s)" % (output_name, f.path))
                    else:
                        print(" %s" % output_name)
            if not shown:
                print("Output files: none")

    else:  # pack.is_file()
        # Reads info from a pack file
        config = load_config(pack)

        # Filter files by run
        if args.run is not None:
            file_filter.run = parse_run(config.runs, args.run)

        # NOTE: in this branch the headers are printed whenever any file of
        # that side exists, before the run filter is applied
        if any(f.read_runs for f in config.inputs_outputs.values()):
            print("Input files:")
            for input_name, f in sorted(config.inputs_outputs.items()):
                if f.read_runs and file_filter(f):
                    if args.verbosity >= 2:
                        print(" %s (%s)" % (input_name, f.path))
                    else:
                        print(" %s" % input_name)
        else:
            print("Input files: none")

        if any(f.write_runs for f in config.inputs_outputs.values()):
            print("Output files:")
            for output_name, f in sorted(config.inputs_outputs.items()):
                if f.write_runs and file_filter(f):
                    if args.verbosity >= 2:
                        print(" %s (%s)" % (output_name, f.path))
                    else:
                        print(" %s" % output_name)
        else:
            print("Output files: none")
def wrapper(args):
    """Checks that the target directory exists, then delegates to `func`."""
    directory = Path(args.target[0])
    if directory.is_dir():
        return func(args)
    logging.critical("Error: Target directory doesn't exist")
    raise UsageError
def showfiles(args):
    """Writes out the input and output files.

    Works both for a pack file and for an extracted directory. Honors
    args.input/args.output (restrict to one side), args.run (restrict to
    one run) and args.verbosity (>= 2 also prints paths).
    """
    # With neither flag given, both sides are shown
    want_inputs = args.input or not args.output
    want_outputs = args.output or not args.input

    # Holds the index of the run to filter on, if any (closure cell)
    selected_run = [None]

    def run_index(runs, s):
        # A run may be designated by its id or by its numeric position
        for idx, run in enumerate(runs):
            if run['id'] == s:
                return idx
        try:
            num = int(s)
        except ValueError:
            logging.critical("Error: Unknown run %s", s)
            raise UsageError
        if num < 0 or num >= len(runs):
            logging.critical("Error: Expected 0 <= run <= %d, got %d",
                             len(runs) - 1, num)
            sys.exit(1)
        return num

    def matches(fio):
        # Selects files to display, optionally restricted to one run
        run = selected_run[0]
        if run is None:
            return ((want_inputs and fio.read_runs) or
                    (want_outputs and fio.write_runs))
        return ((want_inputs and run in fio.read_runs) or
                (want_outputs and run in fio.write_runs))

    pack = Path(args.pack[0])

    if not pack.exists():
        logging.critical("Pack or directory %s does not exist", pack)
        sys.exit(1)

    if pack.is_dir():
        # Unpacked directory: read the canonical configuration
        config = load_config_file(pack / 'config.yml', canonical=True)

        if args.run is not None:
            selected_run[0] = run_index(config.runs, args.run)

        # The '.reprounzip' file is a pickled dictionary mapping input names
        # to the files that replaced them (if upload was used)
        unpacked_info = metadata_read(pack, None)
        assigned_input_files = unpacked_info.get('input_files', {})

        if want_inputs:
            header_printed = False
            for input_name, f in sorted(iteritems(config.inputs_outputs)):
                if not f.read_runs or not matches(f):
                    continue
                if not header_printed:
                    print("Input files:")
                    header_printed = True
                if args.verbosity >= 2:
                    print(" %s (%s)" % (input_name, f.path))
                else:
                    print(" %s" % input_name)
                assigned = assigned_input_files.get(input_name)
                if assigned is None:
                    assigned = "(original)"
                elif assigned is False:
                    assigned = "(not created)"
                elif assigned is True:
                    assigned = "(generated)"
                else:
                    assert isinstance(assigned, (bytes, unicode_))
                print(" %s" % assigned)
            if not header_printed:
                print("Input files: none")

        if want_outputs:
            header_printed = False
            for output_name, f in sorted(iteritems(config.inputs_outputs)):
                if not f.write_runs or not matches(f):
                    continue
                if not header_printed:
                    print("Output files:")
                    header_printed = True
                if args.verbosity >= 2:
                    print(" %s (%s)" % (output_name, f.path))
                else:
                    print(" %s" % output_name)
            if not header_printed:
                print("Output files: none")

    else:  # pack.is_file()
        # Pack file: read the configuration from inside the pack
        config = load_config(pack)

        if args.run is not None:
            selected_run[0] = run_index(config.runs, args.run)

        if any(f.read_runs for f in itervalues(config.inputs_outputs)):
            print("Input files:")
            for input_name, f in sorted(iteritems(config.inputs_outputs)):
                if f.read_runs and matches(f):
                    if args.verbosity >= 2:
                        print(" %s (%s)" % (input_name, f.path))
                    else:
                        print(" %s" % input_name)
        else:
            print("Input files: none")

        if any(f.write_runs for f in itervalues(config.inputs_outputs)):
            print("Output files:")
            for output_name, f in sorted(iteritems(config.inputs_outputs)):
                if f.write_runs and matches(f):
                    if args.verbosity >= 2:
                        print(" %s (%s)" % (output_name, f.path))
                    else:
                        print(" %s" % output_name)
        else:
            print("Output files: none")