def temoa_checks(self):
    """Make sure Python 2.7 is used and that a suitable solver is available.

    Initializes ``self.optimizer`` either through the NEOS solver manager
    (when the config file requests it) or a local Pyomo SolverFactory.
    If no usable solver interface is found, warns on stderr and — when
    interactive — waits for the user to confirm before continuing.
    """
    if version_info < (2, 7):
        msg = (
            "Temoa requires Python v2.7 to run.\n\n The model may not solve"
            "properly with another version.")
        raise SystemExit(msg)

    if self.options.neos is True:
        # Invoke NEOS solver manager if flag is specified in config file
        self.optimizer = pyomo.opt.SolverManagerFactory('neos')
    else:
        self.optimizer = SolverFactory(self.options.solver)

    if self.optimizer:
        pass
    elif self.options.solver != 'NONE':
        SE.write(
            "\nWarning: Unable to initialize solver interface for '{}'\n\n"
            .format(self.options.solver))
        if SE.isatty():
            SE.write("Please press enter to continue or Ctrl+C to quit.")
            # Bug fix: `options` is not defined in this scope; every other
            # access in this method goes through `self.options`.
            if os.path.join(
                    'temoa_model',
                    'config_sample_myopic') not in self.options.file_location:
                raw_input()
def process_file(self, file):
    # This should mirror PipelineStep, so an entire pipeline can function
    # as a pipeline step
    #
    # Runs `file` through each step in order. A step may transform or replace
    # the file object; the (possibly new) object is threaded into the next
    # step. Errors are recorded in file.report and in self.report.
    for step in self.steps:
        # Reset the error marker before each step so a recorded error always
        # refers to the step that raised it.
        file.report["Errors"] = None
        try:
            file = step.process_file(file)
            assert file is not None
        except AbortPipelineForThisImage as exc:
            # Deliberate early-out for this image: record why and stop the
            # pipeline for this file only.
            file.report.update({"PipelineAbortedMessage": str(exc)})
            print(f"\nAborting at {step.__class__.__name__}: {str(exc)}",
                  file=stderr)
            break
        except Exception as exc:
            # Prefer the on-disk path for the error message when available.
            path = file.filename
            if hasattr(file.fetcher, "pathondisk"):
                path = file.fetcher.pathondisk
            print(
                f"\n{exc.__class__.__name__}: {str(exc)} while processing '{path}'\n",
                file=stderr)
            if stderr.isatty():
                traceback.print_exc(file=stderr)
            file.report["Errors"] = f"{exc.__class__.__name__}: {str(exc)}"
            # Record immediately so the failure is captured even if a fatal
            # error re-raises below. NOTE(review): on non-fatal errors the
            # loop continues AND the post-loop record below runs again, so
            # the report may be recorded twice for this file — presumably
            # record() is idempotent per instant; verify.
            self.report.record(file.instant, **file.report)
            if isinstance(exc, FatalPipelineError):
                raise
    self.report.record(file.instant, **file.report)
    return file
def write_error(s):
    """
    Writes out an error message to stderr in red text.

    This is done so that the error messages from the Tasks system can be
    easily distinguished from the errors from the underlying commands being
    run. If we cannot change the text color (not supported by OS or
    redirecting to a file) then just the string is written.
    """
    from sys import stderr
    from os import name
    try:
        is_tty = stderr.isatty()
    except:
        # Some file-like stderr replacements lack isatty(); treat as non-tty.
        is_tty = False
    if is_tty:
        if name == "posix":
            # ANSI escape: bold red foreground.
            stderr.write("\x1b[1;31m")
        elif name == "nt":
            # Windows console has no ANSI support here; use the Win32 console
            # API via ctypes to change the text attribute instead.
            from ctypes import windll, Structure, c_short, c_ushort, byref
            k32 = windll.kernel32
            # -12 is STD_ERROR_HANDLE.
            handle = k32.GetStdHandle(-12)

            class COORD(Structure):
                _fields_ = [("X", c_short), ("Y", c_short)]

            class SMALL_RECT(Structure):
                _fields_ = [("L", c_short), ("T", c_short), ("R", c_short),
                            ("B", c_short)]

            class CONSOLE_SCREEN_BUFFER_INFO(Structure):
                _fields_ = [("Size", COORD), ("CursorPos", COORD),
                            ("Color", c_ushort), ("Rect", SMALL_RECT),
                            ("MaxSize", COORD)]

            # Save the current color so it can be restored after writing.
            csbi = CONSOLE_SCREEN_BUFFER_INFO()
            k32.GetConsoleScreenBufferInfo(handle, byref(csbi))
            prev = csbi.Color
            # Attribute 12 is bright red text on the default background.
            k32.SetConsoleTextAttribute(handle, 12)
    stderr.write(s)
    if is_tty:
        if name == "posix":
            # Reset ANSI attributes.
            stderr.write("\x1b[0m")
        elif name == "nt":
            # Restore the color captured before the write.
            k32.SetConsoleTextAttribute(handle, prev)
    stderr.write("\n")
def _do_delete(self):
    # Flush the queued deletions in self.to_delete, then clear the queue.
    #
    # Two modes:
    #  * rm_script set: append shell commands (rm or mv) to the script
    #    instead of touching the filesystem.
    #  * otherwise: act directly, asking for confirmation unless self.force.
    if self.rm_script is not None:
        with open(self.rm_script, "a") as fh:
            # One command prefix for the whole batch: plain delete, or a
            # no-clobber move into mv_dest.
            cmd = "rm -vf" if self.mv_dest is None else f"mv -n -t {self.mv_dest}"
            for f in self.to_delete:
                print(cmd, realpath(f), file=fh)
    else:
        if not self.force:
            # Show the user exactly what will happen before confirming.
            if self.mv_dest is not None:
                click.echo(
                    f"Will move the following files to {self.mv_dest}:")
            else:
                click.echo("Will delete the following files:")
            for f in self.to_delete:
                click.echo(f"\t{f}")
        if self.force or click.confirm("Is that OK?"):
            for f in self.to_delete:
                try:
                    if self.mv_dest is None:
                        os.unlink(f)
                    else:
                        # makedirs is idempotent; ensures the destination
                        # exists before each move.
                        os.makedirs(self.mv_dest, exist_ok=True)
                        shutil.move(f, self.mv_dest)
                except Exception as exc:
                    # Best-effort: report the failure and continue with the
                    # remaining files.
                    tqdm.write(f"Error deleting {f}: {str(exc)}")
                    if stderr.isatty():
                        traceback.print_exc(file=stderr)
            # NOTE(review): this reports the queued count even if some
            # deletions failed above — confirm that is intended.
            tqdm.write(f"Deleted {len(self.to_delete)} files")
    self.to_delete = []
def index(self, progress=True):
    # Populate self._files / self._instants, preferring the on-disk index
    # file; falls back to crawling the timestream and (optionally) writing a
    # fresh index. No-op when both caches are already populated.
    if len(self._instants) == 0 or len(self._files) == 0:
        # NOTE(review): this acquires and immediately releases the lock —
        # presumably to wait for a concurrent writer to finish before
        # reading; confirm that is the intent.
        with FileLock(self._index_file, timeout=3600):
            pass
        try:
            if op.exists(
                    self._index_file
            ):  # TODO FIXME make this check if the index is stale
                print("read index", self._index_file, file=stderr)
                with open(self._index_file, "r") as fh:
                    self._files = {}
                    self._instants = {}
                    # One JSON-serialized Fetcher per line.
                    for line in tqdm(fh):
                        fetcher = Fetcher.from_json(json.loads(line))
                        self._files[fetcher.filename] = fetcher
                        self._instants[fetcher.instant] = fetcher
                # Only trust a non-empty index; otherwise rebuild below.
                if len(self._instants) > 0 and len(self._files) > 0:
                    return
        except Exception as exc:
            # A corrupt/unreadable index is not fatal: report and rebuild.
            print("Failed to load index file:", str(exc), file=stderr)
            if stderr.isatty():
                traceback.print_exc(file=stderr)
        # Rebuild by crawling the stream, holding the lock for the duration.
        with FileLock(self._index_file):
            try:
                itr = self.iter(tar_contents=False)
                if progress:
                    itr = tqdm(itr)
                if self.write_index:
                    # Crawl and persist the index in one pass.
                    with open(self._index_file, "w") as fh:
                        for f in itr:
                            self._instants[f.instant] = f.fetcher
                            print(json.dumps(f.fetcher.dict(),
                                             cls=PathAwareJsonEncoder),
                                  file=fh)
                else:
                    # In-memory only.
                    for f in itr:
                        self._instants[f.instant] = f.fetcher
            except Exception as exc:
                print("Failed to create timestream index file:", str(exc),
                      file=stderr)
                if stderr.isatty():
                    traceback.print_exc(file=stderr)
                # Remove a partially-written index so the next run does not
                # load truncated data.
                if op.exists(self._index_file):
                    os.unlink(self._index_file)
def warn(warning: str):
    """Log *warning* to stderr with a timestamp.

    Uses a yellow ANSI prefix when stderr is an interactive terminal,
    otherwise plain text.
    """
    from sys import stderr
    from time import ctime

    timestamp = ctime()
    if stderr.isatty():
        print("\033[38;2;238;216;78mWW:", warning, "-\t", timestamp,
              "\033[m", file=stderr)
    else:
        print("WW:", warning, '-\t', timestamp, file=stderr)
def main(code, args, end):
    # Bind up to the first seven command-line arguments to the superscript
    # atoms ³⁴⁵⁶⁷⁸⁹, then evaluate `code` and print the result.
    for index in range(min(7, len(args))):
        # `literal=args[index]` is a default-argument binding: it freezes the
        # current argument value, avoiding the late-binding closure pitfall.
        atoms['³⁴⁵⁶⁷⁸⁹'[index]].call = lambda literal=args[index]: literal
    try:
        # NOTE(review): only the first two args are passed to the evaluator
        # even though up to seven were bound above — confirm intentional.
        output(jamb_eval(code, args[:2]), end)
    except KeyboardInterrupt:
        if stderr.isatty():
            # Keep the shell prompt on its own line after ^C.
            sys.stderr.write('\n')
        # 130 is the conventional exit status for SIGINT (128 + 2).
        return 130
def setup_color(color):
    """Configure colour output on stdout and stderr.

    ``color`` is one of 'never' (force off), 'always' (force on), or
    anything else (enable only when the stream is an interactive tty).
    """
    def wanted(stream):
        # Resolve the user's colour preference for one stream.
        if color == 'never':
            return False
        if color == 'always':
            return True
        return stream.isatty()

    out_on = wanted(stdout)
    Out_Style.enable(out_on)
    Out_Fore.enable(out_on)

    err_on = wanted(stderr)
    Err_Style.enable(err_on)
    Err_Fore.enable(err_on)
def _terminal_has_colors() -> bool: """Initialize the terminal and check if it support colors. Returns: bool: True if successful False otherwise. """ if not (hasattr(stderr, 'isatty') and stderr.isatty()): return False try: curses.setupterm() return curses.tigetnum('colors') > 0 except Exception: # pylint: disable=broad-except return False
def error(err: Exception, fatal: int=False):
    """Log *err* to stderr with a timestamp, red when stderr is a tty.

    If ``fatal`` is non-falsy the process exits with ``int(fatal)`` as the
    exit code.
    """
    from sys import stderr
    from time import ctime

    err_name = type(err).__name__
    when = ctime()
    if stderr.isatty():
        fmt = "\033[38;2;255;0;0mEE: %s:"
        print(fmt % err_name, "%s" % err, "-\t", when, "\033[m", file=stderr)
    else:
        print("EE: %s:" % err_name, "%s" % err, "-\t", when, file=stderr)
    if fatal:
        exit(int(fatal))
def write(self, file):
    # Write one TimestreamFile into the stream, either as a loose file on
    # disk or into a zip bundle, and invalidate any existing index file.
    if op.exists(self._index_file):
        # The stream is about to change, so the index would go stale.
        with FileLock(self._index_file):
            try:
                os.unlink(self._index_file)
            except Exception as exc:
                # Best-effort removal; report but continue writing.
                print(str(exc), file=stderr)
                if stderr.isatty():
                    traceback.print_exc(file=stderr)
    if self.name is None:
        raise RuntimeError("TSv2Stream not opened")
    if not isinstance(file, TimestreamFile):
        raise TypeError("file should be a TimestreamFile")
    subpath = self._timestream_path(file)
    if self.bundle == "none":
        # Unbundled layout: one file per image under self.path.
        outpath = op.join(self.path, subpath)
        os.makedirs(op.dirname(outpath), exist_ok=True)
        with FileLock(outpath):
            with open(outpath, 'wb') as fh:
                fh.write(file.content)
    else:
        if self.bundle == "root":
            # Strip any archive/image extension from the stream root so the
            # bundle path is derived from the bare name.
            self.path = str(self.path)
            for ext in [".tar", ".zip", f".{file.format}"]:
                if self.path.lower().endswith(ext):
                    self.path = self.path[:-len(ext)]
            self.path = Path(self.path)
        bundle = self._bundle_archive_path(file)
        bdir = op.dirname(bundle)
        if bdir:  # i.e. if not $PWD
            os.makedirs(bdir, exist_ok=True)
        with FileLock(bundle):
            # Append mode: create the zip if needed, otherwise add to it.
            with zipfile.ZipFile(bundle,
                                 mode="a",
                                 compression=zipfile.ZIP_STORED,
                                 allowZip64=True) as zip:
                pathinzip = op.join(self.name, subpath)
                if pathinzip not in zip.namelist():
                    zip.writestr(pathinzip, file.content)
                else:
                    # Entry already exists: allow an identical rewrite, but
                    # refuse to silently replace different content (compared
                    # via CRC32).
                    file_crc = zlib.crc32(file.content)
                    zip_crc = zip.getinfo(pathinzip).CRC
                    if file_crc != zip_crc:
                        raise RuntimeError(
                            f"ERROR: trying to overwrite file with different content: zip={bundle}, subpath={subpath}"
                        )
def temoa_checks(self):
    """Make sure Python 2.7 is used and that a suitable solver is available."""
    if version_info < (2, 7):
        msg = ("Temoa requires Python v2.7 to run.\n\n The model may not solve"
               "properly with another version.")
        raise SystemExit( msg )

    # Build the solver interface requested in the run options.
    self.optimizer = SolverFactory( self.options.solver )

    if self.optimizer:
        pass
    elif self.options.solver != 'NONE':
        # Solver could not be initialized: warn, and when interactive give
        # the user a chance to abort before continuing without a solver.
        SE.write(
            "\nWarning: Unable to initialize solver interface for '{}'\n\n"
            .format( self.options.solver ))
        if SE.isatty():
            SE.write( "Please press enter to continue or Ctrl+C to quit." )
            # Python 2 builtin; blocks until the user presses enter.
            raw_input()
def err_progress(iterator, label='progress', suffix='', frequency=0.1):
    '''
    For interactive terminals, return a generator that yields the elements of
    iterator and displays a progress indicator on std err.

    `frequency` may be a float (seconds between updates, adaptively stepped)
    or an int (update every N elements). When stderr is not a tty or
    frequency is falsy, the iterator is returned unchanged.
    '''
    if not frequency or not stderr.isatty():
        return iterator

    from pithy.ansi import ERASE_LINE

    if label is None:
        label = str(iterator)
    pre = '◊ ' + label + ': '
    post = (suffix and ' ' + suffix) + '…\r'
    final = ' ' + suffix + '.' if suffix else '.'

    if isinstance(frequency, float):
        from time import time

        def err_progress_gen():
            prev_t = time()
            step = 1
            next_i = step
            # Bug fix: initialize `i` so the final print below does not raise
            # UnboundLocalError when `iterator` is empty.
            i = 0
            for i, el in enumerate(iterator):
                if i == next_i:
                    print(pre + str(i) + post, end='', file=stderr, flush=True)
                    print(ERASE_LINE, end='', file=stderr, flush=False)
                    # Adapt the step so updates land roughly every
                    # `frequency` seconds.
                    t = time()
                    d = t - prev_t
                    step = max(1, int(step * frequency / d))
                    prev_t = t
                    next_i = i + step
                yield el
            print(pre + str(i) + final, file=stderr)
    else:

        def err_progress_gen():
            # Bug fix: same empty-iterator guard as above.
            i = 0
            for i, el in enumerate(iterator):
                if i % frequency == 0:
                    print(pre + str(i) + post, end='', file=stderr, flush=True)
                    print(ERASE_LINE, end='', file=stderr, flush=False)
                yield el
            print(pre + str(i) + final, file=stderr)

    return err_progress_gen()
def field_map(match):
    # Regex-replacement callback: expands a field reference into a SQL CASE
    # body (WHEN ... THEN ...) built from the layer definition's value map.
    # match.group(1) is the captured indentation, group(2) the field name.
    indent = match.group(1)
    field = match.group(2)
    layer_def = layer.definition['layer']
    fields = layer_def['fields']
    if field not in fields:
        raise ValueError(f"Field '{field}' not found in the layer definition file")
    if 'values' not in fields[field]:
        raise ValueError(
            f"Field '{field}' has no values defined in the layer definition file")
    values = fields[field]['values']
    if not isinstance(values, dict):
        raise ValueError(f"Definition for {field}/values has to be a dictionary")
    conditions = []
    ignored = []
    for map_to, mapping in values.items():
        # mapping is a dictionary of input_fields -> (a value or a list of values)
        # If it is not a dictionary, skip it
        if not isinstance(mapping, dict):
            ignored.append(map_to)
            continue
        whens = []
        for in_fld, in_vals in mapping.items():
            if isinstance(in_vals, str):
                in_vals = [in_vals]
            # Values containing '%' become SQL LIKE patterns; the rest are
            # exact matches (= or IN).
            wildcards = [v for v in in_vals if '%' in v]
            in_vals = [v for v in in_vals if '%' not in v]
            if in_vals:
                if len(in_vals) == 1:
                    expr = f"={sql_value(in_vals[0])}"
                else:
                    expr = f" IN ({', '.join((sql_value(v) for v in in_vals))})"
                whens.append(f'{sql_field(in_fld)}{expr}')
            for wildcard in wildcards:
                whens.append(f'{sql_field(in_fld)} LIKE {sql_value(wildcard)}')
        if whens:
            # Join alternatives with OR, formatted to line up under `indent`.
            cond = f'\n{indent} OR '.join(whens) + \
                   (' ' if len(whens) == 1 else f'\n{indent} ')
            conditions.append(f"WHEN {cond}THEN {sql_value(map_to)}")
        else:
            ignored.append(map_to)
    if ignored and not stderr.isatty():
        # Emit an SQL comment noting values that need manual handling
        # (only when output is being captured, e.g. piped into a file).
        print(f"-- Assuming manual SQL handling of field '{field}' values "
              f"[{','.join(ignored)}] in layer {layer_def['id']}",
              file=stderr)
    return indent + f'\n{indent}'.join(conditions)
def suggest_missing(miss=None, exclude=()):
    """Print installation suggestions for missing optional packages.

    :param miss: a single suggestion key to report on, or None for all
    :param exclude: keys to leave out of the report
    """
    global suggest
    sgs = dict(suggest)
    for exc in exclude:
        try:
            sgs.pop(exc)
        except KeyError:
            pass
    kamaki_docs = 'http://www.synnefo.org/docs/kamaki/latest'
    # Bug fix: the single-item branch must yield one (key, value) pair.
    # Iterating the bare tuple (miss, sgs[miss]) tried to unpack the key
    # string and the value dict themselves as pairs.
    for k, v in ([(miss, sgs[miss])] if miss else sgs.items()):
        if v['active'] and stderr.isatty():
            stderr.write('Suggestion: you may like to install %s\n' % k)
            stderr.write(
                ('%s\n' % v['description']).encode(pref_enc, 'replace'))
            stderr.write('\tIt is easy, here are the instructions:\n')
            stderr.write('\t%s/installation.html%s\n' % (
                kamaki_docs, v['url']))
            stderr.flush()
def parse(self):
    """Build the joined WHEN/THEN condition list for this field's values.

    Values whose mapping cannot be translated (neither dict nor list, or
    yielding an empty expression) are collected and reported on stderr as
    needing manual SQL handling.
    """
    conditions = []
    ignored = []
    for map_to, mapping in self.field.values.items():
        # mapping is a dictionary of input_fields -> (a value or a list of
        # values); dicts and lists are translatable, anything else is not.
        if isinstance(mapping, (dict, list)):
            expr = self.to_expression(map_to, mapping)
            if expr:
                conditions.append(expr)
                continue
        ignored.append(map_to)
    if ignored and not stderr.isatty():
        print(f"-- Assuming manual SQL handling of field '{self.field.name}' "
              f"values [{','.join(ignored)}] in layer {self.layer.id}",
              file=stderr)
    return self.indent + f'\n{self.indent}'.join(conditions)
def suggest_missing(miss=None, exclude=()):
    """Print installation suggestions for missing optional packages.

    :param miss: a single suggestion key to report on, or None for all
    :param exclude: keys to leave out of the report
    """
    global suggest
    sgs = dict(suggest)
    for exc in exclude:
        try:
            sgs.pop(exc)
        except KeyError:
            pass
    kamaki_docs = 'http://www.synnefo.org/docs/kamaki/latest'
    # Bug fix: the single-item branch must yield one (key, value) pair.
    # Iterating the bare tuple (miss, sgs[miss]) tried to unpack the key
    # string and the value dict themselves as pairs.
    for k, v in ([(miss, sgs[miss])] if miss else sgs.items()):
        if v['active'] and stderr.isatty():
            stderr.write('Suggestion: you may like to install %s\n' % k)
            stderr.write(
                ('%s\n' % v['description']).encode(pref_enc, 'replace'))
            stderr.write('\tIt is easy, here are the instructions:\n')
            stderr.write('\t%s/installation.html%s\n' % (kamaki_docs,
                                                         v['url']))
            stderr.flush()
#!/usr/bin/env python from sys import stdin, stdout, stderr print "Piped input:", not stdin.isatty() print "Piped output:", not stdout.isatty() print "Piped error:", not stderr.isatty()
self._dict = defaultdict(str) def __getattr__(self, key): return self._dict[key] Null_Style = Null_Fore = colorama_shim() if stdout.isatty(): Out_Style = Colo_Style Out_Fore = Colo_Fore else: Out_Style = Null_Style Out_Fore = Null_Fore if stderr.isatty(): Err_Style = Colo_Style Err_Fore = Colo_Fore else: Err_Style = Null_Style Err_Fore = Null_Fore def info_main(*args): logger.info(''.join([Err_Style.BRIGHT, Err_Fore.GREEN] + list(args) + [Err_Style.RESET_ALL, Err_Fore.RESET])) def info_notify(s): info('{}{}{}{}'.format(Err_Style.BRIGHT, Err_Fore.LIGHTBLUE_EX, s, Err_Style.RESET_ALL))
#!/usr/bin/env python3 # Dedicated to the public domain under CC0: https://creativecommons.org/publicdomain/zero/1.0/. from sys import stderr exit(0 if stderr.isatty() else 1)
def scanimage(imgpath):
    # Extract metadata from one image: file size, mean pixel value, QR codes
    # (with chamber/experiment IDs parsed from them), EXIF time, and
    # chamber/experiment/time parsed from the directory and file names.
    # Returns a flat dict; `error` names the first stage that failed, if any.

    # init all variables
    error = None
    pixel_mean = None
    codes = None
    qr_chamber = None
    qr_experiment = None
    file_size = None

    # Get size
    try:
        file_size = getsize(imgpath)
    except Exception as exc:
        if stderr.isatty():
            print(str(exc), imgpath, file=stderr)
            tb()
        error = "FileSize"

    # Load image
    try:
        pixels = loadimage(imgpath)
        if pixels.dtype == object:
            # An object-dtype array signals a failed decode.
            raise ValueError(
                "Bad image load: see https://github.com/python-pillow/Pillow/issues/3863"
            )
        pixel_mean = np.mean(pixels)
        # Scan for QR codes; normalize to a sorted ';'-joined string so the
        # result is deterministic regardless of detection order.
        codes = zbarlight.scan_codes('qrcode', Image.fromarray(pixels))
        if codes is not None:
            codes = ';'.join(sorted(x.decode('utf8') for x in codes))
            qr_chamber = re_chamber(codes)
            qr_experiment = re_experiment(codes)
    except Exception as exc:
        if stderr.isatty():
            print(str(exc), imgpath, file=stderr)
            tb()
        error = "ImageIO"

    # Metadata
    exif_time = None
    try:
        exif_time = get_exif_time(imgpath)
    except KeyError:
        # Image simply has no EXIF time tag; not an error.
        pass
    except Exception as exc:
        if stderr.isatty():
            print(str(exc), imgpath, file=stderr)
            tb()
        error = "EXIF"

    # Fallback identifiers parsed from the path itself.
    dir_chamber = re_chamber(dirname(imgpath))
    dir_experiment = re_experiment(dirname(imgpath))
    fn_chamber = re_chamber(basename(imgpath))
    fn_experiment = re_experiment(basename(imgpath))
    fn_time = re_time(basename(imgpath))
    return {
        "imgpath": imgpath,
        "qr_chamber": qr_chamber,
        "qr_experiment": qr_experiment,
        "qr_codes": codes,
        "pixel_mean": pixel_mean,
        "exif_time": exif_time,
        "dir_chamber": dir_chamber,
        "dir_experiment": dir_experiment,
        "fn_chamber": fn_chamber,
        "fn_experiment": fn_experiment,
        "fn_time": fn_time,
        "file_size": file_size,
        "error": error,
    }
    def __init__(self):
        # Backing store: any attribute looked up on the shim resolves to ''
        # via the defaultdict, so colour codes become no-ops.
        self._dict = defaultdict(str)

    def __getattr__(self, key):
        # Return '' for any colour/style attribute (e.g. .BRIGHT, .RESET).
        return self._dict[key]


# Null objects used when a stream is not a terminal: every style/colour
# attribute evaluates to the empty string.
Null_Style = Null_Fore = colorama_shim()

# Select real colorama styles only when the stream is an interactive tty.
if stdout.isatty():
    Out_Style = Colo_Style
    Out_Fore = Colo_Fore
else:
    Out_Style = Null_Style
    Out_Fore = Null_Fore

if stderr.isatty():
    Err_Style = Colo_Style
    Err_Fore = Colo_Fore
else:
    Err_Style = Null_Style
    Err_Fore = Null_Fore


def info_main(*args):
    # Log a top-level section header in bright green.
    logger.info(''.join([Err_Style.BRIGHT, Err_Fore.GREEN] + list(args) +
                        [Err_Style.RESET_ALL, Err_Fore.RESET]))


def info_notify(s):
    # Log a notification in bright light blue.
    info('{}{}{}{}'.format(Err_Style.BRIGHT, Err_Fore.LIGHTBLUE_EX, s,
                           Err_Style.RESET_ALL))
def temoa_solve(model_data): from sys import argv, version_info if version_info < (2, 7): msg = ("Temoa requires Python v2.7 to run.\n\nIf you've " "installed Coopr with Python 2.6 or less, you'll need to reinstall " 'Coopr, taking care to install with a Python 2.7 (or greater) ' 'executable.') raise SystemExit(msg) from time import clock from coopr.opt import SolverFactory, SolverManagerFactory from coopr.pyomo import ModelData from utils import results_writer from pformat_results import pformat_results tee = False solver_manager = SolverManagerFactory('serial') options = parse_args() dot_dats = options.dot_dat opt = SolverFactory(options.solver) if opt: opt.keepFiles = options.keepPyomoLP opt.generateSymbolicLabels = options.useSymbolLabels if options.generateSolverLP: opt.options.wlp = path.basename(options.dot_dat[0])[:-4] + '.lp' SE.write('\nSolver will write file: {}\n\n'.format(opt.options.wlp)) elif options.solver != 'NONE': SE.write("\nWarning: Unable to initialize solver interface for '{}'\n\n" .format(options.solver)) SE.write('[ ] Reading data files.') SE.flush() # Recreate the pyomo command's ability to specify multiple "dot dat" files # on the command line begin = clock() duration = lambda: clock() - begin mdata = ModelData() for f in dot_dats: if f[-4:] != '.dat': msg = "\n\nExpecting a dot dat (e.g., data.dat) file, found '{}'\n" raise SystemExit(msg.format(f)) mdata.add(f) mdata.read(model_data.model) SE.write('\r[%8.2f\n' % duration()) SE.write('[ ] Creating Temoa model instance.') SE.flush() # Now do the solve and ... 
model_data.instance = model_data.model.create(mdata) SE.write('\r[%8.2f\n' % duration()) SE.write('[ ] Solving.') SE.flush() if opt: model_data.result = solver_manager.solve(model_data.instance, opt=opt, tee=tee, suffixes=['dual', 'rc']) # result = opt.solve(instance) SE.write('\r[%8.2f\n' % duration()) else: SE.write('\r---------- Not solving: no available solver\n') raise SystemExit SE.write('[ ] Formatting results.') SE.flush() # ... print the easier-to-read/parse format results_writer(model_data.result, model_data.instance) # updated_results = instance.update_results(result) # formatted_results = pformat_results(instance, updated_results) SE.write('\r[%8.2f\n' % duration()) # SO.write(formatted_results) if options.graph_format: SE.write('[ ] Creating Temoa model diagrams.') SE.flush() instance.load(result) CreateModelDiagrams(instance, options) SE.write('\r[%8.2f\n' % duration()) if not (SO.isatty() or SE.isatty()): SO.write("\n\nNotice: You are not receiving 'standard error' messages." " Temoa uses the 'standard error' file to send meta information " "on the progress of the solve. If you aren't intentionally " "ignoring standard error messages, you may correct the issue by " "updating coopr/src/coopr.misc/coopr/misc/scripts.py as per this " "coopr changeset: " "https://software.sandia.gov/trac/coopr/changeset/5363\n")
def verify(ephemerals, resource, informat, force_delete, rm_script, move_dest, pixel_distance, distance_file, only_check_exists): """ Verify images from each of EPHEMERAL, ensuring images are in --resources. """ resource_ts = TimeStream(resource, format=informat) decoder = DecodeImageFileStep() resource_ts.index() if rm_script is not None: with open(rm_script, "w") as fh: print("# rmscript for", *ephemerals, file=fh) with Removalist(rm_script=rm_script, mv_dest=move_dest, force=force_delete) as rmer: if distance_file is not None: distance_file = open(distance_file, "w") print("ephemeral_image\tresource_image\tdistance", file=distance_file) for ephemeral in ephemerals: click.echo(f"Crawling ephemeral timestream: {ephemeral}") ephemeral_ts = TimeStream(ephemeral, format=informat) try: for image in tqdm(ephemeral_ts, unit=" files"): try: res_img = resource_ts.getinstant(image.instant) if not isinstance(image.fetcher, FileContentFetcher): click.echo( f"WARNING: can't delete {image.filename} as it is bundled", err=True) continue if only_check_exists: rmer.remove(image.fetcher.pathondisk) elif pixel_distance is not None: eimg = decoder.process_file(image) rimg = decoder.process_file(res_img) if eimg.pixels.shape != rimg.pixels.shape: if distance_file is not None: print(basename(image.filename), basename(res_img.filename), "NA", file=distance_file) continue dist = np.mean(abs(eimg.pixels - rimg.pixels)) if distance_file is not None: print(basename(image.filename), basename(res_img.filename), dist, file=distance_file) if dist < pixel_distance: rmer.remove(realpath(image.fetcher.pathondisk)) elif image.md5sum == res_img.md5sum: rmer.remove(realpath(image.fetcher.pathondisk)) except KeyError: tqdm.write(f"{image.instant} not in {resource}") if distance_file is not None: print(basename(image.filename), "", "", file=distance_file) except Exception as exc: click.echo( f"WARNING: error in resources lookup of {image.filename}: {str(exc)}", err=True) if stderr.isatty(): 
traceback.print_exc(file=stderr) except KeyboardInterrupt: print("\n\nExiting cleanly", file=stderr) break if distance_file is not None: distance_file.close()
def out(*args, **kwargs):
    """Print to stderr, but only when stderr is an interactive terminal."""
    if not stderr.isatty():
        return
    print(*args, file=stderr, **kwargs)
def main(): parser = argparse.ArgumentParser(description = __doc__) parser.add_argument('-m', '--metric', default=DEFAULT_METRIC,\ help="a severity metric lambda for maximal repeat (c)ount and (l)ength" +\ ", defaults to '%s'" % DEFAULT_METRIC) parser.add_argument("file", nargs='+', help="source file to analyze") args = parser.parse_args() filenames, metric = args.file, args.metric for c in metric: assert c in ALLOWED_METRIC_CHARS, "metric characters must be in '%s'" %\ ''.join(list(ALLOWED_METRIC_CHARS)) metric = eval("lambda c, l: " + metric, None, None) a = Analyzer(canonize) total = 0 for filename in filenames: stderr.write("processing %s...\n" % filename) stderr.flush() node_count = 0 for node_count in a.add(filename): if stderr.isatty(): stderr.write('\r\t%d nodes processed' % node_count) stderr.flush() total += node_count stderr.write('\n') #stderr.write('\tsize of strie is now %d bytes\n' % a._trie.size) #stderr.flush() stderr.write("total nodes: %d\n" % total) stderr.write("finding maximal repeats (this may take a while)\n") repeats = a.get_sorted_repeats(metric) total_severity = 0 for repeat in repeats: severity = metric(len(repeat.indices), repeat.length) contains = repeat.contains if contains is None: total_severity += severity else: total_severity += max(0,\ metric(len(repeat.indices), repeat.length - contains.length + 1)) print colored_red(\ 'severity %d: %d repeats of length %d; id %d, contains %d' %\ (severity, len(repeat.indices), repeat.length, id(repeat),\ id(contains) if contains is not None else 0)) indices = '@ ' for index in repeat.indices: indices += '(%s,%d) ' % (basename(filenames[index[0]]),\ a.lineno_map[index[0]][index[1] - repeat.length + 1] + 1) print colored_red(indices) index = iter(repeat.indices).next() for i in xrange(index[1] - repeat.length + 1, index[1] + 1): print '\t', a.canonized[a.lines[index[0]][i]] print colored_red('%d/%d lines can be refactored' %\ (total_severity, a.lines_count)) try: with open(CUR_FILENAME, 'rb') as 
f: print colored_red('last run it was %s/%s lines' %\ tuple(f.read().strip().split('/'))) except IOError as e: if e.errno != 2: # 2 is file does not exist raise with open(CUR_FILENAME, 'wb') as f: f.write('%d/%d' % (total_severity, a.lines_count))
#!/usr/bin/env python '''@package docstring Some common python functions ''' from re import sub from sys import stdout, stderr from os import getenv from collections import namedtuple _atty_out = stdout.isatty() _atty_err = stderr.isatty() def PInfo(module, msg, newline="\n"): ''' function to write to stdout''' if _atty_out: stdout.write('\033[0;32mINFO\033[0m [%-40s]: %s%s' % (module, msg, newline)) else: stderr.write( 'INFO [%-40s]: %s%s' % (module, msg, newline) ) # redirect color-less output to stderr to maintain stream in log files def PWarning(module, msg, newline="\n"): ''' function to write to stdout''' if _atty_out: stdout.write('\033[0;91mWARNING\033[0m [%-40s]: %s%s' % (module, msg, newline)) else:
def _tty_output(self, message, print_newline=True): if stderr.isatty(): if print_newline: message += "\n" stderr.write(message)
args = argparser.parse_args(list(argv) or None) args.write_output(dodosc(args.file)(tuple(args.parse_input(args.input)))) def map_abs(inv): return map(abs, inv) def read_stdin(inv): from sys import stdin yield from map_abs(inv) yield from stdin.buffer.read() def write_bytes(outv): from sys import stdout stdout.buffer.write(bytes(map((256).__rmod__, outv))) def write_nums(outv): for number in outv: print(number) if __name__ == '__main__': try: dodos() except KeyboardInterrupt: if stderr.isatty(): stderr.write('\n') raise SystemExit(130)
return else: cb(('deny', "wrong otp")) if __name__ == '__main__': path = os.path.expanduser(CONFIG) if not os.path.exists(path): out("no config found, performing initial setup of %s" % CONFIG) setup(path) else: queue = Queue() with open(path, 'rt') as fd: cfg = json.load(fd) if 'totp_secret' in cfg and stderr.isatty(): t = Thread(target=check_totp, kwargs=dict(secret=cfg['totp_secret'], cb=queue.put)) t.daemon = True t.start() if 'tokenizer' in cfg: from tokenizer import tokenizer t = Thread(target=tokenizer, kwargs=dict(cfg=cfg['tokenizer'], cb=queue.put)) t.daemon = True t.start() try:
def download_file(url: str, file: Path, resume=False, rate_limit=0.0, progress=False):
    """Throttled download with progress bar

    :param url: URL to download
    :param file: Output file
    :param resume: Append instead of overwrite
    :param rate_limit: Rate limit in MB/s
    :param progress: Show progress bar
    """
    if resume and file.exists():
        file_mode = 'ab'
        # Bug fix: pathlib.Path has no `.size` attribute; the file size
        # comes from stat().st_size.
        done_bytes = file.stat().st_size
    else:
        file_mode = 'wb'
        done_bytes = 0

    # To be downloaded each second
    if rate_limit != 0:
        chunk_size = int(rate_limit * 1024 * 1024)
    else:
        # Default chunk size of 1 MB means we do not loose whole file if download gets aborted
        chunk_size = 1024 * 1024

    # Ask server to skip the first N bytes
    request = urllib.request.Request(url)
    request.add_header('Range', 'bytes={}-'.format(done_bytes))

    with urllib.request.urlopen(request) as response:
        if progress:
            # Get size of download
            total_bytes = int(response.headers['content-length']) + done_bytes
            # Avoid ZeroDivisionError and only print progress bar if we are in a terminal
            if total_bytes == 0 or not stderr.isatty():
                progress = False

        with file.open(file_mode) as f:
            while True:
                # Print a progress bar
                if progress:
                    percent = 100 * (done_bytes / total_bytes)
                    # Never more than 70 hash signs
                    bar = '#' * min(70 * done_bytes // total_bytes, 70)
                    # ####----- (padded to 70 chars) NNN.N (padded to 5 chars) %
                    print('\r{:-<70} {: >5.1f}%'.format(bar, percent),
                          end='', flush=True, file=stderr)

                # Download and write a chunk
                started = time.time()
                chunk = response.read(chunk_size)
                done_bytes += len(chunk)
                if not chunk:
                    if progress:
                        print()  # newline when done
                    break
                f.write(chunk)

                if rate_limit:
                    try:
                        # Every chunk should take 1 second, sleep for the time that's left
                        time.sleep(1 + started - time.time())
                    except ValueError:
                        # Negative sleep: the chunk already took >1s.
                        pass
def _tty_flush(self): if stderr.isatty(): stderr.flush()
# -*- coding: utf-8 -*-
'''Console output helpers: verbosity gate and colourised message prefixes.'''
from sys import stderr, stdout

# Module-level verbosity flag, toggled by turn_verbose_on().
_verbose = False

# ANSI-decorated prefixes when stderr is a terminal, plain text otherwise.
# Bug fix: the coloured warning prefix read "WARNIG:"; corrected spelling.
_warning_str = ('\033[0;101;37mWARNING:\033[0m'
                if stderr.isatty() else "WARNING:")
_error_str = ('\033[0;41;37mERROR:\033[0m'
              if stderr.isatty() else "ERROR:")


def _flush_stdout():
    # This is needed before using stderr, since it is never buffered
    if not stdout.isatty():
        stdout.flush()


def turn_verbose_on():
    """Enable verbose() output for the rest of the process."""
    global _verbose
    _verbose = True


def normal(message=''):
    """Print *message* unconditionally."""
    print(message)


def verbose(message=''):
    """Print *message* only when verbose mode has been turned on."""
    if _verbose:
        print(message)