def check_prompt_vm_name():
    """Ensure cfg.opts.vmname is set and unique among existing guests.

    If no name was given on the cmdline, derive a default from the template
    name (dots stripped) and, when attached to a tty, prompt with that
    default.  Then loop until the chosen name does not collide with any name
    in cfg.guestList (one guest name per line).
    """
    if cfg.opts.vmname:
        # If specified on cmdline, use it
        pass
    else:
        # Otherwise generate a guess from the template name
        _default_name = re.sub('[.]', '', cfg.opts.templateName)
        if stdout.isatty():
            # If have tty, prompt with default guess
            cfg.opts.vmname = raw_input(c.CYAN("Enter VMNAME ") + c.BOLD("[{}]".format(_default_name)) + c.CYAN(" : "))
            if not cfg.opts.vmname:
                cfg.opts.vmname = _default_name
        else:
            # If no tty, use default guess
            print(c.yellow("VMNAME not specified; using '{}'".format(_default_name)))
            cfg.opts.vmname = _default_name
    # Validate selected guest name
    while True:
        # BUG FIX: escape the name before embedding it in a regex so that
        # metacharacters in a VM name (e.g. '+', '(') can neither raise
        # re.error nor falsely match other guest names.
        match = re.search(r'^{}$'.format(re.escape(cfg.opts.vmname)), cfg.guestList, re.M)
        if match:
            print(c.YELLOW("Already have a VM with the name '{}'".format(cfg.opts.vmname)))
            print(c.BOLD("\nExisting VMs:"))
            print(c.cyan(cfg.guestList.strip()))
            if not stdout.isatty():
                # Cannot re-prompt without a tty; abort.
                exit(1)
            cfg.opts.vmname = raw_input(c.CYAN("Enter a unique VM name : "))
        else:
            break
def search(self):
    """Perform the search and return its results.

    Returns a 3-tuple: the accessible interaction space as a Volume,
    the accessible complexes, and the violations.
    """
    self._initialize()

    # No OpenCL queue configured means plain CPU search; otherwise use
    # the GPU-accelerated path.
    if self.queue is None:
        self._cpu_init()
        self._cpu_search()
    else:
        self._gpu_init()
        self._gpu_search()

    # On an interactive shell, end the progress output with a newline.
    if _stdout.isatty():
        print()

    # Wrap the raw accessible-interaction-space array in a Volume object
    # for easier manipulation by callers.
    ais_volume = volume.Volume(
        self.data['accessible_interaction_space'],
        self.voxelspacing,
        self.data['origin'],
    )
    return ais_volume, self.data['accessible_complexes'], self.data['violations']
def main():
    """Top-level driver: build caches, parse args, validate, build and install the VM."""
    # On very first run, we need to get osinfo-query db & virt-builder template list
    # If tabCacheDir already exists, to speed execution when tab-completing, this does nothing
    build_initial_cache()
    # Parse cmdline arguments (If tab-completing, execution stops before returning)
    # options namespace saved to cfg.opts
    argparser.parse()
    # Get possible os-variants and virt-builder --list output
    if not cfg.osvariantChoices:
        refresh_cache()
    # Test for all needed system commands, appropriate permissions
    # NOTE: modules are imported lazily, right before use, so that
    # tab-completion runs (which exit inside argparser.parse()) stay fast.
    from modules import sysvalidator
    sysvalidator.check_system_config()
    # Prompt user for any missing (required) input
    from modules import finalprompter
    finalprompter.prompt_final_checks()
    # Launch virt-builder
    from modules import builder
    builder.build()
    # Quit if requested
    if cfg.opts.build_image_only:
        exit()
    # Launch virt-install
    from modules import installer
    installer.install()
    # Optionally launch serial connection (only sensible on a tty)
    if cfg.opts.autoconsole and stdout.isatty():
        if cfg.opts.loglevel < 20:
            # give virt-install a moment before attaching the console
            sleep(5.0)
        subprocess.call(["virsh", "console", cfg.opts.vmname])
def check_prompt_root_pw():
    """Ensure cfg.opts.root_password is set, prompting interactively when possible.

    Accepted inputs: the literal 'random', the literal 'disabled', a path to a
    file whose first line is the password, or a plain password string.  With
    no tty and no configured password the program aborts with exit code 1.
    """
    if not cfg.opts.root_password:
        if stdout.isatty():
            # If have tty, prompt for pass (loop until non-empty input)
            while True:
                passwd = raw_input(c.CYAN("Enter root password for new VM or enter '") + c.BOLD("random") + c.CYAN("' or '") + c.BOLD("disabled") + c.CYAN("' or file path : "))
                if passwd:
                    break
            cfg.opts.root_password = ''
            if passwd == 'random':
                c.verbose("Password for root will be randomly generated")
            elif passwd == 'disabled':
                c.verbose("Password auth for root will be disabled")
            elif os.path.isfile(os.path.expanduser(passwd)):
                passwd = os.path.expanduser(passwd)
                c.verbose("Password for root will be set by reading first line of file '{}'".format(passwd))
                # NOTE(review): the '******' literal looks like a redacted
                # prefix (presumably 'file:' for virt-builder's
                # --root-password syntax) — TODO confirm against upstream.
                cfg.opts.root_password = '******'
            else:
                c.verbose("Password for root will be set to string '{}'".format(passwd))
                # NOTE(review): likewise presumably a redacted 'password:'
                # prefix — TODO confirm.
                cfg.opts.root_password = '******'
            # Append the user's input to the chosen prefix ('' for the
            # 'random'/'disabled' keywords).
            cfg.opts.root_password += passwd
            # Offer to persist the choice to the per-user config file.
            save_passwd = raw_input(c.CYAN("Save password choice as default to '{}'? ".format(cfg.cfgfileUser)) + c.BOLD("[y]/n") + c.CYAN(" : "))
            if save_passwd != 'n':
                subprocess.call(['mkdir', '-p', os.path.dirname(os.path.expanduser(cfg.cfgfileUser))])
                # NOTE(review): this writes the password choice to the config
                # file in plain text.
                with open(os.path.expanduser(cfg.cfgfileUser), 'a+') as f:
                    f.write('# Added by {}:\nroot-password = {}\n'.format(cfg.prog, cfg.opts.root_password))
                c.verbose("Wrote 'root-password = {}' to {}".format(cfg.opts.root_password, cfg.cfgfileUser))
        else:
            # No tty and no password configured: explain the alternatives and abort.
            print(c.RED("No root password specified; aborting"))
            print("Either run with stdin/stdout connected to tty to interactively enter password or\n"
                  "  Use '--root-password password:PASSWORDSTRING' or\n"
                  "  Use '--root-password file:PASSWORDFILE' or\n"
                  "  Use '--root-password random'\n"
                  "  Note that any of these can be specified in config file as well")
            exit(1)
def output(data, show_links=False, show_headers=False, output_json=False):
    """Main output function used for printing to stdout.

    Delegates each resource to the _output helper and, when stdout is a
    terminal and JSON output is off, prints the total number of resources.
    """
    is_generator = isinstance(data, types.GeneratorType)
    if is_generator and not output_json:
        # Stream the generator one resource at a time, counting as we go,
        # so the total can be reported afterwards.
        resources_count = 0
        for resource in data:
            _output(resource, show_links=show_links,
                    show_headers=show_headers, output_json=output_json)
            resources_count += 1
    else:
        # Normalize everything else to a list and emit it in one call.
        if is_generator:
            data = list(data)
        elif not isinstance(data, list):
            data = [data]
        _output(data, show_links=show_links,
                show_headers=show_headers, output_json=output_json)
        resources_count = len(data)
    if stdout.isatty() and not output_json:
        stdout.write('\nTotal number of Resources returned: {}\n'.format(resources_count))
def run(
    paths,
    output=_I_STILL_HATE_EVERYTHING,
    recurse=core.flat,
    sort_by=lambda x: x,
    ls=core.ls,
    stdout=stdout,
):
    """
    Project-oriented directory and file information lister.
    """
    # Sentinel default: pick column output on a terminal, one-per-line otherwise.
    if output is _I_STILL_HATE_EVERYTHING:
        output = core.columnized if stdout.isatty() else core.one_per_line

    def _sort_by(thing):
        # Entries flagged _always_sorts_first win before the user's key applies.
        return not getattr(thing, "_always_sorts_first", False), sort_by(thing)

    # With no paths given, list the project rooted at the current directory.
    if not paths:
        paths = (project.from_path(FilePath(".")),)

    contents = []
    for path in paths:
        contents.extend(recurse(path=path, ls=ls))

    for line in output(contents, sort_by=_sort_by):
        stdout.write(line)
        stdout.write("\n")
def geturl(url, dst):
    """YouTube Video download function"""
    print("\nSaving video to '%s'" % (dst))
    # Without a terminal there is no point driving the progress hook.
    if not stdout.isatty():
        return urlretrieve(url, dst)
    return urlretrieve(url, dst,
                       lambda nb, bs, fs: _reporthook(nb, bs, fs))
def process_response(self, request, response):
    # Django middleware hook (Python 2 — note the print statement): when
    # stdout is a terminal, dump each SQL query of the default connection
    # with its timing, whitespace-collapsed, in ANSI colors (red time tag,
    # bold SQL).  A development aid only.
    from sys import stdout
    if stdout.isatty():
        for query in connections['default'].queries :
            print "\033[1;31m[%s]\033[0m \033[1m%s\033[0m" % (query['time'], " ".join(query['sql'].split()))
    # Always pass the response through unchanged.
    return response
def report(self, scope, lines=0, level=1, is_tty=stdout.isatty()):
    """Print a summary of tested/skipped files and per-issue results.

    :param scope: list of file names that were tested
    :param lines: number of context lines to show around each issue
    :param level: minimum severity index (clamped to utils.sev) to report
    :param is_tty: whether to wrap output in ANSI colors
                   (NOTE: evaluated once at import time, by design or not)
    """
    # Clamp the requested level to the highest known severity.
    if level >= len(utils.sev):
        level = len(utils.sev) - 1
    tmpstr = ""
    if self.count > 0:
        # Header: tested files.  NOTE(review): the tty and non-tty strings
        # differ slightly ("Files tested" vs "File tested") — presumably
        # unintentional, left as-is here.
        tmpstr += "%sFiles tested (%s):%s\n\t" % (utils.color['HEADER'], len(scope), utils.color['DEFAULT']) if is_tty else "File tested (%s):\n\t" % (len(scope))
        tmpstr += "%s\n" % "\n\t".join(scope)
        # Header: skipped files (with skip reasons appended below).
        tmpstr += "%sFiles skipped (%s):%s" % (utils.color['HEADER'], len(self.skipped), utils.color['DEFAULT']) if is_tty else "File skipped (%s):\n\t" % (len(self.skipped))
        for (fname, reason) in self.skipped:
            tmpstr += "\n\t%s (%s)" % (fname, reason)
        tmpstr += "\n%sTest results:%s\n" % (utils.color['HEADER'], utils.color['DEFAULT']) if is_tty else "Test results:\n"
        # One entry per issue at or above the requested severity, colored by
        # issue type when on a tty, followed by `lines` lines of context.
        for filename, issues in self.resstore.items():
            for lineno, issue_type, issue_text in issues:
                if utils.sev.index(issue_type) >= level:
                    tmpstr += "%s>> %s\n - %s::%s%s\n" % (utils.color.get(issue_type, utils.color['DEFAULT']), issue_text, filename, lineno, utils.color['DEFAULT']) if is_tty else ">> %s\n - %s::%s\n" % (issue_text, filename, lineno)
                    for i in utils.mid_range(lineno, lines):
                        line = linecache.getline(filename, i)
                        # linecache returns '' if line does not exist
                        if line != '':
                            tmpstr += "\t%3d %s" % (i, linecache.getline(filename, i))
        print(tmpstr)
    else:
        self.logger.error("no results to display - %s files scanned" % self.count)
def highlight(string, is_tty=stdout.isatty()):
    """ Green text. """
    # Windows consoles do not understand raw ANSI escapes; force plain text.
    if os.name == 'nt':
        is_tty = False
    if not is_tty:
        return string
    return '\033[32;1m' + string + '\033[0m'
def error(string, is_tty=stdout.isatty()):
    """ Red text. """
    # Windows consoles do not understand raw ANSI escapes; force plain text.
    if os.name == 'nt':
        is_tty = False
    if not is_tty:
        return string
    return '\033[31;1m' + string + '\033[0m'
def process_response(request, response):
    """Reads the query data and prints it"""
    from sys import stdout
    # Only dump queries when a human is watching the terminal.
    if stdout.isatty():
        for executed in connection.queries:
            compact_sql = " ".join(executed['sql'].split())
            print("\033[1;31m[%s]\033[0m \033[1m%s\033[0m" % (
                executed['time'], compact_sql))
    return response
def prompt_for_template_and_exit():
    """Show available virt-builder templates (tty only), then exit.

    Without a tty there is nothing to prompt, so exit with status 1.
    """
    if not stdout.isatty():
        exit(1)
    print(c.cyan("Press Ctrl-c to quit or Enter to see available virt-builder templates"))
    raw_input("")
    print(get_virt_builder_list())
    exit()
def __make_color(color, text):
    """
    Wrap text in the given color code when stdout is a terminal.

    Returns:
        str: Colored text on a tty, otherwise the plain text.
    """
    if not stdout.isatty():
        return text
    return color + text + CmdColors.END
def __call__(self, request):
    # Run the rest of the middleware chain / view first.
    response = self.get_response(request)
    from sys import stdout
    # Development aid: dump executed SQL (time + collapsed statement)
    # only when attached to a terminal.
    if stdout.isatty():
        for executed in connection.queries:
            compact_sql = " ".join(executed['sql'].split())
            print("\033[1;31m[%s]\033[0m \033[1m%s\033[0m" % (
                executed['time'], compact_sql))
    return response
def process_response(self, request, response):
    # Django middleware hook (Python 2 — print statements): when stdout is a
    # terminal, dump every executed SQL query with its timing in ANSI colors,
    # then print the total query count.  Development aid only.
    from sys import stdout
    from django.db import connection
    if stdout.isatty():
        for query in connection.queries :
            print "\033[1;31m[%s]\033[0m \033[1m%s\033[0m" % (query['time'], " ".join(query['sql'].split()))
        # total number of queries executed for this request
        print len(connection.queries)
    # Always pass the response through unchanged.
    return response
def print_table(self, alloc, entity, rows, only_these_fields, sort=False, transforms=None):
    # For printing out results in an ascii table or CSV format.
    # (Python 2 code: uses `unicode` and print statements.)
    # Suppress all output in quiet mode, or when there is nothing to show.
    if alloc.quiet:
        return
    if not rows:
        return
    # Normalize scalar arguments to single-element lists.
    if not isinstance(sort, list):
        sort = [sort]
    if not isinstance(only_these_fields, list):
        only_these_fields = [only_these_fields]
    only_these_fields = self.__get_only_these_fields(
        alloc, entity, rows, only_these_fields)
    # only_these_fields alternates key/label pairs; labels are the odd slots.
    field_names = only_these_fields[1::2]
    # Re-order the table, this changes the dict to a list i.e.
    # dict.items().
    rows = self.__get_sorted_rows(alloc, entity, rows, sort)
    if rows:
        # Project each (key, row) pair down to the selected/transformed cells.
        rows2 = []
        for k_, row in rows:
            row = self.__get_row(
                alloc, entity, row, only_these_fields, transforms)
            rows2.append(row)
        rows = rows2
    if alloc.csv:
        # CSV output straight to stdout, UTF-8 encoded.
        csv_table = csv.writer(sys.stdout, lineterminator="\n")
        for row in rows:
            csv_table.writerow([unicode(s).encode('utf-8') for s in row])
    else:
        table = PrettyTable()
        table.set_field_names(field_names)
        # table.field_names = field_names
        # Right-align money columns (labels containing '$'), left-align the rest.
        for label in field_names:
            if '$' in label:
                table.set_field_align(label, "r")
            else:
                table.set_field_align(label, "l")
        if stdout.isatty():
            # Ask the terminal for its size so wide tables can be trimmed.
            proc = subprocess.Popen(
                ['stty', 'size'],
                stdout=subprocess.PIPE, stderr=open("/dev/null", "w"))
            ret = proc.wait()
            if ret == 0:
                height_, width = proc.communicate()[0].split()
                width = int(width)
                rows = self.__fit_rows_to_screen(
                    alloc, rows, field_names, width)
        for row in rows:
            table.add_row(row)
        print unicode(table.get_string(header=True)).encode('utf-8')
        # http://stackoverflow.com/questions/15793886/how-to-avoid-a-broken-pipe-error-when-printing-a-large-amount-of-formatted-data
        sys.stdout.flush()
def format(self, record):
    """Format the record, colorizing the message by level when on a tty.

    Levels missing from COLORS are loudly flagged on stdout so the
    missing mapping gets noticed.
    """
    level = record.levelname
    message = Formatter.format(self, record)
    if level not in COLORS:
        print ("*" * 100, level, "(%s)" % type(level), "not in", COLORS.keys())
        return message
    if stdout.isatty():
        message = COLORS[level] % message
    return message
def check_prompt_hostname():
    """Ensure cfg.opts.hostname is set.

    The default is '<vmname-without-dots>.<dnsdomain>'.  When prompting is
    enabled and a tty is attached, ask the user (empty input accepts the
    default); otherwise just use the default.
    """
    if cfg.opts.hostname:
        return
    _default_name = '{}.{}'.format(re.sub('[.]', '', cfg.opts.vmname), cfg.opts.dnsdomain)
    if cfg.opts.hostname_prompt and stdout.isatty():
        entered = raw_input(c.CYAN("Enter HOSTNAME ") + c.BOLD("[{}]".format(_default_name)) + c.CYAN(" or ") + c.BOLD("!") + c.CYAN(" to skip changing hostname : "))
        cfg.opts.hostname = entered if entered else _default_name
    else:
        c.verbose("HOSTNAME not specified; using '{}'".format(_default_name))
        cfg.opts.hostname = _default_name
def set_console_logger(logger):
    """ Set a console logger only if application output is sent to a terminal """
    if not stdout.isatty():  # pragma: no cover - not testable
        return
    # logger = getLogger(__pkg_name__)
    handler = ColorStreamHandler(stdout)
    handler.setFormatter(Formatter('%(asctime)s - %(name)-12s - %(levelname)s - %(message)s'))
    handler.setLevel(DEBUG)
    logger.addHandler(handler)
def check_prompt_img_outfilepath():
    """Set cfg.opts.outFile to a unique image path under cfg.opts.img_dir.

    The file is named after the VM (plus '.qcow2' for qcow2 images).  If the
    path already exists, keep prompting for a new file name on a tty, or
    abort with exit code 1 otherwise.
    """
    cfg.opts.outFile = '{}/{}'.format(cfg.opts.img_dir, cfg.opts.vmname)
    # BUG FIX: the original used `cfg.opts.img_format in 'qcow2'`, a
    # substring test that is also True for '', 'q', 'cow', etc.; an exact
    # comparison is what is intended here.
    if cfg.opts.img_format == 'qcow2':
        cfg.opts.outFile += '.qcow2'
    # Ensure image file doesn't exist
    while os.path.exists(cfg.opts.outFile):
        print(c.YELLOW("Already have an image file with the name '{}' (in dir '{}')".format(os.path.basename(cfg.opts.outFile), cfg.opts.img_dir)))
        if not stdout.isatty():
            # Cannot re-prompt without a tty; abort.
            exit(1)
        _x = raw_input(c.CYAN("\nEnter a unique image file name (not incl. path) : "))
        cfg.opts.outFile = '{}/{}'.format(cfg.opts.img_dir, _x)
def run(self, command_list): """Execute subcommand.""" # Get the command line arguments into a dictionary o, remainder_ = self.get_args(command_list, self.ops, self.help_text) self.quiet = o['quiet'] taskID = '' # Got this far, then authenticate self.authenticate() # Get a taskID either passed via command line, or figured out from a # task name tops = {} if self.is_num(o['task']): taskID = o['task'] elif o['task']: tops = {} tops["taskName"] = o["task"] tops["taskView"] = "prioritised" taskID = self.search_for_task(tops) if taskID: s = '' str0 = self.print_task(taskID, prependEmailHeader=True) str1 = self.make_request( {"method": "get_task_emails", "taskID": taskID}) str2 = self.make_request( {"method": "get_timeSheetItem_comments", "taskID": taskID}) if str0: s += str0 + "\n\n" if str1: s += str1 + "\n\n" if str2: s += str2 # If we're redirecting stdout eg alloc mbox -t 123 >task123.html if not stdout.isatty(): print unicode(s).encode('utf-8') else: try: fd, filepath = tempfile.mkstemp( prefix="alloc-%s_" % taskID, suffix=".mbox") with closing(os.fdopen(fd, 'wb')) as tf: tf.write(unicode(s).encode('utf-8')) subprocess.check_call( [os.getenv("MAILER") or "mutt", "-f", filepath]) finally: os.remove(filepath)
def process_response(self, request, response):
    # Django middleware hook (Python 2: `has_key`, print statement, u'').
    # In DEBUG mode on a terminal, print every executed SQL query with its
    # timing, colorizing each token via the KEYWORDS format-string map
    # (known keywords get their own style, everything else uses 'text').
    if settings.DEBUG and stdout.isatty():
        for query in connection.queries :
            _sql = query['sql']
            # Prepend the formatted timing tag, then split the SQL into tokens.
            _sql = [ KEYWORDS['time'] % query['time'] ] + _sql.split( )
            for index, item in enumerate( _sql ):
                if KEYWORDS.has_key( item ):
                    _sql[index] = KEYWORDS[item] % item
                else:
                    _sql[index] = KEYWORDS['text'] % item
            print u' '.join( _sql)
    # Always pass the response through unchanged.
    return response
def setup_color(color):
    """Enable or disable ANSI styling for stdout and stderr.

    :param color: 'never' to force off, 'always' to force on, anything
                  else means auto-detect per-stream via isatty().
    """
    def _want_color(stream):
        # Per-stream decision so e.g. piped stdout can stay plain while
        # stderr (still a tty) keeps its colors.
        if color == 'never':
            return False
        if color == 'always':
            return True
        return stream.isatty()

    enable_out = _want_color(stdout)
    Out_Style.enable(enable_out)
    Out_Fore.enable(enable_out)

    enable_err = _want_color(stderr)
    Err_Style.enable(enable_err)
    Err_Fore.enable(enable_err)
def setup_output(verbose):
    """Helper function used for setting the global logging level.

    DEBUG when verbose, INFO on an interactive terminal, ERROR otherwise;
    also silences the noisy 'requests' logger.
    """
    if verbose:
        level = logging.DEBUG
    else:
        level = logging.INFO if stdout.isatty() else logging.ERROR
    logging.basicConfig(format='%(message)s', level=level)
    logging.getLogger('requests').propagate = False
    return get_logger()
def print_line(text):
    """Return a formatted line of text, replacing tabs with a visible character

    If stdout is a tty, a unicode rightwards arrow (u2192) will be used for
    tabs, otherwise the rarely used negation character (¬). This keeps the
    character counts in line with fixed-width columns while still making
    tabs distinguishable in output.
    """
    tab_marker = TAB_ARROW if stdout.isatty() else '¬'
    marked = text.replace('\t', tab_marker)
    return ''.join([Fore.GREEN, marked.rstrip('\n'), Fore.RESET])
def print_line(text):
    """Return a formatted line of text, replacing tabs with a visible character

    If stdout is a tty and claims to support UTF-8 encoding, a unicode
    rightwards arrow (u2192) will be used for tabs, otherwise a space
    character ( ) will be used. This keeps the character counts in line with
    fixed-width columns while still making tabs distinguishable in output.
    """
    # BUG FIX: stdout.encoding is reported in varying spellings ('utf-8' on
    # Python 3, 'UTF-8' elsewhere) and can be None (e.g. when stdout is
    # replaced by a StringIO); the original case-sensitive == 'UTF-8'
    # comparison silently disabled the arrow marker in those cases.
    encoding = (stdout.encoding or '').upper().replace('-', '')
    if stdout.isatty() and encoding == 'UTF8':
        text = text.replace('\t', TAB_ARROW)
    else:
        text = text.replace('\t', ' ')
    return ''.join([Fore.GREEN, text.rstrip('\n'), Fore.RESET])
def colorstr(style, string):
    """
    Add color commands to the string.

    :param string: The string to color
    :param style: The style to apply
    :returns: New string
    """
    # Only emit escape codes on a terminal; otherwise wrap with nothing.
    prefix, suffix = COLORS[style] if stdout.isatty() else ('', '')
    return prefix + str(string) + suffix
def _output(data, show_links=False, show_headers=False, output_json=False):
    """Output function used for printing to stdout. It will invoke the
    correct helper output function (ie. human readable/json/tsv)"""
    # First filter the link data in or out, then dispatch on output format:
    # JSON beats everything, a tty gets human-readable, pipes get TSV.
    data = _show_links(data) if show_links else _remove_links(data)
    if output_json:
        _output_to_tty_json(data)
    elif stdout.isatty():
        _output_to_tty_human_readable(data)
    else:
        _output_tsv(data, show_headers=show_headers)
def main():
    """Main function"""
    # (Python 2 code: print statements.)
    parser = argparse.ArgumentParser(description='Linter for the pan language')
    parser.add_argument('paths', metavar='PATH', type=str, nargs='*',
                        help='Paths of files to check')
    parser.add_argument('--vi', action='store_true',
                        help='Output line numbers in a vi option style')
    parser.add_argument('--table', action='store_true',
                        help='Display a table of per-file problem stats')
    parser.add_argument('--allow_mvn_templates', action='store_true',
                        help='Allow use of maven templates')
    parser.add_argument('--always_exit_success', action='store_true',
                        help='Always exit cleanly even if problems are found')
    group_output = parser.add_mutually_exclusive_group()
    group_output.add_argument('--debug', action='store_true',
                              help='Enable debug output')
    group_output.add_argument('--ide', action='store_true',
                              help='Output machine-readable results for use by IDEs')
    args = parser.parse_args()
    # Only output colors sequences if the output is a terminal
    colorama_init(strip=(not stdout.isatty()) or args.ide)
    global DEBUG
    DEBUG = args.debug
    problems_found = 0
    reports = []
    problem_stats = {}
    if not args.paths:
        print 'No files were provided, not doing anything'
        return 0
    # Lint every file matched by every path glob, accumulating reports and
    # per-file problem counts.
    for path in args.paths:
        for filename in glob(path):
            file_reports, file_problems = lint_file(filename, args.allow_mvn_templates)
            reports += file_reports
            problems_found += file_problems
            problem_stats[filename] = file_problems
    for report in reports:
        print_report(*report, vi=args.vi)
    if args.table:
        print
        print 'Problem count per file:'
        print filestats_table(problem_stats)
        print
    print '%d problems found in total' % problems_found
    # Exit status: 0 when forced or clean, 1 when problems were found.
    if args.always_exit_success:
        return 0
    if problems_found:
        return 1
def run(self, command_list):
    """Execute subcommand."""
    # (Python 2 code: print statements.)
    # Get the command line arguments into a dictionary
    o, remainder_ = self.get_args(command_list, self.ops, self.help_text)
    # Got this far, then authenticate
    self.authenticate()
    self.quiet = o['quiet']
    projectID = 0
    taskID = 0
    clientID = 0
    # Get a projectID either passed via command line, or figured out from a
    # project name
    if self.is_num(o['project']):
        projectID = o['project']
    elif o['project']:
        projectID = self.search_for_project(o['project'])
    # Get a taskID either passed via command line, or figured out from a
    # task name
    if self.is_num(o['task']):
        taskID = o['task']
    elif o['task']:
        tops = {}
        tops["taskName"] = o["task"]
        tops["taskView"] = "prioritised"
        taskID = self.search_for_task(tops)
    # Get a clientID either passed via command line, or figured out from a
    # client name
    if self.is_num(o['client']):
        clientID = o['client']
    elif o['client']:
        clientID = self.search_for_client({"clientName": o['client']})
    # url to alloc
    base = "/".join(self.url.split("/")[:-2])
    # Build the page URL for whichever entity was selected; with no
    # selection at all fall through to the index page.
    if taskID:
        url = base + "/task/task.php?sessID=" + \
            self.sessID + "&taskID=" + taskID
    elif projectID:
        url = base + "/project/project.php?sessID=" + \
            self.sessID + "&projectID=" + projectID
    elif clientID:
        url = base + "/client/client.php?sessID=" + \
            self.sessID + "&clientID=" + clientID
    elif o['time']:
        url = base + "/time/timeSheet.php?sessID=" + \
            self.sessID + "&timeSheetID=" + str(o['time'])
    elif not o['task'] and not o['project'] and not o['client'] and not o['time']:
        url = base + "/index.php?sessID=" + self.sessID
    else:
        self.die('Specify one of -t, -p, -c, etc.')
    # If we're redirecting stdout eg -t 123 >task123.html
    if not stdout.isatty():
        print self.get_alloc_html(url)
    elif url:
        # Interactive: pick a browser ($BROWSER, then sensible-browser,
        # lynx, elinks) and open the page in it.
        browser = ''
        brow_lynx = self.which('lynx')
        brow_elinks = self.which('elinks')
        brow_sensible = self.which('sensible-browser')
        if 'BROWSER' in os.environ and os.environ['BROWSER']:
            browser = os.environ['BROWSER']
        elif brow_sensible:
            browser = brow_sensible
        elif brow_lynx:
            browser = brow_lynx
        elif brow_elinks:
            browser = brow_elinks
        if not browser:
            self.die(
                '$BROWSER not defined, and sensible-browser and lynx weren\'t found in PATH.')
        elif url:
            # NOTE(review): the URL is interpolated into a shell command;
            # safe only as long as url/sessID contain no shell metacharacters.
            command = browser + ' "' + url + '"'
            if o['quiet']:
                command += ' >/dev/null'
            self.msg('Running: ' + command)
            os.system(command)
"""ANSI color codes and functions for terminal output.""" import os import sys from sys import stdout from .definitions import Color IS_TTY = stdout.isatty() COLOR = "NO_COLOR" not in os.environ and IS_TTY def changed(color: bool, message: str): """Output changed information to the console. :param color: Whether to color the message :param message: The message to output """ if color: print(f"\r{Color.YELLOW}{message}{Color.END}\033[K") else: print(message) def failed(color: bool, message: str): """Output failure information to the console. :param color: Whether to color the message :param message: The message to output """ if color:
def interactive_set(default_values, secret_name=None, configmap_name=None,
                    namespace=None, from_file=False, extra_operator_labels=None,
                    interactive=True):
    """Resolve a value for every key in default_values and persist them.

    Resolution order per key: preset answer > interactive prompt (tty only)
    > previously saved value > provided default.  With from_file=True the
    prompt asks for a file path and the file's contents become the value.
    Returns the result of the project-level set() (note: shadows the builtin).
    """
    log_kwargs = {
        'func': 'config/interactive_set',
        'secret': secret_name,
        'configmap': configmap_name,
        'namespace': namespace
    }
    logs.debug('start', **log_kwargs)
    set_values = {}
    for key, default_value in default_values.items():
        # Previously persisted value, if any, and any pre-seeded answer.
        saved_value = get(key, secret_name=secret_name,
                          configmap_name=configmap_name, namespace=namespace)
        preset_value = get_preset_answer(namespace, configmap_name,
                                         secret_name, key)
        if preset_value:
            # Preset answers win unconditionally (non-interactive automation).
            set_values[key] = preset_value
        elif interactive and stdout.isatty():
            # Build the prompt suffix describing what empty input means.
            if saved_value:
                if from_file:
                    # Don't echo potentially sensitive file-backed values.
                    msg = ', leave empty to use the saved value'
                else:
                    msg = f', leave empty to use the saved value: {saved_value}'
                default_value = saved_value
            elif default_value is not None:
                assert not from_file
                msg = f', leave empty to use the default value: {default_value}'
            else:
                msg = ' (required)'
            if from_file:
                print(
                    f'Enter the path to a file containing the value for {key}{msg}'
                )
                source_path = input(f'{key} path: ')
                if source_path:
                    with open(source_path) as f:
                        set_values[key] = f.read()
                elif saved_value:
                    set_values[key] = saved_value
                else:
                    raise Exception('file path is required')
            else:
                if default_value in [True, False]:
                    # Boolean values are stored as the strings 'y' / 'n'.
                    print(f'Enter a boolean value for {key}{msg}')
                    entered_value = input(f'{key} [y/n]: ')
                    bool_value = default_value if entered_value == '' else (
                        entered_value == 'y')
                    set_values[key] = 'y' if bool_value else 'n'
                else:
                    print(f'Enter a value for {key}{msg}')
                    entered_value = input(f'{key}: ')
                    set_values[key] = str(entered_value or default_value)
        else:
            # Non-interactive: saved value if present, else the default.
            set_values[
                key] = saved_value if saved_value is not None else default_value
    logs.debug('set', **log_kwargs)
    return set(values=set_values, secret_name=secret_name,
               configmap_name=configmap_name, namespace=namespace,
               extra_operator_labels=extra_operator_labels)
"""Query client CLI.""" from argparse import ArgumentParser from json import dumps from logging import DEBUG, INFO, basicConfig, getLogger from socket import timeout from sys import exit, stdout # pylint: disable=W0622 from mcipc.config import LOG_FORMAT, InvalidCredentials, Credentials from mcipc.query.client import Client from mcipc.query.config import CONFIG __all__ = ['main'] DEFAULT_INDENT = 2 if stdout.isatty() else None LOGGER = getLogger('queryclt') def get_args(): """Parses and returns the CLI arguments.""" parser = ArgumentParser(description='A Minecraft Query client.') parser.add_argument('server', help='the server to connect to') parser.add_argument('-i', '--indent', type=int, default=DEFAULT_INDENT, help='indentation for JSON output') parser.add_argument('-t', '--timeout', type=float,
def concat_str(*args):
    """Join the stringified args, honoring the global color setting.

    When color is off, the first and last args are dropped — presumably
    they are the opening/closing color codes (TODO confirm with callers).
    """
    # check whether to use colors, bold etc. or not
    use_color = Colors.color == 1 or Colors.color == 0 and stdout.isatty()
    parts = args if use_color else args[1:-1]
    return ''.join(str(part) for part in parts)
#!/usr/bin/env python '''@package docstring Some common python functions ''' from re import sub from sys import stdout, stderr from os import getenv from collections import namedtuple _atty_out = stdout.isatty() _atty_err = stderr.isatty() def PInfo(module, msg, newline="\n"): ''' function to write to stdout''' if _atty_out: stdout.write('\033[0;32mINFO\033[0m [%-40s]: %s%s' % (module, msg, newline)) else: stderr.write( 'INFO [%-40s]: %s%s' % (module, msg, newline) ) # redirect color-less output to stderr to maintain stream in log files def PWarning(module, msg, newline="\n"): ''' function to write to stdout''' if _atty_out: stdout.write('\033[0;91mWARNING\033[0m [%-40s]: %s%s' % (module, msg, newline)) else:
def process(args):
    """Entry point for the aurman CLI.

    Parses *args* (pacman-style), handles the trivial operations first
    (help/version, pass-through to pacman for non -S operations, --clean,
    --search), then runs the full sync routine: analyze the installed and
    upstream systems, solve dependencies, review PKGBUILDs and finally
    build/install the chosen solution chunk by chunk.
    """
    import aurman.aur_utilities

    try:
        read_config()  # read config - available via AurmanConfig.aurman_config
    except InvalidInput:
        sys.exit(1)

    # refuse to run as root; sudo is acquired per-command where needed
    if os.getuid() == 0:
        aurman_error("Do not run aurman with sudo")
        sys.exit(1)

    # parse parameters of user
    try:
        pacman_args = parse_pacman_args(args)
    except InvalidInput:
        aurman_note("aurman --help or aurman -h")
        sys.exit(1)

    # show help
    if pacman_args.operation is PacmanOperations.HELP:
        # remove colors in case of not terminal
        if stdout.isatty():
            print(aurman_help)
        else:
            print(Colors.strip_colors(str(aurman_help)))
        sys.exit(0)

    # show version
    if pacman_args.operation is PacmanOperations.VERSION:
        # remove colors in case of not terminal
        if stdout.isatty():
            aurman_note(expac("-Q", ("v", ), ("aurman-git", "aurman"))[0])
        else:
            print(expac("-Q", ("v", ), ("aurman-git", "aurman"))[0])
        sys.exit(0)

    # if not -S or --sync, just redirect to pacman
    if pacman_args.operation is not PacmanOperations.SYNC:
        try:
            # operations that modify the system need sudo
            if pacman_args.operation in [
                    PacmanOperations.UPGRADE, PacmanOperations.REMOVE,
                    PacmanOperations.DATABASE, PacmanOperations.FILES
            ]:
                run("sudo pacman {}".format(" ".join(
                    ["'{}'".format(arg) for arg in args])), shell=True)
            else:
                run("pacman {}".format(" ".join(
                    ["'{}'".format(arg) for arg in args])), shell=True)
        except InvalidInput:
            sys.exit(1)
        sys.exit(0)

    # -S or --sync: unpack the parsed flags into locals
    Package.optimistic_versioning = pacman_args.optimistic_versioning  # if --optimistic_versioning
    packages_of_user_names = list(
        set(pacman_args.targets))  # targets of the aurman command without duplicates
    sysupgrade = pacman_args.sysupgrade  # if -u or --sysupgrade
    sysupgrade_force = sysupgrade and not isinstance(
        sysupgrade, bool)  # if -u -u or --sysupgrade --sysupgrade
    needed = pacman_args.needed  # if --needed
    noedit = pacman_args.noedit  # if --noedit
    always_edit = pacman_args.always_edit  # if --always_edit
    show_changes = pacman_args.show_changes  # if --show_changes
    devel = pacman_args.devel  # if --devel
    only_unfulfilled_deps = not pacman_args.deep_search  # if not --deep_search
    pgp_fetch = pacman_args.pgp_fetch  # if --pgp_fetch
    noconfirm = pacman_args.noconfirm  # if --noconfirm
    search = pacman_args.search  # list containing the specified strings for -s and --search
    solution_way = pacman_args.solution_way  # if --solution_way
    do_everything = pacman_args.do_everything  # if --do_everything
    clean = pacman_args.clean  # if --clean
    rebuild = pacman_args.rebuild  # if --rebuild
    clean_force = clean and not isinstance(clean, bool)  # if --clean --clean
    no_notification_unknown_packages = 'miscellaneous' in AurmanConfig.aurman_config \
        and 'no_notification_unknown_packages' in AurmanConfig.aurman_config[
            'miscellaneous']

    # packages the user never wants "unknown package" notifications for
    concrete_no_notification_packages = set()
    if 'no_notification_unknown_packages' in AurmanConfig.aurman_config:
        for package_name in AurmanConfig.aurman_config[
                'no_notification_unknown_packages']:
            concrete_no_notification_packages.add(package_name)

    sudo_acquired = 'miscellaneous' in AurmanConfig.aurman_config \
        and 'no_sudo_loop' in AurmanConfig.aurman_config['miscellaneous']
    pacman_called = False

    not_remove = pacman_args.holdpkg  # list containing the specified packages for --holdpkg
    # if --holdpkg_conf append holdpkg from pacman.conf
    if pacman_args.holdpkg_conf:
        not_remove.extend(pacman_conf("HoldPkg"))
    # remove duplicates
    not_remove = list(set(not_remove))

    # mutually exclusive flag combinations
    if noedit and show_changes:
        aurman_error("--noedit and --show_changes is not what you want")
        sys.exit(1)

    if noedit and always_edit:
        aurman_error("--noedit and --always_edit is not what you want")
        sys.exit(1)

    aur = pacman_args.aur  # do only aur things
    repo = pacman_args.repo  # do only repo things

    if repo and aur:
        aurman_error("--repo and --aur is not what you want")
        sys.exit(1)

    # keyserver: command line wins over config
    if pacman_args.keyserver:
        keyserver = pacman_args.keyserver[0]
    else:
        keyserver = None

    if keyserver is None \
            and 'miscellaneous' in AurmanConfig.aurman_config \
            and 'keyserver' in AurmanConfig.aurman_config['miscellaneous']:
        keyserver = AurmanConfig.aurman_config['miscellaneous']['keyserver']

    if pacman_args.domain:
        aurman.aur_utilities.aur_domain = pacman_args.domain[0]

    default_show_changes = 'miscellaneous' in AurmanConfig.aurman_config \
        and 'default_show_changes' in AurmanConfig.aurman_config['miscellaneous']

    ignore_arch = 'miscellaneous' in AurmanConfig.aurman_config and \
        'ignore_arch' in AurmanConfig.aurman_config['miscellaneous']

    # do not allow -y without -u
    if pacman_args.refresh and not sysupgrade:
        aurman_error("-y without -u is not allowed!")
        sys.exit(1)

    # unrecognized parameters
    if pacman_args.invalid_args:
        aurman_error(
            "The following parameters are not recognized yet: {}".format(
                pacman_args.invalid_args))
        aurman_note("aurman --help or aurman -h")
        sys.exit(1)

    # if user wants to --clean
    if clean:
        if not aur:
            pacman(str(pacman_args), False, sudo=True)

        if not repo:
            if not os.path.isdir(Package.cache_dir):
                aurman_error("Cache directory {} not found."
                             "".format(
                                 Colors.BOLD(
                                     Colors.LIGHT_MAGENTA(Package.cache_dir))))
                sys.exit(1)

            aurman_note("Cache directory: {}".format(
                Colors.BOLD(Colors.LIGHT_MAGENTA(Package.cache_dir))))

            if clean_force:
                # --clean --clean: wipe the whole cache dir
                if noconfirm or \
                        ask_user("Do you want to remove {} from cache?"
                                 "".format(Colors.BOLD(Colors.LIGHT_MAGENTA("all files"))), False):
                    aurman_status("Deleting cache dir...")
                    if run("rm -rf {}".format(Package.cache_dir),
                           shell=True, stdout=DEVNULL,
                           stderr=DEVNULL).returncode != 0:
                        aurman_error("Directory {} could not be deleted"
                                     "".format(
                                         Colors.BOLD(
                                             Colors.LIGHT_MAGENTA(
                                                 Package.cache_dir))))
                        sys.exit(1)
            else:
                # plain --clean: only remove clones of uninstalled packages
                if noconfirm or \
                        ask_user("Do you want to remove {} clones from cache?"
                                 "".format(Colors.BOLD(Colors.LIGHT_MAGENTA("all uninstalled"))), False):
                    aurman_status("Deleting uninstalled clones from cache...")

                    # if pkgbase not available, the name of the package is the base
                    expac_returns = expac("-Q -1", ("e", "n"), ())
                    dirs_to_not_delete = set()
                    for expac_return in expac_returns:
                        pkgbase = expac_return.split("?!")[0]
                        if pkgbase == "(null)":
                            dirs_to_not_delete.add(expac_return.split("?!")[1])
                        else:
                            dirs_to_not_delete.add(pkgbase)

                    for thing in os.listdir(Package.cache_dir):
                        if os.path.isdir(os.path.join(Package.cache_dir, thing)):
                            if thing not in dirs_to_not_delete:
                                dir_to_delete = os.path.join(
                                    Package.cache_dir, thing)
                                if run("rm -rf {}".format(dir_to_delete),
                                       shell=True, stdout=DEVNULL,
                                       stderr=DEVNULL).returncode != 0:
                                    aurman_error(
                                        "Directory {} could not be deleted"
                                        "".format(
                                            Colors.BOLD(
                                                Colors.LIGHT_MAGENTA(
                                                    dir_to_delete))))
                                    sys.exit(1)

                if not noconfirm and \
                        ask_user("Do you want to remove {} from cache? ({})"
                                 "".format(Colors.BOLD(Colors.LIGHT_MAGENTA("all untracked git files")),
                                           Colors.BOLD(Colors.LIGHT_MAGENTA("even from installed packages"))), False):
                    aurman_status("Deleting untracked git files from cache...")
                    for thing in os.listdir(Package.cache_dir):
                        if os.path.isdir(os.path.join(Package.cache_dir, thing)):
                            dir_to_clean = os.path.join(
                                Package.cache_dir, thing)
                            if run("git clean -ffdx"
                                   "", shell=True, stdout=DEVNULL,
                                   stderr=DEVNULL,
                                   cwd=dir_to_clean).returncode != 0:
                                aurman_error(
                                    "Directory {} could not be cleaned"
                                    "".format(
                                        Colors.BOLD(
                                            Colors.LIGHT_MAGENTA(
                                                dir_to_clean))))
                                sys.exit(1)

        sys.exit(0)

    # if user just wants to search
    if search:
        # we only need the installed system for aur queries
        if not repo:
            try:
                installed_system = System(System.get_installed_packages())
            except InvalidInput:
                sys.exit(1)
        else:
            installed_system = None

        # start search
        try:
            search_and_print(search, installed_system, str(pacman_args), repo,
                             aur)
        except InvalidInput:
            sys.exit(1)

        sys.exit(0)

    # groups are for pacman
    groups_chosen = []
    if not aur:
        groups = pacman("-Sg", True, sudo=False)
        # iterate over a copy because we mutate the list while looping
        for name in packages_of_user_names[:]:
            if name in groups:
                groups_chosen.append(name)
                packages_of_user_names.remove(name)

    # pacman call in the beginning of the routine
    if not aur \
            and (sysupgrade and (not do_everything or pacman_args.refresh) or groups_chosen):
        if not sudo_acquired:
            acquire_sudo()
            sudo_acquired = True
        pacman_called = True
        pacman_args_copy = deepcopy(pacman_args)
        pacman_args_copy.targets = groups_chosen
        # aurman handles the update
        if do_everything:
            pacman_args_copy.sysupgrade = False
        # ignore packages from other sources for sysupgrade
        try:
            packages_from_other_sources_ret = packages_from_other_sources()
        except InvalidInput:
            sys.exit(1)
        names_to_ignore = packages_from_other_sources_ret[0]
        for name_to_ignore in packages_from_other_sources_ret[1]:
            names_to_ignore.add(name_to_ignore)
        for already_ignored in pacman_args_copy.ignore:
            names_to_ignore |= set(already_ignored.split(","))
        if names_to_ignore:
            pacman_args_copy.ignore = [",".join(names_to_ignore)]
        try:
            pacman(str(pacman_args_copy), False)
        except InvalidInput:
            sys.exit(1)

    # nothing to do for us
    if not sysupgrade and not packages_of_user_names:
        sys.exit(0)

    # delete -u --sysupgrade -y --refresh from parsed args
    # not needed anymore
    pacman_args.sysupgrade = False
    pacman_args.refresh = False

    # one status message
    if pacman_called:
        aurman_status("initializing {}...".format(Colors.BOLD("aurman")), True)
    else:
        aurman_status("initializing {}...".format(Colors.BOLD("aurman")),
                      False)

    # analyzing installed packages
    try:
        installed_system = System(System.get_installed_packages())
    except InvalidInput:
        sys.exit(1)

    # warn about packages neither in the repos nor in the AUR
    packages_to_show = [
        package
        for package in installed_system.not_repo_not_aur_packages_list
        if package.name not in concrete_no_notification_packages
    ]
    if packages_to_show and not no_notification_unknown_packages:
        aurman_status(
            "the following packages are neither in known repos nor in the aur")
        for package in packages_to_show:
            aurman_note("{}".format(
                Colors.BOLD(Colors.LIGHT_MAGENTA(package))))

    # fetching upstream repo packages...
    try:
        upstream_system = System(System.get_repo_packages())
    except InvalidInput:
        sys.exit(1)

    # fetching needed aur packages
    if not repo:
        upstream_system.append_packages_by_name(packages_of_user_names)
        # fetch info for all installed aur packages, too
        names_of_installed_aur_packages = [
            package.name for package in installed_system.aur_packages_list
        ]
        names_of_installed_aur_packages.extend(
            [package.name for package in installed_system.devel_packages_list])
        upstream_system.append_packages_by_name(
            names_of_installed_aur_packages)

    # remove known repo packages in case of --aur
    if aur:
        for package in upstream_system.repo_packages_list:
            del upstream_system.all_packages_dict[package.name]
        upstream_system = System(
            list(upstream_system.all_packages_dict.values()))

    # sanitize user input
    try:
        sanitized_names = upstream_system.sanitize_user_input(
            packages_of_user_names)
        sanitized_not_to_be_removed = installed_system.sanitize_user_input(
            not_remove)
    except InvalidInput:
        sys.exit(1)

    # names to not be removed must be also known on the upstream system,
    # otherwise aurman solving cannot handle this case.
    for name in sanitized_not_to_be_removed:
        if name not in upstream_system.all_packages_dict:
            aurman_error(
                "Packages you want to be not removed must be aur or repo packages.\n"
                " {} is not known.".format(
                    Colors.BOLD(Colors.LIGHT_MAGENTA(name))))
            sys.exit(1)

    # for dep solving not to be removed has to be treated as wanted to install
    sanitized_names |= sanitized_not_to_be_removed

    # fetching ignored packages
    ignored_packages_names = Package.get_ignored_packages_names(
        pacman_args.ignore, pacman_args.ignoregroup, upstream_system)
    # explicitly typed in names will not be ignored
    ignored_packages_names -= sanitized_names

    for ignored_packages_name in ignored_packages_names:
        if ignored_packages_name in upstream_system.all_packages_dict:
            if ignored_packages_name in installed_system.all_packages_dict:
                # keep the installed version instead of the upstream one
                aurman_note("{} {} package {}".format(
                    Colors.BOLD(Colors.LIGHT_MAGENTA("Ignoring")),
                    Colors.BOLD(Colors.LIGHT_CYAN("installed")),
                    Colors.BOLD(Colors.LIGHT_MAGENTA(ignored_packages_name))))
                upstream_system.all_packages_dict[
                    ignored_packages_name] = installed_system.all_packages_dict[
                        ignored_packages_name]
            else:
                aurman_note("{} {} package {}".format(
                    Colors.BOLD(Colors.LIGHT_MAGENTA("Ignoring")),
                    Colors.BOLD(Colors.LIGHT_BLUE("upstream ")),
                    Colors.BOLD(Colors.LIGHT_MAGENTA(ignored_packages_name))))
                del upstream_system.all_packages_dict[ignored_packages_name]

    # recreating upstream system
    if ignored_packages_names:
        upstream_system = System(
            list(upstream_system.all_packages_dict.values()))

    # if user entered --devel and not --repo, fetch all needed pkgbuilds etc. for the devel packages
    if devel and not repo:
        aurman_status(
            "looking for new pkgbuilds of devel packages and fetch them...")
        for package in upstream_system.devel_packages_list:
            if package.name not in ignored_packages_names:
                package.fetch_pkgbuild()
        try:
            for package in upstream_system.devel_packages_list:
                if package.name not in ignored_packages_names:
                    package.show_pkgbuild(noedit, show_changes, pgp_fetch,
                                          keyserver, always_edit,
                                          default_show_changes)
        except InvalidInput:
            sys.exit(1)
        for package in upstream_system.devel_packages_list:
            if package.name not in ignored_packages_names:
                package.get_devel_version()

    # checking which packages need to be installed
    if not needed:
        concrete_packages_to_install = [
            upstream_system.all_packages_dict[name]
            for name in sanitized_names
        ]
    else:
        # --needed: skip targets that are already installed in this version
        possible_packages = [
            upstream_system.all_packages_dict[name]
            for name in sanitized_names
        ]
        concrete_packages_to_install = []
        for package in possible_packages:
            if package.name in installed_system.all_packages_dict:
                installed_package = installed_system.all_packages_dict[
                    package.name]
                if not version_comparison(installed_package.version, "=",
                                          package.version):
                    concrete_packages_to_install.append(package)
            else:
                concrete_packages_to_install.append(package)

    # in case of sysupgrade fetch all installed packages, of which newer versions are available
    if sysupgrade:
        installed_packages = []
        if not repo:
            installed_packages.extend(
                [package for package in installed_system.aur_packages_list])
            installed_packages.extend(
                [package for package in installed_system.devel_packages_list])
        if not aur:
            installed_packages.extend(
                [package for package in installed_system.repo_packages_list])
        for package in installed_packages:
            # must not be that we have not received the upstream information
            assert package.name in upstream_system.all_packages_dict
            upstream_package = upstream_system.all_packages_dict[package.name]
            # normal sysupgrade
            if not sysupgrade_force:
                if version_comparison(upstream_package.version, ">",
                                      package.version):
                    if upstream_package not in concrete_packages_to_install:
                        concrete_packages_to_install.append(upstream_package)
            # sysupgrade with downgrades
            else:
                if not version_comparison(upstream_package.version, "=",
                                          package.version):
                    if upstream_package not in concrete_packages_to_install:
                        concrete_packages_to_install.append(upstream_package)

    aurman_status("calculating solutions...")
    if only_unfulfilled_deps:
        solutions = Package.dep_solving(concrete_packages_to_install,
                                        installed_system, upstream_system)
    else:
        # deep search: solve against an empty system
        solutions = Package.dep_solving(concrete_packages_to_install,
                                        System(()), upstream_system)

    # validates the found solutions and lets the user choose one of them, if there are more than one valid solutions
    try:
        chosen_solution = installed_system.validate_and_choose_solution(
            solutions, concrete_packages_to_install)
    except InvalidInput:
        aurman_error("we could not find a solution")
        # if not --deep_search
        if only_unfulfilled_deps:
            aurman_error(
                "if you think that there should be one, rerun aurman with the --deep_search flag"
            )
        sys.exit(1)

    # needed because deep_search ignores installed packages
    if not only_unfulfilled_deps:
        pacman_args.needed = True

    # solution contains no packages
    if not chosen_solution:
        aurman_note("nothing to do... everything is up to date")
        sys.exit(0)

    try:
        installed_system.show_solution_differences_to_user(
            chosen_solution, upstream_system, noconfirm,
            not only_unfulfilled_deps, solution_way)
    except InvalidInput:
        sys.exit(1)

    # fetch and review PKGBUILDs of the non-repo packages to install
    if not repo:
        aurman_status("looking for new pkgbuilds and fetch them...")
        for package in chosen_solution:
            if package.type_of is PossibleTypes.REPO_PACKAGE \
                    or devel and package.type_of is PossibleTypes.DEVEL_PACKAGE:
                continue
            package.fetch_pkgbuild()
        try:
            for package in chosen_solution:
                if package.type_of is PossibleTypes.REPO_PACKAGE \
                        or devel and package.type_of is PossibleTypes.DEVEL_PACKAGE:
                    continue
                package.show_pkgbuild(noedit, show_changes, pgp_fetch,
                                      keyserver, always_edit,
                                      default_show_changes)
        except InvalidInput:
            sys.exit(1)

    # install packages
    if not sudo_acquired:
        acquire_sudo()
        sudo_acquired = True

    # repo packages to install from other sources
    repo_packages_dict = packages_from_other_sources()[1]

    # generate pacman args for the aur packages
    pacman_args_copy = deepcopy(pacman_args)
    pacman_args_copy.operation = PacmanOperations.UPGRADE
    pacman_args_copy.targets = []

    args_for_explicit = str(pacman_args_copy)
    pacman_args_copy.asdeps = True
    pacman_args_copy.asexplicit = False
    args_for_dependency = str(pacman_args_copy)

    # calc chunks to install
    solution_packages_chunks = System.calc_install_chunks(chosen_solution)

    # install the chunks
    for package_chunk in solution_packages_chunks:
        # repo chunk
        if package_chunk[0].type_of is PossibleTypes.REPO_PACKAGE:
            # container for explicit repo deps
            as_explicit_container = set()
            for package in package_chunk:
                if package.name in sanitized_names and package.name not in sanitized_not_to_be_removed \
                        or ((package.name in installed_system.all_packages_dict)
                            and (installed_system.all_packages_dict[package.name].install_reason == 'explicit')):
                    as_explicit_container.add(package.name)

            pacman_args_copy = deepcopy(pacman_args)
            pacman_args_copy.targets = [
                package.name for package in package_chunk
                if package.name not in repo_packages_dict
            ]
            # targets from other repos get an explicit "repo/" prefix
            pacman_args_copy.targets.extend([
                "{}/".format(repo_packages_dict[package.name]) + package.name
                for package in package_chunk
                if package.name in repo_packages_dict
            ])
            pacman_args_copy.asdeps = True
            pacman_args_copy.asexplicit = False
            try:
                pacman(str(pacman_args_copy), False, use_ask=True)
            except InvalidInput:
                sys.exit(1)

            if as_explicit_container:
                pacman("-D --asexplicit {}".format(
                    " ".join(as_explicit_container)), True, sudo=True)

        # aur chunks always consist of one package
        else:
            package = package_chunk[0]
            try:
                package.build(ignore_arch, rebuild)
                if package.name in sanitized_names and package.name not in sanitized_not_to_be_removed \
                        or ((package.name in installed_system.all_packages_dict)
                            and (installed_system.all_packages_dict[package.name].install_reason == 'explicit')):
                    package.install(args_for_explicit, use_ask=True)
                else:
                    package.install(args_for_dependency, use_ask=True)
            except InvalidInput:
                sys.exit(1)
def get_encoding():
    """Return the encoding to use for console output.

    Prefer the terminal's own encoding when stdout is an interactive tty;
    otherwise fall back to the locale's preferred encoding.
    """
    isatty = getattr(stdout, 'isatty', None)
    if isatty is not None and isatty():
        return stdout.encoding
    return getpreferredencoding()
def cli(n, verbose, gzip, xz, outfile):
    """ Fetch a bunch of s3 files into a tar archive.

    \b
    For every non-empty line in stdin
      - Treat line as a URI and fetch document from it
      - Write content of the file to a tar archive using
        `bucket-name/path/to/file` as file name
    """
    logging.basicConfig(
        format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
        level=logging.ERROR)

    # default concurrency when -n is not given
    nconnections = 24 if n is None else n
    exit_early = False

    def dump_to_tar(data_stream, tar):
        # Drain the fetcher's result stream into the tar archive.
        # Progress dots/rates go to stderr so stdout can carry the tar data.
        nonlocal exit_early
        fps = RateEstimator()

        for d in data_stream:
            fps()
            # strip the "s3://" scheme prefix to get "bucket/path" as the
            # archive member name
            fname = d.url[5:]

            if d.data is not None:
                if verbose:
                    if fps.every(10):
                        print('.', file=stderr, end='', flush=True)
                    if fps.every(100):
                        print(' {}'.format(str(fps)), file=stderr)

                add_txt_file(tar, fname, d.data,
                             last_modified=d.last_modified)
            else:
                print("Failed %s (%s)" % (d.url, str(d.error)), file=stderr)

            # set by the SIGINT handler below
            if exit_early:
                break

        if verbose:
            print(' {}'.format(str(fps)), file=stderr)

    fetcher = S3Fetcher(nconcurrent=nconnections)

    # '-' means "write the archive to stdout"
    is_pipe = outfile == '-'
    tar_opts = dict(mode='w' + tar_mode(gzip=gzip, xz=xz, is_pipe=is_pipe))
    if is_pipe:
        # refuse to dump binary tar data onto an interactive terminal
        if stdout.isatty():
            click.echo("Will not write to a terminal", err=True)
            sys.exit(1)
        # TODO: on windows switch stdout to binary mode
        tar_opts['fileobj'] = stdout.buffer
    else:
        tar_opts['name'] = outfile

    urls = read_stdin_lines(skip_empty=True)

    def on_ctrlc(sig, frame):
        # SIGINT handler: ask the dump loop to stop after the current item
        nonlocal exit_early
        print('Shuttting down', file=sys.stderr)
        exit_early = True

    signal.signal(signal.SIGINT, on_ctrlc)

    with tarfile.open(**tar_opts) as tar:
        dump_to_tar(fetcher(urls), tar)

    fetcher.close()
# Tool configuration constants.
LETTERS_FILE = "./misc/letters.json"
MAX_THREAD_COUNT = 7
OUTPUT_DIR = "./output"
GEOLOCATION_WEBSITE = "http://freegeoip.net/json"

### YOUR VIRUSTOTAL API KEYs
VT_APIKEY_LIST = []

BANNER = '''
 _ __  _   _ _ __  _   _  ___| |__   ___  ___| | __
| '_ \| | | | '_ \| | | |/ __| '_ \ / _ \/ __| |/ /
| |_) | |_| | | | | |_| | (__| | | | __/ (__|   <
| .__/ \__,_|_| |_|\__, |\___|_| |_|\___|\___|_|\_\\
|_|                |___/

{}
'''.format(VERSION)

# Set console colors: ANSI escape sequences only when stdout is an
# interactive terminal on a non-Windows platform, otherwise empty strings.
if platform != 'win32' and stdout.isatty():
    YEL = '\x1b[33m'
    MAG = '\x1b[35m'
    BLU = '\x1b[34m'
    GRE = '\x1b[32m'
    RED = '\x1b[31m'
    RST = '\x1b[39m'
    CYA = '\x1b[36m'
else:
    YEL = ''
    MAG = ''
    GRE = ''
    RED = ''
    BLU = ''
    CYA = ''
    # FIX: RST was missing from this branch, causing a NameError whenever
    # RST was used on win32 or with redirected output.
    RST = ''
def write(line):
    """Echo *line* to stdout when attached to a terminal, and always log it."""
    interactive = stdout.isatty()
    if interactive:
        print(line)
    logging.info(line)
def main():
    """Main function: parse CLI options, lint every matching file and report.

    Returns 0 on success (or when --always_exit_success is set), 1 when
    problems were found.  NOTE: Python 2 code (print statements).
    """
    parser = argparse.ArgumentParser(description='Linter for the pan language')
    parser.add_argument('paths', metavar='PATH', type=str, nargs='*',
                        help='Paths of files to check')
    parser.add_argument('--vi', action='store_true',
                        help='Output line numbers in a vi option style')
    parser.add_argument('--table', action='store_true',
                        help='Display a table of per-file problem stats')
    parser.add_argument('--allow_mvn_templates', action='store_true',
                        help='Allow use of maven templates')
    parser.add_argument('--always_exit_success', action='store_true',
                        help='Always exit cleanly even if problems are found')
    # --debug and --ide are mutually exclusive output modes
    group_output = parser.add_mutually_exclusive_group()
    group_output.add_argument('--debug', action='store_true',
                              help='Enable debug output')
    group_output.add_argument(
        '--ide', action='store_true',
        help='Output machine-readable results for use by IDEs')
    args = parser.parse_args()

    # Only output colors sequences if the output is a terminal
    colorama_init(strip=(not stdout.isatty()) or args.ide)

    global DEBUG
    DEBUG = args.debug

    problems_found = 0
    reports = []
    problem_stats = {}

    if not args.paths:
        print 'No files were provided, not doing anything'
        return 0

    # lint every file matched by each glob pattern, accumulating reports
    for path in args.paths:
        for filename in glob(path):
            file_reports, file_problems = lint_file(filename,
                                                    args.allow_mvn_templates)
            reports += file_reports
            problems_found += file_problems
            problem_stats[filename] = file_problems

    for report in reports:
        print_report(*report, vi=args.vi)

    if args.table:
        print
        print 'Problem count per file:'
        print filestats_table(problem_stats)

    print
    print '%d problems found in total' % problems_found

    if args.always_exit_success:
        return 0

    if problems_found:
        return 1
def _init_session(arguments, is_non_api=False):
    """Initialize a kamaki session (logging, SSL, colors) and resolve the
    cloud to use.

    :param arguments: dict of parsed argument objects; must contain 'help',
        'debug', 'verbose', 'config', 'ca_file', 'ignore_ssl' and 'cloud'
    :param is_non_api: True for commands that need no API session
    :returns: cloud name, or None when only help / a non-API command runs
    :raises CLIError: on SSL setup failure, missing or ambiguous cloud
        configuration, or missing authentication URL/token
    """
    _help = arguments['help'].value
    global _debug
    _debug = arguments['debug'].value
    _verbose = arguments['verbose'].value
    _cnf = arguments['config']
    _setup_logging(_debug, _verbose)

    if _help or is_non_api:
        return None

    # Patch https for SSL Authentication
    ca_file = arguments['ca_file'].value or _cnf.get('global', 'ca_certs')
    ignore_ssl = arguments['ignore_ssl'].value or (_cnf.get(
        'global', 'ignore_ssl').lower() == 'on')

    if ca_file:
        try:
            https.patch_with_certs(ca_file)
        except https.SSLUnicodeError as sslu:
            raise CLIError(
                'Failed to set CA certificates file %s' % ca_file,
                importance=2,
                details=[
                    'SSL module cannot handle non-ascii file names',
                    'Check the file path and consider moving and renaming',
                    'To set the new CA certificates path',
                    ' kamaki config set ca_certs CA_FILE',
                    sslu,
                ])
    else:
        warn = red('CA certifications path not set (insecure) ')
        kloger.warning(warn)
        https.patch_ignore_ssl(ignore_ssl)

    _check_config_version(_cnf.value)

    # disable colored output unless on a tty with colors switched on
    _colors = _cnf.value.get('global', 'colors')
    if not (stdout.isatty() and _colors == 'on'):
        remove_colors()

    # resolve the cloud: CLI flag > config default > the single configured
    # cloud (ambiguous when several clouds exist without a default)
    cloud = arguments['cloud'].value or _cnf.value.get('global',
                                                       'default_cloud')
    if not cloud:
        num_of_clouds = len(_cnf.value.keys('cloud'))
        if num_of_clouds == 1:
            cloud = _cnf.value.keys('cloud')[0]
        elif num_of_clouds > 1:
            raise CLIError(
                'Found %s clouds but none of them is set as default' % (
                    num_of_clouds),
                importance=2,
                details=[
                    'Please, choose one of the following cloud names:',
                    ', '.join(_cnf.value.keys('cloud')),
                    'To see all cloud settings:',
                    ' kamaki config get cloud.<cloud name>',
                    'To set a default cloud:',
                    ' kamaki config set default_cloud <cloud name>',
                    'To pick a cloud for the current session, use --cloud:',
                    ' kamaki --cloud=<cloud name> ...'
                ])
    if cloud not in _cnf.value.keys('cloud'):
        raise CLIError(
            'No cloud%s is configured' % ((' "%s"' % cloud) if cloud else ''),
            importance=3,
            details=[
                'To configure a new cloud "%s", find and set the' % (
                    cloud or '<cloud name>'),
                'single authentication URL and token:',
                ' kamaki config set cloud.%s.url <URL>' % (
                    cloud or '<cloud name>'),
                ' kamaki config set cloud.%s.token <t0k3n>' % (
                    cloud or '<cloud name>')
            ])

    auth_args = dict()
    for term in ('url', 'token'):
        try:
            auth_args[term] = _cnf.get_cloud(cloud, term)
        except (KeyError, IndexError):
            # BUG FIX: was `except KeyError or IndexError`, which evaluates
            # to `except KeyError` only and let IndexError propagate; a
            # tuple catches both as intended.
            auth_args[term] = ''
        if not auth_args[term]:
            raise CLIError(
                'No authentication %s provided for cloud "%s"' % (
                    term.upper(), cloud),
                importance=3,
                details=[
                    'Set a %s for cloud %s:' % (term.upper(), cloud),
                    ' kamaki config set cloud.%s.%s <%s>' % (
                        cloud, term, term.upper())
                ])
    return cloud
def is_term():
    """Tells whether the current stdout/stderr stream are connected to a
    terminal (vs. a regular file or pipe)"""
    attached = stdout.isatty()
    return attached
def is_a_tty():
    '''
    True if tty and stdout availale.
    (from https://github.com/willyg302)
    '''
    isatty = getattr(stdout, 'isatty', None)
    return isatty is not None and isatty()
def is_colorterm():
    """Tells whether the current terminal (if any) support colors escape
    sequences"""
    color_capable = ('xterm-color', 'ansi')
    if not stdout.isatty():
        return False
    return environ.get('TERM') in color_capable
def print_table(self, alloc, entity, rows, only_these_fields, sort=False,
                transforms=None):
    """Print *rows* for *entity* as an ASCII table, or CSV when alloc.csv
    is set.  No output at all when alloc.quiet is set or rows is empty.
    NOTE: Python 2 code (print statements, unicode builtin).
    """
    # For printing out results in an ascii table or CSV format.
    if alloc.quiet:
        return
    if not rows:
        return

    # normalize scalar arguments to lists
    if not isinstance(sort, list):
        sort = [sort]
    if not isinstance(only_these_fields, list):
        only_these_fields = [only_these_fields]

    only_these_fields = self.__get_only_these_fields(
        alloc, entity, rows, only_these_fields)
    # only_these_fields alternates key/label pairs; labels are the odd slots
    field_names = only_these_fields[1::2]

    # Re-order the table, this changes the dict to a list i.e.
    # dict.items().
    rows = self.__get_sorted_rows(alloc, entity, rows, sort)

    if rows:
        # project each raw row onto the selected fields (with transforms)
        rows2 = []
        for k_, row in rows:
            row = self.__get_row(alloc, entity, row, only_these_fields,
                                 transforms)
            rows2.append(row)
        rows = rows2

    if alloc.csv:
        csv_table = csv.writer(sys.stdout, lineterminator="\n")
        for row in rows:
            csv_table.writerow([unicode(s).encode('utf-8') for s in row])
    else:
        table = PrettyTable()
        table.set_field_names(field_names)
        # table.field_names = field_names
        # right-align money columns (label contains '$'), left-align the rest
        for label in field_names:
            if '$' in label:
                table.set_field_align(label, "r")
            else:
                table.set_field_align(label, "l")

        if stdout.isatty():
            # query the terminal size via stty so wide rows can be trimmed
            proc = subprocess.Popen(['stty', 'size'],
                                    stdout=subprocess.PIPE,
                                    stderr=open("/dev/null", "w"))
            ret = proc.wait()
            if ret == 0:
                height_, width = proc.communicate()[0].split()
                width = int(width)
                rows = self.__fit_rows_to_screen(alloc, rows, field_names,
                                                 width)

        for row in rows:
            table.add_row(row)

        print unicode(table.get_string(header=True)).encode('utf-8')
        # http://stackoverflow.com/questions/15793886/how-to-avoid-a-broken-pipe-error-when-printing-a-large-amount-of-formatted-data
        sys.stdout.flush()
from ptrace.terminal import enableEchoMode, terminalWidth from errno import ESRCH from ptrace.cpu_info import CPU_POWERPC from ptrace.debugger import ChildError from ptrace.debugger.memory_mapping import readProcessMappings from ptrace.os_tools import RUNNING_PYTHON3 try: unichr raw_input except NameError: # Python 3 unichr = chr raw_input = input import re if stdout.isatty(): try: # Use readline for better raw_input() import readline # noqa except ImportError: pass # Match a register name: $eax, $gp0, $orig_eax REGISTER_REGEX = re.compile(r"\$[a-z]+[a-z0-9_]+") # BYTES_REGEX = re.compile(r"""(?:'([^'\\]*)'|"([^"\\]*)")""") SIGNALS = inverseDict(SIGNAMES) # name -> signum COMMANDS = ( # trace instructions
#!/usr/bin/env python from sys import stdin, stdout, stderr print "Piped input:", not stdin.isatty() print "Piped output:", not stdout.isatty() print "Piped error:", not stderr.isatty()
# Attach a file handler that records everything at log_level to the qtop
# log file.
fh = logging.FileHandler(QTOP_LOGFILE)
fh.setLevel(log_level)
fh.setFormatter(formatter)
logger.addHandler(fh)

# Attach a stream handler so errors always reach the console.
fh = logging.StreamHandler()
fh.setLevel(logging.ERROR)
# TODO originally:
# fh.setLevel(logging.ERROR) if options.DEBUG else fh.setLevel(logging.ERROR)
# ->this resulted in uncaught exceptions not printing to stderr !!
fh.setFormatter(formatter)
logger.addHandler(fh)
logger.disabled = False  # TODO: maybe make this a cmdline switch? -D ?

# Visual separator marking the start of a new run in the log file.
logging.info("\n")
logging.info("=" * 50)
logging.info("STARTING NEW LOG ENTRY...")
logging.info("=" * 50)
logging.info("\n\n")

logging.debug('Verbosity level = %s' % options.verbose)
logging.debug("input, output isatty: %s\t%s" % (stdin.isatty(), stdout.isatty()))
# AUTO color mode: honor the QTOP_COLOR env var, falling back to whether
# stdout is an interactive terminal.
if options.COLOR == 'AUTO':
    options.COLOR = 'ON' if (os.environ.get("QTOP_COLOR", stdout.isatty())
                             in ("ON", True)) else 'OFF'
logging.debug("options.COLOR is now set to: %s" % options.COLOR)
options.REMAP = False  # Default value
# route uncaught exceptions through the custom handler
sys.excepthook = handle_exception
# TODO: check if I really need this any more
def process_response(self, request, response):
    """Django middleware hook: after each request, dump the SQL queries
    executed during it to the terminal (colorized), then return the
    response unchanged.  NOTE: Python 2 code (print statement).
    """
    from sys import stdout
    # only print when attached to an interactive terminal (e.g. runserver)
    if stdout.isatty():
        for query in connection.queries:
            # \033[1;31m = bold red (query time), \033[1m = bold (SQL text);
            # split/join collapses the SQL onto a single line
            print "\033[1;31m[%s]\033[0m \033[1m%s\033[0m" % (
                query['time'], " ".join(query['sql'].split()))
    return response
def printing():
    """Return True when progress output should be shown: suppressed
    entirely by NO_BAR, otherwise shown on an interactive terminal or
    inside a notebook."""
    if NO_BAR:
        return False
    return stdout.isatty() or IN_NOTEBOOK
def get_argument_parser():
    """Build and return the icalcli argument parser.

    Defines the global options (--interactive, --config, --locale, color
    and line-art settings) and one subparser per command (sync, quit,
    search, edit, delete, agenda, calw, calm, add), sharing common parent
    parsers for output/color/search/date-range options.
    """
    parser = argparse.ArgumentParser(
        description='Icalendar Calendar Command Line Interface',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        fromfile_prefix_chars="@")

    parser.add_argument(
        "--version", action="version",
        version="%%(prog)s %s (%s)" % (icalcli.__version__,
                                       icalcli.__author__))
    parser.add_argument(
        "-i", "--interactive", action="store_true", default=False,
        help="Interactively execute commands")
    parser.add_argument(
        "-c", "--config", default=expanduser('~/.icalcli.py'), type=str,
        help="Config script to be executed")
    parser.add_argument(
        "--locale", default='', type=str, help="System locale")
    parser.add_argument(
        "--conky", action="store_true", default=False,
        help="Use Conky color codes")
    # color defaults to on only when stdout is an interactive terminal
    parser.add_argument(
        "--nocolor", action="store_false", default=stdout.isatty(),
        dest="color", help="Enable/Disable all color output")
    parser.add_argument(
        "--lineart", default="unicode",
        choices=["fancy", "unicode", "ascii"],
        help="Choose line art style for calendars: \"fancy\": for" +
        "VTcodes, \"unicode\" for Unicode box drawing characters," +
        "\"ascii\" for old-school plusses, hyphens and pipes.")

    # parent parser types used for subcommands
    outputs_parser = get_outputs_parser()
    color_parser = get_color_parser()
    # Output parser should imply color parser
    output_parser = get_output_parser(parents=[color_parser])
    # remind_parser = get_remind_parser()
    cal_query_parser = get_cal_query_parser()
    # parsed start and end times
    start_end_parser = get_start_end_parser()
    # tacks on search text
    search_parser = get_search_parser()

    sub = parser.add_subparsers(
        help="Invoking a subcommand with --help prints subcommand usage.",
        dest="command")
    # sub.required = True

    sub.add_parser("sync", aliases=['y'])
    sub.add_parser("quit", aliases=['q'])

    # sub.add_parser("recent", aliases=['r'],
    #                parents=[outputs_parser, output_parser])

    sub.add_parser(
        "search", aliases=['s'],
        parents=[outputs_parser, output_parser, search_parser])

    edit = sub.add_parser(
        "edit", aliases=['e'],
        parents=[outputs_parser, output_parser, search_parser])
    edit.add_argument(
        "--no-auto-sync", action="store_true", default=False,
        help="Do not automatically sync when calendar changed")

    delete = sub.add_parser(
        "delete", aliases=['d'],
        parents=[outputs_parser, output_parser, search_parser])
    # delete.add_argument("--no-prompt", action="store_true", default=False,
    #                     help="Delete without prompting")
    # FIX: the help text was inverted ("Automatically sync..."), which
    # contradicted the flag's name and the identical flag on `edit`.
    delete.add_argument(
        "--no-auto-sync", action="store_true", default=False,
        help="Do not automatically sync when calendar changed")

    agenda = sub.add_parser(
        "agenda", aliases=['g'],
        parents=[outputs_parser, output_parser, start_end_parser])
    agenda.add_argument('-n', "--days", type=int, default=5, nargs="?")

    calw = sub.add_parser(
        "calw", aliases=['w'],
        parents=[outputs_parser, output_parser, cal_query_parser])
    calw.add_argument('-n', "--weeks", type=int, default=2, nargs="?")

    sub.add_parser(
        "calm", aliases=['m'],
        parents=[outputs_parser, output_parser, cal_query_parser])

    # sub.add_parser("interactive", aliases=['i'])

    add = sub.add_parser(
        "add", aliases=['a'],
        parents=[outputs_parser, output_parser])
    fill_add_parser(add)

    return parser
#!/usr/bin/env python2 import math from sys import stdout # Colored message ANSI constants g_green = chr(27) + "[32m" if stdout.isatty() else "" g_yellow = chr(27) + "[33m" if stdout.isatty() else "" g_normal = chr(27) + "[0m" if stdout.isatty() else "" def printStatsOfList(results, label='Statistics', summaryOnly=False): total = totalSq = n = 0 allOfThem = [] for a in results: total += a totalSq += a*a n += 1 allOfThem.append(a) if n == 0: return varianceFull = (totalSq - total*total/n)/n if varianceFull < 0.: varianceFull = 0. if n > 1: variance = (totalSq - total*total/n)/(n-1) if variance < 0.: variance = 0. else: variance = 0. srted = sorted(allOfThem)
def write(line):
    """Echo *line* to the terminal (only when interactive) and always log it."""
    if stdout.isatty():
        print(line)
    logging.info(line)
    VIR_MIGRATE_TUNNELLED,
)
from igvm.hypervisor_preferences import (
    HashDifference,
    HypervisorAttributeValue,
    HypervisorAttributeValueLimit,
    InsufficientResource,
    OtherVMs,
    OverAllocation,
)

# Fabric (SSH) settings shared by all igvm remote operations.
# A pty is requested only when our own stdout is a terminal, so interactive
# runs behave like a shell while scripted runs stay non-interactive.
COMMON_FABRIC_SETTINGS = dict(
    disable_known_hosts=True,
    use_ssh_config=True,
    always_use_pty=stdout.isatty(),
    forward_agent=True,
    shell='/bin/sh -c',
    timeout=5,
    connection_attempts=1,
    remote_interrupt=True,
)

# The 'user' key cannot be added unconditionally in the dict above: a None
# value would be interpreted as the literal username "None", hence this
# separate, guarded assignment.
if 'IGVM_SSH_USER' in environ:
    COMMON_FABRIC_SETTINGS['user'] = environ.get('IGVM_SSH_USER')

# LVM volume group used for guest storage.
VG_NAME = 'xen-data'
# Reserved pool space on Hypervisor
# TODO: this could be a percent value, at least for ZFS.
def main():
    """CLI entry point: parse arguments, upload the images and print the URLs.

    Reads the API key from --api-key or the PTPIMG_API_KEY environment
    variable; optionally takes the image path/URL from the clipboard
    (--clip) and copies the result URLs back to it when pyperclip is
    available.  Exits via parser.error() on invalid input or upload failure.
    """
    import argparse
    import sys

    # Clipboard support is optional; pyperclip may not be installed.
    try:
        import pyperclip
    except ImportError:
        pyperclip = None

    # With --clip the positional file list may be empty, because the image
    # (path or URL) can come from the clipboard instead.
    nargs = "+"
    if "--clip" in sys.argv and pyperclip:
        nargs = "*"
    parser = argparse.ArgumentParser(description="PTPImg uploader")
    parser.add_argument('images', metavar='filename|url', nargs=nargs)
    parser.add_argument(
        '-k', '--api-key', default=os.environ.get('PTPIMG_API_KEY'),
        help='PTPImg API key (or set the PTPIMG_API_KEY environment variable)')
    # The clipboard-related option only exists when pyperclip is importable.
    if pyperclip is not None:
        parser.add_argument(
            '-n', '--dont-copy', action='store_false', default=True,
            dest='clipboard',
            help='Do not copy the resulting URLs to the clipboard')
    parser.add_argument(
        '--clip', action='store_true', default=False,
        help='copy from image from clipboard. Image can either ' +
        'be a path to the image, a url to the image')
    parser.add_argument('-b', '--bbcode', action='store_true', default=False,
                        help='Output links in BBCode format (with [img] tags)')
    parser.add_argument('--nobell', action='store_true', default=False,
                        help='Do not bell in a terminal on completion')
    args = parser.parse_args()

    images = args.images
    if args.clip:
        if pyperclip is None:
            # Bug fix: previously this crashed with AttributeError on
            # `None.paste()`; fail with a clear message instead.
            parser.error('--clip requires the pyperclip package')
        images.append(pyperclip.paste())

    if not args.api_key:
        parser.error('Please specify an API key')
    try:
        image_urls = upload(args.api_key, images)
        if args.bbcode:
            printed_urls = [
                '[img]{}[/img]'.format(image_url) for image_url in image_urls
            ]
        else:
            printed_urls = image_urls
        print(*printed_urls, sep='\n')
        # Copy to clipboard if possible; getattr with a default because the
        # 'clipboard' dest is only defined when pyperclip is available.
        if getattr(args, 'clipboard', False):
            pyperclip.copy('\n'.join(image_urls))
        # Ring a terminal if we are in terminal and allowed to do this
        if not args.nobell and stdout.isatty():
            stdout.write('\a')
            stdout.flush()
    except (UploadFailed, ValueError) as e:
        parser.error(str(e))
def get_args(self, alloc, command_list, ops, s):
    """Parse the remaining CLI words for a subcommand.

    Builds an argparse parser from the option specs in `ops` (augmented by a
    generic --list-option), parses sys.argv[2:], and returns a
    (options-dict, "") tuple.  Handles --help / --list-option by printing
    and exiting, and configures `alloc.csv` from any --csv value.
    NOTE(review): Python 2 code (print statements); the exact layout of each
    `ops` entry (short flag, long flag, description) is inferred from usage
    here — confirm against __parse_args.
    """
    # This function allows us to handle the cli arguments efficiently.
    options = []
    rtn = {}
    # For interrogation of a command's potential arguments
    ops.append(
        ('', 'list-option ', 'List all options in a single column.'))
    # The options parser cannot handle long args that have optional parameters
    # If --csv is used without an argument, replace it with --csv=always
    if '--csv' in command_list:
        idx = command_list.index('--csv')
        # If the next word is a recognised mode, fold it into --csv=<mode>;
        # otherwise default the bare flag to --csv=always.
        if len(command_list) > idx + 1 and command_list[idx + 1] in ['always', 'never', 'auto']:
            command_list[idx] = '--csv=' + command_list[idx + 1]
            del command_list[idx + 1]
        else:
            command_list[idx] = '--csv=always'
    no_arg_ops, all_ops, all_ops_list = self.__parse_args(ops)
    # add_help=False so --help is handled manually below.
    parser = argparse.ArgumentParser(
        prog=os.path.basename(" ".join(sys.argv[0:2])), add_help=False)
    for k, v in all_ops.items():
        a1 = []   # positional args for add_argument (the flag strings)
        a2 = {}   # keyword args for add_argument
        a2['dest'] = k
        # Sentinel characters in the description select the argparse action:
        # ':' => repeatable option (append), '.' => single string value.
        if ':' in v[2]:
            a2['default'] = []
            a2['action'] = 'append'
        elif '.' in v[2]:
            a2['default'] = ''
            a2['action'] = 'store'
        # v[0]/v[1] are the short/long flags; '-'/'--' mark "no such flag".
        if v[0] != '-':
            a1.append(v[0])
        if v[1] != '--':
            a1.append(v[1])
        # Flags listed in no_arg_ops take no argument: boolean store_true.
        if v[0] in no_arg_ops or v[1] in no_arg_ops:
            a2['action'] = 'store_true'
            a2['default'] = False
        parser.add_argument(*a1, **a2)
    # Parse the options
    options = parser.parse_args(sys.argv[2:])
    # Turn the options into a dict
    for opt, val in vars(options).items():
        rtn[opt] = val
    # If --help print help and die
    if rtn['help']:
        print self.get_subcommand_help(command_list, ops, s)
        sys.exit(0)
    # If --list-option print options and die
    if 'list-option' in rtn and rtn['list-option']:
        for opt in all_ops_list:
            print opt
        sys.exit(0)
    # If --csv tell the alloc object about it
    # Tri-state: 'always' => on, 'never' => off, 'auto' => on only when
    # stdout is NOT a terminal (i.e. when output is piped/redirected).
    if 'csv' in rtn and rtn['csv']:
        alloc.csv = True
        if rtn['csv'] == 'auto' and stdout.isatty():
            alloc.csv = False
    if rtn['csv'] == 'never':
        alloc.csv = False
    elif not stdout.isatty():
        # NOTE(review): this branch forces CSV on for any non-'never' value
        # whenever output is piped, even if --csv was not given — looks
        # intentional for scripting use, but verify against callers.
        alloc.csv = True
    return rtn, ""
def __init__(self) -> None:
    """Enable output only when stdout is attached to an interactive terminal."""
    interactive = stdout.isatty()
    self.enable(interactive)
# Copyright (c) 2018-2019 Collabora, Ltd. # # SPDX-License-Identifier: Apache-2.0 # # Author(s): Ryan Pavlik <*****@*****.**> import platform from collections import namedtuple from enum import Enum from inspect import getframeinfo from pathlib import Path from sys import stdout # if we have termcolor and we know our stdout is a TTY, # pull it in and use it. if hasattr(stdout, 'isatty') and stdout.isatty(): try: from termcolor import colored as colored_impl HAVE_COLOR = True except ImportError: HAVE_COLOR = False elif platform.system() == 'Windows': try: from termcolor import colored as colored_impl import colorama colorama.init() HAVE_COLOR = True except ImportError: HAVE_COLOR = False else: