def prepare(self):
     result = yield from self._get_class_links()
     if result is None:
         logger.error(
             "Cannot get list of links to download. "
             "Check username and password")
         return
     if self.chapter is not None:
         result = result[self.chapter - 1:]
     if not result:
         logger.info("There is nothing to download")
         return
     colorama_init()
     number_of_files = sum([len(res[1]) for res in result])
     _print_color_line(
         "Starting to download {} files".
         format(number_of_files), Fore.GREEN)
     sem = asyncio.Semaphore(self.concurrency)
     downloaders = []
     cookies = {self.AUTH_COOKIE_NAME: self.auth_cookies}
     send_message(self.info_coroutine, InitialMessage, number_of_files)
     for name, links in result:
         directory = os.path.join(self.directory, name)
         if not os.path.exists(directory):
             os.mkdir(directory)
         for link in links:
             downloader = FileDownloader(
                 directory, link, self.info_coroutine,
                 headers=self.REQUESTS_HEADERS, cookies=cookies, sem=sem)
             downloaders.append(downloader.start())
     return (yield from asyncio.wait(downloaders))
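
The _print_color_line helper used above is defined elsewhere in that project and not shown on this page; a minimal sketch of such a helper, assuming it simply wraps print with a colorama color code, could look like this:

from colorama import Style

def _print_color_line(message, color):
    # Hypothetical helper: print a single line in the given colorama color,
    # then reset the style so subsequent output is unaffected.
    print(color + message + Style.RESET_ALL)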
Example #2
def init():
    colorama_init()
    dirname = os.path.dirname(os.path.abspath(__file__))
    parser = argparse.ArgumentParser(description="FTPVista 4.0")
    parser.add_argument("-c", "--config", dest="config_file", metavar="FILE", default=os.path.join(dirname, 'ftpvista.conf'), help="Path to the config file")
    subparsers = parser.add_subparsers(dest='action')
    # init
    subparsers.add_parser('init', help='Initialize FTPVista database')
    # start
    subparsers.add_parser('start', help='Start FTPVista')
    # start-oc
    subparsers.add_parser('start-oc', help='Start online checker')
    # clean
    parser_clean = subparsers.add_parser('clean', help='Empty the index, or the database, or everything !')
    parser_clean.add_argument("subject", choices=["db", "index", "all"], help="Empty the index, or the database, or everything !")
    # delete
    parser_delete = subparsers.add_parser('delete', help='Manually delete a server from the index')
    parser_delete.add_argument("server", help="IP of the server to delete")

    args = parser.parse_args()

    if os.getuid() != 0:
        print("You must be root in order to run FTPVista. Exiting.")
        exit(1)

    return main(args)
Example #3
 def __init__(self, numRows=6, numColumns=6, seed=time.time()):
     self.seed = seed
     self.numColumns = numColumns
     self.numRows = numRows
     self._setupBoard(numRows, numColumns, self.seed)
     self._colours = colours.Colours()
     colorama_init()
Example #4
def init():
    colorama_init()
    args = parse()

    if args.action == 'list_providers':
        list_providers.go(args.csv)
    elif args.action == 'list_tools':
        list_tools.go(args.csv)
    elif args.action == 'generate':
        if 'auto' in args.include_tools and 'auto' in args.exclude_tools:
            auto_mode = True
        else:
            auto_mode = False

        if 'none' in args.include_providers or 'all' in args.exclude_providers:
            no_providers = True
        else:
            no_providers = False

        if 'none' in args.include_tools or 'all' in args.exclude_tools:
            no_tools = True
        else:
            no_tools = False

        if auto_mode and no_providers:
            print('\033[1m\033[31mTo use the smart mode on the tools you have to select at least 1 provider\033[0m')
            exit(2)
        elif no_tools and no_providers:
            print('\033[1m\033[31mYou have to select at least 1 provider or tool\033[0m')
            exit(2)
        else:
            if not args.essid:
                args.essid = ''
            if not args.serial:
                args.serial = ''
            res, error_code = generate.go(
                args.bssid,
                args.essid,
                args.serial,
                args.include_tools,
                args.exclude_tools,
                args.include_providers,
                args.exclude_providers
            )
            if args.csv:
                print('pin')
                print('\n'.join(res))
            else:
                if error_code == 3:
                    print('\033[1m\033[33mWARNING: Online providers are checked without internet connection\033[0m')
                print('The resulting pin(s) is/are:\n\033[1m\033[32m{0}\033[0m'.format(
                    ', '.join(res))
                )
            exit(error_code)
    elif args.action == 'update_db':
        error_code = update_db.go(args.include_providers, args.exclude_providers)
        if error_code == 3 and not args.csv:
            print('\033[1m\033[33mWARNING: Only offline databases are updated without internet connection\033[0m')
        exit(error_code)
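
This script prints raw ANSI escape sequences even though it calls colorama_init(); the escapes map directly onto colorama constants, so an equivalent and more readable form of the red error line would be (a sketch, not the project's actual code):

from colorama import Fore, Style, init as colorama_init

colorama_init()
# '\033[1m' is Style.BRIGHT, '\033[31m' is Fore.RED and '\033[0m' is Style.RESET_ALL.
print(Style.BRIGHT + Fore.RED + 'You have to select at least 1 provider or tool' + Style.RESET_ALL)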
Example #5
    def __init__(self):
        """ Initializes the class."""

        colorama_init()
        readline.parse_and_bind('tab: complete')
        readline.parse_and_bind('set editing-mode vi')
        self.history = []
        self.commands = dict({(klass.__dict__['name'], klass())
                              for klass in Command.__subclasses__()})
Example #6
    def __init__(self, definitions, colors, want_readline, want_completion):
        super(TerminalShell, self).__init__('<stdin>')
        self.input_encoding = locale.getpreferredencoding()
        self.lineno = 0

        # Try importing readline to enable arrow keys support etc.
        self.using_readline = False
        try:
            if want_readline:
                import readline
                self.using_readline = sys.stdin.isatty() and sys.stdout.isatty()
                self.ansi_color_re = re.compile("\033\\[[0-9;]+m")
                if want_completion:
                    readline.set_completer(lambda text, state: self.complete_symbol_name(text, state))

                    # Make _ a delimiter, but not $ or `
                    readline.set_completer_delims(' \t\n_~!@#%^&*()-=+[{]}\\|;:\'",<>/?')

                    readline.parse_and_bind("tab: complete")
                    self.completion_candidates = []
        except ImportError:
            pass

        # Try importing colorama to escape ansi sequences for cross platform
        # colors
        try:
            from colorama import init as colorama_init
        except ImportError:
            colors = 'NoColor'
        else:
            colorama_init()
            if colors is None:
                terminal_supports_color = (sys.stdout.isatty() and os.getenv('TERM') != 'dumb')
                colors = 'Linux' if terminal_supports_color else 'NoColor'

        color_schemes = {
            'NOCOLOR': (
                ['', '', '', ''],
                ['', '', '', '']),
            'LINUX': (
                ['\033[32m', '\033[1m', '\033[22m', '\033[39m'],
                ['\033[31m', '\033[1m', '\033[22m', '\033[39m']),
            'LIGHTBG': (
                ['\033[34m', '\033[1m', '\033[22m', '\033[39m'],
                ['\033[31m', '\033[1m', '\033[22m', '\033[39m']),
        }

        # Handle any case by using .upper()
        term_colors = color_schemes.get(colors.upper())
        if term_colors is None:
            out_msg = "The 'colors' argument must be {0} or None"
            print(out_msg.format(repr(list(color_schemes.keys()))))
            quit()

        self.incolors, self.outcolors = term_colors
        self.definitions = definitions
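
The try/except around the colorama import is the core pattern here: color support degrades gracefully when colorama is missing. A minimal standalone sketch of the same idea:

def pick_color_scheme(requested=None):
    # Fall back to a colorless scheme when colorama is not installed;
    # otherwise initialize it and default to the 'Linux' scheme.
    try:
        from colorama import init as colorama_init
    except ImportError:
        return 'NoColor'
    colorama_init()
    return requested if requested is not None else 'Linux'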
Example #7
def main():
    parser = prepare_parser()
    args_options = {
        k: v for k, v in vars(parser.parse_args()).items() if v is not None
    }
    allowed_configs = DEFAULT_CONFIGS + [args_options.get("config")]
    config_files = list(filter_config_files(*allowed_configs))
    options = read_configs(*config_files)
    options.update(args_options)
    if not check_options(options):
        parser.print_help()
        return
    if not os.path.isdir(options["directory"]):
        logger.error("Direcory {} is not exists.".format(options["directory"]))
        parser.print_help()
        return
    db = options["db"]
    force = options["force"]
    recreate = options["recreate"]
    create_db(db, recreate)
    access_token = get_access_token(db)
    current_apks = get_apks_records(db)
    apks = options["apks"]
    outdated_packages = set(current_apks) - set(apks)
    if outdated_packages:
        delete_apks_records(db, tuple(outdated_packages))
        current_apks = get_apks_records(db)
    params = {
        "androidId": options["android_id"],
        "email": options["email"],
        "password": options["password"],
        "auth_sub_token": access_token,
        "debug": True
    }
    api = GooglePlayAPI(**params)
    apks_details = api.bulkDetails(apks)
    update_access_token(db, api.get_token())
    apks_data = {
        name: m.doc.details.appDetails.versionCode
        for name, m in zip(apks, apks_details.entry)
    }
    new_apks = [
        name
        for name, ver in apks_data.items()
        if force or (name not in current_apks or current_apks[name].code < ver)
    ]
    colorama_init()
    new_apks_info = get_packages_info(api, new_apks)
    if options["info"]:
        show_packages_info(new_apks_info, current_apks)
        return
    if not new_apks_info:
        _print_color_line("There are no new apk packages to update", Fore.RED)
        return
    download_packages(api, new_apks_info, options)
Example #8
    def __init__(self):
        colorama_init()

        self._q = Queue.Queue()
        self._work_items = set()
        self._scheduled = []
        self._running = []

        self._observer = watchdog.observers.Observer()
        self._observer.schedule(_EventHandler(self._q), ".", recursive=True)
        self._observer.start()
Example #9
File: log.py Project: awsch/lantz
def init_colorama():
    try:
        from colorama import Fore, Back, Style, init as colorama_init
        colorama_init()
        colorama = True
        DEFAULT_FMT = Style.NORMAL + '{asctime} <color>{levelname:8s}</color>' + Style.RESET_ALL + ' {message}'
        ColorizingFormatter.add_color_schemes(Style, Fore, Back)
    except Exception as e:
        LOGGER.info('Log will not be colorized. Could not import colorama: {}', e)
        colorama = False
        DEFAULT_FMT = '{asctime} {levelname:8s} {message}'
    return colorama, DEFAULT_FMT
Example #10
def main():
    """Main function"""
    parser = argparse.ArgumentParser(description='Linter for the pan language')
    parser.add_argument('paths', metavar='PATH', type=str, nargs='*', help='Paths of files to check')
    parser.add_argument('--vi', action='store_true', help='Output line numbers in a vi option style')
    parser.add_argument('--table', action='store_true', help='Display a table of per-file problem stats')
    parser.add_argument('--allow_mvn_templates', action='store_true', help='Allow use of maven templates')
    parser.add_argument('--always_exit_success', action='store_true', help='Always exit cleanly even if problems are found')
    group_output = parser.add_mutually_exclusive_group()
    group_output.add_argument('--debug', action='store_true', help='Enable debug output')
    group_output.add_argument('--ide', action='store_true', help='Output machine-readable results for use by IDEs')
    args = parser.parse_args()

    # Only output colors sequences if the output is a terminal
    colorama_init(strip=(not stdout.isatty()) or args.ide)
    global DEBUG
    DEBUG = args.debug

    problems_found = 0

    reports = []
    problem_stats = {}

    if not args.paths:
        print 'No files were provided, not doing anything'
        return 0

    for path in args.paths:
        for filename in glob(path):
            file_reports, file_problems = lint_file(filename, args.allow_mvn_templates)
            reports += file_reports
            problems_found += file_problems
            problem_stats[filename] = file_problems

    for report in reports:
        print_report(*report, vi=args.vi)

    if args.table:
        print
        print 'Problem count per file:'
        print filestats_table(problem_stats)

    print
    print '%d problems found in total' % problems_found

    if args.always_exit_success:
        return 0

    if problems_found:
        return 1
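
The call colorama_init(strip=(not stdout.isatty()) or args.ide) is the interesting part: strip=True removes ANSI sequences from the output entirely, so colors only reach real terminals. Isolated, the pattern looks like this:

import sys
from colorama import init as colorama_init

# Strip color codes whenever stdout is not an interactive terminal,
# e.g. when the output is piped into a file or another program.
colorama_init(strip=not sys.stdout.isatty())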
Example #11
    def __init__(self, **kwargs):

        # Initialize colorama
        colorama_init()

        # Default tag width
        self.width = 9

        # Message / input response / kwargs
        self.msg = None
        self.response = {}
        self.kwargs = kwargs

        # Timestamp boolean
        self.timestamp = kwargs.get("use_timestamp", False)
Example #12
    def handle(self, *args, **options):
        colorama_init()

        org = get_org(options['org'])
        urn = options['urn']

        intro = Style.BRIGHT + "Welcome to the message console.\n\n"
        intro += Style.NORMAL + "Send messages by typing anything\n"
        intro += "Change org with the org command, ex: " + Fore.YELLOW + "org 3" + Fore.WHITE + "\n"
        intro += "Change contact with the contact command, ex: " + Fore.YELLOW + "contact tel:+250788124124" + Fore.WHITE + "\n"
        intro += "Exit with the " + Fore.YELLOW + "exit" + Fore.WHITE + " command\n\n"

        intro += ("Currently sending messages for %s [%d] as " + Fore.CYAN + "%s" + Fore.WHITE) % (org.name, org.id, urn)

        MessageConsole(org, urn).cmdloop(intro=intro)
Example #13
def main(limit):
    global driver

    colorama_init()
    print >> stdout, Fore.YELLOW + 'Initializing selenium..' + Fore.RESET
    try:
        driver = webdriver.PhantomJS()
        if driver is None:
            raise TypeError
    except:
        print >> stderr, Fore.RED + 'An error occurred while launching ' + \
                'selenium(PhantomJS).' + Fore.RESET
        exit(1)
    
    print >> stdout, Fore.YELLOW + 'Connecting to Facebook..' + Fore.RESET
    result = connect()
    if result is False:
        print >> stderr, Fore.RED + 'Failed to connect to Facebook' + \
                Fore.RESET
        exit(1)

    user, password = prompt()
    print >> stdout, Fore.YELLOW + 'Trying to sign in on Facebook..' + \
            Fore.RESET
    result = login(user, password)
    if result is False:
        print >> stderr, Fore.RED + 'Failed to sign in to Facebook' + \
                Fore.RESET
        exit(1)
    print >> stdout, Fore.GREEN + 'Logged in on Facebook' + Fore.RESET
    
    print >> stdout, Fore.CYAN + 'Getting list from Facebook' + Fore.RESET
    generator = fetch_list(limit)
    try:
        users = generator.next() 
    except TypeError:
        print >> stderr, Fore.RED + 'Failed to fetch page source' + Fore.RESET
        exit(1)
    
    cnt = len(users)
    print >> stdout, Fore.BLUE + 'Loaded {0} users'.format(cnt) + Fore.RESET
    
    print >> stdout, Fore.CYAN + \
            'Getting top {0} users detailed data..'.format(limit) + \
            Fore.RESET
    final = generator.next()
    
    print_table(final)
Example #14
    def __init__(self, definitions, colors):
        self.input_encoding = locale.getpreferredencoding()

        # Try importing readline to enable arrow keys support etc.
        self.using_readline = False
        try:
            import readline
            self.using_readline = sys.stdin.isatty() and sys.stdout.isatty()
            self.ansi_color_re = re.compile("\033\\[[0-9;]+m")
        except ImportError:
            pass

        # Try importing colorama to escape ansi sequences for cross platform
        # colors
        try:
            from colorama import init as colorama_init
        except ImportError:
            colors = 'NoColor'
        else:
            colorama_init()
            if colors is None:
                terminal_supports_color = (sys.stdout.isatty() and
                                           os.getenv('TERM') != 'dumb')
                colors = 'Linux' if terminal_supports_color else 'NoColor'

        color_schemes = {
            'NOCOLOR': (
                ['', '', '', ''],
                ['', '', '', '']),
            'LINUX': (
                ['\033[32m', '\033[1m', '\033[22m', '\033[39m'],
                ['\033[31m', '\033[1m', '\033[22m', '\033[39m']),
            'LIGHTBG': (
                ['\033[34m', '\033[1m', '\033[22m', '\033[39m'],
                ['\033[31m', '\033[1m', '\033[22m', '\033[39m']),
        }

        # Handle any case by using .upper()
        term_colors = color_schemes.get(colors.upper())
        if term_colors is None:
            out_msg = "The 'colors' argument must be {0} or None"
            print out_msg.format(repr(color_schemes.keys()))
            quit()

        self.incolors, self.outcolors = term_colors
        self.definitions = definitions
Example #15
def main():
    version = '\n'.join(["%(prog)s (zone_normalize) {}",
                         "Copyright (C) 2016 Max R.D. Parmer",
                         "License AGPLv3+: GNU Affero GPL version 3 or later.",
                         "http://www.gnu.org/licenses/agpl.html"])

    parser = argparse.ArgumentParser()
    parser.add_argument('-d',
                        '--dump',
                        action='store_true',
                        help='Dump a list of line structures instead of \
                        printing and coloring each entry.')
    parser.add_argument('-nc',
                        '--no-color',
                        action='store_true',
                        help='Disable color.')
    parser.add_argument('-v',
                        '--version',
                        action='version',
                        version=version.format(__version__))
    parser.add_argument('zones',
                        nargs='+',
                        type=maybe_compressed_file,
                        help='A file or list of zone files, optionally these \
                        files may be gzipped.')
    args = parser.parse_args()

    if HAS_COLOR and not args.no_color:
        colors = [Fore.GREEN, Fore.MAGENTA, Fore.BLUE, Fore.CYAN, Fore.YELLOW]

        colorama_init(autoreset=True)
        unpacked_fmt = ZONE_FMT_STR.split()
        final_format = " ".join([color for segment in
                                 zip(colors, unpacked_fmt)
                                 for color in segment])
    else:
        final_format = ZONE_FMT_STR

    for zone in args.zones:
        with zone as zonefh:
            if args.dump:
                pp = PrettyPrinter(indent=4, compact=True, width=72)
                pp.pprint([l for l in zone_normalize(zonefh)])
            else:
                for record in zone_normalize(zonefh):
                    print(zone_dict_to_str(record, fmt_str=final_format))
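
Here colorama_init(autoreset=True) means every print() implicitly returns to the default style, which is why the format string can interleave Fore.* codes without explicit resets. A tiny illustration:

from colorama import Fore, init as colorama_init

colorama_init(autoreset=True)
print(Fore.GREEN + 'this line is green')  # autoreset restores the default style
print('this line is back to the default color')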
Example #16
 def __init__(self, menu_class=Menu):
     """
     Init app
     """
     self.ask_pound()
     self._user_saved = None
     self.errors = Error_Handler(error_dict)
     self.command = Shell(avoid_recursion="pydir", errors=self.errors)
     self.user_command = PydirShell(self)
     colorama_init()
     self.input_list = []
     self.reset_pages()
     self.user_filter = None
     self.user_sorter = 'name'
     self.menu = menu_class(self)
     self.last_mod_time = None
     self._project_saved_searches = {}
     self.output = Output()
Example #17
def main():
    argparser = argparse.ArgumentParser()
    argparser.add_argument('filename', type=str)
    args = argparser.parse_args()

    colorama_init()

    with open(args.filename, 'r') as fin:
        node = ast.parse(fin.read())

    ir = ImportRetriever()
    ir.visit(node)
    cd = CoroutineDefFinder()
    cd.visit(node)
    print(cd._scoped_coros)
    print(cd._scoped_types)
    cc = CoroutineChecker(ir._user_locals, cd._scoped_coros, cd._scoped_types)
    cc.visit(node)
Example #18
def init():
    colorama_init()
    parser = argparse.ArgumentParser(description="FTPVista 4.0 installer")
    subparsers = parser.add_subparsers(dest='action')
    parser_install = subparsers.add_parser('install', help='Install FTPVista system elements')
    parser_install.add_argument('--user', action='store_true', help='Create/delete unix user of FTPVista', default=False)
    parser_install.add_argument('--configuration', action='store_true', help='(Un)install configuration file and FTPVista root directory', default=False)
    parser_install.add_argument('--services', action='store_true', help='(Un)install upstart or systemd services scripts', default=False)
    parser_install.add_argument('--logrotate', action='store_true', help='(Un)install logrotate configuration file', default=False)
    parser_install.add_argument('--webserver', action='store_true', help='(Un)install uwsgi script and create apache Virtual Host', default=False)
    parser_install.add_argument('--all', action='store_true', help='(Un)install everything')
    parser_install.add_argument('home', help='FTPVista home path', nargs='?', default=None)
    parser_uninstall = subparsers.add_parser('uninstall', help='Uninstall FTPVista system elements')
    parser_uninstall.add_argument('--user', action='store_true', help='Create/delete unix user of FTPVista', default=False)
    parser_uninstall.add_argument('--configuration', action='store_true', help='(Un)install configuration file and FTPVista root directory', default=False)
    parser_uninstall.add_argument('--services', action='store_true', help='(Un)install upstart or systemd services scripts', default=False)
    parser_uninstall.add_argument('--logrotate', action='store_true', help='(Un)install logrotate configuration file', default=False)
    parser_uninstall.add_argument('--all', action='store_true', help='(Un)install everything')
    parser_uninstall.add_argument('home', help='FTPVista home path')
    args = parser.parse_args()

    if not args.action:
        parser.print_help()
        exit(1)

    # For simplicity
    if not hasattr(args, 'webserver'):
        args.webserver = False

    if not args.user and not args.configuration and not args.services and not args.logrotate and not args.webserver:
        args.all = True

    if args.configuration and not args.all:
        args.user = True

    if os.getuid() != 0:
        print("You must be root in order to run FTPVista installer. Exiting.")
        exit(1)

    if args.action == 'install' and not args.all and args.home is None and (args.services or args.logrotate or args.webserver):
        print(w('home parameter is mandatory here. (e.g. install.py install --abc <home>)'))
        exit(1)

    return main(args)
Example #19
def main():
	# Initialize colorama for colored output.
	colorama_init(autoreset=True)
	
	# Get the list of files for consideration.
	py_files, cpp_files = gen_file_lists()
	
	# Filter the python files to the ones that call rospy.init_node().
	py_nodes = parse_py(py_files)
	# Filter the c++ files to the ones that call ros::init().
	cpp_nodes = parse_cpp(cpp_files)
	
	# If there are no errors, convert the node lists to YAML and write them to project.yaml.
	if not check_for_errors(py_nodes, cpp_nodes):
		projfile = open('project.yaml', 'w')
		projfile.write(write_yaml(py_nodes, cpp_nodes))
		
		print(Fore.GREEN + "RIDE node file generated without errors!")
		print(Style.BRIGHT + "Make sure to look at the generated file, as it is likely incorrect/incomplete.")
Example #20
    def handle(self, token, *args, **options):
        colorama_init()

        try:
            token_obj = APIToken.objects.get(key=token)
        except APIToken.DoesNotExist:
            raise CommandError("No such API token exists")

        user, org = token_obj.user, token_obj.org

        self.stdout.write("Checking with token %s for user %s [%d] in org %s [%d] with role %s...\n\n"
                          % (colored(token, Fore.BLUE),
                             colored(user.username, Fore.BLUE),
                             user.pk,
                             colored(org.name, Fore.BLUE),
                             org.pk,
                             colored(token_obj.role, Fore.BLUE)))

        for test in ENDPOINT_TESTS:
            self.test_url(token, *test)
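
The colored() function used above is a project helper, not part of colorama; a plausible sketch, assuming it just wraps a value in a color code and resets afterwards:

from colorama import Style

def colored(value, color):
    # Hypothetical helper: render the value in the given colorama color,
    # then reset so the rest of the message keeps the default style.
    return color + str(value) + Style.RESET_ALL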
Example #21
    def handle(self, *args, **options):
        self.verbose = options['verbosity'] >= 2

        colorama_init()

        settings.COMPRESS_ENABLED = True

        problems = []

        for org in Org.objects.filter(is_active=True).order_by('name'):
            self.log("Checking view performance for org '%s'..." % org.name)
            self.log(" > Checking as admin user...")

            admin = org.administrators.first()
            problems += self.test_as_user(org, None, admin)

            for partner in org.partners.order_by('name'):
                restriction = "%d labels" % partner.get_labels().count() if partner.is_restricted else "unrestricted"

                self.log(" > Checking as user in partner '%s' (%s)..." % (partner.name, restriction))

                # find a suitable user in this partner
                user = User.objects.filter(profile__partner=partner, profile__change_password=False).first()
                if user:
                    problems += self.test_as_user(org, partner, user)
                else:
                    self.log("    - No suitable user found (skipping)")

        self.stdout.write("Problems...")

        for problem in sorted(problems, key=lambda p: p.time, reverse=True):
            view_name, query_string = problem.test
            url = reverse(view_name) + query_string

            self.stdout.write(" > %s %s secs (org='%s', partner='%s')" % (
                colored(url, Fore.BLUE),
                colorcoded(problem.time, REQUEST_TIME_LIMITS),
                problem.org.name,
                problem.partner.name if problem.partner else ''
            ))
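
colorcoded() and REQUEST_TIME_LIMITS are project-specific; one plausible sketch, assuming the limits are a (warn, critical) pair of seconds and the color encodes how slow the request was:

from colorama import Fore, Style

def colorcoded(value, limits):
    # Hypothetical helper: green below the warn limit, yellow below the
    # critical limit, red above it.
    warn, critical = limits
    color = Fore.GREEN if value < warn else (Fore.YELLOW if value < critical else Fore.RED)
    return color + ('%.2f' % value) + Style.RESET_ALL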
Example #22
    def __init__(self, definitions, colors):
        # Try importing readline to enable arrow keys support etc.
        try:
            import readline
        except ImportError:
            pass

        # Try importing colorama to escape ansi sequences for cross platform colors
        try:
            from colorama import init as colorama_init
        except ImportError:
            colors = 'NoColor'
        else:
            colorama_init()
            if colors is None:
                colors = 'Linux'

        color_schemes = {
            'NOCOLOR' : (
                ['', '', '', ''],
                ['', '', '', '']),
            'LINUX' : (
                ['\033[32m', '\033[1m', '\033[22m', '\033[39m'],
                ['\033[31m', '\033[1m', '\033[22m','\033[39m']),
            'LIGHTBG' : (
                ['\033[34m', '\033[1m', '\033[22m', '\033[39m'],
                ['\033[31m', '\033[1m', '\033[22m','\033[39m']),
        }

        # Handle any case by using .upper()
        term_colors = color_schemes.get(colors.upper())
        if term_colors is None:
            out_msg = "The 'colors' argument must be {0} or None"
            print out_msg.format(repr(color_schemes.keys()))
            quit()

        self.incolors, self.outcolors = term_colors
        self.definitions = definitions
Example #23
    def __call__(self):
        """For each available question prompts the Human if it's valid evidence or not.

        Returns None in case that all question has been answered (or when the Human
        indicates that he's tired of answering).
        Each time that Human replies with a custom answer (not in the base list) that
        answer will be returned instantaneously (and no further question will be shown
        except the terminal is invoked again).
        """
        colorama_init()
        self.explain()
        for evidence in self.questions[len(self.raw_answers):]:
            answer = self.get_human_answer(evidence)
            if answer in self.extra_options:
                # Will not be handled here but in the caller.
                return answer
            elif answer == self.RUN:
                # No more questions and answers for now. Use what is available.
                return None
            else:
                self.raw_answers.append(answer)
                if answer in [self.YES, self.NO]:
                    self.store_answer_callback(evidence, answer == self.YES)
Example #24
    def __init__(self):
        if colorama_init:
            colorama_init(autoreset=False)
            self.colors = {
                "RESET": Fore.RESET,
                "BLACK": Fore.BLACK,
                "RED": Fore.RED,
                "GREEN": Fore.GREEN,
                "YELLOW": Fore.YELLOW,
                "BLUE": Fore.BLUE,
                "MAGENTA": Fore.MAGENTA,
                "CYAN": Fore.CYAN
                #"GRAY": Fore.GRAY
            }
        else:
            CSI = "\33["
            self.CSI = CSI
            self.colors = {
                "RESET": CSI + "0m",
                "BLACK": CSI + "0;30m",
                "RED": CSI + "0;31m",
                "GREEN": CSI + "0;32m",
                "YELLOW": CSI + "0;33m",
                "BLUE": CSI + "0;34m",
                "MAGENTA": CSI + "0;35m",
                "CYAN": CSI + "0;36m"
                #"GRAY": CSI + "0;37m"
            }

        self.mapped_colors = {}
        self.mapped_colors["default"] = {
            "DANGER": "RED",
            "ERROR": "RED",
            "OK": "GREEN",
            "SUCCESS": "GREEN",
            "WARNING": "YELLOW"
        }
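
The if colorama_init: check implies this module imports colorama defensively and falls back to building raw CSI sequences itself. A sketch of such a guarded import, assuming the name is simply set to None on failure:

try:
    from colorama import init as colorama_init, Fore
except ImportError:
    # colorama not installed: the class above then uses the raw "\33[" sequences.
    colorama_init = None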
Example #25
def main():
	# Init output coloring library.
	colorama_init(autoreset=True)
	
	# Try to open the project file.
	try:
		proj_file = file('./project.yaml', 'r')
	except IOError:
		print(Fore.RED + 'Error: Couldn\'t find project.yaml file in current directory!');
		return
	
	# If we opened the file, parse the yaml.
	project = yaml_load(proj_file)
	pkg_name = os.path.basename(os.getcwd())
	
	output = []
	# Start the roslaunch file.
	# NOTE: The brackets around the string are VERY important.  Without them, it
	# 		will add the characters of the string to output one by one instead of
	#		adding the string as a whole.
	output += ['<launch>']
	
	# Define the core ros parameters.  We do this to add the workspace directory onto
	# ROS's package path.
	output += ['\t<machine name="local-ride" address="localhost" default="true" ' + \
				'ros-root="$(env ROS_ROOT)" ros-package-path="' + workspace_dir + ':$(env ROS_PACKAGE_PATH)" />']
	
	# Loop through the nodes listed in the project file.
	for node in project['nodes']:
		# Create a string to build the node XML into.
		node_xml = '\t'
		shorthand = True
		
		# Add lines for launching nodes.
		if 'exec' in node:
			node_xml += '<node '
			node_xml += 'name="' + node['name'] + '" '
			if 'pkg' in node:
				node_xml += 'pkg="' + node['pkg'] + '" '
			else:
				node_xml += 'pkg="' + pkg_name + '" '
				node_xml += 'output="screen" '
			node_xml += 'type="' + node['exec'] + '" '
			if 'chdir' in node:
				if node['chdir'] == True:
					node_xml += 'cwd="node" '
		elif 'launch' in node:
			node_xml += '<include '
			node_xml += 'file="$(find ' + node['pkg'] + ')/' + node['launch'] + '" '
		else:
			print(Fore.YELLOW + 'Warning: node "' + node['name'] + \
					'" has no launch configuration, and therefore will not be run when package is launched.')
			continue
		
		# Figure out whether we're using shorthand or not.
		if 'remap' in node or 'params' in node:
			shorthand = False
			node_xml = node_xml[:-1] + '>\n'
		
		# Handle remapping.
		if 'remap' in node:
			for mapping in node['remap']:
				node_xml += '\t\t<remap from="' + mapping[0] + '" to="' + mapping[1] +'" />\n'
		
		if 'params' in node:
			for param in node['params']:
				node_xml += '\t\t<rosparam param="' + param['name'] + '"> ' + str(param['value']) + ' </rosparam>\n'
		
		# Close the tag.
		if shorthand:
			node_xml += '/>'
		else:
			if 'exec' in node:
				node_xml += '\t</node>'
			elif 'launch' in node:
				node_xml += '\t</include>'
		
		# Add the xml for this node to the output string.
		output += [node_xml]
	
	# End the roslaunch file.
	output += ['</launch>']
	
	output_path = pkg_name + '.launch'
	try:
		output_file = open(output_path, 'w')
		output_file.write('\n'.join(output))
		print(Fore.GREEN + 'roslaunch file successfully written to "' + output_path + '"!');
	except IOError:
		pass
Example #26
    def handle(self, *args, **options):
        colorama_init()
        org = get_org(options["org"])
        scheme, path, *rest = URN.to_parts(options["urn"])

        db = settings.DATABASES["default"]
        db_url = f"postgres://{db['USER']}:{db['PASSWORD']}@{db['HOST']}:{db['PORT']}/{db['NAME']}?sslmode=disable"
        redis_url = settings.CACHES["default"]["LOCATION"]

        try:
            print(
                f"✅ Mailroom version {mailroom.get_client().version()} running at️ {Fore.CYAN}{settings.MAILROOM_URL}{Fore.RESET}"
            )
        except ConnectionError:
            launch = f'mailroom -db="{db_url}" -redis={redis_url}'
            raise CommandError(
                f"Unable to connect to mailroom. Please launch it with...\n\n{launch}"
            )

        try:
            requests.get(COURIER_URL)
            print(
                f"✅ Courier running at️ {Fore.CYAN}{COURIER_URL}{Fore.RESET}")
        except ConnectionError:
            launch = f'courier -db="{db_url}" -redis={redis_url} -spool-dir="."'
            raise CommandError(
                f"Unable to connect to courier. Please launch it with...\n\n{launch}"
            )

        try:
            channel = TestChannel.create(org,
                                         org.administrators.first(),
                                         COURIER_URL,
                                         callback=self.response_callback,
                                         scheme=scheme)
            print(
                f"✅ Testing channel started at️ {Fore.CYAN}{channel.server.base_url}{Fore.RESET}"
            )
        except Exception as e:
            raise CommandError(f"Unable to start test channel: {str(e)}")

        print(
            f"\nSending messages to {Fore.CYAN}{org.name}{Fore.RESET} as {Fore.CYAN}{scheme}:{path}{Fore.RESET}. Use Ctrl+C to quit."
        )

        self.responses_wait = None
        try:
            while True:
                line = input(f"📱 {Fore.CYAN}{path}{Fore.RESET}> ")
                if not line:
                    continue

                msg_in = channel.incoming(path, line)

                # we wait up to 2 seconds for a response from courier
                self.responses_wait = threading.Event()
                self.responses_wait.wait(timeout=2)

                for response in org.msgs.filter(
                        direction="O", id__gt=msg_in.id).order_by("id"):
                    print(
                        f"💬 {Fore.GREEN}{response.channel.address}{Fore.RESET}> {response.text}"
                    )

        except KeyboardInterrupt:
            pass
Example #27
def start_hyperopt_list(args: Dict[str, Any]) -> None:
    """
    List hyperopt epochs previously evaluated
    """
    from freqtrade.optimize.hyperopt import Hyperopt

    config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)

    print_colorized = config.get('print_colorized', False)
    print_json = config.get('print_json', False)
    no_details = config.get('hyperopt_list_no_details', False)
    no_header = False

    filteroptions = {
        'only_best':
        config.get('hyperopt_list_best', False),
        'only_profitable':
        config.get('hyperopt_list_profitable', False),
        'filter_min_trades':
        config.get('hyperopt_list_min_trades', 0),
        'filter_max_trades':
        config.get('hyperopt_list_max_trades', 0),
        'filter_min_avg_time':
        config.get('hyperopt_list_min_avg_time', None),
        'filter_max_avg_time':
        config.get('hyperopt_list_max_avg_time', None),
        'filter_min_avg_profit':
        config.get('hyperopt_list_min_avg_profit', None),
        'filter_max_avg_profit':
        config.get('hyperopt_list_max_avg_profit', None),
        'filter_min_total_profit':
        config.get('hyperopt_list_min_total_profit', None),
        'filter_max_total_profit':
        config.get('hyperopt_list_max_total_profit', None)
    }

    trials_file = (config['user_data_dir'] / 'hyperopt_results' /
                   'hyperopt_results.pickle')

    # Previous evaluations
    trials = Hyperopt.load_previous_results(trials_file)
    total_epochs = len(trials)

    trials = _hyperopt_filter_trials(trials, filteroptions)

    # TODO: fetch the interval for epochs to print from the cli option
    epoch_start, epoch_stop = 0, None

    if print_colorized:
        colorama_init(autoreset=True)

    try:
        # Human-friendly indexes used here (starting from 1)
        for val in trials[epoch_start:epoch_stop]:
            Hyperopt.print_results_explanation(val, total_epochs,
                                               not filteroptions['only_best'],
                                               print_colorized)

    except KeyboardInterrupt:
        print('User interrupted..')

    if trials and not no_details:
        sorted_trials = sorted(trials, key=itemgetter('loss'))
        results = sorted_trials[0]
        Hyperopt.print_epoch_details(results, total_epochs, print_json,
                                     no_header)
Example #28
def event_times(input_file, org, token, deadline, target_team):
    '''
    input-file: file contains list of repo-hash.

    repo-hash : string in <repo>:<hash> format
            hw0-ianre657:cb75e99
    '''
    global github_organization
    global github_token

    try:
        parsed_repos = get_repo_infos(input_file)
    except FileNotFoundError as e:
        print(str(e))
        return

    colorama_init(autoreset=True)
    spinner = Halo(stream=sys.stderr)

    github_organization = org
    github_token = token

    print(f'deadline: {deadline}')

    submit_deadline = iso8601.parse_date(deadline)
    submit_deadline = submit_deadline.replace(tzinfo=LOCAL_TIMEZONE)

    spinner.info(f"Deadline : {submit_deadline}")
    success_group = []
    fail_group = []
    spinner.start("Start to check late submissions")

    # get team membershup info
    if target_team is not None:
        only_team_members = set(
            Team(org=github_organization,
                 team_slug=target_team,
                 github_token=github_token).members.keys())

    for idx, repo in enumerate(parsed_repos, start=1):
        #print("get commit time for {}".format(repo))
        if target_team is not None:
            import re
            user_id = re.sub(r'hw\d+-', '', repo.name)
            # print(f'user_id :{user_id}')
            if user_id not in only_team_members:
                continue
        spinner.text = f"({idx}/{len(parsed_repos)}) Checking {repo.name}"
        result = getRepoCommitTime(org=github_organization,
                                   repo=repo.name,
                                   commit_hash=repo.commit_hash)
        for r in result:
            # print(r)
            passed, delta = is_deadline_passed(
                submit_deadline, iso8601.parse_date(r.pushed_time))
            if passed:
                fail_group.append({
                    'repo-name': r.repo,
                    'commit-hash': r.commit_hash,
                    'time-passed': delta,
                    'last-pushtime': r.pushed_time
                })
            else:
                success_group.append((r, delta))
                #print(f'{r}: {delta} later')
    spinner.succeed("Check finished")
    print('=' * 20, 'REPORT', '=' * 20)
    print(f'Total submissions : {len(parsed_repos)}')
    print(f'late submissions: {len(fail_group)}')
    print(f'Submission Deadline: {submit_deadline}')
    print(tabulate(fail_group, headers="keys"))
Example #29
def main(arguments=None):
    parser = argparse.ArgumentParser(
        prog='recpermissions',
        description=
        _('Change Linux permissions and ownership in one step. It can delete empty directories when necessary.'
          ),
        epilog=_("Developed by Mariano Muñoz 2018-{}".format(
            __versiondate__.year)),
        formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument('--version', action='version', version=__version__)

    parser.add_argument(
        '--user',
        help=
        _("File owner will be changed to this parameter. It does nothing if it's not set."
          ),
        action="store",
        default=None)
    parser.add_argument(
        '--group',
        help=
        _("File owner group will be changed to this parameter. It does nothing if it's not set."
          ),
        action="store",
        default=None)
    parser.add_argument(
        '--files',
        help=
        _("File permissions to set in all files. It does nothing if it's not set."
          ),
        default=None,
        metavar='PERM')
    parser.add_argument(
        '--directories',
        help=
        _("Directory permissions to set in all directories. It does nothing if it's not set."
          ),
        default=None,
        metavar='PERM')
    parser.add_argument(
        '--remove_emptydirs',
        help=
        _("If it's established, removes empty directories recursivily from current path."
          ),
        action="store_true",
        default=False)
    parser.add_argument(
        '--only',
        help=
        _("Only changes permissions to the file or directory passed in absolute_path parameter."
          ),
        action="store_true",
        default=False)
    parser.add_argument(
        'absolute_path',
        help=
        _("Directory who is going to be changed permissions and owner recursivily"
          ),
        action="store")

    args = parser.parse_args(arguments)

    colorama_init(autoreset=True)

    # Sets locale to get integer format localized strings
    try:
        locale.setlocale(locale.LC_ALL, ".".join(locale.getlocale()))
    except:
        pass

    if os.path.isabs(args.absolute_path) == False:
        print(Fore.RED + Style.BRIGHT +
              _("Path parameter must be an absolute one") + Style.RESET_ALL)
        sys.exit(1)

    deleted_dirs = []
    files = []
    dirs = []
    changed_dirs = []
    changed_files = []
    error_files = []

    #Generate list of files and directories
    if args.only == False:
        for (dirpath, dirnames, filenames) in os.walk(args.absolute_path):
            for d in dirnames:
                dirs.append(os.path.join(dirpath, d))

            for f in filenames:
                files.append(os.path.join(dirpath, f))
    else:
        if os.path.isdir(args.absolute_path):
            dirs.append(args.absolute_path)
        else:
            files.append(args.absolute_path)

    #Iterate list of dirs
    for dirname in dirs:
        if os.path.exists(dirname) == False:
            error_files.append(dirname)
            continue

        b_permissions = set_octal_string_permissions(dirname, args.directories)
        b_ownership = set_file_ownership(dirname, args.user, args.group)
        if b_permissions == True or b_ownership == True:
            changed_dirs.append(dirname)

        if args.remove_emptydirs == True:
            if is_dir_empty(dirname):
                os.rmdir(dirname)
                deleted_dirs.append(dirname)

    #Iterate list of files
    for filename in files:
        if os.path.exists(filename) == False:
            error_files.append(filename)
            continue

        b_permissions = set_octal_string_permissions(filename, args.files)
        b_ownership = set_file_ownership(filename, args.user, args.group)
        if b_permissions == True or b_ownership == True:
            changed_files.append(filename)

    print(Style.BRIGHT + _("RecPermissions summary:"))
    print(Style.BRIGHT + Fore.GREEN + "  * " + Fore.RESET +
          _("Directories found: ") + Fore.YELLOW + localized_int(len(dirs)))
    print(Style.BRIGHT + Fore.GREEN + "  * " + Fore.RESET +
          _("Files found: ") + Fore.YELLOW + localized_int(len(files)))
    print(Style.BRIGHT + Fore.GREEN + "  * " + Fore.RESET +
          _("Directories changed: ") + Fore.YELLOW +
          localized_int(len(changed_dirs)))
    print(Style.BRIGHT + Fore.GREEN + "  * " + Fore.RESET +
          _("Files changed: ") + Fore.YELLOW +
          localized_int(len(changed_files)))
    print(Style.BRIGHT + Fore.GREEN + "  * " + Fore.RESET +
          _("Directories deleted: ") + Fore.YELLOW +
          localized_int(len(deleted_dirs)))
    if len(error_files) > 0:
        print(Style.BRIGHT + Fore.GREEN + "  * " + Fore.RESET +
              _("{} error files:").format(Fore.RED +
                                          localized_int(len(error_files)) +
                                          Fore.RESET))
        for e in error_files:
            print(Style.BRIGHT + Fore.RED + "     + " + Style.RESET_ALL + e)
Example #30
 def __init__(self):
     colorama_init()
     if os.name == "posix":
         ## Hide the terminal cursor using ANSI escape codes
         sys.stdout.write("\033[?25l")
         sys.stdout.flush()
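
This constructor hides the terminal cursor with the ANSI sequence \033[?25l but does not show it again here; a complementary sketch (an assumption, not shown in the project) that restores the cursor on exit:

import atexit
import os
import sys

if os.name == "posix":
    # "\033[?25h" is the matching ANSI sequence that makes the cursor visible again.
    atexit.register(lambda: (sys.stdout.write("\033[?25h"), sys.stdout.flush()))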
Example #31
"""
Log module

Handles logging initialization and formatting.
"""
import sys
import logging
no_colorama = False
try:
    from colorama import init as colorama_init, Fore, Back, Style
except ImportError:
    no_colorama = True
# If colorama isn't installed use an ANSI basic replacement
if no_colorama:
    from .mcpyrate.ansi import Fore, Back, Style  # noqa: F811
else:
    colorama_init()
# Default domain, base name for the tool
domain = 'kilog'
filters = None


def get_logger(name=None):
    """Get a module for a submodule or the root logger if no name is
       provided"""
    # print('get_logger '+str(name))
    if name:
        return logging.getLogger(domain + '.' + name)
    return logging.getLogger(domain)


def set_domain(name):
Example #32
 def __init__(self, args, name: str = None):
     colorama_init()
     if name:
         self.name = name
Example #33
    def start(self) -> None:
        self.random_state = self._set_random_state(self.config.get('hyperopt_random_state', None))
        logger.info(f"Using optimizer random state: {self.random_state}")
        self.hyperopt_table_header = -1
        # Initialize spaces ...
        self.init_spaces()

        self.prepare_hyperopt_data()

        # We don't need exchange instance anymore while running hyperopt
        self.backtesting.exchange.close()
        self.backtesting.exchange._api = None  # type: ignore
        self.backtesting.exchange._api_async = None  # type: ignore
        self.backtesting.exchange.loop = None  # type: ignore
        # self.backtesting.exchange = None  # type: ignore
        self.backtesting.pairlists = None  # type: ignore

        cpus = cpu_count()
        logger.info(f"Found {cpus} CPU cores. Let's make them scream!")
        config_jobs = self.config.get('hyperopt_jobs', -1)
        logger.info(f'Number of parallel jobs set as: {config_jobs}')

        self.opt = self.get_optimizer(self.dimensions, config_jobs)

        if self.print_colorized:
            colorama_init(autoreset=True)

        try:
            with Parallel(n_jobs=config_jobs) as parallel:
                jobs = parallel._effective_n_jobs()
                logger.info(f'Effective number of parallel workers used: {jobs}')

                # Define progressbar
                if self.print_colorized:
                    widgets = [
                        ' [Epoch ', progressbar.Counter(), ' of ', str(self.total_epochs),
                        ' (', progressbar.Percentage(), ')] ',
                        progressbar.Bar(marker=progressbar.AnimatedMarker(
                            fill='\N{FULL BLOCK}',
                            fill_wrap=Fore.GREEN + '{}' + Fore.RESET,
                            marker_wrap=Style.BRIGHT + '{}' + Style.RESET_ALL,
                        )),
                        ' [', progressbar.ETA(), ', ', progressbar.Timer(), ']',
                    ]
                else:
                    widgets = [
                        ' [Epoch ', progressbar.Counter(), ' of ', str(self.total_epochs),
                        ' (', progressbar.Percentage(), ')] ',
                        progressbar.Bar(marker=progressbar.AnimatedMarker(
                            fill='\N{FULL BLOCK}',
                        )),
                        ' [', progressbar.ETA(), ', ', progressbar.Timer(), ']',
                    ]
                with progressbar.ProgressBar(
                    max_value=self.total_epochs, redirect_stdout=False, redirect_stderr=False,
                    widgets=widgets
                ) as pbar:
                    EVALS = ceil(self.total_epochs / jobs)
                    for i in range(EVALS):
                        # Correct the number of epochs to be processed for the last
                        # iteration (should not exceed self.total_epochs in total)
                        n_rest = (i + 1) * jobs - self.total_epochs
                        current_jobs = jobs - n_rest if n_rest > 0 else jobs

                        asked = self.opt.ask(n_points=current_jobs)
                        f_val = self.run_optimizer_parallel(parallel, asked, i)
                        self.opt.tell(asked, [v['loss'] for v in f_val])

                        # Calculate progressbar outputs
                        for j, val in enumerate(f_val):
                            # Use human-friendly indexes here (starting from 1)
                            current = i * jobs + j + 1
                            val['current_epoch'] = current
                            val['is_initial_point'] = current <= INITIAL_POINTS

                            logger.debug(f"Optimizer epoch evaluated: {val}")

                            is_best = HyperoptTools.is_best_loss(val, self.current_best_loss)
                            # This value is assigned here and not in the optimization method
                            # to keep proper order in the list of results. That's because
                            # evaluations can take different time. Here they are aligned in the
                            # order they will be shown to the user.
                            val['is_best'] = is_best
                            self.print_results(val)

                            if is_best:
                                self.current_best_loss = val['loss']
                                self.current_best_epoch = val

                            self._save_result(val)

                            pbar.update(current)

        except KeyboardInterrupt:
            print('User interrupted..')

        logger.info(f"{self.num_epochs_saved} {plural(self.num_epochs_saved, 'epoch')} "
                    f"saved to '{self.results_file}'.")

        if self.current_best_epoch:
            HyperoptTools.try_export_params(
                self.config,
                self.backtesting.strategy.get_strategy_name(),
                self.current_best_epoch)

            HyperoptTools.show_epoch_details(self.current_best_epoch, self.total_epochs,
                                             self.print_json)
        else:
            # This is printed when Ctrl+C is pressed quickly, before first epochs have
            # a chance to be evaluated.
            print("No epochs evaluated yet, no best result.")
Example #34
def main():
    # Getting Colorama utility ready to work
    colorama_init(autoreset=True)

    # Printing warning if on Windows
    if system() == 'Windows':
        print(
            Style.BRIGHT + "Nota Bene: " + Style.RESET_ALL +
            "siccome lo script è stato lanciato da Windows i nomi delle cartelle e dei file potrebbero subire delle variazioni.\n"
        )

    # Creating persistent session
    current_session = requests.Session()
    headers = {
        'User-Agent':
        'Mozilla/5.0 (X11; Linux x86_64; rv:62.0) Gecko/20100101 Firefox/62.0'
    }

    # Getting conn_id token from vvvvid and putting it into a payload
    conn_id = {
        'conn_id':
        current_session.get('https://www.vvvvid.it/user/login',
                            headers=headers).json()['data']['conn_id']
    }

    # Creating requests object
    requests_obj = {
        'session': current_session,
        'headers': headers,
        'payload': conn_id
    }

    # Check if ffmpeg is available in PATH
    ffmpeg_local = ''
    if which('ffmpeg') is None:
        # If the user is running the script from Windows or Mac, ffmpeg's build can be inside dependency folder
        if system() in ['Windows', 'Darwin']:
            ffmpeg_dir_files = os.listdir(os.path.join(current_dir, 'ffmpeg'))
            ffmpeg_dir_files.remove('readme.md')

            # If the directory is ambiguous stop the script
            if len(ffmpeg_dir_files) > 1:
                print(
                    "La tua directory di ffmpeg contiene troppi file/cartelle. Assicurati che contenga solo il readme e la cartella con la build di ffmpeg."
                )
                quit()
            elif len(ffmpeg_dir_files) == 0:
                print(
                    "Questo script ha una dipendenza da ffmpeg, che non risulta essere installato. Per maggiori informazioni, consulta il readme sulla pagina GitHub del progetto."
                )
                quit()

            ffmpeg_local = os.path.join(current_dir, 'ffmpeg',
                                        ffmpeg_dir_files[0], 'bin')
        else:
            print(
                "Questo script ha una dipendenza da ffmpeg, che non risulta essere installato. Per maggiori informazioni, consulta il readme sulla pagina GitHub del progetto, nella sezione installazione per Ubuntu."
            )
            quit()

    # Get anime list from local file, ignoring lines commented
    with open("downloads_list.txt", 'r') as f:
        for line in f:
            line = line.strip() + '/'
            if not line.startswith('#'):
                dl_from_vvvvid(line, requests_obj, ffmpeg_local)
Example #35
def color_init():
    if COLORED:
        colorama_init()
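
COLORED is presumably a module-level flag describing whether colored output is available; one plausible way it could be set (an assumption, not the project's actual code):

try:
    from colorama import init as colorama_init
    COLORED = True
except ImportError:
    colorama_init = None
    COLORED = False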
Example #36
    def start(self) -> None:
        timerange = TimeRange.parse_timerange(None if self.config.get(
            'timerange') is None else str(self.config.get('timerange')))
        data = load_data(datadir=Path(self.config['datadir']),
                         pairs=self.config['exchange']['pair_whitelist'],
                         ticker_interval=self.backtesting.ticker_interval,
                         timerange=timerange)

        if not data:
            logger.critical("No data found. Terminating.")
            return

        min_date, max_date = get_timeframe(data)

        logger.info('Hyperopting with data from %s up to %s (%s days)..',
                    min_date.isoformat(), max_date.isoformat(),
                    (max_date - min_date).days)

        preprocessed = self.backtesting.strategy.tickerdata_to_dataframe(data)

        dump(preprocessed, self.tickerdata_pickle)

        # We don't need exchange instance anymore while running hyperopt
        self.backtesting.exchange = None  # type: ignore

        self.load_previous_results()

        cpus = cpu_count()
        logger.info(f"Found {cpus} CPU cores. Let's make them scream!")
        config_jobs = self.config.get('hyperopt_jobs', -1)
        logger.info(f'Number of parallel jobs set as: {config_jobs}')

        self.dimensions = self.hyperopt_space()
        self.opt = self.get_optimizer(self.dimensions, config_jobs)

        if self.config.get('print_colorized', False):
            colorama_init(autoreset=True)

        try:
            with Parallel(n_jobs=config_jobs) as parallel:
                jobs = parallel._effective_n_jobs()
                logger.info(
                    f'Effective number of parallel workers used: {jobs}')
                EVALS = max(self.total_epochs // jobs, 1)
                for i in range(EVALS):
                    asked = self.opt.ask(n_points=jobs)
                    f_val = self.run_optimizer_parallel(parallel, asked, i)
                    self.opt.tell(asked, [v['loss'] for v in f_val])
                    self.fix_optimizer_models_list()
                    for j in range(jobs):
                        current = i * jobs + j
                        val = f_val[j]
                        val['current_epoch'] = current
                        val['is_initial_point'] = current < INITIAL_POINTS
                        self.log_results(val)
                        self.trials.append(val)
                        logger.debug(f"Optimizer epoch evaluated: {val}")
        except KeyboardInterrupt:
            print('User interrupted..')

        self.save_trials()
        self.log_trials_result()
Example #37
def cli():
    colorama_init()
Example #38
    def start(self) -> None:
        self.random_state = self._set_random_state(self.config.get('hyperopt_random_state', None))
        logger.info(f"Using optimizer random state: {self.random_state}")
        self.hyperopt_table_header = -1
        data, timerange = self.backtesting.load_bt_data()

        preprocessed = self.backtesting.strategy.tickerdata_to_dataframe(data)

        # Trim startup period from analyzed dataframe
        for pair, df in preprocessed.items():
            preprocessed[pair] = trim_dataframe(df, timerange)
        min_date, max_date = get_timerange(data)

        logger.info(
            'Hyperopting with data from %s up to %s (%s days)..',
            min_date.isoformat(), max_date.isoformat(), (max_date - min_date).days
        )
        dump(preprocessed, self.tickerdata_pickle)

        # We don't need exchange instance anymore while running hyperopt
        self.backtesting.exchange = None  # type: ignore

        self.trials = self.load_previous_results(self.trials_file)

        cpus = cpu_count()
        logger.info(f"Found {cpus} CPU cores. Let's make them scream!")
        config_jobs = self.config.get('hyperopt_jobs', -1)
        logger.info(f'Number of parallel jobs set as: {config_jobs}')

        self.dimensions: List[Dimension] = self.hyperopt_space()
        self.opt = self.get_optimizer(self.dimensions, config_jobs)

        if self.print_colorized:
            colorama_init(autoreset=True)

        try:
            with Parallel(n_jobs=config_jobs) as parallel:
                jobs = parallel._effective_n_jobs()
                logger.info(f'Effective number of parallel workers used: {jobs}')
                EVALS = ceil(self.total_epochs / jobs)
                for i in range(EVALS):
                    # Correct the number of epochs to be processed for the last
                    # iteration (should not exceed self.total_epochs in total)
                    n_rest = (i + 1) * jobs - self.total_epochs
                    current_jobs = jobs - n_rest if n_rest > 0 else jobs

                    asked = self.opt.ask(n_points=current_jobs)
                    f_val = self.run_optimizer_parallel(parallel, asked, i)
                    self.opt.tell(asked, [v['loss'] for v in f_val])
                    self.fix_optimizer_models_list()

                    for j, val in enumerate(f_val):
                        # Use human-friendly indexes here (starting from 1)
                        current = i * jobs + j + 1
                        val['current_epoch'] = current
                        val['is_initial_point'] = current <= INITIAL_POINTS
                        logger.debug(f"Optimizer epoch evaluated: {val}")

                        is_best = self.is_best_loss(val, self.current_best_loss)
                        # This value is assigned here and not in the optimization method
                        # to keep proper order in the list of results. That's because
                        # evaluations can take different time. Here they are aligned in the
                        # order they will be shown to the user.
                        val['is_best'] = is_best

                        self.print_results(val)

                        if is_best:
                            self.current_best_loss = val['loss']
                        self.trials.append(val)
                        # Save results after each best epoch and every 100 epochs
                        if is_best or current % 100 == 0:
                            self.save_trials()
        except KeyboardInterrupt:
            print('User interrupted..')

        self.save_trials(final=True)

        if self.trials:
            sorted_trials = sorted(self.trials, key=itemgetter('loss'))
            results = sorted_trials[0]
            self.print_epoch_details(results, self.total_epochs, self.print_json)
        else:
            # This is printed when Ctrl+C is pressed quickly, before first epochs have
            # a chance to be evaluated.
            print("No epochs evaluated yet, no best result.")
Example #39
    def go(self):

        colorama_init()

        if len(self.args) < 3 or self.args[1] == "--help":
            print(USAGE)
            sys.exit(1)

        mode = self.args[1]
        path = self.args[2]
        callbacks = None
        extra_vars = dict()

        parser = argparse.ArgumentParser()
        parser.add_argument('--validate', action='store_true', help='policy file to validate')
        parser.add_argument('--apply', action='store_true', help="policy file to apply")
        parser.add_argument('--check', action='store_true', help="policy file to check")
        parser.add_argument('--tags', help='optional comma-separated list of tags')
        parser.add_argument('--push', action='store_true', help='run in push mode')
        parser.add_argument('--local', action='store_true', help='run in local mode')
        parser.add_argument('--verbose', action='store_true', help='(with --push) increase verbosity')
        parser.add_argument('--extra-vars', help="add extra variables from the command line")
        parser.add_argument('--limit-groups', help="(with --push) limit groups executed to this comma-separated list of patterns")
        parser.add_argument('--limit-hosts', help="(with --push) limit hosts executed to this comma-separated list of patterns")
        args = parser.parse_args(self.args[1:])

        all_modes = [ args.validate, args.apply, args.check ]
        selected_modes = [ x for x in all_modes if x is True ]
        if len(selected_modes) != 1:
            print(selected_modes)
            print(USAGE)
            sys.exit(1)

        all_modes = [ args.push, args.local ]
        selected_modes = [ x for x in all_modes if x is True ]
        if len(selected_modes) != 1:
            print(USAGE)
            sys.exit(1)

        if args.extra_vars is not None:
            extra_vars = self.handle_extra_vars(args.extra_vars)

        Callbacks().set_callbacks([ LocalCliCallbacks(), CommonCallbacks() ])
        Context().set_verbose(args.verbose)

        abspath = os.path.abspath(sys.modules[self.policy.__module__].__file__)
        relative_root = os.path.dirname(abspath)
        os.chdir(os.path.dirname(abspath))

        tags = None
        if args.tags is not None:
            tags = args.tags.strip().split(",")

        api = Api(
            policies=[self.policy], 
            tags=tags, 
            push=args.push, 
            extra_vars=extra_vars, 
            limit_groups=args.limit_groups, 
            limit_hosts=args.limit_hosts,
            relative_root=relative_root)

        try:
            if args.validate:
                # just check for missing files and invalid types
                api.validate()
            elif args.check:
                # operate in dry-run mode
                api.check()
            elif args.apply:
                # configure everything
                api.apply()
            else:
                print(USAGE)
                sys.exit(1)
        except OpsMopStop:
            sys.exit(1)
        except OpsMopError as ome:
            print("")
            print(str(ome))
            print("")
            sys.exit(1)


        print("")
        sys.exit(0)
Example #40
import argparse
import math
from typing import Any, Dict, List

import requests

try:
    from colorama import init as colorama_init
    from colorama import Fore, Back, Style
except ImportError:

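    # Fallback when colorama is not installed: every attribute lookup on the
    # shim returns an empty string, so colour codes degrade to no-ops.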
    class ColoramaShim:
        def __getattribute__(self, key):
            return ''

    Fore = Back = Style = ColoramaShim()
else:
    colorama_init(autoreset=True)

parser = argparse.ArgumentParser(
    description='Provides information about a git repository hosted on '
    'GitHub without cloning it.')
parser.add_argument('GitHub_URL',
                    type=str,
                    help='A GitHub URL for a repo to analyze')
args = parser.parse_args()


def num_kilobytes_to_size_str(size_kib: int) -> str:
    # The input is already in KiB, hence the unit ladder starts at KiB.
    size_name = ("KiB", "MiB", "GiB", "TiB")
    if size_kib <= 0:
        return "0.00 KiB"
    x = min(int(math.floor(math.log(size_kib, 1024))), len(size_name) - 1)
    return f"{size_kib / (1024 ** x):.2f} {size_name[x]}"
Example #41
    # Python 3
    izip = zip

import logging

from time import time
from uuid import uuid4

from pyramid.path import DottedNameResolver
from pyramid.settings import asbool
from pyramid.static import static_view

from colorama import init as colorama_init
from colorama import Fore

logger = logging.getLogger(__name__)

colorama_init()

log_namespace = __name__


def iterargs(*args, **kwargs):
    """
    Generator that yields given args and kwargs as strings.

    """
    for arg in args:
        yield "%r" % (arg, )

    for key, value in kwargs.items():
        yield "%s=%r" % (key, value)
Example #42
def announce_grade(homework_prefix, token, org, only_id, feedback_source_repo):
    '''announce student grades to each hw repo'''

    # TODO: use logging lib to log messages
    colorama_init()
    spinner = Halo(stream=sys.stderr)

    student_feedback_title = f"Grade for {homework_prefix}"

    gstudents = Gstudents()
    feedback_vars = gstudents.left_join(homework_prefix)

    # Clone feedback repo & set needed variables
    cur = Path('.')

    for d in cur.glob("feedback-tmp-*"):
        shutil.rmtree(d)
    spinner.info("delete dated folder")

    root_folder = Path(
        tempfile.mkdtemp(prefix="feedback-tmp-{}-".format(
            datetime.now().strftime("%b%d%H%M%S")),
                         dir="."))
    spinner.succeed(
        f"Create tmp folder {Fore.YELLOW}{root_folder}{Style.RESET_ALL}")

    feedback_repo_path = root_folder / 'feedbacks'

    spinner.start(f"cloning feeback source repo : {feedback_source_repo}")
    _, t = measure_time(sp.run)([
        'git',
        'clone',
        f'https://github.com/{org}/{feedback_source_repo}.git',
        feedback_repo_path.name,
    ],
                                cwd=root_folder,
                                stdout=sp.DEVNULL,
                                stderr=sp.DEVNULL)
    spinner.succeed(
        f"cloning feeback source repo : {feedback_source_repo} ... {t:4.2f} sec"
    )
    client = httpx.AsyncClient(headers=httpx.Headers(
        {
            "User-Agent": "GitHubClassroomUtils/1.0",
            "Authorization": "token " + token,
            # needed for the check-suites request
            "Accept": "application/vnd.github.antiope-preview+json"
        }))

    hw_path = feedback_repo_path / homework_prefix / 'reports'

    # generate feedbacks
    fbs, t = measure_time(gen_feedbacks)(homework_prefix, hw_path,
                                         feedback_vars)
    spinner.succeed(f"Generate content for feedbacks ... {t:5.3f} sec")

    # handle only_id
    if only_id:
        try:
            # detect possible buggy condition
            info = gstudents.get_student(only_id)
        except RuntimeError as e:
            print(' *=' * 30)
            print('Warning!')
            print(e)
            return
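        # Narrow the feedback list down to the single requested student's repo.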
        only_repo_name = get_hw_repo_name(homework_prefix,
                                          info['github_handle'])
        fbs = list(filter(lambda fb: fb['repo_name'] == only_repo_name, fbs))

    async def push_to_remote(feedback_title, feedbacks):
        # push to remote
        async def push_feedback(fb):
            request_body = {'title': feedback_title, 'body': fb['value']}
            try:
                issue_num = await find_existing_issue(client, org,
                                                      fb['repo_name'],
                                                      feedback_title)
            except BaseException as e:
                print(f'error on {fb["repo_name"]}: {e}')
                return
            if issue_num:
                request_body['state'] = 'open'  # reopen issue
                url = f"https://api.github.com/repos/{org}/{fb['repo_name']}/issues/{issue_num}"
                await edit_issue_async(client, url, issue_num, request_body)
            else:
                url = f"https://api.github.com/repos/{org}/{fb['repo_name']}/issues"
                await create_issue_async(client, url, request_body)
            print(f'success {fb["repo_name"]}')

        async with trio.open_nursery() as nursery:
            for fb in feedbacks:
                nursery.start_soon(push_feedback, fb)

    _, t = measure_time(trio.run)(push_to_remote, student_feedback_title, fbs)
    spinner.succeed(f"Push feedbacks to remote ... {t:5.2f} sec")
    spinner.succeed('finished announcing grades')
    return
Example #43
  DoCmd('git commit -vam \'Update\'',ignore_exit_code=1)
  DoCmd('git push origin',ignore_exit_code=1)

def sethpath():
  "put bash utils at the front of the execute path so we don't get the wrong 'find', for instance"
  path = GitBinPath + ':' + os.environ['PATH']
  os.environ['PATH'] = path


  ##
 ## 
##  START HERE
##    
# colorized output fix
colorama_init(autoreset=True)
timestr = strftime("%d-%b-%Y-%H%M", gmtime())

oldsuf = '-precheck-in-' + timestr

# go to dir , remove work copy, save off local copy of repo
Chdir(LocalDir)
DoCmd('mv ' + RepoName + ' ' + RepoName + oldsuf)

# get on git, check out current copy
LogIntoGit()
DoCmd('git clone [email protected]:/' + RepoOwner + '/' + RepoName + '.git')
Chdir(RepoName + oldsuf)
# overlay our copy on git's
# couple of side-effects to note: This  copy always updates everything. Shouldn't be a problem unless someone checks in a bad copy.
# OK because we can always recover an old version
Example #44
def main(H: str = '',
         P: str = '',
         C: str = '',
         w: int = None,
         sp: bool = False,
         i: str = '',
         d: bool = False,
         de: bool = False):
    global paths
    global creds
    paths = [FAKE_PATH] + [ln.rstrip() for ln in open(P or PATHS_FILE)]
    creds = [ln.rstrip() for ln in open(C or CREDS_FILE)]
    if d or de:
        colorama_init()
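        # Stringified colour codes get embedded in the coloured log format below.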
        FY = str(Fore.YELLOW)
        FR = str(Fore.RESET)

        log_level = logging.DEBUG
        log_format_base = '[%(name)s %(levelname)s]\n%(message)s\n'
        log_format_c = FY + '[%(name)s %(levelname)s]' + FR + '\n%(message)s\n'
        formatter = logging.Formatter(log_format_base)
        formatter_c = logging.Formatter(log_format_c)

        root_logger = logging.getLogger()
        root_logger.setLevel(log_level)
        net_logger.setLevel(log_level)

        file_handler = logging.FileHandler(LOG_FILE, 'w')
        file_handler.setLevel(log_level)
        file_handler.setFormatter(formatter)
        root_logger.addHandler(file_handler)

    if de:
        stream_handler = logging.StreamHandler(sys.stderr)
        stream_handler.setLevel(log_level)
        stream_handler.setFormatter(formatter_c)
        root_logger.addHandler(stream_handler)

    results = []
    hosts_file_path = H or LOCAL_DIR / 'hosts_554.txt'

    with ThreadPoolExecutor(w) as executor:
        with open(hosts_file_path) as hosts_file:
            futures = {}

            for line in hosts_file:
                host = line.rstrip()
                port = 554

                if ':' in host:
                    host, port = host.split(':')
                    port = int(port)

                arg = (host, port, sp, i)

                future = executor.submit(process_target, arg)
                futures[future] = arg

            with tqdm(total=len(futures)) as progress:
                for future in as_completed(futures):
                    host, port, *_ = futures[future]
                    res = future.result()
                    progress.update()
                    results += res

    for result in results:
        print(result)
Example #45
def start_hyperopt_list(args: Dict[str, Any]) -> None:
    """
    List hyperopt epochs previously evaluated
    """
    from freqtrade.optimize.hyperopt_tools import HyperoptTools

    config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)

    print_colorized = config.get('print_colorized', False)
    print_json = config.get('print_json', False)
    export_csv = config.get('export_csv', None)
    no_details = config.get('hyperopt_list_no_details', False)
    no_header = False

    filteroptions = {
        'only_best': config.get('hyperopt_list_best', False),
        'only_profitable': config.get('hyperopt_list_profitable', False),
        'filter_min_trades': config.get('hyperopt_list_min_trades', 0),
        'filter_max_trades': config.get('hyperopt_list_max_trades', 0),
        'filter_min_avg_time': config.get('hyperopt_list_min_avg_time', None),
        'filter_max_avg_time': config.get('hyperopt_list_max_avg_time', None),
        'filter_min_avg_profit': config.get('hyperopt_list_min_avg_profit', None),
        'filter_max_avg_profit': config.get('hyperopt_list_max_avg_profit', None),
        'filter_min_total_profit': config.get('hyperopt_list_min_total_profit', None),
        'filter_max_total_profit': config.get('hyperopt_list_max_total_profit', None),
        'filter_min_objective': config.get('hyperopt_list_min_objective', None),
        'filter_max_objective': config.get('hyperopt_list_max_objective', None),
    }

    results_file = get_latest_hyperopt_file(
        config['user_data_dir'] / 'hyperopt_results',
        config.get('hyperoptexportfilename'))

    # Previous evaluations
    epochs = HyperoptTools.load_previous_results(results_file)
    total_epochs = len(epochs)

    epochs = hyperopt_filter_epochs(epochs, filteroptions)

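    # Only initialise colorama when colourised output was requested in the config.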
    if print_colorized:
        colorama_init(autoreset=True)

    if not export_csv:
        try:
            print(
                HyperoptTools.get_result_table(config, epochs, total_epochs,
                                               not filteroptions['only_best'],
                                               print_colorized, 0))
        except KeyboardInterrupt:
            print('User interrupted..')

    if epochs and not no_details:
        sorted_epochs = sorted(epochs, key=itemgetter('loss'))
        results = sorted_epochs[0]
        HyperoptTools.print_epoch_details(results, total_epochs, print_json,
                                          no_header)

    if epochs and export_csv:
        HyperoptTools.export_csv_file(config, epochs, total_epochs,
                                      not filteroptions['only_best'],
                                      export_csv)
Example #47
        if not total:
            total = self.total

        percent = 1
        if total > 0:
            percent = float(index) / total

        if self.bar:
            hashes = '#' * int(round(percent * self.size))
            if self.hashes is None or self.hashes != hashes:
                self.hashes = hashes
                spaces = ' ' * (self.size - len(hashes))
                sys.stdout.write(
                    "\r{2} [{0}] {1:3.0f}%".format(hashes + spaces, 100 * percent, self.title)
                )
                sys.stdout.flush()
        else:
            print(p("{0} {1:3.2f}%".format(self.title, 100 * percent)))
            
    def finish(self, total = None):
        if not total:
            total = self.total
            self.progress(total)

        if self.bar:
            sys.stdout.write("\n\n")
            sys.stdout.flush()

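# Initialise colorama once at import time so ANSI colour codes render on Windows consoles too.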
colorama_init()

Example #48
def run_model(rule_number):
    colorama_init()

    print "Creating model for Rule #%s..." % rule_number
    model = createModel(getModelParamsForRule(rule_number))
    run_io_through_nupic(model, int(rule_number))
Example #49
    def __init__(self, definitions, colors, want_readline, want_completion):
        super(TerminalShell, self).__init__("<stdin>")
        self.input_encoding = locale.getpreferredencoding()
        self.lineno = 0

        # Try importing readline to enable arrow keys support etc.
        self.using_readline = False
        try:
            if want_readline:
                import readline

                self.using_readline = sys.stdin.isatty() and sys.stdout.isatty()
                self.ansi_color_re = re.compile("\033\\[[0-9;]+m")
                if want_completion:
                    readline.set_completer(
                        lambda text, state: self.complete_symbol_name(text, state))

                    # Make _ a delimiter, but not $ or `
                    readline.set_completer_delims(
                        " \t\n_~!@#%^&*()-=+[{]}\\|;:'\",<>/?")

                    readline.parse_and_bind("tab: complete")
                    self.completion_candidates = []
        except ImportError:
            pass

        # Try importing colorama to escape ansi sequences for cross platform
        # colors
        try:
            from colorama import init as colorama_init
        except ImportError:
            colors = "NoColor"
        else:
            colorama_init()
            if colors is None:
                terminal_supports_color = (sys.stdout.isatty()
                                           and os.getenv("TERM") != "dumb")
                colors = "Linux" if terminal_supports_color else "NoColor"

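        # Each scheme is a pair of ANSI escape lists: (input prompt colours,
        # output colours); they are unpacked into self.incolors / self.outcolors below.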
        color_schemes = {
            "NOCOLOR": (["", "", "", ""], ["", "", "", ""]),
            "LINUX": (
                ["\033[32m", "\033[1m", "\033[22m", "\033[39m"],
                ["\033[31m", "\033[1m", "\033[22m", "\033[39m"],
            ),
            "LIGHTBG": (
                ["\033[34m", "\033[1m", "\033[22m", "\033[39m"],
                ["\033[31m", "\033[1m", "\033[22m", "\033[39m"],
            ),
        }

        # Handle any case by using .upper()
        term_colors = color_schemes.get(colors.upper())
        if term_colors is None:
            out_msg = "The 'colors' argument must be {0} or None"
            print(out_msg.format(repr(list(color_schemes.keys()))))
            quit()

        self.incolors, self.outcolors = term_colors
        self.definitions = definitions
Example #50
def main():
    """Main function"""
    parser = argparse.ArgumentParser(description='Linter for the pan language')
    parser.add_argument('paths',
                        metavar='PATH',
                        type=str,
                        nargs='*',
                        help='Paths of files to check')
    parser.add_argument('--vi',
                        action='store_true',
                        help='Output line numbers in a vi option style')
    parser.add_argument('--table',
                        action='store_true',
                        help='Display a table of per-file problem stats')
    parser.add_argument('--allow_mvn_templates',
                        action='store_true',
                        help='Allow use of maven templates')
    parser.add_argument('--always_exit_success',
                        action='store_true',
                        help='Always exit cleanly even if problems are found')
    group_output = parser.add_mutually_exclusive_group()
    group_output.add_argument('--debug',
                              action='store_true',
                              help='Enable debug output')
    group_output.add_argument(
        '--ide',
        action='store_true',
        help='Output machine-readable results for use by IDEs')
    args = parser.parse_args()

    # Only output color sequences if the output is a terminal
    colorama_init(strip=(not stdout.isatty()) or args.ide)
    global DEBUG
    DEBUG = args.debug

    problems_found = 0

    reports = []
    problem_stats = {}

    if not args.paths:
        print('No files were provided, not doing anything')
        return 0

    for path in args.paths:
        for filename in glob(path):
            file_reports, file_problems = lint_file(filename,
                                                    args.allow_mvn_templates)
            reports += file_reports
            problems_found += file_problems
            problem_stats[filename] = file_problems

    for report in reports:
        print_report(*report, vi=args.vi)

    if args.table:
        print()
        print('Problem count per file:')
        print(filestats_table(problem_stats))

    print()
    print('%d problems found in total' % problems_found)

    if args.always_exit_success:
        return 0

    if problems_found:
        return 1
Example #51
async def main(lib_kwargs, **kwargs):
    async def start(gwy) -> None:
        protocol_factory = create_protocol_factory(LocalProtocol, gwy, None)

        gwy.pkt_protocol, gwy.pkt_transport = create_pkt_stack(
            gwy,
            None,
            packet_log=gwy._input_file,
            protocol_factory=protocol_factory,
        )
        if gwy.pkt_transport.get_extra_info(POLLER_TASK):
            gwy._tasks.append(gwy.pkt_transport.get_extra_info(POLLER_TASK))

    def setup_database(db_file: str):
        con = None
        try:
            con = sqlite3.connect(db_file)
        except sqlite3.Error as err:
            print(err)

        try:
            cur = con.cursor()
            cur.execute(SQL_CREATE_TABLE)
        except sqlite3.Error as err:
            print(err)

        return con

    def process_packet(pkt) -> None:
        global last_pkt

        def insert_pkt(pkt):
            global counter

            data_fields = (
                pkt.dtm,  # dtm
                pkt.packet[0:3],  # rssi
                pkt.packet[4:6],  # verb
                pkt.packet[7:10],  # seqn
                pkt.packet[11:20],  # dev0
                pkt.packet[21:30],  # dev1
                pkt.packet[31:40],  # dev2
                pkt.packet[41:45],  # code
                pkt.packet[46:49],  # len
                pkt.packet[50:],  # payload
            )

            try:
                # cur = con.cursor()
                cur.execute(SQL_UPSERT_ROW, data_fields)

            except sqlite3.Error as err:
                print(err)

            else:
                if counter % 1000 == 0:
                    msg, hdr = f"{pkt.dtm} {pkt}", f"{Style.BRIGHT}{Fore.CYAN}"
                    print(f"{hdr}{msg[:CONSOLE_COLS]}")
                elif counter % 1000 == 1:
                    con.commit()
                counter += 1

            return cur.lastrowid

        if not pkt.is_valid:
            # msg, hdr = f"{pkt.dtm} {pkt._pkt_str}", f"{Fore.MAGENTA}"
            # print(f"{hdr}{msg[:CONSOLE_COLS]}")
            return

        if last_pkt:
            if all((
                    pkt.packet[4:6] == "RP",
                    pkt.packet[11:20] == last_pkt.packet[21:30],
                    pkt.packet[21:30] == last_pkt.packet[11:20],
                    pkt.packet[41:45] == last_pkt.packet[41:45],
            )):
                insert_pkt(last_pkt)
            last_pkt = None

        elif pkt.packet[4:6] == "RQ" and pkt.packet[11:13] == "18":
            last_pkt = pkt
            return

        else:
            insert_pkt(pkt)

    global counter

    print("\r\nclient.py: Starting evohome_rf (utils)...")

    if sys.platform == "win32":
        asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())

    colorama_init(autoreset=True)
    con = setup_database(kwargs[DATABASE])
    cur = con.cursor()

    gwy = Gateway(None, **lib_kwargs)
    await start(gwy)  # replaces asyncio.create_task(gwy.start())

    while gwy.pkt_protocol is None:
        await asyncio.sleep(0.05)
    gwy.pkt_protocol.pkt_callback = process_packet

    try:  # main code here
        await asyncio.gather(*gwy._tasks)

    except asyncio.CancelledError:
        msg = " - ended via: CancelledError (e.g. SIGINT)"
    except GracefulExit:
        msg = " - ended via: GracefulExit"
    except (KeyboardInterrupt, SystemExit):
        msg = " - ended via: KeyboardInterrupt"
    except EvohomeError as err:
        msg = f" - ended via: EvohomeError: {err}"
    else:  # if no Exceptions raised, e.g. EOF when parsing
        msg = " - ended without error (e.g. EOF)"

    con.commit()

    print(f"\r\nclient.py: Finished evohome_rf (utils).\r\n{msg}\r\n")
    print(f"  - uploaded {counter} rows\r\n")
Example #52
    def start(self) -> None:
        self.random_state = self._set_random_state(self.config.get('hyperopt_random_state', None))
        logger.info(f"Using optimizer random state: {self.random_state}")
        self.hyperopt_table_header = -1
        data, timerange = self.backtesting.load_bt_data()

        preprocessed = self.backtesting.strategy.ohlcvdata_to_dataframe(data)

        # Trim startup period from analyzed dataframe
        for pair, df in preprocessed.items():
            preprocessed[pair] = trim_dataframe(df, timerange)
        min_date, max_date = get_timerange(data)

        logger.info(f'Hyperopting with data from {min_date.strftime(DATETIME_PRINT_FORMAT)} '
                    f'up to {max_date.strftime(DATETIME_PRINT_FORMAT)} '
                    f'({(max_date - min_date).days} days)..')

        dump(preprocessed, self.data_pickle_file)

        # We don't need exchange instance anymore while running hyperopt
        self.backtesting.exchange = None  # type: ignore
        self.backtesting.pairlists = None  # type: ignore
        self.backtesting.strategy.dp = None  # type: ignore
        IStrategy.dp = None  # type: ignore

        self.epochs = self.load_previous_results(self.results_file)

        cpus = cpu_count()
        logger.info(f"Found {cpus} CPU cores. Let's make them scream!")
        config_jobs = self.config.get('hyperopt_jobs', -1)
        logger.info(f'Number of parallel jobs set as: {config_jobs}')

        self.dimensions: List[Dimension] = self.hyperopt_space()
        self.opt = self.get_optimizer(self.dimensions, config_jobs)

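        # autoreset=True restores the default colour after every print, so the
        # coloured progressbar markers below do not bleed into later output.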
        if self.print_colorized:
            colorama_init(autoreset=True)

        try:
            with Parallel(n_jobs=config_jobs) as parallel:
                jobs = parallel._effective_n_jobs()
                logger.info(f'Effective number of parallel workers used: {jobs}')

                # Define progressbar
                if self.print_colorized:
                    widgets = [
                        ' [Epoch ', progressbar.Counter(), ' of ', str(self.total_epochs),
                        ' (', progressbar.Percentage(), ')] ',
                        progressbar.Bar(marker=progressbar.AnimatedMarker(
                            fill='\N{FULL BLOCK}',
                            fill_wrap=Fore.GREEN + '{}' + Fore.RESET,
                            marker_wrap=Style.BRIGHT + '{}' + Style.RESET_ALL,
                        )),
                        ' [', progressbar.ETA(), ', ', progressbar.Timer(), ']',
                    ]
                else:
                    widgets = [
                        ' [Epoch ', progressbar.Counter(), ' of ', str(self.total_epochs),
                        ' (', progressbar.Percentage(), ')] ',
                        progressbar.Bar(marker=progressbar.AnimatedMarker(
                            fill='\N{FULL BLOCK}',
                        )),
                        ' [', progressbar.ETA(), ', ', progressbar.Timer(), ']',
                    ]
                with progressbar.ProgressBar(
                         max_value=self.total_epochs, redirect_stdout=False, redirect_stderr=False,
                         widgets=widgets
                     ) as pbar:
                    EVALS = ceil(self.total_epochs / jobs)
                    for i in range(EVALS):
                        # Correct the number of epochs to be processed for the last
                        # iteration (should not exceed self.total_epochs in total)
                        n_rest = (i + 1) * jobs - self.total_epochs
                        current_jobs = jobs - n_rest if n_rest > 0 else jobs

                        asked = self.opt.ask(n_points=current_jobs)
                        f_val = self.run_optimizer_parallel(parallel, asked, i)
                        self.opt.tell(asked, [v['loss'] for v in f_val])

                        # Calculate progressbar outputs
                        for j, val in enumerate(f_val):
                            # Use human-friendly indexes here (starting from 1)
                            current = i * jobs + j + 1
                            val['current_epoch'] = current
                            val['is_initial_point'] = current <= INITIAL_POINTS

                            logger.debug(f"Optimizer epoch evaluated: {val}")

                            is_best = self.is_best_loss(val, self.current_best_loss)
                            # This value is assigned here and not in the optimization method
                            # to keep proper order in the list of results. That's because
                            # evaluations can take different time. Here they are aligned in the
                            # order they will be shown to the user.
                            val['is_best'] = is_best
                            self.print_results(val)

                            if is_best:
                                self.current_best_loss = val['loss']
                            self.epochs.append(val)

                            # Save results after each best epoch and every 100 epochs
                            if is_best or current % 100 == 0:
                                self._save_results()

                            pbar.update(current)

        except KeyboardInterrupt:
            print('User interrupted..')

        self._save_results()
        logger.info(f"{self.num_epochs_saved} {plural(self.num_epochs_saved, 'epoch')} "
                    f"saved to '{self.results_file}'.")

        if self.epochs:
            sorted_epochs = sorted(self.epochs, key=itemgetter('loss'))
            best_epoch = sorted_epochs[0]
            self.print_epoch_details(best_epoch, self.total_epochs, self.print_json)
        else:
            # This is printed when Ctrl+C is pressed quickly, before first epochs have
            # a chance to be evaluated.
            print("No epochs evaluated yet, no best result.")
Example #53
async def main(lib_kwargs, **kwargs):
    def print_results(**kwargs):

        if kwargs[GET_FAULTS]:
            fault_log = gwy.system_by_id[
                kwargs[GET_FAULTS]]._fault_log.fault_log

            if fault_log is None:
                print("No fault log, or failed to get the fault log.")
            else:
                [print(f"{k:02X}", v) for k, v in fault_log.items()]

        if kwargs[GET_SCHED][0]:
            system_id, zone_idx = kwargs[GET_SCHED]
            zone = gwy.system_by_id[system_id].zone_by_idx[zone_idx]
            schedule = zone._schedule.schedule

            if schedule is None:
                print("Failed to get the schedule.")
            else:
                print("Schedule = \r\n", json.dumps(schedule))  # , indent=4))

        if kwargs[SET_SCHED][0]:
            system_id, _ = kwargs[GET_SCHED]

        # else:
        #     print(gwy.device_by_id[kwargs["device_id"]])

    def print_summary(gwy):
        if gwy.evo is None:
            print(f"Schema[gateway] = {json.dumps(gwy.schema)}\r\n")
            print(f"Params[gateway] = {json.dumps(gwy.params)}\r\n")
            print(f"Status[gateway] = {json.dumps(gwy.status)}")
            return

        print(
            f"Schema[{repr(gwy.evo)}] = {json.dumps(gwy.evo.schema, indent=4)}\r\n"
        )
        print(
            f"Params[{repr(gwy.evo)}] = {json.dumps(gwy.evo.params, indent=4)}\r\n"
        )
        print(
            f"Status[{repr(gwy.evo)}] = {json.dumps(gwy.evo.status, indent=4)}\r\n"
        )

        orphans = {
            "orphans": {
                d.id: d.status
                for d in sorted(gwy.devices) if d not in gwy.evo.devices
            }
        }
        print(f"Status[gateway] = {json.dumps(orphans, indent=4)}")

        devices = {"devices": {d.id: d.schema for d in sorted(gwy.devices)}}
        print(f"Schema[devices] = {json.dumps(devices, indent=4)}")

    def process_message(msg) -> None:
        dtm = msg.dtm if kwargs["long_dates"] else f"{msg.dtm:%H:%M:%S.%f}"[:-3]
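        # Messages whose source is a type "18" device (the local gateway) are
        # printed in a bright style so they stand out.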
        if msg.src.type == "18":
            print(
                f"{Style.BRIGHT}{COLORS.get(msg.verb)}{dtm} {msg}"[:CONSOLE_COLS]
            )
        else:
            print(f"{COLORS.get(msg.verb)}{dtm} {msg}"[:CONSOLE_COLS])

    print("\r\nclient.py: Starting evohome_rf...")

    if sys.platform == "win32":
        asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())

    serial_port, lib_kwargs = normalise_config_schema(lib_kwargs)
    gwy = Gateway(serial_port, **lib_kwargs)

    if kwargs[REDUCE_PROCESSING] < DONT_CREATE_MESSAGES:
        # no MSGs will be sent to STDOUT, so send PKTs instead
        colorama_init(autoreset=True)  # TODO: remove strip=True
        protocol, _ = gwy.create_client(process_message)

    try:  # main code here
        task = asyncio.create_task(gwy.start())

        if kwargs[COMMAND] == MONITOR:
            tasks = spawn_monitor_scripts(gwy, **kwargs)

        if kwargs[COMMAND] == EXECUTE:
            tasks = spawn_execute_scripts(gwy, **kwargs)
            await asyncio.gather(*tasks)

            cmds = (EXECUTE_CMD, SCAN_DISC, SCAN_FULL, SCAN_HARD, SCAN_XXXX)
            if not any(kwargs[k] for k in cmds):
                # await gwy.stop()
                task.cancel()

        if False:  # TODO: temp test code
            print("AAA")
            await asyncio.sleep(3)
            print("BBB")
            cmd = Command.get_zone_name("01:145038", "00")
            msg = await gwy.async_send_cmd(cmd)
            print("CCC")
            print(msg)
            print("ZZZ")

        await task

    except asyncio.CancelledError:
        msg = " - ended via: CancelledError (e.g. SIGINT)"
    except GracefulExit:
        msg = " - ended via: GracefulExit"
    except KeyboardInterrupt:
        msg = " - ended via: KeyboardInterrupt"
    except EvohomeError as err:
        msg = f" - ended via: EvohomeError: {err}"
    else:  # if no Exceptions raised, e.g. EOF when parsing
        msg = " - ended without error (e.g. EOF)"

    print("\r\nclient.py: Finished evohome_rf, results:\r\n")
    if kwargs[COMMAND] == EXECUTE:
        print_results(**kwargs)
    else:
        print_summary(gwy)

    # schema, msgs = gwy._get_state()
    # f = open("state_msgs.log", "w")
    # [
    #     f.write(f"{m.dtm.isoformat(sep='T')} {m._pkt}\r\n")
    #     for m in msgs.values()
    #     # if not m.is_expired
    # ]
    # f.close()

    # f = open("state_schema.json", "w")
    # f.write(json.dumps(schema, indent=4))
    # f.close()

    # # await gwy._set_state(schema, msgs)

    print(f"\r\nclient.py: Finished evohome_rf.\r\n{msg}\r\n")
Example #54
    def __init__(self) -> None:
        colorama_init()
Example #55
def add_students(student_handles, token, org, team):
    '''
    student_handles: github user to add (usernames)
    '''
    if len(student_handles) == 0:
        print('at least one student handle is required')
        return 1
    colorama_init()

    github_students = student_handles
    github_organization = org
    github_team = team
    github_token = token

    if github_token == "":
        print(Back.RED + Fore.BLACK + "No github_token was given, use " +
              Fore.WHITE + "cmd_argument" + Fore.BLACK + " or specify it in " +
              Fore.WHITE + "github_config.py " + Back.RESET + Fore.RESET)
    #print('org: {}'.format(github_organization))
    #print('token: {}'.format(github_token))
    # print("students:{}".format(github_students))

    with Halo() as spinner:
        spinner.text = "fetch existing team members from GitHub"
        # spinner.start()

        team = Team(github_organization,
                    team_slug=github_team,
                    github_token=github_token)
        spinner.succeed()
        dic = {
            'team': github_team,
            'n_mem': len(team.members.keys()),
            'num_style': Fore.GREEN,
            'reset_str': Back.RESET + Fore.RESET
        }
        spinner.info(
            "{team} - {num_style}{n_mem}{reset_str} members".format(**dic))
        spinner.text_color = "green"

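    # Work out which of the requested students are not already team members.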
    existed_members = set(team.members.keys())
    outside_users = list(set(github_students) - existed_members)

    #print("Users to invite:")
    #print_table(outside_users, cols=5, wide=15)

    invalid_id = []
    with Halo() as spinner:
        spinner.text = ""
        # spinner.start()
        total = len(outside_users)
        for idx, u in enumerate(outside_users, start=1):
            spinner.text = "{}/{} Check valid GitHub username : {}".format(
                idx, total, u)
            if not check_is_github_user(u, github_token):
                invalid_id.append(u)
        spinner.text = "{}/{} Check valid GitHub username".format(total, total)
        spinner.succeed()

    if len(invalid_id) != 0:
        print("Find non-existed github user names:")
        # control strings take space
        print_table([Fore.RED + i + Fore.RESET for i in invalid_id],
                    cols=5,
                    wide=25)

    non_member_valid_users = list(set(outside_users) - set(invalid_id))

    # membership info
    membership_infos = {key: "unknown" for key in non_member_valid_users}
    with Halo() as spinner:
        spinner.text = "Check Membership information"
        spinner.start()
        total = len(non_member_valid_users)
        for idx, username in enumerate(non_member_valid_users, start=1):
            spinner.text = "{}/{} Check Membership information : {}".format(
                idx, total, username)
            res = team.get_memberships(username)
            if res.status_code == 200:
                membership_infos[username] = res.json()['state']
        spinner.text = "{}/{} Check Membership information".format(
            total, total)
        spinner.succeed()

    pending_users = [
        u for u in membership_infos.keys() if membership_infos[u] == "pending"
    ]
    no_memship_users = [
        u for u in membership_infos.keys() if membership_infos[u] == "unknown"
    ]

    print("Users already in pending state (total:{}):".format(
        len(pending_users)))
    print_table(pending_users)

    print("Users to add (total: {})".format(len(no_memship_users)))
    print_table(no_memship_users)

    failed_users = []
    with Halo() as spinner:
        for user_name in no_memship_users:
            spinner.text = Fore.GREEN + "adding user: {}".format(user_name)
            spinner.start()
            res = team.add_user_to_team(user_name)
            if res.status_code == 200:
                spinner.succeed()
            else:
                failed_users.append(user_name)
                spinner.text += ", return code: " + Fore.RED + \
                    str(res.status_code) + Fore.RESET
                spinner.fail()
    failed_users = list(set(failed_users))

    if len(failed_users) != 0:
        print("Users failed to add")
        print_table(failed_users)

    with Halo() as spinner:
        spinner.text = "Adding students successfully"
        spinner.info()
Example #56
from ssh_audit.outputbuffer import OutputBuffer
from ssh_audit.policy import Policy
from ssh_audit.product import Product
from ssh_audit.protocol import Protocol
from ssh_audit.software import Software
from ssh_audit.ssh1_kexdb import SSH1_KexDB
from ssh_audit.ssh1_publickeymessage import SSH1_PublicKeyMessage
from ssh_audit.ssh2_kex import SSH2_Kex
from ssh_audit.ssh2_kexdb import SSH2_KexDB
from ssh_audit.ssh_socket import SSH_Socket
from ssh_audit.utils import Utils
from ssh_audit.versionvulnerabilitydb import VersionVulnerabilityDB

try:  # pragma: nocover
    from colorama import init as colorama_init
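    # strip=False keeps ANSI escape sequences even when output is piped or redirected.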
    colorama_init(strip=False)  # pragma: nocover
except ImportError:  # pragma: nocover
    pass


def usage(err: Optional[str] = None) -> None:
    retval = exitcodes.GOOD
    uout = Output()
    p = os.path.basename(sys.argv[0])
    uout.head('# {} {}, https://github.com/jtesta/ssh-audit\n'.format(
        p, VERSION))
    if err is not None and len(err) > 0:
        uout.fail('\n' + err)
        retval = exitcodes.UNKNOWN_ERROR
    uout.info('usage: {0} [options] <host>\n'.format(p))
    uout.info('   -h,  --help             print this help')