Example #1
class TestCommandAction(unittest.TestCase):
    def setUp(self):
        self.parser = ArgumentParser()

    def test_choices(self):
        command_table = {'pre-existing': object()}
        self.parser.add_argument(
            'command', action=CommandAction, command_table=command_table)
        parsed_args = self.parser.parse_args(['pre-existing'])
        self.assertEqual(parsed_args.command, 'pre-existing')

    def test_choices_added_after(self):
        command_table = {'pre-existing': object()}
        self.parser.add_argument(
            'command', action=CommandAction, command_table=command_table)
        command_table['after'] = object()

        # The pre-existing command should still be able to be parsed
        parsed_args = self.parser.parse_args(['pre-existing'])
        self.assertEqual(parsed_args.command, 'pre-existing')

        # The command added after the argument's creation should be
        # able to be parsed as well.
        parsed_args = self.parser.parse_args(['after'])
        self.assertEqual(parsed_args.command, 'after')
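The test above does not include CommandAction itself; a minimal sketch that would satisfy it (an assumption, not the original implementation) keeps a reference to the live command table and validates at parse time:

import argparse

class CommandAction(argparse.Action):
    def __init__(self, option_strings, dest, command_table=None, **kwargs):
        # Keep a reference (not a copy) so commands registered after
        # add_argument() are still visible when parsing happens.
        self.command_table = command_table if command_table is not None else {}
        super(CommandAction, self).__init__(option_strings, dest, **kwargs)

    def __call__(self, parser, namespace, values, option_string=None):
        if values not in self.command_table:
            parser.error('unknown command: %s' % values)
        setattr(namespace, self.dest, values)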
Example #2
def cli_args(mockargs=None):
    """Parse command line arguments"""
    parser = ArgumentParser(description='Firelet daemon')

    parser.add_argument("-c", "--conffile", nargs='?',
        default='/etc/firelet/firelet.ini', help="configuration file", metavar="FILE")
    parser.add_argument("-r", "--repodir", nargs='?',
        help="configuration repository dir")
    parser.add_argument("-D", "--debug",
        action="store_true", dest="debug", default=False,
        help="run in debug mode and print messages to stdout")
    parser.add_argument("-q", "--quiet",
        action="store_true", dest="quiet", default=False,
        help="print less messages to stdout")

    # catch all other arguments
    parser.add_argument('commands', nargs='+',
        help='commands to execute')

    if mockargs:
        opts = parser.parse_args(mockargs)
    else:
        opts = parser.parse_args()

    # temporary hack before rewriting the entire file using argparse
    six_commands = opts.commands + [None] * (6 - len(opts.commands))
    return opts, six_commands
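A usage sketch of the padding hack above (the argument values are illustrative):

# opts, six_commands = cli_args(['-D', 'save', 'rule1'])
# opts.debug is True, opts.commands == ['save', 'rule1'], and
# six_commands == ['save', 'rule1', None, None, None, None]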
Example #3
    def get_options(cls):
        """
        Parse the command line to analyze options ... returns options
        """
        if cls.options is None:
            try:
                from argparse import ArgumentParser
            except ImportError:
                from fabio.third_party.argparse import ArgumentParser

            parser = ArgumentParser(usage="Tests for %s" % cls.name)
            parser.add_argument("-d", "--debug", dest="debug", help="run in debugging mode",
                                default=False, action="store_true")
            parser.add_argument("-i", "--info", dest="info", help="run in more verbose mode ",
                                default=False, action="store_true")
            parser.add_argument("-f", "--force", dest="force", help="force the build of the library",
                                default=False, action="store_true")
            parser.add_argument("-r", "--really-force", dest="remove",
                                help="remove existing build and force the build of the library",
                                default=False, action="store_true")
            parser.add_argument(dest="args", type=str, nargs='*')
            if IN_SOURCES:
                cls.options = parser.parse_args()
            else:
                cls.options = parser.parse_args([])
        return cls.options
Example #4
def main(argv):
    """Parse arguments."""
    logging.basicConfig(level=logging.INFO,
                        format="%(levelname)s: %(message)s")
    description = "Filter SEAware results file"
    parser = ArgumentParser(description=description)
    parser.add_argument("-i", "--infile", default=None, 
        help="input SEAware results CSV filename  (default: stdin)")
    parser.add_argument("-o", "--outfile", default=None, 
        help="output filtered results CSV filename (default: stdout)")
    parser.add_argument("-b", "--best-affinity", action="store_true",
                        help="keep only the best affinity set for each target")
    parser.add_argument("-s", "--select-affinity", default=SELECTED_AFFINITY,
                        help="select a particular affinity for every target " +
                        "(default: %(default)s)")
    parser.add_argument("-p", "--pvalue-threshold", default=PVALUE_THRESHOLD,
                        type=float, 
                        help="remove all results worse than this p-value " +
                        "threshold (default: %(default)g)")
    parser.add_argument("--set-mode", action='store_true', 
                        help="Compare set-versus-set results")
    options = parser.parse_args(args=argv[1:])
    return handler(in_fn=options.infile, out_fn=options.outfile,
                   select_affinity=options.select_affinity,
                   best_affinity=options.best_affinity, 
                   pvalue_threshold=options.pvalue_threshold,
                   setcore=options.set_mode)
Example #5
def main():
    parser = ArgumentParser(
        description='Set a random wallpaper on each monitor')
    parser.parse_args()

    wallpapers = random.sample(wallpaper_files(), number_of_monitors())
    set_wallpapers(wallpapers)
Example #6
def parse_commandline_arguments(args=None):
    '''
    Add several subcommands, each with their own options and arguments.
    '''
    parser = ArgumentParser(prog='xprofile', description='A tool to manage and automatically apply xrandr configurations.')

    parser.add_argument('--verbose', action='store_true', help='output more verbosely')
    parser.add_argument('--config',  default='~/.xprofilerc', help='config file to read profiles from')
    parser.add_argument('--version', action='version', version='%(prog)s ' + __version__)

    subparsers = parser.add_subparsers(description='The following commands are available', dest='subcommand')
    subparsers.required = True

    parser_a = subparsers.add_parser('list', help="list all available xrandr profiles")
    parser_a.set_defaults(func=list_all_profiles)

    parser_b = subparsers.add_parser('current', help="get information about the current active profile")
    parser_b.set_defaults(func=get_current_profile)

    parser_c = subparsers.add_parser('generate', help="generate a new profile and print to stdout")
    parser_c.add_argument('--description', default=None, help='the description for the new profile')
    parser_c.add_argument('--profile', default=None, help='the name for the new profile')
    parser_c.set_defaults(func=generate_profile)

    parser_d = subparsers.add_parser('activate', help="activate the given profile or automatically select one")
    parser_d.add_argument('--dry-run', action='store_true', help='don\'t activate the profile')
    parser_d.add_argument('profile', default=None, nargs='?', help='the profile to select')
    parser_d.set_defaults(func=activate_profile)

    if args is not None:
        parsed_args = parser.parse_args(args)
    else:
        parsed_args = parser.parse_args()

    return parsed_args, parser
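A usage sketch of the returned pair (the profile name is illustrative):

# args, parser = parse_commandline_arguments(['--verbose', 'activate', 'docked'])
# args.subcommand == 'activate', args.profile == 'docked', and
# args.func is activate_profile, ready to be dispatched as args.func(args)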
Example #7
def run_gunicorn():
    """
    Exec gunicorn with our wsgi app.

    Settings are taken from environment variables as listed in the help text.
    This is intended to be called as a console_script entry point.
    """

    # this only exists to provide help/usage text
    parser = ArgumentParser(description=DESCRIPTION,
                            formatter_class=RawTextHelpFormatter)
    parser.parse_args()

    workers = os.environ.get('GUNICORN_WORKERS', '4')
    port = os.environ.get('REGISTRY_PORT', '5000')
    graceful_timeout = os.environ.get('GUNICORN_GRACEFUL_TIMEOUT', '3600')
    silent_timeout = os.environ.get('GUNICORN_SILENT_TIMEOUT', '3600')

    address = '0.0.0.0:{0}'.format(port)

    gunicorn_path = distutils.spawn.find_executable('gunicorn')
    if gunicorn_path is None:
        print('error: gunicorn executable not found', file=sys.stderr)
        sys.exit(1)

    os.execl(gunicorn_path, 'gunicorn', '--access-logfile', '-', '--debug',
             '--max-requests', '100', '--graceful-timeout', graceful_timeout,
             '-t', silent_timeout, '-k', 'gevent', '-b', address, '-w', workers,
             'docker_registry.wsgi:application')
Example #8
def main():
    parser = ArgumentParser(description="""\
Setup rust.  Install toolchains and toolchain components, and crates I use.
""")
    parser.parse_args()

    rustup = shutil.which('rustup')
    if not rustup:
        sys.exit('rustup missing; install from https://rustup.rs!')

    # Don't use check_call here; rustup might be installed system wide
    call([rustup, 'self', 'update'])
    check_call([rustup, 'update'])

    for toolchain in TOOLCHAINS:
        setup_toolchain(rustup, toolchain)

    # Setup stable as default toolchain
    check_call([rustup, 'default', 'stable'])

    # Install all crates
    cargo = shutil.which('cargo')
    if not cargo:
        sys.exit('cargo missing; something went wrong with toolchains!')

    for crate in CRATES:
        check_call([cargo, 'install', '--force', crate])
Example #9
def seqdb_command():
    from argparse import ArgumentParser
    import sys
    from ..frontend.configuration import GeneralConfiguration
    parser = ArgumentParser(prog='seqdb', description='seqtool database administration.')
    parser.add_argument("--cache_dir", help='database directory')

    subparsers = parser.add_subparsers(help='sub-command help')
    parser_load = subparsers.add_parser('load', help='load database from mysql server')
    parser_load.add_argument("--host", default='localhost', help='see Makefile')
    parser_load.add_argument("--password", help='see Makefile')
    parser_load.add_argument("-f", "--force", dest="force", help="force update", action='store_true')
    parser_load.set_defaults(func=seqdb_load)

    parser_bed = subparsers.add_parser('bed', help='load bed files.')
    parser_bed.add_argument("--bed_dir", help='bed directory')
    parser_bed.set_defaults(func=seqdb_bed)

    parser_clear = subparsers.add_parser('clear', help='clear database')
    parser_clear.set_defaults(func=seqdb_clear)

    if len(sys.argv) < 2:
        args = parser.parse_args(['-h'])
    else:
        args = parser.parse_args(sys.argv[1:])

    if not args.cache_dir:
        gc = GeneralConfiguration()
        args.cache_dir = gc.get_cache_dir()

    args.instance = DB(args.cache_dir, None)

    args.func(args)
Example #10
    def options(self):
        parser = ArgumentParser(formatter_class=RawTextHelpFormatter,
            epilog='''EXAMPLES: 
            python secutils.py -t Project/Discovery/ -p Project/Enum/
            python secutils.py -rn Project/Discovery/target1 -o Report
            python secutils.py -rN Project/target1/nessus Project/target2/nessus/ -T VulnsDB_Spanish.xls -o Report.xls''')
        
        parser._optionals.title = "MISC"
        parser.add_argument('-v', '--version', action='version', version='%(prog)s 2.0')
        parser.add_argument('-o', metavar='OUTPUT-FILE', dest='output', action='append', help='Set an xls output file')
    
        nmapGroup = parser.add_argument_group('NMAP UTILITIES')
        nmapGroup.add_argument('-t', metavar='DIR', nargs='+', dest='pTargets', action='append', help='Create a list of targets from nmap files in xml format located in DIR')
        nmapGroup.add_argument('-p', metavar='DIR', nargs='+', dest='pPorts', action='append', help='Create list of open ports from nmap files in xml format located in DIR')
        nmapGroup.add_argument('-rn', metavar='DIR', nargs='+', dest='pNmap', action='append', help='Create an XLS report from nmap files in xml format located in DIR')
        
        nessusGroup = parser.add_argument_group('NESSUS UTILITIES')
        nessusGroup.add_argument('-rN', metavar='DIR', nargs='+', dest='pNessus', action='append', help='Create an XLS report from .nessus files located in DIR')
        nessusGroup.add_argument('-T', metavar='FILE', dest='dbNessus', action='append', help='Use an xls database FILE to translate nessus reports. Must be used along with -rN')
        
        acunetixGroup = parser.add_argument_group('ACUNETIX UTILITIES')
        acunetixGroup.add_argument('-ra', metavar='DIR', nargs='+', dest='pAcunetix', action='append', help='Create an XLS report from acunetix files in xml format located in DIR')
        
        netsparkerGroup = parser.add_argument_group('NETSPARKER UTILITIES')
        netsparkerGroup.add_argument('-rk', metavar='DIR', nargs='+', dest='pNetsparker', action='append', help='Create an XLS report from netsparker files in xml format located in DIR')

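        # Without command line arguments, parse '--help' so argparse prints the
        # full usage text and exits.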
        if len(sys.argv) == 1:
            return parser.parse_args('--help'.split())
        else:
            return parser.parse_args()
Example #11
def main():
    """Main function. Runs when called as a script.
    """
    arg_parser = ArgumentParser(
        description="Dumps data from the WSDOT Traffic API to JSON files.")
    arg_parser.add_argument(
        "api-code", nargs="?",
        help="WSDOT Traveler API code. This parameter can be omitted if the %s\
 environment variable is defined." % ENVIRONMENT_VAR_NAME)
    arg_parser.parse_args()
    # Create the output directory if not already present.
    if not os.path.exists(OUTDIR):
        os.mkdir(OUTDIR)
    for endpoint_name in URLS:
        # Get the features via the API.
        features = get_traveler_info(endpoint_name, CODE)
        # Extract field definitions
        fields = FieldInfo.from_features(features)

        # Write data and field info to JSON files.
        out_path = os.path.join(OUTDIR, "%s.json" % endpoint_name)
        with open(out_path, 'w') as json_file:
            json.dump(
                features, json_file, cls=CustomEncoder, indent=True)
        out_path = os.path.join(OUTDIR, "%s_fields.json" % endpoint_name)
        with open(out_path, 'w') as json_file:
            json.dump(
                fields, json_file, indent=True, default=_field_serializer)

        # dump geojson
        geojson = dict_list_to_geojson(features)
        out_path = os.path.join(OUTDIR, "%s.geojson" % endpoint_name)
        with open(out_path, 'w') as json_file:
            json.dump(
                geojson, json_file, cls=CustomEncoder, indent=True)
Example #12
    def parse_args(self, args=None):
        """Parse the arguments"""
        parser = ArgumentParser(description="Plot the numbers given in a file "
                                "or in stdin")

        rgroup = parser.add_argument_group("Read from...")
        rgroup.add_argument('--std-in', action="store_true", default=False,
                            help="Read the numbers from stdin.")
        rgroup.add_argument('--in-file', '-f', type=str, default=None,
                            help="Specify input file path.")

        dgroup = parser.add_argument_group("Input data...")
        dgroup.add_argument('--xy', '-x', action="store_true", default=False,
                            help="Treat first column as x values, and the "
                            "following as y-values (default False).")
        dgroup.add_argument('--col', '-c', action="append", dest='cols',
                            type=int, default=list(),
                            help="Specify which columns to investigate. "
                            "Repeat if needed. Default: All")
        dgroup.add_argument('--ignore-first', '-i', action="store_true",
                            default=False, help="ignore first line")
        dgroup.add_argument('--sep', '-s', default=' ',
                            help="Specify separator, default: space")

        fgroup = parser.add_argument_group("Formatting...")
        fgroup.add_argument('--gap', '-g', type=float, default=0.01,
                            help="inverted number of subpoints in lines")
        fgroup.add_argument('--not-implemented')

        if args is not None:
            self.args = parser.parse_args(args)
        else:
            self.args = parser.parse_args()
        return
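A usage sketch of the repeatable --col option (instance and file names are illustrative):

# plotter.parse_args(['-f', 'data.txt', '--col', '1', '--col', '3'])
# leaves plotter.args.cols == [1, 3], thanks to action="append" with type=int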
Example #13
    def parse(self, *largs):
        from argparse import ArgumentParser

        parser = ArgumentParser(prog=self.program,
                description=self.description, epilog=self.epilog)

        if len(self.models) > 1:
            parser.add_argument('-m', '--model', help='codec model',
                    dest='model', action='store', choices=self.models)
        if len(self.codecs) > 1:
            parser.add_argument('-c', '--codec', help='video codec',
                    dest='codec', action='store', choices=self.codecs)
        if len(self.actions) > 1:
            parser.add_argument('-a', '--action', help='action',
                    dest='action', action='store', choices=self.actions)

        if self.encode_by_option:
            parser.add_argument('-x', '--width', help='encoded picture width',
                    dest='width', action='store', type=int, default=0)
            parser.add_argument('-y', '--height', help='encoded picture height',
                    dest='height', action='store', type=int, default=0)
        if self.encode_by_option or self.digest_by_frames:
            parser.add_argument('-f', '--frames', help='frames',
                    dest='frames', action='store', type=int, default=0)

        parser.add_argument('files', help='input filenames',
                action='store', metavar='FILENAMES', nargs='*')

        parser.parse_args(args=largs, namespace=self)

        return vars(self)
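A minimal, self-contained illustration of the parse_args(namespace=obj) pattern used above (names are illustrative):

from argparse import ArgumentParser

class Codec(object):
    pass

cfg = Codec()
p = ArgumentParser()
p.add_argument('-c', '--codec', dest='codec', default='h264')
p.parse_args(['--codec', 'vp9'], namespace=cfg)
assert cfg.codec == 'vp9'  # attributes land on cfg, not on a fresh Namespace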
Example #14
    def test_add2parser(self):
        opt1 = Options.make([('foo', 'bar')], hej='med')
        main_parser = ArgumentParser()
        actions_group = main_parser.add_argument_group('foo')
        actions_group.add_argument('-b', '--bar', default='foo')
        opt2 = opt1.bind_copy_to_parser(main_parser.add_argument_group('bar'))


        # main parser
        args = main_parser.parse_args(['-fmar'])
        assert args.foo == 'mar'
        assert args.bar == 'foo'

        assert opt2.foo == 'bar'

        opt2.update_if_present(foo='mar', nothere=False)
        assert args.foo == 'mar'
        assert 'nothere' not in opt2

        opt2.foo = 'bar'
        opt2.update_if_present(args)
        assert opt2.foo == 'mar'
        assert opt1.foo == 'bar'

        opt2.foo = 'bar'
        main_parser.parse_args(['-fmar'], namespace=opt2)
        assert args.foo == 'mar'
        assert 'nothere' not in opt2
Example #15
def process_opt():
    parser = ArgumentParser()

    parser.add_argument("-p", dest="profile", default=None, help="Option: profile sync|cdn|backup|idle|regular. "
                                                                 "Example: ./executor.py -p sync")

    parser.add_argument("-o", dest="ops", default=10, help="Option: ops #. "
                                                           "Example: ./executor.py -o 5")

    parser.add_argument("-t", dest="itv", default=1, help="Option: itv #. "
                                                          "Example: ./executor.py -t 5")

    parser.add_argument("-f", dest="folder", default='stacksync_folder', help="Option: ftp folder, folder owncloud_folder|stacksync_folder "
                                                          "example: ./executor.py -f owncloud_folder")

    parser.add_argument("-x", dest="pid", default='StackSync', help="Option: ProcedureName, "
                                                                              "pid StackSync|OwnCloud "
                                                                              "example: ./executor.py -x OwnCloud")

    parser.add_argument("--out", dest="output", default='output', help="Folder for output files")
    opt = parser.parse_args()

    if not opt.itv:
        parser.print_help()
        print 'Example: ./executor.py -o 100 -p sync -t 1 -f owncloud_folder -x OwnCloud'
        sys.exit(1)

    print opt.profile
    print opt.ops
    print opt.itv

    return opt
Example #16
def test_get_proxy_from_args_bad(argstring):
	# Make sure bad arguments fail to validate
	parser = ArgumentParser()
	arguments.add_proxy_args(parser)
	
	with pytest.raises(SystemExit):
		parser.parse_args(argstring.split())
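For context, SystemExit is expected because argparse calls sys.exit() when validation fails; a standalone sketch (not the project's proxy arguments) that asserts the same behavior:

import pytest
from argparse import ArgumentParser

def test_invalid_choice_exits():
    parser = ArgumentParser()
    parser.add_argument('--mode', choices=['http', 'socks'])
    with pytest.raises(SystemExit):
        parser.parse_args('--mode bogus'.split())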
Example #17
def main():

    parser = ArgumentParser()
    sub_parser = parser.add_subparsers()

    # settings create
    create_parser = sub_parser.add_parser("create", help="create setup.py")
    create_parser.set_defaults(func=create_setuppy)

    # settings show-classifiers
    show_parser = sub_parser.add_parser(
        "show-classifiers",
        help="show classifiers list"
    )
    show_parser.set_defaults(func=show_classifiers)

    # setup test-env
    test_env_parser = sub_parser.add_parser(
        "setup-test-env",
        help="setup test environment"
    )
    test_env_parser.set_defaults(func=setup_test_environment)

    args = parser.parse_args()

    if hasattr(args, "func"):
        args.func(args)
    else:
        parser.parse_args(["-h"])
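A self-contained sketch of the set_defaults(func=...) dispatch pattern used above (names are illustrative, not from the original project):

from argparse import ArgumentParser

def greet(args):
    print("hello, %s" % args.name)

parser = ArgumentParser()
subparsers = parser.add_subparsers()
greet_parser = subparsers.add_parser("greet")
greet_parser.add_argument("name")
greet_parser.set_defaults(func=greet)

args = parser.parse_args(["greet", "world"])
args.func(args)  # prints "hello, world"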
Example #18
 def test_args(self):
     # we can create argparse arguments from `args`
     parser = ArgumentParser()
     for arg in OOConvProcessor.args:
         parser.add_argument(
             arg.short_name, arg.long_name, **arg.keywords)
     result = vars(parser.parse_args([]))
     # defaults
     assert result == {'oocp_output_format': 'html',
                       'oocp_pdf_version': False,
                       'oocp_pdf_tagged': False,
                       'oocp_hostname': 'localhost',
                       'oocp_port': 2002,
                       }
     # explicitly set value (different from default)
     result = vars(parser.parse_args(['-oocp-out-fmt', 'pdf',
                                      '-oocp-pdf-version', '1',
                                      '-oocp-pdf-tagged', '1',
                                      '-oocp-host', 'example.com',
                                      '-oocp-port', '1234', ]))
     assert result == {'oocp_output_format': 'pdf',
                       'oocp_pdf_version': True,
                       'oocp_pdf_tagged': True,
                       'oocp_hostname': 'example.com',
                       'oocp_port': 1234}
Example #19
def main():

    # Load configuration
    config = get_config((
        './kaslan.yaml',
        expanduser('~/.kaslan.yaml'),
        '/etc/kaslan.yaml',
    ))

    # Create main parser
    parser = ArgumentParser(description=__description__)
    parser.add_argument('-u', dest='vcenter_user', help='Override vCenter user', default=getpass.getuser())
    parser.add_argument('--host', dest='vcenter_host', help='Override vCenter host', default=config['vcenter_host'])
    parser.add_argument('--port', dest='vcenter_port', help='Override vCenter port', default=config['vcenter_port'])
    subparsers = parser.add_subparsers(dest='cmd')

    # Stdin parser
    parser_stdin = subparsers.add_parser('input', help='Process commands from input')
    parser_stdin.add_argument('filenames', help='files to use instead of stdin', nargs='*')

    # Command parsers
    for cmd in (datastore, clone, compute, disks, status, destroy):
        cmd.cli_setup(subparsers, config)

    # Parse arguments
    args = parser.parse_args()
    if args.cmd == 'input':
        for line in fileinput.input(args.filenames):
            args = parser.parse_args(line.split())
            args.func(args, config)
    else:
        args.func(args, config)
    print('')
Example #20
    def test_environment_hook_options(self):
        class TestingPlugin(BaseTestingPlugin):
            add_parser_arguments_called = 0
            from_args_called = 0

            @classmethod
            def add_parser_arguments(cls, parser, name, option_prefix,
                                     dest_prefix):
                cls.add_parser_arguments_called += 1
                return super(TestingPlugin, cls).add_parser_arguments(
                    parser, name, option_prefix, dest_prefix)

            @classmethod
            def from_args(cls, args, name, dest_prefix):
                cls.from_args_called += 1
                return super(TestingPlugin, cls).from_args(
                    args, name, dest_prefix)

        # Given
        extension = Extension(
            'testing-plugin', None, TestingPlugin, None)
        environment_manager = ExtensionManager.make_test_instance(
            [extension], namespace=PluginManager.ENVIRONMENT_HOOK,
        )
        hook_managers = [(PluginManager.ENVIRONMENT_HOOK, environment_manager)]
        plugin_manager = PluginManager.testing_plugin_manager(
            hook_managers=hook_managers, driver_managers=())
        parser = ArgumentParser(add_help=False)

        # When
        plugin_manager.add_plugin_arguments(parser)

        # Then
        self.assertEqual(TestingPlugin.add_parser_arguments_called, 1)
        actions = parser._actions
        self.assertEqual(len(actions), 1)
        action, = actions
        self.assertEqual(action.option_strings, ['--with-testing-plugin'])

        # When
        args = parser.parse_args([])
        enabled_plugins = plugin_manager.get_enabled_hook_plugins(
            plugin_manager.ENVIRONMENT_HOOK, args)

        # Then
        self.assertEqual(enabled_plugins, [])
        self.assertEqual(TestingPlugin.from_args_called, 1)

        # When
        args = parser.parse_args(['--with-testing-plugin'])
        enabled_plugins = plugin_manager.get_enabled_hook_plugins(
            plugin_manager.ENVIRONMENT_HOOK, args)

        # Then
        self.assertEqual(len(enabled_plugins), 1)

        plugin_obj, = enabled_plugins
        self.assertEqual(TestingPlugin.from_args_called, 2)
        self.assertTrue(plugin_obj.enabled)
Example #21
    def test_add_arguments_debug_mode(self):
        parser = ArgumentParser()
        DiscoverRunner.add_arguments(parser)

        ns = parser.parse_args([])
        self.assertFalse(ns.debug_mode)
        ns = parser.parse_args(["--debug-mode"])
        self.assertTrue(ns.debug_mode)
Example #22
def main():

    parser = ArgumentParser(usage="%(prog)s")
    common.setup_global_opts(parser)
    parser.parse_args()
    common.read_config(None)

    metadata.read_metadata(xref=True)
Example #23
 def __init__(self,prog,*argv):
   self.prog = os.path.basename(prog)
   descr = ('Listen for YouTube URLs, launch youtube-dl (roughly) on-demand, '
     'and download videos to current working directory.  If available, will '
     'trigger OS graphical notifications when new URLs are found and when '
     'youtube-dl is invoked.')
   parser = ArgumentParser(self.prog,description=descr)
   parser.parse_args(argv)
Example #24
def main():
    """Parses command line arguments and runs actions"""
    from argparse import ArgumentParser

    parser = ArgumentParser(description='Sanapano command line tool')
    parser.add_argument('action', choices=['runserver'])
    parser.parse_args()
    runserver()
Example #25
def run_gunicorn():
    """Exec gunicorn with our wsgi app.

    Settings are taken from environment variables as listed in the help text.
    This is intended to be called as a console_script entry point.
    """

    # this only exists to provide help/usage text
    parser = ArgumentParser(description=DESCRIPTION,
                            formatter_class=RawTextHelpFormatter)
    parser.parse_args()

    gunicorn_path = distutils.spawn.find_executable('gunicorn')
    if not gunicorn_path:
        print('error: gunicorn executable not found', file=sys.stderr)
        sys.exit(1)

    address = '%s:%s' % (
        env.source('REGISTRY_HOST'),
        env.source('REGISTRY_PORT')
    )

    args = [
        gunicorn_path, 'gunicorn',
        '--access-logfile', env.source('GUNICORN_ACCESS_LOG_FILE'),
        '--error-logfile', env.source('GUNICORN_ERROR_LOG_FILE'),
        '--max-requests', '100',
        '-k', 'gevent',
        '--graceful-timeout', env.source('GUNICORN_GRACEFUL_TIMEOUT'),
        '-t', env.source('GUNICORN_SILENT_TIMEOUT'),
        '-w', env.source('GUNICORN_WORKERS'),
        '-b', address,
    ]

    if env.source('SETTINGS_FLAVOR') != 'prod':
        args.append('--reload')

    user = env.source('GUNICORN_USER')
    group = env.source('GUNICORN_GROUP')
    if user or group:
        if getpass.getuser() == 'root':
            if user:
                logger.info('Downgrading privs to user %s' % user)
                args.append('-u')
                args.append(user)

            if group:
                logger.info('Downgrading privs to group %s' % group)
                args.append('-g')
                args.append(group)
        else:
            logger.warning('You asked us to drop privileges, but we are not root!')

    args += env.source('GUNICORN_OPTS')
    args.append('docker_registry.wsgi:application')
    # Stringify all args and call
    os.execl(*[str(v) for v in args])
Example #26
class Options(OrderedDict):
    def __init__(self, *args, **kwargs):
        self._short_args = OrderedDict(h='help')
        self._argsparser = ArgumentParser()
        super().__init__(*args, **kwargs)


    @staticmethod
    def _getter(key, self):
        return self[key]

    @staticmethod
    def _setter(key, self, value):
        self[key] = value

    def find_short_arg(self, key):
        shortarg = key[0]
        i = 1
        while shortarg in self._short_args and i < len(key):
            shortarg += key[i]
            i += 1
        self._short_args[shortarg] = key
        return shortarg

    def __setitem__(self, key, value):
        if key not in self or not hasattr(self, key):
            if not isinstance(key, str):
                raise ValueError('option names must be of type string')
            setattr(self.__class__, key, property(partial(self._getter, key),
                                        partial(self._setter, key)))

            arg = partial(self._argsparser.add_argument, '-' + self.find_short_arg(key), '--' + key, dest=key)
            kwargs = dict()
            if isinstance(value, dict):
                kwargs.update(value)
                value = value.get('default', None)
            else:
                kwargs['default'] = value

            if 'type' not in kwargs and value is not None:
                kwargs['type'] = type(value)
            arg(**kwargs)
        super().__setitem__(key, value)

    def parseargs(self, *args):
        if len(args) < 1:
            args = None
        else:
            args = [str(arg) for arg in args]
        self._argsparser.parse_args(args=args, namespace=self)

    @classmethod
    def make(cls, *args, **kwargs):
        class CustomOptions(cls):
            pass

        return CustomOptions(*args, **kwargs)
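A usage sketch for the Options mapping above (keys and values are illustrative):

# opts = Options.make([('host', 'localhost'), ('port', 8080)])
# opts.parseargs('--port', 9090)   # values are stringified, then cast back by type=int
# opts.port == 9090 and opts.host == 'localhost'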
Example #27
def main(argv):
    parser = ArgumentParser(prog="bootstrap", usage="./bootstrap.py <script>",
                            description=__doc__)
    parser.add_argument("script", nargs="*")
    parser.add_argument("-m", help="run library module as a script (terminates option list)")

    Options = collections.namedtuple("Options", ["script", "module"])
    if len(argv) == 1:
        options = Options(script=None, module=None)
    else:
        if argv[1] in ["-h", "--help"]:
            parser.print_help()
            return
        if argv[1] == "-m":
            if len(argv) < 3:
                parser.parse_args(argv[1:])
                return
            options = Options(script=None, module=argv[2:])
        else:
            options = Options(script=argv[1:], module=None)

    if options.script is not None:
        logger.info("Executing %s from source checkout", options.script)
        script = options.script[0]
        argv = options.script[1:]
        kind, target = find_executable(script)
        if kind == "path":
            run_file(target, argv)
        elif kind == "entry_point":
            run_entry_point(target, argv)
        else:
            logger.error("Script %s not found", options.script)
    elif options.module is not None:
        logging.info("Running module %s", options.module)
        import runpy
        module = options.module[0]
        try:
            old = sys.argv
            sys.argv = [None] + options.module[1:]
            runpy.run_module(module, run_name="__main__", alter_sys=True)
        finally:
            sys.argv = old
    else:
        logging.info("Running IPython by default")
        logger.info("Patch the sys.argv: %s", sys.argv)
        sys.path.insert(2, "")
        try:
            from IPython import embed
        except Exception as err:
            logger.error("Unable to execute iPython, using normal Python")
            logger.error(err)
            import code
            code.interact()
        else:
            embed()
Example #28
def main():
    parser = ArgumentParser(description=__doc__)
    parser.parse_args()

    root_env = get_conda_root_prefix()
    build_root = os.path.join(root_env, 'conda-bld')
    if os.path.isdir(build_root):
        print("Removing conda build root %s" % build_root)
        rm_rf(build_root)
    else:
        print("Conda build root %s does not exist" % build_root)
Example #29
def get_parser():
    parser = ArgumentParser()
    parser.add_argument('-v', '--version', action='store_true', help='show version')
    parser.add_argument('-f', '--cleanBuild', action='store_true', help='force to execute a clean build')
    parser.add_argument('-w', '--wait', action='store_true', help='make application wait for debugger')
    parser.add_argument('-a', '--all', action='store_true',
                        help="together with '-f', freeline will force to clean build all projects.")
    parser.add_argument('-c', '--clean', action='store_true', help='clean cache directory and workspace')
    parser.add_argument('-d', '--debug', action='store_true', help='show freeline debug output (NOT DEBUG APPLICATION)')
    # parser.add_argument('-i', '--init', action='store_true', help='init freeline project')
    parser.parse_args()
    return parser
Example #30
 def __init__(self):
     parser = ArgumentParser()
     parser.add_argument("--config", metavar="FILE", help="Loads configuration from FILE.")
     parser.add_argument(
         "--dump-config", help="Loads and dumps current configuration to standard out.", action="store_true"
     )
     parser.add_argument(
         "--dump-annotated",
         help=("Dumps default configuration (with comments) to standard out."),
         action="store_true",
     )
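     # Parse straight into this object: the options above become attributes
     # (self.config, self.dump_config, self.dump_annotated).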
     parser.parse_args(namespace=self)
Example #31
def main(argv):
    parser = ArgumentParser(
        description=
        "Convert file with data along rows into file with data down columns.")
    parser.add_argument("infile",
                        type=FileType('r'),
                        default=stdin,
                        nargs="?",
                        help="file to convert (default stdin)")
    parser.add_argument("-d", type=str, default=" ", help="delimiter")
    parser.add_argument("-o",
                        dest="outfile",
                        metavar="outfile",
                        type=FileType('w'),
                        default=stdout,
                        help="output file (default stdout)")

    args = parser.parse_args(argv[1:])

    # Read input
    data = {}
    N = None
    for line in args.infile.readlines():
        thisline = line.strip().split(args.d)
        if len(thisline) > 0:
            data[thisline[0]] = thisline[1:]
            if N is None:
                N = len(thisline[1:])
            else:
                if len(thisline[1:]) != N:
                    print("Rows have different numbers of elements. Aborting.")
                    exit(1)
Example #32 (fragment; the start of this example is missing)
                            help='The number of voxels to be used to compute the error and therefore'
                                 ' find the optimal hyperparameters. In general, more voxels used may'
                                 ' imply better generalization, but also more computation time and'
                                 ' use of resources')
    arg_parser.add_argument('--voxel-offset', type=int, default=10,
                            help="Number of voxels that will not be taken into account in all directions, "
                                 "both at the beginning and at the end. That is, for a voxel offset of v, "
                                 "and volumes with dimensions (x_dim, y_dim, z_dim), "
                                 "only the following voxels will be taken into account: "
                                 "[v:x_dim-v, v:y_dim-v, v:z_dim-v]")
    arg_parser.add_argument('--categories', nargs='+', type=int,
                            help='Category or categories (as they are represented '
                                 'in the Excel file) for which the hyperparameters should be found.')
    arg_parser.add_argument('--prefix', help='Prefix used in the result files')

    arguments = arg_parser.parse_args()
    config_file = arguments.configuration_file
    parameters = arguments.parameters
    error = arguments.error
    error_func = AVAILABLE_SCORES[arguments.error]
    N = arguments.iterations
    m = arguments.voxels
    voxel_offset = arguments.voxel_offset
    categories = arguments.categories
    prefix = arguments.prefix

    """ LOAD DATA USING DATALOADER """
    subjects, covariate_names, covariates, processing_parameters,  affine_matrix, output_dir, \
    results_io, type_data = helper_functions.load_data_from_config_file(config_file)

    if parameters:
Example #33 (fragment)
def dot_product(x, y):
    """A dot product of the matrix and the vector

    Args:
        x (np.array): numpy array of vectors of length y
        y (np.array): one dimensional numpy array

    Returns: np.array

    """
    return np.asarray([np.dot(i, y) for i in x])

if __name__ == '__main__':
    try:
        try:
            options = parser.parse_args()
        except Exception as e:
            parser.print_help()
            sys.exit()
        if options.verbose:
            start_time = time.time()
            start_date = time.strftime("%d-%m-%Y at %H:%M:%S")
            syserr("############## Started script on %s ##############\n" % start_date)
        try:
            main(options)
        except EmptyDataException as e:
            syserr(str(e) + "\n")
        if options.verbose:
            syserr("### Successfully finished in %i seconds, on %s ###\n" % (time.time() - start_time, time.strftime("%d-%m-%Y at %H:%M:%S")))
    except KeyboardInterrupt:
        syserr("Interrupted by user after %i seconds!\n" % (time.time() - start_time))
Example #34
def dm_iinfo(argv=sys.argv[1:]):
    def fmt_time(timestamp):
        time_fmt = '%Y-%m-%d %H:%M:%S'
        return datetime.datetime.fromtimestamp(timestamp).strftime(time_fmt)

    def format_progress(progress):
        return progress

    def print_value(maxlen, f, value, entry={}):
        colorizer = entry.get('colorizer', None)
        if 'fmt' in entry:
            value = entry['fmt'](value)
        if sys.version_info[0] == 2:
            if not isinstance(value, str) and not isinstance(value, unicode):
                value = str(value)
        else:
            if not isinstance(value, str):
                value = str(value)
        f = format_bold(('{0: <%d}' % maxlen).format(f))
        sep = ': '
        for line in value.split('\n'):
            if colorizer is not None:
                line = colorizer(line)
            print(f + sep + line)
            f = ('{0: <%d}' % maxlen).format('')
            sep = '  '

    def count_groups(fields, obj):
        current_group = ''
        ret = {'': 0}
        for entry in fields:
            if 'group' in entry:
                current_group = entry.get('group')
                ret[current_group] = 0
            elif 'field' in entry:
                f = entry.get('field')
                if obj.get(f, None) is not None:
                    ret[current_group] += 1
            elif 'fieldre' in entry:
                expr = re.compile(entry.get('fieldre'))
                ret[current_group] += sum(
                    [1 if expr.match(k) else 0 for k in obj.keys()])
        return ret

    fields = [{
        'group': 'Transfer'
    }, {
        'field': 'retries'
    }, {
        'field': 'status',
        'colorizer': format_status
    }, {
        'field': 'progress',
        'colorizer': format_progress
    }, {
        'field': 'errmsg',
        'colorizer': format_error
    }, {
        'field': 'time_created',
        'fmt': fmt_time
    }, {
        'field': 'transferred'
    }, {
        'field': 'mode'
    }, {
        'group': 'Local File'
    }, {
        'field': 'local_file'
    }, {
        'field': 'local_atime',
        'fmt': fmt_time
    }, {
        'field': 'local_ctime',
        'fmt': fmt_time
    }, {
        'field': 'local_size'
    }, {
        'field': 'checksum'
    }, {
        'group': 'Remote Object'
    }, {
        'field': 'remote_file'
    }, {
        'field': 'remote_size'
    }, {
        'field': 'remote_create_time',
        'fmt': fmt_time
    }, {
        'field': 'remote_modify_time',
        'fmt': fmt_time
    }, {
        'field': 'remote_checksum'
    }, {
        'field': 'collection'
    }, {
        'field': 'object'
    }, {
        'field': 'remote_owner_name'
    }, {
        'field': 'remote_owner_zone'
    }, {
        'field': 'remote_replica_number'
    }, {
        'field': 'remote_replica_status'
    }, {
        'group': 'DMF Data'
    }, {
        'fieldre': 'DMF_.*'
    }]
    parser = ArgumentParser(description='Get details for object.')
    parser.add_argument('file', type=str, help='object')
    args = parser.parse_args(argv)
    ensure_daemon_is_running()
    client = Client(DmIRodsServer.get_socket_file())
    code, result = client.request({"info": args.file})
    if code != ReturnCode.OK:
        print_request_error(code, result)
        sys.exit(8)
    obj = json.loads(result)
    if not obj:
        return
    maxlen = max([len(v) for v in obj.keys()]) + 2
    groups = count_groups(fields, obj)
    current_group = ''
    for entry in fields:
        if 'group' in entry:
            current_group = entry.get('group')
            if groups.get(current_group, 0) > 0:
                print("--------------------------")
                print(current_group)
                print("--------------------------")
        elif 'field' in entry:
            if groups.get(current_group, 0) > 0:
                f = entry.get('field')
                value = obj.get(f, None)
                if value is not None:
                    print_value(maxlen, f, value, entry)
        elif 'fieldre' in entry:
            if groups.get(current_group, 0) > 0:
                expr = re.compile(entry.get('fieldre'))
                for f, value in {
                        k: v
                        for k, v in obj.items() if expr.match(k)
                }.items():
                    print_value(maxlen, f, value, entry)
Example #35
def main(argv):
    parser = ArgumentParser(usage=__doc__.lstrip())
    parser.add_argument("--verbose",
                        "-v",
                        action="count",
                        default=1,
                        help="more verbosity")
    parser.add_argument(
        "--no-build",
        "-n",
        action="store_true",
        default=False,
        help="do not build the project (use system installed version)")
    parser.add_argument("--build-only",
                        "-b",
                        action="store_true",
                        default=False,
                        help="just build, do not run any tests")
    parser.add_argument("--doctests",
                        action="store_true",
                        default=False,
                        help="Run doctests in module")
    parser.add_argument("--refguide-check",
                        action="store_true",
                        default=False,
                        help="Run refguide check (do not run regular tests.)")
    parser.add_argument("--coverage",
                        action="store_true",
                        default=False,
                        help=("report coverage of project code. HTML output"
                              " goes under build/coverage"))
    parser.add_argument("--gcov",
                        action="store_true",
                        default=False,
                        help=("enable C code coverage via gcov (requires GCC)."
                              " gcov output goes to build/**/*.gc*"))
    parser.add_argument("--lcov-html",
                        action="store_true",
                        default=False,
                        help=("produce HTML for C code coverage information "
                              "from a previous run with --gcov. "
                              "HTML output goes to build/lcov/"))
    parser.add_argument("--mode",
                        "-m",
                        default="fast",
                        help="'fast', 'full', or something that could be "
                        "passed to nosetests -A [default: fast]")
    parser.add_argument("--submodule",
                        "-s",
                        default=None,
                        help="Submodule whose tests to run (cluster,"
                        " constants, ...)")
    parser.add_argument("--pythonpath",
                        "-p",
                        default=None,
                        help="Paths to prepend to PYTHONPATH")
    parser.add_argument("--tests",
                        "-t",
                        action='append',
                        help="Specify tests to run")
    parser.add_argument("--python",
                        action="store_true",
                        help="Start a Python shell with PYTHONPATH set")
    parser.add_argument("--ipython",
                        "-i",
                        action="store_true",
                        help="Start IPython shell with PYTHONPATH set")
    parser.add_argument("--shell",
                        action="store_true",
                        help="Start Unix shell with PYTHONPATH set")
    parser.add_argument("--debug",
                        "-g",
                        action="store_true",
                        help="Debug build")
    parser.add_argument("--parallel",
                        "-j",
                        type=int,
                        default=1,
                        help="Number of parallel jobs for build and testing")
    parser.add_argument("--show-build-log",
                        action="store_true",
                        help="Show build output rather than using a log file")
    parser.add_argument("--bench",
                        action="store_true",
                        help="Run benchmark suite instead of test suite")
    parser.add_argument("--bench-compare",
                        action="append",
                        metavar="BEFORE",
                        help=("Compare benchmark results of current HEAD to"
                              " BEFORE. Use an additional "
                              "--bench-compare=COMMIT to override HEAD with"
                              " COMMIT. Note that you need to commit your "
                              "changes first!"))
    parser.add_argument("args",
                        metavar="ARGS",
                        default=[],
                        nargs=REMAINDER,
                        help="Arguments to pass to Nose, Python or shell")
    parser.add_argument("--pep8",
                        action="store_true",
                        default=False,
                        help="Perform pep8 check with pycodestyle.")
    parser.add_argument("--doc",
                        action="append",
                        nargs="?",
                        const="html-scipyorg",
                        help="Build documentation")
    args = parser.parse_args(argv)

    if args.pep8:
        # os.system("flake8 scipy --ignore=F403,F841,F401,F811,F405,E121,E122,"
        #           "E123,E125,E126,E127,E128,E226,E231,E251,E265,E266,E302,"
        #           "E402,E501,E712,E721,E731,E741,W291,W293,W391,W503,W504"
        #           "--exclude=scipy/_lib/six.py")
        os.system("pycodestyle scipy benchmarks/benchmarks")
        sys.exit(0)

    if args.bench_compare:
        args.bench = True
        args.no_build = True  # ASV does the building

    if args.lcov_html:
        # generate C code coverage output
        lcov_generate()
        sys.exit(0)

    if args.pythonpath:
        for p in reversed(args.pythonpath.split(os.pathsep)):
            sys.path.insert(0, p)

    if args.gcov:
        gcov_reset_counters()

    if args.debug and args.bench:
        print("*** Benchmarks should not be run against debug version; "
              "remove -g flag ***")

    if not args.no_build:
        site_dir = build_project(args)
        sys.path.insert(0, site_dir)
        os.environ['PYTHONPATH'] = site_dir

    extra_argv = args.args[:]
    if extra_argv and extra_argv[0] == '--':
        extra_argv = extra_argv[1:]

    if args.python:
        if extra_argv:
            # Don't use subprocess, since we don't want to include the
            # current path in PYTHONPATH.
            sys.argv = extra_argv
            with open(extra_argv[0], 'r') as f:
                script = f.read()
            sys.modules['__main__'] = new_module('__main__')
            ns = dict(__name__='__main__', __file__=extra_argv[0])
            exec(script, ns)
            sys.exit(0)
        else:
            import code
            code.interact()
            sys.exit(0)

    if args.ipython:
        import IPython
        IPython.embed(user_ns={})
        sys.exit(0)

    if args.shell:
        shell = os.environ.get('SHELL', 'sh')
        print("Spawning a Unix shell...")
        os.execv(shell, [shell] + extra_argv)
        sys.exit(1)

    if args.doc:
        cmd = ["make", "-Cdoc", 'PYTHON="{}"'.format(sys.executable)]
        cmd += args.doc
        if args.parallel:
            cmd.append('SPHINXOPTS="-j{}"'.format(args.parallel))
        subprocess.run(cmd, check=True)
        sys.exit(0)

    if args.coverage:
        dst_dir = os.path.join(ROOT_DIR, 'build', 'coverage')
        fn = os.path.join(dst_dir, 'coverage_html.js')
        if os.path.isdir(dst_dir) and os.path.isfile(fn):
            shutil.rmtree(dst_dir)
        extra_argv += ['--cov-report=html:' + dst_dir]

    if args.refguide_check:
        cmd = [
            os.path.join(ROOT_DIR, 'tools', 'refguide_check.py'), '--doctests'
        ]
        if args.submodule:
            cmd += [args.submodule]
        os.execv(sys.executable, [sys.executable] + cmd)
        sys.exit(0)

    if args.bench:
        # Run ASV
        items = extra_argv
        if args.tests:
            items += args.tests
        if args.submodule:
            items += [args.submodule]

        bench_args = []
        for a in items:
            bench_args.extend(['--bench', a])

        if not args.bench_compare:
            cmd = [
                os.path.join(ROOT_DIR, 'benchmarks', 'run.py'), 'run', '-n',
                '-e', '--python=same'
            ] + bench_args
            os.execv(sys.executable, [sys.executable] + cmd)
            sys.exit(1)
        else:
            if len(args.bench_compare) == 1:
                commit_a = args.bench_compare[0]
                commit_b = 'HEAD'
            elif len(args.bench_compare) == 2:
                commit_a, commit_b = args.bench_compare
            else:
                parser.error("Too many commits to compare benchmarks for")

            # Check for uncommitted files
            if commit_b == 'HEAD':
                r1 = subprocess.call(
                    ['git', 'diff-index', '--quiet', '--cached', 'HEAD'])
                r2 = subprocess.call(['git', 'diff-files', '--quiet'])
                if r1 != 0 or r2 != 0:
                    print("*" * 80)
                    print("WARNING: you have uncommitted changes --- "
                          "these will NOT be benchmarked!")
                    print("*" * 80)

            # Fix commit ids (HEAD is local to current repo)
            p = subprocess.Popen(['git', 'rev-parse', commit_b],
                                 stdout=subprocess.PIPE)
            out, err = p.communicate()
            commit_b = out.strip()

            p = subprocess.Popen(['git', 'rev-parse', commit_a],
                                 stdout=subprocess.PIPE)
            out, err = p.communicate()
            commit_a = out.strip()

            cmd = [
                os.path.join(ROOT_DIR, 'benchmarks', 'run.py'), 'continuous',
                '-e', '-f', '1.05', commit_a, commit_b
            ] + bench_args
            os.execv(sys.executable, [sys.executable] + cmd)
            sys.exit(1)

    if args.build_only:
        sys.exit(0)
    else:
        __import__(PROJECT_MODULE)
        test = sys.modules[PROJECT_MODULE].test

    if args.submodule:
        tests = [PROJECT_MODULE + "." + args.submodule]
    elif args.tests:
        tests = args.tests
    else:
        tests = None

    # Run the tests

    if not args.no_build:
        test_dir = site_dir
    else:
        test_dir = os.path.join(ROOT_DIR, 'build', 'test')
        if not os.path.isdir(test_dir):
            os.makedirs(test_dir)

    shutil.copyfile(os.path.join(ROOT_DIR, '.coveragerc'),
                    os.path.join(test_dir, '.coveragerc'))

    cwd = os.getcwd()
    try:
        os.chdir(test_dir)
        result = test(args.mode,
                      verbose=args.verbose,
                      extra_argv=extra_argv,
                      doctests=args.doctests,
                      coverage=args.coverage,
                      tests=tests,
                      parallel=args.parallel)
    finally:
        os.chdir(cwd)

    if isinstance(result, bool):
        sys.exit(0 if result else 1)
    elif result.wasSuccessful():
        sys.exit(0)
    else:
        sys.exit(1)
Example #36
            outside this range (although they are allowed for testing purposes
            """
    print intro
    state = BoatState()
    connection = SerialConnection(port=port)
    try:
        while 1:
            rudder = int(input("Enter rudder position: "))
            sails = int(input("Enter sails position: "))
            state.set_pos((rudder, sails))
            transmit_serial(state, connection)
            print state
    except KeyboardInterrupt:
        print "\nINTERRUPT PROGRAM HALT"


if __name__ == "__main__":
    argparser = ArgumentParser(description="Manual Servo Control")
    argparser.add_argument('-t',
                           action='store',
                           dest='run_number',
                           help='run number used in logging')
    argparser.add_argument('-port',
                           action='store',
                           dest='port',
                           default='/dev/ttyACM0',
                           help='port for uploader arduino')
    r = argparser.parse_args()
    log = (r.run_number is not None)
    main(port=r.port, log=log, logfilenum=r.run_number)
Example #37
def main(test=False):
    if test:
        sbdir = osp.join(osp.dirname(__file__),
                     os.pardir, os.pardir, os.pardir, 'sandbox')
        tmpdir = osp.join(sbdir, 'tobedeleted')

        # fname = osp.join(tmpdir, 'scipy-0.10.1.win-amd64-py2.7.exe')
        fname = osp.join(sbdir, 'VTK-5.10.0-Qt-4.7.4.win32-py2.7.exe')
        print(Package(fname))
        sys.exit()
        target = osp.join(utils.BASE_DIR, 'build',
                      'winpython-2.7.3', 'python-2.7.3')
        fname = osp.join(utils.BASE_DIR, 'packages.src', 'docutils-0.9.1.tar.gz')

        dist = Distribution(target, verbose=True)
        pack = Package(fname)
        print(pack.description)
        # dist.install(pack)
        # dist.uninstall(pack)
    else:

        parser = ArgumentParser(description="WinPython Package Manager: install, "\
                        "uninstall or upgrade Python packages on a Windows "\
                        "Python distribution like WinPython.")
        parser.add_argument('fname', metavar='package',
                    type=str if py3compat.PY3 else unicode,
                    help='path to a Python package')
        parser.add_argument('-t', '--target', dest='target', default=sys.prefix,
                    help='path to target Python distribution '\
                         '(default: "%s")' % sys.prefix)
        parser.add_argument('-i', '--install', dest='install',
                    action='store_const', const=True, default=False,
                    help='install package (this is the default action)')
        parser.add_argument('-u', '--uninstall', dest='uninstall',
                    action='store_const', const=True, default=False,
                    help='uninstall package')
        args = parser.parse_args()

        if args.install and args.uninstall:
            raise RuntimeError("Incompatible arguments: --install and --uninstall")

        if not args.install and not args.uninstall:
            args.install = True

        if not osp.isfile(args.fname):
            raise IOError("File not found: %s" % args.fname)

        if utils.is_python_distribution(args.target):
            dist = Distribution(args.target)
            try:
                package = Package(args.fname)
                if package.is_compatible_with(dist):
                    if args.install:
                        dist.install(package)
                    else:
                        dist.uninstall(package)
                else:
                    raise RuntimeError("Package is not compatible with Python "\
                               "%s %dbit" % (dist.version, dist.architecture))
            except NotImplementedError:
                raise RuntimeError("Package is not (yet) supported by WPPM")
        else:
            raise WindowsError("Invalid Python distribution %s" % args.target)
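The manual check for --install combined with --uninstall above can also be expressed with a mutually exclusive group, letting argparse produce the error itself. A minimal sketch with only the two flags (the remaining options are omitted):

from argparse import ArgumentParser

parser = ArgumentParser(description="WinPython Package Manager")
group = parser.add_mutually_exclusive_group()
group.add_argument('-i', '--install', action='store_true',
                   help='install package (this is the default action)')
group.add_argument('-u', '--uninstall', action='store_true',
                   help='uninstall package')

args = parser.parse_args(['-i'])
if not args.install and not args.uninstall:
    args.install = True   # keep install as the default action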
Ejemplo n.º 38
0
def shell():
    version_too_old = False
    if sys.version_info[0] == 2:
        if sys.version_info < (2, 7):
            version_too_old = True
    elif sys.version_info.major == 3 and sys.version_info < (3, 5):
        version_too_old = True
    if version_too_old:
        print('PyRadio requires python 2.7 or 3.5+...')
        sys.exit(1)

    requested_player = ''
    parser = ArgumentParser(description='Curses based Internet radio player')
    parser.add_argument('-s',
                        '--stations',
                        default='',
                        help='Use specified station CSV file.')
    parser.add_argument('-p',
                        '--play',
                        nargs='?',
                        default='False',
                        help='Start and play. '
                        'The value is num station or empty for random.')
    parser.add_argument(
        '-u',
        '--use-player',
        default='',
        help='Use specified player. '
        'A comma-separated list can be used to specify detection order. '
        'Supported players: mpv, mplayer, vlc.')
    parser.add_argument('-a',
                        '--add',
                        action='store_true',
                        help='Add station to list.')
    parser.add_argument('-ls',
                        '--list-playlists',
                        action='store_true',
                        help='List of available playlists in config dir.')
    parser.add_argument('-l',
                        '--list',
                        action='store_true',
                        help='List of available stations in a playlist.')
    parser.add_argument('-t',
                        '--theme',
                        default='',
                        help='Use specified theme.')
    parser.add_argument('-tlp',
                        '--toggle-load-last-playlist',
                        action='store_true',
                        help='Toggle autoload last opened playlist.')
    parser.add_argument(
        '-scd',
        '--show-config-dir',
        action='store_true',
        help='Print config directory [CONFIG DIR] location and exit.')
    parser.add_argument(
        '-ocd',
        '--open-config-dir',
        action='store_true',
        help='Open config directory [CONFIG DIR] with default file manager.')
    parser.add_argument(
        '-ep',
        '--extra-player_parameters',
        default=None,
        help=
        "Provide extra player parameters as a string. The parameter is saved in the configuration file and is activated for the current session. The string\'s format is [player_name:parameters]. player_name can be 'mpv', 'mplayer' or 'vlc'. Alternative format to pass a profile: [player_name:profile:profile_name]. In this case, the profile_name must be a valid profile defined in the player\'s config file (not for VLC)."
    )
    parser.add_argument(
        '-ap',
        '--active-player-param-id',
        default=0,
        help=
        'Specify the extra player parameter set to be used with the default player. ACTIVE_PLAYER_PARAM_ID is 1-11 (refer to the output of the -lp option)'
    )
    parser.add_argument('-lp',
                        '--list-player-parameters',
                        default=None,
                        action='store_true',
                        help='List extra players parameters.')
    if platform.startswith('win'):
        parser.add_argument('--exe',
                            action='store_true',
                            default=False,
                            help='Show EXE file location (Windows only).')
    parser.add_argument('-U',
                        '--update',
                        action='store_true',
                        help='Update PyRadio.')
    if platform.startswith('linux'):
        parser.add_argument('--user',
                            action='store_true',
                            default=False,
                            help='Install only for current user (linux only).')
    parser.add_argument('-R',
                        '--uninstall',
                        action='store_true',
                        help='Uninstall PyRadio.')
    parser.add_argument('--unlock',
                        action='store_true',
                        help="Remove sessions' lock file.")
    parser.add_argument('-d',
                        '--debug',
                        action='store_true',
                        help='Start pyradio in debug mode.')
    parser.add_argument('-V',
                        '--version',
                        action='store_true',
                        help='Display version information.')
    ''' extra downloads
        only use them after the developer says so,
        for debug purposes only
            --devel           download official devel branch
            --sng-master      download developer release (master)
            --sng-devel       download developer devel branch
            --force-update    give a version greater than the current one,
                              to check update notification functionality
    '''
    parser.add_argument('--sng-master', action='store_true', help=SUPPRESS)
    parser.add_argument('--sng-devel', action='store_true', help=SUPPRESS)
    parser.add_argument('--devel', action='store_true', help=SUPPRESS)
    parser.add_argument('--force-update', default='', help=SUPPRESS)
    args = parser.parse_args()
    sys.stdout.flush()

    config_already_read = False

    with pyradio_config_file() as pyradio_config:

        if args.version:
            pyradio_config.get_pyradio_version()
            print('PyRadio version: {}'.format(
                pyradio_config.current_pyradio_version))
            print('Python version: {}'.format(
                sys.version.replace('\n', ' ').replace('\r', ' ')))
            pyradio_config.read_config()
            if pyradio_config.distro != 'None':
                print('Distribution: {}'.format(pyradio_config.distro))
            sys.exit()

        if platform.startswith('win'):
            if args.exe:
                print_exe_paths()
                sys.exit()

        if args.toggle_load_last_playlist:
            if pyradio_config.locked:
                print('Error: Another instance of PyRadio is already running!')
                print('       Please close it and try again...')
                sys.exit(1)
            else:
                read_config(pyradio_config)
                pyradio_config.opts['open_last_playlist'][
                    1] = not pyradio_config.opts['open_last_playlist'][1]
                pyradio_config.opts['dirty_config'][1] = True
                print('Setting auto load last playlist to: {}'.format(
                    pyradio_config.opts['open_last_playlist'][1]))
                save_config()
            sys.exit(0)

        package = 0
        if args.uninstall or args.update:
            if args.sng_master:
                package = 1
            elif args.sng_devel:
                package = 2
            elif args.devel:
                package = 3
            if not config_already_read:
                read_config(pyradio_config)
                config_already_read = True
            if pyradio_config.distro != 'None' and \
                    not platform.startswith('win'):
                no_update(args.uninstall)

        if args.update:
            if package == 0:
                pyradio_config.get_pyradio_version()
                last_tag = get_github_tag()
                if last_tag:
                    print('Released version   :  {}'.format(last_tag))
                    print('Installed version  :  {}'.format(
                        pyradio_config.current_pyradio_version))
                    if version_string_to_list(
                            last_tag) <= version_string_to_list(
                                pyradio_config.current_pyradio_version):
                        print(
                            'Latest version already installed. Nothing to do....'
                        )
                        sys.exit()
                else:
                    print(
                        'Error reading online version.\nPlease make sure you are connected to the internet and try again.'
                    )
                    sys.exit(1)

            python_version_to_use = 3 if PY3 else 2
            try:
                upd = PyRadioUpdate(
                    package=package,
                    python_version_to_use=python_version_to_use)
                if platform.startswith('linux'):
                    upd.user = args.user
                upd.update_pyradio()
            except RuntimeError:
                upd = PyRadioUpdateOnWindows(
                    package=package,
                    python_version_to_use=python_version_to_use)
                upd.update_or_uninstall_on_windows(mode='update-open')
            sys.exit()

        if args.uninstall:
            python_version_to_use = 3 if PY3 else 2
            try:
                upd = PyRadioUpdate(
                    package=package,
                    python_version_to_use=python_version_to_use)
                upd.remove_pyradio()
            except RuntimeError:
                upd = PyRadioUpdateOnWindows(
                    package=package,
                    python_version_to_use=python_version_to_use)
                upd.update_or_uninstall_on_windows(mode='uninstall-open',
                                                   from_pyradio=True)
            sys.exit()
        ''' check conflicting parameters '''
        if args.active_player_param_id and \
                args.extra_player_parameters:
            print(
                'Error: You cannot use parameters "-ep" and "-ap" together!\n')
            sys.exit(1)
        ''' user specified extra player parameter '''
        if args.active_player_param_id:
            try:
                a_param = int(args.active_player_param_id)
            except ValueError:
                print('Error: Parameter -ap is not a number\n')
                sys.exit(1)
            if 1 <= a_param <= 11:
                pyradio_config.user_param_id = a_param
            else:
                print('Error: Parameter -ap must be between 1 and 11')
                print('       Actually, it must be between 1 and the maximum')
                print('       number of parameters for your default player.\n')
                args.list_player_parameters = True
        ''' list extra player parameters '''
        if args.list_player_parameters:
            print('PyRadio Players Extra Parameters')
            print(32 * '-')
            read_config(pyradio_config)
            default_player_name = pyradio_config.opts['player'][1].replace(
                ' ', '').split(',')[0]
            if default_player_name == '':
                default_player_name = SUPPORTED_PLAYERS[0]
            for a_player in SUPPORTED_PLAYERS:
                if default_player_name == a_player:
                    print('Player: ' + a_player + ' (default)')
                else:
                    print('Player: ' + a_player)
                default = 0
                for i, a_param in enumerate(
                        pyradio_config.saved_params[a_player]):
                    if i == 0:
                        default = int(a_param)
                    else:
                        str_default = '(default)' if i == default else ''
                        count = str(i) if i > 9 else ' ' + str(i)
                        print('    {0}. {1} {2}'.format(
                            count, a_param, str_default))
                print('')
            sys.exit()
        ''' extra player parameters '''
        if args.extra_player_parameters:
            if ':' in args.extra_player_parameters:
                if pyradio_config.locked:
                    print('Error: This session is locked!')
                    print(
                        '       Please exit any other instances of the program'
                    )
                    print('       that are currently running and try again.')
                    sys.exit(1)
                else:
                    if args.extra_player_parameters.startswith('vlc:profile'):
                        print('Error in parameter: "-ep".')
                        print('  VLC does not support profiles\n')
                        sys.exit()
                    else:
                        pyradio_config.command_line_params = args.extra_player_parameters
            else:
                print('Error in parameter: "-ep".')
                print('  Parameter format: "player_name:parameters"')
                print(
                    '                 or "player_name:profile:name_of_profile"\n'
                )
                sys.exit()

        if args.unlock:
            pyradio_config.locked = False
            pyradio_config.force_to_remove_lock_file = True
            sys.exit()

        if args.show_config_dir:
            print('PyRadio config dir: "{}"'.format(
                pyradio_config.stations_dir))
            sys.exit()

        if args.open_config_dir:
            open_conf_dir(pyradio_config)
            sys.exit()

        if args.list_playlists:
            pyradio_config.list_playlists()
            sys.exit()

        if args.list is False and args.add is False:
            print('Reading config...')
        if not config_already_read:
            read_config(pyradio_config)
            config_already_read = True

        if args.use_player != '':
            requested_player = args.use_player

        if args.list is False and args.add is False:
            print('Reading playlist...')
        sys.stdout.flush()
        is_last_playlist = False
        if pyradio_config.open_last_playlist:
            last_playlist = pyradio_config.get_last_playlist()
            if last_playlist:
                args.stations = last_playlist
                is_last_playlist = True
        ret = pyradio_config.read_playlist_file(
            stationFile=args.stations, is_last_playlist=is_last_playlist)
        if ret < 0:
            print_playlist_selection_error(args.stations, pyradio_config, ret)

        # No need to parse the file if we add station
        # Actually we do need to do so now, so that we
        # handle 2-column vs. 3-column playlists
        if args.add:
            if sys.version_info < (3, 0):
                params = raw_input("Enter the name: "), raw_input(
                    "Enter the url: "), raw_input(
                        "Enter the encoding (leave empty for '" +
                        pyradio_config.default_encoding + "'): ")
            else:
                params = input("Enter the name: "), input(
                    "Enter the url: "), input(
                        "Enter the encoding (leave empty for '" +
                        pyradio_config.default_encoding + "'): ")
            msg = ('name', 'url')
            for i, a_param in enumerate(params):
                if i < 2:
                    if a_param.strip() == '':
                        print('** Error: No {} entered. Aborting...'.format(
                            msg[i]))
                        sys.exit(1)
            ret = pyradio_config.append_station(params, args.stations)
            if ret < 0:
                print_playlist_selection_error(args.stations, pyradio_config,
                                               ret)
            sys.exit()

        if args.list:
            header_format_string, format_string = get_format_string(
                pyradio_config.stations)
            header_string = header_format_string.format(
                '[Name]', '[URL]', '[Encoding]')
            print(header_string)
            print(len(header_string) * '-')
            for num, a_station in enumerate(pyradio_config.stations):
                if a_station[2]:
                    encoding = a_station[2]
                else:
                    encoding = pyradio_config.default_encoding
                print(
                    format_string.format(str(num + 1), a_station[0],
                                         a_station[1], encoding))
            sys.exit()

        if args.debug:
            __configureLogger()
            if platform.startswith('win'):
                print(
                    'Debug mode activated\n  printing messages to file: "{}\\pyradio.log"'
                    .format(getenv('USERPROFILE')))
            else:
                print(
                    'Debug mode activated; printing messages to file: "~/pyradio.log"'
                )
        else:
            ''' Refer to https://docs.python.org/3.7/howto/logging.html
                section "What happens if no configuration is provided"
            '''
            logging.raiseExceptions = False
            logging.lastResort = None

        if requested_player == '':
            requested_player = pyradio_config.player
        #else:
        #    pyradio_config.requested_player = requested_player

        if args.play == 'False':
            if args.stations == '':
                args.play = pyradio_config.default_station
        elif args.play is not None:
            try:
                check_int = int(args.play)
            except:
                print('Error: Invalid parameter (-p ' + args.play + ')')
                sys.exit(1)
        if args.play == '-1':
            args.play = 'False'
        ''' get auto play last playlist data '''
        if pyradio_config.last_playlist_to_open != []:
            pre_select = pyradio_config.last_playlist_to_open[1]
            if pyradio_config.last_playlist_to_open[2] > -1:
                args.play = str(pyradio_config.last_playlist_to_open[2] + 1)
            else:
                args.play = 'False'
        else:
            pre_select = 'False'

        theme_to_use = args.theme
        if not theme_to_use:
            theme_to_use = pyradio_config.theme

        # Starts the radio TUI.
        pyradio = PyRadio(pyradio_config,
                          play=args.play,
                          pre_select=pre_select,
                          req_player=requested_player,
                          theme=theme_to_use,
                          force_update=args.force_update)
        ''' Setting ESCAPE key delay to 25ms
            Refer to: https://stackoverflow.com/questions/27372068/why-does-the-escape-key-have-a-delay-in-python-curses
        '''
        environ.setdefault('ESCDELAY', '25')
        ''' set window title '''
        if platform.startswith('win'):
            import ctypes
            try:
                if pyradio_config.locked:
                    win_title = 'PyRadio: Your Internet Radio Player (Session Locked)'
                else:
                    win_title = 'PyRadio: Your Internet Radio Player'
                ctypes.windll.kernel32.SetConsoleTitleW(win_title)
            except:
                pass
        else:
            try:
                if pyradio_config.locked:
                    sys.stdout.write(
                        '\x1b]2;PyRadio: Your Internet Radio Player (Session Locked)\x07'
                    )
                else:
                    sys.stdout.write(
                        '\x1b]2;PyRadio: Your Internet Radio Player\x07')
            except:
                pass
        sys.stdout.flush()
        ''' curses wrapper '''
        curses.wrapper(pyradio.setup)
        ''' curses is off '''
        if pyradio.setup_return_status:
            if pyradio_config.WIN_UNINSTALL and platform.startswith('win'):
                from msvcrt import getwch
                from os import sep
                import subprocess
                the_path = __file__.split(sep)
                the_file = sep.join(the_path[:-1]) + sep + 'install.py'
                print(
                    '\nTo complete the process you will have to execute a batch file.'
                )
                print(
                    'Windows Explorer will open the location of the batch file to run.'
                )
                print('')
                print('Please double click')
                print('')
                print('    uninstall.bat')
                print('')
                print('to remove PyRadio from your system.')
                print('')
                print(
                    'After you are done, you can delete the folder it resides in.'
                )
                print('\nPress any key to continue...', end='', flush=True)
                getwch()
                #print('\nPress any key to exit...', end='', flush=True)
                #getwch()
                subprocess.call('python ' + the_file + ' -R',
                                stdout=subprocess.DEVNULL,
                                stderr=subprocess.DEVNULL)
                sys.exit()

            if pyradio_config.WIN_PRINT_PATHS and platform.startswith('win'):
                ''' print exe path '''
                print('')
                print_exe_paths()

            if pyradio_config.WIN_MANAGE_PLAYERS and platform.startswith(
                    'win'):
                ''' manage players'''
                from .win import install_player
                install_player()

            elif pyradio_config.PROGRAM_UPDATE:
                if platform.startswith('win'):
                    upd = PyRadioUpdateOnWindows()
                    upd.update_or_uninstall_on_windows(mode='update-open')
                else:
                    upd = PyRadioUpdate()
                    upd.user = is_pyradio_user_installed()
                    upd.update_pyradio()
            else:
                print('\nThank you for using PyRadio. Cheers!')
        else:
            print(
                '\nThis terminal cannot display colors.\nPyRadio cannot function in such a terminal.\n'
            )
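The -ap value above is validated by hand after parsing (an int conversion plus a 1-11 range check). A hypothetical alternative is to let argparse do both, using type=int together with choices; the surrounding options are omitted in this sketch:

from argparse import ArgumentParser

parser = ArgumentParser(description='Curses based Internet radio player')
parser.add_argument('-ap', '--active-player-param-id',
                    type=int,
                    default=0,             # 0 keeps the "not specified" meaning
                    choices=range(1, 12),
                    metavar='[1-11]',
                    help='Extra player parameter set to use with the default player (1-11)')

args = parser.parse_args(['-ap', '3'])
print(args.active_player_param_id)   # 3; values outside 1-11 are rejected by argparse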
Ejemplo n.º 39
0
def main():
    parser = ArgumentParser(
        description=
        'Read GSF file and create an ESRI shape file of the trackplot.',
        epilog=
        'Example: \n To convert a single file use -i c:/temp/myfile.gsf \n to convert GSF files in a folder use -i c:/temp/*.gsf\n To convert all .GSF files recursively in a folder, use -r -i c:/temp \n To convert all files recursively from the current folder, use -r -i ./ \n',
        formatter_class=RawTextHelpFormatter)
    parser.add_argument(
        '-i',
        dest='inputFile',
        action='store',
        help=
        '-i <filename.gsf> : input filename to process. It can also be a wildcard, e.g. *.gsf'
    )
    parser.add_argument(
        '-o',
        dest='outputFile',
        action='store',
        default='track.shp',
        help=
        '-o <SHPfilename.shp> : output filename to create. e.g. trackplot.shp [Default: track.shp]'
    )
    parser.add_argument(
        '-s',
        dest='step',
        action='store',
        default='1',
        help=
        'step size in seconds.  Useful to reduce the complexity of the feature, which keeps ArcMap fast. [Default: 1]'
    )
    parser.add_argument('-r',
                        action='store_true',
                        default=False,
                        dest='recursive',
                        help='-r : search recursively.  [Default: False]')
    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(1)

    args = parser.parse_args()
    # we need to remember the previous record so we only create unique values, not duplicates
    fileOut = args.outputFile
    stepSize = float(args.step)
    if not fileOut.lower().endswith('.shp'):
        fileOut += '.shp'

    fileCounter = 0
    matches = []
    lastTimeStamp = 0
    trackRecordCount = 0

    if args.recursive:
        for root, dirnames, filenames in os.walk(
                os.path.dirname(args.inputFile)):
            for f in fnmatch.filter(filenames, '*.GSF'):
                matches.append(os.path.join(root, f))
                print(matches[-1])
    else:
        for filename in glob(args.inputFile):
            matches.append(filename)
        print(matches)
    if len(matches) == 0:
        print("Nothing found to convert, quitting")
        exit()

    if os.path.isfile(fileOut):
        try:
            # Create a shapefile reader
            r = shapefile.Reader(fileOut)
            # Create a shapefile writer
            # using the same shape type
            # as our reader
            w = shapefile.Writer(r.shapeType)
            # Copy over the existing dbf fields
            w.fields = list(r.fields)
            # Copy over the existing dbf records
            w.records.extend(r.records())
            # Copy over the existing polygons
            w._shapes.extend(r.shapes())
        except shapefile.error:
            print("Problem opening existing shape file, aborting!")
            exit()
    else:
        # w = shapefile.Writer(shapefile.POINTZ)
        w = shapefile.Writer(shapefile.POLYLINE)
        w.autoBalance = 1
        w.field("LineName", "C")
        # w.field("WaterDepth", "N")
        w.field("UNIXTime", "N")
        w.field("SurveyDate", "D")

    for filename in matches:
        # print ("processing file: %s" % filename)
        line_parts = []
        line = []

        r = pygsf.GSFREADER(filename)
        start_time = time.time()  # time  the process
        navigation = r.loadnavigation()
        for update in navigation:
            if update[0] - lastTimeStamp > stepSize:
                line.append([float(update[1]), float(update[2])])
                trackRecordCount += 1
                lastTimeStamp = update[0]
        # now add the very last update
        line.append([float(navigation[-1][1]), float(navigation[-1][2])])

        line_parts.append(line)
        w.line(parts=line_parts)
        # now add to the shape file.
        recTimeStamp = from_timestamp(
            navigation[0][0]).strftime("%Y/%m/%d %H:%M:%S")
        recDate = from_timestamp(navigation[0][0]).strftime("%Y%m%d")
        # depth = 1111.123
        w.record(os.path.basename(filename), int(navigation[0][0]), recDate)
        # w.record(os.path.basename(filename), depth, navigation[0][0], recDate)

        update_progress(
            "Processed: %s (%d/%d)" % (filename, fileCounter, len(matches)),
            (fileCounter / len(matches)))
        lastTimeStamp = update[0]
        fileCounter += 1
        r.close()

    update_progress("Process Complete: ", (fileCounter / len(matches)))
    print("Saving shapefile: %s position updates added %d" %
          (fileOut, trackRecordCount))
    w.save(fileOut)

    # now write out a prj file so the data has a spatial Reference
    f = open(fileOut.replace('.shp', '.prj'), 'w')
    f.write(
        'GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137.0,298.257223563]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]]'
    )  # python will convert \n to os.linesep
    f.close()  # you can omit in most cases as the destructor will call it
    print("Duration %.3fs" % (time.time() - start_time))  # time the process
Ejemplo n.º 40
0
def parse_args():
    '''
    Parse the input arguments, use -h for help.
    '''

    parser = ArgumentParser(description='IS mapper')

    parser.add_argument("--version",
                        action='version',
                        version='%(prog)s ' + ismap_version)
    # Inputs
    parser.add_argument('--runtype',
                        type=str,
                        required=True,
                        help='"typing" or "improvement"')
    parser.add_argument('--reads',
                        nargs='+',
                        type=str,
                        required=False,
                        help='Paired end reads for analysing (can be gzipped)')
    parser.add_argument(
        '--forward',
        type=str,
        required=False,
        default='_1',
        help='Identifier for forward reads if not in MiSeq format (default _1)'
    )
    parser.add_argument(
        '--reverse',
        type=str,
        required=False,
        default='_2',
        help='Identifier for reverse reads if not in MiSeq format (default _2)'
    )
    parser.add_argument(
        '--queries',
        type=str,
        required=True,
        help=
        'Multifasta file for query gene(s) (eg: insertion sequence) that will be mapped to.'
    )
    parser.add_argument('--assemblies',
                        nargs='+',
                        type=str,
                        required=False,
                        help='Contig assemblies, one for each read set')
    parser.add_argument(
        '--assemblyid',
        type=str,
        required=False,
        help=
        'Identifier for assemblies eg: sampleName_contigs (specify _contigs) or sampleName_assembly (specify _assembly). Do not specify extension.'
    )
    parser.add_argument(
        '--extension',
        type=str,
        required=False,
        help=
        'Extension for assemblies (eg: .fasta, .fa, .gbk, default is .fasta)',
        default='.fasta')
    parser.add_argument(
        '--typingRef',
        type=str,
        required=False,
        help='Reference genome for typing against in genbank format')
    parser.add_argument(
        '--type',
        type=str,
        required=False,
        default='fasta',
        help=
        'Indicator for contig assembly type, genbank or fasta (default fasta)')
    parser.add_argument(
        '--path',
        type=str,
        required=False,
        default='',
        help=
        'Path to folder where scripts are (only required for development, default is VLSCI path).'
    )
    # Parameters
    parser.add_argument(
        '--cutoff',
        type=int,
        required=False,
        default=6,
        help=
        'Minimum depth for mapped region to be kept in bed file (default 6)')
    parser.add_argument(
        '--min_range',
        type=str,
        required=False,
        default='0.2',
        help=
        'Minimum percent size of the gap to be called a known hit (default 0.2, or 20 percent)'
    )
    parser.add_argument(
        '--max_range',
        type=str,
        required=False,
        default='1.1',
        help=
        'Maximum percent size of the gap to be called a known hit (default 1.1, or 110 percent)'
    )
    parser.add_argument(
        '--merging',
        type=str,
        required=False,
        default='100',
        help=
        'Value for merging left and right hits in bed files together to simply calculation of closest and intersecting regions (default 100).'
    )
    parser.add_argument('--a',
                        action='store_true',
                        required=False,
                        help='Switch on all alignment reporting for bwa.')
    parser.add_argument('--T',
                        type=str,
                        required=False,
                        default='30',
                        help='Mapping quality score for bwa (default 30).')
    parser.add_argument('--t',
                        type=str,
                        required=False,
                        default='1',
                        help='Number of threads for bwa (default 1).')
    parser.add_argument(
        '--min_clip',
        type=int,
        required=False,
        default=10,
        help=
        'Minimum size for softclipped region to be extracted from initial mapping (default 10).'
    )
    parser.add_argument(
        '--max_clip',
        type=int,
        required=False,
        default=30,
        help='Maximum size for softclipped regions to be included (default 30).'
    )
    # Options for table output (typing)
    parser.add_argument(
        '--cds',
        nargs='+',
        type=str,
        required=False,
        default=['locus_tag', 'gene', 'product'],
        help=
        'qualifiers to look for in reference genbank for CDS features (default locus_tag gene product)'
    )
    parser.add_argument(
        '--trna',
        nargs='+',
        type=str,
        required=False,
        default=['locus_tag', 'product'],
        help=
        'qualifiers to look for in reference genbank for tRNA features (default locus_tag product)'
    )
    parser.add_argument(
        '--rrna',
        nargs='+',
        type=str,
        required=False,
        default=['locus_tag', 'product'],
        help=
        'qualifiers to look for in reference genbank for rRNA features (default locus_tag product)'
    )
    parser.add_argument(
        '--igv',
        action='store_true',
        help=
        'format of output bedfile - if True, adds IGV trackline and formats 4th column for hovertext display'
    )
    parser.add_argument(
        '--chr_name',
        type=str,
        required=False,
        default='not_specified',
        help=
        'chromosome name for bedfile - must match genome name to load in IGV (default = genbank accession)'
    )
    # Reporting options
    parser.add_argument(
        '--log',
        action='store_true',
        required=False,
        help='Switch on logging to file (otherwise log to stdout)')
    parser.add_argument(
        '--output',
        type=str,
        required=False,
        help=
        'Prefix for output files. If not supplied, prefix will be current date and time.',
        default='')
    parser.add_argument(
        '--temp',
        action='store_true',
        required=False,
        help=
        'Switch on keeping the temp folder instead of deleting it at the end of the program'
    )
    parser.add_argument(
        '--bam',
        action='store_true',
        required=False,
        help=
        'Switch on keeping the final bam files instead of deleting them at the end of the program'
    )
    parser.add_argument('--directory',
                        type=str,
                        required=False,
                        default='',
                        help='Output directory for all output files.')

    return parser.parse_args()
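Since --runtype only accepts two values, a hedged refinement of the definition above would let argparse enforce them via choices instead of relying on the help text:

from argparse import ArgumentParser

parser = ArgumentParser(description='IS mapper')
parser.add_argument('--runtype',
                    type=str,
                    required=True,
                    choices=['typing', 'improvement'],
                    help='"typing" or "improvement"')

args = parser.parse_args(['--runtype', 'typing'])   # anything else is rejected with a clear error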
Ejemplo n.º 41
0
def parse_options():
    """Parse the arguments"""

    argslist = [
        ((
            '-v',
            '--version',
        ),
         dict(action='version',
              version=f'%(prog)s {version.APP_VERSION}',
              help=_('Display program version'))),
        ((
            '-m',
            '--maximise',
        ),
         dict(action='store_true',
              dest='maximise',
              help=_('Maximize the window'))),
        ((
            '-f',
            '--fullscreen',
        ),
         dict(action='store_true',
              dest='fullscreen',
              help=_('Make the window fill the screen'))),
        ((
            '-b',
            '--borderless',
        ),
         dict(action='store_true',
              dest='borderless',
              help=_('Disable window borders'))),
        ((
            '-H',
            '--hidden',
        ),
         dict(action='store_true',
              dest='hidden',
              help=_('Hide the window at startup'))),
        ((
            '-T',
            '--title',
        ),
         dict(action='store',
              dest='forcedtitle',
              metavar='TITLE',
              help=_('Specify a title for the window'))),
        (('--geometry', ),
         dict(
             action='store',
             dest='geometry',
             type=str,
             metavar='GEOMETRY',
             help=
             _('Set the preferred size and position of the window (see X man page)'
               ))),
        ((
            '-g',
            '--config',
        ),
         dict(action='store',
              dest='config',
              metavar='CONFIG',
              help=_('Specify a config file'))),
        (('--working-directory', ),
         dict(action='store',
              dest='working_directory',
              metavar='DIR',
              help=_('Set the working directory'))),
        ((
            '-i',
            '--icon',
        ),
         dict(action='store',
              dest='forcedicon',
              metavar='ICON',
              help=_('Set a custom icon for the window (by file or name)'))),
        ((
            '-r',
            '--role',
        ),
         dict(action='store',
              dest='role',
              metavar='ROLE',
              help=_('Set a custom WM_WINDOW_ROLE property on the window'))),
        ((
            '-l',
            '--layout',
        ),
         dict(action='store',
              dest='layout',
              default='default',
              help=_('Launch with the given layout'))),
        ((
            '-s',
            '--select-layout',
        ),
         dict(action='store_true',
              dest='select',
              help=_('Select a layout from a list'))),
        ((
            '-p',
            '--profile',
        ),
         dict(action='store',
              dest='profile',
              default='default',
              help=_('Use a different profile as the default'))),
        ((
            '-u',
            '--no-dbus',
        ), dict(action='store_true', dest='nodbus', help=_('Disable DBus'))),
        ((
            '-d',
            '--debug',
        ),
         dict(
             action='count',
             dest='debug',
             default=0,
             help=_('Enable debugging information (twice for debug server)'))),
        (('--debug-classes', ),
         dict(
             action='store',
             dest='debug_classes',
             help=_('Comma separated list of classes to limit debugging to'))),
        (('--debug-methods', ),
         dict(
             action='store',
             dest='debug_methods',
             help=_('Comma separated list of methods to limit debugging to'))),
        (('--new-tab', ),
         dict(
             action='store_true',
             dest='new_tab',
             help=_('If Terminator is already running, just open a new tab'))),
        ((
            '-e',
            '-x',
            '--execute',
        ),
         dict(action='store',
              dest='execute',
              nargs=REMAINDER,
              help=_('Use the rest of arguments as a command to execute'))),
    ]

    parser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter)

    for args, kwargs in argslist:
        parser.add_argument(*args, **kwargs)

    parser.parse_args(namespace=options)

    if options.debug_classes or options.debug_methods:
        if not options.debug > 0:
            options.debug = 1

    if options.debug:
        util.DEBUG = True
        if options.debug > 1:
            util.DEBUGFILES = True
        if options.debug_classes:
            classes = options.debug_classes.split(',')
            for item in classes:
                util.DEBUGCLASSES.append(item.strip())
        if options.debug_methods:
            methods = options.debug_methods.split(',')
            for item in methods:
                util.DEBUGMETHODS.append(item.strip())

    if options.working_directory:
        path = os.path.expanduser(options.working_directory)
        if os.path.isdir(path):
            options.working_directory = path
            os.chdir(path)
        else:
            err(f'OptionParse::parse_options: {path} does not exist')
            options.working_directory = None

    configobj = config.Config()

    if options.layout not in configobj.list_layouts():
        options.layout = 'default'
    if options.profile not in configobj.list_profiles():
        options.profile = 'default'

    if util.DEBUG:
        dbg(f'OptionParse::parse_options: command line options: {options}')
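The table-driven style above, a list of (flags, kwargs) pairs unpacked into add_argument(), is worth isolating. A minimal sketch with two entries copied from the list, minus the gettext wrapper:

from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter

argslist = [
    (('-m', '--maximise'),
     dict(action='store_true', dest='maximise', help='Maximize the window')),
    (('-T', '--title'),
     dict(action='store', dest='forcedtitle', metavar='TITLE',
          help='Specify a title for the window')),
]

parser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter)
for flags, kwargs in argslist:
    parser.add_argument(*flags, **kwargs)

options = parser.parse_args(['-m', '-T', 'demo'])

Keeping the option table separate from the parser makes the same definitions easy to document or reuse elsewhere.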
Ejemplo n.º 42
0
    '/admin/([a-z]+)', 'Admin',
    '/ca', 'Ca',
    '/client/status', 'ClientStatus',
    '/client', 'Client',
    '/health', 'Health',
    '/krl', 'Krl',
    '/ping', 'Ping',
    '/test_auth', 'TestAuth',
)

VERSION = '1.5.0'

PARSER = ArgumentParser()
PARSER.add_argument('-c', '--config', action='store', help='Configuration file')
PARSER.add_argument('-v', '--verbose', action='store_true', default=False, help='Add verbosity')
ARGS = PARSER.parse_args()

if not ARGS.config:
    PARSER.error('--config argument is required !')

CONFIG = ConfigParser()
CONFIG.read(ARGS.config)
SERVER_OPTS = {}
SERVER_OPTS['ca'] = CONFIG.get('main', 'ca')
SERVER_OPTS['krl'] = CONFIG.get('main', 'krl')
SERVER_OPTS['port'] = CONFIG.get('main', 'port')
try:
    SERVER_OPTS['admin_db_failover'] = CONFIG.get('main', 'admin_db_failover')
except NoOptionError:
    SERVER_OPTS['admin_db_failover'] = False
SERVER_OPTS['ldap'] = False
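On Python 3, the try/except NoOptionError pattern above can also be written with configparser's fallback keyword; a sketch under that assumption (the config file name is a placeholder):

from configparser import ConfigParser

CONFIG = ConfigParser()
CONFIG.read('cassh.ini')
# equivalent to catching NoOptionError and defaulting to False
admin_db_failover = CONFIG.get('main', 'admin_db_failover', fallback=False)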
Ejemplo n.º 43
0
    model = SegModel(**vars(hparams))

    # ------------------------
    # 2 SET LOGGER
    # ------------------------
    logger = False
    if hparams.log_wandb:
        logger = WandbLogger()

        # optional: log model topology
        logger.watch(model.net)

    # ------------------------
    # 3 INIT TRAINER
    # ------------------------
    trainer = pl.Trainer.from_argparse_args(hparams)

    # ------------------------
    # 5 START TRAINING
    # ------------------------
    trainer.fit(model)


if __name__ == '__main__':
    cli_lightning_logo()
    parser = ArgumentParser(add_help=False)
    parser = SegModel.add_model_specific_args(parser)
    hparams = parser.parse_args()

    main(hparams)
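SegModel.add_model_specific_args() is not shown in the excerpt; the usual Lightning idiom is a static or class method that extends the parser it receives. A hypothetical sketch of that shape (the hyperparameter names here are illustrative, not SegModel's real ones):

from argparse import ArgumentParser

class SegModel:
    @staticmethod
    def add_model_specific_args(parent_parser):
        # extend the parser handed in by the caller instead of creating a bare one
        parser = ArgumentParser(parents=[parent_parser], add_help=False)
        parser.add_argument('--lr', type=float, default=0.01)
        parser.add_argument('--batch_size', type=int, default=4)
        return parser

parser = ArgumentParser(add_help=False)
parser = SegModel.add_model_specific_args(parser)
hparams = parser.parse_args([])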
Ejemplo n.º 44
0
def main():
    stats.init_backends({})
    parser = ArgumentParser(
        description="Used to generate the contextual file like the capabilities, the legends, "
        "the Apache and MapCache configuration",
        prog=sys.argv[0],
    )
    add_comon_options(parser, tile_pyramid=False, no_geom=False)
    parser.add_argument(
        "--status", default=False, action="store_true", help="Display the SQS queue status and exit"
    )
    parser.add_argument(
        "--capabilities",
        "--generate-wmts-capabilities",
        default=False,
        action="store_true",
        help="Generate the WMTS Capabilities",
    )
    parser.add_argument(
        "--legends",
        "--generate-legend-images",
        default=False,
        action="store_true",
        dest="legends",
        help="Generate the legend images",
    )
    parser.add_argument(
        "--openlayers",
        "--generate-openlayers-testpage",
        default=False,
        action="store_true",
        dest="openlayers",
        help="Generate openlayers test page",
    )
    parser.add_argument(
        "--mapcache",
        "--generate-mapcache-config",
        default=False,
        action="store_true",
        dest="mapcache",
        help="Generate MapCache configuration file",
    )
    parser.add_argument(
        "--mapcache-version", default="1.4", choices=("1.4", "1.6"), help="The used version of MapCache"
    )
    parser.add_argument(
        "--apache",
        "--generate-apache-config",
        default=False,
        action="store_true",
        dest="apache",
        help="Generate Apache configuration file",
    )
    parser.add_argument(
        "--dump-config",
        default=False,
        action="store_true",
        help="Dump the used config with default values and exit",
    )

    options = parser.parse_args()
    gene = TileGeneration(options.config, options, layer_name=options.layer)

    if options.status:  # pragma: no cover
        status(gene)
        sys.exit(0)

    if options.cache is None:
        options.cache = gene.config["generation"]["default_cache"]

    if options.dump_config:
        for layer in gene.config["layers"].values():
            gene.init_layer(layer, options)
        _validate_generate_wmts_capabilities(gene.caches[options.cache], True)
        for grid in gene.config["grids"].values():
            if "obj" in grid:
                del grid["obj"]
        print(yaml.dump(gene.config))
        sys.exit(0)

    if options.legends:
        _generate_legend_images(gene)

    if options.capabilities:
        _generate_wmts_capabilities(gene)

    if options.mapcache:
        _generate_mapcache_config(gene, options.mapcache_version)

    if options.apache:
        _generate_apache_config(gene)

    if options.openlayers:
        _generate_openlayers(gene)
Ejemplo n.º 45
0
def parse_args():
    # type: () -> Namespace

    parser = ArgumentParser()
    parser.add_argument('file', type=str, help='Filename to check')
    parser.add_argument("-c",
                        "--checkers",
                        dest="checkers",
                        default=default_checkers,
                        help="Comma-separated list of checkers")
    parser.add_argument("-i",
                        "--ignore-codes",
                        dest="ignore_codes",
                        help="Comma-separated list of error codes to ignore")
    parser.add_argument("-e",
                        "--enable-codes",
                        dest="enable_codes",
                        default='',
                        help="Comma-separated list of error codes to ignore")
    parser.add_argument('--max-line-length',
                        dest='max_line_length',
                        default=80,
                        action='store',
                        help='Maximum line length')
    parser.add_argument('--no-merge-configs',
                        dest='merge_configs',
                        action='store_false',
                        help=('Whether to ignore config files found at a '
                              'higher directory than this one'))
    parser.add_argument('--multi-thread',
                        type=str2bool,
                        default=True,
                        action='store',
                        help=('Run checkers simultaneously; '
                              'pass false to run them sequentially'))
    parser.add_argument(
        '--venv-root',
        dest='venv_root',
        default='~/.virtualenvs',
        action='store',
        help=(
            'Location of all Python virtual environments. '
            'Used with auto-detecting virtual envs created by virtualenvwrapper'
        ))
    parser.add_argument(
        '--venv-path',
        dest='venv_path',
        default=None,
        action='store',
        help=('The full path to a virtualenv. Used with a directly-created '
              '(not using virtualenvwrapper) virtualenv.'))
    parser.add_argument('--pylint-rcfile',
                        default=None,
                        dest='pylint_rcfile',
                        help='Location of a config file for pylint')
    parser.add_argument('--mypy-config-file',
                        default=None,
                        dest='mypy_config_file',
                        help='Location of a config file for mypy')
    parser.add_argument('--flake8-config-file',
                        default=None,
                        dest='flake8_config_file',
                        help='Location of a config file for flake8')
    parser.add_argument(
        '--report-checker-errors-inline',
        type=str2bool,
        default=True,
        action='store',
        help=("Whether to fake failing checkers's STDERR as a reported "
              "error for easier display."))

    parser.add_argument('--mypy-no-implicit-optional',
                        type=str2bool,
                        default=False,
                        action='store')

    parser.add_argument('--debug',
                        action='store_true',
                        help=('Enable output to help debug pycheckers itself'))

    return parser.parse_args()
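str2bool is used as a type above but is not part of argparse and is not defined in the excerpt; a common minimal implementation looks like the following (an assumption, not necessarily this project's version):

from argparse import ArgumentTypeError

def str2bool(value):
    """Parse typical true/false spellings from the command line."""
    if isinstance(value, bool):
        return value
    if value.lower() in ('yes', 'true', 't', '1'):
        return True
    if value.lower() in ('no', 'false', 'f', '0'):
        return False
    raise ArgumentTypeError('Boolean value expected, got %r' % value)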
Ejemplo n.º 46
0
def define_arguments():
    """
    Define arguments that this script will use.
    :return: Populated argument parser
    """

    description = ('This tool installs all files/directories required by '
                   'Ansible for Aruba-OS Switch and CX integration.\n\n'
                   'Requirements:'
                   '\n\t- Linux OS only'
                   '\n\t- Ansible release version 2.5 or later installed'
                   '\n\t- Python 2.7 installed')

    epilog = ('Directories added:'
              '\n\t- <ansible_module_path>/modules/network/arubaoss'
              '\n\t- <ansible_module_path>/modules/network/arubaoscx'
              '\n\t- <ansible_module_path>/module_utils/network/arubaoss'
              '\n\n'
              'Files added/modified:'
              '\n\t- <ansible_module_path>/plugins/action/arubaoss.py'
              '\n\t- <ansible_module_path>/plugins/connection/'
              'arubaoscx_rest.py'
              '\n\t- <ansible_module_path>/config/base.yml')

    parser = ArgumentParser(description=description,
                            formatter_class=RawDescriptionHelpFormatter,
                            epilog=epilog)
    parser.add_argument('-r',
                        '--remove',
                        required=False,
                        help=('remove all files & directories installed '
                              'by this script.'),
                        action='store_true')
    parser.add_argument('--reinstall',
                        required=False,
                        help=('remove all files & directories installed '
                              'by this script. Then re-install.'),
                        action='store_true')

    group = parser.add_mutually_exclusive_group(required=False)
    group.add_argument('--cx',
                       required=False,
                       help=('only install files/directories required for '
                             'ArubaOS-CX.'),
                       action='store_true')
    group.add_argument('--switch',
                       required=False,
                       help=('only install files/directories required for '
                             'ArubaOS-Switch.'),
                       action='store_true')
    group.add_argument('--controller',
                       required=False,
                       help=('only install files/directories required for '
                             'ArubaOS-Controller.'),
                       action='store_true')
    group.add_argument('--activate',
                       required=False,
                       help=('only install files/directories required for '
                             'Aruba-Activate.'),
                       action='store_true')
    group.add_argument('--airwave',
                       required=False,
                       help=('only install files/directories required for '
                             'Aruba-Airwave.'),
                       action='store_true')
    group.add_argument('--clearpass',
                       required=False,
                       help=('only install files/directories required for '
                             'Aruba-ClearPass.'),
                       action='store_true')
    return parser.parse_args()
Ejemplo n.º 47
0
        required=True
    )

    parser.add_argument(
        "-o",
        "--output",
        help="Path to output directory",
        required=True,
        type=str
    )

    parser.add_argument(
        "-p",
        "--processes",
        help="How many processes to use",
        default=1,
        type=int
    )

    if len(sys.argv) <= 1:
        parser.print_help()
        exit(0)

    try:
        main(parser.parse_args(sys.argv[1:]))
    except ArgumentError as err:
        logger.error(err)
        parser.print_help()


Ejemplo n.º 48
0
    print("")
    print("Unit test for Part Two.")
    print("Test {inp} gives {res}".format(inp=ex1, res=partTwo(ex1, ex2_rules)))
    print("Test {inp} gives {res}".format(inp=ex2, res=partTwo(ex2, ex2_rules)))


def partOne(molecule, rules_str):
    rules = getAllRules(rules_str)
    return len(set(genAllSubstitutions(molecule, rules)))

def partTwo(molecule, rules_str):
    # It goes much faster to go in reverse, for this.
    rules = getAllReverseRules(rules_str)
    return AStar(molecule, 'e', rules)

if __name__ == '__main__':
    from argparse import ArgumentParser, FileType

    args = ArgumentParser()
    args.add_argument("-t", "--test", help='Unit tests', action='store_true')
    args.add_argument("-i", "--input", help='Your input file (for rules only)', type=FileType('r'))
    args.add_argument("-m", "--molecule", help='Your input molecule', type=str)
    options = args.parse_args()

    if options.test:
        UnitTest()
    if options.input:
        inp = options.input.read().strip()
        print("Answer for part one is : {res}".format(res=partOne(options.molecule, inp)))
        print("Answer for part two is : {res}".format(res=partTwo(options.molecule, inp)))
Ejemplo n.º 49
0
def main():
    inf = 10000000000000
    err_max = 0.0000001
    p = ArgumentParser()
    p.add_argument('-f', '--folder', type=str, default='')
    p.add_argument('-a', '--adj', type=str, default='adjacency.dat')
    p.add_argument('-E', '--end_file', type=str, default='.dat')
    p.add_argument('-w', '--w_file', type=str, default='w.dat')
    p.add_argument('-l', '--L', type=int, default=4)
    p.add_argument('-i', '--initialization', type=int, default=0)
    p.add_argument('-k', '--K', type=int, default=5)
    p.add_argument('-r', '--N_real', type=int, default=1)
    p.add_argument('-t', '--maxit', type=int, default=500)
    p.add_argument('-e', '--tolerance', type=float, default=0.1)
    p.add_argument('-g', '--err', type=float, default=0.1)
    p.add_argument('-o', '--out_adjacency', type=int, default=0)
    p.add_argument('-A', '--assortative', type=int, default=0)
    p.add_argument('-u', '--undirected', type=int, default=0)
    p.add_argument('-z', '--rseed', type=int, default=0)
    p.add_argument('-y', '--decision', type=int, default=2)
    args = p.parse_args()

    folder = "../data/" + args.folder
    if (args.undirected == True):
        A = [nx.MultiGraph() for l in range(args.L)]  # list of graphs
    else:
        A = [nx.MultiDiGraph() for l in range(args.L)]  # list of graphs

    tl.read_graph(folder, args.adj, A)
    print "Undirected=", bool(args.undirected)
    print "Assortative=", bool(args.assortative)
    tl.print_graph_stat(A, args.undirected)

    if (args.out_adjacency): tl.out_graph(folder, A)

    if (args.undirected == True):
        u_list = v_list = tl.remove_zero_entries_undirected(A)
    else:
        u_list = tl.remove_zero_entries_u(
            A)  # list of INT INDECES of nodes with zero out degree
        v_list = tl.remove_zero_entries_v(
            A)  # list of INT INDECES of nodes with zero in degree

    MT = mt.MultiTensor(N=A[0].number_of_nodes(),
                        L=args.L,
                        K=args.K,
                        N_real=args.N_real,
                        tolerance=args.tolerance,
                        decision=args.decision,
                        maxit=args.maxit,
                        rseed=args.rseed,
                        out_adjacency=bool(args.out_adjacency),
                        inf=inf,
                        err_max=err_max,
                        err=args.err,
                        initialization=args.initialization,
                        undirected=bool(args.undirected),
                        folder=folder,
                        end_file=args.end_file,
                        adj=args.adj,
                        w_file=args.w_file,
                        assortative=bool(args.assortative))

    tic = time.perf_counter()  # time.clock() was removed in Python 3.8
    N = A[0].number_of_nodes()
    B = np.empty(shape=[args.L, N, N])

    for l in range(args.L):
        B[l, :, :] = nx.to_numpy_matrix(A[l], weight='weight')

    MT.cycle_over_realizations(A, B, u_list, v_list)

    #tl.print_graph_stat(A)

    toc = time.perf_counter()
    print("It took ", toc - tic, " seconds.")
Example No. 50
0
def main(args=None):
    """Main method entry point."""
    if not args:
        args = sys.argv[1:]

    parser = ArgumentParser(description=DEFAULTS['description'],
                            epilog=DEFAULTS['epilog'],
                            fromfile_prefix_chars='@',
                            formatter_class=RawTextHelpFormatter)
    parser.add_argument('-v',
                        '--version',
                        default=False,
                        action='store_true',
                        help='show version information and exits')
    parser.add_argument('-d',
                        '--download',
                        default=False,
                        action='store_true',
                        help='download the corpus data and content')
    parser.add_argument('--defaults',
                        default=False,
                        action='store_true',
                        help='show the default values and exits')
    parser.add_argument('-l',
                        '--list',
                        default=False,
                        action='store_true',
                        help='list all of the corpora')
    parser.add_argument('-b',
                        '--bucket',
                        default=DEFAULTS['bucket'],
                        help='name of a bucket to use as corpus root')

    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(1)

    args = parser.parse_args(args)

    version_header = "S3 Corpora v{0}\n".format(__version__)

    if args.version:
        sys.stdout.write(version_header)
        sys.exit(0)

    if args.defaults:
        sys.stdout.write('Bucket: {}\n'.format(DEFAULTS['bucket']))
        sys.stdout.write('BlobStore: {}\n'.format(DEFAULTS['blobstore']))
        sys.exit(0)

    bucket_name = args.bucket
    sys.stdout.write('Initialising bucket: {}\n'.format(bucket_name))
    sys.stdout.flush()
    as3_bucket = AS3Bucket(bucket_name, persist=True)

    if args.download:
        sys.stdout.write(
            'Initialising blobstore: {}\n'.format(BLOB_STORE_ROOT))
        sys.stdout.flush()
        blobstore = BlobStore(BLOB_STORE_ROOT)
        sys.stdout.write('Downloading bucket: {}\n'.format(bucket_name))
        sys.stdout.flush()
        as3_bucket.download_bucket(blobstore)

    if args.list:
        for corpus in as3_bucket.get_corpora():
            sys.stdout.write('{} : {}\n'.format(corpus.datacentre.doi,
                                                corpus.datacentre.name))
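
A minimal sketch of driving this entry point programmatically; the bucket name is a placeholder and AS3Bucket will talk to S3 for real. Note that the no-arguments help short-circuit checks sys.argv rather than the args parameter, so it only triggers when the script itself is run without arguments:

# Hypothetical bucket name; listing requires the bucket to be reachable.
main(['--bucket', 'example-corpus-bucket', '--list'])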
Example No. 51
0
def handle_leave():
    app.logger.info("Got leave event")


@handler.add(PostbackEvent)
def handle_postback(event):
    if event.postback.data == 'ping':
        line_bot_api.reply_message(
            event.reply_token, TextSendMessage(text='pong'))


@handler.add(BeaconEvent)
def handle_beacon(event):
    line_bot_api.reply_message(
        event.reply_token,
        TextSendMessage(text='Got beacon event. hwid=' + event.beacon.hwid))


if __name__ == "__main__":
    arg_parser = ArgumentParser(
        usage='Usage: python ' + __file__ + ' [--port <port>] [--help]'
    )
    # -p should be an int for Flask and -d is a boolean flag, so use type=int and store_true.
    arg_parser.add_argument('-p', '--port', type=int, default=8000, help='port')
    arg_parser.add_argument('-d', '--debug', action='store_true', help='debug')
    options = arg_parser.parse_args()

    # create tmp dir for download content
    make_static_tmp_dir()

    app.run(debug=options.debug, port=options.port)
Example No. 52
0
        sys.exit('ERROR: given input file is not a valid json file')
    return data

    
if __name__ == '__main__':
    # Configure the command line options
    parser = ArgumentParser()
    parser.add_argument('-i', '--input_file', type=str, required=True,
                        help='[Required] Determines the input file with options data')
    parser.add_argument('-S', '--underlying_price', type=float, required=True,
                        help='[Required] Current underlying price')
    parser.add_argument('-r', '--risk_free_rate', type=float, default=0.01,
                        help='Risk free rate. Default: 0.01')
    parser.add_argument('-o', '--output', action='store_true', default=False,
                        help='If enabled saves the plot as a PNG file. Otherwise (default), it just shows the plot')
    config = parser.parse_args()
    
    # Check that input file does exist
    if not (path.exists(config.input_file) and path.isfile(config.input_file)):
        sys.exit('ERROR: given input file does not exist or is not a file')
    
    # Load strategy
    data = load_strategy(config.input_file)
    
    # Update last price for each options
    # First check if there is data available in data folder for given ticker
    data_folder = 'data'
    available_tickers = [f for f in os.listdir(data_folder) if path.isdir(path.join(data_folder, f))]
    if data['meta']['ticker'].upper() not in available_tickers:
        sys.exit('ERROR: there is no available option data on ticker ' + data['meta']['ticker'])
    
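The snippet above is cut off at the start of load_strategy; the following is a hedged reconstruction of the missing head, inferred from the visible error branch and return statement (the exact file handling in the original may differ):

import json
import sys

def load_strategy(input_file):
    """Load the options-strategy description from a JSON file."""
    try:
        with open(input_file) as handle:
            data = json.load(handle)
    except json.JSONDecodeError:
        sys.exit('ERROR: given input file is not a valid json file')
    return data
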
Example No. 53
0
def parse_args():
  def_gpu_device = 0
  def_vocab = 32768
  def_embed = 1024
  def_char_embed = 128
  def_queue = 512
  def_stack = 512
  def_srstate = 512
  def_epoch = 50
  def_minibatch = 100
  def_unary_limit = 3

  p = ArgumentParser(
    description='Shift-reduce constituent parser',
    usage=
      '\n  %(prog)s train [options] source model'
      '\n  %(prog)s test source model'
      '\n  %(prog)s -h',
  )

  p.add_argument('mode',
    help='\'train\' or \'test\'')
  p.add_argument('source',
    help='[in] source corpus'
      '\n  train: PENN-style constituent tree in each row'
      '\n  test: space-separated word list in each row')
  p.add_argument('model',
    help='[in/out] model prefix')
  p.add_argument('--use-gpu', action='store_true', default=False,
    help='use GPU calculation')
  p.add_argument('--gpu-device', default=def_gpu_device, metavar='INT', type=int,
    help='GPU device ID to be used (default: %(default)d)')
  p.add_argument('--vocab', default=def_vocab, metavar='INT', type=int,
    help='vocabulary size (default: %(default)d)')
  p.add_argument('--embed', default=def_embed, metavar='INT', type=int,
    help='embedding size (default: %(default)d)')
  p.add_argument('--char-embed', default=def_char_embed, metavar='INT', type=int,
    help='character-based embedding size (default: %(default)d)')
  p.add_argument('--queue', default=def_queue, metavar='INT', type=int,
    help='queue state size (default: %(default)d)')
  p.add_argument('--stack', default=def_stack, metavar='INT', type=int,
    help='stack state size (default: %(default)d)')
  p.add_argument('--srstate', default=def_srstate, metavar='INT', type=int,
    help='shift-reduce state size (default: %(default)d)')
  p.add_argument('--epoch', default=def_epoch, metavar='INT', type=int,
    help='number of training epoch (default: %(default)d)')
  p.add_argument('--minibatch', default=def_minibatch, metavar='INT', type=int,
    help='minibatch size (default: %(default)d)')
  p.add_argument('--unary-limit', default=def_unary_limit, metavar='INT', type=int,
    help='maximum length of unary chain (default: %(default)d)')

  args = p.parse_args()

  # check args
  try:
    if args.mode not in ['train', 'test']: raise ValueError('you must set mode = \'train\' or \'test\'')
    if args.vocab < 1: raise ValueError('you must set --vocab >= 1')
    if args.embed < 1: raise ValueError('you must set --embed >= 1')
    if args.char_embed < 1: raise ValueError('you must set --char-embed >= 1')
    if args.queue < 1: raise ValueError('you must set --queue >= 1')
    if args.stack < 1: raise ValueError('you must set --stack >= 1')
    if args.srstate < 1: raise ValueError('you must set --srstate >= 1')
    if args.epoch < 1: raise ValueError('you must set --epoch >= 1')
    if args.minibatch < 1: raise ValueError('you must set --minibatch >= 1')
  except Exception as ex:
    p.print_usage(file=sys.stderr)
    print(ex, file=sys.stderr)
    sys.exit()

  return args
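
Hypothetical invocations of the parser above (the script name is assumed):

#   python parser.py train treebank.txt model/prefix --use-gpu --epoch 20
#   python parser.py test sentences.txt model/prefix
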
Example No. 54
0
def _parse_cli_args() -> Namespace:
    parser = ArgumentParser(
        description="get French television listings using Télé Loisirs mobile "
        "API in XMLTV format")
    parser.add_argument(
        "--description",
        action="store_true",
        help="print the description for this grabber",
    )
    parser.add_argument(
        "--version",
        action="store_true",
        help="show the version of this grabber",
    )
    parser.add_argument(
        "--capabilities",
        action="store_true",
        help="show the capabilities this grabber supports",
    )
    parser.add_argument(
        "--configure",
        action="store_true",
        help="generate the configuration file by asking the users which "
        "channels to grab",
    )
    parser.add_argument(
        "--days",
        type=int,
        default=_DEFAULT_DAYS,
        help="grab DAYS days of TV data (default: %(default)s)",
    )
    parser.add_argument(
        "--offset",
        type=int,
        default=_DEFAULT_OFFSET,
        help="grab TV data starting at OFFSET days in the future (default: "
        "%(default)s)",
    )
    parser.add_argument(
        "--output",
        type=Path,
        default=Path("/dev/stdout"),
        help="write the XML data to OUTPUT instead of the standard output",
    )
    parser.add_argument(
        "--config-file",
        type=Path,
        default=_DEFAULT_CONFIG_FILE,
        help="file name to write/load the configuration to/from (default: "
        "%(default)s)",
    )

    log_level_group = parser.add_mutually_exclusive_group()
    log_level_group.add_argument(
        "--quiet",
        action="store_true",
        help="only print error-messages on STDERR",
    )
    log_level_group.add_argument(
        "--debug",
        action="store_true",
        help="provide more information on progress to stderr to help in"
        "debugging",
    )

    return parser.parse_args()
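
Hypothetical invocations, following the usual XMLTV grabber conventions (the executable name is assumed):

#   tv_grab_fr_teleloisirs --configure
#   tv_grab_fr_teleloisirs --days 3 --offset 1 --output listings.xml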
Example No. 55
0
def main():
    def print_version():
        print(
            f'version {__version__}, A downloader that download the HLS/DASH stream.'
        )

    parser = ArgumentParser(
        prog='XstreamDL-CLI',
        usage='XstreamDL-CLI [OPTION]... URL/FILE/FOLDER...',
        description='A downloader that download the HLS/DASH stream',
        add_help=False)
    parser.add_argument('-v',
                        '--version',
                        action='store_true',
                        help='print version and exit')
    parser.add_argument('-h',
                        '--help',
                        action='store_true',
                        help='print help message and exit')
    parser.add_argument('--speed-up',
                        action='store_true',
                        help='speed up at end')
    parser.add_argument('--speed-up-left',
                        default='10',
                        help='speed up when the number of remaining segments is below this value')
    parser.add_argument('--live', action='store_true', help='live mode')
    parser.add_argument(
        '--compare-with-url',
        action='store_true',
        help='use full url to compare with last segments to get new segments')
    parser.add_argument(
        '--dont-split-discontinuity',
        action='store_true',
        help='do not treat an #EXT-X-DISCONTINUITY tag as the start of a new stream')
    parser.add_argument('--name-from-url',
                        action='store_true',
                        help='get name from segment url')
    parser.add_argument(
        '--live-duration',
        default='',
        help=
        'live record time, format HH:MM:SS, example 00:00:30 will record about 30s'
    )
    parser.add_argument('--live-utc-offset',
                        default='0',
                        help='the value is used to correct utc time')
    parser.add_argument('--live-refresh-interval',
                        default='3',
                        help='live refresh interval')
    parser.add_argument('--name', default='', help='specific stream base name')
    parser.add_argument('--base-url',
                        default='',
                        help='set base url for Stream')
    parser.add_argument(
        '--ad-keyword',
        default='',
        help='skip #EXT-X-DISCONTINUITY blocks whose segment url contains this keyword')
    parser.add_argument(
        '--resolution',
        default='',
        choices=['', '270', '360', '480', '540', '576', '720', '1080', '2160'],
        help='auto choose target quality')
    parser.add_argument('--best-quality',
                        action='store_true',
                        help='auto choose best quality for dash streams')
    parser.add_argument(
        '--video-only',
        action='store_true',
        help='only choose video stream when use --best-quality')
    parser.add_argument(
        '--audio-only',
        action='store_true',
        help='only choose audio stream when use --best-quality')
    parser.add_argument('--all-videos',
                        action='store_true',
                        help='choose all video stream to download')
    parser.add_argument('--all-audios',
                        action='store_true',
                        help='choose all audio stream to download')
    parser.add_argument('--service',
                        default='',
                        help='set serviceLocation for BaseURL choose')
    parser.add_argument('--save-dir',
                        default='Downloads',
                        help='set save dir for Stream')
    parser.add_argument(
        '--select',
        action='store_true',
        help='show stream to select and download, default is to download all')
    parser.add_argument('--multi-s',
                        action='store_true',
                        help='use this option when S tag number > 0')
    parser.add_argument(
        '--disable-force-close',
        action='store_true',
        help=
        'by default every connection is closed securely, which slows the download; this option disables that behaviour'
    )
    parser.add_argument(
        '--limit-per-host',
        default=4,
        help=
        'increase the value if your connection to the stream host is poor; >100 is suggested for DASH streams'
    )
    parser.add_argument(
        '--headers',
        default='headers.json',
        help='read headers from headers.json, you can also use custom config')
    parser.add_argument('--url-patch',
                        default='',
                        help='add some custom strings for all segments link')
    parser.add_argument('--overwrite',
                        action='store_true',
                        help='overwrite output files')
    parser.add_argument('--raw-concat',
                        action='store_true',
                        help='concat content as raw')
    parser.add_argument('--disable-auto-concat',
                        action='store_true',
                        help='disable auto-concat')
    parser.add_argument('--enable-auto-delete',
                        action='store_true',
                        help='enable auto-delete files after concat success')
    parser.add_argument(
        '--disable-auto-decrypt',
        action='store_true',
        help='disable auto-decrypt segments before dump to disk')
    parser.add_argument(
        '--key',
        default=None,
        help=
        '<id>:<k>, <id> is either a track ID in decimal or a 128-bit KID in hex, <k> is a 128-bit key in hex'
    )
    parser.add_argument(
        '--b64key',
        default=None,
        help=
        'base64 format aes key, only for HLS standard AES-128-CBC encryption')
    parser.add_argument('--hexiv', default=None, help='hex format aes iv')
    parser.add_argument(
        '--proxy',
        default='',
        help=
        'use socks/http proxy, e.g. socks5://127.0.0.1:10808 or http://127.0.0.1:10809'
    )
    parser.add_argument(
        '--disable-auto-exit',
        action='store_true',
        help='disable auto exit after download end, GUI will use this option')
    parser.add_argument('--parse-only',
                        action='store_true',
                        help='parse only, not to download')
    parser.add_argument(
        '--show-init',
        action='store_true',
        help='show initialization to help you identify same name stream')
    parser.add_argument(
        '--index-to-name',
        action='store_true',
        help=
        'some DASH live streams use the same name for different streams; use this option to avoid name collisions'
    )
    parser.add_argument('--log-level',
                        default='INFO',
                        choices=['DEBUG', 'INFO', 'WARNING', 'ERROR'],
                        help='set log level, default is INFO')
    parser.add_argument(
        '--redl-code',
        default='',
        help=
        're-download segments whose response status code is in this set, e.g. 408,500,502,503,504')
    parser.add_argument('--hide-load-metadata',
                        action='store_true',
                        help='hide the verbose `Load #EXT-X-MEDIA metadata` log messages')
    parser.add_argument('URI', nargs='*', help='URL/FILE/FOLDER string')
    args = parser.parse_args()
    if args.help:
        print_version()
        parser.print_help()
        sys.exit()
    if args.version:
        print_version()
        sys.exit()
    if len(args.URI) == 0:
        try:
            uri = input(
                'Paste your URL/FILE/FOLDER string at the end of the command, please.\nCtrl+C to exit or input here:'
            )
        except KeyboardInterrupt:
            sys.exit()
        if uri.strip() != '':
            args.URI.append(uri.strip())
    if len(args.URI) == 0:
        sys.exit('No URL/FILE/FOLDER input')
    for handler in logger.handlers:
        # Note: do not check against StreamHandler here,
        # because FileHandler is a subclass of StreamHandler,
        # so isinstance would also return True for a FileHandler.
        if not isinstance(handler, logging.FileHandler):
            handler.setLevel(logging.getLevelName(args.log_level))
    command_handler(args)
    logger.info(f'use {__version__}, set URI to {args.URI}')
    logger.debug(f'args => {args}')
    daemon = Daemon(args)
    daemon.daemon()
    if args.disable_auto_exit:
        _ = input('press any key to exit.')
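
Hypothetical invocations (the URLs are placeholders):

#   XstreamDL-CLI --save-dir Downloads --name episode01 "https://example.com/master.m3u8"
#   XstreamDL-CLI --live --live-duration 00:00:30 "https://example.com/live.m3u8"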
Example No. 56
0
def parse_cmdline(description, args, createfunc=None, addfunc=None, removefunc=None,
                  startfunc=None, stopfunc=None, showfunc=None, triagefunc=None,
                  coveragefunc=None, destroyfunc=None, validatefunc=None):
    argParser = ArgumentParser(description=description)  # first positional parameter of ArgumentParser is prog, not description

    argParser.add_argument('-v', '--verbose',
                           action='store_true',
                           help="""Verbose mode, print information about the progress""",
                           default=False)

    subparsers = argParser.add_subparsers(description="Orthrus subcommands")

    # Command 'create'
    create_parser = subparsers.add_parser('create', help=CREATE_HELP)
    create_parser.add_argument('-asan', '--afl-asan',
                               action='store_true',
                               help="""Setup binaries for afl with AddressSanitizer""",
                               default=False)
    create_parser.add_argument('-fuzz', '--afl-harden',
                               action='store_true',
                               help="""Setup binaries for afl in 'harden' mode (stack-protector, fortify)""",
                               default=False)
    create_parser.add_argument('-cov', '--coverage',
                               action='store_true',
                               help="""Setup binaries to collect coverage information""",
                               default=False)
    create_parser.add_argument('-d', '--configure-flags', nargs='?',
                               type=str, default="",
                               help='Additional flags for configuring the source')
    # create_parser.add_argument('-f', '--cflags', nargs='?',
    #                         type = str, default="",
    #                         help = 'Additional flags to go into CFLAGS for compilation')
    # create_parser.add_argument('-l', '--ldflags', nargs='?',
    #                         type = str, default="",
    #                         help = 'Additional flags to go into LDFLAGS for compilation')
    create_parser.set_defaults(func=createfunc)

    # Command 'add'
    add_parser = subparsers.add_parser('add', help=ADD_HELP)
    add_parser.add_argument('-n', '--job', required=True, type=str,
                            help='Add a job with executable command line invocation string')
    # add_parser.add_argument('-j', '--job-id', nargs='?',
    #                         type=str, default="",
    #                         help='Job Id for the job which should be selected')
    add_parser.add_argument('-i', '--import', dest='_import', nargs='?',
                            type=str, default="",
                            help='Import an AFL fuzzing output directory provided as tar.gz')
    add_parser.add_argument('-s', '--sample', nargs='?',
                            type=str, default="",
                            help='A single file or directory of afl testcases for fuzzing')
    add_parser.set_defaults(func=addfunc)

    # Command 'remove'
    remove_parser = subparsers.add_parser('remove', help=REMOVE_HELP)
    remove_parser.add_argument('-j', '--job-id', required=True,
                               type=str, help='Job Id for the job which should be removed')
    remove_parser.set_defaults(func=removefunc)

    # Command 'start'
    start_parser = subparsers.add_parser('start', help=START_HELP)
    start_parser.add_argument('-j', '--job-id', required=True,
                              type=str, help='Job Id for the job which should be started')
    start_parser.add_argument('-c', '--coverage',
                              action='store_true',
                              help="""Collect coverage information while fuzzing""",
                              default=False)
    start_parser.add_argument('-m', '--minimize',
                              action='store_true',
                              help="""Minimize corpus before start""",
                              default=False)
    start_parser.set_defaults(func=startfunc)

    # Command 'stop'
    stop_parser = subparsers.add_parser('stop', help=STOP_HELP)
    stop_parser.add_argument('-c', '--coverage',
                             action='store_true',
                             help="""Stop afl-cov instances on stop""",
                             default=False)
    stop_parser.set_defaults(func=stopfunc)

    # Command 'show'
    show_parser = subparsers.add_parser('show', help=SHOW_HELP)
    show_parser.add_argument('-j', '--jobs',
                             action='store_true',
                             help="""Show configured jobs""",
                             default=False)
    show_parser.add_argument('-cov', '--cov',
                             action='store_true',
                             help="""Show coverage of job""",
                             default=False)
    show_parser.set_defaults(func=showfunc)

    # Command 'triage'
    triage_parser = subparsers.add_parser('triage', help=TRIAGE_HELP)
    triage_parser.add_argument('-j', '--job-id', nargs='?',
                               type=str, default="",
                               help="""Job Id for the job which should be triaged""")
    triage_parser.set_defaults(func=triagefunc)

    # Command 'coverage'
    coverage_parser = subparsers.add_parser('coverage', help=COVERAGE_HELP)
    coverage_parser.add_argument('-j', '--job-id', nargs='?',
                                 type=str, default="", required=True,
                                 help="""Job Id for checking coverage""")
    coverage_parser.set_defaults(func=coveragefunc)

    # Command 'destroy'
    destroy_parser = subparsers.add_parser('destroy', help=DESTROY_HELP)
    # create_parser.add_argument('-x', type=int, default=1)
    destroy_parser.set_defaults(func=destroyfunc)

    # Command 'validate'
    validate_parser = subparsers.add_parser('validate', help=VALIDATE_HELP)
    validate_parser.set_defaults(func=validatefunc)

    return argParser.parse_args(args)
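
Hypothetical invocations of the resulting CLI (the tool name is assumed from the help constants):

#   orthrus create -asan -cov
#   orthrus add -n './target @@' -s seeds/
#   orthrus start -j 1 -c
#   orthrus triage -j 1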
Example No. 57
0
def main():
    global config, options

    parser = ArgumentParser()
    common.setup_global_opts(parser)
    parser.add_argument(
        "-i",
        "--identity-file",
        default=None,
        help=_("Specify an identity file to provide to SSH for rsyncing"))
    parser.add_argument("--local-copy-dir",
                        default=None,
                        help=_("Specify a local folder to sync the repo to"))
    parser.add_argument("--no-checksum",
                        action="store_true",
                        default=False,
                        help=_("Don't use rsync checksums"))
    parser.add_argument(
        "--no-keep-git-mirror-archive",
        action="store_true",
        default=False,
        help=_("If a git mirror gets to big, allow the archive to be deleted"))
    options = parser.parse_args()
    config = common.read_config(options)

    if config.get('nonstandardwebroot') is True:
        standardwebroot = False
    else:
        standardwebroot = True

    for serverwebroot in config.get('serverwebroot', []):
        # this supports both an ssh host:path and just a path
        s = serverwebroot.rstrip('/').split(':')
        if len(s) == 1:
            fdroiddir = s[0]
        elif len(s) == 2:
            host, fdroiddir = s
        else:
            logging.error(
                _('Malformed serverwebroot line:') + ' ' + serverwebroot)
            sys.exit(1)
        repobase = os.path.basename(fdroiddir)
        if standardwebroot and repobase != 'fdroid':
            logging.error('serverwebroot path does not end with "fdroid", ' +
                          'perhaps you meant one of these:\n\t' +
                          serverwebroot.rstrip('/') + '/fdroid\n\t' +
                          serverwebroot.rstrip('/').rstrip(repobase) +
                          'fdroid')
            sys.exit(1)

    if options.local_copy_dir is not None:
        local_copy_dir = options.local_copy_dir
    elif config.get('local_copy_dir'):
        local_copy_dir = config['local_copy_dir']
    else:
        local_copy_dir = None
    if local_copy_dir is not None:
        fdroiddir = local_copy_dir.rstrip('/')
        if os.path.exists(fdroiddir) and not os.path.isdir(fdroiddir):
            logging.error(_('local_copy_dir must be directory, not a file!'))
            sys.exit(1)
        if not os.path.exists(os.path.dirname(fdroiddir)):
            logging.error(
                _('The root dir for local_copy_dir "{path}" does not exist!').
                format(path=os.path.dirname(fdroiddir)))
            sys.exit(1)
        if not os.path.isabs(fdroiddir):
            logging.error(_('local_copy_dir must be an absolute path!'))
            sys.exit(1)
        repobase = os.path.basename(fdroiddir)
        if standardwebroot and repobase != 'fdroid':
            logging.error(
                _('local_copy_dir does not end with "fdroid", ' +
                  'perhaps you meant: "{path}"').format(path=fdroiddir +
                                                        '/fdroid'))
            sys.exit(1)
        if local_copy_dir[-1] != '/':
            local_copy_dir += '/'
        local_copy_dir = local_copy_dir.replace('//', '/')
        if not os.path.exists(fdroiddir):
            os.mkdir(fdroiddir)

    if not config.get('awsbucket') \
            and not config.get('serverwebroot') \
            and not config.get('servergitmirrors') \
            and not config.get('androidobservatory') \
            and not config.get('binary_transparency_remote') \
            and not config.get('virustotal_apikey') \
            and local_copy_dir is None:
        logging.warning(
            _('No option set! Edit your config.yml to set at least one of these:'
              ) +
            '\nserverwebroot, servergitmirrors, local_copy_dir, awsbucket, ' +
            'virustotal_apikey, androidobservatory, or binary_transparency_remote'
        )
        sys.exit(1)

    repo_sections = ['repo']
    if config['archive_older'] != 0:
        repo_sections.append('archive')
        if not os.path.exists('archive'):
            os.mkdir('archive')
    if config['per_app_repos']:
        repo_sections += common.get_per_app_repos()

    if os.path.isdir('unsigned') or (
            local_copy_dir is not None
            and os.path.isdir(os.path.join(local_copy_dir, 'unsigned'))):
        repo_sections.append('unsigned')

    for repo_section in repo_sections:
        if local_copy_dir is not None:
            if config['sync_from_local_copy_dir']:
                sync_from_localcopy(repo_section, local_copy_dir)
            else:
                update_localcopy(repo_section, local_copy_dir)
        for serverwebroot in config.get('serverwebroot', []):
            update_serverwebroot(serverwebroot, repo_section)
        if config.get('servergitmirrors', []):
            # update_servergitmirrors handles multiple mirrors itself, so no loop is needed here
            servergitmirrors = config.get('servergitmirrors', [])
            update_servergitmirrors(servergitmirrors, repo_section)
        if config.get('awsbucket'):
            update_awsbucket(repo_section)
        if config.get('androidobservatory'):
            upload_to_android_observatory(repo_section)
        if config.get('virustotal_apikey'):
            upload_to_virustotal(repo_section, config.get('virustotal_apikey'))

    binary_transparency_remote = config.get('binary_transparency_remote')
    if binary_transparency_remote:
        push_binary_transparency(BINARY_TRANSPARENCY_DIR,
                                 binary_transparency_remote)

    common.write_status_json(common.setup_status_output(start_timestamp))
    sys.exit(0)
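
A quick illustration of the serverwebroot parsing above, which accepts either a bare path or an ssh-style host:path (the values are placeholders):

for serverwebroot in ['user@host.example.org:/srv/fdroid', '/srv/fdroid']:
    s = serverwebroot.rstrip('/').split(':')
    print(s)  # ['user@host.example.org', '/srv/fdroid'], then ['/srv/fdroid']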
Example No. 58
0
    def parseCmdLine(self, description, args):
        argParser = ArgumentParser(description=description)
        argParser.add_argument('-o', '--out-dir', required=True, type=str,
                               help="""Directory where test results will be written""")
        return argParser.parse_args(args)
Example No. 59
0
#!/usr/bin/env python3


from argparse import ArgumentParser
from plotting_tools import plotevent
import numpy as np

parser = ArgumentParser('Make some plots')
parser.add_argument('inputFile')
args = parser.parse_args()

infile = str(args.inputFile)

from DeepJetCore.TrainData import TrainData
import matplotlib.pyplot as plt

td=TrainData()
td.readFromFile(infile)

feat = td.transferFeatureListToNumpy()[0]
truth = td.transferTruthListToNumpy()[0]
nevents = min(len(feat),10)


for e in range(nevents):
    
    print('true energy', truth[e])
    print('reco sum   ', np.sum(feat[e,:,:,:,0]))
    
    fig = plt.figure()
    ax = fig.add_subplot(projection='3d')  # fig.gca(projection=...) was removed in newer matplotlib
Example No. 60
0

if __name__ == "__main__":

    ARG_PARSER = ArgumentParser()
    ARG_PARSER.add_argument('--load_g', action='store_true')
    ARG_PARSER.add_argument('--load_d', action='store_true')
    ARG_PARSER.add_argument('--no_save_g', action='store_true')
    ARG_PARSER.add_argument('--no_save_d', action='store_true')

    ARG_PARSER.add_argument('--num_epochs', default=300, type=int)
    ARG_PARSER.add_argument('--seq_len', default=256, type=int)
    ARG_PARSER.add_argument('--batch_size', default=16, type=int)
    ARG_PARSER.add_argument('--g_lrn_rate', default=0.001, type=float)
    ARG_PARSER.add_argument('--d_lrn_rate', default=0.001, type=float)

    ARG_PARSER.add_argument('--no_pretraining', action='store_true')
    ARG_PARSER.add_argument('--g_pretraining_epochs', default=5, type=int)
    ARG_PARSER.add_argument('--d_pretraining_epochs', default=5, type=int)
    # ARG_PARSER.add_argument('--freeze_d_every', default=5, type=int)
    ARG_PARSER.add_argument('--use_sgd', action='store_true')
    ARG_PARSER.add_argument('--conditional_freezing', action='store_true')
    ARG_PARSER.add_argument('--label_smoothing', action='store_true')
    ARG_PARSER.add_argument('--feature_matching', action='store_true')

    ARGS = ARG_PARSER.parse_args()
    MAX_SEQ_LEN = ARGS.seq_len
    BATCH_SIZE = ARGS.batch_size

    main(ARGS)
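
Hypothetical invocation (the script name is assumed, and main must be defined elsewhere in the file):

#   python train_gan.py --num_epochs 100 --seq_len 128 --batch_size 32 --label_smoothing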