Example #1
def purge_queues():
    """
    Purge unused queues
    """
    parser = ArgumentParser()
    parser.add_argument("--version", action="version", version="Apollo Queue Purger 0.1")
    add_argparse_group(parser)
    parser.add_argument(
        "-p",
        "--pattern",
        default=".worker",
        help="Queue destination substring to match for purge " "candidates (default: %(default)s)",
    )
    parser.set_defaults()
    args = parser.parse_args()
    monitor = monitor_from_args(args)

    # Iterate over all queues and delete unused 'worker' queues
    for queue, dic in list(monitor.queues.items()):
        if args.pattern not in queue:
            continue
        if dic["metrics"]["queue_items"] == 0:
            continue
        if (dic["metrics"]["consumer_count"] + dic["metrics"]["producer_count"]) == 0:
            logger.warn("deleting %s", queue)
            monitor.delete_queue(queue)
Example #2
def parse_args():
    """Parse input arguments
    """
    parser = ArgumentParser(description=__doc__,
                            formatter_class=ArgumentDefaultsHelpFormatter)

    parser.add_argument('dataset',
                        help='Path to the database')

    parser.add_argument('-csv', '--csv_file',
                        help='Export to a csv file',
                        default=None)

    parser.add_argument('-f', '--force',
                        help='Rewrite the output csv file',
                        default=False, action='store_true')

    parser.add_argument('-db', '--db-type',
                        help='Type of database',
                        default='lmdb')
    parser.add_argument('-label', '--label',
                        help='Use the label',
                        dest='label', action='store_true')
    parser.add_argument('-nolabel', '--no-label',
                        help='Do not use the label',
                        dest='label', action='store_false')
    parser.add_argument('-mb', '--mini-batch',
                        help='Explore the dataset in batches of the '
                             'specified size', default=1000, type=int)

    parser.set_defaults(param=False)

    args = parser.parse_args()
    return args
Example #3
def cli():
    parser_main = ArgumentParser()
    parser_main.set_defaults(func=parser_main.print_usage)
    sub_parsers = parser_main.add_subparsers(help='sub-command')

    # main_parser


    # sub_parser: img
    parser_img = sub_parsers.add_parser('image', help='convert image file format')

    parser_img.add_argument('path', nargs='?', help='image file path')
    parser_img.add_argument('-o', '--outdir', help='output directory')
    parser_img.add_argument('-ext', '--ext',
                            help='to this format like png gif ...')

    parser_img.set_defaults(func=main_img)

    # sub_parser: info
    parser_info = sub_parsers.add_parser('info', help='recognise file format')
    parser_info.add_argument('path', nargs='?', help='file path')

    parser_info.set_defaults(func=main_info)

    parse_result = parser_main.parse_args()
    args = remove_value(remove_key(parse_result.__dict__, 'func'), None)
    try:
        parse_result.func(**args)
    except Exception as ex:
        color.print_err(type(ex), ex)
        color.print_err('Invalid args')
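The dispatch idiom above — binding a handler to each sub-parser with set_defaults(func=...) and then calling the selected function with the remaining arguments — can be reduced to a minimal, self-contained sketch (cmd_info and the 'info' sub-command are illustrative stand-ins, not the original project's code):

from argparse import ArgumentParser

def cmd_info(path=None):
    print('info for', path)

parser = ArgumentParser()
parser.set_defaults(func=parser.print_usage)   # fallback when no sub-command is given
sub_parsers = parser.add_subparsers(help='sub-command')

parser_info = sub_parsers.add_parser('info', help='recognise file format')
parser_info.add_argument('path', nargs='?', help='file path')
parser_info.set_defaults(func=cmd_info)        # sub-command overrides the fallback

args = parser.parse_args(['info', 'photo.png'])
kwargs = {k: v for k, v in vars(args).items() if k != 'func'}
args.func(**kwargs)                            # dispatches to cmd_info(path='photo.png')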
Example #4
def parse_args():
    parser = ArgumentParser()
    parser.add_argument('--storage_file', '-s', default=DEFAULT_STORAGE_FILE, help='storage file to use for the credentials (default is {})'.format(DEFAULT_STORAGE_FILE))
    parser.add_argument('--verbose', '-v', dest='verbose', action='store_true', help='display credentials storage location, access token, and refresh token')
    parser.set_defaults(verbose=False)
    parser.add_argument('--noauth_local_webserver','-u', action='store_const', const='--noauth_local_webserver')
    return parser.parse_args()
Example #5
def main():
    '''Usage: %prog [options] custom_json_file directory_for_deploy'''
    aparser = ArgumentParser(description=main.__doc__)
    aparser.add_argument(
        '--nproc',
        help='number of jobs to send in parallel, default=1'
    )
    aparser.add_argument(
        'answer_file',
        help='name of the user input answer file',
    )
    aparser.set_defaults(nproc=1)
    parsed = vars(aparser.parse_args())

    test_data_ds=[]
    my_real_tests=[]
    out_test_ds=[]

    '''read input data and define tests to run'''
    read_user_input(parsed['answer_file'])
    read_test_data(test_data_ds)
  
    my_real_tests = select_tests(test_data_ds)
    
    '''run the tests'''
    out_data_ds = test_scheduler(my_real_tests)
    print_summary_report(out_test_ds)
Example #6
def main():

    # example on how to run:
    # From elife-bot folder run
    # python starter/starter_CopyGlencoeStillImages.py --env=dev --article-id=15224 --no-poa

    parser = ArgumentParser()
    parser.add_argument("-e", "--env", action="store", type=str, dest="env",
                        help="set the environment to run, e.g. dev, live, prod, end2end")
    parser.add_argument("-a", "--article-id", action="store", type=str, dest="article_id",
                        help="specify the article id to process")
    parser.add_argument("-p", "--poa", action="store_true", dest="poa",
                        help="Article is POA. If omitted it defaults to False.")
    parser.add_argument("-np", "--no-poa", action="store_false", dest="poa",
                        help="Article is NOT POA. If omitted it defaults to False.")
    parser.set_defaults(env="dev", article_id=None, poa=False)

    args = parser.parse_args()
    ENV = None
    if args.env:
        ENV = args.env
    article_id = None
    is_poa = False
    if args.article_id:
        article_id = args.article_id
    if args.poa:
        is_poa = args.poa

    import settings as settingsLib
    settings = settingsLib.get_settings(ENV)

    o = starter_CopyGlencoeStillImages()

    o.start(settings=settings, article_id=article_id, standalone=True, standalone_is_poa=is_poa)
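The --poa/--no-poa pair in the example above is a recurring set_defaults pattern: two flags write to the same dest, and parser.set_defaults() supplies the value used when neither flag is present. A minimal sketch of just that behaviour:

from argparse import ArgumentParser

parser = ArgumentParser()
parser.add_argument('-p', '--poa', dest='poa', action='store_true',
                    help='article is POA')
parser.add_argument('-np', '--no-poa', dest='poa', action='store_false',
                    help='article is not POA')
parser.set_defaults(poa=False)                 # value when neither flag is given

print(parser.parse_args([]).poa)               # False
print(parser.parse_args(['--poa']).poa)        # True
print(parser.parse_args(['--no-poa']).poa)     # False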
Example #7
def make_parser():
    parser = ArgumentParser(description=__doc__)
    parser.add_argument('input', type=FileType('r'), help='input data')
    parser.add_argument('responses', type=int, help='number of response '
            'variables')
    parser.add_argument('-e','--with-errors', action='store_true', 
            help='response variable columns are interleaved with their standard'
            ' errors')
    parser.add_argument('-d', '--delimiter', default=',', metavar='CHAR',
            help='input fields separator (default: "%(default)s")')
    parser.add_argument('-o', '--output', help='save plot to %(metavar)s',
            metavar='FILE', type=FileType('w'))
    parser.add_argument('-p', '--plot', action='store_true', help='plot fit '
            '(univariate input only!)')
    parser.add_argument('--input-name', help='input variables names',
            type=FileType('r'), metavar='FILE')
    parser.add_argument('--output-name', help='output variables names',
            type=FileType('r'), metavar='FILE')
    parser.add_argument('-0', '--theta0', type=float, help='GP parameter Theta0'
            ' (default: %(default)g)', metavar='VALUE')
    parser.add_argument('-U', '--thetaU', type=float, help='GP parameter ThetaU'
            ' (default: %(default)g)', metavar='VALUE')
    parser.add_argument('-L', '--thetaL', type=float, help='GP parameter ThetaL'
            ' (default: %(default)g)', metavar='VALUE')
    parser.add_argument('-N', '--nugget', type=float, help='GP parameter nugget'
            ' (default: %(default)g)', metavar='VALUE')
    parser.set_defaults(theta0=.1, thetaL=1e-2, thetaU=1, nugget=1e-2)
    return parser
Example #8
def set_attributes_from_cmd_line():
    parser = ArgumentParser(
        description="Find mitotic cells and put related data into a folder")
    parser.add_argument("baseDir",
                        help="path to acquisition folder",
                        type=str)
    parser.add_argument("channel",
                        help="channel used to detect SPBs",
                        type=str)
    parser.add_argument("calibration",
                        help="calibration of image",
                        type=float)
    parser.add_argument("acq_interval",
                        help="interval of fluo acquisition",
                        type=float)
    parser.add_argument("pos",
                        help="position name",
                        type=str)
    parser.add_argument("bf_Prefix",
                        help="bright field prefix",
                        type=str)
    parser.add_argument("fluo_Prefix",
                        help="fluo field prefix",
                        type=str)
    parser.add_argument('--ch5', dest='ch5', action='store_true', help="save into cellh5")
    parser.add_argument('--no-ch5', dest='ch5', action='store_false', help="not save into cellh5")
    parser.set_defaults(ch5=True)
    parser.add_argument("-minimumPeriod",
                        help="minimum time segment to be analyzed",
                        type=int, default=200)
    return parser.parse_args()
Example #9
def parse_args():
    # We need the CouchDB admin credentials
    # These can be provided as command line arguments or via prompt

    parser = ArgumentParser()
    # CouchDB URL is required
    parser.add_argument('couch_server', type=str)
    parser.add_argument('couch_port', type=int)
    parser.add_argument('-u', '--user', dest='adminuser',
                        help='CouchDB admin user')
    parser.add_argument('-p', '--password', dest='adminpass',
                        help='CouchDB admin password')
    parser.add_argument('-db', '--maindb', dest='main_db',
                        help='Main Wikifeat database')
    # Note: your python must be compiled with SSL support to use HTTPS
    parser.add_argument('--use_ssl', dest='use_ssl', action='store_true')
    parser.set_defaults(use_ssl=False)
    parser.set_defaults(main_db="wikifeat_main_db")

    args = parser.parse_args()

    if args.adminuser is None:
        args.adminuser = input("Enter CouchDB admin username: ")
    if args.adminpass is None:
        args.adminpass = getpass("Enter CouchDB admin password: ")

    return args
Example #10
def _parse_command_line_arguments():
    """
    Parse and return command line arguments
    """
    parser = ArgumentParser(
        description=(
            'Command-line tool to generate an observing plan from a LIGO/Virgo GraceID (with an optional galaxy map too)'
        ),
    )
    parser.add_argument(
        'graceid',
        type=str,
        help=(
            'The Grace-ID for the event'
        ),
    )
    parser.add_argument(
        'nside',
        type=int,
        help=(
            'nside for the output map: '
            'nside = ceil(sqrt(3/Pi) 60 / s), '
            'where s is the length of one side of the square field of view in degrees. '
            'It will be rounded to the nearest power of two.'
        ),
    )
    parser.add_argument(
        '--gal-map',
        required=False,
        type=str,
        help='A FITS file containing the galaxy density map in HEALPIX format'
    )
    parser.add_argument(
        '--grace-file',
        required=False,
        type=str,
        help='The name of the FITS file containing the probability in HEALPIX format (default of bayestar.fits.gz)'
    )
    parser.add_argument(
        '--nvalues',
        required=False,
        type=int,
        help='Number of Maximum Probability pixels to be shown'
    )
    parser.add_argument(
        '--cumprob',
        required=False,
        type=float,
        help='Output up to the given cumulative probability'
    )
    parser.add_argument('--savefigures',dest='savefigures',action='store_true')
    parser.add_argument('--no-savefigures',dest='savefigures',action='store_false')
    parser.set_defaults(savefigures=False)

    parser.add_argument('--textoutput',dest='textoutput',action='store_true')
    parser.add_argument('--no-textoutput',dest='textoutput',action='store_false')
    parser.set_defaults(textoutput=False)

    arguments = vars(parser.parse_args())
    return arguments
Example #11
    def parse_cmdline(self):
        from fukei import __version__
        parser = ArgumentParser(usage="usage: PROG [options]")
        _ = parser.add_argument
        _("-s", "--server", default='127.0.0.1',
          help="Remote server, IP address or domain (default %(default)r)", type=str)
        _("-k", "--password", default='123', help=
          "Password, should be same in client and server sides (default %(default)r)", type=str)
        _("-c", "--config", default=self.default_path,
          help="config.json path (default %(default)r)", metavar="FILE")
        _("-p", "--server-port", default=8388, help="Remote server port (default %(default)r)", type=int)
        _("-l", "--local-port", default=1080,
          help="Local client port (default %(default)r)", type=int)
        _("-m", "--method", default='table',
          help="Encryption method (default %(default)r)", type=str)
        _("-t", "--timeout", default=10,
          help="connection timeout (default %(default)r)", type=int)
        _("-d", "--debug", action='store_true', default=False,
          help="open debug mode (default %(default)r)",)
        _("-v", "--version", help="Show Fukei version %s" % __version__)
        c = self.get_file_opt()
        parser.set_defaults(**c)

        opt, _ = parser.parse_known_args(self.args)
        return opt
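Passing previously loaded options into parser.set_defaults(**c), as parse_cmdline does with the values from get_file_opt(), turns them into parser-level defaults: they override the defaults declared in add_argument but still lose to anything typed on the command line. A small sketch of that precedence, with made-up option values:

from argparse import ArgumentParser

parser = ArgumentParser()
parser.add_argument('-p', '--server-port', default=8388, type=int)
parser.add_argument('-m', '--method', default='table')

# pretend these were read from config.json
file_options = {'server_port': 9000, 'method': 'aes-256-cfb'}
parser.set_defaults(**file_options)

args = parser.parse_args([])
print(args.server_port, args.method)           # 9000 aes-256-cfb  (file beats add_argument defaults)
args = parser.parse_args(['-p', '1080'])
print(args.server_port, args.method)           # 1080 aes-256-cfb  (command line beats the file)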
Example #12
def main():
    set_stdout_encoding()

    parser = ArgumentParser(
        formatter_class=ArgumentDefaultsHelpFormatter,
        description='Convert text file to communication',
    )
    parser.set_defaults(annotation_level=AL_NONE)
    parser.add_argument('text_path', type=str,
                        help='Input text file path (- for stdin)')
    parser.add_argument('concrete_path', type=str,
                        help='Output concrete file path (- for stdout)')
    add_annotation_level_argparse_argument(parser)
    parser.add_argument('-l', '--loglevel', '--log-level',
                        help='Logging verbosity level threshold (to stderr)',
                        default='info')
    concrete.version.add_argparse_argument(parser)
    args = parser.parse_args()

    logging.basicConfig(format='%(asctime)-15s %(levelname)s: %(message)s',
                        level=args.loglevel.upper())

    # Won't work on Windows
    text_path = '/dev/fd/0' if args.text_path == '-' else args.text_path
    concrete_path = (
        '/dev/fd/1' if args.concrete_path == '-' else args.concrete_path
    )
    annotation_level = args.annotation_level

    with codecs.open(text_path, encoding='utf-8') as f:
        comm = create_comm(text_path, f.read(),
                           annotation_level=annotation_level)
        write_communication_to_file(comm, concrete_path)
Example #13
def _parse_command_line_arguments():
    """
    Parse and return command line arguments
    """
    parser = ArgumentParser(
        description=(
            'Command-line tool to generate a skymap from a TS file from FermiFAST'
        ),
    )
    parser.add_argument(
        'ts-file',
        type=str,
        help=(
            'A file containing the TS sky map'
        ),
    )
    parser.add_argument('--skiprows',
                        type=int,
                        help='number of rows to skip at the top (default 32)',
                        required=False)
    parser.set_defaults(skiprows=32)
    parser.add_argument('--column',
                        type=int,
                        help='number of the column to use starting from zero (default 22)',
                        required=False)
    parser.set_defaults(column=22)

    arguments = vars(parser.parse_args())
    return arguments
Example #14
def _software_params_to_argparse(parameters):
    """
    Converts a SoftwareParameterCollection into an ArgumentParser object.

    Parameters
    ----------
    parameters: SoftwareParameterCollection
        The software parameters
    Returns
    -------
    argparse: ArgumentParser
        An initialized argument parser
    """
    # Check software parameters
    argparse = ArgumentParser()
    boolean_defaults = {}
    for parameter in parameters:
        arg_desc = {"dest": parameter.name, "required": parameter.required, "help": ""}  # TODO add help
        if parameter.type == "Boolean":
            default = _to_bool(parameter.defaultParamValue)
            arg_desc["action"] = "store_true" if not default else "store_false"
            boolean_defaults[parameter.name] = default
        else:
            python_type = _convert_type(parameter.type)
            arg_desc["type"] = python_type
            arg_desc["default"] = None if parameter.defaultParamValue is None else python_type(parameter.defaultParamValue)
        argparse.add_argument(*_cytomine_parameter_name_synonyms(parameter.name), **arg_desc)
    argparse.set_defaults(**boolean_defaults)
    return argparse
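The Boolean branch above inverts the action based on the default value (store_true when the default is False, store_false when it is True) and records the defaults themselves through a single set_defaults call. Stripped of the Cytomine-specific types, the idea looks roughly like this (add_boolean_flag is an illustrative helper, not part of the original code):

from argparse import ArgumentParser

def add_boolean_flag(parser, name, default):
    # A flag can only flip the value away from its default, so the action
    # depends on that default; set_defaults records the default itself.
    action = 'store_false' if default else 'store_true'
    parser.add_argument('--' + name, dest=name, action=action)
    parser.set_defaults(**{name: default})

parser = ArgumentParser()
add_boolean_flag(parser, 'verbose', default=False)
add_boolean_flag(parser, 'cache', default=True)

args = parser.parse_args([])
print(args.verbose, args.cache)                # False True  (defaults untouched)
args = parser.parse_args(['--verbose', '--cache'])
print(args.verbose, args.cache)                # True False  (each flag flips its default)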
Example #15
    def _get_parser(cls):
        'creates an option parser and adds the command line arguments'

        try:
            from argparse import ArgumentParser
            parser = ArgumentParser(prog=cls.NAME)
        except ImportError:
            # we're fine with OptionParser because we're using it like
            # ArgumentParser
            from optparse import OptionParser
            parser = OptionParser(prog=cls.NAME,
                                  usage='%prog [options] filenames...')

            parser.add_argument = parser.add_option


        parser.set_defaults(mode=Window.DEFAULT_VIEW,
                            flicker=Window.DEFAULT_FLICKER)

        parser.add_argument('-v', '--view',
                            help='one of {%s}' % ', '.join(Window.VIEWS.keys()))

        parser.add_argument('-t', '--time', dest='flicker',
                            help='time string representing the frequency in '
                                 'which to alternate the images '
                                 '(understands: 1s, 500ms, 2.5s)')

        parser.add_argument('--timer', action='store_const', dest='flicker',
                            const=Window.DEFAULT_FLICKER,
                            help='implies --time %r' % Window.DEFAULT_FLICKER)

        return parser
Example #16
def main():
    parser = ArgumentParser(
        formatter_class=ArgumentDefaultsHelpFormatter,
        description='Convert text file to communication',
    )
    parser.set_defaults(annotation_level=AL_NONE)
    parser.add_argument('text_path', type=str,
                        help='Input text file path (- for stdin)')
    parser.add_argument('concrete_path', type=str,
                        help='Output concrete file path (- for stdout)')
    add_annotation_level_argparse_argument(parser)
    concrete.version.add_argparse_argument(parser)
    ns = parser.parse_args()

    # Won't work on Windows... but that use case is very unlikely
    text_path = '/dev/fd/0' if ns.text_path == '-' else ns.text_path
    concrete_path = (
        '/dev/fd/1' if ns.concrete_path == '-' else ns.concrete_path
    )
    annotation_level = ns.annotation_level

    with codecs.open(text_path, encoding='utf-8') as f:
        comm = create_comm(text_path, f.read(),
                           annotation_level=annotation_level)
        write_communication_to_file(comm, concrete_path)
Example #17
def config_web_parser(parser: argparse.ArgumentParser, is_primary=False):
    """Configure the argument parser for the web command

    Args:
        parser: The parser to configure
        is_primary: True if configuring as the main command.  False if
            configuring as a sub-command.
    """
    parser.description = parawrap.fill(
        'Run builtin dev server or print a command that would run the server '
        'if the command were evaluated.  Although the production server will '
        'not be run directly, it could be run with:\n'
        '\n'
        '\t\'eval $({}{} web [OPTIONS] <production-server>)\''
        ''.format(cfg.PKG_NAME, '' if is_primary else ' web')
    )
    parser.formatter_class = MixedHelpFormatter
    parser.add_argument('--debug', '-d',
                        action='store_true',
                        help='Run the dev server with debug web iface and '
                             'reload server on source file changes')
    parser.add_argument('--host',
                        default=cfg.WEB_HOST,
                        help='Server host/name')
    parser.add_argument('--port', '-p',
                        type=int, default=cfg.WEB_PORT,
                        help='Port on which the server listens')
    parser.add_argument('server',
                        choices=('builtin', 'eventlet', 'gunicorn'), default='builtin',
                        help='Run builtin dev server or print command '
                             'related to running a specific production server')
    parser.set_defaults(func=web_cmd)
Example #18
def main():
  parser = ArgumentParser(description="Process GCTracer's NVP output")
  parser.add_argument('keys', metavar='KEY', type=str, nargs='+',
                      help='the keys of NVPs to process')
  parser.add_argument('--histogram-type', metavar='<linear|log2>',
                      type=str, nargs='?', default="linear",
                      help='histogram type to use (default: linear)')
  linear_group = parser.add_argument_group('linear histogram specific')
  linear_group.add_argument('--linear-histogram-granularity',
                            metavar='GRANULARITY', type=int, nargs='?',
                            default=5,
                            help='histogram granularity (default: 5)')
  log2_group = parser.add_argument_group('log2 histogram specific')
  log2_group.add_argument('--log2-histogram-init-bucket', metavar='START',
                          type=int, nargs='?', default=64,
                          help='initial bucket size (default: 64)')
  parser.add_argument('--histogram-omit-empty-buckets',
                      dest='histogram_omit_empty',
                      action='store_true',
                      help='omit empty histogram buckets')
  parser.add_argument('--no-histogram', dest='histogram',
                      action='store_false', help='do not print histogram')
  parser.set_defaults(histogram=True)
  parser.set_defaults(histogram_omit_empty=False)
  parser.add_argument('--rank', metavar='<no|min|max|avg>',
                      type=str, nargs='?',
                      default="no",
                      help="rank keys by metric (default: no)")
  parser.add_argument('--csv', dest='csv',
                      action='store_true', help='provide output as csv')
  args = parser.parse_args()

  histogram = None
  if args.histogram:
    bucket_trait = None
    if args.histogram_type == "log2":
      bucket_trait = Log2Bucket(args.log2_histogram_init_bucket)
    else:
      bucket_trait = LinearBucket(args.linear_histogram_granularity)
    histogram = Histogram(bucket_trait, not args.histogram_omit_empty)

  categories = [ Category(key, deepcopy(histogram), args.csv)
                 for key in args.keys ]

  while True:
    line = stdin.readline()
    if not line:
      break
    obj = split_nvp(line)
    for category in categories:
      category.process_entry(obj)

  # Filter out empty categories.
  categories = [x for x in categories if not x.empty()]

  if args.rank != "no":
    categories = sorted(categories, key=make_key_func(args.rank), reverse=True)

  for category in categories:
    print(category)
Example #19
def main():
    from argparse import ArgumentParser
    parser = ArgumentParser()
    parser.set_defaults(
        loglevel=logging.INFO,
        regions=[],
        subnet_names=[],
        warn_threshold=10,
        crit_threshold=5,
    )
    parser.add_argument('-v', '--verbose', dest='loglevel', action='store_const', const=logging.DEBUG)
    parser.add_argument('-q', '--quiet', dest='loglevel', action='store_const', const=logging.WARN)
    parser.add_argument('-r', '--region', dest='regions', action='append', required=True)
    parser.add_argument('-s', '--subnet-name', dest='subnet_names', action='append', required=True)

    # Nagios options
    parser.add_argument('-w', '--warn-threshold', dest='warn_threshold',
                        help='threshold at which to emit nagios warning', type=int)
    parser.add_argument('-c', '--crit-threshold', dest='crit_threshold',
                        help='threshold at which to emit nagios critical alert', type=int)

    args = parser.parse_args()

    all_subnets = get_subnets(args.regions)
    my_subnets = filter_subnets_by_name(all_subnets, args.subnet_names)
    grouped_subnets = group_subnets_by_type(my_subnets)

    exit_code = report_free_ips(grouped_subnets, args.warn_threshold, args.crit_threshold)
    exit(exit_code)
Example #20
def parse_arguments(args):
    parser = ArgumentParser()
    parser.set_defaults(daemonize=False,
                        host=None,
                        prefix=None,
                        replacement_char='_',
                        pidfile='/var/run/metricinga.pid',
                        poll_interval=60,
                        port=2004,
                        spool_dir='/var/spool/metricinga')

    parser.add_argument('-d', '--daemonize', action='store_true',
            help='Run as a daemon')
    parser.add_argument('--pidfile',
            help='Path to daemon pidfile')
    parser.add_argument('-v', '--verbose', action='store_true',
            help='Enable verbose output')

    parser.add_argument('-P', '--prefix',
            help='Prefix to prepend to all metric names')
    parser.add_argument('-r', '--replacement-char',
            help='Replacement char for illegal metric characters')
    parser.add_argument('-D', '--spool-dir',
            help='Spool directory to watch for perfdata files')
    parser.add_argument('--poll-interval', type=int,
            help='Spool polling interval (if not using inotify)')

    parser.add_argument('-H', '--host',
            help='Graphite host to submit metrics to')
    parser.add_argument('-p', '--port', type=int,
            help='Port to connect to')

    return parser.parse_args(args)
Example #21
def init_config():
    parser = ArgumentParser()
    config_file = "config.json"

    # If config file exists, load variables from json
    load = {}
    if isfile(config_file):
        with open(config_file) as data:
            load.update(read_json(data))

    # Read passed in Arguments
    required = lambda x: not x in load
    parser.add_argument(
        "-a", "--auth_service", help="Auth Service ('ptc' or 'google')", required=required("auth_service")
    )
    parser.add_argument("-u", "--username", help="Username", required=required("username"))
    parser.add_argument("-p", "--password", help="Password", required=required("password"))
    parser.add_argument("-l", "--location", help="Location", required=required("location"))
    parser.add_argument("-s", "--spinstop", help="SpinPokeStop", action="store_true")
    parser.add_argument("-v", "--stats", help="Show Stats and Exit", action="store_true")
    parser.add_argument(
        "-w",
        "--walk",
        help="Walk instead of teleport with given speed (meters per second, e.g. 2.5)",
        type=float,
        default=2.5,
    )
    parser.add_argument("-c", "--cp", help="Set CP less than to transfer(DEFAULT 100)", default=100)

    parser.add_argument("-k", "--gmapkey", help="Set Google Maps API KEY", type=str, default=None)
    parser.add_argument(
        "--maxsteps",
        help="Set the steps around your initial location(DEFAULT 5 mean 25 cells around your location)",
        type=int,
        default=5,
    )

    parser.add_argument("-d", "--debug", help="Debug Mode", action="store_true")
    parser.add_argument("-t", "--test", help="Only parse the specified location", action="store_true")
    parser.add_argument(
        "-tl",
        "--transfer_list",
        help="Transfer these pokemons regardless cp(pidgey,drowzee,rattata)",
        type=str,
        default="",
    )
    parser.set_defaults(DEBUG=False, TEST=False)
    config = parser.parse_args()

    # Passed-in arguments should trump
    for key in config.__dict__:
        if key in load and config.__dict__[key] is None:
            config.__dict__[key] = load[key]

    if config.auth_service not in ["ptc", "google"]:
        logging.error("Invalid Auth service ('%s') specified! ('ptc' or 'google')", config.auth_service)
        return None

    return config
Example #22
def parse_cli_args(args):
    parser = ArgumentParser()
    parser.add_argument('--version', action='version', version=__version__)
    parser.set_defaults(action=help)
    subparsers = parser.add_subparsers()
    add_backup_parser(subparsers)
    add_restore_parser(subparsers)
    return parser.parse_args(args)
Example #23
def get_options():
    ap = ArgumentParser()
    ap.set_defaults(loop=0)
    ap.add_argument("--src", required=True)
    ap.add_argument("--dst", required=True)
    ap.add_argument("--loop", type=int)

    return ap.parse_args()
Example #24
def main():
    from argparse import ArgumentParser

    from synaptiks import __version__
    from synaptiks.x11 import Display, DisplayError
    from synaptiks.touchpad import Touchpad, NoTouchpadError

    parser = ArgumentParser(
        description='synaptiks touchpad configuration utility',
        epilog="""\
Copyright (C) 2010 Sebastian Wiesner <*****@*****.**>,
distributed under the terms of the BSD License""")
    parser.add_argument('--version', help='Show synaptiks version',
                        action='version', version=__version__)
    actions = parser.add_subparsers(title='Actions')

    init_act = actions.add_parser(
        'init', help='Initialize touchpad configuration.  Should not be '
        'called manually, but automatically at session startup.')
    init_act.set_defaults(action='init')

    load_act = actions.add_parser(
        'load', help='Load the touchpad configuration')
    load_act.add_argument(
        'filename', nargs='?', help='File to load the configuration from.  If '
        'empty, the default configuration file is loaded.')
    load_act.set_defaults(action='load')

    save_act = actions.add_parser(
        'save', help='Save the current touchpad configuration')
    save_act.add_argument(
        'filename', nargs='?', help='File to save the configuration to.  If '
        'empty, the default configuration file is used.')
    save_act.set_defaults(action='save')

    # default filename to load configuration from
    parser.set_defaults(filename=None)

    # we don't have any arguments, but need to make sure, that the builtin
    # arguments (--help mainly) are handled
    args = parser.parse_args()

    try:
        with Display.from_name() as display:
            touchpad = Touchpad.find_first(display)

            if args.action == 'init':
                driver_defaults = TouchpadConfiguration(touchpad)
                driver_defaults.save(get_touchpad_defaults_file_path())
            if args.action in ('init', 'load'):
                TouchpadConfiguration.load(touchpad, filename=args.filename)
            if args.action == 'save':
                current_config = TouchpadConfiguration(touchpad)
                current_config.save(filename=args.filename)
    except DisplayError:
        parser.error('could not connect to X11 display')
    except NoTouchpadError:
        parser.error('no touchpad found')
Example #25
    def __setup_parser(self):
        parser = ArgumentParser(prog='list', description=List.__doc__)
        self._add_argument(parser, '-a', '--available',
                           const=self.__list_planets,
                           help="Print list of available objects.")
        parser.add_argument('-v', '--verbose', action='store_true',
                            dest='verbose', default=False)
        parser.set_defaults(action=self.__show_focussed)

        return parser, self.__list_planets
Example #26
def main():
    # For some reason (take a wild guess) Commvault has decided that
    # their long options will take the form of '-option' not the standard
    # '--option'.


    # Always set HOME to '/root', as the commvault environment is bare
    os.environ['HOME'] = '/root'
    os.environ['TMPDIR'] = '/tmp'
    # ensure we do not inherit commvault's LD_LIBRARY_PATH
    os.environ.pop('LD_LIBRARY_PATH', None)

    argv = sys.argv[1:]

    parser = ArgumentParser()
    parser.add_argument("--config-file", "-c", metavar="<file>",
                        help="Read configuration from the given file")
    parser.add_argument("--log-level", "-l", type='choice',
                        choices=['critical','error','warning','info',
                                 'debug'],
                        help="Specify the log level."
                       )
    parser.add_argument("--quiet", "-q", action="store_true",
                        help="Don't log to console")
    parser.add_argument("--verbose", "-v", action="store_true",
                        help="Verbose output")

    parser.add_argument("--bksets", "-b", metavar="<bkset>,<bkset>...",
                        help="only run the specified backupset",
                        default=[], action=ArgList)

    parser.add_argument("-bkplevel", type=int)
    parser.add_argument("-attempt", type=int)
    parser.add_argument("-status", type=int)
    parser.add_argument("-job", type=int)
    parser.add_argument("-vm")
    parser.add_argument("-cn")
    parser.set_defaults(
        config_file=os.getenv('HOLLAND_CONFIG') or '/etc/holland/holland.conf',
        verbose=False,
    )

    args, largs = parser.parse_known_args(argv)

    bootstrap(args)

    logging.info("Holland (commvault agent) %s started with pid %d",
                 HOLLAND_VERSION, os.getpid())
    # Commvault usually runs with a very low default limit for nofile
    # so a best effort is taken to raise that here.
    try:
        resource.setrlimit(resource.RLIMIT_NOFILE, (262144, 262144))
        logging.debug("(Adjusted ulimit -n (RLIMIT_NOFILE) to %d)", 262144)
    except (ValueError, resource.error) as exc:
        logging.debug("Failed to raise RLIMIT_NOFILE: %s", exc)
Example #27
def main(argv=None):
    '''Command line options.'''

    program_name = os.path.basename(sys.argv[0])
    program_version = "v0.1"
    program_build_date = "%s" % __updated__

    #program_usage = '''usage: spam two eggs''' # optional - will be autogenerated by optparse
    program_longdesc = '''''' # optional - give further explanation about what the program does
    program_license = "Copyright 2015 McSwindler (James Swindle)                                            \
                Licensed under the MIT License\nhttp://opensource.org/licenses/MIT"

    if argv is None:
        argv = sys.argv[1:]
    try:
        # setup option parser
        parser = ArgumentParser(epilog=program_longdesc, 
                                description=program_license, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
        parser.add_argument("-i", "--in", dest="indir", help="set input directory", nargs='+', metavar="PATH")
        parser.add_argument("-o", "--out", dest="outdir", help="set output directory", metavar="PATH")
        parser.add_argument("-t", "--temp", dest="tempdir", help="set temporary directory", metavar="PATH")
        parser.add_argument("-f", "--filetype", dest="filetype", help="set output filetype")
        parser.add_argument("-H", "--handbrake", dest="handbrake", help="path to handbrake executable", metavar="FILE")

        # set defaults
        parser.set_defaults(outdir=expanduser("~") + os.sep + "Videos" + os.sep + "Encoded", 
                            indir=expanduser("~") + os.sep + "Videos",
                            filetype="mkv",
                            handbrake="C:\Program Files\Handbrake\HandBrakeCLI.exe")

        # process options
        args = parser.parse_args(argv)

        if args.indir:
            #if args.indir is not list:
            #    args.indir = [args.indir]
            print("indir = %s" % args.indir)
        if args.outdir:
            print("outdir = %s" % args.outdir)
        if args.tempdir:
            print("tempdir = %s" % args.tempdir)
        if args.filetype not in ["mkv", "mp4", "m4v"]:
            raise Exception("Invalid FileType, only mkv, mp4, and m4v are valid")

        # MAIN BODY #
        while True:
            AutoEncode(args)

    except Exception:
        indent = len(program_name) * " "
        sys.stderr.write(program_name + ": " + str(sys.exc_info()[0]) + "\n")
        sys.stderr.write(indent + "  for help use --help\n")
        print(traceback.format_exc())
        return 2
Example #28
def main():
    argparser = ArgumentParser(description='Tongs is a simple console tool for www-site traversal and grabbing bunch of urls')
    argparser.add_argument('-v', '--version', action='version', version='%(prog)s 0.1a')
    argparser.add_argument('url',  help='Initial url')
    argparser.add_argument('-l',   dest='links_regexp',   metavar='LINKS',      help='Process ONLY links that match this regular expression')
    argparser.add_argument('-g',   dest='grab_regexp',    metavar='GRAB',       help='Search links that match this regular expression')
    argparser.add_argument('-a',   dest='amount',         metavar='AMOUNT',     help='Stop after grabbing N links', type=bool)
    argparser.add_argument('-t',   dest='threads_count',  metavar='THREADS',    help='Number or simultaneous threads', type=int)
    argparser.add_argument('-ll',  dest='log_level',      metavar='LOGLEVEL',   help='Level of output', type=int, choices=xrange(0, 51))
    argparser.add_argument('-st',  dest='show_timer',     metavar='SHOWTIMER',  help='Show timer after finish', type=bool)
    argparser.set_defaults(
        threads_count = 10,
        log_level     = 0,
        show_timer    = True
    )
    if len(sys.argv) == 1:
        argparser.print_help()
        sys.exit()
    else:
        settings = argparser.parse_args()

    logging.basicConfig(level = settings.log_level, format = '%(message)s')

    if not settings.links_regexp:
        try:
            u = urlsplit(settings.url)
        except URLError:
            raise Exception('Incorrect url')
        settings.links_regexp = urlunsplit(list(u)[:3] + ['','']) + '.*'
        log(10, 'Looking in %s', settings.links_regexp)

    queue = UrlsQueue()
    queue.put(settings.url)

    workers = [
        Spider(i, queue, settings)
        for i in range(settings.threads_count)
    ]

    map(lambda w: w.start(), workers)

    timer_start = time.time()
    try:
        while any(not w.is_sleeping for w in workers):
            time.sleep(1)
    except KeyboardInterrupt:
        exit(0)
    timer_end = time.time()

    map(lambda w: w.stop(), workers) #Send the termination signal
    map(lambda w: w.join(), workers) #And wait for all threads to stop

    if settings.show_timer:
        log(50, "Finished in %2.1f sec", timer_end - timer_start)
Example #29
def parse_options(argv):
  parser = ArgumentParser(description='Generate Zeal/Dash docset from already built documentation.',
                          epilog='Currently supported types: %s' % ','.join(AVAILABLE_DOC_TYPES))
  parser.add_argument('directory', type=str, nargs='?', metavar='dir', default='.', help='Path to project')
  parser.add_argument('-t', '--type', type=str, choices=AVAILABLE_DOC_TYPES, default='autodetect', metavar='TYPE', help='Documentation format')
  parser.add_argument('--dash', action='store_const', const='dash', dest='format', help='Build Dash docset package')
  parser.add_argument('--zeal', action='store_const', const='zeal', dest='format', help='Build Zeal docset directory')
  parser.add_argument('--install', action='store_const', dest='output', const=ZEAL_DOCSET_DIR, help="When building Zeal docsets, try to install them in Zeal's default docset directory (%s)" % ZEAL_DOCSET_DIR)
  parser.add_argument('-o', '--output', dest='output', metavar='DEST', help='Build output in DEST, instead of current directory or Zeal docset directory')
  parser.set_defaults(format='zeal', output='.')
  return parser.parse_args(argv)
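Here --dash and --zeal are store_const flags writing into the same format destination (and --install shares the output destination with -o/--output); parser.set_defaults(format='zeal', output='.') supplies the values used when none of those flags appears. Reduced to the format part only:

from argparse import ArgumentParser

parser = ArgumentParser()
parser.add_argument('--dash', action='store_const', const='dash', dest='format')
parser.add_argument('--zeal', action='store_const', const='zeal', dest='format')
parser.set_defaults(format='zeal')             # used when neither flag appears

print(parser.parse_args([]).format)            # zeal
print(parser.parse_args(['--dash']).format)    # dash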
Example #30
def init_config():
    parser = ArgumentParser()
    load = {}

    # Read passed in Arguments
    parser.add_argument("-cf", "--config", help="Configuration file",default=None)
    parser.add_argument("-a", "--auth_service", help="Auth Service ('ptc' or 'google')")
    parser.add_argument("-u", "--username", help="Username")
    parser.add_argument("-p", "--password", help="Password")
    parser.add_argument("-l", "--location", help="Location")
    parser.add_argument("-s", "--spinstop", help="SpinPokeStop", action='store_true')
    parser.add_argument("-v", "--stats", help="Show Stats and Exit", action='store_true')
    parser.add_argument("-w", "--walk", help="Walk instead of teleport with given speed (meters per second, e.g. 2.5)", type=float, default=2.5)
    parser.add_argument("-c", "--cp",help="Set CP less than to transfer(DEFAULT 100)",default=100)

    parser.add_argument("-k", "--gmapkey",help="Set Google Maps API KEY",type=str,default=None)
    parser.add_argument("--maxsteps",help="Set the steps around your initial location(DEFAULT 5 mean 25 cells around your location)",type=int,default=5)

    parser.add_argument("-d", "--debug", help="Debug Mode", action='store_true')
    parser.add_argument("-t", "--test", help="Only parse the specified location", action='store_true')
    parser.add_argument("-tl", "--transfer_list", help="Transfer these pokemons regardless cp(pidgey,drowzee,rattata)", type=str, default='')
    parser.set_defaults(DEBUG=False, TEST=False)
    config = parser.parse_args()

    if config.config and config.config != 'false':
        default_config = "config.json"

        if isfile(config.config):
            print('[x] Loading configuration file: ' + config.config)
            with open(config.config) as data:
                load.update(read_json(data))
        else:
            print('[x] Loading default configuration file')
            with open(default_config) as data:
                load.update(read_json(data))
    else:
        if config.auth_service is None \
                or config.username is None \
                or config.password is None \
                or config.location is None:
            parser.error('without -cf <true|filename.json>, (-a <auth_service> -u <username> -p <password> -l <location>) are required')
            return None


    # Passed-in arguments should trump
    for key in config.__dict__:
        if key in load:
            config.__dict__[key] = load[key]

    if config.auth_service not in ['ptc', 'google']:
        logging.error("Invalid Auth service ('%s') specified! ('ptc' or 'google')", config.auth_service)
        return None

    return config
Example #31
def get_parser():
    parser = ArgumentParser(description=__doc__,
                            formatter_class=ArgumentDefaultsHelpFormatter)

    parser.add_argument("--fold_ind", type=int, default=3, help="1 to 5")

    parser.add_argument(
        "--model",
        type=str,
        default='DeepLabv3_plus_gcn_skipconnection_3d',
        help="the model name, DeepLabv3_plus_skipconnection_3d, "
        "DeepLabv3_plus_gcn_skipconnection_3d,"
        "UNet3D,"
        "ResidualUNet3D")

    parser.add_argument(
        "--gcn_mode",
        type=int,
        default=2,
        help=
        "the mode for fea2graph and graph2fea, only available for gcn. 0, 1, 2"
    )

    parser.add_argument(
        "--ds_weight",
        type=float,
        default=0.3,
        help="The deep supervision weight used in fea2graph when gcn_mode is 2."
    )

    parser.add_argument("--data_dir",
                        type=str,
                        default='/public/pangshumao/data/five-fold/coarse',
                        help="the data dir")

    parser.add_argument(
        "--resume",
        dest='resume',
        action='store_true',
        help="the training will be resumed from that checkpoint.")

    parser.add_argument(
        "--no-resume",
        dest='resume',
        action='store_false',
        help="the training will not be resumed from that checkpoint.")

    parser.set_defaults(resume=False)

    parser.add_argument("--pre_trained",
                        dest='pre_trained',
                        action='store_true',
                        help="use pretrained the model.")

    parser.add_argument("--no-pre_trained",
                        dest='pre_trained',
                        action='store_false',
                        help="without using pretrained the model.")

    parser.set_defaults(pre_trained=True)

    parser.add_argument("--device",
                        type=str,
                        default='cuda:3',
                        help="which gpu to use")

    parser.add_argument(
        '--loss',
        type=str,
        default='CrossEntropyLoss',
        help="The loss function name, FPFNLoss, CrossEntropyLoss")

    parser.add_argument('--lamda',
                        type=float,
                        default=0.1,
                        help="For FPFNLoss")

    parser.add_argument(
        '--eval_metric',
        type=str,
        default='DiceCoefficient',
        help="The eval_metric name, MeanIoU or DiceCoefficient")

    parser.add_argument('--skip_channels',
                        type=list,
                        default=[0],
                        help="The skip_channels in eval_metric")

    parser.add_argument('--optimizer',
                        type=str,
                        default='Adam',
                        help="Adam or SGD")

    parser.add_argument('--learning_rate',
                        type=float,
                        default=1e-3,
                        help="The initial learning rate")

    parser.add_argument('--seed', type=int, default=0, help="The manual seed")

    return parser
Example #32
def build_args():
    parser = ArgumentParser()

    # basic args
    path_config = pathlib.Path("../../fastmri_dirs.yaml")
    backend = "ddp"
    num_gpus = 32
    batch_size = 1

    # set defaults based on optional directory config
    data_path = fetch_dir("knee_path", path_config)
    default_root_dir = (fetch_dir("log_path", path_config) / "unet" /
                        "knee_mc_leaderboard")

    # client arguments
    parser.add_argument(
        "--mode",
        default="train",
        choices=("train", "test"),
        type=str,
        help="Operation mode",
    )

    # data transform params
    parser.add_argument(
        "--mask_type",
        choices=("random", "equispaced"),
        default="random",
        type=str,
        help="Type of k-space mask",
    )
    parser.add_argument(
        "--center_fractions",
        nargs="+",
        default=[0.08, 0.04],
        type=float,
        help="Number of center lines to use in mask",
    )
    parser.add_argument(
        "--accelerations",
        nargs="+",
        default=[4, 8],
        type=int,
        help="Acceleration rates to use for masks",
    )

    # data config with path to fastMRI data and batch size
    parser = FastMriDataModule.add_data_specific_args(parser)
    parser.set_defaults(
        data_path=data_path,  # path to fastMRI data
        mask_type="random",  # random for knee data
        challenge="multicoil",  # which challenge
        batch_size=batch_size,  # number of samples per batch
        test_path=None,  # path for test split, overwrites data_path
    )

    # module config
    parser = UnetModule.add_model_specific_args(parser)
    parser.set_defaults(
        in_chans=1,  # number of input channels to U-Net
        out_chans=1,  # number of output channels to U-Net
        chans=256,  # number of top-level U-Net channels
        num_pool_layers=4,  # number of U-Net pooling layers
        drop_prob=0.0,  # dropout probability
        lr=0.001,  # RMSProp learning rate
        lr_step_size=40,  # epoch at which to decrease learning rate
        lr_gamma=0.1,  # extent to which to decrease learning rate
        weight_decay=0.0,  # weight decay regularization strength
    )

    # trainer config
    parser = pl.Trainer.add_argparse_args(parser)
    parser.set_defaults(
        gpus=num_gpus,  # number of gpus to use
        replace_sampler_ddp=False,  # this is necessary for volume dispatch during val
        accelerator=backend,  # what distributed version to use
        seed=42,  # random seed
        deterministic=True,  # makes things slower, but deterministic
        default_root_dir=default_root_dir,  # directory for logs and checkpoints
        max_epochs=50,  # max number of epochs
    )

    args = parser.parse_args()

    # configure checkpointing in checkpoint_dir
    checkpoint_dir = args.default_root_dir / "checkpoints"
    if not checkpoint_dir.exists():
        checkpoint_dir.mkdir(parents=True)

    args.checkpoint_callback = pl.callbacks.ModelCheckpoint(
        filepath=args.default_root_dir / "checkpoints",
        verbose=True,
        prefix="",
    )

    # set default checkpoint if one exists in our checkpoint directory
    if args.resume_from_checkpoint is None:
        ckpt_list = sorted(checkpoint_dir.glob("*.ckpt"), key=os.path.getmtime)
        if ckpt_list:
            args.resume_from_checkpoint = str(ckpt_list[-1])

    return args
Example #33
def parseArgs(argv):
    """ Parses configuration file and command line arguments.
    Command line arguments overwrite configuration file settings, which
    in turn overwrite default values.

    Returns:
        argparse.Namespace: The populated namespace.
    """

    # Parse any conf_file specification
    # We make this parser with add_help=False so that
    # it doesn't parse -h and print help.
    conf_parser = AP(
        description=__doc__,  # printed with -h/--help
        # Don't mess with format of description
        formatter_class=ap_RDHF,
        # Turn off help, so we print all options in response to -h
        add_help=False)
    conf_parser.add_argument("-c",
                             "--conf_file",
                             help="Specify config file",
                             metavar="FILE")
    args, remaining_argv = conf_parser.parse_known_args(argv)

    defaults = getDefaults()

    config_source = "Default"
    if args.conf_file:
        config_source = args.conf_file
        config = configparser.ConfigParser()
        config.read([args.conf_file])
        defaults.update(dict(config.items("StarExtract")))

    # Parse rest of arguments
    # Don't suppress add_help here so it will handle -h

    # Inherit options from conf_parser
    parser = AP(parents=[conf_parser])

    parser.set_defaults(**defaults)
    parser.add_argument("--logfile", type=str, help="Filename for log file.")

    parser = get_arguments(parser)

    # Script specific parameters
    parser.add_argument("-t", "--task", type=str, help="Task to execute.")

    # Boolean paramters
    parser.add_argument("--use_tmp",
                        action='store_true',
                        help="Use a temporary directory. Result files will"
                        " be copied to NIGHTvSHOT/res.")
    parser.add_argument("--debug",
                        action='store_true',
                        help="Keep temporary directories")

    # positional arguments
    parser.add_argument('ra',
                        metavar='ra',
                        type=float,
                        help='Right Ascension of star.')
    parser.add_argument('dec',
                        metavar='dec',
                        type=float,
                        help='Declination of star.')
    parser.add_argument('starid', metavar='starid', type=int, help='Star ID')

    args = parser.parse_args(remaining_argv)

    args.config_source = config_source
    # should in principle be able to do this with accumulate???
    # args.use_tmp = args.use_tmp == "True"
    # args.remove_tmp = args.remove_tmp == "True"

    return args
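parseArgs above is the usual two-pass layering: a bare parser with add_help=False pulls out --conf_file via parse_known_args, values read from that file are merged into a dict of defaults, set_defaults(**defaults) installs them on the real parser, and whatever remains on the command line overrides both. A stripped-down sketch of the same flow (the file name, section name, and option names here are illustrative):

from argparse import ArgumentParser
import configparser

conf_parser = ArgumentParser(add_help=False)
conf_parser.add_argument('-c', '--conf_file', metavar='FILE')
conf_args, remaining_argv = conf_parser.parse_known_args(
    ['-c', 'star.conf', '--task', 'extract'])

defaults = {'task': 'none', 'logfile': 'run.log'}          # built-in defaults
if conf_args.conf_file:
    config = configparser.ConfigParser()
    config.read([conf_args.conf_file])                     # a missing file is silently ignored
    if config.has_section('StarExtract'):
        defaults.update(dict(config.items('StarExtract')))

parser = ArgumentParser(parents=[conf_parser])
parser.set_defaults(**defaults)                            # config file beats built-ins
parser.add_argument('-t', '--task')
parser.add_argument('--logfile')

args = parser.parse_args(remaining_argv)                   # command line beats the config file
print(args.task, args.logfile)                             # extract run.log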
Example #34
                        score_writer.flush()

                        # save the model according to the result of Rank@1, IoU=0.7
                        if r1i7 > best_r1i7:
                            best_r1i7 = r1i7
                            filename = os.path.join(model_dir, "model_{}.ckpt".format(global_step))
                            saver.save(sess, filename)

            score_writer.close()

elif configs.mode.lower() == "test":

    # load previous configs
    model_dir = os.path.join(configs.home_dir, "model")
    pre_configs = load_json(os.path.join(model_dir, "configs.json"))
    parser.set_defaults(**pre_configs)
    configs = parser.parse_args()

    # load video features
    video_feature_path = os.path.join(configs.root, "charades_features_{}".format(configs.feature))
    video_features = load_video_features(video_feature_path, max_position_length=configs.max_position_length)

    # load test dataset
    test_set = load_json(os.path.join(configs.save_dir, "test_set.json"))

    # restore model and evaluate
    with tf.Graph().as_default() as graph:
        model = VSLNet(configs, graph=graph)
        sess_config = tf.ConfigProto(allow_soft_placement=True, log_device_placement=False)
        sess_config.gpu_options.allow_growth = True
Example #35
if __name__ == "__main__":
    parser = ArgumentParser()
    parser.add_argument("--config", required=True, help="path to config")
    parser.add_argument("--log_dir", default='log', help="path to log into")
    parser.add_argument("--checkpoint",
                        default=None,
                        help="path to checkpoint to restore")
    parser.add_argument("--device_ids",
                        default="0",
                        type=lambda x: list(map(int, x.split(','))),
                        help="Names of the devices comma separated.")
    parser.add_argument("--verbose",
                        dest="verbose",
                        action="store_true",
                        help="Print model architecture")
    parser.set_defaults(verbose=False)

    opt = parser.parse_args()
    with open(opt.config) as f:
        config = yaml.load(f, Loader=yaml.FullLoader)

    log_dir = os.path.join(opt.log_dir,
                           os.path.basename(opt.config).split('.')[0])
    log_dir += ' ' + strftime("%d-%m-%y %H:%M:%S", gmtime())

    reconstruction_module = ReconstructionModule(
        **config['model_params']['reconstruction_module_params'],
        **config['model_params']['common_params'])
    reconstruction_module.to(opt.device_ids[0])
    if opt.verbose:
        print(reconstruction_module)
Example #36
        "--route-id-list",
        dest="route_id_list",
        metavar="ROUTE_ID_LIST",
        default=None,
        type=mkstrlist,
        help="list of routes to populate schedules for (e.g., 8,18,29)")
    parser.add_argument("--indexes",
                        dest="indexes",
                        action='store_true',
                        help="add indexes to speed common queries")
    parser.add_argument(
        "--these-routes-only",
        dest="these_routes_only",
        action='store_true',
        help="only store data for these routes, not for routes with overlap")
    parser.set_defaults(indexes=False)
    parser.add_argument(
        "-L",
        "--log-level",
        dest="log_level",
        metavar="LOGLEVEL",
        default="INFO",
        help=
        "log level of Predictor (one of CRITICAL, ERROR, WARNING, INFO, or DEBUG)",
        choices=('CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG'))

    args = parser.parse_args()

    logger_name = "postgres_to_db"
    log_level = getattr(logging, args.log_level)
    logging.basicConfig(
Example #37
    parser.add_argument("--virt-cam",
                        type=int,
                        default=0,
                        help="Virtualcam device ID")
    parser.add_argument("--no-stream",
                        action="store_true",
                        help="On Linux, force no streaming")
    parser.add_argument("--debug",
                        action="store_true",
                        help="Print debug information")

    parser.add_argument("--avatars",
                        default="./avatars",
                        help="path to avatars directory")

    parser.set_defaults(relative=False)
    parser.set_defaults(adapt_scale=False)
    parser.set_defaults(no_pad=False)

    opt = parser.parse_args()

    if opt.no_stream:
        log('Force no streaming')
        _streaming = False

    device = 'cuda' if torch.cuda.is_available() else 'cpu'

    avatars = []
    images_list = sorted(glob.glob(f'{opt.avatars}/*'))
    for i, f in enumerate(images_list):
        if f.endswith(('.jpg', '.jpeg', '.png')):
Example #38
        CountProteins()
    else:
        CountGenes()


if __name__ == '__main__':
    from argparse import ArgumentParser

    epilog = 'system (default) encoding: {}'.format(sys.getdefaultencoding())

    parser = ArgumentParser(usage='%(prog)s [options] FILE ...',
                            description=__doc__,
                            epilog=epilog,
                            prog=os.path.basename(sys.argv[0]))

    parser.set_defaults(loglevel=logging.WARNING)

    parser.add_argument('medline',
                        metavar='MEDLINE_URL',
                        type=str,
                        help='MEDLINE DB URL')
    parser.add_argument('gnamed',
                        metavar='GNAMED_URL',
                        type=str,
                        help='gnamed DB URL')
    parser.add_argument('-p',
                        '--proteins',
                        action='store_true',
                        help='count protein symbols')
    parser.add_argument('--version', action='version', version=__version__)
    parser.add_argument('--error',
Example #39
    def bind_self(self, parser: argparse.ArgumentParser):
        fns = parser.get_default('fns')

        if self.exec not in fns:
            fns.append(self.exec)
            parser.set_defaults(fns=fns)
        if stencil is not None:
            np.savez_compressed(str(pixel_path / (str(snapshot_id) + ".npz")), stencil)
            #cv2.imwrite(str(pixel_path / (str(snapshot_id) + ".png")), stencil)
    conn.close()


if __name__ == "__main__":
    parser = ArgumentParser(
        description="Process a GTA session using data from the stencil buffer as well as the camera parameters")
    parser.add_argument("--session", dest='session', required=True, type=int, help="the session to process")
    parser.add_argument("--dataroot", dest='dataroot', required=True, type=str, help="Location of the data")
    parser.add_argument("--pixel_path", dest="pixel_path", required=True, type=str,
                        help="Location to output pixel annotations")
    parser.add_argument("--resume", dest='resume', required=False, type=str, help="resume from a file")
    save_parser = parser.add_mutually_exclusive_group(required=False)
    save_parser.add_argument('--save', dest='save', action='store_true')
    save_parser.add_argument('--no-save', dest='save', action='store_false')
    parser.set_defaults(save=True)  # default for the --save/--no-save pair above
    args = parser.parse_args()
    results = None
    if args.resume is None:
        results = process(args.pixel_path, args.dataroot, args.session)
        print("dumping results")
        with open('results.pkl', 'wb') as f:
            dump(results, f)

    else:
        print("loading results")
        with open(args.resume, 'rb') as f:
            results = load(f)
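The --save/--no-save pair above is the standard argparse pattern for an on/off switch that shares one dest; a minimal sketch of just that pattern, independent of the GTA processing code:

from argparse import ArgumentParser

parser = ArgumentParser()
group = parser.add_mutually_exclusive_group(required=False)
group.add_argument('--save', dest='save', action='store_true')
group.add_argument('--no-save', dest='save', action='store_false')
parser.set_defaults(save=True)  # fallback when neither flag is given

print(parser.parse_args([]).save)             # True
print(parser.parse_args(['--no-save']).save)  # False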
Example #41
    init_comet(params, trainer)
    trainer.run(params['total_kimg'])
    dataset.close()


if __name__ == "__main__":
    parser = ArgumentParser()
    needarg_classes = [
        Trainer, Generator, Discriminator, DepthManager, SaverPlugin,
        OutputGenerator, Adam
    ]
    needarg_classes += get_all_classes(dataset)
    needarg_classes += get_all_classes(output_postprocess)
    excludes = {'Adam': {'lr'}}
    default_overrides = {'Adam': {'betas': (0.0, 0.99)}}
    auto_args = create_params(needarg_classes, excludes, default_overrides)
    for k in default_params:
        parser.add_argument('--{}'.format(k),
                            type=partial(generic_arg_parse,
                                         hinttype=type(default_params[k])))
    for cls in auto_args:
        group = parser.add_argument_group(
            cls, 'Arguments for initialization of class {}'.format(cls))
        for k in auto_args[cls]:
            name = '{}.{}'.format(cls, k)
            group.add_argument('--{}'.format(name), type=generic_arg_parse)
            default_params[name] = auto_args[cls][k]
    parser.set_defaults(**default_params)
    params = get_structured_params(vars(parser.parse_args()))
    main(params)
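The loop above registers one option per entry of default_params and then pins every fallback value in a single set_defaults(**default_params) call. A reduced sketch of the same idea, using hypothetical hyper-parameter names:

from argparse import ArgumentParser

defaults = {'lr': 0.001, 'batch_size': 32, 'total_kimg': 10000}  # hypothetical values

parser = ArgumentParser()
group = parser.add_argument_group('training', 'Training hyper-parameters')
for name, value in defaults.items():
    group.add_argument('--{}'.format(name), type=type(value))
parser.set_defaults(**defaults)

params = vars(parser.parse_args(['--lr', '0.01']))
print(params['lr'])          # 0.01, taken from the command line
print(params['batch_size'])  # 32, taken from set_defaults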
Example #42
                     dest="omega",
                     default=10.0,
                     help="Smoothness weight")
 parser.add_argument("--margin",
                     type=float,
                     dest="margin",
                     default=0.3,
                     help="Discriminator/Generator balancing")
 parser.add_argument("--d_arch",
                     type=int,
                     dest="d_arch",
                     default=1,
                     help="Discriminator architecture index")
 parser.add_argument('--d_rand', dest='d_rand', action='store_true')
 parser.add_argument('--no-d_rand', dest='d_rand', action='store_false')
 parser.set_defaults(d_rand=True)
 parser.add_argument("--euler_ord",
                     type=str,
                     dest="euler_ord",
                     default="yzx",
                     help="Euler rotation order")
 parser.add_argument("--max_steps",
                     type=int,
                     dest="max_steps",
                     default=60,
                     help="Maximum number of steps in sequence")
 parser.add_argument("--min_steps",
                     type=int,
                     dest="min_steps",
                     default=60,
                     help="Minimun number of steps in sequence")
Example #43
    try:
        for _file in os.listdir(queries_dir):
            os.remove(os.path.join(queries_dir, _file))
        return True
    except Exception as error:
        print('Error: ', error)
        return False


if __name__ == '__main__':
    CONFIG_JSON = get_config_path()
    config = json.load(open(CONFIG_JSON, 'r'))
    scripts_dir = get_project_root_path() + "auto_nag/scripts/"
    queries_dir = get_project_root_path() + "queries/"
    parser = ArgumentParser(description=__doc__)
    parser.set_defaults(queries_only=False)
    parser.add_argument("-q",
                        "--queries-only",
                        dest="queries_only",
                        action="store_true",
                        help="just create and print queries")

    options, args = parser.parse_known_args()
    queries = createQueriesList(print_all=options.queries_only,
                                queries_dir=queries_dir)

    if options.queries_only:
        for url in urls:
            print(url)
    else:
        command = [
Example #44
def main(args=None):
    parser = ArgumentParser()
    parser.add_argument("--version", action="version", version=__version__)
    inputGroup = parser.add_argument_group(
        title="Input arguments",
        description=
        "The following arguments are mutually exclusive (pick only one):",
    )
    xInputGroup = inputGroup.add_mutually_exclusive_group(required=True)
    xInputGroup.add_argument("-g",
                             "--glyphs-path",
                             metavar="GLYPHS",
                             help="Path to .glyphs source file")
    xInputGroup.add_argument(
        "-u",
        "--ufo-paths",
        nargs="+",
        metavar="UFO",
        help="One or more paths to UFO files",
    )
    xInputGroup.add_argument(
        "-m",
        "--mm-designspace",
        metavar="DESIGNSPACE",
        help="Path to .designspace file",
    )

    outputGroup = parser.add_argument_group(title="Output arguments")
    outputGroup.add_argument(
        "-o",
        "--output",
        nargs="+",
        default=("otf", "ttf"),
        metavar="FORMAT",
        help=
        "Output font formats. Choose 1 or more from: %(choices)s. Default: otf, ttf. "
        "(No file paths).",
        choices=(
            "ufo",
            "otf",
            "otf-cff2",
            "ttf",
            "ttf-interpolatable",
            "otf-interpolatable",
            "variable",
            "variable-cff2",
        ),
    )
    outputSubGroup = outputGroup.add_mutually_exclusive_group()
    outputSubGroup.add_argument(
        "--output-path",
        default=None,
        help="Output font file path. Only valid when the output is a single "
        "file (e.g. input is a single UFO or output is variable font)",
    )
    outputSubGroup.add_argument(
        "--output-dir",
        default=None,
        help="Output folder. By default, output folders are created in the "
        "current working directory, grouping output fonts by format.",
    )
    outputGroup.add_argument(
        "-i",
        "--interpolate",
        nargs="?",
        default=False,
        const=True,
        metavar="INSTANCE_NAME",
        help="Interpolate masters and generate all the instances defined. "
        "To only interpolate a specific instance (or instances) that "
        'match a given "name" attribute, you can pass as argument '
        "the full instance name or a regular expression. "
        'E.g.: -i "Noto Sans Bold"; or -i ".* UI Condensed". '
        "(for Glyphs or MutatorMath sources only). ",
    )
    outputGroup.add_argument(
        "--use-mutatormath",
        action="store_true",
        help=(
            "Use MutatorMath to generate instances (supports extrapolation and "
            "anisotropic locations)."),
    )
    outputGroup.add_argument(
        "-M",
        "--masters-as-instances",
        action="store_true",
        help="Output masters as instances",
    )
    outputGroup.add_argument(
        "--family-name",
        help="Family name to use for masters, and to filter output instances",
    )
    outputGroup.add_argument(
        "--round-instances",
        dest="round_instances",
        action="store_true",
        help="Apply integer rounding to all geometry when interpolating",
    )
    outputGroup.add_argument(
        "--designspace-path",
        default=None,
        help="Path to output designspace file (for Glyphs sources only).",
    )
    outputGroup.add_argument(
        "--master-dir",
        default=None,
        help='Directory where to write master UFO. Default: "./master_ufo". '
        'If value is "{tmp}", a temporary directory is created and '
        "removed at the end (for Glyphs sources only).",
    )
    outputGroup.add_argument(
        "--instance-dir",
        default=None,
        help="Directory where to write instance UFOs. Default: "
        '"./instance_ufo". If value is "{tmp}", a temporary directory '
        "is created and removed at the end (for Glyphs sources only).",
    )
    outputGroup.add_argument(
        "--no-write-skipexportglyphs",
        action="store_false",
        dest="write_skipexportglyphs",
        help=
        "Do not store the glyph export flags in the 'public.skipExportGlyphs' "
        "key of designspace/UFO lib, but use the old private glyph lib key "
        "'com.schriftgestaltung.Glyphs.Export' (for Glyphs sources only).",
    )
    outputGroup.add_argument(
        "--validate-ufo",
        action="store_true",
        help="Enable ufoLib validation on reading/writing UFO files. It is "
        "disabled by default",
    )
    outputGroup.add_argument(
        "--expand-features-to-instances",
        action="store_true",
        help="Resolves all include()s in the master feature file and writes "
        "the full feature file to all instance UFOs. Only valid when "
        "interpolating. Use if you share feature files of masters in "
        "external files, as instances can end up elsewhere.",
    )

    contourGroup = parser.add_argument_group(title="Handling of contours")
    contourGroup.add_argument(
        "--keep-overlaps",
        dest="remove_overlaps",
        action="store_false",
        help="Do not remove any overlap.",
    )
    contourGroup.add_argument(
        "--overlaps-backend",
        dest="overlaps_backend",
        metavar="BACKEND",
        choices=("booleanOperations", "pathops"),
        default="booleanOperations",
        help="Select library to remove overlaps. Choose between: %(choices)s "
        "(default: %(default)s)",
    )
    contourGroup.add_argument(
        "--keep-direction",
        dest="reverse_direction",
        action="store_false",
        help="Do not reverse contour direction when output is ttf or "
        "ttf-interpolatable",
    )
    contourGroup.add_argument(
        "-e",
        "--conversion-error",
        type=float,
        default=None,
        metavar="ERROR",
        help="Maximum approximation error for cubic to quadratic conversion "
        "measured in EM",
    )
    contourGroup.add_argument(
        "-a",
        "--autohint",
        nargs="?",
        const="",
        help="Run ttfautohint. Can provide arguments, quoted",
    )
    contourGroup.add_argument(
        "--cff-round-tolerance",
        type=float,
        default=None,
        metavar="FLOAT",
        help="Restrict rounding of point coordinates in CFF table to only "
        "those floats whose absolute difference from their integral part "
        "is less than or equal to the tolerance. By default, all floats "
        "are rounded to integer (tolerance 0.5); 0 disables rounding.",
    )
    contourGroup.add_argument(
        "--optimize-cff",
        type=lambda s: CFFOptimization(int(s)),
        default=CFFOptimization.SUBROUTINIZE,
        help="0 disables all optimizations; 1 specializes the CFF charstring "
        "operators; 2 (default) also enables subroutinization",
    )
    contourGroup.add_argument(
        "--subroutinizer",
        default=None,
        choices=["compreffor", "cffsubr"],
        help="name of the library to use for compressing CFF charstrings. "
        "Choose between: %(choices)s. By default compreffor is used for CFF 1, "
        "and cffsubr for CFF2. NOTE: compreffor doesn't support CFF2.",
    )
    contourGroup.add_argument(
        "--no-optimize-gvar",
        dest="optimize_gvar",
        action="store_false",
        help="Do not perform IUP optimization on variable font's 'gvar' table. "
        "(only works with 'variable' TrueType-flavored output)",
    )

    layoutGroup = parser.add_argument_group(
        title="Handling of OpenType Layout")
    layoutGroup.add_argument(
        "--interpolate-binary-layout",
        nargs="?",
        default=False,
        const=True,
        metavar="MASTER_DIR",
        help="Interpolate layout tables from compiled master binaries. "
        "Requires Glyphs or MutatorMath source.",
    )
    layoutGroup.add_argument(
        "--feature-writer",
        metavar="CLASS",
        action="append",
        dest="feature_writer_specs",
        help="string specifying a feature writer class to load, either "
        "built-in or from an external module, optionally initialized with "
        "the given keyword arguments. The class and module names are "
        "separated by '::'. The option can be repeated multiple times "
        "for each writer class. A special value of 'None' will disable "
        "all automatic feature generation. The option overrides both the "
        "default ufo2ft writers and those specified in the UFO lib.",
    )
    layoutGroup.add_argument(
        "--debug-feature-file",
        metavar="FILE",
        type=FileType("w", encoding="utf-8"),
        default=None,
        help=(
            "Path were to dump OpenType features text to debug auto-generated "
            "features (kern, mark, mkmk, etc.)."),
    )

    feaCompilerGroup = layoutGroup.add_mutually_exclusive_group(required=False)
    feaCompilerGroup.add_argument(
        "--mti-source",
        help="mtiLib feature definition .plist file path (use instead of FEA)",
    )

    glyphnamesGroup = parser.add_mutually_exclusive_group(required=False)
    glyphnamesGroup.add_argument(
        "--production-names",
        dest="use_production_names",
        action="store_true",
        help="Rename glyphs with production names if available otherwise use "
        "uninames.",
    )
    glyphnamesGroup.add_argument("--no-production-names",
                                 dest="use_production_names",
                                 action="store_false")

    subsetGroup = parser.add_mutually_exclusive_group(required=False)
    subsetGroup.add_argument(
        "--subset",
        dest="subset",
        action="store_true",
        help="Subset font using export flags set by glyphsLib",
    )
    subsetGroup.add_argument("--no-subset",
                             dest="subset",
                             action="store_false")

    subroutinizeGroup = parser.add_mutually_exclusive_group(required=False)
    subroutinizeGroup.add_argument(
        "-s",
        "--subroutinize",
        action="store_true",
        help="Optimize CFF table using compreffor (default) [DEPRECATED: use "
        "--optimize-cff option instead]",
    )
    subroutinizeGroup.add_argument("-S",
                                   "--no-subroutinize",
                                   dest="subroutinize",
                                   action="store_false")

    parser.set_defaults(use_production_names=None,
                        subset=None,
                        subroutinize=None)

    logGroup = parser.add_argument_group(title="Logging arguments")
    logGroup.add_argument("--timing",
                          action="store_true",
                          help="Print the elapsed time for each steps")
    logGroup.add_argument(
        "--verbose",
        default="INFO",
        metavar="LEVEL",
        choices=("DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"),
        help="Configure the logger verbosity level. Choose between: "
        "%(choices)s. Default: INFO",
    )
    args = vars(parser.parse_args(args))

    specs = args.pop("feature_writer_specs")
    if specs is not None:
        args["feature_writers"] = _loadFeatureWriters(parser, specs)

    glyphs_path = args.pop("glyphs_path")
    ufo_paths = args.pop("ufo_paths")
    designspace_path = args.pop("mm_designspace")
    input_format = ("Glyphs" if glyphs_path else
                    "designspace" if designspace_path else "UFO") + " source"

    if INTERPOLATABLE_OUTPUTS.intersection(args["output"]):
        if not (glyphs_path or designspace_path):
            parser.error(
                "Glyphs or designspace source required for variable font")
        exclude_args(
            parser,
            args,
            [
                "interpolate",
                "masters_as_instances",
                "interpolate_binary_layout",
                "use_mutatormath",
            ],
            "variable output",
        )
    else:
        exclude_args(parser,
                     args, ["optimize_gvar"],
                     "static output",
                     positive=False)

    if args.get("use_mutatormath"):
        for module in ("defcon", "mutatorMath"):
            try:
                __import__(module)
            except ImportError:
                parser.error(
                    f"{module} module not found; reinstall fontmake with the "
                    "[mutatormath] extra")

    PRINT_TRACEBACK = args.get("verbose", "INFO") == "DEBUG"
    try:
        project = FontProject(
            timing=args.pop("timing"),
            verbose=args.pop("verbose"),
            validate_ufo=args.pop("validate_ufo"),
        )

        if glyphs_path:
            with _make_tempdirs(parser, args):
                project.run_from_glyphs(glyphs_path, **args)
            return

        exclude_args(
            parser,
            args,
            [
                "family_name",
                "mti_source",
                "designspace_path",
                "master_dir",
                "instance_dir",
            ],
            input_format,
        )
        exclude_args(parser,
                     args, ["write_skipexportglyphs"],
                     input_format,
                     positive=False)
        if designspace_path:
            project.run_from_designspace(designspace_path, **args)
            return

        exclude_args(
            parser,
            args,
            [
                "interpolate",
                "use_mutatormath",
                "interpolate_binary_layout",
                "round_instances",
                "expand_features_to_instances",
            ],
            input_format,
        )
        project.run_from_ufos(ufo_paths,
                              is_instance=args.pop("masters_as_instances"),
                              **args)
    except FontmakeError as e:
        if PRINT_TRACEBACK:
            logging.exception(e)
            sys.exit(1)
        sys.exit(f"fontmake: Error: {str(e)}")
Example #45
def main():
    arg_parser = ArgumentParser(description='A bi-LSTM neural NER tagger')
    arg_parser.add_argument('--cuda',
                            action='store_true',
                            help='Whether to use GPU')
    arg_parser.set_defaults(cuda=False)
    subparsers = arg_parser.add_subparsers()

    train_parser = subparsers.add_parser('train', help='Training procedure')
    train_parser.set_defaults(action='train')
    train_parser.add_argument('--train-set',
                              type=Path,
                              required=True,
                              help='Path to the training set')
    train_parser.add_argument(
        '--dev-set',
        type=Path,
        required=True,
        help='Path to the development set for validation')
    train_parser.add_argument('--embedding-size',
                              type=int,
                              default=256,
                              help='Size of the embedding vectors.')
    train_parser.add_argument('--hidden-size',
                              type=int,
                              default=256,
                              help='Size of the LSTM hidden layer.')
    train_parser.add_argument('--batch-size',
                              type=int,
                              default=32,
                              help='Training batch size')
    train_parser.add_argument('--max-epoch',
                              type=int,
                              default=32,
                              help='Maximum number of training epochs')
    train_parser.add_argument('--lr',
                              type=float,
                              default=0.001,
                              help='Learning rate for Adam')
    train_parser.add_argument('--model-save-path',
                              type=Path,
                              default='model.bin',
                              help='Model save path')

    test_parser = subparsers.add_parser('test', help='Testing procedure')
    test_parser.set_defaults(action='test')
    test_parser.add_argument('--model-path',
                             type=Path,
                             required=True,
                             help='Path to the model to evaluate')
    test_parser.add_argument('--test-set',
                             type=Path,
                             required=True,
                             help='Path to the testing set')
    test_parser.add_argument('--output',
                             type=Path,
                             required=True,
                             help='Path to the prediction output')
    test_parser.add_argument('--batch-size',
                             type=int,
                             default=32,
                             help='Testing batch size')

    args = arg_parser.parse_args()
    if args.action == 'train':
        train(args)
    elif args.action == 'test':
        test(args)
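Each subparser above records which subcommand was chosen via set_defaults(action=...). One caveat: if the script is invoked with no subcommand, the attribute is never set, so a defensive lookup avoids an AttributeError. A small sketch assuming the same train/test layout:

from argparse import ArgumentParser

parser = ArgumentParser()
subparsers = parser.add_subparsers()

train_parser = subparsers.add_parser('train')
train_parser.set_defaults(action='train')

test_parser = subparsers.add_parser('test')
test_parser.set_defaults(action='test')

args = parser.parse_args(['train'])
if getattr(args, 'action', None) == 'train':
    print('running training')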
Example #46
def make_parser(parser: ArgumentParser):
    parser.set_defaults(function=init)
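How such a make_parser() helper is wired up is not shown here; a plausible, purely hypothetical arrangement is that each subcommand module registers its entry point this way and the caller dispatches on args.function:

from argparse import ArgumentParser

def init(args):
    print("initialising")  # hypothetical entry point

def make_parser(parser):
    parser.set_defaults(function=init)

root = ArgumentParser()
subparsers = root.add_subparsers()
make_parser(subparsers.add_parser('init'))

args = root.parse_args(['init'])
args.function(args)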
Example #47
 def _set_command(self, command: CommandInterface,
                  command_parser: ArgumentParser):
     for option in command.options:
         command_parser.add_argument(*option.get('args'),
                                     **option.get('kwargs'))
     command_parser.set_defaults(command=command, **command.defaults)
Example #48
    cmd = "python3 /DeepSpeech/bin/import_cv2.py %s --validate_label_locale /DeepSpeech/bin/bangor_welsh/utils/validate_label_locale.py" % (
        cv_root_dir)

    import_process = subprocess.Popen(shlex.split(cmd))
    import_process.wait()


if __name__ == "__main__":

    parser = ArgumentParser(description=DESCRIPTION,
                            formatter_class=RawTextHelpFormatter)

    parser.add_argument(
        "--archive",
        dest="cv_archive_file_path",
        required=True,
        help=
        "path to downloaded tar.gz containing speech corpus in CommonVoice v2.0 format"
    )
    parser.add_argument(
        "--target_dir",
        dest="cv_root_dir",
        required=True,
        help=
        "target directory for extracted archive, also root directory for training data"
    )

    parser.set_defaults(func=main)
    args = vars(parser.parse_args())
    # Drop the 'func' entry added by set_defaults() so main() only receives
    # its real keyword arguments.
    func = args.pop("func")
    func(**args)
Example #49
def parse_args():
    """Parse arguments into dictionary"""
    argv = sys.argv[1:]
    ## Improve arg parser
    parser = ArgumentParser(
        description=__doc__,
        formatter_class=RawDescriptionHelpFormatter,
        epilog="Old argument format still works!"
    )
    parser.add_argument(
        "--test-driver",
        "--test_driver",
        "-t",
        help="Path to compiled tester driver executable [Default: %(default)s]"
    )
    parser.add_argument(
        "--commands",
        "-c",
        type=int,
        metavar="NUM_COMMANDS",
        help="Number of commands to put in each run. Some 'commands' map to multiple"
        "test instructions. Expects positive number. [Default: %(default)d]"
    )
    parser.add_argument(
        "--runs",
        "-r",
        type=int,
        metavar="NUM_RUNS",
        help="Number of times to run random test cases."
        " Expects positive int. [Default: %(default)d]"
    )
    parser.add_argument(
        "--memcheck",
        "-m",
        choices={"simple", "full"},
        help="Which kind of memory check to do. Don't use 'full'"
        " otherwise you will always fail. [Default: %(default)s]"
    )
    parser.add_argument(
        "--disallowed-commands",
        "--disallowed_commands",
        "-d",
        nargs="*",
        help="List of commands (space seperated), that you don't want to be tested i.e. "
        " --disallowed_commands begin end rbegin rend erase. Might for testing while"
        " writing code.",
    )

    parser.add_argument(
        "--max-insert",
        "--max_insert",
        "--mi",
        type=int,
        help="Set the upper-bound for consecutive inserts. Higher numbers are good for "
        "forcing collisions. Expects positive int. [Default: %(default)d]"
    )
    parser.add_argument(
        "--max-size",
        "--max_size",
        "--ms",
        type=int,
        help="Set the upper-bound for the the number of nodes in a graph. "
        "Lower numbers are good for forcing extremely connected graphs. "
        "Expects positive int. [Default: %(default)d]"
    )
    parser.add_argument(
        "--max-distance-check",
        "--max_distance_check",
        "--max-dist-check",
        "--max_dist_check",
        "--mdc",
        type=int,
        help="Set the upper-bound for the number of distance checks"
        " to do in a row. [Default: %(default)d]"
    )

    parser.set_defaults(
        test_driver="./a.out",
        commands=1000,
        runs=333, # Do a third of tests to compensate for time to generate them
        memcheck="simple",
        disallowed_commands=[],
        max_insert=5,
        max_size=100,
        max_distance_check=20
    )
    #mangle argv
    def add_prefix_char(arg):
        """Mangle the input arguments to allow an older archaic format style"""
        return ("--" + arg) if not arg.startswith("--") and "=" in arg else arg
    mangled_args = [add_prefix_char(arg) for arg in argv]
    params = vars(parser.parse_args(mangled_args))
    return params
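The add_prefix_char() helper above is what lets the old "key=value" argument style keep working; run on its own it behaves like this:

def add_prefix_char(arg):
    """Mangle the input arguments to allow an older archaic format style"""
    return ("--" + arg) if not arg.startswith("--") and "=" in arg else arg

legacy_argv = ["runs=5", "--memcheck", "simple", "commands=10"]
print([add_prefix_char(a) for a in legacy_argv])
# ['--runs=5', '--memcheck', 'simple', '--commands=10']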
Example #50
    ap.add_argument("task", help="Task to be run, module.path:function")
    ap.add_argument(
        "--seconds",
        type=float,
        help="In how many seconds to run the task at, if"
        "recurring determines the frequency.",
    )
    ap.add_argument("--when",
                    type=float,
                    help="The Unix timestamp for when to run the task")
    ap.add_argument("--recurring",
                    action="store_true",
                    help="Make the task recurring")
    ap.add_argument("--args", help="Comma separated list of arguments")
    ap.set_defaults(recurring=False, args=[])

    options = ap.parse_args()
    if not (options.when or options.seconds):
        ap.error("Either seconds or when must be specified.")

    if options.recurring and not options.seconds:
        ap.error("Seconds must be specified for recurring tasks.")

    # Create new task according to spec
    task = Task(
        options.task,
        args=options.args.split(","),
        recurring=options.recurring,
        seconds=options.seconds if options.seconds else 0,
        when=options.when,
Example #51
                        type=int,
                        default=12,
                        dest="valid_steps",
                        help="valid_steps")
    parser.add_argument("--leaky",
                        type=float,
                        default=0.0,
                        dest="leaky",
                        help="leakiness of ReLU, float >= 0")
    parser.add_argument("--split_col",
                        type=str,
                        dest="split_col",
                        default="split")
    parser.add_argument("--split_train",
                        type=str,
                        nargs="+",
                        dest="split_train",
                        default=["train"])
    parser.add_argument("--split_eval",
                        type=str,
                        nargs="+",
                        dest="split_eval",
                        default=["eval"])

    parser.add_argument("--batchnorm", dest="batchnorm", action="store_true")
    parser.add_argument("--maxpool", dest="maxpool", action="store_true")
    parser.set_defaults(batchnorm=False, maxpool=False)

    args = parser.parse_args()
    train(**vars(args))
Example #52
parser_tournament_continuously = subparsers.add_parser(
    'tournament-continuously',
    help='generate games between randomly chosen players')
parser_tournament_continuously.add_argument(
    'tournament_dir', help='directory where tournament games are stored')
parser_tournament_continuously.add_argument(
    'model_dir', help='directory where alpha connect models are stored')
parser_tournament_continuously.add_argument('--processes',
                                            type=int,
                                            help='number of cores to use',
                                            default=4)
parser_tournament_continuously.add_argument(
    '--first_player_name_filter', help='regex filter for first player name')
parser_tournament_continuously.add_argument(
    '--first_player_kwargs_filter', help='regex filter for first kwargs')
parser_tournament_continuously.add_argument(
    '--second_player_name_filter', help='regex filter for second player name')
parser_tournament_continuously.add_argument(
    '--second_player_kwargs_filter', help='regex filter for second kwargs')
parser_tournament_continuously.set_defaults(func=_tournament_continuously)

# tournament-elo
parser_tournament_elo = subparsers.add_parser(
    'tournament-elo', help='compute elo score for tournament players')
parser_tournament_elo.add_argument(
    'tournament_dir', help='directory where tournament games are stored')
parser_tournament_elo.set_defaults(func=_tournament_elo)

args = parser.parse_args()
args.func(args)
Example #53
        checkpoint_path = seen_params.pop('checkpoint_path')
        model = vnet.VNet.load_from_checkpoint(checkpoint_path, **seen_params)

    trainer = Trainer.from_argparse_args(hparams, auto_lr_find=True)

    trainer.tune(model)


if __name__ == '__main__':
    now = datetime.datetime.now()
    dt_str = now.strftime("%d-%m-%Y_%H-%M-%S")

    parser = ArgumentParser()
    parser.add_argument('--logger_save_dir', default='D:/tmp/logs/december/')
    parser.add_argument('--save_top_k', default=1, type=int)
    parser.add_argument('--experiment_name', default='vnet_tuning_' + dt_str)
    parser.add_argument('--date_time', default=dt_str)
    parser.add_argument('--checkpoint_path', default=None)

    parser = vnet.VNet.add_model_specific_args(parser)
    parser = Trainer.add_argparse_args(parser)

    # Override pytorch_lightning defaults
    parser.set_defaults(max_epochs=5000, gpus=1)

    parser = cli.add_argument_tracking(parser)

    hparams = parser.parse_args()

    main(hparams)
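The set_defaults(max_epochs=5000, gpus=1) call above overrides defaults that Trainer.add_argparse_args() registered earlier: set_defaults() may be called after the arguments already exist. A library-free sketch of that pattern, with add_library_args standing in for the library helper:

from argparse import ArgumentParser

def add_library_args(parser):
    # Stand-in for a library helper that registers its own options.
    parser.add_argument('--max_epochs', type=int, default=100)
    parser.add_argument('--gpus', type=int, default=0)
    return parser

parser = ArgumentParser()
parser = add_library_args(parser)
parser.set_defaults(max_epochs=5000, gpus=1)  # project-specific overrides

print(parser.parse_args([]))  # Namespace(gpus=1, max_epochs=5000)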
Example #54
def build_parser():
    parser = ArgumentParser()
    parser.add_argument(
        '--content',
        type=str,
        dest='content',
        help=
        'content image. If left blank, it will switch to texture generation mode.',
        metavar='CONTENT',
        default='',
        required=False)
    parser.add_argument('--save_dir',
                        type=str,
                        dest='save_dir',
                        help='save_dir.',
                        metavar='SAVE_DIR',
                        required=False)
    parser.add_argument('--styles',
                        dest='styles',
                        nargs='+',
                        help='one or more style images',
                        metavar='STYLE',
                        required=True)
    parser.add_argument(
        '--use_semantic_masks',
        dest='use_semantic_masks',
        help='If true, it accepts some additional image inputs. They '
        'represent the semantic masks of the content and style images.'
        '(default %(default)s).',
        action='store_true')
    parser.set_defaults(use_semantic_masks=False)
    parser.add_argument(
        '--semantic_masks_weight',
        dest='semantic_masks_weight',
        help='The weight given to semantic masks with respect to other '
        'features generated by the vgg network.',
        metavar='SEMANTIC_MASKS_WEIGHT',
        required=False,
        type=float,
        default=SEMANTIC_MASKS_WEIGHT)
    parser.add_argument('--output_semantic_mask',
                        dest='output_semantic_mask',
                        help='one content image semantic mask',
                        metavar='OUTPUT_SEMANTIC_MASK',
                        required=False)
    parser.add_argument('--style_semantic_masks',
                        dest='style_semantic_masks',
                        nargs='+',
                        help='one or more style image semantic masks',
                        metavar='STYLE_SEMANTIC_MASKS',
                        required=False)
    parser.add_argument(
        '--semantic_masks_num_layers',
        type=int,
        dest='semantic_masks_num_layers',
        help=
        'number of semantic masks per content or style image (default %(default)s).',
        metavar='SEMANTIC_MASKS_NUM_LAYERS',
        default=SEMANTIC_MASKS_NUM_LAYERS)
    parser.add_argument(
        '--content_img_style_weight_mask',
        dest='content_img_style_weight_mask',
        help=
        'The path to one black-and-white mask specifying how much we should "stylize" each pixel '
        'in the outputted image. The areas where the mask has higher value would be stylized more '
        'than other areas. A completely white mask would mean that we stylize the output image '
        'just as before, while a completely dark mask would mean that we do not stylize the '
        'output image at all, so it should look pretty much the same as content image. If you do '
        'not wish to use this feature, just leave it blank (default %(default)s).',
        metavar='CONTENT_IMG_STYLE_WEIGHT_MASK',
        default='',
        required=False)
    parser.add_argument('--output',
                        dest='output',
                        help='Output path. (default %(default)s).',
                        metavar='OUTPUT',
                        default='output/default.jpg',
                        required=False)
    parser.add_argument(
        '--checkpoint-output',
        dest='checkpoint_output',
        help='Formatted string for checkpoint output. This string should '
        'contain at least one %%s. (default %(default)s).',
        metavar='OUTPUT_CHECKPOINT',
        default='output_checkpoint/default_%s.jpg',
        required=False)
    parser.add_argument('--iterations',
                        type=int,
                        dest='iterations',
                        help='iterations (default %(default)s)',
                        metavar='ITERATIONS',
                        default=ITERATIONS)
    parser.add_argument(
        '--width',
        type=int,
        dest='width',
        help=
        'Input and output width. All content images and style images should be '
        'automatically scaled accordingly. (default %(default)s).',
        metavar='WIDTH',
        default=256,
        required=False)
    parser.add_argument(
        '--height',
        type=int,
        dest='height',
        help=
        'Input and output height. All content images and style images should be automatically '
        'scaled accordingly. (default %(default)s).',
        metavar='HEIGHT',
        default=256,
        required=False)
    parser.add_argument(
        '--use_mrf',
        dest='use_mrf',
        help=
        'If true, it uses Markov Random Fields loss instead of Gramian loss. '
        '(default %(default)s).',
        action='store_true')
    parser.set_defaults(use_mrf=False)
    parser.add_argument(
        '--content-weight',
        type=float,
        dest='content_weight',
        help='How much we weigh the content loss (default %(default)s).',
        metavar='CONTENT_WEIGHT',
        default=CONTENT_WEIGHT)
    parser.add_argument(
        '--style-weight',
        type=float,
        dest='style_weight',
        help='How much we weigh the style loss (default %(default)s)',
        metavar='STYLE_WEIGHT',
        default=STYLE_WEIGHT)
    parser.add_argument(
        '--style-blend-weights',
        type=float,
        dest='style_blend_weights',
        help='If given multiple styles as input, this determines how much '
        'it weighs each style.',
        nargs='+',
        metavar='STYLE_BLEND_WEIGHT')
    parser.add_argument(
        '--tv-weight',
        type=float,
        dest='tv_weight',
        help='total variation regularization weight (default %(default)s)',
        metavar='TV_WEIGHT',
        default=TV_WEIGHT)
    parser.add_argument('--learning-rate',
                        type=float,
                        dest='learning_rate',
                        help='Learning rate (default %(default)s).',
                        metavar='LEARNING_RATE',
                        default=LEARNING_RATE)
    parser.add_argument(
        '--initial',
        dest='initial',
        help=
        'The initial image that the program starts with. If left blank, it will '
        'start with random noise.',
        metavar='INITIAL',
        required=False)
    parser.add_argument(
        '--print-iterations',
        type=int,
        dest='print_iterations',
        help='The program prints the current losses every this number of '
        'rounds.',
        metavar='PRINT_ITERATIONS',
        default=PRINT_ITERATIONS,
        required=False)
    parser.add_argument(
        '--checkpoint-iterations',
        type=int,
        dest='checkpoint_iterations',
        help='The program saves the current image every this number of '
        'rounds.',
        metavar='CHECKPOINT_ITERATIONS',
        default=CHECKPOINT_ITERATIONS,
        required=False)
    return parser
Example #55
    def __init__(self):
        # common options
        parser = ArgumentParser(
            description=self.description,
            formatter_class=argparse.RawDescriptionHelpFormatter,
            epilog=self.epilog)

        parser.add_argument('-c',
                            '--clair',
                            default='http://localhost:6060',
                            help='clair url, default: %(default)s')
        parser.add_argument(
            '-f',
            '--formats',
            choices=['html', 'json'],
            action='append',
            default=['html'],
            help='output report file with give format, default: %(default)s')
        parser.add_argument(
            '-T',
            '--threshold',
            choices=SEVERITIES,
            default='Unknown',
            metavar='THRESHOLD',
            help='CVE severity threshold; if any vulnerability severity is'
            ' above the threshold, return non-zero, default: %(default)s'
            ', choices are: {}'.format(SEVERITIES))
        parser.add_argument('-w',
                            '--white-list',
                            help='path to the whitelist file')
        group = parser.add_mutually_exclusive_group()
        group.add_argument('-l', '--local-ip', help='ip address of local host')
        group.add_argument('-r',
                           '--regex',
                           action='store_true',
                           help='if set, repository and tag of images will be '
                           'treated as regular expression')
        parser.add_argument('-i',
                            '--insecure-registry',
                            action='append',
                            dest='insec_regs',
                            metavar='REGISTRY',
                            default=[],
                            help='domain of insecure registry')
        parser.add_argument('-n',
                            '--no-proxy',
                            action='append',
                            help='the proxy will ignore these domains')
        parser.add_argument('-L', '--log-file', help='save log to file')
        parser.add_argument('-d',
                            '--debug',
                            action='store_true',
                            help='print more logs')
        parser.add_argument('-V',
                            '--version',
                            action='version',
                            version=__version__)
        parser.add_argument('images',
                            nargs='+',
                            metavar='IMAGE',
                            help='docker images or regular expression')
        parser.set_defaults(func=self.analyze_image)
        self.args = parser.parse_args()
        if self.args.local_ip and self.args.insec_regs:
            parser.error('argument --local-ip: not allowed with'
                         ' argument --insecure-registry')
        if self.args.no_proxy:
            os.environ['NO_PROXY'] = ';'.join(self.args.no_proxy)
        self.setup_logging()
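The --local-ip / --insecure-registry conflict above is a constraint a mutually exclusive group cannot express (one of the options lives outside the group), so it is enforced after parsing with parser.error(). A reduced sketch of that check:

from argparse import ArgumentParser

parser = ArgumentParser()
parser.add_argument('-l', '--local-ip', help='ip address of local host')
parser.add_argument('-i', '--insecure-registry', action='append',
                    dest='insec_regs', metavar='REGISTRY', default=[])

args = parser.parse_args(['-l', '10.0.0.1'])
if args.local_ip and args.insec_regs:
    parser.error('argument --local-ip: not allowed with'
                 ' argument --insecure-registry')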
Example #56
def read_command_line():
    """Read arguments from commandline"""
    parser = ArgumentParser()

    parameters = get_parameters()

    # File with attendance
    date = datetime.now()
    month = str(date.month) if date.month > 9 else "0" + str(date.month)
    day = str(date.day) if date.day > 9 else "0" + str(date.day)
    parameters['filepath'] = path.join(
        path.dirname(__file__),
        parameters['filepath'] % (date.year, month, day))

    parser.add_argument(
        '--f',
        '--file',
        type=str,
        default=parameters['filepath'],
        help=""" A file including all students, in this course. Format:
                        Attendence(X/-) //  Name //  Username // email""",
        metavar="students_file")
    parser.add_argument('--c',
                        '--course',
                        type=str,
                        default=parameters['course'],
                        help="Name of the course",
                        metavar="course")
    parser.add_argument(
        '--u',
        '--university',
        type=str,
        default=parameters['university'],
        help="Name of the university, the viritual-classroom should \
                        be called <university>-<course>",
        metavar="university")
    parser.add_argument('--m',
                        '--max_students',
                        type=int,
                        default=parameters['max_students'],
                        help="Maximum number of students in each group.",
                        metavar="max group size")
    parser.add_argument(
        '--e',
        '--end_group',
        type=bool,
        default=False,
        metavar="end group (bool)",
        help='Delete the current teams on the form Team-<number>')
    parser.add_argument('--i',
                        '--start_semester',
                        type=bool,
                        default=False,
                        metavar="initialize group (bool)",
                        help='Create repositories and teams for the students.')
    parser.add_argument('--g', '--get_repos', type=bool,
                        default=False, help="Clone all student repos into the" + \
                                             "filepath ./<course>_all_repos",
                        metavar="Get all repos (bool)")
    parser.add_argument(
        '--get_repos_filepath',
        type=str,
        default=".",
        help="This argument is only used when --get_repos is used. \
                              It states the location of where the folder \
                              <course>_all_repos should be located \
                              this is expected to be a relative path from where \
                              you are when you execute this program",
        metavar="Get all repos (bool)")
    parser.add_argument('--F', '--get_feedback', type=bool,
                        default=False, help="Store all the feedback files into the" + \
                                             "filepath ./<course>_all_repos. To change" \
                                             " the location use '--get_feedback_filepath'",
                        metavar="Get all feedbacks (bool)")
    parser.add_argument(
        '--get_feedback_filepath',
        type=str,
        default="",
        help="This argument is only used when --get_feedback is used. \
                              It states the location of where the folder \
                              <course>_all_feedbacks should be located \
                              this is expected to be a relative path from where \
                              you are when you execute this program",
        metavar="Get all feedbacks (bool)")
    parser.add_argument(
        '--smtp',
        type=str,
        choices=['uio', 'google'],
        default=parameters['smtp'],
        help='Choose which smtp server emails are to be sent from.')
    parser.add_argument('--rank',
                        type=bool,
                        default=False,
                        help="How to divide in to groups, with or without a \
                        classification of the students from 1 to 3, where 1 is \
                        a top student.",
                        metavar="rank")
    parser.add_argument('--email',
                        dest='email',
                        action='store_true',
                        help="Send email")
    parser.add_argument('--no-email',
                        dest='email',
                        action='store_false',
                        help="Send no email")
    parser.add_argument(
        "--email_tmp_file",
        dest="email_tmp_file",
        type=str,
        default="email_tmp_%s.txt",
        help=
        "This argument is used to determine the name of the file to store information \
                             emails sent.")
    parser.add_argument(
        "--email_delay",
        dest="email_delay",
        type=float,
        default=1.0,
        help=
        "This argument is used to determine the delay between each email sent."
    )
    parser.add_argument(
        "--email_review_groups",
        dest="email_review_groups",
        type=bool,
        default=False,
        help="This flag tells the script to only send emails to review groups.\
                         Useful if sending out the emails was interrupted.")
    parser.set_defaults(email=True)

    args = parser.parse_args()

    # Check if file exists
    if not path.isfile(args.f) and not args.e and not args.F and not args.g:
        msg = "The file: %s does not exist. \nPlease provide a different file path, or" + \
               "create the file first. Use the script 'copy-attendance-file.py'"
        msg = msg % args.f
        print(msg)
        exit(1)

    return args.f, args.c, args.u, args.m, args.e, args.i, args.g, args.get_repos_filepath, \
            args.F, args.get_feedback_filepath, args.smtp, args.rank, \
            args.email, args.email_tmp_file, args.email_delay, args.email_review_groups
Example #57
# -*- mode: python -*-

import os, sys
if __name__ != '__main__':
    sys.exit(1)

from argparse import ArgumentParser

_NAME_ = "pbench-base.py"

parser = ArgumentParser(_NAME_)
parser.add_argument("-C",
                    "--config",
                    dest="cfg_name",
                    help="Specify config file")
parser.set_defaults(cfg_name=os.environ.get("CONFIG"))
parser.add_argument('prog',
                    metavar='PROG',
                    type=str,
                    nargs=1,
                    help='the program name of the caller')
parser.add_argument('args',
                    metavar='args',
                    type=str,
                    nargs='*',
                    help='program arguments')
parsed, _ = parser.parse_known_args()

_prog = os.path.basename(parsed.prog[0])
_dir = os.path.dirname(parsed.prog[0])
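parse_known_args() is what lets the wrapper above accept options it does not know about: recognised options land in the namespace, everything else is returned separately. A small sketch of the same call:

from argparse import ArgumentParser

parser = ArgumentParser("pbench-base.py")
parser.add_argument("-C", "--config", dest="cfg_name")
parser.add_argument("prog", metavar="PROG", nargs=1)

parsed, rest = parser.parse_known_args(
    ["-C", "pbench.cfg", "/opt/pbench/bin/caller", "--verbose"])
print(parsed.cfg_name)  # pbench.cfg
print(parsed.prog)      # ['/opt/pbench/bin/caller']
print(rest)             # ['--verbose']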
Example #58
        args.first_mask)
    mask1_intersect, mask2_intersect, col, row, bounds = intersection(
        args.first_mask, args.second_mask)
    mask_difference = difference(mask1_intersect, mask2_intersect)
    transform = (bounds[0], mask1_transform[1], 0, bounds[3], 0,
                 mask1_transform[5])
    data2geotiff(mask_difference, transform, projection, data_type, 0,
                 args.name)

    if args.shape:
        raster_boundary2shape(args.name, None, args.name, use_closing=False)


if __name__ == '__main__':
    p = ArgumentParser()

    p.add_argument('first_mask', help='The older mask of the pair')
    p.add_argument('second_mask', help='The newer mask of the pair')
    p.add_argument('name', help='Name of the new mask')
    p.add_argument('--shape',
                   default=False,
                   action='store_true',
                   help='Also return a shape file')
    p.set_defaults(func=create_mask)

    args = p.parse_args()
    if hasattr(args, 'func'):
        args.func(args)
    else:
        p.print_help()
Example #59
                             ''')
    PARSER.add_argument('--gif',
                        action='store_true',
                        help='whether to create a gif')
    PARSER.add_argument('--gif-iter',
                        type=int,
                        default=1,
                        help='save gif frame every x iter')
    PARSER.add_argument('--gif-dir',
                        type=str,
                        default='',
                        help='where to store gif frames')

    PARSER.add_argument('--cuda', dest='cuda', action='store_true')
    PARSER.add_argument('--no-cuda', dest='cuda', action='store_false')
    PARSER.set_defaults(cuda=True)

    PARSER.add_argument('--project', dest='project', action='store_true')
    PARSER.add_argument('--no-project', dest='project', action='store_false')
    PARSER.set_defaults(project=True)

    PARSER.add_argument('--annealed', dest='annealed', action='store_true')
    PARSER.add_argument('--no-annealed', dest='annealed', action='store_false')
    PARSER.set_defaults(annealed=False)

    PARSER.add_argument('--lpips', dest='lpips', action='store_true')
    PARSER.add_argument('--no-lpips', dest='lpips', action='store_false')
    PARSER.set_defaults(lpips=False)

    HPARAMS = PARSER.parse_args()
    HPARAMS.input_path = f'./test_images/{HPARAMS.dataset}'
Example #60
    checkpoint = pl.callbacks.ModelCheckpoint(
        save_top_k=args.save_top_k, monitor="val_loss"
    )
    train_loader = torch.utils.data.DataLoader(dataset, batch_size=batch_size)
    val_loader = torch.utils.data.DataLoader(dataset_val, batch_size=batch_size)
    test_loader = torch.utils.data.DataLoader(dataset_test, batch_size=batch_size)
    trainer = pl.Trainer.from_argparse_args(args, callbacks=[checkpoint])
    trainer.fit(model, train_dataloader=train_loader, val_dataloaders=val_loader)
    trainer.test(model=model, test_dataloaders=test_loader)


if __name__ == "__main__":
    parser = ArgumentParser("Memory task with spiking neural networks")
    parser = pl.Trainer.add_argparse_args(parser)
    parser.set_defaults(
        max_epochs=1000, auto_select_gpus=True, progress_bar_refresh_rate=1
    )
    parser.add_argument(
        "--batch_size",
        default=128,
        type=int,
        help="Number of examples in one minibatch",
    )
    parser.add_argument(
        "--learning_rate", type=float, default=0.01, help="Learning rate to use."
    )
    parser.add_argument(
        "--model",
        default="super",
        choices=["super", "tanh", "circ", "logistic", "circ_dist"],
        help="Model to use for training.",