Ejemplo n.º 1
0
def read_params(args):
    """Build the GraPhlAn command-line parser and parse the arguments.

    Note: ``args`` is accepted for interface compatibility, but the parser
    reads from ``sys.argv`` via ``parse_args()``.

    Returns:
        dict: parsed arguments as a name -> value mapping.
    """
    parser = ArgumentParser(description= "GraPhlAn "+__version__+" ("+__date__+") "
                                         "AUTHORS: "+__author__)
    arg = parser.add_argument

    arg('intree', type=str, default = None, metavar='input_tree',
        help = "the input tree in PhyloXML format " )
    arg('outimg', type=str, default = None, metavar='output_image',
        help = "the output image, the format is guessed from the extension "
               "unless --format is given. Available file formats are: png, "
               "pdf, ps, eps, svg" )
    # BUG FIX: metavar must be a string, not a list; argparse formats the
    # metavar with str(), so a list rendered as "['output_image_format']"
    # in the help/usage text.
    arg('--format', choices=['png','pdf','ps','eps','svg'], default=None,
        type = str, metavar='output_image_format',
        help = "set the format of the output image (default none meaning that "
               "the format is guessed from the output file extension)")
    arg('--warnings', default=1, type=int,
        help = "set whether warning messages should be reported or not (default 1)")
    arg('--positions', default=0, type=int,
        help = "set whether the absolute position of the points should be reported on "
               "the standard output. The two cohordinates are r and theta")
    arg('--dpi', default=72, type=int, metavar='image_dpi',
        help = "the dpi of the output image for non vectorial formats")
    arg('--size', default=7.0, type=float, metavar='image_size',
        help = "the size of the output image (in inches, default 7.0)")
    arg('--pad', default=0.5, type=float, metavar='pad_in',
        help = "the distance between the most external graphical element and "
               "the border of the image")
    arg( '-v','--version', action='version', version="GraPhlAn version "+__version__+" ("+__date__+")",
        help="Prints the current GraPhlAn version and exit" )
    return vars(parser.parse_args())
Ejemplo n.º 2
0
def main():
    """Parse CLI options, build a ``Config`` and print a generated word list.

    The option parser is constructed by the project-level ``set_options``;
    the resulting options are copied onto a ``Config`` object which is then
    handed to ``generate``.
    """
    parser = ArgumentParser()
    parser_with_options = set_options(parser)
    args = parser_with_options.parse_args()
    arguments = vars(args)
    if 'help' in arguments:
        parser.print_help()
    else:
        config = Config()
        config.format = arguments['output']
        config.total_lists = arguments['lists']
        config.total_words = arguments['number']

        if arguments['file'] is None:
            # No word file given: fall back to the words file bundled next
            # to this module (located via introspection of the current frame).
            current_path = os.path.abspath(
                inspect.getfile(inspect.currentframe())
                )
            dir = os.path.dirname(current_path)
            config.file_path = os.path.join(dir, config.internal_words_file)
        else:
            config.file_path = arguments['file']

    # NOTE(review): if the 'help' branch above is ever taken, ``config`` is
    # unbound here and this call raises NameError — confirm whether
    # set_options can actually produce a 'help' key in the namespace.
    words = generate(config)

    if words:
        print(words)

    exit(0)
Ejemplo n.º 3
0
def rpt_getnew(args):
    """Mirror new gate result directories into args.logdir.

    For every remote result dir not yet present locally, downloads the
    top-level 'testlog' file and then the per-package test outputs.
    """
    parser = ArgumentParser(prog='mx rpt-getnew')
    _add_common_args(parser)
    args = _check_verbose(parser.parse_args(args))

    if not os.path.exists(args.logdir):
        _safe_mkdir(args.logdir)

    # Remote result dirs vs. what we already have locally.
    gatedirs = get_gate_dirs(gate_url, _is_result_dir, _strip_dotslash)
    localdirs = get_local_dirs(args.logdir)
    for gatedir in gatedirs:
        gate_url_dir = join(gate_url, gatedir)
        local_dir = join(args.logdir, gatedir)
        if not gatedir in localdirs:
            if args.verbose:
                print 'processing: ' + gatedir
            # Copy the top-level testlog first.
            # NOTE(review): the urlopen handle ``f`` is never closed.
            f = urllib.urlopen(join(gate_url_dir, 'testlog'))
            testlog = f.read()
            _safe_mkdir(local_dir)
            with open(join(local_dir, 'testlog'), 'w') as t:
                t.write(testlog)
#        if True:
            # get test results
            gate_url_test = join(gate_url_dir, 'test')
            local_dir_test = join(local_dir, 'test')
            _safe_mkdir(local_dir_test)
            pkgs = get_gate_dirs(gate_url_test, _is_package_dir, _strip_slash)
            for pkg in pkgs:
                if args.verbose:
                    print '  processing package: ' + pkg
                gate_url_test_pkg = join(gate_url_test, pkg)
                local_dir_test_pkg = join(local_dir_test, pkg)
                _copy_files(gate_url_test_pkg, local_dir_test_pkg, pkg)
Ejemplo n.º 4
0
 def __init__(self):
     """Configure the ``see`` sub-command parser."""
     ArgumentParser.__init__(self,
                             prog="see",
                             description="Mark episodes as SEEN",
                             epilog="example: see lost.s01* lost.s02*")
     self.add_argument("filters",
                       metavar="EPISODE",
                       nargs="+",
                       default=[""],
                       help="episode name or filter, ex: lost.s01e0*")
Ejemplo n.º 5
0
def rpt_install_status(args):
    parser = ArgumentParser(prog='mx rpt-install-status')
    parser.add_argument('--detail', action='store_true', help='display package status')
    parser.add_argument('--displaymode', action='store', default='latest', help='display mode: all | latest')
    parser.add_argument('--failed', action='store_true', help='list packages that failed to install')
    parser.add_argument('--ok-any', action='store_true', help='treat as OK if a package ever installed')
    _add_pattern_arg(parser)
    _add_common_args(parser)
    args = _check_verbose(parser.parse_args(args))

    pkgtable = _build_pkgtable(_get_results(args.logdir))

    pkgnames = []
    for pkgname, occurrences in pkgtable.iteritems():
        if re.search(args.pattern, pkgname) is None:
            continue
        status = _check_any_ok(occurrences, args.ok_any)
        if args.failed:
            if not status:
                pkgnames.append(pkgname)
        else:
            if status:
                pkgnames.append(pkgname)

    pkgnames.sort()
    for pkgname in pkgnames:
        print pkgname
        if args.detail:
            if args.displaymode == 'all':
                occurrences = pkgtable[pkgname]
                for occurrence in occurrences:
                    print "  ${0}".format(str(occurrence))
            else:
                print "  ${0}".format(str(occurrences[0]))
Ejemplo n.º 6
0
def rpt_list_testdates(args):
    parser = ArgumentParser(prog='mx rpt-list-testdates')
    _add_common_args(parser)
    parser.add_argument('--printdir', action='store_true', help='print directory containing tests')
    _add_pattern_arg(parser)
    args = _check_verbose(parser.parse_args(args))
    fastr = dict()
    local_dirs = get_local_dirs(args.logdir)
    for local_dir in local_dirs:
        resultInfo = ResultInfo(local_dir)
        result_outputs = _gather_test_outputs(join(args.logdir, local_dir, "test"))
        for pkg, _ in result_outputs.iteritems():
            if re.search(args.pattern, pkg) is None:
                continue
            if not fastr.has_key(pkg):
                testdates = []
                fastr[pkg] = testdates
            else:
                testdates = fastr[pkg]
            testdates.append(resultInfo)

    for pkg, testdates in fastr.iteritems():
        sortedList = sorted(testdates, reverse=True)
        print pkg
        for resultInfo in sortedList:
            if args.printdir:
                print '  ' + join(args.logdir, resultInfo.localdir)
            else:
                print '  ' + str(resultInfo.date)
Ejemplo n.º 7
0
def rpt_compare(args):
    '''
    Analyze package test results by comparing test output with GnuR output.
    Uses either a specific directory, i.e. the 'test' subdirectory of the --testdir argument
    or (default) the latest downloaded results from the --logdir directory
    Return 0 if passed, non-zero if failed
    '''
    parser = ArgumentParser(prog='mx rpt-compare')
    _add_common_args(parser)
    parser.add_argument('--testdir', action='store', help='specific dir containing fastr results')
    parser.add_argument('--pkg', action='store', help='pkg to compare')
    parser.add_argument('--diff', action='store_true', help='execute given diff program on differing outputs')
    parser.add_argument('--difftool', action='store', help='diff tool', default='diff')
    _add_pattern_arg(parser)
    args = _check_verbose(parser.parse_args(args))

    if args.pkg:
        # backwards compatibility: --pkg acts as a single-package pattern
        args.pattern = args.pkg

    # Reference outputs produced by GnuR are expected under ./test_gnur.
    gnur = _gather_test_outputs(join(os.getcwd(), "test_gnur"))

    if args.testdir:
        fastr = _gather_test_outputs(join(args.testdir, "test"))
    else:
        fastr = _get_test_outputs(_gather_all_test_outputs(args.logdir))

    # Fail (1) if any package comparison reports a failure.
    rdict = _rpt_compare_pkgs(gnur, fastr, args.verbose, args.pattern, args.diff, args.difftool)
    for _, rc in rdict.iteritems():
        if rc == 1:
            return 1
    return 0
Ejemplo n.º 8
0
def cli(args=None):
    """Parse CLI options and generate runtime data for a single suite.

    Args:
        args: argument list to parse; defaults to ``sys.argv[1:]`` evaluated
            at call time (the original default was evaluated once at import).

    Raises:
        ValueError: if no suite is given or multiple suites are passed.
    """
    if args is None:
        args = sys.argv[1:]
    parser = ArgumentParser()
    parser.add_argument('-o', '--output-directory', dest='outdir',
        default=here, help="Directory to save runtime data.")

    parser.add_argument('-i', '--input-directory', dest='indir',
        default=here, help="Directory from which to read current runtime data.")

    parser.add_argument('-p', '--platforms', default=None,
        help="Comma separated list of platforms from which to generate data.")

    parser.add_argument('-s', '--suite', dest='suite', default=None,
        help="Suite for which to generate data.")

    args = parser.parse_args(args)

    if not args.suite:
        # BUG FIX: the message previously referred to a nonexistent -u option.
        raise ValueError("Must specify suite with the -s argument")
    if ',' in args.suite:
        raise ValueError("Passing multiple suites is not supported")

    if args.platforms:
        args.platforms = args.platforms.split(',')

    data = query_activedata(args.suite, args.platforms)
    write_runtimes(data, args.suite, indir=args.indir, outdir=args.outdir)
Ejemplo n.º 9
0
def parseargs():
    """Build the USB mass-writer CLI and return the parsed namespace."""
    parser = ArgumentParser(description='Writes some data on mutliple autodetected USB devices. Be careful, they will be formatted!')
    add = parser.add_argument
    add('data',
        help="The upper most folder which need to be transferred on all usb Sticks")
    add('-l', '--loop', action='store_true',
        help='Activates the loop, which will then let the program wait for any new devices plugged in. Abort by CRTL+C')
    add('-f', '--format', required=False, choices=["ntfs", "fat32"], default=formats[0],
        help="Formats the sticks, default format %(default)s")
    add('-fl', '--formatlabel', required=False, type=str,
        help="Formats the Devices with this Label")
    return parser.parse_args()
def main(argv=None):
    """Generate the PCAWG counts report for a metadata directory.

    Args:
        argv: optional argument list; defaults to ``sys.argv[1:]``.

    Returns:
        int: 0 on success; exits via ``sys.exit`` on a bad directory.
    """
    parser = ArgumentParser(
        description="PCAWG Report Generator Gathering Counts", formatter_class=RawDescriptionHelpFormatter
    )
    parser.add_argument(
        "-m", "--metadata_dir", dest="metadata_dir", help="Directory containing metadata manifest files", required=True
    )

    # BUG FIX: the original extended the global sys.argv with ``argv`` and
    # then parsed sys.argv anyway; passing argv straight to parse_args is
    # the intended behavior and leaves sys.argv untouched (argv=None still
    # means "parse sys.argv[1:]").
    args = parser.parse_args(argv)
    metadata_dir = args.metadata_dir  # this dir contains gnos manifest files, will also host all reports

    if not os.path.isdir(metadata_dir):  # TODO: should add more directory name check to make sure it's right
        sys.exit("Error: specified metadata directory does not exist!")

    # Derive the report name from this script's filename: pc_report-<name>.py
    report_name = re.sub(r"^pc_report-", "", os.path.basename(__file__))
    report_name = re.sub(r"\.py$", "", report_name)

    generate_report(metadata_dir, report_name)

    return 0
Ejemplo n.º 11
0
def main():
    """Entry point: dispatch to the list/install/remove subcommand."""
    opt_parser = ArgumentParser(
        description="Manage annotation databases of region_analysis.",
        prog="region_analysis_db.py")
    subparsers = opt_parser.add_subparsers(title="Subcommands",
                                           help="additional help")

    # list parser.
    parser_list = subparsers.add_parser("list", help="List genomes installed \
                                                      in database")
    parser_list.set_defaults(func=listgn)

    # install parser.
    parser_install = subparsers.add_parser("install",
                                           help="Install genome from tar.gz \
                                                  package file")
    parser_install.add_argument("pkg", help="Package file(.tar.gz) to install",
                                type=str)
    parser_install.set_defaults(func=install)
    parser_install.add_argument(
        "-y", "--yes", help="Say yes to all prompted questions",
        action="store_true")

    # remove parser.
    parser_remove = subparsers.add_parser("remove",
                                          help="Remove genome from database")
    parser_remove.add_argument("gn", help="Name of genome to be \
                                           removed(e.g. hg19)", type=str)
    parser_remove.set_defaults(func=remove)
    parser_remove.add_argument(
        "-y", "--yes", help="Say yes to all prompted questions",
        action="store_true")

    args = opt_parser.parse_args()
    # ROBUSTNESS: on Python 3 subparsers are optional by default, so invoking
    # the script without a subcommand previously crashed with AttributeError
    # on args.func; print help and exit with a usage error instead.
    if not hasattr(args, "func"):
        opt_parser.print_help()
        opt_parser.exit(2)
    args.func(args)
Ejemplo n.º 12
0
def main():
    """
    Parse arguments, set configuration values, then start the broker
    """
    parser = ArgumentParser(description="Crawl frontier worker.")
    add_opt = parser.add_argument
    add_opt('--config', type=str,
            help='Settings module name, should be accessible by import.')
    add_opt('--address', type=str,
            help='Hostname, IP address or Wildcard * to bind. Default is 127.0.0.1'
            '. When binding to wildcard it defaults to IPv4.')
    add_opt('--log-level', '-L', type=str, default='INFO',
            help='Log level, for ex. DEBUG, INFO, WARN, ERROR, FATAL. Default is'
            ' INFO.')
    add_opt('--port', type=int,
            help='Base port number, server will bind to 6 ports starting from base'
            '. Default is 5550')
    args = parser.parse_args()

    settings = Settings(module=args.config)
    # Fall back to configured defaults when the CLI did not supply a value.
    address = args.address or settings.get("ZMQ_ADDRESS")
    port = args.port or settings.get("ZMQ_BASE_PORT")
    server = Server(address, port)
    server.logger.setLevel(args.log_level)
    server.start()
Ejemplo n.º 13
0
Archivo: main.py Proyecto: bth/stools
def parse_args():
    """Build the stools CLI (version/config/execute/log) and parse it."""
    arg_parser = ArgumentParser()
    default_cfg = expanduser("~") + '/.stools/configuration.cfg'
    arg_parser.add_argument('-v', '--version', action='version', version='0.2')
    arg_parser.add_argument('-c', '--configuration', default=default_cfg)
    arg_parser.add_argument('-e', '--execute', nargs='*')
    arg_parser.add_argument('-l', '--log', default='ERROR')
    return arg_parser.parse_args()
Ejemplo n.º 14
0
def parse_args():
    """Parse command-line arguments for splitting events out of an I3 file.

    Returns:
        argparse.Namespace with ``infile``, ``outdir``, ``n_total`` and
        ``keep_criteria`` attributes.
    """
    parser = ArgumentParser(
        description=__doc__, formatter_class=ArgumentDefaultsHelpFormatter
    )
    parser.add_argument(
        '--infile', required=True,
        help='Input I3 filename'
    )
    parser.add_argument(
        '--outdir', default=None,
        help='Output directory'
    )
    # (removed dead commented-out '--n-per-file' option block)
    parser.add_argument(
        '--n-total', type=int, default=None,
        help='''Total events to split out into sub-files; if not specified,
        take all (that pass --keep-criteria)'''
    )
    parser.add_argument(
        '--keep-criteria', type=str, default=None,
        help='''Criteria for choosing the event for splitting out; events that
        fail to meet the criteria will not count towards n-total or n-per-file.
        This will be evaluated where the `frame` variable is available to
        retrieve info from.'''
    )
    return parser.parse_args()
Ejemplo n.º 15
0
 def run(cls):
     """Entry point: build the CLI from ``cls.commands``, dispatch to the
     selected command's ``main()`` and exit with its return code."""
     parser = ArgumentParser(
         description=("The theme development utility. "
                      "Includes three modes: "
                      "one for serving a theme compiled on-the-fly, "
                      "the other for compiling statically a theme "
                      "and the latter to dump URLs to files")
     )
     subparsers = parser.add_subparsers(
         title="Commands",
         description="Available commands (modes of operation)"
     )
     for key, target in cls.commands.items():
         # A None target means the command is handled by this class itself;
         # otherwise the target is looked up as an attribute of the class.
         if target is None:
             target = cls
         else:
             target = getattr(cls, target)
         # The target's docstring doubles as the subcommand description;
         # its first line becomes the short help text.
         subparser = subparsers.add_parser(
             key,
             description=target.__doc__,
             help=target.__doc__.splitlines()[0]
         )
         target.setup_parser(subparser)
         subparser.set_defaults(target=target)
     args = parser.parse_args()
     if hasattr(args, 'target'):
         sys.exit(args.target.main(args))
     else:
         # No subcommand selected: show usage and exit with an error code.
         parser.print_usage()
         sys.exit(-1)
Ejemplo n.º 16
0
def base_multinode_parser():
    """Creates a parser with arguments specific to sending HTTP requests
    to multiple REST APIs.

    Returns:
        {ArgumentParser}: Base parser with default HTTP args
    """
    parser = ArgumentParser(add_help=False)

    parser.add_argument('urls',
                        type=str,
                        nargs='+',
                        help="The URLs of the validator's REST APIs of interest, separated by"
                        " commas or spaces. (no default)")
    parser.add_argument('--users',
                        type=str,
                        action='append',
                        metavar='USERNAME[:PASSWORD]',
                        help='Specify the users to authorize requests, in the same order as '
                        'the URLs, separate by commas. Passing empty strings between commas '
                        'is supported.')

    return parser
Ejemplo n.º 17
0
def main():
    """Convert a text file into a Concrete communication file.

    Reads input/output paths ('-' maps to stdin/stdout via /dev/fd, so this
    will not work on Windows) plus an annotation level and log level from
    the command line.
    """
    set_stdout_encoding()

    parser = ArgumentParser(
        formatter_class=ArgumentDefaultsHelpFormatter,
        description='Convert text file to communication',
    )
    parser.set_defaults(annotation_level=AL_NONE)
    parser.add_argument('text_path', type=str,
                        help='Input text file path (- for stdin)')
    parser.add_argument('concrete_path', type=str,
                        help='Output concrete file path (- for stdout)')
    add_annotation_level_argparse_argument(parser)
    parser.add_argument('-l', '--loglevel', '--log-level',
                        help='Logging verbosity level threshold (to stderr)',
                        default='info')
    concrete.version.add_argparse_argument(parser)
    args = parser.parse_args()

    # Configure logging before any real work so all messages are captured.
    logging.basicConfig(format='%(asctime)-15s %(levelname)s: %(message)s',
                        level=args.loglevel.upper())

    # Won't work on Windows: '-' is mapped to the /dev/fd stdio aliases.
    text_path = '/dev/fd/0' if args.text_path == '-' else args.text_path
    concrete_path = (
        '/dev/fd/1' if args.concrete_path == '-' else args.concrete_path
    )
    annotation_level = args.annotation_level

    # Read the whole input as UTF-8 and serialize the communication.
    with codecs.open(text_path, encoding='utf-8') as f:
        comm = create_comm(text_path, f.read(),
                           annotation_level=annotation_level)
        write_communication_to_file(comm, concrete_path)
Ejemplo n.º 18
0
def parse_options(args=None):
    """Build the crawler CLI and parse ``args`` (defaults to sys.argv[1:])."""
    if args is None:  # pragma: no cover
        args = sys.argv[1:]
    parser = ArgumentParser()
    option = parser.add_argument
    option('-i', '--incremental',
           metavar='PATH',
           help='trigger incremental crawl')
    option('-o', '--output',
           metavar='PATH',
           default=DEFAULT_OUTPUT_PATH,
           help='Override persisted data, [default=%(default)s]')
    option('-v', '--verbose',
           action='count',
           dest='verbose',
           default=0,
           help='set verbosity level')
    option('crawls',
           metavar='CRAWL',
           nargs='*',
           help='Sub-set of crawls to launch')
    return parser.parse_args(args)
Ejemplo n.º 19
0
def main():
    """Run the Access (accdb/mdb) test suite against a fresh copy of the
    matching empty database template."""
    from argparse import ArgumentParser
    parser = ArgumentParser(usage=usage)
    parser.add_argument("-v", "--verbose", default=0, action="count", help="Increment test verbosity (can be used multiple times)")
    parser.add_argument("-d", "--debug", action="store_true", default=False, help="Print debugging items")
    parser.add_argument("-t", "--test", help="Run only the named test")
    parser.add_argument('type', choices=['accdb', 'mdb'], help='Which type of file to test')

    args = parser.parse_args()

    # ODBC driver name depends on the Access file flavor under test.
    DRIVERS = {
        'accdb': 'Microsoft Access Driver (*.mdb, *.accdb)',
        'mdb': 'Microsoft Access Driver (*.mdb)'
    }

    # Work on a scratch copy so tests never mutate the pristine template.
    here = dirname(abspath(__file__))
    src = join(here, 'empty.' + args.type)
    dest = join(here, 'test.' + args.type)
    shutil.copy(src, dest)

    # The test cases read the connection string from this module-level global.
    global CNXNSTRING
    CNXNSTRING = 'DRIVER={%s};DBQ=%s;ExtendedAnsiSQL=1' % (DRIVERS[args.type], dest)
    print(CNXNSTRING)

    # Smoke-check the connection before running the full suite.
    cnxn = pyodbc.connect(CNXNSTRING)
    print_library_info(cnxn)
    cnxn.close()

    suite = load_tests(AccessTestCase, args.test)

    testRunner = unittest.TextTestRunner(verbosity=args.verbose)
    result = testRunner.run(suite)
Ejemplo n.º 20
0
def _make_basic_help_parser(include_deprecated=False):
    """Make an arg parser that's used only for printing basic help.

    This prints help very similar to spark-submit itself. Runner args
    are not included unless they are also spark-submit args (e.g. --py-files)
    """
    help_parser = ArgumentParser(usage=_USAGE, description=_DESCRIPTION,
                                 epilog=_BASIC_HELP_EPILOG, add_help=False)

    _add_runner_alias_arg(help_parser)

    # Mirror spark-submit's help layout: a None group_desc means the
    # options belong to the top-level (ungrouped) section.
    for group_desc, opt_names in _SPARK_SUBMIT_ARG_GROUPS:
        if group_desc is None:
            parser_or_group = help_parser
        else:
            parser_or_group = help_parser.add_argument_group(group_desc)

        for opt_name in opt_names:
            _add_spark_submit_arg(parser_or_group, opt_name)

        # The runner-level basic/help (and optionally deprecated) args are
        # appended right after the ungrouped section so they appear in the
        # same position spark-submit would show them.
        if group_desc is None:
            _add_basic_args(help_parser)
            _add_help_arg(help_parser)
            if include_deprecated:
                _add_deprecated_arg(help_parser)

    return help_parser
Ejemplo n.º 21
0
def _get_args():
    parser = ArgumentParser(formatter_class=RawDescriptionHelpFormatter,
                            description="Node Agent")
    subparsers = parser.add_subparsers(dest="mode")

    parser_cleanup = subparsers.add_parser('cleanup')
    parser_cleanup.add_argument("session", help="Session Name")
    parser_cleanup.add_argument("volume", help="Volume Name")
    parser_cleanup.add_argument("tmpfilename", help="Temporary File Name")
    parser_cleanup.add_argument("--debug", help="Debug", action="store_true")

    parser_session_create = subparsers.add_parser('create')
    parser_session_create.add_argument("session", help="Session Name")
    parser_session_create.add_argument("volume", help="Volume Name")
    parser_session_create.add_argument("brick", help="Brick Path")
    parser_session_create.add_argument("time_to_update", help="Time to Update")
    parser_session_create.add_argument("--reset-session-time",
                                       help="Reset Session Time",
                                       action="store_true")
    parser_session_create.add_argument("--debug", help="Debug",
                                       action="store_true")

    parser_post = subparsers.add_parser('post')
    parser_post.add_argument("session", help="Session Name")
    parser_post.add_argument("volume", help="Volume Name")
    parser_post.add_argument("brick", help="Brick Path")
    parser_post.add_argument("--debug", help="Debug",
                             action="store_true")

    parser_delete = subparsers.add_parser('delete')
    parser_delete.add_argument("session", help="Session Name")
    parser_delete.add_argument("volume", help="Volume Name")
    parser_delete.add_argument("--debug", help="Debug",
                               action="store_true")
    return parser.parse_args()
Ejemplo n.º 22
0
def make_parser(prog):
    """Build the IMDb CLI parser with search/get/top/bottom subcommands."""
    parser = ArgumentParser(prog)
    parser.add_argument('--version', action='version', version='%(prog)s ' + VERSION)

    commands = parser.add_subparsers(metavar='command', dest='command')
    commands.required = True

    # The same closed set of item kinds is used by both search and get.
    item_types = ['movie', 'person', 'character', 'company', 'keyword']

    search = commands.add_parser('search', help='search for items')
    search.add_argument('type', help='type of item to search for', choices=item_types)
    search.add_argument('key', help='title or name of item to search for')
    search.add_argument('-n', type=int, help='number of items to list')
    search.add_argument('--first', action='store_true', help='display only the first result')
    search.set_defaults(func=search_item)

    get = commands.add_parser('get', help='retrieve information about an item')
    get.add_argument('type', help='type of item to retrieve', choices=item_types)
    get.add_argument('key', help='IMDb id (or keyword name) of item to retrieve')
    get.add_argument('-n', type=int, help='number of movies to list (only for keywords)')
    get.set_defaults(func=get_item)

    top = commands.add_parser('top', help='get top ranked movies')
    top.add_argument('-n', type=int, help='number of movies to list')
    top.add_argument('--first', action='store_true', help='display only the first result')
    top.set_defaults(func=get_top_movies)

    bottom = commands.add_parser('bottom', help='get bottom ranked movies')
    bottom.add_argument('-n', type=int, help='number of movies to list')
    bottom.add_argument('--first', action='store_true', help='display only the first result')
    bottom.set_defaults(func=get_bottom_movies)

    return parser
Ejemplo n.º 23
0
def main():
    """Plot spectral transmission curves for a set of filter datasheets,
    then compare all filters on one figure."""
    p = ArgumentParser(description='Plots spectral transmission data from filter datasheets')
    p.add_argument('--wlnm', help='START STOP STEP wavelength in nm', nargs=3, default=(200., 1200., 0.1), type=float)
    p.add_argument('--path', help='path to HDF5 data')
    p.add_argument('-a', '--altkm', help='observer altitude (km)', type=float, default=0.)
    p.add_argument('--zenang', help='zenith angle (deg)', type=float, default=0.)
    p = p.parse_args()

    # Fall back to the packaged precomputed data when --path is not given.
    inpath = Path(p.path).expanduser() if p.path else R / 'gridaurora/precompute'

    # Filter transmittance datasets to process.
    flist = ['BG3transmittance.h5', 'NE01transmittance.h5',
             'Wratten32transmittance.h5', 'Wratten21transmittance.h5',
             'HoyaV10transmittance.h5']
    flist = [inpath/f for f in flist]

    # Camera window and EMCCD quantum-efficiency curves applied to each filter.
    windFN = inpath/'ixonWindowT.h5'
    qeFN = inpath/'emccdQE.h5'
# %%
    Ts = []
    for f in flist:
        T = selftest(f, windFN, qeFN, p.wlnm, p.altkm, p.zenang)
        Ts.append(T)
        plotT(T, p.wlnm)
# %%
    comparefilters(Ts)

    show()
Ejemplo n.º 24
0
    def __init__(self, context):
        """Initialize the rqt plugin: parse plugin CLI args and add the
        reconfigure widget to the rqt user interface."""
        super(ReconfClientPlugin, self).__init__(context)
        # Give QObjects reasonable names
        self.setObjectName('ReconfClientPlugin')

        # Process standalone plugin command-line arguments
        from argparse import ArgumentParser
        parser = ArgumentParser()
        # Add argument(s) to the parser.
        parser.add_argument("-q", "--quiet", action="store_true",
                      dest="quiet",
                      help="Put plugin in silent mode")
        args, unknowns = parser.parse_known_args(context.argv())
        if not args.quiet:
            print 'arguments: ', args
            print 'unknowns: ', unknowns

        # Create QWidget
        self._widget = tiny_ref_gui.ReconfigureWidget()
        # Show _widget.windowTitle on left-top of each plugin (when 
        # it's set in _widget). This is useful when you open multiple 
        # plugins at once. Also if you open multiple instances of your 
        # plugin at once, these lines add number to make it easy to 
        # tell from pane to pane.
        if context.serial_number() > 1:
            self._widget.setWindowTitle(self._widget.windowTitle() + (' (%d)' % context.serial_number()))
        # Add widget to the user interface
        #parent = QTreeWidgetItem(self._widget.parameterTree)
        #parent.setText(0, "Name {}".format(2))
        #parent.setText(1, "Type")
        #parent.setText(2, "Value")
        #parent.setFlags(parent.flags() | Qt.ItemIsTristate | Qt.ItemIsUserCheckable | Qt.ItemIsEditable)
        context.add_widget(self._widget)
Ejemplo n.º 25
0
def parse_args():
    """Parse MyPyTutor installer options (update flags, GUI toggle, version).

    Returns:
        argparse.Namespace with boolean flags force_update_mpt,
        force_update_tutorials, no_gui and version.
    """
    parser = ArgumentParser()

    parser.add_argument(
        '--force-update-mpt',
        action='store_true',
        help='Update MyPyTutor even if this would not normally be required',
    )
    parser.add_argument(
        '--force-update-tutorials',
        action='store_true',
        help='Update tutorials even if this would not normally be required',
    )
    parser.add_argument(
        '--no-gui',
        action='store_true',
        help='Run the installer without using a GUI',
    )
    # BUG FIX: '--version' was listed twice in a single add_argument call,
    # which makes argparse raise ArgumentError ("conflicting option string")
    # at parser construction time.
    parser.add_argument(
        '--version',
        action='store_true',
        help='Print the version and then terminate',
    )

    return parser.parse_args()
Ejemplo n.º 26
0
def main():
    """Read the file named on the command line and print it side-by-side."""
    parser = ArgumentParser()
    parser.add_argument('file', action='store')

    args = parser.parse_args()

    # BUG FIX: close the input file deterministically instead of leaking
    # the handle returned by open().
    with open(args.file) as infile:
        lines = infile.readlines()
    print_left_right("left.txt", "right.txt", lines)
Ejemplo n.º 27
0
def query_main(graph, default_index):
    """
    Run a query.

    """
    parser = ArgumentParser()
    parser.add_argument("-i", "--index", default=default_index)
    parser.add_argument("-q", "--query", default='{"match_all": {}}')
    parser.add_argument("-f", "--flat", action="store_true")
    args = parser.parse_args()

    # Reject anything that is not valid JSON before hitting the cluster.
    try:
        query = loads(args.query)
    except Exception:
        parser.error("query must be valid json")

    result = graph.elasticsearch_client.search(
        index=args.index,
        body={"query": query},
    )

    # With --flat, unwrap the Elasticsearch envelope down to the hit list.
    return result["hits"]["hits"] if args.flat else result
Ejemplo n.º 28
0
def parseArgs():
    """Parse CLI options and return the argparse namespace.

    NOTE(review): ``type = file`` relies on the Python 2 ``file`` builtin
    (the argument value is opened as a file object); under Python 3 this
    would need e.g. ``argparse.FileType('r')`` — confirm target version.
    """
    parser = ArgumentParser(
        description = 'Convert Entrez Gene Homo_sapiens.xml to python dictionary representation')
    parser.add_argument(
        '--Hsxml', metavar = 'HSXML', type = file, required = True,
        help = 'Name of Homo_sapiens.xml file - include a date reference for download for example')
    return parser.parse_args()
Ejemplo n.º 29
0
 def __init__(self):
     """Configure the ``format`` sub-command parser."""
     ArgumentParser.__init__(self,
                             prog="format",
                             description="print episodes with given formats",
                             epilog="example: format lost*")
     self.add_argument("filters",
                       metavar="EPISODE",
                       nargs="*",
                       default=["*"],
                       help="episode name or filter, ex: lost.s01e0*")
Ejemplo n.º 30
0
def _make_arg_parser():
    """Build the raw spark-submit argument parser.

    This parser is never used for help messages, but will show usage
    on error.
    """
    parser = ArgumentParser(usage=_USAGE, add_help=False)

    # positional arguments
    parser.add_argument(dest='script_or_jar', nargs='?')
    parser.add_argument(dest='args', nargs='*')

    # runner-level basic/alias/help/deprecated args, in the original order
    for add_args in (_add_basic_args, _add_runner_alias_arg,
                     _add_help_arg, _add_deprecated_arg):
        add_args(parser)

    # runner opts, excluding the hard-coded ones
    _add_runner_args(
        parser, opt_names=set(_RUNNER_OPTS) - set(_HARD_CODED_OPTS))

    # spark-specific opts (without colliding with runner opts)
    for opt_name, switch in _SPARK_SUBMIT_SWITCHES.items():
        if opt_name in _RUNNER_OPTS and switch not in _SWITCH_ALIASES:
            continue
        _add_spark_submit_arg(parser, opt_name)

    return parser
                                     mode="constant", constant_values=0)
            pred_slots.append(pred_padded)
            true_slot = np.array((list(zip(*batch))[2]))
            true_length = np.array((list(zip(*batch))[1]))
            true_slot = true_slot[:, :slot_pred_length]
            slot_acc = accuracy_score(true_slot, decoder_prediction, true_length)
            slot_accs.append(slot_acc)
        pred_slots_a = np.vstack(pred_slots)
        true_slots_a = np.array(list(zip(*index_test))[2])[:pred_slots_a.shape[0]]
        f1_score=f1_for_sequence_batch(true_slots_a, pred_slots_a)
        print("Slot accuracy for epoch {}: {:.3f}".format(epoch, np.average(slot_accs)*100))
        print("Slot F1 score for epoch {}: {:.3f}".format(epoch,f1_score*100 ))
        if (f1_score >best_f1_score):
            best_ep=epoch
            best_sl_acc=np.average(slot_accs)
            best_f1_score=f1_score
    print('\nBEST RESULT: epoch {}, valid accurasy {:.3f}, best test F1 score {:.3f}'.format(best_ep,best_sl_acc*100,best_f1_score*100))

    sess.close()
    with open('results.txt', 'a') as outfile:
        outfile.write('For Folder:'+str(fold)+' using '+str(cell)+' cell, BEST RESULT: epoch '+ str(best_ep)+ ', valid score {:.3f}, best test F1 score {:.3f}'.format( best_sl_acc*100,best_f1_score*100)+'\n')

if __name__ == '__main__':
    # Entry point: pick the RNN cell type from the CLI and run training.
    parser = ArgumentParser(description='Provide RNN Cell. Use either BasicRNN, or LSTM as arguments.')
    # parser.add_argument('N', type=int, help="Problem size", metavar="N")
    # BUG FIX: the original mixed tab and space indentation on the comment
    # and continuation lines; normalized to spaces throughout (PEP 8).
    parser.add_argument("Model", default='BasicRNN', type=str,
                        help="BasicRNN or LSTM algorithm")
    args = parser.parse_args()
    # ``cell`` is read as a module-level global by train().
    cell = args.Model
    train()
Ejemplo n.º 32
0
# -*- coding: utf-8 -*-
"""
Created on Sun Nov  4 20:28:33 2018

@author: mark
"""
import cv2
import numpy as np
from argparse import ArgumentParser
from to_read_pfm import readPFM

# Build the command-line interface for the optical-flow visualizer.
# Fixed: removed the C-style trailing semicolons (un-Pythonic, PEP 8).
parser = ArgumentParser()

parser.add_argument("flow_path", help="where to get the optical flow")
parser.add_argument("img0_path", help="where to get the original image0 as base")

parser.add_argument("-VL", help="optical vector length", dest="length", type=int, default=1)
parser.add_argument("-LW", help="line width", dest="line_width", type=int, default=2)
parser.add_argument("-SY", help="show an optical flow between how many rows", dest="show_Y", type=int, default=50)
parser.add_argument("-SX", help="show an optical flow between how many columns", dest="show_X", type=int, default=50)
parser.add_argument("-ON", help="output name", dest="output_name", default='output.jpg')

args = parser.parse_args()

# Set when an unsupported flow-file extension is encountered below.
error_flag = False

if '.pfm' in args.flow_path:
    flow, scale = readPFM(args.flow_path);#assume y direction is in dimension 0, and x in dimension 1.
elif '.npy' in args.flow_path:
    flow = np.load(args.flow_path);
else:
Ejemplo n.º 33
0
    def add_arguments(
        cls,
        parser: ArgumentParser,
        who: str = "",
        desc: str = ".",
        expert: bool = False,
        defaults: bool = True,
    ):
        """Add arguments for KgtkValue option processing.

        When "who" is not empty, it prefixes the options, destinations, and
        help messages.  This facilitates constructing command lines with
        multiple sets of KGTKValue options, such as for different input files.

        Args:
            parser: the argument parser to extend.
            who: optional prefix for option names, destinations, and help.
            expert: when False, all options are hidden (help=SUPPRESS) but
                still functional.
            defaults: when False, omit the default values so that prefixed
                options can fall back to the unprefixed ones.

        Fixes relative to the previous revision:
          * help typo "datetim.fromisoformat" -> "datetime.fromisoformat";
          * --repair-lax-coordinates help no longer duplicates the
            "Allow ..." text of --allow-lax-coordinates;
          * latitude/longitude limit options now use type=float (they
            previously used type=int although the defaults and the
            "%(default)f" help format are floats, so fractional limits
            such as -89.5 were rejected).  int arguments still parse.
        """
        prefix1: str = "--"  # The command line argument prefix.
        prefix2: str = ""  # The destination name prefix.
        prefix3: str = ""  # The help message prefix.

        if len(who) > 0:
            prefix1 = "--" + who + "-"
            prefix2 = who + "_"
            prefix3 = who + ": "

        # This helper function makes it easy to suppress options from
        # the help message.  The options are still there, and initialize
        # what they need to initialize.
        def h(msg: str) -> str:
            return msg if expert else SUPPRESS

        # This helper function decides whether or not to include defaults
        # in argument declarations.  If we plan to make arguments with
        # prefixes and fallbacks, the fallbacks (the ones without prefixes)
        # should get default values, while the prefixed arguments should not.
        #
        # Note: In obscure circumstances (EnumNameAction, I'm looking at you),
        # explicitly setting "default=None" may fail, whereas omitting the
        # "default=" phrase succeeds.
        #
        # TODO: continue researching these issues.
        def d(default: typing.Any) -> typing.Mapping[str, typing.Any]:
            return {"default": default} if defaults else {}

        vgroup = parser.add_argument_group(
            h(prefix3 + "Data value parsing"),
            h("Options controlling the parsing and processing of KGTK data values"
              + desc))

        # Nearly every option follows one of two patterns; these helpers
        # remove the copy-paste boilerplate.  The destination is always the
        # option name with dashes replaced by underscores.
        def add_bool(name: str, msg: str, default: bool = False):
            """Add an optional_bool flag following the common pattern."""
            vgroup.add_argument(
                prefix1 + name,
                dest=prefix2 + name.replace("-", "_"),
                help=h(prefix3 + msg),
                type=optional_bool,
                nargs='?',
                const=True,
                **d(default=default))

        def add_number(name: str, msg: str, default, number_type=float):
            """Add a numeric limit option following the common pattern."""
            vgroup.add_argument(
                prefix1 + name,
                dest=prefix2 + name.replace("-", "_"),
                help=h(prefix3 + msg),
                type=number_type,
                **d(default=default))

        # This one does not follow the common patterns: it takes a list and
        # always defaults to None (meaning "use the internal list").
        vgroup.add_argument(
            prefix1 + "additional-language-codes",
            dest=prefix2 + "additional_language_codes",
            help=h(prefix3 +
                   "Additional language codes. (default=use internal list)."),
            nargs="*",
            default=None)

        add_bool("allow-lax-qnodes",
                 "Allow qnode suffixes in quantities to include alphas and dash as well as digits. (default=%(default)s).")
        add_bool("allow-language-suffixes",
                 "Allow language identifier suffixes starting with a dash. (default=%(default)s).")
        add_bool("allow-lax-strings",
                 "Do not check if double quotes are backslashed inside strings. (default=%(default)s).")
        add_bool("allow-lax-lq-strings",
                 "Do not check if single quotes are backslashed inside language qualified strings. (default=%(default)s).")
        add_bool("allow-wikidata-lq-strings",
                 "Allow Wikidata language qualifiers. (default=%(default)s).")
        add_bool("require-iso8601-extended",
                 "Require colon(:) and hyphen(-) in dates and times. (default=%(default)s).")
        add_bool("force-iso8601-extended",
                 "Force colon (:) and hyphen(-) in dates and times. (default=%(default)s).")
        add_bool("allow-month-or-day-zero",
                 "Allow month or day zero in dates. (default=%(default)s).")
        add_bool("repair-month-or-day-zero",
                 "Repair month or day zero in dates. (default=%(default)s).")
        add_bool("allow-end-of-day",
                 "Allow 24:00:00 to represent the end of the day. (default=%(default)s).",
                 default=cls.DEFAULT_ALLOW_END_OF_DAY)

        add_number("minimum-valid-year",
                   "The minimum valid year in dates. (default=%(default)d).",
                   cls.MINIMUM_VALID_YEAR, number_type=int)
        add_bool("clamp-minimum-year",
                 "Clamp years at the minimum value. (default=%(default)s).")
        add_bool("ignore-minimum-year",
                 "Ignore the minimum year constraint. (default=%(default)s).")
        add_number("maximum-valid-year",
                   "The maximum valid year in dates. (default=%(default)d).",
                   cls.MAXIMUM_VALID_YEAR, number_type=int)
        add_bool("clamp-maximum-year",
                 "Clamp years at the maximum value. (default=%(default)s).")
        add_bool("ignore-maximum-year",
                 "Ignore the maximum year constraint. (default=%(default)s).")
        add_bool("validate-fromisoformat",
                 "Validate that datetime.fromisoformat(...) can parse this date and time. "
                 "This checks that the year/month/day combination is valid.  "
                 "The year must be in the range 1..9999, inclusive. (default=%(default)s).")

        add_bool("allow-lax-coordinates",
                 "Allow coordinates using scientific notation. (default=%(default)s).")
        add_bool("repair-lax-coordinates",
                 "Repair coordinates using scientific notation. (default=%(default)s).")
        add_bool("allow-out-of-range-coordinates",
                 "Allow coordinates that don't make sense. (default=%(default)s).")
        add_number("minimum-valid-lat",
                   "The minimum valid latitude. (default=%(default)f).",
                   cls.MINIMUM_VALID_LAT)
        add_bool("clamp-minimum-lat",
                 "Clamp latitudes at the minimum value. (default=%(default)s).")
        add_number("maximum-valid-lat",
                   "The maximum valid latitude. (default=%(default)f).",
                   cls.MAXIMUM_VALID_LAT)
        add_bool("clamp-maximum-lat",
                 "Clamp latitudes at the maximum value. (default=%(default)s).")
        add_number("minimum-valid-lon",
                   "The minimum valid longitude. (default=%(default)f).",
                   cls.MINIMUM_VALID_LON)
        add_bool("clamp-minimum-lon",
                 "Clamp longitudes at the minimum value. (default=%(default)s).")
        add_number("maximum-valid-lon",
                   "The maximum valid longitude. (default=%(default)f).",
                   cls.MAXIMUM_VALID_LON)
        add_bool("clamp-maximum-lon",
                 "Clamp longitudes at the maximum value. (default=%(default)s).")
        add_bool("modulo-repair-lon",
                 "Wrap longitude to (-180.0,180.0]. (default=%(default)s).")

        add_bool("escape-list-separators",
                 "Escape all list separators instead of splitting on them. (default=%(default)s).")
#!/usr/bin/env python
import numpy as np
from pyproj import Proj
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter

from netCDF4 import Dataset as CDF

# set up the argument parser
# set up the argument parser
parser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter)
parser.description = "Create CDO-compliant grid description"
parser.add_argument("FILE", nargs="*")
parser.add_argument("-g",
                    "--grid_spacing",
                    dest="grid_spacing",
                    type=float,
                    help="use X m grid spacing",
                    default=1800)
parser.add_argument(
    "-f",
    "--format",
    dest="fileformat",
    type=str.upper,
    # type=str.upper is also applied to the string default, so "netcdf4"
    # becomes "NETCDF4" and passes the choices check.
    choices=["NETCDF4", "NETCDF4_CLASSIC", "NETCDF3_CLASSIC", "NETCDF3_64BIT"],
    help="file format of output file",  # typo fix: was "out output file"
    default="netcdf4",
)

options = parser.parse_args()
args = options.FILE
grid_spacing = options.grid_spacing  # convert
Ejemplo n.º 35
0
def main(argv = None):
    """Command-line driver for starting/stopping/inspecting ldmsd daemons.

    Reads host/port/transport settings from the config file named by
    --config-file and dispatches to the start/kill/check/pid helpers
    (defined elsewhere in this module) for the samplerd and aggregator
    daemons.  Returns 0 on KeyboardInterrupt, 2 on any other error.

    Fixed: Python-2-only ``print`` statements converted to ``print()``
    calls, which behave identically under Python 2 and are valid under
    Python 3.
    """
    if argv is None:
        argv = sys.argv
    else:
        sys.argv.extend(argv)

    try:
        parser = ArgumentParser()
        parser.add_argument('--config-file', help = "Path to the config file", required = True)
        parser.add_argument('--start-samplerd', help = "start samplerd ldmsd",
                            action = "store_true")
        parser.add_argument('--kill-samplerd', action = "store_true",
                            help = "kill samplerd ldmsd")
        parser.add_argument('--start-agg', action = "store_true",
                            help = "start agg ldmsd")
        parser.add_argument('--kill-agg', action = "store_true",
                            help = "kill agg ldmsd")
        parser.add_argument('--kill-9-samplerd', action = "store_true",
                            help = "kill 9 samplerd ldmsd")
        parser.add_argument('--kill-9-agg', action = "store_true",
                            help = "kill 9 agg ldmsd")
        parser.add_argument('--remove-samplerd-files', action = "store_true",
                            help = "Remove the samplerd's log and sock files")
        parser.add_argument('--remove-agg-files', action = "store_true",
                            help = "Remove the aggregataor(s)'s log and sock files")
        parser.add_argument('--check-samplerd', action = "store_true",
                            help = "Check samplerd ldmsd running")
        parser.add_argument('--check-agg', action = "store_true",
                            help = "Check agg ldmsd running")
        parser.add_argument('--samplerd-pid', help = "Get the samplerd PIDs",
                            action = "store_true")
        parser.add_argument('--agg-pid', help = "Get the agg PIDs",
                            action = "store_true")
        args = parser.parse_args()

        # Load the deployment description (hosts, transports, ports, paths).
        cfg = get_var_from_file(module_name = "cfg", filepath = args.config_file)

        # Each flag is independent; several actions may run in one invocation.
        if args.start_samplerd:
            print("start samplerd..")
            start_ldmsd(hosts = cfg.SAMPLERD_HOSTS,
                               xprt = cfg.SAMPLERD_XPRT, port = cfg.SAMPLERD_PORT,
                               log = cfg.SAMPLERD_LOG, sockname = cfg.SAMPLERD_SOCK)
        if args.start_agg:
            print("start agg..")
            start_ldmsd(hosts = cfg.AGG_HOSTS, xprt = cfg.AGG_XPRT, port = cfg.AGG_PORT,
                              log = cfg.AGG_LOG, sockname = cfg.AGG_SOCK)
        if args.kill_samplerd:
            print("kill samplerd..")
            kill_ldmsd(hosts = cfg.SAMPLERD_HOSTS, xprt = cfg.SAMPLERD_XPRT,
                                port = cfg.SAMPLERD_PORT)
        if args.kill_agg:
            print("kill agg..")
            kill_ldmsd(hosts = cfg.AGG_HOSTS, xprt = cfg.AGG_XPRT, port = cfg.AGG_PORT)
        if args.kill_9_samplerd:
            print("kill 9 samplerd..")
            kill_9_ldmsd(hosts = cfg.SAMPLERD_HOSTS, xprt = cfg.SAMPLERD_XPRT,
                                port = cfg.SAMPLERD_PORT)
        if args.kill_9_agg:
            print("kill 9 agg..")
            kill_9_ldmsd(hosts = cfg.AGG_HOSTS, xprt = cfg.AGG_XPRT, port = cfg.AGG_PORT)
        if args.remove_samplerd_files:
            print("Removing the files of samplerd")
            remove_file(cfg.SAMPLERD_HOSTS, cfg.SAMPLERD_LOG)
            remove_file(cfg.SAMPLERD_HOSTS, cfg.SAMPLERD_SOCK)
        if args.remove_agg_files:
            print("Removing the files of aggregators")
            remove_file(cfg.AGG_HOSTS, cfg.AGG_LOG)
            remove_file(cfg.AGG_HOSTS, cfg.AGG_SOCK)
        if args.check_samplerd:
            print("Check samplerd ldmsd running? ....")
            print(is_ldmsd_running(hosts = cfg.SAMPLERD_HOSTS,
                                              xprt = cfg.SAMPLERD_XPRT,
                                              port = cfg.SAMPLERD_PORT))
        if args.check_agg:
            print("Check agg ldmsd running? ....")
            print(is_ldmsd_running(hosts = cfg.AGG_HOSTS, xprt = cfg.AGG_XPRT,
                                            port = cfg.AGG_PORT))
        if args.samplerd_pid:
            print("Getting samplerd pid")
            print(get_ldmsd_pid(hosts = cfg.SAMPLERD_HOSTS, xprt = cfg.SAMPLERD_XPRT,
                                port = cfg.SAMPLERD_PORT))
        if args.agg_pid:
            print("Getting agg pid")
            print(get_ldmsd_pid(hosts = cfg.AGG_HOSTS, xprt = cfg.AGG_XPRT,
                                port = cfg.AGG_PORT))
    except KeyboardInterrupt:
        return 0
    except Exception:
        traceback.print_exc()
        return 2
Ejemplo n.º 36
0
        # package.getDownloadCounts(start_date=x, end_date=y)in a loop.
        downloads = package.getDownloadCount()
        version_counts[version] += downloads
        series_counts[series] += downloads
        arch_counts[arch] += downloads
        print("%s - %s - %7s - %7s : %s" % (
            name, version, series, arch, downloads))
    print("\nVersion summaries")
    for version in sorted(version_counts):
        count = version_counts[version]
        print("%s: %s" % (version, count))
    print("\nSeries summaries")
    for series in sorted(series_counts):
        count = series_counts[series]
        print("%7s: %s" % (series, count))
    print("\nArch summaries")
    for arch in sorted(arch_counts):
        count = arch_counts[arch]
        print("%5s: %s" % (arch, count))


if __name__ == '__main__':
    # Command-line entry point: count downloads of one package in a PPA.
    parser = ArgumentParser('Count package downloads from a PPA.')
    parser.add_argument('--since', type=to_datetime)
    # The three required positionals, in the order callers must supply them.
    for positional in ('owner_name', 'ppa_name', 'package_name'):
        parser.add_argument(positional)
    args = parser.parse_args()
    report_juju_core_installs(
        args.owner_name, args.ppa_name, args.package_name, since=args.since)
Ejemplo n.º 37
0
        if self.options.keep and os.path.exists(interfaceFile):
            with open(interfaceFile, "r") as f:
                filecontent = f.read()

        if self.options.keep and os.path.exists(interfaceFile) and \
                filecontent == content:
            self.info("%s unchanged." % interfaceFile)
        else:
            self.info("Writing %s." % interfaceFile)
            with open(interfaceFile, "w") as f:
                f.write(content)


if __name__ == '__main__':
    argParser = ArgumentParser()
    argParser.add_argument("--mdx",
                           action="append",
                           dest="mdx",
                           default=[],
                           metavar="FILE",
                           help="master idx file to be used.")
    argParser.add_argument(
        "--import",
        action="append",
        dest="imports",
        default=[],
        metavar="FILE",
        help="File to be imported in the generated interface file.")
    argParser.add_argument(
        "--swig-include",
#!/usr/bin/env python3
from __future__ import absolute_import, division, print_function
import os
import glob
#os.environ['CUDA_VISIBLE_DEVICES'] = '0'
import numpy as np
import fid
from scipy.misc import imread
import tensorflow as tf
from argparse import ArgumentParser
import sys

# Command-line options: which dataset's FID statistics to use and the
# file format of the generated images.
parser = ArgumentParser()
for flag, default, allowed in (
        ("--dataset", "PCam", ["CelebA", "PCam"]),
        ("--image_format", "jpg", ["jpg", "png"]),
):
    parser.add_argument(flag, type=str, default=default, choices=allowed)
config = parser.parse_args()

# Paths
if config.dataset == "CelebA":
    image_path = 'celeba/results'  # set path to some generated images
    stats_path = 'fid_stats_celeba.npz'
elif config.dataset == "PCam":
    image_path = 'pcam/results'  # set path to some generated images
    stats_path = 'fid_stats_pcam.npz'  # training set statistics
else:
Ejemplo n.º 39
0
        print("")

    # now cd back to where we started
    os.chdir(cwd)

    #print len(sourceFiles),sourceFiles
    #print len(targetFiles),targetFiles
    return sourceFiles, targetFiles

#
# main, runs if started at the command line
#

if __name__ == "__main__":

    # Command-line entry point: read the trigger configuration and
    # generate the corresponding fcl files.
    cli = ArgumentParser()
    cli.add_argument(
        "-c", "--config-file", dest="configFileText", metavar="FILE",
        help="file with Trigger configuration. Paths available are: unbiased, minimumbiasSdCount,largeSdCount, minimumbiasCdCount,largeCdCount, caloOnly, caloMixed, caloCosmicMuon, tprDeMSeed, tprDePSeed, cprDeMSeed, cprDePSeed, triggerOutput")
    cli.add_argument(
        "-o", "--online", dest="online", action="store_true",
        help="if present, use the online main fcl file template instead of offline")
    cli.add_argument(
        "-q", "--quiet", dest="verbose", action="store_false", default=True,
        help="don't print status messages to stdout")

    args = cli.parse_args()
    if args.verbose:
        print("Config file name: {}".format(args.configFileText))
        print("Online flag: {}".format(str(args.online)))

    generate(args.configFileText, args.online, args.verbose, True)
Ejemplo n.º 40
0
def main():
    """
    Import the ngeo apps files

    Reads the ngeo source file (SRC), rewrites it for use as a
    c2cgeoportal project template (package placeholders, static URLs,
    routes, i18n), and writes the result to DST.  Exactly one of
    --html / --js / --package selects the kind of rewriting; otherwise
    only the generic placeholder escaping is applied.

    NOTE(review): helpers _sub/_subs/_get_ngeo_version/_RouteDest and the
    names loads/dumps/re are defined elsewhere in this module.
    """

    parser = ArgumentParser(description='import ngeo apps files')

    parser.add_argument('--html', action="store_true", help="Import the html template")
    parser.add_argument('--js', action="store_true", help="Import the javascript controller")
    parser.add_argument('--package', action="store_true", help="Import the package JSON")
    parser.add_argument('interface', metavar='INTERFACE', help="The interface we import")
    parser.add_argument('src', metavar='SRC', help="The ngeo source file")
    parser.add_argument('dst', metavar='DST', help="The destination file")

    args = parser.parse_args()

    with open(args.src) as src:
        data = src.read()

        if args.package:
            # Rebuild package.json from scratch, keeping only the ngeo
            # devDependencies (minus the test-only packages removed below).
            ngeo_json_data = loads(data)
            json_data = {}
            json_data["name"] = "{{package}}"
            json_data["version"] = "2.0.0"
            json_data["description"] = "A GeoMapFish project"

            json_data["devDependencies"] = ngeo_json_data["devDependencies"]
            # freeze the ngeo version
            json_data["devDependencies"]["ngeo"] = _get_ngeo_version()
            for package in [
                "angular-jsdoc",
                "angular-mocks",
                "coveralls",
                "gaze",
                "jsdoc",
                "jsdom",
                "karma",
                "karma-coverage",
                "karma-jasmine",
                "karma-phantomjs-launcher",
            ]:
                del json_data["devDependencies"][package]

            data = dumps(json_data, indent=2, sort_keys=True)
            # Strip trailing whitespace and end with a newline.
            data = _sub(r" +\n", "\n", data)
            data = data + "\n"

        else:
            # Escape literal {{ }} so mako does not interpret them, then
            # substitute the package placeholder.
            # NOTE(review): re.sub("app", ...) replaces EVERY occurrence of
            # the substring "app", not just the package name — confirm this
            # is intended.
            data = re.sub(r"{{", r"\\{\\{", data)
            data = re.sub(r"}}", r"\\}\\}", data)
            data = re.sub("app", "{{package}}", data)

# temporary disable ...
#        if args.js:
            # Full text search
#            data = _sub(r"datasetTitle: 'Internal',", r"datasetTitle: '{{project}}',", data)

        if args.html:
            # Turn the HTML page into a mako template: prepend the mako
            # header, then replace static assets, scripts, i18n and route
            # constants with request-driven expressions.
            data = "<%\n" \
                "from json import dumps\n" \
                "from c2cgeoportal.lib.cacheversion import get_cache_version\n" \
                "%>\n" + \
                data
            # back for ng-app
            data = _sub(r"ng-{{package}}", r"ng-app", data)
            # back for gmf-app- css prefix
            data = _sub(r"gmf-{{package}}-", r"gmf-app-", data, required=False)
            if args.interface == "mobile":
                # back for mobile-web-app-capable
                data = _sub(
                    r"mobile-web-{{package}}-capable",
                    r"mobile-web-app-capable", data
                )
            else:
                data = _sub(
                    r'<img src="image/([^"]+)"( alt="")? ?/>',
                    '<img src="${request.static_url(\'{{package}}:static-ngeo/images/\\1\')}" />',
                    data,
                )
            data = _sub(
                r'<link rel="shortcut icon" href="image/favicon.ico"/>',
                '<link rel="shortcut icon" href="${request.static_url(\'{{package}}:static-ngeo/images/favicon.ico\')}"/>',  # noqa
                data,
            )
            # Styles
            data = _sub(
                r'    <link rel="stylesheet.*/build/{}.css">'.format(args.interface),
                r"""    <link rel="stylesheet" href="${{request.static_url('{{{{package}}}}:static-ngeo/build/{interface}.css')}}" type="text/css">""".format(interface=args.interface),  # noqa
                data,
                count=1,
                flags=re.DOTALL
            )
            # Scripts
            data = _sub(
                r'    <script',
                r"""% if debug:
    <script>
        window.CLOSURE_BASE_PATH = '';
        window.CLOSURE_NO_DEPS = true;
    </script>
    <script""",
                data, count=1
            )
            data = _sub(
                re.escape('    <script src="/@?main=') + ".*" + re.escape('watchwatchers.js"></script>'),
                r"""
    <script src="${{request.static_url('%s/closure/goog/base.js' % request.registry.settings['closure_library_path'])}}"></script>
    <script src="${{request.route_url('deps.js')}}"></script>
    <script>
        goog.require('{{{{package}}}}_{interface}');
    </script>
    <script src="${{request.static_url('{{{{package}}}}:static-ngeo/build/templatecache.js')}}"></script>
    <script src="${{request.static_url('%s/ngeo/utils/watchwatchers.js' % request.registry.settings['node_modules_path'])}}"></script>
    <script>
        {{{{package}}}}.componentsBaseTemplateUrl = '${{request.static_url("{{{{package}}}}:static-ngeo/components")}}';
        // {{{{package}}}}.partialsBaseTemplateUrl = '${{request.static_url("{{{{package}}}}:static-ngeo/partials")}}';
        // {{{{package}}}}.baseTemplateUrl = '${{request.static_url("{{{{package}}}}:static-ngeo/js")}}';
    </script>
% else:
    <script src="${{request.static_url('{{{{package}}}}:static-ngeo/build/{interface}.js')}}"></script>
% endif""".format(interface=args.interface),  # noqa
                data,
                count=1,
                flags=re.DOTALL
            )
            data = _sub(
                '{}([^"]+){}(.*){}'.format(
                    re.escape('<script src="../../../../node_modules/'),
                    re.escape('"'),
                    re.escape("></script>"),
                ),
                r"""<script src="${request.static_url('%s/\1' % request.registry.settings['node_modules_path'])}"\2></script>""",  # noqa
                data,
            )
            data = _sub(
                '{}([^"]+){}(.*){}'.format(
                    re.escape('<script src="../../../../'),
                    re.escape('"'),
                    re.escape("></script>"),
                ),
                r"""<script src="${request.static_url('%s/ngeo/\1' % request.registry.settings['node_modules_path'])}"\2></script>""",  # noqa
                data,
            )
            # i18n
            data = _sub(
                "module.constant\('defaultLang', 'en'\);",
                "module.constant('defaultLang', "
                "'${request.registry.settings[\"default_locale_name\"]}');",
                data,
            )
            data = _sub(re.escape(r"""
        var cacheVersion = '0';
"""), "", data)
            data = _sub(
                re.escape(r"""
        var angularLocaleScriptUrlElements = urlElements.slice(0, urlElements.length - 3);
        angularLocaleScriptUrlElements.push('build', 'angular-locale_\{\{locale\}\}.js?cache_version=' + cacheVersion);"""),  # noqa
                "",
                data,
            )
            data = _sub(
                re.escape(
                    "gmfModule.constant('angularLocaleScript', "
                    "angularLocaleScriptUrlElements.join('/'));"
                ),
                "gmfModule.constant('angularLocaleScript', "
                "'${request.static_url('{{package}}:static-ngeo/build/')}"
                "angular-locale_\{\{locale\}\}.js');",
                data,
            )
            data = _sub(
                re.escape("""
        var langUrls = {};
        ['en', 'fr', 'de'].forEach(function(lang) {
            var langUrlElements = urlElements.slice(0, urlElements.length - 3);
            langUrlElements.push('build', 'gmf-' + lang + '.json?cache_version=' + cacheVersion)
            langUrls[lang] = langUrlElements.join('/')
        });"""),
                r"""        var langUrls = {
${ ',\\n'.join([
    "          '{lang}': '{url}'".format(
        lang=lang,
        url=request.static_url('{{package}}:static-ngeo/build/{lang}.json'.format(lang=lang))
    )
    for lang in request.registry.settings["available_locale_names"]
]) | n}
        };""",
                data,
            )
            data = _sub(
                re.escape("module.constant('cacheVersion', cacheVersion);"),
                "module.constant('cacheVersion', '${get_cache_version()}');",
                data,
            )
            data = _subs(
                [(
                    "module.constant\('gmfSearchGroups', \[\]\);",
                    False
                ), (
                    "module.constant\('gmfSearchGroups', \[[^\]]*\]\);",
                    "module.constant('gmfSearchGroups', ${dumps(fulltextsearch_groups) | n});",
                )],
                data,
            )

            # replace routes
            for constant, url_end, route, required in [
                ("authenticationBaseUrl", r"", "base", True),
                ("fulltextsearchUrl", r"/fulltextsearch", "fulltextsearch", True),
                ("gmfRasterUrl", r"/raster", "raster", args.interface != "mobile"),
                ("gmfProfileCsvUrl", r"/profile.csv", "profile.csv", args.interface != "mobile"),
                ("gmfProfileJsonUrl", r"/profile.json", "profile.json", args.interface != "mobile"),
                ("gmfPrintUrl", r"/printproxy", "printproxy", args.interface != "mobile"),
                ("gmfTreeUrl", r"/themes", "themes", True),
                ("gmfShortenerCreateUrl", r"/short/create", "shortener_create", args.interface != "mobile"),
            ]:
                data = _sub(
                    r"module.constant\('%s', "
                    "'https://geomapfish-demo.camptocamp.net/2.[0-9]/wsgi%s\??([^\']*)'\);" % (
                        constant, url_end
                    ),
                    _RouteDest(constant, route),
                    data,
                    required=required,
                )
            data = _sub(
                re.escape("module.constant('gmfContextualdatacontentTemplateUrl', window.location.pathname + 'contextualdata.html');"),  # noqa
                "module.constant('gmfContextualdatacontentTemplateUrl', {{package}}.componentsBaseTemplateUrl + '/contextualdata/contextualdata.html');",  # noqa
                data, required=False
            )
            data = _sub(
                re.escape("module.value('ngeoWfsPermalinkOptions',") + ".*defaultFeatureNS",
                """module.value('ngeoWfsPermalinkOptions', /** @type {ngeox.WfsPermalinkOptions} */ ({
              url: '${request.route_url('mapserverproxy') | n}',
              wfsTypes: ${dumps(wfs_types) | n},
              defaultFeatureNS""",
                data,
                count=1,
                flags=re.DOTALL,
            )
            data = _sub(
                re.escape("module.constant('defaultTheme', 'OSM');"),
                "module.constant('defaultTheme', 'Demo');",
                data,
            )

        with open(args.dst, "wt") as dst:
            dst.write(data)
Ejemplo n.º 41
0
# Strip trailing commas from a JSON file so it becomes valid JSON.

from argparse import ArgumentParser

from pyspark.sql import SparkSession

from src.utils.spark_utils import json_without_trailing_comma

# Command-line interface.
parser = ArgumentParser()
parser.add_argument('--input-file', help='Input path, path to the file containing trailing commas')
parser.add_argument('--output-path', help='Output path, the valid json file will be store in this path')
args = parser.parse_args()

# One shared Spark session for the whole job.
spark = SparkSession.builder.getOrCreate()

# Read the raw file as plain text; "multiline" keeps multi-line records intact.
raw_text = spark.read.option("multiline", "true").text(args.input_file)

# Drop the trailing commas and persist the cleaned text.
cleaned = json_without_trailing_comma(raw_text)
cleaned.write.text(args.output_path)
Ejemplo n.º 42
0
class DelegatedArgumentParser(Delegator):
    """ArgumentParser wrapped in the project's Delegator machinery.

    Method calls are forwarded to a delegate parser; ``add_subparsers`` is
    special-cased so that (presumably) the ``required`` keyword argument is
    stripped before delegation -- confirm in RemoveKwArgDelegatedMethodAction.
    """
    # Shared fallback delegate used when no explicit parser is supplied.
    DEFAULT_DELEGATE_OBJ = ArgumentParser()
    # No straight method-name remappings.
    DELEGATED_METHODS = {}
    # Per-method delegation hooks.
    DELEGATED_METHOD_ACTIONS = {
        "add_subparsers": RemoveKwArgDelegatedMethodAction(["required"])
    }
Ejemplo n.º 43
0
    def test_accepts_all_args(self):
        """Every combination of CLI arguments parses to the expected values.

        Exhaustively tries all subsets of the command's arguments, parses
        them, and checks that unsupplied options fall back to their defaults
        while supplied ones round-trip unchanged.
        """
        all_test_arguments = cluster_config_command.all_arguments

        default_arg_values = {
            '--region-url': None,
            '--uuid': None,
            '--init': False,
            '--tftp-port': None,
            '--tftp-root': None,
        }

        failures = {}

        # Try all cardinalities of combinations of arguments
        for r in range(len(all_test_arguments) + 1):
            for test_arg_names in combinations(all_test_arguments, r):
                test_values = {
                    '--region-url': factory.make_simple_http_url(),
                    '--uuid': str(uuid.uuid4()),
                    '--init': '',
                    '--tftp-port': str(factory.pick_port()),
                    '--tftp-root': factory.make_string(),
                }

                # Build a query dictionary for the given combination of args
                args_under_test = []
                for param_name in test_arg_names:
                    args_under_test.append(param_name)
                    if param_name != '--init':
                        args_under_test.append(test_values[param_name])

                parser = ArgumentParser()
                cluster_config_command.add_arguments(parser)

                # If both init and uuid are passed, argparse will generate
                # a nice ArgumentError exception, which unfortunately,
                # gets caught and sent to exit.
                if '--init' in test_arg_names and '--uuid' in test_arg_names:
                    expected_exception = ExpectedException(SystemExit, '2')
                    with expected_exception, patch('sys.stderr'):
                        parser.parse_known_args(args_under_test)

                else:
                    # Otherwise, parsed args with defaults as usual
                    observed_args = vars(
                        parser.parse_args(args_under_test))

                    expected_args = {}
                    for param_name in all_test_arguments:
                        parsed_param_name = param_name[2:].replace('-', '_')

                        if param_name not in test_arg_names:
                            expected_args[parsed_param_name] = \
                                default_arg_values[param_name]
                        else:
                            expected_args[parsed_param_name] = \
                                observed_args[parsed_param_name]

                    if expected_args != observed_args:
                        failures[str(test_arg_names)] = {
                            'expected_args': expected_args,
                            'observed_args': observed_args,
                        }

        # Fixed: the fragments below previously lacked separating spaces
        # ("list(s)passed", "(expected_args)to") and duplicated "the",
        # producing a garbled failure message.
        error_message = io.StringIO()
        error_message.write(
            "One or more key / value argument list(s) "
            "passed in the query string (expected_args) "
            "to the API do not match the values in "
            "the returned query string. This "
            "means that some arguments were "
            "dropped / added / changed by "
            "the function, which is incorrect "
            "behavior. The list of incorrect "
            "arguments is as follows: \n")
        pp = pprint.PrettyPrinter(depth=3, stream=error_message)
        pp.pprint(failures)
        self.assertDictEqual({}, failures, error_message.getvalue())
Ejemplo n.º 44
0
    if not cc:
        raise FileNotFoundError(f"{cc_name} not found")

    cxx = env.get("CXX", "")
    if cxx_name not in cxx:
        cxx = shutil.which(cxx_name)
    if not cxx:
        raise FileNotFoundError(f"{cxx_name} not found")

    env.update({"FC": fc, "CC": cc, "CXX": cxx})

    return env


if __name__ == "__main__":
    # Build-helper CLI: compile the requested numeric libraries.
    p = ArgumentParser()
    p.add_argument("libs", help="libraries to compile (lapack, scalapack, mumps)", nargs="+")
    p.add_argument("-prefix", help="toplevel path to install libraries under", default="~/lib_gcc")
    p.add_argument("-workdir", help="toplevel path to where you keep code repos", default="~/code")
    p.add_argument("-wipe", help="wipe before completely recompiling libs", action="store_true")
    p.add_argument("-b", "--buildsys", help="build system (meson or cmake)", default="meson")
    P = p.parse_args()

    # Expand '~' and normalise both directories once, up front.
    dirs = {key: Path(getattr(P, key)).expanduser().resolve()
            for key in ("prefix", "workdir")}

    # NOTE(review): the help text mentions mumps, but no branch here builds
    # it - possibly handled further down in the original file; confirm.
    if "openmpi" in P.libs:
        openmpi(P.wipe, dirs)
    if "lapack" in P.libs:
        lapack(P.wipe, dirs, P.buildsys)
    if "scalapack" in P.libs:
        scalapack(P.wipe, dirs, P.buildsys)
Ejemplo n.º 45
0
        f.readline()
        for line, rank in zip(f, top100):
            idx, answer = line.strip().split(',')
            answer = set(answer.split())
            rank = [(id2docname[i].split('/')[-1].lower()) for i in rank]
            hit = 0
            P = []
            for rank_i, rank in enumerate(rank, 1):
                if rank in answer:
                    hit += 1
                    P.append(hit / rank_i)
            AP.append(sum(P) / len(P))
    return sum(AP) / len(AP)

# Build the command-line parser for the retrieval script.
parser = ArgumentParser()
parser.add_argument("-r", dest="rel_switch", action="store_true", default=False,
                    help="Turn on the relevance feedback")
parser.add_argument("-b", dest="best_switch", action="store_true", default=False,
                    help="Run the best version")
parser.add_argument("-i", dest="query_file", action="store",
                    default="data/query-test.xml",
                    help="Filename of input query file")
Ejemplo n.º 46
0
def get_opt_parser():
    """Build the command-line parser for the train/test entry point.

    Returns:
        ArgumentParser: parser with a mandatory target mode plus optional
        flags for model creation, rendering, verbosity and episode count.
    """
    parser = ArgumentParser()
    # Positional: which phase to run.
    parser.add_argument("target", choices=["train", "test"])
    # Boolean switches.
    parser.add_argument("--new", dest="new", action="store_true",
                        help="create new model")
    parser.add_argument("--render", dest="render", action="store_true",
                        help="render or not")
    parser.add_argument("-v", dest="verbose", action="store_true",
                        help="verbose mode")
    # Test-time episode count.
    parser.add_argument("--ep", dest="episode", type=int,
                        help="number of episode to test")
    return parser
Ejemplo n.º 47
0
def build_parser():
    """Create the command-line parser for GAN training runs.

    Returns:
        ArgumentParser: configured parser; ``--model`` and ``--dataset``
        are mandatory.
    """
    parser = ArgumentParser()
    # '%(default)s' makes argparse print the real default, so the help text
    # cannot drift out of sync with the code (the previous literals claimed
    # 40 epochs and 5000 checkpoint steps while the actual defaults were
    # 75 and 1000).
    parser.add_argument('--num_epochs',
                        default=75,
                        help='default: %(default)s',
                        type=int)
    parser.add_argument('--batch_size',
                        default=16,
                        help='default: %(default)s',
                        type=int)
    parser.add_argument('--num_threads',
                        default=8,
                        help='# of data read threads (default: %(default)s)',
                        type=int)
    # List every registered model name in the help text.
    models_str = ' / '.join(config.model_zoo)
    parser.add_argument('--model', help=models_str,
                        required=True)  # DRAGAN, CramerGAN
    parser.add_argument('--name', help='default: name=model')
    parser.add_argument('--dataset', '-D', help='CelebA / LSUN', required=True)
    parser.add_argument(
        '--ckpt_step',
        default=1000,
        help='# of steps for saving checkpoint (default: %(default)s)',
        type=int)
    parser.add_argument('--renew',
                        action='store_true',
                        help='train model from scratch - \
        clean saved checkpoints and summaries',
                        default=False)

    return parser
Ejemplo n.º 48
0
def main():
    from argparse import ArgumentParser
    parser = ArgumentParser()
    parser.add_argument("-c", "--classes", action='store_true', dest='showclasses',
                        help="show classes found")
    parser.add_argument("-b", "--bases", action="store_true", dest="showbases",
                        help="show base classes (only works if --classes is active)")
    parser.add_argument("-i", "--interfaces", action="store_true", dest="showifaces",
                        help="show interfaces of classes (only works if --classes is active)")
    parser.add_argument("-u", "--use-cache", action='store_true', dest='use_cache',
                        help="use analysis cache")
    parser.add_argument('files', metavar='fname', type=str, nargs='+',
                        help='a file or directory to be scanned')

    options = parser.parse_args()
    print options.use_cache

    stime = time.time()
    psta = PythonSourceTreeAnalyser()
    for f in options.files:
        f = os.path.abspath(os.path.expanduser(f))
        if os.path.isdir(f):
            for pyfile in find_files(f, "*.py", exclude=lambda n: 'test' in n.split(os.sep)):
                psta.analyze_file(pyfile, use_cache=options.use_cache)
        else:
            psta.analyze_file(f, use_cache=options.use_cache)
    psta.dump(sys.stdout, options)
    sys.stdout.write("elapsed time: %s seconds\n\n" % (time.time() - stime))

    if options.use_cache:
        _FileInfoCache.save()
Ejemplo n.º 49
0
import os
from argparse import ArgumentParser
from subprocess import call

PROJECT_NAME = 'data-mng'
MAIN_SERVICE_NAME = 'service-app'
# The SOURCE_DATA environment variable is consumed by docker-compose.
os.environ['SOURCE_DATA'] = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'data_store')

docker_compose = f"""docker-compose --project-name {PROJECT_NAME} -f docker_compose/docker-compose.yml \
"""
docker_compose_postfix = f" --rm --name {PROJECT_NAME} {MAIN_SERVICE_NAME} "
simple_run = f'{docker_compose} run {docker_compose_postfix}'

if __name__ == '__main__':
    parser = ArgumentParser()
    parser.add_argument('-s', '--scenario', dest='scenario', required=True, help='Сценарий работы')
    args = parser.parse_args()
    scenario = args.scenario
    # Map the requested scenario onto a docker-compose invocation.
    if scenario in ('pipenv', 'bash', 'psql', 'load', 'test', 'mongo', 'mongoimport'):
        # One-shot commands run inside the main service container.
        sh_command = f'{simple_run} {scenario}'
    elif scenario == 'down':
        sh_command = f'{docker_compose} {scenario}'
    elif scenario == 'jupyter':
        # Detached, with the notebook port published.
        sh_command = f'{docker_compose} run -d -p 8889:8888 {docker_compose_postfix} {scenario}'
    elif scenario == 'service':
        # Detached, with the service port published.
        sh_command = f'{docker_compose} run -d -p 5001:5000 {docker_compose_postfix} {scenario}'
    elif scenario == 'docker':
        sh_command = f'{docker_compose} build {MAIN_SERVICE_NAME}'
    else:
        raise ValueError('Ошибочный сценарий: %s' % scenario)
    # NOTE(review): the command is only printed here; the 'call' import
    # suggests it may be executed elsewhere - confirm.
    print(sh_command)
Ejemplo n.º 50
0
from ydk.services import CRUDService
from ydk.providers import NetconfServiceProvider
from ydk.models.cisco_ios_xr import Cisco_IOS_XR_ipv4_bgp_cfg \
    as xr_ipv4_bgp_cfg
import logging


def config_bgp(bgp):
    """Add config data to bgp object."""
    # Intentionally empty: this sample applies no BGP configuration yet.
    pass


if __name__ == "__main__":
    """Execute main program."""
    # Command-line interface: a NETCONF target plus a verbosity switch.
    # NOTE(review): ArgumentParser and urlparse are not imported in the
    # visible snippet - presumably imported above; confirm.
    parser = ArgumentParser()
    parser.add_argument("-v", "--verbose", help="print debugging messages",
                        action="store_true")
    parser.add_argument("device",
                        help="NETCONF device (ssh://user:password@host:port)")
    args = parser.parse_args()
    # Split the ssh:// URL into scheme/host/port/credential components.
    device = urlparse(args.device)

    # log debug messages if verbose argument specified
    if args.verbose:
        logger = logging.getLogger("ydk")
        # NOTE(review): help says "debugging" but the level set is INFO, and
        # the handler is never attached to the logger in this snippet
        # (possibly truncated) - verify.
        logger.setLevel(logging.INFO)
        handler = logging.StreamHandler()
        formatter = logging.Formatter(("%(asctime)s - %(name)s - "
                                      "%(levelname)s - %(message)s"))
        handler.setFormatter(formatter)
Ejemplo n.º 51
0
    from datetime import timedelta
    from shapely.geometry import Polygon
    from sys import exit

    import reame.sensors
    from reame.classes import HistogramGrid
    from reame.utils import tabulate_data_points, REFERENCE_TIME


    try:
        default_day_offset = int(os.environ["SLURM_ARRAY_TASK_ID"])
    except KeyError:
        default_day_offset = 0

    # Command line arguments
    pars = ArgumentParser(description="Generate histograms of AOD.")
    pars.add_argument("algorithm",
                      help="Name of Sensor class used to read the data.")
    inpt = pars.add_mutually_exclusive_group()
    inpt.add_argument("--in_dir", help="strftime-compatible path to input files.")
    inpt.add_argument("--files", nargs="+", default=[],
                      help="Manually override the list of files to process.")
    pars.add_argument("--mod03_path",
                      help="Path to MODIS geolocation files.")
    pars.add_argument("--grid_path",
                      help="Path to MAIAC geolocation files.")
    pars.add_argument("--out_name", default="hist_%Y-%m-%d.npz",
                      help="strftime-compatible path for output file.")
    pars.add_argument("--day_offset", type=int, default=default_day_offset,
                      help="Number of days from Jan 1 1995 to evaluate.")
    pars.add_argument("--grid", default="/home/users/acpovey/ukesm_grid.4.npz",
Ejemplo n.º 52
0
def main():
    """Generate sentence-level tests from word-level test files.

    Each input JSON file maps set names to dicts holding an 'examples' list
    of terms.  Every term is expanded into template sentences chosen by its
    category, the targ1/targ2 sets are truncated to equal size, and the
    result is written next to the input with OUTPUT_PREFIX prepended.
    """
    from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
    parser = ArgumentParser(
        'Read word-level tests and generate corresponding sentence-level '
        'tests next to them using simple sentence templates.',
        formatter_class=ArgumentDefaultsHelpFormatter
    )
    parser.add_argument('input_paths', nargs='+', metavar='input_path',
                        help='Paths to word-level json test files.  Output '
                             'files will be named by prepending {} to each '
                             'input filename.'.format(OUTPUT_PREFIX))
    args = parser.parse_args()

    logging.basicConfig(level=logging.INFO)

    for input_path in args.input_paths:
        logging.info('Loading word-level test from {}'.format(input_path))
        with open(input_path) as f:
            sets = json.load(f)

        for (set_type, set_dict) in sets.items():
            sentences = []
            for term in set_dict['examples']:
                # Branch order matters: the first matching category wins.
                # Capitalized terms are treated as names, except terms ending
                # in 'American' and all-caps terms (e.g. acronyms).
                if any(term.startswith(c) for c in string.ascii_uppercase) and \
                        not term.endswith('American') and \
                        term != term.upper():
                    sentences += [
                        fill_template(template, term)
                        for template in NAME_TEMPLATES
                    ]
                elif term in ADJECTIVES:
                    sentences += [
                        fill_template(template, term)
                        for template in ADJECTIVE_TEMPLATES
                    ]
                elif term in VERBS:
                    sentences += [
                        fill_template(template, term)
                        for template in VERB_TEMPLATES
                    ]
                elif term in MASS_NOUNS:
                    sentences += [
                        fill_template(template, term)
                        for template in MASS_NOUN_TEMPLATES
                    ]
                elif term in ('he', 'she'):
                    sentences += [
                        fill_template(template, term)
                        for template in SUBJECT_PRONOUN_TEMPLATES
                    ]
                elif term in ('him', 'her'):
                    sentences += [
                        fill_template(template, term)
                        for template in OBJECT_PRONOUN_TEMPLATES
                    ]
                elif term in ('his', 'hers'):
                    sentences += [
                        fill_template(template, term)
                        for template in POSSESSIVE_PRONOUN_TEMPLATES
                    ]
                else:
                    # Fallback: treat the term as a count noun and emit both
                    # singular and plural sentences, using person vs. thing
                    # templates depending on whether PERSON_RE matches.
                    if term in PLURAL_NOUNS:
                        singular_term = singularize(term)
                        plural_term = term
                    else:
                        singular_term = term
                        plural_term = pluralize(term)
                    sentences += [
                        fill_template(template, singular_term)
                        for template in SINGULAR_NOUN_TEMPLATES + (
                            SINGULAR_PERSON_TEMPLATES
                            if PERSON_RE.search(term) is not None
                            else SINGULAR_THING_TEMPLATES
                        )
                    ]
                    sentences += [
                        fill_template(template, plural_term)
                        for template in PLURAL_NOUN_TEMPLATES + (
                            PLURAL_PERSON_TEMPLATES
                            if PERSON_RE.search(term) is not None
                            else PLURAL_THING_TEMPLATES
                        )
                    ]

            # Replace the word-level examples with the generated sentences.
            set_dict['examples'] = sentences

        # The two target sets must end up the same size.
        if len(sets['targ1']['examples']) != len(sets['targ2']['examples']):
            logging.info(
                'Truncating targ1, targ2 to have same size (current sizes: {}, {})'.format(
                    len(sets['targ1']['examples']), len(sets['targ2']['examples'])))
            (sets['targ1']['examples'], sets['targ2']['examples']) = truncate_lists(
                sets['targ1']['examples'], sets['targ2']['examples'])

        (dirname, basename) = os.path.split(input_path)
        output_path = os.path.join(dirname, OUTPUT_PREFIX + basename)

        logging.info('Writing sentence-level test to {}'.format(output_path))
        with open(output_path, 'w') as f:
            json.dump(sets, f, indent=2)
Ejemplo n.º 53
0
    predictions = None
    for in_name, weight in zip(in_names, weights):
        with open(os.path.join(out_path, in_name, 'result.txt')) as file:
            current_paths = np.array(tuple(map(lambda x: x.split()[0], file.readlines())))
            if paths is None:
                paths = current_paths
            assert np.array_equal(paths, current_paths)
        probablility = np.load(os.path.join(out_path, in_name, 'probability.npy'))
        if predictions is None:
            predictions = np.zeros_like(probablility)
        predictions += probablility * weight
    predictions = np.argmax(predictions, axis=1)
    with open(os.path.join(out_path, out_name, 'result.txt'), 'w') as file:
        for path, prediction in zip(paths, predictions):
            file.write(f'{path} {prediction}\n')


if __name__ == '__main__':
    # CLI wrapper around run(): combine prediction files with given weights.
    cli = ArgumentParser()
    cli.add_argument('--out-path', type=str, default='/content/logs')
    cli.add_argument('--in-names', type=str, required=True)
    cli.add_argument('--out-name', type=str, default='tmp')
    cli.add_argument('--weights', type=str, required=True)
    opts = vars(cli.parse_args())
    # NOTE(review): --in-names/--weights arrive as plain strings here;
    # run() is presumably responsible for splitting them - confirm.
    run(
        out_path=opts['out_path'],
        in_names=opts['in_names'],
        out_name=opts['out_name'],
        weights=opts['weights'],
    )
Ejemplo n.º 54
0
def main():
    """Write build metadata artifacts for a finished build.

    Produces three files: ``output_json`` (flat key/value dump of selected
    configure substitutions), ``buildhub_json`` (a record describing the
    build, its source and target, plus download info unless --no-download),
    and ``output_txt`` (just the ``buildID=`` line).
    """
    parser = ArgumentParser()
    parser.add_argument("output_json", help="Output JSON file")
    parser.add_argument("buildhub_json", help="Output buildhub JSON file")
    parser.add_argument("output_txt", help="Output text file")
    # TODO: Move package-name.mk variables into moz.configure.
    parser.add_argument("pkg_platform", help="Package platform identifier")
    parser.add_argument("--no-download",
                        action="store_true",
                        help="Do not include download information")
    parser.add_argument("--package", help="Path to application package file")
    parser.add_argument("--installer",
                        help="Path to application installer file")
    args = parser.parse_args()
    # Populate mozinfo; its "os" entry feeds the buildhub "target" section.
    mozinfo.find_and_update_from_json()

    # Configure substitutions exported verbatim into the flat JSON dump.
    important_substitutions = [
        "target_alias",
        "target_cpu",
        "target_os",
        "target_vendor",
        "host_alias",
        "host_cpu",
        "host_os",
        "host_vendor",
        "MOZ_UPDATE_CHANNEL",
        "MOZ_APP_VENDOR",
        "MOZ_APP_NAME",
        "MOZ_APP_VERSION",
        "MOZ_APP_MAXVERSION",
        "MOZ_APP_ID",
        "CC",
        "CXX",
        "AS",
        "MOZ_SOURCE_REPO",
    ]

    # Lower-cased substitution name -> value.
    all_key_value_pairs = {
        x.lower(): buildconfig.substs[x]
        for x in important_substitutions
    }
    # Build id comes from the environment as %Y%m%d%H%M%S (parsed below).
    build_id = os.environ["MOZ_BUILD_DATE"]
    all_key_value_pairs.update({
        "buildid":
        build_id,
        "moz_source_stamp":
        buildconfig.substs["MOZ_SOURCE_CHANGESET"],
        "moz_pkg_platform":
        args.pkg_platform,
    })

    # Artifact 1: flat key/value JSON.
    with open(args.output_json, "wt") as f:
        json.dump(all_key_value_pairs, f, indent=2, sort_keys=True)
        f.write("\n")

    # Artifact 2: buildhub-style record.
    with open(args.buildhub_json, "wt") as f:
        build_time = datetime.datetime.strptime(build_id, "%Y%m%d%H%M%S")
        s = buildconfig.substs
        record = {
            "build": {
                "id": build_id,
                "date": build_time.isoformat() + "Z",
                "as": s["AS"],
                "cc": s["CC"],
                "cxx": s["CXX"],
                "host": s["host_alias"],
                "target": s["target_alias"],
            },
            "source": {
                "product": s["MOZ_APP_NAME"],
                "repository": s["MOZ_SOURCE_REPO"],
                "tree": os.environ["MH_BRANCH"],
                "revision": s["MOZ_SOURCE_CHANGESET"],
            },
            "target": {
                "platform": args.pkg_platform,
                "os": mozinfo.info["os"],
                # This would be easier if the locale was specified at configure time.
                "locale": os.environ.get("AB_CD", "en-US"),
                "version": s["MOZ_APP_VERSION_DISPLAY"]
                or s["MOZ_APP_VERSION"],
                "channel": s["MOZ_UPDATE_CHANNEL"],
            },
        }

        # Prefer the installer over the plain package when both are present;
        # --no-download suppresses the download section entirely.
        if args.no_download:
            package = None
        elif args.installer and os.path.exists(args.installer):
            package = args.installer
        else:
            package = args.package
        if package:
            st = os.stat(package)
            mtime = datetime.datetime.fromtimestamp(st.st_mtime)
            record["download"] = {
                # The release pipeline will update these keys.
                "url": os.path.basename(package),
                "mimetype": "application/octet-stream",
                "date": mtime.isoformat() + "Z",
                "size": st.st_size,
            }

        json.dump(record, f, indent=2, sort_keys=True)
        f.write("\n")

    # Artifact 3: plain-text buildID line.
    with open(args.output_txt, "wt") as f:
        f.write("buildID={}\n".format(build_id))
Ejemplo n.º 55
0
    Arguments:
        args: The object containing the commandline arguments
    """
    wt = WallpaperTransition(
        args.img_dir,
        args.timeout,
        args.duration,
        args.fps,
    )
    wt.loop()


if __name__ == "__main__":
    parser = ArgumentParser(
        description="Wallpaper Transition using Feh",
        formatter_class=ArgumentDefaultsHelpFormatter,
    )
    parser.add_argument(
        "img_dir",
        metavar="DIR",
        type=Path,
        help="the directory of wallpapers you want to loop through",
    )
    parser.add_argument(
        "--timeout",
        type=int,
        default=600,
        help="idle period (in seconds) between transitions",
    )
    parser.add_argument(
        "--duration",
Ejemplo n.º 56
0
def get_args():
    """Parse command-line arguments.

    Returns:
        argparse.Namespace: carries the boolean ``verbose`` flag.
    """
    cli = ArgumentParser(description=__doc__)
    cli.add_argument('-v', '--verbose', action='store_true')
    return cli.parse_args()
Ejemplo n.º 57
0
#!/usr/bin/env python3
from argparse import ArgumentParser
from keybag import Keybag
from binascii import hexlify


def print_tag(tag, indent=''):
    """Print one keybag tag as '<indent><name> = <value>'.

    Integer values print in decimal; bytes values print hex-encoded.
    Values of any other type print with an empty value part.
    """
    rendered = '%s%s = ' % (indent, tag.name)
    value = tag.value
    if type(value) == int:
        rendered += '%d' % value
    elif type(value) == bytes:
        rendered += hexlify(value).decode()
    print(rendered)


parser = ArgumentParser(description='Parse decrypted keybag')
parser.add_argument('file')
args = parser.parse_args()

# Load the (already decrypted) keybag from disk.
kb = Keybag.from_file(args.file)

# Header tags first...
print('HEADER')
for tag in kb.data.header.tags:
    print_tag(tag, indent='  ')

# ...then every key's tags, grouped by key index.
print('KEYS')
for idx, key in enumerate(kb.data.keys):
    print('  %d:' % idx)
    for tag in key.tags:
        print_tag(tag, indent='    ')
Ejemplo n.º 58
0
__email__ = '*****@*****.**'
__status__ = 'Development'
# endregion

# region Main function
if __name__ == "__main__":

    # region Check user, platform and print banner
    # NOTE(review): the Base *instance* deliberately shadows the Base class
    # name from here on; code after this snippet appears to rely on that.
    Base = Base()
    Base.check_user()      # presumably enforces required privileges - confirm
    Base.check_platform()  # presumably rejects unsupported OSes - confirm
    Base.print_banner()
    # endregion

    # region Parse script arguments
    parser = ArgumentParser(description='ARP scan local network')

    parser.add_argument('-i',
                        '--interface',
                        type=str,
                        help='Set interface name for ARP scanner')
    # Optional single-target mode; with the default None the scanner
    # presumably sweeps the whole subnet - confirm in later code.
    parser.add_argument('-I',
                        '--target_ip',
                        type=str,
                        help='Set target IP address',
                        default=None)
    parser.add_argument('-t',
                        '--timeout',
                        type=int,
                        help='Set timeout (default=3)',
                        default=3)
Ejemplo n.º 59
0
    with app.app_context():
        BaseModel.set_session(db.session)

        if config.config_model.sqlite.recreate:
            logging.info("Recreating database")
            db.drop_all()
            db.session.commit()

        db.create_all()
        db.session.commit()

    app.register_blueprint(bp)
    app.register_blueprint(frontend_bp)

    return app


if __name__ == '__main__':
    # CLI: optional -c/--config pointing at a YAML configuration file.
    cli = ArgumentParser()
    cli.add_argument("-c",
                     "--config",
                     dest="config_path",
                     help="The path to the configuration file.")
    parsed = cli.parse_args()
    # Fall back to the bundled default config when none was supplied.
    config_path = parsed.config_path or "../config/streetview-bingo.yml"

    app: Flask = create_app(config_path)
    sio.run(app,
            host=config.config_model.networking.host,
            port=config.config_model.networking.port)
Ejemplo n.º 60
0
        obj = pickle.load(fp)
        model.states_list.append(obj)
        if model.parallel:
            parallel.add_data(model.states_list[-1].data)
        fp.close()
    print "--------------------------------------add_data process completed!!--------------------------------------"
    #save params&charm#
    save_fig_title(fig_title_path, SAVE_PARAMS, locals())
    save_parameters(param_path, SAVE_PARAMS, locals())
    obs_hypparams['sigma_0'] = np.eye(obs_dim)
    obs_hypparams['mu_0'] = np.zeros(obs_dim)
    #estimation&result_write#
    print "--------------------------------------estimation process start--------------------------------------"
    result = Result(result_dir, DATA_N)
    loglikelihood = []
    for idx in progprint_xrange(ITER_N, perline=10):
        model.resample_model()
        loglikelihood.append(result.save_loglikelihood(model))
        result.save(model)
    result.write_loglikelihood(loglikelihood)
    print "--------------------------------------estimation process completed!!--------------------------------------"


#--------------------------------------direct execution function--------------------------------------#
if __name__ == '__main__':
    from argparse import ArgumentParser

    # Every CLI option is forwarded into main() as a keyword argument.
    arg_parser = ArgumentParser()
    arg_parser.add_argument('--result-dir', default=None)
    main(**vars(arg_parser.parse_args()))