Example #1
    def __init__(self, context):
        super(utilityPlugin, self).__init__(context)
        # Give QObjects reasonable names
        self.setObjectName("utilityPlugin")

        # Process standalone plugin command-line arguments
        from argparse import ArgumentParser

        parser = ArgumentParser()
        # Add argument(s) to the parser.
        parser.add_argument("-q", "--quiet", action="store_true", dest="quiet", help="Put plugin in silent mode")
        args, unknowns = parser.parse_known_args(context.argv())
        if not args.quiet:
            print "arguments: ", args
            print "unknowns: ", unknowns

        # Create QWidget
        self._widget = QWidget()
        # Get path to UI file which is a sibling of this file
        # in this example the .ui and .py file are in the same folder
        ui_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), "utility.ui")
        # Extend the widget with all attributes and children from UI file
        loadUi(ui_file, self._widget)
        # Give QObjects reasonable names
        self._widget.setObjectName("utilityUi")
        # Show _widget.windowTitle on left-top of each plugin (when
        # it's set in _widget). This is useful when you open multiple
        # plugins at once. Also if you open multiple instances of your
        # plugin at once, these lines add number to make it easy to
        # tell from pane to pane.
        if context.serial_number() > 1:
            self._widget.setWindowTitle(self._widget.windowTitle() + (" (%d)" % context.serial_number()))
        # Add widget to the user interface
        context.add_widget(self._widget)

        self.ID = 0

        self.State = QuadPositionDerived()
        self.Target = QuadPositionDerived()
        self.sub = ""
        self.name = ""
        self.pwd = os.environ["PWD"]

        self._widget.IrisInputBox.insertItems(0, ["iris1", "iris2", "iris3", "iris4", "iris5"])
        self._widget.bStart.clicked.connect(self.Start)
        self._widget.GravityCancelButton.clicked.connect(self.adjust_gravity_cancel)

        self._widget.XBox.setMinimum(-10.0)
        self._widget.XBox.setMaximum(10.0)
        self._widget.XBox.setSingleStep(0.1)
        self._widget.YBox.setMinimum(-10.0)
        self._widget.YBox.setMaximum(10.0)
        self._widget.YBox.setSingleStep(0.1)
        self._widget.ZBox.setMinimum(-10.0)
        self._widget.ZBox.setMaximum(10.0)
        self._widget.ZBox.setSingleStep(0.1)
        self._widget.GravitySpinBox.setMaximum(1800)
        self._widget.GravitySpinBox.setMinimum(1200)
        self._widget.GravitySpinBox.setSingleStep(10)
        self._widget.GravitySpinBox.setValue(1500)
Example #2
def main():
    """
    Main function of the proxy scanner.
    """
    global pl, output, q

    parser = ArgumentParser(description='Scans a list of proxies to determine which work for HTTPS.')
    parser.add_argument('--output', default='output/proxies.txt', type=str,
        help='The file in which to store the found proxies.')
    parser.add_argument('--threads', default=10, type=int,
        help='Number of threads to use.')

    args = parser.parse_args()
    output = args.output

    threads = args.threads
    q = Queue(threads * 3)

    print('Starting threads.')
    for x in range(threads):
        t = Thread(target=check_proxies)
        t.daemon = True
        t.start()

    print('Queueing proxies.')
    for proxy in proxies.proxies:
        q.put(proxy)
    q.join()

    save_proxies()
def parse_args():
    parser = ArgumentParser(description=__doc__)
    parser.add_argument("conf_file", help="configuration to use")
    parser.add_argument("instance_keys", metavar='instance', nargs='+',
                        help=("Instance key(s). If ALL is given, the "
                              "users will be added to all visible instances."))
    return parser.parse_args()
def main():
    """Entry point"""
    parser = ArgumentParser()
    parser.add_argument("-f", "--file", dest="filename", required=True,
                        help="ACK tar file")
    args = parser.parse_args()
    do_parse(args)
Example #5
def rpt_list_testdates(args):
    parser = ArgumentParser(prog='mx rpt-list-testdates')
    _add_common_args(parser)
    parser.add_argument('--printdir', action='store_true', help='print directory containing tests')
    _add_pattern_arg(parser)
    args = _check_verbose(parser.parse_args(args))
    fastr = dict()
    local_dirs = get_local_dirs(args.logdir)
    for local_dir in local_dirs:
        resultInfo = ResultInfo(local_dir)
        result_outputs = _gather_test_outputs(join(args.logdir, local_dir, "test"))
        for pkg, _ in result_outputs.items():
            if re.search(args.pattern, pkg) is None:
                continue
            if pkg not in fastr:
                testdates = []
                fastr[pkg] = testdates
            else:
                testdates = fastr[pkg]
            testdates.append(resultInfo)

    for pkg, testdates in fastr.items():
        sortedList = sorted(testdates, reverse=True)
        print(pkg)
        for resultInfo in sortedList:
            if args.printdir:
                print('  ' + join(args.logdir, resultInfo.localdir))
            else:
                print('  ' + str(resultInfo.date))
Example #6
def main():
    parser = ArgumentParser(description=__doc__.rstrip(),
                            formatter_class=RawDescriptionHelpFormatter)
    parser.add_argument('filename', help=helps['filename'])
    parser.add_argument('-d', '--detailed',
                        action='store_true', dest='detailed',
                        default=False, help=helps['detailed'])
    options = parser.parse_args()

    mesh = Mesh.from_file(options.filename)

    output(mesh.cmesh)
    output('element types:', mesh.descs)
    output('nodal BCs:', sorted(mesh.nodal_bcs.keys()))

    bbox = mesh.get_bounding_box()
    output('bounding box: %s'
           % ', '.join('%s: [%s, %s]' % (name, bbox[0, ii], bbox[1, ii])
                       for ii, name in enumerate('xyz'[:mesh.dim])))

    output('centre:', mesh.coors.mean(0))

    if not options.detailed: return

    from sfepy.discrete.fem.geometry_element import create_geometry_elements
    gels = create_geometry_elements()
    mesh.cmesh.set_local_entities(gels)
    mesh.cmesh.setup_entities()

    for dim in range(1, mesh.cmesh.tdim + 1):
        volumes = mesh.cmesh.get_volumes(dim)
        output('volumes of %d %dD entities: min: %s mean: %s max: %s'
               % (mesh.cmesh.num[dim],
                  dim, volumes.min(), volumes.mean(), volumes.max()))
Example #7
def main():
    parser = ArgumentParser()
    parser.add_argument('--print-available-runtimes', action=PrintRuntimes)
    parser.add_argument('-r', '--runtime', action='store', dest='runtime')
    parser.add_argument('-e', '--eval', action='store', dest='expr')
    parser.add_argument("--encoding", action="store", dest="files_encoding", default="utf8")
    parser.add_argument(nargs="*", action='store', dest='files')

    opts = parser.parse_args()

    runtime = execjs.get(opts.runtime)

    codes = []
    for f in opts.files:
        with io.open(f, encoding=opts.files_encoding) as fp:
            codes.append(fp.read())

    context = runtime.compile("\n".join(codes))
    if opts.expr:
        if isinstance(opts.expr, bytes):
            expr = opts.expr.decode()
        else:
            expr = opts.expr
        sys.stdout.write(repr(context.eval(expr)) + "\n")
    else:
        ret = context.eval(sys.stdin.read())
        sys.stdout.write(repr(ret) + "\n")
Example #8
def main(args=None):
    if args is None:
        args = sys_argv[1:]

    parser = ArgumentParser(description='')
    parser.add_argument('TREE', type=PathType)
    parser.add_argument('FEATURES', type=feattype)
    ns = parser.parse_args(args)

    tree, alignment, colnames, _ = PhyloGzFile.read(ns.TREE)

    icolnames = [(idx, colname) for idx, colname in enumerate(colnames) if int(NUMERIC.sub('', colname)) in ns.FEATURES]

    for r in alignment:
        # labels has length of icolnames plus the ic50
        labels = [None] * (len(icolnames) + 1)
        i = 1
        for idx, colname in icolnames:
            if len(colnames) > 1:
                labels[i] = colname + r.seq[idx]
            else:
                labels[i] = r.seq[idx]
            i += 1
        try:
            labels[0] = '%.3g' % mean(seqrecord_get_values(r))
        except ValueError:
            if not (len(r.id) > 4 and r.id[:4].lower() == 'node'):
                print(r)
            labels.pop(0)
        # include the ':' here to make sure we grab the end of the label
        tree = re_sub(r'([,()])' + r.id + r'(?:_[0-9]+)?:', r'\g<1>' + '_'.join(labels) + ':', tree)

    print(tree)

    return 0
Example #9
def run(argv):
    parser = ArgumentParser()
    parser.add_argument('vem_model', type=str, help='SAM VEM model to use features from')
    parser.add_argument('-c', type=float, default=1.0, help='SVM C parameter')
    options = parser.parse_args(argv[1:])

    log.info('Loading SAM model %s' % options.vem_model)

    sam_model = VEMModel.load(options.vem_model)
    log.info('Making dataset')
    dataset = make_dataset(sam_model)

    metric = ClassificationError()
    scores = []
    for i in range(20):
        train_data, test_data = dataset.split(p=0.90, seed=i)

        topic_svm = TopicSVM(sam_model, C=options.c, normalize=True)
        topic_svm.train(train_data)

        predictions = topic_svm.predict(test_data)
        score = metric(test_data.targets, predictions)
        log.info(score)
        scores.append(score)
    log.info('Mean classification error: %g' % np.mean(scores))
Example #10
def main():
   """
   This example contains a simple parser to obtain the locations of both steemd and the data directory,
   creates and runs a new debug node, replays all of the blocks in the data directory, and finally waits
   for the user to interface with it outside of the script. Sending SIGINT successfully and cleanly terminates
   the program.
   """
   import os, sys
   from argparse import ArgumentParser

   if( os.name != "posix" ):
      print( "This script only works on POSIX systems" )
      return

   parser = ArgumentParser( description='Run a Debug Node on an existing chain. This simply replays all blocks ' + \
                              'and then waits indefinitely to allow user interaction through RPC calls and ' + \
                              'the CLI wallet' )
   parser.add_argument( '--steemd', '-s', type=str, required=True, help='The location of a steemd binary to run the debug node' )
   parser.add_argument( '--data-dir', '-d', type=str, required=True, help='The location of an existing data directory. ' + \
                        'The debug node will pull blocks from this directory when replaying the chain. The directory ' + \
                        'will not be changed.' )

   args = parser.parse_args()

   steemd = Path( args.steemd )
   if( not steemd.exists() ):
      print( 'Error: steemd does not exist.' )
      return

   steemd = steemd.resolve()
   if( not steemd.is_file() ):
      print( 'Error: steemd is not a file.' )
      return

   data_dir = Path( args.data_dir )
   if( not data_dir.exists() ):
      print( 'Error: data_dir does not exist or is not a properly constructed steemd data directory' )

   data_dir = data_dir.resolve()
   if( not data_dir.is_dir() ):
      print( 'Error: data_dir is not a directory' )

   print( 'Creating and starting debug node' )
   debug_node = DebugNode( str( steemd ), str( data_dir ) )

   with debug_node:
      print( 'Replaying blocks...', )
      sys.stdout.flush()
      total_blocks = 0
      while( total_blocks % 100000 == 0 ):
         total_blocks += debug_node.debug_push_blocks( 100000 )
         print( 'Blocks Replayed: ' + str( total_blocks ) )
         sys.stdout.flush()

      print( 'Done!' )
      print( 'Feel free to interact with this node via RPC calls for the cli wallet.' )
      print( 'To shutdown the node, send SIGINT with Ctrl + C to this script. It will shut down safely.' )

      while( True ):
         sleep( 1 )
def main(argv=None):
    if argv is None:
        argv = sys.argv
    else:
        sys.argv.extend(argv)

    parser = ArgumentParser(
        description="PCAWG Report Generator Gathering Counts", formatter_class=RawDescriptionHelpFormatter
    )
    parser.add_argument(
        "-m", "--metadata_dir", dest="metadata_dir", help="Directory containing metadata manifest files", required=True
    )

    args = parser.parse_args()
    metadata_dir = args.metadata_dir  # this dir contains gnos manifest files, will also host all reports

    if not os.path.isdir(metadata_dir):  # TODO: should add more directory name check to make sure it's right
        sys.exit("Error: specified metadata directory does not exist!")

    report_name = re.sub(r"^pc_report-", "", os.path.basename(__file__))
    report_name = re.sub(r"\.py$", "", report_name)

    generate_report(metadata_dir, report_name)

    return 0
def base_multinode_parser():
    """Creates a parser with arguments specific to sending HTTP requests
    to multiple REST APIs.

    Returns:
        {ArgumentParser}: Base parser with default HTTP args
    """
    base_parser = ArgumentParser(add_help=False)

    base_parser.add_argument(
        'urls',
        type=str,
        nargs='+',
        help="The URLs of the validator's REST APIs of interest, separated by"
        " commas or spaces. (no default)")
    base_parser.add_argument(
        '--users',
        type=str,
        action='append',
        metavar='USERNAME[:PASSWORD]',
        help='Specify the users to authorize requests, in the same order as '
        'the URLs, separate by commas. Passing empty strings between commas '
        'is supported.')

    return base_parser
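Since the base parser above is built with add_help=False, it is intended to be handed to child parsers through the parents= mechanism; a minimal usage sketch (the child command, description, and URLs are made up for illustration):

status_parser = ArgumentParser(
    description='Check the status of several validator REST APIs',
    parents=[base_multinode_parser()])
args = status_parser.parse_args(
    ['http://rest-api-0:8008', 'http://rest-api-1:8008', '--users', 'alice:secret'])
print(args.urls, args.users)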
Example #13
def process():
    parser = ArgumentParser(description = \
            "Simulate the motion of a flock of birds")
    
    # Parameters
    parser.add_argument('--file', '-f', dest = 'configFile')

    # Print help message even if no flag is provided
    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(1)

    args = parser.parse_args()

    # Catch exception if file does not exist
    try:
        # Create object
        boids = Flock(args.configFile)
        # Plot figures
        animator = FlockAnimator((-500,1500), (-500,1500), "The Boids!", boids)
        animator.animate_flock()
    except IOError:
        print "The file you provided does not exist.\n" 
        parser.print_help()
    except:
        print "Unexpected error.", sys.exc_info()[0], "\n"
        raise
Example #14
def main():
    parser = ArgumentParser()
    parser.add_argument('url', help = 'the url to the git repo containing a urls.md')
    parser.add_argument('type', help = 'type OAuth to use OAuth authentication (ensure you have an OAuth.txt file in your config folder first)', nargs='?', const='Standard')
    args = parser.parse_args()


    try:
        open_json = open('urls.json', 'r')
        if os.stat('urls.json').st_size > 0:
            url_json = json.load(open_json)
        open_json.close()
    except FileNotFoundError:
        url_json = open('urls.json', 'w')
    username = open('../config-location/config.txt', 'r').readline().strip('\n')#location of my

    if args.type == 'OAuth':
        password = open('../config-location/oauth.txt', 'r').readline()#location of oauth file
    else:
        password_file = open('../config-location/config.txt', 'r')
        password_file.readline()
        password = password_file.readline().strip('\n')
    input_url = args.url
    auth = (username, password)
    parts_of_url = input_url.split('/')
    user = parts_of_url[3]
    repo_name = parts_of_url[4]

    request_url = ("https://api.github.com/repos/%s/%s/contents/urls.md/" % (user, repo_name))
    url_list, repo_list, etag = get_urls(request_url, user, repo_name, auth, url_json)
    urls_dict = parse_dict(url_list, repo_list, etag)
    write_json(urls_dict)
Example #15
def main():
    parser = ArgumentParser(description='Segment a given corpus')
    parser.add_argument('corpus', type=str, help="path to the corpus")
    parser.add_argument('true_corpus', type=str, help="path to the true corpus")
    parser.add_argument('--alpha', default=0.5, type=float, help="the alpha hyperparameter")
    parser.add_argument('--phash', default=0.5, type=float, help="the p# hyperparameter")
    parser.add_argument('-i', default=10, type=int, help="number of training epochs")

    args = parser.parse_args()

    corpus = Corpus(args.corpus)

    # run the specified number of iterations of Gibbs sampling, saving the
    # output after each iteration
    for i in range(args.i):
        print(i)
        gibbs_iteration(corpus, alpha=args.alpha, p_hash=args.phash)

        with open(FILENAME, 'wb') as f:
            print('Saving data to {}'.format(FILENAME))
            dump(corpus.boundaries, f)

    # Evaluation (if true corpus is provided)
    corpus_true = Corpus(args.true_corpus)
    eval_result = evaluate(corpus, corpus_true)
    print(eval_result)
Example #16
def ctw(args, extraVMarguments=None):
    """run CompileTheWorld"""

    defaultCtwopts = '-Inline'

    parser = ArgumentParser(prog='mx ctw')
    parser.add_argument('--ctwopts', action='store', help='space separated JVMCI options used for CTW compilations (default: --ctwopts="' + defaultCtwopts + '")', default=defaultCtwopts, metavar='<options>')
    parser.add_argument('--cp', '--jar', action='store', help='jar or class path denoting classes to compile', metavar='<path>')

    args, vmargs = parser.parse_known_args(args)

    if args.ctwopts:
        # Replace spaces with '#' since -G: options cannot contain spaces
        vmargs.append('-G:CompileTheWorldConfig=' + re.sub(r'\s+', '#', args.ctwopts))

    if args.cp:
        cp = os.path.abspath(args.cp)
    else:
        cp = join(_jdk.home, 'lib', 'modules', 'bootmodules.jimage')
        vmargs.append('-G:CompileTheWorldExcludeMethodFilter=sun.awt.X11.*.*')

    # suppress menubar and dock when running on Mac; exclude x11 classes as they may cause vm crashes (on Solaris)
    vmargs = ['-Djava.awt.headless=true'] + vmargs

    if _vm.jvmciMode == 'disabled':
        vmargs += ['-XX:+CompileTheWorld', '-Xbootclasspath/p:' + cp]
    else:
        if _vm.jvmciMode == 'jit':
            vmargs += ['-XX:+BootstrapJVMCI']
        vmargs += ['-G:CompileTheWorldClasspath=' + cp, 'com.oracle.graal.hotspot.CompileTheWorld']

    run_vm(vmargs + _noneAsEmptyList(extraVMarguments))
def main(argv=None):
    if argv is None:
        argv = sys.argv
    else:
        sys.argv.extend(argv)

    parser = ArgumentParser(description="PCAWG Report Generator Using ES Backend",
             formatter_class=RawDescriptionHelpFormatter)
    parser.add_argument("-m", "--metadata_dir", dest="metadata_dir",
             help="Directory containing metadata manifest files", required=True)
    parser.add_argument("-r", "--gnos_repo", dest="repo",
             help="Specify which GNOS repo to process, process all repos if none specified", required=False)

    args = parser.parse_args()
    metadata_dir = args.metadata_dir  # this dir contains gnos manifest files, will also host all reports
    repo = args.repo

    if not os.path.isdir(metadata_dir):  # TODO: should add more directory name check to make sure it's right
        sys.exit('Error: specified metadata directory does not exist!')

    timestamp = str.split(metadata_dir, '/')[-1]
    es_index = 'p_' + ('' if not repo else repo+'_') + re.sub(r'\D', '', timestamp).replace('20','',1)

    report_name = re.sub(r'^pc_report-', '', os.path.basename(__file__))
    report_name = re.sub(r'\.py$', '', report_name)

    generate_report(es_index, es_queries, metadata_dir, report_name, timestamp, repo)

    return 0
Example #18
def _make_arg_parser():
    # this parser is never used for help messages, but
    # will show usage on error
    parser = ArgumentParser(usage=_USAGE, add_help=False)

    # add positional arguments
    parser.add_argument(dest='script_or_jar', nargs='?')
    parser.add_argument(dest='args', nargs='*')

    _add_basic_args(parser)
    _add_runner_alias_arg(parser)
    _add_help_arg(parser)
    _add_deprecated_arg(parser)

    # add runner opts
    runner_opt_names = set(_RUNNER_OPTS) - set(_HARD_CODED_OPTS)
    _add_runner_args(parser, opt_names=runner_opt_names)

    # add spark-specific opts (without colliding with runner opts)
    for opt_name, switch in _SPARK_SUBMIT_SWITCHES.items():
        if opt_name in _RUNNER_OPTS and switch not in _SWITCH_ALIASES:
            continue
        _add_spark_submit_arg(parser, opt_name)

    return parser
Example #19
def parse_arguments():
    # Parse arguments.
    parser = ArgumentParser(description='Create data-driven receipts.')
    parser.add_argument('filename', nargs='?', default=None,
        help='Path to a JSON-formatted file that specifies document form data.')
    parser.add_argument(
        '-t', '--template', dest='template_name', action='store',
        default='receipt', help='set template name (default: receipt)')
    parser.add_argument(
        '-o', '--output', dest='output_filename', action='store',
        default='output.pdf', help='set output filename (default: output.pdf)')
    parser.add_argument(
        '-d', '--log', dest='loglevel', action='store', default='ERROR',
        help=(
            'set log level [DEBUG, INFO, WARNING, ERROR, CRITICAL] '
            '(default: ERROR)'))
    parser.add_argument(
        '--debug', dest='debug', action='store_true', default=False,
        help='run in DEBUG mode')
    args = parser.parse_args()

    if args.debug:
        args.loglevel = 'DEBUG'

    # Configure logger.
    numeric_level = getattr(logging, args.loglevel.upper(), None)
    if not isinstance(numeric_level, int):
        raise ValueError('Invalid log level: %s' % args.loglevel.upper())
    logging.basicConfig(level=numeric_level)

    return args
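A brief usage note for the function above: once parse_arguments() returns, logging.basicConfig() has already been applied, so callers can log immediately (the message below is illustrative only):

args = parse_arguments()
logging.debug('form data file: %s', args.filename)  # emitted only when run with --debug or -d DEBUG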
Example #20
def primers():
    parser = ArgumentParser(prog='primers', description='Print primer properties')
    parser.add_argument('primers', nargs='+', help='primer text file')
    parser.add_argument('-c', '--csv', action='store_true', dest='csv', help='write a CSV file (default is HTML file)')

    args = parser.parse_args()

    inputfiles = args.primers
    
    from ..nucleotide.primer import Primers
    from ..util import xmlwriter
    ps = Primers()

    for filename in inputfiles:
        ps.load_file(filename)

    if args.csv:
        sys.stdout.write(ps.write_csv())
    else:
        html = xmlwriter.XmlWriter(sys.stdout)
        b = xmlwriter.builder(html)
        with b.html:
            with b.head:
                pass
            with b.body(style='font-family:monospace;font-size:small'):
                b.h1('Primers')
                ps.write_html(html)
def main():
  parser = ArgumentParser('--nmap_xml, '
                          '--openvas_xml')

  parser.add_argument('--nmap_xml',
                      dest='nmap_xml',
                      type=str,
                      help='NMAP XML file to parse.')

  parser.add_argument('--openvas_xml',
                      dest='openvas_xml',
                      type=str,
                      help='OpenVAS XML file to parse.')

  args = parser.parse_args()
  nmap_xml = args.nmap_xml
  openvas_xml = args.openvas_xml

  if nmap_xml is not None:

    parse_nmap_xml(nmap_xml)

  if openvas_xml is not None:

    parse_openvas_xml(openvas_xml)

  if openvas_xml is None and nmap_xml is None:
    print('\nI need arguments.\n')
    parser.print_help()
    exit()
Example #22
def parseArgs():
    parser = ArgumentParser(
        description = 'Convert Entrez Gene Homo_sapiens.xml to python dictionary representation')
    parser.add_argument(
        '--Hsxml', metavar = 'HSXML', type = file, required = True,
        help = 'Name of Homo_sapiens.xml file - include a date reference for download for example')
    return parser.parse_args()
Example #23
def main():
    parser = ArgumentParser()
    parser.add_argument('file', action='store')

    args = parser.parse_args()

    print_left_right( "left.txt", "right.txt", open(args.file).readlines() )
Example #24
def shell_interactive():
    from argparse import ArgumentParser
    parser = ArgumentParser()

    parser.add_argument('--from', dest='src',
                        help='source file site')

    parser.add_argument('--to', dest='dst',
                        help='destination file site')

    parser.add_argument('--src_char', dest='from_charset',
                        help='source file character set')

    parser.add_argument('--dst_char', dest='to_charset',
                        help='destination file character set')

    parser.add_argument('--chmod', action='store_true',
                        help='chmod to 777')

    args = parser.parse_args()

    from minghu6.algs.dict import remove_value
    return remove_value(args.__dict__,None)
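minghu6.algs.dict.remove_value is not shown in this example; it presumably drops entries whose value matches the one given, so the return above is roughly equivalent to the following stand-in (hypothetical, for illustration only):

def remove_value(d, value):
    # drop every entry whose value equals `value`
    return {k: v for k, v in d.items() if v != value}

# e.g. running the program as `prog --from a.txt --to b.txt` would yield
# {'src': 'a.txt', 'dst': 'b.txt', 'chmod': False} -- only the unset (None) options are removed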
Example #25
    def parseArgs(self):
        self.out.put('Getting command line arguments')
        description = self.getApplicationName()
        if self.__class__.__doc__:
            description += '\n' + self.__class__.__doc__
        parser = ArgumentParser(description=description)

        # default arguments (set in parent class)
        parser.add_argument('-v',
                            '--verbose',
                            action='store_true',
                            default=False,
                            help="Verbose messaging",
                            )
        parser.add_argument('--debug',
                            action='store_true',
                            default=False,
                            help="Painfully verbose messaging",
                            )

        # custom arguments (set in subclasses)
        self.defineCustomArguments(parser)

        arguments = parser.parse_args()
        arguments = vars(arguments)  # converts Namespace to {}
        return arguments
Example #26
def parse_args():
    """ Parse the options from the config file if they are available. Then parse
    the command line options, which will override any options also defined in the
    configuration file

    :return: Dictionary of configuration options defined under "general" in config.ini of the \
             current directory
    :rtype: dict
    """
    # Parse config file options
    args = dict()
    config_opts = config.items('general')
    bool_opts = ['backup', 'test']
    list_opts = ['ignore']
    args.update(dict((k, v) for k, v in filter(lambda k: k[0] not in bool_opts + list_opts, config_opts)))
    for key in bool_opts:
        args[key] = config.getboolean('general', key)
    for key in list_opts:
        args[key] = list(filter(None, config.get('general', key).split(',')))

    if parser:
        arg_parser = ArgumentParser(description="Utility for managing dotfiles")
        for option_name, cli_args in options.items():
            arg_parser.add_argument(*option_name.split(' '), **cli_args)
            # Parse and merge command line options
        cli_args = arg_parser.parse_args().__dict__
        for key in filter(lambda k: k not in list_opts, cli_args.keys()):
            if cli_args[key]:
                args[key] = cli_args[key]
        for key in list_opts:
            if cli_args[key]:
                args[key] = list(filter(None, cli_args[key].split(',')))
    return args
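The function above depends on module-level config, parser, and options objects defined elsewhere in the project; a minimal sketch of the shapes it assumes (names, keys, and values here are guesses, not the project's actual configuration):

from configparser import ConfigParser

config = ConfigParser()
config.read('config.ini')   # expects a [general] section providing backup, test and ignore keys
parser = True               # truthy flag enabling the command-line pass
options = {
    '-b --backup': {'action': 'store_true', 'help': 'back up files before replacing them'},
    '-i --ignore': {'default': '', 'help': 'comma-separated patterns to skip'},
}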
Example #27
def cli_args(mockargs=None):
    """Parse command line arguments"""
    parser = ArgumentParser(description='Firelet daemon')

    parser.add_argument("-c", "--conffile", nargs='?',
        default='/etc/firelet/firelet.ini', help="configuration file", metavar="FILE")
    parser.add_argument("-r", "--repodir", nargs='?',
        help="configuration repository dir")
    parser.add_argument("-D", "--debug",
        action="store_true", dest="debug", default=False,
        help="run in debug mode and print messages to stdout")
    parser.add_argument("-q", "--quiet",
        action="store_true", dest="quiet", default=False,
        help="print less messages to stdout")

    # catch all other arguments
    parser.add_argument('commands', nargs='+',
        help='commands to execute')

    if mockargs:
        opts = parser.parse_args(mockargs)
    else:
        opts = parser.parse_args()

    # temporary hack before rewriting the entire file using argparse
    six_commands = opts.commands + [None] * (6 - len(opts.commands))
    return opts, six_commands
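A short sketch of consuming the padded command tuple returned above (the command names are hypothetical):

opts, (cmd, target, *rest) = cli_args(['rule', 'list', '-D'])
# opts.debug is True, cmd == 'rule', target == 'list', and rest is [None, None, None, None]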
Example #28
    def __init__(self, context):
        super(ReconfClientPlugin, self).__init__(context)
        # Give QObjects reasonable names
        self.setObjectName('ReconfClientPlugin')

        # Process standalone plugin command-line arguments
        from argparse import ArgumentParser
        parser = ArgumentParser()
        # Add argument(s) to the parser.
        parser.add_argument("-q", "--quiet", action="store_true",
                      dest="quiet",
                      help="Put plugin in silent mode")
        args, unknowns = parser.parse_known_args(context.argv())
        if not args.quiet:
            print('arguments: ', args)
            print('unknowns: ', unknowns)

        # Create QWidget
        self._widget = tiny_ref_gui.ReconfigureWidget()
        # Show _widget.windowTitle on left-top of each plugin (when 
        # it's set in _widget). This is useful when you open multiple 
        # plugins at once. Also if you open multiple instances of your 
        # plugin at once, these lines add number to make it easy to 
        # tell from pane to pane.
        if context.serial_number() > 1:
            self._widget.setWindowTitle(self._widget.windowTitle() + (' (%d)' % context.serial_number()))
        # Add widget to the user interface
        #parent = QTreeWidgetItem(self._widget.parameterTree)
        #parent.setText(0, "Name {}".format(2))
        #parent.setText(1, "Type")
        #parent.setText(2, "Value")
        #parent.setFlags(parent.flags() | Qt.ItemIsTristate | Qt.ItemIsUserCheckable | Qt.ItemIsEditable)
        context.add_widget(self._widget)
def main(argv):
    (adminUser, adminPass) = (None, None)
    p = ArgumentParser(prog='get-customer-credentials.py',
                       description="Fetches customer credentials using"
                       " admin credentials passed via environment variables"
                       " OS_USERNAME and OS_PASSWORD")
    p.add_argument('ddi', metavar='<ddi>',
                   help="DDI of the customer you want to impersonate")
    opts = p.parse_args(argv)

    try:
        # Hubble passes all the defined section variables to us in our
        # environment, so these could be any variable defined in hubblerc
        adminUser = os.environ['OS_USERNAME']
        adminPass = os.environ['OS_PASSWORD']
    except KeyError:
        pass

    if empty(adminUser) or empty(adminPass):
        print("-- OS_USERNAME or OS_PASSWORD not supplied in environment",
              file=sys.stderr)
        return 1

    # ----------------------------------------------------
    # Do something here to get the customers credentials
    # ----------------------------------------------------
    creds = super.sekrit.getCreds(adminUser, adminPass, opts.ddi)

    print("OS_USERNAME=%s" % creds['username'])
    print("OS_PASSWORD=%s" % creds['password'])
    print("OS_TENANT_NAME=%s" % creds['tenant-id'])

    # You can also inject extra environment variables
    print("OS_AUTH_SYSTEM=special-auth-system")
Example #30
def make_parser(prog):
    parser = ArgumentParser(prog)
    parser.add_argument('--version', action='version', version='%(prog)s ' + VERSION)

    command_parsers = parser.add_subparsers(metavar='command', dest='command')
    command_parsers.required = True

    command_search_parser = command_parsers.add_parser('search', help='search for items')
    command_search_parser.add_argument('type', help='type of item to search for',
                                       choices=['movie', 'person', 'character', 'company', 'keyword'])
    command_search_parser.add_argument('key', help='title or name of item to search for')
    command_search_parser.add_argument('-n', type=int, help='number of items to list')
    command_search_parser.add_argument('--first', action='store_true', help='display only the first result')
    command_search_parser.set_defaults(func=search_item)

    command_get_parser = command_parsers.add_parser('get', help='retrieve information about an item')
    command_get_parser.add_argument('type', help='type of item to retrieve',
                                    choices=['movie', 'person', 'character', 'company', 'keyword'])
    command_get_parser.add_argument('key', help='IMDb id (or keyword name) of item to retrieve')
    command_get_parser.add_argument('-n', type=int, help='number of movies to list (only for keywords)')
    command_get_parser.set_defaults(func=get_item)

    command_top_parser = command_parsers.add_parser('top', help='get top ranked movies')
    command_top_parser.add_argument('-n', type=int, help='number of movies to list')
    command_top_parser.add_argument('--first', action='store_true', help='display only the first result')
    command_top_parser.set_defaults(func=get_top_movies)

    command_bottom_parser = command_parsers.add_parser('bottom', help='get bottom ranked movies')
    command_bottom_parser.add_argument('-n', type=int, help='number of movies to list')
    command_bottom_parser.add_argument('--first', action='store_true', help='display only the first result')
    command_bottom_parser.set_defaults(func=get_bottom_movies)

    return parser
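A minimal dispatch sketch for the parser built above, assuming the handlers registered with set_defaults(func=...) each accept the parsed namespace (the prog name is made up):

parser = make_parser('imdb-tool')
args = parser.parse_args(['top', '-n', '10'])
args.func(args)   # dispatches to get_top_movies(args)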
Example #31
    soup = BeautifulSoup(text, "html5lib")
    ret = pattern.findall(text)
    if ret:
        div = soup.find("div", attrs={"class": "error-code"})
        if div:
            ret = [div.text]
        return False, ret[0]
    else:
        return True, ""


if __name__ == '__main__':
    parser = ArgumentParser()
    parser.add_argument("-p",
                        "--path",
                        action="store",
                        dest="path",
                        help="Path of input file.")
    # Filter options
    parser.add_argument("-w",
                        "--word_filter",
                        action="store_true",
                        dest="is_filter_by_word",
                        help="Filter web info via word label.")
    parser.add_argument("-i",
                        "--input_filter",
                        action="store_true",
                        dest="is_filter_by_input",
                        help="Filter web info via input label.")
    args = parser.parse_args()
    path = args.path
Example #32
# * oco_parameterfile_060401120000.xsd
# 
# L2AggPGE repo:
# https://svn.jpl.nasa.gov/trac/browser/sdos/oco2/trunk/PGE/L2AggPGE/tables

from __future__ import print_function
import os
import sys
from argparse import ArgumentParser

from pprint import pprint
import xml.etree.ElementTree as ET

parser = ArgumentParser()

parser.add_argument("--apf", dest='apf_list',  nargs='+')
parser.add_argument("--def", dest='hdf_def_list',  nargs='+')

args = parser.parse_args()

mapping = {}
agg_dataset_names = set()

# Add non-L2AggPGE mappings (output-only values)
mapping['tropopause_altitude'] = {'name': 'RetrievalResults/tropopause_altitude', 'shape': 'Retrieval_Array'}
mapping['tropopause_pressure'] = {'name': 'RetrievalResults/tropopause_pressure', 'shape': 'Retrieval_Array'}
mapping['vector_altitude_levels'] = {'name': 'RetrievalResults/vector_altitude_levels', 'shape': 'Retrieval_Level_Array'}

for ds_name in [ mapping[m]['name'] for m in mapping.keys() ]:
    agg_dataset_names.add(ds_name)
Example #33
    lat = np.linspace(12500, 12500, npart, dtype=np.float32)
    pset = ParticleSet(fieldset, pclass=ptype[mode], lon=lon, lat=lat)
    endtime = delta(hours=6).total_seconds()
    pset.execute(kernel[method], dt=delta(minutes=3), endtime=endtime)
    exp_lon = [truth_decaying(x, y, endtime)[0] for x, y, in zip(lon, lat)]
    exp_lat = [truth_decaying(x, y, endtime)[1] for x, y, in zip(lon, lat)]
    assert np.allclose(np.array([p.lon for p in pset]), exp_lon, rtol=rtol)
    assert np.allclose(np.array([p.lat for p in pset]), exp_lat, rtol=rtol)


if __name__ == "__main__":
    p = ArgumentParser(description="""
Example of particle advection around an idealised peninsula""")
    p.add_argument('mode',
                   choices=('scipy', 'jit'),
                   nargs='?',
                   default='jit',
                   help='Execution mode for performing computation')
    p.add_argument('-p',
                   '--particles',
                   type=int,
                   default=1,
                   help='Number of particles to advect')
    p.add_argument(
        '-v',
        '--verbose',
        action='store_true',
        default=False,
        help='Print particle information before and after execution')
    p.add_argument('--fieldset',
                   choices=('stationary', 'moving', 'decaying'),
Example #34
def parse_args() -> Namespace:
    parser = ArgumentParser()
    parser.add_argument('--num_of_entry')
    parser.add_argument('--num_of_exit')
    parser.add_argument('--ra_radius', type=float, default=20.0, help='Radius of roundabout (unit:m)')
    parser.add_argument('--ra_input_dir', type=Path, default='./input_ra/', help='The directory to the roundabout input')
    parser.add_argument('--vehicles_input_dir', type=Path, default='./input/', help='The directory of the vehicles input')

    parser.add_argument('num_of_entry')
Example #35
#!/usr/bin/python2
from object_annotation import ObjectAnnotation
import pickle
import numpy as np
import vivid
import os
from pad_feature import *
from argparse import ArgumentParser

parser = ArgumentParser(description="Harvest features from annotated files")
parser.add_argument('annotation_file', type=str)
parser.add_argument('source_directory', type=str)
parser.add_argument('model_file', type=str)

options = parser.parse_args()

pom =  PADObjectModel.read(options.model_file)

apron = pom.fm.ff_apron()
window_size = np.array(pom.window_size())

annotations = pickle.load(open(options.annotation_file,'r'))

image_list = [os.path.join(options.source_directory, annotation.file_name)
        for annotation in annotations]

fv = vivid.ImageSource(imlist=image_list)
cs = vivid.ConvertedSource(
        fv,
        target_type = vivid.cv.CV_32FC3,
        scale = 1.0 / 255.0)
Example #36
def main():
    from argparse import ArgumentParser

    parser = ArgumentParser(
        description="MLPerf Inference Language Modeling Benchmark.")

    parser.add_argument(
        "-m",
        "--model-url",
        default=
        "https://zenodo.org/record/1492892/files/big-lstm.tar.gz?download=1",
        help="Download the model from the specified url.")
    parser.add_argument(
        "-I",
        "--iterations",
        default=100,
        help="The number of batches to run the inference benchmark for.")
    parser.add_argument(
        "-b",
        "--mini-batch-size",
        default=1,
        help="The number of samples to process together in a batch.")
    parser.add_argument("--words-per-sample",
                        default=20,
                        help="The number of words in each sample.")
    parser.add_argument(
        "--maximum-samples",
        default=1000,
        help="The number of samples to read from the validation dataset.")
    parser.add_argument("--model-checksum",
                        default="d41d8cd98f00b204e9800998ecf8427e",
                        help="The MD5 hash of the model.")
    parser.add_argument(
        "-d",
        "--validation-dataset-url",
        default=
        "http://statmt.org/wmt11/training-monolingual-news-commentary.tgz",
        help="Download the validation dataset from the specified url.")
    parser.add_argument("--vocab-path",
                        default="vocab.txt",
                        help="The list of words in the model's vocab")
    parser.add_argument("--validation-checksum",
                        default="d41d8cd98f00b204e9800998ecf8427e",
                        help="The MD5 hash of the validation dataset.")

    arguments = vars(parser.parse_args())

    model = getModel(arguments)
    validationDataset = getValidationDataset(arguments)

    runBenchmark(arguments, model, validationDataset)
Example #37
def build_parser():
    par = ArgumentParser()
    par.add_argument('--features_path', type=str,
                     dest='features_path', help='filepath to save/load features', required=True)
    par.add_argument('--file_mapping', type=str,
                     dest='file_mapping', help='filepath to save/load file to image mapping', required=True)
    par.add_argument('--index_folder', type=str,
                     dest='index_folder', help='folder to index', required=False)
    par.add_argument('--input_image', type=str,
                     dest='input_image', help='input image path to search query', required=False)
    par.add_argument('--input_word', type=str,
                     dest='input_word', help='input word to search query', required=False)
    par.add_argument('--glove_path', type=str,
                     dest='glove_path', help='path to pre-trained GloVe vectors', required=False)
    par.add_argument('--model_path', type=str,
                     dest='model_path', help='path to custom model', required=False)
    par.add_argument('--index_boolean', type=str,
                     dest='index_boolean', help='Boolean: Create index instead of search', required=False)
    par.add_argument('--features_from_new_model_boolean', type=str,
                     dest='features_from_new_model_boolean', help='Boolean: If to create features from new model', required=False)
    return par
Example #38
from argparse import ArgumentParser
import os, pickle
import pandas as pd

u_attr = [
    'id', 'industry_id', 'discipline_id', 'career_level', 'country',
    'latitude', 'longitude', 'created_at', 'is_payed', 'region', 'employment',
    'title', 'tags'
]

u_attr_values = [[0, 0, 0, 0, 0, 2, 2, 2, 0, 0, 0, 1, 1]]

parser = ArgumentParser()
parser.add_argument('-f',
                    '--fetch',
                    dest='folder',
                    help='Folder with fetched daily data')

workspace = '/local/recsys'


def clean_daily_items(daily_folder):
    data_dir = os.path.join(workspace + "/online/", daily_folder)
    write_dir = data_dir
    f_names = ['items.csv']
    w_names = ['clean_items.csv']
    for name, w_name in zip(f_names, w_names):
        f = open(os.path.join(data_dir, name), 'r')
        w = open(os.path.join(write_dir, w_name), 'w')
        for line in f.readlines():
            line = line.replace('NULL', '-1')
    error = 0
    for i in range(len(tstdata['class'])):
        if tstdata['class'][i] != result[i]:
            error = error + 1
    print((len(tstdata['class']) - error) * 1.0 / len(tstdata['class']) * 100)


if __name__ == '__main__':
    from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter

    parser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter)

    # Add more options if you like
    parser.add_argument("-H",
                        metavar="H",
                        type=int,
                        dest="hidden_neurons",
                        default=40,
                        help="number of neurons in the hidden layer")
    parser.add_argument("-d",
                        metavar="W",
                        type=float,
                        dest="weightdecay",
                        default=0.03,
                        help="weightdecay")
    parser.add_argument("-m",
                        metavar="M",
                        type=float,
                        dest="momentum",
                        default=0.1,
                        help="momentum")
    args = parser.parse_args()
    prefix_list_entry.sequence_number = 40
    prefix_list_entry.grant = xr_ipv4_acl_datatypes.Ipv4AclGrantEnumEnum.permit
    prefix_list_entry.netmask = "255.255.0.0" 
    prefix_list_entry.prefix = "172.19.0.0"
    prefix_list_entry.max_prefix_length = 20
    prefix_list_entry.match_max_length = Empty()
    prefix_list_entry.min_prefix_length = 28
    prefix_list_entry.match_min_length = Empty()
    prefix.prefix_list_entries.prefix_list_entry.append(prefix_list_entry)
    ipv4_acl_and_prefix_list.prefixes.prefix.append(prefix)


if __name__ == "__main__":
    """Execute main program."""
    parser = ArgumentParser()
    parser.add_argument("-v", "--verbose", help="print debugging messages",
                        action="store_true")
    parser.add_argument("device",
                        help="NETCONF device (ssh://user:password@host:port)")
    args = parser.parse_args()
    device = urlparse(args.device)

    # log debug messages if verbose argument specified
    if args.verbose:
        logger = logging.getLogger("ydk")
        logger.setLevel(logging.DEBUG)
        handler = logging.StreamHandler()
        formatter = logging.Formatter(("%(asctime)s - %(name)s - "
                                      "%(levelname)s - %(message)s"))
        handler.setFormatter(formatter)
        logger.addHandler(handler)
Example #41
def parse_arguments():
    usage = "python benchrun.py -f <list of test files> -t <list of thread counts>\n       run with --help for argument descriptions"
    parser = ArgumentParser(description="mongo-perf micro-benchmark utility",
                            usage=usage,
                            formatter_class=RawTextHelpFormatter)

    parser.add_argument('-f',
                        '--testfiles',
                        dest='testfiles',
                        nargs="+",
                        help='Provide a list of js test files to run',
                        default=None)
    parser.add_argument('-t',
                        '--threads',
                        dest='threads',
                        nargs="+",
                        help='Specify which thread configuration to use',
                        type=int,
                        default=[1, 2, 4, 8, 12, 16])
    parser.add_argument('-m',
                        '--multidb',
                        dest='multidb',
                        help='Specify how many databases the test should use',
                        type=int,
                        default=1)
    parser.add_argument(
        '-c',
        '--multicoll',
        dest='multicoll',
        help='Specify how many collections the test should use',
        type=int,
        default=1)
    parser.add_argument('--trialTime',
                        dest='seconds',
                        help='Specify how many seconds to run each trial',
                        type=int,
                        default=5)
    parser.add_argument('--trialCount',
                        dest='trials',
                        help='Specify how many trials to run',
                        type=int,
                        default=1)
    parser.add_argument(
        '--shard',
        dest='shard',
        help=
        'Specify shard cluster the test should use, 0 - no shard, 1 - shard with {_id: hashed}, 2 - shard with {_id: 1}',
        type=int,
        default=0,
        choices=[0, 1, 2])
    parser.add_argument('--host',
                        dest='hostname',
                        help='hostname of the mongod/mongos under test',
                        default='localhost')
    parser.add_argument('-p',
                        '--port',
                        dest='port',
                        help='Port of the mongod/mongos under test',
                        default='27017')
    parser.add_argument(
        '--replset',
        dest='replica_set',
        help='replica set name of the mongod/mongos under test',
        default=None)
    parser.add_argument('-s',
                        '--shell',
                        dest='shellpath',
                        help="Path to the mongo shell executable to use.",
                        default='mongo')
    parser.add_argument(
        '--safe',
        dest='safeMode',
        nargs='?',
        const='true',
        choices=['true', 'false'],
        help=
        'this option enables a call to GLE after every op instead of every 100 ops',
        default='false')
    parser.add_argument('-w',
                        dest='w',
                        help='w write concern',
                        type=int,
                        default=0)
    parser.add_argument('-j',
                        dest='j',
                        nargs='?',
                        const='true',
                        choices=['true', 'false'],
                        help='this option turns on the j write concern',
                        default='false')
    parser.add_argument(
        '--writeCmd',
        dest='writeCmd',
        nargs='?',
        const='true',
        choices=['true', 'false'],
        help=
        'this option turns on use of the write command instead of legacy write operations',
        default='true')

    parser.add_argument(
        '--includeFilter',
        dest='includeFilter',
        nargs='+',
        action="append",
        help=
        "Run just the specified tests/suites. Can specify multiple tags per --includeFilter\n"
        "flag. All tests/suites matching any of the tags will be run.\n"
        "Can specify multiple --includeFilter flags on the command line. A test\n"
        "must match all the --includeFilter clauses in order to be run.\n\n"
        "Ex 1: --includeFilter insert remove  --includeFilter core \n"
        "       will run all tests tagged with (\"insert\" OR \"remove\") AND (\"core\").\n"
        "Ex 2: --includeFilter %%\n"
        "       will run all tests",
        default=[])
    parser.add_argument(
        '--excludeFilter',
        dest='excludeFilter',
        nargs='+',
        action="append",
        help="Exclude tests matching all of the tags included.\n"
        "Can specify multiple --excludeFilter flags on the command line. A test\n"
        "matching any --excludeFilter clauses will not be run.\n"
        "A test that is both included according to --includeFilter and excluded by --excludeFilter,\n"
        "will not be run.\n\n"
        "Ex: --excludeFilter slow old --excludeFilter broken \n"
        "     will exclude all tests tagged with (\"slow\" AND \"old\") OR (\"broken\").",
        default=[])
    parser.add_argument('--out',
                        dest='outfile',
                        help='write the results as json to the specified file')
    parser.add_argument('--exclude-testbed',
                        dest='excludeTestbed',
                        nargs='?',
                        const='true',
                        choices=['true', 'false'],
                        default='false',
                        help='Exclude testbed information from results file')
    parser.add_argument(
        '--printArgs',
        dest='printArgs',
        nargs='?',
        const='true',
        choices=['true', 'false'],
        default='false',
        help='Print the benchrun args before running the test.')
    return parser
Example #42
                  print(i)
          except Exception as e:
              #logging.debug(e)
              result = e
              pass
    print "---3 ---curpath : %s testdir : %s"%(curpath,testdir)
    #for section in testlist:
    os.chdir(curpath)


if __name__ == "__main__":
    if len(sys.argv) < 2:
        print "Wrong argument, please input the board number."
        sys.exit()
    parser = ArgumentParser()
    parser.add_argument("-N", "--number", action="store", dest="board_num",
                         default="", help="set the board number")

    args = parser.parse_args()
    if args.board_num:
        board_num = args.board_num
    else:
        sys.exit(0)

    #push_ssh_key()
    #Do OS Test
    do_os_test(board_num)
Example #43
 def add_args(parser: argparse.ArgumentParser):
     """Add task-specific arguments to the parser."""
     # fmt: off
     parser.add_argument("--hidden-size", type=int, default=128)
     parser.add_argument("--num-shuffle", type=int, default=5)
     parser.add_argument("--save-dir", type=str, default="./embedding")
     parser.add_argument("--load-emb-path", type=str, default=None)
     parser.add_argument('--training-percents',
                         default=[0.9],
                         type=float,
                         nargs='+')
     parser.add_argument('--enhance',
                         type=str,
                         default=None,
                         help='use prone or prone++ to enhance embedding')
Example #44
File: demo.py Project: hjyai94/CRF
def get_parser():
    from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
    parser = ArgumentParser(description=__doc__,
                            formatter_class=ArgumentDefaultsHelpFormatter)

    parser.add_argument("image", type=str, help="input image")

    parser.add_argument("label", type=str, help="Label file.")

    parser.add_argument("--gpu",
                        type=str,
                        default='0',
                        help="which gpu to use")

    parser.add_argument('--output',
                        type=str,
                        help="Optionally save output as img.")

    parser.add_argument('--nospeed',
                        action='store_false',
                        help="Skip speed evaluation.")

    parser.add_argument('--normalize',
                        action='store_true',
                        help="Normalize input image before inference.")

    parser.add_argument('--pyinn',
                        action='store_true',
                        help="Use pyinn based Cuda implementation"
                        "for message passing.")

    # parser.add_argument('--compare', action='store_true')
    # parser.add_argument('--embed', action='store_true')

    # args = parser.parse_args()

    return parser
            datapoints = pickle.loads(data)
        except Exception:
            LOGGER.exception("Cannot decode grapite points")
            return

        for (metric, datapoint) in datapoints:
            try:
                datapoint = (float(datapoint[0]), float(datapoint[1]))
            except Exception as ex:
                LOGGER.error(ex)
                continue

            self._process_metric(metric, datapoint)

        self.stream.read_bytes(4, self._on_read_header)


def start_graphite_listener(port):

    echo_server = GraphiteServer()
    echo_server.listen(port)
    IOLoop.instance().start()

if __name__ == '__main__':

    parser = ArgumentParser(description='run a tornado graphite sink')
    parser.add_argument('port', help='port num')
    args = parser.parse_args()
    port = args.port
    start_graphite_listener(port)
Example #46
def main():
    arg_parser = ArgumentParser(
        prog="osu! beatmaps downloader",
        description="App to download beatmaps from osu! site")
    arg_parser.add_argument("-o",
                            "--out",
                            dest="download_path",
                            help="Path to download folder")
    arg_parser.add_argument(
        "-s",
        "--songs-path",
        dest="songs_path",
        help=
        "Path to osu!'s 'Songs' directory. If specified, existing beatmaps won't be downloaded"
    )
    arg_parser.add_argument("-c",
                            "--config-path",
                            dest="config_path",
                            help="Path to configuration file")
    arg_parser.add_argument(
        "-m",
        "--multiprocess",
        dest="multiprocess",
        type=int,
        choices=range(4, 12),
        help=
        "Program will run with the specified number (4-12) of subprocesses, instead of threads by default"
    )
    arg_parser.add_argument(
        "-a",
        "--auto-start",
        dest="auto_start",
        action="store_true",
        help=
        "If specified, automatically open beatmaps when they finish downloading")
    arg_parser.add_argument(
        "-p",
        "--use-proxy",
        dest="use_proxy",
        action="store_true",
        help="If specified, program will be used proxy to all connections")
    arg_parser.add_argument(
        "-pp",
        "--use-proxy-parallel",
        dest="use_proxy_parallel",
        action="store_true",
        help=
        "If specified, program will be trying to use proxy on parallel download on each process"
    )

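    # Either beatmap URLs (or a links file) or --dump-exists must be given, but not both.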
    group = arg_parser.add_mutually_exclusive_group(required=True)
    group.add_argument("urls",
                       nargs='*',
                       default=[],
                       help="Beatmaps urls or file with links")
    group.add_argument("-d",
                       "--dump-exists",
                       nargs='?',
                       dest="dump_path",
                       help="Dump existed beatmaps to file")

    args = arg_parser.parse_args()
    urls = []
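    # Decide whether the positional arguments are beatmap URLs or a path to a file of links.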
    for url in args.urls:
        if match(r"((https?):((//)|(\\\\))+([\w\d:#@%/;$()~_?+-=\\.&](#!)?)*)",
                 url):
            print("Using urls")
            urls = args.urls
            break
        else:
            print("Using links file")
            with open(url, encoding="utf8") as f:
                urls = f.read().splitlines()
    use_proxy = args.use_proxy or args.use_proxy_parallel
    parser = Parser(urls, args.config_path, args.download_path,
                    args.songs_path, args.auto_start, args.multiprocess,
                    use_proxy)
    if args.dump_path is None:
        if args.use_proxy_parallel:
            parser.parse_songs_parallel()
        else:
            parser.parse_songs()
    else:
        with open(args.dump_path, 'w', encoding="utf8") as f:
            f.writelines([
                f"http://osu.ppy.sh/beatmapsets/{el}\n"
                for el in parser.existed_ids
            ])
    return parser
Example #47
0
import json
import os
import re
import spacy
from argparse import ArgumentParser
from copy import deepcopy

from tqdm import tqdm

parser = ArgumentParser()
parser.add_argument("--in-file", type=str)
parser.add_argument("--out-dir", type=str)

args = parser.parse_args()

nlp = spacy.load('en_core_web_sm')

if not os.path.exists(args.out_dir):
    os.makedirs(args.out_dir)


class ReplacementRule:
    def process_instance(self, instance):
        return self._process(instance)

    def _process(self, instance):
        raise NotImplementedError("Not implemented here")

    def name(self):
        raise NotImplementedError("NotImplemented")
Example #48
0
def get_args():
  parser = ArgumentParser(
      description='encode label descriptions into vectors')

  ## Required parameters
  parser.add_argument("--word_vector_input", default=None, type=str,
                      help="File of words to be converted into vectors")
  parser.add_argument("--word_vector_output", default=None, type=str,
                      help="Output words and their vectors.")
  parser.add_argument("--data_dir", default=None, type=str, required=True,
                      help="The input data dir. Should contain the .tsv files (or other data files) for the task.")
  parser.add_argument("--model_type", default=None, type=str, required=True,
                      help="Model type selected in the list: " + ", ".join(MODEL_CLASSES.keys()))
  parser.add_argument("--model_name_or_path", default=None, type=str, required=True,
                      help="Path to pre-trained model or shortcut name selected in the list: " + ", ".join(ALL_MODELS))
  parser.add_argument("--task_name", default=None, type=str, required=True,
                      help="The name of the task to train selected in the list.")
  parser.add_argument("--output_dir", default=None, type=str, required=True,
                      help="The output directory where the model predictions and checkpoints will be written.")

  ## Other parameters
  parser.add_argument("--config_name", default="", type=str,
                      help="Pretrained config name or path if not the same as model_name")
  parser.add_argument("--tokenizer_name", default="", type=str,
                      help="Pretrained tokenizer name or path if not the same as model_name")
  parser.add_argument("--cache_dir", default="", type=str,
                      help="Where do you want to store the pre-trained models downloaded from s3")
  parser.add_argument("--max_seq_length", default=128, type=int,
                      help="The maximum total input sequence length after tokenization. Sequences longer "
                            "than this will be truncated, sequences shorter will be padded.")
  parser.add_argument("--do_train", action='store_true',
                      help="Whether to run training.")
  parser.add_argument("--do_eval", action='store_true',
                      help="Whether to run eval on the dev set.")
  parser.add_argument("--evaluate_during_training", action='store_true',
                      help="Rul evaluation during training at each logging step.")
  parser.add_argument("--do_lower_case", action='store_true',
                      help="Set this flag if you are using an uncased model.")

  parser.add_argument("--per_gpu_train_batch_size", default=8, type=int,
                      help="Batch size per GPU/CPU for training.")
  parser.add_argument("--per_gpu_eval_batch_size", default=8, type=int,
                      help="Batch size per GPU/CPU for evaluation.")
  parser.add_argument('--gradient_accumulation_steps', type=int, default=1,
                      help="Number of updates steps to accumulate before performing a backward/update pass.")
  parser.add_argument("--learning_rate", default=5e-5, type=float,
                      help="The initial learning rate for Adam.")
  parser.add_argument("--weight_decay", default=0.0, type=float,
                      help="Weight deay if we apply some.")
  parser.add_argument("--adam_epsilon", default=1e-8, type=float,
                      help="Epsilon for Adam optimizer.")
  parser.add_argument("--max_grad_norm", default=1.0, type=float,
                      help="Max gradient norm.")
  parser.add_argument("--num_train_epochs", default=3.0, type=float,
                      help="Total number of training epochs to perform.")
  parser.add_argument("--max_steps", default=-1, type=int,
                      help="If > 0: set total number of training steps to perform. Override num_train_epochs.")
  parser.add_argument("--warmup_steps", default=0, type=int,
                      help="Linear warmup over warmup_steps.")

  parser.add_argument('--logging_steps', type=int, default=50,
                      help="Log every X updates steps.")
  parser.add_argument('--save_steps', type=int, default=50,
                      help="Save checkpoint every X updates steps.")
  parser.add_argument("--eval_all_checkpoints", action='store_true',
                      help="Evaluate all checkpoints starting with the same prefix as model_name ending and ending with step number")
  parser.add_argument("--no_cuda", action='store_true',
                      help="Avoid using CUDA when available")
  parser.add_argument('--overwrite_output_dir', action='store_true',
                      help="Overwrite the content of the output directory")
  parser.add_argument('--overwrite_cache', action='store_true',
                      help="Overwrite the cached training and evaluation sets")
  parser.add_argument('--seed', type=int, default=42,
                      help="random seed for initialization")

  parser.add_argument('--fp16', action='store_true',
                      help="Whether to use 16-bit (mixed) precision (through NVIDIA apex) instead of 32-bit")
  parser.add_argument('--fp16_opt_level', type=str, default='O1',
                      help="For fp16: Apex AMP optimization level selected in ['O0', 'O1', 'O2', and 'O3']."
                            "See details at https://nvidia.github.io/apex/amp.html")
  parser.add_argument("--local_rank", type=int, default=-1,
                      help="For distributed training: local_rank")
  parser.add_argument('--server_ip', type=str, default='', help="For distant debugging.")
  parser.add_argument('--server_port', type=str, default='', help="For distant debugging.")
  
  args = parser.parse_args()
  return args
Example #49
0
        results = []
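        # pool.map invokes this object once per chromosome in parallel (the class is callable).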
        pool = Pool(processes=nprocs)
        results = pool.map(self, chrom_list)
        pool.close()
        pool.join()

        indel_result = self.call_indel()
        results.append(indel_result)

        if self.options.out_bed2:
            self.gen_bed2(results)


if __name__ == "__main__":
    parser = ArgumentParser(description='run')
    parser.add_argument('-bam', '--bam_file', help='bam file', required=True)
    parser.add_argument('-chromlist',
                        '--chrom_list',
                        help='chrom list',
                        required=True)
    parser.add_argument('-minsize',
                        '--min_sv_size',
                        help='min sv size',
                        required=True,
                        type=int)
    parser.add_argument('-maxsize',
                        '--max_sv_size',
                        help='max sv size',
                        required=True,
                        type=int)
    parser.add_argument('-svtype',
Example #50
0
#!/usr/bin/env python

# import libraries
from argparse import ArgumentParser
import numpy as np

# define command line arguments, program description, and help
desc = '''Remove nested haplotigs.'''
parser = ArgumentParser(description=desc)
parser.add_argument("placement", help="haplotigPlacementFile")
args = parser.parse_args()

# get filenames
inFile = args.placement


# functions
def nested(a, b):
    # a nested inside b
    nested = 0
    astart = int(a[7])
    aend = int(a[8])
    bstart = int(b[7])
    bend = int(b[8])
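    # a is nested in b when a's interval lies strictly inside b's.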
    if astart < bend and astart > bstart and aend > bstart and aend < bend:
        nested = 1
    return nested


# functions
def dup(a, b):
Example #51
0
import seaborn

from nemo.data.io.image import write_gifs
from nemo.data.utils import get_fpaths_in_dir
from nemo.model.analysis.feature_visualization import write_simple_cell_strfs
from nemo.model.analysis.metrics import (lifetime_sparsity, mean_activations,
                                         population_sparsity)
from nemo.model.openpv_utils import (plot_objective_probes,
                                     plot_adaptive_timescale_probes,
                                     read_activity_file,
                                     read_input_and_recon_files,
                                     read_simple_cell_weight_files)

parser = ArgumentParser()
parser.add_argument('ckpt_dir',
                    type=str,
                    help='Path to the OpenPV checkpoint.')
parser.add_argument('save_dir',
                    type=str,
                    help='Where to save these analyses and write out plots.')
parser.add_argument(
    '--openpv_path',
    type=str,
    default='/home/mteti/OpenPV/mlab/util',
    help='Path to *OpenPV/mlab/util',
)
parser.add_argument('--no_features',
                    action='store_true',
                    help='If specified, will not plot the features.')
parser.add_argument(
    '--no_recons',
Example #52
0
            arguments

    """
    if args.debug:
        logging.basicConfig(level=logging.DEBUG)
    elif args.verbose:
        logging.basicConfig(level=logging.INFO)

    testcase = import_module(f"testcases.{args.testcase}")
    interp = Interpreter()
    interp.run(testcase.ast)


if __name__ == "__main__":
    parser = ArgumentParser(
        description="Interpreter for the Oz kernel language's AST"
    )
    parser.add_argument(
        "testcase",
        metavar="TESTCASE",
        type=str,
        help="the name of the testcase",
    )
    parser.add_argument(
        "-v", "--verbose", action="store_true", help="view some verbose output"
    )
    parser.add_argument(
        "-d", "--debug", action="store_true", help="view logging output"
    )
    main(parser.parse_args())
Example #53
0
def parse_arguments():
    parser = ArgumentParser()
    parser.add_argument("-g", "--gui", dest="show_gui", action="store_true", help="show GUI", default=False)
    parser.add_argument(
        "-f", "--force-processing", dest="force_processing", action="store_true", help="force processing", default=False
    )

    parser.add_argument(
        "-n",
        "--number-of-articles",
        dest="number_of_articles",
        help="number of articles to process",
        type=int,
        default=None,
    )

    parser.add_argument("-l", "--lableling", dest="labeling", help="Label preprocessed data", default=None)

    parser.add_argument(
        "-t",
        "--train",
        dest="train",
        action="store_true",
        help="train the threshold for sentiment mapping",
        default=False,
    )

    parser.add_argument(
        "-c", "--compare", dest="compare", action="store_true", help="compare results with labeled data", default=False
    )

    return parser.parse_args()
Example #54
0
if rank == 0:
    
    # import rank 0 modules
    import matplotlib as ml
    ml.use('Agg')
    import matplotlib.pyplot as plt
    plt.rc('font', family='serif')
    plt.rc('axes.spines', right=False, top=False)
    
    import healpy as hp
    import fitsio as ft
    import pandas as pd
    
    from argparse import ArgumentParser
    ap = ArgumentParser(description='PHOT attr. correction pipeline')
    ap.add_argument('--galmap',    default='galaxy.hp.256.fits')
    ap.add_argument('--ranmap',    default='random.hp.256.fits')
    ap.add_argument('--splitdata', default='gal-feat.hp.256.k5.fits')
    ap.add_argument('--photattrs', default='phot-attrs.hp.256.fits')
    ap.add_argument('--wmap',      default='weights.hp.256.fits')
    ap.add_argument('--mask',      default='mask.hp.256.fits')
    ap.add_argument('--log',       default='none')
    ap.add_argument('--clfile',    default='none')
    ap.add_argument('--clsys',     default='none')    
    ap.add_argument('--corfile',   default='none')
    ap.add_argument('--corsys',    default='none')
    ap.add_argument('--nnbar',     default='none')
    ap.add_argument('--hpfit',     default='none')
    ap.add_argument('--oudir',     default='./output/')
    ap.add_argument('--axfit',     nargs='*', type=int,
                                   default=list(range(18)))
Example #55
0
sys.path.append('..')

import os
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'tune_viz.settings')

from argparse import ArgumentParser
from music21.converter import parseData
from progress_bar import ProgressBar
from tune_viz.models import *
import csv

parser = ArgumentParser(
    description='Calculate variability positionally across tunes')
parser.add_argument('-a',
                    '--accidentals',
                    dest='accidentals',
                    type=int,
                    default=2)
parser.add_argument('-t', '--type', dest='type', type=str, default='jig')
parser.add_argument('-l', '--limit', dest='limit', type=int, default=100)
args = parser.parse_args()

tunes = Tune.objects.filter(rhythm=args.type,
                            key__accidentals='s',
                            key__number_of_accidentals=args.accidentals)

if args.limit > 0:
    tunes = tunes[:args.limit]

tune_count = tunes.count()
Example #56
0
See also: plotsw.py (for the shallow water equations case of riemann.c).
'''

import PetscBinaryIO

from sys import exit, stdout
from time import sleep
from argparse import ArgumentParser, RawTextHelpFormatter
import numpy as np
import matplotlib.pyplot as plt

parser = ArgumentParser(description=help,
                        formatter_class=RawTextHelpFormatter)
# positional arguments
parser.add_argument('tfile', metavar='TDATA',
                    help='file from -ts_monitor binary:TDATA')
parser.add_argument('ufile', metavar='UDATA',
                    help='file from -ts_monitor_solution binary:UDATA')
# options
parser.add_argument('-ax', metavar='AX', type=float, default=0.0,
                    help='lower limit of x-axis of computational domain')
parser.add_argument('-ay', metavar='AY', type=float, default=0.0,
                    help='lower limit of y-axis of computational domain')
parser.add_argument('-bx', metavar='BX', type=float, default=1.0,
                    help='upper limit of x-axis of computational domain')
parser.add_argument('-by', metavar='BY', type=float, default=1.0,
                    help='upper limit of y-axis of computational domain')
parser.add_argument('-cellcentered', action='store_true', default=False,
                    help='assume axes are cell-centered')
parser.add_argument('-mx', metavar='MX', type=int, default=-1,
                    help='spatial grid with MX points in x direction; required for 1D and 2D frames')
Example #57
0
from argparse import ArgumentParser
import os
import sys
import cv2
import numpy as np
import json

parser = ArgumentParser()
parser.add_argument("--image-file","-i", type = str,default = None)
parser.add_argument("--video-file", "-v", type = str,default = None)
parser.add_argument("--output-file", "-o", type = str,default = None)
parser.add_argument("--weights","-w", type = str,default = None)
parser.add_argument("--mask-file","-m",type = str, default = "mask.png")

ROOT_DIR = os.path.abspath("")
MASK_RCNN_DIR = os.path.join(ROOT_DIR,'Mask_RCNN')
SORT_DIR = os.path.join(ROOT_DIR,'Sort')

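# Make the Mask_RCNN and Sort directories importable.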
sys.path.append(ROOT_DIR)
sys.path.append(MASK_RCNN_DIR)
sys.path.append(SORT_DIR)

from detector.utils.Detector import Detector
from detector.utils.DetectorConfig  import DetectorConfig

if __name__ == '__main__':
	args = parser.parse_args()
	MODEL_PATH = args.weights

	video_file = args.video_file
	
Example #58
0
    # Make test harnesses, if required
    if harnesses:
      env = cldrive.make_env()
      for params in harnesses:
        testcase = get_or_create(
            s, CLgenTestCase, program_id=program.id, params_id=params.id)
        s.flush()
        clgen_mkharness.mkharness(s, env, testcase)

    if delete:
      fs.rm(path)


if __name__ == "__main__":
  parser = ArgumentParser()
  parser.add_argument("-H", "--hostname", type=str, default="cc1",
                      help="MySQL database hostname")
  parser.add_argument("directory", help="directory containing kernels")
  parser.add_argument("--cl_launchable", action="store_true",
                      help="kernels have signature '__kernel void entry(...)'")
  parser.add_argument("-n", "--num", type=int, default=-1,
                      help="max programs to import, no max if < 0")
  parser.add_argument("--no-harness", action="store_true",
                      help="don't generate cldrive harnesses")
  parser.add_argument("--delete", action="store_true",
                      help="delete file after import")
  args = parser.parse_args()

  db.init(args.hostname)

  # get a list of files to import
  paths = [path for path in Path(args.directory).iterdir() if path.is_file()]
Example #59
0
            time.sleep(1)
            changes.reload()

        records = zone.list_resource_record_sets()
        log_records = [(record.name, record.record_type, record.ttl, record.rrdatas)
                       for record in records]

        return log_records

    except BaseException:
        return "the record %s already exists" % record_name


if __name__ == '__main__':
    parser = ArgumentParser()
    parser.add_argument('-f', '--file', help="your json file", required=True)
    args = parser.parse_args()

    if args.file:

        with open(args.file, 'r') as f:
            data = json.load(f)

            if check_zone(name=data['name']) is not True:
                create_zone(name=data['name'],
                            dns_name=data['zone'],
                            description=data['description'])

            for reg in range(len(data['records'])):
                create_record(name=data['name'],
                              dns_name=data['zone'],
Example #60
0
def parse_cmdline():
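    # Pre-parse only --header_version with a throwaway parser so the cmdline
    # buffer size can be chosen before the full parser is built.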
    version_parser = ArgumentParser(add_help=False)
    version_parser.add_argument('--header_version', type=parse_int, default=0)
    if version_parser.parse_known_args()[0].header_version < 3:
        # For boot header v0 to v2, the kernel commandline field is split into
        # two fields, cmdline and extra_cmdline. Both fields are asciiz strings,
        # so we minus one here to ensure the encoded string plus the
        # null-terminator can fit in the buffer size.
        cmdline_size = BOOT_ARGS_SIZE + BOOT_EXTRA_ARGS_SIZE - 1
    else:
        cmdline_size = BOOT_ARGS_SIZE + BOOT_EXTRA_ARGS_SIZE

    parser = ArgumentParser(formatter_class=RawDescriptionHelpFormatter,
                            epilog=get_vendor_boot_v4_usage())
    parser.add_argument('--kernel', type=FileType('rb'),
                        help='path to the kernel')
    parser.add_argument('--ramdisk', type=FileType('rb'),
                        help='path to the ramdisk')
    parser.add_argument('--second', type=FileType('rb'),
                        help='path to the second bootloader')
    parser.add_argument('--dtb', type=FileType('rb'), help='path to the dtb')
    dtbo_group = parser.add_mutually_exclusive_group()
    dtbo_group.add_argument('--recovery_dtbo', type=FileType('rb'),
                            help='path to the recovery DTBO')
    dtbo_group.add_argument('--recovery_acpio', type=FileType('rb'),
                            metavar='RECOVERY_ACPIO', dest='recovery_dtbo',
                            help='path to the recovery ACPIO')
    parser.add_argument('--cmdline', type=AsciizBytes(bufsize=cmdline_size),
                        default='', help='kernel command line arguments')
    parser.add_argument('--vendor_cmdline',
                        type=AsciizBytes(bufsize=VENDOR_BOOT_ARGS_SIZE),
                        default='',
                        help='vendor boot kernel command line arguments')
    parser.add_argument('--base', type=parse_int, default=0x10000000,
                        help='base address')
    parser.add_argument('--kernel_offset', type=parse_int, default=0x00008000,
                        help='kernel offset')
    parser.add_argument('--ramdisk_offset', type=parse_int, default=0x01000000,
                        help='ramdisk offset')
    parser.add_argument('--second_offset', type=parse_int, default=0x00f00000,
                        help='second bootloader offset')
    parser.add_argument('--dtb_offset', type=parse_int, default=0x01f00000,
                        help='dtb offset')

    parser.add_argument('--os_version', type=parse_os_version, default=0,
                        help='operating system version')
    parser.add_argument('--os_patch_level', type=parse_os_patch_level,
                        default=0, help='operating system patch level')
    parser.add_argument('--tags_offset', type=parse_int, default=0x00000100,
                        help='tags offset')
    parser.add_argument('--board', type=AsciizBytes(bufsize=BOOT_NAME_SIZE),
                        default='', help='board name')
    parser.add_argument('--pagesize', type=parse_int,
                        choices=[2**i for i in range(11, 15)], default=2048,
                        help='page size')
    parser.add_argument('--id', action='store_true',
                        help='print the image ID on standard output')
    parser.add_argument('--header_version', type=parse_int, default=0,
                        help='boot image header version')
    parser.add_argument('-o', '--output', type=FileType('wb'),
                        help='output file name')
    parser.add_argument('--vendor_boot', type=FileType('wb'),
                        help='vendor boot output file name')
    parser.add_argument('--vendor_ramdisk', type=FileType('rb'),
                        help='path to the vendor ramdisk')
    parser.add_argument('--vendor_bootconfig', type=FileType('rb'),
                        help='path to the vendor bootconfig file')

    gki_2_0_signing_args = parser.add_argument_group(
        '[DEPRECATED] GKI 2.0 signing arguments')
    gki_2_0_signing_args.add_argument(
        '--gki_signing_algorithm', help='GKI signing algorithm to use')
    gki_2_0_signing_args.add_argument(
        '--gki_signing_key', help='path to RSA private key file')
    gki_2_0_signing_args.add_argument(
        '--gki_signing_signature_args', default='',
        help='other hash arguments passed to avbtool')
    gki_2_0_signing_args.add_argument(
        '--gki_signing_avbtool_path', default='avbtool',
        help='path to avbtool for boot signature generation')

    args, extra_args = parser.parse_known_args()
    if args.vendor_boot is not None and args.header_version > 3:
        extra_args = parse_vendor_ramdisk_args(args, extra_args)
    if len(extra_args) > 0:
        raise ValueError(f'Unrecognized arguments: {extra_args}')

    if args.header_version < 3:
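        # Headers v0-v2 store the kernel command line in two fixed-size fields,
        # so split it and null-terminate the first part.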
        args.extra_cmdline = args.cmdline[BOOT_ARGS_SIZE-1:]
        args.cmdline = args.cmdline[:BOOT_ARGS_SIZE-1] + b'\x00'
        assert len(args.cmdline) <= BOOT_ARGS_SIZE
        assert len(args.extra_cmdline) <= BOOT_EXTRA_ARGS_SIZE

    return args