def test_add2parser(self):
        opt1 = Options.make([('foo', 'bar')], hej='med')
        main_parser = ArgumentParser()
        actions_group = main_parser.add_argument_group('foo')
        actions_group.add_argument('-b', '--bar', default='foo')
        opt2 = opt1.bind_copy_to_parser(main_parser.add_argument_group('bar'))


        # main parser
        args = main_parser.parse_args(['-fmar'])
        assert args.foo == 'mar'
        assert args.bar == 'foo'

        assert opt2.foo == 'bar'

        opt2.update_if_present(foo='mar', nothere=False)
        assert args.foo == 'mar'
        assert 'nothere' not in opt2

        opt2.foo = 'bar'
        opt2.update_if_present(args)
        assert opt2.foo == 'mar'
        assert opt1.foo == 'bar'

        opt2.foo = 'bar'
        main_parser.parse_args(['-fmar'], namespace=opt2)
        assert args.foo == 'mar'
        assert 'nothere' not in opt2
Example #2
0
def video_options():
    parser=ArgumentParser(description="Video Source analysis options")
    parser.add_argument('files', metavar='FILE_TO_ANALYSE', nargs='*', help='Files to analyse')

    output_options = parser.add_argument_group("Logging and output")
    output_options.add_argument('-v', '--verbose', action='count', default=None, help='Be verbose in output')
    output_options.add_argument('-q', '--quiet', action='store_false', dest='verbose', help="Supress output")
    output_options.add_argument('-l', '--log', default=None, help="output to a log file")
    output_options.add_argument('-d', '--dump', default=None, help="dump data from runs to file")

    source_actions = parser.add_argument_group('Actions')
    source_actions.add_argument("-i", "--identify",
                                dest="identify",
                                action="store_true",
                                default=True,
                                help="do a simple analysis of the media files")

    source_actions.add_argument("-a", "--analyse",
                                dest="analyse",
                                action="store_true",
                                default=False,
                                help="perform deeper analysis of the file")

    unit_test_actions = parser.add_argument_group('Unit Testing')
    unit_test_actions.add_argument("--unit-tests", action="store_true", default=False, help="Run modules unit tests")

    return parser
Example #3
0
def main():
    from gevent.event import Event
    from argparse import ArgumentParser

    parser = ArgumentParser(description='Lightweight SMTP server.')
    parser.add_argument('-d', '--daemon', dest='daemon', action='store_true',
                        help='Daemonize the process.')
    parser.add_argument('--user', dest='user', type=str, metavar='USR',
                        default='slimta', help='Drop privileges down to USR')
    parser.add_argument('--group', dest='group', type=str, metavar='GRP',
                        default='slimta', help='Drop privileges down to GRP')

    group = parser.add_argument_group('Queue Configuration')
    group.add_argument('--envelope-db', dest='envelope_db', metavar='FILE',
                       type=str, default='envelope.db',
                       help='File path for envelope database')
    group.add_argument('--meta-db', dest='meta_db', type=str,
                       metavar='FILE', default='meta.db',
                       help='File path for meta database')

    group = parser.add_argument_group('Port Configuration')
    group.add_argument('--inbound-port', dest='inbound_port', type=int,
                       metavar='PORT', default=25, help='Listening port number for inbound mail')
    group.add_argument('--outbound-port', dest='outbound_port', type=int,
                       metavar='PORT', default=587, help='Listening port number for outbound mail')
    group.add_argument('--outbound-ssl-port', dest='outbound_ssl_port', type=int,
                       metavar='PORT', default=465, help='Listening SSL-only port number for outbound mail')

    group = parser.add_argument_group('SSL/TLS Configuration')
    group.add_argument('--cert-file', dest='certfile', metavar='FILE',
                       type=str, default='cert.pem',
                       help='TLS certificate file')
    group.add_argument('--key-file', dest='keyfile', metavar='FILE',
                       type=str, default='cert.pem',
                       help='TLS key file')

    group = parser.add_argument_group('Output Configuration')
    group.add_argument('--log-file', dest='logfile', type=str, metavar='FILE',
                       default='output.log',
                       help='Write logs to FILE')
    group.add_argument('--error-file', dest='errorfile', type=str,
                       metavar='FILE', default='error.log',
                       help='Write errors to FILE')

    args = parser.parse_args()

    relay = _start_inbound_relay(args)
    queue = _start_inbound_queue(args, relay)
    _start_inbound_edge(args, queue)

    relay = _start_outbound_relay(args)
    queue = _start_outbound_queue(args, relay)
    _start_outbound_edge(args, queue)

    _daemonize(args)

    try:
        Event().wait()
    except KeyboardInterrupt:
        print
 def _create_argparser(self, formatter_class):
   argparser = ArgumentParser(formatter_class=formatter_class)
   group = argparser.add_argument_group(title='foo')
   advanced_group = argparser.add_argument_group(title='*foo')
   group.add_argument('--bar', help='help for argument bar')
   advanced_group.add_argument('--baz', help='help for argument baz')
   return argparser
def parseArgs():
	parser = ArgumentParser(description='Parses a GenBank file for data '
		'given a search term and specified fields to query. An additional '
		'parsed field is also reported in tab-delimited format.', add_help=False)
	req = parser.add_argument_group('Required')
	req.add_argument('-i', '--infile', required=True, metavar='FILE',
		help='input GenBank file')
	req.add_argument('-s', '--query-search', required=True, metavar='STR',
		type=str, help='search term to look for within the query feature '
		'and query qualifier')
	opt = parser.add_argument_group('Optional')
	opt.add_argument('-f', '--query-feature', default='CDS', metavar='STR',
		help='genbank feature type to search in, e.g., CDS, gene, rRNA, '
		'source, tRNA, misc_feature')
	opt.add_argument('-h', '--help', action='help',
		help='show this help message and exit')
	opt.add_argument('-o', '--outfile', default='parsed.tab', metavar='FILE',
		help='output tab-delimited file containing <locus_tag>\\t'
		'<query-qualifier>\\t<report-qualifier>  [./parsed.tab]')
	opt.add_argument('-q', '--query-qualifier', default='inference',
		metavar='STR', help='qualifier term within each genbank feature to '
		'search in, e.g., locus_tag, inference, codon_start, product, '
		'transl_table, translation')
	opt.add_argument('-r', '--report-qualifier', default='product',
		metavar='STR', help='additional qualifier term to parse data from '
		'and report when queries are found')
	return parser.parse_args()
Example #6
0
def parseArgs():
	parser = ArgumentParser(description='Converts a list of pairwise values '
	'into an 2-dimensional array', add_help=False, epilog='NOTE: data after '
	'the third column are ignored')
	req = parser.add_argument_group('Required')
	req.add_argument('-i', '--infile', required=True, metavar='FILE',
		type=str, help='input file with 3 columns (<id1> <id2> <val>)')
	opt = parser.add_argument_group('Optional')
	opt.add_argument('-d', '--directional', action='store_false',
		default=True, help='pairwise values are directional, so '
		'<id1> <id2> is <valA> but <id2> <id1> is <valB>; default (off) '
		'lists values in lower-left for <id1> <id2> is <valA> and '
		'upper-right for <id2> <id1> is <valB> [off]')
	opt.add_argument('-h', '--help', action='help',
		help='show this help message and exit')
	opt.add_argument('-s', '--sort', action='store_true',
		default=False, help='apply alphabetical sorting of <id1> and <id2> '
		'identifiers to output matrix [off]')
	opt.add_argument('-o', '--outfile', metavar='FILE',
		default=None, help='2-dimensional output matrix [stdout]')
	opt.add_argument('--in-delim', metavar='STR', type=str,
		default='\t', help='input file data delimiter [\'\\t\']')
	opt.add_argument('--out-delim', metavar='STR', type=str,
		default='\t', help='output file data delimiter [\'\\t\']')
	return parser.parse_args()
Example #7
0
def main():
    from argparse import ArgumentParser
    import sys

    parser = ArgumentParser(prog='pytail')

    test_group = parser.add_argument_group("Test")
    test_group.add_argument('--test', dest='test', default=False, action='store_true',
                            help='run some basic tests')

    parser.add_argument('-n', '--lines', dest='lines', default=10, type=int,
                        help='output the last N lines, instead of the last 10')
    parser.add_argument('file', nargs='?', metavar='FILE', help="path to file")

    head_group = parser.add_argument_group('Head')
    head_group.add_argument('-t', '--top', dest='head', default=False, action='store_true',
                            help='output lines from the top instead of the bottom; does not work with follow')

    tail_group = parser.add_argument_group('Tail')
    tail_group.add_argument('-f', '--follow', dest='follow', default=False, action='store_true',
                            help='output appended data as  the  file  grows')
    tail_group.add_argument('-s', '--sleep-interval', metavar='DELAY', dest='sleep', default=1.0, type=float,
                            help='with -f, sleep for approximately DELAY seconds between iterations')

    args = parser.parse_args()

    if args.test:
        _test()
    elif args.file:
        _main(args.file, args)
    else:
        parser.print_help()
        sys.exit(1)
Example #8
0
def parse_cmdline():
    from argparse import ArgumentParser, FileType

    parser = ArgumentParser(prog=package_name)

    parser.add_argument('checklist', metavar='FILE', type=FileType('r'), nargs='+',
                        help='checklist files')

    group = parser.add_argument_group('logging options')

    group.add_argument('-v', '--verbose', dest='logging_level', action='store_const', const=logging.INFO,
                       help='output the verbose information')
    group.add_argument('-d', '--debug', dest='logging_level', action='store_const', const=logging.DEBUG,
                       help='output the debug information')
    group.add_argument('--log-level', dest='logging_level', choices=['ERROR', 'WARNING', 'INFO', 'DEBUG'],
                       default=logging.WARN, help='set root logger level (default: WARNING)')
    group.add_argument('--log-file', dest='logging_file', type=FileType('w'), default='-',
                       help='output the log to file (default: stdout)')

    group = parser.add_argument_group('output options')
    group.add_argument('-t', '--output-type', default='console', nargs=1,
                       choices=SUPPORT_OUTPUTS.keys(),
                       help='output result in type (default: console)')
    group.add_argument('-g', '--generate-config', metavar='FILE', type=FileType('w'),
                       help='generate Sensu config file')

    args = parser.parse_args()

    logging.basicConfig(level=args.logging_level,
                        stream=args.logging_file)

    return args
Example #9
0
def command_parser() :
	""" parsing command line """
	parser = ArgumentParser(description="Parsing VCF file for multiple downstream analysis")
	parser.add_argument("-analysis", metavar="STR", dest="analysis", choices=["prepare_mapping", "prepare_phasing"], required=True, help="available analysis: prepare_mapping, prepare_phasing")
	parser.add_argument("-vcf", metavar="FILE", dest="vcf_file", required=True, help="VCF file generated from GATK")
	parser.add_argument("-p", metavar="STR", dest="out_prefix", required=True, help="prefix of output genotype table file in either MSTMAP or rQTL required input format")

	# add option group for analysis "prepare_mapping"
	prepare_mapping_args_group = parser.add_argument_group("Options for \"prepare_mapping\" analysis", "generate genotype tables that are ready for downstream MSTMAP and/or rQTL\n")
	prepare_mapping_args_group.add_argument("-thinoff", dest="thin_or_not", action='store_true', help="turn off thining the genotype table. Not suggested if you get a big table")
	#prepare_mapping_args_group.add_argument("-thin_mode", dest="thin_mode", choices=["sliding_window", "random"], default="sliding_window", help="specify how you want to thin your genotype table. Current version only supports sliding_window")
	prepare_mapping_args_group.add_argument("-gt_outfmt", metavar="STR", dest="gt_outfmt", choices=["mstmap", "rqtl", "both"], type=str, default="both", help="specify the format of the output genotype table. Current version supports: mstmap, rqtl, and both. By default, genotype tables in both formats will be generated")
	prepare_mapping_args_group.add_argument("-gt_proposal", metavar="FILE", dest="gt_proposal_file", help="file of genotypes proposed at variant sites")

	# add option group for analysis "prepare_phasing"
	prepare_phasing_args_group = parser.add_argument_group("Options for \"prepare_phasing\" analysis", "generate genotype file for downstream Phasing programs\n")
	prepare_phasing_args_group.add_argument("-known_LGs", metavar="FILE", dest="known_LGs_file", help="a file of a set of known linkage groups. If available, the output file generated for phasing program will be categorized by linkage group. Current version, this file must be provided")
	prepare_phasing_args_group.add_argument("-phasing_outfmt", metavar="STR", dest="phasing_outfmt", choices=["fastphase", "beagle"], type=str, default="fastphase", help="specify the format of the output genotype file for the following Phasing programs. Current version supports: fastphase, beagle")

	args = parser.parse_args()

	# make sure vcf file provided exist #
	if not exists(args.vcf_file) :
		stderr.write(timestamper() + " [IO Error]: Cannot find the vcf file you provided %s\n" %(args.vcf_file))
		exit()

	# make sure the output path provided is reachable #
	make_dir_if_needed(dirname(realpath(args.out_prefix)))

	if args.analysis == "prepare_mapping" :
		run_prepare_mapping(args.analysis, args.vcf_file, args.gt_proposal_file, realpath(args.out_prefix), args.gt_outfmt, args.thin_or_not)
	elif args.analysis == "prepare_phasing" :
		run_prepare_phasing(args.analysis, args.vcf_file, args.known_LGs_file, realpath(args.out_prefix), args.phasing_outfmt)
Example #10
0
def parser_with_standard_args(name, description):
    parser = ArgumentParser(
        prog='python -m workfront.'+name,
        description=description,
        formatter_class=SaneFormatter,
    )
    parser.add_argument('--unsafe-certs', action='store_true',
                        help='use an unverified ssl context in case of '
                             'company imposed man-in-the-middle '
                             'situations')
    parser.add_argument('--log-level', default=logging.WARNING, type=int,
                        metavar='LEVEL',
                        help='pass a lower number to see more logging')

    group = parser.add_argument_group('API options')
    group.add_argument('--protocol', default='https',
                       help='url protocol')
    group.add_argument('--domain', default='api-cl01',
                       help='the bit before the dot of your On Demand url.')
    group.add_argument('--version', default='unsupported',
                       help='api version to use')

    group = parser.add_argument_group('Override API url')
    group.add_argument('--url',
                       help='full base url to Workfront API. '
                            'Should end in an API version string and overrides'
                            ' --protocol, --domain and --version')

    return parser
Example #11
0
def main():
  parser = ArgumentParser(description="Process GCTracer's NVP output")
  parser.add_argument('keys', metavar='KEY', type=str, nargs='+',
                      help='the keys of NVPs to process')
  parser.add_argument('--histogram-type', metavar='<linear|log2>',
                      type=str, nargs='?', default="linear",
                      help='histogram type to use (default: linear)')
  linear_group = parser.add_argument_group('linear histogram specific')
  linear_group.add_argument('--linear-histogram-granularity',
                            metavar='GRANULARITY', type=int, nargs='?',
                            default=5,
                            help='histogram granularity (default: 5)')
  log2_group = parser.add_argument_group('log2 histogram specific')
  log2_group.add_argument('--log2-histogram-init-bucket', metavar='START',
                          type=int, nargs='?', default=64,
                          help='initial buck size (default: 64)')
  parser.add_argument('--histogram-omit-empty-buckets',
                      dest='histogram_omit_empty',
                      action='store_true',
                      help='omit empty histogram buckets')
  parser.add_argument('--no-histogram', dest='histogram',
                      action='store_false', help='do not print histogram')
  parser.set_defaults(histogram=True)
  parser.set_defaults(histogram_omit_empty=False)
  parser.add_argument('--rank', metavar='<no|min|max|avg>',
                      type=str, nargs='?',
                      default="no",
                      help="rank keys by metric (default: no)")
  parser.add_argument('--csv', dest='csv',
                      action='store_true', help='provide output as csv')
  args = parser.parse_args()

  histogram = None
  if args.histogram:
    bucket_trait = None
    if args.histogram_type == "log2":
      bucket_trait = Log2Bucket(args.log2_histogram_init_bucket)
    else:
      bucket_trait = LinearBucket(args.linear_histogram_granularity)
    histogram = Histogram(bucket_trait, not args.histogram_omit_empty)

  categories = [ Category(key, deepcopy(histogram), args.csv)
                 for key in args.keys ]

  while True:
    line = stdin.readline()
    if not line:
      break
    obj = split_nvp(line)
    for category in categories:
      category.process_entry(obj)

  # Filter out empty categories.
  categories = [x for x in categories if not x.empty()]

  if args.rank != "no":
    categories = sorted(categories, key=make_key_func(args.rank), reverse=True)

  for category in categories:
    print(category)
Example #12
0
def parseArgs():
	parser = ArgumentParser(description='Computes the average amino acid '
			'identity (AAI) between two protein sets', add_help=False)
	req = parser.add_argument_group('Required')
	req.add_argument('-1', '--set1', required=True, metavar='FILE',
		help='first input FastA sequence file')
	req.add_argument('-2', '--set2', required=True, metavar='FILE',
		help='second input FastA sequence file')
	opt = parser.add_argument_group('Optional')
	opt.add_argument('-c', '--cpus', type=str, metavar='INT',
		default='1', help='number of CPUs [1]')
	opt.add_argument('-f', '--fraction', type=float, metavar='FLOAT',
		default=30.0, help='minimum alignment length percentage [70.0]')
	opt.add_argument('-h', '--help', action='help',
		help='show this help message and exit')
	opt.add_argument('-i', '--identity', type=float, metavar='FLOAT',
		default=80.0, help='minimum percent identity [30.0]')
	opt.add_argument('-l', '--length', type=int, metavar='INT',
		default=0, help='minimum alignment character length (sum of all '
		'aligned segments and all gaps) [0]')
	opt.add_argument('-o', '--outpath', metavar='PATH',
		default=None, help='output directory [./AAI--<date>_<time>]')
	opt.add_argument('-s', '--bitscore', type=float, metavar='FLOAT',
		default=0.0, help='minimum alignment Bit score [0.0]')
	opt.add_argument('--refilter', default=False, action='store_true',
		help='skip blast system commands and re-filter previously generated '
		'blast output from this script with different cutoff parameters, '
		'which overwrites loci and stats output')
	return parser.parse_args()
Example #13
0
def get_args():
    """
    Builds the argument parser
    :return: the argument parser
    :rtype: ArgumentParser
    """
    opts = ArgumentParser()

    settings_opt = opts.add_argument_group("Settings")
    settings_opt.add_argument("-r", "--reset", dest="reset", action="store_true", default=False,
                              help="Completely reset all the configuration.")
    settings_opt.add_argument("-f", "--change-base-dir", dest="base_path", action="store_true", default=False,
                              help="Change the base dir of your videos.")
    settings_opt.add_argument("-d", "--change-dest-dir", dest="dest_path", action="store_true", default=False,
                              help="Change the base dir of your videos.")
    settings_opt.add_argument("-s", "--specific-location", type=str, dest="specific",
                              help="Download to given file from specific location")
    settings_opt.add_argument("-o", "--organize", action="store_true", default=False, dest="organize",
                              help="Organize your library only")

    information_opts = opts.add_argument_group("Information")
    information_opts.add_argument("-c", "--configuration", dest="show_configuration", action="store_true",
                                  default=False, help="Display configuration.")
    information_opts.add_argument("-v", "--version", dest="show_version", action="store_true", default=False,
                                  help="Display the package version")
    information_opts.add_argument("--verbose", dest="verbose", action="store_true", default=False,
                                  help="Display logs also on screen")
    information_opts.add_argument("-l", "--log", dest="show_log", action="store_true", default=False,
                                  help="Display short log")
    information_opts.add_argument("-la", "--all-log", dest="show_all_log", action="store_true", default=False,
                                  help="Display all log")
    information_opts.add_argument("-ca", "--cache", dest="show_cache", action="store_true", default=False,
                                  help="Display cache")

    return opts
Example #14
0
    def parse_args(self, args=None):
        """Parse the arguments"""
        parser = ArgumentParser(description="Plot the numbers given in a file "
                                "or in stdin")

        rgroup = parser.add_argument_group("Read from...")
        rgroup.add_argument('--std-in', action="store_true", default=False,
                            help="Perform doc tests and exit instead.")
        rgroup.add_argument('--in-file', '-f', type=str, default=None,
                            help="Specify input file path.")

        dgroup = parser.add_argument_group("Input data...")
        dgroup.add_argument('--xy', '-x', action="store_true", default=False,
                            help="Treat first column as x values, and the "
                            "following as y-values (default False).")
        dgroup.add_argument('--col', '-c', action="append", dest='cols',
                            type=int, default=list(),
                            help="Specify which columns to investigate. "
                            "Repeat if needed. Default: All")
        dgroup.add_argument('--ignore-first', '-i', action="store_true",
                            default=False, help="ignore first line")
        dgroup.add_argument('--sep', '-s', default=' ',
                            help="Specify separator, default: space")

        fgroup = parser.add_argument_group("Formatting...")
        fgroup.add_argument('--gap', '-g', type=float, default=0.01,
                            help="inverted number of subpoints in lines")
        fgroup.add_argument('--not-implemented')

        if args:
            self.args = parser.parse_args(args)
        else:
            self.args = parser.parse_args()
        return
Example #15
0
    def parse_args(self, argv):
        """Custom and check param list.
        """
        parser = ArgumentParser(prog="cocos %s" % self.__class__.plugin_name(),
                                description=self.__class__.brief_description())
        parser.add_argument('-c', dest='clean', action="store_true",
                            help=MultiLanguage.get_string('GEN_LIBS_ARG_CLEAN'))
        parser.add_argument('-e', dest='engine_path', help=MultiLanguage.get_string('GEN_LIBS_ARG_ENGINE'))
        parser.add_argument('-p', dest='platform', action="append", choices=['ios', 'mac', 'android', 'win32'],
                            help=MultiLanguage.get_string('GEN_LIBS_ARG_PLATFORM'))
        parser.add_argument('-m', "--mode", dest='compile_mode', default='release', choices=['debug', 'release'],
                            help=MultiLanguage.get_string('GEN_LIBS_ARG_MODE'))
        parser.add_argument('--dis-strip', dest='disable_strip', action="store_true",
                            help=MultiLanguage.get_string('GEN_LIBS_ARG_DISABLE_STRIP'))
        group = parser.add_argument_group(MultiLanguage.get_string('GEN_LIBS_GROUP_WIN'))
        group.add_argument('--vs', dest='vs_version', type=int, default=None,
                           help=MultiLanguage.get_string('GEN_LIBS_ARG_VS'))
        group = parser.add_argument_group(MultiLanguage.get_string('GEN_LIBS_GROUP_ANDROID'))
        group.add_argument("--app-abi", dest="app_abi",
                            help=MultiLanguage.get_string('GEN_LIBS_ARG_ABI'))
        group.add_argument("--ap", dest="android_platform",
                            help=MultiLanguage.get_string('COMPILE_ARG_AP'))

        (args, unknown) = parser.parse_known_args(argv)
        self.init(args)

        return args
Example #16
0
    def options(self):
        parser = ArgumentParser(formatter_class=RawTextHelpFormatter,
            epilog='''EXAMPLES: 
            python secutils.py -t Project/Discovery/ -p Project/Enum/
            python secutils.py -rn Project/Discovery/target1 -o Report
            python secutils.py -rN Project/target1/nessus Project/target2/nessus/ -T VulnsDB_Spanish.xls -o Report.xls''')
        
        parser._optionals.title = "MISC"
        parser.add_argument('-v', '--version', action='version', version='%(prog)s 2.0')
        parser.add_argument('-o', metavar='OUTPUT-FILE', dest='output', action='append', help='Set an xls output file')
    
        nmapGroup = parser.add_argument_group('NMAP UTILITIES')
        nmapGroup.add_argument('-t', metavar='DIR', nargs='+', dest='pTargets', action='append', help='Create a list of targets from nmap files in xml format located in DIR')
        nmapGroup.add_argument('-p', metavar='DIR', nargs='+', dest='pPorts', action='append', help='Create list of open ports from nmap files in xml format located in DIR')
        nmapGroup.add_argument('-rn', metavar='DIR', nargs='+', dest='pNmap', action='append', help='Create an XLS report from nmap files in xml format located in DIR')
        
        nessusGroup = parser.add_argument_group('NESSUS UTILITIES')
        nessusGroup.add_argument('-rN', metavar='DIR', nargs='+', dest='pNessus', action='append', help='Create an XLS report from .nessus files located in DIR')
        nessusGroup.add_argument('-T', metavar='FILE', dest='dbNessus', action='append', help='Use an xls database FILE to translate nessus reports. Must be used along with -rN')
        
        nessusGroup = parser.add_argument_group('ACUNETIX UTILITIES')
        nessusGroup.add_argument('-ra', metavar='DIR', nargs='+', dest='pAcunetix', action='append', help='Create an XLS report from acunetix files in xml format located in DIR')
        
        nessusGroup = parser.add_argument_group('NETSPARKER UTILITIES')
        nessusGroup.add_argument('-rk', metavar='DIR', nargs='+', dest='pNetsparker', action='append', help='Create an XLS report from netsparker files in xml format located in DIR')        

        if len(sys.argv) == 1:
            return parser.parse_args('--help'.split())
        else:
            return parser.parse_args()
Example #17
0
def parse_args(plugin_manager: Manager) -> Namespace:
    """
    Parses the command line for runtime arguments for nitpycker and returns them

    :param plugin_manager: the plugin manager to add plugin options
    :return: a Namespace instance containing all arguments
    """
    parser = ArgumentParser(prog="nitpycker", description="A test runner base on python's unittest")

    verbosity_parser = parser.add_argument_group("Verbosity")
    verbosity_arguments = verbosity_parser.add_mutually_exclusive_group()
    verbosity_arguments.set_defaults(verbosity=1)
    verbosity_arguments.add_argument("-v", "--verbose", action="store_const", dest="verbosity", const=2)
    verbosity_arguments.add_argument("-q", "--quiet", action="store_const", dest="verbosity", const=0)

    parser.add_argument(
        "-n", "--process", action="store", dest="process_number", default=multiprocessing.cpu_count(), type=int,
        help="The number of process to run. Defaults to the number of cores"
    )

    parser.add_argument(
        "-p", "--pattern", action="store", dest="pattern", default="test*.py", type=str,
        help="Pattern to match tests ('test*.py' default)"
    )

    parser.add_argument(
        "start_directory", default=".",
        help="a list of any number of test modules, classes and test methods."
    )

    reporter_parser = parser.add_argument_group("Result Reporters")

    plugin_manager.add_arguments(parser=parser, reporter_parser=reporter_parser)

    return parser.parse_args()
Example #18
0
def get_option_parser():
	parser = ArgumentParser(usage=__doc__, version=__version__)

	parser.add_argument(metavar="<FASTQ_FILE>", dest="fastq_file",
		help="Metagenomic / Metatranscriptomic fastq file")
	parser.add_argument(metavar="<OUTPUT_DIR>", dest="output_dir",
		help="Output directory used for storing intermediate and final results.")
	parser.add_argument("--single-stage", action="store", metavar="SINGLE_STAGE", dest="single_stage",
		choices=['preprocessing', 'alignment', 'binning'],
		help="Run only a single stage of the metagenomic pipeline. Options: (preprocessing, alignment, binning)")
	parser.add_argument("--override", action="store_true", dest="override", default=False,
		help="If supplied, the pipeline will override the steps already performed and perform them again.")

	group = parser.add_argument_group("Preprocessing options",
		description="Options which tune the preprocessing step of the metagenomix.")
	group.add_argument("--fastqc", action="store", metavar="FASTQC_REPORT", dest="fastqc_report",
		help="Path to the already generated FastQC report.")
	group.add_argument('--contaminants', action="store", metavar="CONTAMINANTS", dest="contaminants",
		help="Path to the contaminants file (used by FastQC)")

	group = parser.add_argument_group("Alignment options",
		description="Options to tune the alignment step of the metagenomix.")
	group.add_argument("--blast_db", action="store", metavar="BLAST_DB", dest="blast_db",
		help="Path to the blast database to use for the alignment step.")
	group.add_argument("--aln_file", action="store", metavar="ALN_FILE", dest="aln_file",
		help="Already generated blast alignment file (default: megablast -D 3 output option)")
	group.add_argument("--blast_format", action="store", metavar="BLAST_FORMAT", dest="blast_format",
		help="Blast format used for parsing alignment file if one is provided, or for generating if not.")

	return parser
Example #19
0
class ArgParser(object):
    def __init__(self):
        self.ps = ArgumentParser(prog=piony.__appname__,
                                 formatter_class=RawDescriptionHelpFormatter,
                                 description=piony.__doc__, epilog="Enjoy!!!")
        self._setup_options()

    def parse(self, argv):
        if not argv:
            argv = []
        elif isinstance(argv, str):
            argv = argv.split()
        elif not isinstance(argv, list):
            raise InputError("Wrong argv type: {}".format(type(argv)))
        return self.ps.parse_args(argv)

    def apply(self, args):
        from operator import xor
        res = (False, False)
        dbg = {'a': (True, True), 'v': (True, False), 'k': (False, True)}
        if args.verbose:
            for entry in args.verbose:
                res = map(xor, res, dbg[entry])
            piony.G_DEBUG_VISUALS, piony.G_DEBUG_ACTIONS = res

    def _setup_options(self):
        ## Configuration
        farg = self.ps.add_argument
        farg('buds', metavar='bud', nargs='*', type=str, default=None,
             help="Setup profile layout in json directly on cmdline. "
                  "Can be specified several times -- one for each slice. "
                  "Or use pathes to files with slices inside.")
        farg('-v', '--version', action='version', default=None,
             version="%(prog)s {0}".format(piony.__version__),
             help="Version of program.")

        gr_window = self.ps.add_argument_group('Window')
        warg = gr_window.add_argument
        warg('-c', '--config', default=None,
             help="Config file with default settings.")
        warg('-p', '--print', default=None,
             help="Toggle action print/execute to use as frontend only.")

        ## Appearance
        warg('-s', '--size', type=int, default=None,
             help="Sets window size WxH=NxN to derive all rings sizes from it.")
        warg('-F', '--fullscreen', action='store_true', default=None,
             help="Overlay fullscreen/local")
        warg('-T', '--no-tooltip', action='store_true', default=None,
             help="Disable pop-up items, for those who is irritated.")

        ## Process
        gr_general = self.ps.add_argument_group('General')
        garg = gr_general.add_argument
        garg('-k', '--kill', action='store_true', default=None,
             help="Kill running daemonized program.")
        garg('-V', '--verbose', nargs='?', type=str,
             const='a', choices=['a', 'v', 'k'], default=None,
             help="Verbose (debug): [a]ll (default), [v]isuals, [k]eys.")
Example #20
0
def main():
    from gevent.event import Event
    from argparse import ArgumentParser

    parser = ArgumentParser(description='Lightweight SMTP server.')
    parser.add_argument('-d', '--daemon', dest='daemon', action='store_true',
                        help='Daemonize the process.')
    parser.add_argument('--user', dest='user', type=str, metavar='USR',
                        default=None, help='Drop privileges down to USR')
    parser.add_argument('--group', dest='group', type=str, metavar='GRP',
                        default=None, help='Drop privileges down to GRP')

    group = parser.add_argument_group('Port Configuration')
    group.add_argument('--inbound-port', dest='inbound_port', type=int,
                       metavar='PORT', default=1025,
                       help='Listening port number for inbound mail')
    group.add_argument('--inbound-ssl-port', dest='inbound_ssl_port',
                       type=int, metavar='PORT', default=1465,
                       help='Listening SSL-only port number for inbound mail')
    group.add_argument('--outbound-port', dest='outbound_port', type=int,
                       metavar='PORT', default=1587,
                       help='Listening port number for outbound mail')

    group = parser.add_argument_group('SSL/TLS Configuration')
    group.add_argument('--cert-file', dest='certfile', metavar='FILE',
                       type=str, default='cert.pem',
                       help='TLS certificate file')
    group.add_argument('--key-file', dest='keyfile', metavar='FILE',
                       type=str, default='cert.pem',
                       help='TLS key file')

    group = parser.add_argument_group('Output Configuration')
    group.add_argument('--log-file', dest='logfile', type=str, metavar='FILE',
                       default='output.log',
                       help='Write logs to FILE')
    group.add_argument('--error-file', dest='errorfile', type=str,
                       metavar='FILE', default='error.log',
                       help='Write errors to FILE')

    group = parser.add_argument_group('Other Configuration')
    group.add_argument('--spamassassin', action='store_true', default=False,
                       help='Scan messages with local SpamAssassin server')

    args = parser.parse_args()

    in_relay = _start_inbound_relay(args)
    in_queue = _start_inbound_queue(args, in_relay)
    _start_inbound_edge(args, in_queue)

    out_relay = _start_outbound_relay(args)
    out_queue = _start_outbound_queue(args, out_relay, in_queue)
    _start_outbound_edge(args, out_queue)

    _daemonize(args)

    try:
        Event().wait()
    except KeyboardInterrupt:
        print
Example #21
0
class Options:

    def __init__(self):
        self._init_parser()

    def _init_parser(self):
        self.parser = ArgumentParser(prog=sys.argv[0])
        self.group = self.parser.add_argument_group(
            'list',
            'List I/O operations')
        self.groupd = self.parser.add_argument_group(
            'display',
            'Commands for displaying video lists')
        self.group.add_argument('-q', '--query', dest='query',
                                action='store_true',
                                help="Query curent \
                                video in lists")
        self.group.add_argument('-a', '--alist', dest='alist',
                                action='store_true',
                                help="Add video to Alist")
        self.group.add_argument('-k', '--keep', dest='klist',
                                action='store_true',
                                help="Add video to keep list")
        self.group.add_argument('-d', '--delete', dest='dlist',
                                action='store_true',
                                help="Add video to remove list")
        self.groupd.add_argument('-i', '--info', dest='info',
                                 action='store_true',
                                 help="Print information about lists")
        self.groupd.add_argument('-I', '--videoinfo', dest='videoinfo',
                                 action='store_true',
                                 help="Print information about video")
        self.groupd.add_argument('-O', '--outputall', dest='outputall',
                                 action='store_true',
                                 help="Output current alist and klist")
        self.groupd.add_argument('-o', '--output', dest='output',
                                 action='store_true',
                                 help="Output current alist")
        self.group.add_argument('-p', '--purge', dest='purgelist',
                                action='store_true',
                                help='Delete all videos on remove list')
        self.group.add_argument('-r', '--remove', dest='remove',
                                action='store_true',
                                help='Remove current file from all \
                                 lists')
        self.parser.add_argument('-n', '--dryrun', dest='dryrun',
                                 action='store_true',
                                 help="Dryrun, take no real file actions")
        self.parser.add_argument('-N', '--notify', dest='notify',
                                 action='store_true',
                                 help="Use libnotify notifications.")
        self.parser.add_argument('-L', '--list-file', dest='listfile',
                                 default=expanduser("~/.vidlist"),
                                 help='Location of list')
        self.parser.add_argument('FILE', nargs='?',
                                 help='Specify video files manually')

    def parse(self, args=sys.argv[1:]):
        return self.parser.parse_args(args)
def main():
    parser = ArgumentParser(description='Dummy arp script.', formatter_class=ArgumentDefaultsHelpFormatter)
    parser.add_argument_group()
    parser.add_argument("-v", "--victimIP", help="Choose the victim IP address. Example: -v 192.168.0.5")
    parser.add_argument("-r", "--routerIP", help="Choose the router IP address. Example: -r 192.168.0.1")
    args = parser.parse_args()
    if geteuid() != 0:
        exit("[!] Please run as root")
def main():
    """Entry point"""
    from fmriprep import __version__
    parser = ArgumentParser(description='fMRI Preprocessing workflow',
                            formatter_class=RawTextHelpFormatter)

    # Arguments as specified by BIDS-Apps
    # required, positional arguments
    # IMPORTANT: they must go directly with the parser object
    parser.add_argument('bids_dir', action='store', default=os.getcwd())
    parser.add_argument('output_dir', action='store',
                        default=op.join(os.getcwd(), 'out'))
    parser.add_argument('analysis_level', choices=['participant'])

    # optional arguments
    parser.add_argument('--participant_label', action='store', nargs='+')
    parser.add_argument('-v', '--version', action='version',
                        version='fmriprep v{}'.format(__version__))

    # Other options
    g_input = parser.add_argument_group('fMRIprep specific arguments')
    g_input.add_argument('-s', '--session-id', action='store', default='single_session')
    g_input.add_argument('-r', '--run-id', action='store', default='single_run')
    g_input.add_argument('--task-id', help='limit the analysis only ot one task', action='store')
    g_input.add_argument('-d', '--data-type', action='store', choices=['anat', 'func'])
    g_input.add_argument('--debug', action='store_true', default=False,
                         help='run debug version of workflow')
    g_input.add_argument('--nthreads', action='store', default=0,
                         type=int, help='number of threads')
    g_input.add_argument('--mem_mb', action='store', default=0,
                         type=int, help='try to limit requested memory to this number')
    g_input.add_argument('--write-graph', action='store_true', default=False,
                         help='Write workflow graph.')
    g_input.add_argument('--use-plugin', action='store', default=None,
                         help='nipype plugin configuration file')
    g_input.add_argument('-w', '--work-dir', action='store',
                         default=op.join(os.getcwd(), 'work'))
    g_input.add_argument('-t', '--workflow-type', default='auto', required=False,
                         action='store', choices=['auto', 'ds005', 'ds054'],
                         help='specify workflow type manually')
    g_input.add_argument('--skip-native', action='store_true',
                         default=False,
                         help="don't output timeseries in native space")

    #  ANTs options
    g_ants = parser.add_argument_group('specific settings for ANTs registrations')
    g_ants.add_argument('--ants-nthreads', action='store', type=int, default=0,
                        help='number of threads that will be set in ANTs processes')
    g_ants.add_argument('--skull-strip-ants', dest="skull_strip_ants",
                        action='store_true',
                        help='use ANTs-based skull-stripping (default, slow))')
    g_ants.add_argument('--no-skull-strip-ants', dest="skull_strip_ants",
                        action='store_false',
                        help="don't use ANTs-based skull-stripping (use  AFNI instead, fast)")
    g_ants.set_defaults(skull_strip_ants=True)

    opts = parser.parse_args()
    create_workflow(opts)
Example #24
0
def main():
    """
    The utility for handling lxml addon.
    """

    parser = ArgumentParser(description="This specific script build lxml addon")

    parser.add_argument(
        "-t", "--test", action="store_true", dest="test", default=True, help="test if programs are properly installed"
    )

    translating = parser.add_argument_group("Translations Options", "Everything around translations for lxml addon.")
    building = parser.add_argument_group("Build Options", "Everything around lxml package.")

    translating.add_argument("-i", dest="init", default=False, choices=ALL_LINGUAS, help="create the environment")
    translating.add_argument("-u", dest="update", default=False, choices=ALL_LINGUAS, help="update the translation")

    building.add_argument(
        "-c",
        "--compile",
        action="store_true",
        dest="compilation",
        default=False,
        help="compile translation files for generating lxml package",
    )
    building.add_argument("-b", "--build", action="store_true", dest="build", default=False, help="build lxml package")
    building.add_argument(
        "-r",
        "--clean",
        action="store_true",
        dest="clean",
        default=False,
        help="remove files generated by building process",
    )

    args = parser.parse_args()

    if args.test:
        tests()

    if args.init:
        if sys.argv[2:] == ["all"]:
            sys.argv[2:] = ALL_LINGUAS
        init(sys.argv[2:])

    if args.update:
        if sys.argv[2:] == ["all"]:
            sys.argv[2:] = ALL_LINGUAS
        update(sys.argv[2:])

    if args.compilation:
        compilation()

    if args.build:
        build()

    if args.clean:
        clean()
Example #25
0
def build_argument_parser():
    """
    Builds the argument parser
    :return: the argument parser
    :rtype: ArgumentParser
    """
    opts = ArgumentParser()
    opts.add_argument(dest='filename', help='Filename or release name to guess', nargs='*')

    naming_opts = opts.add_argument_group("Naming")
    naming_opts.add_argument('-t', '--type', dest='type', default=None,
                             help='The suggested file type: movie, episode. If undefined, type will be guessed.')
    naming_opts.add_argument('-n', '--name-only', dest='name_only', action='store_true', default=False,
                             help='Parse files as name only, considering "/" and "\\" like other separators.')
    naming_opts.add_argument('-Y', '--date-year-first', action='store_true', dest='date_year_first', default=None,
                             help='If short date is found, consider the first digits as the year.')
    naming_opts.add_argument('-D', '--date-day-first', action='store_true', dest='date_day_first', default=None,
                             help='If short date is found, consider the second digits as the day.')
    naming_opts.add_argument('-L', '--allowed-languages', action='append', dest='allowed_languages',
                             help='Allowed language (can be used multiple times)')
    naming_opts.add_argument('-C', '--allowed-countries', action='append', dest='allowed_countries',
                             help='Allowed country (can be used multiple times)')
    naming_opts.add_argument('-E', '--episode-prefer-number', action='store_true', dest='episode_prefer_number',
                             default=False,
                             help='Guess "serie.213.avi" as the episode 213. Without this option, '
                                  'it will be guessed as season 2, episode 13')
    naming_opts.add_argument('-T', '--expected-title', action='append', dest='expected_title',
                             help='Expected title to parse (can be used multiple times)')
    naming_opts.add_argument('-G', '--expected-group', action='append', dest='expected_group',
                             help='Expected release group (can be used multiple times)')

    input_opts = opts.add_argument_group("Input")
    input_opts.add_argument('-f', '--input-file', dest='input_file', default=False,
                            help='Read filenames from an input text file. File should use UTF-8 charset.')

    output_opts = opts.add_argument_group("Output")
    output_opts.add_argument('-v', '--verbose', action='store_true', dest='verbose', default=False,
                             help='Display debug output')
    output_opts.add_argument('-P', '--show-property', dest='show_property', default=None,
                             help='Display the value of a single property (title, series, video_codec, year, ...)')
    output_opts.add_argument('-a', '--advanced', dest='advanced', action='store_true', default=False,
                             help='Display advanced information for filename guesses, as json output')
    output_opts.add_argument('-j', '--json', dest='json', action='store_true', default=False,
                             help='Display information for filename guesses as json output')
    output_opts.add_argument('-y', '--yaml', dest='yaml', action='store_true', default=False,
                             help='Display information for filename guesses as yaml output')



    information_opts = opts.add_argument_group("Information")
    information_opts.add_argument('-p', '--properties', dest='properties', action='store_true', default=False,
                                  help='Display properties that can be guessed.')
    information_opts.add_argument('-V', '--values', dest='values', action='store_true', default=False,
                                  help='Display property values that can be guessed.')
    information_opts.add_argument('--version', dest='version', action='store_true', default=False,
                                  help='Display the guessit version.')

    return opts
Example #26
0
def parse_options(argv):
    """
    @author: Jonathan Sonntag
    @author: Arne Neumann
    
    Parameters
    ----------
    argv : list of str
        a list of command line arguments (usually from sys.argv[1:])

    Returns
    -------
    parser : argparse.ArgumentParser
        the argument parser generated from the command line input
    args : argparse.ArgumentParser
        the parsed command line arguments
    """
    parser = ArgumentParser(prog='pocores')

    input_options = parser.add_argument_group("Input Options",
        "These options allow you to specify input options")
    input_options.add_argument('-i', '--input', default=sys.stdin,
        type=FileType('r'), dest='input',
        help='Specify the input file', metavar='FILENAME')
    input_options.add_argument('-c', '--input_format', dest='informat',
        default='2009',
        help=('Specify the CoNLL input file format: 2009 or 2010. '
              'default: 2009'))

    coref_options = parser.add_argument_group("Coreference Options",
        "Change coreference resolution parameters without touching the code.")
    coref_options.add_argument('-w', '--weights', dest='weights',
        help=('Specify the coreference filter weights (7 comma separated'
              ' integers, e.g. "8,2,8,3,2,7,0"'))
    coref_options.add_argument('-d', '--max_sent_dist', dest='max_sent_dist',
        help=('Specify how many preceding sentences will be considered for'
            ' finding antecedents'))

    output_options = parser.add_argument_group("Output Options",
        "These options allow you to specify various output options")
    output_options.add_argument('-o', '--output', dest='output_dest',
        nargs='?', default=sys.stdout,
        help=('Specify the output file (output folder in case of brat) to write to.'),
        metavar='FILENAME')
    output_options.add_argument('-f', '--output_format', dest='outformat',
        default='bracketed',
        help=('Specify format the output shall be printed in. Format can be one'
        ' of the following: bracketed, brat, xml'),
        metavar='OUTFORMAT')

    eval_options = parser.add_argument_group("Evaluation/Debug Options")
    eval_options.add_argument('--debug', action='store_true',
        help=('print additional debugging information'))
    eval_options.add_argument('-e', '--eval', dest='eval_file',
        help=(('evaluate Pocores coreference resolution against *.mmax gold '
               'standard file')))

    return parser, parser.parse_args(argv)
def parseArgs():
    parser = ArgumentParser(
        description="Extracts core orthologous sequences from clustered bidirectional best hits (BBH)", add_help=False
    )
    req = parser.add_argument_group("Required")
    req.add_argument(
        "-i",
        "--infile",
        metavar="FILE",
        required=True,
        help="tab-delimited input listing clustered sequence identifiers",
    )
    req.add_argument(
        "-d",
        "--indir",
        metavar="DIR",
        required=True,
        help="directory of FastA files containing sequence files used to generate the infile",
    )
    opt = parser.add_argument_group("Optional")
    opt.add_argument(
        "-c",
        "--core",
        metavar="FLOAT",
        type=float,
        default=1,
        help="fraction of samples a sequence must be in (per cluster) to be considered core [1.0]",
    )
    opt.add_argument(
        "-e",
        "--ext",
        metavar="STR",
        type=str,
        default=".fa",
        help="file extension to append to each extracted FastA file [.fa]",
    )
    opt.add_argument("-h", "--help", action="help", help="show this help message and exit")
    opt.add_argument(
        "-o", "--outpath", metavar="PATH", default=None, help="output directory [./BBH.clust.extracts--<date>_<time>]"
    )
    opt.add_argument(
        "-p",
        "--pref",
        metavar="STR",
        type=str,
        default="",
        help="prefix to discard before extracting sequence names from input FastA files [None]",
    )
    opt.add_argument(
        "-s",
        "--suff",
        metavar="STR",
        type=str,
        default=".faa",
        help="suffix to discard when extracting sequence names from input FastA files [.faa]",
    )
    return parser.parse_args()
Example #28
0
    def prepare_arguments(self):
        """
        Parse the parameters and prepare properly to me use.
        """
        parser = ArgumentParser(description="Generate a report of the test.",
                                prog="pddoctest",
                                usage="%(prog)s [options] [arguments]",
                                add_help=False)
        args_group = parser.add_argument_group("Arguments")
        args_group.add_argument('args', nargs="...", metavar="class.method",
                            help="Class of test or method to test only a "
                            " specific place")

        opt_group = parser.add_argument_group("Options")
        opt_group.add_argument('-v', '--verbose', action='store_true',
                               help="Verbose output")
        opt_group.add_argument("-q", "--quiet", action='store_true',
                               help="Minimal output")
        opt_group.add_argument('-f', '--failfast', action='store_true',
                               help="Stop on first failure")
        opt_group.add_argument('-c', '--catch', action='store_true',
                               help="Catch control-C and display results")
        opt_group.add_argument('-h', '--help', action='help',
                               help='Show this help message and exit')
        opt_group.add_argument('--version', action='version',
                               help="Show program's version number and exit",
                               version="%s %s" % ("%(prog)s", __version__))
        opt_group.add_argument("-p", "--path", action='store', metavar="path",
                               help="List of path's separate by comma to add in"
                               " the PYTHON_PATH", dest="paths")

        django_group = parser.add_argument_group("Django")
        django_group.add_argument("--django", action='store_true',
                                  help="Tell to run django unit test")
        django_group.add_argument("--environ", metavar="module", action="store",
                                  dest="module", default=None,
                                  help="Using to set the DJANGO_SETTINGS_MODULE"
                                  " (default: <myproject_dirname>.settings)")

        ext_group = parser.add_argument_group("Extensions", "Used to format the"
                                              " output.")
        ext_group.add_argument("-e", action="store_true", dest="is_extension",
                               help="Confirm that you wish use the extension."
                               " (default: text_format)")
        ext_group.add_argument("--extension", default="text_format",
                               metavar="name", dest="extension", help="Name of"
                               " extension that generate the report.")
        ext_group.add_argument("--type", default=1, type=int, metavar="number",
                               dest="ext_type", help="Output type that the"
                               " extension will use to generate the report."
                               " To see the options set to 0 (default: 1)")
        ext_group.add_argument("--extfile", default="pddoctest.conf",
                               metavar="path", dest="ext_file",
                               help="Output type that the extension will use to"
                               " generate the report."
                               " To see the options set to 0 (default: 1)")
        return vars(parser.parse_args())
Example #29
0
def build_opts(transformers=None):
    opts = ArgumentParser()
    opts.add_argument(dest='filename', help='Filename or release name to guess', nargs='*')

    naming_opts = opts.add_argument_group("Naming")
    naming_opts.add_argument('-t', '--type', dest='type', default=None,
                             help='The suggested file type: movie, episode. If undefined, type will be guessed.')
    naming_opts.add_argument('-n', '--name-only', dest='name_only', action='store_true', default=False,
                             help='Parse files as name only. Disable folder parsing, extension parsing, and file content analysis.')
    naming_opts.add_argument('-c', '--split-camel', dest='split_camel', action='store_true', default=False,
                             help='Split camel case part of filename.')

    naming_opts.add_argument('-X', '--disabled-transformer', action='append', dest='disabled_transformers',
                             help='Transformer to disable (can be used multiple time)')

    output_opts = opts.add_argument_group("Output")
    output_opts.add_argument('-v', '--verbose', action='store_true', dest='verbose', default=False,
                             help='Display debug output')
    output_opts.add_argument('-P', '--show-property', dest='show_property', default=None,
                             help='Display the value of a single property (title, series, videoCodec, year, type ...)'),
    output_opts.add_argument('-u', '--unidentified', dest='unidentified', action='store_true', default=False,
                             help='Display the unidentified parts.'),
    output_opts.add_argument('-a', '--advanced', dest='advanced', action='store_true', default=False,
                             help='Display advanced information for filename guesses, as json output')
    output_opts.add_argument('-y', '--yaml', dest='yaml', action='store_true', default=False,
                             help='Display information for filename guesses as yaml output (like unit-test)')
    output_opts.add_argument('-f', '--input-file', dest='input_file', default=False,
                             help='Read filenames from an input file.')
    output_opts.add_argument('-d', '--demo', action='store_true', dest='demo', default=False,
                             help='Run a few builtin tests instead of analyzing a file')

    information_opts = opts.add_argument_group("Information")
    information_opts.add_argument('-p', '--properties', dest='properties', action='store_true', default=False,
                                  help='Display properties that can be guessed.')
    information_opts.add_argument('-V', '--values', dest='values', action='store_true', default=False,
                                  help='Display property values that can be guessed.')
    information_opts.add_argument('-s', '--transformers', dest='transformers', action='store_true', default=False,
                                  help='Display transformers that can be used.')
    information_opts.add_argument('--version', dest='version', action='store_true', default=False,
                                  help='Display the guessit version.')

    webservice_opts = opts.add_argument_group("guessit.io")
    webservice_opts.add_argument('-b', '--bug', action='store_true', dest='submit_bug', default=False,
                                 help='Submit a wrong detection to the guessit.io service')

    other_opts = opts.add_argument_group("Other features")
    other_opts.add_argument('-i', '--info', dest='info', default='filename',
                            help='The desired information type: filename, video, hash_mpc or a hash from python\'s '
                            'hashlib module, such as hash_md5, hash_sha1, ...; or a list of any of '
                            'them, comma-separated')

    if transformers:
        for transformer in transformers:
            transformer.register_arguments(opts, naming_opts, output_opts, information_opts, webservice_opts, other_opts)

    return opts, naming_opts, output_opts, information_opts, webservice_opts, other_opts
Example #30
0
def parse_args(argv):
    '''Command line options.'''

    # program_name = os.path.basename(argv[0])
    program_version = "v%s" % __version__
    program_build_date = str(__updated__)
    program_version_message = '%%(prog)s %s (%s)' % (program_version,
                                                     program_build_date)
    program_shortdesc = 'SpySMAC - A Sat Solver configurator using pySMAC'

    program_license = '''%s

  Created by user_name on %s.
  Copyright 2015 AAD Group Freiburg. All rights reserved.

  Licensed under GPLv2
  http://www.gnu.org/licenses/gpl-2.0.html

  Distributed on an "AS IS" basis without warranties
  or conditions of any kind, either express or implied.

USAGE
''' % (program_shortdesc, str(__date__))

    # Setup argument parser
    parser = ArgumentParser(description=program_license,
                            formatter_class=ArgumentDefaultsHelpFormatter)

    req_params = parser.add_argument_group("Required")

    req_params.add_argument("-i", "--inputdir", required=True,
                            help="input directory, use the directory you\
                            specified as output for SpySMAC_run.py")

    req_params.add_argument("-o", "--outputdir", required=True,
                            help="output directory")
    
    opt_params = parser.add_argument_group("Optional")
    
    opt_params.add_argument("-d", "--disable_fanova", action="store_true", default=False,
                            help="disables fANOVA")
    
    opt_params.add_argument("-m", "--memlimit_fanova", default=2024,
                            help="sets memory limit in MB for fANOVA")
    
    opt_params.add_argument("-n", "--num_params", default=10,
                            help="number of most important parameters in fANOVA analysis")

    opt_params.add_argument("-t", "--texstyle", default="article", help="imports a tex template. \n"
			    "Usage: -t ijcai13, -t aaai or -t llncs \n")

    # Process arguments
    args = parser.parse_args(argv[1:])

    return(vars(args))
Example #31
0
            raise ArgumentTypeError("%r is not a positive int" % (x, ))
        return x

    def restricted_float(x):  #avoid nonsense values for the threshold
        x = float(x)
        if x < 0.0 or x > 1.0:
            raise ArgumentTypeError("%r not in range [0.0, 1.0]" % (x, ))
        return x

    parser = ArgumentParser(
        description=
        """The AWoL-MRF algorithm organizes the low-confidence voxels in patches. 
                            In each patch, the labels for these voxels are updated in a sequence given
                            by Prim's algorithm using the Markov Random Field potentials."""
    )
    pg = parser.add_argument_group("AWoL-MRF parameters")
    pg.add_argument("-b",
                    "--beta",
                    type=float,
                    default=-.2,
                    help="[default = %(default)s]")
    pg.add_argument("-p",
                    "--patch_length",
                    type=positive_int,
                    default=5,
                    help="[default = %(default)s]")
    pg.add_argument("-r",
                    "--mixing_ratio",
                    type=positive_int,
                    default=10,
                    help="[default = %(default)s]")
Example #32
0
    def __init__(self):

        option_types = ["Boolean", "Integer", "String", "Real"]
        option_types.extend([x+"Vector" for x in option_types])


        self.option_type_names = defaultdict() #Option types to the names of the actual options.


        parser = ArgumentParser(description="This class is used to generate Rosetta templates for use in any IDE. "
                                            "See the README for more detailed instructions.  ")

        required = parser.add_argument_group("Required")

        required.add_argument("--app_name", "-a",
                            help = "The name of the app.",
                            required = True)





        required.add_argument("--mover_name", "-c",
                            help = "The name of the Mover you are calling for JD2",
                            required = True)

        required.add_argument("--brief", "-b",
                            help = "A brief description of the app.  Enclose in quotes.",
                            required = True)

        required.add_argument("--mover_namespace",
                            help = "Mover namespace for JD2 to add. Will add this hh file for include",
                            nargs='*',
                            default = [])



        optional = parser.add_argument_group("Optional")
        optional.add_argument("--pilot", "-p",
                            help = "Signify that this is a pilot app",
                            default = False,
                            action = "store_true")

        optional.add_argument("--user_name",
                            help = "User name if Pilot app")

        optional.add_argument("--app_dir",
                            help = "Any app DIR if public app or directory in pilot app user name directory",
                            nargs = '*',
                            default = [] )


        optional.add_argument("--app_options",
                            help = "Register needed app options. "
                                   "in:file:s and in:file:l are used by default",
                            nargs="*",
                            default=["in::file::s", "in::file::l"])


        new_opts = parser.add_argument_group("Optional list of app options. (not options_rosetta.py opts)  Ex: antibody::graft_L1  "
                                             "(Only recommended for Pilot Apps)")


        for arg_type in option_types:
            #arg_type = arg_type.lower()
            self.option_type_names[arg_type] = arg_type.lower()+"_opt"

            new_opts.add_argument("--"+arg_type.lower()+"_opt",
                                  help = "Optional List of local new "+arg_type+" options with namespaces.  Ex: grid::grid_length",
                                  nargs = '*',
                                  default = [])


        GenerateRosettaTemplates.__init__(self, "application", parser)

        self.options.class_name = self.options.mover_name #Same thing, different name.

        ##Extend Matching
        self.replacement["--app_name--"] = lambda: self.get_option("app_name", fail_on_none=True)
        self.replacement["--app_options--"] = lambda: self.get_app_options()
        self.replacement["--new_app_options_out--"] = lambda: self.get_new_app_options_out()
        self.replacement["--new_app_options_in--"] = lambda: self.get_new_app_options_in()
        self.replacement["--mover_namespace--"] = lambda: self.get_mover_namespace()
        self.replacement["--mover_path--"] = lambda: self.get_mover_path()
import whois
from argparse import ArgumentParser

parser = ArgumentParser(description="",
                        epilog="""This is a description usage 
    python domain_checker.py -d google.com """)

req_parser = parser.add_argument_group('Required Argument')

req_parser.add_argument('-d',
                        '--domain',
                        dest='domain',
                        type=str,
                        help="specify domain name")

req_parser.add_argument('-l',
                        '--list',
                        dest='list',
                        type=str,
                        help="specify the domain list")

req_parser.add_argument('-o',
                        '--output',
                        dest='output',
                        type=str,
                        help="specify output filename")

args = parser.parse_args()

domain = args.domain
list_filename = args.list
def build_argparser():
    parser = ArgumentParser()

    general = parser.add_argument_group('General')
    general.add_argument('-i', '--input', metavar="PATH", default='0',
                         help="(optional) Path to the input video " \
                         "('0' for the camera, default)")
    general.add_argument('-o',
                         '--output',
                         metavar="PATH",
                         default="",
                         help="(optional) Path to save the output video to")
    general.add_argument('--no_show',
                         action='store_true',
                         help="(optional) Do not display output")
    general.add_argument('-tl',
                         '--timelapse',
                         action='store_true',
                         help="(optional) Auto-pause after each frame")

    models = parser.add_argument_group('Models')
    models.add_argument('-m_fd',
                        metavar="PATH",
                        default="",
                        required=True,
                        help="Path to the Face Detection model XML file")
    models.add_argument(
        '-m_lm',
        metavar="PATH",
        default="",
        required=True,
        help="Path to the Facial Landmarks Regression model XML file")
    models.add_argument('-m_hp',
                        metavar="PATH",
                        default="",
                        required=True,
                        help="Path to the Head Pose model XML file")

    infer = parser.add_argument_group('Inference options')
    infer.add_argument('-d_fd', default='CPU', choices=DEVICE_KINDS,
                       help="(optional) Target device for the " \
                       "Face Detection model (default: %(default)s)")
    infer.add_argument('-d_lm', default='CPU', choices=DEVICE_KINDS,
                       help="(optional) Target device for the " \
                       "Facial Landmarks Regression model (default: %(default)s)")
    infer.add_argument('-d_hp', default='CPU', choices=DEVICE_KINDS,
                       help="(optional) Target device for the " \
                       "Head Pose model (default: %(default)s)")
    infer.add_argument('-d_reid', default='CPU', choices=DEVICE_KINDS,
                       help="(optional) Target device for the " \
                       "Face Reidentification model (default: %(default)s)")
    infer.add_argument('-l', '--cpu_lib', metavar="PATH", default="",
                       help="(optional) For MKLDNN (CPU)-targeted custom layers, if any. " \
                       "Path to a shared library with custom layers implementations")
    infer.add_argument('-c', '--gpu_lib', metavar="PATH", default="",
                       help="(optional) For clDNN (GPU)-targeted custom layers, if any. " \
                       "Path to the XML file with descriptions of the kernels")
    infer.add_argument('-v',
                       '--verbose',
                       action='store_true',
                       help="(optional) Be more verbose")
    infer.add_argument(
        '-pc',
        '--perf_stats',
        action='store_true',
        help="(optional) Output detailed per-layer performance stats")
    infer.add_argument('-t_fd', metavar='[0..1]', type=float, default=0.6,
                       help="(optional) Probability threshold for face detections" \
                       "(default: %(default)s)")
    infer.add_argument('-t_id', metavar='[0..1]', type=float, default=0.3,
                       help="(optional) Cosine distance threshold between two vectors " \
                       "for face identification (default: %(default)s)")
    infer.add_argument('-exp_r_fd', metavar='NUMBER', type=float, default=1.15,
                       help="(optional) Scaling ratio for bboxes passed to face recognition " \
                       "(default: %(default)s)")

    return parser
Example #35
0
                                 sg.__contact__))

    group_verbose = parser.add_mutually_exclusive_group()

    group_verbose.add_argument("--quiet",
                               action='store_true',
                               help="Disable the verbosity")

    group_verbose.add_argument("--debug",
                               action='store_true',
                               help="Highest level of verbosity")

    # Add arguments for input/output files
    # ------------------------------------

    group_io = parser.add_argument_group('Files')

    group_io.add_argument("-i",
                          metavar="file",
                          required=True,
                          help='Input annotated file name.')

    group_io.add_argument("-o",
                          metavar="file",
                          help='Output annotated file name.')

    # Add arguments for the options
    # -----------------------------

    group_opt = parser.add_argument_group('Options')
Example #36
0
def getOptions():
    from argparse import ArgumentParser

    prs = ArgumentParser(
        description="Compare folders and report on hard links",
        epilog="""WARNING : Both folders must be in the same file system.
                  Providing a folder twice will produce all duplicates.""",
    )

    prs.add_argument("-v", "--version", action="version", version=__version__)

    grp = prs.add_argument_group("Comparison action")
    actions = grp.add_mutually_exclusive_group(required=True)
    actions.add_argument(
        "-o",
        "--or",
        help="Report inodes found in any of the folders",
        action="store_const",
        dest="action",
        const=Actions.OR,
    )
    actions.add_argument(
        "-a",
        "--and",
        help="Report inodes found in every folder",
        action="store_const",
        dest="action",
        const=Actions.AND,
    )
    actions.add_argument(
        "-u",
        "--unique",
        help="Report inodes found in only one of the folders",
        action="store_const",
        dest="action",
        const=Actions.UNIQUE,
    )

    prs.add_argument(
        "-x",
        "--exclude",
        help="Exclude files whose name match the given regex",
        action="append",
        default=[],
    )

    prs.add_argument(
        "FOLDER",
        help="Folder to search for links",
    )

    prs.add_argument(
        "FOLDERS",
        nargs="+",
        metavar="FOLDER",
    )

    ops = prs.parse_args()
    ops.FOLDERS.append(ops.FOLDER)

    ops.exclude = [re.compile(r) for r in ops.exclude]

    return ops
Example #37
0
def parse_args_and_arch(
    parser: argparse.ArgumentParser,
    input_args: List[str] = None,
    parse_known: bool = False,
    suppress_defaults: bool = False,
    modify_parser: Optional[Callable[[argparse.ArgumentParser], None]] = None,
):
    """
    Args:
        parser (ArgumentParser): the parser
        input_args (List[str]): strings to parse, defaults to sys.argv
        parse_known (bool): only parse known arguments, similar to
            `ArgumentParser.parse_known_args`
        suppress_defaults (bool): parse while ignoring all default values
        modify_parser (Optional[Callable[[ArgumentParser], None]]):
            function to modify the parser, e.g., to set default values
    """
    if suppress_defaults:
        # Parse args without any default values. This requires us to parse
        # twice, once to identify all the necessary task/model args, and a second
        # time with all defaults set to None.
        args = parse_args_and_arch(
            parser,
            input_args=input_args,
            parse_known=parse_known,
            suppress_defaults=False,
        )
        suppressed_parser = argparse.ArgumentParser(add_help=False,
                                                    parents=[parser])
        suppressed_parser.set_defaults(
            **{k: None
               for k, v in vars(args).items()})
        args = suppressed_parser.parse_args(input_args)
        return argparse.Namespace(
            **{k: v
               for k, v in vars(args).items() if v is not None})

    from fairseq.models import ARCH_MODEL_REGISTRY, ARCH_CONFIG_REGISTRY

    if modify_parser is not None:
        modify_parser(parser)

    # The parser doesn't know about model/criterion/optimizer-specific args, so
    # we parse twice. First we parse the model/criterion/optimizer, then we
    # parse a second time after adding the *-specific arguments.
    # If input_args is given, we will parse those args instead of sys.argv.
    args, _ = parser.parse_known_args(input_args)

    # Add model-specific args to parser.
    if hasattr(args, "arch"):
        model_specific_group = parser.add_argument_group(
            "Model-specific configuration",
            # Only include attributes which are explicitly given as command-line
            # arguments or which have default values.
            argument_default=argparse.SUPPRESS,
        )
        ARCH_MODEL_REGISTRY[args.arch].add_args(model_specific_group)

    # Add *-specific args to parser.
    from fairseq.registry import REGISTRIES

    for registry_name, REGISTRY in REGISTRIES.items():
        choice = getattr(args, registry_name, None)
        if choice is not None:
            cls = REGISTRY["registry"][choice]
            if hasattr(cls, "add_args"):
                cls.add_args(parser)
    if hasattr(args, "task"):
        from fairseq.tasks import TASK_REGISTRY

        TASK_REGISTRY[args.task].add_args(parser)
    if getattr(args, "use_bmuf", False):
        # hack to support extra args for block distributed data parallelism
        from fairseq.optim.bmuf import FairseqBMUF

        FairseqBMUF.add_args(parser)

    # Modify the parser a second time, since defaults may have been reset
    if modify_parser is not None:
        modify_parser(parser)

    # Parse a second time.
    if parse_known:
        args, extra = parser.parse_known_args(input_args)
    else:
        args = parser.parse_args(input_args)
        extra = None

    # Post-process args.
    if hasattr(args,
               "max_sentences_valid") and args.max_sentences_valid is None:
        args.max_sentences_valid = args.max_sentences
    if hasattr(args, "max_tokens_valid") and args.max_tokens_valid is None:
        args.max_tokens_valid = args.max_tokens
    if getattr(args, "memory_efficient_fp16", False):
        args.fp16 = True

    # Apply architecture configuration.
    if hasattr(args, "arch"):
        ARCH_CONFIG_REGISTRY[args.arch](args)

    if parse_known:
        return args, extra
    else:
        return args
Example #38
0
def get_parser():
    """Build parser object"""
    from smriprep.cli.utils import ParseTemplates, output_space as _output_space
    from templateflow.api import templates
    from packaging.version import Version
    from ..__about__ import __version__
    from ..workflows.bold.resampling import NONSTANDARD_REFERENCES
    from .version import check_latest, is_flagged

    verstr = 'fmriprep v{}'.format(__version__)
    currentv = Version(__version__)
    is_release = not any((currentv.is_devrelease, currentv.is_prerelease,
                          currentv.is_postrelease))

    parser = ArgumentParser(
        description='FMRIPREP: fMRI PREProcessing workflows',
        formatter_class=ArgumentDefaultsHelpFormatter)

    # Arguments as specified by BIDS-Apps
    # required, positional arguments
    # IMPORTANT: they must go directly with the parser object
    parser.add_argument(
        'bids_dir',
        action='store',
        type=Path,
        help='the root folder of a BIDS valid dataset (sub-XXXXX folders should '
        'be found at the top level in this folder).')
    parser.add_argument(
        'output_dir',
        action='store',
        type=Path,
        help='the output path for the outcomes of preprocessing and visual '
        'reports')
    parser.add_argument(
        'analysis_level',
        choices=['participant'],
        help='processing stage to be run, only "participant" in the case of '
        'FMRIPREP (see BIDS-Apps specification).')

    # optional arguments
    parser.add_argument('--version', action='version', version=verstr)

    g_bids = parser.add_argument_group('Options for filtering BIDS queries')
    g_bids.add_argument(
        '--skip_bids_validation',
        '--skip-bids-validation',
        action='store_true',
        default=False,
        help=
        'assume the input dataset is BIDS compliant and skip the validation')
    g_bids.add_argument(
        '--participant_label',
        '--participant-label',
        action='store',
        nargs='+',
        help='a space delimited list of participant identifiers or a single '
        'identifier (the sub- prefix can be removed)')
    # Re-enable when option is actually implemented
    # g_bids.add_argument('-s', '--session-id', action='store', default='single_session',
    #                     help='select a specific session to be processed')
    # Re-enable when option is actually implemented
    # g_bids.add_argument('-r', '--run-id', action='store', default='single_run',
    #                     help='select a specific run to be processed')
    g_bids.add_argument('-t',
                        '--task-id',
                        action='store',
                        help='select a specific task to be processed')
    g_bids.add_argument(
        '--echo-idx',
        action='store',
        type=int,
        help='select a specific echo to be processed in a multiecho series')

    g_perfm = parser.add_argument_group('Options to handle performance')
    g_perfm.add_argument('--nthreads',
                         '--n_cpus',
                         '-n-cpus',
                         action='store',
                         type=int,
                         help='maximum number of threads across all processes')
    g_perfm.add_argument('--omp-nthreads',
                         action='store',
                         type=int,
                         default=0,
                         help='maximum number of threads per-process')
    g_perfm.add_argument(
        '--mem_mb',
        '--mem-mb',
        action='store',
        default=0,
        type=int,
        help='upper bound memory limit for FMRIPREP processes')
    g_perfm.add_argument(
        '--low-mem',
        action='store_true',
        help='attempt to reduce memory usage (will increase disk usage '
        'in working directory)')
    g_perfm.add_argument('--use-plugin',
                         action='store',
                         default=None,
                         help='nipype plugin configuration file')
    g_perfm.add_argument('--anat-only',
                         action='store_true',
                         help='run anatomical workflows only')
    g_perfm.add_argument('--boilerplate',
                         action='store_true',
                         help='generate boilerplate only')
    g_perfm.add_argument(
        '--ignore-aroma-denoising-errors',
        action='store_true',
        default=False,
        help='DEPRECATED (now does nothing, see --error-on-aroma-warnings) '
        '- ignores the errors ICA_AROMA returns when there are no '
        'components classified as either noise or signal')
    g_perfm.add_argument(
        '--error-on-aroma-warnings',
        action='store_true',
        default=False,
        help='Raise an error if ICA_AROMA does not produce sensible output '
        '(e.g., if all the components are classified as signal or noise)')
    g_perfm.add_argument(
        "-v",
        "--verbose",
        dest="verbose_count",
        action="count",
        default=0,
        help="increases log verbosity for each occurence, debug level is -vvv")
    g_perfm.add_argument('--debug',
                         action='store_true',
                         default=False,
                         help='DEPRECATED - Does not do what you want.')

    g_conf = parser.add_argument_group('Workflow configuration')
    g_conf.add_argument(
        '--ignore',
        required=False,
        action='store',
        nargs="+",
        default=[],
        choices=['fieldmaps', 'slicetiming', 'sbref'],
        help=
        'ignore selected aspects of the input dataset to disable corresponding '
        'parts of the workflow (a space delimited list)')
    g_conf.add_argument(
        '--longitudinal',
        action='store_true',
        help='treat dataset as longitudinal - may increase runtime')
    g_conf.add_argument(
        '--t2s-coreg',
        action='store_true',
        help=
        'If provided with multi-echo BOLD dataset, create T2*-map and perform '
        'T2*-driven coregistration. When multi-echo data is provided and this '
        'option is not enabled, standard EPI-T1 coregistration is performed '
        'using the middle echo.')
    g_conf.add_argument(
        '--output-spaces',
        nargs='+',
        action=ParseTemplates,
        help="""\
Standard and non-standard spaces to resample anatomical and functional images to. \
Standard spaces may be specified by the form \
``<TEMPLATE>[:res-<resolution>][:cohort-<label>][...]``, where ``<TEMPLATE>`` is \
a keyword (valid keywords: %s) or path pointing to a user-supplied template, and \
may be followed by optional, colon-separated parameters. \
Non-standard spaces (valid keywords: %s) imply specific orientations and sampling \
grids. \
Important to note, the ``res-*`` modifier does not define the resolution used for \
the spatial normalization.
For further details, please check out \
https://fmriprep.readthedocs.io/en/%s/spaces.html""" %
        (', '.join('"%s"' % s
                   for s in templates()), ', '.join(NONSTANDARD_REFERENCES),
         currentv.base_version if is_release else 'latest'))

    g_conf.add_argument(
        '--output-space',
        required=False,
        action='store',
        type=str,
        nargs='+',
        choices=[
            'T1w', 'template', 'fsnative', 'fsaverage', 'fsaverage6',
            'fsaverage5'
        ],
        help='DEPRECATED: please use ``--output-spaces`` instead.')
    g_conf.add_argument(
        '--template',
        required=False,
        action='store',
        type=str,
        choices=['MNI152NLin2009cAsym'],
        help='volume template space (default: MNI152NLin2009cAsym). '
        'DEPRECATED: please use ``--output-spaces`` instead.')
    g_conf.add_argument(
        '--template-resampling-grid',
        required=False,
        action='store',
        help='Keyword ("native", "1mm", or "2mm") or path to an existing file. '
        'Allows to define a reference grid for the resampling of BOLD images in template '
        'space. Keyword "native" will use the original BOLD grid as reference. '
        'Keywords "1mm" and "2mm" will use the corresponding isotropic template '
        'resolutions. If a path is given, the grid of that image will be used. '
        'It determines the field of view and resolution of the output images, '
        'but is not used in normalization. '
        'DEPRECATED: please use ``--output-spaces`` instead.')
    g_conf.add_argument(
        '--bold2t1w-dof',
        action='store',
        default=6,
        choices=[6, 9, 12],
        type=int,
        help='Degrees of freedom when registering BOLD to T1w images. '
        '6 degrees (rotation and translation) are used by default.')
    g_conf.add_argument(
        '--force-bbr',
        action='store_true',
        dest='use_bbr',
        default=None,
        help=
        'Always use boundary-based registration (no goodness-of-fit checks)')
    g_conf.add_argument(
        '--force-no-bbr',
        action='store_false',
        dest='use_bbr',
        default=None,
        help=
        'Do not use boundary-based registration (no goodness-of-fit checks)')
    g_conf.add_argument(
        '--medial-surface-nan',
        required=False,
        action='store_true',
        default=False,
        help=
        'Replace medial wall values with NaNs on functional GIFTI files. Only '
        'performed for GIFTI files mapped to a freesurfer subject (fsaverage or fsnative).'
    )
    g_conf.add_argument('--dummy-scans',
                        required=False,
                        action='store',
                        default=None,
                        type=int,
                        help='Number of non steady state volumes.')

    # ICA_AROMA options
    g_aroma = parser.add_argument_group(
        'Specific options for running ICA_AROMA')
    g_aroma.add_argument('--use-aroma',
                         action='store_true',
                         default=False,
                         help='add ICA_AROMA to your preprocessing stream')
    g_aroma.add_argument(
        '--aroma-melodic-dimensionality',
        action='store',
        default=-200,
        type=int,
        help='Exact or maximum number of MELODIC components to estimate '
        '(positive = exact, negative = maximum)')

    # Confounds options
    g_confounds = parser.add_argument_group(
        'Specific options for estimating confounds')
    g_confounds.add_argument(
        '--return-all-components',
        required=False,
        action='store_true',
        default=False,
        help=
        'Include all components estimated in CompCor decomposition in the confounds '
        'file instead of only the components sufficient to explain 50 percent of '
        'BOLD variance in each CompCor mask')
    g_confounds.add_argument(
        '--fd-spike-threshold',
        required=False,
        action='store',
        default=0.5,
        type=float,
        help=
        'Threshold for flagging a frame as an outlier on the basis of framewise '
        'displacement')
    g_confounds.add_argument(
        '--dvars-spike-threshold',
        required=False,
        action='store',
        default=1.5,
        type=float,
        help=
        'Threshold for flagging a frame as an outlier on the basis of standardised '
        'DVARS')

    #  ANTs options
    g_ants = parser.add_argument_group(
        'Specific options for ANTs registrations')
    g_ants.add_argument(
        '--skull-strip-template',
        action='store',
        default='OASIS30ANTs',
        type=_output_space,
        help='select a template for skull-stripping with antsBrainExtraction')
    g_ants.add_argument(
        '--skull-strip-fixed-seed',
        action='store_true',
        help='do not use a random seed for skull-stripping - will ensure '
        'run-to-run replicability when used with --omp-nthreads 1')

    # Fieldmap options
    g_fmap = parser.add_argument_group(
        'Specific options for handling fieldmaps')
    g_fmap.add_argument(
        '--fmap-bspline',
        action='store_true',
        default=False,
        help='fit a B-Spline field using least-squares (experimental)')
    g_fmap.add_argument(
        '--fmap-no-demean',
        action='store_false',
        default=True,
        help='do not remove median (within mask) from fieldmap')

    # SyN-unwarp options
    g_syn = parser.add_argument_group(
        'Specific options for SyN distortion correction')
    g_syn.add_argument(
        '--use-syn-sdc',
        action='store_true',
        default=False,
        help='EXPERIMENTAL: Use fieldmap-free distortion correction')
    g_syn.add_argument(
        '--force-syn',
        action='store_true',
        default=False,
        help='EXPERIMENTAL/TEMPORARY: Use SyN correction in addition to '
        'fieldmap correction, if available')

    # FreeSurfer options
    g_fs = parser.add_argument_group(
        'Specific options for FreeSurfer preprocessing')
    g_fs.add_argument(
        '--fs-license-file',
        metavar='PATH',
        type=Path,
        help=
        'Path to FreeSurfer license key file. Get it (for free) by registering'
        ' at https://surfer.nmr.mgh.harvard.edu/registration.html')

    # Surface generation xor
    g_surfs = parser.add_argument_group('Surface preprocessing options')
    g_surfs.add_argument('--no-submm-recon',
                         action='store_false',
                         dest='hires',
                         help='disable sub-millimeter (hires) reconstruction')
    g_surfs_xor = g_surfs.add_mutually_exclusive_group()
    g_surfs_xor.add_argument('--cifti-output',
                             action='store_true',
                             default=False,
                             help='output BOLD files as CIFTI dtseries')
    g_surfs_xor.add_argument(
        '--fs-no-reconall',
        '--no-freesurfer',
        action='store_false',
        dest='run_reconall',
        help='disable FreeSurfer surface preprocessing.'
        ' Note : `--no-freesurfer` is deprecated and will be removed in 1.2.'
        ' Use `--fs-no-reconall` instead.')

    g_other = parser.add_argument_group('Other options')
    g_other.add_argument(
        '-w',
        '--work-dir',
        action='store',
        type=Path,
        default=Path('work'),
        help='path where intermediate results should be stored')
    g_other.add_argument(
        '--resource-monitor',
        action='store_true',
        default=False,
        help=
        'enable Nipype\'s resource monitoring to keep track of memory and CPU usage'
    )
    g_other.add_argument(
        '--reports-only',
        action='store_true',
        default=False,
        help=
        'only generate reports, don\'t run workflows. This will only rerun report '
        'aggregation, not reportlet generation for specific nodes.')
    g_other.add_argument(
        '--run-uuid',
        action='store',
        default=None,
        help='Specify UUID of previous run, to include error logs in report. '
        'No effect without --reports-only.')
    g_other.add_argument('--write-graph',
                         action='store_true',
                         default=False,
                         help='Write workflow graph.')
    g_other.add_argument(
        '--stop-on-first-crash',
        action='store_true',
        default=False,
        help='Force stopping on first crash, even if a work directory'
        ' was specified.')
    g_other.add_argument(
        '--notrack',
        action='store_true',
        default=False,
        help='Opt-out of sending tracking information of this run to '
        'the FMRIPREP developers. This information helps to '
        'improve FMRIPREP and provides an indicator of real '
        'world usage crucial for obtaining funding.')
    g_other.add_argument('--sloppy',
                         action='store_true',
                         default=False,
                         help='Use low-quality tools for speed - TESTING ONLY')

    latest = check_latest()
    if latest is not None and currentv < latest:
        print("""\
You are using fMRIPrep-%s, and a newer version of fMRIPrep is available: %s.
Please check out our documentation about how and when to upgrade:
https://fmriprep.readthedocs.io/en/latest/faq.html#upgrading""" %
              (__version__, latest),
              file=sys.stderr)

    _blist = is_flagged()
    if _blist[0]:
        _reason = _blist[1] or 'unknown'
        print("""\
WARNING: Version %s of fMRIPrep (current) has been FLAGGED
(reason: %s).
That means some severe flaw was found in it and we strongly
discourage its usage.""" % (__version__, _reason),
              file=sys.stderr)

    return parser
Example #39
0
def get_parser():
    """Build parser object."""
    from pathlib import Path
    from argparse import ArgumentParser
    from argparse import RawTextHelpFormatter
    from niworkflows.utils.spaces import Reference, SpatialReferences, OutputReferencesAction
    from ..__about__ import __version__

    parser = ArgumentParser(
        description='sMRIPrep: Structural MRI PREProcessing workflows',
        formatter_class=RawTextHelpFormatter)

    # Arguments as specified by BIDS-Apps
    # required, positional arguments
    # IMPORTANT: they must go directly with the parser object
    parser.add_argument(
        'bids_dir',
        action='store',
        type=Path,
        help='the root folder of a BIDS valid dataset (sub-XXXXX folders should '
        'be found at the top level in this folder).')
    parser.add_argument(
        'output_dir',
        action='store',
        type=Path,
        help='the output path for the outcomes of preprocessing and visual '
        'reports')
    parser.add_argument(
        'analysis_level',
        choices=['participant'],
        help='processing stage to be run, only "participant" in the case of '
        'sMRIPrep (see BIDS-Apps specification).')

    # optional arguments
    parser.add_argument('--version',
                        action='version',
                        version='smriprep v{}'.format(__version__))

    g_bids = parser.add_argument_group('Options for filtering BIDS queries')
    g_bids.add_argument(
        '--participant-label',
        '--participant_label',
        action='store',
        nargs='+',
        help='a space delimited list of participant identifiers or a single '
        'identifier (the sub- prefix can be removed)')
    g_bids.add_argument(
        '--bids-filter-file',
        action='store',
        type=Path,
        metavar='PATH',
        help='a JSON file describing custom BIDS input filters using pybids '
        '{<suffix>:{<entity>:<filter>,...},...} '
        '(https://github.com/bids-standard/pybids/blob/master/bids/layout/config/bids.json)'
    )

    g_perfm = parser.add_argument_group('Options to handle performance')
    g_perfm.add_argument('--nprocs',
                         '--ncpus',
                         '--nthreads',
                         '--n_cpus',
                         '-n-cpus',
                         action='store',
                         type=int,
                         help='number of CPUs to be used.')
    g_perfm.add_argument('--omp-nthreads',
                         action='store',
                         type=int,
                         default=0,
                         help='maximum number of threads per-process')
    g_perfm.add_argument(
        '--mem-gb',
        '--mem_gb',
        action='store',
        default=0,
        type=float,
        help='upper bound memory limit for sMRIPrep processes (in GB).')
    g_perfm.add_argument(
        '--low-mem',
        action='store_true',
        help='attempt to reduce memory usage (will increase disk usage '
        'in working directory)')
    g_perfm.add_argument('--use-plugin',
                         action='store',
                         default=None,
                         help='nipype plugin configuration file')
    g_perfm.add_argument('--boilerplate',
                         action='store_true',
                         help='generate boilerplate only')
    g_perfm.add_argument(
        "-v",
        "--verbose",
        dest="verbose_count",
        action="count",
        default=0,
        help="increases log verbosity for each occurence, debug level is -vvv")

    g_conf = parser.add_argument_group('Workflow configuration')
    g_conf.add_argument(
        '--output-spaces',
        nargs='*',
        action=OutputReferencesAction,
        default=SpatialReferences(),
        help='paths or keywords prescribing output spaces - '
        'standard spaces will be extracted for spatial normalization.')
    g_conf.add_argument(
        '--longitudinal',
        action='store_true',
        help='treat dataset as longitudinal - may increase runtime')

    #  ANTs options
    g_ants = parser.add_argument_group(
        'Specific options for ANTs registrations')
    g_ants.add_argument(
        '--skull-strip-template',
        default='OASIS30ANTs',
        type=Reference.from_string,
        help='select a template for skull-stripping with antsBrainExtraction')
    g_ants.add_argument(
        '--skull-strip-fixed-seed',
        action='store_true',
        help='do not use a random seed for skull-stripping - will ensure '
        'run-to-run replicability when used with --omp-nthreads 1')
    g_ants.add_argument(
        '--skull-strip-mode',
        action='store',
        choices=('auto', 'skip', 'force'),
        default='auto',
        help='determiner for T1-weighted skull stripping (force ensures skull '
        'stripping, skip ignores skull stripping, and auto automatically '
        'ignores skull stripping if pre-stripped brains are detected).')

    # FreeSurfer options
    g_fs = parser.add_argument_group(
        'Specific options for FreeSurfer preprocessing')
    g_fs.add_argument(
        '--fs-license-file',
        metavar='PATH',
        type=Path,
        help=
        'Path to FreeSurfer license key file. Get it (for free) by registering'
        ' at https://surfer.nmr.mgh.harvard.edu/registration.html')
    g_fs.add_argument(
        '--fs-subjects-dir',
        metavar='PATH',
        type=Path,
        help='Path to existing FreeSurfer subjects directory to reuse. '
        '(default: OUTPUT_DIR/freesurfer)')

    # Surface generation xor
    g_surfs = parser.add_argument_group('Surface preprocessing options')
    g_surfs.add_argument('--no-submm-recon',
                         action='store_false',
                         dest='hires',
                         help='disable sub-millimeter (hires) reconstruction')
    g_surfs_xor = g_surfs.add_mutually_exclusive_group()

    g_surfs_xor.add_argument('--fs-no-reconall',
                             action='store_false',
                             dest='run_reconall',
                             help='disable FreeSurfer surface preprocessing.')

    g_other = parser.add_argument_group('Other options')
    g_other.add_argument(
        '-w',
        '--work-dir',
        action='store',
        type=Path,
        default=Path('work'),
        help='path where intermediate results should be stored')
    g_other.add_argument(
        '--fast-track',
        action='store_true',
        default=False,
        help='fast-track the workflow by searching for existing derivatives.')
    g_other.add_argument(
        '--resource-monitor',
        action='store_true',
        default=False,
        help=
        'enable Nipype\'s resource monitoring to keep track of memory and CPU usage'
    )
    g_other.add_argument(
        '--reports-only',
        action='store_true',
        default=False,
        help=
        'only generate reports, don\'t run workflows. This will only rerun report '
        'aggregation, not reportlet generation for specific nodes.')
    g_other.add_argument(
        '--run-uuid',
        action='store',
        default=None,
        help='Specify UUID of previous run, to include error logs in report. '
        'No effect without --reports-only.')
    g_other.add_argument('--write-graph',
                         action='store_true',
                         default=False,
                         help='Write workflow graph.')
    g_other.add_argument(
        '--stop-on-first-crash',
        action='store_true',
        default=False,
        help='Force stopping on first crash, even if a work directory'
        ' was specified.')
    g_other.add_argument(
        '--notrack',
        action='store_true',
        default=False,
        help='Opt-out of sending tracking information of this run to '
        'the sMRIPrep developers. This information helps to '
        'improve sMRIPrep and provides an indicator of real '
        'world usage crucial for obtaining funding.')
    g_other.add_argument('--sloppy',
                         action='store_true',
                         default=False,
                         help='Use low-quality tools for speed - TESTING ONLY')

    return parser
Example #40
0
class WeWalletApplication(Application):

    tasks = {
        'develop': 'bael.project.develop:Develop',
        'serve': 'wewallet.console.serve:Serve',
        'alembic-upgrade': 'wewallet.console.alembic:AlembicUpgrade',
        'alembic-revision': 'wewallet.console.alembic:AlembicRevision',
    }

    def create_parser(self):
        self.parser = ArgumentParser()
        self._add_task_group()
        self._add_logging_group()

    def _add_task_group(self):
        tasks = self.parser.add_argument_group(
            'Tasks',
            'Project related options',
        )

        group = tasks.add_mutually_exclusive_group()
        group.add_argument(
            '-d',
            '--develop',
            dest='task',
            help='Download requiretments.',
            action='store_const',
            const='develop',
        )
        group.add_argument(
            '-s',
            '--serve',
            dest='task',
            help='Start development server.',
            action='store_const',
            const='serve',
        )
        group.add_argument(
            '-u',
            '--alembic-upgrade',
            dest='task',
            help='Run migrations.',
            action='store_const',
            const='alembic-upgrade',
        )
        group.add_argument(
            '-r',
            '--alembic-revision',
            dest='task',
            help='Create migration.',
            action='store_const',
            const='alembic-revision',
        )

        tasks.add_argument(
            '-g',
            '--graph',
            dest='graph',
            help='Draw task dependency graph.',
            action="store_true",
        )

    def run_command_or_print_help(self, args):
        if args.task:
            task = self._get_task(args)
            try:
                try:
                    task.run()
                finally:
                    report_path = task.save_report()
            except:
                log.error('Error in %s' % (report_path, ))
                raise
            if args.graph:
                Graph(report_path).render()
        else:
            self.parser.print_help()

    def _get_task(self, args):
        url = self.tasks[args.task]
        return self.import_task(url)()
Example #41
0
                      'you have sourced %s/amber.sh (if you are using sh/ksh/'
                      'bash/zsh) or %s/amber.csh (if you are using csh/tcsh)' %
                      (amberhome, amberhome))
else:
    TitratableResidueList = residues.TitratableResidueList
    LineBuffer = residues._LineBuffer

parser = ArgumentParser(epilog='''This program will read a topology file and
                        generate a cpin file for constant pH simulations with
                        sander''', usage='%(prog)s [Options]')
parser.add_argument('-v', '--version', action='version', version='%s: %s' %
                    (parser.prog, __version__))
parser.add_argument('-d', '--debug', dest='debug', action='store_const',
                    help='Enable verbose tracebacks to debug this program',
                    const=True, default=False)
group = parser.add_argument_group('Output files')
group.add_argument('-o', '--output', dest='output', metavar='FILE',
                   help='Output file. Defaults to standard output')
group.add_argument('-op', '--output-prmtop', dest='outparm', metavar='FILE',
                   help='''For explicit solvent simulations, a custom set of
                   radii are necessary to obtain reasonable results for
                   carboxylate pKas (e.g., AS4 and GL4 residues). If specified,
                   this file will be the prmtop compatible with the reference
                   energies in the printed cpin file.''', default=None)
group = parser.add_argument_group('Required Arguments')
group.add_argument('-p', dest='prmtop', metavar='FILE', required=False,
                   help='Topology file to be used in constant pH simulation',
                   type=str, default='prmtop')
group = parser.add_argument_group('Simulation Options')
group.add_argument('-igb', dest='igb', metavar='IGB', required=False, type=int,
                   help='Generalized Born model which you intend to use to '
Example #42
0
def _validate_forecast_args(args):
    if args.forecast_option is None:
        err_msg = ("One of these arguments must be used: "
                   "-td/--today,-5d/--fivedays,-10d/-tendays,w/--weekend")
        print(f'{argparser.prog}: error:{err_msg}', file=sys.stderr)
        sys.exit


parsers = parser_loader.load("./weatherterm/parsers")

argparser = ArgumentParser(
    prog="weatherterm",
    description="Wwather info from weather.com on your terminal")

required = argparser.add_argument_group("required arguments")
required.add_argument(
    "-p",
    "--parser",
    choices=parsers.keys(),
    required=True,
    dest='parser',
    help=(
        'Specify which parser is going to be used to scrape weather information'
    ))

unit_values = [name.title() for name, value in Unit.__members__.items()]
Unit.__members__.items()
argparser.add_argument("-u", "--unit")

##to be continued
Example #43
0
file1 = None
file2 = None
file3 = None
hash1 = {}
FlipAlleles = 0  #Flag for if I need to flip the reference allele to be the derived allele down the road and whether I have to change any of the annotation information for this reason

#File1 Ex: /home/pg/michaelt/Data/ALL_MAPPING/Pools/PostMerge/mapping_pool_merged/GATK/AllPools.QCed.preGATK.QCed.samplesMerged.rmdup.BQSR.calmd.AllPoolsMerged.ChrAll.GATK.ReduceReads.UG.VQSR.SNP.PASS.wAA.DropOffTargetVariants.1kbWindow.95geno.recode.vcf.gz
#File2 Ex: /home/pg/michaelt/Data/ALL_MAPPING/Pools/PostMerge/mapping_pool_merged/GATK/AllPools.QCed.preGATK.QCed.samplesMerged.rmdup.BQSR.calmd.AllPoolsMerged.ChrAll.GATK.ReduceReads.UG.VQSR.SNP.PASS.wAA.WhiteOnly.AllPhenos.frq.gz
#File3 Ex: /home/pg/michaelt/Data/ALL_MAPPING/Pools/PostMerge/mapping_pool_merged/GATK/AllPools.QCed.preGATK.QCed.samplesMerged.rmdup.BQSR.calmd.AllPoolsMerged.ChrAll.GATK.ReduceReads.UG.VQSR.SNP.PASS.vcf.AnnovarFormat.genome_summary.csv

#Argument handling and parsing
#Parsing arguments
parser = ArgumentParser(add_help=False)

#Required arguments
required = parser.add_argument_group('required arguments:')
required.add_argument("--file1",
                      dest="file1",
                      help="location of file1",
                      required=True,
                      metavar="FILE1")
required.add_argument("--file2",
                      dest="file2",
                      help="location of file2",
                      required=True,
                      metavar="FILE2")
required.add_argument("--file3",
                      dest="file3",
                      help="location of file3",
                      required=True,
                      metavar="FILE3")
Example #44
0
def parse_options():

    description = "This example script is an experimental implementation of the HANA's hdbsql tool. It" \
                  "focuses on the authentication and connection to the HANA server, and it't not meant" \
                  "to implement the full capabilities offered by hdbsql or any other HDB client interface."

    usage = "%(prog)s [options] -d <remote host>"

    parser = ArgumentParser(usage=usage,
                            description=description,
                            epilog=pysap.epilog)

    target = parser.add_argument_group("Target")
    target.add_argument("-d",
                        "--remote-host",
                        dest="remote_host",
                        help="Remote host")
    target.add_argument("-p",
                        "--remote-port",
                        dest="remote_port",
                        type=int,
                        default=39015,
                        help="Remote port [%(default)d]")
    target.add_argument(
        "--route-string",
        dest="route_string",
        help="Route string for connecting through a SAP Router")

    tls = parser.add_argument_group("TLS")
    tls.add_argument("--tls", dest="tls", action="store_true", help="Use TLS")
    tls.add_argument("--tls-no-trust-cert",
                     dest="tls_cert_trust",
                     action="store_false",
                     help="Do not trust the TLS certificate and validate it")
    tls.add_argument(
        "--tls-cert-file",
        dest="tls_cert_file",
        help=
        "Path to the certificate file to use when validating server's TLS certificate."
    )
    tls.add_argument(
        "--tls-check-hostname",
        dest="tls_check_hostname",
        action="store_true",
        help="Validate the hostname provided in the TLS certificate")

    auth = parser.add_argument_group("Authentication")
    auth.add_argument(
        "-m",
        "--method",
        dest="method",
        default="SCRAMSHA256",
        help="Authentication method. Supported methods: {} [%(default)s]".
        format(",".join(saphdb_auth_methods.keys())))
    auth.add_argument("--username", dest="username", help="User name")
    auth.add_argument("--password", dest="password", help="Password")
    auth.add_argument("--jwt-file",
                      dest="jwt_file",
                      metavar="FILE",
                      help="File to read a signed JWT from")
    auth.add_argument("--jwt-key",
                      dest="jwt_key",
                      metavar="FILE",
                      help="File to read the private key to sign the JWT")
    auth.add_argument("--jwt-issuer",
                      dest="jwt_issuer",
                      help="JWT signature issuer")
    auth.add_argument(
        "--jwt-claim",
        dest="jwt_claim",
        default="user_name",
        help="Name of the JWT claim to map username [%(default)s]")
    auth.add_argument(
        "--saml-assertion",
        dest="saml_assertion",
        metavar="FILE",
        help="File to read a signed SAML 2.0 bearer assertion from")
    auth.add_argument("--session-cookie",
                      dest="session_cookie",
                      help="Session Cookie")
    auth.add_argument("--pid",
                      dest="pid",
                      default="0",
                      help="Process ID [%(default)s]")
    auth.add_argument("--hostname", dest="hostname", help="Hostname")

    misc = parser.add_argument_group("Misc options")
    misc.add_argument("-v",
                      "--verbose",
                      dest="verbose",
                      action="store_true",
                      help="Verbose output")

    options = parser.parse_args()

    if not options.remote_host:
        parser.error("Remote host is required")

    if options.method not in saphdb_auth_methods:
        parser.error("Invalid authentication method")
    if not options.username and options.method not in ["SAML"]:
        parser.error("Username needs to be provided")

    if options.method == "JWT":
        if not (options.jwt_file or (options.jwt_key and options.jwt_issuer)):
            parser.error(
                "JWT file or a signing private key and issuer need to be provided for JWT authentication"
            )
        if options.jwt_key and not py_jwt:
            parser.error("JWT crafting requires the PyJWT library installed")

    if options.method == "SAML" and not options.saml_assertion:
        parser.error(
            "SAML bearer assertion file need to be provided for SAML authentication"
        )

    if options.method in ["SCRAMSHA256", "SCRAMPBKDF2SHA256"
                          ] and not options.password:
        parser.error(
            "Password need to be provided for SCRAM-based authentication")

    if options.method == "SessionCookie" and not options.session_cookie:
        parser.error(
            "Session cookie need to be provided for SessionCookie authentication"
        )

    return options
Example #45
0
"""Generate xkcd-style multiple-word passwords."""

from argparse import ArgumentParser
from itertools import izip_longest
from os.path import dirname
from os.path import join
from os.path import realpath
from pyperclip import copy
from random import randint
from random import seed
from sys import stdout

localpath = dirname(realpath(__file__))

parser = ArgumentParser(description='Generate an XKCD-style password.')
outopts = parser.add_argument_group('Output options')
outopts.add_argument('words',
                     nargs='?',
                     metavar='WORDCOUNT',
                     type=int,
                     default=4,
                     help='Number of words to produce (default = 4)')
outopts.add_argument('-c',
                     '--clip',
                     help='Send result to clipboard rather than sys.stdout',
                     action='store_true')
outopts.add_argument('-s',
                     '--seed',
                     metavar='SEEDDATA',
                     help='Seed for random function.',
                     action='store')
Example #46
0
def get_args():
    parser = ArgumentParser(
        description="""Parse sequencing_summary.txt files 
                       and .paf files to find split reads 
                       in an Oxford Nanopore Dataset""",
        add_help=False)
    general = parser.add_argument_group(
        title='General options')
    general.add_argument("-h", "--help",
                         action="help",
                         help="Show this help and exit"
                         )
    general.add_argument("-d", "--distance",
                         help='''Specify the maximum distance between consecutive mappings.
                              This is the difference between \'Target Start\' and \'Target End\' in 
                              the paf file. Defaults to 10000''',
                         type=int,
                         default=10000,
                         metavar=''
                         )
    general.add_argument("-t", "--top",
                         help='''Specify how many top processed reads to display. Default is 10''',
                         type=int,
                         default=10,
                         metavar=''
                         )
    general.add_argument("-a", "--alt",
                         help='''Exclude alternate assemblies''',
                         action="store_false",
                         default=True,
                         )
    general.add_argument("-D", "--debug",
                         help='''Write debug.csv file to current working directory''',
                         action="store_true",
                         default=False,
                         )
    in_args = parser.add_argument_group(
        title='Input sources'
    )
    in_args.add_argument("-s", "--summary",
                         help="A sequencing summary file generated by albacore",
                         type=full_path,
                         default="",
                         required=True,
                         metavar=''
                         )
    in_args.add_argument("-p", "--paf",
                         help="A paf file generated by minimap2",
                         type=full_path,
                         default='',
                         required=True,
                         metavar=''
                         )
    out_args = parser.add_argument_group(
        title='Output files'
    )
    out_args.add_argument('-F', '--out-fused',
                          help='''Specify name of the fused_read file. This file only contains chains of reads. 
                               Defaults to \'fused_reads.txt\'''',
                          type=full_path,
                          default='fused_reads.txt',
                          metavar=''
                          )
    return parser.parse_args()
Example #47
0
def main(argv):
    parser = ArgumentParser(
        description=
        """This program will convert a BIDS MRI dataset to a NIDM-Experiment RDF document.  It will parse phenotype information and simply store variables/values and link to the associated json data dictionary file.\n\n
Example 1: No variable->term mapping, simple BIDS dataset conversion which will add nidm.ttl file to BIDS dataset and .bidsignore file:
\t BIDSMRI2NIDM.py -d [root directory of BIDS dataset] -bidsignore
Example 2: No variable->term mapping, simple BIDS dataset conversion but storing nidm file somewhere else: \n
\t BIDSMRI2NIDM.py -d [root directory of BIDS dataset] -o [PATH/nidm.ttl] \n\n
Example 3: BIDS conversion with variable->term mappings, no existing mappings available, uses Interlex for terms and github, adds nidm.ttl file BIDS dataset and .bidsignore file: \n
\t BIDSMRI2NIDM.py -d [root directory of BIDS dataset] -ilxkey [Your Interlex key] -github [username token] -bidsignore  \n\n
Example 4: BIDS conversion with variable->term mappings, no existing mappings available, uses Interlex + NIDM OWL file for terms and github, adds nidm.ttl file BIDS dataset and .bidsignore file: \n
\t BIDSMRI2NIDM.py -d [root directory of BIDS dataset] -ilxkey [Your Interlex key] -github [username token] -owl -bidsignore  \n\n
Example 5 (FULL MONTY): BIDS conversion with variable->term mappings, uses JSON mapping file first then uses Interlex + NIDM OWL file for terms and github, adds nidm.ttl file BIDS dataset and .bidsignore file: \n
\t BIDSMRI2NIDM.py -d [root directory of BIDS dataset] -json_map [Your JSON file] -ilxkey [Your Interlex key] -github [username token] -owl -bidsignore\n
\t json mapping file has entries for each variable with mappings to formal terms.  Example:  \n
    \t { \n
    \t\t \"site\": { \n
	\t\t \"definition\": \"Number assigned to site\", \n
	\t\t \"label\": \"site_id (UC Provider Care)\", \n
	\t\t \"url\": \"http://uri.interlex.org/NDA/uris/datadictionary/elements/2031448\" \n
	\t\t }, \n
	\t\t \"gender\": { \n
	\t\t \"definition\": \"ndar:gender\", \n
	\t\t \"label\": \"ndar:gender\", \n
	\t\t \"url\": \"https://ndar.nih.gov/api/datadictionary/v2/dataelement/gender\" \n
	\t\t } \n
    \t }""",
        formatter_class=RawTextHelpFormatter)

    parser.add_argument('-d',
                        dest='directory',
                        required=True,
                        help="Path to BIDS dataset directory")
    parser.add_argument('-jsonld',
                        '--jsonld',
                        action='store_true',
                        help='If flag set, output is json-ld not TURTLE')
    parser.add_argument(
        '-png',
        '--png',
        action='store_true',
        help='If flag set, tool will output PNG file of NIDM graph')
    parser.add_argument(
        '-bidsignore',
        '--bidsignore',
        action='store_true',
        default=False,
        help='If flag set, tool will add NIDM-related files to .bidsignore file'
    )
    #adding argument group for var->term mappings
    mapvars_group = parser.add_argument_group(
        'map variables to terms arguments')
    mapvars_group.add_argument(
        '-json_map',
        '--json_map',
        dest='json_map',
        required=False,
        default=False,
        help=
        "Optional user-suppled JSON file containing variable-term mappings.")
    mapvars_group.add_argument(
        '-ilxkey',
        '--ilxkey',
        dest='key',
        required=False,
        default=None,
        help="Interlex/SciCrunch API key to use for query")
    mapvars_group.add_argument(
        '-github',
        '--github',
        type=str,
        nargs='*',
        default=None,
        dest='github',
        required=False,
        help=
        """Use -github flag with list username token(or pw) for storing locally-defined terms in a
    nidm-local-terms repository in GitHub.  If user doesn''t supply a token then user will be prompted for username/password.\n
    Example: -github username token""")
    mapvars_group.add_argument(
        '-owl',
        action='store_true',
        required=False,
        default=None,
        help='Optional flag to query nidm-experiment OWL files')
    #parser.add_argument('-mapvars', '--mapvars', action='store_true', help='If flag set, variables in participant.tsv and phenotype files will be interactively mapped to terms')
    parser.add_argument(
        '-o',
        dest='outputfile',
        required=False,
        default="nidm.ttl",
        help="Outputs turtle file called nidm.ttl in BIDS directory by default"
    )

    args = parser.parse_args()

    directory = args.directory

    #importlib.reload(sys)
    #sys.setdefaultencoding('utf8')

    project = bidsmri2project(directory, args)

    logging.info(project.serializeTurtle())

    logging.info("Serializing NIDM graph and creating graph visualization..")
    #serialize graph

    #if args.outputfile was defined by user then use it else use default which is args.directory/nidm.ttl
    if args.outputfile == "nidm.ttl":
        #if we're choosing json-ld, make sure file extension is .json
        if args.jsonld:
            outputfile = os.path.join(
                directory,
                os.path.splitext(args.outputfile)[0] + ".json")
            #if flag set to add to .bidsignore then add
            if (args.bidsignore):
                addbidsignore(directory,
                              os.path.splitext(args.outputfile)[0] + ".json")

        else:
            outputfile = os.path.join(directory, args.outputfile)
            if (args.bidsignore):
                addbidsignore(directory, args.outputfile)
    else:
        #if we're choosing json-ld, make sure file extension is .json
        if args.jsonld:
            outputfile = os.path.splitext(args.outputfile)[0] + ".json"
            if (args.bidsignore):
                addbidsignore(directory,
                              os.path.splitext(args.outputfile)[0] + ".json")
        else:
            outputfile = args.outputfile
            if (args.bidsignore):
                addbidsignore(directory, args.outputfile)

    #serialize NIDM file
    with open(outputfile, 'w') as f:
        if args.jsonld:
            f.write(project.serializeJSONLD())
        else:
            f.write(project.serializeTurtle())

    #save a DOT graph as PNG
    if (args.png):
        project.save_DotGraph(str(outputfile + ".png"), format="png")
        #if flag set to add to .bidsignore then add
        if (args.bidsignore):
            addbidsignore(directory,
                          os.path.basename(str(outputfile + ".png")))
Example #48
0
def build_argparser():
    parser = ArgumentParser(add_help=False)
    args = parser.add_argument_group('Options')
    args.add_argument('-h',
                      '--help',
                      action='help',
                      default=SUPPRESS,
                      help='Show this help message and exit.')
    args.add_argument(
        "-m",
        "--model",
        help="Required. Path to an .xml file with a trained model",
        required=True,
        type=Path)
    args.add_argument("-v",
                      "--vocab",
                      help="Required. Path to the vocabulary file with tokens",
                      required=True,
                      type=str)
    args.add_argument("--merges",
                      help="Required. Path to the merges file",
                      required=True,
                      type=str)
    args.add_argument("-i",
                      "--input",
                      help="Optional. Input prompt",
                      required=False,
                      type=str,
                      action='append')
    args.add_argument(
        "--max_sample_token_num",
        help="Optional. Maximum number of tokens in generated sample",
        default=40,
        required=False,
        type=int)
    args.add_argument(
        "--top_k",
        help="Optional. Number of tokens with the highest probability "
        "which will be kept for generation",
        default=0,
        required=False,
        type=int)
    args.add_argument(
        "--top_p",
        help="Optional. Maximum probability, tokens with such a probability "
        "and lower will be kept for generation",
        default=0.9,
        required=False,
        type=float)
    args.add_argument("-d",
                      "--device",
                      help="Optional. Target device to perform inference on. "
                      "Default value is CPU",
                      default="CPU",
                      type=str)
    args.add_argument(
        '--dynamic_shape',
        action='store_true',
        help=
        'Run model with dynamic input sequence. If not provided, input sequence will be padded to max_seq_len'
    )
    args.add_argument(
        '--max_seq_len',
        type=int,
        required=False,
        default=1024,
        help=
        'Optional. Maximum sequence length for processing. Default value is 1024'
    )
    return parser
def build_argparser():
    """ Returns argument parser. """

    parser = ArgumentParser(add_help=False)
    args = parser.add_argument_group('Options')
    args.add_argument('-h',
                      '--help',
                      action='help',
                      default=SUPPRESS,
                      help='Show this help message and exit.')
    args.add_argument(
        '-m',
        '--model',
        help='Required. Path to an .xml file with a trained model.',
        required=True,
        type=Path)
    args.add_argument(
        '-i',
        '--input',
        required=True,
        help='Required. An input to process. The input must be a single image, '
        'a folder of images, video file or camera id.')
    args.add_argument(
        '-gf',
        '--gallery_folder',
        help='Required. Path to a folder with images in the gallery.',
        required=True,
        type=Path)
    args.add_argument(
        '--gallery_size',
        required=False,
        type=int,
        help='Optional. Number of images from the gallery used for processing')
    args.add_argument('--loop',
                      default=False,
                      action='store_true',
                      help='Optional. Enable reading the input in a loop.')
    args.add_argument('-o',
                      '--output',
                      required=False,
                      help='Optional. Name of the output file(s) to save.')
    args.add_argument('-limit',
                      '--output_limit',
                      required=False,
                      default=1000,
                      type=int,
                      help='Optional. Number of frames to store in output. '
                      'If 0 is set, all frames are stored.')
    args.add_argument(
        '-d',
        '--device',
        help='Optional. Specify the target device to infer on: CPU, GPU, HDDL '
        'or MYRIAD. The demo will look for a suitable plugin for device '
        'specified (by default, it is CPU).',
        default='CPU',
        type=str)
    args.add_argument(
        "-l",
        "--cpu_extension",
        help="Optional. Required for CPU custom layers. Absolute path to "
        "a shared library with the kernels implementations.",
        type=str,
        default=None)
    args.add_argument('--no_show',
                      action='store_true',
                      help='Optional. Do not visualize inference results.')
    args.add_argument('-u',
                      '--utilization_monitors',
                      default='',
                      type=str,
                      help='Optional. List of monitors to show initially.')
    return parser
def main():
    """ CLI user interface """
    parser = ArgumentParser(formatter_class=RawDescriptionHelpFormatter,
                            epilog="""\
thresholds and ranges:
  Threshold ranges are in Nagios format:
  https://nagios-plugins.org/doc/guidelines.html#THRESHOLDFORMAT
  For disk threshold you can specify a unit (e.g. "1000Mi:", "8Gi")\
""")
    required = parser.add_argument_group('required arguments')
    required.add_argument('--warn-cpu',
                          help='cpu warning threshold',
                          required=True)
    required.add_argument('--crit-cpu',
                          help='cpu critical threshold',
                          required=True)
    required.add_argument('--warn-conns',
                          help='free connections warning threshold',
                          required=True)
    required.add_argument('--crit-conns',
                          help='free connections critical threshold',
                          required=True)
    required.add_argument('--warn-disk',
                          help='disk free warning threshold',
                          required=True)
    required.add_argument('--crit-disk',
                          help='disk free critical threshold',
                          required=True)
    required.add_argument('--warn-swap',
                          help='swap used warning threshold',
                          required=True)
    required.add_argument('--crit-swap',
                          help='swap used critical threshold',
                          required=True)

    parser.add_argument('--instance',
                        help='db instance identifier',
                        required=True)
    parser.add_argument('--last_state',
                        help='use last known value',
                        action='store_true')
    parser.add_argument(
        '--percent',
        help='compare usage percent instead of absolute numbers'
        ' for connections and memory',
        action='store_true')
    parser.add_argument('--region',
                        help='AWS region name (default: eu-central-1)',
                        default='eu-central-1')

    args = parser.parse_args()
    states = []

    # gather metrics
    value = unused_connections(args)
    states.append({
        'name':
        'free_connections',
        'state':
        STATE_UNKNOWN
        if value is None else compare(value, args.warn_conns, args.crit_conns),
        'value':
        value,
        'unit':
        '%' if args.percent else '',
    })
    value = free_storage(args)
    states.append({
        'name':
        'free_storage',
        'state':
        STATE_UNKNOWN if value is None else compare(
            value, expand_unit(args.warn_disk), expand_unit(args.crit_disk)),
        'value':
        value if args.percent else value / 1024 / 1024,
        'unit':
        '%' if args.percent else ' MiB',
    })
    value = cpu_used(args)
    states.append({
        'name':
        'cpu_used',
        'state':
        STATE_UNKNOWN
        if value is None else compare(value, args.warn_cpu, args.crit_cpu),
        'value':
        value,
        'unit':
        '%',
    })
    value = swap_used(args)
    states.append({
        'name':
        'swap_used',
        'state':
        STATE_UNKNOWN if value is None else compare(
            value, expand_unit(args.warn_swap), expand_unit(args.crit_swap)),
        'value':
        value / 1024 / 1024,
        'unit':
        'MiB',
    })

    # determine overall state
    final_state = STATE_OK
    final_text = 'OK:'
    if [item for item in states if item['state'] == STATE_CRIT]:
        final_state = STATE_CRIT
        final_text = 'CRITICAL:'
    elif [item for item in states if item['state'] == STATE_WARN]:
        final_state = STATE_WARN
        final_text = 'WARNING:'
    elif [item for item in states if item['state'] == STATE_UNKNOWN]:
        final_state = STATE_UNKNOWN
        final_text = 'UNKNOWN:'

    print(
        final_text, ', '.join([
            "{}:{:.2f}{}".format(item['name'], item['value'], item['unit'])
            for item in states
        ]))
    sys.exit(final_state)
Example #51
0
    def add_task_arguments(cls, parser: argparse.ArgumentParser):
        group = parser.add_argument_group(description="Task related")

        # NOTE(kamo): add_arguments(..., required=True) can't be used
        # to provide --print_config mode. Instead of it, do as
        required = parser.get_default("required")
        required += ["token_list"]

        group.add_argument(
            "--token_list",
            type=str_or_none,
            default=None,
            help="A text mapping int-id to token",
        )
        group.add_argument(
            "--init",
            type=lambda x: str_or_none(x.lower()),
            default=None,
            help="The initialization method",
            choices=[
                "chainer",
                "xavier_uniform",
                "xavier_normal",
                "kaiming_uniform",
                "kaiming_normal",
                None,
            ],
        )

        group.add_argument(
            "--input_size",
            type=int_or_none,
            default=None,
            help="The number of input dimension of the feature",
        )

        group.add_argument(
            "--ctc_conf",
            action=NestedDictAction,
            default=get_default_kwargs(CTC),
            help="The keyword arguments for CTC class.",
        )
        group.add_argument(
            "--joint_net_conf",
            action=NestedDictAction,
            default=None,
            help="The keyword arguments for joint network class.",
        )

        group = parser.add_argument_group(description="Preprocess related")
        group.add_argument(
            "--use_preprocessor",
            type=str2bool,
            default=True,
            help="Apply preprocessing to data or not",
        )
        group.add_argument(
            "--token_type",
            type=str,
            default="bpe",
            choices=["bpe", "char", "word", "phn"],
            help="The text will be tokenized "
            "in the specified level token",
        )
        group.add_argument(
            "--bpemodel",
            type=str_or_none,
            default=None,
            help="The model file of sentencepiece",
        )
        parser.add_argument(
            "--non_linguistic_symbols",
            type=str_or_none,
            help="non_linguistic_symbols file path",
        )
        parser.add_argument(
            "--cleaner",
            type=str_or_none,
            choices=[None, "tacotron", "jaconv", "vietnamese"],
            default=None,
            help="Apply text cleaning",
        )
        parser.add_argument(
            "--g2p",
            type=str_or_none,
            choices=g2p_choices,
            default=None,
            help="Specify g2p method if --token_type=phn",
        )
        parser.add_argument(
            "--speech_volume_normalize",
            type=float_or_none,
            default=None,
            help="Scale the maximum amplitude to the given value.",
        )
        parser.add_argument(
            "--rir_scp",
            type=str_or_none,
            default=None,
            help="The file path of rir scp file.",
        )
        parser.add_argument(
            "--rir_apply_prob",
            type=float,
            default=1.0,
            help="THe probability for applying RIR convolution.",
        )
        parser.add_argument(
            "--noise_scp",
            type=str_or_none,
            default=None,
            help="The file path of noise scp file.",
        )
        parser.add_argument(
            "--noise_apply_prob",
            type=float,
            default=1.0,
            help="The probability applying Noise adding.",
        )
        parser.add_argument(
            "--noise_db_range",
            type=str,
            default="13_15",
            help="The range of noise decibel level.",
        )

        for class_choices in cls.class_choices_list:
            # Append --<name> and --<name>_conf.
            # e.g. --encoder and --encoder_conf
            class_choices.add_arguments(group)
Example #52
0
def cli_parser():
    """
    Construct a CLI argument parser and return the parsed arguments.

    Returns
    -------
    ArgumentParser
        An argument parser for parsing command-line arguments
    """
    parser = ArgumentParser()
    parser.add_argument('filename', help='Case file name', nargs='*')

    # general options
    general_group = parser.add_argument_group('General options')
    general_group.add_argument(
        '-r',
        '--routine',
        choices=routines.__cli__,
        help='Routine to run',
        nargs='*',
        default=['pflow'],
    )
    general_group.add_argument('--edit-config',
                               help='Quick edit of the config file',
                               default='',
                               nargs='?',
                               type=str)
    general_group.add_argument('--license',
                               action='store_true',
                               help='Display software license')

    # I/O
    io_group = parser.add_argument_group(
        'I/O options', 'Optional arguments for managing I/Os')
    io_group.add_argument('-p',
                          '--path',
                          help='Path to case files',
                          type=str,
                          default='',
                          dest='input_path')
    io_group.add_argument('-a',
                          '--addfile',
                          help='Additional files used by some formats.')
    io_group.add_argument('-D',
                          '--dynfile',
                          help='Additional dynamic file in dm format.')
    io_group.add_argument('-P',
                          '--pert',
                          help='Perturbation file path',
                          default='')
    io_group.add_argument('-d',
                          '--dump-raw',
                          help='Dump RAW format case file.')
    io_group.add_argument('-n',
                          '--no-output',
                          help='Force no output of any '
                          'kind',
                          action='store_true')
    io_group.add_argument('-o',
                          '--output_path',
                          help='Output path prefix',
                          type=str,
                          default='')
    io_group.add_argument('-C',
                          '--clean',
                          help='Clean output files',
                          action='store_true')

    config_exclusive = parser.add_mutually_exclusive_group()
    config_exclusive.add_argument('--load-config',
                                  help='path to the rc config to load',
                                  dest='config')
    config_exclusive.add_argument('--save-config',
                                  help='save configuration to file name',
                                  nargs='?',
                                  type=str,
                                  default='')

    # helps and documentations
    group_help = parser.add_argument_group(
        'Help and documentation',
        'Optional arguments for usage, model and config documentation')
    group_help.add_argument('-g',
                            '--group',
                            help='Show the models in the group.')
    group_help.add_argument('-q',
                            '--quick-help',
                            help='Show a quick help of model format.')
    group_help.add_argument('-c',
                            '--category',
                            help='Show model names in the given category.')
    group_help.add_argument('-l',
                            '--model-list',
                            help='Show a full list of all models.',
                            action='store_true')
    group_help.add_argument('-f',
                            '--model-format',
                            help='Show the format definition of models.',
                            type=str)
    group_help.add_argument(
        '-Q',
        '--model-var',
        help='Show the definition of variables <MODEL.VAR>.')
    group_help.add_argument(
        '--config-option',
        help='Show a quick help of a config option <CONFIG.OPTION>')
    group_help.add_argument(
        '--help-config',
        help='Show help of the <CONFIG> class. Use ALL for all configs.')
    group_help.add_argument('-s',
                            '--search',
                            help='Search for models that match the pattern.')
    group_help.add_argument('-e',
                            '--data_example',
                            help='print example parameter of a given model')

    # simulation control
    sim_options = parser.add_argument_group(
        'Simulation control options', 'Overwrites the simulation configs')
    sim_options.add_argument(
        '--dime', help='Specify DiME streaming server address and port')
    sim_options.add_argument('--tf',
                             help='End time of time-domain simulation',
                             type=float)

    # developer options
    dev_group = parser.add_argument_group('Developer options',
                                          'Options for developer debugging')
    dev_group.add_argument(
        '-v',
        '--verbose',
        help='Program logging level.'
        'Available levels are 10-DEBUG, 20-INFO, 30-WARNING, '
        '40-ERROR or 50-CRITICAL. The default level is 20-INFO',
        type=int,
        default=20,
        choices=(10, 20, 30, 40, 50))
    dev_group.add_argument('--profile',
                           action='store_true',
                           help='Enable Python cProfiler')
    dev_group.add_argument('--ncpu',
                           help='Number of parallel processes',
                           type=int,
                           default=0)
    dev_group.add_argument('--show-data',
                           type=str,
                           help='Show model data converted to system base',
                           nargs='*')
    dev_group.add_argument('-x',
                           '--exit',
                           help='Exit before running routine',
                           action='store_true',
                           dest='exit_now')

    return parser
Example #53
0
def qff_parse_args(args=None):
    description = '''\
    This script will apply QuickFF to derive a covalent force field for the given
    system from the ab initio input given in the input files.'''

    parser = ArgumentParser(description=description)
    parser.add_argument('--version',
                        action='version',
                        version='QuickFF %s' % version)
    parser.add_argument(
        '-s',
        '--silent',
        default=False,
        action='store_true',
        help='Swith of all logging completely, overwrites all other verbosity '
        'options.')
    parser.add_argument(
        '-v',
        '--verbose',
        default=False,
        action='store_true',
        help='Increases verbosity, is overwriten if SILENT or VERY_VERBOSE is '
        'switched on.')
    parser.add_argument(
        '-V',
        '--very-verbose',
        default=False,
        action='store_true',
        help='Increases verbosity to highest level, is overwriten if SILENT is '
        'switched on.')
    parser.add_argument(
        '-l',
        '--logfile',
        default=None,
        help='Redirect logger output to a file with name LOGFILE.')
    parser.add_argument(
        '--scoop',
        default=False,
        action='store_true',
        help='Flag to enable parallelisation using SCOOP. With SCOOP, the '
        'command to run QuickFF is slightly different, the absolute path '
        'to quickff.py should be used. For example, to run on 4 cores: '
        'python -m scoop -n4 /path/to/%(prog)s --scoop [options] fns')
    #General settings options
    settings = parser.add_argument_group(
        title='General QuickFF specifications')
    settings.add_argument(
        '-c',
        '--config-file',
        default=None,
        help='Specify a configuration file to read all QuickFF settings from.')
    settings.add_argument(
        '-m',
        '--program-mode',
        default=None,
        choices=[
            prog for prog in allowed_programs if not prog == 'BaseProgram'
        ],
        help='Specify the program mode which defines the set of instructions '
        'that will be executed.')
    settings.add_argument(
        '--fn-traj',
        default=None,
        help='Read/write the perturbation trajectories from/to FN_TRAJ. If the '
        'given file exists, the trajectories are read from the file. '
        'Otherwise, the trajectories are written to the given file.')
    settings.add_argument(
        '--only-traj', default=None,
        help='Construct the perturbation trajectory only for the terms with '+\
             'the given basenames. This options is only applied in the ' +\
             'MakeTrajectories program.'
    )
    settings.add_argument(
        '-p',
        '--plot-traj',
        default=None,
        help='If set to final, plots the various energy contributions along '
        'the perturbation trajectories to using the final force field. '
        'If set to all, plots the contributions along the trajectories '
        'using all intermediate force fields (given suffixes _Apt1, '
        '_Bhc1, _Cpt2 and _Dhc2) as well as the final force field '
        '(given the suffix _Ehc3).')
    settings.add_argument(
        '-x',
        '--xyz-traj',
        default=False,
        action='store_true',
        help='Write the perturbation trajectories in XYZ format. ')
    settings.add_argument(
        '--suffix',
        default=None,
        help='Suffix that will be added to all output files.')
    #Force field options
    ff = parser.add_argument_group(
        title=
        'Options related to the definition and derivation of the force field')
    ff.add_argument(
        '--ei',
        default=None,
        help='A Yaff parameters file defining the electrostatic contribution '
        'of the force field.')
    ff.add_argument(
        '--ei-rcut',
        default=None,
        help='The real space cut off for the electrostatic interactions. If '
        'the system is periodic, the ewald parameters will be adjusted '
        'to this cut off.')
    ff.add_argument(
        '--vdw',
        default=None,
        help='A Yaff parameters file defining the van der Waals contribution '
        'of the force field.')
    ff.add_argument(
        '--vdw-rcut',
        default=None,
        help='The real space cut off for the van der Waals interactions.')
    ff.add_argument(
        '--covres',
        default=None,
        help='A Yaff parameters file defining a residual contribution to the '
        'covalent part of the force field.')
    #System options
    system = parser.add_argument_group(
        title='Options related to the definition of the system')
    system.add_argument(
        '--ffatypes',
        default=None,
        choices=['None', 'list_of_atypes', 'low', 'medium', 'high', 'highest'],
        help='Assign atom types in the system by parsing an ordered list of'
        'atom types as argument or through the automatic built-in '
        'detection (see documentation). By default (or if None is given), '
        'the atom types are assumed to be defined in the input files. '
        '[default=%(default)s]')
    #Input files fn1, fn2, ... represent all input files that specify the system and the ab initio reference data.
    parser.add_argument(
        'fn',
        nargs='+',
        help='Input file name that specify the system and ab initio reference '
        'data. Multiple file names are allowed, but at least one should '
        'be given. Files later in the list overwrite information from '
        'earlier files. Allowed file formats are MolMod checkpoint files '
        '(file.chk), Gaussian formatted checkpoint files (file.fchk) '
        'and VASP xml files (file.xml).  ')
    if args is None:
        args = parser.parse_args()
    else:
        args = parser.parse_args(args.split())
    if not args.ffatypes is None and args.ffatypes.lower() == 'none':
        args.ffatypes = None
    return args
Example #54
0
def main(args=None):
    parser = ArgumentParser()
    parser.add_argument('--version', action='version', version=__version__)
    inputGroup = parser.add_argument_group(
        title='Input arguments',
        description='The following arguments are mutually exclusive.')
    xInputGroup = inputGroup.add_mutually_exclusive_group(required=True)
    xInputGroup.add_argument('-g',
                             '--glyphs-path',
                             metavar='GLYPHS',
                             help='Path to .glyphs source file')
    xInputGroup.add_argument('-u',
                             '--ufo-paths',
                             nargs='+',
                             metavar='UFO',
                             help='One or more paths to UFO files')
    xInputGroup.add_argument('-m',
                             '--mm-designspace',
                             metavar='DESIGNSPACE',
                             help='Path to .designspace file')

    outputGroup = parser.add_argument_group(title='Output arguments')
    outputGroup.add_argument(
        '-o',
        '--output',
        nargs='+',
        default=('otf', 'ttf'),
        metavar="FORMAT",
        help='Output font formats. Choose between: %(choices)s. '
        'Default: otf, ttf',
        choices=('ufo', 'otf', 'ttf', 'ttf-interpolatable', 'variable'))
    outputSubGroup = outputGroup.add_mutually_exclusive_group()
    outputSubGroup.add_argument(
        '--output-path',
        default=None,
        help="Output font file path. Only valid when the output is a single "
        "file (e.g. input is a single UFO or output is variable font)")
    outputSubGroup.add_argument(
        '--output-dir',
        default=None,
        help="Output folder. By default, output folders are created in the "
        "current working directory, grouping output fonts by format.")
    outputGroup.add_argument(
        '-i',
        '--interpolate',
        nargs="?",
        default=False,
        const=True,
        metavar="INSTANCE_NAME",
        help='Interpolate masters and generate all the instances defined. '
        'To only interpolate a specific instance (or instances) that '
        'match a given "name" attribute, you can pass as argument '
        'the full instance name or a regular expression. '
        'E.g.: -i "Noto Sans Bold"; or -i ".* UI Condensed". '
        '(for Glyphs or MutatorMath sources only). ')
    outputGroup.add_argument('-M',
                             '--masters-as-instances',
                             action='store_true',
                             help='Output masters as instances')
    outputGroup.add_argument(
        '--family-name',
        help='Family name to use for masters, and to filter output instances')
    outputGroup.add_argument(
        '--round-instances',
        dest='round_instances',
        action='store_true',
        help='Apply integer rounding to all geometry when interpolating')
    outputGroup.add_argument(
        '--designspace-path',
        default=None,
        help='Path to output designspace file (for Glyphs sources only).')
    outputGroup.add_argument(
        '--master-dir',
        default=None,
        help='Directory where to write master UFO. Default: "./master_ufo". '
        'If value is "{tmp}", a temporary directory is created and '
        'removed at the end (for Glyphs sources only).')
    outputGroup.add_argument(
        '--instance-dir',
        default=None,
        help='Directory where to write instance UFOs. Default: '
        '"./instance_ufo". If value is "{tmp}", a temporary directory '
        'is created and removed at the end (for Glyphs sources only).')
    outputGroup.add_argument(
        '--validate-ufo',
        action='store_true',
        help='Enable ufoLib validation on reading/writing UFO files. It is '
        'disabled by default')

    contourGroup = parser.add_argument_group(title='Handling of contours')
    contourGroup.add_argument('--keep-overlaps',
                              dest='remove_overlaps',
                              action='store_false',
                              help='Do not remove any overlap.')
    contourGroup.add_argument(
        '--overlaps-backend',
        dest='overlaps_backend',
        metavar="BACKEND",
        choices=("booleanOperations", "pathops"),
        default="booleanOperations",
        help='Select library to remove overlaps. Choose between: %(choices)s '
        '(default: %(default)s)')
    contourGroup.add_argument(
        '--keep-direction',
        dest='reverse_direction',
        action='store_false',
        help='Do not reverse contour direction when output is ttf or '
        'ttf-interpolatable')
    contourGroup.add_argument(
        '-e',
        '--conversion-error',
        type=float,
        default=None,
        metavar='ERROR',
        help='Maximum approximation error for cubic to quadratic conversion '
        'measured in EM')
    contourGroup.add_argument(
        '-a',
        '--autohint',
        nargs='?',
        const='',
        help='Run ttfautohint. Can provide arguments, quoted')
    contourGroup.add_argument(
        '--cff-round-tolerance',
        type=float,
        default=None,
        metavar='FLOAT',
        help='Restrict rounding of point coordinates in CFF table to only '
        'those floats whose absolute difference from their integral part '
        'is less than or equal to the tolerance. By default, all floats '
        'are rounded to integer (tolerance 0.5); 0 disables rounding.')
    contourGroup.add_argument(
        '--optimize-cff',
        type=lambda s: CFFOptimization(int(s)),
        default=CFFOptimization.SUBROUTINIZE,
        help='0 disables all optimizations; 1 specializes the CFF charstring '
        'operators; 2 (default) also enables subroutinization')

    layoutGroup = parser.add_argument_group(
        title='Handling of OpenType Layout')
    layoutGroup.add_argument(
        '--interpolate-binary-layout',
        nargs="?",
        default=False,
        const=True,
        metavar="MASTER_DIR",
        help='Interpolate layout tables from compiled master binaries. '
        'Requires Glyphs or MutatorMath source.')
    layoutGroup.add_argument(
        "--feature-writer",
        metavar="CLASS",
        action="append",
        dest="feature_writer_specs",
        help="string specifying a feature writer class to load, either "
        "built-in or from an external module, optionally initialized with "
        "the given keyword arguments. The class and module names are "
        "separated by '::'. The option can be repeated multiple times "
        "for each writer class. A special value of 'None' will disable "
        "all automatic feature generation. The option overrides both the "
        "default ufo2ft writers and those specified in the UFO lib.")

    feaCompilerGroup = layoutGroup.add_mutually_exclusive_group(required=False)
    feaCompilerGroup.add_argument(
        '--use-afdko',
        action='store_true',
        help='Use makeOTF instead of feaLib to compile FEA.')
    feaCompilerGroup.add_argument(
        '--mti-source',
        help='Path to mtiLib .txt feature definitions (use instead of FEA)')

    glyphnamesGroup = parser.add_mutually_exclusive_group(required=False)
    glyphnamesGroup.add_argument(
        '--production-names',
        dest='use_production_names',
        action='store_true',
        help='Rename glyphs with production names if available otherwise use '
        'uninames.')
    glyphnamesGroup.add_argument('--no-production-names',
                                 dest='use_production_names',
                                 action='store_false')

    subsetGroup = parser.add_mutually_exclusive_group(required=False)
    subsetGroup.add_argument(
        '--subset',
        dest='subset',
        action='store_true',
        help='Subset font using export flags set by glyphsLib')
    subsetGroup.add_argument('--no-subset',
                             dest='subset',
                             action='store_false')

    subroutinizeGroup = parser.add_mutually_exclusive_group(required=False)
    subroutinizeGroup.add_argument(
        '-s',
        '--subroutinize',
        action='store_true',
        help='Optimize CFF table using compreffor (default) [DEPRECATED: use '
        '--optimize-cff option instead]')
    subroutinizeGroup.add_argument('-S',
                                   '--no-subroutinize',
                                   dest='subroutinize',
                                   action='store_false')

    parser.set_defaults(use_production_names=None,
                        subset=None,
                        subroutinize=None)

    logGroup = parser.add_argument_group(title='Logging arguments')
    logGroup.add_argument('--timing',
                          action='store_true',
                          help="Print the elapsed time for each steps")
    logGroup.add_argument(
        '--verbose',
        default='INFO',
        metavar='LEVEL',
        choices=('DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'),
        help='Configure the logger verbosity level. Choose between: '
        '%(choices)s. Default: INFO')
    args = vars(parser.parse_args(args))

    specs = args.pop("feature_writer_specs")
    if specs is not None:
        args["feature_writers"] = _loadFeatureWriters(parser, specs)

    glyphs_path = args.pop('glyphs_path')
    ufo_paths = args.pop('ufo_paths')
    designspace_path = args.pop('mm_designspace')
    input_format = ("Glyphs" if glyphs_path else
                    "designspace" if designspace_path else "UFO") + " source"

    if 'variable' in args['output']:
        if not (glyphs_path or designspace_path):
            parser.error(
                'Glyphs or designspace source required for variable font')
        exclude_args(parser, args, [
            'interpolate', 'masters_as_instances', 'interpolate_binary_layout'
        ], "variable output")

    try:
        project = FontProject(timing=args.pop('timing'),
                              verbose=args.pop('verbose'),
                              validate_ufo=args.pop('validate_ufo'))

        if glyphs_path:
            with _make_tempdirs(parser, args):
                project.run_from_glyphs(glyphs_path, **args)
            return

        exclude_args(parser, args, [
            'family_name', 'mti_source', 'designspace_path', 'master_dir',
            'instance_dir'
        ], input_format)
        if designspace_path:
            project.run_from_designspace(designspace_path, **args)
            return

        exclude_args(
            parser, args,
            ['interpolate', 'interpolate_binary_layout', 'round_instances'],
            input_format)
        project.run_from_ufos(ufo_paths,
                              is_instance=args.pop('masters_as_instances'),
                              **args)
    except FontmakeError as e:
        import sys
        sys.exit("fontmake: error: %s" % e)
Example #55
0
def rotate_poses(poses_3d, R, t):
    R_inv = np.linalg.inv(R)
    for pose_id in range(poses_3d.shape[0]):
        pose_3d = poses_3d[pose_id].reshape((-1, 4)).transpose()
        pose_3d[0:3] = np.dot(R_inv, pose_3d[0:3] - t)
        poses_3d[pose_id] = pose_3d.transpose().reshape(-1)

    return poses_3d


if __name__ == '__main__':
    parser = ArgumentParser(
        description='Lightweight 3D human pose estimation demo. '
        'Press esc to exit, "p" to (un)pause video or process next image.',
        add_help=False)
    args = parser.add_argument_group('Options')
    args.add_argument('-h',
                      '--help',
                      action='help',
                      default=SUPPRESS,
                      help='Show this help message and exit.')
    args.add_argument(
        '-m',
        '--model',
        help='Required. Path to an .xml file with a trained model.',
        type=str,
        required=True)
    args.add_argument(
        '-i',
        '--input',
        help='Required. Path to input image, images, video file or camera id.',
Example #56
0
def main():
    global log, userAgent

    parser = ArgumentParser(usage="python %(prog)s [options]", )
    parser.add_argument("-V", "--version", action="version", version=VERSION)
    parser.add_argument("-d",
                        "--debug",
                        action="store_const",
                        const=logging.DEBUG,
                        help="debugging mode")
    target = parser.add_argument_group("target arguments")
    target.add_argument("-t",
                        "--target",
                        dest="url",
                        metavar="",
                        help="url of the target",
                        required=True)
    target.add_argument("-u",
                        "--username",
                        dest="usr",
                        metavar="",
                        default="admin",
                        help="username of the target (default: %(default)s)")
    target.add_argument(
        "-p",
        "--password",
        dest="pwd",
        metavar="",
        help="password of the target (change -p to --p to use a wordlist)")
    target.add_argument("--p",
                        dest="pwd_list",
                        type=FileType('r'),
                        help=SUPPRESS)
    request = parser.add_argument_group()
    request.add_argument("--timeout",
                         metavar="",
                         type=int,
                         default=5,
                         help="timed out for requests")
    request.add_argument(
        "--thread",
        metavar="",
        type=int,
        default=5,
        help="numbers of threading multiproccesor (default: %(default)s)")
    request.add_argument("--proxy",
                         metavar="",
                         help="using a HTTP proxy (ex: http://site.com:8000)")
    args = parser.parse_args()

    banner(True)

    if args.debug:
        log.setLevel(args.debug)

    proxy = ""
    password = []
    if args.pwd:
        password.append(args.pwd)
    elif args.pwd_list:
        password = sliceList(args.pwd_list)
    else:
        parser.error("the following arguments are required: -p/--p")

    log.debug("HTTP timeout is set to \"" + str(args.timeout) + "\" seconds")
    timeout = args.timeout

    log.debug("setting a HTTP User-Agent")
    userAgent = random.choice(userAgent)

    if args.proxy is not None:
        proxy = args.proxy
        log.info("\"" + args.proxy + "\" host is used as a proxy")

    try:
        log.info("testing connection to the target")
        request = urllib.request.Request(args.url,
                                         headers={"User-Agent": userAgent})
        respond = urllib.request.urlopen(request, timeout=timeout)
    except Exception as err:
        raise Exception(err)

    log.info("use the word \"" + args.usr + "\" to be used as a username")

    logged = False
    start_time = time.time()
    if len(password) > 1:
        log.debug("total data in wordlist: " + str(len(password)) + " words")

    log.info("starting a login brute force")
    with concurrent.futures.ThreadPoolExecutor(
            max_workers=args.thread) as executor:
        processed = (executor.submit(start, respond.url, args.usr, pwd,
                                     timeout, userAgent, proxy)
                     for pwd in password)
        for i, process in enumerate(
                concurrent.futures.as_completed(processed)):
            if len(password) > 1:
                print("[{}][INFO] testing {} password".format(
                    datetime.now().strftime("%H:%M:%S"), i),
                      end="\r")
            process = process.result()
            if process:
                logged = True
                password = process
                break

    if logged is True:
        log.success(
            "successfully entered into the target dashboard with username \"" +
            args.usr + "\" and password \"" + password + "\"")
    else:
        log.failed("cannot enter into the target dashboard")
    log.info("time taken \"" + str(int(time.time() - start_time)) +
             " seconds\"")
Example #57
0
def main():
    """Entry point"""
    from nipype import config as ncfg
    from nipype.pipeline.engine import Workflow
    from mriqc.utils.bids import collect_bids_data
    from mriqc.workflows.core import build_workflow
    # from mriqc.reports.utils import check_reports

    parser = ArgumentParser(description='MRI Quality Control',
                            formatter_class=RawTextHelpFormatter)

    parser.add_argument('-v',
                        '--version',
                        action='version',
                        version='mriqc v{}'.format(__version__))

    parser.add_argument('bids_dir',
                        action='store',
                        help='The directory with the input dataset '
                        'formatted according to the BIDS standard.')
    parser.add_argument(
        'output_dir',
        action='store',
        help='The directory where the output files '
        'should be stored. If you are running group level analysis '
        'this folder should be prepopulated with the results of the'
        'participant level analysis.')
    parser.add_argument(
        'analysis_level',
        action='store',
        nargs='+',
        help='Level of the analysis that will be performed. '
        'Multiple participant level analyses can be run independently '
        '(in parallel) using the same output_dir.',
        choices=['participant', 'group'])
    parser.add_argument(
        '--participant_label',
        '--subject_list',
        '-S',
        action='store',
        help='The label(s) of the participant(s) that should be analyzed. '
        'The label corresponds to sub-<participant_label> from the '
        'BIDS spec (so it does not include "sub-"). If this parameter '
        'is not provided all subjects should be analyzed. Multiple '
        'participants can be specified with a space separated list.',
        nargs="*")

    g_input = parser.add_argument_group('mriqc specific inputs')
    g_input.add_argument('-d',
                         '--data-type',
                         action='store',
                         nargs='*',
                         choices=['anat', 'anatomical', 'func', 'functional'],
                         default=['anat', 'func'])
    g_input.add_argument('-s', '--session-id', action='store')
    g_input.add_argument('-r', '--run-id', action='store')
    g_input.add_argument('--nthreads',
                         action='store',
                         type=int,
                         help='number of threads')
    g_input.add_argument('--n_procs',
                         action='store',
                         default=0,
                         type=int,
                         help='number of threads')
    g_input.add_argument('--mem_gb',
                         action='store',
                         default=0,
                         type=int,
                         help='available total memory')
    g_input.add_argument('--write-graph',
                         action='store_true',
                         default=False,
                         help='Write workflow graph.')
    g_input.add_argument('--dry-run',
                         action='store_true',
                         default=False,
                         help='Do not run the workflow.')
    g_input.add_argument('--use-plugin',
                         action='store',
                         default=None,
                         help='nipype plugin configuration file')

    g_input.add_argument('--testing',
                         action='store_true',
                         default=False,
                         help='use testing settings for a minimal footprint')

    g_outputs = parser.add_argument_group('mriqc specific outputs')
    g_outputs.add_argument('-w',
                           '--work-dir',
                           action='store',
                           default=op.join(os.getcwd(), 'work'))
    g_outputs.add_argument('--report-dir', action='store')
    g_outputs.add_argument('--verbose-reports',
                           default=False,
                           action='store_true')

    # ANTs options
    g_ants = parser.add_argument_group(
        'specific settings for ANTs registrations')
    g_ants.add_argument(
        '--ants-nthreads',
        action='store',
        type=int,
        default=6,
        help='number of threads that will be set in ANTs processes')
    g_ants.add_argument('--ants-settings',
                        action='store',
                        help='path to JSON file with settings for ANTS')

    # AFNI head motion correction settings
    g_afni = parser.add_argument_group(
        'specific settings for AFNI head motion correction')
    g_afni.add_argument(
        '--hmc-afni',
        action='store_true',
        default=False,
        help='Use ANFI 3dvolreg for head motion correction (HMC) and '
        'frame displacement (FD) estimation')
    g_afni.add_argument(
        '--deoblique',
        action='store_true',
        default=False,
        help='Deoblique the functional scans during head motion '
        'correction preprocessing')
    g_afni.add_argument(
        '--despike',
        action='store_true',
        default=False,
        help='Despike the functional scans during head motion correction '
        'preprocessing')
    g_afni.add_argument(
        '--start-idx',
        action='store',
        type=int,
        help='Initial volume in functional timeseries that should be '
        'considered for preprocessing')
    g_afni.add_argument(
        '--stop-idx',
        action='store',
        type=int,
        help='Final volume in functional timeseries that should be '
        'considered for preprocessing')
    g_afni.add_argument('--correct-slice-timing',
                        action='store_true',
                        default=False,
                        help='Perform slice timing correction')

    opts = parser.parse_args()

    # Build settings dict
    bids_dir = op.abspath(opts.bids_dir)

    # Number of processes
    n_procs = 0
    if opts.nthreads is not None:
        MRIQC_LOG.warn('Option --nthreads has been deprecated in mriqc 0.8.8. '
                       'Please use --n_procs instead.')
        n_procs = opts.nthreads
    if opts.n_procs is not None:
        n_procs = opts.n_procs

    # Check physical memory
    total_memory = opts.mem_gb
    if total_memory < 0:
        try:
            from psutil import virtual_memory
            total_memory = virtual_memory().total // (1024**3) + 1
        except ImportError:
            MRIQC_LOG.warn(
                'Total physical memory could not be estimated, using %d'
                'GB as default', DEFAULT_MEM_GB)
            total_memory = DEFAULT_MEM_GB

    if total_memory > 0:
        av_procs = total_memory // 4
        if av_procs < 1:
            MRIQC_LOG.warn(
                'Total physical memory is less than 4GB, memory allocation'
                ' problems are likely to occur.')
            n_procs = 1
        elif n_procs > av_procs:
            n_procs = av_procs

    settings = {
        'bids_dir': bids_dir,
        'write_graph': opts.write_graph,
        'testing': opts.testing,
        'hmc_afni': opts.hmc_afni,
        'n_procs': n_procs,
        'ants_nthreads': opts.ants_nthreads,
        'output_dir': op.abspath(opts.output_dir),
        'work_dir': op.abspath(opts.work_dir),
        'verbose_reports': opts.verbose_reports or opts.testing
    }

    if opts.hmc_afni:
        settings['deoblique'] = opts.deoblique
        settings['despike'] = opts.despike
        settings['correct_slice_timing'] = opts.correct_slice_timing
        if opts.start_idx:
            settings['start_idx'] = opts.start_idx
        if opts.stop_idx:
            settings['stop_idx'] = opts.stop_idx

    if opts.ants_settings:
        settings['ants_settings'] = opts.ants_settings

    log_dir = op.join(settings['output_dir'], 'logs')

    analysis_levels = opts.analysis_level
    if opts.participant_label is None:
        analysis_levels.append('group')
    analysis_levels = list(set(analysis_levels))
    if len(analysis_levels) > 2:
        raise RuntimeError('Error parsing analysis levels, got "%s"' %
                           ', '.join(analysis_levels))

    settings['report_dir'] = opts.report_dir
    if not settings['report_dir']:
        settings['report_dir'] = op.join(settings['output_dir'], 'reports')

    check_folder(settings['output_dir'])
    if 'participant' in analysis_levels:
        check_folder(settings['work_dir'])

    check_folder(log_dir)
    check_folder(settings['report_dir'])

    # Set nipype config
    ncfg.update_config({
        'logging': {
            'log_directory': log_dir,
            'log_to_file': True
        },
        'execution': {
            'crashdump_dir': log_dir
        }
    })

    plugin_settings = {'plugin': 'Linear'}
    if opts.use_plugin is not None:
        from yaml import load as loadyml
        with open(opts.use_plugin) as pfile:
            plugin_settings = loadyml(pfile)
    else:
        # Setup multiprocessing
        if settings['n_procs'] == 0:
            settings['n_procs'] = 1
            max_parallel_ants = cpu_count() // settings['ants_nthreads']
            if max_parallel_ants > 1:
                settings['n_procs'] = max_parallel_ants

        if settings['n_procs'] > 1:
            plugin_settings['plugin'] = 'MultiProc'
            plugin_settings['plugin_args'] = {'n_procs': settings['n_procs']}

    MRIQC_LOG.info(
        'Running MRIQC-%s (analysis_levels=[%s], participant_label=%s)\n\tSettings=%s',
        __version__, ', '.join(analysis_levels), opts.participant_label,
        settings)

    # Process data types
    qc_types = []
    modalities = []
    for qcdt in sorted(list(set([qcdt[:4] for qcdt in opts.data_type]))):
        if qcdt.startswith('anat'):
            qc_types.append('anatomical')
            modalities.append('t1w')
        if qcdt.startswith('func'):
            qc_types.append('functional')
            modalities.append('func')

    dataset = collect_bids_data(settings['bids_dir'],
                                participant_label=opts.participant_label)

    # Overwrite if participant level is run
    derivatives_dir = settings['bids_dir']

    # Set up participant level
    if 'participant' in analysis_levels:
        workflow = Workflow(name='workflow_enumerator')
        workflow.base_dir = settings['work_dir']

        wf_list = []
        for qctype, mod in zip(qc_types, modalities):
            if not dataset[mod]:
                MRIQC_LOG.warn('No %s scans were found in %s', qctype,
                               settings['bids_dir'])
                continue

            wf_list.append(
                build_workflow(dataset[mod], qctype, settings=settings))

        if wf_list:
            workflow.add_nodes(wf_list)

            if not opts.dry_run:
                workflow.run(**plugin_settings)
        else:
            raise RuntimeError(
                'Error reading BIDS directory (%s), or the dataset is not '
                'BIDS-compliant.' % settings['bids_dir'])
        derivatives_dir = op.join(settings['output_dir'], 'derivatives')

    # Set up group level
    if 'group' in analysis_levels:
        from mriqc.reports import group_html
        from mriqc.utils.misc import generate_csv

        reports_dir = check_folder(op.join(settings['output_dir'], 'reports'))

        for qctype in qc_types:
            dataframe, out_csv = generate_csv(derivatives_dir,
                                              settings['output_dir'], qctype)

            # If there are no iqm.json files, nothing to do.
            if dataframe is None:
                MRIQC_LOG.warn(
                    'No IQM-JSON files were found for the %s data type in %s. The group-level '
                    'report was not generated.', qctype, derivatives_dir)
                continue

            out_html = op.join(reports_dir, qctype[:4] + '_group.html')
            MRIQC_LOG.info('Summary CSV table for the %s data generated (%s)',
                           qctype, out_csv)
            group_html(out_csv,
                       qctype,
                       csv_failed=op.join(settings['output_dir'],
                                          'failed_' + qctype + '.csv'),
                       out_file=out_html)
            MRIQC_LOG.info('Group-%s report generated (%s)', qctype, out_html)
Example #58
0
 def cli(cls, parser: argparse.ArgumentParser):
     group = parser.add_argument_group('Composite Loss')
     group.add_argument('--loss-prescale', default=cls.prescale, type=float)
     group.add_argument('--regression-loss', default='laplace',
                        choices=['smoothl1', 'l1', 'laplace'],
                        help='type of regression loss')
Example #59
0
def main():
    parser = ArgumentParser()
    main = parser.add_argument_group('main')
    main.add_argument('file',
                      type=str,
                      help='file path to save the exported routes to')
    main.add_argument(
        '--modes',
        type=str,
        nargs='+',
        dest='modes',
        help='list of modes to export routes for; defualt is all modes',
        default=('walk', 'pt', 'car', 'bike'),
        choices=('walk', 'pt', 'car', 'bike'))
    main.add_argument('--skip-empty',
                      dest='skip',
                      action='store_true',
                      default=False,
                      help='skip all legs that do not have routes')
    main.add_argument('--epsg',
                      dest='epsg',
                      type=int,
                      default=2223,
                      help='epsg system to convert routes to; default is 2223')

    common = parser.add_argument_group('common')
    common.add_argument(
        '--folder',
        type=str,
        dest='folder',
        default='.',
        help='file path to the directory containing Icarus run data'
        '; default is the working directory')
    common.add_argument(
        '--log',
        type=str,
        dest='log',
        default=None,
        help=
        'file path to save the process log; by default the log is not saved')
    common.add_argument(
        '--level',
        type=str,
        dest='level',
        default='info',
        help='verbosity level of the process log; default is "info"',
        choices=('notset', 'debug', 'info', 'warning', 'error', 'critical'))
    common.add_argument(
        '--replace',
        dest='replace',
        action='store_true',
        default=False,
        help='automatically replace existing data; do not prompt the user')
    args = parser.parse_args()

    handlers = []
    handlers.append(log.StreamHandler())
    if args.log is not None:
        handlers.append(log.FileHandler(args.log, 'w'))
    log.basicConfig(
        format='%(asctime)s %(levelname)s %(filename)s:%(lineno)s %(message)s',
        level=getattr(log, args.level.upper()),
        handlers=handlers)

    path = lambda x: os.path.abspath(os.path.join(args.folder, x))
    home = path('')

    log.info('Running route export tool.')
    log.info(f'Loading run data from {home}.')

    database = SqliteUtil(path('database.db'), readonly=True)
    config = ConfigUtil.load_config(path('config.json'))

    try:
        export_routes(database, args.modes, args.file, args.skip, args.epsg)
    except:
        log.exception('Critical error while exporting routes:')
        exit(1)

    database.close()
Example #60
0
group.add_argument('--checkin', type = type_day, metavar = 'YYYY-MM-DD', default = (startDay - timedelta(1)).strftime('%Y-%m-%d'), help = 'check in')
group.add_argument('--wednesday', dest = 'checkin', action = 'store_const', const = (startDay - timedelta(1)).strftime('%Y-%m-%d'), help = 'check in on Wednesday')
parser.add_argument('--checkout', type = type_day, metavar = 'YYYY-MM-DD', default = (startDay + timedelta(3)).strftime('%Y-%m-%d'), help = 'check out')
group = parser.add_mutually_exclusive_group()
group.add_argument('--max-distance', type = type_distance, metavar = 'BLOCKS', help = "max hotel distance that triggers an alert (or 'connected' to require skywalk hotels)")
group.add_argument('--connected', dest = 'max_distance', action = 'store_const', const = 'connected', help = 'shorthand for --max-distance connected')
parser.add_argument('--budget', type = float, metavar = 'PRICE', default = '99999', help = 'max total rate (not counting taxes/fees) that triggers an alert')
parser.add_argument('--hotel-regex', type = type_regex, metavar = 'PATTERN', default = reCompile('.*'), help = 'regular expression to match hotel name against')
parser.add_argument('--room-regex', type = type_regex, metavar = 'PATTERN', default = reCompile('.*'), help = 'regular expression to match room against')
parser.add_argument('--show-all', action = 'store_true', default='true', help = 'show all rooms, even if miles away (these rooms never trigger alerts)')
group = parser.add_mutually_exclusive_group()
group.add_argument('--delay', type = int, default = 1, metavar = 'MINS', help = 'search every MINS minute(s)')
group.add_argument('--once', action = 'store_true', help = 'search once and exit')
parser.add_argument('--test', action = 'store_true', dest = 'test', help = 'trigger every specified alert and exit')

group = parser.add_argument_group('required arguments')
# Both of these set 'key'; only one of them is required
group.add_argument('--key', nargs = 2, metavar = ('KEY', 'AUTH'), help = 'key (see the README for more information)')
group.add_argument('--url', action = PasskeyUrlAction, dest = 'key', help = 'passkey URL containing your key')

args = parser.parse_args()

baseUrl = "https://book.passkey.com/event/%d/owner/%d" % (eventId, ownerId)

def notifyPushbullet():
    	pushbulletKey = ''
    	pb = Pushbullet(pushbulletKey)
	pb.push_link("Gencon Housing Notification", targetUrl)

def notifyDiscord():
    discordWebhookUrl = ''