Ejemplo n.º 1
0
def main(args=None):
    """Command-line entry point for cmorization of EC-Earth output.

    Builds the argument parser, configures logging, initializes the
    ece2cmor library and dispatches the cmorization tasks to every active
    EC-Earth component (IFS, NEMO, LPJ-GUESS, TM5).

    :param args: optional list of argument strings; when None, arguments
        are read from ``sys.argv`` (standard argparse behavior).
    """
    formatter = lambda prog: argparse.ArgumentDefaultsHelpFormatter(
        prog, max_help_position=30)

    parser = argparse.ArgumentParser(
        description="Post-processing and cmorization of EC-Earth output",
        formatter_class=formatter)
    required = parser.add_argument_group("required arguments")

    parser.add_argument(
        "datadir",
        metavar="DIR",
        type=str,
        help="EC-Earth data directory, i.e. for a given component, "
        "for a given leg")
    parser.add_argument("--exp",
                        metavar="EXPID",
                        type=str,
                        default="ECE3",
                        help="Experiment prefix")
    varsarg = required.add_mutually_exclusive_group(required=True)
    varsarg.add_argument(
        "--varlist",
        metavar="FILE",
        type=str,
        help=
        "File (json) containing cmor variables grouped per table, grouped per EC-Earth component"
    )
    varsarg.add_argument(
        "--drq",
        metavar="FILE",
        type=str,
        help=
        "File (json|f90 namelist|xlsx) containing cmor variables, grouped per table"
    )
    required.add_argument("--meta",
                          metavar="FILE.json",
                          type=str,
                          required=True,
                          help="Input metadata file")
    parser.add_argument("--odir",
                        metavar="DIR",
                        type=str,
                        default=None,
                        help="Output directory, by default the "
                        "metadata \'outpath\' entry")
    cmor_utils.ScriptUtils.add_model_exclusive_options(parser, "ece2cmor")
    parser.add_argument(
        "--ececonf",
        metavar='|'.join(components.ece_configs.keys()),
        type=str,
        help="EC-Earth configuration (only used with --drq option)")
    parser.add_argument("--refd",
                        metavar="YYYY-mm-dd",
                        type=str,
                        default="1850-01-01",
                        help="Reference date for output time axes")
    parser.add_argument("--npp",
                        metavar="N",
                        type=int,
                        default=8,
                        help="Number of parallel tasks (only relevant for "
                        "IFS cmorization")
    parser.add_argument("--log",
                        action="store_true",
                        default=False,
                        help="Write to log file")
    parser.add_argument("--flatdir",
                        action="store_true",
                        default=False,
                        help="Do not create sub-directories in "
                        "output folder")
    parser.add_argument("--tabledir",
                        metavar="DIR",
                        type=str,
                        default=ece2cmorlib.table_dir_default,
                        help="Cmorization table directory")
    parser.add_argument("--tableprefix",
                        metavar="PREFIX",
                        type=str,
                        default=ece2cmorlib.prefix_default,
                        help="Cmorization table prefix string")
    parser.add_argument("--tmpdir",
                        metavar="DIR",
                        type=str,
                        default="/tmp/ece2cmor",
                        help="Temporary working directory")
    parser.add_argument(
        "--overwritemode",
        metavar="MODE",
        type=str,
        default="preserve",
        help="MODE:preserve|replace|append, CMOR netcdf overwrite mode",
        choices=["preserve", "replace", "append"])
    parser.add_argument("--skip_alevel_vars",
                        action="store_true",
                        default=False,
                        help="Prevent loading atmospheric "
                        "model-level variables")
    parser.add_argument(
        "-V",
        "--version",
        action="version",
        version="%(prog)s {version}".format(version=__version__.version))
    # Deprecated arguments, only for backward compatibility
    parser.add_argument("--ncdo",
                        metavar="N",
                        type=int,
                        default=4,
                        help=argparse.SUPPRESS)
    parser.add_argument("--nomask",
                        action="store_true",
                        default=False,
                        help=argparse.SUPPRESS)
    parser.add_argument("--nofilter",
                        action="store_true",
                        default=False,
                        help=argparse.SUPPRESS)
    parser.add_argument("--atm",
                        action="store_true",
                        default=False,
                        help="Deprecated! Use --ifs instead")
    parser.add_argument("--oce",
                        action="store_true",
                        default=False,
                        help="Deprecated! Use --nemo instead")
    parser.add_argument("--conf",
                        action="store_true",
                        help="Deprecated! Use --meta instead")
    parser.add_argument("--vars",
                        action="store_true",
                        help="Deprecated! Use --varlist instead")
    cmor_utils.ScriptUtils.add_model_tabfile_options(parser)

    # Honor the 'args' parameter: parse_args(None) falls back to sys.argv,
    # so command-line behavior is unchanged while programmatic calls such as
    # main(["datadir", "--meta", "m.json", ...]) now work as the signature
    # advertises (previously the parameter was silently ignored).
    args = parser.parse_args(args)

    cmor_utils.ScriptUtils.set_custom_tabfiles(args)

    logfile = None
    logformat = "%(asctime)s %(levelname)s:%(name)s: %(message)s"
    logdateformat = "%Y-%m-%d %H:%M:%S"
    if getattr(args, "log", False):
        # Log file name: <exp>-<last two path components>-<UTC timestamp>.log
        dirs = os.path.abspath(args.datadir).split(os.sep)
        fname = '-'.join([args.exp] + dirs[-2:] +
                         [time.strftime("%Y%m%d%H%M%S", time.gmtime())])
        logfile = '.'.join([fname, "log"])
        logging.basicConfig(filename=logfile,
                            level=logging.DEBUG,
                            format=logformat,
                            datefmt=logdateformat)
    else:
        logging.basicConfig(level=logging.DEBUG,
                            format=logformat,
                            datefmt=logdateformat)

    if not os.path.isdir(args.datadir):
        log.fatal("Your data directory argument %s cannot be found." %
                  args.datadir)
        sys.exit(' Exiting ece2cmor.')

    if args.varlist is not None and not os.path.isfile(args.varlist):
        log.fatal("Your variable list json file %s cannot be found." %
                  args.varlist)
        sys.exit(' Exiting ece2cmor.')

    if args.drq is not None and not os.path.isfile(args.drq):
        log.fatal("Your data request file %s cannot be found." % args.drq)
        sys.exit(' Exiting ece2cmor.')

    if not os.path.isfile(args.meta):
        log.fatal("Your metadata file %s cannot be found." % args.meta)
        sys.exit(' Exiting ece2cmor.')

    # Map the --overwritemode choice onto the library's mode constants.
    modedict = {
        "preserve": ece2cmorlib.PRESERVE,
        "append": ece2cmorlib.APPEND,
        "replace": ece2cmorlib.REPLACE
    }

    # Initialize ece2cmor:
    ece2cmorlib.initialize(args.meta,
                           mode=modedict[args.overwritemode],
                           tabledir=args.tabledir,
                           tableprefix=args.tableprefix,
                           outputdir=args.odir,
                           logfile=logfile,
                           create_subdirs=(not args.flatdir))
    ece2cmorlib.enable_masks = not args.nomask
    ece2cmorlib.auto_filter = not args.nofilter

    active_components = cmor_utils.ScriptUtils.get_active_components(
        args, args.ececonf)

    filters = None
    if args.skip_alevel_vars:

        # Filter that rejects targets living on atmospheric model levels.
        def ifs_model_level_variable(target):
            zaxis, levs = cmor_target.get_z_axis(target)
            return zaxis not in ["alevel", "alevhalf"]

        filters = {"model level": ifs_model_level_variable}
    try:
        if getattr(args, "varlist", None) is not None:
            taskloader.load_tasks(args.varlist,
                                  active_components=active_components,
                                  target_filters=filters,
                                  check_duplicates=True)
        else:
            taskloader.load_tasks_from_drq(args.drq,
                                           active_components=["ifs"],
                                           target_filters=filters,
                                           check_prefs=True)
    except taskloader.SwapDrqAndVarListException as e:
        log.error(e.message)
        opt1, opt2 = "vars" if e.reverse else "drq", "drq" if e.reverse else "vars"
        log.error(
            "It seems you are using the --%s option where you should use the --%s option for this file"
            % (opt1, opt2))
        sys.exit(' Exiting ece2cmor.')

    refdate = datetime.datetime.combine(dateutil.parser.parse(args.refd),
                                        datetime.datetime.min.time())

    if "ifs" in active_components:
        ece2cmorlib.perform_ifs_tasks(args.datadir,
                                      args.exp,
                                      refdate=refdate,
                                      tempdir=args.tmpdir,
                                      taskthreads=args.npp,
                                      cdothreads=args.ncdo)
    if "nemo" in active_components:
        ece2cmorlib.perform_nemo_tasks(args.datadir, args.exp, refdate)

    if "lpjg" in active_components:
        ece2cmorlib.perform_lpjg_tasks(args.datadir, args.tmpdir, args.exp,
                                       refdate)
    if "tm5" in active_components:
        ece2cmorlib.perform_tm5_tasks(args.datadir, args.tmpdir, args.exp,
                                      refdate)


#   if procNEWCOMPONENT in active_components:
#       ece2cmorlib.perform_NEWCOMPONENT_tasks(args.datadir, args.exp, refdate)

    ece2cmorlib.finalize()
Ejemplo n.º 2
0
def main() -> None:
    """Promnesia command-line entry point.

    Builds the subcommand parsers (index, serve, demo, install-server,
    config, doctor), parses sys.argv and dispatches to the selected mode.
    Exits with status 1 when no mode is given.
    """
    # TODO longer, literate description?

    def add_index_args(parser: argparse.ArgumentParser,
                       default_config_path: PathIsh = None) -> None:
        """
        Add the indexing arguments shared by the 'index' and 'demo' modes.

        :param default_config_path:
            if not given, all :func:`demo_sources()` are run
        """
        # The 'extend' action is only built into argparse from Python 3.8 on.
        register_argparse_extend_action_in_pre_py38(parser)
        parser.add_argument('--config',
                            type=Path,
                            default=default_config_path,
                            help='Config path')
        parser.add_argument(
            '--dry',
            action='store_true',
            help="Dry run, won't touch the database, only print the results out"
        )
        parser.add_argument(
            '--sources',
            required=False,
            action="extend",
            nargs="+",
            type=_ordinal_or_name,
            metavar="SOURCE",
            help="Source names (or their 0-indexed position) to index.",
        )
        parser.add_argument(
            '--overwrite',
            required=False,
            action="store_true",
            help="Empty db before populating it with newly indexed visits."
            "  If interrupted, db is left untouched.")

    # Shared help formatter: include defaults, render help at 120 columns.
    F = lambda prog: argparse.ArgumentDefaultsHelpFormatter(prog, width=120)
    p = argparse.ArgumentParser(formatter_class=F)  # type: ignore
    subp = p.add_subparsers(dest='mode', )
    ep = subp.add_parser('index',
                         help='Create/update the link database',
                         formatter_class=F)
    add_index_args(ep, default_config_path())
    # TODO use some way to override or provide config only via cmdline?
    ep.add_argument('--intermediate',
                    required=False,
                    help="Used for development, you don't need it")

    sp = subp.add_parser('serve',
                         help='Serve a link database',
                         formatter_class=F)  # type: ignore
    server.setup_parser(sp)

    ap = subp.add_parser(
        'demo',
        help='Demo mode: index and serve a directory in single command',
        formatter_class=F)
    # TODO use docstring or something?
    #

    # Helper reused by the 'demo' and 'doctor server' subcommands.
    add_port_arg = lambda p: p.add_argument(
        '--port', type=str, default='13131', help='Port to serve on')

    ap.add_argument('--name',
                    type=str,
                    default='demo',
                    help='Set custom source name')
    add_port_arg(ap)
    # --no-serve stores None into 'port'; downstream a None port means
    # "index only, do not start the server".
    ap.add_argument('--no-serve',
                    action='store_const',
                    const=None,
                    dest='port',
                    help='Pass to only index without running server')
    # 'as' is a Python keyword, so this value is read back below via
    # getattr(args, 'as') rather than args.as.
    ap.add_argument(
        '--as',
        choices=list(sorted(demo_sources().keys())),
        default='guess',
        help=
        'Promnesia source to index as (see https://github.com/karlicoss/promnesia/tree/master/src/promnesia/sources for the full list)',
    )
    add_index_args(ap)
    ap.add_argument('params',
                    nargs='*',
                    help='Optional extra params for the indexer')

    isp = subp.add_parser(
        'install-server',
        help='Install server as a systemd service (for autostart)',
        formatter_class=F)
    install_server.setup_parser(isp)

    cp = subp.add_parser('config', help='Config management')
    # Default action when no config sub-subcommand is given: print help.
    cp.set_defaults(func=lambda *args: cp.print_help())
    scp = cp.add_subparsers()
    ccp = scp.add_parser('check', help='Check config')
    ccp.set_defaults(func=config_check)
    ccp.add_argument('--config',
                     type=Path,
                     default=default_config_path(),
                     help='Config path')

    icp = scp.add_parser('create', help='Create user config')
    icp.add_argument("--config",
                     type=Path,
                     default=default_config_path(),
                     help="Config path")
    icp.set_defaults(func=config_create)

    dp = subp.add_parser('doctor', help='Troubleshooting assistant')
    dp.add_argument('--config',
                    type=Path,
                    default=default_config_path(),
                    help='Config path')
    # Default action when no doctor sub-subcommand is given: print help.
    dp.set_defaults(func=lambda *args: dp.print_help())
    sdp = dp.add_subparsers()
    sdp.add_parser('config',
                   help='Check config').set_defaults(func=config_check)
    sdp.add_parser('database',
                   help='Inspect database').set_defaults(func=cli_doctor_db)
    sdps = sdp.add_parser('server', help='Check server')
    sdps.set_defaults(func=cli_doctor_server)
    add_port_arg(sdps)

    args = p.parse_args()

    # TODO is there a way to print full help? i.e. for all subparsers
    if args.mode is None:
        print('ERROR: Please specify a mode', file=sys.stderr)
        p.print_help(sys.stderr)
        sys.exit(1)

    logger.info("CLI args: %s", args)

    # TODO maybe, it's better for server to compute intermediate representations?
    # the only downside is storage. dunno.
    # worst case -- could use database?

    with get_tmpdir() as tdir:  # TODO??
        if args.mode == 'index':
            do_index(
                config_file=args.config,
                dry=args.dry,
                sources_subset=args.sources,
                overwrite_db=args.overwrite,
            )
        elif args.mode == 'serve':
            server.run(args)
        elif args.mode == 'demo':
            # TODO not sure if 'as' is that useful
            # something like Telegram/Takeout is too hard to setup to justify adhoc mode like this?
            do_demo(
                index_as=getattr(args, 'as'),
                params=args.params,
                port=args.port,
                config_file=args.config,
                dry=args.dry,
                name=args.name,
                sources_subset=args.sources,
                overwrite_db=args.overwrite,
            )
        elif args.mode == 'install-server':  # todo rename to 'autostart' or something?
            install_server.install(args)
        elif args.mode == 'config':
            args.func(args)
        elif args.mode == 'doctor':
            args.func(args)
        else:
            raise AssertionError(f'unexpected mode {args.mode}')
Ejemplo n.º 3
0
    def format_help(self):
        """Individual formatting of sections in the help text.

        When we use the same formatter for all, we either would lose the
        explicit spacing in the epilog, or lose the formatting in other
        sections. In this function we change the formatters, render
        different sections differently, and then concatenate everything
        into a single output.

        Returns:
            str: the fully assembled help text.
        """

        # we get our formatters here, fill them up down below, and finally render them at the end:
        usage_formatter = argparse.ArgumentDefaultsHelpFormatter(self.prog)
        description_formatter = argparse.RawDescriptionHelpFormatter(self.prog)
        epilog_formatter = argparse.RawDescriptionHelpFormatter(prog=self.prog)
        separator_formatter = argparse.RawDescriptionHelpFormatter(
            prog=self.prog)

        # usage
        usage_formatter.add_usage(self.usage, self._actions,
                                  self._mutually_exclusive_groups)

        # positionals, optionals and user-defined groups
        # NOTE(review): 'atty', 'bg', 'fg' and 'attr' come from module scope —
        # presumably a TTY-detection flag plus helpers from the 'colored'
        # package; confirm against the file's imports.
        for action_group in self._action_groups:
            if atty:
                # Pad the title to 80 columns so the colored background bar
                # spans the whole terminal line.
                section_title = action_group.title + ' ' * (80 - len(
                    action_group.title) if len(action_group.title) < 80 else 0)
                section_header = bg(250) + fg(236) + attr(
                    'bold') + section_title + attr('reset')
            else:
                section_header = action_group.title

            usage_formatter.start_section(section_header)
            usage_formatter.add_text(action_group.description)
            usage_formatter.add_arguments(action_group._group_actions)
            usage_formatter.end_section()

        # separator: a full-width horizontal rule
        separator_formatter.add_text('━' * 80 + '\n')

        # description
        if atty:
            description_text = [
                attr('bold') + '🔥 Program description:' + attr('reset'), ''
            ]
        else:
            description_text = ['🔥 Program description:', '']

        # Re-flow the dedented description into 77-column lines, each
        # indented by three spaces.
        description_text.extend([
            textwrap.indent(l, '   ') for l in textwrap.wrap(
                " ".join(textwrap.dedent(self.description).split()), width=77)
        ])
        description_formatter.add_text('\n'.join(description_text))

        # epilog
        epilog_formatter.add_text(self.epilog)

        # determine help from format above; on a TTY, strip the trailing
        # colon that argparse appends after each (colorized) section header
        help_text = '\n'.join([
            usage_formatter.format_help().replace(":\n", "\n")
            if atty else usage_formatter.format_help(),
            separator_formatter.format_help(),
            description_formatter.format_help(),
            epilog_formatter.format_help(),
            separator_formatter.format_help()
        ]) + '\n'

        return help_text
Ejemplo n.º 4
0
def _getargs():
    """Parse the command-line arguments of the CSV-to-spreadsheet tool.

    Returns:
        argparse.Namespace: the parsed command-line arguments.
    """
    # Defaults-aware help formatter with a wide help column and 120-col
    # lines. (A previous plain-HelpFormatter lambda assigned here was
    # immediately overwritten — dead code, removed.)
    formatter = lambda prog: argparse.ArgumentDefaultsHelpFormatter(
        prog, max_help_position=50, width=120)
    parser = argparse.ArgumentParser(
        description=_description, epilog=_epilog,
        formatter_class=formatter)

    parser.add_argument('-d',
                        '--debug',
                        help='increase output debug',
                        action='count',
                        default=0)
    parser.add_argument('-v',
                        '--verbose',
                        help='output verbosity',
                        action='store_true')
    parser.add_argument('-V',
                        '--version',
                        help='print version number',
                        action='version',
                        version='%(prog)s ' + _version)
    parser.add_argument('-fs',
                        '--file_source',
                        help='file source CSV for rewrite layout',
                        type=str,
                        required=True)
    parser.add_argument('-fl',
                        '--file_layout',
                        help='file spreadsheet (only .xls/.xlsx)',
                        type=str,
                        required=True)
    parser.add_argument('-fo',
                        '--file_output',
                        help='file output merge on source-layout',
                        type=str,
                        required=True)
    parser.add_argument('-cv',
                        '--cell_values',
                        help='modify cell value format: "1,1=test"',
                        type=str,
                        nargs='+')
    parser.add_argument('-ci',
                        '--cell_images',
                        help='insert cell image format: "1,1=file"',
                        type=str,
                        nargs='+')
    parser.add_argument('-cs',
                        '--csv_delimiter',
                        help='csv delimiter char',
                        type=str,
                        default=_default_csv_delimiter)
    parser.add_argument('-cd',
                        '--csv_quotechar',
                        help='csv quote char',
                        type=str,
                        default=_default_csv_quotechar)

    args = parser.parse_args()

    return args
def get_parser():
    """Build the argument parser for the activation-maximization script.

    Defaults come from the module-level ``settings`` object (settings.py,
    optionally overridden by settings_local.py) and may be further
    overridden on the command line.

    Returns:
        argparse.ArgumentParser: the configured parser.
    """
    parser = argparse.ArgumentParser(
        description=
        'Script to find, with or without regularization, images that cause high or low activations of specific neurons in a network via numerical optimization. Settings are read from settings.py, overridden in settings_local.py, and may be further overridden on the command line.',
        formatter_class=lambda prog: argparse.ArgumentDefaultsHelpFormatter(
            prog, width=100))

    # Network and data options
    parser.add_argument('--caffe-root',
                        type=str,
                        default=settings.caffevis_caffe_root,
                        help='Path to caffe root directory.')
    parser.add_argument('--deploy-proto',
                        type=str,
                        default=settings.caffevis_deploy_prototxt,
                        help='Path to caffe network prototxt.')
    parser.add_argument('--net-weights',
                        type=str,
                        default=settings.caffevis_network_weights,
                        help='Path to caffe network weights.')
    parser.add_argument(
        '--mean',
        type=str,
        default=repr(settings.caffevis_data_mean),
        help=
        '''Mean. The mean may be None, a tuple of one mean value per channel, or a string specifying the path to a mean image to load. Because of the multiple datatypes supported, this argument must be specified as a string that evaluates to a valid Python object. For example: "None", "(10,20,30)", and "'mean.npy'" are all valid values. Note that to specify a string path to a mean file, it must be passed with quotes, which usually entails passing it with double quotes in the shell! Alternately, just provide the mean in settings_local.py.'''
    )
    parser.add_argument(
        '--channel-swap-to-rgb',
        type=str,
        default='(2,1,0)',
        help=
        'Permutation to apply to channels to change to RGB space for plotting. Hint: (0,1,2) if your network is trained for RGB, (2,1,0) if it is trained for BGR.'
    )
    parser.add_argument('--data-size',
                        type=str,
                        default='(227,227)',
                        help='Size of network input.')

    #### FindParams

    # Where to start
    parser.add_argument(
        '--start-at',
        type=str,
        default='mean_plus_rand',
        choices=('mean_plus_rand', 'randu', 'mean'),
        help='How to generate x0, the initial point used in optimization.')
    parser.add_argument(
        '--rand-seed',
        type=int,
        default=0,
        help=
        'Random seed used for generating the start-at image (use different seeds to generate different images).'
    )

    # What to optimize
    parser.add_argument(
        '--push-layer',
        type=str,
        default='fc8',
        help=
        'Name of layer that contains the desired neuron whose value is optimized.'
    )
    parser.add_argument(
        '--push-channel',
        type=int,
        # int literal rather than the string '130': argparse only converts
        # string defaults through type=, so a plain int is the robust form.
        default=130,
        help=
        'Channel number for desired neuron whose value is optimized (channel for conv, neuron index for FC).'
    )
    parser.add_argument(
        '--push-spatial',
        type=str,
        default='None',
        help=
        'Which spatial location to push for conv layers. For FC layers, set this to None. For conv layers, set it to a tuple, e.g. when using `--push-layer conv5` on AlexNet, --push-spatial (6,6) will maximize the center unit of the 13x13 spatial grid.'
    )
    parser.add_argument(
        '--push-dir',
        type=float,
        default=1,
        help=
        'Which direction to push the activation of the selected neuron, that is, the value used to begin backprop. For example, use 1 to maximize the selected neuron activation and  -1 to minimize it.'
    )

    # Use regularization?
    parser.add_argument('--decay',
                        type=float,
                        default=0,
                        help='Amount of L2 decay to use.')
    parser.add_argument(
        '--blur-radius',
        type=float,
        default=0,
        help=
        'Radius in pixels of blur to apply after each BLUR_EVERY steps. If 0, perform no blurring. Blur sizes between 0 and 0.3 work poorly.'
    )
    parser.add_argument(
        '--blur-every',
        type=int,
        default=0,
        help='Blur every BLUR_EVERY steps. If 0, perform no blurring.')
    parser.add_argument(
        '--small-val-percentile',
        type=float,
        default=0,
        help=
        'Induce sparsity by setting pixels with absolute value under SMALL_VAL_PERCENTILE percentile to 0. Not discussed in paper. 0 to disable.'
    )
    parser.add_argument(
        '--small-norm-percentile',
        type=float,
        default=0,
        help=
        'Induce sparsity by setting pixels with norm under SMALL_NORM_PERCENTILE percentile to 0. \\theta_{n_pct} from the paper. 0 to disable.'
    )
    parser.add_argument(
        '--px-benefit-percentile',
        type=float,
        default=0,
        help=
        'Induce sparsity by setting pixels with contribution under PX_BENEFIT_PERCENTILE percentile to 0. Mentioned briefly in paper but not used. 0 to disable.'
    )
    parser.add_argument(
        '--px-abs-benefit-percentile',
        type=float,
        default=0,
        help=
        'Induce sparsity by setting pixels with contribution under PX_BENEFIT_PERCENTILE percentile to 0. \\theta_{c_pct} from the paper. 0 to disable.'
    )

    # How much to optimize
    parser.add_argument(
        '--lr-policy',
        type=str,
        default='constant',
        choices=LR_POLICY_CHOICES,
        help='Learning rate policy. See description in lr-params.')
    parser.add_argument(
        '--lr-params',
        type=str,
        default='{"lr": 1}',
        help=
        'Learning rate params, specified as a string that evalutes to a Python dict. Params that must be provided dependon which lr-policy is selected. The "constant" policy requires the "lr" key and uses the constant given learning rate. The "progress" policy requires the "max_lr" and "desired_prog" keys and scales the learning rate such that the objective function will change by an amount equal to DESIRED_PROG under a linear objective assumption, except the LR is limited to MAX_LR. The "progress01" policy requires the "max_lr", "early_prog", and "late_prog_mult" keys and is tuned for optimizing neurons with outputs in the [0,1] range, e.g. neurons on a softmax layer. Under this policy optimization slows down as the output approaches 1 (see code for details).'
    )
    parser.add_argument('--max-iter',
                        type=int,
                        default=500,
                        help='Number of iterations of the optimization loop.')

    # Where to save results
    parser.add_argument(
        '--output-prefix',
        type=str,
        default='optimize_results/opt',
        help='Output path and filename prefix (default: optimize_results/opt)')
    parser.add_argument(
        '--output-template',
        type=str,
        default='%(p.push_layer)s_%(p.push_channel)04d_%(p.rand_seed)d',
        help=
        'Output filename template; see code for details (default: "%%(p.push_layer)s_%%(p.push_channel)04d_%%(p.rand_seed)d"). '
        'The default output-prefix and output-template produce filenames like "optimize_results/opt_prob_0278_0_best_X.jpg"'
    )
    parser.add_argument(
        '--brave',
        action='store_true',
        help=
        'Allow overwriting existing results files. Default: off, i.e. cowardly refuse to overwrite existing files.'
    )
    parser.add_argument(
        '--skipbig',
        action='store_true',
        help=
        'Skip outputting large *info_big.pkl files (contains pickled version of x0, last x, best x, first x that attained max on the specified layer.'
    )

    return parser
Ejemplo n.º 6
0
def get_config():
    '''
    Defines the command line parameters and returns the parameters passed to the script

    Returns:
        tuple -- (argparse.Namespace, argparse.ArgumentParser): the parsed
        command line parameters and the parser that produced them
    '''
    parser = argparse.ArgumentParser(
        prog=__file__,
        description="Take word packs as input and output a file with the synonyms highlighted.",
        formatter_class=lambda prog: argparse.ArgumentDefaultsHelpFormatter(prog, width=120))

    parser.add_argument(
        "-w", "--wordpacks",
        metavar='PATH',
        required=True,
        help="Wordpacks file to use as input")
    parser.add_argument(
        "-r", "--relations",
        metavar='PATH',
        required=True,
        help="Word relations file to use as input")
    # The regex defaults below use raw strings: sequences such as \[ and \w
    # are invalid string escapes and would raise a SyntaxWarning on modern
    # Python if left in plain string literals. The pattern text is unchanged.
    parser.add_argument(
        "-p", "--primary-word-regex",
        metavar='REGEX',
        required=False,
        default=r"^#([^\[]+)",
        help="Regex to parse the list of words in the word relations file")
    parser.add_argument(
        "-s", "--synonym-regex",
        required=False,
        metavar='REGEX',
        default=r'\[(?:associated|syn|broader|custom-list|handcraft|memberof|narrower)[\w\s\-\{\}]*?=\d+\.\d+\]:([^;]+)',
        help="Regex to parse the list of synonyms in the word relations file")
    parser.add_argument(
        "-ss", "--synonym-score-regex",
        metavar='REGEX',
        required=False,
        default=r'\[(?:associated|syn|broader|custom-list|handcraft|memberof|narrowe)[\w\s\-\{\}]*?-score\]:([^;]+)',
        help="Regex to parse the score of synonyms in the word relations file")
    parser.add_argument(
        "-d", "--word-delimeter",
        metavar='CHAR',
        required=False,
        default="|",
        help="Delimiter to split the text matched by regex --synonym-regex into a list")
    parser.add_argument(
        "-c", "--score-cutoff",
        metavar='NUM',
        required=False,
        # numeric literal instead of the string "6.0"; argparse does convert
        # string defaults through type=, but a float default is unambiguous.
        default=6.0,
        type=float,
        help="Eliminate synonyms that are below the provided value")
    parser.add_argument(
        "-o", "--output",
        metavar='PATH',
        required=False,
        default="output.txt",
        help="Highlighted synonyms output file path")

    return parser.parse_args(), parser
Ejemplo n.º 7
0
def parse_args():
    """Build and parse the command line for the automated SPHIRE pipeline.

    Every option that carries a ``dest`` of the form ``XXX_..._XXX`` maps
    one-to-one onto a placeholder of the same name in the submission
    templates; the parsed value replaces that placeholder verbatim.  Each
    pipeline step contributes a "(required)" group and/or an "(optional)"
    group with a ``--skip_*`` flag and an output directory.

    Returns:
        argparse.Namespace: the parsed command-line arguments.
    """
    parser = argparse.ArgumentParser(
        formatter_class=lambda prog: argparse.ArgumentDefaultsHelpFormatter(
            prog, width=9999))
    parser.add_argument("output_directory",
                        help="Output directory to store the outputs")
    parser.add_argument(
        "--dry_run",
        action="store_true",
        default=False,
        help="Do not execute the submission script.",
    )

    group = parser.add_argument_group("MPI settings (required)")
    group.add_argument("--mpi_procs",
                       type=int,
                       default=2,
                       help="Number of processors to use.")
    group.add_argument(
        "--mpi_job_name",
        type=str,
        default="auto_sphire",
        help="Job name of the submitted job.",
    )
    group.add_argument(
        "--mpi_submission_command",
        type=str,
        default="sbatch",
        help="Submission command, e.g. sbatch, qsub, ...",
    )
    group.add_argument(
        "--mpi_submission_template",
        type=str,
        default="TEMPLATES/submit.sh",
        help="Submission template.",
    )

    group = parser.add_argument_group("Global settings (required)")
    group.add_argument(
        "--apix",
        dest="XXX_SP_PIXEL_SIZE_XXX",
        type=float,
        default=1.0,
        help="Pixel size in A/pixel.",
    )
    group.add_argument(
        "--mol_mass",
        dest="XXX_SP_MOL_MASS_XXX",
        type=float,
        default=250.0,
        help="Molecular mass of the protein in kDa. Used to calculate the masking density threshold.",
    )
    group.add_argument(
        "--radius",
        dest="XXX_SP_PARTICLE_RADIUS_XXX",
        type=int,
        default=80,
        help="Particle radius in pixels. Used for normalization.",
    )
    group.add_argument(
        "--box_size",
        dest="XXX_SP_BOX_SIZE_XXX",
        type=int,
        default=200,
        help="Particle box size in pixels.",
    )
    group.add_argument(
        "--symmetry",
        dest="XXX_SP_SYMMETRY_XXX",
        type=str,
        default="c1",
        help="Symmetry of the particle.",
    )
    group.add_argument(
        "--voltage",
        dest="XXX_SP_VOLTAGE_XXX",
        type=float,
        default=300.0,
        help="Microscope voltage in kV.",
    )
    group.add_argument(
        "--mtf",
        dest="XXX_SP_MTF_XXX",
        type=str,
        default="",
        help="MTF file for the sharpening step",
    )
    group.add_argument(
        "--negative_stain",
        action="store_true",
        default=False,
        help="Input is negative stain.",
    )
    group.add_argument(
        "--phase_plate",
        action="store_true",
        default=False,
        help="Input is phase_plate.",
    )
    group.add_argument(
        "--fill_rviper_mask",
        action="store_true",
        default=False,
        help="Fill RVIPER mask.",
    )
    group.add_argument(
        "--memory_per_node",
        dest="XXX_SP_MEMORY_PER_NODE_XXX",
        type=float,
        default=-1,
        help="Available memory per node.",
    )

    group = parser.add_argument_group(
        "Unblur settings (required to run movie alignment)")
    group.add_argument(
        "--unblur_path",
        dest="XXX_SP_UNBLUR_PATH_XXX",
        type=str,
        default=None,
        help="Path pointing to the unblur executable.",
    )
    group.add_argument(
        "--unblur_mic_pattern",
        dest="XXX_SP_UNBLUR_MICROGRAPH_PATTERN_XXX",
        type=str,
        default=None,
        help="Pattern of the micrographs to use for motion correction.",
    )
    group.add_argument(
        "--unblur_exp_per_frame",
        dest="XXX_SP_UNBLUR_EXP_PER_FRAME_XXX",
        type=float,
        default=None,
        help="Exposure per frame. Used for dose adjustment.",
    )
    group.add_argument(
        "--unblur_gain_file",
        dest="XXX_SP_UNBLUR_GAIN_FILE_XXX",
        type=str,
        default=None,
        help="File containing the information for gain correction. Not required if the movies are already gain corrected.",
    )

    group = parser.add_argument_group("Unblur settings (optional)")
    group.add_argument(
        "--skip_unblur",
        action="store_true",
        default=False,
        help="Do not run motion correction",
    )
    group.add_argument(
        "--unblur_output_dir",
        dest="XXX_SP_UNBLUR_OUTPUT_DIR_XXX",
        type=str,
        default="UNBLUR",
        help="Unblur output directory.",
    )
    group.add_argument(
        "--unblur_addition",
        dest="XXX_SP_UNBLUR_ADDITION_XXX",
        type=str,
        default="",
        help="Additional parameters that are not part of the required ones.",
    )

    group = parser.add_argument_group(
        "CTER settings (required to run CTF estimation)")
    group.add_argument(
        "--cter_cs",
        dest="XXX_SP_CTER_CS_XXX",
        type=float,
        default=2.7,
        help="Spherical aberration of the microscope.",
    )

    group = parser.add_argument_group("CTER settings (optional)")
    group.add_argument(
        "--skip_cter",
        action="store_true",
        default=False,
        help="Do not run CTF estimation.",
    )
    group.add_argument(
        "--cter_output_dir",
        dest="XXX_SP_CTER_OUTPUT_DIR_XXX",
        type=str,
        default="CTER",
        help="CTER output directory.",
    )
    group.add_argument(
        "--cter_mic_pattern",
        dest="XXX_SP_CTER_MICROGRAPH_PATTERN_XXX",
        type=str,
        default="Mics/*.mrc",
        help="Micrograph pattern in case unblur is skipped.",
    )
    group.add_argument(
        "--cter_window_size",
        # Fixed: dest was "XXX_SP_CTER_WINDOW_SIZE" (missing the trailing
        # _XXX every other placeholder dest has), so the value could never
        # replace the XXX_SP_CTER_WINDOW_SIZE_XXX template placeholder.
        dest="XXX_SP_CTER_WINDOW_SIZE_XXX",
        type=int,
        default=1024,
        help="CTF estimation window size.",
    )
    group.add_argument(
        "--cter_addition",
        dest="XXX_SP_CTER_ADDITION_XXX",
        type=str,
        default="",
        help="Additional parameters that are not part of the required ones.",
    )

    group = parser.add_argument_group(
        "CRYOLO settings (required to run particle picking)")
    group.add_argument(
        "--cryolo_predict_path",
        dest="XXX_SP_CRYOLO_PREDICT_PATH_XXX",
        type=str,
        default="/Path/cryolo_predict.py",
        help="Path to the cryolo predict executable.",
    )
    group.add_argument(
        "--cryolo_config_path",
        dest="XXX_SP_CRYOLO_CONFIG_PATH_XXX",
        type=str,
        default=None,
        help="Path to the cryolo config file",
    )
    group.add_argument(
        "--cryolo_model_path",
        dest="XXX_SP_CRYOLO_MODEL_PATH_XXX",
        type=str,
        default=None,
        help="Path to the cryolo model file",
    )
    group.add_argument(
        "--cryolo_gpu",
        dest="XXX_SP_CRYOLO_GPU_XXX",
        type=str,
        default="0",
        help="Cryolo GPU list.",
    )

    group = parser.add_argument_group("CRYOLO settings (optional)")
    group.add_argument(
        "--skip_cryolo",
        action="store_true",
        default=False,
        help="Do not run particle picking.",
    )
    group.add_argument(
        "--cryolo_output_dir",
        dest="XXX_SP_CRYOLO_OUTPUT_DIR_XXX",
        type=str,
        default="CRYOLO_PREDICT",
        help="CRYOLO output directory.",
    )
    group.add_argument(
        "--cryolo_mic_path",
        dest="XXX_SP_CRYOLO_MICROGRAPH_PATH_XXX",
        type=str,
        default="Mics/*.mrc",
        help="Micrograph pattern in case unblur is skipped.",
    )
    group.add_argument(
        "--cryolo_addition",
        dest="XXX_SP_CRYOLO_ADDITION_XXX",
        type=str,
        default="",
        help="Additional parameters that are not part of the required ones.",
    )

    group = parser.add_argument_group("WINDOW settings (optional)")
    group.add_argument(
        "--skip_window",
        action="store_true",
        default=False,
        help="Do not run particle extraction.",
    )
    group.add_argument(
        "--window_box_pattern",
        dest="XXX_SP_WINDOW_BOX_PATTERN_XXX",
        type=str,
        default="Boxes/*.box",
        help="Window box file pattern.",
    )
    group.add_argument(
        "--window_mic_pattern",
        dest="XXX_SP_WINDOW_MICROGRAPH_PATTERN_XXX",
        type=str,
        default="Mics/*.mrc",
        help="Window mrc file pattern.",
    )
    group.add_argument(
        "--window_partres",
        dest="XXX_SP_WINDOW_PARTRES_XXX",
        type=str,
        default="CTER/partres.txt",
        help="CTER partres file. In case of negative stain put this value to the pixel size.",
    )
    group.add_argument(
        "--window_output_dir",
        dest="XXX_SP_WINDOW_OUTPUT_DIR_XXX",
        type=str,
        default="WINDOW",
        help="Window output directory.",
    )
    group.add_argument(
        "--window_addition",
        dest="XXX_SP_WINDOW_ADDITION_XXX",
        type=str,
        default="",
        help="Additional parameters that are not part of the required ones.",
    )

    group = parser.add_argument_group(
        "ISAC2 settings (required to run 2d classification)")
    group.add_argument(
        "--isac2_img_per_grp",
        dest="XXX_SP_ISAC_IMG_PER_GRP_XXX",
        type=int,
        default=100,
        help="Img per group for the ISAC run.",
    )

    group = parser.add_argument_group("ISAC2 settings (optional)")
    group.add_argument(
        "--skip_isac2",
        action="store_true",
        default=False,
        help="Do not run 2d classification.",
    )
    group.add_argument(
        "--isac2_input_stack",
        dest="XXX_SP_ISAC_STACK_XXX",
        type=str,
        default="bdb:path#stack",
        help="Path to the Input stack for ISAC",
    )
    group.add_argument(
        "--isac2_output_dir",
        dest="XXX_SP_ISAC_OUTPUT_DIR_XXX",
        type=str,
        default="ISAC",
        help="ISAC2 output directory.",
    )
    group.add_argument(
        "--isac2_addition",
        dest="XXX_SP_ISAC_ADDITION_XXX",
        type=str,
        default="",
        help="Additional parameters that are not part of the required ones.",
    )

    group = parser.add_argument_group("Substack ISAC2 settings (optional)")
    group.add_argument(
        "--substack_output_dir",
        dest="XXX_SP_SUBSTACK_OUTPUT_DIR_XXX",
        type=str,
        default="SUBSTACK",
        help="Substack ISAC2 output directory.",
    )

    group = parser.add_argument_group(
        "Automatic 2D class selection (required)")
    group.add_argument(
        "--cinderella_predict_path",
        dest="XXX_SP_CINDERELLA_PREDICT_PATH_XXX",
        type=str,
        default="/Path/sp_cinderella_predict.py",
        help="Path to the cinderella executable.",
    )
    group.add_argument(
        "--cinderella_model_path",
        dest="XXX_SP_CINDERELLA_MODEL_PATH_XXX",
        type=str,
        default="cinderella_model.h5",
        help="Path to trained cinderella model",
    )

    group = parser.add_argument_group(
        "Automatic 2D class selection (optional)")
    group.add_argument(
        "--skip_cinderella",
        action="store_true",
        default=False,
        help="Do not run automatic 2D class selection.",
    )
    group.add_argument(
        "--cinderella_output_dir",
        dest="XXX_SP_CINDERELLA_OUTPUT_DIR_XXX",
        type=str,
        default="AUTO2D",
        help="Cinderalla output directory.",
    )
    group.add_argument(
        "--cinderella_input_stack",
        dest="XXX_SP_CINDERELLA_STACK_XXX",
        type=str,
        default="isac_classes.h5",
        help="Path to ISAC class stack",
    )
    group.add_argument(
        "--cinderella_conf_thresh",
        dest="XXX_SP_CINDERELLA_CONF_THRESH_XXX",
        type=float,
        default=0.5,
        help="Classes with a confidence higher as that threshold are classified as good.",
    )
    group.add_argument(
        "--cinderella_gpu",
        dest="XXX_SP_GPU_ID_XXX",
        type=int,
        default=-1,
        help="GPU ID.",
    )
    group.add_argument(
        "--cinderella_batch_size",
        dest="XXX_SP_BATCH_SIZE_XXX",
        type=int,
        default=32,
        help="Number of images in one batch during prediction.",
    )

    group = parser.add_argument_group("RVIPER settings (optional)")
    group.add_argument(
        "--skip_rviper",
        action="store_true",
        default=False,
        help="Do not run 3d ab-initio reconstruction.",
    )
    group.add_argument(
        "--rviper_input_stack",
        dest="XXX_SP_RVIPER_INPUT_STACK_XXX",
        type=str,
        default="bdb:path#stack",
        help="Path to the input stack for RVIPER",
    )
    group.add_argument(
        "--rviper_output_dir",
        dest="XXX_SP_RVIPER_OUTPUT_DIR_XXX",
        type=str,
        default="RVIPER",
        help="RVIPER output directory.",
    )
    group.add_argument(
        "--rviper_addition",
        dest="XXX_SP_RVIPER_ADDITION_XXX",
        type=str,
        default="",
        help="Additional parameters that are not part of the required ones.",
    )

    group = parser.add_argument_group(
        "RVIPER volume adjustment settings (optional)")
    group.add_argument(
        "--skip_adjust_rviper",
        action="store_true",
        default=False,
        help="Skip adjusting a volume.",
    )
    group.add_argument(
        "--adjust_rviper_resample",
        dest="XXX_SP_ADJUSTMENT_RESAMPLE_RATIO_XXX",
        type=str,
        default="bdb:path#stack",
        help="Resample ratio for RVIPER.",
    )
    group.add_argument(
        "--adjust_rviper_output_dir",
        dest="XXX_SP_ADJUSTMENT_OUTPUT_DIR_XXX",
        type=str,
        default="RVIPER_ADJUSTMENT",
        help="RVIPER volume adjustment output directory.",
    )
    group.add_argument(
        "--adjust_rviper_addition",
        dest="XXX_SP_ADJUSTMENT_ADDITION_XXX",
        type=str,
        default="",
        help="Additional parameters that are not part of the required ones.",
    )

    group = parser.add_argument_group("RVIPER mask settings (optional)")
    group.add_argument(
        "--skip_mask_rviper",
        action="store_true",
        default=False,
        help="Skip creating a mask.",
    )
    group.add_argument(
        "--mask_rviper_ndilation",
        # NOTE(review): "NDILAITON" is misspelled here and in every other
        # dilation dest below; the template placeholders presumably share
        # the misspelling -- confirm against TEMPLATES/ before renaming.
        dest="XXX_SP_MASK_RVIPER_NDILAITON_XXX",
        type=int,
        default=3,
        help="Number of dilations of the mask. 1 Dilation adds about 2 pixel to the binary volume.",
    )
    group.add_argument(
        "--mask_rviper_soft_edge",
        dest="XXX_SP_MASK_RVIPER_SOFT_EDGE_XXX",
        type=int,
        default=10,
        help="Number of pixels for the soft edge.",
    )
    group.add_argument(
        "--mask_rviper_output_dir",
        dest="XXX_SP_MASK_RVIPER_OUTPUT_DIR_XXX",
        type=str,
        default="RVIPER_MASK",
        help="RVIPER mask output directory.",
    )
    group.add_argument(
        "--mask_rviper_addition",
        dest="XXX_SP_MASK_RVIPER_ADDITION_XXX",
        type=str,
        default="",
        help="Additional parameters that are not part of the required ones.",
    )

    group = parser.add_argument_group("Meridien settings (optional)")
    group.add_argument(
        "--skip_meridien",
        action="store_true",
        default=False,
        help="Do not run 3d refinement.",
    )
    group.add_argument(
        "--meridien_input_volume",
        dest="XXX_SP_MERIDIEN_INPUT_VOLUME_XXX",
        type=str,
        default="ref_vol.hdf",
        help="Path to the ref_vol.hdf file",
    )
    group.add_argument(
        "--meridien_input_mask",
        dest="XXX_SP_MERIDIEN_INPUT_MASK_XXX",
        type=str,
        default="mask.hdf",
        help="Path to the mask.hdf file",
    )
    group.add_argument(
        "--meridien_input_stack",
        dest="XXX_SP_MERIDIEN_INPUT_STACK_XXX",
        type=str,
        default="bdb:path#stack",
        help="Path to the Input stack for Meridien",
    )
    group.add_argument(
        "--meridien_output_dir",
        dest="XXX_SP_MERIDIEN_OUTPUT_DIR_XXX",
        type=str,
        default="MERIDIEN",
        help="Meridien output directory.",
    )
    group.add_argument(
        "--meridien_addition",
        dest="XXX_SP_MERIDIEN_ADDITION_XXX",
        type=str,
        default="",
        help="Additional parameters that are not part of the required ones.",
    )

    group = parser.add_argument_group(
        "Sharpening Meridien settings (optional)")
    group.add_argument(
        "--skip_sharpening_meridien",
        action="store_true",
        default=False,
        help="Skip creating a mask.",
    )
    group.add_argument(
        "--sharpening_meridien_ndilation",
        dest="XXX_SP_SHARPENING_MERIDIEN_NDILAITON_XXX",
        type=int,
        default=2,
        help="Number of dilations of the mask. 1 Dilation adds about 2 pixel to the binary volume.",
    )
    group.add_argument(
        "--sharpening_meridien_soft_edge",
        dest="XXX_SP_SHARPENING_MERIDIEN_SOFT_EDGE_XXX",
        type=int,
        default=1,
        help="Number of pixels for the soft edge.",
    )
    group.add_argument(
        "--sharpening_meridien_output_dir",
        dest="XXX_SP_SHARPENING_MERIDIEN_OUTPUT_DIR_XXX",
        type=str,
        default="SHARPENING",
        help="Sharpening output directory.",
    )
    group.add_argument(
        "--sharpening_meridien_addition",
        dest="XXX_SP_SHARPENING_MERIDIEN_ADDITION_XXX",
        type=str,
        default="",
        help="Additional parameters that are not part of the required ones.",
    )

    # The RESTACK group also hosts the restack_window / restack_meridien /
    # restack_sharpening follow-up options below.
    group = parser.add_argument_group("RESTACK settings (optional)")
    group.add_argument("--skip_restack",
                       action="store_true",
                       default=False,
                       help="Skip restacking.")
    group.add_argument(
        "--restack_output_dir",
        dest="XXX_SP_RESTACK_OUTPUT_DIR_XXX",
        type=str,
        default="RESTACK",
        help="Restacking output directory.",
    )
    group.add_argument(
        "--restack_addition",
        dest="XXX_SP_RESTACK_ADDITION_XXX",
        type=str,
        default="",
        help="Additional parameters that are not part of the required ones.",
    )

    group.add_argument(
        "--restack_window_output_dir",
        dest="XXX_SP_RESTACK_WINDOW_OUTPUT_DIR_XXX",
        type=str,
        default="RESTACK_WINDOW",
        help="Restacking output directory.",
    )
    group.add_argument(
        "--restack_window_mic_pattern",
        dest="XXX_SP_RESTACK_WINDOW_MICROGRAPH_PATTERN_XXX",
        type=str,
        default=None,
        help="Micrograph pattern for restacking.",
    )
    group.add_argument(
        "--restack_window_partres",
        # NOTE(review): dest lacks the WINDOW infix unlike its siblings --
        # presumably matches the template placeholder as-is; verify.
        dest="XXX_SP_RESTACK_PARTRES_XXX",
        type=str,
        default=None,
        help="Partres file",
    )
    group.add_argument(
        "--restack_window_addition",
        dest="XXX_SP_RESTACK_WINDOW_ADDITION_XXX",
        type=str,
        default="",
        help="Additional parameters that are not part of the required ones.",
    )

    group.add_argument(
        "--restack_meridien_output_dir",
        dest="XXX_SP_RESTACK_MERIDIEN_OUTPUT_DIR_XXX",
        type=str,
        default="RESTACK_MERIDIEN",
        help="Restacking output directory.",
    )
    group.add_argument(
        "--restack_meridien_addition",
        dest="XXX_SP_RESTACK_MERIDIEN_ADDITION_XXX",
        type=str,
        default="",
        help="Additional parameters that are not part of the required ones.",
    )

    group.add_argument(
        "--restack_sharpening_ndilation",
        dest="XXX_SP_RESTACK_SHARPENING_NDILAITON_XXX",
        type=int,
        default=2,
        help="Number of dilations of the mask. 1 Dilation adds about 2 pixel to the binary volume.",
    )
    group.add_argument(
        "--restack_sharpening_soft_edge",
        dest="XXX_SP_RESTACK_SHARPENING_SOFT_EDGE_XXX",
        type=int,
        default=1,
        help="Number of pixels for the soft edge.",
    )
    group.add_argument(
        "--restack_sharpening_output_dir",
        dest="XXX_SP_RESTACK_SHARPENING_OUTPUT_DIR_XXX",
        type=str,
        default="RESTACK_SHARPENING",
        help="Restacking output directory.",
    )
    group.add_argument(
        "--restack_sharpening_addition",
        dest="XXX_SP_RESTACK_SHARPENING_ADDITION_XXX",
        type=str,
        default="",
        help="Additional parameters that are not part of the required ones.",
    )

    # The CTF_REFINE group also hosts the ctf_meridien / ctf_sharpening
    # follow-up options below.
    group = parser.add_argument_group("CTF_REFINE settings (optional)")
    group.add_argument(
        "--skip_ctf_refine",
        action="store_true",
        default=False,
        help="Skip CTF refinement.",
    )
    group.add_argument(
        "--ctf_refine_output_dir",
        dest="XXX_SP_CTF_REFINE_OUTPUT_DIR_XXX",
        type=str,
        default="CTF_REFINE",
        help="Restacking output directory.",
    )
    group.add_argument(
        "--ctf_refine_addition",
        dest="XXX_SP_CTF_REFINE_ADDITION_XXX",
        type=str,
        default="",
        help="Additional parameters that are not part of the required ones.",
    )

    group.add_argument(
        "--ctf_meridien_output_dir",
        dest="XXX_SP_CTF_MERIDIEN_OUTPUT_DIR_XXX",
        type=str,
        default="CTF_MERIDIEN",
        help="Restacking output directory.",
    )
    group.add_argument(
        "--ctf_meridien_addition",
        dest="XXX_SP_CTF_MERIDIEN_ADDITION_XXX",
        type=str,
        default="",
        help="Additional parameters that are not part of the required ones.",
    )

    group.add_argument(
        "--ctf_sharpening_ndilation",
        dest="XXX_SP_CTF_SHARPENING_NDILAITON_XXX",
        type=int,
        default=2,
        help="Number of dilations of the mask. 1 Dilation adds about 2 pixel to the binary volume.",
    )
    group.add_argument(
        "--ctf_sharpening_soft_edge",
        dest="XXX_SP_CTF_SHARPENING_SOFT_EDGE_XXX",
        type=int,
        default=1,
        help="Number of pixels for the soft edge.",
    )
    group.add_argument(
        "--ctf_sharpening_output_dir",
        dest="XXX_SP_CTF_SHARPENING_OUTPUT_DIR_XXX",
        type=str,
        default="CTF_SHARPENING",
        help="Restacking output directory.",
    )
    group.add_argument(
        "--ctf_sharpening_addition",
        dest="XXX_SP_CTF_SHARPENING_ADDITION_XXX",
        type=str,
        default="",
        help="Additional parameters that are not part of the required ones.",
    )

    args = parser.parse_args()
    return args
Ejemplo n.º 8
0
def main():
    """Entry point for the Ros Workflow CLI.

    Parses the command line and executes a workflow either remotely
    (--api, against the host given by --server) or locally through the
    asyncio executor.  The result graph can then be published to NDEx
    (--ndex) and/or written to a file or stdout (--out).
    """
    arg_parser = argparse.ArgumentParser(
        description='Ros Workflow CLI',
        formatter_class=lambda prog: argparse.ArgumentDefaultsHelpFormatter(
            prog, max_help_position=60))
    # Fixed help text: --api is a store_true flag selecting remote
    # execution; the server URL itself comes from --server.
    arg_parser.add_argument('-a',
                            '--api',
                            help="Execute the workflow remotely via the Ros API server (see --server).",
                            action="store_true")
    arg_parser.add_argument('-w',
                            '--workflow',
                            help="Workflow to execute.",
                            default="workflow_one.ros")
    arg_parser.add_argument('-s',
                            '--server',
                            help="Hostname of api server",
                            default="http://localhost:5002")
    # NOTE(review): argparse appends into this mutable default list; fine
    # for a one-shot CLI, but repeated parse_args() calls would accumulate.
    arg_parser.add_argument('-i',
                            '--arg',
                            help="Add an argument expressed as key=val",
                            action='append',
                            default=[])
    arg_parser.add_argument(
        '-o',
        '--out',
        help="Output the workflow result graph to a file. Use 'stdout' to print to terminal."
    )
    arg_parser.add_argument('-l',
                            '--libpath',
                            help="A directory containing workflow modules.",
                            action='append',
                            default=["."])
    arg_parser.add_argument(
        '-n',
        '--ndex',
        help="Name of the graph to publish to NDEx. Requires valid ~/.ndex credential file."
    )
    arg_parser.add_argument('--validate',
                            help="Validate inputs and outputs",
                            action="store_true")
    args = arg_parser.parse_args()

    LoggingUtil.setup_logging()

    # Parse "key=val" workflow arguments into a dict.  Split on the first
    # '=' only so that values may themselves contain '=' (crashed before).
    wf_args = {k: v for k, v in [arg.split("=", 1) for arg in args.arg]}
    response = None
    if args.api:
        # Use the Ros client to run a workflow remotely.
        client = Client(url=args.server)
        ros_result = client.run(workflow=args.workflow,
                                args=wf_args,
                                library_path=args.libpath)
        response = ros_result.result
    else:
        # Execute locally via python async.
        executor = AsyncioExecutor(workflow=Workflow.get_workflow(
            workflow=args.workflow, inputs=wf_args, library_path=args.libpath))
        tasks = [asyncio.ensure_future(executor.execute())]
        loop = asyncio.get_event_loop()
        loop.run_until_complete(asyncio.wait(tasks))
        response = tasks[0].result()

    if args.ndex:
        # Publish the result graph to NDEx.
        jsonkit = JSONKit()
        graph = jsonkit.select("$.[*][*].result_list.[*][*].result_graph",
                               response)
        logger.debug(
            f"Publishing result as NDEx graph({args.ndex})=> {json.dumps(graph, indent=2)}"
        )
        NDEx()._publish(args.ndex, graph)

    if args.out:
        # Write to a file, possibly standard output.
        if args.out == "stdout":
            print(f"{json.dumps(response, indent=2)}")
        else:
            with open(args.out, "w") as stream:
                json.dump(response, stream, indent=2)
Ejemplo n.º 9
0
# NOTE(review): these bare expressions look like attribute-existence
# probes (stub-test style): each is evaluated only for its attribute
# lookup and the result is discarded.  The exact expression forms are
# presumably significant to the consuming checker -- do not restyle.
import argparse

argparse.ArgumentParser().epilog

# Private HelpFormatter members accessed on ArgumentDefaultsHelpFormatter;
# presumably checked to exist -- confirm against the consuming tool.
argparse.ArgumentDefaultsHelpFormatter('prog')._fill_text
argparse.ArgumentDefaultsHelpFormatter('prog')._get_help_string
Ejemplo n.º 10
0
        # NOTE(review): fragment of a Python 2 script (print statement
        # below); the enclosing function/loop header is not visible here.
        embed()
        # Fetch ex-date dividend info for the current holdings batch.
        dvdinfo = get_dvd_exdate_info_on_holdings( holdings)
        if dvd_data == None:
            dvd_data = dvdinfo[:]
        else:
            dvd_data += dvdinfo[1:] # skip the header
        #
    #
    # Dump the accumulated rows as tab-separated text (Python 2 print).
    print '\n'.join(["\t".join(map(str, i)) for i in dvd_data])
    if args.output_file_name != None:
        # Additionally write a CSV copy when an output file was requested.
        out_str = '\n'.join([",".join(map(str, i)) for i in dvd_data])
        with open( args.output_file_name, 'w') as outf:
           outf.writelines( out_str)
        
        
        


if __name__ == '__main__':
    # Command-line interface for the DVD-info-on-holdings demo; defaults
    # are shown in --help via ArgumentDefaultsHelpFormatter.
    make_formatter = lambda prog: argparse.ArgumentDefaultsHelpFormatter(prog)
    parser = argparse.ArgumentParser(
        description='Demo to get dvd info on holdings',
        formatter_class=make_formatter)

    parser.add_argument(
        '--accts', '-a', nargs="+", type=str,
        help='TL account(s) separated by spaces Eg: 2GESTUSDL 2GESTEURL')
    parser.add_argument(
        '--global_acct', '-g', default=False, action='store_true',
        help='Global(non US) TL accts if --global_acct arg passed')
    parser.add_argument(
        '--start_date', type=str, help='Starting date of positions')
    parser.add_argument(
        '--end_date', type=str, help='Ending date of positions')
    parser.add_argument(
        '--output_file_name', '-f', type=str, default=None,
        help='name of output file. The output will be in csv format')

    args = parser.parse_args()
    main(args)

Ejemplo n.º 11
0
def _getargs():
    """Parse command-line arguments for the file uploader.

    Help strings prefixed with ``*`` mark options that are effectively
    required; they are validated manually after parsing so that
    -cp/--cryptpswd can be used on its own (with only -up/--url_password).

    Returns:
        argparse.Namespace: the parsed and validated arguments.
    """
    _note_epilog = '''
Note: Proxy set ref. options   [XXXX://[user:pass]@]host:port   where XXXX are ( http | https | socks5 | socks5h )
         or set environments   ( HTTP_PROXY | HTTPS_PROXY )
      in this order of importance.

%s
''' % _epilog
    # Only the RawDescription formatter was ever used; the two dead
    # assignments that previously shadowed it (HelpFormatter and
    # ArgumentDefaultsHelpFormatter lambdas) have been removed.
    formatter = lambda prog: argparse.RawDescriptionHelpFormatter(
        prog, max_help_position=50, width=120)
    parser = argparse.ArgumentParser(
        description=_description,
        epilog=_note_epilog,
        formatter_class=formatter)  # , argument_default=not argparse.SUPPRESS)

    parser.add_argument('-d',
                        '--debug',
                        help='| increase output debug',
                        action='count',
                        default=0)
    parser.add_argument('-v',
                        '--verbose',
                        help='| output verbosity',
                        action='store_true')
    parser.add_argument('-V',
                        '--version',
                        help='| print version number',
                        action='version',
                        version='%(prog)s ' + _version)
    parser.add_argument('-ps',
                        '--path_save',
                        help='* path save file uploader',
                        type=str)  # required: validated manually below
    parser.add_argument('-pf',
                        '--path_file',
                        help='* path file for uploader',
                        type=str)  # required: validated manually below
    parser.add_argument('-fn',
                        '--flow_name',
                        help='* flow name for uploader',
                        type=str)  # required: validated manually below
    parser.add_argument('-ua',
                        '--url_address',
                        help='* URL address for uploader file',
                        type=str)  # required: validated manually below
    parser.add_argument('-uu',
                        '--url_username',
                        help='* URL authentication username',
                        type=str)  # required: validated manually below
    parser.add_argument('-up',
                        '--url_password',
                        help='* URL authentication password',
                        type=str)  # required: validated manually below
    parser.add_argument('-uph',
                        '--url_proxy_http',
                        help='| URL set proxy HTTP reference',
                        type=str)
    parser.add_argument('-uphs',
                        '--url_proxy_https',
                        help='| URL set proxy HTTPS reference',
                        type=str)
    parser.add_argument('-cp',
                        '--cryptpswd',
                        help='| return crypted URL_PASSWORD',
                        action='store_true')
    parser.add_argument('-sm',
                        '--send_mail',
                        help='| send mail notification',
                        action='store_true')
    parser.add_argument('-gm',
                        '--gmail',
                        help='| set Gmail SMTP',
                        action='store_true')
    parser.add_argument('-mh',
                        '--mail_host',
                        help='| mail host SMTP (default: %(default)s)',
                        type=str,
                        default=_mail_host)
    parser.add_argument('-mf',
                        '--mail_from',
                        help='| mail from (default: %(default)s)',
                        type=str,
                        default=_mail_from)
    # NOTE(review): --mail_to defaults to _mail_from (send to self);
    # presumably intentional -- confirm.
    parser.add_argument('-mt',
                        '--mail_to',
                        help='| mail to (default: %(default)s)',
                        type=str,
                        default=_mail_from)
    parser.add_argument('-mc', '--mail_cc', help='| mail cc', type=str)
    parser.add_argument('-ms',
                        '--mail_subject',
                        help='| mail subject (default: %(default)s)',
                        type=str,
                        default=_mail_subject)
    parser.add_argument('-mm',
                        '--mail_message',
                        help='| mail message',
                        type=str)

    args = parser.parse_args()

    # -cp/--cryptpswd only needs the password; every other "*" option is
    # required for a normal upload run.
    roll = True
    if args.cryptpswd:
        if args.url_password is None:
            parser.error(
                'with -cp/--cryptpswd argument, -up/--url_password is required'
            )
        else:
            roll = False
    if roll and (args.path_save is None or args.path_file is None
                 or args.flow_name is None or args.url_address is None
                 or args.url_username is None or args.url_password is None):
        parser.error(
            'the following arguments are required: -ps/--path_save, -pf/--path_file, -fn/--flow_name, -ua/--url_address, -uu/--url_username, -up/--url_password'
        )

    return args
Ejemplo n.º 12
0
def _getargs():
    """Parse command-line options for the disk-space monitoring script.

    Returns:
        argparse.Namespace: the parsed arguments.

    Relies on the module-level defaults ``_description``, ``_epilog``,
    ``_version``, ``_mail_host``, ``_mail_from`` and ``_mail_subject``.
    """
    # The original assigned three formatter lambdas in a row; only the last
    # one (RawDescriptionHelpFormatter) was ever used, so the two dead
    # assignments have been removed.
    formatter = lambda prog: argparse.RawDescriptionHelpFormatter(
        prog, max_help_position=50, width=120)
    parser = argparse.ArgumentParser(
        description=_description, epilog=_epilog,
        formatter_class=formatter)

    parser.add_argument('-d',
                        '--debug',
                        help='increase output debug',
                        action='count',
                        default=0)
    parser.add_argument('-v',
                        '--verbose',
                        help='output verbosity',
                        action='store_true')
    parser.add_argument('-V',
                        '--version',
                        help='print version number',
                        action='version',
                        version='%(prog)s ' + _version)
    parser.add_argument('-pb',
                        '--path_base',
                        help='path base or set path this script',
                        type=str)
    parser.add_argument('-lf',
                        '--limit_free',
                        help='limit space free',
                        type=str,
                        required=True)
    parser.add_argument('-hv',
                        '--human_value',
                        help='return human values instead of byte',
                        action='store_true')
    parser.add_argument('-sm',
                        '--send_mail',
                        help='send mail notification',
                        action='store_true')
    parser.add_argument('-gm',
                        '--gmail',
                        help='set Gmail SMTP',
                        action='store_true')
    parser.add_argument('-mh',
                        '--mail_host',
                        help='mail host SMTP (default: %(default)s)',
                        type=str,
                        default=_mail_host)
    parser.add_argument('-mf',
                        '--mail_from',
                        help='mail from (default: %(default)s)',
                        type=str,
                        default=_mail_from)
    parser.add_argument('-mt',
                        '--mail_to',
                        help='mail to',
                        type=str,
                        required=True)
    parser.add_argument('-mc', '--mail_cc', help='mail cc', type=str)
    parser.add_argument('-ms',
                        '--mail_subject',
                        help='mail subject (default: %(default)s)',
                        type=str,
                        default=_mail_subject)
    parser.add_argument('-mm', '--mail_message', help='mail message', type=str)

    return parser.parse_args()
Ejemplo n.º 13
0
def main():
    """CLI entry point: parse arguments and dispatch on the chosen mode.

    Modes: ``index``, ``serve``, ``demo``, ``install-server`` and ``config``
    (with ``check``/``create`` sub-commands). Exits with status 1 when no
    mode is given.
    """
    from .common import setup_logger
    setup_logger(get_logger(), level=logging.DEBUG)

    F = lambda prog: argparse.ArgumentDefaultsHelpFormatter(prog, width=120)
    p = argparse.ArgumentParser(formatter_class=F)  # type: ignore
    subp = p.add_subparsers(dest='mode')

    ep = subp.add_parser('index', help='Create/update the link database', formatter_class=F)
    ep.add_argument('--config', type=Path, default=default_config_path(), help='Config path')
    ep.add_argument('--intermediate', required=False, help="Used for development, you don't need it")

    sp = subp.add_parser('serve', help='Serve a link database', formatter_class=F)  # type: ignore
    server.setup_parser(sp)

    ap = subp.add_parser('demo', help='Demo mode: index and serve a directory in single command', formatter_class=F)
    ap.add_argument('--port', type=str, default='13131', help='Port to serve on')
    ap.add_argument('--no-serve', action='store_false', dest='server', help='Pass to only index without running server')
    ap.add_argument('--config', type=Path, required=False, help='Config to run against. If omitted, will use empty base config')
    # '--as' is not a valid Python identifier, so it is read back below via
    # getattr(args, 'as') rather than args.as.
    ap.add_argument(
        '--as',
        choices=list(sorted(demo_sources().keys())),
        default='guess',
        help='Index the path as',
    )
    ap.add_argument('params', nargs='*', help='Optional extra params for the indexer')

    isp = subp.add_parser('install-server', help='Install server as a systemd service (for autostart)', formatter_class=F)
    install_server.setup_parser(isp)

    cp = subp.add_parser('config', help='Config management')
    scp = cp.add_subparsers()
    ccp = scp.add_parser('check', help='Check config')
    ccp.set_defaults(func=config_check)
    ccp.add_argument('--config', type=Path, default=default_config_path(), help='Config path')

    icp = scp.add_parser('create', help='Create user config')
    icp.set_defaults(func=config_create)

    args = p.parse_args()

    # add_subparsers(dest='mode') leaves mode as None when no sub-command is
    # given; treat that as a usage error rather than crashing later.
    if args.mode is None:
        print('ERROR: Please specify a mode', file=sys.stderr)
        p.print_help(sys.stderr)
        sys.exit(1)

    with get_tmpdir() as tdir:
        if args.mode == 'index':
            # (normalized the stray extra indentation the original had here)
            do_index(config_file=args.config)
        elif args.mode == 'serve':
            server.run(args)
        elif args.mode == 'demo':
            do_demo(index_as=getattr(args, 'as'), params=args.params, port=args.port, config_file=args.config)
        elif args.mode == 'install-server':
            install_server.install(args)
        elif args.mode == 'config':
            args.func(args)
        else:
            raise AssertionError(f'unexpected mode {args.mode}')
Ejemplo n.º 14
0
 def argparse_formatter_factory(prog):
     return argparse.ArgumentDefaultsHelpFormatter(prog, width=100)
Ejemplo n.º 15
0
def help_formatter(prog):  # pragma: no cover
    """Build the help formatter used by the argument parser.

    Shows argument defaults, with the help column at 35 and 100-char width.
    """
    options = {"max_help_position": 35, "width": 100}
    return argparse.ArgumentDefaultsHelpFormatter(prog, **options)
Ejemplo n.º 16
0
def main():
    """Create a component-specific varlist json for a given data request.

    Reads the requested cmor variables (from --drq, or every CMOR-table
    variable with --allvars), filters out variables on the hard-coded omit
    lists (see the referenced GitHub issues), and writes the result grouped
    per model component and per table to the --varlist output file.
    """
    formatter = lambda prog: argparse.ArgumentDefaultsHelpFormatter(
        prog, max_help_position=37)
    parser = argparse.ArgumentParser(
        description=
        "Create component-specified varlist json for given data request",
        formatter_class=formatter)
    required = parser.add_argument_group("required arguments")
    varsarg = required.add_mutually_exclusive_group(required=True)
    varsarg.add_argument(
        "--drq",
        metavar="FILE",
        type=str,
        help=
        "File (xlsx|json) containing requested cmor variables (Required, unless --allvars is used)"
    )
    varsarg.add_argument(
        "--allvars",
        action="store_true",
        default=False,
        help=
        "Read all possible variables from CMOR tables (Required, unless --drq is used)"
    )
    parser.add_argument("--ececonf",
                        metavar='|'.join(components.ece_configs.keys()),
                        type=str,
                        help="EC-Earth configuration")
    parser.add_argument("--varlist",
                        "-o",
                        metavar="FILE.json",
                        type=str,
                        default="ece-cmip6-data-request-varlist.json",
                        help="Output file name")
    parser.add_argument("--tabdir",
                        metavar="DIR",
                        type=str,
                        default=ece2cmorlib.table_dir_default,
                        help="Cmorization table directory")
    parser.add_argument("--tabid",
                        metavar="PREFIX",
                        type=str,
                        default=ece2cmorlib.prefix_default,
                        help="Cmorization table prefix string")

    args = parser.parse_args()

    print()
    print('Running drq2varlist with:')
    print(' drq2varlist ' + cmor_utils.ScriptUtils.get_drq_vars_options(args))
    print()

    if not args.allvars and not os.path.isfile(args.drq):
        log.fatal("Error: Your data request file %s cannot be found." %
                  args.drq)
        sys.exit(' Exiting drq2varlist.')

    # Initialize ece2cmor without the cmor library itself; only the
    # cmorization tables are needed here.
    ece2cmorlib.initialize_without_cmor(tabledir=args.tabdir,
                                        tableprefix=args.tabid)

    try:
        # args.allvars always exists (defined above with default=False); the
        # original used getattr() with inconsistent fallbacks (False here,
        # True further below), which this version removes.
        if args.allvars:
            matches, omitted = taskloader.load_drq("allvars",
                                                   config=args.ececonf,
                                                   check_prefs=True)
        else:
            matches, omitted = taskloader.load_drq(args.drq,
                                                   config=args.ececonf,
                                                   check_prefs=True)
            # Load extra permanent tasks for LPJ-GUESS, because the LPJ-GUESS
            # community outputs these variables at any time, independent of
            # whether they are requested by the data request.
            if args.ececonf in [
                    "EC-EARTH-CC", "EC-EARTH-Veg", "EC-EARTH-Veg-LR"
            ]:
                matches_permanent, omitted_permanent = taskloader.load_drq(
                    os.path.join(os.path.dirname(__file__), "..", "resources",
                                 "permanent-tasks.json"),
                    config=args.ececonf,
                    check_prefs=True)
                # Merge the permanent tasks into the regular matches without
                # duplicating targets.
                for model, targetlist in matches_permanent.items():
                    if model in matches:
                        for target in targetlist:
                            if target not in matches[model]:
                                matches[model].append(target)
                    else:
                        matches[model] = targetlist
    except taskloader.SwapDrqAndVarListException as e:
        log.error(e.message)
        opt1, opt2 = "vars" if e.reverse else "drq", "drq" if e.reverse else "vars"
        log.error(
            "It seems you are using the --%s option where you should use the --%s option for this file"
            % (opt1, opt2))
        sys.exit(' Exiting drq2varlist.')

    result = {}
    for model, targetlist in matches.items():
        result[model] = {}
        for target in targetlist:
            table = target.table

            # Taking off several variables from the json data request files:
            skip_case = False
            if target.variable in ['intdoc']:
                # See issue #521:
                log.info(
                    " Variable %s %s is listed in the omit list of drq2varlist and therefore skipped. See https://github.com/EC-Earth/ece2cmor3/issues/521"
                    % (target.table, target.variable))
                skip_case = True
            if target.variable in [
                    'rlntds', 'hfibthermds', 'hflso', 'agessc', 'ficeberg',
                    'hfsso', 'hfcorr', 'wfcorr', 'nwdFracLut'
            ]:
                # See issue #498 & #469:
                log.info(
                    " Variable %s %s is listed in the omit list of drq2varlist and therefore skipped. See https://github.com/EC-Earth/ece2cmor3/issues/498 & https://github.com/EC-Earth/ece2cmor3/issues/469"
                    % (target.table, target.variable))
                skip_case = True
            if target.variable in ['hfibthermds2d', 'ficeberg2d', 'fgcfc12']:
                # See issue #516 and #609-36 & #609-37 at ece-portal:
                log.info(
                    " Variable %s %s is listed in the omit list of drq2varlist and therefore skipped. See https://github.com/EC-Earth/ece2cmor3/issues/516 & https://dev.ec-earth.org/issues/609#note-36"
                    % (target.table, target.variable))
                skip_case = True
            if target.variable in ['cfc11', 'fgsf6']:
                # See issue #504:
                log.info(
                    " Variable %s %s is listed in the omit list of drq2varlist and therefore skipped. See https://github.com/EC-Earth/ece2cmor3/issues/504"
                    % (target.table, target.variable))
                skip_case = True
            if table in ['Oyr'] and target.variable in [
                    'cfc11', 'ocontempdiff', 'ocontemppadvect',
                    'ocontemppmdiff', 'ocontemprmadvect', 'ocontemptend',
                    'osaltdiff', 'osaltpadvect', 'osaltpmdiff',
                    'osaltrmadvect', 'osalttend'
            ]:
                # See issue #493 & #542:
                log.info(
                    " Variable %s %s is listed in the omit list of drq2varlist and therefore skipped. See https://github.com/EC-Earth/ece2cmor3/issues/493 & https://github.com/EC-Earth/ece2cmor3/issues/542"
                    % (target.table, target.variable))
                skip_case = True
            if args.allvars:
                if table in ['6hrPlevPt'
                             ] and target.variable in ['ta27', 'hus27']:
                    # See issue #542:
                    # Conflicting combinations (skip the 2nd one, an arbitrary choice):
                    # 6hrPlevPt:  ta7h,  ta27
                    # 6hrPlevPt: hus7h, hus27
                    log.info(
                        " Variable %s %s is listed in the omit list of drq2varlist and therefore skipped. See https://github.com/EC-Earth/ece2cmor3/issues/542"
                        % (target.table, target.variable))
                    skip_case = True
                if table in ['Emon'] and target.variable in [
                        'hus27', 'va27', 'ua27'
                ]:
                    # See issue #542:
                    # Emon:        hus, hus27
                    # Emon:         va,  va27
                    # Emon:         ua,  ua27
                    log.info(
                        " Variable %s %s is listed in the omit list of drq2varlist and therefore skipped. See https://github.com/EC-Earth/ece2cmor3/issues/542"
                        % (target.table, target.variable))
                    skip_case = True

            if not skip_case:
                if table in result[model]:
                    result[model][table].append(target.variable)
                else:
                    result[model][table] = [target.variable]
    with open(args.varlist, 'w') as ofile:
        json.dump(result,
                  ofile,
                  indent=4,
                  separators=(',', ': '),
                  sort_keys=True)
        # Add a newline at the end of the json file because the python json
        # package doesn't do this. (The redundant ofile.close() inside this
        # with-block was removed; the context manager closes the file.)
        ofile.write('\n')
Ejemplo n.º 17
0
        self.build_candidates()
        self.save_products(outdir=outdir)

    @classmethod
    def from_yaml_config(cls, fname):
        """Alternate constructor: build a pipeline from a YAML config file."""
        log.debug("Creating pipeline from config file: {}".format(fname))
        with open(fname, 'r') as stream:
            config = yaml.safe_load(stream)
        log.debug("Pipeline configuration: {}".format(
            json.dumps(config, indent=4)))
        return cls(config)


###############################################################################

# PEP 8 (E731): a lambda assigned to a name should be a def — same interface.
def help_formatter(prog):
    """Argparse help formatter showing defaults, with a narrow (16-col) help column."""
    return argparse.ArgumentDefaultsHelpFormatter(prog, max_help_position=16)


def get_parser():
    def outdir(path):
        """ Function that checks the outdir argument """
        if not os.path.isdir(path):
            msg = "Specified output directory {!r} does not exist".format(path)
            raise argparse.ArgumentTypeError(msg)
        return path

    parser = argparse.ArgumentParser(
        formatter_class=help_formatter,
        description=
        f"Search multiple DM trials with the riptide end-to-end FFA pipeline.")
    parser.add_argument(
def my_formatter(prog):
    """Help formatter sized to the current terminal (falls back to 80x20)."""
    columns = shutil.get_terminal_size((80, 20)).columns
    return argparse.ArgumentDefaultsHelpFormatter(
        prog, max_help_position=columns, width=columns)
Ejemplo n.º 19
0
def main(argv=None):
    """Run the CDN simulation end to end.

    Parses options, builds the AS-level geo network graph, populates it with
    hosts and caches, runs the event-driven simulation loop, and saves the
    statistics and figures to a result directory.

    Args:
        argv: argument list (defaults to ``sys.argv[1:]``).

    Returns:
        0 on success.
    """
    if argv is None:
        argv = sys.argv[1:]
    parser = argparse.ArgumentParser(
        description='CDN-Sim in Python',
        formatter_class=lambda prog: argparse.ArgumentDefaultsHelpFormatter(
            prog, max_help_position=32))

    inFilesGr = parser.add_argument_group('Input files')
    inFilesGr.add_argument('-trace',
                           metavar='file',
                           default='usr_trace.dat',
                           help='User behavior trace')
    inFilesGr.add_argument('-links',
                           metavar='file',
                           default='as_links.dat',
                           help='IRL AS-to-AS links')
    inFilesGr.add_argument('-origin',
                           metavar='file',
                           default='origin.dat',
                           help='IRL origin prefixes')
    inFilesGr.add_argument('-rank',
                           metavar='file',
                           default='caida.org.dat',
                           help='CAIDA AS rank data')

    simSetupGr = parser.add_argument_group('Simulation setup')
    simSetupGr.add_argument('-geo',
                            metavar='string',
                            default='de',
                            help='Comma-separated list of countries')
    simSetupGr.add_argument('-nhosts',
                            metavar='number',
                            default=1000000,
                            help='Maximal number of hosts')
    simSetupGr.add_argument('-active',
                            metavar='number',
                            default=100000,
                            type=int,
                            help='Simultaneously active streams')
    simSetupGr.add_argument('-backnoise',
                            metavar='number',
                            default=0,
                            help='Simultaneous active background streams')
    simSetupGr.add_argument('-streaming',
                            action='store_true',
                            default=True,
                            help='Live streaming (not VoD)')
    simSetupGr.add_argument('-ondemandCache',
                            action='store_true',
                            default=False,
                            help='Create caches on demand')
    simSetupGr.add_argument('-percentCache',
                            metavar='number',
                            type=int,
                            # Python 3 fix: was xrange (Python 2 only).
                            choices=range(1, 101),
                            default=0,
                            help='%% of ASes with static cache')
    simSetupGr.add_argument('-hierarchical',
                            action='store_true',
                            default=False,
                            help='Use hierarchical cache placement')
    simSetupGr.add_argument('-cachesec',
                            metavar='number',
                            type=int,
                            default=10,
                            help='# seconds of video to keep in cache')
    simSetupGr.add_argument('-cacheinit',
                            metavar='number',
                            type=float,
                            default=0.1,
                            help='ondemand cache init time')
    simSetupGr.add_argument('-cachethreshold',
                            metavar='number',
                            type=int,
                            default=1,
                            help='# streams to start a cache')
    simSetupGr.add_argument('-interactive',
                            action='store_true',
                            default=False,
                            help='Interactively populate ASes')
    simSetupGr.add_argument('-reqRate',
                            metavar='number',
                            type=float,
                            default=0,
                            help='Request rate per min (0-auto)')
    simSetupGr.add_argument('-scenario',
                            metavar='file',
                            default='',
                            help='Scenario file (format: time, rate/min)')
    simSetupGr.add_argument('-endtime',
                            metavar='number',
                            type=float,
                            default=30,
                            help='Finalize simulation, no new requests')
    simSetupGr.add_argument('-waitCacheBoot',
                            action='store_true',
                            default=True,
                            help='Wait cache to boot or bypass it')
    simSetupGr.add_argument('-unlimCoreLinkBandwidth',
                            action='store_true',
                            default=False,
                            help='Set no limit to the core link bandwidth')
    resultsGr = parser.add_argument_group('Results')
    resultsGr.add_argument('-siminfo',
                           metavar='text',
                           default='',
                           help='Name of the simulation')
    resultsGr.add_argument('-figures',
                           action='store_true',
                           default=False,
                           help='Figures with results')
    resultsGr.add_argument('-allfigures',
                           action='store_true',
                           default=False,
                           help='Figures for all user streams')
    resultsGr.add_argument('-parallel',
                           action='store_true',
                           default=False,
                           help='Enable parallelism in simulation')

    args = parser.parse_args(argv)

    # Pick the matplotlib backend before anything draws: interactive TkAgg
    # only when requested and a display is available, headless pdf otherwise.
    import matplotlib
    if args.interactive and "DISPLAY" in os.environ:
        matplotlib.use('TkAgg')
    else:
        matplotlib.use('pdf')

    import sim_globals as sg
    sg.init(args)

    printWithClock("CDN-Sim started on " + str(time.ctime()))

    # Python 3 fix: was sys.maxint (removed in Python 3).
    max_hosts = sys.maxsize
    if args.nhosts != 'all':
        max_hosts = int(args.nhosts)
    printWithClock("Maximal number of hosts is " + str(max_hosts))

    countries = ['de']
    if args.geo != "":
        countries = str(args.geo).replace(' ', '').split(',')
    else:
        printWithClock("Default geographic area: de")

    printWithClock("Building the geoNetGraph")
    import geoNetGraph
    sg.gnGraph = geoNetGraph.geoNetGraph(args.links, args.origin, args.rank,
                                         countries)

    applyManualInputData = False
    if args.interactive:
        sg.gnGraph.iSetGeoNetGraph(selectHosts=True,
                                   selectCaches=True,
                                   selectProvider=True)
        applyManualInputData = True

    sg.gnGraph.initContentProviders()

    import hl_sim
    simulator = hl_sim.highLevelSimulation()
    sg.simRef = simulator

    printWithClock("Populate the geoNetGraph")
    import userRequests
    sg.urRef = userRequests.userRequests(max_hosts, applyManualInputData)

    # NOTE(review): nodes_iter() is the networkx 1.x API — verify the pinned
    # networkx version before upgrading.
    nASes = nCaches = 0
    for tmpASNum, tmpAS in sg.gnGraph.netGraph.nodes_iter(data=True):
        if 'ns_nets' in tmpAS and sg.gnGraph.isAccessNode(tmpAS['type']):
            nASes += 1
        if 'static_cache' in tmpAS:
            nCaches += 1
    printWithClock("Number of populated ASes: " + str(nASes))
    printWithClock("Number of ASes with static caches: " + str(nCaches))

    simTimeStamp = time.strftime('%Y.%m.%d-%H.%M.%S')
    printWithClock("Starting simulation on: " + simTimeStamp)
    start = time.time()
    # Seed the event queue with the first (noise or regular) user event.
    if int(args.backnoise) > 0:
        e = sg.urRef.getNoiseEvent(simulator.lastEventTime)
    else:
        e = sg.urRef.getNextEvent(simulator.lastEventTime)
    simulator.eventPush(e)

    # main simulation loop
    while simulator.step():
        pass

    stop = time.time()
    print("")
    printWithClock("Simulation completed on: " +
                   time.strftime('%Y.%m.%d-%H.%M.%S'))
    printWithClock("Time spent (s): " + str(stop - start))

    for ASnum, ASnode in sg.gnGraph.netGraph.nodes_iter(data=True):
        if 'caches' in ASnode:
            simulator.cacheStatistics_hw.append(
                (ASnum, ASnode['stats_maxThroughput'],
                 ASnode['stats_maxConnections'], ASnode['stats_max_NumVMs']))

    # Choose a unique result directory ('debug_out' wins when it exists).
    simResDirName = 'sim_res' + args.siminfo + '-' + simTimeStamp
    if os.path.exists('debug_out'):
        simResDirName = 'debug_out'
    else:
        while os.path.exists(simResDirName):
            import string
            # Python 3 fix: was string.letters (Python 2 only).
            simResDirName += '_' + sg.random.choice(string.ascii_letters)
            print("Result directory exists! Changing name to " + simResDirName)
        os.makedirs(simResDirName)

    simulator.saveSimStatsToFile(simResDirName)
    simulator.saveSimulationSetupToFile(simResDirName)
    if args.figures:
        simulator.plotSimStats(simResDirName)
        sg.gnGraph.drawGeoNetGraph(simResDirName + '/fig_topology.pdf')

    return 0
Ejemplo n.º 20
0
def main(argv):
    """ Parse command-line options to create reports and mail them ."""
    description = """Read .SIGNED.ZIP files from PATH; parse Codecheck.it
    report for e-mail address, score, and code; edit response and e-mail it 
    back."""
    formatter = lambda prog: \
        argparse.ArgumentDefaultsHelpFormatter(prog, max_help_position=30)
    # BUG FIX: the original constructed optparse.OptionParser, which accepts
    # neither 'formatter_class' nor 'add_help' and has no add_argument() /
    # parse_args(); argparse.ArgumentParser is what the rest of this
    # function uses.
    parser = argparse.ArgumentParser(description=description,
                                     add_help=False,
                                     formatter_class=formatter)
    # Each tuple: (short flag, long flag, action, dest, default, help).
    arguments = [
        (
            '-e',
            '--email',
            'store',
            'ADDR',
            None,
            'SMTP e-mail',
        ),
        (
            '-p',
            '--password',
            'store',
            'PASS',
            None,
            'SMTP password',
        ),
        (
            '-r',
            '--resend',
            'store_true',
            'RESEND',
            False,
            'resend comment, if available',
        ),
        (
            '-v',
            '--verbose',
            'store_true',
            'VERBOSE',
            False,
            'echo status information',
        ),
    ]
    # Add optional arguments with values.
    for c1, c2, a, v, d, h in arguments:
        parser.add_argument(
            c1,
            c2,
            action=a,
            dest=v,
            default=d,
            help=h,
        )
    # Add positional arguments. PATH is both the string and the variable.
    parser.add_argument("PATH",
                        help="path to directory with .SIGNED.ZIP files")
    # Parse arguments.
    ns = parser.parse_args(args=argv[1:])
    # Export SMTP credentials through the environment for the mailer.
    if ns.ADDR:
        os.environ['ADDR'] = ns.ADDR
    if ns.PASS:
        os.environ['PASS'] = ns.PASS

    # Process all zips in path.
    zips = Zips(ns.PATH)
    for p in zips.paths:
        print(f"Processing: {p}")
        report = Report(p, ns.VERBOSE)
        if ns.VERBOSE:
            print('values:', [f'{repr(x)}' for x in report.values()])
        if report.signed and report.has_email:
            mailer = Mailer(*report.values(), ns.RESEND)
            if ns.VERBOSE:
                print('message:')
                print(mailer.message)
            mailer.send()
        else:
            # Unsigned reports do not have valid e-mail addresses.
            if not report.signed:
                print(f"ERROR: '{report.filename}' is unsigned")
            else:
                canvas = "Don't make grading more difficult! " \
                        "I need your e-mail address to give you any feedback."
                print(f"ERROR: '{report.email}' not a valid e-mail address "
                      f"in '{report.filename}' "
                      f"{canvas}")
Ejemplo n.º 21
0
def formatter(prog):
    """Wide argparse help formatter: defaults shown, 180 columns, help at 100."""
    layout = dict(max_help_position=100, width=180)
    return argparse.ArgumentDefaultsHelpFormatter(prog, **layout)
Ejemplo n.º 22
0
            x_decoded = predict_model(sess, generator_model,
                                      np.expand_dims(z_sample, 0))
            digit = x_decoded[0].reshape(digit_size, digit_size)
            figure[i * digit_size:(i + 1) * digit_size,
                   j * digit_size:(j + 1) * digit_size] = digit
    plt.figure(figsize=(10, 10))
    plt.imshow(figure)
    plt.show()

    sess.close()


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='train Dropout Uncertainty Model',
        formatter_class=lambda prog: argparse.ArgumentDefaultsHelpFormatter(
            prog))
    parser.add_argument('--forward',
                        action='store_true',
                        help='only forward model')
    parser.add_argument('--modelfn', type=str, default=None, help='model name')
    parser.add_argument('--output', type=str, default=None, help='model name')
    # learning hyper-parameters
    parser.add_argument('--init_lr',
                        type=float,
                        default=5e-4,
                        help='initial learning rate.')
    parser.add_argument('--mom', type=float, default=0.9, help='SGD Momentum')
    parser.add_argument('--l2reg',
                        type=float,
                        default=5e-6,
                        help='L2 reg lambda')
Ejemplo n.º 23
0
### load requirements
import os
import subprocess
import argparse
from args_impute import parserbase, parsercluster, parserjob
from py_helpers import unbuffer_stdout, find_exec, file_len
from blueprint import send_job, load_job, save_job, read_clust_conf
unbuffer_stdout()

#############
# Python 2 script: note the statement-form print below.
# NOTE(review): sys is used here but not imported in this section --
# presumably imported earlier in the original script; verify.
if not (('-h' in sys.argv) or ('--help' in sys.argv)):
    print '\n...Parsing arguments...'
#############
# Build the parser from the shared parent parsers defined in args_impute.
parser = argparse.ArgumentParser(
    prog='agg_imp.py',
    formatter_class=lambda prog: argparse.ArgumentDefaultsHelpFormatter(
        prog, max_help_position=40),
    parents=[parserbase, parsercluster, parserjob])

# parse_known_args() tolerates unrecognized options so they can be
# forwarded to downstream jobs.
args, extra_args = parser.parse_known_args()

# TODO: arg print

# get useful modified args
# '--addout', when given, is appended to the output basename: '<out>.<addout>'.
if args.addout is not None and str(args.addout) != '':
    addout_txt = ['--addout', str(args.addout)]
    outdot = str(args.out) + '.' + str(args.addout)
else:
    addout_txt = ['', '']
    outdot = str(args.out)

# directories
Ejemplo n.º 24
0
def parse_args():
    """Build and parse the turtlecli command line.

    Returns the parsed ``argparse.Namespace`` after post-processing:

    * ``kwargs`` is converted from ``KEY=VALUE`` strings to a dict,
    * naive ``--after``/``--before``/``--times`` datetimes are made
      timezone-aware using ``--tz``,
    * ``buffer`` is converted to a ``relativedelta`` in ``--unit`` units.

    Calls ``parser.error`` (which exits) on invalid argument combinations.
    """
    # Scale help output to 80% of the terminal width, but never below 80 cols
    console_width = get_console_width()
    width = console_width * 0.8 if console_width > 80 / 0.8 else 80
    parser = argparse.ArgumentParser(
        prog="turtlecli",
        description="A program for easily querying the Turtle database",
        formatter_class=lambda prog: argparse.ArgumentDefaultsHelpFormatter(
            prog, width=width
        ),
    )

    ### General Group ###
    general_group = parser.add_argument_group(
        title="General", description="General-purpose arguments"
    )
    general_group.add_argument(
        "-L", "--limit", type=int, default=10, help="Limit results to the given number"
    )
    general_group.add_argument(
        "-i",
        "--interactive",
        action="store_true",
        help="Drop into an interactive shell after the query is performed",
    )
    general_group.add_argument(
        "--exact",
        action="store_true",
        help="Make searches more precise, and faster. This will perform exact, "
        "case-insensitive searches on things like observer name, project name, etc.",
    )
    general_group.add_argument(
        "--regex",
        action="store_true",
        help="Indicates that given search terms are MySQL-style regular expressions. For "
        "example, if this is given then --project-names '^AGBT.*72$' would be treated "
        "as a regular expression and all results in which the project name starts "
        "with AGBT and ends with 72 would be returned",
    )
    general_group.add_argument(
        "-v",
        "--verbose",
        action="store_true",
        help="Make output more verbose. Note: this will display SQL "
        "queries made during the initial query",
    )
    # Registered on the General group (not the bare parser) so it shows up
    # with the other general-purpose options in --help
    general_group.add_argument(
        "--log-level",
        choices=["DEBUG", "INFO", "WARNING", "ERROR"],
        default="INFO",
        help="Specify the logging level. Note that "
        "this will override --verbose, if both "
        "are present.",
    )

    ### Things Group ###
    things_group = parser.add_argument_group(
        title="Entities", description="Arguments that relate to filtering by entity"
    )
    things_group.add_argument(
        "-p",
        "--project-names",
        "--project",
        "--projects",
        metavar="PROJECT",
        nargs="+",
        help="The name(s) of the project(s). If multiple projects are given, "
        "then results will be shown for all of them. Respects --regex",
    )
    things_group.add_argument(
        "-s",
        "--script-names",
        "--script",
        "--scripts",
        metavar="SCRIPT",
        nargs="+",
        help="The name(s) of the script(s). If multiple scripts are given, "
        "then results will be shown for all of them. If --projects is "
        "given, only scripts from within the given project will be shown. "
        "Respects --regex. "
        "NOTE: This option may give unexpected results if --projects is not specified, "
        "since script names are not guaranteed to be unique across all projects!",
    )
    things_group.add_argument(
        "-o",
        "--observers",
        nargs="+",
        metavar="OBSERVER",
        help="Filter for given observer(s). Respects --regex",
    )
    things_group.add_argument(
        "-O",
        "--operators",
        nargs="+",
        metavar="OPERATOR",
        help="Filter for given operator(s). Respects --regex",
    )
    things_group.add_argument(
        "--state",
        help="Filter based on the state of script execution",
        choices=["completed", "in_progress", "aborted"],
    )

    ### Time Group ###
    time_group = parser.add_argument_group(
        title="Time",
        description="Arguments that filter within the time domain. "
        "NOTE: ALL options in this section are with respect to the script "
        "EXECUTION time! Script termination times are not recorded, though you "
        "can look within the logs to see roughly when script termination occurred",
    )
    time_group.add_argument(
        "-l",
        "--last",
        metavar="DELTA",
        type=float,
        help="Limit to scripts executed within the last DELTA time units. "
        "See --units for details on time unit options",
    )
    # dp.parse accepts "any reasonable" datetime string (dateutil parser)
    time_group.add_argument(
        "-a",
        "--after",
        "--start",
        metavar="DATETIME",
        type=dp.parse,
        help="Limit to scripts executed after this time (note: any reasonable "
        "datetime format will work here). This time is INCLUSIVE. "
        "It DOES NOT respect --buffer! Note: the timezone from --tz will be used "
        "for this argument. To specify a different timezone, you'll need to specify "
        "an explicit UTC offset",
    )
    time_group.add_argument(
        "-b",
        "--before",
        "--end",
        metavar="DATETIME",
        type=dp.parse,
        help="Limit to scripts executed before this time "
        "(note: any reasonable datetime format will work here). This time is "
        "INCLUSIVE. It DOES NOT respect --buffer! Note: the timezone from --tz will be used "
        "for this argument. To specify a different timezone, you'll need to specify "
        "an explicit UTC offset",
    )
    time_group.add_argument(
        "-t",
        "--times",
        metavar="DATETIME",
        nargs="+",
        type=dp.parse,
        help="Script execution time (note: any reasonable datetime "
        "format will work here). This works in conjunction with --buffer to "
        "find all scripts executed near the given time. Note: the timezone from --tz will be used "
        "for this argument. To specify a different timezone, you'll need to specify "
        "an explicit UTC offset",
    )
    time_group.add_argument(
        "-B",
        "--buffer",
        type=float,
        default=0.25,
        help="Designates the size of the time window that projects "
        "will be searched for within. For an exact time, use 0 here. "
        "Units are determined by --units. "
        "Note that the default will only be reasonable if the default "
        "unit is used.",
    )
    time_group.add_argument(
        "-u",
        "--unit",
        default="hours",
        # All (reasonable) choices that can be set for relativedelta
        choices=["seconds", "minutes", "hours", "days", "weeks", "months", "years"],
    )
    time_group.add_argument(
        "--tz",
        nargs="?",
        # This is the value if just --tz is given (no arg)
        const="America/New_York",
        metavar="TIMEZONE",
        type=timezone.pytz.timezone,
        help="Timezone to translate results into. If given without an argument, "
        "defaults to America/New_York (GBT timezone). To specify a different time zone, simply "
        "give the timezone as the argument. If --tz not given at all, then "
        "UTC is used. See https://en.wikipedia.org/wiki/List_of_tz_database_time_zones "
        "for a list of valid timezone names",
    )
    time_group.add_argument(
        "--strftime",
        metavar="FORMAT",
        # default="%Y-%m-%d %H:%M:%S",
        help="Set the formatting for datetime output. See "
        "https://docs.python.org/3.7/library/datetime.html#strftime-strptime-behavior "
        "for formatting reference",
    )

    ### Sorting Group ###
    sorting_group = parser.add_argument_group(
        title="Sorting", description="Arguments that specify sorting options"
    )
    sorting_group.add_argument(
        "-S",
        "--sort-by",
        default="datetime",
        # These are simply what I consider a reasonable set of things to filter by
        choices=["id", "obsprocedure", "observer", "operator", "datetime"],
        help="Field to sort results by",
    )
    sorting_group.add_argument(
        "-d",
        "--direction",
        default="descending",
        choices=["ascending", "descending"],
        help="Direction to sort results",
    )

    ### Output Group ###
    output_group = parser.add_argument_group(
        title="Output", description="Arguments that specify output options"
    )
    output_group.add_argument(
        "--output",
        default=".",
        help="Specify the path into which all output files will be written. "
        "If the path does not exist, an attempt will be made to create it.",
    )
    output_group.add_argument(
        # TODO: --diff is deprecated
        "--show-diffs",
        "--diff",
        action="store_true",
        help="Show the differences between the scripts for each result",
    )
    output_group.add_argument(
        "--show-logs",
        "--logs",
        # TODO: --logs is deprecated
        action="store_true",
        help="Show the log for each result",
    )
    output_group.add_argument(
        "--save-logs",
        action="store_true",
        help="Save the log for each result to file. Path is relative to --output.",
    )
    output_group.add_argument(
        "--show-scripts",
        action="store_true",
        help="Show the contents of the executed script for each result",
    )
    output_group.add_argument(
        "--save-scripts",
        action="store_true",
        help="Save the script for each result to file. Path is relative to --output.",
    )
    output_group.add_argument(
        "--show-sql",
        action="store_true",
        help="Display every SQL query that is executed during the script. "
        "NOTE: This is primarily intended for use in --interactive mode; "
        "for standard operations simply use --verbose",
    )
    output_group.add_argument(
        "--export-to-git",
        action="store_true",
        help="Export all results to a git repository, with every execution "
        "forming a commit. Execution date is used as commit date. File name "
        "is in the format {PROJECT}.{SCRIPTNAME}.py",
    )

    ### Advanced Group ###
    advanced_group = parser.add_argument_group(
        title="Advanced",
        description="Note that these will take a LONG time. It is "
        "advisable to couple them with a reasonable --limit value. "
        "Note also that none of these are case-sensitive",
    )
    advanced_group.add_argument(
        "-k",
        "--kwargs",
        nargs="+",
        metavar="KEY=VALUE",
        help="Search for one or more keyword=value style statements within observation "
        "scripts. Note that whitespace DOES NOT matter here (though key/value pairs "
        "must be separated by spaces)",
    )
    advanced_group.add_argument(
        "--script-contains",
        nargs="+",
        metavar="STRING",
        help="One or more strings that will be searched for within scripts (case-insensitive)",
    )
    advanced_group.add_argument(
        "--log-contains",
        nargs="+",
        metavar="STRING",
        help="One or more strings that will be searched for within logs (case-insensitive)",
    )
    advanced_group.add_argument(
        "--script-regex",
        nargs="+",
        metavar="REGEX",
        help="One or more MySQL-style regular expression that will be "
        "used to search within scripts",
    )
    advanced_group.add_argument(
        "--log-regex",
        nargs="+",
        metavar="REGEX",
        help="One or more MySQL-style regular expression that will be "
        "used to search within logs",
    )

    args = parser.parse_args()

    # --exact and --regex are mutually exclusive search modes
    if args.exact and args.regex:
        parser.error("--exact cannot be given alongside --regex!")

    if args.kwargs:
        # Parse the keyword-value strings inside of kwargs. If there is
        # a ValueError, consider it a parsing error
        # Replace the user's kwargs with the version we have parsed into a dict
        try:
            args.kwargs = parse_kwargs(args.kwargs)
        except ValueError:
            parser.error(
                "kwargs must be of the format 'keyword=value'; got {}".format(
                    args.kwargs
                )
            )

    # Attach the --tz timezone to any naive datetimes the user supplied
    if args.after and not args.after.tzinfo:
        args.after = timezone.make_aware(args.after, args.tz)

    if args.before and not args.before.tzinfo:
        args.before = timezone.make_aware(args.before, args.tz)

    if args.times:
        args.times = [
            dt if dt.tzinfo else timezone.make_aware(dt, args.tz) for dt in args.times
        ]

    ### Additional error checking ###
    buffer_given = args.buffer != parser.get_default("buffer")
    # Ensure that --buffer isn't given without --time
    # if buffer_given and parser.get_default("times") not in args.times:
    #     parser.error("--buffer has no effect if --times is not given!")

    # --buffer only modifies --times; reject it alongside the other
    # time filters to avoid silently ignoring user intent
    if buffer_given and (
        parser.get_default("last") != args.last
        or parser.get_default("after") != args.after
        or parser.get_default("before") != args.before
    ):
        parser.error(
            "--buffer has no effect on time-related options other than --times!"
        )
    # Ensure that the buffer isn't negative
    if args.buffer < 0:
        parser.error("--buffer value must be greater than 0")
    # Convert the numeric buffer into a relativedelta in the chosen unit
    args.buffer = relativedelta(**{args.unit: args.buffer})

    # --output only matters when something is actually written to disk
    if args.output != parser.get_default("output") and not (
        args.save_scripts or args.save_logs or args.export_to_git
    ):
        parser.error(
            "--output is meaningless without --save-scripts, --save-logs, or --export-to-git"
        )

    return args
Ejemplo n.º 25
0
    def __init__(self):
        """
        Initialise the frame producer object, setting defaults and parsing command-line options.
        """

        # Create an empty list for frame storage
        self.frames = []

        # Load default parameters
        self.defaults = LpdFrameProducerDefaults()

        # Set the terminal width for argument help formatting, falling back
        # to 100 columns if COLUMNS is unset or not numeric
        try:
            term_columns = int(os.environ['COLUMNS']) - 2
        except (KeyError, ValueError):
            term_columns = 100

        # Build options for the argument parser
        parser = argparse.ArgumentParser(
            prog='Lpd_frame_producer.py', description='Lpd frame producer',
            formatter_class=lambda prog: argparse.ArgumentDefaultsHelpFormatter(
                prog, max_help_position=40, width=term_columns)
        )

        # Positional: the packet capture file, opened in binary mode
        parser.add_argument(
            'pcap_file', type=argparse.FileType('rb'),
            default=self.defaults.pcap_file,
            help='Packet capture file to load'
        )

        parser.add_argument(
            '--address', '-a', type=str, dest='ip_addr',
            default=self.defaults.ip_addr, metavar='ADDR',
            help='Hostname or IP address to transmit UDP frame data to'
        )
        # NOTE: val_type is a keyword consumed by the custom CsvAction,
        # which splits the comma-separated string into ints — confirm
        # against CsvAction's implementation
        parser.add_argument(
            '--port', '-p', type=str, val_type=int, dest='ports', action=CsvAction,
            default=self.defaults.port_list, metavar='PORT[,PORT,...]',
            help='Comma separated list of port numbers to transmit UDP frame data to'
        )
        parser.add_argument(
            '--frames', '-n', type=int, dest='num_frames',
            default=self.defaults.num_frames, metavar='FRAMES',
            help='Number of frames to transmit (0 = send all frames found in packet capture file)'
        )
        parser.add_argument(
            '--interval', '-i', type=float, dest='tx_interval',
            default=self.defaults.tx_interval, metavar='INTERVAL',
            help='Interval in seconds between transmission of frames'
        )
        parser.add_argument(
            '--pkt_gap', type=int, dest='pkt_gap', metavar='PACKETS',
            help='Insert brief pause between every N packets'
        )
        # NOTE: min/max are keywords consumed by the custom Range action
        parser.add_argument(
            '--drop_frac', type=float, dest='drop_frac',
            min=0.0, max=1.0, action=Range,
            default=self.defaults.drop_frac, metavar='FRACTION',
            help='Fraction of packets to drop')
        parser.add_argument(
            '--drop_list', type=int, nargs='+', dest='drop_list',
            default=self.defaults.drop_list,
            help='Packet number(s) to drop from each frame',
        )
        parser.add_argument(
            '--logging', type=str, dest='log_level',
            default=self.defaults.log_level, choices=self.defaults.log_levels.keys(),
            help='Set logging output level'
        )

        # Parse arguments
        self.args = parser.parse_args()

        # Map logging level option onto real level; the fallback branch is
        # defensive — choices= already restricts input to known levels
        if self.args.log_level in self.defaults.log_levels:
            log_level = self.defaults.log_levels[self.args.log_level]
        else:
            log_level = self.defaults.log_levels[self.defaults.log_level]

        # Set up logging
        logging.basicConfig(
            level=log_level, format='%(levelname)1.1s %(message)s',
            datefmt='%y%m%d %H:%M:%S'
        )

        # Initialise the packet capture file reader
        self.pcap = dpkt.pcap.Reader(self.args.pcap_file)
Ejemplo n.º 26
0
def main():
    """Entry point for cta-data-relay.

    Parses the command line (exactly one action flag is required),
    optionally arms a SIGALRM timeout, connects to the S3 bucket, then
    dispatches to the requested transfer/metadata action. Exits via
    ``parser.exit`` on missing conditional arguments.
    """
    parser = argparse.ArgumentParser(
        prog='cta-data-relay',
        description='',
        formatter_class=lambda prog: argparse.ArgumentDefaultsHelpFormatter(
            prog, max_help_position=27, width=90))
    # Exactly one of these action flags must be supplied
    actions_grp = parser.add_argument_group(
        title='Actions', description='(exactly one must be specified)')
    actions_mxgrp = actions_grp.add_mutually_exclusive_group(required=True)
    actions_mxgrp.add_argument('--local-to-s3',
                               action='store_true',
                               help='Upload local files to S3 storage')
    actions_mxgrp.add_argument('--s3-to-gridftp',
                               action='store_true',
                               help='Move files from S3 to gridftp storage')
    actions_mxgrp.add_argument('--meta-show',
                               action='store_true',
                               help='Show S3 metadata')
    actions_mxgrp.add_argument('--meta-vs-gridftp',
                               action='store_true',
                               help='Compare S3 metadata vs gridftp storage')
    actions_mxgrp.add_argument('--meta-vs-local',
                               action='store_true',
                               help='Compare S3 metadata vs local storage')
    actions_mxgrp.add_argument('--meta-set-gridftp',
                               action='store_true',
                               help='Set S3 metadata to match gridftp storage')
    actions_mxgrp.add_argument(
        '--meta-prune-to-gridftp',
        action='store_true',
        help='Prune from S3 metadata files not in gridftp')

    misc_grp = parser.add_argument_group('Miscellaneous options')
    misc_grp.add_argument('--local-path',
                          metavar='PATH',
                          help='local source file or directory')
    misc_grp.add_argument('--timeout',
                          metavar='SECONDS',
                          type=int,
                          help='terminate after this amount of time')
    misc_grp.add_argument('--tempdir',
                          metavar='PATH',
                          default='/tmp',
                          help='directory for (de)compression')
    misc_grp.add_argument('--dry-run',
                          default=False,
                          action='store_true',
                          help='dry run')

    s3_grp = parser.add_argument_group('S3 options')
    s3_grp.add_argument('--s3-url',
                        metavar='URL',
                        default='https://rgw.icecube.wisc.edu',
                        help='S3 endpoint URL')
    s3_grp.add_argument('-b',
                        '--bucket',
                        metavar='NAME',
                        required=True,
                        help='S3 bucket name')
    s3_grp.add_argument('-i', dest='access_key_id', help='S3 access key id')
    s3_grp.add_argument('-k',
                        dest='secret_access_key',
                        help='S3 secret access key')
    s3_grp.add_argument('--s3-threads',
                        metavar='NUM',
                        type=int,
                        default=80,
                        help='maximum number of S3 transfer threads')
    s3_grp.add_argument('--object',
                        metavar='KEY',
                        help='operate on specific S3 object only')
    s3_grp.add_argument('--s3-stats-freq',
                        metavar='SEC',
                        default=20,
                        type=int,
                        help='frequency of S3 upload progress updates')

    grid_grp = parser.add_argument_group('GridFTP options')
    grid_grp.add_argument('--gridftp-url',
                          metavar='URL',
                          default='gsiftp://gridftp.icecube.wisc.edu',
                          help='GridFTP endpoint URL')
    grid_grp.add_argument('--gridftp-path',
                          metavar='PATH',
                          help='GridFTP path')
    grid_grp.add_argument('--gridftp-threads',
                          metavar='NUM',
                          type=int,
                          default=45,
                          help='gridftp worker pool size')

    args = parser.parse_args()
    # Arm a wall-clock kill switch: the default SIGALRM handler terminates
    # the process after --timeout seconds
    if args.timeout:
        signal.alarm(args.timeout)
    if not os.path.isdir(args.tempdir):
        parser.exit(f'Invalid argument: {args.tempdir} is not a directory')

    # Connect to the S3 endpoint and ensure the bucket exists
    s3 = boto3.resource('s3',
                        'us-east-1',
                        endpoint_url=args.s3_url,
                        aws_access_key_id=args.access_key_id,
                        aws_secret_access_key=args.secret_access_key)
    bucket = s3.Bucket(args.bucket)
    bucket.create()

    # Use half the cores for compression; 1 MiB multipart chunks
    compr_threads = max(1, int(os.cpu_count() / 2))
    multipart_size = 2**20

    # Dispatch on the (mutually exclusive) action flags. Heavy submodules
    # are imported lazily so each action only pays for what it uses.
    if args.local_to_s3:
        import cta_data_relay.s3zstd
        tx_config = TransferConfig(max_concurrency=args.s3_threads,
                                   multipart_threshold=multipart_size,
                                   multipart_chunksize=multipart_size)
        if args.local_path is None:
            parser.exit('Missing required argument --local-path')
        if os.path.isfile(args.local_path):
            file_info = [(args.local_path, os.path.getsize(args.local_path))]
        else:
            file_info = [(de.path, de.stat().st_size)
                         for de in os.scandir(args.local_path) if de.is_file()]
            # Sort to send small files first. This avoids the situation where
            # a file that is too big to be transferred within the allowed time
            # permanently blocks files that follow it in the list
            file_info.sort(key=itemgetter(1))
        cta_data_relay.s3zstd.zupload(bucket, file_info, args.tempdir,
                                      compr_threads, tx_config,
                                      args.s3_stats_freq, args.dry_run)
    elif args.s3_to_gridftp:
        if args.gridftp_path is None:
            parser.exit('Missing required argument --gridftp-path')
        s3_to_gridftp(bucket, args.gridftp_url, args.gridftp_path,
                      args.tempdir, args.object, args.dry_run)
    elif args.meta_set_gridftp:
        import cta_data_relay.meta
        if args.gridftp_path is None:
            parser.exit('Missing required argument --gridftp-path')
        cta_data_relay.meta.set_gridftp(bucket, args.gridftp_url,
                                        args.gridftp_path,
                                        args.gridftp_threads, args.dry_run)
    elif args.meta_show:
        import cta_data_relay.meta
        cta_data_relay.meta.show(bucket, args.object)
    elif args.meta_vs_gridftp:
        import cta_data_relay.meta
        if args.gridftp_path is None:
            parser.exit('Missing required argument --gridftp-path')
        cta_data_relay.meta.diff_gridftp(bucket, args.gridftp_url,
                                         args.gridftp_path,
                                         args.gridftp_threads, args.dry_run)
    elif args.meta_vs_local:
        import cta_data_relay.meta
        if args.local_path is None:
            parser.exit('Missing required argument --local-path')
        cta_data_relay.meta.diff_local(bucket, args.local_path)
    elif args.meta_prune_to_gridftp:
        import cta_data_relay.meta
        if args.gridftp_path is None:
            parser.exit('Missing required argument --gridftp-path')
        cta_data_relay.meta.prune_not_in_gridftp(bucket, args.gridftp_url,
                                                 args.gridftp_path,
                                                 args.dry_run)
    else:
        # Unreachable in practice: the mutually exclusive group is required
        parser.exit('Usage error. Unexpected command.')
Ejemplo n.º 27
0
def main(argv=None):
    """Run the CDN simulation: parse options, build the geo network graph,
    populate hosts/caches, replay user requests through the event loop, and
    write results (and optionally figures) to a per-run directory.

    NOTE(review): Python 2-only code (xrange, sys.maxint, nodes_iter) —
    do not run under Python 3 without porting.
    """

    if argv is None:
        argv = sys.argv[1:]
    parser = argparse.ArgumentParser(
        description='CDN-Sim in Python',
        formatter_class=lambda prog: argparse.ArgumentDefaultsHelpFormatter(
            prog, max_help_position=32))

    # Input trace/topology files
    inFilesGr = parser.add_argument_group('Input files')
    inFilesGr.add_argument('-trace',
                           metavar='file',
                           default='usr_trace.dat',
                           help='User behavior trace')
    inFilesGr.add_argument('-links',
                           metavar='file',
                           default='as_links.dat',
                           help='IRL AS-to-AS links')
    inFilesGr.add_argument('-origin',
                           metavar='file',
                           default='origin.dat',
                           help='IRL origin prefixes')
    inFilesGr.add_argument('-rank',
                           metavar='file',
                           default='caida.org.dat',
                           help='CAIDA AS rank data')

    # Simulation parameters (hosts, caches, streaming mode, rates)
    simSetupGr = parser.add_argument_group('Simulation setup')
    simSetupGr.add_argument('-geo',
                            metavar='string',
                            default='de',
                            help='Comma-separated list of countries')
    # NOTE: default is an int but 'all' is also accepted (see below)
    simSetupGr.add_argument('-nhosts',
                            metavar='number',
                            default=1000000,
                            help='Maximal number of hosts')
    simSetupGr.add_argument('-active',
                            metavar='number',
                            default=100000,
                            help='Simultaneously active streams')
    simSetupGr.add_argument('-backnoise',
                            metavar='number',
                            default=0,
                            help='Simultaneous active background streams')
    simSetupGr.add_argument('-streaming',
                            choices=['live', 'vod'],
                            default='live',
                            help='Streaming Live/Vod')
    simSetupGr.add_argument('-ondemandCache',
                            action='store_true',
                            default=False,
                            help='Create caches on demand')
    simSetupGr.add_argument('-percentCache',
                            metavar='number',
                            type=int,
                            choices=xrange(1, 101),
                            default=0,
                            help='%% of ASes with static cache')
    simSetupGr.add_argument('-hierarchical',
                            action='store_true',
                            default=False,
                            help='Use hierarchical cache placement')
    simSetupGr.add_argument('-cachesec',
                            metavar='number',
                            type=int,
                            default=10,
                            help='# seconds of video to keep in cache')
    simSetupGr.add_argument('-cacheinit',
                            metavar='number',
                            type=float,
                            default=0.1,
                            help='ondemand cache init time')
    simSetupGr.add_argument('-cachethreshold',
                            metavar='number',
                            type=int,
                            default=1,
                            help='# streams to start a cache')
    simSetupGr.add_argument('-interactive',
                            action='store_true',
                            default=False,
                            help='Interactively populate ASes')
    simSetupGr.add_argument('-reqRate',
                            metavar='number',
                            type=float,
                            default=0,
                            help='Request rate per min (0-auto)')
    simSetupGr.add_argument('-scenario',
                            metavar='file',
                            default='',
                            help='Scenario file (format: time, rate/min)')
    simSetupGr.add_argument('-endtime',
                            metavar='number',
                            type=float,
                            default=3000,
                            help='Finalize simulation, no new requests')
    simSetupGr.add_argument('-waitCacheBoot',
                            action='store_true',
                            default=False,
                            help='Wait cache to boot or bypass it')

    # Result/plotting options
    resultsGr = parser.add_argument_group('Results')
    resultsGr.add_argument('-siminfo',
                           metavar='text',
                           default='',
                           help='Name of the simulation')
    resultsGr.add_argument('-figures',
                           action='store_true',
                           default=False,
                           help='Figures with results')
    resultsGr.add_argument('-allfigures',
                           action='store_true',
                           default=False,
                           help='Figures for all user streams')

    args = parser.parse_args(argv)

    # Select the matplotlib backend before the plotting modules import it:
    # interactive TkAgg only when a display is available, else file-only pdf
    import matplotlib
    if args.interactive and "DISPLAY" in os.environ:
        matplotlib.use('TkAgg')
    else:
        matplotlib.use('pdf')
    import geoNetGraph
    from netStreamingPrimitives import userRequests
    import hl_sim

    printWithClock("CDN Started on " + str(time.ctime()))

    # '-nhosts all' means no host cap; otherwise coerce to int
    max_hosts = sys.maxint
    if args.nhosts != 'all':
        max_hosts = int(args.nhosts)
    printWithClock("Maximal number of hosts is " + str(max_hosts))

    countries = ['de']
    if args.geo != "":
        countries = str(args.geo).replace(' ', '').split(',')
    else:
        printWithClock("Default geographic area: de")

    printWithClock("Building the geoNetGraph")
    g = geoNetGraph.geoNetGraph(args.links, args.origin, args.rank, countries)

    # In interactive mode the user hand-picks hosts/caches/provider nodes
    applyManualInputData = False
    if args.interactive:
        g.iSetGeoNetGraph(selectHosts=True,
                          selectCaches=True,
                          selectProvider=True)
        applyManualInputData = True

    initContentProviders(g)
    printWithClock("Populate the geoNetGraph")
    listOfHosts = populateGeoNetGraph(g, max_hosts, args.percentCache,
                                      applyManualInputData)

    # Count populated access ASes and statically cached ASes for the log
    nASes = nCaches = 0
    for tmpASNum, tmpAS in g.netGraph.nodes_iter(data=True):
        if 'ns_nets' in tmpAS and g.isAccessNode(tmpAS['type']):
            nASes += 1
        if 'static_cache' in tmpAS:
            nCaches += 1
    printWithClock("Number of populated ASes: " + str(nASes))
    printWithClock("Number of ASes with static caches: " + str(nCaches))

    # Unique per-run results directory: sim_res<name>(<timestamp>);
    # an existing 'debug_out' directory overrides this for debugging
    simTimeStamp = '-'.join([str(k) for k in time.localtime()[0:6]])
    simResDirName = 'sim_res' + args.siminfo + '(' + simTimeStamp + ')'

    if os.path.exists('debug_out'):
        simResDirName = 'debug_out'
    else:
        if not os.path.exists(simResDirName):
            os.makedirs(simResDirName)
        else:
            print("Result directory exists! Cancel simulation")
            exit(-1)

    printWithClock("Starting simulation on: " + str(time.ctime()))
    start = time.time()
    simulator = hl_sim.highLevelSimulation(args, simResDirName)
    ur = userRequests(simulator, args.trace, g, listOfHosts, max_hosts,
                      int(args.active))
    simulator.urRef = ur
    # Seed the event queue with either background noise or the first request
    if int(args.backnoise) > 0:
        simulator.eventPush(ur.getNoiseEvent(simulator.lastEventTime))
    else:
        simulator.eventPush(ur.getNextEvent(simulator.lastEventTime))

    # main simulation loop
    while simulator.eventQueue:
        simulator.step()

    stop = time.time()
    printWithClock("\nSimulation completed on: " + str(time.ctime()))
    printWithClock("Time spent (s): " + str(stop - start))

    simulator.saveSimStatsToFile()
    if args.figures:
        simulator.plotSimStats()
        g.drawGeoNetGraph(simResDirName + '/fig_topology.pdf')

    return 0
Ejemplo n.º 28
0
    # built-in types (CH 5)
    x['MemoryType'] = memoryview(_in)  # 2.7
    x['MemoryType2'] = memoryview(bytearray(_in))  # 2.7
    if PY3:
        x['DictItemsType'] = _dict.items()  # 2.7
        x['DictKeysType'] = _dict.keys()  # 2.7
        x['DictValuesType'] = _dict.values()  # 2.7
    else:
        x['DictItemsType'] = _dict.viewitems()  # 2.7
        x['DictKeysType'] = _dict.viewkeys()  # 2.7
        x['DictValuesType'] = _dict.viewvalues()  # 2.7
    # generic operating system services (CH 15)
    x['RawTextHelpFormatterType'] = argparse.RawTextHelpFormatter('PROG')
    x['RawDescriptionHelpFormatterType'] = argparse.RawDescriptionHelpFormatter(
        'PROG')
    x['ArgDefaultsHelpFormatterType'] = argparse.ArgumentDefaultsHelpFormatter(
        'PROG')
except NameError:
    pass
try:  # python 2.7 (and not 3.1)
    # functools.cmp_to_key exists on 2.7 and >=3.2; AttributeError on 3.0/3.1
    x['CmpKeyType'] = _cmpkey = functools.cmp_to_key(_methodwrap)  # 2.7, >=3.2
    x['CmpKeyObjType'] = _cmpkey('0')  #2.7, >=3.2
except AttributeError:
    pass
if PY3:  # oddities: removed, etc
    # buffer() was removed in Python 3; memoryview is its closest analogue
    x['BufferType'] = x['MemoryType']
else:
    x['BufferType'] = buffer('')

# -- cleanup ----------------------------------------------------------------
a.update(d)  # registered also succeed
if sys.platform[:3] == 'win':
Ejemplo n.º 29
0
# from glob import glob
from math import log10
# NOTE(review): star import presumably supplies parserbase/parseragg used
# below; `sys` is used but no `import sys` is visible in this chunk — confirm
from args_gwas import *
from py_helpers import unbuffer_stdout, file_len
# , read_conf, link
unbuffer_stdout()


#############
# Python 2 script: only announce parsing when not just printing help
if not (('-h' in sys.argv) or ('--help' in sys.argv)):
    print '\n...Parsing arguments...' 
#############

# Compose the parser from the shared base/aggregation sub-parsers so all
# pipeline scripts accept a consistent option set
parser = argparse.ArgumentParser(prog='agg_gwas.py',
                                 formatter_class=lambda prog:
                                 argparse.ArgumentDefaultsHelpFormatter(prog, max_help_position=40),
                                 parents=[parserbase,parseragg])

arg_file = parser.add_argument_group('Input Files')
arg_other = parser.add_argument_group('Other Settings')

arg_file.add_argument('--chunk-file', 
                    type=str,
                    metavar='FILE',
                    help='file defining chunks used for parallelized GWAS',
                    required=True)
arg_file.add_argument('--freq-file', 
                    type=str,
                    metavar='FILE',
                    help='file with case/control allele frequencies for full data (from \'plink --freq case-control --nonfounders\')',
                    required=True)
Ejemplo n.º 30
0
def main():
    """Render a math-enabled Markdown file to PDF and/or LaTeX via pandoc.

    Parses command-line arguments, optionally pre-processes the input with
    R's knitr (executing embedded code chunks), then invokes pandoc with a
    LaTeX template to produce the PDF and, if requested, the intermediate
    .tex file; finally the PDF can be opened in a viewer.

    Relies on module-level names defined elsewhere in this file:
    program_name, version, default_latex_template, pandoc_path,
    subprocess_exec, and the pandoc option lists (memory_usage_args,
    output_args, input_args, highlight_args, pdf_engine_args,
    geometry_args, extra_args).  TODO confirm their definitions.
    """

    parser = argparse.ArgumentParser(
     prog=program_name + ' v' + version,
     description=program_name + ' v' + version + ': ' + \
      """
			Utility for rendering math-enabled markdown documents to PDF format 
			and reserving latex output. Neatly packages the defaults of 
			RMarkdown's knitr, but without the code execution. Requires pandoc 
			and python. Requires R for knitr functionality.
			""",
     formatter_class=lambda prog:
      argparse.ArgumentDefaultsHelpFormatter(prog, max_help_position=20))

    parser.add_argument('mdfile',
                        metavar='mdfile',
                        type=str,
                        help="""
			Math-enabled Markdown file to render.
			""")

    parser.add_argument('-t',
                        '--template',
                        type=str,
                        default=default_latex_template,
                        help="""
		Latex template to render markdown.
		""")

    parser.add_argument('-l',
                        '--keep-latex',
                        action='store_true',
                        help="Keep LaTeX files used to generated PDF.")

    parser.add_argument('-lo',
                        '--latex-only',
                        action='store_true',
                        help="Don't generate PDF, just LaTeX.")

    parser.add_argument('-k',
                        '--knit',
                        action='store_true',
                        help="Pass through RMarkdown's knitr first.")

    parser.add_argument('-x',
                        '--show-pdf-xdg',
                        action='store_true',
                        help="Show the PDF after rendering. Uses xdg-open.")

    parser.add_argument(
        '-c',
        '--show-pdf-chrome',
        action='store_true',
        help="Show the PDF after rendering using Google Chrome.")

    # parser.add_argument(
    # 	'-v', '--verbose', action='store_true',
    # 	help="Display verbose output messages.")

    args = parser.parse_args()

    # Save a litany of paths and dirs.
    # NOTE(review): term_cwd is saved but never restored in this function —
    # the os.chdir below leaves the process in md_dir on return.
    term_cwd = os.path.abspath(os.getcwd())

    md_basename = os.path.basename(args.mdfile)
    md_filename = os.path.splitext(md_basename)[0]  # stem without extension
    md_filepath = os.path.abspath(args.mdfile)
    md_dir = os.path.dirname(md_filepath)

    # Default intermediate: the input itself (as <stem>.md in its own dir);
    # replaced below with <stem>.knit.md when --knit is given.
    md_intermediate_filepath = os.path.join(md_dir, md_filename + '.md')
    latex_template_path = os.path.abspath(args.template)
    latex_template_dir = os.path.dirname(latex_template_path)

    # Work from the document's directory so relative resources resolve.
    os.chdir(md_dir)

    # Execute and interleave the code if you want, using knitr
    if args.knit:
        # NOTE(review): knitr is fed '<stem>.md', which assumes the input
        # file actually has a .md extension (an .Rmd input would not be
        # found) — TODO confirm intended input extensions.
        r_code_on_the_fly = \
         'library(knitr); setwd("{:s}"); knit("{:s}", output="{:s}")'.format(
          md_dir,
          md_filename + '.md',
          md_filename + '.knit.md'
         )

        subprocess_exec(['Rscript', '-e', r_code_on_the_fly])

        # If knitted, change intermediate markdown path
        md_intermediate_filepath = \
         os.path.join(md_dir, md_filename + '.knit.md')

    # Let pandoc find images/includes next to the template and the document.
    resource_path_args = ('--resource-path', ':'.join(
        (latex_template_dir, md_dir)))

    # Args to pass to pandoc procedure(s); only '--output <path>' is
    # appended per invocation below.
    pandoc_args = [
        pandoc_path,
        *memory_usage_args,
        md_intermediate_filepath,
        *output_args,
        *input_args,
        '--template',
        latex_template_path,
        *highlight_args,
        *pdf_engine_args,
        *geometry_args,
        *resource_path_args,
        *extra_args,
    ]
    pdf_outpath = os.path.join(md_dir, md_filename + '.pdf')
    tex_outpath = os.path.join(md_dir, md_filename + '.tex')

    # Render the PDF from the Markdown
    if not args.latex_only:
        subprocess_exec(pandoc_args + ['--output', pdf_outpath])

    # Render the latex that generated the PDF (redundant, but necessary)
    if args.keep_latex or args.latex_only:
        subprocess_exec(pandoc_args + ['--output', tex_outpath])

    # NOTE(review): both viewer branches run even with --latex-only, when
    # no PDF was produced — TODO confirm this is intended.
    if args.show_pdf_xdg:
        # Open the PDF with nohupped XDG
        subprocess_exec(
            ['nohup', 'xdg-open',
             os.path.join(md_dir, md_filename + '.pdf')],
            verbose=False)

    if args.show_pdf_chrome:
        # Open the PDF with Chrome
        subprocess_exec(
            ['google-chrome',
             os.path.join(md_dir, md_filename + '.pdf')],
            verbose=False)