Ejemplo n.º 1
0
    def inject_arguments(self, parser: ArgumentParser) -> None:
        """Inject arguments.

        Registers ``--mssql-uri`` (env ``MSSQL_URI``) and records the
        parsed value on ``self._mssql_uri`` via the ``type`` callback.
        """
        super().inject_arguments(parser)

        def _inject_mssql_uri(mssql_uri: str) -> str:
            # Side effect: remember the URI on the instance before
            # returning it unchanged as the parsed option value.
            self._mssql_uri = mssql_uri
            return mssql_uri

        parser.add(
            "--mssql-uri",
            required=True,
            help=" ".join((
                "MSSQL URI used to connect to a MSSQL database:",
                ("mssql+pymssql://USER:PASS@HOST:PORT/DATABASE?"
                 "timeout=TIMEOUT"),
                # Trailing commas matter: without them adjacent string
                # literals concatenate and words run together in the
                # rendered help ("uri.Url encode", "slash),and").
                "Use a valid uri.",
                "Url encode all parts, but do not encode the entire uri.",
                "No unencoded colons, ampersands, slashes,",
                "question-marks, etc. in parts.",
                "Specifically, check url encoding of USER (domain slash),",
                "and PASSWORD.",
            )),
            env_var="MSSQL_URI",
            type=_inject_mssql_uri,
        )
Ejemplo n.º 2
0
 def inject_arguments(  # pylint: disable=no-self-use,protected-access
     self, parser: ArgumentParser
 ) -> None:
     """Inject arguments."""
     # Point the parser at the default config locations and let it
     # tolerate config-file keys it does not recognize.
     default_files = [
         "/local/config.yaml",
         "/secrets/config.yaml",
     ]
     parser._default_config_files = default_files
     parser._ignore_unknown_config_file_keys = True
     # Allow overriding the config file path on the command line.
     parser.add(
         "-c",
         "--config",
         is_config_file=True,
         help="config file path",
         env_var="CONFIG",  # make ENV match default metavar
     )
Ejemplo n.º 3
0
    def inject_arguments(self, parser: ArgumentParser) -> None:
        """Inject arguments."""
        super().inject_arguments(parser)

        def _inject_model(path: str) -> Model:
            # Unpickle the model, stash it on the instance, and hand it
            # back so the parser stores it as the option value.
            loaded = cast(Model, load_pickle_file(path))
            self.model = loaded
            return loaded

        parser.add(
            "--model",
            env_var="MODEL_PATH",
            help="Path to pickled model",
            required=True,
            type=_inject_model,
        )
Ejemplo n.º 4
0
    def inject_arguments(self, parser: ArgumentParser) -> None:
        """Inject arguments."""
        super().inject_arguments(parser)

        def _inject_mongo_uri(uri: str) -> str:
            # Capture the URI on the instance; the returned value is
            # what the parser records for the option.
            self._mongo_uri = uri
            return uri

        help_text = ("Mongo URI used to connect to a Mongo database: "
                     "mongodb://*****:*****@HOST1,HOST2,.../DATABASE?"
                     "replicaset=REPLICASET&authsource=admin "
                     "Url encode all parts: PASS in particular")
        parser.add(
            "--mongo-uri",
            env_var="MONGO_URI",
            help=help_text,
            required=True,
            type=_inject_mongo_uri,
        )
Ejemplo n.º 5
0
    def inject_arguments(self, parser: ArgumentParser) -> None:
        """Inject arguments."""
        super().inject_arguments(parser)

        def _inject_mssql_uri(uri: str) -> str:
            # Remember the URI on the instance; the parser keeps the
            # returned value as the option's parsed result.
            self._mssql_uri = uri
            return uri

        help_text = (
            "MSSQL URI used to connect to a MSSQL database: "
            "mssql+pymssql://USER:PASS@HOST:PORT/DATABASE?timeout=TIMEOUT "
            "Url encode all parts: USER (domain slash), PASS in particular"
        )
        parser.add(
            "--mssql-uri",
            env_var="MSSQL_URI",
            help=help_text,
            required=True,
            type=_inject_mssql_uri,
        )
Ejemplo n.º 6
0
def create_args():
    """Build the CLI parser and return the parsed arguments."""
    parser = ArgParser()
    # All three options are plain string-valued flags.
    for option in ('--db_section', '--reqnums', '--csv'):
        parser.add(option)
    return parser.parse_args()
Ejemplo n.º 7
0
def parse_args():
    """Parse CLI options (with .env defaults) for the page range and file."""
    parser = ArgParser(default_config_files=[".env"])
    parser.add("--start_page", type=int, required=True)
    parser.add("--end_page", default=701, type=int)
    parser.add("--file", help="books description file", required=True)
    return parser.parse_args()
Ejemplo n.º 8
0
    def inject_arguments(self, parser: ArgumentParser) -> None:
        """Inject arguments.

        Registers ``--mongo-uri`` (env ``MONGO_URI``) and records the
        parsed value on ``self._mongo_uri`` via the ``type`` callback.
        """
        super().inject_arguments(parser)

        def _inject_mongo_uri(mongo_uri: str) -> str:
            # Side effect: remember the URI on the instance before
            # returning it unchanged as the parsed option value.
            self._mongo_uri = mongo_uri
            return mongo_uri

        parser.add(
            "--mongo-uri",
            required=True,
            help=" ".join((
                "Mongo URI used to connect to a Mongo database:",
                ("mongodb://*****:*****@HOST1,HOST2,.../DATABASE?"
                 "replicaset=REPLICASET&authsource=admin"),
                # Trailing comma matters: without it this literal fused
                # with the next one ("uri.Url encode") in the help text.
                "Use a valid uri.",
                "Url encode all parts, but do not encode the entire uri.",
                "No unencoded colons, ampersands, slashes,",
                "question-marks, etc. in parts.",
                "Specifically, check url encoding of PASSWORD.",
            )),
            env_var="MONGO_URI",
            type=_inject_mongo_uri,
        )
Ejemplo n.º 9
0
def parse_config() -> Namespace:
    """Build the notification-server option parser and parse argv."""
    parser = ArgParser(
        config_file_parser_class=YAMLConfigFileParser,
        default_config_files=["config.yml"],
    )
    # Optional YAML config file; -p/-s are mandatory either way.
    parser.add('-c', '--config', is_config_file=True,
               help='notification server config in yaml format (.yml)')
    parser.add('-p', '--port', required=True, type=int,
               help='notification server port')
    parser.add('-s', '--secrets-file', required=True, type=str,
               help='secrets file path')
    return parser.parse_args()
Ejemplo n.º 10
0
def program_options():
    """Create argument parser for the CLI and parse argv.

    Returns the parsed namespace with ``epsilon`` and ``delta``
    normalized to lists for uniform downstream handling.
    """
    parser = ArgParser()
    dp_group = parser.add_argument_group(title="Required DP Parameters")
    io_group = parser.add_argument_group(
        title="Required Train Data Parameters")
    comm_group = parser.add_argument_group(
        title="Flags for Communication (no touch)")

    # File IO: two (normal, tumor) training files per party.
    io_group.add(
        "--train-normal-alice",
        metavar="TRAIN-NORMAL-DATA-FILE",
        help="Path to training data file consisting of data samples corresponding "
             "to the NORMAL classification label.",
        type=str,
        required=True
    )
    io_group.add(
        "--train-tumor-alice",
        metavar="TRAIN-TUMOR-DATA-FILE",
        help="Path to training data file consisting of data samples corresponding "
             "to the TUMOR classification label.",
        type=str,
        required=True
    )
    io_group.add(
        "--train-normal-bob",
        metavar="TRAIN-NORMAL-DATA-FILE",
        help="Path to training data file consisting of data samples corresponding "
             "to the NORMAL classification label.",
        type=str,
        required=True
    )
    io_group.add(
        "--train-tumor-bob",
        metavar="TRAIN-TUMOR-DATA-FILE",
        help="Path to training data file consisting of data samples corresponding "
             "to the TUMOR classification label.",
        type=str,
        required=True
    )

    # DP Req. Options
    dp_group.add(
        "--epsilon",
        nargs="+",
        help="Epsilon value(s) for differentially-private training. "
             "One or many epsilon values can be specified. If multiple epsilons are "
             "specified, then independent experiments will be run for each specified "
             "epsilon value. The results of each of these runs will be stored in "
             "separate, named result files. "
             "Epsilons can be specified as decimal values. Some examples of valid "
             "epsilon arguments are "
             "`--epsilon 3`, "
             "`--epsilon 5.32341`, "
             "`--epsilon 3 3.5 4 4.5 20`.",
        type=float,
        required=True
    )

    dp_group.add(
        "--delta",
        nargs="+",
        # Every fragment ends with a space so the concatenated help
        # reads correctly (the original ran words together:
        # "arespecified", "deltavalue", "files.To").
        help="Delta value(s) for differentially-private training. "
             "One or many delta values can be specified. If multiple deltas are "
             "specified, then independent experiments will be run for each delta "
             "value in combination with each epsilon value. "
             "The results of these runs are stored in separate, named result files. "
             "To use (eps)-DP for privacy calculations, pass the option\n"
             "`--delta 0`.",
        type=float,
        required=True
    )

    # Optional arguments
    parser.add(
        "--output-dir",
        help="Directory to store output trained model to. If the directory does not "
             "exist, it will be created.",
        type=str,
        default="owkin-results"
    )

    # Communication flags
    comm_group.add(
        "--port",
        help="Specifies the port through which the two workers should "
             "communicate on the host machine.",
        default=8081,  # plain int: don't rely on type= coercing a str default
        type=int)
    comm_group.add(
        "--subprocess",
        help="If set, the training will be performed between two subprocesses. If unset (default), "
             "the training will be performed with Docker containers.",
        default=False,
        action="store_true"
    )
    args = parser.parse_args()

    # nargs="+" already yields lists when supplied on the CLI; keep the
    # guard for defensive symmetry with any direct attribute assignment.
    if not isinstance(args.epsilon, list):
        args.epsilon = [args.epsilon]
    if not isinstance(args.delta, list):
        args.delta = [args.delta]

    return args
Ejemplo n.º 11
0
def parse_main_arguments(args_in: List[str]) -> MainArguments:
    """
    Configures the command-line interface.

    Parameters
    ----------
    args_in : list of str
        Full argument list from the command line.

    Returns
    -------
    arguments  : MainArguments
        A populated `MainArguments` object.
    """

    # Every option below may also be supplied via its env_var; CLI
    # flags take precedence over environment values.
    parser = ArgParser()
    parser.add(  # type: ignore
        "-c",
        "--csvpath",
        help="Base path for input files.",
        required=True,
        env_var="CSV_PATH",
    )
    parser.add(  # type: ignore
        "-e",
        "--engine",
        help="Database engine.",
        choices=[DbEngine.MSSQL, DbEngine.POSTGRESQL],
        default=DbEngine.MSSQL,
        env_var="DB_ENGINE",
    )
    parser.add(  # type: ignore
        "-s",
        "--server",
        help="Database server name or IP address",
        required=True,
        env_var="DB_SERVER",
    )
    parser.add(  # type: ignore
        "--port",
        help="Database server port number",
        type=int,
        env_var="DB_PORT",
    )
    parser.add(  # type: ignore
        "-d",
        "--dbname",
        help="Name of the database with the LMS tables.",
        env_var="DB_NAME",
        required=True,
    )

    # Named constants so the same flag strings can be scanned for in
    # args_in below, before the parser has actually run.
    USE_INTEGRATED = "--useintegratedsecurity"
    USE_INTEGRATED_SHORT = "-i"

    parser.add(  # type: ignore
        USE_INTEGRATED_SHORT,
        USE_INTEGRATED,
        help="Use Integrated Security for the database connection.",
        action="store_true",
    )
    # Username/password are only mandatory when integrated security was
    # not requested on the command line (checked by raw membership in
    # args_in since parsing hasn't happened yet).
    user_name_required = (USE_INTEGRATED not in args_in
                          and USE_INTEGRATED_SHORT not in args_in)

    # Retrieve this value because we need it in order to determine
    # if username and password are required
    integrated_env_var = os.getenv("USE_INTEGRATED_SECURITY")
    if integrated_env_var and integrated_env_var.lower() in ("true", "yes",
                                                             "t", "y"):
        user_name_required = False

    parser.add(  # type: ignore
        "-u",
        "--username",
        required=user_name_required,
        env_var="DB_USERNAME",
        help="Database username, when not using integrated security.",
    )
    parser.add(  # type: ignore
        "-p",
        "--password",
        required=user_name_required,
        env_var="DB_PASSWORD",
        help="Database user password, when not using integrated security.",
    )

    parser.add(  # type: ignore
        "-l",
        "--log-level",
        required=False,
        help="The log level for the tool.",
        choices=LOG_LEVELS,
        type=str,
        default="INFO",
        env_var="LOG_LEVEL",
    )

    parser.add(  # type: ignore
        "-n",
        "--encrypt",
        help="Encrypt the connection to the database.",
        action="store_true",
        env_var="ENCRYPT_SQL_CONNECTION",
    )
    parser.add(  # type: ignore
        "-t",
        "--trust-certificate",
        help=
        "When encrypting connections, trust the server certificate. Useful for localhost debuggin with a self-signed certificate. USE WITH CAUTION.",
        action="store_true",
        env_var="TRUST_SERVER_CERTIFICATE",
    )

    args_parsed = parser.parse_args(args_in)

    # Need to add this back in because reading it manually earlier
    # seems to cause it to be misread by the parser.
    args_parsed.useintegratedsecurity = (args_parsed.useintegratedsecurity
                                         or not user_name_required)

    # Positional construction — argument order here must match the
    # MainArguments constructor signature.
    arguments = MainArguments(
        args_parsed.csvpath,
        args_parsed.engine,
        args_parsed.log_level,
        args_parsed.server,
        args_parsed.dbname,
        args_parsed.port,
        args_parsed.encrypt,
        args_parsed.trust_certificate,
    )

    # Integrated security only applies to the MSSQL path; PostgreSQL
    # always connects with username/password.
    if args_parsed.useintegratedsecurity and args_parsed.engine == DbEngine.MSSQL:
        arguments.build_mssql_adapter_with_integrated_security()
    elif args_parsed.engine == DbEngine.MSSQL:
        arguments.build_mssql_adapter(
            args_parsed.username,
            args_parsed.password,
        )
    elif args_parsed.engine == DbEngine.POSTGRESQL:
        arguments.build_pgsql_adapter(
            args_parsed.username,
            args_parsed.password,
        )

    return arguments
def parse_main_arguments() -> MainArguments:
    """
    Configures the command-line interface.

    All options can also be supplied through the listed environment
    variables; CLI flags take precedence.

    Returns
    -------
    arguments : MainArguments
        A populated `MainArguments` object.
    """

    parser = ArgParser()

    parser.add(  # type: ignore
        "-b",
        "--baseUrl",
        help="The base url used to derive the api, metadata, oauth, and "
             "dependency urls (e.g., http://server).",
        type=str,
        required=True,
        env_var="PERF_API_BASEURL",
    )
    parser.add(  # type: ignore
        "-k",
        "--key",
        help="The web API OAuth key",
        type=str,
        required=True,
        env_var="PERF_API_KEY",
    )
    parser.add(  # type: ignore
        "-s",
        "--secret",
        help="The web API OAuth secret",
        type=str,
        required=True,
        env_var="PERF_API_SECRET",
    )
    parser.add(  # type: ignore
        "-i",
        "--ignoreCertificateErrors",
        help="Certificate errors are ignored",
        action='store_true',  # default false
        env_var="IGNORE_TLS_CERTIFICATE",
    )
    parser.add(  # type: ignore
        "-c",
        "--connectionLimit",
        help="Maximum concurrent connections to api",
        type=int,
        default=5,
        env_var="PERF_CONNECTION_LIMIT",
    )
    parser.add(  # type: ignore
        "-o",
        "--output",
        help="Directory for writing results",
        type=str,
        default="out",
        env_var="PERF_OUTPUT_DIR",
    )
    parser.add(  # type: ignore
        "-t",
        "--contentType",
        help="CSV or JSON",
        choices=list(OutputFormat),
        default=OutputFormat.CSV,
        type=OutputFormat,
    env_var="PERF_CONTENT_TYPE",
    )
    parser.add(  # type: ignore
        "-r",
        "--resourceList",
        help="(Optional) List of resources to test  - if not provided, "
             "all resources will be retrieved",
        nargs="+",
        env_var="PERF_RESOURCE_LIST",
    )
    parser.add(  # type: ignore
        "-p",
        "--pageSize",
        help="The page size to request. Max: 500.",
        type=int,
        default=100,  # int, not "100": match type= instead of relying on coercion
        env_var="PERF_API_PAGE_SIZE",
    )
    parser.add(  # type: ignore
        "-l",
        "--logLevel",
        help="Console log level: VERBOSE, DEBUG, INFO, WARN, ERROR",
        type=LogLevel,
        choices=list(LogLevel),
        default=LogLevel.INFO,
        env_var="PERF_LOG_LEVEL",
    )
    parser.add(  # type: ignore
        "-d",
        "--description",
        help="Description for the test run",
        type=str,
        default="Paging Volume Test Run",
        env_var="PERF_DESCRIPTION",
    )

    args_parsed = parser.parse_args()

    # Positional construction — order must match MainArguments' signature.
    arguments = MainArguments(
        args_parsed.baseUrl,
        args_parsed.connectionLimit,
        args_parsed.key,
        args_parsed.secret,
        args_parsed.ignoreCertificateErrors,
        args_parsed.output,
        args_parsed.description,
        args_parsed.contentType,
        args_parsed.resourceList or [],  # normalize "not given" to empty list
        args_parsed.pageSize,
        args_parsed.logLevel,
    )

    return arguments
Ejemplo n.º 13
0
def setup_parser(parser: configargparse.ArgParser) -> configargparse.ArgParser:
    """Set up ArgParser instance for this application.

    Registers the config-file, MQTT, Redis, and sync options and
    returns the same parser for chaining.
    """

    def _str2bool(value: str) -> bool:
        # BUG FIX: ``type=bool`` treats ANY non-empty string — even
        # "False" — as True; parse the usual truthy spellings instead.
        return str(value).strip().lower() in ("1", "t", "true", "y", "yes", "on")

    parser.add("-c",
               "--config",
               required=False,
               is_config_file=True,
               help="Real Time Database Synchronization Publisher "
                    "configuration file path")

    parser.add("--mqtt-host",
               type=str,
               action="store",
               default="mqtt-broker",
               env_var='MQTT_HOST',
               help="MQTT broker to connect to")

    parser.add("--mqtt-port",
               type=int,
               action="store",
               default=1883,
               help="Inet port to connect to")

    parser.add("--mqtt-ca-certs",
               type=str,
               action="store",
               default=None,
               help="MQTT Certificate Authority certificate files")

    parser.add("--mqtt-tls-insecure",
               type=_str2bool,
               action="store",
               default=False,
               help="MQTT TLS is insecure")

    parser.add("--mqtt-topic",
               type=str,
               action="store",
               default="/",
               env_var='MQTT_TOPIC',
               help="MQTT topic to listen to messages")

    parser.add("--redis-host",
               type=str,
               action="store",
               default="redis",
               env_var='REDIS_HOST',
               help="Redis host to connect to")

    parser.add("--redis-db",
               type=int,
               action="store",
               default=0,
               env_var='REDIS_DB',
               help="Redis database to connect to")

    parser.add("--agent-id",
               type=str,
               action="store",
               default="root-rtdb",
               env_var='AGENT_ID',
               help="Process agent used to complement the sync")

    parser.add("--limit-time",
               type=int,
               action="store",
               default=120,
               env_var='LIMIT_TIME',
               help="Time limit to try to reconnect")

    return parser
Ejemplo n.º 14
0
def _initialize_arguments(p: configargparse.ArgParser):
    """Register run options, parse them, and prepare the run environment.

    Side effects: creates the timestamped run directory, reconfigures
    the module-level ``log`` with file + console handlers at INFO, and
    selects the torch device. Returns the parsed argument namespace.
    """
    p.add('--model_storage_directory', help='The directory caching all model runs')
    p.add('--bert_model_path', help='Model path to BERT')
    p.add('--labels', help='Numbers of labels to predict over', type=str)
    p.add('--architecture', help='Training architecture', type=str)
    # NOTE(review): type=bool treats any non-empty string as True —
    # consider a str-to-bool converter for these two flags.
    p.add('--freeze_bert', help='Whether to freeze bert', type=bool)

    p.add('--batch_size', help='Batch size for training multi-label document classifier', type=int)
    p.add('--bert_batch_size', help='Batch size for feeding 510 token subsets of documents through BERT', type=int)
    p.add('--epochs', help='Epochs to train', type=int)
    # Optimizer arguments
    p.add('--learning_rate', help='Optimizer step size', type=float)
    p.add('--weight_decay', help='Adam regularization', type=float)

    p.add('--evaluation_interval', help='Evaluate model on test set every evaluation_interval epochs', type=int)
    p.add('--checkpoint_interval', help='Save a model checkpoint to disk every checkpoint_interval epochs', type=int)

    # Non-config arguments
    p.add('--cuda', action='store_true', help='Utilize GPU for training or prediction')
    p.add('--device')
    p.add('--timestamp', help='Run specific signature')
    p.add('--model_directory', help='The directory storing this model run, a sub-directory of model_storage_directory')
    p.add('--use_tensorboard', help='Use tensorboard logging', type=bool)
    args = p.parse_args()

    # --labels arrives as one comma-space separated string, e.g. "a, b".
    args.labels = args.labels.split(', ')

    # Set run specific environment configurations
    args.timestamp = time.strftime("run_%Y_%m_%d_%H_%M_%S") + "_{machine}".format(machine=socket.gethostname())
    args.model_directory = os.path.join(args.model_storage_directory, args.timestamp)  # run directory
    os.makedirs(args.model_directory, exist_ok=True)

    # Handle logging configurations: one file handler inside the run
    # directory plus one console handler, both at INFO.
    log.handlers.clear()
    formatter = logging.Formatter('%(message)s')
    fh = logging.FileHandler(os.path.join(args.model_directory, "log.txt"))
    fh.setLevel(logging.INFO)
    fh.setFormatter(formatter)
    log.addHandler(fh)
    ch = logging.StreamHandler()
    ch.setLevel(logging.INFO)
    ch.setFormatter(formatter)
    log.setLevel(logging.INFO)
    log.addHandler(ch)
    log.info(p.format_values())

    # Set global GPU state
    if torch.cuda.is_available() and args.cuda:
        if torch.cuda.device_count() > 1:
            log.info("Using %i CUDA devices" % torch.cuda.device_count())
        else:
            log.info("Using CUDA device:{0}".format(torch.cuda.current_device()))
        args.device = 'cuda'
    else:
        log.info("Not using CUDA :(")
        # BUG FIX: was ``args.dev = 'cpu'``, which left args.device
        # unset on the CPU path.
        args.device = 'cpu'

    return args
Ejemplo n.º 15
0
    def _process_args(self):
        flags = ArgParser(prog='repoman',
                          add_config_file_help=True,
                          ignore_unknown_config_file_keys=True,
                          default_config_files=self.config_files)
        flags.add('--config-file', required=False, is_config_file=True,
                  env_var='REPOMAN_CONFIG_FILE',
                  help='override config file path')

        # global flags
        flags.add('--simpledb-domain', action='store', required=True,
                  env_var='REPOMAN_SIMPLEDB_DOMAIN')
        flags.add('--s3-bucket', action='store', required=True,
                  env_var='REPOMAN_S3_BUCKET')
        flags.add('--aws-profile', action='store', required=False, default='',
                  env_var='REPOMAN_AWS_CREDENTIAL_PROFILE',
                  help='Use the specified profile in ~/.aws/credentials')
        flags.add('--region', action='store', required=False,
                  default=None, help='AWS region to connect to')
        flags.add('--aws-role', action='store', required=False, default='',
                  env_var='REPOMAN_AWS_ROLE',
                  help='Full ARN of IAM role to assume before calling any '
                  'other AWS APIs')
        flags.add('--log-config', action='store', required=False, default='',
                  env_var='REPOMAN_LOG_CONFIG',
                  help='path to a JSON file with a python log configuration ')
        flags.add('--skip-checkup', action='store_true',
                  required=False, default=False,
                  help='do not run system health checkup on every action')
        flags.add('--debug', action='store_true', required=False,
                  default=False, help='debug logging')
        flags.add('--gpg-home',
                  required=False, env_var='REPOMAN_GPG_HOME',
                  default='~/.gnupg',
                  help='set path to gpg keyring')
        flags.add('--gpg-signer', action='append',
                  required=False, help='gpg identity to sign as')
        flags.add('--gpg-pinentry-path', action='store',
                  default='/usr/bin/pinentry',
                  required=False, help='path to gpg pinentry program')
        flags.add('--gpg-passphrase', action='append',
                  required=False,
                  help='passphrase for gpg secret key for signing '
                  '(if multiple, must be in same order as --gpg-signer)')
        flags.add('--auto-purge', action='store', default=0,
                  type=int, required=False,
                  help='automatically purge packages older than the '
                  'last N revisions when adding or copying')

        # subparsers for commands
        commands = flags.add_subparsers(dest='command')

        # singelton commands
        commands.add_parser('checkup', help='check that all systems are go')
        commands.add_parser('backup',
                            help='dump the simpledb state to a JSON file')

        # restore command
        restore_flags = commands.add_parser(
            'restore', help='restore simpledb state from a JSON file')
        restore_flags.add(
            'filename', nargs=1, help='path to backup file')

        # commands that take flags
        setup_flags = commands.add_parser(
            'setup',
            help='do initial system configuration: create simpledb domain '
                 'and s3 bucket, specify at least one each architecture, '
                 'distribution and component to publish.'
        )
        repo_flags = commands.add_parser(
            'repo', help='repo management commands')
        add_flags = commands.add_parser(
            'add', help='add package files to repo')
        cp_flags = commands.add_parser(
            'cp', help='move packages between components and distributions')
        rm_flags = commands.add_parser(
            'rm', help='remove specific packages from repo')
        publish_flags = commands.add_parser(
            'publish', help='publish the repository to s3')
        query_flags = commands.add_parser(
            'query', help='query the repository')

        # command flags

        # query
        query_flags.add('-a', '--architecture',
                        action='append', required=False,
                        help='narrow query by architecture(s)')
        query_flags.add('-d', '--distribution',
                        action='append', required=False,
                        help='narrow query by distribution(s)')
        query_flags.add('-c', '--component',
                        action='append', required=False,
                        help='narrow query by component(s)')
        query_flags.add('-p', '--package',
                        action='append', required=False,
                        help='narrow query by package name(s)')
        query_flags.add('-w', '--wildcard', action='store_true', default=False,
                        help='match package names to left of --package flag')
        query_flags.add('-H', '--query-hidden', action='store_true',
                        default=False,
                        help='include packages "hidden" by the removal of '
                        'their distribution/component/architecture')
        query_flags.add('-f', '--format', action='store',
                        dest='outputfmt', default='simple',
                        choices=('json', 'jsonc', 'packages', 'simple',
                                 'plain', 'grid', 'fancy_grid', 'pipe',
                                 'orgtbl', 'jira', 'psql', 'rst', 'mediawiki',
                                 'moinmoin', 'html', 'latex', 'latex_booktabs',
                                 'textile'),
                        help='select output format for querys & rm/cp prompts')
        query_latest = query_flags.add_mutually_exclusive_group()
        query_latest.add('-v', '--version', action='append',
                         help='only return packages matching these versions')
        query_latest.add('-l', '--latest', action='store_const',
                         dest='latest_versions', const=1,
                         help='only return the most recent package version '
                         '(equivalent to `--recent 1`)')
        query_latest.add('-r', '--recent', action='store', default=0,
                         type=int, dest='latest_versions',
                         help='only return the N most recent package versions')

        # setup
        setup_flags.add('-a', '--architecture',
                        action='append', required=True,
                        help='specify at least one architecture')
        setup_flags.add('-d', '--distribution',
                        action='append', required=True,
                        help='specify at least one distribution')
        setup_flags.add('-c', '--component',
                        action='append', required=True,
                        help='specify at least one component')
        setup_flags.add('--s3-acl', action='store',
                        default='private', required=False,
                        choices=S3_BUCKET_ACLS,
                        help='set a canned ACL for the S3 bucket '
                        '(default is private)')
        setup_flags.add('--s3-region', action='store', required=False,
                        help='set region for s3 bucket '
                        '(default is us-east-1 AKA US/Standard)')
        setup_flags.add('--sns-topic', action='store', required=False,
                        help='AWS SNS topic name for logging')
        setup_flags.add('--origin', action='store', required=False,
                        help='origin string for repository')
        setup_flags.add('--label', action='store', required=False,
                        help='label string for repository')
        setup_flags.add('--enable-website', action='store_true',
                        required=False, default=False,
                        help='configure public website hosting for '
                        'the S3 bucket. Implies --s3-acl=public-read')

        # repo management operations
        repo_commands = repo_flags.add_subparsers(dest='repo_command')
        repo_add_architecture_flags = repo_commands.add_parser(
            'add-architecture', help='add a architecture to repo')
        repo_add_architecture_flags.add(
            'architecture_names', nargs='+', help='architecture to add')
        repo_add_architecture_flags.add(
            '--i-fear-no-evil', action='store_true',
            default=False, required=False,
            help='skip confirmation step for scary actions')
        raa_confirm = \
            repo_add_architecture_flags.add_mutually_exclusive_group()
        raa_confirm.add('--confirm', action='store_true', dest='confirm',
                        required=False, default=True,
                        help='confirm any mutating actions')
        raa_confirm.add('-y', '--no-confirm', action='store_false',
                        dest='confirm', required=False, default=False,
                        help='do not prompt for confirmation')

        repo_rm_architecture_flags = repo_commands.add_parser(
            'rm-architecture', help='remove a architecture from repo')
        repo_rm_architecture_flags.add(
            'architecture_names', nargs='+', help='architecture to remove')
        repo_rm_architecture_flags.add(
            '--i-fear-no-evil', action='store_true',
            default=False, required=False,
            help='skip confirmation step for scary actions')
        rra_confirm = repo_rm_architecture_flags.add_mutually_exclusive_group()
        rra_confirm.add('--confirm', action='store_true', dest='confirm',
                        required=False, default=True,
                        help='confirm any mutating actions')
        rra_confirm.add('-y', '--no-confirm', action='store_false',
                        dest='confirm', required=False, default=False,
                        help='do not prompt for confirmation')

        repo_add_distribution_flags = repo_commands.add_parser(
            'add-distribution', help='add a distribution to repo')
        repo_add_distribution_flags.add(
            'distribution_names', nargs='+', help='distribution to add')
        repo_add_distribution_flags.add(
            '--i-fear-no-evil', action='store_true',
            default=False, required=False,
            help='skip confirmation step for scary actions')
        rad_confirm = \
            repo_add_distribution_flags.add_mutually_exclusive_group()
        rad_confirm.add('--confirm', action='store_true', dest='confirm',
                        required=False, default=True,
                        help='confirm any mutating actions')
        rad_confirm.add('-y', '--no-confirm', action='store_false',
                        dest='confirm', required=False, default=False,
                        help='do not prompt for confirmation')

        repo_rm_distribution_flags = repo_commands.add_parser(
            'rm-distribution', help='remove a distribution from repo')
        repo_rm_distribution_flags.add(
            'distribution_names', nargs='+', help='distribution to remove')
        repo_rm_distribution_flags.add(
            '--i-fear-no-evil', action='store_true',
            default=False, required=False,
            help='skip confirmation step for scary actions')
        rrd_confirm = repo_rm_distribution_flags.add_mutually_exclusive_group()
        rrd_confirm.add('--confirm', action='store_true', dest='confirm',
                        required=False, default=True,
                        help='confirm any mutating actions')
        rrd_confirm.add('-y', '--no-confirm', action='store_false',
                        dest='confirm', required=False, default=False,
                        help='do not prompt for confirmation')

        repo_add_component_flags = repo_commands.add_parser(
            'add-component', help='add a component to repo')
        repo_add_component_flags.add(
            'component_names', nargs='+', help='component to add')
        repo_add_component_flags.add(
            '--i-fear-no-evil', action='store_true',
            default=False, required=False,
            help='skip confirmation step for scary actions')
        rac_confirm = repo_add_component_flags.add_mutually_exclusive_group()
        rac_confirm.add('--confirm', action='store_true', dest='confirm',
                        required=False, default=True,
                        help='confirm any mutating actions')
        rac_confirm.add('-y', '--no-confirm', action='store_false',
                        dest='confirm', required=False, default=False,
                        help='do not prompt for confirmation')

        repo_rm_component_flags = repo_commands.add_parser(
            'rm-component', help='remove a component from repo')
        repo_rm_component_flags.add(
            'component_names', nargs='+', help='component to remove')
        repo_rm_component_flags.add(
            '--i-fear-no-evil', action='store_true',
            default=False, required=False,
            help='skip confirmation step for scary actions')
        rrc_confirm = repo_rm_component_flags.add_mutually_exclusive_group()
        rrc_confirm.add('--confirm', action='store_true', dest='confirm',
                        required=False, default=True,
                        help='confirm any mutating actions')
        rrc_confirm.add('-y', '--no-confirm', action='store_false',
                        dest='confirm', required=False, default=False,
                        help='do not prompt for confirmation')

        repo_add_topic_flags = repo_commands.add_parser(
            'add-topic', help='send notifications to an SNS topic')
        repo_add_topic_flags.add('topic_name', nargs=1, action='store',
                                 help='SNS topic to configure for logging')
        rat_confirm = repo_add_topic_flags.add_mutually_exclusive_group()
        rat_confirm.add('--confirm', action='store_true', dest='confirm',
                        required=False, default=True,
                        help='confirm any mutating actions')
        rat_confirm.add('-y', '--no-confirm', action='store_false',
                        dest='confirm', required=False, default=False,
                        help='do not prompt for confirmation')

        repo_rm_topic_flags = repo_commands.add_parser(
            'rm-topic', help='remove SNS topic logging')
        rrt_confirm = repo_rm_topic_flags.add_mutually_exclusive_group()
        rrt_confirm.add('--confirm', action='store_true', dest='confirm',
                        required=False, default=True,
                        help='confirm any mutating actions')
        rrt_confirm.add('-y', '--no-confirm', action='store_false',
                        dest='confirm', required=False, default=False,
                        help='do not prompt for confirmation')

        repo_commands.add_parser('show-config',
                                 help='show current repo configuration')

        repo_add_origin_flags = repo_commands.add_parser(
            'add-origin', help='set the repository origin string')
        repo_add_origin_flags.add('origin', nargs=1, action='store',
                                  help='origin string')
        rao_confirm = repo_add_origin_flags.add_mutually_exclusive_group()
        rao_confirm.add('--confirm', action='store_true', dest='confirm',
                        required=False, default=True,
                        help='confirm any mutating actions')
        rao_confirm.add('-y', '--no-confirm', action='store_false',
                        dest='confirm', required=False, default=False,
                        help='do not prompt for confirmation')

        repo_add_label_flags = repo_commands.add_parser(
            'add-label', help='set the repository label string')
        repo_add_label_flags.add('label', nargs=1, action='store',
                                 help='label string')
        ral_confirm = repo_add_label_flags.add_mutually_exclusive_group()
        ral_confirm.add('--confirm', action='store_true', dest='confirm',
                        required=False, default=True,
                        help='confirm any mutating actions')
        ral_confirm.add('-y', '--no-confirm', action='store_false',
                        dest='confirm', required=False, default=False,
                        help='do not prompt for confirmation')

        repo_commands.add_parser('show-config',
                                 help='show current repo configuration')
        # add packages
        add_flags.add('-d', '--distribution', action='append', required=True,
                      help='add to specified distribution')
        add_flags.add('-c', '--component', action='append', required=True,
                      help='add to specified component')
        add_flags.add('--overwrite',
                      action='store_true', required=False, default=False,
                      help='re-upload packages even if they already exist '
                      'in the repository')
        add_flags.add('--publish',
                      action='store_true', required=False, default=False,
                      help='publish the repo to s3 after adding packages')
        add_flags.add('files', nargs='+', help='debian package files to add')

        # copy
        cp_flags.add('--src-distribution', action='store', required=True,
                     help='specify one or more distributions to copy from')
        cp_flags.add('--dst-distribution', action='store',
                     help='specify one or more distributions to copy to')
        cp_flags.add('--src-component', action='store', required=True,
                     help='specify one or more components to copy from')
        cp_flags.add('--dst-component', action='store',
                     help='specify one or more components to copy to')
        cp_flags.add('-a', '--architecture', action='append', required=False,
                     help='limit to specified architectures')
        cp_flags.add('-p', '--package', action='append',
                     help='specify one or more package names to act on')
        cp_flags.add('--overwrite',
                     action='store_true', required=False, default=False,
                     help='re-upload packages even if they already exist '
                     'in the repository -- this only applies to cross-'
                     'distribution copies')
        cp_flags.add('--promote',
                     action='store_true', required=False, default=False,
                     help='only copy files where the latest source version '
                     'is more recent than the latest destination version ')
        cp_flags.add('-w', '--wildcard', action='store_true', default=False,
                     help='match package names to left of --package flag')
        cp_latest = cp_flags.add_mutually_exclusive_group()
        cp_latest.add('-v', '--version', action='append',
                      help='only copy packages matching these versions')
        cp_latest.add('-l', '--latest', action='store_const',
                      dest='latest_versions', const=1,
                      help='only copy the most recent package version '
                      '(equivalent to `--recent 1`)')
        cp_latest.add('-r', '--recent', action='store', default=0,
                      type=int, dest='latest_versions',
                      help='only copy the N most recent package versions')
        cp_flags.add('--i-fear-no-evil', action='store_true',
                     default=False, required=False,
                     help='skip confirmation step for scary actions')
        cp_confirm = cp_flags.add_mutually_exclusive_group()
        cp_confirm.add('--confirm', action='store_true', dest='confirm',
                       required=False, default=True,
                       help='confirm any mutating actions')
        cp_confirm.add('-y', '--no-confirm', action='store_false',
                       dest='confirm', required=False, default=False,
                       help='do not prompt for confirmation')

        # remove
        rm_flags.add('-a', '--architecture', action='append', required=False,
                     help='limit to specified architectures')
        rm_flags.add('-d', '--distribution', action='append', required=False,
                     help='limit to specified distributions')
        rm_flags.add('-c', '--component', action='append', required=False,
                     help='limit to specified distributions')
        rm_flags.add('-p', '--package', action='append', required=False,
                     help='limit to specified package names')
        rm_flags.add('--remove-from-s3', action='store_true',
                     default=False, required=False,
                     help='remove package files from s3')
        rm_flags.add('--publish', action='store_true', required=False,
                     default=False, help='publish the repo to s3')
        rm_flags.add('-w', '--wildcard', action='store_true', default=False,
                     help='match package names to left of --package flag')
        rm_flags.add('-H', '--rm-hidden', action='store_true',
                     default=False,
                     help='include packages "hidden" by the removal of '
                     'their distribution/component/architecture')
        rm_flags.add('--i-fear-no-evil', action='store_true',
                     default=False, required=False,
                     help='skip confirmation step for scary actions')
        rm_flags.add('-f', '--format', action='store',
                     dest='outputfmt', default='simple',
                     choices=('json', 'jsonc', 'simple', 'plain', 'grid',
                              'fancy_grid', 'pipe', 'orgtbl', 'jira',
                              'psql', 'rst', 'mediawiki', 'moinmoin',
                              'html', 'latex', 'latex_booktabs', 'textile'),
                     help='select output format for querys & rm/cp prompts')
        rm_latest = rm_flags.add_mutually_exclusive_group()

        rm_latest.add('-v', '--version', action='append',
                      help='only delete packages matching these versions')
        rm_latest.add('-l', '--exclude-latest', action='store_const',
                      dest='latest_versions', const=1,
                      help='only delete the most recent package version '
                      '(equivalent to `--recent 1`)')
        rm_latest.add('-r', '--exclude-recent', action='store', default=0,
                      type=int, dest='latest_versions',
                      help='only delete the N most recent package versions')

        rm_confirm = rm_flags.add_mutually_exclusive_group()
        rm_confirm.add('--confirm', action='store_true', dest='confirm',
                       required=False, default=True,
                       help='confirm any mutating actions')
        rm_confirm.add('-y', '--no-confirm', action='store_false',
                       dest='confirm', required=False, default=False,
                       help='do not prompt for confirmation')

        # publish to s3
        publish_flags.add('-d', '--distribution', action='append',
                          required=False,
                          help='limit to specified distributions '
                               '(default is all)')

        config = flags.parse_args(self.argv)
        return config
Ejemplo n.º 16
0
def _initialize_arguments(p: configargparse.ArgParser):
    """Register model-storage options on *p* and parse the command line.

    Parameters
    ----------
    p : configargparse.ArgParser
        Parser to configure.

    Returns
    -------
    argparse.Namespace
        The parsed arguments. (The original bound them to an unused local
        and returned ``None``, discarding the configuration; returning the
        namespace is backward-compatible and lets callers use it.)
    """
    p.add('--model_storage_directory', help='The directory caching all model runs')
    return p.parse_args()
Ejemplo n.º 17
0
def parse_args(args):
    """Parse command line parameters

    Args:
      args ([str]): command line parameters as list of strings

    Returns:
      :obj:`argparse.Namespace`: command line parameters namespace
    """
    # Config files are searched in the user's home directory first, then as
    # any *.ini next to the source tree.
    config_file_candidates = [
        '~/nlpia-bot.ini',
        '~/nlpia_bot.ini',
        '~/nlpiabot.ini',
        '~/nlpia.ini',
        os.path.join(os.path.dirname(constants.BASE_DIR), '*.ini'),
        os.path.join(os.path.dirname(constants.SRC_DIR), '*.ini'),
    ]
    parser = ArgParser(
        default_config_files=config_file_candidates,
        description="Command line bot application, e.g. bot how do you work?")

    parser.add('-c', '--config', required=False, is_config_file=True,
               help="Config file path (default: ~/nlpia-bot.ini)")
    parser.add_argument('--version', action='version',
                        version='nlpia_bot {ver}'.format(ver=__version__))
    parser.add_argument('--name', dest="nickname", type=str, metavar="STR",
                        default=DEFAULT_CONFIG['name'],
                        help="IRC nick or CLI command name for the bot")
    parser.add_argument('-n', '--num_top_replies', dest="num_top_replies",
                        type=int, metavar="INT",
                        default=DEFAULT_CONFIG['num_top_replies'],
                        help="Limit on the number of top (high score) replies that are randomly selected from.")
    parser.add_argument('-p', '--persist', dest='persist', action='store_true',
                        default=str(DEFAULT_CONFIG['persist'])[0].lower() in 'ty1p',
                        help="Don't exit. Retain language model in memory and maintain dialog until user says 'exit' or 'quit'")
    parser.add_argument('-b', '--bots', dest="bots", type=str, metavar="STR",
                        default=DEFAULT_CONFIG['bots'],  # None so config.ini can populate defaults
                        help="Comma-separated list of bot personalities to load. Defaults: pattern,parul,search_fuzzy,time,eliza")

    # Logging verbosity: -v for INFO, -vv for DEBUG; both write `loglevel`.
    parser.add_argument('-v', '--verbose', dest="loglevel",
                        action='store_const', const=logging.INFO,
                        help="set loglevel to INFO")
    parser.add_argument('-vv', '--very-verbose', dest="loglevel",
                        action='store_const', const=logging.DEBUG,
                        help="set loglevel to DEBUG")

    parser.add_argument('-q', '--quality_weights', dest="quality_weights",
                        type=str, metavar="DICT_STR",
                        default=DEFAULT_CONFIG['quality_weights'],
                        help='Dictionary of weights: {"spell": .5, "sentiment": .5, "semantics": .5}')

    # Positional: the utterance for the bot to respond to.
    parser.add_argument('words', type=str, nargs='*',
                        help="Words to pass to bot as an utterance or conversational statement requiring a bot reply or action.")

    # Individual reply-quality score weights.
    parser.add_argument('--semantics', type=float, default=1.0,
                        dest='semantics', metavar='FLOAT',
                        help='set weight of the semantic quality score')
    parser.add_argument('--sentiment', type=float, default=0.5,
                        dest='sentiment', metavar='FLOAT',
                        help='set weight of the sentiment quality score')
    parser.add_argument('--spell', type=float, default=0.2,
                        dest='spell', metavar='FLOAT',
                        help='set weight of the spell quality score')
    return parser.parse_args(args)
Ejemplo n.º 18
0
def parse_main_arguments(args_in: List[str]) -> MainArguments:
    """
    Configures the command-line interface.

    Parameters
    ----------
    args_in : list of str
        Full argument list from the command line.

    Returns
    -------
    arguments  : MainArguments
        A populated `MainArguments` object.
    """

    parser = ArgParser()

    # Every option can alternatively be supplied via the env_var listed.
    parser.add(  # type: ignore
        "-a",
        "--classroom-account",
        required=True,
        help="The email address of the Google Classroom admin account.",
        type=str,
        default="",
        env_var="CLASSROOM_ACCOUNT",
    )

    parser.add(  # type: ignore
        "-l",
        "--log-level",
        required=False,
        help="The log level for the tool.",
        choices=constants.LOG_LEVELS,
        type=str,
        default="INFO",
        env_var="LOG_LEVEL",
    )

    parser.add(  # type: ignore
        "-o",
        "--output-directory",
        required=False,
        help="The output directory for the generated csv files.",
        type=str,
        default="data/",
        env_var="OUTPUT_DIRECTORY",
    )

    parser.add(  # type: ignore
        "-s",
        "--usage-start-date",
        required=False,
        help="Start date for usage data pull in yyyy-mm-dd format.",
        type=str,
        default="",
        env_var="START_DATE",
    )

    parser.add(  # type: ignore
        "-e",
        "--usage-end-date",
        required=False,
        help="End date for usage data pull in yyyy-mm-dd format.",
        type=str,
        default="",
        env_var="END_DATE",
    )

    parser.add(  # type: ignore
        "-d",
        "--sync-database-directory",
        required=False,
        help="The directory for the sync database.",
        type=str,
        default="data",
        env_var="SYNC_DATABASE_DIRECTORY",
    )

    parser.add(  # type: ignore
        "-f",
        "--feature",
        required=False,
        help="Features to include.",
        type=str,
        nargs='*',
        choices=constants.VALID_FEATURES,
        default=[],
        env_var="FEATURE",
    )

    args_parsed = parser.parse_args(args_in)

    # BUG FIX: the original message referred to `classroom-account` even
    # though the value being validated is `output-directory`.
    assert isinstance(args_parsed.output_directory,
                      str), "The specified `output-directory` is not valid."

    arguments = MainArguments(
        classroom_account=args_parsed.classroom_account,
        log_level=args_parsed.log_level,
        output_directory=args_parsed.output_directory,
        usage_start_date=args_parsed.usage_start_date,
        usage_end_date=args_parsed.usage_end_date,
        sync_database_directory=args_parsed.sync_database_directory,
        extract_activities=constants.Features.Activities
        in args_parsed.feature,
        extract_assignments=constants.Features.Assignments
        in args_parsed.feature,
        extract_attendance=constants.Features.Attendance
        in args_parsed.feature,
        extract_grades=constants.Features.Grades in args_parsed.feature,
    )

    return arguments
Ejemplo n.º 19
0
import yaml

# ArgParser constructor options: suppress the auto-generated config-file and
# env-var help lines, and allow any option to be supplied through an
# environment variable prefixed with CHECKERS_.
parser_args = {
    'add_config_file_help': False,
    'add_env_var_help': False,
    'auto_env_var_prefix': 'CHECKERS_',
}

# Single optional positional argument; defaults to the current directory.
tournament_dir_arg = {
    'help': 'Tournament directory.',
    'nargs': '?',
    'default': '.',
}

p = ArgParser(**parser_args)
p.add('tournament_dir', **tournament_dir_arg)
options = p.parse_args()
# Change into the tournament directory so later relative file access
# (e.g. the stats/*.stat glob below) resolves against it.
os.chdir(options.tournament_dir)

# Point value of each game-result character: win, draw, loss.
SCORE = {'1': 1.0, '½': 0.5, '0': 0.0}


def score(s):
    """Return the total points for a result string such as ``'1½0'``.

    Each character is looked up in ``SCORE`` (a ``KeyError`` is raised for
    unknown characters); an empty string scores ``0.0``.
    """
    # Start from 0.0 so an empty string yields a float, as before.
    return sum((SCORE[ch] for ch in s), 0.0)


def load_stats():
    files = glob('stats/*.stat')
Ejemplo n.º 20
0
import secrets

from configargparse import ArgParser

# Server CLI: every flag may also come from a KLAUD_* environment variable
# or from ./settings.ini.
p = ArgParser(
    auto_env_var_prefix='KLAUD_',
    default_config_files=['./settings.ini'],
)
# HTTP serving options.
p.add('-c', '--config', is_config_file=True, help='config file path')
p.add('-p', '--port', type=int, default=8000, help='port for serving')
p.add('-H', '--host', type=str, default='0.0.0.0', help='host for serving')
p.add('--hot-reload', action='store_true', help='enable hot reload')

# MongoDB connection options.
p.add('--db-host', type=str, default='localhost', help='mongo server host')
p.add('--db-port', type=int, default=27017, help='mongo server port')
p.add('--db-user', type=str, default='user', help='mongo server user')
p.add('--db-password',
      type=str,
      default='hackme',
      help='mongo server password')
p.add('--db-name', type=str, default='klaud', help='mongo database name')

# Token signing options.
p.add('-S',
      '--secret',
      type=str,
      # NOTE(review): this default is evaluated once at import time, so each
      # process start gets a fresh random secret unless -S/KLAUD_SECRET is
      # set — existing tokens are invalidated across restarts. Confirm this
      # is intentional.
      default=secrets.token_hex(64),
      help='secret token')
p.add('--access-token-life',
      type=int,
      default=15,
      help='access token life duration (in minutes)')
Ejemplo n.º 21
0
def get_configuration(args_in: List[str]) -> Configuration:
    """
    Retrieves configuration from the command line or environment variables.

    Parameters
    ----------
    args_in: List[str]
        The system arguments received by the main script

    Returns
    -------
    An object of type Configuration
    """
    parser = ArgParser()

    # Each option may also be supplied through the environment variable
    # named by ``env_var``.
    parser.add("-t", "--api-token", required=True, type=str,
               env_var="INAT_API_TOKEN",
               help="An API token acquired from https://www.inaturalist.org/users/api_token")
    parser.add("-z", "--log-level", default="INFO", type=str,
               env_var="LOG_LEVEL",
               help="Standard Python logging level, e.g. ERROR, WARNING, INFO, DEBUG")
    parser.add("-u", "--user-name", required=True, type=str,
               env_var="INAT_USER_NAME",
               help="iNaturalist user name")
    parser.add("-p", "--project-slug", required=True, type=str,
               env_var="PROJECT_SLUG",
               help="Slug (short name) of the project to extract")
    parser.add("-s", "--page-size", default=200, type=int,
               env_var="PAGE_SIZE",
               help="Number of records to retrieve per request. Default: 200. Max: 200. Use lower value for testing.")
    parser.add("-o", "--output-directory", default=os.path.join(".", "out"),
               type=str, env_var="OUTPUT_DIR",
               help="Directory name for output files.")
    parser.add("-l", "--last-id", default="0", type=str, env_var="LAST_ID",
               help="The last observation ID from a previous download, used to start a fresh download from the next available observation.")
    parser.add("-i", "--input-file", default=None, type=str,
               env_var="INPUT_FILE",
               help="An input file that will be merged with the downloaded file")

    parsed = parser.parse_args(args_in)

    return Configuration(
        api_token=parsed.api_token,
        log_level=parsed.log_level,
        user_name=parsed.user_name,
        project_slug=parsed.project_slug,
        page_size=parsed.page_size,
        output_directory=parsed.output_directory,
        last_id=parsed.last_id,
        input_file=parsed.input_file,
    )
            #print(output)
            if exists(output_pth):
                file_ = pd.read_csv(output_pth, index_col=0)
                ##append
                pd.concat(
                    [file_,
                     pd.DataFrame.from_dict(output, orient="index")],
                    axis=0).to_csv(output_pth)
            else:
                pd.DataFrame.from_dict(output,
                                       orient="index").to_csv(output_pth)


if __name__ == "__main__":
    # Build the CLI: a config file plus the style-transfer input/output
    # options, then hand the parsed namespace to main().
    arg_parser = ArgParser()
    arg_parser.add('-c', '--config_file', default='config.yml',
                   is_config_file=True, help='Path to config file.')
    arg_parser.add('--content_fns', type=str, nargs='+',
                   help='List of variables.')
    arg_parser.add('--style_fns', type=str, nargs='+',
                   help='List of variables.')
    arg_parser.add('--data_dir', type=str,
                   help='Directory containing examples')
    arg_parser.add('--out_dir', type=str,
                   help='Directory where print output')
    arg_parser.add('--lc', type=int, help='lc')
    arg_parser.add('--ltv', type=int, help='ltv')

    main(arg_parser.parse_args())
def get_conn_string_from_args() -> str:
    """Build a SQLAlchemy ``mssql+pyodbc`` connection string from ``sys.argv``.

    Integrated Security (trusted connection) is used by default; pass
    ``--useintegratedsecurity false`` together with ``--username`` and
    ``--password`` for SQL authentication.

    Returns
    -------
    str
        Connection string for SQL Server via ODBC Driver 17.
    """
    parser = ArgParser()
    parser.add(
        "--server",
        action="store",
        default="localhost",
        help="Database server name or IP address",
    )
    parser.add(
        "--port", action="store", default="1433", help="Database server port number"
    )
    parser.add(
        "--dbname",
        action="store",
        default="test_analytics_middle_tier_engage",
        help="Name of the test database",
    )
    parser.add(
        "--useintegratedsecurity",
        action="store",
        default=True,
        help="Use Integrated Security for the database connection",
    )
    parser.add(
        "--username",
        action="store",
        help="Database username when not using integrated security",
    )
    parser.add(
        "--password",
        action="store",
        help="Database user password, when not using integrated security",
    )

    parsed = parser.parse_args(sys.argv[1:])

    server = parsed.server
    port = parsed.port or "1433"  # guard against an explicit empty value
    db_name = parsed.dbname
    username = parsed.username
    password = parsed.password

    # BUG FIX: when given on the command line the flag arrives as a *string*
    # (action="store"), so any value -- even "false" or "no" -- was truthy
    # and integrated security could never be disabled. Interpret common
    # negative spellings explicitly; the True default still means "on".
    integrated = str(parsed.useintegratedsecurity).strip().lower() not in (
        "false", "no", "f", "n", "0", "",
    )

    if integrated:
        return f"mssql+pyodbc://{server},{port}/{db_name}?driver=ODBC+Driver+17+for+SQL+Server&Trusted_Connection=yes"
    else:
        return f"mssql+pyodbc://{username}:{password}@{server},{port}/{db_name}?driver=ODBC+Driver+17+for+SQL+Server"
Ejemplo n.º 24
0
def program_options():
    """Create argument parser for the CLI and parse the arguments.

    Returns
    -------
    argparse.Namespace
        Parsed options, with ``epsilon``/``delta`` guaranteed to be lists
        and ``train_normal``/``train_tumor``/``output_dir`` resolved to
        absolute paths.
    """
    parser = ArgParser()
    dp_group = parser.add_argument_group(title="Required DP Parameters")
    io_group = parser.add_argument_group(
        title="Required Train Data Parameters")

    # File IO
    io_group.add(
        "--train-normal",
        metavar="TRAIN-NORMAL-DATA-FILE",
        help="Path to training data file consisting of data samples corresponding "
             "to the NORMAL classification label.",
        type=str,
        required=True
    )
    io_group.add(
        "--train-tumor",
        metavar="TRAIN-TUMOR-DATA-FILE",
        help="Path to training data file consisting of data samples corresponding "
             "to the TUMOR classification label.",
        type=str,
        required=True
    )

    # DP Req. Options
    # (Help text fixed: "epislon" -> "epsilon".)
    dp_group.add(
        "--epsilon",
        nargs="+",
        help="Epsilon value(s) for differentially-private training. "
             "One or many epsilon values can be specified. If multiple epsilons are "
             "specified, then independent experiments will be run for each specified "
             "epsilon value. The results of each of these runs will be stored in "
             "separate, named result files. "
             "Epsilons can be specified as decimal values. Some examples of valid "
             "epsilon arguments are "
             "`--epsilon 3`, "
             "`--epsilon 5.32341`, "
             "`--epsilon 3 3.5 4 4.5 20`.",
        type=float,
        required=True
    )

    # (Help text fixed: the adjacent string literals were missing separating
    # spaces -- "deltas arespecified", "deltavalue", "files.To" -- and
    # misspelled "results"; grammar "pass use the option" corrected.)
    dp_group.add(
        "--delta",
        nargs="+",
        help="Delta value(s) for differentially-private training. "
             "One or many delta values can be specified. If multiple deltas are "
             "specified, then independent experiments will be run for each delta "
             "value in combination with each epsilon value. "
             "The results of these runs are stored in separate, named result files. "
             "To use (eps)-DP for privacy calculations, use the option\n"
             "`--delta 0`.",
        type=float,
        required=True
    )

    parser.add("--participant",
               metavar="PARTICIPANT",  # typo fixed (was "PARTICPANT")
               help="Server or client.",
               choices=["server", "client"],
               required=True)

    parser.add("--training-seed",
               help="Seed used for the training.",
               type=int,
               default=42)

    parser.add(
        "--output-dir",
        help="Directory to store output result files to. If the directory does not "
             "exist, it will be created.",
        type=str,
        default="."
    )

    parser.add("--host",
               help="Specifies the server address.",
               default="localhost",
               type=str)

    parser.add(
        "--port",
        help="Specifies the port through which the two workers should "
             "communicate on the host machine.",
        # Use an int default to match type=int. (argparse converts *string*
        # defaults through `type`, so "8081" happened to work, but an int
        # literal is unambiguous.)
        default=8081,
        type=int)

    parser.add("--mode",
               help="Specifies the launching mode.",
               default="subprocess",
               choices=["subprocess", "docker"],
               type=str)

    args = parser.parse_args()

    # Defensive normalization: nargs="+" already yields lists from the
    # command line, but keep the guard in case another source supplies a
    # scalar.
    if not isinstance(args.epsilon, list):
        args.epsilon = [args.epsilon]

    if not isinstance(args.delta, list):
        args.delta = [args.delta]

    # Convert all relative file paths into absolute paths.
    def resolve_path(p):
        """Expand *p* to an absolute path string."""
        return str(pathlib.Path(p).resolve())

    args.train_normal = resolve_path(args.train_normal)
    args.train_tumor = resolve_path(args.train_tumor)
    if args.output_dir is not None:
        args.output_dir = resolve_path(args.output_dir)

    return args
Ejemplo n.º 25
0
def parse_main_arguments(args_in: List[str]) -> MainArguments:
    """
    Configures the command-line interface.

    Parameters
    ----------
    args_in : list of str
        Full argument list from the command line.

    Returns
    -------
    arguments  : MainArguments
        A populated `MainArguments` object.
    """

    parser = ArgParser()

    # Connection basics; server and database name are always required.
    parser.add(  # type: ignore
        "-s",
        "--server",
        help="Database server name or IP address",
        required=True,
        env_var="DB_SERVER",
    )
    parser.add(  # type: ignore
        "--port",
        help="Database server port number",
        type=int,
        env_var="DB_PORT",
        default=1433,  # default SQL Server port
    )
    parser.add(  # type: ignore
        "-d",
        "--dbname",
        help="Name of the database with the LMS tables.",
        env_var="DB_NAME",
        required=True,
    )

    # Flag spellings, used below to detect integrated security on the raw
    # argument list *before* parsing.
    USE_INTEGRATED = "--useintegratedsecurity"
    USE_INTEGRATED_SHORT = "-i"

    parser.add(  # type: ignore
        USE_INTEGRATED_SHORT,
        USE_INTEGRATED,
        help="Use Integrated Security for the database connection.",
        action="store_true",
    )
    # Credentials are only mandatory when integrated security is not requested.
    user_name_required = (USE_INTEGRATED not in args_in
                          and USE_INTEGRATED_SHORT not in args_in)

    # Retrieve this value because we need it in order to determine
    # if username and password are required
    integrated_env_var = os.getenv("USE_INTEGRATED_SECURITY")
    if integrated_env_var and integrated_env_var.lower() in ("true", "yes",
                                                             "t", "y"):
        user_name_required = False

    parser.add(  # type: ignore
        "-u",
        "--username",
        required=user_name_required,
        env_var="DB_USERNAME",
        help="Database username, when not using integrated security.",
    )
    parser.add(  # type: ignore
        "-p",
        "--password",
        required=user_name_required,
        env_var="DB_PASSWORD",
        help="Database user password, when not using integrated security.",
    )

    parser.add(  # type: ignore
        "-l",
        "--log-level",
        required=False,
        help="The log level for the tool.",
        choices=LOG_LEVELS,
        type=str,
        default="INFO",
        env_var="LOG_LEVEL",
    )

    parser.add(  # type: ignore
        "-e",
        "--exceptions-report-directory",
        required=False,
        help="File path for optional output of a CSV exception report.",
        type=str,
        env_var="EXCEPTIONS_REPORT_DIRECTORY",
    )

    parser.add(  # type: ignore
        "-n",
        "--encrypt",
        help="Encrypt the connection to the database.",
        action="store_true",
        env_var="ENCRYPT_SQL_CONNECTION",
    )
    parser.add(  # type: ignore
        "-t",
        "--trust-certificate",
        help=
        "When encrypting connections, trust the server certificate. Useful for localhost debugging with a self-signed certificate. USE WITH CAUTION.",
        action="store_true",
        env_var="TRUST_SERVER_CERTIFICATE",
    )
    parser.add(  # type: ignore
        "-g",  # because 'e' and 'n' are already taken (;′⌒`)
        "--engine",
        help=
        "Database engine: mssql for Microsoft SQL Server, or postgresql for PostgreSQL.",
        choices=[DB_ENGINE.MSSQL, DB_ENGINE.POSTGRESQL],
        default=DB_ENGINE.MSSQL,
        type=str,
        env_var="DB_ENGINE",
    )

    args_parsed = parser.parse_args(args_in)

    # Need to add this back in because reading it manually earlier
    # seems to cause it to be misread by the parser.
    args_parsed.useintegratedsecurity = (args_parsed.useintegratedsecurity
                                         or not user_name_required)

    arguments = MainArguments(
        args_parsed.log_level, args_parsed.exceptions_report_directory,
        args_parsed.engine, args_parsed.server, args_parsed.dbname,
        args_parsed.username, args_parsed.password, args_parsed.port,
        args_parsed.encrypt, args_parsed.trust_certificate,
        args_parsed.useintegratedsecurity)

    return arguments
Ejemplo n.º 26
0
def parse_main_arguments(args_in: List[str]) -> MainArguments:
    """
    Configures the command-line interface.

    Parameters
    ----------
    args_in : list of str
        Full argument list from the command line.

    Returns
    -------
    arguments  : MainArguments
        A populated `MainArguments` object.
    """

    parser = ArgParser()
    parser.add(  # type: ignore
        "-k",
        "--client-key",
        required=True,
        help="Schoology client key.",
        type=str,
        env_var="SCHOOLOGY_KEY",
    )

    parser.add(  # type: ignore
        "-s",
        "--client-secret",
        required=True,
        help="Schoology client secret.",
        type=str,
        env_var="SCHOOLOGY_SECRET",
    )

    parser.add(  # type: ignore
        "-o",
        "--output-directory",
        required=False,
        help="The output directory for the generated csv files.",
        type=str,
        default="",
        env_var="OUTPUT_DIRECTORY",
    )

    parser.add(  # type: ignore
        "-l",
        "--log-level",
        required=False,
        help="The log level for the tool.",
        choices=constants.LOG_LEVELS,
        type=str,
        default="INFO",
        env_var="LOG_LEVEL",
    )

    parser.add(  # type: ignore
        "-p",
        "--page-size",
        required=False,
        help="Page size for the paginated requests.",
        type=int,
        default=200,
        env_var="PAGE_SIZE",
    )

    parser.add(  # type: ignore
        "-i",
        "--input-directory",
        required=False,
        help="Input directory for usage CSV files.",
        type=str,
        default=None,
        env_var="SCHOOLOGY_INPUT_DIRECTORY",
    )

    parser.add(  # type: ignore
        "-d",
        "--sync-database-directory",
        required=False,
        help="The directory for the sync database.",
        type=str,
        default="data",
        env_var="SYNC_DATABASE_DIRECTORY",
    )

    # Zero or more feature names; default is an empty list (no optional extracts).
    parser.add(  # type: ignore
        "-f",
        "--feature",
        required=False,
        help="Features to include.",
        type=str,
        nargs='*',
        choices=constants.VALID_FEATURES,
        default=[],
        env_var="FEATURE",
    )

    args_parsed = parser.parse_args(args_in)
    # Required
    # NOTE(review): `assert` statements are stripped under `python -O`, so these
    # validations vanish in optimized runs — consider raising ValueError instead.
    assert isinstance(
        args_parsed.client_key, str
    ), "Argument `client-key` must be a string."
    assert isinstance(
        args_parsed.client_secret, str
    ), "Argument `client-secret` must be a string."

    # Optional
    assert isinstance(
        args_parsed.output_directory, str
    ), "The specified `output-directory` is not valid."
    assert (
        args_parsed.log_level in constants.LOG_LEVELS
    ), "The specified `log-level` is not an allowed value."
    assert isinstance(
        args_parsed.page_size, int
    ), "Argument `page-size` must be an int."

    arguments = MainArguments(
        client_key=args_parsed.client_key,
        client_secret=args_parsed.client_secret,
        output_directory=args_parsed.output_directory,
        log_level=args_parsed.log_level,
        page_size=args_parsed.page_size,
        input_directory=args_parsed.input_directory,
        sync_database_directory=args_parsed.sync_database_directory,
        # Each extract_* flag is on only when its feature was requested via --feature.
        extract_activities=constants.Features.Activities in args_parsed.feature,
        extract_assignments=constants.Features.Assignments in args_parsed.feature,
        extract_attendance=constants.Features.Attendance in args_parsed.feature,
        extract_grades=constants.Features.Grades in args_parsed.feature,
    )

    return arguments
Ejemplo n.º 27
0
def main():
    """Parse configuration (files, flags) and launch the FactorioMCd daemon.

    Reads defaults from /etc/factoriomcd.ini and ~/.factoriomcd.ini, sets up
    colored logging at DEBUG or INFO level, then runs FactorioMCd.
    """
    parser = ArgParser(
        default_config_files=['/etc/factoriomcd.ini', '~/.factoriomcd.ini'])
    parser.add('-d', '--debug', action='store_true')
    parser.add('-v', '--verbose', action='store_true')

    parser.add('--log-file', default="/opt/factorio/server.out")

    parser.add('--server-id', default="1")

    # NOTE(review): default passwords below are placeholders ("asdasd") —
    # confirm they are always overridden via config in real deployments.
    parser.add('--rcon-host', default="localhost")
    parser.add('--rcon-password', default="asdasd")
    # Fix: type=int so a value supplied on the CLI or in a config file is
    # parsed as int, matching the int default (previously the default was an
    # int but user-supplied values arrived as str).
    parser.add('--rcon-port', default=31337, type=int)

    parser.add('--ws-url',
               default="ws://127.0.0.1:8000/ws_v1/server_callback/1/")
    parser.add('--ws-password', default="asdasd")

    options = parser.parse_args()
    if options.verbose:
        coloredlogs.install(level='DEBUG')
        logger.debug("FactorioMCd initializing...")
    else:
        coloredlogs.install(level='INFO')

    FactorioMCd(options).run()
Ejemplo n.º 28
0

def main(graphRadius: int, branch: int, tour: bool, debug: bool, drawType: str,
         pixelRadius: int, nextPieceDict: str):
    """Build a tour graph when *tour* is set; otherwise do nothing.

    The next-piece ordering is supplied as a JSON-encoded mapping in
    *nextPieceDict*; *drawType* names a member of the DrawType enum.
    """
    if not tour:
        return
    ordering = json.loads(nextPieceDict)
    graph = Tour(rad=graphRadius,
                 branch=branch,
                 drawType=DrawType[drawType],
                 debug=debug,
                 nxt=ordering)
    graph.Build()


if __name__ == "__main__":
    p = ArgParser(default_config_files=["graph.conf"])
    p.add("--branch",
          type=int,
          help="in tour graph: branch factor for each hop.")
    # NOTE(review): type=bool on a CLI option means any non-empty string
    # parses as True (bool("False") is True) — consider action="store_true".
    p.add("--tour", type=bool, help="whether to make a tour graph")
    p.add("--debug", type=bool, help="prints debugging info")
    p.add("--graphRadius",
          type=int,
          help="max abs(x) or abs(y) coord for graph.")
    p.add("--drawType",
          type=str,
          help="which DrawType to use for rendoring board")
    p.add("--pixelRadius",
          type=int,
          help="for .bmp renders, radius for each piece's color")
    p.add("--nextPieceDict",
          type=str,
          help="defines piece ordering for a Tour")
    # Fix: the parser was built but never consumed. Parse the options and
    # dispatch to main(), whose parameters match the option names exactly.
    args = p.parse_args()
    main(graphRadius=args.graphRadius,
         branch=args.branch,
         tour=args.tour,
         debug=args.debug,
         drawType=args.drawType,
         pixelRadius=args.pixelRadius,
         nextPieceDict=args.nextPieceDict)
Ejemplo n.º 29
0
    def argument_parser():
        """Build and return the configargparse parser for pipeline submission.

        Options are grouped as: general, archive, JIRA, EUPS, science,
        glide-in, transfer, and Condor settings. The caller is responsible
        for invoking parse_args() on the returned parser.
        """
        # Create command line arguments
        parser = ArgParser()
        # General arguments
        parser.add('--db_section',required=True,help = "Database section in your \
                             .desservices.ini file, e.g., db-desoper or db-destest")
        parser.add("--user", action="store", default=os.environ['USER'],
                            help="username that will submit")
        parser.add('--paramfile',is_config_file=True,help='Key = Value file that can be used to replace\
                             command-line')
        parser.add('--csv',help='CSV of exposures and information specified by user. If specified, \
                             code will use exposures in csv to submit jobs. Must also specify \
                             --delimiter')
        parser.add('--exclude_list',help='A comma-separated list or line-separated file of exposures \
                             to exclude from the dataframe')
        parser.add('--delimiter',default=',',help='The delimiter if specifying csv and is not \
                             comma-separated')
        parser.add('--campaign',required=True, help='Directory in pipebox where templates are \
                             stored, e.g., $PIPEBOX_DIR/templates/pipelines/finalcut/-->Y2A1dev<--')
        parser.add('--savefiles',action='store_true',help='Saves submit files to submit later.')
        parser.add('--queue_size',default=1000,help='If set and savefiles is not specified, code \
                             will submit specified runs up until queue_size is reached. Code \
                             will wait until queue drops below limit to submit next job')
        parser.add('--total_queue',action='store_true',help='If specified, total jobs per \
                             pipeline per machine will be counted and user will be ignored')
        parser.add('--labels',help='Human-readable labels to "mark" a given processing attempt')
        parser.add('--template_name',help='submitwcl template within pipeline/campaign')
        parser.add('--configfile',help='Name of user cfg file')
        parser.add('--out',help='Output directory for submit files')
        parser.add('--auto',action='store_true',help='Will run autosubmit mode if specified')
        parser.add('--resubmit_failed',action='store_true',help='Will ressubmit failed runs')
        parser.add('--resubmit_max',default=99,help='Set max attempt number for resubmit-failed.')
        parser.add('--ignore_processed',action='store_true',help='Will skip any expnum \
                             that has been attempted to process, pass/fail.')
        parser.add('--wait',default=30,help='Wait time (seconds) between dessubmits. \
                                             Default=30s')

        # Archive arguments
        parser.add('--target_site',required=True,help='Computing node, i.e., fermigrid-sl6')
        parser.add('--archive_name',help='Home archive to store products, e.g., \
                             desar2home,prodbeta,...')
        parser.add('--project',default='ACT',help='Archive directory where runs are \
                             stored, e.g., $ARCHIVE/-->ACT<--/finalcut/')
        parser.add('--rundir',help='Archive directory structure')
        parser.add('--http',help='The machine to copy files through: desar0, desar1,desar2')
        # JIRA arguments
        parser.add('--jira_parent',help='JIRA parent ticket under which\
                             new ticket will be created.')
        parser.add('--jira_description',help='Description of ticket\
                             found in JIRA')
        parser.add('--jira_project',default='DESOPS',help='JIRA project where \
                             ticket will be created, e.g., DESOPS')
        parser.add('--jira_summary',help='Title of JIRA ticket. To submit multiple \
                             exposures under same ticket you can specify jira_summary')
        parser.add('--jira_user',help='JIRA username')
        parser.add('--jira_section',default='jira-desdm',help='JIRA section \
                             in .desservices.ini file')
        parser.add('--ignore_jira',default=False,action='store_true',help="If specified will not \
                            connect to JIRA, but must specify reqnum and jira_parent.")
        parser.add('--reqnum',help='Part of processing unique identifier. Tied to JIRA ticket \
                             number')
        parser.add('--decade', action='store_true', help='Uses the DECADE subsection of WCL')

        # EUPS arguments
        # NOTE(review): action='append' with nargs='+' yields a list of lists
        # (one inner list per --eups_stack occurrence) — confirm consumers expect that.
        parser.add('--eups_stack',action='append',nargs='+', required=True,help='EUPS production stack, \
                                                                               e.g., finalcut Y2A1+4')

        # Science arguments
        parser.add('--ccdnum',help='CCDs to be processed.')
        parser.add('--minsigma',help='Specify minsigma for immask (defaults to 6.0)')
        parser.add('--nite',help='For auto mode: if specified will submit all exposures found \
                         from nite')
        parser.add('--niterange',nargs='+',action='append',help='Specify a range of nites')
        parser.add('--RA','-ra',nargs='+',action='append',help='RA in deg., in the order of min max')
        parser.add('--Dec','-dec',nargs='+',action='append',help='Dec in deg., in the order of min max')
        parser.add('--epoch',help='Observing epoch. If not specified, will be calculated. E.g.,\
                         SVE1,SVE2,Y1E1,Y1E2,Y2E1,Y2E2...')
        parser.add('--inputcals_file',help='Key=Var list of calibrations to be used in processing. \
                         $PIPEBOX_DIR/templates/inputcals for a sample')

        # glide in options
        parser.add('--time_to_live',default=None,type=float,help='The amount of time-to-live (in hours)\
                          for the job to grab a glidein')

        # Transfers
        parser.add('--nginx',action='store_true',help='Use nginx?')

        # Condor options
        parser.add('--request_memory',default=8000,help='Amount of memory (MB) to use for processing.\
                                        Default (8000) is set for finalcut on fermigrid-ce nodes. For \
                                        supercal on fermigrid-ce nodes try 32000.')
        parser.add('--request_disk',default=90000000,help='Amount of disk space (MB) to use for \
                                        processing. Default (90000000) is set for finalcut on \
                                        fermigrid-ce nodes. For supercal on fermigrid-ce nodes try \
                                        200000000.')
        parser.add('--request_cpus',default=1,help='# of cpus to use for processing. Default (1) is \
                                        set for finalcut on fermigrid-ce nodes.')

        return parser
        target_fn = inargs.out_dir + inargs.out_pref + '_targets.nc'
        print('Save features:', feature_fn)
        feature_ds.to_netcdf(feature_fn)
        print('Save targets:', target_fn)
        target_ds.to_netcdf(target_fn)

    t2 = timeit.default_timer()
    print('Total time: %.2f s' % (t2 - t1))


if __name__ == '__main__':

    p = ArgParser()
    # Fix: use p.add (configargparse's documented alias for add_argument)
    # consistently instead of mixing p.add and p.add_argument on one parser.
    p.add('--config_file',
          default='config.yml',
          is_config_file=True,
          help='Name of config file in this directory. '
          'Must contain feature and target variable lists.')
    p.add('--inputs', type=str, nargs='+', help='Feature variables')
    p.add('--outputs', type=str, nargs='+', help='Target variables')
    p.add('--in_dir',
          type=str,
          nargs='+',
          help='Directory with input (aqua) files.')
    p.add('--out_dir',
          type=str,
          help='Directory to write preprocessed file.')
    p.add('--aqua_names',
          type=str,
          nargs='+',
          help='String with filenames to be processed.')
    # NOTE(review): no parse_args()/dispatch is visible in this snippet —
    # confirm the parsed options are consumed downstream.
Ejemplo n.º 31
0
def initialize_arguments(p: configargparse.ArgParser):
    """Register the training CLI options on *p* and return the parsed args."""
    # Data options
    p.add('--data', help='Data directory', type=str)
    p.add('--sentence_index', type=int,
          help='Column index of sentences in data file')

    # Training options
    p.add('--batch_size', type=int,
          help='Batch size for training multi-label document classifier')
    p.add('--max_len', type=int, help='Maximum sequence length')
    p.add('--epochs', type=int, help='Epochs to train')
    # Optimizer options
    p.add('--learning_rate', type=float, help='Optimizer step size')
    return p.parse_args()
Ejemplo n.º 32
0
def main():
    """Render projection figures and tables from a previously-saved model run.

    Parses CLI/config options, reads input parameters and MCMC chains from
    the chosen output directory, and writes plots (PDF) and projection
    tables back underneath it.
    """
    p = ArgParser()
    p.add("-c", "--my-config", is_config_file=True, help="config file path")
    p.add(
        "-o",
        "--out",
        help="output directory, '-' for stdin",
        type=DirectoryType(),
        required=True,
    )
    p.add(
        "-a",
        "--as_of",
        default=0,
        help="number of days in the past to project from",
        type=int,
    )
    p.add("-y", "--y_max", help="max y-scale for the census graph", type=int)
    p.add(
        "-d",
        "--n_days",
        help="make a census/admits plot out to n_days",
        type=int,
        action="append",
    )
    p.add("-P", "--prefix", help="prefix for filenames")
    p.add(
        "-pp",
        "--plot_pairs",
        action="store_true",
        help="Plot posterior samples in a pair-plot grid",
    )
    p.add(
        "-pc",
        "--plot_capacity",
        action="store_true",
        help="plot capacity as a horizontal line",
    )

    options = p.parse_args()

    prefix = ""
    if options.prefix is not None:
        prefix = f"{options.prefix}_"

    n_days = [30, 90, 180]
    if options.n_days:
        n_days = options.n_days

    # Fix: renamed from `dir`, which shadowed the builtin.
    out_dir = options.out
    print(f"Output directory: {out_dir}")
    paramdir = path.join(out_dir, "parameters")
    outdir = path.join(out_dir, "output")
    figdir = path.join(out_dir, "figures")

    census_ts, params, args = read_inputs(paramdir)
    first_day = census_ts["date"].values[0]

    # TODO: This needs to be configurable based on the time period specified
    as_of_days_ago = args["as_of"]

    # define capacity
    vent_capacity, hosp_capacity = None, None
    if options.plot_capacity:
        vent_capacity = float(params.base.loc[params.param == "vent_capacity"])
        hosp_capacity = float(params.base.loc[params.param == "hosp_capacity"])

    # Chains
    df = pd.read_json(
        path.join(outdir, "chains.json.bz2"), orient="records", lines=True
    )
    print(f"READ chains file: {df.shape[0]} total iterations")
    # remove burn-in
    # TODO: Make 1000 configurable
    df = df.loc[(df.iter > 1000)]

    # Posterior quantiles of the logistic social-distancing curve, per day.
    qlist = []
    for day in range(census_ts.shape[0]):
        ldist = logistic(
            df.logistic_L, df.logistic_k, df.logistic_x0 - df.offset.astype(int), day
        )
        qlist.append(np.quantile(ldist, [0.05, 0.5, 0.95]))

    # logistic SD plot
    qmat = np.vstack(qlist)
    fig = plt.figure()

    plt.plot(list(range(census_ts.shape[0])), 1 - qmat[:, 1])
    plt.fill_between(
        x=list(range(census_ts.shape[0])),
        y1=1 - qmat[:, 0],
        y2=1 - qmat[:, 2],
        alpha=0.3,
        lw=2,
        edgecolor="k",
    )
    plt.ylabel("Relative (effective) social contact")
    plt.xlabel(f"Days since {first_day}")
    plt.ylim(0, 1)
    fig.savefig(path.join(figdir, f"{prefix}effective_soc_dist.pdf"))

    # Predictive plots at each requested horizon.
    for howfar in n_days:
        plt_predictive(
            df,
            first_day,
            census_ts,
            figdir,
            as_of_days_ago,
            howfar=howfar,
            prefix=prefix,
            y_max=options.y_max,
            hosp_capacity=hosp_capacity,
            vent_capacity=vent_capacity,
        )

    mk_projection_tables(df, first_day, outdir)

    toplot = df[
        [
            "beta",
            "hosp_prop",
            "ICU_prop",
            "vent_prop",
            "hosp_LOS",
            "ICU_LOS",
            "vent_LOS",
            "incubation_days",
            "recovery_days",
            "logistic_k",
            "logistic_x0",
            "logistic_L",
            "nu",
        ]
    ]

    pspace = np.linspace(0.001, 0.999, 1000)

    # Marginal prior-vs-posterior panels, one row per parameter.
    fig, ax = plt.subplots(figsize=(8, 40), ncols=1, nrows=len(toplot.columns))
    for i in range(len(toplot.columns)):
        cname = toplot.columns[i]
        # NOTE(review): assumes each parameter's prior is 'gamma' or 'beta';
        # any other distribution leaves x/y undefined and raises NameError.
        if params.loc[params.param == cname, "distribution"].iloc[0] == "gamma":
            x = sps.gamma.ppf(
                pspace,
                params.loc[params.param == cname, "p1"],
                0,
                params.loc[params.param == cname, "p2"],
            )
            y = sps.gamma.pdf(
                x,
                params.loc[params.param == cname, "p1"],
                0,
                params.loc[params.param == cname, "p2"],
            )
        elif params.loc[params.param == cname, "distribution"].iloc[0] == "beta":
            x = sps.beta.ppf(
                pspace,
                params.loc[params.param == cname, "p1"],
                params.loc[params.param == cname, "p2"],
            )
            y = sps.beta.pdf(
                x,
                params.loc[params.param == cname, "p1"],
                params.loc[params.param == cname, "p2"],
            )
        ax[i].plot(x, y, label="prior")
        ax[i].hist(toplot[cname], density=True, label="posterior", bins=30)
        ax[i].set_xlabel(params.loc[params.param == cname, "description"].iloc[0])
        ax[i].legend()
    plt.tight_layout()
    fig.savefig(path.join(figdir, f"{prefix}marginal_posteriors_v2.pdf"))

    if options.plot_pairs:
        #  Make a pair plot for diagnosing posterior dependence
        plt_pairplot_posteriors(toplot, figdir, prefix=prefix)
Ejemplo n.º 33
0
def parse_main_arguments(args_in: List[str]) -> MainArguments:
    """
    Configures the command-line interface.

    Parameters
    ----------
    args_in : list of str
        Full argument list from the command line.

    Returns
    -------
    arguments  : MainArguments
        A populated `MainArguments` object.
    """

    parser = ArgParser()
    parser.add(  # type: ignore
        "-c",
        "--csvpath",
        help="Base path for input files.",
        required=True,
        env_var="CSV_PATH",
    )
    parser.add(  # type: ignore
        "-e",
        "--engine",
        help="Database engine.",
        choices=[DbEngine.MSSQL, DbEngine.POSTGRESQL],
        default=DbEngine.MSSQL,
        env_var="DB_ENGINE",
    )
    parser.add(  # type: ignore
        "-s",
        "--server",
        help="Database server name or IP address",
        required=True,
        env_var="DB_SERVER",
    )
    parser.add(  # type: ignore
        "--port",
        help="Database server port number",
        type=int,
        env_var="DB_PORT",
    )
    parser.add(  # type: ignore
        "-d",
        "--dbname",
        help="Name of the database with the LMS tables.",
        env_var="DB_NAME",
        required=True,
    )

    # Flag spellings, used below to detect integrated security on the raw
    # argument list *before* parsing.
    USE_INTEGRATED = "--useintegratedsecurity"
    USE_INTEGRATED_SHORT = "-i"

    parser.add(  # type: ignore
        USE_INTEGRATED_SHORT,
        USE_INTEGRATED,
        help="Use Integrated Security for the database connection.",
        action="store_true",
    )
    # Credentials are only mandatory when integrated security is not requested.
    user_name_required = (USE_INTEGRATED not in args_in
                          and USE_INTEGRATED_SHORT not in args_in)
    # This parameter doesn't work right when used from a .env file,
    # so adding a manual override
    integrated_env_var = os.getenv("USE_INTEGRATED_SECURITY")
    if integrated_env_var and integrated_env_var.lower() in ("true", "yes",
                                                             "t", "y"):
        user_name_required = False

    parser.add(  # type: ignore
        "-u",
        "--username",
        required=user_name_required,
        env_var="DB_USERNAME",
        help="Database username, when not using integrated security.",
    )
    parser.add(  # type: ignore
        "-p",
        "--password",
        required=user_name_required,
        env_var="DB_PASSWORD",
        help="Database user password, when not using integrated security.",
    )

    parser.add(  # type: ignore
        "-l",
        "--log-level",
        required=False,
        help="The log level for the tool.",
        choices=LOG_LEVELS,
        type=str,
        default="INFO",
        env_var="LOG_LEVEL",
    )

    args_parsed = parser.parse_args(args_in)
    # Re-apply the manual env-var override (computed above) to the parsed flag.
    args_parsed.useintegratedsecurity = (args_parsed.useintegratedsecurity
                                         or not user_name_required)

    arguments = MainArguments(args_parsed.csvpath, args_parsed.engine,
                              args_parsed.log_level)

    # The connection string is built differently depending on whether
    # integrated security is in effect.
    if args_parsed.useintegratedsecurity:
        arguments.set_connection_string_using_integrated_security(
            args_parsed.server, args_parsed.port, args_parsed.dbname)
    else:
        arguments.set_connection_string(
            args_parsed.server,
            args_parsed.port,
            args_parsed.dbname,
            args_parsed.username,
            args_parsed.password,
        )

    return arguments