Example #1
def add_logging_arguments(ap: argparse.ArgumentParser):
    ap.add_argument('--loglevel', action='store', default='INFO',
                    help="Log level",
                    env_var='LOG_LEVEL')
    ap.add_argument('--logformatter', action='store', default='text',
                    help="Log formatter",
                    env_var='LOG_FORMATTER')
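Note: although these helpers annotate the parser as argparse.ArgumentParser, the env_var= keyword is a configargparse extension; the stdlib parser would reject it with a TypeError. A minimal usage sketch, assuming add_logging_arguments from the example above is in scope:

import os
import configargparse

ap = configargparse.ArgumentParser()
add_logging_arguments(ap)                # helper from Example #1

os.environ['LOG_LEVEL'] = 'DEBUG'        # picked up via env_var='LOG_LEVEL'
args = ap.parse_args([])                 # no command line flags given
print(args.loglevel, args.logformatter)  # -> DEBUG text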
Example #2
def add_fs_asset_store_arguments(ap: argparse.ArgumentParser):
    ap.add_argument('--asset-store-url-prefix', action='store',
                    metavar='PREFIX', env_var='ASSET_STORE_URL_PREFIX',
                    help='URL prefix of fs asset store '
                         '(Only applicable for fs asset store)')
    ap.add_argument('--asset-store-secret', action='store', metavar='PATH',
                    env_var='ASSET_STORE_SECRET',
                    help='Secret for signing assets on fs asset store')
Example #3
def add_asset_arguments(ap: argparse.ArgumentParser):
    ap.add_argument('--asset-store', action='store', metavar='(fs|s3|cloud)',
                    default='fs', env_var='ASSET_STORE',
                    help='Type of asset store')
    ap.add_argument('--asset-store-public', action='store_true',
                    help='Make assets publicly accessible',
                    env_var='ASSET_STORE_PUBLIC')
    add_fs_asset_store_arguments(ap)
    add_s3_asset_store_arguments(ap)
    add_cloud_asset_store_arguments(ap)
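Example #3 composes the store-specific helpers from Examples #2, #4 and #10 into a single flat namespace. A usage sketch, assuming those helpers are in scope (environment values override the defaults, per configargparse precedence):

import os
import configargparse

ap = configargparse.ArgumentParser()
add_asset_arguments(ap)                 # pulls in the fs/s3/cloud options too

os.environ['ASSET_STORE'] = 's3'
os.environ['ASSET_STORE_BUCKET'] = 'my-bucket'
args = ap.parse_args([])
print(args.asset_store, args.asset_store_bucket)  # -> s3 my-bucket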
Example #4
def add_s3_asset_store_arguments(ap: argparse.ArgumentParser):
    ap.add_argument('--asset-store-access-key', action='store', metavar='KEY',
                    env_var='ASSET_STORE_ACCESS_KEY',
                    help='Access key for s3 asset store')
    ap.add_argument('--asset-store-secret-key', action='store',
                    metavar='SECRET', env_var='ASSET_STORE_SECRET_KEY',
                    help='Secret key for s3 asset store')
    ap.add_argument('--asset-store-region', action='store', metavar='REGION',
                    env_var='ASSET_STORE_REGION',
                    help='Region for s3 asset store')
    ap.add_argument('--asset-store-bucket', action='store', metavar='BUCKET',
                    env_var='ASSET_STORE_BUCKET',
                    help='Bucket name for s3 asset store')
    ap.add_argument('--asset-store-s3-url-prefix', action='store',
                    metavar='PREFIX', env_var='ASSET_STORE_S3_URL_PREFIX',
                    help='URL prefix for S3 asset store')
Example #5
 def _create_parser(
     parent_parser: configargparse.ArgumentParser
 ) -> configargparse.ArgumentParser:
     parser = parent_parser.add_parser(
         "autotag-chosen", help="fetch tags for chosen posts"
     )
     parser.set_defaults(force=Force.NoForce)
     parser.add_argument(
         "post_id", type=int, help="ID of the post to edit the tags for."
     )
     parser.add_argument(
         "-f",
         dest="force",
         action="store_const",
         const=Force.TagAgain,
         help=("Force tagging even if the post was processed earlier."),
     )
     parser.add_argument(
         "-ff",
         dest="force",
         action="store_const",
         const=Force.DownloadAgain,
         help=("Same as -f, but ignores IQDB cache."),
     )
     parser.add_argument(
         "--source", help="Source URL where to get tags from."
     )
     parser.add_argument(
         "--dry-run", action="store_true", help="Don't do anything."
     )
     return parser
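The Force constants referenced above are not defined in this excerpt. A purely hypothetical stand-in that makes the set_defaults/store_const wiring concrete (-f and -ff write to the same dest, so the last flag given wins):

# Hypothetical stand-in for the Force enum used above; the real project may
# define it differently.
import enum

class Force(enum.Enum):
    NoForce = 0        # default, via parser.set_defaults(force=Force.NoForce)
    TagAgain = 1       # -f: re-tag even if the post was processed earlier
    DownloadAgain = 2  # -ff: also bypass the IQDB cache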
Example #6
def _mk_thickness_parser(p: ArgumentParser):
    p.add_argument("--xfm-csv", dest="xfm_csv", type=str, #required=True,
                   help="CSV file containing at least 'source', 'xfm', 'target', and 'resampled' columns")  # FIXME
    p.add_argument("--label-mapping", dest="label_mapping", type=FileAtom, #required=True,
                   help="CSV file containing structure information (see minclaplace/wiki/LaplaceGrid)")
    p.add_argument("--atlas-fwhm", dest="atlas_fwhm", type=float, required=True,  # default ?!
                   help="Blurring kernel (mm) for atlas")
    p.add_argument("--thickness-fwhm", dest="thickness_fwhm", type=float, required=True,  # default??
                   help="Blurring kernel (mm) for cortical surfaces")
    return p
Example #7
def add_static_asset_arguments(ap: argparse.ArgumentParser):
    ap.add_argument('--collect-assets', metavar='DIST', action='store',
                    help="Collect static assets to a directory")
    ap.add_argument('--force-assets', action='store_true',
                    help="Remove dist folder before proceeding")
    ap.add_argument('--serve-static-assets', action='store_true',
                    env_var='SERVE_STATIC_ASSETS',
                    help="Enable to serve static asset from plugin process")
    ap.add_argument('--ignore-public-html', action='store_true',
                    env_var='IGNORE_PUBLIC_HTML',
                    help="Ignore public_html directory for static assets.")
Example #8
 def _create_parser(
     parent_parser: configargparse.ArgumentParser
 ) -> configargparse.ArgumentParser:
     parser = parent_parser.add_parser(
         "autotag-newest", help="fetch tags for newest posts"
     )
     parser.add_argument(
         "--dry-run", action="store_true", help="Don't do anything."
     )
     return parser
Example #9
 def _create_parser(
     parent_parser: configargparse.ArgumentParser
 ) -> configargparse.ArgumentParser:
     parser = parent_parser.add_parser(
         "edit", help="edit tags interactively"
     )
     parser.add("query", help="query to filter the tags with")
     parser.add_argument(
         "--dry-run", action="store_true", help="Don't do anything."
     )
     return parser
Example #10
def add_cloud_asset_store_arguments(ap: argparse.ArgumentParser):
    ap.add_argument('--cloud-asset-host', action='store', metavar='HOST',
                    env_var='CLOUD_ASSET_HOST',
                    help='Host of cloud asset store')
    ap.add_argument('--cloud-asset-token', action='store', metavar='TOKEN',
                    env_var='CLOUD_ASSET_TOKEN',
                    help='Token of cloud asset store')
    ap.add_argument('--cloud-asset-public-prefix', action='store',
                    metavar='PREFIX', env_var='CLOUD_ASSET_PUBLIC_PREFIX',
                    help='URL prefix of public asset on cloud asset store')
    ap.add_argument('--cloud-asset-private-prefix', action='store',
                    metavar='PREFIX', env_var='CLOUD_ASSET_PRIVATE_PREFIX',
                    help='URL prefix of private asset on cloud asset store')
Example #11
def add_app_arguments(ap: argparse.ArgumentParser):
    ap.add_argument('--apikey', metavar='APIKEY', action='store',
                    default=None,
                    help="API Key of the application",
                    env_var='API_KEY')
    ap.add_argument('--masterkey', metavar='MASTERKEY', action='store',
                    default=None,
                    help="Master Key of the application",
                    env_var='MASTER_KEY')
    ap.add_argument('--appname', metavar='APPNAME', action='store',
                    default='',
                    help="Application name of the skygear daemon",
                    env_var='APP_NAME')
Example #12
def add_skygear_arguments(ap: argparse.ArgumentParser):
    ap.add_argument('--skygear-address', metavar='ADDR', action='store',
                    default='tcp://127.0.0.1:5555',
                    help="Binds to this socket for skygear",
                    env_var='SKYGEAR_ADDRESS')
    ap.add_argument('--skygear-endpoint', metavar='ENDPOINT', action='store',
                    default='http://127.0.0.1:3000',
                    help="Send to this addres for skygear handlers",
                    env_var='SKYGEAR_ENDPOINT')
    ap.add_argument('--pubsub-url', action='store', default=None,
                    env_var='PUBSUB_URL',
                    help="The URL of the pubsub server, should start with "
                         "ws:// or wss:// and include the path")
Example #13
File: data.py Project: rlugojr/neon
    aeon_config['image']['center'] = False
    aeon_config['image']['flip_enable'] = True

    return wrap_dataloader(AeonDataLoader(aeon_config, backend_obj))


def make_validation_loader(manifest_file, manifest_root, backend_obj, subset_pct=100):
    aeon_config = common_config(manifest_file, manifest_root, backend_obj.bsz, subset_pct)
    return wrap_dataloader(AeonDataLoader(aeon_config, backend_obj))


def make_tuning_loader(manifest_file, manifest_root, backend_obj):
    aeon_config = common_config(manifest_file, manifest_root, backend_obj.bsz, subset_pct=20)
    aeon_config['shuffle_manifest'] = True
    aeon_config['shuffle_every_epoch'] = True
    return wrap_dataloader(AeonDataLoader(aeon_config, backend_obj))


if __name__ == '__main__':
    from configargparse import ArgumentParser
    parser = ArgumentParser()
    parser.add_argument('--out_dir', required=True, help='path to extract files into')
    parser.add_argument('--input_dir', default=None, help='unused argument')
    parser.add_argument('--padded_size', type=int, default=40,
                        help='Size of image after padding (each side)')
    args = parser.parse_args()

    generated_files = ingest_cifar10(args.out_dir, args.padded_size)

    print("Manifest files written to:\n" + "\n".join(generated_files))
Example #14
def add_parser_args(parser: ArgumentParser) -> None:
    parser.add('-v',
               '--version',
               help='version',
               action='version',
               version=version)
    parser.add(
        '-d',
        '--directory',
        action='append',
        help='IaC root directory (cannot be used together with --file).')
    parser.add('--add-check',
               action='store_true',
               help="Generate a new check via CLI prompt")
    parser.add('-f',
               '--file',
               action='append',
               help='IaC file (cannot be used together with --directory)')
    parser.add(
        '--skip-path',
        action='append',
        help=
        'Path (file or directory) to skip, using regular expression logic, relative to current '
        'working directory. Word boundaries are not implicit; i.e., specifying "dir1" will skip any '
        'directory or subdirectory named "dir1". Ignored with -f. Can be specified multiple times.'
    )
    parser.add(
        '--external-checks-dir',
        action='append',
        help='Directory for custom checks to be loaded. Can be repeated')
    parser.add(
        '--external-checks-git',
        action='append',
        help=
        'GitHub URL of external checks to be added. \n You can specify a subdirectory after a '
        'double-slash //. \n Cannot be used together with --external-checks-dir'
    )
    parser.add('-l', '--list', help='List checks', action='store_true')
    parser.add(
        '-o',
        '--output',
        action='append',
        choices=OUTPUT_CHOICES,
        default=None,
        help=
        'Report output format. Add multiple outputs by using the flag multiple times (-o sarif -o cli)'
    )
    parser.add(
        '--output-file-path',
        default=None,
        help=
        'Name for output file. The first selected output via output flag will be saved to the file (default output is cli)'
    )
    parser.add(
        '--output-bc-ids',
        action='store_true',
        help=
        'Print Bridgecrew platform IDs (BC...) instead of Checkov IDs (CKV...), if the check exists in the platform'
    )
    parser.add(
        '--no-guide',
        action='store_true',
        default=False,
        help=
        'Do not fetch Bridgecrew platform IDs and guidelines for the checkov output report. Note: this '
        'prevents Bridgecrew platform check IDs from being used anywhere in the CLI.'
    )
    parser.add('--quiet',
               action='store_true',
               default=False,
               help='in case of CLI output, display only failed checks')
    parser.add('--compact',
               action='store_true',
               default=False,
               help='in case of CLI output, do not display code blocks')
    parser.add(
        '--framework',
        help=
        'filter scan to run only on specific infrastructure code frameworks',
        choices=checkov_runners + ["all"],
        default=['all'],
        nargs="+")
    parser.add(
        '--skip-framework',
        help='filter scan to skip specific infrastructure code frameworks. \n'
        'will be included automatically for some frameworks if system dependencies '
        'are missing.',
        choices=checkov_runners,
        default=None,
        nargs="+")
    parser.add(
        '-c',
        '--check',
        help=
        'filter scan to run only on a specific check identifier (allowlist). You can '
        'specify multiple checks separated by a comma delimiter',
        action='append',
        default=None,
        env_var='CKV_CHECK')
    parser.add(
        '--skip-check',
        help=
        'filter scan to run on all checks but a specific check identifier (denylist). You can '
        'specify multiple checks separated by a comma delimiter',
        action='append',
        default=None,
        env_var='CKV_SKIP_CHECK')
    parser.add(
        '--run-all-external-checks',
        action='store_true',
        help=
        'Run all external checks (loaded via --external-checks options) even if the checks are not present '
        'in the --check list. This allows you to always ensure that new checks present in the external '
        'source are used. If an external check is included in --skip-check, it will still be skipped.'
    )
    parser.add('--bc-api-key',
               env_var='BC_API_KEY',
               sanitize=True,
               help='Bridgecrew API key')
    parser.add(
        '--prisma-api-url',
        env_var='PRISMA_API_URL',
        default=None,
        help=
        'The Prisma Cloud API URL (see: https://prisma.pan.dev/api/cloud/api-urls). '
        'Requires --bc-api-key to be a Prisma Cloud Access Key in the following format: <access_key_id>::<secret_key>'
    )
    parser.add(
        '--docker-image',
        help=
        'Scan docker images by name or ID. Only works with --bc-api-key flag')
    parser.add('--dockerfile-path',
               help='Path to the Dockerfile of the scanned docker image')
    parser.add(
        '--repo-id',
        help=
        'Identity string of the repository, with form <repo_owner>/<repo_name>'
    )
    parser.add(
        '-b',
        '--branch',
        help=
        "Selected branch of the persisted repository. Only has effect when using the --bc-api-key flag",
        default='master')
    parser.add(
        '--skip-fixes',
        help=
        'Do not download fixed resource templates from Bridgecrew. Only has effect when using the '
        '--bc-api-key flag',
        action='store_true')
    parser.add(
        '--skip-suppressions',
        help=
        'Do not download preconfigured suppressions from the Bridgecrew platform. Code comment '
        'suppressions will still be honored. '
        'Only has effect when using the --bc-api-key flag',
        action='store_true')
    parser.add(
        '--skip-policy-download',
        help=
        'Do not download custom policies configured in the Bridgecrew platform. '
        'Only has effect when using the --bc-api-key flag',
        action='store_true')
    parser.add(
        '--download-external-modules',
        help=
        "download external terraform modules from public git repositories and terraform registry",
        default=os.environ.get('DOWNLOAD_EXTERNAL_MODULES', False),
        env_var='DOWNLOAD_EXTERNAL_MODULES')
    parser.add(
        '--var-file',
        action='append',
        help='Variable files to load in addition to the default files (see '
        'https://www.terraform.io/docs/language/values/variables.html#variable-definitions-tfvars-files). '
        'Currently only supported for source Terraform (.tf file), and Helm chart scans. '
        'Requires using --directory, not --file.')
    parser.add('--external-modules-download-path',
               help="set the path for the download external terraform modules",
               default=DEFAULT_EXTERNAL_MODULES_DIR,
               env_var='EXTERNAL_MODULES_DIR')
    parser.add('--evaluate-variables',
               help="evaluate the values of variables and locals",
               default=True)
    parser.add('-ca',
               '--ca-certificate',
               help='Custom CA certificate (bundle) file',
               default=None,
               env_var='BC_CA_BUNDLE')
    parser.add(
        '--repo-root-for-plan-enrichment',
        help=
        'Directory containing the hcl code used to generate a given plan file. Use with -f.',
        dest="repo_root_for_plan_enrichment",
        action='append')
    parser.add('--config-file',
               help='path to the Checkov configuration YAML file',
               is_config_file=True,
               default=None)
    parser.add(
        '--create-config',
        help=
        'takes the current command line args and writes them out to a config file at '
        'the given path',
        is_write_out_config_file_arg=True,
        default=None)
    parser.add(
        '--show-config',
        help='prints all args and config settings and where they came from '
        '(eg. commandline, config file, environment variable or default)',
        action='store_true',
        default=None)
    parser.add(
        '--create-baseline',
        help=
        'Alongside outputting the findings, save all results to .checkov.baseline file'
        ' so future runs will not re-flag the same noise. Works only with `--directory` flag',
        action='store_true',
        default=False)
    parser.add(
        '--baseline',
        help=
        ("Use a .checkov.baseline file to compare current results with a known baseline. "
         "Report will include only failed checks that are new with respect to the provided baseline"
         ),
        default=None,
    )
    parser.add(
        '--min-cve-severity',
        help=
        'Set minimum severity that will cause returning non-zero exit code',
        choices=SEVERITY_RANKING.keys(),
        default='none')
    parser.add(
        '--skip-cve-package',
        help=
        'filter scan to run on all packages but a specific package identifier (denylist). You can '
        'specify this argument multiple times to skip multiple packages',
        action='append',
        default=None)
    # Add mutually exclusive groups of arguments
    exit_code_group = parser.add_mutually_exclusive_group()
    exit_code_group.add('-s',
                        '--soft-fail',
                        help='Runs checks but suppresses error code',
                        action='store_true')
    exit_code_group.add(
        '--soft-fail-on',
        help='Exits with a 0 exit code for specified checks. You can specify '
        'multiple checks separated by comma delimiter',
        action='append',
        default=None)
    exit_code_group.add(
        '--hard-fail-on',
        help=
        'Exits with a non-zero exit code for specified checks. You can specify '
        'multiple checks separated by comma delimiter',
        action='append',
        default=None)
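Several keywords above (env_var, is_config_file, is_write_out_config_file_arg) are configargparse extensions rather than stdlib argparse features. A small self-contained sketch of the config-file mechanism, with made-up option names and file contents, not taken from the checkov sources:

# Sketch only: demonstrates is_config_file and env_var with the default
# key = value config format.
import tempfile
import configargparse

p = configargparse.ArgumentParser()
p.add('--config-file', is_config_file=True, help='path to a config file')
p.add('--quiet', action='store_true', env_var='QUIET')
p.add('--branch', default='master')

with tempfile.NamedTemporaryFile('w', suffix='.cfg', delete=False) as fh:
    fh.write('branch = main\n')

args = p.parse_args(['--config-file', fh.name])
print(args.branch, args.quiet)  # -> main False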
Example #15
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import sys
from configargparse import ArgumentParser
from mechanicalsoup import StatefulBrowser

from SMACB.CalendarioACB import calendario_URLBASE
from SMACB.TemporadaACB import TemporadaACB
from Utils.Web import ExtraeGetParams

parser = ArgumentParser()
parser.add('-v',
           dest='verbose',
           action="count",
           env_var='SM_VERBOSE',
           required=False,
           help='',
           default=0)
parser.add('-d',
           dest='debug',
           action="store_true",
           env_var='SM_DEBUG',
           required=False,
           help='',
           default=False)
parser.add('-j',
           dest='justone',
           action="store_true",
           env_var='SM_JUSTONE',
           required=False,
           help='',
           default=False)  # completion assumed; the original example was cut off here
Example #16
def main():
    parser = ArgumentParser()
    parser.add_argument("--file", type=str, required=True)
    parser.add_argument("--ignore-case", type=bool, default=DEFAULT_IGNORE_CASE)
    parser.add_argument("--output-file", type=str, default=DEFAULT_OUTPUT_FILE)
    parser.add_argument("--prefix", type=str, default=None)
    parser.add_argument("--title", type=str, default=DEFAULT_TITLE)
    parser.add_argument("--max-layers", type=int, default=DEFAULT_MAX_LAYERS)
    parser.add_argument(
        "--min-display-percentage",
        type=float,
        default=DEFAULT_MIN_DISPLAY_PERCENTAGE)
    parser.add_argument(
        "--min-label-percentage",
        type=float,
        default=DEFAULT_MIN_LABEL_PERCENTAGE)
    parser.add_argument(
        "--label-font-size", type=int, default=DEFAULT_LABEL_FONT_SIZE)
    args = parser.parse_args()

    try:
        create_from_file(
            args.file,
            args.ignore_case,
            args.output_file,
            args.prefix,
            args.title,
            args.max_layers,
            args.min_display_percentage,
            args.min_label_percentage,
            args.label_font_size)
    except VocabPieError as e:
        e.display()
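A caveat on the example above: type=bool does not parse booleans the way it looks, because bool("False") is True, so any non-empty string sets the flag. A common workaround is a small converter (a sketch, not part of the original project):

import argparse

def str2bool(value: str) -> bool:
    # Accept the usual textual spellings; reject anything else loudly.
    value = value.lower()
    if value in ('1', 'true', 'yes', 'y'):
        return True
    if value in ('0', 'false', 'no', 'n'):
        return False
    raise argparse.ArgumentTypeError('expected a boolean, got {!r}'.format(value))

# usage: parser.add_argument("--ignore-case", type=str2bool, default=DEFAULT_IGNORE_CASE)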
Example #17
 def add_arguments(self, parser: ArgumentParser):
     """Add admin-specific command line arguments to the parser."""
     parser.add_argument(
         "--admin",
         type=str,
         nargs=2,
         metavar=("<host>", "<port>"),
         env_var="ACAPY_ADMIN",
         help=
         "Specify the host and port on which to run the administrative server.\
         If not provided, no admin server is made available.",
     )
     parser.add_argument(
         "--admin-api-key",
         type=str,
         metavar="<api-key>",
         env_var="ACAPY_ADMIN_API_KEY",
         help="Protect all admin endpoints with the provided API key.\
         API clients (e.g. the controller) must pass the key in the HTTP\
         header using 'X-API-Key: <api key>'. Either this parameter or the\
         '--admin-insecure-mode' parameter MUST be specified.",
     )
     parser.add_argument(
         "--admin-insecure-mode",
         action="store_true",
         env_var="ACAPY_ADMIN_INSECURE_MODE",
         help="Run the admin web server in insecure mode. DO NOT USE FOR\
         PRODUCTION DEPLOYMENTS. The admin server will be publicly available\
         to anyone who has access to the interface. Either this parameter or\
         the '--api-key' parameter MUST be specified.",
     )
     parser.add_argument(
         "--no-receive-invites",
         action="store_true",
         env_var="ACAPY_NO_RECEIVE_INVITES",
         help="Prevents an agent from receiving invites by removing the\
         '/connections/receive-invite' route from the administrative\
         interface. Default: false.",
     )
     parser.add_argument(
         "--help-link",
         type=str,
         metavar="<help-url>",
         env_var="ACAPY_HELP_LINK",
         help=
         "A URL to an administrative interface help web page that a controller\
         user interface can get from the agent and provide as a link to users.",
     )
     parser.add_argument(
         "--webhook-url",
         action="append",
         metavar="<url#api_key>",
         env_var="ACAPY_WEBHOOK_URL",
         help=
         "Send webhooks containing internal state changes to the specified\
         URL. Optional API key to be passed in the request body can be appended using\
         a hash separator [#]. This is useful for a controller to monitor agent events\
         and respond to those events using the admin API. If not specified, \
         webhooks are not published by the agent.",
     )
Example #18
import os
from typing import Callable

from aiohttp import web
from aiomisc.log import LogFormat, basic_config
from configargparse import ArgumentParser, ArgumentDefaultsHelpFormatter
from yarl import URL

from analyzer.api.app import create_app
from analyzer.utils.consts import ENV_VAR_PREFIX, DEFAULT_PG_URL

parser = ArgumentParser(
    # The parser will look for environment variables with the ANALYZER_ prefix,
    # e.g. ANALYZER_API_ADDRESS and ANALYZER_API_PORT
    auto_env_var_prefix=ENV_VAR_PREFIX,
    # Show parameter default values in --help output
    formatter_class=ArgumentDefaultsHelpFormatter,
)

group = parser.add_argument_group("API Options")
group.add_argument(
    "--api-address",
    default="0.0.0.0",
    help="IPv4/IPv6 address API server would listen on",
)
group.add_argument("--api-port",
                   type=int,
                   default=8081,
                   help="TCP port API server would listen on")

group = parser.add_argument_group("PostgreSQL options")
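With auto_env_var_prefix set, every long option automatically gets an environment variable, as the (translated) comment above notes. A minimal sketch of the mapping, assuming ENV_VAR_PREFIX is 'ANALYZER_':

import os
import configargparse

p = configargparse.ArgumentParser(auto_env_var_prefix='ANALYZER_')
p.add_argument('--api-address', default='0.0.0.0')
p.add_argument('--api-port', type=int, default=8081)

os.environ['ANALYZER_API_PORT'] = '9000'   # --api-port -> ANALYZER_API_PORT
args = p.parse_args([])
print(args.api_address, args.api_port)     # -> 0.0.0.0 9000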
Example #19
        self.register('action','datetime',DatetimeAction)
        self.register('action','quiet'   ,DecreaseVerbosityAction)
        self.register('action','version' ,VersionAction)
        self.register('action','verbose' ,IncreaseVerbosityAction)

        group = self.add_mutually_exclusive_group()
        group.add_argument('-v','--verbose',action='verbose')
        group.add_argument('-q','--quiet',action='quiet')

        self.add_argument('-V','--version', action='version',version=__version__)
        #self.add_argument('--config', action='config')

    def remove_argument(self,option_string):
        """ Not very robust way to remove arguments """
        # TODO: subparsers?
        for i,action in enumerate(self._actions):
            if option_string in action.option_strings:
                self._handle_conflict_resolve(None, [(option_string,action)])
                #action.container._remove_action(action)

    def error(self, message):
        self.print_usage(sys.stderr)
        raise ArgumentParserError(message)

if __name__ == "__main__":
    parser = ArgumentParser()
    args = parser.parse_args()
    parser.print_help()
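The classes registered above for the 'verbose' and 'quiet' actions are not shown in the excerpt. A hypothetical sketch of what such counting actions could look like (the real implementations may differ):

import argparse

class IncreaseVerbosityAction(argparse.Action):
    def __init__(self, option_strings, dest, **kwargs):
        kwargs['nargs'] = 0                      # flag takes no value
        super().__init__(option_strings, dest, **kwargs)

    def __call__(self, parser, namespace, values, option_string=None):
        current = getattr(namespace, self.dest, None) or 0
        setattr(namespace, self.dest, current + 1)

class DecreaseVerbosityAction(IncreaseVerbosityAction):
    def __call__(self, parser, namespace, values, option_string=None):
        current = getattr(namespace, self.dest, None) or 0
        setattr(namespace, self.dest, current - 1)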

Example #20
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

from configargparse import ArgumentParser

from SMACB.Constants import OtherLoc
# from SMACB.MercadoPage import MercadoPageContent
from SMACB.SuperManager import SuperManagerACB
from SMACB.TemporadaACB import TemporadaACB

# from Utils.Misc import ReadFile

if __name__ == '__main__':
    parser = ArgumentParser()

    parser.add('-i', dest='sminfile', type=str, env_var='SM_INFILE', required=False)
    parser.add('-o', dest='smoutfile', type=str, env_var='SM_OUTFILE', required=False)
    parser.add('-t', dest='tempin', type=str, env_var='SM_TEMPIN', required=False)
    parser.add('-x', dest='tempout', type=str, env_var='SM_TEMPOUT', required=False)
    parser.add_argument(dest='files', type=str, nargs='*')

    args = parser.parse_args()

    sm = SuperManagerACB()

    if 'sminfile' in args and args.sminfile:
        sm.loadData(args.sminfile)

    temporada = None
    if 'tempin' in args and args.tempin:
        temporada = TemporadaACB()
Example #21

def make_loader(manifest_file, manifest_root, backend_obj, subset_pct=100, random_seed=0):
    aeon_config = common_config(manifest_file, manifest_root, backend_obj.bsz, subset_pct)
    aeon_config['shuffle_manifest'] = True
    aeon_config['shuffle_enable'] = True
    aeon_config['random_seed'] = random_seed
    aeon_config['augmentation'][0]['center'] = True
    aeon_config['augmentation'][0]['flip_enable'] = False

    return wrap_dataloader(AeonDataLoader(aeon_config))


if __name__ == '__main__':
    from configargparse import ArgumentParser
    parser = ArgumentParser()
    parser.add_argument('-o', '--out_dir', type=str, required=True, help='data path')
    parser.add_argument('-c', '--category', type=str, default="bedroom", help='data category')
    parser.add_argument('-s', '--dset', type=str, default="train", help='train, val or test')
    parser.add_argument('-t', '--tag', type=str, default="latest", help='version tag')
    parser.add_argument('-w', '--overwrite', action='store_true', help='overwrite existing data')
    parser.add_argument('-p', '--png', action='store_true', help='conversion to PNG images')
    args = parser.parse_args()

    assert os.path.exists(args.out_dir), "Output directory does not exist"
    categories = lsun_categories(args.tag)
    assert args.category in categories, "Unrecognized LSUN category: {}".format(args.category)
    # download and unpack LSUN data if not yet done so
    download_lsun(lsun_dir=args.out_dir, category=args.category,
                  dset=args.dset, tag=args.tag, overwrite=args.overwrite)
    # ingest LSUN data for AEON loader if not yet done so
Example #22
def parse_args(args):
    prog = ArgumentParser(
        config_file_parser_class=YAMLConfigFileParser,
        formatter_class=argparse.RawDescriptionHelpFormatter,
        default_config_files=['~/.config/seal', '~/.seal'],
        description=textwrap.dedent("""\
            PowerfulSeal
        """),
    )

    # General settings
    prog.add_argument(
        '-c',
        '--config',
        is_config_file=True,
        env_var="CONFIG",
        help='Config file path',
    )
    prog.add_argument('-v',
                      '--verbose',
                      action='count',
                      help='Verbose logging.')

    # Policy
    # If --validate-policy-file is set, the other arguments are not used
    policy_options = prog.add_mutually_exclusive_group(required=True)
    policy_options.add_argument(
        '--validate-policy-file',
        help='reads the policy file, validates the schema, returns')
    policy_options.add_argument(
        '--run-policy-file',
        default=os.environ.get("POLICY_FILE"),
        help='location of the policy file to read',
    )
    policy_options.add_argument(
        '--interactive',
        help='will start the seal in interactive mode',
        action='store_true',
    )
    policy_options.add_argument(
        '--label',
        help='starts the seal in label mode',
        action='store_true',
    )
    policy_options.add_argument('--demo',
                                help='starts the demo mode',
                                action='store_true')

    is_validate_policy_file_set = '--validate-policy-file' in args

    # Demo mode
    demo_options = prog.add_argument_group()
    demo_options.add_argument(
        '--heapster-path', help='Base path of Heapster without trailing slash')
    demo_options.add_argument('--aggressiveness',
                              help='Aggressiveness of demo mode (default: 3)',
                              default=3,
                              type=int)

    # Arguments for both label and demo mode
    prog.add_argument(
        '--namespace',
        default='default',
        help='Namespace to use for label and demo mode, defaults to the default '
        'namespace (set to blank for all namespaces)')
    prog.add_argument('--min-seconds-between-runs',
                      help='Minimum number of seconds between runs',
                      default=0,
                      type=int)
    prog.add_argument('--max-seconds-between-runs',
                      help='Maximum number of seconds between runs',
                      default=300,
                      type=int)

    # Inventory
    inventory_options = prog.add_mutually_exclusive_group(
        required=not is_validate_policy_file_set)
    inventory_options.add_argument(
        '-i',
        '--inventory-file',
        default=os.environ.get("INVENTORY_FILE"),
        help='the inventory file of group of hosts to test')
    inventory_options.add_argument(
        '--inventory-kubernetes',
        default=os.environ.get("INVENTORY_KUBERNETES"),
        help='will read all cluster nodes as inventory',
        action='store_true',
    )

    # SSH
    args_ssh = prog.add_argument_group('SSH settings')
    args_ssh.add_argument(
        '--remote-user',
        default=os.environ.get("PS_REMOTE_USER", "cloud-user"),
        help="the of the user for the ssh connections",
    )
    args_ssh.add_argument(
        '--ssh-allow-missing-host-keys',
        default=False,
        action='store_true',
        help='Allow connection to hosts not present in known_hosts',
    )
    args_ssh.add_argument(
        '--ssh-path-to-private-key',
        default=os.environ.get("PS_PRIVATE_KEY"),
        help='Path to ssh private key',
    )

    # Cloud Driver
    cloud_options = prog.add_mutually_exclusive_group(
        required=not is_validate_policy_file_set)
    cloud_options.add_argument(
        '--open-stack-cloud',
        default=os.environ.get("OPENSTACK_CLOUD"),
        action='store_true',
        help="use OpenStack cloud provider",
    )
    cloud_options.add_argument(
        '--aws-cloud',
        default=os.environ.get("AWS_CLOUD"),
        action='store_true',
        help="use AWS cloud provider",
    )
    cloud_options.add_argument(
        '--no-cloud',
        default=os.environ.get("NO_CLOUD"),
        action='store_true',
        help="don't use cloud provider",
    )
    prog.add_argument(
        '--open-stack-cloud-name',
        default=os.environ.get("OPENSTACK_CLOUD_NAME"),
        help=
        "the name of the open stack cloud from your config file to use (if using config file)",
    )

    # Metric Collector
    metric_options = prog.add_mutually_exclusive_group(required=False)
    metric_options.add_argument('--stdout-collector',
                                default=os.environ.get("STDOUT_COLLECTOR"),
                                action='store_true',
                                help="print metrics collected to stdout")
    metric_options.add_argument(
        '--prometheus-collector',
        default=os.environ.get("PROMETHEUS_COLLECTOR"),
        action='store_true',
        help="store metrics in Prometheus and expose metrics over a HTTP server"
    )

    def check_valid_port(value):
        parsed = int(value)
        min_port = 0
        max_port = 65535
        if parsed < min_port or parsed > max_port:
            raise argparse.ArgumentTypeError("%s is an invalid port number" %
                                             value)
        return parsed

    args_prometheus = prog.add_argument_group('Prometheus settings')
    args_prometheus.add_argument(
        '--prometheus-host',
        default='127.0.0.1',
        help=
        'Host to expose Prometheus metrics via the HTTP server when using the --prometheus-collector flag'
    )
    args_prometheus.add_argument(
        '--prometheus-port',
        default=8000,
        help=
        'Port to expose Prometheus metrics via the HTTP server when using the --prometheus-collector flag',
        type=check_valid_port)

    # Kubernetes
    args_kubernetes = prog.add_argument_group('Kubernetes settings')
    args_kubernetes.add_argument(
        '--kube-config',
        default=None,
        help='Location of kube-config file',
    )

    return prog.parse_args(args=args)
Example #23
    routes = []
    try:
        for route_raw in routes_raw.split(','):
            prefix, url = route_raw.split('=')
            host, port = url.split(':')
            port = int(port)
            routes.append(Route(prefix, host, port))
    except ValueError:
        raise ArgumentTypeError(
            'Routes should be key-value pairs of prefix (can be empty string) '
            ' and host:port separated by commas. For example: '
            '--routes foo.bar=foo-bar.com:2003,spam=spam.org:2003')
    return routes


parser = ArgumentParser(auto_env_var_prefix="APP_")

parser.add_argument('-f', '--forks', type=int, default=4)
parser.add_argument("-u",
                    "--user",
                    help="Change process UID",
                    type=pwd.getpwnam)
parser.add_argument('-D', '--debug', action='store_true')

parser.add_argument('--log-level',
                    default='info',
                    choices=('debug', 'info', 'warning', 'error', 'fatal'))

parser.add_argument('--log-format',
                    choices=LogFormat.choices(),
                    default='color')
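Example #23 starts inside the body of a type-converter for a --routes option; the function header, the Route type, and the option registration are missing from the excerpt. A self-contained reconstruction sketch (the surrounding names are guesses):

# Hypothetical reconstruction: only the loop body and the error message come
# from the excerpt above.
from argparse import ArgumentTypeError
from collections import namedtuple

from configargparse import ArgumentParser

Route = namedtuple('Route', 'prefix host port')

def parse_routes(routes_raw):
    routes = []
    try:
        for route_raw in routes_raw.split(','):
            prefix, url = route_raw.split('=')
            host, port = url.split(':')
            routes.append(Route(prefix, host, int(port)))
    except ValueError:
        raise ArgumentTypeError(
            'Routes should be key-value pairs of prefix (can be empty string) '
            'and host:port separated by commas. For example: '
            '--routes foo.bar=foo-bar.com:2003,spam=spam.org:2003')
    return routes

parser = ArgumentParser(auto_env_var_prefix="APP_")
parser.add_argument('--routes', type=parse_routes, default=[])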
Example #24
from aiohttp.web import run_app
from aiomisc import bind_socket
from aiomisc.log import LogFormat, basic_config
from configargparse import ArgumentParser
from setproctitle import setproctitle
from yarl import URL

from recommender.api.app import create_app
from recommender.utils.argparse import clear_environ, positive_int
from recommender.utils.pg import DEFAULT_PG_URL
from recommender.utils.redis import DEFAULT_REDIS_URL

ENV_VAR_PREFIX = 'RECOMMENDER_'

parser = ArgumentParser(auto_env_var_prefix=ENV_VAR_PREFIX,
                        allow_abbrev=False,
                        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--user',
                    required=False,
                    type=pwd.getpwnam,
                    help='Change process UID')

group = parser.add_argument_group('API Options')
group.add_argument('--api-address',
                   default='0.0.0.0',
                   help='IPv4/IPv6 address API server would listen on')
group.add_argument('--api-port',
                   type=positive_int,
                   default=8081,
                   help='TCP port API server would listen on')
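positive_int is imported from the project's utils module but not shown. A plausible stand-in (hypothetical; the real helper may differ) that rejects non-positive values such as an invalid port:

from argparse import ArgumentTypeError

def positive_int(value):
    # Convert and validate; raising ArgumentTypeError gives a clean CLI error.
    parsed = int(value)
    if parsed <= 0:
        raise ArgumentTypeError('{} is not a positive integer'.format(value))
    return parsed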
Example #25
 def add_arguments(self, parser: ArgumentParser):
     """Add ledger-specific command line arguments to the parser."""
     parser.add_argument(
         "--ledger-pool-name",
         type=str,
         metavar="<ledger-pool-name>",
         env_var="ACAPY_LEDGER_POOL_NAME",
         help="Specifies the name of the indy pool to be opened.\
         This is useful if you have multiple pool configurations.",
     )
     parser.add_argument(
         "--genesis-transactions",
         type=str,
         dest="genesis_transactions",
         metavar="<genesis-transactions>",
         env_var="ACAPY_GENESIS_TRANSACTIONS",
         help='Specifies the genesis transactions to use to connect to\
          a Hyperledger Indy ledger. The transactions are provided as a string\
         of JSON e.g. \'{"reqSignature":{},"txn":{"data":{"d... <snip>\'',
     )
     parser.add_argument(
         "--genesis-file",
         type=str,
         dest="genesis_file",
         metavar="<genesis-file>",
         env_var="ACAPY_GENESIS_FILE",
         help=
         "Specifies a local file from which to read the genesis transactions.",
     )
     parser.add_argument(
         "--genesis-url",
         type=str,
         dest="genesis_url",
         metavar="<genesis-url>",
         env_var="ACAPY_GENESIS_URL",
         help="Specifies the url from which to download the genesis\
         transactions. For example, if you are using 'von-network',\
         the URL might be 'http://localhost:9000/genesis'.\
         Genesis transactions URLs are available for the Sovrin test/main networks.",
     )
     parser.add_argument(
         "--no-ledger",
         action="store_true",
         env_var="ACAPY_NO_LEDGER",
         help="Specifies that aca-py will run with no ledger configured.\
         This must be set if running in no-ledger mode.  Overrides any\
         specified ledger or genesis configurations.  Default: false.",
     )
     parser.add_argument(
         "--ledger-keepalive",
         default=5,
         type=int,
         env_var="ACAPY_LEDGER_KEEP_ALIVE",
         help=
         "Specifies how many seconds to keep the ledger open. Default: 5",
     )
Example #26
    # write SSD VAL CONFIG
    ssd_config_val = get_ssd_config(img_reshape, True)
    ssd_config_path_val = os.path.join(root_dir, 'kitti_ssd_{}_val.cfg'.format(hw))
    util.write_ssd_config(ssd_config_val, ssd_config_path_val, True)

    config_path = os.path.join(root_dir, 'kitti_{}.cfg'.format(hw))
    config = {'manifest': '[train:{}, val:{}]'.format(train_manifest, val_manifest),
              'manifest_root': root_dir,
              'epochs': 100,
              'height': img_reshape[0],
              'width': img_reshape[0],
              'ssd_config': '[train:{}, val:{}]'.format(ssd_config_path, ssd_config_path_val)
              }

    util.write_config(config, config_path)


if __name__ == '__main__':
    from configargparse import ArgumentParser
    parser = ArgumentParser()
    parser.add_argument('--input_dir', required=True, help='path to dir with KITTI zip files.')
    parser.add_argument('--output_dir', required=True, help='path to unzip data.')
    parser.add_argument('--overwrite', action='store_true', help='overwrite files')
    parser.add_argument('--training_pct', default=90, help='fraction of data used for training.')
    parser.add_argument('--skip_unzip', action='store_true', help='skip unzip')

    args = parser.parse_args()

    ingest_kitti(args.input_dir, args.output_dir, train_percent=args.training_pct,
                 overwrite=args.overwrite, skip_unzip=args.skip_unzip)
Example #27
        annot_dir (string): directory of annotations
        img_dir (string): directory of images
        root_dir (string): paths will be made relative to this directory
        ext (string, optional): image extension (default=.jpg)
    """
    records = []
    with open(index_file) as f:
        for img in f:
            tag = img.rstrip(os.linesep)

            image = os.path.join(image_dir, tag + '.jpg')
            annot = os.path.join(annot_dir, tag + '.json')

            assert os.path.exists(image), 'Path {} not found'.format(image)
            assert os.path.exists(annot), 'Path {} not found'.format(annot)

            records.append((os.path.relpath(image, root_dir),
                            os.path.relpath(annot, root_dir)))

    np.savetxt(manifest_path, records, fmt='%s,%s')


if __name__ == '__main__':
    parser = ArgumentParser()
    parser.add_argument('--input_dir', required=True, help='path to directory with vocdevkit data')
    parser.add_argument('--output_dir', required=True, help='output directory')
    parser.add_argument('--overwrite', action='store_true', help='overwrite files')
    args = parser.parse_args()

    ingest_pascal(args.input_dir, args.output_dir, overwrite=args.overwrite)
Example #28
@app.route('/')
def aggregate_info():
    return jsonify({'service': 'aggregate'})


@app.errorhandler(requests.RequestException)
def http_error(error):
    response = jsonify({
        'status': 'error',
        'code': 500,
        'error': error.message
    })
    response.status_code = 500
    return response


if __name__ == '__main__':
    parser = ArgumentParser(description='Runs the IP service.')
    parser.add_argument('--host', help='Specifies the host for the application.',
                        default='127.0.0.1')
    parser.add_argument('--port', type=int, help='Specifies the port for the application.',
                        default=5000)
    parser.add_argument('-u', '--users', help='Specifies the address to the users service.',
                        required=True, env_var='USERS_URL')
    parser.add_argument('-i', '--ip', help='Specifies the address to the ip service.',
                        required=True, env_var='IPDIAG_URL')
    arguments = parser.parse_args()
    users_endpoint = arguments.users
    ip_endpoint = arguments.ip
    app.run(host=arguments.host, port=arguments.port)
Example #29
def main(argv):
    """
        The main function to invoke the powerfulseal cli
    """

    # Describe our configuration.
    prog = ArgumentParser(
        config_file_parser_class=YAMLConfigFileParser,
        formatter_class=argparse.RawDescriptionHelpFormatter,
        default_config_files=['~/.config/seal', '~/.seal'],
        description=textwrap.dedent("""\
            PowerfulSeal
        """),
    )
    # general settings
    prog.add_argument(
        '-c', '--config',
        is_config_file=True,
        env_var="CONFIG",
        help='Config file path',
    )
    prog.add_argument('-v', '--verbose',
        action='count',
        help='Verbose logging.'
    )
    prog.add_argument(
        '--log-file',
        default=None,
        help='Location of log file',
    )


    # inventory related config
    inventory_options = prog.add_mutually_exclusive_group(required=True)
    inventory_options.add_argument('-i', '--inventory-file',
        default=os.environ.get("INVENTORY_FILE"),
        help='the inventory file of group of hosts to test'
    )
    inventory_options.add_argument('--inventory-kubernetes',
        default=os.environ.get("INVENTORY_KUBERNETES"),
        help='will read all cluster nodes as inventory',
        action='store_true',
    )

    # ssh related options
    args_ssh = prog.add_argument_group('SSH settings')
    args_ssh.add_argument(
        '--remote-user',
        default=os.environ.get("PS_REMOTE_USER", "cloud-user"),
        help="the of the user for the ssh connections",
    )
    args_ssh.add_argument(
        '--ssh-allow-missing-host-keys',
        default=False,
        action='store_true',
        help='Allow connection to hosts not present in known_hosts',
    )
    args_ssh.add_argument(
        '--ssh-path-to-private-key',
        default=os.environ.get("PS_PRIVATE_KEY"),
        help='Path to ssh private key',
    )

    # cloud driver related config
    cloud_options = prog.add_mutually_exclusive_group(required=True)
    cloud_options.add_argument('--open-stack-cloud',
        default=os.environ.get("OPENSTACK_CLOUD"),
        action='store_true',
        help="use OpenStack cloud provider",
    )
    cloud_options.add_argument('--aws-cloud',
        default=os.environ.get("AWS_CLOUD"),
        action='store_true',
        help="use AWS cloud provider",
    )
    cloud_options.add_argument('--no-cloud',
        default=os.environ.get("NO_CLOUD"),
        action='store_true',
        help="don't use cloud provider",
    )
    prog.add_argument('--open-stack-cloud-name',
        default=os.environ.get("OPENSTACK_CLOUD_NAME"),
        help="the name of the open stack cloud from your config file to use (if using config file)",
    )

    # KUBERNETES CONFIG
    args_kubernetes = prog.add_argument_group('Kubernetes settings')
    args_kubernetes.add_argument(
        '--kube-config',
        default=None,
        help='Location of kube-config file',
    )

    # policy-related settings
    policy_options = prog.add_mutually_exclusive_group(required=True)
    policy_options.add_argument('--validate-policy-file',
        help='reads the policy file, validates the schema, returns'
    )
    policy_options.add_argument('--run-policy-file',
        default=os.environ.get("POLICY_FILE"),
        help='location of the policy file to read',
    )
    policy_options.add_argument('--interactive',
        help='will start the seal in interactive mode',
        action='store_true',
    )

    args = prog.parse_args(args=argv)

    # Configure logging
    if not args.verbose:
        log_level = logging.ERROR
    elif args.verbose == 1:
        log_level = logging.WARNING
    elif args.verbose == 2:
        log_level = logging.INFO
    else:
        log_level = logging.DEBUG
    
    if args.log_file is not None:
        logging.basicConfig(
            format="[%(asctime)s] %(levelname)s [%(name)s.%(funcName)s:%(lineno)d] %(message)s", 
            filename=args.log_file,
            level=log_level
        )
    else:
        logging.basicConfig(
            format="[%(asctime)s] %(levelname)s [%(name)s.%(funcName)s:%(lineno)d] %(message)s",
            stream=sys.stdout,
            level=log_level
        )
    logger = logging.getLogger(__name__)
    logger.setLevel(log_level)

    # build cloud provider driver
    logger.debug("Building the driver")
    if args.open_stack_cloud:
        logger.info("Building OpenStack driver")
        driver = OpenStackDriver(
            cloud=args.open_stack_cloud_name,
        )
    elif args.aws_cloud:
        logger.info("Building AWS driver")
        driver = AWSDriver()
    else:
        logger.info("No driver - some functionality disabled")
        driver = NoCloudDriver()


    # build a k8s client
    kube_config = args.kube_config
    logger.debug("Creating kubernetes client with config %s", kube_config)
    k8s_client = K8sClient(kube_config=kube_config)
    k8s_inventory = K8sInventory(k8s_client=k8s_client)

    # read the local inventory
    logger.debug("Fetching the inventory")
    if args.inventory_file:
        groups_to_restrict_to = read_inventory_file_to_dict(
            args.inventory_file
        )
    else:
        logger.info("Attempting to read the inventory from kubernetes")
        groups_to_restrict_to = k8s_client.get_nodes_groups()

    logger.debug("Restricting inventory to %s" % groups_to_restrict_to)

    inventory = NodeInventory(
        driver=driver,
        restrict_to_groups=groups_to_restrict_to,
    )
    inventory.sync()

    # create an executor
    executor = RemoteExecutor(
        user=args.remote_user,
        ssh_allow_missing_host_keys=args.ssh_allow_missing_host_keys,
        ssh_path_to_private_key=args.ssh_path_to_private_key,
    )

    if args.interactive:
        # create a command parser
        cmd = PSCmd(
            inventory=inventory,
            driver=driver,
            executor=executor,
            k8s_inventory=k8s_inventory,
            k8s_client=k8s_client
        )
        while True:
            try:
                cmd.cmdloop()
            except KeyboardInterrupt:
                print()
                print("Ctrl-c again to quit")
            try:
                input()
            except KeyboardInterrupt:
                sys.exit(0)
    elif args.validate_policy_file:
        PolicyRunner.validate_file(args.validate_policy_file)
        print("All good, captain")
    elif args.run_policy_file:
        policy = PolicyRunner.validate_file(args.run_policy_file)
        PolicyRunner.run(policy, inventory, k8s_inventory, driver, executor)
Example #30
def add_logging_arguments(ap: argparse.ArgumentParser):
    ap.add_argument('--loglevel',
                    action='store',
                    default='INFO',
                    help="Log level",
                    env_var='LOG_LEVEL')
Example #31
 def add_arguments(self, parser: ArgumentParser):
     """Add transport-specific command line arguments to the parser."""
     parser.add_argument(
         "-it",
         "--inbound-transport",
         dest="inbound_transports",
         type=str,
         action="append",
         nargs=3,
         metavar=("<module>", "<host>", "<port>"),
         env_var="ACAPY_INBOUND_TRANSPORT",
         help="REQUIRED. Defines the inbound transport(s) on which the agent\
         listens for receiving messages from other agents. This parameter can\
         be specified multiple times to create multiple interfaces.\
         Built-in inbound transport types include 'http' and 'ws'.\
         However, other transports can be loaded by specifying an absolute\
         module path.",
     )
     parser.add_argument(
         "-ot",
         "--outbound-transport",
         dest="outbound_transports",
         type=str,
         action="append",
         metavar="<module>",
         env_var="ACAPY_OUTBOUND_TRANSPORT",
         help="REQUIRED. Defines the outbound transport(s) on which the agent\
         will send outgoing messages to other agents. This parameter can be passed\
          multiple times to support multiple transport types. Supported outbound\
         transport types are 'http' and 'ws'.",
     )
     parser.add_argument(
         "-l",
         "--label",
         type=str,
         metavar="<label>",
         env_var="ACAPY_LABEL",
         help="Specifies the label for this agent. This label is publicized\
         (self-attested) to other agents as part of forming a connection.",
     )
     parser.add_argument(
         "--image-url",
         type=str,
         env_var="ACAPY_IMAGE_URL",
         help=
         "Specifies the image url for this agent. This image url is publicized\
         (self-attested) to other agents as part of forming a connection.",
     )
     parser.add_argument(
         "--max-message-size",
         default=2097152,
         type=ByteSize(min_size=1024),
         metavar="<message-size>",
         env_var="ACAPY_MAX_MESSAGE_SIZE",
         help="Set the maximum size in bytes for inbound agent messages.",
     )
     parser.add_argument(
         "--enable-undelivered-queue",
         action="store_true",
         env_var="ACAPY_ENABLE_UNDELIVERED_QUEUE",
         help="Enable the outbound undelivered queue that enables this agent\
         to hold messages for delivery to agents without an endpoint. This\
         option will require additional memory to store messages in the queue.",
     )
     parser.add_argument(
         "--max-outbound-retry",
         default=4,
         type=ByteSize(min_size=1),
         env_var="ACAPY_MAX_OUTBOUND_RETRY",
         help="Set the maximum retry number for undelivered outbound\
          messages. Increasing this number might increase the number of\
          accumulated messages in the message queue. Default value is 4.",
     )
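ByteSize is used above as an argparse type, so it must be a callable that turns the command line string into an int and enforces the bound. The class itself is not in the excerpt; a hypothetical sketch compatible with the calls above:

from argparse import ArgumentTypeError

class ByteSize:
    """Hypothetical stand-in for the ByteSize type used above."""

    def __init__(self, min_size=0, max_size=0):
        self.min_size = min_size
        self.max_size = max_size

    def __call__(self, value):
        size = int(value)
        if size < self.min_size:
            raise ArgumentTypeError('size must be at least {} bytes'.format(self.min_size))
        if self.max_size and size > self.max_size:
            raise ArgumentTypeError('size must be at most {} bytes'.format(self.max_size))
        return size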
Example #32
def add_debug_arguments(ap: argparse.ArgumentParser):
    ap.add_argument('--debug',
                    action='store_true',
                    help='Enable debugging features',
                    env_var='DEBUG')
Example #33
    records = [('@FILE', 'FILE')]
    with open(index_file) as f:
        for img in f:
            tag = img.rstrip(os.linesep)

            image = os.path.join(image_dir, tag + '.jpg')
            annot = os.path.join(annot_dir, tag + '.json')

            assert os.path.exists(image), 'Path {} not found'.format(image)
            assert os.path.exists(annot), 'Path {} not found'.format(annot)

            records.append(
                (os.path.relpath(image,
                                 root_dir), os.path.relpath(annot, root_dir)))

    np.savetxt(manifest_path, records, fmt='%s\t%s')


if __name__ == '__main__':
    parser = ArgumentParser()
    parser.add_argument('--input_dir',
                        required=True,
                        help='path to directory with vocdevkit data')
    parser.add_argument('--output_dir', required=True, help='output directory')
    parser.add_argument('--overwrite',
                        action='store_true',
                        help='overwrite files')
    args = parser.parse_args()

    ingest_pascal(args.input_dir, args.output_dir, overwrite=args.overwrite)
Example #34
def main(argv):
    """
        The main function to invoke the powerfulseal cli
    """

    # Describe our configuration.
    prog = ArgumentParser(
        config_file_parser_class=YAMLConfigFileParser,
        formatter_class=argparse.RawDescriptionHelpFormatter,
        default_config_files=['~/.config/seal', '~/.seal'],
        description=textwrap.dedent("""\
            PowerfulSeal
        """),
    )

    # general settings
    prog.add_argument(
        '-c', '--config',
        is_config_file=True,
        env_var="CONFIG",
        help='Config file path',
    )
    prog.add_argument('-v', '--verbose',
        action='count',
        help='Verbose logging.'
    )

    # inventory related config
    inventory_options = prog.add_mutually_exclusive_group(required=True)
    inventory_options.add_argument('-i', '--inventory-file',
        default=os.environ.get("INVENTORY_FILE"),
        help='the inventory file of group of hosts to test'
    )
    inventory_options.add_argument('--inventory-kubernetes',
        default=os.environ.get("INVENTORY_KUBERNETES"),
        help='will read all cluster nodes as inventory',
        action='store_true',
    )

    # cloud driver related config
    cloud_options = prog.add_mutually_exclusive_group(required=False)
    cloud_options.add_argument('--open-stack-cloud',
        default=os.environ.get("OPENSTACK_CLOUD"),
        help="the name of the open stack cloud from your config file to use",
    )
    prog.add_argument('--remote-user',
        default=os.environ.get("PS_REMOTE_USER", "cloud-user"),
        help="the of the user for the ssh connections",
    )

    # KUBERNETES CONFIG
    args_kubernetes = prog.add_argument_group('Kubernetes settings')
    args_kubernetes.add_argument(
        '--kube-config',
        default=None,
        help='Location of kube-config file',
    )

    # policy-related settings
    policy_options = prog.add_mutually_exclusive_group(required=True)
    policy_options.add_argument('--validate-policy-file',
        help='reads the policy file, validates the schema, returns'
    )
    policy_options.add_argument('--run-policy-file',
        default=os.environ.get("POLICY_FILE"),
        help='location of the policy file to read',
    )
    policy_options.add_argument('--interactive',
        help='will start the seal in interactive mode',
        action='store_true',
    )

    args = prog.parse_args(args=argv)

    # Configure logging
    if not args.verbose:
        log_level = logging.ERROR
    elif args.verbose == 1:
        log_level = logging.WARNING
    elif args.verbose == 2:
        log_level = logging.INFO
    else:
        log_level = logging.DEBUG
    logging.basicConfig(
        stream=sys.stdout,
        level=log_level
    )
    logger = logging.getLogger(__name__)
    logger.setLevel(log_level)

    # build cloud inventory
    logger.debug("Fetching the remote nodes")
    driver = OpenStackDriver(
        cloud=args.open_stack_cloud,
    )

    # build a k8s client
    kube_config = args.kube_config
    logger.debug("Creating kubernetes client with config %d", kube_config)
    k8s_client = K8sClient(kube_config=kube_config)
    k8s_inventory = K8sInventory(k8s_client=k8s_client)

    # read the local inventory
    logger.debug("Fetching the inventory")
    if args.inventory_file:
        groups_to_restrict_to = read_inventory_file_to_dict(
            args.inventory_file
        )
    else:
        logger.info("Attempting to read the inventory from kubernetes")
        groups_to_restrict_to = k8s_client.get_nodes_groups()

    logger.debug("Restricting inventory to %s" % groups_to_restrict_to)

    inventory = NodeInventory(
        driver=driver,
        restrict_to_groups=groups_to_restrict_to,
    )
    inventory.sync()

    # create an executor
    executor = RemoteExecutor(
        user=args.remote_user,
    )

    if args.interactive:
        # create a command parser
        cmd = PSCmd(
            inventory=inventory,
            driver=driver,
            executor=executor,
            k8s_inventory=k8s_inventory,
        )
        while True:
            try:
                cmd.cmdloop()
            except KeyboardInterrupt:
                print()
                print("Ctrl-c again to quit")
            try:
                input()
            except KeyboardInterrupt:
                sys.exit(0)
    elif args.validate_policy_file:
        PolicyRunner.validate_file(args.validate_policy_file)
        print("All good, captain")
    elif args.run_policy_file:
        policy = PolicyRunner.validate_file(args.run_policy_file)
        PolicyRunner.run(policy, inventory, k8s_inventory, driver, executor)
Example #35
0
 def _add(p: ArgumentParser) -> ArgumentParser:
     p.add_argument(*args, **kwargs)
     return p
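The fragment above only works inside an enclosing scope that defines args and kwargs. One plausible reconstruction, purely hypothetical and not the original project, is an argument() factory that captures add_argument parameters and returns a parser-mutating callable:
from typing import Callable
from configargparse import ArgumentParser


def argument(*args, **kwargs) -> Callable[[ArgumentParser], ArgumentParser]:
    # Capture the add_argument parameters in a closure.
    def _add(p: ArgumentParser) -> ArgumentParser:
        p.add_argument(*args, **kwargs)
        return p
    return _add


# Usage: compose a parser from a list of such callables.
parser = ArgumentParser()
for apply_arg in (argument('--seed', type=int, default=0),
                  argument('--debug', action='store_true')):
    apply_arg(parser)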
Example #36
0
        aeon_config['augmentation'].append(dict())
        aeon_config['augmentation'][0]['type'] = "audio"
        aeon_config['augmentation'][0]['noise_index_file'] = noise_file
        aeon_config['augmentation'][0]['noise_root'] = os.path.dirname(noise_file)
        aeon_config['augmentation'][0]['add_noise_probability'] = 0.5
        aeon_config['augmentation'][0]['noise_level'] = (0.0, 0.5)

    return wrap_dataloader(AeonDataLoader(aeon_config))


def make_test_loader(manifest_file, manifest_root, backend_obj):
    aeon_config = common_config(manifest_file, manifest_root, backend_obj.bsz)
    aeon_config['type'] = 'audio'  # No labels provided
    aeon_config.pop('label', None)
    dl = AeonDataLoader(aeon_config)
    dl = TypeCast(dl, index=0, dtype=np.float32)
    return dl


if __name__ == '__main__':
    from configargparse import ArgumentParser
    parser = ArgumentParser()
    parser.add_argument('--input_dir', required=True, help='path to whale_data.zip')
    parser.add_argument('--out_dir', required=True, help='destination path of extracted files')
    parser.add_argument('--overwrite', action='store_true', help='overwrite existing manifest files')
    args = parser.parse_args()

    generated_files = ingest_whales(args.input_dir, args.out_dir, args.overwrite)

    print("Manifest files written to:\n" + "\n".join(generated_files))
Example #37
0
# replace $DATA and $MODEL in paths
# by the values of the env variables
DATA = os.getenv('DATA')
MODEL = os.getenv('MODEL')


def _replace_env(path_str):
    path_str = str(path_str)
    mod_str = path_str.replace('$DATA', DATA)
    mod_str = mod_str.replace('$MODEL', MODEL)
    return mod_str


# arg parsing
parser = ArgumentParser()
# add experiment level args
parser.add_argument(
    '-c',
    '--config',
    is_config_file=True,
    default="/mnt/share/raheppt1/data/vessel/config/aorth_seg.yaml")
parser.add_argument('--seed', type=int, default=0)
parser.add_argument('--neptune_project', type=str, default='lab-midas/mednet')
parser.add_argument('--experiment_name', type=str, default="aorth")
parser.add_argument('--data_path', type=_replace_env)
parser.add_argument('--image_group', type=str, default='images')
parser.add_argument('--label_group', type=str, default='labels')
parser.add_argument('--train_set', type=str)
parser.add_argument('--val_set', type=str)
parser.add_argument('--model_dir', type=_replace_env)
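Because _replace_env is wired in as type=, placeholder expansion happens while arguments are parsed. A self-contained sketch of the same idea; the environment value and path below are illustrative only:
import os
from configargparse import ArgumentParser

os.environ.setdefault('DATA', '/data')   # illustrative; normally exported by the shell

demo = ArgumentParser()
demo.add_argument('--data_path',
                  type=lambda s: str(s).replace('$DATA', os.environ['DATA']))

args = demo.parse_args(['--data_path', '$DATA/vessel/train.h5'])
print(args.data_path)   # /data/vessel/train.h5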
Example #38
0
def cli_main():
    # Arguments
    default_config = os.path.join(os.path.split(os.getcwd())[0], 'config.conf')

    print(default_config)

    parser = ArgumentParser(description='Pytorch BYOL',
                            default_config_files=[default_config])
    parser.add_argument('-c',
                        '--my-config',
                        required=False,
                        is_config_file=True,
                        help='config file path')
    parser.add_argument('--finetune',
                        dest='finetune',
                        action='store_true',
                        help='Perform only finetuning (Default: False)')
    parser.set_defaults(finetune=False)
    parser.add_argument(
        '--transfer',
        dest='transfer',
        action='store_true',
        help='Perform transfer learning on linear eval (Default: False)')
    parser.set_defaults(transfer=False)
    parser.add_argument('--offline_log',
                        dest='offline_log',
                        action='store_true',
                        help='Do not log online (Default:  False)')
    parser.set_defaults(offline_log=False)
    parser.add_argument('--pt_checkpoint', type=str, default=None)
    parser.add_argument('--val_every_n', type=int, default=1)
    parser.add_argument('--tag', type=str, default=None)
    parser.add_argument('--resume_ckpt', type=str, default=None)
    parser.add_argument('--seed', type=int, default=222)
    parser.add_argument('--project_name', type=str, default=None)

    # trainer args
    parser = pl.Trainer.add_argparse_args(parser)

    # model args
    parser = BYOL.add_model_specific_args(parser)

    parser = SSLLinearEval.add_model_specific_args(parser)

    args = parser.parse_args()

    seed_everything(args.seed)

    args.status = 'Test'

    args.batch_size = args.ft_batch_size

    # Get DataModule
    dm, ft_dm, args = get_dm(args)

    # Define model
    BYOL(**args.__dict__)

    load_log_file = save_dir = os.path.join(os.getcwd(), 'log_files.txt')

    log_dirs = np.genfromtxt(load_log_file, delimiter=" ", dtype='str')

    print("\n\n Log Dir: {}\n\n".format(log_dirs))

    ft_model_dir = log_dirs[1]
    checkpoint_path = log_dirs[2]

    if not args.offline_log:

        exp_num = log_dirs[3]

        print("Loading checkpoint: {}".format(
            os.path.join(ft_model_dir,
                         os.listdir(ft_model_dir + '/')[-1])))

        print("Experiment Num: {}".format(exp_num))

        project = neptune.init(args.project_name)

        experiment = project.get_experiments(id=exp_num)[0]

        print(experiment)

        callback_list = [TestNeptuneCallback(experiment)]

    else:
        callback_list = [TestNeptuneCallback(None)]

    encoder = BYOL.load_from_checkpoint(checkpoint_path, strict=False)

    SSLLinearEval(encoder.encoder_online, **args.__dict__)

    path = os.path.join(ft_model_dir, os.listdir(ft_model_dir + '/')[-1])

    ft_model = SSLLinearEval.load_from_checkpoint(
        path, strict=False, encoder=encoder.encoder_online, **args.__dict__)

    if args.accelerator == 'ddp' or args.accelerator == 'ddp2':
        replace_sampler = True  # False
    else:
        replace_sampler = True

    trainer_ft = pl.Trainer.from_argparse_args(
        args,
        logger=None,
        checkpoint_callback=False,
        callbacks=callback_list,
        deterministic=False,
        fast_dev_run=False,
        sync_batchnorm=True,
        replace_sampler_ddp=replace_sampler)

    # Fit
    trainer_ft.test(ft_model, datamodule=ft_dm)
Example #39
0
def procesaArgumentos():
    parser = ArgumentParser()

    parser.add('-i',
               dest='infile',
               type=str,
               env_var='SM_INFILE',
               required=True)
    parser.add('-t',
               dest='temporada',
               type=str,
               env_var='SM_TEMPORADA',
               required=True)
    parser.add('-j', dest='jornada', type=int, required=True)

    parser.add('-l',
               '--lista-socios',
               dest='listaSocios',
               action="store_true",
               default=False)

    parser.add("-o",
               "--output-dir",
               dest="outputdir",
               type=str,
               default=LOCATIONCACHE)

    parser.add('--nproc', dest='nproc', type=int, default=NJOBS)
    parser.add('--memworker', dest='memworker', default=MEMWORKER)
    parser.add('--joblibmode',
               dest='joblibmode',
               choices=JOBLIBCHOICES,
               default='threads')

    parser.add('-v',
               dest='verbose',
               action="count",
               env_var='SM_VERBOSE',
               required=False,
               default=0)
    parser.add('-d',
               dest='debug',
               action="store_true",
               env_var='SM_DEBUG',
               required=False,
               default=False)
    parser.add('--logdir',
               dest='logdir',
               type=str,
               env_var='SM_LOGDIR',
               required=False)

    # args = vars(parser.parse_args())
    # return Namespace(**args)

    return parser.parse_args()
Example #40
0
import argparse
import logging

from configargparse import ArgumentParser
# pylint: disable=E0611
from setproctitle import setproctitle

from experiment_collection_core import service_pb2_grpc
from experiment_collection_server.db.storage_postgresql import StoragePostgresql
from experiment_collection_server.db.storage_sqlite import StorageSQLite
from experiment_collection_server.service import Servicer

logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.INFO)
logger = logging.getLogger(__name__)

ENV_VAR_PREFIX = 'EXPERIMENT_'

parser = ArgumentParser(
    auto_env_var_prefix=ENV_VAR_PREFIX, allow_abbrev=False,
    formatter_class=argparse.ArgumentDefaultsHelpFormatter
)
group = parser.add_argument_group('Storage Options')
group.add_argument('--storage-type', type=str, help='Storage type (sqlite/postgres)')
group.add_argument('--sqlite-path', type=str, help='sqlite database path')
group.add_argument('--postgres-dsn', type=str, help='postgres connection string')
group = parser.add_argument_group('Server Options')
group.add_argument('--workers', type=int, default=1, help='Number of workers')
group.add_argument('--port', type=str, help='server port')
group = parser.add_argument_group('Token Options')
group.add_argument('--token', type=str, help='Token to create')
parser.add_argument('--action', type=str, help='Type of task (run/token)')


def _get_storage(args):
    if args.storage_type == 'sqlite':
Example #41
0
# See the License for the specific language governing permissions and
# limitations under the License.
# ******************************************************************************

import logging
import sys
from configargparse import ArgumentParser

from nlp_architect.models.np2vec import NP2vec
from nlp_architect.utils.io import validate_existing_filepath, check_size

logging.basicConfig(stream=sys.stdout, level=logging.INFO)
logger = logging.getLogger(__name__)

if __name__ == "__main__":
    arg_parser = ArgumentParser(__doc__)
    arg_parser.add_argument(
        '--np2vec_model_file',
        default='conll2000.train.model',
        help='path to the file with the np2vec model to load.',
        type=validate_existing_filepath)
    arg_parser.add_argument(
        '--binary',
        help='boolean indicating whether the model to load has been stored in binary '
        'format.',
        action='store_true')
    arg_parser.add_argument(
        '--word_ngrams',
        default=0,
        type=int,
        choices=[0, 1],
Example #42
0
 def add_arguments(self, parser: ArgumentParser):
     """Add wallet-specific command line arguments to the parser."""
     parser.add_argument(
         "--seed",
         type=str,
         metavar="<wallet-seed>",
         env_var="ACAPY_WALLET_SEED",
         help="Specifies the seed to use for the creation of a public\
         DID for the agent to use with a Hyperledger Indy ledger, or a local\
         ('--wallet-local-did') DID. If public, the DID must already exist\
         on the ledger.",
     )
     parser.add_argument(
         "--wallet-local-did",
         action="store_true",
         env_var="ACAPY_WALLET_LOCAL_DID",
         help="If this parameter is set, provisions the wallet with a\
         local DID from the '--seed' parameter, instead of a public DID\
         to use with a Hyperledger Indy ledger.",
     )
     parser.add_argument(
         "--wallet-key",
         type=str,
         metavar="<wallet-key>",
         env_var="ACAPY_WALLET_KEY",
         help="Specifies the master key value to use to open the wallet.",
     )
     parser.add_argument(
         "--wallet-rekey",
         type=str,
         metavar="<wallet-rekey>",
         env_var="ACAPY_WALLET_REKEY",
         help="Specifies a new master key value to which to rotate and to\
         open the wallet next time.",
     )
     parser.add_argument(
         "--wallet-name",
         type=str,
         metavar="<wallet-name>",
         env_var="ACAPY_WALLET_NAME",
         help="Specifies the wallet name to be used by the agent.\
         This is useful if your deployment has multiple wallets.",
     )
     parser.add_argument(
         "--wallet-type",
         type=str,
         metavar="<wallet-type>",
         default="basic",
         env_var="ACAPY_WALLET_TYPE",
         help="Specifies the type of Indy wallet provider to use.\
         Supported internal storage types are 'basic' (memory) and 'indy'.\
         The default (if not specified) is 'basic'.",
     )
     parser.add_argument(
         "--wallet-storage-type",
         type=str,
         metavar="<storage-type>",
         default="default",
         env_var="ACAPY_WALLET_STORAGE_TYPE",
         help="Specifies the type of Indy wallet backend to use.\
         Supported internal storage types are 'basic' (memory),\
         'default' (sqlite), and 'postgres_storage'.  The default,\
         if not specified, is 'default'.",
     )
     parser.add_argument(
         "--wallet-storage-config",
         type=str,
         metavar="<storage-config>",
         env_var="ACAPY_WALLET_STORAGE_CONFIG",
         help='Specifies the storage configuration to use for the wallet.\
         This is required if you are using \'postgres_storage\' wallet\
         storage type. For example, \'{"url":"localhost:5432",\
         "wallet_scheme":"MultiWalletSingleTable"}\'. This\
         configuration maps to the indy sdk postgres plugin\
         (PostgresConfig).',
     )
     parser.add_argument(
         "--wallet-storage-creds",
         type=str,
         metavar="<storage-creds>",
         env_var="ACAPY_WALLET_STORAGE_CREDS",
         help='Specifies the storage credentials to use for the wallet.\
         This is required if you are using \'postgres_storage\' wallet\
         storage type. For example, \'{"account":"postgres","password":\
         "mysecretpassword","admin_account":"postgres",\
         "admin_password":"******"}\'. This configuration maps\
         to the indy sdk postgres plugin (PostgresCredentials). NOTE:\
         admin_user must have the CREATEDB role or else initialization\
         will fail.',
     )
     parser.add_argument(
         "--replace-public-did",
         action="store_true",
         env_var="ACAPY_REPLACE_PUBLIC_DID",
         help=
         "If this parameter is set and an agent already has a public DID,\
         and the '--seed' parameter specifies a new DID, the agent will use\
         the new DID in place of the existing DID. Default: false.",
     )
     parser.add_argument(
         "--recreate-wallet",
         action="store_true",
         env_var="ACAPY_RECREATE_WALLET",
         help="If an existing wallet exists with the same name, remove and\
         recreate it during provisioning.",
     )
Example #43
0
    def add_model_specific_args(parent_parser):
        parser = ArgumentParser(parents=[parent_parser], add_help=False)

        (args, _) = parser.parse_known_args()

        # optim
        parser.add_argument('--ft_epochs', type=int, default=2)
        parser.add_argument('--ft_batch_size', type=int, default=128)
        parser.add_argument('--ft_learning_rate', type=float, default=0.02)
        parser.add_argument('--ft_weight_decay', type=float, default=1.5e-6)
        parser.add_argument('--ft_optimiser', default='sgd',
                            help='Optimiser, (Options: sgd, adam, lars).')

        return parser
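The parents=[parent_parser], add_help=False idiom lets several components contribute options to a single CLI without duplicating the -h flag. A hedged sketch of how such methods are typically chained; the helper below is a stand-in, not code from the project above:
from configargparse import ArgumentParser


def add_optim_args(parent_parser):
    # Same pattern as add_model_specific_args: wrap the parent, extend, return.
    parser = ArgumentParser(parents=[parent_parser], add_help=False)
    parser.add_argument('--ft_learning_rate', type=float, default=0.02)
    return parser


root = ArgumentParser()
root.add_argument('--seed', type=int, default=222)
root = add_optim_args(root)   # chained the same way BYOL/SSLLinearEval are above
print(root.parse_args([]))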
Example #44
0
    aeon_config['random_seed'] = random_seed

    if noise_file is not None:
        aeon_config['audio']['noise_index_file'] = noise_file
        aeon_config['audio']['noise_root'] = manifest_root
        aeon_config['audio']['add_noise_probability'] = 0.5
        aeon_config['audio']['noise_level'] = [0.0, 0.5]

    return wrap_dataloader(AeonDataLoader(aeon_config, backend_obj))


def make_test_loader(manifest_file, manifest_root, backend_obj):
    aeon_config = common_config(manifest_file, manifest_root, backend_obj.bsz)
    aeon_config['type'] = 'audio'  # No labels provided
    aeon_config.pop('label', None)
    dl = AeonDataLoader(aeon_config, backend_obj)
    dl = TypeCast(dl, index=0, dtype=np.float32)
    return dl


if __name__ == '__main__':
    from configargparse import ArgumentParser
    parser = ArgumentParser()
    parser.add_argument('--input_dir', required=True, help='path to whale_data.zip')
    parser.add_argument('--out_dir', required=True, help='destination path of extracted files')
    args = parser.parse_args()

    generated_files = ingest_whales(args.input_dir, args.out_dir)

    print("Manifest files written to:\n" + "\n".join(generated_files))
Example #45
0
    aeon_config['iteration_mode'] = "ONCE"
    aeon_config['shuffle_manifest'] = True
    aeon_config['shuffle_enable'] = True
    aeon_config['random_seed'] = random_seed
    aeon_config['augmentation'][0]["center"] = False
    aeon_config['augmentation'][0]["flip_enable"] = True

    return wrap_dataloader(AeonDataLoader(aeon_config))


def make_validation_loader(manifest_file, manifest_root, backend_obj, subset_pct=100):
    aeon_config = common_config(manifest_file, manifest_root, backend_obj.bsz, subset_pct)
    return wrap_dataloader(AeonDataLoader(aeon_config))


def make_tuning_loader(manifest_file, manifest_root, backend_obj):
    aeon_config = common_config(manifest_file, manifest_root, backend_obj.bsz, subset_pct=20)
    aeon_config['shuffle_manifest'] = True
    return wrap_dataloader(AeonDataLoader(aeon_config))


if __name__ == '__main__':
    from configargparse import ArgumentParser
    parser = ArgumentParser()
    parser.add_argument('--out_dir', required=True, help='Directory to write ingested files')
    parser.add_argument('--padded_size', type=int, default=40, help='Size of image after padding')
    parser.add_argument('--overwrite', action='store_true', default=False, help='Overwrite files')
    args = parser.parse_args()

    ingest_cifar10(args.out_dir, args.padded_size, overwrite=args.overwrite)
Example #46
0
def add_debug_arguments(ap: argparse.ArgumentParser):
    ap.add_argument('--debug', action='store_true',
                    help='Enable debugging features',
                    env_var='DEBUG')
Example #47
0
def add_plugin_arguments(ap: argparse.ArgumentParser):
    ap.add_argument('--subprocess', dest='subprocess', action='store',
                    nargs='+',
                    metavar=('(init|op|hook|handler|timer)', 'name'),
                    help='Trigger a subprocess every time, for debugging')
    ap.add_argument('--http', action='store_true',
                    help='Trigger http web server',
                    env_var='HTTP')
    ap.add_argument('--http-addr', metavar='HTTP_ADDR', action='store',
                    default='0.0.0.0:8000',
                    help='Address the http web server listens on, in the \
                    format {HOST}:{PORT}.',
                    env_var='HTTP_ADDR')
    ap.add_argument('--zmq-thread-pool', metavar='ZMQ_THREAD_POOL',
                    action='store',
                    default=4, type=int,
                    help='Number of thread in ZMQTransport thread pool',
                    env_var='ZMQ_THREAD_POOL')
    ap.add_argument('--zmq-thread-limit', metavar='ZMQ_THREAD_LIMIT',
                    action='store',
                    default=10, type=int,
                    help='Max number of thread in ZMQTransport thread pool',
                    env_var='ZMQ_THREAD_LIMIT')
    ap.add_argument('modules', nargs='*', default=[])  # env_var: LOAD_MODULES
Example #48
0
def get_configs():
    current_time = datetime.datetime.now().strftime('%d%b%Y-%H:%M')
    parser = ArgumentParser(config_file_parser_class=YAMLConfigFileParser)
    parser.add_argument('-c', '--config', required=True, is_config_file=True,
                        help='config file path')
    parser.add_argument('--batch_size', type=int, default=32, help='input batch size')
    parser.add_argument('--workers', type=int, help='number of data loading workers', default=8)
    parser.add_argument('--nepoch', type=int, default=4, help='number of epochs to train for')
    parser.add_argument('--reproduce', default=False, action='store_true')
    parser.add_argument('--time', default=False, action='store_true')
    parser.add_argument('--datatype', default=None, type=str)  # point, point_uniform, point_poisson
    parser.add_argument('--preprocess', default=False, action='store_true')

    # Dataset Settings
    parser.add_argument('--model', type=str, default='', help='model path to load from')
    parser.add_argument('--data_dir', type=str, default='data/mixamo', help='dataset path')
    parser.add_argument('--no_skin', default=False, action='store_true')  # hoped to make the dataloader faster


    parser.add_argument('--log_dir', type=str, default=f"./logs/{current_time}", help='log dir')
    parser.add_argument('--save_step', type=int, default=1000, help='model saving interval')
    parser.add_argument('--vis_step', type=int, default=1000, help='model visualization interval')

    # Model Settings
    parser.add_argument('--num_joints', type=int, default=22)
    parser.add_argument('--joint_loss_type', type=str, default='rel')  # 'rel'/'glob'/'rel2glob'
    parser.add_argument('--bindpose_loss_type', type=str, default='glob')  # 'rel'/'glob'/'rel2glob'
    parser.add_argument('--use_bindpose', default=False, action='store_true')
    parser.add_argument('--use_gt_ibm', default=False, action='store_true')

    parser.add_argument('--use_normal', default=False, action='store_true')
    parser.add_argument('--quantize', type=int, default=0)

    # Network Settings
    parser.add_argument('--use_bn', default=False, action='store_true', help='Use Batch Norm in networks?')
    parser.add_argument('--global_feature_size', type=int, default=1024)
    parser.add_argument('--feature_size', type=int, default=1024)
    parser.add_argument('--channels', type=int, default=[64, 256, 512], nargs=3)
    parser.add_argument('--k', type=int, default=-1)  # k for k-nearest neighbor in euclidean distance
    parser.add_argument('--euc_radius', type=float, default=0.0)  # euclidean ball, 0.6 in RigNet
    parser.add_argument('--network_type', type=str, default='full')
    parser.add_argument('--edge_type', type=str, default='tpl_and_euc', help='select one of tpl_and_euc, tpl_only, euc_only')

    # Hyperparameter Settings
    parser.add_argument('--rot_hp', type=float, default=1., help='weight of rotation loss')
    parser.add_argument('--trans_hp', type=float, default=1., help='weight of translation loss')
    parser.add_argument('--skin_hp', type=float, default=1e-3, help='weight of skin loss')
    parser.add_argument('--bm_rot_hp', type=float, default=1., help='weight of rotation loss')
    parser.add_argument('--bm_trans_hp', type=float, default=1, help='weight of translation loss')
    parser.add_argument('--bm_shape_hp', type=float, default=1e-3, help='weight of skin loss')

    # Optimization Settings
    parser.add_argument('--lr', type=float, default=0.001)
    parser.add_argument('--lr_step_size', type=int, default=100)
    parser.add_argument('--lr_gamma', type=float, default=0.8)
    parser.add_argument('--overfit', default=False, action='store_true')
    parser.add_argument('--vis_overfit', default=False, action='store_true')  # overfit on vis dataset

    args = parser.parse_args()

    print(args)
    if os.path.exists(args.log_dir):
        print("\nAre you re-training? [y/n]", end='')
        choice = input().lower()
        if choice not in ['y', 'n']:
            print("please type in valid response")
            sys.exit()
        elif choice == 'n':
            print("The log directory is already occupied. Do you want to remove and rewrite? [y/n]", end='')
            choice = input().lower()
            if choice == 'y':
                shutil.rmtree(args.log_dir, ignore_errors=True)
                os.makedirs(args.log_dir)
            else:
                print("Please choose a different log_dir")
                sys.exit()
        else:
            if args.model != '':
                print("You cannot restart when the model is specified")
                __import__('pdb').set_trace()
            else:
                ckpt_list = [ckpt for ckpt in os.listdir(args.log_dir) if ckpt.endswith('.pth')]
                args.model = os.path.join(args.log_dir,
                                          sorted(ckpt_list, key=lambda ckpt_str: ckpt_str.split('_')[-1].split('.pth')[0])[-1])

            print("Retraining from ckpt: {}".format(args.model))

    else:
        os.makedirs(args.log_dir)
    with open(os.path.join(args.log_dir, 'config.yaml'), 'w') as f:
        yaml.dump(vars(args), f)#, default_flow_style=None)

    if not args.reproduce:
        manual_seed = random.randint(1, 10000)
    else:
        manual_seed = 0
    print("Random Seed: ", manual_seed)
    random.seed(manual_seed)
    torch.manual_seed(manual_seed)

    return args
Example #49
0
def add_logging_arguments(ap: argparse.ArgumentParser):
    ap.add_argument('--loglevel', action='store', default='INFO',
                    help="Log level",
                    env_var='LOG_LEVEL')
Example #50
0
 def _create_parser(
     parent_parser: configargparse.ArgumentParser
 ) -> configargparse.ArgumentParser:
     return parent_parser.add_parser(
         "autotag-config", help="tweak settings of autotagger"
     )
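Despite the configargparse.ArgumentParser annotation, parent_parser here has to be the object returned by add_subparsers(), since that is what provides add_parser. A sketch of the assumed wiring; the program name is illustrative:
import configargparse

root = configargparse.ArgumentParser(prog='toolkit')   # illustrative name
subparsers = root.add_subparsers(dest='command')

# The _create_parser helpers above receive this subparsers object.
subparsers.add_parser('autotag-config', help='tweak settings of autotagger')

args = root.parse_args(['autotag-config'])
print(args.command)   # autotag-config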
Example #51
0
    fields = record.split(',')
    headerbody = "FILE\t" * len(fields)
    header = "@" + headerbody[:-1] + '\n'
    tmp_dest.write(header)
    record = record.replace(',', '\t')
    tmp_dest.write(record)

    for record in source:
        record = record.replace(',', '\t')
        tmp_dest.write(record)

    source.close()
    tmp_dest.close()

    if output_manifest is None:
        output_manifest = source_manifest

    if os.path.exists(output_manifest):
        os.remove(output_manifest)
    shutil.move(tmp_manifest, output_manifest)


if __name__ == '__main__':
    from configargparse import ArgumentParser
    parser = ArgumentParser()
    parser.add_argument('--manifest_file', required=True, help='Manifest to convert')
    parser.add_argument('--destination', help='Converted Manifest destination')
    args = parser.parse_args()

    convert_manifest(args.manifest_file, args.destination)
Example #52
0
 def add_arguments(self, parser: ArgumentParser):
     """Add protocol-specific command line arguments to the parser."""
     parser.add_argument(
         "--auto-ping-connection",
         action="store_true",
         env_var="ACAPY_AUTO_PING_CONNECTION",
         help="Automatically send a trust ping immediately after a\
         connection response is accepted. Some agents require this before\
         marking a connection as 'active'. Default: false.",
     )
     parser.add_argument(
         "--invite-base-url",
         type=str,
         metavar="<base-url>",
         env_var="ACAPY_INVITE_BASE_URL",
         help=
         "Base URL to use when formatting connection invitations in URL format.",
     )
     parser.add_argument(
         "--monitor-ping",
         action="store_true",
         env_var="ACAPY_MONITOR_PING",
         help="Send a webhook when a ping is sent or received.",
     )
     parser.add_argument(
         "--public-invites",
         action="store_true",
         env_var="ACAPY_PUBLIC_INVITES",
         help="Send invitations out, and receive connection requests,\
         using the public DID for the agent. Default: false.",
     )
     parser.add_argument(
         "--timing",
         action="store_true",
         env_var="ACAPY_TIMING",
         help="Include timing information in response messages.",
     )
     parser.add_argument(
         "--timing-log",
         type=str,
         metavar="<log-path>",
         env_var="ACAPY_TIMING_LOG",
         help="Write timing information to a given log file.",
     )
     parser.add_argument(
         "--trace",
         action="store_true",
         env_var="ACAPY_TRACE",
         help="Generate tracing events.",
     )
     parser.add_argument(
         "--trace-target",
         type=str,
         metavar="<trace-target>",
         env_var="ACAPY_TRACE_TARGET",
         help=
         'Target for trace events ("log", "message", or http endpoint).',
     )
     parser.add_argument(
         "--trace-tag",
         type=str,
         metavar="<trace-tag>",
         env_var="ACAPY_TRACE_TAG",
         help="Tag to be included when logging events.",
     )
     parser.add_argument(
         "--trace-label",
         type=str,
         metavar="<trace-label>",
         env_var="ACAPY_TRACE_LABEL",
         help="Label (agent name) used logging events.",
     )
     parser.add_argument(
         "--preserve-exchange-records",
         action="store_true",
         env_var="ACAPY_PRESERVE_EXCHANGE_RECORDS",
         help=
         "Keep credential exchange records after exchange has completed.",
     )
     parser.add_argument(
         "--emit-new-didcomm-prefix",
         action="store_true",
         env_var="ACAPY_EMIT_NEW_DIDCOMM_PREFIX",
         help="Emit protocol messages with new DIDComm prefix; i.e.,\
         'https://didcomm.org/' instead of (default) prefix\
         'did:sov:BzCbsNYhMrjHiqZDTUASHg;spec/'.",
     )
     parser.add_argument(
         "--exch-use-unencrypted-tags",
         action="store_true",
         env_var="ACAPY_EXCH_USE_UNENCRYPTED_TAGS",
         help=
         "Store tags for exchange protocols (credential and presentation)\
         using unencrypted rather than encrypted tags",
     )
Example #53
0
    def add_arguments(self, parser: ArgumentParser):
        """Add debug command line arguments to the parser."""
        parser.add_argument(
            "--debug",
            action="store_true",
            env_var="ACAPY_DEBUG",
            help="Enables a remote debugging service that can be accessed\
            using ptvsd for Visual Studio Code. The framework will wait\
            for the debugger to connect at start-up. Default: false.",
        )
        parser.add_argument(
            "--debug-seed",
            dest="debug_seed",
            type=str,
            metavar="<debug-did-seed>",
            env_var="ACAPY_DEBUG_SEED",
            help="Specify the debug seed to use.",
        )
        parser.add_argument(
            "--debug-connections",
            action="store_true",
            env_var="ACAPY_DEBUG_CONNECTIONS",
            help=
            "Enable additional logging around connections. Default: false.",
        )
        parser.add_argument(
            "--debug-credentials",
            action="store_true",
            env_var="ACAPY_DEBUG_CREDENTIALS",
            help="Enable additional logging around credential exchanges.\
            Default: false.",
        )
        parser.add_argument(
            "--debug-presentations",
            action="store_true",
            env_var="ACAPY_DEBUG_PRESENTATIONS",
            help="Enable additional logging around presentation exchanges.\
            Default: false.",
        )
        parser.add_argument(
            "--invite",
            action="store_true",
            env_var="ACAPY_INVITE",
            help=
            "After startup, generate and print a new out-of-band connection invitation\
            URL. Default: false.",
        )
        parser.add_argument(
            "--connections-invite",
            action="store_true",
            env_var="ACAPY_CONNECTIONS_INVITE",
            help="After startup, generate and print a new connections protocol \
            style invitation URL. Default: false.",
        )
        parser.add_argument(
            "--invite-label",
            dest="invite_label",
            type=str,
            metavar="<label>",
            env_var="ACAPY_INVITE_LABEL",
            help="Specify the label of the generated invitation.",
        )
        parser.add_argument(
            "--invite-multi-use",
            action="store_true",
            env_var="ACAPY_INVITE_MULTI_USE",
            help="Flag specifying the generated invite should be multi-use.",
        )
        parser.add_argument(
            "--invite-public",
            action="store_true",
            env_var="ACAPY_INVITE_PUBLIC",
            help="Flag specifying the generated invite should be public.",
        )
        parser.add_argument(
            "--invite-metadata-json",
            type=str,
            metavar="<metadata-json>",
            env_var="ACAPY_INVITE_METADATA_JSON",
            help=
            "Add metadata json to invitation created with --invite argument.",
        )
        parser.add_argument(
            "--test-suite-endpoint",
            type=str,
            metavar="<endpoint>",
            env_var="ACAPY_TEST_SUITE_ENDPOINT",
            help="URL endpoint for sending messages to the test suite agent.",
        )

        parser.add_argument(
            "--auto-accept-invites",
            action="store_true",
            env_var="ACAPY_AUTO_ACCEPT_INVITES",
            help="Automatically accept invites without firing a webhook event or\
            waiting for an admin request. Default: false.",
        )
        parser.add_argument(
            "--auto-accept-requests-peer",
            action="store_true",
            env_var="ACAPY_AUTO_ACCEPT_REQUESTS_PEER",
            help="Automatically accept connection and did-exchange requests\
            against peer DIDs without firing a webhook event\
            or waiting for an admin request. Default: false.",
        )
        parser.add_argument(
            "--auto-accept-requests-public",
            action="store_true",
            env_var="ACAPY_AUTO_ACCEPT_REQUESTS_PEER",
            help="Automatically accept connection and did-exchange requests\
            against public DIDs without firing a webhook event\
            or waiting for an admin request. Default: false.",
        )
        parser.add_argument(
            "--auto-respond-messages",
            action="store_true",
            env_var="ACAPY_AUTO_RESPOND_MESSAGES",
            help=
            "Automatically respond to basic messages indicating the message was\
            received. Default: false.",
        )
        parser.add_argument(
            "--auto-respond-credential-proposal",
            action="store_true",
            env_var="ACAPY_AUTO_RESPOND_CREDENTIAL_PROPOSAL",
            help="Auto-respond to credential proposals with corresponding " +
            "credential offers",
        )
        parser.add_argument(
            "--auto-respond-credential-offer",
            action="store_true",
            env_var="ACAPY_AUTO_RESPOND_CREDENTIAL_OFFER",
            help=
            "Automatically respond to Indy credential offers with a credential\
            request. Default: false",
        )
        parser.add_argument(
            "--auto-respond-credential-request",
            action="store_true",
            env_var="ACAPY_AUTO_RESPOND_CREDENTIAL_REQUEST",
            help=
            "Auto-respond to credential requests with corresponding credentials",
        )
        parser.add_argument(
            "--auto-respond-presentation-proposal",
            action="store_true",
            env_var="ACAPY_AUTO_RESPOND_PRESENTATION_PROPOSAL",
            help="Auto-respond to presentation proposals with corresponding " +
            "presentation requests",
        )
        parser.add_argument(
            "--auto-respond-presentation-request",
            action="store_true",
            env_var="ACAPY_AUTO_RESPOND_PRESENTATION_REQUEST",
            help="Automatically respond to Indy presentation requests with a\
            constructed presentation if a corresponding credential can be retrieved\
            for every referent in the presentation request. Default: false.",
        )
        parser.add_argument(
            "--auto-store-credential",
            action="store_true",
            env_var="ACAPY_AUTO_STORE_CREDENTIAL",
            help="Automatically store an issued credential upon receipt.\
            Default: false.",
        )
        parser.add_argument(
            "--auto-verify-presentation",
            action="store_true",
            env_var="ACAPY_AUTO_VERIFY_PRESENTATION",
            help="Automatically verify a presentation when it is received.\
            Default: false.",
        )
Example #54
0
import logging

from pathlib import Path
from setproctitle import setproctitle
from threading import RLock

import aiohttp
import async_timeout
import forkme
import msgpack
from aiomisc.log import basic_config, LogFormat
from aiomisc.thread_pool import threaded
from aiomisc.utils import bind_socket, new_event_loop
from configargparse import ArgumentParser
from yarl import URL

log = logging.getLogger()
parser = ArgumentParser(auto_env_var_prefix="APP_")

parser.add_argument('-f', '--forks', type=int, default=4)
parser.add_argument('--pool-size', default=4, type=int)
parser.add_argument('-D', '--debug', action='store_true')

parser.add_argument('--log-level',
                    default='info',
                    choices=('debug', 'info', 'warning', 'error', 'fatal'))

parser.add_argument('--log-format',
                    choices=LogFormat.choices(),
                    default='color')

group = parser.add_argument_group('TCP receiver settings')
group.add_argument('--tcp-listen', type=str, default='0.0.0.0')
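auto_env_var_prefix='APP_' makes every option also readable from an environment variable derived from its long name (dashes become underscores, uppercased, prefix prepended). A minimal sketch of the mapping:
import os
from configargparse import ArgumentParser

os.environ['APP_FORKS'] = '8'           # maps to --forks
os.environ['APP_LOG_LEVEL'] = 'debug'   # maps to --log-level

p = ArgumentParser(auto_env_var_prefix='APP_')
p.add_argument('-f', '--forks', type=int, default=4)
p.add_argument('--log-level', default='info',
               choices=('debug', 'info', 'warning', 'error', 'fatal'))

args = p.parse_args([])
print(args.forks, args.log_level)   # 8 debug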
Example #55
0
 def add_arguments(self, parser: ArgumentParser):
     """Add general command line arguments to the parser."""
     parser.add_argument(
         "--arg-file",
         is_config_file=True,
         help="Load aca-py arguments from the specified file.  Note that\
         this file *must* be in YAML format.",
     )
     parser.add_argument(
         "--plugin",
         dest="external_plugins",
         type=str,
         action="append",
         required=False,
         metavar="<module>",
         env_var="ACAPY_PLUGIN",
         help="Load <module> as external plugin module. Multiple\
         instances of this parameter can be specified.",
     )
     parser.add_argument(
         "--storage-type",
         type=str,
         metavar="<storage-type>",
         env_var="ACAPY_STORAGE_TYPE",
         help="Specifies the type of storage provider to use for the internal\
         storage engine. This storage interface is used to store internal state.\
         Supported internal storage types are 'basic' (memory)\
         and 'indy'.  The default (if not specified) is 'indy' if the wallet type\
         is set to 'indy', otherwise 'basic'.",
     )
     parser.add_argument(
         "-e",
         "--endpoint",
         type=str,
         nargs="+",
         metavar="<endpoint>",
         env_var="ACAPY_ENDPOINT",
         help="Specifies the endpoints to put into DIDDocs\
         to inform other agents of where they should send messages destined\
         for this agent. Each endpoint could be one of the specified inbound\
         transports for this agent, or the endpoint could be that of\
         another agent (e.g. 'https://example.com/agent-endpoint') if the\
         routing of messages to this agent by a mediator is configured.\
         The first endpoint specified will be used in invitations.\
         The endpoints are used in the formation of a connection\
         with another agent.",
     )
     parser.add_argument(
         "--profile-endpoint",
         type=str,
         metavar="<profile_endpoint>",
         env_var="ACAPY_PROFILE_ENDPOINT",
         help="Specifies the profile endpoint for the (public) DID.",
     )
     parser.add_argument(
         "--read-only-ledger",
         action="store_true",
         env_var="ACAPY_READ_ONLY_LEDGER",
         help="Sets ledger to read-only to prevent updates.\
         Default: false.",
     )
     parser.add_argument(
         "--tails-server-base-url",
         type=str,
         metavar="<tails-server-base-url>",
         env_var="ACAPY_TAILS_SERVER_BASE_URL",
         help="Sets the base url of the tails server in use.",
     )
     parser.add_argument(
         "--tails-server-upload-url",
         type=str,
         metavar="<tails-server-upload-url>",
         env_var="ACAPY_TAILS_SERVER_UPLOAD_URL",
         help=
         "Sets the base url of the tails server for upload, defaulting to the\
         tails server base url.",
     )
Example #56
0
from configargparse import ArgumentParser, YAMLConfigFileParser


def create_argument_parser(*, prog: str = None):
    """Create an instance of an arg parser, force yaml format for external config."""
    return ArgumentParser(config_file_parser_class=YAMLConfigFileParser,
                          prog=prog)
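A hedged usage sketch for the parser returned above; the option names, program name, and config path are illustrative, and PyYAML must be installed for YAMLConfigFileParser:
import pathlib

parser = create_argument_parser(prog='demo')
parser.add_argument('-c', '--config', is_config_file=True)
parser.add_argument('--port', type=int, default=5005)

# YAMLConfigFileParser expects YAML key/value pairs keyed by the long option name.
pathlib.Path('config.yml').write_text('port: 8080\n')
args = parser.parse_args(['-c', 'config.yml'])
print(args.port)   # 8080, overriding the default but not an explicit --port flag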
Example #57
0
              'manifest_root': data_dir,
              'epochs': 230,
              'height': height,
              'width': width,
              'ssd_config': '[train:{}, val:{}]'.format(ssd_config_path, ssd_config_path_val)
              }

    util.write_config(config, config_path)

    # write annotation pickle
    if annot_save is not None:
        pickle.dump(data, open(annot_save, 'w'))


if __name__ == '__main__':
    parser = ArgumentParser()
    parser.add_argument('--data_dir', required=True, help='path to directory with vocdevkit data')
    parser.add_argument('--overwrite', action='store_true', help='overwrite files')
    parser.add_argument('--height', type=int, default=512, help='height of reshaped image')
    parser.add_argument('--width', type=int, default=512, help='width of reshaped image')
    parser.add_argument('--train_fraction', type=float, default=0.9, help='fraction of data to use for training')
    parser.add_argument('--annot_save', type=str, default=None,
                        help='separately save annotations to this file.')

    args = parser.parse_args()

    cities = ['AOI_1_Rio', 'AOI_2_Vegas_Train',
              'AOI_3_Paris_Train', 'AOI_4_Shanghai_Train', 'AOI_5_Khartoum_Train']

    ingest_spacenet(cities=cities, data_dir=args.data_dir, height=args.height, width=args.width,
                    overwrite=args.overwrite, annot_save=args.annot_save)
Example #58
0
# limitations under the License.
# ******************************************************************************

import logging
import sys
from configargparse import ArgumentParser

from nlp_architect.models.np2vec import NP2vec
from nlp_architect.utils.io import check_size, validate_existing_filepath

logging.basicConfig(stream=sys.stdout, level=logging.INFO)
logger = logging.getLogger(__name__)


if __name__ == "__main__":
    arg_parser = ArgumentParser(__doc__)
    arg_parser.add_argument(
        '--corpus',
        default='train.txt',
        type=str,
        action=check_size(min=1),
        help='path to the corpus. By default, '
             'it is the training set of the CONLL2000 shared task dataset.')
    arg_parser.add_argument(
        '--corpus_format',
        default='conll2000',
        type=str,
        choices=[
            'json',
            'txt',
            'conll2000'],