def main():
    """Entry point for local testing of the build-file generator.

    Expects the VNDK snapshot to already be installed under
    prebuilts/vndk/v{version}.
    """
    build_top = utils.get_android_build_top()
    vndk_prebuilts_root = utils.join_realpath(build_top, 'prebuilts/vndk')

    args = get_args()
    version = args.vndk_version
    install_dir = os.path.join(vndk_prebuilts_root, 'v{}'.format(version))
    if not os.path.isdir(install_dir):
        raise ValueError(
            'Please provide valid VNDK version. {} does not exist.'.format(
                install_dir))
    utils.set_logging_config(args.verbose)

    # Regenerate every Android.bp of the snapshot: root, common, per-arch.
    generator = GenBuildFile(install_dir, version)
    generator.generate_root_android_bp()
    generator.generate_common_android_bp()
    generator.generate_android_bp()

    logging.info('Done.')
def main():
    """Local-testing entry point.

    The VNDK snapshot must already be installed under
    prebuilts/vndk/v{version}.
    """
    top = utils.get_android_build_top()
    prebuilts_dir = utils.join_realpath(top, 'prebuilts/vndk')

    args = get_args()
    version = args.vndk_version
    snapshot_dir = os.path.join(prebuilts_dir, 'v{}'.format(version))
    if not os.path.isdir(snapshot_dir):
        # Bail out early when the requested snapshot is not installed.
        message = ('Please provide valid VNDK version. {} does not exist.'
                   .format(snapshot_dir))
        raise ValueError(message)
    utils.set_logging_config(args.verbose)

    gen = GenBuildFile(snapshot_dir, version)
    gen.generate_root_android_bp()
    gen.generate_common_android_bp()
    gen.generate_android_bp()

    logging.info('Done.')
Exemple #3
0
def main():
    """Entry point for local testing of the GPL license checker.

    Note: the VNDK snapshot must already be installed under
    prebuilts/vndk/v{version}.
    """
    build_top = utils.get_android_build_top()
    vndk_prebuilts = utils.join_realpath(build_top, 'prebuilts/vndk')

    args = get_args()
    version = args.vndk_version
    install_dir = os.path.join(vndk_prebuilts, 'v{}'.format(version))
    remote = args.remote
    if not os.path.isdir(install_dir):
        raise ValueError(
            'Please provide valid VNDK version. {} does not exist.'.format(
                install_dir))
    utils.set_logging_config(args.verbose)

    # Fetch the build manifest into a scratch directory; the checker
    # reads it from there.
    scratch_dir = tempfile.mkdtemp()
    os.chdir(scratch_dir)
    manifest_pattern = 'manifest_{}.xml'.format(args.build)
    manifest_dest = os.path.join(scratch_dir, utils.MANIFEST_FILE_NAME)
    logging.info('Fetching {file} from {branch} (bid: {build})'.format(
        file=manifest_pattern, branch=args.branch, build=args.build))
    utils.fetch_artifact(args.branch, args.build, manifest_pattern,
                         manifest_dest)

    checker = GPLChecker(build_top and install_dir or install_dir,
                         build_top, scratch_dir, remote)
    try:
        checker.check_gpl_projects()
    except ValueError as error:
        logging.error('Error: {}'.format(error))
        raise
    finally:
        # Always remove the scratch dir, even when the check fails.
        logging.info(
            'Deleting temp_artifact_dir: {}'.format(scratch_dir))
        shutil.rmtree(scratch_dir)

    logging.info('Done.')
def main():
    """Run the GPL project check against a locally installed snapshot.

    Note: VNDK snapshot must be already installed under
    prebuilts/vndk/v{version}.
    """
    android_top = utils.get_android_build_top()
    prebuilts_vndk = utils.join_realpath(android_top, 'prebuilts/vndk')

    args = get_args()
    version = args.vndk_version
    install_dir = os.path.join(prebuilts_vndk, 'v{}'.format(version))
    remote = args.remote
    if not os.path.isdir(install_dir):
        raise ValueError(
            'Please provide valid VNDK version. {} does not exist.'
            .format(install_dir))
    utils.set_logging_config(args.verbose)

    temp_dir = tempfile.mkdtemp()
    os.chdir(temp_dir)
    # The manifest of the given build id is downloaded next to us.
    manifest_name = 'manifest_{}.xml'.format(args.build)
    manifest_path = os.path.join(temp_dir, utils.MANIFEST_FILE_NAME)
    logging.info(
        f'Fetching {manifest_name} from {args.branch} (bid: {args.build})')
    utils.fetch_artifact(args.branch, args.build, manifest_name,
                         manifest_path)

    license_checker = GPLChecker(install_dir, android_top, temp_dir, remote)
    try:
        license_checker.check_gpl_projects()
    except ValueError as error:
        logging.error('Error: {}'.format(error))
        raise
    finally:
        logging.info('Deleting temp_artifact_dir: {}'.format(temp_dir))
        shutil.rmtree(temp_dir)

    logging.info('Done.')
def main():
    """Program entry point.

    Installs a VNDK snapshot under prebuilts/vndk/v{vndk_version} — either
    from a local directory (--local) or from fetched build artifacts
    (--branch and --build) — regenerates the build files, runs a GPL
    license check for non-local installs, and creates a git commit.

    Raises:
        ValueError: if --local is combined with --branch/--build, or if
            neither --local nor both --branch and --build are provided.
        RuntimeError: if the --local directory or the snapshot install
            directory does not exist.
    """
    args = get_args()

    # --local is mutually exclusive with --branch/--build; otherwise both
    # --branch and --build are required to fetch the snapshot artifacts.
    if args.local:
        if args.build or args.branch:
            raise ValueError(
                'When --local option is set, --branch or --build cannot be '
                'specified.')
        elif not os.path.isdir(args.local):
            raise RuntimeError(
                'The specified local directory, {}, does not exist.'.format(
                    args.local))
    else:
        if not (args.build and args.branch):
            raise ValueError(
                'Please provide both --branch and --build or set --local '
                'option.')

    vndk_version = str(args.vndk_version)

    # The per-version git project must already exist before installing.
    install_dir = os.path.join(PREBUILTS_VNDK_DIR, 'v{}'.format(vndk_version))
    if not os.path.isdir(install_dir):
        raise RuntimeError(
            'The directory for VNDK snapshot version {ver} does not exist.\n'
            'Please request a new git project for prebuilts/vndk/v{ver} '
            'before installing new snapshot.'.format(ver=vndk_version))

    utils.set_logging_config(args.verbose)

    # All subsequent git/file operations run inside the install directory.
    os.chdir(install_dir)

    if not args.use_current_branch:
        start_branch(args.build)

    # Start from a clean tree: drop the previous snapshot contents first.
    remove_old_snapshot(install_dir)
    os.makedirs(utils.COMMON_DIR_PATH)

    # Remote installs stage fetched artifacts in a temp dir that is always
    # cleaned up in the finally block below.
    temp_artifact_dir = None
    if not args.local:
        temp_artifact_dir = tempfile.mkdtemp()

    try:
        install_snapshot(args.branch, args.build, args.local, install_dir,
                         temp_artifact_dir)
        gather_notice_files(install_dir)
        revise_ld_config_txt_if_needed(vndk_version)

        buildfile_generator = GenBuildFile(install_dir, vndk_version)
        update_buildfiles(buildfile_generator)

        # The GPL check needs the fetched manifest, so it only runs for
        # non-local (remote) installs.
        if not args.local:
            license_checker = GPLChecker(install_dir, ANDROID_BUILD_TOP,
                                         temp_artifact_dir)
            check_gpl_license(license_checker)
            logger.info(
                'Successfully updated VNDK snapshot v{}'.format(vndk_version))
    except Exception as error:
        logger.error('FAILED TO INSTALL SNAPSHOT: {}'.format(error))
        raise
    finally:
        if temp_artifact_dir:
            logger.info(
                'Deleting temp_artifact_dir: {}'.format(temp_artifact_dir))
            shutil.rmtree(temp_artifact_dir)

    if not args.local:
        commit(args.branch, args.build, vndk_version)
        logger.info('Successfully created commit for VNDK snapshot v{}'.format(
            vndk_version))

    logger.info('Done.')
Exemple #6
0
    commands.py hello  [--name=<TXT>] [-q | -d]
    commands.py -h
    commands.py -v

Options:
    -h, --help       display this message and exit
    -v, --version    display version
    -n, --name=TXT   greeting name [default: You]
    -q, --quiet      set log level to WARNING [default: INFO]
    -d, --debug      set log level to DEBUG [default: INFO]
"""
import logging

from docopt import docopt
from docopt_dispatch import dispatch

from utils import set_logging_config
from settings import VERSION


@dispatch.on('hello')
def do_hello(name=None, **kwargs):
    """Print a greeting addressed to *name*."""
    print(f"Hello, {name}!")


if __name__ == '__main__':
    # Parse the module docstring usage, configure logging from the parsed
    # options, then hand control to docopt-dispatch.
    cli_options = docopt(__doc__)
    set_logging_config(cli_options)
    logging.debug(cli_options)
    dispatch(__doc__, version=VERSION)
Exemple #7
0
def main():
    """Entry point for DPGN few-shot classification training/evaluation.

    Parses command-line arguments, loads the experiment config module,
    seeds all RNGs for reproducibility, builds the backbone encoder and
    the DPGN graph network, restores the best checkpoint when one exists,
    wires up the train/val/test episodic data loaders, and dispatches to
    ``DPGNTrainer.train()`` or ``DPGNTrainer.eval()``.
    """
    parser = argparse.ArgumentParser()

    parser.add_argument('--device',
                        type=str,
                        default='cuda:0',
                        help='gpu device number of using')

    parser.add_argument(
        '--config',
        type=str,
        default=os.path.join('.', 'config',
                             '5way_1shot_resnet12_mini-imagenet.py'),
        help='config file with parameters of the experiment. '
        'It is assumed that the config file is placed under the directory ./config'
    )

    parser.add_argument(
        '--checkpoint_dir',
        type=str,
        default=os.path.join('.', 'checkpoints'),
        help='path that checkpoint will be saved and loaded. '
        'It is assumed that the checkpoint file is placed under the directory ./checkpoints'
    )

    parser.add_argument('--num_gpu', type=int, default=1, help='number of gpu')

    parser.add_argument('--display_step',
                        type=int,
                        default=100,
                        help='display training information in how many step')

    parser.add_argument('--log_step',
                        type=int,
                        default=100,
                        help='log information in how many steps')

    parser.add_argument(
        '--log_dir',
        type=str,
        default=os.path.join('.', 'logs'),
        help='path that log will be saved. '
        'It is assumed that the checkpoint file is placed under the directory ./logs'
    )

    parser.add_argument('--dataset_root',
                        type=str,
                        default='./data',
                        help='root directory of dataset')

    parser.add_argument('--seed', type=int, default=222, help='random seed')

    parser.add_argument('--mode',
                        type=str,
                        default='train',
                        help='train or eval')

    args_opt = parser.parse_args()

    config_file = args_opt.config

    # Set train and test datasets and the corresponding data loaders.
    # NOTE(review): the `imp` module is deprecated (removed in Python 3.12);
    # consider migrating to importlib machinery for loading the config.
    config = imp.load_source("", config_file).config
    train_opt = config['train_config']
    eval_opt = config['eval_config']

    args_opt.exp_name = '{}way_{}shot_{}_{}'.format(train_opt['num_ways'],
                                                    train_opt['num_shots'],
                                                    config['backbone'],
                                                    config['dataset_name'])
    train_opt['num_queries'] = 1
    eval_opt['num_queries'] = 1
    set_logging_config(os.path.join(args_opt.log_dir, args_opt.exp_name))
    logger = logging.getLogger('main')

    # Load the configuration params of the experiment
    logger.info('Launching experiment from: {}'.format(config_file))
    logger.info('Generated logs will be saved to: {}'.format(args_opt.log_dir))
    logger.info('Generated checkpoints will be saved to: {}'.format(
        args_opt.checkpoint_dir))
    print()

    logger.info('-------------command line arguments-------------')
    logger.info(args_opt)
    print()
    logger.info('-------------configs-------------')
    logger.info(config)

    # Set every RNG (numpy, torch CPU/GPU, python) and force deterministic
    # cuDNN so runs are reproducible for a given --seed.
    np.random.seed(args_opt.seed)
    torch.manual_seed(args_opt.seed)
    torch.cuda.manual_seed_all(args_opt.seed)
    random.seed(args_opt.seed)
    torch.backends.cudnn.deterministic = True
    torch.backends.cudnn.benchmark = False

    # Resolve the dataset class from the config name.
    if config['dataset_name'] == 'mini-imagenet':
        dataset = MiniImagenet
        print('Dataset: MiniImagenet')
    elif config['dataset_name'] == 'tiered-imagenet':
        dataset = TieredImagenet
        print('Dataset: TieredImagenet')
    elif config['dataset_name'] == 'cifar-fs':
        dataset = Cifar
        print('Dataset: Cifar')
    elif config['dataset_name'] == 'cub-200-2011':
        dataset = CUB200
        print('Dataset: CUB200')
    else:
        logger.info(
            'Invalid dataset: {}, please specify a dataset from '
            'mini-imagenet, tiered-imagenet, cifar-fs and cub-200-2011.'.
            format(config['dataset_name']))
        exit()

    # CIFAR-style datasets need a backbone variant (presumably for the
    # smaller input resolution — see ResNet12/ConvNet constructors).
    cifar_flag = 'cifar' in args_opt.exp_name
    if config['backbone'] == 'resnet12':
        enc_module = ResNet12(emb_size=config['emb_size'],
                              cifar_flag=cifar_flag)
        print('Backbone: ResNet12')
    elif config['backbone'] == 'resnet18':
        enc_module = ResNet18(emb_size=config['emb_size'])
        print('Backbone: ResNet18')
    elif config['backbone'] == 'wrn':
        enc_module = WRN(emb_size=config['emb_size'])
        print('Backbone: WRN')
    elif config['backbone'] == 'convnet':
        enc_module = ConvNet(emb_size=config['emb_size'],
                             cifar_flag=cifar_flag)
        print('Backbone: ConvNet')
    else:
        logger.info(
            'Invalid backbone: {}, please specify a backbone model from '
            'convnet, resnet12, resnet18 and wrn.'.format(config['backbone']))
        exit()

    gnn_module = DPGN(
        config['num_generation'], train_opt['dropout'],
        train_opt['num_ways'] * train_opt['num_shots'],
        train_opt['num_ways'] * train_opt['num_shots'] +
        train_opt['num_ways'] * train_opt['num_queries'],
        train_opt['loss_indicator'], config['point_distance_metric'],
        config['distribution_distance_metric'])

    # multi-gpu configuration
    for i in range(args_opt.num_gpu):
        print('GPU: {}  Spec: {}'.format(i, torch.cuda.get_device_name(i)))

    if args_opt.num_gpu > 1:
        print('Construct multi-gpu model ...')
        enc_module = nn.DataParallel(enc_module,
                                     device_ids=range(args_opt.num_gpu),
                                     dim=0)
        gnn_module = nn.DataParallel(gnn_module,
                                     device_ids=range(args_opt.num_gpu),
                                     dim=0)
        print('done!\n')

    # Restore the best checkpoint if one exists; otherwise start at step 0.
    if not os.path.exists(
            os.path.join(args_opt.checkpoint_dir, args_opt.exp_name)):
        os.makedirs(os.path.join(args_opt.checkpoint_dir, args_opt.exp_name))
        logger.info('no checkpoint for model: {}, make a new one at {}'.format(
            args_opt.exp_name,
            os.path.join(args_opt.checkpoint_dir, args_opt.exp_name)))
        best_step = 0
    else:
        if not os.path.exists(
                os.path.join(args_opt.checkpoint_dir, args_opt.exp_name,
                             'model_best.pth.tar')):
            best_step = 0
        else:
            logger.info('find a checkpoint, loading checkpoint from {}'.format(
                os.path.join(args_opt.checkpoint_dir, args_opt.exp_name)))
            best_checkpoint = torch.load(
                os.path.join(args_opt.checkpoint_dir, args_opt.exp_name,
                             'model_best.pth.tar'))

            logger.info('best model pack loaded')
            best_step = best_checkpoint['iteration']
            enc_module.load_state_dict(
                best_checkpoint['enc_module_state_dict'])
            gnn_module.load_state_dict(
                best_checkpoint['gnn_module_state_dict'])
            logger.info(
                'current best test accuracy is: {}, at step: {}'.format(
                    best_checkpoint['test_acc'], best_step))

    dataset_train = dataset(root=args_opt.dataset_root, partition='train')
    dataset_valid = dataset(root=args_opt.dataset_root, partition='val')
    dataset_test = dataset(root=args_opt.dataset_root, partition='test')

    # Episodic loaders: each batch is num_tasks few-shot tasks of
    # num_ways x (num_shots + num_queries) samples.
    train_loader = DataLoader(dataset_train,
                              num_tasks=train_opt['batch_size'],
                              num_ways=train_opt['num_ways'],
                              num_shots=train_opt['num_shots'],
                              num_queries=train_opt['num_queries'],
                              epoch_size=train_opt['iteration'])
    valid_loader = DataLoader(dataset_valid,
                              num_tasks=eval_opt['batch_size'],
                              num_ways=eval_opt['num_ways'],
                              num_shots=eval_opt['num_shots'],
                              num_queries=eval_opt['num_queries'],
                              epoch_size=eval_opt['iteration'])
    test_loader = DataLoader(dataset_test,
                             num_tasks=eval_opt['batch_size'],
                             num_ways=eval_opt['num_ways'],
                             num_shots=eval_opt['num_shots'],
                             num_queries=eval_opt['num_queries'],
                             epoch_size=eval_opt['iteration'])

    data_loader = {
        'train': train_loader,
        'val': valid_loader,
        'test': test_loader
    }

    # create trainer
    trainer = DPGNTrainer(enc_module=enc_module,
                          gnn_module=gnn_module,
                          data_loader=data_loader,
                          log=logger,
                          arg=args_opt,
                          config=config,
                          best_step=best_step)

    if args_opt.mode == 'train':
        trainer.train()
    elif args_opt.mode == 'eval':
        trainer.eval()
    else:
        print('select a mode')
        exit()
Exemple #8
0
        help=""" NSML mode setting """,
    )
    parser.add_argument(
        "--iteration",
        type=int,
        default=0,
        help=""" NSML default setting """,
    )
    parser.add_argument(
        "--pause",
        type=int,
        default=0,
        help=""" NSML default setting """,
    )
    args = parser.parse_args()

    with open(args.base_config, "r") as f:
        defined_config = json.load(f)
    config = NestedNamespace()
    config.load_from_json(defined_config)
    config.nsml = args

    set_logging_config()

    if args.mode == "train_and_evaluate":
        re_train_and_evaluate(config)
    elif args.mode == "test" or args.mode == "infer":
        test(config)
    else:
        raise ValueError(f"Unrecognized mode. {config.mode}")
def main():
    """Program entry point.

    Installs a VNDK snapshot under prebuilts/vndk/v{vndk_version}, either
    from a local snapshot directory (--local) or from fetched build
    artifacts (--branch and --build), regenerates the build files, runs a
    GPL license check for non-local installs, and creates a git commit.

    Raises:
        ValueError: if --local is combined with --branch/--build, or if
            neither --local nor both --branch and --build are provided.
        RuntimeError: if the --local directory or the snapshot install
            directory does not exist.
    """
    args = get_args()

    # Expand ~ in the user-supplied local path before validating it.
    local = None
    if args.local:
        local = os.path.expanduser(args.local)

    # --local is mutually exclusive with --branch/--build; otherwise both
    # --branch and --build are required to fetch the snapshot artifacts.
    if local:
        if args.build or args.branch:
            raise ValueError(
                'When --local option is set, --branch or --build cannot be '
                'specified.')
        elif not os.path.isdir(local):
            raise RuntimeError(
                'The specified local directory, {}, does not exist.'.format(
                    local))
    else:
        if not (args.build and args.branch):
            raise ValueError(
                'Please provide both --branch and --build or set --local '
                'option.')

    vndk_version = args.vndk_version

    # The per-version git project must already exist before installing.
    install_dir = os.path.join(PREBUILTS_VNDK_DIR, 'v{}'.format(vndk_version))
    if not os.path.isdir(install_dir):
        raise RuntimeError(
            'The directory for VNDK snapshot version {ver} does not exist.\n'
            'Please request a new git project for prebuilts/vndk/v{ver} '
            'before installing new snapshot.'.format(ver=vndk_version))

    utils.set_logging_config(args.verbose)

    # All subsequent git/file operations run inside the install directory.
    os.chdir(install_dir)

    if not args.use_current_branch:
        start_branch(args.build)

    # Start from a clean tree: drop the previous snapshot contents first.
    remove_old_snapshot(install_dir)
    os.makedirs(utils.COMMON_DIR_PATH)

    # Remote installs stage fetched artifacts in a temp dir that is always
    # cleaned up in the finally block below.
    temp_artifact_dir = None
    if not local:
        temp_artifact_dir = tempfile.mkdtemp()

    try:
        install_snapshot(args.branch, args.build, local, install_dir,
                         temp_artifact_dir)
        gather_notice_files(install_dir)
        post_processe_files_if_needed(vndk_version)

        buildfile_generator = GenBuildFile(install_dir, vndk_version)
        update_buildfiles(buildfile_generator)

        # The GPL check needs the fetched manifest, so it only runs for
        # non-local (remote) installs.
        if not local:
            license_checker = GPLChecker(install_dir, ANDROID_BUILD_TOP,
                                         temp_artifact_dir, args.remote)
            check_gpl_license(license_checker)
            logging.info(
                'Successfully updated VNDK snapshot v{}'.format(vndk_version))
    except Exception as error:
        logging.error('FAILED TO INSTALL SNAPSHOT: {}'.format(error))
        raise
    finally:
        if temp_artifact_dir:
            logging.info(
                'Deleting temp_artifact_dir: {}'.format(temp_artifact_dir))
            shutil.rmtree(temp_artifact_dir)

    if not local:
        commit(args.branch, args.build, vndk_version)
        logging.info(
            'Successfully created commit for VNDK snapshot v{}'.format(
                vndk_version))

    logging.info('Done.')
Exemple #10
0
from commands import AddFixturesCommand, MakeMigrationsCommand, MigrateCommand, RunCommand
from settings import SERVICE_NAME, current_settings
from utils import parse_args, set_logging_config

# Registry mapping CLI sub-command names to their command classes.
COMMANDS = {
    "makemigrations": MakeMigrationsCommand,
    "migrate": MigrateCommand,
    "addfixtures": AddFixturesCommand,
    "run": RunCommand,
}

if __name__ == "__main__":
    args = parse_args(message=SERVICE_NAME, commands=COMMANDS.keys())

    # Load settings from an INI file when one is given, otherwise from
    # the environment.
    if args.config is None:
        current_settings.from_env(service=SERVICE_NAME)
    else:
        current_settings.from_ini(service=SERVICE_NAME, filename=args.config)

    # File logging is configured only when both filetype and filename
    # are present in the settings.
    log_settings = current_settings.logging
    if log_settings.filetype and log_settings.filename:
        set_logging_config(
            level=log_settings.level,
            filetype=log_settings.filetype,
            filename=log_settings.filename,
        )

    command = COMMANDS[args.command](settings=current_settings)
    command.run()