Ejemplo n.º 1
0
def main(_):
    """Load and run two example saved models (NIN/CIFAR-10, ResNet/CIFAR-100).

    Args:
        _: Unused positional argument (the argv list passed by the app runner).

    Raises:
        ValueError: If ``root_dir`` has not been filled in below.
    """
    # Please make sure that a root dir is specified before running this script!
    root_dir = None
    # Fail fast with a clear message instead of letting the placeholder None
    # propagate into load_and_run and fail somewhere deep in the stack.
    if root_dir is None:
        raise ValueError(
            'root_dir is not set; please specify a root dir before running '
            'this script.')

    model_config = mc.ModelConfig(model_type='nin', dataset='cifar10')
    load_and_run(model_config, root_dir)
    print('Loaded a NIN_CIFAR10 model.')

    # example for resnet cifar100
    model_config = mc.ModelConfig(model_type='resnet', dataset='cifar100')
    load_and_run(model_config, root_dir)
    print('Loaded a RESNET_CIFAR100 model.')
Ejemplo n.º 2
0
def main(_):
  """Demonstrate loading, running and evaluating two stored model configs.

  Loads a NIN/CIFAR-10 model, reports its evaluated and stored accuracies,
  then loads a ResNet/CIFAR-100 model.

  Args:
    _: Unused positional argument (argv from the app runner).
  """
  # Please make sure that a root dir is specified before running this script!
  root_dir = None

  nin_config = mc.ModelConfig(
      model_type='nin', dataset='cifar10', root_dir=root_dir)
  load_and_run(nin_config, root_dir)
  print('Loaded a NIN_CIFAR10 model.')
  print('Evaluating the NIN_CIFAR10 model.')
  test_accuracy = evaluate_model(nin_config, root_dir)
  print(f'Test Accuracy: {test_accuracy}')
  print(f'Stored Test Accuracy: {nin_config.test_stats()}')
  print(f'Stored Train Accuracy: {nin_config.training_stats()}')
  print('==========================================')

  # example for resnet cifar100
  resnet_config = mc.ModelConfig(model_type='resnet', dataset='cifar100')
  load_and_run(resnet_config, root_dir)
  print('Loaded a RESNET_CIFAR100 model.')
Ejemplo n.º 3
0
    # NOTE(review): this span is the interior of a larger function — its `def`
    # line and the body of the final `try:` lie outside this excerpt.
    count = 0  # Number of ModelConfigs generated by the sweep below.

    # Exhaustive grid sweep: one ModelConfig per combination of the
    # hyperparameter lists stored under d[key].
    # Presumably d[key] is a dict of hyperparameter-name -> list of values;
    # verify against the caller.
    for wide_multiplier in d[key]['wide_multiplier']:
        for batchnorm in d[key]['batchnorm']:
            for dropout_prob in d[key]['dropout_prob']:
                for augmentation in d[key]['augmentation']:
                    for decay_fac in d[key]['decay_fac']:
                        for copy in d[key]['copy']:
                            for normalization in d[key]['normalization']:
                                for learning_rate in d[key]['learning_rate']:
                                    # Build the config for this combination.
                                    # Note the loop variables map onto
                                    # differently-named kwargs: augmentation ->
                                    # data_augmentation, decay_fac ->
                                    # l2_decay_factor.
                                    model_config = mc.ModelConfig(
                                        model_type=model_type,
                                        dataset=dataset,
                                        wide_multiplier=wide_multiplier,
                                        batchnorm=batchnorm,
                                        dropout_prob=dropout_prob,
                                        data_augmentation=augmentation,
                                        l2_decay_factor=decay_fac,
                                        normalization=normalization,
                                        learning_rate=learning_rate,
                                        copy=copy,
                                        root_dir=root_dir)
                                    count += 1
                                    # Stats default to NaN; presumably filled
                                    # in by the `try:` body below when results
                                    # are available — confirm downstream.
                                    # NOTE(review): np.NaN was removed in
                                    # NumPy 2.0; prefer np.nan here.
                                    train_loss = np.NaN
                                    train_cross_entropy = np.NaN
                                    train_global_step = np.NaN
                                    train_accuracy = np.NaN
                                    eval_loss = np.NaN
                                    eval_cross_entropy = np.NaN
                                    eval_global_step = np.NaN
                                    eval_accuracy = np.NaN
                                    try:  # (body continues past this excerpt)