Example #1
import logging
import sys

# FLAGS, gfile, config and evaluate are module-level names defined elsewhere in the project.
def main(argv=None):    # pylint: disable=unused-argument
    logging.basicConfig(datefmt="%d/%Y %I:%M:%S", level=logging.INFO,
                        format='%(asctime)s [%(levelname)s] (%(filename)s:%(lineno)s) %(message)s')

    if not gfile.Exists(FLAGS.parameter_dir):
        print("parameter_dir {} does not exist.".format(FLAGS.parameter_dir))
        sys.exit(-1)

    if gfile.Exists(FLAGS.eval_log_dir):
        gfile.DeleteRecursively(FLAGS.eval_log_dir)

    gfile.MakeDirs(FLAGS.eval_log_dir)

    print(config.get_config_str())

    evaluate()
Example #2
import logging

# FLAGS, gfile, config and start_train are module-level names defined elsewhere in the project.
def main(argv=None):
    logging.basicConfig(
        datefmt="%d/%Y %I:%M:%S",
        level=logging.INFO,
        format='%(asctime)s [%(levelname)s] (%(filename)s:%(lineno)s) %(message)s')

    if gfile.Exists(FLAGS.parameter_dir):
        gfile.DeleteRecursively(FLAGS.parameter_dir)
    if gfile.Exists(FLAGS.train_log_dir):
        gfile.DeleteRecursively(FLAGS.train_log_dir)

    gfile.MakeDirs(FLAGS.parameter_dir)
    gfile.MakeDirs(FLAGS.train_log_dir)

    print(config.get_config_str())

    start_train()
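
Both examples above rely on module-level names (FLAGS, gfile, config, evaluate, start_train) that the project defines elsewhere. A minimal sketch, assuming TensorFlow 1.x, of how FLAGS and gfile are typically wired up; the flag names come from the snippets, while the default paths and help strings are placeholders:

import tensorflow as tf

gfile = tf.gfile            # file-system helpers used as gfile.Exists(...), gfile.MakeDirs(...)
FLAGS = tf.app.flags.FLAGS  # global flag container populated by the DEFINE_* calls below

# Default values here are placeholders, not the project's real paths.
tf.app.flags.DEFINE_string('parameter_dir', '/tmp/params', 'Directory holding model parameters.')
tf.app.flags.DEFINE_string('eval_log_dir', '/tmp/eval_logs', 'Directory for evaluation logs.')
tf.app.flags.DEFINE_string('train_log_dir', '/tmp/train_logs', 'Directory for training logs.')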
Example #3
import argparse

import torch
import torchvision.transforms as transforms

parser = argparse.ArgumentParser()
# The original snippet starts mid-call; the '--resume' flag name is inferred from the help text.
parser.add_argument('--resume',
                    action='store_true',
                    help='resume from checkpoint')
parser.add_argument('--seed',
                    type=int,
                    default=1,
                    metavar='S',
                    help='random seed (default: 1)')

args = parser.parse_args()

torch.manual_seed(args.seed)

# config is the project's configuration module (imported elsewhere); the rebinding
# below replaces it first with a Configuration instance and then with its dict.
config_file = 'config.yaml'
model_type = 'UNET'
config = config.Configuration(model_type, config_file)
print(config.get_config_str())
config = config.config_dict

device = 'cuda' if torch.cuda.is_available() else 'cpu'
best_acc = 0  # best test accuracy
start_epoch = 0  # start from epoch 0 or last checkpoint epoch

# Data
print('==> Preparing data..')
transform_train = transforms.Compose([
    transforms.RandomCrop(32, padding=4),
    transforms.RandomHorizontalFlip(),
    transforms.ToTensor(),
    transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
])
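
The Normalize values in Example #3 are the commonly used CIFAR-10 per-channel mean and standard deviation. A minimal usage sketch, assuming the transform feeds torchvision's CIFAR-10 dataset; the data path, batch size, and worker count are assumptions, not values from the original project:

import torch
import torchvision
import torchvision.transforms as transforms

transform_train = transforms.Compose([
    transforms.RandomCrop(32, padding=4),
    transforms.RandomHorizontalFlip(),
    transforms.ToTensor(),
    transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
])

# Placeholder path and loader settings.
trainset = torchvision.datasets.CIFAR10(root='./data', train=True,
                                        download=True, transform=transform_train)
trainloader = torch.utils.data.DataLoader(trainset, batch_size=128,
                                          shuffle=True, num_workers=2)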