Пример #1
0
def infer(valid_queue, model, criterion):
    """Evaluate *model* over one full pass of the validation loader.

    Runs under ``torch.no_grad()`` with the model in eval mode, accumulating
    the running loss and top-1/top-5 accuracy via ``utils.AvgrageMeter``.

    Args:
        valid_queue: iterable yielding ``(inputs, target)`` batches.
        model: network to evaluate; switched to eval mode here.
        criterion: loss function applied to the model's logits.

    Returns:
        Tuple ``(top1_avg, loss_avg)`` averaged over all validation samples.
    """
    objs = utils.AvgrageMeter()
    top1 = utils.AvgrageMeter()
    top5 = utils.AvgrageMeter()
    model.eval()
    args = get_darts_args()

    with torch.no_grad():
        # `inputs` instead of `input` — avoids shadowing the builtin.
        for step, (inputs, target) in enumerate(valid_queue):
            inputs = inputs.cuda()
            target = target.cuda(non_blocking=True)
            logits = model(inputs)
            loss = criterion(logits, target)

            prec1, prec5 = utils.accuracy(logits, target, topk=(1, 5))
            n = inputs.size(0)
            # .item() is the supported way to read a 0-dim tensor's value;
            # the old .data.item() goes through a legacy autograd escape hatch.
            objs.update(loss.item(), n)
            top1.update(prec1.item(), n)
            top5.update(prec5.item(), n)

            if step % args['report_freq'] == 0:
                logging.info('valid %03d %e %f %f', step, objs.avg, top1.avg,
                             top5.avg)

    return top1.avg, objs.avg
Пример #2
0
def train(train_queue, valid_queue, model, architect, criterion, optimizer, lr,
          epoch):
    """Run one epoch of DARTS bilevel training.

    For each training batch, optionally performs an architecture-parameter
    update (``architect.step``) on a batch drawn from the search/validation
    loader, then performs a weight update on the training batch with gradient
    clipping.

    Args:
        train_queue: loader of ``(inputs, target)`` batches for weight updates.
        valid_queue: loader cycled with replacement for architecture updates.
        model: network being trained (set to train mode each step).
        architect: object performing the architecture update.
        criterion: loss function applied to the model's logits.
        optimizer: weight optimizer.
        lr: current learning rate, forwarded to ``architect.step``.
        epoch: current epoch index; architecture updates only run once it
            reaches the warm-up threshold.

    Returns:
        Tuple ``(top1_avg, loss_avg)`` averaged over the epoch.
    """
    objs = utils.AvgrageMeter()
    top1 = utils.AvgrageMeter()
    top5 = utils.AvgrageMeter()

    args = get_darts_args()

    # Initialize the search-queue iterator up front instead of relying on a
    # bare `except:` catching the NameError from an undefined name (the old
    # pattern also silently swallowed unrelated errors).
    valid_queue_iter = iter(valid_queue)

    for step, (inputs, target) in enumerate(train_queue):
        model.train()
        n = inputs.size(0)
        inputs = inputs.cuda()
        target = target.cuda(non_blocking=True)

        # get a random minibatch from the search queue with replacement:
        # restart the iterator whenever it is exhausted.
        try:
            input_search, target_search = next(valid_queue_iter)
        except StopIteration:
            valid_queue_iter = iter(valid_queue)
            input_search, target_search = next(valid_queue_iter)
        input_search = input_search.cuda()
        target_search = target_search.cuda(non_blocking=True)

        # NOTE(review): the 15-epoch warm-up threshold is hard-coded here —
        # confirm it matches the intended schedule / move it into the args.
        if epoch >= 15:
            architect.step(inputs,
                           target,
                           input_search,
                           target_search,
                           lr,
                           optimizer,
                           unrolled=args['unrolled'])

        optimizer.zero_grad()
        logits = model(inputs)
        loss = criterion(logits, target)

        loss.backward()
        # clip_grad_norm was deprecated (and later removed); the in-place
        # clip_grad_norm_ is the supported API with identical semantics.
        nn.utils.clip_grad_norm_(model.parameters(), args['grad_clip'])
        optimizer.step()

        prec1, prec5 = utils.accuracy(logits, target, topk=(1, 5))
        objs.update(loss.item(), n)
        top1.update(prec1.item(), n)
        top5.update(prec5.item(), n)

        if step % args['report_freq'] == 0:
            logging.info('train %03d %e %f %f', step, objs.avg, top1.avg,
                         top5.avg)

    return top1.avg, objs.avg
Пример #3
0
                                default=0,
                                help='Experiment number',
                                type=str)
    cmdline_parser.add_argument('-v', '--verbose',
                                default='INFO',
                                choices=['INFO', 'DEBUG'],
                                help='verbosity')
    args, unknowns = cmdline_parser.parse_known_args()
    log_lvl = logging.INFO if args.verbose == 'INFO' else logging.DEBUG
    logging.basicConfig(level=log_lvl)

    if unknowns:
        logging.warning('Found unknown arguments!')
        logging.warning(str(unknowns))
        logging.warning('These will be ignored')
    darts_args = get_darts_args()
    darts_args['seed'] = args.seed
    exp_name = f'EXP{str(args.exp_no)}'
    exp_dir = f"./{exp_name}"
    if not os.path.exists(exp_dir):
        os.mkdir(exp_dir)
    darts(exp_name, darts_args)
    # _ = main(
    #         bohb_config,
    #         data_dir=settings.data_dir,
    #         num_epochs=int(20),
    #         batch_size=bohb_config['batch_size'],
    #         learning_rate=bohb_config['learning_rate'],
    #         train_criterion=settings.loss_dict[bohb_config['training_loss']],
    #         model_optimizer=settings.opti_dict[bohb_config['optimizer']],
    #         data_augmentations=None,