model.seen += imgs.size(0)

        # NOTE(review): this fragment is cut at both edges by the snippet
        # aggregator; the enclosing epoch loop and the definitions of `model`,
        # `opt`, `imgs`, `loss`, `logger`, and `valid_path` are not visible.
        # Run validation only every `evaluation_interval` epochs, and never
        # before epoch 20 — TODO confirm the hard-coded `19` warm-up is intended.
        if (epoch + 1) % opt.evaluation_interval == 0 and epoch >= 19:
            print("\n---- Evaluating Model ----")
            print(opt.weights_folder)
            # Evaluate the model on the validation set.
            # Use a stricter confidence threshold once the latest training
            # loss is low; `loss` is presumably the last batch loss set by the
            # (unseen) training loop above — verify it is defined on this path.
            if loss.item() < 4:
                conf_thres = 0.3
            else:
                conf_thres = 0.1
            loss, precision, recall, AP, f1, ap_class = evaluate(
                model,
                path=valid_path,
                iou_thres=0.5,
                conf_thres=conf_thres,  # !!! original 0.5, current 0.3
                nms_thres=0.05,  # !!! original 0.5
                img_size=640,
                batch_size=opt.batch_size,
            )
            # Collapse per-class metrics into scalar means for logging.
            evaluation_metrics = [
                ("val_precision", precision.mean()),
                ("val_recall", recall.mean()),
                ("val_mAP", AP.mean()),
                ("val_f1", f1.mean()),
            ]
            logger.list_of_scalars_summary(evaluation_metrics, epoch)

            # Print class APs and mAP
            ap_table = [["Index", "Class name", "AP"]]
            # NOTE(review): the loop body is missing — truncated at the
            # fragment boundary below.
            for i, c in enumerate(ap_class):
# ---- Ejemplo n.º 2 (snippet-aggregator fragment separator; not code) ----
# 0
            # NOTE(review): fragment starts mid training loop; `features`,
            # `clss`, `iters`, `train_info`, `criterion`, `optimizer`, and
            # `feature_stractor` are defined in unseen code above.
            # `classifier` appears to return a pair; only the second element
            # (the logits) is used — TODO confirm what the first element is.
            _, output = classifier(features)
            loss = criterion(output, clss)

            print('Back propagation')
            # Standard zero-grad / backward / step update.
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()

            # Log the scalar training loss to TensorBoard at this iteration.
            writer.add_scalar('loss', loss.data.cpu().numpy(), iters)
            train_info += ' loss: {:.4f}'.format(loss.data.cpu().numpy())

            print(train_info)
        if epoch % args.val_epoch == 0:
            ''' evaluate the model '''
            acc = evaluate(feature_stractor, classifier, val_loader)
            writer.add_scalar('val_acc', acc, iters)
            print('Epoch: [{}] ACC:{}'.format(epoch, acc))
            ''' save best model '''
            # Keep the best-so-far feature extractor and classifier weights.
            if acc > best_acc:
                save_model(
                    feature_stractor,
                    os.path.join(feaStr_path, 'model_best_feaStr.pth.tar'))
                save_model(
                    classifier,
                    os.path.join(class_path, 'model_best_class.pth.tar'))
                best_acc = acc
        ''' save model '''
        # Also checkpoint the feature extractor every epoch, regardless of
        # validation accuracy. Fragment is truncated after this call.
        save_model(
            feature_stractor,
            os.path.join(feaStr_path, 'model_{}_feaStr.pth.tar'.format(epoch)))
# ---- Ejemplo n.º 3 (snippet-aggregator fragment separator; not code) ----
# 0
            # NOTE(review): fragment starts mid training loop; `features`,
            # `clss`, `iters`, `train_info`, `losses`, `criterion`, and
            # `optimizer` come from unseen code above.
            _, output = classifier(features)
            loss = criterion(output, clss)

            # Standard zero-grad / backward / step update.
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()

            writer.add_scalar('loss', loss.data.cpu().numpy(), iters)
            # NOTE(review): appending the raw `loss` tensor (not
            # `loss.item()`) likely keeps each iteration's autograd graph
            # alive and grows memory — consider `losses.append(loss.item())`.
            losses.append(loss)
            train_info += ' loss: {:.4f}'.format(loss.data.cpu().numpy())

            #print(train_info)
        if epoch % args.val_epoch == 0:
            ''' evaluate the model '''
            acc = evaluate(classifier, val_loader)
            writer.add_scalar('val_acc', acc, iters)
            print('Epoch: [{}] ACC:{}'.format(epoch, acc))
            ''' save best model '''
            # Keep the best-so-far classifier weights.
            if acc > best_acc:
                save_model(
                    classifier,
                    os.path.join(args.save_dir, 'model_best_class.pth.tar'))
                best_acc = acc
        ''' save model '''
        # Also checkpoint every epoch, regardless of validation accuracy.
        save_model(
            classifier,
            os.path.join(args.save_dir,
                         'model_{}_class.pth.tar'.format(epoch)))

        # NOTE(review): `losses` holds tensors (see above); plotting them
        # directly may fail on GPU tensors — confirm they are detached/CPU.
        plt.plot(range(len(losses)), losses)
# ---- Ejemplo n.º 4 (snippet-aggregator fragment separator; not code) ----
# 0
    # NOTE(review): fragment starts mid function; `model`, `opt`, and
    # `evaluate` are defined in unseen code above.
    model.load_state_dict(torch.load(opt.weights_path))

    # Get dataloader
    print("\n---- Evaluating Model ----")

    opt.iou_thres = 0.5
    ap = 0
    APs = []
    # NOTE(review): the commented range below would sweep IoU 0.5:0.95 (COCO
    # style); the active range 0.5:0.54 step 0.05 yields only the single
    # value 0.5, so `ap` is the mean AP at IoU 0.5 — confirm this is intended.
    #    for opt.iou_thres in np.arange(0.5,0.96,0.05):
    for opt.iou_thres in np.arange(0.5, 0.54, 0.05):

        loss, precision, recall, AP, f1, ap_class = evaluate(
            model,
            path=opt.valid_path,
            iou_thres=opt.iou_thres,  # !!! original 0.5
            conf_thres=opt.conf_thres,  # !!! original 0.5, current 0.3
            nms_thres=opt.nms_thres,  # !!! original 0.5, 0.001
            img_size=opt.img_size,
            batch_size=opt.batch_size,
        )
        #print(AP.mean())
        # Accumulate mAP across the IoU sweep and record per-threshold values.
        ap += AP.mean()
        APs.append([opt.iou_thres, AP.mean()])

    # Metrics below reflect only the LAST sweep iteration (here the only one).
    evaluation_metrics = [
        ("val_precision", round(precision.mean(), 4)),
        ("val_recall", round(recall.mean(), 4)),
        ("val_mAP", round(AP.mean(), 4)),
        ("val_f1", round(f1.mean(), 4)),
    ]
# ---- Ejemplo n.º 5 (snippet-aggregator fragment separator; not code) ----
# 0
            # NOTE(review): fragment starts mid training loop; `pred`,
            # `batch_cls`, `iters`, `train_info`, `criterion`, `optimizer`,
            # `feature_stractor`, and `rnn` come from unseen code above.
            #cls = torch.from_numpy(np.expand_dims(np.asarray(cls), axis=0))
            #cls = cls.cuda()

            loss = criterion(pred, batch_cls)

            #print('Back propagation')
            # Standard zero-grad / backward / step update.
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()

            train_info += ' loss: {:.4f}'.format(loss.data.cpu().numpy())

            print(train_info)  #, end = '\r')
        if epoch % args.val_epoch == 0:
            ''' evaluate the model '''
            acc = evaluate(feature_stractor, rnn, val_loader, args.train_batch)
            writer.add_scalar('val_acc', acc, iters)
            print('Epoch: [{}] ACC:{}'.format(epoch, acc))
            ''' save best model '''
            # Keep the best-so-far RNN and feature-extractor weights.
            if acc > best_acc:
                print('saved model', epoch)
                save_model(
                    rnn, os.path.join(args.save_dir, 'model_best_rnn.pth.tar'))
                save_model(
                    feature_stractor,
                    os.path.join(args.save_dir, 'model_best_fea.pth.tar'))
                best_acc = acc
        ''' save model '''
        # Also checkpoint the RNN every epoch, regardless of validation
        # accuracy. Fragment is truncated after this call.
        save_model(
            rnn,
            os.path.join(args.save_dir, 'model_{}_rnn.pth.tar'.format(epoch)))