return new_cmap


if __name__ == '__main__':
    parse_args_and_merge_const()

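    # Seed a dedicated NumPy RNG so dataset sampling/augmentation is reproducible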
    random_state = np.random.RandomState(const.RANDOM_SEED)

    if not os.path.exists('models'):
        os.makedirs('models')

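    # Run inference over every row of the CSV (no train/val/test filtering)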
    df = pd.read_csv(const.base_path + const.USE_CSV)
    inf_df = df

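    # Inference dataset and loader: batch size 1, fixed order (no shuffling)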
    inf_dataset = DeepFashionCAPDataset(inf_df,
                                        random_state=random_state,
                                        mode=const.DATASET_PROC_METHOD_INF,
                                        base_path=const.base_path)
    inf_dataloader = torch.utils.data.DataLoader(
        inf_dataset,
        batch_size=1,  #const.INF_BATCH_SIZE,
        shuffle=False,
        num_workers=6)
    inf_step = len(inf_dataloader)

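    # Build the network and load the initial weights; strict=False tolerates
    # missing or unexpected keys in the checkpoint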
    net = const.USE_NET(const.USE_IORN)
    net = net.to(const.device)
    net.load_state_dict(torch.load(const.INIT_MODEL), strict=False)

    writer = SummaryWriter(const.INF_DIR)



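    # Override selected config constants for this run; the commented lines show
    # alternative preprocessing modes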
#    const.DATASET_PROC_METHOD_TRAIN = 'ELASTIC_ROTATION_BBOXRESIZE'
#    const.DATASET_PROC_METHOD_TRAIN = 'ROTATION_BBOXRESIZE'
    const.DATASET_PROC_METHOD_TRAIN = 'BBOXRESIZE'
    const.USE_CSV = 'info.csv'
#    const.USE_CSV = 'debug_info.csv'
    if not os.path.exists('models'):
        os.makedirs('models')

    print(const.base_path)
    df = pd.read_csv(const.base_path + const.USE_CSV)
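    # Note: despite the variable name, this example iterates over the 'val' split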
    train_df = df[df['evaluation_status'] == 'val']
    train_dataset = DeepFashionCAPDataset(train_df,
                                          base_path=const.base_path,
                                          random_state=random_state,
                                          mode=const.DATASET_PROC_METHOD_TRAIN)
    train_dataloader = torch.utils.data.DataLoader(train_dataset,
                                                   batch_size=const.BATCH_SIZE,
                                                   shuffle=True,
                                                   num_workers=4)
#    val_df = df[df['evaluation_status'] == 'val']
#    val_dataset = DeepFashionCAPDataset(val_df, mode=const.DATASET_PROC_METHOD_VAL)
#    val_dataloader = torch.utils.data.DataLoader(val_dataset, batch_size=const.VAL_BATCH_SIZE, shuffle=False, num_workers=1)

#    for i in range(100):
#        train_dataset.plot_sample(i)
#
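    # Walk the loader once and count the batches it yields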
    step = 0
    for i, sample in enumerate(train_dataloader):
        step += 1
    # Initialize random generators with given seed
    if const.RANDOM_SEED is not None:
        torch.manual_seed(const.RANDOM_SEED)
        np.random.seed(const.RANDOM_SEED)
        random.seed(const.RANDOM_SEED)
        random_state = np.random.RandomState(const.RANDOM_SEED)

    if not os.path.exists('models'):
        os.makedirs('models')

    df = pd.read_csv(const.base_path + const.USE_CSV)
    train_df = df[df['evaluation_status'] == 'train']
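    # Training data applies the configured augmentation, including the
    # elastic-deformation parameters el_alpha and el_sigma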
    train_dataset = DeepFashionCAPDataset(train_df,
                                          random_state=random_state,
                                          mode=const.DATASET_PROC_METHOD_TRAIN,
                                          base_path=const.base_path,
                                          el_alpha=const.EL_ALPHA,
                                          el_sigma=const.EL_SIGMA)
    train_dataloader = torch.utils.data.DataLoader(train_dataset,
                                                   batch_size=const.BATCH_SIZE,
                                                   shuffle=True,
                                                   num_workers=8)
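    # Validation data uses its own preprocessing mode and a fixed iteration order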
    val_df = df[df['evaluation_status'] == 'val']
    val_dataset = DeepFashionCAPDataset(val_df,
                                        random_state=random_state,
                                        mode=const.DATASET_PROC_METHOD_VAL,
                                        base_path=const.base_path)
    val_dataloader = torch.utils.data.DataLoader(val_dataset,
                                                 batch_size=const.VAL_BATCH_SIZE,
                                                 shuffle=False,
                                                 num_workers=8)
import pandas as pd
import torch
import torch.utils.data
from src import const
from src.utils import parse_args_and_merge_const
from tensorboardX import SummaryWriter
import os
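# Note: DeepFashionCAPDataset is used below; assuming the project layout, it
# would be imported from the project's dataset module (import not shown here).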

if __name__ == '__main__':
    parse_args_and_merge_const()
    if not os.path.exists('models'):
        os.makedirs('models')

    df = pd.read_csv(const.base_path + const.USE_CSV)
    train_df = df[df['evaluation_status'] == 'train']
    train_dataset = DeepFashionCAPDataset(train_df,
                                          mode=const.DATASET_PROC_METHOD_TRAIN)
    train_dataloader = torch.utils.data.DataLoader(train_dataset,
                                                   batch_size=const.BATCH_SIZE,
                                                   shuffle=True,
                                                   num_workers=4)
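    # Here the 'test' split serves as the validation set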
    val_df = df[df['evaluation_status'] == 'test']
    val_dataset = DeepFashionCAPDataset(val_df,
                                        mode=const.DATASET_PROC_METHOD_VAL)
    val_dataloader = torch.utils.data.DataLoader(
        val_dataset,
        batch_size=const.VAL_BATCH_SIZE,
        shuffle=False,
        num_workers=4)
    val_step = len(val_dataloader)

    net = const.USE_NET()
Example #5
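
# Tail of a matplotlib helper that captions each plotted image with
# "prediction, (label: truth)", colored green when the prediction matches the
# label and red otherwise.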
            "{0}, (label: {1})".format(
                classes[preds[idx].max(dim=0)[1].item()],
                classes[int(labels[idx].item())]),
            color=("green" if preds[idx].max(
                dim=0)[1].item() == int(labels[idx].item()) else "red"))
    return fig


if __name__ == '__main__':
    parse_args_and_merge_const()
    if not os.path.exists('models'):
        os.makedirs('models')

    df = pd.read_csv(const.base_path + const.USE_CSV)
    test_df = df[df['evaluation_status'] == 'test']
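    # Note: this example builds the test dataset with the training preprocessing
    # mode and iterates it with a shuffled loader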
    test_dataset = DeepFashionCAPDataset(test_df,
                                         mode=const.DATASET_PROC_METHOD_TRAIN)
    test_dataloader = torch.utils.data.DataLoader(test_dataset,
                                                  batch_size=const.BATCH_SIZE,
                                                  shuffle=True,
                                                  num_workers=4)
    test_step = len(test_dataloader)

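    # Restore the trained weights from const.save_model_path and move the model
    # to the target device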
    net = const.USE_NET()
    net.load_state_dict(torch.load(const.save_model_path))
    net = net.to(const.device)

    writer = SummaryWriter(const.VAL_DIR)

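    # net.eval() disables dropout and uses the running batch-norm statistics
    # during evaluation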
    step = 0
    print("Start Evaluate")
    net.eval()