Example #1
def exp_train():
    exp_name = 'EXP_NAME'
    out_base_dir = os.path.join(os.getcwd(), 'symlinks/exp/EXP_GROUP')
    exp_const = ExpConstants(exp_name, out_base_dir)
    exp_const.model_dir = os.path.join(exp_const.exp_dir, 'models')
    exp_const.log_dir = os.path.join(exp_const.exp_dir, 'log')
    exp_const.vis_dir = os.path.join(exp_const.exp_dir, 'vis')
    exp_const.log_step = 10
    exp_const.model_save_step = 1000
    exp_const.val_step = 1000
    exp_const.num_val_samples = 1000
    exp_const.batch_size = 32
    exp_const.num_epochs = 1000
    exp_const.lr = 0.01
    exp_const.momentum = 0.9
    exp_const.num_workers = 5
    exp_const.optimizer = 'SGD'
    exp_const.subset = {'training': 'train', 'validation': 'val'}

    data_const = DATASET_CONSTANTS()

    model_const = Constants()
    model_const.model_num = None
    model_const.net = NET_CONSTANTS()
    model_const.net_path = os.path.join(exp_const.model_dir,
                                        f'net_{model_const.model_num}')

    train.main(exp_const, data_const, model_const)
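The runners in these examples assume a small set of constant containers that are not shown. Below is a minimal sketch of what they might look like, treating Constants as a plain attribute namespace and assuming ExpConstants derives exp_dir from the base directory and experiment name (an assumption inferred from how exp_const.exp_dir is used above), not the repository's actual definitions.

import os

class Constants:
    """Plain namespace: runners attach arbitrary attributes to instances."""
    def to_dict(self):
        # Convenience for logging/serializing the configuration.
        return dict(self.__dict__)

class ExpConstants(Constants):
    def __init__(self, exp_name, out_base_dir):
        self.exp_name = exp_name
        self.out_base_dir = out_base_dir
        # Assumed layout: one directory per experiment under the base dir.
        self.exp_dir = os.path.join(out_base_dir, exp_name)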
Example #2
def exp_extract_embeddings():
    args = parser.parse_args()
    not_specified_args = manage_required_args(
        args,
        parser,
        required_args=[
            'embed_dim',
            'xform',
            'model_num',
            'syn'])

    exp_name = f'{args.xform}_{args.embed_dim}'
    out_base_dir = os.path.join(
        os.getcwd(),
        'symlinks/exp/multi_sense_cooccur')
    exp_const = ExpConstants(exp_name, out_base_dir)
    exp_const.model_dir = os.path.join(exp_const.exp_dir, 'models')
    exp_const.cooccur_types = [
        'syn',
        'attr_attr',
        'obj_attr',
        'obj_hyp',
        'context'
    ]
    if args.syn == False:
        exp_const.cooccur_types = exp_const.cooccur_types[1:]

    data_const = MultiSenseCooccurDatasetConstants()
    data_const.cooccur_csv = os.path.join(
        os.getcwd(),
        'symlinks/exp/multi_sense_cooccur/cooccurrences/merged_cooccur.csv')

    model_const = Constants()
    model_const.model_num = args.model_num
    model_const.net = LogBilinearConstants()
    model_const.net.num_words = 93553
    model_const.net.embed_dims = args.embed_dim
    model_const.net.two_embedding_layers = False
    model_const.net.xform_type = args.xform
    model_const.net.xform_num_layers = None
    model_const.net.use_bias = True
    model_const.net.use_fx = False
    model_const.net.cooccur_types = copy.deepcopy(exp_const.cooccur_types)
    model_const.net_path = os.path.join(
        exp_const.model_dir,
        f'net_{model_const.model_num}')

    extract_embeddings.main(exp_const, data_const, model_const)
    extract_embeddings_xformed.main(exp_const, data_const, model_const)
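manage_required_args is not shown in these examples. The sketch below is a hypothetical stand-in, assuming it only checks that every required argument was given a value (taken here to mean the parsed value is not None) and aborts with the parser's help text otherwise.

import sys

def manage_required_args(args, parser, required_args, optional_args=None):
    # Assumption: "not specified" means the parsed value is still None.
    not_specified = [a for a in required_args if getattr(args, a) is None]
    if not_specified:
        parser.print_help()
        sys.exit(f'Missing required arguments: {not_specified}')
    return not_specified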
Example #3
def exp_eval():
    exp_name = 'EXP_NAME'
    out_base_dir = os.path.join(os.getcwd(), 'symlinks/exp/EXP_GROUP')
    exp_const = ExpConstants(exp_name, out_base_dir)
    exp_const.model_dir = os.path.join(exp_const.exp_dir, 'models')
    exp_const.log_dir = os.path.join(exp_const.exp_dir, 'log')
    exp_const.vis_dir = os.path.join(exp_const.exp_dir, 'vis')
    exp_const.batch_size = 32
    exp_const.num_workers = 5

    data_const = DATASET_CONSTANTS()
    data_const.subset = 'eval'

    model_const = Constants()
    model_const.model_num = None
    model_const.net = NET_CONSTANTS()
    model_const.net_path = os.path.join(exp_const.model_dir,
                                        f'net_{model_const.model_num}')

    evaluation.main(exp_const, data_const, model_const)
Example #4
def main(**kwargs):
    exp_const = ExpConstants(kwargs['exp_name'], kwargs['exp_base_dir'])
    exp_const.log_dir = os.path.join(exp_const.exp_dir, 'logs')
    exp_const.model_dir = os.path.join(exp_const.exp_dir, 'models')
    exp_const.vis_dir = os.path.join(exp_const.exp_dir, 'vis')
    exp_const.optimizer = 'Adam'
    exp_const.lr = 1e-3
    exp_const.momentum = None
    exp_const.num_epochs = 100
    exp_const.log_step = 100
    exp_const.model_save_step = 1000
    exp_const.val_step = 1000
    exp_const.num_val_samples = None

    data_const = {'train': Constants(), 'val': Constants()}

    model_const = Constants()
    model_const.model_num = kwargs['model_num']
    model_const.net = Constants()
    model_const.net_path = os.path.join(exp_const.model_dir,
                                        f'net_{model_const.model_num}')
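A hypothetical call illustrating the keyword arguments this template expects; the values are placeholders.

main(
    exp_name='my_experiment',
    exp_base_dir='exp',
    model_num=None)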
Example #5
def exp_train():
    args = parser.parse_args()

    # create experiments directory and required folders
    out_base_dir = os.path.join(os.getcwd(), f'exp/{args.dataset_type}')
    exp_const = ExpConstants(args.run_name, out_base_dir)
    exp_const.model_dir = os.path.join(exp_const.exp_dir, 'models')
    exp_const.log_dir = os.path.join(exp_const.exp_dir, 'log')
    exp_const.vis_dir = os.path.join(exp_const.exp_dir, 'vis')

    use_cuda = torch.cuda.is_available()
    exp_const.device = "cuda:0" if use_cuda else "cpu"

    # training params
    exp_const.optimizer = args.optimizer
    exp_const.num_epochs = args.num_epochs
    exp_const.batch_size = args.batch_size
    exp_const.lr = args.lr
    exp_const.momentum = args.momentum
    exp_const.num_workers = args.num_workers

    # logging, saving
    exp_const.log_step = args.log_step
    exp_const.model_save_epoch = args.model_save_epoch
    exp_const.val_epoch = args.val_epoch
    exp_const.subset = {'training': 'train', 'test': 'test'}

    # dataset
    data_const = DatasetConstants(root=args.dataroot,
                                  download=args.download_dataset,
                                  train=True)
    data_const.dataset_type = args.dataset_type

    # model (resnet and attribute embeddings)
    model_const = Constants()
    model_const.model_num = None
    model_const.sim_loss = args.sim_loss
    model_const.ce_loss_warmup = args.ce_loss_warmup

    model_const.net = ResnetConstants()
    if args.dataset_type == 'Cifar100':
        model_const.net.num_layers = "cifar100"  # a custom resnet for cifar100, to adjust the dimensions of the feature maps
        model_const.net.num_classes = 100
    else:
        model_const.net.num_layers = args.num_layers
        if args.dataset_type == "Imagenet":
            model_const.net.num_classes = 1000
        elif args.dataset_type == "VOC":
            model_const.net.num_classes = 20
        elif args.dataset_type == "STL10":
            model_const.net.num_layers = 'cifar100'  # TODO: deeper resnets do not work on STL10.
            model_const.net.num_classes = 10

    model_const.net.pretrained = False
    model_const.net_path = os.path.join(exp_const.model_dir,
                                        f'net_{model_const.model_num}')

    model_const.attr_embed = AttributeEmbeddingsConstants()
    model_const.attr_embed_path = os.path.join(
        exp_const.model_dir, f'attr_embed_{model_const.model_num}')
    model_const.attr_embed.glove_dim = 300
    model_const.attr_embed.num_classes = model_const.net.num_classes

    # attribute embedding dimensions
    if args.embed_type == 'vico_linear':
        model_const.attr_embed.no_glove = True  # Zero out the glove component
        model_const.attr_embed.embed_dims = 300 + args.vico_dim
        embed_dir = os.path.join(
            os.getcwd(),
            'data/pretrained-embeddings/glove_300_vico_linear_100/')
        model_const.attr_embed.embed_h5py = os.path.join(
            embed_dir, 'visual_word_vecs.h5py')
        model_const.attr_embed.embed_word_to_idx_json = os.path.join(
            embed_dir, 'visual_word_vecs_idx.json')
    elif args.embed_type == 'vico_select':
        model_const.attr_embed.no_glove = True  # Zero out the glove component
        model_const.attr_embed.hypernym = args.hypernym
        model_const.attr_embed.embed_dims = 300 + args.vico_dim

        embed_dir = os.path.join(
            os.getcwd(),
            'data/pretrained-embeddings/glove_300_vico_select_200/')
        model_const.attr_embed.embed_h5py = os.path.join(
            embed_dir, 'visual_word_vecs.h5py')
        model_const.attr_embed.embed_word_to_idx_json = os.path.join(
            embed_dir, 'visual_word_vecs_idx.json')
    else:
        err_str = f'{args.embed_type} is currently not implemented in the runner'
        raise NotImplementedError(err_str)

    # pass all constants to training method
    train.main(exp_const, data_const, model_const)
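The embed_h5py / embed_word_to_idx_json pair configured above points at a word-vector matrix plus a word-to-row-index map. A minimal sketch of reading one vector from such a pair follows; the helper itself and the dataset name 'embeddings' are assumptions, not part of the example above.

import json

import h5py

def load_word_vec(word, embed_h5py, embed_word_to_idx_json):
    with open(embed_word_to_idx_json) as f:
        word_to_idx = json.load(f)
    with h5py.File(embed_h5py, 'r') as f:
        # Assumed layout: a 2-D float dataset indexed by the json map.
        return f['embeddings'][word_to_idx[word]]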
Example #6
def exp_train():
    args = parser.parse_args()
    not_specified_args = manage_required_args(args,
                                              parser,
                                              required_args=[
                                                  'held_classes', 'embed_type',
                                                  'glove_dim', 'vico_dim',
                                                  'run'
                                              ],
                                              optional_args=[])
    exp_name = (
        f'{args.embed_type}_{args.glove_dim}_{args.vico_dim}_'
        f'held_classes_{args.held_classes}')
    out_base_dir = os.path.join(os.getcwd(),
                                f'symlinks/exp/cifar100/zero_shot_{args.run}')
    exp_const = ExpConstants(exp_name, out_base_dir)
    exp_const.model_dir = os.path.join(exp_const.exp_dir, 'models')
    exp_const.log_dir = os.path.join(exp_const.exp_dir, 'log')
    exp_const.vis_dir = os.path.join(exp_const.exp_dir, 'vis')
    exp_const.log_step = 200
    exp_const.model_save_step = 1000
    exp_const.val_step = 1000
    exp_const.batch_size = 128
    exp_const.num_epochs = 50  #100
    exp_const.lr = 0.01
    exp_const.momentum = 0.9
    exp_const.num_workers = 5
    exp_const.optimizer = 'Adam'
    exp_const.feedforward = False
    exp_const.subset = {'training': 'train', 'test': 'test'}

    data_const = Cifar100DatasetConstants()
    data_const.num_held_out_classes = args.held_classes

    model_const = Constants()
    model_const.model_num = None
    model_const.net = ResnetConstants()
    model_const.net.num_layers = 32
    model_const.net.num_classes = 100
    model_const.net.pretrained = False
    model_const.net_path = os.path.join(exp_const.model_dir,
                                        f'net_{model_const.model_num}')
    model_const.embed2class = Embed2ClassConstants()
    model_const.embed2class.linear = True
    model_const.embed2class_path = os.path.join(
        exp_const.model_dir, f'embed2class_{model_const.model_num}')
    model_const.embed2class.glove_dim = args.glove_dim

    # Dimensions
    if args.embed_type == 'glove':
        model_const.embed2class.embed_dims = args.glove_dim
        model_const.embed2class.embed_h5py = os.path.join(
            os.getcwd(),
            f'symlinks/data/glove/proc/glove_6B_{args.glove_dim}d.h5py')
        model_const.embed2class.embed_word_to_idx_json = os.path.join(
            os.getcwd(),
            f'symlinks/data/glove/proc/glove_6B_{args.glove_dim}d_word_to_idx.json'
        )
    elif args.embed_type == 'glove_vico_linear':
        model_const.embed2class.embed_dims = args.glove_dim + args.vico_dim
        embed_dir = os.path.join(
            os.getcwd(),
            'symlinks/exp/multi_sense_cooccur/' + \
            f'linear_100/concat_with_glove_{args.glove_dim}')
        model_const.embed2class.embed_h5py = os.path.join(
            embed_dir, 'visual_word_vecs.h5py')
        model_const.embed2class.embed_word_to_idx_json = os.path.join(
            embed_dir, 'visual_word_vecs_idx.json')
    elif args.embed_type == 'vico_linear':
        model_const.embed2class.no_glove = True  # Zero out the glove component
        model_const.embed2class.embed_dims = args.glove_dim + args.vico_dim
        embed_dir = os.path.join(
            os.getcwd(),
            'symlinks/exp/multi_sense_cooccur/' + \
            f'linear_100/concat_with_glove_{args.glove_dim}')
        model_const.embed2class.embed_h5py = os.path.join(
            embed_dir, 'visual_word_vecs.h5py')
        model_const.embed2class.embed_word_to_idx_json = os.path.join(
            embed_dir, 'visual_word_vecs_idx.json')
    elif args.embed_type == 'glove_vico_select':
        model_const.embed2class.embed_dims = args.glove_dim + args.vico_dim
        embed_dir = os.path.join(
            os.getcwd(),
            'symlinks/exp/multi_sense_cooccur/' + \
            f'select_200/concat_with_glove_{args.glove_dim}')
        model_const.embed2class.embed_h5py = os.path.join(
            embed_dir, 'visual_word_vecs.h5py')
        model_const.embed2class.embed_word_to_idx_json = os.path.join(
            embed_dir, 'visual_word_vecs_idx.json')
    else:
        err_str = f'{args.embed_type} is currently not implemented in the runner'
        raise NotImplementedError(err_str)

    train.main(exp_const, data_const, model_const)
Example #7
def exp_train():
    args = parser.parse_args()
    not_specified_args = manage_required_args(
        args,
        parser,
        required_args=[
            'embed_dim',
            'xform',
            'model_num',
            'syn'])

    exp_name = f'{args.xform}_{args.embed_dim}'
    out_base_dir = os.path.join(
        os.getcwd(),
        'symlinks/exp/multi_sense_cooccur')
    exp_const = ExpConstants(exp_name, out_base_dir)
    exp_const.model_dir = os.path.join(exp_const.exp_dir, 'models')
    exp_const.log_dir = os.path.join(exp_const.exp_dir, 'log')
    exp_const.vis_dir = os.path.join(exp_const.exp_dir, 'vis')
    exp_const.log_step = 100
    exp_const.model_save_step = 10000
    exp_const.batch_size = 1000
    exp_const.num_epochs = 10
    exp_const.lr = 0.01
    exp_const.momentum = 0.9    # used only when optimizer is set to 'SGD'
    exp_const.num_workers = 5
    # First train with Adam then finetune with Adagrad
    if args.model_num == -1:
        exp_const.optimizer = 'Adam'
    else:
        exp_const.optimizer = 'Adagrad'
    exp_const.weight_decay = 0
    exp_const.cooccur_weights = {
        'syn': 1,
        'attr_attr': 1,
        'obj_attr': 1,
        'obj_hyp': 1,
        'context': 1,
    }
    if args.syn == False:
        del exp_const.cooccur_weights['syn']

    exp_const.use_neg = True
    
    data_const = MultiSenseCooccurDatasetConstants()
    data_const.cooccur_csv = os.path.join(
        os.getcwd(),
        'symlinks/exp/multi_sense_cooccur/cooccurrences/merged_cooccur.csv')
    data_const.use_self_count = True

    model_const = Constants()
    if args.model_num == -1:
        model_const.model_num = None
    else:
        model_const.model_num = args.model_num
    model_const.net = LogBilinearConstants()
    model_const.net.num_words = 93553
    model_const.net.embed_dims = args.embed_dim
    model_const.net.two_embedding_layers = False
    model_const.net.xform_type = args.xform
    model_const.net.xform_num_layers = None
    model_const.net.use_bias = True
    model_const.net.use_fx = False
    model_const.net.cooccur_types = [
        'syn',
        'attr_attr',
        'obj_attr',
        'obj_hyp',
        'context'
    ]
    if args.syn == False:
        model_const.net.cooccur_types = model_const.net.cooccur_types[1:]

    model_const.net_path = os.path.join(
        exp_const.model_dir,
        f'net_{model_const.model_num}')

    train.main(exp_const, data_const, model_const)
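LogBilinearConstants suggests a GloVe-style log-bilinear objective: embedding dot products plus biases are fit to log co-occurrence counts. The snippet below is a schematic of that general technique in PyTorch, not the repository's exact multi-type, weighted loss.

import torch

def log_bilinear_loss(word_embed, context_embed, word_bias, context_bias, counts):
    # Fit <w_i, w_j> + b_i + b_j to log X_ij (GloVe-style squared error).
    pred = (word_embed * context_embed).sum(dim=1) + word_bias + context_bias
    return ((pred - torch.log(counts)) ** 2).mean()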