Exemplo n.º 1
0
def save_checkpoint(params, filename):
    """Serialize *params* to *filename* with ``torch.save``.

    On any failure the error is logged as a warning and re-raised.
    """
    try:
        torch.save(params, filename)
    except Exception as err:
        logout("Could not save: " + filename, "w")
        raise err
Exemplo n.º 2
0
def load_checkpoint(model, filename):
    """Restore the weights stored in *filename* into *model*.

    Loading is non-strict, so missing/unexpected keys are tolerated.
    On any failure the error is logged as a warning and re-raised.

    :return: the same *model* instance, with weights loaded
    """
    try:
        state = load(filename)
        model.load_state_dict(state, strict=False)
    except Exception as err:
        logout("Could not load: " + filename, "w")
        raise err
    return model
Exemplo n.º 3
0
 def load_triples(self, triples_files):
     """
     Loads and concatenates all triples from the given files.

     Each file has one leading count/header line (skipped), then rows of
     three integers separated by space, comma, or tab. The stored column
     order appears to be (h, t, r); columns 1 and 2 are swapped so rows
     come back as (h, r, t).

     :param triples_files: iterable of file names relative to ``self.fp``
     :return: int ndarray of shape (num_triples, 3)
     :raises IOError: if any file cannot be read
     """
     triples = np.ndarray(shape=(0, 3), dtype=int)
     for triples_file in triples_files:
         try:
             file_triples = pd.read_csv(self.fp + triples_file,
                                        sep=r" |,|\t",
                                        skiprows=1,
                                        header=None,
                                        dtype={
                                            0: np.int32,
                                            1: np.int32,
                                            2: np.int32
                                        },
                                        engine="python").to_numpy()
             # swap columns 1 and 2: stored (h, t, r) -> returned (h, r, t)
             file_triples[:, [1, 2]] = file_triples[:, [2, 1]]
             triples = np.append(triples, file_triples, axis=0)
         except IOError:
             logout('Could not load ' + str(triples_file), "f")
             # re-raise the caught error; `raise IOError` constructed a new
             # bare exception, discarding the original message and traceback
             raise
     return triples
Exemplo n.º 4
0
    def load_id_map(self, label_file):
        """
        Loads a mapping between labels (strings) and integer IDs.

        The file is tab-separated with one leading count/header line
        (skipped), then rows of ``label<TAB>id``.

        :param label_file: filename of labels, relative to ``self.fp``
        :return: tuple ``(label2index, index2label)`` dicts
        :raises IOError: if the file cannot be read
        """
        try:
            labels = pd.read_csv(self.fp + label_file,
                                 sep="\t",
                                 skiprows=1,
                                 header=None,
                                 dtype={
                                     0: str,  # np.str was removed in NumPy 1.24
                                     1: np.int32
                                 })
        except IOError:
            logout("Could not load " + str(label_file), "f")
            # re-raise the caught error; `raise IOError` constructed a new
            # bare exception, discarding the original message and traceback
            raise

        # build both directions in one pass over the two columns instead of
        # per-row iloc lookups
        label2index = dict(zip(labels[0], labels[1]))
        index2label = dict(zip(labels[1], labels[0]))
        return label2index, index2label
Exemplo n.º 5
0
    def load_mask(self, dataset_fps=None):
        """
        Loads the (h, r) -> [t] and (r, t) -> [h] vocabs used for
        "filtering" during evaluation, built from all train/valid/test
        triples of each dataset prefix.

        Sets ``self.h_mask`` and ``self.t_mask``; exits fatally if any
        triples file cannot be read.

        :param dataset_fps: optional extra dataset path prefixes; the
            instance's own ``self.fp`` is always included
        """
        t_mask = {}
        h_mask = {}
        # accumulate as int so mask keys/values stay integers instead of
        # being promoted to float64 by the default ndarray dtype
        all_triples = np.ndarray(shape=(0, 3), dtype=int)

        if dataset_fps is None:
            dataset_fps = [self.fp]
        else:
            # build a new list rather than += so the caller's list
            # is not mutated as a side effect
            dataset_fps = dataset_fps + [self.fp]
        dataset_fps = list(set(dataset_fps))  # de-duplicate prefixes

        # loads all train, valid, and test triples
        triple_file_names = ["train2id", "valid2id", "test2id"]
        for dataset_fp in dataset_fps:
            for filename in triple_file_names:
                triples_file = dataset_fp + filename + ".txt"
                try:
                    new_triples = pd.read_csv(triples_file,
                                              sep=r" |,|\t",
                                              skiprows=1,
                                              header=None,
                                              dtype={
                                                  0: np.int32,
                                                  1: np.int32,
                                                  2: np.int32
                                              },
                                              engine="python").to_numpy()
                    # swap columns 1 and 2: stored (h, t, r) -> (h, r, t)
                    new_triples[:, [1, 2]] = new_triples[:, [2, 1]]
                    all_triples = np.append(all_triples, new_triples, axis=0)
                except IOError:
                    logout('Could not load ' + str(triples_file), "f")
                    exit()
        all_triples = np.unique(all_triples, axis=0)

        # sets the hr -> t & rt -> h vocabs
        for h, r, t in all_triples:
            heads = h_mask.setdefault((r, t), [])
            if h not in heads:
                heads.append(h)
            tails = t_mask.setdefault((h, r), [])
            if t not in tails:
                tails.append(t)

        self.h_mask = h_mask
        self.t_mask = t_mask
Exemplo n.º 6
0
def init_model(args):
    """Construct the KGE model named by ``args.model`` and move it to
    ``args.device``; exits fatally on an unknown model name."""
    if args.model == "transe":
        model = std_models.TransE(args.num_ents, args.num_rels,
                                  args.hidden_size, args.margin,
                                  args.neg_ratio, args.batch_size, args.device)
    elif args.model == "analogy":
        model = std_models.Analogy(args.num_ents, args.num_rels,
                                   args.hidden_size, args.device)
    else:
        logout(
            "The model '" + str(args.model) +
            "' to be used is not implemented.", "f")
        exit()
    model.to(args.device, non_blocking=True)
    return model
Exemplo n.º 7
0
def init_optimizer(args, model):
    """
    Build the torch optimizer named by ``args.opt_method`` for *model*.

    ``args.opt_params[0]`` supplies the learning rate. Exits fatally on an
    unknown method or a bad/missing learning-rate parameter.

    :param args: namespace with ``opt_method`` and ``opt_params``
    :param model: model whose ``parameters()`` are optimized
    :return: a configured ``torch.optim`` optimizer
    """
    # one dispatch table instead of four copy-pasted if/elif branches
    optimizers = {
        "adagrad": optim.Adagrad,
        "adadelta": optim.Adadelta,
        "adam": optim.Adam,
        "sgd": optim.SGD,
    }
    if args.opt_method not in optimizers:
        logout("Optimization options are 'adagrad','adadelta','adam','sgd'",
               "f")
        exit()
    try:
        lr = args.opt_params[0]
        optimizer = optimizers[args.opt_method](model.parameters(), lr=lr)
    except (IndexError, ValueError):
        # IndexError: opt_params is empty (the original only caught
        # ValueError, so a missing parameter crashed unlogged);
        # ValueError: torch rejected the lr value.
        logout("Parameters for " + args.opt_method + " are [-op lr]", "f")
        exit()
    return optimizer
Exemplo n.º 8
0
    # loads best model for session
    load_args = copy(args)
    load_args.tag = basename(__main__.__file__).split(".")[0]
    load_args.sess = str(sess)
    model = model_utils.load_model(load_args, model)

    return model


if __name__ == "__main__":
    exp_parser = ExperimentArgParse("Standard setting experiment")
    exp_args = exp_parser.parse()

    # select hardware to use
    if exp_args.cuda and torch.cuda.is_available():
        logout("Running with CUDA")
        exp_args.device = torch.device('cuda')
    else:
        logout("Running with CPU, experiments will be slow", "w")
        exp_args.device = torch.device('cpu')

    if exp_args.sess_mode == "TRAIN":
        exp_tr_bp, exp_de_bp, exp_viz, exp_model, exp_optim, exp_tracker = setup_experiment(
            exp_args)

        while exp_tracker.continue_training():
            # validate
            if exp_tracker.validate():
                inf_metrics = np.asarray([exp_de_bp.process_epoch(exp_model)])
                # log inference metrics
                exp_viz.add_de_sample(inf_metrics)