# Example #1
    def __init__(self, args, search_space, action_list, submodel_manager):
        """Set up the RL-based selector.

        Initializes bookkeeping counters and reward/early-stop helpers,
        delegates to the parent selector, builds the controller network,
        and creates its optimizer.

        Args:
            args: Parsed command-line namespace (from ``argparse``).
            search_space: Search space the controller samples from.
            action_list: Ordered action list defining an architecture.
            submodel_manager: Manager used to train/evaluate sampled models.
        """
        self.args = args
        self.cuda = args.cuda
        # Progress counters.
        self.controller_step = 0
        self.epoch = 0
        self.start_epoch = 0
        # Populated later (base-class init / build_model below).
        self.submodel_manager = None
        self.controller = None

        self.early_stop_manager = EarlyStop(10)
        self.reward_manager = TopAverage(10)

        super(RL_Selector, self).__init__(args, search_space, action_list,
                                          submodel_manager)
        self.build_model()  # constructs self.controller
        self.max_length = self.args.shared_rnn_max_length

        # Resolve the optimizer class by name, then instantiate it over the
        # controller's parameters.
        optim_cls = _get_optimizer(self.args.controller_optim)
        self.controller_optim = optim_cls(self.controller.parameters(),
                                          lr=self.args.controller_lr)
# Example #2
    def __init__(self, args):
        """Initialize the manager from parsed command-line arguments.

        Loads a citation benchmark dataset when one is requested and copies
        the training hyper-parameters from *args* onto the instance.

        Args:
            args: Parsed command-line namespace (from ``argparse``).
        """
        self.args = args

        # Citation benchmarks: load the data and record input feature and
        # class counts both on the instance and back onto args.
        if hasattr(args, 'dataset') and args.dataset in ["cora", "citeseer", "pubmed"]:
            self.data = load(args)
            self.args.in_feats = self.in_feats = self.data.features.shape[1]
            self.args.num_class = self.n_classes = self.data.num_labels

        self.early_stop_manager = EarlyStop(10)
        self.reward_manager = TopAverage(10)
        print('the experiment config:', '\n', args)

        # Training hyper-parameters.
        self.drop_out = args.in_drop
        self.multi_label = args.multi_label
        self.lr = args.lr
        self.weight_decay = args.weight_decay
        self.retrain_epochs = args.retrain_epochs
        self.epochs = args.epochs
        self.train_graph_index = 0
        self.train_set_length = 10

        self.param_file = args.param_file
        self.shared_params = None

        # NOTE: the original code first assigned torch.nn.BCELoss() and then
        # unconditionally overwrote it here; only nll_loss was ever used, so
        # the dead BCELoss construction was removed.
        self.loss_fn = torch.nn.functional.nll_loss
# Example #3
    def __init__(self, args):
        """Initialize the manager from parsed command-line arguments.

        Loads a citation benchmark dataset when one is requested and copies
        the training hyper-parameters from *args* onto the instance.

        Args:
            args: Parsed command-line namespace (from ``argparse``).
        """
        self.args = args

        # Citation benchmarks: load the data and record input feature and
        # class counts both on the instance and back onto args.
        if hasattr(args, 'dataset') and args.dataset in [
                "cora", "citeseer", "pubmed"
        ]:
            self.data = load(args)
            self.args.in_feats = self.in_feats = self.data.features.shape[1]
            self.args.num_class = self.n_classes = self.data.num_labels

        self.early_stop_manager = EarlyStop(10)
        # TopAverage tracks the top-k scores seen so far and turns new scores
        # into clipped rewards relative to that running average.
        self.reward_manager = TopAverage(10)

        # Training hyper-parameters.
        self.drop_out = args.in_drop
        self.multi_label = args.multi_label
        self.lr = args.lr
        self.weight_decay = args.weight_decay
        self.retrain_epochs = args.retrain_epochs
        self.epochs = args.epochs
        self.train_graph_index = 0
        self.train_set_length = 10

        self.param_file = args.param_file
        self.shared_params = None

        # NOTE: the original code first assigned torch.nn.BCELoss() and then
        # unconditionally overwrote it here; only nll_loss was ever used, so
        # the dead BCELoss construction was removed. A 27-line string literal
        # containing the commented-out TopAverage source was also removed as
        # dead code.
        self.loss_fn = torch.nn.functional.nll_loss