def __init__(self, name, hparams, bnn_model='RMSProp'):
        """Creates a PosteriorBNNSampling object based on a specific optimizer.

    The algorithm has two basic tools: an Approx BNN and a Contextual Dataset.
    The Bayesian Network keeps the posterior based on the optimizer iterations.

    Args:
      name: Name of the algorithm.
      hparams: Hyper-parameters of the algorithm.
      bnn_model: Type of BNN. By default RMSProp (point estimate).
    """

        self.name = name
        self.hparams = hparams
        self.optimizer_n = hparams.optimizer

        self.training_freq = hparams.training_freq
        self.training_epochs = hparams.training_epochs
        self.t = 0
        self.data_h = ContextualDataset(hparams.context_dim,
                                        hparams.num_actions, hparams.buffer_s)

        bnn_name = '{}-bnn'.format(name)
        # to be extended with more BNNs (BB alpha-div, GPs, SGFS, constSGD...)
        builders = {
            'Variational': lambda: VariationalNeuralBanditModel(hparams, bnn_name),
            'AlphaDiv': lambda: BBAlphaDivergence(hparams, bnn_name),
            'Variational_BF': lambda: BfVariationalNeuralBanditModel(hparams, bnn_name),
            'GP': lambda: MultitaskGP(hparams),
        }
        default_builder = lambda: NeuralBanditModel(self.optimizer_n, hparams, bnn_name)
        self.bnn = builders.get(bnn_model, default_builder)()
# Example no. 2
    def __init__(self, name, hparams, textflag='yes', optimizer='RMS'):
        """Sets up the neural-linear sampler: priors, datasets, and the model."""
        self.name = name
        self.hparams = hparams
        self.epsilon = self.hparams.epsilon
        self.latent_dim = self.hparams.layer_sizes[-1]
        self.intercept = True
        # One extra slot for the bias/intercept coefficient when modelled.
        self.param_dim = 1 + self.latent_dim if self.intercept else self.latent_dim
        # Gaussian prior for each beta_i

        # Regression and NN Update Frequency
        self.update_freq_lr = hparams.training_freq
        self.update_freq_nn = hparams.training_freq_network

        self.t = 0
        self.optimizer_n = optimizer
        self.num_epochs = hparams.training_epochs

        # Raw contexts are stored without intercept; the latent dataset
        # mirrors the intercept choice above.
        self.data_h = ContextualDataset(hparams.context_dim,
                                        hparams.num_actions,
                                        intercept=False)
        self.latent_h = ContextualDataset(self.latent_dim,
                                          hparams.num_actions,
                                          intercept=self.intercept)

        bnn_label = '{}-bnn'.format(name)
        if textflag == 'yes':
            self.bnn = TextCNN('adam', self.hparams.num_actions,
                               self.hparams.batch_size, bnn_label)
        else:
            self.bnn = NeuralBanditModel(optimizer, hparams, bnn_label)
  def __init__(self, name, hparams, optimizer='RMS'):
    """Creates a BootstrappedSGDSampling object based on a specific optimizer.

      hparams.q: Number of models that are independently trained.
      hparams.p: Prob of independently including each datapoint in each model.

    Args:
      name: Name given to the instance.
      hparams: Hyperparameters for each individual model.
      optimizer: Neural network optimization algorithm.
    """

    self.name = name
    self.hparams = hparams
    self.optimizer_n = optimizer

    self.training_freq = hparams.training_freq
    self.training_epochs = hparams.training_epochs
    self.t = 0

    self.q = hparams.q
    self.p = hparams.p

    # One independent dataset per bootstrap model.
    self.datasets = [
        ContextualDataset(hparams.context_dim,
                          hparams.num_actions,
                          hparams.buffer_s)
        for _ in range(self.q)
    ]

    # One independently-trained network per bootstrap model, each with a
    # distinct label.
    labels = ['{}-{}-bnn'.format(name, i) for i in range(self.q)]
    self.bnn_boot = [
        NeuralBanditModel(optimizer, hparams, label) for label in labels
    ]
# Example no. 4
    def __init__(self, name, hparams, optimizer='RMS'):
        """Sets up per-action linear-regression statistics and a neural model."""
        self.name = name
        self.hparams = hparams
        self.n_a = self.hparams.num_actions
        self.n_d = self.hparams.layer_sizes[-1]
        self.alpha = self.hparams.alpha
        self.lam = self.hparams.lam

        # One (n_d x n_d) matrix per action: A starts as lam * I, and its
        # inverse therefore starts as I / lam.
        identity_stack = np.tile(np.eye(self.n_d)[np.newaxis, :, :],
                                 (self.n_a, 1, 1))
        self.a = identity_stack * self.lam
        self.inv_a = identity_stack / self.lam

        self.b = np.zeros((self.n_a, self.n_d))
        self.theta = np.zeros((self.n_a, self.n_d))

        # Params for BNN
        self.update_freq_nn = hparams.training_freq_network

        self.t = 0
        self.optimizer_n = optimizer
        self.num_epochs = hparams.training_epochs

        self.data_h = ContextualDataset(hparams.context_dim,
                                        hparams.num_actions,
                                        bootstrap=getattr(hparams, 'bootstrap', None),
                                        intercept=False)

        self.bnn = NeuralBanditModel(optimizer, hparams, '{}-bnn'.format(name))
  def __init__(self, name, hparams, textflag='no', optimizer='RMS'):
    """Sets up Bayesian linear-regression priors on latent features + a BNN."""
    self.name = name
    self.hparams = hparams
    self.latent_dim = self.hparams.layer_sizes[-1]
    self.intercept = False
    # One extra slot for the bias term when an intercept is modelled.
    self.param_dim = 1 + self.latent_dim if self.intercept else self.latent_dim

    # Gaussian prior for each beta_i
    self._lambda_prior = self.hparams.lambda_prior

    n_a = self.hparams.num_actions

    # Per-action sufficient statistics for the linear posterior.
    self.mu = [np.zeros(self.param_dim) for _ in range(n_a)]
    self.f = [np.zeros(self.param_dim) for _ in range(n_a)]
    self.yy = [0] * n_a

    self.cov = [np.eye(self.param_dim) / self.lambda_prior for _ in range(n_a)]
    self.precision = [np.eye(self.param_dim) * self.lambda_prior
                      for _ in range(n_a)]

    # Inverse Gamma prior for each sigma2_i
    self._a0 = self.hparams.a0
    self._b0 = self.hparams.b0
    self.a = [self._a0] * n_a
    self.b = [self._b0] * n_a

    # Regression and NN Update Frequency
    self.update_freq_lr = hparams.training_freq
    self.update_freq_nn = hparams.training_freq_network

    self.t = 0
    self.optimizer_n = optimizer
    self.num_epochs = hparams.training_epochs

    # Raw contexts are stored without intercept; the latent dataset mirrors
    # the intercept choice above.
    self.data_h = ContextualDataset(hparams.context_dim,
                                    n_a,
                                    intercept=False)
    self.latent_h = ContextualDataset(self.latent_dim,
                                      n_a,
                                      intercept=self.intercept)

    bnn_label = '{}-bnn'.format(name)
    if textflag == 'yes':
      self.bnn = TextCNN('adam', n_a, self.hparams.batch_size, bnn_label)
    else:
      self.bnn = NeuralBanditModel(optimizer, hparams, bnn_label)
    def __init__(self, name, hparams):
        """Creates the algorithm, and sets up the adaptive Gaussian noise.

        Builds a NeuralBanditModel and extends its TF graph with ops that:
        sample Gaussian noise for every trainable variable, add the noise
        in place, recompute the network prediction under the noisy weights,
        and finally subtract the noise again to restore the weights.

        Args:
          name: Name given to the instance; used to label the BNN.
          hparams: Hyper-parameters; optional attributes verbose, noise_std,
            eps, d_samples, optimizer and bootstrap are read with defaults.
        """

        self.name = name
        self.hparams = hparams
        # Optional hyper-parameters, read with defaults when absent.
        self.verbose = getattr(self.hparams, 'verbose', True)
        self.noise_std = getattr(self.hparams, 'noise_std', 0.005)
        self.eps = getattr(self.hparams, 'eps', 0.05)
        self.d_samples = getattr(self.hparams, 'd_samples', 300)
        self.optimizer = getattr(self.hparams, 'optimizer', 'RMS')

        # keep track of noise heuristic statistics
        self.std_h = [self.noise_std]
        self.eps_h = [self.eps]
        self.kl_h = []
        self.t = 0

        self.freq_update = hparams.training_freq
        self.num_epochs = hparams.training_epochs

        self.data_h = ContextualDataset(hparams.context_dim,
                                        hparams.num_actions,
                                        hparams.buffer_s,
                                        bootstrap=getattr(
                                            hparams, 'bootstrap', None))
        self.bnn = NeuralBanditModel(self.optimizer, hparams,
                                     '{}-bnn'.format(name))

        with self.bnn.graph.as_default():
            # noise-injection std placeholder
            self.bnn.noise_std_ph = tf.placeholder(tf.float32, shape=())

            # create noise corruption op; adds noise to all weights
            tvars = tf.trainable_variables()
            self.bnn.noisy_grads = [
                tf.random_normal(v.get_shape(), 0, self.bnn.noise_std_ph)
                for v in tvars
            ]

            # add noise to all params, then compute prediction, then subtract.
            # NOTE(review): subtracting relies on each noisy_grads tensor
            # evaluating to the same sample within a single session.run —
            # confirm this holds for the TF version in use.
            with tf.control_dependencies(self.bnn.noisy_grads):
                self.bnn.noise_add_ops = [
                    tvars[i].assign_add(n)
                    for i, n in enumerate(self.bnn.noisy_grads)
                ]
                with tf.control_dependencies(self.bnn.noise_add_ops):
                    # we force the prediction for 'y' to be recomputed after adding noise
                    self.bnn.noisy_nn, self.bnn.noisy_pred_val = self.bnn.forward_pass(
                    )

                    self.bnn.noisy_pred = tf.identity(self.bnn.noisy_pred_val)
                    # Only after the noisy prediction is materialized do we
                    # undo the corruption by adding the negated noise back.
                    with tf.control_dependencies(
                        [tf.identity(self.bnn.noisy_pred)]):
                        self.bnn.noise_sub_ops = [
                            tvars[i].assign_add(-n)
                            for i, n in enumerate(self.bnn.noisy_grads)
                        ]
# Example no. 7
    def __init__(self, name, hparams, optimizer='RMS'):
        """Sets up Bayesian linear regression on latent features plus a BNN."""
        self.name = name
        self.hparams = hparams
        self.latent_dim = self.hparams.layer_sizes[-1]

        num_actions = self.hparams.num_actions

        # Gaussian prior for each beta_i
        self._lambda_prior = self.hparams.lambda_prior

        self.mu = [np.zeros(self.latent_dim) for _ in range(num_actions)]
        self.cov = [np.eye(self.latent_dim) / self.lambda_prior
                    for _ in range(num_actions)]
        self.precision = [np.eye(self.latent_dim) * self.lambda_prior
                          for _ in range(num_actions)]

        # Inverse Gamma prior for each sigma2_i
        self._a0 = self.hparams.a0
        self._b0 = self.hparams.b0
        self.a = [self._a0] * num_actions
        self.b = [self._b0] * num_actions

        # Regression and NN Update Frequency
        self.update_freq_lr = hparams.training_freq
        self.update_freq_nn = hparams.training_freq_network

        self.t = 0
        self.optimizer_n = optimizer
        self.num_epochs = hparams.training_epochs

        bootstrap = getattr(hparams, 'bootstrap', None)
        self.data_h = ContextualDataset(hparams.context_dim,
                                        num_actions,
                                        bootstrap=bootstrap,
                                        intercept=False)
        self.latent_h = ContextualDataset(self.latent_dim,
                                          num_actions,
                                          intercept=False)
        # print(self.latent_h.actions)
        self.bnn = NeuralBanditModel(optimizer, hparams, '{}-bnn'.format(name))

        # If bootstrap data was loaded, re-embed the stored contexts through
        # the freshly built network and use that as the latent dataset.
        if bootstrap is not None:
            new_z = self.bnn.sess.run(
                self.bnn.nn, feed_dict={self.bnn.x: self.data_h.contexts})
            self.latent_h.replace_data(contexts=new_z,
                                       actions=self.data_h.actions,
                                       rewards=self.data_h.rewards)
    def __init__(self, name, hparams, optimizer='RMS'):
        """Sets up linear-posterior priors (optionally informed) and a BNN."""
        self.name = name
        self.hparams = hparams
        self.latent_dim = self.hparams.layer_sizes[-1]

        num_actions = self.hparams.num_actions

        # Gaussian prior for each beta_i
        self._lambda_prior = self.hparams.lambda_prior

        self.mu = [np.zeros(self.latent_dim) for _ in range(num_actions)]
        self.cov = [np.eye(self.latent_dim) / self.lambda_prior
                    for _ in range(num_actions)]
        self.precision = [np.eye(self.latent_dim) * self.lambda_prior
                          for _ in range(num_actions)]

        # Flags controlling whether informed mu/sigma priors are used.
        self.mu_prior_flag = self.hparams.mu_prior_flag
        self.sigma_prior_flag = self.hparams.sigma_prior_flag

        # Shallow copy: prior precisions start equal to the current ones.
        self.precision_prior = list(self.precision)
        self.mu_prior = np.zeros((self.latent_dim, num_actions))

        # Inverse Gamma prior for each sigma2_i
        self._a0 = self.hparams.a0
        self._b0 = self.hparams.b0
        self.a = [self._a0] * num_actions
        self.b = [self._b0] * num_actions

        # Regression and NN Update Frequency
        self.update_freq_lr = hparams.training_freq
        self.update_freq_nn = hparams.training_freq_network

        self.t = 0
        self.optimizer_n = optimizer
        self.num_epochs = hparams.training_epochs

        # Both datasets use a bounded buffer of hparams.mem points.
        self.data_h = ContextualDataset(hparams.context_dim,
                                        num_actions,
                                        intercept=False,
                                        buffer_s=hparams.mem)
        self.latent_h = ContextualDataset(self.latent_dim,
                                          num_actions,
                                          intercept=False,
                                          buffer_s=hparams.mem)
        self.bnn = NeuralBanditModel(optimizer, hparams, '{}-bnn'.format(name))
# Example no. 9
    def __init__(self, name, hparams, optimizer='RMS'):
        """Epsilon-greedy sampler built on a neural reward model."""
        self.name = name
        self.hparams = hparams

        # Exploration parameters.
        self.eps = 0.9
        self.decay = 0.99  # computed for 10,000 steps

        # Regression and NN Update Frequency
        self.update_freq_lr = hparams.training_freq
        self.update_freq_nn = hparams.training_freq_network

        self.t = 0
        self.optimizer_n = optimizer
        self.num_epochs = hparams.training_epochs

        self.data_h = ContextualDataset(hparams.context_dim,
                                        hparams.num_actions,
                                        intercept=False)
        self.bnn = NeuralBanditModel(optimizer, hparams,
                                     '{}-greedy'.format(name))
# Example no. 10
    def __init__(self, name, hparams, bnn_model='RMSProp', optimizer='RMS'):
        """Creates a UCB-style sampler with a neural model and bonus terms.

    Keeps a Contextual Dataset and a point-estimate neural model, plus the
    statistics (Z inverse and its determinant) used for confidence bonuses.

    Args:
      name: Name of the algorithm.
      hparams: Hyper-parameters of the algorithm.
      bnn_model: Kept for interface compatibility; not used here.
      optimizer: Kept for interface compatibility; the optimizer actually
        used is taken from hparams.optimizer.
    """

        self.name = name
        self.hparams = hparams
        self.optimizer_n = hparams.optimizer

        self.training_freq = hparams.training_freq
        self.training_epochs = hparams.training_epochs
        self.t = 0
        self.gamma = 0

        # Per-action exploration bonus and its scaling constants.
        self.bonus = np.zeros(hparams.num_actions)
        self.C1 = 0.001
        self.C2 = 0.001
        self.C3 = 0.00001

        self.data_h = ContextualDataset(hparams.context_dim,
                                        hparams.num_actions, hparams.buffer_s)

        # to be extended with more BNNs (BB alpha-div, GPs, SGFS, constSGD...)
        self.bnn = NeuralBanditModel(self.optimizer_n, hparams,
                                     '{}-ucb'.format(name))

        # Total number of network parameters: input layer, hidden layers and
        # output layer, each weight matrix including a bias row.
        width = hparams.layer_sizes[0]
        n_input = (hparams.context_dim + 1) * width
        n_hidden = (width + 1) * width * (len(hparams.layer_sizes) - 1)
        n_output = (width + 1) * hparams.num_actions
        self.p = n_input + n_hidden + n_output

        self.Zinv = np.eye(self.p) / hparams.lamb
        self.detZ = hparams.lamb**self.p