Example #1
import numpy as np
import pandas as pd
# EnKF (Ensemble Kalman Filter) is assumed to be importable from the surrounding project.


class EnKFRunner:
    def __init__(self, data, targets, connections, model_out, n_ensembles,
                 rng):
        self.data = data
        self.targets = targets
        self.model_out = model_out
        self.n_ensembles = n_ensembles
        self.rng = rng
        self.enkf = EnKF(maxit=1, online=False, n_batches=1)
        # Only the weight array derived from the pickled connections file is kept.
        self.connections = self._shape_connections(connections)

    @staticmethod
    def _shape_connections(connections):
        # Load the pickled DataFrame of connections and return its weight column
        # as a plain array.
        df = pd.read_pickle(connections)
        weights = df['weight'].values
        return weights

    def run(self):
        # Diagonal 10x10 gamma matrix passed to every EnKF update.
        gamma = np.eye(10) * 0.01
        for _ in range(self.rng):
            self.enkf.fit(data=self.data,
                          ensemble=self.connections,
                          ensemble_size=self.n_ensembles,
                          moments1=self.connections.mean(0),
                          observations=self.targets,
                          model_output=self.model_out,
                          noise=0.0,
                          gamma=gamma,
                          )
        return self.enkf.ensemble
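
A minimal usage sketch for the runner above. The array shapes, the file name connections.pkl, the ensemble size and the iteration count are illustrative assumptions, not values taken from the original example; the pickle is expected to hold a DataFrame with a 'weight' column, as read by _shape_connections.

import numpy as np

rng_state = np.random.default_rng(0)
data = rng_state.normal(size=(64, 784))        # one batch of inputs (assumed shape)
targets = rng_state.integers(0, 10, size=64)   # labels used as observations
model_out = rng_state.normal(size=(64, 10))    # stacked model outputs (assumed shape)

runner = EnKFRunner(data=data,
                    targets=targets,
                    connections='connections.pkl',  # pickled DataFrame with a 'weight' column (assumed to exist)
                    model_out=model_out,
                    n_ensembles=100,
                    rng=10)                         # number of fit iterations
ensemble = runner.run()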
Example #2
    def __init__(self, data, targets, connections, model_out, n_ensembles,
                 rng):
        self.data = data
        self.targets = targets
        self.model_out = model_out
        self.n_ensembles = n_ensembles
        self.rng = rng
        self.enkf = EnKF(maxit=1, online=False, n_batches=1)
        # Only the weight array derived from the pickled connections file is kept.
        self.connections = self._shape_connections(connections)
Example #3
import numpy as np
import torch
# EnKF and MnistOptimizee are assumed to be importable from the surrounding project;
# root, device and n_ensembles are assumed to be defined earlier in the script.

conv_loss_mnist = []
# average test losses
test_losses = []
np.random.seed(0)
torch.manual_seed(0)
batch_size = 64
model = MnistOptimizee(root=root,
                       batch_size=batch_size,
                       seed=0,
                       n_ensembles=n_ensembles).to(device)
conv_ens = None
gamma = np.eye(10) * 0.01
enkf = EnKF(tol=1e-5,
            maxit=1,
            stopping_crit='',
            online=False,
            shuffle=False,
            n_batches=1,
            converge=False)
# Number of iterations: roughly eight epochs' worth of batches over the
# 60 000 MNIST training images at this batch size.
rng = int(60000 / batch_size * 8)
for i in range(1):
    model.generation = i + 1
    if i == 0:
        try:
            out = model.load_model('')
            # replace cov matrix with cov from weights (ensembles)
            # m = torch.distributions.Normal(out['conv_params'].mean(),
            #                                out['conv_params'].std())
            # model.cov = m.sample((n_ensembles, model.length))
        except FileNotFoundError as fe:
            print(fe)
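
The snippet above is cut off before the actual EnKF update. Purely as a hedged continuation sketch (not part of the original script), the update could be wired up with the fit signature shown in Example #1; here x_batch, y_batch and model_output are hypothetical names for a batch of MNIST inputs, its labels, and the ensemble's predictions on that batch, while out['conv_params'] comes from the load_model call above.

# Continuation sketch only: mirrors the fit call from Example #1.
# x_batch, y_batch and model_output are hypothetical stand-ins.
ens = out['conv_params']                 # ensemble of flattened network weights
enkf.fit(data=x_batch,
         ensemble=ens,
         ensemble_size=n_ensembles,
         moments1=ens.mean(0),
         observations=y_batch,
         model_output=model_output,
         noise=0.0,
         gamma=gamma)
conv_ens = enkf.ensemble                 # updated ensemble for the next generation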