Example #1
    def __init__(self, num_subfunctions=50, num_dims=10, objective='l2'):
        # Theano-dependent imports are deferred until they are needed (see below)

        self.name = 'DeepAE'
        layer_sizes = [28 * 28, 1000, 500, 250, 30]
        # layer_sizes = [28 * 28, 20]
        # Load data
        X, y = load_mnist()
        # break the data up into minibatches: subfunction mb gets every
        # num_subfunctions-th column of X, so the minibatches are disjoint
        self.subfunction_references = []
        for mb in range(num_subfunctions):
            self.subfunction_references.append([X[:, mb::num_subfunctions], y[mb::num_subfunctions]])
        # evaluate the full objective on a random subset of the training data
        self.n_full = 10000
        idx = random_choice(X.shape[1], self.n_full, replace=False)
        # (use all the training data instead for a smoother plot)
        # idx = np.array(range(X.shape[1]))
        self.full_objective_references = [[X[:, idx].copy(), y[idx].copy()]]
        from dropout.deepae import build_f_df  # deferred so importing this module doesn't require Theano
        self.theano_f_df, self.model = build_f_df(layer_sizes, use_bias=True,
                                                  objective=objective)
        # optionally initialize from parameters saved by an earlier SFO run
        crossent_params = False
        if crossent_params:
            history = np.load('/home/poole/Sum-of-Functions-Optimizer/sfo_output.npz')
            out = dict(history=history['arr_0'])
            params = out['history'].item()['x']['SFO']
            self.theta_init = params
        else:
            self.theta_init = [param.get_value() for param in self.model.params]
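This constructor only sets up the objective; optimization happens elsewhere in the repository. Below is a minimal usage sketch, assuming the SFO class from the Sum-of-Functions-Optimizer package is importable, and assuming the full class (only __init__ is shown above) wraps theano_f_df in an f_df(theta, minibatch) method:

from sfo import SFO

# A minimal sketch, not the repository's exact driver code.
model = DeepAE(num_subfunctions=50)
# SFO expects f_df(theta, ref) -> (objective value, gradient); the f_df
# wrapper method is assumed here, since only __init__ is shown above.
optimizer = SFO(model.f_df, model.theta_init, model.subfunction_references)
theta = optimizer.optimize(num_passes=5)  # a few passes through the data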
Example #2
    def __init__(self, num_subfunctions=100, num_dims=10, rectifier='soft'):
        self.name = 'MLP'
        # layer_sizes = [28 * 28, 1200, 10]
        # layer_sizes = [28 * 28, 500, 120, num_dims]
        # layer_sizes = [28 * 28, 120, 12, num_dims]
        layer_sizes = [28 * 28, 1200, 1200, num_dims]
        # Load data
        X, y = load_mnist()
        # break the data up into minibatches: subfunction mb gets every
        # num_subfunctions-th column of X, so the minibatches are disjoint
        self.subfunction_references = []
        for mb in range(num_subfunctions):
            self.subfunction_references.append([X[:, mb::num_subfunctions], y[mb::num_subfunctions]])
        # evaluate the full objective on a random subset of the training data
        idx = random_choice(X.shape[1], 5000, replace=False)
        # (use all the training data instead for a smoother plot)
        # idx = np.array(range(X.shape[1]))
        self.full_objective_references = [[X[:, idx].copy(), y[idx].copy()]]
        from dropout.mlp import build_f_df  # deferred so importing this module doesn't require Theano
        self.theano_f_df, self.model = build_f_df(layer_sizes, rectifier=rectifier,
                                                  use_bias=True)
        self.theta_init = [param.get_value() for param in self.model.params]
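The interleaved slice in the loop above is what makes the minibatches disjoint. A tiny self-contained illustration with a toy array (not repository code):

import numpy as np

# Toy demonstration of the X[:, mb::num_subfunctions] split used above.
X = np.arange(12).reshape(2, 6)   # 2 features, 6 training examples
num_subfunctions = 3
batches = [X[:, mb::num_subfunctions] for mb in range(num_subfunctions)]
# batches[0] holds columns 0 and 3, batches[1] columns 1 and 4,
# batches[2] columns 2 and 5: together they cover every column exactly once.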