Example #1
File: utils.py  Project: Peratham/Mozi
def gpu_to_cpu_model(model):
    # Replace every GPU shared variable in the model with a CPU shared
    # variable holding a copy of the same values, so the model no longer
    # references GPU storage. (Python 2: xrange, list-returning items())
    for layer in model.layers:
        for member, value in layer.__dict__.items():
            if is_shared_var(value):
                layer.__dict__[member] = T._shared(np.array(value.get_value(), floatX),
                                                   name=value.name, borrow=False)
        # layer.params may alias the attributes above, so rebuild it too
        for i in xrange(len(layer.params)):
            if is_shared_var(layer.params[i]):
                layer.params[i] = T._shared(np.array(layer.params[i].get_value(), floatX),
                                            name=layer.params[i].name, borrow=False)
    return model
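
The examples on this page rely on an is_shared_var helper that is not shown here. A minimal sketch of such a check, assuming it only needs to recognise Theano shared variables (GPU- or CPU-backed alike):

import theano

def is_shared_var(var):
    # Every Theano shared variable, whether its storage lives on the CPU
    # (TensorSharedVariable) or the GPU (CudaNdarraySharedVariable),
    # derives from the SharedVariable base class.
    return isinstance(var, theano.compile.SharedVariable)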
Example #2
def gpu_to_cpu_model(model):
    # Python 3 variant of the function above: list() materialises the
    # items so layer.__dict__ can be mutated during iteration, and
    # range() replaces Python 2's xrange().
    for layer in model.layers:
        for member, value in list(layer.__dict__.items()):
            if is_shared_var(value):
                layer.__dict__[member] = T._shared(np.array(value.get_value(), floatX),
                                                   name=value.name, borrow=False)
        for i in range(len(layer.params)):
            if is_shared_var(layer.params[i]):
                layer.params[i] = T._shared(np.array(layer.params[i].get_value(), floatX),
                                            name=layer.params[i].name, borrow=False)
    return model
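
A common reason for this conversion is serialisation: a model whose shared variables hold GPU storage cannot be unpickled on a CPU-only machine. A hypothetical call site (build_model and train are placeholders, not part of the source):

import pickle

model = build_model()              # hypothetical model constructor
train(model)                       # hypothetical GPU training loop
model = gpu_to_cpu_model(model)    # rehome all shared variables on the CPU
with open('model.pkl', 'wb') as fout:
    pickle.dump(model, fout)       # the pickle no longer references GPU storage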
Example #3
    def setup(self):

        self.log.info('..begin setting up train object')

        #===================[ build params and deltas list ]==================#

        params = []
        deltas = []

        for i, layer in enumerate(self.model.layers):
            layer_name = "{}_{}".format(layer.__class__.__name__, i)
            if hasattr(layer, 'params'):
                for param in layer.params:
                    # only update parameters that are Theano shared variables
                    if is_shared_var(param):
                        # prefix the name with the layer index and class name
                        # so the parameter is identifiable in the logs
                        param.name = str(i) + '_' + str(param.name)
                        param.name += '_' + layer.__class__.__name__
                        params += [param]
                        # one zero-initialised delta buffer per parameter for
                        # the learning method to accumulate updates into
                        deltas += [shared_zeros(shape=param.shape.eval())]

        #=====================[ training params updates ]=====================#

        self.log.info("..update params: " + str(params))
        train_y_pred, train_layers_stats = self.model.train_fprop(self.model.input_var)
        train_cost = self.train_cost(self.model.output_var, train_y_pred).astype(floatX)
        gparams = T.grad(train_cost, params)
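        # The learning method is assumed to return a plain list of
        # (shared_variable, update_expression) pairs, since layer.updates
        # and the stats updates below are concatenated onto it with +=.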
        train_updates = self.learning_method.update(deltas, params, gparams)

        #=================[ append updates from each layer ]==================#

        for i, layer in enumerate(self.model.layers):
            layer_name = "{}_{}".format(layer.__class__.__name__, i)
            if hasattr(layer, 'updates') and len(layer.updates) > 0:
                self.log.info("..{}: has shared variable updates".format(layer_name))
                train_updates += layer.updates

        #----[ append updates of stats from each layer to train updates ]-----#

        self.train_stats_names, train_stats_vars = split_list(train_layers_stats)
        train_stats_vars = [var.astype(floatX) for var in train_stats_vars]
        self.train_stats_shared = generate_shared_list(train_stats_vars)
        train_stats_updates = merge_lists(self.train_stats_shared, train_stats_vars)
        if self.verbose:
            train_updates += train_stats_updates

        #-------------------------[ train functions ]-------------------------#

        self.log.info('..begin compiling functions')
        self.training = theano.function(inputs=merge_var(self.model.input_var, self.model.output_var),
                                        outputs=train_cost,
                                        updates=train_updates,
                                        on_unused_input='warn',
                                        allow_input_downcast=True)

        self.log.info('..training function compiled')

        #=============================[ testing ]=============================#

        test_y_pred, test_layers_stats = self.model.test_fprop(self.model.input_var)

        #-----[ append updates of stats from each layer to test updates ]-----#

        self.test_stats_names, test_stats_vars = split_list(test_layers_stats)
        test_stats_vars = [var.astype(floatX) for var in test_stats_vars]
        self.test_stats_shared = generate_shared_list(test_stats_vars)
        test_stats_updates = []
        if self.verbose:
            test_stats_updates = merge_lists(self.test_stats_shared, test_stats_vars)

        #-------------------------[ test functions ]--------------------------#

        test_stopping_error = self.valid_cost(self.model.output_var, test_y_pred).astype(floatX)
        test_cost = self.train_cost(self.model.output_var, test_y_pred).astype(floatX)

        self.testing = theano.function(inputs=merge_var(self.model.input_var, self.model.output_var),
                                       outputs=(test_stopping_error, test_cost),
                                       updates=test_stats_updates,
                                       on_unused_input='warn',
                                       allow_input_downcast=True)

        self.log.info('..testing function compiled')
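
setup() calls several small helpers that are not shown on this page (shared_zeros, split_list, generate_shared_list, merge_lists, merge_var). The sketches below are reconstructions inferred from how the helpers are used above, not the project's actual definitions:

import numpy as np
import theano

floatX = theano.config.floatX

def shared_zeros(shape, name=None):
    # zero-initialised shared buffer matching a parameter's shape
    return theano.shared(np.zeros(shape, dtype=floatX), name=name)

def split_list(pairs):
    # [(name, var), ...] -> ([name, ...], [var, ...])
    if not pairs:
        return [], []
    names, variables = zip(*pairs)
    return list(names), list(variables)

def generate_shared_list(variables):
    # one shared sink per stat expression; assumes each stat is a scalar
    return [theano.shared(np.asarray(0.0, dtype=floatX)) for _ in variables]

def merge_lists(shared_vars, exprs):
    # pair each shared sink with the expression that should update it
    return list(zip(shared_vars, exprs))

def merge_var(*variables):
    # flatten single variables and lists of variables into one inputs list
    merged = []
    for v in variables:
        merged += list(v) if isinstance(v, (list, tuple)) else [v]
    return merged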