Esempio n. 1
def load(connection, source, ignore_missed=False):
    """
    Load and set parameters for layers from the
    specified source.

    Parameters
    ----------
    connection : list of layers or connection

    source : str or dict
        It can be path to the pickle file that stores
        parameters or dictionary that has key values that
        store layer name and values is a dictionary that
        stores parameter names and their values.

    ignore_missed : bool
        ``False`` means that error will be triggered in case
        if some of the layers doesn't have storage parameters
        in the specified source. Defaults to ``False``.

    Raises
    ------
    TypeError
        In case if source has invalid data type.
    """
    if isinstance(connection, BaseNetwork):
        connection = connection.connection

    if isinstance(source, six.string_types):
        # NOTE(review): pickle.load on an arbitrary file path executes
        # untrusted data — only load files from trusted sources.
        with open(source, 'rb') as f:
            data = pickle.load(f)

    elif isinstance(source, dict):
        data = source

    else:
        raise TypeError("Source type is unknown. Got {}, expected dict "
                        "or str".format(type(source)))

    for layer, attrname, _ in iter_parameters(connection):
        if layer.name not in data or attrname not in data[layer.name]:
            if ignore_missed:
                continue

            raise ValueError("Cannot load parameters from the specified "
                             "data source. Layer `{}` doesn't have "
                             "stored parameter `{}`."
                             "".format(layer.name, attrname))

        loaded_parameter = data[layer.name][attrname]

        # Parameters are stored as Theano shared variables; overwrite
        # the shared variable's value instead of rebinding the attribute.
        attrvalue = getattr(layer, attrname)
        attrvalue.set_value(asfloat(loaded_parameter))

    # We need to initalize connection, to make sure
    # that each layer will generate shared variables
    # and validate connections
    connection.initialize()
Esempio n. 2
def parameter_values(connection):
    """
    Iterate over all network's trainable parameters.

    Parameters
    ----------
    connection : layer, connection

    Returns
    -------
    list of Theano shared variables
        Network's trainable parameters.
    """
    parameters = []

    # iter_parameters yields (layer, attribute name, parameter) triples;
    # only the parameter (shared variable) itself is collected here.
    for _, _, parameter in iter_parameters(connection):
        parameters.append(parameter)

    return parameters
Esempio n. 3
def save(connection, filepath):
    """
    Save layer parameters in pickle file.

    Parameters
    ----------
    connection : network, list of layer or connection
        Connection that needs to be saved.

    filepath : str
        Path to the pickle file that will store
        network's parameters.
    """
    if isinstance(connection, BaseNetwork):
        connection = connection.connection

    # Nested mapping: layer name -> {parameter name -> numpy value}.
    data = defaultdict(dict)

    for layer, attrname, parameter in iter_parameters(connection):
        # get_value() extracts the numpy array from the Theano shared
        # variable so the pickle file stores plain arrays.
        data[layer.name][attrname] = parameter.get_value()

    with open(filepath, 'wb+') as f:
        pickle.dump(data, f)
Esempio n. 4
    def init_train_updates(self):
        """
        Build Theano update rules for one quasi-Newton training step.

        Approximates the inverse Hessian from parameter/gradient
        differences between epochs, computes a search direction, and
        finds the step length with a line search.

        Returns
        -------
        list of tuple
            Theano ``(shared_variable, new_value)`` update pairs.
        """
        network_input = self.variables.network_input
        network_output = self.variables.network_output
        inv_hessian = self.variables.inv_hessian
        prev_params = self.variables.prev_params
        prev_full_gradient = self.variables.prev_full_gradient

        params = parameter_values(self.connection)
        param_vector = parameters2vector(self)

        gradients = T.grad(self.variables.error_func, wrt=params)
        full_gradient = T.concatenate([grad.flatten() for grad in gradients])

        # On the first epoch there is no previous point to measure the
        # parameter/gradient deltas against, so keep the identity-like
        # initial approximation; afterwards refine it from the deltas.
        new_inv_hessian = ifelse(
            T.eq(self.variables.epoch, 1),
            inv_hessian,
            self.update_function(inv_hessian,
                                 param_vector - prev_params,
                                 full_gradient - prev_full_gradient))

        # Quasi-Newton search direction: -H^{-1} * gradient.
        param_delta = -new_inv_hessian.dot(full_gradient)
        layers_and_parameters = list(iter_parameters(self.layers))

        def prediction(step):
            updated_params = param_vector + step * param_delta

            # This trick allow us to replace shared variables
            # with theano variables and get output from the network
            start_pos = 0
            for layer, attrname, param in layers_and_parameters:
                end_pos = start_pos + param.size
                updated_param_value = T.reshape(
                    updated_params[start_pos:end_pos],
                    param.shape)
                setattr(layer, attrname, updated_param_value)
                start_pos = end_pos

            output = self.connection.output(network_input)

            # We need to replace back parameter to shared variable
            for layer, attrname, param in layers_and_parameters:
                setattr(layer, attrname, param)

            return output

        def phi(step):
            # Objective along the search direction.
            return self.error(network_output, prediction(step))

        def derphi(step):
            # Directional derivative of the objective w.r.t. the step.
            error_func = self.error(network_output, prediction(step))
            return T.grad(error_func, wrt=step)

        step = asfloat(line_search(phi, derphi))
        updated_params = param_vector + step * param_delta
        updates = setup_parameter_updates(params, updated_params)

        # Remember state needed to build the next epoch's deltas.
        updates.extend([
            (inv_hessian, new_inv_hessian),
            (prev_params, param_vector),
            (prev_full_gradient, full_gradient),
        ])

        return updates