Example #1
    # Requires: import numpy as np; from scipy import stats;
    # from caffe2.python import brew, model_helper, workspace
    def test_fc_explicit_param_names(self):
        # Register the module-level helper (defined outside this snippet)
        # so that it is reachable as brew.fc_explicit_param_names below.
        brew.Register(fc_explicit_param_names)
        model = model_helper.ModelHelper(name="test_model")
        dim_in = 10
        dim_out = 100
        weight_name = "test_weight_name"
        bias_name = "test_bias_name"
        inputs_name = "test_inputs"
        output_name = "test_output"

        # Draw a random input row and stage it in the workspace.
        input_distribution = stats.norm()
        inputs = input_distribution.rvs(size=(1, dim_in)).astype(np.float32)
        workspace.FeedBlob(inputs_name, inputs)

        weights = np.random.normal(size=(dim_out, dim_in)).astype(np.float32)
        # np.transpose is a no-op on a 1-D array, so build the bias directly.
        bias = np.random.normal(size=(dim_out,)).astype(np.float32)

        brew.fc_explicit_param_names(
            model,
            inputs_name,
            output_name,
            dim_in=dim_in,
            dim_out=dim_out,
            bias_name=bias_name,
            weight_name=weight_name,
            weight_init=("GivenTensorFill", {
                "values": weights
            }),
            bias_init=("GivenTensorFill", {
                "values": bias
            }),
        )

        # Run the init net once to materialize the GivenTensorFill parameters.
        workspace.RunNetOnce(model.param_init_net)

        # Exercise the async_scheduling executor instead of the default one.
        model.net.Proto().type = "async_scheduling"
        workspace.CreateNet(model.net)

        workspace.RunNet(model.net)

        # FC computes inputs * W^T + bias; with GivenTensorFill the match is exact.
        expected_output = np.dot(inputs, np.transpose(weights)) + bias
        outputs_diff = expected_output - workspace.FetchBlob(output_name)

        self.assertEqual(np.linalg.norm(outputs_diff), 0)
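
Note: the registered helper itself does not appear on this page. Below is a
minimal sketch of what fc_explicit_param_names could look like, assuming it
mirrors brew.fc but creates its parameters under the caller-supplied names
(Initializer and ParameterTags are the stock Caffe2 modeling utilities; the
exact signature here is an assumption):

from caffe2.python.modeling.initializers import Initializer
from caffe2.python.modeling.parameter_info import ParameterTags

def fc_explicit_param_names(
    model, blob_in, blob_out, dim_in, dim_out,
    weight_init, bias_init, weight_name, bias_name, **kwargs
):
    # Unlike brew.fc, which auto-derives "<blob_out>_w" / "<blob_out>_b",
    # create the parameters under the names the caller chose.
    weight = model.create_param(
        param_name=weight_name,
        shape=[dim_out, dim_in],
        initializer=Initializer(weight_init[0], **weight_init[1]),
        tags=ParameterTags.WEIGHT,
    )
    bias = model.create_param(
        param_name=bias_name,
        shape=[dim_out],
        initializer=Initializer(bias_init[0], **bias_init[1]),
        tags=ParameterTags.BIAS,
    )
    return model.net.FC([blob_in, weight, bias], blob_out, **kwargs)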
Example #2
from typing import List

import six
from caffe2.python import brew, workspace
from caffe2.python.model_helper import ModelHelper


def MakeForwardPassOps(
    model: ModelHelper,
    model_id: str,
    input_blob: str,
    output_blob: str,
    weights: List[str],
    biases: List[str],
    activations: List[str],
    layers: List[int],
    dropout_ratio: float,
    is_test: bool = False,
) -> None:
    """
    Performs a forward pass of a multi-layer perceptron.

    :param model: The ModelHelper object whose net will execute this pass
    :param model_id: A unique string for this model that is used to hold
        activation levels
    :param input_blob: The blob containing the input data
    :param output_blob: The blob where the output data will be placed
    :param weights: A list of blobs containing the weights
    :param biases: A list of blobs containing the bias nodes
    :param activations: A list of strings describing the activation functions.
        Currently only 'linear' and 'relu' are supported.
    :param layers: A list of integers describing the layer sizes
    :param dropout_ratio: The fraction of nodes to drop out during training.
    :param is_test: Indicates whether or not this forward pass should skip
        node dropout.
    """
    model.net.NanCheck([input_blob], [input_blob])
    num_layer_connections = len(layers) - 1
    for x in six.moves.range(num_layer_connections):
        # Chain intermediate activations through per-model state blobs.
        if x == 0:
            inputs = input_blob
        else:
            inputs = "ModelState_" + str(x) + "_" + model_id
        if x + 1 == num_layer_connections:
            outputs = output_blob
        else:
            outputs = "ModelState_" + str(x + 1) + "_" + model_id

        activation = activations[x]
        dim_in = layers[x]
        dim_out = layers[x + 1]
        weight_name = weights[x]
        bias_name = biases[x]

        # Re-feed the current workspace values of the named parameters so the
        # rebuilt net starts from the existing weights.
        brew.fc_explicit_param_names(  # type: ignore
            model,
            inputs,
            outputs,
            dim_in=dim_in,
            dim_out=dim_out,
            bias_name=bias_name,
            weight_name=weight_name,
            weight_init=("GivenTensorFill", {
                'values': workspace.FetchBlob(weight_name)
            }),
            bias_init=("GivenTensorFill", {
                'values': workspace.FetchBlob(bias_name)
            }))

        if activation == 'relu':
            brew.relu(model, outputs, outputs)
        elif activation == 'linear':
            pass
        else:
            raise ValueError("Unknown activation function: " + activation)

        if dropout_ratio > 0.01:
            brew.dropout(model,
                         outputs,
                         outputs,
                         ratio=dropout_ratio,
                         is_test=is_test)

    model.net.NanCheck([output_blob], [output_blob])
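
As a usage note (not part of the original example), a minimal driver for
MakeForwardPassOps might look like the following; the blob names, layer
sizes, and model_id are illustrative, and the parameter blobs must be fed
before the call because the helper snapshots them with workspace.FetchBlob
at graph-construction time:

import numpy as np
from caffe2.python import brew, model_helper, workspace

brew.Register(fc_explicit_param_names)  # as in Example #1

layers = [4, 8, 2]  # input width, one hidden layer, output width
weights, biases = [], []
for i in range(len(layers) - 1):
    w, b = "w_%d" % i, "b_%d" % i
    workspace.FeedBlob(w, np.random.randn(layers[i + 1], layers[i]).astype(np.float32))
    workspace.FeedBlob(b, np.random.randn(layers[i + 1]).astype(np.float32))
    weights.append(w)
    biases.append(b)

workspace.FeedBlob("input", np.random.randn(1, layers[0]).astype(np.float32))
model = model_helper.ModelHelper(name="mlp_demo")
MakeForwardPassOps(
    model,
    model_id="demo",
    input_blob="input",
    output_blob="output",
    weights=weights,
    biases=biases,
    activations=["relu", "linear"],
    layers=layers,
    dropout_ratio=0.0,
    is_test=True,
)
workspace.RunNetOnce(model.param_init_net)
workspace.RunNetOnce(model.net)
print(workspace.FetchBlob("output"))  # shape (1, 2)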
Example #3
    def make_forward_pass_ops(
        self,
        model: ModelHelper,
        input_blob: str,
        output_blob: str,
        is_test: bool = False,
    ) -> None:
        """
        Performs a forward pass of a multi-layer perceptron.

        :param model: The ModelHelper object whose net will execute this pass
        :param input_blob: The blob containing the input data
        :param output_blob: The blob where the output data will be placed
        :param is_test: Indicates whether or not this forward pass should skip
            node dropout.
        """
        model.net.NanCheck([input_blob], [input_blob])
        # Pre-compute the blob chain: input -> per-layer state blobs -> output.
        model_states = []
        num_layer_connections = len(self.layers) - 1
        for x in range(num_layer_connections + 1):
            if x == 0:
                model_states.append(input_blob)
            elif x == num_layer_connections:
                model_states.append(output_blob)
            else:
                model_states.append(
                    model.net.NextBlob("ModelState_" + str(x) + "_" +
                                       self.model_id))
        for x in range(num_layer_connections):
            inputs = model_states[x]
            outputs = model_states[x + 1]

            activation = self.activations[x]
            dim_in = self.layers[x]
            dim_out = self.layers[x + 1]
            weight_name = self.weights[x]
            bias_name = self.biases[x]

            brew.fc_explicit_param_names(  # type: ignore
                model,
                inputs,
                outputs,
                dim_in=dim_in,
                dim_out=dim_out,
                bias_name=bias_name,
                weight_name=weight_name,
                weight_init=(
                    "GivenTensorFill",
                    {
                        "values": workspace.FetchBlob(weight_name)
                    },
                ),
                bias_init=(
                    "GivenTensorFill",
                    {
                        "values": workspace.FetchBlob(bias_name)
                    },
                ),
            )

            if activation == "relu":
                brew.relu(model, outputs, outputs)
            elif activation == "linear":
                pass
            else:
                raise ValueError("Unknown activation function: " + activation)

            if self.dropout_ratio > 0.01:
                brew.dropout(model,
                             outputs,
                             outputs,
                             ratio=self.dropout_ratio,
                             is_test=is_test)

        model.net.NanCheck([output_blob], [output_blob])
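
Example #3 reads its configuration from attributes of the enclosing class,
which is not shown on this page. A hypothetical constructor that would
satisfy those references (the class and blob names here are assumptions):

class FullyConnectedDNN(object):
    def __init__(self, layers, activations, model_id, dropout_ratio=0.0):
        assert len(activations) == len(layers) - 1
        self.layers = layers            # e.g. [4, 8, 2]
        self.activations = activations  # e.g. ["relu", "linear"]
        self.model_id = model_id
        self.dropout_ratio = dropout_ratio
        # One named parameter blob per layer connection. The blobs must
        # already exist in the workspace when make_forward_pass_ops runs,
        # since it re-feeds them via workspace.FetchBlob at build time.
        self.weights = ["W_%d_%s" % (i, model_id) for i in range(len(layers) - 1)]
        self.biases = ["b_%d_%s" % (i, model_id) for i in range(len(layers) - 1)]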