Example 1
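A minimal fully connected layering(): each hidden layer is a tfl.Affine followed by an activation, a final affine serves as the output layer, and tfl.SoftmaxWithCrossEntropyLoss ties the output to the target node.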
    def layering(self):
        input_node = self.input_node
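        # Hidden layers: each is an affine transform followed by an activation,
        # with input_node threading the previous layer's output forward.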
        for idx in range(self.hidden_layer_num):
            self.layers['affine' + str(idx)] = tfl.Affine(
                self.params['W' + str(idx)],
                input_node,
                self.params['b' + str(idx)],
                name='affine' + str(idx),
                graph=self)
            self.layers['activation' + str(idx)] = self.activator(
                self.layers['affine' + str(idx)],
                name='activation' + str(idx),
                graph=self)
            input_node = self.layers['activation' + str(idx)]

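        # Output layer: a final affine with no activation; the softmax is applied
        # inside the cross-entropy loss node below.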
        idx = self.hidden_layer_num
        self.layers['affine' + str(idx)] = tfl.Affine(
            self.params['W' + str(idx)],
            self.layers['activation' + str(idx - 1)],
            self.params['b' + str(idx)],
            name='affine' + str(idx),
            graph=self)
        self.output = self.layers['affine' + str(idx)]

        self.error = tfl.SoftmaxWithCrossEntropyLoss(self.output,
                                                     self.target_node,
                                                     name="SCEL",
                                                     graph=self)
Example 2
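The same fully connected layering, extended with a refitting flag: on the first build it also initializes per-layer lists that will collect the mean, variance, skewness, and kurtosis of each affine and activation output.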
    def layering(self, refitting=False):
        input_node = self.input_node

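        # First build only: set up per-layer containers for the output statistics
        # (mean, variance, skewness, kurtosis), keyed by layer type and layer index.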
        if not refitting:
            self.output_mean_list['affine'] = {}
            self.output_variance_list['affine'] = {}
            self.output_skewness_list['affine'] = {}
            self.output_kurtosis_list['affine'] = {}

            self.output_mean_list['activation'] = {}
            self.output_variance_list['activation'] = {}
            self.output_skewness_list['activation'] = {}
            self.output_kurtosis_list['activation'] = {}

        for idx in range(self.hidden_layer_num):
            self.layers['affine' + str(idx)] = tfl.Affine(
                self.params['W' + str(idx)],
                input_node,
                self.params['b' + str(idx)],
                name='affine' + str(idx),
                graph=self)
            self.layers['activation' + str(idx)] = self.activator(
                self.layers['affine' + str(idx)],
                name='activation' + str(idx),
                graph=self)
            input_node = self.layers['activation' + str(idx)]

            if not refitting:
                self.output_mean_list['affine'][idx] = []
                self.output_variance_list['affine'][idx] = []
                self.output_skewness_list['affine'][idx] = []
                self.output_kurtosis_list['affine'][idx] = []

                self.output_mean_list['activation'][idx] = []
                self.output_variance_list['activation'][idx] = []
                self.output_skewness_list['activation'][idx] = []
                self.output_kurtosis_list['activation'][idx] = []

        idx = self.hidden_layer_num
        self.layers['affine' + str(idx)] = tfl.Affine(
            self.params['W' + str(idx)],
            self.layers['activation' + str(idx - 1)],
            self.params['b' + str(idx)],
            name='affine' + str(idx),
            graph=self)
        self.output = self.layers['affine' + str(idx)]

        if not refitting:
            self.output_mean_list['affine'][idx] = []
            self.output_variance_list['affine'][idx] = []
            self.output_skewness_list['affine'][idx] = []
            self.output_kurtosis_list['affine'][idx] = []

        self.error = tfl.SoftmaxWithCrossEntropyLoss(self.output,
                                                     self.target_node,
                                                     name="SCEL",
                                                     graph=self)
Example 3
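A convolutional variant: cnn_param_list drives a stack of tfl.Convolution (plus activation) and tfl.Pooling layers, tfl.Reshape flattens the result, and two fully connected layers complete the network.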
    def layering(self, refitting=False):
        input_node = self.input_node

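        # Convolution/pooling stack, driven by the layer specs in cnn_param_list.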
        for idx, cnn_param in enumerate(self.cnn_param_list):
            if cnn_param['type'] == 'conv':
                self.layers['conv' + str(idx)] = tfl.Convolution(
                    w=self.params['W' + str(idx)],
                    x=input_node,
                    b=self.params['b' + str(idx)],
                    pad=cnn_param['pad'],
                    stride=cnn_param['stride'],
                    name='conv' + str(idx),
                    graph=self)
                self.layers['activation' + str(idx)] = self.activator(
                    u=self.layers['conv' + str(idx)],
                    name='activation' + str(idx),
                    graph=self)
                input_node = self.layers['activation' + str(idx)]
            elif cnn_param['type'] == 'pooling':
                self.layers['pooling' + str(idx)] = tfl.Pooling(
                    w=self.params['W' + str(idx)],
                    x=input_node,
                    pad=cnn_param['pad'],
                    stride=cnn_param['stride'],
                    name='pooling' + str(idx),
                    graph=self)
                input_node = self.layers['pooling' + str(idx)]

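        # Flatten the last feature map so it can feed the fully connected head.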
        self.layers['reshape'] = tfl.Reshape(
            u=input_node,
            p_shape=self.shape_before_fc,
            n_shape=self.num_neurons_flatten_for_fc,
            name='reshape',
            graph=self)

        idx += 1
        self.layers['affine' + str(idx)] = tfl.Affine(
            w=self.params['W' + str(idx)],
            x=self.layers['reshape'],
            b=self.params['b' + str(idx)],
            name='affine' + str(idx),
            graph=self)
        self.layers['activation' + str(idx)] = self.activator(
            u=self.layers['affine' + str(idx)],
            name='activation' + str(idx),
            graph=self)

        idx += 1
        self.layers['affine' + str(idx)] = tfl.Affine(
            w=self.params['W' + str(idx)],
            x=self.layers['activation' + str(idx - 1)],
            b=self.params['b' + str(idx)],
            name='affine' + str(idx),
            graph=self)

        self.output = self.layers['affine' + str(idx)]

        self.error = tfl.SoftmaxWithCrossEntropyLoss(self.output,
                                                     self.target_node,
                                                     name="SCEL",
                                                     graph=self)
Example 4
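The most complete variant: the convolutional stack of Example 3 with optional tfl.BatchNormalization and tfl.Dropout around each block, plus the same output-statistics bookkeeping as Example 2.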
    def layering(self, refitting=False):
        input_node = self.input_node

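        # First build only: set up per-layer containers for the output statistics
        # (mean, variance, skewness, kurtosis), keyed by layer type and layer index.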
        if not refitting:
            self.output_mean_list['conv'] = {}
            self.output_variance_list['conv'] = {}
            self.output_skewness_list['conv'] = {}
            self.output_kurtosis_list['conv'] = {}

            self.output_mean_list['pooling'] = {}
            self.output_variance_list['pooling'] = {}
            self.output_skewness_list['pooling'] = {}
            self.output_kurtosis_list['pooling'] = {}

            self.output_mean_list['affine'] = {}
            self.output_variance_list['affine'] = {}
            self.output_skewness_list['affine'] = {}
            self.output_kurtosis_list['affine'] = {}

            self.output_mean_list['activation'] = {}
            self.output_variance_list['activation'] = {}
            self.output_skewness_list['activation'] = {}
            self.output_kurtosis_list['activation'] = {}

        for idx, cnn_param in enumerate(self.cnn_param_list):
            if cnn_param['type'] == 'conv':
                self.layers['conv' + str(idx)] = tfl.Convolution(
                    w=self.params['W' + str(idx)],
                    x=input_node,
                    b=self.params['b' + str(idx)],
                    pad=cnn_param['pad'],
                    stride=cnn_param['stride'],
                    name='conv' + str(idx),
                    graph=self
                )

                if self.use_batch_normalization:
                    self.layers['batch_normal' + str(idx)] = tfl.BatchNormalization(
                        x=self.layers['conv' + str(idx)],
                        gamma=self.params['gamma' + str(idx)],
                        beta=self.params['beta' + str(idx)],
                        running_mean=self.params['running_mean' + str(idx)],
                        running_var=self.params['running_var' + str(idx)],
                        name='batch_normal' + str(idx),
                        graph=self
                    )
                    next_input_node = self.layers['batch_normal' + str(idx)]
                else:
                    next_input_node = self.layers['conv' + str(idx)]

                self.layers['activation' + str(idx)] = self.activator(
                    u=next_input_node,
                    name='activation' + str(idx),
                    graph=self
                )

                if self.use_dropout:
                    self.layers['dropout' + str(idx)] = tfl.Dropout(
                        x=self.layers['activation' + str(idx)],
                        dropout_ratio=self.dropout_ratio_list[idx],
                        name='dropout' + str(idx),
                        graph=self
                    )
                    input_node = self.layers['dropout' + str(idx)]
                else:
                    input_node = self.layers['activation' + str(idx)]

                if not refitting:
                    self.output_mean_list['conv'][idx] = []
                    self.output_variance_list['conv'][idx] = []
                    self.output_skewness_list['conv'][idx] = []
                    self.output_kurtosis_list['conv'][idx] = []

                    self.output_mean_list['activation'][idx] = []
                    self.output_variance_list['activation'][idx] = []
                    self.output_skewness_list['activation'][idx] = []
                    self.output_kurtosis_list['activation'][idx] = []

            elif cnn_param['type'] == 'pooling':
                self.layers['pooling' + str(idx)] = tfl.Pooling(
                    x=input_node,
                    filter_h=cnn_param['filter_h'],
                    filter_w=cnn_param['filter_w'],
                    stride=cnn_param['stride'],
                    name='pooling' + str(idx),
                    graph=self
                )
                input_node = self.layers['pooling' + str(idx)]

                if not refitting:
                    self.output_mean_list['pooling'][idx] = []
                    self.output_variance_list['pooling'][idx] = []
                    self.output_skewness_list['pooling'][idx] = []
                    self.output_kurtosis_list['pooling'][idx] = []

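        # Flatten the last feature map so it can feed the fully connected head.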
        self.layers['reshape'] = tfl.Reshape(
            u=input_node,
            p_shape=self.shape_before_fc,
            n_shape=self.num_neurons_flatten_for_fc,
            name='reshape',
            graph=self
        )

        idx += 1
        self.layers['affine' + str(idx)] = tfl.Affine(
            w=self.params['W' + str(idx)],
            x=self.layers['reshape'],
            b=self.params['b' + str(idx)],
            name='affine' + str(idx),
            graph=self
        )

        if self.use_batch_normalization:
            self.layers['batch_normal' + str(idx)] = tfl.BatchNormalization(
                x=self.layers['affine' + str(idx)],
                gamma=self.params['gamma' + str(idx)],
                beta=self.params['beta' + str(idx)],
                running_mean=self.params['running_mean' + str(idx)],
                running_var=self.params['running_var' + str(idx)],
                name='batch_normal' + str(idx),
                graph=self
            )
            next_input_node = self.layers['batch_normal' + str(idx)]
        else:
            next_input_node = self.layers['affine' + str(idx)]

        self.layers['activation' + str(idx)] = self.activator(
            u=next_input_node,
            name='activation' + str(idx),
            graph=self
        )

        if self.use_dropout:
            self.layers['dropout' + str(idx)] = tfl.Dropout(
                x=self.layers['activation' + str(idx)],
                dropout_ratio=self.dropout_ratio_list[idx],
                name='dropout' + str(idx),
                graph=self
            )
            input_node = self.layers['dropout' + str(idx)]
        else:
            input_node = self.layers['activation' + str(idx)]

        if not refitting:
            self.output_mean_list['affine'][idx] = []
            self.output_variance_list['affine'][idx] = []
            self.output_skewness_list['affine'][idx] = []
            self.output_kurtosis_list['affine'][idx] = []

            self.output_mean_list['activation'][idx] = []
            self.output_variance_list['activation'][idx] = []
            self.output_skewness_list['activation'][idx] = []
            self.output_kurtosis_list['activation'][idx] = []

        idx += 1
        self.layers['affine' + str(idx)] = tfl.Affine(
            w=self.params['W' + str(idx)],
            x=input_node,
            b=self.params['b' + str(idx)],
            name='affine' + str(idx),
            graph=self
        )

        self.last_layer_idx = idx

        if not refitting:
            self.output_mean_list['affine'][idx] = []
            self.output_variance_list['affine'][idx] = []
            self.output_skewness_list['affine'][idx] = []
            self.output_kurtosis_list['affine'][idx] = []

        self.output = self.layers['affine' + str(idx)]

        self.error = tfl.SoftmaxWithCrossEntropyLoss(self.output, self.target_node, name="SCEL", graph=self)
Example 5
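The fully connected network of Example 2 with the same optional batch normalization and dropout hooks as Example 4.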
    def layering(self, refitting=False):
        input_node = self.input_node

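        # First build only: set up per-layer containers for the output statistics
        # (mean, variance, skewness, kurtosis), keyed by layer type and layer index.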
        if not refitting:
            self.output_mean_list['affine'] = {}
            self.output_variance_list['affine'] = {}
            self.output_skewness_list['affine'] = {}
            self.output_kurtosis_list['affine'] = {}

            self.output_mean_list['activation'] = {}
            self.output_variance_list['activation'] = {}
            self.output_skewness_list['activation'] = {}
            self.output_kurtosis_list['activation'] = {}

        for idx in range(self.hidden_layer_num):
            self.layers['affine' + str(idx)] = tfl.Affine(
                self.params['W' + str(idx)],
                input_node,
                self.params['b' + str(idx)],
                name='affine' + str(idx),
                graph=self
            )

            if self.use_batch_normalization:
                self.layers['batch_normal' + str(idx)] = tfl.BatchNormalization(
                    x=self.layers['affine' + str(idx)],
                    gamma=self.params['gamma' + str(idx)],
                    beta=self.params['beta' + str(idx)],
                    running_mean=self.params['running_mean' + str(idx)],
                    running_var=self.params['running_var' + str(idx)],
                    name='batch_normal' + str(idx),
                    graph=self
                )
                next_input_node = self.layers['batch_normal' + str(idx)]
            else:
                next_input_node = self.layers['affine' + str(idx)]

            self.layers['activation' + str(idx)] = self.activator(
                next_input_node,
                name='activation' + str(idx),
                graph=self
            )

            if self.use_dropout:
                self.layers['dropout' + str(idx)] = tfl.Dropout(
                    x=self.layers['activation' + str(idx)],
                    dropout_ratio=self.dropout_ratio_list[idx],
                    name='dropout' + str(idx),
                    graph=self
                )
                input_node = self.layers['dropout' + str(idx)]
            else:
                input_node = self.layers['activation' + str(idx)]

            if not refitting:
                self.output_mean_list['affine'][idx] = []
                self.output_variance_list['affine'][idx] = []
                self.output_skewness_list['affine'][idx] = []
                self.output_kurtosis_list['affine'][idx] = []

                self.output_mean_list['activation'][idx] = []
                self.output_variance_list['activation'][idx] = []
                self.output_skewness_list['activation'][idx] = []
                self.output_kurtosis_list['activation'][idx] = []

        idx = self.hidden_layer_num

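        # The output layer reads from the last dropout node when dropout is enabled,
        # otherwise from the last hidden activation.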
        if self.use_dropout:
            input_node = self.layers['dropout' + str(idx - 1)]
        else:
            input_node = self.layers['activation' + str(idx - 1)]

        self.layers['affine' + str(idx)] = tfl.Affine(
            self.params['W' + str(idx)],
            input_node,
            self.params['b' + str(idx)],
            name='affine' + str(idx),
            graph=self
        )

        if not refitting:
            self.output_mean_list['affine'][idx] = []
            self.output_variance_list['affine'][idx] = []
            self.output_skewness_list['affine'][idx] = []
            self.output_kurtosis_list['affine'][idx] = []

        self.output = self.layers['affine' + str(idx)]

        self.error = tfl.SoftmaxWithCrossEntropyLoss(self.output, self.target_node, name="SCEL", graph=self)