def save(self, filename, start_layer=0, max_layer_num=-1, withfinal=True):
    """Serialize the network's weights and biases to a JSON file.

    Each hidden layer's weight matrix is rescaled by (1 - dropout factor)
    so the saved network can be applied at inference time without dropout:
    the first layer uses the input dropout factor, deeper layers use the
    dropout factor of the layer below.

    :param filename: path of the JSON file to write
    :param start_layer: index of the first hidden layer to save
    :param max_layer_num: one past the last layer to save; -1 means all
    :param withfinal: also save the final logistic-regression layer
    """
    if max_layer_num == -1:
        max_layer_num = self.n_layers

    nnet_dict = {}
    for idx in range(start_layer, max_layer_num):
        if idx == 0:
            scale = 1.0 - self.input_dropout_factor
        else:
            scale = 1.0 - self.dropout_factor[idx - 1]
        nnet_dict[str(idx) + ' W'] = _array2string(
            scale * self.layers[idx].params[0].get_value())
        nnet_dict[str(idx) + ' b'] = _array2string(
            self.layers[idx].params[1].get_value())

    if withfinal:
        # Output layer is scaled by the dropout factor of the last hidden layer.
        nnet_dict['logreg W'] = _array2string(
            (1.0 - self.dropout_factor[-1]) *
            self.logLayer.params[0].get_value())
        nnet_dict['logreg b'] = _array2string(
            self.logLayer.params[1].get_value())

    with open(filename, 'wb') as fp:
        json.dump(nnet_dict, fp, indent=2, sort_keys=True)
        fp.flush()
def save_cnn2dict(self):
    """Collect convolutional layer parameters into a dict of strings.

    Each 2-D filter slice W[out, in] is stored under the key
    'W <layer> <out> <in>'; each bias vector under 'b <layer>'.

    :returns: dict mapping parameter keys to stringified arrays
    """
    cnn_dict = {}
    for layer_idx in xrange(self.conv_layer_num):
        layer = self.conv_layers[layer_idx]
        shape = layer.filter_shape
        weights = layer.W.get_value()
        for out_idx in xrange(shape[0]):
            for in_idx in xrange(shape[1]):
                key = 'W ' + str(layer_idx) + ' ' + str(out_idx) + ' ' + str(in_idx)
                cnn_dict[key] = _array2string(weights[out_idx, in_idx])
        cnn_dict['b ' + str(layer_idx)] = _array2string(layer.b.get_value())
    return cnn_dict
def save_cnn2dict(self):
    """Collect 3-D convolutional layer parameters into a dict of strings.

    Each filter slice W[a, b, c] is stored under the key
    'W <layer> <a> <b> <c>'; each bias vector under 'b <layer>'.

    NOTE(review): an essentially identical definition of this method
    appears elsewhere in this file — confirm whether both are needed.

    :returns: dict mapping parameter keys to stringified arrays
    """
    cnn_dict = {}
    for layer_idx in xrange(self.conv_layer_num):
        layer = self.conv_layers[layer_idx]
        shape = layer.filter_shape
        weights = layer.W.get_value()
        for a in xrange(shape[0]):
            for b in xrange(shape[1]):
                for c in xrange(shape[2]):
                    key = 'W ' + str(layer_idx) + ' ' + str(a) + ' ' + str(b) + ' ' + str(c)
                    cnn_dict[key] = _array2string(weights[a, b, c])
        cnn_dict['b ' + str(layer_idx)] = _array2string(layer.b.get_value())
    return cnn_dict
def save_mlp2dict(self, withfinal=True, max_layer_num=-1):
    """Collect MLP layer parameters into a dict of strings.

    Hidden layer i is stored under '<i> W' / '<i> b'; the final
    logistic-regression layer, if requested, under 'logreg W' / 'logreg b'.
    No dropout rescaling is applied here.

    :param withfinal: also include the final logistic-regression layer
    :param max_layer_num: number of hidden layers to save; -1 means all
    :returns: dict mapping parameter keys to stringified arrays
    """
    if max_layer_num == -1:
        max_layer_num = self.hidden_layer_num

    mlp_dict = {}
    for idx in range(max_layer_num):
        weight, bias = self.mlp_layers[idx].params[0], self.mlp_layers[idx].params[1]
        mlp_dict[str(idx) + ' W'] = _array2string(weight.get_value())
        mlp_dict[str(idx) + ' b'] = _array2string(bias.get_value())

    if withfinal:
        mlp_dict['logreg W'] = _array2string(self.logLayer.params[0].get_value())
        mlp_dict['logreg b'] = _array2string(self.logLayer.params[1].get_value())
    return mlp_dict
def save_cnn2dict(self):
    """Dump 3-D convolution filters and biases into a string-valued dict.

    Keys follow the scheme 'W <layer> <i0> <i1> <i2>' for each filter
    slice and 'b <layer>' for each bias vector.

    NOTE(review): an essentially identical definition of this method
    appears elsewhere in this file — confirm whether both are needed.

    :returns: dict mapping parameter keys to stringified arrays
    """
    cnn_dict = {}
    layer_count = self.conv_layer_num
    for i in xrange(layer_count):
        current = self.conv_layers[i]
        d0, d1, d2 = current.filter_shape[0], current.filter_shape[1], current.filter_shape[2]
        w_values = current.W.get_value()
        for i0 in xrange(d0):
            for i1 in xrange(d1):
                for i2 in xrange(d2):
                    key = ' '.join(['W', str(i), str(i0), str(i1), str(i2)])
                    cnn_dict[key] = _array2string(w_values[i0, i1, i2])
        cnn_dict['b ' + str(i)] = _array2string(current.b.get_value())
    return cnn_dict
def save_mlp2dict(self, withfinal=True, max_layer_num=-1):
    """Collect dropout-trained MLP parameters into a dict of strings.

    Weight matrices are rescaled by (1 - dropout factor) so the network
    can be used at inference time without dropout: the first hidden layer
    uses the input dropout factor, deeper layers the dropout factor of the
    layer below, and the final logreg layer the last hidden layer's factor.

    :param withfinal: also include the final logistic-regression layer
    :param max_layer_num: number of hidden layers to save; -1 means all
    :returns: dict mapping parameter keys to stringified arrays
    """
    if max_layer_num == -1:
        max_layer_num = self.hidden_layer_num

    mlp_dict = {}
    for idx in range(max_layer_num):
        if idx == 0:
            keep = 1.0 - self.input_dropout_factor
        else:
            keep = 1.0 - self.dropout_factor[idx - 1]
        mlp_dict[str(idx) + ' W'] = _array2string(
            keep * self.mlp_layers[idx].params[0].get_value())
        mlp_dict[str(idx) + ' b'] = _array2string(
            self.mlp_layers[idx].params[1].get_value())

    if withfinal:
        mlp_dict['logreg W'] = _array2string(
            (1.0 - self.dropout_factor[-1]) * self.logLayer.params[0].get_value())
        mlp_dict['logreg b'] = _array2string(self.logLayer.params[1].get_value())
    return mlp_dict