Example #1
 def make_full_connected_layer(self, key, input, weight_shape, bias_shape):
     with tf.name_scope(key):
         # Weights
         if 'type' in self.config[key]['weight']:
             weight_type = self.config[key]['weight']['type']
         else:
             weight_type = 'random_normal'
         if 'name' in self.config[key]['weight']:
             weight_name = self.config[key]['weight']['name']
         else:
             weight_name = 'Weight_' + key
         self.model_params['weights'][key] = \
             Weights.define(weight_shape, weight_type=weight_type, weight_name=weight_name)
         # Bias
         if 'type' in self.config[key]['bias']:
             bias_type = self.config[key]['bias']['type']
         else:
             bias_type = 'random_normal'
         if 'name' in self.config[key]['bias']:
             bias_name = self.config[key]['bias']['name']
         else:
             bias_name = 'Bias_' + key
         self.model_params['bias'][key] = \
             Bias.define(bias_shape, bias_type=bias_type, bias_name=bias_name)
         # Layer
         if 'name' in self.config[key]:
             layer_name = self.config[key]['name']
         else:
             layer_name = key
         fc_layer = mlp_layer(input,
                              self.model_params['weights'][key],
                              self.model_params['bias'][key],
                              layer_name=layer_name)
         self.model_params['layers'][key] = fc_layer
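
The configuration this method reads is a nested dict keyed by the layer name; the 'weight' and 'bias' sub-dicts must exist, while 'type' and 'name' inside them are optional and fall back to 'random_normal' and generated names. A minimal sketch of one entry (the concrete values are assumptions, not taken from the snippet):

 config = {
     'layer_1': {
         'name': 'Hidden_1',                    # optional, defaults to the key itself
         'weight': {'type': 'random_normal',    # optional, defaults to 'random_normal'
                    'name': 'Weight_layer_1'},  # optional, defaults to 'Weight_' + key
         'bias': {'type': 'random_normal',
                  'name': 'Bias_layer_1'},
     },
 }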
Example #2
 def make_predictions(self):
     with tf.device(self.device):
         with tf.name_scope('Predictions'):
             value = mlp_layer(self.predict_params['input'], self.model_params['weight'],
                               self.model_params['bias'],
                               activation_type=self.config['activation_fn'])
             # Class probabilities, the predicted class index, and its one-hot encoding
             self.params['logits'] = tf.nn.softmax(value)
             self.predict_params['predict_class'] = \
                 tf.argmax(self.params['logits'], axis=1, name='predict_class')
             self.predict_params['predict_one_hot'] = \
                 tf.one_hot(self.predict_params['predict_class'],
                            depth=self.num_classes, on_value=1.0,
                            off_value=0.0, axis=-1)
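
For a single example with three classes, the tensors built here relate roughly as follows (the numbers are illustrative only, not from the snippet):

 # value (pre-softmax scores):        [ 2.0,  0.5, -1.0]
 # params['logits'] (softmax):        [ 0.78, 0.18, 0.04]
 # predict_params['predict_class']:   0
 # predict_params['predict_one_hot']: [ 1.0,  0.0,  0.0]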
Example #3
 def make_output_layer(self):
     prev_layer_key = 'layer_' + str(self.num_layers)
     layer_key = 'output_layer'
     prev_layer = self.model_params['layers'][prev_layer_key]
     if 'name' in self.config[layer_key]:
         layer_name = self.config[layer_key]['name']
     else:
         layer_name = 'Output_Layer'
     if 'activation_fn' in self.config[layer_key]:
         activation_fn = self.config[layer_key]['activation_fn']
     else:
         activation_fn = 'sigmoid'
     # Input width for the output layer: the last hidden layer's size,
     # or the raw feature count when no hidden layers are configured
     if prev_layer_key in self.config:
         in_units = self.config[prev_layer_key]['num_nodes']
     else:
         in_units = self.num_features
     weight_shape = (in_units, self.num_classes)
     bias_shape = (self.num_classes, )
     if 'type' in self.config[layer_key]['weight']:
         weight_type = self.config[layer_key]['weight']['type']
     else:
         weight_type = 'random_normal'
     if 'name' in self.config[layer_key]['weight']:
         weight_name = self.config[layer_key]['weight']['name']
     else:
         weight_name = 'Weight_output'
     if 'type' in self.config[layer_key]['bias']:
         bias_type = self.config[layer_key]['bias']['type']
     else:
         bias_type = 'random_normal'
     if 'name' in self.config[layer_key]['bias']:
         bias_name = self.config[layer_key]['bias']['name']
     else:
         bias_name = 'Bias_output'
     weight = Weights.define(weight_shape,
                             weight_type=weight_type,
                             weight_name=weight_name)
     bias = Bias.define(bias_shape,
                        bias_type=bias_type,
                        bias_name=bias_name)
     self.model_params['weights']['output_layer'] = weight
     self.model_params['bias']['output_layer'] = bias
     self.model_params['layers']['output_layer'] = \
         mlp_layer(prev_layer, weight, bias, activation_type=activation_fn, layer_name=layer_name)
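
As a concrete shape example (the sizes are assumed): with a last hidden layer of 128 nodes and 10 classes, the output layer is built with weight_shape == (128, 10) and bias_shape == (10,); when no hidden layer is configured, in_units falls back to num_features instead.

 # in_units     = 128        # config[prev_layer_key]['num_nodes'], or num_features
 # weight_shape = (128, 10)  # (in_units, num_classes)
 # bias_shape   = (10,)      # (num_classes,)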
Example #4
 def create_graph(self, num_classes, num_features):
     start = time.time()
     self.num_features = num_features
     self.num_classes = num_classes
     self.graph = tf.Graph()
     # Step 1: Creating placeholders for inputs
     self.make_placeholders_for_inputs(self.graph)
     # Step 2: Creating initial parameters for the variables
     self.make_parameters(self.graph)
     # Step 3: Make predictions for the data
     final_layer = mlp_layer(self.predict_params['input'], self.model_params['weight'],
                             self.model_params['bias'],
                             activation_type=self.config['activation_fn'])
     self.make_predictions(self.graph, final_layer, self.num_classes)
     # Step 4: Perform optimization operation
     self.make_optimization(self.graph, final_layer)
     # Step 5: Calculate accuracies
     self.make_accuracy(self.graph)
     self.start_session(self.graph)
     end = time.time()
     print('Graph completed in {:.2f} seconds'.format(end - start))
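
A minimal usage sketch, assuming the class is instantiated with its layer config and exposes the methods above (the class name MLPModel and the MNIST-like sizes are assumptions, not part of the snippet):

 model = MLPModel(config)
 model.create_graph(num_classes=10, num_features=784)
 # model.graph now holds the input placeholders, parameters, prediction,
 # optimization and accuracy ops, and start_session() has opened a session on it.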
Example #5
 def make_layers(self, number_of_layers=1):
     # Chain the configured hidden layers, feeding each layer's output into the next
     prev_layer = self.predict_params['input']
     for layer_no in range(1, number_of_layers + 1):
         weight_key = 'weight_' + str(layer_no)
         bias_key = 'bias_' + str(layer_no)
         layer_key = 'layer_' + str(layer_no)
         if 'name' in self.config[layer_key]:
             layer_name = self.config[layer_key]['name']
         else:
             layer_name = 'Layer_' + str(layer_no)
         if 'activation_fn' in self.config[layer_key]:
             activation_fn = self.config[layer_key]['activation_fn']
         else:
             activation_fn = 'sigmoid'
         weight = self.model_params['weights'][weight_key]
         bias = self.model_params['bias'][bias_key]
         layer = mlp_layer(prev_layer,
                           weight,
                           bias,
                           layer_name=layer_name,
                           activation_type=activation_fn)
         self.model_params['layers'][layer_key] = layer
         prev_layer = layer
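
Taken together, the keys these methods look up ('layer_1' ... 'layer_N', 'output_layer', 'num_nodes', 'activation_fn') suggest a configuration of roughly the following overall shape; every concrete value below is an assumption made for illustration, and the 'weight'/'bias' sub-dicts follow the options shown after Example #1:

 config = {
     'activation_fn': 'relu',
     'layer_1': {'num_nodes': 128, 'activation_fn': 'relu',
                 'weight': {}, 'bias': {}},
     'layer_2': {'num_nodes': 64,                 # no 'activation_fn' -> 'sigmoid'
                 'weight': {}, 'bias': {}},
     'output_layer': {'name': 'Output_Layer',
                      'weight': {}, 'bias': {}},
 }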