Example #1
def inference(params=None):
    # Two 5-dimensional input placeholders named 'x' and 'y'
    x, _ = input_data([None, 5], name='x')
    y, _ = input_data([None, 5], name='y')
    # Each input goes through its own fully connected network with layer sizes [5, 1]
    x1, _ = fully_connected_network(x, [5, 1], name='x1')
    y1, _ = fully_connected_network(y, [5, 1], name='y1')
    # The two branch outputs are combined and passed to an angle-distance node
    d, _ = angle_distance(x1 + y1, name='d')
    return d
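For orientation, here is a minimal sketch of how such an inference function might be evaluated in a session, following the pattern of the test examples further down. The placeholder tensor names 'x:0' and 'y:0' and the random feed values are assumptions made for illustration only.

import numpy as np
import tensorflow as tf

graph = tf.Graph()
with graph.as_default():
    d = inference()  # build the two-branch network defined above

with tf.Session(graph=graph) as sess:
    sess.run(tf.global_variables_initializer())
    # Assumption: input_data registers the placeholders under the names 'x' and 'y'
    x_ph = graph.get_tensor_by_name('x:0')
    y_ph = graph.get_tensor_by_name('y:0')
    out = sess.run(d, {x_ph: np.random.rand(3, 5), y_ph: np.random.rand(3, 5)})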
Example #2
    def test_input_data_rename_nodes(self):
        """
        Test the input_data and rename_nodes functions
        :return:
        """
        with tf.Session() as sess:
            x, _ = input_data([], 'x')
            y, _ = rename_nodes(x, ['y'])
            # The renamed node should pass the fed value through unchanged
            self.assertEqual(sess.run(y, {x[0]: 1.0})[0], 1.0)
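The assertion relies on the renamed node passing the fed value through unchanged. As a rough guess at the kind of wrapper rename_nodes might build (the identity-op construction below is an assumption for illustration, not the framework's actual implementation):

import tensorflow as tf

def rename_nodes_sketch(nodes, names):
    # Illustrative only: wrap each node in an identity op under the requested name,
    # so values flow through unchanged while the graph exposes the new names.
    renamed = [tf.identity(node, name=name) for node, name in zip(nodes, names)]
    return renamed, names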
Example #3
def inference(params=None):
    """
    This function defines the TF computational model network
    @return:
    network_output_nodes as an array of tf nodes
    """
    x, _ = input_data([None, 1], 'x')
    poly, _ = polynomial(x, 2, name='p')
    poly, _ = rename_nodes(poly, ['ybar'])
    return poly
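As a rough sketch of what the polynomial node presumably computes, assuming polynomial(x, 2) builds a trainable degree-2 polynomial of its input; the variable names and construction below are illustrative, not the framework's actual implementation.

import tensorflow as tf

def polynomial_sketch(x, degree, name='p'):
    # Illustrative only: ybar = w0 + w1*x + w2*x^2 with trainable coefficients
    with tf.variable_scope(name):
        terms = []
        for k in range(degree + 1):
            w_k = tf.get_variable('w%d' % k, shape=[], initializer=tf.zeros_initializer())
            terms.append(w_k * (x ** k))
        return tf.add_n(terms)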
Example #4
def inference():
    """
    This function defines the TF computational model network
    @return:
    network_output_nodes as an array of tf nodes
    """
    x, _ = input_data([None, 1], 'x')
    fcn, _ = fully_connected_network(x, [10, 10, 1], dropout_keep_probs=0.5)
    #  expand, _   = fully_connected_layer(x, 10, name='expand')
    #  contract, _ = fully_connected_layer(expand, 1, name='contract')
    ybar, _ = rename_nodes(fcn, ['ybar'])
    return ybar
Example #5
    def _l2_diff_loss(network_output_nodes, params=None):
        """
        Default loss is an L2 loss that creates new inputs with names 'yn'

        @param network_output_nodes: list of tensorflow nodes
        @return:
            a list of tensorflow nodes.  This list has the same length as network_output_nodes.
        """
        loss = []
        for j, network_output_node in enumerate(network_output_nodes):
            # Create a target input 'y<j>' with the same shape as the network output
            y, _ = core.input_data(network_output_node.get_shape(), 'y%d' % j)
            # Root mean squared difference between the target and the network output
            loss.append(
                tf.sqrt(
                    tf.reduce_mean(
                        tf.square(tf.subtract(y, network_output_node)))))
        return loss
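For intuition, each element of the returned loss list is the square root of the mean squared difference between the matching target input and the network output, i.e. an RMSE per output node. A small NumPy check of the same quantity (the array values are made up for illustration):

import numpy as np

y = np.array([1.0, 2.0, 3.0])       # values that would be fed into the 'y0' input
yhat = np.array([1.5, 1.5, 3.5])    # network output for the same batch
rmse = np.sqrt(np.mean(np.square(y - yhat)))  # mirrors tf.sqrt(tf.reduce_mean(tf.square(...)))
print(rmse)  # 0.5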
Example #6
    def test_fully_connected_layer(self):
        with tf.Session() as sess:
            x, _ = input_data([1], 'x')
            y, _ = fully_connected_layer(x, 10)
            self.assertEqual(sess.run(y, {x[0]: 1.0})[0], 1.0)
Example #7
    def __init__(self,
                 inference,
                 training_data,
                 loss=None,
                 batch_size=128,
                 data_map=None,
                 name='model',
                 model_output_location='/tmp',
                 optimizer=None,
                 train_iteration=None,
                 train_iteration_done=_default_train_iteration_done,
                 params=None,
                 log_to_db=True):
        """
		Constructor for the trainer class

		@type inference: function
		@param inference: The function that builds a network.  This function has no argument and returns a list of
			output nodes of the network.
		@param training_data:
		@param loss:
		@param batch_size:
		@param data_map:
		@param name:
		@param model_output_location:
		@param optimizer:
		@param train_iteration:
		@param train_iteration_done:
		@param optimizer_params:
		"""
        self._model_log_db = None
        self._model_name = name
        self._model_output_location = model_output_location
        self._model_ckpt_filename = os.path.join(
            self._model_output_location,
            self._model_name + CHECKPOINT_FILE_EXTENSION)
        self._inference = inference
        self._loss = loss
        self._batch_size = batch_size
        self._data_map = data_map
        self._training_data = training_data
        self._params = common_utils.extend_dict(params, default_params)
        self._train_iteration_done = train_iteration_done
        self._model_graph_def = None
        if log_to_db:
            import framework.db.model_log_db as db
            self._model_log_db = db.ModelLogDbWriter()
            self._model_log_db_id = self._model_log_db.begin_training(
                self._model_name, self._model_output_location)
            with open(
                    os.path.join(self._model_output_location,
                                 'model_log_db_id.txt'), 'w') as f:
                f.write("%s\n" % self._model_log_db_id)
        # Fall back to the built-in defaults when loss, optimizer, or
        # train_iteration are not supplied by the caller.
        if loss:
            self._loss = loss
        else:
            self._loss = Trainer._l2_diff_loss
        if optimizer:
            self._optimizer = optimizer
        else:
            self._optimizer = Trainer._adam_optimizer
        if train_iteration:
            self._train_iteration = train_iteration
        else:
            self._train_iteration = Trainer._default_train_iteration

        # create session and build graph
        self._graph = tf.Graph()
        self._session = tf.Session(graph=self._graph)
        with self._graph.as_default():
            self._is_training = core.input_data([],
                                                IS_TRAINING_PLACEHOLDER_NAME,
                                                dtype=tf.bool)
            self._network_output_nodes = self._inference(self._params)
            self._learning_rate = tf.Variable(common_utils.get_dict_value(
                self._params, LEARNING_RATE_PARAM_NAME, DEFAULT_LEARNING_RATE),
                                              trainable=False)
            self._model_graph_def = copy.copy(self._graph.as_graph_def())

            # loss is an array of loss functions
            self._loss_nodes = self._loss(self._network_output_nodes,
                                          params=self._params)

            # build optimizer
            self._optimizer_nodes = self._optimizer(self._params,
                                                    self._loss_nodes,
                                                    self._learning_rate)
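Finally, a minimal sketch of how this constructor might be invoked with one of the inference functions above. The training_data value, the feed-dict-style layout, and the choice to disable database logging are assumptions for illustration; the method that actually starts training is not part of this excerpt.

import numpy as np

trainer = Trainer(
    inference=inference,                 # e.g. the polynomial model from Example #3
    training_data={'x': np.random.rand(1000, 1),    # assumed layout; the real data format is not shown here
                   'y0': np.random.rand(1000, 1)},
    batch_size=64,
    name='poly_model',
    model_output_location='/tmp/poly_model',
    log_to_db=False)                     # skip the model-log database for a quick local run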