def loader_init(self):
    """Build the tftables loader that feeds this model's input pipeline.

    The queue-feeding ops are pinned to the CPU. Batches are dequeued in
    on-disk order when ``self.args.mode == "train"`` and unordered otherwise.
    """
    in_training_mode = self.args.mode == "train"
    with tf.device('/cpu:0'):
        self.loader = tftables.load_dataset(
            filename=self.args.input_path,
            dataset_path=self.args.dataset_name,
            input_transform=self.transform,
            batch_size=self.args.batch_size,
            ordered=in_training_mode)
Exemplo n.º 2
0
def create_loader_from_hdf5(sess, batch_size, filename):
    """Open a cyclic, ordered tftables pipeline over the '/spectra' dataset.

    Starts the loader's background reader threads on ``sess`` and returns
    the two dequeued tensors plus a no-op initializer callback (tftables
    needs no explicit initialization, but callers expect one).
    """
    spectra_loader = tftables.load_dataset(
        filename=filename,
        dataset_path="/spectra",
        input_transform=input_transform,
        batch_size=batch_size,
        cyclic=True,
        ordered=True)

    data_stream, metals_stream = spectra_loader.dequeue()

    def initialize_stream():
        """No-op: tftables requires no initialization step."""
        pass

    spectra_loader.start(sess)

    return data_stream, metals_stream, initialize_stream
Exemplo n.º 3
0
    def test_quick_start_A(self):
        """Smoke-test the quick-start example: stream mock batches through a
        trivial network for a fixed number of session steps.

        Verifies the loader -> dequeue -> run pipeline end to end; the
        "network" is an identity lambda, so only the data plumbing is tested.
        """
        my_network = lambda x, y: x
        num_iterations = 100
        num_labels = 10

        with tf.device('/cpu:0'):
            # This function preprocesses the batches before they are loaded
            # into the internal queue: cast dtypes and one-hot the labels.
            # If the dataset is a table, this function is required.
            def input_transform(tbl_batch):
                labels = tbl_batch['label']
                data = tbl_batch['data']

                # tf.cast replaces the deprecated tf.to_float (removed in
                # TF 2.x); behavior is identical in TF 1.x.
                truth = tf.cast(tf.one_hot(labels, num_labels, 1, 0), tf.float32)
                data_float = tf.cast(data, tf.float32)

                return truth, data_float

            # Open the HDF5 file and create a loader for a dataset.
            # The batch_size defines the length (in the outer dimension)
            # of the elements (batches) returned by the reader.
            loader = tftables.load_dataset(
                filename=self.test_filename,
                dataset_path=self.test_mock_data_path,
                input_transform=input_transform,
                batch_size=20)

        # Tensors are returned in the same order input_transform produced them.
        truth_batch, data_batch = loader.dequeue()

        # The dequeued tensors can then be used in your network.
        result = my_network(truth_batch, data_batch)

        with tf.Session() as sess:

            # This context manager starts and stops the internal threads and
            # processes used to read the data from disk and feed the queue.
            with loader.begin(sess):
                for _ in range(num_iterations):
                    sess.run(result)
Exemplo n.º 4
0
#     instance_length = 256


# Load X

def input_transform(tbl_batch):
    """Map a table batch to float tensors: (labels, adjacent-beat data)."""
    truth = tf.to_float(tbl_batch['labels'])
    data_float = tf.to_float(tbl_batch['adjacent_beats'])
    return truth, data_float

# Stream training batches straight from the HDF5 training split.
# NOTE(review): dataset_path='' points at the file root — presumably tftables
# resolves this to the intended node; the commented-out h5py code below reads
# 'adjacent_beats' directly, so confirm the two agree.
train_loader = tftables.load_dataset(filename=split_dir + "/train.h5",
                                   dataset_path='',
                                   input_transform=input_transform,
                                   batch_size=20)

# Earlier h5py-based loading of the splits, kept for reference:
# x_train_file = h5py.File(split_dir + "/train.h5")
# x_test_file = h5py.File(split_dir + "/test.h5")
# X_train = np.array(x_train_file.get('adjacent_beats'))
# X_test = np.array(x_test_file.get('adjacent_beats'))
# x_train_file.close()
# x_test_file.close()

# Removed a leftover `pdb.set_trace()` debug breakpoint here: it would halt
# every non-interactive run of this script at module import time.


# Load X_train, y_train, X_test and y_test
# pdb.set_trace()
# X_train = np.array(x_file.get('X_train'))
Exemplo n.º 5
0
    # If the dataset is a table, this function is required.
    def input_transform(tbl_batch):
        """Convert a raw table batch into (one-hot float labels, float data)."""
        raw_labels = tbl_batch['label']
        raw_data = tbl_batch['data']

        one_hot_labels = tf.one_hot(raw_labels, num_labels, 1, 0)
        return tf.to_float(one_hot_labels), tf.to_float(raw_data)

    # Open the HDF5 file and create a loader for a dataset.
    # The batch_size defines the length (in the outer dimension)
    # of the elements (batches) returned by the reader.
    # Takes a function as input that pre-processes the data.
    # Build the loader over the '/data' node of the saved bottleneck file.
    # NOTE(review): the filename is hard-coded — presumably produced by an
    # earlier feature-extraction step; confirm the path before running.
    loader = tftables.load_dataset(filename='bottleneck_fc_model.h5',
                                   dataset_path='/data',
                                   input_transform=input_transform,
                                   batch_size=16)

# To get the data, we dequeue it from the loader.
# Tensorflow tensors are returned in the same order that input_transform
# returned them: (truth, data).
truth_batch, data_batch = loader.dequeue()

# The dequeued tensors can then be fed into the network graph.
result = my_network(truth_batch, data_batch)

with tf.Session() as sess:

    # This context manager starts and stops the internal threads and
    # processes used to read the data from disk and store it in the queue.
    with loader.begin(sess):
        for _ in range(num_iterations):