def test_can_convert_from_mat_file_to_numpy_files(self):
    """Functional test: converting the micro NYU Depth mat file writes images
    and depths .npy files whose reloaded contents contain known magic values.
    Writes to and removes real files under functional_tests/test_data.
    """
    # Prepare paths.
    images_numpy_file_path = os.path.join('functional_tests', 'test_data', 'images_nyud_micro.npy')
    depths_numpy_file_path = os.path.join('functional_tests', 'test_data', 'depths_nyud_micro.npy')
    mat_file_path = os.path.join('functional_tests', 'test_data', 'nyud_micro.mat')
    # Run the conversion script.
    GoData().convert_mat_file_to_numpy_file(mat_file_path)
    # Check that the files are created.
    assert os.path.isfile(images_numpy_file_path)
    assert os.path.isfile(depths_numpy_file_path)
    # Check that magic values are correct when the data is reloaded from numpy files.
    # The indexed pixel/depth values were captured from a known-good conversion run.
    images = np.load(images_numpy_file_path)
    assert images[5, 10, 10, 1] == 91
    depths = np.load(depths_numpy_file_path)
    assert math.isclose(depths[5, 10, 10], 3.75686, abs_tol=0.001)
    # Clean up.
    remove_file_if_exists(images_numpy_file_path)
    remove_file_if_exists(depths_numpy_file_path)
def test_crop_data_removes_edge_data(self):
    """crop_data on a (1, 18, 18) array should keep only the central 2x2 region."""
    full_array = np.arange(324.0).reshape((1, 18, 18))
    expected_center = full_array[:, 8:10, 8:10]
    result = GoData().crop_data(full_array)
    assert np.array_equal(result, expected_center)
def test_mat_data_to_numpy_for_accelerometer_gives_correct_shape(self):
    """Accelerometer data should be extracted with shape (samples, channels)."""
    mat_data = Mock()
    # Matlab's hdf5 gives a reverse order, so the mock hands back the transpose.
    mat_data.get.return_value = np.empty((300, 4)).transpose()
    result = GoData().convert_mat_data_to_numpy_array(mat_data, 'accelData')
    assert result.shape == (300, 4)
def test_mat_data_to_numpy_for_depths_automatically_uses_the_correct_transpose(self):
    """A reverse-ordered (Matlab hdf5) 3D array should come back as (samples, height, width)."""
    # NOTE(review): the test name says "depths" but the variable requested is
    # 'images' — confirm whether the name or the argument is the mistake.
    mock_mat_data = Mock()
    mock_get_array = np.empty((10, 20, 300))
    mock_mat_data.get.return_value = mock_get_array.transpose()  # Matlab's hdf5 gives a reverse order.
    array = GoData().convert_mat_data_to_numpy_array(mock_mat_data, 'images')
    assert array.shape == (300, 10, 20)
def test_convert_mat_file_to_numpy_file_writes_extracted_numpys_to_files(self, mock_numpy_save, h5py_file_mock):
    """Each extracted array (images, depths) should be saved to its own .npy file."""
    go_data = GoData()
    # The extraction helper is stubbed to yield sentinel values 1 and 2 in order.
    go_data.convert_mat_data_to_numpy_array = Mock(side_effect=[1, 2])
    go_data.crop_data = lambda data: data
    go_data.convert_mat_file_to_numpy_file('')
    expected_saves = [
        ((os.path.join('images_') + '.npy', 1),),
        ((os.path.join('depths_') + '.npy', 2),),
    ]
    assert mock_numpy_save.call_args_list[:2] == expected_saves
def test_convert_mat_data_to_numpy_array_can_specify_the_number_of_images_to_extract(self):
    """Passing number_of_samples should truncate the extracted array to that count."""
    mat_data = Mock()
    mat_data.get.return_value = np.array([[[[1]]], [[[2]]], [[[3]]]])
    result = GoData().convert_mat_data_to_numpy_array(mat_data, 'fake variable', number_of_samples=2)
    assert np.array_equal(result, np.array([[[[1]]], [[[2]]]]))
def test_convert_mat_data_to_numpy_array_extracts_and_tranposes_the_data(self):
    """The named variable should be fetched from the mat data and transposed."""
    mat_data = Mock()
    mat_data.get.return_value = np.array([[[[1, 2, 3]]]])
    result = GoData().convert_mat_data_to_numpy_array(mat_data, 'fake variable')
    # The variable name must be passed straight through to the mat data lookup.
    assert mat_data.get.call_args == (('fake variable',),)
    assert np.array_equal(result, np.array([[[[1]], [[2]], [[3]]]]))
def test_convert_mat_file_to_numpy_file_reads_the_mat_file(self, h5py_file_mock, mock_numpy_save):
    """Converting should open the given mat file for reading via h5py."""
    fake_path = 'fake name'
    go_data = GoData()
    # Stub out the downstream steps; only the file open is under test.
    go_data.crop_data = Mock()
    go_data.convert_mat_data_to_numpy_array = Mock()
    go_data.convert_mat_file_to_numpy_file(fake_path)
    assert h5py_file_mock.call_args == ((fake_path, 'r'),)
def test_data_shuffling(self, mock_permutation):
    """shuffle should reorder images and labels with the same permutation.

    Bug fix: the original test *reassigned* go_data.images and go_data.labels
    after calling shuffle() instead of asserting on them, so it could never
    fail. The assignments are replaced with assertions.
    """
    go_data = GoData()
    go_data.images = np.array([1, 2, 3])
    go_data.labels = np.array(['a', 'b', 'c'])
    mock_permutation.return_value = [2, 0, 1]
    go_data.shuffle()
    assert np.array_equal(go_data.images, np.array([3, 1, 2]))
    assert np.array_equal(go_data.labels, np.array(['c', 'a', 'b']))
def test_convert_mat_file_to_numpy_file_passes_can_be_called_on_a_specific_number_of_images(
        self, mock_numpy_save, h5py_file_mock):
    """number_of_samples should be forwarded to the extraction helper as a keyword."""
    go_data = GoData()
    extraction_mock = Mock()
    go_data.convert_mat_data_to_numpy_array = extraction_mock
    go_data.crop_data = Mock()
    go_data.convert_mat_file_to_numpy_file('', number_of_samples=2)
    assert extraction_mock.call_args[1]['number_of_samples'] == 2
def test_convert_mat_file_to_numpy_file_calls_extract_mat_data_to_numpy_array(self, h5py_file_mock, mock_numpy_save):
    """Converting should extract the 'images' then the 'depths' variables from the mat data."""
    h5py_file_mock.return_value = 'fake mat data'
    go_data = GoData()
    go_data.convert_mat_data_to_numpy_array = Mock()
    go_data.crop_data = Mock()
    go_data.convert_mat_file_to_numpy_file('')
    extraction_calls = go_data.convert_mat_data_to_numpy_array.call_args_list
    assert extraction_calls[0][0] == ('fake mat data', 'images')
    assert extraction_calls[1][0] == ('fake mat data', 'depths')
def test_rebin_outputs_the_right_types_based_on_dimensions(self):
    """4D (image) input should rebin to uint8; 3D (depth) input to float64."""
    go_data = GoData()
    go_data.width = 1
    go_data.height = 1
    go_data.channels = 1
    image_collection = np.array([[[[1]]]])  # Collection of images.
    depth_collection = np.array([[[1]]])  # Collection of depths.
    assert go_data.shrink_array_with_rebinning(image_collection).dtype == np.uint8
    assert go_data.shrink_array_with_rebinning(depth_collection).dtype == np.float64
def test_can_convert_from_mat_to_tfrecord_and_read_tfrecord(self):
    """Functional round trip: mat file -> tfrecords file -> input tensors.

    Converts the micro NYU Depth mat file to a tfrecords file, reloads it
    through the input pipeline, and checks that known magic values appear
    in the evaluated tensors. Creates and removes a real tfrecords file.
    """
    # Prepare paths.
    data_directory = os.path.join('functional_tests', 'test_data')
    mat_file_path = os.path.join(data_directory, 'nyud_micro.mat')
    tfrecords_file_path = os.path.join(data_directory, 'nyud_micro.tfrecords')
    # Run the conversion script.
    go_data = GoData(data_directory=data_directory, data_name='nyud_micro')
    go_data.convert_mat_to_tfrecord(mat_file_path)
    # Check that the file is created.
    assert os.path.isfile(tfrecords_file_path)
    # Reload data.
    images, depths = go_data.inputs(data_type='', batch_size=10)
    # Check that magic values are correct when the data is reloaded.
    # NOTE(review): the batch order appears nondeterministic, so the test
    # accepts any of these candidate values via `.any()` — confirm intent.
    magic_image_numbers = [
        -0.17450979, -0.15882352, -0.15490195, -0.15098038, -0.14705881,
        -0.14313725, -0.11960781, -0.056862712, 0.0058823824
    ]
    magic_depth_numbers = [
        1.1285654, 1.8865139, 2.104018, 2.1341071, 2.6960645,
        3.318316, 3.4000545, 3.4783292, 3.7568643, 3.9500945
    ]
    # The queue runners must be started for the input tensors to yield data.
    session = tf.Session()
    coordinator = tf.train.Coordinator()
    threads = tf.train.start_queue_runners(sess=session, coord=coordinator)
    try:
        with session.as_default():
            assert np.isclose(magic_image_numbers, images.eval()[5, 10, 10, 1], atol=0.00001).any()
            assert np.isclose(magic_depth_numbers, depths.eval()[5, 10, 10], atol=0.00001).any()
    except tf.errors.OutOfRangeError:
        fail('Should not hit this.')
    finally:
        # Always stop the queue threads and close the session, even on failure.
        coordinator.request_stop()
        coordinator.join(threads)
        session.close()
    # Clean up.
    remove_file_if_exists(tfrecords_file_path)
def __init__(self, message_queue=None): super().__init__() # Common variables. self.batch_size = 8 self.number_of_epochs = 50000 self.initial_learning_rate = 0.00001 self.data = GoData(data_name='nyud') self.summary_step_period = 1 self.log_directory = "logs" self.dropout_keep_probability = 0.5 # Internal setup. self.moving_average_loss = None self.moving_average_decay = 0.1 self.stop_signal = False self.step = 0 self.saver = None self.session = None self.dropout_keep_probability_tensor = tf.placeholder(tf.float32) self.queue = message_queue
def test_data_path_property(self):
    """data_path should join the data directory with the data name."""
    go_data = GoData()
    go_data.data_directory = 'directory'
    go_data.data_name = 'file_name'
    expected_path = os.path.join('directory', 'file_name')
    assert go_data.data_path == expected_path