def generic_append_to_check(input_data, overwrite_data):
    """Generic function to test replacing data of appended dimension"""
    with temporary_directory() as tmp_dir:
        file_path = tmp_dir + '/test.nc'
        nc_io_driver = NetCDFIODriver(file_path)
        input_type = type(input_data)
        # Create an appendable storage variable for the data
        append_path = 'data_append'
        data_append = nc_io_driver.create_storage_variable(
            append_path, input_type)
        # Append data 3 times
        for i in range(3):
            data_append.append(input_data)
        # Overwrite second entry
        data_append.write(overwrite_data, at_index=1)
        data_append_out = data_append.read()
        try:
            # Dict-like data: iterate over input_data since it has the .keys method
            for key in input_data.keys():
                assert np.all(data_append_out[0][key] == input_data[key])
                assert np.all(data_append_out[2][key] == input_data[key])
                assert np.all(data_append_out[1][key] == overwrite_data[key])
            assert set(input_data.keys()) == set(
                data_append_out[0].keys())  # Assert keys match
            assert set(input_data.keys()) == set(data_append_out[2].keys())
        except AttributeError:
            assert np.all(data_append_out[0] == input_data)
            assert np.all(data_append_out[2] == input_data)
            assert np.all(data_append_out[1] == overwrite_data)
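

# Illustrative usage (a sketch, not part of the original suite): the helper above
# is meant to be driven by small per-type tests such as the ones below. The int
# case exercises the plain-data branch; the dict case exercises the .keys()
# branch and assumes a dict codec is registered with NetCDFIODriver.
def test_int_append_overwrite_example():
    """Hypothetical example: overwrite the middle entry of appended int data"""
    generic_append_to_check(4, 5)


def test_dict_append_overwrite_example():
    """Hypothetical example: overwrite the middle entry of appended dict data"""
    generic_append_to_check({'a': np.arange(3), 'b': 4.0},
                            {'a': np.arange(3) * 2.0, 'b': 5.0})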
def test_read_trap():
    """Test that attempting to read a non-existent file fails"""
    with temporary_directory() as tmp_dir:
        test_store = tmp_dir + '/teststore.nc'
        driver = spawn_driver(test_store)
        si = StorageInterface(driver)
        try:
            si.var1.read()
        except Exception:
            # Expected failure: nothing has been stored yet; exact exception type unspecified
            pass
        else:
            raise AssertionError("Reading from an empty store should have raised an exception")
def test_storage_interface_creation():
    """Test that the storage interface can create a top level file and read from it"""
    with temporary_directory() as tmp_dir:
        test_store = tmp_dir + '/teststore.nc'
        driver = spawn_driver(test_store)
        si = StorageInterface(driver)
        si.add_metadata('name', 'data')
        assert si.storage_driver.ncfile.getncattr('name') == 'data'
def test_variable_write_read():
    """Test that a variable can be create and written to file"""
    with temporary_directory() as tmp_dir:
        test_store = tmp_dir + '/teststore.nc'
        driver = spawn_driver(test_store)
        si = StorageInterface(driver)
        input_data = 4
        si.four.write(input_data)
        output_data = si.four.read()
        assert output_data == input_data
def test_netcdf_driver_group_manipulation():
    """Test that the NetCDFIODriver can create groups, rebind to groups, and that they are on the file"""
    with temporary_directory() as tmp_dir:
        nc_io_driver = NetCDFIODriver(tmp_dir + '/test.nc')
        group2 = nc_io_driver.get_directory('group1/group2')
        group1 = nc_io_driver.get_directory('group1')
        ncfile = nc_io_driver.ncfile
        ncgroup1 = ncfile.groups['group1']
        ncgroup2 = ncfile.groups['group1'].groups['group2']
        assert group1 is ncgroup1
        assert group2 is ncgroup2
def test_netcdf_driver_dimension_manipulation():
    """Test that the NetCDFIODriver can check and create dimensions"""
    with temporary_directory() as tmp_dir:
        nc_io_driver = NetCDFIODriver(tmp_dir + '/test.nc')
        NetCDFIODriver.check_scalar_dimension(nc_io_driver)
        NetCDFIODriver.check_iterable_dimension(nc_io_driver, length=4)
        NetCDFIODriver.check_infinite_dimension(nc_io_driver)
        ncfile = nc_io_driver.ncfile
        dims = ncfile.dimensions
        assert 'scalar' in dims
        assert 'iterable4' in dims
        assert 'iteration' in dims
def test_write_at_index_must_exist():
    """Ensure that write(data, at_index=N) fails when entry N has not been appended yet"""
    with temporary_directory() as tmp_dir:
        file_path = tmp_dir + '/test.nc'
        nc_io_driver = NetCDFIODriver(file_path)
        input_data = 4
        input_type = type(input_data)
        # Create an appendable storage variable with no entries yet
        append_path = 'data_append'
        data_append = nc_io_driver.create_storage_variable(
            append_path, input_type)
        try:
            data_append.write(input_data, at_index=0)
        except Exception:
            # Expected failure: there is no entry at index 0 to overwrite
            pass
        else:
            raise AssertionError("write(..., at_index=0) on an empty variable should have raised")
def test_variable_append_read():
    """Test that a variable can be create and written to file"""
    with temporary_directory() as tmp_dir:
        test_store = tmp_dir + '/teststore.nc'
        driver = spawn_driver(test_store)
        si = StorageInterface(driver)
        input_data = np.eye(3) * 4.0
        si.four.append(input_data)
        si.four.append(input_data)
        output_data = si.four.read()
        assert np.all(output_data[0] == input_data)
        assert np.all(output_data[1] == input_data)
def test_netcdf_driver_metadata_creation():
    """Test that the NetCDFIODriver can create metadata on different objects"""
    with temporary_directory() as tmp_dir:
        nc_io_driver = NetCDFIODriver(tmp_dir + '/test.nc')
        group1 = nc_io_driver.get_directory('group1')
        nc_io_driver.add_metadata('root_metadata', 'IAm(G)Root!')
        nc_io_driver.add_metadata('group_metadata',
                                  'group1_metadata',
                                  path='/group1')
        ncfile = nc_io_driver.ncfile
        nc_metadata = ncfile.getncattr('root_metadata')
        group_metadata = group1.getncattr('group_metadata')
        assert nc_metadata == 'IAm(G)Root!'
        assert group_metadata == 'group1_metadata'
def test_write_at_index_is_bound():
    """Ensure that write(data, at_index) cannot write to an index beyond the appended data"""
    with temporary_directory() as tmp_dir:
        file_path = tmp_dir + '/test.nc'
        nc_io_driver = NetCDFIODriver(file_path)
        input_data = 4
        input_type = type(input_data)
        # Create an appendable storage variable
        append_path = 'data_append'
        data_append = nc_io_driver.create_storage_variable(
            append_path, input_type)
        data_append.append(input_data)  # Creates the first (and only) entry at index 0
        try:
            data_append.write(input_data, at_index=1)
        except Exception:
            # Expected failure: index 1 is out of bounds when only index 0 exists
            pass
        else:
            raise AssertionError("write(..., at_index=1) beyond the appended data should have raised")
def test_metadata_creation():
    """Test that metadata can be added to variables and directories"""
    with temporary_directory() as tmp_dir:
        test_store = tmp_dir + '/teststore.nc'
        driver = spawn_driver(test_store)
        si = StorageInterface(driver)
        input_data = 4
        si.dir0.var1.write(input_data)
        si.dir0.add_metadata('AmIAGroup', 'yes')
        si.dir0.var1.add_metadata('AmIAGroup', 'no')
        dir0 = si.storage_driver.ncfile.groups['dir0']
        var1 = dir0.variables['var1']
        assert dir0.getncattr('AmIAGroup') == 'yes'
        assert var1.getncattr('AmIAGroup') == 'no'
def test_unbound_read():
    """Test that a variable can read from the file without previous binding"""
    with temporary_directory() as tmp_dir:
        test_store = tmp_dir + '/teststore.nc'
        driver = spawn_driver(test_store)
        si = StorageInterface(driver)
        input_data = 4 * unit.kelvin
        si.four.write(input_data)
        si.storage_driver.close()
        del si
        driver = spawn_driver(test_store)
        si = StorageInterface(driver)
        output_data = si.four.read()
        assert input_data == output_data
def test_at_index_write():
    """Test that writing at a specific index of appended data works"""
    with temporary_directory() as tmp_dir:
        test_store = tmp_dir + '/teststore.nc'
        driver = spawn_driver(test_store)
        si = StorageInterface(driver)
        input_data = 4
        overwrite_data = 5
        for i in range(3):
            si.four.append(input_data)
        si.four.write(overwrite_data, at_index=1)  # Sacrilege, I know -LNN
        output_data = si.four.read()
        assert np.all(output_data[0] == input_data)
        assert np.all(output_data[2] == input_data)
        assert np.all(output_data[1] == overwrite_data)
def test_multi_variable_creation():
    """Test that multiple variables can be created in a single directory structure"""
    with temporary_directory() as tmp_dir:
        test_store = tmp_dir + '/teststore.nc'
        driver = spawn_driver(test_store)
        si = StorageInterface(driver)
        input_data = [4.0, 4.0, 4.0]
        si.dir0.var0.write(input_data)
        si.dir0.var1.append(input_data)
        si.dir0.var1.append(input_data)
        si.storage_driver.close()
        del si, driver
        driver = spawn_driver(test_store)
        si = StorageInterface(driver)
        assert np.all(si.dir0.var0.read() == input_data)
        app_data = si.dir0.var1.read()
        assert np.all(app_data[0] == input_data)
        assert np.all(app_data[1] == input_data)
def test_directory_creation():
    """Test that automatic directory-like objects are created on the fly"""
    with temporary_directory() as tmp_dir:
        test_store = tmp_dir + '/teststore.nc'
        driver = spawn_driver(test_store)
        si = StorageInterface(driver)
        input_data = 'four'
        si.dir0.dir1.dir2.var.write(input_data)
        ncfile = si.storage_driver.ncfile
        target = ncfile
        for i in range(3):
            my_dir = 'dir{}'.format(i)
            assert my_dir in target.groups
            target = target.groups[my_dir]
        si.storage_driver.close()
        del si
        driver = spawn_driver(test_store)
        si = StorageInterface(driver)
        target = si
        for i in range(3):
            my_dir = 'dir{}'.format(i)
            target = getattr(target, my_dir)
        assert target.var.read() == input_data
def generic_type_codec_check(input_data, with_append=True):
    """Generic type codec test to ensure all callable functions are working"""
    with temporary_directory() as tmp_dir:
        file_path = tmp_dir + '/test.nc'
        nc_io_driver = NetCDFIODriver(file_path)
        input_type = type(input_data)
        # Create a write and an append of the data
        write_path = 'data_write'
        data_write = nc_io_driver.create_storage_variable(
            write_path, input_type)
        if with_append:
            append_path = 'group1/data_append'
            data_append = nc_io_driver.create_storage_variable(
                append_path, input_type)
        # Store initial data (unbound write/append)
        data_write.write(input_data)
        if with_append:
            data_append.append(input_data)
        # Test that we can act on them again (bound write/append)
        data_write.write(input_data)
        if with_append:
            data_append.append(input_data)
        # Test bound read
        data_write_out = data_write.read()
        if with_append:
            data_append_out = data_append.read()
        try:  # Compound dictionary processing
            for key in data_write_out.keys():
                assert np.all(data_write_out[key] == input_data[key])
        except AttributeError:
            assert np.all(data_write_out == input_data)
        if with_append:
            try:
                for key in data_write_out.keys():
                    assert np.all(data_append_out[0][key] == input_data[key])
                    assert np.all(data_append_out[1][key] == input_data[key])
            except AttributeError:
                assert np.all(data_append_out[0] == input_data)
                assert np.all(data_append_out[1] == input_data)
        # Delete the IO driver (and close the ncfile in the process)
        nc_io_driver.close()
        del data_write, data_write_out
        if with_append:
            del data_append, data_append_out
        # Reopen and test reading actions
        nc_io_driver = NetCDFIODriver(file_path, access_mode='r')
        data_write = nc_io_driver.get_storage_variable(write_path)
        if with_append:
            data_append = nc_io_driver.get_storage_variable(append_path)
        # Test unbound read
        data_write_out = data_write.read()
        if with_append:
            data_append_out = data_append.read()
        try:  # Compound dictionary processing
            for key in data_write_out.keys():
                assert np.all(data_write_out[key] == input_data[key])
        except AttributeError:
            assert np.all(data_write_out == input_data)
        if with_append:
            try:
                # Act on data_write_out since it is known to have the .keys method
                for key in data_write_out.keys():
                    assert np.all(data_append_out[0][key] == input_data[key])
                    assert np.all(data_append_out[1][key] == input_data[key])
            except AttributeError:
                assert np.all(data_append_out[0] == input_data)
                assert np.all(data_append_out[1] == input_data)
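

# Illustrative usage (a sketch, not part of the original suite): the generic codec
# check above is intended to be called by thin per-type tests. Which types round-trip
# depends on the codecs registered with NetCDFIODriver, so the concrete payloads
# below are assumptions based on the data used elsewhere in this file.
def test_int_type_codec_example():
    """Hypothetical example: round-trip a plain int through write, append, and read"""
    generic_type_codec_check(4)


def test_numpy_array_type_codec_example():
    """Hypothetical example: round-trip a numpy array, including the append path"""
    generic_type_codec_check(np.arange(5, dtype=float))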