# Imports assumed for this listing (the original shows only the test bodies;
# the katdal module paths below are best guesses, not from the original).
import itertools
import os
import random

import dask.array as da
import numpy as np
from nose.tools import assert_equal
from numpy.testing import assert_array_equal

from katdal.chunkstore_npy import NpyFileChunkStore
from katdal.vis_flags_weights import ChunkStoreVisFlagsWeights, DATA_LOST

# Both examples are methods of a test class (hence `self`), shown unwrapped as
# in the original listing; put_fake_dataset is a helper defined alongside the
# tests (not shown here).
def _test_missing_chunks(self, shape, chunk_overrides=None):
    # Put fake dataset into chunk store
    store = NpyFileChunkStore(self.tempdir)
    prefix = 'cb2'
    data, chunk_info = put_fake_dataset(store, prefix, shape, chunk_overrides)
    # Delete some random chunks in each array of the dataset
    missing_chunks = {}
    rs = random.Random(4)
    for array, info in chunk_info.items():
        array_name = store.join(prefix, array)
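        # slices_from_chunks turns a dask chunks spec into one index tuple per
        # chunk, e.g. ((5, 5), (4,)) -> [np.s_[0:5, 0:4], np.s_[5:10, 0:4]]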
        slices = da.core.slices_from_chunks(info['chunks'])
        culled_slices = rs.sample(slices, len(slices) // 10 + 1)
        missing_chunks[array] = culled_slices
        for culled_slice in culled_slices:
            # Renamed from `shape` to avoid shadowing the method parameter
            chunk_name, chunk_shape = store.chunk_metadata(array_name, culled_slice)
            os.remove(os.path.join(store.path, chunk_name) + '.npy')
    vfw = ChunkStoreVisFlagsWeights(store, chunk_info, None)
    assert_equal(vfw.store, store)
    assert_equal(vfw.vis_prefix, prefix)
    # Check that (only) missing chunks have been replaced by zeros
    vis = data['correlator_data']
    for culled_slice in missing_chunks['correlator_data']:
        vis[culled_slice] = 0.
    assert_array_equal(vfw.vis, vis)
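    # Expected weights: per-element weights scaled by the per-channel weights,
    # broadcast over the trailing (correlation product) axis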
    weights = data['weights'] * data['weights_channel'][..., np.newaxis]
    for culled_slice in missing_chunks['weights'] + missing_chunks['weights_channel']:
        weights[culled_slice] = 0.
    assert_array_equal(vfw.weights, weights)
    # Check that (only) missing chunks have been flagged as 'data lost'
    flags = data['flags']
    for culled_slice in missing_chunks['flags']:
        flags[culled_slice] = 0
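    # A chunk lost in *any* of the four arrays marks that region as DATA_LOST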
    for culled_slice in itertools.chain(*missing_chunks.values()):
        flags[culled_slice] |= DATA_LOST
    assert_array_equal(vfw.flags, flags)
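
# Hypothetical drivers for the helper above (method names, shapes and the
# chunk_overrides layout are assumptions, not from the original listing):
def test_missing_chunks_contiguous(self):
    self._test_missing_chunks((10, 64, 30))

def test_missing_chunks_uneven(self):
    # chunk_overrides is assumed to map array name -> dask-style chunks tuple
    self._test_missing_chunks(
        (10, 64, 30), {'correlator_data': ((2, 3, 5), (32, 32), (30,))})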
Example #2
def test_missing_chunks(self):
    # Put fake dataset into chunk store
    store = NpyFileChunkStore(self.tempdir)
    base_name = 'cb2'
    shape = (10, 64, 30)
    data, chunk_info = put_fake_dataset(store, base_name, shape)
    # Delete a random chunk in each array of the dataset
    missing_chunks = {}
    rs = random.Random(4)
    for array, info in chunk_info.items():
        array_name = store.join(base_name, array)
        slices = da.core.slices_from_chunks(info['chunks'])
        culled_slice = rs.choice(slices)
        missing_chunks[array] = culled_slice
        # Renamed from `shape` to avoid clobbering the dataset shape above
        chunk_name, chunk_shape = store.chunk_metadata(array_name, culled_slice)
        os.remove(os.path.join(store.path, chunk_name) + '.npy')
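    # Note: this example targets an older ChunkStoreVisFlagsWeights constructor
    # than Example #1 above (base_name is passed as the second argument)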
    vfw = ChunkStoreVisFlagsWeights(store, base_name, chunk_info)
    # Check that (only) missing chunks have been replaced by zeros
    vis = data['correlator_data']
    vis[missing_chunks['correlator_data']] = 0.
    assert_array_equal(vfw.vis, vis)
    weights = data['weights'] * data['weights_channel'][..., np.newaxis]
    weights[missing_chunks['weights']] = 0.
    weights[missing_chunks['weights_channel']] = 0.
    assert_array_equal(vfw.weights, weights)
    # Check that (only) missing chunks have been flagged as 'data lost'
    flags = data['flags']
    flags[missing_chunks['flags']] = 0  # integer zero: flags is an int array
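    # 8 is the 'data_lost' flag bit (2 ** 3), spelled DATA_LOST in Example #1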
    flags[missing_chunks['correlator_data']] |= 8
    flags[missing_chunks['weights']] |= 8
    flags[missing_chunks['weights_channel']] |= 8
    flags[missing_chunks['flags']] |= 8
    assert_array_equal(vfw.flags, flags)
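
# A minimal illustration of the OR-in semantics checked above (uses the np
# import from the top of this listing; 8 matches the DATA_LOST bit):
demo_flags = np.zeros((2, 3), dtype=np.uint8)
demo_flags[0, :] |= 8                 # mark row 0 as data lost
assert (demo_flags[0] == 8).all()     # lost region carries only that bit
assert (demo_flags[1] == 0).all()     # untouched region stays clean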