Example #1
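The snippets below are test methods lifted out of their class, so no imports are shown. A preamble along these lines is assumed; the module paths for the katdal pieces (NpyFileChunkStore, ChunkStoreVisFlagsWeights, DATA_LOST) and for the put_fake_dataset test helper may differ between katdal versions, and self.tempdir is expected to be created in the test class's setup:

    import itertools
    import os
    import random

    import dask.array as da
    import numpy as np
    from nose.tools import assert_equal
    from numpy.testing import assert_array_equal

    # The katdal imports below are indicative; exact module paths vary by version
    from katdal.chunkstore_npy import NpyFileChunkStore
    from katdal.vis_flags_weights import ChunkStoreVisFlagsWeights, DATA_LOST
    # put_fake_dataset is a helper defined in katdal's own test suite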
 def _test_missing_chunks(self, shape, chunk_overrides=None):
     # Put fake dataset into chunk store
     store = NpyFileChunkStore(self.tempdir)
     prefix = 'cb2'
     data, chunk_info = put_fake_dataset(store, prefix, shape, chunk_overrides)
     # Delete some random chunks in each array of the dataset
     missing_chunks = {}
     rs = random.Random(4)
     for array, info in chunk_info.items():
         array_name = store.join(prefix, array)
         slices = da.core.slices_from_chunks(info['chunks'])
         culled_slices = rs.sample(slices, len(slices) // 10 + 1)
         missing_chunks[array] = culled_slices
         for culled_slice in culled_slices:
             chunk_name, shape = store.chunk_metadata(array_name, culled_slice)
             os.remove(os.path.join(store.path, chunk_name) + '.npy')
     vfw = ChunkStoreVisFlagsWeights(store, chunk_info, None)
     assert_equal(vfw.store, store)
     assert_equal(vfw.vis_prefix, prefix)
     # Check that (only) missing chunks have been replaced by zeros
     vis = data['correlator_data']
     for culled_slice in missing_chunks['correlator_data']:
         vis[culled_slice] = 0.
     assert_array_equal(vfw.vis, vis)
     weights = data['weights'] * data['weights_channel'][..., np.newaxis]
     for culled_slice in missing_chunks['weights'] + missing_chunks['weights_channel']:
         weights[culled_slice] = 0.
     assert_array_equal(vfw.weights, weights)
     # Check that (only) missing chunks have been flagged as 'data lost'
     flags = data['flags']
     for culled_slice in missing_chunks['flags']:
         flags[culled_slice] = 0
     for culled_slice in itertools.chain(*missing_chunks.values()):
         flags[culled_slice] |= DATA_LOST
     assert_array_equal(vfw.flags, flags)
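The underscore prefix suggests _test_missing_chunks is a parametrised helper rather than a test in its own right. A hypothetical caller (the shape value is illustrative only, not taken from the original suite) would look like:

 def test_missing_chunks(self):
     self._test_missing_chunks((100, 256, 30))

A sibling test could pass a chunk_overrides dict as the second argument to exercise non-default chunking of selected arrays.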
Example #2
 def test_missing_chunks(self):
     # Put fake dataset into chunk store
     store = NpyFileChunkStore(self.tempdir)
     base_name = 'cb2'
     shape = (10, 64, 30)
     data, chunk_info = put_fake_dataset(store, base_name, shape)
     # Delete a random chunk in each array of the dataset
     missing_chunks = {}
     rs = random.Random(4)
     for array, info in chunk_info.items():
         array_name = store.join(base_name, array)
         slices = da.core.slices_from_chunks(info['chunks'])
         culled_slice = rs.choice(slices)
         missing_chunks[array] = culled_slice
         chunk_name, shape = store.chunk_metadata(array_name, culled_slice)
         os.remove(os.path.join(store.path, chunk_name) + '.npy')
     vfw = ChunkStoreVisFlagsWeights(store, base_name, chunk_info)
     # Check that (only) missing chunks have been replaced by zeros
     vis = data['correlator_data']
     vis[missing_chunks['correlator_data']] = 0.
     assert_array_equal(vfw.vis, vis)
     weights = data['weights'] * data['weights_channel'][..., np.newaxis]
     weights[missing_chunks['weights']] = 0.
     weights[missing_chunks['weights_channel']] = 0.
     assert_array_equal(vfw.weights, weights)
     # Check that (only) missing chunks have been flagged as 'data lost'
     flags = data['flags']
     flags[missing_chunks['flags']] = 0
     flags[missing_chunks['correlator_data']] |= 8   # 8 is the 'data lost' flag bit
     flags[missing_chunks['weights']] |= 8
     flags[missing_chunks['weights_channel']] |= 8
     flags[missing_chunks['flags']] |= 8
     assert_array_equal(vfw.flags, flags)
Example #3
 def test_construction(self):
     # Put fake dataset into chunk store
     store = NpyFileChunkStore(self.tempdir)
     prefix = 'cb1'
     shape = (10, 64, 30)
     data, chunk_info = put_fake_dataset(store, prefix, shape)
     vfw = ChunkStoreVisFlagsWeights(store, chunk_info)
     weights = data['weights'] * data['weights_channel'][..., np.newaxis]
     # Check that data is as expected when accessed via VisFlagsWeights
     assert_equal(vfw.shape, data['correlator_data'].shape)
     assert_array_equal(vfw.vis.compute(), data['correlator_data'])
     assert_array_equal(vfw.flags.compute(), data['flags'])
     assert_array_equal(vfw.weights.compute(), weights)
Example #4
    def test_weight_power_scale(self):
        ants = 7
        index1, index2 = np.triu_indices(ants)
        inputs = ['m{:03}h'.format(i) for i in range(ants)]
        corrprods = np.array([(inputs[a], inputs[b]) for (a, b) in zip(index1, index2)])
        # Put fake dataset into chunk store
        store = NpyFileChunkStore(self.tempdir)
        prefix = 'cb1'
        shape = (10, 64, len(index1))

        # Make up some vis data where the expected scaling factors can be
        # computed by hand. Note: the autocorrs are all set to powers of
        # 2 so that we avoid any rounding errors.
        vis = np.full(shape, 2 + 3j, np.complex64)
        vis[:, :, index1 == index2] = 2     # Make all autocorrs real
        vis[3, :, index1 == index2] = 4     # Tests time indexing
        vis[:, 7, index1 == index2] = 4     # Tests frequency indexing
        vis[:, :, ants] *= 8                # The (1, 1) baseline
        vis[4, 5, 0] = 0                    # The (0, 0) baseline
        expected_scale = np.full(shape, 0.25, np.float32)
        expected_scale[3, :, :] = 1 / 16
        expected_scale[:, 7, :] = 1 / 16
        expected_scale[:, :, index1 == 1] /= 8
        expected_scale[:, :, index2 == 1] /= 8
        expected_scale[4, 5, index1 == 0] = 2.0**-32
        expected_scale[4, 5, index2 == 0] = 2.0**-32

        data, chunk_info = put_fake_dataset(
            store, prefix, shape, array_overrides={'correlator_data': vis})
        vfw = ChunkStoreVisFlagsWeights(store, chunk_info, corrprods)
        weights = data['weights'] * data['weights_channel'][..., np.newaxis] * expected_scale

        # Check that data is as expected when accessed via VisFlagsWeights
        assert_equal(vfw.shape, data['correlator_data'].shape)
        assert_array_equal(vfw.vis.compute(), data['correlator_data'])
        assert_array_equal(vfw.flags.compute(), data['flags'])
        assert_array_equal(vfw.weights.compute(), weights)
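The hand-built expected_scale above is consistent with a simple rule: the weight scale for a baseline is the reciprocal of the product of the two corresponding autocorrelation powers, with a zero product clamped to 2.0 ** -32. A minimal sketch of that rule, assuming this is indeed how the scaling is meant to work (it reproduces the values in this test, but is not katdal's actual implementation):

    import numpy as np

    def expected_power_scale(vis, index1, index2):
        # Scale per baseline = 1 / (autocorr of antenna p * autocorr of antenna q);
        # a zero product is clamped to 2.0 ** -32 instead of dividing by zero.
        auto = vis[:, :, index1 == index2].real          # (time, freq, n_ants)
        product = auto[:, :, index1] * auto[:, :, index2]
        with np.errstate(divide='ignore'):
            scale = np.float32(1.0) / product
        scale[product == 0] = np.float32(2.0 ** -32)
        return scale

Applied to the vis array constructed above, this reproduces expected_scale exactly, since every value involved is a power of two and therefore representable without rounding in float32.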