def test_repeated_dim_and_adjacent(self):
        source_assembly = np.random.rand(5, 5)
        source_assembly = NeuroidAssembly(
            source_assembly,
            coords={
                'image_id':
                ('presentation', list(range(source_assembly.shape[0]))),
                'image_meta':
                ('presentation', np.zeros(source_assembly.shape[0])),
                'adjacent': 12
            },
            dims=['presentation', 'presentation'])

        target_assembly = NeuroidAssembly(
            np.zeros(3), coords={
                'image_id': [0, 2, 3]
            }, dims=['image_id']).stack(presentation=('image_id', ))

        subset_assembly = subset(source_assembly,
                                 target_assembly,
                                 subset_dims=('presentation', ),
                                 repeat=True)
        np.testing.assert_array_equal(subset_assembly.shape, (3, 3))
        assert set(subset_assembly['image_id'].values) == set(
            target_assembly['image_id'].values)
        assert subset_assembly['adjacent'] == 12

    def test_smaller_last(self):
        source_assembly = np.random.rand(100, 3)
        source_assembly = NeuroidAssembly(
            source_assembly,
            coords={
                'image_id': list(range(source_assembly.shape[0])),
                'neuroid_id': list(range(source_assembly.shape[1]))
            },
            dims=['image_id', 'neuroid_id'])
        source_assembly = source_assembly.stack(presentation=('image_id', ),
                                                neuroid=('neuroid_id', ))

        target_assembly = source_assembly.sel(
            presentation=list(map(lambda x: (50 + x, ), range(50))),
            neuroid=list(map(lambda x: (1 + x, ), range(2))))

        subset_assembly = subset(source_assembly,
                                 target_assembly,
                                 subset_dims=('presentation', ))
        np.testing.assert_array_equal(subset_assembly.coords.keys(),
                                      target_assembly.coords.keys())
        for coord_name in target_assembly.coords:
            assert all(
                subset_assembly[coord_name] == target_assembly[coord_name])
        assert (subset_assembly == target_assembly).all()
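
The two tests above exercise subset(), which keeps only the source entries whose coordinate values along subset_dims also occur in the target. A rough plain-xarray approximation of that behaviour (illustrative only, not the brain-score implementation):

import numpy as np
import xarray as xr

# keep only the source presentations whose image_id also appears in the target
source = xr.DataArray(np.random.rand(5, 3),
                      coords={'image_id': ('presentation', list(range(5)))},
                      dims=['presentation', 'neuroid'])
target_image_ids = [0, 2, 3]
kept = source.where(source['image_id'].isin(target_image_ids), drop=True)
print(dict(kept.sizes))  # {'presentation': 3, 'neuroid': 3}
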
Example #3
    def _package_prediction(self, predicted_values, source):
        coords = {
            coord: (dims, values)
            for coord, dims, values in walk_coords(source)
            if not array_is_element(dims, self._neuroid_dim)
        }
        # re-package neuroid coords
        dims = source.dims
        # if there is only one neuroid coordinate, it would get discarded and the dimension would be used as coordinate.
        # to avoid this, we can build the assembly first and then stack on the neuroid dimension.
        neuroid_level_dim = None
        # extract single key: https://stackoverflow.com/a/20145927/2225200
        if len(self._target_neuroid_values) == 1:
            (neuroid_level_dim, _), = self._target_neuroid_values.items()
            dims = [
                dim if dim != self._neuroid_dim else neuroid_level_dim
                for dim in dims
            ]
        for target_coord, target_value in self._target_neuroid_values.items():
            # this might overwrite values which is okay
            coords[target_coord] = (neuroid_level_dim
                                    or self._neuroid_dim), target_value
        prediction = NeuroidAssembly(predicted_values,
                                     coords=coords,
                                     dims=dims)
        if neuroid_level_dim:
            prediction = prediction.stack(
                **{self._neuroid_dim: [neuroid_level_dim]})

        return prediction
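
The single-coordinate workaround above can be reproduced with plain xarray: building the array with the lone neuroid-level coordinate as its own dimension and stacking afterwards keeps that coordinate as a MultiIndex level instead of it being dropped. A minimal sketch (hypothetical names, not the library code):

import numpy as np
import xarray as xr

da = xr.DataArray(np.random.rand(3, 2),
                  coords={'neuroid_id': [0, 1]},
                  dims=['presentation', 'neuroid_id'])
da = da.stack(neuroid=['neuroid_id'])  # 'neuroid_id' survives as a MultiIndex level
print(list(da.indexes['neuroid'].names))  # ['neuroid_id']
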
Example #4
 def look_at_cached(self, model_identifier, stimuli_identifier, stimuli):
     responses = self.activations_model(stimuli,
                                        layers=self.recording_layers)
     # map time
     regions = set(responses['region'].values)
     if len(regions) > 1:
         raise NotImplementedError(
             "cannot handle more than one simultaneous region")
     region = list(regions)[0]
     time_bins = [
         self.time_mapping[region][timestep]
         if timestep in self.time_mapping[region] else (None, None)
         for timestep in responses['time_step'].values
     ]
     responses['time_bin_start'] = 'time_step', [
         time_bin[0] for time_bin in time_bins
     ]
     responses['time_bin_end'] = 'time_step', [
         time_bin[1] for time_bin in time_bins
     ]
     responses = NeuroidAssembly(responses.rename({'time_step':
                                                   'time_bin'}))
     responses = responses[{
         'time_bin': [
             not np.isnan(time_start)
             for time_start in responses['time_bin_start']
         ]
     }]
     # select time
     time_responses = []
     for time_bin in tqdm(self.recording_time_bins,
                          desc='CORnet-time to recording time'):
         time_bin = time_bin if not isinstance(
             time_bin, np.ndarray) else time_bin.tolist()
         time_bin_start, time_bin_end = time_bin
         nearest_start = find_nearest(responses['time_bin_start'].values,
                                      time_bin_start)
         bin_responses = responses.sel(time_bin_start=nearest_start)
         bin_responses = NeuroidAssembly(
             bin_responses.values,
             coords={
                 **{
                     coord: (dims, values)
                     for coord, dims, values in walk_coords(bin_responses) if coord not in [
                         'time_bin_level_0', 'time_bin_end'
                     ]
                 },
                 **{
                     'time_bin_start': ('time_bin', [time_bin_start]),
                     'time_bin_end': ('time_bin', [time_bin_end])
                 }
             },
             dims=bin_responses.dims)
         time_responses.append(bin_responses)
     responses = merge_data_arrays(time_responses)
     return responses
Example #5
 def test_alignment(self):
     jumbled_prediction = NeuroidAssembly(np.random.rand(500, 10),
                                          coords={'image_id': ('presentation', list(reversed(range(500)))),
                                                  'image_meta': ('presentation', [0] * 500),
                                                  'neuroid_id': ('neuroid', list(reversed(range(10)))),
                                                  'neuroid_meta': ('neuroid', [0] * 10)},
                                          dims=['presentation', 'neuroid'])
     prediction = jumbled_prediction.sortby(['image_id', 'neuroid_id'])
     correlation = XarrayCorrelation(scipy.stats.pearsonr)
     score = correlation(jumbled_prediction, prediction)
     assert all(score == approx(1))
Example #6
 def _load_assembly(self):
     assembly = load_naturalStories()
     stimulus_set = assembly.stimulus_set
     # we're going to treat subjects as "neuroids" to make it easier for our metrics
     assembly = assembly.mean('subjects')
     assembly = assembly.expand_dims('neuroid')
     assembly['neuroid_id'] = 'neuroid', [0]
     assembly['subject_id'] = 'neuroid', ['all']
     assembly = NeuroidAssembly(assembly)
     assembly.attrs['stimulus_set'] = stimulus_set
     return assembly
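
The subjects-to-"neuroid" trick above boils down to collapsing the subjects dimension and re-introducing a length-one neuroid dimension that carries the metadata the metrics expect. In plain xarray (made-up data, just to illustrate):

import numpy as np
import xarray as xr

per_subject = xr.DataArray(np.random.rand(4, 3), dims=['word', 'subjects'])
collapsed = per_subject.mean('subjects')      # shape: (word,)
collapsed = collapsed.expand_dims('neuroid')  # shape: (neuroid=1, word)
collapsed = collapsed.assign_coords(neuroid_id=('neuroid', [0]),
                                    subject_id=('neuroid', ['all']))
print(collapsed.dims, collapsed['subject_id'].values)  # ('neuroid', 'word') ['all']
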
Example #7
 def test_neuroid_single_coord(self):
     jumbled_source = NeuroidAssembly(np.random.rand(500, 10),
                                      coords={'image_id': ('presentation', list(reversed(range(500)))),
                                              'image_meta': ('presentation', [0] * 500),
                                              'neuroid_id': ('neuroid_id', list(reversed(range(10))))},
                                      dims=['presentation', 'neuroid_id']).stack(neuroid=['neuroid_id'])
     target = jumbled_source.sortby(['image_id', 'neuroid_id'])
     regression = XarrayRegression(LinearRegression())
     regression.fit(jumbled_source, target)
     prediction = regression.predict(jumbled_source)
     assert set(prediction.dims) == {'presentation', 'neuroid'}
     assert len(prediction['neuroid_id']) == 10
Example #8
 def test_misaligned(self):
     jumbled_source = NeuroidAssembly(np.random.rand(500, 10),
                                      coords={'image_id': ('presentation', list(reversed(range(500)))),
                                              'image_meta': ('presentation', [0] * 500),
                                              'neuroid_id': ('neuroid', list(reversed(range(10)))),
                                              'neuroid_meta': ('neuroid', [0] * 10)},
                                      dims=['presentation', 'neuroid'])
     target = jumbled_source.sortby(['image_id', 'neuroid_id'])
     cv = CrossValidation(splits=10, stratification_coord=None)
     metric = self.MetricPlaceholder()
     score = cv(jumbled_source, target, apply=metric)
     assert len(metric.train_source_assemblies) == len(metric.test_source_assemblies) == \
            len(metric.train_target_assemblies) == len(metric.test_target_assemblies) == 10
     assert len(score.attrs['raw']) == 10
Example #9
 def test_one_division_similarity_dim_last(self):
     assembly = np.random.rand(3, 100)
     assembly = NeuroidAssembly(
         assembly,
         coords={'neuroid': list(range(assembly.shape[1])), 'division_coord': list(range(assembly.shape[0]))},
         dims=['division_coord', 'neuroid'])
     transformation = CartesianProduct(dividers=['division_coord'])
     placeholder = self.MetricPlaceholder()
     transformation(assembly, apply=placeholder)
     assert len(assembly['division_coord']) == len(placeholder.assemblies)
     targets = [assembly.sel(division_coord=i) for i in assembly['division_coord'].values]
     for target in targets:
         match = any((actual == target).all() for actual in placeholder.assemblies)
         assert match, f"expected divided assembly not found: {target}"
Example #10
 def test_fitpredict_alignment(self):
     jumbled_source = NeuroidAssembly(np.random.rand(500, 10),
                                      coords={'image_id': ('presentation', list(reversed(range(500)))),
                                              'image_meta': ('presentation', [0] * 500),
                                              'neuroid_id': ('neuroid', list(reversed(range(10)))),
                                              'neuroid_meta': ('neuroid', [0] * 10)},
                                      dims=['presentation', 'neuroid'])
     target = jumbled_source.sortby(['image_id', 'neuroid_id'])
     regression = XarrayRegression(LinearRegression())
     regression.fit(jumbled_source, target)
     prediction = regression.predict(jumbled_source)
     # do not test for alignment of metadata - it is only important that the data is well-aligned with the metadata.
     np.testing.assert_array_almost_equal(prediction.sortby(['image_id', 'neuroid_id']).values,
                                          target.sortby(['image_id', 'neuroid_id']).values)
Example #11
def test_average_label_trials():
    assembly = NeuroidAssembly(
        [
            ['a'],
            ['a'],
            ['a'],
            ['b'],
            ['b'],
            ['a'],
        ],
        coords={
            'image_id': ('presentation', ['a', 'a', 'a', 'b', 'b', 'b']),
            'repetition': ('presentation', [0, 1, 2, 0, 1, 2]),
            'presentation_dummy': ('presentation', ['x'] * 6),
            'choice': ('choice', ['dummy'])
        },
        dims=['presentation', 'choice'])
    averaged_assembly = average_trials(assembly)
    assert len(averaged_assembly['choice']) == 1, "messed up dimension"
    assert len(averaged_assembly['presentation']) == 2
    assert set(averaged_assembly['image_id'].values) == {'a', 'b'}
    np.testing.assert_array_equal(
        averaged_assembly.sel(image_id='a').values, [['a']])
    np.testing.assert_array_equal(
        averaged_assembly.sel(image_id='b').values, [['b']])
Example #12
def test_average_neural_trials():
    assembly = NeuroidAssembly(
        [[1, 2, 3], [2, 3, 4], [3, 4, 5], [4, 5, 6], [5, 6, 7], [6, 7, 8],
         [7, 8, 9], [8, 9, 10]],
        coords={
            'image_id':
            ('presentation', ['a', 'a', 'b', 'b', 'c', 'c', 'd', 'd']),
            'repetition': ('presentation', [0, 1, 0, 1, 0, 1, 0, 1]),
            'presentation_dummy': ('presentation', ['x'] * 8),
            'neuroid_id': ('neuroid', [0, 1, 2]),
            'region': ('neuroid', ['IT', 'IT', 'IT'])
        },
        dims=['presentation', 'neuroid'])
    averaged_assembly = average_trials(assembly)
    assert len(averaged_assembly['neuroid']) == 3, "messed up neuroids"
    assert len(averaged_assembly['presentation']) == 4
    assert set(averaged_assembly['image_id'].values) == {'a', 'b', 'c', 'd'}
    np.testing.assert_array_equal(averaged_assembly['neuroid_id'].values,
                                  assembly['neuroid_id'].values)
    np.testing.assert_array_equal(
        averaged_assembly.sel(image_id='a').values, [[1.5, 2.5, 3.5]])
    np.testing.assert_array_equal(
        averaged_assembly.sel(image_id='b').values, [[3.5, 4.5, 5.5]])
    np.testing.assert_array_equal(
        averaged_assembly.sel(image_id='c').values, [[5.5, 6.5, 7.5]])
    np.testing.assert_array_equal(
        averaged_assembly.sel(image_id='d').values, [[7.5, 8.5, 9.5]])
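
What the two tests above expect from average_trials can be approximated with an xarray groupby over image_id, averaging the repetitions of each image; this is only an illustration of the expected behaviour, not the library implementation:

import xarray as xr

da = xr.DataArray([[1, 2, 3], [2, 3, 4], [3, 4, 5], [4, 5, 6]],
                  coords={'image_id': ('presentation', ['a', 'a', 'b', 'b']),
                          'neuroid_id': ('neuroid', [0, 1, 2])},
                  dims=['presentation', 'neuroid'])
print(da.groupby('image_id').mean('presentation').values)
# [[1.5 2.5 3.5]
#  [3.5 4.5 5.5]]
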
Example #13
 def from_paths(self, *args, **kwargs):
     raw_activations = super(TemporalExtractor, self).from_paths(*args, **kwargs)
     # introduce time dimension
     regions = defaultdict(list)
     for layer in set(raw_activations['layer'].values):
         match = re.match(r'(([^-]*)\..*|logits|avgpool)-t([0-9]+)', layer)
         region, timestep = match.group(2) if match.group(2) else match.group(1), match.group(3)
         stripped_layer = match.group(1)
         regions[region].append((layer, stripped_layer, timestep))
     activations = {}
     for region, time_layers in regions.items():
         for (full_layer, stripped_layer, timestep) in time_layers:
             region_time_activations = raw_activations.sel(layer=full_layer)
             region_time_activations['layer'] = 'neuroid', [stripped_layer] * len(region_time_activations['neuroid'])
             activations[(region, timestep)] = region_time_activations
     for key, key_activations in activations.items():
         region, timestep = key
         key_activations['region'] = 'neuroid', [region] * len(key_activations['neuroid'])
         activations[key] = NeuroidAssembly([key_activations.values], coords={
             **{coord: (dims, values) for coord, dims, values in walk_coords(activations[key])
                if coord != 'neuroid_id'},  # otherwise, neuroid dim will be as large as before with nans
             **{'time_step': [int(timestep)]}
         }, dims=['time_step'] + list(key_activations.dims))
     activations = list(activations.values())
     activations = merge_data_arrays(activations)
     # rebuild neuroid_id without timestep
     neuroid_id = [".".join([f"{value}" for value in values]) for values in zip(*[
         activations[coord].values for coord in ['model', 'region', 'neuroid_num']])]
     activations['neuroid_id'] = 'neuroid', neuroid_id
     return activations
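
The layer-name regex above splits CORnet-style names into a region, the layer without its timestep suffix, and the timestep; for 'logits'/'avgpool' there is no region, so group(2) is None and group(1) is used instead. A quick check with illustrative layer names:

import re

pattern = r'(([^-]*)\..*|logits|avgpool)-t([0-9]+)'
m = re.match(pattern, 'V1.output-t0')
print(m.group(1), m.group(2), m.group(3))  # V1.output V1 0
m = re.match(pattern, 'logits-t3')
print(m.group(1), m.group(2), m.group(3))  # logits None 3
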
Example #14
    def test_no_expand_raw_level(self):
        assembly = np.random.rand(3, 100)
        assembly = NeuroidAssembly(assembly,
                                   coords={
                                       'neuroid':
                                       list(range(assembly.shape[1])),
                                       'division_coord':
                                       list(range(assembly.shape[0]))
                                   },
                                   dims=['division_coord', 'neuroid'])
        transformation = CartesianProduct(dividers=['division_coord'])

        class RawMetricPlaceholder(Metric):
            def __call__(self, assembly, *args, **kwargs):
                result = Score([assembly.values[0]], dims=['dim'])
                raw = Score(result.copy(),
                            coords={
                                'dim_id': ('dim', [assembly.values[1]]),
                                'division_coord': ('dim', [assembly.values[2]])
                            })
                result.attrs['raw'] = raw
                return result

        metric = RawMetricPlaceholder()
        result = transformation(assembly, apply=metric)
        assert result.dims == ("division_coord", "dim")
        assert hasattr(result, 'raw')
        assert result.raw.dims == ("dim", )
        assert 'division_coord' not in result.raw.dims  # no dimension
        assert hasattr(result.raw, 'division_coord')  # but a level
        assert result.raw["dim"].variable.level_names == [
            "dim_id", "division_coord"
        ]
Example #15
 def _package_layer(self, layer_activations, layer, stimuli_paths):
     assert layer_activations.shape[0] == len(stimuli_paths)
     activations, flatten_indices = flatten(layer_activations, return_index=True)  # collapse for single neuroid dim
     assert flatten_indices.shape[1] in [1, 2, 3]
     # see comment in _package for an explanation why we cannot simply have 'channel' for the FC layer
     if flatten_indices.shape[1] == 1:    # FC
         flatten_coord_names = ['channel', 'channel_x', 'channel_y']
     elif flatten_indices.shape[1] == 2:  # Transformer
         flatten_coord_names = ['channel', 'embedding']
     elif flatten_indices.shape[1] == 3:  # 2DConv
         flatten_coord_names = ['channel', 'channel_x', 'channel_y']
     flatten_coords = {flatten_coord_names[i]: [sample_index[i] if i < flatten_indices.shape[1] else np.nan for sample_index in flatten_indices]
                       for i in range(len(flatten_coord_names))}
     layer_assembly = NeuroidAssembly(
         activations,
         coords={**{'stimulus_path': stimuli_paths,
                    'neuroid_num': ('neuroid', list(range(activations.shape[1]))),
                    'model': ('neuroid', [self.identifier] * activations.shape[1]),
                    'layer': ('neuroid', [layer] * activations.shape[1]),
                    },
                 **{coord: ('neuroid', values) for coord, values in flatten_coords.items()}},
         dims=['stimulus_path', 'neuroid']
     )
     neuroid_id = [".".join([f"{value}" for value in values]) for values in zip(*[
         layer_assembly[coord].values for coord in ['model', 'layer', 'neuroid_num']])]
     layer_assembly['neuroid_id'] = 'neuroid', neuroid_id
     return layer_assembly
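
The neuroid_id built at the end of _package_layer (and again in the TemporalExtractor example above) is simply the dot-joined tuple of per-neuroid metadata. A tiny illustration with made-up values:

model = ['alexnet', 'alexnet']
layer = ['features.2', 'features.2']
neuroid_num = [0, 1]
neuroid_id = [".".join(f"{value}" for value in values)
              for values in zip(model, layer, neuroid_num)]
print(neuroid_id)  # ['alexnet.features.2.0', 'alexnet.features.2.1']
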
Example #16
 def test_equal(self):
     assembly = np.random.rand(100, 3)
     assembly = NeuroidAssembly(assembly,
                                coords={
                                    'image_id':
                                    list(range(assembly.shape[0])),
                                    'neuroid_id':
                                    list(range(assembly.shape[1]))
                                },
                                dims=['image_id', 'neuroid_id'])
     assembly = assembly.stack(presentation=('image_id', ),
                               neuroid=('neuroid_id', ))
     subset_assembly = subset(assembly,
                              assembly,
                              subset_dims=('presentation', ))
     assert (subset_assembly == assembly).all()
Example #17
 def test_alignment(self):
     assembly = NeuroidAssembly(
         [[1, 2], [1, 2], [4, 3], [4, 3]],
         coords={
             'image_id': ('presentation', list(range(4))),
             'image_meta': ('presentation', list(range(4))),
             'neuroid_id': ('neuroid', list(range(2))),
             'neuroid_meta': ('neuroid', list(range(2)))
         },
         dims=['presentation', 'neuroid'])
     matrix = RSA()(assembly)
     assert np.all(np.diag(matrix) == approx(1., abs=.001))
     assert all(matrix.values[np.triu_indices(matrix.shape[0], k=1)] ==
                matrix.values[np.tril_indices(matrix.shape[0], k=-1)]
                ), "upper and lower triangular need to be equal"
     expected = DataAssembly(
         [[1., 1., -1., -1.], [1., 1., -1., -1.], [-1., -1., 1., 1.],
          [-1., -1., 1., 1.]],
         coords={
             'image_id': ('presentation', list(range(4))),
             'image_meta': ('presentation', list(range(4)))
         },
         dims=['presentation', 'presentation'])
     np.testing.assert_array_almost_equal(
         matrix.values,
         expected.values)  # does not take ordering into account
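
The expected matrix in the test above follows directly from the assembly: the two [1, 2] presentations correlate perfectly with each other and anti-correlate with the two [4, 3] presentations, which plain numpy confirms:

import numpy as np

responses = np.array([[1, 2], [1, 2], [4, 3], [4, 3]])
print(np.corrcoef(responses))  # +1 within the [1, 2] and [4, 3] pairs, -1 across them
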
Example #18
def build_assembly(assembly, coord_list=['ty', 'tz']):
    values = np.stack(
            [getattr(assembly, coord).values for coord in coord_list], 
            axis=1)
    coords = {
            'neuroid_id': ('neuroid', list(range(len(coord_list)))),
            'neuroid_meaning': ('neuroid', coord_list)}
    for coord, dims, value in walk_coords(assembly):
        if len(dims) == 0:
            continue
        if dims[0] == 'presentation':
            coords[coord] = ('presentation', value)
    new_assembly = NeuroidAssembly(
            values,
            coords=coords,
            dims=['presentation', 'neuroid'])
    new_assembly.attrs['stimulus_set'] = assembly.stimulus_set
    return new_assembly
Example #19
 def test_neuroid_single_coord(self):
     prediction = NeuroidAssembly(np.random.rand(500, 10),
                                  coords={'image_id': ('presentation', list(range(500))),
                                          'image_meta': ('presentation', [0] * 500),
                                          'neuroid_id': ('neuroid_id', list(range(10)))},
                                  dims=['presentation', 'neuroid_id']).stack(neuroid=['neuroid_id'])
     correlation = XarrayCorrelation(lambda a, b: (1, 0))
     score = correlation(prediction, prediction)
     np.testing.assert_array_equal(score.dims, ['neuroid'])
     assert len(score['neuroid']) == 10
Example #20
 def test_correlation(self):
     prediction = NeuroidAssembly(np.random.rand(500, 10),
                                  coords={'image_id': ('presentation', list(range(500))),
                                          'image_meta': ('presentation', [0] * 500),
                                          'neuroid_id': ('neuroid', list(range(10))),
                                          'neuroid_meta': ('neuroid', [0] * 10)},
                                  dims=['presentation', 'neuroid'])
     correlation = XarrayCorrelation(lambda a, b: (1, 0))
     score = correlation(prediction, prediction)
     assert all(score == approx(1))
Example #21
 def test_small(self):
     assembly = NeuroidAssembly((np.arange(30 * 25) + np.random.standard_normal(30 * 25)).reshape((30, 25)),
                                coords={'image_id': ('presentation', np.arange(30)),
                                        'object_name': ('presentation', ['a', 'b', 'c'] * 10),
                                        'neuroid_id': ('neuroid', np.arange(25)),
                                        'region': ('neuroid', ['some_region'] * 25)},
                                dims=['presentation', 'neuroid'])
     metric = CrossRegressedCorrelation(regression=pls_regression(), correlation=pearsonr_correlation())
     score = metric(source=assembly, target=assembly)
     assert score.sel(aggregation='center') == approx(1, abs=.00001)
Example #22
 def look_at(self, *args, **kwargs):
     return NeuroidAssembly(
         [[1, 2, 3], [1, 2, 3]],
         coords={
             'image_id': ('presentation', ['image1', 'image2']),
             'object_name': ('presentation', ['number', 'number']),
             'neuroid_id': ('neuroid', [1, 2, 3]),
             'region': ('neuroid', ['IT'] * 3),
         },
         dims=['presentation', 'neuroid'])
Example #23
 def test_dummy_data(self):
     data = NeuroidAssembly(np.tile(np.arange(10)[:, np.newaxis], [5, 10]),
                            coords={'image_id': ('presentation', np.tile(list(alphabet)[:10], 5)),
                                    'image_meta': ('presentation', np.tile(list(alphabet)[:10], 5)),
                                    'repetition': ('presentation', np.repeat(np.arange(5), 10)),
                                    'neuroid_id': ('neuroid', np.arange(10)),
                                    'neuroid_meta': ('neuroid', np.arange(10))},
                            dims=['presentation', 'neuroid'])
     ceiler = InternalConsistency()
     ceiling = ceiler(data)
     assert ceiling.sel(aggregation='center') == 1
Example #24
 def test_dimensions(self):
     prediction = NeuroidAssembly(np.random.rand(500, 10),
                                  coords={'image_id': ('presentation', list(range(500))),
                                          'image_meta': ('presentation', [0] * 500),
                                          'neuroid_id': ('neuroid', list(range(10))),
                                          'neuroid_meta': ('neuroid', [0] * 10)},
                                  dims=['presentation', 'neuroid'])
     correlation = XarrayCorrelation(lambda a, b: (1, 0))
     score = correlation(prediction, prediction)
     np.testing.assert_array_equal(score.dims, ['neuroid'])
     np.testing.assert_array_equal(score.shape, [10])
Example #25
def build_cate_assembly(assembly):
    category_names = assembly.category_name.values
    unique_cate_names = np.unique(category_names)
    # Tricky solution for some weird requirements later
    new_category_names = [
            [curr_name, curr_name]
            for curr_name in category_names]
    coords = {
            'neuroid_id': ('neuroid', [0, 1]),
            'neuroid_meaning': ('neuroid', ['category', 'category'])}
    for coord, dims, value in walk_coords(assembly):
        if len(dims) == 0:
            continue
        if dims[0] == 'presentation':
            coords[coord] = ('presentation', value)
    new_assembly = NeuroidAssembly(
            new_category_names,
            coords=coords,
            dims=['presentation', 'neuroid'])
    new_assembly.attrs['stimulus_set'] = assembly.stimulus_set
    return new_assembly
Example #26
 def test_small(self, regression_ctr):
     assembly = NeuroidAssembly((np.arange(30 * 25) + np.random.standard_normal(30 * 25)).reshape((30, 25)),
                                coords={'image_id': ('presentation', np.arange(30)),
                                        'object_name': ('presentation', ['a', 'b', 'c'] * 10),
                                        'neuroid_id': ('neuroid', np.arange(25)),
                                        'region': ('neuroid', [None] * 25)},
                                dims=['presentation', 'neuroid'])
     regression = regression_ctr()
     regression.fit(source=assembly, target=assembly)
     prediction = regression.predict(source=assembly)
     assert all(prediction['image_id'] == assembly['image_id'])
     assert all(prediction['neuroid_id'] == assembly['neuroid_id'])
Example #27
 def test_presentation_neuroid(self):
     assembly = NeuroidAssembly(np.random.rand(500, 10),
                                coords={'image_id': ('presentation', list(range(500))),
                                        'image_meta': ('presentation', [0] * 500),
                                        'neuroid_id': ('neuroid', list(range(10))),
                                        'neuroid_meta': ('neuroid', [0] * 10)},
                                dims=['presentation', 'neuroid'])
     cv = CrossValidationSingle(splits=10, stratification_coord=None)
     metric = self.MetricPlaceholder()
     score = cv(assembly, apply=metric)
     assert len(metric.train_assemblies) == len(metric.test_assemblies) == 10
     assert len(score.attrs['raw']['split']) == 10
Example #28
def get_assembly():
    image_names = []
    for i in range(1, 21):
        image_names.append(f'images/{i}.png')
    assembly = NeuroidAssembly(
        (np.arange(40 * 5) + np.random.standard_normal(40 * 5)).reshape(
            (5, 40, 1)),
        coords={
            'image_id': ('presentation', image_names * 2),
            'object_name': ('presentation', ['a'] * 40),
            'repetition': ('presentation', ([1] * 20 + [2] * 20)),
            'neuroid_id': ('neuroid', np.arange(5)),
            'region': ('neuroid', ['IT'] * 5),
            'time_bin_start': ('time_bin', [70]),
            'time_bin_end': ('time_bin', [170])
        },
        dims=['neuroid', 'presentation', 'time_bin'])
    labels = ['a'] * 10 + ['b'] * 10
    stimulus_set = StimulusSet([{
        'image_id': image_names[i],
        'object_name': 'a',
        'image_label': labels[i]
    } for i in range(20)])
    stimulus_set.image_paths = {
        image_name: os.path.join(os.path.dirname(__file__), image_name)
        for image_name in image_names
    }
    stimulus_set.identifier = 'test'
    assembly.attrs['stimulus_set'] = stimulus_set
    assembly.attrs['stimulus_set_name'] = stimulus_set.identifier
    assembly = assembly.squeeze("time_bin")
    return assembly.transpose('presentation', 'neuroid')

    def test_equal_shifted(self):
        target_assembly = np.random.rand(100, 3)
        target_assembly = NeuroidAssembly(
            target_assembly,
            coords={
                'image_id': list(range(target_assembly.shape[0])),
                'neuroid_id': list(range(target_assembly.shape[1]))
            },
            dims=['image_id', 'neuroid_id'])
        target_assembly = target_assembly.stack(presentation=('image_id', ),
                                                neuroid=('neuroid_id', ))

        shifted_values = np.concatenate(
            (target_assembly.values[1:], target_assembly.values[:1]))
        shifted_ids = np.array(list(range(shifted_values.shape[0]))) + 1
        shifted_ids[-1] = 0
        source_assembly = NeuroidAssembly(
            shifted_values,
            coords={
                'image_id': shifted_ids,
                'neuroid_id': list(range(shifted_values.shape[1]))
            },
            dims=['image_id', 'neuroid_id'])
        source_assembly = source_assembly.stack(presentation=('image_id', ),
                                                neuroid=('neuroid_id', ))

        subset_assembly = subset(source_assembly,
                                 target_assembly,
                                 subset_dims=('presentation', ))
        np.testing.assert_array_equal(subset_assembly.coords.keys(),
                                      target_assembly.coords.keys())
        assert subset_assembly.shape == target_assembly.shape

    def test_repeated_target(self):
        source_assembly = np.random.rand(5, 3)
        source_assembly = NeuroidAssembly(
            source_assembly,
            coords={
                'image_id': list(range(source_assembly.shape[0])),
                'neuroid_id': list(range(source_assembly.shape[1]))
            },
            dims=['image_id', 'neuroid_id'])
        source_assembly = source_assembly.stack(presentation=('image_id', ),
                                                neuroid=('neuroid_id', ))

        target_assembly = NeuroidAssembly(
            np.repeat(source_assembly, 2, axis=0),
            coords={
                'image_id':
                np.repeat(list(range(source_assembly.shape[0])), 2, axis=0),
                'neuroid_id':
                list(range(source_assembly.shape[1]))
            },
            dims=['image_id', 'neuroid_id'])
        target_assembly = target_assembly.stack(presentation=('image_id', ),
                                                neuroid=('neuroid_id', ))

        subset_assembly = subset(source_assembly,
                                 target_assembly,
                                 subset_dims=('presentation', ),
                                 repeat=True)
        np.testing.assert_array_equal(subset_assembly.coords.keys(),
                                      target_assembly.coords.keys())
        for coord_name in target_assembly.coords:
            assert all(
                subset_assembly[coord_name] == target_assembly[coord_name])
        np.testing.assert_array_equal(subset_assembly, target_assembly)
        assert (subset_assembly == target_assembly).all()