def test_18_timebins(self):
    model = TemporalIgnore(layer_model=LayerMappedModelMock())
    time_bins = [(70 + i * 10, 80 + i * 10) for i in range(18)]
    model.start_recording(recording_target='IT', time_bins=time_bins)
    recordings = model.look_at('dummy')
    assert set(recordings.dims) == {'presentation', 'neuroid', 'time_bin'}
    np.testing.assert_array_equal(recordings['time_bin_start'].values,
                                  [start for start, end in time_bins])
    np.testing.assert_array_equal(recordings['time_bin_end'].values,
                                  [end for start, end in time_bins])

def test_two_timebins(self):
    layer_model = LayerMappedModelMock()
    model = TemporalIgnore(layer_model=layer_model)
    model.start_recording(recording_target='IT',
                          time_bins=[(70, 170), (170, 270)])
    recordings = model.look_at('dummy')
    assert set(recordings.dims) == {'presentation', 'neuroid', 'time_bin'}
    np.testing.assert_array_equal(recordings['time_bin_start'].values,
                                  [70, 170])
    np.testing.assert_array_equal(recordings['time_bin_end'].values,
                                  [170, 270])
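These tests, and test_single_timebin further down, use a LayerMappedModelMock that is not part of this excerpt. A minimal sketch of such a mock is shown below, assuming TemporalIgnore only needs the wrapped model to accept start_recording and to return a time-less NeuroidAssembly from look_at; the class body, the coordinate names, and the brainio import path are illustrative assumptions, not the test suite's actual mock.

import numpy as np
from brainio.assemblies import NeuroidAssembly


class LayerMappedModelMock:
    """Hypothetical stand-in: ignores the stimuli and returns a fixed
    assembly with only 'presentation' and 'neuroid' dimensions."""

    def start_recording(self, recording_target, *args, **kwargs):
        self.recording_target = recording_target  # e.g. 'IT'

    def look_at(self, stimuli, *args, **kwargs):
        num_neuroids = 10
        return NeuroidAssembly(
            np.random.rand(1, num_neuroids),
            coords={'stimulus_id': ('presentation', [str(stimuli)]),
                    'neuroid_id': ('neuroid', list(range(num_neuroids))),
                    'region': ('neuroid', ['IT'] * num_neuroids)},
            dims=['presentation', 'neuroid'])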
Example #3
def _call(
        self,
        model_identifier,
        benchmark_identifier,
        visual_degrees,  # storage fields
        model,
        benchmark,
        layers,
        prerun=False):
    layer_scores = []
    for i, layer in enumerate(tqdm(layers, desc="layers")):
        layer_model = LayerMappedModel(
            identifier=f"{model_identifier}-{layer}",  # per-layer identifier to avoid overlap
            visual_degrees=visual_degrees,
            activations_model=model,
            region_layer_map={benchmark.region: layer})
        layer_model = TemporalIgnore(layer_model)
        if i == 0 and prerun:  # pre-run activations together to avoid running every layer separately
            # we can only pre-run stimuli in response to the benchmark, since we might otherwise be missing
            # visual_degrees resizing.
            layer_model = PreRunLayers(model=model,
                                       layers=layers,
                                       forward=layer_model)
        score = benchmark(layer_model)
        score = score.expand_dims('layer')
        score['layer'] = [layer]
        layer_scores.append(score)
    layer_scores = Score.merge(*layer_scores)
    layer_scores = layer_scores.sel(layer=layers)  # preserve layer ordering
    return layer_scores

def build_layer_model(self, identifier, model, benchmark, layer):
    layer_model = LayerMappedModel(
        identifier=identifier,
        activations_model=model,
        region_layer_map={benchmark.region: layer})
    layer_model = TemporalIgnore(layer_model)
    return layer_model
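The PreRunLayers wrapper used in the loop above is not shown on this page. Based only on how it is constructed here (it receives the activations model, the full layer list, and the wrapped per-layer model as forward) and on the prerun pattern in Example #5, a minimal sketch of what such a wrapper could look like follows; the class body is an assumption for illustration, not the library's actual implementation.

class PreRunLayers:
    """Hypothetical sketch: compute activations for all layers in one pass,
    then delegate to the wrapped single-layer brain model."""

    def __init__(self, model, layers, forward):
        self._model = model      # activations model, callable with layers= and stimuli=
        self._layers = layers    # all layers to pre-compute so later calls hit the cache
        self._forward = forward  # the wrapped per-layer model (TemporalIgnore above)

    def look_at(self, stimuli):
        self._model(layers=self._layers, stimuli=stimuli)  # warm the cache for every layer at once
        return self._forward.look_at(stimuli)  # then answer from the single-layer model as usual

    def __getattr__(self, item):
        # forward everything else (start_recording, visual_degrees, ...) to the wrapped model
        return getattr(self._forward, item)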
Example #5
    def _call(
            self,
            model_identifier,
            benchmark_identifier,  # storage fields
            model,
            benchmark,
            layers,
            prerun=False):
        if prerun:
            # pre-run activations together to avoid running every layer separately
            model(layers=layers, stimuli=benchmark._assembly.stimulus_set)

        layer_scores = []
        for layer in tqdm(layers, desc="layers"):
            layer_model = LayerMappedModel(
                identifier=f"{model_identifier}-{layer}",
                # per-layer identifier to avoid overlap
                activations_model=model,
                region_layer_map={benchmark.region: layer})
            layer_model = TemporalIgnore(layer_model)
            score = benchmark(layer_model)
            score = score.expand_dims('layer')
            score['layer'] = [layer]
            layer_scores.append(score)
        layer_scores = Score.merge(*layer_scores)
        layer_scores = layer_scores.sel(
            layer=layers)  # preserve layer ordering
        return layer_scores
Example #6
def load(model_name=model_name, layer=layer, region=region, pca_components=pca_components):
    activations_model = base_model_pool[model_name]
    if pca_components:
        LayerPCA.hook(activations_model, n_components=pca_components)
        activations_model.identifier += "-pca_1000"
    model = LayerMappedModel(f"{model_name}-{layer}", activations_model=activations_model, visual_degrees=8)
    model.commit(region, layer)
    model = TemporalIgnore(model)
    return model
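A brief usage sketch for the load factory above; it assumes the enclosing module defines model_name, layer, region, and pca_components (they are captured as default arguments), and that stimulus_set is a brainio StimulusSet available in the caller's scope.

# Hypothetical usage of the factory above; stimulus_set is assumed to be defined elsewhere.
it_model = load()
it_model.start_recording(recording_target=region, time_bins=[(70, 170)])
recordings = it_model.look_at(stimulus_set)  # presentation x neuroid; time_bin squeezed for a single bin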
Example #7
    def test_newmodel_pytorch(self):
        import torch
        from torch import nn
        from model_tools.activations.pytorch import load_preprocess_images

        class MyModel(nn.Module):
            def __init__(self):
                super(MyModel, self).__init__()
                self.conv1 = torch.nn.Conv2d(in_channels=3,
                                             out_channels=2,
                                             kernel_size=3)
                self.relu1 = torch.nn.ReLU()
                linear_input_size = np.power((224 - 3 + 2 * 0) / 1 + 1, 2) * 2
                self.linear = torch.nn.Linear(int(linear_input_size), 1000)
                self.relu2 = torch.nn.ReLU(
                )  # can't get named ReLU output otherwise

                # init weights for reproducibility
                self.conv1.weight.data.fill_(0.01)
                self.conv1.bias.data.fill_(0.01)
                self.linear.weight.data.fill_(0.01)
                self.linear.bias.data.fill_(0.01)

            def forward(self, x):
                x = self.conv1(x)
                x = self.relu1(x)
                x = x.view(x.size(0), -1)
                x = self.linear(x)
                x = self.relu2(x)
                return x

        preprocessing = functools.partial(load_preprocess_images,
                                          image_size=224)
        model_id = 'new_pytorch'
        activations_model = PytorchWrapper(model=MyModel(),
                                           preprocessing=preprocessing,
                                           identifier=model_id)
        layer = 'relu2'
        candidate = LayerMappedModel(f"{model_id}-{layer}",
                                     activations_model=activations_model,
                                     visual_degrees=8)
        candidate.commit('IT', layer)
        candidate = TemporalIgnore(candidate)

        ceiled_score = score_model(
            model_identifier=model_id,
            model=candidate,
            benchmark_identifier='dicarlo.MajajHong2015.IT-pls')
        score = ceiled_score.raw
        assert score.sel(aggregation='center') == approx(.0820823, abs=.01)
    def _call(
            self,
            model_identifier,
            benchmark_identifier,  # storage fields
            model,
            benchmark,
            layers,
            prerun=False):
        if prerun:
            # pre-run activations together to avoid running every layer separately
            model(layers=layers, stimuli=benchmark._assembly.stimulus_set)

        for layer in tqdm(layers, desc="layers"):
            layer_model = LayerMappedModel(
                identifier=f"{model_identifier}-{layer}",
                activations_model=model,
                region_layer_map={benchmark.region: layer})
            layer_model = TemporalIgnore(layer_model)
            layer_model.start_recording(benchmark.region,
                                        time_bins=benchmark.timebins)
            layer_model.look_at(benchmark._assembly.stimulus_set)
def test_single_timebin(self):
    model = TemporalIgnore(layer_model=LayerMappedModelMock())
    model.start_recording(recording_target='IT', time_bins=[(70, 170)])
    recordings = model.look_at('dummy')
    assert set(recordings.dims) == {'presentation', 'neuroid'}  # squeezed time-bin