Example no. 1
def _DicarloMajaj2015RegionLowMidVar(region, identifier_metric_suffix, similarity_metric, ceiler):
    assembly_repetition = LazyLoad(lambda region=region: load_var0var3_assembly(average_repetitions=False, region=region))
    assembly = LazyLoad(lambda region=region: load_var0var3_assembly(average_repetitions=True, region=region))
    return NeuralBenchmark(identifier=f'dicarlo.Majaj2015.lowmidvar.{region}-{identifier_metric_suffix}', version=3,
                           assembly=assembly, similarity_metric=similarity_metric,
                           ceiling_func=lambda: ceiler(assembly_repetition),
                           parent=region, paper_link='http://www.jneurosci.org/content/35/39/13402.short')
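Every example in this collection wraps an expensive loader in LazyLoad so that assemblies, benchmarks, and models are only built when they are first needed. As a rough illustration of the idea (a minimal sketch, not the actual brain-score LazyLoad implementation), a lazy wrapper can cache the result of a zero-argument callable and forward attribute access to it:

class LazyLoadSketch:
    """Minimal lazy-loading wrapper (illustration only, not the actual brain-score LazyLoad)."""

    def __init__(self, load_fn):
        self._load_fn = load_fn   # zero-argument callable that builds the expensive object
        self._content = None      # cache, filled on first use

    def _loaded(self):
        if self._content is None:
            self._content = self._load_fn()
        return self._content

    def __getattr__(self, name):
        # only reached for attributes not found on the wrapper itself,
        # i.e. attributes of the wrapped object
        return getattr(self._loaded(), name)

# hypothetical usage with the loader from Example no. 1:
# assembly = LazyLoadSketch(lambda: load_var0var3_assembly(average_repetitions=True, region='IT'))
# nothing is loaded until an attribute of `assembly` is first accessed

This keeps the pool-building functions in the later examples cheap: constructing the dict only stores callables, and no assembly or model weights are fetched until an entry is actually used.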
Example no. 2
def _DicarloSanghaviMurty2020Region(region, identifier_metric_suffix, similarity_metric, ceiler):
    assembly_repetition = LazyLoad(lambda region=region: load_assembly(average_repetitions=False, region=region))
    assembly = LazyLoad(lambda region=region: load_assembly(average_repetitions=True, region=region))
    return NeuralBenchmark(identifier=f'dicarlo.SanghaviMurty2020.{region}-{identifier_metric_suffix}', version=1,
                           assembly=assembly, similarity_metric=similarity_metric,
                           visual_degrees=VISUAL_DEGREES, number_of_trials=NUMBER_OF_TRIALS,
                           ceiling_func=lambda: ceiler(assembly_repetition),
                           parent=region,
                           bibtex=BIBTEX)
Example no. 3
def _experimental_benchmark_pool():
    """
    Benchmarks that can be used, but are not evaluated for the website.
    """
    pool = {}
    # neural benchmarks
    from .majajhong2015 import DicarloMajajHong2015V4Mask, DicarloMajajHong2015ITMask, \
        DicarloMajajHong2015V4RDM, DicarloMajajHong2015ITRDM
    pool['dicarlo.MajajHong2015.V4-mask'] = LazyLoad(
        DicarloMajajHong2015V4Mask)
    pool['dicarlo.MajajHong2015.IT-mask'] = LazyLoad(
        DicarloMajajHong2015ITMask)
    pool['dicarlo.MajajHong2015.V4-rdm'] = LazyLoad(DicarloMajajHong2015V4RDM)
    pool['dicarlo.MajajHong2015.IT-rdm'] = LazyLoad(DicarloMajajHong2015ITRDM)
    from .freemanziemba2013 import MovshonFreemanZiemba2013V1RDM, MovshonFreemanZiemba2013V2RDM, \
        MovshonFreemanZiemba2013V1Single
    pool['movshon.FreemanZiemba2013.V1-rdm'] = LazyLoad(
        MovshonFreemanZiemba2013V1RDM)
    pool['movshon.FreemanZiemba2013.V2-rdm'] = LazyLoad(
        MovshonFreemanZiemba2013V2RDM)
    pool['movshon.FreemanZiemba2013.V1-single'] = LazyLoad(
        MovshonFreemanZiemba2013V1Single)
    from .cadena2017 import ToliasCadena2017PLS, ToliasCadena2017Mask
    pool['tolias.Cadena2017-pls'] = LazyLoad(ToliasCadena2017PLS)
    pool['tolias.Cadena2017-mask'] = LazyLoad(ToliasCadena2017Mask)

    return pool
Example no. 4
def _standard_benchmark(identifier, load_assembly, visual_degrees, number_of_trials, stratification_coord, bibtex):
    assembly_repetition = LazyLoad(lambda: load_assembly(average_repetitions=False))
    assembly = LazyLoad(lambda: load_assembly(average_repetitions=True))
    similarity_metric = CrossRegressedCorrelation(
        regression=pls_regression(), correlation=pearsonr_correlation(),
        crossvalidation_kwargs=dict(stratification_coord=stratification_coord))
    ceiler = InternalConsistency()
    return NeuralBenchmark(identifier=f"{identifier}-pls", version=1,
                           assembly=assembly, similarity_metric=similarity_metric,
                           visual_degrees=visual_degrees, number_of_trials=number_of_trials,
                           ceiling_func=lambda: ceiler(assembly_repetition),
                           parent=None,
                           bibtex=bibtex)
Example no. 5
def _MovshonFreemanZiemba2013Region(region, identifier_metric_suffix,
                                    similarity_metric, ceiler):
    assembly_repetition = LazyLoad(
        lambda region=region: load_assembly(False, region=region))
    assembly = LazyLoad(
        lambda region=region: load_assembly(True, region=region))
    return NeuralBenchmark(
        identifier=f'movshon.FreemanZiemba2013.{region}-{identifier_metric_suffix}',
        version=2,
        assembly=assembly,
        similarity_metric=similarity_metric,
        ceiling_func=lambda: ceiler(assembly_repetition),
        parent=region,
        paper_link='https://www.nature.com/articles/nn.3402')
Example no. 6
def _DicarloMajajHong2015Region(region, identifier_metric_suffix,
                                similarity_metric, ceiler):
    assembly_repetition = LazyLoad(lambda region=region: load_assembly(
        average_repetitions=False, region=region))
    assembly = LazyLoad(lambda region=region: load_assembly(
        average_repetitions=True, region=region))
    return NeuralBenchmark(
        identifier=f'dicarlo.MajajHong2015.{region}-{identifier_metric_suffix}',
        version=3,
        assembly=assembly,
        similarity_metric=similarity_metric,
        visual_degrees=VISUAL_DEGREES,
        number_of_trials=NUMBER_OF_TRIALS,
        ceiling_func=lambda: ceiler(assembly_repetition),
        parent=region,
        paper_link='http://www.jneurosci.org/content/35/39/13402.short')
Example no. 7
 def __init__(self):
     super(RajalinghamMatchtosamplePublicBenchmark,
           self).__init__(metric=I2n(), metric_identifier='i2n')
     self._assembly = LazyLoad(
         lambda: load_rajalingham2018(access='public'))
     self._ceiling_func = lambda: self._metric.ceiling(self._assembly,
                                                       skipna=True)
Example no. 8
    def __init__(self, base_model_pool, model_layers, reload=True):
        super(MLBrainPool, self).__init__(reload)
        self.reload = True
        for basemodel_identifier, activations_model in base_model_pool.items():
            if basemodel_identifier not in model_layers:
                warnings.warn(
                    f"{basemodel_identifier} not found in model_layers")
                continue
            layers = model_layers[basemodel_identifier]

            from model_tools.brain_transformation import ModelCommitment

            # enforce early parameter binding: https://stackoverflow.com/a/3431699/2225200

            def load(identifier=basemodel_identifier,
                     activations_model=activations_model,
                     layers=layers):
                assert hasattr(activations_model, 'reload')
                activations_model.reload()
                brain_model = ModelCommitment(
                    identifier=identifier,
                    activations_model=activations_model,
                    layers=layers)
                return brain_model

            self[basemodel_identifier] = LazyLoad(load)
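The `load` closure above binds `basemodel_identifier`, `activations_model`, and `layers` as default arguments because closures created in a Python loop capture variables by reference and would otherwise all see the loop's final values (the Stack Overflow link in the comment above explains this). A standalone sketch of the pitfall and the fix, using throwaway names:

# late binding: all three closures see the final value of i
late = [lambda: i for i in range(3)]
print([f() for f in late])    # prints [2, 2, 2]

# early binding via default argument: each closure keeps its own value
early = [lambda i=i: i for i in range(3)]
print([f() for f in early])   # prints [0, 1, 2]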
Example no. 9
 def __init__(self, identifier, metric, data_version='base'):
     self._identifier = identifier
     self._data_version = data_version
     self._target_assembly = LazyLoad(lambda: self._load_assembly(version=self._data_version))
     self._single_metric = metric
     self._ceiler = self.PereiraExtrapolationCeiling(subject_column='subject', num_bootstraps=100)
     self._cross = CartesianProduct(dividers=['experiment', 'atlas'])
Example no. 10
 def __init__(self,
              identifier,
              split_coord='word',
              unique_split_values=False):
     self._logger = logging.getLogger(fullname(self))
     self._identifier = identifier
     assembly = LazyLoad(self._load_assembly)
     self._target_assembly = assembly
     regression = linear_regression(xarray_kwargs=dict(
         stimulus_coord='word_id',
         neuroid_coord='subject_id'))  # used for sorting -- keep at word_id
     correlation = pearsonr_correlation(xarray_kwargs=dict(
         correlation_coord='word_id',
         neuroid_coord='subject_id'))  # used for sorting -- keep at word_id
     self._metric = CrossRegressedCorrelation(
         regression=regression,
         correlation=correlation,
         crossvalidation_kwargs=dict(
             splits=5,
             kfold=True,
             split_coord=split_coord,
             stratification_coord='sentence_id',
             unique_split_values=unique_split_values))
     self._cross_subject = CartesianProduct(dividers=['subject_id'])
     self._ceiler = self.ManySubjectExtrapolationCeiling(
         subject_column='subject_id')
Example no. 11
def _MovshonFreemanZiemba2013Region(region, identifier_metric_suffix,
                                    similarity_metric, ceiler):
    assembly_repetition = LazyLoad(
        lambda region=region: load_assembly(False, region=region))
    assembly = LazyLoad(
        lambda region=region: load_assembly(True, region=region))
    return NeuralBenchmark(
        identifier=f'movshon.FreemanZiemba2013.{region}-{identifier_metric_suffix}',
        version=2,
        assembly=assembly,
        similarity_metric=similarity_metric,
        parent=region,
        ceiling_func=lambda: ceiler(assembly_repetition),
        visual_degrees=VISUAL_DEGREES,
        number_of_trials=NUMBER_OF_TRIALS,
        bibtex=BIBTEX)
Example no. 12
 def __init__(self, identifier, metric):
     self._identifier = identifier
     assembly = LazyLoad(self.load_assembly)
     self._target_assembly = assembly
     self._metric = metric
     self._average_sentence = False
     self._ceiler = ExtrapolationCeiling(subject_column='subject_UID')
     self._electrode_ceiler = self.ElectrodeExtrapolation(subject_column='subject_UID')
Example no. 13
def _standard_benchmark(identifier, load_assembly, stratification_coord):
    assembly_repetition = LazyLoad(
        lambda: load_assembly(average_repetitions=False))
    assembly = LazyLoad(lambda: load_assembly(average_repetitions=True))
    similarity_metric = CrossRegressedCorrelation(
        regression=pls_regression(),
        correlation=pearsonr_correlation(),
        crossvalidation_kwargs=dict(stratification_coord=stratification_coord))
    ceiler = InternalConsistency()
    return NeuralBenchmark(
        identifier=f"{identifier}-pls",
        version=1,
        assembly=assembly,
        similarity_metric=similarity_metric,
        ceiling_func=lambda: ceiler(assembly_repetition),
        parent=None,
        paper_link='http://www.jneurosci.org/content/35/39/13402.short')
Example no. 14
def _engineering_benchmark_pool():
    """
    Additional engineering (ML) benchmarks. These benchmarks are public, but are also evaluated for the website.
    """
    pool = {}

    from .imagenet import Imagenet2012
    pool['fei-fei.Deng2009-top1'] = LazyLoad(Imagenet2012)

    from .imagenet_c import Imagenet_C_Noise, Imagenet_C_Blur, Imagenet_C_Weather, Imagenet_C_Digital
    pool['dietterich.Hendrycks2019-noise-top1'] = LazyLoad(Imagenet_C_Noise)
    pool['dietterich.Hendrycks2019-blur-top1'] = LazyLoad(Imagenet_C_Blur)
    pool['dietterich.Hendrycks2019-weather-top1'] = LazyLoad(
        Imagenet_C_Weather)
    pool['dietterich.Hendrycks2019-digital-top1'] = LazyLoad(
        Imagenet_C_Digital)

    return pool
Example no. 15
def _engineering_benchmark_pool():
    """
    Additional engineering (ML) benchmarks. These benchmarks are public, but are also evaluated for the website.
    """
    pool = {}

    from .imagenet import Imagenet2012
    pool['fei-fei.Deng2009-top1'] = LazyLoad(Imagenet2012)

    return pool
Example no. 16
    def __init__(self):
        super(VOneCORnetBrainPool, self).__init__(reload=True)

        model_pool = {
            'VOneCORnet-S': LazyLoad(vonecornet_s_brainmodel),
        }

        self._accessed_brain_models = []

        for identifier, brain_model in model_pool.items():
            self[identifier] = brain_model
Example no. 17
 def __init__(self):
     self._metric = I2n()
     self._fitting_stimuli = brainscore.get_stimulus_set('dicarlo.objectome.public')
     self._assembly = LazyLoad(lambda: load_assembly('private'))
     self._visual_degrees = 8
     self._number_of_trials = 2
     super(DicarloRajalingham2018I2n, self).__init__(
         identifier='dicarlo.Rajalingham2018-i2n', version=2,
         ceiling_func=lambda: self._metric.ceiling(self._assembly),
         parent='behavior',
         bibtex=BIBTEX)
Example no. 18
    def layer_candidate(self, model_name, layer, region, pca_components: Union[None, int] = 1000):
        def load(model_name=model_name, layer=layer, region=region, pca_components=pca_components):
            activations_model = base_model_pool[model_name]
            if pca_components:
                LayerPCA.hook(activations_model, n_components=pca_components)
                activations_model.identifier += f"-pca_{pca_components}"
            model = LayerMappedModel(f"{model_name}-{layer}", activations_model=activations_model, visual_degrees=8)
            model.commit(region, layer)
            model = TemporalIgnore(model)
            return model

        return LazyLoad(load)  # lazy-load to avoid loading all models right away
Example no. 19
def _public_benchmark_pool():
    """
    Benchmarks that are publicly usable, but are not used for the website.
    """
    pool = {}
    # neural benchmarks
    from .public_benchmarks import FreemanZiembaV1PublicBenchmark, FreemanZiembaV2PublicBenchmark, \
        MajajV4PublicBenchmark, MajajITPublicBenchmark
    pool['movshon.FreemanZiemba2013public.V1-pls'] = LazyLoad(
        FreemanZiembaV1PublicBenchmark)
    pool['movshon.FreemanZiemba2013public.V2-pls'] = LazyLoad(
        FreemanZiembaV2PublicBenchmark)
    pool['dicarlo.Majaj2015public.V4-pls'] = LazyLoad(MajajV4PublicBenchmark)
    pool['dicarlo.Majaj2015public.IT-pls'] = LazyLoad(MajajITPublicBenchmark)

    # behavioral benchmarks
    from .public_benchmarks import RajalinghamMatchtosamplePublicBenchmark
    pool['dicarlo.Rajalingham2018public-i2n'] = LazyLoad(
        RajalinghamMatchtosamplePublicBenchmark)

    return pool
Example no. 20
    def __init__(self):
        super(VOneNetModelPool, self).__init__(reload=True)

        _key_functions = {
            'voneresnet-50': lambda: voneresnet(model_name='resnet50'),
            'voneresnet-50-robust': lambda: voneresnet(model_name='resnet50_at'),
            'voneresnet-50-non_stochastic': lambda: voneresnet(model_name='resnet50_ns'),
        }

        # instantiate models with LazyLoad wrapper
        for identifier, function in _key_functions.items():
            self[identifier] = LazyLoad(function)
Example no. 21
 def __init__(self):
     self._metric = I2n()
     self._fitting_stimuli = brainscore.get_stimulus_set(
         'dicarlo.objectome.public')
     self._assembly = LazyLoad(lambda: load_assembly('private'))
     super(DicarloRajalingham2018I2n, self).__init__(
         identifier='dicarlo.Rajalingham2018-i2n',
         version=2,
         ceiling_func=lambda: self._metric.ceiling(self._assembly),
         parent='behavior',
         paper_link='https://www.biorxiv.org/content/early/2018/02/12/240614'
     )
Example no. 22
    def __init__(self, identifier, bold_shift=4):
        self._identifier = identifier
        assembly = LazyLoad(lambda: self._load_assembly(bold_shift))
        self._target_assembly = assembly
        regression = linear_regression(xarray_kwargs=dict(
            stimulus_coord='stimulus_id', neuroid_coord='neuroid_id'))
        correlation = pearsonr_correlation(xarray_kwargs=dict(
            correlation_coord='stimulus_id', neuroid_coord='neuroid_id'))
        self._metric = CrossRegressedCorrelation(
            regression=regression, correlation=correlation,
            crossvalidation_kwargs=dict(splits=5, kfold=True, split_coord='stimulus_id', stratification_coord='story'))

        self._ceiler = ExtrapolationCeiling(subject_column='subject_UID', post_process=self.post_process_ceilings)
Example no. 23
def load_single_layer_models():
    model = cornet_s_brainmodel('base', True).activations_model._model
    layer_number = 0
    for name, m in model.named_modules():
        if isinstance(m, nn.Conv2d):
            layer_number = layer_number + 1
            brain_models[f'CORnet-S_norm_dist_L{layer_number}'] = LazyLoad(
                lambda layer_number=layer_number: cornet_s_brainmodel(f'norm_dist_L{layer_number}', True,
                                                                      function=apply_norm_dist, config=[layer_number],
                                                                      type='model'))
            brain_models[f'CORnet-S_jumbler_L{layer_number}'] = LazyLoad(
                lambda layer_number=layer_number: cornet_s_brainmodel(f'jumbler_L{layer_number}', True,
                                                                      function=apply_all_jumbler, config=[layer_number],
                                                                      type='model'))
            brain_models[f'CORnet-S_fixed_value_L{layer_number}'] = LazyLoad(
                lambda layer_number=layer_number: cornet_s_brainmodel(f'fixed_value_L{layer_number}', True,
                                                                      function=apply_fixed_value, config=[layer_number],
                                                                      type='model'))
            brain_models[f'CORnet-S_fixed_value_small_L{layer_number}'] = LazyLoad(
                lambda layer_number=layer_number: cornet_s_brainmodel(f'fixed_value_small_L{layer_number}', True,
                                                                      function=apply_fixed_value_small,
                                                                      config=[layer_number],
                                                                      type='model'))
Example no. 24
 def __init__(self, identifier):
     self._logger = logging.getLogger(fullname(self))
     self._identifier = identifier
     assembly = LazyLoad(self._load_assembly)
     self._target_assembly = assembly
     regression = linear_regression(xarray_kwargs=dict(
         stimulus_coord='word_id', neuroid_coord='subject_id'))
     correlation = pearsonr_correlation(xarray_kwargs=dict(
         correlation_coord='word_id', neuroid_coord='subject_id'))
     self._metric = CrossRegressedCorrelation(
         regression=regression,
         correlation=correlation,
         crossvalidation_kwargs=dict(splits=5,
                                     kfold=True,
                                     split_coord='word_id',
                                     stratification_coord='sentence_id'))
Example no. 25
def get_ml_pool(test_models, module, submission):
    ml_brain_pool = {}
    if submission.model_type == 'BaseModel':
        logger.info(f"Start working with base models")
        layers = {}
        base_model_pool = {}
        for model in test_models:
            function = lambda model_inst=model.name: module.get_model(model_inst)
            base_model_pool[model.name] = LazyLoad(function)
            try:
                layers[model.name] = module.get_layers(model.name)
            except Exception:
                logger.warning(f'Could not retrieve layers for model {model} -- skipping model')
        model_layers = ModelLayers(layers)
        ml_brain_pool = MLBrainPool(base_model_pool, model_layers)
    else:
        logger.info(f"Start working with brain models")
        for model in test_models:
            ml_brain_pool[model.name] = module.get_model(model.name)
    return ml_brain_pool
Example no. 26
def AruCichy2019RDM():
    assembly = LazyLoad(lambda: load_assembly(average_repetitions=False))

    similarity_metric = RDMCrossValidated(crossvalidation_kwargs=dict(
        stratification_coord=None))
    ceiler = RDMConsistency()

    ceiling = Score([1, np.nan],
                    coords={'aggregation': ['center', 'error']},
                    dims=['aggregation'])
    return RDMBenchmark(identifier=f'aru.Cichy2019-rdm',
                        version=1,
                        assembly=assembly,
                        similarity_metric=similarity_metric,
                        visual_degrees=VISUAL_DEGREES,
                        ceiling_func=lambda: ceiling,
                        parent='Kamila',
                        number_of_trials=1,
                        region='IT',
                        time_bins=[(62.5, 156.2)])
Example no. 27
    def __init__(self):
        super(BaseModelPool, self).__init__()
        self._accessed_base_models = set()

        self._key_functions = {}

        for model in ['alexnet', 'densenet169', 'resnet101']:
            model_func = getattr(model_file, model)
            self.add_model_to_maps(
                model, '', lambda bound_func=model_func: bound_func('', True))
            self.add_model_to_maps(
                model,
                '_random',
                lambda bound_func=model_func: bound_func('random', False))
            self.add_model_to_maps(model,
                                   '_norm_dist',
                                   lambda bound_func=model_func: bound_func(
                                       'norm_dist', True, apply_norm_dist))
            self.add_model_to_maps(model,
                                   '_jumbler',
                                   lambda bound_func=model_func: bound_func(
                                       'jumbler', True, apply_all_jumbler))
            self.add_model_to_maps(
                model,
                '_kernel_jumbler',
                lambda bound_func=model_func: bound_func(
                    'kernel_jumbler', True, apply_in_kernel_jumbler))
            self.add_model_to_maps(
                model,
                '_channel_jumbler',
                lambda bound_func=model_func: bound_func(
                    'channel_jumbler', True, apply_channel_jumbler))
            self.add_model_to_maps(
                model,
                '_norm_dist_kernel',
                lambda bound_func=model_func: bound_func(
                    'norm_dist_kernel', True, apply_norm_dist_kernel))

        for identifier, function in self._key_functions.items():
            self[identifier] = LazyLoad(function)
Example no. 28
    def __init__(self, base_model_pool, model_layers, reload=True):
        super(VOneNetBrainPool, self).__init__(reload)
        self.reload = True
        for basemodel_identifier, activations_model in base_model_pool.items():
            if basemodel_identifier not in model_layers:
                warnings.warn(
                    f"{basemodel_identifier} not found in model_layers")
                continue
            model_layer = model_layers[basemodel_identifier]

            def load(identifier=basemodel_identifier,
                     activations_model=activations_model,
                     layers=model_layer):
                assert hasattr(activations_model, 'reload')
                activations_model.reload()
                brain_model = get_vonenet_commitment(
                    identifier=identifier,
                    activations_model=activations_model,
                    layers=layers,
                    stochastic=STOCHASTIC_MODELS[identifier])
                return brain_model

            self[basemodel_identifier] = LazyLoad(load)
Example no. 29
def _evaluation_benchmark_pool():
    """"
    Standard benchmarks that are evaluated for the website.
    """
    pool = {}
    # neural benchmarks
    from .majaj2015 import DicarloMajaj2015V4PLS, DicarloMajaj2015ITPLS
    pool['dicarlo.Majaj2015.V4-pls'] = LazyLoad(DicarloMajaj2015V4PLS)
    pool['dicarlo.Majaj2015.IT-pls'] = LazyLoad(DicarloMajaj2015ITPLS)
    from .freemanziemba2013 import MovshonFreemanZiemba2013V1PLS, MovshonFreemanZiemba2013V2PLS
    pool['movshon.FreemanZiemba2013.V1-pls'] = LazyLoad(
        MovshonFreemanZiemba2013V1PLS)
    pool['movshon.FreemanZiemba2013.V2-pls'] = LazyLoad(
        MovshonFreemanZiemba2013V2PLS)
    from .kar2019 import DicarloKar2019OST
    pool['dicarlo.Kar2019-ost'] = LazyLoad(DicarloKar2019OST)

    # behavioral benchmarks
    from .rajalingham2018 import DicarloRajalingham2018I2n
    pool['dicarlo.Rajalingham2018-i2n'] = LazyLoad(DicarloRajalingham2018I2n)

    return pool
Example no. 30
    def __init__(self, input_size=None):
        super(BaseModelPool, self).__init__(reload=True)

        _key_functions = {
            'alexnet':
            lambda: pytorch_model('alexnet',
                                  image_size=224
                                  if input_size == None else input_size),
            'squeezenet1_0':
            lambda: pytorch_model('squeezenet1_0',
                                  image_size=224
                                  if input_size == None else input_size),
            'squeezenet1_1':
            lambda: pytorch_model('squeezenet1_1',
                                  image_size=224
                                  if input_size == None else input_size),
            'resnet-18':
            lambda: pytorch_model('resnet18',
                                  image_size=224
                                  if input_size == None else input_size),
            'resnet-34':
            lambda: pytorch_model('resnet34',
                                  image_size=224
                                  if input_size == None else input_size),
            'resnet-50-pytorch':
            lambda: pytorch_model('resnet50',
                                  image_size=224
                                  if input_size == None else input_size),
            'resnet-50-robust':
            lambda: robust_model('resnet50',
                                 image_size=224
                                 if input_size == None else input_size),
            'vgg-16':
            lambda: keras_model('vgg16',
                                'VGG16',
                                image_size=224
                                if input_size == None else input_size),
            'vgg-19':
            lambda: keras_model('vgg19',
                                'VGG19',
                                image_size=224
                                if input_size == None else input_size),
            'vggface':
            vggface,
            'xception':
            lambda: keras_model('xception',
                                'Xception',
                                image_size=299
                                if input_size == None else input_size),
            'densenet-121':
            lambda: keras_model('densenet',
                                'DenseNet121',
                                image_size=224
                                if input_size == None else input_size),
            'densenet-169':
            lambda: keras_model('densenet',
                                'DenseNet169',
                                image_size=224
                                if input_size == None else input_size),
            'densenet-201':
            lambda: keras_model('densenet',
                                'DenseNet201',
                                image_size=224
                                if input_size == None else input_size),
            'inception_v1':
            lambda: TFSlimModel.init('inception_v1',
                                     preprocessing_type='inception',
                                     image_size=224
                                     if input_size == None else input_size),
            'inception_v2':
            lambda: TFSlimModel.init('inception_v2',
                                     preprocessing_type='inception',
                                     image_size=224
                                     if input_size == None else input_size),
            'inception_v3':
            lambda: TFSlimModel.init('inception_v3',
                                     preprocessing_type='inception',
                                     image_size=299
                                     if input_size == None else input_size),
            'inception_v4':
            lambda: TFSlimModel.init('inception_v4',
                                     preprocessing_type='inception',
                                     image_size=299
                                     if input_size == None else input_size),
            'inception_resnet_v2':
            lambda: TFSlimModel.init('inception_resnet_v2',
                                     preprocessing_type='inception',
                                     image_size=299
                                     if input_size == None else input_size),
            'resnet-50_v1':
            lambda: TFSlimModel.init('resnet-50_v1',
                                     net_name='resnet_v1_50',
                                     preprocessing_type='vgg',
                                     image_size=224
                                     if input_size == None else input_size,
                                     labels_offset=0),
            'resnet-101_v1':
            lambda: TFSlimModel.init('resnet-101_v1',
                                     net_name='resnet_v1_101',
                                     preprocessing_type='vgg',
                                     image_size=224
                                     if input_size == None else input_size,
                                     labels_offset=0),
            'resnet-152_v1':
            lambda: TFSlimModel.init('resnet-152_v1',
                                     net_name='resnet_v1_152',
                                     preprocessing_type='vgg',
                                     image_size=224
                                     if input_size == None else input_size,
                                     labels_offset=0),
            # image_size is 299 for resnet-v2, this is a bug in tf-slim.
            # see https://github.com/tensorflow/models/tree/8b18491b26e4b8271db757a3245008882ea112b3/research/slim:
            # "ResNet V2 models use Inception pre-processing and input image size of 299"
            'resnet-50_v2':
            lambda: TFSlimModel.init('resnet-50_v2',
                                     net_name='resnet_v2_50',
                                     preprocessing_type='inception',
                                     image_size=299
                                     if input_size == None else input_size),
            'resnet-101_v2':
            lambda: TFSlimModel.init('resnet-101_v2',
                                     net_name='resnet_v2_101',
                                     preprocessing_type='inception',
                                     image_size=299
                                     if input_size == None else input_size),
            'resnet-152_v2':
            lambda: TFSlimModel.init('resnet-152_v2',
                                     net_name='resnet_v2_152',
                                     preprocessing_type='inception',
                                     image_size=299
                                     if input_size == None else input_size),
            'nasnet_mobile':
            lambda: TFSlimModel.init('nasnet_mobile',
                                     preprocessing_type='inception',
                                     image_size=331
                                     if input_size == None else input_size),
            'nasnet_large':
            lambda: TFSlimModel.init('nasnet_large',
                                     preprocessing_type='inception',
                                     image_size=331
                                     if input_size == None else input_size),
            'pnasnet_large':
            lambda: TFSlimModel.init('pnasnet_large',
                                     preprocessing_type='inception',
                                     image_size=331
                                     if input_size == None else input_size),
            'bagnet9':
            lambda: bagnet("bagnet9"),
            'bagnet17':
            lambda: bagnet("bagnet17"),
            'bagnet33':
            lambda: bagnet("bagnet33"),
            'resnet50-SIN':
            lambda: texture_vs_shape(model_identifier='resnet50-SIN',
                                     model_name='resnet50_trained_on_SIN'),
            'resnet50-SIN_IN':
            lambda: texture_vs_shape(model_identifier='resnet50-SIN_IN',
                                     model_name=
                                     'resnet50_trained_on_SIN_and_IN'),
            'resnet50-SIN_IN_IN':
            lambda: texture_vs_shape(
                model_identifier='resnet50-SIN_IN_IN',
                model_name=
                'resnet50_trained_on_SIN_and_IN_then_finetuned_on_IN'),
            'resnext101_32x8d_wsl':
            lambda: wsl(8),
            'resnext101_32x16d_wsl':
            lambda: wsl(16),
            'resnext101_32x32d_wsl':
            lambda: wsl(32),
            'resnext101_32x48d_wsl':
            lambda: wsl(48),
            'fixres_resnext101_32x48d_wsl':
            lambda: fixres(
                'resnext101_32x48d_wsl',
                'https://dl.fbaipublicfiles.com/FixRes_data/FixRes_Pretrained_Models/ResNeXt_101_32x48d.pth'
            ),
            'dcgan':
            lambda: dcgan("get_discriminator"),
            'convrnn_224':
            convrnn,
        }
        # MobileNets
        for version, multiplier, image_size in [
                # v1
            (1, 1.0, 224),
            (1, 1.0, 192),
            (1, 1.0, 160),
            (1, 1.0, 128),
            (1, 0.75, 224),
            (1, 0.75, 192),
            (1, 0.75, 160),
            (1, 0.75, 128),
            (1, 0.5, 224),
            (1, 0.5, 192),
            (1, 0.5, 160),
            (1, 0.5, 128),
            (1, 0.25, 224),
            (1, 0.25, 192),
            (1, 0.25, 160),
            (1, 0.25, 128),
                # v2
            (2, 1.4, 224),
            (2, 1.3, 224),
            (2, 1.0, 224),
            (2, 1.0, 192),
            (2, 1.0, 160),
            (2, 1.0, 128),
            (2, 1.0, 96),
            (2, 0.75, 224),
            (2, 0.75, 192),
            (2, 0.75, 160),
            (2, 0.75, 128),
            (2, 0.75, 96),
            (2, 0.5, 224),
            (2, 0.5, 192),
            (2, 0.5, 160),
            (2, 0.5, 128),
            (2, 0.5, 96),
            (2, 0.35, 224),
            (2, 0.35, 192),
            (2, 0.35, 160),
            (2, 0.35, 128),
            (2, 0.35, 96),
        ]:
            identifier = f"mobilenet_v{version}_{multiplier}_{image_size}"
            if (version == 1 and multiplier
                    in [.75, .5, .25]) or (version == 2 and multiplier == 1.4):
                net_name = f"mobilenet_v{version}_{multiplier * 100:03.0f}"
            else:
                net_name = f"mobilenet_v{version}"
            # arg=arg default value enforces closure:
            # https://docs.python.org/3/faq/programming.html#why-do-lambdas-defined-in-a-loop-with-different-values-all-return-the-same-result
            _key_functions[identifier] = \
                lambda identifier=identifier, image_size=image_size, net_name=net_name, \
                       multiplier=multiplier: TFSlimModel.init(
                    identifier, preprocessing_type='inception', image_size=image_size, net_name=net_name,
                    model_ctr_kwargs={'depth_multiplier': multiplier})

        # instantiate models with LazyLoad wrapper
        for identifier, function in _key_functions.items():
            self[identifier] = LazyLoad(function)
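The MobileNet loop above derives each model's identifier and TF-Slim net name from its (version, multiplier, image_size) tuple, padding the multiplier to three digits for the reduced-width variants. A small worked example of that naming logic, using a hypothetical helper that mirrors the branches above:

def mobilenet_names(version, multiplier, image_size):
    # mirrors the identifier / net_name logic of the MobileNet loop above
    identifier = f"mobilenet_v{version}_{multiplier}_{image_size}"
    if (version == 1 and multiplier in [.75, .5, .25]) or (version == 2 and multiplier == 1.4):
        net_name = f"mobilenet_v{version}_{multiplier * 100:03.0f}"
    else:
        net_name = f"mobilenet_v{version}"
    return identifier, net_name

print(mobilenet_names(1, 0.75, 160))  # ('mobilenet_v1_0.75_160', 'mobilenet_v1_075')
print(mobilenet_names(2, 1.4, 224))   # ('mobilenet_v2_1.4_224',  'mobilenet_v2_140')
print(mobilenet_names(2, 1.0, 192))   # ('mobilenet_v2_1.0_192',  'mobilenet_v2')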