Example no. 1
    def __init__(self, config=None, **kwargs):
        set_seed(0)
        self.config = gr.config.update_config(kwargs, config,
                                              self.__class__.default_config())

        self.threshold = 50
        self.n_sections = 20
        self.n_orientations = 1
        self.n_latents = 8
        self.img_size = (256, 256)
        self.regions_masks = self.get_regions_masks()

        checkpoint_filepath = os.path.join(
            os.path.dirname(__file__),
            'reference_dataset_pca_fourier_spectrum_descriptors_model.pickle')
        self.pca_model = PCAModel.load_model(checkpoint_filepath)

        if self.config.set_BC_range:
            # computed on an external reference dataset of 20 000 images -> np.percentile at 0.01 and 0.99
            bc_range = np.load(
                os.path.join(
                    os.path.dirname(__file__),
                    'reference_dataset_pca_fourier_spectrum_descriptors_range.npz'
                ))
            self.BC_range = [bc_range['low'], bc_range['high']]
        else:
            self.BC_range = [np.zeros(self.n_latents), np.ones(self.n_latents)]

        return
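
Note: the 'low'/'high' percentile pair stored in BC_range suggests a min-max style normalization of the 8-dimensional PCA descriptors. Below is a minimal sketch of that step, assuming NumPy arrays of length n_latents; the helper name normalize_descriptors is hypothetical and does not appear in the example above.

import numpy as np

def normalize_descriptors(descriptors, bc_range, eps=1e-8):
    # descriptors: shape (n_latents,), raw PCA descriptor values
    # bc_range: [low, high] as loaded from the .npz file in the example above
    low, high = bc_range
    normalized = (descriptors - low) / (high - low + eps)
    # descriptors falling outside the reference percentiles are clipped into [0, 1]
    return np.clip(normalized, 0.0, 1.0)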
Example no. 2
    def __init__(self, config=None, **kwargs):
        set_seed(0)
        self.config = gr.config.update_config(kwargs, config,
                                              self.__class__.default_config())

        # model
        config_initialization = gr.Config()
        config_initialization.type = 'load_pretrained_model'
        config_initialization.load_from_model_path = os.path.join(
            os.path.dirname(__file__), 'trained_model.pth')
        self.load_model(config_initialization)
        self.n_latents = self.model.n_latents

        if self.config.set_BC_range:
            bc_range = np.load(
                os.path.join(os.path.dirname(__file__),
                             'reference_dataset_bvae_descriptors_range.npz'))
            self.BC_range = [
                torch.from_numpy(bc_range['low']).unsqueeze(0),
                torch.from_numpy(bc_range['high']).unsqueeze(0)
            ]
        else:
            self.BC_range = [
                torch.zeros(self.n_latents).unsqueeze(0),
                torch.ones(self.n_latents).unsqueeze(0)
            ]

        if self.model.use_gpu:
            self.BC_range[0] = self.BC_range[0].cuda()
            self.BC_range[1] = self.BC_range[1].cuda()

        return
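
Note: here the ranges are kept as (1, n_latents) torch tensors, so they broadcast over a batch of latent codes. A sketch of how such a normalization could look, assuming z has shape (batch_size, n_latents); normalize_latents is a hypothetical name.

import torch

def normalize_latents(z, bc_range, eps=1e-8):
    # z: (batch_size, n_latents) latent codes
    # bc_range: [low, high], each of shape (1, n_latents), broadcasting over the batch
    low, high = bc_range
    z_norm = (z - low) / (high - low + eps)
    return torch.clamp(z_norm, 0.0, 1.0)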
Example no. 3
    def __init__(self, config=None, **kwargs):
        set_seed(0)
        self.config = gr.config.update_config(kwargs, config,
                                              self.__class__.default_config())

        self.threshold = 50
        self.n_harmonics = 25
        self.n_latents = 8

        checkpoint_filepath = os.path.join(
            os.path.dirname(__file__),
            'reference_dataset_pca_fourier_elliptical_descriptors_model.pickle'
        )
        self.pca_model = PCAModel.load_model(checkpoint_filepath)

        if self.config.set_BC_range:
            # computed on an external reference dataset of 20 000 images -> np.percentile at 0.01 and 0.99
            bc_range = np.load(
                os.path.join(
                    os.path.dirname(__file__),
                    'reference_dataset_pca_fourier_elliptical_descriptors_range.npz'
                ))
            self.BC_range = [bc_range['low'], bc_range['high']]
        else:
            self.BC_range = [np.zeros(self.n_latents), np.ones(self.n_latents)]

        return
Example no. 4
    def __init__(self, config=None, **kwargs):
        self.config = gr.config.update_config(kwargs, config,
                                              self.__class__.default_config())

        # set seed
        randomhelper.set_seed(self.config.seed)

        # set preprocess function
        if self.config.preprocess_observation.type == 'function':
            self.preprocess = deepcopy(
                self.config.preprocess_observation.function)
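
Note: a hedged sketch of a configuration that would take the 'function' branch above, following the gr.Config attribute style used in Example no. 2; the lambda is purely illustrative, since the actual default_config is not shown.

config = gr.Config()
config.seed = 0
config.preprocess_observation = gr.Config()
config.preprocess_observation.type = 'function'
# hypothetical preprocessing: cast the raw observation to float32 in [0, 1]
config.preprocess_observation.function = lambda obs: obs.astype('float32') / 255.0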
Example no. 5
    def load(filepath='representation.pickle',
             map_location='cpu',
             config=None):
        representation = datahelper.load(filepath,
                                         map_location=map_location,
                                         config=config)
        randomhelper.set_seed(representation.config.seed)
        if hasattr(representation, 'model'):
            if hasattr(representation.model.config, 'device'):
                if map_location == 'cpu':
                    representation.model.set_device(use_gpu=False)
                elif map_location == 'cuda:0':
                    representation.model.set_device(use_gpu=True)

        return representation
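
Note: a usage sketch for the loader above; the class name Representation and the checkpoint filename are placeholders.

# reload on CPU (models with a device config are moved via set_device(use_gpu=False))
representation = Representation.load('representation.pickle', map_location='cpu')
# reload on the first GPU instead
representation_gpu = Representation.load('representation.pickle', map_location='cuda:0')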
Example no. 6
    def __init__(self, config=None, **kwargs):
        set_seed(0)
        self.config = gr.config.update_config(kwargs, config,
                                              self.__class__.default_config())

        # model
        self.statistic_names = [
            'activation_mass', 'activation_volume', 'activation_density',
            'activation_mass_distribution', 'activation_hu1', 'activation_hu2',
            'activation_hu3', 'activation_hu4', 'activation_hu5',
            'activation_hu6', 'activation_hu7', 'activation_hu8',
            'activation_flusser9', 'activation_flusser10',
            'activation_flusser11', 'activation_flusser12',
            'activation_flusser13'
        ]
        self.n_statistics = len(self.statistic_names)
        self.n_latents = 8
        self.img_size = (256, 256)

        checkpoint_filepath = os.path.join(
            os.path.dirname(__file__),
            'reference_dataset_pca_lenia_statistics_descriptors_model.pickle')
        self.pca_model = PCAModel.load_model(checkpoint_filepath)

        if self.config.set_BC_range:
            # computed on an external reference dataset of 20 000 images -> np.percentile at 0.01 and 0.99
            bc_range = np.load(
                os.path.join(
                    os.path.dirname(__file__),
                    'reference_dataset_pca_lenia_statistics_descriptors_range.npz'
                ))
            self.BC_range = [bc_range['low'], bc_range['high']]
        else:
            self.BC_range = [np.zeros(self.n_latents), np.ones(self.n_latents)]

        return
Example no. 7
            theta2 = orientation_region[1]

            region_mask = (grid_r >= r1) & (grid_r < r2) & (
                grid_theta >= theta1) & (grid_theta < theta2)
            region_power_spectrum = deepcopy(half_power_spectrum)[region_mask]
            feature_vector[2 * cur_region_idx] = region_power_spectrum.mean()
            feature_vector[2 * cur_region_idx + 1] = region_power_spectrum.std()
            cur_region_idx += 1

    return feature_vector.detach().cpu().numpy()


if __name__ == '__main__':

    set_seed(0)
    n_features_BC = 8

    n_sections = 20
    n_orientations = 1

    dataset_config = LENIADataset.default_config()
    dataset_config.data_root = '/gpfswork/rech/zaj/ucf28eq/data/lenia_datasets/data_005/'
    dataset_config.split = 'train'
    dataset = LENIADataset(config=dataset_config)
    non_animal_ids = torch.where(dataset.labels == 1)[0].numpy()

    # create fourier descriptors and save statistics
    normalized_coefficients = np.zeros(
        (dataset.n_images, 2 * n_sections * n_orientations))
    for idx in range(dataset.n_images):