Example #1
def pplot(latitude,
          longitude,
          source,
          resolution=1.,
          style=special_parameters.plt_style,
          nb_cols=5,
          alpha=1.):
    """
    patch plot
    :param style:
    :param latitude:
    :param longitude:
    :param source:
    :param resolution:
    :return:
    """
    r = check_source(source)
    rasters = r['rasters']
    extractor = PatchExtractor(rasters, resolution=resolution)
    extractor.add_all()
    return extractor.plot(item=(latitude, longitude),
                          return_fig=True,
                          style=style,
                          nb_cols=nb_cols,
                          alpha=alpha)
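For context, a hypothetical call to pplot; the coordinates and the source name 'glc' are placeholders and must be something check_source can resolve:

pplot(43.61, 3.88, 'glc', resolution=1., nb_cols=5, alpha=1.)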
Example #2
def raster_characteristics(source):
    """
    print infos about the rasters
    :param source:
    :return:
    """
    r = check_source(source)
    rasters = r['rasters']
    extractor = PatchExtractor(rasters)
    extractor.add_all()

    print_statistics(str(extractor))
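Likewise a hypothetical call, with 'glc' again standing in for a real source name:

raster_characteristics('glc')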
Example #3
class EnvironmentalIGNDataset(Dataset):
    def __init__(self, labels, dataset, ids, rasters, patches, size_patch=64, extractor=None, transform=None,
                 add_all=True, limit=-1):
        self.extractor = extractor
        self.labels = labels
        self.ids = ids
        self.dataset = dataset
        self.patches = patches

        self.limit = limit

        if extractor is None:
            self.extractor = PatchExtractor(rasters, size=size_patch, verbose=True)
            if add_all:
                self.extractor.add_all()
        else:
            self.extractor = extractor

        self.transform = transform

    def file_exists(self, idx):
        return os.path.isfile(self.path(idx))

    def path(self, idx):
        # Patches are sharded on disk by the trailing digits of the occurrence id:
        # <patches>/<last two digits>/<previous two digits>/<id>.npy
        image_id = str(int(self.ids[idx]))
        return os.path.join(self.patches, image_id[-2:], image_id[-4:-2], image_id + '.npy')

    def __len__(self):
        return len(self.labels) if self.limit == -1 else min(len(self.labels), self.limit)

    def __getitem__(self, idx):
        # The IGN patch is stored as (H, W, C); move channels first to match the extractor output.
        ign_patch = np.transpose(np.load(self.path(idx)), (2, 0, 1))

        # Stack the environmental channels and the IGN channels along the channel axis.
        tensor = self.extractor[self.dataset[idx]]
        tensor = np.concatenate([tensor, ign_patch], axis=0)
        if self.transform is not None:
            tensor = self.transform(tensor).copy()
        return torch.from_numpy(tensor).float(), self.labels[idx]

    @property
    def named_dimensions(self):
        return [r.name for r in self.extractor.rasters] + ['IGN']

    def __repr__(self):
        return self.__str__()

    def __str__(self):
        return self.__class__.__name__ + '(size: {})'.format(len(self))
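To make the sharded patch layout used by path() concrete, here is a small standalone illustration; the id 3000252 and the patches root are made up:

import os

image_id = '3000252'
# path() shards files by the trailing digits of the id:
# <patches>/<last two digits>/<previous two digits>/<id>.npy
print(os.path.join('/data/ign_patches', image_id[-2:], image_id[-4:-2], image_id + '.npy'))
# prints /data/ign_patches/52/02/3000252.npy (with POSIX paths)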
Example #4
class GeoLifeClefDataset(Dataset):
    def __init__(self, root_dir, labels, dataset, ids, extractor=None):
        self.extractor = extractor
        self.labels = labels
        self.ids = ids
        self.dataset = dataset
        if extractor is None:
            self.extractor = PatchExtractor(root_dir, size=64, verbose=True)
            self.extractor.add_all()
        else:
            self.extractor = extractor

    def __len__(self):
        return len(self.labels)

    def __getitem__(self, idx):
        # A random rotation is applied on the fly as data augmentation.
        tensor = self.extractor[self.dataset[idx]]
        tensor = random_rotation(tensor).copy()
        return torch.from_numpy(tensor).float(), self.labels[idx]
Example #5
class GeoLifeClefDataset(Dataset):
    def __init__(self,
                 root_dir,
                 labels,
                 dataset,
                 ids,
                 extractor=None,
                 nb_labels=3336,
                 second_neihbour=True):
        self.labels = labels
        self.ids = ids
        self.dataset = dataset
        self.nb_labels = nb_labels
        self.extractor = extractor

        # Co-occurrence structures; they must be assigned before __getitem__ is used.
        self.pos_multipoints = None
        self.kdtree = None
        self.multipoints = None
        self.second_neihbour = second_neihbour

        if extractor is None:
            self.extractor = PatchExtractor(root_dir, size=64, verbose=True)
            self.extractor.add_all()
        else:
            self.extractor = extractor

    def __len__(self):
        return len(self.labels)

    def __getitem__(self, idx):
        # Find the closest distinct location ("multipoint") to this occurrence.
        pos = [list(self.dataset[idx])]
        dist, neighbours = self.kdtree.query(pos, k=2)
        if dist[0][0] == 0 and self.second_neihbour:
            # The nearest location is the occurrence itself: take the second nearest instead.
            mp_pos = self.pos_multipoints[neighbours[0][1]]
        else:
            mp_pos = self.pos_multipoints[neighbours[0][0]]
        coocs = self.multipoints[mp_pos]

        tensor = self.extractor[self.dataset[idx]]

        return (torch.from_numpy(tensor).float(),
                torch.from_numpy(coocs).float()), self.labels[idx]
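This GeoLifeClefDataset expects kdtree, pos_multipoints and multipoints to be filled in from outside. Below is a minimal sketch of one plausible wiring using scipy's cKDTree and toy data; grouping training occurrences into one co-occurrence vector per distinct location is an assumption inferred from how __getitem__ reads these attributes, and dataset stands for an already constructed instance:

import numpy as np
from scipy.spatial import cKDTree

# Toy training occurrences: (latitude, longitude) -> species label.
train_positions = [(43.61, 3.88), (43.61, 3.88), (48.85, 2.35)]
train_labels = [12, 7, 12]
nb_labels = 3336

# Build one co-occurrence vector per distinct location ("multipoint").
multipoints = {}
for position, label in zip(train_positions, train_labels):
    vector = multipoints.setdefault(position, np.zeros(nb_labels, dtype=np.float32))
    vector[label] += 1

# The kd-tree is built over the distinct locations indexed by pos_multipoints.
dataset.pos_multipoints = list(multipoints.keys())
dataset.multipoints = multipoints
dataset.kdtree = cKDTree(dataset.pos_multipoints)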
Example #6
class GeoLifeClefDataset(Dataset):
    def __init__(self,
                 root_dir,
                 labels,
                 dataset,
                 ids,
                 n_neighbours=200,
                 extractor=None,
                 nb_labels=3336):
        self.labels = labels
        self.ids = ids
        self.dataset = dataset
        self.n_neighbours = n_neighbours
        self.nb_labels = nb_labels
        self.extractor = extractor

        # Neighbour search structures; they must be assigned before __getitem__ is used.
        self.kdtree = None
        self.train_dataset = None

        if extractor is None:
            self.extractor = PatchExtractor(root_dir, size=64, verbose=True)
            self.extractor.add_all()
        else:
            self.extractor = extractor

    def __len__(self):
        return len(self.labels)

    def __getitem__(self, idx):
        # Count the labels of the n_neighbours closest training occurrences,
        # skipping the query occurrence itself when it belongs to the training set.
        pos = [list(self.dataset[idx])]
        dist, neighbours = self.kdtree.query(pos, k=self.n_neighbours)
        coocs = np.zeros(self.nb_labels)
        for n in neighbours[0]:
            if self.ids[idx] != self.train_dataset.ids[n]:
                coocs[self.train_dataset.labels[n]] += 1

        tensor = self.extractor[self.dataset[idx]]

        return (torch.from_numpy(tensor).float(),
                torch.from_numpy(coocs).float()), self.labels[idx]
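This variant counts the labels of the n_neighbours closest training occurrences directly, so kdtree and train_dataset have to be wired to the training split. A sketch assuming train_set and test_set are already constructed instances of this class:

from scipy.spatial import cKDTree

test_set.train_dataset = train_set
test_set.kdtree = cKDTree([list(position) for position in train_set.dataset])

# Each item then yields ((environmental tensor, co-occurrence vector), label).
(tensor, coocs), label = test_set[0]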
Example #7
class EnvironmentalDataset(Dataset):
    def __init__(self,
                 labels,
                 dataset,
                 ids,
                 rasters,
                 size_patch=64,
                 transform=None,
                 add_all=True,
                 limit=-1,
                 reset_extractor=False,
                 **kwargs):
        self.labels = labels
        self.ids = ids
        self.dataset = dataset

        self.limit = limit
        # The extractor is cached at module level so that later dataset instances reuse
        # the already loaded rasters unless reset_extractor forces a rebuild.
        global ENVIRONMENTAL_DATASET_EXTRACTOR
        if ENVIRONMENTAL_DATASET_EXTRACTOR is None or reset_extractor:
            self.extractor = PatchExtractor(rasters,
                                            size=size_patch,
                                            verbose=True)
            if add_all:
                self.extractor.add_all()
            ENVIRONMENTAL_DATASET_EXTRACTOR = self.extractor
        else:
            self.extractor = ENVIRONMENTAL_DATASET_EXTRACTOR

        self.transform = transform

    def __len__(self):
        return len(self.labels) if self.limit == -1 else min(
            len(self.labels), self.limit)

    def __getitem__(self, idx):
        if not isinstance(self.extractor, bool):
            tensor = self.extractor[self.dataset[idx]]
            if self.transform is not None:
                tensor = self.transform(tensor).copy()
            return torch.from_numpy(tensor).float(), self.labels[idx]
        else:
            # A boolean extractor disables patch extraction: return the raw position instead.
            return self.dataset[idx], self.labels[idx]

    def numpy(self):
        """
        :return: an (N, D) numpy array of flattened patch vectors and the corresponding labels
        """
        return np.array([
            torch.flatten(self[i][0]).numpy() for i in range(len(self))
        ]), self.labels

    @deprecated()
    def get_vectors(self):
        vec = []
        for idx, data in enumerate(self.dataset):
            vector = self.extractor[self.dataset[idx]]
            if self.transform is not None:
                vector = self.transform(vector).copy()
            vector = list(vector)
            vec.append(vector)
        return vec

    def __repr__(self):
        return self.__str__()

    def __str__(self):
        return self.__class__.__name__ + '(size: {})'.format(len(self))
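A minimal usage sketch for EnvironmentalDataset; the occurrence arrays (labels, positions, ids) and the raster directory are placeholders, and PatchExtractor must be importable from the surrounding package:

from torch.utils.data import DataLoader

train_set = EnvironmentalDataset(labels, positions, ids, rasters='/data/rasters', size_patch=64)

# Standard PyTorch iteration over (tensor, label) batches.
loader = DataLoader(train_set, batch_size=32, shuffle=True, num_workers=4)
for batch, target in loader:
    pass  # feed a model here

# Or flatten every patch into a 1D vector, e.g. for a scikit-learn baseline.
X, y = train_set.numpy()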