def load_targets(self):
    """Load example targets."""
    y = []
    X = []
    print("> Assembling the data.")
    for scientific_name in tqdm(self.target_species):
        # Pull a fixed number of annotations for each target species.
        annotations = get_specified_target(scientific_name,
                                           nb_annotations=100)
        for annotation in tqdm(annotations):
            # Extract image tiles from the annotation and label every
            # tile with the species name.
            X_ = extract_tiles_from_annotation(annotation, 10)
            X.extend(X_)
            y_ = [scientific_name] * len(X_)
            y.extend(y_)
    self.X = X
    self.y = y
    print("> Assembly complete.")
    # Persist the assembled tiles and labels to disk via a Vessel object.
    c = Vessel("confusion_matrix.dat")
    c.X = X
    c.y = y
    c.save()
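
The method above persists the assembled tiles and labels through a Vessel object. As a minimal reading-back sketch (not part of the original source), assuming Vessel restores its saved attributes when re-instantiated with an existing filename, as Example #2's use of Vessel("targets.dat") suggests, and assuming the "from vessel import Vessel" import path:

from vessel import Vessel  # assumed import path

# Reload the dataset written by load_targets().
c = Vessel("confusion_matrix.dat")
X, y = c.X, c.y
print(len(X), len(y))
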
Example #2
# The original snippet starts mid-function: the imports, the name get_batch,
# and the signature below are inferred from how the variables are used here
# and in the __main__ block that follows.
from glob import glob

import numpy as np
from tqdm import tqdm
from vessel import Vessel  # assumed import path


def get_batch(v, examples_per_class, nb_training_images):
    X, y = [], []
    for target_name in v.target_names:
        # Draw random indices into this class's training images.
        batch_idx = np.random.randint(0, int(nb_training_images),
                                      examples_per_class)
        for idx in batch_idx:
            X.append(v.X[target_name][idx])
            y.append(v.y[target_name][idx])
    X = np.array(X)
    y = np.array(y)
    return X, y
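

# Usage sketch (not in the original source; get_batch's name and arguments
# are inferred): once the __main__ block below has built and saved
# "targets.dat", a training loop could draw batches like
#     v = Vessel("targets.dat")
#     X_batch, y_batch = get_batch(v, examples_per_class=16,
#                                  nb_training_images=80)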


if __name__ == "__main__":

    v = Vessel("targets.dat")

    # X — images; y — class identities (one-hot vectors).
    v.X = {}
    v.y = {}

    # Each subdirectory of data/ is one target class: its name is the class
    # label and its contents are the example images.
    targets = sorted(glob("data/*"))
    v.targets = {}
    for target in targets:
        target_name = target.split("/")[-1]
        v.targets[target_name] = glob(f"{target}/*")
    target_names = list(v.targets.keys())
    v.target_names = target_names

    # Now generate training/test data.
    for itr, target in enumerate(tqdm(v.target_names)):
        v.X[target] = []
        v.y[target] = []
        paths_to_images = v.targets[target]