Example #1
    def extract_image(self, input_path, output_path, supplements_path, image_nb, save=True):
        """Build a binary foreground mask for frame image_nb from the SOM reconstruction error."""
        current = Image.open(os.path.join(input_path, "in{0:06d}.jpg".format(image_nb)))
        new_data = MosaicImage(current, self.image_parameters)
        self.som.set_data(new_data.get_data())
        winners = self.som.get_all_winners()
        # Per-patch displacement of each winner relative to the winners learned on the background
        diff_winners = np.zeros(winners.shape)
        for j in range(len(winners)):
            diff_winners[j] = manhattan_distance(np.asarray(winners[j]), np.asarray(self.initial_map[j]))
        diff_winners = diff_winners.reshape(new_data.nb_pictures)
        diff_winners = np.kron(diff_winners, np.ones((self.pictures_dim[0], self.pictures_dim[1])))
        # diff_winners *= 30  # Use this parameter?
        diff_winners = ImageOps.autocontrast(Image.fromarray(diff_winners).convert('L'))

        # SOM reconstruction error for the current frame, later weighted by the displacement map
        reconstructed = Image.fromarray(new_data.reconstruct(self.som.get_reconstructed_data(winners)))
        som_difference = ImageOps.autocontrast(ImageChops.difference(reconstructed, current).convert('L'))
        som_difference_modulated = ImageChops.multiply(som_difference, diff_winners)

        # Binarizing
        fn = lambda x: 255 if x > self.threshold else 0
        thresholded = som_difference_modulated.convert('L').point(fn, mode='1')

        result = ImageChops.multiply(thresholded, self.mask)

        # Saving
        if save:
            som_difference.save(os.path.join(supplements_path, "difference", "dif{0:06d}.png".format(image_nb)))
            diff_winners.save(os.path.join(supplements_path, "diff_winners", "win{0:06d}.png".format(image_nb)))
            som_difference_modulated.save(os.path.join(supplements_path, "saliency", "sal{0:06d}.png".format(image_nb)))
            thresholded.save(os.path.join(supplements_path, "thresholded", "thr{0:06d}.png".format(image_nb)))

            result.save(os.path.join(output_path, "bin{0:06d}.png".format(image_nb)))
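
extract_image appears to assume that the output folders already exist; a small setup sketch following the os.makedirs pattern of Example #6 (all paths below are placeholders, not values from the source):

# Sketch: pre-create the folders that extract_image() writes into (paths are placeholders).
import os

output_path = os.path.join("Results", "office")
supplements_path = os.path.join("Results", "supplements")
os.makedirs(output_path, exist_ok=True)
for sub in ("difference", "diff_winners", "saliency", "thresholded"):
    os.makedirs(os.path.join(supplements_path, sub), exist_ok=True)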
Example #2
    def learning(self, bkg_image):
        """Train the SOM on the background image and keep its winner map as the reference."""
        # PARAMETERS
        self.image_parameters = Parameters({"pictures_dim": self.pictures_dim})
        data = MosaicImage(bkg_image, self.image_parameters)
        inputs_SOM = Parameters({"alpha": Variable(start=self.alpha_start, end=self.alpha_end, nb_steps=self.nb_epochs),
                                 "sigma": Variable(start=self.sigma_start, end=self.sigma_end, nb_steps=self.nb_epochs),
                                 "data": data.get_data(),
                                 "neurons_nbr": self.neurons_nb,
                                 "epochs_nbr": self.nb_epochs})
        self.som = SOM(inputs_SOM)

        # RUN
        for i in range(self.nb_epochs):
            # print('Epoch', i)
            self.som.run_epoch()
        self.initial_map = self.som.get_all_winners()
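
A plausible driver combining Examples #1 and #2, written as a sketch under assumptions rather than code from the project: the wrapper object, the output paths and the frame range are hypothetical; the background path mirrors Example #3.

# Sketch only: SomTracker and all paths below are hypothetical placeholders.
import os
from PIL import Image

tracker = SomTracker()  # hypothetical object exposing learning() and extract_image()
input_path = os.path.join("Data", "tracking", "dataset", "baseline", "office", "input")
output_path = "Results"           # placeholder
supplements_path = "supplements"  # placeholder
tracker.learning(Image.open(os.path.join("Data", "tracking", "dataset", "baseline", "office", "bkg.jpg")))
for image_nb in range(1, 101):    # assumed frame range
    tracker.extract_image(input_path, output_path, supplements_path, image_nb, save=True)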
Example #3
            os.path.join("Data", "tracking", "dataset", "baseline", "office",
                         "bkg.jpg")), Parameters({"pictures_dim": [16, 16]}))
    image = MosaicImage(
        Image.open(
            os.path.join("Data", "tracking", "dataset", "baseline", "office",
                         "input", "in001010.jpg")),
        Parameters({"pictures_dim": [16, 16]}))

    nb_epochs = 100
    inputs = Parameters({
        "epsilon_winner": 0.1,
        "epsilon_neighbour": 0.006,
        "maximum_age": 10,
        "error_decrease_new_unit": 0.5,
        "error_decrease_global": 0.995,
        "data": bkg.get_data(),
        "neurons_nbr": 200,
        "epochs_nbr": nb_epochs
    })
    gng = GrowingNeuralGas(inputs)

    start = time.time()
    gng.run()
    end = time.time()

    print("Executed in " + str(end - start) + " seconds.")
    print(gng.square_error(gng.get_all_winners()))
    reconstructed = bkg.reconstruct(gng.get_reconstructed_data())
    # plt.imshow(reconstructed)
    print(len(gng.network.nodes()))
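
A short sketch, not in the source, for persisting the reconstruction computed above; the output path is a placeholder.

    # Sketch: save the GNG reconstruction for visual inspection (path is a placeholder).
    Image.fromarray(reconstructed).save(os.path.join("Results", "gng_reconstruction.png"))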
Example #4
class Execution:
    def __init__(self):
        self.metadata = {}
        self.dataset = {}
        self.model = {}
        self.codebooks = {}
        self.metrics = {}

        self.data = None
        self.training_data = None
        self.map = None

    def open(self, path):
        with codecs.open(path, 'r', encoding='utf-8') as f:
            data = json.load(f)
        self.metadata = data["metadata"]
        self.dataset = data["dataset"]
        self.model = data["model"]
        self.codebooks = data["codebooks"]
        self.metrics = data["metrics"]

    def light_open(self, path):
        with codecs.open(path, 'r', encoding='utf-8') as f:
            data = json.load(f)
        self.metadata = data["metadata"]
        self.dataset = data["dataset"]
        self.model = data["model"]
        self.metrics = data["metrics"]

    def save(self, path):
        data = {
            "metadata": self.metadata,
            "dataset": self.dataset,
            "model": self.model,
            "metrics": self.metrics,
            "codebooks": self.codebooks
        }
        with codecs.open(os.path.join(path, self.metadata["name"] + ".json"),
                         'w', encoding='utf-8') as f:
            json.dump(data, f, indent=2)

    def load_dataset(self):
        """Load the dataset described in self.dataset (image, random_image or tracking)."""
        if self.dataset["type"] == "image":
            path = os.path.join("Data", "images", self.dataset["file"])
            img = Image.open(path)
            parameters = Parameters({
                "pictures_dim": [self.dataset["width"], self.dataset["height"]]
            })
            self.data = MosaicImage(img, parameters)
        elif self.dataset["type"] == "random_image":
            path = os.path.join("Data", "images", self.dataset["file"])
            img = Image.open(path)
            parameters = Parameters({
                "pictures_dim": [self.dataset["width"], self.dataset["height"]]
            })
            self.data = MosaicImage(img, parameters)
            self.training_data = RandomImage(img, parameters)
        elif self.dataset["type"] == "tracking":
            path = os.path.join("Data", "tracking", "dataset",
                                self.dataset["file"], "back.jpg")
            #if self.metadata["seed"] % 2 == 1:
            #    path = os.path.join("Data", "tracking", "dataset", self.dataset["file"], "input", "bkg.jpg")
            #else:
            #    path = os.path.join("Data", "tracking", "dataset", self.dataset["file"], "input", "bkg2.jpg")
            img = Image.open(path)
            parameters = Parameters({
                "pictures_dim": [self.dataset["width"], self.dataset["height"]]
            })
            self.data = MosaicImage(img, parameters)
        else:
            print("Error : No dataset type specified !")

    def run(self):
        #if self.metadata["seed"] % 2 == 1:
        np.random.seed(self.metadata["seed"])
        #else:
        #    np.random.seed(self.metadata["seed"]-1)
        if self.model["model"] == "gng":
            self.runGNG()
        else:
            self.runSOM()

    def runGNG(self):
        """Build and train a Growing Neural Gas from the parameters in self.model."""
        if self.data is None:
            self.load_dataset()
        inputs = Parameters({
            "epsilon_winner": self.model["epsilon_winner"],  # 0.1
            "epsilon_neighbour": self.model["epsilon_neighbour"],  # 0.006
            "maximum_age": self.model["maximum_age"],  # 10
            "error_decrease_new_unit": self.model["error_decrease_new_unit"],  # 0.5
            "error_decrease_global": self.model["error_decrease_global"],  # 0.995
            "data": self.data.get_data(),
            "neurons_nbr": self.model["nb_neurons"],
            "epochs_nbr": self.model["nb_epochs"]
        })
        self.map = GrowingNeuralGas(inputs)
        self.map.run()

    def runSOM(self):
        """Build and train a SOM variant (standard, fast or recursive) from self.model."""
        if self.data is None:
            self.load_dataset()
        nb_epochs = self.model["nb_epochs"]
        if "alpha_start" not in self.model: self.model["alpha_start"] = 0.2
        if "alpha_end" not in self.model: self.model["alpha_end"] = 0.05
        if "sigma_start" not in self.model: self.model["sigma_start"] = 0.7
        if "sigma_end" not in self.model: self.model["sigma_end"] = 0.015
        if "nb_images_evals" not in self.dataset:
            self.dataset["nb_images_evals"] = 75
        parameters = Parameters({
            "alpha": Variable(start=self.model["alpha_start"],
                              end=self.model["alpha_end"],
                              nb_steps=nb_epochs),
            "sigma": Variable(start=self.model["sigma_start"],
                              end=self.model["sigma_end"],
                              nb_steps=nb_epochs),
            "data": self.data.get_data(),
            "neurons_nbr": (self.model["width"], self.model["height"]),
            "epochs_nbr": nb_epochs
        })
        if self.model["model"] == "standard":
            self.map = SOM(parameters)
        elif self.model["model"] == "fast":
            self.map = FastSOM(parameters)
        elif self.model["model"] == "recursive":
            self.map = RecursiveSOM(parameters)
        else:
            print("Error : Unknown model !")

        # if "initialisation" not in self.codebooks:
        #     self.codebooks["initialisation"] = self.som.neurons.tolist()
        if "final" in self.codebooks:
            self.map.neurons = np.asarray(self.codebooks["final"])
        else:
            self.map.run()
            #for i in range(nb_epochs):
            #    self.map.run_epoch()
            #self.codebooks["final"] = copy.deepcopy(self.map.neurons.tolist())

        # for i in range(nb_epochs):
        #     print("Epoch "+str(i+1))
        #     if "Epoch "+str(i + 1) not in self.codebooks:
        #         if self.training_data is not None:
        #             self.som.data = self.training_data.get_data(self.som.data.shape[0])
        #         self.som.run_epoch()
        #         # self.codebooks["Epoch " + str(i + 1)] = copy.deepcopy(self.som.neurons.tolist())
        #     self.som.run_epoch()
        self.map.data = self.data.get_data()

    def compute_metrics(self):
        """Compute the quantization error and, for tracking datasets, F-measure, precision and recall."""
        self.metrics["Square_error"] = self.map.square_error()
        if self.model["model"] == "gng":
            self.metrics["Neurons"] = len(self.map.network.nodes())
            self.metrics["Connections"] = len(self.map.network.edges())
        if self.dataset["type"] == "tracking":
            current_path = os.path.join("Data", "tracking", "dataset",
                                        self.dataset["file"])
            input_path = os.path.join(current_path, "input")
            with open(os.path.join(current_path, "temporalROI.txt"), "r") as f:
                roi_file = f.readline().split()
            temporal_roi = (int(roi_file[0]), int(roi_file[1]))
            mask_roi = Image.open(os.path.join(current_path, "ROI.png"))

            nb_img_gen = self.dataset["nb_images_evals"]
            step = 1
            if nb_img_gen > 0:
                step = (temporal_roi[1] + 1 - temporal_roi[0]) // nb_img_gen

            base = os.path.join("Results", "GNGopti", self.metadata["name"],
                                "results")
            output_path = os.path.join(base, self.dataset["file"])
            supplements_path = os.path.join(base, "supplements")

            parameters = Parameters({
                "pictures_dim": [self.dataset["width"], self.dataset["height"]],
                "step": step
            })

            trackingMetric = TrackingMetrics(input_path,
                                             output_path,
                                             supplements_path,
                                             temporal_roi,
                                             mask_roi,
                                             parameters=parameters)
            trackingMetric.compute(self.map)
            cmp = Comparator()
            fmeasure, precision, recall = cmp.evaluate__folder_c(
                current_path, output_path, step)
            # print(fitness)
            self.metrics["fmeasure"] = fmeasure
            self.metrics["precision"] = precision
            self.metrics["recall"] = recall

    def compute_varying_threshold_metric(self):
        """Re-binarize the saved saliency maps over a range of thresholds and record the resulting scores."""
        if self.dataset["type"] == "tracking":
            current_path = os.path.join("Data", "tracking", "dataset",
                                        self.dataset["file"])
            with open(os.path.join(current_path, "temporalROI.txt"), "r") as f:
                roi_file = f.readline().split()
            temporal_roi = (int(roi_file[0]), int(roi_file[1]))

            base = os.path.join("Results", "Sizing", self.metadata["name"],
                                "results")
            output_path = os.path.join(base, self.dataset["file"])
            supplements_path = os.path.join(base, "supplements")
            difference_path = os.path.join(supplements_path, "saliency")

            nb_img_gen = self.dataset["nb_images_evals"]
            step = 1
            if nb_img_gen > 0:
                step = (temporal_roi[1] + 1 - temporal_roi[0]) // nb_img_gen

            res = []
            ranges = list(range(1, 20)) + list(range(20, 101, 5))
            for threshold in ranges:
                for img in os.listdir(difference_path):
                    som_difference = Image.open(
                        os.path.join(difference_path, img))
                    # Binarizing
                    fn = lambda x: 255 if x > threshold else 0
                    thresholded = som_difference.convert('L').point(fn,
                                                                    mode='1')
                    result = Image.new("L", som_difference.size)
                    result.paste(thresholded, (0, 0))
                    result.save(os.path.join(output_path, "bin" + img[3:]))

                cmp = Comparator()
                fitness = cmp.evaluate__folder_c(current_path, output_path,
                                                 step)
                # print(fitness)
                res.append(fitness)
                #self.metrics["fmeasure-t" + str(threshold)] = fitness
            self.metrics["fmeasure_threshold"] = res

    def compute_steps_metrics(self):
        """Evaluate the F-measure as a function of the number of evaluation images."""
        # self.metrics["Square_error"] = self.som.square_error()
        # self.metrics["Neurons"] = len(self.som.network.nodes())
        # self.metrics["Connections"] = len(self.som.network.edges())
        if self.dataset["type"] == "tracking":
            current_path = os.path.join("Data", "tracking", "dataset",
                                        self.dataset["file"])
            input_path = os.path.join(current_path, "input")
            with open(os.path.join(current_path, "temporalROI.txt"), "r") as f:
                roi_file = f.readline().split()
            temporal_roi = (int(roi_file[0]), int(roi_file[1]))
            mask_roi = Image.open(os.path.join(current_path, "ROI.png"))

            base = os.path.join("Results", "GNGoptimisation",
                                self.metadata["name"], "results")
            output_path = os.path.join(base, self.dataset["file"])
            supplements_path = os.path.join(base, "supplements")

            parameters = Parameters({
                "pictures_dim": [self.dataset["width"], self.dataset["height"]]
            })

            # ranges = list(range(1, 5)) + list(range(5, 101, 5))
            #res = []
            #ranges = range(1,201)
            #for i in ranges:
            #    cmp = Comparator()
            #    fitness = cmp.evaluate__folder_c(current_path, output_path, i)
            #    res.append(fitness)
            #self.metrics["fmeasure-steps"] = res

            res = []
            nb_image_ranges = range(5, 201)
            for i in nb_image_ranges:
                step = (temporal_roi[1] + 1 - temporal_roi[0]) // i
                cmp = Comparator()
                fitness = cmp.evaluate__folder_c(current_path, output_path,
                                                 step)
                res.append(fitness)
            self.metrics["fmeasure-nbimgs"] = res

    def full_threshold_evaluation(self, path):
        self.compute_varying_threshold_metric()
        self.save(path)
        print("Simulation", self.metadata["name"], "ended")

    def full_step_evaluation(self, path):
        self.compute_steps_metrics()
        self.save(path)
        print("Simulation", self.metadata["name"], "ended")

    def full_simulation(self, path):
        if "fmeasure" not in self.metrics:
            self.run()
            self.compute_metrics()
            self.save(path)
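
A minimal driver sketch using only the methods shown above; the configuration filename and the results directory are placeholders, not values from the source.

# Sketch: load a saved configuration, run it end to end and store the metrics (paths are placeholders).
import os

exe = Execution()
exe.open(os.path.join("Executions", "my_run.json"))
exe.full_simulation(os.path.join("Executions", "done"))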
Example #5
        return np.mean(error)


if __name__ == '__main__':
    start = time.time()
    img = MosaicImage(Image.open("Data/images/Elijah.png"),
                      Parameters({"pictures_dim": [10, 10]}))

    nb_epochs = 100
    inputs = Parameters({
        "alpha": Variable(start=0.6, end=0.05, nb_steps=nb_epochs),
        "sigma": Variable(start=0.5, end=0.001, nb_steps=nb_epochs),
        "data": img.get_data(),
        "neurons_nbr": (8, 8),
        "epochs_nbr": nb_epochs
    })
    som = RecursiveSOM(inputs)
    som.run()

    end = time.time()
    print("Executed in " + str(end - start) + " seconds.")
    print("Mean :", som.mean_error(), "compared to",
          som.level_one.mean_error())
    print("Square : ", som.square_error(), "compared to",
          som.level_one.square_error())

    two = img.reconstruct(som.get_reconstructed_data())
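
A small follow-up sketch, not in the source: save the reconstruction held in `two` for visual inspection; the output filename is a placeholder.

    # Sketch: persist the RecursiveSOM reconstruction (filename is a placeholder).
    Image.fromarray(two).save("recursive_som_reconstruction.png")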
Example #6
from Data.Mosaic_Image import MosaicImage

pictures_dim = [10, 10]
name = "office"
path = os.path.join("Data", "spikes", "office", "src")
bkg = Image.open(os.path.join(path, name + "0.png"))
img_parameters = Parameters({"pictures_dim": pictures_dim})
data = MosaicImage(bkg, img_parameters)
nb_epochs = 100
parameters = Parameters({
    "alpha": Variable(start=0.6, end=0.05, nb_steps=nb_epochs),
    "sigma": Variable(start=0.5, end=0.001, nb_steps=nb_epochs),
    "data": data.get_data(),
    "neurons_nbr": (10, 10),
    "epochs_nbr": nb_epochs
})
som = SOM(parameters)
for i in range(nb_epochs):
    print("Epoch " + str(i + 1))
    som.run_epoch()

som.data = data.get_data()

output_path = os.path.join("Data", "spikes", "office", "out")
os.makedirs(os.path.join(output_path, "difference"), exist_ok=True)
os.makedirs(os.path.join(output_path, "diff_winners"), exist_ok=True)
os.makedirs(os.path.join(output_path, "saliency"), exist_ok=True)
Example #7
# bkg = Image.open(path3 + "example_base.png")

############
# LEARNING #
############
pictures_dim = [10, 10]
parameters = Parameters({"pictures_dim": pictures_dim})
data = MosaicImage(bkg, parameters)
nb_epochs = 50
inputs_SOM = Parameters({
    "alpha": Variable(start=0.5, end=0.25, nb_steps=nb_epochs),
    "sigma": Variable(start=0.1, end=0.03, nb_steps=nb_epochs),
    "data": data.get_data(),
    "neurons_nbr": (10, 10),
    "epochs_nbr": nb_epochs
})
som = SOM(inputs_SOM)
for i in range(nb_epochs):
    print('Epoch ', i)
    som.run_epoch()
    original = data.image
    reconstructed = Image.fromarray(
        data.reconstruct(som.get_reconstructed_data()))
    som_image = data.reconstruct(som.get_neural_list(), size=som.neurons_nbr)
    difference_image = ImageChops.difference(original,
                                             reconstructed).convert('L')
    difference = np.asarray(difference_image)
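
If a per-epoch error trace is wanted, the loop body above could end with the same normalized mean squared difference that Example #10 prints; a one-line sketch, assuming it is appended inside the loop:

    # Sketch: normalized MSE of the reconstruction for this epoch (mirrors Example #10).
    print(np.mean(np.square(np.divide(difference, 255))))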
Example #8
    def fully_random_vector(self):
        return np.random.randint(np.shape(self.data)[0])

    def unique_random_vector(self):
        self.current_vector_index = self.vector_list.pop(0)
        return self.current_vector_index

    def generate_random_list(self):
        self.vector_list = list(range(len(self.data)))
        np.random.shuffle(self.vector_list)


if __name__ == '__main__':
    img = MosaicImage(Image.open(os.path.join("Data", "Images", "Lenna.png")),
                      Parameters({"pictures_dim": [20, 20]}))
    data = img.get_data()

    start = time.time()

    nb_epochs = 20
    inputs = Parameters({
        "alpha": Variable(start=0.6, end=0.05, nb_steps=nb_epochs),
        "sigma": Variable(start=0.5, end=0.001, nb_steps=nb_epochs),
        "data": img.get_data(),
        "neurons_nbr": (5, 5),
        "epochs_nbr": nb_epochs
    })
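
The excerpt stops before the map is created; a plausible continuation based on Examples #3 and #5, given as an assumption (it also assumes SOM is imported in this file):

    # Sketch: train a SOM on the inputs above and report the elapsed time (mirrors Examples #3 and #5).
    som = SOM(inputs)
    som.run()
    end = time.time()
    print("Executed in " + str(end - start) + " seconds.")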
Example #9
            error[i] = np.mean((self.working_data[i] - self.network.nodes[winners[i]]['vector'])**2)
        return np.mean(error)

    def display(self):
        reconstructed = self.img.reconstruct(self.get_reconstructed_data())
        # som_image = mosaic.reconstruct(gng.get_neural_list(), size=som.neurons_nbr)
        print("Error : ", self.square_error())
        print("Neurons : ", len(self.network.nodes()))
        print("Connections : ", len(self.network.edges()))
        plt.imshow(reconstructed)
        plt.show()


if __name__ == '__main__':
    img = MosaicImage(
        Image.open(os.path.join("Data", "Tracking", "Dataset", "baseline",
                                "highway", "bkg.jpg")),
        Parameters({"pictures_dim": [16, 16]}))
    data = img.get_data()
    gng = GrowingNeuralGas(data)
    gng.img = img
    gng.fit_network(e_b=0.1, e_n=0.006, a_max=10, l=200, a=0.5, d=0.995, passes=100, plot_evolution=False)
    reconstructed = img.reconstruct(gng.get_reconstructed_data())
    # som_image = mosaic.reconstruct(gng.get_neural_list(), size=som.neurons_nbr)
    # print(gng.get_all_winners())
    plt.imshow(reconstructed)
    plt.show()
    print(len(gng.network.nodes))

    img2 = MosaicImage(
        Image.open(os.path.join("Data", "Tracking", "Dataset", "baseline",
                                "highway", "input", "in001010.jpg")),
        Parameters({"pictures_dim": [16, 16]}))
    data2 = img2.get_data()
    old_winners = gng.get_all_winners()
    gng.data = data2
    gng.working_data = data2
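
A short sketch of a possible next step, using only calls already shown in this example: view how the trained GNG reconstructs the new frame after the data swap. This is an assumption about intent, not code from the source.

    # Sketch: reconstruct and display the new frame through the trained GNG.
    plt.imshow(img2.reconstruct(gng.get_reconstructed_data()))
    plt.show()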
Example #10
chosen_path = path + "/dataset/" + categories[1] + "/" + elements[1] + "/input/"
temporal_ROI = (570, 2050)
images_list = []
parameters = Parameters({"pictures_dim": pictures_dim})
plot = None
img = Image.open(chosen_path + 'in{0:06d}.jpg'.format(1)).convert('L')
bkg = Image.open(chosen_path + 'in{0:06d}.jpg'.format(60)).convert('L')
difference = ImageChops.difference(img, bkg)

data = MosaicImage(difference, parameters)

nb_epochs = 50
inputs_SOM = Parameters({"alpha": Variable(start=0.6, end=0.05, nb_steps=nb_epochs),
                         "sigma": Variable(start=0.5, end=0.001, nb_steps=nb_epochs),
                         "data": data.get_data(),
                         "neurons_nbr": (10, 10),
                         "epochs_nbr": nb_epochs})
som = SOM(inputs_SOM)
for i in range(nb_epochs):
    print('Epoch ', i)
    som.run_epoch()
    original = data.image
    reconstructed = data.reconstruct(som.get_reconstructed_data())
    som_image = data.reconstruct(som.get_neural_list(), size=som.neurons_nbr)
    difference = ImageChops.difference(original, Image.fromarray(reconstructed))
    difference = np.asarray(difference)
    # difference = np.sum(difference, axis=2)
    # difference = np.divide(difference, 255*3)
    difference = np.divide(difference, 255)
    print(np.mean(np.square(difference)))
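
As a follow-up inside the loop, the reconstruction error could be binarized the way Example #1 does; the threshold value here is a hypothetical choice, not taken from the source.

    # Sketch: binarize the reconstruction error as in Example #1 (threshold is assumed).
    threshold = 20
    fn = lambda x: 255 if x > threshold else 0
    binary = ImageChops.difference(original, Image.fromarray(reconstructed)).convert('L').point(fn, mode='1')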