    def test_overfitting(self):
        gen = TrainImageGenerator("../../datasets/micro/annotations.csv",
                                  "../../datasets/micro",
                                  batch_size=1,
                                  augumenter=NoAgumenter())

        xs = tf.placeholder(tf.float32, (None, 300, 300, 3))
        ys = [
            tf.placeholder(
                tf.float32,
                (None, fm.width, fm.height, len(fm.aspect_ratios), 5))
            for fm in FEATURE_MAPS
        ]

        net = ssd(xs)
        loss = ssd_total_loss(ys, net)
        minimizer = tf.train.AdamOptimizer(1e-4).minimize(loss)

        inputs, expected_list = gen.generate_batch([0])

        with tf.Session() as sess:
            sess.run(tf.global_variables_initializer())
            writer = tf.summary.FileWriter('../../graphs', sess.graph)

            for i in range(500):
                calc_loss, _ = sess.run([loss, minimizer], {
                    xs: inputs,
                    **dict(zip(ys, expected_list))
                })
                print(f"{i}: {calc_loss}")

            predictions = sess.run(net, {xs: inputs})

            visualize_prediction(get_sample_from_batchx(inputs, 0),
                                 get_sample_from_batchy(predictions, 0))
Example #2
    def test_finds_bounding_boxes(self):
        trainer = TrainImageGenerator(
            annotation_path="../../datasets/micro/annotations.csv",
            images_path="../../datasets/micro")
        x, y = trainer.generate_sample(0)

        found_boxes = interpret_label(y, FEATURE_MAPS)

        self.assertTrue(len(found_boxes) > 0)
Example #3
    def test_single_sample_generation(self):
        trainer = TrainImageGenerator(
            annotation_path="../../datasets/mini/annotations.csv",
            images_path="../../datasets/mini")
        x, y = trainer.generate_sample(0)

        self.assertEqual(np.shape(x), (300, 300, 3))
        self.assertEqual(len(y), len(FEATURE_MAPS))

        for layer, fm in zip(y, FEATURE_MAPS):
            self.assertEqual(fm.width, layer.shape[0])
            self.assertEqual(fm.height, layer.shape[1])
            self.assertEqual(len(fm.aspect_ratios), layer.shape[2])
            self.assertEqual(layer.shape[3], 5)
Example #4
    def test_initalization(self):
        trainer = TrainImageGenerator(
            annotation_path="../../datasets/mini/annotations.csv",
            images_path="../../datasets/mini")
        self.assertEqual(trainer.batch_size, 8)
        self.assertEqual(trainer.num_samples, 32)
        self.assertEqual(trainer.num_batches, 32 / 8)
Example #5
    def fit(self):
        gen = TrainImageGenerator(self.annotations_path, self.images_path,
                                  batch_size=self.batch_size, augumenter=NoAgumenter())

        print(f"Found {gen.num_samples} samples!")

        xs = tf.placeholder(tf.float32, (None, 300, 300, 3))
        ys = [
            tf.placeholder(
                tf.float32,
                (None, fm.width, fm.height, len(fm.aspect_ratios), 5))
            for fm in FEATURE_MAPS
        ]

        net = ssd(xs)
        loss = ssd_total_loss(ys, net)
        minimizer = tf.train.AdamOptimizer(self.alpha).minimize(loss)

        saver = tf.train.Saver()

        with tf.Session() as sess:
            sess.run(tf.global_variables_initializer())

            training_start_time = datetime.now()
            last_epoch_save_time = datetime.now()

            for i in range(self.max_epoch):
                for batch_x, batch_y in gen.get_batches_in_epoch():
                    calc_loss, _ = sess.run([loss, minimizer], {xs: batch_x, **dict(zip(ys, batch_y))})
                    print(f"{i}: {calc_loss}")

                if self.save_every_epoch:
                    saver.save(sess, self.output_model_path + f"-epoch{i}")

                now = datetime.now()

                # Periodic saving
                if (self.save_after_minutes > 0) and (now > last_epoch_save_time + timedelta(minutes=self.save_after_minutes)):
                    saver.save(sess, self.output_model_path + f"-epoch{i}")
                    last_epoch_save_time = datetime.now()

                # Stop training once the overall time limit has passed
                if (self.kill_after_minutes > 0) and (now > training_start_time + timedelta(minutes=self.kill_after_minutes)):
                    break

            saver.save(sess, self.output_model_path + "-final")
            print(f"Model trained sucessfully. Latest version saved as {self.output_model_path}-final")
Example #6
    def test_get_batches_in_epoch(self):
        trainer = TrainImageGenerator(
            annotation_path="../../datasets/mini/annotations.csv",
            images_path="../../datasets/mini")

        counter = 0
        for x_batch, y_batch in trainer.get_batches_in_epoch():
            counter = counter + 1

            self.assertEqual((8, 300, 300, 3), np.shape(x_batch))
            self.assertEqual(len(y_batch), len(FEATURE_MAPS))

            for layer, fm in zip(y_batch, FEATURE_MAPS):
                self.assertEqual(8, layer.shape[0])
                self.assertEqual(fm.width, layer.shape[1])
                self.assertEqual(fm.height, layer.shape[2])
                self.assertEqual(len(fm.aspect_ratios), layer.shape[3])
                self.assertEqual(layer.shape[4], 5)

        self.assertEqual(counter, 4)
Example #7
from postprocessing.visualization import visualize_prediction
from dataset_generation.data_feeder import TrainImageGenerator
from dataset_generation.augmenter import NoAgumenter

if __name__ == "__main__":
    generator = TrainImageGenerator("../../datasets/micro/annotations.csv",
                                    "../../datasets/micro",
                                    batch_size=1,
                                    augumenter=NoAgumenter())

    img, label = generator.generate_sample(0)
    visualize_prediction(img, label)
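
    # Optional extra step, sketched here rather than taken from the original
    # script: decode the encoded label back into boxes, as the tests above do
    # with interpret_label, to confirm the sample actually matched some priors.
    # interpret_label and FEATURE_MAPS would have to be imported from wherever
    # they live in this project; their modules are not shown in these snippets.
    boxes = interpret_label(label, FEATURE_MAPS)
    print(f"Encoded label matched {len(boxes)} prior boxes")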