Code example #1
0
def create_segmentation_dataset(images_folder,
                                target_labels,
                                color_palette,
                                n_images,
                                batch_size=10,
                                split="train"):
    """Download a COCO-2017 segmentation subset and export it to disk.

    Args:
        images_folder: destination directory for the exported images/masks.
        target_labels: dict mapping label name -> value; keys select the
            COCO classes to download, the mapping itself is forwarded to
            ``image.export`` for mask encoding.
        color_palette: palette forwarded to ``image.export`` for mask colors.
        n_images: number of images to fetch per class.
        batch_size: number of images per download batch.
        split: dataset split to use ("train", "validation", ...).

    Errors are caught and printed rather than raised (best-effort download).
    """
    try:
        # creating dataset
        dataset = Coco(
            version=2017,  # COCO release year
            split=split,
            task="segmentation",
            labels=list(target_labels.keys()),  # target labels
            n_images=n_images,  # number of images by class
            batch_size=batch_size  # batch images size
        )
        print(len(dataset))  # size of dataset
        data_folder = Path(images_folder)
        data_folder.mkdir(exist_ok=True)
        FileUtil.clear_folder(data_folder)  # start from an empty folder
        # Download images
        for i, batch_images in enumerate(dataset):
            print(f"download done for batch {i+1} of {dataset.batches_count}")
            for image in batch_images:
                # BUG FIX: was `labels_map`, which is undefined in this scope
                # (NameError at runtime); the intended mapping is the
                # `target_labels` parameter.
                image.export(data_folder, target_labels,
                             color_palette)  # copy images to disk
    except Exception as ex:
        print(f"[ERROR] Error creating the dataset {ex}")
Code example #2
0
File: tf_trainable.py  Project: 5l1v3r1/FalconCV
    def train(self,
              epochs=100,
              val_split=0.3,
              clear_folder=False,
              override_pipeline=False,
              eval=False):
        """Run TF Object Detection training for this model.

        Prepares the output folder (optionally wiping it), writes the
        labels map and TFRecords, creates the ``export/Servo`` directory,
        saves the merged pipeline config, then launches training —
        with evaluation interleaved when ``eval`` is True.

        Raises:
            Exception: wraps any failure during preparation or training,
                chaining the original exception as the cause.
        """
        try:
            if clear_folder:
                FileUtil.clear_folder(self._out_folder)
            self.num_steps = epochs
            self._mk_labels_map()
            self._mk_records(val_split)
            # update pipeline: ensure the SavedModel export location exists
            export_dir = self._out_folder.joinpath(
                os.path.sep.join(["export", "Servo"]))
            export_dir.mkdir(exist_ok=True, parents=True)
            # merge pipelines
            save_pipeline_config(self.pipeline, str(self._out_folder))
            # start training
            tf.logging.set_verbosity(tf.logging.INFO)
            # dispatch to the appropriate training routine
            run_training = self._train_and_eval if eval else self._train
            run_training()
        except Exception as ex:
            raise Exception("Error training the model : {}".format(ex)) from ex
        return super(TfTrainableModel, self).train()
Code example #3
0
def create_dataset(images_folder, labels_map, color_palette, n):
    """Fetch ``n`` images per label from COCO-2017 (train/segmentation)
    and export them, with their masks, into ``images_folder``.

    Args:
        images_folder: destination directory (created/cleared as needed).
        labels_map: dict mapping label name -> value; keys select the
            classes to fetch, the mapping is forwarded to ``img.export``.
        color_palette: palette forwarded to ``img.export``.
        n: number of images to fetch per label.

    Errors are caught and printed rather than raised (best-effort download).
    """
    try:
        # creating dataset
        dataset = Coco(v=2017)
        dataset.setup(split="train", task="segmentation")
        os.makedirs(images_folder, exist_ok=True)
        FileUtil.clear_folder(images_folder)  # start from an empty folder
        for batch_images in dataset.fetch(n=n,
                                          labels=list(labels_map.keys()),
                                          batch_size=500):
            for img in batch_images:
                img.export(images_folder, labels_map, color_palette)
                # removed: a dead `for region in img.regions: pass` loop and
                # commented-out debug prints that performed no work
    except Exception as ex:
        print("error creating the dataset {} ".format(ex))
Code example #4
0
def create_detection_dataset(images_folder, target_labels, n_images,
                             batch_size, split):
    """Download an Open Images v6 detection subset and copy it to disk.

    Args:
        images_folder: destination directory (created/cleared as needed).
        target_labels: labels selecting the classes to download.
        n_images: number of images to fetch per class.
        batch_size: number of images per download batch.
        split: dataset split to use.

    Errors are caught and printed rather than raised (best-effort download).
    """
    try:
        # creating dataset
        dataset = OpenImages(
            version=6,  # versions 5 and 6 supported
            split=split,
            task="detection",
            labels=target_labels,  # target labels
            n_images=n_images,  # number of images by class
            batch_size=batch_size  # batch images size
        )
        print(len(dataset))  # size of dataset
        out_dir = Path(images_folder)
        out_dir.mkdir(exist_ok=True)
        FileUtil.clear_folder(out_dir)  # start from an empty folder
        # Download images batch by batch, reporting progress
        total_batches = dataset.batches_count
        batch_no = 0
        for batch in dataset:
            batch_no += 1
            print(
                f"download done for batch {batch_no} of {total_batches}")
            for img in batch:
                img.export(out_dir)  # copy images to disk
    except Exception as ex:
        print(f"[ERROR] Error creating the dataset {ex}")