def __init__(self):
    # Parse training options and build the finetuning dataset.
    self._opt = TrainOptions().parse()
    self._dataset_train = DatasetFactory.get_by_name("FinetuneDataset", self._opt)
    self._dataset_train_size = len(self._dataset_train)
    print('#train images = %d' % self._dataset_train_size)

    # Build the model to finetune and launch training.
    self._model = ModelsFactory.get_by_name("FineModel", self._opt, is_train=True)
    self._train()
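# Hedged sketch of the _train() method launched above. The FineModel training
# interface (set_input / optimize_parameters / save) and the 'nepochs' option are
# assumptions modeled on common PyTorch training wrappers, not taken from this file.
def _train(self):
    for epoch in range(self._opt.nepochs):
        for sample in self._dataset_train:
            self._model.set_input(sample)        # assumed: hand one sample/batch to the model
            self._model.optimize_parameters()    # assumed: forward, backward, optimizer step
        self._model.save(epoch)                  # assumed: checkpoint after every epoch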
def __init__(self):
    # Parse test options and cache the input image settings.
    self._opt = TestOptions().parse()
    self._img_path = self._opt.img_path
    self._img_size = self._opt.image_size

    # Feature extractor (finetuned CNN) used in inference mode.
    self.fine_model = ModelsFactory.get_by_name('FineModel', self._opt, is_train=False)

    # One binary SVM per class, loaded from its saved checkpoint.
    self.svm_model_A = ModelsFactory.get_by_name('SvmModel', self._opt, is_train=False)
    self.svm_model_A.load('A')
    self.svm_model_B = ModelsFactory.get_by_name('SvmModel', self._opt, is_train=False)
    self.svm_model_B.load('B')
    self.svms = [self.svm_model_A, self.svm_model_B]

    # Bounding-box regressor, then run the full test pipeline.
    self.reg_model = ModelsFactory.get_by_name('RegModel', self._opt, is_train=False)
    self.test()
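# Hedged sketch of the test() pipeline wired up above. Only the attributes set in
# __init__ come from this file; proposal generation, the _crop helper, and the
# extract_feature / predict / refine calls are assumptions about how a CNN feature
# extractor, per-class SVMs, and a box regressor are typically combined.
def test(self):
    import cv2
    img = cv2.imread(self._img_path)
    rois = self._generate_proposals(img)                      # hypothetical helper: region proposals
    for roi in rois:
        patch = cv2.resize(self._crop(img, roi),              # hypothetical helper: crop the box
                           (self._img_size, self._img_size))
        feature = self.fine_model.extract_feature(patch)      # assumed FineModel API
        scores = [svm.predict(feature) for svm in self.svms]  # assumed SvmModel API
        if max(scores) > 0:
            roi = self.reg_model.refine(feature, roi)         # assumed RegModel API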
def __init__(self):
    # Parse training options and build the SVM training dataset.
    self._opt = TrainOptions().parse()
    self._dataset_train = DatasetFactory.get_by_name("SVMDataset", self._opt)
    self._dataset_train_size = len(self._dataset_train)
    print('#train images = %d' % self._dataset_train_size)

    # Pre-extracted CNN features and labels, split per class.
    (self.classA_features, self.classA_labels,
     self.classB_features, self.classB_labels) = self._dataset_train.get_datas()

    # Train one binary SVM per class and save each under its class tag.
    self._modelA = ModelsFactory.get_by_name("SvmModel", self._opt, is_train=True)
    self._modelB = ModelsFactory.get_by_name("SvmModel", self._opt, is_train=True)
    self._train(self._modelA, self.classA_features, self.classA_labels, "A")
    self._train(self._modelB, self.classB_features, self.classB_labels, "B")
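# Hedged sketch of the per-class _train() helper used above; the SvmModel
# fit/save interface is an assumption (a scikit-learn style API), not taken
# from this file.
def _train(self, model, features, labels, tag):
    model.fit(features, labels)   # assumed: fit the binary SVM on the cached CNN features
    model.save(tag)               # assumed: persist under the class tag ('A' or 'B'),
                                  # matching the load('A') / load('B') calls at test time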
def __init__(self):
    # Parse test options and cache the image path and input dimensions.
    self._opt = TestOptions().parse()
    self._img_path = self._opt.img_path
    self._img_width = self._opt.image_width
    self._img_height = self._opt.image_height

    # Classifier network used in inference mode.
    self._model = ModelsFactory.get_by_name('AlexModel', self._opt, is_train=False)

    # Despite its name, this dict maps class indices to class names,
    # with index 0 reserved for the background/'None' class.
    self._classes = self._opt.classes.split(",")
    self._class_to_ind = dict(zip(range(1, len(self._classes) + 1), self._classes))
    self._class_to_ind[0] = 'None'
    self.test()
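# Illustration only: decoding a predicted class index with the dict built above.
# This helper and the index it receives are assumptions, not part of this file.
def _decode(self, pred_idx):
    # Index 0 falls back to 'None' (background); other indices map to the
    # class names supplied via the --classes option.
    return self._class_to_ind.get(pred_idx, 'None')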
def __init__(self):
    # Parse training options and build the training data loader
    # (the test loader is kept here, commented out, for reference).
    self._opt = TrainOptions().parse()
    data_loader_train = CustomDatasetDataLoader(self._opt, is_for_train=True)
    # data_loader_test = CustomDatasetDataLoader(self._opt, is_for_train=False)

    self._dataset_train = data_loader_train.load_data()
    # self._dataset_test = data_loader_test.load_data()

    self._dataset_train_size = len(data_loader_train)
    # self._dataset_test_size = len(data_loader_test)
    print('#train images = %d' % self._dataset_train_size)
    # print('#test images = %d' % self._dataset_test_size)

    # Build the model named in the options, set up TensorBoard logging,
    # and launch training.
    self._model = ModelsFactory.get_by_name(self._opt.model, self._opt)
    self._tb_visualizer = TBVisualizer(self._opt)
    self._train()
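# Hedged sketch of the _train() loop this constructor starts. The model's
# set_input / optimize_parameters / get_current_errors / save methods, the
# visualizer's print_current_train_errors signature, and the nepochs / batch_size /
# print_freq options are all assumptions modeled on similar training scripts.
def _train(self):
    total_steps = 0
    for epoch in range(self._opt.nepochs):
        for i_batch, batch in enumerate(self._dataset_train):
            total_steps += self._opt.batch_size
            self._model.set_input(batch)            # assumed API
            self._model.optimize_parameters()       # assumed API
            if total_steps % self._opt.print_freq == 0:
                errors = self._model.get_current_errors()  # assumed API
                self._tb_visualizer.print_current_train_errors(epoch, i_batch, errors)  # assumed signature
        self._model.save(epoch)                     # assumed API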
def __init__(self, opt):
    super(SVMDataset, self).__init__(opt)
    self._name = 'SVMDataset'

    # Per-class feature/label buffers filled while reading the dataset.
    self.datas = []
    self.classA_features = []
    self.classA_labels = []
    self.classB_features = []
    self.classB_labels = []

    # Cache file for the extracted features; the finetuned CNN is used
    # in inference mode to compute them when no cache exists yet.
    self.save_path = os.path.join(self._opt.generate_save_path, 'svm_data.npy')
    self._img_size = self._opt.image_size
    self.cursor = 0
    self.model = ModelsFactory.get_by_name('FineModel', self._opt, is_train=False)

    # Load cached features if present, otherwise build them from the raw dataset.
    if os.path.exists(self.save_path):
        self._load_from_numpy()
    else:
        self._load_dataset()
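# Hedged sketch of the cache branch used above: reload the per-class features and
# labels saved in svm_data.npy. The exact layout of the .npy file is an assumption
# (a single object array holding the four lists in this order).
def _load_from_numpy(self):
    import numpy as np
    data = np.load(self.save_path, allow_pickle=True)
    (self.classA_features, self.classA_labels,
     self.classB_features, self.classB_labels) = data  # assumed cache layout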