class TestCscClassifier:
    def __init__(self):
        self.d = Dataset('full_pascal_trainval')
        self.d_val = Dataset('full_pascal_test')
        self.cls = 'dog'
        suffix = 'default'
        self.csc = CSCClassifier(suffix, self.cls, self.d, self.d_val)
        csc_test = np.load(config.get_ext_dets_filename(self.d, 'csc_default'))
        self.dets = csc_test[()]

    def get_scores_for_img(self, img_ind, cls_ind):
        # Keep only detections of the given class and image, then normalize
        # the raw DPM scores before returning them.
        dets = self.dets.filter_on_column('cls_ind', cls_ind, omit=True)
        dets = dets.subset(['score', 'img_ind'])
        dets = dets.filter_on_column('img_ind', img_ind, omit=True)
        dets.arr = self.csc.normalize_dpm_scores(dets.arr)
        return dets.subset_arr('score')

    def test_classify_image(self):
        cls_ind = self.d.classes.index(self.cls)
        scores = self.get_scores_for_img(0, cls_ind)
        res = self.csc.classify_image(scores)
        assert round(res, 12) == 1.

    def test_compute_histogram(self):
        cls_ind = self.d.classes.index(self.cls)
        for img_ind in range(50):
            scores = self.get_scores_for_img(img_ind, cls_ind)
            vector = self.csc.create_vector_from_scores(scores)
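For orientation, a minimal sketch of inspecting the cached csc_default detections that the fixture above loads; it reuses Dataset, config, np, and the Table's arr/subset_arr from the snippets, and only the printed summary is new:

d = Dataset('full_pascal_trainval')
dets = np.load(config.get_ext_dets_filename(d, 'csc_default'))[()]
# Number of cached detections and the range of their raw scores.
print 'detections:', dets.arr.shape[0]
print 'score range:', dets.subset_arr('score').min(), dets.subset_arr('score').max()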
def train_csc_svms(d_train, d_val, kernel, C):
    # d:       trainval
    # d_train: train | trainval
    # d_val:   val   | test
    dp = DatasetPolicy(d_train, d_train, detectors=['csc_default'])
    for cls_idx in range(mpi.comm_rank, len(d_train.classes), mpi.comm_size):
        cls = d_train.classes[cls_idx]
        ext_detector = dp.actions[cls_idx].obj
        csc = CSCClassifier('default', cls, d_train, d_val)
        csc.train_for_cls(ext_detector, kernel, C)
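The range(mpi.comm_rank, len(classes), mpi.comm_size) loop above splits the per-class SVM training round-robin across MPI ranks. A pure-Python sketch of that partitioning, with a shortened class list and three ranks made up for illustration:

classes = ['aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus', 'car']
comm_size = 3
for comm_rank in range(comm_size):
    # Each rank handles every comm_size-th class, starting at its own rank.
    assigned = [classes[i] for i in range(comm_rank, len(classes), comm_size)]
    print 'rank %d trains: %s' % (comm_rank, assigned)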
def test_csc_svm(d_train, d_val):
    dp = DatasetPolicy(d_val, d_train, detectors=['csc_default'])
    table = np.zeros((len(d_val.images), len(d_val.classes)))
    for cls_idx in range(mpi.comm_rank, len(d_val.classes), mpi.comm_size):
        cls = d_val.classes[cls_idx]
        ext_detector = dp.actions[cls_idx].obj
        # Load the classifier we trained in train_csc_svms
        csc = CSCClassifier('default', cls, d_train, d_val)
        table[:, cls_idx] = csc.eval_cls(ext_detector)
    print '%d is at safebarrier' % mpi.comm_rank
    safebarrier(comm)
    print 'passed safebarrier'
    table = comm.reduce(table, op=MPI.SUM, root=0)
    if mpi.comm_rank == 0:
        print 'save table'
        print table
        cPickle.dump(table, open('table', 'w'))
        print 'saved'
    return table
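test_csc_svm fills only the columns this rank evaluated and relies on comm.reduce with MPI.SUM to assemble the full (n_images x n_classes) table on rank 0. A minimal mpi4py sketch of that pattern, assuming mpi4py is available; the table shape and the stand-in column values are made up:

import numpy as np
from mpi4py import MPI

comm = MPI.COMM_WORLD
rank, size = comm.Get_rank(), comm.Get_size()

table = np.zeros((4, 6))
for cls_idx in range(rank, table.shape[1], size):
    table[:, cls_idx] = rank + 1  # stand-in for csc.eval_cls(ext_detector)

# Elementwise sum across ranks; only rank 0 receives the assembled table.
full_table = comm.reduce(table, op=MPI.SUM, root=0)
if rank == 0:
    print full_table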
class ExternalDetector(Detector):
    """
    A mock interface to the Felzenszwalb DPM, CSC, or the Pedersoli CtF detector.
    Actually works by pre-loading all the detections and then returning them
    as requested.
    """

    def __init__(self, dataset, train_dataset, cls, dets, detname):
        """
        Expects cached detections in Table format to be passed in.
        The dets should not have the 'cls_ind' column, as they should all be
        of the same class.
        """
        Detector.__init__(self, dataset, train_dataset, cls, detname)
        self.dets = dets
        # TODO: hack for csc_X
        suffix = detname[4:]
        if self.detname == 'dpm':
            self.classif = DPMClassifier()
        else:
            self.classif = CSCClassifier(suffix, cls, train_dataset, dataset)

    def detect(self, image, astable=False, dets=None):
        """
        Return the detections that match that image index in cached dets.
        Must return in the same format as the Detector superclass, so we have
        to delete a column.
        """
        if dets is None:
            img_ind = self.dataset.get_img_ind(image)
            dets = self.dets.filter_on_column('img_ind', img_ind, omit=True)
        time_passed = 0
        if dets.arr.shape[0] > 0:
            time_passed = np.max(dets.subset_arr('time'))
        # Halve the time passed for the may25 DPM detector, to get reasonable
        # times. Also halve the time passed by the csc_half detector, because
        # we halved its AP.
        if self.detname == 'dpm_may25' or self.detname == 'csc_half':
            time_passed /= 2
        # TODO: hack that returns time around 1s always
        # hist(np.maximum(0.8, 1 + 0.1*np.random.randn(1000)), 20)
        time_passed = np.maximum(0.8, 1 + 0.1 * np.random.randn())
        dets = dets.with_column_omitted('time')
        if astable:
            return (dets, time_passed)
        else:
            return (dets.arr, time_passed)

    def compute_score(self, image, oracle=False, dets=None):
        """
        Return the 0/1 decision of whether the cls of this detector is present
        in the image, given the detections table.
        If oracle=True, return the correct answer (look up the ground truth).
        """
        if oracle:
            return Detector.compute_score(self, image, oracle)
        if dets is None:
            img_ind = self.dataset.get_img_ind(image)
            dets = self.dets.filter_on_column('img_ind', img_ind)
        scores = dets.subset_arr('score')
        score = self.classif.classify_image(scores)
        dt = 0
        # TODO: figure out the dt situation above
        return (score, dt)
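A hedged end-to-end sketch of how ExternalDetector is fed the cached detections, following the conventions visible in the snippets (Dataset, config, Table filtering); the 'dog' class, the csc_default cache, and indexing dataset.images[0] are illustrative assumptions, not confirmed API:

train_dataset = Dataset('full_pascal_trainval')
dataset = Dataset('full_pascal_test')
all_dets = np.load(config.get_ext_dets_filename(dataset, 'csc_default'))[()]

# ExternalDetector expects single-class detections without the 'cls_ind' column.
cls_ind = dataset.classes.index('dog')
dog_dets = all_dets.filter_on_column('cls_ind', cls_ind, omit=True)

det = ExternalDetector(dataset, train_dataset, 'dog', dog_dets, 'csc_default')
image = dataset.images[0]  # assumed list-like collection of images
dets_arr, time_passed = det.detect(image)
score, dt = det.compute_score(image)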
if __name__ == '__main__':
    d_train = Dataset('full_pascal_trainval')
    d_val = Dataset('full_pascal_val')
    train_gt = d_train.get_cls_ground_truth()
    val_gt = d_val.get_cls_ground_truth()

    if mpi.comm_rank == 0:
        filename = os.path.join(
            config.get_classifier_dataset_dirname(
                CSCClassifier('default', 'dog', d_train, d_val), d_train),
            'crossval.txt')

    kernels = ['linear']
    Cs = [50]
    settings = list(itertools.product(kernels, Cs))
    for setin in settings:
        kernel = setin[0]
        C = setin[1]
        #train_csc_svms(d_train, d_val, kernel, C)
        safebarrier(comm)
        table_arr = test_csc_svm(d_train, d_val)