def checkArgs(Args):

    # print('Using config:')
    # pprint.pprint(cfg)

    print yellow("Checking the args ...")

    if Args.fineTune == 'True':
        Args.fine_tune = True
    else:
        Args.fine_tune = False

    if Args.useDemo == 'True':
        Args.use_demo = True
    else:
        Args.use_demo = False

    if Args.method == 'test':
        if Args.weights is None:
            print red("  Specify the testing network weights!")
            sys.exit(3)
        else:
            print blue("  Test the weight: \n    {}".format(Args.weights))
    elif Args.fine_tune:
        if Args.weights is None:
            print red("  Specify the finetune network weights!")
            sys.exit(4)
        else:
            print blue("  Finetune the weight:  {}".format(Args.weights))
    else:
        print red("  The network will RE-TRAIN from empty ! ! ")
    print '  Called with args:', args
# --- Beispiel (Example) #2 — pasted-snippet separator from the scrape ---
# 0
 def __init__(self):
     self.data_path = cfg.DATA_DIR
     self.folder = 'demo_p3_gaojiaqiao'
     self.test_set = self.load_dataset()
     self.testing_rois_length = len(self.test_set)
     print blue(
         'Dataset {} initialization has been done successfully.'.format(
             self.testing_rois_length))
     time.sleep(2)
# --- Beispiel (Example) #3 — pasted-snippet separator from the scrape ---
# 0
    def load_dataset(self):
        Instruction_cache_file = path_add(self.data_path,
                                          'Instruction_cache_data.pkl')
        train_cache_file = path_add(self.data_path, 'train_cache_data.pkl')
        valid_cache_file = path_add(self.data_path, 'valid_cache_data.pkl')
        test_cache_file = path_add(self.data_path, 'test_cache_data.pkl')
        if os.path.exists(train_cache_file) & os.path.exists(
                valid_cache_file) & os.path.exists(
                    test_cache_file) & os.path.exists(Instruction_cache_file):
            print blue('Loaded the STi dataset from pkl cache files ...')
            with open(Instruction_cache_file, 'rb') as fid:
                key_points = cPickle.load(fid)
                print yellow('  NOTICE: the groundtruth range is [{}] meters, the label to keep is {},\n          including folders:{},\n  Please verify that meets requirement !' \
                    .format(key_points[0], key_points[1], key_points[2]))
            with open(train_cache_file, 'rb') as fid:
                train_set = cPickle.load(fid)
                print '  train gt set(cnt:{}) loaded from {}'.format(
                    len(train_set), train_cache_file)

            with open(valid_cache_file, 'rb') as fid:
                valid_set = cPickle.load(fid)
                print '  valid gt set(cnt:{}) loaded from {}'.format(
                    len(valid_set), valid_cache_file)

            with open(test_cache_file, 'rb') as fid:
                test_set = cPickle.load(fid)
                print '  test gt set(cnt:{}) loaded from {}'.format(
                    len(test_set), test_cache_file)

            return train_set, valid_set, test_set

        print blue('Prepare the STi dataset for training, please wait ...')
        self.total_roidb = self.load_sti_annotation()
        self.filter_roidb = self.filter(self.total_roidb, self.type_to_keep)
        train_set, valid_set, test_set = self.assign_dataset(
            self.filter_roidb)  # train,valid percent
        with open(Instruction_cache_file, 'wb') as fid:
            cPickle.dump(
                [cfg.DETECTION_RANGE, self.type_to_keep, self.folder_list],
                fid, cPickle.HIGHEST_PROTOCOL)
            print yellow('  NOTICE: the groundtruth range is [{}] meters, the label to keep is {},\n          use the dataset:{},\n  Please verify that meets requirement !' \
                .format(cfg.DETECTION_RANGE, self.type_to_keep, self.folder_list))
        with open(train_cache_file, 'wb') as fid:
            cPickle.dump(train_set, fid, cPickle.HIGHEST_PROTOCOL)
            print '  Wrote and loaded train gt roidb(cnt:{}) to {}'.format(
                len(train_set), train_cache_file)
        with open(valid_cache_file, 'wb') as fid:
            cPickle.dump(valid_set, fid, cPickle.HIGHEST_PROTOCOL)
            print '  Wrote and loaded valid gt roidb(cnt:{}) to {}'.format(
                len(valid_set), valid_cache_file)
        with open(test_cache_file, 'wb') as fid:
            cPickle.dump(test_set, fid, cPickle.HIGHEST_PROTOCOL)
            print '  Wrote and loaded test gt roidb(cnt:{}) to {}'.format(
                len(test_set), test_cache_file)

        return train_set, valid_set, test_set
# --- Beispiel (Example) #4 — pasted-snippet separator from the scrape ---
# 0
 def __init__(self):
     self.data_path = cfg.DATA_DIR
     # self.folder_list = ['32_yuanqu_11804041320']
     self.folder_list = [
         '32_yuanqu_11804041320', 'p3_beihuan_B16_11803221546',
         '32beams_dingdianlukou_2018-03-12-11-02-41',
         '32_daxuecheng_01803191740', '32_gaosulu_test', 'xuanchuan'
     ]
     self._classes = [
         'unknown', 'smallMot', 'bigMot', 'nonMot', 'pedestrian'
     ]  #TODO:declare: there is another label dont care! becareful
     self.type_to_keep = ['smallMot', 'bigMot', 'nonMot', 'pedestrian']
     self.num_classes = len(self._classes)
     self.class_convert = dict(zip(self._classes, xrange(self.num_classes)))
     self.total_roidb = []
     self.filter_roidb = []
     self.percent_train = 0.7
     self.percent_valid = 0.3
     self.train_set, self.valid_set, self.test_set = self.load_dataset()
     self.validing_rois_length = len(self.valid_set)
     self.training_rois_length = len(self.train_set)
     print blue('Dataset initialization has been done successfully.')
     time.sleep(2)
# --- Beispiel (Example) #5 — pasted-snippet separator from the scrape ---
# 0
    def eat_data_in_one_piece(self):
        """Load (or build) the train/valid positive/negative cube sets.

        Three-tier cache strategy, fastest first:
          1. Filtered caches in ``filter_data_in_one_piece`` — load and return.
          2. Unfiltered per-split ``.npy`` bundles in ``data_in_one_piece``.
          3. Raw per-sample files under ``KITTI_{TRAIN,VALID}_BOX/{POSITIVE,NEGATIVE}``,
             which are stacked and saved as the tier-2 bundles.
        After tier 2/3, positives with too few occupied points are filtered
        out and all four filtered arrays (plus the threshold used) are saved
        as the tier-1 caches.

        Side effects: populates ``self.TrainSet_POS/NEG`` and
        ``self.ValidSet_POS/NEG`` plus the four ``*_cube_cnt`` counters,
        creates cache directories, and writes ``.npy`` files.
        """
        # Ensure the unfiltered cache directory and file names exist.
        if not os.path.exists(path_add(self.path, 'data_in_one_piece')):
            os.mkdir(path_add(self.path, 'data_in_one_piece'))
        TrainSet_POS_file_name = path_add(self.path, 'data_in_one_piece',
                                          'TrainSet_POS.npy')
        TrainSet_NEG_file_name = path_add(self.path, 'data_in_one_piece',
                                          'TrainSet_NEG.npy')
        ValidSet_POS_file_name = path_add(self.path, 'data_in_one_piece',
                                          'ValidSet_POS.npy')
        ValidSet_NEG_file_name = path_add(self.path, 'data_in_one_piece',
                                          'ValidSet_NEG.npy')

        # Ensure the filtered cache directory and file names exist;
        # ``info_file_name`` stores the point threshold the caches were
        # built with, so it can be reported on reload.
        if not os.path.exists(path_add(self.path, 'filter_data_in_one_piece')):
            os.mkdir(path_add(self.path, 'filter_data_in_one_piece'))
        info_file_name = path_add(self.path, 'filter_data_in_one_piece',
                                  'information_about_files.npy')
        TrainSet_POS_filter_file_name = path_add(self.path,
                                                 'filter_data_in_one_piece',
                                                 'Filter_TrainSet_POS.npy')
        ValidSet_POS_filter_file_name = path_add(self.path,
                                                 'filter_data_in_one_piece',
                                                 'Filter_ValidSet_POS.npy')
        TrainSet_NEG_filter_file_name = path_add(self.path,
                                                 'filter_data_in_one_piece',
                                                 'Filter_TrainSet_NEG.npy')
        ValidSet_NEG_filter_file_name = path_add(self.path,
                                                 'filter_data_in_one_piece',
                                                 'Filter_ValidSet_NEG.npy')

        # Tier 1: all filtered caches present -> load them and return early.
        if os.path.exists(TrainSet_POS_filter_file_name) and os.path.exists(ValidSet_POS_filter_file_name) \
                and os.path.exists(TrainSet_NEG_filter_file_name) and os.path.exists(ValidSet_NEG_filter_file_name) \
                and os.path.exists(info_file_name):
            print(
                'Eating filtered data(Points more than {}) from npy zip file in folder:filter_data_in_one_piece ...'
                .format(darkyellow('[' + str(np.load(info_file_name)) + ']')))
            self.TrainSet_POS = np.load(TrainSet_POS_filter_file_name)
            self.TrainSet_NEG = np.load(TrainSet_NEG_filter_file_name)
            self.ValidSet_POS = np.load(ValidSet_POS_filter_file_name)
            self.ValidSet_NEG = np.load(ValidSet_NEG_filter_file_name)

            # Counters used elsewhere for batching/epoch bookkeeping.
            self.train_positive_cube_cnt = self.TrainSet_POS.shape[0]
            self.train_negative_cube_cnt = self.TrainSet_NEG.shape[0]
            self.valid_positive_cube_cnt = self.ValidSet_POS.shape[0]
            self.valid_negative_cube_cnt = self.ValidSet_NEG.shape[0]

            print(
                '  emmm,there are TP:{} TN:{} VP:{} VN:{} in my belly.'.format(
                    purple(str(self.TrainSet_POS.shape[0])),
                    purple(str(self.TrainSet_NEG.shape[0])),
                    purple(str(self.ValidSet_POS.shape[0])),
                    purple(str(self.ValidSet_NEG.shape[0])),
                ))

            return None

        # Tier 2: unfiltered bundled .npy files present -> load them.
        if os.path.exists(TrainSet_POS_file_name) and os.path.exists(TrainSet_NEG_file_name) \
                and os.path.exists(ValidSet_POS_file_name) and os.path.exists(ValidSet_NEG_file_name):
            print(blue('Let`s eating exiting data(without filter) !'))
            self.TrainSet_POS = np.load(TrainSet_POS_file_name)
            self.TrainSet_NEG = np.load(TrainSet_NEG_file_name)
            self.ValidSet_POS = np.load(ValidSet_POS_file_name)
            self.ValidSet_NEG = np.load(ValidSet_NEG_file_name)
        else:
            # Tier 3: read every raw per-sample .npy file, stack each group
            # into one uint8 array, and save the bundle for next time.
            # NOTE(review): assumes self.TrainSet_POS/NEG and
            # self.ValidSet_POS/NEG start out as lists (appendable) —
            # presumably initialized in __init__; confirm there.
            print(darkyellow('Let`s eating raw data onr by one !'))
            train_pos_name_list = sorted(
                os.listdir(path_add(self.path, 'KITTI_TRAIN_BOX', 'POSITIVE')))
            train_neg_name_list = sorted(
                os.listdir(path_add(self.path, 'KITTI_TRAIN_BOX', 'NEGATIVE')))
            valid_pos_name_list = sorted(
                os.listdir(path_add(self.path, 'KITTI_VALID_BOX', 'POSITIVE')))
            valid_neg_name_list = sorted(
                os.listdir(path_add(self.path, 'KITTI_VALID_BOX', 'NEGATIVE')))
            for name in train_pos_name_list:
                data = np.load(
                    path_add(self.path, 'KITTI_TRAIN_BOX', 'POSITIVE') + '/' +
                    name)
                self.TrainSet_POS.append(data)
            self.TrainSet_POS = np.array(self.TrainSet_POS, dtype=np.uint8)
            np.save(TrainSet_POS_file_name, self.TrainSet_POS)
            print('  Yummy!')

            for name in train_neg_name_list:
                data = np.load(
                    path_add(self.path, 'KITTI_TRAIN_BOX', 'NEGATIVE') + '/' +
                    name)
                self.TrainSet_NEG.append(data)
            self.TrainSet_NEG = np.array(self.TrainSet_NEG, dtype=np.uint8)
            np.save(TrainSet_NEG_file_name, self.TrainSet_NEG)

            print('  Take another piece!')

            for name in valid_pos_name_list:
                data = np.load(
                    path_add(self.path, 'KITTI_VALID_BOX', 'POSITIVE') + '/' +
                    name)
                self.ValidSet_POS.append(data)
            self.ValidSet_POS = np.array(self.ValidSet_POS, dtype=np.uint8)
            np.save(ValidSet_POS_file_name, self.ValidSet_POS)
            print('  One more!')

            for name in valid_neg_name_list:
                data = np.load(
                    path_add(self.path, 'KITTI_VALID_BOX', 'NEGATIVE') + '/' +
                    name)
                self.ValidSet_NEG.append(data)
            self.ValidSet_NEG = np.array(self.ValidSet_NEG, dtype=np.uint8)
            np.save(ValidSet_NEG_file_name, self.ValidSet_NEG)
            print('  I`m full ...')
            print('All data has been saved in zip npy file!')

        print(
            'There are TP:{} TN:{} VP:{} VN:{} and has been successfully eaten!'
            .format(self.TrainSet_POS.shape[0], self.TrainSet_NEG.shape[0],
                    self.ValidSet_POS.shape[0], self.ValidSet_NEG.shape[0]))

        # Filter positives: keep only cubes whose summed occupancy exceeds
        # the configured minimum point count. Negatives are saved unfiltered.
        print(
            darkyellow(
                'Filter the positive data which has less points({}) inside ... '
                .format(self.arg.positive_points_needed)))
        train_sum = np.array([
            self.TrainSet_POS[i].sum()
            for i in range(self.TrainSet_POS.shape[0])
        ])
        keep_mask1 = np.where(train_sum > self.arg.positive_points_needed)
        self.TrainSet_POS = self.TrainSet_POS[keep_mask1]
        np.save(TrainSet_POS_filter_file_name, self.TrainSet_POS)

        valid_sum = np.array([
            self.ValidSet_POS[i].sum()
            for i in range(self.ValidSet_POS.shape[0])
        ])
        keep_mask2 = np.where(valid_sum > self.arg.positive_points_needed)
        self.ValidSet_POS = self.ValidSet_POS[keep_mask2]
        np.save(ValidSet_POS_filter_file_name, self.ValidSet_POS)

        # Persist the (unfiltered) negatives and the threshold used, so the
        # tier-1 fast path can report it next run.
        np.save(ValidSet_NEG_filter_file_name, self.ValidSet_NEG)
        np.save(TrainSet_NEG_filter_file_name, self.TrainSet_NEG)
        np.save(info_file_name, self.arg.positive_points_needed)

        self.train_positive_cube_cnt = self.TrainSet_POS.shape[0]
        self.train_negative_cube_cnt = self.TrainSet_NEG.shape[0]
        self.valid_positive_cube_cnt = self.ValidSet_POS.shape[0]
        self.valid_negative_cube_cnt = self.ValidSet_NEG.shape[0]

        print(
            green(
                'Done! TrainPositive remain: {},ValidPositive remain: {} and has been saved'
            ).format(
                self.TrainSet_POS.shape[0],
                self.ValidSet_POS.shape[0],
            ))