Пример #1
0
    def train(self, samples):
        """Fit two random forests: one for left turns, one for right turns.

        The configured tree budget is split evenly between the two
        direction-specific regressors.
        """
        left, right = [], []
        for sample in samples:
            (left if self._is_left_turn(sample) else right).append(sample)

        trees_each = int(self.n_estimators / 2)
        self.regressor_l = sklearn.ensemble.RandomForestRegressor(n_estimators=trees_each)
        self.regressor_r = sklearn.ensemble.RandomForestRegressor(n_estimators=trees_each)

        # Same pipeline for both subsets: extract features, filter, fit.
        for regressor, subset in ((self.regressor_l, left), (self.regressor_r, right)):
            X, y = extract_features.get_matrices_from_samples(subset)
            X = filter_feature_matrix(X, self.features)
            regressor.fit(X, y)
Пример #2
0
    def predict_all_estimators(self, sample):
        """Get the prediction of every estimator separated.

        Returns one prediction array per tree of the fitted random forest
        (``self.regressor``) instead of the averaged ensemble prediction.
        """
        X, _ = extract_features.get_matrices_from_samples([sample])
        X = filter_feature_matrix(X, self.features)
        # Most of the code is directly copied from Scikit
        # (RandomForestRegressor.predict internals), minus the final
        # averaging step.  NOTE(review): relies on private sklearn helpers
        # (_partition_estimators, _parallel_helper) from an older sklearn
        # version — verify against the pinned sklearn release.
        # Check data
        check_is_fitted(self.regressor, 'n_outputs_')

        # Check data
        X = check_array(X, dtype=DTYPE, accept_sparse="csr")
        if issparse(X) and (X.indices.dtype != np.intc or
                            X.indptr.dtype != np.intc):
            raise ValueError("No support for np.int64 index based "
                             "sparse matrices")

        # Assign chunk of trees to jobs
        n_jobs, n_trees, starts = _partition_estimators(self.regressor.n_estimators,
                                                        self.regressor.n_jobs)

        # Parallel loop: each tree predicts independently; results are kept
        # separate rather than averaged.
        all_y_hat = Parallel(n_jobs=n_jobs, verbose=self.regressor.verbose,
                             backend="threading")(
            delayed(_parallel_helper)(e, 'predict', X, check_input=False)
            for e in self.regressor.estimators_)

        return all_y_hat
Пример #3
0
 def train(self, samples):
     """Fit the regressor on the feature matrix extracted from samples."""
     feature_matrix, targets = extract_features.get_matrices_from_samples(samples)
     feature_matrix = filter_feature_matrix(feature_matrix, self.features)
     if self.single_target_variable:
         # Flatten the multi-output targets into one target per (sample, step) row.
         self.steps = get_steps_from_sample(samples[0])
         feature_matrix, targets = make_single_target_variable(feature_matrix, self.steps, targets)
     self.regressor.fit(feature_matrix, targets)
Пример #4
0
 def predict(self, sample):
     """Predict using the regressor that matches the sample's turn direction."""
     features, _ = extract_features.get_matrices_from_samples([sample])
     features = filter_feature_matrix(features, self.features)
     regressor = self.regressor_l if self._is_left_turn(sample) else self.regressor_r
     return regressor.predict(features)[0]
Пример #5
0
 def predict(self, sample):
     """Predict a continuous value for every angle step of the sample."""
     features, _ = extract_features.get_matrices_from_samples([sample])
     features = filter_feature_matrix(features, self.features)
     # Repeat the single feature row once per angle step, then append the
     # angle itself as the last feature column.
     repeated = np.tile(features, (len(self.angle_steps), 1))
     with_angles = np.column_stack((repeated, self.angle_steps))
     bin_predictions = np.ravel(self.classifier.predict(with_angles))
     return self.bin_to_continuous(bin_predictions)
Пример #6
0
 def predict(self, sample):
     """Predict the target vector for a single sample."""
     features, _ = extract_features.get_matrices_from_samples([sample])
     features = filter_feature_matrix(features, self.features)
     if not self.single_target_variable:
         return self.regressor.predict(features)[0]
     # One row per step: predict each step separately, then fold the
     # per-step predictions back into a single multi-output row.
     features, _ = make_single_target_variable(features, self.steps)
     step_predictions = self.regressor.predict(features)
     return make_multiple_target_variable(step_predictions, self.steps)[0]
Пример #7
0
    def predict_proba_raw(self, sample):
        """Return the raw output of predicting the probability for each class.

        One row per angle step, one column per class in ``range(self.bin_num)``.
        Classes the classifier never saw during training get a zero column so
        the result always has ``self.bin_num`` columns.
        """
        X, _ = extract_features.get_matrices_from_samples([sample])
        X = filter_feature_matrix(X, self.features)
        # Repeat the feature row once per angle step and append the angle as
        # the last feature column (mirrors the layout used during training).
        X = np.tile(X, (len(self.angle_steps), 1))
        X = np.column_stack((X, self.angle_steps))

        y_pred = self.classifier.predict_proba(X)

        # Pad the probability array with zero columns for disregarded classes.
        # np.insert interprets multiple indices relative to the ORIGINAL
        # array, so each missing class index must be shifted down by the
        # number of missing classes that precede it.  Passing the raw class
        # indices (as before) raises an IndexError, or misplaces columns,
        # whenever more than one class is missing.
        missing_classes = sorted(i for i in range(self.bin_num)
                                 if i not in self.classifier.classes_)
        insert_positions = [c - offset for offset, c in enumerate(missing_classes)]
        y_pred = np.insert(y_pred, insert_positions, 0., axis=1)

        return y_pred
Пример #8
0
 def train(self, samples):
     """Fit the classifier, using the target angle as an extra input feature.

     Each sample's feature row is repeated once per angle step with the angle
     appended as the last feature column; the continuous targets are binned
     and flattened to match that row layout.
     """
     X, y = extract_features.get_matrices_from_samples(samples)
     X = filter_feature_matrix(X, self.features)
     # Parenthesized form works as a statement in Python 2 and a call in 3.
     print('Training classifier with %d samples...' % (len(X)))
     self.angle_steps = np.linspace(0., 180., np.shape(y)[1])
     steps = len(self.angle_steps)
     n_samples = np.shape(X)[0]
     X_new = np.zeros((n_samples * steps, np.shape(X)[1] + 1))
     # Introduce the angle as a new feature: one block of `steps` rows per
     # SAMPLE.  The original loop iterated over the angle steps instead of
     # the samples, so whenever len(samples) != steps it indexed the wrong
     # rows (leaving part of X_new zero, or running past the end of X).
     for i in range(n_samples):
         X_new[i * steps:(i + 1) * steps, :-1] = np.tile(X[i], (steps, 1))
         X_new[i * steps:(i + 1) * steps, -1] = self.angle_steps
     X = X_new
     self.min_radius = np.amin(y)
     self.max_radius = np.amax(y)
     # Row-major ravel of y matches the (sample-major, step-minor) row
     # layout built above.
     y = self.continuous_to_bin(np.ravel(y))
     self.classifier.fit(X, y)
Пример #9
0
 def predict(self, sample):
     """Return the regressor's prediction for a single sample."""
     feature_matrix, _ = extract_features.get_matrices_from_samples([sample])
     feature_matrix = filter_feature_matrix(feature_matrix, self.features)
     predictions = self.regressor.predict(feature_matrix)
     return predictions[0]
Пример #10
0
 def train(self, samples):
     """Build a fresh random forest and fit it on the given samples."""
     feature_matrix, targets = extract_features.get_matrices_from_samples(samples)
     feature_matrix = filter_feature_matrix(feature_matrix, self.features)
     self.regressor = sklearn.ensemble.RandomForestRegressor(
         n_estimators=self.n_estimators)
     self.regressor.fit(feature_matrix, targets)
Пример #11
0
def normalize_features(samples):
    """Normalize all the feature vectors in samples (row-wise, in place)."""
    feature_matrix, targets = get_matrices_from_samples(samples)
    normalized = sklearn.preprocessing.normalize(feature_matrix, axis=1, copy=False)
    return get_samples_from_matrices(normalized, targets, samples)