def decisionTreeRMR(self,featureMatrix,phishingURLLabel,fakeFeatureMatrix,fakeLabels,technique,OUTPUT_START):
        re = Resampling()
        dir_name = os.path.dirname(os.path.realpath(__file__))
        predictionResult = open(dir_name+'/F2_Scores/'+OUTPUT_START+'-'+technique+'F2_DecisionTreeResultsRMR.txt','a+')
        predictionResult.truncate()
        accuracy_matrix = []
        try:
            URL_Train, URL_Test, Label_Train, Label_Test = train_test_split(featureMatrix,phishingURLLabel,test_size=0.20,random_state=40)

            #----------- Analysis
            URL_Train = list(URL_Train)
            # for everyFeature in fakeFeatureMatrix:
            #     URL_Train.append(everyFeature)
            #
            # Label_Train = list(Label_Train)
            # for everyFakeLabel in fakeLabels:
            #     Label_Train.append(everyFakeLabel)

            print('Performing Oversampling')
            featureMatrix2, phishingLabel2 = re.RMROversampling(URL_Train, Label_Train)

            print('After Oversampling...')
            print('Total: ' + str(len(phishingLabel2)))
            print('Ratio: ')
            print(collections.Counter(phishingLabel2))

            parameters_DecisionTree = {'criterion': ('gini', 'entropy'), 'splitter': ('best', 'random')}
            estimator = DecisionTreeClassifier()
            # totalSamples = len(Label_Train)
            # positiveCount = int(Label_Train.count('1'))       #should be 65% of total
            # predictionResult.write("Percentage of positive samples in training phase: %.2f " % (positiveCount/float(totalSamples)))
            clf = GridSearchCV(estimator, parameters_DecisionTree, n_jobs=15,scoring=ftwo_scorer)
            URL_Test = list(URL_Test)
            featureMatrix2 = list(featureMatrix2)
            phishingLabel2 = list(phishingLabel2)
            clf.fit(featureMatrix2,phishingLabel2)
            result = clf.predict(URL_Test)
            print "Type of REsult is:"
            print type(result)
            predictionResult.write(str(result))
            predictionResult.flush()
            predictionResult.write("\nThe 1's are:" + str(collections.Counter(result)))
            predictionResult.flush()
            f1Score = fbeta_score(Label_Test, result, beta=my_beta, pos_label=1.0)
            predictionResult.write("\nThe f2_score is:" + str(f1Score))
            predictionResult.flush()
            # accuracy_matrix.append(f1Score)
            # accuracyScore = 0
            # aucMetric = 0
            # rocAucScore = 0
            # rocAucScoreMicro=0
            # rocAucScoreMacro=0
            # rocAucScoreWeighted=0
            # # accuracyScore = accuracy_score(Label_Test, result)
            # # aucMetric = auc(Label_Test, result, reorder=True)
            # rocAucScoreMicro = roc_auc_score(Label_Test, result,average='micro')
            # rocAucScoreMacro = roc_auc_score(Label_Test, result,average='macro')
            # rocAucScoreWeighted = roc_auc_score(Label_Test, result,average='weighted')
        except Exception as e:
            predictionResult.write(str(e))
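
The snippets in this listing score grid searches with ftwo_scorer and compute fbeta_score with my_beta, but never show those definitions. A minimal sketch of how such a scorer is typically built with scikit-learn, assuming beta=2 to match the F2_Scores output directory:

from sklearn.metrics import fbeta_score, make_scorer

my_beta = 2  # assumption: beta=2 weights recall over precision, matching the F2_Scores directory name
ftwo_scorer = make_scorer(fbeta_score, beta=my_beta)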
Example #2
    def rbmClassifyb1Smote(self, featureMatrix, phishingURLLabel):
        dir_name = os.path.dirname(os.path.realpath(__file__))
        predictionResult = open(dir_name + '/RBMResultsb1Smote.txt', 'a+')
        predictionResult.truncate()
        accuracy_matrix = []
        try:
            URL_Train, URL_Test, Label_Train, Label_Test = train_test_split(
                featureMatrix, phishingURLLabel, test_size=0.20)
            estimator = BernoulliRBM()
            svm = SVC()
            rm = Resampling()
            featureMatrix2, phishingLabel2 = rm.ADASYNOversampling(
                URL_Train, Label_Train)
            clf = Pipeline(steps=[('rbm', estimator), ('SVC', svm)])
            clf.fit(featureMatrix2, phishingLabel2)
            result = clf.predict(URL_Test)
            predictionResult.write(str(result))
            predictionResult.flush()
            predictionResult.write("\nThe 1's are:" +
                                   str(collections.Counter(result)))
            predictionResult.flush()
            f1Score = f1_score(Label_Test, result, average='macro')  # pos_label is ignored when average='macro'
            predictionResult.write("\nThe f1_score is:" + str(f1Score))
            predictionResult.flush()
            accuracy_matrix.append(f1Score)
        except Exception as e:
            predictionResult.write(str(e))

        predictionResult.write(
            "RBM Classification with Borderline-1 Smote Completed with Avg. Score: "
            + str(np.mean(accuracy_matrix)))
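
The Resampling helper these classifiers call for oversampling is not shown in this listing. A plausible sketch, assuming it wraps imbalanced-learn's oversamplers; in particular, reading RMROversampling as random minority oversampling with replacement is an assumption:

# Hypothetical sketch of the oversampling wrapper used above (not the original code).
from imblearn.over_sampling import ADASYN, BorderlineSMOTE, SVMSMOTE, RandomOverSampler

class Resampling:
    def ADASYNOversampling(self, X, y):
        return ADASYN().fit_resample(X, y)

    def b1smoteOversampling(self, X, y):
        return BorderlineSMOTE(kind='borderline-1').fit_resample(X, y)

    def SVMsmoteOversampling(self, X, y):
        return SVMSMOTE().fit_resample(X, y)

    def RMROversampling(self, X, y):
        return RandomOverSampler().fit_resample(X, y)  # assumption: RMR = random oversampling with replacement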
Example #3
    def mlpBoostb1SMOTE(self, featureMatrix, phishingURLLabel,
                        fakeFeatureMatrix, fakeLabels, technique,
                        OUTPUT_START):
        dir_name = os.path.dirname(os.path.realpath(__file__))
        predictionResult = open(
            dir_name + '/F1_Scores/' + OUTPUT_START + '-' + technique +
            'F1_MLPResultsSmoteb1.txt', 'a+')
        predictionResult.truncate()
        accuracy_matrix = []
        try:
            URL_Train, URL_Test, Label_Train, Label_Test = train_test_split(
                featureMatrix, phishingURLLabel, test_size=0.20)

            parameters_MLP = {
                'activation': ('identity', 'tanh', 'logistic', 'relu'),
                'solver': ('lbfgs', 'sgd', 'adam'),
                'learning_rate': ('constant', 'invscaling', 'adaptive'),
                'max_iter': [200, 300, 400, 500]
            }
            URL_Train = list(URL_Train)
            # for everyFeature in fakeFeatureMatrix:
            #     URL_Train.append(everyFeature)
            #
            # Label_Train = list(Label_Train)
            # for everyFakeLabel in fakeLabels:
            #     Label_Train.append(everyFakeLabel)
            estimator = MLPClassifier()
            rm = Resampling()
            featureMatrix2, phishingLabel2 = rm.b1smoteOversampling(
                URL_Train, Label_Train)
            clf = GridSearchCV(estimator,
                               parameters_MLP,
                               n_jobs=15,
                               scoring='accuracy')
            clf.fit(featureMatrix2, phishingLabel2)
            result = clf.predict(URL_Test)
            predictionResult.write(str(result))
            predictionResult.flush()
            #predictionResult.write("\nThe 1's are:" + str(collections.Counter(result)))
            predictionResult.flush()
            # f1Score = f1_score(Label_Test, result, pos_label=1.0)
            # predictionResult.write("\nThe f1_score is:" + str(f1Score))
            predictionResult.flush()
            # accuracy_matrix.append(f1Score)
            # accuracyScore = 0
            # aucMetric = 0
            # rocAucScore = 0
            # rocAucScoreMicro=0
            # rocAucScoreMacro=0
            # rocAucScoreWeighted=0
            # # accuracyScore = accuracy_score(Label_Test, result)
            # # aucMetric = auc(Label_Test, result, reorder=True)
            # rocAucScoreMicro = roc_auc_score(Label_Test, result,average='micro')
            # rocAucScoreMacro = roc_auc_score(Label_Test, result,average='macro')
            # rocAucScoreWeighted = roc_auc_score(Label_Test, result,average='weighted')
            accuracyScore = accuracy_score(Label_Test, result)
            predictionResult.write("\nThe accuracy_score is:" +
                                   str(accuracyScore))
        except Exception as e:
            predictionResult.write(str(e))
    def decisionTreeRMR(self,featureMatrix,phishingURLLabel,fakeFeatureMatrix,fakeLabels,technique):
        re = Resampling()
        dir_name = os.path.dirname(os.path.realpath(__file__))
        predictionResult = open(dir_name+'/'+technique+'DecisionTreeResultsRMR.txt','a+')
        predictionResult.truncate()
        accuracy_matrix = []
        try:
            URL_Train, URL_Test, Label_Train, Label_Test = train_test_split(featureMatrix,phishingURLLabel,test_size=0.20,random_state=40)

            #----------- Analysis
            URL_Train = list(URL_Train)
            for everyFeature in fakeFeatureMatrix:
                URL_Train.append(everyFeature)

            Label_Train = list(Label_Train)
            for everyFakeLabel in fakeLabels:
                Label_Train.append(everyFakeLabel)

            print('Train Test Split:')
            print('Training Values:')
            print('Total: ' + str(len(Label_Train)))
            print('Phishy: ' + str(list(Label_Train).count(1)))
            print('Non Phishy: ' + str(list(Label_Train).count(0)))

            print('Testing Values:')
            print('Total: ' + str(len(Label_Test)))
            print('Phishy: ' + str(list(Label_Test).count(1)))
            print('Non Phishy: ' + str(list(Label_Test).count(0)))
            print('Performing Oversampling')
            featureMatrix2, phishingLabel2 = re.RMROversampling(URL_Train, Label_Train)

            print('After Oversampling...')
            print('Total: ' + str(len(phishingLabel2)))
            print('Ratio: ')
            print(collections.Counter(phishingLabel2))

            parameters_DecisionTree = {'criterion': ('gini', 'entropy'), 'splitter': ('best', 'random')}
            estimator = DecisionTreeClassifier()
            # totalSamples = len(Label_Train)
            # positiveCount = int(Label_Train.count('1'))       #should be 65% of total
            # predictionResult.write("Percentage of positive samples in training phase: %.2f " % (positiveCount/float(totalSamples)))
            clf = GridSearchCV(estimator, parameters_DecisionTree, n_jobs=1)
            clf.fit(featureMatrix2,phishingLabel2)
            result = clf.predict(URL_Test)
            #print "Type of REsult is:"
            #print type(result)
            predictionResult.write(str(result))
            predictionResult.flush()
            predictionResult.write("\nThe 1's are:" + str(collections.Counter(result)))
            predictionResult.flush()
            f1Score = f1_score(Label_Test, result, average='macro')  # pos_label is ignored when average='macro'
            predictionResult.write("\nThe f1_score is:" + str(f1Score))
            predictionResult.flush()
            accuracy_matrix.append(f1Score)
        except Exception as e:
            predictionResult.write(str(e))

        predictionResult.write("Decision Tree with RMR Classification Completed with Avg. Score: " + str(np.mean(accuracy_matrix)))
        print('Decision Tree with RMR Classification Completed with Avg. Score: ' + str(np.mean(accuracy_matrix)))
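
Note that when the try block raises, accuracy_matrix stays empty and np.mean([]) returns nan with a RuntimeWarning. A small guard, sketched here rather than taken from the source, makes the summary line explicit:

avg_score = np.mean(accuracy_matrix) if accuracy_matrix else float('nan')  # guard: np.mean([]) is nan + RuntimeWarning
predictionResult.write("Decision Tree with RMR Classification Completed with Avg. Score: " + str(avg_score))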
    def KNNADASYN(self, featureMatrix, phishingURLLabel, fakeFeatureMatrix,
                  fakeLabels, technique, OUTPUT_START):
        dir_name = os.path.dirname(os.path.realpath(__file__))
        predictionResult = open(
            dir_name + '/F2_Scores/' + OUTPUT_START + '-' + technique +
            'F2_KNNResultsADASYN.txt', 'a+')
        predictionResult.truncate()
        accuracy_matrix = []
        try:
            URL_Train, URL_Test, Label_Train, Label_Test = train_test_split(
                featureMatrix, phishingURLLabel, test_size=0.20)
            parameters_KNN = {
                'n_neighbors': [5, 10, 30, 50],
                'algorithm': ('auto', 'ball_tree', 'kd_tree')
            }
            URL_Train = list(URL_Train)
            # for everyFeature in fakeFeatureMatrix:
            #     URL_Train.append(everyFeature)
            #
            # Label_Train = list(Label_Train)
            # for everyFakeLabel in fakeLabels:
            #     Label_Train.append(everyFakeLabel)

            estimator = KNN_Classifier()
            rm = Resampling()
            featureMatrix2, phishingLabel2 = rm.ADASYNOversampling(
                URL_Train, Label_Train)
            clf = GridSearchCV(estimator,
                               parameters_KNN,
                               n_jobs=15,
                               scoring=ftwo_scorer)
            clf.fit(featureMatrix2, phishingLabel2)
            result = clf.predict(URL_Test)
            predictionResult.write(str(result))
            predictionResult.flush()
            predictionResult.write("\nThe 1's are:" +
                                   str(collections.Counter(result)))
            predictionResult.flush()
            f1Score = fbeta_score(Label_Test,
                                  result,
                                  beta=my_beta,
                                  pos_label=1.0)
            predictionResult.write("\nThe f2_score is:" + str(f1Score))
            predictionResult.flush()
            # accuracy_matrix.append(f1Score)
            # accuracyScore = 0
            # aucMetric = 0
            # rocAucScore = 0
            # rocAucScoreMicro=0
            # rocAucScoreMacro=0
            # rocAucScoreWeighted=0
            # accuracyScore = accuracy_score(Label_Test, result)
            # aucMetric = auc(Label_Test, result, reorder=True)
            # rocAucScoreMicro = roc_auc_score(Label_Test, result,average='micro')
            # rocAucScoreMacro = roc_auc_score(Label_Test, result,average='macro')
            # rocAucScoreWeighted = roc_auc_score(Label_Test, result,average='weighted')
        except Exception as e:
            predictionResult.write(str(e))
    def randomForestRMR(self, featureMatrix, phishingURLLabel,
                        fakeFeatureMatrix, fakeLabels, technique,
                        OUTPUT_START):
        dir_name = os.path.dirname(os.path.realpath(__file__))
        predictionResult = open(
            dir_name + '/F1_Scores/' + OUTPUT_START + '-' + technique +
            'F1_RandomForestResultsRMR.txt', 'a+')
        predictionResult.truncate()
        accuracy_matrix = []
        try:
            URL_Train, URL_Test, Label_Train, Label_Test = train_test_split(
                featureMatrix, phishingURLLabel, test_size=0.20)

            URL_Train = list(URL_Train)
            # for everyFeature in fakeFeatureMatrix:
            #     URL_Train.append(everyFeature)
            #
            # Label_Train = list(Label_Train)
            # for everyFakeLabel in fakeLabels:
            #     Label_Train.append(everyFakeLabel)
            parameters_RandomForest = {
                'n_estimators': [10, 100, 1000],
                'criterion': ('gini', 'entropy'),
                'oob_score': (True, False),
                'warm_start': (True, False)
            }
            estimator = RandomForestClassifier()
            rm = Resampling()
            featureMatrix2, phishingLabel2 = rm.RMROversampling(
                URL_Train, Label_Train)
            clf = GridSearchCV(estimator,
                               parameters_RandomForest,
                               n_jobs=15,
                               scoring=ftwo_scorer)
            clf.fit(featureMatrix2, phishingLabel2)
            result = clf.predict(URL_Test)
            predictionResult.write(str(result))
            predictionResult.flush()
            predictionResult.write("\nThe 1's are:" +
                                   str(collections.Counter(result)))
            predictionResult.flush()
            f1Score = f1_score(Label_Test, result, pos_label=1.0)
            predictionResult.write("\nThe f1_score is:" + str(f1Score))
            predictionResult.flush()
            # accuracy_matrix.append(f1Score)
            # accuracyScore = 0
            # aucMetric = 0
            # rocAucScore = 0
            # rocAucScoreMicro=0
            # rocAucScoreMacro=0
            # rocAucScoreWeighted=0
            # # accuracyScore = accuracy_score(Label_Test, result)
            # # aucMetric = auc(Label_Test, result, reorder=True)
            # rocAucScoreMicro = roc_auc_score(Label_Test, result,average='micro')
            # rocAucScoreMacro = roc_auc_score(Label_Test, result,average='macro')
            # rocAucScoreWeighted = roc_auc_score(Label_Test, result,average='weighted')
        except Exception as e:
            predictionResult.write(str(e))
Example #7
def main():
    X_bar = np.array([[-94.234001, -139.953995, -1.342158, 0.025863],
                      [-94.234001, -139.953995, -1.342158, 0.079745],
                      [-94.234001, -139.953995, -1.342158, 0.13982],
                      [-94.234001, -139.953995, -1.342158, 0.200218]])
    print(X_bar.shape)
    print(X_bar[0, 3])
    sampleObj = Resampling()
    X_bar_resampled = sampleObj.low_variance_sampler(X_bar)
    print(X_bar_resampled)
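
The low_variance_sampler exercised by this demo belongs to the particle-filter Resampling class (unrelated to the oversampling helper above) and is not shown in the listing. A minimal sketch of the standard low-variance (systematic) resampler it presumably implements, assuming X_bar is a [num_particles x 4] array with the weight in column 3:

import numpy as np

class Resampling:
    def low_variance_sampler(self, X_bar):
        num_particles = X_bar.shape[0]
        weights = X_bar[:, 3] / np.sum(X_bar[:, 3])    # normalize weights
        X_new = np.zeros_like(X_bar)
        r = np.random.uniform(0, 1.0 / num_particles)  # single random offset
        c = weights[0]
        i = 0
        for m in range(num_particles):
            u = r + m / num_particles                  # equally spaced pointers
            while u > c:
                i += 1
                c += weights[i]
            X_new[m] = X_bar[i]                        # keep the particle the pointer lands on
        return X_new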
    def supportVectorSMOTE(self, featureMatrix, phishingURLLabel,
                           fakeFeatureMatrix, fakeLabels, technique,
                           OUTPUT_START):
        dir_name = os.path.dirname(os.path.realpath(__file__))
        predictionResult = open(
            dir_name + '/' + OUTPUT_START + '-' + technique +
            'SVMResultsSmote.txt', 'a+')
        predictionResult.truncate()
        accuracy_matrix = []
        try:
            URL_Train, URL_Test, Label_Train, Label_Test = train_test_split(
                featureMatrix, phishingURLLabel, test_size=0.20)

            URL_Train = list(URL_Train)
            for everyFeature in fakeFeatureMatrix:
                URL_Train.append(everyFeature)

            Label_Train = list(Label_Train)
            for everyFakeLabel in fakeLabels:
                Label_Train.append(everyFakeLabel)
            parameters_SVC = {
                'C': [1.0, 10.0, 30.0],
                'kernel': ('rbf', 'sigmoid', 'linear'),
                'probability': (True, False),
                'shrinking': (True, False),
                'decision_function_shape': ('ovo', 'ovr')
            }
            # totalSamples = len(Label_Train)
            # positiveCount = int(Label_Train.count('1'))  # should be 65% of total
            # predictionResult.write("Percentage of positive samples in training phase: %.2f " % (positiveCount / float(totalSamples)))
            estimator = SVC()
            clf = GridSearchCV(estimator, parameters_SVC, n_jobs=15)
            rm = Resampling()
            featureMatrix2, phishingLabel2 = rm.ADASYNOversampling(
                URL_Train, Label_Train)
            clf.fit(featureMatrix2, phishingLabel2)
            result = clf.predict(URL_Test)
            predictionResult.write(str(result))
            predictionResult.flush()
            predictionResult.write("\nThe 1's are:" +
                                   str(collections.Counter(result)))
            predictionResult.flush()
            f1Score = f1_score(Label_Test, result, average='macro')  # pos_label is ignored when average='macro'
            predictionResult.write("\nThe f1_score is:" + str(f1Score))
            predictionResult.flush()
            accuracy_matrix.append(f1Score)
        except Exception as e:
            predictionResult.write(str(e))

        predictionResult.write(
            "SVM Classification with Smote Completed with Avg. Score: " +
            str(np.mean(accuracy_matrix)))
Example #9
 def adaBoostSVMSmote(self, featureMatrix, phishingURLLabel,
                      fakeFeatureMatrix, fakeLabels, technique,
                      OUTPUT_START):
     dir_name = os.path.dirname(os.path.realpath(__file__))
     predictionResult = open(
         dir_name + '/F1_Scores/' + OUTPUT_START + '-' + technique +
         'F1_AdaBoostResultsSVMSmote.txt', 'a+')
     predictionResult.truncate()
     accuracy_matrix = []
     try:
         URL_Train, URL_Test, Label_Train, Label_Test = train_test_split(
             featureMatrix, phishingURLLabel, test_size=0.20)
         parameters_adaBoost = {
             'n_estimators': [50, 100, 1000],
             'algorithm': ('SAMME', 'SAMME.R')
         }
         URL_Train = list(URL_Train)
         # for everyFeature in fakeFeatureMatrix:
         #     URL_Train.append(everyFeature)
         #
         # Label_Train = list(Label_Train)
         # for everyFakeLabel in fakeLabels:
         #     Label_Train.append(everyFakeLabel)
         estimator = AdaBoostClassifier()
         rm = Resampling()
         featureMatrix2, phishingLabel2 = rm.SVMsmoteOversampling(
             URL_Train, Label_Train)
         clf = GridSearchCV(estimator, parameters_adaBoost, n_jobs=15)
         clf.fit(featureMatrix2, phishingLabel2)
         result = clf.predict(URL_Test)
         predictionResult.write(str(result))
         predictionResult.flush()
         predictionResult.write("\nThe 1's are:" +
                                str(collections.Counter(result)))
         predictionResult.flush()
         f1Score = f1_score(Label_Test, result, pos_label=1.0)
         predictionResult.write("\nThe f1_score is:" + str(f1Score))
         predictionResult.flush()
         # accuracy_matrix.append(f1Score)
         # accuracyScore = 0
         # aucMetric = 0
         # rocAucScore = 0
         # rocAucScoreMicro=0
         # rocAucScoreMacro=0
         # rocAucScoreWeighted=0
         # # accuracyScore = accuracy_score(Label_Test, result)
         # # aucMetric = auc(Label_Test, result, reorder=True)
         # rocAucScoreMicro = roc_auc_score(Label_Test, result,average='micro')
         # rocAucScoreMacro = roc_auc_score(Label_Test, result,average='macro')
         # rocAucScoreWeighted = roc_auc_score(Label_Test, result,average='weighted')
     except Exception as e:
         predictionResult.write(str(e))
Example #10
    def supportVectorADASYN(self, featureMatrix, phishingURLLabel,
                            fakeFeatureMatrix, fakeLabels, technique):
        dir_name = os.path.dirname(os.path.realpath(__file__))
        predictionResult = open(
            dir_name + '/' + technique + 'SVMResultsADASyn.txt', 'a+')
        predictionResult.truncate()
        accuracy_matrix = []
        try:
            URL_Train, URL_Test, Label_Train, Label_Test = train_test_split(
                featureMatrix, phishingURLLabel, test_size=0.20)

            URL_Train = list(URL_Train)
            for everyFeature in fakeFeatureMatrix:
                URL_Train.append(everyFeature)

            Label_Train = list(Label_Train)
            for everyFakeLabel in fakeLabels:
                Label_Train.append(everyFakeLabel)
            parameters_SVC = {
                'C': [1.0, 10.0, 15.0, 20.0, 22.0, 30.0],
                'kernel': ('rbf', 'sigmoid', 'linear'),
                'probability': (True, False),
                'shrinking': (True, False),
                'decision_function_shape': ('ovo', 'ovr')
            }

            estimator = SVC()
            clf = GridSearchCV(estimator, parameters_SVC, n_jobs=8)
            rm = Resampling()
            featureMatrix2, phishingLabel2 = rm.ADASYNOversampling(
                URL_Train, Label_Train)
            clf.fit(featureMatrix2, phishingLabel2)
            result = clf.predict(URL_Test)
            predictionResult.write(str(result))
            predictionResult.flush()
            predictionResult.write("\nThe 1's are:" +
                                   str(collections.Counter(result)))
            predictionResult.flush()
            f1Score = f1_score(Label_Test, result, average='macro')  # pos_label is ignored when average='macro'
            predictionResult.write("\nThe f1_score is:" + str(f1Score))
            predictionResult.flush()
            accuracy_matrix.append(f1Score)
        except Exception as e:
            predictionResult.write(str(e))

        predictionResult.write(
            "SVM Classification with ADASYN Completed with Avg. Score: " +
            str(np.mean(accuracy_matrix)))
    def decisionTreeNoOversampling(self,featureMatrix,phishingURLLabel,fakeFeatureMatrix,fakeLabels,technique,OUTPUT_START):
        re = Resampling()
        dir_name = os.path.dirname(os.path.realpath(__file__))
        predictionResult = open(dir_name+'/F2_Scores/'+OUTPUT_START+'-'+technique+'F2_DecisionTreeResultsNoOversampling.txt','a+')
        predictionResult.truncate()
        accuracy_matrix = []
        try:
            URL_Train, URL_Test, Label_Train, Label_Test = train_test_split(featureMatrix,phishingURLLabel,test_size=0.20)
            print('Original length of URL_Train is:')
            print(len(URL_Train))
            print(URL_Test.shape)
            print(URL_Train.shape)
            print(Label_Train.shape)
            parameters_DecisionTree = {'criterion': ('gini', 'entropy'), 'splitter': ('best', 'random')}
            estimator = DecisionTreeClassifier()
            clf = GridSearchCV(estimator, parameters_DecisionTree, n_jobs=15,scoring=ftwo_scorer)


            clf.fit(URL_Train,Label_Train)
            result = clf.predict(URL_Test)
            predictionResult.write(str(result))
            predictionResult.flush()
            # predictionResult.write("\nThe 1's are:" + str(collections.Counter(result)))
            predictionResult.flush()
            f1Score = fbeta_score(Label_Test, result, beta=my_beta, pos_label=1.0)
            predictionResult.write("\nThe f2_score is:" + str(f1Score))
            predictionResult.flush()
        except Exception as e:
            predictionResult.write(str(e))
    def adaBoostADASYN(self, featureMatrix, phishingURLLabel,
                       fakeFeatureMatrix, fakeLabels, technique):
        dir_name = os.path.dirname(os.path.realpath(__file__))
        predictionResult = open(
            dir_name + '/' + technique + 'AdaBoostResultsADASYN.txt', 'a+')
        predictionResult.truncate()
        accuracy_matrix = []
        try:
            URL_Train, URL_Test, Label_Train, Label_Test = train_test_split(
                featureMatrix, phishingURLLabel, test_size=0.20)
            parameters_adaBoost = {
                'n_estimators': [50, 100, 1000],
                'algorithm': ('SAMME', 'SAMME.R')
            }
            URL_Train = list(URL_Train)
            for everyFeature in fakeFeatureMatrix:
                URL_Train.append(everyFeature)

            Label_Train = list(Label_Train)
            for everyFakeLabel in fakeLabels:
                Label_Train.append(everyFakeLabel)

            estimator = AdaBoostClassifier()
            rm = Resampling()
            featureMatrix2, phishingLabel2 = rm.ADASYNOversampling(
                URL_Train, Label_Train)
            clf = GridSearchCV(estimator, parameters_adaBoost, n_jobs=8)
            clf.fit(featureMatrix2, phishingLabel2)
            result = clf.predict(URL_Test)
            predictionResult.write(str(result))
            predictionResult.flush()
            predictionResult.write("\nThe 1's are:" +
                                   str(collections.Counter(result)))
            predictionResult.flush()
            f1Score = f1_score(Label_Test, result, average='macro')  # pos_label is ignored when average='macro'
            predictionResult.write("\nThe f1_score is:" + str(f1Score))
            predictionResult.flush()
            accuracy_matrix.append(f1Score)
        except Exception as e:
            predictionResult.write(str(e))

        predictionResult.write(
            "Ada Boost Classification with ADASYN Completed with Avg. Score: "
            + str(np.mean(accuracy_matrix)))
    def gaussianBoostb1SMOTE(self, featureMatrix, phishingURLLabel, fakeFeatureMatrix, fakeLabels, technique, OUTPUT_START):
        dir_name = os.path.dirname(os.path.realpath(__file__))
        predictionResult = open(dir_name + '/F2_Scores/' + OUTPUT_START + '-' + technique + 'F2_GaussianResultsSmoteb1.txt', 'a+')
        predictionResult.truncate()
        accuracy_matrix = []
        try:
            URL_Train, URL_Test, Label_Train, Label_Test = train_test_split(featureMatrix, phishingURLLabel,
                                                                            test_size=0.20)

            URL_Train = list(URL_Train)
            # for everyFeature in fakeFeatureMatrix:
            #     URL_Train.append(everyFeature)
            #
            # Label_Train = list(Label_Train)
            # for everyFakeLabel in fakeLabels:
            #     Label_Train.append(everyFakeLabel)
            #estimator = AdaBoostClassifier()
            rm = Resampling()
            featureMatrix2, phishingLabel2 = rm.b1smoteOversampling(URL_Train, Label_Train)
            clf = GaussianProcessClassifier(n_jobs=15)
            clf.fit(featureMatrix2, phishingLabel2)
            result = clf.predict(URL_Test)
            predictionResult.write(str(result))
            predictionResult.flush()
            predictionResult.write("\nThe 1's are:" + str(collections.Counter(result)))
            predictionResult.flush()
            f1Score = fbeta_score(Label_Test, result, beta=my_beta, pos_label=1.0)
            predictionResult.write("\nThe f2_score is:" + str(f1Score))
            predictionResult.flush()
            # accuracy_matrix.append(f1Score)
            # accuracyScore = 0
            # aucMetric = 0
            # rocAucScore = 0
            # rocAucScoreMicro=0
            # rocAucScoreMacro=0
            # rocAucScoreWeighted=0
            # # accuracyScore = accuracy_score(Label_Test, result)
            # # aucMetric = auc(Label_Test, result, reorder=True)
            # rocAucScoreMicro = roc_auc_score(Label_Test, result,average='micro')
            # rocAucScoreMacro = roc_auc_score(Label_Test, result,average='macro')
            # rocAucScoreWeighted = roc_auc_score(Label_Test, result,average='weighted')
        except Exception as e:
            predictionResult.write(str(e))
    def decisionTreeNoOversampling(self,featureMatrix,phishingURLLabel,fakeFeatureMatrix,fakeLabels,technique):
        re = Resampling()
        dir_name = os.path.dirname(os.path.realpath(__file__))
        predictionResult = open(dir_name+'/'+technique+'DecisionTreeResultsNoOversampling.txt','a+')
        predictionResult.truncate()
        accuracy_matrix = []
        try:
            URL_Train, URL_Test, Label_Train, Label_Test = train_test_split(featureMatrix,phishingURLLabel,test_size=0.20)
            print('Original length of URL_Train is:')
            print(len(URL_Train))
            #----------- Analysis

            URL_Train = list(URL_Train)
            for everyFeature in fakeFeatureMatrix:
                URL_Train.append(everyFeature)

            Label_Train = list(Label_Train)
            for everyFakeLabel in fakeLabels:
                Label_Train.append(everyFakeLabel)

            URL_Train = np.array(URL_Train, dtype='double')
            Label_Train = np.array(Label_Train, dtype='double')
            print('Appended fake features to the training set. New length of training is:')
            print(len(URL_Train))
            print(len(Label_Train))

            print('Train Test Split:')
            print('Training Values:')
            print('Total: ' + str(len(Label_Train)))
            print('Phishy: ' + str(list(Label_Train).count(1)))
            print('Non Phishy: ' + str(list(Label_Train).count(0)))

            print('Testing Values:')
            print('Total: ' + str(len(Label_Test)))
            print('Phishy: ' + str(list(Label_Test).count(1)))
            print('Non Phishy: ' + str(list(Label_Test).count(0)))
            parameters_DecisionTree = {'criterion': ('gini', 'entropy'), 'splitter': ('best', 'random')}
            estimator = DecisionTreeClassifier()
            clf = GridSearchCV(estimator, parameters_DecisionTree, n_jobs=1)
            clf.fit(URL_Train,Label_Train)
            result = clf.predict(URL_Test)
            predictionResult.write(str(result))
            predictionResult.flush()
            predictionResult.write("\nThe 1's are:" + str(collections.Counter(result)))
            predictionResult.flush()
            f1Score = f1_score(Label_Test, result, average='macro')  # pos_label is ignored when average='macro'
            predictionResult.write("\nThe f1_score is:" + str(f1Score))
            predictionResult.flush()
            accuracy_matrix.append(f1Score)
        except Exception as e:
            predictionResult.write(str(e))

        predictionResult.write("Decision Tree Classification without Oversampling Completed with Avg. Score: " + str(np.mean(accuracy_matrix)))
        print('Decision Tree Classification without Oversampling Completed with Avg. Score: ' + str(np.mean(accuracy_matrix)))
    def randomForestb1SMOTE(self,featureMatrix,phishingURLLabel,fakeFeatureMatrix,fakeLabels,technique):
        dir_name = os.path.dirname(os.path.realpath(__file__))
        predictionResult = open(dir_name + '/'+technique+'RandomForestResultsb1Smote.txt', 'a+')
        predictionResult.truncate()
        accuracy_matrix = []
        try:
            URL_Train, URL_Test, Label_Train, Label_Test = train_test_split(featureMatrix, phishingURLLabel,
                                                                            test_size=0.20)
            parameters_RandomForest = {'n_estimators': [10, 100, 1000], 'criterion': ('gini', 'entropy'),
                                       'oob_score': (True, False), 'warm_start': (True, False)}

            URL_Train = list(URL_Train)
            for everyFeature in fakeFeatureMatrix:
                URL_Train.append(everyFeature)

            Label_Train = list(Label_Train)
            for everyFakeLabel in fakeLabels:
                Label_Train.append(everyFakeLabel)
            estimator = RandomForestClassifier()
            rm = Resampling()
            featureMatrix2,phishingLabel2 = rm.b1smoteOversampling(URL_Train,Label_Train)
            clf = GridSearchCV(estimator, parameters_RandomForest, n_jobs=8)
            clf.fit(featureMatrix2, phishingLabel2)
            result = clf.predict(URL_Test)
            predictionResult.write(str(result))
            predictionResult.flush()
            predictionResult.write("\nThe 1's are:" + str(collections.Counter(result)))
            predictionResult.flush()
            f1Score = f1_score(Label_Test, result, average='macro')  # pos_label is ignored when average='macro'
            predictionResult.write("\nThe f1_score is:" + str(f1Score))
            predictionResult.flush()
            accuracy_matrix.append(f1Score)
        except Exception as e:
            predictionResult.write(str(e))

        predictionResult.write("Random Forest Classification with Borderline-1 SMOTE Completed with Avg. Score: " + str(np.mean(accuracy_matrix)))
def main():

    src_path_map = '../data/map/wean.dat'
    src_path_log = '../data/log/robotdata1.log'

    map_obj = MapReader(src_path_map)
    occupancy_map = map_obj.get_map()
    logfile = open(src_path_log, 'r')

    motion_model = MotionModel()
    sensor_model = SensorModel(occupancy_map)
    resampler = Resampling()

    num_particles = 1
    #X_bar = np.transpose(np.array([5.75300000e+03,  1.71200000e+03, -4.57011189e-01]))
    X_bar = init_particles_random(num_particles, occupancy_map)
    print(X_bar.shape)
    vis_flag = 1

    first_time_idx = True

    x_est_odom = []
    y_est_odom = []

    for time, line in enumerate(logfile):

        meas_type = line[0]  # L : laser scan measurement, O : odometry measurement
        meas_vals = np.fromstring(line[2:], dtype=np.float64, sep=' ')  # convert measurement values from string to double

        odometry_robot = meas_vals[0:3]  # odometry reading [x, y, theta] in odometry frame
        time_stamp = meas_vals[-1]

        if (meas_type == "L"):
            odometry_laser = meas_vals[
                3:6]  # [x, y, theta] coordinates of laser in odometry frame
            ranges = meas_vals[
                6:-1]  # 180 range measurement values from single laser scan

        if (first_time_idx):
            u_t0 = odometry_robot
            first_time_idx = False
            continue

        X_bar_new = np.zeros((num_particles, 4), dtype=np.float64)
        u_t1 = odometry_robot
        flag = 0
        for m in range(0, num_particles):
            #print("First",X_bar.shape)
            x_t0 = X_bar[0][0:3]
            #print(x_t0.shape)
            x_t1 = motion_model.update(u_t0, u_t1, x_t0)
            print("1---------", x_t1)
            #input()
            if (meas_type == "L"):
                z_t = ranges
                #w_t=1
                w_t = sensor_model.beam_range_finder_model(z_t, x_t1)
                # flag=1;
                # break
                # w_t = 1/num_particles
                print("2-----------------", X_bar_new)
                X_bar_new[m, :] = np.hstack((x_t1, w_t))
            else:
                X_bar_new[m, :] = np.hstack((x_t1, X_bar[m, 3]))

            print("3-------------------", X_bar_new)
            #print("Second",x_t1.shape)
            #print("Threee",X_bar_new.shape)
        #if(flag==1):
        #break
        print("4-------------------------", X_bar_new)
        X_bar = X_bar_new
        print("5--------------------------", X_bar)
        u_t0 = u_t1

        x_est_odom.append(X_bar[0][0])
        y_est_odom.append(X_bar[0][1])

    plt.imshow(occupancy_map, cmap='Greys')
    #plt.show()
    #plt.subplot(1,2,1)
    plt.draw()
    plt.subplot(1, 2, 1)
    plt.plot(x_est_odom, y_est_odom)
    plt.show()
Example #17
def main():
    """
    Description of variables used
    u_t0 : particle state odometry reading [x, y, theta] at time (t-1) [odometry_frame]
    u_t1 : particle state odometry reading [x, y, theta] at time t [odometry_frame]
    x_t0 : particle state belief [x, y, theta] at time (t-1) [world_frame]
    x_t1 : particle state belief [x, y, theta] at time t [world_frame]
    X_bar : [num_particles x 4] sized array containing [x, y, theta, wt] values for all particles
    z_t : array of 180 range measurements for each laser scan
    """
    """
    Initialize Parameters
    """
    src_path_map = '../data/map/wean.dat'
    src_path_log = '../data/log/robotdata2.log'
    map_obj = MapReader(src_path_map)
    occupancy_map = map_obj.get_map()

    vis_flag = 1
    if vis_flag:
        visualize_map(occupancy_map)

    logfile = open(src_path_log, 'r')
    occupancy_map = binary_map(occupancy_map)
    motion_model = MotionModel()
    sensor_model = SensorModel(occupancy_map)
    resampler = Resampling()

    num_particles = 500
    X_bar = init_particles_random(num_particles, occupancy_map)

    print('X_bar shape:', X_bar.shape)

    ##    Monte Carlo Localization Algorithm : Main Loop
    first_time_idx = True
    for time_idx, line in enumerate(logfile):
        start_time = time.time()
        meas_type = line[0]  # L : laser scan measurement, O : odometry measurement
        if (meas_type == "L"):
            meas_vals = np.fromstring(line[2:], dtype=np.float64, sep=' ')
            odometry_robot = meas_vals[0:3]
            time_stamp = meas_vals[-1]
            odometry_laser = meas_vals[3:6]  # [x, y, theta] coordinates of laser in odometry frame
            ranges = meas_vals[6:-1]
            if (first_time_idx):
                u_t0 = odometry_robot
                first_time_idx = False
                continue
            u_t1 = odometry_robot

            if sum(abs(u_t1[:2] - u_t0[:2])) < 1 and abs(u_t1[2] - u_t0[2]) < (3.14 / 20):
                print('skip')  # skip time steps where the robot hasn't moved
                continue
            if vis_flag:
                visualize_timestep(X_bar, time_idx, occupancy_map)
            # use multiprocessing for the per-particle motion + sensor update
            args = [[X_bar[m], u_t0, u_t1, ranges, motion_model, sensor_model]
                    for m in range(0, num_particles)]
            with multiprocessing.Pool(processes=16) as p:
                res = p.starmap(combine_motion_sensor, args)
            X_bar = np.asarray(res)
            # np.save('log2_Xbar/'+str(time_idx)+'.npy',X_bar) # save Xbar for accelerate display
            u_t0 = u_t1

            ## RESAMPLING
            X_bar = resampler.low_variance_sampler(X_bar)

            end_time = time.time()
            print("Processing time step " + str(time_idx) + " at time " +
                  str(time_stamp) + "s")
            print('time_cost for 1 time step:', end_time - start_time)
            if time_idx > 300:  # stop display when time_idx>300, for saving time
                vis_flag = 0
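
The combine_motion_sensor function dispatched to the multiprocessing pool above is not defined in this listing. A hypothetical sketch inferred from the call site; the argument names and the returned row layout are assumptions:

import numpy as np

def combine_motion_sensor(particle, u_t0, u_t1, ranges, motion_model, sensor_model):
    # propagate one particle through the motion model, then reweight it with the sensor model
    x_t1 = motion_model.update(u_t0, u_t1, particle[0:3])
    w_t = sensor_model.beam_range_finder_model(ranges, x_t1)
    return np.hstack((x_t1, w_t))  # [x, y, theta, weight] row for X_bar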
def main():
    """
    Description of variables used
    u_t0 : particle state odometry reading [x, y, theta] at time (t-1) [odometry_frame]
    u_t1 : particle state odometry reading [x, y, theta] at time t [odometry_frame]
    x_t0 : particle state belief [x, y, theta] at time (t-1) [world_frame]
    x_t1 : particle state belief [x, y, theta] at time t [world_frame]
    X_bar : [num_particles x 4] sized array containing [x, y, theta, wt] values for all particles
    z_t : array of 180 range measurements for each laser scan
    """
    """
    Initialize Parameters
    """
    src_path_map = '../data/map/wean.dat'
    src_path_log = '../data/log/robotdata1.log'

    map_obj = MapReader(src_path_map)
    occupancy_map = map_obj.get_map()
    logfile = open(src_path_log, 'r')

    motion_model = MotionModel()
    sensor_model = SensorModel(occupancy_map)
    resampler = Resampling()

    #num_particles = 500
    num_particles = 100
    X_bar = init_particles_random(num_particles, occupancy_map)
    #X_bar = np.array([[5000, 4000, np.pi,1]])
    #X_bar[0,0:3]

    vis_flag = 1
    """
    Monte Carlo Localization Algorithm : Main Loop
    """
    if vis_flag:
        visualize_map(occupancy_map)

    first_time_idx = True
    for time_idx, line in enumerate(logfile):

        # Read a single 'line' from the log file (can be either odometry or laser measurement)
        meas_type = line[0]  # L : laser scan measurement, O : odometry measurement
        meas_vals = np.fromstring(line[2:], dtype=np.float64, sep=' ')  # convert measurement values from string to double

        odometry_robot = meas_vals[0:3]  # odometry reading [x, y, theta] in odometry frame
        time_stamp = meas_vals[-1]

        # if ((time_stamp <= 0.0) | (meas_type == "O")): # ignore pure odometry measurements for now (faster debugging)
        # continue

        if (meas_type == "L"):
            odometry_laser = meas_vals[
                3:6]  # [x, y, theta] coordinates of laser in odometry frame
            ranges = meas_vals[
                6:-1]  # 180 range measurement values from single laser scan

        #print "Processing time step " + str(time_idx) + " at time " + str(time_stamp) + "s"

        if (first_time_idx):
            u_t0 = odometry_robot
            first_time_idx = False
            continue

        X_bar_new = np.zeros((num_particles, 4), dtype=np.float64)
        u_t1 = odometry_robot
        for m in range(0, num_particles):
            """
            MOTION MODEL
            """
            x_t0 = X_bar[m, 0:3]
            x_t1 = motion_model.update(u_t0, u_t1, x_t0)
            """
            SENSOR MODEL
            """
            if (meas_type == "L"):
                z_t = ranges
                print("in sensor model")
                w_t = sensor_model.beam_range_finder_model(z_t, x_t1)
                print("outside sensor model")
                print(w_t)
                #input()
                # w_t = 1/num_particles
                X_bar_new[m, :] = np.hstack((x_t1, w_t))
            else:
                X_bar_new[m, :] = np.hstack((x_t1, X_bar[m, 3]))

        X_bar = X_bar_new
        u_t0 = u_t1

        # """
        # RESAMPLING
        # """
        X_bar = resampler.low_variance_sampler(X_bar)

        # fig = plt.figure()
        # # plt.switch_backend('TkAgg')
        # print("Bot-pos",X_bar[9,0:2])
        # #print("Xbar",X_bar)
        # mng = plt.get_current_fig_manager();  # mng.resize(*mng.window.maxsize())
        # plt.ion(); plt.imshow(occupancy_map, cmap='Greys'); plt.axis([0, 800, 0, 800]);
        # plt.plot(X_bar[:,0]/10.0,X_bar[:,1]/10.0,'o',color='red')
        # plt.plot(x_t1[0]/10.0,x_t1[1]/10.0,'o',color='yellow')
        # plt.show()
        # plt.pause(1)
        # plt.close()

        if vis_flag:
            visualize_timestep(X_bar, time_idx)
Example #19
def main():

    """
    Description of variables used
    u_t0 : particle state odometry reading [x, y, theta] at time (t-1) [odometry_frame]   
    u_t1 : particle state odometry reading [x, y, theta] at time t [odometry_frame]
    x_t0 : particle state belief [x, y, theta] at time (t-1) [world_frame]
    x_t1 : particle state belief [x, y, theta] at time t [world_frame]
    X_bar : [num_particles x 4] sized array containing [x, y, theta, wt] values for all particles
    z_t : array of 180 range measurements for each laser scan
    """

    """
    Initialize Parameters
    """
    src_path_map = '../data/map/wean.dat'
    src_path_log = '../data/log/robotdata3.log'

    map_obj = MapReader(src_path_map)
    occupancy_map = map_obj.get_map() 
    logfile = open(src_path_log, 'r')
    #lookup = np.load('lookup_zero.npy')

    motion_model = MotionModel()
    sensor_model = SensorModel(occupancy_map, lookup_flag=True)
    resampler = Resampling()

    num_particles = 1000
    #X_bar = init_particles_random(num_particles, occupancy_map)
    X_bar = init_particles_freespace(num_particles, occupancy_map)
    #X_bar = np.array([[6500,1500,1*np.pi/2,1]])

    vis_flag = True
    count = 0

    """
    Monte Carlo Localization Algorithm : Main Loop
    """
    if vis_flag:
        visualize_map(occupancy_map)

    first_time_idx = True
    for time_idx, line in enumerate(logfile):
        #vis_flag = count % 1 == 0
        #vis_flag = False
        
        count += 1
            
        # Read a single 'line' from the log file (can be either odometry or laser measurement)
        meas_type = line[0] # L : laser scan measurement, O : odometry measurement
        meas_vals = np.fromstring(line[2:], dtype=np.float64, sep=' ') # convert measurement values from string to double

        odometry_robot = meas_vals[0:3] # odometry reading [x, y, theta] in odometry frame
        time_stamp = meas_vals[-1]

        if ((time_stamp <= 0.0)): # ignore pure odometry measurements for now (faster debugging) 
        #if ((time_stamp <= 0.0) or meas_type == "O"): # ignore pure odometry measurements for now (faster debugging) 
            continue

        if (meas_type == "L"):
             odometry_laser = meas_vals[3:6] # [x, y, theta] coordinates of laser in odometry frame
             ranges = meas_vals[6:-1] # 180 range measurement values from single laser scan
        
        print("Processing time step " + str(time_idx) + " at time " + str(time_stamp) + "s")

        if (first_time_idx):
            u_t0 = odometry_robot
            first_time_idx = False
            continue

        X_bar_new = np.zeros( (num_particles,4), dtype=np.float64)
        u_t1 = odometry_robot
        num_rays = sensor_model.num_rays
        if meas_type == "O":
            xqs, yqs, xms, yms = None, None, None, None
        elif meas_type == "L":
            xqs = np.zeros((num_particles,num_rays))
            yqs = np.zeros((num_particles,num_rays))
            xms = np.zeros((num_particles,num_rays))
            yms = np.zeros((num_particles,num_rays))
        for m in range(0, num_particles):
            #print(m)
            """
            MOTION MODEL
            
            """
            x_t0 = X_bar[m, 0:3]
            #print('before motion: {}, {}, {}'.format(x_t0[0], x_t0[1], 180/np.pi*x_t0[2]))
            x_t1 = motion_model.update(u_t0, u_t1, x_t0)
            #print('after motion: {}, {}, {}'.format(x_t1[0], x_t1[1], 180/np.pi*x_t1[2]))
            x = int(x_t1[0]/10)
            y = int(x_t1[1]/10)
            
            if occupancy_map[y, x] != 0 and meas_type == "L":  # particle left known free space: zero its weight
                #print('dumping particle')
                w_t = 0
                X_bar_new[m, :] = np.hstack((x_t1, w_t))
                continue

            """
            SENSOR MODEL
            """
            
            if (meas_type == "L"):
                z_t = ranges
                w_t, probs, z_casts, xqs[m], yqs[m], xms[m], yms[m] = sensor_model.beam_range_finder_model(z_t, x_t1)
                X_bar_new[m,:] = np.hstack((x_t1, w_t))
            else:
                X_bar_new[m,:] = np.hstack((x_t1, X_bar[m,3]))
            #print(w_t)
        X_bar = X_bar_new
        u_t0 = u_t1

        if vis_flag:
            #visualize_timestep(X_bar, time_idx, xqs, yqs, xms, yms)
            visualize_timestep(X_bar, time_idx)

        """
        RESAMPLING
        """
        if (meas_type == "L"):
            X_bar = resampler.low_variance_sampler(X_bar)
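
init_particles_freespace, used by several of these mains, is also absent from the listing. A hedged sketch consistent with the 10 cm/cell map indexing seen above; the real helper may differ:

import numpy as np

def init_particles_freespace(num_particles, occupancy_map):
    free_y, free_x = np.where(occupancy_map == 0)            # cells the map marks as free
    idx = np.random.randint(0, len(free_x), num_particles)
    x = free_x[idx] * 10.0                                   # cell index -> world coordinates (cm)
    y = free_y[idx] * 10.0
    theta = np.random.uniform(-np.pi, np.pi, num_particles)  # random headings
    w = np.ones(num_particles) / num_particles               # uniform initial weights
    return np.column_stack((x, y, theta, w))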
Example #20
# -----------------initialize variables-------------------
alpha1 = 0
alpha2 = 0
alpha3 = 0  # increasing these two variables appears to make the particles move farther
alpha4 = 0

mot = MotionModel(alpha1, alpha2, alpha3, alpha4)

M = 1000  # number of particles

map = MapBuilder('../map/wean.dat')
mapList = map.getMap()
mapInit(mapList)

sensorModel = SensorModel('../map/wean.dat')
resampler = Resampling()

# ------------initialize particles throughout map--------
# generate list of locations with '0' value
counter = 0
goodLocs_x = []
goodLocs_y = []
for i in range(0, 800):
    for j in range(0, 800):
        if mapList[i][j] == 0:
            counter = counter + 1
            goodLocs_x.append(i)
            goodLocs_y.append(j)

# randomly select from those locations with '0' value
p_x = []
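# (hypothetical continuation; the original snippet is truncated at this point)
# one plausible next step: draw M particles uniformly from the free-space
# locations gathered above, assuming numpy is available as np as in the
# neighboring examples
p_y = []
theta = []
for _ in range(M):
    idx = np.random.randint(0, counter)             # counter == len(goodLocs_x)
    p_x.append(goodLocs_x[idx])
    p_y.append(goodLocs_y[idx])
    theta.append(np.random.uniform(-np.pi, np.pi))  # random initial heading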
Example #21
from sklearn.pipeline import Pipeline
from CaricaDati import CaricaPickles
from CreaDB2 import DBWhole
from OpenRaw import OpenRaw
from Resampling import Resampling
from MessaInDepth import MessaInDepth
from joblib import dump, load
import matplotlib.gridspec as grds
from Metrics import SignalStats
# This program performs automatic depth matching by loading a dataset and applying a 2-step shift using 2 neural networks. The shift probabilities are visualized
# in a subplot
listaMov=[1.5,-1.25,9.5,0.4,0.25,4] # These are the shift values sampled using the Depth matching program
#Loading test dataset
N=90
[Cores2,Logs2]=OpenRaw(2,0) 
[RicCores2,RicLogs]=Resampling(Logs2,Cores2,N,0) 
BLIND=DBWhole(RicCores2,RicLogs,N,listaMov,10) 
Xt=BLIND[:,0:-1]
yt=BLIND[:,-1]

#Loading models
BestPipe=load('./MLModels/MigliorMod90BT2Final')# NN for bulk shift
BestPiperef=load('./MLModels/MigliorMod90BT2Final') # NN for refined shift

# Prediction of the original and transformed signal
yp2=BestPipe.predict_proba(Xt)
Div=int((BLIND.shape[1]-1)/2-1)
Xt3=-Xt
yprib=BestPipe.predict_proba(Xt3)
Xt4=np.concatenate([np.flip(Xt[:,0:Div+1],axis=1),np.flip(Xt[:,Div+1:Div*2+2],axis=1)],axis=1)
ypflip=BestPipe.predict_proba(Xt4)
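
The source does not show how these three probability matrices are combined; one simple possibility (an assumption, shown for illustration only) is to average them as a form of test-time augmentation:

yp_mean = (yp2 + yprib + ypflip) / 3.0     # average predictions over the three signal views
shift_class = np.argmax(yp_mean, axis=1)   # most probable shift class per sample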
Example #22
def main():
    """
    Description of variables used
    u_t0 : particle state odometry reading [x, y, theta] at time (t-1) [odometry_frame]   
    u_t1 : particle state odometry reading [x, y, theta] at time t [odometry_frame]
    x_t0 : particle state belief [x, y, theta] at time (t-1) [world_frame]
    x_t1 : particle state belief [x, y, theta] at time t [world_frame]
    X_bar : [num_particles x 4] sized array containing [x, y, theta, wt] values for all particles
    z_t : array of 180 range measurements for each laser scan
    """
    """
    Initialize Parameters
    """
    src_path_map = '../data/map/wean.dat'
    src_path_log = '../data/log/robotdata1.log'

    map_obj = MapReader(src_path_map)
    occupancy_map = map_obj.get_map()
    logfile = open(src_path_log, 'r')

    motion_model = MotionModel()
    sensor_model = SensorModel(occupancy_map)
    resampler = Resampling()

    num_particles = 700
    X_bar = init_particles_freespace(num_particles, occupancy_map)

    vis_flag = 1
    """
    Monte Carlo Localization Algorithm : Main Loop
    """
    if vis_flag:
        visualize_map(occupancy_map)

    first_time_idx = True
    for time_idx, line in enumerate(logfile):

        # Read a single 'line' from the log file (can be either odometry or laser measurement)
        meas_type = line[0]  # L : laser scan measurement, O : odometry measurement
        meas_vals = np.fromstring(line[2:], dtype=np.float64, sep=' ')  # convert measurement values from string to double

        odometry_robot = meas_vals[0:3]  # odometry reading [x, y, theta] in odometry frame
        time_stamp = meas_vals[-1]

        # if ((time_stamp <= 0.0) | (meas_type == "O")): # ignore pure odometry measurements for now (faster debugging)
        # continue

        if (meas_type == "L"):
            odometry_laser = meas_vals[
                3:6]  # [x, y, theta] coordinates of laser in odometry frame
            ranges = meas_vals[
                6:-1]  # 180 range measurement values from single laser scan

        print("Processing time step " + str(time_idx) + " at time " +
              str(time_stamp) + "s")

        if (first_time_idx):
            u_t0 = odometry_robot
            first_time_idx = False
            continue

        X_bar_new = np.zeros((num_particles, 4), dtype=np.float64)
        u_t1 = odometry_robot
        for m in range(0, num_particles):
            """
            MOTION MODEL
            """
            x_t0 = X_bar[m, 0:3]
            x_t1 = motion_model.update(u_t0, u_t1, x_t0)
            """
            SENSOR MODEL
            """
            if (meas_type == "L"):
                z_t = ranges
                w_t = sensor_model.beam_range_finder_model(z_t, x_t1)
                X_bar_new[m, :] = np.hstack((x_t1, w_t))
            else:
                X_bar_new[m, :] = np.hstack((x_t1, X_bar[m, 3]))

        X_bar = X_bar_new
        u_t0 = u_t1
        """
        RESAMPLING
        """
        X_bar = resampler.low_variance_sampler(X_bar)

        if vis_flag:
            # meas_vals[3:6] - [x, y, theta] coordinates of laser in odometry frame
            # meas_vals[6:-1] # 180 range measurement values from single laser scan
            visualize_timestep(X_bar, time_idx)
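
The MotionModel.update call at the heart of these loops is not included in the listing. A hedged sketch of the standard odometry motion model it presumably follows (Probabilistic Robotics, Table 5.6); the alpha noise parameters and the real class internals are assumptions:

import numpy as np

def odometry_motion_update(u_t0, u_t1, x_t0, alphas=(0.01, 0.01, 0.01, 0.01)):
    a1, a2, a3, a4 = alphas
    # decompose the odometry increment into rotation, translation, rotation
    rot1 = np.arctan2(u_t1[1] - u_t0[1], u_t1[0] - u_t0[0]) - u_t0[2]
    trans = np.hypot(u_t1[0] - u_t0[0], u_t1[1] - u_t0[1])
    rot2 = u_t1[2] - u_t0[2] - rot1
    # sample noisy versions of each component (zero-mean Gaussian noise)
    rot1_h = rot1 - np.random.normal(0, np.sqrt(a1 * rot1**2 + a2 * trans**2))
    trans_h = trans - np.random.normal(0, np.sqrt(a3 * trans**2 + a4 * (rot1**2 + rot2**2)))
    rot2_h = rot2 - np.random.normal(0, np.sqrt(a1 * rot2**2 + a2 * trans**2))
    # apply the noisy increment to the particle's world-frame pose
    return np.array([x_t0[0] + trans_h * np.cos(x_t0[2] + rot1_h),
                     x_t0[1] + trans_h * np.sin(x_t0[2] + rot1_h),
                     x_t0[2] + rot1_h + rot2_h])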
Example #23
def main():
    """
    Description of variables used
    u_t0 : particle state odometry reading [x, y, theta] at time (t-1) [odometry_frame]   
    u_t1 : particle state odometry reading [x, y, theta] at time t [odometry_frame]
    x_t0 : particle state belief [x, y, theta] at time (t-1) [world_frame]
    x_t1 : particle state belief [x, y, theta] at time t [world_frame]
    X_bar : [num_particles x 4] sized array containing [x, y, theta, wt] values for all particles
    z_t : array of 180 range measurements for each laser scan
    """
    """
    Initialize Parameters
    """
    src_path_map = '../data/map/wean.dat'
    src_path_log = '../data/log/robotdata1.log'

    map_obj = MapReader(src_path_map)
    occupancy_map = map_obj.get_map()
    logfile = open(src_path_log, 'r')

    motion_model = MotionModel()
    sensor_model = SensorModel(occupancy_map)
    resampler = Resampling()

    num_particles = 3000
    og_num_particles = num_particles
    sumd = 0
    # ---------------------------------------------------
    # Create intial set of particles
    X_bar = init_particles_freespace(num_particles, occupancy_map)

    # Useful for debugging, places particles near correct starting area for log1
    #X_bar = init_debug(num_particles)
    # ---------------------------------------------------

    vis_flag = 1

    # ---------------------------------------------------
    # Weights are dummy weights for testing motion model
    w0_vals = np.ones((1, num_particles), dtype=np.float64)
    w_t = w0_vals / num_particles
    #----------------------------------------------------
    """
    Monte Carlo Localization Algorithm : Main Loop
    """
    if vis_flag:
        visualize_map(occupancy_map)

    iter_num = 0
    first_time_idx = True
    for time_idx, line in enumerate(logfile):

        # Read a single 'line' from the log file (can be either odometry or laser measurement)
        meas_type = line[0]  # L : laser scan measurement, O : odometry measurement
        meas_vals = np.fromstring(line[2:], dtype=np.float64, sep=' ')  # convert measurement values from string to double

        odometry_robot = meas_vals[0:3]  # odometry reading [x, y, theta] in odometry frame
        # print "odometry_robot = ", odometry_robot
        time_stamp = meas_vals[-1]

        #if ((time_stamp <= 0.0) | (meas_type == "O")): # ignore pure odometry measurements for now (faster debugging)
        #continue

        if (meas_type == "L"):
            odometry_laser = meas_vals[3:6]  # [x, y, theta] coordinates of laser in odometry frame
            ranges = meas_vals[6:-1]  # 180 range measurement values from single laser scan

        print "Processing time step " + str(time_idx) + " at time " + str(time_stamp) + "s"

        if (first_time_idx):
            u_t0 = odometry_robot
            first_time_idx = False
            continue

        X_bar_new = np.zeros((num_particles, 4), dtype=np.float64)
        u_t1 = odometry_robot

        yd = u_t1[1] - u_t0[1]
        xd = u_t1[0] - u_t0[0]
        d = math.sqrt(pow(xd, 2) + pow(yd, 2))
        if d < 1.0:
            visualize_timestep(X_bar, time_idx, time_idx)
            continue
        if d > 20:  # lost robot
            print('\nROBOT IS LOST!!!\nResetting particles...\n')
            X_bar = init_particles_freespace(og_num_particles, occupancy_map)
            num_particles = og_num_particles
            X_bar_new = np.zeros((num_particles, 4), dtype=np.float64)
            u_t0 = u_t1
            visualize_timestep(X_bar, time_idx, time_idx)
            sumd = 0
        else:
            sumd = sumd + d

        for m in range(0, num_particles):
            """
            MOTION MODEL
            """

            x_t0 = X_bar[m, 0:3]
            x_t1 = motion_model.update(u_t0, u_t1, x_t0)
            #motion_last = math.sqrt((x_t1[0,1]-x_t0[0,1])**2 +  (x_t1[0,0]-x_t0[0,0])**2)

            # ---------------------------------------------------
            # For testing Motion Model
            # X_bar_new[m,:] = np.hstack((x_t1, w_t))
            # ---------------------------------------------------
            """
            SENSOR MODEL
            """
            if (meas_type == "L"):
                z_t = ranges
                x_l1 = motion_model.laser_position(odometry_laser, u_t1, x_t1)
                #print w_t.shape
                w_t = sensor_model.beam_range_finder_model(z_t, x_l1)
                # #print w_t.shape
                # if w_t > 0.0 and X_bar[m,3] > 0.0:
                #     w_new = math.log(X_bar[m,3]) + math.log(w_t)
                #     w_new = math.exp(w_new)
                # else:
                #      w_new = 0.0
                X_bar_new[m, :] = np.hstack((x_t1, [[w_t]]))
                #time.sleep(10)
            else:
                X_bar_new[m, :] = np.hstack((x_t1, [[X_bar[m, 3]]]))

        # sorted_particles = X_bar[X_bar[:,3].argsort()]
        # print(sorted_particles[499,3])

        X_bar = X_bar_new

        u_t0 = u_t1
        X_bar[:, 3] = X_bar[:, 3] / sum(X_bar[:, 3])
        """
        RESAMPLING
        """

        if sumd > 10.0:
            # X_bar = resampler.low_variance_sampler_rand(X_bar, occupancy_map)
            sumd = 0
            if X_bar[:, 3].var() < 9.0e-8 and num_particles > 500:
                num_particles = num_particles - 300
                print 'Adapting particles\nCurrent particle size = ', num_particles
            elif X_bar[:, 3].var() < 1.0e-7 and num_particles > 300:
                num_particles = num_particles - 100
                print 'Adapting particles\nCurrent particle size = ', num_particles

            # if num_particles < og_num_particles and X_bar[:,3].var() > 5.0e-7:
            #     num_particles = num_particles + 100
            #     print 'Adapting particles\nCurrent particle size = ', num_particles

            X_bar = resampler.low_variance_sampler(X_bar, num_particles)
            #print X_bar[:,3].var()

        if vis_flag:
            visualize_timestep(X_bar, time_idx, time_idx)
Example #24
def main():
    """
    Description of variables used
    u_t0 : particle state odometry reading [x, y, theta] at time (t-1) [odometry_frame]
    u_t1 : particle state odometry reading [x, y, theta] at time t [odometry_frame]
    x_t0 : particle state belief [x, y, theta] at time (t-1) [world_frame]
    x_t1 : particle state belief [x, y, theta] at time t [world_frame]
    X_bar : [num_particles x 4] sized array containing [x, y, theta, wt] values for all particles
    z_t : array of 180 range measurements for each laser scan
    """
    """
    Initialize Parameters
    """
    src_path_map = '../data/map/wean.dat'
    src_path_log = '../data/log/robotdata1.log'

    map_obj = MapReader(src_path_map)
    occupancy_map = map_obj.get_map()
    logfile = open(src_path_log, 'r')

    motion_model = MotionModel()
    sensor_model = SensorModel(occupancy_map)
    resampler = Resampling()

    num_particles = 100
    time_period = 10
    # X_bar = init_particles_random(num_particles, occupancy_map)
    X_bar = init_particles_freespace(num_particles, occupancy_map)
    vis_flag = 1
    vis_type = 'mean'  # {mean, max}
    """
    Monte Carlo Localization Algorithm : Main Loop
    """
    if vis_flag:
        visualize_map(occupancy_map)

    first_time_idx = True
    count = 0
    for time_idx, line in enumerate(logfile):
        # if time_idx % 9 != 0: continue

        # Read a single 'line' from the log file (can be either odometry or laser measurement)
        meas_type = line[0]  # L : laser scan measurement, O : odometry measurement
        meas_vals = np.fromstring(line[2:], dtype=np.float64, sep=' ')  # convert measurement values from string to double

        odometry_robot = meas_vals[0:3]  # odometry reading [x, y, theta] in odometry frame
        time_stamp = meas_vals[-1]

        if ((time_stamp <= 0.0) | (meas_type == "O")):  # ignore pure odometry measurements for now (faster debugging)
            continue

        count = count + 1

        if (meas_type == "L"):
            odometry_laser = meas_vals[3:6]  # [x, y, theta] coordinates of laser in odometry frame
            ranges = meas_vals[6:-1]  # 180 range measurement values from single laser scan

        # print "Processing time step " + str(time_idx) + " at time " + str(time_stamp) + "s"

        if (first_time_idx):
            u_t0 = odometry_robot
            first_time_idx = False
            continue

        X_bar_new = np.zeros((num_particles, 4), dtype=np.float64)
        u_t1 = odometry_robot

        for m in range(0, num_particles):
            """
            MOTION MODEL
            """
            x_t0 = X_bar[m, 0:3]
            x_t1 = motion_model.update(u_t0, u_t1, x_t0)
            """
            SENSOR MODEL
            """
            if (meas_type == "L"):
                z_t = ranges
                w_t = sensor_model.beam_range_finder_model(z_t, x_t1)
                # w_t = 1/num_particles
                # X_bar_new[m,:] = np.hstack((x_t1, w_t))
                new_wt = X_bar[m, 3] * motion_model.give_prior(
                    x_t1, u_t1, x_t0, u_t0)
                X_bar_new[m, :] = np.hstack((x_t1, new_wt))
            else:
                X_bar_new[m, :] = np.hstack((x_t1, X_bar[m, 3]))

        if (vis_type == 'max'):
            best_particle_idx = np.argmax(X_bar_new, axis=0)[-1]
            vis_particle = X_bar_new[best_particle_idx][:-1]
        elif (vis_type == 'mean'):
            # ipdb.set_trace()
            X_weighted = X_bar_new[:, :3] * X_bar_new[:, 3:4]
            X_mean = np.sum(X_weighted, axis=0)
            vis_particle = X_mean / sum(X_bar_new[:, 3:4])

        # print(X_bar_new[:,-1].T)
        sensor_model.visualization = True
        sensor_model.plot_measurement = True
        # sensor_model.beam_range_finder_model(ranges, vis_particle)
        sensor_model.visualization = False
        sensor_model.plot_measurement = False

        X_bar = X_bar_new
        u_t0 = u_t1
        """
        RESAMPLING
        """
        #if(np.mean(x_t1 - x_t0) > 0.2):
        X_bar = resampler.low_variance_sampler(X_bar)
        add_particles = num_particles / 5
        # time_period = 10

        if (count % time_period == 0 or sum(X_bar[:, -1]) == 0):
            X_bar_re_init = init_particles_freespace(add_particles,
                                                     occupancy_map)
            X_bar[:, -1] = 1.0 / (num_particles + add_particles)
            X_bar_re_init[:, -1] = 1.0 / (num_particles + add_particles)
            X_bar = np.concatenate((X_bar, X_bar_re_init), axis=0)
            num_particles = X_bar.shape[0]
            print num_particles

        if (count % 100 == 0):
            time_period = time_period * 5

        # X_bar = resampler.multinomial_sampler(X_bar)
        # check if importance too low
        # thres = 1e-29
        # indices = np.where(X_bar[:,3] > thres)[0]
        # print(X_bar.shape[0] - indices.shape[0])
        # temp = init_particles_freespace(X_bar.shape[0] - indices.shape[0], occupancy_map)
        # X_bar = np.concatenate((X_bar[indices], temp), axis = 0)
        # X_bar[:,-1] = 1.0/num_particles

        if vis_flag:
            visualize_timestep(X_bar, time_idx)
Example #25
def main():
    """
	Description of variables used
	u_t0 : particle state odometry reading [x, y, theta] at time (t-1) [odometry_frame]
	u_t1 : particle state odometry reading [x, y, theta] at time t [odometry_frame]
	x_t0 : particle state belief [x, y, theta] at time (t-1) [world_frame]
	x_t1 : particle state belief [x, y, theta] at time t [world_frame]
	X_bar : [num_particles x 4] sized array containing [x, y, theta, wt] values for all particles
	z_t : array of 180 range measurements for each laser scan
	"""
    """
	Initialize Parameters
	"""
    src_path_map = '../data/map/wean.dat'
    src_path_log = '../data/log/robotdata2.log'

    map_obj = MapReader(src_path_map)
    occupancy_map = map_obj.get_map()
    logfile = open(src_path_log, 'r')

    motion_model = MotionModel()
    sensor_model = SensorModel(occupancy_map)
    resampler = Resampling()

    num_particles = 300
    X_bar = init_particles_random(num_particles, occupancy_map)

    vis_flag = 1

    print("a1: ",motion_model.alpha_1,"\na2: ",motion_model.alpha_2,"\na3: ",\
       motion_model.alpha_3,"\na4: ",motion_model.alpha_4,"\nstdv: ",sensor_model.stdDevHit, \
    "\nlamd: ", sensor_model.lambdaShort,"\nzhit: ",sensor_model.zHit,"\nzsht: ",sensor_model.zShort, \
    "\nzmax: ", sensor_model.zMax, "\nzrnd: ", sensor_model.zRand, "\ncert: ", sensor_model.certainty, \
    "\nlsub: ", sensor_model.laserSubsample)
    """
	Monte Carlo Localization Algorithm : Main Loop
	"""
    if vis_flag:
        visualize_map(occupancy_map)

    first_time_idx = True
    #initialize worker threads
    p = ThreadPool(15)
    for time_idx, line in enumerate(logfile):
        # Read a single 'line' from the log file (can be either odometry or laser measurement)
        meas_type = line[0]  # L : laser scan measurement, O : odometry measurement
        meas_vals = np.fromstring(line[2:], dtype=np.float64, sep=' ')  # convert measurement values from string to double
        odometry_robot = meas_vals[0:3]  # odometry reading [x, y, theta] in odometry frame
        time_stamp = meas_vals[-1]

        #if ((time_stamp <= 0.0) | (meas_type == "O")): # ignore pure odometry measurements for now (faster debugging)
        #	continue

        if (meas_type == "L"):
            odometry_laser = meas_vals[3:6]  # [x, y, theta] coordinates of laser in odometry frame
            ranges = meas_vals[6:-1]  # 180 range measurement values from single laser scan

        print("Processing time step " + str(time_idx) + " at time " +
              str(time_stamp) + "s")

        if (first_time_idx):
            u_t0 = odometry_robot
            first_time_idx = False
            continue

        X_bar_new = np.zeros((num_particles, 4), dtype=np.float64)
        u_t1 = odometry_robot
        # X_bar.shape[0] (length) decreases from 500 to 499 after time step 1

        #PARALLELIZED MOTION MODEL

        x_t0Arr = [[u_t0, u_t1, X_bar[m, 0:3]]
                   for m in range(0, X_bar.shape[0])]
        x_t1Arr = p.map(motion_model.par_update, x_t0Arr)

        #PARALLELIZED SENSOR MODEL

        if (meas_type == "L"):
            z_t = ranges
            sensInArr = [[z_t, x_t1Arr[m]] for m in range(0, X_bar.shape[0])]
            w_tArr = p.map(sensor_model.par_beam_range_finder_model, sensInArr)
            for m in range(0, X_bar.shape[0]):
                X_bar_new[m, :] = np.hstack((x_t1Arr[m], w_tArr[m]))
        else:
            for m in range(0, X_bar.shape[0]):
                X_bar_new[m, :] = np.hstack((x_t1Arr[m], X_bar[m, 3]))


        # Serial version, kept for reference (replaced by the parallel map above):
        # for m in range(0, X_bar.shape[0]):
        #     # MOTION MODEL
        #     x_t0 = X_bar[m, 0:3]
        #     x_t1 = motion_model.update(u_t0, u_t1, x_t0)
        #
        #     # SENSOR MODEL
        #     if (meas_type == "L"):
        #         z_t = ranges
        #         w_t = sensor_model.beam_range_finder_model(z_t, x_t1)
        #         # w_t = 1/num_particles
        #         X_bar_new[m, :] = np.hstack((x_t1, w_t))
        #     else:
        #         X_bar_new[m, :] = np.hstack((x_t1, X_bar[m, 3]))
        X_bar = X_bar_new
        u_t0 = u_t1
        """
		RESAMPLING
		"""
        X_bar = resampler.low_variance_sampler(X_bar)

        if vis_flag:
            visualize_timestep(X_bar, time_idx)
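A note on Example #25's parallelism: the ThreadPool import is not shown in the snippet; it is presumably multiprocessing.pool.ThreadPool, which uses threads rather than processes, so any real speedup depends on the motion and sensor models releasing the GIL. The par_update wrapper is also not shown; below is a plausible sketch of the argument-packing pattern the snippet relies on (par_update here is a hypothetical reconstruction, and motion_model is assumed to be in scope):

from multiprocessing.pool import ThreadPool

def par_update(args):
    # Unpack the [u_t0, u_t1, x_t0] triple packed by the caller,
    # then run the ordinary single-particle motion update.
    u_t0, u_t1, x_t0 = args
    return motion_model.update(u_t0, u_t1, x_t0)

# pool.map fans the packed argument lists out across worker threads:
# pool = ThreadPool(15)
# x_t1Arr = pool.map(par_update, [[u_t0, u_t1, X_bar[m, 0:3]] for m in range(X_bar.shape[0])])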
Example #26
def main():
    """
    Description of variables used
    u_t0 : particle state odometry reading [x, y, theta] at time (t-1) [odometry_frame]   
    u_t1 : particle state odometry reading [x, y, theta] at time t [odometry_frame]
    x_t0 : particle state belief [x, y, theta] at time (t-1) [world_frame]
    x_t1 : particle state belief [x, y, theta] at time t [world_frame]
    particles : [num_particles x 4] sized array containing [x, y, theta, wt] values for all particles
    z_t : array of 180 range measurements for each laser scan
    """
    """
    Initialize Parameters
    """
    ###########################################  SET THE NUMBER OF PARTICLES #####################################
    num_particles = 10000
    ###########################################  SET THE NUMBER OF PARTICLES #####################################

    src_path_map = '../data/map/wean.dat'
    src_path_log = '../data/log/robotdata1.log'

    #src_path_log = '../data/log/robotdata5.log'

    map_obj = MapReader(src_path_map)
    occupancy_map = map_obj.get_map()
    map_size_x = map_obj.get_map_size_x()
    map_size_y = map_obj.get_map_size_y()

    logfile = open(src_path_log, 'r')

    motion_model = MotionModel()
    sensor_model = SensorModel(occupancy_map)
    resampler = Resampling()

    particles = init_particles_freespace(num_particles, occupancy_map)
    #particles = np.array([[4100,3990,3,1],[4060,3990,3,1],[4000,3990,3,1],[4000,3990,2,1],[6150,1270,1.7,1]])
    # The particles above are
    # In the correct location with approximately the correct angle
    # Correct angle but a little farther out into the hallway
    # Correct angle but squarely in the hallway
    # In the center of the hallway and at wrong angle
    # Completely wrong in the big room at the bottom

    vis_flag = 1
    """
    Monte Carlo Localization Algorithm : Main Loop
    """
    first_time_idx = True
    lastUsedOdometry = np.array([0, 0, 0])
    imageNumber = 0

    for time_idx, line in enumerate(logfile):
        # time_idx is just a counter
        # line is the text from a line in the file.

        # Read a single 'line' from the log file (can be either odometry or laser measurement)
        meas_type = line[0]  # L : laser scan measurement, O : odometry measurement
        meas_vals = np.fromstring(line[2:], dtype=np.float64, sep=' ')  # convert measurement values from string to double

        state = meas_vals[0:3]  # odometry reading [x, y, theta] in odometry frame
        time_stamp = meas_vals[-1]

        # if ((time_stamp <= 0.0) | (meas_type == "O")): # ignore pure odometry measurements for now (faster debugging)
        # continue

        if (meas_type == "L"):  # Laser data
            odometry_laser = meas_vals[3:6]  # [x, y, theta] coordinates of laser in odometry frame
            #print(odometry_laser)
            delta = odometry_laser - lastUsedOdometry
            distance = math.sqrt(delta[0] * delta[0] + delta[1] * delta[1])

            # Don't update or do anything if the robot is (nearly) stationary;
            # abs() so that large negative rotations also count as motion
            if ((distance < 35) and (abs(delta[2]) < .05)):
                print("Time: %f\tDistance: %f\tangle: %f\tDidn't move enough..."
                      % (time_stamp, distance, delta[2]))
                continue

            print("Time: %f\tDistance: %f\tangle: %f" %
                  (time_stamp, distance, delta[2]))

            lastUsedOdometry = odometry_laser
            ranges = meas_vals[6:-1]  # 180 range measurement values from single laser scan

        else:
            #print("Skipping this record because it is an odometry record.")
            continue

        #print "Processing time step " + str(time_idx) + " at time " + str(time_stamp) + "s"

        if (first_time_idx):
            lastState = state
            first_time_idx = False
            continue

        #particles_new = np.zeros( (num_particles,4), dtype=np.float64)
        currentState = state

        # MOTION MODEL - move each particle
        startTime = time.time()
        for m in range(num_particles):
            oldParticle = particles[m, 0:3]
            particles[m, 0:3] = motion_model.update(
                lastState, currentState, oldParticle)  # This is [X,Y,theta]
            # # Use this line for testing probabilities
            # newParticle = particles[m,0:3] ######### Don't update the position of the particle.####################
        print("Motion model completed in  %s seconds" %
              (time.time() - startTime))  # Typically takes .125 seconds for
        # 10000 particles

        # SENSOR MODEL - find the likelihood of each particle
        startTime = time.time()
        for m in range(num_particles):
            particles[m, 3] = sensor_model.beam_range_finder_model(
                ranges, particles[m, 0:3])
        print("Sensor model completed in  %s seconds" %
              (time.time() - startTime))  # Typically takes 7.85 seconds for
        # 10000 particles

        lastState = currentState

        # print '***********Particles before normalizing***************'
        # print(particles)

        # #normalize the weights
        #minWeight = min(particles[:,3]);
        #maxWeight = max(particles[:,3]);
        #weightRng = (maxWeight - minWeight);
        #if (abs(weightRng)<0.0000001):
        #    particles[:,3] = (1/float(num_particles))*np.ones(num_particles);
        #else:
        #    particles[:,3] = (particles[:,3] - minWeight)/weightRng;

        #print '***********Particles after normalizing***************'
        #
        #print(particles)
        #particles = resampler.low_variance_sampler(particles,num_particles)
        particles = resampler.multinomial_sampler(particles, num_particles)

        #print("Completed in  %s seconds" % (time.time() - startTime))  # this is currently taking about .4 seconds per particle
        # Resampling typically takes 8 ms for 5000 particles.

        if vis_flag:
            imageNumber += 1
            visualize_map(occupancy_map, particles, imageNumber)
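Example #26 resamples with resampler.multinomial_sampler, which is likewise not shown in these snippets. A minimal sketch of multinomial resampling (indices drawn i.i.d. in proportion to weight), assuming the same [N x 4] particle layout with weights in the last column:

import numpy as np

def multinomial_sampler(particles, num_particles):
    # Draw num_particles indices i.i.d. with probability proportional to weight.
    weights = particles[:, 3]
    total = np.sum(weights)
    if total <= 0:
        probs = np.ones(len(weights)) / len(weights)  # degenerate weights: uniform fallback
    else:
        probs = weights / total
    idx = np.random.choice(len(particles), size=num_particles, p=probs)
    resampled = particles[idx].copy()
    resampled[:, 3] = 1.0 / num_particles  # reset weights to uniform
    return resampled

Compared with the low-variance sampler sketched earlier, multinomial resampling has higher variance in the surviving particle counts, which is one reason most of these examples prefer the low-variance version.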
Example #27
def main():

    """
    Description of variables used
    u_t0 : particle state odometry reading [x, y, theta] at time (t-1) [odometry_frame]   
    u_t1 : particle state odometry reading [x, y, theta] at time t [odometry_frame]
    x_t0 : particle state belief [x, y, theta] at time (t-1) [world_frame]
    x_t1 : particle state belief [x, y, theta] at time t [world_frame]
    X_bar : [num_particles x 4] sized array containing [x, y, theta, wt] values for all particles
    z_t : array of 180 range measurements for each laser scan
    """

    """
    Initialize Parameters
    """
    src_path_map = '../data/map/wean.dat'
    src_path_log = '../data/log/robotdata2.log'

    map_obj = MapReader(src_path_map)
    occupancy_map = map_obj.get_map()
    # get the free space map
    occupancy_map = filter_close_map(occupancy_map)
    # occupancy_map = np.logical_and(occupancy_map<occu_thresh, occupancy_map>=0)
    
    logfile = open(src_path_log, 'r')

    motion_model = MotionModel()
    sensor_model = SensorModel(occupancy_map)
    resampler = Resampling()

    num_particles = init_n_particles
#    X_bar = init_particles_random(num_particles, occupancy_map)
    X_bar = init_particles_freespace(num_particles, occupancy_map)
    X_bar = np.vstack((X_bar, init_test_particle()))
    # X_bar = init_test_particle()

    num_particles, _ = X_bar.shape

    vis_flag = 1

    """
    Monte Carlo Localization Algorithm : Main Loop
    """
    # if vis_flag:
    #     visualize_map(occupancy_map)

    # draw_robot(X_bar)
        
    first_time_idx = True
    u_t0 = np.array([0])
    for time_idx, line in enumerate(logfile):

        # Read a single 'line' from the log file (can be either odometry or laser measurement)
        meas_type = line[0] # L : laser scan measurement, O : odometry measurement
        meas_vals = np.fromstring(line[2:], dtype=np.float64, sep=' ') # convert measurement values from string to double

        odometry_robot = meas_vals[0:3] # odometry reading [x, y, theta] in odometry frame
        time_stamp = meas_vals[-1]

        # if ((time_stamp <= 0.0) | (meas_type == "O")): # ignore pure odometry measurements for now (faster debugging) 
            # continue

        ranges = np.array([0])
        if (meas_type == "L"):
            odometry_laser = meas_vals[3:6]  # [x, y, theta] coordinates of laser in odometry frame
            ranges = meas_vals[6:-1]  # 180 range measurement values from single laser scan
        
        print("Processing time step " + str(time_idx) + " at time " + str(time_stamp) + "s")

        if (first_time_idx):
            u_t0 = odometry_robot
            first_time_idx = False
            continue

        # X_bar_new = np.zeros( (num_particles,4), dtype=np.float64)
        u_t1 = odometry_robot

        #X_bar_new = X_bar

        # pool = Pool(4)

        for m in range(0, num_particles):
            X_bar[m, :] = parallel_motion_sensor_model(m, u_t0, u_t1, ranges, meas_type,
                                                       sensor_model, motion_model, X_bar[m, :])
            # X_bar_new[m,:] = pool.apply_async(parallel_motion_sensor_model,
            #                                   (m, u_t0, u_t1, ranges, meas_type,
            #                                    sensor_model, motion_model, X_bar_new[m,:]))
        # pool.close()
        # pool.join()

        # for m in range(0, num_particles):
        #
        #     """
        #     MOTION MODEL
        #     """
        #     if np.linalg.norm(u_t0 - u_t1) != 0:
        #         x_t0 = X_bar[m,:3]
        #         x_t1 = motion_model.update(u_t0, u_t1, x_t0)
        #     else:
        #         x_t0 = X_bar[m, :3]
        #         x_t1 = x_t0
        #
        #
        #     """
        #     SENSOR MODEL
        #     """
        #     if (meas_type == "L" ) and (np.linalg.norm(u_t0-u_t1) != 0):
        #         z_t = ranges
        #         w_t = sensor_model.beam_range_finder_model(z_t, x_t1)
        #         # w_t = 1/num_particles
        #         X_bar_new[m,:] = np.hstack((x_t1, w_t))
        #     else:
        #         X_bar_new[m,:] = np.hstack((x_t1, X_bar[m,3]))
        
        #X_bar = X_bar_new
        moved = np.linalg.norm(u_t0-u_t1) != 0
        u_t0 = u_t1

        """
        RESAMPLING
        """

        if moved:
            if meas_type == "L":
                X_bar = resampler.low_variance_sampler(X_bar)
                print (X_bar.shape)
                num_particles, _ = X_bar.shape
            if vis_flag:
                visualize_timestep(X_bar, time_idx, occupancy_map)
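parallel_motion_sensor_model is not defined in this snippet, but the commented-out serial loop above spells out its contract: propagate one particle through the motion model, then reweight it with the sensor model on laser steps, skipping both when odometry has not changed. A per-particle helper consistent with that loop (a reconstruction, not the original):

import numpy as np

def parallel_motion_sensor_model(m, u_t0, u_t1, ranges, meas_type,
                                 sensor_model, motion_model, particle):
    # Only run the motion update when the odometry actually changed.
    x_t0 = particle[0:3]
    if np.linalg.norm(u_t0 - u_t1) != 0:
        x_t1 = motion_model.update(u_t0, u_t1, x_t0)
    else:
        x_t1 = x_t0
    # Reweight on laser measurements; otherwise carry the old weight forward.
    if meas_type == "L" and np.linalg.norm(u_t0 - u_t1) != 0:
        w_t = sensor_model.beam_range_finder_model(ranges, x_t1)
        return np.hstack((x_t1, w_t))
    return np.hstack((x_t1, particle[3]))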
Example #28
from Resampling import Resampling
import numpy as np

num_particles=20
y0_vals = np.random.uniform( 3600, 4300, (num_particles, 1) )
x0_vals = np.random.uniform( 3500, 5000, (num_particles, 1) )
theta0_vals = np.random.uniform( -3.14, 3.14, (num_particles, 1) )

# initialize weights for all particles

w0_vals = np.random.uniform(0,100,(num_particles,1))
w0_vals = w0_vals / num_particles

X_bar = np.hstack((x0_vals,y0_vals,theta0_vals,w0_vals))
resampler = Resampling()
print(X_bar)
for i in range(num_particles):
    X_bar = resampler.low_variance_sampler(X_bar)
    print(X_bar)
    input()
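Every example here also depends on init_particles_freespace, which is never shown. Below is a minimal rejection-sampling sketch, assuming occupancy_map is a 2-D numpy array of occupancy probabilities where small non-negative values mean free space and one cell spans 10 cm (the wean.dat convention these examples appear to use; both the free-space threshold and the resolution are assumptions):

import numpy as np

def init_particles_freespace(num_particles, occupancy_map, resolution=10.0, free_thresh=0.35):
    # Rejection-sample (x, y) until each particle lands on a free cell,
    # then give it a random heading and a uniform weight.
    h, w = occupancy_map.shape
    particles = np.zeros((num_particles, 4))
    count = 0
    while count < num_particles:
        x = np.random.uniform(0, w * resolution)
        y = np.random.uniform(0, h * resolution)
        row, col = int(y / resolution), int(x / resolution)
        if 0 <= occupancy_map[row, col] < free_thresh:
            theta = np.random.uniform(-np.pi, np.pi)
            particles[count] = [x, y, theta, 1.0 / num_particles]
            count += 1
    return particles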
Example #29
"""
"""
Initialize Parameters
"""
src_path_map = '../data/map/wean.dat'
src_path_log = '../data/log/robotdata1.log'

map_obj = MapReader(src_path_map)
occupancy_map = map_obj.get_map()
# logfile = open(src_path_log, 'r')
with open(src_path_log, 'r') as f:
    logs = f.readlines()

motion_model = MotionModel()
sensor_model = SensorModel(occupancy_map)
resampler = Resampling()

num_particles = 500
# X_bar = init_particles_random(num_particles, occupancy_map)
X_bar = init_particles_freespace(num_particles, occupancy_map)
vis_flag = 1
vis_type = 'mean'  # {mean, max}
addition = True
"""
Monte Carlo Localization Algorithm : Main Loop
"""
if vis_flag:
    visualize_map(occupancy_map)

# initialize plot to save the video of the full sequence
# First set up the figure, the axis, and the plot element we want to animate
def Mine_Pipline(X_tensor,
                 Y_tensor,
                 FeatureNames,
                 eval_model,
                 select_model,
                 oversampling=False):

    try:
        clf_fs = SelectFromModel(get_model(eval_model))  # feature selection
        clf_classifier = get_model(select_model)  #classifier model

        # initial parameters
        accuracy_train = 0
        accuracy_test = 0
        loo = LeaveOneOut()
        predict_label = []
        Important_features = []

        for train_index, test_index in loo.split(X_tensor):

            # separate training data and testing data
            X_train, X_test = X_tensor[train_index], X_tensor[test_index]
            y_train, y_test = Y_tensor[train_index], Y_tensor[test_index]

            # fit the feature selection model
            clf_fs.fit(X_train, y_train)
            X_train_fs = clf_fs.transform(X_train)

            # print ("X_train: ", X_train.shape)
            # print("X_train: ", X_train_fs.shape)

            # get the boolean support mask and save the selected feature names
            feature_select_bool = clf_fs.get_support()
            selected_feature = [
                FeatureNames[i] for i, keep in enumerate(feature_select_bool) if keep
            ]
            Important_features.append(selected_feature)

            # Over-sample the data after feature selection
            if oversampling:
                re = Resampling()
                X_train_fs, y_train = re.smoteOversampling(X_train_fs, y_train)

            # Transform the testing data
            X_test = clf_fs.transform(X_test)

            # fit the classifier
            clf_classifier.fit(X_train_fs, y_train)

            # predict training data and testing data
            #pred_train = clf_classifier.predict(X_train_fs)
            pred = clf_classifier.predict(X_test)
            #accuracy_train += accuracy_score(y_train, pred_train)
            accuracy_test += accuracy_score(y_test, pred)
            # print(y_test,pred)
            # print(accuracy_test)
            predict_label.append(pred)

        acc_result = (accuracy_test / len(X_tensor))
        # print("Classifier: ",select_model, "Evaluator: ",eval_model)
        # print("Gensim accuracy_train:", accuracy_train / len(X_tensor))
        # print("Gensim accuracy_test:", accuracy_test / len(X_tensor))
        predict_label = np.array(predict_label).ravel()  # flatten LOO predictions to 1-D for roc_curve
        fpr, tpr, thresholds = metrics.roc_curve(Y_tensor,
                                                 predict_label,
                                                 pos_label=1)
        auc1 = metrics.auc(fpr, tpr)
        CM = confusion_matrix(Y_tensor, predict_label, labels=[1, 0]).ravel()
        CR = classification_report(Y_tensor, predict_label)
        # print("Confusion Matrix: ",CM)
        #print(CR,"AUC= ",auc1)
        return acc_result, CM, CR, auc1, predict_label, Important_features
    except Exception:
        # swallow errors and signal failure with sentinel values
        return -1, -1, -1, -1, -1, -1
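A usage sketch for Mine_Pipline on synthetic data (hypothetical: it assumes get_model maps a model-name string such as 'rf' to an sklearn estimator, which this snippet does not show, and uses random labels purely to exercise the pipeline):

import numpy as np

X = np.random.rand(40, 12)  # 40 samples, 12 features
y = np.random.randint(0, 2, 40)  # binary labels
names = ['feat_%d' % i for i in range(12)]

acc, CM, CR, auc1, preds, selected = Mine_Pipline(
    X, y, names, eval_model='rf', select_model='rf', oversampling=False)
print('LOO accuracy:', acc, 'AUC:', auc1)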