def create_keras_model_dense():
    """Build and compile a dense feed-forward Keras classifier.

    A single dummy ``Measurement`` is pushed through ``DataSet`` solely to
    derive the input feature width (``len(dataset.x[0])``) and the number of
    output classes (``len(dataset.class_labels)``).

    Returns:
        A compiled ``Sequential`` model: five 64-unit relu layers with
        dropout, followed by a softmax output layer, using categorical
        cross-entropy loss and the Adam optimizer.
    """
    mc = MeasureCollection()
    m = Measurement(1, 1111111111, 48, 14, 4,
                    GroundTruth(1111, GroundTruthClass.FREE_SPACE))
    mc.add_measure(m)
    dataset = DataSet.get_raw_sensor_dataset([mc])

    model = Sequential()
    model.add(Dense(64, activation='relu', input_dim=len(dataset.x[0])))
    model.add(Dropout(0.1))
    model.add(Dense(64, activation='relu'))
    model.add(Dropout(0.1))
    model.add(Dense(64, activation='relu'))
    # NOTE(review): 0.5 here vs 0.1 on every other layer — confirm this
    # asymmetric dropout rate is intentional.
    model.add(Dropout(0.5))
    model.add(Dense(64, activation='relu'))
    model.add(Dropout(0.1))
    model.add(Dense(64, activation='relu'))
    model.add(Dropout(0.1))
    model.add(Dense(len(dataset.class_labels), activation='softmax'))

    # Bug fix: the original constructed an SGD optimizer here
    # (SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)) but never
    # passed it to compile(), which uses 'adam'. The dead construction is
    # removed; compile behavior is unchanged.
    model.compile(loss='categorical_crossentropy',
                  optimizer='adam',
                  metrics=['accuracy'])

    return model
# --- Example #2 ---
base_path = 'C:\\sw\\master\\collected data\\'

# Segmentation / filtering parameters for MeasureCollection.read_directory.
options = {
    'mc_min_speed': 1.0,
    'mc_merge': True,
    'mc_separation_threshold': 1.0,
    'mc_min_measure_count': 2,
    # 'mc_surrounding_times_s': [2.0, 5.0],
    'outlier_threshold_distance': 1.0,
    'outlier_threshold_diff': 0.5,
    # 'replacement_values': {0.01: 10.01},
    'min_measurement_value': 0.06,
}

# Accumulate one DataSet across every file in the directory, and keep a
# combined lookup dict of all measure collections.
dataset = None
measure_collections_files_dir = MeasureCollection.read_directory(
    base_path, options=options)
measure_collections_dir = {}
for file_name, measure_collections in measure_collections_files_dir.items():
    print(file_name)
    dataset = DataSet.get_raw_sensor_dataset_per_10cm(measure_collections,
                                                      dataset=dataset,
                                                      is_softmax_y=True)
    measure_collections_dir.update(
        MeasureCollection.mc_list_to_dict(measure_collections))

# Sequential 80/20 train/test split over the accumulated samples.
split_threshold = len(dataset.x) * 0.8
x_train = [sample for idx, sample in enumerate(dataset.x)
           if idx < split_threshold]
y_train = [label for idx, label in enumerate(dataset.y_true)
           if idx < split_threshold]
x_test = [sample for idx, sample in enumerate(dataset.x)
          if idx >= split_threshold]
# --- Example #3 ---
        # NOTE(review): fragment — this dict literal and its enclosing
        # function begin above this excerpt.
        'outlier_threshold_diff': 0.5,
        # Readings of 0.01 are mapped to 10.01 — presumably an out-of-range
        # sentinel replacement; TODO confirm against Measurement.read.
        'replacement_values': {
            0.01: 10.01
        },
        'min_measurement_value': 0.06,
    }

    # measurements = Measurement.read('C:\\sw\\master\\collected data\\data_20170718_tunnel\\raw_20170718_074348_696382.dat',
    #                                 'C:\\sw\\master\\collected data\\data_20170718_tunnel\\raw_20170718_074348_696382.dat_images_Camera\\00gt1500721683.81.dat',
    #                                 options)
    #
    # visualization.show_distance_signal(measurements, plt.figure(1))
    # visualization.show_distance_signal_scatter(measurements, plt.figure(2))

    #free_space_measure_collections = []
    measure_collections_dir = MeasureCollection.read_directory(base_path,
                                                               options=options)
    #gt25 = 0
    # `i` appears to be a matplotlib figure index for the commented-out
    # visualization calls; with them disabled, this loop currently only
    # iterates the files without any visible effect.
    i = 1
    for file_name, measure_collection in measure_collections_dir.items():
        #visualization.show_2d_scatter(measure_collection, fig=plt.figure(1))
        #visualization.show_distances_plus_segmentation(measure_collection, fig=plt.figure(i))
        #visualization.show_distance_for_class(measure_collection, [GroundTruthClass.OVERTAKEN_BICYCLE], fig=plt.figure(i))
        #gt25 += len([mc for mc in measure_collection if mc.get_probable_ground_truth() == GroundTruthClass.FREE_SPACE and mc.length >= 5])
        #free_space_measure_collections.extend([mc for mc in measure_collection if mc.get_probable_ground_truth() == GroundTruthClass.FREE_SPACE]);
        i += 1
    #
    # print gt25
    # visualization.show_distance_histogram_length(free_space_measure_collections, fig=plt.figure(i))
    #measure_collections = MeasureCollection.read_from_file('C:\\sw\\master\\collected data\\data_20170707\\tagged_mc_20170705_065613_869794.dat')
    #visualization.show_distances_plus_segmentation(measure_collections)
    #visualization.show_distance_signal(measurements)
# --- Example #4 ---
                                                os.path.join(
                                                    camera_folder,
                                                    gt_files[0]),
                                                options=options)
                # Accumulate the measurement count and the elapsed span
                # (last timestamp minus first) of this file's readings.
                cnt_measurements += len(measurements)
                total_seconds += measurements[len(
                    measurements) - 1].timestamp - measurements[0].timestamp

    # NOTE(review): the enclosing function begins above this excerpt; it
    # appears to total raw measurements and covered seconds over a directory.
    return cnt_measurements, total_seconds


if __name__ == '__main__':
    # Root directory of the collected sensor data (Windows path).
    base_path = 'C:\\sw\\master\\collected data\\'

    options = {}

    # NOTE(review): `measure_collections` and `excluded_mcs` are never read
    # within this excerpt (and with options == {} `excluded_mcs` is always
    # []); the block may continue below the visible cut — confirm before
    # removing them.
    measure_collections = {}
    excluded_mcs = options.get('exclude_mcs', [])

    # Report the raw dataset size: total measurements and covered seconds.
    print('')
    raw_size = get_raw_dataset_size(base_path, options)
    print('cnt_measurements, total_seconds')
    print(raw_size)
    print('')

    # Report the size after grouping measurements into collections.
    measure_collections_dir = MeasureCollection.read_directory(base_path,
                                                               options=options)
    print('')
    print('cnt_measure_collections, cnt_measurements, total_seconds')
    print(MeasureCollection.get_size(measure_collections_dir))
        # NOTE(review): fragment — this options dict and its enclosing scope
        # begin above this excerpt.
        'outlier_threshold_distance': 1.0,
        'outlier_threshold_diff': 0.5,
        'max_measure_value': 10.0,
        # 'replacement_values': {0.01: 10.01},
        'min_measurement_value': 0.06
    }

    # Locate the ground-truth file next to the scenario recording: the first
    # file in the companion camera folder whose name starts with '00gt'.
    # NOTE(review): raises IndexError if no such file exists — presumably
    # intentional fail-fast; confirm.
    camera_folder = scenario_path + '_images_Camera\\'
    ground_truth_file = [
        os.path.join(camera_folder, f) for f in os.listdir(camera_folder) if
        os.path.isfile(os.path.join(camera_folder, f)) and f.startswith('00gt')
    ][0]
    measurements_scenario = Measurement.read(scenario_path,
                                             ground_truth_file,
                                             options=options)
    measure_collections_scenario = MeasureCollection.create_measure_collections(
        measurements_scenario, options=options)
    dataset_scenario = DataSet.get_dataset(measure_collections_scenario)

    # Exclude the held-out scenario's collections from the training read
    # below, so the classifier is evaluated on unseen data.
    options['exclude_mcs'] = measure_collections_scenario

    dataset = None
    measure_collections_files_dir = MeasureCollection.read_directory(
        base_path, options=options)
    for file_name, measure_collections in measure_collections_files_dir.items(
    ):
        dataset = DataSet.get_dataset(measure_collections, dataset=dataset)

    # Train a random forest on everything except the scenario, then predict
    # the scenario's samples; reshape flattens predictions to a 1-D array.
    clf = RandomForestClassifier(n_estimators=1000, n_jobs=-1, random_state=42)
    clf.fit(dataset.x, dataset.y_true)

    predictions = clf.predict(np.array(dataset_scenario.x)).reshape(1, -1)[0]
        # NOTE(review): fragment — this options dict and its enclosing scope
        # begin above this excerpt.
        'mc_merge': True,
        'mc_separation_threshold': 1.0,
        'mc_min_measure_count': 2,
        # 'mc_surrounding_times_s': [2.0, 5.0],
        'outlier_threshold_distance': 1.0,
        'outlier_threshold_diff': 0.5,
        # 'replacement_values': {0.01: 10.01},
        'min_measurement_value': 0.06,
    }

    # Dataset accumulators, presumably filled further down (below this
    # excerpt's cut) — only declared here.
    dataset_raw = None
    dataset_10cm = None
    dataset_10cm_surrounding = None
    dataset_parking = None
    #write_to_file(base_path, ml_file_path)
    measure_collections_files_dir = MeasureCollection.read_directory(
        base_path, options=options)

    # Build the parking-space map clusters and keep only the measure
    # collections that match them, then report the filtered size.
    parking_space_map_clusters, _ = create_parking_space_map(
        measure_collections_files_dir)
    measure_collections_files_dir = filter_parking_space_map_mcs(
        measure_collections_files_dir, parking_space_map_clusters)
    print(MeasureCollection.get_size(measure_collections_files_dir))

    measure_collections_dir = {}
    # NOTE(review): loop body continues below this excerpt; only the
    # filename print and disabled filtering steps are visible here.
    for file_name, measure_collections in measure_collections_files_dir.items(
    ):
        print(file_name)
        #print(len(measure_collection))
        #measure_collection = filter_acceleration_situations(measure_collection)
        #print('filtered', len(measure_collection))
        #MeasureCollection.write_arff_file(measure_collections1, ml_file_path)