def plot_singlewave(file):
    sensor = import_sensorfile(file)
    sensor_processed = process_input(sensor)
    # collect raw timestamps as strings (kept for the optional x-tick labels below)
    timestamps = [str(item[0]) for item in sensor]

    sensor_processed_calibrated = mf.calibrate_median(sensor_processed)
    sensor_filtered = mf.medfilt(sensor_processed_calibrated, 3)

    plt.plot(sensor_filtered, linewidth=0.8)
    plt.xlim([0, 12000])
    plt.ylim([-5, 5])
    plt.ylabel('Acceleration (g)')
    plt.xlabel('Time (ms)')
    # plt.xticks(sensor_filtered, timestamps, rotation='vertical')
    plt.show()
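

# Illustrative sketch (assumption): the `get_magnitude` helper used by
# process_triaxial below is assumed to compute the signal vector magnitude
# (SVM), i.e. the element-wise Euclidean norm of the three axes. The function
# name below is hypothetical and only demonstrates that calculation; it is not
# the project's own implementation.
def _example_signal_vector_magnitude(x, y, z):
    """Element-wise sqrt(x^2 + y^2 + z^2) for equal-length NumPy arrays."""
    return np.sqrt(np.square(x) + np.square(y) + np.square(z))

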
def process_triaxial(file_path, window_start_time, window_end_time):
    with open(file_path) as csv_sensorfile:

        # dictionary to hold features
        features = {}

        sensorfile = csv.reader(csv_sensorfile, delimiter=',', quotechar='|')
        sensor_rows = []

        for row in sensorfile:
            # valid data rows have 4 fields (skips header and footer lines)
            if len(row) == 4:
                try:
                    timestamp = int(row[0])
                    x = float(row[1])
                    y = float(row[2])
                    z = float(row[3])
                    sensor_rows.append([timestamp, x, y, z])
                except ValueError:
                    continue

        # if no data exists, generate empty stats; otherwise calculate stats
        if not sensor_rows:
            features = produce_empty_triaxial_sensor_dict(features)
        else:
            # convert the nested Python lists to a NumPy array so columns can be sliced
            sensor_rows = np.array(sensor_rows)
            # pass the timestamps as a plain list to the window-index lookup below
            timestamps = sensor_rows[:, 0].tolist()

            # calculate window indexes
            window_indexes = calculate_window_indexes(timestamps, window_start_time, window_end_time)
            window_start_index = window_indexes[0]
            window_end_index = window_indexes[1]
            # validate window indexes
            window_indexes_valid = window_index_conditions_valid(window_start_index, window_end_index)

            # if valid, window the data, and calculate stats
            if window_indexes_valid:
                # make slices of data using indexes
                x_win = sensor_rows[window_start_index:window_end_index, 1]
                y_win = sensor_rows[window_start_index:window_end_index, 2]
                z_win = sensor_rows[window_start_index:window_end_index, 3]
                svm_win = get_magnitude(x_win, y_win, z_win)

                # Pass the windowed data through a median filter first
                kernel = 15  # kernel size must be an odd number
                x_median_filter = mf.medfilt(x_win, kernel)
                y_median_filter = mf.medfilt(y_win, kernel)
                z_median_filter = mf.medfilt(z_win, kernel)
                svm_median_filter = mf.medfilt(svm_win, kernel)

                # Calibrate filtered SVM using min & median values
                svm_median_filter_calibrated_min = mf.calibrate_minimum(svm_median_filter)
                svm_median_filter_calibrated_med = mf.calibrate_median(svm_median_filter)

                # calculate stats
                stats_x = calc_stats_for_data_stream_as_dictionary(x_win)
                stats_y = calc_stats_for_data_stream_as_dictionary(y_win)
                stats_z = calc_stats_for_data_stream_as_dictionary(z_win)
                stats_svm = calc_stats_for_data_stream_as_dictionary(svm_win)
                stats_x_median_filter = calc_stats_for_data_stream_as_dictionary(x_median_filter)
                stats_y_median_filter = calc_stats_for_data_stream_as_dictionary(y_median_filter)
                stats_z_median_filter = calc_stats_for_data_stream_as_dictionary(z_median_filter)
                stats_svm_median_filter = calc_stats_for_data_stream_as_dictionary(svm_median_filter)
                stats_svm_median_filter_calibrated_min = calc_stats_for_data_stream_as_dictionary(
                        svm_median_filter_calibrated_min)
                stats_svm_median_filter_calibrated_med = calc_stats_for_data_stream_as_dictionary(
                        svm_median_filter_calibrated_med)

                features.update({'x': stats_x,
                                 'y': stats_y,
                                 'z': stats_z,
                                 'svm': stats_svm,
                                 'x_median_filter': stats_x_median_filter,
                                 'y_median_filter': stats_y_median_filter,
                                 'z_median_filter': stats_z_median_filter,
                                 'svm_median_filter': stats_svm_median_filter,
                                 'svm_median_filter_calibrated_min': stats_svm_median_filter_calibrated_min,
                                 'svm_median_filter_calibrated_med': stats_svm_median_filter_calibrated_med
                                 })
            else:
                features = produce_empty_triaxial_sensor_dict(features)

        return features
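

# Illustrative usage only: a minimal sketch of how the functions above might be
# called. The file path, window times, and printed feature key are hypothetical
# examples, not values taken from this project.
if __name__ == '__main__':
    example_file = 'data/session_01_accelerometer.csv'  # hypothetical path
    # window of interest, in the same timestamp units as the CSV (assumed ms)
    features = process_triaxial(example_file,
                                window_start_time=0,
                                window_end_time=10000)
    print(features.get('svm_median_filter', {}))

    # plot the full median-filtered trace for the same recording
    plot_singlewave(example_file)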