def testPointFilteringShanghaiJump(self):
        """Only the known-bad section (index 2) of the Shanghai jump trip
        should have exactly one point filtered out by the smoothing algo."""
        jump_trip = self.trips[0]
        self.loadPointsForTrip(jump_trip.get_id())
        jump_sections = [sec for sec in self.sections
                         if sec.trip_id == jump_trip.get_id()]
        outlier_detector = eaics.BoxplotOutlier()
        smoother = eaicj.SmoothZigzag()

        for idx, curr_section in enumerate(jump_sections):
            logging.debug("-" * 20 + "Considering section %s" % idx + "-" * 20)

            points_df = self.ts.get_data_df(
                "background/filtered_location",
                esds.get_time_query_for_section(curr_section.get_id()))
            speeds_df = eaicl.add_dist_heading_speed(points_df)

            threshold = outlier_detector.get_threshold(speeds_df)
            logging.debug("Max speed for section %s = %s" % (idx, threshold))

            smoother.filter(speeds_df)
            logging.debug("Retaining points %s" % np.nonzero(smoother.inlier_mask_))

            removal_mask = np.logical_not(smoother.inlier_mask_)
            logging.debug("Deleting points %s" % np.nonzero(removal_mask))

            removed_ids = list(speeds_df[removal_mask]._id)
            logging.debug("Deleting ids %s" % removed_ids)

            # Automated checks. Might be able to remove logging statements later
            if idx == 2:
                # The bad section, should have the third point filtered
                self.assertEqual(np.count_nonzero(removal_mask), 1)
                self.assertEqual([str(oid) for oid in removed_ids],
                                 ["55d8c4837d65cb39ee983cb4"])
            else:
                # Not the bad section. Should not be filtered
                self.assertEqual(np.count_nonzero(removal_mask), 0)
                self.assertEqual(len(removed_ids), 0)
# Example #2
    def testPointFilteringRichmondJump(self):
        """The Richmond jump trip has a single section, and the smoothing
        algo should remove exactly one known bad point from it."""
        jump_trip = self.trip_entries[6]
        self.loadPointsForTrip(jump_trip.get_id())
        jump_sections = [entry for entry in self.section_entries
                         if entry.data.trip_id == jump_trip.get_id()]
        outlier_detector = eaics.BoxplotOutlier()
        smoother = eaicj.SmoothZigzag(False, 100)

        for idx, curr_entry in enumerate(jump_sections):
            logging.debug("-" * 20 + "Considering section %s" % idx + "-" * 20)

            points_df = self.ts.get_data_df(
                "background/filtered_location",
                esda.get_time_query_for_trip_like(esda.RAW_SECTION_KEY,
                                                  curr_entry.get_id()))
            speeds_df = eaicl.add_dist_heading_speed(points_df)

            threshold = outlier_detector.get_threshold(speeds_df)
            logging.debug("Max speed for section %s = %s" % (idx, threshold))

            smoother.filter(speeds_df)
            logging.debug("Retaining points %s" % np.nonzero(smoother.inlier_mask_))

            removal_mask = np.logical_not(smoother.inlier_mask_)
            logging.debug("Deleting points %s" % np.nonzero(removal_mask))

            removed_ids = list(speeds_df[removal_mask]._id)
            logging.debug("Deleting ids %s" % removed_ids)

            # There is only one section
            self.assertEqual(idx, 0)
            # The bad section, should have the third point filtered
            self.assertEqual(np.count_nonzero(removal_mask), 1)
            self.assertEqual([str(oid) for oid in removed_ids],
                             ["55e86dbb7d65cb39ee987e09"])
 def get_median_speed(self, mcs, mce):
     """Return the median speed over the location points in the range
     [mcs, mce], or None when the range contains no points."""
     points = self.seg_method.filter_points_for_range(
         self.seg_method.location_points, mcs, mce)
     if len(points) == 0:
         return None
     points.reset_index(inplace=True)
     return eaicl.add_dist_heading_speed(points).speed.median()
    def testPointFilteringZigzag(self):
        """Section 0 of the zigzag trip should have a known set of points
        removed by the smoothing algo; all other sections stay untouched."""
        zigzag_trip = self.trip_entries[8]
        self.loadPointsForTrip(zigzag_trip.get_id())
        zigzag_sections = [entry for entry in self.section_entries
                           if entry.data.trip_id == zigzag_trip.get_id()]
        outlier_detector = eaics.BoxplotOutlier()
        smoother = eaicj.SmoothZigzag(False, 100)

        for idx, curr_entry in enumerate(zigzag_sections):
            logging.debug("-" * 20 + "Considering section %s" % idx + "-" * 20)

            points_df = self.ts.get_data_df(
                "background/filtered_location",
                esda.get_time_query_for_trip_like(esda.RAW_SECTION_KEY,
                                                  curr_entry.get_id()))
            speeds_df = eaicl.add_dist_heading_speed(points_df)

            threshold = outlier_detector.get_threshold(speeds_df)
            logging.debug("Max speed for section %s = %s" % (idx, threshold))

            smoother.filter(speeds_df)
            logging.debug("Retaining points %s" %
                          np.nonzero(smoother.inlier_mask_.to_numpy()))

            removal_mask = np.logical_not(smoother.inlier_mask_)
            logging.debug("Deleting points %s" %
                          np.nonzero(removal_mask.to_numpy()))

            removed_ids = list(speeds_df[removal_mask]._id)
            logging.debug("Deleting ids %s" % removed_ids)

            if idx != 0:
                self.assertEqual(len(np.nonzero(removal_mask.to_numpy())[0]),
                                 0)
                self.assertEqual(len(removed_ids), 0)
            else:
                # this is the zigzag section
                self.assertEqual(
                    np.nonzero(removal_mask.to_numpy())[0].tolist(),
                    [25, 64, 114, 115, 116, 117, 118, 119, 120, 123, 126])
                self.assertEqual(removed_ids,
                                 [boi.ObjectId('55edafe77d65cb39ee9882ff'),
                                  boi.ObjectId('55edcc157d65cb39ee98836e'),
                                  boi.ObjectId('55edcc1f7d65cb39ee988400'),
                                  boi.ObjectId('55edcc1f7d65cb39ee988403'),
                                  boi.ObjectId('55edcc1f7d65cb39ee988406'),
                                  boi.ObjectId('55edcc1f7d65cb39ee988409'),
                                  boi.ObjectId('55edcc1f7d65cb39ee98840c'),
                                  boi.ObjectId('55edcc207d65cb39ee988410'),
                                  boi.ObjectId('55edcc207d65cb39ee988412'),
                                  boi.ObjectId('55edcc217d65cb39ee98841f'),
                                  boi.ObjectId('55edcc217d65cb39ee988429')])
# Example #5
def get_filtered_points(section, filtered_section_data):
    """
    Load the filtered-location points for ``section``, drop points that were
    previously flagged by the smoothing algorithm, resample the remainder at
    30 sec intervals, and return a dataframe with dist/heading/speed columns
    and no n/a rows.

    :param section: section entry whose points should be loaded
    :param filtered_section_data: currently unused; kept for interface
        compatibility with callers
    :return: resampled dataframe with dist/heading/speed and an ``idx``
        column, n/a rows removed
    """
    logging.debug("Getting filtered points for section %s" % section)
    ts = esta.TimeSeries.get_time_series(section.user_id)
    loc_entry_it = ts.find_entries(["background/filtered_location"],
                                   esda.get_time_query_for_trip_like(
                                       esda.RAW_SECTION_KEY, section.get_id()))

    loc_entry_list = [ecwe.Entry(e) for e in loc_entry_it]

    # We know that the assertion fails in the geojson conversion code and we
    # handle it there, so we are just going to comment this out for now.
    # assert (loc_entry_list[-1].data.loc == section.data.end_loc,
    #         "section_location_array[-1].loc != section.end_loc even after df.ts fix",
    #         (loc_entry_list[-1].data.loc, section.data.end_loc))

    # Find the list of points to filter
    filtered_points_entry_doc = ts.get_entry_at_ts("analysis/smoothing",
                                                   "data.section",
                                                   section.get_id())

    if filtered_points_entry_doc is None:
        logging.debug(
            "No filtered_points_entry, filtered_points_list is empty")
        filtered_point_id_list = []
    else:
        # TODO: Figure out how to make collections work for the wrappers and then change this to an Entry
        filtered_points_entry = ad.AttrDict(filtered_points_entry_doc)
        filtered_point_id_list = list(
            filtered_points_entry.data.deleted_points)
        logging.debug("deleting %s points from section points" %
                      len(filtered_point_id_list))

    filtered_loc_list = remove_outliers(loc_entry_list, filtered_point_id_list)

    # filtered_loc_list has removed the outliers. Now, we resample the data at
    # 30 sec intervals
    resampled_loc_df = resample(filtered_loc_list, interval=30)
    # If this is the first section, we need to find the start place of the parent trip
    # and actually start from there. That will fix the distances but not the duration
    # because we haven't yet figured out how to get the correct start time.
    # TODO: Fix this!!
    # For now, we will fudge this in the geojson converter, as always

    with_speeds_df = eaicl.add_dist_heading_speed(resampled_loc_df)
    with_speeds_df["idx"] = np.arange(0, len(with_speeds_df))
    with_speeds_df_nona = with_speeds_df.dropna()
    # Bug fix: the removed-row count is (full - nona); the original logged
    # (nona - full), which is never positive
    logging.info("removed %d entries containing n/a" %
                 (len(with_speeds_df) - len(with_speeds_df_nona)))
    return with_speeds_df_nona
def get_filtered_points(section, filtered_section_data):
    """
    Load the filtered-location points for ``section``, drop points that were
    previously flagged by the smoothing algorithm, resample the remainder at
    30 sec intervals, and return a dataframe with dist/heading/speed columns
    and no n/a rows.

    :param section: section entry whose points should be loaded
    :param filtered_section_data: currently unused; kept for interface
        compatibility with callers
    :return: resampled dataframe with dist/heading/speed and an ``idx``
        column, n/a rows removed
    """
    logging.debug("Getting filtered points for section %s" % section)
    ts = esta.TimeSeries.get_time_series(section.user_id)
    loc_entry_it = ts.find_entries(["background/filtered_location"],
                                   esda.get_time_query_for_trip_like(
                                       esda.RAW_SECTION_KEY, section.get_id()))

    loc_entry_list = [ecwe.Entry(e) for e in loc_entry_it]

    # We know that the assertion fails in the geojson conversion code and we
    # handle it there, so we are just going to comment this out for now.
    # assert (loc_entry_list[-1].data.loc == section.data.end_loc,
    #         "section_location_array[-1].loc != section.end_loc even after df.ts fix",
    #         (loc_entry_list[-1].data.loc, section.data.end_loc))

    # Find the list of points to filter
    filtered_points_entry_doc = ts.get_entry_at_ts("analysis/smoothing",
                                                   "data.section",
                                                   section.get_id())

    if filtered_points_entry_doc is None:
        logging.debug("No filtered_points_entry, filtered_points_list is empty")
        filtered_point_id_list = []
    else:
        # TODO: Figure out how to make collections work for the wrappers and then change this to an Entry
        filtered_points_entry = ad.AttrDict(filtered_points_entry_doc)
        filtered_point_id_list = list(filtered_points_entry.data.deleted_points)
        logging.debug("deleting %s points from section points" % len(
            filtered_point_id_list))

    filtered_loc_list = remove_outliers(loc_entry_list, filtered_point_id_list)

    # filtered_loc_list has removed the outliers. Now, we resample the data at
    # 30 sec intervals
    resampled_loc_df = resample(filtered_loc_list, interval=30)
    # If this is the first section, we need to find the start place of the parent trip
    # and actually start from there. That will fix the distances but not the duration
    # because we haven't yet figured out how to get the correct start time.
    # TODO: Fix this!!
    # For now, we will fudge this in the geojson converter, as always

    with_speeds_df = eaicl.add_dist_heading_speed(resampled_loc_df)
    with_speeds_df["idx"] = np.arange(0, len(with_speeds_df))
    with_speeds_df_nona = with_speeds_df.dropna()
    # Bug fix: the removed-row count is (full - nona); the original logged
    # (nona - full), which is never positive
    logging.info("removed %d entries containing n/a" %
        (len(with_speeds_df) - len(with_speeds_df_nona)))
    return with_speeds_df_nona
    def testPointFilteringZigzag(self):
        """Section 0 of the zigzag trip should have a known set of points
        removed by the smoothing algo; all other sections stay untouched."""
        zigzag_trip = self.trip_entries[8]
        self.loadPointsForTrip(zigzag_trip.get_id())
        zigzag_sections = [
            entry for entry in self.section_entries
            if entry.data.trip_id == zigzag_trip.get_id()
        ]
        outlier_detector = eaics.BoxplotOutlier()
        smoother = eaicj.SmoothZigzag(False, 100)

        for idx, curr_entry in enumerate(zigzag_sections):
            logging.debug("-" * 20 + "Considering section %s" % idx + "-" * 20)

            points_df = self.ts.get_data_df(
                "background/filtered_location",
                esda.get_time_query_for_trip_like(esda.RAW_SECTION_KEY,
                                                  curr_entry.get_id()))
            speeds_df = eaicl.add_dist_heading_speed(points_df)

            threshold = outlier_detector.get_threshold(speeds_df)
            logging.debug("Max speed for section %s = %s" % (idx, threshold))

            smoother.filter(speeds_df)
            logging.debug("Retaining points %s" % np.nonzero(smoother.inlier_mask_))

            removal_mask = np.logical_not(smoother.inlier_mask_)
            logging.debug("Deleting points %s" % np.nonzero(removal_mask))

            removed_ids = list(speeds_df[removal_mask]._id)
            logging.debug("Deleting ids %s" % removed_ids)

            if idx != 0:
                self.assertEqual(len(np.nonzero(removal_mask)[0]), 0)
                self.assertEqual(len(removed_ids), 0)
            else:
                # this is the zigzag section
                self.assertEqual(
                    np.nonzero(removal_mask)[0].tolist(),
                    [25, 64, 114, 115, 116, 117, 118, 119, 120, 123, 126])
                self.assertEqual(removed_ids, [
                    boi.ObjectId('55edafe77d65cb39ee9882ff'),
                    boi.ObjectId('55edcc157d65cb39ee98836e'),
                    boi.ObjectId('55edcc1f7d65cb39ee988400'),
                    boi.ObjectId('55edcc1f7d65cb39ee988403'),
                    boi.ObjectId('55edcc1f7d65cb39ee988406'),
                    boi.ObjectId('55edcc1f7d65cb39ee988409'),
                    boi.ObjectId('55edcc1f7d65cb39ee98840c'),
                    boi.ObjectId('55edcc207d65cb39ee988410'),
                    boi.ObjectId('55edcc207d65cb39ee988412'),
                    boi.ObjectId('55edcc217d65cb39ee98841f'),
                    boi.ObjectId('55edcc217d65cb39ee988429')
                ])
    def testPointFilteringShanghaiJump(self):
        """Only the known-bad section (index 2) of the Shanghai jump trip
        should have exactly one point filtered out by the smoothing algo."""
        jump_trip = self.trip_entries[0]
        self.loadPointsForTrip(jump_trip.get_id())
        jump_sections = [entry for entry in self.section_entries
                         if entry.data.trip_id == jump_trip.get_id()]
        outlier_detector = eaics.BoxplotOutlier()
        smoother = eaicj.SmoothZigzag(False, 100)

        for idx, curr_entry in enumerate(jump_sections):
            logging.debug("-" * 20 + "Considering section %s" % idx + "-" * 20)

            points_df = self.ts.get_data_df(
                "background/filtered_location",
                esda.get_time_query_for_trip_like(esda.RAW_SECTION_KEY,
                                                  curr_entry.get_id()))
            speeds_df = eaicl.add_dist_heading_speed(points_df)

            threshold = outlier_detector.get_threshold(speeds_df)
            logging.debug("Max speed for section %s = %s" % (idx, threshold))

            smoother.filter(speeds_df)
            logging.debug("Retaining points %s" %
                          np.nonzero(smoother.inlier_mask_.to_numpy()))

            removal_mask = np.logical_not(smoother.inlier_mask_)
            logging.debug("Deleting points %s" %
                          np.nonzero(removal_mask.to_numpy()))

            removed_ids = list(speeds_df[removal_mask]._id)
            logging.debug("Deleting ids %s" % removed_ids)

            # Automated checks. Might be able to remove logging statements later
            if idx == 2:
                # The bad section, should have the third point filtered
                self.assertEqual(np.count_nonzero(removal_mask), 1)
                self.assertEqual([str(oid) for oid in removed_ids],
                                 ["55d8c4837d65cb39ee983cb4"])
            else:
                # Not the bad section. Should not be filtered
                self.assertEqual(np.count_nonzero(removal_mask), 0)
                self.assertEqual(len(removed_ids), 0)
def section_to_geojson(section, tl):
    """
    This is the trickiest part of the visualization.
    The section is basically a collection of points with a line through them.
    So the representation is a feature in which one feature which is the line, and one feature collection which is the set of point features.
    :param section: the section to be converted
    :param tl: timeline; used to look up the parent trip's start place when
        this is the first section (section.start_stop is None)
    :return: a feature collection which is the geojson version of the section
    """

    ts = esta.TimeSeries.get_time_series(section.user_id)
    entry_it = ts.find_entries(["background/filtered_location"], esds.get_time_query_for_section(section.get_id()))
    # points_df = ts.get_data_df("background/filtered_location", esds.get_time_query_for_section(section.get_id()))
    # points_df = points_df.drop("elapsedRealTimeNanos", axis=1)
    # logging.debug("points_df.columns = %s" % points_df.columns)

    # TODO: Decide whether we want to use Rewrite to use dataframes throughout instead of python arrays.
    # dataframes insert nans. We could use fillna to fill with default values, but if we are not actually
    # using dataframe features here, it is unclear how much that would help.
    feature_array = []
    section_location_array = [ecwl.Location(ts._to_df_entry(entry)) for entry in entry_it]

    # Robustness fix: the query can legitimately return no points; indexing
    # [0]/[-1] into an empty array would raise IndexError
    if len(section_location_array) != 0:
        logging.debug("first element in section_location_array = %s" % section_location_array[0])

        # Fudge the end point so that we don't have a gap because of the ts != write_ts mismatch
        # TODO: Fix this once we are able to query by the data timestamp instead of the metadata ts
        if section_location_array[-1].loc != section.end_loc:
            last_loc_doc = ts.get_entry_at_ts("background/filtered_location", "data.ts", section.end_ts)
            last_loc_data = ecwe.Entry(last_loc_doc).data
            last_loc_data["_id"] = last_loc_doc["_id"]
            section_location_array.append(last_loc_data)
            logging.debug("Adding new entry %s to fill the end point gap between %s and %s"
                % (last_loc_data.loc, section_location_array[-2].loc, section.end_loc))

    # Find the list of points to filter
    filtered_points_entry_doc = ts.get_entry_at_ts("analysis/smoothing", "data.section",
                                                               section.get_id())
    if filtered_points_entry_doc is None:
        logging.debug("No filtered_points_entry, returning unchanged array")
        filtered_section_location_array = section_location_array
    else:
        # TODO: Figure out how to make collections work for the wrappers and then change this to an Entry
        filtered_points_entry = ad.AttrDict(filtered_points_entry_doc)
        filtered_point_list = list(filtered_points_entry.data.deleted_points)
        logging.debug("deleting %s points from section points" % len(filtered_point_list))
        filtered_section_location_array = [l for l in section_location_array if l.get_id() not in filtered_point_list]

    with_speeds = eaicl.add_dist_heading_speed(pd.DataFrame(filtered_section_location_array))
    speeds = list(with_speeds.speed)
    distances = list(with_speeds.distance)

    # Skip the write-back when there are no points (empty dataframe iterrows
    # is harmless, but the explicit guard is clearer)
    if len(filtered_section_location_array) != 0:
        for idx, row in with_speeds.iterrows():
            # TODO: Remove instance of setting value without going through wrapper class
            filtered_section_location_array[idx]["speed"] = row["speed"]
            filtered_section_location_array[idx]["distance"] = row["distance"]

    points_feature_array = [location_to_geojson(l) for l in filtered_section_location_array]

    points_line_feature = point_array_to_line(filtered_section_location_array)
    # If this is the first section, we already start from the trip start. But we actually need to start from the
    # prior place. Fudge this too. Note also that we may want to figure out how to handle this properly in the model
    # without needing fudging. TODO: Unclear how exactly to do this
    if section.start_stop is None:
        # This is the first section. So we need to find the start place of the parent trip
        parent_trip = tl.get_object(section.trip_id)
        start_place_of_parent_trip = tl.get_object(parent_trip.start_place)
        points_line_feature.geometry.coordinates.insert(0, start_place_of_parent_trip.location.coordinates)

    for i, point_feature in enumerate(points_feature_array):
        point_feature.properties["idx"] = i

    points_line_feature.id = str(section.get_id())
    points_line_feature.properties = copy.copy(section)
    points_line_feature.properties["feature_type"] = "section"
    points_line_feature.properties["sensed_mode"] = str(points_line_feature.properties.sensed_mode)
    points_line_feature.properties["distance"] = sum(distances)
    points_line_feature.properties["speeds"] = speeds
    points_line_feature.properties["distances"] = distances

    _del_non_derializable(points_line_feature.properties, ["start_loc", "end_loc"])

    feature_array.append(gj.FeatureCollection(points_feature_array))
    feature_array.append(points_line_feature)

    return gj.FeatureCollection(feature_array)
def section_to_geojson(section, tl):
    """
    This is the trickiest part of the visualization.
    The section is basically a collection of points with a line through them.
    So the representation is a feature in which one feature which is the line, and one feature collection which is the set of point features.
    :param section: the section to be converted
    :param tl: timeline; used to look up the parent trip's start place when
        this is the first section (section.start_stop is None)
    :return: a feature collection which is the geojson version of the section
    """

    ts = esta.TimeSeries.get_time_series(section.user_id)
    entry_it = ts.find_entries(["background/filtered_location"],
                               esds.get_time_query_for_section(
                                   section.get_id()))
    # points_df = ts.get_data_df("background/filtered_location", esds.get_time_query_for_section(section.get_id()))
    # points_df = points_df.drop("elapsedRealTimeNanos", axis=1)
    # logging.debug("points_df.columns = %s" % points_df.columns)

    # TODO: Decide whether we want to use Rewrite to use dataframes throughout instead of python arrays.
    # dataframes insert nans. We could use fillna to fill with default values, but if we are not actually
    # using dataframe features here, it is unclear how much that would help.
    feature_array = []
    section_location_array = [
        ecwl.Location(ts._to_df_entry(entry)) for entry in entry_it
    ]
    # Guard against an empty point list: indexing [0]/[-1] below would raise
    # IndexError otherwise
    if len(section_location_array) != 0:
        logging.debug("first element in section_location_array = %s" %
                      section_location_array[0])

        # Fudge the end point so that we don't have a gap because of the ts != write_ts mismatch
        # TODO: Fix this once we are able to query by the data timestamp instead of the metadata ts
        if section_location_array[-1].loc != section.end_loc:
            last_loc_doc = ts.get_entry_at_ts("background/filtered_location",
                                              "data.ts", section.end_ts)
            last_loc_data = ecwe.Entry(last_loc_doc).data
            last_loc_data["_id"] = last_loc_doc["_id"]
            section_location_array.append(last_loc_data)
            logging.debug(
                "Adding new entry %s to fill the end point gap between %s and %s"
                % (last_loc_data.loc, section_location_array[-2].loc,
                   section.end_loc))

    # Find the list of points to filter
    filtered_points_entry_doc = ts.get_entry_at_ts("analysis/smoothing",
                                                   "data.section",
                                                   section.get_id())
    if filtered_points_entry_doc is None:
        logging.debug("No filtered_points_entry, returning unchanged array")
        filtered_section_location_array = section_location_array
    else:
        # TODO: Figure out how to make collections work for the wrappers and then change this to an Entry
        filtered_points_entry = ad.AttrDict(filtered_points_entry_doc)
        filtered_point_list = list(filtered_points_entry.data.deleted_points)
        logging.debug("deleting %s points from section points" %
                      len(filtered_point_list))
        filtered_section_location_array = [
            l for l in section_location_array
            if l.get_id() not in filtered_point_list
        ]

    with_speeds = eaicl.add_dist_heading_speed(
        pd.DataFrame(filtered_section_location_array))
    speeds = list(with_speeds.speed)
    distances = list(with_speeds.distance)

    # Write the computed speed/distance back onto the location wrappers so
    # they show up in the per-point geojson properties
    if len(filtered_section_location_array) != 0:
        for idx, row in with_speeds.iterrows():
            # TODO: Remove instance of setting value without going through wrapper class
            filtered_section_location_array[idx]["speed"] = row["speed"]
            filtered_section_location_array[idx]["distance"] = row["distance"]

    points_feature_array = [
        location_to_geojson(l) for l in filtered_section_location_array
    ]

    points_line_feature = point_array_to_line(filtered_section_location_array)
    # If this is the first section, we already start from the trip start. But we actually need to start from the
    # prior place. Fudge this too. Note also that we may want to figure out how to handle this properly in the model
    # without needing fudging. TODO: Unclear how exactly to do this
    if section.start_stop is None:
        # This is the first section. So we need to find the start place of the parent trip
        parent_trip = tl.get_object(section.trip_id)
        start_place_of_parent_trip = tl.get_object(parent_trip.start_place)
        points_line_feature.geometry.coordinates.insert(
            0, start_place_of_parent_trip.location.coordinates)

    for i, point_feature in enumerate(points_feature_array):
        point_feature.properties["idx"] = i

    points_line_feature.id = str(section.get_id())
    points_line_feature.properties = copy.copy(section)
    points_line_feature.properties["feature_type"] = "section"
    points_line_feature.properties["sensed_mode"] = str(
        points_line_feature.properties.sensed_mode)
    points_line_feature.properties["distance"] = sum(distances)
    points_line_feature.properties["speeds"] = speeds
    points_line_feature.properties["distances"] = distances

    _del_non_derializable(points_line_feature.properties,
                          ["start_loc", "end_loc"])

    feature_array.append(gj.FeatureCollection(points_feature_array))
    feature_array.append(points_line_feature)

    return gj.FeatureCollection(feature_array)
    def get_merge_direction(self, streak_start, streak_end):
        """
        Checks to decide merge direction
        - if either direction is WALKING and speed is greater than 1.4 + slosh then 
            must be the other direction
        - pick direction that is closer to the median speed

        :param streak_start: index of the first motion change in the streak
        :param streak_end: index of the last motion change in the streak
        :return: a MergeResult carrying the chosen Direction
        """
        start_change = self.motion_changes[streak_start]
        end_change = self.motion_changes[streak_end]
        ssm, sem = start_change
        esm, eem = end_change

        if streak_start == 0:
            # There is no before section - only choices are to merge backward
            # or make a new section
            logging.debug(
                "get_merge_direction: at beginning of changes, can only merge backward"
            )
            return MergeResult(Direction.BACKWARD, FinalMode.UNMERGED)

        before_motion = self.motion_changes[streak_start - 1]
        bsm, bem = before_motion

        if streak_end + 1 == len(self.motion_changes):
            # There is no after section - only one way to merge!
            logging.debug(
                "get_merge_direction: at end of changes, can only merge forward"
            )
            return MergeResult(Direction.FORWARD, FinalMode.UNMERGED)

        after_motion = self.motion_changes[streak_end + 1]
        asm, aem = after_motion

        if bsm.type == asm.type:
            logging.debug(
                "before type = %s, after type = %s, merge direction is don't care, returning forward"
                % (bsm.type, asm.type))
            return MergeResult(Direction.FORWARD, FinalMode.UNMERGED)

        # Compute per-point speeds for the current streak and both neighbors
        # so the medians can be compared below
        loc_points = self.seg_method.filter_points_for_range(
            self.seg_method.location_points, ssm, eem)
        loc_points.reset_index(inplace=True)
        with_speed_loc_points = eaicl.add_dist_heading_speed(loc_points)

        points_before = self.seg_method.filter_points_for_range(
            self.seg_method.location_points, bsm, bem)
        points_before.reset_index(inplace=True)
        with_speed_points_before = eaicl.add_dist_heading_speed(points_before)

        points_after = self.seg_method.filter_points_for_range(
            self.seg_method.location_points, asm, aem)
        points_after.reset_index(inplace=True)
        with_speed_points_after = eaicl.add_dist_heading_speed(points_after)

        curr_median_speed = self.get_section_speed(loc_points,
                                                   with_speed_loc_points,
                                                   points_before, points_after)

        # check for walking speed, which is the one constant is a cruel,
        # shifting world where there is no truth
        if (eaid.is_walking_type(asm.type)
                and (not eaid.is_walking_speed(curr_median_speed))):
            logging.debug(
                "after is walking, but speed is %d, merge forward, returning 1"
                % curr_median_speed)
            return MergeResult(Direction.FORWARD, FinalMode.UNMERGED)
        elif (eaid.is_walking_type(bsm.type)
              and (not eaid.is_walking_speed(curr_median_speed))):
            # Bug fix: the format string had a %d placeholder but no argument,
            # so the literal "%d" was logged instead of the speed
            logging.debug(
                "before is walking, but speed is %d, merge backward, returning -1"
                % curr_median_speed)
            return MergeResult(Direction.BACKWARD, FinalMode.UNMERGED)

        logging.debug(
            "while merging, comparing curr speed %s with before %s and after %s"
            % (curr_median_speed, with_speed_points_before.speed.median(),
               with_speed_points_after.speed.median()))
        if (abs(curr_median_speed - with_speed_points_before.speed.median()) <
                abs(curr_median_speed -
                    with_speed_points_after.speed.median())):
            # speed is closer to before than after, merge with before, merge forward
            logging.debug("before is closer, merge forward, returning 1")
            return MergeResult(Direction.FORWARD, FinalMode.UNMERGED)
        else:
            logging.debug("after is closer, merge backward, returning -1")
            return MergeResult(Direction.BACKWARD, FinalMode.UNMERGED)