Example #1
    def test_load_save_JSON(self):
        ride = GpxParser(
            'tests/data/sample_with_stop.gpx').get_ride_from_track()
        p1 = ride.points[42]
        filename = 'tests/data/test.json'
        utils.save_json(filename, p1.to_JSON())

        ok_(os.path.exists(filename))

        js = utils.load_json(filename, GeoPoint)
        eq_(js.lat, p1.lat)

        os.remove(filename)
        ok_(not os.path.exists(filename))
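
The helpers above come from commutemate.utils, whose implementation is not shown in this example. A minimal sketch of what they might look like, assuming to_JSON() returns a JSON string and that the target class can be rebuilt from the parsed dict:

import json

def save_json(filename, json_string):
    # Persist an already serialized JSON string to disk
    with open(filename, 'w') as f:
        f.write(json_string)

def load_json(filename, cls):
    # Read the file back and rebuild an instance of cls from the parsed dict
    with open(filename) as f:
        data = json.load(f)
    obj = cls.__new__(cls)  # hypothetical: bypass __init__ and restore attributes directly
    obj.__dict__.update(data)
    return obj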
Example #2
    def detectstops(self):
        # Getting all gpx files in specified folder
        gpx_files = []
        for f in os.listdir(self.gpx_folder):
            if f.endswith('.gpx'):
                gpx_files.append(os.path.join(self.gpx_folder, f))
        self.l.info("There's %d gpx files to be proccessed. Starting now..." %
                    len(gpx_files))

        self.csv.info(
            "\n--CSV-Ride-data--\nride_file, distance_meters, duration_seconds, moving_time, stop_time, stop_count, stop_duration_list"
        )
        # Detecting Stops and storing Points of Interest
        total = 0
        stop_durations = []
        for gpx in gpx_files:
            ride = GpxParser(gpx).get_ride_from_track(
                self.config.region_ignores)

            stops, ignored_time = detect_stops(
                ride, self.config.stops_cap_durations_at)

            for s in stops:
                utils.save_json(
                    os.path.join(self.workspace_folder, "poi_%s.json" % s.id),
                    s.to_JSON())

            ride.duration -= ignored_time
            stop_duration_list = [s.duration for s in stops]
            stop_durations += stop_duration_list
            stop_time = sum(stop_duration_list)
            stop_count = len(stops)
            moving_time = ride.duration - stop_time
            total += stop_count
            self.csv.info("%s,%8.2f,%5d,%5d,%5d,%3d,%s" %
                          (os.path.basename(gpx), ride.distance, ride.duration,
                           moving_time, stop_time, stop_count, " ".join(
                               map(str, stop_duration_list))))

        self.csv.info("--CSV--\n")
        self.csv.info("\n--CSV-stop-durations--")
        self.csv.info(",".join(map(str, stop_durations)))
        self.csv.info("--CSV--\n")
        self.l.info(
            "Done! There were %d stops detected\nThe data is available at %s" %
            (total, self.workspace_folder))
Example #3
    def generatemetrics(self):
        # Loading ROIs
        json_files = []
        for f in os.listdir(self.workspace_folder):
            if os.path.basename(f).startswith(
                    "roi_" + self.roi_version) and f.endswith('.json'):
                json_files.append(os.path.join(self.workspace_folder, f))

        ROIs = {}
        pattern = re.compile("roi_(\d+_\d+_\d+)\.")
        for jsf in json_files:
            roi = utils.load_json(jsf, RegionOfInterest)
            ROIs[pattern.search(jsf).group(1)] = roi
        self.l.info("Loaded %d ROIs." % len(json_files))
        self.l.info("Generating metrics...")

        obj = Metrics(ROIs, self.workspace_folder).generate()

        output = os.path.join(self.workspace_folder, "metrics.json")
        utils.save_json(output, json.dumps(obj, indent=4))
        self.l.info("Done! The metrics are available at %s" % output)
Example #4
    def detectpasses(self):
        # Getting all gpx files in specified folder
        gpx_files = []
        for f in os.listdir(self.gpx_folder):
            if f.endswith('.gpx'):
                gpx_files.append(os.path.join(self.gpx_folder, f))
        self.l.info("There's %d gpx files to be proccessed." % len(gpx_files))

        # Loading ROIs
        json_files = []
        for f in os.listdir(self.workspace_folder):
            if os.path.basename(f).startswith("roi_") and f.endswith('.json'):
                json_files.append(os.path.join(self.workspace_folder, f))

        ROIs = []
        for jsf in json_files:
            roi = utils.load_json(jsf, RegionOfInterest)
            ROIs.append(roi)
        self.l.info("Loaded %d ROIs." % len(json_files))

        # Detecting Passes and storing Points of Interest
        total = 0
        total_stats = {
            "# ROIs entered": 0,
            "# ROIs stop POI": 0,
            "# ROIs stop without POI": 0,
            "# ROIs pass": 0,
            "# ROIs pass but no cluster": 0,
            "pass_speed_list": []
        }
        self.csv.info(
            "\n--CSV-passes-data--\nride_file, ROI_in, ROI_stop_POI, ROI_stop_no_POI, ROI_pass_outside_cluster, ROI_pass, pass_speed_list"
        )

        for gpx in gpx_files:
            ride = GpxParser(gpx).get_ride_from_track(
                self.config.region_ignores)

            passes, stats = detect_passes(ride, ROIs,
                                          self.config.dbscan_eps_in_meters,
                                          self.config.dbscan_min_samples,
                                          self.workspace_folder)

            for k in stats.keys():
                total_stats[k] += stats[k]
            passes_count = len(passes)
            total += passes_count
            self.csv.info(
                "%s,%3d,%3d,%3d,%3d,%3d,%s" %
                (os.path.basename(gpx), stats["# ROIs entered"],
                 stats["# ROIs stop POI"], stats["# ROIs stop without POI"],
                 stats["# ROIs pass but no cluster"], passes_count, " ".join(
                     map(str, stats['pass_speed_list']))))

            for p in passes:
                utils.save_json(
                    os.path.join(self.workspace_folder, "poi_%s.json" % p.id),
                    p.to_JSON())

        self.csv.info("--CSV--\n")
        self.csv.info("\n--CSV-pass-speeds--")
        self.csv.info(",".join(map(str, total_stats['pass_speed_list'])))
        self.csv.info("--CSV--\n")
        self.l.info(
            "Done! There were %d passes detected\nThe data is available at %s" %
            (total, self.workspace_folder))
Example #5
def detect_passes(ride, ROIs, eps_in_meters, min_samples, workspace_folder):
    import numpy
    import commutemate.clustering as clustering

    stops = []
    on_a_stop = False
    stop_buffer = None
    previous_stop = None

    passes = []
    on_a_roi = False
    pass_buffer = []
    current_roi = None
    previous_pass = None

    stats = {
        "# ROIs entered": 0,
        "# ROIs stop POI": 0,
        "# ROIs stop without POI": 0,
        "# ROIs pass": 0,
        "# ROIs pass but no cluster": 0,
        "pass_speed_list": []
    }

    for p in ride.points[1:]:

        if not current_roi:
            roi = __inside_a_ROI(p, ROIs)
        elif current_roi and utils.is_inside_range(current_roi.center_range,
                                                   p):
            roi = current_roi
        else:
            roi = None

        if roi:
            on_a_roi = True
            if not current_roi:
                stats["# ROIs entered"] += 1
            current_roi = roi

            if p.speed < STOPPED_SPEED_KMH_THRESHOLD:
                on_a_stop = True
                stop_buffer = p
            else:
                pass_buffer.append(p)
        else:
            if on_a_stop:
                poi = PointOfInterest(stop_buffer, PointOfInterest.TYPE_STOP,
                                      ride.origin, ride.destination)
                if current_roi.is_poi_included(poi.id):
                    # Updating POI with previous ROIs' info
                    poi = utils.load_json(
                        os.path.join(workspace_folder, "poi_%s.json" % poi.id),
                        PointOfInterest)
                    poi.previous_stop_ROI = previous_stop.id if previous_stop else None
                    poi.previous_pass_ROI = previous_pass.id if previous_pass else None
                    utils.save_json(
                        os.path.join(workspace_folder, "poi_%s.json" % poi.id),
                        poi.to_JSON())
                    previous_stop = poi
                    stats["# ROIs stop POI"] += 1
                else:
                    stats["# ROIs stop without POI"] += 1

                on_a_stop = False
                stop_buffer = None
                on_a_roi = False
                pass_buffer = []
                current_roi = None

            elif on_a_roi:
                pass_in_cluster = []

                # check which of the points inside the ROI fall within the original cluster
                for ppass in pass_buffer:
                    poi = PointOfInterest(ppass, PointOfInterest.TYPE_PASS,
                                          ride.origin, ride.destination)
                    poi.set_duration(0)
                    poi.set_previous_stop(previous_stop)
                    poi.previous_stop_ROI = previous_stop.id if previous_stop else None
                    poi.previous_pass_ROI = previous_pass.id if previous_pass else None

                    # need to hydrate the ROI so its POIs carry bearing info
                    RegionOfInterest.hydrate_POIs(current_roi,
                                                  workspace_folder)
                    current_roi.set_poi_list([poi], PointOfInterest.TYPE_PASS)
                    POIs = numpy.array(current_roi.get_all_pois())
                    X = numpy.array(current_roi.get_all_poi_coords())

                    # If pass point is not part of stop cluster, this means that the pass is in another direction
                    db = clustering.cluster_with_bearing_weight(
                        POIs, X, eps_in_meters, min_samples)
                    n_clusters_ = len(set(db))

                    if n_clusters_ == 1:
                        pass_in_cluster.append(poi)

                    current_roi.set_poi_list([], PointOfInterest.TYPE_PASS)

                if len(pass_in_cluster) > 0:
                    # we have officially a pass, the crowd goes crazy
                    stats["# ROIs pass"] += 1
                    # use the buffer's mid point as the point for the POI
                    poi = pass_in_cluster[len(pass_in_cluster) // 2]
                    stats["pass_speed_list"].append(poi.point.speed)
                    previous_pass = poi
                    passes.append(poi)
                else:
                    stats["# ROIs pass but no cluster"] += 1

                on_a_stop = False
                stop_buffer = None
                on_a_roi = False
                pass_buffer = []
                current_roi = None

    return passes, stats
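
__inside_a_ROI is referenced above but not defined in this example. A plausible sketch, assuming it returns the first ROI whose center range contains the point (using the same utils.is_inside_range check seen in the loop), or None otherwise:

def __inside_a_ROI(point, ROIs):
    # Hypothetical helper: return the first ROI whose center range contains the point
    for roi in ROIs:
        if utils.is_inside_range(roi.center_range, point):
            return roi
    return None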