Example #1
def get_timeline_from_dt(user_id, place_key, trip_key,
                         start_local_dt, end_local_dt,
                         geojson=None, extra_query_list=None):
    logging.info("About to query for date components %s -> %s" % (start_local_dt, end_local_dt))
    (place_gq, trip_gq) = get_place_trip_geoquery(geojson)
    places_entries = esda.get_entries(place_key, user_id,
                                      esttc.TimeComponentQuery(
                                          "data.enter_local_dt", start_local_dt,
                                          end_local_dt),
                                      geo_query=place_gq,
                                      extra_query_list=extra_query_list)
    trips_entries = esda.get_entries(trip_key, user_id,
                                     esttc.TimeComponentQuery(
                                         "data.start_local_dt", start_local_dt,
                                         end_local_dt),
                                     geo_query=trip_gq,
                                     extra_query_list=extra_query_list)

    for place in places_entries:
        logging.debug("Considering place %s: %s -> %s " %
                      (place.get_id(), place.data.enter_fmt_time, place.data.exit_fmt_time))
    for trip in trips_entries:
        logging.debug("Considering trip %s: %s -> %s " %
                      (trip.get_id(), trip.data.start_fmt_time, trip.data.end_fmt_time))

    return Timeline(place_key, trip_key, places_entries, trips_entries)
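
For context, a minimal usage sketch of the date-component query pattern shown above, issued directly against the timeseries rather than through get_timeline_from_dt. The module import paths and the LocalDate construction are assumptions based on the usual e-mission-server layout, not taken from this snippet.

# Sketch only: assumes an e-mission-server environment with a populated
# timeseries database; replace the placeholder UUID with a real user's id.
import uuid

import emission.core.wrapper.localdate as ecwl   # LocalDate wrapper path is an assumption
import emission.storage.decorations.analysis_timeseries_queries as esda
import emission.storage.timeseries.tcquery as esttc

user_id = uuid.uuid4()  # placeholder
start_local_dt = ecwl.LocalDate({"year": 2016, "month": 1, "day": 1})
end_local_dt = ecwl.LocalDate({"year": 2016, "month": 1, "day": 31})

# Same shape as the trip query inside get_timeline_from_dt: filter on the
# local date components of the trip start.
trip_entries = esda.get_entries(esda.CLEANED_TRIP_KEY, user_id,
                                esttc.TimeComponentQuery("data.start_local_dt",
                                                         start_local_dt,
                                                         end_local_dt))
for trip in trip_entries:
    print(trip.get_id(), trip.data.start_fmt_time, trip.data.end_fmt_time)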
Example #2
def get_timeline_from_dt(user_id,
                         place_key,
                         trip_key,
                         start_local_dt,
                         end_local_dt,
                         geojson=None,
                         extra_query_list=None):
    logging.info("About to query for date components %s -> %s" %
                 (start_local_dt, end_local_dt))
    (place_gq, trip_gq) = get_place_trip_geoquery(geojson)
    places_entries = esda.get_entries(place_key,
                                      user_id,
                                      esttc.TimeComponentQuery(
                                          "data.enter_local_dt",
                                          start_local_dt, end_local_dt),
                                      geo_query=place_gq,
                                      extra_query_list=extra_query_list)
    trips_entries = esda.get_entries(trip_key,
                                     user_id,
                                     esttc.TimeComponentQuery(
                                         "data.start_local_dt", start_local_dt,
                                         end_local_dt),
                                     geo_query=trip_gq,
                                     extra_query_list=extra_query_list)

    for place in places_entries:
        logging.debug("Considering place %s: %s -> %s " %
                      (place.get_id(), place.data.enter_fmt_time,
                       place.data.exit_fmt_time))
    for trip in trips_entries:
        logging.debug(
            "Considering trip %s: %s -> %s " %
            (trip.get_id(), trip.data.start_fmt_time, trip.data.end_fmt_time))

    return Timeline(place_key, trip_key, places_entries, trips_entries)
Example #3
def get_timeline(user_id,
                 place_key,
                 trip_key,
                 untracked_key,
                 start_ts,
                 end_ts,
                 geojson=None,
                 extra_query_list=None):
    logging.info("About to query for timestamps %s -> %s" % (start_ts, end_ts))
    """
    Return a timeline of the trips and places from this start timestamp to this end timestamp.
    Note that each place and each trip has *two* associated timestamps, so we need to define which trips need to be
    returned. Right now, we define this as all places that are entered and all trips that are started within the
    specified time frame. Note that this means that, by definition, this may not include the starting and ending places
    for all trips, which is something that we need for our visualization. But we don't want the timeline to be
    visualization specific.
    Let's compromise by adding method to fill in start and end places which we will call if the timeline is used
    for visualization and not if not. This also means that we can use the id map to avoid duplicates in case the
    place does exist.
    :param user_id: the user whose timeline we are considering
    :param start_ts: the starting timestamp. we will include all places and trips that start after this.
    :param end_ts: the ending timestamp. we will include all places and trips that end after this.
    :return: a timeline object
    """
    (place_gq, trip_gq) = get_place_trip_geoquery(geojson)
    places_entries = esda.get_entries(place_key,
                                      user_id=user_id,
                                      time_query=estt.TimeQuery(
                                          "data.enter_ts", start_ts, end_ts),
                                      geo_query=place_gq,
                                      extra_query_list=extra_query_list)
    trips_entries = esda.get_entries(trip_key,
                                     user_id=user_id,
                                     untracked_key=untracked_key,
                                     time_query=estt.TimeQuery(
                                         "data.start_ts", start_ts, end_ts),
                                     geo_query=trip_gq,
                                     extra_query_list=extra_query_list)
    for place in places_entries:
        logging.debug("Considering place %s: %s -> %s " %
                      (place.get_id(), place.data.enter_fmt_time,
                       place.data.exit_fmt_time))
    for trip in trips_entries:
        logging.debug(
            "Considering trip %s: %s -> %s " %
            (trip.get_id(), trip.data.start_fmt_time, trip.data.end_fmt_time))

    return Timeline(place_key, trip_key, places_entries, trips_entries)
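
The epoch-timestamp variant of the same lookup, mirroring the trips query inside get_timeline above; a sketch under the assumption that the usual e-mission module aliases are importable.

# Sketch only: pulls cleaned trips from the last week of data for one user.
import time
import uuid

import emission.storage.decorations.analysis_timeseries_queries as esda
import emission.storage.timeseries.timequery as estt

user_id = uuid.uuid4()  # placeholder: substitute a real user's UUID
end_ts = time.time()
start_ts = end_ts - 7 * 24 * 60 * 60

trips_entries = esda.get_entries(esda.CLEANED_TRIP_KEY, user_id=user_id,
                                 time_query=estt.TimeQuery("data.start_ts",
                                                           start_ts, end_ts))
print("retrieved %d trips" % len(trips_entries))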
Example #4
    def runPredictionPipeline(self, user_id, timerange):
        self.ts = esta.TimeSeries.get_time_series(user_id)
        self.toPredictSections = esda.get_entries(esda.CLEANED_SECTION_KEY,
                                                  user_id,
                                                  time_query=timerange)
        if (len(self.toPredictSections) == 0):
            logging.debug("len(toPredictSections) == 0, early return")
            if self.last_section_done is not None:
                logging.error("self.last_section_done == %s, expecting None" %
                              self.last_section_done)
                if eac.get_config()["classification.validityAssertions"]:
                    assert False
            return None

        self.loadModelStage()
        logging.info("loadModelStage DONE")
        self.selFeatureIndices = self.selectFeatureIndicesStep()
        logging.info("selectFeatureIndicesStep DONE")
        (self.toPredictFeatureMatrix, self.tripIds, self.sectionIds) = \
            self.generateFeatureMatrixAndIDsStep(self.toPredictSections)
        logging.info("generateFeatureMatrixAndIDsStep DONE")
        self.predictedProb = self.predictModesStep()
        #This is a matrix of the entries and their corresponding probabilities for each classification
        logging.info("predictModesStep DONE")
        self.savePredictionsStep()
        logging.info("savePredictionsStep DONE")
Example #5
def read_data(uuid=None, size=None, old=True):
    db = edb.get_trip_db()
    if not old:
        logging.debug("not old")
        trips = esda.get_entries(esda.RAW_TRIP_KEY,
                                 uuid,
                                 time_query=None,
                                 geo_query=None)
        return trips

    if old:
        data = []
        trip_db = db
        if uuid:
            trips = trip_db.find({'user_id': uuid, 'type': 'move'})
        else:
            trips = trip_db.find({'type': 'move'})
        for t in trips:
            try:
                trip = Trip.trip_from_json(t)
            except:
                continue
            if not (trip.trip_start_location and trip.trip_end_location
                    and trip.start_time):
                continue
            data.append(trip)
            if size:
                if len(data) == size:
                    break
        return data
Example #6
def read_data(uuid=None):
    trips = esda.get_entries(esda.CLEANED_TRIP_KEY,
                             uuid,
                             time_query=None,
                             geo_query=None)
    logging.info("After reading data, returning %s trips" % len(trips))
    return trips
Example #7
def read_data(uuid=None, size=None, old=True):
    db = edb.get_trip_db()
    if not old:
        logging.debug("not old")
        trips = esda.get_entries(esda.RAW_TRIP_KEY, uuid,
                                 time_query=None, geo_query=None)
        return trips

    if old:
        data = []
        trip_db = db
        if uuid:
            trips = trip_db.find({'user_id' : uuid, 'type' : 'move'})
        else:
            trips = trip_db.find({'type' : 'move'})
        for t in trips:
            try: 
                trip = Trip.trip_from_json(t)
            except:
                continue
            if not (trip.trip_start_location and trip.trip_end_location and trip.start_time):
                continue
            data.append(trip)
            if size:
                if len(data) == size:
                    break
        return data
Example #8
def count_query():
    edb.pm_address = request.json['pm_address']
    # Dummy id used as a placeholder. It must be consistent for each user but
    # otherwise doesn't matter. An optimization would remove all instances of user_uuid.
    user_uuid = request.json['uuid']
    query = request.json['query']
    query_obj = saq.AE(1)
    cost = query_obj.generate_diff_priv_cost(query['alpha'], query['offset'])

    # Try to deduct the query cost from the privacy budget
    available_budget = safmt.deduct_budget(edb.pm_address, cost)
    if not available_budget:
        # Query could not complete, no budget remaining
        return {"success": False}

    start_time = query['start_ts']
    end_time = query['end_ts']
    time_query = estt.TimeQuery("data.ts", start_time, end_time)
    region = query['sel_region']
    if region is None:
        geo_query = None
    else:
        geo_query = estg.GeoQuery(["data.loc"], region)

    loc_entry_list = esda.get_entries(esda.CLEANED_LOCATION_KEY, user_uuid,
                                      time_query=time_query, geo_query=geo_query)
    convert_objectid_to_string(loc_entry_list)
    if len(loc_entry_list) > 0:
        ret_val = 1
    else:
        ret_val = 0
    return {"success" : True, "results": ret_val}
Example #9
def create_confirmed_trips(user_id, timerange):
    ts = esta.TimeSeries.get_time_series(user_id)
    toConfirmTrips = esda.get_entries(esda.CLEANED_TRIP_KEY, user_id,
        time_query=timerange)
    logging.debug("Converting %d cleaned trips to confirmed ones" % len(toConfirmTrips))
    lastTripProcessed = None
    if len(toConfirmTrips) == 0:
        logging.debug("len(toConfirmTrips) == 0, early return")
        return None
    input_key_list = eac.get_config()["userinput.keylist"]
    for tct in toConfirmTrips:
        # Copy the trip and fill in the new values
        confirmed_trip_dict = copy.copy(tct)
        del confirmed_trip_dict["_id"]
        confirmed_trip_dict["metadata"]["key"] = "analysis/confirmed_trip"
        confirmed_trip_dict["data"]["cleaned_trip"] = tct.get_id()
        confirmed_trip_dict["data"]["user_input"] = \
            get_user_input_dict(ts, tct, input_key_list)
        confirmed_trip_entry = ecwe.Entry(confirmed_trip_dict)
        # save the entry
        ts.insert(confirmed_trip_entry)
        # if everything is successful, then update the last successful trip
        lastTripProcessed = tct

    return lastTripProcessed
Example #10
def incident_heatmap(user_uuid, modes, time_query, region):
    """
    Return a list of geojson points with properties for the time and the stress level
    related to incidents. This should not return full entries because that can
    expose the user_id in the aggregate case. Maybe it can return the data part only?
    Or should we put the other entries into the properties?
    :param modes: The modes that we want to query for
    :param time_query: The time query, in either local date or timestamp
    :param region: The region of interest
    :return: list of `incident` objects, with all metadata stripped out
    """

    if region is None:
        geo_query = None
    else:
        geo_query = estg.GeoQuery(["data.loc"], region)

    extra_query_list = []
    if modes is not None:
        mode_enum_list = [ecwm.MotionTypes[mode] for mode in modes]
        extra_query_list.append(esdlq.get_mode_query(mode_enum_list))

    if user_uuid is None:
        incident_entry_list = esda.get_entries(MANUAL_INCIDENT_KEY, user_id=None,
                                          time_query=time_query, geo_query=geo_query,
                                          extra_query_list=extra_query_list)
    else:
        # We don't support aggregate queries on the usercache. And that is
        # actually fine, because we don't expect immediate results for the
        # aggregate case. We just want to query the usercache to ensure that
        # the incidents don't magically disappear just because they got pushed
        # to the server but are not yet processed
        incident_entry_list = estc.find_entries([MANUAL_INCIDENT_KEY], time_query)
    return {"incidents": [e.data for e in incident_entry_list]}
Example #11
def range_mode_heatmap(mode, start_ts, end_ts):
    start_dt = esdl.get_local_date(start_ts, "UTC")
    end_dt = esdl.get_local_date(end_ts, "UTC")
    time_query = esttc.TimeComponentQuery("data.ts", start_dt, end_dt)
    loc_entry_list = esda.get_entries(esda.CLEANED_LOCATION_KEY, user_id=None,
                                      time_query=time_query, geo_query=None,
                                      extra_query_list=[esdlq.get_mode_query(mode)])
    return {"latlng": [e.data.loc.coordinates for e in loc_entry_list]}
Example #12
    def testSegmentationWrapperIOS(self):
        eaist.segment_current_trips(self.iosUUID)
        # The previous line should have created places and trips and stored
        # them into the database. Now, we want to query to ensure that they
        # were created correctly.
        tq_place = estt.TimeQuery("data.enter_ts", 1446796800, 1446847600)
        created_places_entries = esda.get_entries(esda.RAW_PLACE_KEY,
                                                  self.iosUUID, tq_place)

        tq_trip = estt.TimeQuery("data.start_ts", 1446796800, 1446847600)
        created_trips_entries = esda.get_entries(esda.RAW_TRIP_KEY,
                                                 self.iosUUID, tq_trip)

        for i, place in enumerate(created_places_entries):
            logging.debug(
                "Retrieved places %s: %s -> %s" %
                (i, place.data.enter_fmt_time, place.data.exit_fmt_time))
        for i, trip in enumerate(created_trips_entries):
            logging.debug(
                "Retrieved trips %s: %s -> %s" %
                (i, trip.data.start_fmt_time, trip.data.end_fmt_time))

        # We expect there to be 4 places, but the first one is the start of
        # the chain, so it has a start_time of None and it won't be retrieved
        # by the query on the start_time that we show here.
        self.assertEqual(len(created_places_entries), 2)
        self.assertEqual(len(created_trips_entries), 2)

        # Pick the first two trips and the first place and ensure that they are all linked correctly
        # Note that this is the first place, not the second place because the true first place will not
        # be retrieved by the query, as shown above
        # The first trip here is a dummy trip, so let's check the second and third trip instead
        trip0 = created_trips_entries[0]
        trip1 = created_trips_entries[1]
        place0 = created_places_entries[0]

        self.assertEqual(trip0.data.end_place, place0.get_id())
        self.assertEqual(trip1.data.start_place, place0.get_id())
        self.assertEqual(place0.data.ending_trip, trip0.get_id())
        self.assertEqual(place0.data.starting_trip, trip1.get_id())

        self.assertEqual(round(trip0.data.duration), 14 * 60 + 41)
        self.assertEqual(round(trip1.data.duration),
                         1 * 60 * 60 + 50 * 60 + 56)

        self.assertIsNotNone(place0.data.location)
Example #13
def range_mode_heatmap(mode, start_ts, end_ts):
    start_dt = esdl.get_local_date(start_ts, "UTC")
    end_dt = esdl.get_local_date(end_ts, "UTC")
    time_query = esttc.TimeComponentQuery("data.ts", start_dt, end_dt)
    loc_entry_list = esda.get_entries(
        esda.CLEANED_LOCATION_KEY,
        user_id=None,
        time_query=time_query,
        geo_query=None,
        extra_query_list=[esdlq.get_mode_query(mode)])
    return {"latlng": [e.data.loc.coordinates for e in loc_entry_list]}
Example #14
    def testSegmentationWrapperIOS(self):
        eaist.segment_current_trips(self.iosUUID)
        # The previous line should have created places and trips and stored
        # them into the database. Now, we want to query to ensure that they
        # were created correctly.
        tq_place = estt.TimeQuery("data.enter_ts", 1446796800, 1446847600)
        created_places_entries = esda.get_entries(esda.RAW_PLACE_KEY,
                                                  self.iosUUID, tq_place)

        tq_trip = estt.TimeQuery("data.start_ts", 1446796800, 1446847600)
        created_trips_entries = esda.get_entries(esda.RAW_TRIP_KEY,
                                                 self.iosUUID, tq_trip)

        for i, place in enumerate(created_places_entries):
            logging.debug("Retrieved places %s: %s -> %s" % (i, place.data.enter_fmt_time, place.data.exit_fmt_time))
        for i, trip in enumerate(created_trips_entries):
            logging.debug("Retrieved trips %s: %s -> %s" % (i, trip.data.start_fmt_time, trip.data.end_fmt_time))

        # We expect there to be 4 places, but the first one is the start of
        # the chain, so it has a start_time of None and it won't be retrieved
        # by the query on the start_time that we show here.
        self.assertEqual(len(created_places_entries), 3)
        self.assertEqual(len(created_trips_entries), 3)

        # Pick the first two trips and the first place and ensure that they are all linked correctly
        # Note that this is the first place, not the second place because the true first place will not
        # be retrieved by the query, as shown above
        # The first trip here is a dummy trip, so let's check the second and third trip instead
        trip0 = created_trips_entries[1]
        trip1 = created_trips_entries[2]
        place0 = created_places_entries[1]

        self.assertEqual(trip0.data.end_place, place0.get_id())
        self.assertEqual(trip1.data.start_place, place0.get_id())
        self.assertEqual(place0.data.ending_trip, trip0.get_id())
        self.assertEqual(place0.data.starting_trip, trip1.get_id())

        self.assertEqual(round(trip0.data.duration), 58 * 60 + 51)
        self.assertEqual(round(trip1.data.duration), 38 * 60 + 57)

        self.assertIsNotNone(place0.data.location)
Example #15
def get_timeline(user_id, place_key, trip_key, untracked_key, start_ts, end_ts,
                 geojson=None, extra_query_list=None):
    logging.info("About to query for timestamps %s -> %s" % (start_ts, end_ts))
    """
    Return a timeline of the trips and places from this start timestamp to this end timestamp.
    Note that each place and each trip has *two* associated timestamps, so we need to define which trips need to be
    returned. Right now, we define this as all places that are entered and all trips that are started within the
    specified time frame. Note that this means that, by definition, this may not include the starting and ending places
    for all trips, which is something that we need for our visualization. But we don't want the timeline to be
    visualization specific.
    Let's compromise by adding method to fill in start and end places which we will call if the timeline is used
    for visualization and not if not. This also means that we can use the id map to avoid duplicates in case the
    place does exist.
    :param user_id: the user whose timeline we are considering
    :param start_ts: the starting timestamp. we will include all places and trips that start after this.
    :param end_ts: the ending timestamp. we will include all places and trips that end after this.
    :return: a timeline object
    """
    (place_gq, trip_gq) = get_place_trip_geoquery(geojson)
    places_entries = esda.get_entries(place_key, user_id=user_id,
                                      time_query=estt.TimeQuery("data.enter_ts",
                                                                start_ts,
                                                                end_ts),
                                      geo_query=place_gq,
                                      extra_query_list=extra_query_list)
    trips_entries = esda.get_entries(trip_key, user_id=user_id,
                                     untracked_key=untracked_key,
                                     time_query=estt.TimeQuery("data.start_ts",
                                                               start_ts,
                                                               end_ts),
                                     geo_query=trip_gq,
                                     extra_query_list=extra_query_list)
    for place in places_entries:
        logging.debug("Considering place %s: %s -> %s " % (place.get_id(),
                        place.data.enter_fmt_time, place.data.exit_fmt_time))
    for trip in trips_entries:
        logging.debug("Considering trip %s: %s -> %s " % (trip.get_id(),
                        trip.data.start_fmt_time, trip.data.end_fmt_time))

    return Timeline(place_key, trip_key, places_entries, trips_entries)
Example #16
  def testGenerateFeatureMatrixAndIds(self):
    self.testSelectFeatureIndicesStep()

    self.pipeline.user_id = self.testUUID
    self.pipeline.ts = esta.TimeSeries.get_time_series(self.testUUID)
    self.pipeline.toPredictSections = esda.get_entries(esda.CLEANED_SECTION_KEY, self.testUUID, 
        time_query=None)
    (self.pipeline.toPredictFeatureMatrix,
        self.pipeline.tripIds,
        self.pipeline.sectionIds) = \
        self.pipeline.generateFeatureMatrixAndIDsStep(self.pipeline.toPredictSections)
    self.assertEqual(self.pipeline.toPredictFeatureMatrix.shape[0], len(self.pipeline.sectionIds))
    self.assertEqual(self.pipeline.toPredictFeatureMatrix.shape[0], len(self.pipeline.tripIds))
    self.assertEqual(self.pipeline.toPredictFeatureMatrix.shape[1], len(self.pipeline.selFeatureIndices))
    self.assertEqual(self.pipeline.toPredictFeatureMatrix.shape[0], len(self.pipeline.toPredictSections))
Example #17
def range_mode_heatmap(user_uuid, modes, time_query, region):

    if region is None:
        geo_query = None
    else:
        geo_query = estg.GeoQuery(["data.loc"], region)

    extra_query_list = []
    if modes is not None:
        mode_enum_list = [ecwm.MotionTypes[mode] for mode in modes]
        extra_query_list.append(esdlq.get_mode_query(mode_enum_list))

    loc_entry_list = esda.get_entries(esda.CLEANED_LOCATION_KEY, user_id=user_uuid,
                                      time_query=time_query, geo_query=geo_query,
                                      extra_query_list=extra_query_list)
    return {"lnglat": [e.data.loc.coordinates for e in loc_entry_list]}
Example #18
def segment_current_sections(user_id):
    time_query = epq.get_time_range_for_sectioning(user_id)
    try:
        trips_to_process = esda.get_entries(esda.RAW_TRIP_KEY, user_id, time_query)
        for trip in trips_to_process:
            logging.info("+" * 20 + ("Processing trip %s for user %s" % (trip.get_id(), user_id)) + "+" * 20)
            segment_trip_into_sections(user_id, trip.get_id(), trip.data.source)
        if len(trips_to_process) == 0:
            # Didn't process anything new so start at the same point next time
            last_trip_processed = None
        else:    
            last_trip_processed = trips_to_process[-1]
        epq.mark_sectioning_done(user_id, last_trip_processed)
    except:
        logging.exception("Sectioning failed for user %s" % user_id)
        epq.mark_sectioning_failed(user_id)
Example #20
def Berkeley_pop_route(start_ts, end_ts):
    berkeley_json  = {"geometry": {
      "type": "Polygon",
      "coordinates": [[
        [-122.267443, 37.864693], [-122.267443, 37.880687], [-122.250985, 37.880687], [-122.250985, 37.864693], [-122.267443, 37.864693]
        ]]
      }
    }
    # box = [ [-122.267443, 37.864693], [-122.250985, 37.880687] ]
    start_dt = esdl.get_local_date(start_ts, "UTC")
    end_dt = esdl.get_local_date(end_ts, "UTC")
    time_query = esttc.TimeComponentQuery("data.ts", start_dt, end_dt)
    geo_query = estg.GeoQuery(["data.loc"], berkeley_json)
    loc_entry_list = esda.get_entries(esda.CLEANED_LOCATION_KEY, user_id=None,
                                      time_query=time_query,
                                      geo_query=geo_query)
    return {"latlng": [e.data.loc.coordinates for e in loc_entry_list]}
Example #21
def filter_current_sections(user_id):
    time_query = epq.get_time_range_for_smoothing(user_id)
    try:
        sections_to_process = esda.get_entries(esda.RAW_SECTION_KEY, user_id,
                                               time_query)
        for section in sections_to_process:
            logging.info("^" * 20 + ("Smoothing section %s for user %s" % (section.get_id(), user_id)) + "^" * 20)
            filter_jumps(user_id, section.get_id())
        if len(sections_to_process) == 0:
            # Didn't process anything new so start at the same point next time
            last_section_processed = None
        else:    
            last_section_processed = sections_to_process[-1]
        epq.mark_smoothing_done(user_id, last_section_processed)
    except:
        logging.exception("Marking smoothing as failed")
        epq.mark_smoothing_failed(user_id)
Example #22
def range_mode_heatmap(modes, from_ld, to_ld, region):
    time_query = esttc.TimeComponentQuery("data.local_dt", from_ld, to_ld)

    if region is None:
        geo_query = None
    else:
        geo_query = estg.GeoQuery(["data.loc"], region)

    if modes is None:
        extra_query_list = None
    else:
        mode_enum_list = [ecwm.MotionTypes[mode] for mode in modes]
        extra_query_list = [esdlq.get_mode_query(mode_enum_list)]

    loc_entry_list = esda.get_entries(esda.CLEANED_LOCATION_KEY, user_id=None,
                                      time_query=time_query, geo_query=geo_query,
                                      extra_query_list=extra_query_list)
    return {"lnglat": [e.data.loc.coordinates for e in loc_entry_list]}
Example #23
def range_mode_heatmap(user_uuid, modes, time_query, region):

    if region is None:
        geo_query = None
    else:
        geo_query = estg.GeoQuery(["data.loc"], region)

    extra_query_list = []
    if modes is not None:
        mode_enum_list = [ecwm.MotionTypes[mode] for mode in modes]
        extra_query_list.append(esdlq.get_mode_query(mode_enum_list))

    loc_entry_list = esda.get_entries(esda.CLEANED_LOCATION_KEY,
                                      user_id=user_uuid,
                                      time_query=time_query,
                                      geo_query=geo_query,
                                      extra_query_list=extra_query_list)
    return {"lnglat": [e.data.loc.coordinates for e in loc_entry_list]}
Example #24
def filter_current_sections(user_id):
    time_query = epq.get_time_range_for_smoothing(user_id)
    try:
        sections_to_process = esda.get_entries(esda.RAW_SECTION_KEY, user_id,
                                               time_query)
        for section in sections_to_process:
            logging.info("^" * 20 + ("Smoothing section %s for user %s" %
                                     (section.get_id(), user_id)) + "^" * 20)
            filter_jumps(user_id, section.get_id())
        if len(sections_to_process) == 0:
            # Didn't process anything new so start at the same point next time
            last_section_processed = None
        else:
            last_section_processed = sections_to_process[-1]
        epq.mark_smoothing_done(user_id, last_section_processed)
    except:
        logging.exception("Marking smoothing as failed")
        epq.mark_smoothing_failed(user_id)
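
segment_current_sections and filter_current_sections above share the same pipeline-stage contract: fetch the pending time range, process each entry returned by esda.get_entries, then record the last processed entry (or mark the stage failed on any exception). A compressed sketch of that contract follows; run_one_entry is a hypothetical per-entry callback and the import paths are assumptions.

# Sketch of the stage pattern shared by the sectioning/smoothing stages above.
import logging

import emission.storage.decorations.analysis_timeseries_queries as esda
import emission.storage.pipeline_queries as epq

def run_smoothing_stage(user_id, run_one_entry):
    # run_one_entry is a hypothetical per-entry processing callback.
    time_query = epq.get_time_range_for_smoothing(user_id)
    try:
        entries = esda.get_entries(esda.RAW_SECTION_KEY, user_id, time_query)
        for entry in entries:
            run_one_entry(user_id, entry.get_id())
        # None => nothing new was processed, so the next run resumes here.
        last_processed = entries[-1] if len(entries) > 0 else None
        epq.mark_smoothing_done(user_id, last_processed)
    except Exception:
        logging.exception("Stage failed for user %s" % user_id)
        epq.mark_smoothing_failed(user_id)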
Example #25
    def runPredictionPipeline(self, user_id, timerange):
        self.ts = esta.TimeSeries.get_time_series(user_id)
        self.toPredictSections = esda.get_entries(esda.CLEANED_SECTION_KEY,
                                                  user_id,
                                                  time_query=timerange)
        if (len(self.toPredictSections) == 0):
            logging.debug("len(toPredictSections) == 0, early return")
            if self.last_section_done is not None:
                logging.error("self.last_section_done == %s, expecting None" %
                              self.last_section_done)
                if eac.get_config()["classification.validityAssertions"]:
                    assert False
            return None

        self.predictedProb = self.predictModesStep()
        logging.info("predictModesStep DONE")
        self.savePredictionsStep()
        logging.info("savePredictionsStep DONE")
Example #26
    def testIOSSegmentationWrapperWithAutoTrip(self):
        eaist.segment_current_trips(self.iosUUID)
        eaiss.segment_current_sections(self.iosUUID)

        tq_trip = estt.TimeQuery("data.start_ts", 1446700000, 1446900000)
        created_trips = esda.get_entries(esda.RAW_TRIP_KEY, self.iosUUID,
                                         tq_trip)

        self.assertEqual(len(created_trips), 2)
        logging.debug("created trips = %s" % created_trips)

        sections_stops = [(len(esdt.get_raw_sections_for_trip(self.iosUUID, trip.get_id())),
                           len(esdt.get_raw_stops_for_trip(self.iosUUID, trip.get_id())))
                          for trip in created_trips]
        logging.debug(sections_stops)
        self.assertEqual(len(sections_stops), len(created_trips))
        # The expected value was copy-pasted from the debug statement above
        self.assertEqual(sections_stops,
                         [(0, 0), (11, 10)])
Example #28
    def testGenerateFeatureMatrixAndIds(self):
        self.testSelectFeatureIndicesStep()

        self.pipeline.user_id = self.testUUID
        self.pipeline.ts = esta.TimeSeries.get_time_series(self.testUUID)
        self.pipeline.toPredictSections = esda.get_entries(
            esda.CLEANED_SECTION_KEY, self.testUUID, time_query=None)
        (self.pipeline.toPredictFeatureMatrix,
            self.pipeline.tripIds,
            self.pipeline.sectionIds) = \
            self.pipeline.generateFeatureMatrixAndIDsStep(self.pipeline.toPredictSections)
        self.assertEqual(self.pipeline.toPredictFeatureMatrix.shape[0],
                         len(self.pipeline.sectionIds))
        self.assertEqual(self.pipeline.toPredictFeatureMatrix.shape[0],
                         len(self.pipeline.tripIds))
        self.assertEqual(self.pipeline.toPredictFeatureMatrix.shape[1],
                         len(self.pipeline.selFeatureIndices))
        self.assertEqual(self.pipeline.toPredictFeatureMatrix.shape[0],
                         len(self.pipeline.toPredictSections))
Example #29
    def testSegmentationWrapperWithAutoTrip(self):
        eaist.segment_current_trips(self.androidUUID)
        eaiss.segment_current_sections(self.androidUUID)

        tq_trip = estt.TimeQuery("data.start_ts", 1440658800, 1440745200)
        created_trips = esda.get_entries(esda.RAW_TRIP_KEY, self.androidUUID,
                                         tq_trip)

        self.assertEqual(len(created_trips), 8)

        sections_stops = [(len(esdt.get_raw_sections_for_trip(self.androidUUID, trip.get_id())),
                           len(esdt.get_raw_stops_for_trip(self.androidUUID, trip.get_id())))
                          for trip in created_trips]
        logging.debug(sections_stops)
        self.assertEqual(len(sections_stops), len(created_trips))
        # The expected value was copy-pasted from the debug statement above
        self.assertEqual(sections_stops,
                         [(2, 1), (1, 0), (2, 1), (2, 1), (1, 0), (2, 1),
                          (4, 3), (2, 1)])
Example #30
    def testSegmentationWrapperWithAutoTrip(self):
        eaist.segment_current_trips(self.androidUUID)
        eaiss.segment_current_sections(self.androidUUID)

        tq_trip = estt.TimeQuery("data.start_ts", 1440658800, 1440745200)
        created_trips = esda.get_entries(esda.RAW_TRIP_KEY, self.androidUUID,
                                         tq_trip)

        self.assertEqual(len(created_trips), 8)

        sections_stops = [(len(esdt.get_raw_sections_for_trip(self.androidUUID, trip.get_id())),
                           len(esdt.get_raw_stops_for_trip(self.androidUUID, trip.get_id())))
                          for trip in created_trips]
        logging.debug(sections_stops)
        self.assertEqual(len(sections_stops), len(created_trips))
        # The expected value was copy-pasted from the debug statement above
        self.assertEqual(sections_stops,
                         [(2, 1), (1, 0), (2, 1), (2, 1), (1, 0), (2, 1),
                          (4, 3), (2, 1)])
Example #31
def range_mode_heatmap(modes, from_ld, to_ld, region):
    time_query = esttc.TimeComponentQuery("data.local_dt", from_ld, to_ld)

    if region is None:
        geo_query = None
    else:
        geo_query = estg.GeoQuery(["data.loc"], region)

    if modes is None:
        extra_query_list = None
    else:
        mode_enum_list = [ecwm.MotionTypes[mode] for mode in modes]
        extra_query_list = [esdlq.get_mode_query(mode_enum_list)]

    loc_entry_list = esda.get_entries(esda.CLEANED_LOCATION_KEY,
                                      user_id=None,
                                      time_query=time_query,
                                      geo_query=geo_query,
                                      extra_query_list=extra_query_list)
    return {"lnglat": [e.data.loc.coordinates for e in loc_entry_list]}
Example #32
    def testSegmentationWrapperWithAutoTrip(self):
        eaist.segment_current_trips(self.testUUID)
        eaiss.segment_current_sections(self.testUUID)

        tq_trip = estt.TimeQuery("data.start_ts", 1440658800, 1440745200)
        created_trips = esda.get_entries(esda.RAW_TRIP_KEY, self.testUUID,
                                         tq_trip)

        for i, trip in enumerate(created_trips):
            logging.debug("current trip is %s" % trip)
            created_stops = esdt.get_raw_stops_for_trip(self.testUUID, trip.get_id())
            created_sections = esdt.get_raw_sections_for_trip(self.testUUID, trip.get_id())

            for j, stop in enumerate(created_stops):
                logging.info("Retrieved stops %s: %s -> %s" %
                             (j, stop.data.enter_fmt_time,
                              stop.data.exit_fmt_time))
            for j, section in enumerate(created_sections):
                logging.info("Retrieved sections %s: %s -> %s" %
                             (j, section.data.start_fmt_time,
                              section.data.end_fmt_time))
Example #33
def Berkeley_pop_route(start_ts, end_ts):
    berkeley_json = {
        "geometry": {
            "type":
            "Polygon",
            "coordinates":
            [[[-122.267443, 37.864693], [-122.267443, 37.880687],
              [-122.250985, 37.880687], [-122.250985, 37.864693],
              [-122.267443, 37.864693]]]
        }
    }
    # box = [ [-122.267443, 37.864693], [-122.250985, 37.880687] ]
    start_dt = esdl.get_local_date(start_ts, "UTC")
    end_dt = esdl.get_local_date(end_ts, "UTC")
    time_query = esttc.TimeComponentQuery("data.ts", start_dt, end_dt)
    geo_query = estg.GeoQuery(["data.loc"], berkeley_json)
    loc_entry_list = esda.get_entries(esda.CLEANED_LOCATION_KEY,
                                      user_id=None,
                                      time_query=time_query,
                                      geo_query=geo_query)
    return {"lnglat": [e.data.loc.coordinates for e in loc_entry_list]}
Example #34
  def runPredictionPipeline(self, user_id, timerange):
    self.ts = esta.TimeSeries.get_time_series(user_id)
    self.toPredictSections = esda.get_entries(esda.CLEANED_SECTION_KEY, user_id, 
        time_query=timerange)
    if (len(self.toPredictSections) == 0):
        logging.debug("len(toPredictSections) == 0, early return")
        assert self.last_section_done is None, ("self.last_section_done == %s, expecting None" % \
            self.last_section_done)
        return None

    self.loadModelStage()
    logging.info("loadModelStage DONE")
    self.selFeatureIndices = self.selectFeatureIndicesStep()
    logging.info("selectFeatureIndicesStep DONE")
    (self.toPredictFeatureMatrix, self.tripIds, self.sectionIds) = \
        self.generateFeatureMatrixAndIDsStep(self.toPredictSections)
    logging.info("generateFeatureMatrixAndIDsStep DONE")
    self.predictedProb = self.predictModesStep()
    #This is a matrix of the entries and their corresponding probabilities for each classification
    logging.info("predictModesStep DONE")
    self.savePredictionsStep()
    logging.info("savePredictionsStep DONE")
Example #35
def incident_heatmap(user_uuid, modes, time_query, region):
    """
    Return a list of geojson points with properties for the time and the stress level
    related to incidents. This should not return full entries because that can
    expose the user_id in the aggregate case. Maybe it can return the data part only?
    Or should we put the other entries into the properties?
    :param modes: The modes that we want to query for
    :param time_query: The time query, in either local date or timestamp
    :param region: The region of interest
    :return: list of `incident` objects, with all metadata stripped out
    """

    if region is None:
        geo_query = None
    else:
        geo_query = estg.GeoQuery(["data.loc"], region)

    extra_query_list = []
    if modes is not None:
        mode_enum_list = [ecwm.MotionTypes[mode] for mode in modes]
        extra_query_list.append(esdlq.get_mode_query(mode_enum_list))

    if user_uuid is None:
        incident_entry_list = esda.get_entries(
            MANUAL_INCIDENT_KEY,
            user_id=None,
            time_query=time_query,
            geo_query=geo_query,
            extra_query_list=extra_query_list)
    else:
        # We don't support aggregate queries on the usercache. And that is
        # actually fine, because we don't expect immediate results for the
        # aggregate case. We just want to query the usercache to ensure that
        # the incidents don't magically disappear just because they got pushed
        # to the server but are not yet processed
        incident_entry_list = estc.find_entries([MANUAL_INCIDENT_KEY],
                                                time_query)
    return {"incidents": [e.data for e in incident_entry_list]}
Example #36
    def testSegmentationWrapperWithAutoTrip(self):
        eaist.segment_current_trips(self.testUUID)
        eaiss.segment_current_sections(self.testUUID)

        tq_trip = estt.TimeQuery("data.start_ts", 1440658800, 1440745200)
        created_trips = esda.get_entries(esda.RAW_TRIP_KEY, self.testUUID,
                                         tq_trip)

        for i, trip in enumerate(created_trips):
            logging.debug("current trip is %s" % trip)
            created_stops = esdt.get_raw_stops_for_trip(
                self.testUUID, trip.get_id())
            created_sections = esdt.get_raw_sections_for_trip(
                self.testUUID, trip.get_id())

            for j, stop in enumerate(created_stops):
                logging.info(
                    "Retrieved stops %s: %s -> %s" %
                    (j, stop.data.enter_fmt_time, stop.data.exit_fmt_time))
            for j, section in enumerate(created_sections):
                logging.info("Retrieved sections %s: %s -> %s" %
                             (j, section.data.start_fmt_time,
                              section.data.end_fmt_time))
    def testSegmentationWrapperCombined(self):
        # Change iOS entries to have the android UUID
        tsdb = edb.get_timeseries_db()
        for entry in esta.TimeSeries.get_time_series(
                self.iosUUID).find_entries():
            entry["user_id"] = self.androidUUID
            tsdb.save(entry)
        
        # Now, segment the data for the combined UUID, which will include both
        # android and ios
        eaist.segment_current_trips(self.androidUUID)

        tq_place = estt.TimeQuery("data.enter_ts", 1440658800, 1446847600)
        created_places_entries = esda.get_entries(esda.RAW_PLACE_KEY,
                                                  self.androidUUID, tq_place)

        tq_trip = estt.TimeQuery("data.start_ts", 1440658800, 1446847600)
        created_trips_entries = esda.get_entries(esda.RAW_TRIP_KEY,
                                                 self.androidUUID, tq_trip)

        for i, place in enumerate(created_places_entries):
            logging.debug("Retrieved places %s: %s -> %s" % (i, place.data.enter_fmt_time, place.data.exit_fmt_time))
        for i, trip in enumerate(created_trips_entries):
            logging.debug("Retrieved trips %s: %s -> %s" % (i, trip.data.start_fmt_time, trip.data.end_fmt_time))

        # We expect there to be 12 places, but the first one is the start of
        # the chain, so it has a start_time of None and it won't be retrieved
        # by the query on the start_time that we show here.
        self.assertEqual(len(created_places_entries), 11)
        self.assertEqual(len(created_trips_entries), 11)

        # Pick the first two trips and the first place and ensure that they are all linked correctly
        # Note that this is the first place, not the second place because the true first place will not
        # be retrieved by the query, as shown above
        # The first trip here is a dummy trip, so let's check the second and third trip instead
        trip0time = created_trips_entries[0]
        trip1time = created_trips_entries[1]
        place0time = created_places_entries[0]
        
        self.assertEqual(trip0time.data.end_place, place0time.get_id())
        self.assertEqual(trip1time.data.start_place, place0time.get_id())
        self.assertEqual(place0time.data.ending_trip, trip0time.get_id())
        self.assertEqual(place0time.data.starting_trip, trip1time.get_id())

        self.assertEqual(round(trip0time.data.duration), 11 * 60 + 9)
        self.assertEqual(round(trip1time.data.duration), 6 * 60 + 54)

        self.assertIsNotNone(place0time.data.location)
        
        # There are 8 android trips first (index: 0-7).
        # index 8 is the short, bogus trip
        # So we want to check trips 9 and 10
        trip0dist = created_trips_entries[9]
        trip1dist = created_trips_entries[10]
        place0dist = created_places_entries[9]
        
        self.assertEqual(trip0dist.data.end_place, place0dist.get_id())
        self.assertEqual(trip1dist.data.start_place, place0dist.get_id())
        self.assertEqual(place0dist.data.ending_trip, trip0dist.get_id())
        self.assertEqual(place0dist.data.starting_trip, trip1dist.get_id())

        self.assertEqual(round(trip0dist.data.duration), 58 * 60 + 51)
        self.assertEqual(round(trip1dist.data.duration), 39 * 60 + 49)

        self.assertIsNotNone(place0dist.data.location)
def read_data(uuid=None):
    trips = esda.get_entries(esda.CLEANED_TRIP_KEY, uuid,
                             time_query=None, geo_query=None)
    logging.info("After reading data, returning %s trips" % len(trips))
    return trips
    def testSegmentationWrapperCombined(self):
        # Change iOS entries to have the android UUID
        for entry in esta.TimeSeries.get_time_series(
                self.iosUUID).find_entries():
            entry["user_id"] = self.androidUUID
            edb.get_timeseries_db().save(entry)
        
        # Now, segment the data for the combined UUID, which will include both
        # android and ios
        eaist.segment_current_trips(self.androidUUID)

        tq_place = estt.TimeQuery("data.enter_ts", 1440658800, 1446847600)
        created_places_entries = esda.get_entries(esda.RAW_PLACE_KEY,
                                                  self.androidUUID, tq_place)

        tq_trip = estt.TimeQuery("data.start_ts", 1440658800, 1446847600)
        created_trips_entries = esda.get_entries(esda.RAW_TRIP_KEY,
                                                 self.androidUUID, tq_trip)

        for i, place in enumerate(created_places_entries):
            logging.debug("Retrieved places %s: %s -> %s" % (i, place.data.enter_fmt_time, place.data.exit_fmt_time))
        for i, trip in enumerate(created_trips_entries):
            logging.debug("Retrieved trips %s: %s -> %s" % (i, trip.data.start_fmt_time, trip.data.end_fmt_time))

        # We expect there to be 12 places, but the first one is the start of
        # the chain, so it has a start_time of None and it won't be retrieved
        # by the query on the start_time that we show here.
        self.assertEqual(len(created_places_entries), 11)
        self.assertEqual(len(created_trips_entries), 11)

        # Pick the first two trips and the first place and ensure that they are all linked correctly
        # Note that this is the first place, not the second place because the true first place will not
        # be retrieved by the query, as shown above
        # The first trip here is a dummy trip, so let's check the second and third trip instead
        trip0time = created_trips_entries[0]
        trip1time = created_trips_entries[1]
        place0time = created_places_entries[0]
        
        self.assertEqual(trip0time.data.end_place, place0time.get_id())
        self.assertEqual(trip1time.data.start_place, place0time.get_id())
        self.assertEqual(place0time.data.ending_trip, trip0time.get_id())
        self.assertEqual(place0time.data.starting_trip, trip1time.get_id())

        self.assertEqual(round(trip0time.data.duration), 11 * 60 + 9)
        self.assertEqual(round(trip1time.data.duration), 6 * 60 + 54)

        self.assertIsNotNone(place0time.data.location)
        
        # There are 8 android trips first (index: 0-7).
        # index 8 is the short, bogus trip
        # So we want to check trips 9 and 10
        trip0dist = created_trips_entries[9]
        trip1dist = created_trips_entries[10]
        place0dist = created_places_entries[9]
        
        self.assertEqual(trip0dist.data.end_place, place0dist.get_id())
        self.assertEqual(trip1dist.data.start_place, place0dist.get_id())
        self.assertEqual(place0dist.data.ending_trip, trip0dist.get_id())
        self.assertEqual(place0dist.data.starting_trip, trip1dist.get_id())

        self.assertEqual(round(trip0dist.data.duration), 58 * 60 + 51)
        self.assertEqual(round(trip1dist.data.duration), 38 * 60 + 57)

        self.assertIsNotNone(place0dist.data.location)