Example #1
    def update_ride_basic(self, strava_activity: Activity, ride: Ride):
        """
        Set basic ride properties from the Strava Activity object.

        :param strava_activity: The Strava Activity
        :param ride: The ride model object.
        """
        # Should apply to both new and preexisting rides ...
        # If there are multiple instagram photos, then request syncing of non-primary photos too.

        if strava_activity.photo_count > 1 and ride.photos_fetched is None:
            self.logger.debug(
                "Scheduling non-primary photos sync for {!r}".format(ride))
            ride.photos_fetched = False

        ride.private = bool(strava_activity.private)
        ride.name = strava_activity.name
        ride.start_date = strava_activity.start_date_local

        # We need to round so that "1.0" miles in data is "1.0" miles when we convert back from meters.
        ride.distance = round(
            float(unithelper.miles(strava_activity.distance)), 3)

        ride.average_speed = float(
            unithelper.mph(strava_activity.average_speed))
        ride.maximum_speed = float(unithelper.mph(strava_activity.max_speed))
        ride.elapsed_time = timedelta_to_seconds(strava_activity.elapsed_time)
        ride.moving_time = timedelta_to_seconds(strava_activity.moving_time)

        location_parts = []
        if strava_activity.location_city:
            location_parts.append(strava_activity.location_city)
        if strava_activity.location_state:
            location_parts.append(strava_activity.location_state)
        location_str = ', '.join(location_parts)

        ride.location = location_str

        ride.commute = strava_activity.commute
        ride.trainer = strava_activity.trainer
        ride.manual = strava_activity.manual
        ride.elevation_gain = float(
            unithelper.feet(strava_activity.total_elevation_gain))
        ride.timezone = str(strava_activity.timezone)

        # Short-circuit things that might result in more obscure db errors later.
        if ride.elapsed_time is None:
            raise DataEntryError("Activities cannot have null elapsed time.")

        if ride.moving_time is None:
            raise DataEntryError("Activities cannot have null moving time.")

        if ride.distance is None:
            raise DataEntryError("Activities cannot have null distance.")

        self.logger.debug(
            "Writing ride for {athlete!r}: \"{ride!r}\" on {date}".format(
                athlete=ride.athlete.name,
                ride=ride.name,
                date=ride.start_date.strftime('%m/%d/%y')))
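A minimal sketch of the unithelper conversion pattern the example above relies on (a hypothetical ride_summary helper, assuming a stravalib Activity whose distance, speed, and elevation attributes are unit quantities):

from stravalib import unithelper

def ride_summary(activity):
    # unithelper wraps values in unit-aware quantities; float() extracts the numeric value
    distance_mi = round(float(unithelper.miles(activity.distance)), 3)
    average_mph = float(unithelper.mph(activity.average_speed))
    elevation_ft = float(unithelper.feet(activity.total_elevation_gain))
    return distance_mi, average_mph, elevation_ft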
Example #2
def index(request):
    primary_bike_id = ''
    bikes = request.user.gear_set.all()
    for bike in bikes:
        if bike.primary:
            primary_bike_id = bike.strava_id

    gear_id = request.POST.get('gear_id', primary_bike_id)
    before = request.POST.get('before')
    after = request.POST.get('after')
    if before and after:
        before = datetime.datetime.strptime(before, '%Y-%m-%d') + timedelta(hours=23, minutes=59, seconds=59)
        after = datetime.datetime.strptime(after, '%Y-%m-%d') + timedelta(hours=23, minutes=59, seconds=59)
    else:
        before = datetime.datetime.today().replace(hour=23, minute=59, second=59, microsecond=0)
        after = before - timedelta(days=7)

    stra = strava()
    filters = [
        {'field': 'athlete.id', 'query': request.user.strava_id},
        {'field': 'start_date', 'query': {'$lt': before, '$gte': after}},
        {'field': 'gear_id', 'query': gear_id},
    ]
    activities = stra.aggregate_activities_mongo(filters, {
        '_id': None,
        'distance': {'$sum': '$distance'},
        'elapsed_time': {'$sum': '$moving_time'},
        'elevation': {'$sum': '$total_elevation_gain'},
        'average_speed': {'$avg': '$average_speed'},
        'kilojoules': {'$sum': '$kilojoules'},
    })

    template_fields = {
        'bikes': bikes,
        'gear_id': gear_id,
        'before': before.strftime('%Y-%m-%d'),
        'after': after.strftime('%Y-%m-%d'),
        'distance': unithelper.miles(unit('m')(0)),
        'time': unithelper.hours(unit('s')(0)),
        'elevation': unithelper.meters(unit('m')(0)),
        'avg_speed': unithelper.mph(unit('m')(0)/unit('s')(1)),
        'kjs': 0,
    }
    activity = None
    for agg in activities:
        if not activity:
            activity = agg

    if activity:
        merge_dict = template_fields.copy()
        merge_dict.update({
            'distance': unithelper.miles(unit('m')(activity['distance'])),
            'time': unithelper.hours(unit('s')(activity['elapsed_time'])),
            'elevation': unithelper.meters(unit('m')(activity['elevation'])),
            'avg_speed': unithelper.mph(unit('m')(activity['average_speed'])/unit('s')(1)),
            'kjs': activity['kilojoules'],
        })
        template_fields = merge_dict

    return render_to_response('strava/templates/strava_index.html', template_fields, context_instance=RequestContext(request))
Example #3
def update_ride_from_activity(strava_activity, ride):
    """
    Set ride properties from the Strava Activity object.

    :param strava_activity: The Strava Activity
    :type strava_activity: stravalib.model.Activity
    :param ride: The ride model object.
    :type ride: Ride
    """
    # Should apply to both new and preexisting rides ...
    # If there are multiple instagram photos, then request syncing of non-primary photos too.

    if strava_activity.photo_count > 1 and ride.photos_fetched is None:
        log.debug("Scheduling non-primary photos sync for {!r}".format(ride))
        ride.photos_fetched = False

    ride.private = bool(strava_activity.private)
    ride.name = strava_activity.name
    ride.start_date = strava_activity.start_date_local

    # We need to round so that "1.0" miles in strava is "1.0" miles when we convert back from meters.
    ride.distance = round(float(unithelper.miles(strava_activity.distance)), 3)

    ride.average_speed = float(unithelper.mph(strava_activity.average_speed))
    ride.maximum_speed = float(unithelper.mph(strava_activity.max_speed))
    ride.elapsed_time = timedelta_to_seconds(strava_activity.elapsed_time)
    ride.moving_time = timedelta_to_seconds(strava_activity.moving_time)

    location_parts = []
    if strava_activity.location_city:
        location_parts.append(strava_activity.location_city)
    if strava_activity.location_state:
        location_parts.append(strava_activity.location_state)
    location_str = ', '.join(location_parts)

    ride.location = location_str

    ride.commute = strava_activity.commute
    ride.trainer = strava_activity.trainer
    ride.manual = strava_activity.manual
    ride.elevation_gain = float(unithelper.feet(strava_activity.total_elevation_gain))
    ride.timezone = str(strava_activity.timezone)

    # Short-circuit things that might result in more obscure db errors later.
    if ride.elapsed_time is None:
        raise DataEntryError("Activities cannot have null elapsed time.")

    if ride.moving_time is None:
        raise DataEntryError("Activities cannot have null moving time.")

    if ride.distance is None:
        raise DataEntryError("Activities cannot have null distance.")

    log.debug("Writing ride for {athlete!r}: \"{ride!r}\" on {date}".format(athlete=ride.athlete.name,
                                                                        ride=ride.name,
                                                                        date=ride.start_date.strftime('%m/%d/%y')))
Example #4
    def build_df_summary(self):
        self.df_summary = pd.DataFrame()
        self.df_summary['activity_id'] = [self.id]

        if self.start_latlng is not None:
            self.df_summary['start_lat'] = [self.start_latlng[0]]
            self.df_summary['start_lon'] = [self.start_latlng[1]]
        else:
            self.df_summary['start_lat'] = None
            self.df_summary['start_lon'] = None
        if self.end_latlng is not None:
            self.df_summary['end_lat'] = [self.end_latlng[0]]
            self.df_summary['end_lon'] = [self.end_latlng[1]]
        else:
            self.df_summary['end_lat'] = None
            self.df_summary['end_lon'] = None

        self.df_summary['achievement_count'] = [self.achievement_count]
        self.df_summary['average_heartrate'] = [self.average_heartrate]
        self.df_summary['average_speed'] = [
            unithelper.mph(self.average_speed).num
        ]
        self.df_summary['average_watts'] = [self.average_watts]
        self.df_summary['calories'] = [self.calories]
        self.df_summary['commute'] = [self.commute]
        self.df_summary['description'] = [self.description]
        self.df_summary['device_name'] = [self.device_name]
        self.df_summary['distance'] = [unithelper.miles(self.distance).num]
        self.df_summary['elapsed_time'] = [self.elapsed_time.seconds]
        self.df_summary['gear_id'] = [self.gear_id]
        self.df_summary['kilojoules'] = [self.kilojoules]
        self.df_summary['location_city'] = [self.location_city]
        self.df_summary['location_country'] = [self.location_country]
        self.df_summary['location_state'] = [self.location_state]
        self.df_summary['max_heartrate'] = [self.max_heartrate]
        self.df_summary['max_speed'] = [unithelper.mph(self.max_speed).num]
        self.df_summary['max_watts'] = [self.max_watts]
        self.df_summary['moving_time'] = [self.moving_time.seconds]
        self.df_summary['name'] = [self.name]
        self.df_summary['pr_count'] = [self.pr_count]
        self.df_summary['start_date_local'] = [self.start_date_local]
        self.df_summary['start_date_utc'] = [self.start_date]
        self.df_summary['start_day_local'] = [self.start_date_local.date()]
        self.df_summary['timezone'] = [str(self.timezone)]
        self.df_summary['total_elevation_gain'] = [
            unithelper.feet(self.total_elevation_gain).num
        ]
        self.df_summary['trainer'] = [self.trainer]
        self.df_summary['type'] = [self.type]
        self.df_summary.set_index(['start_date_utc'], inplace=True)
Example #5
    def test_speed_units(self):
        a = model.Activity()

        a.max_speed = 1000  # m/s
        a.average_speed = 1000  # m/s
        self.assertEquals(3600.0, float(uh.kph(a.max_speed)))
        self.assertEquals(3600.0, float(uh.kph(a.average_speed)))

        a.max_speed = uh.mph(1.0)
        # print repr(a.max_speed)

        self.assertAlmostEqual(1.61, float(uh.kph(a.max_speed)), places=2)
Example #7
    def build_df_samples(self):
        seconds = 1
        streams = get_strava_client().get_activity_streams(self.id,
                                                           types=types)
        self.df_samples = pd.DataFrame(columns=types)
        # Write each row to a dataframe
        for item in types:
            if item in streams.keys():
                self.df_samples[item] = pd.Series(streams[item].data,
                                                  index=None)
        self.df_samples['start_date_local'] = self.start_date_local
        self.df_samples['timestamp_local'] = pd.Series(
            map(calctime, self.df_samples['time'],
                self.df_samples['start_date_local']))
        self.df_samples.set_index('timestamp_local', inplace=True)

        # Parse latlngs into separate columns
        try:
            self.df_samples['latitude'] = self.df_samples['latlng'].apply(
                lambda x: x[0] if isinstance(x, list) else None).apply(
                    pd.to_numeric, errors='coerce')
            self.df_samples['longitude'] = self.df_samples['latlng'].apply(
                lambda x: x[1] if isinstance(x, list) else None).apply(
                    pd.to_numeric, errors='coerce')
        except KeyError:
            self.df_samples['latitude'] = None
            self.df_samples['longitude'] = None

        # Resample to 1-second intervals. The samples should already be at 1 s intervals, so resample() mostly fills gaps and mean() has little effect.
        self.df_samples = self.df_samples.resample(str(seconds) + 'S').mean()
        self.df_samples = self.df_samples.interpolate(
            limit_direction='both'
        )  # TODO: Consider if interpolating NaNs is skewing the data too much

        try:  # Indoor activity samples won't have altitudes
            self.df_samples['altitude'] = self.df_samples['altitude'] * 3.28084
        except KeyError:
            self.df_samples['altitude'] = None

        try:
            # Convert Celsius to Fahrenheit
            self.df_samples['temp'] = unithelper.c2f(self.df_samples['temp'])
        except Exception:
            pass

        try:
            # Convert meters per second to mph
            self.df_samples['velocity_smooth'] = unithelper.mph(
                self.df_samples['velocity_smooth']).num
        except Exception:
            pass
        try:
            # Convert meters to feet
            self.df_samples['distance'] = unithelper.feet(
                self.df_samples['distance']).num
        except Exception:
            pass

        # Add Time Interval
        epoch = pd.to_datetime('1970-01-01')
        self.df_samples['time_interval'] = self.df_samples['time'].astype(
            'int').apply(lambda x: epoch + timedelta(seconds=x))

        # Add date column
        self.df_samples['date'] = self.df_samples.index.date
        # Add activity id and name back in
        self.df_samples['activity_id'] = self.id
        self.df_samples['act_name'] = self.name
Example #8
def activity_dict(athlete, a):
    elapsed_time = unithelper.seconds(a.elapsed_time.total_seconds())
    if athlete.measurement_preference == 'feet':
        distance = str(unithelper.miles(a.distance))
        gain = str(unithelper.feet(a.total_elevation_gain))
        if a.type == 'Ride':
            speed = str(unithelper.mph(a.average_speed))
            elapsed_speed = str(unithelper.mph(a.distance / elapsed_time))
        else:
            try:
                speed = "{0:.2f} /mi".format(
                    60 / (unithelper.mph(a.average_speed).num))
                elapsed_speed = "{0:.2f} /mi".format(
                    60 / unithelper.mph(a.distance / elapsed_time).num)
            except ZeroDivisionError:
                speed = 'NaN'
                elapsed_speed = 'NaN'
    else:
        distance = str(unithelper.kilometers(a.distance))
        gain = str(unithelper.meters(a.total_elevation_gain))
        if a.type == 'Ride':
            speed = str(unithelper.kph(a.average_speed))
            elapsed_speed = str(unithelper.kph(a.distance / elapsed_time))
        else:
            try:
                speed = "{0:.2f} /km".format(
                    60 / (unithelper.kph(a.average_speed).num))
                elapsed_speed = "{0:.2f} /km".format(
                    60 / unithelper.kph(a.distance / elapsed_time).num)
            except ZeroDivisionError:
                speed = 'NaN'
                elapsed_speed = 'NaN'

    date = a.start_date_local.strftime(athlete.date_preference
                                       or "%a, %d %b %Y")
    weekday = calendar.day_name[a.start_date_local.weekday()]

    workout_type = ''
    if a.type == 'Run':
        workout_type = ['', 'Race', 'Long Run', 'Workout'][int(a.workout_type
                                                               or 0)]

    garmin_link = ''
    if a.external_id:
        if a.external_id.startswith('garmin_push_'):
            garmin_id = a.external_id.split('_')[2]
            garmin_link = 'https://connect.garmin.com/modern/activity/{}'.format(
                garmin_id)

    return {
        'id': a.id,
        'link': url_for('query.show_activity', id=a.id),
        'strava_link': 'https://www.strava.com/activities/{}'.format(a.id),
        'garmin_link': garmin_link,
        'name': a.name,
        'type': a.type,
        'workout_type': workout_type,
        'date': date,
        'weekday': weekday,
        'distance': distance,
        'gain': gain,
        'elapsed_time': str(a.elapsed_time),
        'moving_time': str(a.moving_time),
        'speed': speed,
        'elapsed_speed': elapsed_speed,
        'start_latlng': [a.start_latitude, a.start_longitude],
        'polyline': a.map.polyline or a.map.summary_polyline
    }
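A quick check of the pace arithmetic used above: minutes per mile is 60 divided by speed in mph (and 60 / km/h gives minutes per kilometer), so an assumed 6.0 mph renders as "10.00 /mi":

avg_mph = 6.0  # assumed example value
pace = "{0:.2f} /mi".format(60 / avg_mph)  # -> "10.00 /mi"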
Example #9
def main(event, context):
    """Triggered from a message on a Cloud Pub/Sub topic.
    Args:
         event (dict): Event payload.
         context (google.cloud.functions.Context): Metadata for the event.
    """
    STRAVA_CLIENT_ID = os.environ.get('strava_client_id')
    STRAVA_CLIENT_SECRET = os.environ.get('strava_client_secret')

    pubsub_message = base64.b64decode(event['data']).decode('utf-8')
    pubsub_dict = json.loads(pubsub_message.replace("'", '"'))
    print(pubsub_dict)
    datastore_client = datastore.Client(project='strava-int')
    bq_client = bigquery.Client(project='strava-int')
    strava_client = Client()

    # event notification from strava
    aspect_type = pubsub_dict['aspect_type']
    object_id = pubsub_dict['object_id']
    owner_id = pubsub_dict['owner_id']
    object_type = pubsub_dict['object_type']
    event_time = pubsub_dict['event_time']
    event_datetime = time.strftime('%Y-%m-%d %H:%M:%S',
                                   time.localtime(event_time))

    if object_type == 'activity':
        now = time.time()

        if aspect_type == 'delete':
            athlete_activity_dict = [{
                'activity_hub_seq': hashlib.md5(str(object_id).encode()).hexdigest(),
                'sat_load_date': event_datetime,
                'delete_ind': True
            }]
            # athlete_activity_obj = AthleteActivity(athlete_activity_dict)
            # sat_athlete_activity = athlete_activity_obj.satellite()
            sat_table_ref = bq_client.dataset('strava_datavault').table(
                'activity_sat')
            bq_client.load_table_from_json(athlete_activity_dict,
                                           sat_table_ref)

        if aspect_type != 'delete':
            # stored athlete from datastore
            athlete_key = datastore_client.key('Athlete', owner_id)
            athlete = datastore_client.get(athlete_key)
            if now > athlete['expires_at']:
                access_token = strava_client.refresh_access_token(
                    client_id=STRAVA_CLIENT_ID,
                    client_secret=STRAVA_CLIENT_SECRET,
                    refresh_token=athlete['refresh_token'])

                athlete.update(access_token)
                datastore_client.put(athlete)

            # create new client for authenticated athlete
            athlete_client = Client(access_token=athlete['access_token'])
            activity = athlete_client.get_activity(object_id)
            activity_dict = activity.to_dict()
            supplement = {
                'athlete_id': owner_id,
                'activity_id': object_id,
                'load_date': event_datetime
            }
            activity_dict.update(supplement)

            # GCS Storage
            upload_blob(activity_dict, owner_id, object_id, event_time,
                        aspect_type)

            converted_units = {
                'distance_mi': unithelper.mile(
                    getattr(activity, 'distance', None)).get_num(),
                'average_speed_mi': unithelper.mph(
                    getattr(activity, 'average_speed', None)).get_num(),
                'max_speed_mi': unithelper.mph(
                    getattr(activity, 'max_speed', None)).get_num(),
                'elapsed_time_s': int(unithelper.timedelta_to_seconds(
                    getattr(activity, 'elapsed_time', None))),
                'moving_time_s': int(unithelper.timedelta_to_seconds(
                    getattr(activity, 'moving_time', None)))
            }

            activity_dict.update(converted_units)

            athlete_activity_obj = AthleteActivity(activity_dict)
            sat_athlete_activity = athlete_activity_obj.satellite()
            print(sat_athlete_activity)
            # BQ insert
            sat_table_ref = bq_client.dataset('strava_datavault').table(
                'activity_sat')
            bq_client.load_table_from_json(sat_athlete_activity, sat_table_ref)
            if aspect_type == 'create':
                link_athlete_activity = athlete_activity_obj.link()
                link_table_ref = bq_client.dataset('strava_datavault').table(
                    'athlete_activity_link')
                bq_client.load_table_from_json(link_athlete_activity,
                                               link_table_ref)

                hub_activity = athlete_activity_obj.hub()
                hub_table_ref = bq_client.dataset('strava_datavault').table(
                    'activity_hub')
                bq_client.load_table_from_json(hub_activity, hub_table_ref)
Example #10
def to_mph(mps):
    # Convert a speed quantity (e.g. activity.average_speed, in m/s) to a float in mph
    return float(unithelper.mph(mps))
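A hedged usage sketch, assuming act is a stravalib Activity fetched as in the earlier examples:

avg_mph = to_mph(act.average_speed)
max_mph = to_mph(act.max_speed)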
Example #11
    if "latlng" in stream_filter:
      sdf["lat"] = [a[0] for a in sdf.latlng]
      sdf["lng"] = [a[1] for a in sdf.latlng]
      del sdf["latlng"]

    detail_fname = join(output_detail_dir, "{0}.json".format(act.id))
    sdf.to_json(detail_fname)

    # with open(join(output_detail_dir, "{0}.p".format(act.id)), "wb") as f:
    #     pickle.dump(sdf, f, 2)

    print "{0} on {1:%d %b %Y} [kudos {2}]".format(act.name, act.start_date, act.kudos_count)
    print "\tHR: {0}".format(act.average_heartrate)
    print "\tDistance: {0}".format(unithelper.miles(act.distance))
    print "\tSpeed: {0}".format(unithelper.mph(act.average_speed))

    item = dict((k, func(getattr(act, k, None))) for k, func in save_fields.iteritems())
    data.append(item)
    start_dates.append(act.start_date)

df = pd.DataFrame(data=data, index=start_dates)

summary_fname = join(output_dir, "summary.json")
df.to_json(summary_fname)

# with open(join(output_dir, "summary.p"), "wb") as f:
#     pickle.dump(df, f, 2)

df.to_csv(join(output_dir, "summary.csv"))