Example #1
 def resolve_webhook(self, object_id: int):
     """
     Resolves a webhook event from Strava by retrieving the activity data and
     checking it against the user's existing rules. If a match is found, sends the
     request to Strava to rename it.
     """
     self.refresh()
     client = Client(access_token=self.access_token)
     activity = client.get_activity(object_id)
     # The Strava API doesn't give enough precision in its start latitude and
     # longitude values, so we have to call the raw stream of points to get what we
     # need.
     points = client.get_activity_streams(
         activity.id, types=["latlng"], resolution="low"
     )
     activity_start = points["latlng"].data[0]
     current_app.logger.info(
         f"Webhook event received: Activity {object_id}, User {self.id}, Starting"
         f"point {activity_start}, Starting time {activity.start_date_local}"
     )
     for rule in self.rules.all():
         current_app.logger.info(f"Checking {rule} against activity {object_id}")
         if rule.check_rule(activity_start, activity.start_date_local):
             # if not current_app.config.TESTING:
             client.update_activity(object_id, name=rule.activity_name)
             current_app.logger.info(
                 f"Activity {activity.id} renamed to {rule.activity_name} for {self}"
             )
             break  # No need to check any more rules
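
A caveat for the lookup above: activities without GPS data (manual entries and many trainer rides) come back without a latlng stream, so points["latlng"] would raise a KeyError. A possible guard, reusing the names from this example:

# Hedged sketch: skip activities that have no GPS stream at all.
points = client.get_activity_streams(activity.id, types=["latlng"], resolution="low")
if points and "latlng" in points:
    activity_start = points["latlng"].data[0]
else:
    current_app.logger.info(f"Activity {object_id} has no GPS data; skipping")
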
Example #2
def get_activity(event: dict, context=None):
    """
    Retrieve the given activity and its streams, and save the data as a single
    JSON file to Cloud Storage.

    Args:
        event (dict): Dict in Pub/Sub format
        context: Not used
    """
    # Initialise Google Cloud Storage. Doing this first to avoid wasting an
    # API call if there is a problem loading the bucket.
    storage_client = storage.Client()
    bucket = storage_client.bucket(os.getenv('STORAGE_BUCKET_NAME'))

    # Parse the Pub/Sub message data.
    data = json.loads(base64.b64decode(event['data']).decode('utf-8'))
    logging.info(data)

    # Check if a fresh token is required.
    if sm.expires_at < datetime.now().timestamp():
        refresh_access_token()
    client = Client(access_token=sm.access_token)

    # Check we're logged in correctly.
    logger.info('Logged in as athlete %s', client.get_athlete())

    # Download the activity data.
    activity = client.get_activity(data['object_id'])
    activity_dict = activity.to_dict()
    logger.debug('Activity %s: %s', activity.id, activity_dict)

    # Download the streams data.
    streams = client.get_activity_streams(activity.id, STREAM_TYPES)
    logger.debug(streams)

    # Append the streams data to the activity data. This is the only
    # manipulation in this workflow; everything else should happen when you
    # read this file from Cloud Storage.
    activity_dict['streams'] = {k: v.to_dict() for k, v in streams.items()}
    logger.debug(activity_dict)

    # Save to Cloud Storage.
    blob = bucket.blob(f'{activity.id}.json')
    blob.upload_from_string(json.dumps(activity_dict),
                            content_type="application/json")

    return 0
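
For local testing it can help to hand-build an event in the shape the function expects: a dict whose data field is base64-encoded JSON containing at least object_id (the only field read above). A small sketch; the payload values are made up:

import base64
import json

payload = {"object_id": 1234567890}  # hypothetical activity id
event = {"data": base64.b64encode(json.dumps(payload).encode("utf-8")).decode("utf-8")}
get_activity(event)
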
Example #3
def getCourseInfo():
	STORED_ACCESS_TOKEN = "903910abaf69b186957a22c1227d6f19237cd233"

	client = Client(access_token=STORED_ACCESS_TOKEN)
	athlete_data = client.get_athlete()
	activity_id = 887327831
	activity = client.get_activity(activity_id)

	types = ['time', 'altitude', 'velocity_smooth', 'distance', 'grade_smooth', 'watts', 'latlng']
	streams = client.get_activity_streams(activity_id, types=types, resolution='medium')

	# for i in range(0, len(streams['time'].data)):
	#     print "{} {}".format(streams['distance'].data[i], streams['watts'].data[i])

	x = streams['distance'].data
	y = streams['altitude'].data
	inclin = streams['grade_smooth'].data
	latlng = streams['latlng'].data
	
	return {'x':x,'y': y, 'inclin':inclin, 'latlng': latlng}
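
A quick way to sanity-check the dict returned by getCourseInfo() is to plot the course profile; a minimal sketch using matplotlib, not part of the original code:

import matplotlib.pyplot as plt

course = getCourseInfo()
plt.plot(course['x'], course['y'])  # distance (m) against altitude (m)
plt.xlabel('distance (m)')
plt.ylabel('altitude (m)')
plt.show()
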
Example #4
def sync_activity(request):
    activity = get_item_from_request(request)
    client = Client(access_token=get_access_token(request))
    strava = client.get_activity(activity.strava_id)

    # Sync the description; it is not included in the activity overview.
    if strava.description:
        activity.description = strava.description

    # Activities can have many streams; request only the stream types you need.
    types = [
        'time', 'latlng', 'distance', 'altitude', 'velocity_smooth',
        'heartrate', 'cadence', 'watts', 'temp', 'moving', 'grade_smooth'
    ]
    streams = client.get_activity_streams(activity.strava_id, types=types)
    if "time" in streams:
        activity.time_stream = streams["time"].data
    if "latlng" in streams:
        activity.latlng_stream = streams["latlng"].data
    if "distance" in streams:
        activity.distance_stream = streams["distance"].data
    if "altitude" in streams:
        activity.altitude_stream = streams["altitude"].data
    if "velocity_smooth" in streams:
        activity.velocity_smooth_stream = streams["velocity_smooth"].data
    if "heartrate" in streams:
        activity.heartrate_stream = streams["heartrate"].data
    if "cadence" in streams:
        activity.cadence_stream = streams["cadence"].data
    if "watts" in streams:
        activity.watts_stream = streams["watts"].data
    if "temp" in streams:
        activity.temp_stream = streams["temp"].data
    if "moving" in streams:
        activity.moving_stream = streams["moving"].data
    if "grade_smooth" in streams:
        activity.grade_smoth_stream = streams["grade_smooth"].data
    return activity
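
The per-stream assignments above could also be written as a loop with setattr. A minimal sketch, assuming the model attributes really are named <type>_stream as in the example (including the misspelled grade_smoth_stream field):

# Hypothetical refactor of the if-chain above; attribute names are assumed
# to match the model fields used in the example.
stream_fields = {t: f'{t}_stream' for t in types}
stream_fields['grade_smooth'] = 'grade_smoth_stream'  # field name as spelled in the model

for stream_type, field in stream_fields.items():
    if stream_type in streams:
        setattr(activity, field, streams[stream_type].data)
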
Example #5
def map(request, athlete_id, activity_id):

    # athlete_id = athlete_id #421122 #750228
    # activity_id = activity_id #577320490 #476912675


    path = "../../../../../data/" + format(athlete_id) + "_" + format(activity_id) + ".txt"
    file_to_write_to = open(path, "w")
    # print(file_to_write_to.path)
    writer = csv.writer(file_to_write_to, delimiter=',', quoting=csv.QUOTE_NONE)
    #write header row for text file
    activity_tuple = "AthleteID", "ActivityID", "StartTime", "TotalElapsedTime", "TotalDistanceMeters", "MaxSpeedMPH", "MeasuredTime", "Latitude", "Longitude", "AltitudeMeters", "DistanceMeters", "current_speedMPH", "CurrentGrade"
    writer.writerow(activity_tuple)


    # By now the athlete should exist
    current_athlete = Athlete.objects.get(id_strava = athlete_id)

    # Use the access_token ### "58298d33c3a183c12673691a1ae53d261b08c3a4"
    client = Client(access_token=current_athlete.access_token)

    #activity id 
    strava_ride = client.get_activity(activity_id)
    
    # values we are using in calculations and sending to the template
    max_speed = format(float(strava_ride.max_speed * 2.23693629), '.9f')
    average_speed = format(float(strava_ride.average_speed * 2.23693629), '.9f')
    ride_name = strava_ride.name

    # Streams
    stream_types = "time","distance","latlng","altitude","grade_smooth","velocity_smooth"
    streams = client.get_activity_streams(activity_id, types=stream_types)

    stream_time = streams["time"].data 
    stream_distance = streams["distance"].data 
    stream_lat_lng = streams["latlng"].data 
    stream_altitude = streams["altitude"].data 
    stream_grade = streams["grade_smooth"].data 
    stream_velocity = streams["velocity_smooth"].data

    stream_tuple = zip(stream_time, stream_distance, stream_lat_lng, stream_altitude, stream_grade, stream_velocity)

    # combined_array is to collect all the values to do some calculations later.
    combined_array = []

    # combined_string is a string version of the array to send to the template.
    combined_string = ""

    # Getting info from the streams and combining it all into a CSV format. 
    for (tTime,tDistance,tLatLng,tAltitude,tGrade,tVelocity) in stream_tuple:
        current_time = strava_ride.start_date_local + timedelta(seconds=tTime)
        # current_speed = format(float(tVelocity * 2.23693629), '.9f')
        current_speed = tVelocity * 2.23693629
        activity_tuple = athlete_id, activity_id, strava_ride.start_date_local, strava_ride.elapsed_time, \
            float(strava_ride.distance), max_speed, current_time, tLatLng[0], tLatLng[1], \
            tAltitude, tDistance, current_speed, tGrade
        writer.writerow(activity_tuple)

        temp_stuff = []
        temp_stuff.append(format(current_time))
        temp_stuff.append(format(tLatLng[0]))
        temp_stuff.append(format(tLatLng[1]))
        temp_stuff.append(format(current_speed))

        combined_array.append(temp_stuff)
    
    file_to_write_to.close()


    # Compute a special Shred Analytics average speed that ignores near-zero values.
    sa_average_speed = 0.0
    sa_avg_index = 0

    for i in combined_array:
        # i[3] is speed
        if float(i[3]) > 0.5:
            sa_average_speed = sa_average_speed + float(i[3])
            sa_avg_index = sa_avg_index + 1 

        # Make a string version of the array to send to JavaScript.
        combined_string += ','.join(i) + "@"

    # The important calculation (guard against a ride with no moving samples).
    sa_average_speed = sa_average_speed / sa_avg_index if sa_avg_index else 0.0

    context = {
        'sa_average_speed': sa_average_speed,
        'max_speed': max_speed,
        'average_speed': average_speed,
        'ride_name': ride_name,
        'athlete_id': athlete_id,
        'activity_id': activity_id,
        'start_lat': combined_array[3][1],
        'start_lon': combined_array[3][2],
        'file_string': combined_string,
    }
    
    template = loader.get_template('shred/map.html')
    return HttpResponse(template.render(context))
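
The magic number 2.23693629 (metres per second to miles per hour) appears several times in this view and the next one; pulling it into a named helper keeps the conversion in one place. A small sketch, not taken from the original code:

MPS_TO_MPH = 2.23693629  # metres per second -> miles per hour

def to_mph(speed_mps):
    """Convert a Strava speed value (m/s) to miles per hour."""
    return float(speed_mps * MPS_TO_MPH)

With that in place, max_speed = format(to_mph(strava_ride.max_speed), '.9f') and current_speed = to_mph(tVelocity) replace the inline arithmetic.
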
Example #6
def compare (request):

    athlete_id = request.GET.get("athlete_id")

    # How do we read an arbitrary list of runs from the URL?
    # request.GET.get("code") works for a single item, but here we want
    # something like /compare/?runs=123,456,789 and then split that string
    # into a list (see the sketch after this example).

    # use the token URL pattern as a template

    # By now the athlete should exist
    current_athlete = Athlete.objects.get(id_strava = athlete_id)

    # Use the access_token ### "58298d33c3a183c12673691a1ae53d261b08c3a4"
    client = Client(access_token=current_athlete.access_token)


    # hardcoded sleighride activities. 
    activities = [547540459, 547545741, 547550929, 559626235]

    max_speed = 0.0
    ride_names = []
    
    # combined_array is to collect all the values to do some calculations later.
    combined_array = []
    all_speeds = []


    for ac in activities:
        #activity id 
        strava_ride = client.get_activity(ac)

        # Get and update the highest maxspeed across rides. 
        t_max_speed = float(strava_ride.max_speed * 2.23693629)
        if t_max_speed > max_speed:
            max_speed = t_max_speed
        
        # Add the activity name to this array    
        ride_names.append(strava_ride.name)


        # Streams
        stream_types = "time","distance","latlng","altitude","grade_smooth","velocity_smooth"
        streams = client.get_activity_streams(strava_ride.id, types=stream_types)

        stream_time = streams["time"].data 
        stream_distance = streams["distance"].data 
        stream_lat_lng = streams["latlng"].data 
        stream_altitude = streams["altitude"].data 
        stream_grade = streams["grade_smooth"].data 
        stream_velocity = streams["velocity_smooth"].data

        stream_tuple = zip(stream_time, stream_distance, stream_lat_lng, stream_altitude, stream_grade, stream_velocity)

        # Getting info from the streams and combining it all into a CSV format. 
        for (tTime,tDistance,tLatLng,tAltitude,tGrade,tVelocity) in stream_tuple:
            current_time = strava_ride.start_date_local + timedelta(seconds=tTime)
            # current_speed = format(float(tVelocity * 2.23693629), '.9f')
            current_speed = tVelocity * 2.23693629
            
            if current_speed > 0.5:
                all_speeds.append(current_speed)

            temp_stuff = []
            temp_stuff.append(format(current_time))
            temp_stuff.append(format(tLatLng[0]))
            temp_stuff.append(format(tLatLng[1]))
            temp_stuff.append(format(current_speed))

            combined_array.append(temp_stuff)
        # End inner FOR
    
        # insert splitter between runs
        combined_array.append(["$$$"])
    # END outer FOR


    # Compute the special Shred Analytics average speed that ignores near-zero values.
    sa_average_speed = mean(all_speeds)

    # combined_string is a string version of the array to send to the template.
    combined_string = ""

    # Make a string version of the array to send to JavaScript.
    for i in combined_array:
        combined_string += ','.join(i) + "@"

    context = {
        'sa_average_speed': sa_average_speed,
        'max_speed': max_speed,
        'ride_names': ride_names,
        'start_lat': combined_array[3][1],
        'start_lon': combined_array[3][2],
        'file_string': combined_string,
    }
    
    template = loader.get_template('shred/compare.html')
    return HttpResponse(template.render(context))
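
A possible answer to the question in the comments above about passing an arbitrary list of runs in the URL: read a comma-separated query parameter and fall back to the hardcoded rides when it is absent. The parameter name runs is hypothetical:

# Hypothetical: /compare/?athlete_id=...&runs=547540459,547545741,547550929
runs_param = request.GET.get("runs", "")
requested = [int(run_id) for run_id in runs_param.split(",") if run_id.strip()]
activities = requested or [547540459, 547545741, 547550929, 559626235]
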
Example #7
def authorization(request):
    client = Client()
    code = request.GET['code']
    access_token = client.exchange_code_for_token(client_id=MY_STRAVA_CLIENT_ID, client_secret=MY_STRAVA_CLIENT_SECRET, code=code)   
    
    # Making a global variable to be used across views; it's unclear how well this works in practice (see the note after this example).
    
    client = Client(access_token=access_token)
    athlete = client.get_athlete() # Get current athlete details
    
    global athleteId 
    athleteId = athlete.id
    
    # if athlete doesn't exist, add them
    if len(Athlete.objects.filter(athleteId=athleteId)) == 0:
        ath = Athlete.objects.create(
            name=str(athlete.firstname + ' ' + athlete.lastname),
            athleteId=athleteId,
            profilePic=athlete.profile,
            city=athlete.city,
            country=athlete.country,
            sex=athlete.sex,
            premium=athlete.premium,
            created_at=athlete.created_at,
            updated_at=athlete.updated_at,
            followers=athlete.follower_count,
            friends=athlete.friend_count,
            email=athlete.email,
            weight=athlete.weight,
            meas_pref=athlete.measurement_preference,
            runsSummary=DataFrame({}).to_json(orient='records'),
            fitLines=DataFrame({}).to_json(orient='records'),
            masterList=DataFrame({}).to_json(orient='records'))

        ath.profilePic.name = "rudyzPic"
        ath.save(update_fields=['profilePic'])
 
    # if athlete already exists, draw their file
    elif len(Athlete.objects.filter(athleteId=athleteId)) == 1:
        ath = Athlete.objects.get(athleteId=athleteId)
           
    ############################################ 
    ##### compiling new runs, updating summary
        
    # athlete's existing runs summary   
    existingSummary = DataFrame(pd.read_json(ath.runsSummary))
    existingFitlines = DataFrame(pd.read_json(ath.fitLines)) 
    masterList = DataFrame(pd.read_json(ath.masterList))
     
    activities = list(client.get_activities()) 
    
    # activity IDs of runs already in the system
    try:
        ids = existingSummary.activityId
    except AttributeError:
        ids = []
         
    for i in range(len(activities)):   
    #for i in range(30,37):
        # Ignoring activities already in the system 
        if (len(ids) == 0) or (float(activities[i].id) not in list(ids)):
            
            try:
                # compiling df for raw json-ization
                activityId = activities[i].id
                run = client.get_activity_streams(activityId, types=['time','latlng','distance','heartrate','altitude','cadence'])
                latlng = run['latlng'].data
                time = run['time'].data
                distance = run['distance'].data
                heartrate = run['heartrate'].data
                altitude = run['altitude'].data
                cadence = run['cadence'].data
                date = activities[i].start_date_local 
                activity = activityId   
                dfi = thresher.assemble(date, activityId, heartrate, distance, time, altitude, latlng, cadence) 
                
                
                # basic cleanup, only removing totally unreasonable values
                dfi = thresher.basicClean(dfi)


                # if we ever want to try our hand at improving strava's speed data (ie by predicting speed when GPS blanks), intervene here:
                    
                #dfi = thresher.addDistDeltas(dfi)
                             
                                        
                try:
                    fitline = thresher.getFitlineLws(dfi)  # this adds speed-shifted columns
                except Exception:
                    fitline = pd.DataFrame({})

                try:
                    mafScore = fitline[fitline.hr == 140.0].avgSpeed.iloc[0]
                    print("MAF", mafScore)
                except Exception:
                    mafScore = np.nan
                    
                fitline_json = fitline.to_json(orient='records')
                
                 # getting summary info for run (as one-entry dict)
                runSummary = thresher.getSingleSummaryDf(dfi)
                
                # adding mafScore to summary
                runSummary['mafScore'] = mafScore
                
                print(runSummary)
                
                # adding predicted hr and speed values
                #dfi = thresher.getPred(dfi)

                # saving entry to database
                Activity.objects.create(
                    act_id=activityId,
                    name=str(activities[i].name),
                    description=activities[i].description,
                    act_type=activities[i].type,
                    date=activities[i].start_date_local,
                    timezone=activities[i].timezone,
                    df=dfi.to_json(orient='records'),
                    avgHr=runSummary['avgHr'],
                    hrVar=runSummary['variation'],
                    realMiles=runSummary['realMiles'],
                    recovery=runSummary['recovery'],
                    easy=runSummary['easy'],
                    stamina=runSummary['stamina'],
                    impulse=runSummary['impulse'],
                    totalTime=runSummary['totalTime'],
                    totalDist=runSummary['totalDist'],
                    climb=runSummary['climb'],
                    fitline=fitline_json,
                    mafScore=mafScore,
                    athlete=ath)
                
                # updating runs summary
                existingSummary = existingSummary.append(runSummary, ignore_index=True)
                existingFitlines = existingFitlines.append(fitline, ignore_index=True)
                masterList = masterList.append(dfi, ignore_index=True)
                
            except Exception:
                continue
    
    
    # saving updated runs summary to athlete profile
    ath.runsSummary = existingSummary.to_json(orient='records')
    ath.save(update_fields=['runsSummary'])
    
    existingSummary.to_pickle("runsSummary.txt")
    
    # saving updated fit lines to athlete profile
    ath.fitLines = existingFitlines.to_json(orient='records')
    ath.save(update_fields=['fitLines'])
    
    ath.masterList = masterList.to_json(orient='records')
    ath.save(update_fields=['masterList'])
    
    # testing...
    existingSummary = pd.read_json(ath.runsSummary)
    #print(existingSummary)
    
    existingFitlines = pd.read_json(ath.fitLines)
    #print(existingFitlines)

    
    global path
    path = os.path.dirname(__file__)
    # updating dataframe, pickling for use in other views
    #global df
    #df = thresher.masterAssemble(client) 
    
    masterDf = pd.read_json(ath.masterList)
    #print(masterDf)
    masterDf.to_pickle(str(path)+"/"+str(athlete.id)+"masterDf.txt")

    return render(request, 'stravaChimp/authorization.html', {'code':code, 'access_token':access_token, 'athleteId':athleteId})
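
On the comment above about the global athleteId: module-level globals in Django live per worker process and are neither shared between workers nor reliably preserved between requests, so the usual approach is to stash the id in the session instead. A minimal sketch; the session key name is illustrative:

# Inside authorization(), after fetching the athlete:
request.session['athleteId'] = athlete.id

# In any later view:
athlete_id = request.session.get('athleteId')
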
Example #8
def build_workout(self, token, pk, send=False, list=None):

    print('>>> build_workout:', pk)
    client = Client(token)
    user = client.get_athlete()
    strUser = StravaUser.objects.filter(uid=user.id)

    sendProgress(strUser[0].channel_name, 5, list)

    workout = Workout.objects.get(pk=pk)
    types = [
        'time', 'distance', 'latlng', 'altitude', 'heartrate',
        'velocity_smooth'
    ]
    print('WorkoutDetail, workout.actId=', workout.actId)
    activity = get_object_or_404(Activity, id=workout.actId)
    print('WorkoutDetail, activity.stravaId=', activity.stravaId)
    print('Resolution required=', strUser[0].resolution)

    distance = Distance.objects.filter(workout__id=workout.id)
    if not distance.count():
        resolution = 'medium'
        if strUser[0].resolution == 100:
            resolution = 'low'
        elif strUser[0].resolution == 1000:
            resolution = 'medium'
        elif strUser[0].resolution == 10000:
            resolution = 'high'
        print('Get streams begin')
        streams = client.get_activity_streams(activity_id=activity.stravaId,
                                              resolution=resolution,
                                              types=types)
        print('streams=', streams)
        sendProgress(strUser[0].channel_name, 10, list)

        #print('time seq size=',len(streams['time'].data))
        #print('dist seq',streams['distance'].data)
        #print('speed seq',streams['velocity_smooth'].data)
        #print('elevation seq',streams['altitude'].data)
        #print('HR seq',streams['heartrate'].data)
        #print('gps seq',streams['latlng'].data)
        gps = GpsCoord.objects.filter(workout__id=workout.id)
        print('gps first element=', gps.count())
        if not gps.count() and 'latlng' in streams:
            print('empty query, create SQL record')
            objs = [
                GpsCoord(gps_index=i,
                         gps_time=streams['time'].data[i],
                         gps_lat=latlng[0],
                         gps_long=latlng[1],
                         workout=workout)
                for i, latlng in enumerate(streams['latlng'].data)
            ]
            #print ('GPS seq')
            #for i, gps in enumerate(streams['latlng'].data):
            #    print ('gps_index:',i,'gps_lat:',gps[0],'gps_long:',gps[1],'gps_time:',streams['time'].data[i])
            coord = GpsCoord.objects.bulk_create(objs)

        sendProgress(strUser[0].channel_name, 20, list)
        hr = HeartRate.objects.filter(workout__id=workout.id)
        if not hr.count() and 'heartrate' in streams:
            objs = [
                HeartRate(hr_index=i, hr_value=hr, workout=workout)
                for i, hr in enumerate(streams['heartrate'].data)
            ]
            coord = HeartRate.objects.bulk_create(objs)

        sendProgress(strUser[0].channel_name, 25, list)
        if not distance.count() and 'distance' in streams:
            objs = [
                Distance(distance_index=i,
                         distance_value=dist,
                         workout=workout)
                for i, dist in enumerate(streams['distance'].data)
            ]
            coord = Distance.objects.bulk_create(objs)

        speed = Speed.objects.filter(workout__id=workout.id)
        if not speed.count() and 'velocity_smooth' in streams:
            objs = [
                Speed(speed_index=i, speed_value=speed, workout=workout)
                for i, speed in enumerate(streams['velocity_smooth'].data)
            ]
            coord = Speed.objects.bulk_create(objs)

        sendProgress(strUser[0].channel_name, 30, list)
        elevation = Elevation.objects.filter(workout__id=workout.id)
        if not elevation.count() and 'altitude' in streams:
            objs = [
                Elevation(elevation_index=i,
                          elevation_value=elevation,
                          workout=workout)
                for i, elevation in enumerate(streams['altitude'].data)
            ]
            coord = Elevation.objects.bulk_create(objs)

        sendProgress(strUser[0].channel_name, 35, list)
        laps = client.get_activity_laps(activity.stravaId)
        i = 0
        for strLap in laps:
            i += 1
            print('lap=', strLap)
            print('strLap,start_index=', strLap.start_index)
            print('strLap,end_index=', strLap.end_index)
            print('strLap,lap_average_cadence=', strLap.average_cadence)
            print('start_date=', strLap.start_date)
            print('lap_time=', strLap.elapsed_time)
            print('lap_distance=', strLap.distance)
            print('lap_pace_zone=', strLap.pace_zone)
            if strLap.pace_zone is None:
                strLap.pace_zone = 0
            if strLap.average_cadence is None:
                strLap.average_cadence = 0
            lap = Lap.objects.filter(workout__id=workout.id, lap_index=i)
            if not lap.exists():
                lap = Lap.objects.create(
                    lap_index=strLap.lap_index,
                    lap_start_index=strLap.start_index,
                    lap_end_index=strLap.end_index,
                    lap_distance=strLap.distance,
                    lap_time=strLap.elapsed_time,
                    lap_start_date=strLap.start_date,
                    lap_average_speed=strLap.average_speed,
                    lap_average_cadence=strLap.average_cadence,
                    lap_pace_zone=strLap.pace_zone,
                    lap_total_elevation_gain=strLap.total_elevation_gain,
                    workout=workout)
                print('total_elevation_gain=', strLap.total_elevation_gain)
                print('pace_zone=', strLap.pace_zone)

    sendProgress(strUser[0].channel_name, 40, list)
    #workout_sq=Workout.objects.filter(id=workout.id)
    #workout_sq = WorkoutSerializer.setup_eager_loading(workout_sq)
    #serializer = WorkoutSerializer(workout_sq, many=True)
    serializer = WorkoutSerializer(workout)
    #print ('serializer.data size=',sys.getsizeof(serializer.data))
    sendProgress(strUser[0].channel_name, 75, list)
    #print ('jsonData=',workout.jsonData)
    data = ""

    if send:
        data = {'progress': 75, 'workout': serializer.data}
        sendMessage('workout', data, strUser[0].channel_name)

    print('Store Json data ...')
    Workout.objects.filter(id=workout.id).update(
        jsonData=json.dumps(serializer.data))
    Activity.objects.filter(id=activity.id).update(progress=100)
    print('Store Json data done')

    sendProgress(strUser[0].channel_name, 100, list)
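
The resolution selection near the top of this example (the 100/1000/10000 if/elif chain) could also be a lookup table; a small sketch assuming those stored values keep the same meaning as in the example:

# Hypothetical mapping from the user's stored resolution setting to the
# resolution names accepted by get_activity_streams().
RESOLUTION_MAP = {100: 'low', 1000: 'medium', 10000: 'high'}
resolution = RESOLUTION_MAP.get(strUser[0].resolution, 'medium')
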
Example #9
def authorization(request):
    client = Client()
    code = request.GET['code']
    access_token = client.exchange_code_for_token(
        client_id=MY_STRAVA_CLIENT_ID,
        client_secret=MY_STRAVA_CLIENT_SECRET,
        code=code)

    # making a global variable to be used across views; it's unclear how well this works in practice

    client = Client(access_token=access_token)
    athlete = client.get_athlete()  # Get current athlete details

    global athleteId
    athleteId = athlete.id

    # if athlete doesn't exist, add them
    if len(Athlete.objects.filter(athleteId=athleteId)) == 0:
        ath = Athlete.objects.create(
            name=str(athlete.firstname + ' ' + athlete.lastname),
            athleteId=athleteId,
            profilePic=athlete.profile,
            city=athlete.city,
            country=athlete.country,
            sex=athlete.sex,
            premium=athlete.premium,
            created_at=athlete.created_at,
            updated_at=athlete.updated_at,
            followers=athlete.follower_count,
            friends=athlete.friend_count,
            email=athlete.email,
            weight=athlete.weight,
            meas_pref=athlete.measurement_preference,
            runsSummary=DataFrame({}).to_json(orient='records'),
            fitLines=DataFrame({}).to_json(orient='records'),
            masterList=DataFrame({}).to_json(orient='records'))

        ath.profilePic.name = "rudyzPic"
        ath.save(update_fields=['profilePic'])

    # if athlete already exists, draw their file
    elif len(Athlete.objects.filter(athleteId=athleteId)) == 1:
        ath = Athlete.objects.get(athleteId=athleteId)

    ############################################
    ##### compiling new runs, updating summary

    # athlete's existing runs summary
    existingSummary = DataFrame(pd.read_json(ath.runsSummary))
    existingFitlines = DataFrame(pd.read_json(ath.fitLines))
    masterList = DataFrame(pd.read_json(ath.masterList))

    activities = list(client.get_activities())

    # activity IDs of runs already in the system
    try:
        ids = existingSummary.activityId
    except AttributeError:
        ids = []

    for i in range(len(activities)):
        #for i in range(30,37):
        # Ignoring activities already in the system
        if (len(ids) == 0) or (float(activities[i].id) not in list(ids)):

            try:
                # compiling df for raw json-ization
                activityId = activities[i].id
                run = client.get_activity_streams(activityId,
                                                  types=[
                                                      'time', 'latlng',
                                                      'distance', 'heartrate',
                                                      'altitude', 'cadence'
                                                  ])
                latlng = run['latlng'].data
                time = run['time'].data
                distance = run['distance'].data
                heartrate = run['heartrate'].data
                altitude = run['altitude'].data
                cadence = run['cadence'].data
                date = activities[i].start_date_local
                activity = activityId
                dfi = thresher.assemble(date, activityId, heartrate, distance,
                                        time, altitude, latlng, cadence)

                # basic cleanup, only removing totally unreasonable values
                dfi = thresher.basicClean(dfi)

                # if we ever want to try our hand at improving strava's speed data (ie by predicting speed when GPS blanks), intervene here:

                #dfi = thresher.addDistDeltas(dfi)

                try:
                    fitline = thresher.getFitlineLws(dfi)  # this adds speed-shifted columns
                except Exception:
                    fitline = pd.DataFrame({})

                try:
                    mafScore = fitline[fitline.hr == 140.0].avgSpeed.iloc[0]
                    print("MAF", mafScore)
                except Exception:
                    mafScore = np.nan

                fitline_json = fitline.to_json(orient='records')

                # getting summary info for run (as one-entry dict)
                runSummary = thresher.getSingleSummaryDf(dfi)

                # adding mafScore to summary
                runSummary['mafScore'] = mafScore

                print(runSummary)

                # adding predicted hr and speed values
                #dfi = thresher.getPred(dfi)

                # saving entry to database
                Activity.objects.create(act_id=activityId,
                                        name=str(activities[i].name),
                                        description=activities[i].description,
                                        act_type=activities[i].type,
                                        date=activities[i].start_date_local,
                                        timezone=activities[i].timezone,
                                        df=dfi.to_json(orient='records'),
                                        avgHr=runSummary['avgHr'],
                                        hrVar=runSummary['variation'],
                                        realMiles=runSummary['realMiles'],
                                        recovery=runSummary['recovery'],
                                        easy=runSummary['easy'],
                                        stamina=runSummary['stamina'],
                                        impulse=runSummary['impulse'],
                                        totalTime=runSummary['totalTime'],
                                        totalDist=runSummary['totalDist'],
                                        climb=runSummary['climb'],
                                        fitline=fitline_json,
                                        mafScore=mafScore,
                                        athlete=ath)

                # updating runs summary
                existingSummary = existingSummary.append(runSummary,
                                                         ignore_index=True)
                existingFitlines = existingFitlines.append(fitline,
                                                           ignore_index=True)
                masterList = masterList.append(dfi, ignore_index=True)

            except Exception:
                continue

    # saving updated runs summary to athlete profile
    ath.runsSummary = existingSummary.to_json(orient='records')
    ath.save(update_fields=['runsSummary'])

    existingSummary.to_pickle("runsSummary.txt")

    # saving updated fit lines to athlete profile
    ath.fitLines = existingFitlines.to_json(orient='records')
    ath.save(update_fields=['fitLines'])

    ath.masterList = masterList.to_json(orient='records')
    ath.save(update_fields=['masterList'])

    # testing...
    existingSummary = pd.read_json(ath.runsSummary)
    #print(existingSummary)

    existingFitlines = pd.read_json(ath.fitLines)
    #print(existingFitlines)

    global path
    path = os.path.dirname(__file__)
    # updating dataframe, pickling for use in other views
    #global df
    #df = thresher.masterAssemble(client)

    masterDf = pd.read_json(ath.masterList)
    #print(masterDf)
    masterDf.to_pickle(str(path) + "/" + str(athlete.id) + "masterDf.txt")

    return render(request, 'stravaChimp/authorization.html', {
        'code': code,
        'access_token': access_token,
        'athleteId': athleteId
    })
Example #10
    def import_strava_api(self):

        self._init_database_handler()

        st = StravaTokenHandler()
        st.set_db_handler(self.dbh, self.core_info.get("db_hash"))
        st.update_token()
        del st

        user = self.dbh.list_user_by_hash(
            user_hash=self.core_info.get("db_hash"))

        client = Client(access_token=user["strava_bearer"]["access_token"])
        athlete = client.get_athlete()
        athlete_id = athlete.id

        activity_stream_types = [
            "time", "latlng", "distance", "altitude", "velocity_smooth",
            "heartrate", "cadence", "watts", "temp", "moving", "grade_smooth"
        ]

        for activity in client.get_activities(
                before=self.activity_raw_date_end.strftime(
                    "%Y-%m-%dT%H:%M:%SZ"),
                after=self.activity_raw_date_beg.strftime(
                    "%Y-%m-%dT%H:%M:%SZ"),
                limit=100):
            # The first task is to form a common branch description from the
            # Strava API activity (i.e. activity -> branch/track).
            write_success, hash_str = self._handle_activity_from__strava_api(
                activity=activity)

            if write_success is False:
                print("Stopping: creating the branch/track failed.")
                break

            #Extract more details from the activity via the API
            activity_stream = client.get_activity_streams(
                activity_id=activity.id,
                types=activity_stream_types,
                resolution="high",
                # series_type=None
            )

            #continue
            # Create a GPS data leaf
            self._handle_activity_from_strava_api_gps(
                activity=activity,
                activity_stream=activity_stream,
                hash_str=hash_str)

            # Create a strava based distance leaf
            self._handle_activity_from_strava_api_distances(
                activity=activity,
                activity_stream=activity_stream,
                hash_str=hash_str)

            # Create strava based metadata leaf
            self._handle_activity_from_stravi_api_metadata(
                activity=activity,
                activity_stream=activity_stream,
                hash_str=hash_str)
Example #11
#print("*** - Athlete Method")
#athlete = client.get_athlete()
#print("Hello, {}. I know your email is {}".format(athlete.firstname, athlete.email))

#print("*** - Activity Method")
#activity = client.get_activity(anal_sess)
#print("type = " + activity.type)
#print("distance = " + format(activity.distance))

# Activities can have many streams, you can request desired stream types
#print("type={0.type} distance={1} km".format(activity,unithelper.kilometers(activity.distance)))

#print("*** - Streams Method")
streams = client.get_activity_streams(args.STRactsess,
                                      types=a_stypes,
                                      series_type=strm_ser_type)

header = strm_ser_type
for x in streams.keys():
    header += (",{}").format(x)
print(header)

#header=','.join(streams.keys()).format()
#print("HEADER",header)
#sys.exit()

# The original example is truncated here; the loop below follows the apparent
# intent: print one CSV row per sample, one column per requested stream.
for idx, value in enumerate(streams[strm_ser_type].data):
    row = format(value)
    for k in streams.keys():
        row += ",{}".format(streams[k].data[idx])
    print(row)
Example #12
def generate_plot(strava_auth, selected_activity, strava_activity_data):
    if strava_auth is None:
        raise PreventUpdate
    if selected_activity is None:
        # TODO: show different data
        raise PreventUpdate
    current = selected_activity
    activity_cache = dash.no_update
    if strava_activity_data is not None and str(current['id']) in strava_activity_data:
        graph_data = strava_activity_data[str(current['id'])]
    else:
        # Activities can have many streams, you can request desired stream types
        types = [
            'time',
            'latlng',
            'altitude',
            'heartrate',
            'temp',
            'distance',
            'velocity_smooth',
            # 'cadence', 'moving', 'grade_smooth'
        ]
        #  Result is a dictionary object. The dict's keys are the stream types.
        client = Client(access_token=strava_auth['access_token'])
        streams = client.get_activity_streams(current['id'],
                                              types=types,
                                              resolution='medium')

        if streams:
            data_hr = streams['heartrate'].data if 'heartrate' in streams else []
            data_velocity = streams['velocity_smooth'].data if 'velocity_smooth' in streams else []
            data_time = streams['time'].data if 'time' in streams else []
            data_distance = streams['distance'].data if 'distance' in streams else []
        else:
            data_hr = []
            data_velocity = []
            data_time = []
            data_distance = []

        activity_data = {
            'activity_data': selected_activity,
            'time': data_time,
            'distance': data_distance,
            'heartrate': data_hr,
            'velocity_smooth': data_velocity,
            'beats_per_meter': [
                0 if v == 0 else hr / 60 / v
                for hr, v in zip(data_hr, data_velocity)
            ],
            # 'cadence': streams['cadence'].data if 'cadence' in streams.keys() else [],
            # 'moving': streams['moving'].data if 'moving' in streams.keys() else [],
            # 'grade_smooth': streams['grade_smooth'].data if 'grade_smooth' in streams.keys() else [],
        }

        # Key by the string id for consistency with the lookup above.
        new_data = {} if strava_activity_data is None else copy.deepcopy(strava_activity_data)
        new_data[str(current['id'])] = activity_data
        activity_cache = new_data

        graph_data = activity_data
    x = []
    if 'heartrate' in graph_data and 'distance' in graph_data:
        x = graph_data['distance']

    figure = go.Figure(data=[
        go.Scatter(
            x=x,
            y=graph_data['heartrate'],
            name="Heartbeat",
            customdata=list(
                zip(
                    # %{customdata[0]}
                    [style.format_time(t) for t in graph_data['time']],
                    # %{customdata[1]}
                    graph_data['distance'],
                )),
            hovertemplate="%{customdata[0]} min<br>"
            "%{customdata[1]:.1f} m<br>"
            "<b>%{y} bpm</b><extra></extra>"),
        go.Scatter(
            x=x,
            y=graph_data['beats_per_meter'],
            name="Beats per meter",
            customdata=list(
                zip(
                    # %{customdata[0]}
                    [style.format_time(t) for t in graph_data['time']],
                    # %{customdata[1]}
                    graph_data['distance'],
                )),
            hovertemplate="%{customdata[0]} min<br>"
            "%{customdata[1]:.1f} m<br>"
            "<b>%{y} b/m</b><extra></extra>"),
        go.Scatter(
            x=x,
            y=graph_data['velocity_smooth'],
            name="Speed",
            customdata=list(
                zip(
                    # %{customdata[0]}
                    [style.format_time(t) for t in graph_data['time']],
                    # %{customdata[1]}
                    graph_data['distance'],
                )),
            hovertemplate="%{customdata[0]} min<br>"
            "%{customdata[1]:.1f} m<br>"
            "<b>%{y} m/s</b><extra></extra>")
    ])
    return [figure, activity_cache]
Example #13
from stravalib import Client
from matplotlib.collections import LineCollection
import matplotlib.pyplot as plt
import numpy as np
import api

client = Client(access_token=api.access_token)

activities = list(client.get_activities())  # Fetch the athlete's activities

activity_id = activities[0].id  # most recent activity

types = ['time', 'altitude', 'latlng', 'moving', 'distance']

s = client.get_activity_streams(activity_id, types=types)
distance = s['distance']
altitude = s['altitude']
latlng = s['latlng']
y, x = zip(*latlng.data)

fig, ax = plt.subplots(2)
ax[0].plot(distance.data, altitude.data, 'k')  # elevation profile
ax[1].plot(x, y, 'k')  # route overview
ax[1].set_aspect('equal')
plt.show()

# Elevation heatmap
fig, ax = plt.subplots()

points = np.array([x, y]).T.reshape(-1, 1, 2)
segments = np.concatenate([points[:-1], points[1:]], axis=1)

# (The original example is truncated here; a minimal completion that colours
# each segment of the route by altitude.)
lc = LineCollection(segments, cmap='viridis')
lc.set_array(np.array(altitude.data[:-1]))
ax.add_collection(lc)
ax.autoscale()
ax.set_aspect('equal')
plt.show()
Example #14
from stravalib import Client
import csv

client = Client(access_token='YourAccessToken')

athlete = client.get_athlete()
activities = client.get_activities(limit=9000)

for a in activities:
  # Add an if statement to filter activity type, e.g. if a.type == 'Ride':
  activity_id = a.id
  name = a.name
  tp = a.type
  da = a.start_date
  types = ['time', 'latlng']
  streams = client.get_activity_streams(activity_id, types=types, resolution='high')
  times = streams['time'].data
  coords = streams['latlng'].data
  dateStart = (str(da)[:-15]).replace("-", "_")

  csvFile = "C:/strava/csv/" + dateStart + "_" + str(activity_id) + ".csv"
  with open(csvFile, 'w', newline='') as myfile:
      wr = csv.writer(myfile, quoting=csv.QUOTE_ALL)
      wr.writerow(["Date", "ActivityName", "ActivityType", "lat", "lon"])
      for cor in coords:
          wr.writerow([dateStart, name, tp, str(cor[0]), str(cor[1])])
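
Requesting high-resolution streams for up to 9000 activities will eventually hit Strava's API rate limits (a short window measured in 15-minute blocks plus a daily cap). A minimal retry sketch; it assumes stravalib raises stravalib.exc.RateLimitExceeded in that case, which is worth verifying against the version you run:

import time
from stravalib import exc

def get_streams_with_retry(client, activity_id, **kwargs):
    """Fetch streams, sleeping through one rate-limit window before retrying."""
    try:
        return client.get_activity_streams(activity_id, **kwargs)
    except exc.RateLimitExceeded:
        time.sleep(15 * 60)  # assumed 15-minute short-term window
        return client.get_activity_streams(activity_id, **kwargs)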