Example #1
 def get_outfilename(self):
 #-------------------------------------------------------------------------------
     '''
     get output filename
     
     must be called after :meth:`set_runner` and :meth:`set_renderfname`
     
     :rtype: name of output file for plot
     '''
     tdate = timeu.asctime('%Y-%m-%d')
     ttime = timeu.asctime('%H%M')
     outfilename = self.renderfname.format(who=self.who,date=tdate.epoch2asc(self.exectime),time=ttime.epoch2asc(self.exectime))
     return outfilename
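
For context, timeu.asctime wraps a strftime-style format; epoch2asc renders an epoch timestamp with that format, which is how the {date} and {time} fields above get filled. A minimal sketch, assuming only the loutilities.timeu behavior shown throughout these examples (the name and output values are illustrative):

from loutilities import timeu
import time

tdate = timeu.asctime('%Y-%m-%d')
ttime = timeu.asctime('%H%M')
now = time.time()
# e.g. '{who}-ag-analysis-{date}-{time}.png' -> 'jane doe-ag-analysis-2014-06-01-1305.png'
print('{who}-ag-analysis-{date}-{time}.png'.format(who='jane doe',
                                                   date=tdate.epoch2asc(now),
                                                   time=ttime.epoch2asc(now)))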
Example #2
 def crunch(self):
 #-------------------------------------------------------------------------------
     '''
     crunch the race data to put the age grade data into the stats
     
     '''
     ### DEBUG>
     debug = False
     if debug:
         tim = timeu.asctime('%Y-%m-%d-%H%M')
         _DEB = open('analyzeagegrade-debug-{}-crunch-{}.csv'.format(tim.epoch2asc(self.exectime),self.who),'wb')
         fields = ['date','dist','time','ag']
         DEB = csv.DictWriter(_DEB,fields)
         DEB.writeheader()
     ### <DEBUG
         
     # calculate age grade for each sample    
     for i in range(len(self.stats)):
         racedate = self.stats[i].date
         agegradeage = racedate.year - self.dob.year - int((racedate.month, racedate.day) < (self.dob.month, self.dob.day))
         distmiles = self.stats[i].dist/METERSPERMILE
         agpercentage,agtime,agfactor = ag.agegrade(agegradeage,self.gender,distmiles,self.stats[i].time)
         self.stats[i].ag = agpercentage
         
         ### DEBUG>
         if debug:
             thisstat = {}
             for field in fields:
                 thisstat[field] = getattr(self.stats[i],field)
             DEB.writerow(thisstat)
         ### <DEBUG
         
     ### DEBUG>
     if debug:
         _DEB.close()
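
The age-at-race-date expression above subtracts one year whenever the race falls before the runner's birthday in that year; the same trick in isolation:

from datetime import date

def age_on(racedate, dob):
    # subtract 1 if the birthday has not yet occurred by racedate
    return racedate.year - dob.year - int((racedate.month, racedate.day) < (dob.month, dob.day))

print(age_on(date(2014, 6, 1), date(1980, 8, 15)))   # 33 -- birthday not reached yet
print(age_on(date(2014, 9, 1), date(1980, 8, 15)))   # 34 -- birthday already passed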
Example #3
 def get_outfilename(self):
     #-------------------------------------------------------------------------------
     '''
     get output filename
     
     must be called after :meth:`set_runner` and :meth:`set_renderfname`
     
     :rtype: name of output file for plot
     '''
     tdate = timeu.asctime('%Y-%m-%d')
     ttime = timeu.asctime('%H%M')
     outfilename = self.renderfname.format(
         who=self.who,
         date=tdate.epoch2asc(self.exectime),
         time=ttime.epoch2asc(self.exectime))
     return outfilename
Example #4
 def getdatafromra(self):
 #-------------------------------------------------------------------------------
     '''
     get the user's data from RunningAHEAD
     
     :rtype: dists,stats,dob,gender where dists =  set of distances included in stats, stats = {'date':[datetime of race,...], 'dist':[distance(meters),...], 'time':[racetime(seconds),...]}, dob = date of birth (datetime), gender = 'M'|'F'
     '''
     # set up RunningAhead object and get users we're allowed to look at
     ra = runningahead.RunningAhead()    
     users = ra.listusers()
     day = timeu.asctime('%Y-%m-%d') # date format in RunningAhead workout object
     
     # find correct user, grab their workouts
     workouts = None
     for user in users:
         thisuser = ra.getuser(user['token'])
         if 'givenName' not in thisuser: continue    # we need to know the name
         givenName = thisuser['givenName'] if 'givenName' in thisuser else ''
         familyName = thisuser['familyName'] if 'familyName' in thisuser else ''
         thisusername = '******'.join([givenName,familyName])
         if thisusername != self.who: continue            # not this user, keep looking
         
         # grab user's date of birth and gender, if not already supplied
         if not self.dob:
             self.dob = day.asc2dt(thisuser['birthDate'])
         if not self.gender:
             self.gender = 'M' if thisuser['gender']=='male' else 'F'
         
         # if we're here, found the right user, now let's look at the workouts
         firstdate = day.asc2dt('1980-01-01')
         lastdate = day.asc2dt('2199-12-31')
         workouts = ra.listworkouts(user['token'],begindate=firstdate,enddate=lastdate,getfields=FIELD['workout'].keys())
 
         # we've found the right user and collected their data, so we're done
         break
         
     # save race workouts, if any found
     tempstats = []    # initialized here so the loop below is safe even if no workouts were found
     if workouts:
         for wo in workouts:
             if wo['workoutName'].lower() != 'race': continue
             thisdate = day.asc2dt(wo['date'])
             thisdist = runningahead.dist2meters(wo['details']['distance'])
             thistime = wo['details']['duration']
             
             tempstats.append((thisdate,AgeGradeStat(thisdate,thisdist,thistime)))
             
     # these may come sorted already, but just in case
     #tempstats.sort()
     
     # put the stats in the right format
     for thisdate,thisstat in tempstats:
         self.stats.append(thisstat)
         self.dists.add(round(thisstat.dist))      # keep track of distances to nearest meter
Example #5
def main():
    #----------------------------------------------------------------------
    descr = '''
    collect race results from runningahead
    
    searchfile must have at least the following headings:
    
        * GivenName - first name
        * FamilyName - last name
        * Gender - Male or Female (or M or F)
        * DOB - date of birth in yyyy-mm-dd format
        * City - city of residence [optional]
        * State - state of residence [optional]
    '''

    parser = argparse.ArgumentParser(
        description=descr,
        formatter_class=argparse.RawDescriptionHelpFormatter,
        version='{0} {1}'.format('running', version.__version__))
    parser.add_argument(
        'searchfile',
        help=
        "file with names, genders and birth dates of athletes to search for")
    parser.add_argument('outfile', help="output file contains race results")
    parser.add_argument(
        '-b',
        '--begindate',
        help="choose races between begindate and enddate, yyyy-mm-dd",
        default=None)
    parser.add_argument(
        '-e',
        '--enddate',
        help="choose races between begindate and enddate, yyyy-mm-dd",
        default=None)
    args = parser.parse_args()

    searchfile = args.searchfile
    outfile = args.outfile

    argtime = timeu.asctime('%Y-%m-%d')
    if args.begindate:
        begindate = argtime.asc2epoch(args.begindate)
    else:
        begindate = argtime.asc2epoch('1970-01-01')
    if args.enddate:
        enddate = argtime.asc2epoch(args.enddate)
    else:
        enddate = argtime.asc2epoch('2030-12-31')

    # collect all the data
    collect(searchfile, outfile, begindate, enddate)
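
The begindate/enddate handling above (default to a wide 1970..2030 window, otherwise parse yyyy-mm-dd into epoch seconds) recurs in several of these scripts; a small standalone helper, hypothetical and not part of the original module, shows the pattern:

from loutilities import timeu

def daterange_to_epoch(begindate=None, enddate=None):
    # hypothetical helper: mirrors the defaulting used in main() above
    argtime = timeu.asctime('%Y-%m-%d')
    begin = argtime.asc2epoch(begindate or '1970-01-01')
    end = argtime.asc2epoch(enddate or '2030-12-31')
    return begin, end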
Example #6
def collectclub(aag,clubfile):
#----------------------------------------------------------------------
    '''
    Collect club age grade statistics, based on collected athlinks statistics (collectathlinksresults)
    
    :param aag: :class:`AnalyzeAgeGrade` objects, by runner name
    :param clubfile: file with club results, output from runningclub.exportresults
    '''
    # reading clubfile
    _clubf = open(clubfile,'rb')
    clubf = csv.DictReader(_clubf)
    
    # TODO: move this to exportresults, a la athlinksresults; rename exportresults to clubresults
    tfile = timeu.asctime('%Y-%m-%d')
    class ClubResult():
        def __init__(self,name,dob,gender,racename,racedate,distmiles,distkm,resulttime,ag):
            self.name = name
            self.dob = tfile.asc2dt(dob)
            self.gender = gender
            self.racename = racename
            self.racedate = tfile.asc2dt(racedate)
            self.distmiles = float(distmiles)
            self.distkm = float(distkm)
            self.resulttime = timeu.timesecs(resulttime)
            self.ag = float(ag)
    
    # read records from clubfile
    # gather each individual's result statistics, render later
    while True:
        # if we've completed the last runner's result collection,
        # render the results, and set up for the next runner
        try:
            row = clubf.next()
            result = ClubResult(row['name'],row['dob'],row['gender'],row['race'],row['date'],row['miles'],row['km'],row['time'],row['ag'])
        except StopIteration:
            result = None
            
        # are we done?
        if result is None: break
        
        thisname = result.name.lower()

        # initialize aag data structure, if not already done
        initaagrunner(aag,thisname,result.gender,result.dob)
    
        # collect this result
        timesecs = result.resulttime
        if timesecs > 0:
            aag[thisname].add_stat(result.racedate,result.distkm*1000,timesecs,race=result.racename,source='clubraces',priority=PRIO_CLUBRACES)
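
timeu.timesecs, used for resulttime above, converts a clock-style time string to seconds; a rough stand-in for illustration only (not the loutilities implementation) behaves like this:

def timesecs_equiv(timestr):
    # approximate stand-in: '1:23:45' -> 5025.0, '23:45' -> 1425.0, '45' -> 45.0
    secs = 0.0
    for part in timestr.split(':'):
        secs = secs * 60 + float(part)
    return secs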
Example #7
    def post(self):
    #----------------------------------------------------------------------
        try:
            club_id = flask.session['club_id']

            readcheck = ViewClubDataPermission(club_id)
            writecheck = UpdateClubDataPermission(club_id)
            
            # verify user can at least read the data, otherwise abort
            if not readcheck.can():
                db.session.rollback()
                flask.abort(403)

            # specify the form
            form = ExportResultsForm()

            # this should not fire if date fields are StringField
            if not form.validate():
                flask.flash('Dates must be formatted as yyyy-mm-dd')
                db.session.rollback()
                return flask.render_template('exportresults.html', form=form, pagename='Export Results')

            # check date validity
            ddymd = timeu.asctime('%Y-%m-%d')
            try:
                if form.start.data:
                    temp = ddymd.asc2dt(form.start.data)
                    form.start.data = ddymd.dt2asc(temp)    # normalize format
                if form.end.data:
                    temp = ddymd.asc2dt(form.end.data)
                    form.end.data = ddymd.dt2asc(temp)    # normalize format
            except ValueError:
                flask.flash('Dates must be formatted as yyyy-mm-dd')
                db.session.rollback()
                return flask.render_template('exportresults.html', form=form, pagename='Export Results')
            
            # return results
            today = ddymd.epoch2asc(time.time())
            response = make_response(collectresults(club_id, begindate=form.start.data, enddate=form.end.data))
            response.headers["Content-Disposition"] = "attachment; filename=clubresults-{}.csv".format(today)
            
            # commit database updates and close transaction
            db.session.commit()
            return response
        
        except:
            # roll back database updates and close transaction
            db.session.rollback()
            raise
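
The start/end date check above relies on asc2dt raising ValueError for anything that does not parse as yyyy-mm-dd, and on the asc2dt/dt2asc round trip to zero-pad the user's input. A minimal sketch of that normalization, assuming the same lenient strptime-style parsing used in these examples:

from loutilities import timeu

ddymd = timeu.asctime('%Y-%m-%d')
try:
    temp = ddymd.asc2dt('2015-3-7')          # lenient parse
    print(ddymd.dt2asc(temp))                # '2015-03-07' -- normalized, zero-padded
except ValueError:
    print('Dates must be formatted as yyyy-mm-dd')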
Example #8
    def crunch(self):
        #-------------------------------------------------------------------------------
        '''
        crunch the race data to put the age grade data into the stats
        
        '''
        ### DEBUG>
        debug = False
        if debug:
            tim = timeu.asctime('%Y-%m-%d-%H%M')
            _DEB = open(
                'analyzeagegrade-debug-{}-crunch-{}.csv'.format(
                    tim.epoch2asc(self.exectime), self.who), 'wb')
            fields = ['date', 'dist', 'time', 'ag']
            DEB = csv.DictWriter(_DEB, fields)
            DEB.writeheader()
        ### <DEBUG

        # calculate age grade for each sample
        for i in range(len(self.stats)):
            racedate = self.stats[i].date
            agegradeage = racedate.year - self.dob.year - int(
                (racedate.month, racedate.day) <
                (self.dob.month, self.dob.day))
            distmiles = self.stats[i].dist / METERSPERMILE
            agpercentage, agtime, agfactor = ag.agegrade(
                agegradeage, self.gender, distmiles, self.stats[i].time)
            self.stats[i].ag = agpercentage

            ### DEBUG>
            if debug:
                thisstat = {}
                for field in fields:
                    thisstat[field] = getattr(self.stats[i], field)
                DEB.writerow(thisstat)
            ### <DEBUG

        ### DEBUG>
        if debug:
            _DEB.close()
Example #9
def main(): 
#----------------------------------------------------------------------
    descr = '''
    collect race results from ultrasignup
    
    searchfile must have at least the following headings:
    
        * GivenName - first name
        * FamilyName - last name
        * Gender - Male or Female (or M or F)
        * DOB - date of birth in yyyy-mm-dd format
        * City - city of residence [optional]
        * State - state of residence [optional]
    '''
    
    parser = argparse.ArgumentParser(description=descr,formatter_class=argparse.RawDescriptionHelpFormatter,
                                     version='{0} {1}'.format('running',version.__version__))
    parser.add_argument('searchfile', help="file with names, genders and birth dates of athletes to search for")
    parser.add_argument('outfile', help="output file contains race results")
    parser.add_argument('-b','--begindate', help="choose races between begindate and enddate, yyyy-mm-dd",default=None)
    parser.add_argument('-e','--enddate', help="choose races between begindate and enddate, yyyy-mm-dd",default=None)
    args = parser.parse_args()

    searchfile = args.searchfile
    outfile = args.outfile

    argtime = timeu.asctime('%Y-%m-%d')
    if args.begindate:
        begindate = argtime.asc2epoch(args.begindate)
    else:
        begindate = argtime.asc2epoch('1970-01-01')
    if args.enddate:
        enddate = argtime.asc2epoch(args.enddate)
    else:
        enddate = argtime.asc2epoch('2030-12-31')
        
    # collect all the data
    collect(searchfile,outfile,begindate,enddate)
Example #10
from collections import OrderedDict
import time
import json
import types

# pypi

# other

# home grown
from running.runningaheadmembers import RunningAheadMembers
from running.ra2membersfile import ra2members
from analyzemembership import analyzemembership
from loutilities import timeu
ymd = timeu.asctime('%Y-%m-%d')
mdy = timeu.asctime('%m/%d/%Y')
md = timeu.asctime('%m-%d')
from loutilities.csvwt import wlist
from loutilities import apikey

import version

class invalidParameter(Exception): pass

#----------------------------------------------------------------------
def membercount(ordyears,statsfile=None): 
#----------------------------------------------------------------------
    '''
    convert members added per day (ordyears) to membercount per day
    
Example #11
from platform import system

from loutilities.user.model import Interest
from loutilities.filters import filtercontainerdiv, filterdiv
from loutilities.timeu import asctime   # needed for asctime() and system() used below
from dominate.tags import div, button, input, span, i


class parameterError(Exception):
    pass


# homegrown
from ..admin.viewhelpers import localinterest
from ...model import db, LocalInterest
from ...model import Member, TableUpdateTime
from . import bp

ymd = asctime('%Y-%m-%d')
mdy = asctime('%m/%d/%Y')

# https://stackoverflow.com/questions/49674902/datetime-object-without-leading-zero
if system() != 'Windows':
    cachet = asctime('%-m/%-d/%Y %-I:%M %p')
else:
    cachet = asctime('%#m/%#d/%Y %#I:%M %p')


def _getdivision(member):
    '''
    gets division as of Jan 1 

    :param member: Member record
    :rtype: division text
Example #12
import version
from loutilities import timeu
from runningclub import agegrade
import running.runningahead as runningahead
from running.runningahead import FIELD

class unexpectedEOF(Exception): pass
class invalidParameter(Exception): pass

METERSPERMILE = 1609.344
MAXMETER = 4999
SUBS = {1609:'1M',3219:'2M',4989:'5K',5000:'5K',8047:'5M',10000:'10K',15000:'15K',
        16093:'10M',21082:'HM',21097:'HM',42165:'Marathon',42195:'Marathon',
        80467:'50M',160934:'100M'} #

tdisp = timeu.asctime('%m/%d/%Y')
# pull in age grade object
ag = agegrade.AgeGrade()
    

#-------------------------------------------------------------------------------
def distmap(dist):
#-------------------------------------------------------------------------------
    """
    map distance to display metric
    
    :param dist: distance to map
    :rtype: float display metric for distance
    """
    return dist/100
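
SUBS maps common race distances in meters to display labels (e.g. 5000 -> '5K', 21097 -> 'HM'), with MAXMETER marking roughly where labels switch from meters to kilometers. A possible lookup helper, purely hypothetical since the actual consumer of SUBS is not shown in this excerpt:

def distlabel(meters):
    # hypothetical: exact matches use SUBS, otherwise fall back to a generic label
    meters = int(round(meters))
    if meters in SUBS:
        return SUBS[meters]
    if meters > MAXMETER:
        return '{:.1f}K'.format(meters / 1000.0)
    return '{}m'.format(meters)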
    
Example #13
import argparse
import csv
from datetime import timedelta
import copy
import logging
logging.basicConfig(format='%(asctime)s %(levelname)s:%(message)s')
logger = logging.getLogger('runningclub.getresultsmembers')

# home grown
from .raceresults import RaceResults, headerError  #, dataError
from .clubmember import CsvClubMember
from loutilities import timeu
from loutilities import agegrade
from loutilities.namesplitter import split_full_name
from loutilities.renderrun import rendertime
dbdate = timeu.asctime('%Y-%m-%d')
from . import version

# control behavior of import
DIFF_CUTOFF = 0.7  # ratio of matching characters for cutoff handled by 'clubmember'
AGE_DELTAMAX = 3  # +/- num years to be included in DISP_MISSED
JOIN_GRACEPERIOD = timedelta(7)  # allow member to join 1 week beyond race date

# support age grade
ag = agegrade.AgeGrade()

# disposition values
# * match - exact name match found in member table, with age consistent with dateofbirth
# * close - close name match found, with age consistent with dateofbirth
# * missed - close name match found, but age is inconsistent with dateofbirth
# * excluded - this name is in the exclusion table, either prior to import **or as a result of user decision**
Example #14
from loutilities import csvu
from runningclub import render
from running import accessError, parameterError

# access stuff
PAGESIZE = 100
COMPETITOR_URL = 'http://running.competitor.com'
RESULTS_METHOD = 'cgiresults'
RESULTS_SEARCH = 'firstname={firstname}&lastname={lastname}&bib={bib}&gender={gender}&division={division}&city={city}&state={state}'
# RACE_RESULTS = 'eId={raceid}&eiId={yearid}&seId={eventid}'
#PAGING = 'resultsPage={pagenum}&rowCount={pagesize}'.format(pagesize=PAGESIZE)

HTTPTIMEOUT = 10
MPERMILE = 1609.344

tindate  = timeu.asctime('%m/%d/%Y %I:%M:%S %p')
toutdate = timeu.asctime('%Y-%m-%d')

#----------------------------------------------------------------------
def racenameanddist(soup):
#----------------------------------------------------------------------
    '''
    get race name and distance from soup
    
    :param soup: BeautifulSoup object for whole page
    :rtype: racename, distmiles, distkm
    '''

    ensoup = soup.find(class_='event-name')
    if ensoup:
        eventstring = ensoup.text
Example #15
# github

# other

# home grown
from loutilities import timeu
from loutilities import csvu
from runningclub import agegrade
import ultrasignup
import version

# see http://api.ultrasignup.com/Enums/RaceCategories
ag = agegrade.AgeGrade()
class invalidParameter(Exception): pass

ftime = timeu.asctime('%Y-%m-%d')

#----------------------------------------------------------------------
def collect(searchfile,outfile,begindate,enddate):
#----------------------------------------------------------------------
    '''
    collect race results from ultrasignup
    
    :param searchfile: path to file containing names, genders, birth dates to search for
    :param outfile: output file path
    :param begindate: epoch time - choose races between begindate and enddate
    :param enddate: epoch time - choose races between begindate and enddate
    '''
    
    # open files
    _IN = open(searchfile,'rb')
Example #16
from datetime import datetime
from loutilities.timeu import asctime

def timenow():
    """useful for logpoints"""
    return asctime('%H:%M:%S.%f').dt2asc(datetime.now())
Example #17
import csv
from os.path import join as pathjoin

# pypi

# github

# home grown
from . import version
from loutilities.transform import Transform
from loutilities.timeu import asctime, age
from datetime import date
from collections import defaultdict, OrderedDict

# time stuff
tymd = asctime('%Y-%m-%d')

# transform DETAILS file produced by scoretility Results Analysis
xform = Transform(
            {
                'name'      : 'runnername',
                'gender'    : 'gender',
                'age'       : lambda result: age(date.today(), tymd.asc2dt(result['dob'])),
                'distmiles' : 'distmiles',
                'ag'        : lambda result: int(float(result['agpercent'])),
                'year'      : lambda result: tymd.asc2dt(result['racedate']).year
            },
            sourceattr=False,
            targetattr=True)

# # from https://gist.github.com/shenwei356/71dcc393ec4143f3447d
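
In the dictionary passed to Transform above, each value is either the name of a source field or a callable over the whole source record; the transform step itself is not shown in this excerpt, but a hypothetical apply function (not the actual loutilities.transform.Transform internals) illustrates the semantics:

def apply_mapping(mapping, source):
    # hypothetical: string rules copy a source key, callables compute from the whole record
    out = {}
    for target, rule in mapping.items():
        out[target] = rule(source) if callable(rule) else source[rule]
    return out

# e.g. apply_mapping(mapping, {'runnername': 'Jane Doe', 'gender': 'F', 'dob': '1980-08-15',
#                              'distmiles': 3.1, 'agpercent': '72.4', 'racedate': '2014-06-01'})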
Example #18
# other
import sqlalchemy   # see http://www.sqlalchemy.org/ written with 0.8.0b2
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()   # create sqlalchemy Base class
from sqlalchemy import Column, Integer, Float, Boolean, String, Sequence, UniqueConstraint, ForeignKey
from sqlalchemy.orm import sessionmaker, object_mapper, relationship, backref
Session = sessionmaker()    # create sqlalchemy Session class

# home grown
from .config import CF,SECCF,OPTUSERPWAPI,OPTCLUBABBREV,OPTDBTYPE,OPTDBSERVER,OPTDBNAME,OPTDBGLOBUSER,OPTUNAME,KF,SECKEY,OPTPRIVKEY
from . import userpw
from . import version
from loutilities import timeu

DBDATEFMT = '%Y-%m-%d'
t = timeu.asctime(DBDATEFMT)

# will be handle for persistent storage in webapp
PERSIST = None

class dbConsistencyError(Exception): pass

#----------------------------------------------------------------------
def setracedb(dbfilename=None):
#----------------------------------------------------------------------
    '''
    initialize race database
    
    :params dbfilename: filename for race database, if None get from configuration
    '''
    # set up connection to db
Example #19
def main():
    #----------------------------------------------------------------------

    parser = argparse.ArgumentParser(
        version='{0} {1}'.format('running', version.__version__))
    parser.add_argument('gpxfile', help='gpx formatted file')
    parser.add_argument(
        'racestarttime',
        help="time of race start in '%%Y-%%m-%%dT%%H:%%M' format")
    parser.add_argument('-o',
                        '--output',
                        help='name of output file (default %(default)s)',
                        default='racewx.csv')
    args = parser.parse_args()

    gpxfile = args.gpxfile
    racestarttime = args.racestarttime
    timrace = timeu.asctime('%Y-%m-%dT%H:%M')
    racestartdt = timrace.asc2dt(racestarttime)  # naive
    output = args.output

    # get input
    _GPX = open(gpxfile, 'r')
    gpx = gpxpy.parse(_GPX)

    # loop through gpx tracks
    wxdata = []
    lasttime = None
    exectime = int(round(time.time()))
    for track in gpx.tracks:
        for segment in track.segments:
            for point in segment.points:
                pepoch = timeu.dt2epoch(point.time)
                if not lasttime or pepoch - lasttime >= WXPERIOD:
                    plon = point.longitude
                    plat = point.latitude
                    if not lasttime:
                        starttime = pepoch
                        tzid = gettzid(plat, plon)
                        tz = pytz.timezone(tzid)
                        racestartlocdt = tz.normalize(tz.localize(racestartdt))
                        racestartepoch = timeu.dt2epoch(racestartlocdt)
                        shift = racestartepoch - starttime
                    targtime = timeu.dt2epoch(
                        point.time) + shift  # shift to race time

                    # get weather
                    # temp, dew point, cloud cover, precip intensity
                    # wind speed/bearing: http://matplotlib.org/api/pyplot_api.html#matplotlib.pyplot.barbs
                    #   (http://matplotlib.1069221.n5.nabble.com/plot-arrows-for-wind-direction-degrees-td13499.html)
                    wx = getwx(plat, plon, targtime)
                    wx['lat'] = plat
                    wx['lon'] = plon
                    wx['dewpoint'] = dewpoint(wx['temperature'],
                                              wx['humidity'] * 100)
                    wx['windchill'] = windchill(wx['temperature'],
                                                wx['windSpeed'])
                    wx['heatindex'] = heatindex(wx['temperature'],
                                                wx['humidity'] * 100)
                    wxdata.append(wx)

                    lasttime = pepoch

    # create the file and write header if it doesn't exist
    if not os.path.exists(output):
        writeheader = True
        _WX = open(output, 'wb')
    else:
        writeheader = False
        _WX = open(output, 'ab')

    heading = [
        'exectime', 'time', 'lat', 'lon', 'temperature', 'humidity',
        'dewpoint', 'windchill', 'heatindex', 'precipType',
        'precipProbability', 'precipIntensity', 'windSpeed', 'windBearing',
        'cloudCover', 'summary', 'pressure', 'visibility'
    ]
    WX = csv.DictWriter(_WX, heading, extrasaction='ignore')
    if writeheader:
        WX.writeheader()
    for wx in wxdata:
        wx['exectime'] = exectime
        WX.writerow(wx)
    _WX.close()
Example #20
import math

# pypi

# github

# other

# home grown
from . import version
from . import racedb
from .config import softwareError
from loutilities import timeu

DBDATEFMT = racedb.DBDATEFMT
dbtime = timeu.asctime(DBDATEFMT)
rndrtim = timeu.asctime('%m/%d/%Y')


#----------------------------------------------------------------------
def getprecision(distance):
    #----------------------------------------------------------------------
    '''
    get the precision for rendering, based on distance
    
    precision might be different for time vs. age group adjusted time
    
    :param distance: distance (miles)
    :rtype: (timeprecision,agtimeprecision)
    '''
Example #21
def main():
    #----------------------------------------------------------------------

    parser = argparse.ArgumentParser(
        version='{0} {1}'.format('running', version.__version__))
    parser.add_argument('infile', help='file generated by racewx')
    parser.add_argument('racename', help='race name')
    args = parser.parse_args()

    infile = args.infile
    racename = args.racename

    # get input
    _WX = open(infile, 'rb')
    WX = csv.DictReader(_WX)
    wxdata = []
    for wx in WX:
        wxdata.append(wx)
    _WX.close()

    # for now, filter out all but the max 'exectime' entries
    lastexec = max([int(wx['exectime']) for wx in wxdata])
    while int(wxdata[0]['exectime']) != lastexec:
        wxdata.pop(0)

    # pull out fields to plot
    wxplot = {}
    plotfields = [
        'time', 'temperature', 'windchill', 'heatindex', 'dewpoint',
        'windSpeed', 'windBearing', 'cloudCover', 'precipProbability',
        'precipIntensity'
    ]
    for f in plotfields:
        wxplot[f] = [float(wx[f]) if wx[f] != '' else None for wx in wxdata]

    # get range on 30 minute boundaries
    starttime = int(wxplot['time'][0])
    fintime = int(wxplot['time'][-1])
    adjstart = (starttime / (30 * 60)) * (
        30 * 60)  # rounds to next lowest 30 minute boundary
    adjfin = (
        (fintime - 1 + 30 * 60) /
        (30 * 60)) * (30 * 60)  # rounds to next highest 30 minute boundary
    startdt = timeu.epoch2dt(adjstart)
    findt = timeu.epoch2dt(adjfin)

    # time zone stuff, based on starting point
    lat = float(wxdata[0]['lat'])
    lon = float(wxdata[0]['lon'])
    tzid = racewx.gettzid(lat, lon)
    tz = pytz.timezone(tzid)
    wxplot['localtime'] = [
        timeu.utcdt2tzdt(timeu.epoch2dt(tm), tzid) for tm in wxplot['time']
    ]

    # plot data
    fig = plt.figure()
    ttitle = timeu.asctime('%m/%d/%Y')
    racedate = ttitle.epoch2asc(wxplot['time'][0])
    fdate = ttitle.epoch2asc(lastexec)
    fig.suptitle(
        'forecast for {race} {date}\nforecast date {fdate}\nPowered by Forecast.io'
        .format(race=racename, date=racedate, fdate=fdate),
        size='small')

    # set some formatting parameters
    lw = 0.5  # line width
    windcolor = 'b'
    legendx = 1.35

    # plot control
    exists = {}
    for f in ['windchill', 'heatindex']:
        exists[f] = len([it for it in wxplot[f] if it is not None]) != 0
    for f in ['precipIntensity']:
        exists[f] = len([it for it in wxplot[f] if it > 0.0]) != 0

    # plot temperatures
    ax1 = fig.add_subplot(311)
    ax1.plot(wxplot['localtime'],
             wxplot['temperature'],
             'k-',
             label='temperature',
             linewidth=lw)
    if exists['windchill']:
        ax1.plot(wxplot['localtime'],
                 wxplot['windchill'],
                 'b-',
                 label='wind chill',
                 linewidth=lw)
    if exists['heatindex']:
        ax1.plot(wxplot['localtime'],
                 wxplot['heatindex'],
                 'r-',
                 label='heat index',
                 linewidth=lw)
    ax1.plot(wxplot['localtime'],
             wxplot['dewpoint'],
             'g-',
             label='dew point',
             linewidth=lw)

    ax1.set_xlim(startdt, findt)
    fig.subplots_adjust(top=0.88, right=0.75, bottom=0.15)

    hfmt = dates.DateFormatter('%H:%M', tz=tz)
    ax1.xaxis.set_major_formatter(hfmt)
    ax1.xaxis.set_major_locator(dates.MinuteLocator(interval=30))
    ax1.grid('on')
    plt.setp(ax1.get_xticklabels(), visible=False)
    plt.setp(ax1.get_yticklabels(), fontsize='small')
    ax1.set_ylabel('degrees  \nFahrenheit', fontsize='small')

    #font = fm.FontProperties(fname='Humor-Sans.ttf')
    font = fm.FontProperties()
    xsmallfont = copy.deepcopy(font)
    xsmallfont.set_size('x-small')
    ax1.legend(prop=xsmallfont, loc='upper right', bbox_to_anchor=(legendx, 1))

    # plot wind
    ax2 = fig.add_subplot(312)
    ax2.plot(wxplot['localtime'],
             wxplot['windSpeed'],
             label='wind speed',
             linewidth=lw,
             color=windcolor)
    # note polar-> rectangular flips x,y from standard transformation because theta is from North instead of East
    # not sure why need to invert U and V to get barb to point in right direction.  Maybe vector comes from U,V and points to origin?
    U = [
        -1 * wxplot['windSpeed'][i] *
        math.sin(math.radians(wxplot['windBearing'][i]))
        for i in range(len(wxplot['windSpeed']))
    ]
    V = [
        -1 * wxplot['windSpeed'][i] *
        math.cos(math.radians(wxplot['windBearing'][i]))
        for i in range(len(wxplot['windSpeed']))
    ]
    xdates = dates.date2num(
        wxplot['localtime'])  # barbs requires floats, not datetime
    ax2.barbs(xdates,
              wxplot['windSpeed'],
              U,
              V,
              length=5,
              barbcolor=windcolor,
              flagcolor=windcolor,
              linewidth=lw)

    ax2.set_xlim(dates.date2num(startdt), dates.date2num(findt))
    miny, maxy = ax2.get_ylim()
    ax2.set_ylim(round(miny * 0.8), round(maxy * 1.2))
    ax2.xaxis.set_major_formatter(hfmt)
    ax2.xaxis.set_major_locator(dates.MinuteLocator(interval=30))
    ax2.grid('on')
    #plt.setp(ax2.get_xticklabels(), rotation='vertical', fontsize='small')
    plt.setp(ax2.get_xticklabels(), visible=False)
    plt.setp(ax2.get_yticklabels(), fontsize='small')
    ax2.set_ylabel('miles per hour', fontsize='small')
    ax2.legend(prop=xsmallfont, loc='upper right', bbox_to_anchor=(legendx, 1))

    ax3 = fig.add_subplot(313)
    precipprob = [100 * (prob or 0) for prob in wxplot['precipProbability']]
    cloudcover = [100 * (cover or 0) for cover in wxplot['cloudCover']]
    ax3.plot(wxplot['localtime'],
             precipprob,
             label='rain probability',
             linewidth=lw,
             color='b')
    ax3.plot(wxplot['localtime'],
             cloudcover,
             label='cloud cover',
             linewidth=lw,
             color='g')
    ax3.set_ylabel('percent', fontsize='small')

    ax3.set_xlim(dates.date2num(startdt), dates.date2num(findt))
    ax3.xaxis.set_major_formatter(hfmt)
    ax3.xaxis.set_major_locator(dates.MinuteLocator(interval=30))
    ax3.grid('on')
    ax3.set_ylim(0, 100)
    plt.setp(ax3.get_xticklabels(), rotation='vertical', fontsize='small')
    plt.setp(ax3.get_yticklabels(), fontsize='small')
    ax3.legend(prop=xsmallfont,
               loc='upper right',
               bbox_to_anchor=(legendx, 1.1))

    if exists['precipIntensity']:
        ax4 = ax3.twinx()
        #ax4.plot(wxplot['localtime'],wxplot['precipIntensity'],label='intensity', linewidth=lw, color='r')
        #ax4.set_yscale('log')
        ax4.semilogy(wxplot['localtime'],
                     wxplot['precipIntensity'],
                     label='intensity',
                     nonposy='mask',
                     linewidth=lw,
                     color='r')
        #ax4.set_ylabel('precipitation 0.002 very light sprinkling, 0.017 light precipitation, 0.1 precipitation, and 0.4 very heavy precipitation')
        ax4.set_ylabel('intensity', fontsize='small')
        ax4.set_ylim(0, 0.5)
        plt.setp(ax4.get_yticklabels(), fontsize='small')
        ax4.legend(prop=xsmallfont,
                   loc='upper right',
                   bbox_to_anchor=(legendx, 0.75))

    tfile = timeu.asctime('%Y-%m-%d')
    fdate = tfile.epoch2asc(lastexec)
    racename = re.sub(r'\s', '', racename)  # remove whitespace
    outfile = 'race-weather-{race}-{fdate}.png'.format(race=racename,
                                                       fdate=fdate)
    fig.savefig(outfile, format='png')
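
The adjstart/adjfin arithmetic earlier in main() snaps the plot range to 30-minute boundaries using Python 2 integer division; spelled out with explicit floor division (epoch values here are illustrative):

BOUND = 30 * 60
starttime = 1402748533                              # illustrative epoch values
fintime = 1402759999
adjstart = (starttime // BOUND) * BOUND             # round down to a 30-minute boundary
adjfin = ((fintime - 1 + BOUND) // BOUND) * BOUND   # round up to a 30-minute boundary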
Example #22
def collectresults(club_id, begindate=None, enddate=None): 
#----------------------------------------------------------------------
    '''
    collect race information from database, and save to file
    
    :param club_id: id of club to collect data for
    :param begindate: collect races between begindate and enddate, yyyy-mm-dd
    :param enddate: collect races between begindate and enddate, yyyy-mm-dd
    :rtype: csv file data, string format (e.g., data for make_response(data))
    '''
    # TODO: check format of begindate, enddate
    
    # output fields
    outfields = 'name,dob,gender,race,date,miles,km,time,ag'.split(',')
    
    # create/open results file
    tfile = timeu.asctime('%Y-%m-%d')

    # get ready for output
    outdatalist = wlist()
    OUT = csv.DictWriter(outdatalist,outfields)
    OUT.writeheader()

    # set defaults for begin and end date
    if not begindate:
        begindate = '1970-01-01'
    if not enddate:
        enddate = '2500-12-31'

    # for each member, gather results
    members = Runner.query.filter_by(club_id=club_id,member=True,active=True).all()
    rows = []
    for member in members:
        runnername = member.name
        runnerdob = member.dateofbirth
        runnergender = member.gender

        # loop through each of the runner's results
        # NOTE: results are possibly stored multiple times, for different series -- these will be deduplicated later
        for result in member.results:
            race = Race.query.filter_by(id=result.raceid).first()
            if race.date < begindate or race.date > enddate: continue
            
            resulttime = result.time
            rendertime = render.rendertime(resulttime,0)
            while len(rendertime.split(':')) < 3:
                rendertime = '0:' + rendertime
            resultag = result.agpercent
            racename = race.name
            racedate = race.date
            racemiles = race.distance
            racekm = (race.distance*METERSPERMILE)/1000
            
            # send to output - name,dob,gender,race,date,miles,km,time,ag
            row = {}
            row['name'] = runnername
            row['dob'] = runnerdob
            row['gender'] = runnergender
            row['race'] = racename
            row['date'] = racedate
            row['miles'] = racemiles
            row['km'] = racekm
            row['time'] = rendertime
            row['ag'] = resultag
            
            # deduplicate
            if row not in rows:
                rows.append(row)
    
    OUT.writerows(rows)
    
    # one big string for return data
    outputdata = ''.join(outdatalist)
    return outputdata
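
The while loop above left-pads rendered times so every value reads as h:mm:ss; isolated for clarity:

def pad_to_hms(rendered):
    # '5:34' -> '0:5:34', '34' -> '0:0:34'; strings already in h:mm:ss form pass through
    while len(rendered.split(':')) < 3:
        rendered = '0:' + rendered
    return rendered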
Example #23
import csv
import difflib

# pypi
#from IPython.core.debugger import Tracer; debughere = Tracer(); debughere() # set breakpoint where needed

# github

# home grown
from . import version
from . import racedb
from loutilities import timeu, csvwt

# exceptions for this module.  See __init__.py for package exceptions

# module globals
tYmd = timeu.asctime('%Y-%m-%d')
tHMS = timeu.asctime('%H:%M:%S')
tMS = timeu.asctime('%M:%S')

# SequenceMatcher to determine matching ratio, which can be used to evaluate CUTOFF value
sm = difflib.SequenceMatcher()


#----------------------------------------------------------------------
def getratio(a, b):
    #----------------------------------------------------------------------
    '''
    return the SequenceMatcher ratio for two strings
    
    :rettype: float in range [0,1]
    '''
Example #24
#
#   Unless required by applicable law or agreed to in writing, software
#   distributed under the License is distributed on an "AS IS" BASIS,
#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#   See the License for the specific language governing permissions and
#   limitations under the License.
#
###########################################################################################

# standard
import pdb
import csv

# home grown
from loutilities import timeu
tYMD = timeu.asctime('%Y-%m-%d')

#----------------------------------------------------------------------
def analyzemembership(directory,files): 
#----------------------------------------------------------------------
    '''
    compare membership statistics, year on year
    
    :param directory: directory for files and output
    :param files: list of files which contain membership data
    '''
    
    membersbymonth = {} # {year:{month:totalmembers,...}...}
    members = {}        # {dob:[{'GivenName':GivenName,'FamilyName':FamilyName},...],...}
    
    
Example #25
def render(aag,outfile,summaryfile,detailfile,minagegrade,minraces,mintrend,begindate,enddate):
#----------------------------------------------------------------------
    '''
    render collected results

    :param outfile: output file name template, like '{who}-ag-analysis-{date}-{time}.png'
    :param summaryfile: summary file name template (.csv), may include {date} field
    :param detailfile: detail file name template (.csv), may include {date} field
    :param minagegrade: minimum age grade
    :param minraces: minimum races in the same year as enddate
    :param mintrend: minimum races over the full period for trendline
    :param begindate: render races between begindate and enddate, datetime
    :param enddate: render races between begindate and enddate, datetime
    '''
    firstyear = begindate.year
    lastyear = enddate.year
    yearrange = range(firstyear,lastyear+1)
    
    summfields = ['name','age','gender']
    distcategories = ['overall'] + [TRENDLIMITS[tlimit][0] for tlimit in TRENDLIMITS]
    for stattype in ['1yr agegrade','avg agegrade','trend','numraces','stderr','r-squared','pvalue']:
        for distcategory in distcategories:
            summfields.append('{}\n{}'.format(stattype,distcategory))
        if stattype == 'numraces':
            for year in yearrange:
                summfields.append('{}\n{}'.format(stattype,year))
    
    tfile = timeu.asctime('%Y-%m-%d')
    summaryfname = summaryfile.format(date=tfile.epoch2asc(time.time()))
    _SUMM = open(summaryfname,'wb')
    SUMM = csv.DictWriter(_SUMM,summfields)
    SUMM.writeheader()
    
    detailfname = detailfile.format(date=tfile.epoch2asc(time.time()))
    detlfields = ['name','dob','gender'] + analyzeagegrade.AgeGradeStat.attrs + ['distmiles','distkm','rendertime']
    detlfields.remove('priority')   # priority is internal
    _DETL = open(detailfname,'wb')
    DETL = csv.DictWriter(_DETL,detlfields,extrasaction='ignore')
    DETL.writeheader()
    
    # create a figure used for everyone -- required to save memory
    fig = plt.figure()
    
    # loop through each member we've recorded information about
    for thisname in aag:
        rendername = thisname.title()
        
        # remove duplicate entries
        aag[thisname].deduplicate()   
        
        # crunch the numbers, and remove entries less than minagegrade
        aag[thisname].crunch()    # calculate age grade for each result
        stats = aag[thisname].get_stats()
        #for stat in stats:
        #    if stat.ag < minagegrade:
        #        aag[thisname].del_stat(stat)
        
        # write detailed file before filtering
        name,gender,dob = aag[thisname].get_runner()
        detlout = {'name':rendername,'gender':gender,'dob':tfile.dt2asc(dob)}
        for stat in stats:
            for attr in analyzeagegrade.AgeGradeStat.attrs:
                detlout[attr] = getattr(stat,attr)
                if attr == 'date':
                    detlout[attr] = tfile.dt2asc(detlout[attr])
            # interpret some of the data from the raw stat
            detlout['distkm'] = detlout['dist'] / 1000.0
            detlout['distmiles'] = detlout['dist']/METERSPERMILE
            rendertime = ren.rendertime(detlout['time'],0)
            while len(rendertime.split(':')) < 3:
                rendertime = '0:'+rendertime
            detlout['rendertime'] = rendertime
            DETL.writerow(detlout)
            
        jan1 = tfile.asc2dt('{}-1-1'.format(lastyear))
        runnerage = timeu.age(jan1,dob)
        
        # filter out runners younger than 14
        if runnerage < 14: continue

        # filter out runners who have not run enough races
        stats = aag[thisname].get_stats()
        if enddate:
            lastyear = enddate.year
        else:
            lastyear = timeu.epoch2dt(time.time()).year
        lastyearstats = [s for s in stats if s.date.year==lastyear]
        if len(lastyearstats) < minraces: continue
        
        # set up output file name template
        if outfile:
            aag[thisname].set_renderfname(outfile)

        # set up rendering parameters
        aag[thisname].set_xlim(begindate,enddate)
        aag[thisname].set_ylim(minagegrade,100)
        aag[thisname].set_colormap([200,100*METERSPERMILE])

        # clear figure, set up axes
        fig.clear()
        ax = fig.add_subplot(111)
        
        # render the results
        aag[thisname].render_stats(fig)    # plot statistics

        # set up to collect averages
        avg = collections.OrderedDict()

        # draw trendlines, write output
        allstats = aag[thisname].get_stats()
        avg['overall'] = mean([s.ag for s in allstats])
        trend = aag[thisname].render_trendline(fig,'overall',color='k')

        # retrieve output filename for hyperlink
        # must be called after set_runner and set_renderfname
        thisoutfile = aag[thisname].get_outfilename()
       
        summout = {}
        summout['name'] = '=HYPERLINK("{}","{}")'.format(thisoutfile,rendername)
        summout['age'] = runnerage
        summout['gender'] = gender
        oneyrstats = [s.ag for s in allstats if s.date.year == lastyear]
        if len(oneyrstats) > 0:
            summout['1yr agegrade\noverall'] = mean(oneyrstats)
        summout['avg agegrade\noverall'] = avg['overall']
        if len(allstats) >= mintrend:
            summout['trend\noverall'] = trend.slope
            summout['stderr\noverall'] = trend.stderr
            summout['r-squared\noverall'] = trend.rvalue**2
            summout['pvalue\noverall'] = trend.pvalue
        summout['numraces\noverall'] = len(allstats)
        for year in yearrange:
            summout['numraces\n{}'.format(year)] = len([s for s in allstats if s.date.year==year])
        for tlimit in TRENDLIMITS:
            distcategory,distcolor = TRENDLIMITS[tlimit]
            tstats = [s for s in allstats if s.dist >= tlimit[0] and s.dist <= tlimit[1]]
            if len(tstats) < mintrend: continue
            avg[distcategory] = mean([s.ag for s in tstats])
            trend = aag[thisname].render_trendline(fig,distcategory,thesestats=tstats,color=distcolor)
            
            oneyrcategory = [s.ag for s in tstats if s.date.year == lastyear]
            if len(oneyrcategory) > 0:
                summout['1yr agegrade\n{}'.format(distcategory)] = mean(oneyrcategory)
            summout['avg agegrade\n{}'.format(distcategory)] = avg[distcategory]
            summout['trend\n{}'.format(distcategory)] = trend.slope
            summout['stderr\n{}'.format(distcategory)] = trend.stderr
            summout['r-squared\n{}'.format(distcategory)] = trend.rvalue**2
            summout['pvalue\n{}'.format(distcategory)] = trend.pvalue
            summout['numraces\n{}'.format(distcategory)] = len(tstats)
        SUMM.writerow(summout)
        
        # annotate with averages
        avgstr = 'averages\n'
        for lab in avg:
            thisavg = int(round(avg[lab]))
            avgstr += '  {}: {}%\n'.format(lab,thisavg)
        avgstr += 'age (1/1/{}): {}'.format(lastyear,runnerage)
        
        # TODO: add get_*lim() to aag -- xlim and ylim are currently side-effect of aag.render_stats()
        x1,xn = ax.get_xlim()
        y1,yn = ax.get_ylim()
        xy = (x1+10,y1+10)
        aag[thisname].render_annotate(fig,avgstr,xy)
        
        # save file
        aag[thisname].save(fig)
         
    _SUMM.close()
    _DETL.close()
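
The summary header names built at the top of render() pair a statistic type with a distance category (or, for race counts, a year), separated by a newline so they stack in the spreadsheet; with illustrative categories the construction yields column names like these:

stattypes = ['1yr agegrade', 'avg agegrade', 'trend', 'numraces', 'stderr', 'r-squared', 'pvalue']
distcategories = ['overall', '5K', 'Marathon']      # illustrative, normally taken from TRENDLIMITS
yearrange = range(2012, 2015)
summfields = ['name', 'age', 'gender']
for stattype in stattypes:
    for distcategory in distcategories:
        summfields.append('{}\n{}'.format(stattype, distcategory))
    if stattype == 'numraces':
        for year in yearrange:
            summfields.append('{}\n{}'.format(stattype, year))
# -> ['name', 'age', 'gender', '1yr agegrade\noverall', ..., 'numraces\n2012', 'numraces\n2013', ...]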
Example #26
from jinja2 import Template

# homegrown
from contracts.dbmodel import db, State, Sponsor, SponsorRaceDate, SponsorBenefit, SponsorLevel
from contracts.dbmodel import SponsorRaceVbl
from contracts.dbmodel import Contract, ContractType, TemplateType
from contracts.dbmodel import STATE_COMMITTED
from contracts.contractmanager import ContractManager
from contracts.mailer import sendmail
from contracts.runsignup import RunSignUp
from contracts.trends import calculateTrend

from loutilities.tables import DbCrudApiRolePermissions, get_request_data
from loutilities.timeu import asctime

dt = asctime('%Y-%m-%d')
humandt = asctime('%B %d, %Y')


class parameterError(Exception):
    pass


debug = True


###########################################################################################
class SponsorContract(DbCrudApiRolePermissions):
    ###########################################################################################
    '''
    extend DbCrudApiRolePermissions to handle send contract request within put() [edit] method
Example #27
# pypi
#from IPython.core.debugger import Tracer; debughere = Tracer(); debughere() # set breakpoint where needed

# github

# other

# home grown
from . import version
from . import clubmember
from . import racedb
from .racedb import dbConsistencyError
from loutilities import timeu

# module globals
tYmd = timeu.asctime('%Y-%m-%d')


#----------------------------------------------------------------------
def main():
    #----------------------------------------------------------------------
    '''
    update club membership information
    '''
    parser = argparse.ArgumentParser(
        version='{0} {1}'.format('runningclub', version.__version__))
    parser.add_argument('memberfile',
                        help='csv, xls or xlsx file with member information')
    parser.add_argument(
        '-r',
        '--racedb',
Example #28
import time
import traceback

# pypi

# homegrown
from . import app
from database_flask import db   
from racedb import insert_or_update, RaceResult, Runner, Race
from race import race_fixeddist
from loutilities.csvu import str2num
from loutilities.timeu import age, asctime, epoch2dt, dt2epoch
from loutilities.agegrade import AgeGrade
from loutilities.transform import Transform

ftime = asctime('%Y-%m-%d')

RACEEPSILON = .01  # in miles, to allow for floating point precision error in database
ag = AgeGrade()

class ParameterError(Exception): pass

########################################################################
class Record():
########################################################################
    pass

########################################################################
class StoreServiceResults():
########################################################################
    '''
Example #29
from platform import system

from loutilities.timeu import asctime   # needed for asctime() and system() used below

from . import bp
from ...model import db, LocalInterest
from ...model import Member, Membership, TableUpdateTime
from ...version import __docversion__
from .viewhelpers import localinterest


class parameterError(Exception):
    pass


class dataError(Exception):
    pass


ymd = asctime('%Y-%m-%d')
isodate = asctime('%Y-%m-%d')

# https://stackoverflow.com/questions/49674902/datetime-object-without-leading-zero
if system() != 'Windows':
    cachet = asctime('%-m/%-d/%Y %-I:%M %p')
else:
    cachet = asctime('%#m/%#d/%Y %#I:%M %p')

adminguide = 'https://members.readthedocs.io/en/{docversion}/membership-admin-guide.html'.format(
    docversion=__docversion__)

##########################################################################################
# members endpoint
##########################################################################################
Example #30
from os.path import getmtime

# pypi
import flask
from flask import make_response,request
from flask.views import MethodView

# home grown
from . import app
from apicommon import failure_response, success_response
from running.runsignup import RunSignUp
from loutilities.transform import Transform
from loutilities.csvwt import wlist
from loutilities import timeu

ymd = timeu.asctime('%Y-%m-%d')
mdy = timeu.asctime('%m/%d/%Y')
cachet = timeu.asctime('%-m/%-d/%Y %-I:%M %p')

#----------------------------------------------------------------------
def members2file(club_id, mapping, key, secret, outfile=None): 
#----------------------------------------------------------------------
    '''
    convert members added per day (ordyears) to membercount per day in json format
    
    :param club_id: RunSignUp club id
    :param mapping: OrderedDict {'outfield1':'infield1', 'outfield2':outfunction(memberrec), ...}
    :param outfile: optional output file
    :param key: RunSignUp key
    :param secret: RunSignUp secret
    :rtype: lines from output file
Example #31
# standard
import csv

# pypi

# github

# other

# home grown
from racedb import Runner, Race
from loutilities import timeu
from loutilities.csvwt import wlist
import loutilities.renderrun as render

tdb = timeu.asctime('%Y-%m-%d')

METERSPERMILE = 1609.344

#----------------------------------------------------------------------
def collectresults(club_id, begindate=None, enddate=None): 
#----------------------------------------------------------------------
    '''
    collect race information from database, and save to file
    
    :param club_id: id of club to collect data for
    :param begindate: collect races between begindate and enddate, yyyy-mm-dd
    :param enddate: collect races between begindate and enddate, yyyy-mm-dd
    :rtype: csv file data, string format (e.g., data for make_response(data))
    '''
    # TODO: check format of begindate, enddate
Example #32
# other

# home grown
from loutilities import timeu
from loutilities import csvu
from loutilities import renderrun as render
from running import accessError, parameterError

# access stuff
ULTRASIGNUP_URL = 'http://ultrasignup.com'
RESULTS_SEARCH = 'service/events.svc/history/{fname}/{lname}'

HTTPTIMEOUT = 10
MPERMILE = 1609.344

tindate  = timeu.asctime('%m/%d/%Y %I:%M:%S %p')
toutdate = timeu.asctime('%Y-%m-%d')

#----------------------------------------------------------------------
def racenameanddist(eventname):
#----------------------------------------------------------------------
    '''
    get race name and distance 
    
    :param eventname: eventname from ultrasignup.com
    :rtype: racename, distmiles, distkm
    '''

    # eventname is formatted as <racename> - <dist><units>
    racetext = eventname.strip()
    fields = racetext.split('-')
Example #33
import optparse
import os.path
from lxml import etree

# pypi
from pykml.factory import KML_ElementMaker as KML
from pykml.factory import GX_ElementMaker as GX

# github
import gpxpy
import gpxpy.geo

# home grown
from loutilities import timeu

METERPMILE = 1609.3439941
t = timeu.asctime('%Y-%m-%dT%H:%M:%SZ')

# ###############################################################################
def main():
# ###############################################################################

    usage = "usage: %prog [options] <gpxfile>\n\n"
    usage += "where:\n"
    usage += "  <gpxfile>\tgpx formatted file"

    parser = optparse.OptionParser(usage=usage)
    parser.add_option("-p", "--points", dest="points", action="store_true", help="specify if points output is desired", default=False)
    parser.add_option("-f", "--flyover", dest="flyover", action="store_true", help="specify if flyover output is desired", default=False)
    parser.add_option("-c", "--color", dest="color", help="track color if not flyover", default='641400FF')
    parser.add_option("-o", "--output", dest="output", help="output file", default=None)
    (options, args) = parser.parse_args()
Example #34
# other

# home grown
from loutilities import timeu
from loutilities import csvu
from runningclub import agegrade
from runningclub import render
import runningahead
from runningahead import FIELD
import version

ag = agegrade.AgeGrade()
class invalidParameter(Exception): pass

fdate = timeu.asctime('%Y-%m-%d')
METERSPERMILE = 1609.344

#----------------------------------------------------------------------
def collect(searchfile,outfile,begindate,enddate):
#----------------------------------------------------------------------
    '''
    collect race results from runningahead
    
    :param searchfile: path to file containing names, genders, birth dates to search for
    :param outfile: output file path
    :param begindate: epoch time - choose races between begindate and enddate
    :param enddate: epoch time - choose races between begindate and enddate
    '''
    
    outfilehdr = 'GivenName,FamilyName,name,DOB,Gender,race,date,age,miles,km,time'.split(',')
Example #35
def main():
#----------------------------------------------------------------------

    parser = argparse.ArgumentParser(version='{0} {1}'.format('running',version.__version__))
    parser.add_argument('gpxfile',help='gpx formatted file')
    parser.add_argument('racestarttime',help="time of race start in '%%Y-%%m-%%dT%%H:%%M' format")
    parser.add_argument('-o','--output',help='name of output file (default %(default)s)',default='racewx.csv')
    args = parser.parse_args()

    gpxfile = args.gpxfile
    racestarttime = args.racestarttime
    timrace = timeu.asctime('%Y-%m-%dT%H:%M')
    racestartdt = timrace.asc2dt(racestarttime) # naive
    output = args.output
    
    # get input
    _GPX = open(gpxfile,'r')
    gpx = gpxpy.parse(_GPX)

    # loop through gpx tracks
    wxdata = []
    lasttime = None
    exectime = int(round(time.time()))
    for track in gpx.tracks:
        for segment in track.segments:
            for point in segment.points:
                pepoch = timeu.dt2epoch(point.time)
                if not lasttime or pepoch-lasttime >= WXPERIOD:
                    plon = point.longitude
                    plat = point.latitude
                    if not lasttime:
                        starttime = pepoch
                        tzid = gettzid(plat, plon)
                        tz = pytz.timezone(tzid)
                        racestartlocdt = tz.normalize(tz.localize(racestartdt))
                        racestartepoch = timeu.dt2epoch(racestartlocdt)
                        shift = racestartepoch - starttime
                    targtime = timeu.dt2epoch(point.time)+shift # shift to race time
                    
                    # get weather
                    # temp, dew point, cloud cover, precip intensity
                    # wind speed/bearing: http://matplotlib.org/api/pyplot_api.html#matplotlib.pyplot.barbs
                    #   (http://matplotlib.1069221.n5.nabble.com/plot-arrows-for-wind-direction-degrees-td13499.html)
                    wx = getwx(plat,plon,targtime)
                    wx['lat'] = plat
                    wx['lon'] = plon
                    wx['dewpoint'] = dewpoint(wx['temperature'],wx['humidity']*100)
                    wx['windchill'] = windchill(wx['temperature'],wx['windSpeed'])
                    wx['heatindex'] = heatindex(wx['temperature'],wx['humidity']*100)
                    wxdata.append(wx)
                    
                    lasttime = pepoch

    # create the file and write header if it doesn't exist
    if not os.path.exists(output):
        writeheader = True
        _WX = open(output,'wb')
    else:
        writeheader = False        
        _WX = open(output,'ab')
        
    heading = ['exectime', 'time', 'lat', 'lon', 'temperature', 'humidity', 'dewpoint', 'windchill', 'heatindex', 'precipType', 'precipProbability', 'precipIntensity', 'windSpeed', 'windBearing', 'cloudCover', 'summary', 'pressure', 'visibility']
    WX = csv.DictWriter(_WX,heading,extrasaction='ignore')
    if writeheader:
        WX.writeheader()
    for wx in wxdata:
        wx['exectime'] = exectime
        WX.writerow(wx)
    _WX.close()
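# minimal sketch of the time-shift logic in the loop above: every GPX point epoch is offset
# by the difference between the localized race start and the first recorded point, so
# weather is fetched for the corresponding clock time on race day (epochs are placeholders)
firstpoint_epoch = 1357048800              # epoch of first GPX point
racestart_epoch = 1357052400               # epoch of localized race start
shift = racestart_epoch - firstpoint_epoch
point_epoch = 1357049100                   # some later GPX point
targtime = point_epoch + shift             # epoch passed to getwx() for that point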
Ejemplo n.º 36
0
import csv

# pypi
#from IPython.core.debugger import Tracer; debughere = Tracer(); debughere() # set breakpoint where needed

# github

# home grown
import version
import racedb
from loutilities import timeu, csvwt

# exceptions for this module.  See __init__.py for package exceptions

# module globals
tYmd = timeu.asctime('%Y-%m-%d')
tHMS = timeu.asctime('%H:%M:%S')
tMS  = timeu.asctime('%M:%S')


# SequenceMatcher to determine matching ratio, which can be used to evaluate CUTOFF value
sm = difflib.SequenceMatcher()

#----------------------------------------------------------------------
def getratio(a,b):
#----------------------------------------------------------------------
    '''
    return the SequenceMatcher ratio for two strings
    
    :rtype: float in range [0,1]
    '''
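# illustrative only (not the elided function body): how the module-level SequenceMatcher
# produces a ratio in [0,1] for two strings
sm.set_seqs('Jane Smith', 'Jane Smyth')
print(sm.ratio())    # 0.9 == 2 * 9 matching characters / (10 + 10) total characters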
Ejemplo n.º 37
0
def updatemembercache(club_id,
                      membercachefilename,
                      key=None,
                      secret=None,
                      email=None,
                      password=None,
                      debug=False):
    #----------------------------------------------------------------------
    if debug:
        # set up debug logging
        thislogger.setLevel(logging.DEBUG)
        thislogger.propagate = True
    else:
        # error logging
        thislogger.setLevel(logging.ERROR)
        thislogger.propagate = True

    # set up access to RunSignUp
    rsu = RunSignUp(key=key,
                    secret=secret,
                    email=email,
                    password=password,
                    debug=debug)
    rsu.open()

    # transform from RunSignUp to membercache format
    xform = Transform(
        {
            'MemberID'       : lambda mem: mem['user']['user_id'],
            'MembershipID'   : 'membership_id',
            'MembershipType' : 'club_membership_level_name',
            'FamilyName'     : lambda mem: mem['user']['last_name'],
            'GivenName'      : lambda mem: mem['user']['first_name'],
            'MiddleName'     : lambda mem: mem['user']['middle_name'],
            'Gender'         : lambda mem: 'Female' if mem['user']['gender'] == 'F' else 'Male',
            'DOB'            : lambda mem: mem['user']['dob'],
            'Email'          : lambda mem: mem['user']['email'] if 'email' in mem['user'] else '',
            'PrimaryMember'  : 'primary_member',
            'JoinDate'       : 'membership_start',
            'ExpirationDate' : 'membership_end',
            'LastModified'   : 'last_modified',
        },
        sourceattr=False,  # source and target are dicts
        targetattr=False)

    # members maintains the current cache through this processing {memberkey: [memberrec, ...]}
    # currmemberrecs maintains the records for current members as of today {memberkey: memberrec}
    members = {}
    currmemberrecs = {}

    # need today's date, in same sortable date format as data coming from RunSignUp
    dt = asctime('%Y-%m-%d')
    today = dt.dt2asc(datetime.now())

    # construct key from member cache record
    def getmemberkey(memberrec):
        lastname = memberrec['FamilyName']
        firstname = memberrec['GivenName']
        dob = memberrec['DOB']
        memberkey = '{},{},{}'.format(lastname, firstname, dob)
        return memberkey

    # add record to cache, return key
    def add2cache(memberrec):
        memberkey = getmemberkey(memberrec)
        members.setdefault(memberkey, [])

        # replace any records having same expiration date
        recordlist = [
            mr for mr in members[memberkey]
            if mr['ExpirationDate'] != memberrec['ExpirationDate']
        ] + [memberrec]
        members[memberkey] = recordlist

        # keep list sorted
        sortby = 'ExpirationDate'
        members[memberkey].sort(lambda a, b: cmp(a[sortby], b[sortby]))

        # remove any overlaps
        for i in range(1, len(members[memberkey])):
            lastrec = members[memberkey][i - 1]
            thisrec = members[memberkey][i]
            # if there's an overlap, change join date to expiration date + 1 day
            if thisrec['JoinDate'] <= lastrec['ExpirationDate']:
                exp = thisrec['ExpirationDate']
                oldstart = thisrec['JoinDate']
                newstart = dt.dt2asc(
                    dt.asc2dt(lastrec['ExpirationDate']) + timedelta(1))
                thislogger.error(
                    'overlap detected: {} end={} was start={} now start={}'.
                    format(memberkey, exp, oldstart, newstart))
                thisrec['JoinDate'] = newstart

        return memberkey

    # test if in cache
    def incache(memberrec):
        memberkey = getmemberkey(memberrec)
        if memberkey not in members:
            cachedmember = False
        elif memberrec['ExpirationDate'] in [
                m['ExpirationDate'] for m in members[memberkey]
        ]:
            cachedmember = True
        else:
            cachedmember = False

        return cachedmember

    # lock cache update during execution
    rlock = RLock()
    with rlock:
        # track duration of update
        starttime = datetime.now()

        # import current cache
        # records in cache are organized in members dict with 'last,first,dob' key
        # within is list of memberships ordered by expiration date
        with open(membercachefilename, 'rb') as memfile:
            # members maintains the current cache through this processing
            # currmemberrecs maintains the records for current members as of today
            cachedmembers = DictReader(memfile)
            for memberrec in cachedmembers:
                memberkey = add2cache(memberrec)

                # current member?
                if memberrec['JoinDate'] <= today and memberrec[
                        'ExpirationDate'] >= today:
                    # member should only be in current members once
                    if memberkey in currmemberrecs:
                        thislogger.error(
                            'member duplicated in cache: {}'.format(memberkey))

                    # regardless add this record to current members
                    currmemberrecs[memberkey] = memberrec

        # get current members from RunSignUp, transforming each to cache format
        rsumembers = rsu.members(club_id)
        rsucurrmembers = []
        for rsumember in rsumembers:
            memberrec = {}
            xform.transform(rsumember, memberrec)
            rsucurrmembers.append(memberrec)

        # add new member records to cache
        # remove known (not new) member records from currmemberrecs
        # after loop currmemberrecs should contain only deleted member records
        for memberrec in rsucurrmembers:
            # remember if was incache before we add
            currmember = incache(memberrec)

            # this will replace record with same ExpirationDate
            # this allows admin updated RunSignUp data to be captured in cache
            memberkey = add2cache(memberrec)

            # remove member records we knew about already
            if currmember:
                del currmemberrecs[memberkey]

        # remove member records for deleted members
        for memberkey in currmemberrecs:
            removedrec = currmemberrecs[memberkey]
            memberkey = getmemberkey(removedrec)
            members[memberkey] = [
                mr for mr in members[memberkey] if mr != removedrec
            ]
            thislogger.debug(
                'membership removed from cache: {}'.format(removedrec))

        # recreate cache file
        # start with temporary file
        # sort members keys for ease of debugging
        cachedir = dirname(abspath(membercachefilename))
        sortedmembers = sorted(members.keys())
        with NamedTemporaryFile(mode='wb',
                                suffix='.rsucache',
                                delete=False,
                                dir=cachedir) as tempcache:
            tempmembercachefilename = tempcache.name
            cachehdr = 'MemberID,MembershipID,MembershipType,FamilyName,GivenName,MiddleName,Gender,DOB,Email,PrimaryMember,JoinDate,ExpirationDate,LastModified'.split(
                ',')
            cache = DictWriter(tempcache, cachehdr)
            cache.writeheader()
            for memberkey in sortedmembers:
                for memberrec in members[memberkey]:
                    cache.writerow(memberrec)

        # set mode of temp file to be same as current cache file (see https://stackoverflow.com/questions/5337070/how-can-i-get-a-files-permission-mask)
        cachemode = stat(membercachefilename).st_mode & 0777
        chmod(tempmembercachefilename, cachemode)

        # now overwrite the previous version of the membercachefile with the new membercachefile
        try:
            # atomic operation in Linux
            rename(tempmembercachefilename, membercachefilename)

        # should only happen under windows
        except OSError:
            remove(membercachefilename)
            rename(tempmembercachefilename, membercachefilename)

        # track duration of update
        finishtime = datetime.now()
        thislogger.debug('updatemembercache() duration={}'.format(finishtime -
                                                                  starttime))

    # release access
    rsu.close()

    # let caller know the current members, in rsu api format
    return rsumembers
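# standalone sketch of the overlap rule implemented in add2cache() above: when consecutive
# membership records overlap, the later record's JoinDate is pushed to the day after the
# earlier record's ExpirationDate (dates below are sample data)
from datetime import datetime, timedelta

def _fix_overlaps(memberships):
    # memberships is sorted by ExpirationDate, mirroring the cache's per-member list
    for prev, curr in zip(memberships, memberships[1:]):
        if curr['JoinDate'] <= prev['ExpirationDate']:
            newstart = datetime.strptime(prev['ExpirationDate'], '%Y-%m-%d') + timedelta(1)
            curr['JoinDate'] = newstart.strftime('%Y-%m-%d')
    return memberships

print(_fix_overlaps([
    {'JoinDate': '2017-01-01', 'ExpirationDate': '2017-12-31'},
    {'JoinDate': '2017-12-01', 'ExpirationDate': '2018-12-31'},   # JoinDate becomes 2018-01-01
]))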
Ejemplo n.º 38
0
from flask import g
from charset_normalizer import detect

# homegrown
from members import create_app
from members.settings import Development
from members.model import RacingTeamVolunteer, db
from members.applogging import setlogging

from loutilities.timeu import asctime
from members.model import LocalUser, RacingTeamMember, RacingTeamInfo
from members.views.admin.viewhelpers import localinterest_query_params, localinterest

class parameterError(Exception): pass

tstamp = asctime("%a %b %d %Y %H:%M:%S")
isodate = asctime("%Y-%m-%d")

def main():
    descr = '''
    Update racing team info volunteer records from csv file
    '''
    parser = ArgumentParser(description=descr)
    parser.add_argument('inputfile', help='csv file with input records', default=None)
    args = parser.parse_args()
    
    scriptdir = dirname(__file__)
    # two levels up
    scriptfolder = dirname(dirname(scriptdir))
    configdir = join(scriptfolder, 'config')
    memberconfigfile = "members.cfg"
Ejemplo n.º 39
0
# standard
import argparse
from csv import DictReader
from datetime import datetime, timedelta
from collections import OrderedDict
from json import dumps
import time

# pypi

# homegrown
from running.runsignup import RunSignUp, updatemembercache
from loutilities.configparser import getitems

from loutilities import timeu
ymd = timeu.asctime('%Y-%m-%d')
mdy = timeu.asctime('%m/%d/%Y')
md = timeu.asctime('%m-%d')


class parameterError(Exception):
    pass


#----------------------------------------------------------------------
def analyzemembership(membercachefile, statsfile=None):
    #----------------------------------------------------------------------
    # stats will be unordered dict {year1: {date1:count1, date2:count2...}, year2: {...}, ... }
    stats = {}

    with open(membercachefile, 'r', newline='') as memfile:
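# standalone sketch (not the elided function body) of the nested stats shape described in
# the comment above, {year: {date: count, ...}, ...}, built from sample dates only
stats_example = {}
for d, count in [('2019-01-01', 150), ('2019-06-30', 175), ('2020-01-01', 180)]:
    year = int(d[:4])
    stats_example.setdefault(year, {})[d] = count
print(dumps(stats_example, indent=2))    # dumps imported from json above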
Ejemplo n.º 40
0
'''
use this script to run the steeps api locally for debug

Usage::

    python localapi.py
'''

# standard
import pdb
import os
import os.path

from apiapp import app
from apiapp.config import getconfig

import time
from loutilities import timeu
tu = timeu.asctime('%Y-%m-%d %H:%M:%S')

configpath = os.path.abspath('./developapi.cfg')
config = getconfig(configpath)
app.config.update(config)
app.configtime = tu.epoch2asc(time.time())
app.configpath = configpath

# must set up logging after setting configuration
from apiapp import applogging
applogging.setlogging()

app.run()
Ejemplo n.º 41
0
import unicodecsv

# github

# home grown
import version
import racedb
from loutilities import timeu, csvwt
from loutilities.transform import Transform
from database_flask import db
from . import app

# exceptions for this module.  See __init__.py for package exceptions

# module globals
tYmd = timeu.asctime('%Y-%m-%d')
tmdY = timeu.asctime('%m/%d/%Y')
tHMS = timeu.asctime('%H:%M:%S')
tMS  = timeu.asctime('%M:%S')
rsudate = lambda date: tYmd.dt2asc(tmdY.asc2dt(date))

# SequenceMatcher to determine matching ratio, which can be used to evaluate CUTOFF value
sm = difflib.SequenceMatcher()

# normalize format from RunSignUp API
rsu_api2filemapping = OrderedDict([
                        ('MembershipID'   , 'membership_id'),
                        ('User ID'        , lambda mem: mem['user']['user_id']),
                        ('MembershipType' , 'club_membership_level_name'),
                        ('FamilyName'     , lambda mem: mem['user']['last_name']),
                        ('GivenName'      , lambda mem: mem['user']['first_name']),
Ejemplo n.º 42
0
import time

# pypi
from werkzeug.security import generate_password_hash, check_password_hash

# github

# other
from database_flask import *

# home grown
import version
from loutilities import timeu

DBDATEFMT = '%Y-%m-%d'
t = timeu.asctime(DBDATEFMT)
dbdate = timeu.asctime(DBDATEFMT)

class dbConsistencyError(Exception): pass
class parameterError(Exception): pass

rolenames = ['admin','viewer']

MAX_RACENAME_LEN = 50
MAX_LOCATION_LEN = 64

#----------------------------------------------------------------------
def getunique(session, model, **kwargs):
#----------------------------------------------------------------------
    '''
    retrieve a row from the database, raising an exception if more than one row exists for the query criteria
Ejemplo n.º 43
0
def main(): 
#----------------------------------------------------------------------
    descr = '''
    render race results from athlinks, club
    '''
    
    parser = argparse.ArgumentParser(description=descr,formatter_class=argparse.RawDescriptionHelpFormatter,
                                     version='{0} {1}'.format('running',version.__version__))
    parser.add_argument('-c','--clubfile', help="file with club results, output from exportresults",default=None)
    parser.add_argument('-a','--athlinksfile', help="file with athlinks results, output from athlinksresults",default=None)
    parser.add_argument('-u','--ultrasignupfile', help="file with club results, output from ultrasignupresults",default=None)
    parser.add_argument('-R','--runningaheadfile', help="file with club results, output from runningaheadresults",default=None)
    parser.add_argument('-o','--outfile', help="output file name template, like '{who}-ag-analysis-{date}-{time}.png', default=%(default)s",default='{who}-ag-analysis-{date}.png')
    parser.add_argument('-s','--summaryfile', help="summary file name template, default=%(default)s",default='ag-analysis-summary-{date}.csv')
    parser.add_argument('-d','--detailfile', help="detail file name template, default=%(default)s",default='ag-analysis-detail-{date}.csv')
    parser.add_argument('-g','--minagegrade', help="minimum age grade for charts, default=%(default)s",default=25)
    parser.add_argument('-r','--minraces', help="minimum races in the same year as ENDDATE, default=%(default)s",default=3)
    parser.add_argument('-t','--mintrend', help="minimum races between BEGINDATE and ENDDATE for trendline, default=%(default)s",default=5)
    parser.add_argument('-b','--begindate', help="render races between begindate and enddate, yyyy-mm-dd",default=None)
    parser.add_argument('-e','--enddate', help="render races between begindate and enddate, yyyy-mm-dd",default=None)
    args = parser.parse_args()

    athlinksfile = args.athlinksfile
    ultrasignupfile = args.ultrasignupfile
    runningaheadfile = args.runningaheadfile
    clubfile = args.clubfile
    outfile = args.outfile
    summaryfile = args.summaryfile
    detailfile = args.detailfile
    minagegrade = args.minagegrade
    minraces = args.minraces
    mintrend = args.mintrend

    argtime = timeu.asctime('%Y-%m-%d')
    if args.begindate:
        begindate = argtime.asc2dt(args.begindate)
    else:
        begindate = None
    if args.enddate:
        tmpenddate = argtime.asc2dt(args.enddate)
        enddate = datetime.datetime(tmpenddate.year,tmpenddate.month,tmpenddate.day,23,59,59)
    else:
        enddate = None
    
    # data structure to hold AnalyzeAgeGrade objects
    aag = {}
    
    # need data source file
    if not athlinksfile and not clubfile and not ultrasignupfile and not runningaheadfile:
        raise invalidParameter('athlinksfile, ultrasignupfile, runningaheadfile and/or clubfile required')

    # collect data from athlinks, if desired
    if athlinksfile:
        collectathlinks(aag,athlinksfile)
        
    # collect data from ultrasignup, if desired
    if ultrasignupfile:
        collectultrasignup(aag,ultrasignupfile)
        
    # collect data from runningahead, if desired
    if runningaheadfile:
        collectrunningahead(aag,runningaheadfile)
        
    # collect data from results database, if desired
    if clubfile:
        collectclub(aag,clubfile)
        
    # render all the data
    render(aag,outfile,summaryfile,detailfile,minagegrade,minraces,mintrend,begindate,enddate)
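# hypothetical invocation of this script (the script name and file names are placeholders):
#
#   python renderclubagstats.py -c club-results.csv -b 2013-01-01 -e 2013-12-31 \
#          -o '{who}-ag-analysis-{date}.png'
#
# at least one of -c/-a/-u/-R must be supplied, otherwise invalidParameter is raised above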
Ejemplo n.º 44
0
from datetime import date
from copy import deepcopy

# pypi
from flask import current_app, url_for, request
from jinja2 import Template

# homegrown
from contracts.dbmodel import db, Event, State, FeeBasedOn, Contract, ContractType, TemplateType
from contracts.dbmodel import STATE_COMMITTED, STATE_CONTRACT_SENT
from contracts.contractmanager import ContractManager
from contracts.mailer import sendmail
from loutilities.tables import DbCrudApiRolePermissions, get_request_data
from loutilities.timeu import asctime

dt = asctime('%Y-%m-%d')


class parameterError(Exception):
    pass


debug = True


###########################################################################################
class EventsContract(DbCrudApiRolePermissions):
    ###########################################################################################
    '''
    extend DbCrudApiRolePermissions to handle send contract request within put() [edit] method
    '''
Ejemplo n.º 45
0
import csv
from copy import copy
from racedb import Runner, Club, RaceResult, ApiCredentials
from forms import MemberForm 
#from runningclub import memberfile   # required for xlsx support
from loutilities.csvu import DictReaderStr2Num
from loutilities import timeu
from running.runsignup import RunSignUp, members2csv as rsu_members2csv
import clubmember
from clubmember import rsu_api2filemapping
from request import addscripts
from crudapi import CrudApi
from datatables_utils import getDataTableParams

# module globals
tYmd = timeu.asctime('%Y-%m-%d')
MINHDR = ['FamilyName','GivenName','Gender','DOB','RenewalDate','ExpirationDate','City','State']

class InvalidUser(Exception): pass

#----------------------------------------------------------------------
def normalizeRAmemberlist(inputstream,filterexpdate=None):
#----------------------------------------------------------------------
    '''
    Take RunningAHEAD membership list (Export individual membership records), and produce member list.
    For a given expiration date, the earliest renewal date is used
    This allows "first renewal for year" computations
    
    :param inputstream: open file with csv exported from RunningAHEAD (e.g., from request.files['file'].stream)
    :param filterexpdate: yyyy-mm-dd for expiration date to filter on, else None
    :rtype: csv file data, string format (e.g., data for make_response(data))
Ejemplo n.º 46
0
 def render_stats(self,fig):
 #-------------------------------------------------------------------------------
     '''
     plot the age grade data for each distance in self.dists
     
     :param fig: matplotlib figure to render into
     '''
     DEFAULTSIZE = 60
     
     ### DEBUG>
     debug = False
     if debug:
         tim = timeu.asctime('%Y-%m-%d-%H-%M')
         _DEB = open('analyzeagegrade-debug-{}-render.csv'.format(tim.epoch2asc(self.exectime)),'wb')
         fields = ['date','dist','ag','color','label']
         DEB = csv.DictWriter(_DEB,fields)
         DEB.writeheader()
     ### <DEBUG
 
     # make hashed scatter lists
     hdate = {}
     hag = {}
     hsize = {}
     for thisd in self.dists:
         hdate[thisd] = []
         hag[thisd] = []
         hsize[thisd] = []
     for i in range(len(self.stats)):
         d = round(self.stats[i].dist)
         hdate[d].append(self.stats[i].date)
         hag[d].append(self.stats[i].ag)
         if self.size:
             hsize[d].append(distmap(d))
 #            hsize[d].append(self.stats['size'][i])
         else:
             hsize[d].append(DEFAULTSIZE)
     
     # create figure and axes
     fig.autofmt_xdate()
     ax = fig.get_axes()[0]  # only one axes instance
     ax.set_ylabel('age grade percentage')
     #ax.fmt_xdata = mdates.DateFormatter('%Y-%m-%d') # dead?
     fig.suptitle("{}".format(self.who))
         
     lines = []
     labs = []
     l_dists = list(self.dists)
     l_dists.sort()
     fig.subplots_adjust(bottom=0.1, right=0.85, top=0.93)
     ax.grid(b=True)
     for thisd in l_dists:
         # some results for this distance may have been pulled out due to filtering
         if len(hag[thisd]) == 0: continue
         
         if int(thisd) in SUBS:
             lab = SUBS[int(thisd)]
         else:
             if thisd <=MAXMETER:
                 lab = '{0}m'.format(int(thisd))
             else:
                 lab = '{0:.1f}K'.format(thisd/1000)
         labs.append(lab)
         color = self.cmapsm.to_rgba(thisd)
         numels = len(hdate[thisd])
         line = ax.scatter(hdate[thisd],hag[thisd],s=hsize[thisd],c=[color for i in range(numels)],label=lab,linewidth=.5)
         #lines.append(line)
 
         ### DEBUG>
         if debug:
             thisstat = {}
             for i in range(len(hdate[thisd])):
                 thisstat['date'] = hdate[thisd][i]
                 thisstat['ag'] = hag[thisd][i]
                 thisstat['dist'] = thisd
                 thisstat['label'] = lab
                 thisstat['color'] = self.cmapsm.to_rgba(thisd)
                 DEB.writerow(thisstat)
         ### <DEBUG
 
     # set x (date) label format
     hfmt = mdates.DateFormatter('%m/%d/%y')
     ax.xaxis.set_major_formatter(hfmt)
     ax.xaxis.set_minor_formatter(hfmt)
     labels = ax.get_xticklabels()
     for label in labels:
         label.set_rotation(65)
         label.set_size('xx-small')
 
     # maybe user wants to set xlim
     ax.set_xlim(left=self.xlim['left'],right=self.xlim['right'])
         
     # maybe user wants to set ylim
     # check to see if any points are outside this limit, and log a warning
     if self.ylim:
         ax.set_ylim(self.ylim)
         outsidelimits = 0
         numpoints = 0
         for thisd in l_dists:
             for i in range(len(hdate[thisd])):
                 numpoints += 1
                 if hag[thisd][i] < self.ylim[0] or hag[thisd][i] > self.ylim[1]:
                     outsidelimits += 1
         if outsidelimits > 0:
             log.warning('{} of {} points found outside of ylim {}, runner {}'.format(outsidelimits,numpoints,self.ylim,self.who))
     
     # annotate
     
     ### DEBUG>
     if debug:
         _DEB.close()
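# standalone sketch of the date-axis formatting pattern used above, assuming matplotlib is
# installed; the dates and age grade values below are placeholders
import datetime
import matplotlib.pyplot as plt
import matplotlib.dates as mdates

fig, ax = plt.subplots()
dates = [datetime.date(2013, m, 1) for m in (1, 4, 7, 10)]
agpct = [62.0, 64.5, 63.1, 66.2]
ax.scatter(dates, agpct, s=60)
ax.set_ylabel('age grade percentage')
ax.xaxis.set_major_formatter(mdates.DateFormatter('%m/%d/%y'))
fig.autofmt_xdate()
fig.savefig('agegrade-sketch.png')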
Ejemplo n.º 48
0
import math

# pypi

# github

# other

# home grown
import version
import racedb
from config import softwareError
from loutilities import timeu

DBDATEFMT = racedb.DBDATEFMT
dbtime = timeu.asctime(DBDATEFMT)
rndrtim = timeu.asctime('%m/%d/%Y')


#----------------------------------------------------------------------
def getprecision(distance): 
#----------------------------------------------------------------------
    '''
    get the precision for rendering, based on distance
    
    precision might be different for time vs. age group adjusted time
    
    :param distance: distance (miles)
    :rtype: (timeprecision,agtimeprecision)
    '''
    
Ejemplo n.º 49
0
from collections import OrderedDict, defaultdict

# pypi
import requests

# github

# other

# home grown
import version
from loutilities import apikey
from loutilities import timeu
from loutilities.csvwt import record2csv

stravatime = timeu.asctime('%Y-%m-%dT%H:%M:%SZ')

KMPERMILE = 1.609344

DATEFIELD = 'start_date'

# from https://strava.github.io/api/v3/activities/
xworkout_type = {
    None : 'default',
    0    : 'default',
    1    : 'race',
    2    : 'long run',
    3    : 'workout',
    10   : 'default',
    11   : 'race',
    12   : 'workout',
Ejemplo n.º 50
0
def updatemembercache(club_id, membercachefilename, key=None, secret=None, email=None, password=None, debug=False):
#----------------------------------------------------------------------
    if debug:
        # set up debug logging
        thislogger.setLevel(logging.DEBUG)
        thislogger.propagate = True
    else:
        # error logging
        thislogger.setLevel(logging.ERROR)
        thislogger.propagate = True

    # set up access to RunSignUp
    rsu = RunSignUp(key=key, secret=secret, email=email, password=password, debug=debug)
    rsu.open()

    # transform from RunSignUp to membercache format
    xform = Transform( {
                        'MemberID'       : lambda mem: mem['user']['user_id'],
                        'MembershipID'   : 'membership_id',
                        'MembershipType' : 'club_membership_level_name',
                        'FamilyName'     : lambda mem: mem['user']['last_name'],
                        'GivenName'      : lambda mem: mem['user']['first_name'],
                        'MiddleName'     : lambda mem: mem['user']['middle_name'],
                        'Gender'         : lambda mem: 'Female' if mem['user']['gender'] == 'F' else 'Male',
                        'DOB'            : lambda mem: mem['user']['dob'],
                        'Email'          : lambda mem: mem['user']['email'] if 'email' in mem['user'] else '',
                        'PrimaryMember'  : 'primary_member',
                        'JoinDate'       : 'membership_start',
                        'ExpirationDate' : 'membership_end',
                        'LastModified'   : 'last_modified',
                       },
                       sourceattr=False, # source and target are dicts
                       targetattr=False
                     )

    # members maintains the current cache through this processing {memberkey: [memberrec, ...]}
    # currmemberrecs maintains the records for current members as of today {memberkey: memberrec}
    members = {}
    currmemberrecs = {}

    # need today's date, in same sortable date format as data coming from RunSignUp
    dt = asctime('%Y-%m-%d')
    today = dt.dt2asc(datetime.now())

    # construct key from member cache record
    def getmemberkey(memberrec):
        lastname = memberrec['FamilyName']
        firstname = memberrec['GivenName']
        dob = memberrec['DOB']
        memberkey = '{},{},{}'.format(lastname, firstname, dob)
        return memberkey

    # add record to cache, return key
    def add2cache(memberrec):
        memberkey = getmemberkey(memberrec)
        members.setdefault(memberkey,[])

        # replace any records having same expiration date
        recordlist = [mr for mr in members[memberkey] if mr['ExpirationDate'] != memberrec['ExpirationDate']] + [memberrec]
        members[memberkey] = recordlist

        # keep list sorted
        sortby = 'ExpirationDate'
        members[memberkey].sort(lambda a,b: cmp(a[sortby],b[sortby]))

        # remove any overlaps
        for i in range(1, len(members[memberkey])):
            lastrec = members[memberkey][i-1]
            thisrec = members[memberkey][i]
            # if there's an overlap, change join date to expiration date + 1 day
            if thisrec['JoinDate'] <= lastrec['ExpirationDate']:
                exp = thisrec['ExpirationDate']
                oldstart = thisrec['JoinDate']
                newstart = dt.dt2asc( dt.asc2dt(lastrec['ExpirationDate']) + timedelta(1) )
                thislogger.error('overlap detected: {} end={} was start={} now start={}'.format(memberkey, exp, oldstart, newstart))
                thisrec['JoinDate'] = newstart

        return memberkey

    # test if in cache
    def incache(memberrec):
        memberkey = getmemberkey(memberrec)
        if memberkey not in members:
            cachedmember = False
        elif memberrec['ExpirationDate'] in [m['ExpirationDate'] for m in members[memberkey]]:
            cachedmember = True
        else:
            cachedmember = False

        return cachedmember

    # lock cache update during execution
    rlock = RLock()
    with rlock:
        # track duration of update
        starttime = datetime.now()

        # import current cache
        # records in cache are organized in members dict with 'last,first,dob' key
        # within is list of memberships ordered by expiration date
        with open(membercachefilename, 'rb') as memfile:
            # members maintains the current cache through this processing
            # currmemberrecs maintains the records for current members as of today
            cachedmembers = DictReader(memfile)
            for memberrec in cachedmembers:
                memberkey = add2cache(memberrec)

                # current member?
                if memberrec['JoinDate'] <= today and memberrec['ExpirationDate'] >= today:
                    # member should only be in current members once
                    if memberkey in currmemberrecs:
                        thislogger.error( 'member duplicated in cache: {}'.format(memberkey) )
                    
                    # regardless add this record to current members
                    currmemberrecs[memberkey] = memberrec

        # get current members from RunSignUp, transforming each to cache format
        rsumembers = rsu.members(club_id)
        rsucurrmembers = []
        for rsumember in rsumembers:
            memberrec = {}
            xform.transform(rsumember, memberrec)
            rsucurrmembers.append(memberrec)

        # add new member records to cache
        # remove known (not new) member records from currmemberrecs
        # after loop currmemberrecs should contain only deleted member records
        for memberrec in rsucurrmembers:
            # remember if was incache before we add
            currmember = incache(memberrec)

            # this will replace record with same ExpirationDate
            # this allows admin updated RunSignUp data to be captured in cache
            memberkey = add2cache(memberrec)

            # remove member records we knew about already
            # if not there, skip. probably replaced record in cache
            if currmember:
                try:
                    del currmemberrecs[memberkey]
                except KeyError:
                    pass

        # remove member records for deleted members
        for memberkey in currmemberrecs:
            removedrec = currmemberrecs[memberkey]
            memberkey = getmemberkey(removedrec)
            members[memberkey] = [mr for mr in members[memberkey] if mr != removedrec]
            thislogger.debug('membership removed from cache: {}'.format(removedrec))

        # recreate cache file
        # start with temporary file
        # sort members keys for ease of debugging
        cachedir = dirname(abspath(membercachefilename))
        sortedmembers = sorted(members.keys())
        with NamedTemporaryFile(mode='wb', suffix='.rsucache', delete=False, dir=cachedir) as tempcache:
            tempmembercachefilename = tempcache.name
            cachehdr = 'MemberID,MembershipID,MembershipType,FamilyName,GivenName,MiddleName,Gender,DOB,Email,PrimaryMember,JoinDate,ExpirationDate,LastModified'.split(',')
            cache = DictWriter(tempcache, cachehdr)
            cache.writeheader()
            for memberkey in sortedmembers:
                for memberrec in members[memberkey]:
                    cache.writerow(memberrec)

        # set mode of temp file to be same as current cache file (see https://stackoverflow.com/questions/5337070/how-can-i-get-a-files-permission-mask)
        cachemode = stat(membercachefilename).st_mode & 0777
        chmod(tempmembercachefilename, cachemode)

        # now overwrite the previous version of the membercachefile with the new membercachefile
        try:
            # atomic operation in Linux
            rename(tempmembercachefilename, membercachefilename)

        # should only happen under windows
        except OSError:
            remove(membercachefilename)
            rename(tempmembercachefilename, membercachefilename)

        # track duration of update
        finishtime = datetime.now()
        thislogger.debug( 'updatemembercache() duration={}'.format(finishtime-starttime) )

    # release access
    rsu.close()

    # let caller know the current members, in rsu api format
    return rsumembers
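# hypothetical invocation of updatemembercache(); the club id, credentials, and cache path
# are placeholders, not real values
currentmembers = updatemembercache(
    999,                                   # RunSignUp club_id
    '/var/cache/club/members.rsucache',    # csv cache maintained by this function
    key='RSU_API_KEY',
    secret='RSU_API_SECRET',
    debug=True,
)
# currentmembers holds the club's current members as returned by the RunSignUp API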
Ejemplo n.º 51
0
import csv
import collections

# pypi
from pykml.factory import KML_ElementMaker as KML
from pykml.factory import GX_ElementMaker as GX

# github
import gpxpy
import gpxpy.geo

# home grown
from loutilities import timeu

METERPMILE = 1609.3439941
t = timeu.asctime('%Y-%m-%dT%H:%M:%SZ')


class invalidCoeff(Exception):
    pass


# ###############################################################################
def main():
    # ###############################################################################

    usage = "usage: %prog [options] <gpxfile1> <gpxfile2>\n\n"
    usage += "where:\n"
    usage += "  <gpxfile>\tgpx formatted file"

    parser = optparse.OptionParser(usage=usage)
Ejemplo n.º 52
0
#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#   See the License for the specific language governing permissions and
#   limitations under the License.


from argparse import ArgumentParser
from csv import reader
from datetime import timedelta
from urllib2 import urlopen
from json import loads

from loutilities.textreader import TextDictReader
from loutilities.xmldict import XmlDictObject as DictObject

from loutilities.timeu import asctime
dtf = asctime('%Y-%m-%d %H:%M:%S')

NSFILTER = timedelta(minutes=15)

# checkswap records are datetime, recordtype, value
class CheckSwapRec (object):
    def __init__(self, row):
        for field in ['datetime', 'recordtype', 'value']:
            try:
                setattr(self, field, row.pop(0))
                if field == 'value':
                    self.value = int(self.value)
            except IndexError:
                setattr(self, field, None)
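# illustrative use of CheckSwapRec with a sample csv row (values are placeholders);
# missing trailing fields default to None and 'value' is coerced to int
rec = CheckSwapRec('2013-01-01 00:00:00,swapfree,482816'.split(','))
print('{} {} {}'.format(rec.datetime, rec.recordtype, rec.value))    # 2013-01-01 00:00:00 swapfree 482816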

# check if row is start of netstat
Ejemplo n.º 53
0
from datetime import date, timedelta, datetime

# pypi
from flask import g
from flask.cli import with_appcontext
from click import argument, group

# homegrown
from scripts import catch_errors, ParameterError
from members.model import db, Meeting, Invite, StatusReport, DiscussionItem, localinterest_query_params
from members.meeting_invites import generateinvites
from members.reports import meeting_gen_reports, meeting_reports_nightly, meeting_reports_status, meeting_report2attr
from loutilities.timeu import asctime

# set up datatabase date formatter
dbdate = asctime('%Y-%m-%d')

# debug
debug = False


# needs to be before any commands
@group()
def meetings():
    """Perform meeting module tasks"""
    pass
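# hypothetical sketch of a command attached to the group above; the command name and body
# are placeholders (not part of the original module), shown only to illustrate the pattern
@meetings.command()
@with_appcontext
def countmeetings():
    """print the total number of meetings"""
    print(Meeting.query.count())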


def getstartenddates(startdate, enddate, endwindow):
    """
    calculate start and end date window from arguments
Ejemplo n.º 54
0
# pypi
from loutilities.user.model import User, Interest, Role
from loutilities.tables import DteDbRelationship, SEPARATOR
from flask import g, current_app, url_for
from dateutil.relativedelta import relativedelta
from markdown import markdown
from dominate.tags import a

# homegrown
from ...model import TaskCompletion, LocalUser, LocalInterest
from ...model import db, InputFieldData, Files, INPUT_TYPE_DATE, INPUT_TYPE_UPLOAD, localinterest_query_params
from ...helpers import positions_active, members_active, localinterest

from loutilities.timeu import asctime

dtrender = asctime('%Y-%m-%d')
dttimerender = asctime('%Y-%m-%d %H:%M:%S')
EXPIRES_SOON = 14  #days
PERIOD_WINDOW_DISPLAY = 7  # number of days, i.e., on view 2 would be stored as 2*PERIOD_WINDOW_DISPLAY days

STATUS_EXPIRES_SOON = 'expires soon'
STATUS_OVERDUE = 'overdue'
STATUS_DONE = 'done'
STATUS_NO_EXPIRATION = 'no expiration'
STATUS_OPTIONAL = 'optional'
STATUS_UP_TO_DATE = 'up to date'

# STATUS_DISPLAYORDER needs to match values in beforedatatables.js fn set_cell_status_class.classes
STATUS_DISPLAYORDER = [
    STATUS_OVERDUE, STATUS_EXPIRES_SOON, STATUS_OPTIONAL, STATUS_UP_TO_DATE,
    STATUS_DONE
Ejemplo n.º 55
0
import csv
from os.path import join as pathjoin

# pypi

# github

# home grown
import version
from loutilities.transform import Transform
from loutilities.timeu import asctime, age
from datetime import date
from collections import defaultdict, OrderedDict

# time stuff
tymd = asctime('%Y-%m-%d')

# transform DETAILS file produced by scoretility Results Analysis
xform = Transform(
            {
                'name'      : 'runnername',
                'gender'    : 'gender',
                'age'       : lambda result: age(date.today(), tymd.asc2dt(result['dob'])),
                'distmiles' : 'distmiles',
                'ag'        : lambda result: int(float(result['agpercent'])),
                'year'      : lambda result: tymd.asc2dt(result['racedate']).year
            },
            sourceattr=False,
            targetattr=True)
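# hypothetical usage of the Transform defined above: the source is a DETAILS row (dict) and,
# since targetattr=True is assumed to mean attributes are set on the target, the target is
# a plain object; field values below are sample data only
class _Row(object): pass

detail = {'runnername': 'Jane Smith', 'gender': 'F', 'dob': '1980-05-01',
          'distmiles': '6.21', 'agpercent': '71.3', 'racedate': '2015-10-31'}
row = _Row()
xform.transform(detail, row)
print('{} {} {}%'.format(row.name, row.year, row.ag))    # Jane Smith 2015 71%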

# # from https://gist.github.com/shenwei356/71dcc393ec4143f3447d
Ejemplo n.º 56
0
'''

# standard
from datetime import datetime, timedelta
from json import dumps

# pypi
from loutilities.timeu import asctime
from sortedcollections import SortedDict

# homegrown
from ..model import Member
from .admin.viewhelpers import localinterest
from ..applogging import timenow  # for logpoints

md = asctime('%m-%d')


def analyzemembership(statsfile=None):
    # stats will be unordered dict {year1: {date1:count1, date2:count2...}, year2: {...}, ... }
    stats = {}
    # stats = SortedDict(key=lambda k, v: k)

    members = Member.query.filter_by(interest=localinterest()).all()

    # for each member, add 1 for every date the membership represents
    # only go through today
    today = datetime.now().date()
    for member in members:
        thisdate = member.start_date
        enddate = member.end_date