def teams_competing_this_week():
    """Return the names of every team with a meet scheduled this week.

    Scrapes the season's week list for the week RTN flags as current,
    then collects all home and away team names from that week's
    schedule.

    Returns:
        list[str]: Team names (may contain duplicates if a team appears
        in multiple meets). Empty once the season is over, since no
        week is flagged as current then.
    """
    teams = []

    scraper = Scraper()
    weeks = scraper.get_year_weeks(ScraperConstants.Men,
                                   datetime.datetime.now().year)

    # Find the date of the week marked current. In the post season no
    # week is flagged, so the [0] lookup raises IndexError and we
    # return an empty list instead of crashing.
    try:
        date = [week for week in weeks if int(week['current']) == 1][0]['date']
    except IndexError:
        return teams

    schedule = scraper.get_schedule(ScraperConstants.Men, date)

    # Each day holds a list of meets; a virtual meet has no home teams,
    # and either side may list several teams comma-separated.
    for day in schedule:
        for meet in schedule[day]['meets']:
            if meet['away_teams'] is not None:
                teams.extend(meet['away_teams'].split(", "))
            if meet['home_teams'] is not None:
                teams.extend(meet['home_teams'].split(", "))

    return teams
def get_context_data(self, **kwargs):
    """Build the matchup page context.

    Orients the matchup so ``team1`` is always the requesting user's
    team, gathers every gymnast involved (lined up this week or on
    either roster), and records for each gymnast whether their first
    meet of the week has started.

    Returns:
        dict: Template context with ``team1``, ``team2``,
        ``current_week``, ``teams_competing``, and ``meet_started``
        (gymnast name -> bool) added.
    """
    scraper = Scraper()
    context = super().get_context_data(**kwargs)

    # Flip the matchup so team1 is always the logged-in user's team.
    if context['object'].team2.user == self.request.user:
        context['team1'] = context['object'].team2
        context['team2'] = context['object'].team1
    else:
        context['team1'] = context['object'].team1
        context['team2'] = context['object'].team2

    # Gymnasts in either team's line up for this matchup's week, plus
    # everyone on either roster; distinct() collapses the overlap.
    gymnasts = (Gymnast.objects.filter(LineUp__in=(
        LineUp.objects.filter(team=context['team1'],
                              week=context['object'].week).all()
        | LineUp.objects.filter(team=context['team2'],
                                week=context['object'].week).all()))
        | Gymnast.objects.filter(id__in=(
            context['team1'].roster.all()
            | context['team2'].roster.all()))).distinct()

    current_week = int(scraper.get_current_and_max_week(
        ScraperConstants.Men, datetime.datetime.now().year)['week'])
    context['current_week'] = current_week
    context['teams_competing'] = teams_competing_this_week()
    context['meet_started'] = {}

    weeks = scraper.get_year_weeks(ScraperConstants.Men,
                                   datetime.datetime.now().year)

    # Fixes index error once in post season: when no week matches, the
    # [0] lookup raises IndexError and we return the context without
    # meet-start info (matches the other views in this file).
    try:
        date = [week for week in weeks
                if int(week['wk']) == int(context['object'].week)][0]['date']
    except IndexError:
        return context

    schedule = scraper.get_schedule(ScraperConstants.Men, date)

    # For every meet this week, flag whether each gymnast's first meet
    # of the week has started.
    for day in schedule:
        for meet in schedule[day]['meets']:
            for gymnast in gymnasts:
                # Substring match against the comma-separated team lists.
                if (gymnast.team in str(meet['home_teams'])
                        or gymnast.team in str(meet['away_teams'])):
                    # Only the gymnast's first meet of the week counts.
                    if gymnast.name not in context['meet_started']:
                        meet_datetime = datetime.datetime.strptime(
                            str(meet['d']) + " " + str(meet['time']),
                            "%Y-%m-%d %H:%M:%S")
                        # Eastern because that's what RTN uses.
                        now = datetime.datetime.now(timezone('US/Eastern'))
                        if now.date() > meet_datetime.date():
                            context['meet_started'][gymnast.name] = True
                        elif now.date() == meet_datetime.date():
                            if meet_datetime.time() != datetime.time(0, 0, 0):
                                context['meet_started'][gymnast.name] = (
                                    now.time() > meet_datetime.time())
                            else:
                                # Midnight means the start time is unknown;
                                # assume such meets are underway by noon.
                                context['meet_started'][gymnast.name] = (
                                    now.time() >= datetime.time(12, 0, 0))
                        else:
                            context['meet_started'][gymnast.name] = False

    return context
def get_context_data(self, **kwargs):
    """Build the team page context.

    Adds the team's roster, the current week, this week's line ups,
    the list of teams competing, and a per-gymnast flag recording
    whether that gymnast's first meet of the week has started.

    Returns:
        dict: Template context with ``roster``, ``current_week``,
        ``lineups``, ``teams_competing``, and ``meet_started``
        (gymnast name -> bool) added.
    """
    scraper = Scraper()
    context = super().get_context_data(**kwargs)

    context["roster"] = context["object"].roster.all()
    context['current_week'] = int(scraper.get_current_and_max_week(
        ScraperConstants.Men, datetime.datetime.now().year)['week'])
    context["lineups"] = LineUp.objects.filter(
        team=context['object'],
        week=context['current_week']).order_by('pk')
    context['teams_competing'] = teams_competing_this_week()
    context['meet_started'] = {}

    season_weeks = scraper.get_year_weeks(ScraperConstants.Men,
                                          datetime.datetime.now().year)

    # Fixes index error once in post season: if no week entry matches
    # the current week, skip the meet-start computation entirely.
    matching = [wk for wk in season_weeks
                if int(wk['wk']) == context['current_week']]
    if not matching:
        return context
    date = matching[0]['date']

    schedule = scraper.get_schedule(ScraperConstants.Men, date)
    gymnasts = context["roster"]

    # Walk every meet this week and flag whether each rostered
    # gymnast's first meet of the week has started yet.
    for day in schedule:
        for meet in schedule[day]['meets']:
            home_list = str(meet['home_teams'])
            away_list = str(meet['away_teams'])
            for gymnast in gymnasts:
                # Substring match against the comma-separated team lists.
                if (gymnast.team not in home_list
                        and gymnast.team not in away_list):
                    continue
                # Only the first meet of the week counts per gymnast.
                if gymnast.name in context['meet_started']:
                    continue

                starts_at = datetime.datetime.strptime(
                    "%s %s" % (meet['d'], meet['time']),
                    "%Y-%m-%d %H:%M:%S")
                # Eastern because that's what RTN uses.
                now = datetime.datetime.now(timezone('US/Eastern'))

                if now.date() > starts_at.date():
                    started = True
                elif now.date() < starts_at.date():
                    started = False
                elif starts_at.time() != datetime.time(0, 0, 0):
                    # Same day with a known start time.
                    started = now.time() > starts_at.time()
                else:
                    # Midnight means the start time is unknown; treat
                    # such meets as underway once it is noon.
                    started = now.time() >= datetime.time(12, 0, 0)

                context['meet_started'][gymnast.name] = started

    return context
def handle(self, *args, **options):
    """Scrape and store every meet score for the given week and year.

    Fetches the schedule for ``options['week']`` of ``options['year']``,
    pulls each meet's results, skips scores already recorded, and saves
    the rest while incrementally updating each gymnast's running
    per-event average.

    Args:
        options: Must contain ``week`` (int) and ``year`` (int).
    """
    scraper = Scraper()
    start_time = round(time.time() * 1000)

    print("Getting all matchups for week %s" % options['week'])

    try:
        # Get all weeks and their dates for the season.
        weeks = scraper.get_year_weeks(ScraperConstants.Men, options['year'])
        # Date of the requested week; IndexError if it doesn't exist.
        date = [week for week in weeks
                if int(week['wk']) == options['week']][0]['date']
    except Exception:
        # Was a bare except; keep SystemExit/KeyboardInterrupt deliverable.
        traceback.print_exc()
        return

    try:
        schedule = scraper.get_schedule(ScraperConstants.Men, date)
    except Exception:
        traceback.print_exc()
        return

    # Build a list of (meet id, day of meet, meet name).
    meets = []
    for day in schedule:
        for meet in schedule[day]['meets']:
            away_teams = meet['away_teams']
            home_teams = meet['home_teams']
            # Name the meet depending on home vs. away teams or virtual.
            if home_teams is None:
                meet_name = "%s (Virtual)" % away_teams
            else:
                meet_name = "%s @ %s" % (away_teams, home_teams)
            meets.append((meet['meet_id'], day, meet_name))

    scores = []      # new Score rows to insert
    num_skipped = 0  # results already present in the database

    for meet_id, day, meet_name in meets:
        print("Getting meet results for %s" % meet_name)

        try:
            meet_results = scraper.get_meet_results(ScraperConstants.Men,
                                                    meet_id)
        except Exception:
            traceback.print_exc()
            return

        # Save the score of everyone who competed, event by event.
        for event_index_name in EVENT_NAMES_DICT:
            for result in meet_results[event_index_name]:
                # Look up the gymnast who posted this result.
                gymnast = Gymnast.objects.filter(
                    rtn_id=result['gid']).first()

                # Skip scores already recorded for this
                # gymnast/event/day/week; only build the Score object
                # when we actually intend to keep it.
                if Score.objects.filter(
                        gymnast=gymnast,
                        date=day,
                        event=EVENT_NAMES_DICT[event_index_name],
                        week=options['week']).exists():
                    num_skipped += 1
                else:
                    scores.append(Score(
                        event=EVENT_NAMES_DICT[event_index_name],
                        score=float(result['score']),
                        gymnast=gymnast,
                        date=day,
                        meet=meet_name,
                        week=options['week']))

    # Save new scores, folding each into the gymnast's running average.
    for score in scores:
        average = Average.objects.get(gymnast=score.gymnast,
                                      event=score.event)
        average.number_of_scores += 1
        average.score = ((average.score * (average.number_of_scores - 1))
                         + decimal.Decimal(score.score)) / average.number_of_scores
        average.save()
        score.save()

    print("")
    print("------ RESULTS ------")
    print("Added %s scores" % len(scores))
    print("Skipped %s existing scores" % num_skipped)
    print("Took %s ms" % (round(time.time() * 1000) - start_time))