def commit(self):
    """
    all matches selected are scheduled for data collection using crontab
    all matches selected that are 100 minutes past their kickoff time
    are updated
    """
    committed_schedule = self.refresh_jobs()
    matches_past = committed_schedule["MATCHES_PAST"]
    matches_to_schedule = committed_schedule["SCHEDULE"]
    for match in matches_past:
        for i in range(3):
            try:
                scrape.Games(self.config).refresh_json(match)
                updated = 'updated'
            except (WebDriverException, TimeoutException) as exc:
                logging.info(f'>> {type(exc).__name__}, retrying...')
                kill_all(self.config)
                time.sleep(uniform(3, 5))
                continue
            except Exception:
                logging.info(traceback.format_exc())
                kill_all(self.config)
                sys.exit()
            else:
                break
        else:
            updated = f'update failed after {i+1} attempts'
        logging.info(f'>> {match[0]} {updated}')
    self.update_crontab(matches_to_schedule)
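# kill_all is called above but not defined in this section; presumably it
# reaps stray browser/driver processes left behind by a failed scrape so the
# next attempt starts clean. A hypothetical sketch using psutil (the process
# names are assumptions, not taken from the original code):
import psutil

def kill_all(config):
    for proc in psutil.process_iter(['name']):
        if proc.info['name'] in ('chrome', 'chromedriver', 'firefox', 'geckodriver'):
            try:
                proc.kill()
            except psutil.NoSuchProcess:
                pass  # the process exited on its own between iteration and kill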
def commit(self):
    """
    all matches selected are scheduled for data collection using crontab
    all matches selected that are 100 minutes past their kickoff time
    are updated
    """
    committed_schedule = self.refresh_jobs()
    matches_past = committed_schedule["MATCHES_PAST"]
    matches_to_schedule = committed_schedule["SCHEDULE"]
    for match in matches_past:
        scrape.Games(self.config).refresh_json(match)
    self.update_crontab(matches_to_schedule)
    logging.info('>> new schedule committed')
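# update_crontab is called by both commit variants but not shown here. A
# minimal sketch of what it might look like with the python-crontab package,
# assuming SCHEDULE maps a guid (which appears to embed the kickoff, given
# how cron_job strips '/' and ':' from it) to a match id, and that main.py
# exposes a --cron flag (all assumptions; see cron_job below):
from datetime import datetime
from crontab import CronTab

def update_crontab(self, matches_to_schedule):
    cron = CronTab(user=True)
    # rebuild the schedule from scratch: drop jobs tagged by a previous commit
    cron.remove_all(comment='match-scraper')
    for guid, match_id in matches_to_schedule.items():
        job = cron.new(
            command=f'python3 {self.config["PROJECT_PATH"]}main.py --cron {match_id}',
            comment='match-scraper',
        )
        # assumed guid layout: the kickoff leads, e.g. '2021/05/01 15:00 ...'
        kickoff = datetime.strptime(guid[:16], '%Y/%m/%d %H:%M')
        job.setall(kickoff)  # python-crontab accepts a datetime here
    cron.write()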
def cron_job(config, match_id):
    """
    used as the argument when running updates from cron
    """
    with open(f'{config["PROJECT_PATH"]}user_selection.json') as json_file:
        user_schedule = json.load(json_file)
    file_name = None
    for guid, m_id in user_schedule["SCHEDULE"].items():
        if m_id == match_id:
            file_name = guid.replace('/', '').replace(':', '')
            break
    display = Display(visible=0, size=(1024, 768))
    display.start()
    try:
        scrape.Games(config).refresh_json((file_name, match_id))
    finally:
        # make sure the virtual display is torn down even if the scrape fails
        display.stop()
    logging.info(f'>> {file_name} updated')
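# A note on the virtual-display handling above: pyvirtualdisplay's Display
# also supports the context-manager protocol, which stops the display even
# when the scrape raises, without an explicit try/finally:
from pyvirtualdisplay import Display

with Display(visible=0, size=(1024, 768)):
    scrape.Games(config).refresh_json((file_name, match_id))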
def update(self):
    """
    updates the games_list.json cache
    """
    games_list = []
    for _ in range(3):
        try:
            games_list = scrape.Games(config=self.config).get_game_list()
            with open(f'{self.config["PROJECT_PATH"]}games_list.json', 'w+') as outfile:
                json.dump(games_list, outfile, indent=4)
            time.sleep(uniform(3, 6))
        except TimeoutException:
            continue
        else:
            break
    else:
        logging.info('>> update failed after 3 attempts.')
        sys.exit()
    return games_list
def update(self):
    """
    updates the games_list.json cache
    """
    games_list = []
    for i in range(3):
        try:
            games_list = scrape.Games(config=self.config).get_game_list()
            with open('./games_list.json', 'w+') as outfile:
                json.dump(games_list, outfile, indent=4)
            time.sleep(uniform(3, 6))
        except (TimeoutException, WebDriverException):
            time.sleep(uniform(2, 3))
            continue
        else:
            break
    else:
        logging.info(f'>> update failed after {i+1} attempts.')
    return games_list
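# The try/retry/back-off pattern above is repeated almost verbatim in commit,
# update, and cron_job. One way to factor it out is a small helper; a sketch,
# not part of the original code (names and defaults are assumptions):
import logging
import time
from random import uniform

from selenium.common.exceptions import TimeoutException, WebDriverException

def with_retries(fn, attempts=3, low=3, high=5):
    """Call fn(), pausing a random few seconds between Selenium failures."""
    for i in range(attempts):
        try:
            return fn()
        except (TimeoutException, WebDriverException):
            logging.info('>> attempt %d failed, retrying...', i + 1)
            time.sleep(uniform(low, high))
    raise RuntimeError(f'update failed after {attempts} attempts')

# e.g. games_list = with_retries(lambda: scrape.Games(config=self.config).get_game_list())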
def cron_job(config, match_id):
    """
    used as the argument when running updates from cron
    """
    with open(
        f'{os.path.dirname(os.path.realpath(__file__))}/user_selection.json'
    ) as json_file:
        user_schedule = json.load(json_file)
    file_name = None
    for guid, m_id in user_schedule["SCHEDULE"].items():
        if m_id == match_id:
            file_name = guid.replace('/', '').replace(':', '')
            break
    for i in range(3):
        try:
            scrape.Games(config).refresh_json((file_name, match_id))
            updated = 'updated'
        except (WebDriverException, TimeoutException) as exc:
            logging.info(f'>> {type(exc).__name__}, retrying...')
            kill_all(config)
            time.sleep(uniform(3, 5))
            continue
        except Exception:
            logging.info(traceback.format_exc())
            kill_all(config)
            sys.exit()
        else:
            break
    else:
        updated = f'update failed after {i+1} attempts'
    logging.info(f'>> {file_name} {updated}')
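# cron_job needs a command-line entry point for crontab to invoke; nothing in
# this section shows how that is wired up. A hypothetical sketch with argparse
# (the --cron flag and the load_config helper are assumptions):
import argparse

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--cron', metavar='MATCH_ID',
                        help='refresh a single match from a cron job')
    args = parser.parse_args()
    if args.cron:
        cron_job(load_config(), args.cron)  # load_config() is hypothetical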