# NOTE(review): this chunk arrived collapsed onto a single physical line; it is
# reformatted here for readability with comments added — the tokens are
# unchanged. The `except` below pairs with a `try:` that begins BEFORE this
# excerpt, and `repo`, `branch`, `hashes`, `github_api` and `notifier` are all
# defined earlier in the file, so the flattened indentation shown here is a
# best-effort reconstruction — confirm against the original layout.
for file in repo['files']:
    # Fetch the file's current blob metadata from the GitHub API.
    response = github_api.get_file(repo['repository'], file)
    sha = response['sha']
    path = file_path(repo['repository'], file)
    # A changed SHA for a file we have seen before on this branch means a new
    # commit touched a watched file.
    if file in hashes[repo['repository']][branch] and hashes[
            repo['repository']][branch][file] != sha:
        response = github_api.get_file_commits(
            repo['repository'], file)
        # presumably newest-first ordering, so [0] is the latest commit that
        # touched the file — TODO confirm against github_api.get_file_commits.
        commit = response[0]
        notifier.append(notifier.notify_commit(file, commit))
        # Record the new SHA so the same commit is not reported again.
        hashes[repo['repository']][branch][file] = sha
# Flush all queued per-file notifications for this repository in one batch.
notifier.send_notifications(
    '[{}] A new commit changed watched files.'.format(
        repo['repository']))
# Handler for the `try:` that opens above this excerpt: an API access failure
# (surfaced as PermissionError) queues an error notification, flushes, and
# re-raises so the caller still sees the failure.
except PermissionError as e:
    # NOTE(review): the second argument to append and the title passed to
    # send_notifications look swapped — '[...] API Error!' reads like the
    # intended notification title, while the title actually sent repeats the
    # success-path message. Confirm Notifier.append's signature before fixing.
    notifier.append(notifier.error(e.args[0]),
                    '[{}] API Error!'.format(repo['repository']))
    notifier.send_notifications(
        '[{}] A new commit changed watched files.'.format(
            repo['repository']))
    raise e
# Persist the updated SHA map so subsequent runs only report commits that are
# genuinely new. Runs after the try/except, i.e. only state gathered so far
# before any re-raise is NOT saved here — TODO confirm intended semantics.
with open('hashes.json', 'w') as fp:
    json.dump(hashes, fp)
# NOTE(review): this chunk arrived collapsed onto a single physical line; it is
# reformatted here for readability with comments added — the tokens are
# unchanged. `config`, `args`, `verbosity`, `Notifier`, `OutputMethodError`,
# `AppDirs`, `NoOpCalendarHandler` and `CalendarHandler` are defined outside
# this excerpt.
try:
    notifier = Notifier(config=config["notifier"], verbosity=verbosity)
except OutputMethodError as e:
    # A misconfigured notifier output method is fatal: report and stop.
    sys.exit(e.message)
else:
    # NOTE(review): as tokenized, this `else` belongs to the try/except, which
    # means a SUCCESSFULLY configured notifier is immediately replaced by a
    # default one — almost certainly wrong. The original most likely read
    # `if "notifier" in config:` around the try/except with this assignment as
    # the if's else-branch, and the `if` header was lost when the file was
    # flattened. Confirm against project history before relying on this
    # reconstruction.
    notifier = Notifier(verbosity=verbosity)

# Setup directory finder
dirs = AppDirs(__app_name__, __author__)

# Setup calendar handler
if args.dry_run:
    # Dry runs must not touch the real calendar; use the no-op stand-in.
    cal = NoOpCalendarHandler()
else:
    # Both the secrets section and the client secret file are required for
    # real calendar access; bail out early with a clear error otherwise.
    if "secrets" not in config or "client_secret_file" not in config["secrets"]:
        notifier.error("Client secret file not specified.")
        sys.exit(1)
    cal = CalendarHandler(config["secrets"]["client_secret_file"],
                          dirs.user_cache_dir,
                          config["secrets"]["calendar_id"],
                          notifier)

# == Scrape website ==
# Define structure for storing scraped values
scraped_subs = []
scraped_subs_titles = []

URL = "https://subscriptions.marvel.com/accounts/myaccount.asp"

# Create cookie header, to get past the authentication
# (the cookie value itself is read from config further on — only its presence
# is validated here).
if "secrets" not in config or "marvel_cookie" not in config["secrets"]:
    notifier.error("Marvel cookie not found.")
    sys.exit(1)