def __init__(self, args, config, driver):
    self.refresh_interval = config.refresh_interval
    self.max_price = config.max_price
    self.scheduler = sched.scheduler()
    self.scrapers = init_scrapers(driver, config.urls)
    for s in self.scrapers:
        self.schedule(s)
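The constructor hands every scraper to a schedule() method that is not shown here. A minimal sketch of how that scheduling might work with the standard sched module, assuming each scraper exposes a scrape() method and should fire every refresh_interval seconds:

def schedule(self, scraper):
    def tick():
        try:
            scraper.scrape()  # assumed scraper interface
        finally:
            # re-arm the timer so this scraper keeps firing every interval
            self.scheduler.enter(self.refresh_interval, 1, tick)

    self.scheduler.enter(self.refresh_interval, 1, tick)

Because every entry re-queues itself, a later call to self.scheduler.run() blocks and keeps polling until the process is interrupted.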
def main():
    args = parse_args()
    logging.getLogger().setLevel(
        logging.DEBUG if args.verbose else logging.INFO)
    try:
        config = parse_config(args.config)
        drivers = init_drivers(config)
        scrapers = init_scrapers(config, drivers)
        hunt(args, config, scrapers)
    except Exception:
        logging.exception('caught exception')
        sys.exit(1)
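parse_args() is not shown either; a small argparse sketch that is compatible with the attributes used above (args.config and args.verbose) could look like the following. The flag names and the default config path are assumptions.

import argparse

def parse_args():
    parser = argparse.ArgumentParser(
        description="poll product pages and report price/stock changes")
    parser.add_argument("--config", default="config.yml",
                        help="path to the configuration file")
    parser.add_argument("--verbose", action="store_true",
                        help="enable debug logging")
    return parser.parse_args()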
def main():
    args = parse_args()
    logging.getLogger().setLevel(
        logging.DEBUG if args.verbose else logging.INFO)
    try:
        alerters = init_alerters(args)
        config = parse_config(args.config)
        drivers = init_drivers(config)
        scrapers = init_scrapers(config, drivers)
        if args.test_alerts:
            logging.info("Sending test alert")
            alerters(subject="This is a test",
                     content="This is only a test")
        hunt(alerters, config, scrapers)
    except Exception:
        logging.exception('caught exception')
        sys.exit(1)
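init_alerters() is also left out, but main() calls its return value directly with subject and content keyword arguments, so it presumably returns a callable. A sketch under that assumption, reusing the alerter classes from the constructor below; the args.alerters list of enabled alerter names is hypothetical:

def init_alerters(args):
    alert_types = {
        "email": EmailAlerter,
        "discord": DiscordAlerter,
        "slack": SlackAlerter
    }
    # args.alerters (a list of alerter names) is hypothetical, for illustration
    active = [alert_types[name](args) for name in args.alerters]

    def send_all(subject, content):
        # fan a single alert out to every configured alerter
        for alerter in active:
            alerter(subject=subject, content=content)

    return send_all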
def __init__(self, args, config, driver):
    alert_types = {
        "email": EmailAlerter,
        "discord": DiscordAlerter,
        "slack": SlackAlerter
    }
    self.alerter = alert_types[args.alerter_type](args)
    logging.debug(
        f"selected alerter: {args.alerter_type} -> {self.alerter}")
    self.refresh_interval = config.refresh_interval
    self.max_price = config.max_price
    self.scheduler = sched.scheduler()
    self.scrapers = init_scrapers(driver, config.urls)
    for s in self.scrapers:
        self.schedule(s)
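The alert_types mapping implies a common interface: every alerter is constructed from args and, judging by the test alert in main(), is callable with subject and content. A sketch of that interface; the DiscordAlerter body (a plain webhook POST) and the args.webhook_url field are assumptions, not the project's actual implementation:

import requests

class Alerter:
    def __init__(self, args):
        self.args = args

    def __call__(self, subject, content):
        raise NotImplementedError

class DiscordAlerter(Alerter):
    def __init__(self, args):
        super().__init__(args)
        self.webhook_url = args.webhook_url  # assumed field on args

    def __call__(self, subject, content):
        # post the alert as a simple Discord webhook message
        requests.post(self.webhook_url,
                      json={"content": f"**{subject}**\n{content}"})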