def __init__(self, supermarkets=None, force_refresh=False):
    """Create a CachingScraper for the given supermarket(s).

    Keyword arguments:
    supermarkets -- a list of supermarkets to scrape; defaults to all
                    known supermarkets (supermarket_names())
    force_refresh -- if True, cachefiles will not be used
    """
    # A default-argument expression runs once, when the def statement
    # executes — the original `supermarkets=supermarket_names()` froze
    # the supermarket list at import time. Resolve it per-call instead.
    if supermarkets is None:
        supermarkets = supermarket_names()
    self.force_refresh = force_refresh
    self.supermarkets = supermarkets
    self.reactor_control = ReactorControl()
def run():
    """Main method.

    Check which supermarkets were requested, create a scraper, then
    search the scraped data.
    """
    (options, args) = parse_args()
    # --all overrides any single --supermarket selection.
    if options.all:
        supermarkets = supermarket_names()
    else:
        supermarkets = [options.supermarket]
    scraper = CachingScraper(supermarkets, options.force_refresh)
    log.start()
    scraper.get_data()
    # Each input line becomes one search phrase: the list of its
    # whitespace-separated words. Remaining CLI args are treated as
    # input filenames by fileinput (stdin if none given).
    search_phrases = [line.split() for line in fileinput.input(args)]
    for supermarket in supermarkets:
        log.msg("*** Savvy buys in %s ***" % supermarket.upper())
        search_file(search_phrases, supermarket_filename(supermarket))