def _load_history(self):
    if not os.path.isfile(self.history_file_name):
        log.info('Unable to locate history file %s. Skipping Load',
                 self.history_file_name)
        return
    with open(self.history_file_name, 'r') as f:
        for line in f:
            self.sent_notifications.append(line.strip('\n'))
    log.info('Loaded %s notifications from history',
             len(self.sent_notifications))
Example #2
def parse_search_page(self, page: BeautifulSoup) -> List[ProductInfo]:
    products = []
    search_results = self._get_search_results(page)
    log.info('Loaded %s search results', len(search_results))
    for r in search_results:
        if self._is_sponsored_search_result(r):
            continue
        product_data = self._get_product_data_from_search_result(r)
        if product_data:
            products.append(product_data)
    return products
Example #3
def send_notification(self, msg: Text, identifier: Text) -> None:
    if self.notification_history.has_been_notified(identifier):
        log.info('Already sent notification for identifier %s', identifier)
        return
    for agent in self.notification_agents:
        log.info('Sending notification to %s', agent.name)
        log.debug(msg)
        try:
            agent.send(msg)
            self.notification_history.add_history(identifier)
        except Exception:
            # log.exception already records the traceback.
            log.exception('Failed to send notification')
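Note that this method relies on only two things from each agent: a name attribute and a send() method. The NotificationAgent type hinted in Example #6 is not shown on this page; the stub below is purely illustrative of that implied interface, not the project's actual class.

from typing import Text


class ConsoleAgent:
    """Illustrative agent satisfying the implied interface: .name and .send()."""

    def __init__(self, name: Text = 'console'):
        self.name = name

    def send(self, msg: Text) -> None:
        # A real agent (e.g. the DiscordAgent in Example #7) would post msg
        # to an external service; here we simply print it.
        print(msg)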
Example #4
def check_product_pages(self) -> List[ProductInfo]:
    all_results = []
    for url in self.product_pages:
        log.info('Checking product page: %s', url)
        page_source = self._load_page(url)
        if not page_source:
            log.error('Did not get page source. Skipping %s', url)
            continue
        page = BeautifulSoup(page_source, 'html.parser')

        result = self.parse_product_page(page)
        if result:
            result.url = url
            all_results.append(result)
    return all_results
Example #5
def check_search_pages(self) -> List[ProductInfo]:
    all_results = []
    for url in self.search_pages:
        log.info('Checking search page: %s', url)
        page_source = self._load_page(url)
        if not page_source:
            log.error('Did not get page source. Skipping %s', url)
            continue
        page = BeautifulSoup(page_source, 'html.parser')
        all_results += self.parse_search_page(page)
    for r in all_results:
        log.debug(r)
    return all_results
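Both this method and check_product_pages rely on a _load_page helper that is not shown on this page. Judging by the `if not page_source` check above, it appears to return a falsy value on failure. The sketch below is a minimal version under that assumption, using requests; the timeout value is arbitrary and the helper itself is not taken from the source.

import logging
from typing import Optional, Text

import requests

log = logging.getLogger(__name__)


def _load_page(self, url: Text) -> Optional[Text]:
    # Hypothetical fetch helper: return the raw HTML, or None so callers can skip.
    try:
        response = requests.get(url, timeout=30)
        response.raise_for_status()
        return response.text
    except requests.RequestException:
        log.exception('Failed to load %s', url)
        return None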
Example #6
def register_agent(self, agent: NotificationAgent) -> None:
    log.info('Registered notification agent %s', agent.name)
    self.notification_agents.append(agent)
Example #7
def get_discord_agent(config: dict) -> DiscordAgent:
    if 'webhook' not in config or not config['webhook']:
        raise InvalidNotificationAgentConfig(
            'Discord config is missing webhook URL')
    log.info('Creating discord agent')
    return DiscordAgent(config['webhook'], config['name'])
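A short usage sketch for this factory, assuming the config section is a plain dict as the signature suggests; the webhook URL below is a placeholder, not a real endpoint.

config = {
    'name': 'stock-alerts',                                      # illustrative name
    'webhook': 'https://discord.com/api/webhooks/<id>/<token>',  # placeholder
}
agent = get_discord_agent(config)
# The returned agent can then be passed to register_agent() from Example #6.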
Example #8
def _is_sponsored_search_result(self, result: Tag) -> bool:
    ad_box = result.find('a', {'class': 'txt-ads-box'})
    if ad_box:
        log.info('Skipping ad in search results')
        return True
    return False