def update(self):
    """Pull the XML feed and analyze every item published since the last run.

    Computes a UTC cutoff of ``now - self.frequency``; each ``<item>``
    whose ``pubDate`` is newer than the cutoff is handed to
    ``self.analyze``.  On the very first run (``self.last_run is None``)
    the age filter is disabled and every item is analyzed.
    """
    # Anything published before this moment counts as already seen.
    since_last_run = datetime.now(timezone('UTC')) - self.frequency
    for item in self.update_xml('item',
                                ['title', 'link', 'pubDate', 'description']):
        pub_date = parse_date_to_utc(item['pubDate'])
        if self.last_run is not None and since_last_run > pub_date:
            # Too old: skip this item but keep scanning the rest of the
            # feed.  (The previous ``return`` aborted the whole pull,
            # silently dropping any newer items appearing later in an
            # unsorted feed; the sibling XML puller already uses
            # ``continue`` here.)
            continue
        self.analyze(item, pub_date)
def update(self):
    """Pull the XML feed and hand each sufficiently recent item to analyze().

    Items older than ``now - self.frequency`` are ignored once a previous
    run has been recorded (``self.last_run``); on the very first run every
    item is analyzed.
    """
    cutoff = datetime.now(timezone('UTC')) - self.frequency
    fields = ["title", "link", "pubDate", "description"]
    for entry in self.update_xml('item', fields):
        published = parse_date_to_utc(entry['pubDate'])
        # Analyze unless a prior run exists AND the entry predates the
        # cutoff (same filter as before, stated positively).
        if self.last_run is None or published >= cutoff:
            self.analyze(entry, published)
def update(self):
    """Pull the CSV feed and analyze rows first seen since the last run.

    Rows are skipped when empty or when they are the header row (first
    field starts with ``'phish_id'``).  Once a previous run exists
    (``self.last_run``), rows whose first-seen timestamp — the fourth
    column, index 3 — is older than ``now - self.frequency`` are also
    skipped.  On the very first run every data row is analyzed.
    """
    # Anything first seen before this moment counts as already processed.
    since_last_run = datetime.now(timezone('UTC')) - self.frequency
    for line in self.update_csv(delimiter=',', quotechar='"'):
        # Skip blank rows and the CSV header row.
        if not line or line[0].startswith('phish_id'):
            continue
        first_seen = parse_date_to_utc(line[3])
        if self.last_run is not None and since_last_run > first_seen:
            # Too old: skip this row but keep scanning the rest of the
            # file.  (The previous ``return`` aborted the whole pull,
            # silently dropping newer rows appearing later in the CSV,
            # whose ordering is not guaranteed.)
            continue
        self.analyze(line, first_seen)