def extract_news(trending_terms, get_objects=False, dangerous=False, safe=True):
    """Crawl news sites for the given trending terms and extract article data.

    Pipeline: Navigator.navigate finds websites for the terms (honoring the
    dangerous/safe flags), Seeker.seek collects article links from those
    sites, and MaxSubSequence.extract_data processes each article — it
    writes results to a file (to be FTP'ed) and, when get_objects is True,
    also returns a Link object for the article.

    Args:
        trending_terms: Terms to search for; passed straight to Navigator.
        get_objects: When True, collect and return the extracted Link objects.
        dangerous: Forwarded to Navigator.navigate — presumably allows risky
            sites; TODO confirm semantics against Navigator.
        safe: Forwarded to Navigator.navigate — presumably a safe-search
            filter; TODO confirm semantics against Navigator.

    Returns:
        A list of truthy Link objects when get_objects is True, otherwise an
        empty list. (Previously the get_objects=False path implicitly
        returned None; an empty list is truthiness-compatible and gives the
        function a consistent return type.)
    """
    # Navigator.navigate returns a list of NewsWebsite objects.
    websites = Navigator.navigate(trending_terms, dangerous, safe)
    # Seeker.seek returns a list of Link objects (article links).
    articles = Seeker.seek(websites)

    if not get_objects:
        # Side-effect only: extract_data writes each article to a file.
        for article in articles:
            MaxSubSequence.extract_data(article, get_objects)
        return []

    # Collect only truthy results, matching the original's `if link` filter.
    links = []
    for article in articles:
        link = MaxSubSequence.extract_data(article, get_objects)
        if link:
            links.append(link)
    return links