def _get_item(self, soup):
    """Collect news links from the <li> items in *soup* and append them to self.news_list.

    For each <li> that contains a <div> wrapping an <h4>, the first <a> inside the
    <h4> supplies both the headline text and the (relative) article URL; the URL is
    resolved against ``self.dns`` after stripping any leading '../' segments.

    Args:
        soup: a BeautifulSoup document (or tag) to scan.  # assumes bs4 API — findAll/find
    """
    for li_item in soup.findAll('li'):
        # Hoist each lookup once — the original re-ran the same find() chain
        # five times per item, re-traversing the subtree each time.
        div = li_item.find('div')
        if not div:
            continue
        h4 = div.find('h4')
        if not h4:
            continue
        anchor = h4.find('a')
        href = anchor.get('href')
        self.news_list.append(News(
            str(anchor.string),
            self.dns + href.replace('../', ''),
            ''))
def _scrap():
    """Fetch new Exame articles (skipping already-fetched links) and persist them.

    Flow: log start -> get unseen (href, title) pairs -> download each article's
    paragraphs and publish date -> build News objects -> save batch -> log end.
    """
    LogUtils.start('Exame')
    hrefs, titles = get_news_ignoring_fetched_links(db.get_fetched_links())
    news = []
    # zip the parallel lists instead of indexing with range(len(...)).
    for href, title in zip(hrefs, titles):
        paragraphs, publish_date = get_news_content_by_href(href)
        news.append(News(
            title,
            href,
            paragraphs,
            # Fixed mojibake: 'NotÃcias' was UTF-8 'Notícias' mis-decoded as Latin-1.
            'EXAME Notícias',
            'https://abrilexame.files.wordpress.com/2019/08/logo-exame.png?w=150',
            publish_date))
    db.save_all_news(news)
    LogUtils.end('Exame', hrefs)
def _scrap():
    """Fetch new Suno articles (skipping already-fetched links) and persist them.

    Flow: log start -> get unseen (href, title) pairs -> download each article's
    paragraphs and publish date -> build News objects -> save batch -> log end.
    """
    LogUtils.start('Suno')
    news = []
    hrefs, titles = get_news_ignoring_fetched_links(db.get_fetched_links())
    # zip the parallel lists instead of indexing with range(len(...)).
    for href, title in zip(hrefs, titles):
        paragraphs, publish_date = get_news_content_by_href(href)
        news.append(News(
            title,
            href,
            paragraphs,
            # Fixed mojibake: 'NotÃcias' was UTF-8 'Notícias' mis-decoded as Latin-1.
            'Suno Notícias',
            'https://www.sunoresearch.com.br/wp-content/uploads/2019/12/suno-research.jpg',
            publish_date))
    db.save_all_news(news)
    LogUtils.end('Suno', hrefs)
def _scrap():
    """Fetch new InfoMoney articles (skipping already-fetched links) and persist them.

    Flow: log start -> get unseen (href, title) pairs -> download each article's
    paragraphs and publish date -> build News objects -> save batch -> log end.
    """
    LogUtils.start('Info Money')
    news = []
    hrefs, titles = get_news_ignoring_fetched_links(db.get_fetched_links())
    # zip the parallel lists instead of indexing with range(len(...)).
    for href, title in zip(hrefs, titles):
        paragraphs, publish_date = get_news_content_by_href(href)
        news.append(News(
            title,
            href,
            paragraphs,
            'InfoMoney',
            'https://is2-ssl.mzstatic.com/image/thumb/Purple123/v4/5c/df/a9/5cdfa9b4-913f-8b4d-a99d-1c6f2662061e/AppIcon-0-1x_U007emarketing-0-0-85-220-0-4.png/1200x630wa.png',
            publish_date))
    db.save_all_news(news)
    LogUtils.end('Info Money', hrefs)
def run_updater():
    """Refresh stored news articles for every stock in resources/stock_links.json.

    For each stock code: load its News record (creating one if absent), re-query
    articles by the stock's display name, and persist the update. Mongo connection
    failures are reported per-stock and skipped; a 10-second pause between stocks
    throttles the external requests.
    """
    # Open the file once, with a context manager — the original opened it twice
    # (the second load fed an unused `stock_list`) and never closed either handle.
    with open('resources/stock_links.json') as fh:
        stock_dict = json.load(fh)
    for stock in stock_dict:
        try:
            news = News.get_by_stock_code(collection="news_articles", stock_code=stock)
            if news is None:
                # First time we see this stock: start a fresh record.
                news = News(stock_code=stock)
            news.load_articles(query=stock_dict[stock]['name'])
            news.update_articles()
            print("Updated: " + stock)
        except ConnectionFailure:
            print("Unable to connect Mongodb...")
        finally:
            # Rate-limit between stocks regardless of success or failure.
            time.sleep(10)
    print("Update completed...")
# NOTE(review): this line is an extraction-mangled fragment with its indentation
# collapsed. It begins MID-FUNCTION — the head of the routine that fills `s2`
# (apparently a degree-centrality spreader search over `world.agents`) is not
# visible here — then defines activate_agents(), then runs top-level simulation
# setup: two News objects (fake vs counter news), a 500-agent world with normally
# distributed thresholds clipped to [0, 1], fixed independence 0.1, and the
# selection of 1 fake-news and 5 counter-news spreader nodes, for 30 steps.
# Because the statement nesting is ambiguous without indentation, the code is
# deliberately left byte-identical; reconstruct against the original repository
# before editing.
if d > bdeg: bdeg = d best = world.agents[a] degree_centrality.append(best) s2 = {} for agent in degree_centrality: s2[agent] = news.name return s2 def activate_agents(agents): for agent in agents.keys(): agent.states[agents[agent]] = AgentState.ACTIVE fake_news = News(0, 0.9, 0.5) counter_news = News(1, 0.5, 0.1) news_cycle = {fake_news.name: fake_news, counter_news.name: counter_news} number_agents = 500 names_agents = [_ for _ in range(number_agents)] threshold = np.clip(np.random.normal(0.5, 0.1, number_agents), 0.0, 1.0) #mu = 0.5, sigma = 0.1 independence = np.full( number_agents, 0.1 ) #np.clip(np.random.normal(0.5, 0.1, number_agents), 0.0, 1.0) #mu = 0.5, sigma = 0.1 w = construct_world(names_agents, threshold, independence, news_cycle) fakenews_spreader = find_degreecentral_nodes(w, 1, [], fake_news) counternews_spreader = find_degreecentral_nodes( w, 5, [a for a in fakenews_spreader.keys()], counter_news) steps = 30
def create_news(self, json_news):
    """Wrap each raw JSON entry in a News object.

    Args:
        json_news: iterable of raw article payloads (one per News).

    Returns:
        list[News]: one News per input entry, in the same order.
    """
    # Comprehension replaces the manual append loop (same order, same result).
    return [News(entry) for entry in json_news]