def test_get_comments_per_tag(self):
    tag = Tag("tagname1")
    tag.register_article(Article('t', 0, 0, 0, None, ["tagname1"]))
    tag.register_article(Article('t', 0, 0, 2, None, ["tagname1"]))
    self.assertEqual(
        {tag: 1},
        tag_analytics.get_comments_per_article_for_each_tag([tag]))
def test_get_reactions_per_tag(self):
    tag = Tag("tagname1")
    tag.register_article(Article('t', 0, 6, 0, None, ["tagname1"]))
    tag.register_article(Article('t', 0, 8, 0, None, ["tagname1"]))
    self.assertEqual(
        {tag: 7},
        tag_analytics.get_reactions_per_article_for_each_tag([tag]))
def test_get_views_per_tag(self):
    tag = Tag("tagname1")
    tag.register_article(Article('t', 300, 0, 0, None, ["tagname1"]))
    tag.register_article(Article('t', 500, 0, 0, None, ["tagname1"]))
    self.assertEqual(
        {tag: 400},
        tag_analytics.get_views_per_article_for_each_tag([tag]))
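# The three averaging tests above pin down the behaviour expected of the
# tag_analytics helpers: each returns a dict mapping every tag to the mean
# value per registered article (e.g. views 300 and 500 -> 400). A minimal
# sketch consistent with those expectations, assuming Tag exposes the
# aggregate counters and the articles list used in the registration tests
# further down; this is not the project's actual implementation:
def _views_per_article_for_each_tag_sketch(tags):
    # mean views per registered article for every tag;
    # the comments and reactions variants would work the same way
    return {tag: tag.views / len(tag.articles) for tag in tags}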
def setUp(self):
    self.articles = [
        Article("title1", 100, 10, 1, None),
        Article("title2", 900, 90, 9, None),
        Article("title3", 300, 30, 3, None),
        Article("title4", 400, 40, 4, None),
        Article("title5", 500, 50, 5, None),
        Article("title6", 600, 60, 6, None),
        Article("title7", 700, 70, 7, None),
        Article("title8", 800, 80, 8, None),
        Article("title9", 200, 20, 2, None),
        Article("title10", 1000, 100, 10, None),
    ]
def test_get_articles_json(self):
    user = User("laura_miller", "laura", "miller")
    article = Article("Some text", "A Title", user)
    article_two = Article("Other text", "Other article", user)
    self.session.add_all([user, article, article_two])
    self.session.commit()
    response = self.client.get("/json/articles")
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response.get_json(),
                     [{"title": "A Title"}, {"title": "Other article"}])
def test_get_most_reactions_per_article_tag(self):
    most_popular_tag = Tag("tagname1")
    most_popular_tag.register_article(
        Article('t', 0, 6, 0, None, ["tagname1"]))
    most_popular_tag.register_article(
        Article('t', 0, 8, 0, None, ["tagname1"]))
    other_tag = Tag("tagname2")
    other_tag.register_article(Article('t', 0, 2, 0, None, ["tagname2"]))
    other_tag.register_article(Article('t', 0, 8, 0, None, ["tagname2"]))
    self.assertEqual(
        most_popular_tag,
        tag_analytics.get_most_reactions_per_article_tag(
            [most_popular_tag, other_tag]))
def test_get_most_comments_per_article_tag(self):
    most_popular_tag = Tag("tagname1")
    most_popular_tag.register_article(
        Article('t', 0, 0, 3, None, ["tagname1"]))
    most_popular_tag.register_article(
        Article('t', 0, 0, 3, None, ["tagname1"]))
    other_tag = Tag("tagname2")
    other_tag.register_article(Article('t', 0, 0, 3, None, ["tagname2"]))
    other_tag.register_article(Article('t', 0, 0, 1, None, ["tagname2"]))
    self.assertEqual(
        most_popular_tag,
        tag_analytics.get_most_comments_per_article_tag(
            [most_popular_tag, other_tag]))
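# The two "most popular" tests above only require picking the tag whose
# per-article average is highest. A hedged sketch of that selection rule,
# again assuming the reactions counter and articles list on Tag; not
# necessarily how tag_analytics implements it:
def _most_reactions_per_article_tag_sketch(tags):
    # tag with the highest mean reactions per registered article
    return max(tags, key=lambda tag: tag.reactions / len(tag.articles))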
def test_article_of_tag_is_registered(self):
    article = Article("article", 100, 10, 1, None, ["tagname1"])
    self.tags[0].register_article(article)
    self.assertEqual(self.tags[0].views, 100)
    self.assertEqual(self.tags[0].reactions, 10)
    self.assertEqual(self.tags[0].comments, 1)
    self.assertEqual(self.tags[0].articles, [article])
def get_article(id):
    articles = mongo.db.article.find_one_or_404({'_id': id})
    article = Article(entries=articles)
    if article.access_level == 0:
        return article.serialize()
    else:
        raise ApiError(NO_AUTH, 403)
def get_random_article(self):
    """Get a random article on the page

    :rtype: Article
    """
    links = self.soup.select(".article-img-link")
    random_index = random.randint(0, len(links) - 1)
    return Article(links[random_index].get('href'))
def delete(self, id):
    article = Article().find(id)
    if article.user().id == Auth.user().id:
        article.delete()
    return RedirectResponse(request.base_uri + '/articles')
def test_article_of_different_tag_is_not_registered(self):
    article = Article("article", 100, 10, 1, None, ["tagname0"])
    self.tags[0].register_article(article)
    self.assertEqual(self.tags[0].views, 0)
    self.assertEqual(self.tags[0].reactions, 0)
    self.assertEqual(self.tags[0].comments, 0)
    self.assertEqual(len(self.tags[0].articles), 0)
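# The two registration tests above imply that Tag.register_article only counts
# an article whose tag list contains the tag's own name, accumulating its
# views, reactions and comments. A minimal sketch of a Tag satisfying both
# tests; attribute names are taken from the assertions, everything else is an
# assumption rather than the project's real class:
class _TagSketch:
    def __init__(self, name):
        self.name = name
        self.views = 0
        self.reactions = 0
        self.comments = 0
        self.articles = []

    def register_article(self, article):
        # ignore articles that are not tagged with this tag's name
        if self.name not in article.tags:
            return
        self.views += article.views
        self.reactions += article.reactions
        self.comments += article.comments
        self.articles.append(article)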
def update(self, id, request_data):
    article = Article().find(id)
    if Auth.user().id == article.user_id:
        article.update(request_data)
    return RedirectResponse(article.url())
def edit(self, id):
    article = Article().find(id)
    if article.user().id == Auth.user().id:
        return view('articles.edit', article=article)
    return ErrorResponse('Unauthorised', 500)
def get_articles_from_climate_feedback_feedbacks_scrap(articles_max=3,
                                                       editor_user=None,
                                                       store=None):
    # store collects every object currently being created,
    # as { articles: [], ..., users: [] }, so that we don't create a new
    # user object when we scrap a review article written by a user
    # that has already been created.
    if store is None:
        store = {"users": []}

    result = requests.get('https://climatefeedback.org/feedbacks')
    soup = BeautifulSoup(result.text, 'html.parser')

    articles = []
    evaluation_rows = soup.find("main").find_all("div", class_="row")
    logger.info('{} evaluation rows and we pick {} max'.format(
        len(evaluation_rows), articles_max))

    for (evaluation_row_index, evaluation_row) in enumerate(
            evaluation_rows[:articles_max]):
        logger.info('article {}...'.format(evaluation_row_index))
        evaluation_media_body = evaluation_row.find("div", class_="media-body")
        article = Article()
        evaluation_url = evaluation_media_body.find("a")['href']
        set_article_from_climate_feedback_evaluation_scrap(
            article, evaluation_url, editor_user, store)
        articles.append(article)

    return articles
def test_parse_articles_from_lines_many_articles(
        param_parse_articles_from_lines_many_articles):
    article_param, string = param_parse_articles_from_lines_many_articles
    articles = []
    for param in article_param:
        name, author, date, text = param
        articles.append(Article(name, author, date, text))
    assert parse_articles_from_lines(string.split('\n')) == articles
def new():
    if request.method == 'GET':
        return render_template('admin/new.html')
    if request.method == 'POST':
        form = request.form
        a = Article(form)
        a.save()
        return redirect(url_for('index.index'))
def set_user_from_climate_feedback_user_scrap(user, path, store=None):
    if store is None:
        store = {}
    result = requests.get('https://climatefeedback.org{}'.format(path))
    soup = BeautifulSoup(result.text, 'html.parser')
    info = soup.find("div", class_="med-body")

    name = info.find("h2", class_="noborder").text
    first_name = None
    last_name = name
    if ' ' in name:
        name_chunks = name.split(' ')
        first_name = name_chunks[0]
        last_name = ' '.join(name_chunks[1:])
    user.firstName = first_name
    user.lastName = last_name

    # the first paragraph holds "<title>, <affiliation>"
    paragraphs = info.find_all("p")
    situation_line = paragraphs[0].text
    user.title = situation_line.split(",")[0]
    user.affiliation = situation_line.split(",")[1]
    user.external_thumb_url = soup.find("img", class_="avatar")['src']

    expertise_line = paragraphs[1].text
    if 'Expertise:' in expertise_line:
        expertise = expertise_line.split('Expertise: ')[1]
    else:
        expertise = None
    user.expertise = expertise

    orcid = info.find("a", href=re.compile("https://orcid.org/(.*)"))
    if orcid:
        user.orcidId = orcid['href'].split('https://orcid.org/')[1]

    website = info.find("a", text="Website")
    if website:
        user.websiteUrl = website['href']

    publication_image = info.find("img", alt="publication")
    if publication_image:
        publication_anchors = publication_image.parent.find_all("a")
        for publication_anchor in publication_anchors:
            publication_dict = {
                "tags": "isValidatedAsPeerPublication",
                "url": publication_anchor['href']
            }
            publication = Publication.query.filter_by(
                url=publication_dict['url']).first()
            if not publication:
                publication = Article(**publication_dict)
                publication.populate_from_dict(
                    resolve_with_url(publication.url))
            UserArticle(article=publication, user=user)
def test_delete_article_json(self):
    user = User("laura_miller", "laura", "miller")
    article = Article("Some text", "A Title", user)
    self.session.add_all([user, article])
    self.session.commit()
    response = self.client.delete("/json/articles/{0}".format(article.id))
    self.assertEqual(response.status_code, 204)
    self.assertEqual(response.get_json(), None)
def test_get_all_lines_from_articles(param_get_all_lines_from_articles):
    articles_param, result = param_get_all_lines_from_articles
    articles = []
    for param in articles_param:
        name, author, date, text = param
        articles.append(Article(name, author, date, text))
    assert is_equal_lists(get_all_lines_from_articles(articles), result)
def get_article():
    form = json.loads(request.get_data(as_text=True))
    article = Article.find_by(chapter_id=form["chapter_id"])
    if article is None:
        # no article for this chapter: return a placeholder record
        article = Article()
        article.title = "空"  # "empty"
        article.content = "当前章节没有文章,请直接刷题"  # "this chapter has no article, go straight to the exercises"
        return Response(json.dumps(article.__dict__, ensure_ascii=False),
                        content_type='application/json')
    else:
        return Response(json_util.dumps(article.__dict__, ensure_ascii=False),
                        content_type='application/json')
def test_article_constructor_and_to_string(param_article_model_example):
    name, author, date, text, string = param_article_model_example
    article = Article(name, author, date, text)
    assert article.name == name
    assert article.author == author
    assert article.date == strptime(date, '%d.%m.%Y, %H:%M')
    assert article.text == text
    assert str(article) == string
    assert article.all_text == [name, text]
def when_get_articles_with_one_far_around_keyword_returns_no_result(app):
    # given
    article1 = Article(url="http://article1.com",
                       authors=None,
                       summary=None,
                       tags=None,
                       title="Can hipster-neo-farmers save the world ?")
    article2 = Article(url="http://article2.com",
                       authors=None,
                       summary=None,
                       tags=None,
                       title="Do we have enough quinoa for all the children ?")
    ApiHandler.save(article1, article2)

    # when
    articles = filter_articles_with_keywords('hipsterssss').all()

    # then
    assert len(articles) == 0
def post_article(title, content):
    logging.error("DB QUERY BEING RUN")
    b = Article(path=title, content=content)
    key = b.put()
    # update memcache (a cache miss returns None, so start a fresh list)
    m = memcache.get(title)
    if m is None:
        m = []
    m.append(b)
    memcache.set(title, m)
    return b
def resolve_news(self, info):
    news = picture_domain.get_news()
    for article in news:
        yield Article(
            title=article.get('title', ''),
            url=article.get('url', ''),
            image=article.get('featured_image', ''),
            site=article.get('news_site_long', ''),
            date=article.get('published_date', ''),
            tags=article.get('tags', ''),
            categories=article.get('categories', ''),
        )
def parse_detail(self):
    box_prices = self.get_by_class("buybox__prices")
    description = self.get_by_class("product-header__name").text.replace("\"", "")
    info_boxes = self.find_by_class("nutrition-more-info__container")
    offer_price = self.try_get(
        box_prices, "buybox__price-strikethrough",
        lambda x: float(x.strip().replace("€", "").replace(",", ".")),
        print_exc=False)
    price = self.try_get(
        box_prices, "buybox__price--current",
        lambda x: float(x.strip().replace("€", "").replace(",", ".")),
        print_exc=False)
    if price is None:
        price = self.try_get(
            box_prices, "buybox__price",
            lambda x: float(x.strip().replace("€/l", "").replace("€", "").replace(",", ".")))
    if price is None:
        print(self.browser.current_url)
        print(box_prices, box_prices.text)
    pum = self.try_get(
        box_prices, "buybox__price-per-unit",
        lambda x: float(x.strip().split(" ")[0].replace(",", ".")))
    info_dt = self.get_info_dt(info_boxes)
    meassure_str = info_dt.get("Medidas", None)
    size = self.get_size_from_desc(description)
    meassure_desc = self.get_meassure_desc(description)
    matches = re.search(r"R-\d+", self.browser.current_url)
    identifier = matches.group(0) if matches is not None else ""
    return Article(
        description=description,
        brand=info_dt.get("Marca", "").replace(",", ""),
        name=info_dt.get("Denominación legal", "").replace(",", ""),
        price=price,
        market="carrefour",
        offer_price=str(offer_price),
        meassure=meassure_str[-1] if meassure_str is not None else "",
        pum=pum,
        size=size,
        meassure_description="",
        identifier=identifier,
        timestamp=time.time())
def test_get_article_json(self):
    user = User("laura_miller", "laura", "miller")
    article = Article("Some text", "A Title", user)
    self.session.add_all([user, article])
    self.session.commit()
    response = self.client.get("/json/articles/{0}".format(article.id))
    self.assertEqual(response.status_code, 200)
    self.assertEqual(
        response.get_json(), {
            "title": article.title,
            "text": article.text,
            "created_by": article.created_by.username
        })
def post(self):
    data = NewArticle.parse.parse_args()
    article_data = Article(data['id_author'], data['title'], data['content'])
    try:
        article_data.save_to_db()
    except Exception:
        return {"message": "An error occurred creating the article."}, 500
    return {"message": "New article created successfully."}, 201
def when_get_articles_with_keyword_tag_returns_result(app):
    # given
    article1 = Article(url="http://article1.com",
                       authors=None,
                       summary=None,
                       tags=None,
                       title="Can hipster-neo-farmers save the world ?")
    article2 = Article(url="http://article2.com",
                       authors=None,
                       summary=None,
                       tags=None,
                       title="Do we have enough quinoa for all the children ?")
    tag1 = Tag(text="climate")
    article_tag1 = ArticleTag(article=article1, tag=tag1)
    ApiHandler.save(article1, article2, article_tag1)

    # when
    articles = filter_articles_with_keywords('clim').all()

    # then
    assert len(articles) == 1
    assert article1 in articles
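# Taken together, the two filter_articles_with_keywords tests pin down a simple
# matching rule: a keyword matches an article when it is contained in the text
# of a linked tag, so 'clim' finds the article tagged "climate" while the
# longer 'hipsterssss' matches nothing. A plain-Python sketch of that
# containment rule, ignoring the SQLAlchemy query interface the real helper
# returns; the function name and arguments here are assumptions:
def _matches_keyword_sketch(article_tag_texts, keyword):
    # True when the keyword is a case-insensitive substring of any tag text,
    # e.g. 'clim' matches 'climate' but 'hipsterssss' matches nothing
    keyword = keyword.lower()
    return any(keyword in text.lower() for text in article_tag_texts)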
def parse_detail(self):
    description = self.find_element_by_xpath("//h1[@itemprop='name']").text
    try:
        product_name = self.find_by_id("nutritionalinformation")\
            .find_elements_by_class_name("form_field-label")[0]\
            .get_attribute("innerHTML").strip()
    except NoSuchElementException:
        product_name = ""
    brand = re.sub('[^A-Z]', '', description)
    prices = self.find_by_class("big-price")
    price = float(prices[0].text.strip().replace("€", "").replace(",", "."))
    if len(prices) == 2:
        offer_price = float(
            prices[1].text.strip().replace("€", "").replace(",", "."))
    else:
        offer_price = None
    pum = float(
        re.sub('[^0-9 | ^","]', '',
               self.get_by_class("average-price").text.replace(",", ".")))
    raw_description = description.split()
    meassure_description = ' '.join(raw_description[-3:])
    meassure_raw = raw_description[-2:]
    size = float(
        re.sub('[^0-9 | ^"."]', '',
               meassure_raw[-2])) if meassure_raw is not None else ""
    meassure = meassure_raw[-1] if meassure_raw is not None else ""
    meassure = meassure.replace("t", "")
    return Article(description=description,
                   brand=brand,
                   name=product_name,
                   price=price,
                   market="dia",
                   offer_price=offer_price,
                   meassure=meassure,
                   pum=pum,
                   size=size,
                   meassure_description=meassure_description,
                   identifier=self.browser.current_url.split("/")[-1],
                   timestamp=time.time())