Example #1
def test_convert_to_dict():
    post = Post(123, 'TestTitle', 'http://google.com')
    post_dict = post.to_dict()

    assert post_dict['_id'] == 123
    assert post_dict['title'] == 'TestTitle'
    assert post_dict['url'] == 'http://google.com'
Example #2
def test_initialization():
    post = Post(123, 'TestTitle', 'http://google.com')

    assert post._id == 123
    assert post.title == 'TestTitle'
    assert post.url == 'http://google.com'
    assert type(post.created) == datetime
Example #3
def profile():
    form = PostForm()
    if form.validate_on_submit():
        url = ''
        now = datetime.datetime.now().strftime("%H:%M %d-%m-%y")
        post = Post(current_user.username, form.title.data, form.content.data,
                    now, url, current_user.profile_pic)
        post.upload()
        socketio.emit("newPost", include_self=True)
        return redirect(url_for('home'))

    # Console Q&A loop: runs whenever the form was not submitted or failed validation.
    var = 'yes'
    while var == 'yes':
        inp = input("How are you feeling today?: ")

        print(doc.predict(inp, search_by='answer', topk=1, answer_only=True))
        var = input("Do you want more assistance?")

    return render_template('post.html', title='Post', form=form)
Example #4
def test_init_from_tr():
    tr_text = '<tr class="athing" id="22098832"><td align="right" valign="top" class="title"><span class="rank">4.</span></td><td valign="top" class="votelinks"><center><a id="up_22098832" href="vote?id=22098832&amp;how=up&amp;goto=news"><div class="votearrow" title="upvote"></div></a></center></td><td class="title"><a href="https://www.sqlite.org/howtocorrupt.html" class="storylink">How to Corrupt an SQLite Database File</a><span class="sitebit comhead"> (<a href="from?site=sqlite.org"><span class="sitestr">sqlite.org</span></a>)</span></td></tr>'
    soup = BeautifulSoup(tr_text, features="html.parser")
    tr = soup.find('tr')

    post = Post.from_html_elem(tr)

    assert post._id == 22098832
    assert post.url == 'https://www.sqlite.org/howtocorrupt.html'
    assert post.title == 'How to Corrupt an SQLite Database File'
Example #5
    def fetchPosts(self):
        # One entry per page, sorted alphabetically by page name.
        pageDict = [{'name': username, 'data': []} for username in self.usernames]
        pageDict = sorted(pageDict, key=lambda page: page['name'].lower())

        # Batch request: each page's feed (messages plus like summaries) and fan count.
        getPageLikes = self.graph.get_objects(
            ids=self.usernames,
            fields='feed{likes.summary(true), message}, fan_count')

        for username in getPageLikes:
            item = self.findInListDict(pageDict, 'name', username)
            item['page_likes'] = getPageLikes[username]['fan_count']
            for data in getPageLikes[username]['feed']['data']:
                message = data.get('message', "")
                item['data'].append(
                    Post(data['id'], message, data['likes']['summary']['total_count']))

        return pageDict
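
Example #5 also relies on a findInListDict helper that is not shown above. A minimal sketch of what it presumably does (return the first dict in the list whose key matches the given value); the implementation here is assumed, not taken from the original project:

    def findInListDict(self, list_dict, key, value):
        # Assumed behavior: return the first dict whose `key` equals `value`.
        for item in list_dict:
            if item.get(key) == value:
                return item
        return None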
Example #6
    def __init__(self, from_feed=False, debug=False):
        self.debug = debug
        self.from_feed = from_feed
        self.main_url = 'https://pikabu.ru'
        self.post = Post()
Example #7
class Parse_Post():
    def __init__(self, from_feed=False, debug=False):
        self.debug = debug
        self.from_feed = from_feed
        self.main_url = 'https://pikabu.ru'
        self.post = Post()

    def parse(self, soup):
        # Fields present on both feed pages and single-story pages.
        self.post.set_title(
            self._clean_text(
                soup.findAll("header", {"class": "story__header"})[0].text))
        self.post.set_rating(
            soup.findAll("div", {"class": "story__rating-count"})[0].text)
        self.post.set_user_url(
            self.main_url +
            soup.findAll("a", {"class": "user__nick"})[0]['href'])
        self.post.set_datetime(
            soup.findAll("time", {"class": "story__datetime"})[0]['datetime'])
        self.post.set_url(
            soup.findAll(
                "a",
                {"class": "story__comments-link"})[0]['href'].split("#")[0])

        # Story body: plain text, HTML, embedded images, non-image links and
        # per-block content.
        content_divs = soup.findAll("div", {"class": "story__content-inner"})
        if content_divs:
            content = content_divs[0]
            self.post.set_content(self._clean_text(content.text))
            self.post.set_content_html(content.prettify())
            for img in content.findAll("img"):
                self.post.add_media(img['data-large-image'])
            for link in content.findAll("a"):
                if link['href'].split('.')[-1] != 'png':
                    self.post.add_link(link['href'])
            for block in soup.findAll("div", {"class": "story-block"}):
                if block.findAll('img'):
                    self.post.add_content_block(
                        block.findAll('img')[0]['data-large-image'])
                else:
                    self.post.add_content_block(
                        self._clean_text(block.text))

        for tag in soup.findAll("div",
                                {"class": "story__tags"})[0].findChildren():
            self.post.add_tag({
                'name': tag.text,
                'url': self.main_url + tag['href']
            })

        if not self.from_feed:
            # Single-story page: detailed vote counts, ids, save/share counters
            # and the full comment tree.
            self.post.set_pluses(
                soup.findAll(
                    "div", {"class": "page-story__rating"})[0]['data-pluses'])
            self.post.set_minuses(
                soup.findAll(
                    "div", {"class": "page-story__rating"})[0]['data-minuses'])
            self.post.set_post_id(
                soup.findAll("section",
                             {"class": "comments_show"})[0]['data-story-id'])
            self.post.set_username(
                soup.findAll(
                    "section",
                    {"class": "comments_show"})[0]['data-story-username'])
            self.post.set_comments_count(
                soup.findAll("section",
                             {"class": "comments_show"})[0]['data-total'])
            self.post.set_saves(
                soup.findAll("span", {"class": "story__save-count"})[0].text)
            self.post.set_shares(
                soup.findAll("span", {"class": "story__share-count"})[0].text)
            parent_comment = Parse_Comment()
            self.post.set_comments(parent_comment.parse(soup))
        else:
            # Feed page: only the basics are exposed in the markup.
            self.post.set_post_id(
                soup.findAll(
                    "div",
                    {"class": "story__rating-block"})[0]['data-story-id'])
            self.post.set_username(
                soup.findAll("a", {"class": "user__nick"})[0].text)
            self.post.set_comments_count(
                soup.findAll("span",
                             {"class": "story__comments-link-count"})[0].text)

        if self.debug:
            self.post.print_values()
        return self.post

    def _clean_text(self, text):
        # Strip newlines, carriage returns and tabs from scraped text.
        for clean_word in ('\n', '\r', '\t'):
            text = text.replace(clean_word, '')
        return text
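
To show how Parse_Post from Example #7 might be driven, here is a brief usage sketch. The requests-based fetching and the URL are assumptions for illustration; the original project's entry point is not shown above.

import requests
from bs4 import BeautifulSoup

# Hypothetical driver: fetch a single story page and parse it.
# The URL below is a placeholder, not taken from the original project.
html = requests.get('https://pikabu.ru/story/example_post_1234567').text
soup = BeautifulSoup(html, 'html.parser')

parser = Parse_Post(from_feed=False, debug=True)
post = parser.parse(soup)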
Example #8
def test_convert_to_string():
    post = Post(123, 'TestTitle', 'http://google.com')

    assert str(post).startswith("Post #123: TestTitle [http://google.com]")
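
Examples #1, #2, #4 and #8 all exercise the same small Post interface: the (id, title, url) constructor, a created timestamp, to_dict, from_html_elem and __str__. For reference, a minimal sketch of a Post class that those tests would pass against; only the attribute and method names come from the tests, everything else here is assumed and the original projects' implementations will differ.

from datetime import datetime


class Post:
    # Minimal sketch consistent with the tests above; not the original class.
    def __init__(self, _id, title, url):
        self._id = _id
        self.title = title
        self.url = url
        self.created = datetime.now()  # assumed: stamped at construction time

    @classmethod
    def from_html_elem(cls, tr):
        # Assumed mapping from a Hacker News <tr> row (see Example #4).
        link = tr.find('a', class_='storylink')
        return cls(int(tr['id']), link.text, link['href'])

    def to_dict(self):
        return {'_id': self._id, 'title': self.title, 'url': self.url}

    def __str__(self):
        return 'Post #{}: {} [{}]'.format(self._id, self.title, self.url)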